gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.server;
import com.google.common.base.Joiner;
import com.typesafe.config.ConfigValueFactory;
import org.apache.drill.test.BaseTestQuery;
import org.apache.drill.common.config.DrillProperties;
import org.apache.drill.common.config.DrillConfig;
import org.apache.drill.common.util.DrillStringUtils;
import org.apache.drill.exec.ExecConstants;
import org.apache.drill.exec.rpc.user.security.testing.UserAuthenticatorTestImpl;
import org.apache.drill.exec.server.options.OptionManager;
import org.apache.drill.exec.util.ImpersonationUtil;
import org.apache.drill.test.ClientFixture;
import org.apache.drill.test.ClusterFixture;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
import static org.apache.drill.exec.rpc.user.security.testing.UserAuthenticatorTestImpl.ADMIN_GROUP;
import static org.apache.drill.exec.rpc.user.security.testing.UserAuthenticatorTestImpl.ADMIN_USER;
import static org.apache.drill.exec.rpc.user.security.testing.UserAuthenticatorTestImpl.ADMIN_USER_PASSWORD;
import static org.apache.drill.exec.rpc.user.security.testing.UserAuthenticatorTestImpl.PROCESS_USER;
import static org.apache.drill.exec.rpc.user.security.testing.UserAuthenticatorTestImpl.PROCESS_USER_PASSWORD;
import static org.apache.drill.exec.rpc.user.security.testing.UserAuthenticatorTestImpl.TEST_USER_1;
import static org.apache.drill.exec.rpc.user.security.testing.UserAuthenticatorTestImpl.TEST_USER_1_PASSWORD;
import static org.apache.drill.exec.rpc.user.security.testing.UserAuthenticatorTestImpl.TEST_USER_2;
import static org.apache.drill.exec.rpc.user.security.testing.UserAuthenticatorTestImpl.TEST_USER_2_PASSWORD;
import java.util.Properties;
/**
* Test setting system scoped options with user authentication enabled. (DRILL-3622)
*/
public class TestOptionsAuthEnabled extends BaseTestQuery {

  /** ALTER SYSTEM statement used by the positive tests; reverted in setOptHelper(). */
  private static final String setSysOptionQuery =
      String.format("ALTER SYSTEM SET `%s` = %d;", ExecConstants.SLICE_TARGET, 200);

  /**
   * Starts a one-drillbit cluster with user authentication enabled (using the
   * test authenticator) and seeds the admin user / admin group options that the
   * tests below rely on.
   */
  @BeforeClass
  public static void setupCluster() throws Exception {
    // Create a new DrillConfig which has user authentication enabled and test authenticator set
    final DrillConfig config = new DrillConfig(DrillConfig.create(cloneDefaultTestConfigProperties())
        .withValue(ExecConstants.USER_AUTHENTICATION_ENABLED, ConfigValueFactory.fromAnyRef(true))
        .withValue(ExecConstants.USER_AUTHENTICATOR_IMPL,
            ConfigValueFactory.fromAnyRef(UserAuthenticatorTestImpl.TYPE)),
        false);

    final Properties connectionProps = new Properties();
    connectionProps.setProperty(DrillProperties.USER, PROCESS_USER);
    connectionProps.setProperty(DrillProperties.PASSWORD, PROCESS_USER_PASSWORD);
    updateTestCluster(1, config, connectionProps);

    // Add user "admin" to admin username list
    test(String.format("ALTER SYSTEM SET `%s`='%s,%s'", ExecConstants.ADMIN_USERS_KEY, ADMIN_USER, PROCESS_USER));

    // Set "admingrp" to admin username list
    test(String.format("ALTER SYSTEM SET `%s`='%s'", ExecConstants.ADMIN_USER_GROUPS_KEY, ADMIN_GROUP));
  }

  @Test
  public void updateSysOptAsAdminUser() throws Exception {
    updateClient(ADMIN_USER, ADMIN_USER_PASSWORD);
    setOptHelper();
  }

  @Test
  public void updateSysOptAsNonAdminUser() throws Exception {
    updateClient(TEST_USER_2, TEST_USER_2_PASSWORD);
    errorMsgTestHelper(setSysOptionQuery, "Not authorized to change SYSTEM options.");
  }

  @Test
  public void updateSysOptAsUserInAdminGroup() throws Exception {
    updateClient(TEST_USER_1, TEST_USER_1_PASSWORD);
    setOptHelper();
  }

  @Test
  public void trySettingAdminOptsAtSessionScopeAsAdmin() throws Exception {
    updateClient(ADMIN_USER, ADMIN_USER_PASSWORD);
    final String setOptionQuery =
        String.format("ALTER SESSION SET `%s`='%s,%s'", ExecConstants.ADMIN_USERS_KEY, ADMIN_USER, PROCESS_USER);
    errorMsgTestHelper(setOptionQuery, "PERMISSION ERROR: Cannot change option security.admin.users in scope SESSION");
  }

  @Test
  public void trySettingAdminOptsAtSessionScopeAsNonAdmin() throws Exception {
    updateClient(TEST_USER_2, TEST_USER_2_PASSWORD);
    final String setOptionQuery =
        String.format("ALTER SESSION SET `%s`='%s,%s'", ExecConstants.ADMIN_USERS_KEY, ADMIN_USER, PROCESS_USER);
    errorMsgTestHelper(setOptionQuery, "PERMISSION ERROR: Cannot change option security.admin.users in scope SESSION");
  }

  /**
   * Sets the slice target system option as the currently connected user,
   * verifies it took effect at SYSTEM scope, and always restores the default
   * afterwards so later tests see a clean state.
   */
  private void setOptHelper() throws Exception {
    try {
      test(setSysOptionQuery);
      testBuilder()
          .sqlQuery(String.format("SELECT num_val FROM sys.options WHERE name = '%s' AND optionScope = 'SYSTEM'",
              ExecConstants.SLICE_TARGET))
          .unOrdered()
          .baselineColumns("num_val")
          .baselineValues(200L)
          .go();
    } finally {
      test(String.format("ALTER SYSTEM SET `%s` = %d;", ExecConstants.SLICE_TARGET, ExecConstants.SLICE_TARGET_DEFAULT));
    }
  }

  /**
   * Exercises the admin-users and admin-user-groups options: the 'fake'
   * config defaults, the option accessors, CSV sanitization of crummy
   * user-supplied lists, and admin-privilege checks.
   * All assertions use the JUnit (expected, actual) argument order.
   */
  @Test
  public void testAdminUserOptions() throws Exception {
    try (ClusterFixture cluster = ClusterFixture.standardCluster(dirTestWatcher);
         ClientFixture client = cluster.clientFixture()) {
      OptionManager optionManager = cluster.drillbit().getContext().getOptionManager();

      // Admin Users Tests
      // config file should have the 'fake' default admin user and it should be returned
      // by the option manager if the option has not been set by the user
      String configAdminUser = optionManager.getOption(ExecConstants.ADMIN_USERS_VALIDATOR);
      assertEquals(ExecConstants.ADMIN_USERS_VALIDATOR.DEFAULT_ADMIN_USERS, configAdminUser);

      // Option accessor should never return the 'fake' default from the config
      String adminUser1 = ExecConstants.ADMIN_USERS_VALIDATOR.getAdminUsers(optionManager);
      assertNotEquals(ExecConstants.ADMIN_USERS_VALIDATOR.DEFAULT_ADMIN_USERS, adminUser1);

      // Change testAdminUser if necessary so it differs from the current value
      String testAdminUser = "ronswanson";
      if (adminUser1.equals(testAdminUser)) {
        testAdminUser += "thefirst";
      }

      // Check if the admin option accessor honors a user-supplied value
      client.alterSystem(ExecConstants.ADMIN_USERS_KEY, testAdminUser);
      String adminUser2 = ExecConstants.ADMIN_USERS_VALIDATOR.getAdminUsers(optionManager);
      assertEquals(testAdminUser, adminUser2);

      // Ensure that the default admin users have admin privileges
      client.resetSystem(ExecConstants.ADMIN_USERS_KEY);
      client.resetSystem(ExecConstants.ADMIN_USER_GROUPS_KEY);
      String systemAdminUsersList0 = ExecConstants.ADMIN_USERS_VALIDATOR.getAdminUsers(optionManager);
      String systemAdminUserGroupsList0 = ExecConstants.ADMIN_USER_GROUPS_VALIDATOR.getAdminUserGroups(optionManager);
      for (String user : systemAdminUsersList0.split(",")) {
        assertTrue(ImpersonationUtil.hasAdminPrivileges(user, systemAdminUsersList0, systemAdminUserGroupsList0));
      }

      // test if admin users, set by the user, have admin privileges
      // test if we can handle a user-supplied list that is not well formatted
      String crummyTestAdminUsersList = " alice, bob bob, charlie ,, dave ";
      client.alterSystem(ExecConstants.ADMIN_USERS_KEY, crummyTestAdminUsersList);
      String[] sanitizedAdminUsers = {"alice", "bob bob", "charlie", "dave"};
      // also test the CSV sanitizer
      assertEquals(Joiner.on(",").join(sanitizedAdminUsers), DrillStringUtils.sanitizeCSV(crummyTestAdminUsersList));
      String systemAdminUsersList1 = ExecConstants.ADMIN_USERS_VALIDATOR.getAdminUsers(optionManager);
      String systemAdminUserGroupsList1 = ExecConstants.ADMIN_USER_GROUPS_VALIDATOR.getAdminUserGroups(optionManager);
      for (String user : sanitizedAdminUsers) {
        assertTrue(ImpersonationUtil.hasAdminPrivileges(user, systemAdminUsersList1, systemAdminUserGroupsList1));
      }

      // Admin User Groups Tests
      // config file should have the 'fake' default admin group and it should be returned
      // by the option manager if the option has not been set by the user
      String configAdminUserGroups = optionManager.getOption(ExecConstants.ADMIN_USER_GROUPS_VALIDATOR);
      assertEquals(ExecConstants.ADMIN_USER_GROUPS_VALIDATOR.DEFAULT_ADMIN_USER_GROUPS, configAdminUserGroups);

      // Option accessor should never return the 'fake' default from the config
      String adminUserGroups1 = ExecConstants.ADMIN_USER_GROUPS_VALIDATOR.getAdminUserGroups(optionManager);
      assertNotEquals(ExecConstants.ADMIN_USER_GROUPS_VALIDATOR.DEFAULT_ADMIN_USER_GROUPS, adminUserGroups1);

      // Change testAdminUserGroups if necessary so it differs from the current value
      String testAdminUserGroups = "yakshavers";
      if (adminUserGroups1.equals(testAdminUserGroups)) {
        testAdminUserGroups += ",wormracers";
      }

      // Check if the admin option accessor honors a user-supplied values
      client.alterSystem(ExecConstants.ADMIN_USER_GROUPS_KEY, testAdminUserGroups);
      String adminUserGroups2 = ExecConstants.ADMIN_USER_GROUPS_VALIDATOR.getAdminUserGroups(optionManager);
      assertEquals(testAdminUserGroups, adminUserGroups2);

      // Test if we can handle a user-supplied admin user groups list that is not well formatted
      String crummyTestAdminUserGroupsList = " g1, g 2, g4 ,, g5 ";
      client.alterSystem(ExecConstants.ADMIN_USER_GROUPS_KEY, crummyTestAdminUserGroupsList);
      String systemAdminUserGroupsList2 = ExecConstants.ADMIN_USER_GROUPS_VALIDATOR.getAdminUserGroups(optionManager);
      // test if all the group tokens are well-formed
      // Note: A hasAdminPrivilege() test cannot be done here, like in the tests for handling a crummy admin user list.
      // This is because ImpersonationUtil currently does not implement an API that takes a group as an input to check
      // for admin privileges
      for (String group : systemAdminUserGroupsList2.split(",")) {
        assertFalse(group.isEmpty());
        assertEquals(group.trim(), group);
      }
    }
  }
}
| |
package net.sf.taverna.t2.activities.wsdl.xmlsplitter;
import java.util.ArrayList;
import java.util.List;
import net.sf.taverna.t2.activities.wsdl.WSDLActivity;
import net.sf.taverna.t2.workflowmodel.CompoundEdit;
import net.sf.taverna.t2.workflowmodel.Dataflow;
import net.sf.taverna.t2.workflowmodel.Edit;
import net.sf.taverna.t2.workflowmodel.EditException;
import net.sf.taverna.t2.workflowmodel.Edits;
import net.sf.taverna.t2.workflowmodel.EditsRegistry;
import net.sf.taverna.t2.workflowmodel.EventForwardingOutputPort;
import net.sf.taverna.t2.workflowmodel.EventHandlingInputPort;
import net.sf.taverna.t2.workflowmodel.InputPort;
import net.sf.taverna.t2.workflowmodel.OutputPort;
import net.sf.taverna.t2.workflowmodel.Processor;
import net.sf.taverna.t2.workflowmodel.ProcessorInputPort;
import net.sf.taverna.t2.workflowmodel.ProcessorOutputPort;
import net.sf.taverna.t2.workflowmodel.impl.AbstractDataflowEdit;
import net.sf.taverna.t2.workflowmodel.impl.DataflowImpl;
import net.sf.taverna.t2.workflowmodel.impl.Tools;
import net.sf.taverna.t2.workflowmodel.processor.activity.Activity;
import net.sf.taverna.wsdl.parser.TypeDescriptor;
/**
 * A dataflow edit that adds an XML splitter processor next to an activity
 * (a {@link WSDLActivity} or an existing splitter), maps the relevant ports
 * and wires a datalink between the splitter and the activity's processor.
 */
public class AddXMLSplitterEdit extends AbstractDataflowEdit {

	private final Edits edits = EditsRegistry.getEdits();
	private final Activity<?> activity;
	private final String portName;
	// true: split an input port (splitter feeds the activity);
	// false: split an output port (activity feeds the splitter)
	private final boolean isInput;
	private CompoundEdit compoundEdit1 = null;
	private Edit<?> linkUpEdit;

	public AddXMLSplitterEdit(Dataflow dataflow, Activity<?> activity,
			String portName, boolean isInput) {
		super(dataflow);
		this.activity = activity;
		this.portName = portName;
		this.isInput = isInput;
	}

	@Override
	protected void doEditAction(DataflowImpl dataflow) throws EditException {
		List<Edit<?>> editList = new ArrayList<Edit<?>>();

		Activity<XMLSplitterConfigurationBean> splitter = null;
		String sourcePortName = "";
		Processor sourceProcessor = null;
		Activity<?> sourceActivity = null;
		String sinkPortName = "";
		Processor sinkProcessor = null;
		Activity<?> sinkActivity = null;

		String name = Tools.uniqueProcessorName(portName + "XML", dataflow);
		Processor splitterProcessor = edits.createProcessor(name);
		Processor activityProcessor = findProcessorForActivity(dataflow,
				activity);
		if (activityProcessor == null) {
			throw new EditException(
					"Cannot find the processor that the activity belongs to");
		}

		try {
			if (activity instanceof XMLInputSplitterActivity) {
				if (!isInput) {
					throw new EditException(
							"Can only add an input splitter to another input splitter");
				}
				TypeDescriptor descriptor = ((XMLInputSplitterActivity) activity)
						.getTypeDescriptorForInputPort(portName);
				XMLSplitterConfigurationBean bean = XMLSplitterConfigurationBeanBuilder
						.buildBeanForInput(descriptor);
				splitter = new XMLInputSplitterActivity();
				editList.add(edits.getConfigureActivityEdit(splitter, bean));
			} else if (activity instanceof XMLOutputSplitterActivity) {
				if (isInput) {
					throw new EditException(
							"Can only add an output splitter to another output splitter");
				}
				TypeDescriptor descriptor = ((XMLOutputSplitterActivity) activity)
						.getTypeDescriptorForOutputPort(portName);
				// FIX: splitting an OUTPUT port must produce an OUTPUT splitter
				// configured with an output bean. The original built an input
				// bean and an XMLInputSplitterActivity here - an apparent
				// copy-paste from the input branch that contradicted the guard
				// above and the WSDLActivity output branch below.
				XMLSplitterConfigurationBean bean = XMLSplitterConfigurationBeanBuilder
						.buildBeanForOutput(descriptor);
				splitter = new XMLOutputSplitterActivity();
				editList.add(edits.getConfigureActivityEdit(splitter, bean));
			} else if (activity instanceof WSDLActivity) {
				if (isInput) {
					TypeDescriptor descriptor = ((WSDLActivity) activity)
							.getTypeDescriptorForInputPort(portName);
					XMLSplitterConfigurationBean bean = XMLSplitterConfigurationBeanBuilder
							.buildBeanForInput(descriptor);
					splitter = new XMLInputSplitterActivity();
					editList
							.add(edits.getConfigureActivityEdit(splitter, bean));
				} else {
					TypeDescriptor descriptor = ((WSDLActivity) activity)
							.getTypeDescriptorForOutputPort(portName);
					XMLSplitterConfigurationBean bean = XMLSplitterConfigurationBeanBuilder
							.buildBeanForOutput(descriptor);
					splitter = new XMLOutputSplitterActivity();
					editList
							.add(edits.getConfigureActivityEdit(splitter, bean));
				}
			} else {
				throw new EditException(
						"The activity type is not suitable for adding xml processing processors");
			}
		} catch (EditException e) {
			throw e;
		} catch (Exception e) {
			throw new EditException(
					"An error occurred whilst trying to add an XMLSplitter to the activity:"
							+ activity, e);
		}

		// Decide the direction of the datalink depending on which side the
		// splitter sits on.
		if (isInput) {
			sourcePortName = "output";
			sinkPortName = portName;
			sinkProcessor = activityProcessor;
			sinkActivity = activity;
			sourceProcessor = splitterProcessor;
			sourceActivity = splitter;
		}
		else {
			sourcePortName = portName;
			sinkPortName = "input";
			sinkProcessor = splitterProcessor;
			sinkActivity = splitter;
			sourceProcessor = activityProcessor;
			sourceActivity = activity;
		}

		editList.add(edits.getDefaultDispatchStackEdit(splitterProcessor));
		editList.add(edits.getAddActivityEdit(splitterProcessor, splitter));
		editList.add(edits.getAddProcessorEdit(dataflow, splitterProcessor));

		compoundEdit1 = new CompoundEdit(editList);
		compoundEdit1.doEdit();

		List<Edit<?>> linkUpEditList = new ArrayList<Edit<?>>();
		EventForwardingOutputPort source = getSourcePort(sourceProcessor, sourceActivity,
				sourcePortName, linkUpEditList);
		EventHandlingInputPort sink = getSinkPort(sinkProcessor, sinkActivity, sinkPortName, linkUpEditList);
		if (source == null)
			throw new EditException(
					"Unable to find the source port when linking up "
							+ sourcePortName + " to " + sinkPortName);
		if (sink == null)
			throw new EditException(
					"Unable to find the sink port when linking up "
							+ sourcePortName + " to " + sinkPortName);
		linkUpEditList.add(net.sf.taverna.t2.workflowmodel.utils.Tools.getCreateAndConnectDatalinkEdit(dataflow, source, sink));

		linkUpEdit = new CompoundEdit(linkUpEditList);
		linkUpEdit.doEdit();
	}

	/**
	 * Resolves (or creates and maps) the processor input port matching the
	 * given activity port name. Any port-creation edits are appended to
	 * {@code editList} for later execution.
	 */
	private EventHandlingInputPort getSinkPort(Processor processor, Activity<?> activity,
			String portName, List<Edit<?>> editList) {
		InputPort activityPort = net.sf.taverna.t2.workflowmodel.utils.Tools.getActivityInputPort(activity, portName);
		// check if processor port exists
		EventHandlingInputPort input = net.sf.taverna.t2.workflowmodel.utils.Tools.getProcessorInputPort(processor, activity, activityPort);
		if (input == null) {
			// port doesn't exist so create a processor port and map it
			ProcessorInputPort processorInputPort =
				edits.createProcessorInputPort(processor, activityPort.getName(), activityPort.getDepth());
			editList.add(edits.getAddProcessorInputPortEdit(processor, processorInputPort));
			editList.add(edits.getAddActivityInputPortMappingEdit(activity, activityPort.getName(), activityPort.getName()));
			input = processorInputPort;
		}
		return input;
	}

	/**
	 * Resolves (or creates and maps) the processor output port matching the
	 * given activity port name. Any port-creation edits are appended to
	 * {@code editList} for later execution.
	 */
	private EventForwardingOutputPort getSourcePort(Processor processor, Activity<?> activity,
			String portName, List<Edit<?>> editList) {
		OutputPort activityPort = net.sf.taverna.t2.workflowmodel.utils.Tools.getActivityOutputPort(activity, portName);
		// check if processor port exists
		EventForwardingOutputPort output = net.sf.taverna.t2.workflowmodel.utils.Tools.getProcessorOutputPort(processor, activity, activityPort);
		if (output == null) {
			// port doesn't exist so create a processor port and map it
			ProcessorOutputPort processorOutputPort =
				edits.createProcessorOutputPort(processor, activityPort.getName(), activityPort.getDepth(), activityPort.getGranularDepth());
			editList.add(edits.getAddProcessorOutputPortEdit(processor, processorOutputPort));
			editList.add(edits.getAddActivityOutputPortMappingEdit(activity, activityPort.getName(), activityPort.getName()));
			output = processorOutputPort;
		}
		return output;
	}

	@Override
	protected void undoEditAction(DataflowImpl dataflow) {
		// Guard against a partially-applied edit: doEditAction may have thrown
		// before either compound edit was created.
		if (linkUpEdit != null && linkUpEdit.isApplied())
			linkUpEdit.undo();
		if (compoundEdit1 != null && compoundEdit1.isApplied())
			compoundEdit1.undo();
	}

	/** Finds the processor in the dataflow that contains the given activity instance. */
	private Processor findProcessorForActivity(Dataflow dataflow,
			Activity<?> activity) {
		for (Processor p : dataflow.getProcessors()) {
			for (Activity<?> a : p.getActivityList()) {
				if (a == activity)
					return p;
			}
		}
		return null;
	}
}
| |
package com.orange.spring.cloud.connector.s3.cloudfoundry;
import com.orange.spring.cloud.connector.s3.core.service.S3ServiceInfo;
import org.junit.Test;
import org.springframework.cloud.cloudfoundry.AbstractCloudFoundryConnectorTest;
import org.springframework.cloud.service.ServiceInfo;
import java.util.List;
import static org.junit.Assert.*;
import static org.mockito.Mockito.when;
/**
* Copyright (C) 2016 Arthur Halet
* <p/>
* This software is distributed under the terms and conditions of the 'MIT'
* license which can be found in the file 'LICENSE' in this package distribution
* or at 'http://opensource.org/licenses/MIT'.
* <p/>
* Author: Arthur Halet
* Date: 24/02/2016
*/
public class CloudFoundryConnectorS3ServiceTest extends AbstractCloudFoundryConnectorTest {
protected String accessKeyId = "mypublickey";
protected String accessKeyIdUrlEncode = "mypublickey%3D%3D";
protected String accessKeyIdUrlEncodeDecoded = "mypublickey==";
protected String secretAccessKey = "mysecretkey";
protected String secretAccessKeyUrlEncode = "mysecretkey%3D%3D";
protected String secretAccessKeyUrlEncodeDecoded = "mysecretkey==";
protected String bucketName1 = "bucket-1";
protected String bucketName2 = "bucket-2";
public CloudFoundryConnectorS3ServiceTest() {
}
protected static String getUrl() {
return "https://" + "10.20.30.40" + ":" + 80;
}
@Test
public void s3ServiceCreation() {
when(this.mockEnvironment.getEnvValue("VCAP_SERVICES")).thenReturn(getServicesPayload(this.getS3ServicePayload("s3-1", "10.20.30.40", 80, accessKeyId, secretAccessKey, bucketName1),
this.getS3ServicePayload("s3-2", "10.20.30.40", 80, accessKeyId, secretAccessKey, bucketName2)));
List<ServiceInfo> serviceInfos = this.testCloudConnector.getServiceInfos();
S3ServiceInfo info1 = (S3ServiceInfo) getServiceInfo(serviceInfos, "s3-1");
S3ServiceInfo info2 = (S3ServiceInfo) getServiceInfo(serviceInfos, "s3-2");
this.asserting(info1, info2);
assertTrue("Info1 is an aws s3", !info1.isAwsS3());
assertTrue("Info2 is an aws s3", !info2.isAwsS3());
assertFalse("Info1 is an aws s3 with virtual host", info1.isVirtualHostBuckets());
assertFalse("Info2 is an aws s3 with virtual host", info2.isVirtualHostBuckets());
assertEquals(getUrl(), info1.getS3Host());
assertEquals(getUrl(), info2.getS3Host());
}
@Test
public void s3ServiceCreationUrlEncoded() {
when(this.mockEnvironment.getEnvValue("VCAP_SERVICES")).thenReturn(getServicesPayload(this.getS3ServicePayload("s3-1", "10.20.30.40", 80, accessKeyIdUrlEncode, secretAccessKeyUrlEncode, bucketName1),
this.getS3ServicePayload("s3-2", "10.20.30.40", 80, accessKeyIdUrlEncode, secretAccessKeyUrlEncode, bucketName2)));
List<ServiceInfo> serviceInfos = this.testCloudConnector.getServiceInfos();
S3ServiceInfo info1 = (S3ServiceInfo) getServiceInfo(serviceInfos, "s3-1");
S3ServiceInfo info2 = (S3ServiceInfo) getServiceInfo(serviceInfos, "s3-2");
this.assertingUrlEncode(info1, info2);
assertTrue("Info1 is an aws s3", !info1.isAwsS3());
assertTrue("Info2 is an aws s3", !info2.isAwsS3());
assertFalse("Info1 is an aws s3 with virtual host", info1.isVirtualHostBuckets());
assertEquals(getUrl(), info1.getS3Host());
assertEquals(getUrl(), info2.getS3Host());
}
public void asserting(S3ServiceInfo info1, S3ServiceInfo info2) {
assertServiceFoundOfType(info1, S3ServiceInfo.class);
assertServiceFoundOfType(info2, S3ServiceInfo.class);
assertEquals(bucketName1, info1.getBucket());
assertEquals(bucketName2, info2.getBucket());
assertEquals(accessKeyId, info1.getAccessKeyId());
assertEquals(accessKeyId, info2.getAccessKeyId());
assertEquals(secretAccessKey, info1.getSecretAccessKey());
assertEquals(secretAccessKey, info2.getSecretAccessKey());
}
public void assertingUrlEncode(S3ServiceInfo info1, S3ServiceInfo info2) {
assertServiceFoundOfType(info1, S3ServiceInfo.class);
assertServiceFoundOfType(info2, S3ServiceInfo.class);
assertEquals(bucketName1, info1.getBucket());
assertEquals(bucketName2, info2.getBucket());
assertEquals(accessKeyIdUrlEncodeDecoded, info1.getAccessKeyId());
assertEquals(accessKeyIdUrlEncodeDecoded, info2.getAccessKeyId());
assertEquals(secretAccessKeyUrlEncodeDecoded, info1.getSecretAccessKey());
assertEquals(secretAccessKeyUrlEncodeDecoded, info2.getSecretAccessKey());
}
@Test
public void s3ServiceCreationWithLabelNoTags() {
when(this.mockEnvironment.getEnvValue("VCAP_SERVICES")).thenReturn(getServicesPayload(this.getS3ServicePayloadWithLabelNoTags("s3-1", "10.20.30.40", 80, accessKeyId, secretAccessKey, bucketName1),
this.getS3ServicePayloadWithLabelNoTags("s3-2", "10.20.30.40", 80, accessKeyId, secretAccessKey, bucketName2)));
List<ServiceInfo> serviceInfos = this.testCloudConnector.getServiceInfos();
S3ServiceInfo info1 = (S3ServiceInfo) getServiceInfo(serviceInfos, "s3-1");
S3ServiceInfo info2 = (S3ServiceInfo) getServiceInfo(serviceInfos, "s3-2");
this.asserting(info1, info2);
assertTrue("Info1 is an aws s3", !info1.isAwsS3());
assertTrue("Info2 is an aws s3", !info2.isAwsS3());
assertFalse("Info1 is an aws s3 with virtual host", info1.isVirtualHostBuckets());
assertFalse("Info2 is an aws s3 with virtual host", info2.isVirtualHostBuckets());
assertEquals(getUrl(), info1.getS3Host());
assertEquals(getUrl(), info2.getS3Host());
}
@Test
public void s3ServiceCreationNoLabelNoTags() {
String hostNameDns = "mys3.com";
when(this.mockEnvironment.getEnvValue("VCAP_SERVICES")).thenReturn(getServicesPayload(this.getS3ServicePayloadNoLabelNoTags("s3-1", hostNameDns, 80, accessKeyId, secretAccessKey, bucketName1),
this.getS3ServicePayloadNoLabelNoTags("s3-2", hostNameDns, 80, accessKeyId, secretAccessKey, bucketName2)));
List<ServiceInfo> serviceInfos = this.testCloudConnector.getServiceInfos();
S3ServiceInfo info1 = (S3ServiceInfo) getServiceInfo(serviceInfos, "s3-1");
S3ServiceInfo info2 = (S3ServiceInfo) getServiceInfo(serviceInfos, "s3-2");
this.asserting(info1, info2);
assertTrue("Info1 is an aws s3", !info1.isAwsS3());
assertTrue("Info2 is an aws s3", !info2.isAwsS3());
assertTrue("Info1 is not an aws s3 with virtual host", info1.isVirtualHostBuckets());
assertTrue("Info2 is not an aws s3 with virtual host", info2.isVirtualHostBuckets());
assertEquals("https://" + hostNameDns + ":" + 80, info1.getS3Host());
assertEquals("https://" + hostNameDns + ":" + 80, info2.getS3Host());
}
@Test
public void AwsS3ServiceCreationNoLabelNoTags() {
when(this.mockEnvironment.getEnvValue("VCAP_SERVICES")).thenReturn(getServicesPayload(this.getAwsS3ServicePayloadNoLabelNoTags("s3-1", "10.20.30.40", 80, accessKeyId, secretAccessKey, bucketName1),
this.getAwsS3ServicePayloadNoLabelNoTags("s3-2", "10.20.30.40", 80, accessKeyId, secretAccessKey, bucketName2)));
List<ServiceInfo> serviceInfos = this.testCloudConnector.getServiceInfos();
S3ServiceInfo info1 = (S3ServiceInfo) getServiceInfo(serviceInfos, "s3-1");
S3ServiceInfo info2 = (S3ServiceInfo) getServiceInfo(serviceInfos, "s3-2");
this.asserting(info1, info2);
assertTrue("Info1 is not an aws s3", info1.isAwsS3());
assertTrue("Info2 is not an aws s3", info2.isAwsS3());
assertFalse("Info1 is an aws s3 with virtual host", info1.isVirtualHostBuckets());
assertFalse("Info2 is an aws s3 with virtual host", info2.isVirtualHostBuckets());
assertEquals("http://s3.amazonaws.com", info1.getS3Host());
assertEquals("http://s3.amazonaws.com", info2.getS3Host());
}
@Test
public void AwsS3ServiceCreationWithVirtualBucket() {
when(this.mockEnvironment.getEnvValue("VCAP_SERVICES")).thenReturn(getServicesPayload(this.getAwsS3ServicePayloadVirtualBucket("s3-1", "10.20.30.40", 80, accessKeyId, secretAccessKey, bucketName1),
this.getAwsS3ServicePayloadVirtualBucket("s3-2", "10.20.30.40", 80, accessKeyId, secretAccessKey, bucketName2)));
List<ServiceInfo> serviceInfos = this.testCloudConnector.getServiceInfos();
S3ServiceInfo info1 = (S3ServiceInfo) getServiceInfo(serviceInfos, "s3-1");
S3ServiceInfo info2 = (S3ServiceInfo) getServiceInfo(serviceInfos, "s3-2");
this.asserting(info1, info2);
assertTrue("Info1 is not an aws s3", info1.isAwsS3());
assertTrue("Info2 is not an aws s3", info2.isAwsS3());
assertTrue("Info1 is an aws s3 with virtual host", info1.isVirtualHostBuckets());
assertTrue("Info2 is an aws s3 with virtual host", info2.isVirtualHostBuckets());
assertEquals("http://s3.amazonaws.com", info1.getS3Host());
assertEquals("http://s3.amazonaws.com", info2.getS3Host());
}
@Test
public void AwsS3ServiceCreation() {
when(this.mockEnvironment.getEnvValue("VCAP_SERVICES")).thenReturn(getServicesPayload(this.getAwsS3ServicePayload("s3-1", "10.20.30.40", 80, accessKeyId, secretAccessKey, bucketName1),
this.getAwsS3ServicePayload("s3-2", "10.20.30.40", 80, accessKeyId, secretAccessKey, bucketName2)));
List<ServiceInfo> serviceInfos = this.testCloudConnector.getServiceInfos();
S3ServiceInfo info1 = (S3ServiceInfo) getServiceInfo(serviceInfos, "s3-1");
S3ServiceInfo info2 = (S3ServiceInfo) getServiceInfo(serviceInfos, "s3-2");
this.asserting(info1, info2);
assertTrue("Info1 is not an aws s3", info1.isAwsS3());
assertTrue("Info2 is not an aws s3", info2.isAwsS3());
assertTrue("Info1 is not an aws s3 with virtual host", info1.isVirtualHostBuckets());
assertTrue("Info2 is not an aws s3 with virtual host", info2.isVirtualHostBuckets());
assertEquals("http://s3-aws-region.amazonaws.com", info1.getS3Host());
assertEquals("http://s3-aws-region.amazonaws.com", info2.getS3Host());
}
private String getRelationalPayload(String templateFile, String serviceName, String hostname, int port, String accessKeyId, String secretAccessKey, String bucketName) {
String payload = this.readTestDataFile(templateFile);
payload = payload.replace("$serviceName", serviceName);
payload = payload.replace("$hostname", hostname);
payload = payload.replace("$port", Integer.toString(port));
payload = payload.replace("$access_key_id", accessKeyId);
payload = payload.replace("$secret_access_key", secretAccessKey);
payload = payload.replace("$bucketName", bucketName);
return payload;
}
private String getS3ServicePayload(String serviceName, String hostname, int port, String accessKeyId, String secretAccessKey, String bucketName) {
return this.getRelationalPayload("test-s3-info.json", serviceName, hostname, port, accessKeyId, secretAccessKey, bucketName);
}
private String getS3ServicePayloadWithUrlEncode(String serviceName, String hostname, int port, String accessKeyId, String secretAccessKey, String bucketName) {
return this.getRelationalPayload("test-s3-info.json", serviceName, hostname, port, accessKeyId, secretAccessKey, bucketName);
}
private String getAwsS3ServicePayload(String serviceName, String hostname, int port, String accessKeyId, String secretAccessKey, String bucketName) {
return this.getRelationalPayload("test-s3-aws-info.json", serviceName, hostname, port, accessKeyId, secretAccessKey, bucketName);
}
private String getS3ServicePayloadWithLabelNoTags(String serviceName, String hostname, int port, String accessKeyId, String secretAccessKey, String bucketName) {
return this.getRelationalPayload("test-s3-info-with-label-no-tags.json", serviceName, hostname, port, accessKeyId, secretAccessKey, bucketName);
}
private String getS3ServicePayloadNoLabelNoTags(String serviceName, String hostname, int port, String accessKeyId, String secretAccessKey, String bucketName) {
return this.getRelationalPayload("test-s3-info-no-label-no-tags.json", serviceName, hostname, port, accessKeyId, secretAccessKey, bucketName);
}
private String getAwsS3ServicePayloadNoLabelNoTags(String serviceName, String hostname, int port, String accessKeyId, String secretAccessKey, String bucketName) {
return this.getRelationalPayload("test-s3-aws-info-no-label-no-tags.json", serviceName, hostname, port, accessKeyId, secretAccessKey, bucketName);
}
private String getAwsS3ServicePayloadVirtualBucket(String serviceName, String hostname, int port, String accessKeyId, String secretAccessKey, String bucketName) {
return this.getRelationalPayload("test-s3-aws-virtual-bucket.json", serviceName, hostname, port, accessKeyId, secretAccessKey, bucketName);
}
}
| |
/**
* Copyright 2014-2015 SHAF-WORK
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.shaf.core.process;
import java.io.IOException;
import java.util.EventListener;
import java.util.Properties;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.shaf.core.content.DefaultOptionInfo;
import org.shaf.core.event.EventListenerHandler;
import org.shaf.core.event.ProcessEvent;
import org.shaf.core.event.ProcessListener;
import org.shaf.core.process.cmd.OptionMissingException;
import org.shaf.core.process.config.InputModeAlreadySetException;
import org.shaf.core.process.config.OutputModeAlreadySetException;
import org.shaf.core.process.config.ProcessConfiguration;
import org.shaf.core.process.config.PropertyNotFoundException;
import org.shaf.core.process.handle.CompositeHandler;
import org.shaf.core.process.handle.EmulatorDbMapHandler;
import org.shaf.core.process.handle.EmulatorMemoryMapHandler;
import org.shaf.core.process.handle.EmulatorMultiMapHandler;
import org.shaf.core.process.handle.EmulatorSingleMapHandler;
import org.shaf.core.process.handle.HadoopHandler;
import org.shaf.core.process.handle.LocalHandler;
import org.shaf.core.process.handle.ProcessHandler;
import org.shaf.core.process.type.comp.CompositeProcess;
import org.shaf.core.process.type.dist.DistributedProcess;
import org.shaf.core.process.type.local.LocalProcess;
import org.shaf.core.util.ClassUtils;
import org.shaf.core.util.IOUtils;
import org.shaf.core.util.Log;
/**
 * The process executor.
 *
 * <p>Selects and runs the appropriate {@link ProcessHandler} for a given
 * {@link Process} class (composite, local, or distributed — in Hadoop or
 * emulation mode) and reports progress to registered listeners.
 *
 * @author Mykola Galushka
 */
public class ProcessExecutor extends EventListenerHandler implements Runnable {

    /**
     * Defines a logger.
     */
    private static final Log LOG = Log.forClass(ProcessExecutor.class);

    /**
     * Returns the process executor for the specified process.
     *
     * @param pcls
     *            the process class.
     * @param settings
     *            the process settings containing 'key/value' pairs, which are
     *            used for process initialization.
     * @param config
     *            the process configuration (If the specified process
     *            configuration is {@code null}, it can be initialized inside
     *            the method, when the following conditions are met:
     *            {@code settings != null && settings.size()>0)})
     * @param listeners
     *            the process execution listeners.
     * @return the result of the process execution.
     * @throws OptionMissingException
     *             if the required command-line options is missing.
     * @throws PropertyNotFoundException
     *             if the required property is not found.
     * @throws InputModeAlreadySetException
     *             if an attempt to change already set input mode.
     * @throws OutputModeAlreadySetException
     *             if an attempt to change already set output mode.
     */
    public static ProcessExecutor forProcess(
            final Class<? extends Process> pcls, final Properties settings,
            ProcessConfiguration config, EventListener[] listeners)
            throws OptionMissingException, InputModeAlreadySetException,
            PropertyNotFoundException, OutputModeAlreadySetException {
        /*
         * Validates that the process class is set.
         *
         * FIX: the original constructed the NullPointerException without
         * throwing it, so a null process class slipped through the check.
         */
        if (pcls == null) {
            throw new NullPointerException("process class");
        }

        /*
         * Depending on the process type, it transfers the "specialized"
         * arguments values to the configuration object.
         */
        if (ClassUtils.isDistributedProcess(pcls)) {
            /*
             * FIX: the distributed branch dereferences the configuration
             * immediately, while the original only initialized a null
             * configuration further below — which caused a
             * NullPointerException here.
             */
            if (config == null) {
                config = new ProcessConfiguration();
            }

            /*
             * Transfers the INPUT arguments.
             */
            if (!config.isInputDefined()) {
                String name = DefaultOptionInfo.INPUT.getOptionInfo().getName();
                String option = name + ".option";
                String value = name + ".value";
                // FIX: treat null settings as a missing option instead of
                // failing with a NullPointerException.
                if (settings != null && settings.containsKey(option)
                        && settings.containsKey(value)) {
                    settings.remove(option);
                    config.addInput((String) settings.remove(value));
                } else {
                    throw new OptionMissingException(name);
                }
            }

            /*
             * Transfers the OUTPUT arguments.
             */
            if (!config.isOutputDefined()) {
                String name = DefaultOptionInfo.OUTPUT.getOptionInfo()
                        .getName();
                String option = name + ".option";
                String value = name + ".value";
                if (settings != null && settings.containsKey(option)
                        && settings.containsKey(value)) {
                    settings.remove(option);
                    config.setOutput((String) settings.remove(value));
                } else {
                    throw new OptionMissingException(name);
                }
            }
        }

        /*
         * Transfers fields values to the configuration object.
         */
        if (settings != null && settings.size() > 0) {
            config = config == null ? new ProcessConfiguration() : config;
            for (String key : settings.stringPropertyNames()) {
                String name = pcls.getCanonicalName() + "@" + key;
                config.setProperty(name, settings.getProperty(key));
            }
        }

        return new ProcessExecutor(pcls, config, listeners);
    }

    /**
     * The {@link Process} class, which need to be executed.
     */
    private final Class<? extends Process> pcls;

    /**
     * The executing process configuration.
     */
    private final ProcessConfiguration config;

    /**
     * The object which represents an outcome of the {@link Process} execution.
     * It can receive the result of the process execution or an exception if the
     * execution has failed.
     */
    private Object outcome;

    /**
     * Constructs a new container for process execution.
     *
     * @param pcls
     *            the {@link Process} class, which need to be executed.
     * @param config
     *            the process configuration.
     * @param listeners
     *            the registering listeners.
     */
    private ProcessExecutor(final Class<? extends Process> pcls,
            ProcessConfiguration config, final EventListener[] listeners) {
        super(listeners);
        this.pcls = pcls;
        this.config = config;
        this.outcome = null;
    }

    /**
     * Executes process associated with this container.
     */
    @Override
    public void run() {
        this.fireProcessStarted();
        try {
            ProcessHandler handler = null;
            if (ClassUtils.inherits(this.pcls, CompositeProcess.class)) {
                handler = new CompositeHandler(this.pcls, this.config,
                        this.listeners);
                LOG.debug("Initialized the COMPOSITE process handler for: "
                        + this.pcls.getSimpleName());
            } else if (ClassUtils.inherits(this.pcls, LocalProcess.class)) {
                handler = new LocalHandler(this.pcls, this.config,
                        this.listeners);
                LOG.debug("Initialized the LOCAL process handler for: "
                        + this.pcls.getSimpleName());
            } else if (ClassUtils.inherits(this.pcls, DistributedProcess.class)) {
                /*
                 * Before launching the distributed process handler this
                 * executor deletes the output directory. This option can be
                 * enabled or disabled via the process configuration.
                 */
                if (this.config.isForceDeleteOutput()) {
                    try (FileSystem fs = this.config.isForceEmulatorMode() ? IOUtils
                            .getLocalFileSystem() : IOUtils.getFileSystem()) {
                        Path path = new Path(this.config.getBase(),
                                this.config.getOutput());
                        if (fs.exists(path)) {
                            if (!fs.delete(path, true)) {
                                throw new IOException(
                                        "Failed to delete the output directory: "
                                                + path);
                            } else {
                                LOG.debug("Successfully deleted the output directory: "
                                        + path);
                            }
                        }
                    }
                }

                // FIX: '&' replaced with '&&' — the intent is a logical
                // short-circuit test, not a bitwise operation.
                if (IOUtils.isHadoopAvailable()
                        && !this.config.isForceEmulatorMode()) {
                    handler = new HadoopHandler(this.pcls, this.config,
                            this.listeners);
                    LOG.debug("Initialized the DISTRIBUTED process handler in HADOOP mode for: "
                            + this.pcls.getSimpleName());
                } else {
                    LOG.debug("Initialized the DISTRIBUTED process handler in EMULATION mode for: "
                            + this.pcls.getSimpleName());
                    if (this.config.isMultiFormatPathInput()) {
                        handler = new EmulatorMultiMapHandler(this.pcls,
                                this.config, this.listeners);
                        LOG.debug("Selected the MULTI-map Handler");
                    } else if (this.config.isSingleFormatPathInput()) {
                        handler = new EmulatorSingleMapHandler(this.pcls,
                                this.config, this.listeners);
                        LOG.debug("Selected the SINGLE-map Handler");
                    } else if (this.config.isDatabaseInput()) {
                        handler = new EmulatorDbMapHandler(this.pcls,
                                this.config, this.listeners);
                        LOG.debug("Selected the DB-map Handler");
                    } else if (this.config.isMemoryInput()) {
                        handler = new EmulatorMemoryMapHandler(this.pcls,
                                this.config, this.listeners);
                        LOG.debug("Selected the MEMORY-map Handler");
                    }
                }
            } else {
                throw new ProcessException("Unsupported process type");
            }

            /*
             * FIX: in emulation mode none of the input-mode branches may
             * match, leaving the handler null; fail with a descriptive
             * exception instead of a NullPointerException below.
             */
            if (handler == null) {
                throw new ProcessException(
                        "No process handler could be selected for: "
                                + this.pcls.getSimpleName());
            }

            this.outcome = handler.run();
        } catch (Throwable exc) {
            this.outcome = exc;
            LOG.error("The process execution error.", exc);
        }
        this.fireProcessFinished();
    }

    /**
     * This method is invoked when process execution is interrupted.
     *
     * <p>Note: this only records the interruption as the outcome; it does not
     * stop a handler that is already running.
     */
    public void interrupt() {
        // FIX: corrected the misspelled message ("interapted").
        this.outcome = new InterruptedException(
                "Process was interrupted due to reaching timeout.");
    }

    /**
     * Returns the process class associated with this executor.
     *
     * @return the process class
     */
    public final Class<? extends Process> getProcessClass() {
        return this.pcls;
    }

    /**
     * Returns the result of the process execution.
     *
     * @return the result of the process execution.
     * @throws Exception
     *             if the process execution has failed.
     */
    public final Object getResult() throws Exception {
        if (this.hasFailed()) {
            if (this.outcome instanceof ProcessException) {
                throw (ProcessException) this.outcome;
            } else {
                throw new Exception(
                        "Unexpected error occurred during '"
                                + this.pcls.getCanonicalName()
                                + "' process execution.",
                        (Throwable) this.outcome);
            }
        } else {
            return this.outcome;
        }
    }

    /**
     * Returns the process execution status.
     *
     * @return {@code true} if process execution has failed and {@code false}
     *         otherwise.
     */
    public final boolean hasFailed() {
        if (this.outcome == null) {
            return false;
        } else {
            return ClassUtils.inherits(this.outcome, Throwable.class);
        }
    }

    /**
     * Fires the {@link ProcessListener#processStarted(ProcessEvent)} action.
     */
    private void fireProcessStarted() {
        if (this.listeners != null) {
            for (ProcessListener listener : super
                    .getEventListeners(ProcessListener.class)) {
                listener.processStarted(new ProcessEvent(this, this.pcls));
            }
        }
    }

    /**
     * Fires the {@link ProcessListener#processFinished(ProcessEvent)} action.
     */
    private void fireProcessFinished() {
        if (this.listeners != null) {
            for (ProcessListener listener : super
                    .getEventListeners(ProcessListener.class)) {
                listener.processFinished(new ProcessEvent(this, this.pcls));
            }
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.sql;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.collect.Iterables;
import com.google.errorprone.annotations.concurrent.GuardedBy;
import org.apache.calcite.avatica.remote.TypedValue;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.sql.parser.SqlParseException;
import org.apache.calcite.tools.RelConversionException;
import org.apache.calcite.tools.ValidationException;
import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.java.util.common.ISE;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.common.guava.Sequence;
import org.apache.druid.java.util.common.guava.SequenceWrapper;
import org.apache.druid.java.util.common.guava.Sequences;
import org.apache.druid.java.util.common.logger.Logger;
import org.apache.druid.java.util.emitter.service.ServiceEmitter;
import org.apache.druid.java.util.emitter.service.ServiceMetricEvent;
import org.apache.druid.query.QueryInterruptedException;
import org.apache.druid.query.QueryTimeoutException;
import org.apache.druid.server.QueryStats;
import org.apache.druid.server.RequestLogLine;
import org.apache.druid.server.log.RequestLogger;
import org.apache.druid.server.security.Access;
import org.apache.druid.server.security.AuthenticationResult;
import org.apache.druid.server.security.AuthorizationUtils;
import org.apache.druid.server.security.ForbiddenException;
import org.apache.druid.server.security.Resource;
import org.apache.druid.sql.calcite.planner.DruidPlanner;
import org.apache.druid.sql.calcite.planner.PlannerContext;
import org.apache.druid.sql.calcite.planner.PlannerFactory;
import org.apache.druid.sql.calcite.planner.PlannerResult;
import org.apache.druid.sql.calcite.planner.PrepareResult;
import org.apache.druid.sql.calcite.planner.ValidationResult;
import org.apache.druid.sql.http.SqlParameter;
import org.apache.druid.sql.http.SqlQuery;
import javax.annotation.Nullable;
import javax.servlet.http.HttpServletRequest;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
/**
 * Similar to {@link org.apache.druid.server.QueryLifecycle}, this class manages the lifecycle of a SQL query.
 * It ensures that a SQL query goes through the following stages, in the proper order:
 *
 * <ol>
 * <li>Initialization ({@link #initialize(String, Map)})</li>
 * <li>Validation and Authorization ({@link #validateAndAuthorize(HttpServletRequest)} or {@link #validateAndAuthorize(AuthenticationResult)})</li>
 * <li>Planning ({@link #plan()})</li>
 * <li>Execution ({@link #execute()})</li>
 * <li>Logging ({@link #emitLogsAndMetrics(Throwable, String, long)})</li>
 * </ol>
 *
 * <p>Unlike QueryLifecycle, this class is designed to be <b>thread safe</b> so that it can be used in multi-threaded
 * scenario (JDBC) without external synchronization. All mutable state is guarded by {@link #lock}.
 */
public class SqlLifecycle
{
  private static final Logger log = new Logger(SqlLifecycle.class);

  private final PlannerFactory plannerFactory;
  private final ServiceEmitter emitter;
  private final RequestLogger requestLogger;
  // Wall-clock start (for request logs) and monotonic start (for timing metrics).
  private final long startMs;
  private final long startNs;
  private final Object lock = new Object();

  @GuardedBy("lock")
  private State state = State.NEW;

  // init during initialize()
  @GuardedBy("lock")
  private String sql;
  @GuardedBy("lock")
  private Map<String, Object> queryContext;
  @GuardedBy("lock")
  private List<TypedValue> parameters;

  // init during plan()
  @GuardedBy("lock")
  private PlannerContext plannerContext;
  @GuardedBy("lock")
  private ValidationResult validationResult;
  @GuardedBy("lock")
  private PrepareResult prepareResult;
  @GuardedBy("lock")
  private PlannerResult plannerResult;

  public SqlLifecycle(
      PlannerFactory plannerFactory,
      ServiceEmitter emitter,
      RequestLogger requestLogger,
      long startMs,
      long startNs
  )
  {
    this.plannerFactory = plannerFactory;
    this.emitter = emitter;
    this.requestLogger = requestLogger;
    this.startMs = startMs;
    this.startNs = startNs;
    this.parameters = Collections.emptyList();
  }

  /**
   * Initialize the query lifecycle, setting the raw string SQL, initial query context, and assign a sql query id.
   *
   * If successful (it will be), it will transition the lifecycle to {@link State#INITIALIZED}.
   */
  public String initialize(String sql, Map<String, Object> queryContext)
  {
    synchronized (lock) {
      transition(State.NEW, State.INITIALIZED);
      this.sql = sql;
      this.queryContext = contextWithSqlId(queryContext);
      return sqlQueryId();
    }
  }

  // Copies the caller-supplied context (which may be null) and assigns a random
  // sql query id unless the caller already provided one.
  @GuardedBy("lock")
  private Map<String, Object> contextWithSqlId(Map<String, Object> queryContext)
  {
    Map<String, Object> newContext = new HashMap<>();
    if (queryContext != null) {
      newContext.putAll(queryContext);
    }
    newContext.computeIfAbsent(PlannerContext.CTX_SQL_QUERY_ID, k -> UUID.randomUUID().toString());
    return newContext;
  }

  @GuardedBy("lock")
  private String sqlQueryId()
  {
    return (String) this.queryContext.get(PlannerContext.CTX_SQL_QUERY_ID);
  }

  /**
   * Assign dynamic parameters to be used to substitute values during query execution. This can be performed at any
   * part of the lifecycle.
   */
  public void setParameters(List<TypedValue> parameters)
  {
    synchronized (lock) {
      this.parameters = parameters;
      // If planning has already produced a context, forward the parameters to it too.
      if (this.plannerContext != null) {
        this.plannerContext.setParameters(parameters);
      }
    }
  }

  /**
   * Validate SQL query and authorize against any datasources or views which will take part in the query.
   *
   * If successful, the lifecycle will first transition from {@link State#INITIALIZED} first to
   * {@link State#AUTHORIZING} and then to either {@link State#AUTHORIZED} or {@link State#UNAUTHORIZED}.
   */
  public void validateAndAuthorize(AuthenticationResult authenticationResult)
  {
    synchronized (lock) {
      // Already authorized (e.g. re-entry from a JDBC flow): nothing to do.
      if (state == State.AUTHORIZED) {
        return;
      }
      transition(State.INITIALIZED, State.AUTHORIZING);
      validate(authenticationResult);
      Access access = doAuthorize(
          AuthorizationUtils.authorizeAllResourceActions(
              authenticationResult,
              Iterables.transform(validationResult.getResources(), AuthorizationUtils.RESOURCE_READ_RA_GENERATOR),
              plannerFactory.getAuthorizerMapper()
          )
      );
      checkAccess(access);
    }
  }

  /**
   * Validate SQL query and authorize against any datasources or views which will take part in the query. Like
   * {@link #validateAndAuthorize(AuthenticationResult)} but for a {@link HttpServletRequest}.
   *
   * If successful, the lifecycle will first transition from {@link State#INITIALIZED} first to
   * {@link State#AUTHORIZING} and then to either {@link State#AUTHORIZED} or {@link State#UNAUTHORIZED}.
   */
  public void validateAndAuthorize(HttpServletRequest req)
  {
    synchronized (lock) {
      transition(State.INITIALIZED, State.AUTHORIZING);
      AuthenticationResult authResult = AuthorizationUtils.authenticationResultFromRequest(req);
      validate(authResult);
      Access access = doAuthorize(
          AuthorizationUtils.authorizeAllResourceActions(
              req,
              Iterables.transform(validationResult.getResources(), AuthorizationUtils.RESOURCE_READ_RA_GENERATOR),
              plannerFactory.getAuthorizerMapper()
          )
      );
      checkAccess(access);
    }
  }

  // Parses and validates the SQL, recording the planner context and validation
  // result as a side effect so later stages (and metrics) can use them.
  @GuardedBy("lock")
  private ValidationResult validate(AuthenticationResult authenticationResult)
  {
    try (DruidPlanner planner = plannerFactory.createPlanner(queryContext)) {
      // set planner context for logs/metrics in case something explodes early
      this.plannerContext = planner.getPlannerContext();
      this.plannerContext.setAuthenticationResult(authenticationResult);
      // set parameters on planner context, if parameters have already been set
      this.plannerContext.setParameters(parameters);
      this.validationResult = planner.validate(sql);
      return validationResult;
    }
    // we can't collapse catch clauses since SqlPlanningException has type-sensitive constructors.
    catch (SqlParseException e) {
      throw new SqlPlanningException(e);
    }
    catch (ValidationException e) {
      throw new SqlPlanningException(e);
    }
  }

  @GuardedBy("lock")
  private Access doAuthorize(final Access authorizationResult)
  {
    if (!authorizationResult.isAllowed()) {
      // Not authorized; go straight to Jail, do not pass Go.
      transition(State.AUTHORIZING, State.UNAUTHORIZED);
    } else {
      transition(State.AUTHORIZING, State.AUTHORIZED);
    }
    return authorizationResult;
  }

  // Records the authorization result on the planner context and throws if denied.
  @GuardedBy("lock")
  private void checkAccess(Access access)
  {
    plannerContext.setAuthorizationResult(access);
    if (!access.isAllowed()) {
      throw new ForbiddenException(access.toString());
    }
  }

  /**
   * Prepare the query lifecycle for execution, without completely planning into something that is executable, but
   * including some initial parsing and validation and any dynamic parameter type resolution, to support prepared
   * statements via JDBC.
   *
   */
  public PrepareResult prepare() throws RelConversionException
  {
    synchronized (lock) {
      if (state != State.AUTHORIZED) {
        throw new ISE("Cannot prepare because current state[%s] is not [%s].", state, State.AUTHORIZED);
      }
      Preconditions.checkNotNull(plannerContext, "Cannot prepare, plannerContext is null");
      try (DruidPlanner planner = plannerFactory.createPlannerWithContext(plannerContext)) {
        this.prepareResult = planner.prepare(sql);
        return prepareResult;
      }
      // we can't collapse catch clauses since SqlPlanningException has type-sensitive constructors.
      catch (SqlParseException e) {
        throw new SqlPlanningException(e);
      }
      catch (ValidationException e) {
        throw new SqlPlanningException(e);
      }
    }
  }

  /**
   * Plan the query to enable execution.
   *
   * If successful, the lifecycle will first transition from {@link State#AUTHORIZED} to {@link State#PLANNED}.
   */
  public PlannerContext plan() throws RelConversionException
  {
    synchronized (lock) {
      transition(State.AUTHORIZED, State.PLANNED);
      Preconditions.checkNotNull(plannerContext, "Cannot plan, plannerContext is null");
      try (DruidPlanner planner = plannerFactory.createPlannerWithContext(plannerContext)) {
        this.plannerResult = planner.plan(sql);
      }
      // we can't collapse catch clauses since SqlPlanningException has type-sensitive constructors.
      catch (SqlParseException e) {
        throw new SqlPlanningException(e);
      }
      catch (ValidationException e) {
        throw new SqlPlanningException(e);
      }
      return plannerContext;
    }
  }

  /**
   * Execute the fully planned query.
   *
   * If successful, the lifecycle will first transition from {@link State#PLANNED} to {@link State#EXECUTING}.
   */
  public Sequence<Object[]> execute()
  {
    synchronized (lock) {
      transition(State.PLANNED, State.EXECUTING);
      return plannerResult.run();
    }
  }

  // Convenience driver running the whole lifecycle; logs/metrics are emitted either
  // on failure here, or after the returned sequence is fully consumed.
  @VisibleForTesting
  public Sequence<Object[]> runSimple(
      String sql,
      Map<String, Object> queryContext,
      List<SqlParameter> parameters,
      AuthenticationResult authenticationResult
  ) throws RelConversionException
  {
    Sequence<Object[]> result;

    initialize(sql, queryContext);
    try {
      setParameters(SqlQuery.getParameterList(parameters));
      validateAndAuthorize(authenticationResult);
      plan();
      result = execute();
    }
    catch (Throwable e) {
      emitLogsAndMetrics(e, null, -1);
      throw e;
    }

    return Sequences.wrap(result, new SequenceWrapper()
    {
      @Override
      public void after(boolean isDone, Throwable thrown)
      {
        emitLogsAndMetrics(thrown, null, -1);
      }
    });
  }

  @VisibleForTesting
  public ValidationResult runAnalyzeResources(AuthenticationResult authenticationResult)
  {
    synchronized (lock) {
      return validate(authenticationResult);
    }
  }

  // Prefers the planned row type; falls back to the prepared one (JDBC prepare flow).
  public RelDataType rowType()
  {
    synchronized (lock) {
      return plannerResult != null ? plannerResult.rowType() : prepareResult.getRowType();
    }
  }

  /**
   * Emit logs and metrics for this query.
   *
   * @param e             exception that occurred while processing this query
   * @param remoteAddress remote address, for logging; or null if unknown
   * @param bytesWritten  number of bytes written; will become a query/bytes metric if >= 0
   */
  public void emitLogsAndMetrics(
      @Nullable final Throwable e,
      @Nullable final String remoteAddress,
      final long bytesWritten
  )
  {
    synchronized (lock) {
      if (sql == null) {
        // Never initialized, don't log or emit anything.
        return;
      }

      if (state == State.DONE) {
        log.warn("Tried to emit logs and metrics twice for query[%s]!", sqlQueryId());
      }

      state = State.DONE;

      final boolean success = e == null;
      final long queryTimeNs = System.nanoTime() - startNs;

      try {
        ServiceMetricEvent.Builder metricBuilder = ServiceMetricEvent.builder();
        if (plannerContext != null) {
          metricBuilder.setDimension("id", plannerContext.getSqlQueryId());
          metricBuilder.setDimension("nativeQueryIds", plannerContext.getNativeQueryIds().toString());
        }
        if (validationResult != null) {
          metricBuilder.setDimension(
              "dataSource",
              validationResult.getResources().stream().map(Resource::getName).collect(Collectors.toList()).toString()
          );
        }
        metricBuilder.setDimension("remoteAddress", StringUtils.nullToEmptyNonDruidDataString(remoteAddress));
        metricBuilder.setDimension("success", String.valueOf(success));
        emitter.emit(metricBuilder.build("sqlQuery/time", TimeUnit.NANOSECONDS.toMillis(queryTimeNs)));
        if (bytesWritten >= 0) {
          emitter.emit(metricBuilder.build("sqlQuery/bytes", bytesWritten));
        }

        final Map<String, Object> statsMap = new LinkedHashMap<>();
        statsMap.put("sqlQuery/time", TimeUnit.NANOSECONDS.toMillis(queryTimeNs));
        statsMap.put("sqlQuery/bytes", bytesWritten);
        statsMap.put("success", success);
        statsMap.put("context", queryContext);
        if (plannerContext != null) {
          statsMap.put("identity", plannerContext.getAuthenticationResult().getIdentity());
          // NOTE(review): this writes into queryContext (not statsMap), mutating the
          // stored context so the native query ids appear in the logged "context"
          // entry above — confirm this mutation is intended rather than a misplaced
          // statsMap.put.
          queryContext.put("nativeQueryIds", plannerContext.getNativeQueryIds().toString());
        }
        if (e != null) {
          statsMap.put("exception", e.toString());

          if (e instanceof QueryInterruptedException || e instanceof QueryTimeoutException) {
            statsMap.put("interrupted", true);
            statsMap.put("reason", e.toString());
          }
        }

        requestLogger.logSqlQuery(
            RequestLogLine.forSql(
                sql,
                queryContext,
                DateTimes.utc(startMs),
                remoteAddress,
                new QueryStats(statsMap)
            )
        );
      }
      catch (Exception ex) {
        log.error(ex, "Unable to log SQL [%s]!", sql);
      }
    }
  }

  // Enforces the legal stage ordering; throws if the lifecycle is not in `from`.
  @GuardedBy("lock")
  private void transition(final State from, final State to)
  {
    if (state != from) {
      throw new ISE("Cannot transition from[%s] to[%s] because current state[%s] is not [%s].", from, to, state, from);
    }
    state = to;
  }

  // Lifecycle stages; see the class javadoc for the legal ordering.
  enum State
  {
    NEW,
    INITIALIZED,
    AUTHORIZING,
    AUTHORIZED,
    PLANNED,
    EXECUTING,
    UNAUTHORIZED,
    DONE
  }
}
| |
package com.kinetica.kafka;
import java.util.Map;
import java.util.UUID;
import org.apache.kafka.common.config.AbstractConfig;
import org.apache.kafka.common.config.ConfigDef;
import org.apache.kafka.common.config.ConfigDef.Range;
import org.apache.kafka.connect.errors.ConnectException;
import org.apache.kafka.connect.sink.SinkTask;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class KineticaSinkConnectorConfig extends AbstractConfig {
    private static final Logger LOGGER = LoggerFactory.getLogger(KineticaSinkConnectorConfig.class);

    // config params

    // Connection parameters.
    public static final String PARAM_URL = "kinetica.url";
    public static final String PARAM_USERNAME = "kinetica.username";
    public static final String PARAM_PASSWORD = "kinetica.password";
    public static final String PARAM_TIMEOUT = "kinetica.timeout";
    public static final String PARAM_ENABLE_MULTI_HEAD = "kinetica.enable_multihead";
    public static final String PARAM_RETRY_COUNT = "kinetica.retry_count";
    public static final String PARAM_BATCH_SIZE = "kinetica.batch_size";

    // Deprecated (flat "kinetica.*") parameter names, still accepted for backward
    // compatibility; the constructor copies their values into the current names below.
    public static final String DEPRECATED_PARAM_COLLECTION = "kinetica.collection_name";
    public static final String DEPRECATED_PARAM_DEST_TABLE_OVERRIDE = "kinetica.dest_table_override";
    public static final String DEPRECATED_PARAM_TABLE_PREFIX = "kinetica.table_prefix";
    public static final String DEPRECATED_PARAM_CREATE_TABLE = "kinetica.create_table";
    public static final String DEPRECATED_PARAM_ADD_NEW_FIELDS = "kinetica.add_new_fields_as_columns";
    public static final String DEPRECATED_PARAM_MAKE_MISSING_FIELDS_NULLABLE = "kinetica.make_missing_field_nullable";
    public static final String DEPRECATED_PARAM_SINGLE_TABLE_PER_TOPIC = "kinetica.single_table_per_topic";
    public static final String DEPRECATED_PARAM_ALLOW_SCHEMA_EVOLUTION = "kinetica.allow_schema_evolution";
    public static final String DEPRECATED_PARAM_UPDATE_ON_EXISTING_PK = "kinetica.update_on_existing_pk";

    // Current table-related parameter names ("kinetica.tables.*").
    public static final String PARAM_CREATE_TABLE = "kinetica.tables.create_table";
    public static final String PARAM_TABLE_PREFIX = "kinetica.tables.prefix";
    public static final String PARAM_SCHEMA = "kinetica.tables.schema_name";
    public static final String PARAM_DEST_TABLE_OVERRIDE = "kinetica.tables.destination_name";
    public static final String PARAM_SINGLE_TABLE_PER_TOPIC = "kinetica.tables.single_table_per_topic";
    public static final String PARAM_UPDATE_ON_EXISTING_PK = "kinetica.tables.update_on_existing_pk";

    // Schema-evolution parameter names ("kinetica.schema_evolution.*").
    public static final String PARAM_ALLOW_SCHEMA_EVOLUTION = "kinetica.schema_evolution.enabled";
    public static final String PARAM_ADD_NEW_FIELDS = "kinetica.schema_evolution.add_new_fields_as_columns";
    public static final String PARAM_MAKE_MISSING_FIELDS_NULLABLE = "kinetica.schema_evolution.make_missing_field_nullable";

    // Default values (kept as strings so they can be fed into ConfigDef definitions).
    private static final String DEFAULT_TIMEOUT = "0";
    private static final String DEFAULT_BATCH_SIZE = "10000";
    public static final String DEFAULT_DOT_REPLACEMENT = "_";
    private static final String PARAM_GROUP = "Kinetica Properties";

    // Shared configuration definition used by all instances.
    static ConfigDef config = baseConfigDef();

    // Unique connector name, used to manage connectors through the REST interface.
    private final String connectorName;
    /**
     * Constructs a sink-connector configuration from raw connector properties,
     * validated against the shared base {@link ConfigDef}.
     *
     * @param props the connector properties supplied by Kafka Connect.
     */
    public KineticaSinkConnectorConfig(Map<String, String> props) {
        this(config, props);
    }
protected KineticaSinkConnectorConfig(ConfigDef config, Map<String, String> props) {
super(config, props);
connectorName = props.containsKey("name") ? props.get("name") : UUID.randomUUID().toString();
// Validate destination table override value
// When SinkConnector has a single_table_per_topic flag set, check the lengths of topics name list
// and destination table override names list, exit with error if list lengths differ
if ( new Boolean(props.get(PARAM_SINGLE_TABLE_PER_TOPIC)) &&
!validateOverride(props.get(SinkTask.TOPICS_CONFIG), props.get(PARAM_DEST_TABLE_OVERRIDE))) {
throw new ConnectException("Invalid configuration, with " +
PARAM_SINGLE_TABLE_PER_TOPIC + " = " + props.get(PARAM_SINGLE_TABLE_PER_TOPIC) + "\n" +
"expected exactly one destination table name per each topic name:\n" +
PARAM_DEST_TABLE_OVERRIDE + " = " + props.get(PARAM_DEST_TABLE_OVERRIDE) + "\n" +
SinkTask.TOPICS_CONFIG + " = " + props.get(SinkTask.TOPICS_CONFIG) + "\n" +
"Both parameters can be comma-separated lists of equal length or " + PARAM_DEST_TABLE_OVERRIDE + " can be left blank.");
}
if (!props.containsKey(PARAM_TABLE_PREFIX)) {
if (props.containsKey(DEPRECATED_PARAM_TABLE_PREFIX)) {
props.put(PARAM_TABLE_PREFIX, props.get(DEPRECATED_PARAM_TABLE_PREFIX));
} else {
props.put(PARAM_TABLE_PREFIX, config.configKeys().get(PARAM_TABLE_PREFIX).defaultValue.toString());
}
}
if (!props.containsKey(PARAM_DEST_TABLE_OVERRIDE)) {
if (props.containsKey(DEPRECATED_PARAM_DEST_TABLE_OVERRIDE)) {
props.put(PARAM_DEST_TABLE_OVERRIDE, props.get(DEPRECATED_PARAM_DEST_TABLE_OVERRIDE));
} else {
props.put(PARAM_DEST_TABLE_OVERRIDE, config.configKeys().get(PARAM_DEST_TABLE_OVERRIDE).defaultValue.toString());
}
}
if (!props.containsKey(PARAM_SCHEMA)) {
if (props.containsKey(DEPRECATED_PARAM_COLLECTION)) {
props.put(PARAM_SCHEMA, props.get(DEPRECATED_PARAM_COLLECTION));
} else {
props.put(PARAM_SCHEMA, config.configKeys().get(PARAM_SCHEMA).defaultValue.toString());
}
}
if (!props.containsKey(PARAM_SINGLE_TABLE_PER_TOPIC)) {
if (props.containsKey(DEPRECATED_PARAM_SINGLE_TABLE_PER_TOPIC)) {
props.put(PARAM_SINGLE_TABLE_PER_TOPIC, props.get(DEPRECATED_PARAM_SINGLE_TABLE_PER_TOPIC));
} else {
props.put(PARAM_SINGLE_TABLE_PER_TOPIC, config.configKeys().get(PARAM_SINGLE_TABLE_PER_TOPIC).defaultValue.toString());
}
}
if (!props.containsKey(PARAM_CREATE_TABLE)) {
if (props.containsKey(DEPRECATED_PARAM_CREATE_TABLE)) {
props.put(PARAM_CREATE_TABLE, props.get(DEPRECATED_PARAM_CREATE_TABLE));
} else {
props.put(PARAM_CREATE_TABLE, config.configKeys().get(PARAM_CREATE_TABLE).defaultValue.toString());
}
}
if (!props.containsKey(PARAM_ADD_NEW_FIELDS)) {
if (props.containsKey(DEPRECATED_PARAM_ADD_NEW_FIELDS)) {
props.put(PARAM_ADD_NEW_FIELDS, props.get(DEPRECATED_PARAM_ADD_NEW_FIELDS));
} else {
props.put(PARAM_ADD_NEW_FIELDS, config.configKeys().get(PARAM_ADD_NEW_FIELDS).defaultValue.toString());
}
}
if (!props.containsKey(PARAM_MAKE_MISSING_FIELDS_NULLABLE)) {
if (props.containsKey(DEPRECATED_PARAM_MAKE_MISSING_FIELDS_NULLABLE)) {
props.put(PARAM_MAKE_MISSING_FIELDS_NULLABLE, props.get(DEPRECATED_PARAM_MAKE_MISSING_FIELDS_NULLABLE));
} else {
props.put(PARAM_MAKE_MISSING_FIELDS_NULLABLE, config.configKeys().get(PARAM_MAKE_MISSING_FIELDS_NULLABLE).defaultValue.toString());
}
}
if (!props.containsKey(PARAM_ALLOW_SCHEMA_EVOLUTION)) {
if (props.containsKey(DEPRECATED_PARAM_ALLOW_SCHEMA_EVOLUTION)) {
props.put(PARAM_ALLOW_SCHEMA_EVOLUTION, props.get(DEPRECATED_PARAM_ALLOW_SCHEMA_EVOLUTION));
} else {
props.put(PARAM_ALLOW_SCHEMA_EVOLUTION, config.configKeys().get(PARAM_ALLOW_SCHEMA_EVOLUTION).defaultValue.toString());
}
}
if (!props.containsKey(PARAM_UPDATE_ON_EXISTING_PK)) {
if (props.containsKey(DEPRECATED_PARAM_UPDATE_ON_EXISTING_PK)) {
props.put(PARAM_UPDATE_ON_EXISTING_PK, props.get(DEPRECATED_PARAM_UPDATE_ON_EXISTING_PK));
} else {
props.put(PARAM_UPDATE_ON_EXISTING_PK, config.configKeys().get(PARAM_UPDATE_ON_EXISTING_PK).defaultValue.toString());
}
}
}
/**
* Returns unique Connector name (used to manage Connectors through REST interface)
* @return Connector name
*/
public String getConnectorName() {
return connectorName;
}
/**
 * Returns basic Sink Connector configuration.
 *
 * <p>Parameters 1-16 are the current configuration surface; parameters 17-25
 * are deprecated aliases kept for backwards compatibility. Each deprecated
 * entry's description points at its replacement parameter.
 *
 * @return ConfigDef describing every supported (and deprecated) parameter
 */
public static ConfigDef baseConfigDef() {
    return new ConfigDef()
        // --- connection settings ---
        .define(PARAM_URL, ConfigDef.Type.STRING, ConfigDef.Importance.HIGH,
                "Kinetica URL, e.g. 'http://localhost:9191'", PARAM_GROUP, 1, ConfigDef.Width.SHORT,
                "Kinetica URL")
        .define(PARAM_USERNAME, ConfigDef.Type.STRING, "", ConfigDef.Importance.MEDIUM,
                "Kinetica username (optional)", PARAM_GROUP, 2, ConfigDef.Width.SHORT, "Username")
        .define(PARAM_PASSWORD, ConfigDef.Type.STRING, "", ConfigDef.Importance.MEDIUM,
                "Kinetica password (optional)", PARAM_GROUP, 3, ConfigDef.Width.SHORT, "Password")
        // --- destination naming ---
        .define(PARAM_SCHEMA, ConfigDef.Type.STRING, "", ConfigDef.Importance.HIGH,
                "Kinetica schema name (optional, default empty)", PARAM_GROUP, 4,
                ConfigDef.Width.LONG, "Schema Name")
        .define(PARAM_TABLE_PREFIX, ConfigDef.Type.STRING, "", ConfigDef.Importance.HIGH,
                "Prefix applied to tablenames from Kafka schema. (optional)", PARAM_GROUP, 5,
                ConfigDef.Width.LONG, "Table Prefix")
        .define(PARAM_DEST_TABLE_OVERRIDE, ConfigDef.Type.STRING, "", ConfigDef.Importance.HIGH,
                "Table name that will replace name automatically generated from the schema. (optional)",
                PARAM_GROUP, 6, ConfigDef.Width.LONG, "Table Override")
        // --- ingest tuning; Range.atLeast rejects nonsensical values at config validation time ---
        .define(PARAM_TIMEOUT, ConfigDef.Type.INT, DEFAULT_TIMEOUT, Range.atLeast(0), ConfigDef.Importance.LOW,
                "Kinetica timeout (ms) (optional, default " + DEFAULT_TIMEOUT + "); 0 = no timeout",
                PARAM_GROUP, 7, ConfigDef.Width.SHORT, "Connection Timeout")
        .define(PARAM_BATCH_SIZE, ConfigDef.Type.INT, DEFAULT_BATCH_SIZE, Range.atLeast(1),
                ConfigDef.Importance.LOW, "Kinetica batch size (optional, default " + DEFAULT_BATCH_SIZE + ")",
                PARAM_GROUP, 8, ConfigDef.Width.SHORT, "Batch Size")
        // --- table/schema management behavior ---
        .define(PARAM_CREATE_TABLE, ConfigDef.Type.BOOLEAN, true, ConfigDef.Importance.LOW,
                "Create missing tables. (optional, default true)", PARAM_GROUP, 9, ConfigDef.Width.SHORT,
                "Create Table")
        .define(PARAM_SINGLE_TABLE_PER_TOPIC, ConfigDef.Type.BOOLEAN, false, ConfigDef.Importance.LOW,
                "Creates a single kinetica table per each Kafka topic. (optional, default false)",
                PARAM_GROUP, 10, ConfigDef.Width.SHORT, "Single table per topic")
        .define(PARAM_ADD_NEW_FIELDS, ConfigDef.Type.BOOLEAN, false, ConfigDef.Importance.LOW,
                "Add new field names as columns to Kinetica table. (optional, default false)",
                PARAM_GROUP, 11, ConfigDef.Width.SHORT,
                "Add new columns")
        .define(PARAM_MAKE_MISSING_FIELDS_NULLABLE, ConfigDef.Type.BOOLEAN, false, ConfigDef.Importance.LOW,
                "Make missing from schema fields nullable columns in Kinetica table. (optional, default false)",
                PARAM_GROUP, 12, ConfigDef.Width.SHORT,
                "Alter existing column to nullable")
        .define(PARAM_RETRY_COUNT, ConfigDef.Type.INT, 1, Range.atLeast(1),
                ConfigDef.Importance.LOW, "Number of attempts to insert record into Kinetica table. (optional, default 1)",
                PARAM_GROUP, 13, ConfigDef.Width.SHORT, "Retry count")
        .define(PARAM_ALLOW_SCHEMA_EVOLUTION, ConfigDef.Type.BOOLEAN, false, ConfigDef.Importance.LOW,
                "Schema evolution enabled for incoming Kafka messages. (optional, default false)", PARAM_GROUP, 14, ConfigDef.Width.SHORT,
                "Schema evolution enabled")
        .define(PARAM_UPDATE_ON_EXISTING_PK, ConfigDef.Type.BOOLEAN, true, ConfigDef.Importance.LOW,
                "Force update on existing PK when inserting Kafka messages. (optional, default true)",
                PARAM_GROUP, 15, ConfigDef.Width.SHORT, "Force update on existing PK")
        .define(PARAM_ENABLE_MULTI_HEAD, ConfigDef.Type.BOOLEAN, true, ConfigDef.Importance.LOW,
                "Allow multi-head data ingest. (optional, default true)", PARAM_GROUP, 16, ConfigDef.Width.SHORT,
                "Allow multi-head data ingest")
        // --- deprecated aliases; defaults mirror their replacements above ---
        .define(DEPRECATED_PARAM_COLLECTION, ConfigDef.Type.STRING, "", ConfigDef.Importance.LOW,
                "Kinetica collection name (deprecated, use " + PARAM_SCHEMA + " instead)", PARAM_GROUP, 17,
                ConfigDef.Width.LONG, String.format("%s (deprecated)", DEPRECATED_PARAM_COLLECTION))
        .define(DEPRECATED_PARAM_TABLE_PREFIX, ConfigDef.Type.STRING, "", ConfigDef.Importance.LOW,
                "Prefix applied to tablenames from Kafka schema. (deprecated, use " + PARAM_TABLE_PREFIX + " instead)",
                PARAM_GROUP, 18, ConfigDef.Width.LONG, String.format("%s (deprecated)", DEPRECATED_PARAM_TABLE_PREFIX))
        .define(DEPRECATED_PARAM_DEST_TABLE_OVERRIDE, ConfigDef.Type.STRING, "", ConfigDef.Importance.LOW,
                "Table name that will replace name automatically generated from the schema. (deprecated, use " +
                PARAM_DEST_TABLE_OVERRIDE + " instead)",
                PARAM_GROUP, 19, ConfigDef.Width.LONG, String.format("%s (deprecated)", DEPRECATED_PARAM_DEST_TABLE_OVERRIDE))
        .define(DEPRECATED_PARAM_CREATE_TABLE, ConfigDef.Type.BOOLEAN, true, ConfigDef.Importance.LOW,
                "Create missing tables. (deprecated, use " + PARAM_CREATE_TABLE + " instead)", PARAM_GROUP, 20, ConfigDef.Width.SHORT,
                String.format("%s (deprecated)", DEPRECATED_PARAM_CREATE_TABLE))
        .define(DEPRECATED_PARAM_SINGLE_TABLE_PER_TOPIC, ConfigDef.Type.BOOLEAN, false, ConfigDef.Importance.LOW,
                "Creates a single kinetica table per each Kafka topic. (deprecated, use " + PARAM_SINGLE_TABLE_PER_TOPIC + " instead)",
                PARAM_GROUP, 21, ConfigDef.Width.SHORT, String.format("%s (deprecated)", DEPRECATED_PARAM_SINGLE_TABLE_PER_TOPIC))
        .define(DEPRECATED_PARAM_ADD_NEW_FIELDS, ConfigDef.Type.BOOLEAN, false, ConfigDef.Importance.LOW,
                "Add new field names as columns to Kinetica table. (deprecated, use " + PARAM_ADD_NEW_FIELDS +
                " instead)", PARAM_GROUP, 22, ConfigDef.Width.SHORT,
                String.format("%s (deprecated)", DEPRECATED_PARAM_ADD_NEW_FIELDS))
        .define(DEPRECATED_PARAM_MAKE_MISSING_FIELDS_NULLABLE, ConfigDef.Type.BOOLEAN, false, ConfigDef.Importance.LOW,
                "Make missing from schema fields nullable columns in Kinetica table. (deprecated, use " +
                PARAM_MAKE_MISSING_FIELDS_NULLABLE + " instead)", PARAM_GROUP, 23, ConfigDef.Width.SHORT,
                String.format("%s (deprecated)", DEPRECATED_PARAM_MAKE_MISSING_FIELDS_NULLABLE))
        // Fixed: the description previously pointed at the deprecated name itself
        // rather than at the replacement parameter.
        .define(DEPRECATED_PARAM_ALLOW_SCHEMA_EVOLUTION, ConfigDef.Type.BOOLEAN, false, ConfigDef.Importance.LOW,
                "Allow schema evolution for incoming Kafka messages. (deprecated, use " + PARAM_ALLOW_SCHEMA_EVOLUTION
                + " instead)", PARAM_GROUP, 24, ConfigDef.Width.SHORT, String.format("%s (deprecated)", DEPRECATED_PARAM_ALLOW_SCHEMA_EVOLUTION))
        .define(DEPRECATED_PARAM_UPDATE_ON_EXISTING_PK, ConfigDef.Type.BOOLEAN, true, ConfigDef.Importance.LOW,
                "Allow update on existing PK when inserting Kafka messages. (deprecated, use " + PARAM_UPDATE_ON_EXISTING_PK +
                " instead)", PARAM_GROUP, 25, ConfigDef.Width.SHORT,
                String.format("%s (deprecated)", DEPRECATED_PARAM_UPDATE_ON_EXISTING_PK));
}
/**
 * Command-line entry point: dumps the connector configuration reference
 * (reStructuredText produced by {@code config.toRst()}) to stdout.
 *
 * @param args ignored
 */
public static void main(String[] args) {
    final String rstDocs = config.toRst();
    System.out.println(rstDocs);
}
/**
 * Validates the table override parameters.
 *
 * <p>An override is well-formed when it is absent, when a single topic is
 * paired with a single override name, or when both values are comma-separated
 * lists of the same length (one-to-one mapping).
 *
 * @param topicNames list of topics as a String
 * @param tableOverrideNames list of table names overriding topic names in Kinetica, could be left empty
 *
 * @return whether tableOverrideNames is well-formed and override is possible
 */
private static boolean validateOverride (String topicNames, String tableOverrideNames) {
    // No override configured: nothing to validate.
    if (tableOverrideNames == null || tableOverrideNames.isEmpty()) {
        return true;
    }
    // Overrides are meaningless without topics to apply them to.
    if (topicNames == null || topicNames.isEmpty()) {
        return false;
    }
    final boolean topicsAreList = topicNames.contains(",");
    final boolean overridesAreList = tableOverrideNames.contains(",");
    if (!topicsAreList && !overridesAreList) {
        // Exactly one topic mapped onto exactly one override name.
        return true;
    }
    // Otherwise a one-to-one mapping requires both sides to be lists of equal size.
    return topicsAreList && overridesAreList
        && topicNames.split(",").length == tableOverrideNames.split(",").length;
}
}
| |
/*
* Copyright (C) 2014 Square, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.squareup.okhttp.ws;
import com.squareup.okhttp.Call;
import com.squareup.okhttp.Callback;
import com.squareup.okhttp.Connection;
import com.squareup.okhttp.OkHttpClient;
import com.squareup.okhttp.Request;
import com.squareup.okhttp.Response;
import com.squareup.okhttp.internal.Internal;
import com.squareup.okhttp.internal.NamedRunnable;
import com.squareup.okhttp.internal.Util;
import com.squareup.okhttp.internal.ws.RealWebSocket;
import com.squareup.okhttp.internal.ws.WebSocketProtocol;
import java.io.IOException;
import java.net.ProtocolException;
import java.net.Socket;
import java.security.SecureRandom;
import java.util.Collections;
import java.util.Random;
import java.util.concurrent.Executor;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.ThreadPoolExecutor;
import okio.BufferedSink;
import okio.BufferedSource;
import okio.ByteString;
import okio.Okio;
import static java.util.concurrent.TimeUnit.SECONDS;
/**
 * A call that performs the RFC 6455 HTTP upgrade handshake and, on success,
 * hands a connected {@link RealWebSocket} to the caller's listener. Instances
 * are single-use: one request, one handshake attempt.
 */
public class WebSocketCall {
/**
* Prepares the {@code request} to create a web socket at some point in the future.
*/
public static WebSocketCall create(OkHttpClient client, Request request) {
return new WebSocketCall(client, request);
}
// The upgraded request actually sent over the wire: http/https scheme plus
// the Sec-WebSocket handshake headers.
private final Request request;
// Underlying HTTP call used to perform the upgrade handshake.
private final Call call;
// Entropy source for the handshake nonce (also passed on to the web socket).
private final Random random;
// Base64 of a random 16-byte nonce, sent as Sec-WebSocket-Key and later
// checked against the server's Sec-WebSocket-Accept echo.
private final String key;
protected WebSocketCall(OkHttpClient client, Request request) {
this(client, request, new SecureRandom());
}
/**
 * Visible-for-testing constructor that accepts an explicit {@link Random}
 * so the handshake nonce is deterministic in tests.
 *
 * @throws IllegalArgumentException if the request is not a GET or its URL
 *     does not use the ws, wss, http, or https scheme
 */
WebSocketCall(OkHttpClient client, Request request, Random random) {
if (!"GET".equals(request.method())) {
throw new IllegalArgumentException("Request must be GET: " + request.method());
}
// Map ws/wss schemes onto their HTTP equivalents; http/https pass through.
String url = request.urlString();
String httpUrl;
if (url.startsWith("ws://")) {
httpUrl = "http://" + url.substring(5);
} else if (url.startsWith("wss://")) {
httpUrl = "https://" + url.substring(6);
} else if (url.startsWith("http://") || url.startsWith("https://")) {
httpUrl = url;
} else {
throw new IllegalArgumentException(
"Request url must use 'ws', 'wss', 'http', or 'https' scheme: " + url);
}
this.random = random;
// 16 random bytes, base64-encoded, form the Sec-WebSocket-Key value.
byte[] nonce = new byte[16];
random.nextBytes(nonce);
key = ByteString.of(nonce).base64();
// Copy the client. Otherwise changes (socket factory, redirect policy,
// etc.) may incorrectly be reflected in the request when it is executed.
client = client.clone();
// Force HTTP/1.1 until the WebSocket over HTTP/2 version is finalized.
client.setProtocols(Collections.singletonList(com.squareup.okhttp.Protocol.HTTP_1_1));
// Rebuild the request with the upgrade handshake headers attached.
request = request.newBuilder()
.url(httpUrl)
.header("Upgrade", "websocket")
.header("Connection", "Upgrade")
.header("Sec-WebSocket-Key", key)
.header("Sec-WebSocket-Version", "13")
.build();
this.request = request;
call = client.newCall(request);
}
/**
 * Schedules the request to be executed at some point in the future.
 *
 * <p>The {@link OkHttpClient#getDispatcher dispatcher} defines when the request will run:
 * usually immediately unless there are several other requests currently being executed.
 *
 * <p>This client will later call back {@code responseCallback} with either an HTTP response or a
 * failure exception. If you {@link #cancel} a request before it completes the callback will not
 * be invoked.
 *
 * @throws IllegalStateException when the call has already been executed.
 */
public void enqueue(final WebSocketListener listener) {
Callback responseCallback = new Callback() {
@Override public void onResponse(Response response) throws IOException {
try {
// Validate the handshake response and promote the connection to a
// web socket; handshake failures surface through onFailure.
createWebSocket(response, listener);
} catch (IOException e) {
listener.onFailure(e);
}
}
@Override public void onFailure(Request request, IOException e) {
listener.onFailure(e);
}
};
// TODO call.enqueue(responseCallback, true);
// Goes through Internal.instance because the forWebSocket variant of
// enqueue is not part of the public Call API.
Internal.instance.callEnqueue(call, responseCallback, true);
}
/** Cancels the request, if possible. Requests that are already complete cannot be canceled. */
public void cancel() {
call.cancel();
}
/**
 * Validates the server's 101 handshake response, takes ownership of the
 * underlying connection, and starts the dedicated reader thread before
 * notifying {@code listener} via onOpen.
 *
 * @throws ProtocolException if the status code or any handshake header is wrong
 */
private void createWebSocket(Response response, WebSocketListener listener)
throws IOException {
if (response.code() != 101) {
// TODO call.engine.releaseConnection();
// Not an upgrade: return the connection to the pool before failing.
Internal.instance.callEngineReleaseConnection(call);
throw new ProtocolException("Expected HTTP 101 response but was '"
+ response.code()
+ " "
+ response.message()
+ "'");
}
String headerConnection = response.header("Connection");
if (!"Upgrade".equalsIgnoreCase(headerConnection)) {
throw new ProtocolException(
"Expected 'Connection' header value 'Upgrade' but was '" + headerConnection + "'");
}
String headerUpgrade = response.header("Upgrade");
if (!"websocket".equalsIgnoreCase(headerUpgrade)) {
throw new ProtocolException(
"Expected 'Upgrade' header value 'websocket' but was '" + headerUpgrade + "'");
}
// The server must echo SHA1(key + magic) base64-encoded; anything else
// means it did not actually perform the web socket handshake.
String headerAccept = response.header("Sec-WebSocket-Accept");
String acceptExpected = Util.shaBase64(key + WebSocketProtocol.ACCEPT_MAGIC);
if (!acceptExpected.equals(headerAccept)) {
throw new ProtocolException("Expected 'Sec-WebSocket-Accept' header value '"
+ acceptExpected
+ "' but was '"
+ headerAccept
+ "'");
}
// TODO connection = call.engine.getConnection();
Connection connection = Internal.instance.callEngineGetConnection(call);
// TODO if (!connection.clearOwner()) {
// Detach the connection from the HTTP engine before wrapping its socket;
// order matters — streams must not be shared with the engine afterwards.
if (!Internal.instance.clearOwner(connection)) {
throw new IllegalStateException("Unable to take ownership of connection.");
}
Socket socket = connection.getSocket();
BufferedSource source = Okio.buffer(Okio.source(socket));
BufferedSink sink = Okio.buffer(Okio.sink(socket));
final RealWebSocket webSocket =
ConnectionWebSocket.create(response, connection, source, sink, random, listener);
// Start a dedicated thread for reading the web socket.
new Thread(new NamedRunnable("OkHttp WebSocket reader %s", request.urlString()) {
@Override protected void execute() {
// Loop until readMessage() reports the stream is exhausted or closed.
while (webSocket.readMessage()) {
}
}
}).start();
// TODO connection.setOwner(webSocket);
Internal.instance.connectionSetOwner(connection, webSocket);
listener.onOpen(webSocket, request, response);
}
// Keep static so that the WebSocketCall instance can be garbage collected.
private static class ConnectionWebSocket extends RealWebSocket {
/** Builds a client-side web socket with a single-thread executor for replies (pings/close). */
static RealWebSocket create(Response response, Connection connection, BufferedSource source,
BufferedSink sink, Random random, WebSocketListener listener) {
String url = response.request().urlString();
// Single reply thread, allowed to time out when idle so it does not
// keep the process alive.
ThreadPoolExecutor replyExecutor =
new ThreadPoolExecutor(1, 1, 1, SECONDS, new LinkedBlockingDeque<Runnable>(),
Util.threadFactory(String.format("OkHttp %s WebSocket", url), true));
replyExecutor.allowCoreThreadTimeOut(true);
return new ConnectionWebSocket(connection, source, sink, random, replyExecutor, listener,
url);
}
// Connection owned by this web socket; closed when the socket closes.
private final Connection connection;
private ConnectionWebSocket(Connection connection, BufferedSource source, BufferedSink sink,
Random random, Executor replyExecutor, WebSocketListener listener, String url) {
super(true /* is client */, source, sink, random, replyExecutor, listener, url);
this.connection = connection;
}
@Override protected void closeConnection() throws IOException {
// TODO connection.closeIfOwnedBy(this);
// Only close if ownership was not transferred away in the meantime.
Internal.instance.closeIfOwnedBy(connection, this);
}
}
}
| |
/*
* Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.config;
import com.hazelcast.internal.cluster.Versions;
import com.hazelcast.test.HazelcastParallelClassRunner;
import com.hazelcast.test.annotation.ParallelJVMTest;
import com.hazelcast.test.annotation.QuickTest;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.w3c.dom.Attr;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.Node;
import org.w3c.dom.TypeInfo;
import org.w3c.dom.traversal.DocumentTraversal;
import org.w3c.dom.traversal.NodeFilter;
import org.w3c.dom.traversal.NodeIterator;
import org.xml.sax.SAXException;
import javax.xml.XMLConstants;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.transform.Source;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMResult;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import javax.xml.transform.stream.StreamSource;
import javax.xml.validation.Schema;
import javax.xml.validation.SchemaFactory;
import javax.xml.validation.Validator;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.io.StringWriter;
import java.net.URL;
import static com.hazelcast.instance.BuildInfoProvider.HAZELCAST_INTERNAL_OVERRIDE_VERSION;
import static com.hazelcast.internal.util.XmlUtil.getNsAwareDocumentBuilderFactory;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.w3c.dom.TypeInfo.DERIVATION_RESTRICTION;
/**
 * Test cases specific only to XML based configuration. The cases not
 * XML specific should be added to {@link XMLConfigBuilderTest}.
 * <p>
 * This test class is expected to contain only <strong>extra</strong> test
 * cases over the ones defined in {@link XMLConfigBuilderTest} in order
 * to cover XML specific cases where XML configuration derives from the
 * YAML configuration to allow usage of XML-native constructs.
 *
 * @see XMLConfigBuilderTest
 */
@RunWith(HazelcastParallelClassRunner.class)
@Category({QuickTest.class, ParallelJVMTest.class})
public class XmlOnlyConfigBuilderTest {
private static final String HAZELCAST_START_TAG = "<hazelcast xmlns=\"http://www.hazelcast.com/schema/config\">\n";
private static final String HAZELCAST_END_TAG = "</hazelcast>\n";
@Test(expected = InvalidConfigurationException.class)
public void testMissingNamespace() {
String xml = "<hazelcast/>";
buildConfig(xml);
}
@Test(expected = InvalidConfigurationException.class)
public void testInvalidNamespace() {
String xml = "<hazelcast xmlns=\"http://foo.bar\"/>";
buildConfig(xml);
}
@Test
public void testValidNamespace() {
String xml = HAZELCAST_START_TAG + HAZELCAST_END_TAG;
buildConfig(xml);
}
@Test(expected = InvalidConfigurationException.class)
public void testHazelcastTagAppearsTwice() {
String xml = HAZELCAST_START_TAG + "<hazelcast/>" + HAZELCAST_END_TAG;
buildConfig(xml);
}
@Test(expected = InvalidConfigurationException.class)
public void testHazelcastInstanceNameEmpty() {
String xml = HAZELCAST_START_TAG + "<instance-name></instance-name>" + HAZELCAST_END_TAG;
buildConfig(xml);
}
@Test
public void testXsdVersion() {
String origVersionOverride = System.getProperty(HAZELCAST_INTERNAL_OVERRIDE_VERSION);
// Restore in a finally block: assertXsdVersion overwrites the system
// property, and a failing assertion must not leak the override into
// other tests running in the same JVM. (The original restored the
// property only on the success path.)
try {
assertXsdVersion("0.0", "0.0");
assertXsdVersion("3.9", "3.9");
assertXsdVersion("3.9-SNAPSHOT", "3.9");
assertXsdVersion("3.9.1-SNAPSHOT", "3.9");
assertXsdVersion("3.10", "3.10");
assertXsdVersion("3.10-SNAPSHOT", "3.10");
assertXsdVersion("3.10.1-SNAPSHOT", "3.10");
assertXsdVersion("99.99.99", "99.99");
assertXsdVersion("99.99.99-SNAPSHOT", "99.99");
assertXsdVersion("99.99.99-Beta", "99.99");
assertXsdVersion("99.99.99-Beta-SNAPSHOT", "99.99");
} finally {
if (origVersionOverride != null) {
System.setProperty(HAZELCAST_INTERNAL_OVERRIDE_VERSION, origVersionOverride);
} else {
System.clearProperty(HAZELCAST_INTERNAL_OVERRIDE_VERSION);
}
}
}
@Test
public void testConfig2Xml2DefaultConfig() {
testConfig2Xml2Config("hazelcast-default.xml");
}
@Test
public void testConfig2Xml2FullConfig() {
testConfig2Xml2Config("hazelcast-fullconfig.xml");
}
@Test
public void testConfig2Xml2Config_withAdvancedNetworkConfig() {
testConfig2Xml2Config("hazelcast-fullconfig-advanced-network-config.xml");
}
@Test
public void testXSDDefaultXML() throws Exception {
testXSDConfigXML("hazelcast-default.xml");
}
@Test
public void testFullConfig() throws Exception {
testXSDConfigXML("hazelcast-fullconfig.xml");
}
@Test(expected = IllegalArgumentException.class)
public void testAttributeConfig_noName_noExtractor() {
String xml = HAZELCAST_START_TAG
+ "        <map name=\"people\">\n"
+ "           <attributes>\n"
+ "               <attribute></attribute>\n"
+ "           </attributes>"
+ "        </map>"
+ HAZELCAST_END_TAG;
buildConfig(xml);
}
@Test(expected = IllegalArgumentException.class)
public void testAttributeConfig_noName_noExtractor_singleTag() {
String xml = HAZELCAST_START_TAG
+ "        <map name=\"people\">\n"
+ "           <attributes>\n"
+ "               <attribute/>\n"
+ "           </attributes>"
+ "        </map>"
+ HAZELCAST_END_TAG;
buildConfig(xml);
}
@Test(expected = InvalidConfigurationException.class)
public void testCacheConfig_withInvalidEvictionConfig_failsFast() {
String xml = HAZELCAST_START_TAG
+ "    <cache name=\"cache\">"
+ "        <eviction size=\"10000000\" max-size-policy=\"ENTRY_COUNT\" eviction-policy=\"INVALID\"/>"
+ "    </cache>"
+ HAZELCAST_END_TAG;
buildConfig(xml);
}
@Test(expected = InvalidConfigurationException.class)
public void testInvalidRootElement() {
String xml = "<hazelcast-client>"
+ "<cluster-name>dev</cluster-name>"
+ "</hazelcast-client>";
buildConfig(xml);
}
@Test
public void testAddWhitespaceToNonSpaceStrings() throws Exception {
// parse the default config file
InputStream xmlResource = XMLConfigBuilderTest.class.getClassLoader().getResourceAsStream("hazelcast-default.xml");
DocumentBuilder builder = getNsAwareDocumentBuilderFactory().newDocumentBuilder();
Document doc = builder.parse(xmlResource);
// validate to augment with type information
Validator validator = getValidator();
DOMResult result = new DOMResult();
validator.validate(new DOMSource(doc), result);
Document validated = (Document) result.getNode();
// add whitespace to non-space-string nodes
assertTrue("No whitespace added", addWhitespaceToNonSpaceStrings(validated));
String xml = serialize(validated);
// padded values must still parse: the builder is expected to strip them
buildConfig(xml);
}
/** Sets the build-version override and checks the XSD release version derived from it. */
private static void assertXsdVersion(String buildVersion, String expectedXsdVersion) {
System.setProperty(HAZELCAST_INTERNAL_OVERRIDE_VERSION, buildVersion);
assertEquals("Unexpected release version retrieved for build version " + buildVersion, expectedXsdVersion,
new XmlConfigBuilder().getReleaseVersion());
}
/** Round-trips a classpath config through the XML generator and checks compatibility. */
private static void testConfig2Xml2Config(String fileName) {
Config config = new ClasspathXmlConfig(fileName);
String xml = new ConfigXmlGenerator(true, false).generate(config);
Config config2 = new InMemoryXmlConfig(xml);
assertTrue(ConfigCompatibilityChecker.isCompatible(config, config2));
}
/** Builds a {@link Config} from an in-memory XML string (platform default charset). */
private static Config buildConfig(String xml) {
ByteArrayInputStream bis = new ByteArrayInputStream(xml.getBytes());
XmlConfigBuilder configBuilder = new XmlConfigBuilder(bis);
return configBuilder.build();
}
/** Validates the named classpath XML resource against the current cluster-version XSD. */
private static void testXSDConfigXML(String xmlFileName) throws Exception {
InputStream xmlResource = XMLConfigBuilderTest.class.getClassLoader().getResourceAsStream(xmlFileName);
Source source = new StreamSource(xmlResource);
Validator validator = getValidator();
try {
validator.validate(source);
} catch (SAXException ex) {
fail(xmlFileName + " is not valid because: " + ex.toString());
}
}
/** Creates a validator for the hazelcast-config XSD matching the current cluster version. */
private static Validator getValidator() throws SAXException {
SchemaFactory factory = SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI);
URL schemaResource = XMLConfigBuilderTest.class.getClassLoader().getResource("hazelcast-config-"
+ Versions.CURRENT_CLUSTER_VERSION + ".xsd");
Schema schema = factory.newSchema(schemaResource);
return schema.newValidator();
}
/** Serializes a DOM node back to an XML string. */
private static String serialize(Node node) throws TransformerException {
StringWriter result = new StringWriter();
Transformer transformer = TransformerFactory.newInstance().newTransformer();
transformer.transform(new DOMSource(node), new StreamResult(result));
return result.toString();
}
/**
 * Surrounds the value of every element/attribute typed as non-space-string
 * with whitespace.
 *
 * @return true if at least one value was padded
 */
private static boolean addWhitespaceToNonSpaceStrings(Document doc) {
NodeIterator nodeIterator = ((DocumentTraversal) doc).createNodeIterator(
doc.getDocumentElement(), NodeFilter.SHOW_ELEMENT, null, true);
boolean added = false;
Node node;
while ((node = nodeIterator.nextNode()) != null) {
if (isNonSpaceString(node)) {
addWhitespace(node);
added = true;
}
NamedNodeMap attrs = node.getAttributes();
for (int i = 0; i < attrs.getLength(); i++) {
Node attr = attrs.item(i);
if (isNonSpaceString(attr)) {
addWhitespace(attr);
added = true;
}
}
}
return added;
}
/** Pads the node's value with leading and trailing whitespace. */
private static void addWhitespace(Node node) {
node.setNodeValue("  \n  " + node.getNodeValue() + "  \n  ");
}
/** Returns true when the node's schema type derives (by restriction) from non-space-string. */
private static boolean isNonSpaceString(Node node) {
TypeInfo typeInfo;
if (node.getNodeType() == Node.ELEMENT_NODE) {
typeInfo = ((Element) node).getSchemaTypeInfo();
} else if (node.getNodeType() == Node.ATTRIBUTE_NODE) {
typeInfo = ((Attr) node).getSchemaTypeInfo();
} else {
typeInfo = null;
}
return typeInfo != null && typeInfo.isDerivedFrom("http://www.hazelcast.com/schema/config",
"non-space-string", DERIVATION_RESTRICTION);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.uima.cev;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.uima.cev.artifactViewer.ArtifactViewerComparator;
import org.apache.uima.cev.extension.ICEVArtifactViewerFactory;
import org.apache.uima.cev.extension.ICEVEditorFactory;
import org.apache.uima.cev.extension.ICEVSearchStrategy;
import org.apache.uima.cev.extension.ICEVSearchStrategyFactory;
import org.apache.uima.cev.extension.ICEVViewFactory;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IConfigurationElement;
import org.eclipse.core.runtime.IExtension;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Platform;
import org.eclipse.core.runtime.Status;
import org.eclipse.core.runtime.jobs.Job;
import org.eclipse.jface.resource.ImageDescriptor;
import org.eclipse.swt.custom.BusyIndicator;
import org.eclipse.swt.widgets.Display;
import org.eclipse.ui.IWorkbenchSite;
import org.eclipse.ui.plugin.AbstractUIPlugin;
import org.eclipse.ui.progress.IWorkbenchSiteProgressService;
import org.osgi.framework.BundleContext;
public class CEVPlugin extends AbstractUIPlugin {
public static final String PLUGIN_ID = "org.apache.uima.cev";
private static final String ATT_PRIORITY = "priority";
public static final String ATT_ADAPTER = "adapter";
public static final String ATT_FACTORY = "factory";
// Shared instance
private static CEVPlugin plugin;
public CEVPlugin() {
}
/*
* (non-Javadoc)
*
* @see org.eclipse.ui.plugin.AbstractUIPlugin#start(org.osgi.framework.BundleContext)
*/
@Override
public void start(BundleContext context) throws Exception {
super.start(context);
plugin = this;
}
/*
* (non-Javadoc)
*
* @see org.eclipse.ui.plugin.AbstractUIPlugin#stop(org.osgi.framework.BundleContext)
*/
@Override
public void stop(BundleContext context) throws Exception {
plugin = null;
super.stop(context);
}
public static CEVPlugin getDefault() {
return plugin;
}
public static ImageDescriptor getImageDescriptor(String path) {
return imageDescriptorFromPlugin(PLUGIN_ID, path);
}
public static Map<Class<?>, ICEVEditorFactory> getEditorAdapters() {
Map<Class<?>, ICEVEditorFactory> result = new HashMap<Class<?>, ICEVEditorFactory>();
IExtension[] editorExtensions = Platform.getExtensionRegistry()
.getExtensionPoint(CEVPlugin.PLUGIN_ID, "cevEditors").getExtensions();
for (IExtension extension : editorExtensions) {
IConfigurationElement[] configurationElements = extension.getConfigurationElements();
for (IConfigurationElement configurationElement : configurationElements) {
Object factoryObject = null;
try {
factoryObject = configurationElement.createExecutableExtension(ATT_FACTORY);
} catch (CoreException e) {
CEVPlugin.error(e);
}
if (factoryObject instanceof ICEVEditorFactory) {
ICEVEditorFactory editorFactory = (ICEVEditorFactory) factoryObject;
result.put(editorFactory.getAdapterInterface(), editorFactory);
}
}
}
return result;
}
public static Map<Class<?>, ICEVViewFactory> getViewAdapters() {
Map<Class<?>, ICEVViewFactory> result = new HashMap<Class<?>, ICEVViewFactory>();
IExtension[] viewExtensions = Platform.getExtensionRegistry()
.getExtensionPoint(CEVPlugin.PLUGIN_ID, "cevViews").getExtensions();
for (IExtension extension : viewExtensions) {
IConfigurationElement[] configurationElements = extension.getConfigurationElements();
for (IConfigurationElement configurationElement : configurationElements) {
Object factoryObject = null;
try {
factoryObject = configurationElement.createExecutableExtension(ATT_FACTORY);
} catch (CoreException e) {
CEVPlugin.error(e);
}
if (factoryObject instanceof ICEVViewFactory) {
ICEVViewFactory viewFactory = (ICEVViewFactory) factoryObject;
result.put(viewFactory.getAdapterInterface(), viewFactory);
}
}
}
return result;
}
public static List<ICEVArtifactViewerFactory> getArtifactViewerFactories() {
List<ICEVArtifactViewerFactory> result = new ArrayList<ICEVArtifactViewerFactory>();
IExtension[] viewerExtensions = Platform.getExtensionRegistry()
.getExtensionPoint(CEVPlugin.PLUGIN_ID, "cevArtifactViewers").getExtensions();
for (IExtension extension : viewerExtensions) {
IConfigurationElement[] configurationElements = extension.getConfigurationElements();
for (IConfigurationElement configurationElement : configurationElements) {
Object factoryObject = null;
String priorityString = null;
try {
factoryObject = configurationElement.createExecutableExtension(ATT_FACTORY);
priorityString = configurationElement.getAttribute(ATT_PRIORITY);
} catch (CoreException e) {
CEVPlugin.error(e);
}
if (factoryObject instanceof ICEVArtifactViewerFactory) {
ICEVArtifactViewerFactory viewerFactory = (ICEVArtifactViewerFactory) factoryObject;
int priority = 100;
try {
priority = Integer.parseInt(priorityString);
} catch (NumberFormatException e) {
// bad string
}
viewerFactory.setPriority(priority);
result.add(viewerFactory);
}
}
}
Collections.sort(result, new ArtifactViewerComparator());
return result;
}
/**
 * Collects all search strategies contributed through the
 * <code>cevSearchStrategies</code> extension point and returns them sorted by
 * ascending priority. A contribution whose priority attribute is missing or
 * unparsable gets the default priority 100.
 *
 * @return the contributed search strategies, ordered by priority
 */
public static List<ICEVSearchStrategy> getSearchStrategies() {
  List<ICEVSearchStrategy> strategies = new ArrayList<ICEVSearchStrategy>();
  IExtension[] contributions = Platform.getExtensionRegistry()
      .getExtensionPoint(CEVPlugin.PLUGIN_ID, "cevSearchStrategies").getExtensions();
  for (IExtension contribution : contributions) {
    for (IConfigurationElement element : contribution.getConfigurationElements()) {
      Object candidate = null;
      String priorityAttribute = null;
      try {
        candidate = element.createExecutableExtension(ATT_FACTORY);
        priorityAttribute = element.getAttribute(ATT_PRIORITY);
      } catch (CoreException e) {
        CEVPlugin.error(e);
      }
      if (!(candidate instanceof ICEVSearchStrategyFactory)) {
        // Extension failed to load or is of the wrong type; skip it.
        continue;
      }
      int priority = 100;
      try {
        priority = Integer.parseInt(priorityAttribute);
      } catch (NumberFormatException e) {
        // Missing or malformed priority attribute: keep the default.
      }
      ICEVSearchStrategyFactory factory = (ICEVSearchStrategyFactory) candidate;
      strategies.add(factory.createSearchStrategy(priority));
    }
  }
  Collections.sort(strategies, new Comparator<ICEVSearchStrategy>() {
    public int compare(ICEVSearchStrategy left, ICEVSearchStrategy right) {
      int a = left.getPriority();
      int b = right.getPriority();
      return a < b ? -1 : (a > b ? 1 : 0);
    }
  });
  return strategies;
}
/**
 * Logs the given throwable as an error in this plug-in's log.
 *
 * @param t the throwable to log; must not be null
 */
public static void error(Throwable t) {
  // Status requires a non-null message, but many throwables (e.g. a bare
  // NullPointerException) have getMessage() == null. Fall back to toString()
  // so logging an exception never itself fails.
  String message = t.getMessage();
  if (message == null) {
    message = t.toString();
  }
  plugin.getLog().log(new Status(IStatus.ERROR, PLUGIN_ID, IStatus.OK, message, t));
}
/**
 * Schedules the job, preferring the site's progress service (which shows the
 * half-busy cursor) when one is available; otherwise schedules it directly.
 *
 * @param job the job to schedule
 * @param site the workbench site whose progress service should run the job, may be null
 */
public static void schedule(Job job, IWorkbenchSite site) {
  IWorkbenchSiteProgressService siteProgress = null;
  if (site != null) {
    siteProgress =
        (IWorkbenchSiteProgressService) site.getAdapter(IWorkbenchSiteProgressService.class);
  }
  if (siteProgress == null) {
    job.schedule();
  } else {
    siteProgress.schedule(job, 0, true /* use half-busy cursor */);
  }
}
/**
 * Runs the given runnable on the UI thread with a busy indicator. When called
 * from the UI thread it runs synchronously in place; otherwise it blocks the
 * caller via syncExec until the runnable has finished on the UI thread.
 *
 * @param runnable the work to execute while showing the busy cursor
 */
public static void runInUIThread(final Runnable runnable) {
  Display current = Display.getCurrent();
  if (current == null) {
    // Not on the UI thread: hop over and wait for completion.
    Display.getDefault().syncExec(new Runnable() {
      public void run() {
        BusyIndicator.showWhile(Display.getCurrent(), runnable);
      }
    });
  } else {
    BusyIndicator.showWhile(current, runnable);
  }
}
}
| |
/*
* Licensed to the University of California, Berkeley under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package tachyon.master;
import java.io.File;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.List;
import org.apache.curator.test.TestingServer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Supplier;
import com.google.common.base.Throwables;
import tachyon.Constants;
import tachyon.UnderFileSystem;
import tachyon.client.TachyonFS;
import tachyon.conf.CommonConf;
import tachyon.conf.MasterConf;
import tachyon.conf.UserConf;
import tachyon.conf.WorkerConf;
import tachyon.util.CommonUtils;
import tachyon.util.NetworkUtils;
import tachyon.worker.TachyonWorker;
/**
 * A local Tachyon cluster with multiple masters, coordinated through an
 * in-process ZooKeeper ({@link TestingServer}) for leader election. Intended
 * for fault-tolerance tests: masters can be killed via {@link #killLeader()}
 * while a single worker keeps running.
 */
public class LocalTachyonClusterMultiMaster {
  private static final Logger LOG = LoggerFactory.getLogger(Constants.LOGGER_TYPE);

  /**
   * Smoke test: starts and stops a small local cluster twice in a row.
   * NOTE(review): this exercises the single-master {@code LocalTachyonCluster},
   * not the multi-master cluster defined in this class — confirm that is
   * intentional.
   */
  public static void main(String[] args) throws Exception {
    LocalTachyonCluster cluster = new LocalTachyonCluster(100);
    cluster.start();
    CommonUtils.sleepMs(null, Constants.SECOND_MS);
    cluster.stop();
    CommonUtils.sleepMs(null, Constants.SECOND_MS);
    cluster = new LocalTachyonCluster(100);
    cluster.start();
    CommonUtils.sleepMs(null, Constants.SECOND_MS);
    cluster.stop();
    CommonUtils.sleepMs(null, Constants.SECOND_MS);
  }

  // Embedded ZooKeeper server used for master leader election.
  private TestingServer mCuratorServer = null;
  // Number of masters launched by start().
  private int mNumOfMasters = 0;
  // The single worker shared by all masters.
  private TachyonWorker mWorker = null;
  // Memory capacity (in bytes) configured for the worker's level-0 store.
  private final long mWorkerCapacityBytes;
  // Temp directory serving as tachyon.home for this cluster.
  private String mTachyonHome;
  // Worker data folder, relative to the worker's storage dir.
  private String mWorkerDataFolder;
  // Thread running mWorker.start(); started in start(), not joined here.
  private Thread mWorkerThread = null;
  private String mLocalhostName = null;
  // All launched masters, in start order.
  private final List<LocalTachyonMaster> mMasters = new ArrayList<LocalTachyonMaster>();
  // Supplies the fault-tolerant cluster URI to the client pool on demand.
  private final Supplier<String> mClientSuppliers = new Supplier<String>() {
    @Override
    public String get() {
      return getUri();
    }
  };
  private final ClientPool mClientPool = new ClientPool(mClientSuppliers);

  /**
   * Creates the cluster definition and immediately boots the embedded
   * ZooKeeper server. Masters and the worker are launched later in start().
   *
   * @param workerCapacityBytes memory capacity for the worker, in bytes
   * @param masters number of masters to launch
   */
  public LocalTachyonClusterMultiMaster(long workerCapacityBytes, int masters) {
    mNumOfMasters = masters;
    mWorkerCapacityBytes = workerCapacityBytes;
    try {
      mCuratorServer = new TestingServer();
    } catch (Exception e) {
      throw Throwables.propagate(e);
    }
  }

  /** @return a pooled client connected to this cluster */
  public synchronized TachyonFS getClient() throws IOException {
    return mClientPool.getClient();
  }

  /** @return the fault-tolerant cluster URI based on the ZooKeeper address */
  public String getUri() {
    return Constants.HEADER_FT + mCuratorServer.getConnectString();
  }

  /**
   * Stops the first master that is still running (in start order) — assumed to
   * be the current leader.
   *
   * @return true if a running master was stopped; false if none was running or
   *         stopping it failed
   */
  public boolean killLeader() {
    for (int k = 0; k < mNumOfMasters; k ++) {
      if (mMasters.get(k).isStarted()) {
        try {
          mMasters.get(k).stop();
        } catch (Exception e) {
          LOG.error(e.getMessage(), e);
          return false;
        }
        return true;
      }
    }
    return false;
  }

  // Deletes the path from the under filesystem if present; throws when the
  // existing folder cannot be removed.
  private void deleteDir(String path) throws IOException {
    UnderFileSystem ufs = UnderFileSystem.get(path);
    if (ufs.exists(path) && !ufs.delete(path, true)) {
      throw new IOException("Folder " + path + " already exists but can not be deleted.");
    }
  }

  // Re-creates the path in the under filesystem (delete-then-mkdirs so a stale
  // folder from a previous run never survives).
  private void mkdir(String path) throws IOException {
    UnderFileSystem ufs = UnderFileSystem.get(path);
    if (ufs.exists(path)) {
      ufs.delete(path, true);
    }
    if (!ufs.mkdirs(path, true)) {
      throw new IOException("Failed to make folder: " + path);
    }
  }

  /**
   * Boots the cluster: prepares a fresh tachyon.home temp dir, publishes the
   * ZooKeeper/hierarchy-store configuration through system properties, clears
   * the cached conf singletons so they re-read those properties, then starts
   * all masters followed by one worker on its own thread.
   *
   * Ordering matters here: properties must be set before the *Conf.clear()
   * calls, and masters must be up before the worker connects.
   */
  public void start() throws IOException {
    int maxLevel = 1;
    mTachyonHome =
        File.createTempFile("Tachyon", "U" + System.currentTimeMillis()).getAbsolutePath();
    mWorkerDataFolder = "/datastore";
    String masterDataFolder = mTachyonHome + "/data";
    String masterLogFolder = mTachyonHome + "/logs";
    // re-build the dir to set permission to 777
    deleteDir(mTachyonHome);
    mkdir(mTachyonHome);
    mkdir(masterDataFolder);
    mkdir(masterLogFolder);
    mLocalhostName = NetworkUtils.getLocalHostName();
    // Publish test-mode + ZooKeeper settings for the conf singletons below.
    System.setProperty("tachyon.test.mode", "true");
    System.setProperty("tachyon.home", mTachyonHome);
    System.setProperty("tachyon.usezookeeper", "true");
    System.setProperty("tachyon.zookeeper.address", mCuratorServer.getConnectString());
    System.setProperty("tachyon.zookeeper.election.path", "/election");
    System.setProperty("tachyon.zookeeper.leader.path", "/leader");
    System.setProperty("tachyon.worker.data.folder", mWorkerDataFolder);
    if (System.getProperty("tachyon.worker.hierarchystore.level.max") == null) {
      System.setProperty("tachyon.worker.hierarchystore.level.max", 1 + "");
    }
    System.setProperty("tachyon.worker.hierarchystore.level0.alias", "MEM");
    System.setProperty("tachyon.worker.hierarchystore.level0.dirs.path", mTachyonHome + "/ramdisk");
    System
        .setProperty("tachyon.worker.hierarchystore.level0.dirs.quota", mWorkerCapacityBytes + "");
    // Rebase any externally-configured storage-dir paths under mTachyonHome.
    // NOTE(review): with maxLevel fixed at 1 this loop never executes.
    for (int level = 1; level < maxLevel; level ++) {
      String path =
          System.getProperty("tachyon.worker.hierarchystore.level" + level + ".dirs.path");
      if (path == null) {
        throw new IOException("Paths for StorageDirs are not set! Level:" + level);
      }
      String[] dirPaths = path.split(",");
      String newPath = "";
      for (int i = 0; i < dirPaths.length; i ++) {
        newPath += mTachyonHome + dirPaths[i] + ",";
      }
      System.setProperty("tachyon.worker.hierarchystore.level" + level + ".dirs.path",
          newPath.substring(0, newPath.length() - 1));
    }
    System.setProperty("tachyon.worker.to.master.heartbeat.interval.ms", 15 + "");
    // Drop cached configuration so the properties above take effect.
    CommonConf.clear();
    MasterConf.clear();
    WorkerConf.clear();
    UserConf.clear();
    mkdir(CommonConf.get().UNDERFS_DATA_FOLDER);
    mkdir(CommonConf.get().UNDERFS_WORKERS_FOLDER);
    for (int k = 0; k < mNumOfMasters; k ++) {
      final LocalTachyonMaster master = LocalTachyonMaster.create(mTachyonHome);
      master.start();
      mMasters.add(master);
    }
    // Brief pause to let the masters come up before the worker connects.
    CommonUtils.sleepMs(null, 10);
    mWorker =
        TachyonWorker.createWorker(
            CommonUtils.parseInetSocketAddress(mCuratorServer.getConnectString()),
            new InetSocketAddress(mLocalhostName, 0), 0, 1, 1, 1);
    Runnable runWorker = new Runnable() {
      @Override
      public void run() {
        try {
          mWorker.start();
        } catch (Exception e) {
          throw new RuntimeException(e + " \n Start Master Error \n" + e.getMessage(), e);
        }
      }
    };
    mWorkerThread = new Thread(runWorker);
    mWorkerThread.start();
    // Ports are assigned dynamically (0 above); publish the actual ones.
    System.setProperty("tachyon.worker.port", mWorker.getMetaPort() + "");
    System.setProperty("tachyon.worker.data.port", mWorker.getDataPort() + "");
  }

  /** Stops the Tachyon processes and then cleans up the under filesystem. */
  public void stop() throws Exception {
    stopTFS();
    stopUFS();
  }

  /**
   * Stops clients, the worker, all masters and the ZooKeeper server, then
   * clears every system property start() (and the master/worker) set so later
   * tests start from a clean environment.
   */
  public void stopTFS() throws Exception {
    mClientPool.close();
    mWorker.stop();
    for (int k = 0; k < mNumOfMasters; k ++) {
      mMasters.get(k).stop();
    }
    mCuratorServer.stop();
    System.clearProperty("tachyon.home");
    System.clearProperty("tachyon.usezookeeper");
    System.clearProperty("tachyon.zookeeper.address");
    System.clearProperty("tachyon.zookeeper.election.path");
    System.clearProperty("tachyon.zookeeper.leader.path");
    System.clearProperty("tachyon.master.hostname");
    System.clearProperty("tachyon.master.port");
    System.clearProperty("tachyon.master.web.port");
    System.clearProperty("tachyon.worker.port");
    System.clearProperty("tachyon.worker.data.port");
    System.clearProperty("tachyon.worker.data.folder");
    System.clearProperty("tachyon.worker.memory.size");
    System.clearProperty("tachyon.worker.to.master.heartbeat.interval.ms");
  }

  public void stopUFS() throws Exception {
    // masters share underfs, so only need to call on the first master
    mMasters.get(0).cleanupUnderfs();
  }
}
| |
package org.aikodi.java.core.expression.invocation;
import java.util.ArrayList;
import java.util.List;
import org.aikodi.chameleon.core.lookup.LookupException;
import org.aikodi.chameleon.oo.language.ObjectOrientedLanguage;
import org.aikodi.chameleon.oo.language.ObjectOrientedLanguageImpl;
import org.aikodi.chameleon.oo.type.TypeReference;
import org.aikodi.chameleon.oo.type.Type;
import org.aikodi.chameleon.oo.type.TypeReference;
import org.aikodi.chameleon.oo.type.generics.TypeArgument;
import org.aikodi.chameleon.oo.type.generics.EqualityTypeArgument;
import org.aikodi.chameleon.oo.type.generics.ExtendsWildcard;
import org.aikodi.chameleon.oo.type.generics.InstantiatedTypeParameter;
import org.aikodi.chameleon.oo.type.generics.SuperWildcard;
import org.aikodi.chameleon.oo.type.generics.TypeParameter;
import org.aikodi.chameleon.util.Util;
import org.aikodi.java.core.language.Java7;
import org.aikodi.java.core.type.JavaTypeReference;
/**
 * A first-phase type-inference constraint of the form {@code A >> F}: the
 * argument type A must be convertible to the formal parameter type F.
 * NOTE(review): the case analysis below appears to mirror the JLS 15.12.2.7
 * inference rules (G&lt;...&gt; supertype computation, wildcard cases) — confirm
 * against the Chameleon framework documentation.
 */
public class GGConstraint extends FirstPhaseConstraint {

  public GGConstraint(TypeReference A, Type F) {
    super(A,F);
  }
  // @Override
  // public List<SecondPhaseConstraint> processSpecifics() throws LookupException {
  // return null;
  // }

  /**
   * Case "F is exactly the inferred type variable Tj": {@code A >> Tj} reduces
   * to a supertype constraint on the declarator.
   */
  public SubtypeConstraint FequalsTj(TypeParameter declarator, TypeReference type) {
    return new SubtypeConstraint(declarator, type);
  }

  /**
   * Array case: {@code A[] >> F[]} reduces to a recursive {@code A >> F}
   * constraint on the component types. The generated constraint is registered
   * with the parent so it participates in constraint processing.
   */
  @Override
  public FirstPhaseConstraint Array(TypeReference componentType, Type componentTypeReference) {
    GGConstraint ggConstraint = new GGConstraint(componentType, componentTypeReference);
    parent().addGenerated(ggConstraint);
    ggConstraint.setUniParent(parent());
    return ggConstraint;
  }

  /**
   * Case "F has the form G&lt;..., U, ...&gt;" where U is a plain (invariant) type
   * argument at position {@code index}.
   *
   * When A's base type H differs from F's base type G, the method instantiates
   * G with U substituted at {@code index}, asks for its supertype V with base
   * type H, and recurses with {@code A >> V}. When the base types are equal it
   * dispatches on the kind of A's i-th type argument (equality / extends /
   * super) to generate the matching recursive constraint.
   *
   * NOTE(review): unlike the Extends/Super variants below, this case guards on
   * {@code G.subtypeOf(H)} first — presumably intentional; confirm.
   */
  @Override
  public void caseSSFormalBasic(List<SecondPhaseConstraint> result, TypeReference U, int index) throws LookupException {
    try {
      if(A().parameters(TypeParameter.class).isEmpty()) {
        // If A is an instance of a non-generic type, then no constraint is implied on Tj.
      } else {
        Type G = F().baseType();
        Type H = A().baseType();
        if(G.subtypeOf(H)) {
          if(! G.sameAs(H)) {
            // No need to include F() itself since the base types aren't equal.
            // G(S1,..,Sindex-1,U,Sindex+1,...,Sn) -> H
            List<TypeParameter> formalArgs = new ArrayList<TypeParameter>();
            for(TypeParameter par: G.parameters(TypeParameter.class)) {
              TypeParameter clone = (TypeParameter) par.clone();
              formalArgs.add(clone);
            }
            Type GG = G.language(ObjectOrientedLanguage.class).instantiatedType(TypeParameter.class,formalArgs, G);
            GG.setUniParent(G.lexical().parent());
            // replace the index-th parameter with a clone of type reference U.
            TypeParameter oldParameter = GG.parameters(TypeParameter.class).get(index);
            EqualityTypeArgument actual = (EqualityTypeArgument) U.lexical().parent();
            TypeParameter newParameter = new InstantiatedTypeParameter(oldParameter.name(), actual);
            GG.replaceParameter(TypeParameter.class,oldParameter, newParameter);
            Type V= GG.superTypeJudge().get(H);
            if(F().subtypeOf(V)) {
              GGConstraint recursive = new GGConstraint(ARef(), V);
              parent().addGenerated(recursive);
              recursive.setUniParent(parent());
              result.addAll(recursive.process());
            }
          } else {
            // Same base type: recurse based on the kind of A's i-th argument.
            TypeParameter ithTypeParameterOfA = A().parameters(TypeParameter.class).get(index);
            if(ithTypeParameterOfA instanceof InstantiatedTypeParameter) {
              TypeArgument arg = ((InstantiatedTypeParameter)ithTypeParameterOfA).argument();
              if(arg instanceof EqualityTypeArgument) {
                EQConstraint recursive = new EQConstraint(((EqualityTypeArgument)arg).typeReference(), U.getElement());
                parent().addGenerated(recursive);
                recursive.setUniParent(parent());
                result.addAll(recursive.process());
              } else if(arg instanceof ExtendsWildcard) {
                GGConstraint recursive = new GGConstraint(((ExtendsWildcard)arg).typeReference(), U.getElement());
                parent().addGenerated(recursive);
                recursive.setUniParent(parent());
                result.addAll(recursive.process());
              } else if(arg instanceof SuperWildcard) {
                SSConstraint recursive = new SSConstraint(((SuperWildcard)arg).typeReference(), U.getElement());
                recursive.setUniParent(parent());
                result.addAll(recursive.process());
              }
            }
          }
        }
      }
    }
    // Index out of range means the parameter position does not exist for this
    // type; the case then simply contributes no constraints.
    catch(IndexOutOfBoundsException exc) {
      return;
    }
  }

  /**
   * Case "F has the form G&lt;..., ? extends U, ...&gt;" at position {@code index}.
   * Mirrors caseSSFormalBasic, except that after computing the supertype V its
   * actual parameters are re-wrapped as {@code ? extends} wildcards before
   * recursing, and only the extends-wildcard sub-case applies when base types
   * are equal.
   */
  @Override
  public void caseSSFormalExtends(List<SecondPhaseConstraint> result, TypeReference U, int index) throws LookupException {
    try {
      if(A().parameters(TypeParameter.class).isEmpty()) {
        // If A is an instance of a non-generic type, then no constraint is implied on Tj.
      } else {
        Type G = F().baseType();
        Type H = A().baseType();
        if(! G.sameAs(H)) {
          // No need to include F() itself since the base types aren't equal.
          // G(S1,..,Sindex-1,U,Sindex+1,...,Sn) -> H
          List<TypeParameter> formalArgs = new ArrayList<TypeParameter>();
          for(TypeParameter par: G.parameters(TypeParameter.class)) {
            TypeParameter clone = (TypeParameter) par.clone();
            formalArgs.add(clone);
          }
          Type GG = G.language(ObjectOrientedLanguage.class).instantiatedType(TypeParameter.class,formalArgs, G);
          GG.setUniParent(G.lexical().parent());
          // replace the index-th parameter with a clone of type reference U.
          TypeParameter oldParameter = GG.parameters(TypeParameter.class).get(index);
          EqualityTypeArgument actual = (EqualityTypeArgument) U.lexical().parent();
          TypeParameter newParameter = new InstantiatedTypeParameter(oldParameter.name(), actual);
          GG.replaceParameter(TypeParameter.class,oldParameter, newParameter);
          // Type V=typeWithSameBaseTypeAs(H, GG.getAllSuperTypes());
          Type V= GG.superTypeJudge().get(H);
          // Replace actual parameters with extends wildcards
          for(TypeParameter par: V.parameters(TypeParameter.class)) {
            InstantiatedTypeParameter inst = (InstantiatedTypeParameter) par;
            EqualityTypeArgument basic = (EqualityTypeArgument) inst.argument();
            TypeReference typeReference = basic.typeReference();
            ExtendsWildcard ext = par.language(Java7.class).createExtendsWildcard(Util.clone(typeReference));
            ext.setUniParent(typeReference.lexical().parent());
            TypeParameter newP = new InstantiatedTypeParameter(par.name(),ext);
            V.replaceParameter(TypeParameter.class,par, newP);
          }
          if(F().subtypeOf(V)) {
            GGConstraint recursive = new GGConstraint(ARef(), V);
            parent().addGenerated(recursive);
            recursive.setUniParent(parent());
            result.addAll(recursive.process());
          }
        } else {
          // Same base type: only an extends-wildcard argument of A generates a
          // recursive constraint in this case.
          TypeParameter ithTypeParameterOfA = A().parameters(TypeParameter.class).get(index);
          if(ithTypeParameterOfA instanceof InstantiatedTypeParameter) {
            TypeArgument arg = ((InstantiatedTypeParameter)ithTypeParameterOfA).argument();
            if(arg instanceof ExtendsWildcard) {
              GGConstraint recursive = new GGConstraint((JavaTypeReference) ((ExtendsWildcard)arg).typeReference(), U.getElement());
              parent().addGenerated(recursive);
              recursive.setUniParent(parent());
              result.addAll(recursive.process());
            }
          }
        }
      }
    }
    // Index out of range: no constraints contributed for this position.
    catch(IndexOutOfBoundsException exc) {
      return;
    }
  }

  /**
   * Case "F has the form G&lt;..., ? super U, ...&gt;" at position {@code index}.
   * Mirrors caseSSFormalExtends, but re-wraps V's parameters as {@code ? super}
   * wildcards and, when base types are equal, only a super-wildcard argument of
   * A generates a (reversed, SS) recursive constraint.
   */
  @Override
  public void caseSSFormalSuper(List<SecondPhaseConstraint> result, TypeReference U, int index) throws LookupException {
    try {
      if(A().parameters(TypeParameter.class).isEmpty()) {
        // If A is an instance of a non-generic type, then no constraint is implied on Tj.
      } else {
        Type G = F().baseType();
        Type H = A().baseType();
        if(! G.sameAs(H)) {
          // No need to include F() itself since the base types aren't equal.
          // G(S1,..,Sindex-1,U,Sindex+1,...,Sn) -> H
          List<TypeParameter> formalArgs = new ArrayList<TypeParameter>();
          for(TypeParameter par: G.parameters(TypeParameter.class)) {
            TypeParameter clone = (TypeParameter) par.clone();
            formalArgs.add(clone);
          }
          Type GG = G.language(ObjectOrientedLanguage.class).instantiatedType(TypeParameter.class,formalArgs, G);
          GG.setUniParent(G.lexical().parent());
          // replace the index-th parameter with a clone of type reference U.
          TypeParameter oldParameter = GG.parameters(TypeParameter.class).get(index);
          EqualityTypeArgument actual = (EqualityTypeArgument) U.lexical().parent();
          TypeParameter newParameter = new InstantiatedTypeParameter(oldParameter.name(), actual);
          GG.replaceParameter(TypeParameter.class,oldParameter, newParameter);
          // Type V=typeWithSameBaseTypeAs(H, GG.getAllSuperTypes());
          Type V= GG.superTypeJudge().get(H);
          // Replace actual parameters with extends wildcards
          for(TypeParameter par: V.parameters(TypeParameter.class)) {
            InstantiatedTypeParameter inst = (InstantiatedTypeParameter) par;
            EqualityTypeArgument basic = (EqualityTypeArgument) inst.argument();
            TypeReference typeReference = basic.typeReference();
            SuperWildcard ext = par.language(Java7.class).createSuperWildcard(Util.clone(typeReference));
            ext.setUniParent(typeReference.lexical().parent());
            TypeParameter newP = new InstantiatedTypeParameter(par.name(),ext);
            V.replaceParameter(TypeParameter.class,par, newP);
          }
          if(F().subtypeOf(V)) {
            GGConstraint recursive = new GGConstraint(ARef(), V);
            parent().addGenerated(recursive);
            recursive.setUniParent(parent());
            result.addAll(recursive.process());
          }
        } else {
          TypeParameter ithTypeParameterOfA = A().parameters(TypeParameter.class).get(index);
          if(ithTypeParameterOfA instanceof InstantiatedTypeParameter) {
            TypeArgument arg = ((InstantiatedTypeParameter)ithTypeParameterOfA).argument();
            if(arg instanceof SuperWildcard) {
              SSConstraint recursive = new SSConstraint((JavaTypeReference) ((SuperWildcard)arg).typeReference(), U.getElement());
              recursive.setUniParent(parent());
              result.addAll(recursive.process());
            }
          }
        }
      }
    }
    // Index out of range: no constraints contributed for this position.
    catch(IndexOutOfBoundsException exc) {
      return;
    }
  }

  /** @return a human-readable "A >> F" rendering of this constraint */
  @Override
  public String toString() {
    return this.ARef().toString() +" >> " +this.F().toString();
  }
}
| |
// Copyright 2019 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.neighborgood.servlets;
import static com.google.appengine.api.datastore.FetchOptions.Builder.withLimit;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.*;
import com.google.appengine.api.datastore.DatastoreService;
import com.google.appengine.api.datastore.DatastoreServiceFactory;
import com.google.appengine.api.datastore.Entity;
import com.google.appengine.api.datastore.KeyFactory;
import com.google.appengine.api.datastore.PreparedQuery;
import com.google.appengine.api.datastore.Query;
import com.google.appengine.api.users.UserService;
import com.google.appengine.api.users.UserServiceFactory;
import com.google.appengine.tools.development.testing.LocalDatastoreServiceTestConfig;
import com.google.appengine.tools.development.testing.LocalServiceTestHelper;
import com.google.appengine.tools.development.testing.LocalUserServiceTestConfig;
import com.google.common.collect.ImmutableMap;
import java.io.*;
import javax.servlet.http.*;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/** Unit tests for MessageServlet's doPost/doGet handlers. */
@RunWith(JUnit4.class)
public final class MessageServletTest {
  /* Set up the test environment with Datastore and UserService and simulate the situation
   * that the user is logged in as admin, has a specific email and userID.
   */
  private LocalServiceTestHelper helper =
      new LocalServiceTestHelper(
          new LocalDatastoreServiceTestConfig(), new LocalUserServiceTestConfig())
          .setEnvIsAdmin(true)
          .setEnvIsLoggedIn(true)
          .setEnvEmail("leo@xxx.com")
          .setEnvAuthDomain("1234567890")
          .setEnvAttributes(
              ImmutableMap.of(
                  "com.google.appengine.api.users.UserService.user_id_key", "1234567890"));
  // Captures System.err so the servlet's error messages can be asserted on.
  private final ByteArrayOutputStream errContent = new ByteArrayOutputStream();
  private final PrintStream originalErr = System.err;
  private UserService userService;
  private DatastoreService ds;
  private HttpServletRequest request;
  private HttpServletResponse response;
  // Pre-populated user and task entities shared by the tests.
  private Entity userEntity;
  private Entity taskEntity;
  // Stringified datastore key of taskEntity, used as the "task-id" parameter.
  private String keyString;

  /** Boots the local services, mocks the servlet request/response, and seeds
   * one user and one task entity into the datastore. */
  @Before
  public void setUp() {
    helper.setUp();
    userService = UserServiceFactory.getUserService();
    ds = DatastoreServiceFactory.getDatastoreService();
    request = mock(HttpServletRequest.class);
    response = mock(HttpServletResponse.class);
    userEntity = new Entity("UserInfo", "1234567890");
    userEntity.setProperty("nickname", "Leonard");
    userEntity.setProperty("address", "xxx");
    userEntity.setProperty("email", "leo@xxx.com");
    userEntity.setProperty("userId", "1234567890");
    userEntity.setProperty("country", "US");
    userEntity.setProperty("zipcode", "15213");
    userEntity.setProperty("points", 0);
    ds.put(userEntity);
    taskEntity = new Entity("Task", userEntity.getKey());
    taskEntity.setProperty("detail", "Test task");
    taskEntity.setProperty("timestamp", 123);
    taskEntity.setProperty("reward", 50);
    taskEntity.setProperty("status", "IN PROGRESS");
    taskEntity.setProperty("Owner", "1234567890");
    taskEntity.setProperty("Helper", "1234567890");
    taskEntity.setProperty("Address", "xxx");
    taskEntity.setProperty("zipcode", "15213");
    taskEntity.setProperty("country", "US");
    taskEntity.setProperty("category", "Garden");
    ds.put(taskEntity);
    keyString = KeyFactory.keyToString(taskEntity.getKey());
  }

  @After
  public void tearDown() {
    helper.tearDown();
  }

  /** Sanity-checks the simulated UserService and Datastore environment. */
  @Test
  public void testEnvironmentTest() {
    // Test the UserService feature
    assertTrue(userService.isUserAdmin());
    assertTrue(userService.isUserLoggedIn());
    // Test the DataStore feature
    assertEquals(0, ds.prepare(new Query("dummy")).countEntities(withLimit(10)));
    ds.put(new Entity("dummy"));
    ds.put(new Entity("dummy"));
    assertEquals(2, ds.prepare(new Query("dummy")).countEntities(withLimit(10)));
  }

  /** A valid doPost stores one Message and one matching Notification entity. */
  @Test
  public void testInsertOneMessage() throws IOException {
    // Check whether the datastore is empty before the test
    assertEquals(0, ds.prepare(new Query("Message")).countEntities(withLimit(10)));
    when(request.getParameter("task-id")).thenReturn(keyString);
    when(request.getParameter("msg")).thenReturn("Testing message");
    new MessageServlet().doPost(request, response);
    // After sending the POST request, there should be one entity in the datastore
    assertEquals(1, ds.prepare(new Query("Message")).countEntities(withLimit(10)));
    PreparedQuery results = ds.prepare(new Query("Message"));
    Entity entity = results.asSingleEntity();
    // The entity can't be null
    assertNotNull(entity);
    // Test the stored message information
    assertEquals("Testing message", (String) entity.getProperty("message"));
    assertEquals(keyString, (String) entity.getProperty("taskId"));
    assertEquals("1234567890", (String) entity.getProperty("sender"));
    // Also, there should be one corresponding notification entity created
    assertEquals(1, ds.prepare(new Query("Notification")).countEntities(withLimit(10)));
    results = ds.prepare(new Query("Notification"));
    entity = results.asSingleEntity();
    // The entity can't be null
    assertNotNull(entity);
    // Test the stored notification information
    assertEquals(keyString, (String) entity.getProperty("taskId"));
    assertEquals("1234567890", (String) entity.getProperty("receiver"));
  }

  /** Repeated doPost calls accumulate one Message + Notification each. */
  @Test
  public void testInsertMultipleMessage() throws IOException {
    // Check whether the datastore is empty before the test
    assertEquals(0, ds.prepare(new Query("Message")).countEntities(withLimit(10)));
    for (int i = 1; i < 11; i++) {
      String index = Integer.toString(i);
      when(request.getParameter("task-id")).thenReturn(keyString);
      when(request.getParameter("msg")).thenReturn("Testing message " + index);
      new MessageServlet().doPost(request, response);
      assertEquals(i, ds.prepare(new Query("Message")).countEntities(withLimit(10)));
      assertEquals(i, ds.prepare(new Query("Notification")).countEntities(withLimit(10)));
    }
  }

  /** Exercises doPost's three error branches (missing task-id, missing
   * message, blank message) and then the happy path; errors are observed via
   * the captured System.err stream and the datastore must stay empty. */
  @Test
  public void testEmptyInputEdgeCase() throws IOException {
    // Check whether the datastore is empty before the test
    assertEquals(0, ds.prepare(new Query("Message")).countEntities(withLimit(10)));
    // First test the situation when task-id is not provided
    System.setErr(new PrintStream(errContent));
    new MessageServlet().doPost(request, response);
    // This will lead to the first error handling clause of the doPost function of MessageServlet
    assertEquals("The task id is not included\n", errContent.toString());
    assertEquals(0, ds.prepare(new Query("Message")).countEntities(withLimit(10)));
    assertEquals(0, ds.prepare(new Query("Notification")).countEntities(withLimit(10)));
    errContent.reset();
    System.setErr(originalErr);
    when(request.getParameter("task-id")).thenReturn(keyString);
    // Now test the situation the message is not provided
    System.setErr(new PrintStream(errContent));
    new MessageServlet().doPost(request, response);
    // This will lead to the second error handling clause of doPost() in MessageServlet
    assertEquals("The message is not provided\n", errContent.toString());
    assertEquals(0, ds.prepare(new Query("Message")).countEntities(withLimit(10)));
    assertEquals(0, ds.prepare(new Query("Notification")).countEntities(withLimit(10)));
    errContent.reset();
    System.setErr(originalErr);
    // Now test the situation the message is empty
    when(request.getParameter("msg")).thenReturn("   ");
    System.setErr(new PrintStream(errContent));
    new MessageServlet().doPost(request, response);
    // This will lead to the second error handling clause of doPost() in MessageServlet
    assertEquals("The input message is empty\n", errContent.toString());
    assertEquals(0, ds.prepare(new Query("Message")).countEntities(withLimit(10)));
    assertEquals(0, ds.prepare(new Query("Notification")).countEntities(withLimit(10)));
    errContent.reset();
    System.setErr(originalErr);
    // Now test the normal case
    when(request.getParameter("msg")).thenReturn("Testing message");
    new MessageServlet().doPost(request, response);
    // After sending the POST request, there should be one entity in the datastore
    assertEquals(1, ds.prepare(new Query("Message")).countEntities(withLimit(10)));
    PreparedQuery results = ds.prepare(new Query("Message"));
    Entity entity = results.asSingleEntity();
    // The entity can't be null
    assertNotNull(entity);
    // Test the stored message information
    assertEquals("Testing message", (String) entity.getProperty("message"));
    assertEquals(keyString, (String) entity.getProperty("taskId"));
    assertEquals("1234567890", (String) entity.getProperty("sender"));
    // Also, there should be one corresponding notification entity created
    assertEquals(1, ds.prepare(new Query("Notification")).countEntities(withLimit(10)));
    results = ds.prepare(new Query("Notification"));
    entity = results.asSingleEntity();
    // The entity can't be null
    assertNotNull(entity);
    // Test the stored notification information
    assertEquals(keyString, (String) entity.getProperty("taskId"));
    assertEquals("1234567890", (String) entity.getProperty("receiver"));
  }

  /** doGet returns the task's messages as JSON written to the response. */
  @Test
  public void doGetTest() throws IOException {
    // Check whether the datastore is empty before the test
    assertEquals(0, ds.prepare(new Query("Message")).countEntities(withLimit(10)));
    Entity dummy = new Entity("Message");
    dummy.setProperty("message", "Test 1");
    dummy.setProperty("taskId", "1");
    dummy.setProperty("sender", userService.getCurrentUser().getUserId());
    dummy.setProperty("sentTime", 0);
    Entity dummy2 = new Entity("Message");
    dummy2.setProperty("message", "Test 2");
    dummy2.setProperty("taskId", "1");
    dummy2.setProperty("sender", userService.getCurrentUser().getUserId());
    dummy2.setProperty("sentTime", 5);
    ds.put(dummy);
    ds.put(dummy2);
    when(request.getParameter("key")).thenReturn("1");
    StringWriter stringWriter = new StringWriter();
    PrintWriter writer = new PrintWriter(stringWriter);
    when(response.getWriter()).thenReturn(writer);
    new MessageServlet().doGet(request, response);
    // After sending the GET request, the doGet function should output the json string
    writer.flush();
    System.out.println(stringWriter.toString());
    assertTrue(
        stringWriter
            .toString()
            .contains("{\"message\":\"Test 1\",\"className\":\"sentByMe\",\"sentTime\":0}"));
    assertTrue(
        stringWriter
            .toString()
            .contains("{\"message\":\"Test 2\",\"className\":\"sentByMe\",\"sentTime\":5}"));
    // Finally, ensure that the servlet file has set the content type to json
    verify(response).setContentType("application/json;");
  }

  /** doGet without a "key" parameter reports an error and stores nothing. */
  @Test
  public void doGetWithoutTaskIdTest() throws IOException {
    // Check whether the datastore is empty before the test
    assertEquals(0, ds.prepare(new Query("Message")).countEntities(withLimit(10)));
    Entity dummy = new Entity("Message");
    dummy.setProperty("message", "Test 1");
    dummy.setProperty("taskId", "1");
    dummy.setProperty("sender", userService.getCurrentUser().getUserId());
    dummy.setProperty("sentTime", 0);
    Entity dummy2 = new Entity("Message");
    dummy2.setProperty("message", "Test 2");
    dummy2.setProperty("taskId", "1");
    dummy2.setProperty("sender", userService.getCurrentUser().getUserId());
    dummy2.setProperty("sentTime", 5);
    ds.put(dummy);
    ds.put(dummy2);
    // Now test the edge case that task id is not provided
    System.setErr(new PrintStream(errContent));
    new MessageServlet().doGet(request, response);
    // This will lead to the first error handling clause of doGet() in MessageServlet
    assertEquals("No task id provided\n", errContent.toString());
    assertEquals(2, ds.prepare(new Query("Message")).countEntities(withLimit(10)));
    errContent.reset();
    System.setErr(originalErr);
  }
}
| |
/*
* Created on Sep 14, 2005
*
* Copyright 2005-2010 Ignis Software Tools Ltd. All rights reserved.
*/
package jsystem.extensions.analyzers.tabletext;
import java.util.HashSet;
import java.util.Set;
/**
* @author guy.arieli
*
*/
public abstract class TableBasic implements TTable {
protected String stringTable = null;
protected String[] lines = null;
protected int[] fieldsOffset = null;
protected int numberOfFields = -1;
protected String[] header1 = null;
protected String[] header2 = null;
protected String[][] cells = null;
/**
 * Creates a table instance from a CLI string output.
 *
 * @param stringTable
 *            the raw text to be analyzed into a table structure
 * @throws Exception
 *             if the text cannot be parsed into a table
 */
public TableBasic(String stringTable) throws Exception {
	this.stringTable = stringTable;
	initTable(stringTable);
}
// No-op constructor for subclasses that initialize the table themselves.
public TableBasic() {
}
/**
 * @return the number of data rows currently parsed into {@link #cells}
 * @see jsystem.extensions.analyzers.tabletext.TTable#getNumberOfRows()
 */
public int getNumberOfRows() {
	return cells.length;
}
/**
 * @return the number of columns (fields) detected in the table header
 * @see jsystem.extensions.analyzers.tabletext.TTable#getNumberOfColumns()
 */
public int getNumberOfColumns() {
	return numberOfFields;
}
/**
 * Returns the cell at the given position.
 *
 * @param row zero-based row index
 * @param col zero-based column index
 * @return the cell text, or {@code null} when either index is out of range.
 *         (Previously a negative index threw ArrayIndexOutOfBoundsException
 *         while a too-large index returned null; now both out-of-range cases
 *         consistently return null.)
 * @see jsystem.extensions.analyzers.tabletext.TTable#getCell(int, int)
 */
public String getCell(int row, int col) {
	if (row < 0 || col < 0 || col >= numberOfFields || row >= getNumberOfRows()) {
		return null;
	}
	return cells[row][col];
}
/**
 * Returns a whole row of the table.
 *
 * @param row zero-based row index
 * @return the row's cells, or {@code null} when the index is out of range.
 *         (Previously a negative index threw ArrayIndexOutOfBoundsException
 *         while a too-large index returned null; both now return null.)
 * @see jsystem.extensions.analyzers.tabletext.TTable#getRow(int)
 */
public String[] getRow(int row) {
	if (row < 0 || row >= getNumberOfRows()) {
		return null;
	}
	return cells[row];
}
/**
 * Returns one column of the table, one entry per row; an entry is
 * {@code null} where the column index is out of range for that row.
 *
 * @param col zero-based column index
 * @return the column values, one per table row
 * @see jsystem.extensions.analyzers.tabletext.TTable#getColumn(int)
 */
public String[] getColumn(int col) {
	final int rows = getNumberOfRows();
	String[] column = new String[rows];
	for (int row = 0; row < rows; row++) {
		column[row] = getCell(row, col);
	}
	return column;
}
protected void initLines() {
lines = stringTable.split("[\\r\\n]+");
}
/*
* (non-Javadoc)
*
* @see jsystem.extensions.analyzers.tabletext.TTable#findFieldInRow(java.lang.String,
* int)
*/
public int findFieldInRow(String fieldName, int lineIndex) {
String[] rowArray = getRow(lineIndex);
for (int i = 0; i < rowArray.length; i++) {
if (rowArray[i].equals(fieldName)) {
return i;
}
}
return -1;
}
protected abstract void initHeaders() throws Exception;
/*
* (non-Javadoc)
*
* @see jsystem.extensions.analyzers.tabletext.TTable#getHeaders()
*/
public String[] getHeaders() throws Exception {
return header1;
}
/*
* (non-Javadoc)
*
* @see jsystem.extensions.analyzers.tabletext.TTable#getHeaderFieldIndex(java.lang.String)
*/
public abstract int getHeaderFieldIndex(String fieldName) throws Exception;
public String toString() {
StringBuffer sb = new StringBuffer();
for (int i = 0; i < getNumberOfRows(); i++) {
for (int j = 0; j < getNumberOfColumns(); j++) {
sb.append(getCell(i, j));
if (j != getNumberOfColumns() - 1) {
sb.append(",");
}
}
sb.append("\n");
}
return sb.toString();
}
/*
* (non-Javadoc)
*
* @see jsystem.extensions.analyzers.tabletext.TTable#getTableString()
*/
public String getTableString() {
return stringTable;
}
/*
* (non-Javadoc)
*
* @see jsystem.extensions.analyzers.tabletext.TTable#getFirstRowIndex(java.lang.String,
* java.lang.String)
*/
public int getFirstRowIndex(String headerField, String cellValue) throws Exception {
int colIndex = getHeaderFieldIndex(headerField);
String[] col = getColumn(colIndex);
for (int i = 0; i < col.length; i++) {
if (cellValue.equals(col[i])) {
// if(cellValue.compareToIgnoreCase(col[i])==0){
return i;
}
}
throw new Exception("Row: " + headerField + " doesn't contain field: " + cellValue);
}
/*
* (non-Javadoc)
*
* @see jsystem.extensions.analyzers.tabletext.TTable#getFirstRowIndex(int,
* java.lang.String)
*/
public int getFirstRowIndex(int colIndex, String cellValue) throws Exception {
String[] col = getColumn(colIndex);
for (int i = 0; i < col.length; i++) {
if (cellValue.equals(col[i])) {
// if(cellValue.compareToIgnoreCase(col[i])==0){
return i;
}
}
throw new Exception("Row index: " + colIndex + " doesn't contain field: " + cellValue);
}
public boolean isRealKeyHeader(String keyHeader, Object testAgainst) {
// to find it out we put the first column in a hashset.
// If all the values in our table's first column are different,
// then the size of the hashset will be the same as the size of our table's first column.
// If the size is equal than it is a table with keyheader, otherwise it is without keyheader.
boolean isRealKeyHeader;
GetTableColumn gtc = null;
try {
gtc = new GetTableColumn(keyHeader);
} catch (Exception e) {
// throwable = e;
isRealKeyHeader = false;
return isRealKeyHeader;
}
gtc.setTestAgainst(testAgainst);
gtc.analyze();
String[] keyHeaders = gtc.getColumn();
Set<String> myHashSet = new HashSet<String>();
for (int i = 0; i < keyHeaders.length; i++) {
myHashSet.add(keyHeaders[i]);
}
if (myHashSet.size() == keyHeaders.length)
isRealKeyHeader = true;
else
isRealKeyHeader = false;
return isRealKeyHeader;
}
}
| |
package net.javaci.mobile.bomberman.core.server;
import net.javaci.mobile.bomberman.core.GameFactory;
import net.javaci.mobile.bomberman.core.World;
import net.javaci.mobile.bomberman.core.mediator.GameScreenMediator;
import net.javaci.mobile.bomberman.core.models.BombModel;
import net.javaci.mobile.bomberman.core.models.GhostModel;
import net.javaci.mobile.bomberman.core.models.GhostMovement;
import net.javaci.mobile.bomberman.core.net.NetworkInterface;
import net.javaci.mobile.bomberman.core.net.NetworkListenerAdapter;
import net.javaci.mobile.bomberman.core.net.protocol.*;
import net.javaci.mobile.bomberman.core.session.UserSession;
import net.javaci.mobile.bomberman.core.util.Log;
import net.javaci.mobile.bomberman.core.view.GameScreen;
import java.util.*;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
public class GameServer {
private ScheduledExecutorService executorService;
private boolean isGameStarted = false;
private boolean isDisposed = false;
private static int WAIT_MOVE_GHOST_IN_SECOND = 3;
private static Map<Integer, GhostMovement> ghostMovements = new HashMap<Integer, GhostMovement>();
static {
int count = 0;
for (int i=0; i<4; i++) {
switch (i) {
case 0:
for (int j=1; j<10; j++) {
ghostMovements.put(count++, new GhostMovement(GameScreen.Direction.UP, j));
}
break;
case 1:
for (int j=1; j<10; j++) {
ghostMovements.put(count++, new GhostMovement(GameScreen.Direction.DOWN, j));
}
break;
case 2:
for (int j=1; j<10; j++) {
ghostMovements.put(count++, new GhostMovement(GameScreen.Direction.RIGHT, j));
}
break;
case 3:
for (int j=1; j<10; j++) {
ghostMovements.put(count++, new GhostMovement(GameScreen.Direction.LEFT, j));
}
break;
}
}
}
private Random rand = new Random();
private NetworkInterface networkInterface;
private GameScreenMediator gameScreenMediator;
private CommandFactory commandFactory = new CommandFactory();
private World world;
public GameServer(World world) {
this.world = world;
}
public void initialize(NetworkInterface networkInterface, GameScreenMediator gameScreenMediator) {
this.networkInterface = networkInterface;
this.gameScreenMediator = gameScreenMediator;
this.networkInterface.addNetworkListener(new NetworkListenerAdapter() {
@Override
public void onMessageReceived(String from, String message) {
Command command = commandFactory.createCommand(message);
if (command != null) {
switch (command.getCommand()) {
case Command.MOVE_START:
handleStartMoveCommand((MoveCommand) command);
break;
default:
break;
}
}
}
});
GameFactory.GameModel gameModel = GameFactory.getGameModel(gameScreenMediator.getLevel());
executorService = Executors.newScheduledThreadPool(gameModel.numGhosts);
executorService.scheduleAtFixedRate(new Runnable() {
@Override
public void run() {
if (!isDisposed && isGameStarted) {
int waitingGhosts = 0;
for (GhostModel ghostModel : GameServer.this.world.getGhostModels().values().toArray(new GhostModel[0])) {
Log.d("Ghost State : " + ghostModel.getState());
if (ghostModel.getState() == GhostModel.State.STANDING_DOWN || ghostModel.getState() == GhostModel.State.STANDING_UP || ghostModel.getState() == GhostModel.State.STANDING_LEFT || ghostModel.getState() == GhostModel.State.STANDING_RIGHT) {
waitingGhosts++;
moveGhost(ghostModel.getId());
}
}
Log.d("Total Waiting Ghosts : " + waitingGhosts);
}
}
}, WAIT_MOVE_GHOST_IN_SECOND, WAIT_MOVE_GHOST_IN_SECOND, TimeUnit.SECONDS);
}
private void handleStartMoveCommand(MoveCommand command) {
}
public void startGame() {
StartGameCommand startGameCommand = new StartGameCommand();
startGameCommand.setFromUser(UserSession.getInstance().getUsername());
networkInterface.sendMessage(startGameCommand.serialize());
for (GhostModel ghostModel : world.getGhostModels().values()) {
moveGhost(ghostModel.getId());
}
isGameStarted = true;
networkInterface.startGame(UserSession.getInstance().getRoom().getId());
}
public void createGame() {
CreateGameCommand createGameCommand = new CreateGameCommand();
createGameCommand.setFromUser(UserSession.getInstance().getUsername());
createGameCommand.setGhostModels(new ArrayList<GhostModel>(world.getGhostModels().values()));
createGameCommand.setGrid(world.getLabyrinthModel().getGrid());
String serializedMessage = createGameCommand.serialize();
for (String message : createGameCommand.splitMessage(serializedMessage)) {
networkInterface.sendMessage(message);
}
startGame();
}
public void moveGhost(final int ghostId) {
executorService.schedule(new Runnable() {
@Override
public void run() {
GhostModel ghostModel = world.getGhostModels().get(ghostId);
if (ghostModel == null) {
return;
}
int numTry = 3;
GhostMovement movement;
do {
movement = getGhostMovement();
numTry--;
}
while (numTry > 0 && movement != null && !movement.movable(ghostModel, world.getLabyrinthModel().getGrid()));
MoveGhostCommand command = new MoveGhostCommand();
command.setFromUser(UserSession.getInstance().getUsername());
command.setId(ghostId);
command.setStartGridX(world.getGridX(ghostModel.getOriginX()));
command.setStartGridY(world.getGridY(ghostModel.getOriginY()));
command.setGridX(ghostModel.getGridX());
command.setGridY(ghostModel.getGridY());
command.setDirection(movement.getDirection().toString());
command.setDistance(movement.getDistance());
networkInterface.sendMessage(command.serialize());
}
}, rand.nextInt(WAIT_MOVE_GHOST_IN_SECOND), TimeUnit.SECONDS);
}
private GhostMovement getGhostMovement() {
return ghostMovements.get(rand.nextInt(ghostMovements.size()));
}
public void sendBombExplosion(BombModel bombModel, World world) {
ExplodeBombCommand explodeBombCommand = new ExplodeBombCommand();
explodeBombCommand.setFromUser(UserSession.getInstance().getUsername());
explodeBombCommand.setId(bombModel.getId());
explodeBombCommand.setGridX(bombModel.getGridX());
explodeBombCommand.setGridY(bombModel.getGridY());
List<String> explodedPlayers = world.getExplodedPlayerNames(bombModel);
explodeBombCommand.setExplodedPlayers(explodedPlayers);
List<Integer> explodedGhosts = world.getExplodedGhosts(bombModel);
explodeBombCommand.setExplodedGhosts(explodedGhosts);
networkInterface.sendMessage(explodeBombCommand.serialize());
}
public void caughtPlayer(int ghostId, List<String> players) {
GhostCaughtCommand ghostCaughtCommand = new GhostCaughtCommand();
ghostCaughtCommand.setFromUser(UserSession.getInstance().getUsername());
ghostCaughtCommand.setId(ghostId);
ghostCaughtCommand.setCaughtPlayers(players);
networkInterface.sendMessage(ghostCaughtCommand.serialize());
}
public void dispose() {
isDisposed = true;
if (executorService != null) {
executorService.shutdown();
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jasper.compiler;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FilePermission;
import java.io.UnsupportedEncodingException;
import java.net.URL;
import java.net.URLClassLoader;
import java.net.URLDecoder;
import java.security.CodeSource;
import java.security.PermissionCollection;
import java.security.Policy;
import java.security.cert.Certificate;
import java.util.Iterator;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;
import javax.servlet.ServletContext;
import org.apache.jasper.Constants;
import org.apache.jasper.JspCompilationContext;
import org.apache.jasper.Options;
import org.apache.jasper.servlet.JspServletWrapper;
import org.apache.jasper.util.ExceptionUtils;
import org.apache.jasper.util.FastRemovalDequeue;
import org.apache.juli.logging.Log;
import org.apache.juli.logging.LogFactory;
/**
* Class for tracking JSP compile time file dependencies when the
* &lt;%@include file="..."%&gt; directive is used.
*
* A background thread periodically checks the files a JSP page
* is dependent upon. If a dependent file changes the JSP page
* which included it is recompiled.
*
* Only used if a web application context is a directory.
*
* @author Glenn L. Nielsen
*/
public final class JspRuntimeContext {
// Logger
private final Log log = LogFactory.getLog(JspRuntimeContext.class);
/*
* Counts how many times the webapp's JSPs have been reloaded.
*/
private final AtomicInteger jspReloadCount = new AtomicInteger(0);
/*
* Counts how many times JSPs have been unloaded in this webapp.
*/
private final AtomicInteger jspUnloadCount = new AtomicInteger(0);
// ----------------------------------------------------------- Constructors
/**
* Create a JspRuntimeContext for a web application context.
*
* Loads in any previously generated dependencies from file.
*
* @param context ServletContext for web application
* @param options The main Jasper compiler/runtime options
*/
public JspRuntimeContext(ServletContext context, Options options) {
this.context = context;
this.options = options;
// Get the parent class loader
ClassLoader loader = Thread.currentThread().getContextClassLoader();
if (loader == null) {
loader = this.getClass().getClassLoader();
}
if (log.isDebugEnabled()) {
if (loader != null) {
log.debug(Localizer.getMessage("jsp.message.parent_class_loader_is",
loader.toString()));
} else {
log.debug(Localizer.getMessage("jsp.message.parent_class_loader_is",
"<none>"));
}
}
parentClassLoader = loader;
classpath = initClassPath();
// NOTE(review): JspCServletContext appears to be the JspC (precompile)
// context, where no SecurityManager setup is performed — confirm.
if (context instanceof org.apache.jasper.servlet.JspCServletContext) {
codeSource = null;
permissionCollection = null;
return;
}
if (Constants.IS_SECURITY_ENABLED) {
SecurityHolder holder = initSecurity();
codeSource = holder.cs;
permissionCollection = holder.pc;
} else {
codeSource = null;
permissionCollection = null;
}
// If this web application context is running from a
// directory, start the background compilation thread
String appBase = context.getRealPath("/");
if (!options.getDevelopment()
&& appBase != null
&& options.getCheckInterval() > 0) {
// Enables checkCompile(); stays -1L (disabled) otherwise.
lastCompileCheck = System.currentTimeMillis();
}
// The unload queue only exists when a JSP-count limit is configured.
if (options.getMaxLoadedJsps() > 0) {
jspQueue = new FastRemovalDequeue<>(options.getMaxLoadedJsps());
if (log.isDebugEnabled()) {
log.debug(Localizer.getMessage("jsp.message.jsp_queue_created",
"" + options.getMaxLoadedJsps(), context.getContextPath()));
}
}
/* Init parameter is in seconds, locally we use milliseconds */
jspIdleTimeout = options.getJspIdleTimeout() * 1000;
}
// ----------------------------------------------------- Instance Variables
/**
* This web applications ServletContext
*/
private final ServletContext context;
private final Options options;
private final ClassLoader parentClassLoader;
private final PermissionCollection permissionCollection;
private final CodeSource codeSource;
private final String classpath;
// -1L means background compile checking is disabled (see checkCompile()).
private volatile long lastCompileCheck = -1L;
private volatile long lastJspQueueUpdate = System.currentTimeMillis();
/* JSP idle timeout in milliseconds */
private long jspIdleTimeout;
/**
* Maps JSP pages to their JspServletWrapper's
*/
private final Map<String, JspServletWrapper> jsps =
new ConcurrentHashMap<>();
/**
* Keeps JSP pages ordered by last access.
* Remains null when no maxLoadedJsps limit is configured.
*/
private FastRemovalDequeue<JspServletWrapper> jspQueue = null;
// ------------------------------------------------------ Public Methods
/**
* Add a new JspServletWrapper.
*
* @param jspUri JSP URI
* @param jsw Servlet wrapper for JSP
*/
public void addWrapper(String jspUri, JspServletWrapper jsw) {
jsps.put(jspUri, jsw);
}
/**
* Get an already existing JspServletWrapper.
*
* @param jspUri JSP URI
* @return JspServletWrapper for JSP, or null if none is registered
*/
public JspServletWrapper getWrapper(String jspUri) {
return jsps.get(jspUri);
}
/**
* Remove a JspServletWrapper.
*
* @param jspUri JSP URI of JspServletWrapper to remove
*/
public void removeWrapper(String jspUri) {
jsps.remove(jspUri);
}
/**
* Push a newly compiled JspServletWrapper into the queue at first
* execution of jsp. Destroy any JSP that has been replaced in the queue.
*
* @param jsw Servlet wrapper for jsp.
* @return an unloadHandle that can be pushed to front of queue at later execution times.
* */
public FastRemovalDequeue<JspServletWrapper>.Entry push(JspServletWrapper jsw) {
if (log.isTraceEnabled()) {
log.trace(Localizer.getMessage("jsp.message.jsp_added",
jsw.getJspUri(), context.getContextPath()));
}
FastRemovalDequeue<JspServletWrapper>.Entry entry = jspQueue.push(jsw);
// A non-null "replaced" entry is the JSP evicted to keep the queue
// within its size limit; it must be unloaded now.
JspServletWrapper replaced = entry.getReplaced();
if (replaced != null) {
if (log.isDebugEnabled()) {
log.debug(Localizer.getMessage("jsp.message.jsp_removed_excess",
replaced.getJspUri(), context.getContextPath()));
}
unloadJspServletWrapper(replaced);
entry.clearReplaced();
}
return entry;
}
/**
* Push unloadHandle for JspServletWrapper to front of the queue.
*
* @param unloadHandle the unloadHandle for the jsp.
* */
public void makeYoungest(FastRemovalDequeue<JspServletWrapper>.Entry unloadHandle) {
if (log.isTraceEnabled()) {
JspServletWrapper jsw = unloadHandle.getContent();
log.trace(Localizer.getMessage("jsp.message.jsp_queue_update",
jsw.getJspUri(), context.getContextPath()));
}
jspQueue.moveFirst(unloadHandle);
}
/**
* Returns the number of JSPs for which JspServletWrappers exist, i.e.,
* the number of JSPs that have been loaded into the webapp.
*
* @return The number of JSPs that have been loaded into the webapp
*/
public int getJspCount() {
return jsps.size();
}
/**
* Get the SecurityManager Policy CodeSource for this web
* application context.
*
* @return CodeSource for JSP, or null when security is disabled
*/
public CodeSource getCodeSource() {
return codeSource;
}
/**
* Get the parent ClassLoader.
*
* @return ClassLoader parent
*/
public ClassLoader getParentClassLoader() {
return parentClassLoader;
}
/**
* Get the SecurityManager PermissionCollection for this
* web application context.
*
* @return PermissionCollection permissions, or null when security is disabled
*/
public PermissionCollection getPermissionCollection() {
return permissionCollection;
}
/**
* Process a "destroy" event for this web application context.
* Destroys every registered JspServletWrapper.
*/
public void destroy() {
Iterator<JspServletWrapper> servlets = jsps.values().iterator();
while (servlets.hasNext()) {
servlets.next().destroy();
}
}
/**
* Increments the JSP reload counter.
*/
public void incrementJspReloadCount() {
jspReloadCount.incrementAndGet();
}
/**
* Resets the JSP reload counter.
*
* @param count Value to which to reset the JSP reload counter
*/
public void setJspReloadCount(int count) {
jspReloadCount.set(count);
}
/**
* Gets the current value of the JSP reload counter.
*
* @return The current value of the JSP reload counter
*/
public int getJspReloadCount() {
return jspReloadCount.intValue();
}
/**
* Gets the number of JSPs that are in the JSP limiter queue
*
* @return The number of JSPs (in the webapp with which this JspServlet is
* associated) that are in the JSP limiter queue, or -1 when the
* limiter queue is disabled (maxLoadedJsps &lt;= 0)
*/
public int getJspQueueLength() {
if (jspQueue != null) {
return jspQueue.getSize();
}
return -1;
}
/**
* Gets the number of JSPs that have been unloaded.
*
* @return The number of JSPs (in the webapp with which this JspServlet is
* associated) that have been unloaded
*/
public int getJspUnloadCount() {
return jspUnloadCount.intValue();
}
/**
* Method used by background thread to check the JSP dependencies
* registered with this class for JSP's.
*/
public void checkCompile() {
if (lastCompileCheck < 0) {
// Checking was disabled
return;
}
// Throttle: only run a full compile check once per checkInterval seconds.
long now = System.currentTimeMillis();
if (now > (lastCompileCheck + (options.getCheckInterval() * 1000L))) {
lastCompileCheck = now;
} else {
return;
}
// Snapshot the wrappers so concurrent add/remove does not affect iteration.
Object [] wrappers = jsps.values().toArray();
for (int i = 0; i < wrappers.length; i++ ) {
JspServletWrapper jsw = (JspServletWrapper)wrappers[i];
JspCompilationContext ctxt = jsw.getJspEngineContext();
// JspServletWrapper also synchronizes on this when
// it detects it has to do a reload
synchronized(jsw) {
try {
ctxt.compile();
} catch (FileNotFoundException ex) {
// Source JSP disappeared; record the removal instead of failing.
ctxt.incrementRemoved();
} catch (Throwable t) {
ExceptionUtils.handleThrowable(t);
jsw.getServletContext().log("Background compile failed",
t);
}
}
}
}
/**
* The classpath that is passed off to the Java compiler.
*/
public String getClassPath() {
return classpath;
}
/**
* Last time the update background task has run
*/
public long getLastJspQueueUpdate() {
return lastJspQueueUpdate;
}
// -------------------------------------------------------- Private Methods
/**
* Method used to initialize classpath for compiles.
* Concatenates the file: URLs of the parent URLClassLoader, the scratch
* directory and the servlet/option classpath, separated by the platform
* path separator.
*/
private String initClassPath() {
StringBuilder cpath = new StringBuilder();
String sep = System.getProperty("path.separator");
if (parentClassLoader instanceof URLClassLoader) {
URL [] urls = ((URLClassLoader)parentClassLoader).getURLs();
for (int i = 0; i < urls.length; i++) {
// Tomcat can use URLs other than file URLs. However, a protocol
// other than file: will generate a bad file system path, so
// only add file: protocol URLs to the classpath.
if (urls[i].getProtocol().equals("file") ) {
try {
// Need to decode the URL, primarily to convert %20
// sequences back to spaces
String decoded = URLDecoder.decode(urls[i].getPath(), "UTF-8");
cpath.append(decoded + sep);
} catch (UnsupportedEncodingException e) {
// All JREs are required to support UTF-8
}
}
}
}
cpath.append(options.getScratchDir() + sep);
String cp = (String) context.getAttribute(Constants.SERVLET_CLASSPATH);
if (cp == null || cp.equals("")) {
cp = options.getClassPath();
}
String path = cpath.toString() + cp;
if(log.isDebugEnabled()) {
log.debug("Compilation classpath initialized: " + path);
}
return path;
}
// Helper class to allow initSecurity() to return two items
private static class SecurityHolder{
private final CodeSource cs;
private final PermissionCollection pc;
private SecurityHolder(CodeSource cs, PermissionCollection pc){
this.cs = cs;
this.pc = pc;
}
}
/**
* Method used to initialize SecurityManager data.
* Returns a null-valued holder (cs/pc) when no Policy is installed or
* initialization fails.
*/
private SecurityHolder initSecurity() {
// Setup the PermissionCollection for this web app context
// based on the permissions configured for the root of the
// web app context directory, then add a file read permission
// for that directory.
Policy policy = Policy.getPolicy();
CodeSource source = null;
PermissionCollection permissions = null;
if( policy != null ) {
try {
// Get the permissions for the web app context
String docBase = context.getRealPath("/");
if( docBase == null ) {
docBase = options.getScratchDir().toString();
}
String codeBase = docBase;
if (!codeBase.endsWith(File.separator)){
codeBase = codeBase + File.separator;
}
File contextDir = new File(codeBase);
URL url = contextDir.getCanonicalFile().toURI().toURL();
source = new CodeSource(url,(Certificate[])null);
permissions = policy.getPermissions(source);
// Create a file read permission for web app context directory
if (!docBase.endsWith(File.separator)){
permissions.add
(new FilePermission(docBase,"read"));
docBase = docBase + File.separator;
} else {
permissions.add
(new FilePermission
(docBase.substring(0,docBase.length() - 1),"read"));
}
// A FilePermission path ending in "-" grants access to every
// file under the directory, recursively.
docBase = docBase + "-";
permissions.add(new FilePermission(docBase,"read"));
// Spec says apps should have read/write for their temp
// directory. This is fine, as no security sensitive files, at
// least any that the app doesn't have full control of anyway,
// will be written here.
String workDir = options.getScratchDir().toString();
if (!workDir.endsWith(File.separator)){
permissions.add
(new FilePermission(workDir,"read,write"));
workDir = workDir + File.separator;
}
workDir = workDir + "-";
permissions.add(new FilePermission(
workDir,"read,write,delete"));
// Allow the JSP to access org.apache.jasper.runtime.HttpJspBase
permissions.add( new RuntimePermission(
"accessClassInPackage.org.apache.jasper.runtime") );
if (parentClassLoader instanceof URLClassLoader) {
URL [] urls = ((URLClassLoader)parentClassLoader).getURLs();
String jarUrl = null;
String jndiUrl = null;
for (int i=0; i<urls.length; i++) {
if (jndiUrl == null
&& urls[i].toString().startsWith("jndi:") ) {
jndiUrl = urls[i].toString() + "-";
}
if (jarUrl == null
&& urls[i].toString().startsWith("jar:jndi:")
) {
jarUrl = urls[i].toString();
jarUrl = jarUrl.substring(0,jarUrl.length() - 2);
jarUrl = jarUrl.substring(0,
jarUrl.lastIndexOf('/')) + "/-";
}
}
if (jarUrl != null) {
permissions.add(
new FilePermission(jarUrl,"read"));
permissions.add(
new FilePermission(jarUrl.substring(4),"read"));
}
if (jndiUrl != null)
permissions.add(
new FilePermission(jndiUrl,"read") );
}
} catch(Exception e) {
context.log("Security Init for context failed",e);
}
}
return new SecurityHolder(source, permissions);
}
// Unregisters, destroys and counts an unloaded JSP wrapper.
private void unloadJspServletWrapper(JspServletWrapper jsw) {
removeWrapper(jsw.getJspUri());
synchronized(jsw) {
jsw.destroy();
}
jspUnloadCount.incrementAndGet();
}
/**
* Method used by background thread to check if any JSP's should be unloaded.
* Only active when jspIdleTimeout is positive; unloads every JSP whose last
* usage is older than the timeout.
*/
public void checkUnload() {
if (log.isTraceEnabled()) {
int queueLength = -1;
if (jspQueue != null) {
queueLength = jspQueue.getSize();
}
log.trace(Localizer.getMessage("jsp.message.jsp_unload_check",
context.getContextPath(), "" + jsps.size(), "" + queueLength));
}
long now = System.currentTimeMillis();
if (jspIdleTimeout > 0) {
long unloadBefore = now - jspIdleTimeout;
// Snapshot the wrappers so concurrent add/remove does not affect iteration.
Object [] wrappers = jsps.values().toArray();
for (int i = 0; i < wrappers.length; i++ ) {
JspServletWrapper jsw = (JspServletWrapper)wrappers[i];
synchronized(jsw) {
if (jsw.getLastUsageTime() < unloadBefore) {
if (log.isDebugEnabled()) {
log.debug(Localizer.getMessage("jsp.message.jsp_removed_idle",
jsw.getJspUri(), context.getContextPath(),
"" + (now-jsw.getLastUsageTime())));
}
if (jspQueue != null) {
jspQueue.remove(jsw.getUnloadHandle());
}
unloadJspServletWrapper(jsw);
}
}
}
}
lastJspQueueUpdate = now;
}
}
| |
/**
*
*/
package edacc.configurator.aac.racing;
import java.sql.SQLException;
import java.text.NumberFormat;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Locale;
import java.util.Random;
import edacc.api.API;
import edacc.configurator.aac.InstanceIdSeed;
import edacc.configurator.aac.AAC;
import edacc.configurator.aac.Parameters;
import edacc.configurator.aac.SolverConfiguration;
import edacc.model.ConfigurationScenarioDAO;
import edacc.model.ExperimentResult;
/**
* @author balint
*/
public class STTRace extends RacingMethods {
SolverConfiguration bestSC;
int incumbentNumber;
int num_instances;
// Threshold for the test
private double a;
// max number of evaluations
private int maxE;
// minimum number of evaluations to beat best
private int minEB;
// add random jobs from best or follow parcours?
private boolean randJob;
private static NumberFormat nf;
/**
* @param pacc
* @param api
* @param parameters
* @throws SQLException
*/
public STTRace(AAC pacc, Random rng, API api, Parameters parameters, List<SolverConfiguration> firstSCs, List<SolverConfiguration> referenceSCs) throws SQLException {
super(pacc, rng, api, parameters, firstSCs, referenceSCs);
this.a = 0.7;
this.minEB = 10;
incumbentNumber = 0;
num_instances = ConfigurationScenarioDAO.getConfigurationScenarioByExperimentId(parameters.getIdExperiment())
.getCourse().getInitialLength();
this.maxE = parameters.getMaxParcoursExpansionFactor() * num_instances;
this.randJob = true;
String val;
if ((val = parameters.getRacingMethodParameters().get("STTRace_a")) != null)
this.a = Double.parseDouble(val);
if ((val = parameters.getRacingMethodParameters().get("STTRace_randJob")) != null)
this.randJob = Boolean.parseBoolean(val);
if (this.minEB < 0) {
System.out.println("<STTRace_minEB> <0 not allowed! Setting <STTRace_minEB> to default!)");
this.minEB = 10;
}
nf = NumberFormat.getInstance(Locale.ENGLISH);
nf.setMinimumFractionDigits(5);
nf.setMaximumFractionDigits(15);
}
public String toString() {
return "\nThis is a sequential t-test racing method with the following parameters:\n" + "<STTRace_a> = "
+ this.a + " (threshold a value) \n" + "<STTRace_minEB> = " + this.minEB
+ " (minimum number of evaluations before replacing best as %(1..) of num instances) \n"
+ "<STTRace_randJob> = " + this.randJob + "(new config gets random jobs from best) \n";
}
/*
* compares sc1 to sc2 with a sequential t-test. returns -1 if sc1 is not
* better than sc2 and the test can be stopped (or has reached maxE) 1 if
* sc1 is better than sc2 and the test can be stopped (or has reached maxE)
* 0 if they are equal and the maximum number of evaluations has been
* reached -2 if further evaluations are necessary and possible
*/
@Override
public int compareTo(SolverConfiguration sc1, SolverConfiguration sc2) {
this.numCompCalls++;
// number of jobs that sc1 and sc2 have in common.
int n1 = sc1.getJobCount();
int n2 = sc2.getJobCount();
int n1f = sc1.getFinishedJobs().size();
int n2f = sc2.getFinishedJobs().size();
int n = Math.min(n1f, n2f);
System.out.println("S: " + sc1.getNumber() + "(" + sc1.getIdSolverConfiguration() + ")" + " vsn "
+ sc2.getNumber() + "(" + sc2.getIdSolverConfiguration() + ")");
System.out.println("S: " + n1 + "(e)" + " vse " + n2 + "(e)");
System.out.println("S: " + n1f + "(ef)" + " vse " + n2f + "(ef)");
System.out.println("S: n = " + n);
double[] y = new double[n]; // times of sc1
double[] z = new double[n]; // times of sc2
double[] x = new double[n]; // time difference y-z
int i = 0;
double testValue, threshold = 2.D * a / (double) n;
double mean = 0., std2 = 0.; // mean and quadratic standard deviation
double meany = 0, meanz = 0;
HashMap<InstanceIdSeed, ExperimentResult> sc1JobsMap = new HashMap<InstanceIdSeed, ExperimentResult>();
// System.out.println("STTRACE: " + sc1.getFinishedJobs().size() + "(e)"
// + " vs " + sc2.getFinishedJobs().size() + "(e)");
for (ExperimentResult job : sc1.getFinishedJobs()) {
sc1JobsMap.put(new InstanceIdSeed(job.getInstanceId(), job.getSeed()), job);
}
// System.out.println("S: sc1 has " + sc1JobsMap.size());
for (ExperimentResult job : sc2.getFinishedJobs()) {
InstanceIdSeed tmp = new InstanceIdSeed(job.getInstanceId(), job.getSeed());
ExperimentResult sc1Job;
if ((sc1Job = sc1JobsMap.get(tmp)) != null) {
// System.out.println(sc1Job.getSeed() + " " +
// sc1Job.getInstanceId() + " " + sc1Job.getStatus());
// System.out.println(job.getSeed() + " " + job.getInstanceId()
// + " " + job.getStatus());
y[i] = parameters.getStatistics().getCostFunction().singleCost(sc1Job);
z[i] = parameters.getStatistics().getCostFunction().singleCost(job);
x[i] = y[i] - z[i];
i++;
}
}
// compute the mean;
for (int j = 0; j < n; j++) {
mean += x[j];
meany += y[j];
meanz += z[j];
}
mean = mean / (double) n;
meanz = meanz / (double) n;
meany = meany / (double) n;
System.out.println("S: " + nf.format(meany) + "(t=" + sc1.getDbCost() + ")" + " vss " + nf.format(meanz)
+ "(t=" + sc2.getDbCost() + ")");
System.out.println("S: mean = " + mean);
// compute std2
for (int j = 0; j < n; j++) {
std2 += (x[j] - mean) * (x[j] - mean);
}
std2 = std2 / (double) n;
System.out.println("S: std2 = " + std2);
// the sequential t-test
testValue = Math.log(1.0 + mean * mean / std2);
System.out.println("S: testValue = " + nf.format(testValue));
System.out.println("S: threshold = " + nf.format(threshold));
if ((testValue > threshold) || (n == this.maxE)) {
// test can stop
if (n == this.maxE)
System.out.println("dead end!");
if (mean > 0) { // sc2 better
System.out.println("sc2 better");
return -1;
} else if (mean < 0) {
System.out.println("sc1 better");
return 1;
} else
return 0;
} else {
System.out.println("More!");
return -2;
}
}
/**
 * Registers the given solver configuration as the initial incumbent, seeds
 * its parcours with an initial batch of jobs, and blocks until those jobs
 * have finished so later comparisons have results to work with.
 *
 * @param firstSC the solver configuration to install as the first incumbent
 * @throws Exception if expanding the parcours or updating job status fails
 */
public void initFirstSC(SolverConfiguration firstSC) throws Exception {
    this.bestSC = firstSC;
    bestSC.setIncumbentNumber(incumbentNumber++);
    pacc.log("i " + pacc.getWallTime() + " ," + firstSC.getCost() + ", n.A.," + bestSC.getIdSolverConfiguration()
            + ", n.A.," + bestSC.getParameterConfiguration().toString());
    final int parcoursLimit = parameters.getMaxParcoursExpansionFactor() * num_instances;
    int addedJobs = 0;
    if (bestSC.getJobCount() < parcoursLimit) {
        // Never exceed the parcours limit, and never add more than the
        // configured initial parcours length in one go.
        addedJobs = Math.min(parcoursLimit - bestSC.getJobCount(),
                parameters.getInitialDefaultParcoursLength());
        pacc.expandParcoursSC(bestSC, addedJobs);
    }
    if (addedJobs > 0) {
        pacc.log("c Expanding parcours of best solver config " + bestSC.getIdSolverConfiguration() + " by "
                + addedJobs);
    }
    // Refresh the job status of bestSC; when new jobs were added, poll until
    // every job has left the not-started/running states before returning.
    if (addedJobs > 0) {
        pacc.log("c Waiting for currently best solver config " + bestSC.getIdSolverConfiguration() + " to finish "
                + addedJobs + "job(s)");
        do {
            pacc.updateJobsStatus(bestSC);
            if (bestSC.getNotStartedJobs().isEmpty() && bestSC.getRunningJobs().isEmpty()) {
                break;
            }
            pacc.sleep(1000);
        } while (true);
    } else {
        pacc.updateJobsStatus(bestSC);
    }
}
/**
 * Returns the current incumbent wrapped in a list. This racing method tracks
 * exactly one best configuration, so the list always has a single element.
 *
 * @return a mutable list containing only the current best solver configuration
 */
@Override
public List<SolverConfiguration> getBestSolverConfigurations() {
    List<SolverConfiguration> best = new LinkedList<SolverConfiguration>();
    best.add(bestSC);
    return best;
}
/*
 * Goes through the list of finished solver configurations and compares each
 * one to the current incumbent (bestSC). Several cases are handled:
 * 1. sc may beat bestSC while having fewer jobs than bestSC;
 * 2. sc should have at least (jobs of bestSC) / minEB jobs before it can win;
 * 3. sc needs further jobs before the sequential t-test can decide
 *    (compareTo returned -2).
 */
@Override
public void solverConfigurationsFinished(List<SolverConfiguration> scs) throws Exception {
    for (SolverConfiguration sc : scs) {
        // The incumbent never races against itself.
        if (sc == bestSC)
            continue;
        // comp: 1/0 -> sc at least as good as bestSC, -1 -> sc lost,
        // -2 -> t-test undecided, more runs needed.
        int comp = compareTo(sc, bestSC);
        if (comp >= 0) {// sc won against bestSC
            // TODO: something is not right here!  (translated from German)
            if (((sc.getJobCount() == bestSC.getJobCount()) && randJob)
                    || ((sc.getJobCount() >= this.num_instances / this.minEB) && (!randJob))) {
                sc.setFinished(true);
                // all jobs from bestSC computed and won against
                // best:
                System.out.println("sc1 won!!!");
                if (comp > 0) {
                    // Strictly better: promote sc to be the new incumbent.
                    bestSC = sc;
                    sc.setIncumbentNumber(incumbentNumber++);
                    pacc.log("i " + pacc.getWallTime() + "," + sc.getCost() + ",n.A. ,"
                            + sc.getIdSolverConfiguration() + ",n.A. ," + sc.getParameterConfiguration().toString());
                }
                // api.updateSolverConfigurationCost(sc.getIdSolverConfiguration(),
                // sc.getCost(),
                // statistics.getCostFunction());
                // listNewSC.remove(i);
            } else {
                // sc is ahead but has not run enough jobs yet; give it one
                // more job and put it back into the race.
                if (this.randJob) {
                    int generated = pacc.addRandomJob(1, sc, bestSC, Integer.MAX_VALUE - sc.getNumber());
                    pacc.log("c Generated " + generated + " jobs for solver config id " + sc.getNumber());
                } else {
                    pacc.expandParcoursSC(sc, 1);
                }
                pacc.addSolverConfigurationToListNewSC(sc);
            }
        } else if (comp == -1) {// lost against best on part of the actual
            // parcours:
            sc.setFinished(true);
            if ((parameters.isDeleteSolverConfigs()) && (sc.getIncumbentNumber() == -1)) {
                api.removeSolverConfig(sc.getIdSolverConfiguration());
                pacc.log("d Solver config " + sc.getNumber() + " with cost " + sc.getCost()
                        + " lost against best solver config on " + sc.getJobCount() + " runs.");
            }
        } else { // comp == -2: further jobs are needed to assess performance
            // TODO: the number of finished jobs still needs to be queried
            // here (translated from German)
            if (sc.getJobCount() > bestSC.getJobCount()) {
                // Challenger is ahead in runs: let the incumbent catch up.
                if (bestSC.getJobCount() < this.maxE) {
                    pacc.log("c Expanding parcours of best solver config (strange case)" + bestSC.getNumber()
                            + " by 1");
                    pacc.expandParcoursSC(bestSC, 1);
                }
                pacc.addSolverConfigurationToListNewSC(bestSC);
                pacc.addSolverConfigurationToListNewSC(sc);
            } else if (sc.getJobCount() < bestSC.getJobCount()) {
                // Challenger is behind: give it one more job.
                if (this.randJob) {
                    pacc.log("c Expanding parcours of solver config " + sc.getNumber() + " by 1 random job from best");
                    int generated = pacc.addRandomJob(1, sc, bestSC, Integer.MAX_VALUE - sc.getNumber());
                    pacc.log("c Generated " + generated + " jobs for solver config id "
                            + sc.getIdSolverConfiguration());
                } else {
                    pacc.log("c Expanding parcours of solver config " + sc.getNumber() + " by 1");
                    pacc.expandParcoursSC(sc, 1);
                }
                pacc.addSolverConfigurationToListNewSC(sc);
            } else {
                // Tied in runs: advance both by one job (bounded by maxE).
                if (bestSC.getJobCount() < this.maxE) {
                    System.out.println("best hat "+bestSC.getJobCount()+" sc1 hat "+sc.getJobCount());
                    pacc.log("c Expanding parcours of best and competitor solver config " + bestSC.getNumber()
                            + " by 1");
                    pacc.expandParcoursSC(bestSC, 1);
                    pacc.expandParcoursSC(sc, 1);
                }
                pacc.addSolverConfigurationToListNewSC(sc);
                pacc.addSolverConfigurationToListNewSC(bestSC);
            }
        }
    }
}
/**
 * Seeds every newly created solver configuration with its initial set of runs
 * and queues it so it can be raced against the incumbent.
 *
 * @param scs the freshly created solver configurations
 * @throws Exception if job creation or queuing fails
 */
@Override
public void solverConfigurationsCreated(List<SolverConfiguration> scs) throws Exception {
    for (SolverConfiguration sc : scs) {
        if (this.randJob) {
            // Presumably draws jobs matching the incumbent's instances so
            // both run on comparable inputs — see pacc.addRandomJob.
            int gen = pacc.addRandomJob(parameters.getMinRuns(), sc, bestSC, Integer.MAX_VALUE - sc.getNumber());
            System.out.println("added ->" + gen);
        } else {
            pacc.expandParcoursSC(sc, parameters.getMinRuns());
        }
        pacc.addSolverConfigurationToListNewSC(sc);
    }
}
/**
 * Estimates how many new solver configurations should be created so the
 * available CPU cores stay saturated with jobs.
 *
 * @param coreCount     number of currently available CPU cores
 * @param jobs          number of jobs currently queued or running
 * @param listNewSCSize number of configurations already waiting to be raced
 * @return the number of configurations to create next (at least 1 when the
 *         race would otherwise stall with an empty candidate list)
 */
@Override
public int computeOptimalExpansion(int coreCount, int jobs, int listNewSCSize) {
    if (coreCount < parameters.getMinCPUCount() || coreCount > parameters.getMaxCPUCount()) {
        pacc.log("w Warning: Current core count is " + coreCount);
    }
    final int minRuns = parameters.getMinRuns();
    int expansion = 0;
    // Expand only when the 4x-core job backlog threshold is not met...
    final int deficit = (Math.max(Math.round(4.f * coreCount), 8) - jobs) / minRuns;
    if (deficit > 0) {
        // ...but then size the new batch against the larger 6x-core target.
        expansion = (Math.max(Math.round(6.f * coreCount), 8) - jobs) / minRuns;
    }
    if (listNewSCSize == 0 && expansion == 0) {
        // Always keep at least one candidate in flight.
        expansion = 1;
    }
    return expansion;
}
/**
 * Lists this racing method's parameter settings in a human-readable form,
 * one line per parameter, e.g. for inclusion in result logs.
 *
 * @return the formatted STTRace parameter lines
 */
@Override
public List<String> getParameters() {
    List<String> p = new LinkedList<String>();
    p.add("% ---STTRace parameters---");
    // Fixed: consistent "name = value (description)" formatting and the
    // misspelling "wheater" in the emitted text.
    p.add("STTRace_a = " + this.a + " (constant to compute the threshold for passing the t-test = 2*a/n)");
    p.add("STTRace_randJob = " + this.randJob + " (whether to pick random jobs from best for new configs)");
    p.add("STTRace_minEB = " + this.minEB + " (minimum number of jobs to beat best; only valid when randJob=false)");
    p.add("% -----------------------");
    return p;
}
@Override
public void stopEvaluation(List<SolverConfiguration> scs) throws Exception {
    // Intentionally empty: STTRace performs no per-configuration cleanup
    // when an evaluation is aborted.
}
@Override
public void raceFinished() {
    // Intentionally empty: no end-of-race bookkeeping is required.
}
}
| |
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.pinpoint.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
 * <p>
 * Provides information about the type and the names of attributes that were removed from all the endpoints that are
 * associated with an application.
 * </p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/pinpoint-2016-12-01/AttributesResource" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class AttributesResource implements Serializable, Cloneable, StructuredPojo {

    /** The unique identifier for the application. */
    private String applicationId;

    /**
     * The type of attribute or attributes that were removed from the endpoints. Valid values are:
     * endpoint-custom-attributes (custom attributes that describe endpoints), endpoint-custom-metrics (custom metrics
     * that your app reports to Amazon Pinpoint for endpoints) and endpoint-user-attributes (custom attributes that
     * describe users).
     */
    private String attributeType;

    /** An array that specifies the names of the attributes that were removed from the endpoints. */
    private java.util.List<String> attributes;

    /**
     * Sets the unique identifier for the application.
     *
     * @param applicationId
     *        The unique identifier for the application.
     */
    public void setApplicationId(String applicationId) {
        this.applicationId = applicationId;
    }

    /**
     * Returns the unique identifier for the application.
     *
     * @return The unique identifier for the application.
     */
    public String getApplicationId() {
        return applicationId;
    }

    /**
     * Sets the unique identifier for the application and returns this object for chaining.
     *
     * @param applicationId
     *        The unique identifier for the application.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public AttributesResource withApplicationId(String applicationId) {
        this.applicationId = applicationId;
        return this;
    }

    /**
     * Sets the type of attribute or attributes that were removed from the endpoints. Valid values are
     * endpoint-custom-attributes, endpoint-custom-metrics and endpoint-user-attributes.
     *
     * @param attributeType
     *        The type of attribute or attributes that were removed from the endpoints.
     */
    public void setAttributeType(String attributeType) {
        this.attributeType = attributeType;
    }

    /**
     * Returns the type of attribute or attributes that were removed from the endpoints. Valid values are
     * endpoint-custom-attributes, endpoint-custom-metrics and endpoint-user-attributes.
     *
     * @return The type of attribute or attributes that were removed from the endpoints.
     */
    public String getAttributeType() {
        return attributeType;
    }

    /**
     * Sets the type of attribute or attributes that were removed from the endpoints and returns this object for
     * chaining. Valid values are endpoint-custom-attributes, endpoint-custom-metrics and endpoint-user-attributes.
     *
     * @param attributeType
     *        The type of attribute or attributes that were removed from the endpoints.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public AttributesResource withAttributeType(String attributeType) {
        this.attributeType = attributeType;
        return this;
    }

    /**
     * Returns the names of the attributes that were removed from the endpoints.
     *
     * @return An array that specifies the names of the attributes that were removed from the endpoints.
     */
    public java.util.List<String> getAttributes() {
        return attributes;
    }

    /**
     * Sets the names of the attributes that were removed from the endpoints. The given collection is defensively
     * copied; passing null clears the list.
     *
     * @param attributes
     *        An array that specifies the names of the attributes that were removed from the endpoints.
     */
    public void setAttributes(java.util.Collection<String> attributes) {
        this.attributes = (attributes == null) ? null : new java.util.ArrayList<String>(attributes);
    }

    /**
     * Appends attribute names to the existing list (if any). Use {@link #setAttributes(java.util.Collection)} or
     * {@link #withAttributes(java.util.Collection)} if you want to override the existing values.
     *
     * @param attributes
     *        An array that specifies the names of the attributes that were removed from the endpoints.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public AttributesResource withAttributes(String... attributes) {
        if (this.attributes == null) {
            this.attributes = new java.util.ArrayList<String>(attributes.length);
        }
        java.util.Collections.addAll(this.attributes, attributes);
        return this;
    }

    /**
     * Replaces the list of attribute names and returns this object for chaining.
     *
     * @param attributes
     *        An array that specifies the names of the attributes that were removed from the endpoints.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public AttributesResource withAttributes(java.util.Collection<String> attributes) {
        setAttributes(attributes);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getApplicationId() != null) {
            sb.append("ApplicationId: ").append(getApplicationId()).append(",");
        }
        if (getAttributeType() != null) {
            sb.append("AttributeType: ").append(getAttributeType()).append(",");
        }
        if (getAttributes() != null) {
            sb.append("Attributes: ").append(getAttributes());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof is false for null, so no separate null check is needed.
        if (!(obj instanceof AttributesResource)) {
            return false;
        }
        AttributesResource other = (AttributesResource) obj;
        return java.util.Objects.equals(other.getApplicationId(), this.getApplicationId())
                && java.util.Objects.equals(other.getAttributeType(), this.getAttributeType())
                && java.util.Objects.equals(other.getAttributes(), this.getAttributes());
    }

    @Override
    public int hashCode() {
        // Objects.hash produces the same 31-based accumulation as the
        // conventional hand-rolled prime loop (null members hash to 0).
        return java.util.Objects.hash(getApplicationId(), getAttributeType(), getAttributes());
    }

    @Override
    public AttributesResource clone() {
        try {
            return (AttributesResource) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.pinpoint.model.transform.AttributesResourceMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
| |
/*
* This file is part of dependency-check-core.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2015 Institute for Defense Analyses. All Rights Reserved.
*/
package org.owasp.dependencycheck.analyzer;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.StringUtils;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Confidence;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.utils.Checksum;
import org.owasp.dependencycheck.utils.FileFilterBuilder;
import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.FileFilter;
import java.io.IOException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* <p>
* Used to analyze CMake build files, and collect information that can be used to determine the associated CPE.</p>
* <p/>
* <p>
* Note: This analyzer catches straightforward invocations of the project command, plus some other observed patterns of version
* inclusion in real CMake projects. Many projects make use of older versions of CMake and/or use custom "homebrew" ways to insert
* version information. Hopefully as the newer CMake call pattern grows in usage, this analyzer will allow more CPEs to be
* identified.</p>
*
* @author Dale Visser <dvisser@ida.org>
*/
public class CMakeAnalyzer extends AbstractFileTypeAnalyzer {

    /**
     * The logger.
     */
    private static final Logger LOGGER = LoggerFactory.getLogger(CMakeAnalyzer.class);

    /**
     * Used when compiling file scanning regex patterns.
     */
    private static final int REGEX_OPTIONS = Pattern.DOTALL
            | Pattern.CASE_INSENSITIVE | Pattern.MULTILINE;

    /**
     * Matches invocations of the CMake project command; group 1 captures the
     * project (product) name.
     */
    private static final Pattern PROJECT = Pattern.compile(
            "^ *project *\\([ \\n]*(\\w+)[ \\n]*.*?\\)", REGEX_OPTIONS);

    /**
     * Matches CMake "set(&lt;product&gt;_version &lt;version&gt;)" commands.
     * Group 1: Product, Group 2: Version.
     */
    private static final Pattern SET_VERSION = Pattern
            .compile(
                    "^ *set\\s*\\(\\s*(\\w+)_version\\s+\"?(\\d+(?:\\.\\d+)+)[\\s\"]?\\)",
                    REGEX_OPTIONS);

    /**
     * Detects files that can be analyzed.
     */
    private static final FileFilter FILTER = FileFilterBuilder.newInstance().addExtensions(".cmake")
            .addFilenames("CMakeLists.txt").build();

    /**
     * A reference to a SHA1 message digest, used to give synthesized
     * dependencies a unique checksum. NOTE(review): MessageDigest instances
     * are stateful and not thread-safe; confirm analyzers never invoke this
     * concurrently before relying on the shared instance.
     */
    private static MessageDigest sha1 = null;

    static {
        try {
            sha1 = MessageDigest.getInstance("SHA1");
        } catch (NoSuchAlgorithmException e) {
            // Every compliant JRE must provide SHA1, so this should not occur;
            // include the exception so a stack trace is logged if it ever does.
            LOGGER.error("Unable to obtain the SHA1 message digest", e);
        }
    }

    /**
     * Returns the name of the CMake analyzer.
     *
     * @return the name of the analyzer
     */
    @Override
    public String getName() {
        return "CMake Analyzer";
    }

    /**
     * Tell that we are used for information collection.
     *
     * @return INFORMATION_COLLECTION
     */
    @Override
    public AnalysisPhase getAnalysisPhase() {
        return AnalysisPhase.INFORMATION_COLLECTION;
    }

    /**
     * Returns the file filter that selects the supported files.
     *
     * @return the filter matching *.cmake files and CMakeLists.txt
     */
    @Override
    protected FileFilter getFileFilter() {
        return FILTER;
    }

    /**
     * No-op initializer implementation.
     *
     * @throws Exception never thrown
     */
    @Override
    protected void initializeFileTypeAnalyzer() throws Exception {
        // Nothing to do here.
    }

    /**
     * Analyzes CMake build files and adds product/version evidence to the
     * dependency. (Previous javadoc incorrectly said "python packages".)
     *
     * @param dependency the dependency being analyzed
     * @param engine the engine being used to perform the scan
     * @throws AnalysisException thrown if there is an unrecoverable error analyzing the dependency
     */
    @Override
    protected void analyzeFileType(Dependency dependency, Engine engine)
            throws AnalysisException {
        final File file = dependency.getActualFile();
        final String parentName = file.getParentFile().getName();
        final String name = file.getName();
        dependency.setDisplayFileName(String.format("%s%c%s", parentName, File.separatorChar, name));
        final String contents;
        try {
            // NOTE(review): reads with the platform default charset; consider
            // an explicit charset once behavior change is acceptable.
            contents = FileUtils.readFileToString(file).trim();
        } catch (IOException e) {
            throw new AnalysisException(
                    "Problem occurred while reading dependency file.", e);
        }
        if (StringUtils.isNotBlank(contents)) {
            final Matcher m = PROJECT.matcher(contents);
            int count = 0;
            while (m.find()) {
                count++;
                // Parameterized logging avoids formatting when debug is disabled.
                LOGGER.debug("Found project command match with {} groups: {}",
                        m.groupCount(), m.group(0));
                final String group = m.group(1);
                LOGGER.debug("Group 1: {}", group);
                dependency.getProductEvidence().addEvidence(name, "Project",
                        group, Confidence.HIGH);
            }
            LOGGER.debug("Found {} matches.", count);
            analyzeSetVersionCommand(dependency, engine, contents);
        }
    }

    /**
     * Scans the file contents for "set(X_version ...)" commands and records
     * product/version evidence. The first match annotates the given
     * dependency; each additional match spawns a virtual dependency so that
     * distinct products in one file are not conflated.
     *
     * @param dependency the dependency for the scanned file
     * @param engine the scanning engine (receives any virtual dependencies)
     * @param contents the file contents to scan
     */
    private void analyzeSetVersionCommand(Dependency dependency, Engine engine, String contents) {
        final Matcher m = SET_VERSION.matcher(contents);
        int count = 0;
        while (m.find()) {
            count++;
            // Fixed log text: this scanner matches set-version, not project.
            LOGGER.debug("Found set version command match with {} groups: {}",
                    m.groupCount(), m.group(0));
            String product = m.group(1);
            final String version = m.group(2);
            LOGGER.debug("Group 1: {}", product);
            LOGGER.debug("Group 2: {}", version);
            final String aliasPrefix = "ALIASOF_";
            if (product.startsWith(aliasPrefix)) {
                // substring avoids the regex machinery of replaceFirst.
                product = product.substring(aliasPrefix.length());
            }
            // Fixed TODO: use a local target instead of reassigning the
            // parameter (checkstyle: parameter assignment).
            final Dependency target;
            if (count > 1) {
                target = new Dependency(dependency.getActualFile());
                target.setDisplayFileName(String.format("%s:%s", dependency.getDisplayFileName(), product));
                final String filePath = String.format("%s:%s", dependency.getFilePath(), product);
                target.setFilePath(filePath);
                // A synthetic SHA1 prevents coalescing into the dependency
                // provided by the engine.
                target.setSha1sum(Checksum.getHex(sha1.digest(filePath.getBytes())));
                engine.getDependencies().add(target);
            } else {
                target = dependency;
            }
            final String source = target.getDisplayFileName();
            target.getProductEvidence().addEvidence(source, "Product",
                    product, Confidence.MEDIUM);
            target.getVersionEvidence().addEvidence(source, "Version",
                    version, Confidence.MEDIUM);
        }
        LOGGER.debug("Found {} matches.", count);
    }

    /**
     * Returns the settings key governing whether this analyzer is enabled.
     *
     * @return the enabled-setting key for the CMake analyzer
     */
    @Override
    protected String getAnalyzerEnabledSettingKey() {
        return Settings.KEYS.ANALYZER_CMAKE_ENABLED;
    }
}
| |
package PJC.map;
import java.util.Map;
import java.util.function.*;
import PJC.collection.TIntCollection;
import PJC.map.iterator.TByteIntIterator;
import PJC.set.TByteSet;
/**
 * Interface for a primitive map of byte keys and int values.
 */
public interface TByteIntMap {
/**
 * Removes all of the mappings from this map. The map will be
 * empty after this call returns.
 */
public void clear();
/**
 * Attempts to compute a mapping for the specified key and its current mapped
 * value (or null if there is no current mapping). If the function returns null,
 * the mapping is removed (or remains absent if initially absent). If the function
 * itself throws an exception, the exception is rethrown, and the current mapping
 * is left unchanged.
 * NOTE(review): the return type is a primitive int, so "null" here presumably
 * maps to {@link #getNoEntryValue} — confirm against the implementations.
 * @param key key with which the specified value is to be associated
 * @param remappingFunction the function to compute a value
 * @return the new value associated with the specified key, or null if none
 */
public int compute (byte key, BiFunction<? super Byte, ? super Integer,
        ? extends Integer> remappingFunction);
/**
 * If the specified key is not already associated with a value (or is mapped to
 * null), attempts to compute its value using the given mapping function and enters
 * it into this map unless null.
 * If the function returns null no mapping is recorded. If the function itself throws
 * an exception, the exception is rethrown, and no mapping is recorded. The most
 * common usage is to construct a new object serving as an initial mapped value or
 * memoized result.
 * NOTE(review): the method name is misspelled — it should be computeIfAbsent;
 * renaming would break implementers, so it is only flagged here.
 * @param key key with which the specified value is to be associated
 * @param remappingFunction the function to compute a value
 * @return the current (existing or computed) value associated with the specified key
 */
public int comptueIfAbsent (byte key, Function<? super Byte,
        ? extends Integer> remappingFunction);
/**
 * If the value for the specified key is present and non-null, attempts to compute
 * a new mapping given the key and its current mapped value.
 * If the function returns null, the mapping is removed. If the function itself throws
 * an exception, the exception is rethrown, and the current mapping is left unchanged.
 * @param key key with which the specified value is to be associated
 * @param remappingFunction the function to compute a value
 * @return the new value associated with the specified key, or null if none
 */
public int computeIfPresent (byte key, BiFunction<? super Byte, ? super Integer,
        ? extends Integer> remappingFunction);
/**
 * Returns true if this map contains a mapping for the specified key. There
 * can be at most one such mapping.
 * @param key key whose presence in this map is to be tested
 * @return true if this map contains a mapping for the specified key
 */
public boolean containsKey (byte key);
/**
 * Returns true if this map maps one or more keys to the specified value.
 * @param value value whose presence in this map is to be tested
 * @return true if this map maps one or more keys to the specified value
 */
public boolean containsValue (int value);
/**
 * Returns true if this map maps one or more keys to the specified value,
 * searching with multiple threads.
 * @param value value whose presence in this map is to be tested
 * @param numProcs number of threads that will be used
 * @return true if this map maps one or more keys to the specified value
 */
public boolean containsValuePar (int value, int numProcs);
/**
 * Compares the specified object with this map for equality. Returns true if
 * the given object is also a map and the two maps represent the same mappings.
 * This ensures that the equals method works properly across different
 * implementations of the <code>Map</code> and <code>TByteIntMap</code> interfaces.
 * @param o object to be compared for equality with this map
 * @return true if the specified object is equal to this map
 */
boolean equals (Object o);
/**
 * Compares the specified object with this map for equality. Returns true if
 * the given object is also a map and the two maps represent the same mappings.
 * This ensures that the equals method works properly across different
 * implementations of the <code>Map</code> and <code>TByteIntMap</code> interfaces.
 * @param o object to be compared for equality with this map
 * @param numProcs number of threads that will be used
 * @return true if the specified object is equal to this map
 */
boolean equalsPar (Object o, int numProcs);
/**
 * Performs the given action for each pair in this map until all of them have been
 * processed or the action throws an exception. Exceptions thrown by the action are
 * relayed to the caller.
 * @param action the action to be performed for each entry
 */
public void forEach (BiConsumer<? super Byte, ? super Integer> action);
/**
 * Performs the given action for each pair in this map until all of them have been
 * processed or the action throws an exception. Exceptions thrown by the action are
 * relayed to the caller.
 * @param action the action to be performed for each entry
 * @param numProcs number of threads that will be used
 */
public void forEachPar (BiConsumer<? super Byte, ? super Integer> action, int numProcs);
/**
 * Returns the value to which the specified key is mapped, or null if this map
 * contains no mapping for the key. There can be at most one such mapping.
 * NOTE(review): since the return type is a primitive int, "null" presumably
 * means {@link #getNoEntryValue}; the <code>containsKey</code> operation may
 * be used to distinguish a missing key from one mapped to that sentinel.
 * @param key the key whose associated value is to be returned
 * @return the value to which the specified key is mapped, or null if this map
 * contains no mapping for the key
 */
public int get (byte key);
/**
 * Returns the key that is used to represent a null/absent key in this
 * collection. The default value is generally zero, but can be changed during
 * construction of the collection.
 *
 * @return the key that represents a null key in this collection.
 */
public byte getNoEntryKey();
/**
 * Returns the value that will be returned from {@link #get} or {@link #put} if no
 * entry exists for a given key. The default value is generally zero, but can be
 * changed during construction of the collection.
 *
 * @return the value that represents a null value in this collection.
 */
public int getNoEntryValue();
/**
 * Returns the value to which the specified key is mapped, or defaultValue if this
 * map contains no mapping for the key.
 * @param key the key whose associated value is to be returned
 * @param defaultValue the default mapping of the key
 * @return the value to which the specified key is mapped, or defaultValue if
 * this map contains no mapping for the key
 */
public int getOrDefault (byte key, int defaultValue);
/**
 * Returns the hash code value for this map. The hash code of a map is defined to be
 * the sum of the hash codes of each entry in the map.
 * @return the hash code value for this map
 */
public int hashCode();
/**
 * Returns the hash code value for this map, computed with multiple threads.
 * The hash code of a map is defined to be the sum of the hash codes of each
 * entry in the map.
 * @param numProcs number of threads that will be used
 * @return the hash code value for this map
 */
public int hashCodePar (int numProcs);
/**
 * Returns true if this map contains no key-value mappings.
 * @return true if this map contains no key-value mappings
 */
public boolean isEmpty();
/**
 * Returns a TByteSet view of the keys contained in this map. The set is backed by
 * the map, so changes to the map are reflected in the set, and vice-versa. If the
 * map is modified while an iteration over the set is in progress, the results of
 * the iteration are undefined. The set supports element removal, which removes the
 * corresponding mapping from the map. It does not support the add or addAll operations.
 * @return a set view of the keys contained in this map.
 */
public TByteSet keySet();
/**
 * Returns an iterator over the key/value pairs of this map.
 * @return an iterator over this map's entries
 */
public TByteIntIterator iterator();
/**
* If the specified key is not already associated with a value or is associated with
* null, associates it with the given non-null value. Otherwise, replaces the associated
* value with the results of the given remapping function, ore removes if the result
* is null. This method may be of use when combining multiple mapped values for a key.
* @param key key with which the resulting value is to be associated
* @param value the non-null value to be merged with the existing value associated with
* the key, or, if no existing value or a null value is associated with the key, to be
* associated with the key
* @param remappingFunction the function to recompute a value if present
* @return the new value associated with the specified key, or null if no value is
* associated with the key
*/
public int merge (byte key, int value, BiFunction<? super Byte, ? super Integer,
? extends Integer> remappingFunction);
/**
* Associates the specified value with the specified key in this map. If the map previously
* contained a mapping for the key, the old value is replaced by the specified value. A
* map m is said to contain a mapping for a key k if and only if <code>m.containsKey(k)</code>
* would return true.
* NOTE(review): the return type is the primitive int, so a "null" return presumably maps to
* this map's no-entry value — confirm against the implementing class.
* @param key key with which the specified value is to be associated
* @param value value to be associated with the specified key
* @return the previous value associated with key, or null if there was no mapping for key. A
* null return can also indicate that the map previously associated null with key, if the
* implementation supports null values
*/
public int put(byte key, int value );
/**
* Copies all of the mappings from the specified map to this map. The effect of this call
* is equivalent to that of calling <code>put (k, v)</code> on this map once for each
* mapping from key k to value v in the specified map. The behavior of this operation is
* undefined if the specified map is modified while the operation is in progress.
* @param m mappings to be stored in this map; keys and values are unboxed on copy
*/
public void putAll (Map<? extends Byte, ? extends Integer> m);
/**
* Copies all of the mappings from the specified map to this map. The effect of this call
* is equivalent to that of calling <code>put (k, v)</code> on this map once for each
* mapping from key k to value v in the specified map. The behavior of this operation is
* undefined if the specified map is modified while the operation is in progress.
* @param m mappings to be stored in this map
* @param numProcs number of threads that will be used to perform the copy in parallel
*/
public void putAllPar (Map<? extends Byte, ? extends Integer> m, int numProcs);
/**
* Copies all of the mappings from the specified map to this map. The effect of this call
* is equivalent to that of calling <code>put (k, v)</code> on this map once for each
* mapping from key k to value v in the specified map. The behavior of this operation is
* undefined if the specified map is modified while the operation is in progress.
* @param m primitive map whose mappings are to be stored in this map
*/
public void putAll (TByteIntMap m);
/**
* Copies all of the mappings from the specified map to this map. The effect of this call
* is equivalent to that of calling <code>put (k, v)</code> on this map once for each
* mapping from key k to value v in the specified map. The behavior of this operation is
* undefined if the specified map is modified while the operation is in progress.
* @param m primitive map whose mappings are to be stored in this map
* @param numProcs number of threads that will be used to perform the copy in parallel
*/
public void putAllPar (TByteIntMap m, int numProcs);
/**
* Copies all of the mappings from the specified arrays of keys and values to this map.
* The effect of this call is equivalent to that of calling <code>put (k, v)</code> on this
* map once for each mapping from key k to value v in the specified map. The behavior of
* this operation is undefined if at least one of the specified arrays is modified while
* the operation is in progress.
* NOTE(review): behavior when the two arrays differ in length is not specified here —
* confirm against the implementing class.
* @param keys array of keys to be stored in this map
* @param values array of values to be stored in this map
*/
public void putAll (byte[] keys, int[] values);
/**
* Copies all of the mappings from the specified arrays of keys and values to this map.
* The effect of this call is equivalent to that of calling <code>put (k, v)</code> on this
* map once for each mapping from key k to value v in the specified map. The behavior of
* this operation is undefined if at least one of the specified arrays is modified while
* the operation is in progress.
* @param keys array of keys to be stored in this map
* @param values array of values to be stored in this map
* @param numProcs number of threads that will be used to perform the copy in parallel
*/
public void putAllPar (byte[] keys, int[] values, int numProcs);
/**
* If the specified key is not already associated with a value (or is mapped to null)
* associates it with the given value and returns null; else, returns the current value.
* @param key key with which the specified value is to be associated
* @param value value to be associated with the specified key
* @return the previous value associated with the specified key, or null if there was no
* mapping for the key. A null return can also indicate that the map previously associated
* null with the key, if the implementation supports null values
*/
public int putIfAbsent (byte key, int value );
/**
* Removes the mapping for a key from this map if it is present. Returns the value to
* which this map previously associated the key, or null if the map contained no mapping
* for the key.
* If this map permits null values, then a return value of null does not necessarily indicate
* that the map contained no mapping for the key; it's also possible that the map explicitly
* mapped the key to null. The map will not contain a mapping for the specified key once the
* call returns.
* @param key key whose mapping is to be removed from the map
* @return the previous value associated with key, or null if there was no mapping for key
*/
public int remove (byte key );
/**
* Removes the entry for the specified key only if it's currently mapped to the specified value.
* @param key key whose entry is to be conditionally removed from the map
* @param value value expected to be associated with the specified key
* @return true if the value was removed
*/
public boolean remove (byte key, int value);
/**
* Replaces the entry for the specified key only if it's currently mapped to some value.
* @param key key with which the specified value is associated
* @param value value to be associated with the specified key
* @return the previous value associated with the specified key, or null if there was no
* mapping for the key. A null return can also indicate that the map previously associated
* null with the key, if the implementation supports null values.
*/
public int replace (byte key, int value);
/**
* Replaces the entry for the specified key only if currently mapped to the specified value.
* @param key key with which the specified value is associated
* @param oldValue value expected to be associated with the specified key
* @param newValue value to be associated with the specified key
* @return true if the value was replaced
*/
public boolean replace (byte key, int oldValue, int newValue);
/**
* Replaces each entry's value with the result of invoking the given function on that
* entry until all of them have been processed or the function throws an exception.
* Exceptions thrown by the function are relayed to the caller.
* @param function the function to apply to each entry; its result becomes the new value
*/
public void replaceAll (BiFunction<? super Byte, ? super Integer,
? extends Integer> function);
/**
* Replaces each entry's value with the result of invoking the given function on that
* entry until all of them have been processed or the function throws an exception.
* Exceptions thrown by the function are relayed to the caller.
* @param function the function to apply to each entry; its result becomes the new value
* @param numProcs number of threads that will be used to apply the function in parallel
*/
public void replaceAllPar (BiFunction<? super Byte, ? super Integer,
? extends Integer> function, int numProcs);
/**
* Returns the number of key-value mappings in this map. If the map contains more than
* Integer.MAX_VALUE elements, returns Integer.MAX_VALUE.
* @return the number of key-value mappings in this map
*/
public int size();
/**
* Returns a Collection view of the values contained in this map. The collection is
* backed by the map, so changes to the map are reflected in the collection, and vice-versa.
* If the map is modified while an iteration over the collection is in progress, the results
* of the iteration are undefined. The collection supports element removal, which removes
* the corresponding mapping from the map. It does not support the add or addAll operations.
* @return a collection view of the values contained in this map
*/
public TIntCollection values();
}
| |
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/
package com.microsoft.azure.management.appservice.v2016_09_01.implementation;
import com.microsoft.azure.arm.resources.models.implementation.GroupableResourceCoreImpl;
import com.microsoft.azure.management.appservice.v2016_09_01.AppServicePlan;
import rx.Observable;
import com.microsoft.azure.management.appservice.v2016_09_01.AppServicePlanPatchResource;
import org.joda.time.DateTime;
import com.microsoft.azure.management.appservice.v2016_09_01.StatusOptions;
import com.microsoft.azure.management.appservice.v2016_09_01.HostingEnvironmentProfile;
import com.microsoft.azure.management.appservice.v2016_09_01.ProvisioningState;
import com.microsoft.azure.management.appservice.v2016_09_01.SkuDescription;
import rx.functions.Func1;
/**
 * Fluent implementation of {@link AppServicePlan} that wraps an {@link AppServicePlanInner}
 * model and delegates all reads to it.
 *
 * NOTE(review): this class is generated by AutoRest (see the file header) — manual edits
 * here are likely to be overwritten on regeneration; confirm before changing behavior.
 */
class AppServicePlanImpl extends GroupableResourceCoreImpl<AppServicePlan, AppServicePlanInner, AppServicePlanImpl, AppServiceManager> implements AppServicePlan, AppServicePlan.Definition, AppServicePlan.Update {
    // Payload accumulated for update (PATCH) calls; re-created after every
    // create/update round-trip by resetCreateUpdateParameters().
    private AppServicePlanPatchResource updateParameter;

    AppServicePlanImpl(String name, AppServicePlanInner inner, AppServiceManager manager) {
        super(name, inner, manager);
        this.updateParameter = new AppServicePlanPatchResource();
    }

    /**
     * Creates (or fully replaces) the plan from the inner model, then clears the
     * accumulated update payload before mapping the result back to this fluent wrapper.
     */
    @Override
    public Observable<AppServicePlan> createResourceAsync() {
        AppServicePlansInner client = this.manager().inner().appServicePlans();
        return client.createOrUpdateAsync(this.resourceGroupName(), this.name(), this.inner())
            .map(new Func1<AppServicePlanInner, AppServicePlanInner>() {
                @Override
                public AppServicePlanInner call(AppServicePlanInner resource) {
                    // drop any staged update parameters now that the service has them
                    resetCreateUpdateParameters();
                    return resource;
                }
            })
            .map(innerToFluentMap(this));
    }

    /**
     * Applies the staged {@code updateParameter} patch to the existing plan, then clears
     * the payload before mapping the result back to this fluent wrapper.
     */
    @Override
    public Observable<AppServicePlan> updateResourceAsync() {
        AppServicePlansInner client = this.manager().inner().appServicePlans();
        return client.updateAsync(this.resourceGroupName(), this.name(), this.updateParameter)
            .map(new Func1<AppServicePlanInner, AppServicePlanInner>() {
                @Override
                public AppServicePlanInner call(AppServicePlanInner resource) {
                    // drop any staged update parameters now that the service has them
                    resetCreateUpdateParameters();
                    return resource;
                }
            })
            .map(innerToFluentMap(this));
    }

    /** Refreshes the inner model from the service. */
    @Override
    protected Observable<AppServicePlanInner> getInnerAsync() {
        AppServicePlansInner client = this.manager().inner().appServicePlans();
        return client.getByResourceGroupAsync(this.resourceGroupName(), this.name());
    }

    /** A resource with no id has not been created on the service yet. */
    @Override
    public boolean isInCreateMode() {
        return this.inner().id() == null;
    }

    // Discards any staged PATCH payload (called after each successful create/update).
    private void resetCreateUpdateParameters() {
        this.updateParameter = new AppServicePlanPatchResource();
    }

    // ---------------------------------------------------------------------------
    // Read-only property accessors — each simply delegates to the inner model.
    // ---------------------------------------------------------------------------

    @Override
    public String adminSiteName() {
        return this.inner().adminSiteName();
    }

    @Override
    public String appServicePlanName() {
        return this.inner().appServicePlanName();
    }

    @Override
    public String geoRegion() {
        return this.inner().geoRegion();
    }

    @Override
    public HostingEnvironmentProfile hostingEnvironmentProfile() {
        return this.inner().hostingEnvironmentProfile();
    }

    @Override
    public Boolean isSpot() {
        return this.inner().isSpot();
    }

    @Override
    public String kind() {
        return this.inner().kind();
    }

    @Override
    public Integer maximumNumberOfWorkers() {
        return this.inner().maximumNumberOfWorkers();
    }

    @Override
    public Integer numberOfSites() {
        return this.inner().numberOfSites();
    }

    @Override
    public Boolean perSiteScaling() {
        return this.inner().perSiteScaling();
    }

    @Override
    public ProvisioningState provisioningState() {
        return this.inner().provisioningState();
    }

    @Override
    public Boolean reserved() {
        return this.inner().reserved();
    }

    @Override
    public String resourceGroup() {
        return this.inner().resourceGroup();
    }

    @Override
    public SkuDescription sku() {
        return this.inner().sku();
    }

    @Override
    public DateTime spotExpirationTime() {
        return this.inner().spotExpirationTime();
    }

    @Override
    public StatusOptions status() {
        return this.inner().status();
    }

    @Override
    public String subscription() {
        return this.inner().subscription();
    }

    @Override
    public Integer targetWorkerCount() {
        return this.inner().targetWorkerCount();
    }

    @Override
    public Integer targetWorkerSizeId() {
        return this.inner().targetWorkerSizeId();
    }

    @Override
    public String workerTierName() {
        return this.inner().workerTierName();
    }

    // ---------------------------------------------------------------------------
    // Fluent setters. These two are create-only, so they always write to the inner
    // model; the remaining setters route to the inner model in create mode and to
    // the PATCH payload (updateParameter) in update mode.
    // ---------------------------------------------------------------------------

    @Override
    public AppServicePlanImpl withAppServicePlanName(String appServicePlanName) {
        this.inner().withAppServicePlanName(appServicePlanName);
        return this;
    }

    @Override
    public AppServicePlanImpl withSku(SkuDescription sku) {
        this.inner().withSku(sku);
        return this;
    }

    @Override
    public AppServicePlanImpl withAdminSiteName(String adminSiteName) {
        if (isInCreateMode()) {
            this.inner().withAdminSiteName(adminSiteName);
        } else {
            this.updateParameter.withAdminSiteName(adminSiteName);
        }
        return this;
    }

    @Override
    public AppServicePlanImpl withHostingEnvironmentProfile(HostingEnvironmentProfile hostingEnvironmentProfile) {
        if (isInCreateMode()) {
            this.inner().withHostingEnvironmentProfile(hostingEnvironmentProfile);
        } else {
            this.updateParameter.withHostingEnvironmentProfile(hostingEnvironmentProfile);
        }
        return this;
    }

    @Override
    public AppServicePlanImpl withIsSpot(Boolean isSpot) {
        if (isInCreateMode()) {
            this.inner().withIsSpot(isSpot);
        } else {
            this.updateParameter.withIsSpot(isSpot);
        }
        return this;
    }

    @Override
    public AppServicePlanImpl withKind(String kind) {
        if (isInCreateMode()) {
            this.inner().withKind(kind);
        } else {
            this.updateParameter.withKind(kind);
        }
        return this;
    }

    @Override
    public AppServicePlanImpl withPerSiteScaling(Boolean perSiteScaling) {
        if (isInCreateMode()) {
            this.inner().withPerSiteScaling(perSiteScaling);
        } else {
            this.updateParameter.withPerSiteScaling(perSiteScaling);
        }
        return this;
    }

    @Override
    public AppServicePlanImpl withReserved(Boolean reserved) {
        if (isInCreateMode()) {
            this.inner().withReserved(reserved);
        } else {
            this.updateParameter.withReserved(reserved);
        }
        return this;
    }

    @Override
    public AppServicePlanImpl withSpotExpirationTime(DateTime spotExpirationTime) {
        if (isInCreateMode()) {
            this.inner().withSpotExpirationTime(spotExpirationTime);
        } else {
            this.updateParameter.withSpotExpirationTime(spotExpirationTime);
        }
        return this;
    }

    @Override
    public AppServicePlanImpl withTargetWorkerCount(Integer targetWorkerCount) {
        if (isInCreateMode()) {
            this.inner().withTargetWorkerCount(targetWorkerCount);
        } else {
            this.updateParameter.withTargetWorkerCount(targetWorkerCount);
        }
        return this;
    }

    @Override
    public AppServicePlanImpl withTargetWorkerSizeId(Integer targetWorkerSizeId) {
        if (isInCreateMode()) {
            this.inner().withTargetWorkerSizeId(targetWorkerSizeId);
        } else {
            this.updateParameter.withTargetWorkerSizeId(targetWorkerSizeId);
        }
        return this;
    }

    @Override
    public AppServicePlanImpl withWorkerTierName(String workerTierName) {
        if (isInCreateMode()) {
            this.inner().withWorkerTierName(workerTierName);
        } else {
            this.updateParameter.withWorkerTierName(workerTierName);
        }
        return this;
    }
}
| |
/*
* Copyright 2001-2009 Terracotta, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
*/
package org.quartz.core;
import com.yammer.metrics.Metrics;
import com.yammer.metrics.core.Counter;
import com.yammer.metrics.core.MetricName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.quartz.Job;
import org.quartz.JobDetail;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
import org.quartz.JobPersistenceException;
import org.quartz.Scheduler;
import org.quartz.SchedulerException;
import org.quartz.Trigger;
import org.quartz.listeners.SchedulerListenerSupport;
import org.quartz.spi.TriggerFiredBundle;
/**
* <p>
* JobRunShell instances are responsible for providing the 'safe' environment
* for <code>Job</code> s to run in, and for performing all of the work of
* executing the <code>Job</code>, catching ANY thrown exceptions, updating
* the <code>Trigger</code> with the <code>Job</code>'s completion code,
* etc.
* </p>
*
* <p>
* A <code>JobRunShell</code> instance is created by a <code>JobRunShellFactory</code>
* on behalf of the <code>QuartzSchedulerThread</code> which then runs the
* shell in a thread from the configured <code>ThreadPool</code> when the
* scheduler determines that a <code>Job</code> has been triggered.
* </p>
*
* @see JobRunShellFactory
* @see org.quartz.core.QuartzSchedulerThread
* @see org.quartz.Job
* @see org.quartz.Trigger
*
* @author James House
*/
public class JobRunShell extends SchedulerListenerSupport implements Runnable {
    /*
     * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
     *
     * Data members.
     *
     * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
     */

    // Metrics counter, incremented once per job execution that survives trigger/job
    // listener notification (see notifyListenersBeginning). This is a local addition
    // on top of stock Quartz.
    private static final Counter JOB_STARTED_COUNT = Metrics.newCounter(new MetricName("Quartz", "started", "Jobs"));

    // Context for the current execution; built in initialize(), cleared in passivate().
    protected JobExecutionContext jec = null;

    protected QuartzScheduler qs = null;

    protected Scheduler scheduler = null;

    protected SchedulingContext schdCtxt = null;

    // Factory this shell is returned to at the end of run().
    protected JobRunShellFactory jobRunShellFactory = null;

    // Set by requestShutdown(); volatile so the retry loops observe it promptly.
    protected volatile boolean shutdownRequested = false;

    private final Logger log = LoggerFactory.getLogger(getClass());

    /*
     * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
     *
     * Constructors.
     *
     * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
     */

    /**
     * <p>
     * Create a JobRunShell instance with the given settings.
     * </p>
     *
     * @param jobRunShellFactory
     *            A handle to the <code>JobRunShellFactory</code> that produced
     *            this <code>JobRunShell</code>.
     * @param scheduler
     *            The <code>Scheduler</code> instance that should be made
     *            available within the <code>JobExecutionContext</code>.
     * @param schdCtxt
     *            the <code>SchedulingContext</code> that should be used by the
     *            <code>JobRunShell</code> when making updates to the <code>JobStore</code>.
     */
    public JobRunShell(JobRunShellFactory jobRunShellFactory,
            Scheduler scheduler, SchedulingContext schdCtxt) {
        this.jobRunShellFactory = jobRunShellFactory;
        this.scheduler = scheduler;
        this.schdCtxt = schdCtxt;
    }

    /*
     * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
     *
     * Interface.
     *
     * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
     */

    /** SchedulerListener callback: stop any pending retry loops when the scheduler shuts down. */
    @Override
    public void schedulerShuttingdown() {
        requestShutdown();
    }

    protected Logger getLog() {
        return log;
    }

    /**
     * Instantiates the <code>Job</code> via the scheduler's <code>JobFactory</code> and
     * builds the <code>JobExecutionContext</code> for this run. Any instantiation failure
     * (including linkage errors such as <code>NoClassDefFoundError</code>) is reported to the
     * scheduler listeners and rethrown as a <code>SchedulerException</code>.
     */
    public void initialize(QuartzScheduler qs, TriggerFiredBundle firedBundle)
            throws SchedulerException {
        this.qs = qs;
        Job job = null;
        JobDetail jobDetail = firedBundle.getJobDetail();
        try {
            job = qs.getJobFactory().newJob(firedBundle);
        } catch (SchedulerException se) {
            qs.notifySchedulerListenersError(
                    "An error occured instantiating job to be executed. job= '"
                            + jobDetail.getFullName() + "'", se);
            throw se;
        } catch (Throwable ncdfe) { // such as NoClassDefFoundError
            SchedulerException se = new SchedulerException(
                    "Problem instantiating class '"
                            + jobDetail.getJobClass().getName() + "' - ", ncdfe);
            qs.notifySchedulerListenersError(
                    "An error occured instantiating job to be executed. job= '"
                            + jobDetail.getFullName() + "'", se);
            throw se;
        }
        this.jec = new JobExecutionContext(scheduler, firedBundle, job);
    }

    public void requestShutdown() {
        shutdownRequested = true;
    }

    /**
     * Performs one job execution: notifies listeners, executes the job (catching ANY
     * throwable), updates the trigger with the completion code, and tells the job store the
     * job is complete. The do/while loop only repeats when the trigger asks for
     * <code>INSTRUCTION_RE_EXECUTE_JOB</code>; all other paths <code>break</code> out after
     * a single pass. The shell is always returned to its factory in the finally block.
     */
    public void run() {
        try {
            scheduler.addSchedulerListener(this);
        } catch (SchedulerException ignore) {
            // can never happen on a local scheduler - which by definition this will be (since we are executing on it)
        }
        try {
            Trigger trigger = jec.getTrigger();
            JobDetail jobDetail = jec.getJobDetail();
            do {
                JobExecutionException jobExEx = null;
                Job job = jec.getJobInstance();
                try {
                    begin();
                } catch (SchedulerException se) {
                    qs.notifySchedulerListenersError("Error executing Job ("
                            + jec.getJobDetail().getFullName()
                            + ": couldn't begin execution.", se);
                    break;
                }
                // notify job & trigger listeners...
                try {
                    if (!notifyListenersBeginning(jec)) {
                        break;
                    }
                } catch(VetoedException ve) {
                    // a TriggerListener vetoed the execution: record the veto with the
                    // trigger and job store (retrying persistence failures), then bail out
                    try {
                        int instCode = trigger.executionComplete(jec, null);
                        try {
                            qs.notifyJobStoreJobVetoed(schdCtxt, trigger, jobDetail, instCode);
                        } catch(JobPersistenceException jpe) {
                            vetoedJobRetryLoop(trigger, jobDetail, instCode);
                        }
                        complete(true);
                    } catch (SchedulerException se) {
                        qs.notifySchedulerListenersError("Error during veto of Job ("
                                + jec.getJobDetail().getFullName()
                                + ": couldn't finalize execution.", se);
                    }
                    break;
                }
                long startTime = System.currentTimeMillis();
                long endTime = startTime;
                // execute the job
                try {
                    log.debug("Calling execute on job " + jobDetail.getFullName());
                    job.execute(jec);
                    endTime = System.currentTimeMillis();
                } catch (JobExecutionException jee) {
                    endTime = System.currentTimeMillis();
                    jobExEx = jee;
                    getLog().info("Job " + jobDetail.getFullName() +
                            " threw a JobExecutionException: ", jobExEx);
                } catch (Throwable e) {
                    // anything other than a JobExecutionException is a bug in the job;
                    // wrap it so the trigger still receives a completion signal
                    endTime = System.currentTimeMillis();
                    getLog().error("Job " + jobDetail.getFullName() +
                            " threw an unhandled Exception: ", e);
                    SchedulerException se = new SchedulerException(
                            "Job threw an unhandled exception.", e);
                    se.setErrorCode(SchedulerException.ERR_JOB_EXECUTION_THREW_EXCEPTION);
                    qs.notifySchedulerListenersError("Job ("
                            + jec.getJobDetail().getFullName()
                            + " threw an exception.", se);
                    jobExEx = new JobExecutionException(se, false);
                    jobExEx.setErrorCode(JobExecutionException.ERR_JOB_EXECUTION_THREW_EXCEPTION);
                }
                jec.setJobRunTime(endTime - startTime);
                // notify all job listeners
                if (!notifyJobListenersComplete(jec, jobExEx)) {
                    break;
                }
                int instCode = Trigger.INSTRUCTION_NOOP;
                // update the trigger
                try {
                    instCode = trigger.executionComplete(jec, jobExEx);
                } catch (Exception e) {
                    // If this happens, there's a bug in the trigger...
                    SchedulerException se = new SchedulerException(
                            "Trigger threw an unhandled exception.", e);
                    se.setErrorCode(SchedulerException.ERR_TRIGGER_THREW_EXCEPTION);
                    qs.notifySchedulerListenersError(
                            "Please report this error to the Quartz developers.",
                            se);
                }
                // notify all trigger listeners
                if (!notifyTriggerListenersComplete(jec, instCode)) {
                    break;
                }
                // update job/trigger or re-execute job
                if (instCode == Trigger.INSTRUCTION_RE_EXECUTE_JOB) {
                    jec.incrementRefireCount();
                    try {
                        complete(false);
                    } catch (SchedulerException se) {
                        qs.notifySchedulerListenersError("Error executing Job ("
                                + jec.getJobDetail().getFullName()
                                + ": couldn't finalize execution.", se);
                    }
                    // loop around and execute the same job again
                    continue;
                }
                try {
                    complete(true);
                } catch (SchedulerException se) {
                    qs.notifySchedulerListenersError("Error executing Job ("
                            + jec.getJobDetail().getFullName()
                            + ": couldn't finalize execution.", se);
                    continue;
                }
                try {
                    qs.notifyJobStoreJobComplete(schdCtxt, trigger, jobDetail,
                            instCode);
                } catch (JobPersistenceException jpe) {
                    qs.notifySchedulerListenersError(
                            "An error occured while marking executed job complete. job= '"
                                    + jobDetail.getFullName() + "'", jpe);
                    if (!completeTriggerRetryLoop(trigger, jobDetail, instCode)) {
                        return;
                    }
                }
                break;
            } while (true);
        } finally {
            try {
                scheduler.removeSchedulerListener(this);
            } catch (SchedulerException e) {
                // can never happen on a local scheduler - which by definition this will be (since we are executing on it)
            }
            // hand the shell back for reuse, regardless of how the run ended
            jobRunShellFactory.returnJobRunShell(this);
        }
    }

    // Hook invoked before each execution attempt; subclasses may open transactions here.
    protected void begin() throws SchedulerException {
    }

    // Hook invoked after each execution attempt; subclasses may commit/rollback here.
    protected void complete(boolean successfulExecution)
            throws SchedulerException {
    }

    /** Releases per-run state so the shell can be pooled. */
    public void passivate() {
        jec = null;
        qs = null;
    }

    /**
     * Notifies trigger listeners (which may veto) and then job listeners that execution is
     * about to begin. Returns false when a listener-notification error means the job must
     * NOT be run; throws VetoedException when a trigger listener vetoed the execution.
     * On success, bumps the JOB_STARTED_COUNT metric.
     */
    private boolean notifyListenersBeginning(JobExecutionContext jec) throws VetoedException {
        boolean vetoed = false;
        // notify all trigger listeners
        try {
            vetoed = qs.notifyTriggerListenersFired(jec);
        } catch (SchedulerException se) {
            qs.notifySchedulerListenersError(
                    "Unable to notify TriggerListener(s) while firing trigger "
                            + "(Trigger and Job will NOT be fired!). trigger= "
                            + jec.getTrigger().getFullName() + " job= "
                            + jec.getJobDetail().getFullName(), se);
            return false;
        }
        if(vetoed) {
            try {
                qs.notifyJobListenersWasVetoed(jec);
            } catch (SchedulerException se) {
                qs.notifySchedulerListenersError(
                        "Unable to notify JobListener(s) of vetoed execution " +
                                "while firing trigger (Trigger and Job will NOT be " +
                                "fired!). trigger= "
                                + jec.getTrigger().getFullName() + " job= "
                                + jec.getJobDetail().getFullName(), se);
            }
            throw new VetoedException();
        }
        // notify all job listeners
        try {
            qs.notifyJobListenersToBeExecuted(jec);
        } catch (SchedulerException se) {
            qs.notifySchedulerListenersError(
                    "Unable to notify JobListener(s) of Job to be executed: "
                            + "(Job will NOT be executed!). trigger= "
                            + jec.getTrigger().getFullName() + " job= "
                            + jec.getJobDetail().getFullName(), se);
            return false;
        }
        JOB_STARTED_COUNT.inc();
        return true;
    }

    /**
     * Notifies job listeners that execution finished. Returns false on notification
     * failure (the error is reported to scheduler listeners).
     */
    private boolean notifyJobListenersComplete(JobExecutionContext jec,
            JobExecutionException jobExEx) {
        try {
            qs.notifyJobListenersWasExecuted(jec, jobExEx);
        } catch (SchedulerException se) {
            qs.notifySchedulerListenersError(
                    "Unable to notify JobListener(s) of Job that was executed: "
                            + "(error will be ignored). trigger= "
                            + jec.getTrigger().getFullName() + " job= "
                            + jec.getJobDetail().getFullName(), se);
            return false;
        }
        return true;
    }

    /**
     * Notifies trigger listeners of the completion instruction; also tells scheduler
     * listeners when the trigger will never fire again (next fire time is null).
     */
    private boolean notifyTriggerListenersComplete(JobExecutionContext jec,
            int instCode) {
        try {
            qs.notifyTriggerListenersComplete(jec, instCode);
        } catch (SchedulerException se) {
            qs.notifySchedulerListenersError(
                    "Unable to notify TriggerListener(s) of Job that was executed: "
                            + "(error will be ignored). trigger= "
                            + jec.getTrigger().getFullName() + " job= "
                            + jec.getJobDetail().getFullName(), se);
            return false;
        }
        if (jec.getTrigger().getNextFireTime() == null) {
            qs.notifySchedulerListenersFinalized(jec.getTrigger());
        }
        return true;
    }

    /**
     * Retries notifyJobStoreJobComplete every 15 seconds until it succeeds or the shell /
     * scheduler is shutting down (the job store — e.g. its DB connection — is assumed to be
     * temporarily failed). Errors are reported only every 4th attempt to limit log spam.
     * @return true if the job store was eventually notified, false if shut down first
     */
    public boolean completeTriggerRetryLoop(Trigger trigger,
            JobDetail jobDetail, int instCode) {
        long count = 0;
        while (!shutdownRequested && !qs.isShuttingDown()) {
            try {
                Thread.sleep(15 * 1000L); // retry every 15 seconds (the db
                // connection must be failed)
                qs.notifyJobStoreJobComplete(schdCtxt, trigger, jobDetail,
                        instCode);
                return true;
            } catch (JobPersistenceException jpe) {
                if(count % 4 == 0)
                    qs.notifySchedulerListenersError(
                            "An error occured while marking executed job complete (will continue attempts). job= '"
                                    + jobDetail.getFullName() + "'", jpe);
            } catch (InterruptedException ignore) {
            }
            count++;
        }
        return false;
    }

    /**
     * Retries notifyJobStoreJobVetoed every 5 seconds until it succeeds or shutdown is
     * requested. NOTE(review): unlike completeTriggerRetryLoop this does not also check
     * qs.isShuttingDown() — confirm whether that asymmetry is intentional.
     * @return true if the job store was eventually notified, false if shut down first
     */
    public boolean vetoedJobRetryLoop(Trigger trigger, JobDetail jobDetail, int instCode) {
        while (!shutdownRequested) {
            try {
                Thread.sleep(5 * 1000L); // retry every 5 seconds (the db
                // connection must be failed)
                qs.notifyJobStoreJobVetoed(schdCtxt, trigger, jobDetail, instCode);
                return true;
            } catch (JobPersistenceException jpe) {
                qs.notifySchedulerListenersError(
                        "An error occured while marking executed job vetoed. job= '"
                                + jobDetail.getFullName() + "'", jpe);
            } catch (InterruptedException ignore) {
            }
        }
        return false;
    }

    // Internal control-flow signal: a TriggerListener vetoed the execution.
    class VetoedException extends Exception {
        public VetoedException() {
        }
    }
}
| |
/**
* VideoPlayerView.java
* ChilliSource
* Created by Ian Copland on 15/08/2012.
*
* The MIT License (MIT)
*
* Copyright (c) 2012 Tag Games Limited
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.chilliworks.chillisource.video;
import com.chilliworks.chillisource.core.Logging;
import android.content.Context;
import android.content.res.AssetFileDescriptor;
import android.graphics.Point;
import android.media.AudioManager;
import android.media.MediaPlayer;
import android.media.MediaPlayer.OnCompletionListener;
import android.media.MediaPlayer.OnErrorListener;
import android.media.MediaPlayer.OnPreparedListener;
import android.view.Display;
import android.view.MotionEvent;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import java.io.File;
import java.io.FileDescriptor;
import java.io.FileInputStream;
//========================================================
/// Media Player View
///
/// A view for displaying the media player.
//========================================================
public class VideoPlayerView extends SurfaceView implements OnPreparedListener, OnErrorListener, OnCompletionListener, SurfaceHolder.Callback
{
//------------------------------------------------------------------------
/// Constants
//------------------------------------------------------------------------
// Maximum press duration (ms) for a touch to count as a tap (see mqwTapTime).
private long kqwTapLengthInMS = 150;
//------------------------------------------------------------------------
/// Private Member Data
//------------------------------------------------------------------------
// Whether the video file lives inside the APK assets (true) or on the file system.
private boolean mbInAPK;
// Path to the video: asset path when mbInAPK, otherwise an absolute file path.
private String m_filePath;
// Byte offset/length of the video within the file; -1/-1 means "use the whole file".
private int m_fileOffset = -1;
private int m_fileLength = -1;
// Whether a tap on the view is allowed to dismiss the video.
private boolean mbCanDismissWithTap;
// Underlying player; null until the surface is created and PrepareVideoPlayer runs.
private MediaPlayer m_mediaPlayer;
// Position (ms) to seek to when playback starts.
private int mdwSeekPosition;
private VideoPlayerActivity mActivity;
// Timestamp of the last touch-down, used to measure tap length.
private long mqwTapTime;
//------------------------------------------------------------------------
/// Constructor
//------------------------------------------------------------------------
// Deprecated SurfaceHolder.setType is still required on pre-Honeycomb devices.
@SuppressWarnings("deprecation")
public VideoPlayerView(Context inContext, boolean inbInAPK, String instrFilename, int in_fileOffset, int in_fileLength, boolean inbCanDismissWithTap, int indwSeekPosition)
{
	super(inContext);
	mActivity = VideoPlayerActivity.GetActivity();
	mbInAPK = inbInAPK;
	m_filePath = instrFilename;
	m_fileOffset = in_fileOffset;
	m_fileLength = in_fileLength;
	mbCanDismissWithTap = inbCanDismissWithTap;
	mdwSeekPosition = indwSeekPosition;
	mqwTapTime = 0;
	//setup the holder; actual player creation is deferred to surfaceCreated()
	getHolder().setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
	getHolder().addCallback(this);
}
//--------------------------------------------------------------
/// Get Time
///
/// @return the current position through the video.
//--------------------------------------------------------------
//--------------------------------------------------------------
/// @return the current playback position in milliseconds, or 0
/// when no media player currently exists.
//--------------------------------------------------------------
public synchronized int GetTime()
{
	MediaPlayer player = m_mediaPlayer;
	return (player == null) ? 0 : player.getCurrentPosition();
}
//--------------------------------------------------------------
/// Prepare Media Player
///
/// Prepares the media player for use.
//--------------------------------------------------------------
//--------------------------------------------------------------
/// Prepare Media Player
///
/// Creates and configures the MediaPlayer (listeners, surface,
/// audio stream), points it at the video data, then kicks off
/// asynchronous preparation. No-op if a player already exists.
/// Any failure is routed through onError().
//--------------------------------------------------------------
private synchronized void PrepareVideoPlayer()
{
	if (m_mediaPlayer == null)
	{
		try
		{
			//setup the media player
			m_mediaPlayer = new MediaPlayer();
			m_mediaPlayer.setOnPreparedListener(this);
			m_mediaPlayer.setOnErrorListener(this);
			m_mediaPlayer.setOnCompletionListener(this);
			m_mediaPlayer.setDisplay(getHolder());
			m_mediaPlayer.setScreenOnWhilePlaying(true);
			m_mediaPlayer.setAudioStreamType(AudioManager.STREAM_MUSIC);
			//set the data source
			if (mbInAPK == true)
			{
				// video packaged inside the APK: open the asset and play from its
				// descriptor, optionally at a sub-range (m_fileOffset/m_fileLength)
				AssetFileDescriptor fileDesc = mActivity.getAssets().openFd(m_filePath);
				if (m_fileOffset == -1 && m_fileLength == -1)
				{
					m_mediaPlayer.setDataSource(fileDesc.getFileDescriptor(), fileDesc.getStartOffset(), fileDesc.getLength());
				}
				else
				{
					assert (m_fileLength > 0) : "Cannot create 0 size stream";
					// sub-range offset is relative to the asset's own start offset
					m_mediaPlayer.setDataSource(fileDesc.getFileDescriptor(), fileDesc.getStartOffset() + m_fileOffset, m_fileLength);
				}
			}
			else
			{
				if (m_fileOffset == -1 && m_fileLength == -1)
				{
					m_mediaPlayer.setDataSource(m_filePath);
				}
				else
				{
					assert (m_fileLength > 0) : "Cannot create 0 size stream";
					// NOTE(review): this FileInputStream is never closed — the FD must
					// stay valid for the player, but confirm there is no descriptor leak.
					FileInputStream stream = new FileInputStream(m_filePath);
					m_mediaPlayer.setDataSource(stream.getFD(), m_fileOffset, m_fileLength);
				}
			}
			//prepare the media player asynchronously; onPrepared() fires when ready
			m_mediaPlayer.prepareAsync();
		}
		catch (Exception e)
		{
			Logging.logError("Error trying to open video file: " + m_filePath);
			// funnel all setup failures through the standard error callback
			onError(m_mediaPlayer, 0, 0);
		}
	}
}
//--------------------------------------------------------------
/// Cleanup Video
///
/// Tears the video player down: stops playback if it is running,
/// releases the native player and drops our reference so a later
/// PrepareVideoPlayer() call can lazily rebuild it.
//--------------------------------------------------------------
public synchronized void Cleanup()
{
    //nothing to do if the player was never created or already released
    if (m_mediaPlayer == null)
    {
        return;
    }
    if (m_mediaPlayer.isPlaying())
    {
        m_mediaPlayer.stop();
    }
    m_mediaPlayer.release();
    m_mediaPlayer = null;
}
//--------------------------------------------------------------
/// surface Created
///
/// Called when the surface is created. If it is our own holder's
/// surface, starts preparing the video player; once preparation
/// completes, onPrepared() presents and starts the video.
///
/// @param inHolder the created holder.
//--------------------------------------------------------------
@Override public synchronized void surfaceCreated(SurfaceHolder inHolder)
{
    if (getHolder() != inHolder)
    {
        return;
    }
    //our surface is ready - build and prepare the player against it
    PrepareVideoPlayer();
}
//--------------------------------------------------------------
/// surface Changed
///
/// Called whenever the surface format or dimensions change.
/// Intentionally empty: the player is bound to the holder once in
/// PrepareVideoPlayer() and no per-change work is needed here.
///
/// @param inHolder the holder whose surface changed.
/// @param indwFormat the new pixel format.
/// @param indwWidth the new surface width.
/// @param indwHeight the new surface height.
//--------------------------------------------------------------
@Override public synchronized void surfaceChanged(SurfaceHolder inHolder, int indwFormat, int indwWidth, int indwHeight)
{
}
//--------------------------------------------------------------
/// surface Destroyed
///
/// Called whenever the surface is destroyed. Intentionally empty.
/// NOTE(review): no player teardown happens here - cleanup appears
/// to be driven through onCompletion()/Cleanup(); confirm the
/// surface cannot be destroyed mid-playback without either firing.
///
/// @param inHolder the destroyed holder.
//--------------------------------------------------------------
@Override public synchronized void surfaceDestroyed(SurfaceHolder inHolder)
{
}
//--------------------------------------------------------------
/// on Prepared
///
/// Called once the video is prepared. Computes an aspect-correct
/// letterbox fit of the video inside the screen, applies vertical
/// padding to the aspect container, then seeks and starts playback.
///
/// Fix: in the fit-to-height branch the scaled width was derived
/// from the video HEIGHT (dwVideoHeight * fWidthMultiplier), giving
/// h^2/w * scale instead of w * scale. Both branches now scale each
/// dimension directly by a single uniform scale factor. The unused
/// horizontal padding computation was removed (only vertical
/// padding is ever applied to the container).
//--------------------------------------------------------------
@SuppressWarnings("deprecation")
@Override public synchronized void onPrepared(MediaPlayer inMediaPlayer)
{
    if (m_mediaPlayer == inMediaPlayer)
    {
        int dwVideoHeight = m_mediaPlayer.getVideoHeight();
        int dwVideoWidth = m_mediaPlayer.getVideoWidth();
        int dwScreenWidth = 0;
        int dwScreenHeight = 0;
        Display display = mActivity.getWindowManager().getDefaultDisplay();
        if (android.os.Build.VERSION.SDK_INT < 13)
        {
            //Display.getSize() only exists from API 13 onwards
            dwScreenWidth = display.getWidth();
            dwScreenHeight = display.getHeight();
        }
        else
        {
            Point size = new Point();
            display.getSize(size);
            dwScreenWidth = size.x;
            dwScreenHeight = size.y;
        }
        //pick the axis whose size is closest to the screen's and fit to the other
        int dwDiffWidth = Math.abs(dwScreenWidth - dwVideoWidth);
        int dwDiffHeight = Math.abs(dwScreenHeight - dwVideoHeight);
        float fScale;
        if (dwDiffWidth > dwDiffHeight)
        {
            //fit to width: video spans the full screen width
            fScale = (float)dwScreenWidth / (float)dwVideoWidth;
        }
        else
        {
            //fit to height: video spans the full screen height
            fScale = (float)dwScreenHeight / (float)dwVideoHeight;
        }
        float fVideoViewWidth = dwVideoWidth * fScale;
        float fVideoViewHeight = dwVideoHeight * fScale;
        float fYPadding = dwScreenHeight - fVideoViewHeight;
        if (fYPadding < 0)
            fYPadding = 0;
        //split the leftover vertical space evenly above and below the video
        mActivity.mAspectViewContainer.setPadding(0, (int)(fYPadding * 0.5f), 0, (int)(fYPadding * 0.5f));
        //start the video from the requested position
        m_mediaPlayer.seekTo(mdwSeekPosition);
        m_mediaPlayer.start();
    }
}
//--------------------------------------------------------------
/// on Error
///
/// Error callback from the media player. When the failing player
/// is ours, logs the failure and routes through onCompletion() so
/// the standard teardown runs; otherwise reports it unhandled.
///
/// @return true if the error was handled here.
//--------------------------------------------------------------
@Override public synchronized boolean onError(MediaPlayer inMediaPlayer, int indwWhat, int indwExtra)
{
    if (m_mediaPlayer != inMediaPlayer)
    {
        return false;
    }
    Logging.logError("Media player has encountered an error while preparing.");
    onCompletion(inMediaPlayer);
    return true;
}
//--------------------------------------------------------------
/// on Completion
///
/// Called when the video has finished playing (or is dismissed /
/// errors out). Releases the player and closes the host activity.
//--------------------------------------------------------------
@Override public synchronized void onCompletion(MediaPlayer inMediaPlayer)
{
    if (m_mediaPlayer != inMediaPlayer)
    {
        return;
    }
    Cleanup();
    mActivity.finish();
}
//-----------------------------------------------------------------
/// On Touch Event
///
/// Watches touch input so a quick tap (down followed by up within
/// kqwTapLengthInMS) can dismiss the video when tap dismissal is
/// enabled. Always consumes the event.
///
/// @param event the motion event.
/// @return true - this view always swallows touch events.
//-----------------------------------------------------------------
@Override public synchronized boolean onTouchEvent(final MotionEvent event)
{
    if (mbCanDismissWithTap)
    {
        switch (event.getAction() & MotionEvent.ACTION_MASK)
        {
            case MotionEvent.ACTION_DOWN:
                //remember when the finger went down
                mqwTapTime = System.currentTimeMillis();
                break;
            case MotionEvent.ACTION_UP:
                //a short press counts as a dismissal tap
                if (System.currentTimeMillis() - mqwTapTime <= kqwTapLengthInMS)
                {
                    onCompletion(m_mediaPlayer);
                }
                break;
            default:
                break;
        }
    }
    return true;
}
//---------------------------------------------------------------
/// On Back Pressed
///
/// Called when the back button is pressed. Treated exactly like a
/// dismissal tap: only acts when tap dismissal is enabled.
//---------------------------------------------------------------
public void OnBackPressed()
{
    if (mbCanDismissWithTap)
    {
        onCompletion(m_mediaPlayer);
    }
}
}
| |
package eft.util;
import eft.Constants;
import eft.EftException;
import eft.crypto.Crypto;

import java.io.UnsupportedEncodingException;
import java.math.BigInteger;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.Date;
/**
 * Static conversion utilities: hex encoding/decoding, unsigned 64-bit id
 * formatting, RS-encoded account addresses, epoch timestamps, fixed-point
 * amount parsing and overflow-checked arithmetic.
 */
public final class Convert {
    /** Lower-case digits used by {@link #toHexString(byte[])}. */
    private static final char[] hexChars = { '0','1','2','3','4','5','6','7','8','9','a','b','c','d','e','f' };
    /** multipliers[d] == 10^d, for scaling values with d decimal places. */
    private static final long[] multipliers = {1, 10, 100, 1000, 10000, 100000, 1000000, 10000000, 100000000};
    /** 2^64; added to negative longs to render them as unsigned decimals. */
    public static final BigInteger two64 = new BigInteger("18446744073709551616");

    private Convert() {} //never instantiated - static helpers only

    /**
     * Decodes a hex string into bytes. Both lower- and upper-case digits
     * are accepted (the previous implementation handled lower case only).
     *
     * @param hex an even-length hex string, or null
     * @return the decoded bytes, or null if {@code hex} is null
     * @throws NumberFormatException on an odd length or a non-hex digit
     *         (an odd trailing digit used to be silently dropped)
     */
    public static byte[] parseHexString(String hex) {
        if (hex == null) {
            return null;
        }
        if (hex.length() % 2 != 0) {
            throw new NumberFormatException("Invalid hex number: " + hex);
        }
        byte[] bytes = new byte[hex.length() / 2];
        for (int i = 0; i < bytes.length; i++) {
            int hi = Character.digit(hex.charAt(i * 2), 16);
            int lo = Character.digit(hex.charAt(i * 2 + 1), 16);
            if (hi < 0 || lo < 0) {
                throw new NumberFormatException("Invalid hex number: " + hex);
            }
            bytes[i] = (byte)((hi << 4) + lo);
        }
        return bytes;
    }

    /**
     * Encodes bytes as a lower-case hex string.
     *
     * @return the hex string, or null if {@code bytes} is null
     */
    public static String toHexString(byte[] bytes) {
        if (bytes == null) {
            return null;
        }
        char[] chars = new char[bytes.length * 2];
        for (int i = 0; i < bytes.length; i++) {
            chars[i * 2] = hexChars[((bytes[i] >> 4) & 0xF)];
            chars[i * 2 + 1] = hexChars[(bytes[i] & 0xF)];
        }
        return String.valueOf(chars);
    }

    /**
     * Formats a long as an unsigned decimal string, treating negative
     * values as the upper half of the unsigned 64-bit range.
     */
    public static String toUnsignedLong(long objectId) {
        if (objectId >= 0) {
            return String.valueOf(objectId);
        }
        BigInteger id = BigInteger.valueOf(objectId).add(two64);
        return id.toString();
    }

    /** Boxed variant of {@link #toUnsignedLong(long)}; null is treated as 0. */
    public static String toUnsignedLong(Long objectId) {
        return toUnsignedLong(nullToZero(objectId));
    }

    /**
     * Parses an unsigned decimal string into a (possibly negative) long.
     * Note: returns null both for a null argument and for the value "0"
     * (see {@link #zeroToNull(long)}) - callers use null/0 for "absent".
     *
     * @throws IllegalArgumentException if the value is negative or &gt;= 2^64
     */
    public static Long parseUnsignedLong(String number) {
        if (number == null) {
            return null;
        }
        BigInteger bigInt = new BigInteger(number.trim());
        if (bigInt.signum() < 0 || bigInt.compareTo(two64) >= 0) {
            throw new IllegalArgumentException("overflow: " + number);
        }
        return zeroToNull(bigInt.longValue());
    }

    /**
     * Coerces a Long or a decimal String to a primitive long; null maps to 0.
     *
     * @throws IllegalArgumentException for any other runtime type
     */
    public static long parseLong(Object o) {
        if (o == null) {
            return 0;
        } else if (o instanceof Long) {
            return ((Long)o);
        } else if (o instanceof String) {
            return Long.parseLong((String)o);
        } else {
            throw new IllegalArgumentException("Not a long: " + o);
        }
    }

    /**
     * Parses an account id given either as an "EFT-" Reed-Solomon address
     * or as an unsigned decimal number (case-insensitive prefix).
     *
     * @return the account id, or null for null input or id 0
     */
    public static Long parseAccountId(String account) {
        if (account == null) {
            return null;
        }
        account = account.toUpperCase();
        if (account.startsWith("EFT-")) {
            return zeroToNull(Crypto.rsDecode(account.substring(4)));
        } else {
            return parseUnsignedLong(account);
        }
    }

    /** Formats an account id as an "EFT-" RS address; a null id is encoded as 0. */
    public static String rsAccount(Long accountId) {
        return "EFT-" + Crypto.rsEncode(nullToZero(accountId));
    }

    /**
     * Derives the 64-bit object id from a full hash: the first 8 bytes
     * interpreted as a little-endian unsigned value, narrowed to long.
     *
     * @throws IllegalArgumentException if the hash is null or shorter than 8 bytes
     */
    public static Long fullHashToId(byte[] hash) {
        if (hash == null || hash.length < 8) {
            throw new IllegalArgumentException("Invalid hash: " + Arrays.toString(hash));
        }
        BigInteger bigInteger = new BigInteger(1, new byte[] {hash[7], hash[6], hash[5], hash[4], hash[3], hash[2], hash[1], hash[0]});
        return bigInteger.longValue();
    }

    /** Hex-string variant of {@link #fullHashToId(byte[])}; null maps to null. */
    public static Long fullHashToId(String hash) {
        if (hash == null) {
            return null;
        }
        return fullHashToId(Convert.parseHexString(hash));
    }

    /** @return whole seconds elapsed since the chain's epoch beginning. */
    public static int getEpochTime() {
        return (int)((System.currentTimeMillis() - Constants.EPOCH_BEGINNING ) / 1000);
    }

    /** Converts an epoch-relative time in seconds back to a java.util.Date. */
    public static Date fromEpochTime(int epochTime) {
        return new Date(epochTime * 1000L + Constants.EPOCH_BEGINNING );
    }

    /** Maps the sentinel value 0 to null; any other value is boxed unchanged. */
    public static Long zeroToNull(long l) {
        return l == 0 ? null : l;
    }

    /** Maps null to 0; any other value is unboxed unchanged. */
    public static long nullToZero(Long l) {
        return l == null ? 0 : l;
    }

    /** Maps null to 0; any other value is unboxed unchanged. */
    public static int nullToZero(Integer i) {
        return i == null ? 0 : i;
    }

    /** Maps null or "" to null; any other string is returned unchanged. */
    public static String emptyToNull(String s) {
        return s == null || s.length() == 0 ? null : s;
    }

    /** Maps null to ""; any other string is returned unchanged. */
    public static String nullToEmpty(String s) {
        return s == null ? "" : s;
    }

    /** Maps null or an all-zero array to null; otherwise returns the array as-is. */
    public static byte[] emptyToNull(byte[] bytes) {
        if (bytes == null) {
            return null;
        }
        for (byte b : bytes) {
            if (b != 0) {
                return bytes;
            }
        }
        return null;
    }

    /**
     * UTF-8 encodes a string. Uses StandardCharsets so no impossible
     * UnsupportedEncodingException needs to be caught (UTF-8 is mandatory).
     */
    public static byte[] toBytes(String s) {
        return s.getBytes(StandardCharsets.UTF_8);
    }

    /**
     * UTF-8 decodes bytes, trims surrounding whitespace and interns the
     * result. NOTE(review): intern() presumably dedupes frequently repeated
     * fixed-size field values - confirm before removing.
     */
    public static String toString(byte[] bytes) {
        return new String(bytes, StandardCharsets.UTF_8).trim().intern();
    }

    /**
     * Reads {@code numBytes} from the buffer and decodes them as UTF-8.
     * The 3x factor bounds the byte length of a maxLength-character string
     * (worst-case UTF-8 expansion for the permitted characters).
     *
     * @throws EftException.ValidationException if numBytes exceeds that bound
     */
    public static String readString(ByteBuffer buffer, int numBytes, int maxLength) throws EftException.ValidationException {
        if (numBytes > 3 * maxLength) {
            throw new EftException.NotValidException("Max parameter length exceeded");
        }
        byte[] bytes = new byte[numBytes];
        buffer.get(bytes);
        return Convert.toString(bytes);
    }

    /**
     * Truncates a string to {@code limit} characters, optionally ending in
     * "..." (the dots count towards the limit); null maps to replaceNull.
     */
    public static String truncate(String s, String replaceNull, int limit, boolean dots) {
        return s == null ? replaceNull : s.length() > limit ? (s.substring(0, dots ? limit - 3 : limit) + (dots ? "..." : "")) : s;
    }

    /**
     * Parses a decimal EFT amount (up to 8 fractional digits) into its
     * smallest-unit long representation.
     */
    public static long parseEFT(String eft) {
        return parseStringFraction(eft, 8, Constants.MAX_BALANCE_EFT);
    }

    /**
     * Parses "whole[.fraction]" into a scaled long with {@code decimals}
     * fractional digits. NOTE(review): assumes a non-negative input - a
     * negative whole part with a fractional part would be mis-parsed since
     * the fraction is always added; confirm callers validate sign upstream.
     *
     * @throws NumberFormatException on a malformed number
     * @throws IllegalArgumentException if either part exceeds its bound
     */
    private static long parseStringFraction(String value, int decimals, long maxValue) {
        String[] s = value.trim().split("\\.");
        if (s.length == 0 || s.length > 2) {
            throw new NumberFormatException("Invalid number: " + value);
        }
        long wholePart = Long.parseLong(s[0]);
        if (wholePart > maxValue) {
            throw new IllegalArgumentException("Whole part of value exceeds maximum possible");
        }
        if (s.length == 1) {
            return wholePart * multipliers[decimals];
        }
        long fractionalPart = Long.parseLong(s[1]);
        if (fractionalPart >= multipliers[decimals] || s[1].length() > decimals) {
            throw new IllegalArgumentException("Fractional part exceeds maximum allowed divisibility");
        }
        //right-pad the fraction with zeros up to the full number of decimals
        for (int i = s[1].length(); i < decimals; i++) {
            fractionalPart *= 10;
        }
        return wholePart * multipliers[decimals] + fractionalPart;
    }

    // overflow checking based on https://www.securecoding.cert.org/confluence/display/java/NUM00-J.+Detect+or+prevent+integer+overflow

    /** @throws ArithmeticException if left + right overflows a long */
    public static long safeAdd(long left, long right)
    throws ArithmeticException {
        if (right > 0 ? left > Long.MAX_VALUE - right
                      : left < Long.MIN_VALUE - right) {
            throw new ArithmeticException("Integer overflow");
        }
        return left + right;
    }

    /** @throws ArithmeticException if left - right overflows a long */
    public static long safeSubtract(long left, long right)
    throws ArithmeticException {
        if (right > 0 ? left < Long.MIN_VALUE + right
                      : left > Long.MAX_VALUE + right) {
            throw new ArithmeticException("Integer overflow");
        }
        return left - right;
    }

    /** @throws ArithmeticException if left * right overflows a long */
    public static long safeMultiply(long left, long right)
    throws ArithmeticException {
        if (right > 0 ? left > Long.MAX_VALUE/right
                        || left < Long.MIN_VALUE/right
                      : (right < -1 ? left > Long.MIN_VALUE/right
                                      || left < Long.MAX_VALUE/right
                                    : right == -1
                                      && left == Long.MIN_VALUE) ) {
            throw new ArithmeticException("Integer overflow");
        }
        return left * right;
    }

    /** @throws ArithmeticException if left / right overflows (MIN_VALUE / -1) */
    public static long safeDivide(long left, long right)
    throws ArithmeticException {
        if ((left == Long.MIN_VALUE) && (right == -1)) {
            throw new ArithmeticException("Integer overflow");
        }
        return left / right;
    }

    /** @throws ArithmeticException if negating overflows (a == MIN_VALUE) */
    public static long safeNegate(long a) throws ArithmeticException {
        if (a == Long.MIN_VALUE) {
            throw new ArithmeticException("Integer overflow");
        }
        return -a;
    }

    /** @throws ArithmeticException if the absolute value overflows (a == MIN_VALUE) */
    public static long safeAbs(long a) throws ArithmeticException {
        if (a == Long.MIN_VALUE) {
            throw new ArithmeticException("Integer overflow");
        }
        return Math.abs(a);
    }
}
| |
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.execution.impl;
import com.intellij.execution.filters.Filter;
import com.intellij.execution.filters.HyperlinkInfo;
import com.intellij.execution.filters.HyperlinkInfoBase;
import com.intellij.ide.OccurenceNavigator;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.LogicalPosition;
import com.intellij.openapi.editor.colors.CodeInsightColors;
import com.intellij.openapi.editor.colors.EditorColorsManager;
import com.intellij.openapi.editor.event.EditorMouseEvent;
import com.intellij.openapi.editor.event.EditorMouseEventArea;
import com.intellij.openapi.editor.event.EditorMouseListener;
import com.intellij.openapi.editor.event.EditorMouseMotionListener;
import com.intellij.openapi.editor.ex.EditorEx;
import com.intellij.openapi.editor.ex.MarkupModelEx;
import com.intellij.openapi.editor.ex.RangeHighlighterEx;
import com.intellij.openapi.editor.ex.util.EditorUtil;
import com.intellij.openapi.editor.markup.HighlighterLayer;
import com.intellij.openapi.editor.markup.HighlighterTargetArea;
import com.intellij.openapi.editor.markup.RangeHighlighter;
import com.intellij.openapi.editor.markup.TextAttributes;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Key;
import com.intellij.pom.NavigatableAdapter;
import com.intellij.ui.awt.RelativePoint;
import com.intellij.util.CommonProcessors;
import com.intellij.util.Consumer;
import com.intellij.util.FilteringProcessor;
import com.intellij.util.containers.hash.LinkedHashMap;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.awt.*;
import java.awt.event.MouseEvent;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
/**
 * Manages hyperlinks and filter-based highlights inside an editor (typically
 * a console view): creates link highlighters, runs console {@link Filter}s
 * asynchronously, navigates between links, tracks the "followed link"
 * attribute swap, and wires up mouse handling (click-to-navigate, hand
 * cursor on hover). One instance is cached per editor via {@link #get(Editor)}.
 *
 * @author peter
 */
public class EditorHyperlinkSupport {
// Stashes a link's original attributes while it is rendered as "followed".
private static final Key<TextAttributes> OLD_HYPERLINK_TEXT_ATTRIBUTES = Key.create("OLD_HYPERLINK_TEXT_ATTRIBUTES");
// Carries the HyperlinkInfo (plus optional followed-state attributes) on a link highlighter.
private static final Key<HyperlinkInfoTextAttributes> HYPERLINK = Key.create("HYPERLINK");
// Per-editor cached instance used by get(Editor).
private static final Key<EditorHyperlinkSupport> EDITOR_HYPERLINK_SUPPORT_KEY = Key.create("EDITOR_HYPERLINK_SUPPORT_KEY");
private final EditorEx myEditor;
@NotNull private final Project myProject;
private final AsyncFilterRunner myFilterRunner;
/**
 * If your editor has a project inside, better use {@link #get(Editor)}
 */
public EditorHyperlinkSupport(@NotNull final Editor editor, @NotNull final Project project) {
myEditor = (EditorEx)editor;
myProject = project;
myFilterRunner = new AsyncFilterRunner(this, myEditor);
editor.addEditorMouseListener(new EditorMouseListener() {
@Override
public void mouseClicked(@NotNull EditorMouseEvent e) {
final MouseEvent mouseEvent = e.getMouseEvent();
// plain left click (not a popup trigger) follows the link under the cursor
if (mouseEvent.getButton() == MouseEvent.BUTTON1 && !mouseEvent.isPopupTrigger()) {
Runnable runnable = getLinkNavigationRunnable(myEditor.xyToLogicalPosition(e.getMouseEvent().getPoint()));
if (runnable != null) {
runnable.run();
}
}
}
});
editor.addEditorMouseMotionListener(new EditorMouseMotionListener() {
@Override
public void mouseMoved(@NotNull EditorMouseEvent e) {
if (e.getArea() != EditorMouseEventArea.EDITING_AREA) return;
// show the hand cursor while hovering a link; reset otherwise
final HyperlinkInfo info = getHyperlinkInfoByPoint(e.getMouseEvent().getPoint());
myEditor.setCustomCursor(EditorHyperlinkSupport.class, info == null ? null : Cursor.getPredefinedCursor(Cursor.HAND_CURSOR));
}
}
);
}
/**
 * Returns the support instance cached on the editor, creating and caching
 * it on first use. The editor must have a non-null project.
 */
public static EditorHyperlinkSupport get(@NotNull final Editor editor) {
EditorHyperlinkSupport instance = editor.getUserData(EDITOR_HYPERLINK_SUPPORT_KEY);
if (instance == null) {
Project project = editor.getProject();
assert project != null;
instance = new EditorHyperlinkSupport(editor, project);
editor.putUserData(EDITOR_HYPERLINK_SUPPORT_KEY, instance);
}
return instance;
}
/** Removes every hyperlink highlighter from the whole document. */
public void clearHyperlinks() {
for (RangeHighlighter highlighter : getHyperlinks(0, myEditor.getDocument().getTextLength(), myEditor)) {
removeHyperlink(highlighter);
}
}
/** Blocks until pending asynchronous filters finish or the timeout elapses. */
@SuppressWarnings("SameParameterValue")
public void waitForPendingFilters(long timeoutMs) {
myFilterRunner.waitForPendingFilters(timeoutMs);
}
/**
 * @deprecated snapshot of every link highlighter mapped to its info;
 * prefer the offset-ranged queries which avoid building the whole map.
 */
@Deprecated
public Map<RangeHighlighter, HyperlinkInfo> getHyperlinks() {
LinkedHashMap<RangeHighlighter, HyperlinkInfo> result = new LinkedHashMap<>();
for (RangeHighlighter highlighter : getHyperlinks(0, myEditor.getDocument().getTextLength(), myEditor)) {
HyperlinkInfo info = getHyperlinkInfo(highlighter);
if (info != null) {
result.put(highlighter, info);
}
}
return result;
}
/**
 * Returns a runnable that navigates the link at the given position, or
 * null when there is no link there (or the position is in virtual space).
 * HyperlinkInfoBase links receive a synthetic mouse point for popups.
 */
@Nullable
public Runnable getLinkNavigationRunnable(final LogicalPosition logical) {
if (EditorUtil.inVirtualSpace(myEditor, logical)) {
return null;
}
final RangeHighlighter range = findLinkRangeAt(myEditor.logicalPositionToOffset(logical));
if (range != null) {
final HyperlinkInfo hyperlinkInfo = getHyperlinkInfo(range);
if (hyperlinkInfo != null) {
return () -> {
if (hyperlinkInfo instanceof HyperlinkInfoBase) {
final Point point = myEditor.logicalPositionToXY(logical);
final MouseEvent event = new MouseEvent(myEditor.getContentComponent(), 0, 0, 0, point.x, point.y, 1, false);
((HyperlinkInfoBase)hyperlinkInfo).navigate(myProject, new RelativePoint(event));
}
else {
hyperlinkInfo.navigate(myProject);
}
linkFollowed(myEditor, getHyperlinks(0, myEditor.getDocument().getTextLength(),myEditor), range);
};
}
}
return null;
}
/** Extracts the HyperlinkInfo attached to a highlighter, or null if it is not a link. */
@Nullable
public static HyperlinkInfo getHyperlinkInfo(@NotNull RangeHighlighter range) {
final HyperlinkInfoTextAttributes attributes = range.getUserData(HYPERLINK);
return attributes != null ? attributes.getHyperlinkInfo() : null;
}
/** First link highlighter covering the offset, or null. */
@Nullable
private RangeHighlighter findLinkRangeAt(final int offset) {
//noinspection LoopStatementThatDoesntLoop
for (final RangeHighlighter highlighter : getHyperlinks(offset, offset, myEditor)) {
return highlighter;
}
return null;
}
/** Link info at the given offset, or null if no link covers it. */
@Nullable
private HyperlinkInfo getHyperlinkAt(final int offset) {
RangeHighlighter range = findLinkRangeAt(offset);
return range == null ? null : getHyperlinkInfo(range);
}
/** All link highlighters intersecting the given document line. */
public List<RangeHighlighter> findAllHyperlinksOnLine(int line) {
final int lineStart = myEditor.getDocument().getLineStartOffset(line);
final int lineEnd = myEditor.getDocument().getLineEndOffset(line);
return getHyperlinks(lineStart, lineEnd, myEditor);
}
/** Collects valid link highlighters overlapping [startOffset, endOffset]. */
private static List<RangeHighlighter> getHyperlinks(int startOffset, int endOffset, final Editor editor) {
final MarkupModelEx markupModel = (MarkupModelEx)editor.getMarkupModel();
final CommonProcessors.CollectProcessor<RangeHighlighterEx> processor = new CommonProcessors.CollectProcessor<>();
markupModel.processRangeHighlightersOverlappingWith(startOffset, endOffset,
new FilteringProcessor<>(
rangeHighlighterEx -> rangeHighlighterEx.isValid() &&
getHyperlinkInfo(rangeHighlighterEx) != null, processor)
);
return new ArrayList<>(processor.getResults());
}
/** Deletes a single link highlighter from the markup model. */
public void removeHyperlink(@NotNull RangeHighlighter hyperlink) {
myEditor.getMarkupModel().removeHighlighter(hyperlink);
}
/** Link info at the given line/column, or null if none. */
@Nullable
public HyperlinkInfo getHyperlinkInfoByLineAndCol(final int line, final int col) {
return getHyperlinkAt(myEditor.logicalPositionToOffset(new LogicalPosition(line, col)));
}
/**
 * @deprecated for binary compatibility with older plugins
 * @see #createHyperlink(int, int, TextAttributes, HyperlinkInfo)
 */
@Deprecated
public void addHyperlink(final int highlightStartOffset,
final int highlightEndOffset,
@Nullable final TextAttributes highlightAttributes,
@NotNull final HyperlinkInfo hyperlinkInfo) {
createHyperlink(highlightStartOffset, highlightEndOffset, highlightAttributes, hyperlinkInfo);
}
/** Turns an existing highlighter into a hyperlink by attaching its info. */
public void createHyperlink(@NotNull RangeHighlighter highlighter, @NotNull HyperlinkInfo hyperlinkInfo) {
associateHyperlink(highlighter, hyperlinkInfo, null);
}
/**
 * Creates a link highlighter over the given range at the standard
 * hyperlink layer, using the default link attributes unless overridden.
 */
@NotNull
public RangeHighlighter createHyperlink(int highlightStartOffset,
int highlightEndOffset,
@Nullable TextAttributes highlightAttributes,
@NotNull HyperlinkInfo hyperlinkInfo) {
return createHyperlink(highlightStartOffset, highlightEndOffset, highlightAttributes, hyperlinkInfo, null,
HighlighterLayer.HYPERLINK);
}
/** Full-control variant: explicit followed-state attributes and markup layer. */
@NotNull
private RangeHighlighter createHyperlink(final int highlightStartOffset,
final int highlightEndOffset,
@Nullable final TextAttributes highlightAttributes,
@NotNull final HyperlinkInfo hyperlinkInfo,
@Nullable TextAttributes followedHyperlinkAttributes,
int layer) {
TextAttributes textAttributes = highlightAttributes != null ? highlightAttributes : getHyperlinkAttributes();
final RangeHighlighter highlighter = myEditor.getMarkupModel().addRangeHighlighter(highlightStartOffset,
highlightEndOffset,
layer,
textAttributes,
HighlighterTargetArea.EXACT_RANGE);
associateHyperlink(highlighter, hyperlinkInfo, followedHyperlinkAttributes);
return highlighter;
}
/**
 * @deprecated Use {@link #get(Editor)} and then {@link #createHyperlink(RangeHighlighter, HyperlinkInfo)}
 */
@Deprecated
public static void associateHyperlink(@NotNull RangeHighlighter highlighter, @NotNull HyperlinkInfo hyperlinkInfo) {
associateHyperlink(highlighter, hyperlinkInfo, null);
}
/** Attaches link info (and optional followed-state attributes) to a highlighter. */
private static void associateHyperlink(@NotNull RangeHighlighter highlighter,
@NotNull HyperlinkInfo hyperlinkInfo,
@Nullable TextAttributes followedHyperlinkAttributes) {
highlighter.putUserData(HYPERLINK, new HyperlinkInfoTextAttributes(hyperlinkInfo, followedHyperlinkAttributes));
}
/** Link info under the given pixel point, or null (virtual space yields null). */
@Nullable
public HyperlinkInfo getHyperlinkInfoByPoint(final Point p) {
final LogicalPosition pos = myEditor.xyToLogicalPosition(new Point(p.x, p.y));
if (EditorUtil.inVirtualSpace(myEditor, pos)) {
return null;
}
return getHyperlinkInfoByLineAndCol(pos.line, pos.column);
}
/** @deprecated combines two filters, with the custom filter taking precedence. */
@Deprecated
public void highlightHyperlinks(final Filter customFilter, final Filter predefinedMessageFilter, final int line1, final int endLine) {
highlightHyperlinks((line, entireLength) -> {
Filter.Result result = customFilter.applyFilter(line, entireLength);
return result != null ? result : predefinedMessageFilter.applyFilter(line, entireLength);
}, line1, endLine);
}
/** Runs the filter asynchronously over the given line range (start clamped to 0). */
public void highlightHyperlinks(final Filter customFilter, final int line1, final int endLine) {
myFilterRunner.highlightHyperlinks(customFilter, Math.max(0, line1), endLine);
}
/**
 * Applies one filter result to the document: creates hyperlinks for items
 * carrying a HyperlinkInfo and plain highlighters for attribute-only items.
 * Offsets are shifted by offsetDelta; items falling outside the current
 * document bounds are skipped (the document may have shrunk since filtering).
 */
void highlightHyperlinks(@NotNull Filter.Result result, int offsetDelta) {
Document document = myEditor.getDocument();
for (Filter.ResultItem resultItem : result.getResultItems()) {
int start = resultItem.getHighlightStartOffset() + offsetDelta;
int end = resultItem.getHighlightEndOffset() + offsetDelta;
if (start < 0 || end < start || end > document.getTextLength()) {
continue;
}
TextAttributes attributes = resultItem.getHighlightAttributes();
if (resultItem.getHyperlinkInfo() != null) {
createHyperlink(start, end, attributes, resultItem.getHyperlinkInfo(), resultItem.getFollowedHyperlinkAttributes(),
resultItem.getHighlighterLayer());
}
else if (attributes != null) {
addHighlighter(start, end, attributes, resultItem.getHighlighterLayer());
}
}
}
/** Adds a non-link highlighter at the default console-filter layer. */
public void addHighlighter(int highlightStartOffset, int highlightEndOffset, TextAttributes highlightAttributes) {
addHighlighter(highlightStartOffset, highlightEndOffset, highlightAttributes, HighlighterLayer.CONSOLE_FILTER);
}
/** Adds a non-link highlighter at an explicit markup layer. */
public void addHighlighter(int highlightStartOffset, int highlightEndOffset, TextAttributes highlightAttributes, int highlighterLayer) {
myEditor.getMarkupModel().addRangeHighlighter(highlightStartOffset, highlightEndOffset, highlighterLayer, highlightAttributes,
HighlighterTargetArea.EXACT_RANGE);
}
/** Scheme-defined attributes for a not-yet-followed hyperlink. */
private static TextAttributes getHyperlinkAttributes() {
return EditorColorsManager.getInstance().getGlobalScheme().getAttributes(CodeInsightColors.HYPERLINK_ATTRIBUTES);
}
/** Followed-link attributes: per-link override if set, else the scheme default. */
@NotNull
private static TextAttributes getFollowedHyperlinkAttributes(@NotNull RangeHighlighter range) {
HyperlinkInfoTextAttributes attrs = HYPERLINK.get(range);
TextAttributes result = attrs != null ? attrs.getFollowedHyperlinkAttributes() : null;
if (result == null) {
result = EditorColorsManager.getInstance().getGlobalScheme().getAttributes(CodeInsightColors.FOLLOWED_HYPERLINK_ATTRIBUTES);
}
return result;
}
/**
 * Finds the next/previous link for occurrence navigation. Starts from the
 * currently "followed" link (marked by OLD_HYPERLINK_TEXT_ATTRIBUTES),
 * steps by delta with wrap-around, skips links excluded from occurrence
 * navigation or hidden in collapsed folds, and returns a navigatable that
 * runs the given action and marks the link followed. Null if none found.
 */
@Nullable
public static OccurenceNavigator.OccurenceInfo getNextOccurrence(final Editor editor,
final int delta,
final Consumer<? super RangeHighlighter> action) {
final List<RangeHighlighter> ranges = getHyperlinks(0, editor.getDocument().getTextLength(),editor);
if (ranges.isEmpty()) {
return null;
}
int i;
for (i = 0; i < ranges.size(); i++) {
RangeHighlighter range = ranges.get(i);
if (range.getUserData(OLD_HYPERLINK_TEXT_ATTRIBUTES) != null) {
break;
}
}
// if no link is currently followed, the loop ends with i == ranges.size(); wrap to 0
i %= ranges.size();
int newIndex = i;
while (newIndex < ranges.size() && newIndex >= 0) {
newIndex = (newIndex + delta + ranges.size()) % ranges.size();
final RangeHighlighter next = ranges.get(newIndex);
HyperlinkInfo info = getHyperlinkInfo(next);
assert info != null;
if (info.includeInOccurenceNavigation()) {
boolean inCollapsedRegion = editor.getFoldingModel().getCollapsedRegionAtOffset(next.getStartOffset()) != null;
if (!inCollapsedRegion) {
return new OccurenceNavigator.OccurenceInfo(new NavigatableAdapter() {
@Override
public void navigate(final boolean requestFocus) {
action.consume(next);
linkFollowed(editor, ranges, next);
}
}, newIndex == -1 ? -1 : newIndex + 1, ranges.size());
}
}
if (newIndex == i) {
break; // cycled through everything, found no next/prev hyperlink
}
}
return null;
}
// todo fix link followed here!
// Marks one link as "followed": restores the saved attributes on every other
// link, then saves the chosen link's attributes and applies the followed style.
private static void linkFollowed(Editor editor, Collection<? extends RangeHighlighter> ranges, final RangeHighlighter link) {
MarkupModelEx markupModel = (MarkupModelEx)editor.getMarkupModel();
for (RangeHighlighter range : ranges) {
TextAttributes oldAttr = range.getUserData(OLD_HYPERLINK_TEXT_ATTRIBUTES);
if (oldAttr != null) {
markupModel.setRangeHighlighterAttributes(range, oldAttr);
range.putUserData(OLD_HYPERLINK_TEXT_ATTRIBUTES, null);
}
if (range == link) {
range.putUserData(OLD_HYPERLINK_TEXT_ATTRIBUTES, range.getTextAttributes());
markupModel.setRangeHighlighterAttributes(range, getFollowedHyperlinkAttributes(range));
}
}
//refresh highlighter text attributes
markupModel.addRangeHighlighter(0, 0, link.getLayer(), getHyperlinkAttributes(), HighlighterTargetArea.EXACT_RANGE).dispose();
}
/** Text of the given line, optionally including its trailing line break. */
@NotNull
public static String getLineText(@NotNull Document document, int lineNumber, boolean includeEol) {
return getLineSequence(document, lineNumber, includeEol).toString();
}
/** Char sequence of the given line; EOL is included only if one exists. */
@NotNull
private static CharSequence getLineSequence(@NotNull Document document, int lineNumber, boolean includeEol) {
int endOffset = document.getLineEndOffset(lineNumber);
if (includeEol && endOffset < document.getTextLength()) {
endOffset++;
}
return document.getImmutableCharSequence().subSequence(document.getLineStartOffset(lineNumber), endOffset);
}
/**
 * User-data payload stored on link highlighters: the link's navigation
 * info plus an optional per-link "followed" attribute override.
 */
private static class HyperlinkInfoTextAttributes extends TextAttributes {
private final HyperlinkInfo myHyperlinkInfo;
private final TextAttributes myFollowedHyperlinkAttributes;
HyperlinkInfoTextAttributes(@NotNull HyperlinkInfo hyperlinkInfo, @Nullable TextAttributes followedHyperlinkAttributes) {
myHyperlinkInfo = hyperlinkInfo;
myFollowedHyperlinkAttributes = followedHyperlinkAttributes;
}
@NotNull
HyperlinkInfo getHyperlinkInfo() {
return myHyperlinkInfo;
}
@Nullable
TextAttributes getFollowedHyperlinkAttributes() {
return myFollowedHyperlinkAttributes;
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.hadoop;
import java.io.IOException;
import java.util.concurrent.atomic.AtomicReference;
/**
* Error simulator.
*/
public class HadoopErrorSimulator {
/** No-op singleton instance: a simulator that raises no errors at all. */
public static final HadoopErrorSimulator noopInstance = new HadoopErrorSimulator();
/** Holder for the globally active simulator; defaults to the no-op instance. */
private static final AtomicReference<HadoopErrorSimulator> ref = new AtomicReference<>(noopInstance);
/**
 * Creates a simulator of the given kind with the given stage bits.
 *
 * @param kind The kind of throwable the simulator should raise.
 * @param bits The stage bits selecting which map-reduce stages fail.
 * @return The simulator.
 */
public static HadoopErrorSimulator create(Kind kind, int bits) {
    if (kind == Kind.Noop)
        return noopInstance;
    if (kind == Kind.Runtime)
        return new RuntimeExceptionBitHadoopErrorSimulator(bits);
    if (kind == Kind.IOException)
        return new IOExceptionBitHadoopErrorSimulator(bits);
    if (kind == Kind.Error)
        return new ErrorBitHadoopErrorSimulator(bits);
    throw new IllegalStateException("Unknown kind: " + kind);
}
/**
 * Gets the currently installed simulator instance (the no-op singleton
 * unless a test has swapped in an error-producing one via setInstance()).
 */
public static HadoopErrorSimulator instance() {
return ref.get();
}
/**
 * Atomically replaces the active simulator instance.
 *
 * @param expect The instance expected to be currently installed.
 * @param update The new instance to install.
 * @return True if the swap succeeded.
 */
public static boolean setInstance(HadoopErrorSimulator expect, HadoopErrorSimulator update) {
return ref.compareAndSet(expect, update);
}
/**
 * Constructor. Private: external code uses the no-op singleton or the
 * {@code create()} factory; error-raising subclasses are nested here.
 */
private HadoopErrorSimulator() {
// no-op
}
/**
 * Invoked on the mapper configure stage; no-op in the base simulator.
 */
public void onMapConfigure() {
// no-op
}
/**
 * Invoked on the mapper setup stage; no-op in the base simulator.
 */
public void onMapSetup() throws IOException, InterruptedException {
// no-op
}
/**
 * Invoked on each map call; no-op in the base simulator.
 */
public void onMap() throws IOException {
// no-op
}
/**
 * Invoked on the mapper cleanup stage; no-op in the base simulator.
 */
public void onMapCleanup() throws IOException, InterruptedException {
// no-op
}
/**
 * Invoked when the mapper is closed; no-op in the base simulator.
 */
public void onMapClose() throws IOException {
// no-op
}
/**
 * Invoked on the combiner configure stage; no-op in the base simulator.
 * setConf() does not declare IOException to be thrown, hence no throws clause.
 */
public void onCombineConfigure() {
// no-op
}
/**
 * Invoked on the combiner setup stage; no-op in the base simulator.
 */
public void onCombineSetup() throws IOException, InterruptedException {
// no-op
}
/**
 * Invoked on each combine call; no-op in the base simulator.
 */
public void onCombine() throws IOException {
// no-op
}
/**
 * Invoked on the combiner cleanup stage; no-op in the base simulator.
 */
public void onCombineCleanup() throws IOException, InterruptedException {
// no-op
}
/**
 * Invoked on the reducer configure stage; no-op in the base simulator.
 * Declares no checked exceptions because configure() cannot throw them.
 */
public void onReduceConfigure() {
// no-op
}
/**
 * Invoked on the reducer setup stage; no-op in the base simulator.
 */
public void onReduceSetup() throws IOException, InterruptedException {
// no-op
}
/**
 * Invoked on each reduce call; no-op in the base simulator.
 */
public void onReduce() throws IOException {
// no-op
}
/**
 * Invoked on the reducer cleanup stage; no-op in the base simulator.
 */
public void onReduceCleanup() throws IOException, InterruptedException {
// no-op
}
/**
 * Error kind: selects which throwable type the simulator raises.
 */
public enum Kind {
/** No error. */
Noop,
/** Runtime. */
Runtime,
/** IOException. */
IOException,
/** java.lang.Error. */
Error
}
    /**
     * Simulator that throws {@link RuntimeException} at the map-reduce stages selected by a bit
     * mask. Bit assignment, one bit per stage in pipeline order:
     * 1 = map configure, 2 = map setup, 4 = map, 8 = map cleanup,
     * 16 = combine configure, 32 = combine setup, 64 = combine, 128 = combine cleanup,
     * 256 = reduce configure, 512 = reduce setup, 1024 = reduce, 2048 = reduce cleanup.
     */
    public static class RuntimeExceptionBitHadoopErrorSimulator extends HadoopErrorSimulator {
        /** Stage bits: defines what map-reduce stages will cause errors. */
        private final int bits;

        /**
         * Constructor.
         *
         * @param b Bit mask of failing stages (see class javadoc for bit assignment).
         */
        protected RuntimeExceptionBitHadoopErrorSimulator(int b) {
            bits = b;
        }

        /**
         * Simulates an error. Declared to throw {@link IOException} so that subclasses may throw
         * a checked exception; this base implementation throws {@link RuntimeException}.
         */
        protected void simulateError() throws IOException {
            throw new RuntimeException("An error simulated by " + getClass().getSimpleName());
        }

        /** {@inheritDoc} */
        @Override public final void onMapConfigure() {
            try {
                if ((bits & 1) != 0)
                    simulateError();
            }
            catch (IOException e) {
                // Ignored: configure() cannot propagate a checked exception.
            }
        }

        /** {@inheritDoc} */
        @Override public final void onMapSetup() throws IOException, InterruptedException {
            if ((bits & 2) != 0)
                simulateError();
        }

        /** {@inheritDoc} */
        @Override public final void onMap() throws IOException {
            if ((bits & 4) != 0)
                simulateError();
        }

        /** {@inheritDoc} */
        @Override public final void onMapCleanup() throws IOException, InterruptedException {
            if ((bits & 8) != 0)
                simulateError();
        }

        /** {@inheritDoc} */
        @Override public final void onCombineConfigure() {
            try {
                if ((bits & 16) != 0)
                    simulateError();
            }
            catch (IOException e) {
                // Ignored: configure() cannot propagate a checked exception.
            }
        }

        /** {@inheritDoc} */
        @Override public final void onCombineSetup() throws IOException, InterruptedException {
            if ((bits & 32) != 0)
                simulateError();
        }

        /** {@inheritDoc} */
        @Override public final void onCombine() throws IOException {
            if ((bits & 64) != 0)
                simulateError();
        }

        /** {@inheritDoc} */
        @Override public final void onCombineCleanup() throws IOException, InterruptedException {
            if ((bits & 128) != 0)
                simulateError();
        }

        /** {@inheritDoc} */
        @Override public final void onReduceConfigure() {
            try {
                if ((bits & 256) != 0)
                    simulateError();
            }
            catch (IOException e) {
                // Ignored: configure() cannot propagate a checked exception.
            }
        }

        /** {@inheritDoc} */
        @Override public final void onReduceSetup() throws IOException, InterruptedException {
            if ((bits & 512) != 0)
                simulateError();
        }

        /** {@inheritDoc} */
        @Override public final void onReduce() throws IOException {
            if ((bits & 1024) != 0)
                simulateError();
        }

        /** {@inheritDoc} */
        @Override public final void onReduceCleanup() throws IOException, InterruptedException {
            if ((bits & 2048) != 0)
                simulateError();
        }
    }
/**
* java.lang.Error simulator.
*/
public static class ErrorBitHadoopErrorSimulator extends RuntimeExceptionBitHadoopErrorSimulator {
/**
* Constructor.
*/
public ErrorBitHadoopErrorSimulator(int bits) {
super(bits);
}
/** {@inheritDoc} */
@Override protected void simulateError() {
throw new Error("An error simulated by " + getClass().getSimpleName());
}
}
/**
* IOException simulator.
*/
public static class IOExceptionBitHadoopErrorSimulator extends RuntimeExceptionBitHadoopErrorSimulator {
/**
* Constructor.
*/
public IOExceptionBitHadoopErrorSimulator(int bits) {
super(bits);
}
/** {@inheritDoc} */
@Override protected void simulateError() throws IOException {
throw new IOException("An IOException simulated by " + getClass().getSimpleName());
}
}
}
| |
/*
* Copyright (C) 2008 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.common.primitives;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkElementIndex;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkPositionIndexes;
import static com.google.common.base.Strings.lenientFormat;
import static java.lang.Double.NEGATIVE_INFINITY;
import static java.lang.Double.POSITIVE_INFINITY;
import com.google.common.annotations.Beta;
import com.google.common.annotations.GwtCompatible;
import com.google.common.annotations.GwtIncompatible;
import com.google.common.base.Converter;
import java.io.Serializable;
import java.util.AbstractList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.RandomAccess;
import org.checkerframework.checker.nullness.compatqual.NullableDecl;
/**
* Static utility methods pertaining to {@code double} primitives, that are not already found in
* either {@link Double} or {@link Arrays}.
*
* <p>See the Guava User Guide article on <a
* href="https://github.com/google/guava/wiki/PrimitivesExplained">primitive utilities</a>.
*
* @author Kevin Bourrillion
* @since 1.0
*/
@GwtCompatible(emulated = true)
public final class Doubles extends DoublesMethodsForWeb {
  private Doubles() {} // Utility class: prevent instantiation.
  /**
   * The number of bytes required to represent a primitive {@code double} value (8).
   *
   * <p><b>Java 8 users:</b> use {@link Double#BYTES} instead.
   *
   * @since 10.0
   */
  public static final int BYTES = Double.SIZE / Byte.SIZE;
  /**
   * Returns a hash code for {@code value}; equal to the result of invoking {@code ((Double)
   * value).hashCode()}.
   *
   * <p><b>Java 8 users:</b> use {@link Double#hashCode(double)} instead.
   *
   * @param value a primitive {@code double} value
   * @return a hash code for the value
   */
  public static int hashCode(double value) {
    // Boxes and delegates; kept this way for GWT compatibility (see TODO below).
    return ((Double) value).hashCode();
    // TODO(kevinb): do it this way when we can (GWT problem):
    // long bits = Double.doubleToLongBits(value);
    // return (int) (bits ^ (bits >>> 32));
  }
  /**
   * Compares the two specified {@code double} values, with the same semantics as
   * {@code ((Double) a).compareTo(b)}: {@code NaN} is treated as greater than all other values,
   * and {@code 0.0 > -0.0}.
   *
   * <p><b>Note:</b> this method simply delegates to the JDK method {@link Double#compare}. It is
   * provided for consistency with the other primitive types, whose compare methods were not added
   * to the JDK until JDK 7.
   *
   * @param a the first {@code double} to compare
   * @param b the second {@code double} to compare
   * @return a negative value if {@code a} is less than {@code b}; a positive value if {@code a} is
   *     greater than {@code b}; or zero if they are equal
   */
  public static int compare(double a, double b) {
    return Double.compare(a, b);
  }
/**
* Returns {@code true} if {@code value} represents a real number. This is equivalent to, but not
* necessarily implemented as, {@code !(Double.isInfinite(value) || Double.isNaN(value))}.
*
* <p><b>Java 8 users:</b> use {@link Double#isFinite(double)} instead.
*
* @since 10.0
*/
public static boolean isFinite(double value) {
return NEGATIVE_INFINITY < value && value < POSITIVE_INFINITY;
}
/**
* Returns {@code true} if {@code target} is present as an element anywhere in {@code array}. Note
* that this always returns {@code false} when {@code target} is {@code NaN}.
*
* @param array an array of {@code double} values, possibly empty
* @param target a primitive {@code double} value
* @return {@code true} if {@code array[i] == target} for some value of {@code i}
*/
public static boolean contains(double[] array, double target) {
for (double value : array) {
if (value == target) {
return true;
}
}
return false;
}
  /**
   * Returns the index of the first appearance of the value {@code target} in {@code array}. Note
   * that this always returns {@code -1} when {@code target} is {@code NaN}.
   *
   * @param array an array of {@code double} values, possibly empty
   * @param target a primitive {@code double} value
   * @return the least index {@code i} for which {@code array[i] == target}, or {@code -1} if no
   *     such index exists.
   */
  public static int indexOf(double[] array, double target) {
    return indexOf(array, target, 0, array.length);
  }
// TODO(kevinb): consider making this public
private static int indexOf(double[] array, double target, int start, int end) {
for (int i = start; i < end; i++) {
if (array[i] == target) {
return i;
}
}
return -1;
}
  /**
   * Returns the start position of the first occurrence of the specified {@code target} within
   * {@code array}, or {@code -1} if there is no such occurrence.
   *
   * <p>More formally, returns the lowest index {@code i} such that {@code Arrays.copyOfRange(array,
   * i, i + target.length)} contains exactly the same elements as {@code target}.
   *
   * <p>Note that this always returns {@code -1} when {@code target} contains {@code NaN}.
   *
   * @param array the array to search for the sequence {@code target}
   * @param target the array to search for as a sub-sequence of {@code array}
   */
  public static int indexOf(double[] array, double[] target) {
    checkNotNull(array, "array");
    checkNotNull(target, "target");
    // An empty target matches at position 0 by convention.
    if (target.length == 0) {
      return 0;
    }
    // Naive O(n*m) scan; the label lets the inner mismatch restart the outer loop.
    outer:
    for (int i = 0; i < array.length - target.length + 1; i++) {
      for (int j = 0; j < target.length; j++) {
        if (array[i + j] != target[j]) {
          continue outer;
        }
      }
      return i;
    }
    return -1;
  }
  /**
   * Returns the index of the last appearance of the value {@code target} in {@code array}. Note
   * that this always returns {@code -1} when {@code target} is {@code NaN}.
   *
   * @param array an array of {@code double} values, possibly empty
   * @param target a primitive {@code double} value
   * @return the greatest index {@code i} for which {@code array[i] == target}, or {@code -1} if no
   *     such index exists.
   */
  public static int lastIndexOf(double[] array, double target) {
    return lastIndexOf(array, target, 0, array.length);
  }
// TODO(kevinb): consider making this public
private static int lastIndexOf(double[] array, double target, int start, int end) {
for (int i = end - 1; i >= start; i--) {
if (array[i] == target) {
return i;
}
}
return -1;
}
/**
* Returns the least value present in {@code array}, using the same rules of comparison as {@link
* Math#min(double, double)}.
*
* @param array a <i>nonempty</i> array of {@code double} values
* @return the value present in {@code array} that is less than or equal to every other value in
* the array
* @throws IllegalArgumentException if {@code array} is empty
*/
@GwtIncompatible(
"Available in GWT! Annotation is to avoid conflict with GWT specialization of base class.")
public static double min(double... array) {
checkArgument(array.length > 0);
double min = array[0];
for (int i = 1; i < array.length; i++) {
min = Math.min(min, array[i]);
}
return min;
}
/**
* Returns the greatest value present in {@code array}, using the same rules of comparison as
* {@link Math#max(double, double)}.
*
* @param array a <i>nonempty</i> array of {@code double} values
* @return the value present in {@code array} that is greater than or equal to every other value
* in the array
* @throws IllegalArgumentException if {@code array} is empty
*/
@GwtIncompatible(
"Available in GWT! Annotation is to avoid conflict with GWT specialization of base class.")
public static double max(double... array) {
checkArgument(array.length > 0);
double max = array[0];
for (int i = 1; i < array.length; i++) {
max = Math.max(max, array[i]);
}
return max;
}
  /**
   * Returns the value nearest to {@code value} which is within the closed range {@code [min..max]}.
   *
   * <p>If {@code value} is within the range {@code [min..max]}, {@code value} is returned
   * unchanged. If {@code value} is less than {@code min}, {@code min} is returned, and if {@code
   * value} is greater than {@code max}, {@code max} is returned.
   *
   * @param value the {@code double} value to constrain
   * @param min the lower bound (inclusive) of the range to constrain {@code value} to
   * @param max the upper bound (inclusive) of the range to constrain {@code value} to
   * @throws IllegalArgumentException if {@code min > max} (or either bound is NaN)
   * @since 21.0
   */
  @Beta
  public static double constrainToRange(double value, double min, double max) {
    // avoid auto-boxing by not using Preconditions.checkArgument(); see Guava issue 3984
    // Reject NaN by testing for the good case (min <= max) instead of the bad (min > max):
    // if either bound is NaN the comparison is false and we fall through to the throw.
    if (min <= max) {
      return Math.min(Math.max(value, min), max);
    }
    throw new IllegalArgumentException(
        lenientFormat("min (%s) must be less than or equal to max (%s)", min, max));
  }
/**
* Returns the values from each provided array combined into a single array. For example, {@code
* concat(new double[] {a, b}, new double[] {}, new double[] {c}} returns the array {@code {a, b,
* c}}.
*
* @param arrays zero or more {@code double} arrays
* @return a single array containing all the values from the source arrays, in order
*/
public static double[] concat(double[]... arrays) {
int length = 0;
for (double[] array : arrays) {
length += array.length;
}
double[] result = new double[length];
int pos = 0;
for (double[] array : arrays) {
System.arraycopy(array, 0, result, pos, array.length);
pos += array.length;
}
return result;
}
  /**
   * Serializable singleton converter between {@code String} and {@code Double}, backing
   * {@link #stringConverter()}.
   */
  private static final class DoubleConverter extends Converter<String, Double>
      implements Serializable {
    static final DoubleConverter INSTANCE = new DoubleConverter();

    @Override
    protected Double doForward(String value) {
      return Double.valueOf(value);
    }

    @Override
    protected String doBackward(Double value) {
      return value.toString();
    }

    @Override
    public String toString() {
      return "Doubles.stringConverter()";
    }

    // Preserve the singleton property on deserialization.
    private Object readResolve() {
      return INSTANCE;
    }

    private static final long serialVersionUID = 1;
  }
  /**
   * Returns a serializable converter object that converts between strings and doubles using {@link
   * Double#valueOf} and {@link Double#toString()}.
   *
   * @since 16.0
   */
  @Beta
  public static Converter<String, Double> stringConverter() {
    return DoubleConverter.INSTANCE;
  }
/**
* Returns an array containing the same values as {@code array}, but guaranteed to be of a
* specified minimum length. If {@code array} already has a length of at least {@code minLength},
* it is returned directly. Otherwise, a new array of size {@code minLength + padding} is
* returned, containing the values of {@code array}, and zeroes in the remaining places.
*
* @param array the source array
* @param minLength the minimum length the returned array must guarantee
* @param padding an extra amount to "grow" the array by if growth is necessary
* @throws IllegalArgumentException if {@code minLength} or {@code padding} is negative
* @return an array containing the values of {@code array}, with guaranteed minimum length {@code
* minLength}
*/
public static double[] ensureCapacity(double[] array, int minLength, int padding) {
checkArgument(minLength >= 0, "Invalid minLength: %s", minLength);
checkArgument(padding >= 0, "Invalid padding: %s", padding);
return (array.length < minLength) ? Arrays.copyOf(array, minLength + padding) : array;
}
/**
* Returns a string containing the supplied {@code double} values, converted to strings as
* specified by {@link Double#toString(double)}, and separated by {@code separator}. For example,
* {@code join("-", 1.0, 2.0, 3.0)} returns the string {@code "1.0-2.0-3.0"}.
*
* <p>Note that {@link Double#toString(double)} formats {@code double} differently in GWT
* sometimes. In the previous example, it returns the string {@code "1-2-3"}.
*
* @param separator the text that should appear between consecutive values in the resulting string
* (but not at the start or end)
* @param array an array of {@code double} values, possibly empty
*/
public static String join(String separator, double... array) {
checkNotNull(separator);
if (array.length == 0) {
return "";
}
// For pre-sizing a builder, just get the right order of magnitude
StringBuilder builder = new StringBuilder(array.length * 12);
builder.append(array[0]);
for (int i = 1; i < array.length; i++) {
builder.append(separator).append(array[i]);
}
return builder.toString();
}
  /**
   * Returns a comparator that compares two {@code double} arrays <a
   * href="http://en.wikipedia.org/wiki/Lexicographical_order">lexicographically</a>. That is, it
   * compares, using {@link #compare(double, double)}), the first pair of values that follow any
   * common prefix, or when one array is a prefix of the other, treats the shorter array as the
   * lesser. For example, {@code [] < [1.0] < [1.0, 2.0] < [2.0]}.
   *
   * <p>The returned comparator is inconsistent with {@link Object#equals(Object)} (since arrays
   * support only identity equality), but it is consistent with {@link Arrays#equals(double[],
   * double[])}.
   *
   * @since 2.0
   */
  public static Comparator<double[]> lexicographicalComparator() {
    return LexicographicalComparator.INSTANCE;
  }
  /** Enum-singleton implementation backing {@link #lexicographicalComparator()}. */
  private enum LexicographicalComparator implements Comparator<double[]> {
    INSTANCE;

    @Override
    public int compare(double[] left, double[] right) {
      // Compare the common prefix element-by-element; ties broken by length
      // (lengths are non-negative ints, so the subtraction cannot overflow).
      int minLength = Math.min(left.length, right.length);
      for (int i = 0; i < minLength; i++) {
        int result = Double.compare(left[i], right[i]);
        if (result != 0) {
          return result;
        }
      }
      return left.length - right.length;
    }

    @Override
    public String toString() {
      return "Doubles.lexicographicalComparator()";
    }
  }
  /**
   * Sorts the elements of {@code array} in descending order.
   *
   * <p>Note that this method uses the total order imposed by {@link Double#compare}, which treats
   * all NaN values as equal and 0.0 as greater than -0.0.
   *
   * @since 23.1
   */
  public static void sortDescending(double[] array) {
    checkNotNull(array);
    sortDescending(array, 0, array.length);
  }
  /**
   * Sorts the elements of {@code array} between {@code fromIndex} inclusive and {@code toIndex}
   * exclusive in descending order.
   *
   * <p>Note that this method uses the total order imposed by {@link Double#compare}, which treats
   * all NaN values as equal and 0.0 as greater than -0.0.
   *
   * @since 23.1
   */
  public static void sortDescending(double[] array, int fromIndex, int toIndex) {
    checkNotNull(array);
    checkPositionIndexes(fromIndex, toIndex, array.length);
    // Sort ascending, then reverse: simpler than a custom descending primitive sort.
    Arrays.sort(array, fromIndex, toIndex);
    reverse(array, fromIndex, toIndex);
  }
  /**
   * Reverses the elements of {@code array}. This is equivalent to {@code
   * Collections.reverse(Doubles.asList(array))}, but is likely to be more efficient.
   *
   * @since 23.1
   */
  public static void reverse(double[] array) {
    checkNotNull(array);
    reverse(array, 0, array.length);
  }
/**
* Reverses the elements of {@code array} between {@code fromIndex} inclusive and {@code toIndex}
* exclusive. This is equivalent to {@code
* Collections.reverse(Doubles.asList(array).subList(fromIndex, toIndex))}, but is likely to be
* more efficient.
*
* @throws IndexOutOfBoundsException if {@code fromIndex < 0}, {@code toIndex > array.length}, or
* {@code toIndex > fromIndex}
* @since 23.1
*/
public static void reverse(double[] array, int fromIndex, int toIndex) {
checkNotNull(array);
checkPositionIndexes(fromIndex, toIndex, array.length);
for (int i = fromIndex, j = toIndex - 1; i < j; i++, j--) {
double tmp = array[i];
array[i] = array[j];
array[j] = tmp;
}
}
  /**
   * Returns an array containing each value of {@code collection}, converted to a {@code double}
   * value in the manner of {@link Number#doubleValue}.
   *
   * <p>Elements are copied from the argument collection as if by {@code collection.toArray()}.
   * Calling this method is as thread-safe as calling that method.
   *
   * @param collection a collection of {@code Number} instances
   * @return an array containing the same values as {@code collection}, in the same order, converted
   *     to primitives
   * @throws NullPointerException if {@code collection} or any of its elements is null
   * @since 1.0 (parameter was {@code Collection<Double>} before 12.0)
   */
  public static double[] toArray(Collection<? extends Number> collection) {
    // Fast path: a view produced by asList() can hand back a copy of its backing range.
    if (collection instanceof DoubleArrayAsList) {
      return ((DoubleArrayAsList) collection).toDoubleArray();
    }

    Object[] boxedArray = collection.toArray();
    int len = boxedArray.length;
    double[] array = new double[len];
    for (int i = 0; i < len; i++) {
      // checkNotNull for GWT (do not optimize)
      array[i] = ((Number) checkNotNull(boxedArray[i])).doubleValue();
    }
    return array;
  }
  /**
   * Returns a fixed-size list backed by the specified array, similar to {@link
   * Arrays#asList(Object[])}. The list supports {@link List#set(int, Object)}, but any attempt to
   * set a value to {@code null} will result in a {@link NullPointerException}.
   *
   * <p>The returned list maintains the values, but not the identities, of {@code Double} objects
   * written to or read from it. For example, whether {@code list.get(0) == list.get(0)} is true for
   * the returned list is unspecified.
   *
   * <p>The returned list may have unexpected behavior if it contains {@code NaN}, or if {@code NaN}
   * is used as a parameter to any of its methods.
   *
   * <p><b>Note:</b> when possible, you should represent your data as an {@link
   * ImmutableDoubleArray} instead, which has an {@link ImmutableDoubleArray#asList asList} view.
   *
   * @param backingArray the array to back the list
   * @return a list view of the array
   */
  public static List<Double> asList(double... backingArray) {
    if (backingArray.length == 0) {
      return Collections.emptyList();
    }
    return new DoubleArrayAsList(backingArray);
  }
  /**
   * Fixed-size {@code List<Double>} view over a range {@code [start, end)} of a primitive
   * {@code double[]}. Writes through to the backing array; never empty (callers return
   * {@link Collections#emptyList()} for the empty case instead).
   */
  @GwtCompatible
  private static class DoubleArrayAsList extends AbstractList<Double>
      implements RandomAccess, Serializable {
    final double[] array;
    final int start; // inclusive
    final int end;   // exclusive

    DoubleArrayAsList(double[] array) {
      this(array, 0, array.length);
    }

    DoubleArrayAsList(double[] array, int start, int end) {
      this.array = array;
      this.start = start;
      this.end = end;
    }

    @Override
    public int size() {
      return end - start;
    }

    @Override
    public boolean isEmpty() {
      // Always false: instances are only constructed over non-empty ranges.
      return false;
    }

    @Override
    public Double get(int index) {
      checkElementIndex(index, size());
      return array[start + index];
    }

    @Override
    public boolean contains(Object target) {
      // Overridden to prevent a ton of boxing
      return (target instanceof Double)
          && Doubles.indexOf(array, (Double) target, start, end) != -1;
    }

    @Override
    public int indexOf(Object target) {
      // Overridden to prevent a ton of boxing
      if (target instanceof Double) {
        int i = Doubles.indexOf(array, (Double) target, start, end);
        if (i >= 0) {
          return i - start;
        }
      }
      return -1;
    }

    @Override
    public int lastIndexOf(Object target) {
      // Overridden to prevent a ton of boxing
      if (target instanceof Double) {
        int i = Doubles.lastIndexOf(array, (Double) target, start, end);
        if (i >= 0) {
          return i - start;
        }
      }
      return -1;
    }

    @Override
    public Double set(int index, Double element) {
      checkElementIndex(index, size());
      double oldValue = array[start + index];
      // checkNotNull for GWT (do not optimize)
      array[start + index] = checkNotNull(element);
      return oldValue;
    }

    @Override
    public List<Double> subList(int fromIndex, int toIndex) {
      int size = size();
      checkPositionIndexes(fromIndex, toIndex, size);
      if (fromIndex == toIndex) {
        return Collections.emptyList();
      }
      return new DoubleArrayAsList(array, start + fromIndex, start + toIndex);
    }

    @Override
    public boolean equals(@NullableDecl Object object) {
      if (object == this) {
        return true;
      }
      // Fast path comparing backing ranges directly; falls back to AbstractList.equals
      // for other List implementations.
      if (object instanceof DoubleArrayAsList) {
        DoubleArrayAsList that = (DoubleArrayAsList) object;
        int size = size();
        if (that.size() != size) {
          return false;
        }
        for (int i = 0; i < size; i++) {
          if (array[start + i] != that.array[that.start + i]) {
            return false;
          }
        }
        return true;
      }
      return super.equals(object);
    }

    @Override
    public int hashCode() {
      // Mirrors List.hashCode() using the boxed-Double hash of each element.
      int result = 1;
      for (int i = start; i < end; i++) {
        result = 31 * result + Doubles.hashCode(array[i]);
      }
      return result;
    }

    @Override
    public String toString() {
      StringBuilder builder = new StringBuilder(size() * 12);
      builder.append('[').append(array[start]);
      for (int i = start + 1; i < end; i++) {
        builder.append(", ").append(array[i]);
      }
      return builder.append(']').toString();
    }

    double[] toDoubleArray() {
      return Arrays.copyOfRange(array, start, end);
    }

    private static final long serialVersionUID = 0;
  }
  /**
   * This is adapted from the regex suggested by {@link Double#valueOf(String)} for prevalidating
   * inputs. All valid inputs must pass this regex, but it's semantically fine if not all inputs
   * that pass this regex are valid -- only a performance hit is incurred, not a semantics bug.
   */
  @GwtIncompatible // regular expressions
  static final java.util.regex.Pattern FLOATING_POINT_PATTERN = fpPattern();
@GwtIncompatible // regular expressions
private static
java.util.regex.Pattern
fpPattern() {
/*
* We use # instead of * for possessive quantifiers. This lets us strip them out when building
* the regex for RE2 (which doesn't support them) but leave them in when building it for
* java.util.regex (where we want them in order to avoid catastrophic backtracking).
*/
String decimal = "(?:\\d+#(?:\\.\\d*#)?|\\.\\d+#)";
String completeDec = decimal + "(?:[eE][+-]?\\d+#)?[fFdD]?";
String hex = "(?:[0-9a-fA-F]+#(?:\\.[0-9a-fA-F]*#)?|\\.[0-9a-fA-F]+#)";
String completeHex = "0[xX]" + hex + "[pP][+-]?\\d+#[fFdD]?";
String fpPattern = "[+-]?(?:NaN|Infinity|" + completeDec + "|" + completeHex + ")";
fpPattern =
fpPattern.replace(
"#",
"+"
);
return
java.util.regex.Pattern
.compile(fpPattern);
}
  /**
   * Parses the specified string as a double-precision floating point value. The ASCII character
   * {@code '-'} (<code>'&#92;u002D'</code>) is recognized as the minus sign.
   *
   * <p>Unlike {@link Double#parseDouble(String)}, this method returns {@code null} instead of
   * throwing an exception if parsing fails. Valid inputs are exactly those accepted by {@link
   * Double#valueOf(String)}, except that leading and trailing whitespace is not permitted.
   *
   * <p>This implementation is likely to be faster than {@code Double.parseDouble} if many failures
   * are expected.
   *
   * @param string the string representation of a {@code double} value
   * @return the floating point value represented by {@code string}, or {@code null} if {@code
   *     string} has a length of zero or cannot be parsed as a {@code double} value
   * @throws NullPointerException if {@code string} is {@code null}
   * @since 14.0
   */
  @Beta
  @GwtIncompatible // regular expressions
  @NullableDecl
  public static Double tryParse(String string) {
    // Prevalidate with a regex so the common failure case avoids exception overhead.
    if (FLOATING_POINT_PATTERN.matcher(string).matches()) {
      // TODO(lowasser): could be potentially optimized, but only with
      // extensive testing
      try {
        return Double.parseDouble(string);
      } catch (NumberFormatException e) {
        // Double.parseDouble has changed specs several times, so fall through
        // gracefully
      }
    }
    return null;
  }
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.networkmanager.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/networkmanager-2019-07-05/UpdateLink" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class UpdateLinkRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {
    /** The ID of the global network. */
    private String globalNetworkId;

    /** The ID of the link. */
    private String linkId;

    /** A description of the link. Constraints: maximum length of 256 characters. */
    private String description;

    /** The type of the link. Constraints: maximum length of 128 characters. */
    private String type;

    /** The upload and download speed in Mbps. */
    private Bandwidth bandwidth;

    /** The provider of the link. Constraints: maximum length of 128 characters. */
    private String provider;
    /**
     * Sets the ID of the global network.
     *
     * @param globalNetworkId The ID of the global network.
     */
    public void setGlobalNetworkId(String globalNetworkId) {
        this.globalNetworkId = globalNetworkId;
    }

    /**
     * Returns the ID of the global network.
     *
     * @return The ID of the global network.
     */
    public String getGlobalNetworkId() {
        return this.globalNetworkId;
    }

    /**
     * Fluent setter for the ID of the global network.
     *
     * @param globalNetworkId The ID of the global network.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateLinkRequest withGlobalNetworkId(String globalNetworkId) {
        setGlobalNetworkId(globalNetworkId);
        return this;
    }
    /**
     * Sets the ID of the link.
     *
     * @param linkId The ID of the link.
     */
    public void setLinkId(String linkId) {
        this.linkId = linkId;
    }

    /**
     * Returns the ID of the link.
     *
     * @return The ID of the link.
     */
    public String getLinkId() {
        return this.linkId;
    }

    /**
     * Fluent setter for the ID of the link.
     *
     * @param linkId The ID of the link.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateLinkRequest withLinkId(String linkId) {
        setLinkId(linkId);
        return this;
    }
    /**
     * Sets the description of the link. Constraints: maximum length of 256 characters.
     *
     * @param description A description of the link.
     */
    public void setDescription(String description) {
        this.description = description;
    }

    /**
     * Returns the description of the link. Constraints: maximum length of 256 characters.
     *
     * @return A description of the link.
     */
    public String getDescription() {
        return this.description;
    }

    /**
     * Fluent setter for the description of the link. Constraints: maximum length of 256 characters.
     *
     * @param description A description of the link.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateLinkRequest withDescription(String description) {
        setDescription(description);
        return this;
    }
    /**
     * Sets the type of the link. Constraints: maximum length of 128 characters.
     *
     * @param type The type of the link.
     */
    public void setType(String type) {
        this.type = type;
    }

    /**
     * Returns the type of the link. Constraints: maximum length of 128 characters.
     *
     * @return The type of the link.
     */
    public String getType() {
        return this.type;
    }

    /**
     * Fluent setter for the type of the link. Constraints: maximum length of 128 characters.
     *
     * @param type The type of the link.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateLinkRequest withType(String type) {
        setType(type);
        return this;
    }
    /**
     * Sets the upload and download speed in Mbps.
     *
     * @param bandwidth The upload and download speed in Mbps.
     */
    public void setBandwidth(Bandwidth bandwidth) {
        this.bandwidth = bandwidth;
    }

    /**
     * Returns the upload and download speed in Mbps.
     *
     * @return The upload and download speed in Mbps.
     */
    public Bandwidth getBandwidth() {
        return this.bandwidth;
    }

    /**
     * Fluent setter for the upload and download speed in Mbps.
     *
     * @param bandwidth The upload and download speed in Mbps.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateLinkRequest withBandwidth(Bandwidth bandwidth) {
        setBandwidth(bandwidth);
        return this;
    }
/**
* <p>
* The provider of the link.
* </p>
* <p>
* Constraints: Maximum length of 128 characters.
* </p>
*
* @param provider
* The provider of the link.</p>
* <p>
* Constraints: Maximum length of 128 characters.
*/
public void setProvider(String provider) {
this.provider = provider;
}
/**
 * Returns the provider of the link.
 * <p>
 * Constraints: Maximum length of 128 characters.
 * </p>
 *
 * @return the provider of the link, or {@code null} if not set
 */
public String getProvider() {
    return this.provider;
}
/**
 * Sets the provider of the link and returns this request for method chaining.
 * <p>
 * Constraints: Maximum length of 128 characters.
 * </p>
 *
 * @param provider the provider of the link
 * @return this request, so that method calls can be chained together
 */
public UpdateLinkRequest withProvider(String provider) {
    setProvider(provider);
    return this;
}
/**
 * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
 * redacted from this string using a placeholder value.
 *
 * @return A string representation of this object.
 *
 * @see java.lang.Object#toString()
 */
@Override
public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append("{");
    // Every present field except the last (Provider) is followed by a comma, so the
    // output can end with a trailing comma when Provider is null but other fields are set.
    if (getGlobalNetworkId() != null)
        sb.append("GlobalNetworkId: ").append(getGlobalNetworkId()).append(",");
    if (getLinkId() != null)
        sb.append("LinkId: ").append(getLinkId()).append(",");
    if (getDescription() != null)
        sb.append("Description: ").append(getDescription()).append(",");
    if (getType() != null)
        sb.append("Type: ").append(getType()).append(",");
    if (getBandwidth() != null)
        sb.append("Bandwidth: ").append(getBandwidth()).append(",");
    if (getProvider() != null)
        sb.append("Provider: ").append(getProvider());
    sb.append("}");
    return sb.toString();
}
/**
 * Compares this request to another object field by field, treating two
 * {@code null} members as equal.
 *
 * @param obj the object to compare against
 * @return {@code true} if {@code obj} is an {@code UpdateLinkRequest} with equal members
 */
@Override
public boolean equals(Object obj) {
    if (this == obj) {
        return true;
    }
    // instanceof is false for null, so no separate null check is needed.
    if (!(obj instanceof UpdateLinkRequest)) {
        return false;
    }
    UpdateLinkRequest that = (UpdateLinkRequest) obj;
    return memberEquals(getGlobalNetworkId(), that.getGlobalNetworkId())
            && memberEquals(getLinkId(), that.getLinkId())
            && memberEquals(getDescription(), that.getDescription())
            && memberEquals(getType(), that.getType())
            && memberEquals(getBandwidth(), that.getBandwidth())
            && memberEquals(getProvider(), that.getProvider());
}

/** Null-safe equality check for a single member: equal when both null or a.equals(b). */
private static boolean memberEquals(Object a, Object b) {
    return a == null ? b == null : a.equals(b);
}
/**
 * Computes a hash code from all members using the standard 31-based fold
 * (identical to the unrolled per-field version), with null members hashing to 0.
 *
 * @return the hash code for this request
 */
@Override
public int hashCode() {
    final Object[] members = {
            getGlobalNetworkId(), getLinkId(), getDescription(),
            getType(), getBandwidth(), getProvider()
    };
    int result = 1;
    for (Object member : members) {
        result = 31 * result + (member == null ? 0 : member.hashCode());
    }
    return result;
}
/**
 * Creates a copy of this request via {@link Object#clone()}.
 * NOTE(review): presumably a shallow copy with respect to this class's fields
 * (the Bandwidth member would be shared) — confirm against the superclass's clone().
 */
@Override
public UpdateLinkRequest clone() {
    return (UpdateLinkRequest) super.clone();
}
}
| |
/*
* Android Myo library by darken
* Matthias Urhahn (matthias.urhahn@rwth-aachen.de)
* mHealth - Uniklinik RWTH-Aachen.
*/
package eu.darken.myolib;
import android.annotation.TargetApi;
import android.bluetooth.BluetoothDevice;
import android.bluetooth.BluetoothGatt;
import android.bluetooth.BluetoothGattCallback;
import android.bluetooth.BluetoothGattCharacteristic;
import android.bluetooth.BluetoothGattDescriptor;
import android.bluetooth.BluetoothGattService;
import android.bluetooth.BluetoothProfile;
import android.content.Context;
import android.os.Build;
import android.support.annotation.NonNull;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import eu.darken.myolib.msgs.MyoMsg;
import eu.darken.myolib.msgs.ReadMsg;
import eu.darken.myolib.msgs.WriteMsg;
import eu.darken.myolib.processor.BaseDataPacket;
import eu.darken.myolib.processor.BaseProcessor;
import eu.darken.myolib.processor.Processor;
import eu.darken.myolib.services.Battery;
import eu.darken.myolib.services.Classifier;
import eu.darken.myolib.services.Control;
import eu.darken.myolib.services.Emg;
import eu.darken.myolib.services.Imu;
import eu.darken.myolib.services.MyoDescriptor;
import eu.darken.myolib.tools.ApiHelper;
import eu.darken.myolib.tools.Logy;
/**
* This is the base class for all Myo communication.
* It wraps a {@link BluetoothGatt} object and supplies methods to easy communication.
* Communication is encapsulated via {@link MyoMsg} and {@link #submit(MyoMsg)}.
*/
public class BaseMyo extends BluetoothGattCallback {
    // NOTE(review): TAG is static but reassigned per instance in the constructor, so with
    // multiple Myo devices every instance logs under the address of the most recently
    // constructed one. Consider an instance field instead.
    protected static String TAG;
    // Messages waiting to be sent to the device, in submission order.
    private final BlockingQueue<MyoMsg> mDispatchQueue = new LinkedBlockingQueue<>();
    // Guards mRunning transitions between connect()/disconnect()/submit().
    private final Object mThreadControl = new Object();
    private volatile boolean mRunning = false;
    private final Context mContext;
    private final BluetoothDevice mDevice;
    private BluetoothGatt mBluetoothGatt;
    private volatile ConnectionState mConnectionState = ConnectionState.DISCONNECTED;
    // Released when a GATT operation completes (or on disconnect); the dispatcher loop
    // acquires it before sending the next message, so only one operation is in flight.
    private final Semaphore mWaitToken = new Semaphore(0);
    // In-flight messages awaiting their GATT callback, keyed by MyoMsg identifier.
    private final Map<String, MyoMsg> mMsgCallbackMap = new HashMap<>();
    // Processors subscribed to characteristic-change notifications, keyed by characteristic UUID.
    private final Map<UUID, List<Processor>> mSubscriptionMap = new HashMap<>();
    private final List<ConnectionListener> mConnectionListeners = new ArrayList<>();
    private ConnectionSpeed mConnectionSpeed = ConnectionSpeed.BALANCED;
    // See setTimeoutSendQueue(long): -1 waits forever, 0 never waits.
    private volatile long mTimeoutSendQueue = 250;

    /**
     * The state of this device, relates to {@link BluetoothProfile#STATE_CONNECTED} etc.
     */
    public enum ConnectionState {
        CONNECTING, CONNECTED, DISCONNECTING, DISCONNECTED
    }

    /** Connection priority presets, mapped to the API-21+ BluetoothGatt priority constants. */
    @TargetApi(Build.VERSION_CODES.LOLLIPOP)
    public enum ConnectionSpeed {
        /**
         * Saves battery power but reduces the data rate.<br>
         * About ~50 packets/s.
         */
        BATTERY_CONSERVING(BluetoothGatt.CONNECTION_PRIORITY_LOW_POWER),
        /**
         * Balance between battery saving and data rate.<br>
         * About 84 packets/s.
         */
        BALANCED(BluetoothGatt.CONNECTION_PRIORITY_BALANCED),
        /**
         * Maximum performance, causes high battery drain.<br>
         * Data rates of 450+ packets/s
         */
        HIGH(BluetoothGatt.CONNECTION_PRIORITY_HIGH);

        private final int mPriority;

        ConnectionSpeed(int priority) {
            mPriority = priority;
        }

        /** @return the raw BluetoothGatt connection-priority constant for this preset */
        public int getPriority() {
            return mPriority;
        }
    }

    /**
     * Creates a wrapper around the given bluetooth device. No connection is made until
     * {@link #connect()} or {@link #submit(MyoMsg)} is called.
     *
     * @param context context used later for {@code connectGatt}
     * @param device  the Myo's bluetooth device
     */
    public BaseMyo(Context context, BluetoothDevice device) {
        mContext = context;
        mDevice = device;
        TAG = "MyoLib:BaseMyo:" + device.getAddress();
    }

    /**
     * Time until a packet without confirmation is treated as failure.
     *
     * @return time in milliseconds, default of 250ms.
     */
    public long getTimeoutSendQueue() {
        return mTimeoutSendQueue;
    }

    /**
     * Sets the time interval for how long the dispatcher waits until it sends the next packet,
     * if there was still no confirmation for the current one.
     *
     * @param timeoutSendQueue time in milliseconds, default 250ms, -1 for infinite time, 0 for no waiting.
     */
    public void setTimeoutSendQueue(long timeoutSendQueue) {
        mTimeoutSendQueue = timeoutSendQueue;
    }

    /** @return the MAC address of the wrapped bluetooth device */
    public String getDeviceAddress() {
        return getBluetoothDevice().getAddress();
    }

    /**
     * Requires API21+ (Lollipop+)
     * Calling this on < API21 will have no effect.<br>
     * Changes the connection speed of this Myo.
     * This can be done on the fly.
     *
     * @param speed a value from {@link eu.darken.myolib.BaseMyo.ConnectionSpeed}
     */
    public void setConnectionSpeed(@NonNull ConnectionSpeed speed) {
        if (ApiHelper.hasLolliPop())
            mConnectionSpeed = speed;
    }

    /** @return the currently requested connection speed preset */
    public ConnectionSpeed getConnectionSpeed() {
        return mConnectionSpeed;
    }

    /** @return the wrapped bluetooth device */
    public BluetoothDevice getBluetoothDevice() {
        return mDevice;
    }

    /** @return the current GATT connection state as last reported by the stack */
    public ConnectionState getConnectionState() {
        return mConnectionState;
    }

    /** Callback for observers of this Myo's connection state. */
    public interface ConnectionListener {
        void onConnectionStateChanged(BaseMyo myo, ConnectionState state);
    }

    /** Registers a listener; NOTE(review): duplicates are not rejected here. */
    public void addConnectionListener(ConnectionListener listener) {
        mConnectionListeners.add(listener);
    }

    /** Unregisters a previously added listener. */
    public void removeConnectionListener(ConnectionListener listener) {
        mConnectionListeners.remove(listener);
    }

    /**
     * Tracks the GATT connection state, starts service discovery once connected and
     * notifies registered {@link ConnectionListener}s.
     * Throws a RuntimeException for any state outside the four BluetoothProfile states.
     */
    @Override
    public void onConnectionStateChange(BluetoothGatt gatt, int status, int newState) {
        if (newState == BluetoothProfile.STATE_CONNECTING) {
            mConnectionState = ConnectionState.CONNECTING;
        } else if (newState == BluetoothProfile.STATE_CONNECTED) {
            mConnectionState = ConnectionState.CONNECTED;
            Logy.d(TAG, "Device connected, discovering services...");
            gatt.discoverServices();
        } else if (newState == BluetoothProfile.STATE_DISCONNECTING) {
            mConnectionState = ConnectionState.DISCONNECTING;
            // Stop the dispatcher from sending while the link is going down.
            mWaitToken.drainPermits();
        } else if (newState == BluetoothProfile.STATE_DISCONNECTED) {
            mConnectionState = ConnectionState.DISCONNECTED;
        } else {
            throw new RuntimeException("Unknown connection state");
        }
        Logy.d(TAG, "status:" + status + ", newState:" + mConnectionState.name());
        for (ConnectionListener listener : mConnectionListeners)
            listener.onConnectionStateChanged(this, mConnectionState);
        super.onConnectionStateChange(gatt, status, newState);
    }

    /**
     * Checks available Myo services and enables EMG and IMU characteristic notifications.
     * Releases the dispatcher's wait token at the end so queued messages can start flowing.
     */
    @Override
    public void onServicesDiscovered(BluetoothGatt gatt, int status) {
        if (status != BluetoothGatt.GATT_SUCCESS) {
            Logy.w(TAG, "Service discovered failed!");
            return;
        }
        // Control service: only probed and logged here, nothing is enabled.
        BluetoothGattService controlService = mBluetoothGatt.getService(Control.getServiceUUID());
        if (controlService != null) {
            Logy.d(TAG, "Service Control: available");
            BluetoothGattCharacteristic myoInfo = controlService.getCharacteristic(Control.MYOINFO.getCharacteristicUUID());
            Logy.d(TAG, "Characteristic MyoInfo: " + (myoInfo != null ? "available" : "unavailable"));
            BluetoothGattCharacteristic fimwareInfo = controlService.getCharacteristic(Control.FIRMWARE_VERSION.getCharacteristicUUID());
            Logy.d(TAG, "Characteristic FirmwareInfo: " + (fimwareInfo != null ? "available" : "unavailable"));
            BluetoothGattCharacteristic commandCharacteristic = controlService.getCharacteristic(Control.COMMAND.getCharacteristicUUID());
            Logy.d(TAG, "Characteristic Command: " + (commandCharacteristic != null ? "available" : "unavailable"));
        } else {
            Logy.w(TAG, "Service Control: unavailable");
        }
        // EMG: notifications on all four data characteristics.
        BluetoothGattService emgService = mBluetoothGatt.getService(Emg.SERVICE.getServiceUUID());
        if (emgService != null) {
            Logy.d(TAG, "Service EMG: available");
            enableNotifications(emgService, Emg.EMGDATA0_DESCRIPTOR);
            enableNotifications(emgService, Emg.EMGDATA1_DESCRIPTOR);
            enableNotifications(emgService, Emg.EMGDATA2_DESCRIPTOR);
            enableNotifications(emgService, Emg.EMGDATA3_DESCRIPTOR);
        } else {
            Logy.w(TAG, "Service EMG: unavailable");
        }
        // IMU: notifications for data, indications for motion events.
        BluetoothGattService imuService = mBluetoothGatt.getService(Imu.getServiceUUID());
        if (imuService != null) {
            Logy.d(TAG, "Service IMU: available");
            enableNotifications(imuService, Imu.IMUDATA_DESCRIPTOR);
            enableIndication(imuService, Imu.MOTIONEVENT_DESCRIPTOR);
        } else {
            Logy.w(TAG, "Service IMU: unavailable");
        }
        BluetoothGattService classifierService = mBluetoothGatt.getService(Classifier.getServiceUUID());
        if (classifierService != null) {
            Logy.d(TAG, "Service Classifier: available");
            enableIndication(classifierService, Classifier.CLASSIFIEREVENT_DESCRIPTOR);
        } else {
            Logy.w(TAG, "Service Classifier: unavailable");
        }
        BluetoothGattService batteryService = mBluetoothGatt.getService(Battery.getServiceUUID());
        if (batteryService != null) {
            Logy.d(TAG, "Service Battery: available");
        } else {
            Logy.w(TAG, "Service Battery: unavailable");
        }
        super.onServicesDiscovered(gatt, status);
        Logy.d(TAG, "Services discovered.");
        // Unblock the dispatcher loop now that the device is usable.
        mWaitToken.release();
    }

    /**
     * Enables local notifications for the characteristic referenced by the descriptor and
     * queues a descriptor write enabling them on the remote side.
     */
    private void enableNotifications(BluetoothGattService service, final MyoDescriptor descriptor) {
        BluetoothGattCharacteristic classifier = service.getCharacteristic(descriptor.getCharacteristicUUID());
        if (classifier != null && mBluetoothGatt.setCharacteristicNotification(classifier, true)) {
            WriteMsg msg = new WriteMsg(descriptor,
                    BluetoothGattDescriptor.ENABLE_NOTIFICATION_VALUE,
                    new MyoMsg.Callback() {
                        @Override
                        public void onResult(MyoMsg msg) {
                            Logy.d(TAG, "Notification '" + descriptor.getName() + "' enabled");
                        }
                    });
            submit(msg);
        }
    }

    /**
     * Same as {@link #enableNotifications} but enables indications (acknowledged notifications).
     */
    private void enableIndication(BluetoothGattService service, final MyoDescriptor descriptor) {
        BluetoothGattCharacteristic classifier = service.getCharacteristic(descriptor.getCharacteristicUUID());
        if (classifier != null && mBluetoothGatt.setCharacteristicNotification(classifier, true)) {
            WriteMsg msg = new WriteMsg(descriptor,
                    BluetoothGattDescriptor.ENABLE_INDICATION_VALUE,
                    new MyoMsg.Callback() {
                        @Override
                        public void onResult(MyoMsg msg) {
                            Logy.d(TAG, "Indication '" + descriptor.getName() + "' enabled");
                        }
                    });
            submit(msg);
        }
    }

    /**
     * Submits a new message to the dispatcher of this device.
     * It will be put at the end of the queue and once it reaches the front.
     * Messages are sequentially as otherwise instruction can be lost.<br>
     * If dispatcher of this Myo is not yet running, {@link #connect()} will be called.
     * It will be taken care of that the Myo is ready before any transmission attempt will be made.
     * Don't alter the message object after submitting it
     *
     * @param msg A {@link WriteMsg} or {@link ReadMsg}
     */
    public void submit(@NonNull MyoMsg msg) {
        mDispatchQueue.add(msg);
        synchronized (mThreadControl) {
            if (!mRunning)
                connect();
        }
    }

    /**
     * "Starts this Myo"<br>
     * Launches the innerloop that dispatches {@link MyoMsg}.
     * This loop will wait until {@link #getConnectionState()} changes to {@link eu.darken.myolib.BaseMyo.ConnectionState#CONNECTED}
     * <p>
     * Calling this multiple times has no effect.
     */
    public void connect() {
        synchronized (mThreadControl) {
            if (mRunning) {
                return;
            } else {
                Logy.d(TAG, "Connecting to " + mDevice.getName());
                mWaitToken.drainPermits();
                mRunning = true;
                new Thread(mLoop).start();
            }
        }
    }

    /**
     * Disconnects the bluetooth connection and stops the dispatcher loop.
     */
    public void disconnect() {
        synchronized (mThreadControl) {
            if (!mRunning) {
                return;
            } else {
                mRunning = false;
                // Wake the dispatcher so it observes mRunning == false and exits.
                mWaitToken.release();
                Logy.d(TAG, "Disconnecting from " + mDevice.getName());
            }
        }
    }

    /**
     * Whether the Dispatcher is running.<br>
     * NOT if the Myo device is connected.
     * Use {@link #getConnectionState()} for that.<br>
     * The dispatcher can be running, but the Myo device temporarily disconnected.
     *
     * @return true if the dispatcher is running
     */
    public boolean isRunning() {
        return mRunning;
    }

    // Dispatcher loop: connects the GATT, then sends one queued message per acquired
    // wait token until disconnect() clears mRunning, finally tearing the GATT down.
    private Runnable mLoop = new Runnable() {
        private int mPriority = ConnectionSpeed.BALANCED.getPriority();

        @TargetApi(Build.VERSION_CODES.LOLLIPOP)
        @Override
        public void run() {
            android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_DEFAULT);
            // autoConnect=true: the stack keeps retrying the connection in the background.
            mBluetoothGatt = mDevice.connectGatt(mContext, true, BaseMyo.this);
            while (mRunning) {
                // NOTE(review): while not CONNECTED this `continue` busy-spins without
                // sleeping or waiting, pegging a CPU core until the connection comes up.
                if (mConnectionState != ConnectionState.CONNECTED)
                    continue;
                if (ApiHelper.hasLolliPop()) {
                    // Apply a changed connection-speed preset on the fly.
                    if (getConnectionSpeed().getPriority() != mPriority) {
                        mPriority = getConnectionSpeed().getPriority();
                        mBluetoothGatt.requestConnectionPriority(mPriority);
                    }
                }
                try {
                    if (mTimeoutSendQueue == -1) {
                        mWaitToken.acquire();
                    } else {
                        // Timed out means the previous packet got no confirmation callback.
                        if (!mWaitToken.tryAcquire(mTimeoutSendQueue, TimeUnit.MILLISECONDS))
                            Logy.w(TAG, "Lost packet!");
                    }
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }
                if (!mRunning)
                    break;
                MyoMsg msg = mDispatchQueue.poll();
                if (msg != null) {
                    internalSend(msg);
                } else {
                    // Nothing queued: hand the token back so the next iteration doesn't block.
                    mWaitToken.release();
                }
            }
            mBluetoothGatt.disconnect();
            mBluetoothGatt.close();
            mBluetoothGatt = null;
        }
    };

    // Timestamp of the last dispatched message, used for round-trip-time logging.
    private long mDispatchTime = 0;

    /**
     * Resolves the message's service/characteristic/descriptor on the live GATT and issues
     * the matching read or write. Messages targeting unavailable attributes are dropped
     * with a warning (and their callbacks are never invoked).
     */
    private void internalSend(MyoMsg msg) {
        BluetoothGattService gattService = mBluetoothGatt.getService(msg.getServiceUUID());
        if (gattService == null) {
            Logy.w(TAG, "BluetoothGattService unavailable!: " + msg.toString());
            return;
        }
        BluetoothGattCharacteristic gattChar = gattService.getCharacteristic(msg.getCharacteristicUUID());
        if (gattChar == null) {
            Logy.w(TAG, "BluetoothGattCharacteristic unavailable!: " + msg.toString());
            return;
        }
        mDispatchTime = System.currentTimeMillis();
        if (msg.getDescriptorUUID() != null) {
            // Descriptor-level read/write.
            BluetoothGattDescriptor gattDesc = gattChar.getDescriptor(msg.getDescriptorUUID());
            if (gattDesc == null) {
                Logy.w(TAG, "BluetoothGattDescriptor unavailable!: " + msg.toString());
                return;
            }
            mMsgCallbackMap.put(msg.getIdentifier(), msg);
            if (msg instanceof WriteMsg) {
                gattDesc.setValue(((WriteMsg) msg).getData());
                mBluetoothGatt.writeDescriptor(gattDesc);
            } else {
                mBluetoothGatt.readDescriptor(gattDesc);
            }
        } else {
            // Characteristic-level read/write.
            mMsgCallbackMap.put(msg.getIdentifier(), msg);
            if (msg instanceof WriteMsg) {
                gattChar.setValue(((WriteMsg) msg).getData());
                mBluetoothGatt.writeCharacteristic(gattChar);
            } else {
                mBluetoothGatt.readCharacteristic(gattChar);
            }
        }
        Logy.v(TAG, "Processed: " + msg.getIdentifier());
    }

    /**
     * Completes a pending {@link ReadMsg} on a characteristic: stores the value on success,
     * otherwise retries (by resubmitting) until the message's retry counter is exhausted.
     * NOTE(review): the map remove() result is cast and dereferenced without a null check —
     * an unexpected callback for an untracked characteristic would throw an NPE here
     * (same pattern in the other three GATT completion callbacks below).
     */
    @Override
    public void onCharacteristicRead(BluetoothGatt gatt, BluetoothGattCharacteristic characteristic, int gattStatus) {
        ReadMsg msg = (ReadMsg) mMsgCallbackMap.remove(MyoMsg.toIdentifier(characteristic));
        // Allow the dispatcher to send the next message.
        mWaitToken.release();
        msg.setGattStatus(gattStatus);
        if (gattStatus == BluetoothGatt.GATT_SUCCESS) {
            Logy.v(TAG, "rtt: " + (System.currentTimeMillis() - mDispatchTime) + "ms | SUCCESS | " + msg.toString());
            msg.setState(MyoMsg.State.SUCCESS);
            msg.setValue(characteristic.getValue());
            if (msg.getCallback() != null)
                msg.getCallback().onResult(msg);
        } else {
            Logy.w(TAG, "rtt: " + (System.currentTimeMillis() - mDispatchTime) + "ms | ERROR(" + gattStatus + ") | " + msg.toString());
            msg.setState(MyoMsg.State.ERROR);
            if (msg.getRetryCounter() == 0) {
                if (msg.getCallback() != null)
                    msg.getCallback().onResult(msg);
            } else {
                msg.decreaseRetryCounter();
                submit(msg);
            }
        }
        super.onCharacteristicRead(gatt, characteristic, gattStatus);
    }

    /**
     * Completes a pending {@link WriteMsg} on a characteristic; retries on error while the
     * message's retry counter allows it.
     */
    @Override
    public void onCharacteristicWrite(BluetoothGatt gatt, BluetoothGattCharacteristic characteristic, int gattStatus) {
        WriteMsg msg = (WriteMsg) mMsgCallbackMap.remove(MyoMsg.toIdentifier(characteristic));
        mWaitToken.release();
        msg.setGattStatus(gattStatus);
        if (gattStatus == BluetoothGatt.GATT_SUCCESS) {
            Logy.v(TAG, "rtt: " + (System.currentTimeMillis() - mDispatchTime) + "ms | SUCCESS | " + msg.toString());
            msg.setState(MyoMsg.State.SUCCESS);
            if (msg.getCallback() != null)
                msg.getCallback().onResult(msg);
        } else {
            Logy.w(TAG, "rtt: " + (System.currentTimeMillis() - mDispatchTime) + "ms | ERROR(" + gattStatus + ") | " + msg.toString());
            msg.setState(MyoMsg.State.ERROR);
            if (msg.getRetryCounter() == 0) {
                if (msg.getCallback() != null)
                    msg.getCallback().onResult(msg);
            } else {
                msg.decreaseRetryCounter();
                submit(msg);
            }
        }
        super.onCharacteristicWrite(gatt, characteristic, gattStatus);
    }

    /**
     * Completes a pending {@link ReadMsg} on a descriptor; mirrors
     * {@link #onCharacteristicRead}.
     */
    @Override
    public void onDescriptorRead(BluetoothGatt gatt, BluetoothGattDescriptor descriptor, int gattStatus) {
        ReadMsg msg = (ReadMsg) mMsgCallbackMap.remove(MyoMsg.toIdentifier(descriptor));
        mWaitToken.release();
        msg.setGattStatus(gattStatus);
        if (gattStatus == BluetoothGatt.GATT_SUCCESS) {
            Logy.v(TAG, "rtt: " + (System.currentTimeMillis() - mDispatchTime) + "ms | SUCCESS | " + msg.toString());
            msg.setState(MyoMsg.State.SUCCESS);
            msg.setValue(descriptor.getValue());
            if (msg.getCallback() != null)
                msg.getCallback().onResult(msg);
        } else {
            Logy.w(TAG, "rtt: " + (System.currentTimeMillis() - mDispatchTime) + "ms | ERROR(" + gattStatus + ") | " + msg.toString());
            msg.setState(MyoMsg.State.ERROR);
            if (msg.getRetryCounter() == 0) {
                if (msg.getCallback() != null)
                    msg.getCallback().onResult(msg);
            } else {
                msg.decreaseRetryCounter();
                submit(msg);
            }
        }
        super.onDescriptorRead(gatt, descriptor, gattStatus);
    }

    /**
     * Completes a pending {@link WriteMsg} on a descriptor; mirrors
     * {@link #onCharacteristicWrite}.
     */
    @Override
    public void onDescriptorWrite(BluetoothGatt gatt, BluetoothGattDescriptor descriptor, int gattStatus) {
        WriteMsg msg = (WriteMsg) mMsgCallbackMap.remove(MyoMsg.toIdentifier(descriptor));
        mWaitToken.release();
        msg.setGattStatus(gattStatus);
        if (gattStatus == BluetoothGatt.GATT_SUCCESS) {
            Logy.v(TAG, "rtt: " + (System.currentTimeMillis() - mDispatchTime) + "ms | SUCCESS | " + msg.toString());
            msg.setState(MyoMsg.State.SUCCESS);
            if (msg.getCallback() != null)
                msg.getCallback().onResult(msg);
        } else {
            Logy.w(TAG, "rtt: " + (System.currentTimeMillis() - mDispatchTime) + "ms | ERROR(" + gattStatus + ") | " + msg.toString());
            msg.setState(MyoMsg.State.ERROR);
            if (msg.getRetryCounter() == 0) {
                if (msg.getCallback() != null)
                    msg.getCallback().onResult(msg);
            } else {
                msg.decreaseRetryCounter();
                submit(msg);
            }
        }
        super.onDescriptorWrite(gatt, descriptor, gattStatus);
    }

    /**
     * Adds a Processor object to this Myo, make sure it is unique.
     * Registers the processor for every characteristic UUID it subscribes to;
     * duplicate registrations for the same UUID are skipped.
     */
    public void addProcessor(Processor processor) {
        for (UUID subscriptionTarget : processor.getSubscriptions()) {
            List<Processor> subscriberList = mSubscriptionMap.get(subscriptionTarget);
            if (subscriberList == null) {
                subscriberList = new ArrayList<>();
                mSubscriptionMap.put(subscriptionTarget, subscriberList);
            } else {
                if (subscriberList.contains(processor))
                    continue;
            }
            subscriberList.add(processor);
        }
        processor.onAdded();
    }

    /** Removes the processor from every subscription list it was registered in. */
    public void removeProcessor(BaseProcessor processor) {
        processor.onRemoved();
        for (UUID subscriptionTarget : processor.getSubscriptions()) {
            List<Processor> subscriberList = mSubscriptionMap.get(subscriptionTarget);
            if (subscriberList != null)
                subscriberList.remove(processor);
        }
    }

    /**
     * Fans incoming characteristic notifications out to all processors subscribed
     * to that characteristic's UUID.
     */
    @Override
    public void onCharacteristicChanged(BluetoothGatt gatt, BluetoothGattCharacteristic characteristic) {
        BaseDataPacket packet = new BaseDataPacket(gatt, characteristic);
        List<Processor> subscribers = mSubscriptionMap.get(characteristic.getUuid());
        if (subscribers != null) {
            for (Processor subscriber : subscribers)
                subscriber.submit(packet);
        }
        super.onCharacteristicChanged(gatt, characteristic);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. The ASF
* licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*
*/
package org.apache.directory.server.dhcp.service;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.util.Iterator;
import org.apache.directory.server.dhcp.DhcpException;
import org.apache.directory.server.dhcp.messages.DhcpMessage;
import org.apache.directory.server.dhcp.options.DhcpOption;
import org.apache.directory.server.dhcp.options.OptionsField;
import org.apache.directory.server.dhcp.options.dhcp.ParameterRequestList;
import org.apache.directory.server.dhcp.options.dhcp.ServerIdentifier;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Abstract implementation of the server-side DHCP protocol. This class just
* provides some utility methods and dispatches server-bound messages to handler
* methods which can be overridden to provide the functionality.
* <p>
* Client-bound messages and BOOTP messages are ignored.
*
* @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
*
*/
public abstract class AbstractDhcpService implements DhcpService
{
    private static final Logger LOGGER = LoggerFactory.getLogger( AbstractDhcpService.class );

    /**
     * Dispatches an incoming DHCP message to the handler method matching its
     * message type. Messages with an op other than BOOTREQUEST/BOOTREPLY, plain
     * BOOTP messages (no message-type option) and pure server-to-client message
     * types are ignored by returning <code>null</code>.
     *
     * @see org.apache.directory.server.dhcp.service.DhcpService#getReplyFor(InetSocketAddress, InetSocketAddress, DhcpMessage)
     */
    public final DhcpMessage getReplyFor( InetSocketAddress localAddress,
        InetSocketAddress clientAddress, DhcpMessage request )
        throws DhcpException
    {
        // ignore messages with an op != REQUEST/REPLY
        if ( ( request.getOp() != DhcpMessage.OP_BOOTREQUEST )
            && ( request.getOp() != DhcpMessage.OP_BOOTREPLY ) )
        {
            return null;
        }

        // message type option MUST be set - we don't support plain BOOTP.
        if ( null == request.getMessageType() )
        {
            LOGGER.warn( "Missing message type option - plain BOOTP not supported." );
            return null;
        }

        // dispatch based on the message type
        switch ( request.getMessageType() )
        {
            // client-to-server messages
            case DHCPDISCOVER:
                return handleDISCOVER( localAddress, clientAddress, request );

            case DHCPREQUEST:
                return handleREQUEST( localAddress, clientAddress, request );

            case DHCPRELEASE:
                return handleRELEASE( localAddress, clientAddress, request );

            case DHCPINFORM:
                return handleINFORM( localAddress, clientAddress, request );

            // NOTE(review): per RFC 2131, DHCPOFFER is a server-to-client message,
            // yet it is dispatched to a handler here rather than ignored below -
            // confirm whether that is intentional.
            case DHCPOFFER:
                return handleOFFER( localAddress, clientAddress, request );

            // server-to-client messages
            case DHCPDECLINE:
            case DHCPACK:
            case DHCPNAK:
                return null; // just ignore them

            default:
                return handleUnknownMessage( clientAddress, request );
        }
    }

    /**
     * Handle unknown DHCP message. The default implementation just logs and
     * ignores it.
     *
     * @param clientAddress the address the request came from
     * @param request the request message
     * @return DhcpMessage response message or <code>null</code> to ignore (don't reply to)
     *         it.
     */
    protected DhcpMessage handleUnknownMessage( InetSocketAddress clientAddress,
        DhcpMessage request )
    {
        if ( LOGGER.isWarnEnabled() )
        {
            // Fixed typo in the log message: "unknkown" -> "unknown".
            LOGGER.warn( "Got unknown DHCP message: {} from: {}", request, clientAddress );
        }

        return null;
    }

    /**
     * Handle DHCPINFORM message. The default implementation just ignores it.
     *
     * @param localAddress the local address the request was received on
     * @param clientAddress the address the request came from
     * @param request the request message
     * @return DhcpMessage response message or <code>null</code> to ignore (don't reply to)
     *         it.
     */
    protected DhcpMessage handleINFORM( InetSocketAddress localAddress,
        InetSocketAddress clientAddress, DhcpMessage request )
        throws DhcpException
    {
        if ( LOGGER.isDebugEnabled() )
        {
            LOGGER.debug( "Got INFORM message: {} from: {}", request, clientAddress );
        }

        return null;
    }

    /**
     * Handle DHCPRELEASE message. The default implementation just ignores it.
     *
     * @param localAddress the local address the request was received on
     * @param clientAddress the address the request came from
     * @param request the request message
     * @return DhcpMessage response message or <code>null</code> to ignore (don't reply to)
     *         it.
     */
    protected DhcpMessage handleRELEASE( InetSocketAddress localAddress,
        InetSocketAddress clientAddress, DhcpMessage request )
        throws DhcpException
    {
        if ( LOGGER.isDebugEnabled() )
        {
            LOGGER.debug( "Got RELEASE message: {} from: {}", request, clientAddress );
        }

        return null;
    }

    /**
     * Handle DHCPREQUEST message. The default implementation just ignores it.
     *
     * @param localAddress the local address the request was received on
     * @param clientAddress the address the request came from
     * @param request the request message
     * @return DhcpMessage response message or <code>null</code> to ignore (don't reply to)
     *         it.
     */
    protected DhcpMessage handleREQUEST( InetSocketAddress localAddress,
        InetSocketAddress clientAddress, DhcpMessage request )
        throws DhcpException
    {
        if ( LOGGER.isDebugEnabled() )
        {
            LOGGER.debug( "Got REQUEST message: {} from: {}", request, clientAddress );
        }

        return null;
    }

    /**
     * Handle DHCPDISCOVER message. The default implementation just ignores it.
     *
     * @param localAddress the local address the request was received on
     * @param clientAddress the address the request came from
     * @param request the request message
     * @return DhcpMessage response message or <code>null</code> to ignore (don't reply to)
     *         it.
     * @throws DhcpException on handler failure
     */
    protected DhcpMessage handleDISCOVER( InetSocketAddress localAddress,
        InetSocketAddress clientAddress, DhcpMessage request )
        throws DhcpException
    {
        if ( LOGGER.isDebugEnabled() )
        {
            LOGGER.debug( "Got DISCOVER message: {} from: {}", request, clientAddress );
        }

        return null;
    }

    /**
     * Handle DHCPOFFER message. The default implementation just ignores it.
     *
     * @param localAddress the local address the request was received on
     * @param clientAddress the address the request came from
     * @param request the request message
     * @return DhcpMessage response message or <code>null</code> to ignore (don't reply to)
     *         it.
     * @throws DhcpException on handler failure
     */
    protected DhcpMessage handleOFFER( InetSocketAddress localAddress,
        InetSocketAddress clientAddress, DhcpMessage request )
        throws DhcpException
    {
        if ( LOGGER.isDebugEnabled() )
        {
            LOGGER.debug( "Got OFFER message: {} from: {}", request, clientAddress );
        }

        return null;
    }

    /**
     * Initialize a general DHCP reply message. Sets:
     * <ul>
     * <li>op=BOOTREPLY
     * <li>htype, hlen, xid, flags, giaddr, chaddr like in request message
     * <li>hops, secs to 0.
     * <li>server hostname to the hostname appropriate for the interface the
     * request was received on
     * <li>the server identifier set to the address of the interface the request
     * was received on
     * </ul>
     *
     * @param localAddress the local address the request was received on
     * @param request the request message to derive the reply from
     * @return DhcpMessage the initialized reply skeleton
     */
    protected final DhcpMessage initGeneralReply( InetSocketAddress localAddress,
        DhcpMessage request )
    {
        DhcpMessage reply = new DhcpMessage();

        reply.setOp( DhcpMessage.OP_BOOTREPLY );

        reply.setHardwareAddress( request.getHardwareAddress() );
        reply.setTransactionId( request.getTransactionId() );
        reply.setFlags( request.getFlags() );
        reply.setRelayAgentAddress( request.getRelayAgentAddress() );

        // set server hostname
        reply.setServerHostname( localAddress.getHostName() );

        // set server identifier based on the IF on which we received the packet
        reply.getOptions().add( new ServerIdentifier( localAddress.getAddress() ) );

        return reply;
    }

    /**
     * Check if an address is the zero-address (all bytes zero).
     *
     * @param addr raw address bytes
     * @return boolean true if every byte is zero
     */
    private boolean isZeroAddress( byte[] addr )
    {
        for ( int i = 0; i < addr.length; i++ )
        {
            if ( addr[i] != 0 )
            {
                return false;
            }
        }

        return true;
    }

    /**
     * Determine address on which to base selection. If the relay agent address is
     * set, we use the relay agent's address, otherwise we use the address we
     * received the request from.
     *
     * @param clientAddress the address the request came from
     * @param request the request message
     * @return InetAddress the address to base lease selection on
     */
    protected final InetAddress determineSelectionBase(
        InetSocketAddress clientAddress, DhcpMessage request )
    {
        // FIXME: do we know
        // a) the interface address over which we received a message (!)
        // b) the client address (if specified)
        // c) the relay agent address?

        // if the relay agent address is set, we use it as the selection base
        if ( !isZeroAddress( request.getRelayAgentAddress().getAddress() ) )
        {
            return request.getRelayAgentAddress();
        }

        return clientAddress.getAddress();
    }

    /**
     * Strip options that the client doesn't want, if the ParameterRequestList
     * option is present. Without that option all options are kept.
     *
     * @param request the request message carrying the (optional) ParameterRequestList
     * @param options the options field to filter in place
     */
    protected final void stripUnwantedOptions( DhcpMessage request,
        OptionsField options )
    {
        ParameterRequestList prl = ( ParameterRequestList ) request
            .getOptions().get( ParameterRequestList.class );

        if ( null != prl )
        {
            byte[] list = prl.getData();

            // remove every option whose tag is not in the client's requested list
            for ( Iterator i = options.iterator(); i.hasNext(); )
            {
                DhcpOption o = ( DhcpOption ) i.next();
                boolean found = false;

                for ( int j = 0; j < list.length; j++ )
                {
                    if ( list[j] == o.getTag() )
                    {
                        found = true;
                        break;
                    }
                }

                if ( !found )
                {
                    i.remove();
                }
            }
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.ml.nn;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.util.Map;
import java.util.Spliterator;
import org.apache.ignite.lang.IgniteUuid;
import org.apache.ignite.ml.math.exceptions.CardinalityException;
import org.apache.ignite.ml.math.functions.IgniteBiConsumer;
import org.apache.ignite.ml.math.functions.IgniteBiFunction;
import org.apache.ignite.ml.math.functions.IgniteDoubleFunction;
import org.apache.ignite.ml.math.functions.IgniteFunction;
import org.apache.ignite.ml.math.functions.IgniteTriFunction;
import org.apache.ignite.ml.math.functions.IntIntToDoubleFunction;
import org.apache.ignite.ml.math.primitives.matrix.Matrix;
import org.apache.ignite.ml.math.primitives.matrix.MatrixStorage;
import org.apache.ignite.ml.math.primitives.matrix.impl.DenseMatrix;
import org.apache.ignite.ml.math.primitives.vector.Vector;
/**
 * Convenient way to create matrix of replicated columns or rows from vector.
 * This class should be considered as utility class: not all matrix methods are implemented here, only those which
 * were necessary for MLPs. Unsupported operations throw {@link UnsupportedOperationException} or return {@code null}.
 */
class ReplicatedVectorMatrix implements Matrix {
    /** Vector to replicate. */
    private Vector vector;

    /** Flag determining is vector replicated as column or row. */
    private boolean asCol;

    /** Count of vector replications. */
    private int replicationCnt;

    /**
     * Construct ReplicatedVectorMatrix.
     *
     * @param vector Vector to replicate.
     * @param replicationCnt Count of replications.
     * @param asCol Should vector be replicated as a column or as a row.
     */
    ReplicatedVectorMatrix(Vector vector, int replicationCnt, boolean asCol) {
        this.vector = vector;
        this.asCol = asCol;
        this.replicationCnt = replicationCnt;
    }

    /**
     * Constructor for externalization.
     */
    public ReplicatedVectorMatrix() {
        // No-op.
    }

    /** {@inheritDoc} */
    @Override public boolean isSequentialAccess() {
        return vector.isSequentialAccess();
    }

    /** {@inheritDoc} */
    @Override public boolean isRandomAccess() {
        return vector.isRandomAccess();
    }

    /** {@inheritDoc} */
    @Override public boolean isDense() {
        return vector.isDense();
    }

    /** {@inheritDoc} */
    @Override public boolean isArrayBased() {
        return vector.isArrayBased();
    }

    /** {@inheritDoc} */
    @Override public boolean isDistributed() {
        return vector.isDistributed();
    }

    /** {@inheritDoc} */
    @Override public double maxValue() {
        // Max over the whole matrix equals max over the replicated vector.
        return vector.maxValue();
    }

    /** {@inheritDoc} */
    @Override public double minValue() {
        return vector.minValue();
    }

    /** {@inheritDoc} */
    @Override public Element maxElement() {
        // Reports the vector's maximum; the replication index is fixed to 0.
        return new Element() {
            @Override public double get() {
                return vector.maxElement().get();
            }

            @Override public int row() {
                return asCol ? vector.maxElement().index() : 0;
            }

            @Override public int column() {
                return asCol ? 0 : vector.maxElement().index();
            }

            @Override public void set(double val) {
                // No-op: writes through this Element view are silently ignored.
            }
        };
    }

    /** {@inheritDoc} */
    @Override public Element minElement() {
        // Reports the vector's minimum; the replication index is fixed to 0.
        return new Element() {
            @Override public double get() {
                return vector.minElement().get();
            }

            @Override public int row() {
                return asCol ? vector.minElement().index() : 0;
            }

            @Override public int column() {
                return asCol ? 0 : vector.minElement().index();
            }

            @Override public void set(double val) {
                // No-op: writes through this Element view are silently ignored.
            }
        };
    }

    /** {@inheritDoc} */
    @Override public Element getElement(int row, int col) {
        // Only the index along the replicated vector matters; the index along
        // the replication axis collapses to 0 for reporting purposes.
        Vector.Element el = asCol ? vector.getElement(row) : vector.getElement(col);

        int r = asCol ? el.index() : 0;
        int c = asCol ? 0 : el.index();

        return new Element() {
            @Override public double get() {
                return el.get();
            }

            @Override public int row() {
                return r;
            }

            @Override public int column() {
                return c;
            }

            @Override public void set(double val) {
                // No-op: writes through this Element view are silently ignored.
            }
        };
    }

    /** {@inheritDoc} */
    @Override public Matrix swapRows(int row1, int row2) {
        // Row swap is a no-op for a row-replicated matrix (all rows are equal).
        // NOTE(review): swap(...) mutates the shared underlying vector, so the
        // swap is visible through `this` as well as the returned matrix — confirm
        // callers rely on this aliasing.
        return asCol ? new ReplicatedVectorMatrix(swap(row1, row2), replicationCnt, asCol) : this;
    }

    /**
     * Swaps two entries of the underlying vector in place.
     *
     * @param idx1 First index.
     * @param idx2 Second index.
     * @return The (mutated) underlying vector.
     */
    private Vector swap(int idx1, int idx2) {
        double val = vector.getX(idx1);

        vector.setX(idx1, vector.getX(idx2));
        vector.setX(idx2, val);

        return vector;
    }

    /** {@inheritDoc} */
    @Override public Matrix swapColumns(int col1, int col2) {
        // Column swap is a no-op for a column-replicated matrix (all columns are equal).
        return asCol ? this : new ReplicatedVectorMatrix(swap(col1, col2), replicationCnt, asCol);
    }

    /** {@inheritDoc} */
    @Override public Matrix assign(double val) {
        // NOTE(review): if Vector#assign mutates in place, `this` is affected
        // as well as the returned matrix — confirm against Vector's contract.
        return new ReplicatedVectorMatrix(vector.assign(val), replicationCnt, asCol);
    }

    /** {@inheritDoc} */
    @Override public Matrix assign(double[][] vals) {
        // Arbitrary values cannot be represented by a replicated vector: materialize densely.
        return new DenseMatrix(vals);
    }

    /** {@inheritDoc} */
    @Override public Matrix assign(Matrix mtx) {
        return mtx.copy();
    }

    /** {@inheritDoc} */
    @Override public Matrix assign(IntIntToDoubleFunction fun) {
        // The function is evaluated with the replication index fixed to 0, so the
        // result stays representable as a replicated vector.
        Vector vec = asCol ? this.vector.assign(idx -> fun.apply(idx, 0)) : this.vector.assign(idx -> fun.apply(0, idx));

        return new ReplicatedVectorMatrix(vec, replicationCnt, asCol);
    }

    /** {@inheritDoc} */
    @Override public Matrix map(IgniteDoubleFunction<Double> fun) {
        Vector vec = vector.map(fun);

        return new ReplicatedVectorMatrix(vec, replicationCnt, asCol);
    }

    /** {@inheritDoc} */
    @Override public Matrix map(Matrix mtx, IgniteBiFunction<Double, Double, Double> fun) {
        throw new UnsupportedOperationException();
    }

    /** {@inheritDoc} */
    @Override public int nonZeroElements() {
        // Each non-zero entry of the vector appears once per replication.
        return vector.nonZeroElements() * (asCol ? columnSize() : rowSize());
    }

    /** {@inheritDoc} */
    @Override public Spliterator<Double> allSpliterator() {
        throw new UnsupportedOperationException();
    }

    /** {@inheritDoc} */
    @Override public Spliterator<Double> nonZeroSpliterator() {
        throw new UnsupportedOperationException();
    }

    /** {@inheritDoc} */
    @Override public Matrix assignColumn(int col, Vector vec) {
        // Materialize as a dense matrix: fill every replication with the
        // replicated vector first, then overwrite the requested column.
        int rows = asCol ? vector.size() : replicationCnt;
        int cols = asCol ? replicationCnt : vector.size();
        int times = asCol ? cols : rows;

        Matrix res = new DenseMatrix(rows, cols);

        IgniteBiConsumer<Integer, Vector> replicantAssigner = asCol ? res::assignColumn : res::assignRow;
        IgniteBiConsumer<Integer, Vector> assigner = res::assignColumn;

        assign(replicantAssigner, assigner, vector, vec, times, col);

        return res;
    }

    /** {@inheritDoc} */
    @Override public Matrix assignRow(int row, Vector vec) {
        // Materialize as a dense matrix: fill every replication with the
        // replicated vector first, then overwrite the requested row.
        int rows = asCol ? vector.size() : replicationCnt;
        int cols = asCol ? replicationCnt : vector.size();
        int times = asCol ? cols : rows;

        Matrix res = new DenseMatrix(rows, cols);

        IgniteBiConsumer<Integer, Vector> replicantAssigner = asCol ? res::assignColumn : res::assignRow;
        IgniteBiConsumer<Integer, Vector> assigner = res::assignRow;

        assign(replicantAssigner, assigner, vector, vec, times, row);

        return res;
    }

    /**
     * Fills the result with {@code times} replications of {@code replicant},
     * then assigns {@code vector} at position {@code idx}.
     */
    private void assign(IgniteBiConsumer<Integer, Vector> replicantAssigner,
        IgniteBiConsumer<Integer, Vector> assigner, Vector replicant, Vector vector, int times, int idx) {
        for (int i = 0; i < times; i++)
            replicantAssigner.accept(i, replicant);
        assigner.accept(idx, vector);
    }

    /** {@inheritDoc} */
    @Override public Vector foldRows(IgniteFunction<Vector, Double> fun) {
        throw new UnsupportedOperationException();
    }

    /** {@inheritDoc} */
    @Override public Vector foldColumns(IgniteFunction<Vector, Double> fun) {
        throw new UnsupportedOperationException();
    }

    /** {@inheritDoc} */
    @Override public <T> T foldMap(IgniteBiFunction<T, Double, T> foldFun, IgniteDoubleFunction<Double> mapFun,
        T zeroVal) {
        throw new UnsupportedOperationException();
    }

    /** {@inheritDoc} */
    @Override public boolean density(double threshold) {
        // Density is not tracked for this view.
        return false;
    }

    /** {@inheritDoc} */
    @Override public int columnSize() {
        return asCol ? replicationCnt : vector.size();
    }

    /** {@inheritDoc} */
    @Override public int rowSize() {
        return asCol ? vector.size() : replicationCnt;
    }

    /** {@inheritDoc} */
    @Override public Matrix divide(double x) {
        return new ReplicatedVectorMatrix(vector.divide(x), replicationCnt, asCol);
    }

    /** {@inheritDoc} */
    @Override public double get(int row, int col) {
        return asCol ? vector.get(row) : vector.get(col);
    }

    /** {@inheritDoc} */
    @Override public double getX(int row, int col) {
        return asCol ? vector.getX(row) : vector.getX(col);
    }

    /** {@inheritDoc} */
    @Override public MatrixStorage getStorage() {
        throw new UnsupportedOperationException();
    }

    /** {@inheritDoc} */
    @Override public Matrix copy() {
        Vector cp = vector.copy();

        return new ReplicatedVectorMatrix(cp, replicationCnt, asCol);
    }

    /** {@inheritDoc} */
    @Override public Matrix like(int rows, int cols) {
        // NOTE(review): the requested dimensions are ignored; the result always
        // mirrors this matrix's own replication shape — confirm callers expect this.
        Vector lk = vector.like(vector.size());

        return new ReplicatedVectorMatrix(lk, replicationCnt, asCol);
    }

    /** {@inheritDoc} */
    @Override public Vector likeVector(int crd) {
        throw new UnsupportedOperationException();
    }

    /** {@inheritDoc} */
    @Override public Matrix minus(Matrix mtx) {
        throw new UnsupportedOperationException();
    }

    /**
     * Specialized optimized version of minus for ReplicatedVectorMatrix.
     *
     * @param mtx Matrix to be subtracted.
     * @return new ReplicatedVectorMatrix resulting from subtraction.
     */
    public Matrix minus(ReplicatedVectorMatrix mtx) {
        // Only supported when both matrices replicate along the same axis, in
        // which case subtracting the underlying vectors is equivalent.
        if (isColumnReplicated() == mtx.isColumnReplicated()) {
            checkCardinality(mtx.rowSize(), mtx.columnSize());

            Vector minus = vector.minus(mtx.replicant());

            return new ReplicatedVectorMatrix(minus, replicationCnt, asCol);
        }

        throw new UnsupportedOperationException();
    }

    /** {@inheritDoc} */
    @Override public Matrix plus(double x) {
        throw new UnsupportedOperationException();
    }

    /** {@inheritDoc} */
    @Override public Matrix plus(Matrix mtx) {
        throw new UnsupportedOperationException();
    }

    /**
     * Specialized optimized version of plus for ReplicatedVectorMatrix.
     *
     * @param mtx Matrix to be added.
     * @return new ReplicatedVectorMatrix resulting from addition.
     */
    public Matrix plus(ReplicatedVectorMatrix mtx) {
        // Only supported when both matrices replicate along the same axis, in
        // which case adding the underlying vectors is equivalent.
        if (isColumnReplicated() == mtx.isColumnReplicated()) {
            checkCardinality(mtx.rowSize(), mtx.columnSize());

            Vector plus = vector.plus(mtx.replicant());

            return new ReplicatedVectorMatrix(plus, replicationCnt, asCol);
        }

        throw new UnsupportedOperationException();
    }

    /**
     * Checks that dimensions of this matrix are equal to given dimensions.
     *
     * @param rows Rows.
     * @param cols Columns.
     */
    private void checkCardinality(int rows, int cols) {
        if (rows != rowSize())
            throw new CardinalityException(rowSize(), rows);

        if (cols != columnSize())
            throw new CardinalityException(columnSize(), cols);
    }

    /** {@inheritDoc} */
    @Override public IgniteUuid guid() {
        return null;
    }

    /** {@inheritDoc} */
    @Override public Matrix set(int row, int col, double val) {
        // Writes go to the shared underlying vector, so the whole replicated
        // row (or column) observes the change.
        vector.set(asCol ? row : col, val);

        return this;
    }

    /** {@inheritDoc} */
    @Override public Matrix setRow(int row, double[] data) {
        // Not needed for MLP usage.
        return null;
    }

    /** {@inheritDoc} */
    @Override public Vector getRow(int row) {
        // Not needed for MLP usage.
        return null;
    }

    /** {@inheritDoc} */
    @Override public Matrix setColumn(int col, double[] data) {
        // Not needed for MLP usage.
        return null;
    }

    /** {@inheritDoc} */
    @Override public Vector getCol(int col) {
        // Not needed for MLP usage.
        return null;
    }

    /** {@inheritDoc} */
    @Override public Matrix setX(int row, int col, double val) {
        // Not needed for MLP usage.
        return null;
    }

    /** {@inheritDoc} */
    @Override public Matrix times(double x) {
        return new ReplicatedVectorMatrix(vector.times(x), replicationCnt, asCol);
    }

    /** {@inheritDoc} */
    @Override public Matrix times(Matrix mtx) {
        // Only the row-replicated case is supported: every row of the product
        // equals vector^T * mtx, so the result is again row-replicated.
        if (!asCol) {
            Vector row = vector.like(mtx.columnSize());

            for (int i = 0; i < mtx.columnSize(); i++)
                row.setX(i, vector.dot(mtx.getCol(i)));

            return new ReplicatedVectorMatrix(row, replicationCnt, false);
        }
        else
            throw new UnsupportedOperationException();
    }

    /** {@inheritDoc} */
    @Override public Vector times(Vector vec) {
        // A matrix-vector product has one entry per row of this matrix.
        // Fixed: the result was previously sized as vec.like(vec.size()), which
        // is only correct when the matrix happens to be square.
        Vector res = vec.like(rowSize());

        if (asCol) {
            // Row i of a column-replicated matrix is constant vector.getX(i),
            // so its dot product with vec is vec.sum() * vector.getX(i).
            double sum = vec.sum(); // Hoisted: loop-invariant.

            for (int i = 0; i < rowSize(); i++)
                res.setX(i, sum * vector.getX(i));
        }
        else {
            // Every row of a row-replicated matrix equals the vector, so every
            // entry of the product is the same dot product.
            double val = vector.dot(vec);

            for (int i = 0; i < rowSize(); i++)
                res.setX(i, val);
        }

        return res;
    }

    /** {@inheritDoc} */
    @Override public double sum() {
        // Each vector entry is counted once per replication.
        return vector.sum() * replicationCnt;
    }

    /** {@inheritDoc} */
    @Override public Matrix transpose() {
        // Transposition just flips the replication axis; the vector is shared.
        return new ReplicatedVectorMatrix(vector, replicationCnt, !asCol);
    }

    /** {@inheritDoc} */
    @Override public Vector viewRow(int row) {
        // Not needed for MLP usage.
        return null;
    }

    /** {@inheritDoc} */
    @Override public Vector viewColumn(int col) {
        // Not needed for MLP usage.
        return null;
    }

    /** {@inheritDoc} */
    @Override public Vector viewDiagonal() {
        // Not needed for MLP usage.
        return null;
    }

    /** {@inheritDoc} */
    @Override public void compute(int row, int col, IgniteTriFunction<Integer, Integer, Double, Double> f) {
        // This operation cannot be performed because computing function depends on both indexes and therefore
        // result of compute will be in general case not ReplicatedVectorMatrix.
        throw new UnsupportedOperationException();
    }

    /** {@inheritDoc} */
    @Override public void writeExternal(ObjectOutput out) throws IOException {
        throw new UnsupportedOperationException();
    }

    /** {@inheritDoc} */
    @Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
        throw new UnsupportedOperationException();
    }

    /** {@inheritDoc} */
    @Override public Map<String, Object> getMetaStorage() {
        return null;
    }

    /**
     * Returns true if matrix constructed by replicating vector as column and false otherwise.
     */
    public boolean isColumnReplicated() {
        return asCol;
    }

    /**
     * Returns replicated vector.
     */
    public Vector replicant() {
        return vector;
    }
}
| |
/**
* Copyright [2012-2014] PayPal Software Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ml.shifu.shifu.util;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import ml.shifu.shifu.container.obj.ColumnConfig;
import ml.shifu.shifu.container.obj.ColumnConfig.ColumnFlag;
import ml.shifu.shifu.container.obj.ColumnConfig.ColumnType;
import ml.shifu.shifu.container.obj.EvalConfig;
import ml.shifu.shifu.container.obj.ModelConfig;
import ml.shifu.shifu.container.obj.ModelTrainConf.ALGORITHM;
import ml.shifu.shifu.container.obj.RawSourceData.SourceType;
import ml.shifu.shifu.udf.CalculateStatsUDF;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.fs.FileStatus;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.Test;
import com.fasterxml.jackson.core.JsonGenerationException;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.ObjectMapper;
/**
 * CommonUtilsTest: unit tests for {@code CommonUtils} helpers — string/list
 * parsing, column config handling, model file discovery and local config sync.
 */
public class CommonUtilsTest {
    private static final Logger LOG = LoggerFactory
            .getLogger(CommonUtilsTest.class);

    /** Shared JSON mapper used to write config fixtures to disk. */
    private final ObjectMapper jsonMapper = new ObjectMapper();

    /** "[1, 2, 3]" should parse into the integer list [1, 2, 3]. */
    @Test
    public void stringToIntegerListTest() {
        Assert.assertEquals(Arrays.asList(new Integer[] { 1, 2, 3 }),
                CommonUtils.stringToIntegerList("[1, 2, 3]"));
    }

    // Disabled: exercises a full local config sync round trip with real files.
    // @Test
    public void syncTest() throws IOException {
        ModelConfig config = ModelConfig.createInitModelConfig(".",
                ALGORITHM.NN, "test");

        config.setModelSetName("testModel");

        jsonMapper.writerWithDefaultPrettyPrinter().writeValue(
                new File("ModelConfig.json"), config);

        ColumnConfig col = new ColumnConfig();
        col.setColumnName("ColumnA");

        List<ColumnConfig> columnConfigList = new ArrayList<ColumnConfig>();
        columnConfigList.add(col);

        config.getDataSet().setSource(SourceType.LOCAL);

        jsonMapper.writerWithDefaultPrettyPrinter().writeValue(
                new File("ColumnConfig.json"), columnConfigList);

        File file = null;

        file = new File("models");
        if (!file.exists()) {
            FileUtils.forceMkdir(file);
        }

        file = new File("models/model1.nn");
        if (!file.exists()) {
            if (file.createNewFile()) {
                BufferedWriter writer = new BufferedWriter(
                        new OutputStreamWriter(new FileOutputStream(file),
                                Constants.DEFAULT_CHARSET));
                writer.write("test string");
                writer.close();
            } else {
                LOG.warn("Create file {} failed", file.getAbsolutePath());
            }
        }

        file = new File("EvalSets/test");
        if (!file.exists()) {
            FileUtils.forceMkdir(file);
        }

        file = new File("EvalSets/test/EvalConfig.json");
        if (!file.exists()) {
            BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(
                    new FileOutputStream(file), Constants.DEFAULT_CHARSET));
            writer.write("test string");
            writer.close();
        }

        CommonUtils.copyConfFromLocalToHDFS(config);

        // Verify the whole model set layout was copied.
        file = new File("ModelSets");
        Assert.assertTrue(file.exists());

        file = new File("ModelSets/testModel");
        Assert.assertTrue(file.exists());

        file = new File("ModelSets/testModel/ModelConfig.json");
        Assert.assertTrue(file.exists());

        file = new File("ModelSets/testModel/ColumnConfig.json");
        Assert.assertTrue(file.exists());

        file = new File("ModelSets/testModel/ReasonCodeMap.json");
        Assert.assertTrue(file.exists());

        file = new File("ModelSets/testModel/models/model1.nn");
        Assert.assertTrue(file.exists());

        file = new File("ModelSets/testModel/EvalSets/test/EvalConfig.json");
        Assert.assertTrue(file.exists());

        // Clean up every fixture created above.
        file = new File("ModelSets");
        if (file.exists()) {
            FileUtils.deleteDirectory(file);
        }

        file = new File("ColumnConfig.json");
        FileUtils.deleteQuietly(file);

        file = new File("ModelConfig.json");
        FileUtils.deleteQuietly(file);

        FileUtils.deleteDirectory(new File("models"));
        FileUtils.deleteDirectory(new File("EvalSets"));
    }

    // Disabled: eval config sync depends on a commented-out CommonUtils call.
    // @Test
    public void syncUpEvalTest() throws IOException {
        ModelConfig config = ModelConfig.createInitModelConfig(".",
                ALGORITHM.NN, "test");

        config.setModelSetName("shifu");

        File file = new File("evals/EvalA");
        if (!file.exists()) {
            FileUtils.forceMkdir(file);
        }

        file = new File("testEval/EvalConfig.json");
        FileUtils.touch(file);

        // CommonUtils.copyEvalConfFromLocalToHDFS(config, "testEval");
        Assert.assertTrue(file.exists());

        FileUtils.deleteDirectory(new File("ModelSets"));
        FileUtils.deleteDirectory(new File("evals"));
    }

    /** A ModelConfig written to disk should load back equal to the original. */
    @Test
    public void loadModelConfigTest() throws JsonGenerationException,
            JsonMappingException, IOException {
        ModelConfig config = ModelConfig.createInitModelConfig(".", ALGORITHM.NN, "test");
        config.setModelSetName("shifu");

        jsonMapper.writerWithDefaultPrettyPrinter().writeValue(new File("ModelConfig.json"), config);

        ModelConfig anotherConfig = CommonUtils.loadModelConfig();

        Assert.assertEquals(config, anotherConfig);

        FileUtils.deleteQuietly(new File("ModelConfig.json"));
    }

    /** Only columns flagged finalSelect should survive the filter. */
    @Test
    public void getFinalSelectColumnConfigListTest() {
        Collection<ColumnConfig> configList = new ArrayList<ColumnConfig>();

        ColumnConfig config = new ColumnConfig();
        config.setColumnName("A");
        config.setFinalSelect(false);
        configList.add(config);

        config = new ColumnConfig();
        config.setFinalSelect(true);
        config.setColumnName("B");
        configList.add(config);

        config = new ColumnConfig();
        config.setFinalSelect(false);
        config.setColumnName("C");
        configList.add(config);

        configList = CommonUtils.getFinalSelectColumnConfigList(configList);

        Assert.assertTrue(configList.size() == 1);
    }

    /** Categorical bin lookup returns the index of the value in binCategory. */
    @Test
    public void getBinNumTest() {
        ColumnConfig config = new ColumnConfig();
        config.setColumnName("A");
        config.setColumnType(ColumnType.C);
        config.setBinCategory(Arrays.asList(new String[] { "2", "1", "3" }));

        int rt = CommonUtils.getBinNum(config, "2");
        Assert.assertTrue(rt == 0);
    }

    /** Documents current behavior: an empty bracket pair yields one element. */
    @Test
    public void testStringToIntegerList() {
        Assert.assertEquals(CommonUtils.stringToIntegerList("[]").size(), 1);
    }

    // @Test
    // public void assembleDataPairTest() throws Exception {
    // Map<String, String> rawDataMap = new HashMap<String, String>();
    // rawDataMap.put("ColumnA", "TestValue");
    //
    // ColumnConfig config = new ColumnConfig();
    // config.setColumnName("ColumnA");
    // List<ColumnConfig> columnConfigList = new ArrayList<ColumnConfig>();
    // columnConfigList.add(config);
    //
    // MLDataPair dp = CommonUtils.assembleDataPair(columnConfigList,
    // rawDataMap);
    // Assert.assertTrue(dp.getInput().getData().length == 0);
    //
    // Map<String, Object> objDataMap = new HashMap<String, Object>();
    // objDataMap.put("ColumnA", 10);
    // config.setFinalSelect(true);
    // config.setMean(12.0);
    // config.setStdDev(4.6);
    // MLDataPair pair = CommonUtils.assembleDataPair(columnConfigList,
    // objDataMap);
    // Assert.assertTrue(pair.getInput().getData()[0] < 0.0);
    // }

    /** The column flagged Target should be reported by its column number. */
    @Test
    public void getTargetColumnNumTest() {
        List<ColumnConfig> list = new ArrayList<ColumnConfig>();

        ColumnConfig config = new ColumnConfig();
        config.setColumnFlag(null);
        list.add(config);

        config = new ColumnConfig();
        config.setColumnFlag(ColumnFlag.Target);
        config.setColumnNum(20);
        list.add(config);

        config = new ColumnConfig();
        config.setColumnFlag(null);
        list.add(config);

        Assert.assertEquals(Integer.valueOf(20), CommonUtils.getTargetColumnNum(list));
    }

    @Test
    public void loadModelsTest() {
        // TODO load models test
    }

    /** Header and data arrays should zip into a name -> value map. */
    @Test
    public void getRawDataMapTest() {
        Map<String, String> map = CommonUtils.getRawDataMap(new String[] { "input1", "input2" }, new String[] { "1", "2" });
        Assert.assertTrue(map.containsKey("input2"));
        Assert.assertTrue(map.keySet().size() == 2);
    }

    // NOTE(review): despite its name, this test exercises stringToIntegerList.
    @Test
    public void stringToDoubleListTest() {
        String str = "[0,1,2,3]";
        List<Integer> list = CommonUtils.stringToIntegerList(str);
        Assert.assertTrue(list.get(0) == 0);
    }

    // Disabled: relies on local conf files for meta/remove column lists.
    // @Test
    public void updateColumnConfigFlagsTest() throws IOException {
        ModelConfig config = ModelConfig.createInitModelConfig("test", ALGORITHM.NN, "test");

        config.getDataSet().setMetaColumnNameFile("./conf/meta_column_conf.txt");
        config.getVarSelect().setForceRemoveColumnNameFile("./conf/remove_column_list.txt");

        List<ColumnConfig> list = new ArrayList<ColumnConfig>();

        ColumnConfig e = new ColumnConfig();
        e.setColumnName("a");
        list.add(e);

        e = new ColumnConfig();
        e.setColumnName("c");
        list.add(e);

        e = new ColumnConfig();
        e.setColumnName("d");
        list.add(e);

        CommonUtils.updateColumnConfigFlags(config, list);

        Assert.assertTrue(list.get(0).isMeta());
    }

    /** Documents current behavior of integer parsing with empty entries. */
    @Test
    public void stringToStringListTest() {
        String str = "[1,2,3,,4]";
        List<Integer> list = CommonUtils.stringToIntegerList(str);
        Assert.assertTrue(list.get(0) == 1);
    }

    /** Columns whose names start with "derived_" should be reported. */
    @Test
    public void getDerivedColumnNamesTest() {
        List<ColumnConfig> list = new ArrayList<ColumnConfig>();

        ColumnConfig e = new ColumnConfig();
        e.setColumnName("a");
        list.add(e);

        e = new ColumnConfig();
        e.setColumnName("derived_c");
        list.add(e);

        e = new ColumnConfig();
        e.setColumnName("d");
        list.add(e);

        List<String> output = CommonUtils.getDerivedColumnNames(list);

        Assert.assertEquals(output.get(0), "derived_c");
    }

    /** Loads a checked-in ModelConfig fixture and spot-checks one field. */
    @Test
    public void testLoadModelConfig() throws IOException {
        ModelConfig config = CommonUtils.loadModelConfig(
                "src/test/resources/example/wdbc/wdbcModelSetLocal/ModelConfig.json", SourceType.LOCAL);
        Assert.assertEquals(config.getDataSet().getNegTags().get(0), "B");
    }

    /** A tab character should be escaped for Pig as a literal backslash-t. */
    @Test
    public void testEscape() {
        Assert.assertEquals("\\\\t", CommonUtils.escapePigString("\t"));
    }

    /** Removes the "common-utils" scratch directory after the suite. */
    @AfterClass
    public void delete() throws IOException {
        FileUtils.deleteDirectory(new File("common-utils"));
    }

    /** Model discovery should honor custom paths, globs and missing paths. */
    @Test
    public void testFindModels() throws IOException {
        ModelConfig modelConfig = CommonUtils.loadModelConfig(
                "src/test/resources/example/cancer-judgement/ModelStore/ModelSet1/ModelConfig.json", SourceType.LOCAL);

        File srcModels = new File("src/test/resources/example/cancer-judgement/ModelStore/ModelSet1/models");
        File dstModels = new File("models");
        FileUtils.copyDirectory(srcModels, dstModels);

        // Default lookup: the local "models" directory.
        List<FileStatus> modelFiles = CommonUtils.findModels(modelConfig, null, SourceType.LOCAL);
        Assert.assertEquals(5, modelFiles.size());

        // Null and blank custom paths fall back to the default location.
        EvalConfig evalConfig = modelConfig.getEvalConfigByName("EvalA");
        evalConfig.setCustomPaths(new HashMap<String, String>());
        evalConfig.getCustomPaths().put(Constants.KEY_MODELS_PATH, null);
        modelFiles = CommonUtils.findModels(modelConfig, evalConfig, SourceType.LOCAL);
        Assert.assertEquals(5, modelFiles.size());

        evalConfig.getCustomPaths().put(Constants.KEY_MODELS_PATH, " ");
        modelFiles = CommonUtils.findModels(modelConfig, evalConfig, SourceType.LOCAL);
        Assert.assertEquals(5, modelFiles.size());

        FileUtils.deleteDirectory(dstModels);

        // Explicit directory, single file, missing path, and glob patterns.
        evalConfig.getCustomPaths().put(Constants.KEY_MODELS_PATH, "./src/test/resources/example/cancer-judgement/ModelStore/ModelSet1/models");
        modelFiles = CommonUtils.findModels(modelConfig, evalConfig, SourceType.LOCAL);
        Assert.assertEquals(5, modelFiles.size());

        evalConfig.getCustomPaths().put(Constants.KEY_MODELS_PATH, "./src/test/resources/example/cancer-judgement/ModelStore/ModelSet1/models/model0.nn");
        modelFiles = CommonUtils.findModels(modelConfig, evalConfig, SourceType.LOCAL);
        Assert.assertEquals(1, modelFiles.size());

        evalConfig.getCustomPaths().put(Constants.KEY_MODELS_PATH, "not-exists");
        modelFiles = CommonUtils.findModels(modelConfig, evalConfig, SourceType.LOCAL);
        Assert.assertEquals(0, modelFiles.size());

        evalConfig.getCustomPaths().put(Constants.KEY_MODELS_PATH, "./src/test/resources/example/cancer-judgement/ModelStore/ModelSet1/models/*.nn");
        modelFiles = CommonUtils.findModels(modelConfig, evalConfig, SourceType.LOCAL);
        Assert.assertEquals(5, modelFiles.size());

        evalConfig.getCustomPaths().put(Constants.KEY_MODELS_PATH, "./src/test/resources/example/cancer-judgement/ModelStore/ModelSet{0,1,9}/*/*.nn");
        modelFiles = CommonUtils.findModels(modelConfig, evalConfig, SourceType.LOCAL);
        Assert.assertEquals(5, modelFiles.size());
    }

    /** A bracketed list of doubles should parse into the same Double values. */
    @Test
    public void testStringToArray() {
        String input = "[-37.075125208681136, 0.5043788517677587, 1.2588712402838798, 2.543219666931007, 4.896511355654414, 8.986345381526105, 17.06859410430839, 33.557046979865774, 73.27777777777777, 231.63698630136986, 100000.0]";
        List<Double> output = CommonUtils.stringToDoubleList(input);
        Assert.assertEquals(
                output,
                Arrays.asList(new Double[] { -37.075125208681136,
                        0.5043788517677587, 1.2588712402838798,
                        2.543219666931007, 4.896511355654414,
                        8.986345381526105, 17.06859410430839,
                        33.557046979865774, 73.27777777777777,
                        231.63698630136986, 100000.0 }));
    }

    /** Joining with the category separator must round-trip commas in values. */
    @Test
    public void testCategoryVauleSepartor() {
        List<String> strList = new ArrayList<String>();
        strList.add("[Hello, Testing");
        strList.add("Haha, It's a testing]");

        String joinStr = StringUtils.join(strList, CalculateStatsUDF.CATEGORY_VAL_SEPARATOR);
        List<String> recoverList = CommonUtils.stringToStringList(joinStr, CalculateStatsUDF.CATEGORY_VAL_SEPARATOR);

        Assert.assertEquals(2, recoverList.size());
        // The surrounding brackets are stripped from the first and last entries.
        Assert.assertEquals(strList.get(0).substring(1), recoverList.get(0));
        Assert.assertEquals(strList.get(1).substring(0, strList.get(1).length() - 1), recoverList.get(1));
    }

    /** Lexicographic name sorting should order modelN.nn files numerically here. */
    @Test
    public void testSortFileNames() {
        File[] modelFiles = new File[5];
        modelFiles[0] = new File("model3.nn");
        modelFiles[1] = new File("model1.nn");
        modelFiles[2] = new File("model0.nn");
        modelFiles[3] = new File("model4.nn");
        modelFiles[4] = new File("model2.nn");

        Arrays.sort(modelFiles, new Comparator<File>() {
            @Override
            public int compare(File from, File to) {
                return from.getName().compareTo(to.getName());
            }
        });

        Assert.assertEquals(modelFiles[0].getName(), "model0.nn");
        Assert.assertEquals(modelFiles[4].getName(), "model4.nn");
    }

    /** Removes column list fixture files possibly created by other tests. */
    @AfterClass
    public void tearDown() {
        FileUtils.deleteQuietly(new File(Constants.DEFAULT_META_COLUMN_FILE));
        FileUtils.deleteQuietly(new File(Constants.DEFAULT_CATEGORICAL_COLUMN_FILE));
        FileUtils.deleteQuietly(new File(Constants.DEFAULT_FORCESELECT_COLUMN_FILE));
        FileUtils.deleteQuietly(new File(Constants.DEFAULT_FORCEREMOVE_COLUMN_FILE));
        FileUtils.deleteQuietly(new File("Eval1" + Constants.DEFAULT_EVALSCORE_META_COLUMN_FILE));
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.dataformat.bindy;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import org.apache.camel.dataformat.bindy.annotation.BindyConverter;
import org.apache.camel.dataformat.bindy.annotation.KeyValuePairField;
import org.apache.camel.dataformat.bindy.annotation.Link;
import org.apache.camel.dataformat.bindy.annotation.Message;
import org.apache.camel.dataformat.bindy.annotation.OneToMany;
import org.apache.camel.dataformat.bindy.annotation.Section;
import org.apache.camel.dataformat.bindy.util.ConverterUtils;
import org.apache.camel.util.ObjectHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* The BindyKeyValuePairFactory is the class who allows to bind data of type key
* value pair. Such format exist in financial messages FIX. This class allows to
* generate a model associated to message, bind data from a message to the
* POJOs, export data of POJOs to a message and format data into String, Date,
* Double, ... according to the format/pattern defined
*/
public class BindyKeyValuePairFactory extends BindyAbstractFactory implements BindyFactory {
    private static final Logger LOG = LoggerFactory.getLogger(BindyKeyValuePairFactory.class);
    // Key/tag number declared in @KeyValuePairField (e.g. a FIX tag) -> its annotation, in declaration order.
    private Map<Integer, KeyValuePairField> keyValuePairFields = new LinkedHashMap<Integer, KeyValuePairField>();
    // Key/tag number -> the annotated model field itself, in declaration order.
    private Map<Integer, Field> annotatedFields = new LinkedHashMap<Integer, Field>();
    // Section name -> section number; presumably filled from @Section annotations — usage not visible in this chunk.
    private Map<String, Integer> sections = new HashMap<String, Integer>();
    // Separator between a key and its value (e.g. "=" in FIX), configured via @Message.
    private String keyValuePairSeparator;
    // Separator between two key/value pairs; presumably also configured via @Message.
    private String pairSeparator;
    // Whether the generated message must preserve the ordering of the model; usage not visible in this chunk.
    private boolean messageOrdered;
    /**
     * Creates the factory for the given annotated model class and initializes
     * the key/value pair specific metadata (annotated fields and separators).
     *
     * @param type the root model class annotated with Bindy annotations
     * @throws Exception if scanning or initializing the model fails
     */
    public BindyKeyValuePairFactory(Class<?> type) throws Exception {
        super(type);
        // Initialize what is specific to Key Value Pair model
        initKeyValuePairModel();
    }
    /**
     * Method used to initialize the model representing the classes which will
     * bind the data. This process scans the annotated classes and fields of the
     * model and then retrieves the required message parameters, such as the
     * pair separator and the key/value pair separator.
     *
     * @throws Exception if scanning the annotated fields or reading the message
     *             parameters fails
     */
    public void initKeyValuePairModel() throws Exception {
        // Find annotated KeyValuePairfields declared in the Model classes
        initAnnotatedFields();
        // Initialize key value pair parameter(s)
        initMessageParameters();
    }
@Override
public void initAnnotatedFields() {
for (Class<?> cl : models) {
List<Field> linkFields = new ArrayList<Field>();
for (Field field : cl.getDeclaredFields()) {
KeyValuePairField keyValuePairField = field.getAnnotation(KeyValuePairField.class);
if (keyValuePairField != null) {
if (LOG.isDebugEnabled()) {
LOG.debug("Key declared in the class : {}, key : {}, Field : {}", new Object[]{cl.getName(), keyValuePairField.tag(), keyValuePairField});
}
keyValuePairFields.put(keyValuePairField.tag(), keyValuePairField);
annotatedFields.put(keyValuePairField.tag(), field);
}
Link linkField = field.getAnnotation(Link.class);
if (linkField != null) {
if (LOG.isDebugEnabled()) {
LOG.debug("Class linked : {}, Field {}", cl.getName(), field);
}
linkFields.add(field);
}
}
if (!linkFields.isEmpty()) {
annotatedLinkFields.put(cl.getName(), linkFields);
}
}
}
@Override
public void bind(List<String> data, Map<String, Object> model, int line) throws Exception {
// Map to hold the model @OneToMany classes while binding
Map<String, List<Object>> lists = new HashMap<String, List<Object>>();
bind(data, model, line, lists);
}
public void bind(List<String> data, Map<String, Object> model, int line, Map<String, List<Object>> lists) throws Exception {
Map<Integer, List<String>> results = new HashMap<Integer, List<String>>();
LOG.debug("Key value pairs data : {}", data);
// Separate the key from its value
// e.g 8=FIX 4.1 --> key = 8 and Value = FIX 4.1
ObjectHelper.notNull(keyValuePairSeparator, "Key Value Pair not defined in the @Message annotation");
// Generate map of key value
// We use a Map of List as we can have the same key several times
// (relation one to many)
for (String s : data) {
// Get KeyValuePair
String[] keyValuePair = s.split(getKeyValuePairSeparator());
// Extract only if value is populated in key:value pair in incoming message.
if (keyValuePair.length > 1) {
// Extract Key
int key = Integer.parseInt(keyValuePair[0]);
// Extract key value
String value = keyValuePair[1];
LOG.debug("Key: {}, value: {}", key, value);
// Add value to the Map using key value as key
if (!results.containsKey(key)) {
List<String> list = new LinkedList<String>();
list.add(value);
results.put(key, list);
} else {
List<String> list = results.get(key);
list.add(value);
}
}
}
// Iterate over the model
for (Class<?> clazz : models) {
Object obj = model.get(clazz.getName());
if (obj != null) {
// Generate model from key value map
generateModelFromKeyValueMap(clazz, obj, results, line, lists);
}
}
}
    /**
     * Recursively populates the model object of {@code clazz} — and, through
     * any {@code @OneToMany} link, the objects of the mapped target classes —
     * from the tag -&gt; values map extracted from the incoming message.
     *
     * @param clazz   model class whose annotated fields are filled
     * @param obj     instance to fill (one-to-one relation), or null when the
     *                one-to-many target objects held in {@code lists} are filled
     * @param results values received from the message, keyed by tag number
     * @param line    line number of the record, used for error reporting
     * @param lists   one-to-many target objects, keyed by target class name
     * @throws Exception if a mandatory tag is missing, a value cannot be
     *                   parsed, or a field cannot be set by reflection
     */
    private void generateModelFromKeyValueMap(Class<?> clazz, Object obj, Map<Integer, List<String>> results, int line, Map<String, List<Object>> lists) throws Exception {
        for (Field field : clazz.getDeclaredFields()) {
            field.setAccessible(true);
            KeyValuePairField keyValuePairField = field.getAnnotation(KeyValuePairField.class);
            if (keyValuePairField != null) {
                // Tag number bound to this field
                int key = keyValuePairField.tag();
                // Value(s) received from the message for this tag, if any
                List<String> values = results.get(key);
                String value = null;
                // No data received for this tag
                if (values == null) {
                    /*
                     * The relation is one to one So we check if we are in a
                     * target class and if the field is mandatory
                     */
                    if (obj != null) {
                        // Check mandatory field
                        if (keyValuePairField.required()) {
                            throw new IllegalArgumentException("The mandatory key/tag : " + key + " has not been defined !");
                        }
                        Object result = getDefaultValueForPrimitive(field.getType());
                        try {
                            field.set(obj, result);
                        } catch (Exception e) {
                            throw new IllegalArgumentException("Setting of field " + field + " failed for object : " + obj + " and result : " + result);
                        }
                    } else {
                        /*
                         * The relation is one to many So, we create an object
                         * with empty fields and we don't check if the fields
                         * are mandatory
                         */
                        // Get List from Map
                        List<Object> l = lists.get(clazz.getName());
                        if (l != null) {
                            // Reuse the first object if one already exists in the list
                            if (!l.isEmpty()) {
                                obj = l.get(0);
                            } else {
                                obj = clazz.newInstance();
                            }
                            Object result = getDefaultValueForPrimitive(field.getType());
                            try {
                                field.set(obj, result);
                            } catch (Exception e) {
                                throw new IllegalArgumentException("Setting of field " + field + " failed for object : " + obj + " and result : " + result);
                            }
                            // Add object created to the list
                            if (!l.isEmpty()) {
                                l.set(0, obj);
                            } else {
                                l.add(0, obj);
                            }
                            // and to the Map
                            lists.put(clazz.getName(), l);
                            // Reset obj to null
                            obj = null;
                        } else {
                            throw new IllegalArgumentException("The list of values is empty for the following key : " + key + " defined in the class : " + clazz.getName());
                        }
                    } // end of test if obj != null
                } else {
                    // Data have been retrieved from message
                    if (values.size() >= 1) {
                        if (obj != null) {
                            // Relation OneToOne
                            value = values.get(0);
                            Object result = null;
                            if (value != null) {
                                // Create format object to format the field
                                FormattingOptions formattingOptions = ConverterUtils.convert(keyValuePairField,
                                        field.getType(),
                                        field.getAnnotation(BindyConverter.class),
                                        getLocale());
                                Format<?> format = formatFactory.getFormat(formattingOptions);
                                // format the value of the key received
                                result = formatField(format, value, key, line);
                                LOG.debug("Value formated : {}", result);
                            } else {
                                result = getDefaultValueForPrimitive(field.getType());
                            }
                            try {
                                field.set(obj, result);
                            } catch (Exception e) {
                                throw new IllegalArgumentException("Setting of field " + field + " failed for object : " + obj + " and result : " + result);
                            }
                        } else {
                            // Get List from Map
                            List<Object> l = lists.get(clazz.getName());
                            if (l != null) {
                                // Relation OneToMany
                                for (int i = 0; i < values.size(); i++) {
                                    // Reuse the i-th object if it already exists
                                    if ((!l.isEmpty()) && (l.size() > i)) {
                                        obj = l.get(i);
                                    } else {
                                        obj = clazz.newInstance();
                                    }
                                    value = values.get(i);
                                    // Create format object to format the field
                                    FormattingOptions formattingOptions = ConverterUtils.convert(keyValuePairField,
                                            field.getType(),
                                            field.getAnnotation(BindyConverter.class),
                                            getLocale());
                                    Format<?> format = formatFactory.getFormat(formattingOptions);
                                    // format the value of the key received
                                    Object result = formatField(format, value, key, line);
                                    LOG.debug("Value formated : {}", result);
                                    try {
                                        if (value != null) {
                                            field.set(obj, result);
                                        } else {
                                            field.set(obj, getDefaultValueForPrimitive(field.getType()));
                                        }
                                    } catch (Exception e) {
                                        throw new IllegalArgumentException("Setting of field " + field + " failed for object: " + obj + " and result: " + result);
                                    }
                                    // Add object created to the list
                                    if ((!l.isEmpty()) && (l.size() > i)) {
                                        l.set(i, obj);
                                    } else {
                                        l.add(i, obj);
                                    }
                                    // and to the Map
                                    lists.put(clazz.getName(), l);
                                    // Reset obj to null
                                    obj = null;
                                }
                            } else {
                                throw new IllegalArgumentException("The list of values is empty for the following key: " + key + " defined in the class: " + clazz.getName());
                            }
                        }
                    } else {
                        // No values found from message
                        Object result = getDefaultValueForPrimitive(field.getType());
                        try {
                            field.set(obj, result);
                        } catch (Exception e) {
                            throw new IllegalArgumentException("Setting of field " + field + " failed for object: " + obj + " and result: " + result);
                        }
                    }
                }
            }
            OneToMany oneToMany = field.getAnnotation(OneToMany.class);
            if (oneToMany != null) {
                String targetClass = oneToMany.mappedTo();
                if (!targetClass.equals("")) {
                    // Class cl = Class.forName(targetClass); Does not work in
                    // OSGI when class is defined in another bundle
                    Class<?> cl = null;
                    try {
                        cl = Thread.currentThread().getContextClassLoader().loadClass(targetClass);
                    } catch (ClassNotFoundException e) {
                        cl = getClass().getClassLoader().loadClass(targetClass);
                    }
                    if (!lists.containsKey(cl.getName())) {
                        lists.put(cl.getName(), new ArrayList<Object>());
                    }
                    // Recurse with obj == null so the target class follows the
                    // one-to-many branch above
                    generateModelFromKeyValueMap(cl, null, results, line, lists);
                    // Add list of objects
                    field.set(obj, lists.get(cl.getName()));
                } else {
                    throw new IllegalArgumentException("No target class has been defined in @OneToMany annotation");
                }
            }
        }
    }
/**
*
*/
@Override
public String unbind(Map<String, Object> model) throws Exception {
StringBuilder builder = new StringBuilder();
Map<Integer, KeyValuePairField> keyValuePairFieldsSorted = new TreeMap<Integer, KeyValuePairField>(keyValuePairFields);
Iterator<Integer> it = keyValuePairFieldsSorted.keySet().iterator();
// Map containing the OUT position of the field
// The key is double and is created using the position of the field and
// location of the class in the message (using section)
Map<Integer, String> positions = new TreeMap<Integer, String>();
// Check if separator exists
ObjectHelper.notNull(this.pairSeparator, "The pair separator has not been instantiated or property not defined in the @Message annotation");
char separator = ConverterUtils.getCharDelimiter(this.getPairSeparator());
if (LOG.isDebugEnabled()) {
LOG.debug("Separator converted: '0x{}', from: {}", Integer.toHexString(separator), this.getPairSeparator());
}
while (it.hasNext()) {
KeyValuePairField keyValuePairField = keyValuePairFieldsSorted.get(it.next());
ObjectHelper.notNull(keyValuePairField, "KeyValuePair");
// Retrieve the field
Field field = annotatedFields.get(keyValuePairField.tag());
// Change accessibility to allow to read protected/private fields
field.setAccessible(true);
if (LOG.isDebugEnabled()) {
LOG.debug("Tag: {}, Field type: {}, class: {}", new Object[]{keyValuePairField.tag(), field.getType(), field.getDeclaringClass().getName()});
}
// Retrieve the format, pattern and precision associated to the type
Class<?> type = field.getType();
// Create format
FormattingOptions formattingOptions = ConverterUtils.convert(keyValuePairField,
field.getType(),
field.getAnnotation(BindyConverter.class),
getLocale());
Format<Object> format = (Format<Object>) formatFactory.getFormat(formattingOptions);
// Get object to be formatted
Object obj = model.get(field.getDeclaringClass().getName());
if (obj != null) {
// Get field value
Object keyValue = field.get(obj);
if (this.isMessageOrdered()) {
// Generate a key using the number of the section
// and the position of the field
Integer key1 = sections.get(obj.getClass().getName());
Integer key2 = keyValuePairField.position();
LOG.debug("Key of the section: {}, and the field: {}", key1, key2);
Integer keyGenerated = generateKey(key1, key2);
if (LOG.isDebugEnabled()) {
LOG.debug("Key generated: {}, for section: {}", String.valueOf(keyGenerated), key1);
}
// Add value to the list if not null
if (keyValue != null) {
// Format field value
String valueFormatted;
try {
valueFormatted = format.format(keyValue);
} catch (Exception e) {
throw new IllegalArgumentException("Formatting error detected for the tag: " + keyValuePairField.tag(), e);
}
// Create the key value string
String value = keyValuePairField.tag() + this.getKeyValuePairSeparator() + valueFormatted;
if (LOG.isDebugEnabled()) {
LOG.debug("Value to be formatted: {}, for the tag: {}, and its formatted value: {}", new Object[]{keyValue, keyValuePairField.tag(), valueFormatted});
}
// Add the content to the TreeMap according to the
// position defined
positions.put(keyGenerated, value);
if (LOG.isDebugEnabled()) {
LOG.debug("Positions size: {}", positions.size());
}
}
} else {
// Add value to the list if not null
if (keyValue != null) {
// Format field value
String valueFormatted;
try {
valueFormatted = format.format(keyValue);
} catch (Exception e) {
throw new IllegalArgumentException("Formatting error detected for the tag: " + keyValuePairField.tag(), e);
}
// Create the key value string
String value = keyValuePairField.tag() + this.getKeyValuePairSeparator() + valueFormatted + separator;
// Add content to the stringBuilder
builder.append(value);
if (LOG.isDebugEnabled()) {
LOG.debug("Value added: {}{}{}{}", new Object[]{keyValuePairField.tag(), this.getKeyValuePairSeparator(), valueFormatted, separator});
}
}
}
}
}
// Iterate through the list to generate
// the message according to the order/position
if (this.isMessageOrdered()) {
Iterator<Integer> posit = positions.keySet().iterator();
while (posit.hasNext()) {
String value = positions.get(posit.next());
if (LOG.isDebugEnabled()) {
LOG.debug("Value added at the position ({}) : {}{}", new Object[]{posit, value, separator});
}
builder.append(value + separator);
}
}
return builder.toString();
}
private Object formatField(Format<?> format, String value, int tag, int line) throws Exception {
Object obj = null;
if (value != null) {
// Format field value
try {
obj = format.parse(value);
} catch (Exception e) {
throw new IllegalArgumentException("Parsing error detected for field defined at the tag: " + tag + ", line: " + line, e);
}
}
return obj;
}
/**
* Find the pair separator used to delimit the key value pair fields
*/
public String getPairSeparator() {
return pairSeparator;
}
/**
* Find the key value pair separator used to link the key with its value
*/
public String getKeyValuePairSeparator() {
return keyValuePairSeparator;
}
/**
* Flag indicating if the message must be ordered
*
* @return boolean
*/
public boolean isMessageOrdered() {
return messageOrdered;
}
/**
* Get parameters defined in @Message annotation
*/
private void initMessageParameters() {
if ((pairSeparator == null) || (keyValuePairSeparator == null)) {
for (Class<?> cl : models) {
// Get annotation @Message from the class
Message message = cl.getAnnotation(Message.class);
// Get annotation @Section from the class
Section section = cl.getAnnotation(Section.class);
if (message != null) {
// Get Pair Separator parameter
ObjectHelper.notNull(message.pairSeparator(), "No Pair Separator has been defined in the @Message annotation");
pairSeparator = message.pairSeparator();
LOG.debug("Pair Separator defined for the message: {}", pairSeparator);
// Get KeyValuePair Separator parameter
ObjectHelper.notNull(message.keyValuePairSeparator(), "No Key Value Pair Separator has been defined in the @Message annotation");
keyValuePairSeparator = message.keyValuePairSeparator();
LOG.debug("Key Value Pair Separator defined for the message: {}", keyValuePairSeparator);
// Get carriage return parameter
crlf = message.crlf();
LOG.debug("Carriage return defined for the message: {}", crlf);
// Get isOrdered parameter
messageOrdered = message.isOrdered();
LOG.debug("Is the message ordered in output: {}", messageOrdered);
}
if (section != null) {
// BigIntegerFormatFactory if section number is not null
ObjectHelper.notNull(section.number(), "No number has been defined for the section");
// Get section number and add it to the sections
sections.put(cl.getName(), section.number());
}
}
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.olingo.server.tecsvc.provider;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import org.apache.olingo.commons.api.ODataException;
import org.apache.olingo.commons.api.edm.FullQualifiedName;
import org.apache.olingo.commons.api.edm.provider.CsdlAction;
import org.apache.olingo.commons.api.edm.provider.CsdlParameter;
import org.apache.olingo.commons.api.edm.provider.CsdlReturnType;
/**
 * Supplies the {@link CsdlAction} definitions (bound and unbound) of the
 * technical-service EDM, looked up by fully qualified action name.
 */
public class ActionProvider {
  // Bound Actions
  public static final FullQualifiedName nameBAESAllPrimRTETAllPrim =
      new FullQualifiedName(SchemaProvider.NAMESPACE, "BAESAllPrimRTETAllPrim");
  public static final FullQualifiedName nameBAESTwoKeyNavRTESTwoKeyNav =
      new FullQualifiedName(SchemaProvider.NAMESPACE, "BAESTwoKeyNavRTESTwoKeyNav");
  public static final FullQualifiedName nameBAESTwoKeyNavRTESKeyNav =
      new FullQualifiedName(SchemaProvider.NAMESPACE, "BAESTwoKeyNavRTESKeyNav");
  public static final FullQualifiedName nameBAETBaseTwoKeyNavRTETBaseTwoKeyNav =
      new FullQualifiedName(SchemaProvider.NAMESPACE, "BAETBaseTwoKeyNavRTETBaseTwoKeyNav");
  public static final FullQualifiedName nameBAETTwoBaseTwoKeyNavRTETBaseTwoKeyNav =
      new FullQualifiedName(SchemaProvider.NAMESPACE, "BAETTwoBaseTwoKeyNavRTETBaseTwoKeyNav");
  public static final FullQualifiedName nameBAETTwoKeyNavRTETTwoKeyNav =
      new FullQualifiedName(SchemaProvider.NAMESPACE, "BAETTwoKeyNavRTETTwoKeyNav");
  public static final FullQualifiedName nameBAESAllPrimRT =
      new FullQualifiedName(SchemaProvider.NAMESPACE, "BAESAllPrimRT");
  public static final FullQualifiedName nameBAETAllPrimRT =
      new FullQualifiedName(SchemaProvider.NAMESPACE, "BAETAllPrimRT");
  // Unbound Actions
  public static final FullQualifiedName nameUARTString = new FullQualifiedName(SchemaProvider.NAMESPACE,
      "UARTString");
  public static final FullQualifiedName nameUARTCollStringTwoParam = new FullQualifiedName(SchemaProvider.NAMESPACE,
      "UARTCollStringTwoParam");
  public static final FullQualifiedName nameUARTCollCTTwoPrimParam = new FullQualifiedName(SchemaProvider.NAMESPACE,
      "UARTCollCTTwoPrimParam");
  public static final FullQualifiedName nameUARTCTTwoPrimParam = new FullQualifiedName(SchemaProvider.NAMESPACE,
      "UARTCTTwoPrimParam");
  public static final FullQualifiedName nameUARTETTwoKeyTwoPrimParam =
      new FullQualifiedName(SchemaProvider.NAMESPACE, "UARTETTwoKeyTwoPrimParam");
  public static final FullQualifiedName nameUARTCollETKeyNavParam =
      new FullQualifiedName(SchemaProvider.NAMESPACE, "UARTCollETKeyNavParam");
  public static final FullQualifiedName nameUARTETAllPrimParam =
      new FullQualifiedName(SchemaProvider.NAMESPACE, "UARTETAllPrimParam");
  public static final FullQualifiedName nameUARTCollETAllPrimParam =
      new FullQualifiedName(SchemaProvider.NAMESPACE, "UARTCollETAllPrimParam");
  public static final FullQualifiedName nameUART = new FullQualifiedName(SchemaProvider.NAMESPACE, "UART");
  public static final FullQualifiedName nameUARTParam =
      new FullQualifiedName(SchemaProvider.NAMESPACE, "UARTParam");
  public static final FullQualifiedName nameUARTTwoParam =
      new FullQualifiedName(SchemaProvider.NAMESPACE, "UARTTwoParam");
  /**
   * Returns all overloads of the named action, or {@code null} when the name
   * is not known to this provider.
   *
   * @param actionName fully qualified name of the action to look up
   * @return the list of action definitions, or null if the action does not exist
   * @throws ODataException declared by the EDM-provider contract; not thrown here
   */
  public List<CsdlAction> getActions(final FullQualifiedName actionName) throws ODataException {
    if (actionName.equals(nameUARTString)) {
      return Collections.singletonList(
          new CsdlAction().setName(nameUARTString.getName())
              .setReturnType(new CsdlReturnType().setType(PropertyProvider.nameString)));
    } else if (actionName.equals(nameUARTCollStringTwoParam)) {
      return Collections.singletonList(
          new CsdlAction().setName(nameUARTCollStringTwoParam.getName())
              .setParameters(Arrays.asList(
                  new CsdlParameter().setName("ParameterInt16").setType(PropertyProvider.nameInt16),
                  new CsdlParameter().setName("ParameterDuration").setType(PropertyProvider.nameDuration)))
              .setReturnType(new CsdlReturnType().setType(PropertyProvider.nameString).setCollection(true)));
    } else if (actionName.equals(nameUARTCTTwoPrimParam)) {
      return Collections.singletonList(
          new CsdlAction().setName(nameUARTCTTwoPrimParam.getName())
              .setParameters(Collections.singletonList(
                  new CsdlParameter().setName("ParameterInt16").setType(PropertyProvider.nameInt16)
                      .setNullable(false)))
              .setReturnType(
                  new CsdlReturnType().setType(ComplexTypeProvider.nameCTTwoPrim).setNullable(false)));
    } else if (actionName.equals(nameUARTCollCTTwoPrimParam)) {
      return Collections.singletonList(
          new CsdlAction().setName(nameUARTCollCTTwoPrimParam.getName())
              .setParameters(Collections.singletonList(
                  new CsdlParameter().setName("ParameterInt16").setType(PropertyProvider.nameInt16)))
              .setReturnType(
                  new CsdlReturnType().setType(ComplexTypeProvider.nameCTTwoPrim).setCollection(true)));
    } else if (actionName.equals(nameUARTETTwoKeyTwoPrimParam)) {
      return Collections.singletonList(
          new CsdlAction().setName(nameUARTETTwoKeyTwoPrimParam.getName())
              .setParameters(Collections.singletonList(
                  new CsdlParameter().setName("ParameterInt16").setType(PropertyProvider.nameInt16)))
              .setReturnType(
                  new CsdlReturnType().setType(EntityTypeProvider.nameETTwoKeyTwoPrim)));
    } else if (actionName.equals(nameUARTCollETKeyNavParam)) {
      return Collections.singletonList(
          new CsdlAction().setName(nameUARTCollETKeyNavParam.getName())
              .setParameters(Collections.singletonList(
                  new CsdlParameter().setName("ParameterInt16").setType(PropertyProvider.nameInt16)))
              .setReturnType(
                  new CsdlReturnType().setType(EntityTypeProvider.nameETKeyNav).setCollection(true)));
    } else if (actionName.equals(nameUARTETAllPrimParam)) {
      return Collections.singletonList(
          new CsdlAction().setName(nameUARTETAllPrimParam.getName())
              .setParameters(Collections.singletonList(
                  new CsdlParameter().setName("ParameterDate").setType(PropertyProvider.nameDate)))
              .setReturnType(
                  new CsdlReturnType().setType(EntityTypeProvider.nameETAllPrim)));
    } else if (actionName.equals(nameUARTCollETAllPrimParam)) {
      return Collections.singletonList(
          new CsdlAction().setName(nameUARTCollETAllPrimParam.getName())
              .setParameters(Collections.singletonList(
                  new CsdlParameter().setName("ParameterTimeOfDay")
                      .setType(PropertyProvider.nameTimeOfDay)))
              .setReturnType(
                  new CsdlReturnType().setType(EntityTypeProvider.nameETAllPrim).setCollection(true)));
    } else if (actionName.equals(nameUART)) {
      return Collections.singletonList(new CsdlAction().setName(nameUART.getName()));
    } else if (actionName.equals(nameUARTParam)) {
      return Collections.singletonList(
          new CsdlAction()
              .setName(nameUARTParam.getName())
              .setParameters(Collections.singletonList(
                  new CsdlParameter().setName("ParameterInt16").setType(PropertyProvider.nameInt16))));
    } else if (actionName.equals(nameUARTTwoParam)) {
      return Collections.singletonList(
          new CsdlAction()
              .setName(nameUARTTwoParam.getName())
              .setParameters(Arrays.asList(
                  new CsdlParameter().setName("ParameterInt16").setType(PropertyProvider.nameInt16),
                  new CsdlParameter().setName("ParameterDuration").setType(PropertyProvider.nameDuration))));
    } else if (actionName.equals(nameBAETTwoKeyNavRTETTwoKeyNav)) {
      // Two overloads: the binding parameter differs (ETTwoKeyNav vs. ETKeyNav)
      return Arrays.asList(
          new CsdlAction().setName("BAETTwoKeyNavRTETTwoKeyNav")
              .setParameters(Arrays.asList(
                  new CsdlParameter().setName("ParameterETTwoKeyNav").setType(EntityTypeProvider.nameETTwoKeyNav)
                      .setNullable(false)))
              .setBound(true)
              .setReturnType(
                  new CsdlReturnType().setType(EntityTypeProvider.nameETTwoKeyNav))
          ,
          new CsdlAction().setName("BAETTwoKeyNavRTETTwoKeyNav")
              .setParameters(Arrays.asList(
                  new CsdlParameter().setName("ParameterETKeyNav").setType(EntityTypeProvider.nameETKeyNav)
                      .setNullable(false)))
              .setBound(true)
              .setReturnType(
                  new CsdlReturnType().setType(EntityTypeProvider.nameETTwoKeyNav))
          );
    } else if (actionName.equals(nameBAESAllPrimRTETAllPrim)) {
      return Arrays.asList(
          new CsdlAction().setName("BAESAllPrimRTETAllPrim")
              .setParameters(Arrays.asList(
                  new CsdlParameter().setName("ParameterESAllPrim").setType(EntityTypeProvider.nameETAllPrim)
                      .setCollection(true).setNullable(false)))
              .setBound(true)
              .setReturnType(
                  new CsdlReturnType().setType(EntityTypeProvider.nameETAllPrim)));
    } else if (actionName.equals(nameBAESTwoKeyNavRTESTwoKeyNav)) {
      return Arrays.asList(
          new CsdlAction().setName("BAESTwoKeyNavRTESTwoKeyNav")
              .setParameters(Arrays.asList(
                  new CsdlParameter().setName("ParameterETTwoKeyNav").setType(EntityTypeProvider.nameETTwoKeyNav)
                      .setCollection(true).setNullable(false)))
              .setBound(true)
              .setReturnType(
                  new CsdlReturnType().setType(EntityTypeProvider.nameETTwoKeyNav).setCollection(true)));
    } else if (actionName.equals(nameBAESTwoKeyNavRTESKeyNav)) {
      return Arrays.asList(
          new CsdlAction().setName("BAESTwoKeyNavRTESKeyNav")
              .setBound(true)
              .setEntitySetPath("BindingParam/NavPropertyETKeyNavMany")
              .setParameters(Arrays.asList(
                  new CsdlParameter().setName("ParameterETTwoKeyNav")
                      .setType(EntityTypeProvider.nameETTwoKeyNav)
                      .setCollection(true)
                      .setNullable(false)))
              .setReturnType(
                  new CsdlReturnType().setType(EntityTypeProvider.nameETKeyNav).setCollection(true)));
    } else if (actionName.equals(nameBAETBaseTwoKeyNavRTETBaseTwoKeyNav)) {
      return Arrays.asList(
          new CsdlAction().setName("BAETBaseTwoKeyNavRTETBaseTwoKeyNav")
              .setParameters(Arrays.asList(
                  new CsdlParameter().setName("ParameterETTwoKeyNav").setType(EntityTypeProvider.nameETBaseTwoKeyNav)
                      .setNullable(false)))
              .setBound(true)
              .setReturnType(
                  new CsdlReturnType().setType(EntityTypeProvider.nameETTwoKeyNav)));
    } else if (actionName.equals(nameBAETTwoBaseTwoKeyNavRTETBaseTwoKeyNav)) {
      return Arrays.asList(
          new CsdlAction().setName("BAETTwoBaseTwoKeyNavRTETBaseTwoKeyNav")
              .setParameters(Arrays.asList(
                  new CsdlParameter().setName("ParameterETTwoBaseTwoKeyNav")
                      .setType(EntityTypeProvider.nameETTwoBaseTwoKeyNav)
                      .setNullable(false)))
              .setBound(true)
              .setReturnType(
                  new CsdlReturnType().setType(EntityTypeProvider.nameETBaseTwoKeyNav)));
    } else if (actionName.equals(nameBAETAllPrimRT)) {
      return Arrays.asList(
          new CsdlAction().setName("BAETAllPrimRT")
              .setBound(true)
              .setParameters(Arrays.asList(
                  new CsdlParameter().setName("ParameterETAllPrim")
                      .setNullable(false)
                      .setType(EntityTypeProvider.nameETAllPrim)
                  )));
    } else if (actionName.equals(nameBAESAllPrimRT)) {
      return Arrays.asList(
          new CsdlAction().setName("BAESAllPrimRT")
              .setBound(true)
              .setParameters(Arrays.asList(
                  new CsdlParameter().setName("ParameterETAllPrim")
                      .setNullable(false)
                      .setCollection(true)
                      .setType(EntityTypeProvider.nameETAllPrim)
                  ))
          );
    }
    // Unknown action name
    return null;
  }
}
| |
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
/**
* DescribeInstanceAttributeResponse.java
*
* This file was auto-generated from WSDL
* by the Apache Axis2 version: 1.5.1 Built on : Oct 19, 2009 (10:59:34 EDT)
*/
package com.amazon.ec2;
/**
* DescribeInstanceAttributeResponse bean class
*/
public class DescribeInstanceAttributeResponse
implements org.apache.axis2.databinding.ADBBean{
    // Qualified name of the root element this bean serializes to/from
    public static final javax.xml.namespace.QName MY_QNAME = new javax.xml.namespace.QName(
            "http://ec2.amazonaws.com/doc/2009-10-31/",
            "DescribeInstanceAttributeResponse",
            "ns1");
private static java.lang.String generatePrefix(java.lang.String namespace) {
if(namespace.equals("http://ec2.amazonaws.com/doc/2009-10-31/")){
return "ns1";
}
return org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
}
    /**
     * The DescribeInstanceAttributeResponse payload wrapped by this bean.
     */
    protected com.amazon.ec2.DescribeInstanceAttributeResponseType localDescribeInstanceAttributeResponse ;
/**
* Auto generated getter method
* @return com.amazon.ec2.DescribeInstanceAttributeResponseType
*/
public com.amazon.ec2.DescribeInstanceAttributeResponseType getDescribeInstanceAttributeResponse(){
return localDescribeInstanceAttributeResponse;
}
/**
* Auto generated setter method
* @param param DescribeInstanceAttributeResponse
*/
public void setDescribeInstanceAttributeResponse(com.amazon.ec2.DescribeInstanceAttributeResponseType param){
this.localDescribeInstanceAttributeResponse=param;
}
/**
* isReaderMTOMAware
* @return true if the reader supports MTOM
*/
public static boolean isReaderMTOMAware(javax.xml.stream.XMLStreamReader reader) {
boolean isReaderMTOMAware = false;
try{
isReaderMTOMAware = java.lang.Boolean.TRUE.equals(reader.getProperty(org.apache.axiom.om.OMConstants.IS_DATA_HANDLERS_AWARE));
}catch(java.lang.IllegalArgumentException e){
isReaderMTOMAware = false;
}
return isReaderMTOMAware;
}
    /**
     * Wraps this bean in an OMSourcedElement whose serialization is deferred
     * to this bean's serialize method.
     *
     * @param parentQName not used; the element is always rooted at MY_QNAME
     * @param factory     OM factory used to build the element
     * @return org.apache.axiom.om.OMElement
     */
    public org.apache.axiom.om.OMElement getOMElement (
            final javax.xml.namespace.QName parentQName,
            final org.apache.axiom.om.OMFactory factory) throws org.apache.axis2.databinding.ADBException{
        // Lazy data source: serialization happens only when the element is consumed
        org.apache.axiom.om.OMDataSource dataSource =
               new org.apache.axis2.databinding.ADBDataSource(this,MY_QNAME){
                 public void serialize(org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
                       DescribeInstanceAttributeResponse.this.serialize(MY_QNAME,factory,xmlWriter);
                 }
               };
               return new  org.apache.axiom.om.impl.llom.OMSourcedElementImpl(
               MY_QNAME,factory,dataSource);
    }
public void serialize(final javax.xml.namespace.QName parentQName,
final org.apache.axiom.om.OMFactory factory,
org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter)
throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{
serialize(parentQName,factory,xmlWriter,false);
}
public void serialize(final javax.xml.namespace.QName parentQName,
final org.apache.axiom.om.OMFactory factory,
org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter,
boolean serializeType)
throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{
//We can safely assume an element has only one type associated with it
if (localDescribeInstanceAttributeResponse==null){
throw new org.apache.axis2.databinding.ADBException("Property cannot be null!");
}
localDescribeInstanceAttributeResponse.serialize(MY_QNAME,factory,xmlWriter);
}
/**
* Util method to write an attribute with the ns prefix
*/
private void writeAttribute(java.lang.String prefix,java.lang.String namespace,java.lang.String attName,
java.lang.String attValue,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException{
if (xmlWriter.getPrefix(namespace) == null) {
xmlWriter.writeNamespace(prefix, namespace);
xmlWriter.setPrefix(prefix, namespace);
}
xmlWriter.writeAttribute(namespace,attName,attValue);
}
/**
* Util method to write an attribute without the ns prefix
*/
private void writeAttribute(java.lang.String namespace,java.lang.String attName,
java.lang.String attValue,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException{
if (namespace.equals(""))
{
xmlWriter.writeAttribute(attName,attValue);
}
else
{
registerPrefix(xmlWriter, namespace);
xmlWriter.writeAttribute(namespace,attName,attValue);
}
}
/**
* Util method to write an attribute without the ns prefix
*/
private void writeQNameAttribute(java.lang.String namespace, java.lang.String attName,
javax.xml.namespace.QName qname, javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
java.lang.String attributeNamespace = qname.getNamespaceURI();
java.lang.String attributePrefix = xmlWriter.getPrefix(attributeNamespace);
if (attributePrefix == null) {
attributePrefix = registerPrefix(xmlWriter, attributeNamespace);
}
java.lang.String attributeValue;
if (attributePrefix.trim().length() > 0) {
attributeValue = attributePrefix + ":" + qname.getLocalPart();
} else {
attributeValue = qname.getLocalPart();
}
if (namespace.equals("")) {
xmlWriter.writeAttribute(attName, attributeValue);
} else {
registerPrefix(xmlWriter, namespace);
xmlWriter.writeAttribute(namespace, attName, attributeValue);
}
}
/**
* method to handle Qnames
*/
private void writeQName(javax.xml.namespace.QName qname,
javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
java.lang.String namespaceURI = qname.getNamespaceURI();
if (namespaceURI != null) {
java.lang.String prefix = xmlWriter.getPrefix(namespaceURI);
if (prefix == null) {
prefix = generatePrefix(namespaceURI);
xmlWriter.writeNamespace(prefix, namespaceURI);
xmlWriter.setPrefix(prefix,namespaceURI);
}
if (prefix.trim().length() > 0){
xmlWriter.writeCharacters(prefix + ":" + org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
} else {
// i.e this is the default namespace
xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
}
} else {
xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
}
}
    /**
     * Writes a space-separated list of QNames as character data, declaring
     * prefixes for their namespaces as needed. The text is buffered and
     * written in one call at the end, because no namespace declaration may
     * be written after character data has been emitted.
     */
    private void writeQNames(javax.xml.namespace.QName[] qnames,
                             javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
        if (qnames != null) {
            // we have to store this data until last moment since it is not possible to write any
            // namespace data after writing the character data
            java.lang.StringBuffer stringToWrite = new java.lang.StringBuffer();
            java.lang.String namespaceURI = null;
            java.lang.String prefix = null;
            for (int i = 0; i < qnames.length; i++) {
                if (i > 0) {
                    stringToWrite.append(" ");
                }
                namespaceURI = qnames[i].getNamespaceURI();
                if (namespaceURI != null) {
                    prefix = xmlWriter.getPrefix(namespaceURI);
                    if ((prefix == null) || (prefix.length() == 0)) {
                        prefix = generatePrefix(namespaceURI);
                        xmlWriter.writeNamespace(prefix, namespaceURI);
                        xmlWriter.setPrefix(prefix,namespaceURI);
                    }
                    if (prefix.trim().length() > 0){
                        stringToWrite.append(prefix).append(":").append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                    } else {
                        // Empty prefix: the default namespace applies
                        stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                    }
                } else {
                    stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                }
            }
            xmlWriter.writeCharacters(stringToWrite.toString());
        }
    }
/**
 * Registers (and returns) a namespace prefix for the given namespace URI.
 * If the namespace is already bound on the writer, the existing prefix is
 * reused; otherwise a generated prefix is probed against the writer's
 * namespace context until an unused one is found, then declared and bound.
 *
 * @param xmlWriter writer whose namespace context is consulted and updated
 * @param namespace namespace URI to register
 * @return the prefix now bound to {@code namespace}
 * @throws javax.xml.stream.XMLStreamException on any writer failure
 */
private java.lang.String registerPrefix(javax.xml.stream.XMLStreamWriter xmlWriter, java.lang.String namespace) throws javax.xml.stream.XMLStreamException {
java.lang.String prefix = xmlWriter.getPrefix(namespace);
if (prefix == null) {
prefix = generatePrefix(namespace);
// keep generating until the candidate prefix is not already taken
while (xmlWriter.getNamespaceContext().getNamespaceURI(prefix) != null) {
prefix = org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
}
xmlWriter.writeNamespace(prefix, namespace);
xmlWriter.setPrefix(prefix, namespace);
}
return prefix;
}
/**
 * Databinding method returning a pull-parser (StAX reader) view of this
 * object's XML representation.
 *
 * Note: the {@code qName} parameter is ignored; the wrapped element is
 * always emitted under the class-level {@code MY_QNAME} constant.
 *
 * @param qName requested element QName (unused by this generated wrapper)
 * @return an XMLStreamReader over the serialized form of the wrapped payload
 * @throws org.apache.axis2.databinding.ADBException on databinding failure
 */
public javax.xml.stream.XMLStreamReader getPullParser(javax.xml.namespace.QName qName)
throws org.apache.axis2.databinding.ADBException{
//We can safely assume an element has only one type associated with it
return localDescribeInstanceAttributeResponse.getPullParser(MY_QNAME);
}
/**
 * Factory class that keeps the parse method used to build a
 * DescribeInstanceAttributeResponse from a StAX reader.
 */
public static class Factory{
/**
 * Static method to create the object from a pull parser.
 * Precondition: If this object is an element, the current or next start element starts this object and any intervening reader events are ignorable
 * If this object is not an element, it is a complex type and the reader is at the event just after the outer start element
 * Postcondition: If this object is an element, the reader is positioned at its end element
 * If this object is a complex type, the reader is positioned at the end element of its outer element
 *
 * @param reader StAX reader positioned as described above
 * @return the populated response object
 * @throws java.lang.Exception wrapping any XMLStreamException raised while parsing
 */
public static DescribeInstanceAttributeResponse parse(javax.xml.stream.XMLStreamReader reader) throws java.lang.Exception{
DescribeInstanceAttributeResponse object =
new DescribeInstanceAttributeResponse();
// NOTE(review): the locals below are generated scaffolding and are never
// read in this particular parse method.
int event;
java.lang.String nillableValue = null;
java.lang.String prefix ="";
java.lang.String namespaceuri ="";
try {
// advance to the first structural event (start or end element)
while (!reader.isStartElement() && !reader.isEndElement())
reader.next();
// Note all attributes that were handled. Used to differ normal attributes
// from anyAttributes.
java.util.Vector handledAttributes = new java.util.Vector();
while(!reader.isEndElement()) {
if (reader.isStartElement() ){
// only the EC2 DescribeInstanceAttributeResponse element is expected here
if (reader.isStartElement() && new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/","DescribeInstanceAttributeResponse").equals(reader.getName())){
object.setDescribeInstanceAttributeResponse(com.amazon.ec2.DescribeInstanceAttributeResponseType.Factory.parse(reader));
} // End of if for expected property start element
else{
// A start element we are not expecting indicates an invalid parameter was passed
throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName());
}
} else {
reader.next();
}
} // end of while loop
} catch (javax.xml.stream.XMLStreamException e) {
throw new java.lang.Exception(e);
}
return object;
}
}//end of factory class
}
| |
package it.finsiel.siged.mvc.presentation.actionform.amministrazione.org.aoo;
import it.finsiel.siged.model.organizzazione.AreaOrganizzativa;
import it.finsiel.siged.mvc.business.LookupDelegate;
import it.finsiel.siged.mvc.vo.IdentityVO;
import it.finsiel.siged.mvc.vo.organizzazione.AreaOrganizzativaVO;
import it.finsiel.siged.util.DateUtil;
import it.finsiel.siged.util.NumberUtil;
import java.util.ArrayList;
import java.util.Collection;
import javax.servlet.http.HttpServletRequest;
import org.apache.log4j.Logger;
import org.apache.struts.action.ActionErrors;
import org.apache.struts.action.ActionForm;
import org.apache.struts.action.ActionMapping;
import org.apache.struts.action.ActionMessage;
public final class AreaOrganizzativaForm extends ActionForm {
/**
* @return Returns the areaorganizzativa.
*/
public AreaOrganizzativaVO getAreaorganizzativa() {
return areaorganizzativa;
}
/**
* @param areaorganizzativa
* The areaorganizzativa to set.
*/
public void setAreaorganizzativa(AreaOrganizzativaVO areaorganizzativa) {
this.areaorganizzativa = areaorganizzativa;
}
// Logger for this form. Fixed to use the form class itself: the original
// registered the logger under the model class (AreaOrganizzativa), which
// misattributed this form's log output. Kept non-final because static
// getLogger()/setLogger() accessors below mutate it.
static Logger logger = Logger.getLogger(AreaOrganizzativaForm.class.getName());
// --- identity and descriptive fields of the organizational area (AOO) ---
private int id;
private String codi_aoo;
private String description;
private String data_istituzione;
// --- contact data of the person in charge (responsabile) ---
private String responsabile_nome;
private String responsabile_cognome;
private String responsabile_email;
private String responsabile_telefono;
private String data_soppressione;
private String telefono;
private String fax;
// --- address parts (dug = street type, toponimo = street name) ---
private String indi_dug;
private String indi_toponimo;
private String indi_civico;
private String indi_cap;
private String indi_comune;
private Collection province;
private String email;
// --- owning department / administration ---
private String dipartimento_codice;
private String dipartimento_descrizione;
private String tipo_aoo;
private int provincia_id;
private String codi_documento_doc;
private int amministrazione_id;
private String desc_amministrazione;
private int versione;
private Collection areeOrganizzative;
private AreaOrganizzativaVO areaorganizzativa;
// fields for ordinary (pn_*) and certified/PEC (pec_*) e-mail accounts
private String pec_indirizzo;
private String pec_username;
private String pec_pwd;
private boolean pecAbilitata;
private String pec_ssl_port;
private String pec_pop3;
private String pec_smtp;
private String pec_smtp_port;
private String pn_indirizzo;
private String pn_username;
private String pn_pwd;
private boolean pn_ssl;
private String pn_ssl_port;
private String pn_pop3;
private String pn_smtp;
private int pecTimer;
private String dirDocumenti;
// --- filing plan (titolario) configuration ---
private int dipendenzaTitolarioUfficio;
private int titolarioLivelloMinimo;
private boolean modificabileDipendenzaTitolarioUfficio;
/**
 * @param tipo_aoo
 *            The tipo_aoo to set ("L" = AOO Light, "F" = AOO Full — see getTipiAoo()).
 */
public void setTipo_aoo(String tipo_aoo) {
this.tipo_aoo = tipo_aoo;
}
// Struts reset callback: intentionally empty so field values survive the
// request cycle; explicit clearing is done via inizializzaForm() instead.
public void reset(ActionMapping mapping, HttpServletRequest request) {
}
/**
 * Struts validation hook for the AOO maintenance screens.
 *
 * On save ("btnSalva") it checks the mandatory fields (description, code,
 * institution date, address parts) plus the format of the institution date
 * (gg/mm/aaaa) and of the numeric department code. On delete ("btnCancella")
 * it only requires the record id. Any other submit passes through unchanged.
 *
 * @param mapping current action mapping (unused)
 * @param request HTTP request carrying the pressed button and field values
 * @return collected validation errors (empty when the form is valid)
 */
public ActionErrors validate(ActionMapping mapping,
        HttpServletRequest request) {
    ActionErrors errors = new ActionErrors();
    if (request.getParameter("btnSalva") != null) {
        if (getDescription() == null || "".equals(getDescription().trim())) {
            errors.add("description", new ActionMessage(
                    "campo.obbligatorio", "Descrizione", ""));
        }
        if (getCodi_aoo() == null || "".equals(getCodi_aoo().trim())) {
            errors.add("description", new ActionMessage(
                    "campo.obbligatorio", "Codice", ""));
        }
        // Fix: a redundant "getIndi_dug() == null" check mislabelled
        // "Data istituzione" was removed here; the null case is already
        // reported with the correct "Dug" label by the check further below.
        String dataIst = getData_istituzione();
        if (dataIst != null && !"".equals(dataIst)) {
            if (!DateUtil.isData(dataIst)) {
                // the institution date must be a valid gg/mm/aaaa date
                errors.add("dataIst", new ActionMessage(
                        "formato.data.errato", "Data istituzione"));
            }
        } else {
            errors.add("description", new ActionMessage(
                    "campo.obbligatorio", "Data istituzione", ""));
        }
        if (getIndi_dug() == null || "".equals(getIndi_dug().trim())) {
            errors.add("Dug", new ActionMessage("campo.obbligatorio",
                    "Dug", ""));
        }
        if (getIndi_toponimo() == null
                || "".equals(getIndi_toponimo().trim())) {
            errors.add("Toponimo", new ActionMessage("campo.obbligatorio",
                    "Toponimo", ""));
        }
        // NOTE(review): the "Toponimo" property key is reused for the civic
        // number, CAP and comune errors below; confirm against the JSPs
        // whether distinct keys were intended before changing them.
        if (getIndi_civico() == null || "".equals(getIndi_civico().trim())) {
            errors.add("Toponimo", new ActionMessage("campo.obbligatorio",
                    "Civico", ""));
        }
        if (getIndi_cap() == null || "".equals(getIndi_cap().trim())) {
            errors.add("Toponimo", new ActionMessage("campo.obbligatorio",
                    "CAP", ""));
        }
        if (getIndi_comune() == null || "".equals(getIndi_comune().trim())) {
            errors.add("Toponimo", new ActionMessage("campo.obbligatorio",
                    "Comune", ""));
        }
        if (getDipartimento_codice() != null
                && !NumberUtil.isInteger(getDipartimento_codice())) {
            errors.add("codice DIpartimento", new ActionMessage(
                    "formato.numerico.errato", "Codice dipartimento", ""));
        }
    } else if (request.getParameter("btnCancella") != null
            && (request.getParameter("id") == null)) {
        errors.add("id", new ActionMessage("campo.obbligatorio",
                "AreaOrganizzativa", ""));
    }
    return errors;
}
/**
 * Resets the form to its "new record" state: ids and version to 0, text
 * fields to null, PEC disabled, type defaulted to "L" (AOO Light) and the
 * filing-plan dependency marked editable.
 *
 * NOTE(review): pec_smtp_port, pecTimer, dirDocumenti, desc_amministrazione,
 * dipendenzaTitolarioUfficio, titolarioLivelloMinimo, areeOrganizzative and
 * areaorganizzativa are NOT cleared here — confirm this is intentional.
 */
public void inizializzaForm() {
setId(0);
setDescription(null);
setResponsabile_nome(null);
setAmministrazione_id(0);
setCodi_aoo(null);
setCodi_documento_doc(null);
setData_istituzione(null);
setData_soppressione(null);
setDipartimento_codice(null);
setDipartimento_descrizione(null);
setEmail(null);
setFax(null);
setIndi_cap(null);
setIndi_civico(null);
setIndi_comune(null);
setIndi_dug(null);
setIndi_toponimo(null);
setProvincia_id(0);
setResponsabile_cognome(null);
setResponsabile_email(null);
setResponsabile_telefono(null);
setTelefono(null);
setTipo_aoo("L");
setVersione(0);
setPec_indirizzo(null);
setPec_pop3(null);
setPec_pwd(null);
setPec_smtp(null);
setPecAbilitata(false);
setPec_ssl_port(null);
setPec_username(null);
setPn_indirizzo(null);
setPn_pop3(null);
setPn_pwd(null);
setPn_smtp(null);
setPn_ssl(false);
setPn_ssl_port(null);
setPn_username(null);
setMsgSuccess(null);
setModificabileDipendenzaTitolarioUfficio(true);
}
/**
* @return Returns the profili.
*/
/**
* @return Returns the amministrazione_id.
*/
public int getAmministrazione_id() {
return amministrazione_id;
}
/**
* @param amministrazione_id
* The amministrazione_id to set.
*/
public void setAmministrazione_id(int amministrazione_id) {
this.amministrazione_id = amministrazione_id;
}
/**
* @return Returns the codi_documento_doc.
*/
public String getCodi_documento_doc() {
return codi_documento_doc;
}
/**
* @param codi_documento_doc
* The codi_documento_doc to set.
*/
public void setCodi_documento_doc(String codi_documento_doc) {
this.codi_documento_doc = codi_documento_doc;
}
/**
* @return Returns the data_istituzione.
*/
public String getData_istituzione() {
return data_istituzione;
}
/**
* @param data_istituzione
* The data_istituzione to set.
*/
public void setData_istituzione(String data_istituzione) {
this.data_istituzione = data_istituzione;
}
/**
* @return Returns the data_soppressione.
*/
public String getData_soppressione() {
return data_soppressione;
}
/**
* @param data_soppressione
* The data_soppressione to set.
*/
public void setData_soppressione(String data_soppressione) {
this.data_soppressione = data_soppressione;
}
/**
* @return Returns the dipartimento_codice.
*/
public String getDipartimento_codice() {
return dipartimento_codice;
}
/**
* @param dipartimento_codice
* The dipartimento_codice to set.
*/
public void setDipartimento_codice(String dipartimento_codice) {
this.dipartimento_codice = dipartimento_codice;
}
/**
* @return Returns the dipartimento_descrizione.
*/
public String getDipartimento_descrizione() {
return dipartimento_descrizione;
}
/**
* @param dipartimento_descrizione
* The dipartimento_descrizione to set.
*/
public void setDipartimento_descrizione(String dipartimento_descrizione) {
this.dipartimento_descrizione = dipartimento_descrizione;
}
/**
* @return Returns the email.
*/
public String getEmail() {
return email;
}
/**
* @param email
* The email to set.
*/
public void setEmail(String email) {
this.email = email;
}
/**
* @return Returns the fax.
*/
public String getFax() {
return fax;
}
/**
* @param fax
* The fax to set.
*/
public void setFax(String fax) {
this.fax = fax;
}
/**
* @return Returns the id.
*/
public int getId() {
return id;
}
/**
* @param id
* The id to set.
*/
public void setId(int id) {
this.id = id;
}
/**
* @return Returns the indi_cap.
*/
public String getIndi_cap() {
return indi_cap;
}
/**
* @param indi_cap
* The indi_cap to set.
*/
public void setIndi_cap(String indi_cap) {
this.indi_cap = indi_cap;
}
/**
* @return Returns the indi_civico.
*/
public String getIndi_civico() {
return indi_civico;
}
/**
* @param indi_civico
* The indi_civico to set.
*/
public void setIndi_civico(String indi_civico) {
this.indi_civico = indi_civico;
}
/**
* @return Returns the indi_comune.
*/
public String getIndi_comune() {
return indi_comune;
}
/**
* @param indi_comune
* The indi_comune to set.
*/
public void setIndi_comune(String indi_comune) {
this.indi_comune = indi_comune;
}
/**
* @return Returns the indi_dug.
*/
public String getIndi_dug() {
return indi_dug;
}
/**
* @param indi_dug
* The indi_dug to set.
*/
public void setIndi_dug(String indi_dug) {
this.indi_dug = indi_dug;
}
/**
* @return Returns the indi_toponimo.
*/
public String getIndi_toponimo() {
return indi_toponimo;
}
/**
* @param indi_toponimo
* The indi_toponimo to set.
*/
public void setIndi_toponimo(String indi_toponimo) {
this.indi_toponimo = indi_toponimo;
}
/**
* @return Returns the provincia_id.
*/
public int getProvincia_id() {
return provincia_id;
}
/**
* @param provincia_id
* The provincia_id to set.
*/
public void setProvincia_id(int provincia_id) {
this.provincia_id = provincia_id;
}
/**
* @return Returns the responsabile_cognome.
*/
public String getResponsabile_cognome() {
return responsabile_cognome;
}
/**
* @param responsabile_cognome
* The responsabile_cognome to set.
*/
public void setResponsabile_cognome(String responsabile_cognome) {
this.responsabile_cognome = responsabile_cognome;
}
/**
* @return Returns the responsabile_email.
*/
public String getResponsabile_email() {
return responsabile_email;
}
/**
* @param responsabile_email
* The responsabile_email to set.
*/
public void setResponsabile_email(String responsabile_email) {
this.responsabile_email = responsabile_email;
}
/**
* @return Returns the responsabile_nome.
*/
public String getResponsabile_nome() {
return responsabile_nome;
}
/**
* @param responsabile_nome
* The responsabile_nome to set.
*/
public void setResponsabile_nome(String responsabile_nome) {
this.responsabile_nome = responsabile_nome;
}
/**
* @return Returns the responsabile_telefono.
*/
public String getResponsabile_telefono() {
return responsabile_telefono;
}
/**
* @param responsabile_telefono
* The responsabile_telefono to set.
*/
public void setResponsabile_telefono(String responsabile_telefono) {
this.responsabile_telefono = responsabile_telefono;
}
/**
* @return Returns the telefono.
*/
public String getTelefono() {
return telefono;
}
/**
* @param telefono
* The telefono to set.
*/
public void setTelefono(String telefono) {
this.telefono = telefono;
}
/**
* @return Returns the tipo_aoo.
*/
public String getTipo_aoo() {
return tipo_aoo;
}
/**
* @return Returns the versione.
*/
public int getVersione() {
return versione;
}
/**
* @param versione
* The versione to set.
*/
public void setVersione(int versione) {
this.versione = versione;
}
/**
* @return Returns the areeOrganizzative.
*/
public Collection getAreeOrganizzative() {
return areeOrganizzative;
}
/**
* @param areeOrganizzative
* The areeOrganizzative to set.
*/
public void setAreeOrganizzative(Collection areeOrganizzative) {
this.areeOrganizzative = areeOrganizzative;
}
/**
* @return Returns the description.
*/
public String getDescription() {
return description;
}
/**
* @param description
* The description to set.
*/
public void setDescription(String description) {
this.description = description;
}
/**
* @return Returns the codice.
*/
/**
* @return Returns the codi_aoo.
*/
public String getCodi_aoo() {
return codi_aoo;
}
/**
* @param codi_aoo
* The codi_aoo to set.
*/
public void setCodi_aoo(String codi_aoo) {
this.codi_aoo = codi_aoo;
}
/**
 * @return Returns the province, freshly fetched from the lookup delegate.
 *         NOTE(review): the stored {@code province} field set via
 *         setProvince() is ignored by this getter — presumably a deliberate
 *         always-fresh lookup for the JSP combo; confirm before relying on
 *         the setter.
 */
public Collection getProvince() {
return LookupDelegate.getInstance().getProvince();
}
/**
 * @param province
 *            The province to set (currently never read back; see getProvince()).
 */
public void setProvince(Collection province) {
this.province = province;
}
public String getDesc_amministrazione() {
return desc_amministrazione;
}
public void setDesc_amministrazione(String desc_amministrazione) {
this.desc_amministrazione = desc_amministrazione;
}
public static Logger getLogger() {
return logger;
}
public static void setLogger(Logger logger) {
AreaOrganizzativaForm.logger = logger;
}
public String getPec_indirizzo() {
return pec_indirizzo;
}
public void setPec_indirizzo(String pec_indirizzo) {
this.pec_indirizzo = pec_indirizzo;
}
public String getPec_pop3() {
return pec_pop3;
}
public void setPec_pop3(String pec_pop3) {
this.pec_pop3 = pec_pop3;
}
public String getPec_pwd() {
return pec_pwd;
}
public void setPec_pwd(String pec_pwd) {
this.pec_pwd = pec_pwd;
}
public String getPec_smtp() {
return pec_smtp;
}
public void setPec_smtp(String pec_smtp) {
this.pec_smtp = pec_smtp;
}
public boolean getPecAbilitata() {
return pecAbilitata;
}
public void setPecAbilitata(boolean pec_ssl) {
this.pecAbilitata = pec_ssl;
}
public String getPec_ssl_port() {
return pec_ssl_port;
}
public void setPec_ssl_port(String pec_ssl_port) {
this.pec_ssl_port = pec_ssl_port;
}
public String getPec_username() {
return pec_username;
}
public void setPec_username(String pec_username) {
this.pec_username = pec_username;
}
public String getPn_indirizzo() {
return pn_indirizzo;
}
public void setPn_indirizzo(String pn_indirizzo) {
this.pn_indirizzo = pn_indirizzo;
}
public String getPn_pop3() {
return pn_pop3;
}
public void setPn_pop3(String pn_pop3) {
this.pn_pop3 = pn_pop3;
}
public String getPn_pwd() {
return pn_pwd;
}
public void setPn_pwd(String pn_pwd) {
this.pn_pwd = pn_pwd;
}
public String getPn_smtp() {
return pn_smtp;
}
public void setPn_smtp(String pn_smtp) {
this.pn_smtp = pn_smtp;
}
public boolean getPn_ssl() {
return pn_ssl;
}
public void setPn_ssl(boolean pn_ssl) {
this.pn_ssl = pn_ssl;
}
public String getPn_ssl_port() {
return pn_ssl_port;
}
public void setPn_ssl_port(String pn_ssl_port) {
this.pn_ssl_port = pn_ssl_port;
}
public String getPn_username() {
return pn_username;
}
public void setPn_username(String pn_username) {
this.pn_username = pn_username;
}
/**
 * Builds the selectable AOO type entries for the UI combo:
 * "L" (AOO Light) and "F" (AOO Full).
 *
 * @return a fresh collection of IdentityVO items, in display order
 */
public Collection getTipiAoo() {
    Collection tipi = new ArrayList();
    tipi.add(creaTipoAoo("L", "AOO Light"));
    tipi.add(creaTipoAoo("F", "AOO Full"));
    return tipi;
}

// builds a single code/description entry for the AOO type combo
private IdentityVO creaTipoAoo(String codice, String descrizione) {
    IdentityVO vo = new IdentityVO();
    vo.setCodice(codice);
    vo.setDescription(descrizione);
    return vo;
}
public String getPec_smtp_port() {
return pec_smtp_port;
}
public void setPec_smtp_port(String pec_smtp_port) {
this.pec_smtp_port = pec_smtp_port;
}
private String msgSuccess;
public String getMsgSuccess() {
return msgSuccess;
}
public void setMsgSuccess(String msgSuccess) {
this.msgSuccess = msgSuccess;
}
public int getPecTimer() {
return pecTimer;
}
public void setPecTimer(int pecTimer) {
this.pecTimer = pecTimer;
}
public String getDirDocumenti() {
return dirDocumenti;
}
public void setDirDocumenti(String dirDocumenti) {
this.dirDocumenti = dirDocumenti;
}
public int getDipendenzaTitolarioUfficio() {
return dipendenzaTitolarioUfficio;
}
public void setDipendenzaTitolarioUfficio(int dipendenzaTitolarioUfficio) {
this.dipendenzaTitolarioUfficio = dipendenzaTitolarioUfficio;
}
public int getTitolarioLivelloMinimo() {
return titolarioLivelloMinimo;
}
public void setTitolarioLivelloMinimo(int titolarioLivelloMinimo) {
this.titolarioLivelloMinimo = titolarioLivelloMinimo;
}
public boolean getModificabileDipendenzaTitolarioUfficio() {
return modificabileDipendenzaTitolarioUfficio;
}
public void setModificabileDipendenzaTitolarioUfficio(
boolean isModificabileDipendenzaTitolarioUfficio) {
this.modificabileDipendenzaTitolarioUfficio = isModificabileDipendenzaTitolarioUfficio;
}
}
| |
/*
* Copyright 1999-2010 Luca Garulli (l.garulli--at--orientechnologies.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.orientechnologies.orient.core.id;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.List;
import com.orientechnologies.orient.core.db.ODatabaseRecordThreadLocal;
import com.orientechnologies.orient.core.db.record.ODatabaseRecord;
import com.orientechnologies.orient.core.db.record.OIdentifiable;
import com.orientechnologies.orient.core.exception.ODatabaseException;
import com.orientechnologies.orient.core.record.ORecord;
import com.orientechnologies.orient.core.serialization.OBinaryProtocol;
import com.orientechnologies.orient.core.serialization.OMemoryStream;
import com.orientechnologies.orient.core.serialization.serializer.OStringSerializerHelper;
public class ORecordId implements ORID {
private static final long serialVersionUID = 247070594054408657L;
public static final int PERSISTENT_SIZE = OBinaryProtocol.SIZE_SHORT + OBinaryProtocol.SIZE_LONG;
public static final ORecordId EMPTY_RECORD_ID = new ORecordId();
public static final byte[] EMPTY_RECORD_ID_STREAM = EMPTY_RECORD_ID.toStream();
public int clusterId = CLUSTER_ID_INVALID; // INT TO AVOID
// JVM
// PENALITY, BUT
// IT'S STORED
// AS SHORT
public long clusterPosition = CLUSTER_POS_INVALID;
public ORecordId() {
}
public ORecordId(final int iClusterId, final long iPosition) {
clusterId = iClusterId;
checkClusterLimits();
clusterPosition = iPosition;
}
public ORecordId(final int iClusterIdId) {
clusterId = iClusterIdId;
checkClusterLimits();
}
public ORecordId(final String iRecordId) {
fromString(iRecordId);
}
/**
* Copy constructor.
*
* @param parentRid
* Source object
*/
public ORecordId(final ORID parentRid) {
clusterId = parentRid.getClusterId();
clusterPosition = parentRid.getClusterPosition();
}
// Restores the sentinel "invalid" state (same as a freshly constructed id).
public void reset() {
clusterId = CLUSTER_ID_INVALID;
clusterPosition = CLUSTER_POS_INVALID;
}
// An id is valid as soon as it has a cluster position, even a temporary one.
public boolean isValid() {
return clusterPosition != CLUSTER_POS_INVALID;
}
// Persistent = both cluster id and position are real (non-negative) values.
public boolean isPersistent() {
return clusterId > -1 && clusterPosition > -1;
}
// New = not yet stored; any negative position (invalid or temporary).
public boolean isNew() {
return clusterPosition < 0;
}
// Temporary = assigned to a cluster but holding a pre-commit position < -1.
public boolean isTemporary() {
return clusterId != -1 && clusterPosition < -1;
}
@Override
public String toString() {
return generateString(clusterId, clusterPosition);
}
public StringBuilder toString(StringBuilder iBuffer) {
if (iBuffer == null)
iBuffer = new StringBuilder();
iBuffer.append(PREFIX);
iBuffer.append(clusterId);
iBuffer.append(SEPARATOR);
iBuffer.append(clusterPosition);
return iBuffer;
}
public static String generateString(final int iClusterId, final long iPosition) {
final StringBuilder buffer = new StringBuilder(12);
buffer.append(PREFIX);
buffer.append(iClusterId);
buffer.append(SEPARATOR);
buffer.append(iPosition);
return buffer.toString();
}
// Standard 31-based mix of cluster id and position (the long is folded to
// an int by xor-ing its halves). Consistent with equals() below.
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + clusterId;
result = prime * result + (int) (clusterPosition ^ (clusterPosition >>> 32));
return result;
}
// Equality is defined against any OIdentifiable by comparing record
// identities component-wise.
// NOTE(review): the cast assumes getIdentity() always yields an ORecordId;
// a different ORID implementation would raise ClassCastException here.
@Override
public boolean equals(final Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (!(obj instanceof OIdentifiable))
return false;
final ORecordId other = (ORecordId) ((OIdentifiable) obj).getIdentity();
if (clusterId != other.clusterId)
return false;
if (clusterPosition != other.clusterPosition)
return false;
return true;
}
/**
 * Orders record ids by cluster id first, then by cluster position.
 * A null argument sorts before any id (this compares as greater).
 *
 * @param iOther the identifiable to compare against
 * @return -1, 0 or 1 following the Comparable contract
 */
public int compareTo(final OIdentifiable iOther) {
    if (iOther == this)
        return 0;
    if (iOther == null)
        return 1;
    final ORID that = iOther.getIdentity();
    final int thatCluster = that.getClusterId();
    if (clusterId != thatCluster)
        // different clusters decide the ordering outright
        return clusterId > thatCluster ? 1 : -1;
    final long thatPosition = that.getClusterPosition();
    if (clusterPosition == thatPosition)
        return 0;
    return clusterPosition > thatPosition ? 1 : -1;
}
public int compare(final OIdentifiable iObj1, final OIdentifiable iObj2) {
if (iObj1 == iObj2)
return 0;
if (iObj1 != null)
return iObj1.compareTo(iObj2);
return -1;
}
public ORecordId copy() {
return new ORecordId(clusterId, clusterPosition);
}
/**
 * Validates that {@link #clusterId} fits the storable range [-2, CLUSTER_MAX]
 * (the id is persisted as a short; -1/-2 are the invalid/sentinel values).
 *
 * Fix: the upper-bound message now reports CLUSTER_MAX itself instead of a
 * hardcoded "32767", and uses correct wording ("greater than", not "major than").
 *
 * @throws ODatabaseException when the cluster id is out of range
 */
private void checkClusterLimits() {
    if (clusterId < -2)
        throw new ODatabaseException("RecordId cannot support negative cluster id. You've used: " + clusterId);
    if (clusterId > CLUSTER_MAX)
        throw new ODatabaseException("RecordId cannot support cluster id greater than " + CLUSTER_MAX + ". You've used: " + clusterId);
}
public ORecordId fromStream(final InputStream iStream) throws IOException {
clusterId = OBinaryProtocol.bytes2short(iStream);
clusterPosition = OBinaryProtocol.bytes2long(iStream);
return this;
}
public ORecordId fromStream(final OMemoryStream iStream) {
clusterId = iStream.getAsShort();
clusterPosition = iStream.getAsLong();
return this;
}
public ORecordId fromStream(final byte[] iBuffer) {
if (iBuffer != null) {
clusterId = OBinaryProtocol.bytes2short(iBuffer, 0);
clusterPosition = OBinaryProtocol.bytes2long(iBuffer, OBinaryProtocol.SIZE_SHORT);
}
return this;
}
public int toStream(final OutputStream iStream) throws IOException {
final int beginOffset = OBinaryProtocol.short2bytes((short) clusterId, iStream);
OBinaryProtocol.long2bytes(clusterPosition, iStream);
return beginOffset;
}
public int toStream(final OMemoryStream iStream) throws IOException {
final int beginOffset = OBinaryProtocol.short2bytes((short) clusterId, iStream);
OBinaryProtocol.long2bytes(clusterPosition, iStream);
return beginOffset;
}
public byte[] toStream() {
byte[] buffer = new byte[PERSISTENT_SIZE];
OBinaryProtocol.short2bytes((short) clusterId, buffer, 0);
OBinaryProtocol.long2bytes(clusterPosition, buffer, OBinaryProtocol.SIZE_SHORT);
return buffer;
}
public int getClusterId() {
return clusterId;
}
public long getClusterPosition() {
return clusterPosition;
}
/**
 * Parses a record id from its textual form "#<cluster-id>:<cluster-position>"
 * (e.g. "#3:12") into this instance. A null or blank input resets the id to
 * the invalid sentinel values instead of failing.
 *
 * @throws IllegalArgumentException when the string has no separator or does
 *         not split into exactly two parts
 * @throws NumberFormatException when either part is not numeric (propagated
 *         from parseInt/parseLong)
 */
public void fromString(String iRecordId) {
if (iRecordId != null)
iRecordId = iRecordId.trim();
// blank input means "no id": fall back to the invalid sentinel state
if (iRecordId == null || iRecordId.isEmpty()) {
clusterId = CLUSTER_ID_INVALID;
clusterPosition = CLUSTER_POS_INVALID;
return;
}
if (!OStringSerializerHelper.contains(iRecordId, SEPARATOR))
throw new IllegalArgumentException("Argument '" + iRecordId
+ "' is not a RecordId in form of string. Format must be: <cluster-id>:<cluster-position>");
final List<String> parts = OStringSerializerHelper.split(iRecordId, SEPARATOR, PREFIX);
if (parts.size() != 2)
throw new IllegalArgumentException("Argument received '" + iRecordId
+ "' is not a RecordId in form of string. Format must be: #<cluster-id>:<cluster-position>. Example: #3:12");
clusterId = Integer.parseInt(parts.get(0));
// reject ids outside the persistable short range before accepting the position
checkClusterLimits();
clusterPosition = Long.parseLong(parts.get(1));
}
public void copyFrom(final ORID iSource) {
if (iSource == null)
throw new IllegalArgumentException("Source is null");
clusterId = iSource.getClusterId();
clusterPosition = iSource.getClusterPosition();
}
public String next() {
return generateString(clusterId, clusterPosition + 1);
}
public ORID getIdentity() {
return this;
}
/**
 * Loads the record this id points to, using the database bound to the
 * current thread (ODatabaseRecordThreadLocal).
 *
 * @return the loaded record, cast to the caller's expected type
 * @throws ODatabaseException when no database is bound to the current thread
 */
@SuppressWarnings("unchecked")
public <T extends ORecord<?>> T getRecord() {
final ODatabaseRecord db = ODatabaseRecordThreadLocal.INSTANCE.get();
if (db == null)
throw new ODatabaseException(
"No database found in current thread local space. If you manually control databases over threads assure to set the current database before to use it by calling: ODatabaseRecordThreadLocal.INSTANCE.set(db);");
return (T) db.load(this);
}
| |
/*
* Copyright 2014-2015 Open Networking Laboratory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.rest.resources;
import java.io.IOException;
import java.io.InputStream;
import java.util.Objects;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriBuilder;
import javax.ws.rs.core.UriInfo;
import org.onosproject.core.ApplicationId;
import org.onosproject.core.CoreService;
import org.onosproject.net.intent.HostToHostIntent;
import org.onosproject.net.intent.Intent;
import org.onosproject.net.intent.IntentEvent;
import org.onosproject.net.intent.IntentListener;
import org.onosproject.net.intent.IntentService;
import org.onosproject.net.intent.IntentState;
import org.onosproject.net.intent.Key;
import org.onosproject.net.intent.PointToPointIntent;
import org.onosproject.rest.AbstractWebResource;
import org.slf4j.Logger;
import com.fasterxml.jackson.databind.node.ObjectNode;
import static org.onlab.util.Tools.nullIsNotFound;
import static org.onosproject.net.intent.IntentState.FAILED;
import static org.onosproject.net.intent.IntentState.WITHDRAWN;
import static org.slf4j.LoggerFactory.getLogger;
/**
* Query, submit and withdraw network intents.
*/
@Path("intents")
public class IntentsWebResource extends AbstractWebResource {
@Context
UriInfo uriInfo;
private static final Logger log = getLogger(IntentsWebResource.class);
private static final int WITHDRAW_EVENT_TIMEOUT_SECONDS = 5;
public static final String INTENT_NOT_FOUND = "Intent is not found";
/**
 * Get all intents.
 * Returns array containing all the intents in the system, encoded with the
 * generic Intent JSON codec under a top-level "intents" node.
 *
 * @return 200 OK response with an array of all the intents in the system
 */
@GET
@Produces(MediaType.APPLICATION_JSON)
public Response getIntents() {
final Iterable<Intent> intents = get(IntentService.class).getIntents();
final ObjectNode root = encodeArray(Intent.class, "intents", intents);
return ok(root).build();
}
/**
 * Get intent by application and key.
 * Returns details of the specified intent. The key is first tried as a
 * string key; when no intent matches and the key is numeric, it is retried
 * as a numeric (fingerprint) key.
 *
 * Fix: a non-numeric key that matched no string key previously escaped as a
 * NumberFormatException from Long.decode (HTTP 500); it now falls through to
 * the standard 404 "not found" handling.
 *
 * @param appId application identifier
 * @param key intent key
 * @return intent data, encoded with the most specific codec available
 */
@GET
@Produces(MediaType.APPLICATION_JSON)
@Path("{appId}/{key}")
public Response getIntentById(@PathParam("appId") String appId,
                              @PathParam("key") String key) {
    final ApplicationId app = get(CoreService.class).getAppId(appId);
    Intent intent = get(IntentService.class).getIntent(Key.of(key, app));
    if (intent == null) {
        try {
            long numericalKey = Long.decode(key);
            intent = get(IntentService.class).getIntent(Key.of(numericalKey, app));
        } catch (NumberFormatException e) {
            // key is not numeric either; leave intent null so the lookup
            // below reports a clean 404 instead of a 500
        }
    }
    nullIsNotFound(intent, INTENT_NOT_FOUND);
    final ObjectNode root;
    if (intent instanceof HostToHostIntent) {
        root = codec(HostToHostIntent.class).encode((HostToHostIntent) intent, this);
    } else if (intent instanceof PointToPointIntent) {
        root = codec(PointToPointIntent.class).encode((PointToPointIntent) intent, this);
    } else {
        root = codec(Intent.class).encode(intent, this);
    }
    return ok(root).build();
}
// Listener used by deleteIntentById(): counts down the latch once the intent
// identified by {@code key} reaches a terminal withdraw state (WITHDRAWN or
// FAILED), unblocking the waiting DELETE handler.
class DeleteListener implements IntentListener {
final Key key;
final CountDownLatch latch;
DeleteListener(Key key, CountDownLatch latch) {
this.key = key;
this.latch = latch;
}
@Override
public void event(IntentEvent event) {
// only react to terminal events for the intent we are waiting on
if (Objects.equals(event.subject().key(), key) &&
(event.type() == IntentEvent.Type.WITHDRAWN ||
event.type() == IntentEvent.Type.FAILED)) {
latch.countDown();
}
}
}
/**
 * Submit a new intent.
 * Creates and submits intent from the JSON request.
 *
 * @param stream input JSON
 * @return status of the request - CREATED if the JSON is correct,
 * BAD_REQUEST if the JSON is invalid
 */
@POST
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public Response createIntent(InputStream stream) {
try {
IntentService service = get(IntentService.class);
ObjectNode root = (ObjectNode) mapper().readTree(stream);
Intent intent = codec(Intent.class).decode(root, this);
// submission is asynchronous; 201 is returned once the intent is accepted
service.submit(intent);
// Location header points at the GET endpoint: /intents/{appId}/{fingerprint}
UriBuilder locationBuilder = uriInfo.getBaseUriBuilder()
.path("intents")
.path(Short.toString(intent.appId().id()))
.path(Long.toString(intent.id().fingerprint()));
return Response
.created(locationBuilder.build())
.build();
} catch (IOException ioe) {
// malformed JSON is a client error; the mapper turns this into 400
throw new IllegalArgumentException(ioe);
}
}
/**
 * Withdraw intent.
 * Withdraws the specified intent from the system and, once it reaches the
 * WITHDRAWN or FAILED state (observed via a listener within a bounded wait),
 * purges it. Unknown intents return successfully, per REST conventions.
 *
 * Fixes: a non-numeric key no longer escapes as NumberFormatException from
 * Long.decode (it is treated as "no such intent"); an interrupt during the
 * wait now restores the thread's interrupt flag and is logged as an
 * interruption, while a genuine timeout is logged as a timeout.
 *
 * @param appId application identifier
 * @param key intent key
 */
@DELETE
@Path("{appId}/{key}")
public void deleteIntentById(@PathParam("appId") String appId,
                             @PathParam("key") String key) {
    final ApplicationId app = get(CoreService.class).getAppId(appId);
    Intent intent = get(IntentService.class).getIntent(Key.of(key, app));
    IntentService service = get(IntentService.class);
    if (intent == null) {
        try {
            intent = service.getIntent(Key.of(Long.decode(key), app));
        } catch (NumberFormatException e) {
            // key is not a numeric id either; treat as "no such intent"
        }
    }
    if (intent == null) {
        // No such intent. REST standards recommend a positive status code
        // in this case.
        return;
    }
    Key k = intent.key();
    // set up latch and listener to track uninstall progress
    CountDownLatch latch = new CountDownLatch(1);
    IntentListener listener = new DeleteListener(k, latch);
    service.addListener(listener);
    try {
        // request the withdraw
        service.withdraw(intent);
        try {
            if (!latch.await(WITHDRAW_EVENT_TIMEOUT_SECONDS, TimeUnit.SECONDS)) {
                log.info("REST Delete operation timed out waiting for intent {}", k);
            }
        } catch (InterruptedException e) {
            // restore the interrupt flag so callers up the stack can see it
            Thread.currentThread().interrupt();
            log.info("REST Delete operation interrupted while waiting for intent {}", k);
        }
        // double check the state; only purge once the withdraw completed
        IntentState state = service.getIntentState(k);
        if (state == WITHDRAWN || state == FAILED) {
            service.purge(intent);
        }
    } finally {
        // clean up the listener
        service.removeListener(listener);
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.fileupload2.jaksrvlt;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.security.Principal;
import java.util.Collection;
import java.util.Enumeration;
import java.util.Locale;
import java.util.Map;
import org.apache.commons.fileupload2.FileUploadBase;
import jakarta.servlet.AsyncContext;
import jakarta.servlet.DispatcherType;
import jakarta.servlet.ReadListener;
import jakarta.servlet.RequestDispatcher;
import jakarta.servlet.ServletContext;
import jakarta.servlet.ServletException;
import jakarta.servlet.ServletInputStream;
import jakarta.servlet.ServletRequest;
import jakarta.servlet.ServletResponse;
import jakarta.servlet.http.Cookie;
import jakarta.servlet.http.HttpServletRequest;
import jakarta.servlet.http.HttpServletResponse;
import jakarta.servlet.http.HttpSession;
import jakarta.servlet.http.HttpUpgradeHandler;
import jakarta.servlet.http.Part;
/**
 * Mock implementation of {@link HttpServletRequest} for the Jakarta Servlet
 * API, backing a file-upload request with a fixed byte stream and content
 * type. Only the members exercised by the file-upload tests are functional;
 * all other methods return a neutral default ({@code null}, {@code 0},
 * {@code false}) or throw {@link IllegalStateException}.
 */
public class MockJakSrvltHttpRequest implements HttpServletRequest {

    /** Raw request body returned by {@link #getInputStream()}. */
    private final InputStream mRequestData;

    /** Declared request length; may be spoofed via {@link #setContentLength(long)}. */
    private long length;

    /** Content type reported by {@link #getContentType()}. */
    private final String mStrContentType;

    /** Optional cap on the number of bytes a single read may return (-1 = unlimited). */
    private int readLimit = -1;

    /** Request headers; pre-populated with the Content-type header. */
    private final Map<String, String> mHeaders = new java.util.HashMap<>();

    /**
     * Creates a new instance with the given request data
     * and content type.
     */
    public MockJakSrvltHttpRequest(
            final byte[] requestData,
            final String strContentType) {
        this(new ByteArrayInputStream(requestData),
                requestData.length, strContentType);
    }

    /**
     * Creates a new instance with the given request data
     * and content type.
     */
    public MockJakSrvltHttpRequest(
            final InputStream requestData,
            final long requestLength,
            final String strContentType) {
        mRequestData = requestData;
        length = requestLength;
        mStrContentType = strContentType;
        mHeaders.put(FileUploadBase.CONTENT_TYPE, strContentType);
    }

    /**
     * @see jakarta.servlet.http.HttpServletRequest#getAuthType()
     */
    @Override
    public String getAuthType() {
        return null;
    }

    /**
     * @see jakarta.servlet.http.HttpServletRequest#getCookies()
     */
    @Override
    public Cookie[] getCookies() {
        return null;
    }

    /**
     * @see jakarta.servlet.http.HttpServletRequest#getDateHeader(String)
     */
    @Override
    public long getDateHeader(final String arg0) {
        return 0;
    }

    /**
     * @see jakarta.servlet.http.HttpServletRequest#getHeader(String)
     */
    @Override
    public String getHeader(final String headerName) {
        // NOTE(review): exact-case lookup; real containers match header
        // names case-insensitively. Sufficient for these tests.
        return mHeaders.get(headerName);
    }

    /**
     * @see jakarta.servlet.http.HttpServletRequest#getHeaders(String)
     */
    @Override
    public Enumeration<String> getHeaders(final String arg0) {
        // todo - implement
        return null;
    }

    /**
     * @see jakarta.servlet.http.HttpServletRequest#getHeaderNames()
     */
    @Override
    public Enumeration<String> getHeaderNames() {
        // todo - implement
        return null;
    }

    /**
     * @see jakarta.servlet.http.HttpServletRequest#getIntHeader(String)
     */
    @Override
    public int getIntHeader(final String arg0) {
        return 0;
    }

    /**
     * @see jakarta.servlet.http.HttpServletRequest#getMethod()
     */
    @Override
    public String getMethod() {
        return null;
    }

    /**
     * @see jakarta.servlet.http.HttpServletRequest#getPathInfo()
     */
    @Override
    public String getPathInfo() {
        return null;
    }

    /**
     * @see jakarta.servlet.http.HttpServletRequest#getPathTranslated()
     */
    @Override
    public String getPathTranslated() {
        return null;
    }

    /**
     * @see jakarta.servlet.http.HttpServletRequest#getContextPath()
     */
    @Override
    public String getContextPath() {
        return null;
    }

    /**
     * @see jakarta.servlet.http.HttpServletRequest#getQueryString()
     */
    @Override
    public String getQueryString() {
        return null;
    }

    /**
     * @see jakarta.servlet.http.HttpServletRequest#getRemoteUser()
     */
    @Override
    public String getRemoteUser() {
        return null;
    }

    /**
     * @see jakarta.servlet.http.HttpServletRequest#isUserInRole(String)
     */
    @Override
    public boolean isUserInRole(final String arg0) {
        return false;
    }

    /**
     * @see jakarta.servlet.http.HttpServletRequest#getUserPrincipal()
     */
    @Override
    public Principal getUserPrincipal() {
        return null;
    }

    /**
     * @see jakarta.servlet.http.HttpServletRequest#getRequestedSessionId()
     */
    @Override
    public String getRequestedSessionId() {
        return null;
    }

    /**
     * @see jakarta.servlet.http.HttpServletRequest#getRequestURI()
     */
    @Override
    public String getRequestURI() {
        return null;
    }

    /**
     * @see jakarta.servlet.http.HttpServletRequest#getRequestURL()
     */
    @Override
    public StringBuffer getRequestURL() {
        return null;
    }

    /**
     * @see jakarta.servlet.http.HttpServletRequest#getServletPath()
     */
    @Override
    public String getServletPath() {
        return null;
    }

    /**
     * @see jakarta.servlet.http.HttpServletRequest#getSession(boolean)
     */
    @Override
    public HttpSession getSession(final boolean arg0) {
        return null;
    }

    /**
     * @see jakarta.servlet.http.HttpServletRequest#getSession()
     */
    @Override
    public HttpSession getSession() {
        return null;
    }

    /**
     * @see jakarta.servlet.http.HttpServletRequest#isRequestedSessionIdValid()
     */
    @Override
    public boolean isRequestedSessionIdValid() {
        return false;
    }

    /**
     * @see jakarta.servlet.http.HttpServletRequest#isRequestedSessionIdFromCookie()
     */
    @Override
    public boolean isRequestedSessionIdFromCookie() {
        return false;
    }

    /**
     * @see jakarta.servlet.http.HttpServletRequest#isRequestedSessionIdFromURL()
     */
    @Override
    public boolean isRequestedSessionIdFromURL() {
        return false;
    }

    /**
     * @see jakarta.servlet.http.HttpServletRequest#isRequestedSessionIdFromUrl()
     * @deprecated
     */
    @Override
    @Deprecated
    public boolean isRequestedSessionIdFromUrl() {
        return false;
    }

    /**
     * @see jakarta.servlet.ServletRequest#getAttribute(String)
     */
    @Override
    public Object getAttribute(final String arg0) {
        return null;
    }

    /**
     * @see jakarta.servlet.ServletRequest#getAttributeNames()
     */
    @Override
    public Enumeration<String> getAttributeNames() {
        return null;
    }

    /**
     * @see jakarta.servlet.ServletRequest#getCharacterEncoding()
     */
    @Override
    public String getCharacterEncoding() {
        return null;
    }

    /**
     * @see jakarta.servlet.ServletRequest#setCharacterEncoding(String)
     */
    @Override
    public void setCharacterEncoding(final String arg0)
            throws UnsupportedEncodingException {
    }

    /**
     * Returns the declared content length as an {@code int}.
     * NOTE(review): the Servlet spec asks for -1 when the length exceeds
     * Integer.MAX_VALUE; this mock deliberately throws instead so that
     * size-attack tests fail loudly rather than silently truncating.
     *
     * @see jakarta.servlet.ServletRequest#getContentLength()
     */
    @Override
    public int getContentLength() {
        int iLength;
        if (null == mRequestData) {
            iLength = -1;
        } else {
            if (length > Integer.MAX_VALUE) {
                throw new RuntimeException("Value '" + length + "' is too large to be converted to int");
            }
            iLength = (int) length;
        }
        return iLength;
    }

    /**
     * For testing attack scenarios in SizesTest.
     */
    public void setContentLength(final long length) {
        this.length = length;
    }

    /**
     * @see jakarta.servlet.ServletRequest#getContentType()
     */
    @Override
    public String getContentType() {
        return mStrContentType;
    }

    /**
     * @see jakarta.servlet.ServletRequest#getInputStream()
     */
    @Override
    public ServletInputStream getInputStream() throws IOException {
        return new MyServletInputStream(mRequestData, readLimit);
    }

    /**
     * Sets the read limit. This can be used to limit the number of bytes to read ahead.
     *
     * @param readLimit the read limit to use
     */
    public void setReadLimit(final int readLimit) {
        this.readLimit = readLimit;
    }

    /**
     * @see jakarta.servlet.ServletRequest#getParameter(String)
     */
    @Override
    public String getParameter(final String arg0) {
        return null;
    }

    /**
     * @see jakarta.servlet.ServletRequest#getParameterNames()
     */
    @Override
    public Enumeration<String> getParameterNames() {
        return null;
    }

    /**
     * @see jakarta.servlet.ServletRequest#getParameterValues(String)
     */
    @Override
    public String[] getParameterValues(final String arg0) {
        return null;
    }

    /**
     * @see jakarta.servlet.ServletRequest#getParameterMap()
     */
    @Override
    public Map<String, String[]> getParameterMap() {
        return null;
    }

    /**
     * @see jakarta.servlet.ServletRequest#getProtocol()
     */
    @Override
    public String getProtocol() {
        return null;
    }

    /**
     * @see jakarta.servlet.ServletRequest#getScheme()
     */
    @Override
    public String getScheme() {
        return null;
    }

    /**
     * @see jakarta.servlet.ServletRequest#getServerName()
     */
    @Override
    public String getServerName() {
        return null;
    }

    /**
     * @see jakarta.servlet.ServletRequest#getLocalName()
     */
    @Override
    @SuppressWarnings("javadoc") // This is a Servlet 2.4 method
    public String getLocalName() {
        return null;
    }

    /**
     * @see jakarta.servlet.ServletRequest#getServerPort()
     */
    @Override
    public int getServerPort() {
        return 0;
    }

    /**
     * @see jakarta.servlet.ServletRequest#getLocalPort()
     */
    @Override
    @SuppressWarnings("javadoc") // This is a Servlet 2.4 method
    public int getLocalPort() {
        return 0;
    }

    /**
     * @see jakarta.servlet.ServletRequest#getRemotePort()
     */
    @Override
    @SuppressWarnings("javadoc") // This is a Servlet 2.4 method
    public int getRemotePort() {
        return 0;
    }

    /**
     * @see jakarta.servlet.ServletRequest#getReader()
     */
    @Override
    public BufferedReader getReader() throws IOException {
        return null;
    }

    /**
     * @see jakarta.servlet.ServletRequest#getRemoteAddr()
     */
    @Override
    public String getRemoteAddr() {
        return null;
    }

    /**
     * @see jakarta.servlet.ServletRequest#getLocalAddr()
     */
    @Override
    @SuppressWarnings("javadoc") // This is a Servlet 2.4 method
    public String getLocalAddr() {
        return null;
    }

    /**
     * @see jakarta.servlet.ServletRequest#getRemoteHost()
     */
    @Override
    public String getRemoteHost() {
        return null;
    }

    /**
     * @see jakarta.servlet.ServletRequest#setAttribute(String, Object)
     */
    @Override
    public void setAttribute(final String arg0, final Object arg1) {
    }

    /**
     * @see jakarta.servlet.ServletRequest#removeAttribute(String)
     */
    @Override
    public void removeAttribute(final String arg0) {
    }

    /**
     * @see jakarta.servlet.ServletRequest#getLocale()
     */
    @Override
    public Locale getLocale() {
        return null;
    }

    /**
     * @see jakarta.servlet.ServletRequest#getLocales()
     */
    @Override
    public Enumeration<Locale> getLocales() {
        return null;
    }

    /**
     * @see jakarta.servlet.ServletRequest#isSecure()
     */
    @Override
    public boolean isSecure() {
        return false;
    }

    /**
     * @see jakarta.servlet.ServletRequest#getRequestDispatcher(String)
     */
    @Override
    public RequestDispatcher getRequestDispatcher(final String arg0) {
        return null;
    }

    /**
     * Servlet input stream over the mock request body, optionally capping
     * the number of bytes a single bulk read may return.
     */
    private static class MyServletInputStream
            extends jakarta.servlet.ServletInputStream {

        /** Underlying request body. */
        private final InputStream in;

        /** Per-read byte cap; values <= 0 mean "no cap". */
        private final int readLimit;

        /**
         * Creates a new instance, which returns the given
         * streams data.
         */
        public MyServletInputStream(final InputStream pStream, final int readLimit) {
            in = pStream;
            this.readLimit = readLimit;
        }

        @Override
        public int read() throws IOException {
            return in.read();
        }

        @Override
        public int read(final byte[] b, final int off, final int len) throws IOException {
            // Cap bulk reads so tests can exercise partial-read handling.
            if (readLimit > 0) {
                return in.read(b, off, Math.min(readLimit, len));
            }
            return in.read(b, off, len);
        }

        @Override
        public boolean isFinished() {
            return false;
        }

        @Override
        public boolean isReady() {
            return false;
        }

        @Override
        public void setReadListener(final ReadListener readListener) {
            throw new IllegalStateException("Not implemented");
        }
    }

    /**
     * @see jakarta.servlet.ServletRequest#getContentLengthLong()
     */
    @Override
    public long getContentLengthLong() {
        // Report the full 64-bit length directly. Delegating to
        // getContentLength() (as the previous implementation did) threw a
        // RuntimeException for lengths above Integer.MAX_VALUE, defeating
        // the purpose of the long variant of this method.
        return null == mRequestData ? -1 : length;
    }

    @Override
    public ServletContext getServletContext() {
        // getSession() always returns null in this mock, so this is
        // effectively a null return kept for interface completeness.
        final HttpSession session = getSession();
        if (session == null) {
            return null;
        }
        return session.getServletContext();
    }

    @Override
    public AsyncContext startAsync() throws IllegalStateException {
        throw new IllegalStateException("Not implemented");
    }

    @Override
    public AsyncContext startAsync(final ServletRequest servletRequest, final ServletResponse servletResponse)
            throws IllegalStateException {
        throw new IllegalStateException("Not implemented");
    }

    @Override
    public boolean isAsyncStarted() {
        return false;
    }

    @Override
    public boolean isAsyncSupported() {
        return false;
    }

    @Override
    public AsyncContext getAsyncContext() {
        return null;
    }

    @Override
    public DispatcherType getDispatcherType() {
        return null;
    }

    @Override
    public String changeSessionId() {
        return null;
    }

    @Override
    public boolean authenticate(final HttpServletResponse response) throws IOException, ServletException {
        return false;
    }

    @Override
    public void login(final String username, final String password) throws ServletException {
        throw new IllegalStateException("Not implemented");
    }

    @Override
    public void logout() throws ServletException {
        throw new IllegalStateException("Not implemented");
    }

    @Override
    public Collection<Part> getParts() throws IOException, ServletException {
        return null;
    }

    @Override
    public Part getPart(final String name) throws IOException, ServletException {
        return null;
    }

    @Override
    public <T extends HttpUpgradeHandler> T upgrade(final Class<T> handlerClass) throws IOException, ServletException {
        throw new IllegalStateException("Not implemented");
    }

    @Override
    public String getRealPath(final String path) {
        return null;
    }
}
| |
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.debugger.engine;
import com.intellij.debugger.DebuggerBundle;
import com.intellij.debugger.DebuggerInvocationUtil;
import com.intellij.debugger.EvaluatingComputable;
import com.intellij.debugger.SourcePosition;
import com.intellij.debugger.engine.evaluation.*;
import com.intellij.debugger.engine.evaluation.expression.*;
import com.intellij.debugger.engine.events.DebuggerContextCommandImpl;
import com.intellij.debugger.engine.events.SuspendContextCommandImpl;
import com.intellij.debugger.impl.DebuggerContextImpl;
import com.intellij.debugger.impl.DebuggerSession;
import com.intellij.debugger.ui.impl.watch.NodeDescriptorImpl;
import com.intellij.debugger.ui.impl.watch.ValueDescriptorImpl;
import com.intellij.openapi.progress.util.ProgressWindow;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.PsiElement;
import com.intellij.xdebugger.frame.XValueModifier;
import com.sun.jdi.*;
import org.jetbrains.annotations.NotNull;
import static com.intellij.psi.CommonClassNames.JAVA_LANG_STRING;
/*
* Class SetValueAction
* @author Jeka
*/
/**
 * Base class for debugger "set value" actions: computes the initial editor
 * text for a value being edited, evaluates the user's replacement expression
 * on the debugger thread, and assigns the result via a {@link SetValueRunnable}
 * supplied by subclasses.
 */
public abstract class JavaValueModifier extends XValueModifier {
  private final JavaValue myJavaValue;

  public JavaValueModifier(JavaValue javaValue) {
    myJavaValue = javaValue;
  }

  /**
   * Pre-fills the inline editor: primitives are shown as rendered (with any
   * trailing hex presentation in parentheses stripped), strings are fetched
   * asynchronously on the debugger thread and double-quoted, and anything
   * else yields {@code null} (no initial text).
   */
  @Override
  public void calculateInitialValueEditorText(final XInitialValueCallback callback) {
    final Value value = myJavaValue.getDescriptor().getValue();
    if (value instanceof PrimitiveValue) {
      String valueString = myJavaValue.getValueString();
      int pos = valueString.lastIndexOf('('); //skip hex presentation if any
      if (pos > 1) {
        valueString = valueString.substring(0, pos).trim();
      }
      callback.setValue(valueString);
    }
    else if (value instanceof StringReference) {
      // Reading the string contents requires a VM round-trip, so schedule it
      // on the debugger manager thread instead of doing it inline.
      final EvaluationContextImpl evaluationContext = myJavaValue.getEvaluationContext();
      evaluationContext.getManagerThread().schedule(new SuspendContextCommandImpl(evaluationContext.getSuspendContext()) {
        @Override
        public Priority getPriority() {
          return Priority.NORMAL;
        }

        @Override
        public void contextAction() throws Exception {
          callback.setValue(
            StringUtil.wrapWithDoubleQuote(DebuggerUtils.translateStringValue(DebuggerUtils.getValueAsString(evaluationContext, value))));
        }
      });
    }
    else {
      callback.setValue(null);
    }
  }

  /** Refreshes the debugger session views on the EDT after a value change. */
  protected static void update(final DebuggerContextImpl context) {
    DebuggerInvocationUtil.swingInvokeLater(context.getProject(), () -> {
      final DebuggerSession session = context.getDebuggerSession();
      if (session != null) {
        session.refresh(false);
      }
    });
  }

  protected abstract void setValueImpl(@NotNull String expression, @NotNull XModificationCallback callback);

  @Override
  public void setValue(@NotNull String expression, @NotNull XModificationCallback callback) {
    final NodeDescriptorImpl descriptor = myJavaValue.getDescriptor();
    if (!((ValueDescriptorImpl)descriptor).canSetValue()) {
      // NOTE(review): returning silently leaves the callback unnotified, so
      // the editor never learns the edit was rejected - confirm intended.
      return;
    }
    if (myJavaValue.getEvaluationContext().getSuspendContext().isResumed()) {
      callback.errorOccurred(DebuggerBundle.message("error.context.has.changed"));
      return;
    }
    setValueImpl(expression, callback);
  }

  /**
   * Coerces an evaluated value to the variable's declared type: objects are
   * converted to string mirrors for String variables, doubles are narrowed to
   * floats when they fit, and values are boxed/unboxed to match primitive or
   * reference variable types.
   *
   * @throws EvaluateException if a boxing/unboxing evaluation fails
   */
  protected static Value preprocessValue(EvaluationContextImpl context, Value value, Type varType) throws EvaluateException {
    if (value != null && JAVA_LANG_STRING.equals(varType.name()) && !(value instanceof StringReference)) {
      String v = DebuggerUtils.getValueAsString(context, value);
      if (v != null) {
        value = context.getSuspendContext().getDebugProcess().getVirtualMachineProxy().mirrorOf(v);
      }
    }
    if (value instanceof DoubleValue) {
      double dValue = ((DoubleValue) value).doubleValue();
      // Compare the magnitude against Float.MAX_VALUE. The previous check
      // used Float.MIN_VALUE (the smallest *positive* float) as a lower
      // bound, which wrongly rejected zero and all negative doubles.
      if (varType instanceof FloatType && Math.abs(dValue) <= Float.MAX_VALUE) {
        value = context.getSuspendContext().getDebugProcess().getVirtualMachineProxy().mirrorOf((float)dValue);
      }
    }
    if (value != null) {
      if (varType instanceof PrimitiveType) {
        if (!(value instanceof PrimitiveValue)) {
          value = (Value)new UnBoxingEvaluator(new IdentityEvaluator(value)).evaluate(context);
        }
      }
      else if (varType instanceof ReferenceType) {
        if (value instanceof PrimitiveValue) {
          value = (Value)new BoxingEvaluator(new IdentityEvaluator(value)).evaluate(context);
        }
      }
    }
    return value;
  }

  /**
   * Strategy for performing the actual assignment and, when needed, loading
   * a class that the assignment requires.
   */
  protected interface SetValueRunnable {
    void setValue(EvaluationContextImpl evaluationContext, Value newValue) throws ClassNotLoadedException,
                                                                                  InvalidTypeException,
                                                                                  EvaluateException,
                                                                                  IncompatibleThreadStateException;

    ReferenceType loadClass(EvaluationContextImpl evaluationContext, String className) throws EvaluateException,
                                                                                              InvocationException,
                                                                                              ClassNotLoadedException,
                                                                                              IncompatibleThreadStateException,
                                                                                              InvalidTypeException;
  }

  /**
   * Evaluates the expression and performs the assignment, translating JDI
   * failures into {@link EvaluateException}s. On {@link ClassNotLoadedException}
   * (with auto-loading enabled) the missing class is loaded and the whole
   * operation retried once per load; repeated load failures terminate the
   * recursion via the rethrown exception.
   */
  private static void setValue(String expressionToShow, ExpressionEvaluator evaluator, EvaluationContextImpl evaluationContext, SetValueRunnable setValueRunnable) throws EvaluateException {
    Value value;
    try {
      value = evaluator.evaluate(evaluationContext);
      setValueRunnable.setValue(evaluationContext, value);
    }
    catch (IllegalArgumentException ex) {
      // NOTE(review): only the message is preserved here (the cause is
      // dropped) - createEvaluateException(String) is the existing contract.
      throw EvaluateExceptionUtil.createEvaluateException(ex.getMessage());
    }
    catch (InvalidTypeException ex) {
      throw EvaluateExceptionUtil.createEvaluateException(DebuggerBundle.message("evaluation.error.type.mismatch"));
    }
    catch (IncompatibleThreadStateException e) {
      throw EvaluateExceptionUtil.createEvaluateException(e);
    }
    catch (ClassNotLoadedException ex) {
      if (!evaluationContext.isAutoLoadClasses()) {
        throw EvaluateExceptionUtil.createEvaluateException(ex);
      }
      final ReferenceType refType;
      try {
        refType = setValueRunnable.loadClass(evaluationContext, ex.className());
        if (refType != null) {
          //try again
          setValue(expressionToShow, evaluator, evaluationContext, setValueRunnable);
        }
      }
      catch (InvocationException | InvalidTypeException | IncompatibleThreadStateException | ClassNotLoadedException e) {
        throw EvaluateExceptionUtil.createEvaluateException(e);
      }
      catch (ObjectCollectedException e) {
        throw EvaluateExceptionUtil.OBJECT_WAS_COLLECTED;
      }
    }
  }

  /**
   * Compiles {@code expression} in the current source context, evaluates it on
   * the debugger thread under a cancellable progress window, assigns the
   * result through {@code setValueRunnable}, and reports the outcome to
   * {@code callback}.
   */
  protected void set(@NotNull final String expression, final XModificationCallback callback, final DebuggerContextImpl debuggerContext, final SetValueRunnable setValueRunnable) {
    final ProgressWindow progressWindow = new ProgressWindow(true, debuggerContext.getProject());
    final EvaluationContextImpl evaluationContext = myJavaValue.getEvaluationContext();

    SuspendContextCommandImpl askSetAction = new DebuggerContextCommandImpl(debuggerContext) {
      @Override
      public Priority getPriority() {
        return Priority.HIGH;
      }

      @Override
      public void threadAction(@NotNull SuspendContextImpl suspendContext) {
        ExpressionEvaluator evaluator;
        try {
          Project project = evaluationContext.getProject();
          SourcePosition position = ContextUtil.getSourcePosition(evaluationContext);
          PsiElement context = ContextUtil.getContextElement(evaluationContext, position);
          // Building the evaluator touches PSI, so it must run in a read action.
          evaluator = DebuggerInvocationUtil.commitAndRunReadAction(project, new EvaluatingComputable<ExpressionEvaluator>() {
            @Override
            public ExpressionEvaluator compute() throws EvaluateException {
              return EvaluatorBuilderImpl
                .build(new TextWithImportsImpl(CodeFragmentKind.EXPRESSION, expression), context, position, project);
            }
          });

          setValue(expression, evaluator, evaluationContext, new SetValueRunnable() {
            @Override
            public void setValue(EvaluationContextImpl evaluationContext, Value newValue) throws ClassNotLoadedException,
                                                                                                 InvalidTypeException,
                                                                                                 EvaluateException,
                                                                                                 IncompatibleThreadStateException {
              // Skip the assignment if the user cancelled the progress dialog.
              if (!progressWindow.isCanceled()) {
                setValueRunnable.setValue(evaluationContext, newValue);
              }
            }

            @Override
            public ReferenceType loadClass(EvaluationContextImpl evaluationContext, String className) throws
                                                                                                      InvocationException,
                                                                                                      ClassNotLoadedException,
                                                                                                      EvaluateException,
                                                                                                      IncompatibleThreadStateException,
                                                                                                      InvalidTypeException {
              return setValueRunnable.loadClass(evaluationContext, className);
            }
          });
          callback.valueModified();
        } catch (EvaluateException e) {
          callback.errorOccurred(e.getMessage());
        }
      }
    };

    progressWindow.setTitle(DebuggerBundle.message("title.evaluating"));
    evaluationContext.getDebugProcess().getManagerThread().startProgress(askSetAction, progressWindow);
  }

  /**
   * Renders a primitive for display, optionally as upper-case hex (bytes and
   * shorts are truncated to their natural width; longs get an "L" suffix).
   * Currently unreferenced by live code; retained for renderer-aware
   * formatting of edited values.
   */
  @SuppressWarnings({"HardCodedStringLiteral", "StringToUpperCaseOrToLowerCaseWithoutLocale"})
  private static String getDisplayableString(PrimitiveValue value, boolean showAsHex) {
    if (value instanceof CharValue) {
      long longValue = value.longValue();
      return showAsHex ? "0x" + Long.toHexString(longValue).toUpperCase() : Long.toString(longValue);
    }
    if (value instanceof ByteValue) {
      byte val = value.byteValue();
      // Cut the value to the byte's two hex digits (sign extension pads it).
      String strValue = Integer.toHexString(val).toUpperCase();
      if (strValue.length() > 2) {
        strValue = strValue.substring(strValue.length() - 2);
      }
      return showAsHex ? "0x" + strValue : value.toString();
    }
    if (value instanceof ShortValue) {
      short val = value.shortValue();
      // Cut the value to the short's four hex digits (sign extension pads it).
      String strValue = Integer.toHexString(val).toUpperCase();
      if (strValue.length() > 4) {
        strValue = strValue.substring(strValue.length() - 4);
      }
      return showAsHex ? "0x" + strValue : value.toString();
    }
    if (value instanceof IntegerValue) {
      int val = value.intValue();
      return showAsHex ? "0x" + Integer.toHexString(val).toUpperCase() : value.toString();
    }
    if (value instanceof LongValue) {
      long val = value.longValue();
      return showAsHex ? "0x" + Long.toHexString(val).toUpperCase() + "L" : value.toString() + "L";
    }
    return DebuggerUtils.translateStringValue(value.toString());
  }
}
| |
package de.avalax.fitbuddy.presentation;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.design.widget.BottomNavigationView;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentTransaction;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import com.google.android.gms.ads.MobileAds;
import javax.inject.Inject;
import de.avalax.fitbuddy.R;
import de.avalax.fitbuddy.application.ad_mod.AdMobProvider;
import de.avalax.fitbuddy.application.billing.BillingProvider;
import de.avalax.fitbuddy.application.billing.NotificationAsyncTask;
import de.avalax.fitbuddy.application.edit.workout.EditWorkoutService;
import de.avalax.fitbuddy.application.summary.FinishedWorkoutService;
import de.avalax.fitbuddy.application.workout.WorkoutService;
import de.avalax.fitbuddy.domain.model.workout.Workout;
import de.avalax.fitbuddy.domain.model.workout.WorkoutException;
import de.avalax.fitbuddy.presentation.dialog.SupportDialogFragment;
import de.avalax.fitbuddy.presentation.edit.workout.EditWorkoutActivity;
import de.avalax.fitbuddy.presentation.summary.FinishedWorkoutListFragment;
import de.avalax.fitbuddy.presentation.welcome_screen.WorkoutListFragment;
import de.avalax.fitbuddy.presentation.workout.ExerciseFragment;
import static android.widget.Toast.LENGTH_SHORT;
import static android.widget.Toast.makeText;
import static de.avalax.fitbuddy.presentation.FitbuddyApplication.ADD_WORKOUT;
import static de.avalax.fitbuddy.presentation.FitbuddyApplication.EDIT_WORKOUT;
import static java.lang.String.valueOf;
/**
 * Single-activity entry point of the app. Hosts three fragments (workout list,
 * active exercise, finished-workout summary) behind a {@link BottomNavigationView},
 * owns the toolbar menus, and relays results from the add/edit workout activities
 * back into the currently displayed list fragment.
 */
public class MainActivity extends AppCompatActivity
        implements BottomNavigationView.OnNavigationItemSelectedListener,
        SupportDialogFragment.DialogListener, NotificationAsyncTask.NotificationPostExecute {
    // Populated in onCreateOptionsMenu and rebuilt by the update*Toolbar/mainToolbar methods.
    // NOTE(review): update*Toolbar assumes onCreateOptionsMenu already ran (menu != null) — holds
    // in the normal lifecycle because fragments trigger these calls only after the menu exists.
    private Menu menu;
    private BottomNavigationView bottomNavigation;
    @Inject
    EditWorkoutService editWorkoutService;
    @Inject
    FinishedWorkoutService finishedWorkoutService;
    @Inject
    WorkoutService workoutService;
    @Inject
    BillingProvider billingProvider;
    @Inject
    AdMobProvider adMobProvider;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        MobileAds.initialize(this, "ca-app-pub-3067141613739864~9851773284");
        ((FitbuddyApplication) getApplication()).getComponent().inject(this);
        // Start on the workout list; TRANSIT_NONE keeps the initial attach animation-free.
        Fragment workoutListFragment = new WorkoutListFragment();
        getSupportFragmentManager().beginTransaction()
                .add(R.id.fragment_content, workoutListFragment)
                .setTransition(FragmentTransaction.TRANSIT_NONE)
                .commit();
        bottomNavigation = findViewById(R.id.bottom_navigation);
        bottomNavigation.setOnNavigationItemSelectedListener(this);
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        this.menu = menu;
        mainToolbar();
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        if (item.getItemId() == R.id.toolbar_edit_workout) {
            // This item only exists while a workout is selected in the list, so the
            // fragment currently in fragment_content is the WorkoutListFragment.
            WorkoutListFragment workoutListFragment = (WorkoutListFragment)
                    getSupportFragmentManager().findFragmentById(R.id.fragment_content);
            workoutListFragment.removeSelection();
            mainToolbar();
            // The Intent (workout + position extras) was attached in updateEditToolbar.
            startActivityForResult(item.getIntent(), EDIT_WORKOUT);
            return true;
        }
        if (item.getItemId() == R.id.toolbar_delete_workout) {
            Workout workout = (Workout) item.getIntent().getSerializableExtra("workout");
            editWorkoutService.deleteWorkout(workout.getWorkoutId());
            removeWorkoutFromList(workout);
            mainToolbar();
            return true;
        }
        if (item.getItemId() == R.id.toolbar_delete_finished_workout) {
            removeSelectedFinishedWorkouts();
            mainToolbar();
            return true;
        }
        if (item.getItemId() == R.id.toolbar_support) {
            showSupportDialog();
            return true;
        }
        // Let the framework handle anything we don't recognize (e.g. the home/up button).
        return super.onOptionsItemSelected(item);
    }

    private void showSupportDialog() {
        FragmentManager fm = getSupportFragmentManager();
        SupportDialogFragment.newInstance().show(fm, "fragment_support");
    }

    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        if (data == null) {
            // A cancelled or misbehaving child activity can deliver a null Intent;
            // previously this would NPE on getSerializableExtra below.
            return;
        }
        if (requestCode == ADD_WORKOUT && resultCode == Activity.RESULT_OK) {
            Workout workout = (Workout) data.getSerializableExtra("workout");
            addWorkoutToList(workout);
        }
        if (requestCode == EDIT_WORKOUT && resultCode == Activity.RESULT_OK) {
            Workout workout = (Workout) data.getSerializableExtra("workout");
            // getIntExtra returns a primitive; avoid needless boxing of the old Integer local.
            int position = data.getIntExtra("position", -1);
            updateWorkoutInList(position, workout);
        }
    }

    /** Appends {@code workout} to the visible workout list. */
    private void addWorkoutToList(Workout workout) {
        WorkoutListFragment workoutListFragment = (WorkoutListFragment)
                getSupportFragmentManager().findFragmentById(R.id.fragment_content);
        workoutListFragment.addWorkout(workout);
    }

    /** Replaces the list entry at {@code position} with the edited {@code workout}. */
    private void updateWorkoutInList(Integer position, Workout workout) {
        WorkoutListFragment workoutListFragment = (WorkoutListFragment)
                getSupportFragmentManager().findFragmentById(R.id.fragment_content);
        workoutListFragment.updateWorkout(position, workout);
    }

    /** Removes {@code workout} from the visible workout list. */
    private void removeWorkoutFromList(Workout workout) {
        WorkoutListFragment workoutListFragment = (WorkoutListFragment)
                getSupportFragmentManager().findFragmentById(R.id.fragment_content);
        workoutListFragment.removeWorkout(workout);
    }

    /** Deletes the finished workouts currently selected in the summary fragment. */
    private void removeSelectedFinishedWorkouts() {
        FinishedWorkoutListFragment workoutListFragment = (FinishedWorkoutListFragment)
                getSupportFragmentManager().findFragmentById(R.id.fragment_content);
        workoutListFragment.removeFinishedWorkout();
    }

    /**
     * Swaps the toolbar to the edit/delete menu for the workout selected at
     * {@code position}, attaching an Intent carrying the workout so the menu
     * handlers can recover it later.
     */
    public void updateEditToolbar(int position, Workout workout) {
        menu.clear();
        getMenuInflater().inflate(R.menu.menu_main_edit_workout, menu);
        Intent intent = new Intent(this, EditWorkoutActivity.class);
        intent.putExtra("workout", workout);
        intent.putExtra("position", position);
        MenuItem item = menu.findItem(R.id.toolbar_edit_workout);
        item.setIntent(intent);
        MenuItem itemDelete = menu.findItem(R.id.toolbar_delete_workout);
        itemDelete.setIntent(intent);
    }

    /**
     * Swaps the toolbar to the summary-edit menu, showing the selection count on
     * the delete item; reverts to the main menu when nothing is selected.
     */
    public void updateEditToolbar(int selectionCount) {
        if (selectionCount > 0) {
            menu.clear();
            getMenuInflater().inflate(R.menu.menu_summary_edit, menu);
            MenuItem item = menu.findItem(R.id.toolbar_delete_finished_workout);
            item.setTitle(valueOf(selectionCount));
        } else {
            menu.clear();
            getMenuInflater().inflate(R.menu.menu_main, menu);
        }
    }

    /** Restores the default toolbar (support item only for non-paying users) and refreshes the ad view. */
    public void mainToolbar() {
        menu.clear();
        if (billingProvider.isPaid()) {
            getMenuInflater().inflate(R.menu.menu_main, menu);
        } else {
            getMenuInflater().inflate(R.menu.menu_main_support, menu);
        }
        View adView = findViewById(R.id.adView);
        adMobProvider.initAdView(adView);
    }

    /** Makes {@code workout} the active workout and jumps to the exercise tab. */
    public void selectWorkout(Workout workout) {
        try {
            workoutService.switchWorkout(workout);
        } catch (WorkoutException e) {
            Log.e("WorkoutException", e.getMessage(), e);
        }
        bottomNavigation.setSelectedItemId(R.id.navigation_workout_item);
    }

    /** Jumps to the finished-workout summary tab. */
    public void showSummary() {
        bottomNavigation.setSelectedItemId(R.id.navigation_summary_item);
    }

    @Override
    public boolean onNavigationItemSelected(@NonNull MenuItem item) {
        if (item.getItemId() == R.id.navigation_workout_item) {
            boolean hasActiveWorkout = workoutService.hasActiveWorkout();
            if (hasActiveWorkout) {
                Fragment exerciseFragment = new ExerciseFragment();
                getSupportFragmentManager().beginTransaction()
                        .replace(R.id.fragment_content, exerciseFragment)
                        .setTransition(FragmentTransaction.TRANSIT_NONE)
                        .commit();
            } else {
                Context context = getApplicationContext();
                makeText(context, R.string.message_select_workout_first, LENGTH_SHORT)
                        .show();
            }
            // Returning false keeps the previous tab highlighted when no workout is active.
            return hasActiveWorkout;
        }
        if (item.getItemId() == R.id.navigation_start_item) {
            Fragment workoutListFragment = new WorkoutListFragment();
            getSupportFragmentManager().beginTransaction()
                    .replace(R.id.fragment_content, workoutListFragment)
                    .setTransition(FragmentTransaction.TRANSIT_NONE)
                    .commit();
            return true;
        }
        if (item.getItemId() == R.id.navigation_summary_item) {
            Fragment finishedWorkoutListFragment = new FinishedWorkoutListFragment();
            getSupportFragmentManager().beginTransaction()
                    .replace(R.id.fragment_content, finishedWorkoutListFragment)
                    .setTransition(FragmentTransaction.TRANSIT_NONE)
                    .commit();
            return true;
        }
        return false;
    }

    @Override
    public void onBackPressed() {
        // Back from any secondary tab first returns to the start tab; only a second
        // back press (while on the start tab) leaves the activity.
        if (bottomNavigation.getSelectedItemId() == R.id.navigation_start_item) {
            super.onBackPressed();
        } else {
            bottomNavigation.setSelectedItemId(R.id.navigation_start_item);
        }
    }

    @Override
    public void onDialogPositiveClick(SupportDialogFragment editRepsDialogFragment) {
        new NotificationAsyncTask(billingProvider, this).execute();
    }

    @Override
    public void onPostExecute(int result) {
        if (billingProvider.hasNotificationSend()) {
            makeText(getApplicationContext(), R.string.message_payment_available_soon, LENGTH_SHORT)
                    .show();
        }
    }
}
| |
/*
* Copyright (c) 2009-2012 Panxiaobo
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.googlecode.dex2jar.test;
import com.android.dx.cf.direct.DirectClassFile;
import com.android.dx.cf.direct.StdAttributeFactory;
import com.android.dx.dex.DexOptions;
import com.android.dx.dex.cf.CfOptions;
import com.android.dx.dex.cf.CfTranslator;
import com.googlecode.d2j.DexConstants;
import com.googlecode.d2j.DexException;
import com.googlecode.d2j.dex.ClassVisitorFactory;
import com.googlecode.d2j.dex.Dex2Asm;
import com.googlecode.d2j.node.DexClassNode;
import com.googlecode.d2j.node.DexFileNode;
import com.googlecode.d2j.node.DexMethodNode;
import com.googlecode.d2j.reader.zip.ZipUtil;
import com.googlecode.d2j.smali.BaksmaliDumper;
import com.googlecode.d2j.visitors.DexClassVisitor;
import org.junit.Assert;
import org.junit.Ignore;
import org.objectweb.asm.ClassReader;
import org.objectweb.asm.ClassVisitor;
import org.objectweb.asm.ClassWriter;
import org.objectweb.asm.MethodVisitor;
import org.objectweb.asm.tree.ClassNode;
import org.objectweb.asm.tree.MethodNode;
import org.objectweb.asm.tree.TryCatchBlockNode;
import org.objectweb.asm.tree.analysis.Analyzer;
import org.objectweb.asm.tree.analysis.AnalyzerException;
import org.objectweb.asm.tree.analysis.BasicVerifier;
import org.objectweb.asm.tree.analysis.Frame;
import org.objectweb.asm.util.CheckClassAdapter;
import org.objectweb.asm.util.Printer;
import org.objectweb.asm.util.Textifier;
import org.objectweb.asm.util.TraceMethodVisitor;
import java.io.*;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.net.URL;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Enumeration;
import java.util.List;
import java.util.zip.ZipEntry;
import java.util.zip.ZipException;
import java.util.zip.ZipFile;
/**
 * Shared helpers for dex2jar tests: dx-based dexing of class files, ASM-based
 * bytecode verification, and round-trip (dex → class → dex) translation checks.
 *
 * @author <a href="mailto:pxb1988@gmail.com">Panxiaobo</a>
 */
@Ignore
public abstract class TestUtils {
    /** Convenience no-op to hang a debugger breakpoint on. */
    public static void breakPoint() {
    }

    /**
     * Verifies every {@code .class} entry inside {@code zip} with the ASM
     * {@link BasicVerifier}; {@link #verify(ClassReader)} throws on failure.
     */
    public static void checkZipFile(File zip) throws ZipException, Exception {
        // try-with-resources: the ZipFile was previously never closed (file-handle leak).
        try (ZipFile zipFile = new ZipFile(zip)) {
            for (Enumeration<? extends ZipEntry> e = zipFile.entries(); e.hasMoreElements();) {
                ZipEntry entry = e.nextElement();
                if (entry.getName().endsWith(".class")) {
                    try (InputStream is = zipFile.getInputStream(entry)) {
                        // Throws (DexException/AnalyzerException) if the class fails verification;
                        // the old StringWriter + assert here was vacuous (nothing wrote to it).
                        verify(new ClassReader(ZipUtil.toByteArray(is)));
                    }
                }
            }
        }
    }

    /** Dexes a single file into {@code distFile} (or a temp file when null). */
    public static File dex(File file, File distFile) throws Exception {
        return dex(new File[] { file }, distFile);
    }

    /** Dexes {@code files} into a fresh temp file. */
    public static File dex(File[] files) throws Exception {
        return dex(files, null);
    }

    public static File dex(File[] files, File distFile) throws Exception {
        return dex(Arrays.asList(files), distFile);
    }

    /**
     * Invokes {@code com.android.dx.command.Main.main} reflectively to dex the
     * given paths. Reflection keeps dx a soft dependency of the test sources.
     *
     * @param distFile output .dex file; a temp file is created when null
     * @return the file the dex was written to
     */
    public static File dexP(List<Path> files, File distFile) throws Exception {
        Class<?> c = com.android.dx.command.Main.class;
        Method m = c.getMethod("main", String[].class);
        if (distFile == null) {
            distFile = File.createTempFile("dex", ".dex");
        }
        List<String> args = new ArrayList<>();
        args.addAll(Arrays.asList("--dex", "--no-strict", "--output=" + distFile.getCanonicalPath()));
        for (Path f : files) {
            args.add(f.toAbsolutePath().toString());
        }
        m.invoke(null, new Object[] { args.toArray(new String[0]) });
        return distFile;
    }

    /** Same as {@link #dexP(List, File)} but for {@link File} inputs. */
    public static File dex(List<File> files, File distFile) throws Exception {
        Class<?> c = com.android.dx.command.Main.class;
        Method m = c.getMethod("main", String[].class);
        if (distFile == null) {
            distFile = File.createTempFile("dex", ".dex");
        }
        List<String> args = new ArrayList<>();
        args.addAll(Arrays.asList("--dex", "--no-strict", "--output=" + distFile.getCanonicalPath()));
        for (File f : files) {
            args.add(f.getCanonicalPath());
        }
        m.invoke(null, new Object[] { args.toArray(new String[0]) });
        return distFile;
    }

    /**
     * Compresses a frame-value type name for the one-character-per-slot frame dump
     * in {@link #printAnalyzerResult}: any internal name (contains '/') collapses
     * to "o" (object), everything else (primitives, '.', '?') is kept as-is.
     */
    private static String getShortName(final String name) {
        int n = name.lastIndexOf('/');
        return n == -1 ? name : "o";
    }

    /** Locates the test-resource {@code dexes} directory next to this compiled class. */
    public static List<Path> listTestDexFiles() {
        Class<?> testClass = TestUtils.class;
        URL url = testClass.getResource("/" + testClass.getName().replace('.', '/') + ".class");
        Assert.assertNotNull(url);
        final String fileStr = url.getFile();
        Assert.assertNotNull(fileStr);
        String dirx = fileStr.substring(0, fileStr.length() - testClass.getName().length() - ".class".length());
        System.out.println("dirx is " + dirx);
        File file = new File(dirx, "dexes");
        return listPath(file, ".apk", ".dex", ".zip");
    }

    /** Recursively lists files under {@code file} whose names end with one of {@code exts}. */
    public static List<Path> listPath(File file, final String... exts) {
        final List<Path> list = new ArrayList<>();
        try {
            Files.walkFileTree(file.toPath(), new SimpleFileVisitor<Path>() {
                @Override
                public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
                    String name = file.getFileName().toString();
                    boolean add = false;
                    for (String ext : exts) {
                        if (name.endsWith(ext)) {
                            add = true;
                            break;
                        }
                    }
                    if (add) {
                        list.add(file);
                    }
                    return super.visitFile(file, attrs);
                }
            });
        } catch (IOException e) {
            e.printStackTrace();
        }
        return list;
    }

    // ASM's Printer keeps its accumulated text in a protected 'buf' field; we read it
    // reflectively to interleave instruction text with analyzer frames.
    static Field buf;
    static {
        try {
            buf = Printer.class.getDeclaredField("buf");
            // setAccessible moved inside the try: previously a failed lookup left buf null
            // and the unconditional setAccessible call NPE'd, masking the real exception.
            buf.setAccessible(true);
        } catch (NoSuchFieldException | SecurityException e) {
            e.printStackTrace();
        }
    }

    /** Dumps each instruction of {@code method} alongside the analyzer frame computed for it. */
    static void printAnalyzerResult(MethodNode method, Analyzer a, final PrintWriter pw)
            throws IllegalArgumentException, IllegalAccessException {
        Frame[] frames = a.getFrames();
        Textifier t = new Textifier();
        TraceMethodVisitor mv = new TraceMethodVisitor(t);
        String format = "%05d %-" + (method.maxStack + method.maxLocals + 6) + "s|%s";
        for (int j = 0; j < method.instructions.size(); ++j) {
            method.instructions.get(j).accept(mv);
            StringBuffer s = new StringBuffer();
            Frame f = frames[j];
            if (f == null) {
                // Unreachable instruction: the analyzer produced no frame for it.
                s.append('?');
            } else {
                for (int k = 0; k < f.getLocals(); ++k) {
                    s.append(getShortName(f.getLocal(k).toString()));
                }
                s.append(" : ");
                for (int k = 0; k < f.getStackSize(); ++k) {
                    s.append(getShortName(f.getStack(k).toString()));
                }
            }
            pw.printf(format, j, s, buf.get(t));
        }
        for (int j = 0; j < method.tryCatchBlocks.size(); ++j) {
            ((TryCatchBlockNode) method.tryCatchBlocks.get(j)).accept(mv);
            pw.print(" " + buf.get(t));
        }
        pw.println();
        pw.flush();
    }

    /** Verifies {@code cr} against the BasicVerifier, reporting to stdout. */
    public static void verify(final ClassReader cr) throws AnalyzerException, IllegalArgumentException,
            IllegalAccessException {
        try {
            verify(cr, new PrintWriter(new OutputStreamWriter(System.out, "UTF-8")));
        } catch (UnsupportedEncodingException e) {
            e.printStackTrace();
        }
    }

    /**
     * Verifies every method of {@code cr}: rejects zero-length try/catch ranges,
     * then runs ASM's {@link BasicVerifier}. On failure, prints the analyzer frame
     * dump to {@code out} and rethrows as {@link DexException}.
     */
    @SuppressWarnings("rawtypes")
    public static void verify(final ClassReader cr, PrintWriter out) throws AnalyzerException,
            IllegalArgumentException, IllegalAccessException {
        ClassNode cn = new ClassNode();
        cr.accept(new CheckClassAdapter(cn, false), ClassReader.SKIP_DEBUG);
        List methods = cn.methods;
        for (int i = 0; i < methods.size(); ++i) {
            MethodNode method = (MethodNode) methods.get(i);
            List tryCatchBlocks = method.tryCatchBlocks;
            for (int j = 0; j < tryCatchBlocks.size(); j++) {
                TryCatchBlockNode tcn = (TryCatchBlockNode) tryCatchBlocks.get(j);
                if (tcn.start.equals(tcn.end)) {
                    throw new DexException("try/catch block %d in %s has same start(%s) and end(%s)", j, method.name,
                            tcn.start.getLabel(), tcn.end.getLabel());
                }
            }
            BasicVerifier verifier = new BasicVerifier();
            Analyzer a = new Analyzer(verifier);
            try {
                a.analyze(cn.name, method);
            } catch (Exception e) {
                out.println(cr.getClassName() + "." + method.name + method.desc);
                printAnalyzerResult(method, a, out);
                e.printStackTrace(out);
                out.flush();
                throw new DexException("method " + method.name + " " + method.desc, e);
            }
        }
    }

    public static byte[] testDexASMifier(Class<?> clz, String methodName) throws Exception {
        return testDexASMifier(clz, methodName, "xxxx/" + methodName);
    }

    /**
     * Runs the ASMifier-style generator method {@code methodName} on {@code clz}
     * against a fresh DexClassNode, then translates and verifies the result.
     *
     * @param generateClassName internal name (without L;) of the generated class
     * @return the verified .class bytes
     */
    public static byte[] testDexASMifier(Class<?> clz, String methodName, String generateClassName) throws Exception {
        DexClassNode clzNode = new DexClassNode(DexConstants.ACC_PUBLIC, "L" + generateClassName + ";",
                "Ljava/lang/Object;", null);
        Method m = clz.getMethod(methodName, DexClassVisitor.class);
        if (m == null) {
            throw new java.lang.NoSuchMethodException(methodName);
        }
        m.setAccessible(true);
        if (Modifier.isStatic(m.getModifiers())) {
            m.invoke(null, clzNode);
        } else {
            m.invoke(clz.newInstance(), clzNode);
        }
        return translateAndCheck(clzNode);
    }

    /**
     * Round-trips {@code clzNode}: dex → .class (via Dex2Asm), verifies the class
     * bytes, then .class → dex again (via dx's CfTranslator) as a smoke test.
     * On a method-conversion failure the offending method is baksmali-dumped to
     * stderr before the exception propagates.
     */
    public static byte[] translateAndCheck(DexFileNode fileNode, DexClassNode clzNode) throws AnalyzerException,
            IllegalAccessException {
        // 1. convert to .class
        Dex2Asm dex2Asm = new Dex2Asm() {
            @Override
            public void convertCode(DexMethodNode methodNode, MethodVisitor mv) {
                try {
                    super.convertCode(methodNode, mv);
                } catch (Exception ex) {
                    BaksmaliDumper d = new BaksmaliDumper();
                    try {
                        BufferedWriter out = new BufferedWriter(new OutputStreamWriter(System.err, "UTF-8"));
                        d.baksmaliMethod(methodNode, out);
                        out.flush();
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                    throw new DexException(ex, "fail convert code %s", methodNode.method);
                }
            }
        };
        final ClassWriter cw = new ClassWriter(ClassWriter.COMPUTE_MAXS);
        ClassVisitorFactory cvf = new ClassVisitorFactory() {
            @Override
            public ClassVisitor create(String classInternalName) {
                return cw;
            }
        };
        if (fileNode != null) {
            dex2Asm.convertClass(clzNode, cvf, fileNode);
        } else {
            dex2Asm.convertClass(clzNode, cvf);
        }
        byte[] data = cw.toByteArray();
        // 2. verify .class
        ClassReader cr = new ClassReader(data);
        TestUtils.verify(cr);
        // 3. convert back to dex
        CfOptions cfOptions = new CfOptions();
        cfOptions.strictNameCheck = false;
        DexOptions dexOptions = new DexOptions();
        // className is "Lfoo/Bar;" — strip the L and ; to build the .class path.
        DirectClassFile dcf = new DirectClassFile(data, clzNode.className.substring(1, clzNode.className.length() - 1) + ".class", true);
        dcf.setAttributeFactory(new StdAttributeFactory());
        com.android.dx.dex.file.DexFile dxFile = new com.android.dx.dex.file.DexFile(dexOptions);
        CfTranslator.translate(dcf, data, cfOptions, dexOptions, dxFile);
        return data;
    }

    public static byte[] translateAndCheck(DexClassNode clzNode) throws AnalyzerException, IllegalAccessException {
        return translateAndCheck(null, clzNode);
    }

    /** Defines {@code data} as a class in a throwaway class loader. */
    public static Class<?> defineClass(String type, byte[] data) {
        return new CL().xxxDefine(type, data);
    }

    static class CL extends ClassLoader {
        public Class<?> xxxDefine(String type, byte[] data) {
            return super.defineClass(type, data, 0, data.length);
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.db.compaction;
import java.util.*;
import java.util.function.LongPredicate;
import java.util.function.Predicate;
import org.apache.cassandra.config.Config;
import org.apache.cassandra.db.Memtable;
import org.apache.cassandra.db.rows.UnfilteredRowIterator;
import com.google.common.base.Predicates;
import com.google.common.collect.Iterables;
import com.google.common.util.concurrent.RateLimiter;
import org.apache.cassandra.db.partitions.Partition;
import org.apache.cassandra.io.sstable.format.SSTableReader;
import org.apache.cassandra.io.util.FileDataInput;
import org.apache.cassandra.io.util.FileUtils;
import org.apache.cassandra.schema.CompactionParams.TombstoneOption;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.cassandra.db.*;
import org.apache.cassandra.utils.AlwaysPresentFilter;
import org.apache.cassandra.utils.OverlapIterator;
import org.apache.cassandra.utils.concurrent.Refs;
import static org.apache.cassandra.db.lifecycle.SSTableIntervalTree.buildIntervals;
/**
 * Manage compaction options.
 *
 * Tracks, for a compaction of {@code compacting} sstables, the live sstables and
 * memtables that overlap them, and answers two questions during the merge:
 * which input sstables are fully expired (droppable without compacting), and
 * whether a given partition's tombstones may be purged.
 */
public class CompactionController implements AutoCloseable
{
    private static final Logger logger = LoggerFactory.getLogger(CompactionController.class);
    private static final String NEVER_PURGE_TOMBSTONES_PROPERTY = Config.PROPERTY_PREFIX + "never_purge_tombstones";
    static final boolean NEVER_PURGE_TOMBSTONES = Boolean.getBoolean(NEVER_PURGE_TOMBSTONES_PROPERTY);
    public final ColumnFamilyStore cfs;
    private final boolean compactingRepaired;
    // note that overlapIterator and overlappingSSTables will be null if NEVER_PURGE_TOMBSTONES is set - this is a
    // good thing so that no one starts using them and thinks that if overlappingSSTables is empty, there
    // is no overlap.
    private Refs<SSTableReader> overlappingSSTables;
    private OverlapIterator<PartitionPosition, SSTableReader> overlapIterator;
    private final Iterable<SSTableReader> compacting;
    private final RateLimiter limiter;
    // Smallest min timestamp across the compacting sstables; 0 when compacting is null/empty (tests).
    private final long minTimestamp;
    final TombstoneOption tombstoneOption;
    // Lazily-opened data files for shadow iteration; closed in close().
    final Map<SSTableReader, FileDataInput> openDataFiles = new HashMap<>();
    public final int gcBefore;
    protected CompactionController(ColumnFamilyStore cfs, int maxValue)
    {
        this(cfs, null, maxValue);
    }
    public CompactionController(ColumnFamilyStore cfs, Set<SSTableReader> compacting, int gcBefore)
    {
        this(cfs, compacting, gcBefore, null,
             cfs.getCompactionStrategyManager().getCompactionParams().tombstoneOption());
    }
    public CompactionController(ColumnFamilyStore cfs, Set<SSTableReader> compacting, int gcBefore, RateLimiter limiter, TombstoneOption tombstoneOption)
    {
        assert cfs != null;
        this.cfs = cfs;
        this.gcBefore = gcBefore;
        this.compacting = compacting;
        this.limiter = limiter;
        compactingRepaired = compacting != null && compacting.stream().allMatch(SSTableReader::isRepaired);
        this.tombstoneOption = tombstoneOption;
        this.minTimestamp = compacting != null && !compacting.isEmpty() // check needed for test
                          ? compacting.stream().mapToLong(SSTableReader::getMinTimestamp).min().getAsLong()
                          : 0;
        refreshOverlaps();
        if (NEVER_PURGE_TOMBSTONES)
            logger.warn("You are running with -Dcassandra.never_purge_tombstones=true, this is dangerous!");
    }
    // Re-acquires the overlap set only when one of the currently referenced overlapping
    // sstables has since been compacted away (its data now lives in successor sstables).
    public void maybeRefreshOverlaps()
    {
        if (NEVER_PURGE_TOMBSTONES)
        {
            logger.debug("not refreshing overlaps - running with -D{}=true",
                    NEVER_PURGE_TOMBSTONES_PROPERTY);
            return;
        }
        if (ignoreOverlaps())
        {
            logger.debug("not refreshing overlaps - running with ignoreOverlaps activated");
            return;
        }
        for (SSTableReader reader : overlappingSSTables)
        {
            if (reader.isMarkedCompacted())
            {
                refreshOverlaps();
                return;
            }
        }
    }
    private void refreshOverlaps()
    {
        if (NEVER_PURGE_TOMBSTONES)
            return;
        // Release the previous refs (and any data files opened against them) before re-acquiring.
        if (this.overlappingSSTables != null)
            close();
        if (compacting == null || ignoreOverlaps())
            overlappingSSTables = Refs.tryRef(Collections.<SSTableReader>emptyList());
        else
            overlappingSSTables = cfs.getAndReferenceOverlappingLiveSSTables(compacting);
        this.overlapIterator = new OverlapIterator<>(buildIntervals(overlappingSSTables));
    }
    public Set<SSTableReader> getFullyExpiredSSTables()
    {
        return getFullyExpiredSSTables(cfs, compacting, overlappingSSTables, gcBefore, ignoreOverlaps());
    }
    /**
     * Finds expired sstables
     *
     * works something like this;
     * 1. find "global" minTimestamp of overlapping sstables, compacting sstables and memtables containing any non-expired data
     * 2. build a list of fully expired candidates
     * 3. check if the candidates to be dropped actually can be dropped {@code (maxTimestamp < global minTimestamp)}
     *    - if not droppable, remove from candidates
     * 4. return candidates.
     *
     * @param cfStore
     * @param compacting we take the drop-candidates from this set, it is usually the sstables included in the compaction
     * @param overlapping the sstables that overlap the ones in compacting.
     * @param gcBefore
     * @param ignoreOverlaps don't check if data shadows/overlaps any data in other sstables
     * @return the compacting sstables that are safe to drop wholesale
     */
    public static Set<SSTableReader> getFullyExpiredSSTables(ColumnFamilyStore cfStore,
                                                             Iterable<SSTableReader> compacting,
                                                             Iterable<SSTableReader> overlapping,
                                                             int gcBefore,
                                                             boolean ignoreOverlaps)
    {
        logger.trace("Checking droppable sstables in {}", cfStore);
        if (NEVER_PURGE_TOMBSTONES || compacting == null)
            return Collections.emptySet();
        if (cfStore.getCompactionStrategyManager().onlyPurgeRepairedTombstones() && !Iterables.all(compacting, SSTableReader::isRepaired))
            return Collections.emptySet();
        if (ignoreOverlaps)
        {
            // Overlap checking disabled: any sstable whose entire contents expired before
            // gcBefore is droppable regardless of what other sstables contain.
            Set<SSTableReader> fullyExpired = new HashSet<>();
            for (SSTableReader candidate : compacting)
            {
                if (candidate.getSSTableMetadata().maxLocalDeletionTime < gcBefore)
                {
                    fullyExpired.add(candidate);
                    logger.trace("Dropping overlap ignored expired SSTable {} (maxLocalDeletionTime={}, gcBefore={})",
                                 candidate, candidate.getSSTableMetadata().maxLocalDeletionTime, gcBefore);
                }
            }
            return fullyExpired;
        }
        List<SSTableReader> candidates = new ArrayList<>();
        long minTimestamp = Long.MAX_VALUE;
        for (SSTableReader sstable : overlapping)
        {
            // Overlapping might include fully expired sstables. What we care about here is
            // the min timestamp of the overlapping sstables that actually contain live data.
            if (sstable.getSSTableMetadata().maxLocalDeletionTime >= gcBefore)
                minTimestamp = Math.min(minTimestamp, sstable.getMinTimestamp());
        }
        for (SSTableReader candidate : compacting)
        {
            if (candidate.getSSTableMetadata().maxLocalDeletionTime < gcBefore)
                candidates.add(candidate);
            else
                minTimestamp = Math.min(minTimestamp, candidate.getMinTimestamp());
        }
        for (Memtable memtable : cfStore.getTracker().getView().getAllMemtables())
            minTimestamp = Math.min(minTimestamp, memtable.getMinTimestamp());
        // At this point, minTimestamp denotes the lowest timestamp of any relevant
        // SSTable or Memtable that contains a constructive value. candidates contains all the
        // candidates with no constructive values. The ones out of these that have
        // (getMaxTimestamp() < minTimestamp) serve no purpose anymore.
        Iterator<SSTableReader> iterator = candidates.iterator();
        while (iterator.hasNext())
        {
            SSTableReader candidate = iterator.next();
            if (candidate.getMaxTimestamp() >= minTimestamp)
            {
                // Its tombstones may still shadow newer-or-equal data elsewhere; must compact.
                iterator.remove();
            }
            else
            {
                logger.trace("Dropping expired SSTable {} (maxLocalDeletionTime={}, gcBefore={})",
                        candidate, candidate.getSSTableMetadata().maxLocalDeletionTime, gcBefore);
            }
        }
        return new HashSet<>(candidates);
    }
    public static Set<SSTableReader> getFullyExpiredSSTables(ColumnFamilyStore cfStore,
                                                             Iterable<SSTableReader> compacting,
                                                             Iterable<SSTableReader> overlapping,
                                                             int gcBefore)
    {
        return getFullyExpiredSSTables(cfStore, compacting, overlapping, gcBefore, false);
    }
    public String getKeyspace()
    {
        return cfs.keyspace.getName();
    }
    public String getColumnFamily()
    {
        return cfs.name;
    }
    /**
     * @param key
     * @return a predicate for whether tombstones marked for deletion at the given time for the given partition are
     * purgeable; we calculate this by checking whether the deletion time is less than the min timestamp of all SSTables
     * containing this partition and not participating in the compaction. This means there isn't any data in those
     * sstables that might still need to be suppressed by a tombstone at this timestamp.
     */
    public LongPredicate getPurgeEvaluator(DecoratedKey key)
    {
        if (NEVER_PURGE_TOMBSTONES || !compactingRepaired())
            return time -> false;
        overlapIterator.update(key);
        Set<SSTableReader> filteredSSTables = overlapIterator.overlaps();
        Iterable<Memtable> memtables = cfs.getTracker().getView().getAllMemtables();
        long minTimestampSeen = Long.MAX_VALUE;
        boolean hasTimestamp = false;
        for (SSTableReader sstable: filteredSSTables)
        {
            // if we don't have bloom filter(bf_fp_chance=1.0 or filter file is missing),
            // we check index file instead.
            if (sstable.getBloomFilter() instanceof AlwaysPresentFilter && sstable.getPosition(key, SSTableReader.Operator.EQ, false) != null
                || sstable.getBloomFilter().isPresent(key))
            {
                minTimestampSeen = Math.min(minTimestampSeen, sstable.getMinTimestamp());
                hasTimestamp = true;
            }
        }
        for (Memtable memtable : memtables)
        {
            Partition partition = memtable.getPartition(key);
            if (partition != null)
            {
                minTimestampSeen = Math.min(minTimestampSeen, partition.stats().minTimestamp);
                hasTimestamp = true;
            }
        }
        if (!hasTimestamp)
            return time -> true;
        else
        {
            final long finalTimestamp = minTimestampSeen;
            return time -> time < finalTimestamp;
        }
    }
    public void close()
    {
        if (overlappingSSTables != null)
            overlappingSSTables.release();
        FileUtils.closeQuietly(openDataFiles.values());
        openDataFiles.clear();
    }
    public boolean compactingRepaired()
    {
        return !cfs.getCompactionStrategyManager().onlyPurgeRepairedTombstones() || compactingRepaired;
    }
    boolean provideTombstoneSources()
    {
        return tombstoneOption != TombstoneOption.NONE;
    }
    // caller must close iterators
    public Iterable<UnfilteredRowIterator> shadowSources(DecoratedKey key, boolean tombstoneOnly)
    {
        if (!provideTombstoneSources() || !compactingRepaired() || NEVER_PURGE_TOMBSTONES)
            return null;
        overlapIterator.update(key);
        return Iterables.filter(Iterables.transform(overlapIterator.overlaps(),
                                                    reader -> getShadowIterator(reader, key, tombstoneOnly)),
                                Predicates.notNull());
    }
    @SuppressWarnings("resource") // caller to close
    private UnfilteredRowIterator getShadowIterator(SSTableReader reader, DecoratedKey key, boolean tombstoneOnly)
    {
        if (reader.isMarkedSuspect() ||
            reader.getMaxTimestamp() <= minTimestamp ||
            tombstoneOnly && !reader.mayHaveTombstones())
            return null;
        RowIndexEntry<?> position = reader.getPosition(key, SSTableReader.Operator.EQ);
        if (position == null)
            return null;
        FileDataInput dfile = openDataFiles.computeIfAbsent(reader, this::openDataFile);
        return reader.simpleIterator(dfile, key, position, tombstoneOnly);
    }
    /**
     * Is overlapped sstables ignored
     *
     * Control whether or not we are taking into account overlapping sstables when looking for fully expired sstables.
     * In order to reduce the amount of work needed, we look for sstables that can be dropped instead of compacted.
     * As a safeguard mechanism, for each time range of data in a sstable, we are checking globally to see if all data
     * of this time range is fully expired before considering to drop the sstable.
     * This strategy can retain for a long time a lot of sstables on disk (see CASSANDRA-13418) so this option
     * controls whether or not this check should be ignored.
     *
     * @return false by default
     */
    protected boolean ignoreOverlaps()
    {
        return false;
    }
    private FileDataInput openDataFile(SSTableReader reader)
    {
        return limiter != null ? reader.openDataReader(limiter) : reader.openDataReader();
    }
}
| |
/**
* Copyright (C) 2013-2015 Dell, Inc
*
* ====================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ====================================================================
*/
package org.dasein.cloud.azure.compute.vm;
import org.dasein.cloud.*;
import org.dasein.cloud.azure.Azure;
import org.dasein.cloud.compute.Architecture;
import org.dasein.cloud.compute.ImageClass;
import org.dasein.cloud.compute.Platform;
import org.dasein.cloud.compute.VirtualMachineCapabilities;
import org.dasein.cloud.compute.VMScalingCapabilities;
import org.dasein.cloud.compute.VmState;
import org.dasein.cloud.util.NamingConstraints;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.Collections;
import java.util.Locale;
/**
 * Describes the capabilities of Azure with respect to Dasein virtual machine
 * operations: which lifecycle transitions are permitted per state, which
 * launch requirements apply, and which naming constraints hold.
 * <p>Created by Danielle Mayne: 4/03/14 14:00 PM</p>
 *
 * @author Danielle Mayne
 * @version 2014.03 initial version
 * @since 2014.03
 */
public class VMCapabilities extends AbstractCapabilities<Azure> implements VirtualMachineCapabilities {

    public VMCapabilities( @Nonnull Azure provider ) {
        super(provider);
    }

    @Override
    public boolean canAlter( @Nonnull VmState fromState ) throws CloudException, InternalException {
        // Alterations are allowed from any state.
        return true;
    }

    @Override
    public boolean canClone( @Nonnull VmState fromState ) throws CloudException, InternalException {
        return false;
    }

    @Override
    public boolean canPause( @Nonnull VmState fromState ) throws CloudException, InternalException {
        return false;
    }

    @Override
    public boolean canReboot( @Nonnull VmState fromState ) throws CloudException, InternalException {
        // Only a running VM can be rebooted.
        return fromState == VmState.RUNNING;
    }

    @Override
    public boolean canResume( @Nonnull VmState fromState ) throws CloudException, InternalException {
        return false;
    }

    @Override
    public boolean canStart( @Nonnull VmState fromState ) throws CloudException, InternalException {
        // Anything that is not already running may be started.
        return fromState != VmState.RUNNING;
    }

    @Override
    public boolean canStop( @Nonnull VmState fromState ) throws CloudException, InternalException {
        // Anything that is not already stopped may be stopped.
        return fromState != VmState.STOPPED;
    }

    @Override
    public boolean canSuspend( @Nonnull VmState fromState ) throws CloudException, InternalException {
        return false;
    }

    @Override
    public boolean canTerminate( @Nonnull VmState fromState ) throws CloudException, InternalException {
        return fromState != VmState.TERMINATED;
    }

    @Override
    public boolean canUnpause( @Nonnull VmState fromState ) throws CloudException, InternalException {
        return false;
    }

    @Override
    public int getMaximumVirtualMachineCount() throws CloudException, InternalException {
        // -2 signals that the limit is unknown (presumably the Dasein
        // Capabilities.LIMIT_UNKNOWN convention — TODO confirm against core).
        return -2;
    }

    @Override
    public int getCostFactor( @Nonnull VmState state ) throws CloudException, InternalException {
        // Terminated VMs incur no cost; every other state is billed in full.
        return state == VmState.TERMINATED ? 0 : 100;
    }

    @Nonnull
    @Override
    public String getProviderTermForVirtualMachine( @Nonnull Locale locale ) throws CloudException, InternalException {
        return "virtual machine";
    }

    @Nullable
    @Override
    public VMScalingCapabilities getVerticalScalingCapabilities() throws CloudException, InternalException {
        return VMScalingCapabilities.getInstance(false, true, false);
    }

    @Nonnull
    @Override
    public NamingConstraints getVirtualMachineNamingConstraints() throws CloudException, InternalException {
        // Names are 3-15 characters under the strict constraints, with '-' as
        // the only additional allowed character.
        return NamingConstraints.getStrictInstance(3, 15).constrainedBy(new char[]{'-'});
    }

    @Nullable
    @Override
    public VisibleScope getVirtualMachineVisibleScope() {
        return null;
    }

    @Nullable
    @Override
    public VisibleScope getVirtualMachineProductVisibleScope() {
        return null;
    }

    @Nonnull
    @Override
    public Requirement identifyDataCenterLaunchRequirement() throws CloudException, InternalException {
        return Requirement.REQUIRED;
    }

    @Nonnull
    @Override
    public Requirement identifyImageRequirement( @Nonnull ImageClass cls ) throws CloudException, InternalException {
        // Only machine images are required at launch.
        return cls == ImageClass.MACHINE ? Requirement.REQUIRED : Requirement.NONE;
    }

    @Nonnull
    @Override
    public Requirement identifyPasswordRequirement( Platform platform ) throws CloudException, InternalException {
        return Requirement.OPTIONAL;
    }

    @Nonnull
    @Override
    public Requirement identifyRootVolumeRequirement() throws CloudException, InternalException {
        return Requirement.NONE;
    }

    @Nonnull
    @Override
    public Requirement identifyShellKeyRequirement( Platform platform ) throws CloudException, InternalException {
        return Requirement.NONE;
    }

    @Nonnull
    @Override
    public Requirement identifyStaticIPRequirement() throws CloudException, InternalException {
        return Requirement.NONE;
    }

    @Nonnull
    @Override
    public Requirement identifySubnetRequirement() throws CloudException, InternalException {
        return Requirement.OPTIONAL;
    }

    @Nonnull
    @Override
    public Requirement identifyVlanRequirement() throws CloudException, InternalException {
        return Requirement.OPTIONAL;
    }

    @Override
    public boolean isAPITerminationPreventable() throws CloudException, InternalException {
        return false;
    }

    @Override
    public boolean isBasicAnalyticsSupported() throws CloudException, InternalException {
        return false;
    }

    @Override
    public boolean isExtendedAnalyticsSupported() throws CloudException, InternalException {
        return false;
    }

    @Override
    public boolean isUserDataSupported() throws CloudException, InternalException {
        return true;
    }

    @Override
    public boolean isUserDefinedPrivateIPSupported() throws CloudException, InternalException {
        return false;
    }

    @Nonnull
    @Override
    public Iterable<Architecture> listSupportedArchitectures() throws InternalException, CloudException {
        // Azure only runs 64-bit images.
        return Collections.singletonList(Architecture.I64);
    }

    @Override
    public boolean supportsSpotVirtualMachines() throws InternalException, CloudException {
        return false;
    }

    @Override
    public boolean supportsClientRequestToken() throws InternalException, CloudException {
        return false;
    }

    @Override
    public boolean supportsCloudStoredShellKey() throws InternalException, CloudException {
        return false;
    }

    @Override
    public boolean isVMProductDCConstrained() {
        return false;
    }

    /**
     * Non VMState defined lifecycle supported operations.
     * The 'can' operations return similar values but based on a specific VM
     * state; these return whether or not there is support at all.
     */
    @Override
    public boolean supportsAlterVM() {
        return true;
    }

    @Override
    public boolean supportsClone() {
        return false;
    }

    @Override
    public boolean supportsPause() {
        return false;
    }

    @Override
    public boolean supportsReboot() {
        return true;
    }

    @Override
    public boolean supportsResume() {
        return false;
    }

    @Override
    public boolean supportsStart() {
        return true;
    }

    @Override
    public boolean supportsStop() {
        return true;
    }

    @Override
    public boolean supportsSuspend() {
        return false;
    }

    @Override
    public boolean supportsTerminate() {
        return true;
    }

    @Override
    public boolean supportsUnPause() {
        return false;
    }
}
| |
package org.activiti.engine.test.cfg.executioncount;
import java.util.List;
import java.util.Map;
import org.activiti.engine.history.HistoricActivityInstance;
import org.activiti.engine.impl.cfg.CommandExecutorImpl;
import org.activiti.engine.impl.db.DbSqlSessionFactory;
import org.activiti.engine.impl.history.DefaultHistoryManager;
import org.activiti.engine.impl.history.HistoryLevel;
import org.activiti.engine.impl.interceptor.CommandInterceptor;
import org.activiti.engine.impl.test.PluggableActivitiTestCase;
import org.activiti.engine.repository.Deployment;
import org.activiti.engine.task.Task;
import org.activiti.engine.test.profiler.ActivitiProfiler;
import org.activiti.engine.test.profiler.CommandStats;
import org.activiti.engine.test.profiler.ConsoleLogger;
import org.activiti.engine.test.profiler.ProfileSession;
import org.activiti.engine.test.profiler.ProfilingDbSqlSessionFactory;
import org.activiti.engine.test.profiler.TotalExecutionTimeCommandInterceptor;
import org.junit.Assert;
/**
 * Verifies the exact database operations (selects, inserts, updates, deletes)
 * that the engine executes for a number of process definitions. The Activiti
 * profiler infrastructure captures per-command statistics which are then
 * checked against expected operation counts.
 *
 * @author Joram Barrez
 */
public class VerifyDatabaseOperationsTest extends PluggableActivitiTestCase {

    // Original configuration values, remembered in setUp() and restored in tearDown().
    protected boolean oldExecutionTreeFetchValue;
    protected boolean oldExecutionRelationshipCountValue;
    protected boolean oldenableProcessDefinitionInfoCacheValue;
    protected CommandInterceptor oldFirstCommandInterceptor;
    protected DbSqlSessionFactory oldDbSqlSessionFactory;
    protected HistoryLevel oldHistoryLevel;

    @Override
    protected void setUp() throws Exception {
        super.setUp();

        // Enable flags, remembering the old values so tearDown() can restore them
        this.oldExecutionTreeFetchValue = processEngineConfiguration.getPerformanceSettings().isEnableEagerExecutionTreeFetching();
        this.oldExecutionRelationshipCountValue = processEngineConfiguration.getPerformanceSettings().isEnableExecutionRelationshipCounts();
        this.oldenableProcessDefinitionInfoCacheValue = processEngineConfiguration.isEnableProcessDefinitionInfoCache();
        oldHistoryLevel = ((DefaultHistoryManager) processEngineConfiguration.getHistoryManager()).getHistoryLevel();

        processEngineConfiguration.getPerformanceSettings().setEnableEagerExecutionTreeFetching(true);
        processEngineConfiguration.getPerformanceSettings().setEnableExecutionRelationshipCounts(true);
        processEngineConfiguration.setEnableProcessDefinitionInfoCache(false);
        ((DefaultHistoryManager) processEngineConfiguration.getHistoryManager()).setHistoryLevel(HistoryLevel.AUDIT);

        // The time interceptor should be first so it measures the complete command
        CommandExecutorImpl commandExecutor = ((CommandExecutorImpl) processEngineConfiguration.getCommandExecutor());
        this.oldFirstCommandInterceptor = commandExecutor.getFirst();

        TotalExecutionTimeCommandInterceptor timeCommandInterceptor = new TotalExecutionTimeCommandInterceptor();
        timeCommandInterceptor.setNext(oldFirstCommandInterceptor);
        commandExecutor.setFirst(timeCommandInterceptor);

        // Add a DbSqlSession factory that captures CRUD operations
        this.oldDbSqlSessionFactory = processEngineConfiguration.getDbSqlSessionFactory();
        DbSqlSessionFactory newDbSqlSessionFactory = new ProfilingDbSqlSessionFactory();
        newDbSqlSessionFactory.setDatabaseType(oldDbSqlSessionFactory.getDatabaseType());
        newDbSqlSessionFactory.setDatabaseTablePrefix(oldDbSqlSessionFactory.getDatabaseTablePrefix());
        newDbSqlSessionFactory.setTablePrefixIsSchema(oldDbSqlSessionFactory.isTablePrefixIsSchema());
        newDbSqlSessionFactory.setDatabaseCatalog(oldDbSqlSessionFactory.getDatabaseCatalog());
        newDbSqlSessionFactory.setDatabaseSchema(oldDbSqlSessionFactory.getDatabaseSchema());
        newDbSqlSessionFactory.setSqlSessionFactory(oldDbSqlSessionFactory.getSqlSessionFactory());
        newDbSqlSessionFactory.setIdGenerator(oldDbSqlSessionFactory.getIdGenerator());
        newDbSqlSessionFactory.setDbIdentityUsed(oldDbSqlSessionFactory.isDbIdentityUsed());
        newDbSqlSessionFactory.setDbHistoryUsed(oldDbSqlSessionFactory.isDbHistoryUsed());
        processEngineConfiguration.addSessionFactory(newDbSqlSessionFactory);
    }

    @Override
    protected void tearDown() throws Exception {

        // Restore the original engine configuration
        processEngineConfiguration.getPerformanceSettings().setEnableEagerExecutionTreeFetching(oldExecutionTreeFetchValue);
        processEngineConfiguration.getPerformanceSettings().setEnableExecutionRelationshipCounts(oldExecutionRelationshipCountValue);
        processEngineConfiguration.setEnableProcessDefinitionInfoCache(oldenableProcessDefinitionInfoCacheValue);
        ((DefaultHistoryManager) processEngineConfiguration.getHistoryManager()).setHistoryLevel(oldHistoryLevel);
        ((CommandExecutorImpl) processEngineConfiguration.getCommandExecutor()).setFirst(oldFirstCommandInterceptor);
        processEngineConfiguration.addSessionFactory(oldDbSqlSessionFactory);

        // Validate (cause this tended to be screwed up)
        List<HistoricActivityInstance> historicActivityInstances = historyService.createHistoricActivityInstanceQuery().list();
        for (HistoricActivityInstance historicActivityInstance : historicActivityInstances) {
            Assert.assertTrue(historicActivityInstance.getStartTime() != null);
            Assert.assertTrue(historicActivityInstance.getEndTime() != null);
        }

        ActivitiProfiler.getInstance().reset();

        for (Deployment deployment : repositoryService.createDeploymentQuery().list()) {
            repositoryService.deleteDeployment(deployment.getId(), true);
        }

        super.tearDown();
    }

    public void testStartToEnd() {
        deployStartProcessInstanceAndProfile("process01.bpmn20.xml", "process01");

        assertDatabaseSelects("StartProcessInstanceCmd",
            "selectLatestProcessDefinitionByKey", 1L);
        assertDatabaseInserts("StartProcessInstanceCmd",
            "HistoricActivityInstanceEntityImpl-bulk-with-2", 1L,
            "HistoricProcessInstanceEntityImpl", 1L);
        assertNoUpdatesAndDeletes("StartProcessInstanceCmd");

        Assert.assertEquals(0, runtimeService.createProcessInstanceQuery().count());
        Assert.assertEquals(1, historyService.createHistoricProcessInstanceQuery().finished().count());
    }

    public void testVariablesAndPassthrough() {
        deployStartProcessInstanceAndProfile("process-variables-servicetask01.bpmn20.xml", "process-variables-servicetask01");

        assertDatabaseSelects("StartProcessInstanceCmd",
            "selectLatestProcessDefinitionByKey", 1L);
        assertDatabaseInserts("StartProcessInstanceCmd",
            "HistoricVariableInstanceEntityImpl-bulk-with-4", 1L,
            "HistoricProcessInstanceEntityImpl", 1L,
            "HistoricActivityInstanceEntityImpl-bulk-with-9", 1L);
        assertNoUpdatesAndDeletes("StartProcessInstanceCmd");

        Assert.assertEquals(0, runtimeService.createProcessInstanceQuery().count());
        Assert.assertEquals(1, historyService.createHistoricProcessInstanceQuery().finished().count());
    }

    public void testManyVariablesViaServiceTaskAndPassthroughs() {
        deployStartProcessInstanceAndProfile("process-variables-servicetask02.bpmn20.xml", "process-variables-servicetask02");

        assertDatabaseSelects("StartProcessInstanceCmd",
            "selectLatestProcessDefinitionByKey", 1L);
        assertDatabaseInserts("StartProcessInstanceCmd",
            "HistoricVariableInstanceEntityImpl-bulk-with-50", 1L,
            "HistoricProcessInstanceEntityImpl", 1L,
            "HistoricActivityInstanceEntityImpl-bulk-with-9", 1L);
        assertNoUpdatesAndDeletes("StartProcessInstanceCmd");

        Assert.assertEquals(0, runtimeService.createProcessInstanceQuery().count());
        Assert.assertEquals(1, historyService.createHistoricProcessInstanceQuery().finished().count());
    }

    public void testOnlyPassThroughs() {
        deployStartProcessInstanceAndProfile("process02.bpmn20.xml", "process02");

        assertDatabaseSelects("StartProcessInstanceCmd",
            "selectLatestProcessDefinitionByKey", 1L);
        assertDatabaseInserts("StartProcessInstanceCmd",
            "HistoricActivityInstanceEntityImpl-bulk-with-9", 1L,
            "HistoricProcessInstanceEntityImpl", 1L);
        assertNoUpdatesAndDeletes("StartProcessInstanceCmd");

        Assert.assertEquals(0, runtimeService.createProcessInstanceQuery().count());
        Assert.assertEquals(1, historyService.createHistoricProcessInstanceQuery().finished().count());
    }

    public void testParallelForkAndJoin() {
        deployStartProcessInstanceAndProfile("process03.bpmn20.xml", "process03");

        assertDatabaseSelects("StartProcessInstanceCmd",
            "selectLatestProcessDefinitionByKey", 1L);
        assertDatabaseInserts("StartProcessInstanceCmd",
            "HistoricActivityInstanceEntityImpl-bulk-with-7", 1L,
            "HistoricProcessInstanceEntityImpl", 1L);
        assertNoUpdatesAndDeletes("StartProcessInstanceCmd");

        Assert.assertEquals(0, runtimeService.createProcessInstanceQuery().count());
        Assert.assertEquals(1, historyService.createHistoricProcessInstanceQuery().finished().count());
    }

    public void testNestedParallelForkAndJoin() {
        deployStartProcessInstanceAndProfile("process04.bpmn20.xml", "process04");

        assertDatabaseSelects("StartProcessInstanceCmd",
            "selectLatestProcessDefinitionByKey", 1L);
        assertDatabaseInserts("StartProcessInstanceCmd",
            "HistoricActivityInstanceEntityImpl-bulk-with-21", 1L,
            "HistoricProcessInstanceEntityImpl", 1L);
        assertNoUpdatesAndDeletes("StartProcessInstanceCmd");

        Assert.assertEquals(0, runtimeService.createProcessInstanceQuery().count());
        Assert.assertEquals(1, historyService.createHistoricProcessInstanceQuery().finished().count());
    }

    public void testExlusiveGateway() {
        deployStartProcessInstanceAndProfile("process05.bpmn20.xml", "process05");

        assertDatabaseSelects("StartProcessInstanceCmd",
            "selectLatestProcessDefinitionByKey", 1L);
        assertDatabaseInserts("StartProcessInstanceCmd",
            "HistoricActivityInstanceEntityImpl-bulk-with-5", 1L,
            "HistoricProcessInstanceEntityImpl", 1L,
            "HistoricVariableInstanceEntityImpl", 1L);
        assertNoUpdatesAndDeletes("StartProcessInstanceCmd");

        Assert.assertEquals(0, runtimeService.createProcessInstanceQuery().count());
        Assert.assertEquals(1, historyService.createHistoricProcessInstanceQuery().finished().count());
    }

    public void testOneTaskProcess() {
        deployStartProcessInstanceAndProfile("process-usertask-01.bpmn20.xml", "process-usertask-01", false);
        Task task = taskService.createTaskQuery().singleResult();
        taskService.complete(task.getId());
        stopProfiling();

        assertExecutedCommands("StartProcessInstanceCmd", "org.activiti.engine.impl.TaskQueryImpl", "CompleteTaskCmd");

        // Start process instance
        assertDatabaseSelects("StartProcessInstanceCmd",
            "selectLatestProcessDefinitionByKey", 1L);
        assertDatabaseInserts("StartProcessInstanceCmd",
            "ExecutionEntityImpl-bulk-with-2", 1L,
            "TaskEntityImpl", 1L,
            "HistoricActivityInstanceEntityImpl-bulk-with-2", 1L,
            "HistoricTaskInstanceEntityImpl", 1L,
            "HistoricProcessInstanceEntityImpl", 1L);
        assertNoUpdatesAndDeletes("StartProcessInstanceCmd");

        // Task Query
        assertDatabaseSelects("org.activiti.engine.impl.TaskQueryImpl",
            "selectTaskByQueryCriteria", 1L);
        assertNoInserts("org.activiti.engine.impl.TaskQueryImpl");
        assertNoUpdates("org.activiti.engine.impl.TaskQueryImpl");
        assertNoDeletes("org.activiti.engine.impl.TaskQueryImpl");

        // Task Complete
        // TODO: implement counting for tasks similar to executions
        assertDatabaseSelects("CompleteTaskCmd",
            "selectById org.activiti.engine.impl.persistence.entity.HistoricProcessInstanceEntityImpl", 1L,
            "selectById org.activiti.engine.impl.persistence.entity.HistoricTaskInstanceEntityImpl", 1L,
            "selectById org.activiti.engine.impl.persistence.entity.TaskEntityImpl", 1L,
            "selectUnfinishedHistoricActivityInstanceExecutionIdAndActivityId", 2L,
            "selectTasksByParentTaskId", 1L,
            "selectIdentityLinksByTask", 1L,
            "selectVariablesByTaskId", 1L,
            "selectExecutionsWithSameRootProcessInstanceId", 1L,
            "selectTasksByExecutionId", 1L
        );
        assertDatabaseInserts("CompleteTaskCmd",
            "HistoricActivityInstanceEntityImpl", 1L);
        assertNoUpdates("CompleteTaskCmd");
        assertDatabaseDeletes("CompleteTaskCmd",
            "TaskEntityImpl", 1L,
            "ExecutionEntityImpl", 2L); // execution and processinstance
    }

    // ---------------------------------
    // HELPERS
    // ---------------------------------

    /** Asserts that exactly the given commands (and no others) were profiled. */
    protected void assertExecutedCommands(String...commands) {
        ProfileSession profileSession = ActivitiProfiler.getInstance().getProfileSessions().get(0);
        Map<String, CommandStats> allStats = profileSession.calculateSummaryStatistics();

        if (commands.length != allStats.size()) {
            System.out.println("Following commands were found: ");
            for (String command : allStats.keySet()) {
                System.out.println(command);
            }
        }
        Assert.assertEquals(commands.length, allStats.size());

        for (String command : commands) {
            Assert.assertNotNull("Could not get stats for " + command, getStatsForCommand(command, allStats));
        }
    }

    /** Asserts select statement counts for a command; expectedSelects alternates (name, count). */
    protected void assertDatabaseSelects(String commandClass, Object ... expectedSelects) {
        CommandStats stats = getStats(commandClass);

        if (expectedSelects.length / 2 != stats.getDbSelects().size()) {
            Assert.fail("Unexpected number of database selects : " + stats.getDbSelects().size());
        }

        for (int i=0; i<expectedSelects.length; i+=2) {
            String dbSelect = (String) expectedSelects[i];
            Long count = (Long) expectedSelects[i+1];

            Assert.assertEquals("Wrong select count for " + dbSelect, count, stats.getDbSelects().get(dbSelect));
        }
    }

    /** Asserts insert counts for a command; expectedInserts alternates (entity, count). */
    protected void assertDatabaseInserts(String commandClass, Object ... expectedInserts) {
        CommandStats stats = getStats(commandClass);

        if (expectedInserts.length / 2 != stats.getDbInserts().size()) {
            Assert.fail("Unexpected number of database inserts : " + stats.getDbInserts().size() + ", but expected " + expectedInserts.length / 2);
        }

        for (int i=0; i<expectedInserts.length; i+=2) {
            String dbInsert = (String) expectedInserts[i];
            Long count = (Long) expectedInserts[i+1];

            // Fixed message: original read "...dbInsert + "not correct"" (missing space)
            Assert.assertEquals("Insert count for " + dbInsert + " not correct", count, stats.getDbInserts().get("org.activiti.engine.impl.persistence.entity." + dbInsert));
        }
    }

    /** Asserts delete counts for a command; expectedDeletes alternates (entity, count). */
    protected void assertDatabaseDeletes(String commandClass, Object ... expectedDeletes) {
        CommandStats stats = getStats(commandClass);

        if (expectedDeletes.length / 2 != stats.getDbDeletes().size()) {
            Assert.fail("Unexpected number of database deletes : " + stats.getDbDeletes().size());
        }

        for (int i=0; i<expectedDeletes.length; i+=2) {
            String dbDelete = (String) expectedDeletes[i];
            Long count = (Long) expectedDeletes[i+1];

            // Fixed message: original read "Delete count count for ...""not correct"
            Assert.assertEquals("Delete count for " + dbDelete + " not correct", count, stats.getDbDeletes().get("org.activiti.engine.impl.persistence.entity." + dbDelete));
        }
    }

    protected void assertNoInserts(String commandClass) {
        CommandStats stats = getStats(commandClass);
        Assert.assertEquals(0, stats.getDbInserts().size());
    }

    protected void assertNoUpdatesAndDeletes(String commandClass) {
        assertNoDeletes(commandClass);
        assertNoUpdates(commandClass);
    }

    protected void assertNoDeletes(String commandClass) {
        CommandStats stats = getStats(commandClass);
        Assert.assertEquals(0, stats.getDbDeletes().size());
    }

    protected void assertNoUpdates(String commandClass) {
        CommandStats stats = getStats(commandClass);
        Assert.assertEquals(0, stats.getDbUpdates().size());
    }

    /** Returns the profiler statistics of the first profile session for the given command. */
    protected CommandStats getStats(String commandClass) {
        ProfileSession profileSession = ActivitiProfiler.getInstance().getProfileSessions().get(0);
        Map<String, CommandStats> allStats = profileSession.calculateSummaryStatistics();
        CommandStats stats = getStatsForCommand(commandClass, allStats);
        return stats;
    }

    /** Resolves a (possibly unqualified) command class name to its stats entry. */
    protected CommandStats getStatsForCommand(String commandClass, Map<String, CommandStats> allStats) {
        String clazz = commandClass;
        if (!clazz.startsWith("org.activiti")) {
            clazz = "org.activiti.engine.impl.cmd." + clazz;
        }
        CommandStats stats = allStats.get(clazz);
        return stats;
    }

    // HELPERS

    protected ActivitiProfiler deployStartProcessInstanceAndProfile(String path, String processDefinitionKey) {
        return deployStartProcessInstanceAndProfile(path, processDefinitionKey, true);
    }

    protected ActivitiProfiler deployStartProcessInstanceAndProfile(String path, String processDefinitionKey, boolean stopProfilingAfterStart) {
        deploy(path);
        ActivitiProfiler activitiProfiler = startProcessInstanceAndProfile(processDefinitionKey);
        if (stopProfilingAfterStart) {
            stopProfiling();
        }
        return activitiProfiler;
    }

    protected void deploy(String path) {
        repositoryService.createDeployment().addClasspathResource("org/activiti/engine/test/cfg/executioncount/" + path).deploy();
    }

    protected ActivitiProfiler startProcessInstanceAndProfile(String processDefinitionKey) {
        ActivitiProfiler activitiProfiler = ActivitiProfiler.getInstance();
        activitiProfiler.startProfileSession("Profiling session");
        runtimeService.startProcessInstanceByKey(processDefinitionKey);
        return activitiProfiler;
    }

    protected void stopProfiling() {
        ActivitiProfiler profiler = ActivitiProfiler.getInstance();
        profiler.stopCurrentProfileSession();
        new ConsoleLogger(profiler).log();
    }
}
| |
/*
* Braille Utils (C) 2010-2011 Daisy Consortium
*
* This library is free software; you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the Free
* Software Foundation; either version 2.1 of the License, or (at your option)
* any later version.
*
* This library is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
* details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this library; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package org.daisy.braille.utils.impl.provider.indexbraille;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import org.daisy.dotify.api.embosser.Device;
import org.daisy.dotify.api.embosser.EmbosserFeatures;
import org.daisy.dotify.api.embosser.EmbosserWriter;
import org.daisy.dotify.api.embosser.PrintPage;
import org.daisy.dotify.api.embosser.PrintPage.PrintDirection;
import org.daisy.dotify.api.paper.Area;
import org.daisy.dotify.api.paper.PageFormat;
import org.daisy.dotify.api.paper.Paper;
import org.daisy.dotify.api.paper.SheetPaper;
import org.daisy.dotify.api.paper.SheetPaperFormat;
import org.daisy.dotify.api.paper.SheetPaperFormat.Orientation;
import org.daisy.dotify.api.paper.TractorPaperFormat;
import org.daisy.dotify.api.table.TableCatalogService;
import org.daisy.braille.utils.impl.provider.indexbraille.IndexEmbosserProvider.EmbosserType;
import org.daisy.braille.utils.impl.tools.embosser.AbstractEmbosser;
import org.daisy.braille.utils.impl.tools.embosser.EmbosserTools;
import org.daisy.braille.utils.impl.tools.embosser.FileToDeviceEmbosserWriter;
public abstract class IndexEmbosser extends AbstractEmbosser {
/**
 * Serialization id.
 */
private static final long serialVersionUID = 4942671719606902452L;
// The concrete Index embosser model this instance represents.
protected EmbosserType type;
// Physical page limits in millimeters; defaults below are narrowed per model
// in the constructor.
private double minPageLengthAlongFeed = 50d;
private double maxPageLengthAlongFeed = Double.MAX_VALUE;
private double minPageLengthAcrossFeed = 50d;
private double maxPageLengthAcrossFeed = Double.MAX_VALUE;
private double minPrintPageWidth = 50d;
private double maxPrintPageWidth = Double.MAX_VALUE;
private double minPrintPageHeight = 50d;
private double maxPrintPageHeight = Double.MAX_VALUE;
// Job options; mutated through setFeature().
protected int numberOfCopies = 1;
protected boolean zFoldingEnabled = false;
protected boolean saddleStitchEnabled = false;
protected boolean duplexEnabled = false;
protected int maxNumberOfCopies = 1;
// Margin settings; the final ones are fixed at zero, non-final ones may be
// adjusted by subclasses.
protected int marginInner = 0;
protected final int marginOuter = 0;
protected final int marginTop = 0;
protected final int marginBottom = 0;
protected int minMarginInner = 0;
protected final int minMarginOuter = 0;
protected final int minMarginTop = 0;
protected final int minMarginBottom = 0;
protected int maxMarginInner = 0;
protected int maxMarginOuter = 0;
protected int maxMarginTop = 0;
protected int maxMarginBottom = 0;
// Cell/line limits of the printable area.
protected int minCellsInWidth = 0;
protected int minLinesInHeight = 0;
protected int maxCellsInWidth = Integer.MAX_VALUE;
protected int maxLinesInHeight = Integer.MAX_VALUE;
/**
 * Creates an embosser for the given Index model and configures the physical
 * page limits (in millimeters) supported by that particular model.
 *
 * @param service table catalog used by the parent class to resolve braille tables
 * @param props the concrete Index embosser model
 * @throws IllegalArgumentException if the model is not a supported type
 */
public IndexEmbosser(TableCatalogService service, EmbosserType props) {
    super(service, props);
    type = props;
    // Per-model page size limits; values not set here keep the field defaults.
    switch (type) {
    case INDEX_BASIC_BLUE_BAR:
        maxPrintPageWidth = 280d;
        maxPrintPageHeight = 12*EmbosserTools.INCH_IN_MM;
        break;
    case INDEX_BASIC_S_V2:
    case INDEX_BASIC_D_V2:
        minPrintPageWidth = 138d; // = 23*6
        minPrintPageHeight = 1*EmbosserTools.INCH_IN_MM;
        maxPrintPageHeight = (20+2/3)*EmbosserTools.INCH_IN_MM;
        break;
    case INDEX_EVEREST_D_V2:
        minPrintPageWidth = 138d; // = 23*6
        minPrintPageHeight = 100d;
        maxPrintPageHeight = 350d;
        break;
    case INDEX_4X4_PRO_V2:
        minPrintPageWidth = 138d; // = 23*6
        minPrintPageHeight = 100d;
        maxPrintPageHeight = 297d;
        minPageLengthAlongFeed = 110d;
        maxPageLengthAlongFeed = 500d;
        break;
    case INDEX_BASIC_S_V3:
    case INDEX_BASIC_D_V3:
        minPrintPageWidth = 90d;
        minPrintPageHeight = 1*EmbosserTools.INCH_IN_MM;
        maxPrintPageWidth = 295d;
        maxPrintPageHeight = 17*EmbosserTools.INCH_IN_MM;
        break;
    case INDEX_BASIC_D_V4:
        minPrintPageWidth = 120d;
        minPrintPageHeight = 1*EmbosserTools.INCH_IN_MM;
        maxPrintPageWidth = 325d;
        maxPrintPageHeight = 17*EmbosserTools.INCH_IN_MM;
        break;
    case INDEX_EVEREST_D_V3:
    case INDEX_4X4_PRO_V3:
        minPageLengthAcrossFeed = 130d;
        maxPageLengthAcrossFeed = 297d;
        minPageLengthAlongFeed = 120d;
        maxPageLengthAlongFeed = 585d;
        break;
    case INDEX_EVEREST_D_V4:
        minPageLengthAcrossFeed = 130d;
        maxPageLengthAcrossFeed = 297d;
        minPageLengthAlongFeed = 120d;
        maxPageLengthAlongFeed = 590d;
        break;
    case INDEX_4WAVES_PRO_V3:
        minPrintPageWidth = 90d;
        minPrintPageHeight = 11*EmbosserTools.INCH_IN_MM;
        maxPrintPageWidth = 295d;
        maxPrintPageHeight = 12*EmbosserTools.INCH_IN_MM;
        break;
    case INDEX_BRAILLE_BOX_V4:
        // Braille Box keeps the default limits.
        break;
    case INDEX_BASIC_D_V5:
    case INDEX_FANFOLD_V5:
        minPrintPageWidth = 120d;
        minPrintPageHeight = 1*EmbosserTools.INCH_IN_MM;
        maxPrintPageWidth = 325d;
        maxPrintPageHeight = 17*EmbosserTools.INCH_IN_MM;
        break;
    case INDEX_EVEREST_D_V5:
        minPageLengthAcrossFeed = 130d;
        maxPageLengthAcrossFeed = 297d;
        minPageLengthAlongFeed = 120d;
        maxPageLengthAlongFeed = 590d;
        break;
    case INDEX_BRAILLE_BOX_V5:
        // Braille Box keeps the default limits.
        break;
    default:
        throw new IllegalArgumentException("Unsupported embosser type");
    }
}
@Override
protected double getCellWidth() {
    // Width of a braille cell in millimeters on Index embossers.
    return 6.0;
}
@Override
protected double getCellHeight() {
    // Height of a braille cell (line spacing) in millimeters.
    return 10.0;
}
/**
 * Checks whether the given paper can be used with this embosser, trying both
 * orientations for sheet paper.
 */
@Override
public boolean supportsPaper(Paper paper) {
    if (paper == null) {
        return false;
    }
    try {
        Paper.Type paperType = getPaperType();
        if (paperType == Paper.Type.SHEET) {
            SheetPaper sheet = paper.asSheetPaper();
            return supportsPageFormat(new SheetPaperFormat(sheet, Orientation.DEFAULT))
                    || supportsPageFormat(new SheetPaperFormat(sheet, Orientation.REVERSED));
        } else if (paperType == Paper.Type.TRACTOR) {
            return supportsPageFormat(new TractorPaperFormat(paper.asTractorPaper()));
        }
    } catch (ClassCastException e) {
        // The paper is not of the kind this embosser feeds; treat as unsupported.
    }
    return false;
}
/**
 * Checks whether the given page format matches this embosser's paper type and
 * fits within its supported print page dimensions.
 */
@Override
public boolean supportsPageFormat(PageFormat format) {
    if (format == null) {
        return false;
    }
    try {
        switch (getPaperType()) {
            case SHEET:
                return supportsPrintPage(getPrintPage(format.asSheetPaperFormat()));
            case TRACTOR:
                return supportsPrintPage(getPrintPage(format.asTractorPaperFormat()));
            default:
                return false;
        }
    } catch (ClassCastException e) {
        // Wrong format flavor for this embosser's paper type.
        return false;
    }
}
/**
 * Checks that all four print page dimensions fall within the limits that were
 * configured for this model in the constructor.
 */
@Override
public boolean supportsPrintPage(PrintPage dim) {
    if (dim == null) {
        return false;
    }
    double width = dim.getWidth();
    double height = dim.getHeight();
    double acrossFeed = dim.getLengthAcrossFeed().asMillimeter();
    double alongFeed = dim.getLengthAlongFeed().asMillimeter();
    boolean widthOk = width >= minPrintPageWidth && width <= maxPrintPageWidth;
    boolean heightOk = height >= minPrintPageHeight && height <= maxPrintPageHeight;
    boolean acrossOk = acrossFeed >= minPageLengthAcrossFeed && acrossFeed <= maxPageLengthAcrossFeed;
    boolean alongOk = alongFeed >= minPageLengthAlongFeed && alongFeed <= maxPageLengthAlongFeed;
    return widthOk && heightOk && acrossOk && alongOk;
}
@Override
public boolean supportsVolumes() {
    // Index embossers handle one continuous document, not separate volumes.
    return false;
}
@Override
public boolean supportsAligning() {
    // Content may be aligned within the printable area.
    return true;
}
@Override
public boolean supports8dot() {
    // Only 6-dot braille is supported by this driver.
    return false;
}
/**
 * Z-folding is only available on the tractor-fed Basic/Fanfold/4Waves models.
 */
@Override
public boolean supportsZFolding() {
    switch (type) {
        case INDEX_BASIC_S_V3:
        case INDEX_BASIC_D_V2:
        case INDEX_BASIC_D_V3:
        case INDEX_BASIC_D_V4:
        case INDEX_BASIC_D_V5:
        case INDEX_FANFOLD_V5:
        case INDEX_4WAVES_PRO_V3:
            return true;
        default:
            return false;
    }
}
/**
 * Magazine (saddle-stitched) output is limited to the 4X4 Pro, Everest V4/V5
 * and Braille Box models; all other models support regular mode only.
 */
@Override
public boolean supportsPrintMode(PrintMode mode) {
    switch (type) {
        case INDEX_4X4_PRO_V2:
        case INDEX_4X4_PRO_V3:
        case INDEX_EVEREST_D_V4:
        case INDEX_EVEREST_D_V5:
        case INDEX_BRAILLE_BOX_V4:
        case INDEX_BRAILLE_BOX_V5:
            return true;
        default:
            return PrintMode.REGULAR == mode;
    }
}
/**
 * Maps the embosser model to the kind of paper it feeds: Basic/Fanfold/4Waves
 * models use tractor paper, all other models use cut sheets.
 */
private Paper.Type getPaperType() {
    switch (type) {
        case INDEX_BASIC_BLUE_BAR:
        case INDEX_BASIC_D_V2:
        case INDEX_BASIC_D_V3:
        case INDEX_BASIC_D_V4:
        case INDEX_BASIC_D_V5:
        case INDEX_FANFOLD_V5:
        case INDEX_BASIC_S_V2:
        case INDEX_BASIC_S_V3:
        case INDEX_4WAVES_PRO_V3:
            return Paper.Type.TRACTOR;
        default:
            return Paper.Type.SHEET;
    }
}
/**
 * Determines how the page travels through the embosser: 4X4 Pro and Braille
 * Box always print sideways, Everest V4/V5 print sideways only when saddle
 * stitch is enabled, and everything else prints upright.
 */
private PrintDirection getPrintDirection() {
    switch (type) {
        case INDEX_4X4_PRO_V2:
        case INDEX_4X4_PRO_V3:
        case INDEX_BRAILLE_BOX_V4:
        case INDEX_BRAILLE_BOX_V5:
            return PrintDirection.SIDEWAYS;
        case INDEX_EVEREST_D_V4:
        case INDEX_EVEREST_D_V5:
            return saddleStitchEnabled ? PrintDirection.SIDEWAYS : PrintDirection.UPRIGHT;
        default:
            return PrintDirection.UPRIGHT;
    }
}
/**
 * Creates an embosser writer that writes to a temporary file and then sends
 * that file to the given device.
 *
 * @param device the device the embossed file will be sent to
 * @return a file-backed embosser writer
 * @throws IllegalArgumentException if the temporary file or stream cannot be
 *         created
 */
@Override
public EmbosserWriter newEmbosserWriter(Device device) {
    try {
        File f = File.createTempFile(this.getClass().getCanonicalName(), ".tmp");
        f.deleteOnExit();
        EmbosserWriter ew = newEmbosserWriter(new FileOutputStream(f));
        return new FileToDeviceEmbosserWriter(ew, f, device);
    } catch (IOException e) {
        // Preserve the I/O root cause instead of silently discarding it
        // (the original swallowed the exception before throwing).
        throw new IllegalArgumentException("Embosser does not support this feature.", e);
    }
}
/**
 * Sets an embosser feature, validating the value against this model's
 * capabilities. Note the coupling between features: enabling saddle stitch
 * forces duplex on, and on the Basic D V2 z-folding implies duplex.
 *
 * @param key one of the {@link EmbosserFeatures} keys (others are delegated
 *        to the superclass)
 * @param value the new value; its required type depends on the key
 * @throws IllegalArgumentException if the value has the wrong type or is out
 *         of range for this model
 */
@Override
public void setFeature(String key, Object value) {
    if (EmbosserFeatures.NUMBER_OF_COPIES.equals(key) && maxNumberOfCopies > 1) {
        try {
            int copies = (Integer)value;
            // Copies must stay within [1, maxNumberOfCopies].
            if (copies < 1 || copies > maxNumberOfCopies) {
                throw new IllegalArgumentException("Unsupported value for number of copies.");
            }
            numberOfCopies = copies;
        } catch (ClassCastException e) {
            throw new IllegalArgumentException("Unsupported value for number of copies.");
        }
    } else if (EmbosserFeatures.SADDLE_STITCH.equals(key) && supportsPrintMode(PrintMode.MAGAZINE)) {
        try {
            saddleStitchEnabled = (Boolean)value;
            // Saddle stitch implies duplex output.
            //          if (!(type==EmbosserType.INDEX_EVEREST_D_V4 ||
            //                type==EmbosserType.INDEX_BRAILLE_BOX_V4)) {
            duplexEnabled = duplexEnabled || saddleStitchEnabled;
            //          }
        } catch (ClassCastException e) {
            throw new IllegalArgumentException("Unsupported value for saddle stitch.");
        }
    } else if (EmbosserFeatures.Z_FOLDING.equals(key) && supportsZFolding()) {
        try {
            zFoldingEnabled = (Boolean)value;
            // On the Basic D V2 z-folding requires duplex.
            if (type==EmbosserType.INDEX_BASIC_D_V2) {
                duplexEnabled = duplexEnabled || zFoldingEnabled;
            }
        } catch (ClassCastException e) {
            throw new IllegalArgumentException("Unsupported value for z-folding.");
        }
    } else if (EmbosserFeatures.DUPLEX.equals(key) && supportsDuplex()) {
        try {
            duplexEnabled = (Boolean)value;
            // Saddle stitch still forces duplex back on if it is enabled.
            //          if (!(type==EmbosserType.INDEX_EVEREST_D_V4 ||
            //                type==EmbosserType.INDEX_BRAILLE_BOX_V4)) {
            duplexEnabled = duplexEnabled || saddleStitchEnabled;
            //          }
            // Disabling duplex on the Basic D V2 also disables z-folding.
            if (type==EmbosserType.INDEX_BASIC_D_V2) {
                zFoldingEnabled = zFoldingEnabled && duplexEnabled;
            }
        } catch (ClassCastException e) {
            throw new IllegalArgumentException("Unsupported value for duplex.");
        }
    } else {
        super.setFeature(key, value);
    }
}
/**
 * Returns the current value of a feature. Mirrors {@link #setFeature}: a
 * feature is only reported when this model actually supports it; all other
 * keys are delegated to the superclass.
 */
@Override
public Object getFeature(String key) {
	if (EmbosserFeatures.NUMBER_OF_COPIES.equals(key) && maxNumberOfCopies > 1) {
		return numberOfCopies;
	}
	if (EmbosserFeatures.SADDLE_STITCH.equals(key) && supportsPrintMode(PrintMode.MAGAZINE)) {
		return saddleStitchEnabled;
	}
	if (EmbosserFeatures.Z_FOLDING.equals(key) && supportsZFolding()) {
		return zFoldingEnabled;
	}
	if (EmbosserFeatures.DUPLEX.equals(key) && supportsDuplex()) {
		return duplexEnabled;
	}
	return super.getFeature(key);
}
/**
 * Builds the print page description for the given page format. Magazine
 * mode is implied by saddle stitch being enabled; the direction depends on
 * the embosser model (see {@link #getPrintDirection()}).
 */
@Override
public PrintPage getPrintPage(PageFormat pageFormat) {
	final PrintMode mode = saddleStitchEnabled ? PrintMode.MAGAZINE : PrintMode.REGULAR;
	return new PrintPage(pageFormat, getPrintDirection(), mode);
}
/**
 * Returns the printable area: the physical print area shrunk by the
 * configured margins, which are expressed in braille cells.
 */
@Override
public Area getPrintableArea(PageFormat pageFormat) {
	final Area printArea = getPrintArea(pageFormat);
	final double cellWidth = getCellWidth();
	final double cellHeight = getCellHeight();
	// Margins are given in cells; convert them to millimeters here.
	final double width = printArea.getWidth() - (marginInner + marginOuter) * cellWidth;
	final double height = printArea.getHeight() - (marginTop + marginBottom) * cellHeight;
	final double offsetX = printArea.getOffsetX() + marginInner * cellWidth;
	final double offsetY = printArea.getOffsetY() + marginTop * cellHeight;
	return new Area(width, height, offsetX, offsetY);
}
/**
 * Computes the physical print area for a page format: the page minus the
 * model's unprintable borders, further clamped by per-model hardware limits
 * and by the maximum number of cells/lines the firmware accepts.
 * The millimeter constants below appear to be per-model hardware maxima
 * (NOTE(review): taken on trust from the original author — confirm against
 * the Index hardware specifications).
 */
protected Area getPrintArea(PageFormat pageFormat) {
	PrintPage printPage = getPrintPage(pageFormat);
	double cellWidth = getCellWidth();
	double cellHeight = getCellHeight();
	double lengthAcrossFeed = printPage.getLengthAcrossFeed().asMillimeter();
	// Start from the page size minus the unprintable borders on each edge.
	double printablePageWidth = printPage.getWidth()
		- type.getUnprintable().getLeft().asMillimeter()
		- type.getUnprintable().getRight().asMillimeter();
	double printablePageHeight = printPage.getHeight()
		- type.getUnprintable().getTop().asMillimeter()
		- type.getUnprintable().getBottom().asMillimeter();
	// Per-model clamps on the dimension across the paper feed.
	switch (type) {
	case INDEX_4X4_PRO_V2:
		printablePageHeight = Math.min(lengthAcrossFeed, 248.5);
		break;
	case INDEX_BASIC_D_V4:
	case INDEX_BASIC_D_V5:
	case INDEX_FANFOLD_V5:
		printablePageWidth = Math.min(lengthAcrossFeed, 301.152);
		break;
	case INDEX_EVEREST_D_V2:
	case INDEX_EVEREST_D_V3:
	case INDEX_BASIC_S_V2:
	case INDEX_BASIC_D_V2:
	case INDEX_BASIC_S_V3:
	case INDEX_BASIC_D_V3:
	case INDEX_4WAVES_PRO_V3:
		printablePageWidth = Math.min(lengthAcrossFeed, 248.5);
		break;
	default:
		break;
	}
	// Never exceed the firmware's cell/line limits.
	printablePageWidth = Math.min(printablePageWidth, maxCellsInWidth * cellWidth);
	printablePageHeight = Math.min(printablePageHeight, maxLinesInHeight * cellHeight);
	double unprintableInner = type.getUnprintable().getLeft().asMillimeter();
	double unprintableTop = type.getUnprintable().getTop().asMillimeter();
	// Some models position the printable area relative to the far edge of
	// the paper, so the inner offset grows with the paper length across the
	// feed beyond a model-specific threshold.
	switch (type) {
	case INDEX_BASIC_S_V3:
	case INDEX_BASIC_D_V3:
		unprintableInner = Math.max(0, lengthAcrossFeed - 276.4);
		break;
	case INDEX_EVEREST_D_V3:
		unprintableInner = Math.max(0, lengthAcrossFeed - 272.75);
		break;
	default:
		break;
	}
	return new Area(printablePageWidth, printablePageHeight, unprintableInner, unprintableTop);
}
}
| |
/*
* ConnectBot: simple, powerful, open-source SSH client for Android
* Copyright 2007 Kenny Root, Jeffrey Sharkey
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.connectbot;
import java.security.KeyPair;
import java.security.KeyPairGenerator;
import java.security.PrivateKey;
import java.security.PublicKey;
import java.security.SecureRandom;
import java.security.Security;
import org.connectbot.bean.PubkeyBean;
import org.connectbot.util.EntropyDialog;
import org.connectbot.util.EntropyView;
import org.connectbot.util.OnEntropyGatheredListener;
import org.connectbot.util.PubkeyDatabase;
import org.connectbot.util.PubkeyUtils;
import com.trilead.ssh2.signature.ECDSASHA2Verify;
import android.app.Dialog;
import android.app.ProgressDialog;
import android.graphics.PorterDuff;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.text.Editable;
import android.text.TextWatcher;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.View.OnFocusChangeListener;
import android.widget.Button;
import android.widget.CheckBox;
import android.widget.EditText;
import android.widget.RadioButton;
import android.widget.RadioGroup;
import android.widget.RadioGroup.OnCheckedChangeListener;
import android.widget.SeekBar;
import android.widget.SeekBar.OnSeekBarChangeListener;
/**
 * Activity that generates a new SSH public/private key pair (RSA, DSA or
 * EC). The user picks type, size, nickname and optional password; entropy
 * is gathered interactively, key generation runs on a background thread,
 * and the result is stored in the {@link PubkeyDatabase}.
 */
public class GeneratePubkeyActivity extends AppCompatActivity implements OnEntropyGatheredListener {
	/** Smallest RSA modulus the UI allows. */
	private static final int RSA_MINIMUM_BITS = 768;

	public final static String TAG = "CB.GeneratePubkeyAct";

	final static int DEFAULT_BITS = 2048;

	/** EC field sizes supported by the SSH library, smallest first. */
	final static int[] ECDSA_SIZES = ECDSASHA2Verify.getCurveSizes();
	final static int ECDSA_DEFAULT_BITS = ECDSA_SIZES[0];

	/** Number of entropy bytes sampled for the debug distribution log. */
	private static final int ENTROPY_SAMPLE_BYTES = 20;

	private LayoutInflater inflater = null;

	private EditText nickname;
	private RadioGroup keyTypeGroup;
	private SeekBar bitsSlider;
	private EditText bitsText;
	private CheckBox unlockAtStartup;
	private CheckBox confirmUse;
	private Button save;
	private Dialog entropyDialog;
	private ProgressDialog progress;

	private EditText password1, password2;

	private String keyType = PubkeyDatabase.KEY_TYPE_RSA;
	// Was a duplicated literal 768; keep it tied to the named constant.
	private int minBits = RSA_MINIMUM_BITS;
	private int bits = DEFAULT_BITS;

	private byte[] entropy;

	@Override
	public void onCreate(Bundle icicle) {
		super.onCreate(icicle);

		setContentView(R.layout.act_generatepubkey);

		nickname = (EditText) findViewById(R.id.nickname);
		keyTypeGroup = (RadioGroup) findViewById(R.id.key_type);
		bitsText = (EditText) findViewById(R.id.bits);
		bitsSlider = (SeekBar) findViewById(R.id.bits_slider);
		password1 = (EditText) findViewById(R.id.password1);
		password2 = (EditText) findViewById(R.id.password2);
		unlockAtStartup = (CheckBox) findViewById(R.id.unlock_at_startup);
		confirmUse = (CheckBox) findViewById(R.id.confirm_use);
		save = (Button) findViewById(R.id.save);

		inflater = LayoutInflater.from(this);

		nickname.addTextChangedListener(textChecker);
		password1.addTextChangedListener(textChecker);
		password2.addTextChangedListener(textChecker);

		// TODO add BC to provide EC for devices that don't have it.
		if (Security.getProviders("KeyPairGenerator.EC") == null) {
			((RadioButton) findViewById(R.id.ec)).setEnabled(false);
		}

		keyTypeGroup.setOnCheckedChangeListener(new OnCheckedChangeListener() {
			public void onCheckedChanged(RadioGroup group, int checkedId) {
				// Update keyType (and minBits) BEFORE touching the slider:
				// setProgress() synchronously fires onProgressChanged(), which
				// consults keyType to decide whether to snap the value to an
				// EC field size. Previously keyType was assigned last, so
				// switching away from EC snapped the bit count to a curve size.
				if (checkedId == R.id.rsa) {
					keyType = PubkeyDatabase.KEY_TYPE_RSA;
					minBits = RSA_MINIMUM_BITS;
					bitsSlider.setEnabled(true);
					bitsSlider.setProgress(DEFAULT_BITS - minBits);
					bitsText.setText(String.valueOf(DEFAULT_BITS));
					bitsText.setEnabled(true);
				} else if (checkedId == R.id.dsa) {
					keyType = PubkeyDatabase.KEY_TYPE_DSA;
					// DSA key size is fixed in this UI; the slider is disabled.
					// NOTE(review): the original comment said "1024 bits" but
					// the UI shows DEFAULT_BITS — behavior kept as-is.
					bitsSlider.setEnabled(false);
					bitsSlider.setProgress(DEFAULT_BITS - minBits);
					bitsText.setText(String.valueOf(DEFAULT_BITS));
					bitsText.setEnabled(false);
				} else if (checkedId == R.id.ec) {
					keyType = PubkeyDatabase.KEY_TYPE_EC;
					minBits = ECDSA_DEFAULT_BITS;
					bitsSlider.setEnabled(true);
					bitsSlider.setProgress(ECDSA_DEFAULT_BITS - minBits);
					bitsText.setText(String.valueOf(ECDSA_DEFAULT_BITS));
					bitsText.setEnabled(true);
				}
			}
		});

		bitsSlider.setOnSeekBarChangeListener(new OnSeekBarChangeListener() {
			public void onProgressChanged(SeekBar seekBar, int progress,
					boolean fromTouch) {
				if (PubkeyDatabase.KEY_TYPE_EC.equals(keyType)) {
					// EC keys only come in the fixed field sizes.
					bits = getClosestFieldSize(progress + minBits);
					seekBar.setProgress(bits - minBits);
				} else {
					// Stay evenly divisible by 8 because it looks nicer to have
					// 2048 than 2043 bits.
					final int ourProgress = progress - (progress % 8);
					bits = minBits + ourProgress;
				}

				bitsText.setText(String.valueOf(bits));
			}

			public void onStartTrackingTouch(SeekBar seekBar) {
				// We don't care about the start.
			}

			public void onStopTrackingTouch(SeekBar seekBar) {
				// We don't care about the stop.
			}
		});

		bitsText.setOnFocusChangeListener(new OnFocusChangeListener() {
			public void onFocusChange(View v, boolean hasFocus) {
				if (!hasFocus) {
					final boolean isEc = PubkeyDatabase.KEY_TYPE_EC.equals(keyType);
					try {
						bits = Integer.parseInt(bitsText.getText().toString());
						if (bits < minBits) {
							bits = minBits;
							bitsText.setText(String.valueOf(bits));
						}
						if (isEc) {
							bits = getClosestFieldSize(bits);
						}
					} catch (NumberFormatException nfe) {
						// Unparseable input falls back to the type's default.
						bits = isEc ? ECDSA_DEFAULT_BITS : DEFAULT_BITS;
						bitsText.setText(String.valueOf(bits));
					}

					bitsSlider.setProgress(bits - minBits);
				}
			}
		});

		save.setOnClickListener(new OnClickListener() {
			public void onClick(View view) {
				// Disable to prevent double-submits while generating.
				GeneratePubkeyActivity.this.save.setEnabled(false);
				GeneratePubkeyActivity.this.startEntropyGather();
			}
		});
	}

	/**
	 * Enables the save button only when a nickname is set and both password
	 * fields match, tinting the button accordingly.
	 */
	private void checkEntries() {
		boolean allowSave = true;

		if (!password1.getText().toString().equals(password2.getText().toString()))
			allowSave = false;

		if (nickname.getText().length() == 0)
			allowSave = false;

		if (allowSave) {
			save.getBackground().setColorFilter(getResources().getColor(R.color.accent), PorterDuff.Mode.SRC_IN);
		} else {
			save.getBackground().setColorFilter(null);
		}
		save.setEnabled(allowSave);
	}

	/** Shows the entropy-gathering dialog; completion triggers key generation. */
	private void startEntropyGather() {
		final View entropyView = inflater.inflate(R.layout.dia_gatherentropy, null, false);
		((EntropyView) entropyView.findViewById(R.id.entropy)).addOnEntropyGatheredListener(GeneratePubkeyActivity.this);
		entropyDialog = new EntropyDialog(GeneratePubkeyActivity.this, entropyView);
		entropyDialog.show();
	}

	/**
	 * Callback invoked when the entropy dialog finishes. A null array means
	 * the dialog was aborted, in which case the activity exits.
	 */
	public void onEntropyGathered(byte[] entropy) {
		// For some reason the entropy dialog was aborted, exit activity
		if (entropy == null) {
			finish();
			return;
		}

		this.entropy = entropy.clone();

		// Log a rough entropy-quality estimate (percentage of set bits in the
		// first bytes). Guard against arrays shorter than the sample size —
		// the original indexed 20 bytes unconditionally.
		final int sampleBytes = Math.min(this.entropy.length, ENTROPY_SAMPLE_BYTES);
		int numSetBits = 0;
		for (int i = 0; i < sampleBytes; i++)
			numSetBits += measureNumberOfSetBits(this.entropy[i]);

		if (sampleBytes > 0)
			Log.d(TAG, "Entropy distribution=" + (int) (100.0 * numSetBits / (8.0 * sampleBytes)) + "%");

		Log.d(TAG, "entropy gathered; attempting to generate key...");
		startKeyGen();
	}

	/** Shows an indeterminate progress dialog and runs key generation off the UI thread. */
	private void startKeyGen() {
		progress = new ProgressDialog(GeneratePubkeyActivity.this);
		progress.setMessage(GeneratePubkeyActivity.this.getResources().getText(R.string.pubkey_generating));
		progress.setIndeterminate(true);
		progress.setCancelable(false);
		progress.show();

		Thread keyGenThread = new Thread(mKeyGen);
		keyGenThread.setName("KeyGen");
		keyGenThread.start();
	}

	/**
	 * Background task: seeds a SecureRandom with the gathered entropy,
	 * generates the key pair, and persists it. Always dismisses the progress
	 * dialog and finishes the activity on the UI thread afterwards.
	 */
	final private Runnable mKeyGen = new Runnable() {
		public void run() {
			try {
				boolean encrypted = false;

				SecureRandom random = new SecureRandom();

				// Work around JVM bug
				random.nextInt();
				random.setSeed(entropy);

				KeyPairGenerator keyPairGen = KeyPairGenerator.getInstance(keyType);

				keyPairGen.initialize(bits, random);

				KeyPair pair = keyPairGen.generateKeyPair();
				PrivateKey priv = pair.getPrivate();
				PublicKey pub = pair.getPublic();

				String secret = password1.getText().toString();
				if (secret.length() > 0)
					encrypted = true;

				Log.d(TAG, "private: " + PubkeyUtils.formatKey(priv));
				Log.d(TAG, "public: " + PubkeyUtils.formatKey(pub));

				PubkeyBean pubkey = new PubkeyBean();
				pubkey.setNickname(nickname.getText().toString());
				pubkey.setType(keyType);
				pubkey.setPrivateKey(PubkeyUtils.getEncodedPrivate(priv, secret));
				pubkey.setPublicKey(pub.getEncoded());
				pubkey.setEncrypted(encrypted);
				pubkey.setStartup(unlockAtStartup.isChecked());
				pubkey.setConfirmUse(confirmUse.isChecked());

				PubkeyDatabase pubkeydb = PubkeyDatabase.get(GeneratePubkeyActivity.this);
				pubkeydb.savePubkey(pubkey);
			} catch (Exception e) {
				// Log with the throwable attached instead of printStackTrace().
				Log.e(TAG, "Could not generate key pair", e);
			}

			GeneratePubkeyActivity.this.runOnUiThread(new Runnable() {
				public void run() {
					progress.dismiss();
					GeneratePubkeyActivity.this.finish();
				}
			});
		}
	};

	/** Re-validates the form whenever any watched text field changes. */
	final private TextWatcher textChecker = new TextWatcher() {
		public void afterTextChanged(Editable s) {}

		public void beforeTextChanged(CharSequence s, int start, int count,
				int after) {}

		public void onTextChanged(CharSequence s, int start, int before,
				int count) {
			checkEntries();
		}
	};

	/** Counts the set bits in a byte. */
	private int measureNumberOfSetBits(byte b) {
		// Mask to avoid sign extension before counting.
		return Integer.bitCount(b & 0xff);
	}

	/**
	 * Snaps an arbitrary bit count to the nearest supported EC field size.
	 */
	private int getClosestFieldSize(int bits) {
		int outBits = ECDSA_DEFAULT_BITS;
		int distance = Math.abs(bits - ECDSA_DEFAULT_BITS);

		for (int i = 1; i < ECDSA_SIZES.length; i++) {
			int thisDistance = Math.abs(bits - ECDSA_SIZES[i]);
			if (thisDistance < distance) {
				distance = thisDistance;
				outBits = ECDSA_SIZES[i];
			}
		}
		return outBits;
	}
}
| |
/*
*
* Copyright 2016 Big Data Curation Lab, University of Toronto,
* Patricia Arocena,
* Boris Glavic,
* Renee J. Miller
*
* This software also contains code derived from STBenchmark as described in
* with the permission of the authors:
*
* Bogdan Alexe, Wang-Chiew Tan, Yannis Velegrakis
*
* This code was originally described in:
*
* STBenchmark: Towards a Benchmark for Mapping Systems
* Alexe, Bogdan and Tan, Wang-Chiew and Velegrakis, Yannis
* PVLDB: Proceedings of the VLDB Endowment archive
* 2008, vol. 1, no. 1, pp. 230-244
*
* The copyright of the ToxGene (included as a jar file: toxgene.jar) belongs to
* Denilson Barbosa. The iBench distribution contains this jar file with the
* permission of the author of ToxGene
* (http://www.cs.toronto.edu/tox/toxgene/index.html)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package tresc.benchmark.schemaGen;
import java.util.Random;
import java.util.Vector;
import smark.support.SMarkElement;
import tresc.benchmark.Configuration;
import tresc.benchmark.Constants;
import tresc.benchmark.Modules;
import tresc.benchmark.utils.Utils;
import vtools.dataModel.expression.AND;
import vtools.dataModel.expression.EQ;
import vtools.dataModel.expression.ForeignKey;
import vtools.dataModel.expression.FromClauseList;
import vtools.dataModel.expression.Path;
import vtools.dataModel.expression.Projection;
import vtools.dataModel.expression.SPJQuery;
import vtools.dataModel.expression.Variable;
import vtools.dataModel.types.Atomic;
import vtools.dataModel.types.Rcd;
import vtools.dataModel.types.Set;
import vtools.dataModel.types.Type;
import vtools.utils.structures.EqClassManager;
/**
* This class is a module/part of the GLAVScenarioGenerator. It should not be
* created independently.
*/
public class GLAVScenarioSrcGenerator
{
private final int FROM = 0;
private final int TO = 1;
// counter used for naming the set elements
private int _si = 0;
// counter used for naming the atomic elements
private int _ai = 0;
/**
 * Generates the source schema fragments for a GLAV scenario.
 * <p>
 * Returns an Object array of size 4:
 * (i)   SMarkElement[] -- the root set elements,
 * (ii)  Vector of ForeignKey -- all constraints created,
 * (iii) SPJQuery -- the query that serves as a basis for the mapping query,
 * (iv)  Vector of SMarkElement -- all atomic elements created.
 */
public Object[] generateSourceSchema(Configuration configuration, Random generator, EqClassManager eqClassMgr,
    int repetition)
{
    // Decide how many tables (fragments) to have; always at least one.
    int numOfFragmentsParam = configuration.getParam(Constants.ParameterName.JoinSize);
    int numOfFragmentsDeviation = configuration.getDeviation(Constants.ParameterName.JoinSize);
    int F = Utils.getRandomNumberAroundSomething(generator, numOfFragmentsParam, numOfFragmentsDeviation);
    F = (F < 1) ? 1 : F;
    // decide if we need self join: collapsed to 0 or 1 regardless of how
    // many SELFJOINS repetitions were configured
    int numOfSelfJoins = configuration.getScenarioRepetitions(Constants.ScenarioName.SELFJOINS.ordinal());
    numOfSelfJoins = (numOfSelfJoins != 0) ? 1 : 0;
    // a list with the root set elements we generate and the atomic elements
    // of each such root set
    SMarkElement[] rootSets = new SMarkElement[F];
    Vector<SMarkElement>[] atomicElements = new Vector[F];
    // numOfSetElements[i] holds the cumulative from-clause size up to
    // fragment i (see the loop over rootSets below)
    int[] numOfSetElements = new int[F + numOfSelfJoins];
    // a list with all the set elements
    Vector<SMarkElement> setElementsVector = new Vector<SMarkElement>();
    // create the fragments
    for (int f = 0; f < F; f++)
    {
        // Decide how deep we need to go for this root set
        int nestingParam = configuration.getParam(Constants.ParameterName.NestingDepth);
        int nestingDeviation = configuration.getDeviation(Constants.ParameterName.NestingDepth);
        int maxNestingdepth = Utils.getRandomNumberAroundSomething(generator, nestingParam, nestingDeviation);
        // now create the element; the hook encodes scenario stamp,
        // repetition and the running set counter _si
        String randomName = Modules.nameFactory.getARandomName();
        String hook = GLAVScenarioGenerator._stamp + repetition + "S" + _si;
        String name = randomName + "_" + hook;
        // create the root set element
        SMarkElement setElement = new SMarkElement(name, new Set(), null, f, 0);
        setElement.setHook(hook);
        // create the variable for its eqClass
        Variable var = new Variable("X" + _si);
        eqClassMgr.classify(setElement);
        Object oclass = eqClassMgr.getEqClass(setElement);
        eqClassMgr.setEqClassHook(oclass, var);
        rootSets[f] = setElement;
        _si++;
        populateSetElement(setElement, configuration, 0, maxNestingdepth, repetition, generator, f, 0,
            eqClassMgr);
        generateAtomicElementsEqClassValues(setElement, var, eqClassMgr);
        Vector<SMarkElement> atomicElementsVector = new Vector<SMarkElement>();
        collectAtomicElements(setElement, atomicElementsVector, false);
        atomicElements[f] = atomicElementsVector;
    }
    // partial query that will be used in the final mapping query
    SPJQuery query = new SPJQuery();
    FromClauseList from = query.getFrom();
    // create the from clause list and keep track of
    // the number of set elements in each fragment
    // (note: from.size() is cumulative, so numOfSetElements[i] is a
    // running total, not a per-fragment count)
    for (int i = 0, imax = rootSets.length; i < imax; i++)
    {
        generateFromClause(rootSets[i], Path.ROOT, from, setElementsVector, eqClassMgr);
        numOfSetElements[i] = from.size();
    }
    // generate the source constraints and add them in the where clause of
    // query
    Vector<ForeignKey> constraints = new Vector<ForeignKey>();
    generateConstraints(rootSets, configuration, generator, numOfSetElements, setElementsVector, eqClassMgr,
        query, constraints);
    // Now we need to take the appropriate steps in the case of self join
    Vector<SMarkElement> selfJoinAtomicElements = new Vector<SMarkElement>();
    if (numOfSelfJoins != 0)
    {
        // we clone the first fragment (i.e. we make a self join on the
        // first fragment) and we assign each of its elements to its own
        // eqclass
        SMarkElement selfJoinEl = rootSets[0].clone();
        selfJoinHouseKeeping(selfJoinEl, Path.ROOT, eqClassMgr);
        // collect its atomic elements that we will need to append to the
        // list of elements we will return
        collectAtomicElements(selfJoinEl, selfJoinAtomicElements, false);
        // now we create a join between the fragment 0 and the selfJoin
        // fragment. We decided to make the selfJoin only on 1 attribute
        // for simplicity. The FROM fragment is the 0 fragment and TO
        // fragment is the selfJoin Fragment.
        // select a random set from the fragment FROM fragment
        int numberOfSetsInFr = numOfSetElements[0];
        int randomSetNum = generator.nextInt(numberOfSetsInFr);
        SMarkElement fromSetElement = setElementsVector.elementAt(randomSetNum);
        // collect the atomic elements of the set element and select
        // randomly one of them.
        Vector<SMarkElement> fromAtomicSubElem = new Vector<SMarkElement>();
        collectAtomicElements(fromSetElement, fromAtomicSubElem, true);
        int rf = generator.nextInt(fromAtomicSubElem.size());
        SMarkElement fromAtomicElement = fromAtomicSubElem.remove(rf);
        // collect the set elements and select a random set from the
        // fragment TO fragment (i.e. the self join)
        randomSetNum = generator.nextInt(numberOfSetsInFr);
        Vector<SMarkElement> selfJoinSetElements = new Vector<SMarkElement>();
        generateFromClause(selfJoinEl, Path.ROOT, from, selfJoinSetElements, eqClassMgr);
        SMarkElement toSetElement = selfJoinSetElements.elementAt(randomSetNum);
        // collect the atomic elements of the set element and select
        // randomly one of them.
        Vector<SMarkElement> toAtomicSubElem = new Vector<SMarkElement>();
        collectAtomicElements(toSetElement, toAtomicSubElem, true);
        int rt = generator.nextInt(toAtomicSubElem.size());
        SMarkElement toAtomicElement = toAtomicSubElem.remove(rt);
        // if (log.isDebugEnabled()) {log.debug(fromAtomicElement + " "
        // +
        // eqClassMgr.getEqClassHook(eqClassMgr.getEqClass(fromAtomicElement))
        // + "\n" + fromSetElement
        // + "\n");};
        // if (log.isDebugEnabled()) {log.debug(toAtomicElement + " "
        // +
        // eqClassMgr.getEqClassHook(eqClassMgr.getEqClass(toAtomicElement))
        // + "\n" + toSetElement + "\n");};
        // and now we need to create a foreign key from the
        // fromAtomicElement (part of the fromSetElement) to the
        // toAtomicElement (part of the toSetElement)
        ForeignKey fKeySrc = new ForeignKey();
        Vector<Variable> varsF = new Vector<Variable>();
        Vector<Path> exprsF = new Vector<Path>();
        getPathTerms(fromSetElement, varsF, exprsF, from, eqClassMgr);
        // terms were collected child-to-root; add them root-first
        for (int k = varsF.size() - 1; k >= 0; k--)
        {
            fKeySrc.addLeftTerm(varsF.elementAt(k), exprsF.elementAt(k));
        }
        Vector<Variable> varsT = new Vector<Variable>();
        Vector<Path> exprsT = new Vector<Path>();
        getPathTerms(toSetElement, varsT, exprsT, from, eqClassMgr);
        for (int k = varsT.size() - 1; k >= 0; k--)
        {
            fKeySrc.addRightTerm(varsT.elementAt(k), exprsT.elementAt(k));
        }
        Projection pathFrom = (Projection)getPathExprFromSet(fromAtomicElement, eqClassMgr);
        Projection pathTo = (Projection) getPathExprFromSet(toAtomicElement, eqClassMgr);
        fKeySrc.addFKeyAttr(pathTo, pathFrom);
        constraints.add(fKeySrc);
        // add the join condition to the where clause
        AND where = (AND) query.getWhere();
        where.add(new EQ(pathFrom.clone(), pathTo.clone()));
        query.setWhere(where);
        // we merge the eqClasses for the self join elements
        Object oclassF = eqClassMgr.getEqClass(fromAtomicElement);
        Object oclassT = eqClassMgr.getEqClass(toAtomicElement);
        eqClassMgr.mergeEqClass(oclassF, oclassT);
    }
    // put all the atomic elements together into one big Vector
    Vector<SMarkElement> allAtomicElements = new Vector<SMarkElement>();
    for (int i = 0; i < F; i++)
    {
        Vector<SMarkElement> v = atomicElements[i];
        for (int j = 0, jmax = v.size(); j < jmax; j++)
        {
            allAtomicElements.add(v.elementAt(j));
        }
    }
    for (int j = 0, jmax = selfJoinAtomicElements.size(); j < jmax; j++)
    {
        allAtomicElements.add(selfJoinAtomicElements.elementAt(j));
    }
    // return whatever you have generated
    Object[] retVal = new Object[4];
    retVal[0] = rootSets;
    retVal[1] = constraints;
    retVal[2] = query;
    retVal[3] = allAtomicElements;
    return retVal;
}
/**
 * Generates the join constraints (foreign keys) between the fragments and
 * builds the where clause of the query out of the joined attribute pairs.
 * Joins either chain fragment i to i-1 (CHAIN) or every fragment to
 * fragment 0 (STAR); VARIABLE picks one of the two at random.
 * The eqClasses of each joined attribute pair are merged at the end.
 */
private void generateConstraints(SMarkElement[] rootSets, Configuration configuration, Random generator,
    int[] numOfSetElements, Vector<SMarkElement> setElementsVector, EqClassManager eqClassMgr,
    SPJQuery query, Vector<ForeignKey> constraints)
{
    int F = rootSets.length;
    // decide the kind of join we will do
    int joinKind = configuration.getParam(Constants.ParameterName.JoinKind);
    if (joinKind == Constants.JoinKind.VARIABLE.ordinal())
    {
        int randomInt = generator.nextInt(2);
        joinKind = (randomInt == 0) ? Constants.JoinKind.CHAIN.ordinal() : Constants.JoinKind.STAR.ordinal();
    }
    // we keep all the atomic elements on which foreign keys are
    // defined s.t. we merge their eqClasses at the end
    SMarkElement[][][] allPairs = new SMarkElement[F][][];
    FromClauseList from = query.getFrom();
    AND where = new AND();
    for (int i = 1; i < F; i++)
    {
        // read params for how many attributes to be used in the join
        int joinWidth = configuration.getParam(Constants.ParameterName.NumOfJoinAttributes);
        int joinWidthDeviation = configuration.getDeviation(Constants.ParameterName.NumOfJoinAttributes);
        // select a join width. Note that the number above may be 0 in which
        // case we have a cartezian product.
        int numOfJoinAttr = Utils.getRandomNumberAroundSomething(generator, joinWidth, joinWidthDeviation);
        // CHAIN joins fragment i to i-1; STAR joins fragment i to 0
        int fromFr = (joinKind == Constants.JoinKind.CHAIN.ordinal()) ? i : 0;
        int toFr = (joinKind == Constants.JoinKind.CHAIN.ordinal()) ? (i - 1) : i;
        // select a random set from the fragment fromFr
        // (numOfSetElements holds cumulative counts, hence the subtraction)
        int numberOfSetsInFr = numOfSetElements[fromFr] - ((fromFr == 0) ? 0 : numOfSetElements[fromFr - 1]);
        int randomSetNum = generator.nextInt(numberOfSetsInFr);
        int setNum = ((fromFr == 0) ? 0 : numOfSetElements[fromFr - 1]) + randomSetNum;
        SMarkElement fromSetElement = setElementsVector.elementAt(setNum);
        // collect the atomic elements of the set element
        Vector<SMarkElement> fromAtomicSubElem = new Vector<SMarkElement>();
        collectAtomicElements(fromSetElement, fromAtomicSubElem, true);
        // select a random set from the fragment toFr
        numberOfSetsInFr = numOfSetElements[toFr] - ((toFr == 0) ? 0 : numOfSetElements[toFr - 1]);
        randomSetNum = generator.nextInt(numberOfSetsInFr);
        setNum = ((toFr == 0) ? 0 : numOfSetElements[toFr - 1]) + randomSetNum;
        SMarkElement toSetElement = setElementsVector.elementAt(setNum);
        // collect the atomic elements of the set element
        Vector<SMarkElement> toAtomicSubElem = new Vector<SMarkElement>();
        collectAtomicElements(toSetElement, toAtomicSubElem, true);
        // clamp the join width to the number of available atomic elements
        numOfJoinAttr = (numOfJoinAttr > fromAtomicSubElem.size()) ? fromAtomicSubElem.size() : numOfJoinAttr;
        numOfJoinAttr = (numOfJoinAttr > toAtomicSubElem.size()) ? toAtomicSubElem.size() : numOfJoinAttr;
        // select randomly numOfJoinAttr from each vector of atomic elements
        // (remove() guarantees each attribute is picked at most once)
        SMarkElement[][] pairs = new SMarkElement[numOfJoinAttr][2];
        for (int j = 0; j < numOfJoinAttr; j++)
        {
            int rf = generator.nextInt(fromAtomicSubElem.size());
            pairs[j][FROM] = fromAtomicSubElem.remove(rf);
            int rt = generator.nextInt(toAtomicSubElem.size());
            pairs[j][TO] = toAtomicSubElem.remove(rt);
        }
        allPairs[i] = pairs;
        ForeignKey fKeySrc = new ForeignKey();
        Vector<Variable> varsF = new Vector<Variable>();
        Vector<Path> exprsF = new Vector<Path>();
        getPathTerms(fromSetElement, varsF, exprsF, from, eqClassMgr);
        // terms were collected child-to-root; add them root-first
        for (int k = varsF.size() - 1; k >= 0; k--)
        {
            fKeySrc.addLeftTerm(varsF.elementAt(k), exprsF.elementAt(k));
        }
        Vector<Variable> varsT = new Vector<Variable>();
        Vector<Path> exprsT = new Vector<Path>();
        getPathTerms(toSetElement, varsT, exprsT, from, eqClassMgr);
        for (int k = varsT.size() - 1; k >= 0; k--)
        {
            fKeySrc.addRightTerm(varsT.elementAt(k), exprsT.elementAt(k));
        }
        for (int j = 0; j < numOfJoinAttr; j++)
        {
            Object oclass = eqClassMgr.getEqClass(pairs[j][FROM]);
            Projection pathFrom = (Projection) eqClassMgr.getEqClassHook(oclass);
            oclass = eqClassMgr.getEqClass(pairs[j][TO]);
            Projection pathTo = (Projection) eqClassMgr.getEqClassHook(oclass);
            fKeySrc.addFKeyAttr(pathTo, pathFrom);
            // add the join condition to the where clause
            where.add(new EQ(pathFrom.clone(), pathTo.clone()));
        }
        constraints.add(fKeySrc);
    }
    query.setWhere(where);
    // we merge the eqClasses for all the pairs
    for (int i = 1; i < F; i++)
    {
        SMarkElement[][] pairs = allPairs[i];
        for (int j = 0, jmax = pairs.length; j < jmax; j++)
        {
            Object oclassF = eqClassMgr.getEqClass(pairs[j][FROM]);
            Object oclassT = eqClassMgr.getEqClass(pairs[j][TO]);
            eqClassMgr.mergeEqClass(oclassF, oclassT);
        }
    }
}
/**
 * Traverses a cloned fragment and assigns each element to a fresh eqClass
 * with the appropriate hook: atomic and record elements get a Projection
 * over the parent path; set elements get a fresh Variable. Records and
 * sets recurse into their children. Every element is expected to be
 * unclassified on entry.
 */
private void selfJoinHouseKeeping(SMarkElement curEl, Path parentPath, EqClassManager eqClassMgr)
{
    Type type = curEl.getType();
    if ((type instanceof Atomic) || (type instanceof Rcd))
    {
        // Atomic and record elements share the same projection-hook logic;
        // only records recurse further.
        Projection attr = new Projection(parentPath.clone(), curEl.getLabel());
        if (eqClassMgr.getEqClass(curEl) != null)
            throw new RuntimeException("Should not happen 98342394");
        eqClassMgr.classify(curEl);
        eqClassMgr.setEqClassHook(eqClassMgr.getEqClass(curEl), attr);
        if (type instanceof Rcd)
        {
            for (int i = 0, n = curEl.size(); i < n; i++)
            {
                selfJoinHouseKeeping((SMarkElement) curEl.getSubElement(i), attr, eqClassMgr);
            }
        }
    }
    else if (type instanceof Set)
    {
        // Sets are hooked to a fresh variable named after the global counter.
        Variable var = new Variable("X" + _si);
        _si++;
        if (eqClassMgr.getEqClass(curEl) != null)
            throw new RuntimeException("Should not happen 98342394");
        eqClassMgr.classify(curEl);
        eqClassMgr.setEqClassHook(eqClassMgr.getEqClass(curEl), var);
        for (int i = 0, n = curEl.size(); i < n; i++)
        {
            selfJoinHouseKeeping((SMarkElement) curEl.getSubElement(i), var, eqClassMgr);
        }
    }
    else
    {
        throw new RuntimeException("It should not happen 7687678687");
    }
}
/**
 * Collects, for the given set element and each of its ancestors, the pair
 * (eqClass variable, from-clause expression) used as generators of a
 * foreign key. Terms are appended child-to-root; callers iterate the
 * vectors in reverse to get root-first order.
 */
private void getPathTerms(SMarkElement curEl, Vector<Variable> vars, Vector<Path> exprs, FromClauseList from,
    EqClassManager eqClassMgr)
{
    // Record this element's variable and its expression in the from clause.
    Object oclass = eqClassMgr.getEqClass(curEl);
    Variable var = (Variable) eqClassMgr.getEqClassHook(oclass);
    vars.add(var.clone());
    int pos = from.getExprVarPosition(var);
    Path expr = (Path) from.getExpression(pos);
    exprs.add(expr.clone());
    // Walk up the tree; the recursion ends at the root element.
    // (Replaces the original empty "if (parent == null) { ; }" branch.)
    SMarkElement parent = (SMarkElement) curEl.getParent();
    if (parent != null)
    {
        getPathTerms(parent, vars, exprs, from, eqClassMgr);
    }
}
/**
 * Returns the path expression of an atomic element relative to its nearest
 * ancestor set: projections are stacked for atomic/record ancestors, and
 * the walk terminates at the first set element, whose eqClass hook is a
 * Variable. A null element yields the root path.
 */
private Path getPathExprFromSet(SMarkElement curEl, EqClassManager eqClassMgr)
{
    if (curEl == null)
    {
        return Path.ROOT;
    }
    Type t = curEl.getType();
    if ((t instanceof Atomic) || (t instanceof Rcd))
    {
        // Project this element's label onto the path of its parent.
        Path parentPath = getPathExprFromSet((SMarkElement) curEl.getParent(), eqClassMgr);
        return new Projection(parentPath, curEl.getLabel());
    }
    if (t instanceof Set)
    {
        // Sets are represented by the variable stored as their eqClass hook.
        Object oclass = eqClassMgr.getEqClass(curEl);
        Variable var = (Variable) eqClassMgr.getEqClassHook(oclass);
        return var.clone();
    }
    throw new RuntimeException("Should not happen 782343784");
}
/**
 * Classifies the atomic and record elements of a fragment and sets each
 * eqClass hook to the element's path expression (a Projection built over
 * the enclosing path). Set elements are assumed to be classified already,
 * with a Variable hook assigned during schema creation.
 */
private void generateAtomicElementsEqClassValues(SMarkElement curEl, Path curPath, EqClassManager eqClassMgr)
{
    Type type = curEl.getType();
    if ((type instanceof Atomic) || (type instanceof Rcd))
    {
        // Atomic and record elements share the classify-and-hook logic;
        // only records recurse into children.
        Projection attr = new Projection(curPath.clone(), curEl.getLabel());
        eqClassMgr.classify(curEl);
        eqClassMgr.setEqClassHook(eqClassMgr.getEqClass(curEl), attr);
        if (type instanceof Rcd)
        {
            for (int i = 0, n = curEl.size(); i < n; i++)
            {
                generateAtomicElementsEqClassValues((SMarkElement) curEl.getSubElement(i), attr, eqClassMgr);
            }
        }
    }
    else if (type instanceof Set)
    {
        // Reuse the Variable assigned to this set when it was created.
        Variable var = (Variable) eqClassMgr.getEqClassHook(eqClassMgr.getEqClass(curEl));
        for (int i = 0, n = curEl.size(); i < n; i++)
        {
            generateAtomicElementsEqClassValues((SMarkElement) curEl.getSubElement(i), var, eqClassMgr);
        }
    }
    else
    {
        throw new RuntimeException("It should not happen");
    }
}
/**
 * Collects into {@code v} all the atomic elements reachable from
 * {@code currEl}. When {@code stopOnSets} is true the traversal does not
 * descend into nested set elements (only the current "record level" is
 * scanned); otherwise the whole subtree is visited.
 *
 * @param currEl     root of the traversal
 * @param v          output vector receiving the atomic elements
 * @param stopOnSets whether to stop at nested set elements
 */
private void collectAtomicElements(SMarkElement currEl, Vector<SMarkElement> v, boolean stopOnSets)
{
Type type = currEl.getType();
if (type instanceof Atomic)
{
v.add(currEl);
}
else if ((type instanceof Rcd) || (type instanceof Set))
{
// The Rcd and Set branches were textually identical; merged (DRY),
// behavior unchanged.
for (int i = 0, imax = currEl.size(); i < imax; i++)
{
SMarkElement child = (SMarkElement) currEl.getSubElement(i);
// Optionally skip nested sets instead of recursing into them.
if (stopOnSets && (child.getType() instanceof Set))
continue;
collectAtomicElements(child, v, stopOnSets);
}
}
else
{
throw new RuntimeException("Should not happen");
}
}
/**
 * Generates the From-clause entries for a part of the schema. Also fills
 * {@code setElementsVector} with every set element found below
 * {@code curEl}, so each From-clause position can be mapped back to the
 * schema element it corresponds to.
 */
private void generateFromClause(SMarkElement curEl, Path parentPath, FromClauseList from,
Vector<SMarkElement> setElementsVector, EqClassManager eqClassMgr)
{
Type type = curEl.getType();
// Atomic leaves contribute nothing to the From clause.
if (type instanceof Atomic)
{
return;
}
if (type instanceof Set)
{
Object eqClass = eqClassMgr.getEqClass(curEl);
Variable setVar = (Variable) eqClassMgr.getEqClassHook(eqClass);
Projection setExpr = new Projection(parentPath.clone(), curEl.getLabel());
from.add(setVar, setExpr);
// Record which schema element this From-clause position refers to.
setElementsVector.add(curEl);
for (int i = 0, n = curEl.size(); i < n; i++)
{
SMarkElement child = (SMarkElement) curEl.getSubElement(i);
generateFromClause(child, setVar, from, setElementsVector, eqClassMgr);
}
}
else if (type instanceof Rcd)
{
// Records pass their own hooked path expression down to their fields.
Object eqClass = eqClassMgr.getEqClass(curEl);
Path rcdPath = (Path) eqClassMgr.getEqClassHook(eqClass);
for (int i = 0, n = curEl.size(); i < n; i++)
{
SMarkElement child = (SMarkElement) curEl.getSubElement(i);
generateFromClause(child, rcdPath, from, setElementsVector, eqClassMgr);
}
}
else
{
throw new RuntimeException("It should not happen");
}
}
/**
 * Populates a set element with freshly generated sub-elements: A atomic
 * elements plus S nested set elements, recursing into each nested set until
 * {@code maxNestingdepth} is reached. The counts are drawn from the
 * configuration (NumOfSubElements, JoinSize) with random deviation, so the
 * same {@code generator} seed reproduces the same schema.
 *
 * Side effects: mutates the shared element counters {@code _ai}/{@code _si}
 * and registers each new set element with {@code eqClassMgr}, hooking a
 * fresh query variable onto its equivalence class.
 *
 * NOTE(review): the order of calls on {@code generator} is significant for
 * reproducibility — do not reorder the random draws below.
 */
private void populateSetElement(SMarkElement parentElement, Configuration configuration, int nestingDepth,
int maxNestingdepth, int repetition, Random generator, int fragment, int fragmentAppearance,
EqClassManager eqClassMgr)
{
// first decide how many attributes you will create.
int numOfElements = configuration.getParam(Constants.ParameterName.NumOfSubElements);
int numOfElementsDeviation = configuration.getDeviation(Constants.ParameterName.NumOfSubElements);
int N = Utils.getRandomNumberAroundSomething(generator, numOfElements, numOfElementsDeviation);
// at least one sub-element overall
N = (N < 1) ? 1 : N;
// number of set elements
int numOfSetElements = configuration.getParam(Constants.ParameterName.JoinSize);
int numOfSetElementsDeviation = configuration.getDeviation(Constants.ParameterName.JoinSize);
int S = Utils.getRandomNumberAroundSomething(generator, numOfSetElements, numOfSetElementsDeviation);
if (nestingDepth < maxNestingdepth)
{
// sets can never outnumber the total sub-elements
if (S > N)
S = N;
if (S == 0) // if we need to go deeper, but nSetElts
// came out 0, we set it to 1
S = 1;
}
else S = 0; // max depth reached: no further nesting
// number of atomic elements
int A = N - S;
// just a safety to have at least one atomic element in the max depth
// if ((nestingDepth == maxNestingdepth) && (A == 0))
// A = 1;
// we should have at least one atomic attribute.
A = (A == 0) ? 1 : A;
// generate A atomic elements
for (int i = 0, imax = A; i < imax; i++)
{
String randomName = Modules.nameFactory.getARandomName();
// hook encodes stamp + repetition + "A" + running atomic counter
String hook = GLAVScenarioGenerator._stamp + repetition + "A" + _ai;
String name = randomName + "_" + hook;
SMarkElement atomicElement = new SMarkElement(name, Atomic.STRING, null, fragment, fragmentAppearance);
atomicElement.setHook(hook);
parentElement.addSubElement(atomicElement);
_ai++;
}
// generate the set elements.
for (int i = 0, imax = S; i < imax; i++)
{
String randomName = Modules.nameFactory.getARandomName();
// hook encodes stamp + repetition + "S" + running set counter
String hook = GLAVScenarioGenerator._stamp + repetition + "S" + _si;
String name = randomName + "_" + hook;
SMarkElement setElement = new SMarkElement(name, new Set(), null, fragment, fragmentAppearance);
setElement.setHook(hook);
parentElement.addSubElement(setElement);
// create the variable for its eq class
Variable var = new Variable("X" + _si);
eqClassMgr.classify(setElement);
Object oclass = eqClassMgr.getEqClass(setElement);
eqClassMgr.setEqClassHook(oclass, var);
_si++;
// recurse one level deeper into the freshly created set
populateSetElement(setElement, configuration, nestingDepth + 1, maxNestingdepth, repetition,
generator, fragment, fragmentAppearance, eqClassMgr);
}
}
}
| |
package com.crawljax.plugins.testilizer.generated.photogallery_EXND;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import org.junit.*;
import static org.junit.Assert.*;
import org.openqa.selenium.*;
import org.openqa.selenium.firefox.FirefoxDriver;
import org.openqa.selenium.firefox.FirefoxProfile;
import org.w3c.dom.Document;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.NodeList;
import com.crawljax.forms.RandomInputValueGenerator;
import com.crawljax.util.DomUtils;
/*
* Generated @ Wed Apr 16 20:43:32 PDT 2014
*/
public class GeneratedTestCase11 {
// Browser session driving the application under test.
private WebDriver driver;
// Base URL of the application under test (assigned in setUp()).
private String url;
// Selenium-IDE boilerplate: whether the next alert dialog should be accepted.
private boolean acceptNextAlert = true;
// Accumulates soft-assertion failures; reported via fail() in tearDown().
private StringBuffer verificationErrors = new StringBuffer();
// Scratch holders reused by the checkState* assertion methods below.
private DOMElement element;
private DOMElement parentElement;
private ArrayList<DOMElement> childrenElements = new ArrayList<DOMElement>();
// NOTE(review): DOM appears unused in the visible chunk — confirm before removing.
private String DOM = null;
// Switch that enables JS code-coverage collection through the JSCover proxy.
boolean getCoverageReport = false;
@Before
public void setUp() throws Exception {
// Read the JavaScript code-coverage switch from Testilizer.
getCoverageReport = com.crawljax.plugins.testilizer.Testilizer.getCoverageReport();
// With coverage enabled the browser must be routed through the proxy profile.
driver = getCoverageReport ? new FirefoxDriver(getProfile()) : new FirefoxDriver();
url = "http://localhost:8888/phormer331/";
driver.manage().timeouts().implicitlyWait(30, TimeUnit.SECONDS);
}
/**
 * Builds a Firefox profile that routes all HTTP traffic — including
 * localhost — through a local proxy on port 3128, as required for
 * JavaScript coverage collection.
 */
public static FirefoxProfile getProfile() {
final FirefoxProfile proxiedProfile = new FirefoxProfile();
proxiedProfile.setPreference("network.proxy.type", 1);
proxiedProfile.setPreference("network.proxy.http", "localhost");
proxiedProfile.setPreference("network.proxy.http_port", 3128);
/* use proxy for everything, including localhost */
proxiedProfile.setPreference("network.proxy.no_proxies_on", "");
return proxiedProfile;
}
@After
public void tearDown() throws Exception {
// Dump the JS coverage report (if enabled) before the browser goes away.
if (getCoverageReport) {
((JavascriptExecutor) driver).executeScript(" if (window.jscoverage_report) {return jscoverage_report('CodeCoverageReport');}");
}
driver.quit();
// Report any soft-assertion failures collected during the test.
final String collectedErrors = verificationErrors.toString();
if (!collectedErrors.isEmpty()) {
fail(collectedErrors);
}
}
/*
 * Test Cases
 */
// Generated test: replays the crawl path index (state 0) -> state 80 and,
// at each state, first mutates the DOM (fault seeding) and then runs every
// family of generated assertions. The call order below mirrors the crawl
// and must not be changed.
@Test
public void method11(){
driver.get(url);
//From state 0 to state 80
//Eventable{eventType=click, identification=xpath /HTML[1]/BODY[1]/CENTER[1]/DIV[1]/DIV[1]/DIV[1]/DIV[2]/DIV[2]/DIV[1]/SPAN[1]/A[1], element=Element{node=[A: null], tag=A, text=???Photos[0], attributes={href=.?s=2, title=Greenery !! [2014/01/16]}}, source=StateVertexImpl{id=0, name=index}, target=StateVertexImpl{id=80, name=state80}}
mutateDOMTree(0);
checkState0_OriginalAssertions();
checkState0_ReusedAssertions();
checkState0_GeneratedAssertions();
checkState0_LearnedAssertions();
checkState0_AllAssertions();
checkState0_RandAssertions1();
checkState0_RandAssertions2();
checkState0_RandAssertions3();
checkState0_RandAssertions4();
checkState0_RandAssertions5();
driver.findElement(By.xpath("/HTML[1]/BODY[1]/CENTER[1]/DIV[1]/DIV[1]/DIV[1]/DIV[2]/DIV[2]/DIV[1]/SPAN[1]/A[1]")).click();
//Sink node at state 80
mutateDOMTree(80);
checkState80_OriginalAssertions();
checkState80_ReusedAssertions();
checkState80_GeneratedAssertions();
checkState80_LearnedAssertions();
checkState80_AllAssertions();
checkState80_RandAssertions1();
checkState80_RandAssertions2();
checkState80_RandAssertions3();
checkState80_RandAssertions4();
checkState80_RandAssertions5();
}
// Assertions carried over verbatim from the original (human-written) test
// for state 0. Soft-fail style: on mismatch, print the method name and
// bail out instead of throwing.
public void checkState0_OriginalAssertions(){
if(!(isElementPresent(By.cssSelector("div#Granny")))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // original assertion
}
// Intentionally empty: the generator found no assertions to reuse for state 0.
public void checkState0_ReusedAssertions(){
}
// Generator-produced structural assertions for state 0. Each group sets up
// an expected element, its parent, and its children, then checks the region
// against the live DOM (soft-fail: print method name and return). The
// commented-out checks were disabled by the generator — keep them as-is.
public void checkState0_GeneratedAssertions(){
element = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("id=\"Granny\"")));
parentElement = new DOMElement("CENTER", "", new ArrayList<String>(Arrays.asList("")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList("id=\"sidecol\""))));
childrenElements.add(new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("id=\"maincol\""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
// if(!(isElementRegionFullPresent(parentElement , element, childrenElements))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // generated assertion in case of AEP for Original
element = new DOMElement("IMG", "", new ArrayList<String>(Arrays.asList("id=\"ss_photo\"","onload=\"javscript:ss_loaddone();\"","src=\"images/000006_rryky_0.jpg\"")));
parentElement = new DOMElement("A", "", new ArrayList<String>(Arrays.asList("href=\".?p=6\"","id=\"ss_link2\"","style=\"display:inline;\"")));
childrenElements.clear();
if(!(isElementRegionTagPresent(parentElement , element, childrenElements))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // generated assertion in case of RegionTagMatch
element = new DOMElement("SPAN", "1", new ArrayList<String>(Arrays.asList("id=\"ss_n\"")));
parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("class=\"titlepart\"","style=\"letter-spacing: 2px; padding-bottom: 1em; margin-bottom: 0px;\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "1", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagPresent(parentElement , element, childrenElements))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // generated assertion in case of RegionTagMatch
element = new DOMElement("SPAN", "2", new ArrayList<String>(Arrays.asList("id=\"ss_n\"")));
parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("class=\"titlepart\"","style=\"letter-spacing: 2px; padding-bottom: 1em; margin-bottom: 0px;\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "2", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagPresent(parentElement , element, childrenElements))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // generated assertion in case of RegionTagMatch
element = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("id=\"photoBoxes\"","style=\"display: block;\"")));
parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("class=\"wholePhoto\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList("class=\"photoBox\""))));
childrenElements.add(new DOMElement("DIV", "PhotoNotesNoDescripton.", new ArrayList<String>(Arrays.asList("class=\"photoBox\""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList("class=\"photoBox\"","style=\"line-height: 150%\""))));
childrenElements.add(new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagPresent(parentElement , element, childrenElements))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // generated assertion in case of RegionTagMatch
}
// "Learned" (predicted) region assertions for state 0, mined by Testilizer.
// NOTE(review): several string literals below contain raw line breaks — an
// artifact of how this file was generated/extracted; preserved byte-for-byte.
public void checkState0_LearnedAssertions(){
element = new DOMElement("DIV", "StoriesPhotos[0]DefaultStory[6]", new ArrayList<String>(Arrays.asList("class=\"submenu\"")));
parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("class=\"part\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList("class=\"item\""))));
childrenElements.add(new DOMElement("DIV", "Photos[0]", new ArrayList<String>(Arrays.asList("class=\"item\""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("DIV", "DefaultStory[6]", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagAttPresent(parentElement , element, childrenElements))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // predicted region assertion
element = new DOMElement("DIV", "EtcEmailRSSAdminPagePoweredbyPhormer3.31", new ArrayList<String>(Arrays.asList("class=\"titlepart\"")));
parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("class=\"part\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("SPAN", "", new ArrayList<String>(Arrays.asList("class=\"reddot\""))));
childrenElements.add(new DOMElement("#text", "Poweredby", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagAttPresent(parentElement , element, childrenElements))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // predicted region assertion
element = new DOMElement("SPAN", "RSS", new ArrayList<String>(Arrays.asList("class=\"dot\"")));
parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("class=\"item\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagAttPresent(parentElement , element, childrenElements))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // predicted region assertion
element = new DOMElement("DIV", "100_0794November9tho
12", new ArrayList<String>(Arrays.asList("class=\"aThumb\"","onmouseout=\"javascript: DarkenIt(this);\"","onmouseover=\"javascript: LightenIt(this);\"","style=\"-moz-opacity:0.6;\"")));
parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("class=\"submenu\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("CENTER", "100_0794November9tho
12", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagAttPresent(parentElement , element, childrenElements))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // predicted region assertion
element = new DOMElement("DIV", "100_0758November9tho
12", new ArrayList<String>(Arrays.asList("class=\"thumbNameLine\"")));
parentElement = new DOMElement("A", "", new ArrayList<String>(Arrays.asList("href=\".?p=1\"","title=\"100_0758: 25 KB, 3681 hits and rated 0 by 0 person\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "100_0758", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagAttPresent(parentElement , element, childrenElements))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // predicted region assertion
}
// Union of the original, generated, and learned assertions for state 0
// (the generator repeats the individual checks here in one method).
public void checkState0_AllAssertions(){
if(!(isElementPresent(By.cssSelector("div#Granny")))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // original assertion
element = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("id=\"Granny\"")));
parentElement = new DOMElement("CENTER", "", new ArrayList<String>(Arrays.asList("")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList("id=\"sidecol\""))));
childrenElements.add(new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("id=\"maincol\""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
// if(!(isElementRegionFullPresent(parentElement , element, childrenElements))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // generated assertion in case of AEP for Original
element = new DOMElement("DIV", "StoriesPhotos[0]DefaultStory[6]", new ArrayList<String>(Arrays.asList("class=\"submenu\"")));
parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("class=\"part\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList("class=\"item\""))));
childrenElements.add(new DOMElement("DIV", "Photos[0]", new ArrayList<String>(Arrays.asList("class=\"item\""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("DIV", "DefaultStory[6]", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagAttPresent(parentElement , element, childrenElements))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // predicted region assertion
element = new DOMElement("DIV", "EtcEmailRSSAdminPagePoweredbyPhormer3.31", new ArrayList<String>(Arrays.asList("class=\"titlepart\"")));
parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("class=\"part\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("SPAN", "", new ArrayList<String>(Arrays.asList("class=\"reddot\""))));
childrenElements.add(new DOMElement("#text", "Poweredby", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagAttPresent(parentElement , element, childrenElements))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // predicted region assertion
element = new DOMElement("SPAN", "RSS", new ArrayList<String>(Arrays.asList("class=\"dot\"")));
parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("class=\"item\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagAttPresent(parentElement , element, childrenElements))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // predicted region assertion
}
// Randomly sampled element assertions for state 0, batch 1 (baseline
// comparison set; several disabled by the generator).
public void checkState0_RandAssertions1(){
element = new DOMElement("SPAN", "Categories", new ArrayList<String>(Arrays.asList("class=\"reddot\"")));
parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("class=\"titlepart\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
// if(!(isElementRegionFullPresent(parentElement , element, childrenElements))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // Random element assertion
// if(!(isElementPresent(By.xpath("/HTML/BODY/DIV/A[href=\".\"]")))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // Random element assertion
// if(!(isElementPresent(By.xpath("/HTML/BODY/CENTER/DIV/DIV/DIV/DIV[4]/DIV[5]/DIV[class=\"item\"]")))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // Random element assertion
// if(!(isElementPresent(By.xpath("/HTML/BODY/CENTER/DIV/DIV[2]/DIV/DIV/DIV/SPAN/A[href=\".?feat=slideshow\"]")))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // Random element assertion
element = new DOMElement("IMG", "100_0759November9tho
12", new ArrayList<String>(Arrays.asList("height=\"75px\"","src=\"images/000002_ieqey_3.jpg\"","width=\"75px\"")));
parentElement = new DOMElement("A", "", new ArrayList<String>(Arrays.asList("href=\".?p=2\"","title=\"100_0759: 29 KB, 1440 hits and rated 1 by 3 person\"")));
childrenElements.clear();
if(!(isElementRegionTagPresent(parentElement , element, childrenElements))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // Random element assertion
}
// Randomly sampled element assertions for state 0, batch 2.
public void checkState0_RandAssertions2(){
// if(!(isElementPresent(By.xpath("/HTML/BODY/CENTER/DIV/DIV/DIV/DIV[3]/DIV[2]/DIV[5]/SPAN[class=\"dot\"]")))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // Random element assertion
element = new DOMElement("A", "11monthsagoNovember9tho
12", new ArrayList<String>(Arrays.asList("href=\".?p=4\"","title=\"100_0773: 62 KB, 1367 hits and rated 0 by 0 person\"")));
parentElement = new DOMElement("CENTER", "", new ArrayList<String>(Arrays.asList("")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList("height=\"75px\"","src=\"images/000004_dzeck_3.jpg\"","width=\"75px\""))));
childrenElements.add(new DOMElement("IMG", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("BR", "", new ArrayList<String>(Arrays.asList("class=\"thumbNameLine\""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList("class=\"thumbDate\""))));
childrenElements.add(new DOMElement("DIV", "11monthsago", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("DIV", "November9tho
12", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
// if(!(isElementRegionFullPresent(parentElement , element, childrenElements))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // Random element assertion
// if(!(isElementPresent(By.xpath("/HTML/BODY/CENTER/DIV/DIV[2]/DIV/DIV/DIV[4]/DIV/SPAN[2]/A[[href=\".?trn=50#tr\"]")))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // Random element assertion
element = new DOMElement("DIV", "11monthsagoNovember9tho
12", new ArrayList<String>(Arrays.asList("class=\"thumbDate\"")));
parentElement = new DOMElement("A", "", new ArrayList<String>(Arrays.asList("href=\".?p=6\"","title=\"100_0794: 91 KB, 2101 hits and rated 0 by 0 person\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "November9tho
12", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagAttPresent(parentElement , element, childrenElements))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // Random element assertion
element = new DOMElement("DIV", "Photos[0]DefaultStory[6]", new ArrayList<String>(Arrays.asList("class=\"item\"")));
parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("class=\"submenu\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList("class=\"categeach\""))));
childrenElements.add(new DOMElement("SPAN", "Photos[0]", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagPresent(parentElement , element, childrenElements))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // Random element assertion
}
// Randomly sampled element assertions for state 0, batch 3.
public void checkState0_RandAssertions3(){
element = new DOMElement("SPAN", "MyPhotoGallery", new ArrayList<String>(Arrays.asList("class=\"dot\"")));
parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("class=\"item\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagPresent(parentElement , element, childrenElements))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // Random element assertion
// if(!(isElementPresent(By.xpath("/HTML/BODY/CENTER/DIV/DIV/DIV/DIV/DIV[2]/DIV[3]/SPAN[class=\"categeach\"]")))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // Random element assertion
element = new DOMElement("DIV", "100_0760November9tho
12", new ArrayList<String>(Arrays.asList("class=\"thumbDate\"")));
parentElement = new DOMElement("A", "", new ArrayList<String>(Arrays.asList("href=\".?p=3\"","title=\"100_0760: 26 KB, 1480 hits and rated 0 by 0 person\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "November9tho
12", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagAttPresent(parentElement , element, childrenElements))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // Random element assertion
element = new DOMElement("DIV", "2monthsagoNovember9tho
12", new ArrayList<String>(Arrays.asList("class=\"thumbDate\"")));
parentElement = new DOMElement("A", "", new ArrayList<String>(Arrays.asList("href=\".?p=3\"","title=\"100_0760: 26 KB, 1480 hits and rated 0 by 0 person\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "November9tho
12", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagAttPresent(parentElement , element, childrenElements))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // Random element assertion
element = new DOMElement("DIV", "100_0794November9tho
12", new ArrayList<String>(Arrays.asList("class=\"aThumb\"","onmouseout=\"javascript: DarkenIt(this);\"","onmouseover=\"javascript: LightenIt(this);\"","style=\"-moz-opacity:0.6;\"")));
parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("class=\"submenu\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("CENTER", "100_0794November9tho
12", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
// if(!(isElementRegionFullPresent(parentElement , element, childrenElements))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // Random element assertion
}
// Randomly sampled element assertions for state 0, batch 4.
public void checkState0_RandAssertions4(){
// if(!(isElementPresent(By.xpath("/HTML/BODY/CENTER/DIV/DIV/DIV/DIV[5]/DIV[3]/SPAN[class=\"reddot\"]")))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // Random element assertion
element = new DOMElement("SPAN", "", new ArrayList<String>(Arrays.asList("class=\"reddot\"")));
parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("class=\"titlepart\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
// if(!(isElementRegionFullPresent(parentElement , element, childrenElements))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // Random element assertion
element = new DOMElement("A", "AdminPage", new ArrayList<String>(Arrays.asList("href=\"admin.php\"","title=\"Login to the Administration Region\"")));
parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("class=\"item\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "AdminPage", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagAttPresent(parentElement , element, childrenElements))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // Random element assertion
element = new DOMElement("CENTER", "11monthsagoNovember9tho
12", new ArrayList<String>(Arrays.asList("")));
parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("class=\"aThumb\"","onmouseout=\"javascript: DarkenIt(this);\"","onmouseover=\"javascript: LightenIt(this);\"","style=\"-moz-opacity:0.6;\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList("href=\".?p=1\"","title=\"100_0758: 25 KB, 3681 hits and rated 0 by 0 person\""))));
childrenElements.add(new DOMElement("A", "11monthsagoNovember9tho
12", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagPresent(parentElement , element, childrenElements))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // Random element assertion
// if(!(isElementPresent(By.xpath("/HTML/BODY/CENTER/DIV/DIV[2]/DIV/DIV/DIV[2]/DIV/CENTER/A/IMG[@height=\"75px\" and @@src=\"images/000006_rryky_3.jpg\" and @width=\"75px\"]")))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // Random element assertion
}
// Randomly sampled element assertions for state 0, batch 5.
public void checkState0_RandAssertions5(){
element = new DOMElement("IMG", "100_0760November9tho
12", new ArrayList<String>(Arrays.asList("height=\"75px\"","src=\"images/000003_ncupw_3.jpg\"","width=\"75px\"")));
parentElement = new DOMElement("A", "", new ArrayList<String>(Arrays.asList("href=\".?p=3\"","title=\"100_0760: 26 KB, 1480 hits and rated 0 by 0 person\"")));
childrenElements.clear();
// if(!(isElementRegionFullPresent(parentElement , element, childrenElements))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // Random element assertion
element = new DOMElement("SPAN", "flowers[0]", new ArrayList<String>(Arrays.asList("class=\"categinfo\"")));
parentElement = new DOMElement("A", "", new ArrayList<String>(Arrays.asList("href=\".?c=5\"","title=\"contains photos of all the flowers ! \"")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "[0]", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagPresent(parentElement , element, childrenElements))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // Random element assertion
// if(!(isElementPresent(By.xpath("/HTML/BODY/CENTER/DIV/DIV/DIV/DIV/DIV[2]/DIV/SPAN[class=\"categeach\"]")))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // Random element assertion
element = new DOMElement("SPAN", "PhotoHits:11,410", new ArrayList<String>(Arrays.asList("class=\"dot\"")));
parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("class=\"item\"","style=\"line-height: 130%\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
// if(!(isElementRegionFullPresent(parentElement , element, childrenElements))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // Random element assertion
// if(!(isElementPresent(By.xpath("/HTML/BODY/CENTER/DIV/DIV/DIV/DIV/DIV[2]/DIV/SPAN/A/SPAN[[class=\"categinfo\"]")))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // Random element assertion
}
// Generated check method for state 80: the generator produced no "original"
// assertions for this state, so the body is intentionally empty.
public void checkState80_OriginalAssertions(){
}
// Generated check method for state 80: the generator produced no "reused"
// assertions for this state, so the body is intentionally empty.
public void checkState80_ReusedAssertions(){
}
/**
 * Tool-generated assertions for state 80. Each step describes an expected
 * element/parent/children region as DOMElement literals and checks the live
 * page; on the first failing check the method name is printed and the method
 * returns (soft-assertion style).
 */
public void checkState80_GeneratedAssertions(){
element = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("id=\"Granny\"")));
parentElement = new DOMElement("CENTER", "", new ArrayList<String>(Arrays.asList("")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList("id=\"sidecol\""))));
childrenElements.add(new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("id=\"maincol\""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagAttPresent(parentElement , element, childrenElements))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // generated assertion in case of RegionTagAttMatch
element = new DOMElement("DIV", "Thedefaultcategory", new ArrayList<String>(Arrays.asList("class=\"midInfo\"")));
parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("class=\"partmain\"","id=\"slideShow\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "Thedefaultcategory", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("BR", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagAttPresent(parentElement , element, childrenElements))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // generated assertion in case of RegionTagAttMatch
element = new DOMElement("DIV", "Thedefaultstory", new ArrayList<String>(Arrays.asList("class=\"midInfo\"")));
parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("class=\"partmain\"","id=\"slideShow\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "Thedefaultstory", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("BR", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagAttPresent(parentElement , element, childrenElements))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // generated assertion in case of RegionTagAttMatch
if(!(isElementPresent(By.cssSelector("div#Granny[id=\"Granny\"]")))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // generated assertion in case of ElementTagAttMatch
if(!(isElementPresent(By.cssSelector("div.midInfo[class=\"midInfo\"]")))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // generated assertion in case of ElementTagAttMatch
}
/**
 * Tool-generated "learned" (predicted-region) assertions for state 80. Each
 * step describes an expected element/parent/children region as DOMElement
 * literals and checks the live page; on the first failing check the method
 * name is printed and the method returns (soft-assertion style). The final
 * check was disabled by the generator.
 */
public void checkState80_LearnedAssertions(){
element = new DOMElement("DIV", "VisitorsOnline:1", new ArrayList<String>(Arrays.asList("class=\"item\"")));
parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("class=\"submenu\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("SPAN", "", new ArrayList<String>(Arrays.asList("class=\"dot\""))));
childrenElements.add(new DOMElement("#text", "VisitorsOnline:1", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagAttPresent(parentElement , element, childrenElements))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // predicted region assertion
element = new DOMElement("DIV", "StoriesPhotos[0]DefaultStory[6]", new ArrayList<String>(Arrays.asList("class=\"submenu\"")));
parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("class=\"part\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList("class=\"item\""))));
childrenElements.add(new DOMElement("DIV", "Photos[0]", new ArrayList<String>(Arrays.asList("class=\"item\""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("DIV", "DefaultStory[6]", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagAttPresent(parentElement , element, childrenElements))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // predicted region assertion
element = new DOMElement("DIV", "EtcEmailRSSAdminPagePoweredbyPhormer3.31", new ArrayList<String>(Arrays.asList("class=\"titlepart\"")));
parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("class=\"part\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("SPAN", "", new ArrayList<String>(Arrays.asList("class=\"reddot\""))));
childrenElements.add(new DOMElement("#text", "Poweredby", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagAttPresent(parentElement , element, childrenElements))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // predicted region assertion
element = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("class=\"partmain\"","id=\"slideShow\"")));
parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("id=\"maincolinner\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList("class=\"titlepart\""))));
childrenElements.add(new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("class=\"midInfo\""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList("class=\"start\""))));
childrenElements.add(new DOMElement("DIV", "Greenery!!", new ArrayList<String>(Arrays.asList("class=\"submenu\""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList("class=\"end\""))));
childrenElements.add(new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagAttPresent(parentElement , element, childrenElements))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // predicted region assertion
element = new DOMElement("SPAN", "[Edit]", new ArrayList<String>(Arrays.asList("class=\"pvTitleInfo\"","style=\"position: relative; top: -8px;\"")));
parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("class=\"titlepart\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "[", new ArrayList<String>(Arrays.asList("href=\"admin.php?page=stories&cmd=doEdt&sid=2#add\""))));
childrenElements.add(new DOMElement("A", "Edit", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "]", new ArrayList<String>(Arrays.asList(""))));
// if(!(isElementRegionTagAttPresent(parentElement , element, childrenElements))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // predicted region assertion
}
/**
 * Combined assertions for state 80. The region descriptions here repeat the
 * first generated checks and the first predicted-region checks verbatim;
 * on the first failing check the method name is printed and the method
 * returns (soft-assertion style).
 */
public void checkState80_AllAssertions(){
element = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("id=\"Granny\"")));
parentElement = new DOMElement("CENTER", "", new ArrayList<String>(Arrays.asList("")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList("id=\"sidecol\""))));
childrenElements.add(new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("id=\"maincol\""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagAttPresent(parentElement , element, childrenElements))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // generated assertion in case of RegionTagAttMatch
element = new DOMElement("DIV", "Thedefaultcategory", new ArrayList<String>(Arrays.asList("class=\"midInfo\"")));
parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("class=\"partmain\"","id=\"slideShow\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "Thedefaultcategory", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("BR", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagAttPresent(parentElement , element, childrenElements))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // generated assertion in case of RegionTagAttMatch
element = new DOMElement("DIV", "Thedefaultstory", new ArrayList<String>(Arrays.asList("class=\"midInfo\"")));
parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("class=\"partmain\"","id=\"slideShow\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "Thedefaultstory", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("BR", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagAttPresent(parentElement , element, childrenElements))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // generated assertion in case of RegionTagAttMatch
element = new DOMElement("DIV", "VisitorsOnline:1", new ArrayList<String>(Arrays.asList("class=\"item\"")));
parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("class=\"submenu\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("SPAN", "", new ArrayList<String>(Arrays.asList("class=\"dot\""))));
childrenElements.add(new DOMElement("#text", "VisitorsOnline:1", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagAttPresent(parentElement , element, childrenElements))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // predicted region assertion
element = new DOMElement("DIV", "StoriesPhotos[0]DefaultStory[6]", new ArrayList<String>(Arrays.asList("class=\"submenu\"")));
parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("class=\"part\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList("class=\"item\""))));
childrenElements.add(new DOMElement("DIV", "Photos[0]", new ArrayList<String>(Arrays.asList("class=\"item\""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("DIV", "DefaultStory[6]", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagAttPresent(parentElement , element, childrenElements))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // predicted region assertion
}
/**
 * Tool-generated random-element assertions for state 80 (batch 1). Each step
 * describes an expected element/parent/children region and checks the live
 * page; on the first failing check the method name is printed and the method
 * returns. Commented-out checks were disabled by the generator.
 */
public void checkState80_RandAssertions1(){
// if(!(isElementPresent(By.xpath("/HTML/BODY/CENTER/DIV/DIV/DIV/DIV/DIV[2]/DIV[2]/SPAN/A[@href=\".?c=5\" and @title=\"contains photos of all the flowers ! \"]")))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // Random element assertion
element = new DOMElement("A", "flowers[0]", new ArrayList<String>(Arrays.asList("href=\".?c=3\"","title=\"contains photos of all the flowers ! \"")));
parentElement = new DOMElement("SPAN", "", new ArrayList<String>(Arrays.asList("class=\"categeach\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("SPAN", "", new ArrayList<String>(Arrays.asList("class=\"dot\""))));
childrenElements.add(new DOMElement("#text", "flowers", new ArrayList<String>(Arrays.asList("class=\"categinfo\""))));
childrenElements.add(new DOMElement("SPAN", "[0]", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagPresent(parentElement , element, childrenElements))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // Random element assertion
element = new DOMElement("SPAN", "DefaultStory[6]", new ArrayList<String>(Arrays.asList("class=\"categeach\"")));
parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("class=\"item\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("A", "DefaultStory[6]", new ArrayList<String>(Arrays.asList("href=\".?s=1\"","title=\"The default story [2006/04/01]\""))));
// if(!(isElementRegionFullPresent(parentElement , element, childrenElements))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // Random element assertion
element = new DOMElement("SPAN", "Phorm", new ArrayList<String>(Arrays.asList("class=\"dot\"")));
parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("class=\"item\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
// if(!(isElementRegionFullPresent(parentElement , element, childrenElements))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // Random element assertion
element = new DOMElement("DIV", "MyLinksMyPhotoGalleryOtherLinksPhorm", new ArrayList<String>(Arrays.asList("class=\"part\"")));
parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("id=\"sidecolinner\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList("class=\"submenu\""))));
childrenElements.add(new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("class=\"titlepart\""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList("class=\"submenu\""))));
childrenElements.add(new DOMElement("DIV", "MyLinks", new ArrayList<String>(Arrays.asList("class=\"titlepart\""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList("class=\"submenu\""))));
childrenElements.add(new DOMElement("DIV", "MyPhotoGallery", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("DIV", "OtherLinks", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("DIV", "Phorm", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagPresent(parentElement , element, childrenElements))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // Random element assertion
}
/**
 * Tool-generated random-element assertions for state 80 (batch 2). Each step
 * describes an expected element/parent/children region and checks the live
 * page; on the first failing check the method name is printed and the method
 * returns. Commented-out checks were disabled by the generator.
 */
public void checkState80_RandAssertions2(){
element = new DOMElement("A", "MyPhotoGallery...containsmyphotos!", new ArrayList<String>(Arrays.asList("href=\".\"")));
parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("class=\"topPhorm\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "MyPhotoGallery", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagPresent(parentElement , element, childrenElements))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // Random element assertion
// if(!(isElementPresent(By.xpath("/HTML/BODY/CENTER/DIV/DIV/DIV/DIV[[class=\"part\"]")))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // Random element assertion
element = new DOMElement("DIV", "TodayHits:2", new ArrayList<String>(Arrays.asList("class=\"item\"")));
parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("class=\"submenu\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("SPAN", "", new ArrayList<String>(Arrays.asList("class=\"dot\""))));
childrenElements.add(new DOMElement("#text", "TodayHits:2", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagAttPresent(parentElement , element, childrenElements))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // Random element assertion
// if(!(isElementPresent(By.xpath("/HTML/BODY/CENTER/DIV/DIV[2]/DIV/DIV/DIV/SPAN[4]/A[@class=\"theTitleA\" and @href=\".?s=2\"]")))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // Random element assertion
element = new DOMElement("BR", "", new ArrayList<String>(Arrays.asList("")));
parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("class=\"submenu\"")));
childrenElements.clear();
if(!(isElementRegionTagPresent(parentElement , element, childrenElements))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // Random element assertion
}
/**
 * Tool-generated random-element assertions for state 80 (batch 3). Each step
 * describes an expected element/parent/children region and checks the live
 * page; on the first failing check the method name is printed and the method
 * returns. Commented-out checks were disabled by the generator.
 */
public void checkState80_RandAssertions3(){
element = new DOMElement("SPAN", "[Edit]", new ArrayList<String>(Arrays.asList("class=\"pvTitleInfo\"","style=\"position: relative; top: -8px;\"")));
parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("class=\"titlepart\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "[", new ArrayList<String>(Arrays.asList("href=\"admin.php?page=stories&cmd=doEdt&sid=2#add\""))));
childrenElements.add(new DOMElement("A", "Edit", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "]", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagPresent(parentElement , element, childrenElements))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // Random element assertion
element = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("class=\"part\"")));
parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("id=\"sidecolinner\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList("class=\"titlepart\""))));
childrenElements.add(new DOMElement("DIV", "Categories", new ArrayList<String>(Arrays.asList("class=\"submenu\""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagAttPresent(parentElement , element, childrenElements))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // Random element assertion
// if(!(isElementPresent(By.xpath("/HTML/BODY/CENTER/DIV/DIV[2]/DIV/DIV/DIV[[class=\"end\"]")))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // Random element assertion
// if(!(isElementPresent(By.xpath("/HTML/BODY/CENTER/DIV/DIV[id=\"sidecol\"]")))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // Random element assertion
// if(!(isElementPresent(By.xpath("/HTML/BODY/CENTER/DIV/DIV/DIV/DIV/DIV[2]/DIV[4]/SPAN[class=\"categeach\"]")))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // Random element assertion
}
/**
 * Tool-generated random-element assertions for state 80 (batch 4). Each step
 * describes an expected element/parent/children region and checks the live
 * page; on the first failing check the method name is printed and the method
 * returns. Commented-out checks were disabled by the generator. Note the last
 * region setup populates childrenElements but its check is disabled, so that
 * data is never used.
 */
public void checkState80_RandAssertions4(){
element = new DOMElement("SPAN", "Phormer3.31", new ArrayList<String>(Arrays.asList("class=\"dot\"")));
parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("class=\"item\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagAttPresent(parentElement , element, childrenElements))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // Random element assertion
// if(!(isElementPresent(By.xpath("/HTML/BODY/CENTER/DIV/DIV/DIV/DIV/DIV[2]/DIV[2]/SPAN/A/SPAN[[class=\"categinfo\"]")))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // Random element assertion
// if(!(isElementPresent(By.xpath("/HTML/BODY/CENTER/DIV/DIV/DIV/DIV[5]/BR[]")))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // Random element assertion
element = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("class=\"submenu\"")));
parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("class=\"part\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList("class=\"item\""))));
childrenElements.add(new DOMElement("DIV", "DefaultCategory[6]", new ArrayList<String>(Arrays.asList("class=\"item\""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList("class=\"item\""))));
childrenElements.add(new DOMElement("DIV", "flowers[0]", new ArrayList<String>(Arrays.asList("class=\"item\""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList("class=\"item\""))));
childrenElements.add(new DOMElement("DIV", "flowers[0]", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("DIV", "flowers[0]", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("DIV", "flowers[0]", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
// if(!(isElementRegionFullPresent(parentElement , element, childrenElements))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // Random element assertion
// if(!(isElementPresent(By.xpath("/HTML/BODY/DIV[[style=\"clear:both;\"]")))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // Random element assertion
}
/**
 * Tool-generated random-element assertions for state 80 (batch 5). Each step
 * describes an expected element/parent/children region and checks the live
 * page; on the first failing check the method name is printed and the method
 * returns. Commented-out checks were disabled by the generator.
 */
public void checkState80_RandAssertions5(){
element = new DOMElement("SPAN", "ThisMonth:2", new ArrayList<String>(Arrays.asList("class=\"dot\"")));
parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("class=\"item\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagPresent(parentElement , element, childrenElements))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // Random element assertion
element = new DOMElement("DIV", "flowers[0]", new ArrayList<String>(Arrays.asList("class=\"item\"")));
parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("class=\"submenu\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList("class=\"categeach\""))));
childrenElements.add(new DOMElement("SPAN", "flowers[0]", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
if(!(isElementRegionTagAttPresent(parentElement , element, childrenElements))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // Random element assertion
if(!(isElementPresent(By.xpath("/HTML/BODY/CENTER/DIV/DIV/DIV/DIV[5]/DIV[4]/DIV/A[@href=\"http://p.horm.org/er\" and @title=\"Rephorm Your Phormer Pharm!\"]")))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // Random element assertion
element = new DOMElement("SPAN", "PhotoHits:11,409", new ArrayList<String>(Arrays.asList("class=\"dot\"")));
parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("class=\"item\"","style=\"line-height: 130%\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
// if(!(isElementRegionFullPresent(parentElement , element, childrenElements))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // Random element assertion
element = new DOMElement("SPAN", "MyLinks", new ArrayList<String>(Arrays.asList("class=\"reddot\"")));
parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("class=\"titlepart\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
// if(!(isElementRegionFullPresent(parentElement , element, childrenElements))){System.out.println(Thread.currentThread().getStackTrace()[1].getMethodName()); return;} // Random element assertion
}
/*
* Auxiliary methods
*/
/**
 * Reports whether at least one element matching the given locator exists on
 * the current page, using the find-and-catch idiom: WebDriver throws
 * {@link NoSuchElementException} when no match is found.
 *
 * @param locator the Selenium locator to probe
 * @return true if a matching element exists, false otherwise
 */
private boolean isElementPresent(By locator) {
    boolean found;
    try {
        driver.findElement(locator);
        found = true;
    } catch (NoSuchElementException absent) {
        found = false;
    }
    return found;
}
/**
 * Checks whether the current page contains an element whose tag name equals
 * {@code element}'s tag, whose parent node has {@code parent}'s tag name, and
 * whose set of child node names equals the tag names of {@code children}.
 * Only tag names are compared — attributes and text content are ignored
 * (children are compared as unordered sets, so order and duplicates are not
 * distinguished).
 *
 * @param parent   expected parent description (only its tag name is used)
 * @param element  expected element description (only its tag name is used)
 * @param children expected children (only their tag names are used)
 * @return true if a matching region exists; false otherwise, including when
 *         parsing the page source throws IOException (which is logged)
 */
private boolean isElementRegionTagPresent(DOMElement parent, DOMElement element, ArrayList<DOMElement> children) {
    try {
        String source = driver.getPageSource();
        Document dom = DomUtils.asDocument(source);
        // The expected child-tag set is loop-invariant: build it once up front
        // instead of once per candidate element.
        HashSet<String> childrenTagNameToTest = new HashSet<String>();
        for (int k = 0; k < children.size(); k++)
            childrenTagNameToTest.add(children.get(k).getTagName());
        NodeList nodeList = dom.getElementsByTagName(element.getTagName());
        for (int i = 0; i < nodeList.getLength(); i++) {
            org.w3c.dom.Element sourceElement = (org.w3c.dom.Element) nodeList.item(i);
            // check parent node's tag name (attributes are deliberately not
            // compared in this variant)
            String parentTagName = sourceElement.getParentNode().getNodeName();
            if (!parentTagName.equals(parent.getTagName()))
                continue;
            // check children nodes' tag names as an unordered set
            HashSet<String> childrenTagNameFromDOM = new HashSet<String>();
            for (int j = 0; j < sourceElement.getChildNodes().getLength(); j++)
                childrenTagNameFromDOM.add(sourceElement.getChildNodes().item(j).getNodeName());
            if (!childrenTagNameToTest.equals(childrenTagNameFromDOM))
                continue;
            return true;
        }
    } catch (IOException e) {
        e.printStackTrace();
    }
    return false;
}
/**
 * Checks whether the current page contains an element matching {@code element}
 * by tag name AND attribute set, whose parent matches {@code parent} by tag
 * name and attributes, and whose children match {@code children} by tag-name
 * set and by the set of per-child attribute sets. Text content is NOT
 * compared (that stricter variant is isElementRegionFullPresent).
 * Attributes are compared as unordered sets of name="value" strings; children
 * are compared as unordered sets, so ordering and duplicates are not
 * distinguished.
 *
 * @return true if a matching region exists; false otherwise, including when
 *         parsing the page source throws IOException (which is logged)
 */
private boolean isElementRegionTagAttPresent(DOMElement parent, DOMElement element, ArrayList<DOMElement> children) {
try {
String source = driver.getPageSource();
Document dom = DomUtils.asDocument(source);
NodeList nodeList = dom.getElementsByTagName(element.getTagName());
org.w3c.dom.Element sourceElement = null;
// Try every element with the expected tag; each filter below skips to the
// next candidate ("continue") on the first mismatch.
for (int i = 0; i < nodeList.getLength(); i++){
// check node's attributes
sourceElement = (org.w3c.dom.Element) nodeList.item(i);
NamedNodeMap elementAttList = sourceElement.getAttributes();
HashSet<String> elemetAtts = new HashSet<String>();
for (int j = 0; j < elementAttList.getLength(); j++)
elemetAtts.add(elementAttList.item(j).getNodeName() + "=\"" + elementAttList.item(j).getNodeValue() + "\"");
if (!element.getAttributes().equals(elemetAtts))
continue;
// check parent node's tag and attributes
String parentTagName = sourceElement.getParentNode().getNodeName();
if (!parentTagName.equals(parent.getTagName()))
continue;
NamedNodeMap parentAttList = sourceElement.getParentNode().getAttributes();
HashSet<String> parentAtts = new HashSet<String>();
for (int j = 0; j < parentAttList.getLength(); j++)
parentAtts.add(parentAttList.item(j).getNodeName() + "=\"" + parentAttList.item(j).getNodeValue() + "\"");
if (!parent.getAttributes().equals(parentAtts))
continue;
// check children nodes' tags (unordered set comparison)
HashSet<String> childrenTagNameFromDOM = new HashSet<String>();
for (int j=0; j<sourceElement.getChildNodes().getLength();j++)
childrenTagNameFromDOM.add(sourceElement.getChildNodes().item(j).getNodeName());
HashSet<String> childrenTagNameToTest = new HashSet<String>();
for (int k=0; k<children.size();k++)
childrenTagNameToTest.add(children.get(k).getTagName());
if (!childrenTagNameToTest.equals(childrenTagNameFromDOM))
continue;
// check children nodes' attributes (set of per-child attribute sets;
// text nodes have no attributes — getAttributes() returns null — and
// contribute an empty set)
HashSet<HashSet<String>> childrenAttsFromDOM = new HashSet<HashSet<String>>();
for (int j=0; j<sourceElement.getChildNodes().getLength();j++){
NamedNodeMap childAttListFromDOM = sourceElement.getChildNodes().item(j).getAttributes();
HashSet<String> childAtts = new HashSet<String>();
if (childAttListFromDOM!=null)
for (int k = 0; k < childAttListFromDOM.getLength(); k++)
childAtts.add(childAttListFromDOM.item(k).getNodeName() + "=\"" + childAttListFromDOM.item(k).getNodeValue() + "\"");
childrenAttsFromDOM.add(childAtts);
}
HashSet<HashSet<String>> childrenAttsToTest = new HashSet<HashSet<String>>();
for (int k=0; k<children.size();k++)
childrenAttsToTest.add(children.get(k).getAttributes());
if (!childrenAttsToTest.equals(childrenAttsFromDOM))
continue;
return true;
}
} catch (IOException e) {
e.printStackTrace();
}
return false;
}
/**
 * Strictest region check: the page must contain an element matching
 * {@code element} by tag name, attribute set AND normalized text content,
 * whose parent matches {@code parent} on the same three criteria, and whose
 * children match {@code children} by normalized-text set, tag-name set and
 * set of per-child attribute sets.
 * Text normalization strips \n, \r, spaces, tabs and all non-ASCII
 * characters before comparison; attributes are compared as unordered sets of
 * name="value" strings; children are compared as unordered sets, so ordering
 * and duplicates are not distinguished.
 *
 * @return true if a matching region exists; false otherwise, including when
 *         parsing the page source throws IOException (which is logged)
 */
private boolean isElementRegionFullPresent(DOMElement parent, DOMElement element, ArrayList<DOMElement> children) {
try {
String source = driver.getPageSource();
Document dom = DomUtils.asDocument(source);
NodeList nodeList = dom.getElementsByTagName(element.getTagName());
org.w3c.dom.Element sourceElement = null;
// Try every element with the expected tag; each filter below skips to the
// next candidate ("continue") on the first mismatch.
for (int i = 0; i < nodeList.getLength(); i++){
// check node's text and attributes
sourceElement = (org.w3c.dom.Element) nodeList.item(i);
if (!element.getTextContent().equals(sourceElement.getTextContent().replace("\n", "").replace("\r", "").replace(" ", "").replace("\t", "").replaceAll("[^\\x00-\\x7F]", "")))
continue;
NamedNodeMap elementAttList = sourceElement.getAttributes();
HashSet<String> elemetAtts = new HashSet<String>();
for (int j = 0; j < elementAttList.getLength(); j++)
elemetAtts.add(elementAttList.item(j).getNodeName() + "=\"" + elementAttList.item(j).getNodeValue() + "\"");
if (!element.getAttributes().equals(elemetAtts))
continue;
// check parent node's text, tag and attributes
String parentTagName = sourceElement.getParentNode().getNodeName();
if (!parent.getTextContent().equals(sourceElement.getParentNode().getTextContent().replace("\n", "").replace("\r", "").replace(" ", "").replace("\t", "").replaceAll("[^\\x00-\\x7F]", "")))
continue;
if (!parentTagName.equals(parent.getTagName()))
continue;
NamedNodeMap parentAttList = sourceElement.getParentNode().getAttributes();
HashSet<String> parentAtts = new HashSet<String>();
for (int j = 0; j < parentAttList.getLength(); j++)
parentAtts.add(parentAttList.item(j).getNodeName() + "=\"" + parentAttList.item(j).getNodeValue() + "\"");
if (!parent.getAttributes().equals(parentAtts))
continue;
// check children nodes' text (unordered set of normalized strings)
HashSet<String> childrenTextFromDOM = new HashSet<String>();
for (int j=0; j<sourceElement.getChildNodes().getLength();j++)
childrenTextFromDOM.add(sourceElement.getChildNodes().item(j).getTextContent().replace("\n", "").replace("\r", "").replace(" ", "").replace("\t", "").replaceAll("[^\\x00-\\x7F]", ""));
HashSet<String> childrenTextToTest = new HashSet<String>();
for (int k=0; k<children.size();k++)
childrenTextToTest.add(children.get(k).getTextContent());
if (!childrenTextToTest.equals(childrenTextFromDOM))
continue;
// check children nodes' tags (unordered set comparison)
HashSet<String> childrenTagNameFromDOM = new HashSet<String>();
for (int j=0; j<sourceElement.getChildNodes().getLength();j++)
childrenTagNameFromDOM.add(sourceElement.getChildNodes().item(j).getNodeName());
HashSet<String> childrenTagNameToTest = new HashSet<String>();
for (int k=0; k<children.size();k++)
childrenTagNameToTest.add(children.get(k).getTagName());
if (!childrenTagNameToTest.equals(childrenTagNameFromDOM))
continue;
// check children nodes' attributes (set of per-child attribute sets;
// text nodes have no attributes — getAttributes() returns null — and
// contribute an empty set)
HashSet<HashSet<String>> childrenAttsFromDOM = new HashSet<HashSet<String>>();
for (int j=0; j<sourceElement.getChildNodes().getLength();j++){
NamedNodeMap childAttListFromDOM = sourceElement.getChildNodes().item(j).getAttributes();
HashSet<String> childAtts = new HashSet<String>();
if (childAttListFromDOM!=null)
for (int k = 0; k < childAttListFromDOM.getLength(); k++)
childAtts.add(childAttListFromDOM.item(k).getNodeName() + "=\"" + childAttListFromDOM.item(k).getNodeValue() + "\"");
childrenAttsFromDOM.add(childAtts);
}
HashSet<HashSet<String>> childrenAttsToTest = new HashSet<HashSet<String>>();
for (int k=0; k<children.size();k++)
childrenAttsToTest.add(children.get(k).getAttributes());
if (!childrenAttsToTest.equals(childrenAttsFromDOM))
continue;
return true;
}
} catch (IOException e) {
e.printStackTrace();
}
return false;
}
/**
 * Reports whether a JavaScript alert is currently displayed.
 *
 * Probing via {@code switchTo().alert()} is how WebDriver detects an
 * alert; a missing alert surfaces as {@link NoAlertPresentException}.
 */
private boolean isAlertPresent() {
    boolean alertShown;
    try {
        driver.switchTo().alert();
        alertShown = true;
    } catch (NoAlertPresentException e) {
        alertShown = false;
    }
    return alertShown;
}
/**
 * Captures the current alert's message, then closes it — accepting when
 * {@code acceptNextAlert} is set, dismissing otherwise. The flag is always
 * reset to {@code true} afterwards, even if handling the alert throws.
 *
 * @return the alert's text
 */
private String closeAlertAndGetItsText() {
    try {
        Alert activeAlert = driver.switchTo().alert();
        String message = activeAlert.getText();
        if (acceptNextAlert) {
            activeAlert.accept();
        } else {
            activeAlert.dismiss();
        }
        return message;
    } finally {
        acceptNextAlert = true;
    }
}
/**
 * Lightweight value object describing a DOM element for comparison against
 * the page source: a tag name, a (normalized) text content string, and a
 * set of attribute strings of the form {@code name="value"}.
 */
public class DOMElement {
    private String tagName;
    private String textContent;
    // Attribute strings ("name=\"value\""); a HashSet so comparisons ignore order.
    private HashSet<String> attributes = new HashSet<String>();

    /**
     * @param tagName     the element's tag name
     * @param textContent the element's normalized text content
     * @param attributes  attribute strings; an empty list, or a list whose
     *                    first entry is the empty string, means "no attributes"
     */
    public DOMElement(String tagName, String textContent, ArrayList<String> attributes){
        this.tagName = tagName;
        this.textContent = textContent;
        // Bug fix: the original used reference comparison (attributes.get(0) != ""),
        // which is true for any non-interned empty string, and it threw
        // IndexOutOfBoundsException for an empty list. Compare content and
        // guard against the empty list instead.
        if (!attributes.isEmpty() && !attributes.get(0).isEmpty()) {
            this.attributes.addAll(attributes);
        }
    }

    public String getTagName() {
        return tagName;
    }

    public String getTextContent() {
        return textContent;
    }

    public HashSet<String> getAttributes() {
        return attributes;
    }
}
/**
 * Mutates the page's DOM by executing the JavaScript snippet generated by
 * Testilizer for the given state, then records which DOM element was
 * (randomly) selected for the active mutation operator/state pair.
 *
 * @param stateID id of the state whose mutation script should run
 */
private void mutateDOMTree(int stateID){
    String mutationScript = com.crawljax.plugins.testilizer.Testilizer.mutateDOMTreeCode(stateID);
    if (mutationScript == null) {
        // No mutation script available for this state; nothing to do.
        return;
    }
    long selectedElementId = (long) ((JavascriptExecutor) driver).executeScript(mutationScript);
    int operatorCode = com.crawljax.plugins.testilizer.Testilizer.MutationOperatorCode;
    int mutatedState = com.crawljax.plugins.testilizer.Testilizer.StateToBeMutated;
    com.crawljax.plugins.testilizer.Testilizer.SelectedRandomElementInDOM[operatorCode][mutatedState]
        = (int) selectedElementId;
}
}
| |
/*
* Copyright (c) 2004-2022, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.hisp.dhis.dataapproval;
import static com.google.common.collect.Sets.newHashSet;
import static org.hisp.dhis.dataapproval.DataApprovalAction.ACCEPT;
import static org.hisp.dhis.dataapproval.DataApprovalAction.APPROVE;
import static org.hisp.dhis.dataapproval.DataApprovalAction.UNACCEPT;
import static org.hisp.dhis.dataapproval.DataApprovalAction.UNAPPROVE;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.util.Date;
import java.util.List;
import java.util.Set;
import org.hisp.dhis.TransactionalIntegrationTest;
import org.hisp.dhis.category.Category;
import org.hisp.dhis.category.CategoryCombo;
import org.hisp.dhis.category.CategoryOption;
import org.hisp.dhis.category.CategoryOptionCombo;
import org.hisp.dhis.category.CategoryOptionGroup;
import org.hisp.dhis.category.CategoryOptionGroupSet;
import org.hisp.dhis.category.CategoryService;
import org.hisp.dhis.common.BaseIdentifiableObject;
import org.hisp.dhis.common.CodeGenerator;
import org.hisp.dhis.common.IdentifiableObjectManager;
import org.hisp.dhis.mock.MockCurrentUserService;
import org.hisp.dhis.organisationunit.OrganisationUnit;
import org.hisp.dhis.organisationunit.OrganisationUnitService;
import org.hisp.dhis.period.MonthlyPeriodType;
import org.hisp.dhis.period.Period;
import org.hisp.dhis.period.PeriodService;
import org.hisp.dhis.period.PeriodType;
import org.hisp.dhis.user.CurrentUserService;
import org.hisp.dhis.user.CurrentUserServiceTarget;
import org.hisp.dhis.user.User;
import org.hisp.dhis.user.UserGroup;
import org.hisp.dhis.user.UserGroupAccessService;
import org.hisp.dhis.user.UserGroupService;
import org.hisp.dhis.user.UserRole;
import org.hisp.dhis.user.UserService;
import org.hisp.dhis.user.sharing.UserGroupAccess;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import com.google.common.collect.Sets;
/**
* @author Jim Grace
*/
// FIXME refactor this test to use mocks
class DataApprovalAuditServiceTest extends TransactionalIntegrationTest
{
    private static final String ACCESS_NONE = "--------";

    private static final String ACCESS_READ = "r-------";

    @Autowired
    private DataApprovalAuditService dataApprovalAuditService;

    @Autowired
    private DataApprovalAuditStore dataApprovalAuditStore;

    @Autowired
    private DataApprovalLevelService dataApprovalLevelService;

    @Autowired
    private DataApprovalService dataApprovalService;

    @Autowired
    private PeriodService periodService;

    @Autowired
    private CategoryService categoryService;

    @Autowired
    private UserService userService;

    @Autowired
    protected UserGroupAccessService userGroupAccessService;

    @Autowired
    protected UserGroupService userGroupService;

    @Autowired
    protected IdentifiableObjectManager identifiableObjectManager;

    @Autowired
    private CurrentUserService currentUserService;

    @Autowired
    private OrganisationUnitService organisationUnitService;

    // -------------------------------------------------------------------------
    // Supporting data
    // -------------------------------------------------------------------------

    private DataApprovalLevel level1;

    private DataApprovalLevel level2;

    private DataApprovalLevel level3;

    private DataApprovalWorkflow workflowA;

    private DataApprovalWorkflow workflowB;

    private Period periodA;

    private Period periodB;

    // sourceB is a child org unit of sourceA
    private OrganisationUnit sourceA;

    private OrganisationUnit sourceB;

    private CurrentUserService superUserService;

    private CurrentUserService userAService;

    private CurrentUserService userBService;

    private CurrentUserService userCService;

    private CurrentUserService userDService;

    // Owner of the privately-shared metadata objects
    private User userZ;

    private CategoryOption optionA;

    private CategoryOption optionB;

    private Category categoryA;

    private CategoryCombo categoryComboA;

    private CategoryOptionCombo optionComboA;

    private CategoryOptionCombo optionComboB;

    private CategoryOptionCombo optionComboC;

    private CategoryOptionGroup optionGroupA;

    private CategoryOptionGroup optionGroupB;

    private CategoryOptionGroupSet optionGroupSetB;

    private Date dateA;

    private Date dateB;

    // Audit naming: audit<workflow><optionCombo><level>, e.g. auditBA2 is
    // workflowB / optionComboA / level2.
    private DataApprovalAudit auditAA1;

    private DataApprovalAudit auditAB1;

    private DataApprovalAudit auditAC1;

    private DataApprovalAudit auditBA2;

    private DataApprovalAudit auditBB2;

    private DataApprovalAudit auditBC2;

    private DataApprovalAudit auditBA3;

    private DataApprovalAudit auditBB3;

    private DataApprovalAudit auditBC3;

    // -------------------------------------------------------------------------
    // Set up/tear down helper methods
    // -------------------------------------------------------------------------

    /**
     * Builds a mock {@link CurrentUserService} for the given user name and
     * org unit, persisting the backing user and its roles.
     */
    private CurrentUserService getMockCurrentUserService( String userName, boolean superUserFlag,
        OrganisationUnit orgUnit, String... auths )
    {
        CurrentUserService mockCurrentUserService = new MockCurrentUserService( superUserFlag,
            Sets.newHashSet( orgUnit ), Sets.newHashSet( orgUnit ), auths );
        User user = mockCurrentUserService.getCurrentUser();
        user.setFirstName( "Test" );
        user.setSurname( userName );
        user.setUsername( userName );
        for ( UserRole role : user.getUserRoles() )
        {
            // Give the role an arbitrary name
            role.setName( CodeGenerator.generateUid() );
            userService.addUserRole( role );
        }
        userService.addUser( user );
        return mockCurrentUserService;
    }

    /**
     * Creates and persists a user group with the given name and members.
     */
    private UserGroup getUserGroup( String userGroupName, Set<User> users )
    {
        UserGroup userGroup = new UserGroup();
        userGroup.setAutoFields();
        userGroup.setName( userGroupName );
        userGroup.setMembers( users );
        userGroupService.addUserGroup( userGroup );
        return userGroup;
    }

    /**
     * Removes public access from the object, granting read access only to the
     * given user groups (owned by userZ so sharing resolution works).
     */
    private void setPrivateAccess( BaseIdentifiableObject object, UserGroup... userGroups )
    {
        object.getSharing().setPublicAccess( ACCESS_NONE );
        // Needed for sharing to work
        object.setOwner( userZ.getUid() );
        object.getSharing().setOwner( userZ );
        for ( UserGroup group : userGroups )
        {
            object.getSharing().addUserGroupAccess( new UserGroupAccess( group, ACCESS_READ ) );
        }
        identifiableObjectManager.updateNoAcl( object );
    }

    // -------------------------------------------------------------------------
    // Set up/tear down
    // -------------------------------------------------------------------------

    @Override
    public boolean emptyDatabaseAfterTest()
    {
        return true;
    }

    @Override
    public void setUpTest()
        throws Exception
    {
        // ---------------------------------------------------------------------
        // Add supporting data
        // ---------------------------------------------------------------------
        PeriodType periodType = PeriodType.getPeriodTypeByName( "Monthly" );
        periodA = createPeriod( new MonthlyPeriodType(), getDate( 2017, 1, 1 ), getDate( 2017, 1, 31 ) );
        periodB = createPeriod( new MonthlyPeriodType(), getDate( 2018, 1, 1 ), getDate( 2018, 1, 31 ) );
        periodService.addPeriod( periodA );
        periodService.addPeriod( periodB );
        sourceA = createOrganisationUnit( 'A' );
        sourceB = createOrganisationUnit( 'B', sourceA );
        organisationUnitService.addOrganisationUnit( sourceA );
        organisationUnitService.addOrganisationUnit( sourceB );
        superUserService = getMockCurrentUserService( "SuperUser", true, sourceA, UserRole.AUTHORITY_ALL );
        userAService = getMockCurrentUserService( "UserA", false, sourceA );
        userBService = getMockCurrentUserService( "UserB", false, sourceB );
        userCService = getMockCurrentUserService( "UserC", false, sourceB );
        userDService = getMockCurrentUserService( "UserD", false, sourceB );
        userZ = createUser( 'Z' );
        userService.addUser( userZ );
        UserGroup userGroupC = getUserGroup( "UserGroupA", Sets.newHashSet( userCService.getCurrentUser() ) );
        UserGroup userGroupD = getUserGroup( "UserGroupB", Sets.newHashSet( userDService.getCurrentUser() ) );
        userCService.getCurrentUser().getGroups().add( userGroupC );
        userDService.getCurrentUser().getGroups().add( userGroupD );
        optionA = new CategoryOption( "CategoryOptionA" );
        optionB = new CategoryOption( "CategoryOptionB" );
        categoryService.addCategoryOption( optionA );
        categoryService.addCategoryOption( optionB );
        categoryA = createCategory( 'A', optionA, optionB );
        categoryService.addCategory( categoryA );
        categoryComboA = createCategoryCombo( 'A', categoryA );
        categoryService.addCategoryCombo( categoryComboA );
        optionComboA = createCategoryOptionCombo( categoryComboA, optionA );
        optionComboB = createCategoryOptionCombo( categoryComboA, optionB );
        optionComboC = createCategoryOptionCombo( categoryComboA, optionA, optionB );
        categoryService.addCategoryOptionCombo( optionComboA );
        categoryService.addCategoryOptionCombo( optionComboB );
        categoryService.addCategoryOptionCombo( optionComboC );
        optionGroupA = createCategoryOptionGroup( 'A', optionA );
        optionGroupB = createCategoryOptionGroup( 'B', optionB );
        categoryService.saveCategoryOptionGroup( optionGroupA );
        categoryService.saveCategoryOptionGroup( optionGroupB );
        optionGroupSetB = new CategoryOptionGroupSet( "OptionGroupSetB" );
        categoryService.saveCategoryOptionGroupSet( optionGroupSetB );
        optionGroupSetB.addCategoryOptionGroup( optionGroupA );
        optionGroupSetB.addCategoryOptionGroup( optionGroupB );
        optionGroupA.getGroupSets().add( optionGroupSetB );
        optionGroupB.getGroupSets().add( optionGroupSetB );
        // Restrict visibility: optionA readable by userC's group, optionGroupB
        // readable by userD's group; optionB and optionGroupA are private.
        setPrivateAccess( optionA, userGroupC );
        setPrivateAccess( optionB );
        setPrivateAccess( optionGroupA );
        setPrivateAccess( optionGroupB, userGroupD );
        categoryService.updateCategoryOptionGroupSet( optionGroupSetB );
        categoryService.updateCategoryOptionGroup( optionGroupA );
        categoryService.updateCategoryOptionGroup( optionGroupB );
        userCService.getCurrentUser().getCatDimensionConstraints().add( categoryA );
        userDService.getCurrentUser().getCogsDimensionConstraints().add( optionGroupSetB );
        dateA = getDate( 2017, 1, 1 );
        dateB = getDate( 2018, 1, 1 );
        // level2 and level3 share org unit level 2; level3 is additionally
        // constrained by the category option group set.
        level1 = new DataApprovalLevel( "01", 1, null );
        level2 = new DataApprovalLevel( "02", 2, null );
        level3 = new DataApprovalLevel( "03", 2, optionGroupSetB );
        dataApprovalLevelService.addDataApprovalLevel( level1 );
        dataApprovalLevelService.addDataApprovalLevel( level2 );
        dataApprovalLevelService.addDataApprovalLevel( level3 );
        workflowA = new DataApprovalWorkflow( "workflowA", periodType, newHashSet( level1 ) );
        workflowB = new DataApprovalWorkflow( "workflowB", periodType, newHashSet( level1, level2, level3 ) );
        dataApprovalService.addWorkflow( workflowA );
        dataApprovalService.addWorkflow( workflowB );
        DataApproval approvalAA1 = new DataApproval( level1, workflowA, periodA, sourceA, optionComboA, false, dateA,
            userZ );
        DataApproval approvalAB1 = new DataApproval( level1, workflowA, periodA, sourceA, optionComboB, false, dateA,
            userZ );
        DataApproval approvalAC1 = new DataApproval( level1, workflowA, periodA, sourceA, optionComboC, false, dateA,
            userZ );
        DataApproval approvalBA2 = new DataApproval( level2, workflowB, periodB, sourceB, optionComboA, false, dateB,
            userZ );
        DataApproval approvalBB2 = new DataApproval( level2, workflowB, periodB, sourceB, optionComboB, false, dateB,
            userZ );
        DataApproval approvalBC2 = new DataApproval( level2, workflowB, periodB, sourceB, optionComboC, false, dateB,
            userZ );
        DataApproval approvalBA3 = new DataApproval( level3, workflowB, periodB, sourceB, optionComboA, false, dateB,
            userZ );
        DataApproval approvalBB3 = new DataApproval( level3, workflowB, periodB, sourceB, optionComboB, false, dateB,
            userZ );
        DataApproval approvalBC3 = new DataApproval( level3, workflowB, periodB, sourceB, optionComboC, false, dateB,
            userZ );
        auditAA1 = new DataApprovalAudit( approvalAA1, APPROVE );
        auditAB1 = new DataApprovalAudit( approvalAB1, UNAPPROVE );
        auditAC1 = new DataApprovalAudit( approvalAC1, ACCEPT );
        auditBA2 = new DataApprovalAudit( approvalBA2, UNACCEPT );
        auditBB2 = new DataApprovalAudit( approvalBB2, APPROVE );
        auditBC2 = new DataApprovalAudit( approvalBC2, UNAPPROVE );
        auditBA3 = new DataApprovalAudit( approvalBA3, ACCEPT );
        auditBB3 = new DataApprovalAudit( approvalBB3, UNACCEPT );
        auditBC3 = new DataApprovalAudit( approvalBC3, APPROVE );
        dataApprovalAuditStore.save( auditAA1 );
        dataApprovalAuditStore.save( auditAB1 );
        dataApprovalAuditStore.save( auditAC1 );
        dataApprovalAuditStore.save( auditBA2 );
        dataApprovalAuditStore.save( auditBB2 );
        dataApprovalAuditStore.save( auditBC2 );
        dataApprovalAuditStore.save( auditBA3 );
        dataApprovalAuditStore.save( auditBB3 );
        dataApprovalAuditStore.save( auditBC3 );
    }

    @Override
    public void tearDownTest()
    {
        // Restore the real CurrentUserService on all services that a test may
        // have pointed at a mock.
        setDependency( CurrentUserServiceTarget.class, CurrentUserServiceTarget::setCurrentUserService,
            currentUserService, dataApprovalLevelService, dataApprovalAuditService, dataApprovalAuditStore );
    }

    // -------------------------------------------------------------------------
    // Test helper methods
    // -------------------------------------------------------------------------

    /**
     * Points the approval services at the given mock user service so each test
     * can run as a specific user.
     */
    private void setMockUserService( CurrentUserService mockUserService )
    {
        setDependency( CurrentUserServiceTarget.class, CurrentUserServiceTarget::setCurrentUserService, mockUserService,
            dataApprovalLevelService, dataApprovalAuditService, dataApprovalAuditStore );
    }

    // -------------------------------------------------------------------------
    // DataApprovalAudit
    // -------------------------------------------------------------------------

    @Test
    void testDeleteDataApprovalAudits()
    {
        DataApprovalAuditQueryParams params = new DataApprovalAuditQueryParams();
        List<DataApprovalAudit> audits;
        setMockUserService( userAService );
        // Deleting sourceB's audits leaves only the three sourceA audits.
        dataApprovalAuditService.deleteDataApprovalAudits( sourceB );
        audits = dataApprovalAuditService.getDataApprovalAudits( params );
        assertEquals( 3, audits.size() );
        assertTrue( audits.contains( auditAA1 ) );
        assertTrue( audits.contains( auditAB1 ) );
        assertTrue( audits.contains( auditAC1 ) );
    }

    @Test
    void testGetDataApprovalAudits() // renamed from TestGetDataApprovalAudits to follow Java method naming
    {
        DataApprovalAuditQueryParams params = new DataApprovalAuditQueryParams();
        List<DataApprovalAudit> audits;
        // Superuser can see all audits.
        setMockUserService( superUserService );
        audits = dataApprovalAuditStore.getDataApprovalAudits( params );
        assertEquals( 9, audits.size() );
        assertTrue( audits.contains( auditAA1 ) );
        assertTrue( audits.contains( auditAB1 ) );
        assertTrue( audits.contains( auditAC1 ) );
        assertTrue( audits.contains( auditBA2 ) );
        assertTrue( audits.contains( auditBB2 ) );
        assertTrue( audits.contains( auditBC2 ) );
        assertTrue( audits.contains( auditBA3 ) );
        assertTrue( audits.contains( auditBB3 ) );
        assertTrue( audits.contains( auditBC3 ) );
        // User A can see all options from sourceA or its children.
        setMockUserService( userAService );
        audits = dataApprovalAuditService.getDataApprovalAudits( params );
        assertEquals( 9, audits.size() );
        assertTrue( audits.contains( auditAA1 ) );
        assertTrue( audits.contains( auditAB1 ) );
        assertTrue( audits.contains( auditAC1 ) );
        assertTrue( audits.contains( auditBA2 ) );
        assertTrue( audits.contains( auditBB2 ) );
        assertTrue( audits.contains( auditBC2 ) );
        assertTrue( audits.contains( auditBA3 ) );
        assertTrue( audits.contains( auditBB3 ) );
        assertTrue( audits.contains( auditBC3 ) );
        // User B can see all options from sourceB.
        setMockUserService( userBService );
        audits = dataApprovalAuditService.getDataApprovalAudits( params );
        assertEquals( 6, audits.size() );
        assertTrue( audits.contains( auditBA2 ) );
        assertTrue( audits.contains( auditBB2 ) );
        assertTrue( audits.contains( auditBC2 ) );
        assertTrue( audits.contains( auditBA3 ) );
        assertTrue( audits.contains( auditBB3 ) );
        assertTrue( audits.contains( auditBC3 ) );
        // User C can see only level 3, optionA from sourceB.
        setMockUserService( userCService );
        audits = dataApprovalAuditService.getDataApprovalAudits( params );
        assertEquals( 1, audits.size() );
        assertTrue( audits.contains( auditBA3 ) );
        // User D can see only level 3, optionB from sourceB.
        setMockUserService( userDService );
        audits = dataApprovalAuditService.getDataApprovalAudits( params );
        assertEquals( 1, audits.size() );
        assertTrue( audits.contains( auditBB3 ) );
    }
}
| |
package org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.policies;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import org.eclipse.core.runtime.IAdaptable;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.EStructuralFeature;
import org.eclipse.gef.EditPart;
import org.eclipse.gef.commands.Command;
import org.eclipse.gmf.runtime.diagram.core.util.ViewUtil;
import org.eclipse.gmf.runtime.diagram.ui.commands.DeferredLayoutCommand;
import org.eclipse.gmf.runtime.diagram.ui.commands.ICommandProxy;
import org.eclipse.gmf.runtime.diagram.ui.commands.SetViewMutabilityCommand;
import org.eclipse.gmf.runtime.diagram.ui.editpolicies.CanonicalEditPolicy;
import org.eclipse.gmf.runtime.diagram.ui.requests.CreateViewRequest;
import org.eclipse.gmf.runtime.emf.core.util.EObjectAdapter;
import org.eclipse.gmf.runtime.notation.Node;
import org.eclipse.gmf.runtime.notation.View;
import org.wso2.developerstudio.eclipse.gmf.esb.EsbPackage;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.APIResourceEndpointEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.AddressEndPointEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.AddressingEndpointEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.AggregateMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.BAMMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.BeanMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.BuilderMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CacheMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CallMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CallTemplateMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CalloutMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ClassMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CloneMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CloudConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CloudConnectorOperationEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CommandMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ConditionalRouterMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.DBLookupMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.DBReportMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.DataMapperMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.DefaultEndPointEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.DropMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.EJBMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.EnqueueMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.EnrichMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.EntitlementMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.EventMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.FailoverEndPointEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.FastXSLTMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.FaultMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.FilterMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ForEachMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.HTTPEndpointEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.HeaderMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.IterateMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.JsonTransformMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.LoadBalanceEndPointEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.LogMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.LoopBackMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.NamedEndpointEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.OAuthMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.PayloadFactoryMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.PropertyGroupMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.PropertyMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.PublishEventMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.RMSequenceMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.RecipientListEndPointEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.RespondMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.RouterMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.RuleMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ScriptMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.SendMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.SequenceEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.SmooksMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.SpringMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.StoreMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.SwitchMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.TemplateEndpointEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ThrottleMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.TransactionMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.URLRewriteMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ValidateMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.WSDLEndPointEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.XQueryMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.XSLTMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.part.EsbDiagramUpdater;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.part.EsbNodeDescriptor;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.part.EsbVisualIDRegistry;
/**
* @generated
*/
public class MediatorFlowMediatorFlowCompartmentCanonicalEditPolicy extends CanonicalEditPolicy {
/**
 * Activates all child edit parts before running the canonical refresh so
 * that their event listeners are registered when views are synchronized.
 *
 * @generated NOT
 */
protected void refreshOnActivate() {
    for (Object child : getHost().getChildren()) {
        ((EditPart) child).activate();
    }
    super.refreshOnActivate();
}
/**
 * The compartment mirrors the MediatorFlow's children feature.
 *
 * @generated NOT
 */
protected EStructuralFeature getFeatureToSynchronize() {
    EsbPackage esbPackage = EsbPackage.eINSTANCE;
    return esbPackage.getMediatorFlow_Children();
}
/**
 * Collects the semantic model elements that should be represented as child
 * views inside this mediator-flow compartment.
 *
 * @generated NOT
 */
@SuppressWarnings("rawtypes")
protected List getSemanticChildrenList() {
    View compartmentView = (View) getHost().getModel();
    List<EsbNodeDescriptor> descriptors = EsbDiagramUpdater
            .getMediatorFlowMediatorFlowCompartment_7034SemanticChildren(compartmentView);
    LinkedList<EObject> semanticChildren = new LinkedList<EObject>();
    for (Iterator<EsbNodeDescriptor> it = descriptors.iterator(); it.hasNext();) {
        semanticChildren.add(it.next().getModelElement());
    }
    return semanticChildren;
}
/**
 * A view is orphaned when it is one of this policy's managed node types but
 * its semantic element is no longer among the expected semantic children.
 *
 * @generated NOT
 */
protected boolean isOrphaned(Collection<EObject> semanticChildren, final View view) {
    if (!isMyDiagramElement(view)) {
        return false;
    }
    return !semanticChildren.contains(view.getElement());
}
/**
 * Returns true when the given view's visual id is one of the node types
 * this compartment's canonical policy manages (mediators, endpoints and
 * cloud connectors); only such views are candidates for orphan removal.
 *
 * @generated
 */
private boolean isMyDiagramElement(View view) {
    int visualID = EsbVisualIDRegistry.getVisualID(view);
    switch (visualID) {
    // Mediator nodes
    case DropMediatorEditPart.VISUAL_ID:
    case PropertyMediatorEditPart.VISUAL_ID:
    case PropertyGroupMediatorEditPart.VISUAL_ID:
    case ThrottleMediatorEditPart.VISUAL_ID:
    case FilterMediatorEditPart.VISUAL_ID:
    case LogMediatorEditPart.VISUAL_ID:
    case EnrichMediatorEditPart.VISUAL_ID:
    case XSLTMediatorEditPart.VISUAL_ID:
    case SwitchMediatorEditPart.VISUAL_ID:
    case SequenceEditPart.VISUAL_ID:
    case EventMediatorEditPart.VISUAL_ID:
    case EntitlementMediatorEditPart.VISUAL_ID:
    case ClassMediatorEditPart.VISUAL_ID:
    case SpringMediatorEditPart.VISUAL_ID:
    case ScriptMediatorEditPart.VISUAL_ID:
    case FaultMediatorEditPart.VISUAL_ID:
    case XQueryMediatorEditPart.VISUAL_ID:
    case CommandMediatorEditPart.VISUAL_ID:
    case DBLookupMediatorEditPart.VISUAL_ID:
    case DBReportMediatorEditPart.VISUAL_ID:
    case SmooksMediatorEditPart.VISUAL_ID:
    case SendMediatorEditPart.VISUAL_ID:
    case HeaderMediatorEditPart.VISUAL_ID:
    case CloneMediatorEditPart.VISUAL_ID:
    case CacheMediatorEditPart.VISUAL_ID:
    case IterateMediatorEditPart.VISUAL_ID:
    case CalloutMediatorEditPart.VISUAL_ID:
    case TransactionMediatorEditPart.VISUAL_ID:
    case RMSequenceMediatorEditPart.VISUAL_ID:
    case RuleMediatorEditPart.VISUAL_ID:
    case OAuthMediatorEditPart.VISUAL_ID:
    case AggregateMediatorEditPart.VISUAL_ID:
    case StoreMediatorEditPart.VISUAL_ID:
    case BuilderMediatorEditPart.VISUAL_ID:
    case CallTemplateMediatorEditPart.VISUAL_ID:
    case PayloadFactoryMediatorEditPart.VISUAL_ID:
    case EnqueueMediatorEditPart.VISUAL_ID:
    case URLRewriteMediatorEditPart.VISUAL_ID:
    case ValidateMediatorEditPart.VISUAL_ID:
    case RouterMediatorEditPart.VISUAL_ID:
    case ConditionalRouterMediatorEditPart.VISUAL_ID:
    case BAMMediatorEditPart.VISUAL_ID:
    case BeanMediatorEditPart.VISUAL_ID:
    case EJBMediatorEditPart.VISUAL_ID:
    // Endpoint nodes
    case DefaultEndPointEditPart.VISUAL_ID:
    case AddressEndPointEditPart.VISUAL_ID:
    case FailoverEndPointEditPart.VISUAL_ID:
    case RecipientListEndPointEditPart.VISUAL_ID:
    case WSDLEndPointEditPart.VISUAL_ID:
    case NamedEndpointEditPart.VISUAL_ID:
    case LoadBalanceEndPointEditPart.VISUAL_ID:
    case APIResourceEndpointEditPart.VISUAL_ID:
    case AddressingEndpointEditPart.VISUAL_ID:
    case HTTPEndpointEditPart.VISUAL_ID:
    case TemplateEndpointEditPart.VISUAL_ID:
    // Cloud connector nodes
    case CloudConnectorEditPart.VISUAL_ID:
    case CloudConnectorOperationEditPart.VISUAL_ID:
    // Remaining mediator nodes
    case LoopBackMediatorEditPart.VISUAL_ID:
    case RespondMediatorEditPart.VISUAL_ID:
    case CallMediatorEditPart.VISUAL_ID:
    case DataMapperMediatorEditPart.VISUAL_ID:
    case FastXSLTMediatorEditPart.VISUAL_ID:
    case ForEachMediatorEditPart.VISUAL_ID:
    case PublishEventMediatorEditPart.VISUAL_ID:
    case JsonTransformMediatorEditPart.VISUAL_ID:
        return true;
    }
    // Any other visual id belongs to a different compartment/policy.
    return false;
}
/**
 * Synchronizes the notation views of the mediator-flow compartment with the
 * semantic model (canonical refresh). For every semantic child reported by
 * {@code EsbDiagramUpdater} it either keeps an exactly-matching existing view
 * (same element and same visual-ID hint) or schedules a fresh view to be
 * created; views we own that no longer match any semantic child are deleted
 * as orphans. Newly created views are laid out via a deferred layout command
 * and finally made immutable.
 * <p>
 * NOTE(review): the statement order here is load-bearing — orphan deletion
 * must happen before view creation, and {@code makeViewsImmutable} must run
 * last — do not reorder.
 *
 * @generated
 */
protected void refreshSemantic() {
    // Nothing to do if the host's semantic element cannot be resolved
    // (e.g. the model element was deleted or the proxy cannot load).
    if (resolveSemanticElement() == null) {
        return;
    }
    // Accumulates adapters for the views created during this refresh, so
    // post-processing and deferred layout can address them afterwards.
    LinkedList<IAdaptable> createdViews = new LinkedList<IAdaptable>();
    // Desired state: the semantic children that SHOULD have views in the
    // 7034 compartment, as computed from the current model.
    List<EsbNodeDescriptor> childDescriptors = EsbDiagramUpdater
            .getMediatorFlowMediatorFlowCompartment_7034SemanticChildren((View) getHost().getModel());
    LinkedList<View> orphaned = new LinkedList<View>();
    // we care to check only views we recognize as ours
    LinkedList<View> knownViewChildren = new LinkedList<View>();
    for (View v : getViewChildren()) {
        if (isMyDiagramElement(v)) {
            knownViewChildren.add(v);
        }
    }
    // alternative to #cleanCanonicalSemanticChildren(getViewChildren(), semanticChildren)
    //
    // iteration happens over list of desired semantic elements, trying to find best matching View, while original CEP
    // iterates views, potentially losing view (size/bounds) information - i.e. if there are few views to reference same EObject, only last one
    // to answer isOrphaned == true will be used for the domain element representation, see #cleanCanonicalSemanticChildren()
    for (Iterator<EsbNodeDescriptor> descriptorsIterator = childDescriptors.iterator(); descriptorsIterator
            .hasNext();) {
        EsbNodeDescriptor next = descriptorsIterator.next();
        // The notation "type" string derived from the descriptor's visual ID;
        // a view is a perfect match only when both element AND hint agree.
        String hint = EsbVisualIDRegistry.getType(next.getVisualID());
        LinkedList<View> perfectMatch = new LinkedList<View>(); // both semanticElement and hint match that of NodeDescriptor
        for (View childView : getViewChildren()) {
            EObject semanticElement = childView.getElement();
            if (next.getModelElement().equals(semanticElement)) {
                if (hint.equals(childView.getType())) {
                    perfectMatch.add(childView);
                    // actually, can stop iteration over view children here, but
                    // may want to use not the first view but last one as a 'real' match (the way original CEP does
                    // with its trick with viewToSemanticMap inside #cleanCanonicalSemanticChildren
                }
            }
        }
        if (perfectMatch.size() > 0) {
            descriptorsIterator.remove(); // precise match found no need to create anything for the NodeDescriptor
            // use only one view (first or last?), keep rest as orphaned for further consideration
            knownViewChildren.remove(perfectMatch.getFirst());
        }
    }
    // those left in knownViewChildren are subject to removal - they are our diagram elements we didn't find match to,
    // or those we have potential matches to, and thus need to be recreated, preserving size/location information.
    orphaned.addAll(knownViewChildren);
    //
    // Build a view-creation request for every remaining descriptor (those
    // with no perfect match were left in childDescriptors by the loop above).
    ArrayList<CreateViewRequest.ViewDescriptor> viewDescriptors = new ArrayList<CreateViewRequest.ViewDescriptor>(
            childDescriptors.size());
    for (EsbNodeDescriptor next : childDescriptors) {
        String hint = EsbVisualIDRegistry.getType(next.getVisualID());
        IAdaptable elementAdapter = new CanonicalElementAdapter(next.getModelElement(), hint);
        CreateViewRequest.ViewDescriptor descriptor = new CreateViewRequest.ViewDescriptor(elementAdapter,
                Node.class, hint, ViewUtil.APPEND, false, host().getDiagramPreferencesHint());
        viewDescriptors.add(descriptor);
    }
    // Delete orphans first; 'changed' records whether anything was removed.
    boolean changed = deleteViews(orphaned.iterator());
    //
    CreateViewRequest request = getCreateViewRequest(viewDescriptors);
    Command cmd = getCreateViewCommand(request);
    if (cmd != null && cmd.canExecute()) {
        // The host view must be mutable before new children can be added.
        SetViewMutabilityCommand.makeMutable(new EObjectAdapter(host().getNotationView())).execute();
        executeCommand(cmd);
        @SuppressWarnings("unchecked")
        List<IAdaptable> nl = (List<IAdaptable>) request.getNewObject();
        createdViews.addAll(nl);
    }
    // Run post-processing if anything was deleted or created.
    if (changed || createdViews.size() > 0) {
        postProcessRefreshSemantic(createdViews);
    }
    if (createdViews.size() > 1) {
        // perform a layout of the container
        DeferredLayoutCommand layoutCmd = new DeferredLayoutCommand(host().getEditingDomain(), createdViews,
                host());
        executeCommand(new ICommandProxy(layoutCmd));
    }
    // Lock the freshly created views so the canonical policy owns them.
    makeViewsImmutable(createdViews);
}
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v10/resources/ad.proto
package com.google.ads.googleads.v10.resources;
public interface AdOrBuilder extends
// @@protoc_insertion_point(interface_extends:google.ads.googleads.v10.resources.Ad)
com.google.protobuf.MessageOrBuilder {
/**
* <pre>
* Immutable. The resource name of the ad.
* Ad resource names have the form:
* `customers/{customer_id}/ads/{ad_id}`
* </pre>
*
* <code>string resource_name = 37 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code>
* @return The resourceName.
*/
java.lang.String getResourceName();
/**
* <pre>
* Immutable. The resource name of the ad.
* Ad resource names have the form:
* `customers/{customer_id}/ads/{ad_id}`
* </pre>
*
* <code>string resource_name = 37 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code>
* @return The bytes for resourceName.
*/
com.google.protobuf.ByteString
getResourceNameBytes();
/**
* <pre>
* Output only. The ID of the ad.
* </pre>
*
* <code>optional int64 id = 40 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @return Whether the id field is set.
*/
boolean hasId();
/**
* <pre>
* Output only. The ID of the ad.
* </pre>
*
* <code>optional int64 id = 40 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @return The id.
*/
long getId();
/**
* <pre>
* The list of possible final URLs after all cross-domain redirects for the
* ad.
* </pre>
*
* <code>repeated string final_urls = 41;</code>
* @return A list containing the finalUrls.
*/
java.util.List<java.lang.String>
getFinalUrlsList();
/**
* <pre>
* The list of possible final URLs after all cross-domain redirects for the
* ad.
* </pre>
*
* <code>repeated string final_urls = 41;</code>
* @return The count of finalUrls.
*/
int getFinalUrlsCount();
/**
* <pre>
* The list of possible final URLs after all cross-domain redirects for the
* ad.
* </pre>
*
* <code>repeated string final_urls = 41;</code>
* @param index The index of the element to return.
* @return The finalUrls at the given index.
*/
java.lang.String getFinalUrls(int index);
/**
* <pre>
* The list of possible final URLs after all cross-domain redirects for the
* ad.
* </pre>
*
* <code>repeated string final_urls = 41;</code>
* @param index The index of the value to return.
* @return The bytes of the finalUrls at the given index.
*/
com.google.protobuf.ByteString
getFinalUrlsBytes(int index);
/**
* <pre>
* A list of final app URLs that will be used on mobile if the user has the
* specific app installed.
* </pre>
*
* <code>repeated .google.ads.googleads.v10.common.FinalAppUrl final_app_urls = 35;</code>
*/
java.util.List<com.google.ads.googleads.v10.common.FinalAppUrl>
getFinalAppUrlsList();
/**
* <pre>
* A list of final app URLs that will be used on mobile if the user has the
* specific app installed.
* </pre>
*
* <code>repeated .google.ads.googleads.v10.common.FinalAppUrl final_app_urls = 35;</code>
*/
com.google.ads.googleads.v10.common.FinalAppUrl getFinalAppUrls(int index);
/**
* <pre>
* A list of final app URLs that will be used on mobile if the user has the
* specific app installed.
* </pre>
*
* <code>repeated .google.ads.googleads.v10.common.FinalAppUrl final_app_urls = 35;</code>
*/
int getFinalAppUrlsCount();
/**
* <pre>
* A list of final app URLs that will be used on mobile if the user has the
* specific app installed.
* </pre>
*
* <code>repeated .google.ads.googleads.v10.common.FinalAppUrl final_app_urls = 35;</code>
*/
java.util.List<? extends com.google.ads.googleads.v10.common.FinalAppUrlOrBuilder>
getFinalAppUrlsOrBuilderList();
/**
* <pre>
* A list of final app URLs that will be used on mobile if the user has the
* specific app installed.
* </pre>
*
* <code>repeated .google.ads.googleads.v10.common.FinalAppUrl final_app_urls = 35;</code>
*/
com.google.ads.googleads.v10.common.FinalAppUrlOrBuilder getFinalAppUrlsOrBuilder(
int index);
/**
* <pre>
* The list of possible final mobile URLs after all cross-domain redirects
* for the ad.
* </pre>
*
* <code>repeated string final_mobile_urls = 42;</code>
* @return A list containing the finalMobileUrls.
*/
java.util.List<java.lang.String>
getFinalMobileUrlsList();
/**
* <pre>
* The list of possible final mobile URLs after all cross-domain redirects
* for the ad.
* </pre>
*
* <code>repeated string final_mobile_urls = 42;</code>
* @return The count of finalMobileUrls.
*/
int getFinalMobileUrlsCount();
/**
* <pre>
* The list of possible final mobile URLs after all cross-domain redirects
* for the ad.
* </pre>
*
* <code>repeated string final_mobile_urls = 42;</code>
* @param index The index of the element to return.
* @return The finalMobileUrls at the given index.
*/
java.lang.String getFinalMobileUrls(int index);
/**
* <pre>
* The list of possible final mobile URLs after all cross-domain redirects
* for the ad.
* </pre>
*
* <code>repeated string final_mobile_urls = 42;</code>
* @param index The index of the value to return.
* @return The bytes of the finalMobileUrls at the given index.
*/
com.google.protobuf.ByteString
getFinalMobileUrlsBytes(int index);
/**
* <pre>
* The URL template for constructing a tracking URL.
* </pre>
*
* <code>optional string tracking_url_template = 43;</code>
* @return Whether the trackingUrlTemplate field is set.
*/
boolean hasTrackingUrlTemplate();
/**
* <pre>
* The URL template for constructing a tracking URL.
* </pre>
*
* <code>optional string tracking_url_template = 43;</code>
* @return The trackingUrlTemplate.
*/
java.lang.String getTrackingUrlTemplate();
/**
* <pre>
* The URL template for constructing a tracking URL.
* </pre>
*
* <code>optional string tracking_url_template = 43;</code>
* @return The bytes for trackingUrlTemplate.
*/
com.google.protobuf.ByteString
getTrackingUrlTemplateBytes();
/**
* <pre>
* The suffix to use when constructing a final URL.
* </pre>
*
* <code>optional string final_url_suffix = 44;</code>
* @return Whether the finalUrlSuffix field is set.
*/
boolean hasFinalUrlSuffix();
/**
* <pre>
* The suffix to use when constructing a final URL.
* </pre>
*
* <code>optional string final_url_suffix = 44;</code>
* @return The finalUrlSuffix.
*/
java.lang.String getFinalUrlSuffix();
/**
* <pre>
* The suffix to use when constructing a final URL.
* </pre>
*
* <code>optional string final_url_suffix = 44;</code>
* @return The bytes for finalUrlSuffix.
*/
com.google.protobuf.ByteString
getFinalUrlSuffixBytes();
/**
* <pre>
* The list of mappings that can be used to substitute custom parameter tags
* in a `tracking_url_template`, `final_urls`, or `mobile_final_urls`.
* For mutates, please use url custom parameter operations.
* </pre>
*
* <code>repeated .google.ads.googleads.v10.common.CustomParameter url_custom_parameters = 10;</code>
*/
java.util.List<com.google.ads.googleads.v10.common.CustomParameter>
getUrlCustomParametersList();
/**
* <pre>
* The list of mappings that can be used to substitute custom parameter tags
* in a `tracking_url_template`, `final_urls`, or `mobile_final_urls`.
* For mutates, please use url custom parameter operations.
* </pre>
*
* <code>repeated .google.ads.googleads.v10.common.CustomParameter url_custom_parameters = 10;</code>
*/
com.google.ads.googleads.v10.common.CustomParameter getUrlCustomParameters(int index);
/**
* <pre>
* The list of mappings that can be used to substitute custom parameter tags
* in a `tracking_url_template`, `final_urls`, or `mobile_final_urls`.
* For mutates, please use url custom parameter operations.
* </pre>
*
* <code>repeated .google.ads.googleads.v10.common.CustomParameter url_custom_parameters = 10;</code>
*/
int getUrlCustomParametersCount();
/**
* <pre>
* The list of mappings that can be used to substitute custom parameter tags
* in a `tracking_url_template`, `final_urls`, or `mobile_final_urls`.
* For mutates, please use url custom parameter operations.
* </pre>
*
* <code>repeated .google.ads.googleads.v10.common.CustomParameter url_custom_parameters = 10;</code>
*/
java.util.List<? extends com.google.ads.googleads.v10.common.CustomParameterOrBuilder>
getUrlCustomParametersOrBuilderList();
/**
* <pre>
* The list of mappings that can be used to substitute custom parameter tags
* in a `tracking_url_template`, `final_urls`, or `mobile_final_urls`.
* For mutates, please use url custom parameter operations.
* </pre>
*
* <code>repeated .google.ads.googleads.v10.common.CustomParameter url_custom_parameters = 10;</code>
*/
com.google.ads.googleads.v10.common.CustomParameterOrBuilder getUrlCustomParametersOrBuilder(
int index);
/**
* <pre>
* The URL that appears in the ad description for some ad formats.
* </pre>
*
* <code>optional string display_url = 45;</code>
* @return Whether the displayUrl field is set.
*/
boolean hasDisplayUrl();
/**
* <pre>
* The URL that appears in the ad description for some ad formats.
* </pre>
*
* <code>optional string display_url = 45;</code>
* @return The displayUrl.
*/
java.lang.String getDisplayUrl();
/**
* <pre>
* The URL that appears in the ad description for some ad formats.
* </pre>
*
* <code>optional string display_url = 45;</code>
* @return The bytes for displayUrl.
*/
com.google.protobuf.ByteString
getDisplayUrlBytes();
/**
* <pre>
* Output only. The type of ad.
* </pre>
*
* <code>.google.ads.googleads.v10.enums.AdTypeEnum.AdType type = 5 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @return The enum numeric value on the wire for type.
*/
int getTypeValue();
/**
* <pre>
* Output only. The type of ad.
* </pre>
*
* <code>.google.ads.googleads.v10.enums.AdTypeEnum.AdType type = 5 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @return The type.
*/
com.google.ads.googleads.v10.enums.AdTypeEnum.AdType getType();
/**
* <pre>
* Output only. Indicates if this ad was automatically added by Google Ads and not by a
* user. For example, this could happen when ads are automatically created as
* suggestions for new ads based on knowledge of how existing ads are
* performing.
* </pre>
*
* <code>optional bool added_by_google_ads = 46 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @return Whether the addedByGoogleAds field is set.
*/
boolean hasAddedByGoogleAds();
/**
* <pre>
* Output only. Indicates if this ad was automatically added by Google Ads and not by a
* user. For example, this could happen when ads are automatically created as
* suggestions for new ads based on knowledge of how existing ads are
* performing.
* </pre>
*
* <code>optional bool added_by_google_ads = 46 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @return The addedByGoogleAds.
*/
boolean getAddedByGoogleAds();
/**
* <pre>
* The device preference for the ad. You can only specify a preference for
* mobile devices. When this preference is set the ad will be preferred over
* other ads when being displayed on a mobile device. The ad can still be
* displayed on other device types, e.g. if no other ads are available.
* If unspecified (no device preference), all devices are targeted.
* This is only supported by some ad types.
* </pre>
*
* <code>.google.ads.googleads.v10.enums.DeviceEnum.Device device_preference = 20;</code>
* @return The enum numeric value on the wire for devicePreference.
*/
int getDevicePreferenceValue();
/**
* <pre>
* The device preference for the ad. You can only specify a preference for
* mobile devices. When this preference is set the ad will be preferred over
* other ads when being displayed on a mobile device. The ad can still be
* displayed on other device types, e.g. if no other ads are available.
* If unspecified (no device preference), all devices are targeted.
* This is only supported by some ad types.
* </pre>
*
* <code>.google.ads.googleads.v10.enums.DeviceEnum.Device device_preference = 20;</code>
* @return The devicePreference.
*/
com.google.ads.googleads.v10.enums.DeviceEnum.Device getDevicePreference();
/**
* <pre>
* Additional URLs for the ad that are tagged with a unique identifier that
* can be referenced from other fields in the ad.
* </pre>
*
* <code>repeated .google.ads.googleads.v10.common.UrlCollection url_collections = 26;</code>
*/
java.util.List<com.google.ads.googleads.v10.common.UrlCollection>
getUrlCollectionsList();
/**
* <pre>
* Additional URLs for the ad that are tagged with a unique identifier that
* can be referenced from other fields in the ad.
* </pre>
*
* <code>repeated .google.ads.googleads.v10.common.UrlCollection url_collections = 26;</code>
*/
com.google.ads.googleads.v10.common.UrlCollection getUrlCollections(int index);
/**
* <pre>
* Additional URLs for the ad that are tagged with a unique identifier that
* can be referenced from other fields in the ad.
* </pre>
*
* <code>repeated .google.ads.googleads.v10.common.UrlCollection url_collections = 26;</code>
*/
int getUrlCollectionsCount();
/**
* <pre>
* Additional URLs for the ad that are tagged with a unique identifier that
* can be referenced from other fields in the ad.
* </pre>
*
* <code>repeated .google.ads.googleads.v10.common.UrlCollection url_collections = 26;</code>
*/
java.util.List<? extends com.google.ads.googleads.v10.common.UrlCollectionOrBuilder>
getUrlCollectionsOrBuilderList();
/**
* <pre>
* Additional URLs for the ad that are tagged with a unique identifier that
* can be referenced from other fields in the ad.
* </pre>
*
* <code>repeated .google.ads.googleads.v10.common.UrlCollection url_collections = 26;</code>
*/
com.google.ads.googleads.v10.common.UrlCollectionOrBuilder getUrlCollectionsOrBuilder(
int index);
/**
* <pre>
* Immutable. The name of the ad. This is only used to be able to identify the ad. It
* does not need to be unique and does not affect the served ad. The name
* field is currently only supported for DisplayUploadAd, ImageAd,
* ShoppingComparisonListingAd and VideoAd.
* </pre>
*
* <code>optional string name = 47 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return Whether the name field is set.
*/
boolean hasName();
/**
* <pre>
* Immutable. The name of the ad. This is only used to be able to identify the ad. It
* does not need to be unique and does not affect the served ad. The name
* field is currently only supported for DisplayUploadAd, ImageAd,
* ShoppingComparisonListingAd and VideoAd.
* </pre>
*
* <code>optional string name = 47 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return The name.
*/
java.lang.String getName();
/**
* <pre>
* Immutable. The name of the ad. This is only used to be able to identify the ad. It
* does not need to be unique and does not affect the served ad. The name
* field is currently only supported for DisplayUploadAd, ImageAd,
* ShoppingComparisonListingAd and VideoAd.
* </pre>
*
* <code>optional string name = 47 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return The bytes for name.
*/
com.google.protobuf.ByteString
getNameBytes();
/**
* <pre>
* Output only. If this ad is system managed, then this field will indicate the source.
* This field is read-only.
* </pre>
*
* <code>.google.ads.googleads.v10.enums.SystemManagedResourceSourceEnum.SystemManagedResourceSource system_managed_resource_source = 27 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @return The enum numeric value on the wire for systemManagedResourceSource.
*/
int getSystemManagedResourceSourceValue();
/**
* <pre>
* Output only. If this ad is system managed, then this field will indicate the source.
* This field is read-only.
* </pre>
*
* <code>.google.ads.googleads.v10.enums.SystemManagedResourceSourceEnum.SystemManagedResourceSource system_managed_resource_source = 27 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @return The systemManagedResourceSource.
*/
com.google.ads.googleads.v10.enums.SystemManagedResourceSourceEnum.SystemManagedResourceSource getSystemManagedResourceSource();
/**
* <pre>
* Immutable. Details pertaining to a text ad.
* </pre>
*
* <code>.google.ads.googleads.v10.common.TextAdInfo text_ad = 6 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return Whether the textAd field is set.
*/
boolean hasTextAd();
/**
* <pre>
* Immutable. Details pertaining to a text ad.
* </pre>
*
* <code>.google.ads.googleads.v10.common.TextAdInfo text_ad = 6 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return The textAd.
*/
com.google.ads.googleads.v10.common.TextAdInfo getTextAd();
/**
* <pre>
* Immutable. Details pertaining to a text ad.
* </pre>
*
* <code>.google.ads.googleads.v10.common.TextAdInfo text_ad = 6 [(.google.api.field_behavior) = IMMUTABLE];</code>
*/
com.google.ads.googleads.v10.common.TextAdInfoOrBuilder getTextAdOrBuilder();
/**
* <pre>
* Details pertaining to an expanded text ad.
* </pre>
*
* <code>.google.ads.googleads.v10.common.ExpandedTextAdInfo expanded_text_ad = 7;</code>
* @return Whether the expandedTextAd field is set.
*/
boolean hasExpandedTextAd();
/**
* <pre>
* Details pertaining to an expanded text ad.
* </pre>
*
* <code>.google.ads.googleads.v10.common.ExpandedTextAdInfo expanded_text_ad = 7;</code>
* @return The expandedTextAd.
*/
com.google.ads.googleads.v10.common.ExpandedTextAdInfo getExpandedTextAd();
/**
* <pre>
* Details pertaining to an expanded text ad.
* </pre>
*
* <code>.google.ads.googleads.v10.common.ExpandedTextAdInfo expanded_text_ad = 7;</code>
*/
com.google.ads.googleads.v10.common.ExpandedTextAdInfoOrBuilder getExpandedTextAdOrBuilder();
/**
* <pre>
* Details pertaining to a call ad.
* </pre>
*
* <code>.google.ads.googleads.v10.common.CallAdInfo call_ad = 49;</code>
* @return Whether the callAd field is set.
*/
boolean hasCallAd();
/**
* <pre>
* Details pertaining to a call ad.
* </pre>
*
* <code>.google.ads.googleads.v10.common.CallAdInfo call_ad = 49;</code>
* @return The callAd.
*/
com.google.ads.googleads.v10.common.CallAdInfo getCallAd();
/**
* <pre>
* Details pertaining to a call ad.
* </pre>
*
* <code>.google.ads.googleads.v10.common.CallAdInfo call_ad = 49;</code>
*/
com.google.ads.googleads.v10.common.CallAdInfoOrBuilder getCallAdOrBuilder();
/**
* <pre>
* Immutable. Details pertaining to an Expanded Dynamic Search Ad.
* This type of ad has its headline, final URLs, and display URL
* auto-generated at serving time according to domain name specific
* information provided by `dynamic_search_ads_setting` linked at the
* campaign level.
* </pre>
*
* <code>.google.ads.googleads.v10.common.ExpandedDynamicSearchAdInfo expanded_dynamic_search_ad = 14 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return Whether the expandedDynamicSearchAd field is set.
*/
boolean hasExpandedDynamicSearchAd();
/**
* <pre>
* Immutable. Details pertaining to an Expanded Dynamic Search Ad.
* This type of ad has its headline, final URLs, and display URL
* auto-generated at serving time according to domain name specific
* information provided by `dynamic_search_ads_setting` linked at the
* campaign level.
* </pre>
*
* <code>.google.ads.googleads.v10.common.ExpandedDynamicSearchAdInfo expanded_dynamic_search_ad = 14 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return The expandedDynamicSearchAd.
*/
com.google.ads.googleads.v10.common.ExpandedDynamicSearchAdInfo getExpandedDynamicSearchAd();
/**
* <pre>
* Immutable. Details pertaining to an Expanded Dynamic Search Ad.
* This type of ad has its headline, final URLs, and display URL
* auto-generated at serving time according to domain name specific
* information provided by `dynamic_search_ads_setting` linked at the
* campaign level.
* </pre>
*
* <code>.google.ads.googleads.v10.common.ExpandedDynamicSearchAdInfo expanded_dynamic_search_ad = 14 [(.google.api.field_behavior) = IMMUTABLE];</code>
*/
com.google.ads.googleads.v10.common.ExpandedDynamicSearchAdInfoOrBuilder getExpandedDynamicSearchAdOrBuilder();
/**
* <pre>
* Details pertaining to a hotel ad.
* </pre>
*
* <code>.google.ads.googleads.v10.common.HotelAdInfo hotel_ad = 15;</code>
* @return Whether the hotelAd field is set.
*/
boolean hasHotelAd();
/**
* <pre>
* Details pertaining to a hotel ad.
* </pre>
*
* <code>.google.ads.googleads.v10.common.HotelAdInfo hotel_ad = 15;</code>
* @return The hotelAd.
*/
com.google.ads.googleads.v10.common.HotelAdInfo getHotelAd();
/**
* <pre>
* Details pertaining to a hotel ad.
* </pre>
*
* <code>.google.ads.googleads.v10.common.HotelAdInfo hotel_ad = 15;</code>
*/
com.google.ads.googleads.v10.common.HotelAdInfoOrBuilder getHotelAdOrBuilder();
/**
* <pre>
* Details pertaining to a Smart Shopping ad.
* </pre>
*
* <code>.google.ads.googleads.v10.common.ShoppingSmartAdInfo shopping_smart_ad = 17;</code>
* @return Whether the shoppingSmartAd field is set.
*/
boolean hasShoppingSmartAd();
/**
* <pre>
* Details pertaining to a Smart Shopping ad.
* </pre>
*
* <code>.google.ads.googleads.v10.common.ShoppingSmartAdInfo shopping_smart_ad = 17;</code>
* @return The shoppingSmartAd.
*/
com.google.ads.googleads.v10.common.ShoppingSmartAdInfo getShoppingSmartAd();
/**
* <pre>
* Details pertaining to a Smart Shopping ad.
* </pre>
*
* <code>.google.ads.googleads.v10.common.ShoppingSmartAdInfo shopping_smart_ad = 17;</code>
*/
com.google.ads.googleads.v10.common.ShoppingSmartAdInfoOrBuilder getShoppingSmartAdOrBuilder();
/**
* <pre>
* Details pertaining to a Shopping product ad.
* </pre>
*
* <code>.google.ads.googleads.v10.common.ShoppingProductAdInfo shopping_product_ad = 18;</code>
* @return Whether the shoppingProductAd field is set.
*/
boolean hasShoppingProductAd();
/**
* <pre>
* Details pertaining to a Shopping product ad.
* </pre>
*
* <code>.google.ads.googleads.v10.common.ShoppingProductAdInfo shopping_product_ad = 18;</code>
* @return The shoppingProductAd.
*/
com.google.ads.googleads.v10.common.ShoppingProductAdInfo getShoppingProductAd();
/**
* <pre>
* Details pertaining to a Shopping product ad.
* </pre>
*
* <code>.google.ads.googleads.v10.common.ShoppingProductAdInfo shopping_product_ad = 18;</code>
*/
com.google.ads.googleads.v10.common.ShoppingProductAdInfoOrBuilder getShoppingProductAdOrBuilder();
/**
* <pre>
* Immutable. Details pertaining to a Gmail ad.
* </pre>
*
* <code>.google.ads.googleads.v10.common.GmailAdInfo gmail_ad = 21 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return Whether the gmailAd field is set.
*/
boolean hasGmailAd();
/**
* <pre>
* Immutable. Details pertaining to a Gmail ad.
* </pre>
*
* <code>.google.ads.googleads.v10.common.GmailAdInfo gmail_ad = 21 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return The gmailAd.
*/
com.google.ads.googleads.v10.common.GmailAdInfo getGmailAd();
/**
* <pre>
* Immutable. Details pertaining to a Gmail ad.
* </pre>
*
* <code>.google.ads.googleads.v10.common.GmailAdInfo gmail_ad = 21 [(.google.api.field_behavior) = IMMUTABLE];</code>
*/
com.google.ads.googleads.v10.common.GmailAdInfoOrBuilder getGmailAdOrBuilder();
/**
* <pre>
* Immutable. Details pertaining to an Image ad.
* </pre>
*
* <code>.google.ads.googleads.v10.common.ImageAdInfo image_ad = 22 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return Whether the imageAd field is set.
*/
boolean hasImageAd();
/**
* <pre>
* Immutable. Details pertaining to an Image ad.
* </pre>
*
* <code>.google.ads.googleads.v10.common.ImageAdInfo image_ad = 22 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return The imageAd.
*/
com.google.ads.googleads.v10.common.ImageAdInfo getImageAd();
/**
* <pre>
* Immutable. Details pertaining to an Image ad.
* </pre>
*
* <code>.google.ads.googleads.v10.common.ImageAdInfo image_ad = 22 [(.google.api.field_behavior) = IMMUTABLE];</code>
*/
com.google.ads.googleads.v10.common.ImageAdInfoOrBuilder getImageAdOrBuilder();
/**
* <pre>
* Details pertaining to a Video ad.
* </pre>
*
* <code>.google.ads.googleads.v10.common.VideoAdInfo video_ad = 24;</code>
* @return Whether the videoAd field is set.
*/
boolean hasVideoAd();
/**
* <pre>
* Details pertaining to a Video ad.
* </pre>
*
* <code>.google.ads.googleads.v10.common.VideoAdInfo video_ad = 24;</code>
* @return The videoAd.
*/
com.google.ads.googleads.v10.common.VideoAdInfo getVideoAd();
/**
* <pre>
* Details pertaining to a Video ad.
* </pre>
*
* <code>.google.ads.googleads.v10.common.VideoAdInfo video_ad = 24;</code>
*/
com.google.ads.googleads.v10.common.VideoAdInfoOrBuilder getVideoAdOrBuilder();
/**
* <pre>
* Details pertaining to a Video responsive ad.
* </pre>
*
* <code>.google.ads.googleads.v10.common.VideoResponsiveAdInfo video_responsive_ad = 39;</code>
* @return Whether the videoResponsiveAd field is set.
*/
boolean hasVideoResponsiveAd();
/**
* <pre>
* Details pertaining to a Video responsive ad.
* </pre>
*
* <code>.google.ads.googleads.v10.common.VideoResponsiveAdInfo video_responsive_ad = 39;</code>
* @return The videoResponsiveAd.
*/
com.google.ads.googleads.v10.common.VideoResponsiveAdInfo getVideoResponsiveAd();
/**
* <pre>
* Details pertaining to a Video responsive ad.
* </pre>
*
* <code>.google.ads.googleads.v10.common.VideoResponsiveAdInfo video_responsive_ad = 39;</code>
*/
com.google.ads.googleads.v10.common.VideoResponsiveAdInfoOrBuilder getVideoResponsiveAdOrBuilder();
/**
* <pre>
* Details pertaining to a responsive search ad.
* </pre>
*
* <code>.google.ads.googleads.v10.common.ResponsiveSearchAdInfo responsive_search_ad = 25;</code>
* @return Whether the responsiveSearchAd field is set.
*/
boolean hasResponsiveSearchAd();
/**
* <pre>
* Details pertaining to a responsive search ad.
* </pre>
*
* <code>.google.ads.googleads.v10.common.ResponsiveSearchAdInfo responsive_search_ad = 25;</code>
* @return The responsiveSearchAd.
*/
com.google.ads.googleads.v10.common.ResponsiveSearchAdInfo getResponsiveSearchAd();
/**
* <pre>
* Details pertaining to a responsive search ad.
* </pre>
*
* <code>.google.ads.googleads.v10.common.ResponsiveSearchAdInfo responsive_search_ad = 25;</code>
*/
com.google.ads.googleads.v10.common.ResponsiveSearchAdInfoOrBuilder getResponsiveSearchAdOrBuilder();
/**
* <pre>
* Details pertaining to a legacy responsive display ad.
* </pre>
*
* <code>.google.ads.googleads.v10.common.LegacyResponsiveDisplayAdInfo legacy_responsive_display_ad = 28;</code>
* @return Whether the legacyResponsiveDisplayAd field is set.
*/
boolean hasLegacyResponsiveDisplayAd();
/**
* <pre>
* Details pertaining to a legacy responsive display ad.
* </pre>
*
* <code>.google.ads.googleads.v10.common.LegacyResponsiveDisplayAdInfo legacy_responsive_display_ad = 28;</code>
* @return The legacyResponsiveDisplayAd.
*/
// NOTE(review): these look like protoc-generated accessor declarations for the
// google.ads.googleads.v10 'Ad' message's ad_data oneof (field tags are embedded
// in the Javadoc). Generated code should not be edited by hand — change the
// .proto source and regenerate instead.
com.google.ads.googleads.v10.common.LegacyResponsiveDisplayAdInfo getLegacyResponsiveDisplayAd();
/**
 * <pre>
 * Details pertaining to a legacy responsive display ad.
 * </pre>
 *
 * <code>.google.ads.googleads.v10.common.LegacyResponsiveDisplayAdInfo legacy_responsive_display_ad = 28;</code>
 */
com.google.ads.googleads.v10.common.LegacyResponsiveDisplayAdInfoOrBuilder getLegacyResponsiveDisplayAdOrBuilder();
/**
 * <pre>
 * Details pertaining to an app ad.
 * </pre>
 *
 * <code>.google.ads.googleads.v10.common.AppAdInfo app_ad = 29;</code>
 * @return Whether the appAd field is set.
 */
boolean hasAppAd();
/**
 * <pre>
 * Details pertaining to an app ad.
 * </pre>
 *
 * <code>.google.ads.googleads.v10.common.AppAdInfo app_ad = 29;</code>
 * @return The appAd.
 */
com.google.ads.googleads.v10.common.AppAdInfo getAppAd();
/**
 * <pre>
 * Details pertaining to an app ad.
 * </pre>
 *
 * <code>.google.ads.googleads.v10.common.AppAdInfo app_ad = 29;</code>
 */
com.google.ads.googleads.v10.common.AppAdInfoOrBuilder getAppAdOrBuilder();
/**
 * <pre>
 * Immutable. Details pertaining to a legacy app install ad.
 * </pre>
 *
 * <code>.google.ads.googleads.v10.common.LegacyAppInstallAdInfo legacy_app_install_ad = 30 [(.google.api.field_behavior) = IMMUTABLE];</code>
 * @return Whether the legacyAppInstallAd field is set.
 */
boolean hasLegacyAppInstallAd();
/**
 * <pre>
 * Immutable. Details pertaining to a legacy app install ad.
 * </pre>
 *
 * <code>.google.ads.googleads.v10.common.LegacyAppInstallAdInfo legacy_app_install_ad = 30 [(.google.api.field_behavior) = IMMUTABLE];</code>
 * @return The legacyAppInstallAd.
 */
com.google.ads.googleads.v10.common.LegacyAppInstallAdInfo getLegacyAppInstallAd();
/**
 * <pre>
 * Immutable. Details pertaining to a legacy app install ad.
 * </pre>
 *
 * <code>.google.ads.googleads.v10.common.LegacyAppInstallAdInfo legacy_app_install_ad = 30 [(.google.api.field_behavior) = IMMUTABLE];</code>
 */
com.google.ads.googleads.v10.common.LegacyAppInstallAdInfoOrBuilder getLegacyAppInstallAdOrBuilder();
/**
 * <pre>
 * Details pertaining to a responsive display ad.
 * </pre>
 *
 * <code>.google.ads.googleads.v10.common.ResponsiveDisplayAdInfo responsive_display_ad = 31;</code>
 * @return Whether the responsiveDisplayAd field is set.
 */
boolean hasResponsiveDisplayAd();
/**
 * <pre>
 * Details pertaining to a responsive display ad.
 * </pre>
 *
 * <code>.google.ads.googleads.v10.common.ResponsiveDisplayAdInfo responsive_display_ad = 31;</code>
 * @return The responsiveDisplayAd.
 */
com.google.ads.googleads.v10.common.ResponsiveDisplayAdInfo getResponsiveDisplayAd();
/**
 * <pre>
 * Details pertaining to a responsive display ad.
 * </pre>
 *
 * <code>.google.ads.googleads.v10.common.ResponsiveDisplayAdInfo responsive_display_ad = 31;</code>
 */
com.google.ads.googleads.v10.common.ResponsiveDisplayAdInfoOrBuilder getResponsiveDisplayAdOrBuilder();
/**
 * <pre>
 * Details pertaining to a local ad.
 * </pre>
 *
 * <code>.google.ads.googleads.v10.common.LocalAdInfo local_ad = 32;</code>
 * @return Whether the localAd field is set.
 */
boolean hasLocalAd();
/**
 * <pre>
 * Details pertaining to a local ad.
 * </pre>
 *
 * <code>.google.ads.googleads.v10.common.LocalAdInfo local_ad = 32;</code>
 * @return The localAd.
 */
com.google.ads.googleads.v10.common.LocalAdInfo getLocalAd();
/**
 * <pre>
 * Details pertaining to a local ad.
 * </pre>
 *
 * <code>.google.ads.googleads.v10.common.LocalAdInfo local_ad = 32;</code>
 */
com.google.ads.googleads.v10.common.LocalAdInfoOrBuilder getLocalAdOrBuilder();
/**
 * <pre>
 * Details pertaining to a display upload ad.
 * </pre>
 *
 * <code>.google.ads.googleads.v10.common.DisplayUploadAdInfo display_upload_ad = 33;</code>
 * @return Whether the displayUploadAd field is set.
 */
boolean hasDisplayUploadAd();
/**
 * <pre>
 * Details pertaining to a display upload ad.
 * </pre>
 *
 * <code>.google.ads.googleads.v10.common.DisplayUploadAdInfo display_upload_ad = 33;</code>
 * @return The displayUploadAd.
 */
com.google.ads.googleads.v10.common.DisplayUploadAdInfo getDisplayUploadAd();
/**
 * <pre>
 * Details pertaining to a display upload ad.
 * </pre>
 *
 * <code>.google.ads.googleads.v10.common.DisplayUploadAdInfo display_upload_ad = 33;</code>
 */
com.google.ads.googleads.v10.common.DisplayUploadAdInfoOrBuilder getDisplayUploadAdOrBuilder();
/**
 * <pre>
 * Details pertaining to an app engagement ad.
 * </pre>
 *
 * <code>.google.ads.googleads.v10.common.AppEngagementAdInfo app_engagement_ad = 34;</code>
 * @return Whether the appEngagementAd field is set.
 */
boolean hasAppEngagementAd();
/**
 * <pre>
 * Details pertaining to an app engagement ad.
 * </pre>
 *
 * <code>.google.ads.googleads.v10.common.AppEngagementAdInfo app_engagement_ad = 34;</code>
 * @return The appEngagementAd.
 */
com.google.ads.googleads.v10.common.AppEngagementAdInfo getAppEngagementAd();
/**
 * <pre>
 * Details pertaining to an app engagement ad.
 * </pre>
 *
 * <code>.google.ads.googleads.v10.common.AppEngagementAdInfo app_engagement_ad = 34;</code>
 */
com.google.ads.googleads.v10.common.AppEngagementAdInfoOrBuilder getAppEngagementAdOrBuilder();
/**
 * <pre>
 * Details pertaining to a Shopping Comparison Listing ad.
 * </pre>
 *
 * <code>.google.ads.googleads.v10.common.ShoppingComparisonListingAdInfo shopping_comparison_listing_ad = 36;</code>
 * @return Whether the shoppingComparisonListingAd field is set.
 */
boolean hasShoppingComparisonListingAd();
/**
 * <pre>
 * Details pertaining to a Shopping Comparison Listing ad.
 * </pre>
 *
 * <code>.google.ads.googleads.v10.common.ShoppingComparisonListingAdInfo shopping_comparison_listing_ad = 36;</code>
 * @return The shoppingComparisonListingAd.
 */
com.google.ads.googleads.v10.common.ShoppingComparisonListingAdInfo getShoppingComparisonListingAd();
/**
 * <pre>
 * Details pertaining to a Shopping Comparison Listing ad.
 * </pre>
 *
 * <code>.google.ads.googleads.v10.common.ShoppingComparisonListingAdInfo shopping_comparison_listing_ad = 36;</code>
 */
com.google.ads.googleads.v10.common.ShoppingComparisonListingAdInfoOrBuilder getShoppingComparisonListingAdOrBuilder();
/**
 * <pre>
 * Details pertaining to a Smart campaign ad.
 * </pre>
 *
 * <code>.google.ads.googleads.v10.common.SmartCampaignAdInfo smart_campaign_ad = 48;</code>
 * @return Whether the smartCampaignAd field is set.
 */
boolean hasSmartCampaignAd();
/**
 * <pre>
 * Details pertaining to a Smart campaign ad.
 * </pre>
 *
 * <code>.google.ads.googleads.v10.common.SmartCampaignAdInfo smart_campaign_ad = 48;</code>
 * @return The smartCampaignAd.
 */
com.google.ads.googleads.v10.common.SmartCampaignAdInfo getSmartCampaignAd();
/**
 * <pre>
 * Details pertaining to a Smart campaign ad.
 * </pre>
 *
 * <code>.google.ads.googleads.v10.common.SmartCampaignAdInfo smart_campaign_ad = 48;</code>
 */
com.google.ads.googleads.v10.common.SmartCampaignAdInfoOrBuilder getSmartCampaignAdOrBuilder();
/**
 * <pre>
 * Details pertaining to an app pre-registration ad.
 * </pre>
 *
 * <code>.google.ads.googleads.v10.common.AppPreRegistrationAdInfo app_pre_registration_ad = 50;</code>
 * @return Whether the appPreRegistrationAd field is set.
 */
boolean hasAppPreRegistrationAd();
/**
 * <pre>
 * Details pertaining to an app pre-registration ad.
 * </pre>
 *
 * <code>.google.ads.googleads.v10.common.AppPreRegistrationAdInfo app_pre_registration_ad = 50;</code>
 * @return The appPreRegistrationAd.
 */
com.google.ads.googleads.v10.common.AppPreRegistrationAdInfo getAppPreRegistrationAd();
/**
 * <pre>
 * Details pertaining to an app pre-registration ad.
 * </pre>
 *
 * <code>.google.ads.googleads.v10.common.AppPreRegistrationAdInfo app_pre_registration_ad = 50;</code>
 */
com.google.ads.googleads.v10.common.AppPreRegistrationAdInfoOrBuilder getAppPreRegistrationAdOrBuilder();
// Discriminator for the ad_data oneof: tells which of the has*/get* groups above is populated.
public com.google.ads.googleads.v10.resources.Ad.AdDataCase getAdDataCase();
}
| |
/*
* Copyright (C) 2016 - 2019 team-cachebox.de
*
* Licensed under the : GNU General Public License (GPL);
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.gnu.org/licenses/gpl.html
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.longri.cachebox3;
import com.badlogic.gdx.ApplicationAdapter;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.Preferences;
import com.badlogic.gdx.graphics.Color;
import com.badlogic.gdx.graphics.GL20;
import com.badlogic.gdx.graphics.OrthographicCamera;
import com.badlogic.gdx.graphics.g2d.Batch;
import com.badlogic.gdx.graphics.g2d.Sprite;
import com.badlogic.gdx.graphics.g2d.SpriteBatch;
import com.badlogic.gdx.utils.Scaling;
import com.badlogic.gdx.utils.reflect.ClassReflection;
import com.badlogic.gdx.utils.reflect.Constructor;
import com.badlogic.gdx.utils.viewport.ScalingViewport;
import com.badlogic.gdx.utils.viewport.Viewport;
import de.longri.cachebox3.events.EventHandler;
import de.longri.cachebox3.events.location.GpsEventHelper;
import de.longri.cachebox3.gpx.AbstractGpxStreamImporter;
import de.longri.cachebox3.gui.stages.NamedStage;
import de.longri.cachebox3.gui.stages.Splash;
import de.longri.cachebox3.gui.stages.StageManager;
import de.longri.cachebox3.gui.stages.ViewManager;
import de.longri.cachebox3.gui.views.AbstractView;
import de.longri.cachebox3.settings.Config;
import de.longri.cachebox3.sqlite.Database;
import de.longri.cachebox3.utils.NamedRunnable;
import de.longri.cachebox3.utils.converter.Base64;
import de.longri.serializable.BitStore;
import org.oscim.backend.GL;
import org.oscim.renderer.GLState;
import org.oscim.renderer.MapRenderer;
import org.oscim.theme.XmlThemeBuilder;
import org.oscim.utils.Parameters;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.concurrent.atomic.AtomicBoolean;
import static org.oscim.backend.GLAdapter.gl;
import static org.oscim.renderer.MapRenderer.COORD_SCALE;
import static org.slf4j.impl.LibgdxLoggerFactory.EXCLUDE_LIST;
/**
 * Central libGDX {@link ApplicationAdapter} of CacheBox 3.
 * <p>
 * Drives the render loop (VTM map rendering followed by the scene2d stages),
 * persists and restores UI instance state through libGDX {@link Preferences}
 * backed by a Base64-encoded {@link BitStore}, and opens/closes the databases
 * on pause/resume.
 */
public class CacheboxMain extends ApplicationAdapter {
    static private final Logger log = LoggerFactory.getLogger(CacheboxMain.class);
    // Preferences file name AND key under which the serialized instance state is stored.
    static private final String SAVE_INSTANCE_KEY = "SaveInstanceState";
    // While true (and a MapRenderer exists), the map is drawn before the stages each frame.
    public static AtomicBoolean drawMap = new AtomicBoolean(false);

    static {
        // VTM map library configuration; must run before any map object is created.
        Parameters.MAP_EVENT_LAYER2 = true;
        Parameters.TEXTURE_ATLAS = true;
        Parameters.POT_TEXTURES = true;
        COORD_SCALE = 1;
        EventHandler.INIT();
        // Logger include/exclude debug toggles (kept for development):
        // INCLUDE_LIST.add(CB.class.getName());
        // INCLUDE_LIST.add("de.longri.cachebox3.IOS_DescriptionView");
        // INCLUDE_LIST.add(DescriptionView.class.getName());
        // INCLUDE_LIST.add(SvgSkinUtil.class.getName());
        // INCLUDE_LIST.add(SkinLoaderTask.class.getName());
        // INCLUDE_LIST.add("de.longri.cachebox3.IOS_DescriptionView");
        // INCLUDE_LIST.add("de.longri.cachebox3.gui.widgets.filter_settings.FilterSetListView");
        // INCLUDE_LIST.add(BlockUiProgress_Activity.class.getName());
        // INCLUDE_LIST.add(CircularProgressWidget.class.getName());
        EXCLUDE_LIST.add("de.longri.cachebox3.gui.animations.map.MapAnimator");
        EXCLUDE_LIST.add("de.longri.cachebox3.events.GpsEventHelper");
        // EXCLUDE_LIST.add(StageManager.class.getName());
        EXCLUDE_LIST.add(NamedStage.class.getName());
        // EXCLUDE_LIST.add(CB.class.getName());
        EXCLUDE_LIST.add(XmlThemeBuilder.class.getName());
        //
        // EXCLUDE_LIST.add("com.badlogic.gdx.sqlite.desktop.DesktopDatabase");
        // EXCLUDE_LIST.add("com.badlogic.gdx.sqlite.android.AndroidDatabase");
        // EXCLUDE_LIST.add("com.badlogic.gdx.sqlite.robovm.RobovmDatabase");
        // EXCLUDE_LIST.add("EMPTY");
        // EXCLUDE_LIST.add("DB:cachebox");
        // EXCLUDE_LIST.add(LocationAccuracyLayer.class.getName());
        // EXCLUDE_LIST.add(LocationTextureRenderer.class.getName());
        // EXCLUDE_LIST.add(DoubleAnimator.class.getName());
        EXCLUDE_LIST.add(GpsEventHelper.class.getName());
        // EXCLUDE_LIST.add(DirectLineLayer.class.getName());
        // EXCLUDE_LIST.add(EventHandler.class.getName());
        EXCLUDE_LIST.add(AbstractGpxStreamImporter.class.getName());
        // iOS Platform debug includes
        // INCLUDE_LIST.add("org.oscim.ios.backend.IOS_RealSvgBitmap");
        // INCLUDE_LIST.add("de.longri.cachebox3.IOS_PlatformConnector");
        // INCLUDE_LIST.add("de.longri.cachebox3.IOS_PlatformConnector");
        // INCLUDE_LIST.add("de.longri.cachebox3.IOS_Launcher");
        // INCLUDE_LIST.add("de.longri.cachebox3.IOS_DescriptionView");
        // INCLUDE_LIST.add("de.longri.cachebox3.GenerateApiKeyWebViewController");
        // INCLUDE_LIST.add(GetApiKey_Activity.class.getName());
        // INCLUDE_LIST.add(Config.class.getName());
    }

    public MapRenderer mMapRenderer;
    // Small colored square drawn as FPS/heartbeat indicator in test versions only.
    protected Sprite FpsInfoSprite;
    private int mapDrawX, mapDrawY, mapDrawWidth, mapDrawHeight;
    // Lazily taken from the StageManager in render(); stays null in non-test versions.
    private Batch batch;
    private int FpsInfoPos = 0;
    // Saved instance state read at startup; null when there is nothing to restore.
    private BitStore instanceStateReader;

    /**
     * Creates the {@link StageManager}, tries to read a previously saved
     * instance state and shows the {@link Splash} stage. Once the splash has
     * finished its initialisation, the main {@link ViewManager} stage is
     * installed and the saved state (if any) is restored.
     */
    @Override
    public void create() {
        log.debug("create");
        CB.cbMain = this;
        CB.stageManager = new StageManager();
        Gdx.graphics.setContinuousRendering(true);
        //maybe restore last instance state
        try {
            Preferences prefs = Gdx.app.getPreferences(SAVE_INSTANCE_KEY);
            instanceStateReader = new BitStore(Base64.decode(prefs.getString(SAVE_INSTANCE_KEY)));
            if (instanceStateReader.readBool()) {
                // exit was called restore nothing
                instanceStateReader = null;
            }
        } catch (Exception e) {
            // exit was called restore nothing
            instanceStateReader = null;
        }
        final Viewport viewport = new ScalingViewport(Scaling.stretch, Gdx.graphics.getWidth(), Gdx.graphics.getHeight(), new OrthographicCamera());
        // This local intentionally shadows the 'batch' field: it belongs to the
        // splash stage only and is disposed when the splash is replaced below.
        final Batch batch = new SpriteBatch();
        Splash splash = new Splash(() -> {
            log.debug("Splash ready");
            Config.AppRaterlaunchCount.setValue(Config.AppRaterlaunchCount.getValue() + 1);
            Config.AcceptChanges();
            // Splash is ready with initialisation
            // now switch Stage to ViewManager
            Gdx.app.postRunnable(() -> {
                log.debug("switch Stage to ViewManager");
                new ViewManager(CacheboxMain.this, CB.stageManager.viewport, CB.stageManager.batch);
                CB.stageManager.setMainStage(CB.viewmanager);
                batch.dispose();
                FpsInfoSprite = null;
                Gdx.graphics.setContinuousRendering(true);
                restoreInstanceState(instanceStateReader);
            });
        }, viewport, batch, instanceStateReader);
        CB.stageManager.setMainStage(splash);
        Gdx.graphics.requestRendering();
        CB.initThreadCheck();
        log.debug("create end");
    }

    /**
     * Sets the screen-space rectangle into which the map is rendered and
     * forwards the new size to the {@link MapRenderer}.
     *
     * @param x      left edge in pixels
     * @param y      bottom edge in pixels
     * @param width  width in pixels
     * @param height height in pixels
     */
    public void setMapPosAndSize(int x, int y, int width, int height) {
        mapDrawX = x;
        mapDrawY = y;
        mapDrawWidth = width;
        mapDrawHeight = height;
        mMapRenderer.onSurfaceChanged(width, height);
    }

    /**
     * Per-frame rendering: draws the VTM map (if enabled) into its viewport,
     * then the stage hierarchy, and finally the FPS indicator in test versions.
     */
    @Override
    public void render() {
        if (CB.isBackground) return;
        CB.stateTime += Gdx.graphics.getDeltaTime();
        if (drawMap.get() && mMapRenderer != null && CB.stageManager.isMainStageOnlyDrawing()) {
            GLState.enableVertexArrays(-1, -1);
            // set map position and size
            gl.viewport(mapDrawX, mapDrawY, mapDrawWidth, mapDrawHeight);
            gl.frontFace(GL.CW);
            try {
                mMapRenderer.onDrawFrame();
            } catch (Exception e) {
                e.printStackTrace();
            }
            //release Buffers from map renderer
            GLState.bindVertexBuffer(0);
            GLState.bindElementBuffer(0);
        } else {
            // if MapRenderer not drawn, we must clear before draw stage
            Gdx.gl.glClearColor(CB.backgroundColor.r, CB.backgroundColor.g, CB.backgroundColor.b, CB.backgroundColor.a);
            Gdx.gl.glClear(GL20.GL_COLOR_BUFFER_BIT | GL20.GL_DEPTH_BUFFER_BIT | (Gdx.graphics.getBufferFormat().coverageSampling ?
                    GL20.GL_COVERAGE_BUFFER_BIT_NV : 0));
        }
        gl.flush();
        gl.viewport(0, 0, Gdx.graphics.getWidth(), Gdx.graphics.getHeight());
        // MapRender sets the FrontFace to GL.CW, so we revert to GL.CCW
        gl.frontFace(GL.CCW);
        try {
            CB.stageManager.draw();
        } catch (Exception e) {
            log.error("Draw StageManager", e);
        }
        if (CB.isTestVersion()) {
            // Draw a small red square that wanders across the bottom of the
            // screen; a frozen square means the render loop has stalled.
            float FpsInfoSize = CB.getScaledFloat(4f);
            if (FpsInfoSprite != null) {
                batch = CB.stageManager.getBatch();
                if (!batch.isDrawing())
                    batch.begin();
                Color lastColor = batch.getColor();
                batch.setColor(1.0f, 0.0f, 0.0f, 1.0f);
                batch.draw(FpsInfoSprite, FpsInfoPos, 2, FpsInfoSize, FpsInfoSize);
                batch.setColor(lastColor);
                batch.end();
            } else {
                Sprite sprite = CB.getSprite("color");
                if (sprite != null) {
                    FpsInfoSprite = new Sprite(sprite);
                    FpsInfoSprite.setColor(1.0f, 0.0f, 0.0f, 1.0f);
                    FpsInfoSprite.setSize(FpsInfoSize, FpsInfoSize);
                }
            }
            FpsInfoPos += FpsInfoSize;
            if (FpsInfoPos > 60 * FpsInfoSize) {
                FpsInfoPos = 0;
            }
        }
    }

    /**
     * Disposes UI resources; forwards to the platform quit handler when the
     * user requested to quit the application.
     */
    @Override
    public void dispose() {
        log.debug("onDispose");
        // Guard both: the splash may never have finished (viewmanager == null)
        // and 'batch' is only assigned when the test-version FPS overlay drew.
        if (CB.viewmanager != null) CB.viewmanager.dispose();
        if (batch != null) batch.dispose();
        if (CB.isQuitCalled()) {
            PlatformConnector.callQuit();
        }
    }

    /**
     * Saves the instance state (or a "nothing to restore" marker when quitting)
     * into the preferences, stores the selected cache and map state, and closes
     * all databases.
     */
    @Override
    public void pause() {
        log.debug("onPause");
        CB.isBackground = true;
        if (CB.viewmanager != null) {
            if (CB.isQuitCalled()) {
                log.debug("save instance state quit called");
                CB.viewmanager.quit();
                BitStore saveInstanceStateWriter = new BitStore();
                saveInstanceStateWriter.write(true); // nothing to restore
                Preferences prefs = Gdx.app.getPreferences(SAVE_INSTANCE_KEY);
                prefs.putString(SAVE_INSTANCE_KEY, Base64.encodeBytes(saveInstanceStateWriter.getArray()));
                prefs.flush();
            } else {
                log.debug("save instance state");
                CB.viewmanager.pause();
                BitStore saveInstanceStateWriter = new BitStore();
                saveInstanceStateWriter.write(false);
                //store DB name
                saveInstanceStateWriter.write(Config.DatabaseName.getValue().replace(CB.WorkPath, "?"));
                saveInstanceState(saveInstanceStateWriter);
                Preferences prefs = Gdx.app.getPreferences(SAVE_INSTANCE_KEY);
                prefs.putString(SAVE_INSTANCE_KEY, Base64.encodeBytes(saveInstanceStateWriter.getArray()));
                prefs.flush();
            }
        }
        if (EventHandler.getSelectedCache() != null) {
            //save selected Cache
            Config.LastSelectedCache.setValue(EventHandler.getSelectedCache().getGeoCacheCode().toString());
            log.debug("Store LastSelectedCache = " + EventHandler.getSelectedCache().getGeoCacheCode());
        }
        //store MapState
        Config.lastMapState.setValue(CB.lastMapState.serialize());
        Config.lastMapStateBeforeCar.setValue(CB.lastMapStateBeforeCar.serialize());
        Config.AcceptChanges();
        log.debug("App on pause close databases");
        if (Database.Data != null) Database.Data.close();
        if (Database.Settings != null) Database.Settings.close();
        if (Database.Drafts != null) Database.Drafts.close();
    }

    /**
     * Reopens the databases and restores the map state on the GL thread after
     * the application returns to the foreground.
     */
    @Override
    public void resume() {
        log.debug("onResume");
        FpsInfoSprite = null;
        Gdx.graphics.setContinuousRendering(true);
        CB.postOnGlThread(new NamedRunnable("onResume") {
            @Override
            public void run() {
                if (CB.viewmanager != null) CB.viewmanager.resume();
                log.debug("App on resume reopen databases");
                if (Database.Data != null) Database.Data.open();
                if (Database.Settings != null) Database.Settings.open();
                if (Database.Drafts != null) Database.Drafts.open();
                CB.isBackground = false;
                //restore MapState
                CB.lastMapState.deserialize(Config.lastMapState.getValue());
                CB.lastMapStateBeforeCar.deserialize(Config.lastMapStateBeforeCar.getValue());
            }
        });
    }

    /**
     * Writes the class name, view name and view-specific state of the current
     * view into the given writer.
     *
     * @param writer target store; must not be null
     */
    private void saveInstanceState(BitStore writer) {
        // save last actView
        AbstractView abstractView = CB.viewmanager.getCurrentView();
        writer.write(abstractView.getClass().getName());
        writer.write(abstractView.name);
        abstractView.saveInstanceState(writer);
    }

    /**
     * Re-creates and shows the last visible view via its {@code (BitStore)}
     * constructor, slightly delayed so the ViewManager is fully installed.
     *
     * @param reader saved state, or null when there is nothing to restore
     */
    private void restoreInstanceState(BitStore reader) {
        if (reader == null) {
            // first start or explicit quit: nothing was stored
            return;
        }
        //restore last view
        CB.postOnGLThreadDelayed(500, new NamedRunnable("Restore last View") {
            @Override
            public void run() {
                try {
                    String className = reader.readString();
                    Class clazz = ClassReflection.forName(className);
                    Constructor constructor = ClassReflection.getConstructor(clazz, de.longri.serializable.BitStore.class);
                    Object obj = constructor.newInstance(reader);
                    AbstractView newInstanceAbstractView = (AbstractView) obj;
                    CB.viewmanager.showView(newInstanceAbstractView);
                } catch (Exception e) {
                    // keep the cause; a bare message hid why restoring failed
                    log.error("can't restore last view", e);
                }
            }
        });
    }
}
| |
/*
* Zed Attack Proxy (ZAP) and its related class files.
*
* ZAP is an HTTP/HTTPS proxy for assessing web application security.
*
* Copyright 2012 The ZAP Development Team
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.zaproxy.zap.extension.pscanrules;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import net.htmlparser.jericho.Element;
import net.htmlparser.jericho.HTMLElementName;
import net.htmlparser.jericho.Source;
import net.htmlparser.jericho.StartTagType;
import net.htmlparser.jericho.Tag;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.parosproxy.paros.Constant;
import org.parosproxy.paros.core.scanner.Alert;
import org.parosproxy.paros.network.HttpMessage;
import org.zaproxy.zap.extension.pscan.PassiveScanThread;
import org.zaproxy.zap.extension.pscan.PluginPassiveScanner;
/**
 * Passive scan rule that raises an informational alert when suspicious comments
 * (patterns loaded from {@code xml/suspicious-comments.txt}) appear in HTML
 * comments, script elements, or plain JavaScript responses.
 */
public class InformationDisclosureSuspiciousCommentsScanRule extends PluginPassiveScanner {

    private static final String MESSAGE_PREFIX =
            "pscanrules.informationdisclosuresuspiciouscomments.";
    private static final int PLUGIN_ID = 10027;
    // Longest snippet of the matched comment/element included in the alert text.
    private static final int MAX_ELEMENT_CHRS_TO_REPORT = 128;
    public static final String suspiciousCommentsListDir = "xml";
    public static final String suspiciousCommentsListFile = "suspicious-comments.txt";

    private static final Logger logger =
            LogManager.getLogger(InformationDisclosureSuspiciousCommentsScanRule.class);

    // Lazily loaded pattern cache; access only through getPatterns() (synchronized).
    private static List<Pattern> patterns = null;

    /**
     * Scans a text response for suspicious comments and raises at most one
     * alert per matching pattern (with a count when it matched more than once).
     */
    @Override
    public void scanHttpResponseReceive(HttpMessage msg, int id, Source source) {
        List<Pattern> patterns = getPatterns();
        Map<String, List<AlertSummary>> alertMap = new HashMap<>();
        if (msg.getResponseBody().length() > 0 && msg.getResponseHeader().isText()) {
            if (msg.getResponseHeader().isJavaScript()) {
                // Just treat as text
                String[] lines = msg.getResponseBody().toString().split("\n");
                for (String line : lines) {
                    for (Pattern pattern : patterns) {
                        Matcher m = pattern.matcher(line);
                        if (m.find()) {
                            recordAlertSummary(
                                    alertMap,
                                    new AlertSummary(
                                            pattern.toString(),
                                            line,
                                            Alert.CONFIDENCE_LOW,
                                            m.group()));
                            break; // Only need to record this line once
                        }
                    }
                }
            } else {
                // Can use the parser
                // Check the comments
                List<Tag> tags = source.getAllTags(StartTagType.COMMENT);
                for (Tag tag : tags) {
                    String tagStr = tag.toString();
                    for (Pattern pattern : patterns) {
                        Matcher m = pattern.matcher(tagStr);
                        if (m.find()) {
                            recordAlertSummary(
                                    alertMap,
                                    new AlertSummary(
                                            pattern.toString(),
                                            tagStr,
                                            Alert.CONFIDENCE_MEDIUM,
                                            m.group()));
                            break; // Only need to record this comment once
                        }
                    }
                }
                // Check the scripts
                Element el;
                int offset = 0;
                while ((el = source.getNextElement(offset, HTMLElementName.SCRIPT)) != null) {
                    for (Pattern pattern : patterns) {
                        String elStr = el.toString();
                        Matcher m = pattern.matcher(elStr);
                        if (m.find()) {
                            recordAlertSummary(
                                    alertMap,
                                    new AlertSummary(
                                            pattern.toString(),
                                            elStr,
                                            Alert.CONFIDENCE_LOW,
                                            m.group()));
                            break; // Only need to record this script once
                        }
                    }
                    offset = el.getEnd();
                }
            }
        }
        // Only raise one alert for each pattern detected, giving a total count if > 1 instance
        for (Entry<String, List<AlertSummary>> entry : alertMap.entrySet()) {
            String other;
            AlertSummary firstSummary = entry.getValue().get(0);
            if (entry.getValue().size() == 1) {
                other =
                        Constant.messages.getString(
                                MESSAGE_PREFIX + "otherinfo",
                                firstSummary.getPattern(),
                                truncateString(firstSummary.getDetail()));
            } else {
                other =
                        Constant.messages.getString(
                                MESSAGE_PREFIX + "otherinfo2",
                                firstSummary.getPattern(),
                                truncateString(firstSummary.getDetail()),
                                entry.getValue().size());
            }
            this.raiseAlert(
                    msg, id, other, firstSummary.getConfidence(), firstSummary.getEvidence());
        }
    }

    // Groups summaries by pattern so one alert per pattern can be raised later.
    private static void recordAlertSummary(
            Map<String, List<AlertSummary>> alertMap, AlertSummary summary) {
        alertMap.computeIfAbsent(summary.getPattern(), k -> new ArrayList<>()).add(summary);
    }

    // Clips detail strings so alert texts stay readable.
    private String truncateString(String str) {
        if (str.length() > MAX_ELEMENT_CHRS_TO_REPORT) {
            return str.substring(0, MAX_ELEMENT_CHRS_TO_REPORT);
        }
        return str;
    }

    private void raiseAlert(
            HttpMessage msg, int id, String detail, int confidence, String evidence) {
        newAlert()
                .setRisk(Alert.RISK_INFO)
                .setConfidence(confidence)
                .setDescription(getDescription())
                .setOtherInfo(detail)
                .setSolution(getSolution())
                .setCweId(200) // CWE Id 200 - Information Exposure
                .setWascId(13) // WASC Id 13 - Info leakage
                .setEvidence(evidence)
                .raise();
    }

    /**
     * Returns the suspicious-comment patterns, loading them from the ZAP home
     * directory on first use.
     * <p>
     * Synchronized: passive scan rules may run on several threads, and the
     * previous unsynchronized lazy init could publish a half-built list. The
     * file is read as UTF-8 instead of the platform default charset.
     *
     * @return the (possibly empty) list of case-insensitive word patterns
     */
    private static synchronized List<Pattern> getPatterns() {
        if (patterns == null) {
            List<Pattern> loaded = new ArrayList<>();
            try {
                File f =
                        new File(
                                Constant.getZapHome()
                                        + File.separator
                                        + suspiciousCommentsListDir
                                        + File.separator
                                        + suspiciousCommentsListFile);
                if (!f.exists()) {
                    throw new IOException("Couldn't find resource: " + f.getAbsolutePath());
                }
                try (BufferedReader reader =
                        Files.newBufferedReader(f.toPath(), StandardCharsets.UTF_8)) {
                    String line = null;
                    while ((line = reader.readLine()) != null) {
                        line = line.trim();
                        // '#' lines are comments in the pattern file
                        if (!line.startsWith("#") && line.length() > 0) {
                            loaded.add(
                                    Pattern.compile(
                                            "\\b" + line + "\\b", Pattern.CASE_INSENSITIVE));
                        }
                    }
                }
            } catch (IOException e) {
                logger.error(
                        "Error on opening/reading suspicious comments file: {}{}{}{} Error: {}",
                        File.separator,
                        suspiciousCommentsListDir,
                        File.separator,
                        suspiciousCommentsListFile,
                        e.getMessage());
            }
            // Assign only the fully-built list (cache the empty list on error too,
            // matching the previous behavior of not retrying every scan).
            patterns = loaded;
        }
        return patterns;
    }

    @Override
    public void setParent(PassiveScanThread parent) {
        // Nothing to do.
    }

    @Override
    public String getName() {
        return Constant.messages.getString(MESSAGE_PREFIX + "name");
    }

    private String getSolution() {
        return Constant.messages.getString(MESSAGE_PREFIX + "soln");
    }

    private String getDescription() {
        return Constant.messages.getString(MESSAGE_PREFIX + "desc");
    }

    @Override
    public int getPluginId() {
        return PLUGIN_ID;
    }

    /** Immutable record of one pattern match, used to aggregate alerts per pattern. */
    private static class AlertSummary {
        private final String pattern;
        private final String detail;
        private final int confidence;
        private final String evidence;

        public AlertSummary(String pattern, String detail, int confidence, String evidence) {
            super();
            this.pattern = pattern;
            this.detail = detail;
            this.confidence = confidence;
            this.evidence = evidence;
        }

        public String getPattern() {
            return pattern;
        }

        public String getDetail() {
            return detail;
        }

        public int getConfidence() {
            return confidence;
        }

        public String getEvidence() {
            return evidence;
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.io.compress;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.*;
import junit.framework.TestCase;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.conf.Configuration;
/**
 * JUnit 3 tests for {@link CompressionCodecFactory}: codec lookup by file
 * extension, by class name, and by short codec name, with the default, empty,
 * and custom codec configurations.
 */
public class TestCodecFactory extends TestCase {

  /** Stub codec that returns null for every stream/compressor; subclasses only vary the extension. */
  private static class BaseCodec implements CompressionCodec {
    private Configuration conf;

    public void setConf(Configuration conf) {
      this.conf = conf;
    }

    public Configuration getConf() {
      return conf;
    }

    public CompressionOutputStream createOutputStream(OutputStream out)
    throws IOException {
      return null;
    }

    public Class<? extends Compressor> getCompressorType() {
      return null;
    }

    public Compressor createCompressor() {
      return null;
    }

    public CompressionInputStream createInputStream(InputStream in,
                                                    Decompressor decompressor)
    throws IOException {
      return null;
    }

    public CompressionInputStream createInputStream(InputStream in)
    throws IOException {
      return null;
    }

    public CompressionOutputStream createOutputStream(OutputStream out,
                                                      Compressor compressor)
    throws IOException {
      return null;
    }

    public Class<? extends Decompressor> getDecompressorType() {
      return null;
    }

    public Decompressor createDecompressor() {
      return null;
    }

    public String getDefaultExtension() {
      return ".base";
    }
  }

  // NOTE: extension deliberately has no leading '.', to exercise that case.
  private static class BarCodec extends BaseCodec {
    @Override
    public String getDefaultExtension() {
      return "bar";
    }
  }

  private static class FooBarCodec extends BaseCodec {
    @Override
    public String getDefaultExtension() {
      return ".foo.bar";
    }
  }

  private static class FooCodec extends BaseCodec {
    @Override
    public String getDefaultExtension() {
      return ".foo";
    }
  }

  /**
   * Returns a factory for a given set of codecs
   * @param classes the codec classes to include
   * @return a new factory
   */
  private static CompressionCodecFactory setClasses(Class[] classes) {
    Configuration conf = new Configuration();
    CompressionCodecFactory.setCodecClasses(conf, Arrays.asList(classes));
    return new CompressionCodecFactory(conf);
  }

  /** Asserts that {@code actual} is exactly an instance of {@code expected}. */
  private static void checkCodec(String msg,
                                 Class expected, CompressionCodec actual) {
    assertEquals(msg + " unexpected codec found",
                 expected.getName(),
                 actual.getClass().getName());
  }

  /**
   * Exercises codec lookup by path extension, class name, and codec name.
   * <p>
   * Must be a non-static instance method: JUnit 3 only runs public, non-static,
   * no-argument {@code test*} methods, so the previous {@code static} modifier
   * kept this test from being executed.
   */
  public void testFinding() {
    CompressionCodecFactory factory =
        new CompressionCodecFactory(new Configuration());
    // default config: gzip/bzip2/deflate are registered, BarCodec is not
    CompressionCodec codec = factory.getCodec(new Path("/tmp/foo.bar"));
    assertEquals("default factory foo codec", null, codec);
    codec = factory.getCodecByClassName(BarCodec.class.getCanonicalName());
    assertEquals("default factory foo codec", null, codec);
    codec = factory.getCodec(new Path("/tmp/foo.gz"));
    checkCodec("default factory for .gz", GzipCodec.class, codec);
    codec = factory.getCodecByClassName(GzipCodec.class.getCanonicalName());
    checkCodec("default factory for gzip codec", GzipCodec.class, codec);
    // name lookup is case-insensitive and accepts the 'Codec' suffix
    codec = factory.getCodecByName("gzip");
    checkCodec("default factory for gzip codec", GzipCodec.class, codec);
    codec = factory.getCodecByName("GZIP");
    checkCodec("default factory for gzip codec", GzipCodec.class, codec);
    codec = factory.getCodecByName("GZIPCodec");
    checkCodec("default factory for gzip codec", GzipCodec.class, codec);
    codec = factory.getCodecByName("gzipcodec");
    checkCodec("default factory for gzip codec", GzipCodec.class, codec);
    Class klass = factory.getCodecClassByName("gzipcodec");
    assertEquals(GzipCodec.class, klass);
    codec = factory.getCodec(new Path("/tmp/foo.bz2"));
    checkCodec("default factory for .bz2", BZip2Codec.class, codec);
    codec = factory.getCodecByClassName(BZip2Codec.class.getCanonicalName());
    checkCodec("default factory for bzip2 codec", BZip2Codec.class, codec);
    codec = factory.getCodecByName("bzip2");
    checkCodec("default factory for bzip2 codec", BZip2Codec.class, codec);
    codec = factory.getCodecByName("bzip2codec");
    checkCodec("default factory for bzip2 codec", BZip2Codec.class, codec);
    codec = factory.getCodecByName("BZIP2");
    checkCodec("default factory for bzip2 codec", BZip2Codec.class, codec);
    codec = factory.getCodecByName("BZIP2CODEC");
    checkCodec("default factory for bzip2 codec", BZip2Codec.class, codec);
    codec = factory.getCodecByClassName(DeflateCodec.class.getCanonicalName());
    checkCodec("default factory for deflate codec", DeflateCodec.class, codec);
    codec = factory.getCodecByName("deflate");
    checkCodec("default factory for deflate codec", DeflateCodec.class, codec);
    codec = factory.getCodecByName("deflatecodec");
    checkCodec("default factory for deflate codec", DeflateCodec.class, codec);
    codec = factory.getCodecByName("DEFLATE");
    checkCodec("default factory for deflate codec", DeflateCodec.class, codec);
    codec = factory.getCodecByName("DEFLATECODEC");
    checkCodec("default factory for deflate codec", DeflateCodec.class, codec);
    // empty config: nothing resolves
    factory = setClasses(new Class[0]);
    codec = factory.getCodec(new Path("/tmp/foo.bar"));
    assertEquals("empty codec bar codec", null, codec);
    codec = factory.getCodecByClassName(BarCodec.class.getCanonicalName());
    assertEquals("empty codec bar codec", null, codec);
    codec = factory.getCodec(new Path("/tmp/foo.gz"));
    assertEquals("empty codec gz codec", null, codec);
    codec = factory.getCodecByClassName(GzipCodec.class.getCanonicalName());
    assertEquals("empty codec gz codec", null, codec);
    codec = factory.getCodec(new Path("/tmp/foo.bz2"));
    assertEquals("empty factory for .bz2", null, codec);
    codec = factory.getCodecByClassName(BZip2Codec.class.getCanonicalName());
    assertEquals("empty factory for bzip2 codec", null, codec);
    // custom config: only the three stub codecs are registered
    factory = setClasses(new Class[]{BarCodec.class, FooCodec.class,
                                     FooBarCodec.class});
    codec = factory.getCodec(new Path("/tmp/.foo.bar.gz"));
    assertEquals("full factory gz codec", null, codec);
    codec = factory.getCodecByClassName(GzipCodec.class.getCanonicalName());
    assertEquals("full codec gz codec", null, codec);
    codec = factory.getCodec(new Path("/tmp/foo.bz2"));
    assertEquals("full factory for .bz2", null, codec);
    codec = factory.getCodecByClassName(BZip2Codec.class.getCanonicalName());
    assertEquals("full codec bzip2 codec", null, codec);
    codec = factory.getCodec(new Path("/tmp/foo.bar"));
    checkCodec("full factory bar codec", BarCodec.class, codec);
    codec = factory.getCodecByClassName(BarCodec.class.getCanonicalName());
    checkCodec("full factory bar codec", BarCodec.class, codec);
    codec = factory.getCodecByName("bar");
    checkCodec("full factory bar codec", BarCodec.class, codec);
    codec = factory.getCodecByName("BAR");
    checkCodec("full factory bar codec", BarCodec.class, codec);
    // the longest matching extension (.foo.bar) must win over .bar
    codec = factory.getCodec(new Path("/tmp/foo/baz.foo.bar"));
    checkCodec("full factory foo bar codec", FooBarCodec.class, codec);
    codec = factory.getCodecByClassName(FooBarCodec.class.getCanonicalName());
    checkCodec("full factory foo bar codec", FooBarCodec.class, codec);
    codec = factory.getCodecByName("foobar");
    checkCodec("full factory foo bar codec", FooBarCodec.class, codec);
    codec = factory.getCodecByName("FOOBAR");
    checkCodec("full factory foo bar codec", FooBarCodec.class, codec);
    codec = factory.getCodec(new Path("/tmp/foo.foo"));
    checkCodec("full factory foo codec", FooCodec.class, codec);
    codec = factory.getCodecByClassName(FooCodec.class.getCanonicalName());
    checkCodec("full factory foo codec", FooCodec.class, codec);
    codec = factory.getCodecByName("foo");
    checkCodec("full factory foo codec", FooCodec.class, codec);
    codec = factory.getCodecByName("FOO");
    checkCodec("full factory foo codec", FooCodec.class, codec);
  }
}
| |
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.pzexoplayer.upstream;
import com.google.android.pzexoplayer.C;
import com.google.android.pzexoplayer.util.Assertions;
import com.google.android.pzexoplayer.util.Predicate;
import com.google.android.pzexoplayer.util.Util;
import android.text.TextUtils;
import android.util.Log;
import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;
import java.io.InterruptedIOException;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.NoRouteToHostException;
import java.net.ProtocolException;
import java.net.URL;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicReference;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* A {@link HttpDataSource} that uses Android's {@link HttpURLConnection}.
* <p>
* By default this implementation will not follow cross-protocol redirects (i.e. redirects from
* HTTP to HTTPS or vice versa). Cross-protocol redirects can be enabled by using the
* {@link #DefaultHttpDataSource(String, Predicate, TransferListener, int, int, boolean)}
* constructor and passing {@code true} as the final argument.
*/
public class DefaultHttpDataSource implements HttpDataSource {

  /**
   * The default connection timeout, in milliseconds.
   */
  public static final int DEFAULT_CONNECT_TIMEOUT_MILLIS = 8 * 1000;
  /**
   * The default read timeout, in milliseconds.
   */
  public static final int DEFAULT_READ_TIMEOUT_MILLIS = 8 * 1000;

  private static final int MAX_REDIRECTS = 20; // Same limit as okhttp.
  private static final String TAG = "HttpDataSource";
  // Matches e.g. "bytes 0-1023/146515": group 1 = first byte, group 2 = last byte,
  // group 3 = total resource length.
  private static final Pattern CONTENT_RANGE_HEADER =
      Pattern.compile("^bytes (\\d+)-(\\d+)/(\\d+)$");
  // Scratch buffer shared across all instances for skipInternal(). Ownership is
  // transferred atomically via getAndSet(null)/set(buffer) to avoid re-allocation.
  private static final AtomicReference<byte[]> skipBufferReference = new AtomicReference<>();

  private final boolean allowCrossProtocolRedirects;
  private final int connectTimeoutMillis;
  private final int readTimeoutMillis;
  private final String userAgent;
  private final Predicate<String> contentTypePredicate;
  // Guarded by synchronized (requestProperties): may be mutated concurrently via
  // setRequestProperty/clearRequestProperty/clearAllRequestProperties.
  private final HashMap<String, String> requestProperties;
  private final TransferListener listener;

  // Per-request state, valid between open() and close().
  private DataSpec dataSpec;
  private HttpURLConnection connection;
  private InputStream inputStream;
  private boolean opened;

  // Byte accounting for the current request. bytesToSkip covers the case where the
  // server ignored a range request; bytesToRead is the post-skip payload length.
  private long bytesToSkip;
  private long bytesToRead;
  private long bytesSkipped;
  private long bytesRead;

  /**
   * @param userAgent The User-Agent string that should be used.
   * @param contentTypePredicate An optional {@link Predicate}. If a content type is
   *     rejected by the predicate then a {@link HttpDataSource.InvalidContentTypeException} is
   *     thrown from {@link #open(DataSpec)}.
   */
  public DefaultHttpDataSource(String userAgent, Predicate<String> contentTypePredicate) {
    this(userAgent, contentTypePredicate, null);
  }

  /**
   * @param userAgent The User-Agent string that should be used.
   * @param contentTypePredicate An optional {@link Predicate}. If a content type is
   *     rejected by the predicate then a {@link HttpDataSource.InvalidContentTypeException} is
   *     thrown from {@link #open(DataSpec)}.
   * @param listener An optional listener.
   */
  public DefaultHttpDataSource(String userAgent, Predicate<String> contentTypePredicate,
      TransferListener listener) {
    this(userAgent, contentTypePredicate, listener, DEFAULT_CONNECT_TIMEOUT_MILLIS,
        DEFAULT_READ_TIMEOUT_MILLIS);
  }

  /**
   * @param userAgent The User-Agent string that should be used.
   * @param contentTypePredicate An optional {@link Predicate}. If a content type is
   *     rejected by the predicate then a {@link HttpDataSource.InvalidContentTypeException} is
   *     thrown from {@link #open(DataSpec)}.
   * @param listener An optional listener.
   * @param connectTimeoutMillis The connection timeout, in milliseconds. A timeout of zero is
   *     interpreted as an infinite timeout.
   * @param readTimeoutMillis The read timeout, in milliseconds. A timeout of zero is interpreted
   *     as an infinite timeout.
   */
  public DefaultHttpDataSource(String userAgent, Predicate<String> contentTypePredicate,
      TransferListener listener, int connectTimeoutMillis, int readTimeoutMillis) {
    this(userAgent, contentTypePredicate, listener, connectTimeoutMillis, readTimeoutMillis, false);
  }

  /**
   * @param userAgent The User-Agent string that should be used.
   * @param contentTypePredicate An optional {@link Predicate}. If a content type is
   *     rejected by the predicate then a {@link HttpDataSource.InvalidContentTypeException} is
   *     thrown from {@link #open(DataSpec)}.
   * @param listener An optional listener.
   * @param connectTimeoutMillis The connection timeout, in milliseconds. A timeout of zero is
   *     interpreted as an infinite timeout. Pass {@link #DEFAULT_CONNECT_TIMEOUT_MILLIS} to use
   *     the default value.
   * @param readTimeoutMillis The read timeout, in milliseconds. A timeout of zero is interpreted
   *     as an infinite timeout. Pass {@link #DEFAULT_READ_TIMEOUT_MILLIS} to use the default value.
   * @param allowCrossProtocolRedirects Whether cross-protocol redirects (i.e. redirects from HTTP
   *     to HTTPS and vice versa) are enabled.
   */
  public DefaultHttpDataSource(String userAgent, Predicate<String> contentTypePredicate,
      TransferListener listener, int connectTimeoutMillis, int readTimeoutMillis,
      boolean allowCrossProtocolRedirects) {
    this.userAgent = Assertions.checkNotEmpty(userAgent);
    this.contentTypePredicate = contentTypePredicate;
    this.listener = listener;
    this.requestProperties = new HashMap<>();
    this.connectTimeoutMillis = connectTimeoutMillis;
    this.readTimeoutMillis = readTimeoutMillis;
    this.allowCrossProtocolRedirects = allowCrossProtocolRedirects;
  }

  // Returns the URL of the current connection (which may differ from the requested
  // URI after redirects), or null if no connection is open.
  @Override
  public String getUri() {
    return connection == null ? null : connection.getURL().toString();
  }

  // Returns the response headers of the current connection, or null if no
  // connection is open.
  @Override
  public Map<String, List<String>> getResponseHeaders() {
    return connection == null ? null : connection.getHeaderFields();
  }

  // Sets a header to be included in subsequent requests. Takes effect on the next
  // call to open(), not on an already-open connection.
  @Override
  public void setRequestProperty(String name, String value) {
    Assertions.checkNotNull(name);
    Assertions.checkNotNull(value);
    synchronized (requestProperties) {
      requestProperties.put(name, value);
    }
  }

  @Override
  public void clearRequestProperty(String name) {
    Assertions.checkNotNull(name);
    synchronized (requestProperties) {
      requestProperties.remove(name);
    }
  }

  @Override
  public void clearAllRequestProperties() {
    synchronized (requestProperties) {
      requestProperties.clear();
    }
  }

  // Opens the source for the given spec. Returns the number of bytes that will be
  // read (after any manual skip), or C.LENGTH_UNBOUNDED if unknown.
  @Override
  public long open(DataSpec dataSpec) throws HttpDataSourceException {
    this.dataSpec = dataSpec;
    this.bytesRead = 0;
    this.bytesSkipped = 0;
    try {
      connection = makeConnection(dataSpec);
    } catch (IOException e) {
      throw new HttpDataSourceException("Unable to connect to " + dataSpec.uri.toString(), e,
          dataSpec);
    }
    int responseCode;
    try {
      responseCode = connection.getResponseCode();
    } catch (IOException e) {
      // The connection is cleaned up on every failure path below so that a failed
      // open() leaves the source in a reusable state.
      closeConnection();
      throw new HttpDataSourceException("Unable to connect to " + dataSpec.uri.toString(), e,
          dataSpec);
    }
    // Check for a valid response code.
    if (responseCode < 200 || responseCode > 299) {
      Map<String, List<String>> headers = connection.getHeaderFields();
      closeConnection();
      throw new InvalidResponseCodeException(responseCode, headers, dataSpec);
    }
    // Check for a valid content type.
    String contentType = connection.getContentType();
    if (contentTypePredicate != null && !contentTypePredicate.evaluate(contentType)) {
      closeConnection();
      throw new InvalidContentTypeException(contentType, dataSpec);
    }
    // If we requested a range starting from a non-zero position and received a 200 rather than a
    // 206, then the server does not support partial requests. We'll need to manually skip to the
    // requested position.
    bytesToSkip = responseCode == 200 && dataSpec.position != 0 ? dataSpec.position : 0;
    // Determine the length of the data to be read, after skipping.
    if ((dataSpec.flags & DataSpec.FLAG_ALLOW_GZIP) == 0) {
      long contentLength = getContentLength(connection);
      bytesToRead = dataSpec.length != C.LENGTH_UNBOUNDED ? dataSpec.length
          : contentLength != C.LENGTH_UNBOUNDED ? contentLength - bytesToSkip
          : C.LENGTH_UNBOUNDED;
    } else {
      // Gzip is enabled. If the server opts to use gzip then the content length in the response
      // will be that of the compressed data, which isn't what we want. Furthermore, there isn't a
      // reliable way to determine whether the gzip was used or not. Always use the dataSpec length
      // in this case.
      bytesToRead = dataSpec.length;
    }
    try {
      inputStream = connection.getInputStream();
    } catch (IOException e) {
      closeConnection();
      throw new HttpDataSourceException(e, dataSpec);
    }
    opened = true;
    if (listener != null) {
      listener.onTransferStart();
    }
    return bytesToRead;
  }

  // Skips any bytes still owed from a rejected range request, then reads payload
  // bytes into the caller's buffer.
  @Override
  public int read(byte[] buffer, int offset, int readLength) throws HttpDataSourceException {
    try {
      skipInternal();
      return readInternal(buffer, offset, readLength);
    } catch (IOException e) {
      throw new HttpDataSourceException(e, dataSpec);
    }
  }

  @Override
  public void close() throws HttpDataSourceException {
    try {
      if (inputStream != null) {
        // NOTE(review): presumably lets Util decide between draining the stream
        // (so the socket can be reused) and aborting the read — confirm against
        // Util.maybeTerminateInputStream.
        Util.maybeTerminateInputStream(connection, bytesRemaining());
        try {
          inputStream.close();
        } catch (IOException e) {
          throw new HttpDataSourceException(e, dataSpec);
        }
      }
    } finally {
      // Always release resources and notify the listener, even if closing the
      // stream threw above.
      inputStream = null;
      closeConnection();
      if (opened) {
        opened = false;
        if (listener != null) {
          listener.onTransferEnd();
        }
      }
    }
  }

  /**
   * Returns the current connection, or null if the source is not currently opened.
   *
   * @return The current open connection, or null.
   */
  protected final HttpURLConnection getConnection() {
    return connection;
  }

  /**
   * Returns the number of bytes that have been skipped since the most recent call to
   * {@link #open(DataSpec)}.
   *
   * @return The number of bytes skipped.
   */
  protected final long bytesSkipped() {
    return bytesSkipped;
  }

  /**
   * Returns the number of bytes that have been read since the most recent call to
   * {@link #open(DataSpec)}.
   *
   * @return The number of bytes read.
   */
  protected final long bytesRead() {
    return bytesRead;
  }

  /**
   * Returns the number of bytes that are still to be read for the current {@link DataSpec}.
   * <p>
   * If the total length of the data being read is known, then this length minus {@code bytesRead()}
   * is returned. If the total length is unknown, {@link C#LENGTH_UNBOUNDED} is returned.
   *
   * @return The remaining length, or {@link C#LENGTH_UNBOUNDED}.
   */
  protected final long bytesRemaining() {
    return bytesToRead == C.LENGTH_UNBOUNDED ? bytesToRead : bytesToRead - bytesRead;
  }

  /**
   * Establishes a connection, following redirects to do so where permitted.
   */
  private HttpURLConnection makeConnection(DataSpec dataSpec) throws IOException {
    URL url = new URL(dataSpec.uri.toString());
    byte[] postBody = dataSpec.postBody;
    long position = dataSpec.position;
    long length = dataSpec.length;
    boolean allowGzip = (dataSpec.flags & DataSpec.FLAG_ALLOW_GZIP) != 0;
    if (!allowCrossProtocolRedirects) {
      // HttpURLConnection disallows cross-protocol redirects, but otherwise performs redirection
      // automatically. This is the behavior we want, so use it.
      HttpURLConnection connection = makeConnection(
          url, postBody, position, length, allowGzip, true /* followRedirects */);
      return connection;
    }
    // We need to handle redirects ourselves to allow cross-protocol redirects.
    // The loop permits the initial request plus up to MAX_REDIRECTS follow-ups.
    int redirectCount = 0;
    while (redirectCount++ <= MAX_REDIRECTS) {
      HttpURLConnection connection = makeConnection(
          url, postBody, position, length, allowGzip, false /* followRedirects */);
      int responseCode = connection.getResponseCode();
      if (responseCode == HttpURLConnection.HTTP_MULT_CHOICE
          || responseCode == HttpURLConnection.HTTP_MOVED_PERM
          || responseCode == HttpURLConnection.HTTP_MOVED_TEMP
          || responseCode == HttpURLConnection.HTTP_SEE_OTHER
          || (postBody == null
              && (responseCode == 307 /* HTTP_TEMP_REDIRECT */
                  || responseCode == 308 /* HTTP_PERM_REDIRECT */))) {
        // For 300, 301, 302, and 303 POST requests follow the redirect and are transformed into
        // GET requests. For 307 and 308 POST requests are not redirected.
        postBody = null;
        String location = connection.getHeaderField("Location");
        connection.disconnect();
        url = handleRedirect(url, location);
      } else {
        return connection;
      }
    }
    // If we get here we've been redirected more times than are permitted.
    throw new NoRouteToHostException("Too many redirects: " + redirectCount);
  }

  /**
   * Configures a connection and opens it.
   *
   * @param url The url to connect to.
   * @param postBody The body data for a POST request.
   * @param position The byte offset of the requested data.
   * @param length The length of the requested data, or {@link C#LENGTH_UNBOUNDED}.
   * @param allowGzip Whether to allow the use of gzip.
   * @param followRedirects Whether to follow redirects.
   * @return The opened connection.
   * @throws IOException If the connection cannot be established.
   */
  private HttpURLConnection makeConnection(URL url, byte[] postBody, long position,
      long length, boolean allowGzip, boolean followRedirects) throws IOException {
    HttpURLConnection connection = (HttpURLConnection) url.openConnection();
    connection.setConnectTimeout(connectTimeoutMillis);
    connection.setReadTimeout(readTimeoutMillis);
    synchronized (requestProperties) {
      for (Map.Entry<String, String> property : requestProperties.entrySet()) {
        connection.setRequestProperty(property.getKey(), property.getValue());
      }
    }
    // Request only the desired byte range unless the whole resource was asked for.
    if (!(position == 0 && length == C.LENGTH_UNBOUNDED)) {
      String rangeRequest = "bytes=" + position + "-";
      if (length != C.LENGTH_UNBOUNDED) {
        rangeRequest += (position + length - 1);
      }
      connection.setRequestProperty("Range", rangeRequest);
    }
    connection.setRequestProperty("User-Agent", userAgent);
    if (!allowGzip) {
      // "identity" disables transparent gzip, so Content-Length reflects the
      // actual byte count delivered.
      connection.setRequestProperty("Accept-Encoding", "identity");
    }
    connection.setInstanceFollowRedirects(followRedirects);
    connection.setDoOutput(postBody != null);
    if (postBody != null) {
      connection.setFixedLengthStreamingMode(postBody.length);
      connection.connect();
      OutputStream os = connection.getOutputStream();
      os.write(postBody);
      os.close();
    } else {
      connection.connect();
    }
    return connection;
  }

  /**
   * Handles a redirect.
   *
   * @param originalUrl The original URL.
   * @param location The Location header in the response.
   * @return The next URL.
   * @throws IOException If redirection isn't possible.
   */
  private static URL handleRedirect(URL originalUrl, String location) throws IOException {
    if (location == null) {
      throw new ProtocolException("Null location redirect");
    }
    // Form the new url. Relative locations are resolved against the original URL.
    URL url = new URL(originalUrl, location);
    // Check that the protocol of the new url is supported.
    String protocol = url.getProtocol();
    if (!"https".equals(protocol) && !"http".equals(protocol)) {
      throw new ProtocolException("Unsupported protocol redirect: " + protocol);
    }
    // Currently this method is only called if allowCrossProtocolRedirects is true, and so the code
    // below isn't required. If we ever decide to handle redirects ourselves when cross-protocol
    // redirects are disabled, we'll need to uncomment this block of code.
    // if (!allowCrossProtocolRedirects && !protocol.equals(originalUrl.getProtocol())) {
    //   throw new ProtocolException("Disallowed cross-protocol redirect ("
    //       + originalUrl.getProtocol() + " to " + protocol + ")");
    // }
    return url;
  }

  /**
   * Attempts to extract the length of the content from the response headers of an open connection.
   *
   * @param connection The open connection.
   * @return The extracted length, or {@link C#LENGTH_UNBOUNDED}.
   */
  private static long getContentLength(HttpURLConnection connection) {
    long contentLength = C.LENGTH_UNBOUNDED;
    String contentLengthHeader = connection.getHeaderField("Content-Length");
    if (!TextUtils.isEmpty(contentLengthHeader)) {
      try {
        contentLength = Long.parseLong(contentLengthHeader);
      } catch (NumberFormatException e) {
        Log.e(TAG, "Unexpected Content-Length [" + contentLengthHeader + "]");
      }
    }
    // Cross-check against Content-Range, which some servers/proxies provide even
    // when Content-Length is missing or wrong.
    String contentRangeHeader = connection.getHeaderField("Content-Range");
    if (!TextUtils.isEmpty(contentRangeHeader)) {
      Matcher matcher = CONTENT_RANGE_HEADER.matcher(contentRangeHeader);
      if (matcher.find()) {
        try {
          long contentLengthFromRange =
              Long.parseLong(matcher.group(2)) - Long.parseLong(matcher.group(1)) + 1;
          if (contentLength < 0) {
            // Some proxy servers strip the Content-Length header. Fall back to the length
            // calculated here in this case.
            contentLength = contentLengthFromRange;
          } else if (contentLength != contentLengthFromRange) {
            // If there is a discrepancy between the Content-Length and Content-Range headers,
            // assume the one with the larger value is correct. We have seen cases where carriers
            // change one of them to reduce the size of a request, but it is unlikely anybody would
            // increase it.
            Log.w(TAG, "Inconsistent headers [" + contentLengthHeader + "] [" + contentRangeHeader
                + "]");
            contentLength = Math.max(contentLength, contentLengthFromRange);
          }
        } catch (NumberFormatException e) {
          Log.e(TAG, "Unexpected Content-Range [" + contentRangeHeader + "]");
        }
      }
    }
    return contentLength;
  }

  /**
   * Skips any bytes that need skipping. Else does nothing.
   * <p>
   * This implementation is based roughly on {@code libcore.io.Streams.skipByReading()}.
   *
   * @throws InterruptedIOException If the thread is interrupted during the operation.
   * @throws EOFException If the end of the input stream is reached before the bytes are skipped.
   */
  private void skipInternal() throws IOException {
    if (bytesSkipped == bytesToSkip) {
      return;
    }
    // Acquire the shared skip buffer.
    byte[] skipBuffer = skipBufferReference.getAndSet(null);
    if (skipBuffer == null) {
      skipBuffer = new byte[4096];
    }
    while (bytesSkipped != bytesToSkip) {
      int readLength = (int) Math.min(bytesToSkip - bytesSkipped, skipBuffer.length);
      int read = inputStream.read(skipBuffer, 0, readLength);
      if (Thread.interrupted()) {
        throw new InterruptedIOException();
      }
      if (read == -1) {
        throw new EOFException();
      }
      bytesSkipped += read;
      if (listener != null) {
        listener.onBytesTransferred(read);
      }
    }
    // Release the shared skip buffer.
    skipBufferReference.set(skipBuffer);
  }

  /**
   * Reads up to {@code length} bytes of data and stores them into {@code buffer}, starting at
   * index {@code offset}.
   * <p>
   * This method blocks until at least one byte of data can be read, the end of the opened range is
   * detected, or an exception is thrown.
   *
   * @param buffer The buffer into which the read data should be stored.
   * @param offset The start offset into {@code buffer} at which data should be written.
   * @param readLength The maximum number of bytes to read.
   * @return The number of bytes read, or {@link C#RESULT_END_OF_INPUT} if the end of the opened
   *     range is reached.
   * @throws IOException If an error occurs reading from the source.
   */
  private int readInternal(byte[] buffer, int offset, int readLength) throws IOException {
    // Clamp the read so we never consume bytes beyond the requested range.
    readLength = bytesToRead == C.LENGTH_UNBOUNDED ? readLength
        : (int) Math.min(readLength, bytesToRead - bytesRead);
    if (readLength == 0) {
      // We've read all of the requested data.
      return C.RESULT_END_OF_INPUT;
    }
    int read = inputStream.read(buffer, offset, readLength);
    if (read == -1) {
      if (bytesToRead != C.LENGTH_UNBOUNDED && bytesToRead != bytesRead) {
        // The server closed the connection having not sent sufficient data.
        throw new EOFException();
      }
      return C.RESULT_END_OF_INPUT;
    }
    bytesRead += read;
    if (listener != null) {
      listener.onBytesTransferred(read);
    }
    return read;
  }

  /**
   * Closes the current connection, if there is one.
   */
  private void closeConnection() {
    if (connection != null) {
      connection.disconnect();
      connection = null;
    }
  }

}
| |
/*
* Copyright (c) 2015-present, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*/
package com.facebook.drawee.gestures;
import android.view.MotionEvent;
import android.view.ViewConfiguration;
import com.facebook.testing.robolectric.v2.WithTestDefaultsRunner;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.Robolectric;
import static android.view.MotionEvent.*;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
/**
* Tests for {@link GestureDetector}
*/
@RunWith(WithTestDefaultsRunner.class)
public class GestureDetectorTest {

  private GestureDetector.ClickListener mClickListener;
  private ViewConfiguration mViewConfiguration;
  private long mScaledTouchSlop;
  private long mLongPressTimeout;
  private GestureDetector mGestureDetector;

  @Before
  public void setup() {
    mClickListener = mock(GestureDetector.ClickListener.class);
    mViewConfiguration = ViewConfiguration.get(Robolectric.application);
    mScaledTouchSlop = mViewConfiguration.getScaledTouchSlop();
    mLongPressTimeout = mViewConfiguration.getLongPressTimeout();
    mGestureDetector = new GestureDetector(Robolectric.application);
    mGestureDetector.setClickListener(mClickListener);
  }

  /**
   * Feeds {@code event} to the detector, then asserts the detector's internal state:
   * whether it is capturing a gesture, whether the gesture is still a click candidate,
   * and that the recorded down time/position still match {@code downEvent}.
   */
  private void dispatchAndVerifyState(
      MotionEvent event, boolean expectCapturing, boolean expectCandidate, MotionEvent downEvent) {
    mGestureDetector.onTouchEvent(event);
    assertEquals(expectCapturing, mGestureDetector.mIsCapturingGesture);
    assertEquals(expectCandidate, mGestureDetector.mIsClickCandidate);
    assertEquals(downEvent.getEventTime(), mGestureDetector.mActionDownTime);
    assertEquals(downEvent.getX(), mGestureDetector.mActionDownX, 0f);
    assertEquals(downEvent.getY(), mGestureDetector.mActionDownY, 0f);
  }

  @Test
  public void testInitialstate() {
    // A freshly constructed detector is idle and configured with the platform slop.
    assertEquals(mScaledTouchSlop, mGestureDetector.mSingleTapSlopPx, 0f);
    assertFalse(mGestureDetector.mIsCapturingGesture);
    assertFalse(mGestureDetector.mIsClickCandidate);
  }

  @Test
  public void testSetClickListener() {
    GestureDetector.ClickListener replacement = mock(GestureDetector.ClickListener.class);
    mGestureDetector.setClickListener(replacement);
    assertSame(replacement, mGestureDetector.mClickListener);
    mGestureDetector.setClickListener(null);
    assertNull(mGestureDetector.mClickListener);
  }

  @Test
  public void testOnClick_NoListener() {
    MotionEvent down = obtain(1000, 1000, ACTION_DOWN, 100.f, 100.f, 0);
    MotionEvent up = obtain(1000, 1001, ACTION_UP, 100.f, 100.f, 0);
    // With a listener attached, a quick tap produces exactly one click callback.
    mGestureDetector.setClickListener(mClickListener);
    mGestureDetector.onTouchEvent(down);
    mGestureDetector.onTouchEvent(up);
    verify(mClickListener).onClick();
    // After detaching the listener, the same tap is silently ignored.
    mGestureDetector.setClickListener(null);
    mGestureDetector.onTouchEvent(down);
    mGestureDetector.onTouchEvent(up);
    verifyNoMoreInteractions(mClickListener);
    down.recycle();
    up.recycle();
  }

  @Test
  public void testOnClick_Valid() {
    // Tap that stays within the slop radius and releases exactly at the long-press
    // timeout: still counts as a click.
    float slop = mScaledTouchSlop;
    long downTime = 1000;
    MotionEvent down = obtain(downTime, downTime, ACTION_DOWN, 100.f, 100.f, 0);
    dispatchAndVerifyState(down, true, true, down);
    MotionEvent firstMove = obtain(downTime, downTime + mLongPressTimeout * 1 / 3, ACTION_MOVE,
        100.f + slop * 0.3f, 100.f - slop * 0.3f, 0);
    dispatchAndVerifyState(firstMove, true, true, down);
    MotionEvent secondMove = obtain(downTime, downTime + mLongPressTimeout * 2 / 3, ACTION_MOVE,
        100.f + slop * 0.6f, 100.f - slop * 0.6f, 0);
    dispatchAndVerifyState(secondMove, true, true, down);
    MotionEvent up = obtain(downTime, downTime + mLongPressTimeout, ACTION_UP,
        100.f + slop, 100.f - slop, 0);
    dispatchAndVerifyState(up, false, false, down);
    verify(mClickListener).onClick();
    down.recycle();
    firstMove.recycle();
    secondMove.recycle();
    up.recycle();
  }

  @Test
  public void testOnClick_ToFar() {
    // Gesture that drifts beyond the slop radius: no longer a click candidate.
    float slop = mScaledTouchSlop;
    long downTime = 1000;
    MotionEvent down = obtain(downTime, downTime, ACTION_DOWN, 100.f, 100.f, 0);
    dispatchAndVerifyState(down, true, true, down);
    MotionEvent firstMove = obtain(downTime, downTime + mLongPressTimeout * 1 / 3, ACTION_MOVE,
        100.f + slop * 0.5f, 100.f - slop * 0.5f, 0);
    dispatchAndVerifyState(firstMove, true, true, down);
    // The maximum allowed distance is `slop` px; this move is `slop * 1.1` px away
    // from the down point, so the click candidacy is lost.
    MotionEvent secondMove = obtain(downTime, downTime + mLongPressTimeout * 2 / 3, ACTION_MOVE,
        100.f + slop * 1.1f, 100.f - slop * 0.5f, 0);
    dispatchAndVerifyState(secondMove, true, false, down);
    MotionEvent up = obtain(downTime, downTime + mLongPressTimeout, ACTION_UP,
        100.f + slop, 100.f - slop, 0);
    dispatchAndVerifyState(up, false, false, down);
    verifyNoMoreInteractions(mClickListener);
    down.recycle();
    firstMove.recycle();
    secondMove.recycle();
    up.recycle();
  }

  @Test
  public void testOnClick_ToLong() {
    // Gesture held past the long-press timeout: no click is reported.
    float slop = mScaledTouchSlop;
    long downTime = 1000;
    MotionEvent down = obtain(downTime, downTime, ACTION_DOWN, 100.f, 100.f, 0);
    dispatchAndVerifyState(down, true, true, down);
    MotionEvent firstMove = obtain(downTime, downTime + mLongPressTimeout * 1 / 3, ACTION_MOVE,
        100.f + slop * 0.3f, 100.f - slop * 0.3f, 0);
    dispatchAndVerifyState(firstMove, true, true, down);
    MotionEvent secondMove = obtain(downTime, downTime + mLongPressTimeout * 2 / 3, ACTION_MOVE,
        100.f + slop * 0.6f, 100.f - slop * 0.6f, 0);
    dispatchAndVerifyState(secondMove, true, true, down);
    // The maximum allowed duration is mLongPressTimeout ms; this release happens
    // 1 ms after that, so the click candidacy is lost.
    MotionEvent up = obtain(downTime, downTime + mLongPressTimeout + 1, ACTION_UP,
        100.f + slop, 100.f - slop, 0);
    dispatchAndVerifyState(up, false, false, down);
    verifyNoMoreInteractions(mClickListener);
    down.recycle();
    firstMove.recycle();
    secondMove.recycle();
    up.recycle();
  }
}
| |
/*
Copyright 2011 Selenium committers
Copyright 2011 Software Freedom Conservancy
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.openqa.selenium.testing.drivers;
import static com.google.common.base.Preconditions.checkNotNull;
import static org.openqa.selenium.Platform.LINUX;
import static org.openqa.selenium.Platform.WINDOWS;
import static org.openqa.selenium.testing.Ignore.Driver.ALL;
import static org.openqa.selenium.testing.Ignore.Driver.CHROME;
import static org.openqa.selenium.testing.Ignore.Driver.FIREFOX;
import static org.openqa.selenium.testing.Ignore.Driver.HTMLUNIT;
import static org.openqa.selenium.testing.Ignore.Driver.IE;
import static org.openqa.selenium.testing.Ignore.Driver.MARIONETTE;
import static org.openqa.selenium.testing.Ignore.Driver.OPERA;
import static org.openqa.selenium.testing.Ignore.Driver.OPERA_MOBILE;
import static org.openqa.selenium.testing.Ignore.Driver.PHANTOMJS;
import static org.openqa.selenium.testing.Ignore.Driver.REMOTE;
import static org.openqa.selenium.testing.Ignore.Driver.SAFARI;
import static org.openqa.selenium.testing.drivers.Browser.chrome;
import static org.openqa.selenium.testing.drivers.Browser.htmlunit;
import static org.openqa.selenium.testing.drivers.Browser.htmlunit_js;
import static org.openqa.selenium.testing.drivers.Browser.ie;
import static org.openqa.selenium.testing.drivers.Browser.opera;
import static org.openqa.selenium.testing.drivers.Browser.phantomjs;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
import org.junit.runners.model.FrameworkMethod;
import org.openqa.selenium.Platform;
import org.openqa.selenium.testing.Ignore;
import org.openqa.selenium.testing.IgnoreComparator;
import org.openqa.selenium.testing.JavascriptEnabled;
import org.openqa.selenium.testing.NativeEventsRequired;
import org.openqa.selenium.testing.NeedsLocalEnvironment;
import java.util.Arrays;
import java.util.Set;
/**
 * Class that decides whether a test class or method should be ignored.
 * <p>
 * The decision combines Selenium's and JUnit's {@code @Ignore} annotations,
 * the test's {@code JavascriptEnabled} / {@code NativeEventsRequired}
 * requirements, and the system properties {@code only_run}, {@code method},
 * {@code ignore_method}, {@code ignored_only},
 * {@code selenium.browser.native_events} and {@code selenium.browser.remote}.
 */
public class TestIgnorance {
  // Browsers whose drivers always deliver native events; NativeEventsRequired
  // tests can always run on these.
  private Set<Browser> alwaysNativeEvents = ImmutableSet.of(chrome, ie, opera);
  // Browsers that can never deliver native events; NativeEventsRequired tests
  // are always skipped on these.
  private Set<Browser> neverNativeEvents = ImmutableSet.of(
      htmlunit, htmlunit_js, phantomjs);
  private IgnoreComparator ignoreComparator = new IgnoreComparator();
  // Test method names selected via -Dmethod=a,b (empty set means "no filter").
  private Set<String> methods = Sets.newHashSet();
  // Test class simple names selected via -Donly_run=A,B (empty set means "no filter").
  private Set<String> only = Sets.newHashSet();
  // Test method names excluded via -Dignore_method=a,b.
  private Set<String> ignoreMethods = Sets.newHashSet();
  private Browser browser;

  public TestIgnorance(Browser browser) {
    setBrowser(browser);

    // Comma-separated filters from system properties; an absent property
    // leaves the corresponding set empty, i.e. no filtering.
    String onlyRun = System.getProperty("only_run");
    if (onlyRun != null) {
      only.addAll(Arrays.asList(onlyRun.split(",")));
    }
    String method = System.getProperty("method");
    if (method != null) {
      methods.addAll(Arrays.asList(method.split(",")));
    }
    String skip = System.getProperty("ignore_method");
    if (skip != null) {
      ignoreMethods.addAll(Arrays.asList(skip.split(",")));
    }
  }

  // JUnit 4
  /**
   * Returns whether the given JUnit 4 test method should be skipped for the
   * current browser and environment.
   */
  public boolean isIgnored(FrameworkMethod method, Object test) {
    // Annotation-driven ignores (Selenium's @Ignore and JUnit's @Ignore),
    // checked on both the test class and the test method.
    boolean ignored = ignoreComparator.shouldIgnore(test.getClass().getAnnotation(Ignore.class)) ||
                      ignoreComparator.shouldIgnore(method.getMethod().getAnnotation(Ignore.class));
    ignored |= isIgnoredBecauseOfJUnit4Ignore(test.getClass().getAnnotation(org.junit.Ignore.class));
    ignored |= isIgnoredBecauseOfJUnit4Ignore(method.getMethod().getAnnotation(org.junit.Ignore.class));
    // -Dignored_only=true flips the selection so that ONLY the tests ignored
    // above are run. NOTE(review): the inversion happens before the
    // JavaScript/native-events/environment checks below, so those can still
    // exclude a test even in an ignored-only run -- confirm this ordering is
    // intentional before changing it.
    if (Boolean.getBoolean("ignored_only")) {
      ignored = !ignored;
    }
    ignored |= isIgnoredDueToJavascript(test.getClass().getAnnotation(JavascriptEnabled.class));
    ignored |= isIgnoredDueToJavascript(method.getMethod().getAnnotation(JavascriptEnabled.class));
    ignored |= isIgnoredBecauseOfNativeEvents(test.getClass().getAnnotation(NativeEventsRequired.class));
    ignored |= isIgnoredBecauseOfNativeEvents(method.getMethod().getAnnotation(NativeEventsRequired.class));
    ignored |= isIgnoredDueToEnvironmentVariables(method, test);
    ignored |= isIgnoredDueToBeingOnSauce(method, test);
    return ignored;
  }

  // JUnit's own @Ignore is unconditional: its mere presence skips the test.
  private boolean isIgnoredBecauseOfJUnit4Ignore(org.junit.Ignore annotation) {
    return annotation != null;
  }

  /**
   * Skips a {@code NativeEventsRequired} test when the current browser or
   * platform cannot provide native events.
   */
  private boolean isIgnoredBecauseOfNativeEvents(NativeEventsRequired annotation) {
    if (annotation == null) {
      return false;
    }
    if (neverNativeEvents.contains(browser)) {
      return true;
    }
    if (alwaysNativeEvents.contains(browser)) {
      return false;
    }
    // For all other browsers, native events are opt-in via this flag.
    if (!Boolean.getBoolean("selenium.browser.native_events")) {
      return true;
    }
    // We only have native events on Linux and Windows.
    Platform platform = getEffectivePlatform();
    return !(platform.is(LINUX) || platform.is(WINDOWS));
  }

  // The platform that actually hosts the browser: the Sauce-side platform
  // when remoting to Sauce Labs, the local platform otherwise.
  private static Platform getEffectivePlatform() {
    if (SauceDriver.shouldUseSauce()) {
      return SauceDriver.getEffectivePlatform();
    }
    return Platform.getCurrent();
  }

  // @NeedsLocalEnvironment tests cannot run remotely on Sauce Labs.
  private boolean isIgnoredDueToBeingOnSauce(FrameworkMethod method, Object test) {
    return SauceDriver.shouldUseSauce() &&
        (method.getMethod().getAnnotation(NeedsLocalEnvironment.class) != null ||
         test.getClass().getAnnotation(NeedsLocalEnvironment.class) != null);
  }

  // @JavascriptEnabled tests are skipped on browsers without JavaScript support.
  private boolean isIgnoredDueToJavascript(JavascriptEnabled enabled) {
    return enabled != null && !browser.isJavascriptEnabled();
  }

  // Applies the only_run / method / ignore_method system-property filters.
  private boolean isIgnoredDueToEnvironmentVariables(FrameworkMethod method, Object test) {
    return (!only.isEmpty() && !only.contains(test.getClass().getSimpleName())) ||
        (!methods.isEmpty() && !methods.contains(method.getName())) ||
        ignoreMethods.contains(method.getName());
  }

  /**
   * Sets the browser under test (must not be null) and registers the matching
   * driver rules with the ignore comparator.
   */
  public void setBrowser(Browser browser) {
    this.browser = checkNotNull(browser, "Browser to use must be set");
    addIgnoresForBrowser(browser, ignoreComparator);
  }

  // Maps the Browser enum onto the driver constants @Ignore rules refer to.
  private void addIgnoresForBrowser(Browser browser, IgnoreComparator comparator) {
    // Remote and Sauce runs additionally match @Ignore(REMOTE) rules.
    if (Boolean.getBoolean("selenium.browser.remote") || SauceDriver.shouldUseSauce()) {
      comparator.addDriver(REMOTE);
    }
    switch (browser) {
      case chrome:
        comparator.addDriver(CHROME);
        break;
      case ff:
        if (Boolean.getBoolean("webdriver.firefox.marionette")) {
          comparator.addDriver(MARIONETTE);
        } else {
          comparator.addDriver(FIREFOX);
        }
        break;
      case htmlunit:
      case htmlunit_js:
        comparator.addDriver(HTMLUNIT);
        break;
      case ie:
        comparator.addDriver(IE);
        break;
      case none:
        comparator.addDriver(ALL);
        break;
      case opera:
        comparator.addDriver(OPERA);
        break;
      case opera_mobile:
        comparator.addDriver(OPERA_MOBILE);
        comparator.addDriver(REMOTE);
        break;
      case phantomjs:
        comparator.addDriver(PHANTOMJS);
        break;
      case safari:
        comparator.addDriver(SAFARI);
        break;
      default:
        throw new RuntimeException("Cannot determine which ignore to add ignores rules for");
    }
  }
}
| |
/*
* Copyright 2020 Playtika.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.kayenta.prometheus.metrics;
import static java.util.Collections.singletonMap;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import com.netflix.kayenta.prometheus.model.PrometheusMetricDescriptorsResponse;
import com.netflix.kayenta.prometheus.security.PrometheusNamedAccountCredentials;
import com.netflix.kayenta.prometheus.service.PrometheusRemoteService;
import com.netflix.kayenta.security.AccountCredentials;
import com.netflix.kayenta.security.AccountCredentialsRepository;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
/** Unit tests for the Prometheus metric-descriptors cache. */
@RunWith(MockitoJUnitRunner.class)
public class PrometheusMetricDescriptorsCacheTest {

  private static final String ACCOUNT_1 = "metrics-acc-1";
  private static final String ACCOUNT_2 = "metrics-acc-2";

  @Mock PrometheusRemoteService prometheusRemote1;
  @Mock AccountCredentialsRepository accountCredentialRepo;
  // Class under test; the mocks above are injected by Mockito.
  @InjectMocks PrometheusMetricDescriptorsCache cache;

  @Test
  public void returnsEmptyMapIfNoDataForEmptyFilter() {
    // No accounts stubbed, so the refresh finds no descriptors.
    cache.updateMetricDescriptorsCache();
    List<Map> metadata = cache.getMetadata(ACCOUNT_1, "");
    assertThat(metadata).isEmpty();
  }

  @Test
  public void returnsEmptyMapIfNoDataForMetricFilter() {
    // Same as above, but with a non-empty metric filter.
    cache.updateMetricDescriptorsCache();
    List<Map> metadata = cache.getMetadata(ACCOUNT_1, "metric_1");
    assertThat(metadata).isEmpty();
  }

  @Test
  public void returnsMetricsByAnyCase() {
    // Expects filter matching to be case-insensitive: "METR" should match
    // metric_1, METRIC_2 and MEtriC_3 but not other_thing.
    when(accountCredentialRepo.getAllOf(AccountCredentials.Type.METRICS_STORE))
        .thenReturn(
            Collections.singleton(
                PrometheusNamedAccountCredentials.builder()
                    .name(ACCOUNT_1)
                    .prometheusRemoteService(prometheusRemote1)
                    .build()));
    when(prometheusRemote1.listMetricDescriptors())
        .thenReturn(
            PrometheusMetricDescriptorsResponse.builder()
                .data(Arrays.asList("metric_1", "METRIC_2", "MEtriC_3", "other_thing"))
                .status("success")
                .build());
    cache.updateMetricDescriptorsCache();
    List<Map> metadata = cache.getMetadata(ACCOUNT_1, "METR");
    assertThat(metadata)
        .containsOnly(
            singletonMap("name", "metric_1"),
            singletonMap("name", "METRIC_2"),
            singletonMap("name", "MEtriC_3"));
  }

  @Test
  public void returnsMetricsForSpecificAccount() {
    // Descriptors cached for ACCOUNT_1 must not leak into ACCOUNT_2 queries.
    when(accountCredentialRepo.getAllOf(AccountCredentials.Type.METRICS_STORE))
        .thenReturn(
            Collections.singleton(
                PrometheusNamedAccountCredentials.builder()
                    .name(ACCOUNT_1)
                    .prometheusRemoteService(prometheusRemote1)
                    .build()));
    when(prometheusRemote1.listMetricDescriptors())
        .thenReturn(
            PrometheusMetricDescriptorsResponse.builder()
                .data(Arrays.asList("metric_1", "METRIC_2", "MEtriC_3", "other_thing"))
                .status("success")
                .build());
    cache.updateMetricDescriptorsCache();
    assertThat(cache.getMetadata(ACCOUNT_2, "METR")).isEmpty();
  }

  @Test
  public void returnsAllMetricsForEmptyFilter() {
    // An empty filter should return every cached descriptor for the account.
    when(accountCredentialRepo.getAllOf(AccountCredentials.Type.METRICS_STORE))
        .thenReturn(
            Collections.singleton(
                PrometheusNamedAccountCredentials.builder()
                    .name(ACCOUNT_1)
                    .prometheusRemoteService(prometheusRemote1)
                    .build()));
    when(prometheusRemote1.listMetricDescriptors())
        .thenReturn(
            PrometheusMetricDescriptorsResponse.builder()
                .data(Arrays.asList("metric_1", "METRIC_2", "MEtriC_3", "other_thing"))
                .status("success")
                .build());
    cache.updateMetricDescriptorsCache();
    List<Map> metadata = cache.getMetadata(ACCOUNT_1, "");
    assertThat(metadata)
        .containsOnly(
            singletonMap("name", "metric_1"),
            singletonMap("name", "METRIC_2"),
            singletonMap("name", "MEtriC_3"),
            singletonMap("name", "other_thing"));
  }

  @Test
  public void returnsEmptyDataIfPrometheusReturnsSuccessAndEmptyData() {
    // A "success" response with a null/absent data list must not break the cache.
    when(accountCredentialRepo.getAllOf(AccountCredentials.Type.METRICS_STORE))
        .thenReturn(
            Collections.singleton(
                PrometheusNamedAccountCredentials.builder()
                    .name(ACCOUNT_1)
                    .prometheusRemoteService(prometheusRemote1)
                    .build()));
    when(prometheusRemote1.listMetricDescriptors())
        .thenReturn(PrometheusMetricDescriptorsResponse.builder().status("success").build());
    cache.updateMetricDescriptorsCache();
    assertThat(cache.getMetadata(ACCOUNT_1, "METR")).isEmpty();
  }

  @Test
  public void returnsEmptyDataIfPrometheusReturnsError() {
    // An "error" status response must leave the cache empty for the account.
    when(accountCredentialRepo.getAllOf(AccountCredentials.Type.METRICS_STORE))
        .thenReturn(
            Collections.singleton(
                PrometheusNamedAccountCredentials.builder()
                    .name(ACCOUNT_1)
                    .prometheusRemoteService(prometheusRemote1)
                    .build()));
    when(prometheusRemote1.listMetricDescriptors())
        .thenReturn(PrometheusMetricDescriptorsResponse.builder().status("error").build());
    cache.updateMetricDescriptorsCache();
    assertThat(cache.getMetadata(ACCOUNT_1, "METR")).isEmpty();
  }

  @Test
  public void updateMetricDescriptorsCache_callsRepo() {
    cache.updateMetricDescriptorsCache();
    verify(accountCredentialRepo).getAllOf(AccountCredentials.Type.METRICS_STORE);
  }

  @Test
  public void updateMetricDescriptorsCache_ignoresNonPrometheusAccounts() {
    // A credentials object that is not PrometheusNamedAccountCredentials
    // should be skipped without an exception.
    when(accountCredentialRepo.getAllOf(AccountCredentials.Type.METRICS_STORE))
        .thenReturn(Collections.singleton(new TestAccountCredentials()));
    cache.updateMetricDescriptorsCache();
    verify(accountCredentialRepo).getAllOf(AccountCredentials.Type.METRICS_STORE);
  }

  /** Minimal non-Prometheus credentials used to verify such accounts are ignored. */
  public static class TestAccountCredentials implements AccountCredentials<String> {
    @Override
    public String getName() {
      return "name";
    }

    @Override
    public String getType() {
      return "type";
    }

    @Override
    public List<Type> getSupportedTypes() {
      return Arrays.asList(Type.METRICS_STORE);
    }

    @Override
    public String getCredentials() {
      return "";
    }
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.builder;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.apache.camel.CamelContext;
import org.apache.camel.Endpoint;
import org.apache.camel.Expression;
import org.apache.camel.LoggingLevel;
import org.apache.camel.NoSuchEndpointException;
import org.apache.camel.builder.xml.XPathBuilder;
import org.apache.camel.model.ModelCamelContext;
import org.apache.camel.model.language.HeaderExpression;
import org.apache.camel.model.language.MethodCallExpression;
import org.apache.camel.model.language.PropertyExpression;
import org.apache.camel.util.ObjectHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Base class for implementation inheritance for different clauses in the <a
 * href="http://camel.apache.org/dsl.html">Java DSL</a>
 */
public abstract class BuilderSupport {
    // The model context this builder operates on; set via constructor or setter.
    private ModelCamelContext context;
    // Lazily created by getErrorHandlerBuilder() when not set explicitly.
    private ErrorHandlerBuilder errorHandlerBuilder;

    protected BuilderSupport() {
    }

    protected BuilderSupport(CamelContext context) {
        this.context = (ModelCamelContext)context;
    }

    // Builder methods
    // -------------------------------------------------------------------------

    /**
     * Returns a value builder for the given header
     */
    public ValueBuilder header(String name) {
        HeaderExpression expression = new HeaderExpression(name);
        return new ValueBuilder(expression);
    }

    /**
     * Returns a value builder for the given property
     */
    public ValueBuilder property(String name) {
        PropertyExpression expression = new PropertyExpression(name);
        return new ValueBuilder(expression);
    }

    /**
     * Returns a predicate and value builder for the inbound body on an exchange
     */
    public ValueBuilder body() {
        return Builder.body();
    }

    /**
     * Returns a predicate and value builder for the inbound message body as a
     * specific type
     */
    public <T> ValueBuilder body(Class<T> type) {
        return Builder.bodyAs(type);
    }

    /**
     * Returns a predicate and value builder for the outbound body on an
     * exchange
     */
    public ValueBuilder outBody() {
        return Builder.outBody();
    }

    /**
     * Returns a predicate and value builder for the outbound message body as a
     * specific type
     */
    public <T> ValueBuilder outBody(Class<T> type) {
        return Builder.outBodyAs(type);
    }

    /**
     * Returns a predicate and value builder for the fault body on an
     * exchange
     */
    public ValueBuilder faultBody() {
        return Builder.faultBody();
    }

    /**
     * Returns a predicate and value builder for the fault message body as a
     * specific type
     */
    public <T> ValueBuilder faultBodyAs(Class<T> type) {
        return Builder.faultBodyAs(type);
    }

    /**
     * Returns a value builder for the given system property
     */
    public ValueBuilder systemProperty(String name) {
        return Builder.systemProperty(name);
    }

    /**
     * Returns a value builder for the given system property, using the given
     * default value when the property is not set
     */
    public ValueBuilder systemProperty(String name, String defaultValue) {
        return Builder.systemProperty(name, defaultValue);
    }

    /**
     * Returns a constant expression value builder
     */
    public ValueBuilder constant(Object value) {
        return Builder.constant(value);
    }

    /**
     * Returns a language expression value builder
     */
    public ValueBuilder language(String language, String expression) {
        return Builder.language(language, expression);
    }

    /**
     * Returns a simple expression value builder
     */
    public SimpleBuilder simple(String value) {
        return SimpleBuilder.simple(value);
    }

    /**
     * Returns a simple expression value builder with the given result type
     */
    public SimpleBuilder simple(String value, Class<?> resultType) {
        return SimpleBuilder.simple(value, resultType);
    }

    /**
     * Returns a xpath expression value builder
     * @param value The XPath expression
     * @return A new XPathBuilder object
     */
    public XPathBuilder xpath(String value) {
        return XPathBuilder.xpath(value);
    }

    /**
     * Returns a xpath expression value builder
     * <p/>
     * NOTE(review): unlike every other builder method in this class, this
     * overload is declared {@code static} -- likely unintentional, but
     * removing {@code static} would break callers that invoke it via the
     * class name, so it is left as-is.
     * @param value The XPath expression
     * @param resultType The result type that the XPath expression will return.
     * @return A new XPathBuilder object
     */
    public static XPathBuilder xpath(String value, Class<?> resultType) {
        return XPathBuilder.xpath(value, resultType);
    }

    /**
     * Returns a <a href="http://camel.apache.org/bean-language.html">method call expression</a>
     * value builder
     * <p/>
     * This method accepts dual parameters. Either an bean instance or a reference to a bean (String).
     *
     * @param beanOrBeanRef either an instanceof a bean or a reference to bean to lookup in the Registry
     * @return the builder
     * @deprecated use {@link #method(Object)} instead
     */
    @Deprecated
    public ValueBuilder bean(Object beanOrBeanRef) {
        return bean(beanOrBeanRef, null);
    }

    /**
     * Returns a <a href="http://camel.apache.org/bean-language.html">method call expression</a>
     * value builder
     * <p/>
     * This method accepts dual parameters. Either an bean instance or a reference to a bean (String).
     *
     * @param beanOrBeanRef either an instanceof a bean or a reference to bean to lookup in the Registry
     * @param method name of method to invoke
     * @return the builder
     * @deprecated use {@link #method(Object, String)} instead
     */
    @Deprecated
    public ValueBuilder bean(Object beanOrBeanRef, String method) {
        MethodCallExpression expression;
        // A String argument is treated as a registry lookup, not a literal bean.
        if (beanOrBeanRef instanceof String) {
            expression = new MethodCallExpression((String) beanOrBeanRef, method);
        } else {
            expression = new MethodCallExpression(beanOrBeanRef, method);
        }
        return new ValueBuilder(expression);
    }

    /**
     * Returns a <a href="http://camel.apache.org/bean-language.html">method call expression</a>
     * value builder
     *
     * @param beanType the Class of the bean which we want to invoke
     * @return the builder
     * @deprecated use {@link #method(Class)} instead
     */
    @Deprecated
    public ValueBuilder bean(Class<?> beanType) {
        MethodCallExpression expression = new MethodCallExpression(beanType);
        return new ValueBuilder(expression);
    }

    /**
     * Returns a <a href="http://camel.apache.org/bean-language.html">method call expression</a>
     * value builder
     *
     * @param beanType the Class of the bean which we want to invoke
     * @param method name of method to invoke
     * @return the builder
     * @deprecated use {@link #method(Class, String)} instead
     */
    @Deprecated
    public ValueBuilder bean(Class<?> beanType, String method) {
        MethodCallExpression expression = new MethodCallExpression(beanType, method);
        return new ValueBuilder(expression);
    }

    /**
     * Returns a <a href="http://camel.apache.org/bean-language.html">method call expression</a>
     * value builder
     * <p/>
     * This method accepts dual parameters. Either an bean instance or a reference to a bean (String).
     *
     * @param beanOrBeanRef either an instanceof a bean or a reference to bean to lookup in the Registry
     * @return the builder
     */
    public ValueBuilder method(Object beanOrBeanRef) {
        return method(beanOrBeanRef, null);
    }

    /**
     * Returns a <a href="http://camel.apache.org/bean-language.html">method call expression</a>
     * value builder
     * <p/>
     * This method accepts dual parameters. Either an bean instance or a reference to a bean (String).
     *
     * @param beanOrBeanRef either an instanceof a bean or a reference to bean to lookup in the Registry
     * @param method name of method to invoke
     * @return the builder
     */
    public ValueBuilder method(Object beanOrBeanRef, String method) {
        MethodCallExpression expression;
        // A String argument is treated as a registry lookup, not a literal bean.
        if (beanOrBeanRef instanceof String) {
            expression = new MethodCallExpression((String) beanOrBeanRef, method);
        } else {
            expression = new MethodCallExpression(beanOrBeanRef, method);
        }
        return new ValueBuilder(expression);
    }

    /**
     * Returns a <a href="http://camel.apache.org/bean-language.html">method call expression</a>
     * value builder
     *
     * @param beanType the Class of the bean which we want to invoke
     * @return the builder
     */
    public ValueBuilder method(Class<?> beanType) {
        MethodCallExpression expression = new MethodCallExpression(beanType);
        return new ValueBuilder(expression);
    }

    /**
     * Returns a <a href="http://camel.apache.org/bean-language.html">method call expression</a>
     * value builder
     *
     * @param beanType the Class of the bean which we want to invoke
     * @param method name of method to invoke
     * @return the builder
     */
    public ValueBuilder method(Class<?> beanType, String method) {
        MethodCallExpression expression = new MethodCallExpression(beanType, method);
        return new ValueBuilder(expression);
    }

    /**
     * Returns an expression processing the exchange to the given endpoint uri
     *
     * @param uri endpoint uri to send the exchange to
     * @return the builder
     */
    public ValueBuilder sendTo(String uri) {
        return Builder.sendTo(uri);
    }

    /**
     * Returns an expression value builder that replaces all occurrences of the
     * regular expression with the given replacement
     */
    public ValueBuilder regexReplaceAll(Expression content, String regex, String replacement) {
        return Builder.regexReplaceAll(content, regex, replacement);
    }

    /**
     * Returns an expression value builder that replaces all occurrences of the
     * regular expression with the given replacement
     */
    public ValueBuilder regexReplaceAll(Expression content, String regex, Expression replacement) {
        return Builder.regexReplaceAll(content, regex, replacement);
    }

    /**
     * Returns a exception expression value builder
     */
    public ValueBuilder exceptionMessage() {
        return Builder.exceptionMessage();
    }

    /**
     * Resolves the given URI to an endpoint
     *
     * @param uri the uri to resolve
     * @throws NoSuchEndpointException if the endpoint URI could not be resolved
     * @return the endpoint
     */
    public Endpoint endpoint(String uri) throws NoSuchEndpointException {
        ObjectHelper.notNull(uri, "uri");
        Endpoint endpoint = getContext().getEndpoint(uri);
        if (endpoint == null) {
            throw new NoSuchEndpointException(uri);
        }
        return endpoint;
    }

    /**
     * Resolves the given URI to an endpoint of the specified type
     *
     * @param uri the uri to resolve
     * @param type the excepted type of the endpoint
     * @throws NoSuchEndpointException if the endpoint URI could not be resolved
     * @return the endpoint
     */
    public <T extends Endpoint> T endpoint(String uri, Class<T> type) throws NoSuchEndpointException {
        ObjectHelper.notNull(uri, "uri");
        T endpoint = getContext().getEndpoint(uri, type);
        if (endpoint == null) {
            throw new NoSuchEndpointException(uri);
        }
        return endpoint;
    }

    /**
     * Resolves the list of URIs into a list of {@link Endpoint} instances
     *
     * @param uris list of endpoints to resolve
     * @throws NoSuchEndpointException if an endpoint URI could not be resolved
     * @return list of endpoints
     */
    public List<Endpoint> endpoints(String... uris) throws NoSuchEndpointException {
        List<Endpoint> endpoints = new ArrayList<Endpoint>();
        for (String uri : uris) {
            endpoints.add(endpoint(uri));
        }
        return endpoints;
    }

    /**
     * Helper method to create a list of {@link Endpoint} instances
     *
     * @param endpoints endpoints
     * @return list of the given endpoints
     */
    public List<Endpoint> endpoints(Endpoint... endpoints) {
        List<Endpoint> answer = new ArrayList<Endpoint>();
        answer.addAll(Arrays.asList(endpoints));
        return answer;
    }

    /**
     * Creates a default <a href="http://camel.apache.org/error-handler.html">error handler</a>.
     *
     * @return the builder
     */
    public DefaultErrorHandlerBuilder defaultErrorHandler() {
        return new DefaultErrorHandlerBuilder();
    }

    /**
     * Creates a disabled <a href="http://camel.apache.org/error-handler.html">error handler</a>
     * for removing the default error handler
     *
     * @return the builder
     */
    public NoErrorHandlerBuilder noErrorHandler() {
        return new NoErrorHandlerBuilder();
    }

    /**
     * Creates an <a href="http://camel.apache.org/error-handler.html">error handler</a>
     * which just logs errors
     *
     * @return the builder
     */
    public LoggingErrorHandlerBuilder loggingErrorHandler() {
        return new LoggingErrorHandlerBuilder();
    }

    /**
     * Creates an <a href="http://camel.apache.org/error-handler.html">error handler</a>
     * which just logs errors, to the logger with the given name
     *
     * @return the builder
     */
    public LoggingErrorHandlerBuilder loggingErrorHandler(String log) {
        return loggingErrorHandler(LoggerFactory.getLogger(log));
    }

    /**
     * Creates an <a href="http://camel.apache.org/error-handler.html">error handler</a>
     * which just logs errors, to the given logger
     *
     * @return the builder
     */
    public LoggingErrorHandlerBuilder loggingErrorHandler(Logger log) {
        return new LoggingErrorHandlerBuilder(log);
    }

    /**
     * Creates an <a href="http://camel.apache.org/error-handler.html">error handler</a>
     * which just logs errors, to the given logger at the given level
     *
     * @return the builder
     */
    public LoggingErrorHandlerBuilder loggingErrorHandler(Logger log, LoggingLevel level) {
        return new LoggingErrorHandlerBuilder(log, level);
    }

    /**
     * <a href="http://camel.apache.org/dead-letter-channel.html">Dead Letter Channel EIP:</a>
     * is a error handler for handling messages that could not be delivered to it's intended destination.
     *
     * @param deadLetterUri uri to the dead letter endpoint storing dead messages
     * @return the builder
     */
    public DeadLetterChannelBuilder deadLetterChannel(String deadLetterUri) {
        return deadLetterChannel(endpoint(deadLetterUri));
    }

    /**
     * <a href="http://camel.apache.org/dead-letter-channel.html">Dead Letter Channel EIP:</a>
     * is a error handler for handling messages that could not be delivered to it's intended destination.
     *
     * @param deadLetterEndpoint dead letter endpoint storing dead messages
     * @return the builder
     */
    public DeadLetterChannelBuilder deadLetterChannel(Endpoint deadLetterEndpoint) {
        return new DeadLetterChannelBuilder(deadLetterEndpoint);
    }

    // Properties
    // -------------------------------------------------------------------------

    public ModelCamelContext getContext() {
        return context;
    }

    /**
     * @deprecated use {@link #setContext(ModelCamelContext)} instead
     */
    @Deprecated
    public void setContext(CamelContext context) {
        this.context = (ModelCamelContext)context;
    }

    public void setContext(ModelCamelContext context) {
        this.context = context;
    }

    /**
     * Returns the error handler builder, lazily creating the default one on
     * first access when none has been set explicitly.
     */
    public ErrorHandlerBuilder getErrorHandlerBuilder() {
        if (errorHandlerBuilder == null) {
            errorHandlerBuilder = createErrorHandlerBuilder();
        }
        return errorHandlerBuilder;
    }

    // Subclasses may override to change the default error handler.
    protected ErrorHandlerBuilder createErrorHandlerBuilder() {
        return new DefaultErrorHandlerBuilder();
    }

    /**
     * Sets the error handler to use with processors created by this builder
     */
    public void setErrorHandlerBuilder(ErrorHandlerBuilder errorHandlerBuilder) {
        this.errorHandlerBuilder = errorHandlerBuilder;
    }
}
| |
package org.libsdl.app;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import android.app.*;
import android.content.*;
import android.view.*;
import android.view.inputmethod.BaseInputConnection;
import android.view.inputmethod.EditorInfo;
import android.view.inputmethod.InputConnection;
import android.view.inputmethod.InputMethodManager;
import android.widget.AbsoluteLayout;
import android.os.*;
import android.util.Log;
import android.graphics.*;
import android.media.*;
import android.hardware.*;
/**
SDL Activity
*/
public class SDLActivity extends Activity {
private static final String TAG = "SDL";
// Keep track of the paused state
public static boolean mIsPaused, mIsSurfaceReady, mHasFocus;
public static boolean mExitCalledFromJava;
// Main components
protected static SDLActivity mSingleton;
protected static SDLSurface mSurface;
protected static View mTextEdit;
protected static ViewGroup mLayout;
protected static SDLJoystickHandler mJoystickHandler;
// This is what SDL runs in. It invokes SDL_main(), eventually
protected static Thread mSDLThread;
// Audio
protected static AudioTrack mAudioTrack;
    // Load the .so
    // Runs once per process. The commented-out lines are optional SDL
    // satellite libraries this app does not ship; "main" is the app itself.
    static {
        System.loadLibrary("SDL2");
        //System.loadLibrary("SDL2_image");
        System.loadLibrary("SDL2_mixer");
        //System.loadLibrary("SDL2_net");
        //System.loadLibrary("SDL2_ttf");
        System.loadLibrary("main");
    }
    /**
     * Resets all static state to its defaults.
     * The static nature of the singleton and Android quirkyness force us to initialize everything here
     * Otherwise, when exiting the app and returning to it, these variables *keep* their pre exit values
     */
    public static void initialize() {
        mSingleton = null;
        mSurface = null;
        mTextEdit = null;
        mLayout = null;
        mJoystickHandler = null;
        mSDLThread = null;
        mAudioTrack = null;
        mExitCalledFromJava = false;
        mIsPaused = false;
        mIsSurfaceReady = false;
        // Start with focus so the first resume is not blocked on a focus event.
        mHasFocus = true;
    }
    // Setup
    /**
     * Activity entry point: resets static state, creates the SDL surface and
     * the joystick handler matching the API level, and installs the layout.
     */
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        Log.v("SDL", "onCreate():" + mSingleton);
        super.onCreate(savedInstanceState);
        SDLActivity.initialize();
        // So we can call stuff from static callbacks
        mSingleton = this;
        // Set up the surface
        mSurface = new SDLSurface(getApplication());
        // API 12 added the InputDevice APIs needed for full joystick support.
        if(Build.VERSION.SDK_INT >= 12) {
            mJoystickHandler = new SDLJoystickHandler_API12();
        }
        else {
            mJoystickHandler = new SDLJoystickHandler();
        }
        mLayout = new AbsoluteLayout(this);
        mLayout.addView(mSurface);
        setContentView(mLayout);
        //prevent screen from sleeping
        getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
    }
    // Events
    /** Forwards the Android pause event to the shared SDL pause logic. */
    @Override
    protected void onPause() {
        Log.v("SDL", "onPause()");
        super.onPause();
        SDLActivity.handlePause();
    }
    /** Forwards the Android resume event to the shared SDL resume logic. */
    @Override
    protected void onResume() {
        Log.v("SDL", "onResume()");
        super.onResume();
        SDLActivity.handleResume();
    }
    /**
     * Tracks window focus; regaining focus may complete a pending resume.
     * NOTE(review): losing focus does not trigger handlePause() here --
     * pausing appears to be driven only by onPause()/surfaceDestroyed.
     * Confirm that is the intended behaviour.
     */
    @Override
    public void onWindowFocusChanged(boolean hasFocus) {
        super.onWindowFocusChanged(hasFocus);
        Log.v("SDL", "onWindowFocusChanged(): " + hasFocus);
        SDLActivity.mHasFocus = hasFocus;
        if (hasFocus) {
            SDLActivity.handleResume();
        }
    }
    /** Forwards Android low-memory pressure to the native SDL side. */
    @Override
    public void onLowMemory() {
        Log.v("SDL", "onLowMemory()");
        super.onLowMemory();
        SDLActivity.nativeLowMemory();
    }
@Override
protected void onDestroy() {
Log.v("SDL", "onDestroy()");
// Send a quit message to the application
SDLActivity.mExitCalledFromJava = true;
SDLActivity.nativeQuit();
// Now wait for the SDL thread to quit
if (SDLActivity.mSDLThread != null) {
try {
SDLActivity.mSDLThread.join();
} catch(Exception e) {
Log.v("SDL", "Problem stopping thread: " + e);
}
SDLActivity.mSDLThread = null;
//Log.v("SDL", "Finished waiting for SDL thread");
}
super.onDestroy();
// Reset everything in case the user re opens the app
SDLActivity.initialize();
}
@Override
public boolean dispatchKeyEvent(KeyEvent event) {
int keyCode = event.getKeyCode();
// Ignore certain special keys so they're handled by Android
if (keyCode == KeyEvent.KEYCODE_VOLUME_DOWN ||
keyCode == KeyEvent.KEYCODE_VOLUME_UP ||
keyCode == KeyEvent.KEYCODE_CAMERA ||
keyCode == 168 || /* API 11: KeyEvent.KEYCODE_ZOOM_IN */
keyCode == 169 /* API 11: KeyEvent.KEYCODE_ZOOM_OUT */
) {
return false;
}
return super.dispatchKeyEvent(event);
}
    /** Called by onPause or surfaceDestroyed. Even if surfaceDestroyed
     * is the first to be called, mIsSurfaceReady should still be set
     * to 'true' during the call to onPause (in a usual scenario).
     */
    public static void handlePause() {
        // Only pause once, and only if there was a live surface to pause.
        if (!SDLActivity.mIsPaused && SDLActivity.mIsSurfaceReady) {
            SDLActivity.mIsPaused = true;
            SDLActivity.nativePause();
            // Stop accelerometer delivery while paused.
            mSurface.enableSensor(Sensor.TYPE_ACCELEROMETER, false);
        }
    }
    /** Called by onResume or surfaceCreated. An actual resume should be done only when the surface is ready.
     * Note: Some Android variants may send multiple surfaceChanged events, so we don't need to resume
     * every time we get one of those events, only if it comes after surfaceDestroyed
     */
    public static void handleResume() {
        // Resume requires all three: previously paused, surface ready, focused.
        if (SDLActivity.mIsPaused && SDLActivity.mIsSurfaceReady && SDLActivity.mHasFocus) {
            SDLActivity.mIsPaused = false;
            SDLActivity.nativeResume();
            // Re-enable accelerometer delivery after the pause.
            mSurface.enableSensor(Sensor.TYPE_ACCELEROMETER, true);
        }
    }
    /* The native thread has finished; drop the thread reference and close the activity. */
    public static void handleNativeExit() {
        SDLActivity.mSDLThread = null;
        mSingleton.finish();
    }
// Messages from the SDLMain thread
static final int COMMAND_CHANGE_TITLE = 1;
static final int COMMAND_UNUSED = 2;
static final int COMMAND_TEXTEDIT_HIDE = 3;
protected static final int COMMAND_USER = 0x8000;
    /**
     * This method is called by SDL if SDL did not handle a message itself.
     * This happens if a received message contains an unsupported command.
     * Method can be overridden to handle Messages in a different class.
     * @param command the command of the message.
     * @param param the parameter of the message. May be null.
     * @return if the message was handled in overridden method.
     */
    protected boolean onUnhandledMessage(int command, Object param) {
        // Default implementation handles nothing; subclasses may override.
        return false;
    }
    /**
     * A Handler class for Messages from native SDL applications.
     * It uses current Activities as target (e.g. for the title).
     * static to prevent implicit references to enclosing object.
     */
    protected static class SDLCommandHandler extends Handler {
        @Override
        public void handleMessage(Message msg) {
            Context context = getContext();
            if (context == null) {
                Log.e(TAG, "error handling message, getContext() returned null");
                return;
            }
            // msg.arg1 carries the COMMAND_* code; msg.obj the optional payload.
            switch (msg.arg1) {
            case COMMAND_CHANGE_TITLE:
                // Changing the title requires an Activity, not just any Context.
                if (context instanceof Activity) {
                    ((Activity) context).setTitle((String)msg.obj);
                } else {
                    Log.e(TAG, "error handling message, getContext() returned no Activity");
                }
                break;
            case COMMAND_TEXTEDIT_HIDE:
                if (mTextEdit != null) {
                    mTextEdit.setVisibility(View.GONE);
                    // Also dismiss the soft keyboard attached to the edit view.
                    InputMethodManager imm = (InputMethodManager) context.getSystemService(Context.INPUT_METHOD_SERVICE);
                    imm.hideSoftInputFromWindow(mTextEdit.getWindowToken(), 0);
                }
                break;
            default:
                // Give subclasses a chance to handle custom (COMMAND_USER+) commands.
                if ((context instanceof SDLActivity) && !((SDLActivity) context).onUnhandledMessage(msg.arg1, msg.obj)) {
                    Log.e(TAG, "error handling message, command is " + msg.arg1);
                }
            }
        }
    }
// Handler for the messages
Handler commandHandler = new SDLCommandHandler();
// Send a message from the SDLMain thread
boolean sendCommand(int command, Object data) {
Message msg = commandHandler.obtainMessage();
msg.arg1 = command;
msg.obj = data;
return commandHandler.sendMessage(msg);
}
// C functions we call
public static native void nativeInit();
public static native void nativeLowMemory();
public static native void nativeQuit();
public static native void nativePause();
public static native void nativeResume();
public static native void onNativeResize(int x, int y, int format);
public static native int onNativePadDown(int device_id, int keycode);
public static native int onNativePadUp(int device_id, int keycode);
public static native void onNativeJoy(int device_id, int axis,
float value);
public static native void onNativeHat(int device_id, int hat_id,
int x, int y);
public static native void onNativeKeyDown(int keycode);
public static native void onNativeKeyUp(int keycode);
public static native void onNativeKeyboardFocusLost();
public static native void onNativeTouch(int touchDevId, int pointerFingerId,
int action, float x,
float y, float p);
public static native void onNativeAccel(float x, float y, float z);
public static native void onNativeSurfaceChanged();
public static native void onNativeSurfaceDestroyed();
public static native void nativeFlipBuffers();
public static native int nativeAddJoystick(int device_id, String name,
int is_accelerometer, int nbuttons,
int naxes, int nhats, int nballs);
public static native int nativeRemoveJoystick(int device_id);
    /** Requests a buffer flip on the native side. */
    public static void flipBuffers() {
        SDLActivity.nativeFlipBuffers();
    }
    /** Queues a title change on the UI thread; returns whether the message was posted. */
    public static boolean setActivityTitle(String title) {
        // Called from SDLMain() thread and can't directly affect the view
        return mSingleton.sendCommand(COMMAND_CHANGE_TITLE, title);
    }
    // Static convenience wrapper used by native code: boxes the int parameter
    // and forwards to the singleton's command handler.
    public static boolean sendMessage(int command, int param) {
        return mSingleton.sendCommand(command, Integer.valueOf(param));
    }
    // The activity singleton doubles as the application Context for helpers.
    public static Context getContext() {
        return mSingleton;
    }
/**
* @return result of getSystemService(name) but executed on UI thread.
*/
public Object getSystemServiceFromUiThread(final String name) {
final Object lock = new Object();
final Object[] results = new Object[2]; // array for writable variables
synchronized (lock) {
runOnUiThread(new Runnable() {
@Override
public void run() {
synchronized (lock) {
results[0] = getSystemService(name);
results[1] = Boolean.TRUE;
lock.notify();
}
}
});
if (results[1] == null) {
try {
lock.wait();
} catch (InterruptedException ex) {
ex.printStackTrace();
}
}
}
return results[0];
}
    // Runnable executed on the UI thread that positions and shows the hidden
    // text-input view (DummyEdit) so the soft keyboard can be raised over the
    // region the native side asked for.
    static class ShowTextInputTask implements Runnable {
        /*
         * This is used to regulate the pan&scan method to have some offset from
         * the bottom edge of the input region and the top edge of an input
         * method (soft keyboard)
         */
        static final int HEIGHT_PADDING = 15;
        // Requested input rectangle in layout coordinates.
        public int x, y, w, h;
        public ShowTextInputTask(int x, int y, int w, int h) {
            this.x = x;
            this.y = y;
            this.w = w;
            this.h = h;
        }
        @Override
        public void run() {
            AbsoluteLayout.LayoutParams params = new AbsoluteLayout.LayoutParams(
                    w, h + HEIGHT_PADDING, x, y);
            // Lazily create the shared DummyEdit the first time input is shown;
            // afterwards just reposition it.
            if (mTextEdit == null) {
                mTextEdit = new DummyEdit(getContext());
                mLayout.addView(mTextEdit, params);
            } else {
                mTextEdit.setLayoutParams(params);
            }
            mTextEdit.setVisibility(View.VISIBLE);
            mTextEdit.requestFocus();
            InputMethodManager imm = (InputMethodManager) getContext().getSystemService(Context.INPUT_METHOD_SERVICE);
            imm.showSoftInput(mTextEdit, 0);
        }
    }
    // Called by native code to raise the soft keyboard over the given rect;
    // returns true if the UI task was queued on the command handler.
    public static boolean showTextInput(int x, int y, int w, int h) {
        // Transfer the task to the main thread as a Runnable
        return mSingleton.commandHandler.post(new ShowTextInputTask(x, y, w, h));
    }
    // Exposes the drawing Surface to native code (used for EGL setup).
    public static Surface getNativeSurface() {
        return SDLActivity.mSurface.getNativeSurface();
    }
    // Audio
    // Called by native SDL to open the audio device. Creates (once) a streaming
    // AudioTrack matching the requested format; the buffer is enlarged to at
    // least the platform minimum. Returns 0 on success, -1 on failure.
    public static int audioInit(int sampleRate, boolean is16Bit, boolean isStereo, int desiredFrames) {
        int channelConfig = isStereo ? AudioFormat.CHANNEL_CONFIGURATION_STEREO : AudioFormat.CHANNEL_CONFIGURATION_MONO;
        int audioFormat = is16Bit ? AudioFormat.ENCODING_PCM_16BIT : AudioFormat.ENCODING_PCM_8BIT;
        // Bytes per frame: channels * bytes-per-sample.
        int frameSize = (isStereo ? 2 : 1) * (is16Bit ? 2 : 1);
        Log.v("SDL", "SDL audio: wanted " + (isStereo ? "stereo" : "mono") + " " + (is16Bit ? "16-bit" : "8-bit") + " " + (sampleRate / 1000f) + "kHz, " + desiredFrames + " frames buffer");
        // Let the user pick a larger buffer if they really want -- but ye
        // gods they probably shouldn't, the minimums are horrifyingly high
        // latency already
        desiredFrames = Math.max(desiredFrames, (AudioTrack.getMinBufferSize(sampleRate, channelConfig, audioFormat) + frameSize - 1) / frameSize);
        if (mAudioTrack == null) {
            mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate,
                    channelConfig, audioFormat, desiredFrames * frameSize, AudioTrack.MODE_STREAM);
            // Instantiating AudioTrack can "succeed" without an exception and the track may still be invalid
            // Ref: https://android.googlesource.com/platform/frameworks/base/+/refs/heads/master/media/java/android/media/AudioTrack.java
            // Ref: http://developer.android.com/reference/android/media/AudioTrack.html#getState()
            if (mAudioTrack.getState() != AudioTrack.STATE_INITIALIZED) {
                Log.e("SDL", "Failed during initialization of Audio Track");
                mAudioTrack = null;
                return -1;
            }
            mAudioTrack.play();
        }
        Log.v("SDL", "SDL audio: got " + ((mAudioTrack.getChannelCount() >= 2) ? "stereo" : "mono") + " " + ((mAudioTrack.getAudioFormat() == AudioFormat.ENCODING_PCM_16BIT) ? "16-bit" : "8-bit") + " " + (mAudioTrack.getSampleRate() / 1000f) + "kHz, " + desiredFrames + " frames buffer");
        return 0;
    }
public static void audioWriteShortBuffer(short[] buffer) {
for (int i = 0; i < buffer.length; ) {
int result = mAudioTrack.write(buffer, i, buffer.length - i);
if (result > 0) {
i += result;
} else if (result == 0) {
try {
Thread.sleep(1);
} catch(InterruptedException e) {
// Nom nom
}
} else {
Log.w("SDL", "SDL audio: error return from write(short)");
return;
}
}
}
public static void audioWriteByteBuffer(byte[] buffer) {
for (int i = 0; i < buffer.length; ) {
int result = mAudioTrack.write(buffer, i, buffer.length - i);
if (result > 0) {
i += result;
} else if (result == 0) {
try {
Thread.sleep(1);
} catch(InterruptedException e) {
// Nom nom
}
} else {
Log.w("SDL", "SDL audio: error return from write(byte)");
return;
}
}
}
public static void audioQuit() {
if (mAudioTrack != null) {
mAudioTrack.stop();
mAudioTrack = null;
}
}
// Input
/**
* @return an array which may be empty but is never null.
*/
public static int[] inputGetInputDeviceIds(int sources) {
int[] ids = InputDevice.getDeviceIds();
int[] filtered = new int[ids.length];
int used = 0;
for (int i = 0; i < ids.length; ++i) {
InputDevice device = InputDevice.getDevice(ids[i]);
if ((device != null) && ((device.getSources() & sources) != 0)) {
filtered[used++] = device.getId();
}
}
return Arrays.copyOf(filtered, used);
}
    // Joystick glue code, just a series of stubs that redirect to the SDLJoystickHandler instance
    public static boolean handleJoystickMotionEvent(MotionEvent event) {
        return mJoystickHandler.handleMotionEvent(event);
    }
    // Rescans attached input devices; only meaningful once the SDL thread is up.
    public static void pollInputDevices() {
        if (SDLActivity.mSDLThread != null) {
            mJoystickHandler.pollInputDevices();
        }
    }
}
/**
    Simple nativeInit() runnable.
    Runs on the dedicated "SDLThread"; nativeInit() executes the native
    SDL_main() and this runnable ends when the native app terminates (a
    watcher thread joins it to detect native exit).
 */
class SDLMain implements Runnable {
    @Override
    public void run() {
        // Runs SDL_main()
        SDLActivity.nativeInit();
        //Log.v("SDL", "SDL thread terminated")
    }
}
/**
    SDLSurface. This is what we draw on, so we need to know when it's created
    in order to do anything useful.
    Because of this, that's where we set up the SDL thread
 */
class SDLSurface extends SurfaceView implements SurfaceHolder.Callback,
    View.OnKeyListener, View.OnTouchListener, SensorEventListener {
    // Sensors
    protected static SensorManager mSensorManager;
    protected static Display mDisplay;
    // Keep track of the surface size to normalize touch events
    protected static float mWidth, mHeight;
    // Startup
    public SDLSurface(Context context) {
        super(context);
        getHolder().addCallback(this);
        setFocusable(true);
        setFocusableInTouchMode(true);
        requestFocus();
        setOnKeyListener(this);
        setOnTouchListener(this);
        mDisplay = ((WindowManager)context.getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay();
        mSensorManager = (SensorManager)context.getSystemService(Context.SENSOR_SERVICE);
        // Generic motion (joystick) events only exist from API 12 onwards.
        if(Build.VERSION.SDK_INT >= 12) {
            setOnGenericMotionListener(new SDLGenericMotionListener_API12());
        }
        // Some arbitrary defaults to avoid a potential division by zero
        mWidth = 1.0f;
        mHeight = 1.0f;
    }
    public Surface getNativeSurface() {
        return getHolder().getSurface();
    }
    // Called when we have a valid drawing surface
    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        Log.v("SDL", "surfaceCreated()");
        holder.setType(SurfaceHolder.SURFACE_TYPE_GPU);
    }
    // Called when we lose the surface
    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
        Log.v("SDL", "surfaceDestroyed()");
        // Call this *before* setting mIsSurfaceReady to 'false'
        SDLActivity.handlePause();
        SDLActivity.mIsSurfaceReady = false;
        SDLActivity.onNativeSurfaceDestroyed();
    }
    // Called when the surface is resized
    @Override
    public void surfaceChanged(SurfaceHolder holder,
                               int format, int width, int height) {
        Log.v("SDL", "surfaceChanged()");
        // Map the Android PixelFormat to the equivalent SDL pixel-format code.
        int sdlFormat = 0x15151002; // SDL_PIXELFORMAT_RGB565 by default
        switch (format) {
        case PixelFormat.A_8:
            Log.v("SDL", "pixel format A_8");
            break;
        case PixelFormat.LA_88:
            Log.v("SDL", "pixel format LA_88");
            break;
        case PixelFormat.L_8:
            Log.v("SDL", "pixel format L_8");
            break;
        case PixelFormat.RGBA_4444:
            Log.v("SDL", "pixel format RGBA_4444");
            sdlFormat = 0x15421002; // SDL_PIXELFORMAT_RGBA4444
            break;
        case PixelFormat.RGBA_5551:
            Log.v("SDL", "pixel format RGBA_5551");
            sdlFormat = 0x15441002; // SDL_PIXELFORMAT_RGBA5551
            break;
        case PixelFormat.RGBA_8888:
            Log.v("SDL", "pixel format RGBA_8888");
            sdlFormat = 0x16462004; // SDL_PIXELFORMAT_RGBA8888
            break;
        case PixelFormat.RGBX_8888:
            Log.v("SDL", "pixel format RGBX_8888");
            sdlFormat = 0x16261804; // SDL_PIXELFORMAT_RGBX8888
            break;
        case PixelFormat.RGB_332:
            Log.v("SDL", "pixel format RGB_332");
            sdlFormat = 0x14110801; // SDL_PIXELFORMAT_RGB332
            break;
        case PixelFormat.RGB_565:
            Log.v("SDL", "pixel format RGB_565");
            sdlFormat = 0x15151002; // SDL_PIXELFORMAT_RGB565
            break;
        case PixelFormat.RGB_888:
            Log.v("SDL", "pixel format RGB_888");
            // Not sure this is right, maybe SDL_PIXELFORMAT_RGB24 instead?
            sdlFormat = 0x16161804; // SDL_PIXELFORMAT_RGB888
            break;
        default:
            Log.v("SDL", "pixel format unknown " + format);
            break;
        }
        mWidth = width;
        mHeight = height;
        SDLActivity.onNativeResize(width, height, sdlFormat);
        Log.v("SDL", "Window size:" + width + "x"+height);
        // Set mIsSurfaceReady to 'true' *before* making a call to handleResume
        SDLActivity.mIsSurfaceReady = true;
        SDLActivity.onNativeSurfaceChanged();
        if (SDLActivity.mSDLThread == null) {
            // This is the entry point to the C app.
            // Start up the C app thread and enable sensor input for the first time
            SDLActivity.mSDLThread = new Thread(new SDLMain(), "SDLThread");
            enableSensor(Sensor.TYPE_ACCELEROMETER, true);
            SDLActivity.mSDLThread.start();
            // Set up a listener thread to catch when the native thread ends
            new Thread(new Runnable(){
                @Override
                public void run(){
                    try {
                        SDLActivity.mSDLThread.join();
                    }
                    catch(Exception e){}
                    finally{
                        // Native thread has finished
                        if (! SDLActivity.mExitCalledFromJava) {
                            SDLActivity.handleNativeExit();
                        }
                    }
                }
            }).start();
        }
    }
    // unused
    @Override
    public void onDraw(Canvas canvas) {}
    // Key events
    @Override
    public boolean onKey(View v, int keyCode, KeyEvent event) {
        // Dispatch the different events depending on where they come from
        // Some SOURCE_DPAD or SOURCE_GAMEPAD are also SOURCE_KEYBOARD
        // So, we try to process them as DPAD or GAMEPAD events first, if that fails we try them as KEYBOARD
        if ( (event.getSource() & 0x00000401) != 0 || /* API 12: SOURCE_GAMEPAD */
                   (event.getSource() & InputDevice.SOURCE_DPAD) != 0 ) {
            if (event.getAction() == KeyEvent.ACTION_DOWN) {
                // Return value 0 means the native side consumed the pad event.
                if (SDLActivity.onNativePadDown(event.getDeviceId(), keyCode) == 0) {
                    return true;
                }
            } else if (event.getAction() == KeyEvent.ACTION_UP) {
                if (SDLActivity.onNativePadUp(event.getDeviceId(), keyCode) == 0) {
                    return true;
                }
            }
        }
        if( (event.getSource() & InputDevice.SOURCE_KEYBOARD) != 0) {
            if (event.getAction() == KeyEvent.ACTION_DOWN) {
                //Log.v("SDL", "key down: " + keyCode);
                SDLActivity.onNativeKeyDown(keyCode);
                return true;
            }
            else if (event.getAction() == KeyEvent.ACTION_UP) {
                //Log.v("SDL", "key up: " + keyCode);
                SDLActivity.onNativeKeyUp(keyCode);
                return true;
            }
        }
        return false;
    }
    // Touch events
    @Override
    public boolean onTouch(View v, MotionEvent event) {
        /* Ref: http://developer.android.com/training/gestures/multi.html */
        final int touchDevId = event.getDeviceId();
        final int pointerCount = event.getPointerCount();
        int action = event.getActionMasked();
        int pointerFingerId;
        int i = -1;
        float x,y,p;
        switch(action) {
            case MotionEvent.ACTION_MOVE:
                // Coordinates are normalized to [0,1] by the surface size.
                for (i = 0; i < pointerCount; i++) {
                    pointerFingerId = event.getPointerId(i);
                    x = event.getX(i) / mWidth;
                    y = event.getY(i) / mHeight;
                    p = event.getPressure(i);
                    SDLActivity.onNativeTouch(touchDevId, pointerFingerId, action, x, y, p);
                }
                break;
            case MotionEvent.ACTION_UP:
            case MotionEvent.ACTION_DOWN:
                // Primary pointer up/down, the index is always zero
                i = 0;
                // Intentional fall-through: shares the single-pointer handling below.
            case MotionEvent.ACTION_POINTER_UP:
            case MotionEvent.ACTION_POINTER_DOWN:
                // Non primary pointer up/down
                if (i == -1) {
                    i = event.getActionIndex();
                }
                pointerFingerId = event.getPointerId(i);
                x = event.getX(i) / mWidth;
                y = event.getY(i) / mHeight;
                p = event.getPressure(i);
                SDLActivity.onNativeTouch(touchDevId, pointerFingerId, action, x, y, p);
                break;
            default:
                break;
        }
        return true;
    }
    // Sensor events
    public void enableSensor(int sensortype, boolean enabled) {
        // TODO: This uses getDefaultSensor - what if we have >1 accels?
        if (enabled) {
            mSensorManager.registerListener(this,
                            mSensorManager.getDefaultSensor(sensortype),
                            SensorManager.SENSOR_DELAY_GAME, null);
        } else {
            mSensorManager.unregisterListener(this,
                            mSensorManager.getDefaultSensor(sensortype));
        }
    }
    @Override
    public void onAccuracyChanged(Sensor sensor, int accuracy) {
        // TODO
    }
    @Override
    public void onSensorChanged(SensorEvent event) {
        if (event.sensor.getType() == Sensor.TYPE_ACCELEROMETER) {
            // Remap accelerometer axes to match the current display rotation.
            float x, y;
            switch (mDisplay.getRotation()) {
                case Surface.ROTATION_90:
                    x = -event.values[1];
                    y = event.values[0];
                    break;
                case Surface.ROTATION_270:
                    x = event.values[1];
                    y = -event.values[0];
                    break;
                case Surface.ROTATION_180:
                    x = -event.values[1];
                    y = -event.values[0];
                    break;
                default:
                    x = event.values[0];
                    y = event.values[1];
                    break;
            }
            // Values are normalized by Earth gravity before crossing to native.
            SDLActivity.onNativeAccel(-x / SensorManager.GRAVITY_EARTH,
                                      y / SensorManager.GRAVITY_EARTH,
                                      event.values[2] / SensorManager.GRAVITY_EARTH - 1);
        }
    }
}
/* This is a fake invisible editor view that receives the input and defines the
 * pan&scan region
 */
class DummyEdit extends View implements View.OnKeyListener {
    // Created lazily in onCreateInputConnection(); null until the IME first
    // attaches to this view.
    InputConnection ic;
    public DummyEdit(Context context) {
        super(context);
        setFocusableInTouchMode(true);
        setFocusable(true);
        setOnKeyListener(this);
    }
    @Override
    public boolean onCheckIsTextEditor() {
        return true;
    }
    @Override
    public boolean onKey(View v, int keyCode, KeyEvent event) {
        // This handles the hardware keyboard input
        if (event.isPrintingKey()) {
            if (event.getAction() == KeyEvent.ACTION_DOWN) {
                // Guard against a hardware key arriving before the IME has
                // attached: ic is only assigned in onCreateInputConnection(),
                // so it may still be null here (was an NPE previously).
                if (ic != null) {
                    ic.commitText(String.valueOf((char) event.getUnicodeChar()), 1);
                }
            }
            return true;
        }
        // Non-printing keys go straight to the native key handlers.
        if (event.getAction() == KeyEvent.ACTION_DOWN) {
            SDLActivity.onNativeKeyDown(keyCode);
            return true;
        } else if (event.getAction() == KeyEvent.ACTION_UP) {
            SDLActivity.onNativeKeyUp(keyCode);
            return true;
        }
        return false;
    }
    //
    @Override
    public boolean onKeyPreIme (int keyCode, KeyEvent event) {
        // As seen on StackOverflow: http://stackoverflow.com/questions/7634346/keyboard-hide-event
        // FIXME: Discussion at http://bugzilla.libsdl.org/show_bug.cgi?id=1639
        // FIXME: This is not a 100% effective solution to the problem of detecting if the keyboard is showing or not
        // FIXME: A more effective solution would be to change our Layout from AbsoluteLayout to Relative or Linear
        // FIXME: And determine the keyboard presence doing this: http://stackoverflow.com/questions/2150078/how-to-check-visibility-of-software-keyboard-in-android
        // FIXME: An even more effective way would be if Android provided this out of the box, but where would the fun be in that :)
        if (event.getAction()==KeyEvent.ACTION_UP && keyCode == KeyEvent.KEYCODE_BACK) {
            if (SDLActivity.mTextEdit != null && SDLActivity.mTextEdit.getVisibility() == View.VISIBLE) {
                SDLActivity.onNativeKeyboardFocusLost();
            }
        }
        return super.onKeyPreIme(keyCode, event);
    }
    @Override
    public InputConnection onCreateInputConnection(EditorInfo outAttrs) {
        ic = new SDLInputConnection(this, true);
        outAttrs.imeOptions = EditorInfo.IME_FLAG_NO_EXTRACT_UI
                | 33554432 /* API 11: EditorInfo.IME_FLAG_NO_FULLSCREEN */;
        return ic;
    }
}
// Input connection that forwards soft-keyboard / IME text and key events to
// the native SDL key and text-input callbacks.
class SDLInputConnection extends BaseInputConnection {
    public SDLInputConnection(View targetView, boolean fullEditor) {
        super(targetView, fullEditor);
    }
    @Override
    public boolean sendKeyEvent(KeyEvent event) {
        /*
         * This handles the keycodes from soft keyboard (and IME-translated
         * input from hardkeyboard)
         */
        int keyCode = event.getKeyCode();
        if (event.getAction() == KeyEvent.ACTION_DOWN) {
            // Printing keys also produce committed text in addition to the key event.
            if (event.isPrintingKey()) {
                commitText(String.valueOf((char) event.getUnicodeChar()), 1);
            }
            SDLActivity.onNativeKeyDown(keyCode);
            return true;
        } else if (event.getAction() == KeyEvent.ACTION_UP) {
            SDLActivity.onNativeKeyUp(keyCode);
            return true;
        }
        return super.sendKeyEvent(event);
    }
    @Override
    public boolean commitText(CharSequence text, int newCursorPosition) {
        // Mirror committed text into native SDL text input before the default handling.
        nativeCommitText(text.toString(), newCursorPosition);
        return super.commitText(text, newCursorPosition);
    }
    @Override
    public boolean setComposingText(CharSequence text, int newCursorPosition) {
        nativeSetComposingText(text.toString(), newCursorPosition);
        return super.setComposingText(text, newCursorPosition);
    }
    public native void nativeCommitText(String text, int newCursorPosition);
    public native void nativeSetComposingText(String text, int newCursorPosition);
    @Override
    public boolean deleteSurroundingText(int beforeLength, int afterLength) {
        // Workaround to capture backspace key. Ref: http://stackoverflow.com/questions/14560344/android-backspace-in-webview-baseinputconnection
        if (beforeLength == 1 && afterLength == 0) {
            // backspace
            return super.sendKeyEvent(new KeyEvent(KeyEvent.ACTION_DOWN, KeyEvent.KEYCODE_DEL))
                && super.sendKeyEvent(new KeyEvent(KeyEvent.ACTION_UP, KeyEvent.KEYCODE_DEL));
        }
        return super.deleteSurroundingText(beforeLength, afterLength);
    }
}
/* A null joystick handler for API level < 12 devices (the accelerometer is handled separately) */
// Base class / no-op implementation; SDLJoystickHandler_API12 overrides both
// methods on capable devices.
class SDLJoystickHandler {
    // Returns true when the event was consumed; the no-op never consumes.
    public boolean handleMotionEvent(MotionEvent event) {
        return false;
    }
    public void pollInputDevices() {
    }
}
/* Actual joystick functionality available for API >= 12 devices */
class SDLJoystickHandler_API12 extends SDLJoystickHandler {
    // Snapshot of one attached joystick: its id, name, and sorted motion ranges
    // split into regular axes and hat (D-pad) axes.
    class SDLJoystick {
        public int device_id;
        public String name;
        public ArrayList<InputDevice.MotionRange> axes;
        public ArrayList<InputDevice.MotionRange> hats;
    }
    // Orders motion ranges by axis id so axis indices are stable across polls.
    class RangeComparator implements Comparator<InputDevice.MotionRange>
    {
        @Override
        public int compare(InputDevice.MotionRange arg0, InputDevice.MotionRange arg1) {
            return arg0.getAxis() - arg1.getAxis();
        }
    }
    private ArrayList<SDLJoystick> mJoysticks;
    public SDLJoystickHandler_API12() {
        mJoysticks = new ArrayList<SDLJoystick>();
    }
    @Override
    public void pollInputDevices() {
        int[] deviceIds = InputDevice.getDeviceIds();
        // It helps processing the device ids in reverse order
        // For example, in the case of the XBox 360 wireless dongle,
        // so the first controller seen by SDL matches what the receiver
        // considers to be the first controller
        for(int i=deviceIds.length-1; i>-1; i--) {
            SDLJoystick joystick = getJoystick(deviceIds[i]);
            if (joystick == null) {
                InputDevice joystickDevice = InputDevice.getDevice(deviceIds[i]);
                // getDevice() can return null if the device disappeared between
                // getDeviceIds() and this call; skip it rather than NPE (the
                // same guard inputGetInputDeviceIds already applies).
                if (joystickDevice != null
                        && (joystickDevice.getSources() & InputDevice.SOURCE_CLASS_JOYSTICK) != 0) {
                    joystick = new SDLJoystick();
                    joystick.device_id = deviceIds[i];
                    joystick.name = joystickDevice.getName();
                    joystick.axes = new ArrayList<InputDevice.MotionRange>();
                    joystick.hats = new ArrayList<InputDevice.MotionRange>();
                    List<InputDevice.MotionRange> ranges = joystickDevice.getMotionRanges();
                    Collections.sort(ranges, new RangeComparator());
                    for (InputDevice.MotionRange range : ranges ) {
                        if ((range.getSource() & InputDevice.SOURCE_CLASS_JOYSTICK) != 0 ) {
                            // Hat X/Y axes are reported as a pair per hat.
                            if (range.getAxis() == MotionEvent.AXIS_HAT_X ||
                                range.getAxis() == MotionEvent.AXIS_HAT_Y) {
                                joystick.hats.add(range);
                            }
                            else {
                                joystick.axes.add(range);
                            }
                        }
                    }
                    mJoysticks.add(joystick);
                    SDLActivity.nativeAddJoystick(joystick.device_id, joystick.name, 0, -1,
                                                  joystick.axes.size(), joystick.hats.size()/2, 0);
                }
            }
        }
        /* Check removed devices */
        ArrayList<Integer> removedDevices = new ArrayList<Integer>();
        for(int i=0; i < mJoysticks.size(); i++) {
            int device_id = mJoysticks.get(i).device_id;
            int j;
            for (j=0; j < deviceIds.length; j++) {
                if (device_id == deviceIds[j]) break;
            }
            if (j == deviceIds.length) {
                removedDevices.add(device_id);
            }
        }
        for(int i=0; i < removedDevices.size(); i++) {
            int device_id = removedDevices.get(i);
            SDLActivity.nativeRemoveJoystick(device_id);
            for (int j=0; j < mJoysticks.size(); j++) {
                if (mJoysticks.get(j).device_id == device_id) {
                    mJoysticks.remove(j);
                    break;
                }
            }
        }
    }
    // Linear lookup of a tracked joystick by device id; null if unknown.
    protected SDLJoystick getJoystick(int device_id) {
        for(int i=0; i < mJoysticks.size(); i++) {
            if (mJoysticks.get(i).device_id == device_id) {
                return mJoysticks.get(i);
            }
        }
        return null;
    }
    @Override
    public boolean handleMotionEvent(MotionEvent event) {
        if ( (event.getSource() & InputDevice.SOURCE_JOYSTICK) != 0) {
            int actionPointerIndex = event.getActionIndex();
            int action = event.getActionMasked();
            switch(action) {
                case MotionEvent.ACTION_MOVE:
                    SDLJoystick joystick = getJoystick(event.getDeviceId());
                    if ( joystick != null ) {
                        for (int i = 0; i < joystick.axes.size(); i++) {
                            InputDevice.MotionRange range = joystick.axes.get(i);
                            /* Normalize the value to -1...1 */
                            float value = ( event.getAxisValue( range.getAxis(), actionPointerIndex) - range.getMin() ) / range.getRange() * 2.0f - 1.0f;
                            SDLActivity.onNativeJoy(joystick.device_id, i, value );
                        }
                        // Hats come in X/Y pairs; round each axis to a digital -1/0/1 step.
                        for (int i = 0; i < joystick.hats.size(); i+=2) {
                            int hatX = Math.round(event.getAxisValue( joystick.hats.get(i).getAxis(), actionPointerIndex ) );
                            int hatY = Math.round(event.getAxisValue( joystick.hats.get(i+1).getAxis(), actionPointerIndex ) );
                            SDLActivity.onNativeHat(joystick.device_id, i/2, hatX, hatY );
                        }
                    }
                    break;
                default:
                    break;
            }
        }
        return true;
    }
}
// Forwards generic motion events (available from API 12) to the joystick glue.
class SDLGenericMotionListener_API12 implements View.OnGenericMotionListener {
    // Generic Motion (mouse hover, joystick...) events go here
    // We only have joysticks yet
    @Override
    public boolean onGenericMotion(View v, MotionEvent event) {
        return SDLActivity.handleJoystickMotionEvent(event);
    }
}
| |
/*
*
* * Copyright 2005-2015 Red Hat, Inc.
* * Red Hat licenses this file to you under the Apache License, version
* * 2.0 (the "License"); you may not use this file except in compliance
* * with the License. You may obtain a copy of the License at
* * http://www.apache.org/licenses/LICENSE-2.0
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* * implied. See the License for the specific language governing
* * permissions and limitations under the License.
*
*/
package io.fabric8.mq.protocol.stomp;
import org.apache.activemq.AsyncCallback;
import org.apache.activemq.command.Command;
import org.apache.activemq.util.DataByteArrayOutputStream;
import org.apache.activemq.util.ServiceStopper;
import org.apache.activemq.util.ServiceSupport;
import org.vertx.java.core.Handler;
import org.vertx.java.core.buffer.Buffer;
import org.vertx.java.core.streams.ReadStream;
import java.io.IOException;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.ReentrantLock;
/**
 * Vert.x ReadStream adapter that marshals ActiveMQ commands into STOMP wire
 * buffers and delivers them to the registered data handler. While paused,
 * outgoing sends are queued; resume() drains the queue.
 *
 * Thread-safety: all mutable state (paused, queue, dataOut via createBuffer)
 * is guarded by the ReentrantLock.
 */
class StompReadStream extends ServiceSupport implements ReadStream<StompReadStream> {
    private final StompTransport transport;
    private final StompWireFormat wireFormat;
    private final DataByteArrayOutputStream dataOut;
    private final ReentrantLock lock;
    private Handler<Buffer> dataHandler;
    private boolean paused;
    private BlockingQueue<Send> queue;
    StompReadStream(StompTransport transport, final StompWireFormat wireFormat) {
        this.transport = transport;
        this.wireFormat = wireFormat;
        dataOut = new DataByteArrayOutputStream();
        lock = new ReentrantLock();
    }
    @Override
    public StompReadStream endHandler(Handler<Void> handler) {
        // End-of-stream notifications are not produced by this stream.
        return this;
    }
    @Override
    public StompReadStream dataHandler(Handler<Buffer> handler) {
        dataHandler = handler;
        return this;
    }
    @Override
    public StompReadStream pause() {
        try {
            lock.lockInterruptibly();
            // unlock() only after a successful acquire; the previous layout
            // called unlock() in a finally even when lockInterruptibly() was
            // interrupted before acquiring, throwing IllegalMonitorStateException.
            try {
                paused = true;
            } finally {
                lock.unlock();
            }
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
        return this;
    }
    @Override
    public StompReadStream resume() {
        try {
            lock.lockInterruptibly();
            try {
                if (paused) {
                    final BlockingQueue<Send> queue = this.queue;
                    if (queue != null && !queue.isEmpty()) {
                        // Drain the queue, re-polling each iteration. The old
                        // do/while polled a single Send and then re-delivered
                        // that SAME send while the queue stayed non-empty —
                        // an infinite loop whenever >=2 sends were queued.
                        Send send = queue.poll(50, TimeUnit.MILLISECONDS);
                        while (send != null) {
                            Buffer buffer = createBuffer(send.getCommand());
                            if (buffer != null) {
                                final Handler<Buffer> dh = dataHandler;
                                if (dh != null) {
                                    dh.handle(buffer);
                                    send.onSuccess();
                                } else {
                                    throw new IllegalStateException("No Data Handler");
                                }
                            }
                            send = queue.poll();
                        }
                    }
                }
                paused = false;
            } finally {
                lock.unlock();
            }
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
        return this;
    }
    @Override
    public StompReadStream exceptionHandler(Handler<Throwable> handler) {
        // Exceptions are routed through transport.handleException instead.
        return this;
    }
    protected void sendToVertx(final Object o) throws IOException {
        sendToVertx(o, null);
    }
    /**
     * Delivers a command to the data handler, or queues it while paused.
     *
     * @param o             the ActiveMQ Command to marshal and deliver
     * @param asyncCallback invoked on successful delivery; may be null
     */
    protected void sendToVertx(final Object o, final AsyncCallback asyncCallback) throws IOException {
        try {
            lock.lockInterruptibly();
            try {
                if (paused) {
                    if (queue == null) {
                        queue = new LinkedBlockingDeque<>();
                    }
                    queue.add(new Send((Command) o, asyncCallback));
                } else {
                    Buffer buffer = createBuffer(o);
                    if (buffer != null) {
                        final Handler<Buffer> dh = dataHandler;
                        if (dh != null) {
                            dh.handle(buffer);
                            if (asyncCallback != null) {
                                asyncCallback.onSuccess();
                            }
                        } else {
                            throw new IllegalStateException("No Data Handler");
                        }
                    }
                }
            } finally {
                lock.unlock();
            }
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
    }
    // Marshals a command through the STOMP wire format into a Vert.x Buffer;
    // returns null for a null command or when marshalling fails (the error is
    // reported to the transport).
    private Buffer createBuffer(Object command) {
        Buffer buffer = null;
        if (command != null) {
            try {
                wireFormat.marshal(command, dataOut);
                dataOut.flush();
                int size = dataOut.size();
                byte[] data = new byte[size];
                System.arraycopy(dataOut.getData(), 0, data, 0, size);
                // Shrink the scratch stream again after unusually large frames.
                if (size > StompTransport.COMPACT_SIZE) {
                    dataOut.restart();
                }
                dataOut.reset();
                buffer = new Buffer(data);
            } catch (Throwable t) {
                transport.handleException(t);
            }
        }
        return buffer;
    }
    @Override
    protected void doStop(ServiceStopper serviceStopper) throws Exception {
        // Clear the paused flag under the lock for consistent visibility.
        lock.lock();
        try {
            paused = false;
        } finally {
            lock.unlock();
        }
    }
    @Override
    protected void doStart() throws Exception {
    }
    // Pair of a queued command and its optional completion callback.
    private class Send {
        final private Command command;
        final private AsyncCallback callback;
        Send(Command command, AsyncCallback callback) {
            this.command = command;
            this.callback = callback;
        }
        Send(Command command) {
            this.command = command;
            this.callback = null;
        }
        Command getCommand() {
            return command;
        }
        void onSuccess() {
            if (callback != null) {
                callback.onSuccess();
            }
        }
        public String toString() {
            return "Send[" + command + "], callback = " + callback;
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.phoenix.schema.tool;
import org.apache.phoenix.end2end.ParallelStatsEnabledIT;
import org.apache.phoenix.end2end.ParallelStatsEnabledTest;
import org.apache.phoenix.jdbc.PhoenixConnection;
import org.apache.phoenix.parse.ParseException;
import org.apache.phoenix.parse.SQLParser;
import org.apache.phoenix.schema.PTable;
import org.apache.phoenix.util.PhoenixRuntime;
import org.apache.phoenix.util.PropertiesUtil;
import org.apache.phoenix.util.ReadOnlyProps;
import org.apache.phoenix.util.SchemaUtil;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Set;
import java.util.HashSet;
import java.util.Arrays;
import java.util.Properties;
import static junit.framework.TestCase.assertEquals;
import static junit.framework.TestCase.assertTrue;
import static junit.framework.TestCase.fail;
import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
@Category(ParallelStatsEnabledTest.class)
public class SchemaToolExtractionIT extends ParallelStatsEnabledIT {
    @BeforeClass
    public static synchronized void setup() throws Exception {
        // Bring up the shared test driver once per class with default properties.
        Map<String, String> props = Collections.emptyMap();
        setUpTestDriver(new ReadOnlyProps(props.entrySet().iterator()));
    }
@Test
public void testCreateTableStatement() throws Exception {
String tableName = generateUniqueName();
String schemaName = generateUniqueName();
String pTableFullName = SchemaUtil.getQualifiedTableName(schemaName, tableName);
String createTableStmt = "CREATE TABLE "+ pTableFullName + "(K VARCHAR NOT NULL PRIMARY KEY, "
+ "V1 VARCHAR, V2 VARCHAR) TTL=2592000, IMMUTABLE_ROWS=TRUE, DISABLE_WAL=TRUE";
List<String> queries = new ArrayList<String>(){};
queries.add(createTableStmt);
String result = runSchemaExtractionTool(schemaName, tableName, null, queries);
Assert.assertEquals(createTableStmt, result.toUpperCase());
}
@Test
public void testCreateTableStatementLowerCase() throws Exception {
String tableName = "lowecasetbl1";
String schemaName = "lowecaseschemaname1";
String pTableFullName = SchemaUtil.getEscapedTableName(schemaName, tableName);
String createTableStmt = "CREATE TABLE "+ pTableFullName + "(\"smallK\" VARCHAR NOT NULL PRIMARY KEY, "
+ "\"asd\".V1 VARCHAR, \"foo\".\"bar\" VARCHAR) TTL=2592000, IMMUTABLE_ROWS=true, DISABLE_WAL=true";
List<String> queries = new ArrayList<String>(){};
queries.add(createTableStmt);
String result = runSchemaExtractionTool("\"" + schemaName + "\"", "\"" + tableName + "\"", null, queries);
Assert.assertEquals(createTableStmt, result);
}
@Test
public void testCreateIndexStatement() throws Exception {
String tableName = generateUniqueName();
String schemaName = generateUniqueName();
String indexName = generateUniqueName();
String indexName1 = generateUniqueName();
String indexName2 = generateUniqueName();
String indexName3 = generateUniqueName();
String properties = "TTL=2592000,IMMUTABLE_ROWS=true,DISABLE_WAL=true";
String pTableFullName = SchemaUtil.getQualifiedTableName(schemaName, tableName);
String createTableStatement = "CREATE TABLE "+pTableFullName + "(k VARCHAR NOT NULL PRIMARY KEY, \"v1\" VARCHAR, v2 VARCHAR)"
+ properties;
//FIXME never verified
String createIndexStatement = "CREATE INDEX "+indexName + " ON "+pTableFullName+"(\"v1\" DESC) INCLUDE (v2)";
//FIXME never verified
String createIndexStatement1 = "CREATE INDEX "+indexName1 + " ON "+pTableFullName+"(v2 DESC) INCLUDE (\"v1\")";
String createIndexStatement2 = "CREATE INDEX "+indexName2 + " ON "+pTableFullName+"(k)";
String createIndexStatement3 ="CREATE INDEX " + indexName3 + " ON " + pTableFullName +
"('QUOTED' || \"v1\" || V2 DESC, \"v1\" DESC, K) INCLUDE (V2)";
List<String> queries = new ArrayList<String>(){};
queries.add(createTableStatement);
queries.add(createIndexStatement);
queries.add(createIndexStatement1);
queries.add(createIndexStatement2);
String result = runSchemaExtractionTool(schemaName, indexName2, null, queries);
Assert.assertEquals(createIndexStatement2.toUpperCase(), result.toUpperCase());
List<String> queries3 = new ArrayList<String>(){};
queries3.add(createIndexStatement3);
String result3 = runSchemaExtractionTool(schemaName, indexName3, null, queries3);
Assert.assertEquals(createIndexStatement3, result3);
}
/**
 * Verifies that extracted DDL surfaces defaulted table/index properties:
 * IMMUTABLE_STORAGE_SCHEME and the encoding scheme implied by COLUMN_ENCODED_BYTES=4.
 */
@Test
public void testDDLsWithDefaults() throws Exception {
    Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
    String tableName = generateUniqueName();
    String schemaName = generateUniqueName();
    String indexName = generateUniqueName();
    String properties = "COLUMN_ENCODED_BYTES=4";
    String pTableFullName = SchemaUtil.getQualifiedTableName(schemaName, tableName);
    String pIndexFullName = SchemaUtil.getQualifiedTableName(schemaName, indexName);
    String createTableStatement = "CREATE TABLE "+pTableFullName + "(k VARCHAR NOT NULL PRIMARY KEY, v1 VARCHAR, v2 VARCHAR)";
    // NOTE(review): properties is appended with no separating space after "(v2)";
    // the parser appears to tolerate this, but confirm before relying on it.
    String createIndexStatement = "CREATE INDEX "+indexName + " ON "+pTableFullName+"(v1 DESC) INCLUDE (v2)" + properties;
    List<String> queries = new ArrayList<>();
    queries.add(createTableStatement);
    queries.add(createIndexStatement);
    try (Connection conn = DriverManager.getConnection(getUrl(), props)) {
        executeCreateStatements(conn, queries);
        PTable pData = PhoenixRuntime.getTable(conn, pTableFullName);
        PTable pIndex = PhoenixRuntime.getTable(conn, pIndexFullName);
        SchemaExtractionProcessor schemaExtractionProcessor =
                new SchemaExtractionProcessor(null, config, pData, true);
        String tableDDL = schemaExtractionProcessor.process();
        assertTrue(tableDDL.contains("IMMUTABLE_STORAGE_SCHEME"));
        SchemaExtractionProcessor schemaExtractionProcessorIndex =
                new SchemaExtractionProcessor(null, config, pIndex, true);
        String indexDDL = schemaExtractionProcessorIndex.process();
        assertTrue(indexDDL.contains("IMMUTABLE_STORAGE_SCHEME"));
        assertTrue(indexDDL.contains("ENCODING_SCHEME='FOUR_BYTE_QUALIFIERS'"));
    }
}
/**
 * Verifies that CREATE LOCAL INDEX DDL (plain columns and a function expression) is
 * round-tripped by the schema extraction tool.
 */
@Test
public void testCreateLocalIndexStatement() throws Exception {
    String tableName = generateUniqueName();
    String schemaName = generateUniqueName();
    String indexName = generateUniqueName();
    String indexName2 = generateUniqueName();
    String properties = "TTL=2592000,IMMUTABLE_ROWS=true,DISABLE_WAL=true";
    String pTableFullName = SchemaUtil.getQualifiedTableName(schemaName, tableName);
    String createTableStatement = "CREATE TABLE "+pTableFullName + "(k VARCHAR NOT NULL PRIMARY KEY, v1 VARCHAR, v2 VARCHAR)"
            + properties;
    String createIndexStatement = "CREATE LOCAL INDEX "+indexName + " ON "+pTableFullName+"(v1 DESC, k) INCLUDE (v2)";
    String createIndexStatement2 = "CREATE LOCAL INDEX "+indexName2 + " ON "+pTableFullName+"( LPAD(v1,10) DESC, k) INCLUDE (v2)";
    // Plain ArrayList; the anonymous subclass previously used here served no purpose.
    List<String> queries = new ArrayList<>();
    queries.add(createTableStatement);
    queries.add(createIndexStatement);
    String result = runSchemaExtractionTool(schemaName, indexName, null, queries);
    Assert.assertEquals(createIndexStatement.toUpperCase(), result.toUpperCase());
    List<String> queries2 = new ArrayList<>();
    queries2.add(createIndexStatement2);
    String result2 = runSchemaExtractionTool(schemaName, indexName2, null, queries2);
    Assert.assertEquals(createIndexStatement2.toUpperCase(), result2.toUpperCase());
}
/**
 * Verifies exact (case-preserving) round-trip of a CREATE LOCAL INDEX over a table that
 * mixes quoted lower-case and unquoted column names.
 */
@Test
public void testCreateLocalIndexStatementLowerCase() throws Exception {
    String tableName = generateUniqueName();
    String schemaName = generateUniqueName();
    String indexName = generateUniqueName();
    String properties = "TTL=2592000,IMMUTABLE_ROWS=true,DISABLE_WAL=true";
    String pTableFullName = SchemaUtil.getQualifiedTableName(schemaName, tableName);
    String createTableStatement = "CREATE TABLE "+pTableFullName + "(K VARCHAR NOT NULL PRIMARY KEY, \"v1\" VARCHAR, V2 VARCHAR)"
            + properties;
    String createIndexStatement = "CREATE LOCAL INDEX "+indexName + " ON "+pTableFullName+"( LPAD(\"v1\",10) DESC, K) INCLUDE (V2)";
    // Plain ArrayList; the anonymous subclass previously used here served no purpose.
    List<String> queries = new ArrayList<>();
    queries.add(createTableStatement);
    queries.add(createIndexStatement);
    String result = runSchemaExtractionTool(schemaName, indexName, null, queries);
    // Case-sensitive comparison: quoting must be preserved exactly.
    Assert.assertEquals(createIndexStatement, result);
}
/**
 * Verifies exact round-trip of a CREATE INDEX whose schema, table, index, and some
 * column/family names are quoted lower-case identifiers.
 */
@Test
public void testCreateIndexStatementLowerCase() throws Exception {
    String tableName = "lowercase" + generateUniqueName();
    String schemaName = "lowercase" + generateUniqueName();
    String indexName = "\"lowercaseIND" + generateUniqueName() + "\"";
    String properties = "TTL=2592000,IMMUTABLE_ROWS=true,DISABLE_WAL=true";
    String pTableFullName = SchemaUtil.getEscapedTableName(schemaName, tableName);
    String createTableStatement = "CREATE TABLE " + pTableFullName + "(\"k\" VARCHAR NOT NULL PRIMARY KEY, \"a\".V1 VARCHAR, \"v2\" VARCHAR)"
            + properties;
    String createIndexStatement = "CREATE INDEX " + indexName + " ON "+ pTableFullName + "(\"a\".V1 DESC, \"k\") INCLUDE (\"v2\")";
    // Plain ArrayList; the anonymous subclass previously used here served no purpose.
    List<String> queries = new ArrayList<>();
    queries.add(createTableStatement);
    queries.add(createIndexStatement);
    String result = runSchemaExtractionTool("\"" + schemaName + "\"", indexName, null, queries);
    // Case-sensitive comparison: quoting must be preserved exactly.
    Assert.assertEquals(createIndexStatement, result);
}
/**
 * Verifies exact round-trip of a CREATE INDEX on an arithmetic expression combining
 * quoted lower-case columns, plus an array column in the INCLUDE list.
 */
@Test
public void testCreateIndexStatementLowerCaseCombined() throws Exception {
    String tableName = "lowercase" + generateUniqueName();
    String schemaName = "lowercase" + generateUniqueName();
    String indexName = "\"lowercaseIND" + generateUniqueName() + "\"";
    String properties = "TTL=2592000,IMMUTABLE_ROWS=true,DISABLE_WAL=true";
    String pTableFullName = SchemaUtil.getEscapedTableName(schemaName, tableName);
    String createTableStatement = "CREATE TABLE " + pTableFullName + "(ID varchar primary key, \"number\" integer, \"currency\" decimal(6,2), lista varchar[])"
            + properties;
    String createIndexStatement = "CREATE INDEX " + indexName + " ON "+ pTableFullName + "(\"number\" * \"currency\", ID) INCLUDE (LISTA)";
    // Plain ArrayList; the anonymous subclass previously used here served no purpose.
    List<String> queries = new ArrayList<>();
    queries.add(createTableStatement);
    queries.add(createIndexStatement);
    String result = runSchemaExtractionTool("\"" + schemaName + "\"", indexName, null, queries);
    // Case-sensitive comparison: quoting must be preserved exactly.
    Assert.assertEquals(createIndexStatement, result);
}
/**
 * Verifies that CREATE VIEW DDL with additional view columns and a PK constraint is
 * round-tripped (case-insensitively) by the schema extraction tool.
 */
@Test
public void testCreateViewStatement() throws Exception {
    String tableName = generateUniqueName();
    String schemaName = generateUniqueName();
    String viewName = generateUniqueName();
    String properties = "TTL=2592000,IMMUTABLE_ROWS=true,DISABLE_WAL=true";
    String pTableFullName = SchemaUtil.getQualifiedTableName(schemaName, tableName);
    String createTableStmt = "CREATE TABLE "+pTableFullName + "(k BIGINT NOT NULL PRIMARY KEY, "
            + "v1 VARCHAR, v2 VARCHAR)"
            + properties;
    String viewFullName = SchemaUtil.getQualifiedTableName(schemaName, viewName);
    String createView = "CREATE VIEW "+viewFullName + "(id1 BIGINT, id2 BIGINT NOT NULL, "
            + "id3 VARCHAR NOT NULL CONSTRAINT PKVIEW PRIMARY KEY (id2, id3 DESC)) "
            + "AS SELECT * FROM "+pTableFullName;
    // Plain ArrayList; the anonymous subclass previously used here served no purpose.
    List<String> queries = new ArrayList<>();
    queries.add(createTableStmt);
    queries.add(createView);
    String result = runSchemaExtractionTool(schemaName, viewName, null, queries);
    Assert.assertEquals(createView.toUpperCase(), result.toUpperCase());
}
/**
 * Verifies exact round-trip of a CREATE VIEW (with WHERE clause) over quoted lower-case
 * schema/table/view/column identifiers.
 */
@Test
public void testCreateViewStatementLowerCase() throws Exception {
    String tableName = "lowercase" + generateUniqueName();
    String schemaName = "lowercase" + generateUniqueName();
    String viewName = "lowercase" + generateUniqueName();
    String properties = "TTL=2592000,IMMUTABLE_ROWS=true,DISABLE_WAL=true";
    String pTableFullName = SchemaUtil.getEscapedTableName(schemaName, tableName);
    String createTableStmt = "CREATE TABLE "+pTableFullName + "(\"k\" BIGINT NOT NULL PRIMARY KEY, "
            + "\"a\".V1 VARCHAR, v2 VARCHAR)"
            + properties;
    String viewFullName = SchemaUtil.getEscapedTableName(schemaName, viewName);
    String createView = "CREATE VIEW "+viewFullName + "(ID1 BIGINT, \"id2\" BIGINT NOT NULL, "
            + "ID3 VARCHAR NOT NULL CONSTRAINT PKVIEW PRIMARY KEY (\"id2\", ID3 DESC)) "
            + "AS SELECT * FROM " + pTableFullName + " WHERE \"k\" > 3";
    // Plain ArrayList; the anonymous subclass previously used here served no purpose.
    List<String> queries = new ArrayList<>();
    queries.add(createTableStmt);
    queries.add(createView);
    String result = runSchemaExtractionTool("\"" + schemaName + "\"", "\"" + viewName + "\"", null, queries);
    // Case-sensitive comparison: quoting must be preserved exactly.
    Assert.assertEquals(createView, result);
}
/**
 * Verifies round-trip of a CREATE VIEW whose name contains characters ("@@") that
 * force quoting via getPTableFullNameWithQuotes.
 */
@Test
public void testCreateViewStatement_customName() throws Exception {
    String tableName = generateUniqueName();
    String schemaName = generateUniqueName();
    String viewName = generateUniqueName()+"@@";
    String properties = "TTL=2592000,IMMUTABLE_ROWS=true,DISABLE_WAL=true";
    String pTableFullName = SchemaUtil.getQualifiedTableName(schemaName, tableName);
    String createTableStmt = "CREATE TABLE "+pTableFullName + "(k BIGINT NOT NULL PRIMARY KEY, "
            + "v1 VARCHAR, v2 VARCHAR)"
            + properties;
    String viewFullName = SchemaUtil.getPTableFullNameWithQuotes(schemaName, viewName);
    String createView = "CREATE VIEW "+viewFullName + "(id1 BIGINT, id2 BIGINT NOT NULL, "
            + "id3 VARCHAR NOT NULL CONSTRAINT PKVIEW PRIMARY KEY (id2, id3 DESC)) "
            + "AS SELECT * FROM "+pTableFullName;
    // Plain ArrayList; the anonymous subclass previously used here served no purpose.
    List<String> queries = new ArrayList<>();
    queries.add(createTableStmt);
    queries.add(createView);
    String result = runSchemaExtractionTool(schemaName, viewName, null, queries);
    Assert.assertEquals(createView.toUpperCase(), result.toUpperCase());
}
/**
 * Verifies that an index on a grand-child view is extracted with the full derived PK
 * (maximal columns appended), and that re-creating the extracted DDL yields the same
 * ordinal positions in SYSTEM.CATALOG.
 */
@Test
public void testCreateViewIndexStatement() throws Exception {
    String tableName = generateUniqueName();
    String schemaName = generateUniqueName();
    String viewName = generateUniqueName();
    String childView = generateUniqueName();
    String indexName = generateUniqueName();
    String pTableFullName = SchemaUtil.getQualifiedTableName(schemaName, tableName);
    String createTableStmt = "CREATE TABLE "+pTableFullName + "(k BIGINT NOT NULL PRIMARY KEY, "
            + "v1 VARCHAR, v2 VARCHAR)";
    String viewFullName = SchemaUtil.getQualifiedTableName(schemaName, viewName);
    String childviewName = SchemaUtil.getQualifiedTableName(schemaName, childView);
    String createView = "CREATE VIEW "+viewFullName + "(id1 BIGINT, id2 BIGINT NOT NULL, "
            + "id3 VARCHAR NOT NULL CONSTRAINT PKVIEW PRIMARY KEY (id2, id3 DESC)) "
            + "AS SELECT * FROM "+pTableFullName;
    String createView1 = "CREATE VIEW "+childviewName + " AS SELECT * FROM "+viewFullName;
    String createIndexStatement = "CREATE INDEX "+indexName + " ON "+childviewName+"(id2, id1) INCLUDE (v1)";
    // Plain ArrayList; the anonymous subclass previously used here served no purpose.
    List<String> queries = new ArrayList<>();
    queries.add(createTableStmt);
    queries.add(createView);
    queries.add(createView1);
    queries.add(createIndexStatement);
    // Extraction appends the inherited PK columns (K, ID3 DESC) to the index definition.
    String expected = "CREATE INDEX %s ON " +childviewName +"(ID2, ID1, K, ID3 DESC) INCLUDE (V1)";
    String result = runSchemaExtractionTool(schemaName, indexName, null, queries);
    Assert.assertEquals(String.format(expected, indexName).toUpperCase(), result.toUpperCase());
    queries.clear();
    String newIndex = indexName+"_NEW";
    queries.add(String.format(expected, newIndex));
    Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
    try (Connection conn = DriverManager.getConnection(getUrl(), props)) {
        executeCreateStatements(conn, queries);
    }
    compareOrdinalPositions(indexName, newIndex);
}
/**
 * Asserts that every column of {@code newTable} with an ORDINAL_POSITION in
 * SYSTEM.CATALOG has the same position recorded for {@code table}.
 *
 * @param table    name of the original table/index
 * @param newTable name of the re-created table/index to compare against
 * @throws SQLException on any JDBC failure
 */
private void compareOrdinalPositions(String table, String newTable) throws SQLException {
    String ordinalQuery = "SELECT COLUMN_NAME, "
            + "ORDINAL_POSITION FROM SYSTEM.CATALOG"
            + " WHERE TABLE_NAME='%s' AND ORDINAL_POSITION IS NOT NULL ORDER BY COLUMN_NAME";
    Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
    Map<String, Integer> ordinalMap = new HashMap<>();
    try (Connection conn = DriverManager.getConnection(getUrl(), props)) {
        // try-with-resources so statements/result sets are closed instead of leaked.
        // (Statement is fully qualified in case it is not among the file's imports.)
        try (java.sql.Statement stmt = conn.createStatement();
             ResultSet rs = stmt.executeQuery(String.format(ordinalQuery, table))) {
            while (rs.next()) {
                ordinalMap.put(rs.getString(1), rs.getInt(2));
            }
        }
        try (java.sql.Statement stmt = conn.createStatement();
             ResultSet rs = stmt.executeQuery(String.format(ordinalQuery, newTable))) {
            while (rs.next()) {
                String columnName = rs.getString(1);
                Integer expected = ordinalMap.get(columnName);
                // Fail with a clear message instead of an NPE if the column is absent.
                Assert.assertNotNull("No ordinal position recorded for column " + columnName
                        + " of " + table, expected);
                Assert.assertEquals(expected.intValue(), rs.getInt(2));
            }
        }
    }
}
/**
 * Verifies that a tenant-specific view is extracted correctly when the tool is run
 * with a tenant connection (base table created globally first).
 */
@Test
public void testCreateViewStatement_tenant() throws Exception {
    String tableName = generateUniqueName();
    String viewName = generateUniqueName();
    String schemaName = generateUniqueName();
    String tenantId = "abc";
    String pTableFullName = SchemaUtil.getQualifiedTableName(schemaName, tableName);
    String createTableStmt = "CREATE TABLE "+pTableFullName + "(k BIGINT NOT NULL PRIMARY KEY, "
            + "v1 VARCHAR, v2 VARCHAR)";
    String viewFullName = SchemaUtil.getPTableFullNameWithQuotes(schemaName, viewName);
    String createViewStmt = "CREATE VIEW "+viewFullName + "(id1 BIGINT, id2 BIGINT NOT NULL, "
            + "id3 VARCHAR NOT NULL CONSTRAINT PKVIEW PRIMARY KEY (id2, id3 DESC)) "
            + "AS SELECT * FROM "+pTableFullName;
    // Plain ArrayList; the anonymous subclass previously used here served no purpose.
    List<String> queries1 = new ArrayList<>();
    queries1.add(createTableStmt);
    // Create the base table globally (extraction output unused here).
    runSchemaExtractionTool(schemaName, tableName, null, queries1);
    List<String> queries2 = new ArrayList<>();
    queries2.add(createViewStmt);
    String result2 = runSchemaExtractionTool(schemaName, viewName, tenantId, queries2);
    Assert.assertEquals(createViewStmt.toUpperCase(), result2.toUpperCase());
}
/**
 * Verifies that SALT_BUCKETS is preserved in the property clause of extracted DDL.
 */
@Test
public void testSaltedTableStatement() throws Exception {
    String tableName = generateUniqueName();
    String schemaName = generateUniqueName();
    String pTableFullName = SchemaUtil.getQualifiedTableName(schemaName, tableName);
    String query = "create table " + pTableFullName +
            "(a_integer integer not null CONSTRAINT pk PRIMARY KEY (a_integer)) SALT_BUCKETS=16";
    // Plain ArrayList; the anonymous subclass previously used here served no purpose.
    List<String> queries = new ArrayList<>();
    queries.add(query);
    String result = runSchemaExtractionTool(schemaName, tableName, null, queries);
    Assert.assertTrue(getProperties(result).contains("SALT_BUCKETS=16"));
}
/**
 * Verifies round-trip of a CREATE TABLE with a multi-column PK constraint.
 */
@Test
public void testCreateTableWithPKConstraint() throws Exception {
    String tableName = generateUniqueName();
    String schemaName = generateUniqueName();
    String pTableFullName = SchemaUtil.getQualifiedTableName(schemaName, tableName);
    String query = "create table " + pTableFullName +
            "(a_char CHAR(15) NOT NULL, " +
            "b_char CHAR(15) NOT NULL, " +
            "c_bigint BIGINT NOT NULL CONSTRAINT PK PRIMARY KEY (a_char, b_char, c_bigint)) IMMUTABLE_ROWS=TRUE";
    // Plain ArrayList; the anonymous subclass previously used here served no purpose.
    List<String> queries = new ArrayList<>();
    queries.add(query);
    String result = runSchemaExtractionTool(schemaName, tableName, null, queries);
    Assert.assertEquals(query.toUpperCase(), result.toUpperCase());
}
/**
 * Verifies round-trip of a schema-less CREATE TABLE containing VARCHAR ARRAY and
 * sized CHAR ARRAY columns.
 */
@Test
public void testCreateTableWithArrayColumn() throws Exception {
    String tableName = generateUniqueName();
    String pTableFullName = tableName;
    String query = "create table " + pTableFullName +
            "(a_char CHAR(15) NOT NULL, " +
            "b_char CHAR(10) NOT NULL, " +
            "c_var_array VARCHAR ARRAY, " +
            "d_char_array CHAR(15) ARRAY[3] CONSTRAINT PK PRIMARY KEY (a_char, b_char)) " +
            "TTL=2592000, IMMUTABLE_STORAGE_SCHEME='ONE_CELL_PER_COLUMN', REPLICATION_SCOPE=1";
    // Plain ArrayList; the anonymous subclass previously used here served no purpose.
    List<String> queries = new ArrayList<>();
    queries.add(query);
    // Empty schema name: the table lives in the default schema.
    String result = runSchemaExtractionTool("", tableName, null, queries);
    Assert.assertEquals(query.toUpperCase(), result.toUpperCase());
}
/**
 * Verifies that table-level and default-column-family properties survive extraction
 * (compared order-insensitively via compareProperties).
 */
@Test
public void testCreateTableWithDefaultCFProperties() throws Exception {
    String tableName = generateUniqueName();
    String schemaName = generateUniqueName();
    String properties = "KEEP_DELETED_CELLS=TRUE, TTL=1209600, IMMUTABLE_STORAGE_SCHEME='ONE_CELL_PER_COLUMN', "
            + "REPLICATION_SCOPE=1, DEFAULT_COLUMN_FAMILY='cv', SALT_BUCKETS=16, MULTI_TENANT=true, TIME_TEST='72HOURS'";
    String pTableFullName = SchemaUtil.getQualifiedTableName(schemaName, tableName);
    String query = "create table " + pTableFullName +
            "(a_char CHAR(15) NOT NULL, " +
            "b_char CHAR(10) NOT NULL, " +
            "\"av\".\"_\" CHAR(1), " +
            "\"bv\".\"_\" CHAR(1), " +
            "\"cv\".\"_\" CHAR(1), " +
            "\"dv\".\"_\" CHAR(1) CONSTRAINT PK PRIMARY KEY (a_char, b_char)) " + properties;
    // Plain ArrayList; the anonymous subclass previously used here served no purpose.
    List<String> queries = new ArrayList<>();
    queries.add(query);
    String result = runSchemaExtractionTool(schemaName, tableName, null, queries);
    Assert.assertTrue(compareProperties(properties, getProperties(result)));
}
/**
 * Verifies that per-column-family properties (e.g. "av".VERSIONS) survive extraction.
 */
@Test
public void testCreateTableWithCFProperties() throws Exception {
    String tableName = generateUniqueName();
    String schemaName = generateUniqueName();
    String properties = "\"av\".VERSIONS=2, \"bv\".VERSIONS=2, " +
            "DATA_BLOCK_ENCODING='DIFF', " +
            "IMMUTABLE_STORAGE_SCHEME='ONE_CELL_PER_COLUMN', SALT_BUCKETS=16, MULTI_TENANT=true";
    String pTableFullName = SchemaUtil.getQualifiedTableName(schemaName, tableName);
    String query = "create table " + pTableFullName +
            "(a_char CHAR(15) NOT NULL, " +
            "b_char CHAR(10) NOT NULL, " +
            "\"av\".\"_\" CHAR(1), " +
            "\"bv\".\"_\" CHAR(1), " +
            "\"cv\".\"_\" CHAR(1) CONSTRAINT PK PRIMARY KEY (a_char, b_char)) " + properties;
    // Plain ArrayList; the anonymous subclass previously used here served no purpose.
    List<String> queries = new ArrayList<>();
    queries.add(query);
    String result = runSchemaExtractionTool(schemaName, tableName, null, queries);
    Assert.assertTrue(compareProperties(properties, getProperties(result)));
}
/**
 * Verifies that distinct VERSIONS settings across multiple column families survive
 * extraction.
 */
@Test
public void testCreateTableWithMultipleCF() throws Exception {
    String tableName = generateUniqueName();
    String schemaName = generateUniqueName();
    String properties = "\"av\".VERSIONS=2, \"bv\".VERSIONS=3, " +
            "\"cv\".VERSIONS=4, DATA_BLOCK_ENCODING='DIFF', " +
            "IMMUTABLE_STORAGE_SCHEME='ONE_CELL_PER_COLUMN', SALT_BUCKETS=16, MULTI_TENANT=true";
    String pTableFullName = SchemaUtil.getQualifiedTableName(schemaName, tableName);
    final String query = "create table " + pTableFullName +
            "(a_char CHAR(15) NOT NULL, " +
            "b_char CHAR(10) NOT NULL, " +
            "\"av\".\"_\" CHAR(1), " +
            "\"bv\".\"_\" CHAR(1), " +
            "\"cv\".\"_\" CHAR(1), " +
            "\"dv\".\"_\" CHAR(1) CONSTRAINT PK PRIMARY KEY (a_char, b_char)) " + properties;
    // Plain ArrayList; the anonymous subclass previously used here served no purpose.
    List<String> queries = new ArrayList<>();
    queries.add(query);
    String result = runSchemaExtractionTool(schemaName, tableName, null, queries);
    Assert.assertTrue(compareProperties(properties, getProperties(result)));
}
/**
 * Verifies that identical per-CF properties are collapsed into a single table-level
 * property in the extracted DDL, and that the output re-parses cleanly.
 */
@Test
public void testCreateTableWithMultipleCFProperties() throws Exception {
    // Leading digits force quoting of the table name.
    String tableName = "07"+generateUniqueName();
    String schemaName = generateUniqueName();
    // NOTE(review): "BLOOMFITER" looks like a typo for BLOOMFILTER, but it is passed
    // through verbatim on both sides of the comparison — confirm before "fixing".
    String properties = "\"av\".DATA_BLOCK_ENCODING='DIFF', \"bv\".DATA_BLOCK_ENCODING='DIFF', "
            + "\"cv\".DATA_BLOCK_ENCODING='DIFF', " +
            "IMMUTABLE_STORAGE_SCHEME='ONE_CELL_PER_COLUMN', "
            + "SALT_BUCKETS=16, MULTI_TENANT=true, BLOOMFITER='ROW'";
    String simplifiedProperties = "DATA_BLOCK_ENCODING='DIFF', "
            + "IMMUTABLE_STORAGE_SCHEME='ONE_CELL_PER_COLUMN', "
            + "SALT_BUCKETS=16, MULTI_TENANT=true, BLOOMFITER='ROW'";
    String query = "create table " + schemaName+".\""+tableName+"\"" +
            "(a_char CHAR(15) NOT NULL, " +
            "b_char CHAR(10) NOT NULL, " +
            "\"av\".\"_\" CHAR(1), " +
            "\"bv\".\"_\" CHAR(1), " +
            "\"cv\".\"_\" CHAR(1) CONSTRAINT PK PRIMARY KEY (a_char, b_char)) " + properties;
    // Plain ArrayList; the anonymous subclass previously used here served no purpose.
    List<String> queries = new ArrayList<>();
    queries.add(query);
    String result = runSchemaExtractionTool(schemaName, tableName, null, queries);
    try {
        new SQLParser(result).parseStatement();
    } catch (ParseException pe) {
        fail("This should not happen!");
    }
    Assert.assertTrue(compareProperties(simplifiedProperties, getProperties(result)));
}
/**
 * Verifies column and PK ordering in extracted DDL for a view index (inherited PK
 * columns appended) and for the view itself.
 */
@Test
public void testColumnAndPKOrdering() throws Exception {
    String table = "CREATE TABLE IF NOT EXISTS MY_SCHEMA.MY_DATA_TABLE (\n"
            + "    ORGANIZATION_ID CHAR(15) NOT NULL, \n"
            + "    KEY_PREFIX CHAR(3) NOT NULL,\n"
            + "    CREATED_DATE DATE,\n"
            + "    CREATED_BY CHAR(15) \n"
            + "    CONSTRAINT PK PRIMARY KEY (\n"
            + "        ORGANIZATION_ID, \n"
            + "        KEY_PREFIX\n" + "    )\n"
            + ") VERSIONS=1, IMMUTABLE_ROWS=true, MULTI_TENANT=true, REPLICATION_SCOPE=1";
    String view = "CREATE VIEW IF NOT EXISTS MY_SCHEMA.MY_DATA_VIEW  (\n"
            + "    DATE_TIME1 DATE NOT NULL,\n"
            + "    TEXT1 VARCHAR NOT NULL,\n"
            + "    INT1 BIGINT NOT NULL,\n"
            + "    DOUBLE1 DECIMAL(12, 3),\n"
            + "    DOUBLE2 DECIMAL(12, 3),\n"
            + "    DOUBLE3 DECIMAL(12, 3),\n"
            + "    CONSTRAINT PKVIEW PRIMARY KEY\n" + "    (\n"
            + "        DATE_TIME1, TEXT1, INT1\n" + "    )\n" + ")\n"
            + "AS SELECT * FROM MY_SCHEMA.MY_DATA_TABLE WHERE KEY_PREFIX = '9Yj'";
    String index = "CREATE INDEX IF NOT EXISTS MY_VIEW_INDEX\n"
            + "ON MY_SCHEMA.MY_DATA_VIEW (TEXT1, DATE_TIME1 DESC, DOUBLE1)\n"
            + "INCLUDE (CREATED_BY, CREATED_DATE)";
    // Plain ArrayList; the anonymous subclass previously used here served no purpose.
    List<String> queries = new ArrayList<>();
    queries.add(table);
    queries.add(view);
    queries.add(index);
    String expectedIndex = "CREATE INDEX MY_VIEW_INDEX "
            + "ON MY_SCHEMA.MY_DATA_VIEW(TEXT1, DATE_TIME1 DESC, DOUBLE1, INT1)"
            + " INCLUDE (CREATED_BY, CREATED_DATE)";
    String result = runSchemaExtractionTool("MY_SCHEMA", "MY_VIEW_INDEX", null, queries);
    Assert.assertEquals(expectedIndex.toUpperCase(), result.toUpperCase());
    String expectedView = "CREATE VIEW MY_SCHEMA.MY_DATA_VIEW(DATE_TIME1 DATE NOT NULL, "
            + "TEXT1 VARCHAR NOT NULL, INT1 BIGINT NOT NULL, DOUBLE1 DECIMAL(12,3), "
            + "DOUBLE2 DECIMAL(12,3), DOUBLE3 DECIMAL(12,3)"
            + " CONSTRAINT PKVIEW PRIMARY KEY (DATE_TIME1, TEXT1, INT1))"
            + " AS SELECT * FROM MY_SCHEMA.MY_DATA_TABLE WHERE KEY_PREFIX = '9YJ'";
    // Objects already exist; run the tool again with no DDL to execute.
    result = runSchemaExtractionTool("MY_SCHEMA", "MY_DATA_VIEW", null, new ArrayList<String>());
    Assert.assertEquals(expectedView.toUpperCase(), result.toUpperCase());
}
/**
 * Verifies column and PK ordering in extracted DDL for an index on a base table, and
 * that re-creating the extracted DDL reproduces the same ordinal positions.
 */
@Test
public void testColumnAndPKOrdering_nonView() throws Exception {
    String indexName = "MY_DATA_TABLE_INDEX";
    String table = "CREATE TABLE MY_SCHEMA.MY_SAMPLE_DATA_TABLE("
            + "ORGANIZATION_ID CHAR(15) NOT NULL,"
            + " SOME_ID_COLUMN CHAR(3) NOT NULL,"
            + " SOME_ID_COLUMN_2 CHAR(15) NOT NULL,"
            + " CREATED_DATE DATE NOT NULL,"
            + " SOME_ID_COLUMN_3 CHAR(15) NOT NULL,"
            + " SOME_ID_COLUMN_4 CHAR(15),"
            + " CREATED_BY_ID VARCHAR,"
            + " VALUE_FIELD VARCHAR"
            + " CONSTRAINT PK PRIMARY KEY (ORGANIZATION_ID, SOME_ID_COLUMN, SOME_ID_COLUMN_2,"
            + " CREATED_DATE DESC, SOME_ID_COLUMN_3))"
            + " IMMUTABLE_ROWS=true, IMMUTABLE_STORAGE_SCHEME='ONE_CELL_PER_COLUMN',"
            + " MULTI_TENANT=true, REPLICATION_SCOPE=1\n";
    String index = "CREATE INDEX IF NOT EXISTS MY_DATA_TABLE_INDEX\n"
            + " ON MY_SCHEMA.MY_SAMPLE_DATA_TABLE (SOME_ID_COLUMN, CREATED_DATE DESC,"
            + " SOME_ID_COLUMN_2, SOME_ID_COLUMN_3)\n"
            + " INCLUDE\n"
            + "(SOME_ID_COLUMN_4, CREATED_BY_ID, VALUE_FIELD)\n";
    // Plain ArrayList; the anonymous subclass previously used here served no purpose.
    List<String> queries = new ArrayList<>();
    queries.add(table);
    queries.add(index);
    String result = runSchemaExtractionTool("MY_SCHEMA",
            "MY_DATA_TABLE_INDEX", null, queries);
    String expected = "CREATE INDEX %s ON MY_SCHEMA.MY_SAMPLE_DATA_TABLE"
            + "(SOME_ID_COLUMN, CREATED_DATE DESC, SOME_ID_COLUMN_2, SOME_ID_COLUMN_3) "
            + "INCLUDE (SOME_ID_COLUMN_4, CREATED_BY_ID, VALUE_FIELD)";
    Assert.assertEquals(String.format(expected, indexName).toUpperCase(), result.toUpperCase());
    queries.clear();
    String newIndex = indexName+"_NEW";
    queries.add(String.format(expected, newIndex));
    Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
    try (Connection conn = DriverManager.getConnection(getUrl(), props)) {
        executeCreateStatements(conn, queries);
    }
    compareOrdinalPositions(indexName, newIndex);
}
/**
 * Verifies extraction of an index over a column-family-qualified column: the PK column
 * K is appended (maximal-PK principle), and the extracted DDL reproduces the same
 * ordinal positions when re-created.
 */
@Test
public void testCreateIndexStatementWithColumnFamily() throws Exception {
    String tableName = generateUniqueName();
    String schemaName = generateUniqueName();
    String indexName = generateUniqueName();
    String pTableFullName = SchemaUtil.getQualifiedTableName(schemaName, tableName);
    String createTableStmt = "CREATE TABLE "+pTableFullName + "(k VARCHAR NOT NULL PRIMARY KEY, "
            + "\"av\".\"_\" CHAR(1), v2 VARCHAR)";
    String createIndexStmt = "CREATE INDEX "+ indexName + " ON "+pTableFullName+ "(\"av\".\"_\")";
    // Plain ArrayList; the anonymous subclass previously used here served no purpose.
    List<String> queries = new ArrayList<>();
    queries.add(createTableStmt);
    queries.add(createIndexStmt);
    // By the principle of having maximal columns in the PK, K is appended.
    String expected = "CREATE INDEX %s ON "+pTableFullName+ "(\"av\".\"_\", K)";
    String result = runSchemaExtractionTool(schemaName, indexName, null, queries);
    Assert.assertEquals(String.format(expected, indexName).toUpperCase(), result.toUpperCase());
    queries.clear();
    String newIndex = indexName+"_NEW";
    queries.add(String.format(expected, newIndex));
    Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
    try (Connection conn = DriverManager.getConnection(getUrl(), props)) {
        executeCreateStatements(conn, queries);
    }
    compareOrdinalPositions(indexName, newIndex);
}
/**
 * Opens a JDBC connection scoped to the given tenant by setting the Phoenix
 * TenantId connection property.
 *
 * @param url      JDBC URL to connect to
 * @param tenantId tenant identifier to scope the connection to
 * @return a tenant-specific {@link Connection}
 * @throws SQLException if the connection cannot be established
 */
private Connection getTenantConnection(String url, String tenantId) throws SQLException {
    Properties tenantProps = new Properties();
    tenantProps.setProperty(PhoenixRuntime.TENANT_ID_ATTRIB, tenantId);
    return DriverManager.getConnection(url, tenantProps);
}
/**
 * Executes the given DDL statements (on a global or tenant connection) and then runs
 * SchemaTool in EXTRACT mode for the requested table, returning the extracted DDL.
 *
 * @param schemaName schema of the object to extract (may be quoted or empty)
 * @param tableName  table/view/index to extract
 * @param tenantId   tenant to connect as, or {@code null} for a global connection
 * @param queries    DDL statements to execute before extraction
 * @return the tool's output (the extracted DDL)
 */
private String runSchemaExtractionTool(String schemaName, String tableName, String tenantId,
        List<String> queries) throws Exception {
    Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
    // Single code path: only the connection and the optional "-t" flag differ by tenant.
    try (Connection conn = tenantId == null
            ? DriverManager.getConnection(getUrl(), props)
            : getTenantConnection(getUrl(), tenantId)) {
        executeCreateStatements(conn, queries);
        List<String> args = new ArrayList<>(
                Arrays.asList("-m", "EXTRACT", "-tb", tableName, "-s", schemaName));
        if (tenantId != null) {
            args.add("-t");
            args.add(tenantId);
        }
        return runSchemaTool(conn, args.toArray(new String[0]));
    }
}
/**
 * Executes each statement on the given connection and commits once at the end.
 *
 * @param conn    open connection to execute on
 * @param queries statements to execute, in order
 * @throws SQLException if any statement fails
 */
private void executeCreateStatements(Connection conn, List<String> queries) throws SQLException {
    for (String query : queries) {
        // try-with-resources so each Statement is closed instead of leaked.
        // (Fully qualified in case java.sql.Statement is not among the file's imports.)
        try (java.sql.Statement stmt = conn.createStatement()) {
            stmt.execute(query);
        }
    }
    conn.commit();
}
/**
 * Runs SchemaTool with the given CLI arguments, wiring in the connection's HBase
 * configuration when a connection is supplied, and returns the tool's output.
 *
 * @param conn connection whose configuration should be used, or {@code null}
 * @param args CLI arguments for the tool
 * @return the tool's captured output
 */
public static String runSchemaTool(Connection conn, String[] args) throws Exception {
    SchemaTool tool = new SchemaTool();
    if (conn != null) {
        tool.setConf(conn.unwrap(PhoenixConnection.class).getQueryServices().getConfiguration());
    }
    tool.run(args);
    return tool.getOutput();
}
/**
 * Returns the table-property clause of a CREATE statement, i.e. everything after the
 * final closing parenthesis (empty string if the statement ends at the parenthesis).
 */
private String getProperties(String query) {
    int lastParen = query.lastIndexOf(")");
    return query.substring(lastParen + 1);
}
/**
 * Compares two comma-separated property lists, ignoring case, whitespace, and order.
 *
 * @return {@code true} if both lists contain the same set of KEY=VALUE entries
 */
private boolean compareProperties(String prop1, String prop2) {
    Set<String> left =
            new HashSet<>(Arrays.asList(prop1.toUpperCase().replaceAll("\\s+", "").split(",")));
    Set<String> right =
            new HashSet<>(Arrays.asList(prop2.toUpperCase().replaceAll("\\s+", "").split(",")));
    return left.equals(right);
}
}
| |
/*************************GO-LICENSE-START*********************************
* Copyright 2014 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*************************GO-LICENSE-END***********************************/
package com.thoughtworks.go.config.materials;
import com.thoughtworks.go.config.CaseInsensitiveString;
import com.thoughtworks.go.domain.PersistentObject;
import com.thoughtworks.go.domain.materials.Material;
import com.thoughtworks.go.domain.materials.MaterialConfig;
import com.thoughtworks.go.util.CachedDigestUtils;
import com.thoughtworks.go.util.ListUtil;
import com.thoughtworks.go.util.StringUtil;
import java.util.*;
/**
* @understands material configuration
*/
/**
 * Base class for all material implementations. Caches the SQL criteria map, the XML
 * attribute map, and the fingerprints derived from them; subclasses contribute their
 * type-specific attributes via the abstract append* hooks.
 *
 * <p>NOTE(review): the lazy caches (sqlCriteria, attributesForXml, fingerprint,
 * pipelineUniqueFingerprint) are not synchronized — presumably instances are confined
 * to one thread; confirm before sharing across threads.
 */
public abstract class AbstractMaterial extends PersistentObject implements Material {
    /**
     * CAREFUL!, this should be the same as the one used in migration 47_create_new_materials.sql
     */
    public static final String FINGERPRINT_DELIMITER = "<|>";
    public static final String SQL_CRITERIA_TYPE = "type";
    // Display names longer than this are elided in the middle by getTruncatedDisplayName().
    private static final int TRUNCATED_NAME_MAX_LENGTH = 20;
    protected CaseInsensitiveString name;
    protected String type;
    private Map<String, Object> sqlCriteria;
    private Map<String, Object> attributesForXml;
    private String pipelineUniqueFingerprint;
    protected String fingerprint;

    public AbstractMaterial(String typeName) {
        type = typeName;
    }

    public CaseInsensitiveString getName() {
        return name;
    }

    /** Returns the immutable, lazily-built criteria map ("type" plus subclass criteria). */
    public final Map<String, Object> getSqlCriteria() {
        if (sqlCriteria == null) {
            Map<String, Object> map = new LinkedHashMap<String, Object>();
            map.put("type", type);
            appendCriteria(map);
            sqlCriteria = Collections.unmodifiableMap(map);
        }
        return sqlCriteria;
    }

    /** Returns the immutable, lazily-built attribute map used for XML rendering. */
    public final Map<String, Object> getAttributesForXml() {
        if (attributesForXml == null) {
            Map<String, Object> map = new LinkedHashMap<String, Object>();
            map.put("type", type);
            appendAttributes(map);
            attributesForXml = Collections.unmodifiableMap(map);
        }
        return attributesForXml;
    }

    /** Returns the SHA-256 fingerprint of this material's SQL criteria (cached). */
    public String getFingerprint() {
        if (fingerprint == null) {
            fingerprint = generateFingerprintFromCriteria(getSqlCriteria());
        }
        return fingerprint;
    }

    /** Returns the fingerprint extended with pipeline-unique criteria (cached). */
    public String getPipelineUniqueFingerprint() {
        if (pipelineUniqueFingerprint == null) {
            Map<String, Object> basicCriteria = new LinkedHashMap<String, Object>(getSqlCriteria());
            appendPipelineUniqueCriteria(basicCriteria);
            pipelineUniqueFingerprint = generateFingerprintFromCriteria(basicCriteria);
        }
        return pipelineUniqueFingerprint;
    }

    private String generateFingerprintFromCriteria(Map<String, Object> sqlCriteria) {
        List<String> list = new ArrayList<String>();
        for (Map.Entry<String, Object> criteria : sqlCriteria.entrySet()) {
            // Simple concatenation; the previous explicit StringBuilder chain added nothing.
            list.add(criteria.getKey() + "=" + criteria.getValue());
        }
        String fingerprint = ListUtil.join(list, FINGERPRINT_DELIMITER);
        // CAREFUL! the hash algorithm has to be same as the one used in 47_create_new_materials.sql
        return CachedDigestUtils.sha256Hex(fingerprint);
    }

    /**
     * Returns the display name, elided in the middle with "..." when it exceeds
     * {@link #TRUNCATED_NAME_MAX_LENGTH} characters.
     */
    public String getTruncatedDisplayName() {
        String displayName = getDisplayName();
        if (displayName.length() > TRUNCATED_NAME_MAX_LENGTH) {
            // StringBuilder: no synchronization needed, unlike the legacy StringBuffer.
            StringBuilder buffer = new StringBuilder();
            buffer.append(displayName.substring(0, TRUNCATED_NAME_MAX_LENGTH / 2));
            buffer.append("...");
            buffer.append(displayName.substring(displayName.length() - TRUNCATED_NAME_MAX_LENGTH / 2));
            displayName = buffer.toString();
        }
        return displayName;
    }

    protected abstract void appendCriteria(Map<String, Object> parameters);

    protected abstract void appendAttributes(Map<String,Object> parameters);

    protected abstract void appendPipelineUniqueCriteria(Map<String, Object> basicCriteria);

    public void setName(final CaseInsensitiveString name) {
        this.name = name;
    }

    public String getType() {
        return type;
    }

    public String getShortRevision(String revision) {
        return revision;
    }

    public boolean isSameFlyweight(Material other) {
        return getFingerprint().equals(other.getFingerprint());
    }

    @Override
    public boolean hasSameFingerprint(MaterialConfig materialConfig) {
        return getFingerprint().equals(materialConfig.getFingerprint());
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        AbstractMaterial that = (AbstractMaterial) o;
        if (name != null ? !name.equals(that.name) : that.name != null) {
            return false;
        }
        if (type != null ? !type.equals(that.type) : that.type != null) {
            return false;
        }
        return true;
    }

    @Override
    public int hashCode() {
        int result = name != null ? name.hashCode() : 0;
        result = 31 * result + (type != null ? type.hashCode() : 0);
        return result;
    }

    @Override public String toString() {
        return String.format("AbstractMaterial{name=%s, type=%s}", name, type);
    }

    /**
     * Clears cached identity maps/fingerprints after attribute changes.
     * NOTE(review): {@code fingerprint} itself is deliberately NOT cleared here —
     * confirm whether that is intentional (it is derived from sqlCriteria).
     */
    protected void resetCachedIdentityAttributes() {
        sqlCriteria = null;
        attributesForXml = null;
        pipelineUniqueFingerprint = null;
    }

    @Override
    public MaterialConfig config() {
        throw new RuntimeException("You need to implement this");
    }

    @Override
    public Map<String, Object> getAttributes(boolean addSecureFields) {
        throw new RuntimeException("You need to implement this");
    }

    protected boolean hasDestinationFolder() {
        return !StringUtil.isBlank(getFolder());
    }

    public boolean supportsDestinationFolder() {
        return false;
    }

    @Override
    public void updateFromConfig(MaterialConfig materialConfig) {
        if(materialConfig instanceof PasswordAwareMaterial) {
            PasswordAwareMaterial passwordConfig = (PasswordAwareMaterial) materialConfig;
            ((PasswordAwareMaterial) this).setPassword(passwordConfig.getPassword());
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.calcite.sql.validate;
import org.apache.calcite.linq4j.Linq4j;
import org.apache.calcite.linq4j.Ord;
import org.apache.calcite.plan.RelOptSchemaWithSampling;
import org.apache.calcite.plan.RelOptTable;
import org.apache.calcite.prepare.Prepare;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.rel.type.RelDataTypeField;
import org.apache.calcite.sql.SqlCall;
import org.apache.calcite.sql.SqlDataTypeSpec;
import org.apache.calcite.sql.SqlDynamicParam;
import org.apache.calcite.sql.SqlIdentifier;
import org.apache.calcite.sql.SqlIntervalQualifier;
import org.apache.calcite.sql.SqlLiteral;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.SqlNodeList;
import org.apache.calcite.sql.SqlOperatorTable;
import org.apache.calcite.sql.SqlUtil;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.sql.parser.SqlParserPos;
import org.apache.calcite.sql.type.SqlTypeUtil;
import org.apache.calcite.util.ImmutableBitSet;
import org.apache.calcite.util.Util;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* Utility methods related to validation.
*/
public class SqlValidatorUtil {
  // Utility class: not instantiable.
  private SqlValidatorUtil() {}

  //~ Methods ----------------------------------------------------------------

  /**
   * Converts a {@link SqlValidatorScope} into a {@link RelOptTable}. This is
   * only possible if the scope represents an identifier, such as "sales.emp".
   * Otherwise, returns null.
   *
   * @param namespace Namespace
   * @param catalogReader Schema
   * @param datasetName Name of sample dataset to substitute, or null to use
   * the regular table
   * @param usedDataset Output parameter which is set to true if a sample
   * dataset is found; may be null
   */
  public static RelOptTable getRelOptTable(
      SqlValidatorNamespace namespace,
      Prepare.CatalogReader catalogReader,
      String datasetName,
      boolean[] usedDataset) {
    if (!namespace.isWrapperFor(TableNamespace.class)) {
      return null;
    }
    final TableNamespace tableNamespace =
        namespace.unwrap(TableNamespace.class);
    final List<String> names = tableNamespace.getTable().getQualifiedName();
    RelOptTable table;
    if (datasetName != null
        && catalogReader instanceof RelOptSchemaWithSampling) {
      final RelOptSchemaWithSampling reader =
          (RelOptSchemaWithSampling) catalogReader;
      table = reader.getTableForMember(names, datasetName, usedDataset);
    } else {
      // Schema does not support substitution. Ignore the data set, if any.
      table = catalogReader.getTableForMember(names);
    }
    if (!tableNamespace.extendedFields.isEmpty()) {
      table = table.extend(tableNamespace.extendedFields);
    }
    return table;
  }

  /**
   * Looks up a field with a given name, returning null if not found.
   *
   * @param caseSensitive Whether the match must be case-sensitive
   * @param rowType Row type
   * @param columnName Field name
   * @return Field, or null if not found
   */
  public static RelDataTypeField lookupField(
      boolean caseSensitive,
      final RelDataType rowType,
      String columnName) {
    return rowType.getField(columnName, caseSensitive);
  }

  /**
   * Checks that a character type's charset is consistent with the charset of
   * its collation. Currently only asserts that both are present; the
   * cross-check is disabled pending a charset-to-collation mapping.
   */
  public static void checkCharsetAndCollateConsistentIfCharType(
      RelDataType type) {
    //(every charset must have a default collation)
    if (SqlTypeUtil.inCharFamily(type)) {
      Charset strCharset = type.getCharset();
      Charset colCharset = type.getCollation().getCharset();
      assert null != strCharset;
      assert null != colCharset;
      if (!strCharset.equals(colCharset)) {
        if (false) {
          // todo: enable this checking when we have a charset to
          // collation mapping
          throw new Error(type.toString()
              + " was found to have charset '" + strCharset.name()
              + "' and a mismatched collation charset '"
              + colCharset.name() + "'");
        }
      }
    }
  }

  /**
   * Converts an expression "expr" into "expr AS alias".
   */
  public static SqlNode addAlias(
      SqlNode expr,
      String alias) {
    final SqlParserPos pos = expr.getParserPosition();
    final SqlIdentifier id = new SqlIdentifier(alias, pos);
    return SqlStdOperatorTable.AS.createCall(pos, expr, id);
  }

  /**
   * Derives an alias for a node, and invents a mangled identifier if it
   * cannot.
   *
   * <p>Examples:
   *
   * <ul>
   * <li>Alias: "1 + 2 as foo" yields "foo"
   * <li>Identifier: "foo.bar.baz" yields "baz"
   * <li>Anything else yields "expr$<i>ordinal</i>"
   * </ul>
   *
   * @return An alias, if one can be derived; or a synthetic alias
   * "expr$<i>ordinal</i>" if ordinal &lt; 0; otherwise null
   */
  public static String getAlias(SqlNode node, int ordinal) {
    switch (node.getKind()) {
    case AS:
      // E.g. "1 + 2 as foo" --> "foo"
      return ((SqlCall) node).operand(1).toString();
    case OVER:
      // E.g. "bids over w" --> "bids"
      return getAlias(((SqlCall) node).operand(0), ordinal);
    case IDENTIFIER:
      // E.g. "foo.bar" --> "bar"
      return Util.last(((SqlIdentifier) node).names);
    default:
      if (ordinal < 0) {
        return null;
      } else {
        return SqlUtil.deriveAliasFromOrdinal(ordinal);
      }
    }
  }

  /**
   * Makes a name distinct from other names which have already been used, adds
   * it to the list, and returns it.
   *
   * @param name Suggested name, may not be unique
   * @param nameList Collection of names already used
   * @param suggester Base for name when input name is null
   * @return Unique name
   */
  public static String uniquify(
      String name,
      Set<String> nameList,
      Suggester suggester) {
    if (name != null) {
      if (nameList.add(name)) {
        // Name was not previously used; accept it as-is.
        return name;
      }
    }
    final String originalName = name;
    // Keep asking the suggester with increasing attempt numbers until a name
    // not yet in the set is produced.
    for (int j = 0;; j++) {
      name = suggester.apply(originalName, j, nameList.size());
      if (nameList.add(name)) {
        return name;
      }
    }
  }

  /**
   * Factory method for {@link SqlValidator}.
   */
  public static SqlValidatorWithHints newValidator(
      SqlOperatorTable opTab,
      SqlValidatorCatalogReader catalogReader,
      RelDataTypeFactory typeFactory) {
    return new SqlValidatorImpl(
        opTab,
        catalogReader,
        typeFactory,
        SqlConformance.DEFAULT);
  }

  /**
   * Makes sure that the names in a list are unique.
   *
   * <p>Does not modify the input list. Returns the input list if the strings
   * are unique, otherwise allocates a new list.
   *
   * @param nameList List of strings
   * @return List of unique strings
   */
  public static List<String> uniquify(List<String> nameList) {
    return uniquify(nameList, EXPR_SUGGESTER);
  }

  /**
   * Makes sure that the names in a list are unique, using the given suggester
   * to invent replacements for duplicates.
   *
   * <p>Does not modify the input list. Returns the input list if the strings
   * are unique, otherwise allocates a new list.
   *
   * @param nameList List of strings
   * @param suggester How to generate replacement names
   * @return List of unique strings
   */
  public static List<String> uniquify(
      List<String> nameList,
      Suggester suggester) {
    // LinkedHashSet preserves insertion order, so the result list (built from
    // 'used') keeps the original name order.
    Set<String> used = new LinkedHashSet<String>();
    int changeCount = 0;
    for (String name : nameList) {
      String uniqueName = uniquify(name, used, suggester);
      if (!uniqueName.equals(name)) {
        ++changeCount;
      }
    }
    return changeCount == 0
        ? nameList
        : new ArrayList<String>(used);
  }

  /**
   * Resolves a multi-part identifier such as "SCHEMA.EMP.EMPNO" to a
   * namespace. The returned namespace, never null, may represent a
   * schema, table, column, etc.
   */
  public static SqlValidatorNamespace lookup(
      SqlValidatorScope scope,
      List<String> names) {
    assert names.size() > 0;
    SqlValidatorNamespace namespace = null;
    for (int i = 0; i < names.size(); i++) {
      String name = names.get(i);
      if (i == 0) {
        // First component is resolved against the scope...
        namespace = scope.resolve(name, null, null);
      } else {
        // ...subsequent components against the previous namespace.
        namespace = namespace.lookupChild(name);
      }
    }
    assert namespace != null;
    return namespace;
  }

  /**
   * Populates {@code hints} with monikers of schema objects that could
   * complete the given (partial) compound name.
   *
   * @param catalogReader Catalog reader
   * @param names Partially-written compound identifier; the last component is
   * assumed to be a dummy/incomplete token and is skipped
   * @param hints Output list of candidate monikers
   */
  public static void getSchemaObjectMonikers(
      SqlValidatorCatalogReader catalogReader,
      List<String> names,
      List<SqlMoniker> hints) {
    // Assume that the last name is 'dummy' or similar.
    List<String> subNames = Util.skipLast(names);
    hints.addAll(catalogReader.getAllSchemaObjectNames(subNames));
    // If the name has length 0, try prepending the name of the default
    // schema. So, the empty name would yield a list of tables in the
    // default schema, as well as a list of schemas from the above code.
    if (subNames.size() == 0) {
      hints.addAll(
          catalogReader.getAllSchemaObjectNames(
              catalogReader.getSchemaName()));
    }
  }

  /**
   * Returns the nearest enclosing {@link SelectScope}, or null if there is
   * none within the chain of delegating scopes.
   */
  public static SelectScope getEnclosingSelectScope(SqlValidatorScope scope) {
    while (scope instanceof DelegatingScope) {
      if (scope instanceof SelectScope) {
        return (SelectScope) scope;
      }
      scope = ((DelegatingScope) scope).getParent();
    }
    return null;
  }

  /**
   * Returns the nearest enclosing {@link AggregatingSelectScope}, or null if
   * there is none within the chain of delegating scopes.
   */
  public static AggregatingSelectScope
  getEnclosingAggregateSelectScope(SqlValidatorScope scope) {
    while (scope instanceof DelegatingScope) {
      if (scope instanceof AggregatingSelectScope) {
        return (AggregatingSelectScope) scope;
      }
      scope = ((DelegatingScope) scope).getParent();
    }
    return null;
  }

  /**
   * Derives the list of column names suitable for NATURAL JOIN. These are the
   * columns that occur exactly once on each side of the join.
   *
   * @param leftRowType Row type of left input to the join
   * @param rightRowType Row type of right input to the join
   * @return List of columns that occur once on each side
   */
  public static List<String> deriveNaturalJoinColumnList(
      RelDataType leftRowType,
      RelDataType rightRowType) {
    List<String> naturalColumnNames = new ArrayList<String>();
    final List<String> leftNames = leftRowType.getFieldNames();
    final List<String> rightNames = rightRowType.getFieldNames();
    for (String name : leftNames) {
      // Columns duplicated on either side are excluded from the natural join.
      if ((Collections.frequency(leftNames, name) == 1)
          && (Collections.frequency(rightNames, name) == 1)) {
        naturalColumnNames.add(name);
      }
    }
    return naturalColumnNames;
  }

  /**
   * Creates a struct type whose fields are the fields of {@code type} named
   * in {@code columnNameList}, in that order.
   */
  public static RelDataType createTypeFromProjection(RelDataType type,
      List<String> columnNameList, RelDataTypeFactory typeFactory,
      boolean caseSensitive) {
    // If the names in columnNameList and type have case-sensitive differences,
    // the resulting type will use those from type. These are presumably more
    // canonical.
    final List<RelDataTypeField> fields =
        new ArrayList<RelDataTypeField>(columnNameList.size());
    for (String name : columnNameList) {
      RelDataTypeField field = type.getField(name, caseSensitive);
      fields.add(type.getFieldList().get(field.getIndex()));
    }
    return typeFactory.createStructType(fields);
  }

  /** Analyzes an expression in a GROUP BY clause.
   *
   * <p>It may be an expression, an empty list (), or a call to
   * {@code GROUPING SETS}, {@code CUBE} or {@code ROLLUP}.
   *
   * <p>Each group item produces a list of group sets, which are written to
   * {@code topBuilder}. To find the grouping sets of the query, we will take
   * the cartesian product of the group sets. */
  public static void analyzeGroupItem(SqlValidatorScope scope,
      List<SqlNode> groupExprs, Map<Integer, Integer> groupExprProjection,
      ImmutableList.Builder<ImmutableList<ImmutableBitSet>> topBuilder,
      SqlNode groupExpr) {
    final ImmutableList.Builder<ImmutableBitSet> builder;
    switch (groupExpr.getKind()) {
    case CUBE:
    case ROLLUP:
      // E.g. ROLLUP(a, (b, c)) becomes [{0}, {1, 2}]
      // then we roll up to [(0, 1, 2), (0), ()]  -- note no (0, 1)
      List<ImmutableBitSet> bitSets =
          analyzeGroupTuple(scope, groupExprs,
              groupExprProjection, ((SqlCall) groupExpr).getOperandList());
      switch (groupExpr.getKind()) {
      case ROLLUP:
        topBuilder.add(rollup(bitSets));
        return;
      default:
        topBuilder.add(cube(bitSets));
        return;
      }
    case OTHER:
      if (groupExpr instanceof SqlNodeList) {
        // A parenthesized list of group items: analyze each one in turn.
        SqlNodeList list = (SqlNodeList) groupExpr;
        for (SqlNode node : list) {
          analyzeGroupItem(scope, groupExprs, groupExprProjection, topBuilder,
              node);
        }
        return;
      }
      // fall through
    case GROUPING_SETS:
    default:
      builder = ImmutableList.builder();
      convertGroupSet(scope, groupExprs, groupExprProjection, builder,
          groupExpr);
      topBuilder.add(builder.build());
    }
  }

  /** Analyzes a GROUPING SETS item in a GROUP BY clause. */
  private static void convertGroupSet(SqlValidatorScope scope,
      List<SqlNode> groupExprs, Map<Integer, Integer> groupExprProjection,
      ImmutableList.Builder<ImmutableBitSet> builder, SqlNode groupExpr) {
    switch (groupExpr.getKind()) {
    case GROUPING_SETS:
      // Nested GROUPING SETS: flatten by recursing into each operand.
      final SqlCall call = (SqlCall) groupExpr;
      for (SqlNode node : call.getOperandList()) {
        convertGroupSet(scope, groupExprs, groupExprProjection, builder, node);
      }
      return;
    case ROW:
      // A tuple such as (a, b) contributes the union of its members' bits.
      final List<ImmutableBitSet> bitSets =
          analyzeGroupTuple(scope, groupExprs, groupExprProjection,
              ((SqlCall) groupExpr).getOperandList());
      builder.add(ImmutableBitSet.union(bitSets));
      return;
    default:
      builder.add(
          analyzeGroupExpr(scope, groupExprs, groupExprProjection, groupExpr));
      return;
    }
  }

  /** Analyzes a tuple in a GROUPING SETS clause.
   *
   * <p>For example, in {@code GROUP BY GROUPING SETS ((a, b), a, c)},
   * {@code (a, b)} is a tuple.
   *
   * <p>Gathers into {@code groupExprs} the set of distinct expressions being
   * grouped, and returns a bitmap indicating which expressions this tuple
   * is grouping. */
  private static List<ImmutableBitSet>
  analyzeGroupTuple(SqlValidatorScope scope, List<SqlNode> groupExprs,
      Map<Integer, Integer> groupExprProjection, List<SqlNode> operandList) {
    List<ImmutableBitSet> list = Lists.newArrayList();
    for (SqlNode operand : operandList) {
      list.add(
          analyzeGroupExpr(scope, groupExprs, groupExprProjection, operand));
    }
    return list;
  }

  /** Analyzes a component of a tuple in a GROUPING SETS clause. */
  private static ImmutableBitSet analyzeGroupExpr(SqlValidatorScope scope,
      List<SqlNode> groupExprs, Map<Integer, Integer> groupExprProjection,
      SqlNode groupExpr) {
    final SqlNode expandedGroupExpr =
        scope.getValidator().expand(groupExpr, scope);
    switch (expandedGroupExpr.getKind()) {
    case ROW:
      return ImmutableBitSet.union(
          analyzeGroupTuple(scope, groupExprs, groupExprProjection,
              ((SqlCall) expandedGroupExpr).getOperandList()));
    case OTHER:
      if (expandedGroupExpr instanceof SqlNodeList
          && ((SqlNodeList) expandedGroupExpr).size() == 0) {
        // The empty grouping set "()" contributes no bits.
        return ImmutableBitSet.of();
      }
    }
    final int ref = lookupGroupExpr(groupExprs, groupExpr);
    if (expandedGroupExpr instanceof SqlIdentifier) {
      // SQL 2003 does not allow expressions of column references
      SqlIdentifier expr = (SqlIdentifier) expandedGroupExpr;
      // column references should be fully qualified.
      assert expr.names.size() == 2;
      String originalRelName = expr.names.get(0);
      String originalFieldName = expr.names.get(1);
      int[] nsIndexes = {-1};
      final SqlValidatorScope[] ancestorScopes = {null};
      SqlValidatorNamespace foundNs =
          scope.resolve(
              originalRelName,
              ancestorScopes,
              nsIndexes);
      assert foundNs != null;
      assert nsIndexes.length == 1;
      int childNamespaceIndex = nsIndexes[0];
      int namespaceOffset = 0;
      if (childNamespaceIndex > 0) {
        // If not the first child, need to figure out the width of
        // output types from all the preceding namespaces
        assert ancestorScopes[0] instanceof ListScope;
        List<SqlValidatorNamespace> children =
            ((ListScope) ancestorScopes[0]).getChildren();
        for (int j = 0; j < childNamespaceIndex; j++) {
          namespaceOffset +=
              children.get(j).getRowType().getFieldCount();
        }
      }
      RelDataTypeField field =
          scope.getValidator().getCatalogReader().field(foundNs.getRowType(),
              originalFieldName);
      int origPos = namespaceOffset + field.getIndex();
      // Remember which original input position maps to this group expression.
      groupExprProjection.put(origPos, ref);
    }
    return ImmutableBitSet.of(ref);
  }

  /** Returns the index of {@code expr} in {@code groupExprs} (comparing by
   * deep equality), adding it at the end if not already present. */
  private static int lookupGroupExpr(List<SqlNode> groupExprs, SqlNode expr) {
    for (Ord<SqlNode> node : Ord.zip(groupExprs)) {
      if (node.e.equalsDeep(expr, false)) {
        return node.i;
      }
    }
    groupExprs.add(expr);
    return groupExprs.size() - 1;
  }

  /** Computes the rollup of bit sets.
   *
   * <p>For example, <code>rollup({0}, {1})</code>
   * returns <code>({0, 1}, {0}, {})</code>.
   *
   * <p>Bit sets are not necessarily singletons:
   * <code>rollup({0, 2}, {3, 5})</code>
   * returns <code>({0, 2, 3, 5}, {0, 2}, {})</code>. */
  @VisibleForTesting
  public static ImmutableList<ImmutableBitSet>
  rollup(List<ImmutableBitSet> bitSets) {
    Set<ImmutableBitSet> builder = Sets.newLinkedHashSet();
    for (;;) {
      // Add the union of the current prefix, then drop the last element and
      // repeat, ending with the empty set.
      final ImmutableBitSet union = ImmutableBitSet.union(bitSets);
      builder.add(union);
      if (union.isEmpty()) {
        break;
      }
      bitSets = bitSets.subList(0, bitSets.size() - 1);
    }
    return ImmutableList.copyOf(builder);
  }

  /** Computes the cube of bit sets.
   *
   * <p>For example, <code>cube({0}, {1})</code>
   * returns <code>({0, 1}, {0}, {1}, {})</code>.
   *
   * <p>Bit sets are not necessarily singletons:
   * <code>cube({0, 2}, {3, 5})</code>
   * returns <code>({0, 2, 3, 5}, {0, 2}, {3, 5}, {})</code>. */
  @VisibleForTesting
  public static ImmutableList<ImmutableBitSet>
  cube(List<ImmutableBitSet> bitSets) {
    // Given the bit sets [{1}, {2, 3}, {5}],
    // form the lists [[{1}, {}], [{2, 3}, {}], [{5}, {}]].
    final Set<List<ImmutableBitSet>> builder = Sets.newLinkedHashSet();
    for (ImmutableBitSet bitSet : bitSets) {
      builder.add(Arrays.asList(bitSet, ImmutableBitSet.of()));
    }
    // The cartesian product of those pairs, each combination unioned into a
    // single bit set, yields every cube grouping set exactly once.
    Set<ImmutableBitSet> flattenedBitSets = Sets.newLinkedHashSet();
    for (List<ImmutableBitSet> o : Linq4j.product(builder)) {
      flattenedBitSets.add(ImmutableBitSet.union(o));
    }
    return ImmutableList.copyOf(flattenedBitSets);
  }

  //~ Inner Classes ----------------------------------------------------------

  /**
   * Walks over an expression, copying every node, and fully-qualifying every
   * identifier.
   */
  public static class DeepCopier extends SqlScopedShuttle {
    DeepCopier(SqlValidatorScope scope) {
      super(scope);
    }

    /** Copies a list of nodes. */
    public static SqlNodeList copy(SqlValidatorScope scope, SqlNodeList list) {
      return (SqlNodeList) list.accept(new DeepCopier(scope));
    }

    public SqlNode visit(SqlNodeList list) {
      SqlNodeList copy = new SqlNodeList(list.getParserPosition());
      for (SqlNode node : list) {
        copy.add(node.accept(this));
      }
      return copy;
    }

    // Override to copy all arguments regardless of whether visitor changes
    // them.
    protected SqlNode visitScoped(SqlCall call) {
      ArgHandler<SqlNode> argHandler =
          new CallCopyingArgHandler(call, true);
      call.getOperator().acceptCall(this, call, false, argHandler);
      return argHandler.result();
    }

    public SqlNode visit(SqlLiteral literal) {
      return (SqlNode) literal.clone();
    }

    public SqlNode visit(SqlIdentifier id) {
      // Identifiers are the one node kind that is rewritten, not just cloned.
      return getScope().fullyQualify(id);
    }

    public SqlNode visit(SqlDataTypeSpec type) {
      return (SqlNode) type.clone();
    }

    public SqlNode visit(SqlDynamicParam param) {
      return (SqlNode) param.clone();
    }

    public SqlNode visit(SqlIntervalQualifier intervalQualifier) {
      return (SqlNode) intervalQualifier.clone();
    }
  }

  /** Suggests candidates for unique names, given the number of attempts so far
   * and the number of expressions in the project list. */
  interface Suggester {
    String apply(String original, int attempt, int size);
  }

  /** Suggester yielding "EXPR$0", "EXPR$1", ... (or original + attempt). */
  public static final Suggester EXPR_SUGGESTER =
      new Suggester() {
        public String apply(String original, int attempt, int size) {
          return Util.first(original, "EXPR$") + attempt;
        }
      };

  /** Suggester yielding "$f0", "$f1", ... keyed off the larger of the project
   * list size and the attempt number. */
  public static final Suggester F_SUGGESTER =
      new Suggester() {
        public String apply(String original, int attempt, int size) {
          return Util.first(original, "$f") + Math.max(size, attempt);
        }
      };
}
// End SqlValidatorUtil.java
| |
// Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.api.ads.admanager.jaxws.v202202;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlType;
/**
*
* Provides information about the expected volume and composition of traffic over a date range for a
* traffic forecast segment.
*
*
* <p>Java class for ForecastAdjustment complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="ForecastAdjustment">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="id" type="{http://www.w3.org/2001/XMLSchema}long" minOccurs="0"/>
* <element name="trafficForecastSegmentId" type="{http://www.w3.org/2001/XMLSchema}long" minOccurs="0"/>
* <element name="name" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="dateRange" type="{https://www.google.com/apis/ads/publisher/v202202}DateRange" minOccurs="0"/>
* <element name="status" type="{https://www.google.com/apis/ads/publisher/v202202}ForecastAdjustmentStatus" minOccurs="0"/>
* <element name="volumeType" type="{https://www.google.com/apis/ads/publisher/v202202}ForecastAdjustmentVolumeType" minOccurs="0"/>
* <element name="allowAdjustingForecastAboveRecommendedLimit" type="{http://www.w3.org/2001/XMLSchema}boolean" minOccurs="0"/>
* <element name="dailyVolumeSettings" type="{https://www.google.com/apis/ads/publisher/v202202}DailyVolumeSettings" minOccurs="0"/>
* <element name="totalVolumeSettings" type="{https://www.google.com/apis/ads/publisher/v202202}TotalVolumeSettings" minOccurs="0"/>
* <element name="historicalBasisVolumeSettings" type="{https://www.google.com/apis/ads/publisher/v202202}HistoricalBasisVolumeSettings" minOccurs="0"/>
* <element name="calculatedDailyAdOpportunityCounts" type="{http://www.w3.org/2001/XMLSchema}long" maxOccurs="unbounded" minOccurs="0"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "ForecastAdjustment", propOrder = {
"id",
"trafficForecastSegmentId",
"name",
"dateRange",
"status",
"volumeType",
"allowAdjustingForecastAboveRecommendedLimit",
"dailyVolumeSettings",
"totalVolumeSettings",
"historicalBasisVolumeSettings",
"calculatedDailyAdOpportunityCounts"
})
public class ForecastAdjustment {

    // JAXB-generated model: fields below map 1:1 to the schema elements listed
    // in the class javadoc; all are optional (minOccurs="0") and hence nullable.
    protected Long id;
    protected Long trafficForecastSegmentId;
    protected String name;
    protected DateRange dateRange;
    @XmlSchemaType(name = "string")
    protected ForecastAdjustmentStatus status;
    @XmlSchemaType(name = "string")
    protected ForecastAdjustmentVolumeType volumeType;
    protected Boolean allowAdjustingForecastAboveRecommendedLimit;
    protected DailyVolumeSettings dailyVolumeSettings;
    protected TotalVolumeSettings totalVolumeSettings;
    protected HistoricalBasisVolumeSettings historicalBasisVolumeSettings;
    @XmlElement(type = Long.class)
    protected List<Long> calculatedDailyAdOpportunityCounts;

    /**
     * Gets the value of the id property.
     *
     * @return
     *     possible object is
     *     {@link Long }
     *
     */
    public Long getId() {
        return id;
    }

    /**
     * Sets the value of the id property.
     *
     * @param value
     *     allowed object is
     *     {@link Long }
     *
     */
    public void setId(Long value) {
        this.id = value;
    }

    /**
     * Gets the value of the trafficForecastSegmentId property.
     *
     * @return
     *     possible object is
     *     {@link Long }
     *
     */
    public Long getTrafficForecastSegmentId() {
        return trafficForecastSegmentId;
    }

    /**
     * Sets the value of the trafficForecastSegmentId property.
     *
     * @param value
     *     allowed object is
     *     {@link Long }
     *
     */
    public void setTrafficForecastSegmentId(Long value) {
        this.trafficForecastSegmentId = value;
    }

    /**
     * Gets the value of the name property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getName() {
        return name;
    }

    /**
     * Sets the value of the name property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setName(String value) {
        this.name = value;
    }

    /**
     * Gets the value of the dateRange property.
     *
     * @return
     *     possible object is
     *     {@link DateRange }
     *
     */
    public DateRange getDateRange() {
        return dateRange;
    }

    /**
     * Sets the value of the dateRange property.
     *
     * @param value
     *     allowed object is
     *     {@link DateRange }
     *
     */
    public void setDateRange(DateRange value) {
        this.dateRange = value;
    }

    /**
     * Gets the value of the status property.
     *
     * @return
     *     possible object is
     *     {@link ForecastAdjustmentStatus }
     *
     */
    public ForecastAdjustmentStatus getStatus() {
        return status;
    }

    /**
     * Sets the value of the status property.
     *
     * @param value
     *     allowed object is
     *     {@link ForecastAdjustmentStatus }
     *
     */
    public void setStatus(ForecastAdjustmentStatus value) {
        this.status = value;
    }

    /**
     * Gets the value of the volumeType property.
     *
     * @return
     *     possible object is
     *     {@link ForecastAdjustmentVolumeType }
     *
     */
    public ForecastAdjustmentVolumeType getVolumeType() {
        return volumeType;
    }

    /**
     * Sets the value of the volumeType property.
     *
     * @param value
     *     allowed object is
     *     {@link ForecastAdjustmentVolumeType }
     *
     */
    public void setVolumeType(ForecastAdjustmentVolumeType value) {
        this.volumeType = value;
    }

    /**
     * Gets the value of the allowAdjustingForecastAboveRecommendedLimit property.
     *
     * @return
     *     possible object is
     *     {@link Boolean }
     *
     */
    public Boolean isAllowAdjustingForecastAboveRecommendedLimit() {
        return allowAdjustingForecastAboveRecommendedLimit;
    }

    /**
     * Sets the value of the allowAdjustingForecastAboveRecommendedLimit property.
     *
     * @param value
     *     allowed object is
     *     {@link Boolean }
     *
     */
    public void setAllowAdjustingForecastAboveRecommendedLimit(Boolean value) {
        this.allowAdjustingForecastAboveRecommendedLimit = value;
    }

    /**
     * Gets the value of the dailyVolumeSettings property.
     *
     * @return
     *     possible object is
     *     {@link DailyVolumeSettings }
     *
     */
    public DailyVolumeSettings getDailyVolumeSettings() {
        return dailyVolumeSettings;
    }

    /**
     * Sets the value of the dailyVolumeSettings property.
     *
     * @param value
     *     allowed object is
     *     {@link DailyVolumeSettings }
     *
     */
    public void setDailyVolumeSettings(DailyVolumeSettings value) {
        this.dailyVolumeSettings = value;
    }

    /**
     * Gets the value of the totalVolumeSettings property.
     *
     * @return
     *     possible object is
     *     {@link TotalVolumeSettings }
     *
     */
    public TotalVolumeSettings getTotalVolumeSettings() {
        return totalVolumeSettings;
    }

    /**
     * Sets the value of the totalVolumeSettings property.
     *
     * @param value
     *     allowed object is
     *     {@link TotalVolumeSettings }
     *
     */
    public void setTotalVolumeSettings(TotalVolumeSettings value) {
        this.totalVolumeSettings = value;
    }

    /**
     * Gets the value of the historicalBasisVolumeSettings property.
     *
     * @return
     *     possible object is
     *     {@link HistoricalBasisVolumeSettings }
     *
     */
    public HistoricalBasisVolumeSettings getHistoricalBasisVolumeSettings() {
        return historicalBasisVolumeSettings;
    }

    /**
     * Sets the value of the historicalBasisVolumeSettings property.
     *
     * @param value
     *     allowed object is
     *     {@link HistoricalBasisVolumeSettings }
     *
     */
    public void setHistoricalBasisVolumeSettings(HistoricalBasisVolumeSettings value) {
        this.historicalBasisVolumeSettings = value;
    }

    /**
     * Gets the value of the calculatedDailyAdOpportunityCounts property.
     *
     * <p>
     * This accessor method returns a reference to the live list,
     * not a snapshot. Therefore any modification you make to the
     * returned list will be present inside the JAXB object.
     * This is why there is not a <CODE>set</CODE> method for the calculatedDailyAdOpportunityCounts property.
     *
     * <p>
     * For example, to add a new item, do as follows:
     * <pre>
     *    getCalculatedDailyAdOpportunityCounts().add(newItem);
     * </pre>
     *
     *
     * <p>
     * Objects of the following type(s) are allowed in the list
     * {@link Long }
     *
     *
     */
    public List<Long> getCalculatedDailyAdOpportunityCounts() {
        // Lazy-initialized so JAXB can populate the live list during unmarshalling.
        if (calculatedDailyAdOpportunityCounts == null) {
            calculatedDailyAdOpportunityCounts = new ArrayList<Long>();
        }
        return this.calculatedDailyAdOpportunityCounts;
    }

}
| |
package org.hisp.dhis.common;
/*
* Copyright (c) 2004-2018, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import org.apache.commons.lang.StringUtils;
import org.hisp.dhis.calendar.Calendar;
import org.hisp.dhis.calendar.DateTimeUnit;
import org.hisp.dhis.category.Category;
import org.hisp.dhis.category.CategoryCombo;
import org.hisp.dhis.category.CategoryOption;
import org.hisp.dhis.period.Period;
import org.hisp.dhis.period.PeriodType;
import org.hisp.dhis.period.WeeklyAbstractPeriodType;
import org.joda.time.DateTime;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import org.springframework.util.Assert;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.ListIterator;
import java.util.Map;
import java.util.stream.Collectors;
/**
* @author Lars Helge Overland
*/
public class IdentifiableObjectUtils
{
    // Separator used when composing composite identifiers.
    public static final String SEPARATOR = "-";

    // Separator inserted between display names by join( Collection ).
    public static final String SEPARATOR_JOIN = ", ";

    // Joda-Time formatters are thread-safe and cached here as constants.
    public static final DateTimeFormatter LONG_DATE_FORMAT = DateTimeFormat.forPattern( "yyyy-MM-dd'T'HH:mm:ss" );

    public static final DateTimeFormatter MEDIUM_DATE_FORMAT = DateTimeFormat.forPattern( "yyyy-MM-dd" );

    // Resolves alternative/legacy class-name spellings to the current category
    // class simple names. NOTE(review): keys currently equal the simple names —
    // presumably kept for callers using historical aliases; confirm usage.
    public static final Map<String, String> CLASS_ALIAS = ImmutableMap.<String, String>builder().
        put( "CategoryOption", CategoryOption.class.getSimpleName() ).
        put( "Category", Category.class.getSimpleName() ).
        put( "CategoryCombo", CategoryCombo.class.getSimpleName() ).build();
/**
* Joins the names of the IdentifiableObjects in the given list and separates
* them with {@link IdentifiableObjectUtils#SEPARATOR_JOIN} (a comma and a space).
* Returns null if the given list is null or has no elements.
*
* @param objects the list of IdentifiableObjects.
* @return the joined string.
*/
public static String join( Collection<? extends IdentifiableObject> objects )
{
if ( objects == null || objects.isEmpty() )
{
return null;
}
List<String> names = objects.stream().map( IdentifiableObject::getDisplayName ).collect( Collectors.toList() );
return StringUtils.join( names, SEPARATOR_JOIN );
}
/**
* Returns a list of uids for the given collection of IdentifiableObjects.
*
* @param objects the list of IdentifiableObjects.
* @return a list of uids.
*/
public static <T extends IdentifiableObject> List<String> getUids( Collection<T> objects )
{
return objects != null ? objects.stream().map( o -> o.getUid() ).collect( Collectors.toList() ) : null;
}
/**
* Returns a list of codes for the given collection of IdentifiableObjects.
*
* @param objects the list of IdentifiableObjects.
* @return a list of codes.
*/
public static <T extends IdentifiableObject> List<String> getCodes( Collection<T> objects )
{
return objects != null ? objects.stream().map( o -> o.getCode() ).collect( Collectors.toList() ) : null;
}
/**
* Returns a list of internal identifiers for the given collection of IdentifiableObjects.
*
* @param objects the list of IdentifiableObjects.
* @return a list of identifiers.
*/
public static <T extends IdentifiableObject> List<Integer> getIdentifiers( Collection<T> objects )
{
return objects != null ? objects.stream().map( o -> o.getId() ).collect( Collectors.toList() ) : null;
}
/**
 * Returns a map from internal identifier to object for the given collection
 * of IdentifiableObjects. If several objects share an identifier, the last
 * one encountered wins.
 *
 * @param objects the collection of IdentifiableObjects, must not be null.
 * @return a map from the object internal identifiers to the objects.
 */
public static <T extends IdentifiableObject> Map<Integer, T> getIdentifierMap( Collection<T> objects )
{
    Map<Integer, T> map = new HashMap<>();

    objects.forEach( object -> map.put( object.getId(), object ) );

    return map;
}
/**
 * Returns a list of calendar-specific period identifiers for the given
 * collection of periods and calendar.
 *
 * @param periods the collection of periods (elements are expected to be {@link Period}).
 * @param calendar the calendar to use for generation of iso periods.
 * @return a list of iso period identifiers.
 */
public static <T extends IdentifiableObject> List<String> getLocalPeriodIdentifiers( Collection<T> periods, Calendar calendar )
{
    return periods.stream()
        .map( object -> (Period) object )
        .map( period -> period.getPeriodType().getIsoDate( calendar.fromIso( period.getStartDate() ) ) )
        .collect( Collectors.toList() );
}
/**
 * Returns a local period identifier for the given period and calendar.
 * For ISO 8601 calendars this is simply the period's own iso date.
 *
 * @param period the period.
 * @param calendar the calendar to use for generation of the iso period.
 * @return a period identifier based on the given calendar.
 */
public static String getLocalPeriodIdentifier( Period period, Calendar calendar )
{
    return calendar.isIso8601()
        ? period.getIsoDate()
        : period.getPeriodType().getIsoDate( calendar.fromIso( period.getStartDate() ) );
}
/**
 * Returns the {@link Period} of the argument period type which corresponds to
 * the argument period. The frequency order of the given period type must be
 * greater than or equal to the period type of the given period (represent
 * "longer" periods). Weeks are converted to "longer" periods by determining
 * which period contains at least 4 days of the week.
 * <p>
 * As an example, providing {@code Quarter 1, 2017} and {@code Yearly} as
 * arguments will return the yearly period {@code 2017}.
 *
 * @param period the period.
 * @param periodType the period type of the period to return.
 * @param calendar the calendar to use when calculating the period.
 * @return a period.
 */
public static Period getPeriodByPeriodType( Period period, PeriodType periodType, Calendar calendar )
{
    Assert.isTrue( periodType.getFrequencyOrder() >= period.getPeriodType().getFrequencyOrder(),
        "Frequency order of period type must be greater than or equal to period" );

    Date date = period.getStartDate();

    // Shift weekly periods by 3 days so the date falls in the period
    // containing at least 4 days of the week
    if ( period.getPeriodType() instanceof WeeklyAbstractPeriodType )
    {
        date = new DateTime( date.getTime() ).plusDays( 3 ).toDate();
    }

    return periodType.createPeriod( date, calendar );
}
/**
 * Filters the given list of IdentifiableObjects, retaining the objects whose
 * display name contains the given key.
 *
 * @param identifiableObjects the list of IdentifiableObjects.
 * @param key the key to search for within display names.
 * @param ignoreCase indicates whether to ignore case when filtering.
 * @return a new, filtered list of IdentifiableObjects.
 */
public static <T extends IdentifiableObject> List<T> filterNameByKey( List<T> identifiableObjects, String key,
    boolean ignoreCase )
{
    List<T> objects = new ArrayList<>();

    // Normalize the key once up front rather than per iteration
    String searchKey = ignoreCase ? key.toLowerCase() : key;

    for ( T object : identifiableObjects )
    {
        String name = ignoreCase ? object.getDisplayName().toLowerCase() : object.getDisplayName();

        // String.contains is the idiomatic equivalent of indexOf(..) != -1
        if ( name.contains( searchKey ) )
        {
            objects.add( object );
        }
    }

    return objects;
}
/**
 * Removes duplicates from the given list in place while maintaining the order,
 * keeping the first occurrence of each element. Equality is based on the
 * elements' {@code equals}/{@code hashCode}.
 *
 * @param list the list to deduplicate, modified in place.
 * @return the given list, for chaining.
 */
public static <T extends IdentifiableObject> List<T> removeDuplicates( List<T> list )
{
    // LinkedHashSet preserves first-occurrence insertion order and reduces the
    // previous O(n^2) list.contains scan to O(n)
    java.util.Set<T> unique = new java.util.LinkedHashSet<>( list );

    list.clear();
    list.addAll( unique );

    return list;
}
/**
 * Generates a tag reflecting the date of when the most recently updated
 * IdentifiableObject in the given collection was modified.
 *
 * @param objects the collection of IdentifiableObjects.
 * @return a string tag, or null if the collection is null or no object has a
 *         last-updated date.
 */
public static <T extends IdentifiableObject> String getLastUpdatedTag( Collection<T> objects )
{
    if ( objects == null )
    {
        return null;
    }

    Date latest = null;

    for ( IdentifiableObject object : objects )
    {
        Date lastUpdated = object != null ? object.getLastUpdated() : null;

        if ( lastUpdated != null && ( latest == null || lastUpdated.after( latest ) ) )
        {
            latest = lastUpdated;
        }
    }

    return latest != null ? objects.size() + SEPARATOR + LONG_DATE_FORMAT.print( new DateTime( latest ) ) : null;
}
/**
 * Generates a tag reflecting the date of when the given object was last updated.
 *
 * @param object the identifiable object.
 * @return a string tag, or null if the given object is null.
 */
public static String getLastUpdatedTag( IdentifiableObject object )
{
    if ( object == null )
    {
        return null;
    }

    return LONG_DATE_FORMAT.print( new DateTime( object.getLastUpdated() ) );
}
/**
 * Returns a mapping between the uid and the display name of the given
 * identifiable objects. Throws if two objects share a uid.
 *
 * @param objects the identifiable objects, must not be null.
 * @return mapping between the uid and the display name of the given objects.
 */
public static Map<String, String> getUidNameMap( Collection<? extends IdentifiableObject> objects )
{
    return objects.stream()
        .collect( Collectors.toMap( object -> object.getUid(), object -> object.getDisplayName() ) );
}
/**
 * Returns a mapping between the uid and the property defined by the given
 * identifiable property for the given identifiable objects.
 *
 * @param objects the identifiable objects, must not be null.
 * @param property the identifiable property.
 * @return a mapping between uid and property value.
 */
public static Map<String, String> getUidPropertyMap( Collection<? extends IdentifiableObject> objects, IdentifiableProperty property )
{
    Map<String, String> map = Maps.newHashMap();

    for ( IdentifiableObject object : objects )
    {
        map.put( object.getUid(), object.getPropertyValue( IdScheme.from( property ) ) );
    }

    return map;
}
/**
 * Returns a mapping between the uid and the object for the given identifiable
 * objects. Throws if two objects share a uid.
 *
 * @param objects the identifiable objects.
 * @return mapping between the uid and the object, empty if the input is null.
 */
public static <T extends IdentifiableObject> Map<String, T> getUidObjectMap( Collection<T> objects )
{
    if ( objects == null )
    {
        return Maps.newHashMap();
    }

    return Maps.uniqueIndex( objects, IdentifiableObject::getUid );
}
/**
 * Returns a map from the identifiable property specified by the given id
 * scheme to the corresponding object. Objects whose property value is null
 * are skipped; on duplicate values the last object wins.
 *
 * @param objects the objects, must not be null.
 * @param idScheme the id scheme.
 * @return a map from property value to object.
 */
public static <T extends IdentifiableObject> Map<String, T> getIdMap( List<T> objects, IdScheme idScheme )
{
    Map<String, T> map = new HashMap<>();

    objects.forEach( object -> {
        String value = object.getPropertyValue( idScheme );

        if ( value != null )
        {
            map.put( value, object );
        }
    } );

    return map;
}
/**
 * Returns a usable display name for the given object. For IdentifiableObjects
 * this is the first non-empty of display name, uid and code; otherwise (or if
 * all three are empty) the class name is returned.
 *
 * @param object the object to get a display name for, may be null.
 * @return a usable display name, never null.
 */
public static String getDisplayName( Object object )
{
    if ( object == null )
    {
        return "[ object is null ]";
    }

    // instanceof is the idiomatic form of Class.isInstance for a statically
    // known type
    if ( object instanceof IdentifiableObject )
    {
        IdentifiableObject identifiableObject = (IdentifiableObject) object;

        String displayName = identifiableObject.getDisplayName();

        if ( displayName != null && !displayName.isEmpty() )
        {
            return displayName;
        }

        String uid = identifiableObject.getUid();

        if ( uid != null && !uid.isEmpty() )
        {
            return uid;
        }

        String code = identifiableObject.getCode();

        if ( code != null && !code.isEmpty() )
        {
            return code;
        }
    }

    return object.getClass().getName();
}
}
| |
/*
* Copyright 2012-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.autoconfigure.data.elasticsearch;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.net.InetSocketAddress;
import java.nio.charset.StandardCharsets;
import java.time.Duration;
import java.util.Base64;
import java.util.List;
import org.assertj.core.api.InstanceOfAssertFactories;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
import org.springframework.boot.autoconfigure.AutoConfigurations;
import org.springframework.boot.test.context.runner.ApplicationContextRunner;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.elasticsearch.client.ClientConfiguration;
import org.springframework.data.elasticsearch.client.ClientConfiguration.ClientConfigurationCallback;
import org.springframework.data.elasticsearch.client.reactive.ReactiveElasticsearchClient;
import org.springframework.data.elasticsearch.client.reactive.ReactiveRestClients;
import org.springframework.http.HttpHeaders;
import org.springframework.http.codec.CodecConfigurer.DefaultCodecConfig;
import org.springframework.web.reactive.function.client.WebClient;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.mock;
/**
 * Tests for {@link ReactiveElasticsearchRestClientAutoConfiguration}, covering the
 * default client beans, endpoint/credential configuration via both the current
 * {@code spring.elasticsearch.*} properties and the deprecated
 * {@code spring.data.elasticsearch.client.reactive.*} properties, and user-supplied
 * client customizations.
 *
 * @author Brian Clozel
 */
class ReactiveElasticsearchRestClientAutoConfigurationTests {

    private final ApplicationContextRunner contextRunner = new ApplicationContextRunner()
            .withConfiguration(AutoConfigurations.of(ReactiveElasticsearchRestClientAutoConfiguration.class));

    @Test
    void configureShouldCreateDefaultBeans() {
        this.contextRunner.run((context) -> {
            assertThat(context).hasSingleBean(ClientConfiguration.class)
                    .hasSingleBean(ReactiveElasticsearchClient.class);
            // With no properties set, the client targets localhost:9200
            List<InetSocketAddress> endpoints = context.getBean(ClientConfiguration.class).getEndpoints();
            assertThat(endpoints).hasSize(1);
            assertThat(endpoints.get(0).getHostString()).isEqualTo("localhost");
            assertThat(endpoints.get(0).getPort()).isEqualTo(9200);
        });
    }

    @Test
    void configureWhenCustomClientShouldBackOff() {
        // A user-defined ReactiveElasticsearchClient bean suppresses the auto-configured one
        this.contextRunner.withUserConfiguration(CustomClientConfiguration.class).run((context) -> assertThat(context)
                .hasSingleBean(ReactiveElasticsearchClient.class).hasBean("customClient"));
    }

    @Test
    void configureWhenCustomClientConfig() {
        // A user-defined ClientConfiguration bean suppresses the auto-configured one
        this.contextRunner.withUserConfiguration(CustomClientConfigConfiguration.class)
                .run((context) -> assertThat(context).hasSingleBean(ReactiveElasticsearchClient.class)
                        .hasSingleBean(ClientConfiguration.class).hasBean("customClientConfiguration"));
    }

    // Uses the deprecated spring.data.elasticsearch.client.reactive.* property prefix
    @Test
    @Deprecated
    void whenEndpointIsCustomizedThenClientConfigurationHasCustomEndpoint() {
        this.contextRunner.withPropertyValues("spring.data.elasticsearch.client.reactive.endpoints=localhost:9876")
                .run((context) -> {
                    List<InetSocketAddress> endpoints = context.getBean(ClientConfiguration.class).getEndpoints();
                    assertThat(endpoints).hasSize(1);
                    assertThat(endpoints.get(0).getHostString()).isEqualTo("localhost");
                    assertThat(endpoints.get(0).getPort()).isEqualTo(9876);
                });
    }

    // Uses the deprecated spring.data.elasticsearch.client.reactive.* property prefix
    @Test
    @Deprecated
    void whenMultipleEndpointsAreConfiguredThenClientConfigurationHasMultipleEndpoints() {
        this.contextRunner
                .withPropertyValues("spring.data.elasticsearch.client.reactive.endpoints=localhost:9876,localhost:8765")
                .run((context) -> {
                    List<InetSocketAddress> endpoints = context.getBean(ClientConfiguration.class).getEndpoints();
                    assertThat(endpoints).hasSize(2);
                    assertThat(endpoints.get(0).getHostString()).isEqualTo("localhost");
                    assertThat(endpoints.get(0).getPort()).isEqualTo(9876);
                    assertThat(endpoints.get(1).getHostString()).isEqualTo("localhost");
                    assertThat(endpoints.get(1).getPort()).isEqualTo(8765);
                });
    }

    @Test
    void whenUriIsCustomizedThenClientConfigurationHasCustomEndpoint() {
        this.contextRunner.withPropertyValues("spring.elasticsearch.uris=http://localhost:9876").run((context) -> {
            List<InetSocketAddress> endpoints = context.getBean(ClientConfiguration.class).getEndpoints();
            assertThat(endpoints).hasSize(1);
            assertThat(endpoints.get(0).getHostString()).isEqualTo("localhost");
            assertThat(endpoints.get(0).getPort()).isEqualTo(9876);
        });
    }

    @Test
    void whenUriHasHttpsSchemeThenClientConfigurationUsesSsl() {
        this.contextRunner.withPropertyValues("spring.elasticsearch.uris=https://localhost:9876").run((context) -> {
            ClientConfiguration clientConfiguration = context.getBean(ClientConfiguration.class);
            List<InetSocketAddress> endpoints = clientConfiguration.getEndpoints();
            assertThat(endpoints).hasSize(1);
            assertThat(endpoints.get(0).getHostString()).isEqualTo("localhost");
            assertThat(endpoints.get(0).getPort()).isEqualTo(9876);
            assertThat(clientConfiguration.useSsl()).isTrue();
        });
    }

    @Test
    void whenMultipleUrisAreConfiguredThenClientConfigurationHasMultipleEndpoints() {
        this.contextRunner.withPropertyValues("spring.elasticsearch.uris=http://localhost:9876,http://localhost:8765")
                .run((context) -> {
                    List<InetSocketAddress> endpoints = context.getBean(ClientConfiguration.class).getEndpoints();
                    assertThat(endpoints).hasSize(2);
                    assertThat(endpoints.get(0).getHostString()).isEqualTo("localhost");
                    assertThat(endpoints.get(0).getPort()).isEqualTo(9876);
                    assertThat(endpoints.get(1).getHostString()).isEqualTo("localhost");
                    assertThat(endpoints.get(1).getPort()).isEqualTo(8765);
                });
    }

    @Test
    void whenMultipleUrisHaveHttpsSchemeThenClientConfigurationUsesSsl() {
        this.contextRunner.withPropertyValues("spring.elasticsearch.uris=https://localhost:9876,https://localhost:8765")
                .run((context) -> {
                    ClientConfiguration clientConfiguration = context.getBean(ClientConfiguration.class);
                    List<InetSocketAddress> endpoints = clientConfiguration.getEndpoints();
                    assertThat(endpoints).hasSize(2);
                    assertThat(endpoints.get(0).getHostString()).isEqualTo("localhost");
                    assertThat(endpoints.get(0).getPort()).isEqualTo(9876);
                    assertThat(endpoints.get(1).getHostString()).isEqualTo("localhost");
                    assertThat(endpoints.get(1).getPort()).isEqualTo(8765);
                    assertThat(clientConfiguration.useSsl()).isTrue();
                });
    }

    @Test
    void whenMultipleUrisHaveVaryingSchemesThenRunFails() {
        // Mixing http and https URIs is rejected at context refresh
        this.contextRunner.withPropertyValues("spring.elasticsearch.uris=https://localhost:9876,http://localhost:8765")
                .run((context) -> {
                    assertThat(context).hasFailed();
                    assertThat(context).getFailure().hasRootCauseInstanceOf(IllegalArgumentException.class)
                            .hasRootCauseMessage("Configured Elasticsearch URIs have varying schemes");
                });
    }

    @Test
    void whenUriHasUsernameOnlyThenDefaultAuthorizationHeaderHasUsernameAndEmptyPassword() {
        this.contextRunner.withPropertyValues("spring.elasticsearch.uris=http://user@localhost:9200").run((context) -> {
            ClientConfiguration clientConfiguration = context.getBean(ClientConfiguration.class);
            // "user:" encoded — empty password after the colon
            assertThat(clientConfiguration.getDefaultHeaders().get(HttpHeaders.AUTHORIZATION)).containsExactly(
                    "Basic " + Base64.getEncoder().encodeToString("user:".getBytes(StandardCharsets.UTF_8)));
        });
    }

    @Test
    void whenUriHasUsernameAndPasswordThenDefaultAuthorizationHeaderHasUsernameAndPassword() {
        this.contextRunner.withPropertyValues("spring.elasticsearch.uris=http://user:secret@localhost:9200")
                .run((context) -> {
                    ClientConfiguration clientConfiguration = context.getBean(ClientConfiguration.class);
                    assertThat(clientConfiguration.getDefaultHeaders().get(HttpHeaders.AUTHORIZATION))
                            .containsExactly("Basic " + Base64.getEncoder()
                                    .encodeToString("user:secret".getBytes(StandardCharsets.UTF_8)));
                });
    }

    @Test
    void whenMultipleUrisHaveVaryingUserInfosThenRunFails() {
        // All URIs must agree on user info (including its absence)
        this.contextRunner
                .withPropertyValues("spring.elasticsearch.uris=http://user:secret@localhost:9876,http://localhost:8765")
                .run((context) -> {
                    assertThat(context).hasFailed();
                    assertThat(context).getFailure().hasRootCauseInstanceOf(IllegalArgumentException.class)
                            .hasRootCauseMessage("Configured Elasticsearch URIs have varying user infos");
                });
    }

    @Test
    void whenUriUserInfoMatchesUsernameAndPasswordPropertiesThenDefaultAuthorizationHeaderIsConfigured() {
        this.contextRunner.withPropertyValues("spring.elasticsearch.uris=http://user:secret@localhost:9876",
                "spring.elasticsearch.username=user", "spring.elasticsearch.password=secret").run((context) -> {
                    ClientConfiguration clientConfiguration = context.getBean(ClientConfiguration.class);
                    assertThat(clientConfiguration.getDefaultHeaders().get(HttpHeaders.AUTHORIZATION))
                            .containsExactly("Basic " + Base64.getEncoder()
                                    .encodeToString("user:secret".getBytes(StandardCharsets.UTF_8)));
                });
    }

    @Test
    void whenUriUserInfoAndUsernameAndPasswordPropertiesDoNotMatchThenRunFails() {
        // Conflicting credentials between URI user info and the dedicated properties fail fast
        this.contextRunner
                .withPropertyValues("spring.elasticsearch.uris=http://user:secret@localhost:9876",
                        "spring.elasticsearch.username=alice", "spring.elasticsearch.password=confidential")
                .run((context) -> {
                    assertThat(context).hasFailed();
                    assertThat(context).getFailure().hasRootCauseInstanceOf(IllegalArgumentException.class)
                            .hasRootCauseMessage("Credentials from URI user info do not match those from "
                                    + "spring.elasticsearch.username and spring.elasticsearch.password");
                });
    }

    // Uses the deprecated spring.data.elasticsearch.client.reactive.* property prefix
    @Test
    @Deprecated
    void whenConfiguredToUseSslThenClientConfigurationUsesSsl() {
        this.contextRunner.withPropertyValues("spring.data.elasticsearch.client.reactive.use-ssl=true")
                .run((context) -> assertThat(context.getBean(ClientConfiguration.class).useSsl()).isTrue());
    }

    @Test
    void whenSocketTimeoutIsNotConfiguredThenClientConfigurationUsesDefault() {
        // Default socket timeout is 30 seconds
        this.contextRunner.run((context) -> assertThat(context.getBean(ClientConfiguration.class).getSocketTimeout())
                .isEqualTo(Duration.ofSeconds(30)));
    }

    @Test
    void whenConnectionTimeoutIsNotConfiguredThenClientConfigurationUsesDefault() {
        // Default connect timeout is 1 second
        this.contextRunner.run((context) -> assertThat(context.getBean(ClientConfiguration.class).getConnectTimeout())
                .isEqualTo(Duration.ofSeconds(1)));
    }

    // Runs once per supported property prefix (deprecated and current)
    @ParameterizedPropertyPrefixTest
    void whenSocketTimeoutIsConfiguredThenClientConfigurationHasCustomSocketTimeout(String prefix) {
        this.contextRunner.withPropertyValues(prefix + "socket-timeout=2s")
                .run((context) -> assertThat(context.getBean(ClientConfiguration.class).getSocketTimeout())
                        .isEqualTo(Duration.ofSeconds(2)));
    }

    // Runs once per supported property prefix (deprecated and current)
    @ParameterizedPropertyPrefixTest
    void whenConnectionTimeoutIsConfiguredThenClientConfigurationHasCustomConnectTimeout(String prefix) {
        this.contextRunner.withPropertyValues(prefix + "connection-timeout=2s")
                .run((context) -> assertThat(context.getBean(ClientConfiguration.class).getConnectTimeout())
                        .isEqualTo(Duration.ofSeconds(2)));
    }

    @Test
    void whenPathPrefixIsConfiguredThenClientConfigurationHasPathPrefix() {
        this.contextRunner.withPropertyValues("spring.elasticsearch.path-prefix=/some/prefix")
                .run((context) -> assertThat(context.getBean(ClientConfiguration.class).getPathPrefix())
                        .isEqualTo("/some/prefix"));
    }

    // Runs once per supported property prefix (deprecated and current)
    @ParameterizedPropertyPrefixTest
    void whenCredentialsAreConfiguredThenClientConfigurationHasDefaultAuthorizationHeader(String prefix) {
        // "YWxpY2U6c2VjcmV0" is the Base64 encoding of "alice:secret"
        this.contextRunner.withPropertyValues(prefix + "username=alice", prefix + "password=secret")
                .run((context) -> assertThat(
                        context.getBean(ClientConfiguration.class).getDefaultHeaders().get(HttpHeaders.AUTHORIZATION))
                                .containsExactly("Basic YWxpY2U6c2VjcmV0"));
    }

    @ParameterizedTest
    @ValueSource(strings = { "spring.elasticsearch.webclient.", "spring.data.elasticsearch.client.reactive." })
    void whenMaxInMemorySizeIsConfiguredThenUnderlyingWebClientHasCustomMaxInMemorySize(String prefix) {
        this.contextRunner.withPropertyValues(prefix + "max-in-memory-size=1MB").run((context) -> {
            // Apply the registered configurers to a fresh WebClient and inspect its codec settings
            WebClient client = configureWebClient(context.getBean(ClientConfiguration.class).getClientConfigurers());
            assertThat(client).extracting("exchangeFunction").extracting("strategies").extracting("codecConfigurer")
                    .extracting("defaultCodecs").asInstanceOf(InstanceOfAssertFactories.type(DefaultCodecConfig.class))
                    .extracting(DefaultCodecConfig::maxInMemorySize).isEqualTo(1024 * 1024);
        });
    }

    /**
     * Applies the given client configuration callbacks to a newly created
     * {@link WebClient} and returns the resulting client.
     */
    private WebClient configureWebClient(List<ClientConfigurationCallback<?>> callbacks) {
        WebClient webClient = WebClient.create();
        for (ClientConfigurationCallback<?> callback : callbacks) {
            webClient = ((ReactiveRestClients.WebClientConfigurationCallback) callback).configure(webClient);
        }
        return webClient;
    }

    /** User configuration supplying a custom reactive client bean. */
    @Configuration(proxyBeanMethods = false)
    static class CustomClientConfiguration {

        @Bean
        ReactiveElasticsearchClient customClient() {
            return mock(ReactiveElasticsearchClient.class);
        }

    }

    /** User configuration supplying a custom client configuration bean. */
    @Configuration(proxyBeanMethods = false)
    static class CustomClientConfigConfiguration {

        @Bean
        ClientConfiguration customClientConfiguration() {
            return ClientConfiguration.localhost();
        }

    }

    /**
     * Composed annotation that runs the annotated test once for each supported
     * property prefix: the deprecated {@code spring.data.elasticsearch.client.reactive.}
     * prefix and the current {@code spring.elasticsearch.} prefix.
     */
    @ParameterizedTest
    @Target(ElementType.METHOD)
    @Retention(RetentionPolicy.RUNTIME)
    @ValueSource(strings = { "spring.data.elasticsearch.client.reactive.", "spring.elasticsearch." })
    static @interface ParameterizedPropertyPrefixTest {

    }

}
| |
/*
* Copyright (C) 2017 Simon Vig Therkildsen
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.simonvt.cathode.common.widget;
import android.content.Context;
import android.content.res.Resources;
import android.content.res.TypedArray;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.graphics.Rect;
import android.graphics.RectF;
import android.util.AttributeSet;
import android.util.DisplayMetrics;
import android.util.TypedValue;
import android.view.View;
import java.util.Locale;
import net.simonvt.cathode.common.R;
/**
 * Custom view that draws a circular progress arc with the current value
 * rendered as text at the center. The arc sweep is proportional to
 * {@code value / maxValue}. Styled via the CircularProgressIndicator
 * styleable attributes.
 */
public class CircularProgressIndicator extends View {

  // Value state; minValue is currently unused by the drawing code
  private float minValue;
  private float maxValue;
  private float value;

  // Text rendering state; valueString is the value formatted to one decimal
  private String valueString;
  private int textSize;
  private int textColor;
  private int textBackgroundColor;
  private final Paint textPaint = new Paint();
  private final Rect textBounds = new Rect();
  private float textWidth;
  private final Rect tempBounds = new Rect();

  // Circle rendering state
  private int textMargin;
  private int strokeWidth;
  private final Paint circlePaint = new Paint();
  private final RectF circleBounds = new RectF();
  private int circleBackgroundColor;
  private int circleColor;

  private int minPadding;

  public CircularProgressIndicator(Context context) {
    this(context, null);
  }

  public CircularProgressIndicator(Context context, AttributeSet attrs) {
    this(context, attrs, 0);
  }

  public CircularProgressIndicator(Context context, AttributeSet attrs, int defStyleAttr) {
    super(context, attrs, defStyleAttr);
    // Resolve styled attributes with dp/sp-based defaults
    TypedArray a = context.obtainStyledAttributes(attrs, R.styleable.CircularProgressIndicator,
        R.attr.circularProgressIndicatorStyle, R.style.CircularProgressIndicator);
    final Resources res = getResources();
    final DisplayMetrics dm = res.getDisplayMetrics();

    final int defTextMargin = (int) TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, 12, dm);
    textMargin =
        a.getDimensionPixelSize(R.styleable.CircularProgressIndicator_textMargin, defTextMargin);

    final int defTextColor = 0xFF000000;
    textColor = a.getColor(R.styleable.CircularProgressIndicator_textColor, defTextColor);

    final int defTextSize = (int) TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_SP, 12, dm);
    textSize = a.getDimensionPixelSize(R.styleable.CircularProgressIndicator_textSize, defTextSize);

    textBackgroundColor = a.getColor(R.styleable.CircularProgressIndicator_textBackgroundColor, 0);

    final int defStrokeWidth = (int) TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, 2, dm);
    strokeWidth = a.getDimensionPixelSize(R.styleable.CircularProgressIndicator_circleStrokeWidth,
        defStrokeWidth);

    maxValue = a.getFloat(R.styleable.CircularProgressIndicator_maxValue, 10.0f);

    final int defCircleColor = res.getColor(android.R.color.holo_green_dark);
    circleColor = a.getColor(R.styleable.CircularProgressIndicator_circleColor, defCircleColor);

    circleBackgroundColor =
        a.getColor(R.styleable.CircularProgressIndicator_circleBackgroundColor, 0);

    a.recycle();

    minPadding = (int) TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, 2, dm);

    textPaint.setAntiAlias(true);
    textPaint.setTextSize(textSize);
    textPaint.setColor(textColor);

    circlePaint.setAntiAlias(true);
    circlePaint.setStrokeWidth(strokeWidth);

    setValue(0.0f);
  }

  /**
   * Sets the current value, reformats the centered text and triggers a
   * redraw and re-layout.
   */
  public void setValue(float value) {
    this.value = value;
    // Locale.US keeps the decimal separator stable regardless of device locale
    valueString = String.format(Locale.US, "%.1f", value);
    textPaint.getTextBounds(valueString, 0, valueString.length(), textBounds);
    textWidth = textPaint.measureText(valueString);
    invalidate();
    requestLayout();
  }

  @Override protected void onDraw(Canvas canvas) {
    final int width = getWidth();
    final int height = getHeight();

    // Filled disc behind the text
    circlePaint.setColor(textBackgroundColor);
    circlePaint.setStyle(Paint.Style.FILL);
    canvas.drawArc(circleBounds, 0, 360, false, circlePaint);

    // Full background ring
    circlePaint.setStyle(Paint.Style.STROKE);
    circlePaint.setColor(circleBackgroundColor);
    canvas.drawArc(circleBounds, 0, 360, false, circlePaint);

    // Progress arc, starting at 12 o'clock (-90 degrees)
    circlePaint.setColor(circleColor);
    final int endAngle = (int) (360.0f / maxValue * value);
    canvas.drawArc(circleBounds, -90, endAngle, false, circlePaint);

    // Center the value text
    final float textX = width / 2 - textWidth / 2;
    final float textY = height / 2 + textBounds.height() / 2;
    canvas.drawText(valueString, textX, textY, textPaint);
  }

  @Override protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
    final int widthMode = MeasureSpec.getMode(widthMeasureSpec);
    final int widthSize = MeasureSpec.getSize(widthMeasureSpec);
    final int heightMode = MeasureSpec.getMode(heightMeasureSpec);
    final int heightSize = MeasureSpec.getSize(heightMeasureSpec);

    // Force a square view when at least one dimension is exact
    int width = -1;
    int height = -1;
    if (widthMode == MeasureSpec.EXACTLY && heightMode == MeasureSpec.EXACTLY) {
      width = widthSize;
      height = heightSize;
    } else if (widthMode == MeasureSpec.EXACTLY) {
      width = widthSize;
      height = widthSize;
    } else if (heightMode == MeasureSpec.EXACTLY) {
      width = heightSize;
      height = heightSize;
    }

    // Size the circle around the widest text the view may show (the max value);
    // c is the diagonal of that text's bounding box
    String maxValue = String.valueOf(this.maxValue);
    textPaint.getTextBounds(maxValue, 0, maxValue.length(), tempBounds);
    final int textWidth = tempBounds.width();
    final int textHeight = tempBounds.height();
    float c = (float) Math.sqrt(Math.pow(textWidth, 2) + Math.pow(textHeight, 2));

    if (width == -1) {
      // Unconstrained: derive size from text diagonal plus margins and stroke
      width = (int) (c + textMargin + strokeWidth + 2 * minPadding);
      height = width;
    } else {
      width = Math.max(width, height);
      height = width;
    }

    // Center the circle bounds within the measured square
    final int halfStrokeWidth = strokeWidth / 2;
    final int circleSize = (int) (c + textMargin + halfStrokeWidth);
    final int halfCircleSize = circleSize / 2;
    final int halfWidth = width / 2;
    final int halfHeight = height / 2;
    circleBounds.left = halfWidth - halfCircleSize;
    circleBounds.top = halfHeight - halfCircleSize;
    circleBounds.right = halfWidth + halfCircleSize;
    circleBounds.bottom = halfHeight + halfCircleSize;

    setMeasuredDimension(width, height);
  }
}
| |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.diff.comparison;
import com.intellij.diff.comparison.iterables.DiffIterable;
import com.intellij.diff.comparison.iterables.DiffIterableUtil;
import com.intellij.diff.comparison.iterables.FairDiffIterable;
import com.intellij.diff.fragments.*;
import com.intellij.diff.util.DiffUtil;
import com.intellij.diff.util.IntPair;
import com.intellij.diff.util.MergeRange;
import com.intellij.diff.util.Range;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.util.Consumer;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.diff.FilesTooBigForDiffException;
import com.intellij.util.text.CharSequenceSubSequence;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.ArrayList;
import java.util.BitSet;
import java.util.Collections;
import java.util.List;
import static com.intellij.diff.comparison.iterables.DiffIterableUtil.fair;
import static java.util.Collections.singletonList;
public class ComparisonManagerImpl extends ComparisonManager {
private static final Logger LOG = Logger.getInstance(ComparisonManagerImpl.class);
/**
 * Returns the application-wide comparison manager, downcast to the
 * implementation type.
 */
@NotNull
public static ComparisonManagerImpl getInstanceImpl() {
  ComparisonManager manager = getInstance();
  return (ComparisonManagerImpl)manager;
}
/**
 * Computes a line-level diff of the two texts: splits both into lines,
 * compares the line contents under the given policy and converts the
 * resulting ranges into line fragments.
 */
@NotNull
@Override
public List<LineFragment> compareLines(@NotNull CharSequence text1,
                                       @NotNull CharSequence text2,
                                       @NotNull ComparisonPolicy policy,
                                       @NotNull ProgressIndicator indicator) throws DiffTooBigException {
  List<Line> lines1 = getLines(text1);
  List<Line> lines2 = getLines(text2);

  List<CharSequence> contents1 = ContainerUtil.map(lines1, Line::getContent);
  List<CharSequence> contents2 = ContainerUtil.map(lines2, Line::getContent);

  FairDiffIterable changes = ByLine.compare(contents1, contents2, policy, indicator);
  return convertIntoLineFragments(lines1, lines2, changes);
}
/**
 * Computes a three-way line-level diff (typically base/left/right for a
 * merge) and converts the resulting ranges into merge line fragments.
 */
@NotNull
@Override
public List<MergeLineFragment> compareLines(@NotNull CharSequence text1,
                                            @NotNull CharSequence text2,
                                            @NotNull CharSequence text3,
                                            @NotNull ComparisonPolicy policy,
                                            @NotNull ProgressIndicator indicator) throws DiffTooBigException {
  List<Line> lines1 = getLines(text1);
  List<Line> lines2 = getLines(text2);
  List<Line> lines3 = getLines(text3);

  List<CharSequence> contents1 = ContainerUtil.map(lines1, Line::getContent);
  List<CharSequence> contents2 = ContainerUtil.map(lines2, Line::getContent);
  List<CharSequence> contents3 = ContainerUtil.map(lines3, Line::getContent);

  List<MergeRange> ranges = ByLine.compare(contents1, contents2, contents3, policy, indicator);
  return convertIntoMergeLineFragments(ranges);
}
/**
 * Computes the coarse line-level diff and then refines each changed
 * fragment with inner (word-level) differences.
 */
@NotNull
@Override
public List<LineFragment> compareLinesInner(@NotNull CharSequence text1,
                                            @NotNull CharSequence text2,
                                            @NotNull ComparisonPolicy policy,
                                            @NotNull ProgressIndicator indicator) throws DiffTooBigException {
  return createInnerFragments(compareLines(text1, text2, policy, indicator), text1, text2, policy, indicator);
}
/**
 * Refines each line fragment with inner word-level differences. Fragments
 * that are too big to diff are kept as-is; after
 * {@link FilesTooBigForDiffException#MAX_BAD_LINES} such failures no further
 * fine-grained diffs are attempted.
 */
private static List<LineFragment> createInnerFragments(@NotNull List<LineFragment> lineFragments,
                                                       @NotNull CharSequence text1,
                                                       @NotNull CharSequence text2,
                                                       @NotNull ComparisonPolicy policy,
                                                       @NotNull ProgressIndicator indicator) {
  List<LineFragment> result = new ArrayList<>(lineFragments.size());

  int failedChunks = 0;
  for (LineFragment fragment : lineFragments) {
    assert fragment.getInnerFragments() == null;

    try {
      // Do not try to build fine blocks after few fails
      boolean computeDifferences = failedChunks < FilesTooBigForDiffException.MAX_BAD_LINES;
      result.addAll(createInnerFragments(fragment, text1, text2, policy, indicator, computeDifferences));
    }
    catch (DiffTooBigException e) {
      result.add(fragment);
      failedChunks++;
    }
  }

  return result;
}
/**
 * Computes inner word-level fragments for a single line fragment.
 * <p>
 * Insertions/deletions (one side empty) are returned as a single fragment:
 * with an empty inner-fragment list when the two sides compare equal under
 * the policy (e.g. whitespace-only change), otherwise unchanged. Modified
 * fragments are split by {@link ByWord#compareAndSplit} into per-line-block
 * chunks whose offsets are shifted back into whole-text coordinates.
 *
 * @param tryComputeDifferences when false, skip the word-level diff and
 *                              return the fragment as-is (used after too
 *                              many oversized chunks)
 */
@NotNull
private static List<LineFragment> createInnerFragments(@NotNull LineFragment fragment,
                                                       @NotNull CharSequence text1,
                                                       @NotNull CharSequence text2,
                                                       @NotNull ComparisonPolicy policy,
                                                       @NotNull ProgressIndicator indicator,
                                                       boolean tryComputeDifferences) throws DiffTooBigException {
  CharSequence subSequence1 = text1.subSequence(fragment.getStartOffset1(), fragment.getEndOffset1());
  CharSequence subSequence2 = text2.subSequence(fragment.getStartOffset2(), fragment.getEndOffset2());

  if (fragment.getStartLine1() == fragment.getEndLine1() ||
      fragment.getStartLine2() == fragment.getEndLine2()) { // Insertion / Deletion
    if (ComparisonUtil.isEquals(subSequence1, subSequence2, policy)) {
      // Equal under the policy: mark as having no inner differences
      return singletonList(new LineFragmentImpl(fragment, Collections.emptyList()));
    }
    else {
      return singletonList(fragment);
    }
  }

  if (!tryComputeDifferences) return singletonList(fragment);

  List<ByWord.LineBlock> lineBlocks = ByWord.compareAndSplit(subSequence1, subSequence2, policy, indicator);
  assert lineBlocks.size() != 0;

  // Block offsets are relative to the subsequences; shift them back to
  // whole-text coordinates
  int startOffset1 = fragment.getStartOffset1();
  int startOffset2 = fragment.getStartOffset2();

  int currentStartLine1 = fragment.getStartLine1();
  int currentStartLine2 = fragment.getStartLine2();

  List<LineFragment> chunks = new ArrayList<>();
  for (int i = 0; i < lineBlocks.size(); i++) {
    ByWord.LineBlock block = lineBlocks.get(i);
    Range offsets = block.offsets;

    // special case for last line to void problem with empty last line
    int currentEndLine1 = i != lineBlocks.size() - 1 ? currentStartLine1 + block.newlines1 : fragment.getEndLine1();
    int currentEndLine2 = i != lineBlocks.size() - 1 ? currentStartLine2 + block.newlines2 : fragment.getEndLine2();

    chunks.add(new LineFragmentImpl(currentStartLine1, currentEndLine1, currentStartLine2, currentEndLine2,
                                    offsets.start1 + startOffset1, offsets.end1 + startOffset1,
                                    offsets.start2 + startOffset2, offsets.end2 + startOffset2,
                                    block.fragments));

    currentStartLine1 = currentEndLine1;
    currentStartLine2 = currentEndLine2;
  }
  return chunks;
}
/**
 * @deprecated the precomputed {@code lineFragments} parameter is ignored; use
 *             {@code compareLinesInner(text1, text2, policy, indicator)} directly.
 */
@NotNull
@Override
@Deprecated
public List<LineFragment> compareLinesInner(@NotNull CharSequence text1,
                                            @NotNull CharSequence text2,
                                            @NotNull List<LineFragment> lineFragments,
                                            @NotNull ComparisonPolicy policy,
                                            @NotNull ProgressIndicator indicator) throws DiffTooBigException {
  // Delegates and recomputes the line diff from scratch; lineFragments is intentionally unused.
  return compareLinesInner(text1, text2, policy, indicator);
}
/**
 * Compares two texts word-by-word and returns the changed fragments.
 */
@NotNull
@Override
public List<DiffFragment> compareWords(@NotNull CharSequence text1,
                                       @NotNull CharSequence text2,
                                       @NotNull ComparisonPolicy policy,
                                       @NotNull ProgressIndicator indicator) throws DiffTooBigException {
  return ByWord.compare(text1, text2, policy, indicator);
}
/**
 * Compares two texts character-by-character.
 * <p>
 * Only {@link ComparisonPolicy#DEFAULT} and {@link ComparisonPolicy#IGNORE_WHITESPACES} are
 * supported by the by-char algorithm; any other policy falls back to the DEFAULT behavior
 * with a warning.
 */
@NotNull
@Override
public List<DiffFragment> compareChars(@NotNull CharSequence text1,
                                       @NotNull CharSequence text2,
                                       @NotNull ComparisonPolicy policy,
                                       @NotNull ProgressIndicator indicator) throws DiffTooBigException {
  if (policy == ComparisonPolicy.IGNORE_WHITESPACES) {
    return convertIntoDiffFragments(ByChar.compareIgnoreWhitespaces(text1, text2, indicator));
  }
  // The original duplicated the compareTwoStep call for DEFAULT and the unsupported-policy
  // fallback; warn once for unsupported policies and share a single fallback path.
  if (policy != ComparisonPolicy.DEFAULT) {
    LOG.warn(policy.toString() + " is not supported by ByChar comparison");
  }
  return convertIntoDiffFragments(ByChar.compareTwoStep(text1, text2, indicator));
}
/**
 * Returns whether the two texts are equal under the given comparison policy.
 */
@Override
public boolean isEquals(@NotNull CharSequence text1, @NotNull CharSequence text2, @NotNull ComparisonPolicy policy) {
  return ComparisonUtil.isEquals(text1, text2, policy);
}
//
// Fragments
//
/**
 * Materializes the changed ranges of an iterable diff as {@link DiffFragment}s.
 */
@NotNull
public static List<DiffFragment> convertIntoDiffFragments(@NotNull DiffIterable changes) {
  List<DiffFragment> result = new ArrayList<>();
  for (Range range : changes.iterateChanges()) {
    result.add(new DiffFragmentImpl(range.start1, range.end1, range.start2, range.end2));
  }
  return result;
}
/**
 * Converts line-index change ranges into {@link LineFragment}s, resolving character offsets
 * via the parsed {@code Line} lists.
 */
@NotNull
public static List<LineFragment> convertIntoLineFragments(@NotNull List<Line> lines1,
                                                          @NotNull List<Line> lines2,
                                                          @NotNull FairDiffIterable changes) {
  List<LineFragment> result = new ArrayList<>();
  for (Range change : changes.iterateChanges()) {
    IntPair range1 = getOffsets(lines1, change.start1, change.end1);
    IntPair range2 = getOffsets(lines2, change.start2, change.end2);
    LineFragmentImpl lineFragment = new LineFragmentImpl(change.start1, change.end1, change.start2, change.end2,
                                                         range1.val1, range1.val2, range2.val1, range2.val2);
    result.add(lineFragment);
  }
  return result;
}
/**
 * Maps a [startIndex, endIndex) line range to a pair of character offsets.
 * <p>
 * For an empty range the returned pair is a single insertion point: the start offset of the
 * line at {@code startIndex}, or — when the range lies past the last line — the end offset
 * (including trailing newline) of the final line.
 */
@NotNull
private static IntPair getOffsets(@NotNull List<Line> lines, int startIndex, int endIndex) {
  if (startIndex == endIndex) {
    int offset;
    if (startIndex < lines.size()) {
      offset = lines.get(startIndex).getOffset1();
    }
    else {
      // Range is after the last line: anchor at the very end of the text.
      offset = lines.get(lines.size() - 1).getOffset2();
    }
    return new IntPair(offset, offset);
  }
  else {
    // Start of the first line through the end (incl. newline) of the last line in the range.
    int offset1 = lines.get(startIndex).getOffset1();
    int offset2 = lines.get(endIndex - 1).getOffset2();
    return new IntPair(offset1, offset2);
  }
}
/**
 * Wraps merge conflict ranges as line-level merge fragments.
 */
@NotNull
public static List<MergeLineFragment> convertIntoMergeLineFragments(@NotNull List<MergeRange> conflicts) {
  List<MergeLineFragment> result = new ArrayList<>(conflicts.size());
  for (MergeRange range : conflicts) {
    result.add(new MergeLineFragmentImpl(range.start1, range.end1, range.start2, range.end2, range.start3, range.end3));
  }
  return result;
}
/**
 * Wraps merge conflict ranges as word-level merge fragments.
 */
@NotNull
public static List<MergeWordFragment> convertIntoMergeWordFragments(@NotNull List<MergeRange> conflicts) {
  List<MergeWordFragment> result = new ArrayList<>(conflicts.size());
  for (MergeRange range : conflicts) {
    result.add(new MergeWordFragmentImpl(range.start1, range.end1, range.start2, range.end2, range.start3, range.end3));
  }
  return result;
}
//
// Post process line fragments
//
/**
 * Merges each run of adjoining fragments into a single squashed fragment.
 */
@NotNull
@Override
public List<LineFragment> squash(@NotNull List<LineFragment> oldFragments) {
  if (oldFragments.isEmpty()) return oldFragments;
  List<LineFragment> squashed = new ArrayList<>();
  processAdjoining(oldFragments, adjoining -> squashed.add(doSquash(adjoining)));
  return squashed;
}
/**
 * Post-processes line fragments: optionally trims whitespace-only changes and/or squashes
 * adjoining fragments. Returns the input unchanged when neither operation is requested.
 */
@NotNull
@Override
public List<LineFragment> processBlocks(@NotNull List<LineFragment> oldFragments,
                                        @NotNull final CharSequence text1, @NotNull final CharSequence text2,
                                        @NotNull final ComparisonPolicy policy,
                                        final boolean squash, final boolean trim) {
  if (!squash && !trim) return oldFragments;
  if (oldFragments.isEmpty()) return oldFragments;
  List<LineFragment> processed = new ArrayList<>();
  processAdjoining(oldFragments,
                   adjoining -> processed.addAll(processAdjoining(adjoining, text1, text2, policy, squash, trim)));
  return processed;
}
/**
 * Partitions the fragment list into maximal runs of adjoining fragments and feeds each run
 * (as a sub-list view) to the consumer.
 */
private static void processAdjoining(@NotNull List<LineFragment> oldFragments,
                                     @NotNull Consumer<List<LineFragment>> consumer) {
  int runStart = 0;
  int size = oldFragments.size();
  for (int i = 1; i < size; i++) {
    boolean contiguous = isAdjoining(oldFragments.get(i - 1), oldFragments.get(i));
    if (contiguous) continue;
    consumer.consume(oldFragments.subList(runStart, i));
    runStart = i;
  }
  if (runStart < size) {
    consumer.consume(oldFragments.subList(runStart, size));
  }
}
/**
 * Trims leading/trailing whitespace-only changes from a run of adjoining fragments
 * (only under {@link ComparisonPolicy#IGNORE_WHITESPACES}) and optionally squashes
 * the remainder into a single fragment.
 */
@NotNull
private static List<LineFragment> processAdjoining(@NotNull List<LineFragment> fragments,
                                                   @NotNull CharSequence text1, @NotNull CharSequence text2,
                                                   @NotNull ComparisonPolicy policy, boolean squash, boolean trim) {
  int start = 0;
  int end = fragments.size();
  // TODO: trim empty leading/trailing lines
  if (trim && policy == ComparisonPolicy.IGNORE_WHITESPACES) {
    // The original duplicated the droppable-fragment predicate in two while loops;
    // it is extracted into a shared helper.
    while (start < end && isWhitespaceOnlyChange(fragments.get(start), text1, text2)) {
      start++;
    }
    while (start < end && isWhitespaceOnlyChange(fragments.get(end - 1), text1, text2)) {
      end--;
    }
  }
  if (start == end) return Collections.emptyList();
  if (squash) {
    return singletonList(doSquash(fragments.subList(start, end)));
  }
  return fragments.subList(start, end);
}
/**
 * A fragment is a whitespace-only change when its word-level diff found no inner changes,
 * or when the two sides are equal ignoring whitespace.
 */
private static boolean isWhitespaceOnlyChange(@NotNull LineFragment fragment,
                                              @NotNull CharSequence text1, @NotNull CharSequence text2) {
  if (fragment.getInnerFragments() != null && fragment.getInnerFragments().isEmpty()) return true;
  CharSequenceSubSequence sequence1 = new CharSequenceSubSequence(text1, fragment.getStartOffset1(), fragment.getEndOffset1());
  CharSequenceSubSequence sequence2 = new CharSequenceSubSequence(text2, fragment.getStartOffset2(), fragment.getEndOffset2());
  return StringUtil.equalsIgnoreWhitespaces(sequence1, sequence2);
}
/**
 * Merges a non-empty run of adjoining fragments into one fragment, re-basing all inner
 * fragments onto the first fragment's offsets and coalescing inner fragments that touch
 * across the old fragment boundaries.
 */
@NotNull
private static LineFragment doSquash(@NotNull List<LineFragment> oldFragments) {
  assert !oldFragments.isEmpty();
  if (oldFragments.size() == 1) return oldFragments.get(0);
  LineFragment firstFragment = oldFragments.get(0);
  LineFragment lastFragment = oldFragments.get(oldFragments.size() - 1);
  List<DiffFragment> newInnerFragments = new ArrayList<>();
  for (LineFragment fragment : oldFragments) {
    for (DiffFragment innerFragment : extractInnerFragments(fragment)) {
      // Inner offsets are relative to their own fragment; shift them to be relative to the first.
      int shift1 = fragment.getStartOffset1() - firstFragment.getStartOffset1();
      int shift2 = fragment.getStartOffset2() - firstFragment.getStartOffset2();
      DiffFragment previousFragment = ContainerUtil.getLastItem(newInnerFragments);
      if (previousFragment == null || !isAdjoiningInner(previousFragment, innerFragment, shift1, shift2)) {
        newInnerFragments.add(new DiffFragmentImpl(innerFragment.getStartOffset1() + shift1, innerFragment.getEndOffset1() + shift1,
                                                   innerFragment.getStartOffset2() + shift2, innerFragment.getEndOffset2() + shift2));
      }
      else {
        // Touches the previously emitted inner fragment: replace it with the merged span.
        newInnerFragments.remove(newInnerFragments.size() - 1);
        newInnerFragments.add(new DiffFragmentImpl(previousFragment.getStartOffset1(), innerFragment.getEndOffset1() + shift1,
                                                   previousFragment.getStartOffset2(), innerFragment.getEndOffset2() + shift2));
      }
    }
  }
  return new LineFragmentImpl(firstFragment.getStartLine1(), lastFragment.getEndLine1(),
                              firstFragment.getStartLine2(), lastFragment.getEndLine2(),
                              firstFragment.getStartOffset1(), lastFragment.getEndOffset1(),
                              firstFragment.getStartOffset2(), lastFragment.getEndOffset2(),
                              newInnerFragments);
}
/**
 * Returns true when {@code afterFragment} starts exactly where {@code beforeFragment} ends,
 * both in line indices and in character offsets.
 */
private static boolean isAdjoining(@NotNull LineFragment beforeFragment, @NotNull LineFragment afterFragment) {
  // Idiom fix: return the condition directly instead of `if (...) return false; return true;`.
  return beforeFragment.getEndLine1() == afterFragment.getStartLine1() &&
         beforeFragment.getEndLine2() == afterFragment.getStartLine2() &&
         beforeFragment.getEndOffset1() == afterFragment.getStartOffset1() &&
         beforeFragment.getEndOffset2() == afterFragment.getStartOffset2();
}
/**
 * Returns true when the shifted {@code afterFragment} starts exactly where
 * {@code beforeFragment} ends on both sides.
 */
private static boolean isAdjoiningInner(@NotNull DiffFragment beforeFragment, @NotNull DiffFragment afterFragment,
                                        int shift1, int shift2) {
  return beforeFragment.getEndOffset1() == afterFragment.getStartOffset1() + shift1 &&
         beforeFragment.getEndOffset2() == afterFragment.getStartOffset2() + shift2;
}
/**
 * Returns the fragment's inner fragments, or — when none were computed — a single inner
 * fragment spanning the whole fragment (offsets relative to the fragment start).
 */
@NotNull
private static List<DiffFragment> extractInnerFragments(@NotNull LineFragment lineFragment) {
  List<DiffFragment> inner = lineFragment.getInnerFragments();
  if (inner != null) return inner;
  int length1 = lineFragment.getEndOffset1() - lineFragment.getStartOffset1();
  int length2 = lineFragment.getEndOffset2() - lineFragment.getStartOffset2();
  DiffFragmentImpl wholeSpan = new DiffFragmentImpl(0, length1, 0, length2);
  return singletonList(wholeSpan);
}
/**
 * Splits the text into lines. Every '\n' terminates a line; the final (possibly empty)
 * line carries no trailing newline, so the result is never empty.
 */
@NotNull
private static List<Line> getLines(@NotNull CharSequence text) {
  List<Line> result = new ArrayList<>();
  int start = 0;
  int separator;
  while ((separator = StringUtil.indexOf(text, '\n', start)) != -1) {
    result.add(new Line(text, start, separator, true));
    start = separator + 1;
  }
  result.add(new Line(text, start, text.length(), false));
  return result;
}
/**
 * Compare two texts by-line and then compare changed fragments by-word,
 * treating the given character ranges as ignored.
 *
 * @param ignoredRanges1 character ranges in text1 to exclude from comparison
 * @param ignoredRanges2 character ranges in text2 to exclude from comparison
 * @param innerFragments whether to additionally compute word-level inner fragments
 */
@NotNull
public List<LineFragment> compareLinesWithIgnoredRanges(@NotNull CharSequence text1,
                                                        @NotNull CharSequence text2,
                                                        @NotNull List<TextRange> ignoredRanges1,
                                                        @NotNull List<TextRange> ignoredRanges2,
                                                        boolean innerFragments,
                                                        @NotNull ProgressIndicator indicator) throws DiffTooBigException {
  BitSet ignored1 = collectIgnoredRanges(ignoredRanges1);
  BitSet ignored2 = collectIgnoredRanges(ignoredRanges2);
  List<Line> lines1 = getLines(text1);
  List<Line> lines2 = getLines(text2);
  // Compare the lines with ignored characters stripped out.
  List<CharSequence> lineTexts1 = ContainerUtil.map(lines1, line -> line.getNotIgnoredContent(ignored1));
  List<CharSequence> lineTexts2 = ContainerUtil.map(lines2, line -> line.getNotIgnoredContent(ignored2));
  FairDiffIterable iterable = ByLine.compare(lineTexts1, lineTexts2, ComparisonPolicy.DEFAULT, indicator);
  // Re-validate "unchanged" pairs against the original (unstripped) lines.
  FairDiffIterable correctedIterable = correctIgnoredRangesSecondStep(iterable, lines1, lines2, ignored1, ignored2);
  List<LineFragment> lineFragments = convertIntoLineFragments(lines1, lines2, correctedIterable);
  if (innerFragments) {
    lineFragments = createInnerFragments(lineFragments, text1, text2, ComparisonPolicy.DEFAULT, indicator);
  }
  // Drop or shrink fragments that consist only of ignored content.
  return ContainerUtil.mapNotNull(lineFragments, fragment -> {
    return trimIgnoredChanges(fragment, lines1, lines2, ignored1, ignored2);
  });
}
/**
 * Flattens the given ranges into a BitSet where every ignored character offset is set.
 */
@NotNull
public static BitSet collectIgnoredRanges(@NotNull List<TextRange> ignoredRanges) {
  BitSet ignored = new BitSet();
  ignoredRanges.forEach(range -> ignored.set(range.getStartOffset(), range.getEndOffset()));
  return ignored;
}
/**
 * Second pass over the ignored-content diff: keeps a line pair marked "equal" only if the
 * original lines are equal once ignored ranges are taken into account, rebuilding the
 * iterable from the surviving equal pairs.
 */
@NotNull
private static FairDiffIterable correctIgnoredRangesSecondStep(@NotNull FairDiffIterable iterable,
                                                               @NotNull List<Line> lines1,
                                                               @NotNull List<Line> lines2,
                                                               @NotNull BitSet ignored1,
                                                               @NotNull BitSet ignored2) {
  DiffIterableUtil.ChangeBuilder builder = new DiffIterableUtil.ChangeBuilder(lines1.size(), lines2.size());
  for (Range range : iterable.iterateUnchanged()) {
    int count = range.end1 - range.start1;
    for (int i = 0; i < count; i++) {
      int index1 = range.start1 + i;
      int index2 = range.start2 + i;
      // Only re-confirmed pairs stay equal; the rest implicitly become changes.
      if (areIgnoredEqualLines(lines1.get(index1), lines2.get(index2), ignored1, ignored2)) {
        builder.markEqual(index1, index2);
      }
    }
  }
  return fair(builder.finish());
}
/**
 * Shrinks a fragment by dropping fully-ignored leading/trailing lines and trimming ignored
 * characters from its inner fragments. Returns null when nothing remains after trimming.
 */
@Nullable
private static LineFragment trimIgnoredChanges(@NotNull LineFragment fragment,
                                               @NotNull List<Line> lines1,
                                               @NotNull List<Line> lines2,
                                               @NotNull BitSet ignored1,
                                               @NotNull BitSet ignored2) {
  // trim ignored lines
  Range range = TrimUtil.trimExpandList(lines1, lines2,
                                        fragment.getStartLine1(), fragment.getStartLine2(),
                                        fragment.getEndLine1(), fragment.getEndLine2(),
                                        (line1, line2) -> areIgnoredEqualLines(line1, line2, ignored1, ignored2),
                                        line -> isIgnoredLine(line, ignored1),
                                        line -> isIgnoredLine(line, ignored2));
  int startLine1 = range.start1;
  int startLine2 = range.start2;
  int endLine1 = range.end1;
  int endLine2 = range.end2;
  // Both sides trimmed to nothing: the whole change was ignored.
  if (startLine1 == endLine1 && startLine2 == endLine2) return null;
  IntPair offsets1 = getOffsets(lines1, startLine1, endLine1);
  IntPair offsets2 = getOffsets(lines2, startLine2, endLine2);
  int startOffset1 = offsets1.val1;
  int endOffset1 = offsets1.val2;
  int startOffset2 = offsets2.val1;
  int endOffset2 = offsets2.val2;
  List<DiffFragment> newInner = null;
  if (fragment.getInnerFragments() != null) {
    // Inner offsets were relative to the old fragment start; re-base them to the trimmed one.
    int shift1 = startOffset1 - fragment.getStartOffset1();
    int shift2 = startOffset2 - fragment.getStartOffset2();
    int newCount1 = endOffset1 - startOffset1;
    int newCount2 = endOffset2 - startOffset2;
    newInner = ContainerUtil.mapNotNull(fragment.getInnerFragments(), it -> {
      // update offsets, as some lines might have been ignored completely
      int start1 = DiffUtil.bound(it.getStartOffset1() - shift1, 0, newCount1);
      int start2 = DiffUtil.bound(it.getStartOffset2() - shift2, 0, newCount2);
      int end1 = DiffUtil.bound(it.getEndOffset1() - shift1, 0, newCount1);
      int end2 = DiffUtil.bound(it.getEndOffset2() - shift2, 0, newCount2);
      // trim inner fragments
      TextRange range1 = trimIgnoredRange(start1, end1, ignored1, startOffset1);
      TextRange range2 = trimIgnoredRange(start2, end2, ignored2, startOffset2);
      if (range1.isEmpty() && range2.isEmpty()) return null;
      return new DiffFragmentImpl(range1.getStartOffset(), range1.getEndOffset(),
                                  range2.getStartOffset(), range2.getEndOffset());
    });
    // All inner fragments vanished: drop the whole line fragment.
    if (newInner.isEmpty()) return null;
  }
  return new LineFragmentImpl(startLine1, endLine1, startLine2, endLine2,
                              startOffset1, endOffset1, startOffset2, endOffset2,
                              newInner);
}
// True when every character of the line (incl. its newline) is ignored.
private static boolean isIgnoredLine(@NotNull Line line, @NotNull BitSet ignored) {
  return isIgnoredRange(ignored, line.getOffset1(), line.getOffset2());
}
/**
 * Returns true when the two lines are equal once ignored characters are excluded:
 * first a quick trim/expand check over the raw text, then a word-by-word comparison
 * of the non-ignored words.
 */
private static boolean areIgnoredEqualLines(@NotNull Line line1, @NotNull Line line2,
                                            @NotNull BitSet ignored1, @NotNull BitSet ignored2) {
  int start1 = line1.getOffset1();
  int end1 = line1.getOffset2();
  int start2 = line2.getOffset1();
  int end2 = line2.getOffset2();
  Range range = TrimUtil.trimExpandText(line1.getOriginalText(), line2.getOriginalText(),
                                        start1, start2, end1, end2,
                                        ignored1, ignored2);
  // Non-empty remainder means a real (non-ignored) textual difference.
  if (!range.isEmpty()) return false;
  List<ByWord.InlineChunk> words1 = getNonIgnoredWords(line1, ignored1);
  List<ByWord.InlineChunk> words2 = getNonIgnoredWords(line2, ignored2);
  if (words1.size() != words2.size()) return false;
  for (int i = 0; i < words1.size(); i++) {
    CharSequence word1 = getWordContent(line1, words1.get(i));
    CharSequence word2 = getWordContent(line2, words2.get(i));
    if (!StringUtil.equals(word1, word2)) return false;
  }
  return true;
}
// Word chunks of the line whose (text-absolute) character range is not fully ignored.
@NotNull
private static List<ByWord.InlineChunk> getNonIgnoredWords(@NotNull Line line, @NotNull BitSet ignored) {
  int offset = line.getOffset1();
  List<ByWord.InlineChunk> innerChunks = ByWord.getInlineChunks(line.getContent());
  return ContainerUtil.filter(innerChunks, it -> it instanceof ByWord.WordChunk &&
                                                 !isIgnoredRange(ignored, offset + it.getOffset1(), offset + it.getOffset2()));
}
// The text of a single word chunk, relative to the line content.
@NotNull
private static CharSequence getWordContent(@NotNull Line line, @NotNull ByWord.InlineChunk word) {
  return line.getContent().subSequence(word.getOffset1(), word.getOffset2());
}
// Trims ignored characters off both ends of a fragment-relative [start, end) range.
// `offset` converts between fragment-relative and text-absolute positions.
@NotNull
private static TextRange trimIgnoredRange(int start, int end, @NotNull BitSet ignored, int offset) {
  IntPair intPair = TrimUtil.trim(offset + start, offset + end, ignored);
  return new TextRange(intPair.val1 - offset, intPair.val2 - offset);
}
// True when every bit in [start, end) is set, i.e. the whole range is ignored.
private static boolean isIgnoredRange(@NotNull BitSet ignored, int start, int end) {
  return ignored.nextClearBit(start) >= end;
}
/**
 * A single line of a text: [myOffset1, myOffset2) are the character bounds of the line
 * content (excluding the line separator); myNewline records whether a '\n' follows.
 */
private static class Line {
  @NotNull private final CharSequence myChars; // the whole original text, not just this line
  private final int myOffset1;  // start offset of the line content
  private final int myOffset2;  // end offset of the line content, exclusive, before the '\n'
  private final boolean myNewline; // whether the line is terminated by '\n'
  public Line(@NotNull CharSequence chars, int offset1, int offset2, boolean newline) {
    myChars = chars;
    myOffset1 = offset1;
    myOffset2 = offset2;
    myNewline = newline;
  }
  // Start offset of the line in the original text.
  public int getOffset1() {
    return myOffset1;
  }
  // End offset of the line including its trailing '\n' when present.
  public int getOffset2() {
    return myOffset2 + (myNewline ? 1 : 0);
  }
  // Line content without the trailing newline, as a view over the original text.
  @NotNull
  public CharSequence getContent() {
    return new CharSequenceSubSequence(myChars, myOffset1, myOffset2);
  }
  // Line content with all characters marked in `ignored` removed (copies into a new String).
  @NotNull
  public CharSequence getNotIgnoredContent(@NotNull BitSet ignored) {
    StringBuilder sb = new StringBuilder();
    for (int i = myOffset1; i < myOffset2; i++) {
      if (ignored.get(i)) continue;
      sb.append(myChars.charAt(i));
    }
    return sb.toString();
  }
  // The whole text this line was parsed from.
  @NotNull
  public CharSequence getOriginalText() {
    return myChars;
  }
}
}
| |
package com.airbnb.epoxy;
import android.view.View;
import androidx.annotation.LayoutRes;
import androidx.annotation.Nullable;
import java.lang.CharSequence;
import java.lang.Number;
import java.lang.Object;
import java.lang.Override;
import java.lang.String;
/**
* Generated file. Do not modify!
*/
// NOTE(review): this class is generated by the Epoxy annotation processor ("Generated file.
// Do not modify!" above). It must not be hand-edited — regenerate it instead. Comments below
// are for reader orientation only.
public class ModelWithViewClickListener_ extends ModelWithViewClickListener implements GeneratedModel<Object>, ModelWithViewClickListenerBuilder {
  // Optional lifecycle callbacks; null until a listener is registered via the on* builders.
  private OnModelBoundListener<ModelWithViewClickListener_, Object> onModelBoundListener_epoxyGeneratedModel;
  private OnModelUnboundListener<ModelWithViewClickListener_, Object> onModelUnboundListener_epoxyGeneratedModel;
  private OnModelVisibilityStateChangedListener<ModelWithViewClickListener_, Object> onModelVisibilityStateChangedListener_epoxyGeneratedModel;
  private OnModelVisibilityChangedListener<ModelWithViewClickListener_, Object> onModelVisibilityChangedListener_epoxyGeneratedModel;
  public ModelWithViewClickListener_() {
    super();
  }
  @Override
  public void addTo(EpoxyController controller) {
    super.addTo(controller);
    addWithDebugValidation(controller);
  }
  @Override
  public void handlePreBind(final EpoxyViewHolder holder, final Object object, final int position) {
    validateStateHasNotChangedSinceAdded("The model was changed between being added to the controller and being bound.", position);
  }
  @Override
  public void handlePostBind(final Object object, int position) {
    if (onModelBoundListener_epoxyGeneratedModel != null) {
      onModelBoundListener_epoxyGeneratedModel.onModelBound(this, object, position);
    }
    validateStateHasNotChangedSinceAdded("The model was changed during the bind call.", position);
  }
  /**
   * Register a listener that will be called when this model is bound to a view.
   * <p>
   * The listener will contribute to this model's hashCode state per the {@link
   * com.airbnb.epoxy.EpoxyAttribute.Option#DoNotHash} rules.
   * <p>
   * You may clear the listener by setting a null value, or by calling {@link #reset()}
   */
  public ModelWithViewClickListener_ onBind(
      OnModelBoundListener<ModelWithViewClickListener_, Object> listener) {
    onMutation();
    this.onModelBoundListener_epoxyGeneratedModel = listener;
    return this;
  }
  @Override
  public void unbind(Object object) {
    super.unbind(object);
    if (onModelUnboundListener_epoxyGeneratedModel != null) {
      onModelUnboundListener_epoxyGeneratedModel.onModelUnbound(this, object);
    }
  }
  /**
   * Register a listener that will be called when this model is unbound from a view.
   * <p>
   * The listener will contribute to this model's hashCode state per the {@link
   * com.airbnb.epoxy.EpoxyAttribute.Option#DoNotHash} rules.
   * <p>
   * You may clear the listener by setting a null value, or by calling {@link #reset()}
   */
  public ModelWithViewClickListener_ onUnbind(
      OnModelUnboundListener<ModelWithViewClickListener_, Object> listener) {
    onMutation();
    this.onModelUnboundListener_epoxyGeneratedModel = listener;
    return this;
  }
  @Override
  public void onVisibilityStateChanged(int visibilityState, final Object object) {
    if (onModelVisibilityStateChangedListener_epoxyGeneratedModel != null) {
      onModelVisibilityStateChangedListener_epoxyGeneratedModel.onVisibilityStateChanged(this, object, visibilityState);
    }
    super.onVisibilityStateChanged(visibilityState, object);
  }
  /**
   * Register a listener that will be called when this model visibility state has changed.
   * <p>
   * The listener will contribute to this model's hashCode state per the {@link
   * com.airbnb.epoxy.EpoxyAttribute.Option#DoNotHash} rules.
   */
  public ModelWithViewClickListener_ onVisibilityStateChanged(
      OnModelVisibilityStateChangedListener<ModelWithViewClickListener_, Object> listener) {
    onMutation();
    this.onModelVisibilityStateChangedListener_epoxyGeneratedModel = listener;
    return this;
  }
  @Override
  public void onVisibilityChanged(float percentVisibleHeight, float percentVisibleWidth,
      int visibleHeight, int visibleWidth, final Object object) {
    if (onModelVisibilityChangedListener_epoxyGeneratedModel != null) {
      onModelVisibilityChangedListener_epoxyGeneratedModel.onVisibilityChanged(this, object, percentVisibleHeight, percentVisibleWidth, visibleHeight, visibleWidth);
    }
    super.onVisibilityChanged(percentVisibleHeight, percentVisibleWidth, visibleHeight, visibleWidth, object);
  }
  /**
   * Register a listener that will be called when this model visibility has changed.
   * <p>
   * The listener will contribute to this model's hashCode state per the {@link
   * com.airbnb.epoxy.EpoxyAttribute.Option#DoNotHash} rules.
   */
  public ModelWithViewClickListener_ onVisibilityChanged(
      OnModelVisibilityChangedListener<ModelWithViewClickListener_, Object> listener) {
    onMutation();
    this.onModelVisibilityChangedListener_epoxyGeneratedModel = listener;
    return this;
  }
  /**
   * Set a click listener that will provide the parent view, model, and adapter position of the clicked view. This will clear the normal View.OnClickListener if one has been set
   */
  public ModelWithViewClickListener_ clickListener(
      final OnModelClickListener<ModelWithViewClickListener_, Object> clickListener) {
    onMutation();
    if (clickListener == null) {
      super.clickListener = null;
    }
    else {
      // Model-aware listener is adapted to a plain View.OnClickListener via the wrapper.
      super.clickListener = new WrappedEpoxyModelClickListener(clickListener);
    }
    return this;
  }
  public ModelWithViewClickListener_ clickListener(View.OnClickListener clickListener) {
    onMutation();
    super.clickListener = clickListener;
    return this;
  }
  public View.OnClickListener clickListener() {
    return clickListener;
  }
  @Override
  public ModelWithViewClickListener_ id(long id) {
    super.id(id);
    return this;
  }
  @Override
  public ModelWithViewClickListener_ id(@Nullable Number... ids) {
    super.id(ids);
    return this;
  }
  @Override
  public ModelWithViewClickListener_ id(long id1, long id2) {
    super.id(id1, id2);
    return this;
  }
  @Override
  public ModelWithViewClickListener_ id(@Nullable CharSequence key) {
    super.id(key);
    return this;
  }
  @Override
  public ModelWithViewClickListener_ id(@Nullable CharSequence key,
      @Nullable CharSequence... otherKeys) {
    super.id(key, otherKeys);
    return this;
  }
  @Override
  public ModelWithViewClickListener_ id(@Nullable CharSequence key, long id) {
    super.id(key, id);
    return this;
  }
  @Override
  public ModelWithViewClickListener_ layout(@LayoutRes int layoutRes) {
    super.layout(layoutRes);
    return this;
  }
  @Override
  public ModelWithViewClickListener_ spanSizeOverride(
      @Nullable EpoxyModel.SpanSizeOverrideCallback spanSizeCallback) {
    super.spanSizeOverride(spanSizeCallback);
    return this;
  }
  @Override
  public ModelWithViewClickListener_ show() {
    super.show();
    return this;
  }
  @Override
  public ModelWithViewClickListener_ show(boolean show) {
    super.show(show);
    return this;
  }
  @Override
  public ModelWithViewClickListener_ hide() {
    super.hide();
    return this;
  }
  // Clears all generated listeners and the click listener before delegating to the base reset.
  @Override
  public ModelWithViewClickListener_ reset() {
    onModelBoundListener_epoxyGeneratedModel = null;
    onModelUnboundListener_epoxyGeneratedModel = null;
    onModelVisibilityStateChangedListener_epoxyGeneratedModel = null;
    onModelVisibilityChangedListener_epoxyGeneratedModel = null;
    super.clickListener = null;
    super.reset();
    return this;
  }
  // Listeners participate in equals/hashCode only by null-ness (DoNotHash semantics).
  @Override
  public boolean equals(Object o) {
    if (o == this) {
      return true;
    }
    if (!(o instanceof ModelWithViewClickListener_)) {
      return false;
    }
    if (!super.equals(o)) {
      return false;
    }
    ModelWithViewClickListener_ that = (ModelWithViewClickListener_) o;
    if (((onModelBoundListener_epoxyGeneratedModel == null) != (that.onModelBoundListener_epoxyGeneratedModel == null))) {
      return false;
    }
    if (((onModelUnboundListener_epoxyGeneratedModel == null) != (that.onModelUnboundListener_epoxyGeneratedModel == null))) {
      return false;
    }
    if (((onModelVisibilityStateChangedListener_epoxyGeneratedModel == null) != (that.onModelVisibilityStateChangedListener_epoxyGeneratedModel == null))) {
      return false;
    }
    if (((onModelVisibilityChangedListener_epoxyGeneratedModel == null) != (that.onModelVisibilityChangedListener_epoxyGeneratedModel == null))) {
      return false;
    }
    if (((clickListener == null) != (that.clickListener == null))) {
      return false;
    }
    return true;
  }
  @Override
  public int hashCode() {
    int _result = super.hashCode();
    _result = 31 * _result + (onModelBoundListener_epoxyGeneratedModel != null ? 1 : 0);
    _result = 31 * _result + (onModelUnboundListener_epoxyGeneratedModel != null ? 1 : 0);
    _result = 31 * _result + (onModelVisibilityStateChangedListener_epoxyGeneratedModel != null ? 1 : 0);
    _result = 31 * _result + (onModelVisibilityChangedListener_epoxyGeneratedModel != null ? 1 : 0);
    _result = 31 * _result + (clickListener != null ? 1 : 0);
    return _result;
  }
  @Override
  public String toString() {
    return "ModelWithViewClickListener_{" +
        "clickListener=" + clickListener +
        "}" + super.toString();
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.streams.kstream.internals;
import org.apache.kafka.common.serialization.IntegerDeserializer;
import org.apache.kafka.common.serialization.IntegerSerializer;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.kafka.common.utils.Utils;
import org.apache.kafka.streams.kstream.JoinWindows;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.KStreamBuilder;
import org.apache.kafka.streams.kstream.ValueJoiner;
import org.apache.kafka.test.KStreamTestDriver;
import org.apache.kafka.test.MockProcessorSupplier;
import org.junit.Test;
import java.io.File;
import java.nio.file.Files;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.Set;
import static org.junit.Assert.assertEquals;
public class KStreamKStreamLeftJoinTest {
private String topic1 = "topic1"; // primary (left) input topic
private String topic2 = "topic2"; // other (right) input topic
private IntegerSerializer keySerializer = new IntegerSerializer();
private StringSerializer valSerializer = new StringSerializer();
private IntegerDeserializer keyDeserializer = new IntegerDeserializer();
private StringDeserializer valDeserializer = new StringDeserializer();
// Joins the two sides as "left+right"; the right value is null when no match is in the window.
private ValueJoiner<String, String, String> joiner = new ValueJoiner<String, String, String>() {
  @Override
  public String apply(String value1, String value2) {
    return value1 + "+" + value2;
  }
};
@Test
public void testLeftJoin() throws Exception {
  File baseDir = Files.createTempDirectory("test").toFile();
  try {
    KStreamBuilder builder = new KStreamBuilder();
    final int[] expectedKeys = new int[]{0, 1, 2, 3};
    KStream<Integer, String> stream1;
    KStream<Integer, String> stream2;
    KStream<Integer, String> joined;
    MockProcessorSupplier<Integer, String> processor;
    processor = new MockProcessorSupplier<>();
    stream1 = builder.stream(keyDeserializer, valDeserializer, topic1);
    stream2 = builder.stream(keyDeserializer, valDeserializer, topic2);
    joined = stream1.leftJoin(stream2, joiner, JoinWindows.of("test").within(100),
                              keySerializer, valSerializer, keyDeserializer, valDeserializer);
    joined.process(processor);
    Collection<Set<String>> copartitionGroups = builder.copartitionGroups();
    assertEquals(1, copartitionGroups.size());
    assertEquals(new HashSet<>(Arrays.asList(topic1, topic2)), copartitionGroups.iterator().next());
    KStreamTestDriver driver = new KStreamTestDriver(builder, baseDir);
    driver.setTime(0L);
    // push two items to the primary stream. the other window is empty,
    // so this should produce two left-join items with a null right side.
    // w {}
    // --> w = {}
    for (int i = 0; i < 2; i++) {
      driver.process(topic1, expectedKeys[i], "X" + expectedKeys[i]);
    }
    processor.checkAndClearResult("0:X0+null", "1:X1+null");
    // push two items to the other stream. this should produce no items
    // (only the primary side of a left join triggers output).
    // w {}
    // --> w = { 0:Y0, 1:Y1 }
    for (int i = 0; i < 2; i++) {
      driver.process(topic2, expectedKeys[i], "Y" + expectedKeys[i]);
    }
    processor.checkAndClearResult();
    // push all four items to the primary stream. this should produce four items.
    // w = { 0:Y0, 1:Y1 }
    // --> w = { 0:Y0, 1:Y1 }
    for (int i = 0; i < expectedKeys.length; i++) {
      driver.process(topic1, expectedKeys[i], "X" + expectedKeys[i]);
    }
    processor.checkAndClearResult("0:X0+Y0", "1:X1+Y1", "2:X2+null", "3:X3+null");
    // push all items to the other stream. this should produce no items.
    // w = { 0:Y0, 1:Y1 }
    // --> w = { 0:Y0, 1:Y1, 0:YY0, 0:YY0, 1:YY1, 2:YY2, 3:YY3 }
    for (int i = 0; i < expectedKeys.length; i++) {
      driver.process(topic2, expectedKeys[i], "YY" + expectedKeys[i]);
    }
    processor.checkAndClearResult();
    // push all four items to the primary stream. keys 0 and 1 each match two windowed
    // values (Y and YY), so this should produce six items.
    // w = { 0:Y0, 1:Y1, 0:YY0, 0:YY0, 1:YY1, 2:YY2, 3:YY3
    // --> w = { 0:Y0, 1:Y1, 0:YY0, 0:YY0, 1:YY1, 2:YY2, 3:YY3 }
    for (int i = 0; i < expectedKeys.length; i++) {
      driver.process(topic1, expectedKeys[i], "XX" + expectedKeys[i]);
    }
    processor.checkAndClearResult("0:XX0+Y0", "0:XX0+YY0", "1:XX1+Y1", "1:XX1+YY1", "2:XX2+YY2", "3:XX3+YY3");
  } finally {
    Utils.delete(baseDir);
  }
}
@Test
public void testWindowing() throws Exception {
    File stateDir = Files.createTempDirectory("test").toFile();
    try {
        long timestamp = 0L;
        KStreamBuilder topology = new KStreamBuilder();
        final int[] allKeys = {0, 1, 2, 3};

        // Build a left (outer) join of topic1 against topic2 over a +/-100ms window
        // and capture the joined output with a mock processor.
        MockProcessorSupplier<Integer, String> collector = new MockProcessorSupplier<>();
        KStream<Integer, String> left = topology.stream(keyDeserializer, valDeserializer, topic1);
        KStream<Integer, String> right = topology.stream(keyDeserializer, valDeserializer, topic2);
        KStream<Integer, String> outerJoined = left.leftJoin(right, joiner, JoinWindows.of("test").within(100),
                keySerializer, valSerializer, keyDeserializer, valDeserializer);
        outerJoined.process(collector);

        // Both source topics must end up in a single copartition group.
        Collection<Set<String>> groups = topology.copartitionGroups();
        assertEquals(1, groups.size());
        assertEquals(new HashSet<>(Arrays.asList(topic1, topic2)), groups.iterator().next());

        KStreamTestDriver driver = new KStreamTestDriver(topology, stateDir);
        driver.setTime(timestamp);

        // Two records on the primary stream while the other window is empty:
        // a left join still emits, pairing each with null.
        for (int k = 0; k < 2; k++) {
            driver.process(topic1, allKeys[k], "X" + allKeys[k]);
        }
        collector.checkAndClearResult("0:X0+null", "1:X1+null");

        // Two records on the secondary stream populate its window but emit nothing.
        for (int k = 0; k < 2; k++) {
            driver.process(topic2, allKeys[k], "Y" + allKeys[k]);
        }
        collector.checkAndClearResult();

        // Jump far forward so the earlier window contents are logically expired,
        // then fill the secondary window at timestamps 1000..1003.
        timestamp = 1000L;
        for (int k = 0; k < allKeys.length; k++) {
            driver.setTime(timestamp + k);
            driver.process(topic2, allKeys[k], "Y" + allKeys[k]);
        }
        collector.checkAndClearResult();

        // Sit exactly on the upper edge of the join window: everything still matches.
        timestamp = 1000L + 100L;
        driver.setTime(timestamp);
        for (int key : allKeys) {
            driver.process(topic1, key, "XX" + key);
        }
        collector.checkAndClearResult("0:XX0+Y0", "1:XX1+Y1", "2:XX2+Y2", "3:XX3+Y3");

        // Each 1ms step past the edge expires one more right-hand record.
        driver.setTime(++timestamp);
        for (int key : allKeys) {
            driver.process(topic1, key, "XX" + key);
        }
        collector.checkAndClearResult("0:XX0+null", "1:XX1+Y1", "2:XX2+Y2", "3:XX3+Y3");

        driver.setTime(++timestamp);
        for (int key : allKeys) {
            driver.process(topic1, key, "XX" + key);
        }
        collector.checkAndClearResult("0:XX0+null", "1:XX1+null", "2:XX2+Y2", "3:XX3+Y3");

        driver.setTime(++timestamp);
        for (int key : allKeys) {
            driver.process(topic1, key, "XX" + key);
        }
        collector.checkAndClearResult("0:XX0+null", "1:XX1+null", "2:XX2+null", "3:XX3+Y3");

        driver.setTime(++timestamp);
        for (int key : allKeys) {
            driver.process(topic1, key, "XX" + key);
        }
        collector.checkAndClearResult("0:XX0+null", "1:XX1+null", "2:XX2+null", "3:XX3+null");

        // Now approach the window from below: just before the lower edge nothing matches.
        timestamp = 1000L - 100L - 1L;
        driver.setTime(timestamp);
        for (int key : allKeys) {
            driver.process(topic1, key, "XX" + key);
        }
        collector.checkAndClearResult("0:XX0+null", "1:XX1+null", "2:XX2+null", "3:XX3+null");

        // Each 1ms step forward brings one more right-hand record into range.
        driver.setTime(++timestamp);
        for (int key : allKeys) {
            driver.process(topic1, key, "XX" + key);
        }
        collector.checkAndClearResult("0:XX0+Y0", "1:XX1+null", "2:XX2+null", "3:XX3+null");

        driver.setTime(++timestamp);
        for (int key : allKeys) {
            driver.process(topic1, key, "XX" + key);
        }
        collector.checkAndClearResult("0:XX0+Y0", "1:XX1+Y1", "2:XX2+null", "3:XX3+null");

        driver.setTime(++timestamp);
        for (int key : allKeys) {
            driver.process(topic1, key, "XX" + key);
        }
        collector.checkAndClearResult("0:XX0+Y0", "1:XX1+Y1", "2:XX2+Y2", "3:XX3+null");

        driver.setTime(++timestamp);
        for (int key : allKeys) {
            driver.process(topic1, key, "XX" + key);
        }
        collector.checkAndClearResult("0:XX0+Y0", "1:XX1+Y1", "2:XX2+Y2", "3:XX3+Y3");
    } finally {
        Utils.delete(stateDir);
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.vault.packaging;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.Locale;

import org.apache.jackrabbit.util.Text;
/**
* The sub package handling specifies how sub package are handled during recursive package installation. This
* configuration consists of a list of {@link org.apache.jackrabbit.vault.packaging.SubPackageHandling.Entry}s that
* match against a given {@link org.apache.jackrabbit.vault.packaging.PackageId}. The version of the package id is
* ignored.
* <p>
* The sub package handling can be specified in the package properties as a string of the following format:
* <pre>
* subPackageHandling := instruction { "," instruction };
* instruction := packageIdFilter { ";" option }
* packageIdFilter := packageNameFilter | groupNameFilter ":" packageNameFilter;
* groupNameFilter := "*" | groupName;
* packageNameFilter := "*" | packageName;
* option := "install" | "extract" | "add" | "ignore" | "force_install" | "force_extract";
* </pre>
*
* Note that 'ignore' is currently not really supported, as sub packages are part of the normal package
* content, and it currently behaves the same as 'add'. The default option, if not explicitly specified, is "install".
* Future implementations will transport the sub packages outside of the normal package
* content, e.g. in a META-INF/vault/subpackages/ folder (see <a href="https://issues.apache.org/jira/browse/JCRVLT-33">JCRVLT-33</a>).
*
* <p>
* The sub package handling is being specified in the package properties xml within property {@link PackageProperties#NAME_SUB_PACKAGE_HANDLING} and is parsed via {@link SubPackageHandling#fromString(String)}.
* </p>
* @see PackageProperties
*/
public class SubPackageHandling {

    /**
     * The sub package option controlling how a matched sub package is processed during
     * recursive installation.
     */
    public enum Option {

        /**
         * adds and installs the package using {@link JcrPackage#install(org.apache.jackrabbit.vault.fs.io.ImportOptions)}
         * in case a newer version has not already been installed
         */
        INSTALL,

        /**
         * adds and extracts the package using {@link JcrPackage#extract(org.apache.jackrabbit.vault.fs.io.ImportOptions)}
         * in case a newer version has not already been installed
         */
        EXTRACT,

        /**
         * adds the package using {@link JcrPackageManager#upload} without installing or extracting it
         */
        ADD,

        /**
         * ignores the sub package completely
         */
        IGNORE,

        /**
         * adds and installs the package using {@link JcrPackage#install(org.apache.jackrabbit.vault.fs.io.ImportOptions)}
         * even in case a newer version has already been installed (allows downgrades)
         */
        FORCE_INSTALL,

        /**
         * adds and extracts the package using {@link JcrPackage#extract(org.apache.jackrabbit.vault.fs.io.ImportOptions)}
         * even in case a newer version has already been installed (allows downgrades)
         */
        FORCE_EXTRACT
    }

    /**
     * A single matching rule consisting of a group name filter, a package name filter and the
     * {@link Option} to apply. The filter value {@code "*"} matches any name; {@code null} or
     * empty filters are normalized to {@code "*"}.
     */
    public static class Entry {

        private final String groupName;

        private final String packageName;

        private final Option option;

        /**
         * Creates a new entry.
         * @param groupName the group name filter, {@code null} or empty meaning "match all"
         * @param packageName the package name filter, {@code null} or empty meaning "match all"
         * @param option the option to apply to matching packages
         */
        public Entry(String groupName, String packageName, Option option) {
            // normalize absent filters to the wildcard so the matching in getOption() is uniform
            this.groupName = groupName == null || groupName.isEmpty() ? "*" : groupName;
            this.packageName = packageName == null || packageName.isEmpty() ? "*" : packageName;
            this.option = option;
        }

        /**
         * Returns the group name filter ({@code "*"} matches all groups).
         * @return the group name filter
         */
        public String getGroupName() {
            return groupName;
        }

        /**
         * Returns the package name filter ({@code "*"} matches all packages).
         * @return the package name filter
         */
        public String getPackageName() {
            return packageName;
        }

        /**
         * Returns the option of this entry.
         * @return the option
         */
        public Option getOption() {
            return option;
        }
    }

    /**
     * The default handling: no entries, so every package resolves to {@link Option#INSTALL}.
     * Note that this instance is backed by an immutable empty list; callers must not modify
     * {@code DEFAULT.getEntries()}.
     */
    public static final SubPackageHandling DEFAULT = new SubPackageHandling(Collections.<Entry>emptyList());

    private final List<Entry> entries;

    /**
     * Creates an empty, modifiable configuration.
     */
    public SubPackageHandling() {
        this(new LinkedList<Entry>());
    }

    private SubPackageHandling(List<Entry> entries) {
        this.entries = entries;
    }

    /**
     * Parses an options string as described in the class comment and returns a new
     * SubPackageHandling instance.
     * <p>
     * This method never returns {@code null}: a {@code null} or empty input yields
     * {@link #DEFAULT}, and an unrecognized option token silently falls back to
     * {@link Option#INSTALL}.
     * @param str the string to parse
     * @return the parsed configuration
     */
    public static SubPackageHandling fromString(String str) {
        if (str == null || str.isEmpty()) {
            return SubPackageHandling.DEFAULT;
        }
        SubPackageHandling sp = new SubPackageHandling();
        for (String instruction : Text.explode(str, ',')) {
            String[] opts = Text.explode(instruction.trim(), ';');
            if (opts.length > 0) {
                PackageId id = PackageId.fromString(opts[0]);
                Option opt = Option.INSTALL;
                if (opts.length > 1) {
                    try {
                        // Locale.ROOT keeps the conversion locale-independent: with the default
                        // locale (e.g. Turkish) "install".toUpperCase() would not match the
                        // enum constant name and valueOf() would throw.
                        opt = Option.valueOf(opts[1].toUpperCase(Locale.ROOT));
                    } catch (IllegalArgumentException e) {
                        // unknown option token - keep the INSTALL default
                    }
                }
                sp.getEntries().add(new Entry(id.getGroup(), id.getName(), opt));
            }
        }
        return sp;
    }

    /**
     * Gets the option from the entries list that matches the package last, i.e. later entries
     * override earlier ones (last match wins). If no entry matches, it returns
     * {@link org.apache.jackrabbit.vault.packaging.SubPackageHandling.Option#INSTALL}.
     * @param id the package id to match
     * @return the option.
     */
    public Option getOption(PackageId id) {
        Option opt = null;
        for (Entry e : entries) {
            if (!"*".equals(e.groupName) && !id.getGroup().equals(e.groupName)) {
                continue;
            }
            if (!"*".equals(e.packageName) && !id.getName().equals(e.packageName)) {
                continue;
            }
            opt = e.option;
        }
        return opt == null ? Option.INSTALL : opt;
    }

    /**
     * Returns the modifiable list of entries.
     * @return the list of entries
     */
    public List<Entry> getEntries() {
        return entries;
    }

    /**
     * Returns the parseable string representation of this configuration.
     * @return the string representation.
     */
    public String getString() {
        StringBuilder sb = new StringBuilder();
        for (Entry e : entries) {
            if (sb.length() > 0) {
                sb.append(",");
            }
            sb.append(e.getGroupName()).append(":").append(e.getPackageName());
            if (e.option != Option.INSTALL) {
                // lower-case with a fixed locale so the output stays parseable by fromString()
                sb.append(';').append(e.option.toString().toLowerCase(Locale.ROOT));
            }
        }
        return sb.toString();
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import static org.hamcrest.Matchers.containsString;
/**
 * Tests for Painless lambda support: no-arg and multi-arg lambdas, typed and untyped
 * parameters, primitive functional interfaces, variable capture, arity checking, and
 * void-return handling - each case exercised against both statically typed receivers
 * and {@code def} receivers.
 */
public class LambdaTests extends ScriptTestCase {

    public void testNoArgLambda() {
        assertEquals(1, exec("Optional.empty().orElseGet(() -> 1);"));
    }

    public void testNoArgLambdaDef() {
        assertEquals(1, exec("def x = Optional.empty(); x.orElseGet(() -> 1);"));
    }

    public void testLambdaWithArgs() {
        assertEquals("short", exec("List l = new ArrayList(); l.add('looooong'); l.add('short'); "
                + "l.sort((a, b) -> a.length() - b.length()); return l.get(0)"));
    }

    public void testLambdaWithTypedArgs() {
        assertEquals("short", exec("List l = new ArrayList(); l.add('looooong'); l.add('short'); "
                + "l.sort((String a, String b) -> a.length() - b.length()); return l.get(0)"));
    }

    public void testPrimitiveLambdas() {
        assertEquals(4, exec("List l = new ArrayList(); l.add(1); l.add(1); "
                + "return l.stream().mapToInt(x -> x + 1).sum();"));
    }

    public void testPrimitiveLambdasWithTypedArgs() {
        assertEquals(4, exec("List l = new ArrayList(); l.add(1); l.add(1); "
                + "return l.stream().mapToInt(int x -> x + 1).sum();"));
    }

    public void testPrimitiveLambdasDef() {
        assertEquals(4, exec("def l = new ArrayList(); l.add(1); l.add(1); "
                + "return l.stream().mapToInt(x -> x + 1).sum();"));
    }

    public void testPrimitiveLambdasWithTypedArgsDef() {
        assertEquals(4, exec("def l = new ArrayList(); l.add(1); l.add(1); "
                + "return l.stream().mapToInt(int x -> x + 1).sum();"));
    }

    public void testPrimitiveLambdasConvertible() {
        assertEquals(2, exec("List l = new ArrayList(); l.add((short)1); l.add(1); "
                + "return l.stream().mapToInt(long x -> (int)1).sum();"));
    }

    public void testPrimitiveArgs() {
        assertEquals(2, exec("int applyOne(IntFunction arg) { arg.apply(1) } applyOne(x -> x + 1)"));
    }

    public void testPrimitiveArgsTyped() {
        assertEquals(2, exec("int applyOne(IntFunction arg) { arg.apply(1) } applyOne(int x -> x + 1)"));
    }

    public void testPrimitiveArgsTypedOddly() {
        assertEquals(2L, exec("long applyOne(IntFunction arg) { arg.apply(1) } applyOne(long x -> x + 1)"));
    }

    public void testMultipleStatements() {
        assertEquals(2, exec("int applyOne(IntFunction arg) { arg.apply(1) } applyOne(x -> { def y = x + 1; return y })"));
    }

    public void testUnneededCurlyStatements() {
        assertEquals(2, exec("int applyOne(IntFunction arg) { arg.apply(1) } applyOne(x -> { x + 1 })"));
    }

    /** interface ignores return value */
    public void testVoidReturn() {
        assertEquals(2, exec("List list = new ArrayList(); "
                + "list.add(2); "
                + "List list2 = new ArrayList(); "
                + "list.forEach(x -> list2.add(x));"
                + "return list[0]"));
    }

    /** interface ignores return value */
    public void testVoidReturnDef() {
        assertEquals(2, exec("def list = new ArrayList(); "
                + "list.add(2); "
                + "List list2 = new ArrayList(); "
                + "list.forEach(x -> list2.add(x));"
                + "return list[0]"));
    }

    public void testTwoLambdas() {
        assertEquals("testingcdefg", exec(
                "org.elasticsearch.painless.FeatureTest test = new org.elasticsearch.painless.FeatureTest(2,3);" +
                "return test.twoFunctionsOfX(x -> 'testing'.concat(x), y -> 'abcdefg'.substring(y))"));
    }

    public void testNestedLambdas() {
        assertEquals(1, exec("Optional.empty().orElseGet(() -> Optional.empty().orElseGet(() -> 1));"));
    }

    public void testLambdaInLoop() {
        assertEquals(100, exec("int sum = 0; " +
                "for (int i = 0; i < 100; i++) {" +
                " sum += Optional.empty().orElseGet(() -> 1);" +
                "}" +
                "return sum;"));
    }

    public void testCapture() {
        assertEquals(5, exec("int x = 5; return Optional.empty().orElseGet(() -> x);"));
    }

    public void testTwoCaptures() {
        assertEquals("1test", exec("int x = 1; String y = 'test'; return Optional.empty().orElseGet(() -> x + y);"));
    }

    public void testCapturesAreReadOnly() {
        IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> {
            exec("List l = new ArrayList(); l.add(1); l.add(1); "
                    + "return l.stream().mapToInt(x -> { l = null; return x + 1 }).sum();");
        });
        assertTrue(expected.getMessage().contains("is read-only"));
    }

    @AwaitsFix(bugUrl = "def type tracking")
    public void testOnlyCapturesAreReadOnly() {
        assertEquals(4, exec("List l = new ArrayList(); l.add(1); l.add(1); "
                + "return l.stream().mapToInt(x -> { x += 1; return x }).sum();"));
    }

    /** Lambda parameters shouldn't be able to mask a variable already in scope */
    public void testNoParamMasking() {
        IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> {
            exec("int x = 0; List l = new ArrayList(); l.add(1); l.add(1); "
                    + "return l.stream().mapToInt(x -> { x += 1; return x }).sum();");
        });
        assertTrue(expected.getMessage().contains("already defined"));
    }

    public void testCaptureDef() {
        assertEquals(5, exec("int x = 5; def y = Optional.empty(); y.orElseGet(() -> x);"));
    }

    public void testNestedCapture() {
        assertEquals(1, exec("boolean x = false; int y = 1;" +
                "return Optional.empty().orElseGet(() -> x ? 5 : Optional.empty().orElseGet(() -> y));"));
    }

    public void testNestedCaptureParams() {
        assertEquals(2, exec("int foo(Function f) { return f.apply(1) }" +
                "return foo(x -> foo(y -> x + 1))"));
    }

    public void testWrongArity() {
        // NOTE(review): the boolean second argument to expectScriptThrows presumably
        // suppresses the script-stack assertion for compile-time failures - confirm
        // against ScriptTestCase before relying on it.
        IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, false, () -> {
            exec("Optional.empty().orElseGet(x -> x);");
        });
        assertTrue(expected.getMessage().contains("Incorrect number of parameters"));
    }

    public void testWrongArityDef() {
        IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> {
            exec("def y = Optional.empty(); return y.orElseGet(x -> x);");
        });
        assertTrue(expected.getMessage(), expected.getMessage().contains("Incorrect number of parameters"));
    }

    public void testWrongArityNotEnough() {
        IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, false, () -> {
            exec("List l = new ArrayList(); l.add(1); l.add(1); "
                    + "return l.stream().mapToInt(() -> 5).sum();");
        });
        assertTrue(expected.getMessage().contains("Incorrect number of parameters"));
    }

    public void testWrongArityNotEnoughDef() {
        IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> {
            exec("def l = new ArrayList(); l.add(1); l.add(1); "
                    + "return l.stream().mapToInt(() -> 5).sum();");
        });
        assertTrue(expected.getMessage().contains("Incorrect number of parameters"));
    }

    public void testLambdaInFunction() {
        assertEquals(5, exec("def foo() { Optional.empty().orElseGet(() -> 5) } return foo();"));
    }

    public void testLambdaCaptureFunctionParam() {
        assertEquals(5, exec("def foo(int x) { Optional.empty().orElseGet(() -> x) } return foo(5);"));
    }

    public void testReservedCapture() {
        // 'params' is a script-provided variable; the trailing (params, true) arguments to
        // exec presumably pass the params map through to the compiled script - confirm
        // against ScriptTestCase#exec.
        String compare = "boolean compare(Supplier s, def v) {s.get() == v}";
        assertEquals(true, exec(compare + "compare(() -> new ArrayList(), new ArrayList())"));
        assertEquals(true, exec(compare + "compare(() -> { new ArrayList() }, new ArrayList())"));
        Map<String, Object> params = new HashMap<>();
        params.put("key", "value");
        params.put("number", 2);
        assertEquals(true, exec(compare + "compare(() -> { return params['key'] }, 'value')", params, true));
        assertEquals(false, exec(compare + "compare(() -> { return params['nokey'] }, 'value')", params, true));
        assertEquals(true, exec(compare + "compare(() -> { return params['nokey'] }, null)", params, true));
        assertEquals(true, exec(compare + "compare(() -> { return params['number'] }, 2)", params, true));
        assertEquals(false, exec(compare + "compare(() -> { return params['number'] }, 'value')", params, true));
        assertEquals(false, exec(compare + "compare(() -> { if (params['number'] == 2) { return params['number'] }" +
                "else { return params['key'] } }, 'value')", params, true));
        assertEquals(true, exec(compare + "compare(() -> { if (params['number'] == 2) { return params['number'] }" +
                "else { return params['key'] } }, 2)", params, true));
        assertEquals(true, exec(compare + "compare(() -> { if (params['number'] == 1) { return params['number'] }" +
                "else { return params['key'] } }, 'value')", params, true));
        assertEquals(false, exec(compare + "compare(() -> { if (params['number'] == 1) { return params['number'] }" +
                "else { return params['key'] } }, 2)", params, true));
    }

    public void testReturnVoid() {
        Throwable expected = expectScriptThrows(ClassCastException.class, () -> {
            exec("StringBuilder b = new StringBuilder(); List l = [1, 2]; l.stream().mapToLong(i -> b.setLength(i))");
        });
        assertThat(expected.getMessage(), containsString("Cannot cast from [void] to [long]."));
    }

    public void testReturnVoidDef() {
        // If we can catch the error at compile time we do
        Exception expected = expectScriptThrows(ClassCastException.class, () -> {
            exec("StringBuilder b = new StringBuilder(); def l = [1, 2]; l.stream().mapToLong(i -> b.setLength(i))");
        });
        assertThat(expected.getMessage(), containsString("Cannot cast from [void] to [def]."));
        // Otherwise we convert the void into a null
        assertEquals(Arrays.asList(null, null),
                exec("def b = new StringBuilder(); def l = [1, 2]; l.stream().map(i -> b.setLength(i)).collect(Collectors.toList())"));
        assertEquals(Arrays.asList(null, null),
                exec("def b = new StringBuilder(); List l = [1, 2]; l.stream().map(i -> b.setLength(i)).collect(Collectors.toList())"));
    }
}
| |
/*
* Copyright (c) 2014, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.andes.server.slot;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.andes.configuration.AndesConfigurationManager;
import org.wso2.andes.configuration.enums.AndesConfiguration;
import org.wso2.andes.kernel.*;
import org.wso2.andes.server.cassandra.MessageFlusher;
import org.wso2.andes.server.cassandra.OnflightMessageTracker;
import org.wso2.andes.server.cluster.coordination.hazelcast.HazelcastAgent;
import org.wso2.andes.server.slot.thrift.MBThriftClient;
import org.wso2.andes.subscription.SubscriptionStore;
import java.util.*;
import java.util.concurrent.ConcurrentSkipListMap;
/**
* The SlotDeliveryWorker is responsible for distributing messages to subscribers. Messages are
* taken from a slot.
*/
public class SlotDeliveryWorker extends Thread {

    /**
     * keeps storage queue name vs actual destination it represent
     */
    private ConcurrentSkipListMap<String, String> storageQueueNameToDestinationMap;

    // Used to look up the active local subscriptions for a destination.
    private SubscriptionStore subscriptionStore;

    // Standalone mode only: last processed message id per storage queue, used to page
    // through the message store in slot-window-sized chunks.
    private HashMap<String, Long> localLastProcessedIdMap;

    // NOTE(review): static field assigned from the instance constructor - every new worker
    // overwrites it with the same AndesContext value, so this happens to work, but it is
    // an unusual pattern; confirm before changing worker lifecycle.
    private static boolean isClusteringEnabled;

    private static Log log = LogFactory.getLog(SlotDeliveryWorker.class);

    /**
     * Controls the main delivery loop in {@link #run()}; cleared via
     * {@link #setRunning(boolean)} to stop this worker.
     */
    private volatile boolean running;

    // Cluster node id of this broker; only assigned when clustering is enabled.
    private String nodeId;

    // Flusher that buffers read messages and delivers them to subscribers.
    private MessageFlusher messageFlusher;

    /**
     * Creates a new delivery worker wired to the shared MessageFlusher and SubscriptionStore.
     * When clustering is enabled the node id is resolved from Hazelcast so slots can be
     * requested from the slot coordinator.
     */
    public SlotDeliveryWorker() {
        messageFlusher = MessageFlusher.getInstance();
        this.storageQueueNameToDestinationMap = new ConcurrentSkipListMap<String, String>();
        this.subscriptionStore = AndesContext.getInstance().getSubscriptionStore();
        isClusteringEnabled = AndesContext.getInstance().isClusteringEnabled();
        localLastProcessedIdMap = new HashMap<String, Long>();
        /*
        Start slot deleting thread only if clustering is enabled. Otherwise slots assignment will
        not happen
        */
        if (isClusteringEnabled) {
            nodeId = HazelcastAgent.getInstance().getNodeId();
        }
    }

    @Override
    public void run() {
        /**
         * This while loop is necessary since whenever there are messages this thread should
         * deliver them
         */
        running = true;
        while (running) {
            //Iterate through all the queues registered in this thread
            int idleQueueCounter = 0;
            for (String storageQueueName : storageQueueNameToDestinationMap.keySet()) {
                String destinationOfMessagesInQueue = storageQueueNameToDestinationMap.get(storageQueueName);
                Collection<LocalSubscription> subscriptions4Queue;
                try {
                    subscriptions4Queue = subscriptionStore.getActiveLocalSubscribersForQueuesAndTopics(destinationOfMessagesInQueue);
                    if (subscriptions4Queue != null && !subscriptions4Queue.isEmpty()) {
                        //Check in memory buffer in MessageFlusher has room
                        // NOTE(review): delivery proceeds when isMessageBufferFull() returns
                        // true, which contradicts the method name - confirm whether it
                        // actually means "buffer has capacity" before relying on it.
                        if (messageFlusher.getMessageDeliveryInfo(destinationOfMessagesInQueue)
                                .isMessageBufferFull()) {
                            // Cluster mode: ask the slot coordinator (via Thrift) for a slot of
                            // message ids to deliver for this queue.
                            if (isClusteringEnabled) {
                                long startTime = System.currentTimeMillis();
                                Slot currentSlot = MBThriftClient.getSlot(storageQueueName, nodeId);
                                currentSlot.setDestinationOfMessagesInSlot(destinationOfMessagesInQueue);
                                long endTime = System.currentTimeMillis();
                                if (log.isDebugEnabled()) {
                                    log.debug(
                                            (endTime - startTime) + " milliSec took to get a slot" +
                                                    " from slot manager");
                                }
                                /**
                                 * If the slot is empty
                                 */
                                if (0 == currentSlot.getEndMessageId()) {
                                    /*
                                    If the message buffer in MessageFlusher is not empty
                                    send those messages
                                    */
                                    if (log.isDebugEnabled()) {
                                        log.debug("Recieved an empty slot from slot manager in " +
                                                "cluster mode");
                                    }
                                    boolean sentFromMessageBuffer = sendFromMessageBuffer(
                                            destinationOfMessagesInQueue);
                                    if (!sentFromMessageBuffer) {
                                        //No available free slots
                                        idleQueueCounter++;
                                        // Sleep only when every registered queue was idle this
                                        // pass, to avoid busy-spinning.
                                        if (idleQueueCounter == storageQueueNameToDestinationMap.size()) {
                                            try {
                                                if (log.isDebugEnabled()) {
                                                    log.debug("Sleeping Slot Delivery Worker");
                                                }
                                                Thread.sleep(100);
                                            } catch (InterruptedException ignored) {
                                                //Silently ignore
                                            }
                                        }
                                    }
                                } else {
                                    if (log.isDebugEnabled()) {
                                        log.debug("Received slot for queue " + storageQueueName + " " +
                                                "is: " + currentSlot.getStartMessageId() +
                                                " - " + currentSlot.getEndMessageId() +
                                                "Thread Id:" + Thread.currentThread().getId());
                                    }
                                    long firstMsgId = currentSlot.getStartMessageId();
                                    long lastMsgId = currentSlot.getEndMessageId();
                                    //Read messages in the slot
                                    List<AndesMessageMetadata> messagesRead =
                                            MessagingEngine.getInstance().getMetaDataList(
                                                    storageQueueName, firstMsgId, lastMsgId);
                                    if (log.isDebugEnabled()) {
                                        StringBuilder messageIDString = new StringBuilder();
                                        for (AndesMessageMetadata metadata : messagesRead) {
                                            messageIDString.append(metadata.getMessageID()).append(" , ");
                                        }
                                        log.debug("Messages Read: " + messageIDString);
                                    }
                                    if (messagesRead != null &&
                                            !messagesRead.isEmpty()) {
                                        if (log.isDebugEnabled()) {
                                            log.debug("Number of messages read from slot " +
                                                    currentSlot.getStartMessageId() + " - " +
                                                    currentSlot.getEndMessageId() + " is " +
                                                    messagesRead.size() + " queue= " + storageQueueName);
                                        }
                                        MessageFlusher.getInstance().sendMessageToFlusher(
                                                messagesRead, currentSlot);
                                    } else {
                                        // Slot contained no metadata: return it to the
                                        // coordinator and drop its tracking state.
                                        currentSlot.setSlotInActive();
                                        MBThriftClient.deleteSlot(storageQueueName, currentSlot, nodeId);
                                        //Release all message trackings for messages of slot
                                        OnflightMessageTracker.getInstance().releaseAllMessagesOfSlotFromTracking(currentSlot);
                                        /*If there are messages to be sent in the message
                                        buffer in MessageFlusher send them */
                                        sendFromMessageBuffer(destinationOfMessagesInQueue);
                                    }
                                }
                            //Standalone mode
                            } else {
                                // Page through the local message store starting after the last
                                // processed id and simulate a slot for the read batch.
                                long startMessageId = 0;
                                if (localLastProcessedIdMap.get(storageQueueName) != null) {
                                    startMessageId = localLastProcessedIdMap.get(storageQueueName) + 1;
                                }
                                Integer slotWindowSize = AndesConfigurationManager.readValue
                                        (AndesConfiguration.PERFORMANCE_TUNING_SLOTS_SLOT_WINDOW_SIZE);
                                List<AndesMessageMetadata> messagesRead =
                                        MessagingEngine.getInstance()
                                                .getNextNMessageMetadataFromQueue
                                                        (storageQueueName, startMessageId,
                                                                slotWindowSize);
                                if (log.isDebugEnabled()) {
                                    StringBuilder messageIDString = new StringBuilder();
                                    for (AndesMessageMetadata metadata : messagesRead) {
                                        messageIDString.append(metadata.getMessageID()).append(" , ");
                                    }
                                    log.debug("Messages Read: " + messageIDString);
                                }
                                if (messagesRead == null ||
                                        messagesRead.isEmpty()) {
                                    log.debug("No messages are read. StorageQ= " + storageQueueName);
                                    boolean sentFromMessageBuffer = sendFromMessageBuffer
                                            (destinationOfMessagesInQueue);
                                    log.debug(
                                            "Sent messages from buffer = " + sentFromMessageBuffer);
                                    if (!sentFromMessageBuffer) {
                                        idleQueueCounter++;
                                        try {
                                            //There are no messages to read
                                            if (idleQueueCounter == storageQueueNameToDestinationMap
                                                    .size()) {
                                                if (log.isDebugEnabled()) {
                                                    log.debug("Sleeping Slot Delivery Worker");
                                                }
                                                Thread.sleep(2000);
                                            }
                                        } catch (InterruptedException ignored) {
                                            //Silently ignore
                                        }
                                    }
                                } else {
                                    if (log.isDebugEnabled()) {
                                        log.debug(messagesRead.size() + " " +
                                                "number of messages read from Slot Delivery Worker. StorageQ= " + storageQueueName);
                                    }
                                    long lastMessageId = messagesRead.get(
                                            messagesRead
                                                    .size() - 1).getMessageID();
                                    log.debug(
                                            "Last message id read = " +
                                                    lastMessageId);
                                    localLastProcessedIdMap.put(storageQueueName, lastMessageId);
                                    //Simulate a slot here
                                    Slot currentSlot = new Slot();
                                    currentSlot.setStorageQueueName(storageQueueName);
                                    currentSlot.setDestinationOfMessagesInSlot(
                                            destinationOfMessagesInQueue);
                                    currentSlot.setStartMessageId(startMessageId);
                                    currentSlot.setEndMessageId(lastMessageId);
                                    log.debug("sending read messages to flusher << " + currentSlot
                                            .toString() + " >>");
                                    messageFlusher.sendMessageToFlusher
                                            (messagesRead, currentSlot);
                                }
                            }
                        } else {
                            /*If there are messages to be sent in the message
                            buffer in MessageFlusher send them */
                            if (log.isDebugEnabled()) {
                                log.debug(
                                        "The queue " + storageQueueName + " has no room. Thus sending " +
                                                "from buffer.");
                            }
                            sendFromMessageBuffer(destinationOfMessagesInQueue);
                        }
                    } else {
                        // No active subscribers for this destination; count it idle and
                        // back off when every queue was idle this pass.
                        idleQueueCounter++;
                        if (idleQueueCounter == storageQueueNameToDestinationMap.size()) {
                            try {
                                if (log.isDebugEnabled()) {
                                    log.debug("Sleeping Slot Delivery Worker");
                                }
                                Thread.sleep(100);
                            } catch (InterruptedException ignored) {
                                //Silently ignore
                            }
                        }
                    }
                } catch (AndesException e) {
                    log.error("Error running Message Store Reader " + e.getMessage(), e);
                } catch (ConnectionException e) {
                    log.error("Error occurred while connecting to the thrift coordinator " +
                            e.getMessage(), e);
                    //setRunning(false);
                    //Any exception should be caught here. Otherwise SDW thread will stop
                    //and MB node will become useless
                } catch (Exception e) {
                    log.error("Error while running Slot Delivery Worker. ", e);
                }
            }
        }
    }

    /**
     * Send messages from buffer in MessageFlusher if the buffer is not empty
     *
     * @param msgDestination queue/topic message is addressed to
     * @return whether the messages are sent from message buffer or not
     * @throws AndesException if sending the buffered messages fails
     */
    private boolean sendFromMessageBuffer(String msgDestination) throws AndesException {
        boolean sentFromMessageBuffer = false;
        if (!messageFlusher.isMessageBufferEmpty(msgDestination)) {
            messageFlusher.sendMessagesInBuffer(msgDestination);
            sentFromMessageBuffer = true;
        }
        return sentFromMessageBuffer;
    }

    /**
     * Add a queue to queue list of this SlotDeliveryWorkerThread
     *
     * @param storageQueueName queue name of the newly added queue
     * @param destination the destination (queue/topic name) the storage queue delivers to
     */
    public void addQueueToThread(String storageQueueName, String destination) {
        getStorageQueueNameToDestinationMap().put(storageQueueName, destination);
    }

    /**
     * Get queue list belongs to this thread
     *
     * @return queue list
     */
    public ConcurrentSkipListMap<String, String> getStorageQueueNameToDestinationMap() {
        return storageQueueNameToDestinationMap;
    }

    /**
     * @return Whether the worker thread is in running state or not
     */
    public boolean isRunning() {
        return running;
    }

    /**
     * Set state of the worker thread
     *
     * @param running new state of the worker
     */
    public void setRunning(boolean running) {
        this.running = running;
    }

    /**
     * Check whether the slot is empty and if not resend the remaining messages. If the slot is
     * empty delete the slot from slot manager and clear all tracking data in OnflightMessageTracker
     *
     * @param slot the slot to be checked for emptiness
     * @throws AndesException if reading the slot's metadata or returning the slot fails
     */
    public void checkForSlotCompletionAndResend(Slot slot) throws AndesException {
        // Once again get all metadata of given slot to check all sent messages' metadata has been removed
        List<AndesMessageMetadata> messagesReturnedFromCassandra = MessagingEngine.getInstance().getMetaDataList(
                slot.getStorageQueueName(), slot.getStartMessageId(),
                slot.getEndMessageId());
        // All metadata has not been removed
        if (!messagesReturnedFromCassandra.isEmpty()) {
            // Check messages returned from cassandra has already been buffered. If so removed each buffered from list
            Iterator<AndesMessageMetadata> iterator = messagesReturnedFromCassandra.iterator();
            while (iterator.hasNext()) {
                if (OnflightMessageTracker.getInstance().checkIfMessageIsAlreadyBuffered(slot,
                        iterator.next().getMessageID())) {
                    iterator.remove();
                }
            }
            // Return the slot if all messages remaining in slot are already sent.
            // Otherwise the slot will not be removed and send remaining messages to flusher
            if (messagesReturnedFromCassandra.isEmpty()) {
                try {
                    slot.setSlotInActive();
                    if (isClusteringEnabled) {
                        MBThriftClient.deleteSlot(slot.getStorageQueueName(), slot, nodeId);
                        OnflightMessageTracker.getInstance().releaseAllMessagesOfSlotFromTracking(slot);
                    }
                } catch (ConnectionException e) {
                    throw new AndesException(
                            "Error deleting slot while checking for slot completion.", e);
                }
            } else {
                if (log.isDebugEnabled()) {
                    log.debug(
                            "Resending missing " + messagesReturnedFromCassandra.size() + " messages " +
                                    "for slot: " + slot.toString());
                }
                MessageFlusher.getInstance().sendMessageToFlusher(
                        messagesReturnedFromCassandra, slot);
            }
        // All metadata has been removed and therefore return the slot
        } else {
            try {
                slot.setSlotInActive();
                if (isClusteringEnabled) {
                    MBThriftClient.deleteSlot(slot.getStorageQueueName(), slot, nodeId);
                    OnflightMessageTracker.getInstance().releaseAllMessagesOfSlotFromTracking(slot);
                }
            } catch (ConnectionException e) {
                throw new AndesException(
                        "Error deleting slot while checking for slot completion.", e);
            }
        }
    }
}
| |
package org.gestern.gringotts.data;
import com.avaje.ebean.EbeanServer;
import com.avaje.ebean.SqlQuery;
import com.avaje.ebean.SqlRow;
import com.avaje.ebean.SqlUpdate;
import org.bukkit.Bukkit;
import org.bukkit.Location;
import org.bukkit.World;
import org.bukkit.block.Block;
import org.bukkit.block.Sign;
import org.gestern.gringotts.AccountChest;
import org.gestern.gringotts.Gringotts;
import org.gestern.gringotts.GringottsAccount;
import org.gestern.gringotts.Util;
import org.gestern.gringotts.accountholder.AccountHolder;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
import java.util.logging.Logger;
import static org.gestern.gringotts.Configuration.CONF;
/**
 * DAO implementation based on the Bukkit-provided Avaje EBean ORM.
 */
public class EBeanDAO implements DAO {

    /** Lazily-created singleton instance; guarded by class-level synchronization in getDao(). */
    private static EBeanDAO dao;

    private final EbeanServer db = Gringotts.getInstance().getDatabase();
    private final Logger log = Gringotts.getInstance().getLogger();

    /**
     * Return the shared DAO instance, creating it on first use.
     *
     * <p>Synchronized: the previous unsynchronized check-then-act could let two threads race
     * and construct two instances, while every other method of this class is synchronized.
     *
     * @return the singleton EBeanDAO
     */
    public static synchronized EBeanDAO getDao() {
        if (dao == null) {
            dao = new EBeanDAO();
        }
        return dao;
    }

    /**
     * The classes comprising the DB model, required for the EBean DDL ("data description language").
     *
     * @return list of entity classes backing this DAO
     */
    public static List<Class<?>> getDatabaseClasses() {
        return Arrays.asList(EBeanAccount.class, EBeanAccountChest.class);
    }

    /**
     * Persist an account chest, linking it to the owning account row via a sub-select.
     *
     * @param chest chest to store
     * @return true if a row was inserted
     */
    @Override
    public synchronized boolean storeAccountChest(AccountChest chest) {
        SqlUpdate storeChest = db.createSqlUpdate(
                "insert into gringotts_accountchest (world,x,y,z,account) " +
                        "values (:world, :x, :y, :z, (select id from gringotts_account where owner=:owner and " +
                        "type=:type))");
        Sign mark = chest.sign;
        storeChest.setParameter("world", mark.getWorld().getName());
        storeChest.setParameter("x", mark.getX());
        storeChest.setParameter("y", mark.getY());
        storeChest.setParameter("z", mark.getZ());
        storeChest.setParameter("owner", chest.account.owner.getId());
        storeChest.setParameter("type", chest.account.owner.getType());
        return storeChest.execute() > 0;
    }

    /**
     * Remove the stored row for the given account chest.
     *
     * @param chest chest to destroy
     * @return true if a row was deleted
     */
    @Override
    public synchronized boolean destroyAccountChest(AccountChest chest) {
        Sign mark = chest.sign;
        return deleteAccountChest(mark.getWorld().getName(), mark.getX(), mark.getY(), mark.getZ());
    }

    /**
     * Create an account row for the holder unless one already exists, seeding it with the
     * configured start balance for the holder's type.
     *
     * @param account account to store
     * @return true if a new account was created, false if it already existed
     */
    @Override
    public synchronized boolean storeAccount(GringottsAccount account) {
        if (hasAccount(account.owner)) {
            return false;
        }
        EBeanAccount acc = new EBeanAccount();
        acc.setOwner(account.owner.getId());
        acc.setType(account.owner.getType());
        // TODO this is business logic and should probably be outside of the DAO implementation.
        // also find a more elegant way of handling different account types
        acc.setCents(CONF.getCurrency().centValue(startBalance(account.owner.getType())));
        db.save(acc);
        return true;
    }

    /**
     * Configured start balance for the given account type; 0 for unknown types.
     */
    private static double startBalance(String type) {
        switch (type) {
            case "player":
                return CONF.startBalancePlayer;
            case "faction":
                return CONF.startBalanceFaction;
            case "town":
                return CONF.startBalanceTown;
            case "nation":
                return CONF.startBalanceNation;
            default:
                return 0;
        }
    }

    /**
     * Check whether exactly one account row exists for the given holder.
     *
     * @param accountHolder holder to look up
     * @return true if the holder has an account
     */
    @Override
    public synchronized boolean hasAccount(AccountHolder accountHolder) {
        int accCount = db
                .find(EBeanAccount.class)
                .where()
                .ieq("type", accountHolder.getType()).ieq("owner", accountHolder.getId())
                .findRowCount();
        return accCount == 1;
    }

    /**
     * Load all stored account chests. Chests in non-existent worlds are skipped; rows whose
     * block is no longer a valid sign, or whose owner is no longer resolvable, are deleted.
     *
     * @return all valid account chests currently stored
     */
    @Override
    public synchronized List<AccountChest> getChests() {
        List<SqlRow> result = db.createSqlQuery("SELECT ac.world, ac.x, ac.y, ac.z, a.type, a.owner " +
                "FROM gringotts_accountchest ac JOIN gringotts_account a ON ac.account = a.id ")
                .findList();
        List<AccountChest> chests = new LinkedList<>();
        for (SqlRow c : result) {
            String worldName = c.getString("world");
            int x = c.getInteger("x");
            int y = c.getInteger("y");
            int z = c.getInteger("z");
            String type = c.getString("type");
            String ownerId = c.getString("owner");
            World world = Bukkit.getWorld(worldName);
            if (world == null) {
                continue; // skip vaults in non-existing worlds
            }
            Location loc = new Location(world, x, y, z);
            Block signBlock = loc.getBlock();
            if (Util.isSignBlock(signBlock)) {
                AccountHolder owner = Gringotts.getInstance().getAccountHolderFactory().get(type, ownerId);
                if (owner == null) {
                    log.info("AccountHolder " + type + ":" + ownerId + " is not valid. Deleting associated account " +
                            "chest at " + signBlock.getLocation());
                    deleteAccountChest(signBlock.getWorld().getName(), signBlock.getX(), signBlock.getY(), signBlock
                            .getZ());
                } else {
                    GringottsAccount ownerAccount = new GringottsAccount(owner);
                    Sign sign = (Sign) signBlock.getState();
                    chests.add(new AccountChest(sign, ownerAccount));
                }
            } else {
                // remove accountchest from storage if it is not a valid chest
                deleteAccountChest(worldName, x, y, z);
            }
        }
        return chests;
    }

    /** Delete the account chest row at the given coordinates; true if a row was removed. */
    private boolean deleteAccountChest(String world, int x, int y, int z) {
        SqlUpdate deleteChest = db.createSqlUpdate("delete from gringotts_accountchest " +
                "where world = :world and x = :x and y = :y and z = :z");
        deleteChest.setParameter("world", world);
        deleteChest.setParameter("x", x);
        deleteChest.setParameter("y", y);
        deleteChest.setParameter("z", z);
        return deleteChest.execute() > 0;
    }

    /**
     * Load the account chests belonging to a single account. Chests in non-existent worlds are
     * skipped; rows whose block is no longer a valid sign are deleted.
     *
     * @param account account whose chests to load
     * @return valid chests of the given account
     */
    @Override
    public synchronized List<AccountChest> getChests(GringottsAccount account) {
        // TODO ensure world interaction is done in sync task
        SqlQuery getChests = db.createSqlQuery("SELECT ac.world, ac.x, ac.y, ac.z " +
                "FROM gringotts_accountchest ac JOIN gringotts_account a ON ac.account = a.id " +
                "WHERE a.owner = :owner and a.type = :type");
        getChests.setParameter("owner", account.owner.getId());
        getChests.setParameter("type", account.owner.getType());
        List<AccountChest> chests = new LinkedList<>();
        for (SqlRow result : getChests.findSet()) {
            String worldName = result.getString("world");
            int x = result.getInteger("x");
            int y = result.getInteger("y");
            int z = result.getInteger("z");
            World world = Bukkit.getWorld(worldName);
            if (world == null) {
                continue; // skip chest if it is in non-existent world
            }
            Location loc = new Location(world, x, y, z);
            Block signBlock = loc.getBlock();
            if (Util.isSignBlock(signBlock)) {
                Sign sign = (Sign) loc.getBlock().getState();
                chests.add(new AccountChest(sign, account));
            } else {
                // remove accountchest from storage if it is not a valid chest
                deleteAccountChest(worldName, x, y, z);
            }
        }
        return chests;
    }

    /**
     * Store the given cent balance for an account.
     *
     * @param account account to update
     * @param amount  new balance in cents
     * @return true if exactly one row was updated
     */
    @Override
    public synchronized boolean storeCents(GringottsAccount account, long amount) {
        SqlUpdate up = db.createSqlUpdate("UPDATE gringotts_account SET cents = :cents " +
                "WHERE owner = :owner and type = :type");
        up.setParameter("cents", amount);
        up.setParameter("owner", account.owner.getId());
        up.setParameter("type", account.owner.getType());
        return up.execute() == 1;
    }

    /**
     * Retrieve the cent balance of an account.
     *
     * @param account account to look up
     * @return balance in cents, or 0 if no matching account row exists
     */
    @Override
    public synchronized long getCents(GringottsAccount account) {
        EBeanAccount stored = db.find(EBeanAccount.class)
                .where()
                .ieq("type", account.owner.getType())
                .ieq("owner", account.owner.getId())
                .findUnique();
        // findUnique() returns null when no row matches; the previous code dereferenced it
        // unconditionally and could throw a NullPointerException for missing accounts.
        return stored == null ? 0 : stored.cents;
    }

    /**
     * Not implemented for the EBean backend.
     *
     * @throws RuntimeException always
     */
    @Override
    public synchronized void deleteAccount(GringottsAccount acc) {
        // TODO implement deleteAccount, mayhaps?
        throw new RuntimeException("delete account not supported yet in EBeanDAO");
    }

    @Override
    public synchronized void shutdown() {
        // The EbeanServer lifecycle is managed by Bukkit, so nothing to release here.
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.pdx.internal;
import java.io.DataOutput;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Date;
import org.apache.geode.DataSerializer;
import org.apache.geode.internal.ByteBufferWriter;
import org.apache.geode.internal.HeapDataOutputStream;
import org.apache.geode.internal.InternalDataSerializer;
import org.apache.geode.internal.serialization.Version;
import org.apache.geode.internal.tcp.ByteBufferInputStream.ByteSource;
import org.apache.geode.pdx.PdxSerializationException;
/**
* Used by PdxWriterImpl to manage the raw data of a PDX.
*
*/
public class PdxOutputStream implements ByteBufferWriter {

  /**
   * Message used for every field-serialization failure; previously this literal was
   * duplicated in each of the 14 write methods below.
   */
  private static final String FIELD_SERIALIZATION_FAILED =
      "Exception while serializing a PDX field";

  /** Backing stream holding the raw PDX bytes. */
  private final HeapDataOutputStream hdos;

  public PdxOutputStream() {
    this.hdos = new HeapDataOutputStream(Version.CURRENT);
  }

  public PdxOutputStream(int allocSize) {
    this.hdos = new HeapDataOutputStream(allocSize, Version.CURRENT);
  }

  /**
   * Wrapper constructor
   */
  public PdxOutputStream(HeapDataOutputStream hdos) {
    this.hdos = hdos;
  }

  /** Writes a Date field, wrapping any IOException as a PdxSerializationException. */
  public void writeDate(Date date) {
    try {
      DataSerializer.writeDate(date, this.hdos);
    } catch (IOException e) {
      throw new PdxSerializationException(FIELD_SERIALIZATION_FAILED, e);
    }
  }

  /** Writes a String field, wrapping any IOException as a PdxSerializationException. */
  public void writeString(String value) {
    try {
      DataSerializer.writeString(value, this.hdos);
    } catch (IOException e) {
      throw new PdxSerializationException(FIELD_SERIALIZATION_FAILED, e);
    }
  }

  /** Writes an Object field, wrapping any IOException as a PdxSerializationException. */
  public void writeObject(Object object, boolean ensureCompatibility) {
    try {
      InternalDataSerializer.basicWriteObject(object, this.hdos, ensureCompatibility);
    } catch (IOException e) {
      throw new PdxSerializationException(FIELD_SERIALIZATION_FAILED, e);
    }
  }

  public void writeBooleanArray(boolean[] array) {
    try {
      DataSerializer.writeBooleanArray(array, this.hdos);
    } catch (IOException e) {
      throw new PdxSerializationException(FIELD_SERIALIZATION_FAILED, e);
    }
  }

  public void writeCharArray(char[] array) {
    try {
      DataSerializer.writeCharArray(array, this.hdos);
    } catch (IOException e) {
      throw new PdxSerializationException(FIELD_SERIALIZATION_FAILED, e);
    }
  }

  public void writeByteArray(byte[] array) {
    try {
      DataSerializer.writeByteArray(array, this.hdos);
    } catch (IOException e) {
      throw new PdxSerializationException(FIELD_SERIALIZATION_FAILED, e);
    }
  }

  public void writeShortArray(short[] array) {
    try {
      DataSerializer.writeShortArray(array, this.hdos);
    } catch (IOException e) {
      throw new PdxSerializationException(FIELD_SERIALIZATION_FAILED, e);
    }
  }

  public void writeIntArray(int[] array) {
    try {
      DataSerializer.writeIntArray(array, this.hdos);
    } catch (IOException e) {
      throw new PdxSerializationException(FIELD_SERIALIZATION_FAILED, e);
    }
  }

  public void writeLongArray(long[] array) {
    try {
      DataSerializer.writeLongArray(array, this.hdos);
    } catch (IOException e) {
      throw new PdxSerializationException(FIELD_SERIALIZATION_FAILED, e);
    }
  }

  public void writeFloatArray(float[] array) {
    try {
      DataSerializer.writeFloatArray(array, this.hdos);
    } catch (IOException e) {
      throw new PdxSerializationException(FIELD_SERIALIZATION_FAILED, e);
    }
  }

  public void writeDoubleArray(double[] array) {
    try {
      DataSerializer.writeDoubleArray(array, this.hdos);
    } catch (IOException e) {
      throw new PdxSerializationException(FIELD_SERIALIZATION_FAILED, e);
    }
  }

  public void writeStringArray(String[] array) {
    try {
      DataSerializer.writeStringArray(array, this.hdos);
    } catch (IOException e) {
      throw new PdxSerializationException(FIELD_SERIALIZATION_FAILED, e);
    }
  }

  public void writeObjectArray(Object[] array, boolean ensureCompatibility) {
    try {
      InternalDataSerializer.writeObjectArray(array, this.hdos, ensureCompatibility);
    } catch (IOException e) {
      throw new PdxSerializationException(FIELD_SERIALIZATION_FAILED, e);
    }
  }

  public void writeArrayOfByteArrays(byte[][] array) {
    try {
      DataSerializer.writeArrayOfByteArrays(array, this.hdos);
    } catch (IOException e) {
      throw new PdxSerializationException(FIELD_SERIALIZATION_FAILED, e);
    }
  }

  /** Number of bytes written so far. */
  public int size() {
    return this.hdos.size();
  }

  // The primitive writers below delegate directly to HeapDataOutputStream, which does not
  // throw checked exceptions for in-memory writes, so no wrapping is needed.

  public void writeChar(char value) {
    this.hdos.writeChar(value);
  }

  public void writeByte(int value) {
    this.hdos.writeByte(value);
  }

  public void writeShort(short value) {
    this.hdos.writeShort(value);
  }

  public void writeInt(int value) {
    this.hdos.writeInt(value);
  }

  public void writeLong(long value) {
    this.hdos.writeLong(value);
  }

  public void writeFloat(float value) {
    this.hdos.writeFloat(value);
  }

  public void writeDouble(double value) {
    this.hdos.writeDouble(value);
  }

  /** Reserves space for a long to be patched in later via the returned updater. */
  public HeapDataOutputStream.LongUpdater reserveLong() {
    return this.hdos.reserveLong();
  }

  public void write(byte b) {
    this.hdos.write(b);
  }

  /** Copies the accumulated bytes to the given output. */
  public void sendTo(DataOutput out) throws IOException {
    this.hdos.sendTo(out);
  }

  @Override
  public void write(ByteBuffer data) {
    this.hdos.write(data);
  }

  public void write(ByteSource data) {
    this.hdos.write(data);
  }

  public ByteBuffer toByteBuffer() {
    return this.hdos.toByteBuffer();
  }

  public ByteBuffer toByteBuffer(int startPosition) {
    return this.hdos.toByteBuffer(startPosition);
  }

  public byte[] toByteArray() {
    return this.hdos.toByteArray();
  }

  public void write(byte[] source, int offset, int len) {
    this.hdos.write(source, offset, len);
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.runners.dataflow;
import static com.google.common.base.MoreObjects.firstNonNull;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.base.Strings.isNullOrEmpty;
import static org.apache.beam.sdk.util.SerializableUtils.serializeToByteArray;
import static org.apache.beam.sdk.util.StringUtils.byteArrayToJsonString;
import com.google.api.client.googleapis.json.GoogleJsonResponseException;
import com.google.api.services.clouddebugger.v2.Clouddebugger;
import com.google.api.services.clouddebugger.v2.model.Debuggee;
import com.google.api.services.clouddebugger.v2.model.RegisterDebuggeeRequest;
import com.google.api.services.clouddebugger.v2.model.RegisterDebuggeeResponse;
import com.google.api.services.dataflow.model.DataflowPackage;
import com.google.api.services.dataflow.model.Job;
import com.google.api.services.dataflow.model.ListJobsResponse;
import com.google.api.services.dataflow.model.WorkerPool;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Joiner;
import com.google.common.base.Strings;
import com.google.common.base.Utf8;
import com.google.common.collect.ImmutableMap;
import java.io.File;
import java.io.IOException;
import java.io.PrintWriter;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.URLClassLoader;
import java.nio.channels.Channels;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
import javax.annotation.Nullable;
import org.apache.beam.runners.dataflow.DataflowPipelineTranslator.JobSpecification;
import org.apache.beam.runners.dataflow.options.DataflowPipelineDebugOptions;
import org.apache.beam.runners.dataflow.options.DataflowPipelineOptions;
import org.apache.beam.runners.dataflow.options.DataflowPipelineWorkerPoolOptions;
import org.apache.beam.runners.dataflow.util.DataflowTemplateJob;
import org.apache.beam.runners.dataflow.util.DataflowTransport;
import org.apache.beam.runners.dataflow.util.MonitoringUtil;
import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.Pipeline.PipelineVisitor;
import org.apache.beam.sdk.PipelineResult.State;
import org.apache.beam.sdk.annotations.Experimental;
import org.apache.beam.sdk.coders.Coder;
import org.apache.beam.sdk.coders.VoidCoder;
import org.apache.beam.sdk.io.BoundedSource;
import org.apache.beam.sdk.io.FileBasedSink;
import org.apache.beam.sdk.io.PubsubIO;
import org.apache.beam.sdk.io.PubsubUnboundedSink;
import org.apache.beam.sdk.io.PubsubUnboundedSource;
import org.apache.beam.sdk.io.Read;
import org.apache.beam.sdk.io.UnboundedSource;
import org.apache.beam.sdk.io.Write;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.options.PipelineOptionsValidator;
import org.apache.beam.sdk.options.StreamingOptions;
import org.apache.beam.sdk.options.ValueProvider.NestedValueProvider;
import org.apache.beam.sdk.runners.PipelineRunner;
import org.apache.beam.sdk.runners.TransformHierarchy;
import org.apache.beam.sdk.transforms.Aggregator;
import org.apache.beam.sdk.transforms.Combine;
import org.apache.beam.sdk.transforms.Create;
import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.Flatten;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.SerializableFunction;
import org.apache.beam.sdk.transforms.View;
import org.apache.beam.sdk.transforms.WithKeys;
import org.apache.beam.sdk.transforms.display.DisplayData;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
import org.apache.beam.sdk.util.IOChannelUtils;
import org.apache.beam.sdk.util.InstanceBuilder;
import org.apache.beam.sdk.util.MimeTypes;
import org.apache.beam.sdk.util.NameUtils;
import org.apache.beam.sdk.util.PathValidator;
import org.apache.beam.sdk.util.PropertyNames;
import org.apache.beam.sdk.util.ReleaseInfo;
import org.apache.beam.sdk.util.Reshuffle;
import org.apache.beam.sdk.util.ValueWithRecordId;
import org.apache.beam.sdk.util.WindowedValue;
import org.apache.beam.sdk.util.WindowingStrategy;
import org.apache.beam.sdk.values.KV;
import org.apache.beam.sdk.values.PBegin;
import org.apache.beam.sdk.values.PCollection;
import org.apache.beam.sdk.values.PCollection.IsBounded;
import org.apache.beam.sdk.values.PCollectionList;
import org.apache.beam.sdk.values.PCollectionView;
import org.apache.beam.sdk.values.PDone;
import org.apache.beam.sdk.values.PInput;
import org.apache.beam.sdk.values.POutput;
import org.apache.beam.sdk.values.PValue;
import org.joda.time.DateTimeUtils;
import org.joda.time.DateTimeZone;
import org.joda.time.format.DateTimeFormat;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A {@link PipelineRunner} that executes the operations in the pipeline by first translating them
* to the Dataflow representation using the {@link DataflowPipelineTranslator} and then submitting
* them to a Dataflow service for execution.
*
* <h3>Permissions</h3>
*
* <p>When reading from a Dataflow source or writing to a Dataflow sink using
* {@code DataflowRunner}, the Google cloudservices account and the Google compute engine service
* account of the GCP project running the Dataflow Job will need access to the corresponding
* source/sink.
*
* <p>Please see <a href="https://cloud.google.com/dataflow/security-and-permissions">Google Cloud
* Dataflow Security and Permissions</a> for more details.
*/
public class DataflowRunner extends PipelineRunner<DataflowPipelineJob> {
private static final Logger LOG = LoggerFactory.getLogger(DataflowRunner.class);
/** Provided configuration options. */
private final DataflowPipelineOptions options;
/** Client for the Dataflow service. This is used to actually submit jobs. */
private final DataflowClient dataflowClient;
/** Translator for this DataflowRunner, based on options. */
private final DataflowPipelineTranslator translator;
/** Custom transforms implementations. */
private final Map<Class<?>, Class<?>> overrides;
/** A set of user defined functions to invoke at different points in execution. */
private DataflowRunnerHooks hooks;
// The limit of CreateJob request size.
private static final int CREATE_JOB_REQUEST_LIMIT_BYTES = 10 * 1024 * 1024;
@VisibleForTesting
static final int GCS_UPLOAD_BUFFER_SIZE_BYTES_DEFAULT = 1024 * 1024;
// NOTE(review): populated during pipeline translation; presumably tracks PCollections that
// must be materialized in an indexed format for side-input use -- confirm against usages.
private final Set<PCollection<?>> pcollectionsRequiringIndexedFormat;
/**
 * Project IDs must contain lowercase letters, digits, or dashes.
 * IDs must start with a letter and may not end with a dash.
 * This regex isn't exact - this allows for patterns that would be rejected by
 * the service, but this is sufficient for basic validation of project IDs.
 */
public static final String PROJECT_ID_REGEXP = "[a-z][-a-z0-9:.]+[a-z0-9]";
/**
 * Construct a runner from the provided options.
 *
 * @param options Properties that configure the runner.
 * @return The newly created runner.
 * @throws IllegalArgumentException if required options (appName, gcpTempLocation,
 *     stagingLocation) are missing, or if the job name, project ID, or worker harness
 *     thread count is invalid.
 */
public static DataflowRunner fromOptions(PipelineOptions options) {
  // (Re-)register standard IO factories. Clobbers any prior credentials.
  IOChannelUtils.registerIOFactoriesAllowOverride(options);
  DataflowPipelineOptions dataflowOptions =
      PipelineOptionsValidator.validate(DataflowPipelineOptions.class, options);
  ArrayList<String> missing = new ArrayList<>();
  if (dataflowOptions.getAppName() == null) {
    missing.add("appName");
  }
  if (!missing.isEmpty()) {
    throw new IllegalArgumentException(
        "Missing required values: " + Joiner.on(',').join(missing));
  }
  PathValidator validator = dataflowOptions.getPathValidator();
  // Both GCS locations are required; surface a clear error when retrieval fails.
  String gcpTempLocation;
  try {
    gcpTempLocation = dataflowOptions.getGcpTempLocation();
  } catch (Exception e) {
    throw new IllegalArgumentException("DataflowRunner requires gcpTempLocation, "
        + "but failed to retrieve a value from PipelineOptions", e);
  }
  validator.validateOutputFilePrefixSupported(gcpTempLocation);
  String stagingLocation;
  try {
    stagingLocation = dataflowOptions.getStagingLocation();
  } catch (Exception e) {
    throw new IllegalArgumentException("DataflowRunner requires stagingLocation, "
        + "but failed to retrieve a value from PipelineOptions", e);
  }
  validator.validateOutputFilePrefixSupported(stagingLocation);
  if (!Strings.isNullOrEmpty(dataflowOptions.getSaveProfilesToGcs())) {
    validator.validateOutputFilePrefixSupported(dataflowOptions.getSaveProfilesToGcs());
  }
  // Default the staged files to the classpath when the user did not specify them.
  if (dataflowOptions.getFilesToStage() == null) {
    dataflowOptions.setFilesToStage(detectClassPathResourcesToStage(
        DataflowRunner.class.getClassLoader()));
    LOG.info("PipelineOptions.filesToStage was not specified. "
        + "Defaulting to files from the classpath: will stage {} files. "
        + "Enable logging at DEBUG level to see which files will be staged.",
        dataflowOptions.getFilesToStage().size());
    LOG.debug("Classpath elements: {}", dataflowOptions.getFilesToStage());
  }
  // Verify jobName according to service requirements, truncating converting to lowercase if
  // necessary.
  String jobName =
      dataflowOptions
          .getJobName()
          .toLowerCase();
  checkArgument(
      jobName.matches("[a-z]([-a-z0-9]*[a-z0-9])?"),
      "JobName invalid; the name must consist of only the characters "
          + "[-a-z0-9], starting with a letter and ending with a letter "
          + "or number");
  if (!jobName.equals(dataflowOptions.getJobName())) {
    LOG.info(
        "PipelineOptions.jobName did not match the service requirements. "
            + "Using {} instead of {}.",
        jobName,
        dataflowOptions.getJobName());
  }
  dataflowOptions.setJobName(jobName);
  // Verify project: reject all-digit values (project numbers) and anything that does not
  // look like a project ID.
  String project = dataflowOptions.getProject();
  if (project.matches("[0-9]*")) {
    throw new IllegalArgumentException("Project ID '" + project
        + "' invalid. Please make sure you specified the Project ID, not project number.");
  } else if (!project.matches(PROJECT_ID_REGEXP)) {
    throw new IllegalArgumentException("Project ID '" + project
        + "' invalid. Please make sure you specified the Project ID, not project description.");
  }
  DataflowPipelineDebugOptions debugOptions =
      dataflowOptions.as(DataflowPipelineDebugOptions.class);
  // Verify the number of worker threads is a valid value
  if (debugOptions.getNumberOfWorkerHarnessThreads() < 0) {
    throw new IllegalArgumentException("Number of worker harness threads '"
        + debugOptions.getNumberOfWorkerHarnessThreads()
        + "' invalid. Please make sure the value is non-negative.");
  }
  if (dataflowOptions.isStreaming() && dataflowOptions.getGcsUploadBufferSizeBytes() == null) {
    dataflowOptions.setGcsUploadBufferSizeBytes(GCS_UPLOAD_BUFFER_SIZE_BYTES_DEFAULT);
  }
  return new DataflowRunner(dataflowOptions);
}
// Builds the transform-override table for this runner. NOTE(review): the override map is an
// ImmutableMap; inserting the same transform class twice would throw, so the branch structure
// below must keep each key unique -- kept byte-identical for that reason.
@VisibleForTesting protected DataflowRunner(DataflowPipelineOptions options) {
this.options = options;
this.dataflowClient = DataflowClient.create(options);
this.translator = DataflowPipelineTranslator.fromOptions(options);
this.pcollectionsRequiringIndexedFormat = new HashSet<>();
this.ptransformViewsWithNonDeterministicKeyCoders = new HashSet<>();
ImmutableMap.Builder<Class<?>, Class<?>> builder = ImmutableMap.<Class<?>, Class<?>>builder();
if (options.isStreaming()) {
// Streaming mode: replace the View.* and Read transforms with streaming-aware variants.
builder.put(Combine.GloballyAsSingletonView.class,
StreamingViewOverrides.StreamingCombineGloballyAsSingletonView.class);
builder.put(View.AsMap.class, StreamingViewOverrides.StreamingViewAsMap.class);
builder.put(View.AsMultimap.class, StreamingViewOverrides.StreamingViewAsMultimap.class);
builder.put(View.AsSingleton.class, StreamingViewOverrides.StreamingViewAsSingleton.class);
builder.put(View.AsList.class, StreamingViewOverrides.StreamingViewAsList.class);
builder.put(View.AsIterable.class, StreamingViewOverrides.StreamingViewAsIterable.class);
builder.put(Read.Unbounded.class, StreamingUnboundedRead.class);
builder.put(Read.Bounded.class, StreamingBoundedRead.class);
// In streaming mode must use either the custom Pubsub unbounded source/sink or
// defer to Windmill's built-in implementation.
builder.put(PubsubIO.Read.PubsubBoundedReader.class, UnsupportedIO.class);
builder.put(PubsubIO.Write.PubsubBoundedWriter.class, UnsupportedIO.class);
// The experiment flags allow opting back into the custom Pubsub source/sink.
if (options.getExperiments() == null
|| !options.getExperiments().contains("enable_custom_pubsub_source")) {
builder.put(PubsubUnboundedSource.class, StreamingPubsubIORead.class);
}
if (options.getExperiments() == null
|| !options.getExperiments().contains("enable_custom_pubsub_sink")) {
builder.put(PubsubUnboundedSink.class, StreamingPubsubIOWrite.class);
}
} else {
// Batch mode: unbounded reads are unsupported, and writes get a batch-specific override.
builder.put(Read.Unbounded.class, UnsupportedIO.class);
builder.put(Write.Bound.class, BatchWrite.class);
// In batch mode must use the custom Pubsub bounded source/sink.
builder.put(PubsubUnboundedSource.class, UnsupportedIO.class);
builder.put(PubsubUnboundedSink.class, UnsupportedIO.class);
// ISM-format side inputs are the default; the experiment flag disables them.
if (options.getExperiments() == null
|| !options.getExperiments().contains("disable_ism_side_input")) {
builder.put(View.AsMap.class, BatchViewOverrides.BatchViewAsMap.class);
builder.put(View.AsMultimap.class, BatchViewOverrides.BatchViewAsMultimap.class);
builder.put(View.AsSingleton.class, BatchViewOverrides.BatchViewAsSingleton.class);
builder.put(View.AsList.class, BatchViewOverrides.BatchViewAsList.class);
builder.put(View.AsIterable.class, BatchViewOverrides.BatchViewAsIterable.class);
}
}
overrides = builder.build();
}
/**
 * Applies the given transform to the input. For transforms with customized definitions
 * for the Dataflow pipeline runner, the application is intercepted and modified here.
 *
 * <p>Three interception cases, in order: GroupedValues (translated as a primitive),
 * empty Flatten (replaced by an empty Create), and any transform registered in the
 * {@code overrides} table (replaced by its override class). Everything else falls through
 * to the default application.
 */
@Override
public <OutputT extends POutput, InputT extends PInput> OutputT apply(
PTransform<InputT, OutputT> transform, InputT input) {
if (Combine.GroupedValues.class.equals(transform.getClass())) {
// For both Dataflow runners (streaming and batch), GroupByKey and GroupedValues are
// primitives. Returning a primitive output instead of the expanded definition
// signals to the translator that translation is necessary.
@SuppressWarnings("unchecked")
PCollection<?> pc = (PCollection<?>) input;
@SuppressWarnings("unchecked")
OutputT outputT =
(OutputT)
PCollection.createPrimitiveOutputInternal(
pc.getPipeline(), pc.getWindowingStrategy(), pc.isBounded());
return outputT;
} else if (Flatten.FlattenPCollectionList.class.equals(transform.getClass())
&& ((PCollectionList<?>) input).size() == 0) {
// An empty Flatten is replaced by an empty Create with a VoidCoder.
// This can cause downstream coder inference to be screwy. Most of the time, that won't be
// hugely impactful, because there will never be any elements encoded with this coder;
// the issue stems from flattening this with another PCollection.
return (OutputT)
Pipeline.applyTransform(
input.getPipeline().begin(), Create.empty(VoidCoder.of()));
} else if (overrides.containsKey(transform.getClass())) {
// It is the responsibility of whoever constructs overrides to ensure this is type safe.
@SuppressWarnings("unchecked")
Class<PTransform<InputT, OutputT>> transformClass =
(Class<PTransform<InputT, OutputT>>) transform.getClass();
@SuppressWarnings("unchecked")
Class<PTransform<InputT, OutputT>> customTransformClass =
(Class<PTransform<InputT, OutputT>>) overrides.get(transform.getClass());
// Instantiate the override with (this runner, original transform) constructor args.
PTransform<InputT, OutputT> customTransform =
InstanceBuilder.ofType(customTransformClass)
.withArg(DataflowRunner.class, this)
.withArg(transformClass, transform)
.build();
return Pipeline.applyTransform(input, customTransform);
} else {
return super.apply(transform, input);
}
}
/**
 * Builds the console message that points the user at the Cloud Debugger page for this job.
 *
 * @param projectId GCP project the job runs in
 * @param uniquifier debuggee identifier for this job
 * @return human-readable message containing the debugger URL
 */
private String debuggerMessage(String projectId, String uniquifier) {
  return "To debug your job, visit Google Cloud Debugger at: "
      + "https://console.developers.google.com/debug?project=" + projectId
      + "&dbgee=" + uniquifier;
}
/**
 * Registers this job as a Cloud Debugger debuggee when the feature is enabled, storing the
 * resulting {@code Debuggee} back into the options and printing the debugger URL.
 *
 * <p>No-op when the Cloud Debugger option is disabled; fails if a debuggee was pre-set.
 *
 * @param options pipeline options to read the flag from and write the debuggee into
 * @param uniquifier unique identifier used to register the debuggee
 */
private void maybeRegisterDebuggee(DataflowPipelineOptions options, String uniquifier) {
  if (!options.getEnableCloudDebugger()) {
    return;
  }
  if (options.getDebuggee() != null) {
    throw new RuntimeException("Should not specify the debuggee");
  }
  Debuggee registered =
      registerDebuggee(DataflowTransport.newClouddebuggerClient(options).build(), uniquifier);
  options.setDebuggee(registered);
  System.out.println(debuggerMessage(options.getProject(), registered.getUniquifier()));
}
/**
 * Registers a debuggee for this job with the Cloud Debugger service.
 *
 * @param debuggerClient client used to talk to the Cloud Debugger API
 * @param uniquifier unique identifier (also used as the description)
 * @return the registered {@code Debuggee}
 * @throws RuntimeException if registration fails or the service reports an error status
 */
private Debuggee registerDebuggee(Clouddebugger debuggerClient, String uniquifier) {
  Debuggee debuggeeInfo = new Debuggee()
      .setProject(options.getProject())
      .setUniquifier(uniquifier)
      .setDescription(uniquifier)
      .setAgentVersion("google.com/cloud-dataflow-java/v1");
  RegisterDebuggeeRequest request = new RegisterDebuggeeRequest();
  request.setDebuggee(debuggeeInfo);
  try {
    RegisterDebuggeeResponse response =
        debuggerClient.controller().debuggees().register(request).execute();
    Debuggee registered = response.getDebuggee();
    // The service may return a debuggee whose status carries an error; surface it.
    if (registered.getStatus() != null && registered.getStatus().getIsError()) {
      throw new RuntimeException("Unable to register with the debugger: "
          + registered.getStatus().getDescription().getFormat());
    }
    return registered;
  } catch (IOException e) {
    throw new RuntimeException("Unable to register with the debugger: ", e);
  }
}
@Override
public DataflowPipelineJob run(Pipeline pipeline) {
logWarningIfPCollectionViewHasNonDeterministicKeyCoder(pipeline);
LOG.info("Executing pipeline on the Dataflow Service, which will have billing implications "
+ "related to Google Compute Engine usage and other Google Cloud Services.");
List<DataflowPackage> packages = options.getStager().stageFiles();
// Set a unique client_request_id in the CreateJob request.
// This is used to ensure idempotence of job creation across retried
// attempts to create a job. Specifically, if the service returns a job with
// a different client_request_id, it means the returned one is a different
// job previously created with the same job name, and that the job creation
// has been effectively rejected. The SDK should return
// Error::Already_Exists to user in that case.
int randomNum = new Random().nextInt(9000) + 1000;
String requestId = DateTimeFormat.forPattern("YYYYMMddHHmmssmmm").withZone(DateTimeZone.UTC)
.print(DateTimeUtils.currentTimeMillis()) + "_" + randomNum;
// Try to create a debuggee ID. This must happen before the job is translated since it may
// update the options.
DataflowPipelineOptions dataflowOptions = options.as(DataflowPipelineOptions.class);
maybeRegisterDebuggee(dataflowOptions, requestId);
JobSpecification jobSpecification =
translator.translate(pipeline, this, packages);
Job newJob = jobSpecification.getJob();
newJob.setClientRequestId(requestId);
ReleaseInfo releaseInfo = ReleaseInfo.getReleaseInfo();
String version = releaseInfo.getVersion();
checkState(
!version.equals("${pom.version}"),
"Unable to submit a job to the Dataflow service with unset version ${pom.version}");
System.out.println("Dataflow SDK version: " + version);
newJob.getEnvironment().setUserAgent(releaseInfo);
// The Dataflow Service may write to the temporary directory directly, so
// must be verified.
if (!isNullOrEmpty(options.getGcpTempLocation())) {
newJob.getEnvironment().setTempStoragePrefix(
dataflowOptions.getPathValidator().verifyPath(options.getGcpTempLocation()));
}
newJob.getEnvironment().setDataset(options.getTempDatasetId());
newJob.getEnvironment().setExperiments(options.getExperiments());
// Set the Docker container image that executes Dataflow worker harness, residing in Google
// Container Registry. Translator is guaranteed to create a worker pool prior to this point.
String workerHarnessContainerImage =
options.as(DataflowPipelineWorkerPoolOptions.class)
.getWorkerHarnessContainerImage();
for (WorkerPool workerPool : newJob.getEnvironment().getWorkerPools()) {
workerPool.setWorkerHarnessContainerImage(workerHarnessContainerImage);
}
// Requirements about the service.
Map<String, Object> environmentVersion = new HashMap<>();
environmentVersion.put(
PropertyNames.ENVIRONMENT_VERSION_MAJOR_KEY,
DataflowRunnerInfo.getDataflowRunnerInfo().getEnvironmentMajorVersion());
newJob.getEnvironment().setVersion(environmentVersion);
// Default jobType is JAVA_BATCH_AUTOSCALING: A Java job with workers that the job can
// autoscale if specified.
String jobType = "JAVA_BATCH_AUTOSCALING";
if (options.isStreaming()) {
jobType = "STREAMING";
}
environmentVersion.put(PropertyNames.ENVIRONMENT_VERSION_JOB_TYPE_KEY, jobType);
if (hooks != null) {
hooks.modifyEnvironmentBeforeSubmission(newJob.getEnvironment());
}
if (!isNullOrEmpty(options.getDataflowJobFile())
|| !isNullOrEmpty(options.getTemplateLocation())) {
boolean isTemplate = !isNullOrEmpty(options.getTemplateLocation());
if (isTemplate) {
checkArgument(isNullOrEmpty(options.getDataflowJobFile()),
"--dataflowJobFile and --templateLocation are mutually exclusive.");
}
String fileLocation = firstNonNull(
options.getTemplateLocation(), options.getDataflowJobFile());
checkArgument(fileLocation.startsWith("/") || fileLocation.startsWith("gs://"),
String.format(
"Location must be local or on Cloud Storage, got {}.", fileLocation));
String workSpecJson = DataflowPipelineTranslator.jobToString(newJob);
try (PrintWriter printWriter = new PrintWriter(
Channels.newOutputStream(IOChannelUtils.create(fileLocation, MimeTypes.TEXT)))) {
printWriter.print(workSpecJson);
LOG.info("Printed job specification to {}", fileLocation);
} catch (IOException ex) {
String error =
String.format("Cannot create output file at %s", fileLocation);
if (isTemplate) {
throw new RuntimeException(error, ex);
} else {
LOG.warn(error, ex);
}
}
if (isTemplate) {
LOG.info("Template successfully created.");
return new DataflowTemplateJob();
}
}
String jobIdToUpdate = null;
if (options.isUpdate()) {
jobIdToUpdate = getJobIdFromName(options.getJobName());
newJob.setTransformNameMapping(options.getTransformNameMapping());
newJob.setReplaceJobId(jobIdToUpdate);
}
Job jobResult;
try {
jobResult = dataflowClient.createJob(newJob);
} catch (GoogleJsonResponseException e) {
String errorMessages = "Unexpected errors";
if (e.getDetails() != null) {
if (Utf8.encodedLength(newJob.toString()) >= CREATE_JOB_REQUEST_LIMIT_BYTES) {
errorMessages = "The size of the serialized JSON representation of the pipeline "
+ "exceeds the allowable limit. "
+ "For more information, please check the FAQ link below:\n"
+ "https://cloud.google.com/dataflow/faq";
} else {
errorMessages = e.getDetails().getMessage();
}
}
throw new RuntimeException("Failed to create a workflow job: " + errorMessages, e);
} catch (IOException e) {
throw new RuntimeException("Failed to create a workflow job", e);
}
// Obtain all of the extractors from the PTransforms used in the pipeline so the
// DataflowPipelineJob has access to them.
Map<Aggregator<?, ?>, Collection<PTransform<?, ?>>> aggregatorSteps =
pipeline.getAggregatorSteps();
DataflowAggregatorTransforms aggregatorTransforms =
new DataflowAggregatorTransforms(aggregatorSteps, jobSpecification.getStepNames());
// Use a raw client for post-launch monitoring, as status calls may fail
// regularly and need not be retried automatically.
DataflowPipelineJob dataflowPipelineJob =
new DataflowPipelineJob(jobResult.getId(), options, aggregatorTransforms);
// If the service returned client request id, the SDK needs to compare it
// with the original id generated in the request, if they are not the same
// (i.e., the returned job is not created by this request), throw
// DataflowJobAlreadyExistsException or DataflowJobAlreadyUpdatedException
// depending on whether this is a reload or not.
if (jobResult.getClientRequestId() != null && !jobResult.getClientRequestId().isEmpty()
&& !jobResult.getClientRequestId().equals(requestId)) {
// If updating a job.
if (options.isUpdate()) {
throw new DataflowJobAlreadyUpdatedException(dataflowPipelineJob,
String.format("The job named %s with id: %s has already been updated into job id: %s "
+ "and cannot be updated again.",
newJob.getName(), jobIdToUpdate, jobResult.getId()));
} else {
throw new DataflowJobAlreadyExistsException(dataflowPipelineJob,
String.format("There is already an active job named %s with id: %s. If you want "
+ "to submit a second job, try again by setting a different name using --jobName.",
newJob.getName(), jobResult.getId()));
}
}
LOG.info("To access the Dataflow monitoring console, please navigate to {}",
MonitoringUtil.getJobMonitoringPageURL(options.getProject(), jobResult.getId()));
System.out.println("Submitted job: " + jobResult.getId());
LOG.info("To cancel the job using the 'gcloud' tool, run:\n> {}",
MonitoringUtil.getGcloudCancelCommand(options, jobResult.getId()));
return dataflowPipelineJob;
}
/**
* Returns the DataflowPipelineTranslator associated with this object.
*/
public DataflowPipelineTranslator getTranslator() {
return translator;
}
/**
* Sets callbacks to invoke during execution see {@code DataflowRunnerHooks}.
*/
@Experimental
public void setHooks(DataflowRunnerHooks hooks) {
this.hooks = hooks;
}
/////////////////////////////////////////////////////////////////////////////
/** Outputs a warning about PCollection views without deterministic key coders. */
private void logWarningIfPCollectionViewHasNonDeterministicKeyCoder(Pipeline pipeline) {
// We need to wait till this point to determine the names of the transforms since only
// at this time do we know the hierarchy of the transforms otherwise we could
// have just recorded the full names during apply time.
if (!ptransformViewsWithNonDeterministicKeyCoders.isEmpty()) {
final SortedSet<String> ptransformViewNamesWithNonDeterministicKeyCoders = new TreeSet<>();
pipeline.traverseTopologically(new PipelineVisitor() {
@Override
public void visitValue(PValue value, TransformHierarchy.Node producer) {
}
@Override
public void visitPrimitiveTransform(TransformHierarchy.Node node) {
if (ptransformViewsWithNonDeterministicKeyCoders.contains(node.getTransform())) {
ptransformViewNamesWithNonDeterministicKeyCoders.add(node.getFullName());
}
}
@Override
public CompositeBehavior enterCompositeTransform(TransformHierarchy.Node node) {
if (ptransformViewsWithNonDeterministicKeyCoders.contains(node.getTransform())) {
ptransformViewNamesWithNonDeterministicKeyCoders.add(node.getFullName());
}
return CompositeBehavior.ENTER_TRANSFORM;
}
@Override
public void leaveCompositeTransform(TransformHierarchy.Node node) {
}
});
LOG.warn("Unable to use indexed implementation for View.AsMap and View.AsMultimap for {} "
+ "because the key coder is not deterministic. Falling back to singleton implementation "
+ "which may cause memory and/or performance problems. Future major versions of "
+ "Dataflow will require deterministic key coders.",
ptransformViewNamesWithNonDeterministicKeyCoders);
}
}
/**
* Returns true if the passed in {@link PCollection} needs to be materialiazed using
* an indexed format.
*/
boolean doesPCollectionRequireIndexedFormat(PCollection<?> pcol) {
return pcollectionsRequiringIndexedFormat.contains(pcol);
}
/**
* Marks the passed in {@link PCollection} as requiring to be materialized using
* an indexed format.
*/
void addPCollectionRequiringIndexedFormat(PCollection<?> pcol) {
pcollectionsRequiringIndexedFormat.add(pcol);
}
  /**
   * {@link View} transforms whose key coder is not deterministic; collected at apply time
   * and reported as a single warning when the job is submitted.
   */
  private Set<PTransform<?, ?>> ptransformViewsWithNonDeterministicKeyCoders;
/**
* Records that the {@link PTransform} requires a deterministic key coder.
*/
void recordViewUsesNonDeterministicKeyCoder(PTransform<?, ?> ptransform) {
ptransformViewsWithNonDeterministicKeyCoders.add(ptransform);
}
/**
* Specialized implementation which overrides
* {@link org.apache.beam.sdk.io.Write.Bound Write.Bound} to provide Google
* Cloud Dataflow specific path validation of {@link FileBasedSink}s.
*/
private static class BatchWrite<T> extends PTransform<PCollection<T>, PDone> {
private final DataflowRunner runner;
private final Write.Bound<T> transform;
/**
* Builds an instance of this class from the overridden transform.
*/
@SuppressWarnings("unused") // used via reflection in DataflowRunner#apply()
public BatchWrite(DataflowRunner runner, Write.Bound<T> transform) {
this.runner = runner;
this.transform = transform;
}
@Override
public PDone expand(PCollection<T> input) {
if (transform.getSink() instanceof FileBasedSink) {
FileBasedSink<?> sink = (FileBasedSink<?>) transform.getSink();
if (sink.getBaseOutputFilenameProvider().isAccessible()) {
PathValidator validator = runner.options.getPathValidator();
validator.validateOutputFilePrefixSupported(
sink.getBaseOutputFilenameProvider().get());
}
}
return transform.expand(input);
}
}
// ================================================================================
// PubsubIO translations
// ================================================================================
/**
* Suppress application of {@link PubsubUnboundedSource#expand} in streaming mode so that we
* can instead defer to Windmill's implementation.
*/
private static class StreamingPubsubIORead<T> extends PTransform<PBegin, PCollection<T>> {
private final PubsubUnboundedSource<T> transform;
/**
* Builds an instance of this class from the overridden transform.
*/
public StreamingPubsubIORead(
DataflowRunner runner, PubsubUnboundedSource<T> transform) {
this.transform = transform;
}
PubsubUnboundedSource<T> getOverriddenTransform() {
return transform;
}
@Override
public PCollection<T> expand(PBegin input) {
return PCollection.<T>createPrimitiveOutputInternal(
input.getPipeline(), WindowingStrategy.globalDefault(), IsBounded.UNBOUNDED)
.setCoder(transform.getElementCoder());
}
@Override
protected String getKindString() {
return "StreamingPubsubIORead";
}
static {
DataflowPipelineTranslator.registerTransformTranslator(
StreamingPubsubIORead.class, new StreamingPubsubIOReadTranslator<>());
}
}
  /** Rewrite {@link StreamingPubsubIORead} to the appropriate internal node. */
  private static class StreamingPubsubIOReadTranslator<T>
      implements TransformTranslator<StreamingPubsubIORead<T>> {
    /**
     * Emits a "ParallelRead" step in "pubsub" format carrying the suppressed source's
     * configuration. Topic and subscription are written as concrete v1beta1 paths when
     * their providers are accessible at construction time; otherwise the provider's
     * property name is written so the value can be substituted at runtime.
     */
    @Override
    public void translate(StreamingPubsubIORead<T> transform, TranslationContext context) {
      // Windmill provides the Pubsub read implementation, which only exists in streaming.
      checkArgument(
          context.getPipelineOptions().isStreaming(),
          "StreamingPubsubIORead is only for streaming pipelines.");
      PubsubUnboundedSource<T> overriddenTransform = transform.getOverriddenTransform();
      StepTranslationContext stepContext = context.addStep(transform, "ParallelRead");
      stepContext.addInput(PropertyNames.FORMAT, "pubsub");
      if (overriddenTransform.getTopicProvider() != null) {
        if (overriddenTransform.getTopicProvider().isAccessible()) {
          stepContext.addInput(
              PropertyNames.PUBSUB_TOPIC, overriddenTransform.getTopic().getV1Beta1Path());
        } else {
          // Runtime-valued topic: record the option name for later substitution.
          stepContext.addInput(
              PropertyNames.PUBSUB_TOPIC_OVERRIDE,
              ((NestedValueProvider) overriddenTransform.getTopicProvider()).propertyName());
        }
      }
      if (overriddenTransform.getSubscriptionProvider() != null) {
        if (overriddenTransform.getSubscriptionProvider().isAccessible()) {
          stepContext.addInput(
              PropertyNames.PUBSUB_SUBSCRIPTION,
              overriddenTransform.getSubscription().getV1Beta1Path());
        } else {
          // Runtime-valued subscription: record the option name for later substitution.
          stepContext.addInput(
              PropertyNames.PUBSUB_SUBSCRIPTION_OVERRIDE,
              ((NestedValueProvider) overriddenTransform.getSubscriptionProvider()).propertyName());
        }
      }
      if (overriddenTransform.getTimestampLabel() != null) {
        stepContext.addInput(
            PropertyNames.PUBSUB_TIMESTAMP_LABEL, overriddenTransform.getTimestampLabel());
      }
      if (overriddenTransform.getIdLabel() != null) {
        stepContext.addInput(PropertyNames.PUBSUB_ID_LABEL, overriddenTransform.getIdLabel());
      }
      if (overriddenTransform.getWithAttributesParseFn() != null) {
        // Ship the serialized parse function so the worker can decode message attributes.
        stepContext.addInput(
            PropertyNames.PUBSUB_SERIALIZED_ATTRIBUTES_FN,
            byteArrayToJsonString(
                serializeToByteArray(overriddenTransform.getWithAttributesParseFn())));
      }
      stepContext.addOutput(context.getOutput(transform));
    }
  }
/**
* Suppress application of {@link PubsubUnboundedSink#expand} in streaming mode so that we
* can instead defer to Windmill's implementation.
*/
private static class StreamingPubsubIOWrite<T> extends PTransform<PCollection<T>, PDone> {
private final PubsubUnboundedSink<T> transform;
/**
* Builds an instance of this class from the overridden transform.
*/
public StreamingPubsubIOWrite(
DataflowRunner runner, PubsubUnboundedSink<T> transform) {
this.transform = transform;
}
PubsubUnboundedSink<T> getOverriddenTransform() {
return transform;
}
@Override
public PDone expand(PCollection<T> input) {
return PDone.in(input.getPipeline());
}
@Override
protected String getKindString() {
return "StreamingPubsubIOWrite";
}
static {
DataflowPipelineTranslator.registerTransformTranslator(
StreamingPubsubIOWrite.class, new StreamingPubsubIOWriteTranslator<>());
}
}
  /**
   * Rewrite {@link StreamingPubsubIOWrite} to the appropriate internal node.
   */
  private static class StreamingPubsubIOWriteTranslator<T> implements
      TransformTranslator<StreamingPubsubIOWrite<T>> {
    /**
     * Emits a "ParallelWrite" step in "pubsub" format carrying the suppressed sink's
     * configuration: topic (concrete path or runtime override), labels, and either a
     * serialized format function or the element coder.
     */
    @Override
    public void translate(
        StreamingPubsubIOWrite<T> transform,
        TranslationContext context) {
      // Windmill provides the Pubsub write implementation, which only exists in streaming.
      checkArgument(context.getPipelineOptions().isStreaming(),
          "StreamingPubsubIOWrite is only for streaming pipelines.");
      PubsubUnboundedSink<T> overriddenTransform = transform.getOverriddenTransform();
      StepTranslationContext stepContext = context.addStep(transform, "ParallelWrite");
      stepContext.addInput(PropertyNames.FORMAT, "pubsub");
      if (overriddenTransform.getTopicProvider().isAccessible()) {
        stepContext.addInput(
            PropertyNames.PUBSUB_TOPIC, overriddenTransform.getTopic().getV1Beta1Path());
      } else {
        // Runtime-valued topic: record the option name for later substitution.
        stepContext.addInput(
            PropertyNames.PUBSUB_TOPIC_OVERRIDE,
            ((NestedValueProvider) overriddenTransform.getTopicProvider()).propertyName());
      }
      if (overriddenTransform.getTimestampLabel() != null) {
        stepContext.addInput(
            PropertyNames.PUBSUB_TIMESTAMP_LABEL, overriddenTransform.getTimestampLabel());
      }
      if (overriddenTransform.getIdLabel() != null) {
        stepContext.addInput(PropertyNames.PUBSUB_ID_LABEL, overriddenTransform.getIdLabel());
      }
      if (overriddenTransform.getFormatFn() != null) {
        stepContext.addInput(
            PropertyNames.PUBSUB_SERIALIZED_ATTRIBUTES_FN,
            byteArrayToJsonString(serializeToByteArray(overriddenTransform.getFormatFn())));
        // No coder is needed in this case since the formatFn formats directly into a byte[],
        // however the Dataflow backend require a coder to be set.
        stepContext.addEncodingInput(WindowedValue.getValueOnlyCoder(VoidCoder.of()));
      } else if (overriddenTransform.getElementCoder() != null) {
        stepContext.addEncodingInput(WindowedValue.getValueOnlyCoder(
            overriddenTransform.getElementCoder()));
      }
      PCollection<T> input = context.getInput(transform);
      stepContext.addInput(PropertyNames.PARALLEL_INPUT, input);
    }
  }
// ================================================================================
  /**
   * Specialized implementation for
   * {@link org.apache.beam.sdk.io.Read.Unbounded Read.Unbounded} for the
   * Dataflow runner in streaming mode.
   *
   * <p>In particular, if an UnboundedSource requires deduplication, then features of WindmillSink
   * are leveraged to do the deduplication.
   */
  private static class StreamingUnboundedRead<T> extends PTransform<PInput, PCollection<T>> {
    private final UnboundedSource<T, ?> source;
    /**
     * Builds an instance of this class from the overridden transform.
     */
    @SuppressWarnings("unused") // used via reflection in DataflowRunner#apply()
    public StreamingUnboundedRead(DataflowRunner runner, Read.Unbounded<T> transform) {
      this.source = transform.getSource();
    }
    @Override
    protected Coder<T> getDefaultOutputCoder() {
      return source.getDefaultOutputCoder();
    }
    /**
     * Reads (value, id) pairs from the source; routes through {@link Deduplicate} when the
     * source may redeliver records, otherwise just strips the ids.
     */
    @Override
    public final PCollection<T> expand(PInput input) {
      source.validate();
      if (source.requiresDeduping()) {
        return Pipeline.applyTransform(input, new ReadWithIds<>(source))
            .apply(new Deduplicate<T>());
      } else {
        return Pipeline.applyTransform(input, new ReadWithIds<>(source))
            .apply("StripIds", ParDo.of(new ValueWithRecordId.StripIdsDoFn<T>()));
      }
    }
    /**
     * {@link PTransform} that reads {@code (record,recordId)} pairs from an
     * {@link UnboundedSource}.
     */
    private static class ReadWithIds<T>
        extends PTransform<PInput, PCollection<ValueWithRecordId<T>>> {
      private final UnboundedSource<T, ?> source;
      private ReadWithIds(UnboundedSource<T, ?> source) {
        this.source = source;
      }
      @Override
      public final PCollection<ValueWithRecordId<T>> expand(PInput input) {
        // Primitive output only; the actual read is supplied by the service.
        return PCollection.<ValueWithRecordId<T>>createPrimitiveOutputInternal(
            input.getPipeline(), WindowingStrategy.globalDefault(), IsBounded.UNBOUNDED);
      }
      @Override
      protected Coder<ValueWithRecordId<T>> getDefaultOutputCoder() {
        return ValueWithRecordId.ValueWithRecordIdCoder.of(source.getDefaultOutputCoder());
      }
      @Override
      public void populateDisplayData(DisplayData.Builder builder) {
        builder.delegate(source);
      }
      public UnboundedSource<T, ?> getSource() {
        return source;
      }
    }
    @Override
    public String getKindString() {
      return String.format("Read(%s)", NameUtils.approximateSimpleName(source));
    }
    // Register the translator for the nested primitive when this class is first loaded.
    static {
      DataflowPipelineTranslator.registerTransformTranslator(
          ReadWithIds.class, new ReadWithIdsTranslator());
    }
    /** Translates {@link ReadWithIds} using the generic read-helper translation. */
    private static class ReadWithIdsTranslator
        implements TransformTranslator<ReadWithIds<?>> {
      @Override
      public void translate(ReadWithIds<?> transform,
          TranslationContext context) {
        ReadTranslator.translateReadHelper(transform.getSource(), transform, context);
      }
    }
  }
  /**
   * Remove values with duplicate ids.
   */
  private static class Deduplicate<T>
      extends PTransform<PCollection<ValueWithRecordId<T>>, PCollection<T>> {
    // Use a finite set of keys to improve bundling. Without this, the key space
    // will be the space of ids which is potentially very large, which results in much
    // more per-key overhead.
    private static final int NUM_RESHARD_KEYS = 10000;
    @Override
    public PCollection<T> expand(PCollection<ValueWithRecordId<T>> input) {
      return input
          // Key each record by a small hash of its id so Reshuffle can bundle efficiently.
          // NOTE(review): Java's % yields negative values for negative hash codes, so the
          // effective key space is (-NUM_RESHARD_KEYS, NUM_RESHARD_KEYS) — still finite.
          .apply(WithKeys.of(new SerializableFunction<ValueWithRecordId<T>, Integer>() {
                  @Override
                  public Integer apply(ValueWithRecordId<T> value) {
                    return Arrays.hashCode(value.getId()) % NUM_RESHARD_KEYS;
                  }
                }))
          // Reshuffle will dedup based on ids in ValueWithRecordId by passing the data through
          // WindmillSink.
          .apply(Reshuffle.<Integer, ValueWithRecordId<T>>of())
          // Duplicates have been dropped; emit only the wrapped values.
          .apply("StripIds", ParDo.of(
              new DoFn<KV<Integer, ValueWithRecordId<T>>, T>() {
                @ProcessElement
                public void processElement(ProcessContext c) {
                  c.output(c.element().getValue().getValue());
                }
              }));
    }
  }
/**
* Specialized implementation for {@link org.apache.beam.sdk.io.Read.Bounded Read.Bounded} for the
* Dataflow runner in streaming mode.
*/
private static class StreamingBoundedRead<T> extends PTransform<PBegin, PCollection<T>> {
private final BoundedSource<T> source;
/** Builds an instance of this class from the overridden transform. */
@SuppressWarnings("unused") // used via reflection in DataflowRunner#apply()
public StreamingBoundedRead(DataflowRunner runner, Read.Bounded<T> transform) {
this.source = transform.getSource();
}
@Override
protected Coder<T> getDefaultOutputCoder() {
return source.getDefaultOutputCoder();
}
@Override
public final PCollection<T> expand(PBegin input) {
source.validate();
return Pipeline.applyTransform(input, new DataflowUnboundedReadFromBoundedSource<>(source))
.setIsBoundedInternal(IsBounded.BOUNDED);
}
}
/**
* A marker {@link DoFn} for writing the contents of a {@link PCollection} to a streaming
* {@link PCollectionView} backend implementation.
*/
@Deprecated
public static class StreamingPCollectionViewWriterFn<T> extends DoFn<Iterable<T>, T> {
private final PCollectionView<?> view;
private final Coder<T> dataCoder;
public static <T> StreamingPCollectionViewWriterFn<T> create(
PCollectionView<?> view, Coder<T> dataCoder) {
return new StreamingPCollectionViewWriterFn<>(view, dataCoder);
}
private StreamingPCollectionViewWriterFn(PCollectionView<?> view, Coder<T> dataCoder) {
this.view = view;
this.dataCoder = dataCoder;
}
public PCollectionView<?> getView() {
return view;
}
public Coder<T> getDataCoder() {
return dataCoder;
}
@ProcessElement
public void processElement(ProcessContext c, BoundedWindow w) throws Exception {
throw new UnsupportedOperationException(
String.format(
"%s is a marker class only and should never be executed.", getClass().getName()));
}
}
  /**
   * Specialized expansion for unsupported IO transforms and DoFns that throws an error.
   */
  private static class UnsupportedIO<InputT extends PInput, OutputT extends POutput>
      extends PTransform<InputT, OutputT> {
    // Exactly one of {transform, doFn} is non-null, depending on the constructor used;
    // expand() uses whichever is set to name the unsupported operation.
    @Nullable
    private PTransform<?, ?> transform;
    @Nullable
    private DoFn<?, ?> doFn;
    /**
     * Builds an instance of this class from the overridden transform.
     */
    @SuppressWarnings("unused") // used via reflection in DataflowRunner#apply()
    public UnsupportedIO(DataflowRunner runner, Read.Unbounded<?> transform) {
      this.transform = transform;
    }
    /**
     * Builds an instance of this class from the overridden doFn.
     */
    @SuppressWarnings("unused") // used via reflection in DataflowRunner#apply()
    public UnsupportedIO(DataflowRunner runner,
        PubsubIO.Read<?>.PubsubBoundedReader doFn) {
      this.doFn = doFn;
    }
    /**
     * Builds an instance of this class from the overridden doFn.
     */
    @SuppressWarnings("unused") // used via reflection in DataflowRunner#apply()
    public UnsupportedIO(DataflowRunner runner,
        PubsubIO.Write<?>.PubsubBoundedWriter doFn) {
      this.doFn = doFn;
    }
    /**
     * Builds an instance of this class from the overridden transform.
     */
    @SuppressWarnings("unused") // used via reflection in DataflowRunner#apply()
    public UnsupportedIO(DataflowRunner runner, PubsubUnboundedSource<?> transform) {
      this.transform = transform;
    }
    /**
     * Builds an instance of this class from the overridden transform.
     */
    @SuppressWarnings("unused") // used via reflection in DataflowRunner#apply()
    public UnsupportedIO(DataflowRunner runner, PubsubUnboundedSink<?> transform) {
      this.transform = transform;
    }
    /**
     * Always throws, reporting the wrapped transform/DoFn as unsupported in the current
     * execution mode (streaming or batch).
     */
    @Override
    public OutputT expand(InputT input) {
      String mode = input.getPipeline().getOptions().as(StreamingOptions.class).isStreaming()
          ? "streaming" : "batch";
      // Name the transform when present, otherwise the DoFn.
      String name =
          transform == null
              ? NameUtils.approximateSimpleName(doFn)
              : NameUtils.approximatePTransformName(transform.getClass());
      throw new UnsupportedOperationException(
          String.format("The DataflowRunner in %s mode does not support %s.", mode, name));
    }
  }
@Override
public String toString() {
return "DataflowRunner#" + options.getJobName();
}
/**
* Attempts to detect all the resources the class loader has access to. This does not recurse
* to class loader parents stopping it from pulling in resources from the system class loader.
*
* @param classLoader The URLClassLoader to use to detect resources to stage.
* @throws IllegalArgumentException If either the class loader is not a URLClassLoader or one
* of the resources the class loader exposes is not a file resource.
* @return A list of absolute paths to the resources the class loader uses.
*/
protected static List<String> detectClassPathResourcesToStage(ClassLoader classLoader) {
if (!(classLoader instanceof URLClassLoader)) {
String message = String.format("Unable to use ClassLoader to detect classpath elements. "
+ "Current ClassLoader is %s, only URLClassLoaders are supported.", classLoader);
LOG.error(message);
throw new IllegalArgumentException(message);
}
List<String> files = new ArrayList<>();
for (URL url : ((URLClassLoader) classLoader).getURLs()) {
try {
files.add(new File(url.toURI()).getAbsolutePath());
} catch (IllegalArgumentException | URISyntaxException e) {
String message = String.format("Unable to convert url (%s) to file.", url);
LOG.error(message);
throw new IllegalArgumentException(message, e);
}
}
return files;
}
/**
* Finds the id for the running job of the given name.
*/
private String getJobIdFromName(String jobName) {
try {
ListJobsResponse listResult;
String token = null;
do {
listResult = dataflowClient.listJobs(token);
token = listResult.getNextPageToken();
for (Job job : listResult.getJobs()) {
if (job.getName().equals(jobName)
&& MonitoringUtil.toState(job.getCurrentState()).equals(State.RUNNING)) {
return job.getId();
}
}
} while (token != null);
} catch (GoogleJsonResponseException e) {
throw new RuntimeException(
"Got error while looking up jobs: "
+ (e.getDetails() != null ? e.getDetails().getMessage() : e), e);
} catch (IOException e) {
throw new RuntimeException("Got error while looking up jobs: ", e);
}
throw new IllegalArgumentException("Could not find running job named " + jobName);
}
}
| |
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
* Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.autoscaling.model;
import java.io.Serializable;
/**
*
*/
public class DescribeLoadBalancersResult implements Serializable, Cloneable {
/**
* <p>
* The load balancers.
* </p>
*/
private com.amazonaws.internal.SdkInternalList<LoadBalancerState> loadBalancers;
/**
* <p>
* The token to use when requesting the next set of items. If there are no
* additional items to return, the string is empty.
* </p>
*/
private String nextToken;
/**
* <p>
* The load balancers.
* </p>
*
* @return The load balancers.
*/
public java.util.List<LoadBalancerState> getLoadBalancers() {
if (loadBalancers == null) {
loadBalancers = new com.amazonaws.internal.SdkInternalList<LoadBalancerState>();
}
return loadBalancers;
}
/**
* <p>
* The load balancers.
* </p>
*
* @param loadBalancers
* The load balancers.
*/
public void setLoadBalancers(
java.util.Collection<LoadBalancerState> loadBalancers) {
if (loadBalancers == null) {
this.loadBalancers = null;
return;
}
this.loadBalancers = new com.amazonaws.internal.SdkInternalList<LoadBalancerState>(
loadBalancers);
}
/**
* <p>
* The load balancers.
* </p>
* <p>
* <b>NOTE:</b> This method appends the values to the existing list (if
* any). Use {@link #setLoadBalancers(java.util.Collection)} or
* {@link #withLoadBalancers(java.util.Collection)} if you want to override
* the existing values.
* </p>
*
* @param loadBalancers
* The load balancers.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public DescribeLoadBalancersResult withLoadBalancers(
LoadBalancerState... loadBalancers) {
if (this.loadBalancers == null) {
setLoadBalancers(new com.amazonaws.internal.SdkInternalList<LoadBalancerState>(
loadBalancers.length));
}
for (LoadBalancerState ele : loadBalancers) {
this.loadBalancers.add(ele);
}
return this;
}
/**
* <p>
* The load balancers.
* </p>
*
* @param loadBalancers
* The load balancers.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public DescribeLoadBalancersResult withLoadBalancers(
java.util.Collection<LoadBalancerState> loadBalancers) {
setLoadBalancers(loadBalancers);
return this;
}
/**
* <p>
* The token to use when requesting the next set of items. If there are no
* additional items to return, the string is empty.
* </p>
*
* @param nextToken
* The token to use when requesting the next set of items. If there
* are no additional items to return, the string is empty.
*/
public void setNextToken(String nextToken) {
this.nextToken = nextToken;
}
/**
* <p>
* The token to use when requesting the next set of items. If there are no
* additional items to return, the string is empty.
* </p>
*
* @return The token to use when requesting the next set of items. If there
* are no additional items to return, the string is empty.
*/
public String getNextToken() {
return this.nextToken;
}
/**
* <p>
* The token to use when requesting the next set of items. If there are no
* additional items to return, the string is empty.
* </p>
*
* @param nextToken
* The token to use when requesting the next set of items. If there
* are no additional items to return, the string is empty.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public DescribeLoadBalancersResult withNextToken(String nextToken) {
setNextToken(nextToken);
return this;
}
/**
* Returns a string representation of this object; useful for testing and
* debugging.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getLoadBalancers() != null)
sb.append("LoadBalancers: " + getLoadBalancers() + ",");
if (getNextToken() != null)
sb.append("NextToken: " + getNextToken());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof DescribeLoadBalancersResult == false)
return false;
DescribeLoadBalancersResult other = (DescribeLoadBalancersResult) obj;
if (other.getLoadBalancers() == null ^ this.getLoadBalancers() == null)
return false;
if (other.getLoadBalancers() != null
&& other.getLoadBalancers().equals(this.getLoadBalancers()) == false)
return false;
if (other.getNextToken() == null ^ this.getNextToken() == null)
return false;
if (other.getNextToken() != null
&& other.getNextToken().equals(this.getNextToken()) == false)
return false;
return true;
}
@Override
public int hashCode() {
    // Standard 31-based combination; null members contribute 0 so the
    // result stays consistent with equals().
    final int prime = 31;
    int result = 1;
    result = prime * result
        + (getLoadBalancers() == null ? 0 : getLoadBalancers().hashCode());
    result = prime * result
        + (getNextToken() == null ? 0 : getNextToken().hashCode());
    return result;
}
@Override
public DescribeLoadBalancersResult clone() {
    try {
        return (DescribeLoadBalancersResult) super.clone();
    } catch (CloneNotSupportedException e) {
        // The class implements Cloneable, so this path should be unreachable.
        throw new IllegalStateException(
            "Got a CloneNotSupportedException from Object.clone() even though we're Cloneable!",
            e);
    }
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.orc;
import static junit.framework.Assert.assertEquals;
import static org.junit.Assume.assumeTrue;
import java.io.File;
import java.io.FileOutputStream;
import java.io.PrintStream;
import java.sql.Timestamp;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.serde2.io.DateWritable;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.Text;
import org.apache.orc.impl.ColumnStatisticsImpl;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestName;
/**
 * Tests for ORC {@link ColumnStatisticsImpl}, covering update, merge and
 * reset behaviour for each supported column type.
 */
public class TestColumnStatistics {

  @Test
  public void testLongMerge() throws Exception {
    TypeDescription schema = TypeDescription.createInt();
    ColumnStatisticsImpl statsA = ColumnStatisticsImpl.create(schema);
    ColumnStatisticsImpl statsB = ColumnStatisticsImpl.create(schema);
    statsA.updateInteger(10, 2);
    statsB.updateInteger(1, 1);
    statsB.updateInteger(1000, 1);
    statsA.merge(statsB);
    IntegerColumnStatistics merged = (IntegerColumnStatistics) statsA;
    assertEquals(1, merged.getMinimum());
    assertEquals(1000, merged.getMaximum());
    // Resetting clears the accumulated range; a fresh merge must span both sides.
    statsA.reset();
    statsA.updateInteger(-10, 1);
    statsA.updateInteger(10000, 1);
    statsA.merge(statsB);
    assertEquals(-10, merged.getMinimum());
    assertEquals(10000, merged.getMaximum());
  }

  @Test
  public void testDoubleMerge() throws Exception {
    TypeDescription schema = TypeDescription.createDouble();
    ColumnStatisticsImpl statsA = ColumnStatisticsImpl.create(schema);
    ColumnStatisticsImpl statsB = ColumnStatisticsImpl.create(schema);
    statsA.updateDouble(10.0);
    statsA.updateDouble(100.0);
    statsB.updateDouble(1.0);
    statsB.updateDouble(1000.0);
    statsA.merge(statsB);
    DoubleColumnStatistics merged = (DoubleColumnStatistics) statsA;
    assertEquals(1.0, merged.getMinimum(), 0.001);
    assertEquals(1000.0, merged.getMaximum(), 0.001);
    statsA.reset();
    statsA.updateDouble(-10);
    statsA.updateDouble(10000);
    statsA.merge(statsB);
    assertEquals(-10, merged.getMinimum(), 0.001);
    assertEquals(10000, merged.getMaximum(), 0.001);
  }

  @Test
  public void testStringMerge() throws Exception {
    TypeDescription schema = TypeDescription.createString();
    ColumnStatisticsImpl statsA = ColumnStatisticsImpl.create(schema);
    ColumnStatisticsImpl statsB = ColumnStatisticsImpl.create(schema);
    statsA.updateString(new Text("bob"));
    statsA.updateString(new Text("david"));
    statsA.updateString(new Text("charles"));
    statsB.updateString(new Text("anne"));
    // "erin" encoded as UTF-8 at offset 6, preceded by filler bytes;
    // recorded with a repetition count of 5.
    byte[] buffer = new byte[]{0, 1, 2, 3, 4, 5, 101, 114, 105, 110};
    statsB.updateString(buffer, 6, 4, 5);
    // anne (4) + erin (4 * 5 repetitions) = 24 bytes of string data.
    assertEquals(24, ((StringColumnStatistics) statsB).getSum());
    statsA.merge(statsB);
    StringColumnStatistics merged = (StringColumnStatistics) statsA;
    assertEquals("anne", merged.getMinimum());
    assertEquals("erin", merged.getMaximum());
    assertEquals(39, merged.getSum());
    statsA.reset();
    statsA.updateString(new Text("aaa"));
    statsA.updateString(new Text("zzz"));
    statsA.merge(statsB);
    assertEquals("aaa", merged.getMinimum());
    assertEquals("zzz", merged.getMaximum());
  }

  @Test
  public void testDateMerge() throws Exception {
    TypeDescription schema = TypeDescription.createDate();
    ColumnStatisticsImpl statsA = ColumnStatisticsImpl.create(schema);
    ColumnStatisticsImpl statsB = ColumnStatisticsImpl.create(schema);
    statsA.updateDate(new DateWritable(1000));
    statsA.updateDate(new DateWritable(100));
    statsB.updateDate(new DateWritable(10));
    statsB.updateDate(new DateWritable(2000));
    statsA.merge(statsB);
    DateColumnStatistics merged = (DateColumnStatistics) statsA;
    assertEquals(new DateWritable(10).get(), merged.getMinimum());
    assertEquals(new DateWritable(2000).get(), merged.getMaximum());
    statsA.reset();
    statsA.updateDate(new DateWritable(-10));
    statsA.updateDate(new DateWritable(10000));
    statsA.merge(statsB);
    assertEquals(new DateWritable(-10).get(), merged.getMinimum());
    assertEquals(new DateWritable(10000).get(), merged.getMaximum());
  }

  @Test
  public void testTimestampMerge() throws Exception {
    TypeDescription schema = TypeDescription.createTimestamp();
    ColumnStatisticsImpl statsA = ColumnStatisticsImpl.create(schema);
    ColumnStatisticsImpl statsB = ColumnStatisticsImpl.create(schema);
    statsA.updateTimestamp(new Timestamp(10));
    statsA.updateTimestamp(new Timestamp(100));
    statsB.updateTimestamp(new Timestamp(1));
    statsB.updateTimestamp(new Timestamp(1000));
    statsA.merge(statsB);
    TimestampColumnStatistics merged = (TimestampColumnStatistics) statsA;
    assertEquals(1, merged.getMinimum().getTime());
    assertEquals(1000, merged.getMaximum().getTime());
    statsA.reset();
    statsA.updateTimestamp(new Timestamp(-10));
    statsA.updateTimestamp(new Timestamp(10000));
    statsA.merge(statsB);
    assertEquals(-10, merged.getMinimum().getTime());
    assertEquals(10000, merged.getMaximum().getTime());
  }

  @Test
  public void testDecimalMerge() throws Exception {
    TypeDescription schema = TypeDescription.createDecimal()
        .withPrecision(38).withScale(16);
    ColumnStatisticsImpl statsA = ColumnStatisticsImpl.create(schema);
    ColumnStatisticsImpl statsB = ColumnStatisticsImpl.create(schema);
    statsA.updateDecimal(new HiveDecimalWritable(10));
    statsA.updateDecimal(new HiveDecimalWritable(100));
    statsB.updateDecimal(new HiveDecimalWritable(1));
    statsB.updateDecimal(new HiveDecimalWritable(1000));
    statsA.merge(statsB);
    DecimalColumnStatistics merged = (DecimalColumnStatistics) statsA;
    assertEquals(1, merged.getMinimum().longValue());
    assertEquals(1000, merged.getMaximum().longValue());
    statsA.reset();
    statsA.updateDecimal(new HiveDecimalWritable(-10));
    statsA.updateDecimal(new HiveDecimalWritable(10000));
    statsA.merge(statsB);
    assertEquals(-10, merged.getMinimum().longValue());
    assertEquals(10000, merged.getMaximum().longValue());
  }

  // Working directory for any files the tests create, overridable via the
  // test.tmp.dir system property.
  Path workDir = new Path(System.getProperty("test.tmp.dir",
      "target" + File.separator + "test" + File.separator + "tmp"));
  Configuration conf;
  FileSystem fs;
  Path testFilePath;

  @Rule
  public TestName testCaseName = new TestName();

  @Before
  public void openFileSystem() throws Exception {
    conf = new Configuration();
    fs = FileSystem.getLocal(conf);
    fs.setWorkingDirectory(workDir);
    // One output file per test method; remove any leftover from a prior run.
    testFilePath = new Path("TestOrcFile." + testCaseName.getMethodName() + ".orc");
    fs.delete(testFilePath, false);
  }
}
| |
/**
* Copyright (C) 2014 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.strata.product.swap;
import java.io.Serializable;
import java.time.LocalDate;
import java.time.temporal.TemporalAdjuster;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Optional;
import java.util.Set;
import org.joda.beans.Bean;
import org.joda.beans.BeanDefinition;
import org.joda.beans.ImmutableBean;
import org.joda.beans.ImmutableDefaults;
import org.joda.beans.ImmutableValidator;
import org.joda.beans.JodaBeanUtils;
import org.joda.beans.MetaProperty;
import org.joda.beans.Property;
import org.joda.beans.PropertyDefinition;
import org.joda.beans.impl.direct.DirectFieldsBeanBuilder;
import org.joda.beans.impl.direct.DirectMetaBean;
import org.joda.beans.impl.direct.DirectMetaProperty;
import org.joda.beans.impl.direct.DirectMetaPropertyMap;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.opengamma.strata.basics.currency.Currency;
import com.opengamma.strata.basics.currency.CurrencyAmount;
import com.opengamma.strata.basics.date.DayCount;
import com.opengamma.strata.basics.index.FxIndexObservation;
import com.opengamma.strata.basics.index.Index;
import com.opengamma.strata.collect.Messages;
/**
* A period over which a rate of interest is paid.
* <p>
* A swap leg consists of one or more periods that are the basis of accrual.
* The payment period is formed from one or more accrual periods which
* detail the type of interest to be accrued, fixed or floating.
* <p>
* This class specifies the data necessary to calculate the value of the period.
* Any combination of accrual periods is supported in the data model, however
* there is no guarantee that exotic combinations will price sensibly.
*/
@BeanDefinition(constructorScope = "package")
public final class RatePaymentPeriod
implements NotionalPaymentPeriod, ImmutableBean, Serializable {
/**
 * The date that payment occurs.
 * <p>
 * The date that payment is made for the accrual periods.
 * If the schedule adjusts for business days, then this is the adjusted date.
 */
@PropertyDefinition(validate = "notNull", overrideGet = true)
private final LocalDate paymentDate;
/**
 * The accrual periods that combine to form the payment period.
 * <p>
 * Each accrual period includes the applicable dates and details of how to observe the rate.
 * In most cases, there will be one accrual period.
 * If there is more than one accrual period then compounding may apply.
 */
@PropertyDefinition(validate = "notEmpty")
private final ImmutableList<RateAccrualPeriod> accrualPeriods;
/**
 * The day count convention.
 * <p>
 * Each accrual period contains a year fraction calculated using this day count.
 * This day count is used when there is a need to perform further calculations.
 */
@PropertyDefinition(validate = "notNull")
private final DayCount dayCount;
/**
 * The primary currency of the payment period.
 * <p>
 * This is the currency of the swap leg and the currency that interest calculation is made in.
 * <p>
 * The amounts of the notional are usually expressed in terms of this currency,
 * however they can be converted from amounts in a different currency.
 * See the optional {@code fxReset} property.
 */
@PropertyDefinition(validate = "notNull", overrideGet = true)
private final Currency currency;
/**
 * The FX reset definition, optional.
 * <p>
 * This property is used when the defined amount of the notional is specified in
 * a currency other than the currency of the swap leg. When this occurs, the notional
 * amount has to be converted using an FX rate to the swap leg currency.
 * <p>
 * The FX reset definition must be valid. It must have a reference currency that is
 * different to that of this period, and the currency of this period must be
 * one of those defined by the FX reset index.
 */
@PropertyDefinition(get = "optional")
private final FxReset fxReset;  // null when no FX reset applies; exposed via an Optional getter
/**
 * The notional amount, positive if receiving, negative if paying.
 * <p>
 * The notional amount applicable during the period.
 * The currency of the notional is specified by {@code currency} unless
 * the {@code fxReset} property is present.
 */
@PropertyDefinition
private final double notional;
/**
 * The compounding method to use when there is more than one accrual period, default is 'None'.
 * <p>
 * Compounding is used when combining accrual periods.
 */
@PropertyDefinition(validate = "notNull")
private final CompoundingMethod compoundingMethod;
//-------------------------------------------------------------------------
@ImmutableDefaults
private static void applyDefaults(Builder builder) {
// Compounding is optional: default to 'None' so that a period with a
// single accrual period needs no explicit configuration.
builder.compoundingMethod(CompoundingMethod.NONE);
}
@ImmutableValidator
private void validate() {
    if (fxReset == null) {
        return;
    }
    // When FX reset applies, the notional is expressed in a currency other
    // than the payment currency, and that pair must be covered by the index.
    Currency notionalCcy = fxReset.getReferenceCurrency();
    if (notionalCcy.equals(currency)) {
        throw new IllegalArgumentException(Messages.format(
            "Payment currency {} must not equal notional currency {} when FX reset applies", currency, notionalCcy));
    }
    if (!fxReset.getIndex().getCurrencyPair().contains(currency)) {
        throw new IllegalArgumentException(Messages.format(
            "Payment currency {} must be one of those in the FxReset index {}", currency, fxReset.getIndex()));
    }
}
//-------------------------------------------------------------------------
/**
 * Gets the accrual start date of the period, which is the start date of
 * the first accrual period.
 * This date has typically been adjusted to be a valid business day.
 *
 * @return the start date of the period
 */
@Override
public LocalDate getStartDate() {
    RateAccrualPeriod firstPeriod = accrualPeriods.get(0);
    return firstPeriod.getStartDate();
}
/**
 * Gets the accrual end date of the period, which is the end date of the
 * last accrual period.
 * This date has typically been adjusted to be a valid business day.
 *
 * @return the end date of the period
 */
@Override
public LocalDate getEndDate() {
    int lastIndex = accrualPeriods.size() - 1;
    return accrualPeriods.get(lastIndex).getEndDate();
}
/**
 * Gets the notional as a currency amount, positive if receiving, negative
 * if paying.
 * <p>
 * The amount is expressed in the FX reset reference currency when an FX
 * reset is present, otherwise in the payment currency.
 *
 * @return the notional as a {@code CurrencyAmount}
 */
@Override
public CurrencyAmount getNotionalAmount() {
    Currency notionalCurrency =
        (fxReset != null ? fxReset.getReferenceCurrency() : currency);
    return CurrencyAmount.of(notionalCurrency, notional);
}
@Override
public Optional<FxIndexObservation> getFxResetObservation() {
    // Present only when an FX reset is defined for this period.
    return getFxReset().map(FxReset::getObservation);
}
/**
 * Checks whether compounding applies.
 * <p>
 * Compounding is only meaningful when the payment period spans more than
 * one accrual period and a compounding method other than 'None' is set.
 *
 * @return true if compounding applies
 */
public boolean isCompoundingApplicable() {
    if (accrualPeriods.size() <= 1) {
        return false;
    }
    return compoundingMethod != CompoundingMethod.NONE;
}
//-------------------------------------------------------------------------
@Override
public RatePaymentPeriod adjustPaymentDate(TemporalAdjuster adjuster) {
    LocalDate newDate = paymentDate.with(adjuster);
    if (newDate.equals(paymentDate)) {
        // No change, so reuse this immutable instance.
        return this;
    }
    return toBuilder().paymentDate(newDate).build();
}
@Override
public void collectIndices(ImmutableSet.Builder<Index> builder) {
    // Gather the indices observed by every accrual period, plus the FX
    // reset index when one is present.
    for (RateAccrualPeriod accrual : accrualPeriods) {
        accrual.getRateComputation().collectIndices(builder);
    }
    getFxReset().ifPresent(reset -> builder.add(reset.getIndex()));
}
//------------------------- AUTOGENERATED START -------------------------
///CLOVER:OFF
// NOTE: everything from here to "AUTOGENERATED END" is produced by the
// Joda-Beans code generator; edit the property definitions above and
// regenerate rather than changing this section by hand.
/**
 * The meta-bean for {@code RatePaymentPeriod}.
 * @return the meta-bean, not null
 */
public static RatePaymentPeriod.Meta meta() {
return RatePaymentPeriod.Meta.INSTANCE;
}
static {
JodaBeanUtils.registerMetaBean(RatePaymentPeriod.Meta.INSTANCE);
}
/**
 * The serialization version id.
 */
private static final long serialVersionUID = 1L;
/**
 * Returns a builder used to create an instance of the bean.
 * @return the builder, not null
 */
public static RatePaymentPeriod.Builder builder() {
return new RatePaymentPeriod.Builder();
}
/**
 * Creates an instance.
 * @param paymentDate the value of the property, not null
 * @param accrualPeriods the value of the property, not empty
 * @param dayCount the value of the property, not null
 * @param currency the value of the property, not null
 * @param fxReset the value of the property
 * @param notional the value of the property
 * @param compoundingMethod the value of the property, not null
 */
RatePaymentPeriod(
LocalDate paymentDate,
List<RateAccrualPeriod> accrualPeriods,
DayCount dayCount,
Currency currency,
FxReset fxReset,
double notional,
CompoundingMethod compoundingMethod) {
JodaBeanUtils.notNull(paymentDate, "paymentDate");
JodaBeanUtils.notEmpty(accrualPeriods, "accrualPeriods");
JodaBeanUtils.notNull(dayCount, "dayCount");
JodaBeanUtils.notNull(currency, "currency");
JodaBeanUtils.notNull(compoundingMethod, "compoundingMethod");
this.paymentDate = paymentDate;
// Defensive copy keeps the bean immutable even if the caller mutates the list.
this.accrualPeriods = ImmutableList.copyOf(accrualPeriods);
this.dayCount = dayCount;
this.currency = currency;
this.fxReset = fxReset;
this.notional = notional;
this.compoundingMethod = compoundingMethod;
// Cross-field validation (see validate()) runs after all fields are assigned.
validate();
}
@Override
public RatePaymentPeriod.Meta metaBean() {
return RatePaymentPeriod.Meta.INSTANCE;
}
@Override
public <R> Property<R> property(String propertyName) {
return metaBean().<R>metaProperty(propertyName).createProperty(this);
}
@Override
public Set<String> propertyNames() {
return metaBean().metaPropertyMap().keySet();
}
//-----------------------------------------------------------------------
/**
 * Gets the date that payment occurs.
 * <p>
 * The date that payment is made for the accrual periods.
 * If the schedule adjusts for business days, then this is the adjusted date.
 * @return the value of the property, not null
 */
@Override
public LocalDate getPaymentDate() {
return paymentDate;
}
//-----------------------------------------------------------------------
/**
 * Gets the accrual periods that combine to form the payment period.
 * <p>
 * Each accrual period includes the applicable dates and details of how to observe the rate.
 * In most cases, there will be one accrual period.
 * If there is more than one accrual period then compounding may apply.
 * @return the value of the property, not empty
 */
public ImmutableList<RateAccrualPeriod> getAccrualPeriods() {
return accrualPeriods;
}
//-----------------------------------------------------------------------
/**
 * Gets the day count convention.
 * <p>
 * Each accrual period contains a year fraction calculated using this day count.
 * This day count is used when there is a need to perform further calculations.
 * @return the value of the property, not null
 */
public DayCount getDayCount() {
return dayCount;
}
//-----------------------------------------------------------------------
/**
 * Gets the primary currency of the payment period.
 * <p>
 * This is the currency of the swap leg and the currency that interest calculation is made in.
 * <p>
 * The amounts of the notional are usually expressed in terms of this currency,
 * however they can be converted from amounts in a different currency.
 * See the optional {@code fxReset} property.
 * @return the value of the property, not null
 */
@Override
public Currency getCurrency() {
return currency;
}
//-----------------------------------------------------------------------
/**
 * Gets the FX reset definition, optional.
 * <p>
 * This property is used when the defined amount of the notional is specified in
 * a currency other than the currency of the swap leg. When this occurs, the notional
 * amount has to be converted using an FX rate to the swap leg currency.
 * <p>
 * The FX reset definition must be valid. It must have a reference currency that is
 * different to that of this period, and the currency of this period must be
 * one of those defined by the FX reset index.
 * @return the optional value of the property, not null
 */
public Optional<FxReset> getFxReset() {
// Wraps the nullable field; absent means no FX reset applies.
return Optional.ofNullable(fxReset);
}
//-----------------------------------------------------------------------
/**
 * Gets the notional amount, positive if receiving, negative if paying.
 * <p>
 * The notional amount applicable during the period.
 * The currency of the notional is specified by {@code currency} unless
 * the {@code fxReset} property is present.
 * @return the value of the property
 */
public double getNotional() {
return notional;
}
//-----------------------------------------------------------------------
/**
 * Gets the compounding method to use when there is more than one accrual period, default is 'None'.
 * <p>
 * Compounding is used when combining accrual periods.
 * @return the value of the property, not null
 */
public CompoundingMethod getCompoundingMethod() {
return compoundingMethod;
}
//-----------------------------------------------------------------------
/**
 * Returns a builder that allows this bean to be mutated.
 * @return the mutable builder, not null
 */
public Builder toBuilder() {
return new Builder(this);
}
@Override
public boolean equals(Object obj) {
if (obj == this) {
return true;
}
// Exact class match (not instanceof) keeps equals symmetric for subclasses.
if (obj != null && obj.getClass() == this.getClass()) {
RatePaymentPeriod other = (RatePaymentPeriod) obj;
return JodaBeanUtils.equal(paymentDate, other.paymentDate) &&
JodaBeanUtils.equal(accrualPeriods, other.accrualPeriods) &&
JodaBeanUtils.equal(dayCount, other.dayCount) &&
JodaBeanUtils.equal(currency, other.currency) &&
JodaBeanUtils.equal(fxReset, other.fxReset) &&
JodaBeanUtils.equal(notional, other.notional) &&
JodaBeanUtils.equal(compoundingMethod, other.compoundingMethod);
}
return false;
}
@Override
public int hashCode() {
// Combines every property that participates in equals().
int hash = getClass().hashCode();
hash = hash * 31 + JodaBeanUtils.hashCode(paymentDate);
hash = hash * 31 + JodaBeanUtils.hashCode(accrualPeriods);
hash = hash * 31 + JodaBeanUtils.hashCode(dayCount);
hash = hash * 31 + JodaBeanUtils.hashCode(currency);
hash = hash * 31 + JodaBeanUtils.hashCode(fxReset);
hash = hash * 31 + JodaBeanUtils.hashCode(notional);
hash = hash * 31 + JodaBeanUtils.hashCode(compoundingMethod);
return hash;
}
@Override
public String toString() {
StringBuilder buf = new StringBuilder(256);
buf.append("RatePaymentPeriod{");
buf.append("paymentDate").append('=').append(paymentDate).append(',').append(' ');
buf.append("accrualPeriods").append('=').append(accrualPeriods).append(',').append(' ');
buf.append("dayCount").append('=').append(dayCount).append(',').append(' ');
buf.append("currency").append('=').append(currency).append(',').append(' ');
buf.append("fxReset").append('=').append(fxReset).append(',').append(' ');
buf.append("notional").append('=').append(notional).append(',').append(' ');
buf.append("compoundingMethod").append('=').append(JodaBeanUtils.toString(compoundingMethod));
buf.append('}');
return buf.toString();
}
//-----------------------------------------------------------------------
/**
 * The meta-bean for {@code RatePaymentPeriod}.
 */
public static final class Meta extends DirectMetaBean {
// NOTE: generated by the Joda-Beans code generator; regenerate rather
// than editing by hand.
/**
 * The singleton instance of the meta-bean.
 */
static final Meta INSTANCE = new Meta();
/**
 * The meta-property for the {@code paymentDate} property.
 */
private final MetaProperty<LocalDate> paymentDate = DirectMetaProperty.ofImmutable(
this, "paymentDate", RatePaymentPeriod.class, LocalDate.class);
/**
 * The meta-property for the {@code accrualPeriods} property.
 */
@SuppressWarnings({"unchecked", "rawtypes" })
private final MetaProperty<ImmutableList<RateAccrualPeriod>> accrualPeriods = DirectMetaProperty.ofImmutable(
this, "accrualPeriods", RatePaymentPeriod.class, (Class) ImmutableList.class);
/**
 * The meta-property for the {@code dayCount} property.
 */
private final MetaProperty<DayCount> dayCount = DirectMetaProperty.ofImmutable(
this, "dayCount", RatePaymentPeriod.class, DayCount.class);
/**
 * The meta-property for the {@code currency} property.
 */
private final MetaProperty<Currency> currency = DirectMetaProperty.ofImmutable(
this, "currency", RatePaymentPeriod.class, Currency.class);
/**
 * The meta-property for the {@code fxReset} property.
 */
private final MetaProperty<FxReset> fxReset = DirectMetaProperty.ofImmutable(
this, "fxReset", RatePaymentPeriod.class, FxReset.class);
/**
 * The meta-property for the {@code notional} property.
 */
private final MetaProperty<Double> notional = DirectMetaProperty.ofImmutable(
this, "notional", RatePaymentPeriod.class, Double.TYPE);
/**
 * The meta-property for the {@code compoundingMethod} property.
 */
private final MetaProperty<CompoundingMethod> compoundingMethod = DirectMetaProperty.ofImmutable(
this, "compoundingMethod", RatePaymentPeriod.class, CompoundingMethod.class);
/**
 * The meta-properties.
 */
private final Map<String, MetaProperty<?>> metaPropertyMap$ = new DirectMetaPropertyMap(
this, null,
"paymentDate",
"accrualPeriods",
"dayCount",
"currency",
"fxReset",
"notional",
"compoundingMethod");
/**
 * Restricted constructor.
 */
private Meta() {
}
@Override
protected MetaProperty<?> metaPropertyGet(String propertyName) {
// Dispatches on the precomputed String.hashCode of each property name.
switch (propertyName.hashCode()) {
case -1540873516: // paymentDate
return paymentDate;
case -92208605: // accrualPeriods
return accrualPeriods;
case 1905311443: // dayCount
return dayCount;
case 575402001: // currency
return currency;
case -449555555: // fxReset
return fxReset;
case 1585636160: // notional
return notional;
case -1376171496: // compoundingMethod
return compoundingMethod;
}
return super.metaPropertyGet(propertyName);
}
@Override
public RatePaymentPeriod.Builder builder() {
return new RatePaymentPeriod.Builder();
}
@Override
public Class<? extends RatePaymentPeriod> beanType() {
return RatePaymentPeriod.class;
}
@Override
public Map<String, MetaProperty<?>> metaPropertyMap() {
return metaPropertyMap$;
}
//-----------------------------------------------------------------------
/**
 * The meta-property for the {@code paymentDate} property.
 * @return the meta-property, not null
 */
public MetaProperty<LocalDate> paymentDate() {
return paymentDate;
}
/**
 * The meta-property for the {@code accrualPeriods} property.
 * @return the meta-property, not null
 */
public MetaProperty<ImmutableList<RateAccrualPeriod>> accrualPeriods() {
return accrualPeriods;
}
/**
 * The meta-property for the {@code dayCount} property.
 * @return the meta-property, not null
 */
public MetaProperty<DayCount> dayCount() {
return dayCount;
}
/**
 * The meta-property for the {@code currency} property.
 * @return the meta-property, not null
 */
public MetaProperty<Currency> currency() {
return currency;
}
/**
 * The meta-property for the {@code fxReset} property.
 * @return the meta-property, not null
 */
public MetaProperty<FxReset> fxReset() {
return fxReset;
}
/**
 * The meta-property for the {@code notional} property.
 * @return the meta-property, not null
 */
public MetaProperty<Double> notional() {
return notional;
}
/**
 * The meta-property for the {@code compoundingMethod} property.
 * @return the meta-property, not null
 */
public MetaProperty<CompoundingMethod> compoundingMethod() {
return compoundingMethod;
}
//-----------------------------------------------------------------------
@Override
protected Object propertyGet(Bean bean, String propertyName, boolean quiet) {
switch (propertyName.hashCode()) {
case -1540873516: // paymentDate
return ((RatePaymentPeriod) bean).getPaymentDate();
case -92208605: // accrualPeriods
return ((RatePaymentPeriod) bean).getAccrualPeriods();
case 1905311443: // dayCount
return ((RatePaymentPeriod) bean).getDayCount();
case 575402001: // currency
return ((RatePaymentPeriod) bean).getCurrency();
case -449555555: // fxReset
// Reads the field directly so the raw nullable value (not Optional) is returned.
return ((RatePaymentPeriod) bean).fxReset;
case 1585636160: // notional
return ((RatePaymentPeriod) bean).getNotional();
case -1376171496: // compoundingMethod
return ((RatePaymentPeriod) bean).getCompoundingMethod();
}
return super.propertyGet(bean, propertyName, quiet);
}
@Override
protected void propertySet(Bean bean, String propertyName, Object newValue, boolean quiet) {
// Immutable bean: validates the property name, then rejects writes
// (silently when quiet is set).
metaProperty(propertyName);
if (quiet) {
return;
}
throw new UnsupportedOperationException("Property cannot be written: " + propertyName);
}
}
//-----------------------------------------------------------------------
/**
* The bean-builder for {@code RatePaymentPeriod}.
*/
public static final class Builder extends DirectFieldsBeanBuilder<RatePaymentPeriod> {
private LocalDate paymentDate;
private List<RateAccrualPeriod> accrualPeriods = ImmutableList.of();
private DayCount dayCount;
private Currency currency;
private FxReset fxReset;
private double notional;
private CompoundingMethod compoundingMethod;
/**
* Restricted constructor.
*/
private Builder() {
applyDefaults(this);
}
/**
* Restricted copy constructor.
* @param beanToCopy the bean to copy from, not null
*/
private Builder(RatePaymentPeriod beanToCopy) {
this.paymentDate = beanToCopy.getPaymentDate();
this.accrualPeriods = beanToCopy.getAccrualPeriods();
this.dayCount = beanToCopy.getDayCount();
this.currency = beanToCopy.getCurrency();
this.fxReset = beanToCopy.fxReset;
this.notional = beanToCopy.getNotional();
this.compoundingMethod = beanToCopy.getCompoundingMethod();
}
//-----------------------------------------------------------------------
@Override
public Object get(String propertyName) {
switch (propertyName.hashCode()) {
case -1540873516: // paymentDate
return paymentDate;
case -92208605: // accrualPeriods
return accrualPeriods;
case 1905311443: // dayCount
return dayCount;
case 575402001: // currency
return currency;
case -449555555: // fxReset
return fxReset;
case 1585636160: // notional
return notional;
case -1376171496: // compoundingMethod
return compoundingMethod;
default:
throw new NoSuchElementException("Unknown property: " + propertyName);
}
}
@SuppressWarnings("unchecked")
@Override
public Builder set(String propertyName, Object newValue) {
switch (propertyName.hashCode()) {
case -1540873516: // paymentDate
this.paymentDate = (LocalDate) newValue;
break;
case -92208605: // accrualPeriods
this.accrualPeriods = (List<RateAccrualPeriod>) newValue;
break;
case 1905311443: // dayCount
this.dayCount = (DayCount) newValue;
break;
case 575402001: // currency
this.currency = (Currency) newValue;
break;
case -449555555: // fxReset
this.fxReset = (FxReset) newValue;
break;
case 1585636160: // notional
this.notional = (Double) newValue;
break;
case -1376171496: // compoundingMethod
this.compoundingMethod = (CompoundingMethod) newValue;
break;
default:
throw new NoSuchElementException("Unknown property: " + propertyName);
}
return this;
}
@Override
public Builder set(MetaProperty<?> property, Object value) {
super.set(property, value);
return this;
}
@Override
public Builder setString(String propertyName, String value) {
setString(meta().metaProperty(propertyName), value);
return this;
}
@Override
public Builder setString(MetaProperty<?> property, String value) {
super.setString(property, value);
return this;
}
@Override
public Builder setAll(Map<String, ? extends Object> propertyValueMap) {
super.setAll(propertyValueMap);
return this;
}
@Override
public RatePaymentPeriod build() {
return new RatePaymentPeriod(
paymentDate,
accrualPeriods,
dayCount,
currency,
fxReset,
notional,
compoundingMethod);
}
//-----------------------------------------------------------------------
/**
* Sets the date that payment occurs.
* <p>
* The date that payment is made for the accrual periods.
* If the schedule adjusts for business days, then this is the adjusted date.
* @param paymentDate the new value, not null
* @return this, for chaining, not null
*/
public Builder paymentDate(LocalDate paymentDate) {
JodaBeanUtils.notNull(paymentDate, "paymentDate");
this.paymentDate = paymentDate;
return this;
}
/**
 * Sets the accrual periods that combine to form the payment period.
 * <p>
 * Each accrual period includes the applicable dates and details of how to observe the rate.
 * In most cases, there will be one accrual period.
 * If there is more than one accrual period then compounding may apply.
 * @param accrualPeriods the new value, not empty
 * @return this, for chaining, not null
 */
public Builder accrualPeriods(List<RateAccrualPeriod> accrualPeriods) {
    // fail fast: at least one accrual period is required
    JodaBeanUtils.notEmpty(accrualPeriods, "accrualPeriods");
    // the list reference is stored as given; conversion to an immutable form
    // happens when build() invokes the bean constructor
    this.accrualPeriods = accrualPeriods;
    return this;
}
/**
 * Sets the {@code accrualPeriods} property in the builder
 * from an array of objects.
 * @param accrualPeriods the new value, not empty
 * @return this, for chaining, not null
 */
public Builder accrualPeriods(RateAccrualPeriod... accrualPeriods) {
    // convenience varargs overload: snapshot the array and reuse the List setter
    return accrualPeriods(ImmutableList.copyOf(accrualPeriods));
}
/**
 * Sets the day count convention.
 * <p>
 * Each accrual period contains a year fraction calculated using this day count.
 * This day count is used when there is a need to perform further calculations.
 * @param dayCount the new value, not null
 * @return this, for chaining, not null
 */
public Builder dayCount(DayCount dayCount) {
    // fail fast: this is a mandatory property of the bean
    JodaBeanUtils.notNull(dayCount, "dayCount");
    this.dayCount = dayCount;
    return this;
}
/**
 * Sets the primary currency of the payment period.
 * <p>
 * This is the currency of the swap leg and the currency that interest calculation is made in.
 * <p>
 * The amounts of the notional are usually expressed in terms of this currency,
 * however they can be converted from amounts in a different currency.
 * See the optional {@code fxReset} property.
 * @param currency the new value, not null
 * @return this, for chaining, not null
 */
public Builder currency(Currency currency) {
    // fail fast: this is a mandatory property of the bean
    JodaBeanUtils.notNull(currency, "currency");
    this.currency = currency;
    return this;
}
/**
 * Sets the FX reset definition, optional.
 * <p>
 * This property is used when the defined amount of the notional is specified in
 * a currency other than the currency of the swap leg. When this occurs, the notional
 * amount has to be converted using an FX rate to the swap leg currency.
 * <p>
 * The FX reset definition must be valid. It must have a reference currency that is
 * different to that of this period, and the currency of this period must be
 * one of those defined by the FX reset index.
 * @param fxReset the new value
 * @return this, for chaining, not null
 */
public Builder fxReset(FxReset fxReset) {
    // no null check: the property is optional, so null is an acceptable value
    this.fxReset = fxReset;
    return this;
}
/**
 * Sets the notional amount, positive if receiving, negative if paying.
 * <p>
 * The notional amount applicable during the period.
 * The currency of the notional is specified by {@code currency} unless there
 * is the {@code fxReset} property is present.
 * @param notional the new value
 * @return this, for chaining, not null
 */
public Builder notional(double notional) {
    this.notional = notional;
    return this;
}
/**
 * Sets the compounding method to use when there is more than one accrual period, default is 'None'.
 * <p>
 * Compounding is used when combining accrual periods.
 * @param compoundingMethod the new value, not null
 * @return this, for chaining, not null
 */
public Builder compoundingMethod(CompoundingMethod compoundingMethod) {
    // fail fast: the property defaults to 'None' but may never be set to null
    JodaBeanUtils.notNull(compoundingMethod, "compoundingMethod");
    this.compoundingMethod = compoundingMethod;
    return this;
}
//-----------------------------------------------------------------------
@Override
public String toString() {
    // Render the builder state as "RatePaymentPeriod.Builder{name=value, ...}",
    // delegating per-property formatting to JodaBeanUtils.toString.
    String text = "RatePaymentPeriod.Builder{"
        + "paymentDate" + '=' + JodaBeanUtils.toString(paymentDate) + ", "
        + "accrualPeriods" + '=' + JodaBeanUtils.toString(accrualPeriods) + ", "
        + "dayCount" + '=' + JodaBeanUtils.toString(dayCount) + ", "
        + "currency" + '=' + JodaBeanUtils.toString(currency) + ", "
        + "fxReset" + '=' + JodaBeanUtils.toString(fxReset) + ", "
        + "notional" + '=' + JodaBeanUtils.toString(notional) + ", "
        + "compoundingMethod" + '=' + JodaBeanUtils.toString(compoundingMethod)
        + '}';
    return text;
}
}
///CLOVER:ON
//-------------------------- AUTOGENERATED END --------------------------
}
| |
package org.apache.lucene.util;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.util.Collection;
import java.util.Comparator;
/**
* Methods for manipulating arrays.
*
* @lucene.internal
*/
public final class ArrayUtil {

  private ArrayUtil() {} // no instance

  /*
     Begin Apache Harmony code

     Revision taken on Friday, June 12. https://svn.apache.org/repos/asf/harmony/enhanced/classlib/archive/java6/modules/luni/src/main/java/java/lang/Integer.java
   */

  /**
   * Parses the string argument as if it was an int value and returns the
   * result. Throws NumberFormatException if the string does not represent an
   * int quantity.
   *
   * @param chars a string representation of an int quantity.
   * @return int the value represented by the argument
   * @throws NumberFormatException if the argument could not be parsed as an int quantity.
   */
  public static int parseInt(char[] chars) throws NumberFormatException {
    return parseInt(chars, 0, chars.length, 10);
  }

  /**
   * Parses a char array into an int.
   * @param chars the character array
   * @param offset The offset into the array
   * @param len The length
   * @return the int
   * @throws NumberFormatException if it can't parse
   */
  public static int parseInt(char[] chars, int offset, int len) throws NumberFormatException {
    return parseInt(chars, offset, len, 10);
  }

  /**
   * Parses the string argument as if it was an int value and returns the
   * result. Throws NumberFormatException if the string does not represent an
   * int quantity. The second argument specifies the radix to use when parsing
   * the value.
   *
   * @param chars a string representation of an int quantity.
   * @param offset the offset of the first character to parse.
   * @param len the number of characters to parse.
   * @param radix the base to use for conversion.
   * @return int the value represented by the argument
   * @throws NumberFormatException if the argument could not be parsed as an int quantity.
   */
  public static int parseInt(char[] chars, int offset, int len, int radix)
    throws NumberFormatException {
    if (chars == null || radix < Character.MIN_RADIX
        || radix > Character.MAX_RADIX) {
      throw new NumberFormatException();
    }
    int i = 0;
    if (len == 0) {
      throw new NumberFormatException("chars length is 0");
    }
    boolean negative = chars[offset + i] == '-';
    if (negative && ++i == len) {
      // the input is just "-": there are no digits to parse
      throw new NumberFormatException("can't convert to an int");
    }
    if (negative) {
      // skip the sign character and parse only the digits
      offset++;
      len--;
    }
    return parse(chars, offset, len, radix, negative);
  }

  /**
   * Accumulates digits into a NEGATIVE running total, so that
   * Integer.MIN_VALUE (whose magnitude exceeds Integer.MAX_VALUE) can be
   * represented during parsing, and negates at the end for positive input.
   */
  private static int parse(char[] chars, int offset, int len, int radix,
                           boolean negative) throws NumberFormatException {
    int max = Integer.MIN_VALUE / radix;
    int result = 0;
    for (int i = 0; i < len; i++) {
      int digit = Character.digit(chars[i + offset], radix);
      if (digit == -1) {
        throw new NumberFormatException("Unable to parse");
      }
      if (max > result) {
        // multiplying by radix would underflow below Integer.MIN_VALUE
        throw new NumberFormatException("Unable to parse");
      }
      int next = result * radix - digit;
      if (next > result) {
        // subtraction wrapped around: overflow
        throw new NumberFormatException("Unable to parse");
      }
      result = next;
    }
    if (!negative) {
      result = -result;
      if (result < 0) {
        // the magnitude was Integer.MIN_VALUE, which has no positive counterpart
        throw new NumberFormatException("Unable to parse");
      }
    }
    return result;
  }

  /*
   END APACHE HARMONY CODE
   */

  /** Returns an array size >= minTargetSize, generally
   * over-allocating exponentially to achieve amortized
   * linear-time cost as the array grows.
   *
   * NOTE: this was originally borrowed from Python 2.4.2
   * listobject.c sources (attribution in LICENSE.txt), but
   * has now been substantially changed based on
   * discussions from java-dev thread with subject "Dynamic
   * array reallocation algorithms", started on Jan 12
   * 2010.
   *
   * @param minTargetSize Minimum required value to be returned.
   * @param bytesPerElement Bytes used by each element of
   * the array. See constants in {@link RamUsageEstimator}.
   *
   * @lucene.internal
   */
  public static int oversize(int minTargetSize, int bytesPerElement) {

    if (minTargetSize < 0) {
      // catch usage that accidentally overflows int
      throw new IllegalArgumentException("invalid array size " + minTargetSize);
    }

    if (minTargetSize == 0) {
      // wait until at least one element is requested
      return 0;
    }

    // asymptotic exponential growth by 1/8th, favors
    // spending a bit more CPU to not tie up too much wasted
    // RAM:
    int extra = minTargetSize >> 3;

    if (extra < 3) {
      // for very small arrays, where constant overhead of
      // realloc is presumably relatively high, we grow
      // faster
      extra = 3;
    }

    int newSize = minTargetSize + extra;

    // add 7 to allow for worst case byte alignment addition below:
    if (newSize+7 < 0) {
      // int overflowed -- return max allowed array size
      return Integer.MAX_VALUE;
    }

    if (Constants.JRE_IS_64BIT) {
      // round up to 8 byte alignment in 64bit env
      switch(bytesPerElement) {
      case 4:
        // round up to multiple of 2
        return (newSize + 1) & 0x7ffffffe;
      case 2:
        // round up to multiple of 4
        return (newSize + 3) & 0x7ffffffc;
      case 1:
        // round up to multiple of 8
        return (newSize + 7) & 0x7ffffff8;
      case 8:
        // no rounding
      default:
        // odd (invalid?) size
        return newSize;
      }
    } else {
      // round up to 4 byte alignment in 32bit env
      switch(bytesPerElement) {
      case 2:
        // round up to multiple of 2
        return (newSize + 1) & 0x7ffffffe;
      case 1:
        // round up to multiple of 4
        return (newSize + 3) & 0x7ffffffc;
      case 4:
      case 8:
        // no rounding
      default:
        // odd (invalid?) size
        return newSize;
      }
    }
  }

  /** Returns the size a backing array should be shrunk to when
   * {@code targetSize} elements are needed, or {@code currentSize}
   * when the saving would not be substantial enough to justify a
   * reallocation. */
  public static int getShrinkSize(int currentSize, int targetSize, int bytesPerElement) {
    final int newSize = oversize(targetSize, bytesPerElement);

    // Only reallocate if we are "substantially" smaller.
    // This saves us from "running hot" (constantly making a
    // bit bigger then a bit smaller, over and over):
    if (newSize < currentSize / 2)
      return newSize;
    else
      return currentSize;
  }

  public static short[] grow(short[] array, int minSize) {
    assert minSize >= 0: "size must be positive (got " + minSize + "): likely integer overflow?";
    if (array.length < minSize) {
      short[] newArray = new short[oversize(minSize, RamUsageEstimator.NUM_BYTES_SHORT)];
      System.arraycopy(array, 0, newArray, 0, array.length);
      return newArray;
    } else
      return array;
  }

  public static short[] grow(short[] array) {
    return grow(array, 1 + array.length);
  }

  public static float[] grow(float[] array, int minSize) {
    assert minSize >= 0: "size must be positive (got " + minSize + "): likely integer overflow?";
    if (array.length < minSize) {
      float[] newArray = new float[oversize(minSize, RamUsageEstimator.NUM_BYTES_FLOAT)];
      System.arraycopy(array, 0, newArray, 0, array.length);
      return newArray;
    } else
      return array;
  }

  public static float[] grow(float[] array) {
    return grow(array, 1 + array.length);
  }

  public static double[] grow(double[] array, int minSize) {
    assert minSize >= 0: "size must be positive (got " + minSize + "): likely integer overflow?";
    if (array.length < minSize) {
      double[] newArray = new double[oversize(minSize, RamUsageEstimator.NUM_BYTES_DOUBLE)];
      System.arraycopy(array, 0, newArray, 0, array.length);
      return newArray;
    } else
      return array;
  }

  public static double[] grow(double[] array) {
    return grow(array, 1 + array.length);
  }

  public static short[] shrink(short[] array, int targetSize) {
    assert targetSize >= 0: "size must be positive (got " + targetSize + "): likely integer overflow?";
    final int newSize = getShrinkSize(array.length, targetSize, RamUsageEstimator.NUM_BYTES_SHORT);
    if (newSize != array.length) {
      short[] newArray = new short[newSize];
      System.arraycopy(array, 0, newArray, 0, newSize);
      return newArray;
    } else
      return array;
  }

  public static int[] grow(int[] array, int minSize) {
    assert minSize >= 0: "size must be positive (got " + minSize + "): likely integer overflow?";
    if (array.length < minSize) {
      int[] newArray = new int[oversize(minSize, RamUsageEstimator.NUM_BYTES_INT)];
      System.arraycopy(array, 0, newArray, 0, array.length);
      return newArray;
    } else
      return array;
  }

  public static int[] grow(int[] array) {
    return grow(array, 1 + array.length);
  }

  public static int[] shrink(int[] array, int targetSize) {
    assert targetSize >= 0: "size must be positive (got " + targetSize + "): likely integer overflow?";
    final int newSize = getShrinkSize(array.length, targetSize, RamUsageEstimator.NUM_BYTES_INT);
    if (newSize != array.length) {
      int[] newArray = new int[newSize];
      System.arraycopy(array, 0, newArray, 0, newSize);
      return newArray;
    } else
      return array;
  }

  public static long[] grow(long[] array, int minSize) {
    assert minSize >= 0: "size must be positive (got " + minSize + "): likely integer overflow?";
    if (array.length < minSize) {
      long[] newArray = new long[oversize(minSize, RamUsageEstimator.NUM_BYTES_LONG)];
      System.arraycopy(array, 0, newArray, 0, array.length);
      return newArray;
    } else
      return array;
  }

  public static long[] grow(long[] array) {
    return grow(array, 1 + array.length);
  }

  public static long[] shrink(long[] array, int targetSize) {
    assert targetSize >= 0: "size must be positive (got " + targetSize + "): likely integer overflow?";
    final int newSize = getShrinkSize(array.length, targetSize, RamUsageEstimator.NUM_BYTES_LONG);
    if (newSize != array.length) {
      long[] newArray = new long[newSize];
      System.arraycopy(array, 0, newArray, 0, newSize);
      return newArray;
    } else
      return array;
  }

  public static byte[] grow(byte[] array, int minSize) {
    assert minSize >= 0: "size must be positive (got " + minSize + "): likely integer overflow?";
    if (array.length < minSize) {
      byte[] newArray = new byte[oversize(minSize, 1)];
      System.arraycopy(array, 0, newArray, 0, array.length);
      return newArray;
    } else
      return array;
  }

  public static byte[] grow(byte[] array) {
    return grow(array, 1 + array.length);
  }

  public static byte[] shrink(byte[] array, int targetSize) {
    assert targetSize >= 0: "size must be positive (got " + targetSize + "): likely integer overflow?";
    final int newSize = getShrinkSize(array.length, targetSize, 1);
    if (newSize != array.length) {
      byte[] newArray = new byte[newSize];
      System.arraycopy(array, 0, newArray, 0, newSize);
      return newArray;
    } else
      return array;
  }

  public static boolean[] grow(boolean[] array, int minSize) {
    assert minSize >= 0: "size must be positive (got " + minSize + "): likely integer overflow?";
    if (array.length < minSize) {
      boolean[] newArray = new boolean[oversize(minSize, 1)];
      System.arraycopy(array, 0, newArray, 0, array.length);
      return newArray;
    } else
      return array;
  }

  public static boolean[] grow(boolean[] array) {
    return grow(array, 1 + array.length);
  }

  public static boolean[] shrink(boolean[] array, int targetSize) {
    assert targetSize >= 0: "size must be positive (got " + targetSize + "): likely integer overflow?";
    final int newSize = getShrinkSize(array.length, targetSize, 1);
    if (newSize != array.length) {
      boolean[] newArray = new boolean[newSize];
      System.arraycopy(array, 0, newArray, 0, newSize);
      return newArray;
    } else
      return array;
  }

  public static char[] grow(char[] array, int minSize) {
    assert minSize >= 0: "size must be positive (got " + minSize + "): likely integer overflow?";
    if (array.length < minSize) {
      char[] newArray = new char[oversize(minSize, RamUsageEstimator.NUM_BYTES_CHAR)];
      System.arraycopy(array, 0, newArray, 0, array.length);
      return newArray;
    } else
      return array;
  }

  public static char[] grow(char[] array) {
    return grow(array, 1 + array.length);
  }

  public static char[] shrink(char[] array, int targetSize) {
    assert targetSize >= 0: "size must be positive (got " + targetSize + "): likely integer overflow?";
    final int newSize = getShrinkSize(array.length, targetSize, RamUsageEstimator.NUM_BYTES_CHAR);
    if (newSize != array.length) {
      char[] newArray = new char[newSize];
      System.arraycopy(array, 0, newArray, 0, newSize);
      return newArray;
    } else
      return array;
  }

  public static int[][] grow(int[][] array, int minSize) {
    assert minSize >= 0: "size must be positive (got " + minSize + "): likely integer overflow?";
    if (array.length < minSize) {
      int[][] newArray = new int[oversize(minSize, RamUsageEstimator.NUM_BYTES_OBJECT_REF)][];
      System.arraycopy(array, 0, newArray, 0, array.length);
      return newArray;
    } else {
      return array;
    }
  }

  public static int[][] grow(int[][] array) {
    return grow(array, 1 + array.length);
  }

  public static int[][] shrink(int[][] array, int targetSize) {
    assert targetSize >= 0: "size must be positive (got " + targetSize + "): likely integer overflow?";
    final int newSize = getShrinkSize(array.length, targetSize, RamUsageEstimator.NUM_BYTES_OBJECT_REF);
    if (newSize != array.length) {
      int[][] newArray = new int[newSize][];
      System.arraycopy(array, 0, newArray, 0, newSize);
      return newArray;
    } else {
      return array;
    }
  }

  public static float[][] grow(float[][] array, int minSize) {
    assert minSize >= 0: "size must be positive (got " + minSize + "): likely integer overflow?";
    if (array.length < minSize) {
      float[][] newArray = new float[oversize(minSize, RamUsageEstimator.NUM_BYTES_OBJECT_REF)][];
      System.arraycopy(array, 0, newArray, 0, array.length);
      return newArray;
    } else {
      return array;
    }
  }

  public static float[][] grow(float[][] array) {
    return grow(array, 1 + array.length);
  }

  public static float[][] shrink(float[][] array, int targetSize) {
    assert targetSize >= 0: "size must be positive (got " + targetSize + "): likely integer overflow?";
    final int newSize = getShrinkSize(array.length, targetSize, RamUsageEstimator.NUM_BYTES_OBJECT_REF);
    if (newSize != array.length) {
      float[][] newArray = new float[newSize][];
      System.arraycopy(array, 0, newArray, 0, newSize);
      return newArray;
    } else {
      return array;
    }
  }

  /**
   * Returns hash of chars in range start (inclusive) to
   * end (exclusive)
   */
  public static int hashCode(char[] array, int start, int end) {
    int code = 0;
    for (int i = end - 1; i >= start; i--)
      code = code * 31 + array[i];
    return code;
  }

  /**
   * Returns hash of bytes in range start (inclusive) to
   * end (exclusive)
   */
  public static int hashCode(byte[] array, int start, int end) {
    int code = 0;
    for (int i = end - 1; i >= start; i--)
      code = code * 31 + array[i];
    return code;
  }

  // Since Arrays.equals doesn't implement offsets for equals
  /**
   * See if two array slices are the same.
   *
   * @param left The left array to compare
   * @param offsetLeft The offset into the array.  Must be positive
   * @param right The right array to compare
   * @param offsetRight the offset into the right array.  Must be positive
   * @param length The length of the section of the array to compare
   * @return true if the two arrays, starting at their respective offsets, are equal
   *
   * @see java.util.Arrays#equals(char[], char[])
   */
  public static boolean equals(char[] left, int offsetLeft, char[] right, int offsetRight, int length) {
    if ((offsetLeft + length <= left.length) && (offsetRight + length <= right.length)) {
      for (int i = 0; i < length; i++) {
        if (left[offsetLeft + i] != right[offsetRight + i]) {
          return false;
        }
      }
      return true;
    }
    return false;
  }

  // Since Arrays.equals doesn't implement offsets for equals
  /**
   * See if two array slices are the same.
   *
   * @param left The left array to compare
   * @param offsetLeft The offset into the array.  Must be positive
   * @param right The right array to compare
   * @param offsetRight the offset into the right array.  Must be positive
   * @param length The length of the section of the array to compare
   * @return true if the two arrays, starting at their respective offsets, are equal
   *
   * @see java.util.Arrays#equals(byte[], byte[])
   */
  public static boolean equals(byte[] left, int offsetLeft, byte[] right, int offsetRight, int length) {
    if ((offsetLeft + length <= left.length) && (offsetRight + length <= right.length)) {
      for (int i = 0; i < length; i++) {
        if (left[offsetLeft + i] != right[offsetRight + i]) {
          return false;
        }
      }
      return true;
    }
    return false;
  }

  /* DISABLE THIS FOR NOW: This has performance problems until Java creates intrinsics for Class#getComponentType() and Array.newInstance()
  public static <T> T[] grow(T[] array, int minSize) {
    assert minSize >= 0: "size must be positive (got " + minSize + "): likely integer overflow?";
    if (array.length < minSize) {
      @SuppressWarnings("unchecked") final T[] newArray =
        (T[]) Array.newInstance(array.getClass().getComponentType(), oversize(minSize, RamUsageEstimator.NUM_BYTES_OBJECT_REF));
      System.arraycopy(array, 0, newArray, 0, array.length);
      return newArray;
    } else
      return array;
  }

  public static <T> T[] grow(T[] array) {
    return grow(array, 1 + array.length);
  }

  public static <T> T[] shrink(T[] array, int targetSize) {
    assert targetSize >= 0: "size must be positive (got " + targetSize + "): likely integer overflow?";
    final int newSize = getShrinkSize(array.length, targetSize, RamUsageEstimator.NUM_BYTES_OBJECT_REF);
    if (newSize != array.length) {
      @SuppressWarnings("unchecked") final T[] newArray =
        (T[]) Array.newInstance(array.getClass().getComponentType(), newSize);
      System.arraycopy(array, 0, newArray, 0, newSize);
      return newArray;
    } else
      return array;
  }
  */

  // Since Arrays.equals doesn't implement offsets for equals
  /**
   * See if two array slices are the same.
   *
   * @param left The left array to compare
   * @param offsetLeft The offset into the array.  Must be positive
   * @param right The right array to compare
   * @param offsetRight the offset into the right array.  Must be positive
   * @param length The length of the section of the array to compare
   * @return true if the two arrays, starting at their respective offsets, are equal
   *
   * @see java.util.Arrays#equals(int[], int[])
   */
  public static boolean equals(int[] left, int offsetLeft, int[] right, int offsetRight, int length) {
    if ((offsetLeft + length <= left.length) && (offsetRight + length <= right.length)) {
      for (int i = 0; i < length; i++) {
        if (left[offsetLeft + i] != right[offsetRight + i]) {
          return false;
        }
      }
      return true;
    }
    return false;
  }

  /** Copies the given {@link Integer} collection into a primitive int array,
   * in iteration order. */
  public static int[] toIntArray(Collection<Integer> ints) {

    final int[] result = new int[ints.size()];
    int upto = 0;
    for(int v : ints) {
      result[upto++] = v;
    }

    // paranoia:
    assert upto == result.length;

    return result;
  }

  /** SorterTemplate with custom {@link Comparator} */
  private static <T> SorterTemplate getSorter(final T[] a, final Comparator<? super T> comp) {
    return new SorterTemplate() {
      @Override
      protected void swap(int i, int j) {
        final T o = a[i];
        a[i] = a[j];
        a[j] = o;
      }
      @Override
      protected int compare(int i, int j) {
        return comp.compare(a[i], a[j]);
      }
      @Override
      protected void setPivot(int i) {
        pivot = a[i];
      }
      @Override
      protected int comparePivot(int j) {
        return comp.compare(pivot, a[j]);
      }
      private T pivot;
    };
  }

  /** Natural SorterTemplate */
  private static <T extends Comparable<? super T>> SorterTemplate getSorter(final T[] a) {
    return new SorterTemplate() {
      @Override
      protected void swap(int i, int j) {
        final T o = a[i];
        a[i] = a[j];
        a[j] = o;
      }
      @Override
      protected int compare(int i, int j) {
        return a[i].compareTo(a[j]);
      }
      @Override
      protected void setPivot(int i) {
        pivot = a[i];
      }
      @Override
      protected int comparePivot(int j) {
        return pivot.compareTo(a[j]);
      }
      private T pivot;
    };
  }

  // quickSorts (endindex is exclusive!):

  /**
   * Sorts the given array slice using the {@link Comparator}. This method uses the quick sort
   * algorithm, but falls back to insertion sort for small arrays.
   * @param fromIndex start index (inclusive)
   * @param toIndex end index (exclusive)
   */
  public static <T> void quickSort(T[] a, int fromIndex, int toIndex, Comparator<? super T> comp) {
    if (toIndex-fromIndex <= 1) return;
    getSorter(a, comp).quickSort(fromIndex, toIndex-1);
  }

  /**
   * Sorts the given array using the {@link Comparator}. This method uses the quick sort
   * algorithm, but falls back to insertion sort for small arrays.
   */
  public static <T> void quickSort(T[] a, Comparator<? super T> comp) {
    quickSort(a, 0, a.length, comp);
  }

  /**
   * Sorts the given array slice in natural order. This method uses the quick sort
   * algorithm, but falls back to insertion sort for small arrays.
   * @param fromIndex start index (inclusive)
   * @param toIndex end index (exclusive)
   */
  public static <T extends Comparable<? super T>> void quickSort(T[] a, int fromIndex, int toIndex) {
    if (toIndex-fromIndex <= 1) return;
    getSorter(a).quickSort(fromIndex, toIndex-1);
  }

  /**
   * Sorts the given array in natural order. This method uses the quick sort
   * algorithm, but falls back to insertion sort for small arrays.
   */
  public static <T extends Comparable<? super T>> void quickSort(T[] a) {
    quickSort(a, 0, a.length);
  }

  // mergeSorts:

  /**
   * Sorts the given array slice using the {@link Comparator}. This method uses the merge sort
   * algorithm, but falls back to insertion sort for small arrays.
   * @param fromIndex start index (inclusive)
   * @param toIndex end index (exclusive)
   */
  public static <T> void mergeSort(T[] a, int fromIndex, int toIndex, Comparator<? super T> comp) {
    if (toIndex-fromIndex <= 1) return;
    getSorter(a, comp).mergeSort(fromIndex, toIndex-1);
  }

  /**
   * Sorts the given array using the {@link Comparator}. This method uses the merge sort
   * algorithm, but falls back to insertion sort for small arrays.
   */
  public static <T> void mergeSort(T[] a, Comparator<? super T> comp) {
    mergeSort(a, 0, a.length, comp);
  }

  /**
   * Sorts the given array slice in natural order. This method uses the merge sort
   * algorithm, but falls back to insertion sort for small arrays.
   * @param fromIndex start index (inclusive)
   * @param toIndex end index (exclusive)
   */
  public static <T extends Comparable<? super T>> void mergeSort(T[] a, int fromIndex, int toIndex) {
    if (toIndex-fromIndex <= 1) return;
    getSorter(a).mergeSort(fromIndex, toIndex-1);
  }

  /**
   * Sorts the given array in natural order. This method uses the merge sort
   * algorithm, but falls back to insertion sort for small arrays.
   */
  public static <T extends Comparable<? super T>> void mergeSort(T[] a) {
    mergeSort(a, 0, a.length);
  }

  // insertionSorts:

  /**
   * Sorts the given array slice using the {@link Comparator}. This method uses the insertion sort
   * algorithm. It is only recommended to use this algorithm for partially sorted small arrays!
   * @param fromIndex start index (inclusive)
   * @param toIndex end index (exclusive)
   */
  public static <T> void insertionSort(T[] a, int fromIndex, int toIndex, Comparator<? super T> comp) {
    if (toIndex-fromIndex <= 1) return;
    getSorter(a, comp).insertionSort(fromIndex, toIndex-1);
  }

  /**
   * Sorts the given array using the {@link Comparator}. This method uses the insertion sort
   * algorithm. It is only recommended to use this algorithm for partially sorted small arrays!
   */
  public static <T> void insertionSort(T[] a, Comparator<? super T> comp) {
    insertionSort(a, 0, a.length, comp);
  }

  /**
   * Sorts the given array slice in natural order. This method uses the insertion sort
   * algorithm. It is only recommended to use this algorithm for partially sorted small arrays!
   * @param fromIndex start index (inclusive)
   * @param toIndex end index (exclusive)
   */
  public static <T extends Comparable<? super T>> void insertionSort(T[] a, int fromIndex, int toIndex) {
    if (toIndex-fromIndex <= 1) return;
    getSorter(a).insertionSort(fromIndex, toIndex-1);
  }

  /**
   * Sorts the given array in natural order. This method uses the insertion sort
   * algorithm. It is only recommended to use this algorithm for partially sorted small arrays!
   */
  public static <T extends Comparable<? super T>> void insertionSort(T[] a) {
    insertionSort(a, 0, a.length);
  }
}
| |
////////////////////////////////////////////////////////////////////////////////
// Copyright (c) 2010-2017. Lapinin "lastrix" Sergey. /
// /
// Permission is hereby granted, free of charge, to any person /
// obtaining a copy of this software and associated documentation /
// files (the "Software"), to deal in the Software without /
// restriction, including without limitation the rights to use, /
// copy, modify, merge, publish, distribute, sublicense, and/or /
// sell copies of the Software, and to permit persons to whom the /
// Software is furnished to do so, subject to the following /
// conditions: /
// /
// The above copyright notice and this permission notice shall be /
// included in all copies or substantial portions of the Software. /
// /
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, /
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES /
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND /
// NON INFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT /
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, /
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING /
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE /
// OR OTHER DEALINGS IN THE SOFTWARE. /
////////////////////////////////////////////////////////////////////////////////
package org.asn1s.core;
import org.apache.commons.lang3.StringUtils;
import org.asn1s.api.Ref;
import org.asn1s.api.Scope;
import org.asn1s.api.Template;
import org.asn1s.api.TemplateParameter;
import org.asn1s.api.exception.IllegalValueException;
import org.asn1s.api.exception.ResolutionException;
import org.asn1s.api.exception.ValidationException;
import org.asn1s.api.type.DefinedType;
import org.asn1s.api.type.Type;
import org.asn1s.api.util.RefUtils;
import org.asn1s.api.value.ByteArrayValue;
import org.asn1s.api.value.Value;
import org.asn1s.api.value.Value.Kind;
import org.asn1s.api.value.x680.NamedValue;
import org.asn1s.api.value.x680.ValueCollection;
import org.asn1s.core.value.x680.ByteArrayValueImpl;
import org.jetbrains.annotations.NotNull;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.regex.Pattern;
@SuppressWarnings( {"UtilityClassCanBeEnum", "UtilityClass"} )
public final class CoreUtils
{
// Mask for reading the low 8 bits of an int as an unsigned byte value.
private static final int BYTE_MASK = 0xFF;
// Strips every character that is not a latin letter or digit from a hex-string body.
private static final Pattern CLEAR_HEX_PATTERN = Pattern.compile( "[^A-Za-z0-9]" );
// Strips every character that is not '0' or '1' from a binary-string body.
private static final Pattern CLEAR_BIN_PATTERN = Pattern.compile( "[^0-1]" );
// Shared empty result to avoid allocating zero-length arrays repeatedly.
private static final byte[] EMPTY_ARRAY = new byte[0];
private static final int HEX_RADIX = 16;
private static final int BIN_RADIX = 2;
// Name of the built-in core module registered by this library.
public static final String CORE_MODULE_NAME = "ASN14J-CORE-MODULE";
// Utility class: prevent instantiation.
private CoreUtils()
{
}
// Matches ASN.1 hex-string ('...'H) and binary-string ('...'B) literals.
// The quantifiers allow literals of any length, including the empty ''H / ''B;
// the original pattern accepted exactly one character between the quotes, which
// rejected every multi-digit literal such as '0AF1'H.
private static final Pattern BIN_HEX_PATTERN = Pattern.compile( "^'([A-Za-z0-9]*'H|[01]*'B)$" );

/**
 * Tests whether the content is an ASN.1 hex-string ('0AF1'H) or binary-string
 * ('0101'B) literal that can be converted to a byte array value.
 *
 * @param content the candidate literal text, not null
 * @return true when the content matches the hstring/bstring literal syntax
 */
public static boolean isConvertibleToByteArrayValue( CharSequence content )
{
	return BIN_HEX_PATTERN.matcher( content ).matches();
}
/**
 * Compares a numeric (REAL or INTEGER) value against the numeric value
 * referenced by a named value.
 *
 * @param lhs the left-hand numeric value; must be of kind REAL or INTEGER
 * @param rhs the named value whose referenced value is compared against
 * @return the comparison result, as defined by {@link Comparable#compareTo}
 * @throws UnsupportedOperationException when the named value does not reference a number
 */
public static int compareNumberToNamed( Value lhs, NamedValue rhs )
{
	assert lhs.getKind() == Kind.REAL || lhs.getKind() == Kind.INTEGER;
	Kind referenceKind = rhs.getReferenceKind();
	// reject early when the referenced value is not numeric
	if( referenceKind != Kind.REAL && referenceKind != Kind.INTEGER )
		throw new UnsupportedOperationException( "Unable to compare number value to " + referenceKind );
	assert rhs.getValueRef() instanceof Value;
	return lhs.compareTo( (Value)rhs.getValueRef() );
}
/**
 * Validates every parameter of a template: the template must declare at least
 * one parameter, each parameter must be a well-formed reference, and each
 * governor (when present) must resolve to a usable type.
 *
 * @param scope the scope used to resolve governor types
 * @param template the template whose parameters are validated
 * @throws ValidationException when a parameter is invalid or the template has none
 * @throws ResolutionException when a governor type cannot be resolved
 */
public static void assertParameterMap( Scope scope, Template template ) throws ValidationException, ResolutionException
{
	if( template.getParameterCount() == 0 )
		throw new ValidationException( "No parameters for template type" );

	for( int index = 0; index < template.getParameterCount(); index++ )
	{
		TemplateParameter templateParameter = template.getParameter( index );
		assertReference( templateParameter );
		assertGovernor( scope, templateParameter );
	}
}
/**
 * Checks a single template parameter reference: value-reference parameters
 * must carry a governor type; type-reference parameters need none.
 *
 * @param parameter the parameter to check
 * @throws ValidationException when a value parameter lacks a governor
 */
private static void assertReference( TemplateParameter parameter ) throws ValidationException
{
	// only a value reference without a governor is illegal; type references pass
	if( !parameter.isTypeRef() && parameter.isValueRef() && parameter.getGovernor() == null )
		throw new ValidationException( "Governor type must be present for value parameter" );
}
private static void assertGovernor( Scope scope, TemplateParameter parameter ) throws ResolutionException, ValidationException
{
if( parameter.getGovernor() == null )
return;
Type type = parameter.getGovernor().resolve( scope );
//noinspection ConstantConditions isAbstract is a check for template != null
if( type instanceof DefinedType && ( (DefinedType)type ).isAbstract() && !( (DefinedType)type ).getTemplate().isInstance() )
throw new ValidationException( "Unable to use Type template as governor" );
}
@NotNull
public static ByteArrayValue byteArrayFromBitString( @NotNull String content )
{
return new BinConverter( content ).convert();
}
@NotNull
public static ByteArrayValue byteArrayFromHexString( @NotNull String content )
{
return new HexConverter( content ).convert();
}
public static String paramMapToString( Map<String, TemplateParameter> parameterMap )
{
List<TemplateParameter> parameters = new ArrayList<>( parameterMap.values() );
parameters.sort( Comparator.comparingInt( TemplateParameter:: getIndex ) );
return StringUtils.join( parameters, ", " );
}
@NotNull
public static ValueCollection toValueCollectionOrDie( @NotNull Scope scope, @NotNull Ref<Value> valueRef ) throws ResolutionException, IllegalValueException
{
Value value = RefUtils.toBasicValue( scope, valueRef );
if( value.getKind() != Kind.NAMED_COLLECTION && value.getKind() != Kind.COLLECTION )
throw new IllegalValueException( "Illegal Sequence value: " + value );
return value.toValueCollection();
}
private abstract static class AbstractByteArrayConverter
{
AbstractByteArrayConverter( String content )
{
originalLength = content.length();
this.content = alignToStride( content );
}
private final String content;
private final int originalLength;
private String alignToStride( String content )
{
int stride = getStride();
int count = content.length() % stride;
if( count == 0 )
return content;
count = stride - count;
StringBuilder sb = new StringBuilder();
sb.append( content );
while( count > 0 )
{
sb.append( '0' );
count--;
}
return sb.toString();
}
ByteArrayValueImpl convert()
{
try( ByteArrayOutputStream os = new ByteArrayOutputStream() )
{
return writeToStream( os );
} catch( IOException e )
{
throw new IllegalStateException( e );
}
}
private ByteArrayValueImpl writeToStream( ByteArrayOutputStream os )
{
for( int i = 0; i * getStride() < content.length(); i++ )
writeOctet( os, i );
byte[] bytes = os.toByteArray();
int usedBits = originalLength * getMultiplier();
return usedBits > 0
? new ByteArrayValueImpl( usedBits, bytes )
: new ByteArrayValueImpl( 0, EMPTY_ARRAY );
}
private void writeOctet( ByteArrayOutputStream os, int i )
{
int offset = i * getStride();
String value = content.substring( offset, offset + getStride() );
os.write( Integer.parseInt( value, getRadix() ) & BYTE_MASK );
}
protected abstract int getStride();
protected abstract int getRadix();
protected abstract int getMultiplier();
}
private static final class BinConverter extends AbstractByteArrayConverter
{
private BinConverter( String content )
{
super( normalize( content ) );
}
private static String normalize( @NotNull String content )
{
if( !content.startsWith( "'" ) && content.endsWith( "'B" ) )
throw new IllegalArgumentException( "Not BString: " + content );
return CLEAR_BIN_PATTERN.matcher( content.substring( 1, content.length() - 2 ) ).replaceAll( "" );
}
@Override
protected int getStride()
{
return 8;
}
@Override
protected int getRadix()
{
return BIN_RADIX;
}
@Override
protected int getMultiplier()
{
return 1;
}
}
private static final class HexConverter extends AbstractByteArrayConverter
{
private HexConverter( String content )
{
super( normalize( content ) );
}
private static String normalize( @NotNull String content )
{
if( !content.startsWith( "'" ) && content.endsWith( "'H" ) )
throw new IllegalArgumentException( "Not HString: " + content );
return CLEAR_HEX_PATTERN.matcher( content.substring( 1, content.length() - 2 ) ).replaceAll( "" );
}
@Override
protected int getStride()
{
return 2;
}
@Override
protected int getRadix()
{
return HEX_RADIX;
}
@Override
protected int getMultiplier()
{
return 4;
}
}
}
| |
/*PLEASE DO NOT EDIT THIS CODE*/
/*This code was generated using the UMPLE 1.21.0.4666 modeling language!*/
import java.util.*;
// line 10 "Library.ump"
// line 69 "Library.ump"
/**
 * One part (e.g. a chapter or article) of a publication. Umple-generated:
 * subparts/partOfPublication form a reflexive parent-child association and
 * editionOrIssue is a mandatory many-to-one end. The mutators below keep
 * BOTH ends of each association in sync, so their call order is significant.
 */
public class PartOfPublication
{
//------------------------
// MEMBER VARIABLES
//------------------------
//PartOfPublication Attributes
private String title;
private String pageNumber;
//PartOfPublication Associations
// Child parts; inverse end of partOfPublication.
private List<PartOfPublication> subparts;
// Parent part, or null for a top-level part.
private PartOfPublication partOfPublication;
// Owning edition/issue; non-null after successful construction.
private EditionOrIssue editionOrIssue;
//------------------------
// CONSTRUCTOR
//------------------------
/**
 * Creates a part and registers it in aEditionOrIssue's table of contents.
 * Fails (RuntimeException) when aEditionOrIssue is null, since the
 * association is mandatory.
 */
public PartOfPublication(String aTitle, String aPageNumber, EditionOrIssue aEditionOrIssue)
{
title = aTitle;
pageNumber = aPageNumber;
subparts = new ArrayList<PartOfPublication>();
boolean didAddEditionOrIssue = setEditionOrIssue(aEditionOrIssue);
if (!didAddEditionOrIssue)
{
throw new RuntimeException("Unable to create tableOfContent due to editionOrIssue");
}
}
//------------------------
// INTERFACE
//------------------------
// Attribute setter; always succeeds (Umple convention returns a flag).
public boolean setTitle(String aTitle)
{
boolean wasSet = false;
title = aTitle;
wasSet = true;
return wasSet;
}
// Attribute setter; always succeeds.
public boolean setPageNumber(String aPageNumber)
{
boolean wasSet = false;
pageNumber = aPageNumber;
wasSet = true;
return wasSet;
}
public String getTitle()
{
return title;
}
public String getPageNumber()
{
return pageNumber;
}
// Index-based access to children; throws IndexOutOfBoundsException like List.get.
public PartOfPublication getSubpart(int index)
{
PartOfPublication aSubpart = subparts.get(index);
return aSubpart;
}
// Read-only view of the children (callers must not mutate it).
public List<PartOfPublication> getSubparts()
{
List<PartOfPublication> newSubparts = Collections.unmodifiableList(subparts);
return newSubparts;
}
public int numberOfSubparts()
{
int number = subparts.size();
return number;
}
public boolean hasSubparts()
{
boolean has = subparts.size() > 0;
return has;
}
public int indexOfSubpart(PartOfPublication aSubpart)
{
int index = subparts.indexOf(aSubpart);
return index;
}
public PartOfPublication getPartOfPublication()
{
return partOfPublication;
}
public boolean hasPartOfPublication()
{
boolean has = partOfPublication != null;
return has;
}
public EditionOrIssue getEditionOrIssue()
{
return editionOrIssue;
}
public static int minimumNumberOfSubparts()
{
return 0;
}
/**
 * Adds aSubpart as a child, detaching it from any previous parent first.
 * Note the collaboration with setPartOfPublication: when the subpart has no
 * parent yet, setPartOfPublication(this) performs the actual list insert
 * (it calls back into addSubpart, landing in the else branch below).
 */
public boolean addSubpart(PartOfPublication aSubpart)
{
boolean wasAdded = false;
if (subparts.contains(aSubpart)) { return false; }
PartOfPublication existingPartOfPublication = aSubpart.getPartOfPublication();
if (existingPartOfPublication == null)
{
aSubpart.setPartOfPublication(this);
}
else if (!this.equals(existingPartOfPublication))
{
// Re-parent: remove from the old parent, then retry the add.
existingPartOfPublication.removeSubpart(aSubpart);
addSubpart(aSubpart);
}
else
{
// aSubpart already points at this parent; just record it in the list.
subparts.add(aSubpart);
}
wasAdded = true;
return wasAdded;
}
// Removes the child and clears its back-reference to this parent.
public boolean removeSubpart(PartOfPublication aSubpart)
{
boolean wasRemoved = false;
if (subparts.contains(aSubpart))
{
subparts.remove(aSubpart);
aSubpart.setPartOfPublication(null);
wasRemoved = true;
}
return wasRemoved;
}
/**
 * Adds aSubpart at a specific index (clamped to the valid range).
 * NOTE(review): the generated clamp uses numberOfSubparts() - 1 when index
 * exceeds the size — Umple convention; verify before relying on appends
 * through this method.
 */
public boolean addSubpartAt(PartOfPublication aSubpart, int index)
{
boolean wasAdded = false;
if(addSubpart(aSubpart))
{
if(index < 0 ) { index = 0; }
if(index > numberOfSubparts()) { index = numberOfSubparts() - 1; }
subparts.remove(aSubpart);
subparts.add(index, aSubpart);
wasAdded = true;
}
return wasAdded;
}
// Moves an existing child to index, or delegates to addSubpartAt for a new one.
public boolean addOrMoveSubpartAt(PartOfPublication aSubpart, int index)
{
boolean wasAdded = false;
if(subparts.contains(aSubpart))
{
if(index < 0 ) { index = 0; }
if(index > numberOfSubparts()) { index = numberOfSubparts() - 1; }
subparts.remove(aSubpart);
subparts.add(index, aSubpart);
wasAdded = true;
}
else
{
wasAdded = addSubpartAt(aSubpart, index);
}
return wasAdded;
}
/**
 * Sets (or clears, with null) the parent, keeping both ends consistent:
 * the old parent drops this part and the new parent registers it.
 */
public boolean setPartOfPublication(PartOfPublication aPartOfPublication)
{
boolean wasSet = false;
PartOfPublication existingPartOfPublication = partOfPublication;
partOfPublication = aPartOfPublication;
if (existingPartOfPublication != null && !existingPartOfPublication.equals(aPartOfPublication))
{
existingPartOfPublication.removeSubpart(this);
}
if (aPartOfPublication != null)
{
aPartOfPublication.addSubpart(this);
}
wasSet = true;
return wasSet;
}
/**
 * Moves this part to another edition/issue. Returns false (no change) for
 * a null argument because the association is mandatory.
 */
public boolean setEditionOrIssue(EditionOrIssue aEditionOrIssue)
{
boolean wasSet = false;
if (aEditionOrIssue == null)
{
return wasSet;
}
EditionOrIssue existingEditionOrIssue = editionOrIssue;
editionOrIssue = aEditionOrIssue;
if (existingEditionOrIssue != null && !existingEditionOrIssue.equals(aEditionOrIssue))
{
existingEditionOrIssue.removeTableOfContent(this);
}
editionOrIssue.addTableOfContent(this);
wasSet = true;
return wasSet;
}
/**
 * Unlinks this part from all associations: detaches every child (the loop
 * drains subparts via setPartOfPublication(null)), then detaches from the
 * parent and from the owning edition/issue.
 */
public void delete()
{
while( !subparts.isEmpty() )
{
subparts.get(0).setPartOfPublication(null);
}
if (partOfPublication != null)
{
PartOfPublication placeholderPartOfPublication = partOfPublication;
this.partOfPublication = null;
placeholderPartOfPublication.removeSubpart(this);
}
EditionOrIssue placeholderEditionOrIssue = editionOrIssue;
this.editionOrIssue = null;
placeholderEditionOrIssue.removeTableOfContent(this);
}
// Debug representation: attributes plus the identity hash of the owning edition.
public String toString()
{
String outputString = "";
return super.toString() + "["+
"title" + ":" + getTitle()+ "," +
"pageNumber" + ":" + getPageNumber()+ "]" + System.getProperties().getProperty("line.separator") +
" " + "editionOrIssue = "+(getEditionOrIssue()!=null?Integer.toHexString(System.identityHashCode(getEditionOrIssue())):"null")
+ outputString;
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.streaming;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Properties;
import java.util.StringTokenizer;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.ClusterMapReduceTestCase;
import org.apache.hadoop.mapred.Counters;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.RunningJob;
import org.apache.hadoop.mapred.SkipBadRecords;
import org.apache.hadoop.mapred.Utils;
import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
/**
 * Tests Hadoop Streaming's skip-bad-records feature: BadApp deliberately
 * crashes on known input records and the framework must detect and skip
 * exactly those records, leaving the rest of the output intact.
 */
public class TestStreamingBadRecords extends ClusterMapReduceTestCase
{
private static final Log LOG =
LogFactory.getLog(TestStreamingBadRecords.class);
// Records on which the map-side BadApp fails (see BadApp.processLine).
private static final List<String> MAPPER_BAD_RECORDS =
Arrays.asList("hey022","hey023","hey099");
// Records on which the reduce-side BadApp fails.
private static final List<String> REDUCER_BAD_RECORDS =
Arrays.asList("hey001","hey018");
// Shell command lines launching BadApp as mapper / reducer ("true" => reducer).
private static final String badMapper =
StreamUtil.makeJavaCommand(BadApp.class, new String[]{});
private static final String badReducer =
StreamUtil.makeJavaCommand(BadApp.class, new String[]{"true"});
private static final int INPUTSIZE=100;
public TestStreamingBadRecords() throws IOException
{
UtilTest utilTest = new UtilTest(getClass().getName());
utilTest.checkUserDir();
utilTest.redirectIfAntJunit();
}
// Starts the mini MR cluster; retired-job cleanup is disabled so counters
// remain queryable after the job finishes.
protected void setUp() throws Exception {
Properties props = new Properties();
props.setProperty(JTConfig.JT_RETIREJOBS, "false");
startCluster(true, props);
}
// Writes INPUTSIZE records "hey001".."hey100", each padded with a large
// prefix so the streaming pipes actually flush mid-task.
private void createInput() throws Exception {
OutputStream os = getFileSystem().create(new Path(getInputDir(),
"text.txt"));
Writer wr = new OutputStreamWriter(os);
//increasing the record size so that we have stream flushing
String prefix = new String(new byte[20*1024]);
for(int i=1;i<=INPUTSIZE;i++) {
String str = ""+i;
int zerosToPrepend = 3 - str.length();
for(int j=0;j<zerosToPrepend;j++){
str = "0"+str;
}
wr.write(prefix + "hey"+str+"\n");
}wr.close();
}
/**
 * Asserts the job succeeded and that no bad record leaked into the output;
 * when validateCount is set, also checks the skip/input/output counters
 * against the known bad-record counts.
 */
private void validateOutput(RunningJob runningJob, boolean validateCount)
throws Exception {
LOG.info(runningJob.getCounters().toString());
assertTrue(runningJob.isSuccessful());
if(validateCount) {
//validate counters
String counterGrp = "org.apache.hadoop.mapred.Task$Counter";
Counters counters = runningJob.getCounters();
assertEquals(counters.findCounter(counterGrp, "MAP_SKIPPED_RECORDS").
getCounter(),MAPPER_BAD_RECORDS.size());
int mapRecs = INPUTSIZE - MAPPER_BAD_RECORDS.size();
assertEquals(counters.findCounter(counterGrp, "MAP_INPUT_RECORDS").
getCounter(),mapRecs);
assertEquals(counters.findCounter(counterGrp, "MAP_OUTPUT_RECORDS").
getCounter(),mapRecs);
int redRecs = mapRecs - REDUCER_BAD_RECORDS.size();
assertEquals(counters.findCounter(counterGrp, "REDUCE_SKIPPED_RECORDS").
getCounter(),REDUCER_BAD_RECORDS.size());
assertEquals(counters.findCounter(counterGrp, "REDUCE_SKIPPED_GROUPS").
getCounter(),REDUCER_BAD_RECORDS.size());
assertEquals(counters.findCounter(counterGrp, "REDUCE_INPUT_GROUPS").
getCounter(),redRecs);
assertEquals(counters.findCounter(counterGrp, "REDUCE_INPUT_RECORDS").
getCounter(),redRecs);
assertEquals(counters.findCounter(counterGrp, "REDUCE_OUTPUT_RECORDS").
getCounter(),redRecs);
}
// Scan the first output file: every "heyNNN" token must be a good record.
List<String> badRecs = new ArrayList<String>();
badRecs.addAll(MAPPER_BAD_RECORDS);
badRecs.addAll(REDUCER_BAD_RECORDS);
Path[] outputFiles = FileUtil.stat2Paths(
getFileSystem().listStatus(getOutputDir(),
new Utils.OutputFileUtils.OutputFilesFilter()));
if (outputFiles.length > 0) {
InputStream is = getFileSystem().open(outputFiles[0]);
BufferedReader reader = new BufferedReader(new InputStreamReader(is));
String line = reader.readLine();
int counter = 0;
while (line != null) {
counter++;
StringTokenizer tokeniz = new StringTokenizer(line, "\t");
String value = tokeniz.nextToken();
int index = value.indexOf("hey");
assertTrue(index>-1);
if(index>-1) {
String heyStr = value.substring(index);
assertTrue(!badRecs.contains(heyStr));
}
line = reader.readLine();
}
reader.close();
if(validateCount) {
assertEquals(INPUTSIZE-badRecs.size(), counter);
}
}
}
/**
 * Skip mode with unlimited skip windows: each bad record costs one task
 * attempt, so maxattempts is sized as startAttempts + 1 + badRecordCount.
 * Also verifies that skip.outdir=none suppresses the skip directory.
 */
public void testSkip() throws Exception {
JobConf clusterConf = createJobConf();
createInput();
int attSkip =0;
SkipBadRecords.setAttemptsToStartSkipping(clusterConf,attSkip);
//the no of attempts to successfully complete the task depends
//on the no of bad records.
int mapperAttempts = attSkip+1+MAPPER_BAD_RECORDS.size();
int reducerAttempts = attSkip+1+REDUCER_BAD_RECORDS.size();
String[] args = new String[] {
"-input", (new Path(getInputDir(), "text.txt")).toString(),
"-output", getOutputDir().toString(),
"-mapper", badMapper,
"-reducer", badReducer,
"-verbose",
"-inputformat", "org.apache.hadoop.mapred.KeyValueTextInputFormat",
"-jobconf", "mapreduce.task.skip.start.attempts="+attSkip,
"-jobconf", "mapreduce.job.skip.outdir=none",
"-jobconf", "mapreduce.map.maxattempts="+mapperAttempts,
"-jobconf", "mapreduce.reduce.maxattempts="+reducerAttempts,
"-jobconf", "mapreduce.map.skip.maxrecords="+Long.MAX_VALUE,
"-jobconf", "mapreduce.reduce.skip.maxgroups="+Long.MAX_VALUE,
"-jobconf", "mapreduce.job.maps=1",
"-jobconf", "mapreduce.job.reduces=1",
"-jobconf", "fs.default.name="+clusterConf.get("fs.default.name"),
"-jobconf", "mapreduce.jobtracker.address=" +
clusterConf.get(JTConfig.JT_IPC_ADDRESS),
"-jobconf", "mapreduce.jobtracker.http.address="
+clusterConf.get(JTConfig.JT_HTTP_ADDRESS),
"-jobconf", "stream.debug=set",
"-jobconf", "mapreduce.task.files.preserve.failedtasks=true",
"-jobconf", "stream.tmpdir="+System.getProperty("test.build.data","/tmp")
};
StreamJob job = new StreamJob(args, false);
job.go();
// Counters are not validated here: skipping everything in one go does not
// produce the narrowed-down counter values checked in testNarrowDown.
validateOutput(job.running_, false);
//validate that there is no skip directory as it has been set to "none"
assertTrue(SkipBadRecords.getSkipOutputPath(job.jobConf_)==null);
}
/**
 * Narrow-down mode: skip windows of size 1 force the framework to binary
 * search for the exact bad records, so counters can be validated precisely.
 */
public void testNarrowDown() throws Exception {
createInput();
JobConf clusterConf = createJobConf();
String[] args = new String[] {
"-input", (new Path(getInputDir(), "text.txt")).toString(),
"-output", getOutputDir().toString(),
"-mapper", badMapper,
"-reducer", badReducer,
"-verbose",
"-inputformat", "org.apache.hadoop.mapred.KeyValueTextInputFormat",
"-jobconf", "mapreduce.task.skip.start.attempts=1",
//actually fewer attempts are required than specified
//but to cater to the case of slow processed counter update, need to
//have more attempts
"-jobconf", "mapreduce.map.maxattempts=20",
"-jobconf", "mapreduce.reduce.maxattempts=15",
"-jobconf", "mapreduce.map.skip.maxrecords=1",
"-jobconf", "mapreduce.reduce.skip.maxgroups=1",
"-jobconf", "mapreduce.job.maps=1",
"-jobconf", "mapreduce.job.reduces=1",
"-jobconf", "fs.default.name="+clusterConf.get("fs.default.name"),
"-jobconf", "mapreduce.jobtracker.address="+clusterConf.get(JTConfig.JT_IPC_ADDRESS),
"-jobconf", "mapreduce.jobtracker.http.address="
+clusterConf.get(JTConfig.JT_HTTP_ADDRESS),
"-jobconf", "stream.debug=set",
"-jobconf", "mapreduce.task.files.preserve.failedtasks=true",
"-jobconf", "stream.tmpdir="+System.getProperty("test.build.data","/tmp")
};
StreamJob job = new StreamJob(args, false);
job.go();
validateOutput(job.running_, true);
assertTrue(SkipBadRecords.getSkipOutputPath(job.jobConf_)!=null);
}
/**
 * Identity streaming app: echoes stdin to stdout and reports the
 * processed-records counter to the framework every 10 lines via the
 * streaming "reporter:counter:..." stderr protocol.
 */
static class App{
boolean isReducer;
public App(String[] args) throws Exception{
if(args.length>0) {
isReducer = Boolean.parseBoolean(args[0]);
}
String counter = SkipBadRecords.COUNTER_MAP_PROCESSED_RECORDS;
if(isReducer) {
counter = SkipBadRecords.COUNTER_REDUCE_PROCESSED_GROUPS;
}
BufferedReader in = new BufferedReader(new InputStreamReader(System.in));
String line;
int count = 0;
while ((line = in.readLine()) != null) {
processLine(line);
count++;
if(count>=10) {
System.err.println("reporter:counter:"+SkipBadRecords.COUNTER_GROUP+
","+counter+","+count);
count = 0;
}
}
}
protected void processLine(String line) throws Exception{
System.out.println(line);
}
public static void main(String[] args) throws Exception{
new App(args);
}
}
/**
 * App variant that fails deterministically on the configured bad records:
 * the 1st and 3rd via abrupt exit, the 2nd via an exception, exercising
 * both failure flavors the skip logic must handle.
 */
static class BadApp extends App{
public BadApp(String[] args) throws Exception {
super(args);
}
protected void processLine(String line) throws Exception {
List<String> badRecords = MAPPER_BAD_RECORDS;
if(isReducer) {
badRecords = REDUCER_BAD_RECORDS;
}
if(badRecords.size()>0 && line.contains(badRecords.get(0))) {
LOG.warn("Encountered BAD record");
System.exit(-1);
}
else if(badRecords.size()>1 && line.contains(badRecords.get(1))) {
LOG.warn("Encountered BAD record");
throw new Exception("Got bad record..crashing");
}
else if(badRecords.size()>2 && line.contains(badRecords.get(2))) {
LOG.warn("Encountered BAD record");
System.exit(-1);
}
super.processLine(line);
}
public static void main(String[] args) throws Exception{
new BadApp(args);
}
}
}
| |
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.keycloak.adapters.authorization;
import org.jboss.logging.Logger;
import org.keycloak.AuthorizationContext;
import org.keycloak.adapters.OIDCHttpFacade;
import org.keycloak.adapters.spi.HttpFacade.Request;
import org.keycloak.adapters.spi.HttpFacade.Response;
import org.keycloak.authorization.client.AuthzClient;
import org.keycloak.authorization.client.representation.ResourceRepresentation;
import org.keycloak.authorization.client.resource.ProtectedResource;
import org.keycloak.representations.AccessToken;
import org.keycloak.representations.adapters.config.PolicyEnforcerConfig;
import org.keycloak.representations.adapters.config.PolicyEnforcerConfig.EnforcementMode;
import org.keycloak.representations.adapters.config.PolicyEnforcerConfig.PathConfig;
import org.keycloak.representations.authorization.Permission;
import java.net.URI;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
* @author <a href="mailto:psilva@redhat.com">Pedro Igor</a>
*/
public abstract class AbstractPolicyEnforcer {

    // FIX: logger made final (shared, never reassigned).
    private static final Logger LOGGER = Logger.getLogger(AbstractPolicyEnforcer.class);

    private final PolicyEnforcerConfig enforcerConfig;
    private final PolicyEnforcer policyEnforcer;
    // Mutable on purpose: entries are added when pattern paths resolve to
    // concrete resources and removed when resource instances are DELETEd.
    private List<PathConfig> paths;
    private AuthzClient authzClient;
    private PathMatcher pathMatcher;

    public AbstractPolicyEnforcer(PolicyEnforcer policyEnforcer) {
        this.policyEnforcer = policyEnforcer;
        this.enforcerConfig = policyEnforcer.getEnforcerConfig();
        this.authzClient = policyEnforcer.getClient();
        this.pathMatcher = new PathMatcher();
        this.paths = policyEnforcer.getPaths();
    }

    /**
     * Authorizes the current request against the configured paths.
     * Returns a granted context when enforcement is disabled, the path is
     * unprotected in PERMISSIVE mode, or the token carries the required
     * permissions; otherwise challenges the client or sends a 403.
     */
    public AuthorizationContext authorize(OIDCHttpFacade httpFacade) {
        EnforcementMode enforcementMode = this.enforcerConfig.getEnforcementMode();

        if (EnforcementMode.DISABLED.equals(enforcementMode)) {
            return createEmptyAuthorizationContext(true);
        }

        AccessToken accessToken = httpFacade.getSecurityContext().getToken();
        Request request = httpFacade.getRequest();
        Response response = httpFacade.getResponse();
        String path = getPath(request);
        PathConfig pathConfig = this.pathMatcher.matches(path, this.paths);

        LOGGER.debugf("Checking permissions for path [%s] with config [%s].", request.getURI(), pathConfig);

        if (pathConfig == null) {
            if (EnforcementMode.PERMISSIVE.equals(enforcementMode)) {
                // Unmapped paths are allowed in permissive mode.
                return createAuthorizationContext(accessToken);
            }
            LOGGER.debugf("Could not find a configuration for path [%s]", path);
            response.sendError(403, "Could not find a configuration for path [" + path + "].");
            return createEmptyAuthorizationContext(false);
        }

        PathConfig actualPathConfig = resolvePathConfig(pathConfig, request);
        Set<String> requiredScopes = getRequiredScopes(actualPathConfig, request);

        if (isAuthorized(actualPathConfig, requiredScopes, accessToken, httpFacade)) {
            try {
                return createAuthorizationContext(accessToken);
            } catch (Exception e) {
                throw new RuntimeException("Error processing path [" + actualPathConfig.getPath() + "].", e);
            }
        }

        if (!challenge(actualPathConfig, requiredScopes, httpFacade)) {
            LOGGER.debugf("Sending challenge to the client. Path [%s]", pathConfig);
            response.sendError(403, "Authorization failed.");
        }

        return createEmptyAuthorizationContext(false);
    }

    /**
     * Sends an authorization challenge to the client.
     *
     * @return {@code true} when a challenge was sent; {@code false} to let
     *         the caller respond with a plain 403
     */
    protected abstract boolean challenge(PathConfig pathConfig, Set<String> requiredScopes, OIDCHttpFacade facade);

    /**
     * Checks whether the token's RPT permissions cover the path and required
     * scopes. A DELETE on a resource instance also evicts its cached path.
     */
    protected boolean isAuthorized(PathConfig actualPathConfig, Set<String> requiredScopes, AccessToken accessToken, OIDCHttpFacade httpFacade) {
        Request request = httpFacade.getRequest();
        PolicyEnforcerConfig enforcerConfig = getEnforcerConfig();
        String accessDeniedPath = enforcerConfig.getAccessDeniedPath();

        if (accessDeniedPath != null) {
            // The access-denied page itself must always be reachable.
            if (request.getURI().contains(accessDeniedPath)) {
                return true;
            }
        }

        AccessToken.Authorization authorization = accessToken.getAuthorization();

        if (authorization == null) {
            return false;
        }

        List<Permission> permissions = authorization.getPermissions();

        for (Permission permission : permissions) {
            Set<String> allowedScopes = permission.getScopes();

            // FIX: getScopes() may be null; the original code could NPE on
            // containsAll when the permission carried no scope set.
            if (allowedScopes == null) {
                allowedScopes = Collections.emptySet();
            }

            if (permission.getResourceSetId() != null) {
                if (permission.getResourceSetId().equals(actualPathConfig.getId())) {
                    if ((allowedScopes.isEmpty() && requiredScopes.isEmpty()) || allowedScopes.containsAll(requiredScopes)) {
                        LOGGER.debugf("Authorization GRANTED for path [%s]. Permissions [%s].", actualPathConfig, permissions);
                        if (request.getMethod().equalsIgnoreCase("DELETE") && actualPathConfig.isInstance()) {
                            // The resource instance is being removed; drop its cached path.
                            this.paths.remove(actualPathConfig);
                        }
                        return true;
                    }
                }
            } else {
                // Scope-only permission (no resource id).
                if ((allowedScopes.isEmpty() && requiredScopes.isEmpty()) || allowedScopes.containsAll(requiredScopes)) {
                    LOGGER.debugf("Authorization GRANTED for path [%s]. Permissions [%s].", actualPathConfig, permissions);
                    return true;
                }
            }
        }

        LOGGER.debugf("Authorization FAILED for path [%s]. No enough permissions [%s].", actualPathConfig, permissions);

        return false;
    }

    protected AuthzClient getAuthzClient() {
        return this.authzClient;
    }

    protected PolicyEnforcerConfig getEnforcerConfig() {
        return enforcerConfig;
    }

    protected PolicyEnforcer getPolicyEnforcer() {
        return policyEnforcer;
    }

    /**
     * Extracts the application-relative path from the request URI (strips
     * the leading slash and the context segment before the first '/').
     */
    private static String getPath(Request request) {
        String pathInfo = URI.create(request.getURI()).getPath().substring(1);
        return pathInfo.substring(pathInfo.indexOf('/'));
    }

    /** Context with a fixed grant decision and no permissions attached. */
    private AuthorizationContext createEmptyAuthorizationContext(final boolean granted) {
        return new AuthorizationContext() {
            @Override
            public boolean hasPermission(String resourceName, String scopeName) {
                return granted;
            }

            @Override
            public boolean hasResourcePermission(String resourceName) {
                return granted;
            }

            @Override
            public boolean hasScopePermission(String scopeName) {
                return granted;
            }

            @Override
            public List<Permission> getPermissions() {
                // FIX: typed empty list instead of the raw Collections.EMPTY_LIST.
                return Collections.emptyList();
            }

            @Override
            public boolean isGranted() {
                return granted;
            }
        };
    }

    /**
     * For pattern paths, resolves the concrete resource on the server and
     * caches it as an instance PathConfig; otherwise returns the original.
     */
    private PathConfig resolvePathConfig(PathConfig originalConfig, Request request) {
        if (originalConfig.hasPattern()) {
            String path = getPath(request);
            ProtectedResource resource = this.authzClient.protection().resource();
            Set<String> search = resource.findByFilter("uri=" + path);

            if (!search.isEmpty()) {
                // resource does exist on the server, cache it
                ResourceRepresentation targetResource = resource.findById(search.iterator().next()).getResourceDescription();
                PathConfig config = new PathConfig();

                config.setId(targetResource.getId());
                config.setName(targetResource.getName());
                config.setType(targetResource.getType());
                config.setPath(targetResource.getUri());
                config.setScopes(originalConfig.getScopes());
                config.setMethods(originalConfig.getMethods());
                config.setInstance(true);

                this.paths.add(config);

                return config;
            }
        }

        return originalConfig;
    }

    /** Path-level scopes plus any scopes declared for the request's HTTP method. */
    private Set<String> getRequiredScopes(PathConfig pathConfig, Request request) {
        Set<String> requiredScopes = new HashSet<>(pathConfig.getScopes());
        String method = request.getMethod();

        for (PolicyEnforcerConfig.MethodConfig methodConfig : pathConfig.getMethods()) {
            if (methodConfig.getMethod().equals(method)) {
                requiredScopes.addAll(methodConfig.getScopes());
            }
        }

        return requiredScopes;
    }

    private AuthorizationContext createAuthorizationContext(AccessToken accessToken) {
        return new AuthorizationContext(accessToken, this.paths);
    }
}
| |
/*
* Copyright 2002-2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.beans.factory.support;
import java.lang.reflect.Constructor;
import java.lang.reflect.Method;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.BeanInstantiationException;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.BeanFactory;
import org.springframework.cglib.core.SpringNamingPolicy;
import org.springframework.cglib.proxy.Callback;
import org.springframework.cglib.proxy.CallbackFilter;
import org.springframework.cglib.proxy.Enhancer;
import org.springframework.cglib.proxy.Factory;
import org.springframework.cglib.proxy.MethodInterceptor;
import org.springframework.cglib.proxy.MethodProxy;
import org.springframework.cglib.proxy.NoOp;
import org.springframework.util.StringUtils;
/**
* Default object instantiation strategy for use in BeanFactories.
*
* <p>Uses CGLIB to generate subclasses dynamically if methods need to be
* overridden by the container to implement <em>Method Injection</em>.
*
* @author Rod Johnson
* @author Juergen Hoeller
* @author Sam Brannen
* @since 1.1
*/
public class CglibSubclassingInstantiationStrategy extends SimpleInstantiationStrategy {
/**
 * Index in the CGLIB callback array for passthrough behavior,
 * in which case the subclass won't override the original class.
 * These three indices must line up with CglibSubclassCreator.CALLBACK_TYPES
 * and the callback array set on each instance.
 */
private static final int PASSTHROUGH = 0;
/**
 * Index in the CGLIB callback array for a method that should
 * be overridden to provide <em>method lookup</em>.
 */
private static final int LOOKUP_OVERRIDE = 1;
/**
 * Index in the CGLIB callback array for a method that should
 * be overridden using generic <em>method replacer</em> functionality.
 */
private static final int METHOD_REPLACER = 2;
@Override
protected Object instantiateWithMethodInjection(RootBeanDefinition bd, String beanName, BeanFactory owner) {
// Convenience overload: no explicit constructor, so the generated
// subclass is instantiated through its default constructor.
return instantiateWithMethodInjection(bd, beanName, owner, null);
}
@Override
protected Object instantiateWithMethodInjection(RootBeanDefinition bd, String beanName, BeanFactory owner,
Constructor<?> ctor, Object... args) {
// Must generate CGLIB subclass...
return new CglibSubclassCreator(bd, owner).instantiate(ctor, args);
}
/**
 * An inner class created for historical reasons to avoid external CGLIB dependency
 * in Spring versions earlier than 3.2.
 */
private static class CglibSubclassCreator {

	private static final Class<?>[] CALLBACK_TYPES = new Class<?>[]
			{NoOp.class, LookupOverrideMethodInterceptor.class, ReplaceOverrideMethodInterceptor.class};

	private final RootBeanDefinition beanDefinition;

	private final BeanFactory owner;

	CglibSubclassCreator(RootBeanDefinition beanDefinition, BeanFactory owner) {
		this.beanDefinition = beanDefinition;
		this.owner = owner;
	}

	/**
	 * Create a new instance of a dynamically generated subclass implementing the
	 * required lookups.
	 * @param ctor constructor to use. If this is {@code null}, use the
	 * no-arg constructor (no parameterization, or Setter Injection)
	 * @param args arguments to use for the constructor.
	 * Ignored if the {@code ctor} parameter is {@code null}.
	 * @return new instance of the dynamically generated subclass
	 */
	public Object instantiate(Constructor<?> ctor, Object... args) {
		Class<?> enhancedClass = createEnhancedSubclass(this.beanDefinition);
		Object bean = (ctor != null
				? instantiateWithConstructor(enhancedClass, ctor, args)
				: BeanUtils.instantiate(enhancedClass));
		// SPR-10785: set callbacks directly on the instance instead of in the
		// enhanced class (via the Enhancer) in order to avoid memory leaks.
		((Factory) bean).setCallbacks(new Callback[] {NoOp.INSTANCE,
				new LookupOverrideMethodInterceptor(this.beanDefinition, this.owner),
				new ReplaceOverrideMethodInterceptor(this.beanDefinition, this.owner)});
		return bean;
	}

	/** Invoke the matching constructor on the enhanced class reflectively. */
	private Object instantiateWithConstructor(Class<?> enhancedClass, Constructor<?> ctor, Object... args) {
		try {
			return enhancedClass.getConstructor(ctor.getParameterTypes()).newInstance(args);
		}
		catch (Exception ex) {
			throw new BeanInstantiationException(this.beanDefinition.getBeanClass(),
					"Failed to invoke constructor for CGLIB enhanced subclass [" + enhancedClass.getName() + "]", ex);
		}
	}

	/**
	 * Create an enhanced subclass of the bean class for the provided bean
	 * definition, using CGLIB.
	 */
	private Class<?> createEnhancedSubclass(RootBeanDefinition beanDefinition) {
		Enhancer enhancer = new Enhancer();
		enhancer.setSuperclass(beanDefinition.getBeanClass());
		enhancer.setNamingPolicy(SpringNamingPolicy.INSTANCE);
		enhancer.setCallbackFilter(new MethodOverrideCallbackFilter(beanDefinition));
		enhancer.setCallbackTypes(CALLBACK_TYPES);
		return enhancer.createClass();
	}
}
/**
* Class providing hashCode and equals methods required by CGLIB to
* ensure that CGLIB doesn't generate a distinct class per bean.
* Identity is based on class and bean definition.
*/
private static class CglibIdentitySupport {

    /** Bean definition that defines this object's identity (equals/hashCode). */
    private final RootBeanDefinition beanDefinition;

    public CglibIdentitySupport(RootBeanDefinition beanDefinition) {
        this.beanDefinition = beanDefinition;
    }

    public RootBeanDefinition getBeanDefinition() {
        return this.beanDefinition;
    }

    /**
     * Two instances are equal when they are of the exact same class and wrap
     * equal bean definitions.
     * <p>FIX: the previous implementation dereferenced {@code other} without a
     * null check and threw NullPointerException for {@code null} arguments,
     * violating the {@link Object#equals} contract (which requires returning
     * {@code false} for {@code null}).
     */
    @Override
    public boolean equals(Object other) {
        return (this == other || (other != null && getClass() == other.getClass() &&
                this.beanDefinition.equals(((CglibIdentitySupport) other).beanDefinition)));
    }

    @Override
    public int hashCode() {
        return this.beanDefinition.hashCode();
    }
}
/**
* CGLIB callback for filtering method interception behavior.
*/
private static class MethodOverrideCallbackFilter extends CglibIdentitySupport implements CallbackFilter {

    private static final Log logger = LogFactory.getLog(MethodOverrideCallbackFilter.class);

    public MethodOverrideCallbackFilter(RootBeanDefinition beanDefinition) {
        super(beanDefinition);
    }

    /**
     * Select the callback index for the given method: pass-through when no
     * override is registered, otherwise the lookup or replace interceptor
     * matching the override's type.
     */
    @Override
    public int accept(Method method) {
        MethodOverride override = getBeanDefinition().getMethodOverrides().getOverride(method);
        if (logger.isTraceEnabled()) {
            logger.trace("Override for '" + method.getName() + "' is [" + override + "]");
        }
        if (override == null) {
            return PASSTHROUGH;
        }
        if (override instanceof LookupOverride) {
            return LOOKUP_OVERRIDE;
        }
        if (override instanceof ReplaceOverride) {
            return METHOD_REPLACER;
        }
        throw new UnsupportedOperationException("Unexpected MethodOverride subclass: " +
                override.getClass().getName());
    }
}
/**
* CGLIB MethodInterceptor to override methods, replacing them with an
* implementation that returns a bean looked up in the container.
*/
private static class LookupOverrideMethodInterceptor extends CglibIdentitySupport implements MethodInterceptor {

    private final BeanFactory owner;

    public LookupOverrideMethodInterceptor(RootBeanDefinition beanDefinition, BeanFactory owner) {
        super(beanDefinition);
        this.owner = owner;
    }

    /**
     * Replace the intercepted method with a container lookup: by bean name
     * when the override specifies one, otherwise by the method's return type.
     */
    @Override
    public Object intercept(Object obj, Method method, Object[] args, MethodProxy mp) throws Throwable {
        // Cast is safe, as CallbackFilter filters are used selectively.
        LookupOverride override = (LookupOverride) getBeanDefinition().getMethodOverrides().getOverride(method);
        // A no-arg invocation must not force explicit args onto the lookup.
        Object[] lookupArgs = (args.length > 0 ? args : null);
        String beanName = override.getBeanName();
        return (StringUtils.hasText(beanName)
                ? this.owner.getBean(beanName, lookupArgs)
                : this.owner.getBean(method.getReturnType(), lookupArgs));
    }
}
/**
* CGLIB MethodInterceptor to override methods, replacing them with a call
* to a generic MethodReplacer.
*/
private static class ReplaceOverrideMethodInterceptor extends CglibIdentitySupport implements MethodInterceptor {

    private final BeanFactory owner;

    public ReplaceOverrideMethodInterceptor(RootBeanDefinition beanDefinition, BeanFactory owner) {
        super(beanDefinition);
        this.owner = owner;
    }

    /**
     * Delegate the intercepted call to the MethodReplacer bean named by the
     * ReplaceOverride registered for this method.
     */
    @Override
    public Object intercept(Object obj, Method method, Object[] args, MethodProxy mp) throws Throwable {
        ReplaceOverride override = (ReplaceOverride) getBeanDefinition().getMethodOverrides().getOverride(method);
        // TODO could cache if a singleton for minor performance optimization
        MethodReplacer replacer = this.owner.getBean(override.getMethodReplacerBeanName(), MethodReplacer.class);
        return replacer.reimplement(obj, method, args);
    }
}
}
| |
package com.cloud.network.topology;
import com.cloud.agent.api.Command;
import com.cloud.agent.api.PvlanSetupCommand;
import com.cloud.agent.manager.Commands;
import com.cloud.dc.DataCenter;
import com.cloud.exception.ResourceUnavailableException;
import com.cloud.network.Network;
import com.cloud.network.PublicIpAddress;
import com.cloud.network.VpnUser;
import com.cloud.network.router.VirtualRouter;
import com.cloud.network.rules.AdvancedVpnRules;
import com.cloud.network.rules.DhcpEntryRules;
import com.cloud.network.rules.DhcpPvlanRules;
import com.cloud.network.rules.NetworkAclsRules;
import com.cloud.network.rules.NicPlugInOutRules;
import com.cloud.network.rules.PrivateGatewayRules;
import com.cloud.network.rules.PublicIpAclsRules;
import com.cloud.network.rules.StaticRoutesRules;
import com.cloud.network.rules.UserdataPwdRules;
import com.cloud.network.rules.VpcIpAssociationRules;
import com.cloud.network.vpc.NetworkACLItem;
import com.cloud.network.vpc.PrivateIpAddress;
import com.cloud.network.vpc.PrivateIpVO;
import com.cloud.network.vpc.StaticRouteProfile;
import com.cloud.utils.net.NetUtils;
import com.cloud.vm.NicProfile;
import com.cloud.vm.NicVO;
import com.cloud.vm.UserVmVO;
import com.cloud.vm.VirtualMachine.State;
import com.cloud.vm.VirtualMachineProfile;
import java.util.List;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
/**
 * Visitor that pushes advanced-zone (VPC) networking rules to a virtual
 * router by building agent {@link Commands} and sending them via
 * {@code _networkGeneralHelper}. Extends the basic visitor with VPC-specific
 * rule types (network ACLs, VPC IP association, private gateways, ...).
 */
@Component
public class AdvancedNetworkVisitor extends BasicNetworkVisitor {

    private static final Logger s_logger = LoggerFactory.getLogger(AdvancedNetworkVisitor.class);

    /**
     * Sends the VM password and VM-data commands for the given nic/VM to the
     * router; stops on the first command error.
     */
    @Override
    public boolean visit(final UserdataPwdRules userdata) throws ResourceUnavailableException {
        final VirtualRouter router = userdata.getRouter();
        final Commands commands = new Commands(Command.OnError.Stop);
        final VirtualMachineProfile profile = userdata.getProfile();
        final NicVO nicVo = userdata.getNicVo();
        final UserVmVO userVM = userdata.getUserVM();
        _commandSetupHelper.createPasswordCommand(router, profile, nicVo, commands);
        _commandSetupHelper.createVmDataCommand(router, userVM, nicVo, userVM.getDetail("SSH.PublicKey"), commands);
        return _networkGeneralHelper.sendCommandsToRouter(router, commands);
    }

    /** Sends a DHCP entry command for the VM's nic to the router. */
    @Override
    public boolean visit(final DhcpEntryRules dhcp) throws ResourceUnavailableException {
        final VirtualRouter router = dhcp.getRouter();
        final Commands commands = new Commands(Command.OnError.Stop);
        final NicVO nicVo = dhcp.getNicVo();
        final UserVmVO userVM = dhcp.getUserVM();
        _commandSetupHelper.createDhcpEntryCommand(router, userVM, nicVo, commands);
        return _networkGeneralHelper.sendCommandsToRouter(router, commands);
    }

    /**
     * Applies network ACL rules on the router; continues past individual
     * command failures so as many rules as possible are applied.
     */
    @Override
    public boolean visit(final NetworkAclsRules acls) throws ResourceUnavailableException {
        final VirtualRouter router = acls.getRouter();
        final Network network = acls.getNetwork();
        final Commands commands = new Commands(Command.OnError.Continue);
        final List<? extends NetworkACLItem> rules = acls.getRules();
        _commandSetupHelper.createNetworkACLsCommands(rules, router, commands, network.getId(), acls.isPrivateGateway());
        return _networkGeneralHelper.sendCommandsToRouter(router, commands);
    }

    /** Applies ACL rules for a public IP on the router. */
    @Override
    public boolean visit(final PublicIpAclsRules acls) throws ResourceUnavailableException {
        final VirtualRouter router = acls.getRouter();
        final Commands commands = new Commands(Command.OnError.Continue);
        final List<? extends NetworkACLItem> rules = acls.getRules();
        _commandSetupHelper.createPublicIpACLsCommands(rules, router, commands, acls.getPublicIp());
        return _networkGeneralHelper.sendCommandsToRouter(router, commands);
    }

    /**
     * Associates public IPs with the VPC router. No-op (returns {@code true})
     * when there are no IPs to send.
     */
    @Override
    public boolean visit(final VpcIpAssociationRules vpcip) throws ResourceUnavailableException {
        final VirtualRouter router = vpcip.getRouter();
        final Commands cmds = new Commands(Command.OnError.Continue);
        final Map<String, String> vlanMacAddress = vpcip.getVlanMacAddress();
        final List<PublicIpAddress> ipsToSend = vpcip.getIpsToSend();
        if (ipsToSend.isEmpty()) {
            return true;
        }
        // FIX: previous debug message referred to "AdvancedNetworkTopology",
        // which is not this class.
        s_logger.debug("Sending VPC public IP association commands to router " + router.getId());
        _commandSetupHelper.createVpcAssociatePublicIPCommands(router, ipsToSend, cmds, vlanMacAddress);
        return _networkGeneralHelper.sendCommandsToRouter(router, cmds);
    }

    /** Applies the VPN user list on the router. */
    @Override
    public boolean visit(final AdvancedVpnRules vpnRules) throws ResourceUnavailableException {
        final VirtualRouter router = vpnRules.getRouter();
        final List<? extends VpnUser> users = vpnRules.getUsers();
        final Commands cmds = new Commands(Command.OnError.Continue);
        _commandSetupHelper.createApplyVpnUsersCommand(users, router, cmds);
        // Currently we receive just one answer from the agent. In the future we
        // have to parse individual answers and set results accordingly.
        return _networkGeneralHelper.sendCommandsToRouter(router, cmds);
    }

    /**
     * Sets up (or tears down, depending on {@code isAddOperation}) a private
     * gateway on the router. Only acts when the router is Running; a
     * Stopped/Stopping router is silently skipped, and any other state raises
     * {@link ResourceUnavailableException}.
     */
    @Override
    public boolean visit(final PrivateGatewayRules privateGW) throws ResourceUnavailableException {
        final VirtualRouter router = privateGW.getRouter();
        final NicProfile nicProfile = privateGW.getNicProfile();
        final boolean isAddOperation = privateGW.isAddOperation();
        if (router.getState() == State.Running) {
            final PrivateIpVO ipVO = privateGW.retrivePrivateIP(this);
            final Network network = privateGW.retrievePrivateNetwork(this);
            final String netmask = NetUtils.getCidrNetmask(network.getCidr());
            final PrivateIpAddress ip = new PrivateIpAddress(ipVO, network.getBroadcastUri().toString(), network.getGateway(), netmask, nicProfile.getMacAddress());
            final Commands cmds = new Commands(Command.OnError.Stop);
            _commandSetupHelper.createSetupPrivateGatewayCommand(router, ip, cmds, nicProfile, isAddOperation);
            try {
                if (_networkGeneralHelper.sendCommandsToRouter(router, cmds)) {
                    s_logger.debug("Successfully applied ip association for ip " + ip + " in vpc network " + network);
                    return true;
                } else {
                    s_logger.warn("Failed to associate ip address " + ip + " in vpc network " + network);
                    return false;
                }
            } catch (final Exception ex) {
                // FIX: pass the exception to the logger (the stack trace was
                // previously discarded) and correct the "rotuer" typo.
                s_logger.warn("Failed to send " + (isAddOperation ? "add" : "delete") + " private network " + network + " commands to router", ex);
                return false;
            }
        } else if (router.getState() == State.Stopped || router.getState() == State.Stopping) {
            s_logger.debug("Router " + router.getInstanceName() + " is in " + router.getState() + ", so not sending setup private network command to the backend");
        } else {
            s_logger.warn("Unable to setup private gateway, virtual router " + router + " is not in the right state " + router.getState());
            throw new ResourceUnavailableException("Unable to setup Private gateway on the backend," + " virtual router " + router + " is not in the right state",
                    DataCenter.class, router.getDataCenterId());
        }
        return true;
    }

    /**
     * Sends a PVLAN setup command. In fact we send the command to the host of
     * the router; we're not programming the router but the host.
     */
    @Override
    public boolean visit(final DhcpPvlanRules dhcp) throws ResourceUnavailableException {
        final VirtualRouter router = dhcp.getRouter();
        final PvlanSetupCommand setupCommand = dhcp.getSetupCommand();
        final Commands cmds = new Commands(Command.OnError.Stop);
        cmds.addCommand(setupCommand);
        try {
            return _networkGeneralHelper.sendCommandsToRouter(router, cmds);
        } catch (final ResourceUnavailableException e) {
            s_logger.warn("Timed Out", e);
            return false;
        }
    }

    /** Sends net-usage commands produced by nic plug/unplug, if any. */
    @Override
    public boolean visit(final NicPlugInOutRules nicPlugInOutRules) throws ResourceUnavailableException {
        final VirtualRouter router = nicPlugInOutRules.getRouter();
        final Commands commands = nicPlugInOutRules.getNetUsageCommands();
        if (commands.size() > 0) {
            return _networkGeneralHelper.sendCommandsToRouter(router, commands);
        }
        return true;
    }

    /** Applies static routes on the router. */
    @Override
    public boolean visit(final StaticRoutesRules staticRoutesRules) throws ResourceUnavailableException {
        final VirtualRouter router = staticRoutesRules.getRouter();
        final List<StaticRouteProfile> staticRoutes = staticRoutesRules.getStaticRoutes();
        final Commands cmds = new Commands(Command.OnError.Continue);
        _commandSetupHelper.createStaticRouteCommands(staticRoutes, router, cmds);
        return _networkGeneralHelper.sendCommandsToRouter(router, cmds);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime.template;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigException;
import com.typesafe.config.ConfigFactory;
import org.apache.gobblin.runtime.api.JobCatalogWithTemplates;
import org.apache.gobblin.runtime.api.JobTemplate;
import org.apache.gobblin.runtime.api.SpecNotFoundException;
import lombok.AllArgsConstructor;
/**
 * Tests for {@code InheritingJobTemplate}: verifies that required-config lists
 * and raw/resolved configs are merged across single, multiple, and cyclic
 * template inheritance chains, and that a child template can satisfy a
 * parent's required property.
 */
public class InheritingJobTemplateTest {
// Single parent: the child exposes the union of both templates' required
// keys and raw config, and missing required keys fail resolution.
@Test
public void testSimpleInheritance() throws Exception {
TestTemplate template1 = new TestTemplate(new URI("template1"), Lists.<JobTemplate>newArrayList(), ImmutableMap.of("key1", "value1"),
Lists.newArrayList("required"));
TestTemplate template2 = new TestTemplate(new URI("template2"), Lists.<JobTemplate>newArrayList(template1), ImmutableMap.of("key2", "value2"),
Lists.newArrayList("required2"));
// Required list is the union of the child's and parent's requirements.
Collection<String> required = template2.getRequiredConfigList();
Assert.assertEquals(required.size(), 2);
Assert.assertTrue(required.contains("required"));
Assert.assertTrue(required.contains("required2"));
// Raw template config contains keys from both levels.
Config rawTemplate = template2.getRawTemplateConfig();
Assert.assertEquals(rawTemplate.getString("key1"), "value1");
Assert.assertEquals(rawTemplate.getString("key2"), "value2");
Config resolved = template2.getResolvedConfig(ConfigFactory.parseMap(ImmutableMap.of("required", "r1", "required2", "r2")));
Assert.assertEquals(resolved.getString("key1"), "value1");
Assert.assertEquals(resolved.getString("key2"), "value2");
Assert.assertEquals(resolved.getString("required"), "r1");
Assert.assertEquals(resolved.getString("required2"), "r2");
try {
// should throw exception because missing required property
resolved = template2.getResolvedConfig(ConfigFactory.parseMap(ImmutableMap.of("required", "r1")));
Assert.fail();
} catch (JobTemplate.TemplateException te) {
// expected
}
}
// Two independent parents: the child merges requirements and raw config
// from both.
@Test
public void testMultiInheritance() throws Exception {
TestTemplate template1 = new TestTemplate(new URI("template1"), Lists.<JobTemplate>newArrayList(), ImmutableMap.of("key1", "value1"),
Lists.newArrayList("required"));
TestTemplate template2 = new TestTemplate(new URI("template2"), Lists.<JobTemplate>newArrayList(), ImmutableMap.of("key2", "value2"),
Lists.newArrayList("required2"));
TestTemplate inheriting = new TestTemplate(new URI("inheriting"), Lists.<JobTemplate>newArrayList(template1, template2), ImmutableMap.<String, String>of(),
Lists.<String>newArrayList());
Collection<String> required = inheriting.getRequiredConfigList();
Assert.assertEquals(required.size(), 2);
Assert.assertTrue(required.contains("required"));
Assert.assertTrue(required.contains("required2"));
Config rawTemplate = inheriting.getRawTemplateConfig();
Assert.assertEquals(rawTemplate.getString("key1"), "value1");
Assert.assertEquals(rawTemplate.getString("key2"), "value2");
Config resolved = inheriting.getResolvedConfig(ConfigFactory.parseMap(ImmutableMap.of("required", "r1", "required2", "r2")));
Assert.assertEquals(resolved.getString("key1"), "value1");
Assert.assertEquals(resolved.getString("key2"), "value2");
Assert.assertEquals(resolved.getString("required"), "r1");
Assert.assertEquals(resolved.getString("required2"), "r2");
}
// Cyclic inheritance (template1 -> template2 -> template3 -> template1):
// resolution must terminate and each template's contribution appear once.
// template2/template3 are served lazily by a mocked catalog.
@Test
public void testLoopInheritance() throws Exception {
JobCatalogWithTemplates catalog = Mockito.mock(JobCatalogWithTemplates.class);
Mockito.when(catalog.getTemplate(new URI("template2"))).thenAnswer(
new TestTemplateAnswer(Lists.newArrayList(new URI("template3")), ImmutableMap.of("key2", "value2"),
Lists.<String>newArrayList(), catalog));
Mockito.when(catalog.getTemplate(new URI("template3"))).thenAnswer(
new TestTemplateAnswer(Lists.newArrayList(new URI("template1")), ImmutableMap.of("key3", "value3"),
Lists.newArrayList("required3"), catalog));
TestTemplate template = new TestTemplate(new URI("template1"), Lists.newArrayList(new URI("template2")),
ImmutableMap.of("key1", "value1"), Lists.newArrayList("required"), catalog);
Collection<String> required = template.getRequiredConfigList();
Assert.assertEquals(required.size(), 2);
Assert.assertTrue(required.contains("required"));
Assert.assertTrue(required.contains("required3"));
Config rawTemplate = template.getRawTemplateConfig();
Assert.assertEquals(rawTemplate.getString("key1"), "value1");
Assert.assertEquals(rawTemplate.getString("key2"), "value2");
Assert.assertEquals(rawTemplate.getString("key3"), "value3");
Config resolved = template.getResolvedConfig(ConfigFactory.parseMap(ImmutableMap.of("required", "r1", "required3", "r3")));
Assert.assertEquals(resolved.getString("key1"), "value1");
Assert.assertEquals(resolved.getString("key2"), "value2");
Assert.assertEquals(resolved.getString("key3"), "value3");
Assert.assertEquals(resolved.getString("required"), "r1");
Assert.assertEquals(resolved.getString("required3"), "r3");
}
// A child that hard-codes a value for the parent's required key removes
// that key from the merged required list.
@Test
public void testSatisfySuperTemplateRequirements() throws Exception {
TestTemplate template1 = new TestTemplate(new URI("template1"), Lists.<JobTemplate>newArrayList(), ImmutableMap.of("key1", "value1"),
Lists.newArrayList("required"));
TestTemplate template2 = new TestTemplate(new URI("template2"), Lists.<JobTemplate>newArrayList(template1), ImmutableMap.of("required", "r1"),
Lists.newArrayList("required2"));
Collection<String> required = template2.getRequiredConfigList();
Assert.assertEquals(required.size(), 1);
Assert.assertTrue(required.contains("required2"));
Config rawTemplate = template2.getRawTemplateConfig();
Assert.assertEquals(rawTemplate.getString("key1"), "value1");
Assert.assertEquals(rawTemplate.getString("required"), "r1");
Config resolved = template2.getResolvedConfig(ConfigFactory.parseMap(ImmutableMap.of("required2", "r2")));
Assert.assertEquals(resolved.getString("key1"), "value1");
Assert.assertEquals(resolved.getString("required"), "r1");
Assert.assertEquals(resolved.getString("required2"), "r2");
}
// Mockito Answer that builds a TestTemplate on demand for the URI the
// catalog was asked about, enabling lazy (and cyclic) template graphs.
@AllArgsConstructor
public static class TestTemplateAnswer implements Answer<JobTemplate> {
private final List<URI> superTemplateUris;
private final Map<String,String> rawTemplate;
private final List<String> required;
private final JobCatalogWithTemplates catalog;
@Override
public JobTemplate answer(InvocationOnMock invocation)
throws Throwable {
// Argument 0 of getTemplate(URI) is the URI being requested.
return new TestTemplate((URI) invocation.getArguments()[0],
this.superTemplateUris, this.rawTemplate, this.required, this.catalog);
}
}
// Minimal concrete InheritingJobTemplate: local raw config is a fixed map
// and local resolution just checks the locally required keys are present.
public static class TestTemplate extends InheritingJobTemplate {
private final URI uri;
private final Map<String,String> rawTemplate;
private final List<String> required;
// Parents given as URIs, resolved lazily through the catalog.
public TestTemplate(URI uri, List<URI> superTemplateUris, Map<String, String> rawTemplate, List<String> required,
JobCatalogWithTemplates catalog) throws SpecNotFoundException, TemplateException {
super(superTemplateUris, catalog);
this.uri = uri;
this.rawTemplate = rawTemplate;
this.required = required;
}
// Parents given as already-constructed templates.
public TestTemplate(URI uri, List<JobTemplate> superTemplates, Map<String, String> rawTemplate, List<String> required) {
super(superTemplates);
this.uri = uri;
this.rawTemplate = rawTemplate;
this.required = required;
}
@Override
public URI getUri() {
return this.uri;
}
@Override
public String getVersion() {
return "1";
}
@Override
public String getDescription() {
return "description";
}
@Override
protected Config getLocalRawTemplate() {
return ConfigFactory.parseMap(this.rawTemplate);
}
@Override
protected Collection<String> getLocallyRequiredConfigList() {
return this.required;
}
// Fails fast on any missing locally-required key, then layers the raw
// template under the user config (user values win).
@Override
protected Config getLocallyResolvedConfig(Config userConfig) throws TemplateException {
for (String required : this.required) {
if (!userConfig.hasPath(required)) {
throw new TemplateException("Missing required property " + required);
}
}
return userConfig.withFallback(getLocalRawTemplate());
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.asterix.runtime.functions;
import java.util.ArrayList;
import java.util.List;
import java.util.ServiceLoader;
import org.apache.asterix.om.functions.IFunctionCollection;
import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
import org.apache.asterix.om.functions.IFunctionRegistrant;
import org.apache.asterix.runtime.aggregates.collections.FirstElementAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.collections.LastElementAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.collections.ListifyAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.collections.LocalFirstElementAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.collections.NullWriterAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.scalar.ScalarArrayAggAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.scalar.ScalarArrayAggDistinctAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.scalar.ScalarAvgAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.scalar.ScalarAvgDistinctAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.scalar.ScalarCountAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.scalar.ScalarCountDistinctAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.scalar.ScalarKurtosisAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.scalar.ScalarKurtosisDistinctAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.scalar.ScalarMaxAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.scalar.ScalarMaxDistinctAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.scalar.ScalarMinAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.scalar.ScalarMinDistinctAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.scalar.ScalarSkewnessAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.scalar.ScalarSkewnessDistinctAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.scalar.ScalarSqlAvgAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.scalar.ScalarSqlAvgDistinctAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.scalar.ScalarSqlCountAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.scalar.ScalarSqlCountDistinctAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.scalar.ScalarSqlKurtosisAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.scalar.ScalarSqlKurtosisDistinctAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.scalar.ScalarSqlMaxAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.scalar.ScalarSqlMaxDistinctAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.scalar.ScalarSqlMinAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.scalar.ScalarSqlMinDistinctAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.scalar.ScalarSqlSkewnessAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.scalar.ScalarSqlSkewnessDistinctAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.scalar.ScalarSqlStddevAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.scalar.ScalarSqlStddevDistinctAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.scalar.ScalarSqlStddevPopAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.scalar.ScalarSqlStddevPopDistinctAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.scalar.ScalarSqlSumAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.scalar.ScalarSqlSumDistinctAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.scalar.ScalarSqlVarAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.scalar.ScalarSqlVarDistinctAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.scalar.ScalarSqlVarPopAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.scalar.ScalarSqlVarPopDistinctAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.scalar.ScalarStddevAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.scalar.ScalarStddevDistinctAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.scalar.ScalarStddevPopAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.scalar.ScalarStddevPopDistinctAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.scalar.ScalarSumAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.scalar.ScalarSumDistinctAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.scalar.ScalarVarAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.scalar.ScalarVarDistinctAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.scalar.ScalarVarPopAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.scalar.ScalarVarPopDistinctAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableAvgAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableCountAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableGlobalAvgAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableGlobalKurtosisAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableGlobalSkewnessAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableGlobalSqlAvgAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableGlobalSqlKurtosisAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableGlobalSqlSkewnessAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableGlobalSqlStddevAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableGlobalSqlStddevPopAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableGlobalSqlSumAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableGlobalSqlVarAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableGlobalSqlVarPopAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableGlobalStddevAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableGlobalStddevPopAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableGlobalSumAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableGlobalVarAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableGlobalVarPopAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableIntermediateAvgAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableIntermediateKurtosisAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableIntermediateSkewnessAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableIntermediateSqlAvgAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableIntermediateSqlKurtosisAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableIntermediateSqlSkewnessAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableIntermediateSqlStddevAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableIntermediateSqlStddevPopAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableIntermediateSqlSumAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableIntermediateSqlVarAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableIntermediateSqlVarPopAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableIntermediateStddevAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableIntermediateStddevPopAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableIntermediateSumAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableIntermediateVarAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableIntermediateVarPopAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableKurtosisAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableLocalAvgAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableLocalKurtosisAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableLocalSkewnessAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableLocalSqlAvgAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableLocalSqlKurtosisAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableLocalSqlSkewnessAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableLocalSqlStddevAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableLocalSqlStddevPopAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableLocalSqlSumAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableLocalSqlVarAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableLocalSqlVarPopAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableLocalStddevAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableLocalStddevPopAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableLocalSumAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableLocalVarAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableLocalVarPopAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableSkewnessAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableSqlAvgAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableSqlCountAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableSqlKurtosisAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableSqlSkewnessAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableSqlStddevAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableSqlStddevPopAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableSqlSumAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableSqlVarAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableSqlVarPopAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableStddevAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableStddevPopAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableSumAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableVarAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableVarPopAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.AvgAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.CountAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.GlobalAvgAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.GlobalKurtosisAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.GlobalMaxAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.GlobalMinAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.GlobalSkewnessAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.GlobalSqlAvgAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.GlobalSqlKurtosisAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.GlobalSqlMaxAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.GlobalSqlMinAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.GlobalSqlSkewnessAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.GlobalSqlStddevAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.GlobalSqlStddevPopAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.GlobalSqlSumAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.GlobalSqlVarAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.GlobalSqlVarPopAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.GlobalStddevAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.GlobalStddevPopAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.GlobalSumAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.GlobalVarAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.GlobalVarPopAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.IntermediateAvgAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.IntermediateKurtosisAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.IntermediateMaxAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.IntermediateMinAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.IntermediateSkewnessAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.IntermediateSqlAvgAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.IntermediateSqlKurtosisAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.IntermediateSqlMaxAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.IntermediateSqlMinAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.IntermediateSqlSkewnessAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.IntermediateSqlStddevAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.IntermediateSqlStddevPopAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.IntermediateSqlSumAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.IntermediateSqlVarAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.IntermediateSqlVarPopAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.IntermediateStddevAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.IntermediateStddevPopAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.IntermediateSumAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.IntermediateVarAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.IntermediateVarPopAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.KurtosisAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.LocalAvgAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.LocalKurtosisAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.LocalMaxAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.LocalMinAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.LocalSamplingAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.LocalSkewnessAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.LocalSqlAvgAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.LocalSqlKurtosisAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.LocalSqlMaxAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.LocalSqlMinAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.LocalSqlSkewnessAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.LocalSqlStddevAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.LocalSqlStddevPopAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.LocalSqlSumAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.LocalSqlVarAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.LocalSqlVarPopAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.LocalStddevAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.LocalStddevPopAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.LocalSumAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.LocalVarAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.LocalVarPopAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.MaxAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.MinAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.RangeMapAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.SkewnessAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.SqlAvgAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.SqlCountAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.SqlKurtosisAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.SqlMaxAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.SqlMinAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.SqlSkewnessAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.SqlStddevAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.SqlStddevPopAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.SqlSumAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.SqlVarAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.SqlVarPopAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.StddevAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.StddevPopAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.SumAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.VarAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.VarPopAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.stream.EmptyStreamAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.stream.NonEmptyStreamAggregateDescriptor;
import org.apache.asterix.runtime.evaluators.accessors.CircleCenterAccessor;
import org.apache.asterix.runtime.evaluators.accessors.CircleRadiusAccessor;
import org.apache.asterix.runtime.evaluators.accessors.LineRectanglePolygonAccessor;
import org.apache.asterix.runtime.evaluators.accessors.PointXCoordinateAccessor;
import org.apache.asterix.runtime.evaluators.accessors.PointYCoordinateAccessor;
import org.apache.asterix.runtime.evaluators.accessors.TemporalDayAccessor;
import org.apache.asterix.runtime.evaluators.accessors.TemporalHourAccessor;
import org.apache.asterix.runtime.evaluators.accessors.TemporalIntervalEndAccessor;
import org.apache.asterix.runtime.evaluators.accessors.TemporalIntervalEndDateAccessor;
import org.apache.asterix.runtime.evaluators.accessors.TemporalIntervalEndDatetimeAccessor;
import org.apache.asterix.runtime.evaluators.accessors.TemporalIntervalEndTimeAccessor;
import org.apache.asterix.runtime.evaluators.accessors.TemporalIntervalStartAccessor;
import org.apache.asterix.runtime.evaluators.accessors.TemporalIntervalStartDateAccessor;
import org.apache.asterix.runtime.evaluators.accessors.TemporalIntervalStartDatetimeAccessor;
import org.apache.asterix.runtime.evaluators.accessors.TemporalIntervalStartTimeAccessor;
import org.apache.asterix.runtime.evaluators.accessors.TemporalMillisecondAccessor;
import org.apache.asterix.runtime.evaluators.accessors.TemporalMinuteAccessor;
import org.apache.asterix.runtime.evaluators.accessors.TemporalMonthAccessor;
import org.apache.asterix.runtime.evaluators.accessors.TemporalSecondAccessor;
import org.apache.asterix.runtime.evaluators.accessors.TemporalYearAccessor;
import org.apache.asterix.runtime.evaluators.comparisons.EqualsDescriptor;
import org.apache.asterix.runtime.evaluators.comparisons.GreaterThanDescriptor;
import org.apache.asterix.runtime.evaluators.comparisons.GreaterThanOrEqualsDescriptor;
import org.apache.asterix.runtime.evaluators.comparisons.LessThanDescriptor;
import org.apache.asterix.runtime.evaluators.comparisons.LessThanOrEqualsDescriptor;
import org.apache.asterix.runtime.evaluators.comparisons.MissingIfEqualsDescriptor;
import org.apache.asterix.runtime.evaluators.comparisons.NanIfEqualsDescriptor;
import org.apache.asterix.runtime.evaluators.comparisons.NegInfIfEqualsDescriptor;
import org.apache.asterix.runtime.evaluators.comparisons.NotEqualsDescriptor;
import org.apache.asterix.runtime.evaluators.comparisons.NullIfEqualsDescriptor;
import org.apache.asterix.runtime.evaluators.comparisons.PosInfIfEqualsDescriptor;
import org.apache.asterix.runtime.evaluators.constructors.ABinaryBase64StringConstructorDescriptor;
import org.apache.asterix.runtime.evaluators.constructors.ABinaryHexStringConstructorDescriptor;
import org.apache.asterix.runtime.evaluators.constructors.ABooleanConstructorDescriptor;
import org.apache.asterix.runtime.evaluators.constructors.ACircleConstructorDescriptor;
import org.apache.asterix.runtime.evaluators.constructors.ADateConstructorDescriptor;
import org.apache.asterix.runtime.evaluators.constructors.ADateTimeConstructorDescriptor;
import org.apache.asterix.runtime.evaluators.constructors.ADayTimeDurationConstructorDescriptor;
import org.apache.asterix.runtime.evaluators.constructors.ADoubleConstructorDescriptor;
import org.apache.asterix.runtime.evaluators.constructors.ADurationConstructorDescriptor;
import org.apache.asterix.runtime.evaluators.constructors.AFloatConstructorDescriptor;
import org.apache.asterix.runtime.evaluators.constructors.AInt16ConstructorDescriptor;
import org.apache.asterix.runtime.evaluators.constructors.AInt32ConstructorDescriptor;
import org.apache.asterix.runtime.evaluators.constructors.AInt64ConstructorDescriptor;
import org.apache.asterix.runtime.evaluators.constructors.AInt8ConstructorDescriptor;
import org.apache.asterix.runtime.evaluators.constructors.AIntervalConstructorDescriptor;
import org.apache.asterix.runtime.evaluators.constructors.AIntervalStartFromDateConstructorDescriptor;
import org.apache.asterix.runtime.evaluators.constructors.AIntervalStartFromDateTimeConstructorDescriptor;
import org.apache.asterix.runtime.evaluators.constructors.AIntervalStartFromTimeConstructorDescriptor;
import org.apache.asterix.runtime.evaluators.constructors.ALineConstructorDescriptor;
import org.apache.asterix.runtime.evaluators.constructors.APoint3DConstructorDescriptor;
import org.apache.asterix.runtime.evaluators.constructors.APointConstructorDescriptor;
import org.apache.asterix.runtime.evaluators.constructors.APolygonConstructorDescriptor;
import org.apache.asterix.runtime.evaluators.constructors.ARectangleConstructorDescriptor;
import org.apache.asterix.runtime.evaluators.constructors.AStringConstructorDescriptor;
import org.apache.asterix.runtime.evaluators.constructors.ATimeConstructorDescriptor;
import org.apache.asterix.runtime.evaluators.constructors.AUUIDFromStringConstructorDescriptor;
import org.apache.asterix.runtime.evaluators.constructors.AYearMonthDurationConstructorDescriptor;
import org.apache.asterix.runtime.evaluators.constructors.ClosedRecordConstructorDescriptor;
import org.apache.asterix.runtime.evaluators.constructors.OpenRecordConstructorDescriptor;
import org.apache.asterix.runtime.evaluators.constructors.OrderedListConstructorDescriptor;
import org.apache.asterix.runtime.evaluators.constructors.UnorderedListConstructorDescriptor;
import org.apache.asterix.runtime.evaluators.functions.AndDescriptor;
import org.apache.asterix.runtime.evaluators.functions.AnyCollectionMemberDescriptor;
import org.apache.asterix.runtime.evaluators.functions.ArrayAppendDescriptor;
import org.apache.asterix.runtime.evaluators.functions.ArrayConcatDescriptor;
import org.apache.asterix.runtime.evaluators.functions.ArrayContainsDescriptor;
import org.apache.asterix.runtime.evaluators.functions.ArrayDistinctDescriptor;
import org.apache.asterix.runtime.evaluators.functions.ArrayExceptDescriptor;
import org.apache.asterix.runtime.evaluators.functions.ArrayFlattenDescriptor;
import org.apache.asterix.runtime.evaluators.functions.ArrayIfNullDescriptor;
import org.apache.asterix.runtime.evaluators.functions.ArrayInsertDescriptor;
import org.apache.asterix.runtime.evaluators.functions.ArrayIntersectDescriptor;
import org.apache.asterix.runtime.evaluators.functions.ArrayPositionDescriptor;
import org.apache.asterix.runtime.evaluators.functions.ArrayPrependDescriptor;
import org.apache.asterix.runtime.evaluators.functions.ArrayPutDescriptor;
import org.apache.asterix.runtime.evaluators.functions.ArrayRangeWithStepDescriptor;
import org.apache.asterix.runtime.evaluators.functions.ArrayRangeWithoutStepDescriptor;
import org.apache.asterix.runtime.evaluators.functions.ArrayRemoveDescriptor;
import org.apache.asterix.runtime.evaluators.functions.ArrayRepeatDescriptor;
import org.apache.asterix.runtime.evaluators.functions.ArrayReplaceWithMaximumDescriptor;
import org.apache.asterix.runtime.evaluators.functions.ArrayReplaceWithoutMaximumDescriptor;
import org.apache.asterix.runtime.evaluators.functions.ArrayReverseDescriptor;
import org.apache.asterix.runtime.evaluators.functions.ArraySliceWithEndPositionDescriptor;
import org.apache.asterix.runtime.evaluators.functions.ArraySliceWithoutEndPositionDescriptor;
import org.apache.asterix.runtime.evaluators.functions.ArraySortDescriptor;
import org.apache.asterix.runtime.evaluators.functions.ArrayStarDescriptor;
import org.apache.asterix.runtime.evaluators.functions.ArraySymDiffDescriptor;
import org.apache.asterix.runtime.evaluators.functions.ArraySymDiffnDescriptor;
import org.apache.asterix.runtime.evaluators.functions.ArrayUnionDescriptor;
import org.apache.asterix.runtime.evaluators.functions.CastTypeDescriptor;
import org.apache.asterix.runtime.evaluators.functions.CastTypeLaxDescriptor;
import org.apache.asterix.runtime.evaluators.functions.CheckUnknownDescriptor;
import org.apache.asterix.runtime.evaluators.functions.CodePointToStringDescriptor;
import org.apache.asterix.runtime.evaluators.functions.CreateCircleDescriptor;
import org.apache.asterix.runtime.evaluators.functions.CreateLineDescriptor;
import org.apache.asterix.runtime.evaluators.functions.CreateMBRDescriptor;
import org.apache.asterix.runtime.evaluators.functions.CreatePointDescriptor;
import org.apache.asterix.runtime.evaluators.functions.CreatePolygonDescriptor;
import org.apache.asterix.runtime.evaluators.functions.CreateQueryUIDDescriptor;
import org.apache.asterix.runtime.evaluators.functions.CreateRectangleDescriptor;
import org.apache.asterix.runtime.evaluators.functions.CreateUUIDDescriptor;
import org.apache.asterix.runtime.evaluators.functions.DecodeDataverseDisplayNameDescriptor;
import org.apache.asterix.runtime.evaluators.functions.DecodeDataverseNameDescriptor;
import org.apache.asterix.runtime.evaluators.functions.DeepEqualityDescriptor;
import org.apache.asterix.runtime.evaluators.functions.FullTextContainsFunctionDescriptor;
import org.apache.asterix.runtime.evaluators.functions.FullTextContainsWithoutOptionFunctionDescriptor;
import org.apache.asterix.runtime.evaluators.functions.GetItemDescriptor;
import org.apache.asterix.runtime.evaluators.functions.GetJobParameterByNameDescriptor;
import org.apache.asterix.runtime.evaluators.functions.GetTypeDescriptor;
import org.apache.asterix.runtime.evaluators.functions.IfInfDescriptor;
import org.apache.asterix.runtime.evaluators.functions.IfMissingDescriptor;
import org.apache.asterix.runtime.evaluators.functions.IfMissingOrNullDescriptor;
import org.apache.asterix.runtime.evaluators.functions.IfNanDescriptor;
import org.apache.asterix.runtime.evaluators.functions.IfNanOrInfDescriptor;
import org.apache.asterix.runtime.evaluators.functions.IfNullDescriptor;
import org.apache.asterix.runtime.evaluators.functions.IfSystemNullDescriptor;
import org.apache.asterix.runtime.evaluators.functions.InjectFailureDescriptor;
import org.apache.asterix.runtime.evaluators.functions.IsArrayDescriptor;
import org.apache.asterix.runtime.evaluators.functions.IsAtomicDescriptor;
import org.apache.asterix.runtime.evaluators.functions.IsBinaryDescriptor;
import org.apache.asterix.runtime.evaluators.functions.IsBooleanDescriptor;
import org.apache.asterix.runtime.evaluators.functions.IsCircleDescriptor;
import org.apache.asterix.runtime.evaluators.functions.IsDateDescriptor;
import org.apache.asterix.runtime.evaluators.functions.IsDatetimeDescriptor;
import org.apache.asterix.runtime.evaluators.functions.IsDurationDescriptor;
import org.apache.asterix.runtime.evaluators.functions.IsIntervalDescriptor;
import org.apache.asterix.runtime.evaluators.functions.IsLineDescriptor;
import org.apache.asterix.runtime.evaluators.functions.IsMissingDescriptor;
import org.apache.asterix.runtime.evaluators.functions.IsMultisetDescriptor;
import org.apache.asterix.runtime.evaluators.functions.IsNullDescriptor;
import org.apache.asterix.runtime.evaluators.functions.IsNumberDescriptor;
import org.apache.asterix.runtime.evaluators.functions.IsNumericAddCompatibleDescriptor;
import org.apache.asterix.runtime.evaluators.functions.IsObjectDescriptor;
import org.apache.asterix.runtime.evaluators.functions.IsPointDescriptor;
import org.apache.asterix.runtime.evaluators.functions.IsPolygonDescriptor;
import org.apache.asterix.runtime.evaluators.functions.IsRectangleDescriptor;
import org.apache.asterix.runtime.evaluators.functions.IsSpatialDescriptor;
import org.apache.asterix.runtime.evaluators.functions.IsStringDescriptor;
import org.apache.asterix.runtime.evaluators.functions.IsSystemNullDescriptor;
import org.apache.asterix.runtime.evaluators.functions.IsTemporalDescriptor;
import org.apache.asterix.runtime.evaluators.functions.IsTimeDescriptor;
import org.apache.asterix.runtime.evaluators.functions.IsUUIDDescriptor;
import org.apache.asterix.runtime.evaluators.functions.IsUnknownDescriptor;
import org.apache.asterix.runtime.evaluators.functions.LenDescriptor;
import org.apache.asterix.runtime.evaluators.functions.NotDescriptor;
import org.apache.asterix.runtime.evaluators.functions.NumericACosDescriptor;
import org.apache.asterix.runtime.evaluators.functions.NumericASinDescriptor;
import org.apache.asterix.runtime.evaluators.functions.NumericATan2Descriptor;
import org.apache.asterix.runtime.evaluators.functions.NumericATanDescriptor;
import org.apache.asterix.runtime.evaluators.functions.NumericAbsDescriptor;
import org.apache.asterix.runtime.evaluators.functions.NumericAddDescriptor;
import org.apache.asterix.runtime.evaluators.functions.NumericCeilingDescriptor;
import org.apache.asterix.runtime.evaluators.functions.NumericCosDescriptor;
import org.apache.asterix.runtime.evaluators.functions.NumericCoshDescriptor;
import org.apache.asterix.runtime.evaluators.functions.NumericDegreesDescriptor;
import org.apache.asterix.runtime.evaluators.functions.NumericDivDescriptor;
import org.apache.asterix.runtime.evaluators.functions.NumericDivideDescriptor;
import org.apache.asterix.runtime.evaluators.functions.NumericExpDescriptor;
import org.apache.asterix.runtime.evaluators.functions.NumericFloorDescriptor;
import org.apache.asterix.runtime.evaluators.functions.NumericLnDescriptor;
import org.apache.asterix.runtime.evaluators.functions.NumericLogDescriptor;
import org.apache.asterix.runtime.evaluators.functions.NumericModuloDescriptor;
import org.apache.asterix.runtime.evaluators.functions.NumericMultiplyDescriptor;
import org.apache.asterix.runtime.evaluators.functions.NumericPowerDescriptor;
import org.apache.asterix.runtime.evaluators.functions.NumericRadiansDescriptor;
import org.apache.asterix.runtime.evaluators.functions.NumericRoundDescriptor;
import org.apache.asterix.runtime.evaluators.functions.NumericRoundHalfToEven2Descriptor;
import org.apache.asterix.runtime.evaluators.functions.NumericRoundHalfToEvenDescriptor;
import org.apache.asterix.runtime.evaluators.functions.NumericRoundWithRoundDigitDescriptor;
import org.apache.asterix.runtime.evaluators.functions.NumericSignDescriptor;
import org.apache.asterix.runtime.evaluators.functions.NumericSinDescriptor;
import org.apache.asterix.runtime.evaluators.functions.NumericSinhDescriptor;
import org.apache.asterix.runtime.evaluators.functions.NumericSqrtDescriptor;
import org.apache.asterix.runtime.evaluators.functions.NumericSubDescriptor;
import org.apache.asterix.runtime.evaluators.functions.NumericTanDescriptor;
import org.apache.asterix.runtime.evaluators.functions.NumericTanhDescriptor;
import org.apache.asterix.runtime.evaluators.functions.NumericTruncDescriptor;
import org.apache.asterix.runtime.evaluators.functions.NumericUnaryMinusDescriptor;
import org.apache.asterix.runtime.evaluators.functions.OrDescriptor;
import org.apache.asterix.runtime.evaluators.functions.RandomDescriptor;
import org.apache.asterix.runtime.evaluators.functions.RandomWithSeedDescriptor;
import org.apache.asterix.runtime.evaluators.functions.SleepDescriptor;
import org.apache.asterix.runtime.evaluators.functions.SpatialAreaDescriptor;
import org.apache.asterix.runtime.evaluators.functions.SpatialCellDescriptor;
import org.apache.asterix.runtime.evaluators.functions.SpatialDistanceDescriptor;
import org.apache.asterix.runtime.evaluators.functions.StringConcatDescriptor;
import org.apache.asterix.runtime.evaluators.functions.StringContainsDescriptor;
import org.apache.asterix.runtime.evaluators.functions.StringEndsWithDescriptor;
import org.apache.asterix.runtime.evaluators.functions.StringEqualDescriptor;
import org.apache.asterix.runtime.evaluators.functions.StringInitCapDescriptor;
import org.apache.asterix.runtime.evaluators.functions.StringJoinDescriptor;
import org.apache.asterix.runtime.evaluators.functions.StringLTrim2Descriptor;
import org.apache.asterix.runtime.evaluators.functions.StringLTrimDescriptor;
import org.apache.asterix.runtime.evaluators.functions.StringLengthDescriptor;
import org.apache.asterix.runtime.evaluators.functions.StringLikeDescriptor;
import org.apache.asterix.runtime.evaluators.functions.StringLowerCaseDescriptor;
import org.apache.asterix.runtime.evaluators.functions.StringPositionDescriptor;
import org.apache.asterix.runtime.evaluators.functions.StringPositionOffset1Descriptor;
import org.apache.asterix.runtime.evaluators.functions.StringRTrim2Descriptor;
import org.apache.asterix.runtime.evaluators.functions.StringRTrimDescriptor;
import org.apache.asterix.runtime.evaluators.functions.StringRegExpContainsDescriptor;
import org.apache.asterix.runtime.evaluators.functions.StringRegExpContainsWithFlagDescriptor;
import org.apache.asterix.runtime.evaluators.functions.StringRegExpLikeDescriptor;
import org.apache.asterix.runtime.evaluators.functions.StringRegExpLikeWithFlagDescriptor;
import org.apache.asterix.runtime.evaluators.functions.StringRegExpMatchesDescriptor;
import org.apache.asterix.runtime.evaluators.functions.StringRegExpPositionDescriptor;
import org.apache.asterix.runtime.evaluators.functions.StringRegExpPositionOffset1Descriptor;
import org.apache.asterix.runtime.evaluators.functions.StringRegExpPositionOffset1WithFlagDescriptor;
import org.apache.asterix.runtime.evaluators.functions.StringRegExpPositionWithFlagDescriptor;
import org.apache.asterix.runtime.evaluators.functions.StringRegExpReplaceDescriptor;
import org.apache.asterix.runtime.evaluators.functions.StringRegExpReplaceWithFlagDescriptor;
import org.apache.asterix.runtime.evaluators.functions.StringRegExpSplitDescriptor;
import org.apache.asterix.runtime.evaluators.functions.StringRepeatDescriptor;
import org.apache.asterix.runtime.evaluators.functions.StringReplaceDescriptor;
import org.apache.asterix.runtime.evaluators.functions.StringReplaceWithLimitDescriptor;
import org.apache.asterix.runtime.evaluators.functions.StringReverseDescriptor;
import org.apache.asterix.runtime.evaluators.functions.StringSplitDescriptor;
import org.apache.asterix.runtime.evaluators.functions.StringStartsWithDescriptor;
import org.apache.asterix.runtime.evaluators.functions.StringToCodePointDescriptor;
import org.apache.asterix.runtime.evaluators.functions.StringTrim2Descriptor;
import org.apache.asterix.runtime.evaluators.functions.StringTrimDescriptor;
import org.apache.asterix.runtime.evaluators.functions.StringUpperCaseDescriptor;
import org.apache.asterix.runtime.evaluators.functions.Substring2Descriptor;
import org.apache.asterix.runtime.evaluators.functions.Substring2Offset1Descriptor;
import org.apache.asterix.runtime.evaluators.functions.SubstringAfterDescriptor;
import org.apache.asterix.runtime.evaluators.functions.SubstringBeforeDescriptor;
import org.apache.asterix.runtime.evaluators.functions.SubstringDescriptor;
import org.apache.asterix.runtime.evaluators.functions.SubstringOffset1Descriptor;
import org.apache.asterix.runtime.evaluators.functions.SwitchCaseDescriptor;
import org.apache.asterix.runtime.evaluators.functions.ToArrayDescriptor;
import org.apache.asterix.runtime.evaluators.functions.ToAtomicDescriptor;
import org.apache.asterix.runtime.evaluators.functions.ToBigIntDescriptor;
import org.apache.asterix.runtime.evaluators.functions.ToBooleanDescriptor;
import org.apache.asterix.runtime.evaluators.functions.ToDoubleDescriptor;
import org.apache.asterix.runtime.evaluators.functions.ToNumberDescriptor;
import org.apache.asterix.runtime.evaluators.functions.ToObjectDescriptor;
import org.apache.asterix.runtime.evaluators.functions.ToStringDescriptor;
import org.apache.asterix.runtime.evaluators.functions.TreatAsIntegerDescriptor;
import org.apache.asterix.runtime.evaluators.functions.UUIDDescriptor;
import org.apache.asterix.runtime.evaluators.functions.binary.BinaryConcatDescriptor;
import org.apache.asterix.runtime.evaluators.functions.binary.BinaryLengthDescriptor;
import org.apache.asterix.runtime.evaluators.functions.binary.FindBinaryDescriptor;
import org.apache.asterix.runtime.evaluators.functions.binary.FindBinaryFromDescriptor;
import org.apache.asterix.runtime.evaluators.functions.binary.ParseBinaryDescriptor;
import org.apache.asterix.runtime.evaluators.functions.binary.PrintBinaryDescriptor;
import org.apache.asterix.runtime.evaluators.functions.binary.SubBinaryFromDescriptor;
import org.apache.asterix.runtime.evaluators.functions.binary.SubBinaryFromToDescriptor;
import org.apache.asterix.runtime.evaluators.functions.bitwise.BitAndDescriptor;
import org.apache.asterix.runtime.evaluators.functions.bitwise.BitClearDescriptor;
import org.apache.asterix.runtime.evaluators.functions.bitwise.BitCountDescriptor;
import org.apache.asterix.runtime.evaluators.functions.bitwise.BitNotDescriptor;
import org.apache.asterix.runtime.evaluators.functions.bitwise.BitOrDescriptor;
import org.apache.asterix.runtime.evaluators.functions.bitwise.BitSetDescriptor;
import org.apache.asterix.runtime.evaluators.functions.bitwise.BitShiftWithRotateFlagDescriptor;
import org.apache.asterix.runtime.evaluators.functions.bitwise.BitShiftWithoutRotateFlagDescriptor;
import org.apache.asterix.runtime.evaluators.functions.bitwise.BitTestWithAllFlagDescriptor;
import org.apache.asterix.runtime.evaluators.functions.bitwise.BitTestWithoutAllFlagDescriptor;
import org.apache.asterix.runtime.evaluators.functions.bitwise.BitXorDescriptor;
import org.apache.asterix.runtime.evaluators.functions.bitwise.IsBitSetWithAllFlagDescriptor;
import org.apache.asterix.runtime.evaluators.functions.bitwise.IsBitSetWithoutAllFlagDescriptor;
import org.apache.asterix.runtime.evaluators.functions.records.FieldAccessByIndexDescriptor;
import org.apache.asterix.runtime.evaluators.functions.records.FieldAccessByNameDescriptor;
import org.apache.asterix.runtime.evaluators.functions.records.FieldAccessNestedDescriptor;
import org.apache.asterix.runtime.evaluators.functions.records.GetRecordFieldValueDescriptor;
import org.apache.asterix.runtime.evaluators.functions.records.GetRecordFieldsDescriptor;
import org.apache.asterix.runtime.evaluators.functions.records.PairsDescriptor;
import org.apache.asterix.runtime.evaluators.functions.records.RecordAddDescriptor;
import org.apache.asterix.runtime.evaluators.functions.records.RecordAddFieldsDescriptor;
import org.apache.asterix.runtime.evaluators.functions.records.RecordConcatDescriptor;
import org.apache.asterix.runtime.evaluators.functions.records.RecordConcatStrictDescriptor;
import org.apache.asterix.runtime.evaluators.functions.records.RecordLengthDescriptor;
import org.apache.asterix.runtime.evaluators.functions.records.RecordMergeDescriptor;
import org.apache.asterix.runtime.evaluators.functions.records.RecordMergeIgnoreDuplicatesDescriptor;
import org.apache.asterix.runtime.evaluators.functions.records.RecordNamesDescriptor;
import org.apache.asterix.runtime.evaluators.functions.records.RecordPairsDescriptor;
import org.apache.asterix.runtime.evaluators.functions.records.RecordPutDescriptor;
import org.apache.asterix.runtime.evaluators.functions.records.RecordRemoveDescriptor;
import org.apache.asterix.runtime.evaluators.functions.records.RecordRemoveFieldsDescriptor;
import org.apache.asterix.runtime.evaluators.functions.records.RecordRenameDescriptor;
import org.apache.asterix.runtime.evaluators.functions.records.RecordReplaceDescriptor;
import org.apache.asterix.runtime.evaluators.functions.records.RecordUnwrapDescriptor;
import org.apache.asterix.runtime.evaluators.functions.records.RecordValuesDescriptor;
import org.apache.asterix.runtime.evaluators.functions.temporal.AdjustDateTimeForTimeZoneDescriptor;
import org.apache.asterix.runtime.evaluators.functions.temporal.AdjustTimeForTimeZoneDescriptor;
import org.apache.asterix.runtime.evaluators.functions.temporal.CalendarDurationFromDateDescriptor;
import org.apache.asterix.runtime.evaluators.functions.temporal.CalendarDurationFromDateTimeDescriptor;
import org.apache.asterix.runtime.evaluators.functions.temporal.CurrentDateDescriptor;
import org.apache.asterix.runtime.evaluators.functions.temporal.CurrentDateTimeDescriptor;
import org.apache.asterix.runtime.evaluators.functions.temporal.CurrentTimeDescriptor;
import org.apache.asterix.runtime.evaluators.functions.temporal.DateFromDatetimeDescriptor;
import org.apache.asterix.runtime.evaluators.functions.temporal.DateFromUnixTimeInDaysDescriptor;
import org.apache.asterix.runtime.evaluators.functions.temporal.DatetimeFromDateAndTimeDescriptor;
import org.apache.asterix.runtime.evaluators.functions.temporal.DatetimeFromUnixTimeInMsDescriptor;
import org.apache.asterix.runtime.evaluators.functions.temporal.DatetimeFromUnixTimeInSecsDescriptor;
import org.apache.asterix.runtime.evaluators.functions.temporal.DayOfWeekDescriptor;
import org.apache.asterix.runtime.evaluators.functions.temporal.DayTimeDurationGreaterThanComparatorDescriptor;
import org.apache.asterix.runtime.evaluators.functions.temporal.DayTimeDurationLessThanComparatorDescriptor;
import org.apache.asterix.runtime.evaluators.functions.temporal.DurationEqualDescriptor;
import org.apache.asterix.runtime.evaluators.functions.temporal.DurationFromIntervalDescriptor;
import org.apache.asterix.runtime.evaluators.functions.temporal.DurationFromMillisecondsDescriptor;
import org.apache.asterix.runtime.evaluators.functions.temporal.DurationFromMonthsDescriptor;
import org.apache.asterix.runtime.evaluators.functions.temporal.GetDayTimeDurationDescriptor;
import org.apache.asterix.runtime.evaluators.functions.temporal.GetOverlappingIntervalDescriptor;
import org.apache.asterix.runtime.evaluators.functions.temporal.GetYearMonthDurationDescriptor;
import org.apache.asterix.runtime.evaluators.functions.temporal.IntervalAfterDescriptor;
import org.apache.asterix.runtime.evaluators.functions.temporal.IntervalBeforeDescriptor;
import org.apache.asterix.runtime.evaluators.functions.temporal.IntervalBinDescriptor;
import org.apache.asterix.runtime.evaluators.functions.temporal.IntervalCoveredByDescriptor;
import org.apache.asterix.runtime.evaluators.functions.temporal.IntervalCoversDescriptor;
import org.apache.asterix.runtime.evaluators.functions.temporal.IntervalEndedByDescriptor;
import org.apache.asterix.runtime.evaluators.functions.temporal.IntervalEndsDescriptor;
import org.apache.asterix.runtime.evaluators.functions.temporal.IntervalMeetsDescriptor;
import org.apache.asterix.runtime.evaluators.functions.temporal.IntervalMetByDescriptor;
import org.apache.asterix.runtime.evaluators.functions.temporal.IntervalOverlappedByDescriptor;
import org.apache.asterix.runtime.evaluators.functions.temporal.IntervalOverlapsDescriptor;
import org.apache.asterix.runtime.evaluators.functions.temporal.IntervalStartedByDescriptor;
import org.apache.asterix.runtime.evaluators.functions.temporal.IntervalStartsDescriptor;
import org.apache.asterix.runtime.evaluators.functions.temporal.MillisecondsFromDayTimeDurationDescriptor;
import org.apache.asterix.runtime.evaluators.functions.temporal.MonthsFromYearMonthDurationDescriptor;
import org.apache.asterix.runtime.evaluators.functions.temporal.OverlapBinsDescriptor;
import org.apache.asterix.runtime.evaluators.functions.temporal.OverlapDescriptor;
import org.apache.asterix.runtime.evaluators.functions.temporal.ParseDateDescriptor;
import org.apache.asterix.runtime.evaluators.functions.temporal.ParseDateTimeDescriptor;
import org.apache.asterix.runtime.evaluators.functions.temporal.ParseTimeDescriptor;
import org.apache.asterix.runtime.evaluators.functions.temporal.PrintDateDescriptor;
import org.apache.asterix.runtime.evaluators.functions.temporal.PrintDateTimeDescriptor;
import org.apache.asterix.runtime.evaluators.functions.temporal.PrintTimeDescriptor;
import org.apache.asterix.runtime.evaluators.functions.temporal.TimeFromDatetimeDescriptor;
import org.apache.asterix.runtime.evaluators.functions.temporal.TimeFromUnixTimeInMsDescriptor;
import org.apache.asterix.runtime.evaluators.functions.temporal.UnixTimeFromDateInDaysDescriptor;
import org.apache.asterix.runtime.evaluators.functions.temporal.UnixTimeFromDatetimeInMsDescriptor;
import org.apache.asterix.runtime.evaluators.functions.temporal.UnixTimeFromDatetimeInSecsDescriptor;
import org.apache.asterix.runtime.evaluators.functions.temporal.UnixTimeFromTimeInMsDescriptor;
import org.apache.asterix.runtime.evaluators.functions.temporal.YearMonthDurationGreaterThanComparatorDescriptor;
import org.apache.asterix.runtime.evaluators.functions.temporal.YearMonthDurationLessThanComparatorDescriptor;
import org.apache.asterix.runtime.runningaggregates.std.DenseRankRunningAggregateDescriptor;
import org.apache.asterix.runtime.runningaggregates.std.NtileRunningAggregateDescriptor;
import org.apache.asterix.runtime.runningaggregates.std.PercentRankRunningAggregateDescriptor;
import org.apache.asterix.runtime.runningaggregates.std.RankRunningAggregateDescriptor;
import org.apache.asterix.runtime.runningaggregates.std.RowNumberRunningAggregateDescriptor;
import org.apache.asterix.runtime.runningaggregates.std.TidRunningAggregateDescriptor;
import org.apache.asterix.runtime.runningaggregates.std.WinMarkFirstMissingRunningAggregateDescriptor;
import org.apache.asterix.runtime.runningaggregates.std.WinPartitionLenRunningAggregateDescriptor;
import org.apache.asterix.runtime.unnestingfunctions.std.RangeDescriptor;
import org.apache.asterix.runtime.unnestingfunctions.std.ScanCollectionDescriptor;
import org.apache.asterix.runtime.unnestingfunctions.std.SubsetCollectionDescriptor;
/**
* This class holds a list of function descriptor factories.
*/
public final class FunctionCollection implements IFunctionCollection {
private static final long serialVersionUID = -8308873930697425307L;
private final ArrayList<IFunctionDescriptorFactory> descriptorFactories = new ArrayList<>();
@Override
public void add(IFunctionDescriptorFactory descriptorFactory) {
descriptorFactories.add(descriptorFactory);
}
public static FunctionCollection createDefaultFunctionCollection() {
FunctionCollection fc = new FunctionCollection();
// array functions
fc.add(ArrayRemoveDescriptor.FACTORY);
fc.add(ArrayPutDescriptor.FACTORY);
fc.add(ArrayPrependDescriptor.FACTORY);
fc.add(ArrayAppendDescriptor.FACTORY);
fc.add(ArrayInsertDescriptor.FACTORY);
fc.add(ArrayPositionDescriptor.FACTORY);
fc.add(ArrayRepeatDescriptor.FACTORY);
fc.add(ArrayContainsDescriptor.FACTORY);
fc.add(ArrayReverseDescriptor.FACTORY);
fc.add(ArraySortDescriptor.FACTORY);
fc.add(ArrayDistinctDescriptor.FACTORY);
fc.add(ArrayUnionDescriptor.FACTORY);
fc.add(ArrayIntersectDescriptor.FACTORY);
fc.add(ArrayIfNullDescriptor.FACTORY);
fc.add(ArrayConcatDescriptor.FACTORY);
fc.add(ArrayRangeWithStepDescriptor.FACTORY);
fc.add(ArrayRangeWithoutStepDescriptor.FACTORY);
fc.add(ArrayFlattenDescriptor.FACTORY);
fc.add(ArrayReplaceWithMaximumDescriptor.FACTORY);
fc.add(ArrayReplaceWithoutMaximumDescriptor.FACTORY);
fc.add(ArraySliceWithEndPositionDescriptor.FACTORY);
fc.add(ArraySliceWithoutEndPositionDescriptor.FACTORY);
fc.add(ArraySymDiffDescriptor.FACTORY);
fc.add(ArraySymDiffnDescriptor.FACTORY);
fc.add(ArrayStarDescriptor.FACTORY);
fc.add(ArrayExceptDescriptor.FACTORY);
// unnesting functions
fc.add(TidRunningAggregateDescriptor.FACTORY);
fc.add(ScanCollectionDescriptor.FACTORY);
fc.add(RangeDescriptor.FACTORY);
fc.add(SubsetCollectionDescriptor.FACTORY);
// aggregate functions
fc.add(ListifyAggregateDescriptor.FACTORY);
fc.add(CountAggregateDescriptor.FACTORY);
fc.add(AvgAggregateDescriptor.FACTORY);
fc.add(LocalAvgAggregateDescriptor.FACTORY);
fc.add(IntermediateAvgAggregateDescriptor.FACTORY);
fc.add(GlobalAvgAggregateDescriptor.FACTORY);
fc.add(SumAggregateDescriptor.FACTORY);
fc.add(LocalSumAggregateDescriptor.FACTORY);
fc.add(IntermediateSumAggregateDescriptor.FACTORY);
fc.add(GlobalSumAggregateDescriptor.FACTORY);
fc.add(MaxAggregateDescriptor.FACTORY);
fc.add(LocalMaxAggregateDescriptor.FACTORY);
fc.add(IntermediateMaxAggregateDescriptor.FACTORY);
fc.add(GlobalMaxAggregateDescriptor.FACTORY);
fc.add(MinAggregateDescriptor.FACTORY);
fc.add(LocalMinAggregateDescriptor.FACTORY);
fc.add(IntermediateMinAggregateDescriptor.FACTORY);
fc.add(GlobalMinAggregateDescriptor.FACTORY);
fc.add(FirstElementAggregateDescriptor.FACTORY);
fc.add(LocalFirstElementAggregateDescriptor.FACTORY);
fc.add(LastElementAggregateDescriptor.FACTORY);
fc.add(StddevAggregateDescriptor.FACTORY);
fc.add(LocalStddevAggregateDescriptor.FACTORY);
fc.add(IntermediateStddevAggregateDescriptor.FACTORY);
fc.add(GlobalStddevAggregateDescriptor.FACTORY);
fc.add(LocalSamplingAggregateDescriptor.FACTORY);
fc.add(RangeMapAggregateDescriptor.FACTORY);
fc.add(StddevPopAggregateDescriptor.FACTORY);
fc.add(LocalStddevPopAggregateDescriptor.FACTORY);
fc.add(IntermediateStddevPopAggregateDescriptor.FACTORY);
fc.add(GlobalStddevPopAggregateDescriptor.FACTORY);
fc.add(VarAggregateDescriptor.FACTORY);
fc.add(LocalVarAggregateDescriptor.FACTORY);
fc.add(IntermediateVarAggregateDescriptor.FACTORY);
fc.add(GlobalVarAggregateDescriptor.FACTORY);
fc.add(VarPopAggregateDescriptor.FACTORY);
fc.add(LocalVarPopAggregateDescriptor.FACTORY);
fc.add(IntermediateVarPopAggregateDescriptor.FACTORY);
fc.add(GlobalVarPopAggregateDescriptor.FACTORY);
fc.add(KurtosisAggregateDescriptor.FACTORY);
fc.add(LocalKurtosisAggregateDescriptor.FACTORY);
fc.add(IntermediateKurtosisAggregateDescriptor.FACTORY);
fc.add(GlobalKurtosisAggregateDescriptor.FACTORY);
fc.add(SkewnessAggregateDescriptor.FACTORY);
fc.add(LocalSkewnessAggregateDescriptor.FACTORY);
fc.add(IntermediateSkewnessAggregateDescriptor.FACTORY);
fc.add(GlobalSkewnessAggregateDescriptor.FACTORY);
fc.add(EmptyStreamAggregateDescriptor.FACTORY);
fc.add(NonEmptyStreamAggregateDescriptor.FACTORY);
fc.add(NullWriterAggregateDescriptor.FACTORY);
// serializable aggregates
fc.add(SerializableCountAggregateDescriptor.FACTORY);
fc.add(SerializableAvgAggregateDescriptor.FACTORY);
fc.add(SerializableLocalAvgAggregateDescriptor.FACTORY);
fc.add(SerializableIntermediateAvgAggregateDescriptor.FACTORY);
fc.add(SerializableGlobalAvgAggregateDescriptor.FACTORY);
fc.add(SerializableSumAggregateDescriptor.FACTORY);
fc.add(SerializableLocalSumAggregateDescriptor.FACTORY);
fc.add(SerializableIntermediateSumAggregateDescriptor.FACTORY);
fc.add(SerializableGlobalSumAggregateDescriptor.FACTORY);
fc.add(SerializableStddevAggregateDescriptor.FACTORY);
fc.add(SerializableLocalStddevAggregateDescriptor.FACTORY);
fc.add(SerializableIntermediateStddevAggregateDescriptor.FACTORY);
fc.add(SerializableGlobalStddevAggregateDescriptor.FACTORY);
fc.add(SerializableStddevPopAggregateDescriptor.FACTORY);
fc.add(SerializableLocalStddevPopAggregateDescriptor.FACTORY);
fc.add(SerializableIntermediateStddevPopAggregateDescriptor.FACTORY);
fc.add(SerializableGlobalStddevPopAggregateDescriptor.FACTORY);
fc.add(SerializableVarAggregateDescriptor.FACTORY);
fc.add(SerializableLocalVarAggregateDescriptor.FACTORY);
fc.add(SerializableIntermediateVarAggregateDescriptor.FACTORY);
fc.add(SerializableGlobalVarAggregateDescriptor.FACTORY);
fc.add(SerializableVarPopAggregateDescriptor.FACTORY);
fc.add(SerializableLocalVarPopAggregateDescriptor.FACTORY);
fc.add(SerializableIntermediateVarPopAggregateDescriptor.FACTORY);
fc.add(SerializableGlobalVarPopAggregateDescriptor.FACTORY);
fc.add(SerializableKurtosisAggregateDescriptor.FACTORY);
fc.add(SerializableLocalKurtosisAggregateDescriptor.FACTORY);
fc.add(SerializableIntermediateKurtosisAggregateDescriptor.FACTORY);
fc.add(SerializableGlobalKurtosisAggregateDescriptor.FACTORY);
fc.add(SerializableSkewnessAggregateDescriptor.FACTORY);
fc.add(SerializableLocalSkewnessAggregateDescriptor.FACTORY);
fc.add(SerializableIntermediateSkewnessAggregateDescriptor.FACTORY);
fc.add(SerializableGlobalSkewnessAggregateDescriptor.FACTORY);
// scalar aggregates
fc.add(ScalarArrayAggAggregateDescriptor.FACTORY);
fc.add(ScalarArrayAggDistinctAggregateDescriptor.FACTORY);
fc.add(ScalarCountAggregateDescriptor.FACTORY);
fc.add(ScalarCountDistinctAggregateDescriptor.FACTORY);
fc.add(ScalarAvgAggregateDescriptor.FACTORY);
fc.add(ScalarAvgDistinctAggregateDescriptor.FACTORY);
fc.add(ScalarSumAggregateDescriptor.FACTORY);
fc.add(ScalarSumDistinctAggregateDescriptor.FACTORY);
fc.add(ScalarMaxAggregateDescriptor.FACTORY);
fc.add(ScalarMaxDistinctAggregateDescriptor.FACTORY);
fc.add(ScalarMinAggregateDescriptor.FACTORY);
fc.add(ScalarMinDistinctAggregateDescriptor.FACTORY);
fc.add(ScalarStddevAggregateDescriptor.FACTORY);
fc.add(ScalarStddevDistinctAggregateDescriptor.FACTORY);
fc.add(ScalarStddevPopAggregateDescriptor.FACTORY);
fc.add(ScalarStddevPopDistinctAggregateDescriptor.FACTORY);
fc.add(ScalarVarAggregateDescriptor.FACTORY);
fc.add(ScalarVarDistinctAggregateDescriptor.FACTORY);
fc.add(ScalarVarPopAggregateDescriptor.FACTORY);
fc.add(ScalarVarPopDistinctAggregateDescriptor.FACTORY);
fc.add(ScalarKurtosisAggregateDescriptor.FACTORY);
fc.add(ScalarKurtosisDistinctAggregateDescriptor.FACTORY);
fc.add(ScalarSkewnessAggregateDescriptor.FACTORY);
fc.add(ScalarSkewnessDistinctAggregateDescriptor.FACTORY);
// SQL aggregates
fc.add(SqlCountAggregateDescriptor.FACTORY);
fc.add(SqlAvgAggregateDescriptor.FACTORY);
fc.add(LocalSqlAvgAggregateDescriptor.FACTORY);
fc.add(IntermediateSqlAvgAggregateDescriptor.FACTORY);
fc.add(GlobalSqlAvgAggregateDescriptor.FACTORY);
fc.add(SqlSumAggregateDescriptor.FACTORY);
fc.add(LocalSqlSumAggregateDescriptor.FACTORY);
fc.add(IntermediateSqlSumAggregateDescriptor.FACTORY);
fc.add(GlobalSqlSumAggregateDescriptor.FACTORY);
fc.add(SqlMaxAggregateDescriptor.FACTORY);
fc.add(LocalSqlMaxAggregateDescriptor.FACTORY);
fc.add(IntermediateSqlMaxAggregateDescriptor.FACTORY);
fc.add(GlobalSqlMaxAggregateDescriptor.FACTORY);
fc.add(SqlMinAggregateDescriptor.FACTORY);
fc.add(LocalSqlMinAggregateDescriptor.FACTORY);
fc.add(IntermediateSqlMinAggregateDescriptor.FACTORY);
fc.add(GlobalSqlMinAggregateDescriptor.FACTORY);
fc.add(SqlStddevAggregateDescriptor.FACTORY);
fc.add(LocalSqlStddevAggregateDescriptor.FACTORY);
fc.add(IntermediateSqlStddevAggregateDescriptor.FACTORY);
fc.add(GlobalSqlStddevAggregateDescriptor.FACTORY);
fc.add(SqlStddevPopAggregateDescriptor.FACTORY);
fc.add(LocalSqlStddevPopAggregateDescriptor.FACTORY);
fc.add(IntermediateSqlStddevPopAggregateDescriptor.FACTORY);
fc.add(GlobalSqlStddevPopAggregateDescriptor.FACTORY);
fc.add(SqlVarAggregateDescriptor.FACTORY);
fc.add(LocalSqlVarAggregateDescriptor.FACTORY);
fc.add(IntermediateSqlVarAggregateDescriptor.FACTORY);
fc.add(GlobalSqlVarAggregateDescriptor.FACTORY);
fc.add(SqlVarPopAggregateDescriptor.FACTORY);
fc.add(LocalSqlVarPopAggregateDescriptor.FACTORY);
fc.add(IntermediateSqlVarPopAggregateDescriptor.FACTORY);
fc.add(GlobalSqlVarPopAggregateDescriptor.FACTORY);
fc.add(SqlKurtosisAggregateDescriptor.FACTORY);
fc.add(LocalSqlKurtosisAggregateDescriptor.FACTORY);
fc.add(IntermediateSqlKurtosisAggregateDescriptor.FACTORY);
fc.add(GlobalSqlKurtosisAggregateDescriptor.FACTORY);
fc.add(SqlSkewnessAggregateDescriptor.FACTORY);
fc.add(LocalSqlSkewnessAggregateDescriptor.FACTORY);
fc.add(IntermediateSqlSkewnessAggregateDescriptor.FACTORY);
fc.add(GlobalSqlSkewnessAggregateDescriptor.FACTORY);
// SQL serializable aggregates
fc.add(SerializableSqlCountAggregateDescriptor.FACTORY);
fc.add(SerializableSqlAvgAggregateDescriptor.FACTORY);
fc.add(SerializableLocalSqlAvgAggregateDescriptor.FACTORY);
fc.add(SerializableIntermediateSqlAvgAggregateDescriptor.FACTORY);
fc.add(SerializableGlobalSqlAvgAggregateDescriptor.FACTORY);
fc.add(SerializableSqlSumAggregateDescriptor.FACTORY);
fc.add(SerializableLocalSqlSumAggregateDescriptor.FACTORY);
fc.add(SerializableIntermediateSqlSumAggregateDescriptor.FACTORY);
fc.add(SerializableGlobalSqlSumAggregateDescriptor.FACTORY);
fc.add(SerializableSqlStddevAggregateDescriptor.FACTORY);
fc.add(SerializableLocalSqlStddevAggregateDescriptor.FACTORY);
fc.add(SerializableIntermediateSqlStddevAggregateDescriptor.FACTORY);
fc.add(SerializableGlobalSqlStddevAggregateDescriptor.FACTORY);
fc.add(SerializableSqlStddevPopAggregateDescriptor.FACTORY);
fc.add(SerializableLocalSqlStddevPopAggregateDescriptor.FACTORY);
fc.add(SerializableIntermediateSqlStddevPopAggregateDescriptor.FACTORY);
fc.add(SerializableGlobalSqlStddevPopAggregateDescriptor.FACTORY);
fc.add(SerializableSqlVarAggregateDescriptor.FACTORY);
fc.add(SerializableLocalSqlVarAggregateDescriptor.FACTORY);
fc.add(SerializableIntermediateSqlVarAggregateDescriptor.FACTORY);
fc.add(SerializableGlobalSqlVarAggregateDescriptor.FACTORY);
fc.add(SerializableSqlVarPopAggregateDescriptor.FACTORY);
fc.add(SerializableLocalSqlVarPopAggregateDescriptor.FACTORY);
fc.add(SerializableIntermediateSqlVarPopAggregateDescriptor.FACTORY);
fc.add(SerializableGlobalSqlVarPopAggregateDescriptor.FACTORY);
fc.add(SerializableSqlKurtosisAggregateDescriptor.FACTORY);
fc.add(SerializableLocalSqlKurtosisAggregateDescriptor.FACTORY);
fc.add(SerializableIntermediateSqlKurtosisAggregateDescriptor.FACTORY);
fc.add(SerializableGlobalSqlKurtosisAggregateDescriptor.FACTORY);
fc.add(SerializableSqlSkewnessAggregateDescriptor.FACTORY);
fc.add(SerializableLocalSqlSkewnessAggregateDescriptor.FACTORY);
fc.add(SerializableIntermediateSqlSkewnessAggregateDescriptor.FACTORY);
fc.add(SerializableGlobalSqlSkewnessAggregateDescriptor.FACTORY);
// SQL scalar aggregates
fc.add(ScalarSqlCountAggregateDescriptor.FACTORY);
fc.add(ScalarSqlCountDistinctAggregateDescriptor.FACTORY);
fc.add(ScalarSqlAvgAggregateDescriptor.FACTORY);
fc.add(ScalarSqlAvgDistinctAggregateDescriptor.FACTORY);
fc.add(ScalarSqlSumAggregateDescriptor.FACTORY);
fc.add(ScalarSqlSumDistinctAggregateDescriptor.FACTORY);
fc.add(ScalarSqlMaxAggregateDescriptor.FACTORY);
fc.add(ScalarSqlMaxDistinctAggregateDescriptor.FACTORY);
fc.add(ScalarSqlMinAggregateDescriptor.FACTORY);
fc.add(ScalarSqlMinDistinctAggregateDescriptor.FACTORY);
fc.add(ScalarSqlStddevAggregateDescriptor.FACTORY);
fc.add(ScalarSqlStddevDistinctAggregateDescriptor.FACTORY);
fc.add(ScalarSqlStddevPopAggregateDescriptor.FACTORY);
fc.add(ScalarSqlStddevPopDistinctAggregateDescriptor.FACTORY);
fc.add(ScalarSqlVarAggregateDescriptor.FACTORY);
fc.add(ScalarSqlVarDistinctAggregateDescriptor.FACTORY);
fc.add(ScalarSqlVarPopAggregateDescriptor.FACTORY);
fc.add(ScalarSqlVarPopDistinctAggregateDescriptor.FACTORY);
fc.add(ScalarSqlKurtosisAggregateDescriptor.FACTORY);
fc.add(ScalarSqlKurtosisDistinctAggregateDescriptor.FACTORY);
fc.add(ScalarSqlSkewnessAggregateDescriptor.FACTORY);
fc.add(ScalarSqlSkewnessDistinctAggregateDescriptor.FACTORY);
// window functions
fc.add(DenseRankRunningAggregateDescriptor.FACTORY);
fc.add(NtileRunningAggregateDescriptor.FACTORY);
fc.add(RankRunningAggregateDescriptor.FACTORY);
fc.add(RowNumberRunningAggregateDescriptor.FACTORY);
fc.add(PercentRankRunningAggregateDescriptor.FACTORY);
fc.add(WinMarkFirstMissingRunningAggregateDescriptor.FACTORY);
fc.add(WinPartitionLenRunningAggregateDescriptor.FACTORY);
// boolean functions
fc.add(AndDescriptor.FACTORY);
fc.add(OrDescriptor.FACTORY);
// Record constructors / functions
fc.add(ClosedRecordConstructorDescriptor.FACTORY);
fc.add(OpenRecordConstructorDescriptor.FACTORY);
fc.add(RecordConcatDescriptor.FACTORY);
fc.add(RecordConcatStrictDescriptor.FACTORY);
// List constructors
fc.add(OrderedListConstructorDescriptor.FACTORY);
fc.add(UnorderedListConstructorDescriptor.FACTORY);
// Sleep function
fc.add(SleepDescriptor.FACTORY);
// Inject failure function
fc.add(InjectFailureDescriptor.FACTORY);
// Get Job Parameter function
fc.add(GetJobParameterByNameDescriptor.FACTORY);
// Switch case
fc.add(SwitchCaseDescriptor.FACTORY);
// null functions
fc.add(IsMissingDescriptor.FACTORY);
fc.add(IsNullDescriptor.FACTORY);
fc.add(IsUnknownDescriptor.FACTORY);
fc.add(IsSystemNullDescriptor.FACTORY);
fc.add(CheckUnknownDescriptor.FACTORY);
fc.add(IfMissingDescriptor.FACTORY);
fc.add(IfNullDescriptor.FACTORY);
fc.add(IfMissingOrNullDescriptor.FACTORY);
fc.add(IfSystemNullDescriptor.FACTORY);
// uuid generators (zero independent functions)
fc.add(CreateUUIDDescriptor.FACTORY);
fc.add(UUIDDescriptor.FACTORY);
fc.add(CreateQueryUIDDescriptor.FACTORY);
fc.add(RandomDescriptor.FACTORY);
fc.add(CurrentDateDescriptor.FACTORY);
fc.add(CurrentTimeDescriptor.FACTORY);
fc.add(CurrentDateTimeDescriptor.FACTORY);
fc.add(IsNumericAddCompatibleDescriptor.FACTORY);
// functions that need generated class for null-handling.
// Element accessors.
fc.add(FieldAccessByIndexDescriptor.FACTORY);
fc.add(FieldAccessByNameDescriptor.FACTORY);
fc.add(FieldAccessNestedDescriptor.FACTORY);
fc.add(AnyCollectionMemberDescriptor.FACTORY);
fc.add(GetItemDescriptor.FACTORY);
// Numeric functions
fc.add(IfInfDescriptor.FACTORY);
fc.add(IfNanDescriptor.FACTORY);
fc.add(IfNanOrInfDescriptor.FACTORY);
fc.add(NumericUnaryMinusDescriptor.FACTORY);
fc.add(NumericAddDescriptor.FACTORY);
fc.add(NumericDivideDescriptor.FACTORY);
fc.add(NumericDivDescriptor.FACTORY);
fc.add(NumericMultiplyDescriptor.FACTORY);
fc.add(NumericSubDescriptor.FACTORY);
fc.add(NumericModuloDescriptor.FACTORY);
fc.add(NumericPowerDescriptor.FACTORY);
fc.add(NotDescriptor.FACTORY);
fc.add(LenDescriptor.FACTORY);
fc.add(NumericAbsDescriptor.FACTORY);
fc.add(NumericCeilingDescriptor.FACTORY);
fc.add(NumericFloorDescriptor.FACTORY);
fc.add(NumericRoundDescriptor.FACTORY);
fc.add(NumericRoundWithRoundDigitDescriptor.FACTORY);
fc.add(NumericRoundHalfToEvenDescriptor.FACTORY);
fc.add(NumericRoundHalfToEven2Descriptor.FACTORY);
fc.add(NumericACosDescriptor.FACTORY);
fc.add(NumericASinDescriptor.FACTORY);
fc.add(NumericATanDescriptor.FACTORY);
fc.add(NumericDegreesDescriptor.FACTORY);
fc.add(NumericRadiansDescriptor.FACTORY);
fc.add(NumericCosDescriptor.FACTORY);
fc.add(NumericCoshDescriptor.FACTORY);
fc.add(NumericSinDescriptor.FACTORY);
fc.add(NumericSinhDescriptor.FACTORY);
fc.add(NumericTanDescriptor.FACTORY);
fc.add(NumericTanhDescriptor.FACTORY);
fc.add(NumericExpDescriptor.FACTORY);
fc.add(NumericLnDescriptor.FACTORY);
fc.add(NumericLogDescriptor.FACTORY);
fc.add(NumericSqrtDescriptor.FACTORY);
fc.add(NumericSignDescriptor.FACTORY);
fc.add(NumericTruncDescriptor.FACTORY);
fc.add(NumericATan2Descriptor.FACTORY);
// Comparisons.
fc.add(EqualsDescriptor.FACTORY);
fc.add(GreaterThanDescriptor.FACTORY);
fc.add(GreaterThanOrEqualsDescriptor.FACTORY);
fc.add(LessThanDescriptor.FACTORY);
fc.add(LessThanOrEqualsDescriptor.FACTORY);
fc.add(NotEqualsDescriptor.FACTORY);
// If-Equals functions
fc.add(MissingIfEqualsDescriptor.FACTORY);
fc.add(NullIfEqualsDescriptor.FACTORY);
fc.add(NanIfEqualsDescriptor.FACTORY);
fc.add(PosInfIfEqualsDescriptor.FACTORY);
fc.add(NegInfIfEqualsDescriptor.FACTORY);
// Binary functions
fc.add(BinaryLengthDescriptor.FACTORY);
fc.add(ParseBinaryDescriptor.FACTORY);
fc.add(PrintBinaryDescriptor.FACTORY);
fc.add(BinaryConcatDescriptor.FACTORY);
fc.add(SubBinaryFromDescriptor.FACTORY);
fc.add(SubBinaryFromToDescriptor.FACTORY);
fc.add(FindBinaryDescriptor.FACTORY);
fc.add(FindBinaryFromDescriptor.FACTORY);
// Bitwise functions
fc.add(BitAndDescriptor.FACTORY);
fc.add(BitOrDescriptor.FACTORY);
fc.add(BitXorDescriptor.FACTORY);
fc.add(BitNotDescriptor.FACTORY);
fc.add(BitCountDescriptor.FACTORY);
fc.add(BitSetDescriptor.FACTORY);
fc.add(BitClearDescriptor.FACTORY);
fc.add(BitShiftWithoutRotateFlagDescriptor.FACTORY);
fc.add(BitShiftWithRotateFlagDescriptor.FACTORY);
fc.add(BitTestWithoutAllFlagDescriptor.FACTORY);
fc.add(BitTestWithAllFlagDescriptor.FACTORY);
fc.add(IsBitSetWithoutAllFlagDescriptor.FACTORY);
fc.add(IsBitSetWithAllFlagDescriptor.FACTORY);
// String functions
fc.add(StringLikeDescriptor.FACTORY);
fc.add(StringContainsDescriptor.FACTORY);
fc.add(StringEndsWithDescriptor.FACTORY);
fc.add(StringStartsWithDescriptor.FACTORY);
fc.add(SubstringDescriptor.FACTORY);
fc.add(SubstringOffset1Descriptor.FACTORY);
fc.add(StringEqualDescriptor.FACTORY);
fc.add(StringLowerCaseDescriptor.FACTORY);
fc.add(StringUpperCaseDescriptor.FACTORY);
fc.add(StringLengthDescriptor.FACTORY);
fc.add(Substring2Descriptor.FACTORY);
fc.add(Substring2Offset1Descriptor.FACTORY);
fc.add(SubstringBeforeDescriptor.FACTORY);
fc.add(SubstringAfterDescriptor.FACTORY);
fc.add(StringToCodePointDescriptor.FACTORY);
fc.add(CodePointToStringDescriptor.FACTORY);
fc.add(StringConcatDescriptor.FACTORY);
fc.add(StringJoinDescriptor.FACTORY);
fc.add(StringRegExpContainsDescriptor.FACTORY);
fc.add(StringRegExpContainsWithFlagDescriptor.FACTORY);
fc.add(StringRegExpLikeDescriptor.FACTORY);
fc.add(StringRegExpLikeWithFlagDescriptor.FACTORY);
fc.add(StringRegExpPositionDescriptor.FACTORY);
fc.add(StringRegExpPositionOffset1Descriptor.FACTORY);
fc.add(StringRegExpPositionWithFlagDescriptor.FACTORY);
fc.add(StringRegExpPositionOffset1WithFlagDescriptor.FACTORY);
fc.add(StringRegExpReplaceDescriptor.FACTORY);
fc.add(StringRegExpReplaceWithFlagDescriptor.FACTORY);
fc.add(StringRegExpMatchesDescriptor.FACTORY);
fc.add(StringRegExpSplitDescriptor.FACTORY);
fc.add(StringInitCapDescriptor.FACTORY);
fc.add(StringTrimDescriptor.FACTORY);
fc.add(StringLTrimDescriptor.FACTORY);
fc.add(StringRTrimDescriptor.FACTORY);
fc.add(StringTrim2Descriptor.FACTORY);
fc.add(StringLTrim2Descriptor.FACTORY);
fc.add(StringRTrim2Descriptor.FACTORY);
fc.add(StringPositionDescriptor.FACTORY);
fc.add(StringPositionOffset1Descriptor.FACTORY);
fc.add(StringRepeatDescriptor.FACTORY);
fc.add(StringReplaceDescriptor.FACTORY);
fc.add(StringReplaceWithLimitDescriptor.FACTORY);
fc.add(StringReverseDescriptor.FACTORY);
fc.add(StringSplitDescriptor.FACTORY);
// Constructors
fc.add(ABooleanConstructorDescriptor.FACTORY);
fc.add(ABinaryHexStringConstructorDescriptor.FACTORY);
fc.add(ABinaryBase64StringConstructorDescriptor.FACTORY);
fc.add(AStringConstructorDescriptor.FACTORY);
fc.add(AInt8ConstructorDescriptor.FACTORY);
fc.add(AInt16ConstructorDescriptor.FACTORY);
fc.add(AInt32ConstructorDescriptor.FACTORY);
fc.add(AInt64ConstructorDescriptor.FACTORY);
fc.add(AFloatConstructorDescriptor.FACTORY);
fc.add(ADoubleConstructorDescriptor.FACTORY);
fc.add(APointConstructorDescriptor.FACTORY);
fc.add(APoint3DConstructorDescriptor.FACTORY);
fc.add(ALineConstructorDescriptor.FACTORY);
fc.add(APolygonConstructorDescriptor.FACTORY);
fc.add(ACircleConstructorDescriptor.FACTORY);
fc.add(ARectangleConstructorDescriptor.FACTORY);
fc.add(ATimeConstructorDescriptor.FACTORY);
fc.add(ADateConstructorDescriptor.FACTORY);
fc.add(ADateTimeConstructorDescriptor.FACTORY);
fc.add(ADurationConstructorDescriptor.FACTORY);
fc.add(AYearMonthDurationConstructorDescriptor.FACTORY);
fc.add(ADayTimeDurationConstructorDescriptor.FACTORY);
fc.add(AUUIDFromStringConstructorDescriptor.FACTORY);
fc.add(AIntervalConstructorDescriptor.FACTORY);
fc.add(AIntervalStartFromDateConstructorDescriptor.FACTORY);
fc.add(AIntervalStartFromDateTimeConstructorDescriptor.FACTORY);
fc.add(AIntervalStartFromTimeConstructorDescriptor.FACTORY);
// Spatial
fc.add(CreatePointDescriptor.FACTORY);
fc.add(CreateLineDescriptor.FACTORY);
fc.add(CreatePolygonDescriptor.FACTORY);
fc.add(CreateCircleDescriptor.FACTORY);
fc.add(CreateRectangleDescriptor.FACTORY);
fc.add(SpatialAreaDescriptor.FACTORY);
fc.add(SpatialDistanceDescriptor.FACTORY);
fc.add(CreateMBRDescriptor.FACTORY);
fc.add(SpatialCellDescriptor.FACTORY);
fc.add(PointXCoordinateAccessor.FACTORY);
fc.add(PointYCoordinateAccessor.FACTORY);
fc.add(CircleRadiusAccessor.FACTORY);
fc.add(CircleCenterAccessor.FACTORY);
fc.add(LineRectanglePolygonAccessor.FACTORY);
// full-text function
fc.add(FullTextContainsFunctionDescriptor.FACTORY);
fc.add(FullTextContainsWithoutOptionFunctionDescriptor.FACTORY);
// Record functions.
fc.add(GetRecordFieldsDescriptor.FACTORY);
fc.add(GetRecordFieldValueDescriptor.FACTORY);
fc.add(DeepEqualityDescriptor.FACTORY);
fc.add(RecordMergeDescriptor.FACTORY);
fc.add(RecordMergeIgnoreDuplicatesDescriptor.FACTORY);
fc.add(RecordAddFieldsDescriptor.FACTORY);
fc.add(RecordRemoveFieldsDescriptor.FACTORY);
fc.add(RecordLengthDescriptor.FACTORY);
fc.add(RecordNamesDescriptor.FACTORY);
fc.add(RecordRemoveDescriptor.FACTORY);
fc.add(RecordRenameDescriptor.FACTORY);
fc.add(RecordUnwrapDescriptor.FACTORY);
fc.add(RecordReplaceDescriptor.FACTORY);
fc.add(RecordAddDescriptor.FACTORY);
fc.add(RecordPutDescriptor.FACTORY);
fc.add(RecordValuesDescriptor.FACTORY);
fc.add(PairsDescriptor.FACTORY);
// Spatial and temporal type accessors
fc.add(TemporalYearAccessor.FACTORY);
fc.add(TemporalMonthAccessor.FACTORY);
fc.add(TemporalDayAccessor.FACTORY);
fc.add(TemporalHourAccessor.FACTORY);
fc.add(TemporalMinuteAccessor.FACTORY);
fc.add(TemporalSecondAccessor.FACTORY);
fc.add(TemporalMillisecondAccessor.FACTORY);
fc.add(TemporalIntervalStartAccessor.FACTORY);
fc.add(TemporalIntervalEndAccessor.FACTORY);
fc.add(TemporalIntervalStartDateAccessor.FACTORY);
fc.add(TemporalIntervalEndDateAccessor.FACTORY);
fc.add(TemporalIntervalStartTimeAccessor.FACTORY);
fc.add(TemporalIntervalEndTimeAccessor.FACTORY);
fc.add(TemporalIntervalStartDatetimeAccessor.FACTORY);
fc.add(TemporalIntervalEndDatetimeAccessor.FACTORY);
// Temporal functions
fc.add(UnixTimeFromDateInDaysDescriptor.FACTORY);
fc.add(UnixTimeFromTimeInMsDescriptor.FACTORY);
fc.add(UnixTimeFromDatetimeInMsDescriptor.FACTORY);
fc.add(UnixTimeFromDatetimeInSecsDescriptor.FACTORY);
fc.add(DateFromUnixTimeInDaysDescriptor.FACTORY);
fc.add(DateFromDatetimeDescriptor.FACTORY);
fc.add(TimeFromUnixTimeInMsDescriptor.FACTORY);
fc.add(TimeFromDatetimeDescriptor.FACTORY);
fc.add(DatetimeFromUnixTimeInMsDescriptor.FACTORY);
fc.add(DatetimeFromUnixTimeInSecsDescriptor.FACTORY);
fc.add(DatetimeFromDateAndTimeDescriptor.FACTORY);
fc.add(CalendarDurationFromDateTimeDescriptor.FACTORY);
fc.add(CalendarDurationFromDateDescriptor.FACTORY);
fc.add(AdjustDateTimeForTimeZoneDescriptor.FACTORY);
fc.add(AdjustTimeForTimeZoneDescriptor.FACTORY);
fc.add(IntervalBeforeDescriptor.FACTORY);
fc.add(IntervalAfterDescriptor.FACTORY);
fc.add(IntervalMeetsDescriptor.FACTORY);
fc.add(IntervalMetByDescriptor.FACTORY);
fc.add(IntervalOverlapsDescriptor.FACTORY);
fc.add(IntervalOverlappedByDescriptor.FACTORY);
fc.add(OverlapDescriptor.FACTORY);
fc.add(IntervalStartsDescriptor.FACTORY);
fc.add(IntervalStartedByDescriptor.FACTORY);
fc.add(IntervalCoversDescriptor.FACTORY);
fc.add(IntervalCoveredByDescriptor.FACTORY);
fc.add(IntervalEndsDescriptor.FACTORY);
fc.add(IntervalEndedByDescriptor.FACTORY);
fc.add(DurationFromMillisecondsDescriptor.FACTORY);
fc.add(DurationFromMonthsDescriptor.FACTORY);
fc.add(YearMonthDurationGreaterThanComparatorDescriptor.FACTORY);
fc.add(YearMonthDurationLessThanComparatorDescriptor.FACTORY);
fc.add(DayTimeDurationGreaterThanComparatorDescriptor.FACTORY);
fc.add(DayTimeDurationLessThanComparatorDescriptor.FACTORY);
fc.add(MonthsFromYearMonthDurationDescriptor.FACTORY);
fc.add(MillisecondsFromDayTimeDurationDescriptor.FACTORY);
fc.add(DurationEqualDescriptor.FACTORY);
fc.add(GetYearMonthDurationDescriptor.FACTORY);
fc.add(GetDayTimeDurationDescriptor.FACTORY);
fc.add(IntervalBinDescriptor.FACTORY);
fc.add(OverlapBinsDescriptor.FACTORY);
fc.add(DayOfWeekDescriptor.FACTORY);
fc.add(ParseDateDescriptor.FACTORY);
fc.add(ParseTimeDescriptor.FACTORY);
fc.add(ParseDateTimeDescriptor.FACTORY);
fc.add(PrintDateDescriptor.FACTORY);
fc.add(PrintTimeDescriptor.FACTORY);
fc.add(PrintDateTimeDescriptor.FACTORY);
fc.add(GetOverlappingIntervalDescriptor.FACTORY);
fc.add(DurationFromIntervalDescriptor.FACTORY);
// Type functions.
fc.add(GetTypeDescriptor.FACTORY);
fc.add(IsArrayDescriptor.FACTORY);
fc.add(IsAtomicDescriptor.FACTORY);
fc.add(IsBooleanDescriptor.FACTORY);
fc.add(IsNumberDescriptor.FACTORY);
fc.add(IsObjectDescriptor.FACTORY);
fc.add(IsStringDescriptor.FACTORY);
fc.add(IsBinaryDescriptor.FACTORY);
fc.add(IsPointDescriptor.FACTORY);
fc.add(IsLineDescriptor.FACTORY);
fc.add(IsRectangleDescriptor.FACTORY);
fc.add(IsCircleDescriptor.FACTORY);
fc.add(IsPolygonDescriptor.FACTORY);
fc.add(IsSpatialDescriptor.FACTORY);
fc.add(IsDateDescriptor.FACTORY);
fc.add(IsDatetimeDescriptor.FACTORY);
fc.add(IsTimeDescriptor.FACTORY);
fc.add(IsDurationDescriptor.FACTORY);
fc.add(IsIntervalDescriptor.FACTORY);
fc.add(IsTemporalDescriptor.FACTORY);
fc.add(IsUUIDDescriptor.FACTORY);
fc.add(IsMultisetDescriptor.FACTORY);
fc.add(ToArrayDescriptor.FACTORY);
fc.add(ToAtomicDescriptor.FACTORY);
fc.add(ToBigIntDescriptor.FACTORY);
fc.add(ToBooleanDescriptor.FACTORY);
fc.add(ToDoubleDescriptor.FACTORY);
fc.add(ToNumberDescriptor.FACTORY);
fc.add(ToObjectDescriptor.FACTORY);
fc.add(ToStringDescriptor.FACTORY);
fc.add(TreatAsIntegerDescriptor.FACTORY);
// Cast function
fc.add(CastTypeDescriptor.FACTORY);
fc.add(CastTypeLaxDescriptor.FACTORY);
// Record function
fc.add(RecordPairsDescriptor.FACTORY);
// Other functions
fc.add(DecodeDataverseNameDescriptor.FACTORY);
fc.add(DecodeDataverseDisplayNameDescriptor.FACTORY);
fc.add(RandomWithSeedDescriptor.FACTORY);
ServiceLoader.load(IFunctionRegistrant.class).iterator().forEachRemaining(c -> c.register(fc));
return fc;
}
/**
 * @return the function descriptor factories registered with this collection.
 *         NOTE(review): presumably backed by the same list the registration
 *         code above populates — confirm where {@code descriptorFactories} is assigned.
 */
public List<IFunctionDescriptorFactory> getFunctionDescriptorFactories() {
return descriptorFactories;
}
}
| |
/*
* Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package com.gemstone.gemfire.management.internal.cli.shell;
import java.io.File;
import java.util.logging.Level;
import com.gemstone.gemfire.internal.lang.StringUtils;
import com.gemstone.gemfire.internal.util.IOUtils;
/**
*
* @author Abhishek Chaudhari
* @since 7.0
*/
// According to 7.5 discussions, gfsh should have as little config as possible,
// hence persisting GfshConfig is not done.
public class GfshConfig {
    // System properties that override the logging defaults below.
    private static final String LOG_DIR_PROPERTY = "gfsh.log-dir";
    private static final String LOG_LEVEL_PROPERTY = "gfsh.log-level";
    private static final String LOG_FILE_SIZE_LIMIT_PROPERTY = "gfsh.log-file-size-limit";
    private static final String LOG_DISK_SPACE_LIMIT_PROPERTY = "gfsh.log-disk-space-limit";

    private static final File HISTORY_FILE = new File(getHomeGemFireDirectory(), ".gfsh.history");

    // History file size
    private static final int MAX_HISTORY_SIZE = 500;

    private static final Level DEFAULT_LOGLEVEL = Level.OFF;
    private static final int DEFAULT_LOGFILE_SIZE_LIMIT = 1024 * 1024 * 10;
    private static final int DEFAULT_LOGFILE_DISK_USAGE = 1024 * 1024 * 10;

    private static final String DEFAULT_PROMPT = "{0}gfsh{1}>";

    private String historyFileName;
    private String defaultPrompt;
    private int historySize;
    private String logDir;
    private Level logLevel;
    private int logFileSizeLimit;
    private int logFileDiskLimit;

    public GfshConfig() {
        this(HISTORY_FILE.getAbsolutePath(), DEFAULT_PROMPT, MAX_HISTORY_SIZE, null, null, null, null);
    }

    /**
     * @param historyFileName path of the command-history file
     * @param defaultPrompt   MessageFormat pattern for the shell prompt
     * @param historySize     maximum number of history entries to keep
     * @param logDir          log directory; null falls back to "gfsh.log-dir" or "."
     * @param logLevel        log level; null falls back to "gfsh.log-level" or OFF
     * @param logLimit        per-file size limit in bytes; null falls back to the
     *                        system property or the 10MB default
     * @param logCount        despite the name, this is the total disk-space limit in
     *                        bytes (see {@link #getLogFileDiskLimit()}); null falls
     *                        back to the system property or the 10MB default
     */
    public GfshConfig(String historyFileName, String defaultPrompt,
            int historySize, String logDir, Level logLevel, Integer logLimit,
            Integer logCount) {
        this.historyFileName = historyFileName;
        this.defaultPrompt = defaultPrompt;
        this.historySize = historySize;

        // Logger properties
        if (logDir == null) {
            this.logDir = System.getProperty(LOG_DIR_PROPERTY, ".");
        } else {
            this.logDir = logDir;
        }
        if (logLevel == null) {
            this.logLevel = getLogLevel(System.getProperty(LOG_LEVEL_PROPERTY, DEFAULT_LOGLEVEL.getName()));
        } else {
            this.logLevel = logLevel;
        }
        if (logLimit == null) {
            this.logFileSizeLimit = getParsedOrDefault(System.getProperty(LOG_FILE_SIZE_LIMIT_PROPERTY), LOG_FILE_SIZE_LIMIT_PROPERTY, DEFAULT_LOGFILE_SIZE_LIMIT);
        } else {
            this.logFileSizeLimit = logLimit;
        }
        if (logCount == null) {
            // validation & correction to default is done in getLogFileCount()
            this.logFileDiskLimit = getParsedOrDefault(System.getProperty(LOG_DISK_SPACE_LIMIT_PROPERTY), LOG_DISK_SPACE_LIMIT_PROPERTY, DEFAULT_LOGFILE_DISK_USAGE);
        } else {
            this.logFileDiskLimit = logCount;
        }
    }

    public String getHistoryFileName() {
        return historyFileName;
    }

    public String getDefaultPrompt() {
        return defaultPrompt;
    }

    public int getHistorySize() {
        return historySize;
    }

    /** Log file path pattern; %u/%g are java.util.logging unique/generation placeholders. */
    public String getLogFilePath() {
        return IOUtils.tryGetCanonicalPathElseGetAbsolutePath(new File(logDir, "gfsh-%u_%g.log"));
    }

    public Level getLogLevel() {
        return logLevel;
    }

    /** @return maximum size of a single log file, in bytes */
    public int getLogFileSizeLimit() {
        return logFileSizeLimit;
    }

    /** @return maximum total disk space for all rolled log files, in bytes */
    protected int getLogFileDiskLimit() {
        return logFileDiskLimit;
    }

    /**
     * Number of rolled log files: how many files of the configured size fit in the
     * allowed disk space, with a minimum of 1.
     */
    public int getLogFileCount() {
        int logCount;
        try {
            // BUG FIX: the original divided size-limit by disk-limit, which yields 0
            // (clamped to 1) whenever the disk limit exceeds the file size limit.
            // The count is disk space divided by per-file size.
            logCount = getLogFileDiskLimit() / getLogFileSizeLimit();
            logCount = logCount >= 1 ? logCount : 1;
        } catch (java.lang.ArithmeticException e) { // for divide by zero
            logCount = 1;
        }
        return logCount;
    }

    public boolean isLoggingEnabled() {
        // keep call for getLogLevel() instead of logLevel for inheritance
        return !Level.OFF.equals(getLogLevel());
    }

    private String getLoggerConfig() {
        StringBuilder builder = new StringBuilder();
        builder.append("log-file=" + getLogFilePath()).append(Gfsh.LINE_SEPARATOR);
        builder.append("log-level=" + getLogLevel().getName()).append(Gfsh.LINE_SEPARATOR);
        builder.append("log-file-size-limit=" + getLogFileSizeLimit()).append(Gfsh.LINE_SEPARATOR);
        builder.append("log-disk-space-limit=" + getLogFileDiskLimit()).append(Gfsh.LINE_SEPARATOR);
        builder.append("log-count=" + getLogFileCount()).append(Gfsh.LINE_SEPARATOR);
        return builder.toString();
    }

    public boolean isTestConfig() {
        return false;
    }

    public boolean isANSISupported() {
        return !Boolean.getBoolean("gfsh.disable.color");
    }

    /**
     * Parses a JUL level name, additionally accepting GemFire's "NONE" (mapped to OFF)
     * and "ERROR" (mapped to WARNING). Falls back to {@link #DEFAULT_LOGLEVEL} on
     * unparseable input.
     */
    private static Level getLogLevel(final String logLevelString) {
        try {
            String logLevelAsString = StringUtils.isBlank(logLevelString) ? "" : logLevelString.trim(); // trim spaces if any
            // To support level NONE, used by GemFire
            if ("NONE".equalsIgnoreCase(logLevelAsString)) {
                logLevelAsString = Level.OFF.getName();
            }
            // To support level ERROR, used by GemFire, fall to WARNING
            if ("ERROR".equalsIgnoreCase(logLevelAsString)) {
                logLevelAsString = Level.WARNING.getName();
            }
            return Level.parse(logLevelAsString.toUpperCase());
        } catch (IllegalArgumentException e) {
            System.out.println(e.getMessage());
            return DEFAULT_LOGLEVEL;
        }
    }

    /**
     * Returns ~/.gemfire, creating it if needed; falls back to "." when the
     * directory cannot be created.
     */
    private static String getHomeGemFireDirectory() {
        String userHome = System.getProperty("user.home");
        String homeDirPath = userHome + "/.gemfire";
        File alternateDir = new File(homeDirPath);
        if (!alternateDir.exists()) {
            if (!alternateDir.mkdirs()) {
                homeDirPath = ".";
            }
        }
        return homeDirPath;
    }

    /**
     * Parses an int from {@code numberString}, reporting invalid values on stderr and
     * returning {@code defaultValue} when the string is null or unparseable.
     */
    private static int getParsedOrDefault(final String numberString, final String parseValueFor, final int defaultValue) {
        if (numberString == null) {
            return defaultValue;
        }
        try {
            // parseInt avoids the needless Integer boxing of Integer.valueOf
            return Integer.parseInt(numberString);
        } catch (NumberFormatException e) {
            System.err.println("Invalid value \"" + numberString + "\" specified for: \"" + parseValueFor + "\". Using default value: \"" + defaultValue + "\".");
            return defaultValue;
        }
    }

    @Override
    public String toString() {
        StringBuilder builder = new StringBuilder();
        builder.append(getClass().getSimpleName());
        builder.append(" [historyFileName=");
        builder.append(getHistoryFileName());
        builder.append(", historySize=");
        builder.append(getHistorySize());
        builder.append(", loggerConfig={");
        builder.append(getLoggerConfig()).append("}");
        builder.append(", isANSISupported=");
        builder.append(isANSISupported());
        builder.append("]");
        return builder.toString();
    }
}
| |
/*
* Copyright 2017 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jbpm.test.functional.timer;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import javax.naming.InitialContext;
import javax.persistence.Persistence;
import javax.sql.DataSource;
import org.drools.core.time.TimerService;
import org.jbpm.process.core.timer.TimerServiceRegistry;
import org.jbpm.process.core.timer.impl.GlobalTimerService;
import org.jbpm.process.core.timer.impl.QuartzSchedulerService;
import org.jbpm.runtime.manager.impl.AbstractRuntimeManager;
import org.jbpm.services.task.identity.JBossUserGroupCallbackImpl;
import org.jbpm.test.listener.process.NodeLeftCountDownProcessEventListener;
import org.junit.After;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import org.kie.api.event.process.DefaultProcessEventListener;
import org.kie.api.event.process.ProcessEventListener;
import org.kie.api.event.process.ProcessNodeLeftEvent;
import org.kie.api.event.process.ProcessStartedEvent;
import org.kie.api.io.ResourceType;
import org.kie.api.runtime.KieSession;
import org.kie.api.runtime.manager.RuntimeEngine;
import org.kie.api.runtime.manager.RuntimeEnvironment;
import org.kie.api.runtime.manager.RuntimeEnvironmentBuilder;
import org.kie.api.runtime.manager.RuntimeManager;
import org.kie.api.runtime.manager.RuntimeManagerFactory;
import org.kie.api.runtime.process.ProcessInstance;
import org.kie.api.task.UserGroupCallback;
import org.kie.internal.io.ResourceFactory;
import org.kie.internal.runtime.manager.context.ProcessInstanceIdContext;
import static org.junit.Assert.*;
@RunWith(Parameterized.class)
public class GlobalQuartzDBTimerServiceTest extends GlobalTimerServiceBaseTest {

    /** RuntimeManager strategy under test: 1 = singleton, 2 = per-request, 3 = per-process-instance. */
    private int managerType;

    @Parameters
    public static Collection<Object[]> persistence() {
        Object[][] data = new Object[][] { { 1 }, { 2 }, { 3 } };
        return Arrays.asList(data);
    }

    public GlobalQuartzDBTimerServiceTest(int managerType) {
        this.managerType = managerType;
    }

    @Before
    public void setUp() {
        cleanupSingletonSessionId();
        emf = Persistence.createEntityManagerFactory("org.jbpm.test.persistence");
        System.setProperty("org.quartz.properties", "quartz-db.properties");
        testCreateQuartzSchema();
        globalScheduler = new QuartzSchedulerService();
        ((QuartzSchedulerService) globalScheduler).forceShutdown();
    }

    @After
    public void tearDown() {
        try {
            globalScheduler.shutdown();
        } catch (Exception e) {
            // best-effort shutdown; the scheduler may already be stopped
        }
        cleanup();
        System.clearProperty("org.quartz.properties");
    }

    /**
     * Builds a RuntimeManager of the strategy selected by {@link #managerType}.
     *
     * @param environment runtime environment the manager is created from
     * @param waitOnStart when true, sleeps past Quartz's default 2-second startup delay
     */
    @Override
    protected RuntimeManager getManager(RuntimeEnvironment environment, boolean waitOnStart) {
        RuntimeManager manager = null;
        if (managerType == 1) {
            manager = RuntimeManagerFactory.Factory.get().newSingletonRuntimeManager(environment);
        } else if (managerType == 2) {
            manager = RuntimeManagerFactory.Factory.get().newPerRequestRuntimeManager(environment);
        } else if (managerType == 3) {
            manager = RuntimeManagerFactory.Factory.get().newPerProcessInstanceRuntimeManager(environment);
        } else {
            // fixed typo in the original message ("maanger")
            throw new IllegalArgumentException("Invalid runtime manager type");
        }
        if (waitOnStart) {
            // wait for the 2 seconds (default startup delay for quartz)
            try {
                Thread.sleep(2000);
            } catch (InterruptedException e) {
                // restore the interrupt status instead of swallowing it
                Thread.currentThread().interrupt();
            }
        }
        return manager;
    }

    /**
     * Verifies that timer-started processes stop firing once the manager is closed:
     * expirations recorded at dispose time must not grow after close(true).
     */
    @Test(timeout = 20000)
    public void testTimerStartManagerClose() throws Exception {
        NodeLeftCountDownProcessEventListener countDownListener = new NodeLeftCountDownProcessEventListener("StartProcess", 3);
        QuartzSchedulerService additionalCopy = new QuartzSchedulerService();
        additionalCopy.initScheduler(null);
        // prepare listener to assert results
        final List<Long> timerExpirations = new ArrayList<Long>();
        ProcessEventListener listener = new DefaultProcessEventListener() {
            @Override
            public void beforeProcessStarted(ProcessStartedEvent event) {
                timerExpirations.add(event.getProcessInstance().getId());
            }
        };
        environment = RuntimeEnvironmentBuilder.Factory.get()
                .newDefaultBuilder()
                .entityManagerFactory(emf)
                .addAsset(ResourceFactory.newClassPathResource("org/jbpm/test/functional/timer/TimerStart2.bpmn2"), ResourceType.BPMN2)
                .schedulerService(globalScheduler)
                .registerableItemsFactory(new TestRegisterableItemsFactory(listener, countDownListener))
                .get();
        manager = getManager(environment, false);
        RuntimeEngine runtime = manager.getRuntimeEngine(ProcessInstanceIdContext.get());
        KieSession ksession = runtime.getKieSession();
        assertEquals(0, timerExpirations.size());
        countDownListener.waitTillCompleted();
        manager.disposeRuntimeEngine(runtime);
        int atDispose = timerExpirations.size();
        assertTrue(atDispose > 0);
        ((AbstractRuntimeManager) manager).close(true);
        countDownListener.reset(1);
        countDownListener.waitTillCompleted(3000);
        // no additional expirations may arrive after close
        assertEquals(atDispose, timerExpirations.size());
        additionalCopy.shutdown();
    }

    /**
     * Test that illustrates that jobs are persisted and survive server restart
     * and as soon as GlobalTimerService is active jobs are fired, and it loads and aborts the
     * process instance to illustrate jobs are properly removed when the instance is aborted.
     * NOTE: this test is disabled by default as it requires a real db (not in memory)
     * and the test must be executed separately, each run with a new jvm process.
     */
    @Test
    @Ignore
    public void testAbortGlobalTestService() throws Exception {
        RuntimeEnvironment environment = RuntimeEnvironmentBuilder.Factory.get()
                .newDefaultBuilder()
                .entityManagerFactory(emf)
                .addAsset(ResourceFactory.newClassPathResource("org/jbpm/test/functional/timer/IntermediateCatchEventTimerCycle3.bpmn2"), ResourceType.BPMN2)
                .addConfiguration("drools.timerService", "org.jbpm.process.core.timer.impl.RegisteredTimerServiceDelegate")
                .get();
        RuntimeManager manager = RuntimeManagerFactory.Factory.get().newSingletonRuntimeManager(environment);
        // build GlobalTimerService instance
        TimerService globalTs = new GlobalTimerService(manager, globalScheduler);
        // and register it in the registry under 'default' key
        TimerServiceRegistry.getInstance().registerTimerService("default", globalTs);
        // prepare listener to assert results
        final List<Long> timerExpirations = new ArrayList<Long>();
        ProcessEventListener listener = new DefaultProcessEventListener() {
            @Override
            public void afterNodeLeft(ProcessNodeLeftEvent event) {
                if (event.getNodeInstance().getNodeName().equals("timer")) {
                    timerExpirations.add(event.getProcessInstance().getId());
                }
            }
        };
        // NOTE(review): id is never assigned a real process-instance id; this manual
        // test presumably expects the tester to plug in the id persisted by a
        // previous run -- confirm before re-enabling.
        long id = -1;
        Thread.sleep(5000);
        RuntimeEngine runtime = manager.getRuntimeEngine(ProcessInstanceIdContext.get());
        KieSession ksession = runtime.getKieSession();
        ksession.addEventListener(listener);
        ksession.abortProcessInstance(id);
        ProcessInstance processInstance = ksession.getProcessInstance(id);
        assertNull(processInstance);
        // let's wait to ensure no more timers are expired and triggered
        Thread.sleep(3000);
        ksession.dispose();
    }

    /**
     * Test that illustrates that jobs are persisted and survive server restart
     * and as soon as GlobalTimerService is active jobs are fired.
     * NOTE: this test is disabled by default as it requires a real db (not in memory)
     * and the test must be executed separately, each run with a new jvm process.
     */
    @Test
    @Ignore
    public void testContinueGlobalTestService() throws Exception {
        RuntimeEnvironment environment = RuntimeEnvironmentBuilder.Factory.get()
                .newDefaultBuilder()
                .entityManagerFactory(emf)
                .addAsset(ResourceFactory.newClassPathResource("org/jbpm/test/functional/timer/IntermediateCatchEventTimerCycle2.bpmn2"), ResourceType.BPMN2)
                .addConfiguration("drools.timerService", "org.jbpm.process.core.timer.impl.RegisteredTimerServiceDelegate")
                .get();
        RuntimeManager manager = RuntimeManagerFactory.Factory.get().newSingletonRuntimeManager(environment);
        // build GlobalTimerService instance
        TimerService globalTs = new GlobalTimerService(manager, globalScheduler);
        // and register it in the registry under 'default' key
        TimerServiceRegistry.getInstance().registerTimerService("default", globalTs);
        // prepare listener to assert results
        final List<Long> timerExpirations = new ArrayList<Long>();
        ProcessEventListener listener = new DefaultProcessEventListener() {
            @Override
            public void afterNodeLeft(ProcessNodeLeftEvent event) {
                if (event.getNodeInstance().getNodeName().equals("timer")) {
                    timerExpirations.add(event.getProcessInstance().getId());
                }
            }
        };
        Thread.sleep(5000);
    }

    /**
     * JBPM-4443: a cycle timer started before shutdown must continue firing after
     * the manager is restarted, using the RuntimeManager default timer service.
     */
    @Test(timeout = 20000)
    public void testContinueTimer() throws Exception {
        // JBPM-4443
        NodeLeftCountDownProcessEventListener countDownListener = new NodeLeftCountDownProcessEventListener("timer", 2);
        // prepare listener to assert results
        final List<Long> timerExpirations = new ArrayList<Long>();
        ProcessEventListener listener = new DefaultProcessEventListener() {
            @Override
            public void afterNodeLeft(ProcessNodeLeftEvent event) {
                if (event.getNodeInstance().getNodeName().equals("timer")) {
                    timerExpirations.add(event.getProcessInstance().getId());
                }
            }
        };
        // No special configuration for TimerService in order to test RuntimeManager default
        environment = RuntimeEnvironmentBuilder.Factory.get()
                .newDefaultBuilder()
                .entityManagerFactory(emf)
                .addAsset(ResourceFactory.newClassPathResource("org/jbpm/test/functional/timer/IntermediateCatchEventTimerCycle4.bpmn2"), ResourceType.BPMN2)
                .registerableItemsFactory(new TestRegisterableItemsFactory(listener, countDownListener))
                .get();
        manager = getManager(environment, true);
        RuntimeEngine runtime = manager.getRuntimeEngine(ProcessInstanceIdContext.get());
        KieSession ksession = runtime.getKieSession();
        ProcessInstance processInstance = ksession.startProcess("IntermediateCatchEvent");
        manager.disposeRuntimeEngine(runtime);
        countDownListener.waitTillCompleted();
        manager.close();
        countDownListener.reset(1);
        // ---- restart ----
        environment = RuntimeEnvironmentBuilder.Factory.get()
                .newDefaultBuilder()
                .entityManagerFactory(emf)
                .addAsset(ResourceFactory.newClassPathResource("org/jbpm/test/functional/timer/IntermediateCatchEventTimerCycle4.bpmn2"), ResourceType.BPMN2)
                .registerableItemsFactory(new TestRegisterableItemsFactory(listener))
                .get();
        manager = getManager(environment, true);
        // NOTE(review): this disposes the runtime obtained before the restart;
        // presumably intentional cleanup of the stale handle -- confirm.
        manager.disposeRuntimeEngine(runtime);
        countDownListener.waitTillCompleted(3000);
        assertEquals(2, timerExpirations.size());
    }

    /**
     * Verifies that Quartz job details created for process timers have the
     * REQUESTS_RECOVERY flag set, so they are re-fired after a hard crash.
     */
    @Test(timeout = 20000)
    public void testTimerRequiresRecoveryFlagSet() throws Exception {
        Properties properties = new Properties();
        properties.setProperty("mary", "HR");
        properties.setProperty("john", "HR");
        UserGroupCallback userGroupCallback = new JBossUserGroupCallbackImpl(properties);
        environment = RuntimeEnvironmentBuilder.Factory.get()
                .newDefaultBuilder()
                .entityManagerFactory(emf)
                .addAsset(ResourceFactory.newClassPathResource("org/jbpm/test/functional/timer/HumanTaskWithBoundaryTimer.bpmn"), ResourceType.BPMN2)
                .schedulerService(globalScheduler)
                .userGroupCallback(userGroupCallback)
                .get();
        manager = getManager(environment, true);
        RuntimeEngine runtime = manager.getRuntimeEngine(ProcessInstanceIdContext.get());
        KieSession ksession = runtime.getKieSession();
        Map<String, Object> params = new HashMap<String, Object>();
        params.put("test", "john");
        ProcessInstance processInstance = ksession.startProcess("PROCESS_1", params);
        Connection connection = null;
        Statement stmt = null;
        try {
            connection = ((DataSource) InitialContext.doLookup("jdbc/jbpm-ds")).getConnection();
            stmt = connection.createStatement();
            ResultSet resultSet = stmt.executeQuery("select REQUESTS_RECOVERY from QRTZ_JOB_DETAILS");
            while (resultSet.next()) {
                boolean requestsRecovery = resultSet.getBoolean(1);
                assertTrue("Requests recovery must be set to true", requestsRecovery);
            }
        } finally {
            if (stmt != null) {
                stmt.close();
            }
            if (connection != null) {
                connection.close();
            }
        }
        ksession.abortProcessInstance(processInstance.getId());
        manager.disposeRuntimeEngine(runtime);
    }

    /**
     * RHBPMS-4729: after a restart that misses several firings (misfire), the timer
     * must catch up so that the total number of process starts reaches 5.
     */
    @Test(timeout = 25000)
    public void testContinueTimerWithMisfire() throws Exception {
        // RHBPMS-4729
        System.setProperty("org.quartz.properties", "quartz-db-short-misfire.properties");
        NodeLeftCountDownProcessEventListener countDownListener = new NodeLeftCountDownProcessEventListener("StartProcess", 2);
        // prepare listener to assert results
        final List<Long> timerExpirations = new ArrayList<Long>();
        ProcessEventListener listener = new DefaultProcessEventListener() {
            @Override
            public void beforeProcessStarted(ProcessStartedEvent event) {
                timerExpirations.add(event.getProcessInstance().getId());
            }
        };
        // No special configuration for TimerService in order to test RuntimeManager default
        environment = RuntimeEnvironmentBuilder.Factory.get()
                .newDefaultBuilder()
                .entityManagerFactory(emf)
                .addAsset(ResourceFactory.newClassPathResource("org/jbpm/test/functional/timer/TimerStart2.bpmn2"), ResourceType.BPMN2)
                .registerableItemsFactory(new TestRegisterableItemsFactory(listener, countDownListener))
                .get();
        manager = getManager(environment, true);
        RuntimeEngine runtime = manager.getRuntimeEngine(ProcessInstanceIdContext.get());
        KieSession ksession = runtime.getKieSession();
        countDownListener.waitTillCompleted();
        manager.disposeRuntimeEngine(runtime);
        manager.close();
        System.out.println("==== manager.close() ====");
        countDownListener.reset(3);
        // Simulate interval between shutdown and start so the Trigger is older than (now - misfireThreshold)
        Thread.sleep(5000);
        // ---- restart ----
        environment = RuntimeEnvironmentBuilder.Factory.get()
                .newDefaultBuilder()
                .entityManagerFactory(emf)
                .addAsset(ResourceFactory.newClassPathResource("org/jbpm/test/functional/timer/TimerStart2.bpmn2"), ResourceType.BPMN2)
                .registerableItemsFactory(new TestRegisterableItemsFactory(listener, countDownListener))
                .get();
        manager = getManager(environment, true);
        countDownListener.waitTillCompleted(4000);
        assertEquals(5, timerExpirations.size());
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.pipeline.movavg.models;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.pipeline.movavg.MovAvgParser;
import java.io.IOException;
import java.text.ParseException;
import java.util.Arrays;
import java.util.Collection;
import java.util.Map;
import java.util.Objects;
/**
* Calculate a exponentially weighted moving average
*/
public class EwmaModel extends MovAvgModel {

    private static final EwmaModel PROTOTYPE = new EwmaModel();

    protected static final ParseField NAME_FIELD = new ParseField("ewma");

    public static final double DEFAULT_ALPHA = 0.3;

    /**
     * Controls smoothing of data. Also known as "level" value.
     * Alpha = 1 retains no memory of past values
     * (e.g. random walk), while alpha = 0 retains infinite memory of past values (e.g.
     * mean of the series).
     */
    private final double alpha;

    public EwmaModel() {
        this(DEFAULT_ALPHA);
    }

    public EwmaModel(double alpha) {
        this.alpha = alpha;
    }

    @Override
    public boolean canBeMinimized() {
        return true;
    }

    @Override
    public MovAvgModel neighboringModel() {
        // pick a random alpha in [0, 1) for the minimizer to explore
        double alpha = Math.random();
        return new EwmaModel(alpha);
    }

    @Override
    public MovAvgModel clone() {
        return new EwmaModel(this.alpha);
    }

    @Override
    protected <T extends Number> double[] doPredict(Collection<T> values, int numPredictions) {
        double[] predictions = new double[numPredictions];

        // EWMA just emits the same final prediction repeatedly.
        Arrays.fill(predictions, next(values));

        return predictions;
    }

    /**
     * Computes the exponentially weighted moving average: the first value seeds the
     * average, each subsequent value is blended in with weight {@code alpha}.
     * Returns 0 for an empty collection.
     */
    @Override
    public <T extends Number> double next(Collection<T> values) {
        double avg = 0;
        boolean first = true;

        for (T v : values) {
            if (first) {
                avg = v.doubleValue();
                first = false;
            } else {
                avg = (v.doubleValue() * alpha) + (avg * (1 - alpha));
            }
        }
        return avg;
    }

    public static final MovAvgModelStreams.Stream STREAM = new MovAvgModelStreams.Stream() {
        @Override
        public MovAvgModel readResult(StreamInput in) throws IOException {
            return PROTOTYPE.readFrom(in);
        }

        @Override
        public String getName() {
            return NAME_FIELD.getPreferredName();
        }
    };

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.field(MovAvgParser.MODEL.getPreferredName(), NAME_FIELD.getPreferredName());
        builder.startObject(MovAvgParser.SETTINGS.getPreferredName());
        builder.field("alpha", alpha);
        builder.endObject();
        return builder;
    }

    @Override
    public MovAvgModel readFrom(StreamInput in) throws IOException {
        return new EwmaModel(in.readDouble());
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeString(STREAM.getName());
        out.writeDouble(alpha);
    }

    @Override
    public int hashCode() {
        return Objects.hash(alpha);
    }

    @Override
    public boolean equals(Object obj) {
        // added the standard identity fast path; result is unchanged
        if (this == obj) {
            return true;
        }
        if (obj == null) {
            return false;
        }
        if (getClass() != obj.getClass()) {
            return false;
        }
        EwmaModel other = (EwmaModel) obj;
        return Objects.equals(alpha, other.alpha);
    }

    public static class SingleExpModelParser extends AbstractModelParser {
        @Override
        public String getName() {
            return NAME_FIELD.getPreferredName();
        }

        @Override
        public MovAvgModel parse(@Nullable Map<String, Object> settings, String pipelineName, int windowSize,
                                 ParseFieldMatcher parseFieldMatcher) throws ParseException {
            double alpha = parseDoubleParam(settings, "alpha", DEFAULT_ALPHA);
            checkUnrecognizedParams(settings);
            return new EwmaModel(alpha);
        }
    }

    public static class EWMAModelBuilder implements MovAvgModelBuilder {

        private double alpha = DEFAULT_ALPHA;

        /**
         * Alpha controls the smoothing of the data. Alpha = 1 retains no memory of past values
         * (e.g. a random walk), while alpha = 0 retains infinite memory of past values (e.g.
         * the series mean). Useful values are somewhere in between. Defaults to 0.3
         * ({@link #DEFAULT_ALPHA}; the original javadoc incorrectly said 0.5).
         *
         * @param alpha A double between 0-1 inclusive, controls data smoothing
         *
         * @return The builder to continue chaining
         */
        public EWMAModelBuilder alpha(double alpha) {
            this.alpha = alpha;
            return this;
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.field(MovAvgParser.MODEL.getPreferredName(), NAME_FIELD.getPreferredName());
            builder.startObject(MovAvgParser.SETTINGS.getPreferredName());
            builder.field("alpha", alpha);
            builder.endObject();
            return builder;
        }

        @Override
        public MovAvgModel build() {
            return new EwmaModel(alpha);
        }
    }
}
| |
/*
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package software.amazon.awssdk.http.nio.netty;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import java.util.stream.Collectors;
import software.amazon.awssdk.annotations.SdkPublicApi;
import software.amazon.awssdk.utils.ProxySystemSetting;
import software.amazon.awssdk.utils.builder.CopyableBuilder;
import software.amazon.awssdk.utils.builder.ToCopyableBuilder;
/**
* Proxy configuration for {@link NettyNioAsyncHttpClient}. This class is used to configure an HTTP proxy to be used by
* the {@link NettyNioAsyncHttpClient}.
*
* @see NettyNioAsyncHttpClient.Builder#proxyConfiguration(ProxyConfiguration)
*/
@SdkPublicApi
public final class ProxyConfiguration implements ToCopyableBuilder<ProxyConfiguration.Builder, ProxyConfiguration> {
private final Boolean useSystemPropertyValues;
private final String scheme;
private final String host;
private final int port;
private final String username;
private final String password;
// May remain null when neither the builder nor system properties supply it;
// accessors must guard against that.
private final Set<String> nonProxyHosts;

private ProxyConfiguration(BuilderImpl builder) {
    // Host and port are resolved eagerly; username/password/nonProxyHosts are
    // resolved lazily in their accessors.
    // NOTE(review): resolveHost/resolvePort are defined elsewhere in this class
    // (outside this view) -- presumably they apply the system-property fallback;
    // confirm there.
    this.useSystemPropertyValues = builder.useSystemPropertyValues;
    this.scheme = builder.scheme;
    this.host = resolveHost(builder.host);
    this.port = resolvePort(builder.port);
    this.username = builder.username;
    this.password = builder.password;
    this.nonProxyHosts = builder.nonProxyHosts;
}

/**
 * @return a new {@link Builder} for creating a {@link ProxyConfiguration}.
 */
public static Builder builder() {
    return new BuilderImpl();
}
/**
 * @return The proxy scheme.
 */
public String scheme() {
    return scheme;
}

/**
 * @return The proxy host from the configuration if set, or from the "http.proxyHost" system property if
 * {@link Builder#useSystemPropertyValues(Boolean)} is set to true
 */
public String host() {
    // already resolved in the constructor
    return host;
}

/**
 * @return The proxy port from the configuration if set, or from the "http.proxyPort" system property if
 * {@link Builder#useSystemPropertyValues(Boolean)} is set to true
 */
public int port() {
    // already resolved in the constructor
    return port;
}

/**
 * @return The proxy username from the configuration if set, or from the "http.proxyUser" system property if
 * {@link Builder#useSystemPropertyValues(Boolean)} is set to true
 * */
public String username() {
    // resolved lazily on each call, unlike host/port
    return resolveValue(username, ProxySystemSetting.PROXY_USERNAME);
}

/**
 * @return The proxy password from the configuration if set, or from the "http.proxyPassword" system property if
 * {@link Builder#useSystemPropertyValues(Boolean)} is set to true
 * */
public String password() {
    // resolved lazily on each call, unlike host/port
    return resolveValue(password, ProxySystemSetting.PROXY_PASSWORD);
}
/**
 * @return The set of hosts that should not be proxied. If the value is not set, the value present in the
 * "http.nonProxyHosts" system property is returned. If the system property is also not set, an unmodifiable
 * empty set is returned.
 */
public Set<String> nonProxyHosts() {
    // Boolean.TRUE.equals guards against an auto-unboxing NPE when
    // useSystemPropertyValues was never set on the builder (the field is a
    // nullable Boolean).
    Set<String> hosts = nonProxyHosts == null && Boolean.TRUE.equals(useSystemPropertyValues)
                        ? parseNonProxyHostsProperty()
                        : nonProxyHosts;
    return Collections.unmodifiableSet(hosts != null ? hosts : Collections.emptySet());
}
/**
 * Structural equality on scheme, host, port, username, password and nonProxyHosts.
 * Note: useSystemPropertyValues is intentionally left out, matching hashCode().
 */
@Override
public boolean equals(Object o) {
    if (this == o) {
        return true;
    }
    if (o == null || getClass() != o.getClass()) {
        return false;
    }

    ProxyConfiguration that = (ProxyConfiguration) o;

    if (port != that.port) {
        return false;
    }
    if (scheme != null ? !scheme.equals(that.scheme) : that.scheme != null) {
        return false;
    }
    if (host != null ? !host.equals(that.host) : that.host != null) {
        return false;
    }
    if (username != null ? !username.equals(that.username) : that.username != null) {
        return false;
    }
    if (password != null ? !password.equals(that.password) : that.password != null) {
        return false;
    }
    // BUG FIX: nonProxyHosts may be null (the builder does not default it), so the
    // original unconditional nonProxyHosts.equals(...) could throw NPE. Use the
    // same null-safe pattern as the other fields.
    return nonProxyHosts != null ? nonProxyHosts.equals(that.nonProxyHosts) : that.nonProxyHosts == null;
}
/**
 * Hash code consistent with {@link #equals(Object)}: folds scheme, host, port,
 * non-proxy hosts, username and password, each null-safely.
 */
@Override
public int hashCode() {
    int result = scheme != null ? scheme.hashCode() : 0;
    result = 31 * result + (host != null ? host.hashCode() : 0);
    result = 31 * result + port;
    // Bug fix: null-guard nonProxyHosts like the other nullable fields; the original threw a
    // NullPointerException when no non-proxy hosts were configured.
    result = 31 * result + (nonProxyHosts != null ? nonProxyHosts.hashCode() : 0);
    result = 31 * result + (username != null ? username.hashCode() : 0);
    result = 31 * result + (password != null ? password.hashCode() : 0);
    return result;
}
/**
 * Creates a {@link Builder} pre-populated with this configuration's current values, so a
 * modified copy can be built.
 */
@Override
public Builder toBuilder() {
    return new BuilderImpl(this);
}
/**
 * Builder for {@link ProxyConfiguration}.
 */
public interface Builder extends CopyableBuilder<Builder, ProxyConfiguration> {
    /**
     * Set the hostname of the proxy.
     *
     * @param host The proxy host.
     * @return This object for method chaining.
     */
    Builder host(String host);
    /**
     * Set the port that the proxy expects connections on.
     *
     * @param port The proxy port.
     * @return This object for method chaining.
     */
    Builder port(int port);
    /**
     * The HTTP scheme to use for connecting to the proxy. Valid values are {@code http} and {@code https}.
     * <p>
     * The client defaults to {@code http} if none is given.
     *
     * @param scheme The proxy scheme.
     * @return This object for method chaining.
     */
    Builder scheme(String scheme);
    /**
     * Set the set of hosts that should not be proxied. Any request whose host portion matches any of the patterns
     * given in the set will be sent to the remote host directly instead of through the proxy.
     *
     * @param nonProxyHosts The set of hosts that should not be proxied.
     * @return This object for method chaining.
     */
    Builder nonProxyHosts(Set<String> nonProxyHosts);
    /**
     * Set the username used to authenticate with the proxy.
     *
     * @param username The proxy username.
     * @return This object for method chaining.
     */
    Builder username(String username);
    /**
     * Set the password used to authenticate with the proxy.
     *
     * @param password The proxy password.
     * @return This object for method chaining.
     */
    Builder password(String password);
    /**
     * Set the option whether to use system property values from {@link ProxySystemSetting} if any of the config options
     * are missing. The value is set to "true" by default, which means the SDK will automatically use system property
     * values if options are not provided while building the {@link ProxyConfiguration} object. To disable this
     * behaviour, set this value to false.
     *
     * @param useSystemPropertyValues The option whether to use system property values
     * @return This object for method chaining.
     */
    Builder useSystemPropertyValues(Boolean useSystemPropertyValues);
}
/**
 * Resolves the proxy host: returns the given value when non-null, otherwise falls back to the
 * "http.proxyHost" system property when system-property lookup is enabled.
 */
private String resolveHost(String host) {
    return resolveValue(host, ProxySystemSetting.PROXY_HOST);
}
/**
 * Resolves the proxy port. A configured value of 0 means "unset"; in that case, when
 * system-property lookup is enabled, the "http.proxyPort" system property is consulted
 * (defaulting to 0 when absent).
 * NOTE(review): a non-numeric "http.proxyPort" value would make Integer::parseInt throw
 * NumberFormatException here — confirm this is the intended failure mode.
 */
private int resolvePort(int port) {
    return port == 0 && Boolean.TRUE.equals(useSystemPropertyValues) ?
           ProxySystemSetting.PROXY_PORT.getStringValue().map(Integer::parseInt).orElse(0) : port;
}
/**
 * Uses the configuration options and system setting property, and returns the final value of
 * the given member: the explicitly configured value when present; otherwise, if use of system
 * properties is enabled, the value of the given system setting (or null when absent).
 */
private String resolveValue(String value, ProxySystemSetting systemSetting) {
    if (value != null || !Boolean.TRUE.equals(useSystemPropertyValues)) {
        return value;
    }
    return systemSetting.getStringValue().orElse(null);
}
/**
 * Reads the "http.nonProxyHosts" system property ('|'-separated patterns) and converts it into
 * a set of lower-cased regular-expression patterns, turning each '*' wildcard into a
 * non-greedy ".*?". Returns an empty set when the property is absent or empty.
 */
private Set<String> parseNonProxyHostsProperty() {
    String rawHosts = ProxySystemSetting.NON_PROXY_HOSTS.getStringValue().orElse(null);
    if (rawHosts == null || rawHosts.isEmpty()) {
        return Collections.emptySet();
    }
    Set<String> patterns = new HashSet<>();
    for (String hostPattern : rawHosts.split("\\|")) {
        patterns.add(hostPattern.toLowerCase().replace("*", ".*?"));
    }
    return patterns;
}
/** Default (and only) {@link Builder} implementation: a mutable staging area for {@link ProxyConfiguration}. */
private static final class BuilderImpl implements Builder {
    private String scheme;
    private String host;
    // 0 means "unset"; presumably resolvePort(...) treats it as the trigger for the
    // system-property fallback — confirm in the ProxyConfiguration constructor.
    private int port = 0;
    private String username;
    private String password;
    private Set<String> nonProxyHosts;
    // Defaults to TRUE: system properties are consulted unless the caller explicitly opts out.
    private Boolean useSystemPropertyValues = Boolean.TRUE;
    private BuilderImpl() {
    }
    // Copy constructor backing ProxyConfiguration.toBuilder(); defensively copies the mutable set.
    private BuilderImpl(ProxyConfiguration proxyConfiguration) {
        this.useSystemPropertyValues = proxyConfiguration.useSystemPropertyValues;
        this.scheme = proxyConfiguration.scheme;
        this.host = proxyConfiguration.host;
        this.port = proxyConfiguration.port;
        this.nonProxyHosts = proxyConfiguration.nonProxyHosts != null ?
                             new HashSet<>(proxyConfiguration.nonProxyHosts) : null;
        this.username = proxyConfiguration.username;
        this.password = proxyConfiguration.password;
    }
    @Override
    public Builder scheme(String scheme) {
        this.scheme = scheme;
        return this;
    }
    @Override
    public Builder host(String host) {
        this.host = host;
        return this;
    }
    @Override
    public Builder port(int port) {
        this.port = port;
        return this;
    }
    // NOTE(review): passing null normalizes the field to an empty set, while the copy constructor
    // keeps null — confirm whether this asymmetry (empty set vs. null triggers the
    // system-property fallback differently in nonProxyHosts()) is intended.
    @Override
    public Builder nonProxyHosts(Set<String> nonProxyHosts) {
        if (nonProxyHosts != null) {
            this.nonProxyHosts = new HashSet<>(nonProxyHosts);
        } else {
            this.nonProxyHosts = Collections.emptySet();
        }
        return this;
    }
    @Override
    public Builder username(String username) {
        this.username = username;
        return this;
    }
    @Override
    public Builder password(String password) {
        this.password = password;
        return this;
    }
    @Override
    public Builder useSystemPropertyValues(Boolean useSystemPropertyValues) {
        this.useSystemPropertyValues = useSystemPropertyValues;
        return this;
    }
    // Bean-style alias for useSystemPropertyValues(Boolean); presumably kept for reflective /
    // bean-based configuration — confirm before removing.
    public void setUseSystemPropertyValues(Boolean useSystemPropertyValues) {
        useSystemPropertyValues(useSystemPropertyValues);
    }
    @Override
    public ProxyConfiguration build() {
        return new ProxyConfiguration(this);
    }
}
}
| |
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.deploy;
import java.util.HashSet;
import java.util.Set;
import com.cloud.dc.DataCenter;
import com.cloud.dc.Pod;
import com.cloud.exception.InsufficientCapacityException;
import com.cloud.exception.InsufficientServerCapacityException;
import com.cloud.exception.ResourceUnavailableException;
import com.cloud.host.Host;
import com.cloud.org.Cluster;
import com.cloud.storage.StoragePool;
import com.cloud.utils.component.Adapter;
import com.cloud.vm.VirtualMachine;
import com.cloud.vm.VirtualMachineProfile;
/**
 * A DeploymentPlanner decides where a virtual machine should run (which data center, pod,
 * cluster and host), honoring an {@link ExcludeList} of resources that must be avoided.
 */
public interface DeploymentPlanner extends Adapter {
    /**
     * plan is called to determine where a virtual machine should be running.
     *
     * @param vm
     *            virtual machine.
     * @param plan
     *            deployment plan that tells you where it's being deployed to.
     * @param avoid
     *            avoid these data centers, pods, clusters, or hosts.
     * @return DeployDestination for that virtual machine.
     */
    DeployDestination plan(VirtualMachineProfile<? extends VirtualMachine> vm, DeploymentPlan plan, ExcludeList avoid) throws InsufficientServerCapacityException;

    /**
     * check() is called right before the virtual machine starts to make sure
     * the host has enough capacity.
     *
     * @param vm
     *            virtual machine in question.
     * @param plan
     *            deployment plan used to determine the deploy destination.
     * @param dest
     *            destination returned by plan.
     * @param exclude
     *            what to avoid. If the method returns false, this list will include what
     *            should be excluded.
     * @return true if it's okay to start; false if not.
     */
    boolean check(VirtualMachineProfile<? extends VirtualMachine> vm, DeploymentPlan plan, DeployDestination dest, ExcludeList exclude);

    /**
     * canHandle is called before plan to determine if the planner can do the allocation.
     * Planners should be exclusive, so the planner writer must make sure only one planner's
     * canHandle returns true in the planner list.
     *
     * @param vm
     *            virtual machine.
     * @param plan
     *            deployment plan that tells you where it's being deployed to.
     * @param avoid
     *            avoid these data centers, pods, clusters, or hosts.
     * @return true if it's okay to allocate; false if not.
     */
    boolean canHandle(VirtualMachineProfile<? extends VirtualMachine> vm, DeploymentPlan plan, ExcludeList avoid);

    /** Strategies for choosing among candidate deployment destinations. */
    public enum AllocationAlgorithm {
        random,
        firstfit,
        userdispersing,
        userconcentratedpod_random,
        userconcentratedpod_firstfit;
    }

    /**
     * Accumulates the ids of data centers, pods, clusters, hosts and storage pools that a
     * planner must avoid. Each id set is allocated lazily; a null set means nothing of that
     * kind is excluded.
     */
    public static class ExcludeList {
        private Set<Long> _dcIds;
        private Set<Long> _podIds;
        private Set<Long> _clusterIds;
        private Set<Long> _hostIds;
        private Set<Long> _poolIds;

        public ExcludeList() {
        }

        public ExcludeList(Set<Long> _dcIds, Set<Long> _podIds, Set<Long> _clusterIds, Set<Long> _hostIds, Set<Long> _poolIds) {
            this._dcIds = _dcIds;
            this._podIds = _podIds;
            this._clusterIds = _clusterIds;
            // Bug fix: _hostIds was previously never assigned, silently dropping any host
            // exclusions supplied through this constructor.
            this._hostIds = _hostIds;
            this._poolIds = _poolIds;
        }

        /**
         * Records the exclusion implied by the exception's scope (host, pod, data center,
         * cluster or storage pool).
         *
         * @return true if the scope was recognized and recorded; false otherwise.
         */
        public boolean add(InsufficientCapacityException e) {
            Class<?> scope = e.getScope();
            if (scope == null) {
                return false;
            }
            if (Host.class.isAssignableFrom(scope)) {
                addHost(e.getId());
            } else if (Pod.class.isAssignableFrom(scope)) {
                addPod(e.getId());
            } else if (DataCenter.class.isAssignableFrom(scope)) {
                addDataCenter(e.getId());
            } else if (Cluster.class.isAssignableFrom(scope)) {
                addCluster(e.getId());
            } else if (StoragePool.class.isAssignableFrom(scope)) {
                addPool(e.getId());
            } else {
                return false;
            }
            return true;
        }

        /**
         * Records the exclusion implied by the exception's scope (host, pod, data center,
         * cluster or storage pool).
         *
         * @return true if the scope was recognized and recorded; false otherwise.
         */
        public boolean add(ResourceUnavailableException e) {
            Class<?> scope = e.getScope();
            if (scope == null) {
                return false;
            }
            if (Host.class.isAssignableFrom(scope)) {
                addHost(e.getResourceId());
            } else if (Pod.class.isAssignableFrom(scope)) {
                addPod(e.getResourceId());
            } else if (DataCenter.class.isAssignableFrom(scope)) {
                addDataCenter(e.getResourceId());
            } else if (Cluster.class.isAssignableFrom(scope)) {
                addCluster(e.getResourceId());
            } else if (StoragePool.class.isAssignableFrom(scope)) {
                addPool(e.getResourceId());
            } else {
                return false;
            }
            return true;
        }

        /** Excludes the given storage pool. */
        public void addPool(long poolId) {
            if (_poolIds == null) {
                _poolIds = new HashSet<Long>();
            }
            _poolIds.add(poolId);
        }

        /** Excludes the given data center. */
        public void addDataCenter(long dataCenterId) {
            if (_dcIds == null) {
                _dcIds = new HashSet<Long>();
            }
            _dcIds.add(dataCenterId);
        }

        /** Excludes the given pod. */
        public void addPod(long podId) {
            if (_podIds == null) {
                _podIds = new HashSet<Long>();
            }
            _podIds.add(podId);
        }

        /** Excludes the given cluster. */
        public void addCluster(long clusterId) {
            if (_clusterIds == null) {
                _clusterIds = new HashSet<Long>();
            }
            _clusterIds.add(clusterId);
        }

        /** Excludes the given host. */
        public void addHost(long hostId) {
            if (_hostIds == null) {
                _hostIds = new HashSet<Long>();
            }
            _hostIds.add(hostId);
        }

        /** @return true if the host — or its enclosing data center, pod or cluster — is excluded. */
        public boolean shouldAvoid(Host host) {
            if (_dcIds != null && _dcIds.contains(host.getDataCenterId())) {
                return true;
            }
            if (_podIds != null && _podIds.contains(host.getPodId())) {
                return true;
            }
            if (_clusterIds != null && _clusterIds.contains(host.getClusterId())) {
                return true;
            }
            if (_hostIds != null && _hostIds.contains(host.getId())) {
                return true;
            }
            return false;
        }

        /** @return true if the cluster — or its enclosing data center or pod — is excluded. */
        public boolean shouldAvoid(Cluster cluster) {
            if (_dcIds != null && _dcIds.contains(cluster.getDataCenterId())) {
                return true;
            }
            if (_podIds != null && _podIds.contains(cluster.getPodId())) {
                return true;
            }
            if (_clusterIds != null && _clusterIds.contains(cluster.getId())) {
                return true;
            }
            return false;
        }

        /** @return true if the pod — or its enclosing data center — is excluded. */
        public boolean shouldAvoid(Pod pod) {
            if (_dcIds != null && _dcIds.contains(pod.getDataCenterId())) {
                return true;
            }
            if (_podIds != null && _podIds.contains(pod.getId())) {
                return true;
            }
            return false;
        }

        /** @return true if the pool — or its enclosing data center, pod or cluster — is excluded. */
        public boolean shouldAvoid(StoragePool pool) {
            if (_dcIds != null && _dcIds.contains(pool.getDataCenterId())) {
                return true;
            }
            if (_podIds != null && _podIds.contains(pool.getPodId())) {
                return true;
            }
            if (_clusterIds != null && _clusterIds.contains(pool.getClusterId())) {
                return true;
            }
            if (_poolIds != null && _poolIds.contains(pool.getId())) {
                return true;
            }
            return false;
        }

        /** @return true if the data center is excluded. */
        public boolean shouldAvoid(DataCenter dc) {
            if (_dcIds != null && _dcIds.contains(dc.getId())) {
                return true;
            }
            return false;
        }

        public Set<Long> getDataCentersToAvoid() {
            return _dcIds;
        }

        public Set<Long> getPodsToAvoid() {
            return _podIds;
        }

        public Set<Long> getClustersToAvoid() {
            return _clusterIds;
        }

        public Set<Long> getHostsToAvoid() {
            return _hostIds;
        }

        public Set<Long> getPoolsToAvoid() {
            return _poolIds;
        }
    }
}
| |
/*
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.batik.ext.awt.geom;
import java.awt.Rectangle;
import java.awt.Shape;
import java.awt.Polygon;
import java.awt.Point;
import java.awt.geom.AffineTransform;
import java.awt.geom.PathIterator;
import java.awt.geom.GeneralPath;
import java.awt.geom.Point2D;
import java.awt.geom.Rectangle2D;
import java.io.Serializable;
/**
 * This class is a Polygon with float coordinates.
 * <p>
 * Point data is held in the parallel {@link #xpoints}/{@link #ypoints} arrays; a cached
 * {@link GeneralPath} ({@code path}) plus a lazily closed copy ({@code closedPath}) back the
 * {@link Shape} operations.
 *
 * @version $Id: Polygon2D.java 594018 2007-11-12 04:17:41Z cam $
 */
public class Polygon2D implements Shape, Cloneable, Serializable {
    /**
     * The total number of points. The value of <code>npoints</code>
     * represents the number of valid points in this <code>Polygon</code>.
     */
    public int npoints;
    /**
     * The array of <i>x</i> coordinates. The value of {@link #npoints npoints} is equal to the
     * number of points in this <code>Polygon2D</code>.
     */
    public float[] xpoints;
    /**
     * The array of <i>y</i> coordinates. The value of {@link #npoints npoints} is equal to the
     * number of points in this <code>Polygon2D</code>.
     * (Doc fix: this previously claimed to hold <i>x</i> coordinates.)
     */
    public float[] ypoints;
    /**
     * Bounds of the Polygon2D. Null until the first point is added.
     * @see #getBounds()
     */
    protected Rectangle2D bounds;
    // Open path through the points; rebuilt by calculatePath(), extended by updatePath().
    private GeneralPath path;
    // Closed copy of 'path', built lazily by updateComputingPath() and invalidated on mutation.
    private GeneralPath closedPath;
    /**
     * Creates an empty Polygon2D.
     */
    public Polygon2D() {
        xpoints = new float[4];
        ypoints = new float[4];
    }
    /**
     * Constructs and initializes a <code>Polygon2D</code> from the specified
     * Rectangle2D, using its four corners in clockwise order starting at (minX, minY).
     * @param rec the Rectangle2D
     * @exception IndexOutOfBoundsException rec is <code>null</code>.
     * NOTE(review): despite earlier docs claiming NullPointerException, the code throws
     * IndexOutOfBoundsException for a null argument — confirm before changing the type.
     */
    public Polygon2D(Rectangle2D rec) {
        if (rec == null) {
            throw new IndexOutOfBoundsException("null Rectangle");
        }
        npoints = 4;
        xpoints = new float[4];
        ypoints = new float[4];
        xpoints[0] = (float)rec.getMinX();
        ypoints[0] = (float)rec.getMinY();
        xpoints[1] = (float)rec.getMaxX();
        ypoints[1] = (float)rec.getMinY();
        xpoints[2] = (float)rec.getMaxX();
        ypoints[2] = (float)rec.getMaxY();
        xpoints[3] = (float)rec.getMinX();
        ypoints[3] = (float)rec.getMaxY();
        calculatePath();
    }
    /**
     * Constructs and initializes a <code>Polygon2D</code> from the specified
     * Polygon, converting its int coordinates to float.
     * @param pol the Polygon
     * @exception IndexOutOfBoundsException pol is <code>null</code>.
     * NOTE(review): as above, a null argument raises IndexOutOfBoundsException, not
     * NullPointerException — confirm before changing the type.
     */
    public Polygon2D(Polygon pol) {
        if (pol == null) {
            throw new IndexOutOfBoundsException("null Polygon");
        }
        this.npoints = pol.npoints;
        this.xpoints = new float[pol.npoints];
        this.ypoints = new float[pol.npoints];
        for (int i = 0; i < pol.npoints; i++) {
            xpoints[i] = pol.xpoints[i];
            ypoints[i] = pol.ypoints[i];
        }
        calculatePath();
    }
    /**
     * Constructs and initializes a <code>Polygon2D</code> from the specified
     * parameters. The coordinate arrays are copied, not retained.
     * @param xpoints an array of <i>x</i> coordinates
     * @param ypoints an array of <i>y</i> coordinates
     * @param npoints the total number of points in the <code>Polygon2D</code>
     * @exception NegativeArraySizeException if the value of
     *                       <code>npoints</code> is negative.
     * @exception IndexOutOfBoundsException if <code>npoints</code> is
     *                       greater than the length of <code>xpoints</code>
     *                       or the length of <code>ypoints</code>.
     * @exception NullPointerException if <code>xpoints</code> or
     *                       <code>ypoints</code> is <code>null</code>.
     */
    public Polygon2D(float[] xpoints, float[] ypoints, int npoints) {
        if (npoints > xpoints.length || npoints > ypoints.length) {
            throw new IndexOutOfBoundsException("npoints > xpoints.length || npoints > ypoints.length");
        }
        this.npoints = npoints;
        this.xpoints = new float[npoints];
        this.ypoints = new float[npoints];
        System.arraycopy(xpoints, 0, this.xpoints, 0, npoints);
        System.arraycopy(ypoints, 0, this.ypoints, 0, npoints);
        calculatePath();
    }
    /**
     * Constructs and initializes a <code>Polygon2D</code> from the specified
     * parameters. The int coordinates are widened to float; arrays are copied, not retained.
     * @param xpoints an array of <i>x</i> coordinates
     * @param ypoints an array of <i>y</i> coordinates
     * @param npoints the total number of points in the <code>Polygon2D</code>
     * @exception NegativeArraySizeException if the value of
     *                       <code>npoints</code> is negative.
     * @exception IndexOutOfBoundsException if <code>npoints</code> is
     *                       greater than the length of <code>xpoints</code>
     *                       or the length of <code>ypoints</code>.
     * @exception NullPointerException if <code>xpoints</code> or
     *                       <code>ypoints</code> is <code>null</code>.
     */
    public Polygon2D(int[] xpoints, int[] ypoints, int npoints) {
        if (npoints > xpoints.length || npoints > ypoints.length) {
            throw new IndexOutOfBoundsException("npoints > xpoints.length || npoints > ypoints.length");
        }
        this.npoints = npoints;
        this.xpoints = new float[npoints];
        this.ypoints = new float[npoints];
        for (int i = 0; i < npoints; i++) {
            this.xpoints[i] = xpoints[i];
            this.ypoints[i] = ypoints[i];
        }
        calculatePath();
    }
    /**
     * Resets this <code>Polygon</code> object to an empty polygon.
     * The coordinate arrays are retained (npoints is simply zeroed).
     */
    public void reset() {
        npoints = 0;
        bounds = null;
        path = new GeneralPath();
        closedPath = null;
    }
    // Deep copy built by re-adding every point; note it does not call super.clone().
    public Object clone() {
        Polygon2D pol = new Polygon2D();
        for (int i = 0; i < npoints; i++) {
            pol.addPoint(xpoints[i], ypoints[i]);
        }
        return pol;
    }
    // Rebuilds 'path' and 'bounds' from scratch; assumes npoints >= 1 (all constructors that
    // call this have at least one point). Invalidates the cached closed path.
    private void calculatePath() {
        path = new GeneralPath();
        path.moveTo(xpoints[0], ypoints[0]);
        for (int i = 1; i < npoints; i++) {
            path.lineTo(xpoints[i], ypoints[i]);
        }
        bounds = path.getBounds2D();
        closedPath = null;
    }
    // Incrementally extends 'path' with one point and grows 'bounds' to cover it;
    // starts a fresh path on the first point. Invalidates the cached closed path.
    private void updatePath(float x, float y) {
        closedPath = null;
        if (path == null) {
            path = new GeneralPath(GeneralPath.WIND_EVEN_ODD);
            path.moveTo(x, y);
            bounds = new Rectangle2D.Float(x, y, 0, 0);
        } else {
            path.lineTo(x, y);
            float _xmax = (float)bounds.getMaxX();
            float _ymax = (float)bounds.getMaxY();
            float _xmin = (float)bounds.getMinX();
            float _ymin = (float)bounds.getMinY();
            if (x < _xmin) _xmin = x;
            else if (x > _xmax) _xmax = x;
            if (y < _ymin) _ymin = y;
            else if (y > _ymax) _ymax = y;
            bounds = new Rectangle2D.Float(_xmin, _ymin, _xmax - _xmin, _ymax - _ymin);
        }
    }
    /* get the associated {@link Polyline2D}; the first point is appended again so the
     * polyline traces the closed outline of this polygon.
     */
    public Polyline2D getPolyline2D() {
        Polyline2D pol = new Polyline2D( xpoints, ypoints, npoints );
        pol.addPoint( xpoints[0], ypoints[0]);
        return pol;
    }
    // Converts to an int-coordinate java.awt.Polygon by truncation.
    public Polygon getPolygon() {
        int[] _xpoints = new int[npoints];
        int[] _ypoints = new int[npoints];
        for (int i = 0; i < npoints; i++) {
            _xpoints[i] = (int)xpoints[i]; // todo maybe rounding is better ?
            _ypoints[i] = (int)ypoints[i];
        }
        return new Polygon(_xpoints, _ypoints, npoints);
    }
    // Convenience overload: appends the point's (x, y), narrowing double to float.
    public void addPoint(Point2D p) {
        addPoint((float)p.getX(), (float)p.getY());
    }
    /**
     * Appends the specified coordinates to this <code>Polygon2D</code>,
     * doubling the backing arrays when full.
     * @param x the specified x coordinate
     * @param y the specified y coordinate
     */
    public void addPoint(float x, float y) {
        if (npoints == xpoints.length) {
            float[] tmp;
            tmp = new float[npoints * 2];
            System.arraycopy(xpoints, 0, tmp, 0, npoints);
            xpoints = tmp;
            tmp = new float[npoints * 2];
            System.arraycopy(ypoints, 0, tmp, 0, npoints);
            ypoints = tmp;
        }
        xpoints[npoints] = x;
        ypoints[npoints] = y;
        npoints++;
        updatePath(x, y);
    }
    /**
     * Determines whether the specified {@link Point} is inside this
     * <code>Polygon</code>.
     * @param p the specified <code>Point</code> to be tested
     * @return <code>true</code> if the <code>Polygon</code> contains the
     *                  <code>Point</code>; <code>false</code> otherwise.
     * @see #contains(double, double)
     */
    public boolean contains(Point p) {
        return contains(p.x, p.y);
    }
    /**
     * Determines whether the specified coordinates are inside this
     * <code>Polygon</code>.
     * <p>
     * @param x the specified x coordinate to be tested
     * @param y the specified y coordinate to be tested
     * @return <code>true</code> if this <code>Polygon</code> contains
     *                  the specified coordinates, (<i>x</i>, <i>y</i>);
     *                  <code>false</code> otherwise.
     */
    public boolean contains(int x, int y) {
        return contains((double) x, (double) y);
    }
    /**
     * Returns the high precision bounding box of the {@link Shape}.
     * May be null for an empty polygon.
     * @return a {@link Rectangle2D} that precisely
     *         bounds the <code>Shape</code>.
     */
    public Rectangle2D getBounds2D() {
        return bounds;
    }
    // Integer bounding box; null for an empty polygon (before any point is added).
    public Rectangle getBounds() {
        if (bounds == null) return null;
        else return bounds.getBounds();
    }
    /**
     * Determines if the specified coordinates are inside this
     * <code>Polygon</code>. For the definition of
     * <i>insideness</i>, see the class comments of {@link Shape}.
     * Polygons with fewer than three points never contain anything.
     * @param x the specified x coordinate
     * @param y the specified y coordinate
     * @return <code>true</code> if the <code>Polygon</code> contains the
     *         specified coordinates; <code>false</code> otherwise.
     */
    public boolean contains(double x, double y) {
        if (npoints <= 2 || !bounds.contains(x, y)) {
            return false;
        }
        updateComputingPath();
        return closedPath.contains(x, y);
    }
    // Lazily builds the closed copy of 'path' used by the hit-testing methods.
    private void updateComputingPath() {
        if (npoints >= 1) {
            if (closedPath == null) {
                closedPath = (GeneralPath)path.clone();
                closedPath.closePath();
            }
        }
    }
    /**
     * Tests if a specified {@link Point2D} is inside the boundary of this
     * <code>Polygon</code>.
     * @param p a specified <code>Point2D</code>
     * @return <code>true</code> if this <code>Polygon</code> contains the
     *                 specified <code>Point2D</code>; <code>false</code>
     *          otherwise.
     * @see #contains(double, double)
     */
    public boolean contains(Point2D p) {
        return contains(p.getX(), p.getY());
    }
    /**
     * Tests if the interior of this <code>Polygon</code> intersects the
     * interior of a specified set of rectangular coordinates.
     * @param x the x coordinate of the specified rectangular
     *                  shape's top-left corner
     * @param y the y coordinate of the specified rectangular
     *                  shape's top-left corner
     * @param w the width of the specified rectangular shape
     * @param h the height of the specified rectangular shape
     * @return <code>true</code> if the interior of this
     *                  <code>Polygon</code> and the interior of the
     *                  specified set of rectangular
     *                  coordinates intersect each other;
     *                  <code>false</code> otherwise.
     */
    public boolean intersects(double x, double y, double w, double h) {
        if (npoints <= 0 || !bounds.intersects(x, y, w, h)) {
            return false;
        }
        updateComputingPath();
        return closedPath.intersects(x, y, w, h);
    }
    /**
     * Tests if the interior of this <code>Polygon</code> intersects the
     * interior of a specified <code>Rectangle2D</code>.
     * @param r a specified <code>Rectangle2D</code>
     * @return <code>true</code> if this <code>Polygon</code> and the
     *                  interior of the specified <code>Rectangle2D</code>
     *                  intersect each other; <code>false</code>
     *                  otherwise.
     */
    public boolean intersects(Rectangle2D r) {
        return intersects(r.getX(), r.getY(), r.getWidth(), r.getHeight());
    }
    /**
     * Tests if the interior of this <code>Polygon</code> entirely
     * contains the specified set of rectangular coordinates.
     * @param x the x coordinate of the top-left corner of the
     *                  specified set of rectangular coordinates
     * @param y the y coordinate of the top-left corner of the
     *                  specified set of rectangular coordinates
     * @param w the width of the set of rectangular coordinates
     * @param h the height of the set of rectangular coordinates
     * @return <code>true</code> if this <code>Polygon</code> entirely
     *                  contains the specified set of rectangular
     *                  coordinates; <code>false</code> otherwise.
     */
    public boolean contains(double x, double y, double w, double h) {
        if (npoints <= 0 || !bounds.intersects(x, y, w, h)) {
            return false;
        }
        updateComputingPath();
        return closedPath.contains(x, y, w, h);
    }
    /**
     * Tests if the interior of this <code>Polygon</code> entirely
     * contains the specified <code>Rectangle2D</code>.
     * @param r the specified <code>Rectangle2D</code>
     * @return <code>true</code> if this <code>Polygon</code> entirely
     *                  contains the specified <code>Rectangle2D</code>;
     *                  <code>false</code> otherwise.
     * @see #contains(double, double, double, double)
     */
    public boolean contains(Rectangle2D r) {
        return contains(r.getX(), r.getY(), r.getWidth(), r.getHeight());
    }
    /**
     * Returns an iterator object that iterates along the boundary of this
     * <code>Polygon</code> and provides access to the geometry
     * of the outline of this <code>Polygon</code>. An optional
     * {@link AffineTransform} can be specified so that the coordinates
     * returned in the iteration are transformed accordingly.
     * @param at an optional <code>AffineTransform</code> to be applied to the
     *                  coordinates as they are returned in the iteration, or
     *                  <code>null</code> if untransformed coordinates are desired
     * @return a {@link PathIterator} object that provides access to the
     *                  geometry of this <code>Polygon</code>.
     */
    public PathIterator getPathIterator(AffineTransform at) {
        updateComputingPath();
        if (closedPath == null) return null;
        else return closedPath.getPathIterator(at);
    }
    /**
     * Returns an iterator object that iterates along the boundary of
     * the <code>Polygon2D</code> and provides access to the geometry of the
     * outline of the <code>Shape</code>. Only SEG_MOVETO, SEG_LINETO, and
     * SEG_CLOSE point types are returned by the iterator.
     * Since polygons are already flat, the <code>flatness</code> parameter
     * is ignored.
     * @param at an optional <code>AffineTransform</code> to be applied to the
     *                  coordinates as they are returned in the iteration, or
     *                  <code>null</code> if untransformed coordinates are desired
     * @param flatness the maximum amount that the control points
     *                  for a given curve can vary from colinear before a subdivided
     *                  curve is replaced by a straight line connecting the
     *                  endpoints. Since polygons are already flat the
     *                  <code>flatness</code> parameter is ignored.
     * @return a <code>PathIterator</code> object that provides access to the
     *                  <code>Shape</code> object's geometry.
     */
    public PathIterator getPathIterator(AffineTransform at, double flatness) {
        return getPathIterator(at);
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ngrinder.perftest.service;
import net.grinder.SingleConsole;
import net.grinder.console.model.ConsoleProperties;
import org.h2.util.StringUtils;
import org.ngrinder.infra.config.Config;
import org.ngrinder.perftest.model.NullSingleConsole;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import javax.annotation.PostConstruct;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.TimeUnit;
import static net.grinder.util.NetworkUtils.getAvailablePorts;
import static org.ngrinder.common.constant.ControllerConstants.*;
import static org.ngrinder.common.util.ExceptionUtils.processException;
import static org.ngrinder.common.util.NoOp.noOp;
/**
* Console manager is responsible for console instance management.
* <p/>
* A number of consoles(specified in ngrinder.maxConcurrentTest in system.conf) are pooled. Actually console itself is
* not pooled but the {@link ConsoleEntry} which contains console information are pooled internally. Whenever a user
* requires a new console, it gets the one {@link ConsoleEntry} from the pool and creates new console with the
* {@link ConsoleEntry}. Currently using consoles are kept in {@link #consoleInUse} member variable.
*
* @author JunHo Yoon
* @since 3.0
*/
@Component
public class ConsoleManager {
    // Upper bound of the port range scanned when looking for free console ports.
    private static final int MAX_PORT_NUMBER = 65000;
    private static final Logger LOG = LoggerFactory.getLogger(ConsoleManager.class);
    // Pool of free console entries, sized by getConsoleSize().
    private volatile ArrayBlockingQueue<ConsoleEntry> consoleQueue;
    // Consoles currently handed out to running tests.
    private volatile List<SingleConsole> consoleInUse = Collections.synchronizedList(new ArrayList<SingleConsole>());
    @Autowired
    private Config config;
    @Autowired
    private AgentManager agentManager;

    /**
     * Prepare the console queue: find available ports and pre-occupy a socket on each one so that
     * other processes cannot grab the port before a console is actually started on it.
     */
    @PostConstruct
    public void init() {
        int consoleSize = getConsoleSize();
        consoleQueue = new ArrayBlockingQueue<ConsoleEntry>(consoleSize);
        final String currentIP = config.getCurrentIP();
        for (int each : getAvailablePorts(currentIP, consoleSize, getConsolePortBase(), MAX_PORT_NUMBER)) {
            final ConsoleEntry e = new ConsoleEntry(config.getCurrentIP(), each);
            try {
                e.occupySocket();
                consoleQueue.add(e);
            } catch (Exception ex) {
                // Bug fix: pass the caught exception to the logger so the binding failure's
                // cause and stack trace are not silently swallowed.
                LOG.error("socket binding to {}:{} is failed", config.getCurrentIP(), each, ex);
            }
        }
    }

    /**
     * Get the base port number of console.
     * <p/>
     * It can be specified at ngrinder.consolePortBase in system.conf. Each console will be created from that port.
     *
     * @return base port number
     */
    protected int getConsolePortBase() {
        return config.getControllerProperties().getPropertyInt(PROP_CONTROLLER_CONSOLE_PORT_BASE);
    }

    /**
     * Get the console pool size. It can be specified at ngrinder.maxConcurrentTest in system.conf.
     *
     * @return console size.
     */
    protected int getConsoleSize() {
        return config.getControllerProperties().getPropertyInt(PROP_CONTROLLER_MAX_CONCURRENT_TEST);
    }

    /**
     * Get the maximum time to wait for a free console.
     * (Doc fix: the configured value is in milliseconds, not seconds.)
     *
     * @return timeout in milliseconds, taken from ngrinder.maxConnectionWaitingMillisecond
     */
    protected long getMaxWaitingMilliSecond() {
        return config.getControllerProperties().getPropertyInt(PROP_CONTROLLER_MAX_CONNECTION_WAITING_MILLISECOND);
    }

    /**
     * Get an available console.
     * <p/>
     * If there is no available console, it waits until available console is returned back. If the specific time is
     * elapsed, the timeout error occurs and throws {@link org.ngrinder.common.exception.NGrinderRuntimeException} . The
     * timeout can be adjusted by overriding {@link #getMaxWaitingMilliSecond()}.
     *
     * @param baseConsoleProperties base {@link net.grinder.console.model.ConsoleProperties}
     * @return console
     */
    public SingleConsole getAvailableConsole(ConsoleProperties baseConsoleProperties) {
        ConsoleEntry consoleEntry = null;
        try {
            consoleEntry = consoleQueue.poll(getMaxWaitingMilliSecond(), TimeUnit.MILLISECONDS);
            if (consoleEntry == null) {
                throw processException("no console entry available");
            }
            synchronized (this) {
                // Release the placeholder socket so the new console can bind the port itself.
                consoleEntry.releaseSocket();
                // FIXME : It might fail here
                SingleConsole singleConsole = new SingleConsole(config.getCurrentIP(), consoleEntry.getPort(),
                        baseConsoleProperties);
                getConsoleInUse().add(singleConsole);
                return singleConsole;
            }
        } catch (Exception e) {
            // Return the entry to the pool so the port is not lost when console creation fails.
            if (consoleEntry != null) {
                consoleQueue.add(consoleEntry);
            }
            throw processException("no console entry available");
        }
    }

    /**
     * Return back the given console.
     * <p/>
     * Duplicated returns is allowed.
     *
     * @param testIdentifier test identifier
     * @param console console which will be returned back.
     */
    public void returnBackConsole(String testIdentifier, SingleConsole console) {
        if (console == null || console instanceof NullSingleConsole) {
            LOG.error("Attempt to return back null console for {}.", testIdentifier);
            return;
        }
        try {
            console.sendStopMessageToAgents();
        } catch (Exception e) {
            LOG.error("Exception occurred during console return back for test {}.",
                    testIdentifier, e);
            // But the port is getting back.
        } finally {
            // This is very careful implementation..
            try {
                // Wait console is completely shutdown...
                console.waitUntilAllAgentDisconnected();
            } catch (Exception e) {
                LOG.error("Exception occurred during console return back for test {}.",
                        testIdentifier, e);
                // If it's not disconnected still, stop them by force.
                agentManager.stopAgent(console.getConsolePort());
            }
            try {
                console.shutdown();
            } catch (Exception e) {
                LOG.error("Exception occurred during console return back for test {}.",
                        testIdentifier, e);
            }
            int consolePort;
            String consoleIP;
            try {
                consolePort = console.getConsolePort();
                consoleIP = console.getConsoleIP();
                ConsoleEntry consoleEntry = new ConsoleEntry(consoleIP, consolePort);
                synchronized (this) {
                    // Re-occupy the port and put the entry back only once, even when the same
                    // console is returned multiple times.
                    if (!consoleQueue.contains(consoleEntry)) {
                        consoleEntry.occupySocket();
                        consoleQueue.add(consoleEntry);
                        if (!getConsoleInUse().contains(console)) {
                            LOG.error("Try to return back the not used console on {} port", consolePort);
                        }
                        getConsoleInUse().remove(console);
                    }
                }
            } catch (Exception e) {
                // Best-effort cleanup: a failure here only means the port stays out of the pool.
                noOp();
            }
        }
    }

    /**
     * Get the list of {@link SingleConsole} which are used.
     *
     * @return {@link SingleConsole} list in use
     */
    public List<SingleConsole> getConsoleInUse() {
        return consoleInUse;
    }

    /**
     * Get the size of currently available consoles.
     *
     * @return size of available consoles.
     */
    public Integer getAvailableConsoleSize() {
        return consoleQueue.size();
    }

    /**
     * Get the {@link SingleConsole} instance which is using the given port.
     *
     * @param port port which the console is using
     * @return {@link SingleConsole} instance if found. Otherwise, {@link NullSingleConsole} instance.
     */
    public SingleConsole getConsoleUsingPort(Integer port) {
        String currentIP = config.getCurrentIP();
        for (SingleConsole each : consoleInUse) {
            // Avoid to Klocwork error.
            if (each instanceof NullSingleConsole) {
                continue;
            }
            if (StringUtils.equals(each.getConsoleIP(), currentIP) && each.getConsolePort() == port) {
                return each;
            }
        }
        return new NullSingleConsole();
    }
}
| |
/*
* LexicalUnitImpl.java
*
* Steady State CSS2 Parser
*
* Copyright (C) 1999, 2002 Steady State Software Ltd. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*
* To contact the authors of the library, write to Steady State Software Ltd.,
* 49 Littleworth, Wing, Buckinghamshire, LU7 0JX, England
*
* http://www.steadystate.com/css/
* mailto:css@steadystate.co.uk
*
* $Id: LexicalUnitImpl.java,v 1.5 2008/02/24 13:37:09 xamjadmin Exp $
*/
package com.steadystate.css.parser;
import java.io.Serializable;
import org.w3c.css.sac.*;
/**
*
* @author David Schweinsberg
* @version $Release$
*/
public class LexicalUnitImpl implements LexicalUnit, Serializable {

    /** Unit type; one of the {@code LexicalUnit.SAC_*} constants. */
    private short _type;
    /** Next/previous sibling in the lexical value chain. */
    private LexicalUnit _next;
    private LexicalUnit _prev;
    /** Numeric payload; integer values also live in this shared slot (see {@link #getIntegerValue()}). */
    private float _floatVal;
    /** Unit text for {@code SAC_DIMENSION} values. */
    private String _dimension;
    /** Function name for function-style units. */
    private String _function;
    /** Head of the parameter chain for function-style units. */
    private LexicalUnit _params;
    /** Textual payload for string-style units. */
    private String _stringVal;

    /**
     * Base constructor: records the type and links this unit after {@code previous}
     * in the sibling chain (note: mutates the previous unit's next pointer).
     */
    protected LexicalUnitImpl(LexicalUnit previous, short type) {
        _type = type;
        _prev = previous;
        if (_prev != null) {
            ((LexicalUnitImpl) _prev)._next = this;
        }
    }

    /** Integer value. */
    protected LexicalUnitImpl(LexicalUnit previous, int value) {
        this(previous, SAC_INTEGER);
        _floatVal = value;
    }

    /** Dimension with a known unit type. */
    protected LexicalUnitImpl(LexicalUnit previous, short type, float value) {
        this(previous, type);
        _floatVal = value;
    }

    /** Dimension with an unknown (free-form) unit. */
    protected LexicalUnitImpl(
            LexicalUnit previous,
            short type,
            String dimension,
            float value) {
        this(previous, type);
        _dimension = dimension;
        _floatVal = value;
    }

    /** String-valued unit. */
    protected LexicalUnitImpl(LexicalUnit previous, short type, String value) {
        this(previous, type);
        _stringVal = value;
    }

    /** Function-style unit with a parameter chain. */
    protected LexicalUnitImpl(
            LexicalUnit previous,
            short type,
            String name,
            LexicalUnit params) {
        this(previous, type);
        _function = name;
        _params = params;
    }

    public short getLexicalUnitType() {
        return _type;
    }

    public LexicalUnit getNextLexicalUnit() {
        return _next;
    }

    public LexicalUnit getPreviousLexicalUnit() {
        return _prev;
    }

    public int getIntegerValue() {
        // Integers share the float slot, so truncate back to int on read.
        return (int) _floatVal;
    }

    public float getFloatValue() {
        return _floatVal;
    }

    /** Returns the CSS unit suffix for dimension types, or "" for non-dimension types. */
    public String getDimensionUnitText() {
        switch (_type) {
        case SAC_EM: return "em";
        case SAC_EX: return "ex";
        case SAC_PIXEL: return "px";
        case SAC_INCH: return "in";
        case SAC_CENTIMETER: return "cm";
        case SAC_MILLIMETER: return "mm";
        case SAC_POINT: return "pt";
        case SAC_PICA: return "pc";
        case SAC_PERCENTAGE: return "%";
        case SAC_DEGREE: return "deg";
        case SAC_GRADIAN: return "grad";
        case SAC_RADIAN: return "rad";
        case SAC_MILLISECOND: return "ms";
        case SAC_SECOND: return "s";
        case SAC_HERTZ: return "Hz";
        case SAC_KILOHERTZ: return "kHz";
        case SAC_DIMENSION: return _dimension;
        }
        return "";
    }

    public String getFunctionName() {
        return _function;
    }

    public LexicalUnit getParameters() {
        return _params;
    }

    public String getStringValue() {
        return _stringVal;
    }

    public LexicalUnit getSubValues() {
        return _params;
    }

    /** Serializes this unit back to CSS source text; unknown types yield "". */
    public String toString() {
        StringBuilder sb = new StringBuilder();
        switch (_type) {
        case SAC_OPERATOR_COMMA: sb.append(","); break;
        case SAC_OPERATOR_PLUS: sb.append("+"); break;
        case SAC_OPERATOR_MINUS: sb.append("-"); break;
        case SAC_OPERATOR_MULTIPLY: sb.append("*"); break;
        case SAC_OPERATOR_SLASH: sb.append("/"); break;
        case SAC_OPERATOR_MOD: sb.append("%"); break;
        case SAC_OPERATOR_EXP: sb.append("^"); break;
        case SAC_OPERATOR_LT: sb.append("<"); break;
        case SAC_OPERATOR_GT: sb.append(">"); break;
        case SAC_OPERATOR_LE: sb.append("<="); break;
        case SAC_OPERATOR_GE: sb.append(">="); break;
        case SAC_OPERATOR_TILDE: sb.append("~"); break;
        case SAC_OPERATOR_EQUALS: sb.append("="); break;
        case SAC_INHERIT: sb.append("inherit"); break;
        case SAC_INTEGER: sb.append(String.valueOf(getIntegerValue())); break;
        case SAC_REAL: sb.append(trimFloat(getFloatValue())); break;
        case SAC_EM:
        case SAC_EX:
        case SAC_PIXEL:
        case SAC_INCH:
        case SAC_CENTIMETER:
        case SAC_MILLIMETER:
        case SAC_POINT:
        case SAC_PICA:
        case SAC_PERCENTAGE:
        case SAC_DEGREE:
        case SAC_GRADIAN:
        case SAC_RADIAN:
        case SAC_MILLISECOND:
        case SAC_SECOND:
        case SAC_HERTZ:
        case SAC_KILOHERTZ:
        case SAC_DIMENSION:
            sb.append(trimFloat(getFloatValue())).append(getDimensionUnitText());
            break;
        case SAC_URI:
            sb.append("url(").append(getStringValue()).append(")");
            break;
        case SAC_COUNTER_FUNCTION: appendFunction(sb, "counter"); break;
        case SAC_COUNTERS_FUNCTION: appendFunction(sb, "counters"); break;
        case SAC_RGBCOLOR: appendFunction(sb, "rgb"); break;
        case SAC_IDENT: sb.append(getStringValue()); break;
        case SAC_STRING_VALUE:
            sb.append("\"").append(getStringValue()).append("\"");
            break;
        case SAC_ATTR: appendFunction(sb, "attr"); break;
        case SAC_RECT_FUNCTION: appendFunction(sb, "rect"); break;
        case SAC_UNICODERANGE:
        case SAC_SUB_EXPRESSION:
            sb.append(getStringValue());
            break;
        case SAC_FUNCTION:
        case SAC_ALPHA_FUNCTION:
            // Bug fix: the SAC_FUNCTION branch used to omit the opening "(",
            // producing unbalanced output such as "calc1px)".
            appendFunction(sb, getFunctionName());
            break;
        }
        return sb.toString();
    }

    /** Produces a debug label such as {@code SAC_PIXEL(10px)}; unknown types yield "". */
    public String toDebugString() {
        switch (_type) {
        case SAC_OPERATOR_COMMA: return "SAC_OPERATOR_COMMA";
        case SAC_OPERATOR_PLUS: return "SAC_OPERATOR_PLUS";
        case SAC_OPERATOR_MINUS: return "SAC_OPERATOR_MINUS";
        case SAC_OPERATOR_MULTIPLY: return "SAC_OPERATOR_MULTIPLY";
        case SAC_OPERATOR_SLASH: return "SAC_OPERATOR_SLASH";
        case SAC_OPERATOR_MOD: return "SAC_OPERATOR_MOD";
        case SAC_OPERATOR_EXP: return "SAC_OPERATOR_EXP";
        case SAC_OPERATOR_LT: return "SAC_OPERATOR_LT";
        case SAC_OPERATOR_GT: return "SAC_OPERATOR_GT";
        case SAC_OPERATOR_LE: return "SAC_OPERATOR_LE";
        case SAC_OPERATOR_GE: return "SAC_OPERATOR_GE";
        case SAC_OPERATOR_TILDE: return "SAC_OPERATOR_TILDE";
        // Previously missing: SAC_OPERATOR_EQUALS fell through and returned "".
        case SAC_OPERATOR_EQUALS: return "SAC_OPERATOR_EQUALS";
        case SAC_INHERIT: return "SAC_INHERIT";
        case SAC_INTEGER: return "SAC_INTEGER(" + getIntegerValue() + ")";
        case SAC_REAL: return "SAC_REAL(" + trimFloat(getFloatValue()) + ")";
        case SAC_EM: return debugDimension("SAC_EM");
        case SAC_EX: return debugDimension("SAC_EX");
        case SAC_PIXEL: return debugDimension("SAC_PIXEL");
        case SAC_INCH: return debugDimension("SAC_INCH");
        case SAC_CENTIMETER: return debugDimension("SAC_CENTIMETER");
        case SAC_MILLIMETER: return debugDimension("SAC_MILLIMETER");
        case SAC_POINT: return debugDimension("SAC_POINT");
        case SAC_PICA: return debugDimension("SAC_PICA");
        case SAC_PERCENTAGE: return debugDimension("SAC_PERCENTAGE");
        case SAC_DEGREE: return debugDimension("SAC_DEGREE");
        case SAC_GRADIAN: return debugDimension("SAC_GRADIAN");
        case SAC_RADIAN: return debugDimension("SAC_RADIAN");
        case SAC_MILLISECOND: return debugDimension("SAC_MILLISECOND");
        case SAC_SECOND: return debugDimension("SAC_SECOND");
        case SAC_HERTZ: return debugDimension("SAC_HERTZ");
        case SAC_KILOHERTZ: return debugDimension("SAC_KILOHERTZ");
        case SAC_DIMENSION: return debugDimension("SAC_DIMENSION");
        case SAC_URI: return "SAC_URI(url(" + getStringValue() + "))";
        case SAC_COUNTER_FUNCTION: return debugFunction("SAC_COUNTER_FUNCTION", "counter");
        case SAC_COUNTERS_FUNCTION: return debugFunction("SAC_COUNTERS_FUNCTION", "counters");
        case SAC_RGBCOLOR: return debugFunction("SAC_RGBCOLOR", "rgb");
        case SAC_IDENT: return "SAC_IDENT(" + getStringValue() + ")";
        case SAC_STRING_VALUE: return "SAC_STRING_VALUE(\"" + getStringValue() + "\")";
        case SAC_ATTR: return debugFunction("SAC_ATTR", "attr");
        case SAC_RECT_FUNCTION: return debugFunction("SAC_RECT_FUNCTION", "rect");
        case SAC_UNICODERANGE: return "SAC_UNICODERANGE(" + getStringValue() + ")";
        case SAC_SUB_EXPRESSION: return "SAC_SUB_EXPRESSION(" + getStringValue() + ")";
        case SAC_FUNCTION: return debugFunction("SAC_FUNCTION", getFunctionName());
        }
        return "";
    }

    /** Appends {@code name} followed by the parenthesized parameter chain. */
    private void appendFunction(StringBuilder sb, String name) {
        sb.append(name).append("(");
        appendParams(sb, _params);
        sb.append(")");
    }

    /** Debug form for a dimension type: {@code LABEL(valueunit)}. */
    private String debugDimension(String label) {
        return label + "(" + trimFloat(getFloatValue()) + getDimensionUnitText() + ")";
    }

    /** Debug form for a function type: {@code LABEL(cssName(params))}. */
    private String debugFunction(String label, String cssName) {
        StringBuilder sb = new StringBuilder(label).append("(").append(cssName).append("(");
        appendParams(sb, _params);
        return sb.append("))").toString();
    }

    /** Appends the serialization of every unit in the chain starting at {@code first}. */
    private void appendParams(StringBuilder sb, LexicalUnit first) {
        for (LexicalUnit unit = first; unit != null; unit = unit.getNextLexicalUnit()) {
            sb.append(unit.toString());
        }
    }

    /**
     * Formats a float, dropping a trailing ".0" for integral values.
     * Bug fix: the old implementation formatted {@code getFloatValue()} instead of
     * its own parameter, which was only accidentally correct.
     */
    private String trimFloat(float f) {
        String s = String.valueOf(f);
        return (f - (int) f != 0) ? s : s.substring(0, s.length() - 2);
    }

    // ---- static factories ----

    /**
     * Creates an integer or real unit depending on whether the value is integral.
     * Bug fix: the old test {@code f > (int) f} misclassified negative reals
     * (e.g. -1.5 was truncated to the integer -1); an exact comparison keeps them real.
     */
    public static LexicalUnit createNumber(LexicalUnit prev, float f) {
        if (f != (int) f) {
            return new LexicalUnitImpl(prev, LexicalUnit.SAC_REAL, f);
        }
        return new LexicalUnitImpl(prev, (int) f);
    }

    public static LexicalUnit createPercentage(LexicalUnit prev, float f) {
        return new LexicalUnitImpl(prev, LexicalUnit.SAC_PERCENTAGE, f);
    }

    public static LexicalUnit createPixel(LexicalUnit prev, float f) {
        return new LexicalUnitImpl(prev, LexicalUnit.SAC_PIXEL, f);
    }

    public static LexicalUnit createCentimeter(LexicalUnit prev, float f) {
        return new LexicalUnitImpl(prev, LexicalUnit.SAC_CENTIMETER, f);
    }

    public static LexicalUnit createMillimeter(LexicalUnit prev, float f) {
        return new LexicalUnitImpl(prev, LexicalUnit.SAC_MILLIMETER, f);
    }

    public static LexicalUnit createInch(LexicalUnit prev, float f) {
        return new LexicalUnitImpl(prev, LexicalUnit.SAC_INCH, f);
    }

    public static LexicalUnit createPoint(LexicalUnit prev, float f) {
        return new LexicalUnitImpl(prev, LexicalUnit.SAC_POINT, f);
    }

    public static LexicalUnit createPica(LexicalUnit prev, float f) {
        return new LexicalUnitImpl(prev, LexicalUnit.SAC_PICA, f);
    }

    public static LexicalUnit createEm(LexicalUnit prev, float f) {
        return new LexicalUnitImpl(prev, LexicalUnit.SAC_EM, f);
    }

    public static LexicalUnit createEx(LexicalUnit prev, float f) {
        return new LexicalUnitImpl(prev, LexicalUnit.SAC_EX, f);
    }

    public static LexicalUnit createDegree(LexicalUnit prev, float f) {
        return new LexicalUnitImpl(prev, LexicalUnit.SAC_DEGREE, f);
    }

    public static LexicalUnit createRadian(LexicalUnit prev, float f) {
        return new LexicalUnitImpl(prev, LexicalUnit.SAC_RADIAN, f);
    }

    public static LexicalUnit createGradian(LexicalUnit prev, float f) {
        return new LexicalUnitImpl(prev, LexicalUnit.SAC_GRADIAN, f);
    }

    public static LexicalUnit createMillisecond(LexicalUnit prev, float f) {
        return new LexicalUnitImpl(prev, LexicalUnit.SAC_MILLISECOND, f);
    }

    public static LexicalUnit createSecond(LexicalUnit prev, float f) {
        return new LexicalUnitImpl(prev, LexicalUnit.SAC_SECOND, f);
    }

    public static LexicalUnit createHertz(LexicalUnit prev, float f) {
        return new LexicalUnitImpl(prev, LexicalUnit.SAC_HERTZ, f);
    }

    public static LexicalUnit createDimension(LexicalUnit prev, float f, String dim) {
        return new LexicalUnitImpl(prev, LexicalUnit.SAC_DIMENSION, dim, f);
    }

    public static LexicalUnit createKiloHertz(LexicalUnit prev, float f) {
        return new LexicalUnitImpl(prev, LexicalUnit.SAC_KILOHERTZ, f);
    }

    public static LexicalUnit createCounter(LexicalUnit prev, LexicalUnit params) {
        return new LexicalUnitImpl(prev, LexicalUnit.SAC_COUNTER_FUNCTION, "counter", params);
    }

    public static LexicalUnit createAlpha(LexicalUnit prev, LexicalUnit params) {
        return new LexicalUnitImpl(prev, LexicalUnit.SAC_ALPHA_FUNCTION, "alpha", params);
    }

    public static LexicalUnit createCounters(LexicalUnit prev, LexicalUnit params) {
        return new LexicalUnitImpl(prev, LexicalUnit.SAC_COUNTERS_FUNCTION, "counters", params);
    }

    public static LexicalUnit createAttr(LexicalUnit prev, LexicalUnit params) {
        return new LexicalUnitImpl(prev, LexicalUnit.SAC_ATTR, "attr", params);
    }

    public static LexicalUnit createRect(LexicalUnit prev, LexicalUnit params) {
        return new LexicalUnitImpl(prev, LexicalUnit.SAC_RECT_FUNCTION, "rect", params);
    }

    public static LexicalUnit createRgbColor(LexicalUnit prev, LexicalUnit params) {
        return new LexicalUnitImpl(prev, LexicalUnit.SAC_RGBCOLOR, "rgb", params);
    }

    public static LexicalUnit createFunction(LexicalUnit prev, String name, LexicalUnit params) {
        return new LexicalUnitImpl(prev, LexicalUnit.SAC_FUNCTION, name, params);
    }

    public static LexicalUnit createString(LexicalUnit prev, String value) {
        return new LexicalUnitImpl(prev, LexicalUnit.SAC_STRING_VALUE, value);
    }

    public static LexicalUnit createIdent(LexicalUnit prev, String value) {
        return new LexicalUnitImpl(prev, LexicalUnit.SAC_IDENT, value);
    }

    public static LexicalUnit createURI(LexicalUnit prev, String value) {
        return new LexicalUnitImpl(prev, LexicalUnit.SAC_URI, value);
    }

    public static LexicalUnit createComma(LexicalUnit prev) {
        return new LexicalUnitImpl(prev, SAC_OPERATOR_COMMA);
    }
}
| |
// Copyright 2000-2021 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.application.options;
import com.intellij.application.options.codeStyle.CodeStyleBlankLinesPanel;
import com.intellij.application.options.codeStyle.CodeStyleSchemesModel;
import com.intellij.application.options.codeStyle.CodeStyleSpacesPanel;
import com.intellij.application.options.codeStyle.WrappingAndBracesPanel;
import com.intellij.ide.DataManager;
import com.intellij.lang.Language;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.actionSystem.DataContext;
import com.intellij.openapi.actionSystem.DefaultActionGroup;
import com.intellij.openapi.application.ApplicationBundle;
import com.intellij.openapi.editor.colors.EditorColorsScheme;
import com.intellij.openapi.editor.highlighter.EditorHighlighter;
import com.intellij.openapi.editor.highlighter.EditorHighlighterFactory;
import com.intellij.openapi.extensions.ExtensionPointListener;
import com.intellij.openapi.extensions.PluginDescriptor;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.fileTypes.FileTypes;
import com.intellij.openapi.options.Configurable;
import com.intellij.openapi.options.ConfigurationException;
import com.intellij.openapi.project.DumbAwareAction;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.project.ProjectUtil;
import com.intellij.openapi.ui.JBMenuItem;
import com.intellij.openapi.ui.JBPopupMenu;
import com.intellij.openapi.ui.popup.JBPopupFactory;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.NlsContexts.TabTitle;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.Weighted;
import com.intellij.psi.codeStyle.*;
import com.intellij.ui.ScrollPaneFactory;
import com.intellij.ui.TabbedPaneWrapper;
import com.intellij.ui.TitledSeparator;
import com.intellij.util.EventDispatcher;
import com.intellij.util.ObjectUtils;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.JBTreeTraverser;
import com.intellij.util.containers.TreeTraversal;
import com.intellij.util.ui.EmptyIcon;
import com.intellij.util.ui.GraphicsUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.awt.*;
import java.util.List;
import java.util.*;
import static java.util.Arrays.stream;
/**
* @author Rustam Vishnyakov
*/
public abstract class TabbedLanguageCodeStylePanel extends CodeStyleAbstractPanel {
  private CodeStyleAbstractPanel myActiveTab;   // tab whose settings drive preview/highlighting
  private List<CodeStyleAbstractPanel> myTabs;  // null until ensureTabs() lazily builds the UI
  private JPanel myPanel;
  private TabbedPaneWrapper myTabbedPane;
  private final PredefinedCodeStyle[] myPredefinedCodeStyles;  // styles applicable to getDefaultLanguage()
  private JPopupMenu myCopyFromMenu;            // lazily built by initCopyFromMenu()
  @Nullable private TabChangeListener myListener;
  private final EventDispatcher<PredefinedCodeStyleListener> myPredefinedCodeStyleEventDispatcher = EventDispatcher.create(PredefinedCodeStyleListener.class);
  private Ref<LanguageCodeStyleSettingsProvider> myProviderRef;  // caches the (possibly null) provider lookup
  protected TabbedLanguageCodeStylePanel(@Nullable Language language, CodeStyleSettings currentSettings, CodeStyleSettings settings) {
    super(language, currentSettings, settings);
    myPredefinedCodeStyles = getPredefinedStyles();
    // Keep the tab set in sync with dynamically (un)loaded plugins: providers without their
    // own settings page contribute a tab to this panel instead.
    CodeStyleSettingsProvider.EXTENSION_POINT_NAME.addExtensionPointListener(
      new ExtensionPointListener<>() {
        @Override
        public void extensionAdded(@NotNull CodeStyleSettingsProvider extension,
                                   @NotNull PluginDescriptor pluginDescriptor) {
          // Language instances are singletons, so identity comparison is intentional here.
          if (!extension.hasSettingsPage() && getDefaultLanguage() == extension.getLanguage()) {
            createTab(extension);
          }
        }
        @Override
        public void extensionRemoved(@NotNull CodeStyleSettingsProvider extension,
                                     @NotNull PluginDescriptor pluginDescriptor) {
          if (!extension.hasSettingsPage() && getDefaultLanguage() == extension.getLanguage()) {
            // Locate the tab contributed by this provider by its display name and drop both
            // the UI tab and the backing panel.
            final String tabTitle = extension.getConfigurableDisplayName();
            for (int i = 0; i < myTabbedPane.getTabCount(); i++) {
              if (myTabbedPane.getTitleAt(i).equals(tabTitle)) {
                myTabbedPane.removeTabAt(i);
                myTabs.stream().filter(
                  panel -> panel.getTabTitle().equals(tabTitle)
                ).findFirst().ifPresent(panel -> myTabs.remove(panel));
                return;
              }
            }
          }
        }
      }, this
    );
  }
/**
* Initializes all standard tabs: "Tabs and Indents", "Spaces", "Blank Lines" and "Wrapping and Braces" if relevant.
* For "Tabs and Indents" LanguageCodeStyleSettingsProvider must instantiate its own indent options, for other standard tabs it
* must return false in usesSharedPreview() method. You can override this method to add your own tabs by calling super.initTabs() and
* then addTab() methods or selectively add needed tabs with your own implementation.
* @param settings Code style settings to be used with initialized panels.
* @see LanguageCodeStyleSettingsProvider
* @see #addIndentOptionsTab(CodeStyleSettings)
* @see #addSpacesTab(CodeStyleSettings)
* @see #addBlankLinesTab(CodeStyleSettings)
* @see #addWrappingAndBracesTab(CodeStyleSettings)
*/
protected void initTabs(CodeStyleSettings settings) {
addIndentOptionsTab(settings);
if (getProvider() != null) {
addSpacesTab(settings);
addWrappingAndBracesTab(settings);
addBlankLinesTab(settings);
}
}
/**
* Adds "Tabs and Indents" tab if the language has its own LanguageCodeStyleSettings provider and instantiates indent options in
* getDefaultSettings() method.
* @param settings CodeStyleSettings to be used with "Tabs and Indents" panel.
*/
protected void addIndentOptionsTab(CodeStyleSettings settings) {
if (getProvider() != null) {
IndentOptionsEditor indentOptionsEditor = getProvider().getIndentOptionsEditor();
if (indentOptionsEditor != null) {
MyIndentOptionsWrapper indentOptionsWrapper = new MyIndentOptionsWrapper(settings, indentOptionsEditor);
addTab(indentOptionsWrapper);
}
}
}
  /** Adds the standard "Spaces" tab. */
  protected void addSpacesTab(CodeStyleSettings settings) {
    addTab(new MySpacesPanel(settings));
  }
  /** Adds the standard "Blank Lines" tab. */
  protected void addBlankLinesTab(CodeStyleSettings settings) {
    addTab(new MyBlankLinesPanel(settings));
  }
  /** Adds the standard "Wrapping and Braces" tab. */
  protected void addWrappingAndBracesTab(CodeStyleSettings settings) {
    addTab(new MyWrappingAndBracesPanel(settings));
  }
  /** Lazily builds the tabbed UI on first access; must leave at least one tab behind. */
  protected void ensureTabs() {
    if (myTabs == null) {
      myPanel = new JPanel();
      myPanel.setLayout(new BorderLayout());
      myTabbedPane = new TabbedPaneWrapper(this);
      // Forward tab-selection changes to the optional external listener.
      myTabbedPane.addChangeListener(__ -> {
        if (myListener != null) {
          String title = myTabbedPane.getSelectedTitle();
          if (title != null) {
            myListener.tabChanged(this, title);
          }
        }
      });
      myTabs = new ArrayList<>();
      myPanel.add(myTabbedPane.getComponent());
      initTabs(getSettings());
    }
    // initTabs() implementations are expected to add at least one tab.
    assert !myTabs.isEmpty();
  }
  /**
   * Shows the "Set from..." chooser as an action-group popup under {@code component},
   * mirroring the structure of the Swing copy-from menu (separators, submenus, items).
   */
  public void showSetFrom(Component component) {
    initCopyFromMenu();
    DefaultActionGroup group = new DefaultActionGroup();
    // Flatten the Swing menu tree: a JMenu contributes a titled separator followed by
    // the contents of its popup; leaves are visited depth-first.
    JBTreeTraverser<Component> traverser = JBTreeTraverser.<Component>of(
      o -> o instanceof JMenu ? new Component[] { new TitledSeparator(((JMenu)o).getText()), ((JMenu)o).getPopupMenu()} :
           o instanceof JPopupMenu ? ((JPopupMenu)o).getComponents() : null)
      .withRoot(myCopyFromMenu);
    for (Component c : traverser.traverse(TreeTraversal.LEAVES_DFS)) {
      if (c instanceof JSeparator) {
        group.addSeparator();
      }
      else if (c instanceof TitledSeparator) {
        group.addSeparator(((TitledSeparator)c).getText());
      }
      else if (c instanceof JMenuItem) {
        // Wrap each menu item in an action that simply clicks the original item.
        group.add(new DumbAwareAction(((JMenuItem)c).getText(), "", ObjectUtils.notNull(((JMenuItem)c).getIcon(), EmptyIcon.ICON_16)) {
          @Override
          public void actionPerformed(@NotNull AnActionEvent e) {
            ((JMenuItem)c).doClick();
          }
        });
      }
    }
    // Limit popup height for long menus; -1 means "no limit".
    int maxRows = group.getChildrenCount() > 17 ? 15 : -1;
    DataContext dataContext = DataManager.getInstance().getDataContext(component);
    JBPopupFactory.getInstance().createActionGroupPopup(
      null, group, dataContext, JBPopupFactory.ActionSelectionAid.SPEEDSEARCH, false, null, maxRows, null, "popup@TabbedLanguageCodeStylePanel")
      .showUnderneathOf(component);
  }
private void initCopyFromMenu() {
if (myCopyFromMenu == null) {
myCopyFromMenu = new JBPopupMenu();
myCopyFromMenu.setFocusable(false);
setupCopyFromMenu(myCopyFromMenu);
}
}
/**
* Adds a tab with the given CodeStyleAbstractPanel. Tab title is taken from getTabTitle() method.
* @param tab The panel to use in a tab.
*/
protected final void addTab(CodeStyleAbstractPanel tab) {
myTabs.add(tab);
tab.setShouldUpdatePreview(true);
addPanelToWatch(tab.getPanel());
myTabbedPane.addTab(tab.getTabTitle(), tab.getPanel());
if (myActiveTab == null) {
myActiveTab = tab;
}
}
private void addTab(Configurable configurable) {
ConfigurableWrapper wrapper = new ConfigurableWrapper(configurable, getSettings());
addTab(wrapper);
}
/**
* Creates and adds a tab from CodeStyleSettingsProvider. The provider may return false in hasSettingsPage() method in order not to be
* shown at top level of code style settings.
* @param provider The provider used to create a settings page.
*/
protected final void createTab(CodeStyleSettingsProvider provider) {
if (provider.hasSettingsPage()) return;
Configurable configurable = provider.createConfigurable(getCurrentSettings(), getSettings());
addTab(configurable);
}
@Override
public final void setModel(@NotNull CodeStyleSchemesModel model) {
super.setModel(model);
ensureTabs();
for (CodeStyleAbstractPanel tab : myTabs) {
tab.setModel(model);
}
}
  // The overrides below delegate to the active tab (single-value queries) or broadcast
  // to every tab (preview/apply); all of them require the tabs to exist first.
  @Override
  protected int getRightMargin() {
    ensureTabs();
    return myActiveTab.getRightMargin();
  }
  @Override
  protected EditorHighlighter createHighlighter(EditorColorsScheme scheme) {
    ensureTabs();
    return myActiveTab.createHighlighter(scheme);
  }
  @NotNull
  @Override
  protected FileType getFileType() {
    ensureTabs();
    return myActiveTab.getFileType();
  }
  @Override
  protected String getPreviewText() {
    ensureTabs();
    return myActiveTab.getPreviewText();
  }
  @Override
  protected void updatePreview(boolean useDefaultSample) {
    ensureTabs();
    for (CodeStyleAbstractPanel tab : myTabs) {
      tab.updatePreview(useDefaultSample);
    }
  }
  @Override
  public void onSomethingChanged() {
    ensureTabs();
    for (CodeStyleAbstractPanel tab : myTabs) {
      // Re-enable preview updates so every tab reflects the change.
      tab.setShouldUpdatePreview(true);
      tab.onSomethingChanged();
    }
  }
  @Override
  public void apply(CodeStyleSettings settings) throws ConfigurationException {
    ensureTabs();
    for (CodeStyleAbstractPanel tab : myTabs) {
      tab.apply(settings);
    }
  }
@Override
public void dispose() {
for (CodeStyleAbstractPanel tab : myTabs) {
Disposer.dispose(tab);
}
super.dispose();
}
@Override
public boolean isModified(CodeStyleSettings settings) {
ensureTabs();
for (CodeStyleAbstractPanel tab : myTabs) {
if (tab.isModified(settings)) {
return true;
}
}
return false;
}
  @Override
  public JComponent getPanel() {
    // May be null until ensureTabs() has built the UI.
    return myPanel;
  }
  /** Resets every tab to the given settings. */
  @Override
  protected void resetImpl(CodeStyleSettings settings) {
    ensureTabs();
    for (CodeStyleAbstractPanel tab : myTabs) {
      tab.resetImpl(settings);
    }
  }
@Override
public void setupCopyFromMenu(JPopupMenu copyMenu) {
super.setupCopyFromMenu(copyMenu);
if (myPredefinedCodeStyles.length > 0) {
fillPredefinedStylesAndLanguages(copyMenu);
}
else {
fillLanguages(copyMenu);
}
}
  /**
   * Fills the menu with predefined styles and, when other languages apply, a language section:
   * inline for up to 15 languages, otherwise collapsed into a "Language" submenu.
   */
  private void fillPredefinedStylesAndLanguages(JPopupMenu copyMenu) {
    fillPredefinedStyles(copyMenu);
    LanguageCodeStyleSettingsProvider provider = getProvider();
    int n = 0;
    if (provider != null) n = provider.getApplicableLanguages().size();
    if (n > 0) {
      copyMenu.addSeparator();
      if (n <= 15) {
        fillLanguages(copyMenu);
      }
      else {
        // Too many languages for a flat list; tuck them into a submenu.
        JMenu langs = new JMenu(ApplicationBundle.message("code.style.set.from.menu.language")) {
          @Override
          public void paint(Graphics g) {
            // Keep submenu text rendering consistent with the rest of the IDE.
            GraphicsUtil.setupAntialiasing(g);
            super.paint(g);
          }
        };
        copyMenu.add(langs);
        fillLanguages(langs);
      }
    }
  }
private void fillLanguages(JComponent parentMenu) {
List<Language> languages = getProvider() != null ? getProvider().getApplicableLanguages() : Collections.emptyList();
for (final Language lang : languages) {
if (!lang.equals(getDefaultLanguage())) {
final String langName = LanguageCodeStyleSettingsProvider.getLanguageName(lang);
JMenuItem langItem = new JBMenuItem(langName);
langItem.addActionListener(__ -> applyLanguageSettings(lang));
parentMenu.add(langItem);
}
}
}
private void fillPredefinedStyles(JComponent parentMenu) {
for (final PredefinedCodeStyle predefinedCodeStyle : myPredefinedCodeStyles) {
JMenuItem predefinedItem = new JBMenuItem(predefinedCodeStyle.getName());
parentMenu.add(predefinedItem);
predefinedItem.addActionListener(__ -> applyPredefinedStyle(predefinedCodeStyle.getName()));
}
}
  /** Subscribes to "predefined style applied" events; auto-unsubscribed when this panel is disposed. */
  protected void addPredefinedCodeStyleListener(@NotNull PredefinedCodeStyleListener listener) {
    myPredefinedCodeStyleEventDispatcher.addListener(listener, this);
  }
private PredefinedCodeStyle[] getPredefinedStyles() {
final Language language = getDefaultLanguage();
if (language == null) return PredefinedCodeStyle.EMPTY_ARRAY;
PredefinedCodeStyle[] predefinedStyles = PredefinedCodeStyle.EP_NAME.getExtensions();
PredefinedCodeStyle[] styles = stream(predefinedStyles)
.filter(s -> s.isApplicableToLanguage(language))
.toArray(n -> new PredefinedCodeStyle[n]);
if (styles.length >= 2 && ContainerUtil.exists(styles, s -> s instanceof Weighted)) {
Arrays.sort(styles, WEIGHTED_COMPARATOR);
}
return styles;
}
  /** Orders objects by {@link Weighted#getWeight()}; non-weighted objects sort last. */
  private static final class WeightedComparator implements Comparator<Object> {
    @Override
    public int compare(Object o1, Object o2) {
      double w1 = o1 instanceof Weighted ? ((Weighted)o1).getWeight() : Double.POSITIVE_INFINITY;
      double w2 = o2 instanceof Weighted ? ((Weighted)o2).getWeight() : Double.POSITIVE_INFINITY;
      return Double.compare(w1, w2);
    }
  }
  private static final WeightedComparator WEIGHTED_COMPARATOR = new WeightedComparator();
/**
 * Copies {@code lang}'s common settings from the current project's root code
 * style into this panel's settings, then refreshes the UI and fires the
 * change notification.
 */
private void applyLanguageSettings(Language lang) {
  final Project project = ProjectUtil.guessCurrentProject(getPanel());
  CodeStyleSettings source = CodeStyle.getSettings(project);
  CodeStyleSettings target = getSettings();
  applyLanguageSettings(lang, source, target);
  reset(target);
  onSomethingChanged();
}
/**
 * Transfers the common settings of {@code lang} from {@code rootSettings} onto
 * the default language's common settings inside {@code targetSettings}.
 * Subclasses may override to copy additional, language-specific state.
 */
protected void applyLanguageSettings(Language lang, CodeStyleSettings rootSettings, CodeStyleSettings targetSettings) {
  CommonCodeStyleSettings from = rootSettings.getCommonSettings(lang);
  CommonCodeStyleSettings into = targetSettings.getCommonSettings(getDefaultLanguage());
  into.copyFrom(from);
}
/**
 * Applies every registered predefined style whose name equals
 * {@code styleName} and notifies listeners once per applied style.
 */
private void applyPredefinedStyle(String styleName) {
  for (PredefinedCodeStyle candidate : myPredefinedCodeStyles) {
    if (!candidate.getName().equals(styleName)) {
      continue;
    }
    applyPredefinedSettings(candidate);
    myPredefinedCodeStyleEventDispatcher.getMulticaster().styleApplied(candidate);
  }
}
/**
 * Lazily resolves the {@link LanguageCodeStyleSettingsProvider} for the
 * default language. The result (which may legitimately be null) is cached
 * inside a Ref so the lookup runs at most once even when no provider exists.
 */
@Nullable
private LanguageCodeStyleSettingsProvider getProvider() {
  if (myProviderRef == null) {
    myProviderRef = Ref.create(LanguageCodeStyleSettingsProvider.forLanguage(getDefaultLanguage()));
  }
  return myProviderRef.get();
}
//========================================================================================================================================
/**
 * "Spaces" tab hosted inside this tabbed panel. Delegates the language to the
 * enclosing panel and asks the base class to hide its options
 * (see {@code CodeStyleSpacesPanel#shouldHideOptions}).
 */
protected class MySpacesPanel extends CodeStyleSpacesPanel {

  public MySpacesPanel(CodeStyleSettings settings) {
    super(settings);
  }

  // Always true in this embedded context; the exact effect is defined by the
  // base class.
  @Override
  protected boolean shouldHideOptions() {
    return true;
  }

  @Override
  public Language getDefaultLanguage() {
    return TabbedLanguageCodeStylePanel.this.getDefaultLanguage();
  }
}
/**
 * "Blank Lines" tab hosted inside this tabbed panel; the language is always
 * the enclosing panel's default language.
 */
protected class MyBlankLinesPanel extends CodeStyleBlankLinesPanel {

  public MyBlankLinesPanel(CodeStyleSettings settings) {
    super(settings);
  }

  @Override
  public Language getDefaultLanguage() {
    return TabbedLanguageCodeStylePanel.this.getDefaultLanguage();
  }
}
/**
 * "Wrapping and Braces" tab hosted inside this tabbed panel; the language is
 * always the enclosing panel's default language.
 */
protected class MyWrappingAndBracesPanel extends WrappingAndBracesPanel {

  public MyWrappingAndBracesPanel(CodeStyleSettings settings) {
    super(settings);
  }

  @Override
  public Language getDefaultLanguage() {
    return TabbedLanguageCodeStylePanel.this.getDefaultLanguage();
  }
}
//========================================================================================================================================
/**
 * Adapts an arbitrary {@link Configurable} so it can live inside this tabbed
 * code style panel as one more tab. Settings-aware configurables
 * ({@link CodeStyleConfigurable}) are applied/reset with the panel's settings;
 * plain ones fall back to their no-argument apply/reset. No preview text or
 * highlighter is provided for wrapped configurables.
 */
private class ConfigurableWrapper extends CodeStyleAbstractPanel {
  /** The wrapped configurable; its UI resources are disposed with this panel. */
  private final Configurable myConfigurable;
  /** Lazily created UI component of the wrapped configurable. */
  private JComponent myComponent;

  ConfigurableWrapper(@NotNull Configurable configurable, CodeStyleSettings settings) {
    super(settings);
    myConfigurable = configurable;
    Disposer.register(this, () -> myConfigurable.disposeUIResources());
  }

  @Override
  protected int getRightMargin() {
    return 0; // no preview editor, so the margin is irrelevant
  }

  @Nullable
  @Override
  protected EditorHighlighter createHighlighter(EditorColorsScheme scheme) {
    return null; // nothing to highlight: there is no preview
  }

  @SuppressWarnings("ConstantConditions")
  @NotNull
  @Override
  protected FileType getFileType() {
    Language language = getDefaultLanguage();
    if (language == null) {
      return FileTypes.PLAIN_TEXT;
    }
    return language.getAssociatedFileType();
  }

  @Override
  public Language getDefaultLanguage() {
    return TabbedLanguageCodeStylePanel.this.getDefaultLanguage();
  }

  @Override
  protected @TabTitle @NotNull String getTabTitle() {
    return myConfigurable.getDisplayName();
  }

  @Override
  protected String getPreviewText() {
    return null; // wrapped configurables render their own UI instead
  }

  @Override
  public void apply(CodeStyleSettings settings) throws ConfigurationException {
    if (myConfigurable instanceof CodeStyleConfigurable) {
      CodeStyleConfigurable styleAware = (CodeStyleConfigurable)myConfigurable;
      styleAware.apply(settings);
    }
    else {
      myConfigurable.apply();
    }
  }

  @Override
  public boolean isModified(CodeStyleSettings settings) {
    return myConfigurable.isModified();
  }

  @Nullable
  @Override
  public JComponent getPanel() {
    JComponent component = myComponent;
    if (component == null) {
      component = myConfigurable.createComponent();
      myComponent = component;
    }
    return component;
  }

  @Override
  protected void resetImpl(CodeStyleSettings settings) {
    if (myConfigurable instanceof CodeStyleConfigurable) {
      CodeStyleConfigurable styleAware = (CodeStyleConfigurable)myConfigurable;
      styleAware.reset(settings);
    }
    else {
      myConfigurable.reset();
    }
  }
}
/**
 * Builds a search indexer mapping each tab's title to the option names that
 * tab contributes, so Settings search can locate options inside this
 * composite panel.
 */
@Override
public @NotNull OptionsContainingConfigurable getOptionIndexer() {
  return new OptionsContainingConfigurable() {
    @Override
    public @NotNull Map<String, Set<String>> processListOptionsWithPaths() {
      Map<String, Set<String>> optionsByTab = new HashMap<>();
      for (CodeStyleAbstractPanel tab : myTabs) {
        optionsByTab.put(tab.getTabTitle(), tab.processListOptions());
      }
      return optionsByTab;
    }
  };
}
//========================================================================================================================================
/**
 * "Tabs and Indents" tab: hosts an {@link IndentOptionsEditor} on the left and
 * the shared preview panel on the right.
 */
protected class MyIndentOptionsWrapper extends CodeStyleAbstractPanel {

  private final IndentOptionsEditor myEditor;
  private final JPanel myTopPanel = new JPanel(new BorderLayout());

  protected MyIndentOptionsWrapper(CodeStyleSettings settings, IndentOptionsEditor editor) {
    super(settings);
    JPanel leftPanel = new JPanel(new BorderLayout());
    myTopPanel.add(leftPanel, BorderLayout.WEST);
    JPanel rightPanel = new JPanel();
    installPreviewPanel(rightPanel);
    myEditor = editor;
    // NOTE(review): the constructor tolerates a null editor, but apply(),
    // isModified(), resetImpl() and onSomethingChanged() dereference myEditor
    // unconditionally — confirm callers never pass null on those paths.
    if (myEditor != null) {
      JPanel panel = myEditor.createPanel();
      panel.setBorder(BorderFactory.createEmptyBorder(10, 10, 10, 10));
      JScrollPane scroll = ScrollPaneFactory.createScrollPane(panel, true);
      // Width accounts for the vertical scroll bar plus a small gap; height -1
      // leaves the layout manager free to choose.
      scroll.setPreferredSize(new Dimension(panel.getPreferredSize().width + scroll.getVerticalScrollBar().getPreferredSize().width + 5, -1));
      leftPanel.add(scroll, BorderLayout.CENTER);
    }
    myTopPanel.add(rightPanel, BorderLayout.CENTER);
  }

  @Override
  protected int getRightMargin() {
    // Provider-specific margin for the indent preview; -1 when no provider.
    return getProvider() != null ? getProvider().getRightMargin(LanguageCodeStyleSettingsProvider.SettingsType.INDENT_SETTINGS) : -1;
  }

  @Override
  protected EditorHighlighter createHighlighter(EditorColorsScheme scheme) {
    return EditorHighlighterFactory.getInstance().createEditorHighlighter(getFileType(), scheme, null);
  }

  @SuppressWarnings("ConstantConditions")
  @NotNull
  @Override
  protected FileType getFileType() {
    Language language = TabbedLanguageCodeStylePanel.this.getDefaultLanguage();
    return language != null ? language.getAssociatedFileType() : FileTypes.PLAIN_TEXT;
  }

  @Override
  protected String getPreviewText() {
    return getProvider() != null ? getProvider().getCodeSample(LanguageCodeStyleSettingsProvider.SettingsType.INDENT_SETTINGS) : "";
  }

  @Override
  protected String getFileExt() {
    // Prefer the provider's extension; fall back to the base class default.
    if (getProvider() != null) {
      String ext = getProvider().getFileExt();
      if (ext != null) return ext;
    }
    return super.getFileExt();
  }

  @Override
  public void apply(CodeStyleSettings settings) {
    CommonCodeStyleSettings.IndentOptions indentOptions = getIndentOptions(settings);
    if (indentOptions == null) return;
    myEditor.apply(settings, indentOptions);
  }

  @Override
  public boolean isModified(CodeStyleSettings settings) {
    CommonCodeStyleSettings.IndentOptions indentOptions = getIndentOptions(settings);
    if (indentOptions == null) return false;
    return myEditor.isModified(settings, indentOptions);
  }

  @Override
  public JComponent getPanel() {
    return myTopPanel;
  }

  @Override
  protected void resetImpl(CodeStyleSettings settings) {
    CommonCodeStyleSettings.IndentOptions indentOptions = getIndentOptions(settings);
    if (indentOptions == null && getProvider() != null) {
      // No language-specific options: disable the editor and show the options
      // associated with the provider language's file type instead.
      myEditor.setEnabled(false);
      indentOptions = settings.getIndentOptions(getProvider().getLanguage().getAssociatedFileType());
    }
    assert indentOptions != null;
    myEditor.reset(settings, indentOptions);
  }

  @Nullable
  protected CommonCodeStyleSettings.IndentOptions getIndentOptions(CodeStyleSettings settings) {
    return settings.getCommonSettings(getDefaultLanguage()).getIndentOptions();
  }

  @Override
  public Language getDefaultLanguage() {
    return TabbedLanguageCodeStylePanel.this.getDefaultLanguage();
  }

  @Override
  protected @TabTitle @NotNull String getTabTitle() {
    return ApplicationBundle.message("title.tabs.and.indents");
  }

  @Override
  public void onSomethingChanged() {
    super.onSomethingChanged();
    // Re-enable the editor (it may have been disabled by resetImpl above).
    myEditor.setEnabled(true);
  }
}
/**
 * Callback notified when the selected tab of a {@link TabbedLanguageCodeStylePanel}
 * changes.
 */
@FunctionalInterface
public interface TabChangeListener {
  /**
   * @param source   the panel whose selection changed
   * @param tabTitle the title of the newly selected tab
   */
  void tabChanged(@NotNull TabbedLanguageCodeStylePanel source, @NotNull String tabTitle);
}
/**
 * Sets (or clears, when null) the single listener notified on tab changes.
 */
public void setListener(@Nullable TabChangeListener listener) {
  myListener = listener;
}
/**
 * Switches the visible tab to the one with the given title. Behavior for an
 * unknown title is whatever {@code setSelectedTitle} does — presumably a
 * no-op; confirm against the tabbed-pane wrapper if it matters.
 */
public void changeTab(@NotNull String tabTitle) {
  myTabbedPane.setSelectedTitle(tabTitle);
}
/**
 * Propagates the settings-search highlight request to every child tab.
 */
@Override
public void highlightOptions(@NotNull String searchString) {
  for (CodeStyleAbstractPanel childTab : myTabs) {
    childTab.highlightOptions(searchString);
  }
}
}
| |
package sdk.chat.core.dao;
import java.util.List;
import java.util.ArrayList;
import android.database.Cursor;
import android.database.sqlite.SQLiteStatement;
import org.greenrobot.greendao.AbstractDao;
import org.greenrobot.greendao.Property;
import org.greenrobot.greendao.internal.SqlUtils;
import org.greenrobot.greendao.internal.DaoConfig;
import org.greenrobot.greendao.database.Database;
import org.greenrobot.greendao.database.DatabaseStatement;
import org.greenrobot.greendao.query.Query;
import org.greenrobot.greendao.query.QueryBuilder;
// THIS CODE IS GENERATED BY greenDAO, DO NOT EDIT.
/**
 * DAO for table "MESSAGE".
 *
 * <p>Generated by greenDAO; do not hand-edit the logic. Statement bind indices
 * are 1-based while cursor column offsets are 0-based, hence the +1 shift
 * between bindValues and readEntity.
 */
public class MessageDao extends AbstractDao<Message, Long> {

    public static final String TABLENAME = "MESSAGE";

    /**
     * Properties of entity Message.<br/>
     * Can be used for QueryBuilder and for referencing column names.
     */
    public static class Properties {
        public final static Property Id = new Property(0, Long.class, "id", true, "_id");
        public final static Property EntityID = new Property(1, String.class, "entityID", false, "ENTITY_ID");
        public final static Property Date = new Property(2, java.util.Date.class, "date", false, "DATE");
        public final static Property Type = new Property(3, Integer.class, "type", false, "TYPE");
        public final static Property Status = new Property(4, Integer.class, "status", false, "STATUS");
        public final static Property SenderId = new Property(5, Long.class, "senderId", false, "SENDER_ID");
        public final static Property ThreadId = new Property(6, Long.class, "threadId", false, "THREAD_ID");
        public final static Property NextMessageId = new Property(7, Long.class, "nextMessageId", false, "NEXT_MESSAGE_ID");
        public final static Property PreviousMessageId = new Property(8, Long.class, "previousMessageId", false, "PREVIOUS_MESSAGE_ID");
        public final static Property EncryptedText = new Property(9, String.class, "encryptedText", false, "ENCRYPTED_TEXT");
    }

    private DaoSession daoSession;

    // Cached, reusable query backing Thread.getMessages(); built lazily.
    private Query<Message> thread_MessagesQuery;

    public MessageDao(DaoConfig config) {
        super(config);
    }

    public MessageDao(DaoConfig config, DaoSession daoSession) {
        super(config, daoSession);
        this.daoSession = daoSession;
    }

    /** Creates the underlying database table. */
    public static void createTable(Database db, boolean ifNotExists) {
        String constraint = ifNotExists? "IF NOT EXISTS ": "";
        db.execSQL("CREATE TABLE " + constraint + "\"MESSAGE\" (" + //
                "\"_id\" INTEGER PRIMARY KEY ," + // 0: id
                "\"ENTITY_ID\" TEXT UNIQUE ," + // 1: entityID
                "\"DATE\" INTEGER," + // 2: date
                "\"TYPE\" INTEGER," + // 3: type
                "\"STATUS\" INTEGER," + // 4: status
                "\"SENDER_ID\" INTEGER," + // 5: senderId
                "\"THREAD_ID\" INTEGER," + // 6: threadId
                "\"NEXT_MESSAGE_ID\" INTEGER," + // 7: nextMessageId
                "\"PREVIOUS_MESSAGE_ID\" INTEGER," + // 8: previousMessageId
                "\"ENCRYPTED_TEXT\" TEXT);"); // 9: encryptedText
    }

    /** Drops the underlying database table. */
    public static void dropTable(Database db, boolean ifExists) {
        String sql = "DROP TABLE " + (ifExists ? "IF EXISTS " : "") + "\"MESSAGE\"";
        db.execSQL(sql);
    }

    /**
     * Binds all non-null entity values into the statement.
     * Bind indices are 1-based; dates are stored as epoch milliseconds.
     */
    @Override
    protected final void bindValues(DatabaseStatement stmt, Message entity) {
        stmt.clearBindings();
        Long id = entity.getId();
        if (id != null) {
            stmt.bindLong(1, id);
        }
        String entityID = entity.getEntityID();
        if (entityID != null) {
            stmt.bindString(2, entityID);
        }
        java.util.Date date = entity.getDate();
        if (date != null) {
            stmt.bindLong(3, date.getTime());
        }
        Integer type = entity.getType();
        if (type != null) {
            stmt.bindLong(4, type);
        }
        Integer status = entity.getStatus();
        if (status != null) {
            stmt.bindLong(5, status);
        }
        Long senderId = entity.getSenderId();
        if (senderId != null) {
            stmt.bindLong(6, senderId);
        }
        Long threadId = entity.getThreadId();
        if (threadId != null) {
            stmt.bindLong(7, threadId);
        }
        Long nextMessageId = entity.getNextMessageId();
        if (nextMessageId != null) {
            stmt.bindLong(8, nextMessageId);
        }
        Long previousMessageId = entity.getPreviousMessageId();
        if (previousMessageId != null) {
            stmt.bindLong(9, previousMessageId);
        }
        String encryptedText = entity.getEncryptedText();
        if (encryptedText != null) {
            stmt.bindString(10, encryptedText);
        }
    }

    /**
     * Same binding logic as above, duplicated by the generator for the raw
     * android.database.sqlite statement type.
     */
    @Override
    protected final void bindValues(SQLiteStatement stmt, Message entity) {
        stmt.clearBindings();
        Long id = entity.getId();
        if (id != null) {
            stmt.bindLong(1, id);
        }
        String entityID = entity.getEntityID();
        if (entityID != null) {
            stmt.bindString(2, entityID);
        }
        java.util.Date date = entity.getDate();
        if (date != null) {
            stmt.bindLong(3, date.getTime());
        }
        Integer type = entity.getType();
        if (type != null) {
            stmt.bindLong(4, type);
        }
        Integer status = entity.getStatus();
        if (status != null) {
            stmt.bindLong(5, status);
        }
        Long senderId = entity.getSenderId();
        if (senderId != null) {
            stmt.bindLong(6, senderId);
        }
        Long threadId = entity.getThreadId();
        if (threadId != null) {
            stmt.bindLong(7, threadId);
        }
        Long nextMessageId = entity.getNextMessageId();
        if (nextMessageId != null) {
            stmt.bindLong(8, nextMessageId);
        }
        Long previousMessageId = entity.getPreviousMessageId();
        if (previousMessageId != null) {
            stmt.bindLong(9, previousMessageId);
        }
        String encryptedText = entity.getEncryptedText();
        if (encryptedText != null) {
            stmt.bindString(10, encryptedText);
        }
    }

    /** Gives loaded entities access to the session for relation resolution. */
    @Override
    protected final void attachEntity(Message entity) {
        super.attachEntity(entity);
        entity.__setDaoSession(daoSession);
    }

    /** Reads the primary key from column 0 (relative to offset). */
    @Override
    public Long readKey(Cursor cursor, int offset) {
        return cursor.isNull(offset + 0) ? null : cursor.getLong(offset + 0);
    }

    /** Creates a new entity from the current cursor row. */
    @Override
    public Message readEntity(Cursor cursor, int offset) {
        Message entity = new Message( //
            cursor.isNull(offset + 0) ? null : cursor.getLong(offset + 0), // id
            cursor.isNull(offset + 1) ? null : cursor.getString(offset + 1), // entityID
            cursor.isNull(offset + 2) ? null : new java.util.Date(cursor.getLong(offset + 2)), // date
            cursor.isNull(offset + 3) ? null : cursor.getInt(offset + 3), // type
            cursor.isNull(offset + 4) ? null : cursor.getInt(offset + 4), // status
            cursor.isNull(offset + 5) ? null : cursor.getLong(offset + 5), // senderId
            cursor.isNull(offset + 6) ? null : cursor.getLong(offset + 6), // threadId
            cursor.isNull(offset + 7) ? null : cursor.getLong(offset + 7), // nextMessageId
            cursor.isNull(offset + 8) ? null : cursor.getLong(offset + 8), // previousMessageId
            cursor.isNull(offset + 9) ? null : cursor.getString(offset + 9) // encryptedText
        );
        return entity;
    }

    /** Populates an existing entity from the current cursor row. */
    @Override
    public void readEntity(Cursor cursor, Message entity, int offset) {
        entity.setId(cursor.isNull(offset + 0) ? null : cursor.getLong(offset + 0));
        entity.setEntityID(cursor.isNull(offset + 1) ? null : cursor.getString(offset + 1));
        entity.setDate(cursor.isNull(offset + 2) ? null : new java.util.Date(cursor.getLong(offset + 2)));
        entity.setType(cursor.isNull(offset + 3) ? null : cursor.getInt(offset + 3));
        entity.setStatus(cursor.isNull(offset + 4) ? null : cursor.getInt(offset + 4));
        entity.setSenderId(cursor.isNull(offset + 5) ? null : cursor.getLong(offset + 5));
        entity.setThreadId(cursor.isNull(offset + 6) ? null : cursor.getLong(offset + 6));
        entity.setNextMessageId(cursor.isNull(offset + 7) ? null : cursor.getLong(offset + 7));
        entity.setPreviousMessageId(cursor.isNull(offset + 8) ? null : cursor.getLong(offset + 8));
        entity.setEncryptedText(cursor.isNull(offset + 9) ? null : cursor.getString(offset + 9));
    }

    /** Stores the rowid assigned by SQLite back into the entity after insert. */
    @Override
    protected final Long updateKeyAfterInsert(Message entity, long rowId) {
        entity.setId(rowId);
        return rowId;
    }

    @Override
    public Long getKey(Message entity) {
        if(entity != null) {
            return entity.getId();
        } else {
            return null;
        }
    }

    @Override
    public boolean hasKey(Message entity) {
        return entity.getId() != null;
    }

    @Override
    protected final boolean isEntityUpdateable() {
        return true;
    }

    /** Internal query to resolve the "messages" to-many relationship of Thread. */
    public List<Message> _queryThread_Messages(Long threadId) {
        synchronized (this) {
            if (thread_MessagesQuery == null) {
                QueryBuilder<Message> queryBuilder = queryBuilder();
                // eq(null) creates a placeholder; the real value is injected
                // below via setParameter(0, threadId).
                queryBuilder.where(Properties.ThreadId.eq(null));
                queryBuilder.orderRaw("T.'DATE' ASC");
                thread_MessagesQuery = queryBuilder.build();
            }
        }
        // forCurrentThread() hands out a thread-confined copy of the cached query.
        Query<Message> query = thread_MessagesQuery.forCurrentThread();
        query.setParameter(0, threadId);
        return query.list();
    }

    private String selectDeep;

    /**
     * Builds (once) the SELECT used for eager loading: the message (T) joined
     * with its sender (T0), thread (T1), next (T2) and previous (T3) messages.
     */
    protected String getSelectDeep() {
        if (selectDeep == null) {
            StringBuilder builder = new StringBuilder("SELECT ");
            SqlUtils.appendColumns(builder, "T", getAllColumns());
            builder.append(',');
            SqlUtils.appendColumns(builder, "T0", daoSession.getUserDao().getAllColumns());
            builder.append(',');
            SqlUtils.appendColumns(builder, "T1", daoSession.getThreadDao().getAllColumns());
            builder.append(',');
            SqlUtils.appendColumns(builder, "T2", daoSession.getMessageDao().getAllColumns());
            builder.append(',');
            SqlUtils.appendColumns(builder, "T3", daoSession.getMessageDao().getAllColumns());
            builder.append(" FROM MESSAGE T");
            builder.append(" LEFT JOIN USER T0 ON T.\"SENDER_ID\"=T0.\"_id\"");
            builder.append(" LEFT JOIN THREAD T1 ON T.\"THREAD_ID\"=T1.\"_id\"");
            builder.append(" LEFT JOIN MESSAGE T2 ON T.\"NEXT_MESSAGE_ID\"=T2.\"_id\"");
            builder.append(" LEFT JOIN MESSAGE T3 ON T.\"PREVIOUS_MESSAGE_ID\"=T3.\"_id\"");
            builder.append(' ');
            selectDeep = builder.toString();
        }
        return selectDeep;
    }

    /**
     * Materializes the current row of a deep query: the message itself plus its
     * joined sender, thread, next and previous messages (each may be null due
     * to the LEFT JOINs).
     */
    protected Message loadCurrentDeep(Cursor cursor, boolean lock) {
        Message entity = loadCurrent(cursor, 0, lock);
        int offset = getAllColumns().length;

        User sender = loadCurrentOther(daoSession.getUserDao(), cursor, offset);
        entity.setSender(sender);
        offset += daoSession.getUserDao().getAllColumns().length;

        Thread thread = loadCurrentOther(daoSession.getThreadDao(), cursor, offset);
        entity.setThread(thread);
        offset += daoSession.getThreadDao().getAllColumns().length;

        Message nextMessage = loadCurrentOther(daoSession.getMessageDao(), cursor, offset);
        entity.setNextMessage(nextMessage);
        offset += daoSession.getMessageDao().getAllColumns().length;

        Message previousMessage = loadCurrentOther(daoSession.getMessageDao(), cursor, offset);
        entity.setPreviousMessage(previousMessage);

        return entity;
    }

    /**
     * Loads one message by primary key with all to-one relations resolved;
     * returns null when no row matches, throws when the key is not unique.
     */
    public Message loadDeep(Long key) {
        assertSinglePk();
        if (key == null) {
            return null;
        }

        StringBuilder builder = new StringBuilder(getSelectDeep());
        builder.append("WHERE ");
        SqlUtils.appendColumnsEqValue(builder, "T", getPkColumns());
        String sql = builder.toString();

        String[] keyArray = new String[] { key.toString() };
        Cursor cursor = db.rawQuery(sql, keyArray);

        try {
            boolean available = cursor.moveToFirst();
            if (!available) {
                return null;
            } else if (!cursor.isLast()) {
                throw new IllegalStateException("Expected unique result, but count was " + cursor.getCount());
            }
            return loadCurrentDeep(cursor, true);
        } finally {
            cursor.close();
        }
    }

    /** Reads all available rows from the given cursor and returns a list of new Message objects. */
    public List<Message> loadAllDeepFromCursor(Cursor cursor) {
        int count = cursor.getCount();
        List<Message> list = new ArrayList<Message>(count);

        if (cursor.moveToFirst()) {
            if (identityScope != null) {
                identityScope.lock();
                identityScope.reserveRoom(count);
            }
            try {
                do {
                    list.add(loadCurrentDeep(cursor, false));
                } while (cursor.moveToNext());
            } finally {
                if (identityScope != null) {
                    identityScope.unlock();
                }
            }
        }
        return list;
    }

    /** Loads all rows deeply and guarantees the cursor is closed. */
    protected List<Message> loadDeepAllAndCloseCursor(Cursor cursor) {
        try {
            return loadAllDeepFromCursor(cursor);
        } finally {
            cursor.close();
        }
    }

    /** A raw-style query where you can pass any WHERE clause and arguments. */
    public List<Message> queryDeep(String where, String... selectionArg) {
        Cursor cursor = db.rawQuery(getSelectDeep() + where, selectionArg);
        return loadDeepAllAndCloseCursor(cursor);
    }

}
| |
/**
* MobWrite - Real-time Synchronization and Collaboration Service
*
* Copyright 2009 Google Inc.
* http://code.google.com/p/google-mobwrite/
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package google.mobwrite;
import google.mobwrite.diff_match_patch.Diff;
import google.mobwrite.diff_match_patch.Operation;
import google.mobwrite.diff_match_patch.Patch;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.LinkedList;
import java.util.logging.Level;
import java.util.regex.Pattern;
/**
 * Base class for a client-side shared object synchronized through MobWrite.
 * Subclasses supply the plaintext representation of the widget being shared
 * via {@link #getClientText()} / {@link #setClientText(String)}.
 */
public abstract class ShareObj {

  /**
   * Instantiation of the Diff Match Patch library.
   * http://code.google.com/p/google-diff-match-patch/
   */
  protected diff_match_patch dmp;

  /**
   * The filename or ID for this shared object.
   */
  protected String file;

  public String getFile() {
    return file;
  }

  /**
   * The hosting MobWriteClient.
   */
  protected MobWriteClient mobwrite;

  public MobWriteClient getMobwrite() {
    return mobwrite;
  }

  /**
   * List of currently unacknowledged edits sent to the server.
   * Each entry is a {clientVersion (Integer), action (String)} pair, kept
   * oldest-first so edits are transmitted in version order.
   */
  protected LinkedList<Object[]> editStack;

  /**
   * Client's understanding of what the server's text looks like.
   */
  protected String shadowText = "";

  /**
   * The client's version for the shadow (n).
   */
  protected int clientVersion = 0;

  /**
   * The server's version for the shadow (m).
   */
  protected int serverVersion = 0;

  /**
   * Did the client understand the server's delta in the previous heartbeat?
   * Initialize false because the server and client are out of sync initially.
   */
  protected boolean deltaOk = false;

  /**
   * Synchronization mode.
   * True: Used for text, attempts to gently merge differences together.
   * False: Used for numbers, overwrites conflicts, last save wins.
   */
  protected boolean mergeChanges = true;

  /**
   * A file ID must start with a letter and continue with letters, numbers,
   * dashes, periods, colons or underscores. From the W3 spec for HTML IDs.
   */
  private static Pattern idPattern = Pattern.compile("^[A-Za-z][-.:\\w]*$");

  /**
   * Matches purely numeric content (optional sign, digits, dots, commas).
   * Compiled once since isEnum may run on every heartbeat.
   */
  private static final Pattern numberPattern =
      Pattern.compile("^\\s*-?[\\d.,]+\\s*$");

  /**
   * Constructor. Create a ShareObj with a file ID.
   * @param file Filename to share as.
   * @throws IllegalArgumentException If filename is illegal.
   */
  public ShareObj(String file) {
    if (!idPattern.matcher(file).matches()) {
      throw new IllegalArgumentException("Illegal id " + file);
    }
    this.file = file;
    this.dmp = new diff_match_patch();
    this.dmp.Diff_Timeout = 0.5f;
    // List of unacknowledged edits sent to the server.
    this.editStack = new LinkedList<Object[]>();
  }

  /**
   * Fetch or compute a plaintext representation of the user's text.
   * @return Plaintext content.
   */
  public abstract String getClientText();

  /**
   * Set the user's text based on the provided plaintext.
   * @param text New text.
   */
  public abstract void setClientText(String text);

  /**
   * Modify the user's plaintext by applying a series of patches against it.
   * @param patches Array of Patch objects.
   */
  public void patchClientText(LinkedList<Patch> patches) {
    String oldClientText = this.getClientText();
    Object[] result = this.dmp.patch_apply(patches, oldClientText);
    // Set the new text only if there is a change to be made.
    if (!oldClientText.equals(result[0])) {
      // The following will probably destroy any cursor or selection.
      // Widgets with cursors should override and patch more delicately.
      this.setClientText((String) result[0]);
    }
  }

  /**
   * Notification of when a diff was sent to the server.
   * Protected (not private) so subclasses can actually override this hook;
   * syncText() already guards the call against exceptions from overrides.
   * @param diffs Array of diff objects.
   */
  protected void onSentDiff(LinkedList<Diff> diffs) {
    // Potential hook for subclass
  }

  /**
   * Does the text look like unmergable content?
   * Currently we look for numbers.
   * @param text Plaintext content.
   * @return True iff unmergable.
   */
  protected boolean isEnum(String text) {
    // Numeric text is unmergable (last save wins). Previously the result was
    // inverted, contradicting this method's documented contract.
    return numberPattern.matcher(text).matches();
  }

  /**
   * Return the command to nullify this field. Also unshares this field.
   * @return Command to be sent to the server.
   */
  protected String nullify() {
    mobwrite.unshare(this);
    // Create the output starting with the file statement, followed by the edits.
    String data = mobwrite.idPrefix + this.file;
    return "N:" + data + '\n';
  }

  /**
   * Asks the ShareObj to synchronize. Computes client-made changes since
   * previous postback. Return '' to skip this synchronization.
   * @return Commands to be sent to the server.
   */
  protected String syncText() {
    String clientText;
    try {
      clientText = this.getClientText();
      if (clientText == null) {
        // Null is not an acceptable result.
        throw new NullPointerException();
      }
    } catch (Exception e) {
      // Potential call to untrusted 3rd party code.
      this.mobwrite.logger.log(Level.SEVERE, "Error calling getClientText on '"
          + this.file + "'", e);
      return "";
    }
    if (this.deltaOk) {
      // The last delta postback from the server to this shareObj was successful.
      // Send a compressed delta.
      LinkedList<Diff> diffs = this.dmp.diff_main(this.shadowText, clientText, true);
      if (diffs.size() > 2) {
        this.dmp.diff_cleanupSemantic(diffs);
        this.dmp.diff_cleanupEfficiency(diffs);
      }
      boolean changed = diffs.size() != 1
          || diffs.getFirst().operation != Operation.EQUAL;
      if (changed) {
        this.mobwrite.clientChange_ = true;
        this.shadowText = clientText;
      }
      // Don't bother appending a no-change diff onto the stack if the stack
      // already contains something.
      if (changed || this.editStack.isEmpty()) {
        String action = (this.mergeChanges ? "d:" : "D:") + this.clientVersion
            + ':' + this.dmp.diff_toDelta(diffs);
        // Append (not push/addFirst) so the stack stays oldest-first; the
        // server must receive deltas in ascending version order.
        this.editStack.add(new Object[]{this.clientVersion, action});
        this.clientVersion++;
        try {
          this.onSentDiff(diffs);
        } catch (Exception e) {
          // Potential call to untrusted 3rd party code.
          this.mobwrite.logger.log(Level.SEVERE, "Error calling onSentDiff on '"
              + this.file + "'", e);
        }
      }
    } else {
      // The last delta postback from the server to this shareObj didn't match.
      // Send a full text dump to get back in sync. This will result in any
      // changes since the last postback being wiped out. :(
      String data = clientText;
      try {
        data = URLEncoder.encode(data, "UTF-8").replace('+', ' ');
      } catch (UnsupportedEncodingException e) {
        // Not likely on modern system.
        throw new Error("This system does not support UTF-8.", e);
      }
      data = MobWriteClient.unescapeForEncodeUriCompatability(data);
      this.shadowText = clientText;
      this.clientVersion++;
      String action = "r:" + this.clientVersion + ':' + data;
      // Append the action to the edit stack (oldest-first order).
      this.editStack.add(new Object[]{this.clientVersion, action});
      // Sending a raw dump will put us back in sync.
      // Set deltaOk to true in case this sync fails to connect, in which case
      // the following sync(s) should be a delta, not more raw dumps.
      this.deltaOk = true;
    }
    // Create the output starting with the file statement, followed by the edits.
    StringBuilder output = new StringBuilder();
    output.append("F:").append(this.serverVersion).append(':')
        .append(mobwrite.idPrefix).append(this.file).append('\n');
    for (Object[] pair : this.editStack) {
      output.append((String) pair[1]).append('\n');
    }
    return output.toString();
  }

  /**
   * Stop sharing this object.
   */
  public void unshare() {
    if (this.mobwrite != null) {
      this.mobwrite.unshare(this);
    }
  }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.