gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.jmeter;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Properties;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import org.apache.jmeter.junit.JMeterTestCase;
import org.apache.jmeter.util.JMeterUtils;
import org.junit.Before;
import org.junit.Test;
/**
* Check the eclipse and Maven version definitions against build.properties
* Drop this if we move to Maven the build process
*/
/**
 * Check the eclipse and Maven version definitions against build.properties.
 * Drop this if we move the build process to Maven.
 */
public class JMeterVersionTest extends JMeterTestCase {

    /** Convert between eclipse jar name and build.properties name. */
    private static final Map<String, String> JAR_TO_BUILD_PROP = new HashMap<>();
    static {
        JAR_TO_BUILD_PROP.put("bsf", "apache-bsf");
        JAR_TO_BUILD_PROP.put("bsh", "beanshell");
        JAR_TO_BUILD_PROP.put("geronimo-jms_1.1_spec", "jms");
        JAR_TO_BUILD_PROP.put("mail", "javamail");
        JAR_TO_BUILD_PROP.put("oro", "jakarta-oro");
        JAR_TO_BUILD_PROP.put("xercesImpl", "xerces");
        JAR_TO_BUILD_PROP.put("xpp3_min", "xpp3");
    }

    private static final File JMETER_HOME = new File(JMeterUtils.getJMeterHome());

    /**
     * Versions of all libraries mentioned in build.properties
     * (minus the build/doc-only entries removed in {@link #setUp()}).
     * Maps {@code jarname -> version}.
     */
    private final Map<String, String> versions = new HashMap<>();

    /**
     * Names of library.version entries in build.properties, excluding jars
     * not bundled with the binary release (used for docs only).
     */
    private final Set<String> propNames = new HashSet<>();

    /** License file names found under licenses/bin (WITHOUT the .txt suffix). */
    private Set<String> liceFiles;

    /** The build.properties contents, kept for {@link #testMavenDownload()}. */
    private Properties prop;

    private File getFileFromHome(String relativeFile) {
        return new File(JMETER_HOME, relativeFile);
    }

    @Before
    public void setUp() throws IOException {
        final Properties buildProp = new Properties();
        // try-with-resources: the original closed the stream outside finally,
        // leaking it if load() threw
        try (FileInputStream bp = new FileInputStream(getFileFromHome("build.properties"))) {
            buildProp.load(bp);
        }
        for (Entry<Object, Object> entry : buildProp.entrySet()) {
            final String key = (String) entry.getKey();
            if (key.endsWith(".version")) {
                final String value = (String) entry.getValue();
                final String jarprop = key.replace(".version", "");
                final String old = versions.put(jarprop, value);
                propNames.add(jarprop);
                if (old != null) {
                    fail("Already have entry for " + key);
                }
            }
        }
        // remove docs-only jars
        propNames.remove("jdom");
        propNames.remove("velocity");
        propNames.remove("commons-lang"); // lang3 is bundled, lang2 is doc-only
        // Darcula is not a maven artifact
        propNames.remove("darcula"); // not needed in Maven
        buildProp.remove("darcula.loc"); // not a Maven download
        versions.remove("darcula");
        // remove optional checkstyle name
        propNames.remove("checkstyle-all"); // not needed in Maven
        buildProp.remove("checkstyle-all.loc"); // not a Maven download
        versions.remove("checkstyle-all");
        // remove optional RAT jars
        propNames.remove("rat");
        versions.remove("rat");
        propNames.remove("rat-tasks");
        versions.remove("rat-tasks");
        // remove optional hsqldb, jacoco and sonar jars (required for coverage reporting, not required for jmeter)
        for (String optLib : Arrays.asList("jacocoant", "sonarqube-ant-task", "hsqldb", "activemq-all",
                "mina-core", "ftplet-api", "ftpserver-core")) {
            propNames.remove(optLib);
            versions.remove(optLib);
        }
        prop = buildProp;
        final File licencesDir = getFileFromHome("licenses/bin");
        // File.list() returns null when the path is missing or not a directory;
        // the original dereferenced it unconditionally and would NPE
        final String[] licenseFileNames = licencesDir.list();
        if (licenseFileNames == null) {
            fail("Could not list licenses directory: " + licencesDir);
        }
        liceFiles = Arrays.stream(licenseFileNames)
                .filter(name -> !name.equalsIgnoreCase("README.txt"))
                .filter(name -> !name.equals(".svn")) // Ignore old-style SVN workspaces
                .map(name -> name.replace(".txt", ""))
                .collect(Collectors.toSet());
    }

    /**
     * Check eclipse.classpath contains the jars declared in build.properties.
     * @throws IOException if something fails
     */
    @Test
    public void testEclipse() throws IOException {
        // Example lines this must match:
        // <classpathentry kind="lib" path="lib/geronimo-jms_1.1_spec-1.1.1.jar"/>
        // <classpathentry kind="lib" path="lib/activation-1.1.1.jar"/>
        // <classpathentry kind="lib" path="lib/jtidy-r938.jar"/>
        final Pattern p = Pattern.compile("\\s+<classpathentry kind=\"lib\" path=\"lib/(?:api/)?(.+?)-([^-]+(-\\d*|-b\\d+|-BETA\\d)?)\\.jar\"/>");
        final Pattern versionPat = Pattern.compile("\\$\\{(.+)\\.version\\}");
        final ArrayList<String> toRemove = new ArrayList<>();
        // try-with-resources: the original leaked the reader whenever an
        // assertion failed inside the loop
        try (BufferedReader eclipse = new BufferedReader(
                new FileReader(getFileFromHome("eclipse.classpath")))) { // assume default charset is OK here
            String line;
            while ((line = eclipse.readLine()) != null) {
                final Matcher m = p.matcher(line);
                if (!m.matches()) {
                    continue;
                }
                String jar = m.group(1);
                String version = m.group(2);
                // Map eclipse jar names to their build.properties names
                if (jar.endsWith("-jdk15on")) { // special handling
                    jar = jar.replace("-jdk15on", "");
                } else if (jar.equals("commons-jexl") && version.startsWith("2")) { // special handling
                    jar = "commons-jexl2";
                } else if (jar.equals("spock-core-1.2-groovy")) { // special handling
                    jar = "spock-core";
                    version = "1.2-groovy-2.4";
                } else {
                    String tmp = JAR_TO_BUILD_PROP.get(jar);
                    if (tmp != null) {
                        jar = tmp;
                    }
                }
                String expected = versions.get(jar);
                if (expected == null) {
                    final String message =
                            "Didn't find version for jar name extracted by regexp, jar name extracted:"
                                    + jar + ", version extracted:" + version + ", current line:" + line;
                    System.err.println(message);
                    fail(message);
                }
                // Process ${xxx.version} references
                final Matcher mp = versionPat.matcher(expected);
                if (mp.matches()) {
                    String key = mp.group(1);
                    expected = versions.get(key);
                    toRemove.add(key); // in case it is not itself used we remove it later
                }
                propNames.remove(jar);
                if (expected == null) {
                    fail("Versions list does not contain: " + jar);
                } else if (!version.equals(expected)) {
                    assertEquals(jar, version, expected);
                }
            }
        }
        // remove any possibly unused references
        propNames.removeAll(toRemove);
        if (!propNames.isEmpty()) {
            fail("Should have no names left: "
                    + Arrays.toString(propNames.toArray())
                    + ". Check eclipse.classpath");
        }
    }

    /**
     * Check the Maven parent POM contains the versions declared in build.properties.
     * @throws IOException if something fails
     */
    @Test
    public void testMaven() throws IOException {
        // Example lines this must match:
        // <apache-bsf.version>2.4.0</apache-bsf.version>
        // <log4j-1.2-api.version>2.7</log4j-1.2-api.version>
        final Pattern p = Pattern.compile("\\s+<([^\\<\\>]+)\\.version>([^<]+)<.*");
        try (BufferedReader maven = new BufferedReader(
                new FileReader(getFileFromHome("res/maven/ApacheJMeter_parent.pom")))) { // assume default charset is OK here
            String line;
            while ((line = maven.readLine()) != null) {
                final Matcher m = p.matcher(line);
                if (m.matches()) {
                    String jar = m.group(1);
                    String version = m.group(2);
                    String expected = versions.get(jar);
                    propNames.remove(jar);
                    if (expected == null) {
                        fail("Versions list does not contain: " + jar);
                    } else if (!version.equals(expected)) {
                        assertEquals(jar, expected, version);
                    }
                }
            }
        }
        if (!propNames.isEmpty()) {
            fail("Should have no names left: "
                    + Arrays.toString(propNames.toArray())
                    + ". Check ApacheJMeter_parent.pom");
        }
    }

    /**
     * Check that each file under licenses/bin corresponds to a jar-version
     * pair declared in build.properties.
     */
    @Test
    public void testLicences() {
        Set<String> liceNames = new HashSet<>();
        for (Map.Entry<String, String> me : versions.entrySet()) {
            liceNames.add(me.getKey() + "-" + me.getValue());
        }
        assertTrue("Expected at least one license file", !liceFiles.isEmpty());
        for (String l : liceFiles) {
            if (!liceNames.remove(l)) {
                fail("Mismatched version in license file " + l);
            }
        }
    }

    /**
     * Cross-check the LICENSE file against build.properties and licenses/bin:
     * every bundled jar must be listed, no stale entries, and every entry with
     * a trailing comment must have a matching license file.
     * @throws Exception if something fails
     */
    @Test
    public void testLICENSE() throws Exception {
        // Jars used during the build only; not mentioned in LICENSE
        HashSet<String> buildOnly = new HashSet<>();
        buildOnly.addAll(Arrays.asList(new String[]{"bcprov", "bcmail", "bcpkix"}));
        // Build set of names expected to be mentioned in LICENSE
        final HashSet<String> binaryJarNames = new HashSet<>();
        for (Map.Entry<String, String> me : versions.entrySet()) {
            final String key = me.getKey();
            final String jarName = key + "-" + me.getValue();
            if (propNames.contains(key) && !buildOnly.contains(key)) {
                binaryJarNames.add(jarName);
            }
        }
        // Extract the jar names from LICENSE, e.g. "* foo-1.2.3.jar (see licenses/bin)"
        final Pattern p = Pattern.compile("^\\* (\\S+?)\\.jar(.*)");
        final HashSet<String> namesInLicenseFile = new HashSet<>(); // names documented in LICENSE
        final HashSet<String> externalNamesinLicenseFile = new HashSet<>(); // names documented in LICENSE with licenses/bin entries
        try (BufferedReader license = new BufferedReader(
                new FileReader(getFileFromHome("LICENSE")))) { // assume default charset is OK here
            String line;
            while ((line = license.readLine()) != null) {
                final Matcher m = p.matcher(line);
                if (m.matches()) {
                    final String name = m.group(1);
                    assertTrue("Duplicate jar in LICENSE file " + line, namesInLicenseFile.add(name));
                    if (!binaryJarNames.contains(name) && !line.contains("darcula")) {
                        fail("Unexpected entry in LICENCE file: " + line);
                    }
                    final String comment = m.group(2);
                    if (comment.length() > 0) { // must be in external list
                        externalNamesinLicenseFile.add(name);
                    }
                }
            }
        }
        // Check all build.properties entries are in LICENSE file
        for (String s : binaryJarNames) {
            if (!namesInLicenseFile.contains(s)) {
                fail("LICENSE does not contain entry for " + s);
            }
        }
        // Check that external license files are present
        for (String s : externalNamesinLicenseFile) {
            if (!liceFiles.contains(s)) {
                fail("bin/licenses does not contain a file for " + s);
            }
        }
        // Check that there are no license/bin files not mentioned in LICENSE
        for (String s : liceFiles) {
            if (!namesInLicenseFile.contains(s)) {
                fail("LICENSE does not contain entry for " + s);
            }
        }
    }

    /**
     * Check that all downloads use Maven Central.
     */
    @Test
    public void testMavenDownload() {
        int fails = 0;
        for (Entry<Object, Object> entry : prop.entrySet()) {
            final String key = (String) entry.getKey();
            if (key.endsWith(".loc")) {
                final String value = (String) entry.getValue();
                if (!value.startsWith("${maven2.repo}")) {
                    fails++;
                    System.err.println("ERROR: non-Maven download detected\n" + key + "=" + value);
                }
            }
        }
        if (fails > 0) {
            // TODO replace with fail() once all downloads have been migrated
            System.err.println("ERROR: All files must be available from Maven Central; but " + fails + " use(s) a different download source");
        }
    }
}
| |
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1beta1/custom_job.proto
package com.google.cloud.aiplatform.v1beta1;
/**
*
*
* <pre>
* All parameters related to queuing and scheduling of custom jobs.
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1beta1.Scheduling}
*/
public final class Scheduling extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.Scheduling)
SchedulingOrBuilder {
private static final long serialVersionUID = 0L;
// Use Scheduling.newBuilder() to construct.
private Scheduling(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private Scheduling() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new Scheduling();
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
return this.unknownFields;
}
private Scheduling(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.protobuf.Duration.Builder subBuilder = null;
if (timeout_ != null) {
subBuilder = timeout_.toBuilder();
}
timeout_ =
input.readMessage(com.google.protobuf.Duration.parser(), extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(timeout_);
timeout_ = subBuilder.buildPartial();
}
break;
}
case 24:
{
restartJobOnWorkerRestart_ = input.readBool();
break;
}
default:
{
if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1beta1.CustomJobProto
.internal_static_google_cloud_aiplatform_v1beta1_Scheduling_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1beta1.CustomJobProto
.internal_static_google_cloud_aiplatform_v1beta1_Scheduling_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1beta1.Scheduling.class,
com.google.cloud.aiplatform.v1beta1.Scheduling.Builder.class);
}
public static final int TIMEOUT_FIELD_NUMBER = 1;
private com.google.protobuf.Duration timeout_;
/**
*
*
* <pre>
* The maximum job running time. The default is 7 days.
* </pre>
*
* <code>.google.protobuf.Duration timeout = 1;</code>
*
* @return Whether the timeout field is set.
*/
@java.lang.Override
public boolean hasTimeout() {
return timeout_ != null;
}
/**
*
*
* <pre>
* The maximum job running time. The default is 7 days.
* </pre>
*
* <code>.google.protobuf.Duration timeout = 1;</code>
*
* @return The timeout.
*/
@java.lang.Override
public com.google.protobuf.Duration getTimeout() {
return timeout_ == null ? com.google.protobuf.Duration.getDefaultInstance() : timeout_;
}
/**
*
*
* <pre>
* The maximum job running time. The default is 7 days.
* </pre>
*
* <code>.google.protobuf.Duration timeout = 1;</code>
*/
@java.lang.Override
public com.google.protobuf.DurationOrBuilder getTimeoutOrBuilder() {
return getTimeout();
}
public static final int RESTART_JOB_ON_WORKER_RESTART_FIELD_NUMBER = 3;
private boolean restartJobOnWorkerRestart_;
/**
*
*
* <pre>
* Restarts the entire CustomJob if a worker gets restarted.
* This feature can be used by distributed training jobs that are not
* resilient to workers leaving and joining a job.
* </pre>
*
* <code>bool restart_job_on_worker_restart = 3;</code>
*
* @return The restartJobOnWorkerRestart.
*/
@java.lang.Override
public boolean getRestartJobOnWorkerRestart() {
return restartJobOnWorkerRestart_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (timeout_ != null) {
output.writeMessage(1, getTimeout());
}
if (restartJobOnWorkerRestart_ != false) {
output.writeBool(3, restartJobOnWorkerRestart_);
}
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (timeout_ != null) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getTimeout());
}
if (restartJobOnWorkerRestart_ != false) {
size += com.google.protobuf.CodedOutputStream.computeBoolSize(3, restartJobOnWorkerRestart_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.aiplatform.v1beta1.Scheduling)) {
return super.equals(obj);
}
com.google.cloud.aiplatform.v1beta1.Scheduling other =
(com.google.cloud.aiplatform.v1beta1.Scheduling) obj;
if (hasTimeout() != other.hasTimeout()) return false;
if (hasTimeout()) {
if (!getTimeout().equals(other.getTimeout())) return false;
}
if (getRestartJobOnWorkerRestart() != other.getRestartJobOnWorkerRestart()) return false;
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasTimeout()) {
hash = (37 * hash) + TIMEOUT_FIELD_NUMBER;
hash = (53 * hash) + getTimeout().hashCode();
}
hash = (37 * hash) + RESTART_JOB_ON_WORKER_RESTART_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getRestartJobOnWorkerRestart());
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.aiplatform.v1beta1.Scheduling parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1beta1.Scheduling parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.Scheduling parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1beta1.Scheduling parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.Scheduling parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1beta1.Scheduling parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.Scheduling parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1beta1.Scheduling parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.Scheduling parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1beta1.Scheduling parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.Scheduling parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1beta1.Scheduling parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.aiplatform.v1beta1.Scheduling prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* All parameters related to queuing and scheduling of custom jobs.
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1beta1.Scheduling}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.Scheduling)
com.google.cloud.aiplatform.v1beta1.SchedulingOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1beta1.CustomJobProto
.internal_static_google_cloud_aiplatform_v1beta1_Scheduling_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1beta1.CustomJobProto
.internal_static_google_cloud_aiplatform_v1beta1_Scheduling_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1beta1.Scheduling.class,
com.google.cloud.aiplatform.v1beta1.Scheduling.Builder.class);
}
// Construct using com.google.cloud.aiplatform.v1beta1.Scheduling.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
}
@java.lang.Override
public Builder clear() {
super.clear();
if (timeoutBuilder_ == null) {
timeout_ = null;
} else {
timeout_ = null;
timeoutBuilder_ = null;
}
restartJobOnWorkerRestart_ = false;
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.aiplatform.v1beta1.CustomJobProto
.internal_static_google_cloud_aiplatform_v1beta1_Scheduling_descriptor;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.Scheduling getDefaultInstanceForType() {
return com.google.cloud.aiplatform.v1beta1.Scheduling.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.Scheduling build() {
com.google.cloud.aiplatform.v1beta1.Scheduling result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.Scheduling buildPartial() {
com.google.cloud.aiplatform.v1beta1.Scheduling result =
new com.google.cloud.aiplatform.v1beta1.Scheduling(this);
if (timeoutBuilder_ == null) {
result.timeout_ = timeout_;
} else {
result.timeout_ = timeoutBuilder_.build();
}
result.restartJobOnWorkerRestart_ = restartJobOnWorkerRestart_;
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.aiplatform.v1beta1.Scheduling) {
return mergeFrom((com.google.cloud.aiplatform.v1beta1.Scheduling) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.aiplatform.v1beta1.Scheduling other) {
if (other == com.google.cloud.aiplatform.v1beta1.Scheduling.getDefaultInstance()) return this;
if (other.hasTimeout()) {
mergeTimeout(other.getTimeout());
}
if (other.getRestartJobOnWorkerRestart() != false) {
setRestartJobOnWorkerRestart(other.getRestartJobOnWorkerRestart());
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.cloud.aiplatform.v1beta1.Scheduling parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (com.google.cloud.aiplatform.v1beta1.Scheduling) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private com.google.protobuf.Duration timeout_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Duration,
com.google.protobuf.Duration.Builder,
com.google.protobuf.DurationOrBuilder>
timeoutBuilder_;
/**
*
*
* <pre>
* The maximum job running time. The default is 7 days.
* </pre>
*
* <code>.google.protobuf.Duration timeout = 1;</code>
*
* @return Whether the timeout field is set.
*/
public boolean hasTimeout() {
return timeoutBuilder_ != null || timeout_ != null;
}
/**
*
*
* <pre>
* The maximum job running time. The default is 7 days.
* </pre>
*
* <code>.google.protobuf.Duration timeout = 1;</code>
*
* @return The timeout.
*/
public com.google.protobuf.Duration getTimeout() {
if (timeoutBuilder_ == null) {
return timeout_ == null ? com.google.protobuf.Duration.getDefaultInstance() : timeout_;
} else {
return timeoutBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* The maximum job running time. The default is 7 days.
* </pre>
*
* <code>.google.protobuf.Duration timeout = 1;</code>
*/
public Builder setTimeout(com.google.protobuf.Duration value) {
if (timeoutBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
timeout_ = value;
onChanged();
} else {
timeoutBuilder_.setMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The maximum job running time. The default is 7 days.
* </pre>
*
* <code>.google.protobuf.Duration timeout = 1;</code>
*/
public Builder setTimeout(com.google.protobuf.Duration.Builder builderForValue) {
if (timeoutBuilder_ == null) {
timeout_ = builderForValue.build();
onChanged();
} else {
timeoutBuilder_.setMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The maximum job running time. The default is 7 days.
* </pre>
*
* <code>.google.protobuf.Duration timeout = 1;</code>
*/
public Builder mergeTimeout(com.google.protobuf.Duration value) {
if (timeoutBuilder_ == null) {
if (timeout_ != null) {
timeout_ =
com.google.protobuf.Duration.newBuilder(timeout_).mergeFrom(value).buildPartial();
} else {
timeout_ = value;
}
onChanged();
} else {
timeoutBuilder_.mergeFrom(value);
}
return this;
}
/**
*
*
* <pre>
* The maximum job running time. The default is 7 days.
* </pre>
*
* <code>.google.protobuf.Duration timeout = 1;</code>
*/
public Builder clearTimeout() {
if (timeoutBuilder_ == null) {
timeout_ = null;
onChanged();
} else {
timeout_ = null;
timeoutBuilder_ = null;
}
return this;
}
/**
*
*
* <pre>
* The maximum job running time. The default is 7 days.
* </pre>
*
* <code>.google.protobuf.Duration timeout = 1;</code>
*/
public com.google.protobuf.Duration.Builder getTimeoutBuilder() {
onChanged();
return getTimeoutFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* The maximum job running time. The default is 7 days.
* </pre>
*
* <code>.google.protobuf.Duration timeout = 1;</code>
*/
public com.google.protobuf.DurationOrBuilder getTimeoutOrBuilder() {
if (timeoutBuilder_ != null) {
return timeoutBuilder_.getMessageOrBuilder();
} else {
return timeout_ == null ? com.google.protobuf.Duration.getDefaultInstance() : timeout_;
}
}
/**
*
*
* <pre>
* The maximum job running time. The default is 7 days.
* </pre>
*
* <code>.google.protobuf.Duration timeout = 1;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Duration,
com.google.protobuf.Duration.Builder,
com.google.protobuf.DurationOrBuilder>
getTimeoutFieldBuilder() {
if (timeoutBuilder_ == null) {
timeoutBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Duration,
com.google.protobuf.Duration.Builder,
com.google.protobuf.DurationOrBuilder>(
getTimeout(), getParentForChildren(), isClean());
timeout_ = null;
}
return timeoutBuilder_;
}
private boolean restartJobOnWorkerRestart_;
/**
*
*
* <pre>
* Restarts the entire CustomJob if a worker gets restarted.
* This feature can be used by distributed training jobs that are not
* resilient to workers leaving and joining a job.
* </pre>
*
* <code>bool restart_job_on_worker_restart = 3;</code>
*
* @return The restartJobOnWorkerRestart.
*/
@java.lang.Override
public boolean getRestartJobOnWorkerRestart() {
return restartJobOnWorkerRestart_;
}
/**
*
*
* <pre>
* Restarts the entire CustomJob if a worker gets restarted.
* This feature can be used by distributed training jobs that are not
* resilient to workers leaving and joining a job.
* </pre>
*
* <code>bool restart_job_on_worker_restart = 3;</code>
*
* @param value The restartJobOnWorkerRestart to set.
* @return This builder for chaining.
*/
public Builder setRestartJobOnWorkerRestart(boolean value) {
restartJobOnWorkerRestart_ = value;
onChanged();
return this;
}
/**
*
*
* <pre>
* Restarts the entire CustomJob if a worker gets restarted.
* This feature can be used by distributed training jobs that are not
* resilient to workers leaving and joining a job.
* </pre>
*
* <code>bool restart_job_on_worker_restart = 3;</code>
*
* @return This builder for chaining.
*/
public Builder clearRestartJobOnWorkerRestart() {
restartJobOnWorkerRestart_ = false;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.Scheduling)
}
// @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.Scheduling)
// Shared singleton default instance; proto message objects are immutable.
private static final com.google.cloud.aiplatform.v1beta1.Scheduling DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1beta1.Scheduling();
}
public static com.google.cloud.aiplatform.v1beta1.Scheduling getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Parser that deserializes Scheduling messages from the wire format by delegating to the
// parsing constructor generated for this message.
private static final com.google.protobuf.Parser<Scheduling> PARSER =
new com.google.protobuf.AbstractParser<Scheduling>() {
@java.lang.Override
public Scheduling parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new Scheduling(input, extensionRegistry);
}
};
public static com.google.protobuf.Parser<Scheduling> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<Scheduling> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.Scheduling getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.jackrabbit.oak.plugins.document;
import java.util.Arrays;
import java.util.Objects;
import org.apache.jackrabbit.oak.cache.CacheValue;
import org.apache.jackrabbit.oak.commons.PathUtils;
import org.apache.jackrabbit.oak.commons.StringUtils;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.collect.Iterables.elementsEqual;
/**
 * The {@code Path} class is closely modeled after the semantics of
 * {@code PathUtils} in oak-commons. Corresponding methods in this class can
 * be used as a replacement for the methods in {@code PathUtils} on {@code Path}
 * objects.
 * <p>
 * A {@code Path} is an immutable, singly-linked chain of name elements; each
 * instance holds its last name element plus a reference to its parent.
 */
public final class Path implements CacheValue, Comparable<Path> {

    /** The absolute root path {@code /}; the only path with an empty name. */
    public static final Path ROOT = new Path(null, "", "".hashCode());

    @Nullable
    private final Path parent;

    @NotNull
    private final String name;

    // Lazily computed hash code; -1 is the "not computed yet" sentinel (see hashCode()).
    private int hash;

    private Path(@Nullable Path parent,
                 @NotNull String name,
                 int hash) {
        this.parent = parent;
        this.name = name;
        this.hash = hash;
    }

    /**
     * Creates a new {@code Path} from the given parent {@code Path}. The name
     * of the new {@code Path} cannot be the empty {@code String}.
     *
     * @param parent the parent {@code Path}.
     * @param name the name of the new {@code Path}.
     * @throws IllegalArgumentException if the {@code name} is empty.
     */
    public Path(@NotNull Path parent, @NotNull String name) {
        this(checkNotNull(parent), checkNotNull(name), -1);
        checkArgument(!name.isEmpty(), "name cannot be the empty String");
    }

    /**
     * Creates a relative path with a single name element. The name cannot be
     * the empty {@code String} and must not contain the path separator.
     *
     * @param name the name of the first path element.
     * @throws IllegalArgumentException if the {@code name} is empty or
     *      contains {@code '/'}.
     */
    public Path(@NotNull String name) {
        this(null, checkNotNull(name), -1);
        checkArgument(!name.isEmpty(), "name cannot be the empty String");
        // Guava's Preconditions use %s placeholders; the previous {} placeholder
        // was never interpolated into the message.
        checkArgument(name.indexOf('/') == -1, "name must not contain path separator: %s", name);
    }

    /**
     * Returns the name of this path. The {@link #ROOT} is the only path with
     * an empty name. That is a String with length zero.
     *
     * @return the name of this path.
     */
    @NotNull
    public String getName() {
        return name;
    }

    /**
     * Returns the names of the path elements with increasing {@link #getDepth()}
     * starting at depth 1.
     *
     * @return the names of the path elements.
     */
    @NotNull
    public Iterable<String> elements() {
        return elements(false);
    }

    /**
     * Returns {@code true} if this is the {@link #ROOT} path; {@code false}
     * otherwise.
     *
     * @return whether this is the {@link #ROOT} path.
     */
    public boolean isRoot() {
        return name.isEmpty();
    }

    /**
     * The parent of this path or {@code null} if this path does not have a
     * parent. The {@link #ROOT} path and the first path element of a relative
     * path do not have a parent.
     *
     * @return the parent of this path or {@code null} if this path does not
     *      have a parent.
     */
    @Nullable
    public Path getParent() {
        return parent;
    }

    /**
     * @return the number of characters of the {@code String} representation of
     *      this path.
     */
    public int length() {
        if (isRoot()) {
            return 1;
        }
        int length = 0;
        Path p = this;
        while (p != null) {
            length += p.name.length();
            if (p.parent != null) {
                // account for the '/' separator in front of this element
                length++;
            }
            p = p.parent;
        }
        return length;
    }

    /**
     * The depth of this path. The {@link #ROOT} has a depth of 0. The path
     * {@code /foo/bar} as well as {@code bar/baz} have depth 2.
     *
     * @return the depth of the path.
     */
    public int getDepth() {
        return getNumberOfPathElements(false);
    }

    /**
     * Get the nth ancestor of a path. The 1st ancestor is the parent path,
     * 2nd ancestor the grandparent path, and so on...
     * <p>
     * If {@code nth <= 0}, then this path is returned.
     *
     * @param nth indicates the ancestor level for which the path should be
     *      calculated.
     * @return the ancestor path
     */
    @NotNull
    public Path getAncestor(int nth) {
        Path p = this;
        while (nth-- > 0 && p.parent != null) {
            p = p.parent;
        }
        return p;
    }

    /**
     * Return {@code true} if {@code this} path is an ancestor of the
     * {@code other} path, otherwise {@code false}.
     *
     * @param other the other path.
     * @return whether this path is an ancestor of the other path.
     */
    public boolean isAncestorOf(@NotNull Path other) {
        checkNotNull(other);
        int depthDiff = other.getDepth() - getDepth();
        // A strict ancestor must be shallower, and trimming the other path to
        // this depth must yield exactly this path's elements.
        return depthDiff > 0
                && elementsEqual(elements(true), other.getAncestor(depthDiff).elements(true));
    }

    /**
     * @return {@code true} if this is an absolute path; {@code false} otherwise.
     */
    public boolean isAbsolute() {
        Path p = this;
        while (p.parent != null) {
            p = p.parent;
        }
        // A path is absolute iff its first element is the ROOT.
        return p.isRoot();
    }

    /**
     * Creates a {@code Path} from a {@code String}.
     *
     * @param path the {@code String} to parse.
     * @return the {@code Path} from the {@code String}.
     * @throws IllegalArgumentException if the {@code path} is the empty
     *      {@code String}.
     */
    @NotNull
    public static Path fromString(@NotNull String path) throws IllegalArgumentException {
        checkNotNull(path);
        Path p = null;
        if (PathUtils.isAbsolute(path)) {
            p = ROOT;
        }
        for (String name : PathUtils.elements(path)) {
            // Intern the name once; it is reused for both constructor variants.
            name = StringCache.get(name);
            if (p == null) {
                p = new Path(name);
            } else {
                p = new Path(p, name);
            }
        }
        if (p == null) {
            throw new IllegalArgumentException("path must not be empty");
        }
        return p;
    }

    /**
     * Appends the {@code String} representation of this {@code Path} to the
     * passed {@code StringBuilder}. See also {@link #toString()}.
     *
     * @param sb the {@code StringBuilder} this {@code Path} is appended to.
     * @return the passed {@code StringBuilder}.
     */
    @NotNull
    public StringBuilder toStringBuilder(@NotNull StringBuilder sb) {
        if (isRoot()) {
            sb.append('/');
        } else {
            buildPath(sb);
        }
        return sb;
    }

    /**
     * Estimates the memory taken up by this path chain, counting a fixed
     * shallow size plus the name of every element.
     */
    @Override
    public int getMemory() {
        int memory = 0;
        // Walk the whole chain including the head element. The previous
        // implementation stopped one element early and charged the leaf's
        // name for every ancestor (it used 'name' instead of 'p.name').
        for (Path p = this; p != null; p = p.parent) {
            memory += 24; // shallow size of one Path node
            memory += StringUtils.estimateMemoryUsage(p.name);
        }
        return memory;
    }

    @Override
    public int compareTo(@NotNull Path other) {
        if (this == other) {
            return 0;
        }
        Path t = this;
        // Align both paths to the same number of elements, then compare
        // element-wise; ties are broken by the original length difference.
        int off = t.getNumberOfPathElements(true) -
                checkNotNull(other).getNumberOfPathElements(true);
        int corrected = off;
        while (corrected > 0) {
            t = t.parent;
            corrected--;
        }
        while (corrected < 0) {
            other = other.parent;
            corrected++;
        }
        int cp = comparePath(t, other);
        if (cp != 0) {
            return cp;
        }
        return Integer.signum(off);
    }

    @Override
    public String toString() {
        if (isRoot()) {
            return "/";
        } else {
            return buildPath(new StringBuilder(length())).toString();
        }
    }

    @Override
    public int hashCode() {
        int h = hash;
        if (h == -1) {
            // Compute and cache lazily. The previous guard (h == -1 && parent != null)
            // meant every single-element relative path permanently returned the
            // sentinel -1 and its name never contributed to the hash.
            h = 17;
            h = 37 * h + (parent != null ? parent.hashCode() : 0);
            h = 37 * h + name.hashCode();
            hash = h;
        }
        return h;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        } else if (obj instanceof Path) {
            Path other = (Path) obj;
            return this.name.equals(other.name)
                    && Objects.equals(this.parent, other.parent);
        }
        return false;
    }

    //-------------------------< internal >-------------------------------------

    /**
     * Collects the element names into a fixed-size list, optionally including
     * the empty name of the ROOT element for absolute paths.
     */
    private Iterable<String> elements(boolean withRoot) {
        int size = getNumberOfPathElements(withRoot);
        String[] elements = new String[size];
        Path p = this;
        for (int i = size - 1; p != null; i--) {
            if (withRoot || !p.isRoot()) {
                elements[i] = p.name;
            }
            p = p.parent;
        }
        return Arrays.asList(elements);
    }

    /** Recursively appends parent elements, then this name, separated by '/'. */
    private StringBuilder buildPath(StringBuilder sb) {
        if (parent != null) {
            parent.buildPath(sb).append("/");
        }
        sb.append(name);
        return sb;
    }

    /**
     * Returns the number of path elements. Depending on {@code withRoot} the
     * root of an absolute path is also taken into account.
     *
     * @param withRoot whether the root of an absolute path is also counted.
     * @return the number of path elements.
     */
    private int getNumberOfPathElements(boolean withRoot) {
        int depth = 0;
        for (Path p = this; p != null; p = p.parent) {
            if (withRoot || !p.isRoot()) {
                depth++;
            }
        }
        return depth;
    }

    /**
     * Compares two paths with the same number of elements, most significant
     * (closest to the root) element first.
     */
    private static int comparePath(Path a, Path b) {
        if (a.parent != b.parent) {
            int cp = comparePath(a.parent, b.parent);
            if (cp != 0) {
                return cp;
            }
        }
        return a.name.compareTo(b.name);
    }
}
| |
/*
* Copyright (C) 2008 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.common.collect;
import static com.google.common.base.Preconditions.checkNotNull;
import com.google.common.annotations.Beta;
import com.google.common.annotations.GwtCompatible;
import com.google.common.annotations.GwtIncompatible;
import com.google.common.base.Function;
import com.google.common.base.Joiner;
import com.google.common.base.Optional;
import com.google.common.base.Predicate;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import java.util.Arrays;
import java.util.Collection;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.SortedSet;
import org.checkerframework.checker.nullness.compatqual.NullableDecl;
/**
* An expanded {@code Iterable} API, providing functionality similar to Java 8's powerful <a href=
* "https://docs.oracle.com/javase/8/docs/api/java/util/stream/package-summary.html#package.description"
* >streams library</a> in a slightly different way.
*
* <p>The following types of methods are provided:
*
* <ul>
* <li>chaining methods which return a new {@code FluentIterable} based in some way on the
* contents of the current one (for example {@link #transform})
* <li>element extraction methods which facilitate the retrieval of certain elements (for example
* {@link #last})
* <li>query methods which answer questions about the {@code FluentIterable}'s contents (for
* example {@link #anyMatch})
* <li>conversion methods which copy the {@code FluentIterable}'s contents into a new collection
* or array (for example {@link #toList})
* </ul>
*
* <p>Several lesser-used features are currently available only as static methods on the {@link
* Iterables} class.
*
* <p><a name="streams"></a>
*
* <h3>Comparison to streams</h3>
*
* <p>Starting with Java 8, the core Java class libraries provide a new "Streams" library (in {@code
* java.util.stream}), which is similar to {@code FluentIterable} but generally more powerful. Key
* differences include:
*
* <ul>
* <li>A stream is <i>single-use</i>; it becomes invalid as soon as any "terminal operation" such
* as {@code findFirst()} or {@code iterator()} is invoked. (Even though {@code Stream}
* contains all the right method <i>signatures</i> to implement {@link Iterable}, it does not
* actually do so, to avoid implying repeat-iterability.) {@code FluentIterable}, on the other
* hand, is multiple-use, and does implement {@link Iterable}.
* <li>Streams offer many features not found here, including {@code min/max}, {@code distinct},
* {@code reduce}, {@code sorted}, the very powerful {@code collect}, and built-in support for
* parallelizing stream operations.
* <li>{@code FluentIterable} contains several features not available on {@code Stream}, which are
* noted in the method descriptions below.
* <li>Streams include primitive-specialized variants such as {@code IntStream}, the use of which
* is strongly recommended.
* <li>Streams are standard Java, not requiring a third-party dependency (but do render your code
* incompatible with Java 7 and earlier).
* </ul>
*
* <h3>Example</h3>
*
* <p>Here is an example that accepts a list from a database call, filters it based on a predicate,
* transforms it by invoking {@code toString()} on each element, and returns the first 10 elements
* as a {@code List}:
*
* <pre>{@code
* ImmutableList<String> results =
* FluentIterable.from(database.getClientList())
* .filter(Client::isActiveInLastMonth)
* .transform(Object::toString)
* .limit(10)
* .toList();
* }</pre>
*
* The approximate stream equivalent is:
*
* <pre>{@code
* List<String> results =
* database.getClientList()
* .stream()
* .filter(Client::isActiveInLastMonth)
* .map(Object::toString)
* .limit(10)
* .collect(Collectors.toList());
* }</pre>
*
* @author Marcin Mikosik
* @since 12.0
*/
@GwtCompatible(emulated = true)
public abstract class FluentIterable<E> implements Iterable<E> {
// We store 'iterable' and use it instead of 'this' to allow Iterables to perform instanceof
// checks on the _original_ iterable when FluentIterable.from is used.
// To avoid a self retain cycle under j2objc, we store Optional.absent() instead of
// Optional.of(this). To access the iterator delegate, call #getDelegate(), which converts
// absent() back to 'this'.
private final Optional<Iterable<E>> iterableDelegate;
/** Constructor for use by subclasses. */
protected FluentIterable() {
this.iterableDelegate = Optional.absent();
}
// Wrapping constructor used by from(Iterable): stores the delegate, or absent() when the
// delegate would be this very instance (see the field comment above).
FluentIterable(Iterable<E> iterable) {
checkNotNull(iterable);
this.iterableDelegate = Optional.fromNullable(this != iterable ? iterable : null);
}
// Resolves the stored delegate, translating absent() back to 'this'.
private Iterable<E> getDelegate() {
return iterableDelegate.or(this);
}
/**
* Returns a fluent iterable that wraps {@code iterable}, or {@code iterable} itself if it is
* already a {@code FluentIterable}.
*
* <p><b>{@code Stream} equivalent:</b> {@code iterable.stream()} if {@code iterable} is a {@link
* Collection}; {@code StreamSupport.stream(iterable.spliterator(), false)} otherwise.
*/
public static <E> FluentIterable<E> from(final Iterable<E> iterable) {
return (iterable instanceof FluentIterable)
? (FluentIterable<E>) iterable
: new FluentIterable<E>(iterable) {
@Override
public Iterator<E> iterator() {
return iterable.iterator();
}
};
}
/**
* Returns a fluent iterable containing {@code elements} in the specified order.
*
* <p>The returned iterable is an unmodifiable view of the input array.
*
* <p><b>{@code Stream} equivalent:</b> {@link java.util.stream.Stream#of(Object[])
* Stream.of(T...)}.
*
* @since 20.0 (since 18.0 as an overload of {@code of})
*/
@Beta
public static <E> FluentIterable<E> from(E[] elements) {
return from(Arrays.asList(elements));
}
/**
* Construct a fluent iterable from another fluent iterable. This is obviously never necessary,
* but is intended to help call out cases where one migration from {@code Iterable} to {@code
* FluentIterable} has obviated the need to explicitly convert to a {@code FluentIterable}.
*
* @deprecated instances of {@code FluentIterable} don't need to be converted to {@code
* FluentIterable}
*/
@Deprecated
public static <E> FluentIterable<E> from(FluentIterable<E> iterable) {
return checkNotNull(iterable);
}
/**
* Returns a fluent iterable that combines two iterables. The returned iterable has an iterator
* that traverses the elements in {@code a}, followed by the elements in {@code b}. The source
* iterators are not polled until necessary.
*
* <p>The returned iterable's iterator supports {@code remove()} when the corresponding input
* iterator supports it.
*
* <p><b>{@code Stream} equivalent:</b> {@link Stream#concat}.
*
* @since 20.0
*/
@Beta
public static <T> FluentIterable<T> concat(Iterable<? extends T> a, Iterable<? extends T> b) {
return concatNoDefensiveCopy(a, b);
}
/**
* Returns a fluent iterable that combines three iterables. The returned iterable has an iterator
* that traverses the elements in {@code a}, followed by the elements in {@code b}, followed by
* the elements in {@code c}. The source iterators are not polled until necessary.
*
* <p>The returned iterable's iterator supports {@code remove()} when the corresponding input
* iterator supports it.
*
* <p><b>{@code Stream} equivalent:</b> use nested calls to {@link Stream#concat}, or see the
* advice in {@link #concat(Iterable...)}.
*
* @since 20.0
*/
@Beta
public static <T> FluentIterable<T> concat(
Iterable<? extends T> a, Iterable<? extends T> b, Iterable<? extends T> c) {
return concatNoDefensiveCopy(a, b, c);
}
/**
* Returns a fluent iterable that combines four iterables. The returned iterable has an iterator
* that traverses the elements in {@code a}, followed by the elements in {@code b}, followed by
* the elements in {@code c}, followed by the elements in {@code d}. The source iterators are not
* polled until necessary.
*
* <p>The returned iterable's iterator supports {@code remove()} when the corresponding input
* iterator supports it.
*
* <p><b>{@code Stream} equivalent:</b> use nested calls to {@link Stream#concat}, or see the
* advice in {@link #concat(Iterable...)}.
*
* @since 20.0
*/
@Beta
public static <T> FluentIterable<T> concat(
Iterable<? extends T> a,
Iterable<? extends T> b,
Iterable<? extends T> c,
Iterable<? extends T> d) {
return concatNoDefensiveCopy(a, b, c, d);
}
/**
* Returns a fluent iterable that combines several iterables. The returned iterable has an
* iterator that traverses the elements of each iterable in {@code inputs}. The input iterators
* are not polled until necessary.
*
* <p>The returned iterable's iterator supports {@code remove()} when the corresponding input
* iterator supports it.
*
* <p><b>{@code Stream} equivalent:</b> to concatenate an arbitrary number of streams, use {@code
* Stream.of(stream1, stream2, ...).flatMap(s -> s)}. If the sources are iterables, use {@code
* Stream.of(iter1, iter2, ...).flatMap(Streams::stream)}.
*
* @throws NullPointerException if any of the provided iterables is {@code null}
* @since 20.0
*/
@Beta
public static <T> FluentIterable<T> concat(Iterable<? extends T>... inputs) {
return concatNoDefensiveCopy(Arrays.copyOf(inputs, inputs.length));
}
/**
* Returns a fluent iterable that combines several iterables. The returned iterable has an
* iterator that traverses the elements of each iterable in {@code inputs}. The input iterators
* are not polled until necessary.
*
* <p>The returned iterable's iterator supports {@code remove()} when the corresponding input
* iterator supports it. The methods of the returned iterable may throw {@code
* NullPointerException} if any of the input iterators is {@code null}.
*
* <p><b>{@code Stream} equivalent:</b> {@code streamOfStreams.flatMap(s -> s)} or {@code
* streamOfIterables.flatMap(Streams::stream)}. (See {@link Streams#stream}.)
*
* @since 20.0
*/
@Beta
public static <T> FluentIterable<T> concat(
final Iterable<? extends Iterable<? extends T>> inputs) {
checkNotNull(inputs);
return new FluentIterable<T>() {
@Override
public Iterator<T> iterator() {
return Iterators.concat(Iterators.transform(inputs.iterator(), Iterables.<T>toIterator()));
}
};
}
/** Concatenates a varargs array of iterables without making a defensive copy of the array. */
// Callers must not mutate 'inputs' afterwards: the returned view reads the array lazily.
private static <T> FluentIterable<T> concatNoDefensiveCopy(
final Iterable<? extends T>... inputs) {
// Null-check eagerly so a bad argument fails at call time rather than at iteration time.
for (Iterable<? extends T> input : inputs) {
checkNotNull(input);
}
return new FluentIterable<T>() {
@Override
public Iterator<T> iterator() {
return Iterators.concat(
/* lazily generate the iterators on each input only as needed */
new AbstractIndexedListIterator<Iterator<? extends T>>(inputs.length) {
@Override
public Iterator<? extends T> get(int i) {
return inputs[i].iterator();
}
});
}
};
}
/**
* Returns a fluent iterable containing no elements.
*
* <p><b>{@code Stream} equivalent:</b> {@code Stream.empty()}.
*
* @since 20.0
*/
@Beta
public static <E> FluentIterable<E> of() {
return FluentIterable.from(ImmutableList.<E>of());
}
/**
* Returns a fluent iterable containing the specified elements in order.
*
* <p><b>{@code Stream} equivalent:</b> {@link java.util.stream.Stream#of(Object[])
* Stream.of(T...)}.
*
* @since 20.0
*/
@Beta
public static <E> FluentIterable<E> of(@NullableDecl E element, E... elements) {
return from(Lists.asList(element, elements));
}
/**
* Returns a string representation of this fluent iterable, with the format {@code [e1, e2, ...,
* en]}.
*
* <p><b>{@code Stream} equivalent:</b> {@code stream.collect(Collectors.joining(", ", "[", "]"))}
* or (less efficiently) {@code stream.collect(Collectors.toList()).toString()}.
*/
@Override
public String toString() {
return Iterables.toString(getDelegate());
}
/**
* Returns the number of elements in this fluent iterable.
*
* <p><b>{@code Stream} equivalent:</b> {@code stream.count()}.
*/
public final int size() {
return Iterables.size(getDelegate());
}
/**
* Returns {@code true} if this fluent iterable contains any object for which {@code
* equals(target)} is true.
*
* <p><b>{@code Stream} equivalent:</b> {@code stream.anyMatch(Predicate.isEqual(target))}.
*/
public final boolean contains(@NullableDecl Object target) {
return Iterables.contains(getDelegate(), target);
}
/**
* Returns a fluent iterable whose {@code Iterator} cycles indefinitely over the elements of this
* fluent iterable.
*
* <p>That iterator supports {@code remove()} if {@code iterable.iterator()} does. After {@code
* remove()} is called, subsequent cycles omit the removed element, which is no longer in this
* fluent iterable. The iterator's {@code hasNext()} method returns {@code true} until this fluent
* iterable is empty.
*
* <p><b>Warning:</b> Typical uses of the resulting iterator may produce an infinite loop. You
* should use an explicit {@code break} or be certain that you will eventually remove all the
* elements.
*
* <p><b>{@code Stream} equivalent:</b> if the source iterable has only a single element {@code
* e}, use {@code Stream.generate(() -> e)}. Otherwise, collect your stream into a collection and
* use {@code Stream.generate(() -> collection).flatMap(Collection::stream)}.
*/
public final FluentIterable<E> cycle() {
return from(Iterables.cycle(getDelegate()));
}
/**
* Returns a fluent iterable whose iterators traverse first the elements of this fluent iterable,
* followed by those of {@code other}. The iterators are not polled until necessary.
*
* <p>The returned iterable's {@code Iterator} supports {@code remove()} when the corresponding
* {@code Iterator} supports it.
*
* <p><b>{@code Stream} equivalent:</b> {@link Stream#concat}.
*
* @since 18.0
*/
@Beta
public final FluentIterable<E> append(Iterable<? extends E> other) {
return FluentIterable.concat(getDelegate(), other);
}
/**
* Returns a fluent iterable whose iterators traverse first the elements of this fluent iterable,
* followed by {@code elements}.
*
* <p><b>{@code Stream} equivalent:</b> {@code Stream.concat(thisStream, Stream.of(elements))}.
*
* @since 18.0
*/
@Beta
public final FluentIterable<E> append(E... elements) {
return FluentIterable.concat(getDelegate(), Arrays.asList(elements));
}
/**
* Returns the elements from this fluent iterable that satisfy a predicate. The resulting fluent
* iterable's iterator does not support {@code remove()}.
*
* <p><b>{@code Stream} equivalent:</b> {@link Stream#filter} (same).
*/
public final FluentIterable<E> filter(Predicate<? super E> predicate) {
return from(Iterables.filter(getDelegate(), predicate));
}
/**
* Returns the elements from this fluent iterable that are instances of class {@code type}.
*
* <p><b>{@code Stream} equivalent:</b> {@code stream.filter(type::isInstance).map(type::cast)}.
* This does perform a little more work than necessary, so another option is to insert an
* unchecked cast at some later point:
*
* <pre>
* {@code @SuppressWarnings("unchecked") // safe because of ::isInstance check
* ImmutableList<NewType> result =
* (ImmutableList) stream.filter(NewType.class::isInstance).collect(toImmutableList());}
* </pre>
*/
@GwtIncompatible // Class.isInstance
public final <T> FluentIterable<T> filter(Class<T> type) {
return from(Iterables.filter(getDelegate(), type));
}
/**
* Returns {@code true} if any element in this fluent iterable satisfies the predicate.
*
* <p><b>{@code Stream} equivalent:</b> {@link Stream#anyMatch} (same).
*/
public final boolean anyMatch(Predicate<? super E> predicate) {
return Iterables.any(getDelegate(), predicate);
}
/**
* Returns {@code true} if every element in this fluent iterable satisfies the predicate. If this
* fluent iterable is empty, {@code true} is returned.
*
* <p><b>{@code Stream} equivalent:</b> {@link Stream#allMatch} (same).
*/
public final boolean allMatch(Predicate<? super E> predicate) {
return Iterables.all(getDelegate(), predicate);
}
/**
* Returns an {@link Optional} containing the first element in this fluent iterable that satisfies
* the given predicate, if such an element exists.
*
* <p><b>Warning:</b> avoid using a {@code predicate} that matches {@code null}. If {@code null}
* is matched in this fluent iterable, a {@link NullPointerException} will be thrown.
*
* <p><b>{@code Stream} equivalent:</b> {@code stream.filter(predicate).findFirst()}.
*/
public final Optional<E> firstMatch(Predicate<? super E> predicate) {
return Iterables.tryFind(getDelegate(), predicate);
}
/**
* Returns a fluent iterable that applies {@code function} to each element of this fluent
* iterable.
*
* <p>The returned fluent iterable's iterator supports {@code remove()} if this iterable's
* iterator does. After a successful {@code remove()} call, this fluent iterable no longer
* contains the corresponding element.
*
* <p><b>{@code Stream} equivalent:</b> {@link Stream#map}.
*/
public final <T> FluentIterable<T> transform(Function<? super E, T> function) {
return from(Iterables.transform(getDelegate(), function));
}
/**
* Applies {@code function} to each element of this fluent iterable and returns a fluent iterable
* with the concatenated combination of results. {@code function} returns an Iterable of results.
*
* <p>The returned fluent iterable's iterator supports {@code remove()} if this function-returned
* iterables' iterator does. After a successful {@code remove()} call, the returned fluent
* iterable no longer contains the corresponding element.
*
* <p><b>{@code Stream} equivalent:</b> {@link Stream#flatMap} (using a function that produces
* streams, not iterables).
*
* @since 13.0 (required {@code Function<E, Iterable<T>>} until 14.0)
*/
public <T> FluentIterable<T> transformAndConcat(
Function<? super E, ? extends Iterable<? extends T>> function) {
return FluentIterable.concat(transform(function));
}
/**
* Returns an {@link Optional} containing the first element in this fluent iterable. If the
* iterable is empty, {@code Optional.absent()} is returned.
*
* <p><b>{@code Stream} equivalent:</b> if the goal is to obtain any element, {@link
* Stream#findAny}; if it must specifically be the <i>first</i> element, {@code Stream#findFirst}.
*
* @throws NullPointerException if the first element is null; if this is a possibility, use {@code
* iterator().next()} or {@link Iterables#getFirst} instead.
*/
public final Optional<E> first() {
Iterator<E> iterator = getDelegate().iterator();
return iterator.hasNext() ? Optional.of(iterator.next()) : Optional.<E>absent();
}
/**
 * Returns an {@link Optional} containing the last element in this fluent iterable. If the
 * iterable is empty, {@code Optional.absent()} is returned. If the underlying {@code iterable} is
 * a {@link List} with {@link java.util.RandomAccess} support, then this operation is guaranteed
 * to be {@code O(1)}.
 *
 * <p><b>{@code Stream} equivalent:</b> {@code stream.reduce((a, b) -> b)}.
 *
 * @throws NullPointerException if the last element is null; if this is a possibility, use {@link
 *     Iterables#getLast} instead.
 */
public final Optional<E> last() {
// Iterables#getLast was inlined here so we don't have to throw/catch a NSEE
// TODO(kevinb): Support a concurrently modified collection?
Iterable<E> iterable = getDelegate();
if (iterable instanceof List) {
// Fast path: lists expose the last index directly.
List<E> list = (List<E>) iterable;
if (list.isEmpty()) {
return Optional.absent();
}
return Optional.of(list.get(list.size() - 1));
}
// Create the iterator first so the empty case is answered before any type probing.
Iterator<E> iterator = iterable.iterator();
if (!iterator.hasNext()) {
return Optional.absent();
}
/*
 * TODO(kevinb): consider whether this "optimization" is worthwhile. Users with SortedSets tend
 * to know they are SortedSets and probably would not call this method.
 */
if (iterable instanceof SortedSet) {
// Fast path: a sorted set's maximum element is its last.
SortedSet<E> sortedSet = (SortedSet<E>) iterable;
return Optional.of(sortedSet.last());
}
// General case: linear scan until the iterator is exhausted.
while (true) {
E current = iterator.next();
if (!iterator.hasNext()) {
return Optional.of(current);
}
}
}
/**
* Returns a view of this fluent iterable that skips its first {@code numberToSkip} elements. If
* this fluent iterable contains fewer than {@code numberToSkip} elements, the returned fluent
* iterable skips all of its elements.
*
* <p>Modifications to this fluent iterable before a call to {@code iterator()} are reflected in
* the returned fluent iterable. That is, the its iterator skips the first {@code numberToSkip}
* elements that exist when the iterator is created, not when {@code skip()} is called.
*
* <p>The returned fluent iterable's iterator supports {@code remove()} if the {@code Iterator} of
* this fluent iterable supports it. Note that it is <i>not</i> possible to delete the last
* skipped element by immediately calling {@code remove()} on the returned fluent iterable's
* iterator, as the {@code Iterator} contract states that a call to {@code * remove()} before a
* call to {@code next()} will throw an {@link IllegalStateException}.
*
* <p><b>{@code Stream} equivalent:</b> {@link Stream#skip} (same).
*/
public final FluentIterable<E> skip(int numberToSkip) {
return from(Iterables.skip(getDelegate(), numberToSkip));
}
/**
* Creates a fluent iterable with the first {@code size} elements of this fluent iterable. If this
* fluent iterable does not contain that many elements, the returned fluent iterable will have the
* same behavior as this fluent iterable. The returned fluent iterable's iterator supports {@code
* remove()} if this fluent iterable's iterator does.
*
* <p><b>{@code Stream} equivalent:</b> {@link Stream#limit} (same).
*
* @param maxSize the maximum number of elements in the returned fluent iterable
* @throws IllegalArgumentException if {@code size} is negative
*/
public final FluentIterable<E> limit(int maxSize) {
return from(Iterables.limit(getDelegate(), maxSize));
}
/**
* Determines whether this fluent iterable is empty.
*
* <p><b>{@code Stream} equivalent:</b> {@code !stream.findAny().isPresent()}.
*/
public final boolean isEmpty() {
return !getDelegate().iterator().hasNext();
}
/**
* Returns an {@code ImmutableList} containing all of the elements from this fluent iterable in
* proper sequence.
*
* <p><b>{@code Stream} equivalent:</b> {@code ImmutableList.copyOf(stream.iterator())}, or after
* the next release of Guava, pass {@link ImmutableList#toImmutableList} to {@code
* stream.collect()}.
*
* @throws NullPointerException if any element is {@code null}
* @since 14.0 (since 12.0 as {@code toImmutableList()}).
*/
public final ImmutableList<E> toList() {
return ImmutableList.copyOf(getDelegate());
}
/**
* Returns an {@code ImmutableList} containing all of the elements from this {@code
* FluentIterable} in the order specified by {@code comparator}. To produce an {@code
* ImmutableList} sorted by its natural ordering, use {@code toSortedList(Ordering.natural())}.
*
* <p><b>{@code Stream} equivalent:</b> {@code
* ImmutableList.copyOf(stream.sorted(comparator).iterator())}, or after the next release of
* Guava, pass {@link ImmutableList#toImmutableList} to {@code
* stream.sorted(comparator).collect()}.
*
* @param comparator the function by which to sort list elements
* @throws NullPointerException if any element of this iterable is {@code null}
* @since 14.0 (since 13.0 as {@code toSortedImmutableList()}).
*/
public final ImmutableList<E> toSortedList(Comparator<? super E> comparator) {
return Ordering.from(comparator).immutableSortedCopy(getDelegate());
}
/**
* Returns an {@code ImmutableSet} containing all of the elements from this fluent iterable with
* duplicates removed.
*
* <p><b>{@code Stream} equivalent:</b> {@code ImmutableSet.copyOf(stream.iterator())}, or after
* the next release of Guava, pass {@link ImmutableSet#toImmutableSet} to {@code
* stream.collect()}.
*
* @throws NullPointerException if any element is {@code null}
* @since 14.0 (since 12.0 as {@code toImmutableSet()}).
*/
public final ImmutableSet<E> toSet() {
return ImmutableSet.copyOf(getDelegate());
}
/**
* Returns an {@code ImmutableSortedSet} containing all of the elements from this {@code
* FluentIterable} in the order specified by {@code comparator}, with duplicates (determined by
* {@code comparator.compare(x, y) == 0}) removed. To produce an {@code ImmutableSortedSet} sorted
* by its natural ordering, use {@code toSortedSet(Ordering.natural())}.
*
* <p><b>{@code Stream} equivalent:</b> {@code ImmutableSortedSet.copyOf(comparator,
* stream.iterator())}, or after the next release of Guava, pass {@link
* ImmutableSortedSet#toImmutableSortedSet} to {@code stream.collect()}.
*
* @param comparator the function by which to sort set elements
* @throws NullPointerException if any element of this iterable is {@code null}
* @since 14.0 (since 12.0 as {@code toImmutableSortedSet()}).
*/
public final ImmutableSortedSet<E> toSortedSet(Comparator<? super E> comparator) {
return ImmutableSortedSet.copyOf(comparator, getDelegate());
}
/**
* Returns an {@code ImmutableMultiset} containing all of the elements from this fluent iterable.
*
* <p><b>{@code Stream} equivalent:</b> {@code ImmutableMultiset.copyOf(stream.iterator())}, or
* after the next release of Guava, pass {@link ImmutableMultiset#toImmutableMultiset} to {@code
* stream.collect()}.
*
* @throws NullPointerException if any element is null
* @since 19.0
*/
public final ImmutableMultiset<E> toMultiset() {
return ImmutableMultiset.copyOf(getDelegate());
}
/**
* Returns an immutable map whose keys are the distinct elements of this {@code FluentIterable}
* and whose value for each key was computed by {@code valueFunction}. The map's iteration order
* is the order of the first appearance of each key in this iterable.
*
* <p>When there are multiple instances of a key in this iterable, it is unspecified whether
* {@code valueFunction} will be applied to more than one instance of that key and, if it is,
* which result will be mapped to that key in the returned map.
*
* <p><b>{@code Stream} equivalent:</b> after the next release of Guava, use {@code
* stream.collect(ImmutableMap.toImmutableMap(k -> k, valueFunction))}. Before then you can use
* {@code ImmutableMap.copyOf(stream.collect(Collectors.toMap(k -> k, valueFunction)))}, but be
* aware that this may not preserve the order of entries.
*
* @throws NullPointerException if any element of this iterable is {@code null}, or if {@code
* valueFunction} produces {@code null} for any key
* @since 14.0
*/
public final <V> ImmutableMap<E, V> toMap(Function<? super E, V> valueFunction) {
return Maps.toMap(getDelegate(), valueFunction);
}
/**
* Creates an index {@code ImmutableListMultimap} that contains the results of applying a
* specified function to each item in this {@code FluentIterable} of values. Each element of this
* iterable will be stored as a value in the resulting multimap, yielding a multimap with the same
* size as this iterable. The key used to store that value in the multimap will be the result of
* calling the function on that value. The resulting multimap is created as an immutable snapshot.
* In the returned multimap, keys appear in the order they are first encountered, and the values
* corresponding to each key appear in the same order as they are encountered.
*
* <p><b>{@code Stream} equivalent:</b> {@code stream.collect(Collectors.groupingBy(keyFunction))}
* behaves similarly, but returns a mutable {@code Map<K, List<E>>} instead, and may not preserve
* the order of entries).
*
* @param keyFunction the function used to produce the key for each value
* @throws NullPointerException if any element of this iterable is {@code null}, or if {@code
* keyFunction} produces {@code null} for any key
* @since 14.0
*/
public final <K> ImmutableListMultimap<K, E> index(Function<? super E, K> keyFunction) {
return Multimaps.index(getDelegate(), keyFunction);
}
/**
* Returns a map with the contents of this {@code FluentIterable} as its {@code values}, indexed
* by keys derived from those values. In other words, each input value produces an entry in the
* map whose key is the result of applying {@code keyFunction} to that value. These entries appear
* in the same order as they appeared in this fluent iterable. Example usage:
*
* <pre>{@code
* Color red = new Color("red", 255, 0, 0);
* ...
* FluentIterable<Color> allColors = FluentIterable.from(ImmutableSet.of(red, green, blue));
*
* Map<String, Color> colorForName = allColors.uniqueIndex(toStringFunction());
* assertThat(colorForName).containsEntry("red", red);
* }</pre>
*
* <p>If your index may associate multiple values with each key, use {@link #index(Function)
* index}.
*
* <p><b>{@code Stream} equivalent:</b> after the next release of Guava, use {@code
* stream.collect(ImmutableMap.toImmutableMap(keyFunction, v -> v))}. Before then you can use
* {@code ImmutableMap.copyOf(stream.collect(Collectors.toMap(keyFunction, v -> v)))}, but be
* aware that this may not preserve the order of entries.
*
* @param keyFunction the function used to produce the key for each value
* @return a map mapping the result of evaluating the function {@code keyFunction} on each value
* in this fluent iterable to that value
* @throws IllegalArgumentException if {@code keyFunction} produces the same key for more than one
* value in this fluent iterable
* @throws NullPointerException if any element of this iterable is {@code null}, or if {@code
* keyFunction} produces {@code null} for any key
* @since 14.0
*/
public final <K> ImmutableMap<K, E> uniqueIndex(Function<? super E, K> keyFunction) {
return Maps.uniqueIndex(getDelegate(), keyFunction);
}
/**
* Returns an array containing all of the elements from this fluent iterable in iteration order.
*
* <p><b>{@code Stream} equivalent:</b> if an object array is acceptable, use {@code
* stream.toArray()}; if {@code type} is a class literal such as {@code MyType.class}, use {@code
* stream.toArray(MyType[]::new)}. Otherwise use {@code stream.toArray( len -> (E[])
* Array.newInstance(type, len))}.
*
* @param type the type of the elements
* @return a newly-allocated array into which all the elements of this fluent iterable have been
* copied
*/
@GwtIncompatible // Array.newArray(Class, int)
public final E[] toArray(Class<E> type) {
return Iterables.toArray(getDelegate(), type);
}
/**
* Copies all the elements from this fluent iterable to {@code collection}. This is equivalent to
* calling {@code Iterables.addAll(collection, this)}.
*
* <p><b>{@code Stream} equivalent:</b> {@code stream.forEachOrdered(collection::add)} or {@code
* stream.forEach(collection::add)}.
*
* @param collection the collection to copy elements to
* @return {@code collection}, for convenience
* @since 14.0
*/
@CanIgnoreReturnValue
public final <C extends Collection<? super E>> C copyInto(C collection) {
checkNotNull(collection);
Iterable<E> iterable = getDelegate();
if (iterable instanceof Collection) {
collection.addAll(Collections2.cast(iterable));
} else {
for (E item : iterable) {
collection.add(item);
}
}
return collection;
}
/**
* Returns a {@link String} containing all of the elements of this fluent iterable joined with
* {@code joiner}.
*
* <p><b>{@code Stream} equivalent:</b> {@code joiner.join(stream.iterator())}, or, if you are not
* using any optional {@code Joiner} features, {@code
* stream.collect(Collectors.joining(delimiter)}.
*
* @since 18.0
*/
@Beta
public final String join(Joiner joiner) {
return joiner.join(this);
}
/**
* Returns the element at the specified position in this fluent iterable.
*
* <p><b>{@code Stream} equivalent:</b> {@code stream.skip(position).findFirst().get()} (but note
* that this throws different exception types, and throws an exception if {@code null} would be
* returned).
*
* @param position position of the element to return
* @return the element at the specified position in this fluent iterable
* @throws IndexOutOfBoundsException if {@code position} is negative or greater than or equal to
* the size of this fluent iterable
*/
// TODO(kevinb): add @NullableDecl?
public final E get(int position) {
return Iterables.get(getDelegate(), position);
}
  /** Function that transforms {@code Iterable<E>} into a fluent iterable. */
  private static class FromIterableFunction<E> implements Function<Iterable<E>, FluentIterable<E>> {
    @Override
    public FluentIterable<E> apply(Iterable<E> fromObject) {
      // Simple adapter: delegates to the static factory FluentIterable.from.
      return FluentIterable.from(fromObject);
    }
  }
}
| |
/**
* Copyright 2005-2014 Red Hat, Inc.
*
* Red Hat licenses this file to you under the Apache License, version
* 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package io.fabric8.common.util.json;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
public class JsonReader {
    /**
     * Parses a single JSON value from the given reader.
     *
     * @param reader the character stream to parse
     * @return the parsed value: a {@code Map<String, Object>} for objects, a
     *         {@code Collection<Object>} for arrays, a {@code String}, a
     *         {@code Double} for numbers (integers are parsed as doubles too,
     *         see {@code readNumber}), a {@code Boolean}, or {@code null} for
     *         the JSON {@code null} literal
     * @throws IOException if the input is not well-formed JSON or reading fails
     */
    public static Object read(Reader reader) throws IOException {
        return new JsonReader(reader).parse();
    }
    /**
     * Parses a single JSON value from the given stream.
     *
     * <p>NOTE(review): the stream is decoded with the platform default charset
     * (no charset passed to {@code InputStreamReader}); RFC 8259 expects UTF-8.
     * Confirm callers before tightening this.
     *
     * @throws IOException if the input is not well-formed JSON or reading fails
     */
    public static Object read(InputStream is) throws IOException {
        return new JsonReader(new InputStreamReader(is)).parse();
    }
    //
    // Implementation
    //
    private final Reader reader;
    // Scratch buffer reused while accumulating string and number literals.
    private final StringBuilder recorder;
    // One-character lookahead: the character currently under inspection,
    // or -1 once end of input has been reached (see read()/endOfText()).
    private int current;
    // 1-based line and 0-based column of `current`, used only in error messages.
    private int line = 1;
    private int column = 0;
    JsonReader(Reader reader) {
        this.reader = reader;
        recorder = new StringBuilder();
    }
    /**
     * Parses exactly one JSON value and verifies that nothing but whitespace
     * follows it.
     *
     * @throws IOException if the input is not a single well-formed JSON value
     */
    public Object parse() throws IOException {
        // Prime the one-character lookahead before dispatching.
        read();
        skipWhiteSpace();
        Object result = readValue();
        skipWhiteSpace();
        if (!endOfText()) {
            throw error("Unexpected character");
        }
        return result;
    }
    // Dispatches on the first character of a value. Each branch consumes the
    // whole literal, leaving `current` on the character after it.
    private Object readValue() throws IOException {
        switch (current) {
            case 'n':
                return readNull();
            case 't':
                return readTrue();
            case 'f':
                return readFalse();
            case '"':
                return readString();
            case '[':
                return readArray();
            case '{':
                return readObject();
            case '-':
            case '0':
            case '1':
            case '2':
            case '3':
            case '4':
            case '5':
            case '6':
            case '7':
            case '8':
            case '9':
                return readNumber();
            default:
                throw expected("value");
        }
    }
    // Reads '[' value (',' value)* ']' into an ArrayList. `current` is '['
    // on entry.
    private Collection<?> readArray() throws IOException {
        read();
        Collection<Object> array = new ArrayList<Object>();
        skipWhiteSpace();
        if (readChar(']')) {
            // Empty array.
            return array;
        }
        do {
            skipWhiteSpace();
            array.add(readValue());
            skipWhiteSpace();
        } while (readChar(','));
        if (!readChar(']')) {
            throw expected("',' or ']'");
        }
        return array;
    }
    // Reads '{' name ':' value (',' name ':' value)* '}' into a HashMap.
    // Duplicate keys keep the last value (HashMap.put semantics).
    private Map<String, Object> readObject() throws IOException {
        read();
        Map<String, Object> object = new HashMap<String, Object>();
        skipWhiteSpace();
        if (readChar('}')) {
            // Empty object.
            return object;
        }
        do {
            skipWhiteSpace();
            String name = readName();
            skipWhiteSpace();
            if (!readChar(':')) {
                throw expected("':'");
            }
            skipWhiteSpace();
            object.put(name, readValue());
            skipWhiteSpace();
        } while (readChar(','));
        if (!readChar('}')) {
            throw expected("',' or '}'");
        }
        return object;
    }
    // Consumes the literal "null"; `current` is 'n' on entry.
    private Object readNull() throws IOException {
        read();
        readRequiredChar('u');
        readRequiredChar('l');
        readRequiredChar('l');
        return null;
    }
    // Consumes the literal "true"; `current` is 't' on entry.
    private Boolean readTrue() throws IOException {
        read();
        readRequiredChar('r');
        readRequiredChar('u');
        readRequiredChar('e');
        return Boolean.TRUE;
    }
    // Consumes the literal "false"; `current` is 'f' on entry.
    private Boolean readFalse() throws IOException {
        read();
        readRequiredChar('a');
        readRequiredChar('l');
        readRequiredChar('s');
        readRequiredChar('e');
        return Boolean.FALSE;
    }
    // Consumes `ch` or fails with an "Expected ..." error.
    private void readRequiredChar(char ch) throws IOException {
        if (!readChar(ch)) {
            throw expected("'" + ch + "'");
        }
    }
    // Reads a double-quoted string into `recorder`, handling escapes and
    // rejecting raw control characters (< 0x20) as required by JSON.
    private String readString() throws IOException {
        read();
        recorder.setLength(0);
        while (current != '"') {
            if (current == '\\') {
                readEscape();
            } else if (current < 0x20) {
                throw expected("valid string character");
            } else {
                recorder.append((char) current);
                read();
            }
        }
        // Consume the closing quote.
        read();
        return recorder.toString();
    }
    // Handles one backslash escape; `current` is '\\' on entry and on the
    // character after the escape on exit. A \\uXXXX escape appends exactly one
    // char, so surrogate pairs arrive as two consecutive escapes.
    private void readEscape() throws IOException {
        read();
        switch (current) {
            case '"':
            case '/':
            case '\\':
                recorder.append((char) current);
                break;
            case 'b':
                recorder.append('\b');
                break;
            case 'f':
                recorder.append('\f');
                break;
            case 'n':
                recorder.append('\n');
                break;
            case 'r':
                recorder.append('\r');
                break;
            case 't':
                recorder.append('\t');
                break;
            case 'u':
                char[] hexChars = new char[4];
                for (int i = 0; i < 4; i++) {
                    read();
                    if (!isHexDigit(current)) {
                        throw expected("hexadecimal digit");
                    }
                    hexChars[i] = (char) current;
                }
                recorder.append((char) Integer.parseInt(String.valueOf(hexChars), 16));
                break;
            default:
                throw expected("valid escape sequence");
        }
        read();
    }
    // Reads an optionally-signed number with optional fraction and exponent.
    // All numbers — including integers — are returned as Double.
    private Number readNumber() throws IOException {
        recorder.setLength(0);
        readAndAppendChar('-');
        int firstDigit = current;
        if (!readAndAppendDigit()) {
            throw expected("digit");
        }
        // Per JSON, a leading '0' may not be followed by more digits.
        if (firstDigit != '0') {
            while (readAndAppendDigit()) {
            }
        }
        readFraction();
        readExponent();
        return Double.parseDouble(recorder.toString());
    }
    // Reads '.' digits if present; returns whether a fraction was consumed.
    private boolean readFraction() throws IOException {
        if (!readAndAppendChar('.')) {
            return false;
        }
        if (!readAndAppendDigit()) {
            throw expected("digit");
        }
        while (readAndAppendDigit()) {
        }
        return true;
    }
    // Reads ('e'|'E') ('+'|'-')? digits if present; returns whether an
    // exponent was consumed.
    private boolean readExponent() throws IOException {
        if (!readAndAppendChar('e') && !readAndAppendChar('E')) {
            return false;
        }
        if (!readAndAppendChar('+')) {
            readAndAppendChar('-');
        }
        if (!readAndAppendDigit()) {
            throw expected("digit");
        }
        while (readAndAppendDigit()) {
        }
        return true;
    }
    // Reads an object member name, which must be a quoted string.
    private String readName() throws IOException {
        if (current != '"') {
            throw expected("name");
        }
        readString();
        return recorder.toString();
    }
    // Consumes `ch` and records it into `recorder`; no-op returning false
    // when `current` is a different character.
    private boolean readAndAppendChar(char ch) throws IOException {
        if (current != ch) {
            return false;
        }
        recorder.append(ch);
        read();
        return true;
    }
    // Consumes `ch` without recording it; no-op returning false otherwise.
    private boolean readChar(char ch) throws IOException {
        if (current != ch) {
            return false;
        }
        read();
        return true;
    }
    // Consumes one decimal digit into `recorder`; returns false at a non-digit.
    private boolean readAndAppendDigit() throws IOException {
        if (!isDigit(current)) {
            return false;
        }
        recorder.append((char) current);
        read();
        return true;
    }
    // Advances past spaces, tabs and newlines; safe at end of input.
    private void skipWhiteSpace() throws IOException {
        while (isWhiteSpace(current) && !endOfText()) {
            read();
        }
    }
    // Advances the lookahead by one character, maintaining line/column.
    // Throws if called when already at end of input.
    private void read() throws IOException {
        if (endOfText()) {
            throw error("Unexpected end of input");
        }
        column++;
        if (current == '\n') {
            line++;
            column = 0;
        }
        current = reader.read();
    }
    private boolean endOfText() {
        return current == -1;
    }
    // Builds an "Expected ..." error, preferring an end-of-input message at EOF.
    private IOException expected(String expected) {
        if (endOfText()) {
            return error("Unexpected end of input");
        }
        return error("Expected " + expected);
    }
    // All parser errors are IOExceptions carrying the line:column position.
    private IOException error(String message) {
        return new IOException(message + " at " + line + ":" + column);
    }
    private static boolean isWhiteSpace(int ch) {
        return ch == ' ' || ch == '\t' || ch == '\n' || ch == '\r';
    }
    private static boolean isDigit(int ch) {
        return ch >= '0' && ch <= '9';
    }
    private static boolean isHexDigit(int ch) {
        return ch >= '0' && ch <= '9' || ch >= 'a' && ch <= 'f' || ch >= 'A' && ch <= 'F';
    }
}
| |
package org.utilities.dataaccesslayer;
/*
* #%L
* data-access-layer
* $Id:$
* $HeadURL:$
* %%
* Copyright (C) 2015 ElGuille
* %%
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* 3. Neither the name of the ElGuille nor the names of its contributors
* may be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
* #L%
*/
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import javax.swing.JOptionPane;
import org.utilities.dataaccesslayer.DataSource.CassandraCluster;
import com.datastax.driver.core.Session;
public class ConnectionDataSource
{
    /** RDBMS connection properties. */
    private String user = "";
    private String pass = "";
    private DataSource ds = new DataSource();
    private Connection connection = null;
    private CassandraSession cassandraSession = new CassandraSession();
    // ------------------------------------------
    /** No-arg constructor: leaves both RDBMS and Cassandra state unconfigured. */
    public ConnectionDataSource() {}
    /**
     * Constructor for RDBMS connections.
     *
     * @param user database user name
     * @param pass database password
     * @param ds   data source describing driver, host, port and database
     */
    public ConnectionDataSource( String user, String pass, DataSource ds )
    {
        this.user = user;
        this.pass = pass;
        this.ds = ds;
    }
    /**
     * Constructor for Cassandra: opens a session on the given cluster/keyspace
     * immediately (see CassandraSession constructor).
     */
    public ConnectionDataSource( CassandraCluster cluster, String keySpace ) {
        cassandraSession = new CassandraSession( cluster, keySpace );
    }
    // -------------------------------------------
    /**
     * Opens a connection to an RDBMS (MySQL, MariaDB or PostgreSQL per the
     * configured protocol).
     *
     * <p>NOTE(review): the JDBC URL is built by bare concatenation of URL +
     * host + port + database with no separators visible here — confirm that
     * DataSource's getters embed the required ':' and '/' characters.
     *
     * <p>NOTE(review): both ClassNotFoundException and SQLException are only
     * printed, so callers receive {@code null} on failure and must check for it.
     *
     * @return the opened connection, or {@code null} if connecting failed
     */
    public Connection connect()
    {
        try {
            Class.forName( this.ds.getDBMSProtocol().getDriver() );
            connection = DriverManager.getConnection
                    (
                        this.ds.getDBMSProtocol().getURL() +
                        this.ds.getHost() +
                        this.ds.getPort() +
                        this.ds.getDataBase() ,
                        this.user ,
                        this.pass
                    );
        }
        catch ( ClassNotFoundException e ) {
            e.printStackTrace();
        } catch( SQLException e) {
            e.printStackTrace();
        }
        return connection;
    }
    // -------------------------------------------
    /**
     * Closes the RDBMS connection and clears the reference.
     *
     * <p>NOTE(review): if connect() was never called (or failed), connection is
     * null and close() throws NPE, which the broad catch then swallows after
     * printing the stack trace.
     */
    public void disconnect()
    {
        try {
            connection.close();
            connection = null;
        }
        catch (Exception ex) {
            ex.printStackTrace();
        }
    }
    // -------------------------------------------
    /**
     * Shows a dialog reporting whether the RDBMS connection is open.
     *
     * <p>NOTE(review): a Swing dialog inside a data-access class prevents
     * headless/server use; consider returning a boolean instead.
     */
    public void getStateConnection()
    {
        try {
            if( !this.connection.isClosed() )
                JOptionPane.showMessageDialog( null, "conectado" );
            else
                JOptionPane.showMessageDialog( null, "desconectado" );
        }
        catch ( SQLException e ) {
            e.printStackTrace();
        }
    }
    /**
     * Returns the current RDBMS connection ({@code null} when not connected).
     */
    public Connection getConnection() {
        return this.connection;
    }
    /** Returns the configured RDBMS data source. */
    public DataSource getDataSource() {
        return this.ds;
    }
    /** Returns the keyspace of the Cassandra session ("" when unset). */
    public String getCassandraKeySpace() {
        return this.cassandraSession.getKeySpace();
    }
    /** Returns the underlying Cassandra session ({@code null} when unset). */
    public Session getCassandraSession() {
        return cassandraSession.getSession();
    }
    // -------------------------------------------
    /**
     * Cassandra session wrapper that handles access to the clusters.
     * @author LuisGuillermo
     */
    public class CassandraSession
    {
        /** Cassandra connection properties. */
        private String keySpace = "";
        private Session session = null;
        private CassandraCluster cluster = null;
        /** Creates an unconnected placeholder session. */
        public CassandraSession() {}
        /** Connects to the given cluster on the given keyspace immediately. */
        public CassandraSession( CassandraCluster cluster, String keySpace )
        {
            this.keySpace = keySpace;
            this.cluster = cluster;
            session = cluster.getCluster().connect( keySpace );
        }
        /**
         * Reports whether the Cassandra session has been closed.
         * NOTE(review): throws NPE when constructed via the no-arg constructor.
         */
        public boolean isSessionClosed() {
            return session.isClosed();
        }
        /** Closes the Cassandra session. */
        public void closeSession() {
            session.close();
        }
        /**
         * Returns the Cassandra session ({@code null} when unconnected).
         */
        public Session getSession() {
            return this.session;
        }
        public void setSession( Session session ) {
            this.session = session;
        }
        public String getKeySpace() {
            return this.keySpace;
        }
    }
}
| |
/*
* (C) Copyright 2014 mjahnen <jahnen@in.tum.de>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.github.mjdev.libaums.driver.scsi;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Arrays;
import android.util.Log;
import com.github.mjdev.libaums.UsbCommunication;
import com.github.mjdev.libaums.driver.BlockDeviceDriver;
import com.github.mjdev.libaums.driver.scsi.commands.CommandBlockWrapper;
import com.github.mjdev.libaums.driver.scsi.commands.CommandBlockWrapper.Direction;
import com.github.mjdev.libaums.driver.scsi.commands.CommandStatusWrapper;
import com.github.mjdev.libaums.driver.scsi.commands.ScsiInquiry;
import com.github.mjdev.libaums.driver.scsi.commands.ScsiInquiryResponse;
import com.github.mjdev.libaums.driver.scsi.commands.ScsiRead10;
import com.github.mjdev.libaums.driver.scsi.commands.ScsiReadCapacity;
import com.github.mjdev.libaums.driver.scsi.commands.ScsiReadCapacityResponse;
import com.github.mjdev.libaums.driver.scsi.commands.ScsiTestUnitReady;
import com.github.mjdev.libaums.driver.scsi.commands.ScsiWrite10;
/**
* This class is responsible for handling mass storage devices which follow the
* SCSI standard. This class communicates with the mass storage device via the
* different SCSI commands.
*
* @author mjahnen
* @see com.github.mjdev.libaums.driver.scsi.commands
*/
public class ScsiBlockDevice implements BlockDeviceDriver {
    private static final String TAG = ScsiBlockDevice.class.getSimpleName();
    private UsbCommunication usbCommunication;
    // Reusable 31-byte buffer for the outgoing Command Block Wrapper
    // (31 bytes is the CBW size in the USB mass storage Bulk-Only Transport).
    private ByteBuffer outBuffer;
    // Reusable buffer for the incoming Command Status Wrapper.
    private byte[] cswBuffer;
    // Device geometry, filled in by init() from READ CAPACITY.
    private int blockSize;
    private int lastBlockAddress;
    public ScsiBlockDevice(UsbCommunication usbCommunication) {
        this.usbCommunication = usbCommunication;
        outBuffer = ByteBuffer.allocate(31);
        cswBuffer = new byte[CommandStatusWrapper.SIZE];
    }
    /**
     * Issues a SCSI Inquiry to determine the connected device. After that it is
     * checked if the unit is ready. Logs a warning if the unit is not ready.
     * Finally the capacity of the mass storage device is read.
     *
     * @throws IOException
     *             If initialing fails due to an unsupported device or if
     *             reading fails.
     * @see com.github.mjdev.libaums.driver.scsi.commands.ScsiInquiry
     * @see com.github.mjdev.libaums.driver.scsi.commands.ScsiInquiryResponse
     * @see com.github.mjdev.libaums.driver.scsi.commands.ScsiTestUnitReady
     * @see com.github.mjdev.libaums.driver.scsi.commands.ScsiReadCapacity
     * @see com.github.mjdev.libaums.driver.scsi.commands.ScsiReadCapacityResponse
     */
    @Override
    public void init() throws IOException {
        ByteBuffer inBuffer = ByteBuffer.allocate(36);
        ScsiInquiry inquiry = new ScsiInquiry();
        transferCommand(inquiry, inBuffer);
        // TODO support multiple luns!
        ScsiInquiryResponse inquiryResponse = ScsiInquiryResponse.read(inBuffer);
        Log.d(TAG, "inquiry response: " + inquiryResponse);
        // Only direct-access block devices (qualifier 0, type 0) are supported.
        if (inquiryResponse.getPeripheralQualifier() != 0
                || inquiryResponse.getPeripheralDeviceType() != 0) {
            throw new IOException("unsupported PeripheralQualifier or PeripheralDeviceType");
        }
        // A not-ready unit is only logged; init continues regardless.
        ScsiTestUnitReady testUnit = new ScsiTestUnitReady();
        if (!transferCommand(testUnit, null)) {
            Log.w(TAG, "unit not ready!");
        }
        // READ CAPACITY yields the block size and the last addressable block.
        ScsiReadCapacity readCapacity = new ScsiReadCapacity();
        transferCommand(readCapacity, inBuffer);
        ScsiReadCapacityResponse readCapacityResponse = ScsiReadCapacityResponse.read(inBuffer);
        blockSize = readCapacityResponse.getBlockLength();
        lastBlockAddress = readCapacityResponse.getLogicalBlockAddress();
        Log.i(TAG, "Block size: " + blockSize);
        Log.i(TAG, "Last block address: " + lastBlockAddress);
    }
    /**
     * Transfers the desired command to the device. If the command has a data
     * phase the parameter <code>inBuffer</code> is used to store or read data
     * to resp. from it. The direction of the data phase is determined by
     * {@link com.github.mjdev.libaums.driver.scsi.commands.CommandBlockWrapper #getDirection()}
     * .
     * <p>
     * Return value is true if the status of the command status wrapper is
     * successful (
     * {@link com.github.mjdev.libaums.driver.scsi.commands.CommandStatusWrapper #getbCswStatus()}
     * ).
     *
     * @param command
     *            The command which should be transferred.
     * @param inBuffer
     *            The buffer used for reading or writing.
     * @return True if the transaction was successful.
     * @throws IOException
     *             If something fails.
     */
    private boolean transferCommand(CommandBlockWrapper command, ByteBuffer inBuffer)
            throws IOException {
        // Phase 1: serialize and send the Command Block Wrapper.
        byte[] outArray = outBuffer.array();
        outBuffer.clear();
        Arrays.fill(outArray, (byte) 0);
        command.serialize(outBuffer);
        int written = usbCommunication.bulkOutTransfer(outArray, outArray.length);
        if (written != outArray.length) {
            Log.e(TAG, "Writing all bytes on command " + command + " failed!");
        }
        // Phase 2 (optional): data phase, direction chosen by the command.
        int transferLength = command.getdCbwDataTransferLength();
        int read = 0;
        if (transferLength > 0) {
            byte[] inArray = inBuffer.array();
            if (command.getDirection() == Direction.IN) {
                // Loop because a single bulk transfer may return fewer bytes
                // than requested.
                do {
                    int tmp = usbCommunication.bulkInTransfer(inArray, read + inBuffer.position(),
                            inBuffer.remaining() - read);
                    if (tmp == -1) {
                        throw new IOException("reading failed!");
                    }
                    read += tmp;
                } while (read < transferLength);
                if (read != transferLength) {
                    throw new IOException("Unexpected command size (" + read + ") on response to "
                            + command);
                }
            } else {
                // OUT direction: push the payload, again possibly in chunks.
                written = 0;
                do {
                    int tmp = usbCommunication.bulkOutTransfer(inArray,
                            written + inBuffer.position(), inBuffer.remaining() - written);
                    if (tmp == -1) {
                        throw new IOException("writing failed!");
                    }
                    written += tmp;
                } while (written < transferLength);
                if (written != transferLength) {
                    throw new IOException("Could not write all bytes: " + command);
                }
            }
        }
        // Phase 3: read the Command Status Wrapper and validate it.
        // expecting csw now
        read = usbCommunication.bulkInTransfer(cswBuffer, cswBuffer.length);
        if (read != CommandStatusWrapper.SIZE) {
            Log.e(TAG, "Unexpected command size while expecting csw");
        }
        CommandStatusWrapper csw = CommandStatusWrapper.read(ByteBuffer.wrap(cswBuffer));
        if (csw.getbCswStatus() != CommandStatusWrapper.COMMAND_PASSED) {
            Log.e(TAG, "Unsuccessful Csw status: " + csw.getbCswStatus());
        }
        // The CSW tag must echo the CBW tag; a mismatch is logged, not fatal.
        if (csw.getdCswTag() != command.getdCbwTag()) {
            Log.e(TAG, "wrong csw tag!");
        }
        return csw.getbCswStatus() == CommandStatusWrapper.COMMAND_PASSED;
    }
    /**
     * This method reads from the device at the specific device offset. The
     * devOffset specifies at which block the reading should begin. That means
     * the devOffset is not in bytes!
     *
     * <p>NOTE(review): devOffset is narrowed to int when building ScsiRead10,
     * so block addresses beyond Integer.MAX_VALUE are truncated — confirm the
     * supported device sizes.
     */
    @Override
    public void read(long devOffset, ByteBuffer dest) throws IOException {
        long time = System.currentTimeMillis();
        // TODO try to make this more efficient by for example only allocating
        // blockSize and making it global
        ByteBuffer buffer;
        if (dest.remaining() % blockSize != 0) {
            // Reads must be whole blocks: read into a rounded-up temp buffer,
            // then copy only the requested bytes back into dest.
            Log.i(TAG, "we have to round up size to next block sector");
            int rounded = blockSize - dest.remaining() % blockSize + dest.remaining();
            buffer = ByteBuffer.allocate(rounded);
            buffer.limit(rounded);
        } else {
            buffer = dest;
        }
        ScsiRead10 read = new ScsiRead10((int) devOffset, buffer.remaining(), blockSize);
        Log.d(TAG, "reading: " + read);
        transferCommand(read, buffer);
        if (dest.remaining() % blockSize != 0) {
            // NOTE(review): dest.array() requires an accessible backing array;
            // a direct or read-only dest would throw here — confirm callers.
            System.arraycopy(buffer.array(), 0, dest.array(), dest.position(), dest.remaining());
        }
        dest.position(dest.limit());
        Log.d(TAG, "read time: " + (System.currentTimeMillis() - time));
    }
    /**
     * This method writes from the device at the specific device offset. The
     * devOffset specifies at which block the writing should begin. That means
     * the devOffset is not in bytes!
     *
     * <p>NOTE(review): same int truncation of devOffset as in read(); a
     * partial trailing block is padded with zeros by the rounded temp buffer.
     */
    @Override
    public void write(long devOffset, ByteBuffer src) throws IOException {
        long time = System.currentTimeMillis();
        // TODO try to make this more efficient by for example only allocating
        // blockSize and making it global
        ByteBuffer buffer;
        if (src.remaining() % blockSize != 0) {
            // Writes must be whole blocks: stage src into a rounded-up buffer.
            Log.i(TAG, "we have to round up size to next block sector");
            int rounded = blockSize - src.remaining() % blockSize + src.remaining();
            buffer = ByteBuffer.allocate(rounded);
            buffer.limit(rounded);
            System.arraycopy(src.array(), src.position(), buffer.array(), 0, src.remaining());
        } else {
            buffer = src;
        }
        ScsiWrite10 write = new ScsiWrite10((int) devOffset, buffer.remaining(), blockSize);
        Log.d(TAG, "writing: " + write);
        transferCommand(write, buffer);
        src.position(src.limit());
        Log.d(TAG, "write time: " + (System.currentTimeMillis() - time));
    }
    /** Returns the device block size in bytes, as reported by READ CAPACITY. */
    @Override
    public int getBlockSize() {
        return blockSize;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.plan.metadata;
import org.apache.flink.table.plan.stats.ValueInterval;
import org.apache.flink.table.plan.trait.FlinkRelDistribution;
import org.apache.flink.table.plan.trait.RelModifiedMonotonicity;
import org.apache.calcite.linq4j.tree.Types;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.metadata.Metadata;
import org.apache.calcite.rel.metadata.MetadataDef;
import org.apache.calcite.rel.metadata.MetadataHandler;
import org.apache.calcite.rel.metadata.RelMetadataQuery;
import org.apache.calcite.util.ImmutableBitSet;
import java.lang.reflect.Method;
/**
 * Contains the interfaces for several specified metadata of flink.
 *
 * <p>Each nested interface follows Calcite's metadata plumbing pattern: a
 * reflective {@code METHOD} handle naming the query method, a
 * {@code MetadataDef} ({@code DEF}) that registers the metadata type with its
 * handler, the query method itself, and a nested {@code Handler} interface
 * that providers implement.
 */
public abstract class FlinkMetadata {
    /**
     * Metadata about the interval of given column from a specified relational expression.
     */
    public interface ColumnInterval extends Metadata {
        // Reflective handle used by Calcite's metadata dispatcher to route
        // getColumnInterval(int) calls to the registered Handler.
        Method METHOD = Types.lookupMethod(ColumnInterval.class, "getColumnInterval", int.class);
        MetadataDef<ColumnInterval> DEF = MetadataDef.of(
            ColumnInterval.class,
            ColumnInterval.Handler.class,
            METHOD);
        /**
         * Returns the interval value of the given column from a specified relational expression.
         *
         * @param index the index of the given column in a specified relational expression
         * @return the interval of the given column.
         * Returns null if interval cannot be estimated,
         * Returns [[NullValueInterval]] if column values does not contains any value
         * except for null.
         */
        ValueInterval getColumnInterval(int index);
        /**
         * Handler API.
         */
        interface Handler extends MetadataHandler<ColumnInterval> {
            ValueInterval getColumnInterval(RelNode r, RelMetadataQuery mq, int index);
        }
    }
    /**
     * Metadata about the interval of given column under the given filter argument
     * from a specified relational expression.
     */
    public interface FilteredColumnInterval extends Metadata {
        // Same dispatch pattern as ColumnInterval, with an extra filter argument.
        Method METHOD = Types.lookupMethod(FilteredColumnInterval.class, "getFilteredColumnInterval", int.class, int.class);
        MetadataDef<FilteredColumnInterval> DEF = MetadataDef.of(
            FilteredColumnInterval.class,
            FilteredColumnInterval.Handler.class,
            METHOD);
        /**
         * Returns the interval value of the given column under the given filter argument
         * from a specified relational expression.
         *
         * @param columnIndex the index of the given column in a specified relational expression
         * @param filterArg the index of the filter argument, -1 when no filter argument existed
         * @return the interval of the given column.
         * Returns null if interval cannot be estimated,
         * Returns [[NullValueInterval]] if column values does not contains any value
         * except for null.
         */
        ValueInterval getFilteredColumnInterval(int columnIndex, int filterArg);
        /**
         * Handler API.
         */
        interface Handler extends MetadataHandler<FilteredColumnInterval> {
            ValueInterval getFilteredColumnInterval(RelNode r, RelMetadataQuery mq, int columnIndex, int filterArg);
        }
    }
    /**
     * Metadata about the null count of given column from a specified relational expression.
     */
    public interface ColumnNullCount extends Metadata {
        Method METHOD = Types.lookupMethod(ColumnNullCount.class, "getColumnNullCount", int.class);
        MetadataDef<ColumnNullCount> DEF = MetadataDef.of(
            ColumnNullCount.class,
            ColumnNullCount.Handler.class,
            METHOD);
        /**
         * Returns the null count of the given column from a specified relational expression.
         *
         * @param index the index of the given column in a specified relational expression
         * @return the null count of the given column if can be estimated, else return null.
         */
        Double getColumnNullCount(int index);
        /**
         * Handler API.
         */
        interface Handler extends MetadataHandler<ColumnNullCount> {
            Double getColumnNullCount(RelNode r, RelMetadataQuery mq, int index);
        }
    }
    /**
     * Origin null count, looking until source.
     */
    public interface ColumnOriginNullCount extends Metadata {
        Method METHOD = Types.lookupMethod(ColumnOriginNullCount.class, "getColumnOriginNullCount", int.class);
        MetadataDef<ColumnOriginNullCount> DEF = MetadataDef.of(
            ColumnOriginNullCount.class,
            ColumnOriginNullCount.Handler.class,
            METHOD);
        /**
         * Returns origin null count of the given column from a specified relational expression.
         *
         * @param index the index of the given column in a specified relational expression
         * @return origin null count of the given column if can be estimated, else return null.
         */
        Double getColumnOriginNullCount(int index);
        /**
         * Handler API.
         */
        interface Handler extends MetadataHandler<ColumnOriginNullCount> {
            Double getColumnOriginNullCount(RelNode r, RelMetadataQuery mq, int index);
        }
    }
    /**
     * Metadata about the (minimum) unique groups of the given columns from a specified relational expression.
     */
    public interface UniqueGroups extends Metadata {
        Method METHOD = Types.lookupMethod(UniqueGroups.class, "getUniqueGroups", ImmutableBitSet.class);
        MetadataDef<UniqueGroups> DEF = MetadataDef.of(
            UniqueGroups.class,
            UniqueGroups.Handler.class,
            METHOD);
        /**
         * Returns the (minimum) unique groups of the given columns from a specified relational expression.
         *
         * @param columns the given columns in a specified relational expression.
         *                The given columns should not be null.
         * @return the (minimum) unique columns which should be a sub-collection of the given columns,
         * and should not be null or empty. If none unique columns can be found, return the given columns.
         */
        ImmutableBitSet getUniqueGroups(ImmutableBitSet columns);
        /**
         * Handler API.
         */
        interface Handler extends MetadataHandler<UniqueGroups> {
            ImmutableBitSet getUniqueGroups(RelNode r, RelMetadataQuery mq, ImmutableBitSet columns);
        }
    }
    /**
     * Metadata about how a relational expression is distributed.
     *
     * <p>If you are an operator consuming a relational expression, which subset
     * of the rows are you seeing? You might be seeing all of them (BROADCAST
     * or SINGLETON), only those whose key column values have a particular hash
     * code (HASH) or only those whose column values have particular values or
     * ranges of values (RANGE).
     *
     * <p>When a relational expression is partitioned, it is often partitioned
     * among nodes, but it may be partitioned among threads running on the same
     * node.
     */
    public interface FlinkDistribution extends Metadata {
        // Note: no-arg query, so METHOD is looked up without parameter types.
        Method METHOD = Types.lookupMethod(FlinkDistribution.class, "flinkDistribution");
        MetadataDef<FlinkDistribution> DEF = MetadataDef.of(
            FlinkDistribution.class,
            FlinkDistribution.Handler.class,
            METHOD);
        /** Determines how the rows are distributed. */
        FlinkRelDistribution flinkDistribution();
        /** Handler API. */
        interface Handler extends MetadataHandler<FlinkDistribution> {
            FlinkRelDistribution flinkDistribution(RelNode r, RelMetadataQuery mq);
        }
    }
    /**
     * Metadata about the modified property of a RelNode. For example, an aggregate RelNode
     * contains a max aggregate function whose result value maybe modified increasing.
     */
    public interface ModifiedMonotonicity extends Metadata {
        Method METHOD = Types.lookupMethod(ModifiedMonotonicity.class, "getRelModifiedMonotonicity");
        MetadataDef<ModifiedMonotonicity> DEF = MetadataDef.of(
            ModifiedMonotonicity.class,
            ModifiedMonotonicity.Handler.class,
            METHOD);
        /** Returns the modified monotonicity of the given relational expression. */
        RelModifiedMonotonicity getRelModifiedMonotonicity();
        /** Handler API. */
        interface Handler extends MetadataHandler<ModifiedMonotonicity> {
            RelModifiedMonotonicity getRelModifiedMonotonicity(RelNode r, RelMetadataQuery mq);
        }
    }
}
| |
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.bandwidthtest;
import android.net.NetworkInfo.State;
import android.util.Log;
import java.util.List;
import java.util.ArrayList;
/**
 * Data structure to keep track of the network state transitions.
 *
 * <p>States are appended via {@link #recordState} as broadcasts arrive; once a
 * transition criteria has been set with {@link #setStateTransitionCriteria},
 * {@link #validateStateTransition} checks the recorded sequence against it.
 * On failure the human-readable cause is available via {@link #getFailureReason}.
 */
public class NetworkState {
    /**
     * Desired direction of state transition.
     */
    public enum StateTransitionDirection {
        TO_DISCONNECTION, TO_CONNECTION, DO_NOTHING
    }
    // Class-level constant; was a per-instance field in the original.
    private static final String LOG_TAG = "NetworkState";
    // Ordered record of every network State observed so far.
    private final List<State> mStateDepository;
    private State mTransitionTarget;
    private StateTransitionDirection mTransitionDirection;
    private String mReason = null; // records the reason of a state transition failure

    public NetworkState() {
        mStateDepository = new ArrayList<State>();
        mTransitionDirection = StateTransitionDirection.DO_NOTHING;
        mTransitionTarget = State.UNKNOWN;
    }

    public NetworkState(State currentState) {
        mStateDepository = new ArrayList<State>();
        mStateDepository.add(currentState);
        mTransitionDirection = StateTransitionDirection.DO_NOTHING;
        mTransitionTarget = State.UNKNOWN;
    }

    /**
     * Reinitialize the network state
     */
    public void resetNetworkState() {
        mStateDepository.clear();
        mTransitionDirection = StateTransitionDirection.DO_NOTHING;
        mTransitionTarget = State.UNKNOWN;
    }

    /**
     * Set the transition criteria
     * @param initState initial {@link State}
     * @param transitionDir explicit {@link StateTransitionDirection}
     * @param targetState desired {@link State}
     */
    public void setStateTransitionCriteria(State initState, StateTransitionDirection transitionDir,
            State targetState) {
        if (!mStateDepository.isEmpty()) {
            mStateDepository.clear();
        }
        mStateDepository.add(initState);
        mTransitionDirection = transitionDir;
        mTransitionTarget = targetState;
        Log.v(LOG_TAG, "setStateTransitionCriteria: " + printStates());
    }

    /**
     * Record the current state of the network
     * @param currentState the current {@link State}
     */
    public void recordState(State currentState) {
        mStateDepository.add(currentState);
    }

    /**
     * Verify the state transition
     * @return true if the requested transition completed successfully.
     */
    public boolean validateStateTransition() {
        Log.v(LOG_TAG, String.format("Print state depository: %s", printStates()));
        switch (mTransitionDirection) {
            case DO_NOTHING:
                Log.v(LOG_TAG, "No direction requested, verifying network states");
                return validateNetworkStates();
            case TO_CONNECTION:
                Log.v(LOG_TAG, "Transition to CONNECTED");
                return validateNetworkConnection();
            case TO_DISCONNECTION:
                Log.v(LOG_TAG, "Transition to DISCONNECTED");
                return validateNetworkDisconnection();
            default:
                Log.e(LOG_TAG, "Invalid transition direction.");
                return false;
        }
    }

    /**
     * Verify that network states are valid when no transition was requested:
     * exactly one state must be recorded and it must equal the target.
     * @return false if any of the states are invalid
     */
    private boolean validateNetworkStates() {
        // Build the failure message once; the original duplicated each
        // message for Log and mReason.
        if (mStateDepository.isEmpty()) {
            mReason = "no state is recorded.";
            Log.v(LOG_TAG, mReason);
            return false;
        } else if (mStateDepository.size() > 1) {
            mReason = "no broadcast is expected, instead broadcast is probably received";
            Log.v(LOG_TAG, mReason);
            return false;
        } else if (mStateDepository.get(0) != mTransitionTarget) {
            mReason = String.format("%s is expected, but it is %s",
                    mTransitionTarget.toString(),
                    mStateDepository.get(0).toString());
            Log.v(LOG_TAG, mReason);
            return false;
        }
        return true;
    }

    /**
     * Verify the network state to disconnection
     * @return false if any of the state transitions were not valid
     */
    private boolean validateNetworkDisconnection() {
        // Transition from CONNECTED -> DISCONNECTED: CONNECTED->DISCONNECTING->DISCONNECTED
        StringBuilder str = new StringBuilder("States: ");
        str.append(printStates());
        if (mStateDepository.get(0) != State.CONNECTED) {
            str.append(String.format(" Initial state should be CONNECTED, but it is %s.",
                    mStateDepository.get(0)));
            mReason = str.toString();
            return false;
        }
        State lastState = mStateDepository.get(mStateDepository.size() - 1);
        if (lastState != mTransitionTarget) {
            str.append(String.format(" Last state should be DISCONNECTED, but it is %s",
                    lastState));
            mReason = str.toString();
            return false;
        }
        // Check every adjacent pair. The original loop stopped at size()-1,
        // leaving the final transition unchecked (inconsistent with
        // validateNetworkConnection); the bound is now size().
        for (int i = 1; i < mStateDepository.size(); i++) {
            State preState = mStateDepository.get(i - 1);
            State curState = mStateDepository.get(i);
            if (!isValidDisconnectionStep(preState, curState)) {
                str.append(String.format(" Transition state from %s to %s is not valid",
                        preState.toString(), curState.toString()));
                mReason = str.toString();
                return false;
            }
        }
        mReason = str.toString();
        return true;
    }

    /** Single-step validity for the CONNECTED -> DISCONNECTED path. */
    private static boolean isValidDisconnectionStep(State preState, State curState) {
        if (preState == State.CONNECTED) {
            return (curState == State.DISCONNECTING) || (curState == State.DISCONNECTED);
        }
        if ((preState == State.DISCONNECTING) || (preState == State.DISCONNECTED)) {
            return curState == State.DISCONNECTED;
        }
        return false;
    }

    /**
     * Verify the network state to connection
     * @return false if any of the state transitions were not valid
     */
    private boolean validateNetworkConnection() {
        StringBuilder str = new StringBuilder("States ");
        str.append(printStates());
        if (mStateDepository.get(0) != State.DISCONNECTED) {
            str.append(String.format(" Initial state should be DISCONNECTED, but it is %s.",
                    mStateDepository.get(0)));
            mReason = str.toString();
            return false;
        }
        State lastState = mStateDepository.get(mStateDepository.size() - 1);
        if (lastState != mTransitionTarget) {
            str.append(String.format(" Last state should be %s, but it is %s", mTransitionTarget,
                    lastState));
            mReason = str.toString();
            return false;
        }
        for (int i = 1; i < mStateDepository.size(); i++) {
            State preState = mStateDepository.get(i - 1);
            State curState = mStateDepository.get(i);
            if (!isValidConnectionStep(preState, curState)) {
                str.append(String.format(" Transition state from %s to %s is not valid.",
                        preState.toString(), curState.toString()));
                mReason = str.toString();
                return false;
            }
        }
        mReason = str.toString();
        return true;
    }

    /** Single-step validity for the DISCONNECTED -> CONNECTED path. */
    private static boolean isValidConnectionStep(State preState, State curState) {
        if (preState == State.DISCONNECTED) {
            return (curState == State.CONNECTING) || (curState == State.CONNECTED)
                    || (curState == State.DISCONNECTED);
        }
        if (preState == State.CONNECTING) {
            return curState == State.CONNECTED;
        }
        if (preState == State.CONNECTED) {
            return curState == State.CONNECTED;
        }
        return false;
    }

    /**
     * Fetch the different network state transitions
     * @return {@link List} of {@link State}
     */
    public List<State> getTransitionStates() {
        return mStateDepository;
    }

    /**
     * Fetch the reason for network state transition failure
     * @return the {@link String} for the failure
     */
    public String getFailureReason() {
        return mReason;
    }

    /**
     * Print the network state
     * @return {@link String} representation of the network state
     */
    public String printStates() {
        StringBuilder stateBuilder = new StringBuilder();
        for (int i = 0; i < mStateDepository.size(); i++) {
            stateBuilder.append(" ").append(mStateDepository.get(i).toString()).append("->");
        }
        return stateBuilder.toString();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String toString() {
        StringBuilder builder = new StringBuilder();
        builder.append("mTransitionDirection: ").append(mTransitionDirection.toString()).
                append("; ").append("states:").
                append(printStates()).append("; ");
        return builder.toString();
    }
}
| |
package org.radargun.jmx;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import javax.management.MBeanServerConnection;
import javax.management.Notification;
import javax.management.NotificationFilterSupport;
import javax.management.NotificationListener;
import javax.management.remote.JMXConnectionNotification;
import javax.management.remote.JMXConnector;
import javax.management.remote.JMXConnectorFactory;
import javax.management.remote.JMXServiceURL;
import org.radargun.logging.Log;
import org.radargun.logging.LogFactory;
/**
 *
 * Periodically polls for values exposed via JMX on multiple nodes.
 *
 * @author Michal Linhard &lt;mlinhard@redhat.com&gt;
 *
 */
public abstract class JMXPoller implements NotificationListener {
    public static final String DEFAULT_SERVICE_URL_TEMPLATE = "service:jmx:rmi:///jndi/rmi://%s:%d/jmxrmi";

    private static final Log log = LogFactory.getLog(JMXPoller.class);

    // All four fields are assigned only in the constructor; made final.
    private final List<InetSocketAddress> jmxEndpoints;
    private final long queryTimeout;
    private final ConcurrentHashMap<InetSocketAddress, JMXConnector> connectors;
    private final String serviceUrlTemplate;

    /** Outcome of polling one endpoint: at most one of the two errors is set. */
    public static class Result {
        public Exception connectError;
        public Exception pollError;
        public Object value;

        public Result(Exception connectError, Exception pollError, Object value) {
            this.connectError = connectError;
            this.pollError = pollError;
            this.value = value;
        }
    }

    /**
     *
     * Create a new JMXPoller.
     *
     * @param jmxEndpoints
     *           jmx endpoints in form host:port
     * @param queryTimeout
     *           maximum total time (milliseconds) a {@link #poll()} call waits for all endpoints
     * @param serviceUrlTemplate
     *           format string with a %s (host) and %d (port) placeholder used to build the JMX service URL
     */
    protected JMXPoller(List<InetSocketAddress> jmxEndpoints, long queryTimeout, String serviceUrlTemplate) {
        this.jmxEndpoints = jmxEndpoints;
        this.queryTimeout = queryTimeout;
        this.connectors = new ConcurrentHashMap<InetSocketAddress, JMXConnector>(jmxEndpoints.size());
        this.serviceUrlTemplate = serviceUrlTemplate;
    }

    protected JMXPoller(List<InetSocketAddress> jmxEndpoints, long queryTimeout) {
        this(jmxEndpoints, queryTimeout, DEFAULT_SERVICE_URL_TEMPLATE);
    }

    /**
     * Override to poll for certain JMX attributes.
     *
     * @param connection
     *           Connection to a JMX endpoint.
     * @param nodeName
     *           Logical node name; if logical node names are not used, then the endpoint string
     *           host:port.
     * @return Custom response object.
     * @throws Exception
     */
    protected abstract Object pollNode(MBeanServerConnection connection, String nodeName, int nodeIdx) throws Exception;

    protected String endpointToString(InetSocketAddress endpoint) {
        return endpoint.getHostName() + ":" + endpoint.getPort();
    }

    // Drops the cached connector for the endpoint and closes it best-effort.
    private void discardConnector(InetSocketAddress endpoint, JMXConnector connector) {
        connectors.remove(endpoint);
        if (connector != null) {
            try {
                connector.close();
            } catch (IOException e1) {
                log.trace("Error while closing connector", e1);
            }
        }
    }

    /**
     * Polls all endpoints in parallel, waiting up to queryTimeout overall.
     *
     * @return one {@link Result} per endpoint, in endpoint order; entries for
     *         endpoints that did not finish in time carry all-null fields.
     */
    public synchronized List<Result> poll() {
        final Result[] results = new Result[jmxEndpoints.size()];
        Thread[] tryPoll = new Thread[jmxEndpoints.size()];
        for (int i = 0; i < jmxEndpoints.size(); i++) {
            final InetSocketAddress endpoint = jmxEndpoints.get(i);
            final String node = endpointToString(endpoint);
            final int nodeIdx = i;
            tryPoll[i] = new Thread("tryPoll-" + node) {
                @Override
                public void run() {
                    JMXConnector connector = null;
                    MBeanServerConnection connection = null;
                    try {
                        connector = connect(endpoint);
                        connection = connector.getMBeanServerConnection();
                    } catch (Exception e) {
                        log.trace("Discarding connector to endpoint " + endpoint + " because of an exception.", e);
                        discardConnector(endpoint, connector);
                        results[nodeIdx] = new Result(e, null, null);
                        return;
                    }
                    try {
                        results[nodeIdx] = new Result(null, null, pollNode(connection, node, nodeIdx));
                    } catch (Exception e) {
                        discardConnector(endpoint, connector);
                        results[nodeIdx] = new Result(null, e, null);
                    }
                }
            };
            tryPoll[i].start();
        }
        // Share one deadline across all joins so the total wait is bounded by
        // queryTimeout rather than queryTimeout * endpoints.
        long waitEnd = System.currentTimeMillis() + queryTimeout;
        boolean broken = false;
        for (int i = 0; i < tryPoll.length; i++) {
            try {
                long maxJoinWait = waitEnd - System.currentTimeMillis();
                if (maxJoinWait <= 0) {
                    broken = true;
                    break;
                }
                tryPoll[i].join(maxJoinWait);
            } catch (InterruptedException e) {
                if (results[i] == null) {
                    results[i] = new Result(null, e, null);
                }
            }
        }
        if (broken) {
            // Deadline passed: ask any stragglers to stop.
            for (int i = 0; i < tryPoll.length; i++) {
                tryPoll[i].interrupt();
            }
        }
        // return current snapshot of the map, the results map may get modified
        // by an unfinished thread
        Result[] a = Arrays.copyOf(results, results.length);
        for (int i = 0; i < a.length; i++) {
            if (a[i] == null) {
                a[i] = new Result(null, null, null);
            }
        }
        return Arrays.asList(a);
    }

    /** Closes all cached connectors asynchronously (close() can block). */
    public synchronized void closeConnections() {
        final Set<JMXConnector> connectors1 = new HashSet<JMXConnector>(connectors.values());
        connectors.clear();
        new Thread(new Runnable() {
            @Override
            public void run() {
                for (JMXConnector ctor : connectors1) {
                    try {
                        ctor.close();
                    } catch (Exception e) {
                        log.trace("Error while closing JMXConnector", e);
                    }
                }
            }
        }, "JMXPoller.closeConnections").start();
    }

    // Returns a cached connector for the endpoint or creates, connects and
    // caches a new one. If another thread won the putIfAbsent race, the fresh
    // connector is closed and the winner is returned.
    private JMXConnector connect(final InetSocketAddress endpoint) throws Exception {
        JMXConnector cachedConnector = connectors.get(endpoint);
        if (cachedConnector != null) {
            return cachedConnector;
        }
        JMXServiceURL serviceURL = new JMXServiceURL(String.format(this.serviceUrlTemplate, endpoint.getHostName(),
                endpoint.getPort()));
        JMXConnector newConnector = JMXConnectorFactory.newJMXConnector(serviceURL, null);
        try {
            newConnector.connect();
        } catch (Exception e) {
            newConnector.close();
            throw e;
        }
        if (log.isTraceEnabled()) {
            log.trace("created new connector " + newConnector + " to " + endpoint);
        }
        JMXConnector oldConnector = connectors.putIfAbsent(endpoint, newConnector);
        if (oldConnector != null) {
            newConnector.close();
            cachedConnector = oldConnector;
        } else {
            // Evict the cache entry automatically when the connection closes;
            // the endpoint is passed as the notification handback.
            NotificationFilterSupport closedFilter = new NotificationFilterSupport();
            closedFilter.enableType(JMXConnectionNotification.CLOSED);
            newConnector.addConnectionNotificationListener(this, closedFilter, endpoint);
            cachedConnector = newConnector;
        }
        return cachedConnector;
    }

    public List<InetSocketAddress> getEndpoints() {
        return jmxEndpoints;
    }

    @Override
    public void handleNotification(Notification notification, Object node) {
        if (log.isTraceEnabled()) {
            log.trace("Notification received: " + notification + " handback: " + node);
        }
        // Handback is the endpoint registered in connect(); drop its connector.
        connectors.remove(node);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.isis.core.runtime.fixturedomainservice;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
import static org.junit.Assume.assumeThat;
import java.io.StringReader;
import java.io.StringWriter;
import java.text.SimpleDateFormat;
import java.util.HashSet;
import java.util.Locale;
import java.util.Set;
import java.util.TimeZone;
import junit.framework.Assert;
import com.google.common.collect.Sets;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.apache.isis.core.commons.config.IsisConfiguration;
import org.apache.isis.core.commons.config.IsisConfigurationDefault;
import org.apache.isis.core.commons.matchers.IsisMatchers;
import org.apache.isis.core.integtestsupport.IsisSystemWithFixtures;
import org.apache.isis.core.tck.dom.refs.ParentEntity;
import org.apache.isis.core.tck.dom.refs.ReferencingEntity;
import org.apache.isis.core.tck.dom.refs.SimpleEntity;
/**
 * Tests round-tripping fixture objects through {@link ObjectFixtureFilePersistor}:
 * loading entities from the text format and saving entities (including
 * references and collections) back to it.
 *
 * <p>All assertions use JUnit 4 ({@code org.junit.Assert}); the original mixed
 * in deprecated JUnit 3 {@code junit.framework.Assert} calls.
 */
public class ObjectFixtureFilePersistorTest {

    private static final String DATEFORMAT_PATTERN = "dd-MMM-yyyy HH:mm z";
    // NOTE(review): SimpleDateFormat is not thread-safe; acceptable here since
    // JUnit runs these tests single-threaded.
    private static final SimpleDateFormat dateFormat = new SimpleDateFormat(DATEFORMAT_PATTERN, Locale.US);

    @Rule
    public IsisSystemWithFixtures iswf = IsisSystemWithFixtures.builder()
            .with(configuration()).build();

    // Forces the datetime format so the expected fixture strings are stable.
    private static IsisConfiguration configuration() {
        final IsisConfigurationDefault config = new IsisConfigurationDefault();
        config.add("isis.value.format.datetime", DATEFORMAT_PATTERN);
        return config;
    }

    private ObjectFixtureFilePersistor persistor;

    @Before
    public void setup() throws Exception {
        org.apache.log4j.Logger.getRootLogger().setLevel(org.apache.log4j.Level.OFF);
        Locale.setDefault(Locale.UK);
        persistor = new ObjectFixtureFilePersistor();
        iswf.fixtures.smpl1.setName("Fred Smith");
        iswf.fixtures.smpl1.setDate(dateFormat.parse("08-Mar-2010 01:00 UTC"));
        iswf.fixtures.smpl2.setName("Joe Bloggs");
        iswf.fixtures.smpl2.setDate(dateFormat.parse("09-Apr-2011 02:10 UTC"));
        // The expected strings below assume GMT; skip elsewhere.
        assumeThat(TimeZone.getDefault().getDisplayName(), is("Greenwich Mean Time"));
    }

    @Test
    public void loadInstance() throws Exception {
        final StringReader reader = new StringReader(SimpleEntity.class.getName() + "#1\n  name: Fred Smith\n  date: 08-Mar-2010 01:00 UTC");
        final Set<Object> objects = persistor.loadData(reader);
        assertEquals(1, objects.size());
        final Object object = objects.toArray()[0];
        assertThat(object instanceof SimpleEntity, is(true));
        final SimpleEntity epv = (SimpleEntity) object;
        assertEquals("Fred Smith", epv.getName());
        assertEquals(dateFormat.parse("08-Mar-2010 01:00 GMT"), epv.getDate());
    }

    @Test
    public void invalidFieldLine() throws Exception {
        try {
            final StringReader reader = new StringReader(SimpleEntity.class.getName() + "#1\n  name Fred Smith");
            persistor.loadData(reader);
            fail();
        } catch (final FixtureException e) {
            assertEquals("failed to load data at line 2", e.getMessage());
            assertEquals("no colon (:) in: name Fred Smith", e.getCause().getMessage());
        }
    }

    @Test
    public void oldFieldNameSkipped() throws Exception {
        final StringReader reader = new StringReader(SimpleEntity.class.getName() + "#1\n  xname: Fred Smith");
        final Set<Object> objects = persistor.loadData(reader);
        final Object object = objects.toArray()[0];
        assertNull(((SimpleEntity) object).getName());
    }

    @Test
    public void saveNoObjects() throws Exception {
        // Person person = new Person();
        final Set<Object> objects = new HashSet<Object>();
        final StringWriter out = new StringWriter();
        persistor.save(objects, out);
        assertEquals("", out.toString());
    }

    @Test
    public void saveOneObject() throws Exception {
        final Set<Object> objects = Sets.newLinkedHashSet();
        objects.add(iswf.fixtures.smpl1);
        final StringWriter out = new StringWriter();
        persistor.save(objects, out);
        final String actual = canonicalize(out);
        final String expected = SimpleEntity.class.getName() + "#2\n  date: 08-Mar-2010 01:00 UTC\n  name: Fred Smith\n";
        assertThat(actual, IsisMatchers.startsWith(expected));
    }

    @Test
    public void saveTwoObjects() throws Exception {
        final Set<Object> objects = Sets.newLinkedHashSet();
        objects.add(iswf.fixtures.smpl1);
        objects.add(iswf.fixtures.smpl3);
        final StringWriter out = new StringWriter();
        persistor.save(objects, out);
        final String actual = canonicalize(out);
        final String expected1 = SimpleEntity.class.getName() + "#2\n  date: 08-Mar-2010 01:00 UTC\n  name: Fred Smith\n";
        final String expected2 = SimpleEntity.class.getName() + "#3\n  date: \n  name: 3\n";
        assertThat(actual, IsisMatchers.contains(expected1));
        assertThat(actual, IsisMatchers.contains(expected2));
    }

    // Normalizes Windows line endings so expectations can use "\n".
    private String canonicalize(final String out) {
        return out.replaceAll("\r\n", "\n");
    }

    private String canonicalize(final StringWriter out) {
        return canonicalize(out.toString());
    }

    @Test
    public void saveReferencedObject() throws Exception {
        final Set<Object> objects = Sets.newLinkedHashSet();
        iswf.fixtures.rfcg1.setReference(iswf.fixtures.smpl1);
        objects.add(iswf.fixtures.rfcg1);
        objects.add(iswf.fixtures.smpl1);
        final StringWriter out = new StringWriter();
        persistor.save(objects, out);
        final String actual = canonicalize(out);
        final String expected1 = ReferencingEntity.class.getName() + "#2\n  aggregatedEntities: \n  aggregatedReference: \n  reference: " + SimpleEntity.class.getName() + "#3";
        final String expected2 = SimpleEntity.class.getName() + "#3\n  date: 08-Mar-2010 01:00 UTC\n  name: Fred Smith\n";
        assertThat(actual, IsisMatchers.contains(expected1));
        assertThat(actual, IsisMatchers.contains(expected2));
    }

    @Test
    public void saveObjectAndAssociatedCollection() throws Exception {
        final Set<Object> objects = Sets.newLinkedHashSet();
        iswf.fixtures.prnt1.getHomogeneousCollection().add(iswf.fixtures.smpl1);
        iswf.fixtures.prnt1.getHomogeneousCollection().add(iswf.fixtures.smpl2);
        objects.add(iswf.fixtures.prnt1);
        objects.add(iswf.fixtures.smpl1);
        objects.add(iswf.fixtures.smpl2);
        final StringWriter out = new StringWriter();
        persistor.save(objects, out);
        final String actual = canonicalize(out);
        final String expected1a = ParentEntity.class.getName() + "#2\n";
        final String expected1b = "  heterogeneousCollection: \n  homogeneousCollection: " + SimpleEntity.class.getName() + "#3 " + SimpleEntity.class.getName() + "#4 " + "\n";
        final String expected2 = SimpleEntity.class.getName() + "#3\n  date: 08-Mar-2010 01:00 UTC\n  name: Fred Smith\n";
        final String expected3 = SimpleEntity.class.getName() + "#4\n  date: 09-Apr-2011 02:10 UTC\n  name: Joe Bloggs\n";
        assertThat(actual.replaceAll("\n", "###"), IsisMatchers.contains(expected1a.replaceAll("\n", "###")));
        assertThat(actual.replaceAll("\n", "###"), IsisMatchers.contains(expected1b.replaceAll("\n", "###")));
        assertThat(actual, IsisMatchers.contains(expected2));
        assertThat(actual, IsisMatchers.contains(expected3));
    }
}
| |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.diff.tools.util.side;
import com.intellij.diff.DiffContext;
import com.intellij.diff.actions.impl.OpenInEditorWithMouseAction;
import com.intellij.diff.actions.impl.SetEditorSettingsAction;
import com.intellij.diff.contents.DocumentContent;
import com.intellij.diff.requests.ContentDiffRequest;
import com.intellij.diff.requests.DiffRequest;
import com.intellij.diff.requests.SimpleDiffRequest;
import com.intellij.diff.tools.holders.EditorHolderFactory;
import com.intellij.diff.tools.holders.TextEditorHolder;
import com.intellij.diff.tools.util.DiffDataKeys;
import com.intellij.diff.tools.util.SyncScrollSupport;
import com.intellij.diff.tools.util.SyncScrollSupport.ThreesideSyncScrollSupport;
import com.intellij.diff.tools.util.base.InitialScrollPositionSupport;
import com.intellij.diff.tools.util.base.TextDiffSettingsHolder;
import com.intellij.diff.tools.util.base.TextDiffViewerUtil;
import com.intellij.diff.util.*;
import com.intellij.openapi.actionSystem.AnAction;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.LogicalPosition;
import com.intellij.openapi.editor.event.DocumentEvent;
import com.intellij.openapi.editor.event.VisibleAreaEvent;
import com.intellij.openapi.editor.event.VisibleAreaListener;
import com.intellij.openapi.editor.ex.EditorEx;
import com.intellij.openapi.editor.ex.EditorMarkupModel;
import com.intellij.openapi.fileEditor.OpenFileDescriptor;
import com.intellij.openapi.util.Pair;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.CalledInAwt;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.util.List;
public abstract class ThreesideTextDiffViewer extends ThreesideDiffViewer<TextEditorHolder> {
public static final Logger LOG = Logger.getInstance(ThreesideTextDiffViewer.class);
@Nullable private List<? extends EditorEx> myEditors;
@NotNull private final List<? extends EditorEx> myEditableEditors;
@NotNull private final MyVisibleAreaListener myVisibleAreaListener1 = new MyVisibleAreaListener(Side.LEFT);
@NotNull private final MyVisibleAreaListener myVisibleAreaListener2 = new MyVisibleAreaListener(Side.RIGHT);
@Nullable protected ThreesideSyncScrollSupport mySyncScrollSupport;
@NotNull protected final SetEditorSettingsAction myEditorSettingsAction;
public ThreesideTextDiffViewer(@NotNull DiffContext context, @NotNull ContentDiffRequest request) {
    // The super constructor presumably creates the editor holders, so
    // getEditors() is usable from here on -- confirm against ThreesideDiffViewer.
    super(context, request, TextEditorHolder.TextEditorHolderFactory.INSTANCE);
    //new MyFocusOppositePaneAction().setupAction(myPanel, this); // TODO
    // Apply the persisted editor settings to all three editors.
    myEditorSettingsAction = new SetEditorSettingsAction(getTextSettings(), getEditors());
    myEditorSettingsAction.applyDefaults();
    // Enable ctrl/middle-click navigation from a diff editor to a real editor.
    new MyOpenInEditorWithMouseAction().install(getEditors());
    // Cache the subset of editors that are actually editable.
    myEditableEditors = TextDiffViewerUtil.getEditableEditors(getEditors());
    // Sanity check: the three sides must not share a Document.
    TextDiffViewerUtil.checkDifferentDocuments(myRequest);
}
@Override
@CalledInAwt
protected void onInit() {
super.onInit();
installEditorListeners();
}
@Override
@CalledInAwt
protected void onDispose() {
destroyEditorListeners();
super.onDispose();
}
@NotNull
@Override
protected List<TextEditorHolder> createEditorHolders(@NotNull EditorHolderFactory<TextEditorHolder> factory) {
List<TextEditorHolder> holders = super.createEditorHolders(factory);
boolean[] forceReadOnly = TextDiffViewerUtil.checkForceReadOnly(myContext, myRequest);
for (int i = 0; i < 3; i++) {
if (forceReadOnly[i]) holders.get(i).getEditor().setViewer(true);
}
ThreeSide.LEFT.select(holders).getEditor().setVerticalScrollbarOrientation(EditorEx.VERTICAL_SCROLLBAR_LEFT);
((EditorMarkupModel)ThreeSide.BASE.select(holders).getEditor().getMarkupModel()).setErrorStripeVisible(false);
for (TextEditorHolder holder : holders) {
DiffUtil.disableBlitting(holder.getEditor());
}
return holders;
}
@NotNull
@Override
protected List<JComponent> createTitles() {
return DiffUtil.createSyncHeightComponents(DiffUtil.createTextTitles(myRequest, getEditors()));
}
//
// Listeners
//
@CalledInAwt
protected void installEditorListeners() {
new TextDiffViewerUtil.EditorActionsPopup(createEditorPopupActions()).install(getEditors());
new TextDiffViewerUtil.EditorFontSizeSynchronizer(getEditors()).install(this);
getEditor(ThreeSide.LEFT).getScrollingModel().addVisibleAreaListener(myVisibleAreaListener1);
getEditor(ThreeSide.BASE).getScrollingModel().addVisibleAreaListener(myVisibleAreaListener1);
getEditor(ThreeSide.BASE).getScrollingModel().addVisibleAreaListener(myVisibleAreaListener2);
getEditor(ThreeSide.RIGHT).getScrollingModel().addVisibleAreaListener(myVisibleAreaListener2);
SyncScrollSupport.SyncScrollable scrollable1 = getSyncScrollable(Side.LEFT);
SyncScrollSupport.SyncScrollable scrollable2 = getSyncScrollable(Side.RIGHT);
if (scrollable1 != null && scrollable2 != null) {
mySyncScrollSupport = new ThreesideSyncScrollSupport(getEditors(), scrollable1, scrollable2);
myEditorSettingsAction.setSyncScrollSupport(mySyncScrollSupport);
}
}
@CalledInAwt
public void destroyEditorListeners() {
getEditor(ThreeSide.LEFT).getScrollingModel().removeVisibleAreaListener(myVisibleAreaListener1);
getEditor(ThreeSide.BASE).getScrollingModel().removeVisibleAreaListener(myVisibleAreaListener1);
getEditor(ThreeSide.BASE).getScrollingModel().removeVisibleAreaListener(myVisibleAreaListener2);
getEditor(ThreeSide.RIGHT).getScrollingModel().removeVisibleAreaListener(myVisibleAreaListener2);
mySyncScrollSupport = null;
}
protected void disableSyncScrollSupport(boolean disable) {
if (mySyncScrollSupport != null) {
if (disable) {
mySyncScrollSupport.enterDisableScrollSection();
}
else {
mySyncScrollSupport.exitDisableScrollSection();
}
}
}
//
// Diff
//
@NotNull
public TextDiffSettingsHolder.TextDiffSettings getTextSettings() {
return TextDiffViewerUtil.getTextSettings(myContext);
}
@NotNull
protected List<AnAction> createEditorPopupActions() {
return TextDiffViewerUtil.createEditorPopupActions();
}
@Override
protected void onDocumentChange(@NotNull DocumentEvent event) {
super.onDocumentChange(event);
myContentPanel.repaintDividers();
}
//
// Getters
//
@NotNull
public EditorEx getCurrentEditor() {
return getEditor(getCurrentSide());
}
@NotNull
public DocumentContent getCurrentContent() {
return getContent(getCurrentSide());
}
@NotNull
protected List<? extends DocumentContent> getContents() {
//noinspection unchecked
return (List)myRequest.getContents();
}
@NotNull
public List<? extends EditorEx> getEditors() {
if (myEditors == null) {
myEditors = ContainerUtil.map(getEditorHolders(), holder -> holder.getEditor());
}
return myEditors;
}
@NotNull
protected List<? extends EditorEx> getEditableEditors() {
return myEditableEditors;
}
@NotNull
public EditorEx getEditor(@NotNull ThreeSide side) {
return side.select(getEditors());
}
@NotNull
public DocumentContent getContent(@NotNull ThreeSide side) {
return side.select(getContents());
}
@Nullable
public ThreeSide getEditorSide(@Nullable Editor editor) {
if (getEditor(ThreeSide.BASE) == editor) return ThreeSide.BASE;
if (getEditor(ThreeSide.RIGHT) == editor) return ThreeSide.RIGHT;
if (getEditor(ThreeSide.LEFT) == editor) return ThreeSide.LEFT;
return null;
}
//
// Abstract
//
@CalledInAwt
protected void scrollToLine(@NotNull ThreeSide side, int line) {
DiffUtil.scrollEditor(getEditor(side), line, false);
setCurrentSide(side);
}
@Nullable
protected abstract SyncScrollSupport.SyncScrollable getSyncScrollable(@NotNull Side side);
@CalledInAwt
@NotNull
protected LogicalPosition transferPosition(@NotNull ThreeSide baseSide,
@NotNull ThreeSide targetSide,
@NotNull LogicalPosition position) {
if (mySyncScrollSupport == null) return position;
if (baseSide == targetSide) return position;
SyncScrollSupport.SyncScrollable scrollable12 = mySyncScrollSupport.getScrollable12();
SyncScrollSupport.SyncScrollable scrollable23 = mySyncScrollSupport.getScrollable23();
int baseLine; // line number in BASE
if (baseSide == ThreeSide.LEFT) {
baseLine = scrollable12.transfer(Side.LEFT, position.line);
}
else if (baseSide == ThreeSide.RIGHT) {
baseLine = scrollable23.transfer(Side.RIGHT, position.line);
}
else {
baseLine = position.line;
}
int targetLine;
if (targetSide == ThreeSide.LEFT) {
targetLine = scrollable12.transfer(Side.RIGHT, baseLine);
}
else if (targetSide == ThreeSide.RIGHT) {
targetLine = scrollable23.transfer(Side.LEFT, baseLine);
}
else {
targetLine = baseLine;
}
return new LogicalPosition(targetLine, position.column);
}
//
// Misc
//
@Nullable
@Override
protected OpenFileDescriptor getOpenFileDescriptor() {
int offset = getCurrentEditor().getCaretModel().getOffset();
return getCurrentContent().getOpenFileDescriptor(offset);
}
public static boolean canShowRequest(@NotNull DiffContext context, @NotNull DiffRequest request) {
return ThreesideDiffViewer.canShowRequest(context, request, TextEditorHolder.TextEditorHolderFactory.INSTANCE);
}
//
// Actions
//
private class MyOpenInEditorWithMouseAction extends OpenInEditorWithMouseAction {
@Override
protected OpenFileDescriptor getDescriptor(@NotNull Editor editor, int line) {
ThreeSide side = getEditorSide(editor);
if (side == null) return null;
int offset = editor.logicalPositionToOffset(new LogicalPosition(line, 0));
return getContent(side).getOpenFileDescriptor(offset);
}
}
protected class MyToggleAutoScrollAction extends TextDiffViewerUtil.ToggleAutoScrollAction {
public MyToggleAutoScrollAction() {
super(getTextSettings());
}
}
//
// Helpers
//
@Nullable
@Override
public Object getData(@NonNls String dataId) {
if (DiffDataKeys.CURRENT_EDITOR.is(dataId)) {
return getCurrentEditor();
}
return super.getData(dataId);
}
private class MyVisibleAreaListener implements VisibleAreaListener {
@NotNull Side mySide;
public MyVisibleAreaListener(@NotNull Side side) {
mySide = side;
}
@Override
public void visibleAreaChanged(VisibleAreaEvent e) {
if (mySyncScrollSupport != null) mySyncScrollSupport.visibleAreaChanged(e);
myContentPanel.repaint();
}
}
protected abstract class MyInitialScrollPositionHelper extends InitialScrollPositionSupport.ThreesideInitialScrollHelper {
@NotNull
@Override
protected List<? extends Editor> getEditors() {
return ThreesideTextDiffViewer.this.getEditors();
}
@Override
protected void disableSyncScroll(boolean value) {
disableSyncScrollSupport(value);
}
@Override
protected boolean doScrollToLine() {
if (myScrollToLine == null) return false;
scrollToLine(myScrollToLine.first, myScrollToLine.second);
return true;
}
}
protected class TextShowPartialDiffAction extends ShowPartialDiffAction {
public TextShowPartialDiffAction(@NotNull PartialDiffMode mode) {
super(mode);
}
@NotNull
@Override
protected SimpleDiffRequest createRequest() {
SimpleDiffRequest request = super.createRequest();
ThreeSide currentSide = getCurrentSide();
LogicalPosition currentPosition = DiffUtil.getCaretPosition(getCurrentEditor());
// we won't use DiffUserDataKeysEx.EDITORS_CARET_POSITION to avoid desync scroll position (as they can point to different places)
// TODO: pass EditorsVisiblePositions in case if view was scrolled without changing caret position ?
if (currentSide == mySide1) {
request.putUserData(DiffUserDataKeys.SCROLL_TO_LINE, Pair.create(Side.LEFT, currentPosition.line));
}
else if (currentSide == mySide2) {
request.putUserData(DiffUserDataKeys.SCROLL_TO_LINE, Pair.create(Side.RIGHT, currentPosition.line));
}
else {
LogicalPosition position1 = transferPosition(currentSide, mySide1, currentPosition);
LogicalPosition position2 = transferPosition(currentSide, mySide2, currentPosition);
request.putUserData(DiffUserDataKeysEx.EDITORS_CARET_POSITION, new LogicalPosition[]{position1, position2});
}
return request;
}
}
}
| |
package com.alicode.game.dogedash;

import com.alicode.game.dogedash.screens.OptionsScreen;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.graphics.Texture;
import com.badlogic.gdx.graphics.g2d.TextureAtlas;
import com.badlogic.gdx.graphics.g2d.TextureRegion;

/**
 * Central static registry of all texture assets.
 * Call {@link #load()} once at startup to populate every region from the
 * texture atlas, and {@link #dispose()} on shutdown to release GPU resources.
 *
 * NOTE(review): several declared regions (e.g. characterDieCircle and the
 * night_* player/puppy/object variants) are never assigned in this class and
 * remain null unless populated elsewhere — verify against callers.
 */
public class Assets {

    public static TextureAtlas atlas;

    public static TextureRegion menu_blackpup, menu_blackpup2, menu_bluepup, menu_creampup_body, menu_creampup_paw, menu_creampup_paw2, menu_mom_nose_paw, menu_mombody;
    public static TextureRegion menu, splash, background, background2, ninePatchBox;
    public static TextureRegion character, character2, characterJump, characterHit, characterHit2, characterDie, characterDieCircle, dogsuper1, dogsuper2, energy1, energy2, energy3, night_dogsuper2;
    public static TextureRegion night_character, night_character2, night_characterJump, night_characterHit, night_characterHit2, night_characterDie, night_characterDieCircle, night_dogsuper1,
            night_energy1, night_energy2, night_energy3;
    public static TextureRegion blackPup, blackPup2, creamPup, creamPup2, bluePup, bluePup2, redPup, redPup2;
    public static TextureRegion night_blackPup, night_blackPup2, night_creamPup, night_creamPup2, night_bluePup, night_bluePup2, night_redPup, night_redPup2;
    public static TextureRegion gamePuddle, gamePuddle2, gameMud, gameMud2, hurdleLog, pow, style, flower1, flower2, flower3, flower4, flower5, gameBush, night_gameMud, night_gameMud2,
            night_gamePuddle2, night_gamePuddle, night_hurdleLog, treat1, treat2, shield_pickup, shield;
    public static TextureRegion night_flower1, night_flower2, night_flower3, night_flower4, night_flower5, night_gameBush;
    public static TextureRegion enemyBee, enemyBee2, enemyBee3, enemyMoth, enemyMoth2, enemyMoth3;
    public static TextureRegion back, customization, difficulty, credit, easy, hard, highscores, lvl1, lvl2, lvlselect, lvlselect_txt, musicoff, musicon, normal, options, options_txt, options_title,
            play, puppydash, soundoff, soundon, tutorial, vibrationoff, vibrationon, retry, gameovertitle, gametokens, mainmenu, puppiescaught, stylepoints, time, totalscore;
    public static TextureRegion hardSmall, easySmall, normalSmall;
    public static TextureRegion acc_angel, acc_angelj, acc_monocle, acc_tophatj, acc_tophat, acc_monoclej, acc_shades, acc_shadesj, acc_halo, acc_haloj, acc_hipster, acc_hipsterj, acc_horns,
            acc_hornsj, acc_moustache, acc_moustachej, acc_clownnose, acc_clownnosej, acc_devilwings, acc_devilwingsj, acc_pumpkin, acc_pumpkinj, acc_santahat, acc_santahatj, acc_unibrow,
            acc_unibrowj, acc_policehat, acc_policehatj, acc_keepout, acc_keepoutj;
    public static TextureRegion mudDrop, waterDrop;
    public static TextureRegion shop_shades, shop_monocle, shop_tophat, shop_halo, shop_horns, shop_hipster, shop_moustache, shop_angel, shop_clownnose, shop_devilwings, shop_pumpkin, shop_unibrow,
            shop_santahat, shop_policehat, shop_keepout;
    public static TextureRegion custom_title, currentBox, chowcoin, itemLock, shopItemBox;
    public static TextureRegion num0, num1, num2, num3, num4, num5, num6, num7, num8, num9, num0_2, num1_2, num2_2, num3_2, num4_2, num5_2, num6_2, num7_2, num8_2, num9_2, numMinus, numMinus_2;
    public static TextureRegion player_score;
    public static TextureRegion tab_back, tab_eyes, tab_nose, tab_head, button_next, button_previous, itemsEquipText, tutorials;
    public static TextureRegion pup_missed, pup_points, pause_menu, pause_resume, dogecoins_text, pause_button, enemies_on_player;
    public static TextureRegion shop_after, shop_buyitem, shop_currentcoins, shop_no, shop_price, shop_yes, chowcoin_2, shop_not_enough;
    public static TextureRegion arrow_left, arrow_right, tutorial_1, tutorial_2, tutorial_3, tutorial_4, tutorial_5, tutorial_resume, tutorial_mainmenu, tutorial_levelselect, tutorial_select;
    public static Texture bg_big_day, bg_big_night;
    public static TextureRegion night_light_1, night_light_2, night_light_3, pauseMusicOn, pauseMusicOff, pauseVibrationOn, pauseVibrationOff, pauseSoundOn, pauseSoundOff, gameLight, gameLightCone;

    /**
     * Loads the texture atlas and resolves every region, then reads the
     * persisted sound/music/vibration settings. Must be called before any
     * region is used.
     */
    public static void load() {
        atlas = new TextureAtlas(Gdx.files.internal("core/core.pack"));

        loadAccs();
        loadEffects();
        loadEnemies();
        loadObjects();
        loadPlayer();
        loadPuppies();
        loadLevelSelection();
        loadMain();
        loadShop();
        loadText();
        loadButtons();
        initSound();

        // bgs — standalone textures, NOT part of the atlas (see dispose()).
        bg_big_day = new Texture(Gdx.files.internal("core/background_big.png"));
        bg_big_night = new Texture(Gdx.files.internal("core/background_big2.png"));
    }

    /** Restores audio/vibration toggles from the settings database. */
    private static void initSound() {
        OptionsScreen.isSoundOn = DogeDashCore.db.getSettings(1).getSoundSettings();
        OptionsScreen.isMusicOn = DogeDashCore.db.getSettings(1).getMusicSettings();
        OptionsScreen.isVibrationOn = DogeDashCore.db.getSettings(1).getVibrationSettings();
    }

    private static void loadButtons() {
        // main menu
        play = atlas.findRegion("menu/buttons/play");
        options = atlas.findRegion("menu/buttons/options");
        highscores = atlas.findRegion("menu/buttons/highscores");
        customization = atlas.findRegion("menu/buttons/customization");
        back = atlas.findRegion("menu/buttons/back");
        retry = atlas.findRegion("menu/buttons/retry");
        button_next = atlas.findRegion("menu/buttons/button_next");
        button_previous = atlas.findRegion("menu/buttons/button_previous");
        credit = atlas.findRegion("menu/buttons/credit");
        mainmenu = atlas.findRegion("menu/buttons/mainmenu");
        arrow_left = atlas.findRegion("menu/buttons/arrow_left");
        arrow_right = atlas.findRegion("menu/buttons/arrow_right");
        // shop
        shop_no = atlas.findRegion("menu/buttons/shop_no");
        shop_yes = atlas.findRegion("menu/buttons/shop_yes");
        // pause
        pause_menu = atlas.findRegion("menu/buttons/pause_menu");
        pause_button = atlas.findRegion("menu/buttons/pause2");
        pause_resume = atlas.findRegion("menu/buttons/pause_resume");
        tutorial_resume = atlas.findRegion("menu/buttons/tutorial_resume");
        tutorial_levelselect = atlas.findRegion("menu/buttons/tutorial_levelselect");
        tutorial_mainmenu = atlas.findRegion("menu/buttons/tutorial_mainmenu");
        // options
        soundon = atlas.findRegion("menu/buttons/soundon");
        soundoff = atlas.findRegion("menu/buttons/soundoff");
        musicon = atlas.findRegion("menu/buttons/musicon");
        musicoff = atlas.findRegion("menu/buttons/musicoff");
        vibrationon = atlas.findRegion("menu/buttons/vibrationon");
        vibrationoff = atlas.findRegion("menu/buttons/vibrationoff");
        pauseMusicOn = atlas.findRegion("menu/buttons/pauseMusicOn");
        pauseMusicOff = atlas.findRegion("menu/buttons/pauseMusicOff");
        pauseVibrationOn = atlas.findRegion("menu/buttons/pauseVibrationOn");
        pauseVibrationOff = atlas.findRegion("menu/buttons/pauseVibrationOff");
        pauseSoundOn = atlas.findRegion("menu/buttons/pauseSoundOn");
        pauseSoundOff = atlas.findRegion("menu/buttons/pauseSoundOff");
    }

    private static void loadLevelSelection() {
        lvl1 = atlas.findRegion("menu/level_selection/lvl1-screenshot");
        lvl2 = atlas.findRegion("menu/level_selection/lvl2-screenshot");
        tutorial_select = atlas.findRegion("menu/level_selection/tutorial-screenshot");
    }

    private static void loadMain() {
        chowcoin = atlas.findRegion("menu/main/chowcoin");
        splash = atlas.findRegion("menu/main/splash2");
        // main menu bg
        menu = atlas.findRegion("menu/main/menu_bg");
        menu_blackpup = atlas.findRegion("menu/main/menu_blackpup");
        menu_blackpup2 = atlas.findRegion("menu/main/menu_blackpup2");
        menu_creampup_body = atlas.findRegion("menu/main/menu_creampup_body");
        menu_creampup_paw = atlas.findRegion("menu/main/menu_creampup_paw1");
        menu_creampup_paw2 = atlas.findRegion("menu/main/menu_creampup_paw2");
        menu_bluepup = atlas.findRegion("menu/main/menu_bluepup");
        menu_mom_nose_paw = atlas.findRegion("menu/main/menu_mom_nose_paw");
        menu_mombody = atlas.findRegion("menu/main/menu_mombody");
    }

    private static void loadShop() {
        itemLock = atlas.findRegion("menu/shop/itemLock");
        shop_angel = atlas.findRegion("menu/shop/shop_angel");
        shop_clownnose = atlas.findRegion("menu/shop/shop_clownnose");
        shop_devilwings = atlas.findRegion("menu/shop/shop_devilwings");
        shop_halo = atlas.findRegion("menu/shop/shop_halo");
        shop_hipster = atlas.findRegion("menu/shop/shop_hipster");
        shop_horns = atlas.findRegion("menu/shop/shop_horns");
        shop_monocle = atlas.findRegion("menu/shop/shop_monocle");
        shop_moustache = atlas.findRegion("menu/shop/shop_moustache");
        shop_pumpkin = atlas.findRegion("menu/shop/shop_pumpkin");
        shop_santahat = atlas.findRegion("menu/shop/shop_santahat");
        shop_shades = atlas.findRegion("menu/shop/shop_shades");
        shop_tophat = atlas.findRegion("menu/shop/shop_tophat");
        shop_unibrow = atlas.findRegion("menu/shop/shop_unibrow");
        shop_keepout = atlas.findRegion("menu/shop/shop_keepout");
        shop_policehat = atlas.findRegion("menu/shop/shop_policehat");
        tab_back = atlas.findRegion("menu/shop/tab_back_open");
        tab_eyes = atlas.findRegion("menu/shop/tab_eyes_open");
        tab_head = atlas.findRegion("menu/shop/tab_head_open");
        tab_nose = atlas.findRegion("menu/shop/tab_nose_open");
        shopItemBox = atlas.findRegion("menu/shop/shopItemBox");
        ninePatchBox = atlas.findRegion("menu/shop/shopItemBox.9");
        currentBox = atlas.findRegion("menu/shop/currentBox");
    }

    private static void loadText() {
        // splash
        // world selection
        difficulty = atlas.findRegion("menu/txt/difficulty");
        hard = atlas.findRegion("menu/txt/hard");
        easy = atlas.findRegion("menu/txt/easy");
        normal = atlas.findRegion("menu/txt/normal");
        hardSmall = atlas.findRegion("menu/txt/hard2");
        easySmall = atlas.findRegion("menu/txt/easy2");
        normalSmall = atlas.findRegion("menu/txt/normal2");
        // gameover stuff
        player_score = atlas.findRegion("menu/txt/player_score");
        enemies_on_player = atlas.findRegion("menu/txt/enemies");
        pup_missed = atlas.findRegion("menu/txt/pup_missed");
        puppiescaught = atlas.findRegion("menu/txt/puppiescaught");
        pup_points = atlas.findRegion("menu/txt/pup_points");
        stylepoints = atlas.findRegion("menu/txt/stylepoints");
        dogecoins_text = atlas.findRegion("menu/txt/dogecoins_text");
        time = atlas.findRegion("menu/txt/time");
        totalscore = atlas.findRegion("menu/txt/totalscore");
        tutorial_1 = atlas.findRegion("menu/txt/tutorial_1");
        tutorial_2 = atlas.findRegion("menu/txt/tutorial_2");
        tutorial_3 = atlas.findRegion("menu/txt/tutorial_3");
        tutorial_4 = atlas.findRegion("menu/txt/tutorial_4");
        tutorial_5 = atlas.findRegion("menu/txt/tutorial_5");
        // shop
        shop_after = atlas.findRegion("menu/txt/shop_after");
        shop_not_enough = atlas.findRegion("menu/txt/shop_not_enough");
        shop_price = atlas.findRegion("menu/txt/shop_price");
        shop_currentcoins = atlas.findRegion("menu/txt/shop_currentcoins");
        shop_buyitem = atlas.findRegion("menu/txt/shop_buyitem");
        // titles/title txt
        puppydash = atlas.findRegion("menu/txt/dogeDash");
        options_title = atlas.findRegion("menu/txt/options_title");
        options_txt = atlas.findRegion("menu/txt/options_text");
        lvlselect = atlas.findRegion("menu/txt/lvlselect");
        lvlselect_txt = atlas.findRegion("menu/txt/lvlselect_text");
        custom_title = atlas.findRegion("menu/txt/custom_title");
        itemsEquipText = atlas.findRegion("menu/txt/itemsEquipText");
        gameovertitle = atlas.findRegion("menu/txt/gameovertitle");
    }

    private static void loadAccs() {
        // accs — each accessory has a normal and a jump ("...j") variant
        acc_angel = atlas.findRegion("game/accessories/acc_angel");
        acc_angelj = atlas.findRegion("game/accessories/acc_angelj");
        acc_policehat = atlas.findRegion("game/accessories/acc_policehat");
        acc_policehatj = atlas.findRegion("game/accessories/acc_policehatj");
        acc_keepout = atlas.findRegion("game/accessories/acc_keepout");
        acc_keepoutj = atlas.findRegion("game/accessories/acc_keepoutj");
        acc_clownnose = atlas.findRegion("game/accessories/acc_clownnose");
        acc_clownnosej = atlas.findRegion("game/accessories/acc_clownnosej");
        acc_devilwings = atlas.findRegion("game/accessories/acc_devilwings");
        // FIX: was loading the non-jump region "acc_devilwings" for the jump variant.
        acc_devilwingsj = atlas.findRegion("game/accessories/acc_devilwingsj");
        acc_halo = atlas.findRegion("game/accessories/acc_halo");
        acc_haloj = atlas.findRegion("game/accessories/acc_haloj");
        acc_hipster = atlas.findRegion("game/accessories/acc_hipster");
        acc_hipsterj = atlas.findRegion("game/accessories/acc_hipsterj");
        acc_horns = atlas.findRegion("game/accessories/acc_horns");
        acc_hornsj = atlas.findRegion("game/accessories/acc_hornsj");
        acc_monocle = atlas.findRegion("game/accessories/acc_monocle");
        acc_monoclej = atlas.findRegion("game/accessories/acc_monoclej");
        acc_moustache = atlas.findRegion("game/accessories/acc_moustache");
        acc_moustachej = atlas.findRegion("game/accessories/acc_moustachej");
        acc_pumpkin = atlas.findRegion("game/accessories/acc_pumpkin");
        acc_pumpkinj = atlas.findRegion("game/accessories/acc_pumpkinj");
        acc_santahat = atlas.findRegion("game/accessories/acc_santahat");
        acc_santahatj = atlas.findRegion("game/accessories/acc_santahatj");
        acc_shades = atlas.findRegion("game/accessories/acc_shades");
        acc_shadesj = atlas.findRegion("game/accessories/acc_shadesj");
        acc_tophat = atlas.findRegion("game/accessories/acc_tophat");
        acc_tophatj = atlas.findRegion("game/accessories/acc_tophatj");
        acc_unibrow = atlas.findRegion("game/accessories/acc_unibrow");
        acc_unibrowj = atlas.findRegion("game/accessories/acc_unibrowj");
    }

    private static void loadEffects() {
        // player effects
        pow = atlas.findRegion("game/effects/pow");
        style = atlas.findRegion("game/effects/style");
        shield = atlas.findRegion("game/effects/shield");
        shield_pickup = atlas.findRegion("game/effects/shield_pickup");
        energy1 = atlas.findRegion("game/effects/energy1");
        energy2 = atlas.findRegion("game/effects/energy2");
        energy3 = atlas.findRegion("game/effects/energy3");
        gameLight = atlas.findRegion("game/effects/gameLight");
        gameLightCone = atlas.findRegion("game/effects/gameLightCone");
    }

    private static void loadEnemies() {
        // enemies
        enemyBee = atlas.findRegion("game/enemies/enemyBee");
        enemyBee2 = atlas.findRegion("game/enemies/enemyBee2");
        enemyBee3 = atlas.findRegion("game/enemies/enemyBee3");
        enemyMoth = atlas.findRegion("game/enemies/enemyMoth1");
        enemyMoth2 = atlas.findRegion("game/enemies/enemyMoth2");
        enemyMoth3 = atlas.findRegion("game/enemies/enemyMoth3");
    }

    private static void loadObjects() {
        gameBush = atlas.findRegion("game/objects/gameBush");
        gameMud = atlas.findRegion("game/objects/gameMud");
        gameMud2 = atlas.findRegion("game/objects/gameMud2");
        gamePuddle = atlas.findRegion("game/objects/gamePuddle");
        gamePuddle2 = atlas.findRegion("game/objects/gamePuddle2");
        hurdleLog = atlas.findRegion("game/objects/hurdleLog");
        chowcoin_2 = atlas.findRegion("game/objects/chowcoin_2");
        flower1 = atlas.findRegion("game/objects/flower1");
        flower2 = atlas.findRegion("game/objects/flower2");
        flower3 = atlas.findRegion("game/objects/flower3");
        flower4 = atlas.findRegion("game/objects/flower4");
        flower5 = atlas.findRegion("game/objects/flower5");
        treat1 = atlas.findRegion("game/objects/treat1");
        treat2 = atlas.findRegion("game/objects/treat2");
    }

    private static void loadPlayer() {
        // Player
        character = atlas.findRegion("game/player/character");
        character2 = atlas.findRegion("game/player/character2");
        characterHit = atlas.findRegion("game/player/characterHit");
        characterHit2 = atlas.findRegion("game/player/characterHit2");
        characterDie = atlas.findRegion("game/player/characterDie");
        characterJump = atlas.findRegion("game/player/characterJump");
        // super doge
        dogsuper1 = atlas.findRegion("game/player/dogsuper1");
        dogsuper2 = atlas.findRegion("game/player/dogsuper2");
    }

    private static void loadPuppies() {
        // Puppies
        redPup = atlas.findRegion("game/puppies/redPup");
        redPup2 = atlas.findRegion("game/puppies/redPup2");
        blackPup = atlas.findRegion("game/puppies/blackPup");
        blackPup2 = atlas.findRegion("game/puppies/blackPup2");
        creamPup = atlas.findRegion("game/puppies/creamPup");
        creamPup2 = atlas.findRegion("game/puppies/creamPup2");
        bluePup = atlas.findRegion("game/puppies/bluePup");
        bluePup2 = atlas.findRegion("game/puppies/bluePup2");
    }

    /**
     * Releases all GPU resources created by {@link #load()}.
     * FIX: bg_big_day/bg_big_night are standalone Textures and are not owned
     * by the atlas, so they must be disposed explicitly (previously leaked).
     * Null checks make this safe even if load() was never called.
     */
    public static void dispose() {
        if (atlas != null) {
            atlas.dispose();
        }
        if (bg_big_day != null) {
            bg_big_day.dispose();
        }
        if (bg_big_night != null) {
            bg_big_night.dispose();
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.infinispan.embedded;
import java.util.Set;
import org.apache.camel.Exchange;
import org.apache.camel.support.DefaultExchange;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
/**
 * Exercises the basic CRUD-style operations of
 * {@link InfinispanEmbeddedAggregationRepository}: add, get, remove,
 * confirm, key listing, scan and recover.
 */
public class InfinispanEmbeddedAggregationRepositoryOperationsTest extends InfinispanEmbeddedTestSupport {

    /** Repository under test, freshly started for each run. */
    private InfinispanEmbeddedAggregationRepository aggregationRepository;

    @Override
    public void setupResources() throws Exception {
        super.setupResources();
        aggregationRepository = new InfinispanEmbeddedAggregationRepository(getCacheName());
        InfinispanEmbeddedConfiguration config = new InfinispanEmbeddedConfiguration();
        config.setCacheContainer(cacheContainer);
        aggregationRepository.setConfiguration(config);
        aggregationRepository.start();
    }

    @Override
    public void cleanupResources() {
        if (aggregationRepository == null) {
            return;
        }
        aggregationRepository.stop();
    }

    /** True when the backing cache holds a non-null value for the key. */
    private boolean exists(String key) {
        Object stored = aggregationRepository.getCache().get(key);
        return stored != null;
    }

    @Test
    public void testAdd() {
        // start from an empty cache
        aggregationRepository.getCache().clear();

        String key = "Add";
        assertFalse(exists(key));
        Exchange toStore = new DefaultExchange(context());

        aggregationRepository.add(context(), key, toStore);

        assertTrue(exists(key));
    }

    @Test
    public void testGetExists() {
        // start from an empty cache
        aggregationRepository.getCache().clear();

        String key = "Get_Exists";
        Exchange stored = new DefaultExchange(context());
        aggregationRepository.add(context(), key, stored);
        assertTrue(exists(key));

        Exchange retrieved = aggregationRepository.get(context(), key);

        assertNotNull(retrieved);
        assertEquals(stored.getExchangeId(), retrieved.getExchangeId());
    }

    @Test
    public void testGetNotExists() {
        // start from an empty cache
        aggregationRepository.getCache().clear();

        String key = "Get_NotExists";
        assertFalse(exists(key));

        Exchange retrieved = aggregationRepository.get(context(), key);

        assertNull(retrieved);
    }

    @Test
    public void testRemoveExists() {
        // start from an empty cache
        aggregationRepository.getCache().clear();

        String key = "Remove_Exists";
        Exchange stored = new DefaultExchange(context());
        aggregationRepository.add(context(), key, stored);
        assertTrue(exists(key));

        aggregationRepository.remove(context(), key, stored);

        assertFalse(exists(key));
    }

    @Test
    public void testRemoveNotExists() {
        // start from an empty cache
        aggregationRepository.getCache().clear();

        String key = "RemoveNotExists";
        Exchange stored = new DefaultExchange(context());
        assertFalse(exists(key));

        // removing a missing key must be a harmless no-op
        aggregationRepository.remove(context(), key, stored);

        assertFalse(exists(key));
    }

    @Test
    public void testGetKeys() {
        // start from an empty cache
        aggregationRepository.getCache().clear();

        String[] keys = { "GetKeys1", "GetKeys2" };
        addExchanges(keys);

        Set<String> keySet = aggregationRepository.getKeys();

        for (String expected : keys) {
            assertTrue(keySet.contains(expected));
        }
    }

    @Test
    public void testConfirmExist() {
        // start from an empty cache
        aggregationRepository.getCache().clear();

        for (int i = 1; i < 4; i++) {
            String key = "Confirm_" + i;
            Exchange stored = new DefaultExchange(context());
            stored.setExchangeId("Exchange_" + i);
            aggregationRepository.add(context(), key, stored);
            assertTrue(exists(key));
        }

        // confirming one entry must remove exactly that entry
        aggregationRepository.confirm(context(), "Confirm_2");

        assertTrue(exists("Confirm_1"));
        assertFalse(exists("Confirm_2"));
        assertTrue(exists("Confirm_3"));
    }

    @Test
    public void testConfirmNotExist() {
        // start from an empty cache
        aggregationRepository.getCache().clear();

        String[] keys = { "Confirm1", "Confirm2", "Confirm3" };
        addExchanges(keys);
        for (String key : keys) {
            assertTrue(exists(key));
        }

        // confirming an unknown id must leave every entry untouched
        aggregationRepository.confirm(context(), "Exchange-Confirm5");

        for (String key : keys) {
            assertTrue(exists(key));
        }
    }

    /** Clears the cache, then stores one exchange per key with id "Exchange-&lt;key&gt;". */
    private void addExchanges(String... keys) {
        aggregationRepository.getCache().clear();
        for (String key : keys) {
            Exchange stored = new DefaultExchange(context());
            stored.setExchangeId("Exchange-" + key);
            aggregationRepository.add(context(), key, stored);
        }
    }

    @Test
    public void testScan() {
        // start from an empty cache
        aggregationRepository.getCache().clear();

        String[] keys = { "Scan1", "Scan2" };
        addExchanges(keys);

        Set<String> exchangeIdSet = aggregationRepository.scan(context());

        for (String key : keys) {
            assertTrue(exchangeIdSet.contains(key));
        }
    }

    @Test
    public void testRecover() {
        // start from an empty cache
        aggregationRepository.getCache().clear();

        String[] keys = { "Recover1", "Recover2" };
        addExchanges(keys);

        Exchange known = aggregationRepository.recover(context(), "Recover2");
        Exchange unknown = aggregationRepository.recover(context(), "Recover3");

        assertNotNull(known);
        assertNull(unknown);
    }
}
| |
/*
* Copyright 2015 Themistoklis Mavridis <themis.mavridis@issel.ee.auth.gr>.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thesmartweb.swebrank;
/*
Porter stemmer in Java. The original paper is in
Porter, 1980, An algorithm for suffix stripping, Program, Vol. 14,
no. 3, pp 130-137,
See also http://www.tartarus.org/~martin/PorterStemmer
History:
Release 1
Bug 1 (reported by Gonzalo Parra 16/10/99) fixed as marked below.
The words 'aed', 'eed', 'oed' leave k at 'a' for step 3, and b[k-1]
is then outside the bounds of b.
Release 2
Similarly,
Bug 2 (reported by Steve Dyrdahl 22/2/00) fixed as marked below.
'ion' by itself leaves j = -1 in the test for 'ion' in step 5, and
b[j] is then outside the bounds of b.
Release 3
Considerably revised 4/9/00 in the light of many helpful suggestions
from Brian Goetz of Quiotix Corporation (brian@quiotix.com).
Release 4
*/
import java.io.*;
/**
 * PorterStemmer, implementing the Porter Stemming Algorithm.
 *
 * <p>The PorterStemmer class transforms a word into its root form. The input
 * word can be provided a character at a time (by calling {@link #add(char)})
 * or in bulk (by calling {@link #add(char[], int)}); {@link #stem()} then
 * reduces the buffered word in place.</p>
 *
 * <p>Reference: Porter, 1980, "An algorithm for suffix stripping",
 * Program, Vol. 14, no. 3, pp 130-137.
 * See also http://www.tartarus.org/~martin/PorterStemmer</p>
 *
 * <p>Words are expected to be in lower case already when added via add();
 * {@link #process(String[])} lower-cases its input itself.</p>
 */
class PorterStemmer
{
   private char[] b;   /* working buffer holding the word being stemmed */
   private int i,      /* offset into b (number of characters added so far) */
       i_end,          /* offset to end of stemmed word (valid after stem()) */
       j, k;           /* cursors used internally by the stemming steps */
   private static final int INC = 50;
   /* unit of size whereby b is increased */

   public PorterStemmer()
   {  b = new char[INC];
      i = 0;
      i_end = 0;
   }

   /**
    * Add a character to the word being stemmed.  When you are finished
    * adding characters, you can call stem(void) to stem the word.
    */
   public void add(char ch)
   {  if (i == b.length)
      {  /* grow the buffer by INC before appending */
         char[] new_b = new char[i+INC];
         for (int c = 0; c < i; c++) new_b[c] = b[c];
         b = new_b;
      }
      b[i++] = ch;
   }

   /** Adds wLen characters to the word being stemmed contained in a portion
    * of a char[] array. This is like repeated calls of add(char ch), but
    * faster.
    */
   public void add(char[] w, int wLen)
   {  if (i+wLen >= b.length)
      {  char[] new_b = new char[i+wLen+INC];
         for (int c = 0; c < i; c++) new_b[c] = b[c];
         b = new_b;
      }
      for (int c = 0; c < wLen; c++) b[i++] = w[c];
   }

   /**
    * After a word has been stemmed, it can be retrieved by toString(),
    * or a reference to the internal buffer can be retrieved by getResultBuffer
    * and getResultLength (which is generally more efficient.)
    */
   public String toString() { return new String(b,0,i_end); }

   /**
    * Returns the length of the word resulting from the stemming process.
    */
   public int getResultLength() { return i_end; }

   /**
    * Returns a reference to a character buffer containing the results of
    * the stemming process.  You also need to consult getResultLength()
    * to determine the length of the result.
    */
   public char[] getResultBuffer() { return b; }

   /* cons(i) is true <=> b[i] is a consonant. 'y' counts as a consonant at
      position 0, otherwise it is a consonant exactly when the previous
      character is not one. */
   private boolean cons(int i)
   {  switch (b[i])
      {  case 'a': case 'e': case 'i': case 'o': case 'u': return false;
         case 'y': return (i==0) ? true : !cons(i-1);
         default: return true;
      }
   }

   /* m() measures the number of consonant sequences between 0 and j. if c is
      a consonant sequence and v a vowel sequence, and <..> indicates arbitrary
      presence,
         <c><v>       gives 0
         <c>vc<v>     gives 1
         <c>vcvc<v>   gives 2
         <c>vcvcvc<v> gives 3
         ....
   */
   private int m()
   {  int n = 0;
      int i = 0;
      /* skip the optional leading consonant sequence */
      while(true)
      {  if (i > j) return n;
         if (! cons(i)) break; i++;
      }
      i++;
      /* count each subsequent vowel-sequence/consonant-sequence pair */
      while(true)
      {  while(true)
         {  if (i > j) return n;
            if (cons(i)) break;
            i++;
         }
         i++;
         n++;
         while(true)
         {  if (i > j) return n;
            if (! cons(i)) break;
            i++;
         }
         i++;
      }
   }

   /* vowelinstem() is true <=> 0,...j contains a vowel */
   private boolean vowelinstem()
   {  int i; for (i = 0; i <= j; i++) if (! cons(i)) return true;
      return false;
   }

   /* doublec(j) is true <=> j,(j-1) contain a double consonant. */
   private boolean doublec(int j)
   {  if (j < 1) return false;
      if (b[j] != b[j-1]) return false;
      return cons(j);
   }

   /* cvc(i) is true <=> i-2,i-1,i has the form consonant - vowel - consonant
      and also if the second c is not w,x or y. this is used when trying to
      restore an e at the end of a short word. e.g.
         cav(e), lov(e), hop(e), crim(e), but
         snow, box, tray.
   */
   private boolean cvc(int i)
   {  if (i < 2 || !cons(i) || cons(i-1) || !cons(i-2)) return false;
      {  int ch = b[i];
         if (ch == 'w' || ch == 'x' || ch == 'y') return false;
      }
      return true;
   }

   /* ends(s) is true <=> the buffer ends (at k) with the string s;
      as a side effect it sets j to the offset just before the suffix. */
   private boolean ends(String s)
   {  int l = s.length();
      int o = k-l+1;
      if (o < 0) return false;
      for (int i = 0; i < l; i++) if (b[o+i] != s.charAt(i)) return false;
      j = k-l;
      return true;
   }

   /* setto(s) sets (j+1),...k to the characters in the string s, readjusting
      k. */
   private void setto(String s)
   {  int l = s.length();
      int o = j+1;
      for (int i = 0; i < l; i++) b[o+i] = s.charAt(i);
      k = j+l;
   }

   /* r(s) replaces the current suffix by s, but only when the stem before
      it is non-empty (m() > 0). */
   private void r(String s) { if (m() > 0) setto(s); }

   /* step1() gets rid of plurals and -ed or -ing. e.g.
          caresses  ->  caress
          ponies    ->  poni
          ties      ->  ti
          caress    ->  caress
          cats      ->  cat
          feed      ->  feed
          agreed    ->  agree
          disabled  ->  disable
          matting   ->  mat
          mating    ->  mate
          meeting   ->  meet
          milling   ->  mill
          messing   ->  mess
          meetings  ->  meet
   */
   private void step1()
   {  if (b[k] == 's')
      {  if (ends("sses")) k -= 2; else
         if (ends("ies")) setto("i"); else
         if (b[k-1] != 's') k--;
      }
      if (ends("eed")) { if (m() > 0) k--; } else
      if ((ends("ed") || ends("ing")) && vowelinstem())
      {  k = j;
         if (ends("at")) setto("ate"); else
         if (ends("bl")) setto("ble"); else
         if (ends("iz")) setto("ize"); else
         if (doublec(k))
         {  k--;
            {  int ch = b[k];
               /* undoubling does not apply to l, s, z (e.g. "fall") */
               if (ch == 'l' || ch == 's' || ch == 'z') k++;
            }
         }
         else if (m() == 1 && cvc(k)) setto("e");
      }
   }

   /* step2() turns terminal y to i when there is another vowel in the stem. */
   private void step2() { if (ends("y") && vowelinstem()) b[k] = 'i'; }

   /* step3() maps double suffices to single ones. so -ization ( = -ize plus
      -ation) maps to -ize etc. note that the string before the suffix must give
      m() > 0. */
   private void step3() { if (k == 0) return; /* For Bug 1 */ switch (b[k-1])
   {
       case 'a': if (ends("ational")) { r("ate"); break; }
                 if (ends("tional")) { r("tion"); break; }
                 break;
       case 'c': if (ends("enci")) { r("ence"); break; }
                 if (ends("anci")) { r("ance"); break; }
                 break;
       case 'e': if (ends("izer")) { r("ize"); break; }
                 break;
       case 'l': if (ends("bli")) { r("ble"); break; }
                 if (ends("alli")) { r("al"); break; }
                 if (ends("entli")) { r("ent"); break; }
                 if (ends("eli")) { r("e"); break; }
                 if (ends("ousli")) { r("ous"); break; }
                 break;
       case 'o': if (ends("ization")) { r("ize"); break; }
                 if (ends("ation")) { r("ate"); break; }
                 if (ends("ator")) { r("ate"); break; }
                 break;
       case 's': if (ends("alism")) { r("al"); break; }
                 if (ends("iveness")) { r("ive"); break; }
                 if (ends("fulness")) { r("ful"); break; }
                 if (ends("ousness")) { r("ous"); break; }
                 break;
       case 't': if (ends("aliti")) { r("al"); break; }
                 if (ends("iviti")) { r("ive"); break; }
                 if (ends("biliti")) { r("ble"); break; }
                 break;
       case 'g': if (ends("logi")) { r("log"); break; }
   } }

   /* step4() deals with -ic-, -full, -ness etc. similar strategy to step3. */
   private void step4() { switch (b[k])
   {
       case 'e': if (ends("icate")) { r("ic"); break; }
                 if (ends("ative")) { r(""); break; }
                 if (ends("alize")) { r("al"); break; }
                 break;
       case 'i': if (ends("iciti")) { r("ic"); break; }
                 break;
       case 'l': if (ends("ical")) { r("ic"); break; }
                 if (ends("ful")) { r(""); break; }
                 break;
       case 's': if (ends("ness")) { r(""); break; }
                 break;
   } }

   /* step5() takes off -ant, -ence etc., in context <c>vcvc<v>. */
   private void step5()
   {  if (k == 0) return; /* for Bug 1 */ switch (b[k-1])
      {  case 'a': if (ends("al")) break; return;
         case 'c': if (ends("ance")) break;
                   if (ends("ence")) break; return;
         case 'e': if (ends("er")) break; return;
         case 'i': if (ends("ic")) break; return;
         case 'l': if (ends("able")) break;
                   if (ends("ible")) break; return;
         case 'n': if (ends("ant")) break;
                   if (ends("ement")) break;
                   if (ends("ment")) break;
                   /* element etc. not stripped before the m */
                   if (ends("ent")) break; return;
         case 'o': if (ends("ion") && j >= 0 && (b[j] == 's' || b[j] == 't')) break;
                   /* j >= 0 fixes Bug 2 */
                   if (ends("ou")) break; return;
                   /* takes care of -ous */
         case 's': if (ends("ism")) break; return;
         case 't': if (ends("ate")) break;
                   if (ends("iti")) break; return;
         case 'u': if (ends("ous")) break; return;
         case 'v': if (ends("ive")) break; return;
         case 'z': if (ends("ize")) break; return;
         default: return;
      }
      if (m() > 1) k = j;
   }

   /* step6() removes a final -e if m() > 1. */
   private void step6()
   {  j = k;
      if (b[k] == 'e')
      {  int a = m();
         if (a > 1 || a == 1 && !cvc(k-1)) k--;
      }
      if (b[k] == 'l' && doublec(k) && m() > 1) k--;
   }

   /**
    * Stem the word placed into the PorterStemmer buffer through calls to
    * add().  You can retrieve the result with
    * getResultLength()/getResultBuffer() or toString().
    * (Note: this implementation does not report whether the stemming
    * changed the word — the buffer is simply reduced in place.)
    */
   public void stem()
   {  k = i - 1;
      if (k > 1) { step1(); step2(); step3(); step4(); step5(); step6(); }
      i_end = k+1; i = 0;
   }

   /**
    * Stems the words contained in the given strings.
    *
    * <p>Each entry of {@code input} is scanned for runs of letters; every
    * run is lower-cased and stemmed.  The result array has the same length
    * as {@code input}: slot n holds the n-th stemmed word encountered
    * across all entries (extra words beyond {@code input.length} are
    * ignored; unused slots stay {@code null}).  On an I/O error a message
    * is printed and an all-null array of the same length is returned.</p>
    *
    * @param input strings to stem
    * @return array of stemmed words, same length as {@code input}
    * @throws UnsupportedEncodingException never in practice (UTF-8 is
    *         guaranteed to be supported), but kept for API compatibility
    */
   public String[] process(String[] input) throws UnsupportedEncodingException
   {
      String[] final_str = new String[input.length];
      int cnt = 0;
      char[] w = new char[501];
      PorterStemmer s = new PorterStemmer();
      /* renamed loop/word counters so they no longer shadow fields i and j */
      for (int idx = 0; idx < input.length; idx++)
      {
         InputStream in = new ByteArrayInputStream(input[idx].getBytes("UTF-8"));
         try
         {  while(true)
            {  int ch = in.read();
               if (Character.isLetter((char) ch))
               {
                  int wl = 0;   /* length of the current word in w */
                  while(true)
                  {  ch = Character.toLowerCase((char) ch);
                     w[wl] = (char) ch;
                     if (wl < 500) wl++;   /* words longer than 500 chars are truncated */
                     ch = in.read();
                     if (!Character.isLetter((char) ch))
                     {
                        /* feed the collected word to the stemmer */
                        for (int c = 0; c < wl; c++) s.add(w[c]);
                        s.stem();
                        String u = s.toString();
                        /* FIX: only store while there is room; previously an
                           input entry containing several words overran
                           final_str and threw ArrayIndexOutOfBoundsException. */
                        if (cnt < final_str.length)
                        {  final_str[cnt] = u;
                           cnt = cnt + 1;
                        }
                        break;
                     }
                  }
               }
               if (ch < 0) break;   /* end of this input entry */
            }
         }
         catch (IOException e)
         {  System.out.println("error reading " + input[idx]);
            final_str = new String[input.length];
            return final_str;
         }
      }
      return final_str;
   }
}
| |
package org.testobject.commons.math.algebra;
import java.util.List;
import org.codehaus.jackson.annotate.JsonCreator;
import org.codehaus.jackson.annotate.JsonProperty;
/**
*
* @author enijkamp
*
*/
// TODO align Int, Double class methods (en)
public interface Rectangle {
/**
 * Mutable integer-valued axis-aligned rectangle, given by its top-left
 * corner (x, y) and extent (w, h).  A rectangle with a negative width or
 * height is treated as non-existent by the geometric operations below.
 *
 * <p>NOTE(review): fields are public and mutable (presumably for
 * performance and Jackson serialization); instances are not thread-safe.</p>
 */
class Int {

    public int x;
    public int y;
    public int w;
    public int h;

    /** Shared all-zero rectangle (empty, located at the origin). */
    public static final Rectangle.Int ZERO = new Rectangle.Int(0, 0, 0, 0);

    /** Creates an empty rectangle at the origin. */
    public Int() {
        this.x = 0;
        this.y = 0;
        this.w = 0;
        this.h = 0;
    }

    /** Creates a rectangle of the given size located at the origin. */
    public Int(int w, int h) {
        this.x = 0;
        this.y = 0;
        this.w = w;
        this.h = h;
    }

    @JsonCreator
    public Int(@JsonProperty("x") int x, @JsonProperty("y") int y, @JsonProperty("w") int w, @JsonProperty("h") int h) {
        this.x = x;
        this.y = y;
        this.w = w;
        this.h = h;
    }

    /** Creates a rectangle from a position (top-left corner) and a size. */
    public Int(Point.Int position, Size.Int size) {
        this.x = position.x;
        this.y = position.y;
        this.w = size.w;
        this.h = size.h;
    }

    /** Copy constructor. */
    public Int(Rectangle.Int other) {
        this.x = other.x;
        this.y = other.y;
        this.w = other.w;
        this.h = other.h;
    }

    /** Returns the top-left corner as a point. */
    public Point.Int getLocation() {
        return new Point.Int(x, y);
    }

    @JsonProperty("x")
    public int getX() {
        return x;
    }

    @JsonProperty("y")
    public int getY() {
        return y;
    }

    @JsonProperty("w")
    public int getWidth() {
        return w;
    }

    @JsonProperty("h")
    public int getHeight() {
        return h;
    }

    public int getMinX() {
        return getX();
    }

    public int getMinY() {
        return getY();
    }

    /** Exclusive right edge; may overflow int for extreme rectangles. */
    public int getMaxX() {
        return getX() + getWidth();
    }

    /** Exclusive bottom edge; may overflow int for extreme rectangles. */
    public int getMaxY() {
        return getY() + getHeight();
    }

    public int getCenterX() {
        return getX() + getWidth() / 2;
    }

    public int getCenterY() {
        return getY() + getHeight() / 2;
    }

    public Size.Int getSize() {
        return new Size.Int(w, h);
    }

    /** A rectangle is empty when either dimension is zero or negative. */
    public boolean isEmpty() {
        return (w <= 0) || (h <= 0);
    }

    public String toString() {
        return "[" + x + "," + y + "," + w + "," + h + "]";
    }

    @Override
    public boolean equals(Object obj) {
        if (!(obj instanceof Rectangle.Int))
            return false;
        Rectangle.Int p = (Rectangle.Int) obj;
        return x == p.x && y == p.y && w == p.w && h == p.h;
    }

    @Override
    public int hashCode() {
        // Folds the four coordinates (as double bit patterns) into one int,
        // mirroring java.awt.geom.Rectangle2D.hashCode.
        long bits = java.lang.Double.doubleToLongBits(getX());
        bits += java.lang.Double.doubleToLongBits(getY()) * 37;
        bits += java.lang.Double.doubleToLongBits(getWidth()) * 43;
        bits += java.lang.Double.doubleToLongBits(getHeight()) * 47;
        return (((int) bits) ^ ((int) (bits >> 32)));
    }

    /** Copies position and size from the given rectangle. */
    public void setRect(Rectangle.Int rect) {
        reshape(rect.x, rect.y, rect.w, rect.h);
    }

    /**
     * Enlarges this rectangle in place to the union of itself and {@code r}.
     * A non-existent rectangle (negative dimension) on either side is
     * treated as absent.
     *
     * <p>FIX: accumulators are now {@code long} — previously they were
     * {@code int}, so the overflow clamps against Integer.MAX_VALUE were
     * dead code (always false) and the sums could wrap silently; this now
     * matches {@link #union(Rectangle.Int)}.  Also returns immediately
     * after adopting {@code r} when this rectangle was non-existent —
     * previously the stale negative dimensions leaked into the
     * computation below and could corrupt the result.</p>
     */
    public void add(Rectangle.Int r) {
        long tx2 = this.w;
        long ty2 = this.h;
        if ((tx2 | ty2) < 0) {
            // This rectangle is non-existent: the union is simply r.
            reshape(r.x, r.y, r.w, r.h);
            return;
        }
        long rx2 = r.w;
        long ry2 = r.h;
        if ((rx2 | ry2) < 0) {
            // r is non-existent: nothing to add.
            return;
        }
        int tx1 = this.x;
        int ty1 = this.y;
        tx2 += tx1;
        ty2 += ty1;
        int rx1 = r.x;
        int ry1 = r.y;
        rx2 += rx1;
        ry2 += ry1;
        if (tx1 > rx1)
            tx1 = rx1;
        if (ty1 > ry1)
            ty1 = ry1;
        if (tx2 < rx2)
            tx2 = rx2;
        if (ty2 < ry2)
            ty2 = ry2;
        tx2 -= tx1;
        ty2 -= ty1;
        // tx2,ty2 will never underflow since both original
        // rectangles were non-empty
        // they might overflow, though... clamp to the int range
        if (tx2 > java.lang.Integer.MAX_VALUE)
            tx2 = java.lang.Integer.MAX_VALUE;
        if (ty2 > java.lang.Integer.MAX_VALUE)
            ty2 = java.lang.Integer.MAX_VALUE;
        reshape(tx1, ty1, (int) tx2, (int) ty2);
    }

    /** Sets position and size in one call. */
    private void reshape(int x, int y, int w, int h) {
        this.x = x;
        this.y = y;
        this.h = h;
        this.w = w;
    }

    /**
     * Returns whether the point (X, Y) lies inside this rectangle.
     * The comparisons below are written to stay correct even when
     * x + w or y + h overflows int (same trick as java.awt.Rectangle).
     */
    public boolean contains(int X, int Y) {
        int w = this.w;
        int h = this.h;
        if ((w | h) < 0) {
            // At least one of the dimensions is negative...
            return false;
        }
        // Note: if either dimension is zero, tests below must return false...
        int x = this.x;
        int y = this.y;
        if (X < x || Y < y) {
            return false;
        }
        w += x;
        h += y;
        // overflow || intersect
        return ((w < x || w > X) && (h < y || h > Y));
    }

    /** Returns whether {@code r} lies entirely inside this rectangle. */
    public boolean contains(Rectangle.Int r) {
        return contains(r.x, r.y, r.w, r.h);
    }

    /**
     * Returns whether the rectangle (X, Y, W, H) lies entirely inside this
     * rectangle.  Overflow of the edge sums is handled explicitly.
     */
    public boolean contains(int X, int Y, int W, int H) {
        int w = this.w;
        int h = this.h;
        if ((w | h | W | H) < 0) {
            // At least one of the dimensions is negative...
            return false;
        }
        // Note: if any dimension is zero, tests below must return false...
        int x = this.x;
        int y = this.y;
        if (X < x || Y < y) {
            return false;
        }
        w += x;
        W += X;
        if (W <= X) {
            // X+W overflowed or W was zero, return false if...
            // either original w or W was zero or
            // x+w did not overflow or
            // the overflowed x+w is smaller than the overflowed X+W
            if (w >= x || W > w)
                return false;
        } else {
            // X+W did not overflow and W was not zero, return false if...
            // original w was zero or
            // x+w did not overflow and x+w is smaller than X+W
            if (w >= x && W > w)
                return false;
        }
        h += y;
        H += Y;
        if (H <= Y) {
            if (h >= y || H > h)
                return false;
        } else {
            if (h >= y && H > h)
                return false;
        }
        return true;
    }

    /** Writes the union of src1 and src2 into dest. */
    public static void union(Rectangle.Int src1, Rectangle.Int src2, Rectangle.Int dest) {
        int x1 = Math.min(src1.getMinX(), src2.getMinX());
        int y1 = Math.min(src1.getMinY(), src2.getMinY());
        int x2 = Math.max(src1.getMaxX(), src2.getMaxX());
        int y2 = Math.max(src1.getMaxY(), src2.getMaxY());
        dest.setFrameFromDiagonal(x1, y1, x2, y2);
    }

    /**
     * Returns the union of all given rectangles.
     *
     * @throws IllegalArgumentException when the list is empty
     */
    public static Rectangle.Int union(List<Rectangle.Int> rectangles) {
        if(rectangles.isEmpty()) {
            throw new IllegalArgumentException("empty set of rectangles");
        }
        Rectangle.Int union = new Rectangle.Int(rectangles.get(0));
        for(int i = 1; i < rectangles.size(); i++) {
            union = union.union(rectangles.get(i));
        }
        return union;
    }

    /** Sets this rectangle from two opposite corners, in any order. */
    public void setFrameFromDiagonal(int x1, int y1, int x2, int y2) {
        if (x2 < x1) {
            int t = x1;
            x1 = x2;
            x2 = t;
        }
        if (y2 < y1) {
            int t = y1;
            y1 = y2;
            y2 = t;
        }
        reshape(x1, y1, x2 - x1, y2 - y1);
    }

    /**
     * Returns a new rectangle that is the union of this rectangle and r.
     * Uses long accumulators so overflow can be clamped instead of wrapping.
     */
    public Rectangle.Int union(Rectangle.Int r) {
        long tx2 = this.w;
        long ty2 = this.h;
        if ((tx2 | ty2) < 0) {
            // This rectangle has negative dimensions...
            // If r has non-negative dimensions then it is the answer.
            // If r is non-existant (has a negative dimension), then both
            // are non-existant and we can return any non-existant rectangle
            // as an answer.  Thus, returning r meets that criterion.
            // Either way, r is our answer.
            return new Rectangle.Int(r);
        }
        long rx2 = r.w;
        long ry2 = r.h;
        if ((rx2 | ry2) < 0) {
            return new Rectangle.Int(this);
        }
        int tx1 = this.x;
        int ty1 = this.y;
        tx2 += tx1;
        ty2 += ty1;
        int rx1 = r.x;
        int ry1 = r.y;
        rx2 += rx1;
        ry2 += ry1;
        if (tx1 > rx1)
            tx1 = rx1;
        if (ty1 > ry1)
            ty1 = ry1;
        if (tx2 < rx2)
            tx2 = rx2;
        if (ty2 < ry2)
            ty2 = ry2;
        tx2 -= tx1;
        ty2 -= ty1;
        // tx2,ty2 will never underflow since both original rectangles
        // were already proven to be non-empty
        // they might overflow, though...
        if (tx2 > Integer.MAX_VALUE)
            tx2 = Integer.MAX_VALUE;
        if (ty2 > Integer.MAX_VALUE)
            ty2 = Integer.MAX_VALUE;
        return new Rectangle.Int(tx1, ty1, (int) tx2, (int) ty2);
    }

    /**
     * Returns the intersection of this rectangle and r.  When the two do
     * not overlap, the result has a negative width and/or height (it is
     * non-existent), matching java.awt.Rectangle.intersection.
     */
    public Rectangle.Int intersection(Rectangle.Int r) {
        int tx1 = this.x;
        int ty1 = this.y;
        int rx1 = r.x;
        int ry1 = r.y;
        long tx2 = tx1;
        tx2 += this.w;
        long ty2 = ty1;
        ty2 += this.h;
        long rx2 = rx1;
        rx2 += r.w;
        long ry2 = ry1;
        ry2 += r.h;
        if (tx1 < rx1)
            tx1 = rx1;
        if (ty1 < ry1)
            ty1 = ry1;
        if (tx2 > rx2)
            tx2 = rx2;
        if (ty2 > ry2)
            ty2 = ry2;
        tx2 -= tx1;
        ty2 -= ty1;
        // tx2,ty2 will never overflow (they will never be
        // larger than the smallest of the two source w,h)
        // they might underflow, though...
        if (tx2 < Integer.MIN_VALUE)
            tx2 = Integer.MIN_VALUE;
        if (ty2 < Integer.MIN_VALUE)
            ty2 = Integer.MIN_VALUE;
        return new Rectangle.Int(tx1, ty1, (int) tx2, (int) ty2);
    }

    /**
     * Like {@link #intersects(Rectangle.Int)} but treats rectangles that
     * merely touch along an edge (closed comparison) as bordering.
     */
    public boolean bordering(Rectangle.Int r) {
        int tw = this.w;
        int th = this.h;
        int rw = r.w;
        int rh = r.h;
        if (rw <= 0 || rh <= 0 || tw <= 0 || th <= 0) {
            return false;
        }
        int tx = this.x;
        int ty = this.y;
        int rx = r.x;
        int ry = r.y;
        rw += rx;
        rh += ry;
        tw += tx;
        th += ty;
        // overflow || intersect
        return ((rw <= rx || rw >= tx) &&
                (rh <= ry || rh >= ty) &&
                (tw <= tx || tw >= rx) && (th <= ty || th >= ry));
    }

    /** Returns whether this rectangle and r overlap in a non-empty area. */
    public boolean intersects(Rectangle.Int r) {
        int tw = this.w;
        int th = this.h;
        int rw = r.w;
        int rh = r.h;
        if (rw <= 0 || rh <= 0 || tw <= 0 || th <= 0) {
            return false;
        }
        int tx = this.x;
        int ty = this.y;
        int rx = r.x;
        int ry = r.y;
        rw += rx;
        rh += ry;
        tw += tx;
        th += ty;
        // overflow || intersect
        return ((rw < rx || rw > tx) &&
                (rh < ry || rh > ty) &&
                (tw < tx || tw > rx) && (th < ty || th > ry));
    }
}
/**
 * Mutable double-valued axis-aligned rectangle, given by its top-left
 * corner (x, y) and extent (w, h).  A rectangle with a negative width or
 * height is treated as non-existent by the geometric operations below.
 *
 * <p>Inside this class the simple name {@code Double} refers to
 * {@code Rectangle.Double}, hence the fully qualified
 * {@code java.lang.Double} references.</p>
 */
class Double {

    public double x;
    public double y;
    public double w;
    public double h;

    /** Shared all-zero rectangle (empty, located at the origin). */
    public static final Rectangle.Double ZERO = new Rectangle.Double(0, 0, 0, 0);

    /** Creates an empty rectangle at the origin. */
    public Double() {
        this.x = 0;
        this.y = 0;
        this.w = 0;
        this.h = 0;
    }

    public Double(double x, double y, double w, double h) {
        this.x = x;
        this.y = y;
        this.w = w;
        this.h = h;
    }

    public double getX() {
        return x;
    }

    public double getY() {
        return y;
    }

    public double getWidth() {
        return w;
    }

    public double getHeight() {
        return h;
    }

    public double getMinX() {
        return getX();
    }

    public double getMinY() {
        return getY();
    }

    public double getMaxX() {
        return getX() + getWidth();
    }

    public double getMaxY() {
        return getY() + getHeight();
    }

    public double getCenterX() {
        return getX() + getWidth() / 2.0;
    }

    public double getCenterY() {
        return getY() + getHeight() / 2.0;
    }

    public String toString() {
        return "[" + x + "," + y + "," + w + "," + h + "]";
    }

    @Override
    public boolean equals(Object obj) {
        if (!(obj instanceof Rectangle.Double))
            return false;
        Rectangle.Double p = (Rectangle.Double) obj;
        return x == p.x && y == p.y && w == p.w && h == p.h;
    }

    @Override
    public int hashCode() {
        // Folds the four coordinates (as double bit patterns) into one int,
        // mirroring java.awt.geom.Rectangle2D.hashCode.
        long bits = java.lang.Double.doubleToLongBits(getX());
        bits += java.lang.Double.doubleToLongBits(getY()) * 37;
        bits += java.lang.Double.doubleToLongBits(getWidth()) * 43;
        bits += java.lang.Double.doubleToLongBits(getHeight()) * 47;
        return (((int) bits) ^ ((int) (bits >> 32)));
    }

    /** Returns the area w * h (negative or zero for non-existent rects). */
    public double area() {
        return w * h;
    }

    /** Returns whether r lies entirely inside this rectangle. */
    public boolean contains(Rectangle.Double r) {
        return contains(r.x, r.y, r.w, r.h);
    }

    /** Returns whether the rectangle (x, y, w, h) lies inside this one. */
    public boolean contains(double x, double y, double w, double h) {
        if (w <= 0 || h <= 0) {
            return false;
        }
        double x0 = getX();
        double y0 = getY();
        return (x >= x0 &&
                y >= y0 &&
                (x + w) <= x0 + getWidth() && (y + h) <= y0 + getHeight());
    }

    /** Returns a new rectangle that is the union of this rectangle and r. */
    public Rectangle.Double union(Rectangle.Double r) {
        Rectangle.Double dest = new Rectangle.Double();
        Rectangle.Double.union(this, r, dest);
        return dest;
    }

    /** Writes the union of src1 and src2 into dest. */
    public static void union(Rectangle.Double src1,
            Rectangle.Double src2,
            Rectangle.Double dest) {
        double x1 = Math.min(src1.getMinX(), src2.getMinX());
        double y1 = Math.min(src1.getMinY(), src2.getMinY());
        double x2 = Math.max(src1.getMaxX(), src2.getMaxX());
        double y2 = Math.max(src1.getMaxY(), src2.getMaxY());
        dest.setFrameFromDiagonal(x1, y1, x2, y2);
    }

    /** Sets this rectangle from two opposite corners, in any order. */
    public void setFrameFromDiagonal(double x1, double y1,
            double x2, double y2) {
        if (x2 < x1) {
            double t = x1;
            x1 = x2;
            x2 = t;
        }
        if (y2 < y1) {
            double t = y1;
            y1 = y2;
            y2 = t;
        }
        setFrame(x1, y1, x2 - x1, y2 - y1);
    }

    /**
     * Returns the intersection of this rectangle and r.  When the two do
     * not overlap, the result has a negative width and/or height (it is
     * non-existent).
     */
    public Rectangle.Double intersect(Rectangle.Double r) {
        Rectangle.Double dest = new Rectangle.Double();
        Rectangle.Double.intersect(this, r, dest);
        return dest;
    }

    /** Writes the intersection of src1 and src2 into dest. */
    public static void intersect(Rectangle.Double src1,
            Rectangle.Double src2,
            Rectangle.Double dest) {
        double x1 = Math.max(src1.getMinX(), src2.getMinX());
        double y1 = Math.max(src1.getMinY(), src2.getMinY());
        double x2 = Math.min(src1.getMaxX(), src2.getMaxX());
        double y2 = Math.min(src1.getMaxY(), src2.getMaxY());
        dest.setFrame(x1, y1, x2 - x1, y2 - y1);
    }

    /** Returns whether this rectangle and rect overlap in a non-empty area. */
    public boolean intersects(Rectangle.Double rect) {
        if (rect.w <= 0 || rect.h <= 0) {
            return false;
        }
        double x0 = x;
        double y0 = y;
        return (rect.x + rect.w > x0 &&
                rect.y + rect.h > y0 &&
                rect.x < x0 + w && rect.y < y0 + h);
    }

    /**
     * Enlarges this rectangle in place to the union of itself and r.
     *
     * <p>FIX: returns immediately after adopting {@code r} when this
     * rectangle was non-existent — previously the stale negative
     * dimensions captured in tx2/ty2 leaked into the computation below
     * and could corrupt the result.</p>
     */
    public void add(Rectangle.Double r) {
        double tx2 = this.w;
        double ty2 = this.h;
        if ((tx2) < 0 || (ty2) < 0) {
            // This rectangle is non-existent: the union is simply r.
            setFrame(r.x, r.y, r.w, r.h);
            return;
        }
        double rx2 = r.w;
        double ry2 = r.h;
        if ((rx2) < 0 || (ry2) < 0) {
            // r is non-existent: nothing to add.
            return;
        }
        double tx1 = this.x;
        double ty1 = this.y;
        tx2 += tx1;
        ty2 += ty1;
        double rx1 = r.x;
        double ry1 = r.y;
        rx2 += rx1;
        ry2 += ry1;
        if (tx1 > rx1)
            tx1 = rx1;
        if (ty1 > ry1)
            ty1 = ry1;
        if (tx2 < rx2)
            tx2 = rx2;
        if (ty2 < ry2)
            ty2 = ry2;
        tx2 -= tx1;
        ty2 -= ty1;
        // tx2,ty2 will never underflow since both original
        // rectangles were non-empty
        // they might overflow, though...
        if (tx2 > java.lang.Double.MAX_VALUE)
            tx2 = java.lang.Double.MAX_VALUE;
        if (ty2 > java.lang.Double.MAX_VALUE)
            ty2 = java.lang.Double.MAX_VALUE;
        setFrame(tx1, ty1, tx2, ty2);
    }

    /** Sets position and size in one call. */
    private void setFrame(double x, double y, double w, double h) {
        this.x = x;
        this.y = y;
        this.w = w;
        this.h = h;
    }

    // FIXME use (x,y,w,h) notation, currently used by r-tree (en)
    /** Min/max-corner form: r1 contains r2. */
    public static boolean contains(double r1MinX, double r1MinY, double r1MaxX, double r1MaxY,
            double r2MinX, double r2MinY, double r2MaxX, double r2MaxY) {
        return r1MaxX >= r2MaxX && r1MinX <= r2MinX && r1MaxY >= r2MaxY && r1MinY <= r2MinY;
    }

    /** Squared distance from the point (pX, pY) to the given box (0 inside). */
    public static double distanceSq(double minX, double minY, double maxX, double maxY, double pX, double pY) {
        double distanceSqX = 0;
        double distanceSqY = 0;
        if (minX > pX) {
            distanceSqX = minX - pX;
            distanceSqX *= distanceSqX;
        } else if (pX > maxX) {
            distanceSqX = pX - maxX;
            distanceSqX *= distanceSqX;
        }
        if (minY > pY) {
            distanceSqY = minY - pY;
            distanceSqY *= distanceSqY;
        } else if (pY > maxY) {
            distanceSqY = pY - maxY;
            distanceSqY *= distanceSqY;
        }
        return distanceSqX + distanceSqY;
    }

    /** Min/max-corner form: closed-interval overlap test. */
    public static boolean intersects(double r1MinX, double r1MinY, double r1MaxX, double r1MaxY,
            double r2MinX, double r2MinY, double r2MaxX, double r2MaxY) {
        return r1MaxX >= r2MinX && r1MinX <= r2MaxX && r1MaxY >= r2MinY && r1MinY <= r2MaxY;
    }

    /** Area of a box given in min/max-corner form. */
    public static double area(double minX, double minY, double maxX, double maxY) {
        return (maxX - minX) * (maxY - minY);
    }

    /**
     * Returns how much the area of r1 grows when enlarged to cover r2
     * (used by R-tree node selection).
     *
     * <p>FIX: uses java.lang.Double.POSITIVE_INFINITY consistently —
     * the previous Float.POSITIVE_INFINITY only worked via implicit
     * float-to-double widening.</p>
     */
    public static double enlargement(double r1MinX, double r1MinY, double r1MaxX, double r1MaxY,
            double r2MinX, double r2MinY, double r2MaxX, double r2MaxY) {
        double r1Area = (r1MaxX - r1MinX) * (r1MaxY - r1MinY);
        if (r1Area == java.lang.Double.POSITIVE_INFINITY) {
            return 0; // cannot enlarge an infinite rectangle...
        }
        if (r2MinX < r1MinX)
            r1MinX = r2MinX;
        if (r2MinY < r1MinY)
            r1MinY = r2MinY;
        if (r2MaxX > r1MaxX)
            r1MaxX = r2MaxX;
        if (r2MaxY > r1MaxY)
            r1MaxY = r2MaxY;
        double r1r2UnionArea = (r1MaxX - r1MinX) * (r1MaxY - r1MinY);
        if (r1r2UnionArea == java.lang.Double.POSITIVE_INFINITY) {
            // if a finite rectangle is enlarged and becomes infinite,
            // then the enlargement must be infinite.
            return java.lang.Double.POSITIVE_INFINITY;
        }
        return r1r2UnionArea - r1Area;
    }
}
}
| |
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.engine.task;
import java.io.Serializable;
import java.util.Date;
import java.util.List;
import org.camunda.bpm.engine.BadUserRequestException;
import org.camunda.bpm.engine.ProcessEngineConfiguration;
import org.camunda.bpm.engine.ProcessEngineException;
import org.camunda.bpm.engine.query.Query;
import org.camunda.bpm.engine.variable.type.ValueType;
/**
* Allows programmatic querying of {@link Task}s;
*
* @author Joram Barrez
* @author Falko Menge
*/
public interface TaskQuery extends Query<TaskQuery, Task>{
/**
 * Only select tasks with the given task id (in practice, there will be
 * at most one of this kind).
 */
TaskQuery taskId(String taskId);
/** Only select tasks with the given name. */
TaskQuery taskName(String name);
/** Only select tasks with a name matching the parameter.
 * The syntax is that of SQL, for example usage: taskNameLike("%activiti%"). */
TaskQuery taskNameLike(String nameLike);
/** Only select tasks with the given description. */
TaskQuery taskDescription(String description);
/** Only select tasks with a description matching the parameter.
 * The syntax is that of SQL, for example usage: taskDescriptionLike("%activiti%"). */
TaskQuery taskDescriptionLike(String descriptionLike);
/** Only select tasks with the given priority. */
TaskQuery taskPriority(Integer priority);
/** Only select tasks with the given priority or higher. */
TaskQuery taskMinPriority(Integer minPriority);
/** Only select tasks with the given priority or lower. */
TaskQuery taskMaxPriority(Integer maxPriority);
/** Only select tasks which are assigned to the given user. */
TaskQuery taskAssignee(String assignee);
/**
 * <p>Only select tasks which are assigned to the user described by the given expression.</p>
 *
 * @param assigneeExpression an expression that evaluates to the assignee's user id
 * @throws BadUserRequestException
 *   <ul><li>When the query is executed and expressions are disabled for adhoc queries
 *   (in case the query is executed via {@link #list()}, {@link #listPage(int, int)}, {@link #singleResult()}, or {@link #count()})
 *   or stored queries (in case the query is stored along with a filter).
 *   Expression evaluation can be activated by setting the process engine configuration properties
 *   <code>enableExpressionsInAdhocQueries</code> (default <code>false</code>) and
 *   <code>enableExpressionsInStoredQueries</code> (default <code>true</code>) to <code>true</code>.</li></ul>
 */
TaskQuery taskAssigneeExpression(String assigneeExpression);
/** Only select tasks whose assignee matches the given parameter.
 * The syntax is that of SQL, for example usage: taskAssigneeLike("%activiti%"). */
TaskQuery taskAssigneeLike(String assignee);
/**
 * <p>Only select tasks whose assignee matches the result of the given expression.
 * The syntax is that of SQL, for example usage: taskAssigneeLikeExpression("${'%test%'}")</p>
 *
 * @param assigneeLikeExpression an expression that evaluates to a SQL LIKE pattern
 * @throws BadUserRequestException
 *   <ul><li>When the query is executed and expressions are disabled for adhoc queries
 *   (in case the query is executed via {@link #list()}, {@link #listPage(int, int)}, {@link #singleResult()}, or {@link #count()})
 *   or stored queries (in case the query is stored along with a filter).
 *   Expression evaluation can be activated by setting the process engine configuration properties
 *   <code>enableExpressionsInAdhocQueries</code> (default <code>false</code>) and
 *   <code>enableExpressionsInStoredQueries</code> (default <code>true</code>) to <code>true</code>.</li></ul>
 */
TaskQuery taskAssigneeLikeExpression(String assigneeLikeExpression);
/** Only select tasks for which the given user is the owner. */
TaskQuery taskOwner(String owner);
/**
 * <p>Only select tasks for which the user described by the given expression is the owner.</p>
 *
 * @param ownerExpression an expression that evaluates to the owner's user id
 * @throws BadUserRequestException
 *   <ul><li>When the query is executed and expressions are disabled for adhoc queries
 *   (in case the query is executed via {@link #list()}, {@link #listPage(int, int)}, {@link #singleResult()}, or {@link #count()})
 *   or stored queries (in case the query is stored along with a filter).
 *   Expression evaluation can be activated by setting the process engine configuration properties
 *   <code>enableExpressionsInAdhocQueries</code> (default <code>false</code>) and
 *   <code>enableExpressionsInStoredQueries</code> (default <code>true</code>) to <code>true</code>.</li></ul>
 */
TaskQuery taskOwnerExpression(String ownerExpression);
/** Only select tasks which don't have an assignee. */
TaskQuery taskUnassigned();
/**
 * @deprecated use {@link #taskUnassigned()} instead; this misspelled
 * variant is kept only for backwards compatibility.
 */
@Deprecated
TaskQuery taskUnnassigned();
/** Only select tasks with the given {@link DelegationState}. */
TaskQuery taskDelegationState(DelegationState delegationState);
/**
* Only select tasks for which the given user is a candidate.
*
* <p>
* Per default it only selects tasks which are not already assigned
* to a user. To also include assigned task in the result specify
* {@link #includeAssignedTasks()} in your query.
* </p>
*
* @throws ProcessEngineException
* <ul><li>When query is executed and {@link #taskCandidateGroup(String)} or
* {@link #taskCandidateGroupIn(List)} has been executed on the query instance.
* <li>When passed user is <code>null</code>.
* </ul>
* @throws BadUserRequestException
* <ul><li>When the query is executed and expressions are disabled for adhoc queries
* (in case the query is executed via {@link #list()}, {@link #listPage(int, int)}, {@link #singleResult()}, or {@link #count()})
* or stored queries (in case the query is stored along with a filter).
* Expression evaluation can be activated by setting the process engine configuration properties
* <code>enableExpressionsInAdhocQueries</code> (default <code>false</code>) and
* <code>enableExpressionsInStoredQueries</code> (default <code>true</code>) to <code>true</code>.
*
*/
TaskQuery taskCandidateUser(String candidateUser);
/**
* Only select tasks for which the described user by the given expression is a candidate.
*
* <p>
* Per default it only selects tasks which are not already assigned
* to a user. To also include assigned task in the result specify
* {@link #includeAssignedTasks()} in your query.
* </p>
*
* @throws ProcessEngineException
* <ul><li>When query is executed and {@link #taskCandidateGroup(String)} or
* {@link #taskCandidateGroupIn(List)} has been executed on the query instance.
* <li>When passed user is <code>null</code>.
* </ul>
* @throws BadUserRequestException
* <ul><li>When the query is executed and expressions are disabled for adhoc queries
* (in case the query is executed via {@link #list()}, {@link #listPage(int, int)}, {@link #singleResult()}, or {@link #count()})
* or stored queries (in case the query is stored along with a filter).
* Expression evaluation can be activated by setting the process engine configuration properties
* <code>enableExpressionsInAdhocQueries</code> (default <code>false</code>) and
* <code>enableExpressionsInStoredQueries</code> (default <code>true</code>) to <code>true</code>.
*/
TaskQuery taskCandidateUserExpression(String candidateUserExpression);
/** Only select tasks for which there exist an {@link IdentityLink} with the given user */
TaskQuery taskInvolvedUser(String involvedUser);
/**
* <p>Only select tasks for which there exist an {@link IdentityLink} with the
* described user by the given expression</p>
*
* @throws BadUserRequestException
* <ul><li>When the query is executed and expressions are disabled for adhoc queries
* (in case the query is executed via {@link #list()}, {@link #listPage(int, int)}, {@link #singleResult()}, or {@link #count()})
* or stored queries (in case the query is stored along with a filter).
* Expression evaluation can be activated by setting the process engine configuration properties
* <code>enableExpressionsInAdhocQueries</code> (default <code>false</code>) and
* <code>enableExpressionsInStoredQueries</code> (default <code>true</code>) to <code>true</code>.
*/
TaskQuery taskInvolvedUserExpression(String involvedUserExpression);
/**
* Only select tasks for which users in the given group are candidates.
*
* <p>
* Per default it only selects tasks which are not already assigned
* to a user. To also include assigned task in the result specify
* {@link #includeAssignedTasks()} in your query.
* </p>
*
* @throws ProcessEngineException
* When query is executed and {@link #taskCandidateUser(String)} or
* {@link #taskCandidateGroupIn(List)} has been executed on the query instance.
* When passed group is <code>null</code>.
*/
TaskQuery taskCandidateGroup(String candidateGroup);
/**
* Only select tasks for which users in the described group by the given expression are candidates.
*
* <p>
* Per default it only selects tasks which are not already assigned
* to a user. To also include assigned task in the result specify
* {@link #includeAssignedTasks()} in your query.
* </p>
*
* @throws ProcessEngineException
* <ul><li>When query is executed and {@link #taskCandidateUser(String)} or
* {@link #taskCandidateGroupIn(List)} has been executed on the query instance.
* <li>When passed group is <code>null</code>.
* </ul>
* @throws BadUserRequestException
* <ul><li>When the query is executed and expressions are disabled for adhoc queries
* (in case the query is executed via {@link #list()}, {@link #listPage(int, int)}, {@link #singleResult()}, or {@link #count()})
* or stored queries (in case the query is stored along with a filter).
* Expression evaluation can be activated by setting the process engine configuration properties
* <code>enableExpressionsInAdhocQueries</code> (default <code>false</code>) and
* <code>enableExpressionsInStoredQueries</code> (default <code>true</code>) to <code>true</code>.
*/
TaskQuery taskCandidateGroupExpression(String candidateGroupExpression);
/**
* Only select tasks for which the 'candidateGroup' is one of the given groups.
*
* <p>
* Per default it only selects tasks which are not already assigned
* to a user. To also include assigned task in the result specify
* {@link #includeAssignedTasks()} in your query.
* </p>
*
* @throws ProcessEngineException
* When query is executed and {@link #taskCandidateGroup(String)} or
* {@link #taskCandidateUser(String)} has been executed on the query instance.
* When passed group list is empty or <code>null</code>.
*/
TaskQuery taskCandidateGroupIn(List<String> candidateGroups);
/**
* Only select tasks for which the 'candidateGroup' is one of the described groups of the given expression.
*
* <p>
* Per default it only selects tasks which are not already assigned
* to a user. To also include assigned task in the result specify
* {@link #includeAssignedTasks()} in your query.
* </p>
*
* @throws ProcessEngineException
* <ul><li>When query is executed and {@link #taskCandidateGroup(String)} or
* {@link #taskCandidateUser(String)} has been executed on the query instance.
* <li>When passed group list is empty or <code>null</code>.</ul>
* @throws BadUserRequestException
* <ul><li>When the query is executed and expressions are disabled for adhoc queries
* (in case the query is executed via {@link #list()}, {@link #listPage(int, int)}, {@link #singleResult()}, or {@link #count()})
* or stored queries (in case the query is stored along with a filter).
* Expression evaluation can be activated by setting the process engine configuration properties
* <code>enableExpressionsInAdhocQueries</code> (default <code>false</code>) and
* <code>enableExpressionsInStoredQueries</code> (default <code>true</code>) to <code>true</code>.
*/
TaskQuery taskCandidateGroupInExpression(String candidateGroupsExpression);
/**
* Select both assigned and not assigned tasks for candidate user or group queries.
* <p>
* By default {@link #taskCandidateUser(String)}, {@link #taskCandidateGroup(String)}
* and {@link #taskCandidateGroupIn(List)} queries only select not assigned tasks.
* </p>
*
* @throws ProcessEngineException
* When no candidate user or group(s) are specified beforehand
*/
TaskQuery includeAssignedTasks();
/** Only select tasks for the given process instance id. */
TaskQuery processInstanceId(String processInstanceId);
/** Only select tasks for the given process instance business key */
TaskQuery processInstanceBusinessKey(String processInstanceBusinessKey);
/**
 * Only select tasks for any of the given process instance business keys.
 */
TaskQuery processInstanceBusinessKeyIn(String... processInstanceBusinessKeys);
/** Only select tasks matching the given process instance business key.
* The syntax is that of SQL: for example usage: nameLike(%activiti%)*/
TaskQuery processInstanceBusinessKeyLike(String processInstanceBusinessKey);
/** Only select tasks for the given execution. */
TaskQuery executionId(String executionId);
/** Only select task which have one of the activity instance ids. **/
TaskQuery activityInstanceIdIn(String... activityInstanceIds);
/** Only select tasks that are created on the given date. **/
TaskQuery taskCreatedOn(Date createTime);
/** Only select tasks that are created on the described date by the given expression. **/
TaskQuery taskCreatedOnExpression(String createTimeExpression);
/** Only select tasks that are created before the given date. **/
TaskQuery taskCreatedBefore(Date before);
/** Only select tasks that are created before the described date by the given expression. **/
TaskQuery taskCreatedBeforeExpression(String beforeExpression);
/** Only select tasks that are created after the given date. **/
TaskQuery taskCreatedAfter(Date after);
/** Only select tasks that are created after the described date by the given expression. **/
TaskQuery taskCreatedAfterExpression(String afterExpression);
/** Only select tasks that have no parent (i.e. do not select subtasks). **/
TaskQuery excludeSubtasks();
/**
* Only select tasks with the given taskDefinitionKey.
* The task definition key is the id of the userTask:
* <userTask id="xxx" .../>
**/
TaskQuery taskDefinitionKey(String key);
/**
* Only select tasks with a taskDefinitionKey that match the given parameter.
* The syntax is that of SQL: for example usage: taskDefinitionKeyLike("%activiti%").
* The task definition key is the id of the userTask:
* <userTask id="xxx" .../>
**/
TaskQuery taskDefinitionKeyLike(String keyLike);
/** Only select tasks which have one of the taskDefinitionKeys. **/
TaskQuery taskDefinitionKeyIn(String... taskDefinitionKeys);
/**
* Select the tasks which are sub tasks of the given parent task.
*/
TaskQuery taskParentTaskId(String parentTaskId);
/** Only select tasks for the given case instance id. */
TaskQuery caseInstanceId(String caseInstanceId);
/** Only select tasks for the given case instance business key */
TaskQuery caseInstanceBusinessKey(String caseInstanceBusinessKey);
/** Only select tasks matching the given case instance business key.
* The syntax is that of SQL: for example usage: nameLike(%aBusinessKey%)*/
TaskQuery caseInstanceBusinessKeyLike(String caseInstanceBusinessKeyLike);
/** Only select tasks for the given case execution. */
TaskQuery caseExecutionId(String caseExecutionId);
/**
* Only select tasks which are part of a case instance which has the given
* case definition key.
*/
TaskQuery caseDefinitionKey(String caseDefinitionKey);
/**
* Only select tasks which are part of a case instance which has the given
* case definition id.
*/
TaskQuery caseDefinitionId(String caseDefinitionId);
/**
* Only select tasks which are part of a case instance which has the given
* case definition name.
*/
TaskQuery caseDefinitionName(String caseDefinitionName);
/**
* Only select tasks which are part of a case instance which case definition
* name is like the given parameter.
* The syntax is that of SQL: for example usage: nameLike(%processDefinitionName%)*/
TaskQuery caseDefinitionNameLike(String caseDefinitionNameLike);
/**
* Only select tasks which have a local task variable with the given name
* set to the given value.
*/
TaskQuery taskVariableValueEquals(String variableName, Object variableValue);
/**
* Only select tasks which have a local task variable with the given name, but
* with a different value than the passed value.
* Byte-arrays and {@link Serializable} objects (which are not primitive type wrappers)
* are not supported.
*/
TaskQuery taskVariableValueNotEquals(String variableName, Object variableValue);
/**
* Only select tasks which have a local task variable with the given name
* matching the given value.
* The syntax is that of SQL: for example usage: valueLike(%value%)
*/
TaskQuery taskVariableValueLike(String variableName, String variableValue);
/**
* Only select tasks which have a local task variable with the given name
* and a value greater than the given one.
*/
TaskQuery taskVariableValueGreaterThan(String variableName, Object variableValue);
/**
* Only select tasks which have a local task variable with the given name
* and a value greater than or equal to the given one.
*/
TaskQuery taskVariableValueGreaterThanOrEquals(String variableName, Object variableValue);
/**
* Only select tasks which have a local task variable with the given name
* and a value less than the given one.
*/
TaskQuery taskVariableValueLessThan(String variableName, Object variableValue);
/**
* Only select tasks which have a local task variable with the given name
* and a value less than or equal to the given one.
*/
TaskQuery taskVariableValueLessThanOrEquals(String variableName, Object variableValue);
/**
 * Only select tasks which are part of a process that has a variable
 * with the given name set to the given value.
 */
TaskQuery processVariableValueEquals(String variableName, Object variableValue);
/**
* Only select tasks which have a variable with the given name, but
* with a different value than the passed value.
* Byte-arrays and {@link Serializable} objects (which are not primitive type wrappers)
* are not supported.
*/
TaskQuery processVariableValueNotEquals(String variableName, Object variableValue);
/**
* Only select tasks which are part of a process that have a variable
* with the given name and matching the given value.
* The syntax is that of SQL: for example usage: valueLike(%value%)*/
TaskQuery processVariableValueLike(String variableName, String variableValue);
/**
* Only select tasks which are part of a process that have a variable
* with the given name and a value greater than the given one.
*/
TaskQuery processVariableValueGreaterThan(String variableName, Object variableValue);
/**
* Only select tasks which are part of a process that have a variable
* with the given name and a value greater than or equal to the given one.
*/
TaskQuery processVariableValueGreaterThanOrEquals(String variableName, Object variableValue);
/**
* Only select tasks which are part of a process that have a variable
* with the given name and a value less than the given one.
*/
TaskQuery processVariableValueLessThan(String variableName, Object variableValue);
/**
 * Only select tasks which are part of a process that have a variable
 * with the given name and a value less than or equal to the given one.
 */
TaskQuery processVariableValueLessThanOrEquals(String variableName, Object variableValue);
/**
 * Only select tasks which are part of a case instance that have a variable
 * with the given name set to the given value. The type of variable is determined based
 * on the value, using types configured in {@link ProcessEngineConfiguration#getVariableSerializers()}.
 *
 * Byte-arrays and {@link Serializable} objects (which are not primitive type wrappers)
 * are not supported.
 *
 * @param variableName name of the variable, cannot be null.
 */
TaskQuery caseInstanceVariableValueEquals(String variableName, Object variableValue);
/**
 * Only select tasks which are part of a case instance that have a variable
 * with the given name, but with a different value than the passed value. The
 * type of variable is determined based on the value, using types configured
 * in {@link ProcessEngineConfiguration#getVariableSerializers()}.
 *
 * Byte-arrays and {@link Serializable} objects (which are not primitive type wrappers)
 * are not supported.
 *
 * @param variableName name of the variable, cannot be null.
 */
TaskQuery caseInstanceVariableValueNotEquals(String variableName, Object variableValue);
/**
 * Only select tasks which are part of a case instance that have a variable value
 * like the given value.
 *
 * This can be used on string variables only.
 *
 * @param variableName variable name, cannot be null.
 *
 * @param variableValue variable value. The string can include the
 * wildcard character '%' to express like-strategy:
 * starts with (string%), ends with (%string) or contains (%string%).
 */
TaskQuery caseInstanceVariableValueLike(String variableName, String variableValue);
/**
 * Only select tasks which are part of a case instance that have a variable
 * with the given name and a variable value greater than the passed value.
 *
 * Booleans, Byte-arrays and {@link Serializable} objects (which are not primitive type wrappers)
 * are not supported.
 *
 * @param variableName variable name, cannot be null.
 */
TaskQuery caseInstanceVariableValueGreaterThan(String variableName, Object variableValue);
/**
 * Only select tasks which are part of a case instance that have a
 * variable value greater than or equal to the passed value.
 *
 * Booleans, Byte-arrays and {@link Serializable} objects (which
 * are not primitive type wrappers) are not supported.
 *
 * @param variableName variable name, cannot be null.
 */
TaskQuery caseInstanceVariableValueGreaterThanOrEquals(String variableName, Object variableValue);
/**
 * Only select tasks which are part of a case instance that have a variable
 * value less than the passed value.
 *
 * Booleans, Byte-arrays and {@link Serializable} objects (which are not primitive type wrappers)
 * are not supported.
 *
 * @param variableName variable name, cannot be null.
 */
TaskQuery caseInstanceVariableValueLessThan(String variableName, Object variableValue);
/**
 * Only select tasks which are part of a case instance that have a variable
 * value less than or equal to the passed value.
 *
 * Booleans, Byte-arrays and {@link Serializable} objects (which are not primitive type wrappers)
 * are not supported.
 *
 * @param variableName variable name, cannot be null.
 */
TaskQuery caseInstanceVariableValueLessThanOrEquals(String variableName, Object variableValue);
/**
* Only select tasks which are part of a process instance which has the given
* process definition key.
*/
TaskQuery processDefinitionKey(String processDefinitionKey);
/**
* Only select tasks which are part of a process instance which has one of the
* given process definition keys.
*/
TaskQuery processDefinitionKeyIn(String... processDefinitionKeys);
/**
* Only select tasks which are part of a process instance which has the given
* process definition id.
*/
TaskQuery processDefinitionId(String processDefinitionId);
/**
* Only select tasks which are part of a process instance which has the given
* process definition name.
*/
TaskQuery processDefinitionName(String processDefinitionName);
/**
* Only select tasks which are part of a process instance which process definition
* name is like the given parameter.
* The syntax is that of SQL: for example usage: nameLike(%processDefinitionName%)*/
TaskQuery processDefinitionNameLike(String processDefinitionName);
/**
* Only select tasks with the given due date.
*/
TaskQuery dueDate(Date dueDate);
/**
* <p>Only select tasks with the described due date by the given expression.</p>
*
* @throws BadUserRequestException
* <ul><li>When the query is executed and expressions are disabled for adhoc queries
* (in case the query is executed via {@link #list()}, {@link #listPage(int, int)}, {@link #singleResult()}, or {@link #count()})
* or stored queries (in case the query is stored along with a filter).
* Expression evaluation can be activated by setting the process engine configuration properties
* <code>enableExpressionsInAdhocQueries</code> (default <code>false</code>) and
* <code>enableExpressionsInStoredQueries</code> (default <code>true</code>) to <code>true</code>.
*/
TaskQuery dueDateExpression(String dueDateExpression);
/**
* Only select tasks which have a due date before the given date.
*/
TaskQuery dueBefore(Date dueDate);
/**
* <p>Only select tasks which have a due date before the described date by the given expression.</p>
*
* @throws BadUserRequestException
* <ul><li>When the query is executed and expressions are disabled for adhoc queries
* (in case the query is executed via {@link #list()}, {@link #listPage(int, int)}, {@link #singleResult()}, or {@link #count()})
* or stored queries (in case the query is stored along with a filter).
* Expression evaluation can be activated by setting the process engine configuration properties
* <code>enableExpressionsInAdhocQueries</code> (default <code>false</code>) and
* <code>enableExpressionsInStoredQueries</code> (default <code>true</code>) to <code>true</code>.
*/
TaskQuery dueBeforeExpression(String dueDateExpression);
/**
* Only select tasks which have a due date after the given date.
*/
TaskQuery dueAfter(Date dueDate);
/**
* <p>Only select tasks which have a due date after the described date by the given expression.</p>
*
* @throws BadUserRequestException
* <ul><li>When the query is executed and expressions are disabled for adhoc queries
* (in case the query is executed via {@link #list()}, {@link #listPage(int, int)}, {@link #singleResult()}, or {@link #count()})
* or stored queries (in case the query is stored along with a filter).
* Expression evaluation can be activated by setting the process engine configuration properties
* <code>enableExpressionsInAdhocQueries</code> (default <code>false</code>) and
* <code>enableExpressionsInStoredQueries</code> (default <code>true</code>) to <code>true</code>.
*/
TaskQuery dueAfterExpression(String dueDateExpression);
/**
* Only select tasks with the given follow-up date.
*/
TaskQuery followUpDate(Date followUpDate);
/**
* <p>Only select tasks with the described follow-up date by the given expression.</p>
*
* @throws BadUserRequestException
* <ul><li>When the query is executed and expressions are disabled for adhoc queries
* (in case the query is executed via {@link #list()}, {@link #listPage(int, int)}, {@link #singleResult()}, or {@link #count()})
* or stored queries (in case the query is stored along with a filter).
* Expression evaluation can be activated by setting the process engine configuration properties
* <code>enableExpressionsInAdhocQueries</code> (default <code>false</code>) and
* <code>enableExpressionsInStoredQueries</code> (default <code>true</code>) to <code>true</code>.
*/
TaskQuery followUpDateExpression(String followUpDateExpression);
/**
* Only select tasks which have a follow-up date before the given date.
*/
TaskQuery followUpBefore(Date followUpDate);
/**
* <p>Only select tasks which have a follow-up date before the described date by the given expression.</p>
*
* @throws BadUserRequestException
* <ul><li>When the query is executed and expressions are disabled for adhoc queries
* (in case the query is executed via {@link #list()}, {@link #listPage(int, int)}, {@link #singleResult()}, or {@link #count()})
* or stored queries (in case the query is stored along with a filter).
* Expression evaluation can be activated by setting the process engine configuration properties
* <code>enableExpressionsInAdhocQueries</code> (default <code>false</code>) and
* <code>enableExpressionsInStoredQueries</code> (default <code>true</code>) to <code>true</code>.
*/
TaskQuery followUpBeforeExpression(String followUpDateExpression);
/**
* Only select tasks which have no follow-up date or a follow-up date before the given date.
* Serves the typical use case "give me all tasks without follow-up or follow-up date which is already due"
*/
TaskQuery followUpBeforeOrNotExistent(Date followUpDate);
/**
* <p>Only select tasks which have no follow-up date or a follow-up date before the described date by the given expression.
* Serves the typical use case "give me all tasks without follow-up or follow-up date which is already due"</p>
*
* @throws BadUserRequestException
* <ul><li>When the query is executed and expressions are disabled for adhoc queries
* (in case the query is executed via {@link #list()}, {@link #listPage(int, int)}, {@link #singleResult()}, or {@link #count()})
* or stored queries (in case the query is stored along with a filter).
* Expression evaluation can be activated by setting the process engine configuration properties
* <code>enableExpressionsInAdhocQueries</code> (default <code>false</code>) and
* <code>enableExpressionsInStoredQueries</code> (default <code>true</code>) to <code>true</code>.
*/
TaskQuery followUpBeforeOrNotExistentExpression(String followUpDateExpression);
/**
* Only select tasks which have a follow-up date after the given date.
*/
TaskQuery followUpAfter(Date followUpDate);
/**
* <p>Only select tasks which have a follow-up date after the described date by the given expression.</p>
*
* @throws BadUserRequestException
* <ul><li>When the query is executed and expressions are disabled for adhoc queries
* (in case the query is executed via {@link #list()}, {@link #listPage(int, int)}, {@link #singleResult()}, or {@link #count()})
* or stored queries (in case the query is stored along with a filter).
* Expression evaluation can be activated by setting the process engine configuration properties
* <code>enableExpressionsInAdhocQueries</code> (default <code>false</code>) and
* <code>enableExpressionsInStoredQueries</code> (default <code>true</code>) to <code>true</code>.
*/
TaskQuery followUpAfterExpression(String followUpDateExpression);
/**
* Only select tasks which are suspended, because its process instance was suspended.
*/
TaskQuery suspended();
/**
* Only select tasks which are active (ie. not suspended)
*/
TaskQuery active();
/**
* If called, the form keys of the fetched tasks are initialized and
* {@link Task#getFormKey()} will return a value (in case the task has a form key).
*
* @return the query itself
*/
TaskQuery initializeFormKeys();
/** Only select tasks with one of the given tenant ids. */
TaskQuery tenantIdIn(String... tenantIds);
/** Only select tasks which have no tenant id. */
TaskQuery withoutTenantId();
// ordering ////////////////////////////////////////////////////////////
/** Order by task id (needs to be followed by {@link #asc()} or {@link #desc()}). */
TaskQuery orderByTaskId();
/** Order by task name (needs to be followed by {@link #asc()} or {@link #desc()}). */
TaskQuery orderByTaskName();
/** Order by task name case insensitive (needs to be followed by {@link #asc()} or {@link #desc()}). */
TaskQuery orderByTaskNameCaseInsensitive();
/** Order by description (needs to be followed by {@link #asc()} or {@link #desc()}). */
TaskQuery orderByTaskDescription();
/** Order by priority (needs to be followed by {@link #asc()} or {@link #desc()}). */
TaskQuery orderByTaskPriority();
/** Order by assignee (needs to be followed by {@link #asc()} or {@link #desc()}). */
TaskQuery orderByTaskAssignee();
/** Order by the time on which the tasks were created (needs to be followed by {@link #asc()} or {@link #desc()}). */
TaskQuery orderByTaskCreateTime();
/** Order by process instance id (needs to be followed by {@link #asc()} or {@link #desc()}). */
TaskQuery orderByProcessInstanceId();
/** Order by case instance id (needs to be followed by {@link #asc()} or {@link #desc()}). */
TaskQuery orderByCaseInstanceId();
/** Order by execution id (needs to be followed by {@link #asc()} or {@link #desc()}). */
TaskQuery orderByExecutionId();
/** Order by case execution id (needs to be followed by {@link #asc()} or {@link #desc()}). */
TaskQuery orderByCaseExecutionId();
/** Order by due date (needs to be followed by {@link #asc()} or {@link #desc()}). */
TaskQuery orderByDueDate();
/** Order by follow-up date (needs to be followed by {@link #asc()} or {@link #desc()}). */
TaskQuery orderByFollowUpDate();
/**
* Order by a process instance variable value of a certain type. Calling this method multiple times
* specifies secondary, tertiary orderings, etc. The ordering of variables with <code>null</code>
* values is database-specific.
*/
TaskQuery orderByProcessVariable(String variableName, ValueType valueType);
/**
* Order by an execution variable value of a certain type. Calling this method multiple times
* specifies secondary, tertiary orderings, etc. The ordering of variables with <code>null</code>
* values is database-specific.
*/
TaskQuery orderByExecutionVariable(String variableName, ValueType valueType);
/**
* Order by a task variable value of a certain type. Calling this method multiple times
* specifies secondary, tertiary orderings, etc. The ordering of variables with <code>null</code>
* values is database-specific.
*/
TaskQuery orderByTaskVariable(String variableName, ValueType valueType);
/**
* Order by a task variable value of a certain type. Calling this method multiple times
* specifies secondary, tertiary orderings, etc. The ordering of variables with <code>null</code>
* values is database-specific.
*/
TaskQuery orderByCaseExecutionVariable(String variableName, ValueType valueType);
/**
* Order by a task variable value of a certain type. Calling this method multiple times
* specifies secondary, tertiary orderings, etc. The ordering of variables with <code>null</code>
* values is database-specific.
*/
TaskQuery orderByCaseInstanceVariable(String variableName, ValueType valueType);
/** Order by tenant id (needs to be followed by {@link #asc()} or {@link #desc()}).
* Note that the ordering of tasks without tenant id is database-specific. */
TaskQuery orderByTenantId();
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.common.lucene.search;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.index.Fields;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.similarities.ClassicSimilarity;
import org.apache.lucene.search.similarities.Similarity;
import org.apache.lucene.search.similarities.TFIDFSimilarity;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.Strings;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import java.io.IOException;
import java.io.Reader;
import java.io.StringReader;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Objects;
import java.util.Set;
public class MoreLikeThisQuery extends Query {
public static final String DEFAULT_MINIMUM_SHOULD_MATCH = "30%";
// Similarity used for term selection; rewrite() falls back to ClassicSimilarity when null.
private TFIDFSimilarity similarity;
// Positive ("like") input: free text and/or pre-analyzed term vectors.
private String[] likeText;
private Fields[] likeFields;
// Negative ("unlike") input: terms found here are excluded from the generated query.
private String[] unlikeText;
private Fields[] unlikeFields;
// Fields in which the selected terms are searched.
private String[] moreLikeFields;
private Analyzer analyzer;
private String analyzerName; // used for equals/hashcode
private String minimumShouldMatch = DEFAULT_MINIMUM_SHOULD_MATCH;
// Term-selection thresholds, defaulting to the XMoreLikeThis constants.
private int minTermFrequency = XMoreLikeThis.DEFAULT_MIN_TERM_FREQ;
private int maxQueryTerms = XMoreLikeThis.DEFAULT_MAX_QUERY_TERMS;
private Set<?> stopWords = XMoreLikeThis.DEFAULT_STOP_WORDS;
private int minDocFreq = XMoreLikeThis.DEFAULT_MIN_DOC_FREQ;
private int maxDocFreq = XMoreLikeThis.DEFAULT_MAX_DOC_FREQ;
private int minWordLen = XMoreLikeThis.DEFAULT_MIN_WORD_LENGTH;
private int maxWordLen = XMoreLikeThis.DEFAULT_MAX_WORD_LENGTH;
// Whether selected terms are boosted by score, and by which factor.
private boolean boostTerms = XMoreLikeThis.DEFAULT_BOOST;
private float boostTermsFactor = 1;
/** Creates an empty query; like text/fields must be set via setters before rewriting. */
public MoreLikeThisQuery() {
}
/**
 * Creates a query driven by a single piece of like text.
 *
 * @param likeText text whose significant terms drive the query
 * @param moreLikeFields fields in which the selected terms are searched
 * @param analyzer analyzer used to tokenize the text; its name is retained for equals/hashCode
 */
public MoreLikeThisQuery(String likeText, String[] moreLikeFields, NamedAnalyzer analyzer) {
this.likeText = new String[]{likeText};
this.moreLikeFields = moreLikeFields;
this.analyzer = analyzer;
this.analyzerName = analyzer.name();
}
@Override
public int hashCode() {
// NOTE(review): hashes a subset of the fields that equals() compares (analyzerName and
// similarity are omitted). That is legal per the hashCode contract — equal objects still
// hash equally — but confirm the omission is intentional.
return Objects.hash(classHash(), boostTerms, boostTermsFactor, Arrays.hashCode(likeText),
maxDocFreq, maxQueryTerms, maxWordLen, minDocFreq, minTermFrequency, minWordLen,
Arrays.hashCode(moreLikeFields), minimumShouldMatch, stopWords);
}
@Override
public boolean equals(Object obj) {
    // Lucene's sameClassAs covers both the null check and the exact-class comparison.
    if (sameClassAs(obj) == false) {
        return false;
    }
    MoreLikeThisQuery that = (MoreLikeThisQuery) obj;
    // Compare the same fields, in the same order, as the original if/return ladder.
    // NOTE: likeFields/unlikeText/unlikeFields are not part of the comparison.
    return Objects.equals(analyzerName, that.analyzerName)
        && boostTerms == that.boostTerms
        && boostTermsFactor == that.boostTermsFactor
        && Arrays.equals(likeText, that.likeText)
        && maxDocFreq == that.maxDocFreq
        && maxQueryTerms == that.maxQueryTerms
        && maxWordLen == that.maxWordLen
        && minDocFreq == that.minDocFreq
        && minTermFrequency == that.minTermFrequency
        && minWordLen == that.minWordLen
        && Arrays.equals(moreLikeFields, that.moreLikeFields)
        && minimumShouldMatch.equals(that.minimumShouldMatch)
        && Objects.equals(similarity, that.similarity)
        && Objects.equals(stopWords, that.stopWords);
}
@Override
public Query rewrite(IndexReader reader) throws IOException {
Query rewritten = super.rewrite(reader);
if (rewritten != this) {
return rewritten;
}
XMoreLikeThis mlt = new XMoreLikeThis(reader, similarity == null ? new ClassicSimilarity() : similarity);
mlt.setFieldNames(moreLikeFields);
mlt.setAnalyzer(analyzer);
mlt.setMinTermFreq(minTermFrequency);
mlt.setMinDocFreq(minDocFreq);
mlt.setMaxDocFreq(maxDocFreq);
mlt.setMaxQueryTerms(maxQueryTerms);
mlt.setMinWordLen(minWordLen);
mlt.setMaxWordLen(maxWordLen);
mlt.setStopWords(stopWords);
mlt.setBoost(boostTerms);
mlt.setBoostFactor(boostTermsFactor);
if (this.unlikeText != null || this.unlikeFields != null) {
handleUnlike(mlt, this.unlikeText, this.unlikeFields);
}
return createQuery(mlt);
}
private Query createQuery(XMoreLikeThis mlt) throws IOException {
BooleanQuery.Builder bqBuilder = new BooleanQuery.Builder();
if (this.likeFields != null) {
Query mltQuery = mlt.like(this.likeFields);
mltQuery = Queries.applyMinimumShouldMatch((BooleanQuery) mltQuery, minimumShouldMatch);
bqBuilder.add(mltQuery, BooleanClause.Occur.SHOULD);
}
if (this.likeText != null) {
Reader[] readers = new Reader[likeText.length];
for (int i = 0; i < readers.length; i++) {
readers[i] = new StringReader(likeText[i]);
}
//LUCENE 4 UPGRADE this mapps the 3.6 behavior (only use the first field)
Query mltQuery = mlt.like(moreLikeFields[0], readers);
mltQuery = Queries.applyMinimumShouldMatch((BooleanQuery) mltQuery, minimumShouldMatch);
bqBuilder.add(mltQuery, BooleanClause.Occur.SHOULD);
}
return bqBuilder.build();
}
private void handleUnlike(XMoreLikeThis mlt, String[] unlikeText, Fields[] unlikeFields) throws IOException {
Set<Term> skipTerms = new HashSet<>();
// handle like text
if (unlikeText != null) {
for (String text : unlikeText) {
// only use the first field to be consistent
String fieldName = moreLikeFields[0];
try (TokenStream ts = analyzer.tokenStream(fieldName, text)) {
CharTermAttribute termAtt = ts.addAttribute(CharTermAttribute.class);
ts.reset();
while (ts.incrementToken()) {
skipTerms.add(new Term(fieldName, termAtt.toString()));
}
ts.end();
}
}
}
// handle like fields
if (unlikeFields != null) {
for (Fields fields : unlikeFields) {
for (String fieldName : fields) {
Terms terms = fields.terms(fieldName);
final TermsEnum termsEnum = terms.iterator();
BytesRef text;
while ((text = termsEnum.next()) != null) {
skipTerms.add(new Term(fieldName, text.utf8ToString()));
}
}
}
}
if (skipTerms.isEmpty() == false) {
mlt.setSkipTerms(skipTerms);
}
}
@Override
public String toString(String field) {
return "like:" + Arrays.toString(likeText);
}
public String getLikeText() {
return (likeText == null ? null : likeText[0]);
}
public String[] getLikeTexts() {
return likeText;
}
public void setLikeText(String... likeText) {
this.likeText = likeText;
}
public Fields[] getLikeFields() {
return likeFields;
}
public void setLikeFields(Fields... likeFields) {
this.likeFields = likeFields;
}
public void setLikeText(List<String> likeText) {
setLikeText(likeText.toArray(Strings.EMPTY_ARRAY));
}
public void setUnlikeFields(Fields... unlikeFields) {
this.unlikeFields = unlikeFields;
}
public void setUnlikeText(String[] unlikeText) {
this.unlikeText = unlikeText;
}
public String[] getMoreLikeFields() {
return moreLikeFields;
}
public void setMoreLikeFields(String[] moreLikeFields) {
this.moreLikeFields = moreLikeFields;
}
public Similarity getSimilarity() {
return similarity;
}
public void setSimilarity(Similarity similarity) {
if (similarity == null || similarity instanceof TFIDFSimilarity) {
//LUCENE 4 UPGRADE we need TFIDF similarity here so I only set it if it is an instance of it
this.similarity = (TFIDFSimilarity) similarity;
}
}
public Analyzer getAnalyzer() {
return analyzer;
}
public void setAnalyzer(String analyzerName, Analyzer analyzer) {
this.analyzer = analyzer;
this.analyzerName = analyzerName;
}
/**
* Number of terms that must match the generated query expressed in the
* common syntax for minimum should match.
*
* @see org.elasticsearch.common.lucene.search.Queries#calculateMinShouldMatch(int, String)
*/
public String getMinimumShouldMatch() {
return minimumShouldMatch;
}
/**
* Number of terms that must match the generated query expressed in the
* common syntax for minimum should match. Defaults to {@code 30%}.
*
* @see org.elasticsearch.common.lucene.search.Queries#calculateMinShouldMatch(int, String)
*/
public void setMinimumShouldMatch(String minimumShouldMatch) {
this.minimumShouldMatch = minimumShouldMatch;
}
public int getMinTermFrequency() {
return minTermFrequency;
}
public void setMinTermFrequency(int minTermFrequency) {
this.minTermFrequency = minTermFrequency;
}
public int getMaxQueryTerms() {
return maxQueryTerms;
}
public void setMaxQueryTerms(int maxQueryTerms) {
if (maxQueryTerms <= 0) {
throw new IllegalArgumentException("requires 'maxQueryTerms' to be greater than 0");
}
this.maxQueryTerms = maxQueryTerms;
}
public Set<?> getStopWords() {
return stopWords;
}
public void setStopWords(Set<?> stopWords) {
this.stopWords = stopWords;
}
public int getMinDocFreq() {
return minDocFreq;
}
public void setMinDocFreq(int minDocFreq) {
this.minDocFreq = minDocFreq;
}
public int getMaxDocFreq() {
return maxDocFreq;
}
public void setMaxDocFreq(int maxDocFreq) {
this.maxDocFreq = maxDocFreq;
}
public int getMinWordLen() {
return minWordLen;
}
public void setMinWordLen(int minWordLen) {
this.minWordLen = minWordLen;
}
public int getMaxWordLen() {
return maxWordLen;
}
public void setMaxWordLen(int maxWordLen) {
this.maxWordLen = maxWordLen;
}
public boolean isBoostTerms() {
return boostTerms;
}
public void setBoostTerms(boolean boostTerms) {
this.boostTerms = boostTerms;
}
public float getBoostTermsFactor() {
return boostTermsFactor;
}
public void setBoostTermsFactor(float boostTermsFactor) {
this.boostTermsFactor = boostTermsFactor;
}
}
| |
/*
* This is part of Geomajas, a GIS framework, http://www.geomajas.org/.
*
* Copyright 2008-2015 Geosparc nv, http://www.geosparc.com/, Belgium.
*
* The program is available in open source according to the Apache
* License, Version 2.0. All contributions in this program are covered
* by the Geomajas Contributors License Agreement. For full licensing
* details, see LICENSE.txt in the project root.
*/
package org.geomajas.sld.xlink;
import java.io.Serializable;
import org.geomajas.annotation.Api;
/**
* Schema fragment(s) for this class:...
*
* <pre>
* <xs:attributeGroup
* xmlns:xlink="http://www.w3.org/1999/xlink"
*
* xmlns:xs="http://www.w3.org/2001/XMLSchema" name="arcLink">
* <xs:attribute type="xs:string" fixed="arc" form="qualified" name="type"/>
* <xs:attribute use="optional" ref="xlink:arcrole">
* <!-- Reference to inner class ArcroleInfo -->
* </xs:attribute>
* <xs:attribute use="optional" ref="xlink:title">
* <!-- Reference to inner class TitleInfo -->
* </xs:attribute>
* <xs:attribute use="optional" ref="xlink:show"/>
* <xs:attribute use="optional" ref="xlink:actuate"/>
* <xs:attribute use="optional" ref="xlink:from">
* <!-- Reference to inner class FromInfo -->
* </xs:attribute>
* <xs:attribute use="optional" ref="xlink:to">
* <!-- Reference to inner class ToInfo -->
* </xs:attribute>
* </xs:attributeGroup>
* </pre>
*
* @author Jan De Moerloose
* @since 1.0.0
*/
@Api(allMethods = true)
public class ArcLinkInfo implements Serializable {

    private static final long serialVersionUID = 100;

    private String type;
    private ArcroleInfo arcrole;
    private TitleInfo title;
    private ShowInfo show;
    private ActuateInfo actuate;
    private FromInfo from;
    private ToInfo to;

    /**
     * Get the 'type' attribute value.
     *
     * @return value
     */
    public String getType() {
        return type;
    }

    /**
     * Set the 'type' attribute value.
     *
     * @param type type
     */
    public void setType(String type) {
        this.type = type;
    }

    /**
     * Get the 'arcrole' attribute value.
     *
     * @return value
     */
    public ArcroleInfo getArcrole() {
        return arcrole;
    }

    /**
     * Set the 'arcrole' attribute value.
     *
     * @param arcrole arcrole
     */
    public void setArcrole(ArcroleInfo arcrole) {
        this.arcrole = arcrole;
    }

    /**
     * Get the 'title' attribute value.
     *
     * @return value
     */
    public TitleInfo getTitle() {
        return title;
    }

    /**
     * Set the 'title' attribute value.
     *
     * @param title title
     */
    public void setTitle(TitleInfo title) {
        this.title = title;
    }

    /**
     * Get the 'show' attribute value.
     *
     * @return value
     */
    public ShowInfo getShow() {
        return show;
    }

    /**
     * Set the 'show' attribute value.
     *
     * @param show show
     */
    public void setShow(ShowInfo show) {
        this.show = show;
    }

    /**
     * Get the 'actuate' attribute value.
     *
     * @return value
     */
    public ActuateInfo getActuate() {
        return actuate;
    }

    /**
     * Set the 'actuate' attribute value.
     *
     * @param actuate actuate
     */
    public void setActuate(ActuateInfo actuate) {
        this.actuate = actuate;
    }

    /**
     * Get the 'from' attribute value.
     *
     * @return value
     */
    public FromInfo getFrom() {
        return from;
    }

    /**
     * Set the 'from' attribute value.
     *
     * @param from from
     */
    public void setFrom(FromInfo from) {
        this.from = from;
    }

    /**
     * Get the 'to' attribute value.
     *
     * @return value
     */
    public ToInfo getTo() {
        return to;
    }

    /**
     * Set the 'to' attribute value.
     *
     * @param to to
     */
    public void setTo(ToInfo to) {
        this.to = to;
    }

    /**
     * Holder for the optional xlink 'arcrole' attribute (xs:string).
     */
    public static class ArcroleInfo implements Serializable {

        private static final long serialVersionUID = 100;

        private String arcrole;

        /**
         * Get the 'arcrole' attribute value.
         *
         * @return value
         */
        public String getArcrole() {
            return arcrole;
        }

        /**
         * Set the 'arcrole' attribute value.
         *
         * @param arcrole arcrole
         */
        public void setArcrole(String arcrole) {
            this.arcrole = arcrole;
        }

        @Override
        public String toString() {
            return "ArcLinkInfo.ArcroleInfo(arcrole=" + this.getArcrole() + ")";
        }

        @Override
        public boolean equals(final Object o) {
            if (o == this) {
                return true;
            }
            if (!(o instanceof ArcroleInfo)) {
                return false;
            }
            final ArcroleInfo that = (ArcroleInfo) o;
            // canEqual keeps equals symmetric in the presence of subclasses.
            return that.canEqual(this)
                    && java.util.Objects.equals(this.getArcrole(), that.getArcrole());
        }

        /**
         * Is there a chance that the object are equal? Verifies that the other object has a comparable type.
         *
         * @param other other object
         * @return true when other is an instance of this type
         */
        public boolean canEqual(final Object other) {
            return other instanceof ArcroleInfo;
        }

        @Override
        public int hashCode() {
            // Same value as the classic 31 * result + fieldHash scheme seeded at 1.
            return java.util.Objects.hash(this.getArcrole());
        }
    }

    /**
     * Holder for the optional xlink 'title' attribute (xs:string).
     */
    public static class TitleInfo implements Serializable {

        private static final long serialVersionUID = 100;

        private String title;

        /**
         * Get the 'title' attribute value.
         *
         * @return value
         */
        public String getTitle() {
            return title;
        }

        /**
         * Set the 'title' attribute value.
         *
         * @param title title
         */
        public void setTitle(String title) {
            this.title = title;
        }

        @Override
        public String toString() {
            return "ArcLinkInfo.TitleInfo(title=" + this.getTitle() + ")";
        }

        @Override
        public boolean equals(final Object o) {
            if (o == this) {
                return true;
            }
            if (!(o instanceof TitleInfo)) {
                return false;
            }
            final TitleInfo that = (TitleInfo) o;
            return that.canEqual(this)
                    && java.util.Objects.equals(this.getTitle(), that.getTitle());
        }

        /**
         * Is there a chance that the object are equal? Verifies that the other object has a comparable type.
         *
         * @param other other object
         * @return true when other is an instance of this type
         */
        public boolean canEqual(final Object other) {
            return other instanceof TitleInfo;
        }

        @Override
        public int hashCode() {
            return java.util.Objects.hash(this.getTitle());
        }
    }

    /**
     * Holder for the optional xlink 'from' attribute (xs:string).
     */
    public static class FromInfo implements Serializable {

        private static final long serialVersionUID = 100;

        private String from;

        /**
         * Get the 'from' attribute value.
         *
         * @return value
         */
        public String getFrom() {
            return from;
        }

        /**
         * Set the 'from' attribute value.
         *
         * @param from from
         */
        public void setFrom(String from) {
            this.from = from;
        }

        @Override
        public String toString() {
            return "ArcLinkInfo.FromInfo(from=" + this.getFrom() + ")";
        }

        @Override
        public boolean equals(final Object o) {
            if (o == this) {
                return true;
            }
            if (!(o instanceof FromInfo)) {
                return false;
            }
            final FromInfo that = (FromInfo) o;
            return that.canEqual(this)
                    && java.util.Objects.equals(this.getFrom(), that.getFrom());
        }

        /**
         * Is there a chance that the object are equal? Verifies that the other object has a comparable type.
         *
         * @param other other object
         * @return true when other is an instance of this type
         */
        public boolean canEqual(final Object other) {
            return other instanceof FromInfo;
        }

        @Override
        public int hashCode() {
            return java.util.Objects.hash(this.getFrom());
        }
    }

    /**
     * Holder for the optional xlink 'to' attribute (xs:string).
     */
    public static class ToInfo implements Serializable {

        private static final long serialVersionUID = 100;

        private String to;

        /**
         * Get the 'to' attribute value.
         *
         * @return value
         */
        public String getTo() {
            return to;
        }

        /**
         * Set the 'to' attribute value.
         *
         * @param to to
         */
        public void setTo(String to) {
            this.to = to;
        }

        @Override
        public String toString() {
            return "ArcLinkInfo.ToInfo(to=" + this.getTo() + ")";
        }

        @Override
        public boolean equals(final Object o) {
            if (o == this) {
                return true;
            }
            if (!(o instanceof ToInfo)) {
                return false;
            }
            final ToInfo that = (ToInfo) o;
            return that.canEqual(this)
                    && java.util.Objects.equals(this.getTo(), that.getTo());
        }

        /**
         * Is there a chance that the object are equal? Verifies that the other object has a comparable type.
         *
         * @param other other object
         * @return true when other is an instance of this type
         */
        public boolean canEqual(final Object other) {
            return other instanceof ToInfo;
        }

        @Override
        public int hashCode() {
            return java.util.Objects.hash(this.getTo());
        }
    }

    @Override
    public String toString() {
        return "ArcLinkInfo(type=" + this.getType() + ", arcrole=" + this.getArcrole() + ", title=" + this.getTitle()
                + ", show=" + this.getShow() + ", actuate=" + this.getActuate() + ", from=" + this.getFrom() + ", to="
                + this.getTo() + ")";
    }

    @Override
    public boolean equals(final Object o) {
        if (o == this) {
            return true;
        }
        if (!(o instanceof ArcLinkInfo)) {
            return false;
        }
        final ArcLinkInfo that = (ArcLinkInfo) o;
        if (!that.canEqual(this)) {
            return false;
        }
        return java.util.Objects.equals(this.getType(), that.getType())
                && java.util.Objects.equals(this.getArcrole(), that.getArcrole())
                && java.util.Objects.equals(this.getTitle(), that.getTitle())
                && java.util.Objects.equals(this.getShow(), that.getShow())
                && java.util.Objects.equals(this.getActuate(), that.getActuate())
                && java.util.Objects.equals(this.getFrom(), that.getFrom())
                && java.util.Objects.equals(this.getTo(), that.getTo());
    }

    /**
     * Is there a chance that the object are equal? Verifies that the other object has a comparable type.
     *
     * @param other other object
     * @return true when other is an instance of this type
     */
    public boolean canEqual(final Object other) {
        return other instanceof ArcLinkInfo;
    }

    @Override
    public int hashCode() {
        // Objects.hash uses the same 31-based recurrence seeded at 1 as the
        // hand-rolled implementation it replaces, so values are unchanged.
        return java.util.Objects.hash(this.getType(), this.getArcrole(), this.getTitle(), this.getShow(),
                this.getActuate(), this.getFrom(), this.getTo());
    }
}
| |
/*$Id: PresContextImpl.java 3493 2002-07-14 16:03:36Z gunterze $*/
/*****************************************************************************
* *
* Copyright (c) 2002 by TIANI MEDGRAPH AG *
* *
* This file is part of dcm4che. *
* *
* This library is free software; you can redistribute it and/or modify it *
* under the terms of the GNU Lesser General Public License as published *
* by the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
* This library is distributed in the hope that it will be useful, but *
* WITHOUT ANY WARRANTY; without even the implied warranty of *
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU *
* Lesser General Public License for more details. *
* *
* You should have received a copy of the GNU Lesser General Public *
* License along with this library; if not, write to the Free Software *
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA *
* *
*****************************************************************************/
package org.dcm4cheri.net;
import org.dcm4che.net.*;
import org.dcm4che.dict.DictionaryFactory;
import org.dcm4che.dict.UIDDictionary;
import java.io.*;
import java.util.*;
/**
*
* @author gunter.zeilinger@tiani.com
* @version 1.0.0
*/
/**
 * Immutable representation of a single Presentation Context item of an
 * A-ASSOCIATE-RQ/AC PDU, holding the presentation context ID, the negotiation
 * result, the abstract syntax UID and the list of transfer syntax UIDs.
 *
 * @author gunter.zeilinger@tiani.com
 * @version 1.0.0
 */
final class PresContextImpl implements PresContext {

    private static final UIDDictionary UID_DICT =
            DictionaryFactory.getInstance().getDefaultUIDDictionary();

    private final int type;   // PDU sub-item type (0x20 = RQ, 0x21 = AC)
    private final int pcid;   // presentation context ID, odd, in 1..255
    private final int result; // result/reason byte (meaningful for AC items)
    private final String asuid;
    private final List tsuids;

    /**
     * Creates a presentation context from explicit values.
     *
     * @param type   PDU sub-item type (0x20 for RQ, 0x21 for AC)
     * @param pcid   presentation context ID; must be an odd integer in 1..255
     * @param result result/reason code
     * @param asuid  abstract syntax UID, may be null for AC items
     * @param tsuids transfer syntax UIDs, must not be empty
     * @throws IllegalArgumentException on invalid pcid or empty tsuids
     */
    PresContextImpl(int type, int pcid, int result, String asuid,
            String[] tsuids) {
        // DICOM PS3.8 requires presentation context IDs to be odd integers in
        // the range 1..255. The previous check used (pcid | 1) == 0, which is
        // always false (OR-ing with 1 sets the low bit), so even IDs were
        // silently accepted; (pcid & 1) == 0 correctly rejects them.
        if ((pcid & 1) == 0 || (pcid & ~0xff) != 0) {
            throw new IllegalArgumentException("pcid=" + pcid);
        }
        if (tsuids.length == 0) {
            throw new IllegalArgumentException("Missing TransferSyntax");
        }
        this.type = type;
        this.pcid = pcid;
        this.result = result;
        this.asuid = asuid;
        this.tsuids = new ArrayList(Arrays.asList(tsuids));
    }

    /**
     * Parses a presentation context item from the wire.
     *
     * @param type PDU sub-item type already read by the caller
     * @param din  stream positioned at the item body
     * @param len  declared item length
     * @throws PDUException on unexpected or unrecognized sub-items, or when the
     *                      declared length does not match the sub-item lengths
     */
    PresContextImpl(int type, DataInputStream din, int len)
            throws IOException, PDUException {
        this.type = type;
        this.pcid = din.readUnsignedByte();
        din.readUnsignedByte(); // reserved byte
        this.result = din.readUnsignedByte();
        din.readUnsignedByte(); // reserved byte
        int remain = len - 4;
        String asuid = null;
        this.tsuids = new LinkedList();
        while (remain > 0) {
            int uidtype = din.readUnsignedByte();
            din.readUnsignedByte(); // reserved byte
            int uidlen = din.readUnsignedShort();
            switch (uidtype) {
                case 0x30: // Abstract Syntax sub-item: RQ only, at most once
                    if (type == 0x21 || asuid != null) {
                        throw new PDUException(
                                "Unexpected Abstract Syntax sub-item in"
                                        + " Presentation Context",
                                new AAbortImpl(AAbort.SERVICE_PROVIDER,
                                        AAbort.UNEXPECTED_PDU_PARAMETER));
                    }
                    asuid = AAssociateRQACImpl.readASCII(din, uidlen);
                    break;
                case 0x40: // Transfer Syntax sub-item: AC items carry exactly one
                    if (type == 0x21 && !tsuids.isEmpty()) {
                        throw new PDUException(
                                "Unexpected Transfer Syntax sub-item in"
                                        + " Presentation Context",
                                new AAbortImpl(AAbort.SERVICE_PROVIDER,
                                        AAbort.UNEXPECTED_PDU_PARAMETER));
                    }
                    tsuids.add(AAssociateRQACImpl.readASCII(din, uidlen));
                    break;
                default:
                    throw new PDUException(
                            "unrecognized item type "
                                    + Integer.toHexString(uidtype) + 'H',
                            new AAbortImpl(AAbort.SERVICE_PROVIDER,
                                    AAbort.UNRECOGNIZED_PDU_PARAMETER));
            }
            remain -= 4 + uidlen;
        }
        this.asuid = asuid;
        if (remain < 0) {
            throw new PDUException("Presentation item length: " + len
                    + " mismatch length of sub-items",
                    new AAbortImpl(AAbort.SERVICE_PROVIDER,
                            AAbort.INVALID_PDU_PARAMETER_VALUE));
        }
    }

    /** Serializes this item (header, pcid, result and UID sub-items) to the stream. */
    void writeTo(DataOutputStream dout) throws IOException {
        dout.write(type);
        dout.write(0); // reserved byte
        dout.writeShort(length());
        dout.write(pcid);
        dout.write(0); // reserved byte
        dout.write(result);
        dout.write(0); // reserved byte
        if (asuid != null) {
            dout.write(0x30);
            dout.write(0);
            // UIDs are ASCII, so String.length() equals the encoded byte length.
            dout.writeShort(asuid.length());
            dout.writeBytes(asuid);
        }
        for (Iterator it = tsuids.iterator(); it.hasNext();) {
            String tsuid = (String) it.next();
            dout.write(0x40);
            dout.write(0);
            dout.writeShort(tsuid.length());
            dout.writeBytes(tsuid);
        }
    }

    /** Length of the item body in bytes (excluding the 4-byte item header). */
    final int length() {
        int retval = 4;
        if (asuid != null) {
            retval += 4 + asuid.length();
        }
        for (Iterator it = tsuids.iterator(); it.hasNext();) {
            retval += 4 + ((String) it.next()).length();
        }
        return retval;
    }

    final int type() {
        return type;
    }

    public final int pcid() {
        return pcid;
    }

    public final int result() {
        return result;
    }

    public final String getAbstractSyntaxUID() {
        return asuid;
    }

    /** @return unmodifiable view of the transfer syntax UIDs */
    public final List getTransferSyntaxUIDs() {
        return Collections.unmodifiableList(tsuids);
    }

    /** @return the first (for AC items: the accepted) transfer syntax UID */
    public final String getTransferSyntaxUID() {
        return (String) tsuids.get(0);
    }

    public String toString() {
        return toStringBuffer(new StringBuffer()).toString();
    }

    private StringBuffer toStringBuffer(StringBuffer sb) {
        sb.append("PresContext[pcid=").append(pcid);
        if (type == 0x20) {
            sb.append(", as=").append(UID_DICT.lookup(asuid));
        } else {
            sb.append(", result=").append(resultAsString());
        }
        Iterator it = tsuids.iterator();
        sb.append(", ts=").append(UID_DICT.lookup((String) it.next()));
        while (it.hasNext()) {
            sb.append(", ").append(UID_DICT.lookup((String) it.next()));
        }
        return sb.append("]");
    }

    /** Human-readable form of the negotiation result code. */
    public String resultAsString() {
        switch (result()) {
            case ACCEPTANCE:
                return "0 - acceptance";
            case USER_REJECTION:
                return "1 - user-rejection";
            case NO_REASON_GIVEN:
                return "2 - no-reason-given";
            case ABSTRACT_SYNTAX_NOT_SUPPORTED:
                return "3 - abstract-syntax-not-supported";
            case TRANSFER_SYNTAXES_NOT_SUPPORTED:
                return "4 - transfer-syntaxes-not-supported";
            default:
                return String.valueOf(result());
        }
    }
}
| |
/*
* The MIT License
*
* Copyright (c) 2004-2010, Sun Microsystems, Inc., Kohsuke Kawaguchi
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package hudson.util;
import com.thoughtworks.xstream.converters.ConversionException;
import com.thoughtworks.xstream.converters.SingleValueConverter;
import com.thoughtworks.xstream.converters.UnmarshallingContext;
import com.thoughtworks.xstream.converters.Converter;
import com.thoughtworks.xstream.converters.MarshallingContext;
import com.thoughtworks.xstream.converters.reflection.ReflectionConverter;
import com.thoughtworks.xstream.converters.reflection.ReflectionProvider;
import com.thoughtworks.xstream.converters.reflection.SerializationMethodInvoker;
import com.thoughtworks.xstream.converters.reflection.ObjectAccessException;
import com.thoughtworks.xstream.converters.reflection.PureJavaReflectionProvider;
import com.thoughtworks.xstream.converters.reflection.NonExistentFieldException;
import com.thoughtworks.xstream.core.util.Primitives;
import com.thoughtworks.xstream.io.HierarchicalStreamReader;
import com.thoughtworks.xstream.io.HierarchicalStreamWriter;
import com.thoughtworks.xstream.io.ExtendedHierarchicalStreamWriterHelper;
import com.thoughtworks.xstream.mapper.Mapper;
import com.thoughtworks.xstream.mapper.CannotResolveClassException;
import hudson.diagnosis.OldDataMonitor;
import hudson.model.Saveable;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.HashSet;
import java.util.Collection;
import java.util.HashMap;
import java.util.logging.Logger;
import static java.util.logging.Level.WARNING;
/**
* Custom {@link ReflectionConverter} that handle errors more gracefully.
*
* <ul>
* <li>If the field is missing, the value is ignored instead of causing an error.
* This makes evolution easy.
* <li>If the type found in XML is no longer available, the element is skipped
* instead of causing an error.
* </ul>
*
*/
public class RobustReflectionConverter implements Converter {
// Reflection access to the fields of converted objects.
protected final ReflectionProvider reflectionProvider;
// Maps between class/field names and their XML aliases.
protected final Mapper mapper;
// Invokes writeReplace()/readResolve() serialization hooks when present.
protected transient SerializationMethodInvoker serializationMethodInvoker;
// Lazily-created fallback provider; not used within this view of the class.
private transient ReflectionProvider pureJavaReflectionProvider;

/**
 * Creates a converter backed by the given mapper and reflection provider.
 *
 * @param mapper             alias/converter lookup
 * @param reflectionProvider field access strategy
 */
public RobustReflectionConverter(Mapper mapper, ReflectionProvider reflectionProvider) {
    this.mapper = mapper;
    this.reflectionProvider = reflectionProvider;
    serializationMethodInvoker = new SerializationMethodInvoker();
}
/**
 * Claims every type. NOTE(review): returning true unconditionally suggests this
 * converter is registered as a low-priority catch-all — confirm registration.
 */
public boolean canConvert(Class type) {
    return true;
}
/**
 * Entry point for serialization: honors a writeReplace() substitution (recording
 * the substituted class in a "resolves-to" attribute) before delegating to
 * {@link #doMarshal}.
 */
public void marshal(Object original, final HierarchicalStreamWriter writer, final MarshallingContext context) {
    final Object replaced = serializationMethodInvoker.callWriteReplace(original);
    final boolean substituted = replaced.getClass() != original.getClass();
    if (substituted) {
        final String alias = mapper.serializedClass(replaced.getClass());
        writer.addAttribute(mapper.aliasForAttribute("resolves-to"), alias);
    }
    doMarshal(replaced, writer, context);
}
/**
 * Writes all serializable fields of {@code source} in two passes: first, fields
 * that have a {@link SingleValueConverter} become XML attributes; second, the
 * remaining fields become child elements (expanding implicit collections).
 */
protected void doMarshal(final Object source, final HierarchicalStreamWriter writer, final MarshallingContext context) {
    final Set seenFields = new HashSet();
    final Set seenAsAttributes = new HashSet();
    // Attributes might be preferred to child elements ...
    reflectionProvider.visitSerializableFields(source, new ReflectionProvider.Visitor() {
        public void visit(String fieldName, Class type, Class definedIn, Object value) {
            // Converter lookup from most to least specific (field+type+owner, field+type, type).
            SingleValueConverter converter = mapper.getConverterFromItemType(fieldName, type, definedIn);
            if (converter == null) converter = mapper.getConverterFromItemType(fieldName, type);
            if (converter == null) converter = mapper.getConverterFromItemType(type);
            if (converter != null) {
                if (value != null) {
                    final String str = converter.toString(value);
                    if (str != null) {
                        writer.addAttribute(mapper.aliasForAttribute(fieldName), str);
                    }
                }
                // Mark even null-valued fields so the second pass skips them.
                seenAsAttributes.add(fieldName);
            }
        }
    });
    // Child elements not covered already processed as attributes ...
    reflectionProvider.visitSerializableFields(source, new ReflectionProvider.Visitor() {
        public void visit(String fieldName, Class fieldType, Class definedIn, Object newObj) {
            if (!seenAsAttributes.contains(fieldName) && newObj != null) {
                Mapper.ImplicitCollectionMapping mapping = mapper.getImplicitCollectionDefForFieldName(source.getClass(), fieldName);
                if (mapping != null) {
                    if (mapping.getItemFieldName() != null) {
                        // Implicit collection with named items: one element per entry.
                        Collection list = (Collection) newObj;
                        for (Iterator iter = list.iterator(); iter.hasNext();) {
                            Object obj = iter.next();
                            writeField(fieldName, mapping.getItemFieldName(), mapping.getItemType(), definedIn, obj);
                        }
                    } else {
                        context.convertAnother(newObj);
                    }
                } else {
                    writeField(fieldName, fieldName, fieldType, definedIn, newObj);
                    seenFields.add(fieldName);
                }
            }
        }
        // Writes one child element for a field, adding "class" when the runtime
        // type differs from the declared default, and "defined-in" for shadowed fields.
        private void writeField(String fieldName, String aliasName, Class fieldType, Class definedIn, Object newObj) {
            try {
                if (!mapper.shouldSerializeMember(definedIn, aliasName)) {
                    return;
                }
                ExtendedHierarchicalStreamWriterHelper.startNode(writer, mapper.serializedMember(definedIn, aliasName), fieldType);
                Class actualType = newObj.getClass();
                Class defaultType = mapper.defaultImplementationOf(fieldType);
                if (!actualType.equals(defaultType)) {
                    String serializedClassName = mapper.serializedClass(actualType);
                    if (!serializedClassName.equals(mapper.serializedClass(defaultType))) {
                        writer.addAttribute(mapper.aliasForSystemAttribute("class"), serializedClassName);
                    }
                }
                if (seenFields.contains(aliasName)) {
                    writer.addAttribute(mapper.aliasForAttribute("defined-in"), mapper.serializedClass(definedIn));
                }
                Field field = reflectionProvider.getField(definedIn,fieldName);
                marshallField(context, newObj, field);
                writer.endNode();
            } catch (RuntimeException e) {
                // intercept an exception so that the stack trace shows how we end up marshalling the object in question
                throw new RuntimeException("Failed to serialize "+definedIn.getName()+"#"+fieldName+" for "+source.getClass(),e);
            }
        }
    });
}
    /**
     * Writes one field's value into the stream.
     * Extension hook for subclasses; this base implementation delegates to the
     * marshalling context and does not use the {@code field} argument.
     */
    protected void marshallField(final MarshallingContext context, Object newObj, Field field) {
        context.convertAnother(newObj);
    }
public Object unmarshal(final HierarchicalStreamReader reader, final UnmarshallingContext context) {
Object result = instantiateNewInstance(reader, context);
result = doUnmarshal(result, reader, context);
return serializationMethodInvoker.callReadResolve(result);
}
    /**
     * Populates {@code result} from the stream.
     * First consumes XML attributes (via single-value converters), then walks
     * child elements, writing each into the matching field or implicit
     * collection. Unlike stock XStream, unresolvable fields/classes are logged
     * and collected under the "ReadError" context key instead of aborting, so a
     * partially-readable document still produces a usable object.
     *
     * @return the populated {@code result} instance.
     */
    public Object doUnmarshal(final Object result, final HierarchicalStreamReader reader, final UnmarshallingContext context) {
        final SeenFields seenFields = new SeenFields();
        Iterator it = reader.getAttributeNames();
        // Remember outermost Saveable encountered, for reporting below
        if (result instanceof Saveable && context.get("Saveable") == null)
            context.put("Saveable", result);
        // Process attributes before recursing into child elements.
        while (it.hasNext()) {
            String attrAlias = (String) it.next();
            String attrName = mapper.attributeForAlias(attrAlias);
            Class classDefiningField = determineWhichClassDefinesField(reader);
            boolean fieldExistsInClass = fieldDefinedInClass(result, attrName);
            if (fieldExistsInClass) {
                Field field = reflectionProvider.getField(result.getClass(), attrName);
                SingleValueConverter converter = mapper.getConverterFromAttribute(field.getDeclaringClass(),attrName,field.getType());
                Class type = field.getType();
                if (converter == null) {
                    converter = mapper.getConverterFromItemType(type);
                }
                if (converter != null) {
                    Object value = converter.fromString(reader.getAttribute(attrAlias));
                    // Box primitives so the isAssignableFrom check below works.
                    if (type.isPrimitive()) {
                        type = Primitives.box(type);
                    }
                    if (value != null && !type.isAssignableFrom(value.getClass())) {
                        throw new ConversionException("Cannot convert type " + value.getClass().getName() + " to type " + type.getName());
                    }
                    reflectionProvider.writeField(result, attrName, value, classDefiningField);
                    seenFields.add(classDefiningField, attrName);
                }
            }
        }
        // Lazily-created map of implicit-collection field name -> collection.
        Map implicitCollectionsForCurrentObject = null;
        while (reader.hasMoreChildren()) {
            reader.moveDown();
            try {
                String fieldName = mapper.realMember(result.getClass(), reader.getNodeName());
                boolean implicitCollectionHasSameName = mapper.getImplicitCollectionDefForFieldName(result.getClass(), reader.getNodeName()) != null;
                Class classDefiningField = determineWhichClassDefinesField(reader);
                boolean fieldExistsInClass = !implicitCollectionHasSameName && fieldDefinedInClass(result,fieldName);
                Class type = determineType(reader, fieldExistsInClass, result, fieldName, classDefiningField);
                final Object value;
                if (fieldExistsInClass) {
                    Field field = reflectionProvider.getField(result.getClass(),fieldName);
                    value = unmarshalField(context, result, type, field);
                    // TODO the reflection provider should have returned the proper field in first place ....
                    Class definedType = reflectionProvider.getFieldType(result, fieldName, classDefiningField);
                    if (!definedType.isPrimitive()) {
                        type = definedType;
                    }
                } else {
                    value = context.convertAnother(result, type);
                }
                if (value != null && !type.isAssignableFrom(value.getClass())) {
                    // Log and skip rather than fail the whole unmarshal.
                    LOGGER.warning("Cannot convert type " + value.getClass().getName() + " to type " + type.getName());
                    // behave as if we didn't see this element
                } else {
                    if (fieldExistsInClass) {
                        reflectionProvider.writeField(result, fieldName, value, classDefiningField);
                        seenFields.add(classDefiningField, fieldName);
                    } else {
                        implicitCollectionsForCurrentObject = writeValueToImplicitCollection(context, value, implicitCollectionsForCurrentObject, result, fieldName);
                    }
                }
            } catch (NonExistentFieldException e) {
                LOGGER.log(WARNING,"Skipping a non-existent field "+e.getFieldName(),e);
                addErrorInContext(context, e);
            } catch (CannotResolveClassException e) {
                LOGGER.log(WARNING,"Skipping a non-existent type",e);
                addErrorInContext(context, e);
            } catch (LinkageError e) {
                LOGGER.log(WARNING,"Failed to resolve a type",e);
                addErrorInContext(context, e);
            }
            reader.moveUp();
        }
        // Report any class/field errors in Saveable objects
        if (context.get("ReadError") != null && context.get("Saveable") == result) {
            OldDataMonitor.report((Saveable)result, (ArrayList<Throwable>)context.get("ReadError"));
            context.put("ReadError", null);
        }
        return result;
    }
public static void addErrorInContext(UnmarshallingContext context, Throwable e) {
ArrayList<Throwable> list = (ArrayList<Throwable>)context.get("ReadError");
if (list == null)
context.put("ReadError", list = new ArrayList<Throwable>());
list.add(e);
}
    /**
     * Returns true when {@code result}'s class has a field with the given
     * name. Uses getFieldOrNull (rather than fieldDefinedInClass) so transient
     * fields are included, matching XStream 1.1.3 behavior.
     */
    private boolean fieldDefinedInClass(Object result, String attrName) {
        // during unmarshalling, unmarshal into transient fields like XStream 1.1.3
        //boolean fieldExistsInClass = reflectionProvider.fieldDefinedInClass(attrName, result.getClass());
        return reflectionProvider.getFieldOrNull(result.getClass(),attrName)!=null;
    }
    /**
     * Reads one field's value from the stream.
     * Extension hook for subclasses; this base implementation delegates to the
     * context and does not use the {@code field} argument.
     */
    protected Object unmarshalField(final UnmarshallingContext context, final Object result, Class type, Field field) {
        return context.convertAnother(result, type);
    }
    /**
     * Appends {@code value} to the implicit collection that backs the given
     * item element name, instantiating the collection (and installing it on
     * {@code result}) the first time an item for that field is seen.
     *
     * @param implicitCollections per-object cache of already-created
     *      collections, or null if none created yet; may be (lazily) created
     *      here, so callers must keep the returned map.
     * @return the (possibly newly created) {@code implicitCollections} map.
     */
    private Map writeValueToImplicitCollection(UnmarshallingContext context, Object value, Map implicitCollections, Object result, String itemFieldName) {
        String fieldName = mapper.getFieldNameForItemTypeAndName(context.getRequiredType(), value.getClass(), itemFieldName);
        if (fieldName != null) {
            if (implicitCollections == null) {
                implicitCollections = new HashMap(); // lazy instantiation
            }
            Collection collection = (Collection) implicitCollections.get(fieldName);
            if (collection == null) {
                Class fieldType = mapper.defaultImplementationOf(reflectionProvider.getFieldType(result, fieldName, null));
                if (!Collection.class.isAssignableFrom(fieldType)) {
                    throw new ObjectAccessException("Field " + fieldName + " of " + result.getClass().getName() +
                            " is configured for an implicit Collection, but field is of type " + fieldType.getName());
                }
                // Pure-Java provider instantiates via the no-arg constructor.
                if (pureJavaReflectionProvider == null) {
                    pureJavaReflectionProvider = new PureJavaReflectionProvider();
                }
                collection = (Collection)pureJavaReflectionProvider.newInstance(fieldType);
                reflectionProvider.writeField(result, fieldName, collection, null);
                implicitCollections.put(fieldName, collection);
            }
            collection.add(value);
        }
        return implicitCollections;
    }
private Class determineWhichClassDefinesField(HierarchicalStreamReader reader) {
String definedIn = reader.getAttribute(mapper.aliasForAttribute("defined-in"));
return definedIn == null ? null : mapper.realClass(definedIn);
}
protected Object instantiateNewInstance(HierarchicalStreamReader reader, UnmarshallingContext context) {
String readResolveValue = reader.getAttribute(mapper.aliasForAttribute("resolves-to"));
Class type = readResolveValue != null ? mapper.realClass(readResolveValue) : context.getRequiredType();
Object currentObject = context.currentObject();
if (currentObject != null) {
if (type.isInstance(currentObject))
return currentObject;
}
return reflectionProvider.newInstance(type);
}
private static class SeenFields {
private Set seen = new HashSet();
public void add(Class definedInCls, String fieldName) {
String uniqueKey = fieldName;
if (definedInCls != null) {
uniqueKey += " [" + definedInCls.getName() + "]";
}
if (seen.contains(uniqueKey)) {
throw new DuplicateFieldException(uniqueKey);
} else {
seen.add(uniqueKey);
}
}
}
    /**
     * Works out the Java type to deserialize the current element as.
     * Prefers an explicit "class" attribute (when compatible with the declared
     * field type), then implicit-collection item types for unknown field
     * names, and finally the field's default implementation type.
     */
    private Class determineType(HierarchicalStreamReader reader, boolean validField, Object result, String fieldName, Class definedInCls) {
        String classAttribute = reader.getAttribute(mapper.aliasForAttribute("class"));
        Class fieldType = reflectionProvider.getFieldType(result, fieldName, definedInCls);
        if (classAttribute != null) {
            Class specifiedType = mapper.realClass(classAttribute);
            if(fieldType.isAssignableFrom(specifiedType))
                // make sure that the specified type in XML is compatible with the field type.
                // this allows the code to evolve in more flexible way.
                return specifiedType;
        }
        if (!validField) {
            Class itemType = mapper.getItemTypeForItemFieldName(result.getClass(), fieldName);
            if (itemType != null) {
                return itemType;
            } else {
                // Element name itself names the type (implicit-collection item).
                return mapper.realClass(reader.getNodeName());
            }
        } else {
            return mapper.defaultImplementationOf(fieldType);
        }
    }
    /**
     * Recreates the transient serializationMethodInvoker after this converter
     * itself is deserialized.
     */
    private Object readResolve() {
        serializationMethodInvoker = new SerializationMethodInvoker();
        return this;
    }
    /**
     * Thrown when the same field appears more than once while unmarshalling a
     * single object; adds a "duplicate-field" entry to the diagnostics.
     */
    public static class DuplicateFieldException extends ConversionException {
        public DuplicateFieldException(String msg) {
            super(msg);
            add("duplicate-field", msg);
        }
    }
    /** Logger shared by all converter instances. */
    private static final Logger LOGGER = Logger.getLogger(RobustReflectionConverter.class.getName());
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache.distributed;
import org.apache.ignite.*;
import org.apache.ignite.cache.*;
import org.apache.ignite.cache.affinity.*;
import org.apache.ignite.cache.store.*;
import org.apache.ignite.configuration.*;
import org.apache.ignite.internal.*;
import org.apache.ignite.internal.util.typedef.*;
import org.apache.ignite.lang.*;
import org.apache.ignite.resources.*;
import org.apache.ignite.spi.discovery.tcp.*;
import org.apache.ignite.spi.discovery.tcp.ipfinder.*;
import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.*;
import org.apache.ignite.testframework.junits.common.*;
import org.jdk8.backport.*;
import javax.cache.configuration.*;
import javax.cache.integration.*;
import java.util.*;
import static org.apache.ignite.cache.CacheAtomicWriteOrderMode.*;
import static org.apache.ignite.cache.CacheDistributionMode.*;
import static org.apache.ignite.cache.CacheMode.*;
import static org.apache.ignite.cache.CacheWriteSynchronizationMode.*;
/**
* Check reloadAll() on partitioned cache.
*/
public abstract class GridCachePartitionedReloadAllAbstractSelfTest extends GridCommonAbstractTest {
    /** Amount of nodes in the grid. */
    private static final int GRID_CNT = 4;
    /** Amount of backups in partitioned cache. */
    private static final int BACKUP_CNT = 1;
    /** IP finder. */
    private static TcpDiscoveryIpFinder ipFinder = new TcpDiscoveryVmIpFinder(true);
    /** Map where dummy cache store values are stored. */
    private final Map<Integer, String> map = new ConcurrentHashMap8<>();
    /** Collection of caches, one per grid node. */
    private List<IgniteCache<Integer, String>> caches;
    /** {@inheritDoc} */
    @SuppressWarnings("unchecked")
    @Override protected IgniteConfiguration getConfiguration(String gridName) throws Exception {
        IgniteConfiguration c = super.getConfiguration(gridName);
        TcpDiscoverySpi disco = new TcpDiscoverySpi();
        disco.setIpFinder(ipFinder);
        c.setDiscoverySpi(disco);
        CacheConfiguration cc = defaultCacheConfiguration();
        cc.setDistributionMode(nearEnabled() ? NEAR_PARTITIONED : PARTITIONED_ONLY);
        cc.setCacheMode(cacheMode());
        cc.setAtomicityMode(atomicityMode());
        cc.setBackups(BACKUP_CNT);
        cc.setWriteSynchronizationMode(FULL_SYNC);
        // Wire the dummy store through a singleton factory so all nodes share it.
        CacheStore store = cacheStore();
        if (store != null) {
            cc.setCacheStoreFactory(new FactoryBuilder.SingletonFactory(store));
            cc.setReadThrough(true);
            cc.setWriteThrough(true);
            cc.setLoadPreviousValue(true);
        }
        else
            cc.setCacheStoreFactory(null);
        cc.setAtomicWriteOrderMode(atomicWriteOrderMode());
        c.setCacheConfiguration(cc);
        return c;
    }
    /**
     * @return Cache mode.
     */
    protected CacheMode cacheMode() {
        return PARTITIONED;
    }
    /**
     * @return Atomicity mode.
     */
    protected CacheAtomicityMode atomicityMode() {
        return CacheAtomicityMode.TRANSACTIONAL;
    }
    /**
     * @return Write order mode for atomic cache.
     */
    protected CacheAtomicWriteOrderMode atomicWriteOrderMode() {
        return CLOCK;
    }
    /**
     * @return {@code True} if near cache is enabled.
     */
    protected abstract boolean nearEnabled();
    /** {@inheritDoc} */
    @Override protected void beforeTestsStarted() throws Exception {
        caches = new ArrayList<>(GRID_CNT);
        for (int i = 0; i < GRID_CNT; i++)
            caches.add(startGrid(i).<Integer, String>jcache(null));
        awaitPartitionMapExchange();
    }
    /** {@inheritDoc} */
    @Override protected void afterTestsStopped() throws Exception {
        stopAllGrids();
        map.clear();
        caches = null;
    }
    /**
     * Create new cache store.
     *
     * @return Write through storage emulator.
     */
    protected CacheStore<?, ?> cacheStore() {
        return new CacheStoreAdapter<Integer, String>() {
            @IgniteInstanceResource
            private Ignite g;
            @Override public void loadCache(IgniteBiInClosure<Integer, String> c,
                Object... args) {
                X.println("Loading all on: " + caches.indexOf(((IgniteKernal)g).<Integer, String>cache(null)));
                for (Map.Entry<Integer, String> e : map.entrySet())
                    c.apply(e.getKey(), e.getValue());
            }
            @Override public String load(Integer key) {
                X.println("Loading on: " + caches.indexOf(((IgniteKernal)g)
                    .<Integer, String>cache(null)) + " key=" + key);
                return map.get(key);
            }
            @Override public void write(javax.cache.Cache.Entry<? extends Integer, ? extends String> e) {
                fail("Should not be called within the test.");
            }
            @Override public void delete(Object key) {
                fail("Should not be called within the test.");
            }
        };
    }
    /**
     * @throws Exception If test failed.
     */
    public void testReloadAll() throws Exception {
        // Fill caches with values.
        for (IgniteCache<Integer, String> cache : caches) {
            Iterable<Integer> keys = primaryKeys(cache, 100);
            info("Values [cache=" + caches.indexOf(cache) + ", size=" + F.size(keys.iterator()) + ", keys=" + keys + "]");
            for (Integer key : keys)
                map.put(key, "val" + key);
        }
        CompletionListenerFuture fut = new CompletionListenerFuture();
        caches.get(0).loadAll(map.keySet(), false, fut);
        fut.get();
        // After loadAll, each key must be present only on its primary/backup nodes.
        CacheAffinity aff = ignite(0).affinity(null);
        for (IgniteCache<Integer, String> cache : caches) {
            for (Integer key : map.keySet()) {
                if (aff.isPrimaryOrBackup(grid(caches.indexOf(cache)).localNode(), key))
                    assertEquals(map.get(key), cache.localPeek(key));
                else
                    assertNull(cache.localPeek(key));
            }
        }
    }
}
| |
/*
* Copyright 2004 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import com.google.common.base.Preconditions;
import com.google.javascript.rhino.IR;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.Token;
import com.google.javascript.rhino.head.ScriptRuntime;
import com.google.javascript.rhino.jstype.TernaryValue;
/**
* Peephole optimization to fold constants (e.g. x + 1 + 7 --> x + 8).
*
*/
class PeepholeFoldConstants extends AbstractPeepholeOptimization {
// TODO(johnlenz): optimizations should not be emiting errors. Move these to
// a check pass.
  // Diagnostics emitted when a would-be fold reveals suspicious source code.
  static final DiagnosticType INVALID_GETELEM_INDEX_ERROR =
      DiagnosticType.warning(
          "JSC_INVALID_GETELEM_INDEX_ERROR",
          "Array index not integer: {0}");
  static final DiagnosticType INDEX_OUT_OF_BOUNDS_ERROR =
      DiagnosticType.warning(
          "JSC_INDEX_OUT_OF_BOUNDS_ERROR",
          "Array index out of bounds: {0}");
  static final DiagnosticType NEGATING_A_NON_NUMBER_ERROR =
      DiagnosticType.warning(
          "JSC_NEGATING_A_NON_NUMBER_ERROR",
          "Can't negate non-numeric value: {0}");
  static final DiagnosticType BITWISE_OPERAND_OUT_OF_RANGE =
      DiagnosticType.warning(
          "JSC_BITWISE_OPERAND_OUT_OF_RANGE",
          "Operand out of range, bitwise operation will lose information: {0}");
  static final DiagnosticType SHIFT_AMOUNT_OUT_OF_BOUNDS =
      DiagnosticType.warning(
          "JSC_SHIFT_AMOUNT_OUT_OF_BOUNDS",
          "Shift amount out of bounds: {0}");
  static final DiagnosticType FRACTIONAL_BITWISE_OPERAND =
      DiagnosticType.warning(
          "JSC_FRACTIONAL_BITWISE_OPERAND",
          "Fractional bitwise operand: {0}");
  // 2^53: largest integer exactly representable in an IEEE-754 double.
  private static final double MAX_FOLD_NUMBER = Math.pow(2, 53);
  // Whether this pass runs late in the optimization pipeline (see ctor doc).
  private final boolean late;
  /**
   * @param late When late is false, this mean we are currently running before
   * most of the other optimizations. In this case we would avoid optimizations
   * that would make the code harder to analyze. When this is true, we would
   * do anything to minimize for size.
   */
  PeepholeFoldConstants(boolean late) {
    this.late = late;
  }
  /**
   * Entry point for this peephole pass: dispatches the node to the matching
   * fold routine by token type and returns the (possibly replaced) subtree.
   */
  @Override
  Node optimizeSubtree(Node subtree) {
    switch(subtree.getType()) {
      case Token.NEW:
        return tryFoldCtorCall(subtree);
      case Token.TYPEOF:
        return tryFoldTypeof(subtree);
      case Token.NOT:
      case Token.POS:
      case Token.NEG:
      case Token.BITNOT:
        // Operand normalization first, then constant-fold the unary op.
        tryReduceOperandsForOp(subtree);
        return tryFoldUnaryOperator(subtree);
      case Token.VOID:
        return tryReduceVoid(subtree);
      default:
        tryReduceOperandsForOp(subtree);
        return tryFoldBinaryOperator(subtree);
    }
  }
  /**
   * Dispatches a (potential) binary operator node to the specific fold
   * routine. Nodes without two children are returned unchanged, since they
   * cannot be binary operators.
   */
  private Node tryFoldBinaryOperator(Node subtree) {
    Node left = subtree.getFirstChild();
    if (left == null) {
      return subtree;
    }
    Node right = left.getNext();
    if (right == null) {
      return subtree;
    }
    // If we've reached here, node is truly a binary operator.
    switch(subtree.getType()) {
      case Token.GETPROP:
        return tryFoldGetProp(subtree, left, right);
      case Token.GETELEM:
        return tryFoldGetElem(subtree, left, right);
      case Token.INSTANCEOF:
        return tryFoldInstanceof(subtree, left, right);
      case Token.AND:
      case Token.OR:
        return tryFoldAndOr(subtree, left, right);
      case Token.LSH:
      case Token.RSH:
      case Token.URSH:
        return tryFoldShift(subtree, left, right);
      case Token.ASSIGN:
        return tryFoldAssign(subtree, left, right);
      case Token.ASSIGN_BITOR:
      case Token.ASSIGN_BITXOR:
      case Token.ASSIGN_BITAND:
      case Token.ASSIGN_LSH:
      case Token.ASSIGN_RSH:
      case Token.ASSIGN_URSH:
      case Token.ASSIGN_ADD:
      case Token.ASSIGN_SUB:
      case Token.ASSIGN_MUL:
      case Token.ASSIGN_DIV:
      case Token.ASSIGN_MOD:
        return tryUnfoldAssignOp(subtree, left, right);
      case Token.ADD:
        return tryFoldAdd(subtree, left, right);
      case Token.SUB:
      case Token.DIV:
      case Token.MOD:
        return tryFoldArithmeticOp(subtree, left, right);
      case Token.MUL:
      case Token.BITAND:
      case Token.BITOR:
      case Token.BITXOR:
        // Commutative/associative ops: if direct folding fails, try folding
        // with the left child's constant operand instead.
        Node result = tryFoldArithmeticOp(subtree, left, right);
        if (result != subtree) {
          return result;
        }
        return tryFoldLeftChildOp(subtree, left, right);
      case Token.LT:
      case Token.GT:
      case Token.LE:
      case Token.GE:
      case Token.EQ:
      case Token.NE:
      case Token.SHEQ:
      case Token.SHNE:
        return tryFoldComparison(subtree, left, right);
      default:
        return subtree;
    }
  }
private Node tryReduceVoid(Node n) {
Node child = n.getFirstChild();
if (!child.isNumber() || child.getDouble() != 0.0) {
if (!mayHaveSideEffects(n)) {
n.replaceChild(child, IR.number(0));
reportCodeChange();
}
}
return n;
}
  /**
   * For numeric operators, rewrites operands that are statically known
   * non-number literals into their number equivalents, so later folds only
   * have to deal with NUMBER nodes.
   */
  private void tryReduceOperandsForOp(Node n) {
    switch (n.getType()) {
      case Token.ADD:
        Node left = n.getFirstChild();
        Node right = n.getLastChild();
        // ADD may be string concatenation; only numeric-convert when neither
        // side can possibly be a string.
        if (!NodeUtil.mayBeString(left) && !NodeUtil.mayBeString(right)) {
          tryConvertOperandsToNumber(n);
        }
        break;
      case Token.ASSIGN_BITOR:
      case Token.ASSIGN_BITXOR:
      case Token.ASSIGN_BITAND:
        // TODO(johnlenz): convert these to integers.
      case Token.ASSIGN_LSH:
      case Token.ASSIGN_RSH:
      case Token.ASSIGN_URSH:
      case Token.ASSIGN_SUB:
      case Token.ASSIGN_MUL:
      case Token.ASSIGN_MOD:
      case Token.ASSIGN_DIV:
        // Compound assignments: only the RHS can be converted; the LHS is an
        // assignment target.
        tryConvertToNumber(n.getLastChild());
        break;
      case Token.BITNOT:
      case Token.BITOR:
      case Token.BITXOR:
      case Token.BITAND:
      case Token.LSH:
      case Token.RSH:
      case Token.URSH:
      case Token.SUB:
      case Token.MUL:
      case Token.MOD:
      case Token.DIV:
      case Token.POS:
      case Token.NEG:
        tryConvertOperandsToNumber(n);
        break;
    }
  }
  /**
   * Applies {@link #tryConvertToNumber} to every child of {@code n}.
   */
  private void tryConvertOperandsToNumber(Node n) {
    Node next;
    for (Node c = n.getFirstChild(); c != null; c = next) {
      // Capture the sibling first: tryConvertToNumber may replace c in the
      // tree, after which c.getNext() would no longer be valid.
      next = c.getNext();
      tryConvertToNumber(c);
    }
  }
  /**
   * Replaces {@code n} with an equivalent NUMBER node when its numeric value
   * is statically known. For short-circuit/comma/hook expressions only the
   * result positions are converted; the conditions are left alone.
   */
  private void tryConvertToNumber(Node n) {
    switch (n.getType()) {
      case Token.NUMBER:
        // Nothing to do
        return;
      case Token.AND:
      case Token.OR:
      case Token.COMMA:
        // Only the last operand is the expression's result.
        tryConvertToNumber(n.getLastChild());
        return;
      case Token.HOOK:
        // Convert both branches of the ternary, not the condition.
        tryConvertToNumber(n.getChildAtIndex(1));
        tryConvertToNumber(n.getLastChild());
        return;
      case Token.NAME:
        // Only the name "undefined" has a known numeric value (NaN).
        if (!NodeUtil.isUndefined(n)) {
          return;
        }
        break;
    }
    Double result = NodeUtil.getNumberValue(n);
    if (result == null) {
      return;
    }
    double value = result;
    Node replacement = NodeUtil.numberNode(value, n);
    if (replacement.isEquivalentTo(n)) {
      return;
    }
    n.getParent().replaceChild(n, replacement);
    reportCodeChange();
  }
/**
* Folds 'typeof(foo)' if foo is a literal, e.g.
* typeof("bar") --> "string"
* typeof(6) --> "number"
*/
private Node tryFoldTypeof(Node originalTypeofNode) {
Preconditions.checkArgument(originalTypeofNode.isTypeOf());
Node argumentNode = originalTypeofNode.getFirstChild();
if (argumentNode == null || !NodeUtil.isLiteralValue(argumentNode, true)) {
return originalTypeofNode;
}
String typeNameString = null;
switch (argumentNode.getType()) {
case Token.FUNCTION:
typeNameString = "function";
break;
case Token.STRING:
typeNameString = "string";
break;
case Token.NUMBER:
typeNameString = "number";
break;
case Token.TRUE:
case Token.FALSE:
typeNameString = "boolean";
break;
case Token.NULL:
case Token.OBJECTLIT:
case Token.ARRAYLIT:
typeNameString = "object";
break;
case Token.VOID:
typeNameString = "undefined";
break;
case Token.NAME:
// We assume here that programs don't change the value of the
// keyword undefined to something other than the value undefined.
if ("undefined".equals(argumentNode.getString())) {
typeNameString = "undefined";
}
break;
}
if (typeNameString != null) {
Node newNode = IR.string(typeNameString);
originalTypeofNode.getParent().replaceChild(originalTypeofNode, newNode);
reportCodeChange();
return newNode;
}
return originalTypeofNode;
}
  /**
   * Constant-folds a unary operator (NOT, POS, NEG, BITNOT) whose operand's
   * boolean value is statically known. Emits a warning diagnostic (without
   * rewriting) when NEG/BITNOT is applied to something non-numeric.
   *
   * @return the replacement node, or {@code n} if no fold applied.
   */
  private Node tryFoldUnaryOperator(Node n) {
    Preconditions.checkState(n.hasOneChild());
    Node left = n.getFirstChild();
    Node parent = n.getParent();
    if (left == null) {
      return n;
    }
    TernaryValue leftVal = NodeUtil.getPureBooleanValue(left);
    if (leftVal == TernaryValue.UNKNOWN) {
      return n;
    }
    switch (n.getType()) {
      case Token.NOT:
        // Don't fold !0 and !1 back to false.
        if (late && left.isNumber()) {
          double numValue = left.getDouble();
          if (numValue == 0 || numValue == 1) {
            return n;
          }
        }
        Node replacementNode = NodeUtil.booleanNode(!leftVal.toBoolean(true));
        parent.replaceChild(n, replacementNode);
        reportCodeChange();
        return replacementNode;
      case Token.POS:
        if (NodeUtil.isNumericResult(left)) {
          // POS does nothing to numeric values.
          parent.replaceChild(n, left.detachFromParent());
          reportCodeChange();
          return left;
        }
        return n;
      case Token.NEG:
        if (left.isName()) {
          if (left.getString().equals("Infinity")) {
            // "-Infinity" is valid and a literal, don't modify it.
            return n;
          } else if (left.getString().equals("NaN")) {
            // "-NaN" is "NaN".
            n.removeChild(left);
            parent.replaceChild(n, left);
            reportCodeChange();
            return left;
          }
        }
        if (left.isNumber()) {
          double negNum = -left.getDouble();
          Node negNumNode = IR.number(negNum);
          parent.replaceChild(n, negNumNode);
          reportCodeChange();
          return negNumNode;
        } else {
          // left is not a number node, so do not replace, but warn the
          // user because they can't be doing anything good
          report(NEGATING_A_NON_NUMBER_ERROR, left);
          return n;
        }
      case Token.BITNOT:
        try {
          double val = left.getDouble();
          if (val >= Integer.MIN_VALUE && val <= Integer.MAX_VALUE) {
            int intVal = (int) val;
            // intVal == val rules out fractional operands.
            if (intVal == val) {
              Node notIntValNode = IR.number(~intVal);
              parent.replaceChild(n, notIntValNode);
              reportCodeChange();
              return notIntValNode;
            } else {
              report(FRACTIONAL_BITWISE_OPERAND, left);
              return n;
            }
          } else {
            report(BITWISE_OPERAND_OUT_OF_RANGE, left);
            return n;
          }
        } catch (UnsupportedOperationException ex) {
          // left is not a number node, so do not replace, but warn the
          // user because they can't be doing anything good
          report(NEGATING_A_NON_NUMBER_ERROR, left);
          return n;
        }
      default:
        return n;
    }
  }
/**
* Try to fold {@code left instanceof right} into {@code true}
* or {@code false}.
*/
private Node tryFoldInstanceof(Node n, Node left, Node right) {
Preconditions.checkArgument(n.isInstanceOf());
// TODO(johnlenz) Use type information if available to fold
// instanceof.
if (NodeUtil.isLiteralValue(left, true)
&& !mayHaveSideEffects(right)) {
Node replacementNode = null;
if (NodeUtil.isImmutableValue(left)) {
// Non-object types are never instances.
replacementNode = IR.falseNode();
} else if (right.isName()
&& "Object".equals(right.getString())) {
replacementNode = IR.trueNode();
}
if (replacementNode != null) {
n.getParent().replaceChild(n, replacementNode);
reportCodeChange();
return replacementNode;
}
}
return n;
}
  /**
   * Converts {@code x = x OP y} (or, for commutative OP, {@code x = y OP x})
   * into the shorter {@code x OP= y}. Only runs in the late pass, and only
   * when evaluating the LHS has no side effects.
   *
   * @return the replacement compound-assignment node, or {@code n} unchanged.
   */
  private Node tryFoldAssign(Node n, Node left, Node right) {
    Preconditions.checkArgument(n.isAssign());
    if (!late) {
      return n;
    }
    // Tries to convert x = x + y -> x += y;
    if (!right.hasChildren() ||
        right.getFirstChild().getNext() != right.getLastChild()) {
      // RHS must have two children.
      return n;
    }
    if (mayHaveSideEffects(left)) {
      return n;
    }
    Node newRight;
    if (areNodesEqualForInlining(left, right.getFirstChild())) {
      newRight = right.getLastChild();
    } else if (NodeUtil.isCommutative(right.getType()) &&
        areNodesEqualForInlining(left, right.getLastChild())) {
      // x = y OP x is only foldable when OP is commutative.
      newRight = right.getFirstChild();
    } else {
      return n;
    }
    // Map the RHS operator to its compound-assignment counterpart.
    int newType = -1;
    switch (right.getType()) {
      case Token.ADD:
        newType = Token.ASSIGN_ADD;
        break;
      case Token.BITAND:
        newType = Token.ASSIGN_BITAND;
        break;
      case Token.BITOR:
        newType = Token.ASSIGN_BITOR;
        break;
      case Token.BITXOR:
        newType = Token.ASSIGN_BITXOR;
        break;
      case Token.DIV:
        newType = Token.ASSIGN_DIV;
        break;
      case Token.LSH:
        newType = Token.ASSIGN_LSH;
        break;
      case Token.MOD:
        newType = Token.ASSIGN_MOD;
        break;
      case Token.MUL:
        newType = Token.ASSIGN_MUL;
        break;
      case Token.RSH:
        newType = Token.ASSIGN_RSH;
        break;
      case Token.SUB:
        newType = Token.ASSIGN_SUB;
        break;
      case Token.URSH:
        newType = Token.ASSIGN_URSH;
        break;
      default:
        return n;
    }
    Node newNode = new Node(newType,
        left.detachFromParent(), newRight.detachFromParent());
    n.getParent().replaceChild(n, newNode);
    reportCodeChange();
    return newNode;
  }
  /**
   * The inverse of {@link #tryFoldAssign}: expands {@code x OP= y} into
   * {@code x = x OP y} in the early pass (the longer form is easier for
   * later passes to analyze). Skipped when the LHS has side effects.
   *
   * @return the replacement ASSIGN node, or {@code n} unchanged.
   */
  private Node tryUnfoldAssignOp(Node n, Node left, Node right) {
    if (late) {
      return n;
    }
    if (!n.hasChildren() ||
        n.getFirstChild().getNext() != n.getLastChild()) {
      return n;
    }
    if (mayHaveSideEffects(left)) {
      return n;
    }
    // Tries to convert x += y -> x = x + y;
    int op = NodeUtil.getOpFromAssignmentOp(n);
    // Note: left is detached to become the assignment target, and a clone of
    // it becomes the first operand of the new OP node.
    Node replacement = IR.assign(left.detachFromParent(),
        new Node(op, left.cloneTree(), right.detachFromParent())
            .srcref(n));
    n.getParent().replaceChild(n, replacement);
    reportCodeChange();
    return replacement;
  }
/**
* Try to fold a AND/OR node.
*/
private Node tryFoldAndOr(Node n, Node left, Node right) {
Node parent = n.getParent();
Node result = null;
int type = n.getType();
TernaryValue leftVal = NodeUtil.getImpureBooleanValue(left);
if (leftVal != TernaryValue.UNKNOWN) {
boolean lval = leftVal.toBoolean(true);
// (TRUE || x) => TRUE (also, (3 || x) => 3)
// (FALSE && x) => FALSE
if (lval && type == Token.OR ||
!lval && type == Token.AND) {
result = left;
} else if (!mayHaveSideEffects(left)) {
// (FALSE || x) => x
// (TRUE && x) => x
result = right;
}
}
// Note: Right hand side folding is handled by
// PeepholeMinimizeConditions#tryMinimizeCondition
if (result != null) {
// Fold it!
n.removeChild(result);
parent.replaceChild(n, result);
reportCodeChange();
return result;
} else {
return n;
}
}
/**
* Expressions such as [foo() + 'a' + 'b'] generate parse trees
* where no node has two const children ((foo() + 'a') + 'b'), so
* tryFoldAdd() won't fold it -- tryFoldLeftChildAdd() will (for Strings).
* Specifically, it folds Add expressions where:
* - The left child is also and add expression
* - The right child is a constant value
* - The left child's right child is a STRING constant.
*/
private Node tryFoldChildAddString(Node n, Node left, Node right) {
if (NodeUtil.isLiteralValue(right, false) &&
left.isAdd()) {
Node ll = left.getFirstChild();
Node lr = ll.getNext();
// Left's right child MUST be a string. We would not want to fold
// foo() + 2 + 'a' because we don't know what foo() will return, and
// therefore we don't know if left is a string concat, or a numeric add.
if (lr.isString()) {
String leftString = NodeUtil.getStringValue(lr);
String rightString = NodeUtil.getStringValue(right);
if (leftString != null && rightString != null) {
left.removeChild(ll);
String result = leftString + rightString;
n.replaceChild(left, ll);
n.replaceChild(right, IR.string(result));
reportCodeChange();
return n;
}
}
}
if (NodeUtil.isLiteralValue(left, false) &&
right.isAdd()) {
Node rl = right.getFirstChild();
Node rr = right.getLastChild();
// Left's right child MUST be a string. We would not want to fold
// foo() + 2 + 'a' because we don't know what foo() will return, and
// therefore we don't know if left is a string concat, or a numeric add.
if (rl.isString()) {
String leftString = NodeUtil.getStringValue(left);
String rightString = NodeUtil.getStringValue(rl);
if (leftString != null && rightString != null) {
right.removeChild(rr);
String result = leftString + rightString;
n.replaceChild(right, rr);
n.replaceChild(left, IR.string(result));
reportCodeChange();
return n;
}
}
}
return n;
}
/**
* Try to fold an ADD node with constant operands
*/
private Node tryFoldAddConstantString(Node n, Node left, Node right) {
if (left.isString() ||
right.isString()) {
// Add strings.
String leftString = NodeUtil.getStringValue(left);
String rightString = NodeUtil.getStringValue(right);
if (leftString != null && rightString != null) {
Node newStringNode = IR.string(leftString + rightString);
n.getParent().replaceChild(n, newStringNode);
reportCodeChange();
return newStringNode;
}
}
return n;
}
/**
* Try to fold arithmetic binary operators
*/
private Node tryFoldArithmeticOp(Node n, Node left, Node right) {
Node result = performArithmeticOp(n.getType(), left, right);
if (result != null) {
result.copyInformationFromForTree(n);
n.getParent().replaceChild(n, result);
reportCodeChange();
return result;
}
return n;
}
/**
* Try to fold arithmetic binary operators
*/
private Node performArithmeticOp(int opType, Node left, Node right) {
// Unlike other operations, ADD operands are not always converted
// to Number.
if (opType == Token.ADD
&& (NodeUtil.mayBeString(left, false)
|| NodeUtil.mayBeString(right, false))) {
return null;
}
double result;
// TODO(johnlenz): Handle NaN with unknown value. BIT ops convert NaN
// to zero so this is a little awkward here.
Double lValObj = NodeUtil.getNumberValue(left);
if (lValObj == null) {
return null;
}
Double rValObj = NodeUtil.getNumberValue(right);
if (rValObj == null) {
return null;
}
double lval = lValObj;
double rval = rValObj;
switch (opType) {
case Token.BITAND:
result = ScriptRuntime.toInt32(lval) & ScriptRuntime.toInt32(rval);
break;
case Token.BITOR:
result = ScriptRuntime.toInt32(lval) | ScriptRuntime.toInt32(rval);
break;
case Token.BITXOR:
result = ScriptRuntime.toInt32(lval) ^ ScriptRuntime.toInt32(rval);
break;
case Token.ADD:
result = lval + rval;
break;
case Token.SUB:
result = lval - rval;
break;
case Token.MUL:
result = lval * rval;
break;
case Token.MOD:
if (rval == 0) {
return null;
}
result = lval % rval;
break;
case Token.DIV:
if (rval == 0) {
return null;
}
result = lval / rval;
break;
default:
throw new Error("Unexpected arithmetic operator");
}
// TODO(johnlenz): consider removing the result length check.
// length of the left and right value plus 1 byte for the operator.
if ((String.valueOf(result).length() <=
String.valueOf(lval).length() + String.valueOf(rval).length() + 1
// Do not try to fold arithmetic for numbers > 2^53. After that
// point, fixed-point math starts to break down and become inaccurate.
&& Math.abs(result) <= MAX_FOLD_NUMBER)
|| Double.isNaN(result)
|| result == Double.POSITIVE_INFINITY
|| result == Double.NEGATIVE_INFINITY) {
return NodeUtil.numberNode(result, null);
}
return null;
}
/**
 * Expressions such as [foo() * 10 * 20] generate parse trees
 * where no node has two const children ((foo() * 10) * 20), so
 * performArithmeticOp() won't fold it -- tryFoldLeftChildOp() will.
 * Specifically, it folds associative expressions where:
 * - The left child is also an associative expression of the same type.
 * - The right child is a constant NUMBER constant.
 * - The left child's right child is a NUMBER constant.
 */
private Node tryFoldLeftChildOp(Node n, Node left, Node right) {
  int opType = n.getType();
  // Re-associating operands is only sound for associative+commutative
  // operators, or for an ADD that cannot be string concatenation.
  Preconditions.checkState(
      (NodeUtil.isAssociative(opType) && NodeUtil.isCommutative(opType))
      || n.isAdd());
  Preconditions.checkState(!n.isAdd() || !NodeUtil.mayBeString(n));
  // Use getNumberValue to handle constants like "NaN" and "Infinity"
  // other values are converted to numbers elsewhere.
  Double rightValObj = NodeUtil.getNumberValue(right);
  if (rightValObj != null && left.getType() == opType) {
    Preconditions.checkState(left.getChildCount() == 2);
    Node ll = left.getFirstChild();
    Node lr = ll.getNext();
    // Try combining the right-hand constant with either operand of the
    // left expression; at most one combination will be foldable.
    Node valueToCombine = ll;
    Node replacement = performArithmeticOp(opType, valueToCombine, right);
    if (replacement == null) {
      valueToCombine = lr;
      replacement = performArithmeticOp(opType, valueToCombine, right);
    }
    if (replacement != null) {
      // Remove the child that has been combined
      left.removeChild(valueToCombine);
      // Replace the left op with the remaining child.
      n.replaceChild(left, left.removeFirstChild());
      // New "-Infinity" node need location info explicitly
      // added.
      replacement.copyInformationFromForTree(right);
      n.replaceChild(right, replacement);
      reportCodeChange();
    }
  }
  return n;
}
/**
 * Folds an ADD node, dispatching between string concatenation and
 * numeric addition based on whether either operand may be a string.
 */
private Node tryFoldAdd(Node node, Node left, Node right) {
  Preconditions.checkArgument(node.isAdd());
  if (!NodeUtil.mayBeString(node, true)) {
    // Numeric add: try direct folding first, then re-association with
    // a constant grandchild, e.g. (x + 1) + 2.
    Node folded = tryFoldArithmeticOp(node, left, right);
    if (folded != node) {
      return folded;
    }
    return tryFoldLeftChildOp(node, left, right);
  }
  // Potential string concatenation.
  boolean bothLiteral = NodeUtil.isLiteralValue(left, false)
      && NodeUtil.isLiteralValue(right, false);
  return bothLiteral
      ? tryFoldAddConstantString(node, left, right)  // '6' + 7
      : tryFoldChildAddString(node, left, right);    // a + 7 or 6 + a
}
/**
 * Try to fold shift operations (LSH, RSH, URSH) when both operands are
 * number literals. Reports a warning and leaves the node untouched when
 * the operands are out of the range a shift can represent faithfully.
 */
private Node tryFoldShift(Node n, Node left, Node right) {
  if (left.isNumber() &&
      right.isNumber()) {
    double result;
    double lval = left.getDouble();
    double rval = right.getDouble();
    // check ranges. We do not do anything that would clip the double to
    // a 32-bit range, since the user likely does not intend that.
    if (lval < Integer.MIN_VALUE) {
      report(BITWISE_OPERAND_OUT_OF_RANGE, left);
      return n;
    }
    // only the lower 5 bits are used when shifting, so don't do anything
    // if the shift amount is outside [0,32)
    if (!(rval >= 0 && rval < 32)) {
      report(SHIFT_AMOUNT_OUT_OF_BOUNDS, right);
      return n;
    }
    int rvalInt = (int) rval;
    if (rvalInt != rval) {
      // Fractional shift amounts cannot be folded faithfully.
      report(FRACTIONAL_BITWISE_OPERAND, right);
      return n;
    }
    switch (n.getType()) {
      case Token.LSH:
      case Token.RSH:
        // Convert the numbers to ints
        if (lval > Integer.MAX_VALUE) {
          report(BITWISE_OPERAND_OUT_OF_RANGE, left);
          return n;
        }
        int lvalInt = (int) lval;
        if (lvalInt != lval) {
          report(FRACTIONAL_BITWISE_OPERAND, left);
          return n;
        }
        if (n.getType() == Token.LSH) {
          result = lvalInt << rvalInt;
        } else {
          result = lvalInt >> rvalInt;
        }
        break;
      case Token.URSH:
        // JavaScript handles zero shifts on signed numbers differently than
        // Java as an Java int can not represent the unsigned 32-bit number
        // where JavaScript can so use a long here.
        long maxUint32 = 0xffffffffL;
        if (lval > maxUint32) {
          report(BITWISE_OPERAND_OUT_OF_RANGE, left);
          return n;
        }
        long lvalLong = (long) lval;
        if (lvalLong != lval) {
          report(FRACTIONAL_BITWISE_OPERAND, left);
          return n;
        }
        // Mask to 32 bits then shift in the wider long so the result is
        // the unsigned value JavaScript would produce.
        result = (lvalLong & maxUint32) >>> rvalInt;
        break;
      default:
        throw new AssertionError("Unknown shift operator: " +
            Token.name(n.getType()));
    }
    Node newNumber = IR.number(result);
    n.getParent().replaceChild(n, newNumber);
    reportCodeChange();
    return newNumber;
  }
  return n;
}
/**
 * Try to fold comparison nodes, e.g ==, into boolean literals.
 */
@SuppressWarnings("fallthrough")
private Node tryFoldComparison(Node n, Node left, Node right) {
  TernaryValue outcome = evaluateComparison(n.getType(), left, right);
  if (outcome == TernaryValue.UNKNOWN) {
    return n;  // cannot be decided statically
  }
  Node replacement = NodeUtil.booleanNode(outcome.toBoolean(true));
  n.getParent().replaceChild(n, replacement);
  reportCodeChange();
  return replacement;
}
/**
 * Statically evaluates a comparison between two operands, dispatching on
 * the (normalized) node type of the left operand.
 *
 * @return TRUE/FALSE when the outcome is known at compile time, or
 *     UNKNOWN when it is not safe to decide.
 */
static TernaryValue evaluateComparison(int op, Node left, Node right) {
  boolean leftLiteral = NodeUtil.isLiteralValue(left, true);
  boolean rightLiteral = NodeUtil.isLiteralValue(right, true);
  if (!leftLiteral || !rightLiteral) {
    // We only handle literal operands for LT and GT.
    if (op != Token.GT && op != Token.LT) {
      return TernaryValue.UNKNOWN;
    }
  }
  boolean undefinedRight = NodeUtil.isUndefined(right) && rightLiteral;
  boolean nullRight = right.isNull();
  int lhType = getNormalizedNodeType(left);
  int rhType = getNormalizedNodeType(right);
  switch (lhType) {
    case Token.VOID:
      // Left side is the undefined value (e.g. void 0).
      if (!leftLiteral) {
        return TernaryValue.UNKNOWN;
      } else if (!rightLiteral) {
        return TernaryValue.UNKNOWN;
      } else {
        return TernaryValue.forBoolean(compareToUndefined(right, op));
      }
    case Token.NULL:
      if (rightLiteral && isEqualityOp(op)) {
        return TernaryValue.forBoolean(compareToNull(right, op));
      }
      // fallthrough — null behaves like the booleans for the remaining ops
    case Token.TRUE:
    case Token.FALSE:
      if (undefinedRight) {
        return TernaryValue.forBoolean(compareToUndefined(left, op));
      }
      if (rhType != Token.TRUE &&
          rhType != Token.FALSE &&
          rhType != Token.NULL) {
        return TernaryValue.UNKNOWN;
      }
      switch (op) {
        case Token.SHEQ:
        case Token.EQ:
          return TernaryValue.forBoolean(lhType == rhType);
        case Token.SHNE:
        case Token.NE:
          return TernaryValue.forBoolean(lhType != rhType);
        case Token.GE:
        case Token.LE:
        case Token.GT:
        case Token.LT:
          return compareAsNumbers(op, left, right);
      }
      return TernaryValue.UNKNOWN;
    case Token.THIS:
      if (!right.isThis()) {
        return TernaryValue.UNKNOWN;
      }
      switch (op) {
        case Token.SHEQ:
        case Token.EQ:
          return TernaryValue.TRUE;
        case Token.SHNE:
        case Token.NE:
          return TernaryValue.FALSE;
      }
      // We can only handle == and != here.
      // GT, LT, GE, LE depend on the type of "this" and how it will
      // be converted to number. The results are different depending on
      // whether it is a string, NaN or other number value.
      return TernaryValue.UNKNOWN;
    case Token.STRING:
      if (undefinedRight) {
        return TernaryValue.forBoolean(compareToUndefined(left, op));
      }
      if (nullRight && isEqualityOp(op)) {
        return TernaryValue.forBoolean(compareToNull(left, op));
      }
      if (Token.STRING != right.getType()) {
        return TernaryValue.UNKNOWN; // Only eval if they are the same type
      }
      switch (op) {
        case Token.SHEQ:
        case Token.EQ:
          return areStringsEqual(left.getString(), right.getString());
        case Token.SHNE:
        case Token.NE:
          return areStringsEqual(left.getString(), right.getString()).not();
      }
      return TernaryValue.UNKNOWN;
    case Token.NUMBER:
      if (undefinedRight) {
        return TernaryValue.forBoolean(compareToUndefined(left, op));
      }
      if (nullRight && isEqualityOp(op)) {
        return TernaryValue.forBoolean(compareToNull(left, op));
      }
      if (Token.NUMBER != right.getType()) {
        return TernaryValue.UNKNOWN; // Only eval if they are the same type
      }
      return compareAsNumbers(op, left, right);
    case Token.NAME:
      if (leftLiteral && undefinedRight) {
        return TernaryValue.forBoolean(compareToUndefined(left, op));
      }
      if (rightLiteral) {
        boolean undefinedLeft = (left.getString().equals("undefined"));
        if (undefinedLeft) {
          return TernaryValue.forBoolean(compareToUndefined(right, op));
        }
        if (leftLiteral && nullRight && isEqualityOp(op)) {
          return TernaryValue.forBoolean(compareToNull(left, op));
        }
      }
      if (Token.NAME != right.getType()) {
        return TernaryValue.UNKNOWN; // Only eval if they are the same type
      }
      String ln = left.getString();
      String rn = right.getString();
      if (!ln.equals(rn)) {
        return TernaryValue.UNKNOWN; // Not the same value name.
      }
      switch (op) {
        // If we knew the named value wouldn't be NaN, it would be nice
        // to handle EQ,NE,LE,GE,SHEQ, and SHNE.
        case Token.LT:
        case Token.GT:
          // x < x and x > x are false for every value, including NaN.
          return TernaryValue.FALSE;
      }
      return TernaryValue.UNKNOWN; // don't handle that op
    case Token.NEG:
      if (leftLiteral) {
        if (undefinedRight) {
          return TernaryValue.forBoolean(compareToUndefined(left, op));
        }
        if (nullRight && isEqualityOp(op)) {
          return TernaryValue.forBoolean(compareToNull(left, op));
        }
      }
      // Nothing else for now.
      return TernaryValue.UNKNOWN;
    case Token.ARRAYLIT:
    case Token.OBJECTLIT:
    case Token.REGEXP:
    case Token.FUNCTION:
      if (leftLiteral) {
        if (undefinedRight) {
          return TernaryValue.forBoolean(compareToUndefined(left, op));
        }
        if (nullRight && isEqualityOp(op)) {
          return TernaryValue.forBoolean(compareToNull(left, op));
        }
      }
      // ignore the rest for now.
      return TernaryValue.UNKNOWN;
    default:
      // assert, this should cover all consts
      return TernaryValue.UNKNOWN;
  }
}
/** Returns whether two JS strings are equal. */
private static TernaryValue areStringsEqual(String a, String b) {
  // In JS, browsers parse \v differently. So do not consider strings
  // equal if one contains \v.
  boolean containsVerticalTab =
      a.indexOf('\u000B') != -1 || b.indexOf('\u000B') != -1;
  if (containsVerticalTab) {
    return TernaryValue.UNKNOWN;
  }
  return a.equals(b) ? TernaryValue.TRUE : TernaryValue.FALSE;
}
/**
 * @return Translate NOT expressions into TRUE or FALSE when possible.
 */
private static int getNormalizedNodeType(Node n) {
  int type = n.getType();
  if (type != Token.NOT) {
    return type;
  }
  // A NOT expression with a known boolean value normalizes to TRUE/FALSE.
  switch (NodeUtil.getPureBooleanValue(n)) {
    case TRUE:
      return Token.TRUE;
    case FALSE:
      return Token.FALSE;
    default:
      return type;
  }
}
/**
 * The result of the comparison, or UNKNOWN if the
 * result could not be determined.
 */
private static TernaryValue compareAsNumbers(int op, Node left, Node right) {
  Double leftNum = NodeUtil.getNumberValue(left);
  Double rightNum = NodeUtil.getNumberValue(right);
  if (leftNum == null || rightNum == null) {
    return TernaryValue.UNKNOWN;  // not statically-known numbers
  }
  double a = leftNum;
  double b = rightNum;
  switch (op) {
    case Token.SHEQ:
    case Token.EQ:
      // Equality folding only applies to literal NUMBER nodes.
      Preconditions.checkState(left.isNumber() && right.isNumber());
      return TernaryValue.forBoolean(a == b);
    case Token.SHNE:
    case Token.NE:
      Preconditions.checkState(left.isNumber() && right.isNumber());
      return TernaryValue.forBoolean(a != b);
    case Token.LE:
      return TernaryValue.forBoolean(a <= b);
    case Token.LT:
      return TernaryValue.forBoolean(a < b);
    case Token.GE:
      return TernaryValue.forBoolean(a >= b);
    case Token.GT:
      return TernaryValue.forBoolean(a > b);
    default:
      return TernaryValue.UNKNOWN;  // don't handle that op
  }
}
/**
 * @param value The value to compare to "undefined"
 * @param op The boolean op to compare with
 * @return Whether the boolean op is true or false
 */
private static boolean compareToUndefined(Node value, int op) {
  Preconditions.checkState(NodeUtil.isLiteralValue(value, true));
  boolean isUndefined = NodeUtil.isUndefined(value);
  // Loose equality treats null and undefined as equivalent.
  boolean looseMatch = isUndefined || (Token.NULL == value.getType());
  switch (op) {
    case Token.EQ:
      return looseMatch;
    case Token.NE:
      return !looseMatch;
    case Token.SHEQ:
      // Strict equality matches only undefined itself.
      return isUndefined;
    case Token.SHNE:
      return !isUndefined;
    case Token.LT:
    case Token.GT:
    case Token.LE:
    case Token.GE:
      // Relational comparison with undefined is always false.
      return false;
    default:
      throw new IllegalStateException("unexpected.");
  }
}
/** Whether the operator is one of ==, !=, ===, !==. */
private static boolean isEqualityOp(int op) {
  return op == Token.EQ
      || op == Token.NE
      || op == Token.SHEQ
      || op == Token.SHNE;
}
/**
 * @param value The value to compare to "null"
 * @param op The boolean op to compare with
 * @return Whether the boolean op is true or false
 */
private static boolean compareToNull(Node value, int op) {
  boolean isNull = (Token.NULL == value.getType());
  // Loose equality treats null and undefined as equivalent.
  boolean looseMatch = NodeUtil.isUndefined(value) || isNull;
  switch (op) {
    case Token.EQ:
      return looseMatch;
    case Token.NE:
      return !looseMatch;
    case Token.SHEQ:
      // Strict equality matches only null itself.
      return isNull;
    case Token.SHNE:
      return !isNull;
    default:
      throw new IllegalStateException("unexpected.");
  }
}
/**
 * Try to fold away unnecessary object instantiation.
 * e.g. this[new String('eval')] -> this.eval
 */
private Node tryFoldCtorCall(Node n) {
  Preconditions.checkArgument(n.isNew());
  // Only NEW expressions forced into string context are candidates.
  // we can remove this for GETELEM calls (anywhere else?)
  return inForcedStringContext(n) ? tryFoldInForcedStringContext(n) : n;
}
/** Returns whether this node must be coerced to a string. */
private boolean inForcedStringContext(Node n) {
  Node parent = n.getParent();
  // A GETELEM index is coerced to a string.
  if (parent.isGetElem() && parent.getLastChild() == n) {
    return true;
  }
  // we can fold in the case "" + new String("")
  return parent.isAdd();
}
/**
 * Replaces a NEW expression in string context with its string value.
 * Currently only {@code new String(...)} with an immutable argument
 * (or no argument) is handled.
 */
private Node tryFoldInForcedStringContext(Node n) {
  // For now, we only know how to fold ctors.
  Preconditions.checkArgument(n.isNew());
  Node ctorName = n.getFirstChild();
  if (!ctorName.isName() || !ctorName.getString().equals("String")) {
    return n;
  }
  Node arg = ctorName.getNext();
  String stringValue;
  if (arg == null) {
    stringValue = "";  // new String() yields the empty string
  } else if (!NodeUtil.isImmutableValue(arg)) {
    return n;  // argument could vary or have side effects
  } else {
    stringValue = NodeUtil.getStringValue(arg);
  }
  if (stringValue == null) {
    return n;
  }
  Node parent = n.getParent();
  Node newString = IR.string(stringValue);
  parent.replaceChild(n, newString);
  newString.copyInformationFrom(parent);
  reportCodeChange();
  return newString;
}
/**
 * Try to fold array-element. e.g [1, 2, 3][10];
 */
private Node tryFoldGetElem(Node n, Node left, Node right) {
  Preconditions.checkArgument(n.isGetElem());
  if (left.isObjectLit()) {
    return tryFoldObjectPropAccess(n, left, right);
  }
  return left.isArrayLit() ? tryFoldArrayAccess(n, left, right) : n;
}
/**
 * Try to fold array-length. e.g [1, 2, 3].length ==> 3, [x, y].length ==> 2
 */
private Node tryFoldGetProp(Node n, Node left, Node right) {
  Preconditions.checkArgument(n.isGetProp());
  if (left.isObjectLit()) {
    return tryFoldObjectPropAccess(n, left, right);
  }
  if (!right.isString() || !right.getString().equals("length")) {
    return n;
  }
  int knownLength;
  switch (left.getType()) {
    case Token.ARRAYLIT:
      if (mayHaveSideEffects(left)) {
        // Nope, can't fold this, without handling the side-effects.
        return n;
      }
      knownLength = left.getChildCount();
      break;
    case Token.STRING:
      knownLength = left.getString().length();
      break;
    default:
      // Not a foldable case, forget it.
      return n;
  }
  Node lengthNode = IR.number(knownLength);
  n.getParent().replaceChild(n, lengthNode);
  reportCodeChange();
  return lengthNode;
}
/**
 * Try to fold a constant index into an array literal, e.g.
 * [1, 2, 3][1] -> 2. Leaves the node untouched when the index is not a
 * whole non-negative number, when any sibling element has side effects,
 * or when the index is out of bounds.
 */
private Node tryFoldArrayAccess(Node n, Node left, Node right) {
  // If GETPROP/GETELEM is used as assignment target the array literal is
  // acting as a temporary we can't fold it here:
  // "[][0] += 1"
  if (NodeUtil.isAssignmentTarget(n)) {
    return n;
  }
  if (!right.isNumber()) {
    // Sometimes people like to use complex expressions to index into
    // arrays, or strings to index into array methods.
    return n;
  }
  double index = right.getDouble();
  int intIndex = (int) index;
  if (intIndex != index) {
    // Fractional index: warn, don't fold.
    report(INVALID_GETELEM_INDEX_ERROR, right);
    return n;
  }
  if (intIndex < 0) {
    report(INDEX_OUT_OF_BOUNDS_ERROR, right);
    return n;
  }
  // Scan the literal: remember the selected element; bail out if any
  // element that would be discarded has side effects.
  Node current = left.getFirstChild();
  Node elem = null;
  for (int i = 0; current != null; i++) {
    if (i != intIndex) {
      if (mayHaveSideEffects(current)) {
        return n;
      }
    } else {
      elem = current;
    }
    current = current.getNext();
  }
  if (elem == null) {
    // Index beyond the end of the literal.
    report(INDEX_OUT_OF_BOUNDS_ERROR, right);
    return n;
  }
  if (elem.isEmpty()) {
    // A hole in the array literal reads as undefined.
    elem = NodeUtil.newUndefinedNode(elem);
  } else {
    left.removeChild(elem);
  }
  // Replace the entire GETELEM with the value
  n.getParent().replaceChild(n, elem);
  reportCodeChange();
  return elem;
}
/**
 * Try to fold a property access on an object literal, e.g.
 * ({a: 1}).a -> 1, invoking the getter for a GETTER_DEF.
 */
private Node tryFoldObjectPropAccess(Node n, Node left, Node right) {
  Preconditions.checkArgument(NodeUtil.isGet(n));
  if (!left.isObjectLit() || !right.isString()) {
    return n;
  }
  if (NodeUtil.isAssignmentTarget(n)) {
    // If GETPROP/GETELEM is used as assignment target the object literal is
    // acting as a temporary we can't fold it here:
    // "{a:x}.a += 1" is not "x += 1"
    return n;
  }
  // find the last definition in the object literal
  Node key = null;
  Node value = null;
  for (Node c = left.getFirstChild(); c != null; c = c.getNext()) {
    if (c.getString().equals(right.getString())) {
      switch (c.getType()) {
        case Token.SETTER_DEF:
          // A setter never contributes a read value.
          continue;
        case Token.GETTER_DEF:
        case Token.STRING_KEY:
          if (value != null && mayHaveSideEffects(value)) {
            // The previously found value had side-effects
            return n;
          }
          key = c;
          value = key.getFirstChild();
          break;
        default:
          throw new IllegalStateException();
      }
    } else if (mayHaveSideEffects(c.getFirstChild())) {
      // We don't handle the side-effects here as they might need a temporary
      // or need to be reordered.
      return n;
    }
  }
  // Didn't find a definition of the name in the object literal, it might
  // be coming from the Object prototype
  if (value == null) {
    return n;
  }
  if (value.isFunction() && NodeUtil.referencesThis(value)) {
    // 'this' may refer to the object we are trying to remove
    return n;
  }
  Node replacement = value.detachFromParent();
  if (key.isGetterDef()){
    // Accessing a getter means calling it: ({get a(){...}}).a -> (fn)()
    replacement = IR.call(replacement);
    replacement.putBooleanProp(Node.FREE_CALL, true);
  }
  n.getParent().replaceChild(n, replacement);
  reportCodeChange();
  // NOTE(review): unlike the sibling folds, this returns the replaced
  // node n rather than the replacement — confirm the traversal tolerates
  // receiving the detached node here.
  return n;
}
}
| |
package com.googlecode.objectify;
import java.io.Serializable;
import com.google.appengine.api.datastore.KeyFactory;
import com.googlecode.objectify.annotation.Subclass;
/**
 * <p>A typesafe wrapper for the datastore Key object.</p>
 *
 * <p>Instances are immutable aside from the lazily-computed parent cache.
 * Equality, ordering, and hashing all delegate to the underlying raw
 * datastore key.</p>
 *
 * @author Jeff Schnitzer <jeff@infohazard.org>
 * @author Scott Hernandez
 */
public class Key<T> implements Serializable, Comparable<Key<?>>
{
	private static final long serialVersionUID = 2L;

	/** The wrapped low-level datastore key; the single source of truth for this object. */
	protected com.google.appengine.api.datastore.Key raw;

	/** Cache the instance of the parent wrapper to avoid unnecessary garbage */
	transient protected Key<?> parent;

	/** For GWT serialization */
	protected Key() {}

	/** Wrap a raw Key */
	public Key(com.google.appengine.api.datastore.Key raw)
	{
		this.raw = raw;
	}

	/** Create a key with a long id */
	public Key(Class<? extends T> kindClass, long id)
	{
		this(null, kindClass, id);
	}

	/** Create a key with a String name */
	public Key(Class<? extends T> kindClass, String name)
	{
		this(null, kindClass, name);
	}

	/** Create a key with a parent and a long id */
	public Key(Key<?> parent, Class<? extends T> kindClass, long id)
	{
		// raw(parent) is null-safe: a null parent produces a root-level key
		this.raw = KeyFactory.createKey(raw(parent), getKind(kindClass), id);
		this.parent = parent;
	}

	/** Create a key with a parent and a String name */
	public Key(Key<?> parent, Class<? extends T> kindClass, String name)
	{
		// raw(parent) is null-safe: a null parent produces a root-level key
		this.raw = KeyFactory.createKey(raw(parent), getKind(kindClass), name);
		this.parent = parent;
	}

	/**
	 * @return the raw datastore version of this key
	 */
	public com.google.appengine.api.datastore.Key getRaw()
	{
		return this.raw;
	}

	/**
	 * @return the id associated with this key, or 0 if this key has a name.
	 */
	public long getId()
	{
		return this.raw.getId();
	}

	/**
	 * @return the name associated with this key, or null if this key has an id
	 */
	public String getName()
	{
		return this.raw.getName();
	}

	/**
	 * @return the low-level datastore kind associated with this Key
	 */
	public String getKind()
	{
		return this.raw.getKind();
	}

	/**
	 * @return the parent key, or null if there is no parent. Note that
	 * the parent could potentially have any type.
	 */
	@SuppressWarnings("unchecked")
	public <V> Key<V> getParent()
	{
		// Lazily wrap and cache the raw parent; the cache is transient and
		// simply rebuilt after deserialization.
		if (this.parent == null && this.raw.getParent() != null)
			this.parent = new Key<V>(this.raw.getParent());

		return (Key<V>)this.parent;
	}

	/**
	 * Gets the root of a parent graph of keys. If a Key has no parent, it is the root.
	 *
	 * @return the topmost parent key, or this object itself if it is the root.
	 * Note that the root key could potentially have any type.
	 */
	@SuppressWarnings("unchecked")
	public <V> Key<V> getRoot()
	{
		// Recursive walk up the parent chain until no parent remains.
		if (this.getParent() == null)
			return (Key<V>)this;
		else
			return this.getParent().getRoot();
	}

	/**
	 * <p>Compares based on comparison of the raw key</p>
	 */
	@Override
	public int compareTo(Key<?> other)
	{
		return this.raw.compareTo(other.raw);
	}

	/** Equality is defined by compareTo() == 0 on the raw keys, keeping it consistent with ordering. */
	@Override
	public boolean equals(Object obj)
	{
		if (obj == null)
			return false;

		if (!(obj instanceof Key<?>))
			return false;

		return this.compareTo((Key<?>)obj) == 0;
	}

	/** Delegates to the raw key's hashCode, consistent with equals(). */
	@Override
	public int hashCode()
	{
		return this.raw.hashCode();
	}

	/** Creates a human-readable version of this key */
	@Override
	public String toString()
	{
		return "Key<?>(" + this.raw + ")";
	}

	/**
	 * Easy null-safe conversion of the raw key.
	 */
	public static <V> Key<V> typed(com.google.appengine.api.datastore.Key raw)
	{
		if (raw == null)
			return null;
		else
			return new Key<V>(raw);
	}

	/**
	 * Easy null-safe conversion of the typed key.
	 */
	public static com.google.appengine.api.datastore.Key raw(Key<?> typed)
	{
		if (typed == null)
			return null;
		else
			return typed.getRaw();
	}

	/**
	 * <p>Determines the kind for a Class, as understood by the datastore. The logic for this
	 * is approximately:</p>
	 *
	 * <ul>
	 * <li>If the class has an @Entity (either JPA or Objectify) annotation, the kind is the "name" attribute of the annotation.</li>
	 * <li>If the class has no @Entity, or the "name" attribute is empty, the kind is the simplename of the class.</li>
	 * <li>If the class has @Subclass, the kind is drawn from the first parent class that has an @Entity annotation.</li>
	 * </ul>
	 *
	 * @throws IllegalArgumentException if a kind cannot be determined (ie @Subclass with invalid hierarchy).
	 */
	public static String getKind(Class<?> clazz)
	{
		// Check this one directly
		String kind = getKindHere(clazz);
		if (kind != null)
			return kind;

		// @Subclass is treated differently, a superclass must have a mandatory @Entity
		if (clazz.getAnnotation(Subclass.class) != null)
		{
			kind = getRequiredEntityKind(clazz.getSuperclass());
			if (kind != null)
				return kind;
			else
				throw new IllegalArgumentException("@Subclass entity " + clazz.getName() + " must have a superclass with @Entity");
		}

		return clazz.getSimpleName();
	}

	/**
	 * Recursively climbs the class hierarchy looking for the first @Entity annotation.
	 * @return the kind of the first @Entity found, or null if nothing can be found
	 */
	private static String getRequiredEntityKind(Class<?> clazz)
	{
		// Object.class terminates the climb; primitives/interfaces are not expected here.
		if (clazz == Object.class)
			return null;

		String kind = getKindHere(clazz);
		if (kind != null)
			return kind;
		else
			return getRequiredEntityKind(clazz.getSuperclass());
	}

	/**
	 * Get the kind from the class if the class has an @Entity annotation, otherwise return null.
	 * Checks the Objectify annotation first, then the JPA annotation.
	 */
	private static String getKindHere(Class<?> clazz)
	{
		// Note: each braceless "else" below binds to the INNER "if"
		// (empty annotation name falls back to the simple class name).
		com.googlecode.objectify.annotation.Entity ourAnn = clazz.getAnnotation(com.googlecode.objectify.annotation.Entity.class);
		if (ourAnn != null)
			if (ourAnn.name() != null && ourAnn.name().length() != 0)
				return ourAnn.name();
			else
				return clazz.getSimpleName();

		javax.persistence.Entity jpaAnn = clazz.getAnnotation(javax.persistence.Entity.class);
		if (jpaAnn != null)
			if (jpaAnn.name() != null && jpaAnn.name().length() != 0)
				return jpaAnn.name();
			else
				return clazz.getSimpleName();

		return null;
	}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.tools;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.HarFileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.FileSplit;
import org.apache.hadoop.mapred.InputFormat;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.RecordReader;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.SequenceFileRecordReader;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.lib.NullOutputFormat;
import org.apache.hadoop.mapreduce.JobSubmissionFiles;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
/**
* a archive creation utility.
* This class provides methods that can be used
* to create hadoop archives. For understanding of
* Hadoop archives look at {@link HarFileSystem}.
*/
public class HadoopArchives implements Tool {
private static final Log LOG = LogFactory.getLog(HadoopArchives.class);
private static final String NAME = "har";
static final String SRC_LIST_LABEL = NAME + ".src.list";
static final String DST_DIR_LABEL = NAME + ".dest.path";
static final String TMP_DIR_LABEL = NAME + ".tmp.dir";
static final String JOB_DIR_LABEL = NAME + ".job.dir";
static final String SRC_COUNT_LABEL = NAME + ".src.count";
static final String TOTAL_SIZE_LABEL = NAME + ".total.size";
static final String DST_HAR_LABEL = NAME + ".archive.name";
static final String SRC_PARENT_LABEL = NAME + ".parent.path";
// size of each part file
// its fixed for now.
static final long partSize = 2 * 1024 * 1024 * 1024l;
private static final String usage = "archive"
+ " -archiveName NAME -p <parent path> <src>* <dest>" +
"\n";
private JobConf conf;
/** Installs the configuration, reusing it directly when it is already a JobConf. */
public void setConf(Configuration conf) {
  this.conf = (conf instanceof JobConf)
      ? (JobConf) conf
      : new JobConf(conf, HadoopArchives.class);
}
/** @return the JobConf this tool was configured with. */
public Configuration getConf() {
  return this.conf;
}
/** Creates the archive tool; the configuration is wrapped in a JobConf via setConf. */
public HadoopArchives(Configuration conf) {
  setConf(conf);
}
/**
 * Verifies that every source path exists on its filesystem, failing
 * fast with a FileNotFoundException otherwise.
 */
private static void checkPaths(Configuration conf, List<Path> paths) throws
IOException {
  for (Path path : paths) {
    FileSystem pathFs = path.getFileSystem(conf);
    if (!pathFs.exists(path)) {
      throw new FileNotFoundException("Source " + path + " does not exist.");
    }
  }
}
/**
 * Recursive listing: records fdir itself, then (if it is a directory)
 * all descendants in pre-order. Assumes there are only two kinds of
 * entries: files and directories.
 * @param fs the input filesystem
 * @param fdir the filestatusdir of the path
 * @param out the list of paths output of recursive ls
 * @throws IOException
 */
private void recursivels(FileSystem fs, FileStatusDir fdir, List<FileStatusDir> out)
  throws IOException {
  // Every entry is recorded, directory or not.
  out.add(fdir);
  if (!fdir.getFileStatus().isDir()) {
    return;
  }
  FileStatus[] children = fs.listStatus(fdir.getFileStatus().getPath());
  fdir.setChildren(children);
  for (FileStatus child : children) {
    recursivels(fs, new FileStatusDir(child, null), out);
  }
}
/**
 * Input format of a hadoop archive job responsible for
 * generating splits of the file list
 */
static class HArchiveInputFormat implements InputFormat<LongWritable, Text> {

  /**
   * Generates input splits from the serialized src file list so that each
   * map reads roughly totalSize/numSplits bytes worth of source data.
   * The sequence file stores (size, path-info) records; records are
   * grouped into splits by accumulated size.
   *
   * @throws IOException if the src list or total size is missing from the
   *     job conf, or the sequence file cannot be read
   */
  public InputSplit[] getSplits(JobConf jconf, int numSplits)
      throws IOException {
    String srcfilelist = jconf.get(SRC_LIST_LABEL, "");
    if ("".equals(srcfilelist)) {
      throw new IOException("Unable to get the " +
          "src file for archive generation.");
    }
    long totalSize = jconf.getLong(TOTAL_SIZE_LABEL, -1);
    if (totalSize == -1) {
      throw new IOException("Invalid size of files to archive");
    }
    //we should be safe since this is set by our own code
    Path src = new Path(srcfilelist);
    FileSystem fs = src.getFileSystem(jconf);
    FileStatus fstatus = fs.getFileStatus(src);
    ArrayList<FileSplit> splits = new ArrayList<FileSplit>(numSplits);
    LongWritable key = new LongWritable();
    Text value = new Text();
    SequenceFile.Reader reader = null;
    // the remaining bytes in the file split
    long remaining = fstatus.getLen();
    // the count of sizes calculated till now
    long currentCount = 0L;
    // the endposition of the split
    long lastPos = 0L;
    // the start position of the split
    long startPos = 0L;
    long targetSize = totalSize/numSplits;
    // create splits of size target size so that all the maps
    // have equals sized data to read and write to.
    try {
      reader = new SequenceFile.Reader(fs, src, jconf);
      while(reader.next(key, value)) {
        // Close out the current split once it would exceed the target
        // (never emit an empty split).
        if (currentCount + key.get() > targetSize && currentCount != 0){
          long size = lastPos - startPos;
          splits.add(new FileSplit(src, startPos, size, (String[]) null));
          remaining = remaining - size;
          startPos = lastPos;
          currentCount = 0L;
        }
        currentCount += key.get();
        lastPos = reader.getPosition();
      }
      // the remaining not equal to the target size.
      if (remaining != 0) {
        splits.add(new FileSplit(src, startPos, remaining, (String[])null));
      }
    }
    finally {
      // Guard against reader construction having failed: closing a null
      // reader would throw an NPE and mask the original IOException.
      if (reader != null) {
        reader.close();
      }
    }
    return splits.toArray(new FileSplit[splits.size()]);
  }

  /** Reads back the (size, path-info) records within a split. */
  public RecordReader<LongWritable, Text> getRecordReader(InputSplit split,
      JobConf job, Reporter reporter) throws IOException {
    return new SequenceFileRecordReader<LongWritable, Text>(job,
             (FileSplit)split);
  }
}
/** A valid archive name is a single path component ending in ".har". */
private boolean checkValidName(String name) {
  return new Path(name).depth() == 1 && name.endsWith(".har");
}
/** Returns the path with the greatest depth (linear scan; ties keep the earliest). */
private Path largestDepth(List<Path> paths) {
  Path deepest = paths.get(0);
  for (Path candidate : paths) {
    if (candidate.depth() > deepest.depth()) {
      deepest = candidate;
    }
  }
  return deepest;
}
/**
 * truncate the prefix root from the full path
 * @param fullPath the full path
 * @param root the prefix root to be truncated
 * @return the relative path, or null when fullPath is shallower than root
 */
private Path relPathToRoot(Path fullPath, Path root) {
  // Rebuilt component-by-component (rather than by substring) so this
  // keeps working if Path internals change later.
  Path justRoot = new Path(Path.SEPARATOR);
  int extraDepth = fullPath.depth() - root.depth();
  if (extraDepth == 0) {
    return justRoot;
  }
  if (extraDepth < 0) {
    return null;  // fullPath is not beneath root
  }
  Path rel = new Path(fullPath.getName());
  Path ancestor = fullPath.getParent();
  for (int i = 0; i < extraDepth - 1; i++) {
    rel = new Path(ancestor.getName(), rel);
    ancestor = ancestor.getParent();
  }
  return new Path(justRoot, rel);
}
/**
 * this method writes all the valid top level directories
 * into the srcWriter for indexing. This method is a little
 * tricky. example-
 * for an input with parent path /home/user/ and sources
 * as /home/user/source/dir1, /home/user/source/dir2 - this
 * will output <source, dir, dir1, dir2> (dir means that source is a dir
 * with dir1 and dir2 as children) and <source/dir1, file, null>
 * and <source/dir2, file, null>
 * @param srcWriter the sequence file writer to write the
 * directories to
 * @param paths the source paths provided by the user. They
 * are glob free and have full path (not relative paths)
 * @param parentPath the parent path that you want the archives
 * to be relative to. example - /home/user/dir1 can be archived with
 * parent as /home or /home/user.
 * @throws IOException
 */
private void writeTopLevelDirs(SequenceFile.Writer srcWriter,
    List<Path> paths, Path parentPath) throws IOException {
  // Normalize each source to a directory path: a file contributes its
  // parent directory, a directory contributes itself.
  List<Path> justDirs = new ArrayList<Path>();
  for (Path p: paths) {
    if (!p.getFileSystem(getConf()).isFile(p)) {
      justDirs.add(new Path(p.toUri().getPath()));
    }
    else {
      justDirs.add(new Path(p.getParent().toUri().getPath()));
    }
  }
  /* find all the common parents of paths that are valid archive
   * paths. The below is done so that we do not add a common path
   * twice and also we need to only add valid child of a path that
   * are specified the user.
   */
  TreeMap<String, HashSet<String>> allpaths = new TreeMap<String,
                                              HashSet<String>>();
  /* the largest depth of paths. the max number of times
   * we need to iterate
   */
  Path deepest = largestDepth(paths);
  Path root = new Path(Path.SEPARATOR);
  // Walk each path up towards parentPath one level per iteration,
  // recording parent -> children edges along the way.
  for (int i = parentPath.depth(); i < deepest.depth(); i++) {
    List<Path> parents = new ArrayList<Path>();
    for (Path p: justDirs) {
      if (p.compareTo(root) == 0){
        // reached the filesystem root; nothing more to record
        continue;
      }
      Path parent = p.getParent();
      HashSet<String> children = allpaths.get(parent.toString());
      if (children == null) {
        children = new HashSet<String>();
        allpaths.put(parent.toString(), children);
      }
      children.add(p.getName());
      parents.add(parent);
    }
    justDirs = parents;
  }
  // Emit one record per directory: "<relpath> dir <child1> <child2> ..."
  // (trailing space retained for compatibility with the existing reader).
  for (Map.Entry<String, HashSet<String>> entry : allpaths.entrySet()) {
    Path relPath = relPathToRoot(new Path(entry.getKey()), parentPath);
    if (relPath != null) {
      // StringBuilder: local-only buffer, no synchronization needed.
      StringBuilder sbuff = new StringBuilder(relPath + " dir ");
      for (String child: entry.getValue()) {
        sbuff.append(child + " ");
      }
      srcWriter.append(new LongWritable(0L), new Text(sbuff.toString()));
    }
  }
}
/**
 * A static class that keeps track of the {@code FileStatus} of a path
 * and, when that path is a directory, the statuses of its children.
 */
static class FileStatusDir {

  // status of the path itself; never reassigned after construction
  private final FileStatus fstatus;

  // children of the path, or null when the path is not a directory
  // (or the listing has not yet been attached via setChildren)
  private FileStatus[] children = null;

  /**
   * constructor for FileStatusDir
   * @param fstatus the FileStatus object that maps to this FileStatusDir
   * @param children the children list if fstatus is a directory, else null
   */
  FileStatusDir(FileStatus fstatus, FileStatus[] children) {
    this.fstatus = fstatus;
    this.children = children;
  }

  /**
   * set children of this object
   * @param listStatus the list of children
   */
  public void setChildren(FileStatus[] listStatus) {
    this.children = listStatus;
  }

  /**
   * the FileStatus of this object
   * @return the FileStatus of this object
   */
  FileStatus getFileStatus() {
    return this.fstatus;
  }

  /**
   * the children list of this object, null if no children were set
   * @return the children list, possibly null
   */
  FileStatus[] getChildren() {
    return this.children;
  }
}
/**archive the given source paths into
 * the dest
 * @param parentPath the parent path of all the source paths
 * @param srcPaths the src paths to be archived
 * @param archiveName the name of the archive (ends in ".har")
 * @param dest the dest dir that will contain the archive
 * @throws IOException if the output already exists, staging fails,
 *         or the listing/job submission fails
 */
void archive(Path parentPath, List<Path> srcPaths,
    String archiveName, Path dest) throws IOException {
  checkPaths(conf, srcPaths);
  int numFiles = 0;
  long totalSize = 0;
  FileSystem fs = parentPath.getFileSystem(conf);
  conf.set(DST_HAR_LABEL, archiveName);
  conf.set(SRC_PARENT_LABEL, parentPath.makeQualified(fs).toString());
  Path outputPath = new Path(dest, archiveName);
  FileOutputFormat.setOutputPath(conf, outputPath);
  FileSystem outFs = outputPath.getFileSystem(conf);
  // refuse to clobber an existing archive or write under a file
  if (outFs.exists(outputPath) || outFs.isFile(dest)) {
    throw new IOException("Invalid Output: " + outputPath);
  }
  conf.set(DST_DIR_LABEL, outputPath.toString());
  final String randomId = DistCp.getRandomId();
  Path stagingArea;
  try {
    stagingArea = JobSubmissionFiles.getStagingDir(new JobClient(conf),
        conf);
  } catch (InterruptedException e) {
    // NOTE(review): thread interrupt status is not restored here
    throw new IOException(e);
  }
  Path jobDirectory = new Path(stagingArea,
      NAME + "_" + randomId);
  FsPermission mapredSysPerms =
      new FsPermission(JobSubmissionFiles.JOB_DIR_PERMISSION);
  FileSystem.mkdirs(jobDirectory.getFileSystem(conf), jobDirectory,
      mapredSysPerms);
  conf.set(JOB_DIR_LABEL, jobDirectory.toString());
  //get a tmp directory for input splits
  FileSystem jobfs = jobDirectory.getFileSystem(conf);
  jobfs.mkdirs(jobDirectory);
  Path srcFiles = new Path(jobDirectory, "_har_src_files");
  conf.set(SRC_LIST_LABEL, srcFiles.toString());
  SequenceFile.Writer srcWriter = SequenceFile.createWriter(jobfs, conf,
      srcFiles, LongWritable.class, Text.class,
      SequenceFile.CompressionType.NONE);
  // get the list of files
  // create single list of files and dirs
  try {
    // write the top level dirs in first
    writeTopLevelDirs(srcWriter, srcPaths, parentPath);
    srcWriter.sync();
    // these are the input paths passed
    // from the command line
    // we do a recursive ls on these paths
    // and then write them to the input file
    // one at a time
    for (Path src: srcPaths) {
      ArrayList<FileStatusDir> allFiles = new ArrayList<FileStatusDir>();
      FileStatus fstatus = fs.getFileStatus(src);
      FileStatusDir fdir = new FileStatusDir(fstatus, null);
      recursivels(fs, fdir, allFiles);
      for (FileStatusDir statDir: allFiles) {
        FileStatus stat = statDir.getFileStatus();
        String toWrite = "";
        // the record key is the file length (0 for directories);
        // it is used below to size the map tasks
        long len = stat.isDir()? 0:stat.getLen();
        if (stat.isDir()) {
          // directory record: "<rel-path> dir <child1> <child2> ..."
          toWrite = "" + relPathToRoot(stat.getPath(), parentPath) + " dir ";
          //get the children
          FileStatus[] list = statDir.getChildren();
          StringBuffer sbuff = new StringBuffer();
          sbuff.append(toWrite);
          for (FileStatus stats: list) {
            sbuff.append(stats.getPath().getName() + " ");
          }
          toWrite = sbuff.toString();
        }
        else {
          // file record: "<rel-path> file "
          toWrite += relPathToRoot(stat.getPath(), parentPath) + " file ";
        }
        srcWriter.append(new LongWritable(len), new
            Text(toWrite));
        srcWriter.sync();
        numFiles++;
        totalSize += len;
      }
    }
  } finally {
    srcWriter.close();
  }
  //increase the replication of src files
  jobfs.setReplication(srcFiles, (short) 10);
  conf.setInt(SRC_COUNT_LABEL, numFiles);
  conf.setLong(TOTAL_SIZE_LABEL, totalSize);
  // one map per partSize bytes of input, but ...
  int numMaps = (int)(totalSize/partSize);
  //run atleast one map.
  conf.setNumMapTasks(numMaps == 0? 1:numMaps);
  conf.setNumReduceTasks(1);
  conf.setInputFormat(HArchiveInputFormat.class);
  conf.setOutputFormat(NullOutputFormat.class);
  conf.setMapperClass(HArchivesMapper.class);
  conf.setReducerClass(HArchivesReducer.class);
  conf.setMapOutputKeyClass(IntWritable.class);
  conf.setMapOutputValueClass(Text.class);
  conf.set("hadoop.job.history.user.location", "none");
  FileInputFormat.addInputPath(conf, jobDirectory);
  //make sure no speculative execution is done
  conf.setSpeculativeExecution(false);
  JobClient.runJob(conf);
  //delete the tmp job directory
  try {
    jobfs.delete(jobDirectory, true);
  } catch(IOException ie) {
    // best-effort cleanup: the archive itself succeeded, so only log
    LOG.info("Unable to clean tmp directory " + jobDirectory);
  }
}
/**
 * The mapper for creating hadoop archives. It copies the raw bytes of
 * each source file into a per-partition "part" file and, for every
 * input record, emits one index line keyed by the hash of the relative
 * path, which the reducer turns into the _index/_masterindex files.
 */
static class HArchivesMapper
    implements Mapper<LongWritable, Text, IntWritable, Text> {

  private JobConf conf = null;
  int partId = -1 ;
  Path tmpOutputDir = null;
  Path tmpOutput = null;
  String partname = null;
  Path rootPath = null;
  FSDataOutputStream partStream = null;
  FileSystem destFs = null;
  byte[] buffer;
  int buf_size = 128 * 1024;

  // configure the mapper and create
  // the part file.
  // use map reduce framework to write into
  // tmp files.
  public void configure(JobConf conf) {
    this.conf = conf;
    // this is tightly tied to map reduce
    // since it does not expose an api
    // to get the partition
    partId = conf.getInt("mapred.task.partition", -1);
    // create a file name using the partition
    // we need to write to this directory
    tmpOutputDir = FileOutputFormat.getWorkOutputPath(conf);
    // get the output path and write to the tmp
    // directory
    partname = "part-" + partId;
    tmpOutput = new Path(tmpOutputDir, partname);
    rootPath = (conf.get(SRC_PARENT_LABEL, null) == null) ? null :
        new Path(conf.get(SRC_PARENT_LABEL));
    if (rootPath == null) {
      throw new RuntimeException("Unable to read parent " +
          "path for har from config");
    }
    try {
      destFs = tmpOutput.getFileSystem(conf);
      //this was a stale copy
      if (destFs.exists(tmpOutput)) {
        destFs.delete(tmpOutput, false);
      }
      partStream = destFs.create(tmpOutput);
    } catch(IOException ie) {
      // preserve the cause so the underlying filesystem error is not lost
      throw new RuntimeException("Unable to open output file " + tmpOutput,
          ie);
    }
    buffer = new byte[buf_size];
  }

  // copy raw data.
  public void copyData(Path input, FSDataInputStream fsin,
      FSDataOutputStream fout, Reporter reporter) throws IOException {
    try {
      for (int cbread=0; (cbread = fsin.read(buffer))>= 0;) {
        fout.write(buffer, 0,cbread);
        reporter.progress();
      }
    } finally {
      // only the input is closed here; fout is the shared part stream
      // and is closed exactly once in close()
      fsin.close();
    }
  }

  /** Parsed form of one line of the _har_src_files sequence file. */
  static class MapStat {
    private String pathname;
    private boolean isDir;
    private List<String> children;
    public MapStat(String line) {
      String[] splits = line.split(" ");
      pathname = splits[0];
      if ("dir".equals(splits[1])) {
        isDir = true;
      }
      else {
        isDir = false;
      }
      if (isDir) {
        children = new ArrayList<String>();
        for (int i = 2; i < splits.length; i++) {
          children.add(splits[i]);
        }
      }
    }
  }

  /**
   * get rid of / in the beginning of path
   * @param p the path
   * @param parent the parent to resolve the relative path against
   * @return parent itself for "/", otherwise parent joined with p
   */
  private Path realPath(Path p, Path parent) {
    Path rootPath = new Path(Path.SEPARATOR);
    if (rootPath.compareTo(p) == 0) {
      return parent;
    }
    return new Path(parent, new Path(p.toString().substring(1)));
  }

  // read files from the split input
  // and write it onto the part files.
  // also output hash(name) and string
  // for reducer to create index
  // and masterindex files.
  public void map(LongWritable key, Text value,
      OutputCollector<IntWritable, Text> out,
      Reporter reporter) throws IOException {
    String line = value.toString();
    MapStat mstat = new MapStat(line);
    Path relPath = new Path(mstat.pathname);
    int hash = HarFileSystem.getHarHash(relPath);
    String towrite = null;
    Path srcPath = realPath(relPath, rootPath);
    long startPos = partStream.getPos();
    if (mstat.isDir) {
      // directory index line: "<rel-path> dir none 0 0 <children...>"
      towrite = relPath.toString() + " " + "dir none " + 0 + " " + 0 + " ";
      StringBuffer sbuff = new StringBuffer();
      sbuff.append(towrite);
      for (String child: mstat.children) {
        sbuff.append(child + " ");
      }
      towrite = sbuff.toString();
      //reading directories is also progress
      reporter.progress();
    }
    else {
      // file: copy its bytes into the part file, then emit
      // "<rel-path> file <partname> <start-offset> <length>"
      FileSystem srcFs = srcPath.getFileSystem(conf);
      FileStatus srcStatus = srcFs.getFileStatus(srcPath);
      FSDataInputStream input = srcFs.open(srcStatus.getPath());
      reporter.setStatus("Copying file " + srcStatus.getPath() +
          " to archive.");
      copyData(srcStatus.getPath(), input, partStream, reporter);
      towrite = relPath.toString() + " file " + partname + " " + startPos
          + " " + srcStatus.getLen() + " ";
    }
    out.collect(new IntWritable(hash), new Text(towrite));
  }

  public void close() throws IOException {
    // close the part files.
    partStream.close();
  }
}
// Hardcoding HAR version here because HARFileSystem comes from Hadoop2
// (the reducer writes this string at the head of the _masterindex file)
private static final String HAR_VERSION = "1";
/** the reduce for creating the index and the master index
 * The input arrives sorted by the hash of the archived paths; each
 * value becomes one line of the _index file, and every numIndexes
 * lines a summary entry is appended to the _masterindex file.
 */
static class HArchivesReducer implements Reducer<IntWritable,
    Text, Text, Text> {

  private JobConf conf = null;
  // hash range covered by the masterindex entry being accumulated
  private long startIndex = 0;
  private long endIndex = 0;
  // byte offset into the index file where the current entry started
  private long startPos = 0;
  private Path masterIndex = null;
  private Path index = null;
  private FileSystem fs = null;
  private FSDataOutputStream outStream = null;
  private FSDataOutputStream indexStream = null;
  // emit one masterindex entry per this many index lines
  private int numIndexes = 1000;
  private Path tmpOutputDir = null;
  // index lines written since the last masterindex entry
  private int written = 0;
  // the most recent key (path hash) seen
  private int keyVal = 0;

  // configure: create fresh _index and _masterindex files and write
  // the HAR version header into the masterindex
  public void configure(JobConf conf) {
    this.conf = conf;
    tmpOutputDir = FileOutputFormat.getWorkOutputPath(this.conf);
    masterIndex = new Path(tmpOutputDir, "_masterindex");
    index = new Path(tmpOutputDir, "_index");
    try {
      fs = masterIndex.getFileSystem(conf);
      // remove stale copies from a previous attempt
      if (fs.exists(masterIndex)) {
        fs.delete(masterIndex, false);
      }
      if (fs.exists(index)) {
        fs.delete(index, false);
      }
      indexStream = fs.create(index);
      outStream = fs.create(masterIndex);
      String version = HAR_VERSION + " \n";
      // NOTE(review): getBytes() uses the platform default charset;
      // presumably the HAR reader expects the same — confirm before changing
      outStream.write(version.getBytes());
    } catch(IOException e) {
      throw new RuntimeException(e);
    }
  }

  // create the index and master index. The input to
  // the reduce is already sorted by the hash of the
  // files. SO we just need to write it to the index.
  // We update the masterindex as soon as we update
  // numIndex entries.
  public void reduce(IntWritable key, Iterator<Text> values,
      OutputCollector<Text, Text> out,
      Reporter reporter) throws IOException {
    keyVal = key.get();
    while(values.hasNext()) {
      Text value = values.next();
      String towrite = value.toString() + "\n";
      indexStream.write(towrite.getBytes());
      written++;
      if (written > numIndexes -1) {
        // every 1000 indexes we report status
        reporter.setStatus("Creating index for archives");
        reporter.progress();
        endIndex = keyVal;
        // masterindex entry: "<startHash> <endHash> <startPos> <endPos>"
        String masterWrite = startIndex + " " + endIndex + " " + startPos
            + " " + indexStream.getPos() + " \n" ;
        outStream.write(masterWrite.getBytes());
        startPos = indexStream.getPos();
        startIndex = endIndex;
        written = 0;
      }
    }
  }

  public void close() throws IOException {
    //write the last part of the master index.
    if (written > 0) {
      String masterWrite = startIndex + " " + keyVal + " " + startPos +
          " " + indexStream.getPos() + " \n";
      outStream.write(masterWrite.getBytes());
    }
    // close the streams
    outStream.close();
    indexStream.close();
    // try increasing the replication
    fs.setReplication(index, (short) 5);
    fs.setReplication(masterIndex, (short) 5);
  }
}
/** the main driver for creating the archives
 * it takes at least three command line parameters. The parent path,
 * The src and the dest. It does an lsr on the source paths.
 * The mapper creates archives and the reducer creates
 * the archive index.
 */
public int run(String[] args) throws Exception {
  try {
    Path parentPath = null;
    List<Path> srcPaths = new ArrayList<Path>();
    Path destPath = null;
    String archiveName = null;
    // expected usage:
    //   -archiveName <name.har> -p <parent> [<src> ...] <dest>
    if (args.length < 5) {
      System.out.println(usage);
      throw new IOException("Invalid usage.");
    }
    if (!"-archiveName".equals(args[0])) {
      System.out.println(usage);
      throw new IOException("Archive Name not specified.");
    }
    archiveName = args[1];
    if (!checkValidName(archiveName)) {
      System.out.println(usage);
      throw new IOException("Invalid name for archives. " + archiveName);
    }
    int i = 2;
    //check to see if relative parent has been provided or not
    //this is a required parameter.
    if (! "-p".equals(args[i])) {
      System.out.println(usage);
      throw new IOException("Parent path not specified.");
    }
    parentPath = new Path(args[i+1]);
    i+=2;
    //read the rest of the paths
    // the last argument is the destination; everything before it is a
    // source path relative to parentPath
    for (; i < args.length; i++) {
      if (i == (args.length - 1)) {
        destPath = new Path(args[i]);
      }
      else {
        Path argPath = new Path(args[i]);
        if (argPath.isAbsolute()) {
          System.out.println(usage);
          throw new IOException("source path " + argPath +
              " is not relative to "+ parentPath);
        }
        srcPaths.add(new Path(parentPath, argPath));
      }
    }
    if (srcPaths.size() == 0) {
      // assuming if the user does not specify path for sources
      // the whole parent directory needs to be archived.
      srcPaths.add(parentPath);
    }
    // do a glob on the srcPaths and then pass it on
    List<Path> globPaths = new ArrayList<Path>();
    for (Path p: srcPaths) {
      FileSystem fs = p.getFileSystem(getConf());
      FileStatus[] statuses = fs.globStatus(p);
      if (statuses != null) {
        for (FileStatus status: statuses) {
          globPaths.add(fs.makeQualified(status.getPath()));
        }
      }
    }
    archive(parentPath, globPaths, archiveName, destPath);
  } catch(IOException ie) {
    // report the error on stderr and signal failure to the caller
    System.err.println(ie.getLocalizedMessage());
    return -1;
  }
  return 0;
}
/** CLI entry point: builds a job configuration and runs the tool via ToolRunner. */
public static void main(String[] args) {
  JobConf jobConf = new JobConf(HadoopArchives.class);
  HadoopArchives tool = new HadoopArchives(jobConf);
  int exitCode = 0;
  try {
    exitCode = ToolRunner.run(tool, args);
  } catch (Exception e) {
    LOG.debug("Exception in archives ", e);
    System.err.println(e.getClass().getSimpleName() + " in archives");
    String message = e.getLocalizedMessage();
    if (message == null) {
      e.printStackTrace(System.err);
    } else {
      System.err.println(message);
    }
    System.exit(1);
  }
  System.exit(exitCode);
}
}
| |
/*
* This file is part of the rootshell Project: http://code.google.com/p/RootShell/
*
* Copyright (c) 2014 Stephen Erickson, Chris Ravenscroft
*
* This code is dual-licensed under the terms of the Apache License Version 2.0 and
* the terms of the General Public License (GPL) Version 2.
* You may use this code according to either of these licenses as is most appropriate
* for your project on a case-by-case basis.
*
* The terms of each license can be found in the root directory of this project's repository as well as at:
*
* * http://www.apache.org/licenses/LICENSE-2.0
* * http://www.gnu.org/licenses/gpl-2.0.txt
*
* Unless required by applicable law or agreed to in writing, software
* distributed under these Licenses is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See each License for the specific language governing permissions and
* limitations under that License.
*/
package com.stericson.rootshell;
import android.util.Log;
import com.stericson.rootshell.exceptions.RootDeniedException;
import com.stericson.rootshell.execution.Command;
import com.stericson.rootshell.execution.Shell;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.TimeoutException;
@SuppressWarnings("JavaDoc")
public class RootShell {
// --------------------
// # Public Variables #
// --------------------
public static boolean debugMode = false;
public static final String version = "rootshell v1.6";
/**
* Setting this to false will disable the handler that is used
* by default for the 3 callback methods for Command.
* <p/>
* By disabling this all callbacks will be called from a thread other than
* the main UI thread.
*/
public static final boolean handlerEnabled = true;
/**
* Setting this will change the default command timeout.
* <p/>
* The default is 20000ms
*/
public static final int defaultCommandTimeout = 20000;
public enum LogLevel {
VERBOSE,
ERROR,
DEBUG,
WARN
}
// --------------------
// # Public Methods #
// --------------------
/**
* This will close all open shells.
*/
public static void closeAllShells() throws IOException {
Shell.closeAll();
}
/**
* This will close the custom shell that you opened.
*/
public static void closeCustomShell() throws IOException {
Shell.closeCustomShell();
}
/**
* This will close either the root shell or the standard shell depending on what you specify.
*
* @param root a <code>boolean</code> to specify whether to close the root shell or the standard shell.
*/
public static void closeShell(boolean root) throws IOException {
if (root) {
Shell.closeRootShell();
} else {
Shell.closeShell();
}
}
/**
* Use this to check whether or not a file exists on the filesystem.
*
* @param file String that represent the file, including the full path to the
* file and its name.
* @return a boolean that will indicate whether or not the file exists.
*/
public static boolean exists(final String file) {
return exists(file, false);
}
/**
* Use this to check whether or not a file OR directory exists on the filesystem.
*
* @param file String that represent the file OR the directory, including the full path to the
* file and its name.
* @param isDir boolean that represent whether or not we are looking for a directory
* @return a boolean that will indicate whether or not the file exists.
*/
public static boolean exists(final String file, boolean isDir) {
final List<String> result = new ArrayList<>();
String cmdToExecute = "ls " + (isDir ? "-d " : " ");
Command command = new Command(0, false, cmdToExecute + file) {
@Override
public void commandOutput(int id, String line) {
RootShell.log(line);
result.add(line);
super.commandOutput(id, line);
}
};
try {
//Try without root...
RootShell.getShell(false).add(command);
commandWait(RootShell.getShell(false), command);
} catch (Exception e) {
RootShell.log("Exception: " + e);
return false;
}
for (String line : result) {
if (line.trim().equals(file)) {
return true;
}
}
result.clear();
command = new Command(0, false, cmdToExecute + file) {
@Override
public void commandOutput(int id, String line) {
RootShell.log(line);
result.add(line);
super.commandOutput(id, line);
}
};
try {
RootShell.getShell(true).add(command);
commandWait(RootShell.getShell(true), command);
} catch (Exception e) {
RootShell.log("Exception: " + e);
return false;
}
//Avoid concurrent modification...
List<String> final_result = new ArrayList<>();
//noinspection CollectionAddAllCanBeReplacedWithConstructor
final_result.addAll(result);
for (String line : final_result) {
if (line.trim().equals(file)) {
return true;
}
}
return false;
}
/**
* @param binaryName String that represent the binary to find.
* @param singlePath boolean that represents whether to return a single path or multiple.
*
* @return <code>List<String></code> containing the locations the binary was found at.
*/
public static List<String> findBinary(String binaryName, boolean singlePath) {
return findBinary(binaryName, null, singlePath);
}
/**
* @param binaryName <code>String</code> that represent the binary to find.
* @param searchPaths <code>List<String></code> which contains the paths to search for this binary in.
* @param singlePath boolean that represents whether to return a single path or multiple.
*
* @return <code>List<String></code> containing the locations the binary was found at.
*/
public static List<String> findBinary(final String binaryName, List<String> searchPaths, boolean singlePath) {
final List<String> foundPaths = new ArrayList<>();
boolean found = false;
if(searchPaths == null)
{
searchPaths = RootShell.getPath();
}
RootShell.log("Checking for " + binaryName);
//Try to use stat first
try {
for (String path : searchPaths) {
if(!path.endsWith("/"))
{
path += "/";
}
final String currentPath = path;
Command cc = new Command(0, false, "stat " + path + binaryName) {
@Override
public void commandOutput(int id, String line) {
if (line.contains("File: ") && line.contains(binaryName)) {
foundPaths.add(currentPath);
RootShell.log(binaryName + " was found here: " + currentPath);
}
RootShell.log(line);
super.commandOutput(id, line);
}
};
cc = RootShell.getShell(false).add(cc);
commandWait(RootShell.getShell(false), cc);
if(foundPaths.size() > 0 && singlePath) {
break;
}
}
found = !foundPaths.isEmpty();
} catch (Exception e) {
RootShell.log(binaryName + " was not found, more information MAY be available with Debugging on.");
}
if (!found) {
RootShell.log("Trying second method");
for (String path : searchPaths) {
if(!path.endsWith("/"))
{
path += "/";
}
if (RootShell.exists(path + binaryName)) {
RootShell.log(binaryName + " was found here: " + path);
foundPaths.add(path);
if(foundPaths.size() > 0 && singlePath) {
break;
}
} else {
RootShell.log(binaryName + " was NOT found here: " + path);
}
}
}
Collections.reverse(foundPaths);
return foundPaths;
}
/**
* This will open or return, if one is already open, a custom shell, you are responsible for managing the shell, reading the output
* and for closing the shell when you are done using it.
*
* @param shellPath a <code>String</code> to Indicate the path to the shell that you want to open.
* @param timeout an <code>int</code> to Indicate the length of time before giving up on opening a shell.
* @throws TimeoutException
* @throws com.stericson.rootshell.exceptions.RootDeniedException
* @throws IOException
*/
public static Shell getCustomShell(String shellPath, int timeout) throws IOException, TimeoutException, RootDeniedException
{
//return rootshell.getCustomShell(shellPath, timeout);
return Shell.startCustomShell(shellPath, timeout);
}
/**
* This will return the environment variable PATH
*
* @return <code>List<String></code> A List of Strings representing the environment variable $PATH
*/
@SuppressWarnings("ConstantConditions")
public static List<String> getPath() {
return Arrays.asList(System.getenv("PATH").split(":"));
}
/**
* This will open or return, if one is already open, a shell, you are responsible for managing the shell, reading the output
* and for closing the shell when you are done using it.
*
* @param root a <code>boolean</code> to Indicate whether or not you want to open a root shell or a standard shell
* @param timeout an <code>int</code> to Indicate the length of time to wait before giving up on opening a shell.
* @param shellContext the context to execute the shell with
* @param retry a <code>int</code> to indicate how many times the ROOT shell should try to open with root priviliges...
*/
public static Shell getShell(boolean root, int timeout, Shell.ShellContext shellContext, int retry) throws IOException, TimeoutException, RootDeniedException {
if (root) {
return Shell.startRootShell(timeout, shellContext, retry);
} else {
return Shell.startShell(timeout);
}
}
/**
* This will open or return, if one is already open, a shell, you are responsible for managing the shell, reading the output
* and for closing the shell when you are done using it.
*
* @param root a <code>boolean</code> to Indicate whether or not you want to open a root shell or a standard shell
* @param timeout an <code>int</code> to Indicate the length of time to wait before giving up on opening a shell.
* @param shellContext the context to execute the shell with
*/
@SuppressWarnings("unused")
public static Shell getShell(boolean root, int timeout, Shell.ShellContext shellContext) throws IOException, TimeoutException, RootDeniedException {
return getShell(root, timeout, shellContext, 3);
}
/**
* This will open or return, if one is already open, a shell, you are responsible for managing the shell, reading the output
* and for closing the shell when you are done using it.
*
* @param root a <code>boolean</code> to Indicate whether or not you want to open a root shell or a standard shell
* @param shellContext the context to execute the shell with
*/
@SuppressWarnings("unused")
public static Shell getShell(boolean root, Shell.ShellContext shellContext) throws IOException, TimeoutException, RootDeniedException {
return getShell(root, 0, shellContext, 3);
}
/**
* This will open or return, if one is already open, a shell, you are responsible for managing the shell, reading the output
* and for closing the shell when you are done using it.
*
* @param root a <code>boolean</code> to Indicate whether or not you want to open a root shell or a standard shell
* @param timeout an <code>int</code> to Indicate the length of time to wait before giving up on opening a shell.
*/
public static Shell getShell(boolean root, int timeout) throws IOException, TimeoutException, RootDeniedException {
return getShell(root, timeout, Shell.defaultContext, 3);
}
/**
* This will open or return, if one is already open, a shell, you are responsible for managing the shell, reading the output
* and for closing the shell when you are done using it.
*
* @param root a <code>boolean</code> to Indicate whether or not you want to open a root shell or a standard shell
*/
public static Shell getShell(boolean root) throws IOException, TimeoutException, RootDeniedException {
return RootShell.getShell(root, 0);
}
/**
* @return <code>true</code> if your app has been given root access.
* @throws TimeoutException if this operation times out. (cannot determine if access is given)
*/
@SuppressWarnings("unused")
public static boolean isAccessGiven() {
return isAccessGiven(0, 3);
}
/**
* Control how many time of retries should request
*
* @param timeout The timeout
* @param retries The number of retries
*
* @return <code>true</code> if your app has been given root access.
* @throws TimeoutException if this operation times out. (cannot determine if access is given)
*/
public static boolean isAccessGiven(int timeout, int retries) {
final Set<String> ID = new HashSet<>();
final int IAG = 158;
try {
RootShell.log("Checking for Root access");
Command command = new Command(IAG, false, "id") {
@Override
public void commandOutput(int id, String line) {
if (id == IAG) {
ID.addAll(Arrays.asList(line.split(" ")));
}
super.commandOutput(id, line);
}
};
Shell shell = Shell.startRootShell(timeout, retries);
shell.add(command);
commandWait(shell, command);
//parse the userid
for (String userid : ID) {
RootShell.log(userid);
if (userid.toLowerCase().contains("uid=0")) {
RootShell.log("Access Given");
return true;
}
}
return false;
} catch (Exception e) {
e.printStackTrace();
return false;
}
}
/**
* @return <code>true</code> if BusyBox was found.
*/
public static boolean isBusyboxAvailable()
{
return isBusyboxAvailable(false);
}
/**
* @return <code>true</code> if BusyBox or Toybox was found.
*/
public static boolean isBusyboxAvailable(boolean includeToybox)
{
if(includeToybox) {
return (findBinary("busybox", true)).size() > 0 || (findBinary("toybox", true)).size() > 0;
} else {
return (findBinary("busybox", true)).size() > 0;
}
}
/**
* @return <code>true</code> if su was found.
*/
public static boolean isRootAvailable() {
return (findBinary("su", true)).size() > 0;
}
/**
* This method allows you to output debug messages only when debugging is on. This will allow
* you to add a debug option to your app, which by default can be left off for performance.
* However, when you need debugging information, a simple switch can enable it and provide you
* with detailed logging.
* <p/>
* This method handles whether or not to log the information you pass it depending whether or
* not rootshell.debugMode is on. So you can use this and not have to worry about handling it
* yourself.
*
* @param msg The message to output.
*/
public static void log(String msg) {
log(null, msg, LogLevel.DEBUG, null);
}
/**
* This method allows you to output debug messages only when debugging is on. This will allow
* you to add a debug option to your app, which by default can be left off for performance.
* However, when you need debugging information, a simple switch can enable it and provide you
* with detailed logging.
* <p/>
* This method handles whether or not to log the information you pass it depending whether or
* not rootshell.debugMode is on. So you can use this and not have to worry about handling it
* yourself.
*
* @param TAG Optional parameter to define the tag that the Log will use.
* @param msg The message to output.
*/
public static void log(String TAG, String msg) {
log(TAG, msg, LogLevel.DEBUG, null);
}
/**
* This method allows you to output debug messages only when debugging is on. This will allow
* you to add a debug option to your app, which by default can be left off for performance.
* However, when you need debugging information, a simple switch can enable it and provide you
* with detailed logging.
* <p/>
* This method handles whether or not to log the information you pass it depending whether or
* not rootshell.debugMode is on. So you can use this and not have to worry about handling it
* yourself.
*
* @param msg The message to output.
* @param type The type of log, 1 for verbose, 2 for error, 3 for debug, 4 for warn
* @param e The exception that was thrown (Needed for errors)
*/
public static void log(String msg, LogLevel type, Exception e) {
log(null, msg, type, e);
}
/**
* This method allows you to check whether logging is enabled.
* Yes, it has a goofy name, but that's to keep it as short as possible.
* After all writing logging calls should be painless.
* This method exists to save Android going through the various Java layers
* that are traversed any time a string is created (i.e. what you are logging)
* <p/>
* Example usage:
* if(islog) {
* StringBuilder sb = new StringBuilder();
* // ...
* // build string
* // ...
* log(sb.toString());
* }
*
* @return true if logging is enabled
*/
@SuppressWarnings("unused")
public static boolean islog() {
return debugMode;
}
/**
* This method allows you to output debug messages only when debugging is on. This will allow
* you to add a debug option to your app, which by default can be left off for performance.
* However, when you need debugging information, a simple switch can enable it and provide you
* with detailed logging.
* <p/>
* This method handles whether or not to log the information you pass it depending whether or
* not rootshell.debugMode is on. So you can use this and not have to worry about handling it
* yourself.
*
* @param TAG Optional parameter to define the tag that the Log will use.
* @param msg The message to output.
* @param type The type of log, 1 for verbose, 2 for error, 3 for debug
* @param e The exception that was thrown (Needed for errors)
*/
public static void log(String TAG, String msg, LogLevel type, Exception e) {
if (msg != null && !msg.equals("")) {
if (debugMode) {
if (TAG == null) {
TAG = version;
}
switch (type) {
case VERBOSE:
Log.v(TAG, msg);
break;
case ERROR:
Log.e(TAG, msg, e);
break;
case DEBUG:
Log.d(TAG, msg);
break;
case WARN:
Log.w(TAG, msg);
break;
}
}
}
}
    // --------------------
    // # Private Methods  #
    // --------------------
@SuppressWarnings("RedundantThrows")
private static void commandWait(Shell shell, Command cmd) throws Exception {
while (!cmd.isFinished()) {
RootShell.log(version, shell.getCommandQueuePositionString(cmd));
RootShell.log(version, "Processed " + cmd.totalOutputProcessed + " of " + cmd.totalOutput + " output from command.");
//noinspection SynchronizationOnLocalVariableOrMethodParameter
synchronized (cmd) {
try {
if (!cmd.isFinished()) {
cmd.wait(2000);
}
} catch (InterruptedException e) {
e.printStackTrace();
}
}
if (!cmd.isExecuting() && !cmd.isFinished()) {
if (!shell.isExecuting && !shell.isReading) {
RootShell.log(version, "Waiting for a command to be executed in a shell that is not executing and not reading! \n\n Command: " + cmd.getCommand());
Exception e = new Exception();
e.setStackTrace(Thread.currentThread().getStackTrace());
e.printStackTrace();
} else if (shell.isExecuting && !shell.isReading) {
RootShell.log(version, "Waiting for a command to be executed in a shell that is executing but not reading! \n\n Command: " + cmd.getCommand());
Exception e = new Exception();
e.setStackTrace(Thread.currentThread().getStackTrace());
e.printStackTrace();
} else {
RootShell.log(version, "Waiting for a command to be executed in a shell that is not reading! \n\n Command: " + cmd.getCommand());
Exception e = new Exception();
e.setStackTrace(Thread.currentThread().getStackTrace());
e.printStackTrace();
}
}
}
}
}
| |
/*
* Copyright (c) 2008-2020, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.spring;
import com.hazelcast.client.HazelcastClient;
import com.hazelcast.client.LoadBalancer;
import com.hazelcast.client.config.ClientCloudConfig;
import com.hazelcast.client.config.ClientConfig;
import com.hazelcast.client.config.ClientConnectionStrategyConfig;
import com.hazelcast.client.config.ClientConnectionStrategyConfig.ReconnectMode;
import com.hazelcast.client.config.ClientFlakeIdGeneratorConfig;
import com.hazelcast.client.config.ClientIcmpPingConfig;
import com.hazelcast.client.config.ClientMetricsConfig;
import com.hazelcast.client.config.ClientNetworkConfig;
import com.hazelcast.client.config.ClientReliableTopicConfig;
import com.hazelcast.client.config.ClientUserCodeDeploymentConfig;
import com.hazelcast.client.config.ConnectionRetryConfig;
import com.hazelcast.client.config.ProxyFactoryConfig;
import com.hazelcast.client.impl.clientside.HazelcastClientProxy;
import com.hazelcast.client.util.RoundRobinLB;
import com.hazelcast.collection.IList;
import com.hazelcast.collection.IQueue;
import com.hazelcast.collection.ISet;
import com.hazelcast.config.AwsConfig;
import com.hazelcast.config.EntryListenerConfig;
import com.hazelcast.config.EvictionPolicy;
import com.hazelcast.config.InMemoryFormat;
import com.hazelcast.config.IndexConfig;
import com.hazelcast.config.IndexType;
import com.hazelcast.config.MaxSizePolicy;
import com.hazelcast.config.NearCacheConfig;
import com.hazelcast.config.NearCachePreloaderConfig;
import com.hazelcast.config.QueryCacheConfig;
import com.hazelcast.config.SerializationConfig;
import com.hazelcast.config.SerializerConfig;
import com.hazelcast.core.Hazelcast;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.cp.IAtomicLong;
import com.hazelcast.cp.IAtomicReference;
import com.hazelcast.cp.ICountDownLatch;
import com.hazelcast.cp.ISemaphore;
import com.hazelcast.map.IMap;
import com.hazelcast.multimap.MultiMap;
import com.hazelcast.security.Credentials;
import com.hazelcast.test.annotation.QuickTest;
import com.hazelcast.topic.ITopic;
import com.hazelcast.topic.TopicOverloadPolicy;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import javax.annotation.Resource;
import java.nio.ByteOrder;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import static com.hazelcast.config.NearCacheConfig.LocalUpdatePolicy.CACHE_ON_UPDATE;
import static com.hazelcast.test.HazelcastTestSupport.assertContains;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
@RunWith(CustomSpringJUnit4ClassRunner.class)
@ContextConfiguration(locations = {"node-client-applicationContext-hazelcast.xml"})
@Category(QuickTest.class)
/**
 * Verifies that Hazelcast client beans declared in the Spring application context
 * are created with the configuration values given in the XML.
 */
public class TestClientApplicationContext {

    // Clients wired by the Spring context; each exercises a different slice of ClientConfig.
    @Resource(name = "client")
    private HazelcastClientProxy client;

    @Resource(name = "client2")
    private HazelcastClientProxy client2;

    @Resource(name = "client3")
    private HazelcastClientProxy client3;

    @Resource(name = "client4")
    private HazelcastClientProxy client4;

    @Resource(name = "client5")
    private HazelcastClientProxy client5;

    @Resource(name = "client6")
    private HazelcastClientProxy client6;

    @Resource(name = "client7-empty-serialization-config")
    private HazelcastClientProxy client7;

    @Resource(name = "client8")
    private HazelcastClientProxy client8;

    @Resource(name = "client9-user-code-deployment-test")
    private HazelcastClientProxy userCodeDeploymentTestClient;

    @Resource(name = "client10-flakeIdGenerator")
    private HazelcastClientProxy client10;

    @Resource(name = "client11-icmp-ping")
    private HazelcastClientProxy icmpPingTestClient;

    @Resource(name = "client12-hazelcast-cloud")
    private HazelcastClientProxy hazelcastCloudClient;

    @Resource(name = "client13-exponential-connection-retry")
    private HazelcastClientProxy connectionRetryClient;

    @Resource(name = "client14-reliable-topic")
    private HazelcastClientProxy hazelcastReliableTopic;

    @Resource(name = "client16-name-and-labels")
    private HazelcastClientProxy namedClient;

    @Resource(name = "client17-backupAckToClient")
    private HazelcastClientProxy backupAckToClient;

    @Resource(name = "client18-metrics")
    private HazelcastClientProxy metricsClient;

    // The member instance the clients connect to.
    @Resource(name = "instance")
    private HazelcastInstance instance;

    // Distributed data structures resolved through Spring; parameterized to avoid raw types.
    @Resource(name = "map1")
    private IMap<Object, Object> map1;

    @Resource(name = "map2")
    private IMap<Object, Object> map2;

    @Resource(name = "multiMap")
    private MultiMap<Object, Object> multiMap;

    @Resource(name = "queue")
    private IQueue<Object> queue;

    @Resource(name = "topic")
    private ITopic<Object> topic;

    @Resource(name = "set")
    private ISet<Object> set;

    @Resource(name = "list")
    private IList<Object> list;

    @Resource(name = "executorService")
    private ExecutorService executorService;

    @Resource(name = "atomicLong")
    private IAtomicLong atomicLong;

    @Resource(name = "atomicReference")
    private IAtomicReference<Object> atomicReference;

    @Resource(name = "countDownLatch")
    private ICountDownLatch countDownLatch;

    @Resource(name = "semaphore")
    private ISemaphore semaphore;

    @Resource(name = "reliableTopic")
    private ITopic<Object> reliableTopic;

    @Autowired
    private Credentials credentials;

    /** Ensures no client or member instances leak into or out of this test class. */
    @BeforeClass
    @AfterClass
    public static void start() {
        HazelcastClient.shutdownAll();
        Hazelcast.shutdownAll();
    }

    /** Basic wiring: properties, network timeout, serialization, proxy factories, load balancer, near cache. */
    @Test
    public void testClient() {
        assertNotNull(client);
        assertNotNull(client2);
        assertNotNull(client3);

        ClientConfig config = client.getClientConfig();
        assertEquals("13", config.getProperty("hazelcast.client.retry.count"));
        assertEquals(1000, config.getNetworkConfig().getConnectionTimeout());

        // Entries written through either client must be visible on the member.
        client.getMap("default").put("Q", "q");
        client2.getMap("default").put("X", "x");
        IMap<Object, Object> map = instance.getMap("default");
        assertEquals("q", map.get("Q"));
        assertEquals("x", map.get("X"));

        ClientConfig config3 = client3.getClientConfig();
        SerializationConfig serConf = config3.getSerializationConfig();
        assertEquals(ByteOrder.BIG_ENDIAN, serConf.getByteOrder());
        assertFalse(serConf.isAllowUnsafe());
        assertFalse(serConf.isCheckClassDefErrors());
        assertFalse(serConf.isEnableCompression());
        assertFalse(serConf.isEnableSharedObject());
        assertFalse(serConf.isUseNativeByteOrder());
        assertEquals(10, serConf.getPortableVersion());

        Map<Integer, String> map1 = serConf.getDataSerializableFactoryClasses();
        assertNotNull(map1);
        assertTrue(map1.containsKey(1));
        assertEquals("com.hazelcast.spring.serialization.DummyDataSerializableFactory", map1.get(1));

        Map<Integer, String> portableFactoryClasses = serConf.getPortableFactoryClasses();
        assertNotNull(portableFactoryClasses);
        assertTrue(portableFactoryClasses.containsKey(2));
        assertEquals("com.hazelcast.spring.serialization.DummyPortableFactory", portableFactoryClasses.get(2));

        Collection<SerializerConfig> serializerConfigs = serConf.getSerializerConfigs();
        assertNotNull(serializerConfigs);
        SerializerConfig serializerConfig = serializerConfigs.iterator().next();
        assertNotNull(serializerConfig);
        assertEquals("com.hazelcast.nio.serialization.CustomSerializationTest$FooXmlSerializer", serializerConfig.getClassName());
        assertEquals("com.hazelcast.nio.serialization.CustomSerializationTest$Foo", serializerConfig.getTypeClassName());

        List<ProxyFactoryConfig> proxyFactoryConfigs = config3.getProxyFactoryConfigs();
        assertNotNull(proxyFactoryConfigs);
        ProxyFactoryConfig proxyFactoryConfig = proxyFactoryConfigs.get(0);
        assertNotNull(proxyFactoryConfig);
        assertEquals("com.hazelcast.spring.DummyProxyFactory", proxyFactoryConfig.getClassName());
        assertEquals("MyService", proxyFactoryConfig.getService());

        LoadBalancer loadBalancer = config3.getLoadBalancer();
        assertNotNull(loadBalancer);
        assertTrue(loadBalancer instanceof RoundRobinLB);

        NearCacheConfig nearCacheConfig = config3.getNearCacheConfig("default");
        assertNotNull(nearCacheConfig);
        assertEquals(1, nearCacheConfig.getTimeToLiveSeconds());
        assertEquals(70, nearCacheConfig.getMaxIdleSeconds());
        assertEquals(EvictionPolicy.LRU, nearCacheConfig.getEvictionConfig().getEvictionPolicy());
        assertEquals(4000, nearCacheConfig.getEvictionConfig().getSize());
        assertTrue(nearCacheConfig.isInvalidateOnChange());
        assertFalse(nearCacheConfig.isSerializeKeys());
        assertEquals(CACHE_ON_UPDATE, nearCacheConfig.getLocalUpdatePolicy());
    }

    /** AWS discovery settings must be read from the XML even when the config is disabled. */
    @Test
    public void testAwsClientConfig() {
        assertNotNull(client4);
        ClientConfig config = client4.getClientConfig();
        ClientNetworkConfig networkConfig = config.getNetworkConfig();
        AwsConfig awsConfig = networkConfig.getAwsConfig();
        assertFalse(awsConfig.isEnabled());
        assertTrue(awsConfig.isUsePublicIp());
        assertEquals("sample-access-key", awsConfig.getProperty("access-key"));
        assertEquals("sample-secret-key", awsConfig.getProperty("secret-key"));
        assertEquals("sample-region", awsConfig.getProperty("region"));
        assertEquals("sample-group", awsConfig.getProperty("security-group-name"));
        assertEquals("sample-tag-key", awsConfig.getProperty("tag-key"));
        assertEquals("sample-tag-value", awsConfig.getProperty("tag-value"));
    }

    @Test
    public void testUnlimitedConnectionAttempt() {
        assertNotNull(client5);
        ClientConfig config = client5.getClientConfig();
        assertEquals(1000, config.getConnectionStrategyConfig().getConnectionRetryConfig().getClusterConnectTimeoutMillis());
    }

    /** All distributed objects declared in the context must resolve and keep their declared names. */
    @Test
    public void testHazelcastInstances() {
        assertNotNull(map1);
        assertNotNull(map2);
        assertNotNull(multiMap);
        assertNotNull(queue);
        assertNotNull(topic);
        assertNotNull(set);
        assertNotNull(list);
        assertNotNull(executorService);
        assertNotNull(atomicLong);
        assertNotNull(atomicReference);
        assertNotNull(countDownLatch);
        assertNotNull(semaphore);
        assertNotNull(reliableTopic);
        assertEquals("map1", map1.getName());
        assertEquals("map2", map2.getName());
        assertEquals("multiMap", multiMap.getName());
        assertEquals("queue", queue.getName());
        assertEquals("topic", topic.getName());
        assertEquals("set", set.getName());
        assertEquals("list", list.getName());
        assertEquals("atomicLong", atomicLong.getName());
        assertEquals("atomicReference", atomicReference.getName());
        assertEquals("countDownLatch", countDownLatch.getName());
        assertEquals("semaphore", semaphore.getName());
        assertEquals("reliableTopic", reliableTopic.getName());
    }

    /** An empty serialization element must fall back to Hazelcast's documented defaults. */
    @Test
    public void testDefaultSerializationConfig() {
        ClientConfig config7 = client7.getClientConfig();
        SerializationConfig serConf = config7.getSerializationConfig();
        assertEquals(ByteOrder.BIG_ENDIAN, serConf.getByteOrder());
        assertFalse(serConf.isAllowUnsafe());
        assertTrue(serConf.isCheckClassDefErrors());
        assertFalse(serConf.isEnableCompression());
        assertTrue(serConf.isEnableSharedObject());
        assertFalse(serConf.isUseNativeByteOrder());
        assertEquals(0, serConf.getPortableVersion());
    }

    @Test
    public void testClientNearCacheEvictionPolicies() {
        ClientConfig config = client3.getClientConfig();
        assertEquals(EvictionPolicy.LFU, getNearCacheEvictionPolicy("lfuNearCacheEviction", config));
        assertEquals(EvictionPolicy.LRU, getNearCacheEvictionPolicy("lruNearCacheEviction", config));
        assertEquals(EvictionPolicy.RANDOM, getNearCacheEvictionPolicy("randomNearCacheEviction", config));
        assertEquals(EvictionPolicy.NONE, getNearCacheEvictionPolicy("noneNearCacheEviction", config));
    }

    @Test
    public void testNearCachePreloader() {
        NearCachePreloaderConfig preloaderConfig = client3.getClientConfig()
                .getNearCacheConfig("preloader")
                .getPreloaderConfig();
        assertTrue(preloaderConfig.isEnabled());
        assertEquals("/tmp/preloader", preloaderConfig.getDirectory());
        assertEquals(23, preloaderConfig.getStoreInitialDelaySeconds());
        assertEquals(42, preloaderConfig.getStoreIntervalSeconds());
    }

    @Test
    public void testUserCodeDeploymentConfig() {
        ClientConfig config = userCodeDeploymentTestClient.getClientConfig();
        ClientUserCodeDeploymentConfig userCodeDeploymentConfig = config.getUserCodeDeploymentConfig();
        List<String> classNames = userCodeDeploymentConfig.getClassNames();
        assertFalse(userCodeDeploymentConfig.isEnabled());
        assertEquals(2, classNames.size());
        assertTrue(classNames.contains("SampleClassName1"));
        assertTrue(classNames.contains("SampleClassName2"));
        List<String> jarPaths = userCodeDeploymentConfig.getJarPaths();
        assertEquals(1, jarPaths.size());
        assertTrue(jarPaths.contains("/User/jar/path/test.jar"));
    }

    /** Shorthand for reading the eviction policy of the named near cache. */
    private EvictionPolicy getNearCacheEvictionPolicy(String mapName, ClientConfig clientConfig) {
        return clientConfig.getNearCacheConfig(mapName).getEvictionConfig().getEvictionPolicy();
    }

    @Test
    public void testFullQueryCacheConfig() throws Exception {
        ClientConfig config = client6.getClientConfig();
        QueryCacheConfig queryCacheConfig = getQueryCacheConfig(config);
        EntryListenerConfig entryListenerConfig = queryCacheConfig.getEntryListenerConfigs().get(0);

        assertTrue(entryListenerConfig.isIncludeValue());
        assertFalse(entryListenerConfig.isLocal());
        assertEquals("com.hazelcast.spring.DummyEntryListener", entryListenerConfig.getClassName());
        assertFalse(queryCacheConfig.isIncludeValue());

        assertEquals("my-query-cache-1", queryCacheConfig.getName());
        assertEquals(12, queryCacheConfig.getBatchSize());
        assertEquals(33, queryCacheConfig.getBufferSize());
        assertEquals(12, queryCacheConfig.getDelaySeconds());
        assertEquals(InMemoryFormat.OBJECT, queryCacheConfig.getInMemoryFormat());
        assertTrue(queryCacheConfig.isCoalesce());
        assertFalse(queryCacheConfig.isPopulate());
        assertEquals("__key > 12", queryCacheConfig.getPredicateConfig().getSql());
        assertEquals(EvictionPolicy.LRU, queryCacheConfig.getEvictionConfig().getEvictionPolicy());
        assertEquals(MaxSizePolicy.ENTRY_COUNT, queryCacheConfig.getEvictionConfig().getMaxSizePolicy());
        assertEquals(111, queryCacheConfig.getEvictionConfig().getSize());

        // Two indexes are declared: an unnamed hash index and a named sorted index.
        assertEquals(2, queryCacheConfig.getIndexConfigs().size());

        IndexConfig hashIndex = queryCacheConfig.getIndexConfigs().get(0);
        assertEquals(IndexType.HASH, hashIndex.getType());
        assertNull(hashIndex.getName());
        assertEquals(1, hashIndex.getAttributes().size());
        assertEquals("name", hashIndex.getAttributes().get(0));

        IndexConfig sortedIndex = queryCacheConfig.getIndexConfigs().get(1);
        assertEquals(IndexType.SORTED, sortedIndex.getType());
        assertEquals("sortedIndex", sortedIndex.getName());
        assertEquals(2, sortedIndex.getAttributes().size());
        assertEquals("age", sortedIndex.getAttributes().get(0));
        assertEquals("name", sortedIndex.getAttributes().get(1));
    }

    @Test
    public void testClientConnectionStrategyConfig() {
        ClientConnectionStrategyConfig connectionStrategyConfig = client8.getClientConfig().getConnectionStrategyConfig();
        assertTrue(connectionStrategyConfig.isAsyncStart());
        assertEquals(ReconnectMode.ASYNC, connectionStrategyConfig.getReconnectMode());
    }

    @Test
    public void testFlakeIdGeneratorConfig() {
        Map<String, ClientFlakeIdGeneratorConfig> configMap = client10.getClientConfig().getFlakeIdGeneratorConfigMap();
        assertEquals(1, configMap.size());
        ClientFlakeIdGeneratorConfig config = configMap.values().iterator().next();
        assertEquals("gen1", config.getName());
        assertEquals(3, config.getPrefetchCount());
        assertEquals(3000L, config.getPrefetchValidityMillis());
    }

    @Test
    public void testClientIcmpConfig() {
        ClientIcmpPingConfig icmpPingConfig = icmpPingTestClient.getClientConfig()
                .getNetworkConfig().getClientIcmpPingConfig();
        assertFalse(icmpPingConfig.isEnabled());
        assertEquals(2000, icmpPingConfig.getTimeoutMilliseconds());
        assertEquals(3000, icmpPingConfig.getIntervalMilliseconds());
        assertEquals(50, icmpPingConfig.getTtl());
        assertEquals(5, icmpPingConfig.getMaxAttempts());
        assertFalse(icmpPingConfig.isEchoFailFastOnStartup());
    }

    @Test
    public void testCloudConfig() {
        ClientCloudConfig cloudConfig = hazelcastCloudClient.getClientConfig()
                .getNetworkConfig().getCloudConfig();
        assertFalse(cloudConfig.isEnabled());
        assertEquals("EXAMPLE_TOKEN", cloudConfig.getDiscoveryToken());
    }

    @Test
    public void testConnectionRetry() {
        ConnectionRetryConfig connectionRetryConfig = connectionRetryClient
                .getClientConfig().getConnectionStrategyConfig().getConnectionRetryConfig();
        assertEquals(5000, connectionRetryConfig.getClusterConnectTimeoutMillis());
        assertEquals(0.5, connectionRetryConfig.getJitter(), 0);
        assertEquals(2000, connectionRetryConfig.getInitialBackoffMillis());
        assertEquals(60000, connectionRetryConfig.getMaxBackoffMillis());
        assertEquals(3, connectionRetryConfig.getMultiplier(), 0);
    }

    @Test
    public void testReliableTopicConfig() {
        ClientConfig clientConfig = hazelcastReliableTopic.getClientConfig();
        ClientReliableTopicConfig topicConfig = clientConfig.getReliableTopicConfig("rel-topic");
        assertEquals(100, topicConfig.getReadBatchSize());
        assertEquals(TopicOverloadPolicy.DISCARD_NEWEST, topicConfig.getTopicOverloadPolicy());
    }

    /** Returns the first query-cache config found in the client config, or null when there is none. */
    private static QueryCacheConfig getQueryCacheConfig(ClientConfig config) {
        for (Map<String, QueryCacheConfig> configsByCacheName : config.getQueryCacheConfigs().values()) {
            for (QueryCacheConfig queryCacheConfig : configsByCacheName.values()) {
                return queryCacheConfig;
            }
        }
        return null;
    }

    @Test
    public void testInstanceNameConfig() {
        assertEquals("clusterName", namedClient.getName());
    }

    @Test
    public void testLabelsConfig() {
        Set<String> labels = namedClient.getClientConfig().getLabels();
        assertEquals(1, labels.size());
        assertContains(labels, "foo");
    }

    @Test
    public void testBackupAckToClient() {
        assertFalse(backupAckToClient.getClientConfig().isBackupAckToClientEnabled());
    }

    @Test
    public void testMetrics() {
        ClientMetricsConfig metricsConfig = metricsClient.getClientConfig().getMetricsConfig();
        assertFalse(metricsConfig.isEnabled());
        assertFalse(metricsConfig.getJmxConfig().isEnabled());
        assertEquals(42, metricsConfig.getCollectionFrequencySeconds());
    }
}
| |
package org.zpid.se4ojs.test;
import static org.junit.Assert.assertTrue;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.LinkOption;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import org.jdom2.JDOMException;
import org.junit.Ignore;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.ontoware.rdf2go.exception.ModelRuntimeException;
import org.zpid.se4ojs.annotation.AnnotationUtils;
import org.zpid.se4ojs.annotation.OaAnnotator;
import org.zpid.se4ojs.annotation.ncbo.NcboAnnotator;
import org.zpid.se4ojs.app.Config;
import org.zpid.se4ojs.app.JsonFileVisitor;
import org.zpid.se4ojs.app.SE4OJSAccessHelper;
import org.zpid.se4ojs.sparql.Prefix;
import org.zpid.se4ojs.textStructure.bo.BOStructureElement;
import com.hp.hpl.jena.vocabulary.XSD;
/**
* <p>
* This is a kind of integration test for the Open Annotation annotation data model used by se4ojs.
* </p>
* <p>
* The test runs se4ojs tool and stores the results in a temporary file.
* The results are then compared to a reference annotation (stored in the resources folder of this project).
* </p>
*
* @author barth
*
*/
public class TestOa_Annotation_Model extends AnnotationTester {
	// Ontology acronym used by the basic NCBO annotation test.
	private static final String TEST_ONTOLOGY = "CHEBI";
	// se4ojs configuration keys toggled per test to control the annotator's behavior.
	private static final String PROP_JSON_ANNOTATION = "ncbo.annotator.json.serialize";
	private static final String PROP_EXPAND_MAPPINGS = "ncbo.annotator.expandMappings";
	private static final String PROP_SEMANTIC_TYPE = "ncbo.annotator.semanticType";
	private static final String PROP_EXCLUDE_SYNONYMS = "ncbo.annotator.excludeSynonyms";
	private static final String PROP_INCLUDE_CUI = "ncbo.annotator.cui";
	// Ontology acronym used by the semantic-type and JSON round-trip tests.
	static final String TEST_SEMANTIC_TYPE_ONTOLOGY = "SNOMEDCT";
	// Name of the RDF file produced when annotating jsonHandlerTest.xml.
	private String ncboRdfFileName = "jsonHandlerTest-ncboAnnotations.rdf";
	// RDF produced by testJsonHandler() with minimal settings; reused by the follow-up test.
	private byte[] minimumRdfAnnotation;
	@Rule
	public TemporaryFolder folder = new TemporaryFolder();
	/**
	 * Compares the expected annotation outcome with the actual outcome on the basis of a short document.
	 *
	 * @throws ModelRuntimeException
	 * @throws IOException
	 * @throws JDOMException
	 */
	@Test
	public void testNcboAnnotation() throws ModelRuntimeException, IOException, JDOMException {
		// Configure a minimal run: RDF only, no synonyms/mappings/CUIs/semantic types.
		TestableConfig config = new TestableConfig();
		config.setPropValue(PROP_JSON_ANNOTATION, "false");
		config.setPropValue(PROP_EXCLUDE_SYNONYMS, "true");
		config.setPropValue(PROP_EXPAND_MAPPINGS, "false");
		config.setPropValue(PROP_INCLUDE_CUI, "false");
		config.setPropValue(PROP_SEMANTIC_TYPE, "false");
		String inPath = this.getClass().getClassLoader().getResource("ncboAnnotatorTestXml.xml").getFile();
		// Strip the leading slash a Windows resource URL carries before the drive letter ("/C:/..." -> "C:/...").
		inPath = inPath.replaceFirst("^/(.:/)", "$1");
		File in = new File(inPath);
		String outputDir = folder.getRoot().toString();
		// get the structure elements of the input text
		SE4OJSAccessHelper se4ojsAccessHelper = new SE4OJSAccessHelper();
		List<BOStructureElement> structureElements = se4ojsAccessHelper.rdfizeSections(in, outputDir);
		//annotate the file
		NcboAnnotator ncboAnnotator = new TestableNCBOAnnotator(TEST_ONTOLOGY);
		ncboAnnotator.annotate(Config.getInstitutionUrl(), in, structureElements, Paths.get(outputDir));
		// Compare the generated RDF against the stored reference annotation.
		super.compareTransformationResults(folder,
				"ncboAnnotatorTestXml-ncboAnnotations.rdf", "ncboAnnotatorReferenceAnnotation.rdf");
	}
	/**
	 * Check the semanticType Model.
	 * Currently, this test only checks whether the semantic type rdf file is stored.
	 *
	 * @throws ModelRuntimeException
	 * @throws IOException
	 * @throws JDOMException
	 */
	@Test
	public void testSemanticTypeAnnotation() throws ModelRuntimeException, IOException, JDOMException {
		// Enable JSON serialization, CUIs and semantic types for this run.
		TestableConfig config = new TestableConfig();
		config.setPropValue(PROP_JSON_ANNOTATION, "true");
		config.setPropValue(PROP_EXCLUDE_SYNONYMS, "true");
		config.setPropValue(PROP_EXPAND_MAPPINGS, "false");
		config.setPropValue(PROP_INCLUDE_CUI, "true");
		config.setPropValue(PROP_SEMANTIC_TYPE, "true");
		String testXmlFileName = "semTypeAnnotatorTest.xml";
		String inPath = this.getClass().getClassLoader().getResource(testXmlFileName).getFile();
		// Strip the leading slash a Windows resource URL carries before the drive letter.
		inPath = inPath.replaceFirst("^/(.:/)", "$1");
		File in = new File(inPath);
		String outputDir = folder.getRoot().toString();
		// get the structure elements of the input text
		SE4OJSAccessHelper se4ojsAccessHelper = new SE4OJSAccessHelper();
		List<BOStructureElement> structureElements = se4ojsAccessHelper.rdfizeSections(in, outputDir);
		//annotate the file
		NcboAnnotator ncboAnnotator = new TestableNCBOAnnotator(TEST_SEMANTIC_TYPE_ONTOLOGY);
		ncboAnnotator.annotate(Config.getInstitutionUrl(), in, structureElements, Paths.get(outputDir));
		Path rdfPath = Paths.get(outputDir, testXmlFileName.replace(".xml", "") + "-" + OaAnnotator.SEM_TYPE_RDF_FILENAME);
		assertTrue("The rdf output file for semantic type annotation does not exist",
				rdfPath.toFile().exists());
	}
	/**
	 * First runs the NCBO Annotation with the minimum amount of information (no mappings, no synonyms,
	 * no semanticTypes, no cuis) and stores it both as rdf and in Json format.
	 * The generated rdf is copied as a reference annotation.
	 * Then the Json deserializer is used (with the same settings) to generate a second rdf file.
	 * The test passes if both rdf files are identical.
	 * @throws ModelRuntimeException
	 * @throws IOException
	 * @throws JDOMException
	 */
	@Test
	public void testJsonHandler() throws ModelRuntimeException, IOException, JDOMException {
		//set the required properties to annotate with json:
		TestableConfig config = new TestableConfig();
		config.setPropValue(PROP_JSON_ANNOTATION, "true");
		config.setPropValue(PROP_EXCLUDE_SYNONYMS, "true");
		config.setPropValue(PROP_EXPAND_MAPPINGS, "false");
		config.setPropValue(PROP_INCLUDE_CUI, "false");
		config.setPropValue(PROP_SEMANTIC_TYPE, "false");
		String inPath = this.getClass().getClassLoader().getResource("jsonHandlerTest.xml").getFile();
		// Strip the leading slash a Windows resource URL carries before the drive letter.
		inPath = inPath.replaceFirst("^/(.:/)", "$1");
		File in = new File(inPath);
		String outputDir = folder.getRoot().toString();
		// get the structure elements of the input text
		SE4OJSAccessHelper se4ojsAccessHelper = new SE4OJSAccessHelper();
		List<BOStructureElement> structureElements = se4ojsAccessHelper.rdfizeSections(in, outputDir);
		//annotate the file
		TestableNCBOAnnotator ncboAnnotator = new TestableNCBOAnnotator(TEST_SEMANTIC_TYPE_ONTOLOGY);
		ncboAnnotator.annotate(Config.getInstitutionUrl(), in, structureElements, Paths.get(outputDir));
		Path jsonPath = Paths.get(outputDir, "jsonHandlerTest-ncboAnnotations.json");
		assertTrue("The json output file does not exist",
				jsonPath.toFile().exists());
		//copy the file (otherwise it will be overridden with the current settings)
		String originalNcboRdfFileName = ncboRdfFileName +"-original";
		Files.copy(Paths.get(outputDir, ncboRdfFileName), Paths.get(outputDir, originalNcboRdfFileName),
				LinkOption.NOFOLLOW_LINKS);
		// Remove the directly generated RDF so the JSON deserializer recreates it from scratch.
		Files.delete(Paths.get(outputDir, ncboRdfFileName));
		//read the json content
		String xmlDirectory = Paths.get(inPath).getParent().toString();
		String jsonDirectory = jsonPath.getParent().toString();
		String outputDirectory = outputDir;
		//TODO jatsProcessingTask
		// Walk the JSON output directory and re-serialize each JSON annotation file to RDF.
		TestableJsonFileVisitor jsonVisitor = new TestableJsonFileVisitor(xmlDirectory, outputDirectory);
		Files.walkFileTree(Paths.get(jsonDirectory),
				jsonVisitor);
		// The directly generated RDF and the JSON-derived RDF must be byte-identical.
		minimumRdfAnnotation = Files.readAllBytes(Paths.get(outputDir, originalNcboRdfFileName));
		byte[] jsonFileContent = Files.readAllBytes(Paths.get(outputDir, ncboRdfFileName));
		assertTrue("File contents differ", Arrays.equals(minimumRdfAnnotation, jsonFileContent));
	}
	/**
	 * Similar to {@link #testJsonHandler()}, but
	 * first runs the NCBO Annotation with the maximum amount of information (no mappings, but synonyms,
	 * semanticTypes and cuis) and stores it both as rdf and in Json format.
	 *
	 * Then the Json deserializer is used (with the minimum settings from the previous test)
	 * to generate a second rdf file.
	 * The test passes if this second rdf file matches the directly generated rdf file from the previous test.
	 *
	 * @throws ModelRuntimeException
	 * @throws IOException
	 * @throws JDOMException
	 */
	@Test
	public void testJsonHandler_DifferentPropertySettings() throws ModelRuntimeException, IOException, JDOMException {
		String outputDir = folder.getRoot().toString();
		// Run the minimal-settings test first if needed, so minimumRdfAnnotation is populated;
		// its output files are removed so they do not interfere with this run.
		if (minimumRdfAnnotation == null) {
			testJsonHandler();
			Files.delete(Paths.get(outputDir, ncboRdfFileName));
			Files.delete(Paths.get(outputDir, ncboRdfFileName +"-original"));
		}
		//set the required properties to annotate with json:
		TestableConfig config = new TestableConfig();
		config.setPropValue(PROP_JSON_ANNOTATION, "true");
		config.setPropValue(PROP_EXCLUDE_SYNONYMS, "false");
		config.setPropValue(PROP_EXPAND_MAPPINGS, "false");
		config.setPropValue(PROP_INCLUDE_CUI, "true");
		config.setPropValue(PROP_SEMANTIC_TYPE, "true");
		String inPath = this.getClass().getClassLoader().getResource("jsonHandlerTest.xml").getFile();
		// Strip the leading slash a Windows resource URL carries before the drive letter.
		inPath = inPath.replaceFirst("^/(.:/)", "$1");
		File in = new File(inPath);
		// get the structure elements of the input text
		SE4OJSAccessHelper se4ojsAccessHelper = new SE4OJSAccessHelper();
		List<BOStructureElement> structureElements = se4ojsAccessHelper.rdfizeSections(in, outputDir);
		//annotate the file
		TestableNCBOAnnotator ncboAnnotator = new TestableNCBOAnnotator(TEST_SEMANTIC_TYPE_ONTOLOGY);
		ncboAnnotator.annotate(Config.getInstitutionUrl(), in, structureElements, Paths.get(outputDir));
		Path jsonPath = Paths.get(outputDir, "jsonHandlerTest-ncboAnnotations.json");
		assertTrue("The json output file does not exist",
				jsonPath.toFile().exists());
		//read the json content
		// Switch back to the minimal settings before deserializing the JSON.
		config.setPropValue(PROP_EXCLUDE_SYNONYMS, "true");
		config.setPropValue(PROP_EXPAND_MAPPINGS, "false");
		config.setPropValue(PROP_INCLUDE_CUI, "false");
		config.setPropValue(PROP_SEMANTIC_TYPE, "false");
		String xmlDirectory = Paths.get(inPath).getParent().toString();
		String jsonDirectory = jsonPath.getParent().toString();
		String outputDirectory = outputDir;
		TestableJsonFileVisitor jsonVisitor = new TestableJsonFileVisitor(xmlDirectory, outputDirectory);
		Files.walkFileTree(Paths.get(jsonDirectory),
				jsonVisitor);
		// The RDF derived from the rich JSON must equal the minimal reference RDF.
		byte[] jsonFileContent = Files.readAllBytes(Paths.get(outputDir, ncboRdfFileName));
		// File originalRdf = Paths.get(outputDir, ncboRdfFileName +"-original").toFile();
		// FileUtils.writeByteArrayToFile(originalRdf, minimumRdfAnnotation);
		assertTrue("File contents differ", Arrays.equals(minimumRdfAnnotation, jsonFileContent));
	}
	// This is a test to do manual inspection for files that are not part of the test resources. Thus, it
	// is ignored by default. Note: the input paths below are machine-specific absolute paths.
	@Ignore
	@Test
	public void testJsonContent() throws FileNotFoundException, IOException {
		String fileName = "psyct.v5i1.4-ncboAnnotations.rdf";
		String inOld = "E:\\KOBPSY3\\PSYCT_out_NoSyns_old";
		String inNew = "E:\\KOBPSY3\\PSYCT_out_NoSyns";
		super.compareTransformationResults(Paths.get(inOld, fileName).toString(), Paths.get(inNew, fileName).toString());
	}
}
/**
 * NcboAnnotator variant that produces deterministic output for the tests:
 * a fixed annotation date and (via {@link TestableAnnotationUtils}) predictable UUID URIs.
 */
class TestableNCBOAnnotator extends NcboAnnotator {

	// Fixed sample date: 30 June 2015 (Calendar months are zero-based, so 5 == June).
	private static final int DAY = 30;
	private static final int MONTH = 5;
	private static final int YEAR = 2015;

	public TestableNCBOAnnotator(String ontologies) {
		super(ontologies);
		setAnnotationUtils(new TestableAnnotationUtils());
	}

	public TestableNCBOAnnotator(String ontologies, boolean b) {
		// Bug fix: the flag was previously hard-coded to "true", silently ignoring the caller's value.
		super(ontologies, b);
		setAnnotationUtils(new TestableAnnotationUtils());
	}

	/**
	 * Overrides date-triple creation to use the fixed sample date instead of "now",
	 * so generated annotations can be compared against stored reference RDF.
	 */
	@Override
	protected void createDateTriple(org.ontoware.rdf2go.model.Model model,
			String annotationId) {
		DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd");
		getAnnotationUtils().createLiteralTriple(annotationId,
				AnnotationUtils.createPropertyString(Prefix.OA, OA_ANNOTATED_AT),
				dateFormat.format(getFixedSampleDate()),XSD.date, model);
	}

	/** Returns the fixed sample date (2015-06-30); only the date portion is used above. */
	private Date getFixedSampleDate() {
		Calendar cal = Calendar.getInstance();
		cal.set(YEAR, MONTH, DAY);
		return cal.getTime();
	}
}
/** AnnotationUtils that hands out predictable, sequential UUID URIs for stable test output. */
class TestableAnnotationUtils extends AnnotationUtils {
    // Number of URIs generated so far; the next one is urn:uuid:test<counter+1>.
    int uuidCounter = 0;

    @Override
    public String generateUuidUri() {
        uuidCounter++;
        return String.format("urn:uuid:test%d", uuidCounter);
    }
}
/** JsonFileVisitor that swaps in the deterministic {@link TestableNCBOAnnotator}. */
class TestableJsonFileVisitor extends JsonFileVisitor {

    public TestableJsonFileVisitor(String xmlDirectory, String outputDirectory) {
        super(xmlDirectory, outputDirectory);
    }

    /** @return a test annotator wired to the semantic-type test ontology. */
    @Override
    public NcboAnnotator getNcboAnnotator() {
        return new TestableNCBOAnnotator(
                TestOa_Annotation_Model.TEST_SEMANTIC_TYPE_ONTOLOGY, true);
    }
}
| |
package org.semanticweb.yars2.rdfxml;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.Locale;
import java.util.TreeSet;
import java.util.logging.Logger;
import java.util.regex.Pattern;
import org.semanticweb.yars.nx.BNode;
import org.semanticweb.yars.nx.Literal;
import org.semanticweb.yars.nx.Node;
import org.semanticweb.yars.nx.Nodes;
import org.semanticweb.yars.nx.Resource;
import org.semanticweb.yars.nx.namespace.RDF;
import org.semanticweb.yars.nx.parser.Callback;
import org.semanticweb.yars.nx.parser.ParseException;
import org.semanticweb.yars.nx.util.NxUtil;
import org.xml.sax.Attributes;
import org.xml.sax.InputSource;
import org.xml.sax.Locator;
import org.xml.sax.SAXException;
import org.xml.sax.helpers.DefaultHandler;
/**
* RDF/XML parser base using SAX parsing events
* @author Aidan Hogan
*
*/
public class RDFXMLParserBase extends DefaultHandler {
//state = 0, awaiting opening rdf:RDF
//state = 1, awaiting open resource element
//state = 2, awaiting close resource element or open property element
//state = 3, awaiting text, close property element or open resource element
//state = 4, awaiting close property element
//state = 5, awaiting text or close property element
//state = 6, parsetype collection, awaiting open resource element or close property element
//state = 7, parsetype literal, awaiting valid XML
//state = 8, parsetype resource, awaiting open property or close property
// JDK logger used by warning()/error()/fatalError() to report parse problems.
private static Logger _log = Logger.getLogger(RDFXMLParserBase.class.getName());
/**
 * Parser states, matching the numbered states documented above the class:
 * START(0) awaiting rdf:RDF; OR(1) awaiting open resource; CR_OP(2) awaiting
 * close resource / open property; T_CP_OR(3) awaiting text, close property or
 * open resource; CP(4) awaiting close property; T_CP(5) awaiting text or close
 * property; PTC_OR_CP(6) parseType collection; PTL_XML(7) parseType literal;
 * PTR_OP_CP(8) parseType resource.
 */
private static enum State implements Comparable<State> {
    START, OR, CR_OP, T_CP_OR, CP, T_CP, PTC_OR_CP, PTL_XML, PTR_OP_CP, PTA;

    /** True if character data may legally appear in this state. */
    public boolean expectText(){
        return this == T_CP_OR || this == T_CP || this == PTL_XML;
    }

    /** True if an opening resource element may legally appear in this state. */
    public boolean expectOpenResource(){
        return this == START || this == OR || this == T_CP_OR
                || this == PTC_OR_CP || this == PTL_XML;
    }

    /** True if an opening property element may legally appear in this state. */
    public boolean expectOpenProperty(){
        return this == CR_OP || this == PTR_OP_CP || this == PTL_XML;
    }

    /** Null-safe comparison; for enum constants ordinal equality is identity. */
    public boolean equals(State s){
        return s != null && s.ordinal() == ordinal();
    }
}
// If true, bnode labels embed the (escaped) document URI so they are globally unique.
private boolean _skolemise = false;
// If true, warnings and recoverable errors are rethrown instead of only logged.
private boolean _strict = false;
// Document URI escaped for safe embedding in bnode labels.
private String _escapedDocURI = null;
public static final String BNODE_PREFIX = "bnode";
private static final String XML_BASE = "xml:base";
private static final String XML_LANG = "xml:lang";
private static final String XML_SPACE = "xml:space";
private static final String XML_ID = "xml:id";
private static final String XML_PREFIX = "xml:";
private static final String NULL = "";
// Stack of open subject nodes (innermost last).
private ArrayList<Node> _fifoS;
// Scoped stack of in-force xml:base values.
private ScopedThing<URI> _sbq;
// private ScopedThing<Boolean> _sptr;
// Scoped stack of in-force xml:lang values ("" meaning none).
private ScopedThing<String> _slang;
// Depths at which a parseType="Resource" property was opened.
private TreeSet<Integer> _sptr;
// private ScopedThing<Boolean> _sptl = null;
// Property that opened the current parseType="Literal" value, if any.
private Resource _sptlp = null;
// Nesting count of elements named like _sptlp inside the literal.
private int _sptldepth = 0;
// Per-depth rdf:li counters, saved/restored as elements nest.
private HashMap<Integer, Integer> _li;
// Per-depth current collection (rdf:List) cell for parseType="Collection".
private HashMap<Integer, Node> _coll;
// rdf:ID values seen so far, mapped to the URIs they resolved to (duplicate detection).
private HashMap<String,HashSet<URI>> _ids;
private URI _currentBase;
private int _currentLi = 0;
private Node _currentColl = null;
// Reification URI from rdf:ID on a property element — NOTE(review): assigned but
// its consumer is outside this excerpt; confirm reification is actually emitted.
private Node _currentReify = null;
// private ArrayList<Resource> _fifoP = new ArrayList<Resource>();
// private ArrayList<Resource> _fifoT = new ArrayList<Resource>();
// Current property-nesting depth (rdf:RDF level is 0).
private int _depth = 0;
// Counter for fresh bnode labels.
private int _bnode = 0;
private String _currentLang = null;
// One-shot rdf:datatype for the next literal; cleared by createLiteral().
private Resource _datatype = null;
//store prefix mappings for stuff inside an XMLLiteral definition
private HashMap<String, String> _xmllPrefixes = null;
private StringBuffer _prefixDefinition = null;
// Optional context resource supplied at construction (may be null).
private Resource _con = null;
//state = 0, awaiting opening rdf:RDF
//state = 1, awaiting open resource element
//state = 2, awaiting close resource element or open property element
//state = 3, awaiting text, close property element or open resource element
//state = 4, awaiting close property element
//state = 5, awaiting text or close property element
//state = 6, parsetype collection, awaiting open resource element or close property element
//state = 7, parsetype literal, awaiting valid XML
//state = 8, parsetype resource, awaiting open property
private State _state = State.START;
//used for resources encoded within empty property nodes
private ResourceDescription _currentPRD = null;
private Node _currentS = null;
private Resource _currentP = null;
private Locator _loc = null;
// Accumulated character data for the current literal value.
private StringBuffer _currentL = null;
// Sink for parse events/statements.
private Callback _c;
//elements from RDF (syntax) namespace disallowed as use in nodes in RDF/XML
private static final Node[] NOT_ALLOWED_NODE = {RDF.ABOUT, RDF.DATATYPE, RDF.ID, RDF.NODEID, RDF.PARSETYPE, RDF.RESOURCE};
private final static HashSet<Node> NOT_ALLOWED_NODE_TS = new HashSet<Node>();
// BUG FIX: this was an *instance* initialiser populating a static set — it ran
// redundantly once per parser instance (unsafe under concurrent construction,
// since HashSet is not thread-safe) and left the set empty until the first
// instance was created. A static initialiser fills it exactly once at class load.
static {
    for(Node n:NOT_ALLOWED_NODE){
        NOT_ALLOWED_NODE_TS.add(n);
    }
}
//RDF syntax names and their allowed positions
private static final String[] RDF_SUBJ_NODE_NAMES = {"Description", "Bag", "Seq", "Alt", "List", "Statement", "Property"};
private static final String[] RDF_PROP_NODE_NAMES = {"type", "subject", "predicate", "object", "first", "rest", "value", "li"};
private static final String[] RDF_SUBJ_ATTR_NAMES = {"about", "ID", "nodeID", "type"};
private static final String[] RDF_PROP_ATTR_NAMES = {"ID", "nodeID", "datatype", "parseType", "resource"};
private final static HashSet<String> RDF_SUBJ_NODE_NAMES_TS = new HashSet<String>();
private final static HashSet<String> RDF_PROP_NODE_NAMES_TS = new HashSet<String>();
private final static HashSet<String> RDF_SUBJ_ATTR_NAMES_TS = new HashSet<String>();
private final static HashSet<String> RDF_PROP_ATTR_NAMES_TS = new HashSet<String>();
// BUG FIX: converted from an *instance* initialiser to a static initialiser —
// these sets are static and must be populated once at class-load time, not
// repeatedly per parser instance (and not only after the first instance exists).
static {
    for(String s:RDF_SUBJ_NODE_NAMES){
        RDF_SUBJ_NODE_NAMES_TS.add(s);
    }
    for(String s:RDF_PROP_NODE_NAMES){
        RDF_PROP_NODE_NAMES_TS.add(s);
    }
    for(String s:RDF_SUBJ_ATTR_NAMES){
        RDF_SUBJ_ATTR_NAMES_TS.add(s);
    }
    for(String s:RDF_PROP_ATTR_NAMES){
        RDF_PROP_ATTR_NAMES_TS.add(s);
    }
}
// public static void main (String argv [])
// {
// SAXParserFactory factory = SAXParserFactory.newInstance();
// factory.setNamespaceAware(true);
// try {
//// OutputStreamWriter out = new OutputStreamWriter (System.out, "UTF8");
// SAXParser saxParser = factory.newSAXParser();
// System.out.println(saxParser.isNamespaceAware()+" "+saxParser.isValidating());
// saxParser.parse( new File("test/rdfxml/foafsmall.txt"), new RDFXMLParserBase("http://sw.deri.org/~aidanh/_foaf/@-foaf+.rdf", new true) );
// } catch (Throwable err) {
// err.printStackTrace ();
// }
// }
/** Convenience: skolemised bnodes, lenient error handling, no context. */
public RDFXMLParserBase(String docURI, Callback c) throws SAXException{
    this(docURI, c, true, null);
}

/** Convenience: lenient error handling, no context. */
public RDFXMLParserBase(String docURI, Callback c, boolean skolemise) throws SAXException{
    this(docURI, c, skolemise, null);
}

/** Convenience: no context resource. */
public RDFXMLParserBase(String docURI, Callback c, boolean skolemise, boolean strict) throws SAXException{
    this(docURI, c, skolemise, strict, null);
}

/** Convenience: lenient error handling. */
public RDFXMLParserBase(String docURI, Callback c, boolean skolemise, Resource con) throws SAXException{
    this(docURI, c, skolemise, false, con);
}

/**
 * Master constructor.
 *
 * @param docURI document URI; becomes the initial xml:base and (when skolemising) part of bnode labels
 * @param c callback receiving parse events
 * @param skolemise if true, generated bnode labels embed the escaped document URI
 * @param strict if true, warnings and recoverable errors are rethrown as SAXExceptions
 * @param con optional context resource (may be null) — presumably attached to emitted
 *            statements; confirm against handleStatement(), which is outside this excerpt
 * @throws SAXException if docURI is not a valid absolute URI
 */
public RDFXMLParserBase(String docURI, Callback c, boolean skolemise, boolean strict, Resource con) throws SAXException{
    _fifoS = new ArrayList<Node>();
    initialiseBaseURI(docURI);
    setDocumentURI(docURI);
    _sbq = new ScopedThing<URI>(_currentBase);
    //_sptr = new ScopedThing<Boolean>(false);
    _sptr = new TreeSet<Integer>();
    _slang = new ScopedThing<String>(NULL);
    _li = new HashMap<Integer, Integer>();
    _coll = new HashMap<Integer, Node>();
    _ids = new HashMap<String,HashSet<URI>>();
    _skolemise = skolemise;
    _c = c;
    _con = con;
    _strict = strict;
}
/** Stores the document URI escaped for safe embedding in (skolemised) bnode labels. */
private void setDocumentURI(String docURI){
    _escapedDocURI = BNode.escapeForBNode(docURI);
}

/** Forwards the SAX document-start event to the callback. */
public void startDocument () throws SAXException{
    _c.startDocument();
}

/** Forwards the SAX document-end event to the callback. */
public void endDocument () throws SAXException{
    _c.endDocument();
}
/**
 * SAX start-element hook: advances the RDF/XML state machine for an opening
 * tag. First resolves the element name to a full URI (tolerating deprecated
 * unqualified RDF names), then dispatches on the current state to open a
 * resource element, open a property element, extend a parseType="Collection"
 * list, or serialise into a parseType="Literal" value.
 */
public void startElement (String name, String lname, String qname, Attributes attrs) throws SAXException{
    final Resource uri;
    // NOTE(review): '&&' binds tighter than '||' — when name==null the PTL_XML
    // guard is not consulted; confirm this precedence is intended.
    if(name==null || name.equals("") && !_state.equals(State.PTL_XML)){
        if(lname.equals("RDF")&&_state.equals(State.START)){
            warning("Unqualified use of 'rdf:RDF' is deprecated.");
            uri = RDF.RDF;
        }else if(RDF_SUBJ_NODE_NAMES_TS.contains(lname) && _state.expectOpenResource()){
            warning("Unqualified use of RDF name '"+lname+"' is deprecated.");
            uri = createResource(RDF.NS+lname);
        }else if(RDF_PROP_NODE_NAMES_TS.contains(lname) && _state.expectOpenProperty()){
            warning("Unqualified use of RDF name '"+lname+"' is deprecated.");
            uri = createResource(RDF.NS+lname);
        }else{
            error("Unqualified attribute name '"+lname+"' found.");
            uri = createResource(resolveFullURI(lname, false));
        }
    }else{
        uri = createResource(name+lname);
    }
    nodeIsAllowed(qname, uri);
    if(_state.equals(State.START)){ //expecting open rdf:RDF or top level standalone element
        checkAndHandleLang(attrs);
        checkAndHandleBaseURI(attrs);
        _state = State.OR;
        if(!uri.equals(RDF.RDF)){
            // document without an rdf:RDF wrapper: the element is itself a resource
            initialiseCurrentResource(uri, attrs);
            _state = State.CR_OP;
        }
    } else if(_state.equals(State.OR)){ //expecting top level open resource
        initialiseCurrentResource(uri, attrs);
        _state = State.CR_OP;
    } else if(_state.equals(State.CR_OP)){ //expecting open property
        _state = State.T_CP_OR;
        initialiseCurrentProperty(uri, attrs);
    } else if(_state.equals(State.T_CP_OR)){ //expecting deep open resource
        _currentL = null;
        Node old = _currentS;
        initialiseCurrentResource(uri, attrs);
        handleStatement(old, _currentP, _currentS);
        _state = State.CR_OP;
    } else if(_state.equals(State.CP)){ //error, expecting close property tag
        fatalError("Expecting close property tag, not open element tag '"+qname+"'.");
    } else if(_state.equals(State.T_CP)){ //unless text found is only whitespace, then error, expecting more text or close property.
        fatalError("Expecting close property, not open element tag '"+qname+"'.");
    } else if(_state.equals(State.PTC_OR_CP)){ //within open collection property, expecting open resource elements of collection
        // link a fresh rdf:List cell: to the property for the first member,
        // to the previous cell via rdf:rest otherwise
        BNode collID = generateBNode();
        if(_currentColl==null){
            handleStatement(_currentS, _currentP, collID);
            _fifoS.add(_currentS);
        } else{
            handleStatement(_currentColl, RDF.REST, collID);
        }
        initialiseCurrentResource(uri, attrs);
        handleStatement(collID, RDF.FIRST, _currentS);
        _currentColl = null;
        _coll.put(_depth, collID);
        _state=State.CR_OP;
    } else if(_state.equals(State.PTL_XML)){ //expecting valid xml
        if(uri.equals(_sptlp)){
            // track nesting of elements named like the literal's own property so
            // the matching close tag can be recognised in endElement()
            _sptldepth++;
        }
        handleParseTypeLiteralStartElement(name, lname, qname, attrs);
    } else if(_state.equals(State.PTR_OP_CP)){ //parseType resource, expecting open property and creation of virtual resource
        Node old = _currentS;
        _currentS = generateBNode();
        _fifoS.add(_currentS);
        handleStatement(old, _currentP, _currentS);
        _state=State.T_CP_OR;
        initialiseCurrentProperty(uri, attrs);
    }
    _sbq.incrementScope();
    _slang.incrementScope();
}
/**
 * Serialises an opening tag (with its attributes and any pending namespace
 * declarations) into the buffer holding the rdf:parseType="Literal" value.
 * NOTE(review): attribute values are written verbatim — embedded '"' or '<'
 * are not re-escaped; confirm inputs are SAX-normalised.
 */
private void handleParseTypeLiteralStartElement(final String name, final String lname, final String qname, final Attributes attrs){
    checkXMLLiteralPrefix(name, lname, qname);
    _currentL.append('<').append(qname);
    for(int i = 0; i < attrs.getLength(); i++){
        _currentL.append(' ').append(attrs.getQName(i)).append("=\"").append(attrs.getValue(i)).append('"');
        checkXMLLiteralPrefix(attrs.getURI(i), attrs.getQName(i), attrs.getLocalName(i));
    }
    if(_prefixDefinition != null){
        // flush xmlns declarations collected since the previous tag
        _currentL.append(_prefixDefinition.toString());
        _prefixDefinition = null;
    }
    _currentL.append('>');
}
/** If the element carries xml:base, resolve it and push it onto the base-URI scope stack. */
private void checkAndHandleBaseURI(final Attributes attrs) throws SAXException{
    final int idx = attrs.getIndex(XML_BASE);
    if(idx != -1){
        initialiseBaseURI(attrs.getValue(idx));
        _sbq.addNewScopedElement(_currentBase);
    }
}
/** If the element carries xml:lang, normalise it and push it onto the language scope stack. */
private void checkAndHandleLang(final Attributes attrs) throws SAXException{
    final int idx = attrs.getIndex(XML_LANG);
    if(idx == -1){
        return;
    }
    initialiseLang(attrs.getValue(idx));
    // NULL ("") marks "no language in scope"
    _slang.addNewScopedElement(_currentLang == null ? NULL : _currentLang);
}
/**
 * Normalises an xml:lang value: the empty string clears the current language,
 * anything else is lower-cased (English locale, locale-independent behaviour).
 */
private void initialiseLang(final String lang) throws SAXException{
    //should probably check string here to see whether it is a valid language string
    _currentLang = lang.isEmpty() ? null : lang.toLowerCase(Locale.ENGLISH);
}
/**
 * Parses and installs a new in-scope base URI. Spaces are repaired with '+'
 * (with a warning); an unparseable or relative base is a fatal error.
 */
private void initialiseBaseURI(String base) throws SAXException{
    if(base.contains(" ")){
        warning("Base uri '"+base+"' contains a space.");
        base = base.replaceAll(" ", "+");
    }
    try {
        _currentBase = new URI(base);
    } catch (URISyntaxException e) {
        // fatalError always throws, so _currentBase cannot be stale/null below
        fatalError(new SAXException(e));
    }
    if(!_currentBase.isAbsolute()){
        fatalError(new SAXException(new RDFXMLParseException("Cannot have relative xml:base value: '"+base+"'.")));
    }
}
/**
 * Opens a subject (node) element: buffers its typing and attribute edges,
 * determines its identifier (rdf:about/rdf:ID/rdf:nodeID, else a fresh bnode),
 * pushes it on the subject stack, then emits the buffered edges.
 */
private void initialiseCurrentResource(final Resource rq, final Attributes attrs) throws SAXException{
    _currentLi=0;  // rdf:li numbering restarts for each container element
    _currentS = null;
    ResourceDescription temp = new ResourceDescription();
    if(!rq.equals(RDF.DESCRIPTION)){
        // a typed node element implies an rdf:type edge
        temp.addEdge(RDF.TYPE, rq);
    }
    checkAndHandleBaseURI(attrs);
    checkAndHandleLang(attrs);
    for(int i=0; i<attrs.getLength(); i++){
        Node[] edge = handleSubjectAttributePair(attrs.getURI(i), attrs.getLocalName(i), attrs.getQName(i), attrs.getValue(i));
        if(edge!=null){
            temp.addEdge(edge);
        }
    }
    if(_currentS==null){
        _currentS = generateBNode();
    }
    _fifoS.add(_currentS);
    // edges are emitted only once the subject's identity is known
    for(Node[] edge:temp.getEdges()){
        handleStatement(_currentS, edge[0], edge[1]);
    }
}
/**
 * Processes one attribute of a subject (node) element. Identifier attributes
 * (rdf:about/rdf:ID/rdf:nodeID) set {@code _currentS} and are consumed;
 * rdf:type and ordinary attributes are returned as a {property, object} edge
 * to be emitted once the subject is known; xml:base/xml:lang are consumed.
 *
 * @return a {property, object} pair, or null if the attribute was consumed
 */
private Node[] handleSubjectAttributePair(final String name, final String lname, final String qname, final String o) throws SAXException{
    final Resource p;
    // resolve the attribute name, tolerating (deprecated) unqualified RDF names
    if(name==null || name.equals("")){
        if(RDF_SUBJ_ATTR_NAMES_TS.contains(lname)){
            warning("Unqualified use of RDF name '"+lname+"' is deprecated.");
            p = createResource(RDF.NS+lname);
        }else{
            error("Unqualified attribute name '"+lname+"' found.");
            p = createResource(resolveFullURI(lname, false));
        }
    }else{
        p = createResource(name+lname);
    }
    if(p.equals(RDF.ABOUT)){
        if(_currentS!=null){
            fatalError("rdf:about used for resource already identified as '"+_currentS+"'.");
        }
        _currentS = createResource(resolveFullURI(o, false));
        return null;
    } else if(p.equals(RDF.ID)){
        if(_currentS!=null){
            fatalError("rdf:ID used for resource already identified as '"+_currentS+"'.");
        }
        _currentS = createResource(resolveFullURI(o, true));
        return null;
    } else if(p.equals(RDF.NODEID)){
        if(_currentS!=null){
            fatalError("rdf:nodeID used for resource already identified as '"+_currentS+"'.");
        }
        _currentS = generateBNode(o);
        return null;
    } else if(p.equals(RDF.TYPE)){
        // NOTE(review): the type value is not resolved against the base URI here — confirm intended
        return new Node[]{RDF.TYPE, createResource(o)};
    } else if(qname.equals("xml:base")){
        return null; //handled already by checkAndHandleBaseURI()
    } else if(qname.equals("xml:lang")){
        return null; //handled already by checkAndHandleLang()
    } else{
        // any other attribute becomes a property edge with a literal value
        return new Node[]{p, createLiteral(o)};
    }
}
/**
 * Resolves a (possibly relative) rdf:about / rdf:ID / unqualified value into
 * a full URI against the in-scope base. Whitespace is repaired with warnings;
 * rdf:ID values get '#' prepended and are checked for duplicates and NCName
 * validity.
 *
 * @param id    raw attribute value
 * @param rdfID true when resolving an rdf:ID value
 * @return the resolved absolute URI as a string
 */
private String resolveFullURI( String id, final boolean rdfID) throws SAXException{
    URI uri = null;
    int oldLength = id.length();
    id = id.trim();
    if(oldLength!=id.length()){
        warning("Leading or trailing whitespace in id "+id+".");
    }
    if(id.contains(" ")){
        warning("ID "+id+" contains a space.");
        id = id.replaceAll(" ", "+");
    }
    try {
        if(rdfID){
            // rdf:ID identifies a fragment of the base document
            uri = new URI("#"+id);
        } else uri = new URI(id);
    } catch (URISyntaxException e) {
        // fatalError always throws, so uri is non-null past this point
        fatalError(new SAXException(e));
    }
    if(uri.isAbsolute()){
        if(rdfID){
            error("Absolute URI provided for rdf:ID. Not resolving against base URI.");
        }
        return id;
    } else{
        if(id.isEmpty()){
            // empty rdf:about denotes the document itself (minus any fragment)
            if(_currentBase.getFragment()!=null)
                return removeFragment(_currentBase);
            else
                return _currentBase.toString();
        }
        // bases with no path and no trailing slash need URL-style resolution
        if((_currentBase.getPath()==null ||_currentBase.getPath().length()==0) && !_currentBase.toString().endsWith("/")){
            try {
                return new URL(_currentBase.toURL(),id).toString();
            } catch (MalformedURLException e) {
                error("MalformedURLException resolving base:"+_currentBase+" id: "+id);
            }
        }
        URI full = _currentBase.resolve(uri);
        if(rdfID){
            if(trackDuplicateIds(id,full)){
                warning("Duplicate value '"+id+"' for rdf:ID attribute.");
            }
            if (!Pattern.matches(XMLRegex.NC_NAME, id)){
                warning("ID value '"+id+"' is not a valid XML NCName.");
            }
        }
        return full.toString();
    }
}
/**
 * Checks whether an rdf:ID is a duplicate, indexing the current ID as a side
 * effect. Re-use of an ID is allowed when the resulting URIs differ (i.e. the
 * base URI has changed between occurrences).
 *
 * @param id  the raw rdf:ID value
 * @param uri the URI the ID resolved to
 * @return true if this exact (id, uri) pair was already seen, false otherwise
 */
private boolean trackDuplicateIds(String id, URI uri) {
    if(!_ids.containsKey(id)){
        _ids.put(id, new HashSet<URI>());
    }
    // Set.add returns false when the URI was already present → duplicate
    return !_ids.get(id).add(uri);
}
/**
 * Returns the given URI rendered without its fragment (scheme and host are
 * lower-cased along the way).
 *
 * Fix: guard against a null host (e.g. {@code file:} URIs) and, defensively,
 * a null scheme — calling {@code toLowerCase()} on them previously threw an
 * uncaught NullPointerException.
 */
private String removeFragment(URI u)throws SAXException{
    // remove fragment
    try {
        String scheme = u.getScheme() == null ? null : u.getScheme().toLowerCase();
        String host = u.getHost() == null ? null : u.getHost().toLowerCase();
        return new URI(scheme,
                u.getUserInfo(), host, u.getPort(),
                u.getPath(), u.getQuery(), null).toString();
    } catch (URISyntaxException e) {
        error("URISyntaxException removing fragment from base:"+u);
        return u.toString();
    }
}
/**
 * Builds a literal from the given text: datatyped if a one-shot rdf:datatype
 * is pending (consumed and cleared here), else language-tagged if xml:lang is
 * in scope, else plain. Text is escaped for Nx serialisation.
 */
private Literal createLiteral(final String s){
    Literal l;
    if(_datatype!=null){
        l = new Literal(NxUtil.escapeForNx(s), _datatype);
        _datatype = null; // the datatype applies to exactly one literal
    } else if(_currentLang!=null){
        l = new Literal(NxUtil.escapeForNx(s), _currentLang);
    } else{
        l = new Literal(NxUtil.escapeForNx(s));
    }
    return l;
}
/**
 * Opens a property element: expands rdf:li to the next rdf:_n membership
 * property, processes the property's attributes, and — when the attributes
 * fully described an in-line object (rdf:resource/rdf:nodeID/rdf:type or
 * literal attributes) — emits those statements immediately.
 */
private void initialiseCurrentProperty(final Resource uri, final Attributes attrs) throws SAXException{
    _currentP = uri;
    if(_currentP.equals(RDF.LI)){
        // rdf:li expands to the container-membership property rdf:_<n>
        _currentLi++;
        _li.put(_depth, _currentLi);
        _currentP = createResource(RDF.NS+"_"+_currentLi);
    }
    _depth++;
    checkAndHandleBaseURI(attrs);
    checkAndHandleLang(attrs);
    for(int i=0; i<attrs.getLength(); i++){
        handlePropertyAttributePair(attrs.getURI(i), attrs.getLocalName(i), attrs.getQName(i), attrs.getValue(i));
    }
    if(_currentPRD!=null){
        // attributes described an in-line object resource: emit it now
        Node id = _currentPRD.getIdentifier();
        if(id==null){
            id = generateBNode();
        }
        handleStatement(_currentS, _currentP, id);
        for(Node[] edge:_currentPRD.getEdges()){
            handleStatement(id, edge[0], edge[1]);
        }
        _currentPRD=null;
    }
}
/**
 * Used exclusively for XMLLiteral: records namespace mappings declared while a
 * parseType="Literal" value is open so they can be re-declared inside the
 * serialised literal.
 */
public void startPrefixMapping(final String prefix, final String uri) throws SAXException{
    if(_state.equals(State.PTL_XML)){
        handleXMLLiteralPrefixMapping(prefix, uri);
    }
}
/**
 * Used exclusively for XMLLiteral: records a prefix→namespace mapping and
 * queues its xmlns declaration for inclusion in the next serialised start tag.
 */
private void handleXMLLiteralPrefixMapping(final String prefix, final String uri){
    _xmllPrefixes.put(prefix, uri);
    if(_prefixDefinition == null){
        _prefixDefinition = new StringBuffer();
    }
    _prefixDefinition.append(' ').append(createPrefixDefinition(prefix, uri));
}
/**
 * Used exclusively for XMLLiteral: renders an xmlns declaration — the default
 * namespace form when the prefix is null/empty, the prefixed form otherwise.
 */
private String createPrefixDefinition(final String prefix, final String uri){
    final boolean defaultNamespace = (prefix == null || prefix.isEmpty());
    return defaultNamespace
            ? "xmlns=\""+uri+"\""
            : "xmlns:"+prefix+"=\""+uri+"\"";
}
/**
 * Used exclusively for XMLLiteral: extracts the namespace prefix from a qname,
 * returning "" when the name is unprefixed.
 */
private String getPrefix(final String qname, final String lname){
    if(qname.length() == lname.length()){
        return ""; // no colon — element is in the default namespace
    }
    final int colonIndex = qname.length() - lname.length() - 1;
    return qname.substring(0, colonIndex);
}
/**
 * Used exclusively for XMLLiteral: ensures the tag's namespace prefix has been
 * declared inside the literal, declaring it on demand if not.
 */
private void checkXMLLiteralPrefix(final String name, final String lname, final String qname){
    if(name.equals("")){
        return; // unqualified name — nothing to declare
    }
    final String prefix = getPrefix(qname, lname);
    if(!_xmllPrefixes.containsKey(prefix)){
        handleXMLLiteralPrefixMapping(prefix, name);
    }
}
/**
 * Used exclusively for XMLLiteral: discards a prefix mapping when its scope
 * ends inside a parseType="Literal" value.
 */
public void endPrefixMapping(final String prefix) throws SAXException{
    if(_state.equals(State.PTL_XML)){
        _xmllPrefixes.remove(prefix);
    }
}
/**
 * Processes one attribute of a property element, updating the state machine.
 * rdf:resource / rdf:nodeID / rdf:type / plain attributes describe an in-line
 * object (accumulated in {@code _currentPRD}); rdf:datatype and the three
 * rdf:parseType values switch the parser into the corresponding literal /
 * collection / resource mode; rdf:ID requests reification; mutually exclusive
 * combinations are fatal errors.
 */
private void handlePropertyAttributePair(final String name, final String lname, final String qname, final String o) throws SAXException{
    final Resource p;
    // resolve the attribute name, tolerating (deprecated) unqualified RDF names
    if(name==null || name.equals("")){
        if(RDF_PROP_ATTR_NAMES_TS.contains(lname)){
            warning("Unqualified use of RDF name "+lname+" is deprecated.");
            p = createResource(RDF.NS+lname);
        } else if(lname.startsWith("xml")){
            //silently ignore according to this weird W3C testcase
            //http://www.w3.org/2000/10/rdf-tests/rdfcore/unrecognised-xml-attributes/test002.rdf
            // warning("Unrecognised unqualified attribute starting with XML: "+lname+".");
            return;
        } else {
            error("Unqualified attribute name "+lname+" found.");
            p = createResource(resolveFullURI(lname, false));
        }
    } else{
        p = createResource(name+lname);
    }
    if(p.equals(RDF.RESOURCE)){
        // rdf:resource — in-line object with an explicit URI
        if(_currentPRD!=null && _currentPRD.getIdentifier()!=null){
            fatalError("Cannot have more than one rdf:resource/rdf:nodeID attached as attributes to a property.");
        } else if(_state.equals(State.T_CP)){
            fatalError("Cannot have both rdf:datatype and rdf:resource attached as attributes to a property.");
        } else if(_state.equals(State.PTC_OR_CP)){
            fatalError("Cannot have both rdf:parseType='Collection' and rdf:resource attached as attributes to a property.");
        } else if(_state.equals(State.PTL_XML)){
            fatalError("Cannot have both rdf:parseType='Literal' and rdf:resource attached as attributes to a property.");
        } else if(_state.equals(State.PTR_OP_CP)){
            fatalError("Cannot have both rdf:parseType='Resource' and rdf:resource attached as attributes to a property.");
        }
        Resource id = createResource(resolveFullURI(o, false));
        if(_currentPRD==null){
            _currentPRD = new ResourceDescription(id);
        } else{
            _currentPRD.setIdentifier(id);
        }
        _state = State.CP;
    } else if(p.equals(RDF.NODEID)){
        // rdf:nodeID — in-line object identified by a (possibly shared) bnode
        if(_currentPRD!=null && _currentPRD.getIdentifier()!=null){
            fatalError("Cannot have more than one rdf:resource/rdf:nodeID attached as attributes to a property.");
        } else if(_state.equals(State.T_CP)){
            fatalError("Cannot have both rdf:datatype and rdf:nodeID attached as attributes to a property.");
        } else if(_state.equals(State.PTC_OR_CP)){
            fatalError("Cannot have both rdf:parseType='Collection' and rdf:nodeID attached as attributes to a property.");
        } else if(_state.equals(State.PTL_XML)){
            fatalError("Cannot have both rdf:parseType='Literal' and rdf:nodeID attached as attributes to a property.");
        } else if(_state.equals(State.PTR_OP_CP)){
            fatalError("Cannot have both rdf:parseType='Resource' and rdf:nodeID attached as attributes to a property.");
        }
        BNode id = generateBNode(o);
        if(_currentPRD==null){
            _currentPRD = new ResourceDescription(id);
        } else{
            _currentPRD.setIdentifier(id);
        }
        _state = State.CP;
    } else if(p.equals(RDF.TYPE)){
        // rdf:type attribute — adds a type edge to the in-line object
        if(_state.equals(State.T_CP)){
            fatalError("Cannot have both rdf:datatype and rdf:type attached as attributes to a property.");
        } else if(_state.equals(State.PTC_OR_CP)){
            fatalError("Cannot have both rdf:parseType='Collection' and rdf:type attached as attributes to a property.");
        } else if(_state.equals(State.PTL_XML)){
            fatalError("Cannot have both rdf:parseType='Literal' and rdf:type attached as attributes to a property.");
        } else if(_state.equals(State.PTR_OP_CP)){
            fatalError("Cannot have both rdf:parseType='Resource' and rdf:type attached as attributes to a property.");
        }
        if(_currentPRD==null){
            _currentPRD = new ResourceDescription();
        }
        _currentPRD.addEdge(RDF.TYPE, createResource(o));
        _state = State.CP;
    } else if(qname.equals(XML_BASE)){
        ;//handled already by checkAndHandleBaseURI()
    } else if(qname.equals(XML_LANG)){
        ;//handled already by checkAndHandleLang()
    } else if(qname.equals(XML_SPACE)){
        ;/**@todo do nothing???**/
    } else if(qname.equals(XML_ID)){
        ;/**@todo do nothing???**/
    } else if(qname.startsWith(XML_PREFIX)){
        // just ignore it according to W3C test-cases
        // warning("Unrecognised xml qname "+qname+".");
    } else if(p.equals(RDF.DATATYPE)){
        // rdf:datatype — the element's text becomes a datatyped literal
        if(_state.equals(State.CP)){
            fatalError("Cannot have both rdf:datatype and rdf:resource attached as attributes to a property.");
        } else if(_state.equals(State.PTC_OR_CP)){
            fatalError("Cannot have both rdf:datatype and rdf:parseType='Collection' attached as attributes to a property.");
        } else if(_state.equals(State.PTL_XML)){
            fatalError("Cannot have both rdf:datatype and rdf:parseType='Literal' attached as attributes to a property.");
        } else if(_state.equals(State.PTR_OP_CP)){
            fatalError("Cannot have both rdf:datatype and rdf:parseType='Resource' attached as attributes to a property.");
        }
        _datatype = createResource(resolveFullURI(o, false));
        _state = State.T_CP; //will be incremented outside of call to 5 :(
    } else if(p.equals(RDF.PARSETYPE)){
        if(o.equals("Collection")){
            if(_state.equals(State.CP)){
                fatalError("Cannot have both rdf:resource and rdf:parseType='Collection' attached as attributes to a property.");
            } else if(_state.equals(State.T_CP)){
                fatalError("Cannot have both rdf:datatype and rdf:parseType='Collection' attached as attributes to a property.");
            } else if(_state.equals(State.PTL_XML)){
                fatalError("Cannot have both rdf:parseType='Literal' and rdf:parseType='Collection' attached as attributes to a property.");
            } else if(_state.equals(State.PTR_OP_CP)){
                fatalError("Cannot have both rdf:parseType='Resource' and rdf:parseType='Collection' attached as attributes to a property.");
            }
            _state=State.PTC_OR_CP; //will be increment outside to 6 :(
        }
        else if(o.equals("Literal")){
            if(_state.equals(State.CP)){
                fatalError("Cannot have both rdf:resource and rdf:parseType='Literal' attached as attributes to a property.");
            } else if(_state.equals(State.T_CP)){
                fatalError("Cannot have both rdf:datatype and rdf:parseType='Literal' attached as attributes to a property.");
            } else if(_state.equals(State.PTC_OR_CP)){
                fatalError("Cannot have both rdf:parseType='Collection' and rdf:parseType='Literal' attached as attributes to a property.");
            } else if(_state.equals(State.PTR_OP_CP)){
                fatalError("Cannot have both rdf:parseType='Resource' and rdf:parseType='Literal' attached as attributes to a property.");
            }
            _state=State.PTL_XML; //will be incremented outside of call to 7 :(
            // remember which property opened the literal, so its matching close
            // tag can be detected, and start a fresh literal buffer
            _sptlp = _currentP;
            _sptldepth = 0;
            _xmllPrefixes = new HashMap<String, String>();
            _currentL = new StringBuffer();
        } else if(o.equals("Resource")){
            if(_state.equals(State.CP)){
                fatalError("Cannot have both rdf:resource and rdf:parseType='Resource' attached as attributes to a property.");
            } else if(_state.equals(State.T_CP)){
                fatalError("Cannot have both rdf:datatype and rdf:parseType='Resource' attached as attributes to a property.");
            } else if(_state.equals(State.PTC_OR_CP)){
                fatalError("Cannot have both rdf:parseType='Collection' and rdf:parseType='Resource' attached as attributes to a property.");
            } else if(_state.equals(State.PTL_XML)){
                fatalError("Cannot have both rdf:parseType='Literal' and rdf:parseType='Resource' attached as attributes to a property.");
            }
            _currentL = null;
            _sptr.add(_depth);
            _state=State.PTR_OP_CP; //will be incremented outside of call to 8 :(
        }
    } else if(p.equals(RDF.ID)){
        // rdf:ID on a property element requests reification of the statement
        _currentReify = createResource(resolveFullURI(o, true));
    } else{
        // any other attribute is a property/literal edge on the in-line object
        if(_state.equals(State.T_CP)){
            fatalError("Cannot have both rdf:datatype and "+qname+" attached as attributes to a property.");
        } else if(_state.equals(State.PTC_OR_CP)){
            fatalError("Cannot have both rdf:parseType='Collection' and "+qname+" attached as attributes to a property.");
        } else if(_state.equals(State.PTL_XML)){
            fatalError("Cannot have both rdf:parseType='Literal' and "+qname+" attached as attributes to a property.");
        } else if(_state.equals(State.PTR_OP_CP)){
            fatalError("Cannot have both rdf:parseType='Resource' and "+qname+" attached as attributes to a property.");
        }
        if(_currentPRD==null){
            _currentPRD = new ResourceDescription();
        }
        _currentPRD.addEdge(p, createLiteral(o));
        _state = State.CP;
    }
}
/**
 * SAX end-element hook: unwinds the state machine for a closing tag —
 * popping base-URI and language scopes, closing resources / properties /
 * collections, emitting the accumulated literal for text and
 * parseType="Literal" properties, and restoring per-depth rdf:li counters.
 */
public void endElement (String name, String lname, String qname) throws SAXException{
    Resource rq = createResource(name+lname);
    // pop the base-URI scope; a non-null return means a base went out of scope
    URI base = _sbq.decrementScope();
    if(base!=null){
        _currentBase = base;
    }
    // true if this depth opened a parseType="Resource" property
    boolean ptr = _sptr.remove(_depth);
    // true if this tag closes the current parseType="Literal" property
    // (container-membership properties rdf:li/rdf:_n are treated as equivalent)
    boolean endptl = _sptldepth==0 && _sptlp!=null && (_sptlp.equals(rq) || (isCMP(_sptlp) && isCMP(rq)));
    if(rq.equals(RDF.RDF)){
        //error handled by SAX, not valid XML;
    } else if(_state.equals(State.OR)){
        //error handled by SAX, not valid XML;
    } else if(_state.equals(State.CR_OP)){ //closing resource or closing parseType="Resource" property
        _fifoS.remove(_fifoS.size()-1);
        _li.remove(_depth);
        if(ptr){ //closing parseType="Resource" property
            //close 'virtual' resource
            _currentS = _fifoS.get(_fifoS.size()-1);
            Integer li = _li.get(_depth-1);
            if(li!=null)
                _currentLi = li;
            //then close property, leave in state 2
            _depth--;
        }
        else{
            if(_depth>0){ //not top level resource (not including rdf:RDF)
                _currentS = _fifoS.get(_fifoS.size()-1);
                Integer li = _li.get(_depth-1);
                if(li!=null)
                    _currentLi = li;
                _state=State.CP;
                // if a collection is open at this depth, resume collection mode
                Node cc = _coll.get(_depth);
                if(cc!=null){
                    _currentColl = cc;
                    _state=State.PTC_OR_CP;
                }
            } else{ //close top level resource (not including rdf:RDF) -- end of document
                _state=State.START;
            }
        }
    } else if(_state.equals(State.T_CP_OR) || _state.equals(State.T_CP)){ //closing empty property or property after some text
        String s = "";
        if(_currentL!=null)
            s = _currentL.toString();
        Literal l = createLiteral(s);
        handleStatement(_currentS, _currentP, l);
        _currentL=null;
        _depth--;
        _state=State.CR_OP;
    } else if(_state.equals(State.CP)){ //closing property after closing resource
        _depth--;
        _state=State.CR_OP;
    } else if(_state.equals(State.PTC_OR_CP)){ //closing parseType="Collection" property
        // terminate the rdf:List with rdf:nil (empty collection links the property itself)
        _coll.remove(_depth);
        if(_currentColl!=null){
            handleStatement(_currentColl, RDF.REST, RDF.NIL);
            _currentColl=null;
        }else{
            handleStatement(_currentS, _currentP, RDF.NIL);
        }
        _depth--;
        _state=State.CR_OP;
    } else if(_state.equals(State.PTL_XML)){ //closing parseType="Literal" property
        if(endptl){
            // emit the serialised XML as an rdf:XMLLiteral
            String s = "";
            _state=State.CR_OP;
            if(_currentL!=null)
                s = _currentL.toString();
            _datatype = RDF.XMLLITERAL;
            Literal l = createLiteral(s);
            handleStatement(_currentS, _currentP, l);
            _currentL=null;
            _sptldepth = 0;
            _sptlp = null;
            _depth--;
        }else{
            if(rq.equals(_sptlp)){
                _sptldepth--;
            }
            handleParseTypeLiteralEndElement(name, lname, qname);
        }
    } else if(_state.equals(State.PTR_OP_CP)){ //closing empty parseType resource property
        handleStatement(_currentS, _currentP, generateBNode());
        _depth--;
        _state=State.CR_OP;
    }
    // pop the language scope; restore the previous language (or none for "")
    String lang = _slang.decrementScope();
    if(lang!=null){
        if(lang.equals(""))
            _currentLang = null;
        else
            // NOTE(review): default-locale toLowerCase() here, while
            // initialiseLang() uses Locale.ENGLISH — confirm intended
            _currentLang = lang.toLowerCase();
    }
}
/** True if the property is a container-membership property (rdf:li or rdf:_n). */
private static boolean isCMP(Resource p1) {
    if(p1.equals(RDF.LI)){
        return true;
    }
    return p1.toString().startsWith(RDF.NS+"_");
}
/** Serialises a closing tag into the rdf:parseType="Literal" buffer. */
private void handleParseTypeLiteralEndElement(final String name, final String lname, final String qname){
    _currentL.append("</").append(qname).append('>');
}
/**
 * Allocates the next fresh blank node; when skolemising, the label is made
 * globally unique by embedding the escaped document URI.
 */
private BNode generateBNode(){
    final String label = BNODE_PREFIX + _bnode;
    _bnode++;
    return _skolemise
            ? new BNode(_escapedDocURI + "xx" + label)
            : new BNode(label);
}
/**
 * Builds a (possibly skolemised) blank node from an rdf:nodeID value, warning
 * on surrounding whitespace or a non-NCName label.
 */
private BNode generateBNode(String nodeID) throws SAXException{
    int oldLength = nodeID.length();
    nodeID = nodeID.trim();
    if(oldLength!=nodeID.length())
        warning("Leading or trailing whitespace in rdf:nodeID "+nodeID+".");
    if (!Pattern.matches(XMLRegex.NC_NAME, nodeID))
        warning("nodeID value '"+nodeID+"' is not a valid XML NCName.");
    nodeID = BNode.escapeForBNode(nodeID);
    if(_skolemise){
        return new BNode(_escapedDocURI+"xx"+nodeID);
    } else{
        return new BNode(nodeID);
    }
}
/**Create and handle warning exception
* @throws SAXException
*/
private void warning(final String msg) throws SAXException{
SAXException e = new SAXException(new RDFXMLParseException("WARNING: "+msg+" "+getLocation()));
warning(e);
}
/**Create and handle error exception
* @throws SAXException
*/
private void error(final String msg) throws SAXException{
SAXException e = new SAXException(new RDFXMLParseException("ERROR: "+msg+" "+getLocation()));
error(e);
}
	/**Create and handle fatal error exception, carrying the current document location.
	 * @throws SAXException always, rethrown by {@link #fatalError(SAXException)}
	 */
	private void fatalError(final String msg) throws SAXException{
		SAXException e = new SAXException(new RDFXMLParseException("FATAL ERROR: "+msg+" "+getLocation()));
		fatalError(e);
	}
/**Warning
* @throws SAXException
*/
public void warning(SAXException e) throws SAXException{
_log.warning(e.getMessage());
if(_strict)
throw e;
}
/**
* Recoverable error
* @throws SAXException
*/
public void error(SAXException e) throws SAXException{
_log.warning(e.getMessage());
if(_strict)
throw e;
}
	/**
	 * Unrecoverable error: logs at severe level and always rethrows.
	 * @throws SAXException always
	 */
	public void fatalError(SAXException e) throws SAXException{
		_log.severe(e.getMessage());
		throw e;
	}
public void ignorableWhitespace(char[] buf, int offset, int len) throws SAXException{
char[] cs = new char[len];
System.arraycopy(buf, offset, cs, 0, len);
String s = new String(cs);
if(_state.expectText()){
if(_currentL==null){
_currentL = new StringBuffer();
}
_currentL.append(s);
}
}
private boolean nodeIsAllowed(final String qname, final Resource r) throws SAXException {
if(NOT_ALLOWED_NODE_TS.contains(r)){
error("Attribute '"+qname+"' not allowed to be used as a node element.");
return false;
}
return true;
}
public void characters (char buf [], int offset, int len) throws SAXException{
char[] cs = new char[len];
System.arraycopy(buf, offset, cs, 0, len);
String v = new String(cs);
boolean ws = v.trim().equals("");
if(ws){
ignorableWhitespace(buf, offset, len);
return;
}
if(_state.expectText()){
if(_currentL==null){
// if(_state.equals(State.PTL_XML)){
// fatalError("Dangling text '"+v+"' found outside of enclosing tags, inside parseType=\"Literal\" tags.");
// }
_currentL = new StringBuffer();
}
for(int i=offset; i<(len+offset); i++){
char c = buf[i];
if(_state.equals(State.PTL_XML)){
if (c == '"')
_currentL.append(""");
else if (c == '&')
_currentL.append("&");
else if (c == '<')
_currentL.append("<");
else if (c == '>')
_currentL.append(">");
else {
int ci = 0xffff & c;
if (ci < 160 )
// nothing special only 7 Bit
_currentL.append(c);
else {
// Not 7 Bit use the unicode system
_currentL.append("&#");
_currentL.append(new Integer(ci).toString());
_currentL.append(';');
}
}
}
else _currentL.append(c);
}
if(_state.equals(State.T_CP_OR))
_state=State.T_CP;
} else{
fatalError("Dangling text '"+v+"' found outside of enclosing property tags.");
}
}
	/** SAX callback for skipped entities; intentionally a no-op. */
	public void skippedEntity(String name) throws SAXException{
		;
	}
public InputSource resolveEntity(String publicId,
String systemId)
throws SAXException{
return null;
}
public void unparsedEntityDecl(String name,
String publicId,
String systemId,
String notationName)
throws SAXException{
;
}
	/** Stores the SAX locator used to report line/column positions in messages. */
	public void setDocumentLocator(Locator loc) {
		_loc = loc;
	}
private String getLocation(){
if(_loc!=null)
return "Line "+_loc.getLineNumber()+" column "+_loc.getColumnNumber()+".";
return "";
}
public static Resource createResource(String raw){
return new Resource(NxUtil.escapeForNx(raw));
}
private void handleStatement(final Node... triple){
processStatement(triple);
if(_currentReify!=null){
processStatement(_currentReify, RDF.TYPE, RDF.STATEMENT);
processStatement(_currentReify, RDF.SUBJECT, triple[0]);
processStatement(_currentReify, RDF.PREDICATE, triple[1]);
processStatement(_currentReify, RDF.OBJECT, triple[2]);
_currentReify = null;
}
}
private void processStatement(final Node... triple){
if(_con!=null){
_c.processStatement(new Node[]{triple[0],triple[1],triple[2],_con});
}
else _c.processStatement(triple);
}
public static class ResourceDescription{
private Node _id = null;
private ArrayList<Node[]> _edges;
public ResourceDescription(){
_edges = new ArrayList<Node[]>();
}
public ResourceDescription(Node id){
_edges = new ArrayList<Node[]>();
_id = id;
}
public void setIdentifier(Node id){
_id = id;
}
public Node getIdentifier(){
return _id;
}
public void addEdge(Node... po){
_edges.add(po);
}
public ArrayList<Node[]> getEdges(){
return _edges;
}
public String toString(){
StringBuffer buf = new StringBuffer();
for(Node[] edge:_edges){
buf.append(_id.toN3()+" "+Nodes.toN3(edge)+"\n");
}
return buf.toString();
}
}
	/** Exception type used for RDF/XML-specific parse problems. */
	public static class RDFXMLParseException extends ParseException{
		/**
		 * Serial version UID for serialization.
		 */
		private static final long serialVersionUID = 1L;
		public RDFXMLParseException(){
			super();
		}
		public RDFXMLParseException(String msg){
			super(msg);
		}
	}
// public static class NodeIntPair{
// private Node _n;
// private int _i;
// public NodeIntPair(Node n, int i){
// _n = n;
// _i = i;
// }
// public boolean equals(Object o){
// if(o instanceof NodeIntPair){
// return equals((NodeIntPair)o);
// } return false;
// }
// public boolean equals(NodeIntPair nip){
// if(nip._i==_i){
// return nip._n.equals(_n);
// } return false;
// }
// public Node getNode() {
// return _n;
// }
// public void setNode(Node n) {
// _n = n;
// }
// public int getInt() {
// return _i;
// }
// public void setInt(int i) {
// _i = i;
// }
// public String toString(){
// return(_n.toN3()+" "+_i);
// }
// public int hashCode(){
// return _n.hashCode() + _i;
// }
// }
public static class ScopedThing<E>{
private LinkedList<Integer> _skip;
private LinkedList<E> _scoped;
private int _currentSkip = -1;
private E _current = null;
private E _global;
public ScopedThing(){
this(null);
}
/**
* Checks if an element is in-scope. Not used at the moment/not optimised.
*/
public boolean inScope(E element){
if(_current!=null && _current.equals(element)){
return true;
} else if(_global!=null && _global.equals(element)){
return true;
} else if(_scoped.contains(element)){
return true;
}
return false;
}
public ScopedThing(E global){
_currentSkip = -1;
_skip = new LinkedList<Integer>();
_scoped = new LinkedList<E>();
_global = global;
}
/**
* Increments scope of all objects in stack.
*/
public void incrementScope(){
if(_currentSkip!=-1)
_currentSkip++;
}
/**
* Decrements scope, removes out of scope objects as applicable and checks if in-scope object has changed.
* @return New in-scope object iff the current in-scope object changes, otherwise null.
*/
public E decrementScope(){
if(_currentSkip==-1){
return null;
}
else{
_currentSkip--;
if(_currentSkip==0){
if(_skip.size()>0){
_currentSkip = _skip.removeLast();
_current = _scoped.removeLast();
return _current;
} else{
_currentSkip=-1;
return _global;
}
}
return null;
}
}
/**
* Adds a new scoped element to the stack.
*/
public void addNewScopedElement(E element){
if(_currentSkip!=-1){
_skip.add(_currentSkip);
_scoped.add(_current);
}
_currentSkip=0;
_current = element;
}
/**
* Sets current scoped element.
*/
public void setCurrent(E element){
_current = element;
}
}
}
| |
/*
* Copyright (c) 2005-2014, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.andes.server.cluster.coordination.hazelcast;
import com.hazelcast.core.*;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.andes.configuration.AndesConfigurationManager;
import org.wso2.andes.configuration.enums.AndesConfiguration;
import org.wso2.andes.kernel.AndesException;
import org.wso2.andes.server.cluster.coordination.ClusterCoordinationHandler;
import org.wso2.andes.server.cluster.coordination.ClusterNotification;
import org.wso2.andes.server.cluster.coordination.CoordinationConstants;
import org.wso2.andes.server.cluster.coordination.hazelcast.custom.serializer.wrapper
.HashmapStringTreeSetWrapper;
import org.wso2.andes.server.cluster.coordination.hazelcast.custom.serializer.wrapper.TreeSetSlotWrapper;
import org.wso2.andes.server.cluster.coordination.hazelcast.custom.serializer.wrapper.TreeSetLongWrapper;
import java.util.*;
/**
 * This is a singleton class, which contains all Hazelcast related operations.
 */
public class HazelcastAgent {
    private static final Log log = LogFactory.getLog(HazelcastAgent.class);
    /**
     * Value used to indicate the cluster initialization success state.
     */
    private static final long INIT_SUCCESSFUL = 1L;
    /**
     * Singleton HazelcastAgent instance.
     */
    private static HazelcastAgent hazelcastAgentInstance = new HazelcastAgent();
    /**
     * Hazelcast instance exposed by Carbon.
     */
    private HazelcastInstance hazelcastInstance;
    /**
     * Distributed topic to communicate subscription change notifications among cluster nodes.
     */
    private ITopic<ClusterNotification> subscriptionChangedNotifierChannel;
    /**
     * Distributed topic to communicate binding change notifications among cluster nodes.
     */
    private ITopic<ClusterNotification> bindingChangeNotifierChannel;
    /**
     * Distributed topic to communicate queue purge notifications among cluster nodes.
     */
    private ITopic<ClusterNotification> queueChangedNotifierChannel;
    /**
     * Distributed topic to communicate exchange change notifications among cluster nodes.
     */
    private ITopic<ClusterNotification> exchangeChangeNotifierChannel;
    /*
     * The distributed maps below are used for slot management.
     */
    /**
     * Distributed map to store message ID lists against queue name.
     */
    private IMap<String, TreeSetLongWrapper> slotIdMap;
    /**
     * Keeps track of slots assigned up to now. The key of the map contains nodeID+"_"+queueName.
     */
    private IMap<String, HashmapStringTreeSetWrapper> slotAssignmentMap;
    /**
     * To keep track of slots that overlap with already assigned slots (in slotAssignmentMap). This is to ensure that
     * messages assigned to a specific assigned slot are only handled by that node itself.
     */
    private IMap<String, HashmapStringTreeSetWrapper> overLappedSlotMap;
    /**
     * Distributed map to store the last assigned ID against queue name.
     */
    private IMap<String, Long> lastAssignedIDMap;
    /**
     * Distributed map to store the last published ID against node ID.
     */
    private IMap<String, Long> lastPublishedIDMap;
    /**
     * Distributed map to keep track of non-empty slots which are unassigned from
     * other nodes.
     */
    private IMap<String, TreeSetSlotWrapper> unAssignedSlotMap;
    /**
     * This map is used to store thrift server host and thrift server port;
     * the map's key is port or host name.
     */
    private IMap<String,String> thriftServerDetailsMap;
    /**
     * Unique ID generated to represent the node.
     * This ID is used when generating message IDs.
     */
    private int uniqueIdOfLocalMember;
    /**
     * Lock used to initialize the Slot map used by the Slot manager.
     */
    private ILock initializationLock;
    /**
     * This is used to indicate if the cluster initialization was done properly. Uses an atomic long
     * since an atomic boolean is not available in the current Hazelcast implementation.
     */
    private IAtomicLong initializationDoneIndicator;
    /**
     * This map is used to store the coordinator node's host address and port.
     */
    private IMap<String,String> coordinatorNodeDetailsMap;
    /**
     * Private constructor (singleton).
     */
    private HazelcastAgent() {
    }
    /**
     * Get singleton HazelcastAgent.
     *
     * @return HazelcastAgent
     */
    public static synchronized HazelcastAgent getInstance() {
        return hazelcastAgentInstance;
    }
    /**
     * Initialize HazelcastAgent instance: registers all cluster listeners and obtains the
     * distributed data structures used for slot management and coordination.
     *
     * @param hazelcastInstance obtained hazelcastInstance from the OSGI service
     */
    @SuppressWarnings("unchecked")
    public void init(HazelcastInstance hazelcastInstance) {
        log.info("Initializing Hazelcast Agent");
        this.hazelcastInstance = hazelcastInstance;
        // membership changes
        this.hazelcastInstance.getCluster().addMembershipListener(new AndesMembershipListener());
        // subscription changes
        this.subscriptionChangedNotifierChannel = this.hazelcastInstance.getTopic(
                CoordinationConstants.HAZELCAST_SUBSCRIPTION_CHANGED_NOTIFIER_TOPIC_NAME);
        ClusterSubscriptionChangedListener clusterSubscriptionChangedListener = new ClusterSubscriptionChangedListener();
        clusterSubscriptionChangedListener.addSubscriptionListener(new ClusterCoordinationHandler(this));
        this.subscriptionChangedNotifierChannel.addMessageListener(clusterSubscriptionChangedListener);
        // exchange changes
        this.exchangeChangeNotifierChannel = this.hazelcastInstance.getTopic(
                CoordinationConstants.HAZELCAST_EXCHANGE_CHANGED_NOTIFIER_TOPIC_NAME);
        ClusterExchangeChangedListener clusterExchangeChangedListener = new ClusterExchangeChangedListener();
        clusterExchangeChangedListener.addExchangeListener(new ClusterCoordinationHandler(this));
        this.exchangeChangeNotifierChannel.addMessageListener(clusterExchangeChangedListener);
        // queue changes
        this.queueChangedNotifierChannel = this.hazelcastInstance.getTopic(
                CoordinationConstants.HAZELCAST_QUEUE_CHANGED_NOTIFIER_TOPIC_NAME);
        ClusterQueueChangedListener clusterQueueChangedListener = new ClusterQueueChangedListener();
        clusterQueueChangedListener.addQueueListener(new ClusterCoordinationHandler(this));
        this.queueChangedNotifierChannel.addMessageListener(clusterQueueChangedListener);
        // binding changes
        this.bindingChangeNotifierChannel = this.hazelcastInstance.getTopic(
                CoordinationConstants.HAZELCAST_BINDING_CHANGED_NOTIFIER_TOPIC_NAME);
        ClusterBindingChangedListener clusterBindingChangedListener = new ClusterBindingChangedListener();
        clusterBindingChangedListener.addBindingListener(new ClusterCoordinationHandler(this));
        this.bindingChangeNotifierChannel.addMessageListener(clusterBindingChangedListener);
        // generates an id for the node that is unique within the cluster
        IdGenerator idGenerator = hazelcastInstance.getIdGenerator(CoordinationConstants.HAZELCAST_ID_GENERATOR_NAME);
        this.uniqueIdOfLocalMember = (int) idGenerator.newId();
        // initialize hazelcast maps for slots
        unAssignedSlotMap = hazelcastInstance.getMap(CoordinationConstants.UNASSIGNED_SLOT_MAP_NAME);
        slotIdMap = hazelcastInstance.getMap(CoordinationConstants.SLOT_ID_MAP_NAME);
        lastAssignedIDMap = hazelcastInstance.getMap(CoordinationConstants.LAST_ASSIGNED_ID_MAP_NAME);
        lastPublishedIDMap = hazelcastInstance.getMap(CoordinationConstants.LAST_PUBLISHED_ID_MAP_NAME);
        slotAssignmentMap = hazelcastInstance.getMap(CoordinationConstants.SLOT_ASSIGNMENT_MAP_NAME);
        overLappedSlotMap = hazelcastInstance.getMap(CoordinationConstants.OVERLAPPED_SLOT_MAP_NAME);
        // initialize hazelcast map for thrift server details
        thriftServerDetailsMap = hazelcastInstance.getMap(CoordinationConstants.THRIFT_SERVER_DETAILS_MAP_NAME);
        // initialize hazelcast map for coordinator node details
        coordinatorNodeDetailsMap = hazelcastInstance.getMap(CoordinationConstants.COORDINATOR_NODE_DETAILS_MAP_NAME);
        // initialize distributed lock and indicator related to slot map initialization
        initializationLock = hazelcastInstance.getLock(CoordinationConstants.INITIALIZATION_LOCK);
        initializationDoneIndicator = hazelcastInstance
                .getAtomicLong(CoordinationConstants.INITIALIZATION_DONE_INDICATOR);
        log.info("Successfully initialized Hazelcast Agent");
        if (log.isDebugEnabled()) {
            log.debug("Unique ID generation for message ID generation:" + uniqueIdOfLocalMember);
        }
    }
    /**
     * Node ID is generated in the format of "NODE/<host IP>:<Port>"
     * @return NodeId Identifier of the node in the cluster
     */
    public String getNodeId() {
        String nodeId;
        // Get Node ID configured by user in broker.xml (if not "default" we must use it as the ID)
        nodeId = AndesConfigurationManager.readValue(AndesConfiguration.COORDINATION_NODE_ID);
        // If the config value is "default" we must generate the ID
        if (AndesConfiguration.COORDINATION_NODE_ID.get().getDefaultValue().equals(nodeId)) {
            Member localMember = hazelcastInstance.getCluster().getLocalMember();
            nodeId = CoordinationConstants.NODE_NAME_PREFIX + localMember.getSocketAddress();
        }
        return nodeId;
    }
    /**
     * All members of the cluster are returned as a Set of Members
     *
     * @return Set of Members
     */
    public Set<Member> getAllClusterMembers() {
        return hazelcastInstance.getCluster().getMembers();
    }
    /**
     * Get node IDs of all nodes available in the cluster.
     *
     * @return List of node IDs.
     */
    public List<String> getMembersNodeIDs() {
        Set<Member> members = this.getAllClusterMembers();
        List<String> nodeIDList = new ArrayList<String>();
        for (Member member : members) {
            nodeIDList.add(CoordinationConstants.NODE_NAME_PREFIX +
                    member.getSocketAddress());
        }
        return nodeIDList;
    }
    /**
     * Get local node.
     *
     * @return local node as a Member.
     */
    public Member getLocalMember() {
        return hazelcastInstance.getCluster().getLocalMember();
    }
    /**
     * Get number of members in the cluster.
     *
     * @return number of members.
     */
    public int getClusterSize() {
        return hazelcastInstance.getCluster().getMembers().size();
    }
    /**
     * Get unique ID to represent local member.
     *
     * @return unique ID.
     */
    public int getUniqueIdForNode() {
        return uniqueIdOfLocalMember;
    }
    /**
     * Get node ID of the given node.
     *
     * @param node cluster node to get the ID
     * @return node ID.
     */
    public String getIdOfNode(Member node) {
        return CoordinationConstants.NODE_NAME_PREFIX +
                node.getSocketAddress();
    }
    /**
     * Each member of the cluster is given an unique UUID and here the UUIDs of all nodes are sorted
     * and the index of the belonging UUID of the given node is returned.
     *
     * @param node node to get the index
     * @return the index of the specified node
     */
    public int getIndexOfNode(Member node) {
        TreeSet<String> membersUniqueRepresentations = new TreeSet<String>();
        for (Member member : this.getAllClusterMembers()) {
            membersUniqueRepresentations.add(member.getUuid());
        }
        // headSet size == number of UUIDs sorting before this node's UUID
        return membersUniqueRepresentations.headSet(node.getUuid()).size();
    }
    /**
     * Get the index where the local node is placed when all
     * the cluster nodes are sorted according to their UUID.
     *
     * @return the index of the local node
     */
    public int getIndexOfLocalNode() {
        return this.getIndexOfNode(this.getLocalMember());
    }
    /**
     * Publish a subscription change notification to the cluster.
     */
    public void notifySubscriptionsChanged(ClusterNotification clusterNotification) {
        if (log.isDebugEnabled()) {
            log.debug("Sending GOSSIP: " + clusterNotification.getDescription());
        }
        this.subscriptionChangedNotifierChannel.publish(clusterNotification);
    }
    /**
     * Publish a queue change notification to the cluster.
     *
     * @throws AndesException on a failure to publish the notification
     */
    public void notifyQueuesChanged(ClusterNotification clusterNotification) throws AndesException {
        if (log.isDebugEnabled()) {
            log.debug("Sending GOSSIP: " + clusterNotification.getDescription());
        }
        try {
            this.queueChangedNotifierChannel.publish(clusterNotification);
        } catch (Exception e) {
            log.error("Error while sending queue change notification : " + clusterNotification.getEncodedObjectAsString(), e);
            throw new AndesException("Error while sending queue change notification : " + clusterNotification.getEncodedObjectAsString(), e);
        }
    }
    /**
     * Publish an exchange change notification to the cluster.
     *
     * @throws AndesException on a failure to publish the notification
     */
    public void notifyExchangesChanged(ClusterNotification clusterNotification) throws AndesException {
        if (log.isDebugEnabled()) {
            log.debug("Sending GOSSIP: " + clusterNotification.getDescription());
        }
        try {
            this.exchangeChangeNotifierChannel.publish(clusterNotification);
        } catch (Exception e) {
            log.error("Error while sending exchange change notification" + clusterNotification.getEncodedObjectAsString(), e);
            throw new AndesException("Error while sending exchange change notification" + clusterNotification.getEncodedObjectAsString(), e);
        }
    }
    /**
     * Publish a binding change notification to the cluster.
     *
     * @throws AndesException on a failure to publish the notification
     */
    public void notifyBindingsChanged(ClusterNotification clusterNotification) throws AndesException {
        if (log.isDebugEnabled()) {
            log.debug("GOSSIP: " + clusterNotification.getDescription());
        }
        try {
            this.bindingChangeNotifierChannel.publish(clusterNotification);
        } catch (Exception e) {
            log.error("Error while sending binding change notification" + clusterNotification.getEncodedObjectAsString(), e);
            throw new AndesException("Error while sending binding change notification" + clusterNotification.getEncodedObjectAsString(), e);
        }
    }
    public IMap<String, TreeSetSlotWrapper> getUnAssignedSlotMap() {
        return unAssignedSlotMap;
    }
    public IMap<String, TreeSetLongWrapper> getSlotIdMap() {
        return slotIdMap;
    }
    public IMap<String, Long> getLastAssignedIDMap() {
        return lastAssignedIDMap;
    }
    public IMap<String, Long> getLastPublishedIDMap() {
        return lastPublishedIDMap;
    }
    public IMap<String, HashmapStringTreeSetWrapper> getSlotAssignmentMap() {
        return slotAssignmentMap;
    }
    public IMap<String, HashmapStringTreeSetWrapper> getOverLappedSlotMap() {
        return overLappedSlotMap;
    }
    /**
     * This method returns a map containing thrift server port and hostname.
     * @return thriftServerDetailsMap
     */
    public IMap<String, String> getThriftServerDetailsMap() {
        return thriftServerDetailsMap;
    }
    /**
     * This method returns a map containing the coordinator node's host address and port.
     * @return coordinatorNodeDetailsMap
     */
    public IMap<String, String> getCoordinatorNodeDetailsMap() {
        return coordinatorNodeDetailsMap;
    }
    /**
     * Acquire the distributed lock related to cluster initialization. This lock is required to
     * avoid two nodes initializing the map twice.
     */
    public void acquireInitializationLock() {
        if (log.isDebugEnabled()) {
            log.debug("Trying to acquire initialization lock.");
        }
        initializationLock.lock();
        if (log.isDebugEnabled()) {
            log.debug("Initialization lock acquired.");
        }
    }
    /**
     * Inform other members in the cluster that the cluster was initialized properly.
     * (Method name spelling kept as-is for backward compatibility with existing callers.)
     */
    public void indicateSuccessfulInitilization() {
        initializationDoneIndicator.set(INIT_SUCCESSFUL);
    }
    /**
     * Check if a member has already initialized the cluster
     *
     * @return true if cluster is already initialized
     */
    public boolean isClusterInitializedSuccessfully() {
        return initializationDoneIndicator.get() == INIT_SUCCESSFUL;
    }
    /**
     * Release the initialization lock.
     */
    public void releaseInitializationLock() {
        initializationLock.unlock();
        if (log.isDebugEnabled()) {
            log.debug("Initialization lock released.");
        }
    }
}
| |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package io.flutter.embedding.android;
import android.app.Activity;
import android.arch.lifecycle.Lifecycle;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.annotation.VisibleForTesting;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentActivity;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import io.flutter.Log;
import io.flutter.embedding.engine.FlutterEngine;
import io.flutter.embedding.engine.FlutterShellArgs;
import io.flutter.embedding.engine.renderer.FlutterUiDisplayListener;
import io.flutter.plugin.platform.PlatformPlugin;
import io.flutter.view.FlutterMain;
/**
* {@code Fragment} which displays a Flutter UI that takes up all available {@code Fragment} space.
*
* <p>Using a {@code FlutterFragment} requires forwarding a number of calls from an {@code Activity}
* to ensure that the internal Flutter app behaves as expected:
*
* <ol>
* <li>{@link #onPostResume()}
* <li>{@link #onBackPressed()}
* <li>{@link #onRequestPermissionsResult(int, String[], int[])} ()}
* <li>{@link #onNewIntent(Intent)} ()}
* <li>{@link #onUserLeaveHint()}
* <li>{@link #onTrimMemory(int)}
* </ol>
*
* Additionally, when starting an {@code Activity} for a result from this {@code Fragment}, be sure
* to invoke {@link Fragment#startActivityForResult(Intent, int)} rather than {@link
* android.app.Activity#startActivityForResult(Intent, int)}. If the {@code Activity} version of the
* method is invoked then this {@code Fragment} will never receive its {@link
* Fragment#onActivityResult(int, int, Intent)} callback.
*
* <p>If convenient, consider using a {@link FlutterActivity} instead of a {@code FlutterFragment}
* to avoid the work of forwarding calls.
*
* <p>{@code FlutterFragment} supports the use of an existing, cached {@link FlutterEngine}. To use
* a cached {@link FlutterEngine}, ensure that the {@link FlutterEngine} is stored in {@link
* FlutterEngineCache} and then use {@link #withCachedEngine(String)} to build a {@code
* FlutterFragment} with the cached {@link FlutterEngine}'s ID.
*
* <p>It is generally recommended to use a cached {@link FlutterEngine} to avoid a momentary delay
* when initializing a new {@link FlutterEngine}. The two exceptions to using a cached {@link
* FlutterEngine} are:
*
* <p>
*
* <ul>
* <li>When {@code FlutterFragment} is in the first {@code Activity} displayed by the app, because
* pre-warming a {@link FlutterEngine} would have no impact in this situation.
* <li>When you are unsure when/if you will need to display a Flutter experience.
* </ul>
*
* <p>The following illustrates how to pre-warm and cache a {@link FlutterEngine}:
*
* <pre>{@code
* // Create and pre-warm a FlutterEngine.
* FlutterEngine flutterEngine = new FlutterEngine(context);
* flutterEngine
* .getDartExecutor()
* .executeDartEntrypoint(DartEntrypoint.createDefault());
*
* // Cache the pre-warmed FlutterEngine in the FlutterEngineCache.
* FlutterEngineCache.getInstance().put("my_engine", flutterEngine);
* }</pre>
*
* <p>If Flutter is needed in a location that can only use a {@code View}, consider using a {@link
* FlutterView}. Using a {@link FlutterView} requires forwarding some calls from an {@code
* Activity}, as well as forwarding lifecycle calls from an {@code Activity} or a {@code Fragment}.
*/
public class FlutterFragment extends Fragment implements FlutterActivityAndFragmentDelegate.Host {
private static final String TAG = "FlutterFragment";
/** The Dart entrypoint method name that is executed upon initialization. */
protected static final String ARG_DART_ENTRYPOINT = "dart_entrypoint";
/** Initial Flutter route that is rendered in a Navigator widget. */
protected static final String ARG_INITIAL_ROUTE = "initial_route";
/** Path to Flutter's Dart code. */
protected static final String ARG_APP_BUNDLE_PATH = "app_bundle_path";
/** Flutter shell arguments. */
protected static final String ARG_FLUTTER_INITIALIZATION_ARGS = "initialization_args";
/** {@link RenderMode} to be used for the {@link FlutterView} in this {@code FlutterFragment} */
protected static final String ARG_FLUTTERVIEW_RENDER_MODE = "flutterview_render_mode";
/**
* {@link TransparencyMode} to be used for the {@link FlutterView} in this {@code FlutterFragment}
*/
protected static final String ARG_FLUTTERVIEW_TRANSPARENCY_MODE = "flutterview_transparency_mode";
/** See {@link #shouldAttachEngineToActivity()}. */
protected static final String ARG_SHOULD_ATTACH_ENGINE_TO_ACTIVITY =
"should_attach_engine_to_activity";
/**
* The ID of a {@link FlutterEngine} cached in {@link FlutterEngineCache} that will be used within
* the created {@code FlutterFragment}.
*/
protected static final String ARG_CACHED_ENGINE_ID = "cached_engine_id";
/**
* True if the {@link FlutterEngine} in the created {@code FlutterFragment} should be destroyed
* when the {@code FlutterFragment} is destroyed, false if the {@link FlutterEngine} should
* outlive the {@code FlutterFragment}.
*/
protected static final String ARG_DESTROY_ENGINE_WITH_FRAGMENT = "destroy_engine_with_fragment";
/**
* Creates a {@code FlutterFragment} with a default configuration.
*
* <p>{@code FlutterFragment}'s default configuration creates a new {@link FlutterEngine} within
* the {@code FlutterFragment} and uses the following settings:
*
* <ul>
* <li>Dart entrypoint: "main"
* <li>Initial route: "/"
* <li>Render mode: surface
* <li>Transparency mode: transparent
* </ul>
*
* <p>To use a new {@link FlutterEngine} with different settings, use {@link #withNewEngine()}.
*
* <p>To use a cached {@link FlutterEngine} instead of creating a new one, use {@link
* #withCachedEngine(String)}.
*/
@NonNull
public static FlutterFragment createDefault() {
return new NewEngineFragmentBuilder().build();
}
/**
* Returns a {@link NewEngineFragmentBuilder} to create a {@code FlutterFragment} with a new
* {@link FlutterEngine} and a desired engine configuration.
*/
@NonNull
public static NewEngineFragmentBuilder withNewEngine() {
return new NewEngineFragmentBuilder();
}
/**
* Builder that creates a new {@code FlutterFragment} with {@code arguments} that correspond to
* the values set on this {@code NewEngineFragmentBuilder}.
*
* <p>To create a {@code FlutterFragment} with default {@code arguments}, invoke {@link
* #createDefault()}.
*
* <p>Subclasses of {@code FlutterFragment} that do not introduce any new arguments can use this
* {@code NewEngineFragmentBuilder} to construct instances of the subclass without subclassing
* this {@code NewEngineFragmentBuilder}. {@code MyFlutterFragment f = new
* FlutterFragment.NewEngineFragmentBuilder(MyFlutterFragment.class) .someProperty(...)
* .someOtherProperty(...) .build<MyFlutterFragment>(); }
*
* <p>Subclasses of {@code FlutterFragment} that introduce new arguments should subclass this
* {@code NewEngineFragmentBuilder} to add the new properties:
*
* <ol>
* <li>Ensure the {@code FlutterFragment} subclass has a no-arg constructor.
* <li>Subclass this {@code NewEngineFragmentBuilder}.
* <li>Override the new {@code NewEngineFragmentBuilder}'s no-arg constructor and invoke the
* super constructor to set the {@code FlutterFragment} subclass: {@code public MyBuilder()
* { super(MyFlutterFragment.class); } }
* <li>Add appropriate property methods for the new properties.
* <li>Override {@link NewEngineFragmentBuilder#createArgs()}, call through to the super method,
* then add the new properties as arguments in the {@link Bundle}.
* </ol>
*
* Once a {@code NewEngineFragmentBuilder} subclass is defined, the {@code FlutterFragment}
* subclass can be instantiated as follows. {@code MyFlutterFragment f = new MyBuilder()
* .someExistingProperty(...) .someNewProperty(...) .build<MyFlutterFragment>(); }
*/
public static class NewEngineFragmentBuilder {
private final Class<? extends FlutterFragment> fragmentClass;
private String dartEntrypoint = "main";
private String initialRoute = "/";
private String appBundlePath = null;
private FlutterShellArgs shellArgs = null;
private RenderMode renderMode = RenderMode.surface;
private TransparencyMode transparencyMode = TransparencyMode.transparent;
private boolean shouldAttachEngineToActivity = true;
/**
* Constructs a {@code NewEngineFragmentBuilder} that is configured to construct an instance of
* {@code FlutterFragment}.
*/
public NewEngineFragmentBuilder() {
fragmentClass = FlutterFragment.class;
}
/**
* Constructs a {@code NewEngineFragmentBuilder} that is configured to construct an instance of
* {@code subclass}, which extends {@code FlutterFragment}.
*/
public NewEngineFragmentBuilder(@NonNull Class<? extends FlutterFragment> subclass) {
fragmentClass = subclass;
}
/** The name of the initial Dart method to invoke, defaults to "main". */
@NonNull
public NewEngineFragmentBuilder dartEntrypoint(@NonNull String dartEntrypoint) {
this.dartEntrypoint = dartEntrypoint;
return this;
}
/**
* The initial route that a Flutter app will render in this {@link FlutterFragment}, defaults to
* "/".
*/
@NonNull
public NewEngineFragmentBuilder initialRoute(@NonNull String initialRoute) {
this.initialRoute = initialRoute;
return this;
}
/**
* The path to the app bundle which contains the Dart app to execute, defaults to {@link
* FlutterMain#findAppBundlePath()}
*/
@NonNull
public NewEngineFragmentBuilder appBundlePath(@NonNull String appBundlePath) {
this.appBundlePath = appBundlePath;
return this;
}
/** Any special configuration arguments for the Flutter engine */
@NonNull
public NewEngineFragmentBuilder flutterShellArgs(@NonNull FlutterShellArgs shellArgs) {
this.shellArgs = shellArgs;
return this;
}
/**
* Render Flutter either as a {@link RenderMode#surface} or a {@link RenderMode#texture}. You
* should use {@code surface} unless you have a specific reason to use {@code texture}. {@code
* texture} comes with a significant performance impact, but {@code texture} can be displayed
* beneath other Android {@code View}s and animated, whereas {@code surface} cannot.
*/
@NonNull
public NewEngineFragmentBuilder renderMode(@NonNull RenderMode renderMode) {
// Stored by name in createArgs() under ARG_FLUTTERVIEW_RENDER_MODE.
this.renderMode = renderMode;
return this;
}
/**
* Support a {@link TransparencyMode#transparent} background within {@link FlutterView}, or
* force an {@link TransparencyMode#opaque} background.
*
* <p>See {@link TransparencyMode} for implications of this selection.
*/
@NonNull
public NewEngineFragmentBuilder transparencyMode(@NonNull TransparencyMode transparencyMode) {
// Stored by name in createArgs() under ARG_FLUTTERVIEW_TRANSPARENCY_MODE.
this.transparencyMode = transparencyMode;
return this;
}
/**
* Whether or not this {@code FlutterFragment} should automatically attach its {@code Activity}
* as a control surface for its {@link FlutterEngine}.
*
* <p>Control surfaces are used to provide Android resources and lifecycle events to plugins
* that are attached to the {@link FlutterEngine}. If {@code shouldAttachEngineToActivity} is
* true then this {@code FlutterFragment} will connect its {@link FlutterEngine} to the
* surrounding {@code Activity}, along with any plugins that are registered with that {@link
* FlutterEngine}. This allows plugins to access the {@code Activity}, as well as receive {@code
* Activity}-specific calls, e.g., {@link android.app.Activity#onNewIntent(Intent)}. If {@code
* shouldAttachEngineToActivity} is false, then this {@code FlutterFragment} will not
* automatically manage the connection between its {@link FlutterEngine} and the surrounding
* {@code Activity}. The {@code Activity} will need to be manually connected to this {@code
* FlutterFragment}'s {@link FlutterEngine} by the app developer. See {@link
* FlutterEngine#getActivityControlSurface()}.
*
* <p>One reason that a developer might choose to manually manage the relationship between the
* {@code Activity} and {@link FlutterEngine} is if the developer wants to move the {@link
* FlutterEngine} somewhere else. For example, a developer might want the {@link FlutterEngine}
* to outlive the surrounding {@code Activity} so that it can be used later in a different
* {@code Activity}. To accomplish this, the {@link FlutterEngine} will need to be disconnected
* from the surrounding {@code Activity} at an unusual time, preventing this {@code
* FlutterFragment} from correctly managing the relationship between the {@link FlutterEngine}
* and the surrounding {@code Activity}.
*
* <p>Another reason that a developer might choose to manually manage the relationship between
* the {@code Activity} and {@link FlutterEngine} is if the developer wants to prevent, or
* explicitly control when the {@link FlutterEngine}'s plugins have access to the surrounding
* {@code Activity}. For example, imagine that this {@code FlutterFragment} only takes up part
* of the screen and the app developer wants to ensure that none of the Flutter plugins are able
* to manipulate the surrounding {@code Activity}. In this case, the developer would not want
* the {@link FlutterEngine} to have access to the {@code Activity}, which can be accomplished
* by setting {@code shouldAttachEngineToActivity} to {@code false}.
*/
@NonNull
public NewEngineFragmentBuilder shouldAttachEngineToActivity(
boolean shouldAttachEngineToActivity) {
// See the javadoc above for the implications of detaching the engine from the Activity.
this.shouldAttachEngineToActivity = shouldAttachEngineToActivity;
return this;
}
/**
 * Assembles the argument {@link Bundle} handed to the newly built {@code FlutterFragment}.
 *
 * <p>Subclasses that add their own arguments should override this method, call {@code
 * super.createArgs()}, and append their values to the returned {@link Bundle}.
 */
@NonNull
protected Bundle createArgs() {
  final Bundle args = new Bundle();
  args.putString(ARG_INITIAL_ROUTE, initialRoute);
  args.putString(ARG_APP_BUNDLE_PATH, appBundlePath);
  args.putString(ARG_DART_ENTRYPOINT, dartEntrypoint);
  // TODO(mattcarroll): determine if we should have an explicit FlutterTestFragment instead of
  // conflating.
  if (shellArgs != null) {
    args.putStringArray(ARG_FLUTTER_INITIALIZATION_ARGS, shellArgs.toArray());
  }
  // Fall back to the defaults if a mode was explicitly nulled out on the builder.
  RenderMode effectiveRenderMode = renderMode != null ? renderMode : RenderMode.surface;
  args.putString(ARG_FLUTTERVIEW_RENDER_MODE, effectiveRenderMode.name());
  TransparencyMode effectiveTransparencyMode =
      transparencyMode != null ? transparencyMode : TransparencyMode.transparent;
  args.putString(ARG_FLUTTERVIEW_TRANSPARENCY_MODE, effectiveTransparencyMode.name());
  args.putBoolean(ARG_SHOULD_ATTACH_ENGINE_TO_ACTIVITY, shouldAttachEngineToActivity);
  // A Fragment-created engine is owned by the Fragment and is destroyed with it.
  args.putBoolean(ARG_DESTROY_ENGINE_WITH_FRAGMENT, true);
  return args;
}
/**
 * Instantiates the configured {@code FlutterFragment} subclass via its no-arg constructor and
 * attaches the argument {@link Bundle} produced by {@link #createArgs()}.
 */
@NonNull
public <T extends FlutterFragment> T build() {
  try {
    @SuppressWarnings("unchecked")
    T fragment = (T) fragmentClass.getDeclaredConstructor().newInstance();
    if (fragment == null) {
      throw new RuntimeException(
          "The FlutterFragment subclass sent in the constructor ("
              + fragmentClass.getCanonicalName()
              + ") does not match the expected return type.");
    }
    fragment.setArguments(createArgs());
    return fragment;
  } catch (Exception e) {
    throw new RuntimeException(
        "Could not instantiate FlutterFragment subclass (" + fragmentClass.getName() + ")", e);
  }
}
}
/**
* Returns a {@link CachedEngineFragmentBuilder} to create a {@code FlutterFragment} with a cached
* {@link FlutterEngine} in {@link FlutterEngineCache}.
*
* <p>An {@code IllegalStateException} will be thrown during the lifecycle of the {@code
* FlutterFragment} if a cached {@link FlutterEngine} is requested but does not exist in the
* cache.
*
* <p>To create a {@code FlutterFragment} that uses a new {@link FlutterEngine}, use {@link
* #createDefault()} or {@link #withNewEngine()}.
*/
@NonNull
public static CachedEngineFragmentBuilder withCachedEngine(@NonNull String engineId) {
// The builder stores only the id; the engine is resolved from FlutterEngineCache later.
return new CachedEngineFragmentBuilder(engineId);
}
/**
* Builder that creates a new {@code FlutterFragment} that uses a cached {@link FlutterEngine}
* with {@code arguments} that correspond to the values set on this {@code Builder}.
*
* <p>Subclasses of {@code FlutterFragment} that do not introduce any new arguments can use this
* {@code Builder} to construct instances of the subclass without subclassing this {@code
* Builder}. {@code MyFlutterFragment f = new
* FlutterFragment.CachedEngineFragmentBuilder(MyFlutterFragment.class) .someProperty(...)
* .someOtherProperty(...) .build<MyFlutterFragment>(); }
*
* <p>Subclasses of {@code FlutterFragment} that introduce new arguments should subclass this
* {@code CachedEngineFragmentBuilder} to add the new properties:
*
* <ol>
* <li>Ensure the {@code FlutterFragment} subclass has a no-arg constructor.
* <li>Subclass this {@code CachedEngineFragmentBuilder}.
* <li>Override the new {@code CachedEngineFragmentBuilder}'s no-arg constructor and invoke the
* super constructor to set the {@code FlutterFragment} subclass: {@code public MyBuilder()
* { super(MyFlutterFragment.class); } }
* <li>Add appropriate property methods for the new properties.
* <li>Override {@link CachedEngineFragmentBuilder#createArgs()}, call through to the super
* method, then add the new properties as arguments in the {@link Bundle}.
* </ol>
*
* Once a {@code CachedEngineFragmentBuilder} subclass is defined, the {@code FlutterFragment}
* subclass can be instantiated as follows. {@code MyFlutterFragment f = new MyBuilder()
* .someExistingProperty(...) .someNewProperty(...) .build<MyFlutterFragment>(); }
*/
public static class CachedEngineFragmentBuilder {
private final Class<? extends FlutterFragment> fragmentClass;
private final String engineId;
// By default a cached engine outlives the Fragment; see destroyEngineWithFragment().
private boolean destroyEngineWithFragment = false;
private RenderMode renderMode = RenderMode.surface;
private TransparencyMode transparencyMode = TransparencyMode.transparent;
private boolean shouldAttachEngineToActivity = true;
// Private: instances are obtained via FlutterFragment.withCachedEngine(engineId).
private CachedEngineFragmentBuilder(@NonNull String engineId) {
this(FlutterFragment.class, engineId);
}
// Protected so subclasses can register their own Fragment class (must have a no-arg ctor).
protected CachedEngineFragmentBuilder(
@NonNull Class<? extends FlutterFragment> subclass, @NonNull String engineId) {
this.fragmentClass = subclass;
this.engineId = engineId;
}
/**
* Pass {@code true} to destroy the cached {@link FlutterEngine} when this {@code
* FlutterFragment} is destroyed, or {@code false} for the cached {@link FlutterEngine} to
* outlive this {@code FlutterFragment}.
*/
@NonNull
public CachedEngineFragmentBuilder destroyEngineWithFragment(
boolean destroyEngineWithFragment) {
this.destroyEngineWithFragment = destroyEngineWithFragment;
return this;
}
/**
* Render Flutter either as a {@link RenderMode#surface} or a {@link RenderMode#texture}. You
* should use {@code surface} unless you have a specific reason to use {@code texture}. {@code
* texture} comes with a significant performance impact, but {@code texture} can be displayed
* beneath other Android {@code View}s and animated, whereas {@code surface} cannot.
*/
@NonNull
public CachedEngineFragmentBuilder renderMode(@NonNull RenderMode renderMode) {
this.renderMode = renderMode;
return this;
}
/**
* Support a {@link TransparencyMode#transparent} background within {@link FlutterView}, or
* force an {@link TransparencyMode#opaque} background.
*
* <p>See {@link TransparencyMode} for implications of this selection.
*/
@NonNull
public CachedEngineFragmentBuilder transparencyMode(
@NonNull TransparencyMode transparencyMode) {
this.transparencyMode = transparencyMode;
return this;
}
/**
* Whether or not this {@code FlutterFragment} should automatically attach its {@code Activity}
* as a control surface for its {@link FlutterEngine}.
*
* <p>Control surfaces are used to provide Android resources and lifecycle events to plugins
* that are attached to the {@link FlutterEngine}. If {@code shouldAttachEngineToActivity} is
* true then this {@code FlutterFragment} will connect its {@link FlutterEngine} to the
* surrounding {@code Activity}, along with any plugins that are registered with that {@link
* FlutterEngine}. This allows plugins to access the {@code Activity}, as well as receive {@code
* Activity}-specific calls, e.g., {@link android.app.Activity#onNewIntent(Intent)}. If {@code
* shouldAttachEngineToActivity} is false, then this {@code FlutterFragment} will not
* automatically manage the connection between its {@link FlutterEngine} and the surrounding
* {@code Activity}. The {@code Activity} will need to be manually connected to this {@code
* FlutterFragment}'s {@link FlutterEngine} by the app developer. See {@link
* FlutterEngine#getActivityControlSurface()}.
*
* <p>One reason that a developer might choose to manually manage the relationship between the
* {@code Activity} and {@link FlutterEngine} is if the developer wants to move the {@link
* FlutterEngine} somewhere else. For example, a developer might want the {@link FlutterEngine}
* to outlive the surrounding {@code Activity} so that it can be used later in a different
* {@code Activity}. To accomplish this, the {@link FlutterEngine} will need to be disconnected
* from the surrounding {@code Activity} at an unusual time, preventing this {@code
* FlutterFragment} from correctly managing the relationship between the {@link FlutterEngine}
* and the surrounding {@code Activity}.
*
* <p>Another reason that a developer might choose to manually manage the relationship between
* the {@code Activity} and {@link FlutterEngine} is if the developer wants to prevent, or
* explicitly control when the {@link FlutterEngine}'s plugins have access to the surrounding
* {@code Activity}. For example, imagine that this {@code FlutterFragment} only takes up part
* of the screen and the app developer wants to ensure that none of the Flutter plugins are able
* to manipulate the surrounding {@code Activity}. In this case, the developer would not want
* the {@link FlutterEngine} to have access to the {@code Activity}, which can be accomplished
* by setting {@code shouldAttachEngineToActivity} to {@code false}.
*/
@NonNull
public CachedEngineFragmentBuilder shouldAttachEngineToActivity(
boolean shouldAttachEngineToActivity) {
this.shouldAttachEngineToActivity = shouldAttachEngineToActivity;
return this;
}
/**
* Creates a {@link Bundle} of arguments that are assigned to the new {@code FlutterFragment}.
*
* <p>Subclasses should override this method to add new properties to the {@link Bundle}.
* Subclasses must call through to the super method to collect all existing property values.
*/
@NonNull
protected Bundle createArgs() {
Bundle args = new Bundle();
// Unlike NewEngineFragmentBuilder, no route/entrypoint args: the cached engine is
// already running Dart code; only presentation and ownership flags are passed.
args.putString(ARG_CACHED_ENGINE_ID, engineId);
args.putBoolean(ARG_DESTROY_ENGINE_WITH_FRAGMENT, destroyEngineWithFragment);
args.putString(
ARG_FLUTTERVIEW_RENDER_MODE,
renderMode != null ? renderMode.name() : RenderMode.surface.name());
args.putString(
ARG_FLUTTERVIEW_TRANSPARENCY_MODE,
transparencyMode != null ? transparencyMode.name() : TransparencyMode.transparent.name());
args.putBoolean(ARG_SHOULD_ATTACH_ENGINE_TO_ACTIVITY, shouldAttachEngineToActivity);
return args;
}
/**
* Constructs a new {@code FlutterFragment} (or a subclass) that is configured based on
* properties set on this {@code CachedEngineFragmentBuilder}.
*/
@NonNull
public <T extends FlutterFragment> T build() {
try {
// Reflectively invoke the no-arg constructor of the configured Fragment class.
@SuppressWarnings("unchecked")
T frag = (T) fragmentClass.getDeclaredConstructor().newInstance();
if (frag == null) {
throw new RuntimeException(
"The FlutterFragment subclass sent in the constructor ("
+ fragmentClass.getCanonicalName()
+ ") does not match the expected return type.");
}
Bundle args = createArgs();
frag.setArguments(args);
return frag;
} catch (Exception e) {
throw new RuntimeException(
"Could not instantiate FlutterFragment subclass (" + fragmentClass.getName() + ")", e);
}
}
}
// Delegate that runs all lifecycle and OS hook logic that is common between
// FlutterActivity and FlutterFragment. See the FlutterActivityAndFragmentDelegate
// implementation for details about why it exists.
// Package-visible (not private) so JVM tests can inspect/replace it; see setDelegate().
@VisibleForTesting /* package */ FlutterActivityAndFragmentDelegate delegate;
/** Creates a {@code FlutterFragment}; prefer the builders over direct instantiation. */
public FlutterFragment() {
// Ensure that we at least have an empty Bundle of arguments so that we don't
// need to continually check for null arguments before grabbing one.
setArguments(new Bundle());
}
/**
* This method exists so that JVM tests can ensure that a delegate exists without putting this
* Fragment through any lifecycle events, because JVM tests cannot handle executing any lifecycle
* methods, at the time of writing this.
*
* <p>The testing infrastructure should be upgraded to make FlutterFragment tests easy to write
* while exercising real lifecycle methods. At such a time, this method should be removed.
*/
// TODO(mattcarroll): remove this when tests allow for it
// (https://github.com/flutter/flutter/issues/43798)
@VisibleForTesting
/* package */ void setDelegate(@NonNull FlutterActivityAndFragmentDelegate delegate) {
// Test-only injection point; onAttach() overwrites this with a fresh delegate.
this.delegate = delegate;
}
@Override
public void onAttach(@NonNull Context context) {
super.onAttach(context);
// A fresh delegate is created on every attach and torn down in onDetach().
delegate = new FlutterActivityAndFragmentDelegate(this);
delegate.onAttach(context);
}
@Nullable
@Override
public View onCreateView(
LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
// The delegate builds and returns the Flutter view hierarchy for this Fragment.
return delegate.onCreateView(inflater, container, savedInstanceState);
}
@Override
public void onActivityCreated(@Nullable Bundle savedInstanceState) {
super.onActivityCreated(savedInstanceState);
// Forward the lifecycle event to the shared delegate.
delegate.onActivityCreated(savedInstanceState);
}
@Override
public void onStart() {
super.onStart();
// Forward the lifecycle event to the shared delegate.
delegate.onStart();
}
@Override
public void onResume() {
super.onResume();
// Forward the lifecycle event to the shared delegate.
delegate.onResume();
}
// TODO(mattcarroll): determine why this can't be in onResume(). Comment reason, or move if
// possible.
@ActivityCallThrough
public void onPostResume() {
// Fragments receive no onPostResume of their own, so the containing Activity must call through.
delegate.onPostResume();
}
@Override
public void onPause() {
super.onPause();
// Forward the lifecycle event to the shared delegate.
delegate.onPause();
}
@Override
public void onStop() {
super.onStop();
// Forward the lifecycle event to the shared delegate.
delegate.onStop();
}
@Override
public void onDestroyView() {
super.onDestroyView();
// Forward the lifecycle event to the shared delegate.
delegate.onDestroyView();
}
@Override
public void onSaveInstanceState(Bundle outState) {
super.onSaveInstanceState(outState);
// Let the delegate persist its own state into the outgoing Bundle.
delegate.onSaveInstanceState(outState);
}
@Override
public void onDetach() {
super.onDetach();
delegate.onDetach();
// Release and drop the delegate so its resources can be reclaimed; a new delegate
// is created in onAttach() if this Fragment is re-attached.
delegate.release();
delegate = null;
}
/**
* The result of a permission request has been received.
*
* <p>See {@link android.app.Activity#onRequestPermissionsResult(int, String[], int[])}
*
* <p>
*
* @param requestCode identifier passed with the initial permission request
* @param permissions permissions that were requested
* @param grantResults permission grants or denials
*/
@ActivityCallThrough
public void onRequestPermissionsResult(
int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
// Forward the OS callback to the shared delegate.
delegate.onRequestPermissionsResult(requestCode, permissions, grantResults);
}
/**
* A new Intent was received by the {@link android.app.Activity} that currently owns this {@link
* Fragment}.
*
* <p>See {@link android.app.Activity#onNewIntent(Intent)}
*
* <p>
*
* @param intent new Intent
*/
@ActivityCallThrough
public void onNewIntent(@NonNull Intent intent) {
// Forward the OS callback to the shared delegate.
delegate.onNewIntent(intent);
}
/**
* The hardware back button was pressed.
*
* <p>See {@link android.app.Activity#onBackPressed()}
*/
@ActivityCallThrough
public void onBackPressed() {
// Forward the OS callback to the shared delegate.
delegate.onBackPressed();
}
/**
* A result has been returned after an invocation of {@link
* Fragment#startActivityForResult(Intent, int)}.
*
* <p>
*
* @param requestCode request code sent with {@link Fragment#startActivityForResult(Intent, int)}
* @param resultCode code representing the result of the {@code Activity} that was launched
* @param data any corresponding return data, held within an {@code Intent}
*/
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
// Forward the activity result to the shared delegate.
delegate.onActivityResult(requestCode, resultCode, data);
}
/**
* The {@link android.app.Activity} that owns this {@link Fragment} is about to go to the
* background as the result of a user's choice/action, i.e., not as the result of an OS decision.
*
* <p>See {@link android.app.Activity#onUserLeaveHint()}
*/
@ActivityCallThrough
public void onUserLeaveHint() {
// Forward the OS callback to the shared delegate.
delegate.onUserLeaveHint();
}
/**
* Callback invoked when memory is low.
*
* <p>This implementation forwards a memory pressure warning to the running Flutter app.
*
* <p>
*
* @param level level
*/
@ActivityCallThrough
public void onTrimMemory(int level) {
// Forward the memory-pressure signal to the shared delegate.
delegate.onTrimMemory(level);
}
/**
* Callback invoked when memory is low.
*
* <p>This implementation forwards a memory pressure warning to the running Flutter app.
*/
@Override
public void onLowMemory() {
super.onLowMemory();
// Forward the memory-pressure signal to the shared delegate.
delegate.onLowMemory();
}
/**
 * {@link FlutterActivityAndFragmentDelegate.Host} method that is used by {@link
 * FlutterActivityAndFragmentDelegate} to obtain Flutter shell arguments when initializing
 * Flutter.
 */
@Override
@NonNull
public FlutterShellArgs getFlutterShellArgs() {
  // A missing argument array is treated the same as an empty one.
  String[] rawShellArgs = getArguments().getStringArray(ARG_FLUTTER_INITIALIZATION_ARGS);
  if (rawShellArgs == null) {
    rawShellArgs = new String[] {};
  }
  return new FlutterShellArgs(rawShellArgs);
}
/**
* Returns the ID of a statically cached {@link FlutterEngine} to use within this {@code
* FlutterFragment}, or {@code null} if this {@code FlutterFragment} does not want to use a cached
* {@link FlutterEngine}.
*/
@Nullable
@Override
public String getCachedEngineId() {
// Null means "no cached engine": the delegate will obtain or create one instead.
return getArguments().getString(ARG_CACHED_ENGINE_ID, null);
}
/**
 * Returns false if the {@link FlutterEngine} within this {@code FlutterFragment} should outlive
 * the {@code FlutterFragment}, itself.
 *
 * <p>Defaults to true when this {@code FlutterFragment} created its own engine, and to false
 * when the engine is cached or was supplied by the host.
 */
@Override
public boolean shouldDestroyEngineWithHost() {
  if (getCachedEngineId() != null || delegate.isFlutterEngineFromHost()) {
    // An engine owned elsewhere (cache or host) is only destroyed on explicit request.
    return getArguments().getBoolean(ARG_DESTROY_ENGINE_WITH_FRAGMENT, false);
  }
  // This Fragment created the FlutterEngine, so destroy it by default unless
  // explicitly requested not to.
  return getArguments().getBoolean(ARG_DESTROY_ENGINE_WITH_FRAGMENT, true);
}
/**
* Returns the name of the Dart method that this {@code FlutterFragment} should execute to start a
* Flutter app.
*
* <p>Defaults to "main".
*
* <p>Used by this {@code FlutterFragment}'s {@link FlutterActivityAndFragmentDelegate.Host}
*/
@Override
@NonNull
public String getDartEntrypointFunctionName() {
// "main" is the conventional Dart entrypoint, used when no arg was supplied.
return getArguments().getString(ARG_DART_ENTRYPOINT, "main");
}
/**
* Returns the file path to the desired Flutter app's bundle of code.
*
* <p>Defaults to {@link FlutterMain#findAppBundlePath()}.
*
* <p>Used by this {@code FlutterFragment}'s {@link FlutterActivityAndFragmentDelegate.Host}
*/
@Override
@NonNull
public String getAppBundlePath() {
// Falls back to the default bundle location resolved by FlutterMain.
return getArguments().getString(ARG_APP_BUNDLE_PATH, FlutterMain.findAppBundlePath());
}
/**
* Returns the initial route that should be rendered within Flutter, once the Flutter app starts.
*
* <p>Defaults to {@code null}, which signifies a route of "/" in Flutter.
*
* <p>Used by this {@code FlutterFragment}'s {@link FlutterActivityAndFragmentDelegate.Host}
*/
@Override
@Nullable
public String getInitialRoute() {
// Null signifies a route of "/" in Flutter (see javadoc above).
return getArguments().getString(ARG_INITIAL_ROUTE);
}
/**
 * Returns the desired {@link RenderMode} for the {@link FlutterView} displayed in this {@code
 * FlutterFragment}.
 *
 * <p>Defaults to {@link RenderMode#surface}.
 */
@Override
@NonNull
public RenderMode getRenderMode() {
  // Fall back to "surface" when no explicit mode was stored in the arguments.
  return RenderMode.valueOf(
      getArguments().getString(ARG_FLUTTERVIEW_RENDER_MODE, RenderMode.surface.name()));
}
/**
 * Returns the desired {@link TransparencyMode} for the {@link FlutterView} displayed in this
 * {@code FlutterFragment}.
 *
 * <p>Defaults to {@link TransparencyMode#transparent}.
 */
@Override
@NonNull
public TransparencyMode getTransparencyMode() {
  // Fall back to "transparent" when no explicit mode was stored in the arguments.
  return TransparencyMode.valueOf(
      getArguments()
          .getString(ARG_FLUTTERVIEW_TRANSPARENCY_MODE, TransparencyMode.transparent.name()));
}
/** Asks the owning {@code Activity} for a {@link SplashScreen}, when it provides one. */
@Override
@Nullable
public SplashScreen provideSplashScreen() {
  FragmentActivity hostActivity = getActivity();
  return hostActivity instanceof SplashScreenProvider
      ? ((SplashScreenProvider) hostActivity).provideSplashScreen()
      : null;
}
/**
 * Hook for subclasses to return a {@link FlutterEngine} with whatever configuration is desired.
 *
 * <p>By default this method defers to this {@code FlutterFragment}'s surrounding {@code
 * Activity}, if that {@code Activity} implements {@link FlutterEngineProvider}. If this method is
 * overridden, the surrounding {@code Activity} will no longer be given an opportunity to provide
 * a {@link FlutterEngine}, unless the subclass explicitly implements that behavior.
 *
 * <p>Consider returning a cached {@link FlutterEngine} instance from this method to avoid the
 * typical warm-up time that a new {@link FlutterEngine} instance requires.
 *
 * <p>If null is returned then a new default {@link FlutterEngine} will be created to back this
 * {@code FlutterFragment}.
 */
@Override
@Nullable
public FlutterEngine provideFlutterEngine(@NonNull Context context) {
  // Defer to the FragmentActivity that owns us to see if it wants to provide a FlutterEngine.
  FragmentActivity attachedActivity = getActivity();
  if (!(attachedActivity instanceof FlutterEngineProvider)) {
    return null;
  }
  // Defer to the Activity that owns us to provide a FlutterEngine.
  Log.v(TAG, "Deferring to attached Activity to provide a FlutterEngine.");
  return ((FlutterEngineProvider) attachedActivity).provideFlutterEngine(getContext());
}
/**
* Hook for subclasses to obtain a reference to the {@link FlutterEngine} that is owned by this
* {@code FlutterActivity}.
*/
@Nullable
public FlutterEngine getFlutterEngine() {
// The delegate owns the engine reference; NOTE(review): throws NPE if called before
// onAttach() or after onDetach(), when delegate is null — confirm callers respect that.
return delegate.getFlutterEngine();
}
/**
 * Provides a {@link PlatformPlugin} bound to the given {@code activity}'s platform channel, or
 * {@code null} when no {@code Activity} is available.
 */
@Nullable
@Override
public PlatformPlugin providePlatformPlugin(
    @Nullable Activity activity, @NonNull FlutterEngine flutterEngine) {
  if (activity != null) {
    // Use the null-checked parameter rather than getActivity(): the original checked
    // `activity` but then passed getActivity(), which is not the value that was
    // verified non-null.
    return new PlatformPlugin(activity, flutterEngine.getPlatformChannel());
  } else {
    return null;
  }
}
/**
 * Configures a {@link FlutterEngine} after its creation.
 *
 * <p>This method is called after {@link #provideFlutterEngine(Context)}, and after the given
 * {@link FlutterEngine} has been attached to the owning {@code FragmentActivity}.
 *
 * <p>It is possible that the owning {@code FragmentActivity} opted not to connect itself as an
 * {@link io.flutter.embedding.engine.plugins.activity.ActivityControlSurface}. In that case, any
 * configuration, e.g., plugins, must not expect or depend upon an available {@code Activity} at
 * the time that this method is invoked.
 *
 * <p>The default behavior is to defer to the owning {@code FragmentActivity} as a {@link
 * FlutterEngineConfigurator}; subclasses can override to replace or extend that behavior.
 */
@Override
public void configureFlutterEngine(@NonNull FlutterEngine flutterEngine) {
  FragmentActivity host = getActivity();
  if (host instanceof FlutterEngineConfigurator) {
    FlutterEngineConfigurator configurator = (FlutterEngineConfigurator) host;
    configurator.configureFlutterEngine(flutterEngine);
  }
}
/**
 * Hook for the host to cleanup references that were established in {@link
 * #configureFlutterEngine(FlutterEngine)} before the host is destroyed or detached.
 *
 * <p>This method is called in {@link #onDetach()}.
 */
@Override
public void cleanUpFlutterEngine(@NonNull FlutterEngine flutterEngine) {
  FragmentActivity host = getActivity();
  if (host instanceof FlutterEngineConfigurator) {
    FlutterEngineConfigurator configurator = (FlutterEngineConfigurator) host;
    configurator.cleanUpFlutterEngine(flutterEngine);
  }
}
/**
* See {@link NewEngineFragmentBuilder#shouldAttachEngineToActivity()} and {@link
* CachedEngineFragmentBuilder#shouldAttachEngineToActivity()}.
*
* <p>Used by this {@code FlutterFragment}'s {@link FlutterActivityAndFragmentDelegate}
*/
@Override
public boolean shouldAttachEngineToActivity() {
// Bundle default for a missing key is false; both builders always write this key (default true).
return getArguments().getBoolean(ARG_SHOULD_ATTACH_ENGINE_TO_ACTIVITY);
}
/** No-op hook invoked when the {@link FlutterSurfaceView} is created; for subclasses. */
@Override
public void onFlutterSurfaceViewCreated(@NonNull FlutterSurfaceView flutterSurfaceView) {
// Hook for subclasses.
}
/** No-op hook invoked when the {@link FlutterTextureView} is created; for subclasses. */
@Override
public void onFlutterTextureViewCreated(@NonNull FlutterTextureView flutterTextureView) {
// Hook for subclasses.
}
/**
 * Invoked after the {@link FlutterView} within this {@code FlutterFragment} starts rendering
 * pixels to the screen.
 *
 * <p>Forwards {@code onFlutterUiDisplayed()} to the attached {@code Activity} when it implements
 * {@link FlutterUiDisplayListener}. Subclasses overriding this must call through to {@code
 * super}.
 */
@Override
public void onFlutterUiDisplayed() {
  FragmentActivity host = getActivity();
  if (host instanceof FlutterUiDisplayListener) {
    ((FlutterUiDisplayListener) host).onFlutterUiDisplayed();
  }
}
/**
 * Invoked after the {@link FlutterView} within this {@code FlutterFragment} stops rendering
 * pixels to the screen.
 *
 * <p>Forwards {@code onFlutterUiNoLongerDisplayed()} to the attached {@code Activity} when it
 * implements {@link FlutterUiDisplayListener}. Subclasses overriding this must call through to
 * {@code super}.
 */
@Override
public void onFlutterUiNoLongerDisplayed() {
  FragmentActivity host = getActivity();
  if (host instanceof FlutterUiDisplayListener) {
    ((FlutterUiDisplayListener) host).onFlutterUiNoLongerDisplayed();
  }
}
/**
 * Annotates methods in {@code FlutterFragment} that must be called by the containing {@code
 * Activity}.
 */
// NOTE(review): appears to be a documentation-only marker; nothing in this file processes it.
@interface ActivityCallThrough {}
}
| |
package fr.beitz.buildchaindemo.service;
import fr.beitz.buildchaindemo.domain.Authority;
import fr.beitz.buildchaindemo.domain.User;
import fr.beitz.buildchaindemo.repository.AuthorityRepository;
import fr.beitz.buildchaindemo.repository.PersistentTokenRepository;
import fr.beitz.buildchaindemo.repository.UserRepository;
import fr.beitz.buildchaindemo.security.AuthoritiesConstants;
import fr.beitz.buildchaindemo.security.SecurityUtils;
import fr.beitz.buildchaindemo.service.util.RandomUtil;
import fr.beitz.buildchaindemo.web.rest.vm.ManagedUserVM;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.security.crypto.password.PasswordEncoder;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.time.LocalDate;
import java.time.ZonedDateTime;
import javax.inject.Inject;
import java.util.*;
/**
* Service class for managing users.
*/
@Service
@Transactional
public class UserService {
private final Logger log = LoggerFactory.getLogger(UserService.class);
// Collaborators are field-injected via javax.inject; consider constructor injection for testability.
@Inject
private PasswordEncoder passwordEncoder;
@Inject
private UserRepository userRepository;
// NOTE(review): injected but not used in the methods visible here — likely used elsewhere in the class.
@Inject
private PersistentTokenRepository persistentTokenRepository;
@Inject
private AuthorityRepository authorityRepository;
/**
 * Activates the user registered under the given activation key and burns the key.
 *
 * @param key the one-time activation key that was emailed to the user
 * @return the activated user, or an empty Optional when the key is unknown
 */
public Optional<User> activateRegistration(String key) {
    log.debug("Activating user for activation key {}", key);
    return userRepository.findOneByActivationKey(key).map(user -> {
        // Activate the account and clear the one-time key so it cannot be reused.
        user.setActivated(true);
        user.setActivationKey(null);
        userRepository.save(user);
        log.debug("Activated user: {}", user);
        return user;
    });
}
/**
 * Sets a new password for the user holding the given reset key, provided the key is at most
 * 24 hours old, then clears the reset key and date.
 *
 * @param newPassword the plain-text replacement password (encoded before storage)
 * @param key the reset key previously issued by {@code requestPasswordReset}
 * @return the updated user, or an empty Optional when the key is unknown or expired
 */
public Optional<User> completePasswordReset(String newPassword, String key) {
    log.debug("Reset user password for reset key {}", key);
    return userRepository.findOneByResetKey(key)
        // Reject keys older than the 24-hour validity window.
        .filter(user -> user.getResetDate().isAfter(ZonedDateTime.now().minusHours(24)))
        .map(user -> {
            user.setPassword(passwordEncoder.encode(newPassword));
            user.setResetKey(null);
            user.setResetDate(null);
            userRepository.save(user);
            return user;
        });
}
/**
 * Issues a password-reset key for the activated user registered under the given email.
 *
 * @param mail the user's email address
 * @return the user stamped with a fresh reset key/date, or empty when no activated user matches
 */
public Optional<User> requestPasswordReset(String mail) {
    Optional<User> candidate = userRepository.findOneByEmail(mail).filter(User::getActivated);
    candidate.ifPresent(user -> {
        // The reset date bounds the key's 24-hour validity window (see completePasswordReset).
        user.setResetKey(RandomUtil.generateResetKey());
        user.setResetDate(ZonedDateTime.now());
        userRepository.save(user);
    });
    return candidate;
}
/**
 * Registers a new (self-service) user account.
 *
 * <p>The account starts deactivated with the base USER authority and a generated activation
 * key; the caller is expected to email the key to the user.
 *
 * @return the persisted user
 */
public User createUser(String login, String password, String firstName, String lastName, String email,
    String langKey) {
    User newUser = new User();
    newUser.setLogin(login);
    newUser.setFirstName(firstName);
    newUser.setLastName(lastName);
    newUser.setEmail(email);
    newUser.setLangKey(langKey);
    // new user gets initially a generated password
    newUser.setPassword(passwordEncoder.encode(password));
    // new user is not active
    newUser.setActivated(false);
    // new user gets registration key
    newUser.setActivationKey(RandomUtil.generateActivationKey());
    // Every self-registered user gets the base USER role.
    Set<Authority> authorities = new HashSet<>();
    authorities.add(authorityRepository.findOne(AuthoritiesConstants.USER));
    newUser.setAuthorities(authorities);
    userRepository.save(newUser);
    log.debug("Created Information for User: {}", newUser);
    return newUser;
}
/**
 * Creates a user from the administration view-model. The account is created
 * already activated, with a random password plus a reset key so the user can
 * choose an initial password.
 *
 * @param managedUserVM view-model carrying login, names, e-mail, language and authority names
 * @return the persisted user entity
 */
public User createUser(ManagedUserVM managedUserVM) {
    User user = new User();
    user.setLogin(managedUserVM.getLogin());
    user.setFirstName(managedUserVM.getFirstName());
    user.setLastName(managedUserVM.getLastName());
    user.setEmail(managedUserVM.getEmail());
    // fall back to English when no language was chosen
    user.setLangKey(managedUserVM.getLangKey() == null ? "en" : managedUserVM.getLangKey());
    if (managedUserVM.getAuthorities() != null) {
        // resolve authority names to managed Authority entities
        // (plain forEach — the redundant .stream() before forEach was dropped)
        Set<Authority> authorities = new HashSet<>();
        managedUserVM.getAuthorities().forEach(
            authority -> authorities.add(authorityRepository.findOne(authority))
        );
        user.setAuthorities(authorities);
    }
    // random password: the user is expected to reset it via the mailed reset key
    String encryptedPassword = passwordEncoder.encode(RandomUtil.generatePassword());
    user.setPassword(encryptedPassword);
    user.setResetKey(RandomUtil.generateResetKey());
    user.setResetDate(ZonedDateTime.now());
    user.setActivated(true);
    userRepository.save(user);
    log.debug("Created Information for User: {}", user);
    return user;
}
/**
 * Updates the profile fields of the currently authenticated user.
 */
public void updateUser(String firstName, String lastName, String email, String langKey) {
    String currentLogin = SecurityUtils.getCurrentUserLogin();
    userRepository.findOneByLogin(currentLogin).ifPresent(user -> {
        user.setFirstName(firstName);
        user.setLastName(lastName);
        user.setEmail(email);
        user.setLangKey(langKey);
        userRepository.save(user);
        log.debug("Changed Information for User: {}", user);
    });
}
/**
 * Updates the user identified by {@code id}: login, names, e-mail, activation
 * flag, language, and the complete set of authorities (replaced wholesale).
 *
 * NOTE(review): unlike the other mutators in this class, this method never
 * calls userRepository.save(u) — presumably it relies on JPA dirty checking
 * of the managed entity inside the surrounding transaction to flush the
 * changes; confirm the class is annotated @Transactional.
 */
public void updateUser(Long id, String login, String firstName, String lastName, String email,
    boolean activated, String langKey, Set<String> authorities) {
    userRepository
        .findOneById(id)
        .ifPresent(u -> {
            u.setLogin(login);
            u.setFirstName(firstName);
            u.setLastName(lastName);
            u.setEmail(email);
            u.setActivated(activated);
            u.setLangKey(langKey);
            // replace the managed authority set in place: clear, then
            // re-resolve each authority name to its entity
            Set<Authority> managedAuthorities = u.getAuthorities();
            managedAuthorities.clear();
            authorities.stream().forEach(
                authority -> managedAuthorities.add(authorityRepository.findOne(authority))
            );
            log.debug("Changed Information for User: {}", u);
        });
}
/**
 * Deletes the user with the given login, if one exists.
 */
public void deleteUser(String login) {
    userRepository.findOneByLogin(login).ifPresent(user -> {
        userRepository.delete(user);
        log.debug("Deleted User: {}", user);
    });
}
/**
 * Replaces the current user's password with the encoded form of {@code password}.
 */
public void changePassword(String password) {
    userRepository.findOneByLogin(SecurityUtils.getCurrentUserLogin()).ifPresent(user -> {
        user.setPassword(passwordEncoder.encode(password));
        userRepository.save(user);
        log.debug("Changed password for User: {}", user);
    });
}
/**
 * Loads a user by login with its authorities initialized.
 *
 * @param login the user's login
 * @return the user with authorities eagerly loaded, or an empty Optional
 *         when no user with that login exists
 */
@Transactional(readOnly = true)
public Optional<User> getUserWithAuthoritiesByLogin(String login) {
    return userRepository.findOneByLogin(login).map(u -> {
        u.getAuthorities().size(); // eagerly load the lazy association inside the transaction
        return u;
    });
}
/**
 * Loads a user by id with its authorities initialized.
 *
 * @param id the user's database id
 * @return the user with authorities eagerly loaded, or {@code null} when no
 *         user with that id exists (previously this path threw a
 *         NullPointerException because findOne's null result was dereferenced)
 */
@Transactional(readOnly = true)
public User getUserWithAuthorities(Long id) {
    User user = userRepository.findOne(id);
    if (user != null) {
        user.getAuthorities().size(); // eagerly load the association
    }
    return user;
}
/**
 * Loads the currently authenticated user with its authorities initialized.
 *
 * @return the current user, or {@code null} when the current login cannot be
 *         resolved to a user
 */
@Transactional(readOnly = true)
public User getUserWithAuthorities() {
    return userRepository.findOneByLogin(SecurityUtils.getCurrentUserLogin())
        .map(user -> {
            user.getAuthorities().size(); // eagerly load the association
            return user;
        })
        .orElse(null);
}
/**
 * Persistent Tokens are used for providing automatic ("remember me")
 * authentication; tokens whose date is more than one month in the past are
 * deleted here (the code uses {@code minusMonths(1)}, i.e. roughly 30 days).
 * <p>
 * This is scheduled to get fired everyday, at midnight.
 * </p>
 */
@Scheduled(cron = "0 0 0 * * ?")
public void removeOldPersistentTokens() {
    LocalDate now = LocalDate.now();
    persistentTokenRepository.findByTokenDateBefore(now.minusMonths(1)).stream().forEach(token -> {
        log.debug("Deleting token {}", token.getSeries());
        // detach the token from its owning user before deleting it
        User user = token.getUser();
        user.getPersistentTokens().remove(token);
        persistentTokenRepository.delete(token);
    });
}
/**
 * Not activated users should be automatically deleted after 3 days.
 * <p>
 * This is scheduled to get fired everyday, at 01:00 (am).
 * </p>
 */
@Scheduled(cron = "0 0 1 * * ?")
public void removeNotActivatedUsers() {
    ZonedDateTime cutoff = ZonedDateTime.now().minusDays(3);
    userRepository.findAllByActivatedIsFalseAndCreatedDateBefore(cutoff).forEach(user -> {
        log.debug("Deleting not activated user {}", user.getLogin());
        userRepository.delete(user);
    });
}
}
| |
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.testFramework;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.WriteAction;
import com.intellij.openapi.command.WriteCommandAction;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.module.ModifiableModuleModel;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleManager;
import com.intellij.openapi.module.ModuleType;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.openapi.projectRoots.SdkModificator;
import com.intellij.openapi.roots.*;
import com.intellij.openapi.roots.impl.ContentEntryImpl;
import com.intellij.openapi.roots.impl.libraries.ProjectLibraryTable;
import com.intellij.openapi.roots.libraries.Library;
import com.intellij.openapi.roots.libraries.LibraryTable;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.*;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.PsiFile;
import com.intellij.psi.impl.DebugUtil;
import com.intellij.psi.impl.source.PsiFileImpl;
import com.intellij.psi.stubs.StubTextInconsistencyException;
import com.intellij.util.Function;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.Contract;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.jps.model.JpsElement;
import org.jetbrains.jps.model.java.JavaSourceRootType;
import org.jetbrains.jps.model.module.JpsModuleSourceRootType;
import org.junit.Assert;
import java.io.File;
import java.io.IOException;
import java.util.*;
import java.util.function.Consumer;
/**
 * Static helpers for IntelliJ platform tests: building temporary project
 * content, manipulating module roots / source folders / libraries / SDK
 * roots, and checking PSI tree, stub and text consistency of parsed files.
 *
 * All project-model mutations are funneled through
 * ModuleRootModificationUtil.updateModel or explicit write actions, as the
 * platform threading model requires.
 */
public class PsiTestUtil {
  public static VirtualFile createTestProjectStructure(Project project,
                                                       Module module,
                                                       String rootPath,
                                                       Collection<? super File> filesToDelete) throws Exception {
    return createTestProjectStructure(project, module, rootPath, filesToDelete, true);
  }

  public static VirtualFile createTestProjectStructure(Project project, Module module, Collection<? super File> filesToDelete) throws IOException {
    return createTestProjectStructure(project, module, null, filesToDelete, true);
  }

  public static VirtualFile createTestProjectStructure(Project project,
                                                       Module module,
                                                       String rootPath,
                                                       Collection<? super File> filesToDelete,
                                                       boolean addProjectRoots) throws IOException {
    VirtualFile vDir = createTestProjectStructure(module, rootPath, filesToDelete, addProjectRoots);
    // make sure any pending document changes are reflected in the PSI
    PsiDocumentManager.getInstance(project).commitAllDocuments();
    return vDir;
  }

  public static VirtualFile createTestProjectStructure(Module module,
                                                       String rootPath,
                                                       Collection<? super File> filesToDelete,
                                                       boolean addProjectRoots) throws IOException {
    return createTestProjectStructure("unitTest", module, rootPath, filesToDelete, addProjectRoots);
  }

  /**
   * Creates a temp directory (registered in {@code filesToDelete} for later
   * cleanup), refreshes it into the VFS, optionally copies the contents of
   * {@code rootPath} into it, and optionally registers it as a source/content
   * root of {@code module}.
   */
  public static VirtualFile createTestProjectStructure(String tempName,
                                                       Module module,
                                                       String rootPath,
                                                       Collection<? super File> filesToDelete,
                                                       boolean addProjectRoots) throws IOException {
    File dir = FileUtil.createTempDirectory(tempName, null, false);
    filesToDelete.add(dir);
    VirtualFile vDir = LocalFileSystem.getInstance().refreshAndFindFileByPath(dir.getCanonicalPath().replace(File.separatorChar, '/'));
    assert vDir != null && vDir.isDirectory() : dir;
    PlatformTestCase.synchronizeTempDirVfs(vDir);
    EdtTestUtil.runInEdtAndWait(() -> {
      WriteAction.run(() -> {
        if (rootPath != null) {
          VirtualFile vDir1 = LocalFileSystem.getInstance().findFileByPath(rootPath.replace(File.separatorChar, '/'));
          if (vDir1 == null) {
            // NOTE(review): raw checked Exception thrown from inside the write
            // action lambda; presumably propagated through the ThrowableRunnable
            // signatures of WriteAction.run/runInEdtAndWait — confirm.
            throw new Exception(rootPath + " not found");
          }
          VfsUtil.copyDirectory(null, vDir1, vDir, null);
        }
        if (addProjectRoots) {
          addSourceContentToRoots(module, vDir);
        }
      });
    });
    return vDir;
  }

  /** Removes every content entry from the module and sets the given SDK. */
  public static void removeAllRoots(@NotNull Module module, Sdk jdk) {
    ModuleRootModificationUtil.updateModel(module, model -> {
      model.clear();
      model.setSdk(jdk);
    });
  }

  public static SourceFolder addSourceContentToRoots(Module module, @NotNull VirtualFile vDir) {
    return addSourceContentToRoots(module, vDir, false);
  }

  /** Adds {@code vDir} as both a content root and a (test) source folder. */
  public static SourceFolder addSourceContentToRoots(Module module, @NotNull VirtualFile vDir, boolean testSource) {
    Ref<SourceFolder> result = Ref.create();
    ModuleRootModificationUtil.updateModel(module, model -> result.set(model.addContentEntry(vDir).addSourceFolder(vDir, testSource)));
    return result.get();
  }

  public static SourceFolder addSourceRoot(Module module, VirtualFile vDir) {
    return addSourceRoot(module, vDir, false);
  }

  public static SourceFolder addSourceRoot(Module module, VirtualFile vDir, boolean isTestSource) {
    return addSourceRoot(module, vDir, isTestSource ? JavaSourceRootType.TEST_SOURCE : JavaSourceRootType.SOURCE);
  }

  public static <P extends JpsElement> SourceFolder addSourceRoot(Module module, VirtualFile vDir, @NotNull JpsModuleSourceRootType<P> rootType) {
    return addSourceRoot(module, vDir, rootType, rootType.createDefaultProperties());
  }

  /**
   * Adds {@code vDir} as a source folder of the given root type, reusing the
   * enclosing content entry when one exists, creating one otherwise.
   */
  public static <P extends JpsElement> SourceFolder addSourceRoot(Module module,
                                                                  VirtualFile vDir,
                                                                  @NotNull JpsModuleSourceRootType<P> rootType,
                                                                  P properties) {
    Ref<SourceFolder> result = Ref.create();
    ModuleRootModificationUtil.updateModel(module, model -> {
      ContentEntry entry = findContentEntry(model, vDir);
      if (entry == null) entry = model.addContentEntry(vDir);
      result.set(entry.addSourceFolder(vDir, rootType, properties));
    });
    return result.get();
  }

  /** Finds the content entry whose root is an ancestor of (or equal to) {@code file}. */
  @Nullable
  private static ContentEntry findContentEntry(ModuleRootModel rootModel, VirtualFile file) {
    return ContainerUtil.find(rootModel.getContentEntries(), object -> {
      VirtualFile entryRoot = object.getFile();
      return entryRoot != null && VfsUtilCore.isAncestor(entryRoot, file, false);
    });
  }

  /**
   * Adds {@code vDir} as a content root and returns the resulting (live)
   * ContentEntry from the committed model, or null if it cannot be found.
   */
  public static ContentEntry addContentRoot(Module module, VirtualFile vDir) {
    ModuleRootModificationUtil.updateModel(module, model -> model.addContentEntry(vDir));
    // re-read the committed model to return a non-disposed entry
    for (ContentEntry entry : ModuleRootManager.getInstance(module).getContentEntries()) {
      if (Comparing.equal(entry.getFile(), vDir)) {
        Assert.assertFalse(((ContentEntryImpl)entry).isDisposed());
        return entry;
      }
    }
    return null;
  }

  public static void addExcludedRoot(Module module, VirtualFile dir) {
    ModuleRootModificationUtil.updateModel(module, model -> ApplicationManager.getApplication().runReadAction(() -> {
      findContentEntryWithAssertion(model, dir).addExcludeFolder(dir);
    }));
  }

  /** Like {@link #findContentEntry} but fails loudly when no entry covers {@code dir}. */
  @NotNull
  private static ContentEntry findContentEntryWithAssertion(ModifiableRootModel model, VirtualFile dir) {
    ContentEntry entry = findContentEntry(model, dir);
    if (entry == null) {
      throw new RuntimeException(dir + " is not under content roots: " + Arrays.toString(model.getContentRoots()));
    }
    return entry;
  }

  public static void removeContentEntry(Module module, VirtualFile contentRoot) {
    ModuleRootModificationUtil.updateModel(module, model -> model.removeContentEntry(findContentEntryWithAssertion(model, contentRoot)));
  }

  /** Removes the source folder rooted exactly at {@code root} from its content entry. */
  public static void removeSourceRoot(@NotNull Module module, @NotNull VirtualFile root) {
    ModuleRootModificationUtil.updateModel(module, model -> {
      ContentEntry entry = findContentEntryWithAssertion(model, root);
      for (SourceFolder sourceFolder : entry.getSourceFolders()) {
        if (root.equals(sourceFolder.getFile())) {
          entry.removeSourceFolder(sourceFolder);
          break;
        }
      }
    });
  }

  public static void removeExcludedRoot(Module module, VirtualFile root) {
    ModuleRootModificationUtil.updateModel(module, model -> {
      ContentEntry entry = findContentEntryWithAssertion(model, root);
      entry.removeExcludeFolder(root.getUrl());
    });
  }

  /** Asserts that the file's PSI tree equals the tree re-parsed from its text. */
  public static void checkFileStructure(PsiFile file) {
    compareFromAllRoots(file, f -> DebugUtil.psiTreeToString(f, false));
  }

  // Renders every root of the file's view provider with `fun` for both the
  // original file and a from-text copy, then compares the two renderings.
  private static void compareFromAllRoots(PsiFile file, Function<? super PsiFile, String> fun) {
    PsiFile dummyFile = createDummyCopy(file);
    String psiTree = StringUtil.join(file.getViewProvider().getAllFiles(), fun, "\n");
    String reparsedTree = StringUtil.join(dummyFile.getViewProvider().getAllFiles(), fun, "\n");
    assertPsiTextTreeConsistency(psiTree, reparsedTree);
  }

  // On mismatch, annotates the first diverging line of each rendering so the
  // assertion failure pinpoints where the two trees differ.
  private static void assertPsiTextTreeConsistency(String psiTree, String reparsedTree) {
    if (!psiTree.equals(reparsedTree)) {
      String[] psiLines = StringUtil.splitByLinesDontTrim(psiTree);
      String[] reparsedLines = StringUtil.splitByLinesDontTrim(reparsedTree);
      for (int i = 0; ; i++) {
        if (i >= psiLines.length || i >= reparsedLines.length || !psiLines[i].equals(reparsedLines[i])) {
          psiLines[Math.min(i, psiLines.length - 1)] += " // in PSI structure";
          reparsedLines[Math.min(i, reparsedLines.length - 1)] += " // re-created from text";
          break;
        }
      }
      psiTree = StringUtil.join(psiLines, "\n");
      reparsedTree = StringUtil.join(reparsedLines, "\n");
      Assert.assertEquals(reparsedTree, psiTree);
    }
  }

  // Builds an in-memory copy of the file from its current text, so the copy's
  // PSI is guaranteed to come from a fresh parse.
  @NotNull
  private static PsiFile createDummyCopy(PsiFile file) {
    LightVirtualFile copy = new LightVirtualFile(file.getName(), file.getText());
    copy.setOriginalFile(file.getViewProvider().getVirtualFile());
    PsiFile dummyCopy = Objects.requireNonNull(file.getManager().findFile(copy));
    if (dummyCopy instanceof PsiFileImpl) {
      ((PsiFileImpl)dummyCopy).setOriginalFile(file);
    }
    return dummyCopy;
  }

  public static void checkPsiMatchesTextIgnoringNonCode(PsiFile file) {
    compareFromAllRoots(file, f -> DebugUtil.psiToStringIgnoringNonCode(f));
  }

  /**
   * @deprecated to attract attention and motivate to fix tests which fail these checks
   */
  @Deprecated
  public static void disablePsiTextConsistencyChecks(@NotNull Disposable parentDisposable) {
    Registry.get("ide.check.structural.psi.text.consistency.in.tests").setValue(false, parentDisposable);
  }

  /** Adds a module library whose name and single jar are derived from {@code libPath}. */
  public static void addLibrary(Module module, String libPath) {
    File file = new File(libPath);
    String libName = file.getName();
    addLibrary(module, libName, file.getParent(), libName);
  }

  public static void addLibrary(Module module, String libName, String libPath, String... jarArr) {
    ModuleRootModificationUtil.updateModel(module, model -> addLibrary(module, model, libName, libPath, jarArr));
  }

  /**
   * Adds a project library and registers cleanup on {@code parent}: when the
   * disposable is disposed, the order entry and the library itself are removed.
   */
  public static void addLibrary(@NotNull Disposable parent, Module module, String libName, String libPath, String... jarArr) {
    Ref<Library> ref = new Ref<>();
    ModuleRootModificationUtil.updateModel(module, model -> ref.set(addLibrary(module, model, libName, libPath, jarArr)));
    Disposer.register(parent, () -> {
      Library library = ref.get();
      ModuleRootModificationUtil.updateModel(module, model -> model.removeOrderEntry(model.findLibraryOrderEntry(library)));
      WriteCommandAction.runWriteCommandAction(null, ()-> {
        LibraryTable table = ProjectLibraryTable.getInstance(module.getProject());
        LibraryTable.ModifiableModel model = table.getModifiableModel();
        model.removeLibrary(library);
        model.commit();
      });
    });
  }

  public static void addProjectLibrary(Module module, String libName, List<String> classesRootPaths) {
    List<VirtualFile> roots = getLibraryRoots(classesRootPaths);
    addProjectLibrary(module, libName, roots, Collections.emptyList());
  }

  // Converts each path to a library-root URL and resolves it in the VFS.
  @NotNull
  private static List<VirtualFile> getLibraryRoots(List<String> classesRootPaths) {
    return ContainerUtil.map(classesRootPaths, path -> VirtualFileManager.getInstance().refreshAndFindFileByUrl(VfsUtil.getUrlForLibraryRoot(new File(path))));
  }

  public static void addProjectLibrary(ModifiableRootModel model, String libName, List<String> classesRootPaths) {
    List<VirtualFile> roots = getLibraryRoots(classesRootPaths);
    addProjectLibrary(model, libName, roots, Collections.emptyList());
  }

  public static void addProjectLibrary(Module module, String libName, VirtualFile... classesRoots) {
    addProjectLibrary(module, libName, Arrays.asList(classesRoots), Collections.emptyList());
  }

  public static Library addProjectLibrary(Module module, String libName, List<? extends VirtualFile> classesRoots, List<? extends VirtualFile> sourceRoots) {
    Ref<Library> result = Ref.create();
    ModuleRootModificationUtil.updateModel(module, model -> result.set(addProjectLibrary(model, libName, classesRoots, sourceRoots)));
    return result.get();
  }

  /**
   * Creates a project-level library with the given roots, adds it to the
   * module's order entries, and moves the new entry to the front of the
   * classpath (the rotation of {@code orderEntries} below).
   */
  @NotNull
  private static Library addProjectLibrary(ModifiableRootModel model,
                                           String libName,
                                           List<? extends VirtualFile> classesRoots,
                                           List<? extends VirtualFile> sourceRoots) {
    LibraryTable libraryTable = ProjectLibraryTable.getInstance(model.getProject());
    return WriteAction.computeAndWait(() -> {
      Library library = libraryTable.createLibrary(libName);
      Library.ModifiableModel libraryModel = library.getModifiableModel();
      try {
        for (VirtualFile root : classesRoots) {
          libraryModel.addRoot(root, OrderRootType.CLASSES);
        }
        for (VirtualFile root : sourceRoots) {
          libraryModel.addRoot(root, OrderRootType.SOURCES);
        }
        libraryModel.commit();
      }
      catch (Throwable t) {
        //noinspection SSBasedInspection
        libraryModel.dispose();
        throw t;
      }
      model.addLibraryEntry(library);
      // rotate: move the just-added (last) entry to position 0
      OrderEntry[] orderEntries = model.getOrderEntries();
      OrderEntry last = orderEntries[orderEntries.length - 1];
      System.arraycopy(orderEntries, 0, orderEntries, 1, orderEntries.length - 1);
      orderEntries[0] = last;
      model.rearrangeOrderEntries(orderEntries);
      return library;
    });
  }

  /**
   * Resolves each jar/dir under {@code libPath} to a VFS root and creates a
   * project library from them.
   *
   * NOTE(review): the {@code module} parameter is unused here — presumably
   * kept for signature symmetry with the other addLibrary overloads.
   */
  @NotNull
  public static Library addLibrary(Module module,
                                   ModifiableRootModel model,
                                   String libName,
                                   String libPath,
                                   String... jarArr) {
    List<VirtualFile> classesRoots = new ArrayList<>();
    for (String jar : jarArr) {
      if (!libPath.endsWith("/") && !jar.startsWith("/")) {
        jar = "/" + jar;
      }
      String path = libPath + jar;
      VirtualFile root;
      if (path.endsWith(".jar")) {
        root = JarFileSystem.getInstance().refreshAndFindFileByPath(path + "!/");
      }
      else {
        root = LocalFileSystem.getInstance().refreshAndFindFileByPath(path);
      }
      assert root != null : "Library root folder not found: " + path + "!/";
      classesRoots.add(root);
    }
    return addProjectLibrary(model, libName, classesRoots, Collections.emptyList());
  }

  /**
   * Adds a module library by URL; the protocol (jar vs. file) is inferred from
   * the first class/source root's suffix.
   */
  public static void addLibrary(Module module,
                                String libName, String libDir,
                                String[] classRoots,
                                String[] sourceRoots) {
    String proto = (classRoots.length > 0 ? classRoots[0] : sourceRoots[0]).endsWith(".jar!/") ? JarFileSystem.PROTOCOL : LocalFileSystem.PROTOCOL;
    String parentUrl = VirtualFileManager.constructUrl(proto, libDir);
    List<String> classesUrls = new ArrayList<>();
    List<String> sourceUrls = new ArrayList<>();
    for (String classRoot : classRoots) {
      classesUrls.add(parentUrl + classRoot);
    }
    for (String sourceRoot : sourceRoots) {
      sourceUrls.add(parentUrl + sourceRoot);
    }
    ModuleRootModificationUtil.addModuleLibrary(module, libName, classesUrls, sourceUrls);
  }

  /**
   * Creates a new module of the given type under {@code root} and registers
   * the root as its content/source root. Both modifiable models are disposed
   * on failure so the project model is never left locked.
   */
  public static Module addModule(Project project, ModuleType type, String name, VirtualFile root) {
    return WriteCommandAction.writeCommandAction(project).compute(() -> {
      String moduleName;
      ModifiableModuleModel moduleModel = ModuleManager.getInstance(project).getModifiableModel();
      try {
        moduleName = moduleModel.newModule(root.getPath() + "/" + name + ".iml", type.getId()).getName();
        moduleModel.commit();
      }
      catch (Throwable t) {
        moduleModel.dispose();
        throw t;
      }
      Module dep = ModuleManager.getInstance(project).findModuleByName(moduleName);
      assert dep != null : moduleName;
      ModifiableRootModel model = ModuleRootManager.getInstance(dep).getModifiableModel();
      try {
        model.addContentEntry(root).addSourceFolder(root, false);
        model.commit();
      }
      catch (Throwable t) {
        model.dispose();
        throw t;
      }
      return dep;
    });
  }

  /** Points the module's (test) compiler output at {@code url}, disabling inheritance. */
  public static void setCompilerOutputPath(Module module, String url, boolean forTests) {
    ModuleRootModificationUtil.updateModel(module, model -> {
      CompilerModuleExtension extension = model.getModuleExtension(CompilerModuleExtension.class);
      extension.inheritCompilerOutputPath(false);
      if (forTests) {
        extension.setCompilerOutputPathForTests(url);
      }
      else {
        extension.setCompilerOutputPath(url);
      }
    });
  }

  public static void setExcludeCompileOutput(Module module, boolean exclude) {
    ModuleRootModificationUtil.updateModel(module, model -> model.getModuleExtension(CompilerModuleExtension.class).setExcludeOutput(exclude));
  }

  public static void setJavadocUrls(Module module, String... urls) {
    ModuleRootModificationUtil.updateModel(module, model -> model.getModuleExtension(JavaModuleExternalPaths.class).setJavadocUrls(urls));
  }

  /** Returns a clone of {@code sdk} with the community JDK annotations root attached. */
  @NotNull
  @Contract(pure=true)
  public static Sdk addJdkAnnotations(@NotNull Sdk sdk) {
    String path = FileUtil.toSystemIndependentName(PlatformTestUtil.getCommunityPath()) + "/java/jdkAnnotations";
    VirtualFile root = LocalFileSystem.getInstance().findFileByPath(path);
    return addRootsToJdk(sdk, AnnotationOrderRootType.getInstance(), root);
  }

  /**
   * Returns a clone of {@code sdk} with the given roots added; the original
   * SDK instance is left unmodified (pure, as annotated).
   */
  @NotNull
  @Contract(pure=true)
  public static Sdk addRootsToJdk(@NotNull Sdk sdk,
                                  @NotNull OrderRootType rootType,
                                  @NotNull VirtualFile... roots) {
    Sdk clone;
    try {
      clone = (Sdk)sdk.clone();
    }
    catch (CloneNotSupportedException e) {
      throw new RuntimeException(e);
    }
    SdkModificator sdkModificator = clone.getSdkModificator();
    for (VirtualFile root : roots) {
      sdkModificator.addRoot(root, rootType);
    }
    sdkModificator.commitChanges();
    return clone;
  }

  /** Checks stub/PSI consistency; on failure, reports the diff and rethrows. */
  public static void checkStubsMatchText(@NotNull PsiFile file) {
    try {
      StubTextInconsistencyException.checkStubTextConsistency(file);
    }
    catch (StubTextInconsistencyException e) {
      compareStubTexts(e);
    }
  }

  // Always rethrows `e` after asserting; the assertion usually fails first
  // with a readable diff of the two stub renderings.
  public static void compareStubTexts(@NotNull StubTextInconsistencyException e) {
    assertPsiTextTreeConsistency(e.getStubsFromPsi(), e.getStubsFromText());
    throw e;
  }

  /**
   * Runs {@code checker} on the file, and — if its document has uncommitted
   * changes — commits them and runs the checker again on the committed PSI.
   */
  public static void checkPsiStructureWithCommit(@NotNull PsiFile psiFile, Consumer<? super PsiFile> checker) {
    checker.accept(psiFile);
    Document document = psiFile.getViewProvider().getDocument();
    PsiDocumentManager manager = PsiDocumentManager.getInstance(psiFile.getProject());
    if (document != null && manager.isUncommited(document)) {
      manager.commitDocument(document);
      checker.accept(manager.getPsiFile(document));
    }
  }
}
| |
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.workflow.model.wf;
import java.awt.image.BufferedImage;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.imageio.ImageIO;
import javax.xml.namespace.QName;
import com.google.gson.JsonObject;
import com.google.gson.JsonPrimitive;
import org.apache.airavata.common.exception.AiravataException;
import org.apache.airavata.common.utils.JSONUtil;
import org.apache.airavata.common.utils.XMLUtil;
import org.apache.airavata.workflow.model.component.Component;
import org.apache.airavata.workflow.model.component.ComponentException;
import org.apache.airavata.workflow.model.component.ws.WSComponent;
import org.apache.airavata.workflow.model.component.ws.WSComponentKey;
import org.apache.airavata.workflow.model.component.ws.WSComponentPort;
import org.apache.airavata.workflow.model.exceptions.WorkflowException;
import org.apache.airavata.workflow.model.exceptions.WorkflowRuntimeException;
import org.apache.airavata.workflow.model.gpel.script.BPELScript;
import org.apache.airavata.workflow.model.graph.GraphException;
import org.apache.airavata.workflow.model.graph.GraphSchema;
import org.apache.airavata.workflow.model.graph.Node;
import org.apache.airavata.workflow.model.graph.impl.NodeImpl;
import org.apache.airavata.workflow.model.graph.system.InputNode;
import org.apache.airavata.workflow.model.graph.system.OutputNode;
import org.apache.airavata.workflow.model.graph.util.GraphUtil;
import org.apache.airavata.workflow.model.graph.ws.WSGraph;
import org.apache.airavata.workflow.model.graph.ws.WSGraphFactory;
import org.apache.airavata.workflow.model.graph.ws.WSNode;
//import org.apache.airavata.workflow.model.ode.ODEBPELTransformer;
//import org.apache.airavata.workflow.model.ode.ODEDeploymentDescriptor;
//import org.apache.airavata.workflow.model.ode.ODEWSDLTransformer;
//import org.apache.airavata.workflow.model.ode.WSDLCleaner;
import org.apache.airavata.workflow.model.utils.ApplicationVersion;
import org.apache.airavata.workflow.model.utils.WorkflowConstants;
import org.apache.commons.codec.binary.Base64;
//import org.gpel.GpelConstants;
//import org.gpel.model.GpelProcess;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.xmlpull.infoset.XmlElement;
import org.xmlpull.infoset.XmlInfosetBuilder;
import org.xmlpull.infoset.XmlNamespace;
//import xsul5.XmlConstants;
//import xsul5.wsdl.WsdlDefinitions;
public class Workflow implements Cloneable {
/**
 * Namespace prefix
 */
public static final String NS_PREFIX_XWF = "xwf";
/**
 * Namespace URI
 */
public static final String NS_URI_XWF = WorkflowConstants.NS_URI_XBAYA + "xwf";
/**
 * Namespace
 */
public static final XmlNamespace NS_XWF = XMLUtil.BUILDER.newNamespace(NS_PREFIX_XWF, NS_URI_XWF);
/**
 * WORKFLOW_TAG
 */
public static final String WORKFLOW_TAG = "workflow";
// tag/attribute names used when (de)serializing a workflow document
private static final String VERSION_ATTRIBUTE = "version";
private static final String WSDLS_TAG = "wsdls";
private static final String WSDL_TAG = "wsdl";
private static final String BPEL_TAG = "bpel";
private static final String WORKFLOW_WSDL_TAG = "workflowWSDL";
private static final String IMAGE_TAG = "image";
private static final String ID_ATTRIBUTE = "id";
private static final Logger logger = LoggerFactory.getLogger(Workflow.class);
// the graph of nodes and edges that constitutes the workflow body
private WSGraph graph;
// optional rendered image of the graph, serialized alongside the workflow
private BufferedImage image;
private URI gpelTemplateID;
private URI gpelInstanceID;
// private GpelProcess gpelProcess;
//
// private WsdlDefinitions workflowWSDL;
//
// private WsdlDefinitions odeInvokableWSDL;
//
// private WsdlDefinitions odeWorkflowWSDL;
//
// private GpelProcess odeProcess;
//
// private Map<String, WsdlDefinitions> odeWsdlMap;
// whether the workflow may currently be edited
private boolean editable=true;
/**
 * Used only during the parsing of xwf or loading from GPEL.
 */
// private Map<String, WsdlDefinitions> wsdlMap;
private XmlElement odeDeploymentDiscriptor;
// volatile: execution state may be read/written from multiple threads
private volatile WorkflowExecutionState executionState = WorkflowExecutionState.NONE;
// private WsdlDefinitions tridentWSDL;
/**
 * Constructs a Workflow.
 */
public Workflow() {
    // this.wsdlMap = new HashMap<String, WsdlDefinitions>();
    // Create an empty graph here to avoid null checks.
    this.graph = WSGraphFactory.createGraph();
}
/**
 * Constructs a Workflow.
 *
 * @param workflowString
 *            Workflow serialized as a JSON String (the legacy XML parsing
 *            path is commented out below).
 * @throws GraphException
 * @throws ComponentException
 */
public Workflow(String workflowString) throws GraphException, ComponentException {
    this();
    try {
        // XmlElement workflowElement = XMLUtil.stringToXmlElement(workflowString);
        JsonObject workflowElement = JSONUtil.stringToJSONObject(workflowString);
        parse(workflowElement);
    } catch (RuntimeException e) {
        // only unchecked failures are wrapped here; parse() may still throw
        // GraphException/ComponentException directly
        throw new GraphException(e);
    }
}
/**
 * Constructs a workflow from a given URI.
 *
 * @param workflowFilePath The workflow URI path.
 * @throws GraphException If an error occurred while creating workflow.
 * @throws ComponentException If an error occurred while parsing the workflow content.
 */
public Workflow(URI workflowFilePath) throws GraphException, ComponentException {
    this();
    // try-with-resources: the stream returned by openStream() was previously
    // never closed, leaking the underlying connection/file handle.
    try (InputStream in = workflowFilePath.toURL().openStream()) {
        XmlElement workflowElement = XMLUtil.loadXML(in);
        parse(workflowElement);
    } catch (RuntimeException | IOException e) {
        throw new GraphException(e);
    }
}
/**
 * Constructs a Workflow.
 *
 * @param workflowElement the workflow serialized as an XML element
 * @throws GraphException
 * @throws ComponentException
 */
public Workflow(XmlElement workflowElement) throws GraphException, ComponentException {
    this();
    parse(workflowElement);
}

/**
 * Constructs a Workflow from its JSON representation.
 *
 * @param workflowObject the workflow serialized as a JSON object
 * @throws GraphException
 * @throws ComponentException
 */
public Workflow(JsonObject workflowObject) throws GraphException, ComponentException {
    this();
    parse(workflowObject);
}
/**
 * This is used for ODE
 *
 * @return The Template ID like id
 */
public URI getUniqueWorkflowName() {
    String uniqueName = WorkflowConstants.LEAD_NS + "/" + getName();
    try {
        return new URI(uniqueName);
    } catch (URISyntaxException cause) {
        throw new WorkflowRuntimeException(cause);
    }
}
/**
 * Returns the workflow namespace (the LEAD namespace) as a URI.
 */
public URI getNameSpace() {
    try {
        return new URI(WorkflowConstants.LEAD_NS);
    } catch (URISyntaxException cause) {
        throw new WorkflowRuntimeException(cause);
    }
}
/**
 * Returns the gpelInstanceID.
 *
 * @return The gpelInstanceID
 */
public URI getGPELInstanceID() {
    return this.gpelInstanceID;
}

/**
 * Sets gpelInstanceID.
 *
 * @param gpelInstanceID
 *            The gpelInstanceID to set.
 */
public void setGPELInstanceID(URI gpelInstanceID) {
    this.gpelInstanceID = gpelInstanceID;
}

/**
 * Returns the name (delegates to the underlying graph).
 *
 * @return The name
 */
public String getName() {
    return this.graph.getName();
}

/**
 * Sets name (delegates to the underlying graph).
 *
 * @param name
 *            The name to set.
 */
public void setName(String name) {
    this.graph.setName(name);
}

/**
 * Returns the description (delegates to the underlying graph).
 *
 * @return The description
 */
public String getDescription() {
    return this.graph.getDescription();
}

/**
 * Sets description (delegates to the underlying graph).
 *
 * @param description
 *            The description to set.
 */
public void setDescription(String description) {
    this.graph.setDescription(description);
}

/**
 * Returns the graph.
 *
 * @return The graph
 */
public WSGraph getGraph() {
    return this.graph;
}

/**
 * Sets graph.
 *
 * @param graph
 *            The graph to set.
 */
public void setGraph(WSGraph graph) {
    this.graph = graph;
}

/**
 * Returns the image.
 *
 * @return The image
 */
public BufferedImage getImage() {
    return this.image;
}

/**
 * Sets image.
 *
 * @param image
 *            The image to set.
 */
public void setImage(BufferedImage image) {
    this.image = image;
}
// /**
// * Returns the gpelProcess.
// *
// * @return The gpelProcess
// */
// public GpelProcess getGpelProcess() {
// return this.gpelProcess;
// }
//
// /**
// * Sets gpelProcess.
// *
// * @param gpelProcess
// * The gpelProcess to set.
// */
// public void setGpelProcess(GpelProcess gpelProcess) {
// this.gpelProcess = gpelProcess;
// }
//
// /**
// * Returns the workflowWSDL.
// *
// * @return The workflowWSDL
// */
// public WsdlDefinitions getWorkflowWSDL() {
// return this.workflowWSDL;
// }
//
// /**
// * Sets workflowWSDL.
// *
// * @param workflowWSDL
// * The workflowWSDL to set.
// */
// public void setWorkflowWSDL(WsdlDefinitions workflowWSDL) {
// this.workflowWSDL = workflowWSDL;
// }
//
// /**
// * @return The set of WSDLs
// */
// public Map<String, WsdlDefinitions> getWSDLs() {
//
// Map<String, WsdlDefinitions> wsdls = new LinkedHashMap<String, WsdlDefinitions>();
//// Map<WsdlDefinitions, String> ids = new HashMap<WsdlDefinitions, String>();
//// // Use LinkedHashMap to preserve the order of WSDLs, which is useful for
//// // some unit tests.
////
//// for (WSNode node : GraphUtil.getNodes(this.graph, WSNode.class)) {
//// WsdlDefinitions wsdl = node.getComponent().getWSDL();
//// if (wsdls.containsValue(wsdl)) {
//// String id = ids.get(wsdl);
//// node.setWSDLID(id);
//// } else {
//// // Assign unique key
//// String name = WSDLUtil.getWSDLName(wsdl);
//// String id = StringUtil.convertToJavaIdentifier(name);
//// while (wsdls.containsKey(id)) {
//// id = StringUtil.incrementName(id);
//// }
//// wsdls.put(id, wsdl);
//// ids.put(wsdl, id);
//// node.setWSDLID(id);
//// }
//// }
// return wsdls;
// }
//
// /**
// * This method is called by GPELClient during loading a workflow.
// *
// * @param id
// * @param wsdl
// */
// public void addWSDL(String id, WsdlDefinitions wsdl) {
// logger.debug("id: " + id);
// this.wsdlMap.put(id, wsdl);
// }
/**
 * Creates a node for the given component and registers it with this
 * workflow's graph.
 *
 * @param component
 *            The component to instantiate a node from
 * @return The newly created node
 */
public Node addNode(Component component) {
    return component.createNode(this.graph);
}
/**
 * Deletes the given node from this workflow's graph.
 *
 * @param node
 *            The node to remove
 * @throws GraphException if the graph rejects the removal
 */
public void removeNode(Node node) throws GraphException {
    this.graph.removeNode(node);
}
/**
 * Merges another workflow's graph into this workflow's graph.
 *
 * @param workflow
 *            The workflow whose graph should be imported
 * @throws GraphException if the graphs cannot be merged
 */
public void importWorkflow(Workflow workflow) throws GraphException {
    this.graph.importGraph(workflow.getGraph());
}
/**
 * Collects the workflow-level input ports, one per InputNode in the graph.
 *
 * @return The inputs of the workflow as WSComponentPort descriptors
 * @throws ComponentException
 */
public List<WSComponentPort> getInputs() throws ComponentException {
    List<WSComponentPort> inputPorts = new ArrayList<WSComponentPort>();
    for (InputNode node : GraphUtil.getNodes(this.graph, InputNode.class)) {
        inputPorts.add(new WSComponentPort(node.getName(), node.getOutputPorts().get(0).getType(), null));
    }
    return inputPorts;
}
/**
 * Builds WorkflowInput descriptors for every input node of the graph,
 * carrying each node's ID, parameter type, default value (used for both
 * the default and current value), and inverted visibility flag.
 *
 * @return one WorkflowInput per input node
 * @throws Exception if the input nodes cannot be read
 */
public List<WorkflowInput> getWorkflowInputs() throws Exception {
    List<WorkflowInput> inputs = new ArrayList<WorkflowInput>();
    for (InputNode node : GraphUtil.getInputNodes(getGraph())) {
        inputs.add(new WorkflowInput(node.getID(), node.getParameterType(),
                node.getDefaultValue(), node.getDefaultValue(), !node.isVisibility()));
    }
    return inputs;
}
/**
 * Returns the outputs of the workflow.
 *
 * @return The outputs of the workflow.
 * @throws ComponentException
 */
public List<WSComponentPort> getOutputs() throws ComponentException {
    List<OutputNode> nodes = GraphUtil.getNodes(this.graph, OutputNode.class);
    List<WSComponentPort> ports = new ArrayList<WSComponentPort>();
    for (OutputNode outputNode : nodes) {
        // NOTE(review): this reads the type from the node's *output* ports,
        // mirroring getInputs() above. For an OutputNode the workflow value
        // normally arrives on its input port, so getOutputPorts() here may be
        // a copy-paste slip — confirm against OutputNode's port layout.
        ports.add(new WSComponentPort(outputNode.getName(), outputNode.getOutputPorts().get(0).getType(), null));
    }
    return ports;
}
/**
 * Serializes this workflow to its XML text form.
 *
 * @return The XML text of the workflow
 * @deprecated serialize the result of {@link #toXML()} instead
 */
@Deprecated
public String toXMLText() {
    return XMLUtil.xmlElementToString(toXML());
}
/**
 * Assigns the given identifier to the underlying graph.
 *
 * @param id the new graph ID
 */
public void setGraphID(String id) {
    getGraph().setID(id);
}
/**
 * Serializes this workflow into its XWF XmlElement form: the application
 * version attribute, the serialized graph, an (currently empty) WSDL
 * container kept for format compatibility, and — when present — the
 * preview image as chunked base64-encoded PNG text.
 *
 * @return The XmlElement of the workflow
 */
public XmlElement toXML() {

    // This must be before graph.toXML() to set WSDL ID to each node.
    //FIXME
    // Map<String, WsdlDefinitions> wsdls = getWSDLs();

    XmlElement workflowElement = XMLUtil.BUILDER.newFragment(NS_XWF, WORKFLOW_TAG);

    // Version
    workflowElement.setAttributeValue(NS_XWF, VERSION_ATTRIBUTE, ApplicationVersion.VERSION.getVersion());

    // TODO add modification time

    // Graph
    workflowElement.addElement(this.graph.toXML());

    // WSDLs: the container element is still emitted (empty) so existing
    // readers find the tag; per-WSDL children were disabled by the wsdlMap
    // refactor. The previously unused local holding the element is gone.
    workflowElement.addElement(NS_XWF, WSDLS_TAG);

    // Image
    if (this.image != null) {
        try {
            ByteArrayOutputStream outStream = new ByteArrayOutputStream();
            ImageIO.write(this.image, WorkflowConstants.PNG_FORMAT_NAME, outStream);
            byte[] base64 = Base64.encodeBase64Chunked(outStream.toByteArray());
            XmlElement imageElement = workflowElement.addElement(NS_XWF, IMAGE_TAG);
            // Base64 text is pure ASCII; name the charset explicitly instead
            // of relying on the platform default.
            imageElement.setText(new String(base64, java.nio.charset.Charset.forName("US-ASCII")));
        } catch (IOException e) {
            // The image is only a preview; losing it is not fatal.
            logger.error(e.getMessage(), e);
        }
    }

    return workflowElement;
}
/**
 * Serializes this workflow into its JSON representation: version, graph,
 * and (when available) the image as base64-encoded PNG text.
 *
 * @return the JSON object describing this workflow
 */
public JsonObject toJSON() {
    JsonObject root = new JsonObject();
    JsonObject workflowObject = new JsonObject();
    root.add(WORKFLOW_TAG, workflowObject);
    workflowObject.addProperty(VERSION_ATTRIBUTE, ApplicationVersion.VERSION.getVersion());
    workflowObject.add(GraphSchema.GRAPH_TAG, this.graph.toJSON());
    if (this.image != null) {
        try {
            workflowObject.addProperty(IMAGE_TAG, getBase64String());
        } catch (IOException e) {
            logger.error("Failed to attached image to workflow description", e);
        }
    }
    return root;
}
/**
 * Encodes the current workflow image as chunked base64 PNG text.
 *
 * @return the base64 text for the image
 * @throws IOException if the image cannot be encoded as PNG
 */
private String getBase64String() throws IOException {
    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    ImageIO.write(this.image, WorkflowConstants.PNG_FORMAT_NAME, outputStream);
    byte[] base64 = Base64.encodeBase64Chunked(outputStream.toByteArray());
    // Base64 output is pure ASCII; name the charset explicitly rather than
    // depending on the platform default.
    return new String(base64, java.nio.charset.Charset.forName("US-ASCII"));
}
/**
 * Binds WSNodes to components.
 *
 * NOTE(review): the original WSDL-lookup and component-creation logic was
 * commented out during a refactor, so the live behavior is: log each WS
 * node's WSDL ID, then normalize the graph's parameter nodes and
 * re-propagate port types. The `components` cache below is currently
 * unused by any live code.
 *
 * @throws ComponentException
 * @throws GraphException
 */
public void bindComponents() throws ComponentException, GraphException {
    // This map is to avoid creating multiple instances for the a component
    Map<WSComponentKey, WSComponent> components = new HashMap<WSComponentKey, WSComponent>();
    for (WSNode node : GraphUtil.getWSNodes(this.graph)) {
        String id = node.getWSDLID();
        logger.debug("id: " + id);
        // NOTE(review): the wsdlMap lookup, legacy-ID fallback, and
        // WSComponentFactory.createComponent call that once populated the
        // `components` cache and bound each node were disabled (commented
        // out); only the logging above remains in this loop.
    }
    this.graph.fixParameterNodes();
    GraphUtil.propagateTypes(this.graph);
}
/**
 * Deep-copies this workflow by serializing it to XML and re-parsing the
 * cloned element into a fresh Workflow instance.
 *
 * @see java.lang.Object#clone()
 */
@Override
public Workflow clone() {
    XmlElement originalXML = toXML();
    try {
        XmlElement newXML = XMLUtil.deepClone(originalXML);
        Workflow newWorkflow = new Workflow(newXML);
        return newWorkflow;
    } catch (GraphException e) {
        // Round-tripping our own serialization should never fail.
        throw new WorkflowRuntimeException(e);
    } catch (WorkflowException e) {
        // This should not happen.
        throw new WorkflowRuntimeException(e);
    } catch (AiravataException e) {
        // This should not happen. (These three identical handlers could
        // collapse into one multi-catch on Java 7+.)
        throw new WorkflowRuntimeException(e);
    }
}
/**
 * Wraps an existing WSGraph in a new Workflow, copying the graph's name
 * and description onto the workflow.
 *
 * @param graph the graph to wrap
 * @return The workflow
 */
public static Workflow graphToWorkflow(WSGraph graph) {
    Workflow result = new Workflow();
    result.setGraph(graph);
    result.setName(graph.getName());
    result.setDescription(graph.getDescription());
    return result;
}
/**
 * Populates this workflow from its XWF XmlElement form: rebuilds the
 * graph, rebinds components, and restores the optional embedded image.
 * The legacy BPEL and workflow-WSDL sections are still read so malformed
 * text is reported, but their parsed results are no longer stored (the
 * assignments were commented out).
 *
 * @param workflowElement
 * @throws GraphException
 * @throws ComponentException
 */
private void parse(XmlElement workflowElement) throws GraphException, ComponentException {
    // Graph
    XmlElement graphElement = workflowElement.element(GraphSchema.GRAPH_TAG);
    this.graph = WSGraphFactory.createGraph(graphElement);
    // NOTE(review): WSDL deserialization into wsdlMap (including the legacy
    // fallback ID derivation) is disabled here — commented out upstream.
    bindComponents();
    // Image: chunked base64 PNG text. A broken image is logged and ignored
    // because it can be regenerated at any time.
    XmlElement imageElement = workflowElement.element(IMAGE_TAG);
    if (imageElement != null) {
        String base64 = imageElement.requiredText();
        byte[] bytes = Base64.decodeBase64(base64.getBytes());
        try {
            this.image = ImageIO.read(new ByteArrayInputStream(bytes));
        } catch (IOException e) {
            // This should not happen and it's OK that image is broken. We
            // can reproduce it anytime.
            logger.error(e.getMessage(), e);
        }
    }
    XmlElement bpelElement = workflowElement.element(BPEL_TAG);
    if (bpelElement != null) {
        try {
            String bpelString = bpelElement.requiredText();
            XmlNamespace gpelNS = XmlInfosetBuilder.newInstance().newNamespace(BPELScript.GPEL, BPELScript.GPELNS);
            // NOTE(review): bpelString/gpelNS are currently unused — the
            // GpelProcess construction is commented out — so this block only
            // verifies the BPEL text is readable.
        } catch (RuntimeException e) {
            String error = "Failed to parse the BPEL document.";
            throw new GraphException(error, e);
        }
    }
    XmlElement workflowWSDLElement = workflowElement.element(WORKFLOW_WSDL_TAG);
    if (workflowWSDLElement != null) {
        try {
            String wsdlText = workflowWSDLElement.requiredText();
            // NOTE(review): result unused; the WsdlDefinitions construction
            // is commented out, so this only verifies the text is readable.
        } catch (RuntimeException e) {
            String error = "Failed to parse the workflow WSDL.";
            throw new GraphException(error, e);
        }
    }
}
/**
 * Populates this workflow from its JSON form: requires a top-level
 * "workflow" object, rebuilds the graph from its "graph" member, rebinds
 * components, and restores the optional base64-encoded image.
 *
 * @param workflowObject the root JSON object to read
 * @throws GraphException if the workflow object is missing or the graph
 *             cannot be rebuilt
 * @throws ComponentException
 */
private void parse(JsonObject workflowObject) throws GraphException, ComponentException {
    // Graph
    if (workflowObject.getAsJsonObject(WORKFLOW_TAG) == null) {
        throw new GraphException("Failed to parse the json object, workflow object doesn't exist");
    }
    JsonObject workflowObj = workflowObject.getAsJsonObject(WORKFLOW_TAG);
    JsonObject graphObject = workflowObj.getAsJsonObject(GraphSchema.GRAPH_TAG);
    this.graph = WSGraphFactory.createGraph(graphObject);
    bindComponents();
    // Image: optional chunked base64 PNG; decode failures are logged and
    // ignored since the preview can be regenerated.
    JsonPrimitive imagePrimitive = workflowObj.getAsJsonPrimitive(IMAGE_TAG);
    if (imagePrimitive != null) {
        String base64 = imagePrimitive.getAsString();
        byte[] bytes = Base64.decodeBase64(base64.getBytes());
        try {
            this.image = ImageIO.read(new ByteArrayInputStream(bytes));
        } catch (IOException e) {
            logger.error(e.getMessage(), e);
        }
    }
}
/**
 * Returns the cached ODE deployment descriptor.
 *
 * NOTE(review): the lazy generation logic is commented out, so within the
 * visible code odeDeploymentDiscriptor is never assigned and this method
 * returns whatever the field currently holds (presumably null) — confirm
 * before relying on it. The parameters are unused for the same reason.
 *
 * @param dscUrl DSC service URL (currently unused)
 * @param odeEprEndingWithPort ODE endpoint (currently unused)
 * @throws GraphException
 * @throws ComponentException
 */
public XmlElement getODEDeploymentDescriptor(URI dscUrl, String odeEprEndingWithPort) throws GraphException,
ComponentException {
    // if (this.odeDeploymentDiscriptor == null) {
    // this.odeDeploymentDiscriptor = new ODEDeploymentDescriptor().generate(this.getName(),
    // getOdeWorkflowWSDL(dscUrl, odeEprEndingWithPort), getOdeProcess(dscUrl, odeEprEndingWithPort),
    // getOdeServiceWSDLs(dscUrl, odeEprEndingWithPort));
    // }
    return this.odeDeploymentDiscriptor;
}
// /**
// * Returns the odeWorkflowWSDL.
// *
// * @return The odeWorkflowWSDL
// * @throws GraphException
// * @throws ComponentException
// */
// public WsdlDefinitions getOdeInvokableWSDL(URI dscUrl, String odeEprEndingWithPort) throws GraphException,
// ComponentException {
// if (this.odeInvokableWSDL == null) {
// generateODEScripts(dscUrl, odeEprEndingWithPort);
// }
// return this.odeInvokableWSDL;
// }
//
// /**
// * Returns the odeProcess.
// *
// * @return The odeProcess
// * @throws ComponentException
// * @throws GraphException
// */
// public GpelProcess getOdeProcess(URI dscUrl, String odeEprEndingWithPort) throws GraphException, ComponentException {
// if (this.odeProcess == null) {
// generateODEScripts(dscUrl, odeEprEndingWithPort);
// }
// return this.odeProcess;
// }
//
// /**
// * Returns the odeWsdlMap.
// *
// * @return The odeWsdlMap
// * @throws ComponentException
// * @throws GraphException
// */
// public Map<String, WsdlDefinitions> getOdeServiceWSDLs(URI dscUrl, String odeEprEndingWithPort)
// throws GraphException, ComponentException {
// if (this.odeWsdlMap == null) {
// generateODEScripts(dscUrl, odeEprEndingWithPort);
// }
//
// return this.odeWsdlMap;
// }
//
// /**
// * Returns the odeWorkflowWSDL.
// *
// * @return The odeWorkflowWSDL
// * @throws ComponentException
// * @throws GraphException
// */
// public WsdlDefinitions getOdeWorkflowWSDL(URI dscUrl, String odeEprEndingWithPort) throws GraphException,
// ComponentException {
// if (this.odeWorkflowWSDL == null) {
// generateODEScripts(dscUrl, odeEprEndingWithPort);
// }
//
// return this.odeWorkflowWSDL;
// }
//
// public WsdlDefinitions getTridentWorkflowWSDL(URI dscUrl, String odeEprEndingWithPort) throws GraphException,
// ComponentException {
// if (this.tridentWSDL == null) {
// generateODEScripts(dscUrl, odeEprEndingWithPort);
// }
//
// return this.tridentWSDL;
// }
//
// private void generateODEScripts(URI dscUrl, String odeEprEndingWithPort) throws GraphException, ComponentException {
// this.getGraph().setID(this.getName());
//
// BPELScript script = null;
//
// script = new BPELScript(this);
// ODEWSDLTransformer wsdlTransformer = new ODEWSDLTransformer();
// script.create(BPELScriptType.BPEL2);
// this.odeProcess = script.getGpelProcess();
// this.odeProcess.setTargetNamespace(WorkflowConstants.LEAD_NS);
//
// WsdlDefinitions abstractWorkflowWsdl = script.getWorkflowWSDL().getWsdlDefinitions();
// this.odeWorkflowWSDL = abstractWorkflowWsdl;
// try {
// this.odeInvokableWSDL = WSDLUtil.stringToWSDL(abstractWorkflowWsdl.xmlString());
// wsdlTransformer.makeWorkflowWSDLConcrete(this.odeInvokableWSDL, this.getName(), dscUrl);
// wsdlTransformer.setOdeLocation(odeEprEndingWithPort, this.getName(), this.odeInvokableWSDL);
//
// this.odeWsdlMap = new HashMap<String, WsdlDefinitions>();
// Collection<XmlElement> itr = script.getWSDLs();
// for (XmlElement xmlElement : itr) {
// WsdlDefinitions wsdl = WSDLUtil.stringToWSDL(XmlConstants.BUILDER.serializeToString(xmlElement));
// String id = xmlElement.attributeValue(NS_XWF, ID_ATTRIBUTE);
// if (id == null || id.length() == 0) {
// // xwf up to 2.2.6_2 doesn't have ID.
// id = WSDLUtil.getWSDLQName(wsdl).toString();
// if (null == id || "".equals(id) || (id.startsWith("{") && id.endsWith("}"))) {
// QName wsdlQname = new QName(NS_XWF.getName(), WSDLUtil.getFirstOperationName(wsdl,
// WSDLUtil.getFirstPortTypeQName(wsdl)));
// id = wsdlQname.toString();
// wsdl.xml().setAttributeValue("name", wsdlQname.getLocalPart());
// }
// }
// WSDLCleaner.cleanWSDL(wsdl);
// this.odeWsdlMap.put(id, wsdl);
// }
// } catch (Exception e) {
// logger.error(e.getMessage(), e);
// }
// new ODEBPELTransformer()
// .generateODEBPEL(this.odeProcess, this.getName(), this.odeWorkflowWSDL, this.odeWsdlMap);
//
// wsdlTransformer.trasnformToODEWsdls(this.getName(), dscUrl, this.odeWorkflowWSDL, this.odeWsdlMap);
//
// String wsdlString = XMLUtil.xmlElementToString(this.odeWorkflowWSDL.xml());
// this.tridentWSDL = new WsdlDefinitions(XMLUtil.stringToXmlElement(wsdlString));
// new TridentTransformer().process(this.tridentWSDL);
//
// }
/**
 * @return the qualified name of this workflow in the LEAD namespace
 */
public QName getQname() {
    String localPart = this.getName();
    return new QName(WorkflowConstants.LEAD_NS, localPart);
}
/**
 * Records the GPEL template this workflow was deployed from.
 *
 * @param templateID the template URI to remember
 */
public void setGPELTemplateID(URI templateID) {
    this.gpelTemplateID = templateID;
}
/**
 * @return the GPEL template URI this workflow was deployed from, or null
 */
public URI getGPELTemplateID() {
    return gpelTemplateID;
}
/**
 * Compares two workflows by graph equality only.
 *
 * NOTE(review): this OVERLOADS rather than overrides
 * {@link Object#equals(Object)}; Object-typed comparisons and hash-based
 * collections still use identity, and hashCode is not aligned with this
 * method. Confirm callers expect that before changing it.
 *
 * @param workflow the workflow to compare against
 * @return true if both workflows hold equal graphs
 */
public boolean equals(Workflow workflow) {
    return this.graph.equals(workflow.getGraph());
}
private Object executionStateLock=new Object();
/**
 * Returns the current execution state of this workflow.
 *
 * Reads are serialized on executionStateLock, the same monitor the setter
 * uses, so the former method-level {@code synchronized} (a second,
 * independent lock on {@code this}) was redundant and has been removed.
 *
 * @return the current execution state
 */
public WorkflowExecutionState getExecutionState() {
    synchronized (executionStateLock) {
        return this.executionState;
    }
}
/**
 * Updates the execution state of this workflow.
 *
 * Writes are serialized on executionStateLock, the same monitor the getter
 * uses, so the former method-level {@code synchronized} (a second,
 * independent lock on {@code this}) was redundant and has been removed.
 *
 * @param state the new execution state
 */
public void setExecutionState(WorkflowExecutionState state) {
    synchronized (executionStateLock) {
        this.executionState = state;
    }
}
/**
 * @return whether this workflow may currently be edited
 */
public boolean isEditable() {
    return this.editable;
}
/**
 * Toggles edit mode and propagates the flag to the underlying graph.
 *
 * @param editable whether the workflow should accept edits
 */
public void setEditable(boolean editable) {
    this.editable = editable;
    getGraph().setEditable(isEditable());
}
/**
 * Generates a BPEL script for this workflow.
 *
 * NOTE(review): the GPEL-process and workflow-WSDL extraction steps are
 * commented out, so the only remaining live behavior is constructing the
 * BPELScript (plus any side effects of its constructor) — confirm whether
 * this method is still needed in its current form.
 *
 * @throws GraphException
 */
public void createScript() throws GraphException {
    // Generate a BPEL process.
    BPELScript script = new BPELScript(this);
    // script.create(BPELScriptType.GPEL);
    // this.setGpelProcess(script.getGpelProcess());
    // this.setWorkflowWSDL(script.getWorkflowWSDL().getWsdlDefinitions());
}
/**
 * Lists the IDs of every web-service node in this workflow's graph.
 *
 * @return the IDs of all WSNode instances, in graph order
 */
public List<String> getWorkflowServiceNodeIDs() {
    ArrayList<String> serviceNodeIds = new ArrayList<String>();
    for (NodeImpl candidate : getGraph().getNodes()) {
        if (candidate instanceof WSNode) {
            serviceNodeIds.add(candidate.getID());
        }
    }
    return serviceNodeIds;
}
}
| |
package com.intellij.spring.refactoring;
import com.intellij.lang.refactoring.InlineHandler;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.project.Project;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiReference;
import com.intellij.psi.codeStyle.CodeStyleManager;
import com.intellij.psi.xml.XmlAttribute;
import com.intellij.psi.xml.XmlAttributeValue;
import com.intellij.psi.xml.XmlTag;
import com.intellij.spring.impl.model.beans.SpringBeanImpl;
import com.intellij.spring.model.SpringUtils;
import com.intellij.spring.model.xml.DomSpringBean;
import com.intellij.spring.model.xml.beans.*;
import com.intellij.usageView.UsageInfo;
import com.intellij.util.Function;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.containers.MultiMap;
import com.intellij.util.xml.DomElement;
import com.intellij.util.xml.DomManager;
import com.intellij.util.xml.GenericAttributeValue;
import com.intellij.util.xml.GenericDomValue;
import com.intellij.util.xml.reflect.DomCollectionChildDescription;
import com.intellij.util.xml.reflect.DomFixedChildDescription;
import com.intellij.util.xml.reflect.DomGenericInfo;
import org.jetbrains.annotations.NonNls;
import java.util.Collection;
import java.util.Set;
/**
* @author Dmitry Avdeev
*/
/**
 * Inline refactoring handler for Spring XML bean definitions. Inlines a
 * referenced bean into the places that point at it: a "parent" attribute
 * is resolved by merging the parent's settings into the child bean, while
 * bean references inside element holders or collections are replaced with
 * a copy of the referenced bean itself.
 *
 * @author Dmitry Avdeev
 */
public class SpringInlineHandler implements InlineHandler {

    private final static Logger LOG = Logger.getInstance("#com.intellij.spring.refactoring.SpringInlineHandler");
    // XML attribute that points at a parent bean definition.
    @NonNls private static final String PARENT_ATTR = "parent";

    /**
     * Always inlines into every reference; the "only this one reference"
     * mode is never offered.
     */
    public Settings prepareInlineElement(final PsiElement element, final Editor editor, final boolean invokedOnReference) {
        return new Settings() {
            public boolean isOnlyOneReferenceToInline() {
                return false;
            }
        };
    }

    /**
     * Deletes the inlined bean's own definition by undefining its DOM
     * element. Non-tag elements are ignored.
     */
    public void removeDefinition(final PsiElement element, Settings settings) {
        final DomElement domElement;
        if (element instanceof XmlTag) {
            domElement = DomManager.getDomManager(element.getProject()).getDomElement((XmlTag)element);
            if (domElement != null) {
                domElement.undefine();
            }
        }
    }

    /**
     * Builds the Inliner that rewrites each usage of the referenced bean.
     * Returns null (no inlining) when the referenced element is not an XML
     * tag.
     */
    public Inliner createInliner(final PsiElement element, Settings settings) {
        if (!(element instanceof XmlTag)) {
            return null;
        }
        return new Inliner() {

            // No conflict detection is performed for this refactoring.
            public MultiMap<PsiElement,String> getConflicts(final PsiReference reference, final PsiElement referenced) {
                return null;
            }

            public void inlineUsage(final UsageInfo usage, final PsiElement referenced) {
                if (!(referenced instanceof XmlTag)) {
                    return;
                }
                final Project project = referenced.getProject();
                final DomManager domManager = DomManager.getDomManager(project);
                final DomSpringBean bean = (DomSpringBean)domManager.getDomElement((XmlTag)referenced);

                PsiElement psiElement = usage.getElement();
                if (psiElement instanceof XmlAttributeValue) {
                    final XmlAttribute attribute = (XmlAttribute)psiElement.getParent();
                    final GenericAttributeValue value = domManager.getDomElement(attribute);
                    assert value != null;
                    final DomElement parent = value.getParent();
                    assert parent != null;
                    if (parent instanceof SpringBean) {
                        final String attrName = attribute.getName();
                        if (attrName.equals(PARENT_ATTR)) {
                            // Inlining a parent="..." reference: pull every
                            // merged (inherited + local) setting down into
                            // this bean, then drop the attribute.
                            SpringBean thisBean = (SpringBean)parent;
                            mergeValue(thisBean, thisBean.getScope());
                            mergeValue(thisBean, thisBean.getAbstract());
                            mergeValue(thisBean, thisBean.getLazyInit());
                            mergeValue(thisBean, thisBean.getAutowireCandidate());
                            mergeValue(thisBean, thisBean.getAutowire());
                            mergeValue(thisBean, thisBean.getDependencyCheck());
                            mergeValue(thisBean, thisBean.getDependsOn());
                            mergeValue(thisBean, thisBean.getFactoryBean());
                            mergeValue(thisBean, thisBean.getFactoryMethod());
                            mergeValue(thisBean, thisBean.getInitMethod());
                            mergeValue(thisBean, thisBean.getDestroyMethod());
                            mergeValue(thisBean, thisBean.getDescription());
                            // Constructor args, properties, and replaced
                            // methods are collections: merge the inherited
                            // sets into this bean's own definition.
                            mergeList(thisBean, SpringBeanImpl.CTOR_ARGS_GETTER, new Function<SpringBean, ConstructorArg>() {
                                public ConstructorArg fun(final SpringBean springBean) {
                                    return springBean.addConstructorArg();
                                }
                            });
                            mergeList(thisBean, SpringBeanImpl.PROPERTIES_GETTER, new Function<SpringBean, SpringPropertyDefinition>() {
                                public SpringPropertyDefinition fun(final SpringBean springBean) {
                                    return springBean.addProperty();
                                }
                            });
                            mergeList(thisBean, new Function<SpringBean, Collection<ReplacedMethod>>() {
                                public Collection<ReplacedMethod> fun(final SpringBean springBean) {
                                    return springBean.getReplacedMethods();
                                }
                            }, new Function<SpringBean, ReplacedMethod>() {
                                public ReplacedMethod fun(final SpringBean springBean) {
                                    return springBean.addReplacedMethod();
                                }
                            });
                            value.undefine();
                            reformat(parent);
                        }
                    }
                    else if (parent instanceof SpringElementsHolder) {
                        // e.g. <property ref="..."/> directly under a holder:
                        // replace the reference with a copy of the bean.
                        copyBean(bean, parent);
                        value.undefine();
                        reformat(parent);
                    }
                    else {
                        // Reference nested one level deeper (e.g. inside a
                        // collection entry): copy the bean into the
                        // grandparent and remove the referencing element.
                        final DomElement grandParent = parent.getParent();
                        if (grandParent instanceof SpringElementsHolder) {
                            copyBean(bean, grandParent);
                            parent.undefine();
                            reformat(grandParent);
                        } else if (grandParent instanceof CollectionElements) {
                            copyBean(bean, grandParent);
                            parent.undefine();
                            reformat(grandParent);
                        }
                        else {
                            LOG.error("Cannot inline " + attribute);
                        }
                    }
                }
            }
        };
    }

    /**
     * Copies the merged (inherited) value onto the bean's own attribute when
     * it differs from the locally defined one.
     */
    private static <T extends GenericDomValue<?>> void mergeValue(SpringBean springBean, T value) {
        final T mergedValue = SpringUtils.getMergedValue(springBean, value);
        if (mergedValue != value) {
            value.setStringValue(mergedValue.getStringValue());
        }
    }

    /**
     * Reconciles a collection-like child (ctor args, properties, replaced
     * methods) with its merged set: locally defined entries not in the merged
     * set are removed, and inherited entries are appended via {@code adder}.
     */
    public static <T extends DomElement> void mergeList(final SpringBean springBean,
                                                        final Function<SpringBean, Collection<T>> getter,
                                                        final Function<SpringBean, T> adder) {
        final Set<T> merged = SpringUtils.getMergedSet(springBean, getter);
        final Collection<T> existing = getter.fun(springBean);
        for (T t : existing) {
            if (!merged.contains(t)) {
                t.undefine();
            } else {
                // Already present locally; don't add it again below.
                merged.remove(t);
            }
        }
        for (T t : merged) {
            final T newElement = adder.fun(springBean);
            newElement.copyFrom(t);
        }
    }

    /**
     * Reformats the XML tag backing the given DOM element, logging (not
     * rethrowing) formatter failures.
     */
    private static void reformat(final DomElement domElement) {
        try {
            CodeStyleManager.getInstance(domElement.getManager().getProject()).reformat(domElement.getXmlTag());
        }
        catch (IncorrectOperationException e) {
            LOG.error(e);
        }
    }

    /**
     * Clones {@code from} as a child of {@code parent}, using the matching
     * collection child description when one exists and falling back to the
     * first fixed child otherwise. The copy's id (and, for SpringBean, name)
     * are cleared so the inlined bean stays anonymous.
     */
    private static void copyBean(final DomSpringBean from, final DomElement parent) {
        final DomCollectionChildDescription description = (DomCollectionChildDescription)from.getChildDescription();
        final DomGenericInfo info = parent.getGenericInfo();
        final String name = description.getXmlElementName();
        final String namespaceKey = description.getXmlName().getNamespaceKey();
        final DomSpringBean to;
        DomCollectionChildDescription targetDescription = info.getCollectionChildDescription(name, namespaceKey);
        if (targetDescription != null) {
            to = (DomSpringBean)targetDescription.addValue(parent);
        } else {
            final DomFixedChildDescription fixedDescr = info.getFixedChildDescription(name, namespaceKey);
            assert fixedDescr != null;
            to = (DomSpringBean)fixedDescr.getValues(parent).get(0);
        }
        to.copyFrom(from);
        to.getId().undefine();
        if (to instanceof SpringBean) {
            ((SpringBean)to).getName().undefine();
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.samza.coordinator.stream;
import com.google.common.annotations.VisibleForTesting;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.samza.Partition;
import org.apache.samza.SamzaException;
import org.apache.samza.config.Config;
import org.apache.samza.config.MapConfig;
import org.apache.samza.coordinator.stream.messages.CoordinatorStreamMessage;
import org.apache.samza.coordinator.stream.messages.SetConfig;
import org.apache.samza.metrics.MetricsRegistry;
import org.apache.samza.serializers.JsonSerde;
import org.apache.samza.serializers.Serde;
import org.apache.samza.system.IncomingMessageEnvelope;
import org.apache.samza.system.SystemAdmin;
import org.apache.samza.system.SystemConsumer;
import org.apache.samza.system.SystemFactory;
import org.apache.samza.system.SystemStream;
import org.apache.samza.system.SystemStreamMetadata;
import org.apache.samza.system.SystemStreamMetadata.SystemStreamPartitionMetadata;
import org.apache.samza.system.SystemStreamPartition;
import org.apache.samza.system.SystemStreamPartitionIterator;
import org.apache.samza.util.Util;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A wrapper around a SystemConsumer that provides helpful methods for dealing
* with the coordinator stream.
*/
public class CoordinatorStreamSystemConsumer {
private static final Logger log = LoggerFactory.getLogger(CoordinatorStreamSystemConsumer.class);

// Coordinator-stream keys are JSON arrays; message bodies are JSON maps.
private final Serde<List<?>> keySerde;
private final Serde<Map<String, Object>> messageSerde;
// The coordinator stream always lives in a single partition (partition 0).
private final SystemStreamPartition coordinatorSystemStreamPartition;
private final SystemConsumer systemConsumer;
private final SystemAdmin systemAdmin;
// Latest SetConfig value per config key; mutated only while holding bootstrapLock.
private final Map<String, String> configMap;
// Lifecycle flags; volatile so unsynchronized readers observe fresh values.
private volatile boolean isStarted;
private volatile boolean isBootstrapped;
// Serializes bootstrap() passes over the stream.
private final Object bootstrapLock = new Object();
// Immutable snapshot of every message read so far; replaced wholesale by bootstrap().
private volatile Set<CoordinatorStreamMessage> bootstrappedStreamSet = Collections.emptySet();
/**
 * Builds a consumer wired to the coordinator stream named by the given
 * config, creating the SystemAdmin and SystemConsumer via the configured
 * SystemFactory.
 *
 * @param config   job configuration naming the coordinator system and stream
 * @param registry metrics registry handed to the underlying consumer
 */
public CoordinatorStreamSystemConsumer(Config config, MetricsRegistry registry) {
    SystemStream stream = Util.getCoordinatorSystemStream(config);
    SystemFactory factory = Util.getCoordinatorSystemFactory(config);
    this.systemAdmin = factory.getAdmin(stream.getSystem(), config);
    this.systemConsumer = factory.getConsumer(stream.getSystem(), config, registry);
    this.coordinatorSystemStreamPartition = new SystemStreamPartition(stream, new Partition(0));
    this.configMap = new HashMap<>();
    this.isBootstrapped = false;
    this.keySerde = new JsonSerde<>();
    this.messageSerde = new JsonSerde<>();
}
/**
 * Test-only constructor: injects pre-built consumer and admin instances
 * instead of creating them from config.
 */
public CoordinatorStreamSystemConsumer(SystemStream coordinatorSystemStream, SystemConsumer systemConsumer, SystemAdmin systemAdmin) {
    this.systemConsumer = systemConsumer;
    this.systemAdmin = systemAdmin;
    this.coordinatorSystemStreamPartition = new SystemStreamPartition(coordinatorSystemStream, new Partition(0));
    this.configMap = new HashMap<>();
    this.isBootstrapped = false;
    this.keySerde = new JsonSerde<>();
    this.messageSerde = new JsonSerde<>();
}
/**
 * Retrieves the oldest offset in the coordinator stream, and registers the
 * coordinator stream with the SystemConsumer using the earliest offset.
 * Idempotent after the consumer has started.
 */
public void register() {
    if (isStarted) {
        log.info("Coordinator stream partition {} has already been registered. Skipping.", coordinatorSystemStreamPartition);
        return;
    }
    log.debug("Attempting to register: {}", coordinatorSystemStreamPartition);
    Set<String> streamNames = new HashSet<String>();
    String streamName = coordinatorSystemStreamPartition.getStream();
    streamNames.add(streamName);
    Map<String, SystemStreamMetadata> systemStreamMetadataMap = systemAdmin.getSystemStreamMetadata(streamNames);
    // Validate BEFORE logging: the previous code called
    // systemStreamMetadataMap.toString() in the log line first, which threw
    // an NPE instead of the intended SamzaException when the admin returned
    // null.
    if (systemStreamMetadataMap == null) {
        throw new SamzaException("Received a null systemStreamMetadataMap from the systemAdmin. This is illegal.");
    }
    // Parameterized logging instead of eager String.format/toString.
    log.info("Got metadata {}", systemStreamMetadataMap);
    SystemStreamMetadata systemStreamMetadata = systemStreamMetadataMap.get(streamName);
    if (systemStreamMetadata == null) {
        throw new SamzaException("Expected " + streamName + " to be in system stream metadata.");
    }
    SystemStreamPartitionMetadata systemStreamPartitionMetadata = systemStreamMetadata.getSystemStreamPartitionMetadata().get(coordinatorSystemStreamPartition.getPartition());
    if (systemStreamPartitionMetadata == null) {
        throw new SamzaException("Expected metadata for " + coordinatorSystemStreamPartition + " to exist.");
    }
    String startingOffset = systemStreamPartitionMetadata.getOldestOffset();
    log.debug("Registering {} with offset {}", coordinatorSystemStreamPartition, startingOffset);
    systemConsumer.register(coordinatorSystemStreamPartition, startingOffset);
}
/**
 * Starts the underlying SystemConsumer and SystemAdmin exactly once;
 * repeated calls are logged and ignored.
 */
public void start() {
    if (!isStarted) {
        log.info("Starting coordinator stream system consumer.");
        systemConsumer.start();
        systemAdmin.start();
        isStarted = true;
    } else {
        log.info("Coordinator stream consumer already started");
    }
}
/**
 * Stops the underlying SystemConsumer and SystemAdmin and marks this
 * consumer as no longer started.
 */
public void stop() {
    log.info("Stopping coordinator stream system consumer.");
    this.systemConsumer.stop();
    this.systemAdmin.stop();
    this.isStarted = false;
}
/**
 * Read all messages from the earliest offset, all the way to the latest.
 * Currently, this method only pays attention to config messages.
 *
 * Thread-safe: the whole pass is serialized on bootstrapLock, and readers
 * always observe a complete, unmodifiable snapshot via the volatile
 * bootstrappedStreamSet field, published only after the pass finishes.
 */
public void bootstrap() {
    synchronized (bootstrapLock) {
        // Make a copy so readers aren't affected while we modify the set.
        final LinkedHashSet<CoordinatorStreamMessage> bootstrappedMessages = new LinkedHashSet<>(bootstrappedStreamSet);
        log.info("Bootstrapping configuration from coordinator stream.");
        SystemStreamPartitionIterator iterator =
            new SystemStreamPartitionIterator(systemConsumer, coordinatorSystemStreamPartition);
        try {
            while (iterator.hasNext()) {
                IncomingMessageEnvelope envelope = iterator.next();
                // Keys are JSON arrays; the message body is a JSON map, or
                // null for delete/tombstone messages.
                Object[] keyArray = keySerde.fromBytes((byte[]) envelope.getKey()).toArray();
                Map<String, Object> valueMap = null;
                if (envelope.getMessage() != null) {
                    valueMap = messageSerde.fromBytes((byte[]) envelope.getMessage());
                }
                CoordinatorStreamMessage coordinatorStreamMessage = new CoordinatorStreamMessage(keyArray, valueMap);
                log.debug("Received coordinator stream message: {}", coordinatorStreamMessage);
                // Remove any existing entry. Set.add() does not add if the element already exists.
                if (bootstrappedMessages.remove(coordinatorStreamMessage)) {
                    log.debug("Removed duplicate message: {}", coordinatorStreamMessage);
                }
                bootstrappedMessages.add(coordinatorStreamMessage);
                // Only SetConfig messages feed the config map: deletes remove
                // the key, everything else overwrites the value.
                if (SetConfig.TYPE.equals(coordinatorStreamMessage.getType())) {
                    String configKey = coordinatorStreamMessage.getKey();
                    if (coordinatorStreamMessage.isDelete()) {
                        configMap.remove(configKey);
                    } else {
                        String configValue = new SetConfig(coordinatorStreamMessage).getConfigValue();
                        configMap.put(configKey, configValue);
                    }
                }
            }
            // Publish the finished snapshot before flipping the flag so
            // getBoostrappedStream() never sees a half-built set.
            bootstrappedStreamSet = Collections.unmodifiableSet(bootstrappedMessages);
            log.debug("Bootstrapped configuration: {}", configMap);
            isBootstrapped = true;
        } catch (Exception e) {
            throw new SamzaException(e);
        }
    }
}
/**
 * Returns the full set of coordinator stream messages read so far,
 * bootstrapping first if that has not happened yet.
 * (The misspelled method name is kept as-is for API compatibility.)
 *
 * @return the unmodifiable set of bootstrapped messages
 */
public Set<CoordinatorStreamMessage> getBoostrappedStream() {
    log.info("Returning the bootstrapped data from the stream");
    if (!isBootstrapped) {
        bootstrap();
    }
    return bootstrappedStreamSet;
}
/**
 * Bootstraps the coordinator stream and returns only the messages whose type
 * matches the given type (case-insensitive), preserving bootstrap order.
 *
 * @param type the message type to filter on
 * @return an insertion-ordered set of matching messages
 */
public Set<CoordinatorStreamMessage> getBootstrappedStream(String type) {
    log.debug("Bootstrapping coordinator stream for messages of type {}", type);
    bootstrap();
    LinkedHashSet<CoordinatorStreamMessage> matching = new LinkedHashSet<>();
    for (CoordinatorStreamMessage message : bootstrappedStreamSet) {
        log.trace("Considering message: {}", message);
        if (!type.equalsIgnoreCase(message.getType())) {
            continue;
        }
        log.trace("Adding message: {}", message);
        matching.add(message);
    }
    return matching;
}
/**
 * Returns the configuration assembled during {@link #bootstrap()}.
 *
 * @return a {@code MapConfig} snapshot of the bootstrapped configuration
 * @throws SamzaException if {@code bootstrap()} has not been invoked yet
 */
public Config getConfig() {
    // Guard clause: config is only meaningful after a successful bootstrap.
    if (!isBootstrapped) {
        throw new SamzaException("Must call bootstrap before retrieving config.");
    }
    return new MapConfig(configMap);
}
/**
 * Gets an iterator on the coordinator stream, starting from the starting offset the consumer was registered with.
 * A new iterator is created on each call; iterators do not share position.
 *
 * @return an iterator on the coordinator stream pointing to the starting offset the consumer was registered with.
 */
public SystemStreamPartitionIterator getStartIterator() {
return new SystemStreamPartitionIterator(systemConsumer, coordinatorSystemStreamPartition);
}
/**
 * Returns all unread messages after an iterator on the stream, regardless of type.
 * Convenience overload that delegates with a {@code null} type filter.
 *
 * @param iterator the iterator pointing to an offset in the coordinator stream. All unread messages after this iterator are returned
 * @return a set of unread messages after a given iterator
 */
public Set<CoordinatorStreamMessage> getUnreadMessages(SystemStreamPartitionIterator iterator) {
return getUnreadMessages(iterator, null);
}
/**
 * Drains the given iterator and returns every remaining message, optionally
 * restricted to a single message type. The iterator is advanced to its end.
 *
 * @param iterator the iterator pointing to an offset in the coordinator stream;
 *                 all unread messages after this iterator are returned
 * @param type the type of messages to return, or {@code null} for all types
 * @return an insertion-ordered set of the unread messages
 */
public Set<CoordinatorStreamMessage> getUnreadMessages(SystemStreamPartitionIterator iterator, String type) {
    LinkedHashSet<CoordinatorStreamMessage> result = new LinkedHashSet<>();
    while (iterator.hasNext()) {
        IncomingMessageEnvelope envelope = iterator.next();
        // Key is a serialized list; a null message body represents a delete.
        Object[] key = keySerde.fromBytes((byte[]) envelope.getKey()).toArray();
        Map<String, Object> value = (envelope.getMessage() == null)
                ? null
                : messageSerde.fromBytes((byte[]) envelope.getMessage());
        CoordinatorStreamMessage message = new CoordinatorStreamMessage(key, value);
        boolean matchesType = (type == null) || type.equals(message.getType());
        if (matchesType) {
            result.add(message);
        }
    }
    return result;
}
/**
 * Checks whether there are any messages after the given position on the
 * coordinator stream.
 *
 * @param iterator the iterator to check for new messages; may be {@code null}
 * @return true if the iterator is non-null and has unread messages, false otherwise
 */
public boolean hasNewMessages(SystemStreamPartitionIterator iterator) {
    // A null iterator trivially has no new messages.
    return iterator != null && iterator.hasNext();
}
// Exposed only so tests can observe the start/stop lifecycle state.
@VisibleForTesting
boolean isStarted() {
return isStarted;
}
}
| |
/*
* Licensed to The Apereo Foundation under one or more contributor license
* agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*
* The Apereo Foundation licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.unitime.timetable.onlinesectioning.solver;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.Hashtable;
import java.util.List;
import java.util.Set;
import org.unitime.localization.impl.Localization;
import org.unitime.timetable.gwt.resources.StudentSectioningMessages;
import org.unitime.timetable.gwt.shared.ClassAssignmentInterface;
import org.unitime.timetable.gwt.shared.SectioningException;
import org.unitime.timetable.onlinesectioning.OnlineSectioningAction;
import org.unitime.timetable.onlinesectioning.OnlineSectioningHelper;
import org.unitime.timetable.onlinesectioning.OnlineSectioningServer;
import org.unitime.timetable.onlinesectioning.OnlineSectioningServer.Lock;
import org.unitime.timetable.onlinesectioning.custom.StudentEnrollmentProvider.EnrollmentRequest;
import org.unitime.timetable.onlinesectioning.model.XConfig;
import org.unitime.timetable.onlinesectioning.model.XCourse;
import org.unitime.timetable.onlinesectioning.model.XCourseRequest;
import org.unitime.timetable.onlinesectioning.model.XEnrollment;
import org.unitime.timetable.onlinesectioning.model.XEnrollments;
import org.unitime.timetable.onlinesectioning.model.XOffering;
import org.unitime.timetable.onlinesectioning.model.XRequest;
import org.unitime.timetable.onlinesectioning.model.XReservation;
import org.unitime.timetable.onlinesectioning.model.XSection;
import org.unitime.timetable.onlinesectioning.model.XStudent;
import org.unitime.timetable.onlinesectioning.model.XSubpart;
/**
* @author Tomas Muller
*/
/**
 * Online sectioning action that validates a proposed class assignment for a
 * student before it is committed: it checks that each referenced course,
 * offering and section exists, that cancelled-class / deadline / reservation /
 * limit rules are satisfied, and that the selected sections form a complete,
 * non-overlapping schedule. On success it returns one {@link EnrollmentRequest}
 * per course; on any violation it throws a {@link SectioningException}.
 *
 * @author Tomas Muller
 */
public class CheckAssignmentAction implements OnlineSectioningAction<List<EnrollmentRequest>>{
private static final long serialVersionUID = 1L;
private static StudentSectioningMessages MSG = Localization.create(StudentSectioningMessages.class);
// Student whose assignment is being checked, and the proposed assignment
// (one ClassAssignment per selected class; may contain free times / nulls).
private Long iStudentId;
private Collection<ClassAssignmentInterface.ClassAssignment> iAssignment;
/** Fluent setter: selects the student to check. Returns this action for chaining. */
public CheckAssignmentAction forStudent(Long studentId) {
iStudentId = studentId;
return this;
}
/** Fluent setter: supplies the proposed class assignment. Returns this action for chaining. */
public CheckAssignmentAction withAssignment(Collection<ClassAssignmentInterface.ClassAssignment> assignment) {
iAssignment = assignment;
return this;
}
public Long getStudentId() { return iStudentId; }
public Collection<ClassAssignmentInterface.ClassAssignment> getAssignment() { return iAssignment; }
/**
 * Acquires a server read lock, then a per-student lock covering all offerings
 * referenced by the assignment, and delegates to {@link #check} while both
 * locks are held. Locks are released in reverse order of acquisition.
 */
@Override
public List<EnrollmentRequest> execute(OnlineSectioningServer server, OnlineSectioningHelper helper) {
Lock readLock = server.readLock();
try {
// Collect the offering ids involved so the student lock covers them all.
Set<Long> offeringIds = new HashSet<Long>();
for (ClassAssignmentInterface.ClassAssignment ca: getAssignment())
if (ca != null && !ca.isFreeTime()) {
XCourse course = server.getCourse(ca.getCourseId());
if (course != null) offeringIds.add(course.getOfferingId());
}
Lock lock = server.lockStudent(getStudentId(), offeringIds, name());
try {
return check(server, helper);
} finally {
lock.release();
}
} finally {
readLock.release();
}
}
/**
 * Performs the actual validation. Walks the proposed assignment and verifies,
 * in order: existence of course/offering/section, cancelled-class rules,
 * NEW/CHANGE/DROP deadlines, reservations and section/config/course limits,
 * and finally structural validity (one section per subpart, no overlaps,
 * no conflicts with other non-overlap-allowed requests).
 *
 * @param server online sectioning server (caller must hold the appropriate locks)
 * @param helper helper for the current operation
 * @return one enrollment request per course in the assignment
 * @throws SectioningException if any check fails
 */
public List<EnrollmentRequest> check(OnlineSectioningServer server, OnlineSectioningHelper helper) {
XStudent student = server.getStudent(getStudentId());
if (student == null) throw new SectioningException(MSG.exceptionBadStudentId());
List<EnrollmentRequest> requests = new ArrayList<EnrollmentRequest>();
// Group the individual class assignments by course into EnrollmentRequests.
Hashtable<Long, EnrollmentRequest> courseId2request = new Hashtable<Long, EnrollmentRequest>();
Hashtable<Long, XOffering> courseId2offering = new Hashtable<Long, XOffering>();
for (ClassAssignmentInterface.ClassAssignment ca: getAssignment()) {
// Skip free times and dummy sections
if (ca == null || ca.isFreeTime() || ca.getClassId() == null || ca.isDummy()) continue;
XCourse course = server.getCourse(ca.getCourseId());
if (course == null)
throw new SectioningException(MSG.exceptionCourseDoesNotExist(MSG.courseName(ca.getSubject(), ca.getClassNumber())));
XOffering offering = server.getOffering(course.getOfferingId());
if (offering == null)
throw new SectioningException(MSG.exceptionCourseDoesNotExist(MSG.courseName(ca.getSubject(), ca.getClassNumber())));
// Check section limits
XSection section = offering.getSection(ca.getClassId());
if (section == null)
throw new SectioningException(MSG.exceptionEnrollNotAvailable(MSG.clazz(ca.getSubject(), ca.getCourseNbr(), ca.getSubpart(), ca.getSection())));
// Check cancelled flag
if (section.isCancelled()) {
// A cancelled class is only acceptable when the server allows keeping it
// AND the student is already enrolled in that very section.
if (server.getConfig().getPropertyBoolean("Enrollment.CanKeepCancelledClass", false)) {
boolean contains = false;
for (XRequest r: student.getRequests())
if (r instanceof XCourseRequest) {
XCourseRequest cr = (XCourseRequest)r;
if (cr.getEnrollment() != null && cr.getEnrollment().getSectionIds().contains(section.getSectionId())) { contains = true; break; }
}
if (!contains)
throw new SectioningException(MSG.exceptionEnrollCancelled(MSG.clazz(ca.getSubject(), ca.getCourseNbr(), ca.getSubpart(), ca.getSection())));
} else {
throw new SectioningException(MSG.exceptionEnrollCancelled(MSG.clazz(ca.getSubject(), ca.getCourseNbr(), ca.getSubpart(), ca.getSection())));
}
}
EnrollmentRequest request = courseId2request.get(ca.getCourseId());
if (request == null) {
request = new EnrollmentRequest(course, new ArrayList<XSection>());
courseId2request.put(ca.getCourseId(), request);
requests.add(request);
}
request.getSections().add(section);
courseId2offering.put(course.getCourseId(), offering);
}
// Check for NEW and CHANGE deadlines
check: for (EnrollmentRequest request: requests) {
XCourse course = request.getCourse();
List<XSection> sections = request.getSections();
for (XRequest r: student.getRequests()) {
if (r instanceof XCourseRequest) {
XEnrollment enrollment = ((XCourseRequest)r).getEnrollment();
if (enrollment != null && enrollment.getCourseId().equals(course.getCourseId())) { // course change
// Only newly added sections are subject to the CHANGE deadline.
for (XSection s: sections)
if (!enrollment.getSectionIds().contains(s.getSectionId()) && !server.checkDeadline(course.getCourseId(), s.getTime(), OnlineSectioningServer.Deadline.CHANGE))
throw new SectioningException(MSG.exceptionEnrollDeadlineChange(MSG.clazz(course.getSubjectArea(), course.getCourseNumber(), s.getSubpartName(), s.getName(course.getCourseId()))));
continue check;
}
}
}
// new course
// NOTE(review): the CHANGE check above passes course.getCourseId() to
// checkDeadline while NEW (below) and DROP pass an offering id — confirm
// that checkDeadline is intended to accept this mix of ids.
for (XSection section: sections) {
if (!server.checkDeadline(course.getOfferingId(), section.getTime(), OnlineSectioningServer.Deadline.NEW))
throw new SectioningException(MSG.exceptionEnrollDeadlineNew(MSG.clazz(course.getSubjectArea(), course.getCourseNumber(), section.getSubpartName(), section.getName(course.getCourseId()))));
}
}
// Check for DROP deadlines
// (applies to current enrollments in courses that are no longer in the proposed assignment)
for (XRequest r: student.getRequests()) {
if (r instanceof XCourseRequest) {
XEnrollment enrollment = ((XCourseRequest)r).getEnrollment();
if (enrollment != null && !courseId2offering.containsKey(enrollment.getCourseId())) {
XOffering offering = server.getOffering(enrollment.getOfferingId());
if (offering != null)
for (XSection section: offering.getSections(enrollment)) {
if (!server.checkDeadline(offering.getOfferingId(), section.getTime(), OnlineSectioningServer.Deadline.DROP))
throw new SectioningException(MSG.exceptionEnrollDeadlineDrop(enrollment.getCourseName()));
}
}
}
}
// Reservation and limit checks, one course at a time.
Hashtable<Long, XConfig> courseId2config = new Hashtable<Long, XConfig>();
for (EnrollmentRequest request: requests) {
XCourse course = request.getCourse();
XOffering offering = courseId2offering.get(course.getCourseId());
XEnrollments enrollments = server.getEnrollments(course.getOfferingId());
List<XSection> sections = request.getSections();
XSubpart subpart = offering.getSubpart(sections.get(0).getSubpartId());
XConfig config = offering.getConfig(subpart.getConfigId());
courseId2config.put(course.getCourseId(), config);
// Pick the best applicable reservation (compareTo order) that still has
// room (or already counts this student) and matches the chosen config/sections.
XReservation reservation = null;
reservations: for (XReservation r: offering.getReservations()) {
if (!r.isApplicable(student)) continue;
if (r.getLimit() >= 0 && r.getLimit() <= enrollments.countEnrollmentsForReservation(r.getReservationId())) {
boolean contain = false;
for (XEnrollment e: enrollments.getEnrollmentsForReservation(r.getReservationId()))
if (e.getStudentId().equals(student.getStudentId())) { contain = true; break; }
if (!contain) continue;
}
if (!r.getConfigsIds().isEmpty() && !r.getConfigsIds().contains(config.getConfigId())) continue;
for (XSection section: sections)
if (r.getSectionIds(section.getSubpartId()) != null && !r.getSectionIds(section.getSubpartId()).contains(section.getSectionId())) continue reservations;
if (reservation == null || r.compareTo(reservation) < 0)
reservation = r;
}
// Without an over-limit reservation, enforce section, config and course limits.
// In each case a full section/config/course is still acceptable when the
// student is already counted among its enrollments.
if (reservation == null || !reservation.canAssignOverLimit()) {
for (XSection section: sections) {
if (section.getLimit() >= 0 && section.getLimit() <= enrollments.countEnrollmentsForSection(section.getSectionId())) {
boolean contain = false;
for (XEnrollment e: enrollments.getEnrollmentsForSection(section.getSectionId()))
if (e.getStudentId().equals(student.getStudentId())) { contain = true; break; }
if (!contain)
throw new SectioningException(MSG.exceptionEnrollNotAvailable(MSG.clazz(course.getSubjectArea(), course.getCourseNumber(), section.getSubpartName(), section.getName())));
}
if ((reservation == null || !offering.getSectionReservations(section.getSectionId()).contains(reservation)) && offering.getUnreservedSectionSpace(section.getSectionId(), enrollments) <= 0) {
boolean contain = false;
for (XEnrollment e: enrollments.getEnrollmentsForSection(section.getSectionId()))
if (e.getStudentId().equals(student.getStudentId())) { contain = true; break; }
if (!contain)
throw new SectioningException(MSG.exceptionEnrollNotAvailable(MSG.clazz(course.getSubjectArea(), course.getCourseNumber(), section.getSubpartName(), section.getName())));
}
}
if (config.getLimit() >= 0 && config.getLimit() <= enrollments.countEnrollmentsForConfig(config.getConfigId())) {
boolean contain = false;
for (XEnrollment e: enrollments.getEnrollmentsForConfig(config.getConfigId()))
if (e.getStudentId().equals(student.getStudentId())) { contain = true; break; }
if (!contain)
throw new SectioningException(MSG.exceptionEnrollNotAvailable(MSG.courseName(course.getSubjectArea(), course.getCourseNumber())) + " " + config.getName());
}
if ((reservation == null || !offering.getConfigReservations(config.getConfigId()).contains(reservation)) && offering.getUnreservedConfigSpace(config.getConfigId(), enrollments) <= 0) {
boolean contain = false;
for (XEnrollment e: enrollments.getEnrollmentsForConfig(config.getConfigId()))
if (e.getStudentId().equals(student.getStudentId())) { contain = true; break; }
if (!contain)
throw new SectioningException(MSG.exceptionEnrollNotAvailable(MSG.courseName(course.getSubjectArea(), course.getCourseNumber())) + " " + config.getName());
}
if (course.getLimit() >= 0 && course.getLimit() <= enrollments.countEnrollmentsForCourse(course.getCourseId())) {
boolean contain = false;
for (XEnrollment e: enrollments.getEnrollmentsForCourse(course.getCourseId()))
if (e.getStudentId().equals(student.getStudentId())) { contain = true; break; }
if (!contain)
throw new SectioningException(MSG.exceptionEnrollNotAvailable(MSG.courseName(course.getSubjectArea(), course.getCourseNumber())));
}
}
}
// Structural validation: completeness, one section per subpart, no overlaps,
// and no conflicts with other requests unless overlaps are explicitly allowed.
for (EnrollmentRequest request: requests) {
XCourse course = request.getCourse();
XOffering offering = courseId2offering.get(course.getCourseId());
List<XSection> sections = request.getSections();
XSubpart subpart = offering.getSubpart(sections.get(0).getSubpartId());
XConfig config = offering.getConfig(subpart.getConfigId());
if (sections.size() < config.getSubparts().size()) {
throw new SectioningException(MSG.exceptionEnrollmentIncomplete(MSG.courseName(course.getSubjectArea(), course.getCourseNumber())));
} else if (sections.size() > config.getSubparts().size()) {
throw new SectioningException(MSG.exceptionEnrollmentInvalid(MSG.courseName(course.getSubjectArea(), course.getCourseNumber())));
}
for (XSection s1: sections) {
for (XSection s2: sections) {
// s1.getSectionId() < s2.getSectionId() checks each unordered pair once.
if (s1.getSectionId() < s2.getSectionId() && s1.isOverlapping(offering.getDistributions(), s2)) {
throw new SectioningException(MSG.exceptionEnrollmentOverlapping(MSG.courseName(course.getSubjectArea(), course.getCourseNumber())));
}
if (!s1.getSectionId().equals(s2.getSectionId()) && s1.getSubpartId().equals(s2.getSubpartId())) {
throw new SectioningException(MSG.exceptionEnrollmentInvalid(MSG.courseName(course.getSubjectArea(), course.getCourseNumber())));
}
}
if (!offering.getSubpart(s1.getSubpartId()).getConfigId().equals(config.getConfigId())) {
throw new SectioningException(MSG.exceptionEnrollmentInvalid(MSG.courseName(course.getSubjectArea(), course.getCourseNumber())));
}
}
if (!offering.isAllowOverlap(student, config.getConfigId(), sections))
for (EnrollmentRequest otherRequest: requests) {
XOffering other = courseId2offering.get(otherRequest.getCourse().getCourseId());
XConfig otherConfig = courseId2config.get(otherRequest.getCourse().getCourseId());
if (!other.equals(offering) && !other.isAllowOverlap(student, otherConfig.getConfigId(), otherRequest.getSections())) {
List<XSection> assignment = otherRequest.getSections();
for (XSection section: sections)
if (section.isOverlapping(offering.getDistributions(), assignment))
throw new SectioningException(MSG.exceptionEnrollmentConflicting(MSG.courseName(course.getSubjectArea(), course.getCourseNumber())));
}
}
}
return requests;
}
/** Operation name used for logging and for the student lock in {@link #execute}. */
@Override
public String name() {
return "check-assignment";
}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.sql.planner.optimizations;
import com.facebook.presto.Session;
import com.facebook.presto.metadata.Metadata;
import com.facebook.presto.spi.ConstantProperty;
import com.facebook.presto.spi.LocalProperty;
import com.facebook.presto.spi.relation.ConstantExpression;
import com.facebook.presto.spi.relation.RowExpression;
import com.facebook.presto.spi.relation.VariableReferenceExpression;
import com.facebook.presto.sql.planner.Partitioning;
import com.facebook.presto.sql.planner.PartitioningHandle;
import com.facebook.presto.sql.planner.TypeProvider;
import com.facebook.presto.sql.tree.SymbolReference;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import javax.annotation.concurrent.Immutable;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.function.Function;
import static com.facebook.presto.sql.planner.PlannerUtils.toVariableReference;
import static com.facebook.presto.sql.planner.SystemPartitioningHandle.COORDINATOR_DISTRIBUTION;
import static com.facebook.presto.sql.planner.SystemPartitioningHandle.SINGLE_DISTRIBUTION;
import static com.facebook.presto.sql.planner.SystemPartitioningHandle.SOURCE_DISTRIBUTION;
import static com.facebook.presto.sql.relational.OriginalExpressionUtils.castToExpression;
import static com.facebook.presto.sql.relational.OriginalExpressionUtils.isExpression;
import static com.facebook.presto.util.MoreLists.filteredCopy;
import static com.google.common.base.MoreObjects.toStringHelper;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.collect.Iterables.transform;
import static java.util.Objects.requireNonNull;
public class ActualProperties
{
// Node- and stream-level partitioning description for this plan fragment.
private final Global global;
// Within-stream ordering/grouping properties (leading constants normalized in the constructor).
private final List<LocalProperty<VariableReferenceExpression>> localProperties;
// Variables known to hold a constant value, mapped to that constant.
private final Map<VariableReferenceExpression, ConstantExpression> constants;
/**
 * Canonicalizing constructor: merges the known constants into the leading
 * {@link ConstantProperty} entries of the local properties, then normalizes
 * and prunes the resulting list. Use {@link #builder()} to construct instances.
 *
 * @param global global (node/stream) partitioning description; never null
 * @param localProperties within-stream properties; defensively copied
 * @param constants variables with known constant values; defensively copied
 */
private ActualProperties(
Global global,
List<? extends LocalProperty<VariableReferenceExpression>> localProperties,
Map<VariableReferenceExpression, ConstantExpression> constants)
{
requireNonNull(global, "globalProperties is null");
requireNonNull(localProperties, "localProperties is null");
requireNonNull(constants, "constants is null");
this.global = global;
// The constants field implies a ConstantProperty in localProperties (but not vice versa).
// Let's make sure to include the constants into the local constant properties.
Set<VariableReferenceExpression> localConstants = LocalProperties.extractLeadingConstants(localProperties);
localProperties = LocalProperties.stripLeadingConstants(localProperties);
// Union of constants already implied by localProperties and those passed explicitly.
Set<VariableReferenceExpression> updatedLocalConstants = ImmutableSet.<VariableReferenceExpression>builder()
.addAll(localConstants)
.addAll(constants.keySet())
.build();
// Re-prefix the constants as ConstantProperty entries, then normalize/prune.
List<LocalProperty<VariableReferenceExpression>> updatedLocalProperties = LocalProperties.normalizeAndPrune(ImmutableList.<LocalProperty<VariableReferenceExpression>>builder()
.addAll(transform(updatedLocalConstants, ConstantProperty::new))
.addAll(localProperties)
.build());
this.localProperties = ImmutableList.copyOf(updatedLocalProperties);
this.constants = ImmutableMap.copyOf(constants);
}
/** @return true if the plan will execute only on the coordinator node */
public boolean isCoordinatorOnly()
{
return global.isCoordinatorOnly();
}
/**
 * @return true if the plan will only execute on a single node
 * (determined from the node partitioning handle; unknown partitioning returns false)
 */
public boolean isSingleNode()
{
return global.isSingleNode();
}
/** @return true if null-partitioned rows (and some arbitrary rows) are replicated to all nodes */
public boolean isNullsAndAnyReplicated()
{
return global.isNullsAndAnyReplicated();
}
/** Convenience overload of {@link #isStreamPartitionedOn(Collection, boolean)} with no nulls/any replication. */
public boolean isStreamPartitionedOn(Collection<VariableReferenceExpression> columns)
{
return isStreamPartitionedOn(columns, false);
}
/** True when streams are partitioned on the given columns (known constants are taken into account). */
public boolean isStreamPartitionedOn(Collection<VariableReferenceExpression> columns, boolean nullsAndAnyReplicated)
{
return global.isStreamPartitionedOn(columns, constants.keySet(), nullsAndAnyReplicated);
}
/** Convenience overload of {@link #isNodePartitionedOn(Collection, boolean)} with no nulls/any replication. */
public boolean isNodePartitionedOn(Collection<VariableReferenceExpression> columns)
{
return isNodePartitionedOn(columns, false);
}
/** True when nodes are partitioned on the given columns (known constants are taken into account). */
public boolean isNodePartitionedOn(Collection<VariableReferenceExpression> columns, boolean nullsAndAnyReplicated)
{
return global.isNodePartitionedOn(columns, constants.keySet(), nullsAndAnyReplicated);
}
/** @deprecated delegates to {@link Global}; prefer the refined-partitioning checks. */
@Deprecated
public boolean isCompatibleTablePartitioningWith(Partitioning partitioning, boolean nullsAndAnyReplicated, Metadata metadata, Session session)
{
return global.isCompatibleTablePartitioningWith(partitioning, nullsAndAnyReplicated, metadata, session);
}
/**
 * @deprecated compatibility check against another property set, translating
 * variables via {@code symbolMappings} and resolving constants on both sides.
 */
@Deprecated
public boolean isCompatibleTablePartitioningWith(ActualProperties other, Function<VariableReferenceExpression, Set<VariableReferenceExpression>> symbolMappings, Metadata metadata, Session session)
{
return global.isCompatibleTablePartitioningWith(
other.global,
symbolMappings,
variable -> Optional.ofNullable(constants.get(variable)),
variable -> Optional.ofNullable(other.constants.get(variable)),
metadata,
session);
}
/** True when this node partitioning is a refinement of the given partitioning. */
public boolean isRefinedPartitioningOver(Partitioning partitioning, boolean nullsAndAnyReplicated, Metadata metadata, Session session)
{
return global.isRefinedPartitioningOver(partitioning, nullsAndAnyReplicated, metadata, session);
}
/**
 * Refinement check against another property set, translating variables via
 * {@code symbolMappings} and resolving constants on both sides.
 */
public boolean isRefinedPartitioningOver(ActualProperties other, Function<VariableReferenceExpression, Set<VariableReferenceExpression>> symbolMappings, Metadata metadata, Session session)
{
return global.isRefinedPartitioningOver(
other.global,
symbolMappings,
variable -> Optional.ofNullable(constants.get(variable)),
variable -> Optional.ofNullable(other.constants.get(variable)),
metadata,
session);
}
/**
 * @return true if all the data will effectively land in a single stream
 * (known constants count as "effectively partitioned on nothing")
 */
public boolean isEffectivelySingleStream()
{
return global.isEffectivelySingleStream(constants.keySet());
}
/**
 * @return true if repartitioning on the keys will yield some difference
 * (i.e. the data is not already partitioned in a way that makes it a no-op)
 */
public boolean isStreamRepartitionEffective(Collection<VariableReferenceExpression> keys)
{
return global.isStreamRepartitionEffective(keys, constants.keySet());
}
/**
 * Rewrites these properties through a variable translator. Constants whose
 * key has no translation are dropped from the constant map, but the global
 * translation may still substitute their constant values for untranslatable
 * variables.
 *
 * @param translator maps an input variable to its output variable, if any
 * @return the translated properties
 */
public ActualProperties translateVariable(Function<VariableReferenceExpression, Optional<VariableReferenceExpression>> translator)
{
    // Keep only constants whose variable survives translation, keyed by the new variable.
    Map<VariableReferenceExpression, ConstantExpression> remappedConstants = new HashMap<>();
    constants.forEach((variable, constant) ->
            translator.apply(variable).ifPresent(translated -> remappedConstants.put(translated, constant)));
    return builder()
            .global(global.translateVariableToRowExpression(variable -> {
                // Prefer the translated variable; fall back to the variable's constant value.
                Optional<RowExpression> translated = translator.apply(variable).map(RowExpression.class::cast);
                if (!translated.isPresent()) {
                    translated = Optional.ofNullable(constants.get(variable));
                }
                return translated;
            }))
            .local(LocalProperties.translate(localProperties, translator))
            .constants(remappedConstants)
            .build();
}
/**
 * Rewrites these properties through a projection's assignments. An output
 * variable that is a plain rename of an input (a SymbolReference or a
 * VariableReferenceExpression) carries the input's properties; constants on
 * non-renamed inputs are forwarded to the global translation as values.
 *
 * @param assignments output variable to defining row expression
 * @param types variable types, used to resolve symbol references
 * @return the translated properties
 */
public ActualProperties translateRowExpression(Map<VariableReferenceExpression, RowExpression> assignments, TypeProvider types)
{
// Identity-projection mapping: input variable -> output variable.
Map<VariableReferenceExpression, VariableReferenceExpression> inputToOutputVariables = new HashMap<>();
for (Map.Entry<VariableReferenceExpression, RowExpression> assignment : assignments.entrySet()) {
RowExpression expression = assignment.getValue();
if (isExpression(expression)) {
if (castToExpression(expression) instanceof SymbolReference) {
inputToOutputVariables.put(toVariableReference(castToExpression(expression), types), assignment.getKey());
}
}
else {
if (expression instanceof VariableReferenceExpression) {
inputToOutputVariables.put((VariableReferenceExpression) expression, assignment.getKey());
}
}
}
// Constants survive only when their input variable is passed through by name.
Map<VariableReferenceExpression, ConstantExpression> translatedConstants = new HashMap<>();
for (Map.Entry<VariableReferenceExpression, ConstantExpression> entry : constants.entrySet()) {
if (inputToOutputVariables.containsKey(entry.getKey())) {
translatedConstants.put(inputToOutputVariables.get(entry.getKey()), entry.getValue());
}
}
// For the global translation, also expose non-renamed constants by value.
ImmutableMap.Builder<VariableReferenceExpression, RowExpression> inputToOutputMappings = ImmutableMap.builder();
inputToOutputMappings.putAll(inputToOutputVariables);
constants.entrySet().stream()
.filter(entry -> !inputToOutputVariables.containsKey(entry.getKey()))
.forEach(inputToOutputMappings::put);
return builder()
.global(global.translateRowExpression(inputToOutputMappings.build(), assignments, types))
.local(LocalProperties.translate(localProperties, variable -> Optional.ofNullable(inputToOutputVariables.get(variable))))
.constants(translatedConstants)
.build();
}
/** @return the node partitioning, or empty when the partitioning scheme is unknown */
public Optional<Partitioning> getNodePartitioning()
{
return global.getNodePartitioning();
}
/** @return immutable map of variables known to be constant (safe to expose; built via ImmutableMap.copyOf) */
public Map<VariableReferenceExpression, ConstantExpression> getConstants()
{
return constants;
}
/** @return immutable, normalized local properties (see constructor for the normalization) */
public List<LocalProperty<VariableReferenceExpression>> getLocalProperties()
{
return localProperties;
}
/** Returns a copy of these properties with the nulls-and-any-replicated flag set to the given value. */
public ActualProperties withReplicatedNulls(boolean replicatedNulls)
{
return builderFrom(this)
.global(global.withReplicatedNulls(replicatedNulls))
.build();
}
/** @return a new builder with arbitrary partitioning and empty local properties/constants */
public static Builder builder()
{
return new Builder();
}
/** @return a new builder pre-populated from the given properties */
public static Builder builderFrom(ActualProperties properties)
{
return new Builder(properties.global, properties.localProperties, properties.constants);
}
/**
 * Mutable builder for {@link ActualProperties}. Collections handed in are
 * defensively copied to immutable forms; {@link #unordered(boolean)} drops
 * order-sensitive local properties at {@link #build()} time.
 */
public static class Builder
{
    private Global global;
    private List<LocalProperty<VariableReferenceExpression>> localProperties;
    private Map<VariableReferenceExpression, ConstantExpression> constants;
    private boolean unordered;

    /** Starts from arbitrary partitioning with no local properties or constants. */
    public Builder()
    {
        this(Global.arbitraryPartition(), ImmutableList.of(), ImmutableMap.of());
    }

    public Builder(Global global, List<LocalProperty<VariableReferenceExpression>> localProperties, Map<VariableReferenceExpression, ConstantExpression> constants)
    {
        this.global = requireNonNull(global, "global is null");
        this.localProperties = ImmutableList.copyOf(localProperties);
        this.constants = ImmutableMap.copyOf(constants);
    }

    /** Sets the global partitioning description. */
    public Builder global(Global global)
    {
        this.global = global;
        return this;
    }

    /** Copies the global partitioning description from another property set. */
    public Builder global(ActualProperties other)
    {
        this.global = other.global;
        return this;
    }

    /** Sets the local properties (defensively copied). */
    public Builder local(List<? extends LocalProperty<VariableReferenceExpression>> localProperties)
    {
        this.localProperties = ImmutableList.copyOf(localProperties);
        return this;
    }

    /** Sets the known constants (defensively copied). */
    public Builder constants(Map<VariableReferenceExpression, ConstantExpression> constants)
    {
        this.constants = ImmutableMap.copyOf(constants);
        return this;
    }

    /** When true, order-sensitive local properties are filtered out on build. */
    public Builder unordered(boolean unordered)
    {
        this.unordered = unordered;
        return this;
    }

    public ActualProperties build()
    {
        List<LocalProperty<VariableReferenceExpression>> effectiveLocalProperties = unordered
                ? filteredCopy(this.localProperties, property -> !property.isOrderSensitive())
                : this.localProperties;
        return new ActualProperties(global, effectiveLocalProperties, constants);
    }
}
@Override
public int hashCode()
{
// Deliberately hashes only the constant *keys* (not values) to stay
// consistent with equals(), which also compares constants.keySet() only.
return Objects.hash(global, localProperties, constants.keySet());
}
@Override
public boolean equals(Object obj)
{
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
final ActualProperties other = (ActualProperties) obj;
// Constants are compared by key set only — which variables are constant
// matters for property reasoning, not the particular constant values.
return Objects.equals(this.global, other.global)
&& Objects.equals(this.localProperties, other.localProperties)
&& Objects.equals(this.constants.keySet(), other.constants.keySet());
}
/** Debug representation including global, local properties and constants. */
@Override
public String toString()
{
return toStringHelper(this)
.add("globalProperties", global)
.add("localProperties", localProperties)
.add("constants", constants)
.toString();
}
@Immutable
public static final class Global
{
// Description of the partitioning of the data across nodes
private final Optional<Partitioning> nodePartitioning; // if missing => partitioned with some unknown scheme
// Description of the partitioning of the data across streams (splits)
private final Optional<Partitioning> streamPartitioning; // if missing => partitioned with some unknown scheme
// NOTE: Partitioning on zero columns (or effectively zero columns if the columns are constant) indicates that all
// the rows will be partitioned into a single node or stream. However, this can still be a partitioned plan in that the plan
// will be executed on multiple servers, but only one server will get all the data.
// Description of whether rows with nulls in partitioning columns or some arbitrary rows have been replicated to all *nodes*
private final boolean nullsAndAnyReplicated;
/**
 * @param nodePartitioning node-level partitioning; empty means unknown scheme
 * @param streamPartitioning stream-level partitioning; empty means unknown scheme
 * @param nullsAndAnyReplicated whether null-partitioned (and some arbitrary) rows are replicated to all nodes
 */
private Global(Optional<Partitioning> nodePartitioning, Optional<Partitioning> streamPartitioning, boolean nullsAndAnyReplicated)
{
// When both are present, one partitioning's columns must contain the other's.
checkArgument(!nodePartitioning.isPresent()
|| !streamPartitioning.isPresent()
|| nodePartitioning.get().getVariableReferences().containsAll(streamPartitioning.get().getVariableReferences())
|| streamPartitioning.get().getVariableReferences().containsAll(nodePartitioning.get().getVariableReferences()),
"Global stream partitioning columns should match node partitioning columns");
this.nodePartitioning = requireNonNull(nodePartitioning, "nodePartitioning is null");
this.streamPartitioning = requireNonNull(streamPartitioning, "streamPartitioning is null");
this.nullsAndAnyReplicated = nullsAndAnyReplicated;
}
/** All data on the coordinator, in a single stream (partitioned on zero columns). */
public static Global coordinatorSingleStreamPartition()
{
return partitionedOn(
COORDINATOR_DISTRIBUTION,
ImmutableList.of(),
Optional.of(ImmutableList.of()));
}
/** All data on a single node, in a single stream (partitioned on zero columns). */
public static Global singleStreamPartition()
{
return partitionedOn(
SINGLE_DISTRIBUTION,
ImmutableList.of(),
Optional.of(ImmutableList.of()));
}
/** Unknown node and stream partitioning, with no nulls/any replication. */
public static Global arbitraryPartition()
{
return new Global(Optional.empty(), Optional.empty(), false);
}
/**
 * Node partitioning on the given handle/columns; stream partitioning, when
 * present, uses SOURCE_DISTRIBUTION over the given columns.
 */
public static Global partitionedOn(
PartitioningHandle nodePartitioningHandle,
List<VariableReferenceExpression> nodePartitioning,
Optional<List<VariableReferenceExpression>> streamPartitioning)
{
return new Global(
Optional.of(Partitioning.create(nodePartitioningHandle, nodePartitioning)),
streamPartitioning.map(columns -> Partitioning.create(SOURCE_DISTRIBUTION, columns)),
false);
}
/**
 * @param nodePartitioning a fully-formed node partitioning
 * @param streamPartitioning stream partitioning, if known
 */
public static Global partitionedOn(Partitioning nodePartitioning, Optional<Partitioning> streamPartitioning)
{
    // Node partitioning is known; stream partitioning is taken as given.
    return new Global(Optional.of(nodePartitioning), streamPartitioning, false);
}
/**
 * Distribution in which only the intra-node stream partitioning is known.
 */
public static Global streamPartitionedOn(List<VariableReferenceExpression> streamPartitioning)
{
    Partitioning streams = Partitioning.create(SOURCE_DISTRIBUTION, streamPartitioning);
    return new Global(Optional.empty(), Optional.of(streams), false);
}
/**
 * Node partitioning is the coalesced form of the two inputs; the result is
 * empty (unknown) when they cannot be coalesced.
 */
public static Global partitionedOnCoalesce(Partitioning one, Partitioning other)
{
    Optional<Partitioning> coalesced = one.translateToCoalesce(other);
    return new Global(coalesced, Optional.empty(), false);
}
/**
 * Returns a copy of this instance with only the nulls/any-replication flag changed.
 */
public Global withReplicatedNulls(boolean replicatedNulls)
{
    return new Global(this.nodePartitioning, this.streamPartitioning, replicatedNulls);
}
// Whether rows with nulls in the partitioning columns (or arbitrary rows) were replicated to all nodes.
private boolean isNullsAndAnyReplicated()
{
    return nullsAndAnyReplicated;
}
/**
 * @return true if the plan will only execute on a single node
 */
private boolean isSingleNode()
{
    // Unknown node partitioning means single-node execution cannot be claimed.
    return nodePartitioning
            .map(partitioning -> partitioning.getHandle().isSingleNode())
            .orElse(false);
}
/**
 * @return true if the plan executes exclusively on the coordinator
 */
private boolean isCoordinatorOnly()
{
    // Unknown node partitioning means coordinator-only execution cannot be claimed.
    return nodePartitioning
            .map(partitioning -> partitioning.getHandle().isCoordinatorOnly())
            .orElse(false);
}
/**
 * True when the known node partitioning covers {@code columns} (given {@code constants})
 * and the replication flag matches. Guard clauses preserve the original evaluation order.
 */
private boolean isNodePartitionedOn(Collection<VariableReferenceExpression> columns, Set<VariableReferenceExpression> constants, boolean nullsAndAnyReplicated)
{
    if (!nodePartitioning.isPresent()) {
        return false;
    }
    if (!nodePartitioning.get().isPartitionedOn(columns, constants)) {
        return false;
    }
    return this.nullsAndAnyReplicated == nullsAndAnyReplicated;
}
/**
 * True when the known node partitioning is compatible with {@code partitioning}
 * and the replication flag matches.
 */
private boolean isCompatibleTablePartitioningWith(Partitioning partitioning, boolean nullsAndAnyReplicated, Metadata metadata, Session session)
{
    if (!nodePartitioning.isPresent()) {
        return false;
    }
    if (!nodePartitioning.get().isCompatibleWith(partitioning, metadata, session)) {
        return false;
    }
    return this.nullsAndAnyReplicated == nullsAndAnyReplicated;
}
/**
 * True when both sides have a known node partitioning, those partitionings are
 * compatible under the supplied variable/constant mappings, and the replication
 * flags agree.
 */
private boolean isCompatibleTablePartitioningWith(
        Global other,
        Function<VariableReferenceExpression, Set<VariableReferenceExpression>> symbolMappings,
        Function<VariableReferenceExpression, Optional<ConstantExpression>> leftConstantMapping,
        Function<VariableReferenceExpression, Optional<ConstantExpression>> rightConstantMapping,
        Metadata metadata,
        Session session)
{
    if (!nodePartitioning.isPresent() || !other.nodePartitioning.isPresent()) {
        return false;
    }
    boolean compatible = nodePartitioning.get().isCompatibleWith(
            other.nodePartitioning.get(),
            symbolMappings,
            leftConstantMapping,
            rightConstantMapping,
            metadata,
            session);
    return compatible && nullsAndAnyReplicated == other.nullsAndAnyReplicated;
}
/**
 * True when the known node partitioning is a refinement of {@code partitioning}
 * and the replication flag matches.
 */
private boolean isRefinedPartitioningOver(Partitioning partitioning, boolean nullsAndAnyReplicated, Metadata metadata, Session session)
{
    if (!nodePartitioning.isPresent()) {
        return false;
    }
    if (!nodePartitioning.get().isRefinedPartitioningOver(partitioning, metadata, session)) {
        return false;
    }
    return this.nullsAndAnyReplicated == nullsAndAnyReplicated;
}
/**
 * True when both sides have a known node partitioning, ours refines the other's
 * under the supplied variable/constant mappings, and the replication flags agree.
 */
private boolean isRefinedPartitioningOver(
        Global other,
        Function<VariableReferenceExpression, Set<VariableReferenceExpression>> symbolMappings,
        Function<VariableReferenceExpression, Optional<ConstantExpression>> leftConstantMapping,
        Function<VariableReferenceExpression, Optional<ConstantExpression>> rightConstantMapping,
        Metadata metadata,
        Session session)
{
    if (!nodePartitioning.isPresent() || !other.nodePartitioning.isPresent()) {
        return false;
    }
    boolean refined = nodePartitioning.get().isRefinedPartitioningOver(
            other.nodePartitioning.get(),
            symbolMappings,
            leftConstantMapping,
            rightConstantMapping,
            metadata,
            session);
    return refined && nullsAndAnyReplicated == other.nullsAndAnyReplicated;
}
// The node partitioning, or empty when it is unknown.
private Optional<Partitioning> getNodePartitioning()
{
    return nodePartitioning;
}
/**
 * True when the known stream partitioning covers {@code columns} (given {@code constants})
 * and the replication flag matches.
 */
private boolean isStreamPartitionedOn(Collection<VariableReferenceExpression> columns, Set<VariableReferenceExpression> constants, boolean nullsAndAnyReplicated)
{
    if (!streamPartitioning.isPresent()) {
        return false;
    }
    if (!streamPartitioning.get().isPartitionedOn(columns, constants)) {
        return false;
    }
    return this.nullsAndAnyReplicated == nullsAndAnyReplicated;
}
/**
 * @return true if all the data will effectively land in a single stream
 */
private boolean isEffectivelySingleStream(Set<VariableReferenceExpression> constants)
{
    if (!streamPartitioning.isPresent()) {
        return false;
    }
    if (!streamPartitioning.get().isEffectivelySinglePartition(constants)) {
        return false;
    }
    // Replicated rows land in every stream, so the data is not confined to one.
    return !nullsAndAnyReplicated;
}
/**
 * @return true if repartitioning on the keys will yield some difference
 */
private boolean isStreamRepartitionEffective(Collection<VariableReferenceExpression> keys, Set<VariableReferenceExpression> constants)
{
    boolean repartitionEffective = !streamPartitioning.isPresent()
            || streamPartitioning.get().isRepartitionEffective(keys, constants);
    return repartitionEffective && !nullsAndAnyReplicated;
}
/**
 * Rewrites both partitionings through {@code translator}; a partitioning that
 * cannot be fully translated collapses to empty (unknown).
 */
private Global translateVariableToRowExpression(
        Function<VariableReferenceExpression, Optional<RowExpression>> translator)
{
    Optional<Partitioning> translatedNode = nodePartitioning.flatMap(current -> current.translateVariableToRowExpression(translator));
    Optional<Partitioning> translatedStream = streamPartitioning.flatMap(current -> current.translateVariableToRowExpression(translator));
    return new Global(translatedNode, translatedStream, nullsAndAnyReplicated);
}
/**
 * Rewrites both partitionings through the given input-to-output mappings; a
 * partitioning that cannot be fully translated collapses to empty (unknown).
 */
private Global translateRowExpression(Map<VariableReferenceExpression, RowExpression> inputToOutputMappings, Map<VariableReferenceExpression, RowExpression> assignments, TypeProvider types)
{
    Optional<Partitioning> translatedNode = nodePartitioning.flatMap(current -> current.translateRowExpression(inputToOutputMappings, assignments, types));
    Optional<Partitioning> translatedStream = streamPartitioning.flatMap(current -> current.translateRowExpression(inputToOutputMappings, assignments, types));
    return new Global(translatedNode, translatedStream, nullsAndAnyReplicated);
}
@Override
public int hashCode()
{
    // Must stay consistent with equals(): same three components in both.
    return Objects.hash(nodePartitioning, streamPartitioning, nullsAndAnyReplicated);
}
@Override
public boolean equals(Object obj)
{
    if (this == obj) {
        return true;
    }
    if (obj == null || getClass() != obj.getClass()) {
        return false;
    }
    Global that = (Global) obj;
    // Cheapest comparison first; Objects.equals is side-effect free, so the
    // reordering cannot change observable behavior.
    return nullsAndAnyReplicated == that.nullsAndAnyReplicated
            && Objects.equals(nodePartitioning, that.nodePartitioning)
            && Objects.equals(streamPartitioning, that.streamPartitioning);
}
@Override
public String toString()
{
    // Field order is part of the rendered text; keep it stable.
    return toStringHelper(this)
            .add("nodePartitioning", nodePartitioning)
            .add("streamPartitioning", streamPartitioning)
            .add("nullsAndAnyReplicated", nullsAndAnyReplicated)
            .toString();
}
}
}
| |
package com.jeecms.core.entity.base;
import java.io.Serializable;
import com.jeecms.core.entity.CmsConfigItem;
/**
* This is an object that contains data related to the jc_config table.
* Do not modify this class because it will be overwritten if the configuration file
* related to this class is modified.
*
* @hibernate.class
* table="jc_config"
*/
public abstract class BaseCmsConfig implements Serializable {

    // Hibernate entity reference name used when building queries.
    // NOTE(review): these constants are generated as non-final 'public static';
    // left untouched because this file is regenerated from the tool config.
    public static String REF = "CmsConfig";
    public static String PROP_LOGIN_URL = "loginUrl";
    public static String PROP_COLOR = "color";
    public static String PROP_PROCESS_URL = "processUrl";
    public static String PROP_ALPHA = "alpha";
    public static String PROP_DEF_IMG = "defImg";
    public static String PROP_COUNT_CLEAR_TIME = "countClearTime";
    public static String PROP_COUNT_COPY_TIME = "countCopyTime";
    public static String PROP_PORT = "port";
    public static String PROP_DB_FILE_URI = "dbFileUri";
    public static String PROP_CONTEXT_PATH = "contextPath";
    public static String PROP_PASSWORD = "password";
    public static String PROP_OFFSET_X = "offsetX";
    public static String PROP_SERVLET_POINT = "servletPoint";
    public static String PROP_MIN_HEIGHT = "minHeight";
    public static String PROP_CONTENT = "content";
    public static String PROP_ON = "on";
    public static String PROP_DOWNLOAD_CODE = "downloadCode";
    public static String PROP_DOWNLOAD_TIME = "downloadTime";
    public static String PROP_HOST = "host";
    public static String PROP_POS = "pos";
    public static String PROP_MIN_WIDTH = "minWidth";
    public static String PROP_OFFSET_Y = "offsetY";
    public static String PROP_ENCODING = "encoding";
    public static String PROP_SIZE = "size";
    public static String PROP_IMAGE_PATH = "imagePath";
    public static String PROP_PERSONAL = "personal";
    public static String PROP_UPLOAD_TO_DB = "uploadToDb";
    public static String PROP_ID = "id";
    public static String PROP_USERNAME = "username";

    // constructors
    public BaseCmsConfig () {
        initialize();
    }

    /**
     * Constructor for primary key
     */
    public BaseCmsConfig (java.lang.Integer id) {
        this.setId(id);
        initialize();
    }

    /**
     * Constructor for required fields
     */
    public BaseCmsConfig (
        java.lang.Integer id,
        java.lang.String dbFileUri,
        java.lang.Boolean uploadToDb,
        java.lang.String defImg,
        java.lang.String loginUrl,
        java.util.Date countClearTime,
        java.util.Date countCopyTime,
        java.lang.String downloadCode,
        java.lang.Integer downloadTime) {
        this.setId(id);
        this.setDbFileUri(dbFileUri);
        this.setUploadToDb(uploadToDb);
        this.setDefImg(defImg);
        this.setLoginUrl(loginUrl);
        this.setCountClearTime(countClearTime);
        this.setCountCopyTime(countCopyTime);
        this.setDownloadCode(downloadCode);
        this.setDownloadTime(downloadTime);
        initialize();
    }

    // Hook for subclasses; intentionally empty here.
    protected void initialize () {}

    // Cached hash code; Integer.MIN_VALUE marks "not yet computed" (see hashCode()).
    private int hashCode = Integer.MIN_VALUE;

    // primary key
    private java.lang.Integer id;

    // fields
    private java.lang.String contextPath;
    private java.lang.String servletPoint;
    private java.lang.Integer port;
    private java.lang.String dbFileUri;
    private java.lang.Boolean uploadToDb;
    private java.lang.String defImg;
    private java.lang.String loginUrl;
    private java.lang.String processUrl;
    private java.util.Date countClearTime;
    private java.util.Date countCopyTime;
    private java.lang.String downloadCode;
    private java.lang.Integer downloadTime;
    private java.lang.Boolean emailValidate;
    private java.lang.Boolean viewOnlyChecked;

    // components (embedded value objects)
    com.jeecms.core.entity.MarkConfig m_markConfig;
    com.jeecms.core.entity.EmailConfig m_emailConfig;

    // collections
    private java.util.Map<java.lang.String, java.lang.String> attr;
    private java.util.Set<CmsConfigItem> registerItems;

    /**
     * Return the unique identifier of this class
     * @hibernate.id
     *  generator-class="assigned"
     *  column="config_id"
     */
    public java.lang.Integer getId () {
        return id;
    }

    /**
     * Set the unique identifier of this class.
     * Also invalidates the cached hash code, since it is derived from the id.
     * @param id the new ID
     */
    public void setId (java.lang.Integer id) {
        this.id = id;
        this.hashCode = Integer.MIN_VALUE;
    }

    /**
     * Return the value associated with the column: context_path
     */
    public java.lang.String getContextPath () {
        return contextPath;
    }

    /**
     * Set the value related to the column: context_path
     * @param contextPath the context_path value
     */
    public void setContextPath (java.lang.String contextPath) {
        this.contextPath = contextPath;
    }

    /**
     * Return the value associated with the column: servlet_point
     */
    public java.lang.String getServletPoint () {
        return servletPoint;
    }

    /**
     * Set the value related to the column: servlet_point
     * @param servletPoint the servlet_point value
     */
    public void setServletPoint (java.lang.String servletPoint) {
        this.servletPoint = servletPoint;
    }

    /**
     * Return the value associated with the column: port
     */
    public java.lang.Integer getPort () {
        return port;
    }

    /**
     * Set the value related to the column: port
     * @param port the port value
     */
    public void setPort (java.lang.Integer port) {
        this.port = port;
    }

    /**
     * Return the value associated with the column: db_file_uri
     */
    public java.lang.String getDbFileUri () {
        return dbFileUri;
    }

    /**
     * Set the value related to the column: db_file_uri
     * @param dbFileUri the db_file_uri value
     */
    public void setDbFileUri (java.lang.String dbFileUri) {
        this.dbFileUri = dbFileUri;
    }

    /**
     * Return the value associated with the column: is_upload_to_db
     */
    public java.lang.Boolean getUploadToDb () {
        return uploadToDb;
    }

    /**
     * Set the value related to the column: is_upload_to_db
     * @param uploadToDb the is_upload_to_db value
     */
    public void setUploadToDb (java.lang.Boolean uploadToDb) {
        this.uploadToDb = uploadToDb;
    }

    /**
     * Return the value associated with the column: def_img
     */
    public java.lang.String getDefImg () {
        return defImg;
    }

    /**
     * Set the value related to the column: def_img
     * @param defImg the def_img value
     */
    public void setDefImg (java.lang.String defImg) {
        this.defImg = defImg;
    }

    /**
     * Return the value associated with the column: login_url
     */
    public java.lang.String getLoginUrl () {
        return loginUrl;
    }

    /**
     * Set the value related to the column: login_url
     * @param loginUrl the login_url value
     */
    public void setLoginUrl (java.lang.String loginUrl) {
        this.loginUrl = loginUrl;
    }

    /**
     * Return the value associated with the column: process_url
     */
    public java.lang.String getProcessUrl () {
        return processUrl;
    }

    /**
     * Set the value related to the column: process_url
     * @param processUrl the process_url value
     */
    public void setProcessUrl (java.lang.String processUrl) {
        this.processUrl = processUrl;
    }

    /**
     * Return the value associated with the column: count_clear_time
     */
    public java.util.Date getCountClearTime () {
        return countClearTime;
    }

    /**
     * Set the value related to the column: count_clear_time
     * @param countClearTime the count_clear_time value
     */
    public void setCountClearTime (java.util.Date countClearTime) {
        this.countClearTime = countClearTime;
    }

    /**
     * Return the value associated with the column: count_copy_time
     */
    public java.util.Date getCountCopyTime () {
        return countCopyTime;
    }

    /**
     * Set the value related to the column: count_copy_time
     * @param countCopyTime the count_copy_time value
     */
    public void setCountCopyTime (java.util.Date countCopyTime) {
        this.countCopyTime = countCopyTime;
    }

    /**
     * Return the value associated with the column: download_code
     */
    public java.lang.String getDownloadCode () {
        return downloadCode;
    }

    /**
     * Set the value related to the column: download_code
     * @param downloadCode the download_code value
     */
    public void setDownloadCode (java.lang.String downloadCode) {
        this.downloadCode = downloadCode;
    }

    /**
     * Return the value associated with the column: download_time
     */
    public java.lang.Integer getDownloadTime () {
        return downloadTime;
    }

    /**
     * Set the value related to the column: download_time
     * @param downloadTime the download_time value
     */
    public void setDownloadTime (java.lang.Integer downloadTime) {
        this.downloadTime = downloadTime;
    }

    public java.lang.Boolean getEmailValidate() {
        return emailValidate;
    }

    public void setEmailValidate(java.lang.Boolean emailValidate) {
        this.emailValidate = emailValidate;
    }

    public java.lang.Boolean getViewOnlyChecked() {
        return viewOnlyChecked;
    }

    public void setViewOnlyChecked(java.lang.Boolean viewOnlyChecked) {
        this.viewOnlyChecked = viewOnlyChecked;
    }

    public com.jeecms.core.entity.MarkConfig getMarkConfig () {
        return m_markConfig;
    }

    /**
     * Set the watermark configuration component.
     * (Generated javadoc placeholder "${prop.Column}" replaced.)
     * @param m_markConfig the mark config component value
     */
    public void setMarkConfig (com.jeecms.core.entity.MarkConfig m_markConfig) {
        this.m_markConfig = m_markConfig;
    }

    public com.jeecms.core.entity.EmailConfig getEmailConfig () {
        return m_emailConfig;
    }

    /**
     * Set the email configuration component.
     * (Generated javadoc placeholder "${prop.Column}" replaced.)
     * @param m_emailConfig the email config component value
     */
    public void setEmailConfig (com.jeecms.core.entity.EmailConfig m_emailConfig) {
        this.m_emailConfig = m_emailConfig;
    }

    /**
     * Return the value associated with the column: attr
     */
    public java.util.Map<java.lang.String, java.lang.String> getAttr () {
        return attr;
    }

    /**
     * Set the value related to the column: attr
     * @param attr the attr value
     */
    public void setAttr (java.util.Map<java.lang.String, java.lang.String> attr) {
        this.attr = attr;
    }

    public java.util.Set<CmsConfigItem> getRegisterItems() {
        return registerItems;
    }

    public void setRegisterItems(java.util.Set<CmsConfigItem> registerItems) {
        this.registerItems = registerItems;
    }

    // Identity is based solely on the persistent id; two instances without ids
    // are never equal. NOTE(review): matches only the concrete CmsConfig subtype.
    public boolean equals (Object obj) {
        if (null == obj) return false;
        if (!(obj instanceof com.jeecms.core.entity.CmsConfig)) return false;
        else {
            com.jeecms.core.entity.CmsConfig cmsConfig = (com.jeecms.core.entity.CmsConfig) obj;
            if (null == this.getId() || null == cmsConfig.getId()) return false;
            else return (this.getId().equals(cmsConfig.getId()));
        }
    }

    // Id-derived hash, computed lazily and cached; falls back to the identity
    // hash while the entity is transient (id still null).
    public int hashCode () {
        if (Integer.MIN_VALUE == this.hashCode) {
            if (null == this.getId()) return super.hashCode();
            else {
                String hashStr = this.getClass().getName() + ":" + this.getId().hashCode();
                this.hashCode = hashStr.hashCode();
            }
        }
        return this.hashCode;
    }

    public String toString () {
        return super.toString();
    }
}
| |
package org.mamute.model;
import static org.mamute.infra.Digester.hashFor;
import static org.mamute.infra.NormalizerBrutal.toSlug;
import static org.mamute.model.SanitizedText.fromTrustedText;
import static org.mamute.validators.UserPersonalInfoValidator.ABOUT_LENGTH_MESSAGE;
import static org.mamute.validators.UserPersonalInfoValidator.ABOUT_MAX_LENGTH;
import static org.mamute.validators.UserPersonalInfoValidator.ABOUT_MIN_LENGTH;
import static org.mamute.validators.UserPersonalInfoValidator.EMAIL_LENGTH_MESSAGE;
import static org.mamute.validators.UserPersonalInfoValidator.EMAIL_MAX_LENGTH;
import static org.mamute.validators.UserPersonalInfoValidator.EMAIL_MIN_LENGTH;
import static org.mamute.validators.UserPersonalInfoValidator.EMAIL_NOT_VALID;
import static org.mamute.validators.UserPersonalInfoValidator.LOCATION_LENGTH_MESSAGE;
import static org.mamute.validators.UserPersonalInfoValidator.LOCATION_MAX_LENGTH;
import static org.mamute.validators.UserPersonalInfoValidator.MARKED_ABOUT_MAX_LENGTH;
import static org.mamute.validators.UserPersonalInfoValidator.NAME_LENGTH_MESSAGE;
import static org.mamute.validators.UserPersonalInfoValidator.NAME_MAX_LENGTH;
import static org.mamute.validators.UserPersonalInfoValidator.NAME_MIN_LENGTH;
import static org.mamute.validators.UserPersonalInfoValidator.NAME_REQUIRED;
import static org.mamute.validators.UserPersonalInfoValidator.WEBSITE_LENGTH_MESSAGE;
import static org.mamute.validators.UserPersonalInfoValidator.WEBSITE_MAX_LENGHT;
import static org.mamute.validators.UserPersonalInfoValidator.WEBSITE_MIN_LENGTH;
import java.io.UnsupportedEncodingException;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import javax.enterprise.inject.Vetoed;
import javax.persistence.Cacheable;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.OneToMany;
import javax.persistence.Table;
import org.hibernate.annotations.Cache;
import org.hibernate.annotations.CacheConcurrencyStrategy;
import org.hibernate.annotations.Type;
import org.hibernate.validator.constraints.Email;
import org.hibernate.validator.constraints.Length;
import org.hibernate.validator.constraints.NotEmpty;
import org.joda.time.DateTime;
import org.joda.time.Years;
import org.mamute.auth.rules.PermissionRulesConstants;
import org.mamute.dto.UserPersonalInfo;
import org.mamute.infra.Digester;
import org.mamute.model.interfaces.Identifiable;
import org.mamute.model.interfaces.Moderatable;
import org.mamute.model.interfaces.Votable;
import org.mamute.model.watch.Watcher;
import org.mamute.providers.SessionFactoryCreator;
@Cacheable
@Cache(usage=CacheConcurrencyStrategy.READ_WRITE, region="cache")
@Table(name="Users")
@Entity
@Vetoed
public class User implements Identifiable {

    @Type(type = SessionFactoryCreator.JODA_TIME_TYPE)
    private final DateTime createdAt = new DateTime();

    @Id
    @GeneratedValue
    private Long id;

    @NotEmpty(message = NAME_REQUIRED)
    @Length(min = NAME_MIN_LENGTH, max = NAME_MAX_LENGTH, message = NAME_LENGTH_MESSAGE)
    private String name;

    @Length(min = WEBSITE_MIN_LENGTH, max = WEBSITE_MAX_LENGHT, message = WEBSITE_LENGTH_MESSAGE)
    private String website;

    @Length(max = LOCATION_MAX_LENGTH, message = LOCATION_LENGTH_MESSAGE)
    private String location;

    private String about;

    private String markedAbout;

    @Type(type = SessionFactoryCreator.JODA_TIME_TYPE)
    private DateTime birthDate;

    private long karma = 0;

    private boolean moderator = false;

    private boolean confirmedEmail = false;

    // Empty string means "no reset token has been issued" — see touchForgotPasswordToken().
    private String forgotPasswordToken = "";

    private String photoUri;

    @Type(type = "text")
    @NotEmpty
    private String sluggedName;

    @Type(type = SessionFactoryCreator.JODA_TIME_TYPE)
    private DateTime nameLastTouchedAt;

    @OneToMany(mappedBy="user")
    private final List<LoginMethod> loginMethods = new ArrayList<>();

    // Sentinel user used as the author of anonymized content.
    public static final User GHOST;

    @Length(min = EMAIL_MIN_LENGTH, max = EMAIL_MAX_LENGTH, message = EMAIL_LENGTH_MESSAGE)
    @Email(message = EMAIL_NOT_VALID)
    private String email;

    private boolean isSubscribed = true;

    private boolean isBanned = false;

    @OneToMany(mappedBy = "watcher")
    private final List<Watcher> watches = new ArrayList<>();

    @Type(type = SessionFactoryCreator.JODA_TIME_TYPE)
    private DateTime lastUpvote = new DateTime();

    static {
        GHOST = new User(fromTrustedText("GHOST"), "");
        GHOST.setId(1000L); // 'L' suffix: lowercase 'l' reads like the digit '1'
    }

    /**
     * @deprecated hibernate eyes only
     */
    protected User() {
        this(fromTrustedText(""), "");
    }

    public User(SanitizedText name, String email) {
        setName(name);
        this.email = email;
    }

    public DateTime getNameLastTouchedAt() {
        return nameLastTouchedAt;
    }

    /** Creates a new session whose key is derived from the current time and this user's id. */
    public UserSession newSession() {
        Long currentTimeMillis = System.currentTimeMillis();
        String sessionKey = Digester.encrypt(currentTimeMillis.toString() + this.id.toString());
        UserSession userSession = new UserSession(this, sessionKey);
        return userSession;
    }

    /** Sets the display name and keeps the slug and last-touched timestamp in sync. */
    public void setName(SanitizedText name) {
        this.name = name.getText();
        this.sluggedName = toSlug(this.name);
        this.nameLastTouchedAt = new DateTime();
    }

    public void setId(Long id) {
        this.id = id;
    }

    public void setPhotoUri(URL storedUri) {
        photoUri = storedUri.toString();
    }

    /** Copies every personal-info field (including subscription flag) from the DTO. */
    public void setPersonalInformation(UserPersonalInfo info) {
        this.birthDate = info.getBirthDate();
        this.name = info.getName();
        this.email = info.getEmail();
        this.website = info.getWebsite();
        this.location = info.getLocation();
        this.about = info.getAbout();
        this.markedAbout = info.getMarkedAbout();
        this.isSubscribed = info.isSubscribed();
    }

    void setKarma(long karma) {
        this.karma = karma;
    }

    public boolean isSubscribedToNewsletter() {
        return isSubscribed;
    }

    public Long getId() {
        return this.id;
    }

    public String getSmallPhoto(String gravatarUrl) {
        return getPhoto(32, 32, gravatarUrl);
    }

    public String getMediumPhoto(String gravatarUrl) {
        return getPhoto(48, 48, gravatarUrl);
    }

    public String getBigPhoto(String gravatarUrl) {
        return getPhoto(128, 128, gravatarUrl);
    }

    public String getName() {
        return name;
    }

    public long getKarma() {
        return karma;
    }

    public String getEmail() {
        return email;
    }

    public String getSluggedName() {
        return sluggedName;
    }

    public DateTime getCreatedAt() {
        return createdAt;
    }

    public String getWebsite() {
        return website;
    }

    public String getLocation() {
        return location;
    }

    public DateTime getBirthDate() {
        return birthDate;
    }

    /** @return age in whole years, or {@code null} when no birth date is set */
    public Integer getAge() {
        DateTime now = new DateTime();
        if (birthDate == null){
            return null;
        }
        return Years.yearsBetween(birthDate, now).getYears();
    }

    /** @throws IllegalStateException when this user has no brutal (local) login method */
    public LoginMethod getBrutalLogin() {
        for (LoginMethod method : loginMethods) {
            if (method.isBrutal()) {
                return method;
            }
        }
        throw new IllegalStateException("this guy dont have a brutal login method!");
    }

    /**
     * Builds the avatar URL: gravatar (with a robohash fallback) when no photo is
     * stored, otherwise the stored URI resized to the requested dimensions.
     */
    public String getPhoto(Integer width, Integer height, String gravatarUrl) {
        String size = width + "x" + height;
        if (photoUri == null) {
            String digest = Digester.md5(email);
            String robohash = "http://robohash.org/size_"+size+"/set_set1/bgset_any/"+digest+".png";
            String gravatar = gravatarUrl + "/avatar/" + digest + ".png?r=PG&size=" + size;
            try {
                return gravatar + "&d=" + java.net.URLEncoder.encode(robohash, "UTF-8");
            } catch (UnsupportedEncodingException e) {
                // UTF-8 is guaranteed by the JVM spec; fall back to plain gravatar anyway.
                return gravatar;
            }
        } else if (photoUri.contains("googleusercontent")) {
            return photoUri.replaceAll("sz=(\\d+)", "sz="+width);
        } else {
            return photoUri + "?width=" + width + "&height=" + height;
        }
    }

    public String getAbout() {
        return about;
    }

    public String getMarkedAbout() {
        return markedAbout;
    }

    @Override
    public String toString() {
        return "[User " + email + ", "+ name +", "+ id +"]";
    }

    public boolean isModerator() {
        return moderator;
    }

    public User asModerator() {
        this.moderator = true;
        return this;
    }

    public User removeModerator() {
        this.moderator = false;
        return this;
    }

    public boolean canModerate() {
        return isModerator() || this.karma >= PermissionRulesConstants.MODERATE_EDITS;
    }

    public void setSubscribed(boolean isSubscribed){
        this.isSubscribed = isSubscribed;
    }

    /**
     * Issues (and stores) a fresh password-reset token.
     * SECURITY(review): Math.random() is predictable; a SecureRandom-based token
     * would be preferable — flagged, not changed, to avoid a new dependency here.
     */
    public String touchForgotPasswordToken() {
        String tokenSource = Math.random() + System.currentTimeMillis() + getEmail() + getId();
        this.forgotPasswordToken = Digester.encrypt(tokenSource);
        return forgotPasswordToken;
    }

    /**
     * @return true only when a token has been issued and {@code token} matches it
     */
    public boolean isValidForgotPasswordToken(String token) {
        // BUGFIX: before touchForgotPasswordToken() runs the stored token is "",
        // and an empty candidate must not validate.
        return !forgotPasswordToken.isEmpty() && forgotPasswordToken.equals(token);
    }

    /** Applies the new password to every login method; rejects mismatched confirmation. */
    public boolean updateForgottenPassword(String password,
            String passwordConfirmation) {
        if (!password.equals(passwordConfirmation)) {
            return false;
        }
        for (LoginMethod method : loginMethods) {
            method.updateForgottenPassword(password);
        }
        return true;
    }

    /**
     * Approves the given moderatable if this user can moderate.
     *
     * @return {@link UpdateStatus#APPROVED} when the approval was applied,
     *         {@link UpdateStatus#REFUSED} otherwise
     */
    public UpdateStatus approve(Moderatable moderatable, Information approvedInfo) {
        if (this.canModerate()) {
            moderatable.approve(approvedInfo);
            approvedInfo.moderate(this, UpdateStatus.APPROVED);
            // BUGFIX: previously fell through and reported REFUSED even after approving.
            return UpdateStatus.APPROVED;
        }
        return UpdateStatus.REFUSED;
    }

    // NOTE: method name typo ("descrease") kept — it is public API with external callers.
    public void descreaseKarma(int value) {
        this.karma -= value;
    }

    public void increaseKarma(int value) {
        this.karma += value;
    }

    public void confirmEmail(){
        confirmedEmail = true;
    }

    public void add(LoginMethod brutalLogin) {
        loginMethods.add(brutalLogin);
    }

    /** @return true when this user's id equals the votable author's id (null-safe) */
    public boolean isAuthorOf(Votable votable) {
        // BUGFIX: '==' compared boxed Longs by reference, which fails for ids outside
        // the small-value cache; compare by value instead.
        Long authorId = votable.getAuthor().getId();
        return id == null ? authorId == null : id.equals(authorId);
    }

    public boolean hasKarmaToAnswerOwnQuestion() {
        return (this.karma >= PermissionRulesConstants.ANSWER_OWN_QUESTION) || isModerator();
    }

    public boolean hasKarmaToAnswerInactiveQuestion() {
        return (this.karma >= PermissionRulesConstants.INACTIVE_QUESTION) || isModerator();
    }

    public List<LoginMethod> getLoginMethods() {
        return loginMethods;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + ((getId() == null) ? 0 : getId().hashCode());
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (!(obj instanceof User))
            return false;
        User other = (User) obj;
        if (getId() == null) {
            if (other.getId() != null)
                return false;
        } else if (!getId().equals(other.getId()))
            return false;
        return true;
    }

    public String getUnsubscribeHash() {
        return Digester.encrypt(this.email + hashFor(this.id));
    }

    public boolean isBanned() {
        return isBanned;
    }

    public void ban() {
        this.isBanned = true;
    }

    public void undoBan() {
        this.isBanned = false;
    }

    public void votedUp() {
        this.lastUpvote = new DateTime();
    }

    /** @return true when the last upvote happened within the past week */
    public boolean isVotingEnough(){
        return !lastUpvote.isBefore(new DateTime().minusWeeks(1));
    }

    public boolean hasKarma() {
        return (this.karma >= 0);
    }
}
| |
/*
* Copyright LWJGL. All rights reserved.
* License terms: https://www.lwjgl.org/license
* MACHINE GENERATED FILE, DO NOT EDIT
*/
package org.lwjgl.opengl;
import java.nio.*;
import static org.lwjgl.system.Checks.*;
import static org.lwjgl.system.JNI.*;
import static org.lwjgl.system.MemoryUtil.*;
/**
* Native bindings to the <a href="http://www.opengl.org/registry/specs/NV/glx_swap_group.txt">GLX_NV_swap_group</a> extension.
*
* <p>This extension provides the capability to synchronize the buffer swaps of a group of OpenGL windows. A swap group is created, and windows are added as
* members to the swap group. Buffer swaps to members of the swap group will then take place concurrently.</p>
*
 * <p>This extension also provides the capability to synchronize the buffer swaps of different swap groups, which may reside on distributed systems on a
 * network. For this purpose swap groups can be bound to a swap barrier.</p>
*
* <p>This extension extends the set of conditions that must be met before a buffer swap can take place.</p>
*/
public class GLXNVSwapGroup {

    // Utility holder for static bindings; never instantiated.
    protected GLXNVSwapGroup() {
        throw new UnsupportedOperationException();
    }

    // True when every GLX_NV_swap_group entry point was resolved.
    static boolean isAvailable(GLXCapabilities caps) {
        return checkFunctions(
            caps.glXJoinSwapGroupNV, caps.glXBindSwapBarrierNV, caps.glXQuerySwapGroupNV, caps.glXQueryMaxSwapGroupsNV, caps.glXQueryFrameCountNV,
            caps.glXResetFrameCountNV
        );
    }

    // --- [ glXJoinSwapGroupNV ] ---

    /**
     * Adds {@code drawable} to the swap group {@code group}.
     *
     * @param display  the connection to the X server
     * @param drawable the GLXDrawable to add to the swap group (per the GLX_NV_swap_group spec; group 0 removes it — confirm against the spec)
     * @param group    the swap group handle
     */
    public static int glXJoinSwapGroupNV(long display, long drawable, int group) {
        long __functionAddress = GL.getCapabilitiesGLXClient().glXJoinSwapGroupNV;
        if ( CHECKS ) {
            checkFunctionAddress(__functionAddress);
            checkPointer(display);
            checkPointer(drawable);
        }
        return callPPI(__functionAddress, display, drawable, group);
    }

    // --- [ glXBindSwapBarrierNV ] ---

    /**
     * Binds swap group {@code group} to {@code barrier}.
     *
     * @param display the connection to the X server
     * @param group   the swap group handle
     * @param barrier the swap barrier handle (per the GLX_NV_swap_group spec; barrier 0 unbinds — confirm against the spec)
     */
    public static int glXBindSwapBarrierNV(long display, int group, int barrier) {
        long __functionAddress = GL.getCapabilitiesGLXClient().glXBindSwapBarrierNV;
        if ( CHECKS ) {
            checkFunctionAddress(__functionAddress);
            checkPointer(display);
        }
        return callPI(__functionAddress, display, group, barrier);
    }

    // --- [ glXQuerySwapGroupNV ] ---

    /** Unsafe version of: {@link #glXQuerySwapGroupNV QuerySwapGroupNV} */
    public static int nglXQuerySwapGroupNV(long display, long drawable, long group, long barrier) {
        long __functionAddress = GL.getCapabilitiesGLXClient().glXQuerySwapGroupNV;
        if ( CHECKS ) {
            checkFunctionAddress(__functionAddress);
            checkPointer(display);
            checkPointer(drawable);
        }
        return callPPPPI(__functionAddress, display, drawable, group, barrier);
    }

    /**
     * Queries the swap group and barrier {@code drawable} is bound to.
     *
     * @param display  the connection to the X server
     * @param drawable the GLXDrawable to query
     * @param group    output buffer (capacity 1) receiving the swap group handle
     * @param barrier  output buffer (capacity 1) receiving the swap barrier handle
     */
    public static int glXQuerySwapGroupNV(long display, long drawable, IntBuffer group, IntBuffer barrier) {
        if ( CHECKS ) {
            checkBuffer(group, 1);
            checkBuffer(barrier, 1);
        }
        return nglXQuerySwapGroupNV(display, drawable, memAddress(group), memAddress(barrier));
    }

    // --- [ glXQueryMaxSwapGroupsNV ] ---

    /** Unsafe version of: {@link #glXQueryMaxSwapGroupsNV QueryMaxSwapGroupsNV} */
    public static int nglXQueryMaxSwapGroupsNV(long display, int screen, long maxGroups, long maxBarriers) {
        long __functionAddress = GL.getCapabilitiesGLXClient().glXQueryMaxSwapGroupsNV;
        if ( CHECKS ) {
            checkFunctionAddress(__functionAddress);
            checkPointer(display);
        }
        return callPPPI(__functionAddress, display, screen, maxGroups, maxBarriers);
    }

    /**
     * Queries the maximum number of swap groups and barriers supported on {@code screen}.
     *
     * @param display     the connection to the X server
     * @param screen      the screen to query
     * @param maxGroups   output buffer (capacity 1) receiving the maximum swap group count
     * @param maxBarriers output buffer (capacity 1) receiving the maximum swap barrier count
     */
    public static int glXQueryMaxSwapGroupsNV(long display, int screen, IntBuffer maxGroups, IntBuffer maxBarriers) {
        if ( CHECKS ) {
            checkBuffer(maxGroups, 1);
            checkBuffer(maxBarriers, 1);
        }
        return nglXQueryMaxSwapGroupsNV(display, screen, memAddress(maxGroups), memAddress(maxBarriers));
    }

    // --- [ glXQueryFrameCountNV ] ---

    /** Unsafe version of: {@link #glXQueryFrameCountNV QueryFrameCountNV} */
    public static int nglXQueryFrameCountNV(long display, int screen, long count) {
        long __functionAddress = GL.getCapabilitiesGLXClient().glXQueryFrameCountNV;
        if ( CHECKS ) {
            checkFunctionAddress(__functionAddress);
            checkPointer(display);
        }
        return callPPI(__functionAddress, display, screen, count);
    }

    /**
     * Queries the current frame counter for {@code screen}.
     *
     * @param display the connection to the X server
     * @param screen  the screen to query
     * @param count   output buffer (capacity 1) receiving the frame counter
     */
    public static int glXQueryFrameCountNV(long display, int screen, IntBuffer count) {
        if ( CHECKS )
            checkBuffer(count, 1);
        return nglXQueryFrameCountNV(display, screen, memAddress(count));
    }

    // --- [ glXResetFrameCountNV ] ---

    /**
     * Resets the frame counter of {@code screen} to zero.
     *
     * @param display the connection to the X server
     * @param screen  the screen whose frame counter is reset
     */
    public static int glXResetFrameCountNV(long display, int screen) {
        long __functionAddress = GL.getCapabilitiesGLXClient().glXResetFrameCountNV;
        if ( CHECKS ) {
            checkFunctionAddress(__functionAddress);
            checkPointer(display);
        }
        return callPI(__functionAddress, display, screen);
    }

    /** Array version of: {@link #glXQuerySwapGroupNV QuerySwapGroupNV} */
    public static int glXQuerySwapGroupNV(long display, long drawable, int[] group, int[] barrier) {
        long __functionAddress = GL.getCapabilitiesGLXClient().glXQuerySwapGroupNV;
        if ( CHECKS ) {
            checkFunctionAddress(__functionAddress);
            checkPointer(display);
            checkPointer(drawable);
            checkBuffer(group, 1);
            checkBuffer(barrier, 1);
        }
        return callPPPPI(__functionAddress, display, drawable, group, barrier);
    }

    /** Array version of: {@link #glXQueryMaxSwapGroupsNV QueryMaxSwapGroupsNV} */
    public static int glXQueryMaxSwapGroupsNV(long display, int screen, int[] maxGroups, int[] maxBarriers) {
        long __functionAddress = GL.getCapabilitiesGLXClient().glXQueryMaxSwapGroupsNV;
        if ( CHECKS ) {
            checkFunctionAddress(__functionAddress);
            checkPointer(display);
            checkBuffer(maxGroups, 1);
            checkBuffer(maxBarriers, 1);
        }
        return callPPPI(__functionAddress, display, screen, maxGroups, maxBarriers);
    }

    /** Array version of: {@link #glXQueryFrameCountNV QueryFrameCountNV} */
    public static int glXQueryFrameCountNV(long display, int screen, int[] count) {
        long __functionAddress = GL.getCapabilitiesGLXClient().glXQueryFrameCountNV;
        if ( CHECKS ) {
            checkFunctionAddress(__functionAddress);
            checkPointer(display);
            checkBuffer(count, 1);
        }
        return callPPI(__functionAddress, display, screen, count);
    }

}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.runners.flink.translation.functions;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.emptyIterable;
import static org.hamcrest.Matchers.instanceOf;
import static org.mockito.Mockito.when;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import org.apache.beam.model.pipeline.v1.RunnerApi;
import org.apache.beam.model.pipeline.v1.RunnerApi.Components;
import org.apache.beam.model.pipeline.v1.RunnerApi.Environment;
import org.apache.beam.runners.core.construction.PTransformTranslation;
import org.apache.beam.runners.core.construction.graph.ExecutableStage;
import org.apache.beam.runners.core.construction.graph.ImmutableExecutableStage;
import org.apache.beam.runners.core.construction.graph.PipelineNode;
import org.apache.beam.runners.core.construction.graph.PipelineNode.PCollectionNode;
import org.apache.beam.runners.core.construction.graph.SideInputReference;
import org.apache.beam.runners.fnexecution.state.StateRequestHandlers.SideInputHandler;
import org.apache.beam.sdk.coders.Coder;
import org.apache.beam.sdk.coders.KvCoder;
import org.apache.beam.sdk.coders.StringUtf8Coder;
import org.apache.beam.sdk.coders.VarIntCoder;
import org.apache.beam.sdk.coders.VoidCoder;
import org.apache.beam.sdk.transforms.windowing.GlobalWindow;
import org.apache.beam.sdk.transforms.windowing.IntervalWindow;
import org.apache.beam.sdk.transforms.windowing.IntervalWindow.IntervalWindowCoder;
import org.apache.beam.sdk.transforms.windowing.PaneInfo;
import org.apache.beam.sdk.util.WindowedValue;
import org.apache.beam.sdk.values.KV;
import org.apache.flink.api.common.functions.RuntimeContext;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.Instant;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
/** Tests for {@link FlinkBatchSideInputHandlerFactory}. */
@RunWith(JUnit4.class)
public class FlinkBatchSideInputHandlerFactoryTest {
  // Identifiers that wire the side input reference in EXECUTABLE_STAGE to the
  // factory lookups performed by the tests below.
  private static final String TRANSFORM_ID = "transform-id";
  private static final String SIDE_INPUT_NAME = "side-input";
  private static final String COLLECTION_ID = "collection";
  // Access-pattern URNs for the two side input kinds exercised here.
  private static final RunnerApi.FunctionSpec MULTIMAP_ACCESS =
      RunnerApi.FunctionSpec.newBuilder().setUrn(PTransformTranslation.MULTIMAP_SIDE_INPUT).build();
  private static final RunnerApi.FunctionSpec ITERABLE_ACCESS =
      RunnerApi.FunctionSpec.newBuilder().setUrn(PTransformTranslation.ITERABLE_SIDE_INPUT).build();
  // Minimal stage containing exactly one side input:
  // (TRANSFORM_ID, SIDE_INPUT_NAME) -> COLLECTION_ID.
  // NOTE: declared after TRANSFORM_ID/SIDE_INPUT_NAME/COLLECTION_ID on purpose —
  // static initializers run in source order.
  private static final ExecutableStage EXECUTABLE_STAGE =
      createExecutableStage(
          Arrays.asList(
              SideInputReference.of(
                  PipelineNode.pTransform(TRANSFORM_ID, RunnerApi.PTransform.getDefaultInstance()),
                  SIDE_INPUT_NAME,
                  PipelineNode.pCollection(
                      COLLECTION_ID, RunnerApi.PCollection.getDefaultInstance()))));
  // Pre-encoded multimap keys passed to SideInputHandler#get.
  private static final byte[] ENCODED_NULL = encode(null, VoidCoder.of());
  private static final byte[] ENCODED_FOO = encode("foo", StringUtf8Coder.of());
  @Rule public ExpectedException thrown = ExpectedException.none();
  // Mocked Flink runtime context; tests stub getBroadcastVariable(COLLECTION_ID)
  // to supply the side input contents.
  @Mock private RuntimeContext context;
  @Before
  public void setUpMocks() {
    MockitoAnnotations.initMocks(this);
  }
  // Requesting a side input that is not registered on the stage must fail fast.
  @Test
  public void invalidSideInputThrowsException() {
    ExecutableStage stage = createExecutableStage(Collections.emptyList());
    FlinkBatchSideInputHandlerFactory factory =
        FlinkBatchSideInputHandlerFactory.forStage(stage, context);
    thrown.expect(instanceOf(IllegalArgumentException.class));
    factory.forSideInput(
        "transform-id",
        "side-input",
        MULTIMAP_ACCESS,
        KvCoder.of(VoidCoder.of(), VoidCoder.of()),
        GlobalWindow.Coder.INSTANCE);
  }
  // An unpopulated broadcast variable should surface as an empty side input, not an error.
  @Test
  public void emptyResultForEmptyCollection() {
    FlinkBatchSideInputHandlerFactory factory =
        FlinkBatchSideInputHandlerFactory.forStage(EXECUTABLE_STAGE, context);
    SideInputHandler<Integer, GlobalWindow> handler =
        factory.forSideInput(
            TRANSFORM_ID,
            SIDE_INPUT_NAME,
            MULTIMAP_ACCESS,
            KvCoder.of(VoidCoder.of(), VarIntCoder.of()),
            GlobalWindow.Coder.INSTANCE);
    // We never populated the broadcast variable for "side-input", so the mock will return an empty
    // list.
    Iterable<Integer> result = handler.get(ENCODED_NULL, GlobalWindow.INSTANCE);
    assertThat(result, emptyIterable());
  }
  @Test
  public void singleElementForCollection() {
    when(context.getBroadcastVariable(COLLECTION_ID))
        .thenReturn(
            Arrays.asList(WindowedValue.valueInGlobalWindow(KV.<Void, Integer>of(null, 3))));
    FlinkBatchSideInputHandlerFactory factory =
        FlinkBatchSideInputHandlerFactory.forStage(EXECUTABLE_STAGE, context);
    SideInputHandler<Integer, GlobalWindow> handler =
        factory.forSideInput(
            TRANSFORM_ID,
            SIDE_INPUT_NAME,
            MULTIMAP_ACCESS,
            KvCoder.of(VoidCoder.of(), VarIntCoder.of()),
            GlobalWindow.Coder.INSTANCE);
    Iterable<Integer> result = handler.get(ENCODED_NULL, GlobalWindow.INSTANCE);
    assertThat(result, contains(3));
  }
  // Multimap access: values for the same key are grouped; lookup by encoded key.
  @Test
  public void groupsValuesByKey() {
    when(context.getBroadcastVariable(COLLECTION_ID))
        .thenReturn(
            Arrays.asList(
                WindowedValue.valueInGlobalWindow(KV.of("foo", 2)),
                WindowedValue.valueInGlobalWindow(KV.of("bar", 3)),
                WindowedValue.valueInGlobalWindow(KV.of("foo", 5))));
    FlinkBatchSideInputHandlerFactory factory =
        FlinkBatchSideInputHandlerFactory.forStage(EXECUTABLE_STAGE, context);
    SideInputHandler<Integer, GlobalWindow> handler =
        factory.forSideInput(
            TRANSFORM_ID,
            SIDE_INPUT_NAME,
            MULTIMAP_ACCESS,
            KvCoder.of(StringUtf8Coder.of(), VarIntCoder.of()),
            GlobalWindow.Coder.INSTANCE);
    Iterable<Integer> result = handler.get(ENCODED_FOO, GlobalWindow.INSTANCE);
    assertThat(result, containsInAnyOrder(2, 5));
  }
  // Multimap access: grouping must respect the window as well as the key, so the
  // same key in different interval windows yields disjoint results.
  @Test
  public void groupsValuesByWindowAndKey() {
    Instant instantA = new DateTime(2018, 1, 1, 1, 1, DateTimeZone.UTC).toInstant();
    Instant instantB = new DateTime(2018, 1, 1, 1, 2, DateTimeZone.UTC).toInstant();
    Instant instantC = new DateTime(2018, 1, 1, 1, 3, DateTimeZone.UTC).toInstant();
    IntervalWindow windowA = new IntervalWindow(instantA, instantB);
    IntervalWindow windowB = new IntervalWindow(instantB, instantC);
    when(context.getBroadcastVariable(COLLECTION_ID))
        .thenReturn(
            Arrays.asList(
                WindowedValue.of(KV.of("foo", 1), instantA, windowA, PaneInfo.NO_FIRING),
                WindowedValue.of(KV.of("bar", 2), instantA, windowA, PaneInfo.NO_FIRING),
                WindowedValue.of(KV.of("foo", 3), instantA, windowA, PaneInfo.NO_FIRING),
                WindowedValue.of(KV.of("foo", 4), instantB, windowB, PaneInfo.NO_FIRING),
                WindowedValue.of(KV.of("bar", 5), instantB, windowB, PaneInfo.NO_FIRING),
                WindowedValue.of(KV.of("foo", 6), instantB, windowB, PaneInfo.NO_FIRING)));
    FlinkBatchSideInputHandlerFactory factory =
        FlinkBatchSideInputHandlerFactory.forStage(EXECUTABLE_STAGE, context);
    SideInputHandler<Integer, IntervalWindow> handler =
        factory.forSideInput(
            TRANSFORM_ID,
            SIDE_INPUT_NAME,
            MULTIMAP_ACCESS,
            KvCoder.of(StringUtf8Coder.of(), VarIntCoder.of()),
            IntervalWindowCoder.of());
    Iterable<Integer> resultA = handler.get(ENCODED_FOO, windowA);
    Iterable<Integer> resultB = handler.get(ENCODED_FOO, windowB);
    assertThat(resultA, containsInAnyOrder(1, 3));
    assertThat(resultB, containsInAnyOrder(4, 6));
  }
  // Iterable access: no key is involved (get is called with a null key); values
  // are still partitioned by window.
  @Test
  public void iterableAccessPattern() {
    Instant instantA = new DateTime(2018, 1, 1, 1, 1, DateTimeZone.UTC).toInstant();
    Instant instantB = new DateTime(2018, 1, 1, 1, 2, DateTimeZone.UTC).toInstant();
    Instant instantC = new DateTime(2018, 1, 1, 1, 3, DateTimeZone.UTC).toInstant();
    IntervalWindow windowA = new IntervalWindow(instantA, instantB);
    IntervalWindow windowB = new IntervalWindow(instantB, instantC);
    when(context.getBroadcastVariable(COLLECTION_ID))
        .thenReturn(
            Arrays.asList(
                WindowedValue.of(1, instantA, windowA, PaneInfo.NO_FIRING),
                WindowedValue.of(2, instantA, windowA, PaneInfo.NO_FIRING),
                WindowedValue.of(3, instantB, windowB, PaneInfo.NO_FIRING),
                WindowedValue.of(4, instantB, windowB, PaneInfo.NO_FIRING)));
    FlinkBatchSideInputHandlerFactory factory =
        FlinkBatchSideInputHandlerFactory.forStage(EXECUTABLE_STAGE, context);
    SideInputHandler<Integer, IntervalWindow> handler =
        factory.forSideInput(
            TRANSFORM_ID,
            SIDE_INPUT_NAME,
            ITERABLE_ACCESS,
            VarIntCoder.of(),
            IntervalWindowCoder.of());
    Iterable<Integer> resultA = handler.get(null, windowA);
    Iterable<Integer> resultB = handler.get(null, windowB);
    assertThat(resultA, containsInAnyOrder(1, 2));
    assertThat(resultB, containsInAnyOrder(3, 4));
  }
  /**
   * Builds a single-input executable stage with default (empty) components and environment and the
   * given side inputs. Only the side input list varies between tests.
   */
  private static ExecutableStage createExecutableStage(Collection<SideInputReference> sideInputs) {
    Components components = Components.getDefaultInstance();
    Environment environment = Environment.getDefaultInstance();
    PCollectionNode inputCollection =
        PipelineNode.pCollection("collection-id", RunnerApi.PCollection.getDefaultInstance());
    return ImmutableExecutableStage.of(
        components,
        environment,
        inputCollection,
        sideInputs,
        Collections.emptyList(),
        Collections.emptyList(),
        Collections.emptyList(),
        Collections.emptyList());
  }
  /** Encodes {@code value} with {@code coder}, rethrowing any IOException unchecked. */
  private static <T> byte[] encode(T value, Coder<T> coder) {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    try {
      coder.encode(value, out);
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
    return out.toByteArray();
  }
}
| |
package cl.monsoon.s1next.widget;
import android.content.Context;
import android.graphics.Rect;
import android.graphics.drawable.Drawable;
import android.net.Uri;
import android.text.Html;
import android.view.View;
import android.webkit.URLUtil;
import android.widget.TextView;
import com.bumptech.glide.Glide;
import com.bumptech.glide.load.engine.DiskCacheStrategy;
import com.bumptech.glide.load.resource.drawable.GlideDrawable;
import com.bumptech.glide.request.Request;
import com.bumptech.glide.request.RequestListener;
import com.bumptech.glide.request.animation.GlideAnimation;
import com.bumptech.glide.request.target.Target;
import com.bumptech.glide.request.target.ViewTarget;
import java.util.Collections;
import java.util.Set;
import java.util.WeakHashMap;
import cl.monsoon.s1next.App;
import cl.monsoon.s1next.R;
import cl.monsoon.s1next.data.api.Api;
import cl.monsoon.s1next.util.TransformationUtil;
/**
* Implements {@link android.text.Html.ImageGetter}
* in order to show images in the TextView.
* <p>
* Uses {@link com.bumptech.glide.request.target.ViewTarget}
* to make an asynchronous HTTP GET to load the image.
* <p>
* Forked from https://github.com/goofyz/testGlide/pull/1
* See https://github.com/bumptech/glide/issues/550
*/
public final class GlideImageGetter
        implements Html.ImageGetter, View.OnAttachStateChangeListener, Drawable.Callback {

    private final Context mContext;
    private final TextView mTextView;

    /**
     * Weak set of the in-flight image targets, backed by a {@link java.util.WeakHashMap}.
     * Parameterized (was a raw {@code Set<ViewTarget>}) so no unchecked operations are needed.
     * Entries are cleared explicitly in {@link #onViewDetachedFromWindow(View)}.
     */
    private final Set<ViewTarget<TextView, GlideDrawable>> mViewTargetSet =
            Collections.newSetFromMap(new WeakHashMap<>());

    public GlideImageGetter(Context context, TextView textView) {
        this.mContext = context;
        this.mTextView = textView;
        // save Drawable.Callback in TextView
        // and get back when finish fetching image
        // see https://github.com/goofyz/testGlide/pull/1 for more details
        mTextView.setTag(R.id.tag_drawable_callback, this);
        // add this listener in order to clean any pending images loading
        // and set drawable callback tag to null when detached from window
        mTextView.addOnAttachStateChangeListener(this);
    }

    /**
     * We display image depends on settings and Wi-Fi status,
     * but display emoticons at any time.
     *
     * @param url the image source; relative (no domain) when it comes from the server
     * @return a placeholder {@link UrlDrawable} that is filled in asynchronously,
     *         or {@code null} when image downloading is disabled for a network image
     */
    @Override
    public Drawable getDrawable(String url) {
        UrlDrawable urlDrawable = new UrlDrawable();
        // url has no domain if it comes from server.
        if (!URLUtil.isNetworkUrl(url)) {
            ImageGetterViewTarget imageGetterViewTarget = new ImageGetterViewTarget(mTextView,
                    urlDrawable);
            // We may have this image in assets if this is emoticon.
            if (url.startsWith(Api.URL_EMOTICON_IMAGE_PREFIX)) {
                String emoticonFileName = url.substring(Api.URL_EMOTICON_IMAGE_PREFIX.length());
                // scale the asset emoticon by the display density
                TransformationUtil.SizeMultiplierBitmapTransformation sizeMultiplierBitmapTransformation =
                        new TransformationUtil.SizeMultiplierBitmapTransformation(mContext,
                                mContext.getResources().getDisplayMetrics().density);
                Glide.with(mContext)
                        .load(Uri.parse(EmoticonFactory.ASSET_PATH_EMOTICON + emoticonFileName))
                        .transform(sizeMultiplierBitmapTransformation)
                        .listener(new RequestListener<Uri, GlideDrawable>() {
                            /**
                             * Occurs If we don't have this image (maybe a new emoticon) in assets.
                             */
                            @Override
                            public boolean onException(Exception e, Uri model, Target<GlideDrawable> target, boolean isFirstResource) {
                                // append domain to this url
                                Glide.with(mContext)
                                        .load(Api.BASE_URL + url)
                                        .transform(sizeMultiplierBitmapTransformation)
                                        .into(imageGetterViewTarget);
                                return true;
                            }

                            @Override
                            public boolean onResourceReady(GlideDrawable resource, Uri model, Target<GlideDrawable> target, boolean isFromMemoryCache, boolean isFirstResource) {
                                return false;
                            }
                        })
                        .into(imageGetterViewTarget);
            } else {
                // relative url that is not an emoticon: fetch it from the server
                Glide.with(mContext)
                        .load(Api.BASE_URL + url)
                        .transform(new TransformationUtil.GlMaxTextureSizeBitmapTransformation(
                                mContext))
                        .into(imageGetterViewTarget);
            }
            mViewTargetSet.add(imageGetterViewTarget);
            return urlDrawable;
        }
        // absolute network url: only load when the download preference allows it
        if (App.getAppComponent(mContext).getDownloadPreferencesManager().isImagesDownload()) {
            ImageGetterViewTarget imageGetterViewTarget = new ImageGetterViewTarget(mTextView,
                    urlDrawable);
            Glide.with(mContext)
                    .load(url)
                    .diskCacheStrategy(DiskCacheStrategy.SOURCE)
                    .transform(new TransformationUtil.GlMaxTextureSizeBitmapTransformation(mContext))
                    .into(imageGetterViewTarget);
            mViewTargetSet.add(imageGetterViewTarget);
            return urlDrawable;
        } else {
            return null;
        }
    }

    @Override
    public void onViewAttachedToWindow(View v) {}

    @Override
    public void onViewDetachedFromWindow(View v) {
        // cancels any pending images loading
        for (ViewTarget<TextView, GlideDrawable> viewTarget : mViewTargetSet) {
            Glide.clear(viewTarget);
        }
        mViewTargetSet.clear();
        v.removeOnAttachStateChangeListener(this);
        v.setTag(R.id.tag_drawable_callback, null);
    }

    /**
     * Implements {@link Drawable.Callback} in order to
     * redraw the TextView which contains the animated GIFs.
     */
    @Override
    public void invalidateDrawable(Drawable who) {
        mTextView.invalidate();
    }

    @Override
    public void scheduleDrawable(Drawable who, Runnable what, long when) {}

    @Override
    public void unscheduleDrawable(Drawable who, Runnable what) {}

    /**
     * Target that resizes the loaded drawable to fit the TextView's width and
     * installs it into the placeholder {@link UrlDrawable}.
     */
    private static final class ImageGetterViewTarget extends ViewTarget<TextView, GlideDrawable> {

        private final UrlDrawable mDrawable;
        private Request mRequest;

        private ImageGetterViewTarget(TextView view, UrlDrawable drawable) {
            super(view);
            this.mDrawable = drawable;
        }

        @Override
        public void onResourceReady(GlideDrawable resource, GlideAnimation<? super GlideDrawable> glideAnimation) {
            // resize this drawable's width & height to fit its container
            final int resWidth = resource.getIntrinsicWidth();
            final int resHeight = resource.getIntrinsicHeight();
            int width, height;
            TextView textView = getView();
            // NOTE(review): if the TextView has not been laid out yet, getWidth() is 0
            // and the image collapses to zero height — TODO confirm callers always
            // attach after layout.
            if (textView.getWidth() >= resWidth) {
                width = resWidth;
                height = resHeight;
            } else {
                // scale down proportionally to the TextView's width
                width = textView.getWidth();
                height = (int) (resHeight / ((float) resWidth / width));
            }
            Rect rect = new Rect(0, 0, width, height);
            resource.setBounds(rect);
            mDrawable.setBounds(rect);
            mDrawable.setDrawable(resource);

            if (resource.isAnimated()) {
                Drawable.Callback callback = (Drawable.Callback) textView.getTag(
                        R.id.tag_drawable_callback);
                // note: not sure whether callback would be null sometimes
                // when this Drawable' host view is detached from View
                if (callback != null) {
                    // set callback to drawable in order to
                    // signal its container to be redrawn
                    // to show the animated GIF
                    mDrawable.setCallback(callback);
                    resource.setLoopCount(GlideDrawable.LOOP_FOREVER);
                    resource.start();
                }
            } else {
                textView.setTag(R.id.tag_drawable_callback, null);
            }

            // see http://stackoverflow.com/questions/7870312/android-imagegetter-images-overlapping-text#comment-22289166
            textView.setText(textView.getText());
        }

        /**
         * See https://github.com/bumptech/glide/issues/550#issuecomment-123693051
         *
         * @see com.bumptech.glide.GenericRequestBuilder#into(com.bumptech.glide.request.target.Target)
         */
        @Override
        public Request getRequest() {
            return mRequest;
        }

        @Override
        public void setRequest(Request request) {
            this.mRequest = request;
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.phoenix.end2end;
import static org.apache.phoenix.util.TestUtil.ATABLE_NAME;
import static org.apache.phoenix.util.TestUtil.A_VALUE;
import static org.apache.phoenix.util.TestUtil.B_VALUE;
import static org.apache.phoenix.util.TestUtil.C_VALUE;
import static org.apache.phoenix.util.TestUtil.E_VALUE;
import static org.apache.phoenix.util.TestUtil.MILLIS_IN_DAY;
import static org.apache.phoenix.util.TestUtil.ROW1;
import static org.apache.phoenix.util.TestUtil.ROW2;
import static org.apache.phoenix.util.TestUtil.ROW3;
import static org.apache.phoenix.util.TestUtil.ROW4;
import static org.apache.phoenix.util.TestUtil.ROW5;
import static org.apache.phoenix.util.TestUtil.ROW6;
import static org.apache.phoenix.util.TestUtil.ROW7;
import static org.apache.phoenix.util.TestUtil.ROW8;
import static org.apache.phoenix.util.TestUtil.ROW9;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.math.BigDecimal;
import java.sql.Connection;
import java.sql.Date;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.Statement;
import java.sql.Types;
import java.text.Format;
import java.util.Calendar;
import org.apache.phoenix.util.DateUtil;
import org.junit.Test;
public class DateTimeIT extends BaseHBaseManagedTimeIT {
protected Connection conn;
protected Date date;
protected static final String tenantId = getOrganizationId();
public DateTimeIT() throws Exception {
super();
conn = DriverManager.getConnection(getUrl());
date = new Date(System.currentTimeMillis());
initAtable();
}
protected void initAtable() throws Exception {
ensureTableCreated(getUrl(), ATABLE_NAME, (byte[][])null);
PreparedStatement stmt = conn.prepareStatement(
"upsert into " + ATABLE_NAME +
"(" +
" ORGANIZATION_ID, " +
" ENTITY_ID, " +
" A_STRING, " +
" B_STRING, " +
" A_INTEGER, " +
" A_DATE, " +
" X_DECIMAL, " +
" X_LONG, " +
" X_INTEGER," +
" Y_INTEGER," +
" A_BYTE," +
" A_SHORT," +
" A_FLOAT," +
" A_DOUBLE," +
" A_UNSIGNED_FLOAT," +
" A_UNSIGNED_DOUBLE)" +
"VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)");
stmt.setString(1, tenantId);
stmt.setString(2, ROW1);
stmt.setString(3, A_VALUE);
stmt.setString(4, B_VALUE);
stmt.setInt(5, 1);
stmt.setDate(6, date);
stmt.setBigDecimal(7, null);
stmt.setNull(8, Types.BIGINT);
stmt.setNull(9, Types.INTEGER);
stmt.setNull(10, Types.INTEGER);
stmt.setByte(11, (byte)1);
stmt.setShort(12, (short) 128);
stmt.setFloat(13, 0.01f);
stmt.setDouble(14, 0.0001);
stmt.setFloat(15, 0.01f);
stmt.setDouble(16, 0.0001);
stmt.execute();
stmt.setString(1, tenantId);
stmt.setString(2, ROW2);
stmt.setString(3, A_VALUE);
stmt.setString(4, C_VALUE);
stmt.setInt(5, 2);
stmt.setDate(6, date == null ? null : new Date(date.getTime() + MILLIS_IN_DAY * 1));
stmt.setBigDecimal(7, null);
stmt.setNull(8, Types.BIGINT);
stmt.setNull(9, Types.INTEGER);
stmt.setNull(10, Types.INTEGER);
stmt.setByte(11, (byte)2);
stmt.setShort(12, (short) 129);
stmt.setFloat(13, 0.02f);
stmt.setDouble(14, 0.0002);
stmt.setFloat(15, 0.02f);
stmt.setDouble(16, 0.0002);
stmt.execute();
stmt.setString(1, tenantId);
stmt.setString(2, ROW3);
stmt.setString(3, A_VALUE);
stmt.setString(4, E_VALUE);
stmt.setInt(5, 3);
stmt.setDate(6, date == null ? null : new Date(date.getTime() + MILLIS_IN_DAY * 2));
stmt.setBigDecimal(7, null);
stmt.setNull(8, Types.BIGINT);
stmt.setNull(9, Types.INTEGER);
stmt.setNull(10, Types.INTEGER);
stmt.setByte(11, (byte)3);
stmt.setShort(12, (short) 130);
stmt.setFloat(13, 0.03f);
stmt.setDouble(14, 0.0003);
stmt.setFloat(15, 0.03f);
stmt.setDouble(16, 0.0003);
stmt.execute();
stmt.setString(1, tenantId);
stmt.setString(2, ROW4);
stmt.setString(3, A_VALUE);
stmt.setString(4, B_VALUE);
stmt.setInt(5, 4);
stmt.setDate(6, date == null ? null : date);
stmt.setBigDecimal(7, null);
stmt.setNull(8, Types.BIGINT);
stmt.setNull(9, Types.INTEGER);
stmt.setNull(10, Types.INTEGER);
stmt.setByte(11, (byte)4);
stmt.setShort(12, (short) 131);
stmt.setFloat(13, 0.04f);
stmt.setDouble(14, 0.0004);
stmt.setFloat(15, 0.04f);
stmt.setDouble(16, 0.0004);
stmt.execute();
stmt.setString(1, tenantId);
stmt.setString(2, ROW5);
stmt.setString(3, B_VALUE);
stmt.setString(4, C_VALUE);
stmt.setInt(5, 5);
stmt.setDate(6, date == null ? null : new Date(date.getTime() + MILLIS_IN_DAY * 1));
stmt.setBigDecimal(7, null);
stmt.setNull(8, Types.BIGINT);
stmt.setNull(9, Types.INTEGER);
stmt.setNull(10, Types.INTEGER);
stmt.setByte(11, (byte)5);
stmt.setShort(12, (short) 132);
stmt.setFloat(13, 0.05f);
stmt.setDouble(14, 0.0005);
stmt.setFloat(15, 0.05f);
stmt.setDouble(16, 0.0005);
stmt.execute();
stmt.setString(1, tenantId);
stmt.setString(2, ROW6);
stmt.setString(3, B_VALUE);
stmt.setString(4, E_VALUE);
stmt.setInt(5, 6);
stmt.setDate(6, date == null ? null : new Date(date.getTime() + MILLIS_IN_DAY * 2));
stmt.setBigDecimal(7, null);
stmt.setNull(8, Types.BIGINT);
stmt.setNull(9, Types.INTEGER);
stmt.setNull(10, Types.INTEGER);
stmt.setByte(11, (byte)6);
stmt.setShort(12, (short) 133);
stmt.setFloat(13, 0.06f);
stmt.setDouble(14, 0.0006);
stmt.setFloat(15, 0.06f);
stmt.setDouble(16, 0.0006);
stmt.execute();
stmt.setString(1, tenantId);
stmt.setString(2, ROW7);
stmt.setString(3, B_VALUE);
stmt.setString(4, B_VALUE);
stmt.setInt(5, 7);
stmt.setDate(6, date == null ? null : date);
stmt.setBigDecimal(7, BigDecimal.valueOf(0.1));
stmt.setLong(8, 5L);
stmt.setInt(9, 5);
stmt.setNull(10, Types.INTEGER);
stmt.setByte(11, (byte)7);
stmt.setShort(12, (short) 134);
stmt.setFloat(13, 0.07f);
stmt.setDouble(14, 0.0007);
stmt.setFloat(15, 0.07f);
stmt.setDouble(16, 0.0007);
stmt.execute();
stmt.setString(1, tenantId);
stmt.setString(2, ROW8);
stmt.setString(3, B_VALUE);
stmt.setString(4, C_VALUE);
stmt.setInt(5, 8);
stmt.setDate(6, date == null ? null : new Date(date.getTime() + MILLIS_IN_DAY * 1));
stmt.setBigDecimal(7, BigDecimal.valueOf(3.9));
long l = Integer.MIN_VALUE - 1L;
assert(l < Integer.MIN_VALUE);
stmt.setLong(8, l);
stmt.setInt(9, 4);
stmt.setNull(10, Types.INTEGER);
stmt.setByte(11, (byte)8);
stmt.setShort(12, (short) 135);
stmt.setFloat(13, 0.08f);
stmt.setDouble(14, 0.0008);
stmt.setFloat(15, 0.08f);
stmt.setDouble(16, 0.0008);
stmt.execute();
stmt.setString(1, tenantId);
stmt.setString(2, ROW9);
stmt.setString(3, C_VALUE);
stmt.setString(4, E_VALUE);
stmt.setInt(5, 9);
stmt.setDate(6, date == null ? null : new Date(date.getTime() + MILLIS_IN_DAY * 2));
stmt.setBigDecimal(7, BigDecimal.valueOf(3.3));
l = Integer.MAX_VALUE + 1L;
assert(l > Integer.MAX_VALUE);
stmt.setLong(8, l);
stmt.setInt(9, 3);
stmt.setInt(10, 300);
stmt.setByte(11, (byte)9);
stmt.setShort(12, (short) 0);
stmt.setFloat(13, 0.09f);
stmt.setDouble(14, 0.0009);
stmt.setFloat(15, 0.09f);
stmt.setDouble(16, 0.0009);
stmt.execute();
conn.commit();
}
@Test
public void selectBetweenDates() throws Exception {
Format formatter = DateUtil.getDateFormatter("yyyy-MM-dd");
Calendar cal = Calendar.getInstance();
cal.setTime(date);
java.util.Date dateToday = cal.getTime();
cal.add(Calendar.DAY_OF_YEAR, 1);
java.util.Date dateTomorrow = cal.getTime();
String today = formatter.format(dateToday);
String tomorrow = formatter.format(dateTomorrow);
String query = "SELECT entity_id FROM ATABLE WHERE a_integer < 4 AND a_date BETWEEN date '" + today + "' AND date '" + tomorrow + "' ";
Statement statement = conn.createStatement();
ResultSet rs = statement.executeQuery(query);
assertTrue(rs.next());
assertEquals(ROW1, rs.getString(1));
assertFalse(rs.next());
}
@Test
public void testSelectLiteralDate() throws Exception {
String s = DateUtil.DEFAULT_DATE_FORMATTER.format(date);
String query = "SELECT DATE '" + s + "' FROM ATABLE";
Statement statement = conn.createStatement();
ResultSet rs = statement.executeQuery(query);
assertTrue(rs.next());
assertEquals(date, rs.getDate(1));
}
@Test
public void testSelectLiteralDateCompare() throws Exception {
String query = "SELECT (DATE '" + date + "' = DATE '" + date + "') FROM ATABLE";
Statement statement = conn.createStatement();
ResultSet rs = statement.executeQuery(query);
assertTrue(rs.next());
assertTrue(rs.getBoolean(1));
}
@Test
public void testSelectWhereDatesEqual() throws Exception {
String query = "SELECT entity_id FROM ATABLE WHERE a_integer < 4 AND DATE '" + date + "' = DATE '" + date + "'";
Statement statement = conn.createStatement();
ResultSet rs = statement.executeQuery(query);
assertTrue(rs.next());
}
@Test
public void testSelectWhereDateAndToDateEqual() throws Exception {
String query = "SELECT entity_id FROM ATABLE WHERE a_integer < 4 AND DATE '" + date + "' = TO_DATE ('" + date + "')";
Statement statement = conn.createStatement();
ResultSet rs = statement.executeQuery(query);
assertTrue(rs.next());
}
@Test
public void testSelectWhereDateAndTimestampEqual() throws Exception {
final String timestamp = "2012-09-08 07:08:23";
String query = "SELECT entity_id FROM ATABLE WHERE a_integer < 4 AND DATE '" + timestamp + "' = TIMESTAMP '" + timestamp + "'";
Statement statement = conn.createStatement();
ResultSet rs = statement.executeQuery(query);
assertTrue(rs.next());
}
@Test
public void testSelectWhereSameDatesUnequal() throws Exception {
String query = "SELECT entity_id FROM ATABLE WHERE a_integer < 4 AND DATE '" + date + "' > DATE '" + date + "'";
Statement statement = conn.createStatement();
ResultSet rs = statement.executeQuery(query);
assertFalse(rs.next());
}
@Test
public void testDateInList() throws Exception {
String query = "SELECT entity_id FROM ATABLE WHERE a_date IN (?,?) AND a_integer < 4";
PreparedStatement statement = conn.prepareStatement(query);
statement.setDate(1, new Date(0));
statement.setDate(2, date);
ResultSet rs = statement.executeQuery();
assertTrue(rs.next());
assertEquals(ROW1, rs.getString(1));
assertFalse(rs.next());
}
@Test
public void testDateBetweenLiterals() throws Exception {
Format formatter = DateUtil.getDateFormatter("yyyy-MM-dd");
Calendar cal = Calendar.getInstance();
cal.setTime(date);
java.util.Date dateToday = cal.getTime();
cal.add(Calendar.DAY_OF_YEAR, 1);
java.util.Date dateTomorrow = cal.getTime();
String today = formatter.format(dateToday);
String tomorrow = formatter.format(dateTomorrow);
String query = "SELECT entity_id FROM ATABLE WHERE a_integer < 4 AND a_date BETWEEN date '" + today + "' AND date '" + tomorrow + "' ";
Statement statement = conn.createStatement();
ResultSet rs = statement.executeQuery(query);
assertTrue(rs.next());
assertEquals(ROW1, rs.getString(1));
assertFalse(rs.next());
}
}
| |
/* ========================================================================
* PlantUML : a free UML diagram generator
* ========================================================================
*
* Project Info: http://plantuml.com
*
* This file is part of Smetana.
* Smetana is a partial translation of Graphviz/Dot sources from C to Java.
*
* (C) Copyright 2009-2017, Arnaud Roques
*
* This translation is distributed under the same Licence as the original C program:
*
*************************************************************************
* Copyright (c) 2011 AT&T Intellectual Property
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors: See CVS logs. Details at http://www.graphviz.org/
*************************************************************************
*
* THE ACCOMPANYING PROGRAM IS PROVIDED UNDER THE TERMS OF THIS ECLIPSE PUBLIC
* LICENSE ("AGREEMENT"). [Eclipse Public License - v 1.0]
*
* ANY USE, REPRODUCTION OR DISTRIBUTION OF THE PROGRAM CONSTITUTES
* RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT.
*
* You may obtain a copy of the License at
*
* http://www.eclipse.org/legal/epl-v10.html
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package gen.lib.cgraph;
import static gen.lib.cgraph.mem__c.agalloc;
import static gen.lib.cgraph.utils__c.agdtdelete;
import static gen.lib.cgraph.utils__c.agdtopen;
import static smetana.core.JUtils.EQ;
import static smetana.core.JUtils.sizeof;
import static smetana.core.JUtilsDebug.ENTERING;
import static smetana.core.JUtilsDebug.LEAVING;
import static smetana.core.Macro.UNSUPPORTED;
import h.Agraph_s;
import h._dt_s;
import h.refstr_t;
import smetana.core.ACCESS;
import smetana.core.CString;
import smetana.core.OFFSET;
import smetana.core.STARSTAR;
import smetana.core.Z;
import smetana.core.__struct__;
import smetana.core.size_t;
/**
 * Java port of graphviz cgraph's refstr.c: reference-counted, per-graph interned
 * strings. Each graph (or the process-wide default when {@code g == null}) owns a
 * dictionary mapping string contents to a {@code refstr_t} carrying a refcount.
 * The top bit of the refcount ({@code HTML_BIT}) marks HTML-like labels; the
 * remaining bits ({@code CNT_BITS}) hold the actual count.
 *
 * Machine-translated from C; the generated marker comments below are kept for
 * traceability against the original source.
 */
public class refstr__c {
//1 9k44uhd5foylaeoekf3llonjq
// extern Dtmethod_t* Dtset
//1 1ahfywsmzcpcig2oxm7pt9ihj
// extern Dtmethod_t* Dtbag
//1 anhghfj3k7dmkudy2n7rvt31v
// extern Dtmethod_t* Dtoset
//1 5l6oj1ux946zjwvir94ykejbc
// extern Dtmethod_t* Dtobag
//1 2wtf222ak6cui8cfjnw6w377z
// extern Dtmethod_t* Dtlist
//1 d1s1s6ibtcsmst88e3057u9r7
// extern Dtmethod_t* Dtstack
//1 axa7mflo824p6fspjn1rdk0mt
// extern Dtmethod_t* Dtqueue
//1 ega812utobm4xx9oa9w9ayij6
// extern Dtmethod_t* Dtdeque
//1 cyfr996ur43045jv1tjbelzmj
// extern Dtmethod_t* Dtorder
//1 wlofoiftbjgrrabzb2brkycg
// extern Dtmethod_t* Dttree
//1 12bds94t7voj7ulwpcvgf6agr
// extern Dtmethod_t* Dthash
//1 9lqknzty480cy7zsubmabkk8h
// extern Dtmethod_t _Dttree
//1 bvn6zkbcp8vjdhkccqo1xrkrb
// extern Dtmethod_t _Dthash
//1 9lidhtd6nsmmv3e7vjv9e10gw
// extern Dtmethod_t _Dtlist
//1 34ujfamjxo7xn89u90oh2k6f8
// extern Dtmethod_t _Dtqueue
//1 3jy4aceckzkdv950h89p4wjc8
// extern Dtmethod_t _Dtstack
//1 8dfqgf3u1v830qzcjqh9o8ha7
// extern Agmemdisc_t AgMemDisc
//1 18k2oh2t6llfsdc5x0wlcnby8
// extern Agiddisc_t AgIdDisc
//1 a4r7hi80gdxtsv4hdoqpyiivn
// extern Agiodisc_t AgIoDisc
//1 bnzt5syjb7mgeru19114vd6xx
// extern Agdisc_t AgDefaultDisc
//1 35y2gbegsdjilegaribes00mg
// extern Agdesc_t Agdirected, Agstrictdirected, Agundirected, Agstrictundirected
//1 c2rygslq6bcuka3awmvy2b3ow
// typedef Agsubnode_t Agnoderef_t
//1 xam6yv0dcsx57dtg44igpbzn
// typedef Dtlink_t Agedgeref_t
//1 6ayavpu39aihwyojkx093pcy3
// extern Agraph_t *Ag_G_global
//1 871mxtg9l6ffpxdl9kniwusf7
// extern char *AgDataRecName
//1 c0o2kmml0tn6hftuwo0u4shwd
// extern Dtdisc_t Ag_subnode_id_disc
//1 8k15pyu256unm2kpd9zf5pf7k
// extern Dtdisc_t Ag_subnode_seq_disc
//1 e3d820y06gpeusn6atgmj8bzd
// extern Dtdisc_t Ag_mainedge_id_disc
//1 cbr0772spix9h1aw7h5v7dv9j
// extern Dtdisc_t Ag_subedge_id_disc
//1 akd0c3v0j7m2npxcb9acit1fa
// extern Dtdisc_t Ag_mainedge_seq_disc
//1 12d8la07351ww7vwfzucjst8m
// extern Dtdisc_t Ag_subedge_seq_disc
//1 29eokk7v88e62g8o6lizmo967
// extern Dtdisc_t Ag_subgraph_id_disc
//1 4xd9cbgy6hk5g6nhjcbpzkx14
// extern Agcbdisc_t AgAttrdisc
//1 8bj6ivnd4go7wt4pvzqgk8mlr
// static unsigned long HTML_BIT
//static public int HTML_BIT;
//1 dqn77l82bfu071bv703e77jmg
// static unsigned long CNT_BITS
//static public int CNT_BITS;
//1 boyxdmkhstn4i64pqf6sv1mi7
// static Dtdisc_t Refstrdisc =
/*static public final __struct__<_dtdisc_s> Refstrdisc = __struct__.from(_dtdisc_s.class);
static {
Refstrdisc.setInt("key", OFFSET.create(refstr_t.class, "s").toInt()); // *s is the third field in refstr_t
Refstrdisc.setInt("size", -1);
Refstrdisc.setInt("link", 0);
Refstrdisc.setPtr("makef", null);
Refstrdisc.setPtr("freef", function(utils__c.class, "agdictobjfree"));
Refstrdisc.setPtr("comparf", null);
Refstrdisc.setPtr("hashf", null);
Refstrdisc.setPtr("memoryf", function(utils__c.class, "agdictobjmem"));
Refstrdisc.setPtr("eventf", null);
}*/
//1 2e0tdcdyjc9zq54xt1nzgwvn3
// static Dict_t *Refdict_default
//static public _dt_s Refdict_default;
//3 f1nwss2xoaub1hyord232ugoj
// static Dict_t *refdict(Agraph_t * g)
/**
 * Returns the string dictionary of {@code g}, lazily creating it on first use.
 * When {@code g} is null the process-wide default dictionary
 * ({@code Z.z().Refdict_default}) is used instead, mirroring the C static.
 * Also (re)initialises the HTML_BIT / CNT_BITS masks kept in {@link Z}.
 */
public static _dt_s refdict(final Agraph_s g) {
ENTERING("f1nwss2xoaub1hyord232ugoj","refdict");
try {
	// STARSTAR emulates the C "Dict_t **dictref" double indirection: the
	// ACCESS callbacks read/write either the graph's strdict or the global one.
	STARSTAR<_dt_s> dictref;
	if (g != null)
		dictref = STARSTAR.amp(new ACCESS<_dt_s>() {
			public _dt_s get() {
				return (_dt_s) g.getPtr("clos").getPtr("strdict");
			}
			public void set(_dt_s obj) {
				g.getPtr("clos").setPtr("strdict", obj);
			}
		});
	else
		dictref = STARSTAR.amp(new ACCESS<_dt_s>() {
			public _dt_s get() {
				return Z.z().Refdict_default;
			}
			public void set(_dt_s obj) {
				Z.z().Refdict_default = obj;
			}
		});
	if (dictref.getMe() == null) {
		dictref.setMe(agdtopen(g, Z.z().Refstrdisc.amp(), Z.z().Dttree));
	}
	// HTML_BIT is the sign bit of the int refcount; CNT_BITS masks the rest.
	Z.z().HTML_BIT = 1 << 31;
	Z.z().CNT_BITS = ~Z.z().HTML_BIT;
	return dictref.getMe();
} finally {
	LEAVING("f1nwss2xoaub1hyord232ugoj","refdict");
}
}
//3 9aajykl8nuymg60zukycquawa
// int agstrclose(Agraph_t * g)
// Not yet translated: closes the string dictionary of a graph.
public static Object agstrclose(Object... arg) {
UNSUPPORTED("c2l353zz5jt7jlmbhjbbt3m7v"); // int agstrclose(Agraph_t * g)
UNSUPPORTED("erg9i1970wdri39osu8hx2a6e"); // {
UNSUPPORTED("208rcf0f70q2wxwsa6po42oqq"); // return agdtclose(g, refdict(g));
UNSUPPORTED("c24nfmv9i7o5eoqaymbibp7m7"); // }
throw new UnsupportedOperationException();
}
//3 9ts4wqhw2xafdv3tlcilneewq
// static refstr_t *refsymbind(Dict_t * strdict, char *s)
/**
 * Looks up the interned entry for {@code s} in {@code strdict}; returns null
 * when the string has not been interned. Pure lookup — no refcount change.
 */
public static refstr_t refsymbind(_dt_s strdict, CString s) {
ENTERING("9ts4wqhw2xafdv3tlcilneewq","refsymbind");
try {
	final __struct__<refstr_t> key = __struct__.from(refstr_t.class);
	refstr_t r;
	// key.setPtr("s", s.duplicate());
	key.setPtr("s", s);
	// 0000004 is the cdt DT_SEARCH flag from the original C source (octal).
	r = (refstr_t) strdict.call("searchf", strdict, key.amp(), 0000004);
	return r;
} finally {
	LEAVING("9ts4wqhw2xafdv3tlcilneewq","refsymbind");
}
}
//3 1scntgo71z7c2v15zapiyw59w
// static char *refstrbind(Dict_t * strdict, char *s)
/** Returns the canonical stored string for {@code s}, or null if not interned. */
public static CString refstrbind(_dt_s strdict, CString s) {
ENTERING("1scntgo71z7c2v15zapiyw59w","refstrbind");
try {
	refstr_t r;
	r = refsymbind(strdict, s);
	if (r!=null)
		return r.getCString("s");
	else
		return null;
} finally {
	LEAVING("1scntgo71z7c2v15zapiyw59w","refstrbind");
}
}
//3 bb8aqjshw3ecae2lsmhigd0mc
// char *agstrbind(Agraph_t * g, char *s)
/** Public lookup: canonical interned string of {@code s} in graph {@code g}'s dictionary. */
public static CString agstrbind(Agraph_s g, CString s) {
ENTERING("bb8aqjshw3ecae2lsmhigd0mc","agstrbind");
try {
	return refstrbind(refdict(g), s);
} finally {
	LEAVING("bb8aqjshw3ecae2lsmhigd0mc","agstrbind");
}
}
//3 86oznromwhn9qeym0k7pih73q
// char *agstrdup(Agraph_t * g, char *s)
/**
 * Interns {@code s} in graph {@code g}'s dictionary and returns the shared copy.
 * Bumps the refcount when already present; otherwise allocates a new refstr_t
 * (via agalloc for graph-owned memory, plain malloc otherwise) with refcnt 1.
 * Returns null for a null input.
 */
public static CString agstrdup(Agraph_s g, CString s) {
ENTERING("86oznromwhn9qeym0k7pih73q","agstrdup");
try {
	refstr_t r;
	_dt_s strdict;
	size_t sz;
	if (s == null)
		return null;
	strdict = refdict(g);
	r = (refstr_t) refsymbind(strdict, s);
	if (r!=null) r = (refstr_t) r.castTo(refstr_t.class);
	if (r!=null)
		r.setInt("refcnt", r.getInt("refcnt")+1);
	else {
		sz = sizeof(refstr_t.class).plus(s.length());
		if (g!=null)
			r = (refstr_t) agalloc(g, sz);
		else
			r = (refstr_t) sz.malloc();
		r.setInt("refcnt", 1);
		r.setPtr("s", s.duplicate());
		// strcpy(r->store, s);
		// r->s = r->store;
		// 0000001 is the cdt DT_INSERT flag (octal) from the C source.
		strdict.call("searchf", strdict,r,0000001);
	}
	return r.getCString("s");
} finally {
	LEAVING("86oznromwhn9qeym0k7pih73q","agstrdup");
}
}
//3 1ovgpnv6r8ru6iz51ic91zzal
// char *agstrdup_html(Agraph_t * g, char *s)
// Not yet translated: interning variant that additionally sets HTML_BIT.
public static Object agstrdup_html(Object... arg) {
UNSUPPORTED("6679vrn0j0vkzernsn2rlottw"); // char *agstrdup_html(Agraph_t * g, char *s)
UNSUPPORTED("erg9i1970wdri39osu8hx2a6e"); // {
UNSUPPORTED("1uvxutp09oluiacpgn0f76bgu"); // refstr_t *r;
UNSUPPORTED("czgqod5ni1s5av81qa3n0ghgr"); // Dict_t *strdict;
UNSUPPORTED("55x2wgzchv0157f4g74693oaq"); // size_t sz;
UNSUPPORTED("a5abfeqtsa4i5x739edpwuxin"); // if (s == ((char *)0))
UNSUPPORTED("xp8okoaicybpovkenntpd857"); // return ((char *)0);
UNSUPPORTED("bo3fdoot7ldevx250qweitj6z"); // strdict = refdict(g);
UNSUPPORTED("12vt0u4w3q0jht9f9vsaybntn"); // r = refsymbind(strdict, s);
UNSUPPORTED("67y4tszu7dmeves31gr9ydmpi"); // if (r)
UNSUPPORTED("5gybhadmtbc77f5wf9adyemj7"); // r->refcnt++;
UNSUPPORTED("1nyzbeonram6636b1w955bypn"); // else {
UNSUPPORTED("9llv1u64vbj6q8loctnrowtm5"); // sz = sizeof(refstr_t) + strlen(s);
UNSUPPORTED("7tmc6a514rv2k24wg5o8qpvyp"); // if (g)
UNSUPPORTED("asjj8bj1b02f70rfr41ayipxy"); // r = (refstr_t *) agalloc(g, sz);
UNSUPPORTED("9352ql3e58qs4fzapgjfrms2s"); // else
UNSUPPORTED("bp5rr6mkh94826cbgdwglvpk9"); // r = (refstr_t *) malloc(sz);
UNSUPPORTED("6sl9ejza97inawt8uprd120h6"); // r->refcnt = 1 | HTML_BIT;
UNSUPPORTED("dadamob0ot3fpofdm1ey34srj"); // strcpy(r->store, s);
UNSUPPORTED("1cyhds1lm0ee8rtp7k7h5cqfw"); // r->s = r->store;
UNSUPPORTED("b2zaf5uj8gofpyc40hl0ziymh"); // (*(((Dt_t*)(strdict))->searchf))((strdict),(void*)(r),0000001);
UNSUPPORTED("dvgyxsnyeqqnyzq696k3vskib"); // }
UNSUPPORTED("lxjgfic7zk869xczsgazw3sx"); // return r->s;
UNSUPPORTED("c24nfmv9i7o5eoqaymbibp7m7"); // }
throw new UnsupportedOperationException();
}
//3 enhn1ajfo86a19dgm4b8lduz7
// int agstrfree(Agraph_t * g, char *s)
/**
 * Drops one reference to interned string {@code s} in graph {@code g}.
 * When the count bits reach zero the entry is removed from the dictionary.
 * Returns 0 on success, -1 when {@code s} is null or not the interned pointer.
 */
public static int agstrfree(Agraph_s g, CString s) {
ENTERING("enhn1ajfo86a19dgm4b8lduz7","agstrfree");
try {
	refstr_t r;
	_dt_s strdict;
	if (s == null)
		return -1;
	strdict = refdict(g);
	r = refsymbind(strdict, s);
	if (r!=null) r = (refstr_t) r.castTo(refstr_t.class);
	// EQ: only free when the caller passed the canonical interned pointer.
	if (r!=null && (EQ(r.getPtr("s"), s))) {
		r.setInt("refcnt", r.getInt("refcnt")-1);
		// C original: if ((r->refcnt & CNT_BITS) == 0). A previous translation
		// used logical AND (refcnt != 0 && CNT_BITS != 0), which ignored the
		// HTML flag bit; the bitwise mask below is the faithful form and is
		// identical for non-HTML strings.
		if ((r.getInt("refcnt") & Z.z().CNT_BITS) == 0) {
			agdtdelete(g, strdict, r);
			/*
			if (g) agfree(g,r);
			else free(r);
			*/
		}
	}
	if (r == null)
		return -1;
	return 0;
} finally {
	LEAVING("enhn1ajfo86a19dgm4b8lduz7","agstrfree");
}
}
//3 3em4wzjnpajd5d3igb90l3rml
// int aghtmlstr(char *s)
/**
 * Tests whether interned string {@code s} is an HTML-like label: recovers the
 * enclosing refstr_t by subtracting the offset of its "s" field and masks the
 * refcount with HTML_BIT. Nonzero means HTML.
 */
public static int aghtmlstr(CString s) {
ENTERING("3em4wzjnpajd5d3igb90l3rml","aghtmlstr");
try {
	refstr_t key;
	if (s == null)
		return 0;
	key = (refstr_t) s.addVirtualBytes(-OFFSET.create(refstr_t.class, "s").toInt());
	return (key.getInt("refcnt") & Z.z().HTML_BIT);
} finally {
	LEAVING("3em4wzjnpajd5d3igb90l3rml","aghtmlstr");
}
}
//3 ap2ebebypq6vmwle0hicv6tmj
// void agmarkhtmlstr(char *s)
// Not yet translated: sets HTML_BIT on an already-interned string.
public static Object agmarkhtmlstr(Object... arg) {
UNSUPPORTED("8oc24oz62ej815sjwuwuj9bmt"); // void agmarkhtmlstr(char *s)
UNSUPPORTED("erg9i1970wdri39osu8hx2a6e"); // {
UNSUPPORTED("164ww6fcxh6v2wmxj0v8aqviy"); // refstr_t *key;
UNSUPPORTED("8quozj18vjguewxdpv9w14yjn"); // if (s == NULL)
UNSUPPORTED("a7fgam0j0jm7bar0mblsv3no4"); // return;
UNSUPPORTED("9cmt4vbkm95fqftevdqyfvslr"); // key = (refstr_t *) (s - ((int)(&(((refstr_t*)0)->store[0]))));
UNSUPPORTED("68mcf5kr6xw538zkdk8b50aeb"); // key->refcnt |= HTML_BIT;
UNSUPPORTED("c24nfmv9i7o5eoqaymbibp7m7"); // }
throw new UnsupportedOperationException();
}
}
| |
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.app.engine;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import javax.sql.DataSource;
import org.apache.ibatis.session.Configuration;
import org.apache.ibatis.type.JdbcType;
import org.flowable.app.api.AppEngineConfigurationApi;
import org.flowable.app.api.AppManagementService;
import org.flowable.app.api.AppRepositoryService;
import org.flowable.app.api.repository.AppResourceConverter;
import org.flowable.app.engine.impl.AppEngineImpl;
import org.flowable.app.engine.impl.AppManagementServiceImpl;
import org.flowable.app.engine.impl.AppRepositoryServiceImpl;
import org.flowable.app.engine.impl.cfg.StandaloneInMemAppEngineConfiguration;
import org.flowable.app.engine.impl.cmd.SchemaOperationsAppEngineBuild;
import org.flowable.app.engine.impl.db.AppDbSchemaManager;
import org.flowable.app.engine.impl.db.EntityDependencyOrder;
import org.flowable.app.engine.impl.deployer.AppDeployer;
import org.flowable.app.engine.impl.deployer.AppDeploymentManager;
import org.flowable.app.engine.impl.deployer.AppResourceConverterImpl;
import org.flowable.app.engine.impl.el.AppExpressionManager;
import org.flowable.app.engine.impl.interceptor.AppCommandInvoker;
import org.flowable.app.engine.impl.persistence.entity.AppDefinitionEntityManager;
import org.flowable.app.engine.impl.persistence.entity.AppDefinitionEntityManagerImpl;
import org.flowable.app.engine.impl.persistence.entity.AppDeploymentEntityManager;
import org.flowable.app.engine.impl.persistence.entity.AppDeploymentEntityManagerImpl;
import org.flowable.app.engine.impl.persistence.entity.AppResourceEntityManager;
import org.flowable.app.engine.impl.persistence.entity.AppResourceEntityManagerImpl;
import org.flowable.app.engine.impl.persistence.entity.data.AppDefinitionDataManager;
import org.flowable.app.engine.impl.persistence.entity.data.AppDeploymentDataManager;
import org.flowable.app.engine.impl.persistence.entity.data.AppResourceDataManager;
import org.flowable.app.engine.impl.persistence.entity.data.TableDataManager;
import org.flowable.app.engine.impl.persistence.entity.data.impl.MybatisAppDefinitionDataManager;
import org.flowable.app.engine.impl.persistence.entity.data.impl.MybatisAppDeploymentDataManager;
import org.flowable.app.engine.impl.persistence.entity.data.impl.MybatisResourceDataManager;
import org.flowable.app.engine.impl.persistence.entity.data.impl.TableDataManagerImpl;
import org.flowable.app.engine.impl.persistence.entity.deploy.AppDefinitionCacheEntry;
import org.flowable.common.engine.impl.AbstractEngineConfiguration;
import org.flowable.common.engine.impl.EngineConfigurator;
import org.flowable.common.engine.impl.EngineDeployer;
import org.flowable.common.engine.impl.HasExpressionManagerEngineConfiguration;
import org.flowable.common.engine.impl.calendar.BusinessCalendarManager;
import org.flowable.common.engine.impl.calendar.CycleBusinessCalendar;
import org.flowable.common.engine.impl.calendar.DueDateBusinessCalendar;
import org.flowable.common.engine.impl.calendar.DurationBusinessCalendar;
import org.flowable.common.engine.impl.calendar.MapBusinessCalendarManager;
import org.flowable.common.engine.impl.cfg.BeansConfigurationHelper;
import org.flowable.common.engine.impl.db.SchemaManager;
import org.flowable.common.engine.impl.el.ExpressionManager;
import org.flowable.common.engine.impl.interceptor.CommandInterceptor;
import org.flowable.common.engine.impl.interceptor.EngineConfigurationConstants;
import org.flowable.common.engine.impl.persistence.deploy.DefaultDeploymentCache;
import org.flowable.common.engine.impl.persistence.deploy.DeploymentCache;
import org.flowable.identitylink.service.IdentityLinkServiceConfiguration;
import org.flowable.identitylink.service.impl.db.IdentityLinkDbSchemaManager;
import org.flowable.idm.api.IdmEngineConfigurationApi;
import org.flowable.idm.api.IdmIdentityService;
import org.flowable.idm.engine.configurator.IdmEngineConfigurator;
import org.flowable.variable.api.types.VariableType;
import org.flowable.variable.api.types.VariableTypes;
import org.flowable.variable.service.VariableServiceConfiguration;
import org.flowable.variable.service.impl.db.IbatisVariableTypeHandler;
import org.flowable.variable.service.impl.db.VariableDbSchemaManager;
import org.flowable.variable.service.impl.types.BooleanType;
import org.flowable.variable.service.impl.types.ByteArrayType;
import org.flowable.variable.service.impl.types.DateType;
import org.flowable.variable.service.impl.types.DefaultVariableTypes;
import org.flowable.variable.service.impl.types.DoubleType;
import org.flowable.variable.service.impl.types.IntegerType;
import org.flowable.variable.service.impl.types.JodaDateTimeType;
import org.flowable.variable.service.impl.types.JodaDateType;
import org.flowable.variable.service.impl.types.JsonType;
import org.flowable.variable.service.impl.types.LongJsonType;
import org.flowable.variable.service.impl.types.LongStringType;
import org.flowable.variable.service.impl.types.LongType;
import org.flowable.variable.service.impl.types.NullType;
import org.flowable.variable.service.impl.types.SerializableType;
import org.flowable.variable.service.impl.types.ShortType;
import org.flowable.variable.service.impl.types.StringType;
import org.flowable.variable.service.impl.types.UUIDType;
import com.fasterxml.jackson.databind.ObjectMapper;
public class AppEngineConfiguration extends AbstractEngineConfiguration implements
AppEngineConfigurationApi, HasExpressionManagerEngineConfiguration {
// Classpath location of the engine's MyBatis mapping configuration.
public static final String DEFAULT_MYBATIS_MAPPING_FILE = "org/flowable/app/db/mapping/mappings.xml";
// Prefix used for the engine's Liquibase changelog tables.
public static final String LIQUIBASE_CHANGELOG_PREFIX = "ACT_APP_";
// NOTE(review): field/accessor naming says "cmmn" but this is the APP engine's
// name — looks copied from the CMMN engine configuration. Renaming would break
// the public getter/setter, so it is kept as-is.
protected String cmmnEngineName = AppEngines.NAME_DEFAULT;
// Public service facades exposed by the engine.
protected AppManagementService appManagementService = new AppManagementServiceImpl(this);
protected AppRepositoryService appRepositoryService = new AppRepositoryServiceImpl(this);
// Low-level data managers (MyBatis-backed unless overridden).
protected TableDataManager tableDataManager;
protected AppDeploymentDataManager deploymentDataManager;
protected AppResourceDataManager resourceDataManager;
protected AppDefinitionDataManager appDefinitionDataManager;
// Entity managers wrapping the data managers above.
protected AppDeploymentEntityManager appDeploymentEntityManager;
protected AppResourceEntityManager appResourceEntityManager;
protected AppDefinitionEntityManager appDefinitionEntityManager;
// When true, the embedded IDM engine configurator is skipped entirely.
protected boolean disableIdmEngine;
// When false, identitylink/variable service schema managers are not initialised.
protected boolean executeServiceSchemaManagers = true;
// Deployment pipeline collaborators.
protected AppDeployer appDeployer;
protected AppDeploymentManager deploymentManager;
protected AppResourceConverter appResourceConverter;
// <= 0 means an unbounded definition cache (see initAppDefinitionCache).
protected int appDefinitionCacheLimit = -1;
protected DeploymentCache<AppDefinitionCacheEntry> appDefinitionCache;
protected ExpressionManager expressionManager;
protected SchemaManager identityLinkSchemaManager;
protected SchemaManager variableSchemaManager;
// Identitylink support
protected IdentityLinkServiceConfiguration identityLinkServiceConfiguration;
// Variable support
protected VariableTypes variableTypes;
protected List<VariableType> customPreVariableTypes;
protected List<VariableType> customPostVariableTypes;
protected VariableServiceConfiguration variableServiceConfiguration;
protected boolean serializableVariableTypeTrackDeserializedObjects = true;
protected ObjectMapper objectMapper = new ObjectMapper();
protected BusinessCalendarManager businessCalendarManager;
/** Default bean name under which the engine configuration is looked up in XML resources. */
private static final String DEFAULT_BEAN_NAME = "appEngineConfiguration";

/** Builds a configuration from the conventional {@code flowable.app.cfg.xml} classpath resource. */
public static AppEngineConfiguration createAppEngineConfigurationFromResourceDefault() {
    return createAppEngineConfigurationFromResource("flowable.app.cfg.xml", DEFAULT_BEAN_NAME);
}

/** Builds a configuration from the given classpath resource, using the default bean name. */
public static AppEngineConfiguration createAppEngineConfigurationFromResource(String resource) {
    return createAppEngineConfigurationFromResource(resource, DEFAULT_BEAN_NAME);
}

/** Builds a configuration from the given classpath resource and Spring bean name. */
public static AppEngineConfiguration createAppEngineConfigurationFromResource(String resource, String beanName) {
    return (AppEngineConfiguration) BeansConfigurationHelper.parseEngineConfigurationFromResource(resource, beanName);
}

/** Builds a configuration from an XML input stream, using the default bean name. */
public static AppEngineConfiguration createAppEngineConfigurationFromInputStream(InputStream inputStream) {
    return createAppEngineConfigurationFromInputStream(inputStream, DEFAULT_BEAN_NAME);
}

/** Builds a configuration from an XML input stream and Spring bean name. */
public static AppEngineConfiguration createAppEngineConfigurationFromInputStream(InputStream inputStream, String beanName) {
    return (AppEngineConfiguration) BeansConfigurationHelper.parseEngineConfigurationFromInputStream(inputStream, beanName);
}

/** Plain programmatic configuration; caller supplies data source etc. */
public static AppEngineConfiguration createStandaloneAppEngineConfiguration() {
    return new AppEngineConfiguration();
}

/** Configuration preset for an in-memory database, convenient for tests. */
public static AppEngineConfiguration createStandaloneInMemAppEngineConfiguration() {
    return new StandaloneInMemAppEngineConfiguration();
}
/**
 * Runs the full initialisation sequence ({@link #init()}) and returns a new
 * engine backed by this configuration. Each call creates a new engine instance.
 */
public AppEngine buildAppEngine() {
    init();
    return new AppEngineImpl(this);
}
/**
 * Wires the whole engine configuration. The call order below is load-bearing:
 * later steps consume objects created by earlier ones (e.g. the deployment
 * manager needs the caches, deployers and entity managers to exist already).
 */
protected void init() {
    // Engine plumbing and configurator hooks first.
    initEngineConfigurations();
    initConfigurators();
    configuratorsBeforeInit();
    initCommandContextFactory();
    initTransactionContextFactory();
    initCommandExecutors();
    initIdGenerator();
    initExpressionManager();
    if (usingRelationalDatabase) {
        initDataSource();
    }
    // Schema management applies with a relational DB or explicit schema mgmt.
    if (usingRelationalDatabase || usingSchemaMgmt) {
        initSchemaManager();
        initSchemaManagementCommand();
    }
    initVariableTypes();
    initBeans();
    initTransactionFactory();
    if (usingRelationalDatabase) {
        initSqlSessionFactory();
    }
    initSessionFactories();
    // Services, persistence layer, then the deployment pipeline that uses them.
    initServices();
    initDataManagers();
    initEntityManagers();
    initDeployers();
    initAppDefinitionCache();
    initAppResourceConverter();
    initDeploymentManager();
    initClock();
    initIdentityLinkServiceConfiguration();
    initVariableServiceConfiguration();
    configuratorsAfterInit();
    initBusinessCalendarManager();
}
/**
 * Initialises the engine's own schema manager and, unless disabled via
 * {@code executeServiceSchemaManagers}, the identitylink and variable
 * service schema managers as well.
 */
@Override
public void initSchemaManager() {
    super.initSchemaManager();
    initAppSchemaManager();
    if (!executeServiceSchemaManagers) {
        return;
    }
    initIdentityLinkSchemaManager();
    initVariableSchemaManager();
}
/**
 * Lazily creates the schema build command. Only relevant when a relational
 * database is in use and a schema-update strategy has been configured.
 */
public void initSchemaManagementCommand() {
    if (schemaManagementCmd == null && usingRelationalDatabase && databaseSchemaUpdate != null) {
        this.schemaManagementCmd = new SchemaOperationsAppEngineBuild();
    }
}
/** Lazily installs the app-engine schema manager unless one was injected. */
protected void initAppSchemaManager() {
    if (this.schemaManager != null) {
        return;
    }
    this.schemaManager = new AppDbSchemaManager();
}

/** Lazily installs the variable-service schema manager unless one was injected. */
protected void initVariableSchemaManager() {
    if (this.variableSchemaManager != null) {
        return;
    }
    this.variableSchemaManager = new VariableDbSchemaManager();
}

/** Lazily installs the identitylink-service schema manager unless one was injected. */
protected void initIdentityLinkSchemaManager() {
    if (this.identityLinkSchemaManager != null) {
        return;
    }
    this.identityLinkSchemaManager = new IdentityLinkDbSchemaManager();
}
/**
 * Registers the MyBatis type handler that maps VariableType columns (stored as
 * VARCHAR type names) back to the configured {@code variableTypes} instances.
 */
@Override
public void initMybatisTypeHandlers(Configuration configuration) {
    configuration.getTypeHandlerRegistry().register(VariableType.class, JdbcType.VARCHAR, new IbatisVariableTypeHandler(variableTypes));
}
/** Lazily installs the app-specific expression manager unless one was injected. */
public void initExpressionManager() {
    if (expressionManager != null) {
        return;
    }
    expressionManager = new AppExpressionManager(beans);
}

/** Lazily installs the app-specific command invoker unless one was injected. */
@Override
public void initCommandInvoker() {
    if (commandInvoker != null) {
        return;
    }
    commandInvoker = new AppCommandInvoker();
}
// Registers the two public service facades with the base configuration so they
// receive the command executor.
protected void initServices() {
    initService(appManagementService);
    initService(appRepositoryService);
}
/**
 * Lazily creates the MyBatis-backed data managers. Each check allows a custom
 * implementation to be injected before {@code init()} runs.
 */
public void initDataManagers() {
    if (tableDataManager == null) {
        tableDataManager = new TableDataManagerImpl();
    }
    if (deploymentDataManager == null) {
        deploymentDataManager = new MybatisAppDeploymentDataManager(this);
    }
    if (resourceDataManager == null) {
        resourceDataManager = new MybatisResourceDataManager(this);
    }
    if (appDefinitionDataManager == null) {
        appDefinitionDataManager = new MybatisAppDefinitionDataManager(this);
    }
}
/**
 * Lazily creates the entity managers on top of the data managers.
 * Must run after {@link #initDataManagers()} so the wrapped managers exist.
 */
public void initEntityManagers() {
    if (appDeploymentEntityManager == null) {
        appDeploymentEntityManager = new AppDeploymentEntityManagerImpl(this, deploymentDataManager);
    }
    if (appResourceEntityManager == null) {
        appResourceEntityManager = new AppResourceEntityManagerImpl(this, resourceDataManager);
    }
    if (appDefinitionEntityManager == null) {
        appDefinitionEntityManager = new AppDefinitionEntityManagerImpl(this, appDefinitionDataManager);
    }
}
/**
 * Builds the deployer chain: custom pre-deployers, then the default app
 * deployer, then custom post-deployers. Skipped entirely when an
 * {@code appDeployer} was injected explicitly.
 */
protected void initDeployers() {
    if (this.appDeployer != null) {
        return;
    }
    List<EngineDeployer> chain = new ArrayList<>();
    if (customPreDeployers != null) {
        chain.addAll(customPreDeployers);
    }
    chain.addAll(getDefaultDeployers());
    if (customPostDeployers != null) {
        chain.addAll(customPostDeployers);
    }
    this.deployers = chain;
}
/**
 * Returns the engine's built-in deployers — just the {@link AppDeployer},
 * which is created lazily here if not injected.
 */
public Collection<? extends EngineDeployer> getDefaultDeployers() {
    if (appDeployer == null) {
        appDeployer = new AppDeployer();
    }
    List<EngineDeployer> defaults = new ArrayList<>();
    defaults.add(appDeployer);
    return defaults;
}
/**
 * Lazily creates the app-definition deployment cache; a non-positive
 * {@code appDefinitionCacheLimit} means unbounded.
 */
protected void initAppDefinitionCache() {
    if (appDefinitionCache != null) {
        return;
    }
    appDefinitionCache = appDefinitionCacheLimit <= 0
            ? new DefaultDeploymentCache<>()
            : new DefaultDeploymentCache<>(appDefinitionCacheLimit);
}
/** Lazily installs the JSON-based app resource converter unless one was injected. */
protected void initAppResourceConverter() {
    if (appResourceConverter != null) {
        return;
    }
    appResourceConverter = new AppResourceConverterImpl(objectMapper);
}

/**
 * Lazily creates the deployment manager and wires in the cache, deployer chain
 * and entity managers created by earlier init steps.
 */
protected void initDeploymentManager() {
    if (deploymentManager != null) {
        return;
    }
    deploymentManager = new AppDeploymentManager();
    deploymentManager.setAppEngineConfiguration(this);
    deploymentManager.setAppDefinitionCache(appDefinitionCache);
    deploymentManager.setDeployers(deployers);
    deploymentManager.setAppDefinitionEntityManager(appDefinitionEntityManager);
    deploymentManager.setDeploymentEntityManager(appDeploymentEntityManager);
}
// Key under which this configuration is registered in the shared engine map.
@Override
public String getEngineCfgKey() {
    return EngineConfigurationConstants.KEY_APP_ENGINE_CONFIG;
}
// The app engine adds no transaction interceptor of its own.
@Override
public CommandInterceptor createTransactionInterceptor() {
    return null;
}
// MyBatis mapping configuration, loaded from the classpath.
@Override
public InputStream getMyBatisXmlConfigurationStream() {
    return getResourceAsStream(DEFAULT_MYBATIS_MAPPING_FILE);
}
// Insert/delete ordering for bulk DB operations comes from the generated
// entity dependency order.
@Override
protected void initDbSqlSessionFactoryEntitySettings() {
    defaultInitDbSqlSessionFactoryEntitySettings(EntityDependencyOrder.INSERT_ORDER, EntityDependencyOrder.DELETE_ORDER);
}
/**
 * Lazily builds the variable-type registry. Registration order matters: types
 * are tried in order when resolving a value, so custom pre-types get first
 * chance, then the built-ins from most specific to most general, then custom
 * post-types as fallbacks.
 */
public void initVariableTypes() {
    if (variableTypes != null) {
        return;
    }
    variableTypes = new DefaultVariableTypes();
    if (customPreVariableTypes != null) {
        customPreVariableTypes.forEach(variableTypes::addType);
    }
    variableTypes.addType(new NullType());
    variableTypes.addType(new StringType(getMaxLengthString()));
    variableTypes.addType(new LongStringType(getMaxLengthString() + 1));
    variableTypes.addType(new BooleanType());
    variableTypes.addType(new ShortType());
    variableTypes.addType(new IntegerType());
    variableTypes.addType(new LongType());
    variableTypes.addType(new DateType());
    variableTypes.addType(new JodaDateType());
    variableTypes.addType(new JodaDateTimeType());
    variableTypes.addType(new DoubleType());
    variableTypes.addType(new UUIDType());
    variableTypes.addType(new JsonType(getMaxLengthString(), objectMapper));
    variableTypes.addType(new LongJsonType(getMaxLengthString() + 1, objectMapper));
    variableTypes.addType(new ByteArrayType());
    variableTypes.addType(new SerializableType(serializableVariableTypeTrackDeserializedObjects));
    if (customPostVariableTypes != null) {
        customPostVariableTypes.forEach(variableTypes::addType);
    }
}
/**
 * Creates and registers the variable service configuration, propagating the
 * clock, object mapper, event dispatcher and variable-type registry from this
 * engine configuration. Always creates a fresh instance (no lazy check).
 */
public void initVariableServiceConfiguration() {
    this.variableServiceConfiguration = new VariableServiceConfiguration();
    this.variableServiceConfiguration.setClock(this.clock);
    this.variableServiceConfiguration.setObjectMapper(this.objectMapper);
    this.variableServiceConfiguration.setEventDispatcher(this.eventDispatcher);
    this.variableServiceConfiguration.setVariableTypes(this.variableTypes);
    this.variableServiceConfiguration.setMaxLengthString(this.getMaxLengthString());
    this.variableServiceConfiguration.setSerializableVariableTypeTrackDeserializedObjects(this.isSerializableVariableTypeTrackDeserializedObjects());
    this.variableServiceConfiguration.init();
    addServiceConfiguration(EngineConfigurationConstants.KEY_VARIABLE_SERVICE_CONFIG, this.variableServiceConfiguration);
}
/**
 * Creates and registers the identitylink service configuration, propagating
 * the clock, object mapper and event dispatcher. Always creates a fresh
 * instance (no lazy check).
 */
public void initIdentityLinkServiceConfiguration() {
    this.identityLinkServiceConfiguration = new IdentityLinkServiceConfiguration();
    this.identityLinkServiceConfiguration.setClock(this.clock);
    this.identityLinkServiceConfiguration.setObjectMapper(this.objectMapper);
    this.identityLinkServiceConfiguration.setEventDispatcher(this.eventDispatcher);
    this.identityLinkServiceConfiguration.init();
    addServiceConfiguration(EngineConfigurationConstants.KEY_IDENTITY_LINK_SERVICE_CONFIG, this.identityLinkServiceConfiguration);
}
/**
 * Lazily builds the default business calendar manager with the three standard
 * calendars (duration, due-date, cycle), all driven by the engine clock.
 */
public void initBusinessCalendarManager() {
    if (businessCalendarManager != null) {
        return;
    }
    MapBusinessCalendarManager calendars = new MapBusinessCalendarManager();
    calendars.addBusinessCalendar(DurationBusinessCalendar.NAME, new DurationBusinessCalendar(this.clock));
    calendars.addBusinessCalendar(DueDateBusinessCalendar.NAME, new DueDateBusinessCalendar(this.clock));
    calendars.addBusinessCalendar(CycleBusinessCalendar.NAME, new CycleBusinessCalendar(this.clock));
    businessCalendarManager = calendars;
}
/**
 * Returns the IDM engine configurator (custom one if injected, default
 * otherwise), or an empty list when the IDM engine is disabled.
 */
@Override
protected List<EngineConfigurator> getEngineSpecificEngineConfigurators() {
    if (disableIdmEngine) {
        return Collections.emptyList();
    }
    List<EngineConfigurator> configurators = new ArrayList<>();
    configurators.add(idmEngineConfigurator != null ? idmEngineConfigurator : new IdmEngineConfigurator());
    return configurators;
}
// NOTE(review): the backing field is named "cmmnEngineName" although this is
// the app engine — kept for backwards compatibility of the public accessors.
@Override
public String getEngineName() {
    return cmmnEngineName;
}
public String getCmmnEngineName() {
    return cmmnEngineName;
}
public AppEngineConfiguration setCmmnEngineName(String cmmnEngineName) {
    this.cmmnEngineName = cmmnEngineName;
    return this;
}
// Fluent accessors for the public service facades.
@Override
public AppManagementService getAppManagementService() {
    return appManagementService;
}
public AppEngineConfiguration setAppManagementService(AppManagementService appManagementService) {
    this.appManagementService = appManagementService;
    return this;
}
@Override
public AppRepositoryService getAppRepositoryService() {
    return appRepositoryService;
}
public AppEngineConfiguration setAppRepositoryService(AppRepositoryService appRepositoryService) {
    this.appRepositoryService = appRepositoryService;
    return this;
}
// Looks up the identity service from the registered IDM engine configuration.
// NOTE(review): if the IDM engine is disabled (disableIdmEngine) the map lookup
// returns null and this throws NPE — confirm callers guard against that.
public IdmIdentityService getIdmIdentityService() {
    return ((IdmEngineConfigurationApi) engineConfigurations.get(EngineConfigurationConstants.KEY_IDM_ENGINE_CONFIG)).getIdmIdentityService();
}
// Fluent accessors for data managers, entity managers and the deployment
// pipeline. Setters injected before init() override the defaults created there.
public TableDataManager getTableDataManager() {
    return tableDataManager;
}
public AppEngineConfiguration setTableDataManager(TableDataManager tableDataManager) {
    this.tableDataManager = tableDataManager;
    return this;
}
public AppDeploymentDataManager getDeploymentDataManager() {
    return deploymentDataManager;
}
public AppEngineConfiguration setDeploymentDataManager(AppDeploymentDataManager deploymentDataManager) {
    this.deploymentDataManager = deploymentDataManager;
    return this;
}
public AppResourceDataManager getResourceDataManager() {
    return resourceDataManager;
}
public AppEngineConfiguration setResourceDataManager(AppResourceDataManager resourceDataManager) {
    this.resourceDataManager = resourceDataManager;
    return this;
}
public AppDefinitionDataManager getAppDefinitionDataManager() {
    return appDefinitionDataManager;
}
public AppEngineConfiguration setAppDefinitionDataManager(AppDefinitionDataManager appDefinitionDataManager) {
    this.appDefinitionDataManager = appDefinitionDataManager;
    return this;
}
public AppDeploymentEntityManager getAppDeploymentEntityManager() {
    return appDeploymentEntityManager;
}
public AppEngineConfiguration setAppDeploymentEntityManager(AppDeploymentEntityManager appDeploymentEntityManager) {
    this.appDeploymentEntityManager = appDeploymentEntityManager;
    return this;
}
public AppResourceEntityManager getAppResourceEntityManager() {
    return appResourceEntityManager;
}
public AppEngineConfiguration setAppResourceEntityManager(AppResourceEntityManager appResourceEntityManager) {
    this.appResourceEntityManager = appResourceEntityManager;
    return this;
}
public AppDefinitionEntityManager getAppDefinitionEntityManager() {
    return appDefinitionEntityManager;
}
public AppEngineConfiguration setAppDefinitionEntityManager(AppDefinitionEntityManager appDefinitionEntityManager) {
    this.appDefinitionEntityManager = appDefinitionEntityManager;
    return this;
}
public AppDeployer getAppDeployer() {
    return appDeployer;
}
public AppEngineConfiguration setAppDeployer(AppDeployer appDeployer) {
    this.appDeployer = appDeployer;
    return this;
}
public AppResourceConverter getAppResourceConverter() {
    return appResourceConverter;
}
public AppEngineConfiguration setAppResourceConverter(AppResourceConverter appResourceConverter) {
    this.appResourceConverter = appResourceConverter;
    return this;
}
public AppDeploymentManager getDeploymentManager() {
    return deploymentManager;
}
public AppEngineConfiguration setDeploymentManager(AppDeploymentManager deploymentManager) {
    this.deploymentManager = deploymentManager;
    return this;
}
// Cache limit <= 0 means unbounded (see initAppDefinitionCache).
public int getAppDefinitionCacheLimit() {
    return appDefinitionCacheLimit;
}
public AppEngineConfiguration setAppDefinitionCacheLimit(int appDefinitionCacheLimit) {
    this.appDefinitionCacheLimit = appDefinitionCacheLimit;
    return this;
}
public DeploymentCache<AppDefinitionCacheEntry> getAppDefinitionCache() {
    return appDefinitionCache;
}
public AppEngineConfiguration setAppDefinitionCache(DeploymentCache<AppDefinitionCacheEntry> appDefinitionCache) {
    this.appDefinitionCache = appDefinitionCache;
    return this;
}
@Override
public AppEngineConfiguration setDataSource(DataSource dataSource) {
this.dataSource = dataSource;
return this;
}
public boolean isExecuteServiceSchemaManagers() {
return executeServiceSchemaManagers;
}
public void setExecuteServiceSchemaManagers(boolean executeServiceSchemaManagers) {
this.executeServiceSchemaManagers = executeServiceSchemaManagers;
}
@Override
public ExpressionManager getExpressionManager() {
return expressionManager;
}
@Override
public AppEngineConfiguration setExpressionManager(ExpressionManager expressionManager) {
this.expressionManager = expressionManager;
return this;
}
public SchemaManager getIdentityLinkSchemaManager() {
return identityLinkSchemaManager;
}
public AppEngineConfiguration setIdentityLinkSchemaManager(SchemaManager identityLinkSchemaManager) {
this.identityLinkSchemaManager = identityLinkSchemaManager;
return this;
}
public SchemaManager getVariableSchemaManager() {
return variableSchemaManager;
}
/** Sets the variable {@link SchemaManager}; returns this configuration for fluent chaining. */
public AppEngineConfiguration setVariableSchemaManager(SchemaManager variableSchemaManager) {
    this.variableSchemaManager = variableSchemaManager;
    return this;
}
/** Returns the registry of {@link VariableTypes} used by the engine. */
public VariableTypes getVariableTypes() {
    return variableTypes;
}
/** Sets the {@link VariableTypes} registry; returns this configuration for fluent chaining. */
public AppEngineConfiguration setVariableTypes(VariableTypes variableTypes) {
    this.variableTypes = variableTypes;
    return this;
}
/** Returns the custom {@link VariableType}s registered before the default ones. */
public List<VariableType> getCustomPreVariableTypes() {
    return customPreVariableTypes;
}
/** Sets the custom pre-variable types; returns this configuration for fluent chaining. */
public AppEngineConfiguration setCustomPreVariableTypes(List<VariableType> customPreVariableTypes) {
    this.customPreVariableTypes = customPreVariableTypes;
    return this;
}
/** Returns the custom {@link VariableType}s registered after the default ones. */
public List<VariableType> getCustomPostVariableTypes() {
    return customPostVariableTypes;
}
/** Sets the custom post-variable types; returns this configuration for fluent chaining. */
public AppEngineConfiguration setCustomPostVariableTypes(List<VariableType> customPostVariableTypes) {
    this.customPostVariableTypes = customPostVariableTypes;
    return this;
}
/** Returns the {@link IdentityLinkServiceConfiguration}. */
public IdentityLinkServiceConfiguration getIdentityLinkServiceConfiguration() {
    return identityLinkServiceConfiguration;
}
/** Sets the {@link IdentityLinkServiceConfiguration}; returns this configuration for fluent chaining. */
public AppEngineConfiguration setIdentityLinkServiceConfiguration(IdentityLinkServiceConfiguration identityLinkServiceConfiguration) {
    this.identityLinkServiceConfiguration = identityLinkServiceConfiguration;
    return this;
}
/** Returns the {@link VariableServiceConfiguration}. */
public VariableServiceConfiguration getVariableServiceConfiguration() {
    return variableServiceConfiguration;
}
/** Sets the {@link VariableServiceConfiguration}; returns this configuration for fluent chaining. */
public AppEngineConfiguration setVariableServiceConfiguration(VariableServiceConfiguration variableServiceConfiguration) {
    this.variableServiceConfiguration = variableServiceConfiguration;
    return this;
}
/** Returns whether the serializable variable type tracks deserialized objects. */
public boolean isSerializableVariableTypeTrackDeserializedObjects() {
    return serializableVariableTypeTrackDeserializedObjects;
}
/** Sets deserialized-object tracking for serializable variables; returns this configuration for fluent chaining. */
public AppEngineConfiguration setSerializableVariableTypeTrackDeserializedObjects(boolean serializableVariableTypeTrackDeserializedObjects) {
    this.serializableVariableTypeTrackDeserializedObjects = serializableVariableTypeTrackDeserializedObjects;
    return this;
}
/** Returns the Jackson {@link ObjectMapper} used by the engine. */
public ObjectMapper getObjectMapper() {
    return objectMapper;
}
/** Sets the Jackson {@link ObjectMapper}; returns this configuration for fluent chaining. */
public AppEngineConfiguration setObjectMapper(ObjectMapper objectMapper) {
    this.objectMapper = objectMapper;
    return this;
}
/** Returns whether the IDM engine is disabled. */
public boolean isDisableIdmEngine() {
    return disableIdmEngine;
}
/** Enables/disables the IDM engine; returns this configuration for fluent chaining. */
public AppEngineConfiguration setDisableIdmEngine(boolean disableIdmEngine) {
    this.disableIdmEngine = disableIdmEngine;
    return this;
}
/** Returns the configured {@link BusinessCalendarManager}. */
public BusinessCalendarManager getBusinessCalendarManager() {
    return businessCalendarManager;
}
/** Sets the {@link BusinessCalendarManager}; returns this configuration for fluent chaining. */
public AppEngineConfiguration setBusinessCalendarManager(BusinessCalendarManager businessCalendarManager) {
    this.businessCalendarManager = businessCalendarManager;
    return this;
}
}
| |
/*
* Copyright 2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.metacat.connector.hive.converters;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.base.Splitter;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import com.netflix.metacat.common.QualifiedName;
import com.netflix.metacat.common.server.connectors.ConnectorInfoConverter;
import com.netflix.metacat.common.server.connectors.model.AuditInfo;
import com.netflix.metacat.common.server.connectors.model.DatabaseInfo;
import com.netflix.metacat.common.server.connectors.model.FieldInfo;
import com.netflix.metacat.common.server.connectors.model.PartitionInfo;
import com.netflix.metacat.common.server.connectors.model.StorageInfo;
import com.netflix.metacat.common.server.connectors.model.TableInfo;
import com.netflix.metacat.common.server.connectors.model.ViewInfo;
import com.netflix.metacat.connector.hive.iceberg.IcebergTableWrapper;
import com.netflix.metacat.connector.hive.sql.DirectSqlTable;
import com.netflix.metacat.connector.hive.util.HiveTableUtil;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.hive.metastore.TableType;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.Partition;
import org.apache.hadoop.hive.metastore.api.SerDeInfo;
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import java.time.Instant;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/**
* Hive connector info converter.
*
* @author zhenl
* @since 1.0.0
*/
@Slf4j
public class HiveConnectorInfoConverter implements ConnectorInfoConverter<Database, Table, Partition> {

    private static final Splitter SLASH_SPLITTER = Splitter.on('/');
    private static final Splitter EQUAL_SPLITTER = Splitter.on('=').limit(2);
    private HiveTypeConverter hiveTypeConverter = new HiveTypeConverter();

    /**
     * Constructor.
     *
     * @param hiveTypeConverter converter used to map between Hive and Metacat types
     */
    public HiveConnectorInfoConverter(final HiveTypeConverter hiveTypeConverter) {
        this.hiveTypeConverter = hiveTypeConverter;
    }

    /**
     * Converts epoch time to Date.
     *
     * @param seconds time in seconds
     * @return Date
     */
    public static Date epochSecondsToDate(final long seconds) {
        return Date.from(Instant.ofEpochSecond(seconds));
    }

    /**
     * Converts to DatabaseDto.
     *
     * @param qualifiedName qualified name of the database
     * @param database      connector database
     * @return Metacat database Info
     */
    @Override
    public DatabaseInfo toDatabaseInfo(
        final QualifiedName qualifiedName,
        final Database database
    ) {
        return DatabaseInfo.builder()
            .name(qualifiedName)
            .uri(database.getLocationUri())
            .metadata(database.getParameters())
            .build();
    }

    /**
     * Converts from DatabaseDto to the connector database.
     *
     * @param databaseInfo Metacat database Info
     * @return connector database
     */
    @Override
    public Database fromDatabaseInfo(final DatabaseInfo databaseInfo) {
        final QualifiedName databaseName = databaseInfo.getName();
        final String name = (databaseName == null) ? "" : databaseName.getDatabaseName();
        //this is a temp hack to resolve the uri = null issue
        // final String dbUri = Strings.isNullOrEmpty(databaseInfo.getUri()) ? "file://temp/" : databaseInfo.getUri();
        // FIX: use the type-safe Collections.emptyMap() instead of the raw EMPTY_MAP constant
        // (the raw constant produced an unchecked assignment).
        final Map<String, String> metadata
            = (databaseInfo.getMetadata() != null) ? databaseInfo.getMetadata() : Collections.emptyMap();
        return new Database(name, name, databaseInfo.getUri(), metadata);
    }

    /**
     * Converts to TableDto.
     *
     * @param name  qualified name of the table
     * @param table connector table
     * @return Metacat table Info
     */
    @Override
    public TableInfo toTableInfo(final QualifiedName name, final Table table) {
        // FIX: the previous fallback was Collections.emptyList(), which is immutable; the add()
        // calls below then failed with UnsupportedOperationException (swallowed by the broad
        // catch), so the struct fields were silently never populated when the storage descriptor
        // was missing. Use a mutable list, and also guard against a null thrift column list.
        final List<FieldSchema> nonPartitionColumns =
            (table.getSd() != null && table.getSd().getCols() != null)
                ? table.getSd().getCols() : Lists.newArrayList();
        // add the data fields to the nonPartitionColumns
        //ignore all exceptions
        try {
            if (nonPartitionColumns.isEmpty()) {
                for (StructField field : HiveTableUtil.getTableStructFields(table)) {
                    final FieldSchema fieldSchema = new FieldSchema(field.getFieldName(),
                        field.getFieldObjectInspector().getTypeName(),
                        field.getFieldComment());
                    nonPartitionColumns.add(fieldSchema);
                }
            }
        } catch (final Exception e) {
            log.error(e.getMessage(), e);
        }
        // FIX: thrift may return null partition keys; previously that caused an NPE below.
        final List<FieldSchema> partitionColumns =
            (table.getPartitionKeys() != null) ? table.getPartitionKeys() : Collections.emptyList();
        final Date creationDate = table.isSetCreateTime() ? epochSecondsToDate(table.getCreateTime()) : null;
        final List<FieldInfo> allFields =
            Lists.newArrayListWithCapacity(nonPartitionColumns.size() + partitionColumns.size());
        nonPartitionColumns.stream()
            .map(field -> hiveToMetacatField(field, false))
            .forEachOrdered(allFields::add);
        partitionColumns.stream()
            .map(field -> hiveToMetacatField(field, true))
            .forEachOrdered(allFields::add);
        final AuditInfo auditInfo = AuditInfo.builder().createdDate(creationDate).build();
        if (null != table.getTableType() && table.getTableType().equals(TableType.VIRTUAL_VIEW.name())) {
            return TableInfo.builder()
                .serde(toStorageInfo(table.getSd(), table.getOwner())).fields(allFields)
                .metadata(table.getParameters()).name(name).auditInfo(auditInfo)
                .view(ViewInfo.builder()
                    .viewOriginalText(table.getViewOriginalText())
                    .viewExpandedText(table.getViewExpandedText()).build()
                ).build();
        } else {
            return TableInfo.builder()
                .serde(toStorageInfo(table.getSd(), table.getOwner())).fields(allFields)
                .metadata(table.getParameters()).name(name).auditInfo(auditInfo)
                .build();
        }
    }

    /**
     * Converts IcebergTable to TableDto.
     *
     * @param name         qualified name
     * @param tableWrapper iceberg table wrapper containing the table info and extra properties
     * @param tableLoc     iceberg table metadata location
     * @param tableInfo    table info
     * @return Metacat table Info
     */
    public TableInfo fromIcebergTableToTableInfo(final QualifiedName name,
                                                 final IcebergTableWrapper tableWrapper,
                                                 final String tableLoc,
                                                 final TableInfo tableInfo) {
        final org.apache.iceberg.Table table = tableWrapper.getTable();
        final List<FieldInfo> allFields =
            this.hiveTypeConverter.icebergeSchemaTofieldDtos(table.schema(), table.spec().fields());
        final Map<String, String> tableParameters = new HashMap<>();
        tableParameters.put(DirectSqlTable.PARAM_TABLE_TYPE, DirectSqlTable.ICEBERG_TABLE_TYPE);
        tableParameters.put(DirectSqlTable.PARAM_METADATA_LOCATION, tableLoc);
        tableParameters.put(DirectSqlTable.PARAM_PARTITION_SPEC, table.spec().toString());
        //adding iceberg table properties
        tableParameters.putAll(table.properties());
        tableParameters.putAll(tableWrapper.getExtraProperties());
        final String location = tableInfo.getSerde() != null ? tableInfo.getSerde().getUri() : null;
        return TableInfo.builder().fields(allFields)
            .metadata(tableParameters)
            .serde(StorageInfo.builder().uri(location).build())
            .name(name).auditInfo(tableInfo.getAudit())
            .build();
    }

    /**
     * Converts from TableDto to the connector table.
     *
     * @param tableInfo Metacat table Info
     * @return connector table
     */
    @Override
    public Table fromTableInfo(final TableInfo tableInfo) {
        final QualifiedName name = tableInfo.getName();
        final String tableName = (name != null) ? name.getTableName() : "";
        final String databaseName = (name != null) ? name.getDatabaseName() : "";
        final StorageInfo storageInfo = tableInfo.getSerde();
        final String owner = (storageInfo != null && storageInfo.getOwner() != null)
            ? storageInfo.getOwner() : "";
        final AuditInfo auditInfo = tableInfo.getAudit();
        final int createTime = (auditInfo != null && auditInfo.getCreatedDate() != null)
            ? dateToEpochSeconds(auditInfo.getCreatedDate()) : 0;
        final Map<String, String> params = (tableInfo.getMetadata() != null)
            ? tableInfo.getMetadata() : new HashMap<>();
        final List<FieldInfo> fields = tableInfo.getFields();
        List<FieldSchema> partitionFields = Collections.emptyList();
        List<FieldSchema> nonPartitionFields = Collections.emptyList();
        if (fields != null) {
            nonPartitionFields = Lists.newArrayListWithCapacity(fields.size());
            partitionFields = Lists.newArrayListWithCapacity(fields.size());
            for (FieldInfo fieldInfo : fields) {
                if (fieldInfo.isPartitionKey()) {
                    partitionFields.add(metacatToHiveField(fieldInfo));
                } else {
                    nonPartitionFields.add(metacatToHiveField(fieldInfo));
                }
            }
        }
        final StorageDescriptor sd = fromStorageInfo(storageInfo, nonPartitionFields);
        final ViewInfo viewInfo = tableInfo.getView();
        // A table is a view only when view original text is present; otherwise external table.
        final String tableType = (null != viewInfo
            && !Strings.isNullOrEmpty(viewInfo.getViewOriginalText()))
            ? TableType.VIRTUAL_VIEW.name() : TableType.EXTERNAL_TABLE.name();
        return new Table(tableName,
            databaseName,
            owner,
            createTime,
            0,
            0,
            sd,
            partitionFields,
            params,
            tableType.equals(TableType.VIRTUAL_VIEW.name())
                ? tableInfo.getView().getViewOriginalText() : null,
            tableType.equals(TableType.VIRTUAL_VIEW.name())
                ? tableInfo.getView().getViewExpandedText() : null,
            tableType);
    }

    /**
     * Converts to PartitionDto.
     *
     * @param tableInfo table the partition belongs to
     * @param partition connector partition
     * @return Metacat partition Info
     */
    @Override
    public PartitionInfo toPartitionInfo(
        final TableInfo tableInfo,
        final Partition partition
    ) {
        final QualifiedName tableName = tableInfo.getName();
        final QualifiedName partitionName = QualifiedName.ofPartition(tableName.getCatalogName(),
            tableName.getDatabaseName(),
            tableName.getTableName(),
            getNameFromPartVals(tableInfo, partition.getValues()));
        final String owner = notNull(tableInfo.getSerde()) ? tableInfo.getSerde().getOwner() : "";
        final AuditInfo auditInfo = AuditInfo.builder()
            .createdDate(epochSecondsToDate(partition.getCreateTime()))
            .lastModifiedDate(epochSecondsToDate(partition.getLastAccessTime())).build();
        return PartitionInfo.builder()
            .serde(toStorageInfo(partition.getSd(), owner))
            .name(partitionName)
            .auditInfo(auditInfo)
            .metadata(partition.getParameters())
            .build();
    }

    /**
     * Converts from PartitionDto to the connector partition.
     *
     * @param tableInfo table the partition belongs to
     * @param partition Metacat partition Info
     * @return connector partition
     */
    @Override
    public Partition fromPartitionInfo(
        final TableInfo tableInfo,
        final PartitionInfo partition
    ) {
        final QualifiedName name = partition.getName();
        final List<String> values = Lists.newArrayListWithCapacity(16);
        Map<String, String> metadata = partition.getMetadata();
        if (metadata == null) {
            metadata = new HashMap<>();
            //can't use Collections.emptyMap()
            // which is immutable and can't be
            // modified by add parts in the embedded
        }
        final List<FieldInfo> fields = tableInfo.getFields();
        List<FieldSchema> fieldSchemas = Collections.emptyList();
        if (notNull(fields)) {
            fieldSchemas = fields.stream()
                .filter(field -> !field.isPartitionKey())
                .map(this::metacatToHiveField)
                .collect(Collectors.toList());
        }
        final StorageDescriptor sd = fromStorageInfo(partition.getSerde(), fieldSchemas);
        //using the table level serialization lib
        if (
            notNull(sd.getSerdeInfo())
                && notNull(tableInfo.getSerde())
                && Strings.isNullOrEmpty(sd.getSerdeInfo().getSerializationLib())
        ) {
            sd.getSerdeInfo().setSerializationLib(tableInfo.getSerde().getSerializationLib());
        }
        final AuditInfo auditInfo = partition.getAudit();
        final int createTime = (notNull(auditInfo) && notNull(auditInfo.getCreatedDate()))
            ? dateToEpochSeconds(auditInfo.getCreatedDate()) : 0;
        final int lastAccessTime = (notNull(auditInfo) && notNull(auditInfo.getLastModifiedDate()))
            ? dateToEpochSeconds(auditInfo.getLastModifiedDate()) : 0;
        if (null == name) {
            return new Partition(values, "", "", createTime, lastAccessTime, sd, metadata);
        }
        if (notNull(name.getPartitionName())) {
            // A partition name is "key1=val1/key2=val2/..."; extract the values in order.
            for (String partialPartName : SLASH_SPLITTER.split(partition.getName().getPartitionName())) {
                final List<String> nameValues = ImmutableList.copyOf(EQUAL_SPLITTER.split(partialPartName));
                Preconditions.checkState(nameValues.size() == 2,
                    "Unrecognized partition name: " + partition.getName());
                values.add(nameValues.get(1));
            }
        }
        final String databaseName = notNull(name.getDatabaseName()) ? name.getDatabaseName() : "";
        final String tableName = notNull(name.getTableName()) ? name.getTableName() : "";
        return new Partition(
            values,
            databaseName,
            tableName,
            createTime,
            lastAccessTime,
            sd,
            metadata);
    }

    /**
     * metacatToHiveField.
     *
     * @param fieldInfo fieldInfo
     * @return FieldSchema
     */
    public FieldSchema metacatToHiveField(final FieldInfo fieldInfo) {
        final FieldSchema result = new FieldSchema();
        result.setName(fieldInfo.getName());
        // Prefer the original Hive source type when present; fall back to converting the
        // Metacat type.
        if (StringUtils.isBlank(fieldInfo.getSourceType())) {
            result.setType(hiveTypeConverter.fromMetacatType(fieldInfo.getType()));
        } else {
            result.setType(fieldInfo.getSourceType());
        }
        result.setComment(fieldInfo.getComment());
        return result;
    }

    /**
     * hiveToMetacatField.
     *
     * @param field          field
     * @param isPartitionKey boolean
     * @return field info obj
     */
    private FieldInfo hiveToMetacatField(final FieldSchema field, final boolean isPartitionKey) {
        return FieldInfo.builder().name(field.getName())
            .type(hiveTypeConverter.toMetacatType(field.getType()))
            .sourceType(field.getType())
            .comment(field.getComment())
            .partitionKey(isPartitionKey)
            .build();
    }

    private StorageInfo toStorageInfo(final StorageDescriptor sd, final String owner) {
        if (sd == null) {
            return new StorageInfo();
        }
        if (sd.getSerdeInfo() != null) {
            return StorageInfo.builder().owner(owner)
                .uri(sd.getLocation())
                .inputFormat(sd.getInputFormat())
                .outputFormat(sd.getOutputFormat())
                .parameters(sd.getParameters())
                .serializationLib(sd.getSerdeInfo().getSerializationLib())
                .serdeInfoParameters(sd.getSerdeInfo().getParameters())
                .build();
        }
        return StorageInfo.builder().owner(owner).uri(sd.getLocation()).inputFormat(sd.getInputFormat())
            .outputFormat(sd.getOutputFormat()).parameters(sd.getParameters()).build();
    }

    @VisibleForTesting
    Integer dateToEpochSeconds(final Date date) {
        return null == date ? null : Math.toIntExact(date.toInstant().getEpochSecond());
    }

    private StorageDescriptor fromStorageInfo(final StorageInfo storageInfo, final List<FieldSchema> cols) {
        if (storageInfo == null) {
            return new StorageDescriptor(
                Collections.emptyList(),
                "",
                null,
                null,
                false,
                0,
                new SerDeInfo("", null, new HashMap<>()),
                Collections.emptyList(),
                Collections.emptyList(),
                new HashMap<>());
        }
        // Set all required fields to a non-null value
        final String inputFormat = storageInfo.getInputFormat();
        final String location = notNull(storageInfo.getUri()) ? storageInfo.getUri() : "";
        final String outputFormat = storageInfo.getOutputFormat();
        final Map<String, String> sdParams = notNull(storageInfo.getParameters())
            ? storageInfo.getParameters() : new HashMap<>();
        final Map<String, String> serdeParams = notNull(storageInfo.getSerdeInfoParameters())
            ? storageInfo.getSerdeInfoParameters() : new HashMap<>();
        final String serializationLib = storageInfo.getSerializationLib();
        return new StorageDescriptor(
            cols,
            location,
            inputFormat,
            outputFormat,
            false,
            0,
            new SerDeInfo("", serializationLib, serdeParams),
            Collections.emptyList(),
            Collections.emptyList(),
            sdParams);
    }

    /**
     * Builds the canonical "key1=val1/key2=val2" partition name from the table's partition
     * keys and the given partition values.
     *
     * @throws IllegalArgumentException if the key and value counts differ
     */
    private String getNameFromPartVals(final TableInfo tableInfo, final List<String> partVals) {
        final List<String> partitionKeys = getPartitionKeys(tableInfo.getFields());
        if (partitionKeys.size() != partVals.size()) {
            throw new IllegalArgumentException("Not the same number of partition columns and partition values");
        }
        final StringBuilder builder = new StringBuilder();
        for (int i = 0; i < partitionKeys.size(); i++) {
            if (builder.length() > 0) {
                builder.append('/');
            }
            builder.append(partitionKeys.get(i))
                .append('=')
                .append(partVals.get(i));
        }
        return builder.toString();
    }

    /**
     * Returns the names of the partition-key fields, in order.
     *
     * <p>FIX: previously returned {@code null} when {@code fieldInfos} was null, which made the
     * only caller ({@link #getNameFromPartVals}) fail with an NPE; an empty list yields the
     * caller's explicit IllegalArgumentException instead.
     */
    private List<String> getPartitionKeys(final List<FieldInfo> fieldInfos) {
        if (fieldInfos == null || fieldInfos.isEmpty()) {
            return Collections.emptyList();
        }
        final List<String> keys = new LinkedList<>();
        for (FieldInfo field : fieldInfos) {
            if (field.isPartitionKey()) {
                keys.add(field.getName());
            }
        }
        return keys;
    }

    /** Small readability helper: true when the argument is non-null. */
    private boolean notNull(final Object object) {
        return null != object;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.streaming.runtime.tasks;
import org.apache.flink.annotation.Internal;
import org.apache.flink.annotation.VisibleForTesting;
import org.apache.flink.configuration.TaskManagerOptions;
import org.apache.flink.core.fs.CloseableRegistry;
import org.apache.flink.core.io.InputStatus;
import org.apache.flink.metrics.Counter;
import org.apache.flink.metrics.SimpleCounter;
import org.apache.flink.runtime.checkpoint.CheckpointException;
import org.apache.flink.runtime.checkpoint.CheckpointFailureReason;
import org.apache.flink.runtime.checkpoint.CheckpointMetaData;
import org.apache.flink.runtime.checkpoint.CheckpointMetrics;
import org.apache.flink.runtime.checkpoint.CheckpointOptions;
import org.apache.flink.runtime.checkpoint.channel.ChannelStateReader;
import org.apache.flink.runtime.checkpoint.channel.ChannelStateWriter;
import org.apache.flink.runtime.concurrent.FutureUtils;
import org.apache.flink.runtime.execution.CancelTaskException;
import org.apache.flink.runtime.execution.Environment;
import org.apache.flink.runtime.io.network.api.CancelCheckpointMarker;
import org.apache.flink.runtime.io.network.api.writer.MultipleRecordWriters;
import org.apache.flink.runtime.io.network.api.writer.NonRecordWriter;
import org.apache.flink.runtime.io.network.api.writer.RecordWriter;
import org.apache.flink.runtime.io.network.api.writer.RecordWriterBuilder;
import org.apache.flink.runtime.io.network.api.writer.RecordWriterDelegate;
import org.apache.flink.runtime.io.network.api.writer.ResultPartitionWriter;
import org.apache.flink.runtime.io.network.api.writer.SingleRecordWriter;
import org.apache.flink.runtime.io.network.partition.consumer.InputGate;
import org.apache.flink.runtime.jobgraph.OperatorID;
import org.apache.flink.runtime.jobgraph.tasks.AbstractInvokable;
import org.apache.flink.runtime.metrics.groups.OperatorMetricGroup;
import org.apache.flink.runtime.operators.coordination.OperatorEvent;
import org.apache.flink.runtime.plugable.SerializationDelegate;
import org.apache.flink.runtime.state.CheckpointStorageWorkerView;
import org.apache.flink.runtime.state.StateBackend;
import org.apache.flink.runtime.state.StateBackendLoader;
import org.apache.flink.runtime.taskmanager.DispatcherThreadFactory;
import org.apache.flink.runtime.util.ExecutorThreadFactory;
import org.apache.flink.runtime.util.FatalExitExceptionHandler;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.graph.StreamConfig;
import org.apache.flink.streaming.api.graph.StreamEdge;
import org.apache.flink.streaming.api.operators.MailboxExecutor;
import org.apache.flink.streaming.api.operators.StreamOperator;
import org.apache.flink.streaming.api.operators.StreamTaskStateInitializer;
import org.apache.flink.streaming.api.operators.StreamTaskStateInitializerImpl;
import org.apache.flink.streaming.runtime.io.RecordWriterOutput;
import org.apache.flink.streaming.runtime.io.StreamInputProcessor;
import org.apache.flink.streaming.runtime.partitioner.ConfigurableStreamPartitioner;
import org.apache.flink.streaming.runtime.partitioner.StreamPartitioner;
import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;
import org.apache.flink.streaming.runtime.streamstatus.StreamStatusMaintainer;
import org.apache.flink.streaming.runtime.tasks.mailbox.MailboxDefaultAction;
import org.apache.flink.streaming.runtime.tasks.mailbox.MailboxExecutorFactory;
import org.apache.flink.streaming.runtime.tasks.mailbox.MailboxProcessor;
import org.apache.flink.streaming.runtime.tasks.mailbox.TaskMailbox;
import org.apache.flink.streaming.runtime.tasks.mailbox.TaskMailboxImpl;
import org.apache.flink.util.ExceptionUtils;
import org.apache.flink.util.FlinkException;
import org.apache.flink.util.Preconditions;
import org.apache.flink.util.SerializedValue;
import org.apache.flink.util.function.RunnableWithException;
import org.apache.flink.util.function.ThrowingRunnable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.annotation.Nullable;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.OptionalLong;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.ThreadFactory;
/**
* Base class for all streaming tasks. A task is the unit of local processing that is deployed
* and executed by the TaskManagers. Each task runs one or more {@link StreamOperator}s which form
* the Task's operator chain. Operators that are chained together execute synchronously in the
* same thread and hence on the same stream partition. A common case for these chains
* are successive map/flatmap/filter tasks.
*
* <p>The task chain contains one "head" operator and multiple chained operators.
* The StreamTask is specialized for the type of the head operator: one-input and two-input tasks,
* as well as for sources, iteration heads and iteration tails.
*
* <p>The Task class deals with the setup of the streams read by the head operator, and the streams
* produced by the operators at the ends of the operator chain. Note that the chain may fork and
* thus have multiple ends.
*
* <p>The life cycle of the task is set up as follows:
* <pre>{@code
* -- setInitialState -> provides state of all operators in the chain
*
* -- invoke()
* |
* +----> Create basic utils (config, etc) and load the chain of operators
* +----> operators.setup()
* +----> task specific init()
* +----> initialize-operator-states()
* +----> open-operators()
* +----> run()
* +----> close-operators()
* +----> dispose-operators()
* +----> common cleanup
* +----> task specific cleanup()
* }</pre>
*
* <p>The {@code StreamTask} has a lock object called {@code lock}. All calls to methods on a
* {@code StreamOperator} must be synchronized on this lock object to ensure that no methods
* are called concurrently.
*
* @param <OUT>
* @param <OP>
*/
@Internal
public abstract class StreamTask<OUT, OP extends StreamOperator<OUT>>
extends AbstractInvokable
implements AsyncExceptionHandler {
/** The thread group that holds all trigger timer threads. */
public static final ThreadGroup TRIGGER_THREAD_GROUP = new ThreadGroup("Triggers");
/** The logger used by the StreamTask and its subclasses. */
protected static final Logger LOG = LoggerFactory.getLogger(StreamTask.class);
// ------------------------------------------------------------------------
/**
* All actions outside of the task {@link #mailboxProcessor mailbox} (i.e. performed by another thread) must be executed through this executor
* to ensure that we don't have concurrent method calls that void consistent checkpoints.
* <p>CheckpointLock is superseded by {@link MailboxExecutor}, with
* {@link StreamTaskActionExecutor.SynchronizedStreamTaskActionExecutor SynchronizedStreamTaskActionExecutor}
* to provide lock to {@link SourceStreamTask}. </p>
*/
private final StreamTaskActionExecutor actionExecutor;
/**
* The input processor. Initialized in {@link #init()} method.
*/
@Nullable
protected StreamInputProcessor inputProcessor;
/** the head operator that consumes the input streams of this task. */
protected OP headOperator;
/** The chain of operators executed by this task. */
protected OperatorChain<OUT, OP> operatorChain;
/** The configuration of this streaming task. */
protected final StreamConfig configuration;
/** Our state backend. We use this to create checkpoint streams and a keyed state backend. */
protected final StateBackend stateBackend;
private final SubtaskCheckpointCoordinator subtaskCheckpointCoordinator;
/**
* The internal {@link TimerService} used to define the current
* processing time (default = {@code System.currentTimeMillis()}) and
* register timers for tasks to be executed in the future.
*/
protected final TimerService timerService;
/** The currently active background materialization threads. */
private final CloseableRegistry cancelables = new CloseableRegistry();
private final StreamTaskAsyncExceptionHandler asyncExceptionHandler;
/**
* Flag to mark the task "in operation", in which case check needs to be initialized to true,
* so that early cancel() before invoke() behaves correctly.
*/
private volatile boolean isRunning;
/** Flag to mark this task as canceled. */
private volatile boolean canceled;
/** Flag to mark this task as failing, i.e. if an exception has occurred inside {@link #invoke()}. */
private volatile boolean failing;
private boolean disposedOperators;
/** Thread pool for async snapshot workers. */
private final ExecutorService asyncOperationsThreadPool;
private final RecordWriterDelegate<SerializationDelegate<StreamRecord<OUT>>> recordWriter;
protected final MailboxProcessor mailboxProcessor;
final MailboxExecutor mainMailboxExecutor;
/**
* TODO it might be replaced by the global IO executor on TaskManager level future.
*/
private final ExecutorService channelIOExecutor;
private Long syncSavepointId = null;
private long latestAsyncCheckpointStartDelayNanos;
// ------------------------------------------------------------------------
/**
 * Constructor for initialization, possibly with initial state (recovery / savepoint / etc).
 *
 * <p>Delegates to {@link #StreamTask(Environment, TimerService)} with a {@code null} timer
 * service, so a default one is created by the full constructor.
 *
 * @param env The task environment for this task.
 */
protected StreamTask(Environment env) throws Exception {
    this(env, null);
}
/**
 * Constructor for initialization, possibly with initial state (recovery / savepoint / etc).
 *
 * <p>Delegates onward using {@link FatalExitExceptionHandler#INSTANCE} as the uncaught
 * exception handler for the async operations thread pool.
 *
 * @param env The task environment for this task.
 * @param timerService Optionally, a specific timer service to use.
 */
protected StreamTask(Environment env, @Nullable TimerService timerService) throws Exception {
    this(env, timerService, FatalExitExceptionHandler.INSTANCE);
}
/**
 * Constructor for initialization; delegates onward with the {@code IMMEDIATE} (non-synchronizing)
 * {@link StreamTaskActionExecutor}.
 *
 * @param environment The task environment for this task.
 * @param timerService Optionally, a specific timer service to use.
 * @param uncaughtExceptionHandler handler for uncaught exceptions in the async operations thread pool.
 */
protected StreamTask(
        Environment environment,
        @Nullable TimerService timerService,
        Thread.UncaughtExceptionHandler uncaughtExceptionHandler) throws Exception {
    this(environment, timerService, uncaughtExceptionHandler, StreamTaskActionExecutor.IMMEDIATE);
}
/**
 * Constructor for initialization, possibly with initial state (recovery / savepoint / etc).
 *
 * <p>This constructor accepts a special {@link TimerService}. By default (and if
 * null is passed for the timer service) a {@link SystemProcessingTimeService DefaultTimerService}
 * will be used.
 *
 * @param environment The task environment for this task.
 * @param timerService Optionally, a specific timer service to use.
 * @param uncaughtExceptionHandler to handle uncaught exceptions in the async operations thread pool
 * @param actionExecutor a mean to wrap all actions performed by this task thread. Currently, only SynchronizedActionExecutor can be used to preserve locking semantics.
 */
protected StreamTask(
        Environment environment,
        @Nullable TimerService timerService,
        Thread.UncaughtExceptionHandler uncaughtExceptionHandler,
        StreamTaskActionExecutor actionExecutor) throws Exception {
    // Delegate to the full constructor with a fresh mailbox owned by the current thread.
    this(environment, timerService, uncaughtExceptionHandler, actionExecutor, new TaskMailboxImpl(Thread.currentThread()));
}
/**
 * Full constructor: wires the stream config, record writers, mailbox processor, async
 * operations thread pool, state backend, checkpoint coordinator and timer service.
 *
 * @param environment The task environment for this task.
 * @param timerService Optionally, a specific timer service to use; if null, a
 *        {@link SystemProcessingTimeService} is created.
 * @param uncaughtExceptionHandler handler for uncaught exceptions in the async operations thread pool.
 * @param actionExecutor wraps all actions performed by this task thread.
 * @param mailbox the task mailbox driving the {@link MailboxProcessor}.
 */
protected StreamTask(
        Environment environment,
        @Nullable TimerService timerService,
        Thread.UncaughtExceptionHandler uncaughtExceptionHandler,
        StreamTaskActionExecutor actionExecutor,
        TaskMailbox mailbox) throws Exception {
    super(environment);
    this.configuration = new StreamConfig(getTaskConfiguration());
    // Record writers are created before the mailbox processor: the default mailbox action
    // (processInput) consults recordWriter.isAvailable().
    this.recordWriter = createRecordWriterDelegate(configuration, environment);
    this.actionExecutor = Preconditions.checkNotNull(actionExecutor);
    this.mailboxProcessor = new MailboxProcessor(this::processInput, mailbox, actionExecutor);
    this.mailboxProcessor.initMetric(environment.getMetricGroup());
    this.mainMailboxExecutor = mailboxProcessor.getMainMailboxExecutor();
    this.asyncExceptionHandler = new StreamTaskAsyncExceptionHandler(environment);
    this.asyncOperationsThreadPool = Executors.newCachedThreadPool(
        new ExecutorThreadFactory("AsyncOperations", uncaughtExceptionHandler));
    this.stateBackend = createStateBackend();
    this.subtaskCheckpointCoordinator = new SubtaskCheckpointCoordinatorImpl(
        stateBackend.createCheckpointStorage(getEnvironment().getJobID()),
        getName(),
        actionExecutor,
        getCancelables(),
        getAsyncOperationsThreadPool(),
        getEnvironment(),
        this,
        configuration.isUnalignedCheckpointsEnabled(),
        this::prepareInputSnapshot);
    // if the clock is not already set, then assign a default TimeServiceProvider
    if (timerService == null) {
        ThreadFactory timerThreadFactory = new DispatcherThreadFactory(TRIGGER_THREAD_GROUP, "Time Trigger for " + getName());
        this.timerService = new SystemProcessingTimeService(this::handleTimerException, timerThreadFactory);
    } else {
        this.timerService = timerService;
    }
    // Dedicated single-thread executor (thread name "channel-state-unspilling"); per the field
    // comment it may eventually be replaced by a TaskManager-level IO executor.
    this.channelIOExecutor = Executors.newSingleThreadExecutor(new ExecutorThreadFactory("channel-state-unspilling"));
}
/**
 * Snapshots in-flight input data for the given checkpoint; completes immediately
 * when no input processor has been created yet.
 */
private CompletableFuture<Void> prepareInputSnapshot(ChannelStateWriter channelStateWriter, long checkpointId) throws IOException {
    return inputProcessor == null
        ? FutureUtils.completedVoidFuture()
        : inputProcessor.prepareSnapshot(channelStateWriter, checkpointId);
}
/** Returns the per-subtask checkpoint coordinator (package-private, for tests). */
SubtaskCheckpointCoordinator getCheckpointCoordinator() {
    return this.subtaskCheckpointCoordinator;
}
// ------------------------------------------------------------------------
// Life cycle methods for specific implementations
// ------------------------------------------------------------------------
/**
 * Performs task-specific initialization (e.g. setting up the input processor);
 * called from {@code beforeInvoke()} before the task starts running.
 */
protected abstract void init() throws Exception;
/** Hook for subclasses to react to task cancellation; the default implementation does nothing. */
protected void cancelTask() throws Exception {
}
/** Releases task-specific resources; by default closes the input processor if one exists. */
protected void cleanup() throws Exception {
    if (inputProcessor == null) {
        return;
    }
    inputProcessor.close();
}
/**
 * This method implements the default action of the task (e.g. processing one event from the input).
 * Implementations should (in general) be non-blocking.
 *
 * @param controller controller object for collaborative interaction between the action and the stream task.
 * @throws Exception on any problems in the action.
 */
protected void processInput(MailboxDefaultAction.Controller controller) throws Exception {
    final InputStatus status = inputProcessor.processInput();
    // The two status checks are mutually exclusive, so their order is irrelevant.
    if (status == InputStatus.END_OF_INPUT) {
        controller.allActionsCompleted();
        return;
    }
    if (status == InputStatus.MORE_AVAILABLE && recordWriter.isAvailable()) {
        // Fast path: more input and output capacity — keep the default action running.
        return;
    }
    // Either input or output is unavailable: suspend until both become available again.
    final CompletableFuture<?> jointFuture = getInputOutputJointFuture(status);
    final MailboxDefaultAction.Suspension suspension = controller.suspendDefaultAction();
    jointFuture.thenRun(suspension::resume);
}
/**
 * Considers three scenarios to combine input and output futures:
 * 1. Both input and output are unavailable.
 * 2. Only input is unavailable.
 * 3. Only output is unavailable.
 */
@VisibleForTesting
CompletableFuture<?> getInputOutputJointFuture(InputStatus status) {
    if (status != InputStatus.NOTHING_AVAILABLE) {
        // Input has data (or is finishing), so only the output gated us.
        return recordWriter.getAvailableFuture();
    }
    // Input is unavailable; additionally wait on the output when it is unavailable too.
    return recordWriter.isAvailable()
        ? inputProcessor.getAvailableFuture()
        : CompletableFuture.allOf(inputProcessor.getAvailableFuture(), recordWriter.getAvailableFuture());
}
/** Clears the pending stop-with-savepoint checkpoint id, releasing the savepoint mailbox loop. */
private void resetSynchronousSavepointId() {
    this.syncSavepointId = null;
}
/**
 * Records the id of an in-flight stop-with-savepoint checkpoint; at most one
 * such checkpoint may be pending at a time.
 */
private void setSynchronousSavepointId(long checkpointId) {
    Preconditions.checkState(
        syncSavepointId == null,
        "at most one stop-with-savepoint checkpoint at a time is allowed");
    this.syncSavepointId = checkpointId;
}
/** Returns the pending stop-with-savepoint checkpoint id, if any (for tests). */
@VisibleForTesting
OptionalLong getSynchronousSavepointId() {
    return syncSavepointId == null ? OptionalLong.empty() : OptionalLong.of(syncSavepointId);
}
/** Whether the given checkpoint id is the pending stop-with-savepoint checkpoint. */
private boolean isSynchronousSavepointId(long checkpointId) {
    if (syncSavepointId == null) {
        return false;
    }
    return syncSavepointId == checkpointId;
}
/**
 * Yields to the mailbox (at maximum priority) until the pending stop-with-savepoint
 * checkpoint is acknowledged ({@code syncSavepointId} reset) or the task is canceled.
 */
private void runSynchronousSavepointMailboxLoop() throws Exception {
    assert syncSavepointId != null;
    MailboxExecutor mailboxExecutor = mailboxProcessor.getMailboxExecutor(TaskMailbox.MAX_PRIORITY);
    while (!canceled && syncSavepointId != null) {
        mailboxExecutor.yield();
    }
}
/**
 * Emits the {@link org.apache.flink.streaming.api.watermark.Watermark#MAX_WATERMARK MAX_WATERMARK}
 * so that all registered timers are fired.
 *
 * <p>This is used by the source task when the job is {@code TERMINATED}. In that case,
 * we want all the timers registered throughout the pipeline to fire and the related
 * state (e.g. windows) to be flushed.
 *
 * <p>For tasks other than the source task, this method does nothing.
 */
protected void advanceToEndOfEventTime() throws Exception {
}
/**
 * Instructs the task to go through its normal termination routine, i.e. exit the run-loop
 * and call {@link StreamOperator#close()} and {@link StreamOperator#dispose()} on its operators.
 *
 * <p>This is used by the source task to get out of the run-loop when the job is stopped with a savepoint.
 *
 * <p>For tasks other than the source task, this method does nothing.
 */
protected void finishTask() throws Exception {
}
// ------------------------------------------------------------------------
// Core work methods of the Stream Task
// ------------------------------------------------------------------------
/** Creates the initializer that restores/creates operator state from the configured backend. */
public StreamTaskStateInitializer createStreamTaskStateInitializer() {
    return new StreamTaskStateInitializerImpl(getEnvironment(), stateBackend);
}
/**
 * Fetches the operator's records-in counter; falls back to a throwaway
 * {@link SimpleCounter} if the metric group cannot provide one.
 */
protected Counter setupNumRecordsInCounter(StreamOperator streamOperator) {
    try {
        OperatorMetricGroup operatorMetrics = (OperatorMetricGroup) streamOperator.getMetricGroup();
        return operatorMetrics.getIOMetricGroup().getNumRecordsInCounter();
    } catch (Exception e) {
        // Metrics must never fail the task — degrade to an unregistered counter.
        LOG.warn("An exception occurred during the metrics setup.", e);
        return new SimpleCounter();
    }
}
/**
 * Performs all initialization before the mailbox loop starts: builds the operator
 * chain, runs task-specific {@link #init()}, restores and opens all operators, and
 * reads back recovered channel state. Marks the task as running on success.
 */
protected void beforeInvoke() throws Exception {
    disposedOperators = false;
    LOG.debug("Initializing {}.", getName());
    operatorChain = new OperatorChain<>(this, recordWriter);
    headOperator = operatorChain.getHeadOperator();
    // task specific initialization
    init();
    // save the work of reloading state, etc, if the task is already canceled
    if (canceled) {
        throw new CancelTaskException();
    }
    // -------- Invoke --------
    LOG.debug("Invoking {}", getName());
    // we need to make sure that any triggers scheduled in open() cannot be
    // executed before all operators are opened
    actionExecutor.runThrowing(() -> {
        // both the following operations are protected by the lock
        // so that we avoid race conditions in the case that initializeState()
        // registers a timer, that fires before the open() is called.
        operatorChain.initializeStateAndOpenOperators(createStreamTaskStateInitializer());
        readRecoveredChannelState();
    });
    isRunning = true;
}
/**
 * Reads back in-flight (channel) state captured by an unaligned checkpoint. When no
 * channel state exists, partitions are requested immediately; otherwise output state
 * is recovered first, then input gates recover asynchronously on the channel I/O
 * executor, and partitions are requested once all gates have finished.
 */
private void readRecoveredChannelState() throws IOException, InterruptedException {
    ChannelStateReader reader = getEnvironment().getTaskStateManager().getChannelStateReader();
    if (!reader.hasChannelStates()) {
        requestPartitions();
        return;
    }
    ResultPartitionWriter[] writers = getEnvironment().getAllWriters();
    if (writers != null) {
        for (ResultPartitionWriter writer : writers) {
            writer.readRecoveredState(reader);
        }
    }
    // Recovering the input side after the output side ensures the output can request
    // floating buffers from the global pool first.
    InputGate[] inputGates = getEnvironment().getAllInputGates();
    if (inputGates != null && inputGates.length > 0) {
        CompletableFuture[] futures = new CompletableFuture[inputGates.length];
        for (int i = 0; i < inputGates.length; i++) {
            futures[i] = inputGates[i].readRecoveredState(channelIOExecutor, reader);
        }
        // Note that we must request partition after all the single gates finished recovery.
        CompletableFuture.allOf(futures).thenRun(() -> mainMailboxExecutor.execute(
            this::requestPartitions, "Input gates request partitions"));
    }
}
/** Asks every input gate to request its upstream result partitions. */
private void requestPartitions() throws IOException {
    final InputGate[] gates = getEnvironment().getAllInputGates();
    if (gates == null) {
        return;
    }
    for (InputGate gate : gates) {
        gate.requestPartitions();
    }
}
/**
 * Task entry point: initialization, mailbox loop, orderly shutdown. On failure the
 * cleanup exception (if any) is suppressed under the original failure; on success
 * cleanup runs after {@link #afterInvoke()}.
 */
@Override
public final void invoke() throws Exception {
    try {
        beforeInvoke();
        // final check to exit early before starting to run
        if (canceled) {
            throw new CancelTaskException();
        }
        // let the task do its work
        runMailboxLoop();
        // if this left the run() method cleanly despite the fact that this was canceled,
        // make sure the "clean shutdown" is not attempted
        if (canceled) {
            throw new CancelTaskException();
        }
        afterInvoke();
    }
    catch (Throwable invokeException) {
        // "failing" only when this is a genuine failure, not a cancellation
        failing = !canceled;
        try {
            cleanUpInvoke();
        }
        // TODO: investigate why Throwable instead of Exception is used here.
        catch (Throwable cleanUpException) {
            Throwable throwable = ExceptionUtils.firstOrSuppressed(cleanUpException, invokeException);
            ExceptionUtils.rethrowException(throwable);
        }
        ExceptionUtils.rethrowException(invokeException);
    }
    // success path: catch block above always rethrows, so this runs exactly once
    cleanUpInvoke();
}
/** Runs a single mailbox step; returns whether any work was performed (for tests). */
@VisibleForTesting
public boolean runMailboxStep() throws Exception {
    return this.mailboxProcessor.runMailboxStep();
}
/** Runs the mailbox processing loop until all actions are completed or the task stops. */
private void runMailboxLoop() throws Exception {
    this.mailboxProcessor.runMailboxLoop();
}
/**
 * Orderly shutdown after the mailbox loop exits cleanly: close operators, quiesce
 * timers, drain remaining mails, flush outputs, and dispose operators. The sequence
 * of these steps is deliberate and must not be reordered.
 */
protected void afterInvoke() throws Exception {
    LOG.debug("Finished task {}", getName());
    getCompletionFuture().exceptionally(unused -> null).join();
    final CompletableFuture<Void> timersFinishedFuture = new CompletableFuture<>();
    // close all operators in a chain effect way
    operatorChain.closeOperators(actionExecutor);
    // make sure no further checkpoint and notification actions happen
    actionExecutor.runThrowing(() -> {
        // make sure no new timers can come
        FutureUtils.forward(timerService.quiesce(), timersFinishedFuture);
        // let mailbox execution reject all new letters from this point
        mailboxProcessor.prepareClose();
        // only set the StreamTask to not running after all operators have been closed!
        // See FLINK-7430
        isRunning = false;
    });
    // processes the remaining mails; no new mails can be enqueued
    mailboxProcessor.drain();
    // make sure all timers finish
    timersFinishedFuture.get();
    LOG.debug("Closed operators for task {}", getName());
    // make sure all buffered data is flushed
    operatorChain.flushOutputs();
    // make an attempt to dispose the operators such that failures in the dispose call
    // still let the computation fail
    disposeAllOperators();
}
/**
 * Best-effort cleanup of everything {@code beforeInvoke()} initialized. Each step is
 * attempted even if earlier ones fail; the first failure is rethrown at the end with
 * subsequent failures attached as suppressed exceptions.
 */
protected void cleanUpInvoke() throws Exception {
    getCompletionFuture().exceptionally(unused -> null).join();
    // clean up everything we initialized
    isRunning = false;
    // Now that we are outside the user code, we do not want to be interrupted further
    // upon cancellation. The shutdown logic below needs to make sure it does not issue calls
    // that block and stall shutdown.
    // Additionally, the cancellation watch dog will issue a hard-cancel (kill the TaskManager
    // process) as a backup in case some shutdown procedure blocks outside our control.
    setShouldInterruptOnCancel(false);
    // clear any previously issued interrupt for a more graceful shutdown
    Thread.interrupted();
    // stop all timers and threads
    Exception suppressedException = runAndSuppressThrowable(this::tryShutdownTimerService, null);
    // stop all asynchronous checkpoint threads
    suppressedException = runAndSuppressThrowable(cancelables::close, suppressedException);
    suppressedException = runAndSuppressThrowable(this::shutdownAsyncThreads, suppressedException);
    // we must! perform this cleanup
    suppressedException = runAndSuppressThrowable(this::cleanup, suppressedException);
    // if the operators were not disposed before, do a hard dispose
    suppressedException = runAndSuppressThrowable(this::disposeAllOperators, suppressedException);
    // release the output resources. this method should never fail.
    suppressedException = runAndSuppressThrowable(this::releaseOutputResources, suppressedException);
    suppressedException = runAndSuppressThrowable(channelIOExecutor::shutdown, suppressedException);
    suppressedException = runAndSuppressThrowable(mailboxProcessor::close, suppressedException);
    if (suppressedException != null) {
        throw suppressedException;
    }
}
/** Future tracking task completion; the base implementation is already complete. */
protected CompletableFuture<Void> getCompletionFuture() {
    return FutureUtils.completedVoidFuture();
}
/**
 * Cancels the task: flips the running/canceled flags, runs the subclass cancel hook,
 * and — regardless of the hook's outcome — stops the mailbox and closes cancelables
 * once the completion future finishes.
 */
@Override
public final void cancel() throws Exception {
    isRunning = false;
    canceled = true;
    // the "cancel task" call must come first, but the cancelables must be
    // closed no matter what
    try {
        cancelTask();
    }
    finally {
        getCompletionFuture()
            .whenComplete((unusedResult, unusedError) -> {
                // WARN: the method is called from the task thread but the callback can be invoked from a different thread
                mailboxProcessor.allActionsCompleted();
                try {
                    cancelables.close();
                } catch (IOException e) {
                    throw new CompletionException(e);
                }
            });
    }
}
/** Factory producing mailbox executors at a given priority. */
public MailboxExecutorFactory getMailboxExecutorFactory() {
    return mailboxProcessor::getMailboxExecutor;
}
/** Whether the task is currently running (between successful init and shutdown). */
public final boolean isRunning() {
    return this.isRunning;
}
/** Whether {@link #cancel()} has been called on this task. */
public final boolean isCanceled() {
    return this.canceled;
}
/** Whether the task failed with an exception (as opposed to being canceled). */
public final boolean isFailing() {
    return this.failing;
}
/** Forcibly shuts down the async-operations pool if it is not already shut down. */
private void shutdownAsyncThreads() throws Exception {
    if (asyncOperationsThreadPool.isShutdown()) {
        return;
    }
    asyncOperationsThreadPool.shutdownNow();
}
/** Releases output-side resources; closes only the record writers when no chain was built. */
private void releaseOutputResources() throws Exception {
    if (operatorChain == null) {
        // failed to allocate operatorChain, clean up record writers
        recordWriter.close();
        return;
    }
    // beware: without synchronization, #performCheckpoint() may run in
    // parallel and this call is not thread-safe
    actionExecutor.run(() -> operatorChain.releaseOutputs());
}
/**
 * Runs {@code runnable}, folding any thrown {@link Throwable} into the exception chain:
 * returns {@code originalException} on success, or the first/suppressed combination on failure.
 */
private Exception runAndSuppressThrowable(ThrowingRunnable<?> runnable, @Nullable Exception originalException) {
    try {
        runnable.run();
        return originalException;
    } catch (Throwable t) {
        // TODO: investigate why Throwable instead of Exception is used here.
        final Exception wrapped = (t instanceof Exception) ? (Exception) t : new Exception(t);
        return ExceptionUtils.firstOrSuppressed(wrapped, originalException);
    }
}
/**
 * Executes {@link StreamOperator#dispose()} of each operator in the chain of this
 * {@link StreamTask}. Disposing happens from <b>tail to head</b> operator in the chain.
 * All operators are attempted even if one fails; the first failure is rethrown with
 * later ones suppressed. Idempotent via the {@code disposedOperators} flag.
 */
private void disposeAllOperators() throws Exception {
    if (operatorChain != null && !disposedOperators) {
        Exception disposalException = null;
        for (StreamOperatorWrapper<?, ?> operatorWrapper : operatorChain.getAllOperators(true)) {
            StreamOperator<?> operator = operatorWrapper.getStreamOperator();
            try {
                operator.dispose();
            }
            catch (Exception e) {
                disposalException = ExceptionUtils.firstOrSuppressed(e, disposalException);
            }
        }
        disposedOperators = true;
        if (disposalException != null) {
            throw disposalException;
        }
    }
}
/**
 * The finalize method shuts down the timer. This is a fail-safe shutdown, in case the original
 * shutdown method was never called.
 *
 * <p>This should not be relied upon! It will cause shutdown to happen much later than if manual
 * shutdown is attempted, and cause threads to linger for longer than needed.
 */
@Override
protected void finalize() throws Throwable {
    super.finalize();
    if (!timerService.isTerminated()) {
        LOG.info("Timer service is shutting down.");
        timerService.shutdownService();
    }
    cancelables.close();
}
/**
 * Whether the configured time characteristic (event time or ingestion time)
 * requires record timestamps to be serialized.
 */
boolean isSerializingTimestamps() {
    TimeCharacteristic tc = configuration.getTimeCharacteristic();
    // Use the short-circuit '||' instead of the original bitwise '|': both operands
    // are side-effect-free comparisons, so the result is identical but intent is clear.
    return tc == TimeCharacteristic.EventTime || tc == TimeCharacteristic.IngestionTime;
}
// ------------------------------------------------------------------------
// Access to properties and utilities
// ------------------------------------------------------------------------
/**
 * Gets the name of the task, in the form "taskname (2/5)".
 * @return The name of the task.
 */
public final String getName() {
    return getEnvironment().getTaskInfo().getTaskNameWithSubtasks();
}
/**
 * Gets the name of the task, appended with the subtask indicator and execution id.
 *
 * @return The name of the task, with subtask indicator and execution id.
 */
String getTaskNameWithSubtaskAndId() {
    return String.format(
        "%s (%s)",
        getEnvironment().getTaskInfo().getTaskNameWithSubtasks(),
        getEnvironment().getExecutionId());
}
/** Returns the checkpoint storage view used by this subtask's checkpoint coordinator. */
public CheckpointStorageWorkerView getCheckpointStorage() {
    return this.subtaskCheckpointCoordinator.getCheckpointStorage();
}
/** Returns the stream configuration of this task. */
public StreamConfig getConfiguration() {
    return this.configuration;
}
/** The operator chain doubles as the stream-status maintainer for this task. */
public StreamStatusMaintainer getStreamStatusMaintainer() {
    return this.operatorChain;
}
/** Returns the chain's record-writer outputs (package-private). */
RecordWriterOutput<?>[] getStreamOutputs() {
    return this.operatorChain.getStreamOutputs();
}
// ------------------------------------------------------------------------
// Checkpoint and Restore
// ------------------------------------------------------------------------
/**
 * Asynchronously triggers a checkpoint from the mailbox thread and reports the
 * outcome through the returned future. Failures are propagated both to the future
 * and to the mailbox (to fail the task).
 */
@Override
public Future<Boolean> triggerCheckpointAsync(
        CheckpointMetaData checkpointMetaData,
        CheckpointOptions checkpointOptions,
        boolean advanceToEndOfEventTime) {
    CompletableFuture<Boolean> result = new CompletableFuture<>();
    mainMailboxExecutor.execute(
        () -> {
            // record how long the trigger request waited before the mailbox picked it up
            latestAsyncCheckpointStartDelayNanos = 1_000_000 * Math.max(
                0,
                System.currentTimeMillis() - checkpointMetaData.getTimestamp());
            try {
                result.complete(triggerCheckpoint(checkpointMetaData, checkpointOptions, advanceToEndOfEventTime));
            }
            catch (Exception ex) {
                // Report the failure both via the Future result but also to the mailbox
                result.completeExceptionally(ex);
                throw ex;
            }
        },
        "checkpoint %s with %s",
        checkpointMetaData,
        checkpointOptions);
    return result;
}
/**
 * Performs a task-initiated checkpoint (no barrier alignment). Declines the
 * checkpoint when it cannot be performed; exceptions only propagate while the task
 * is still running, otherwise the attempt is logged and reported as unsuccessful.
 */
private boolean triggerCheckpoint(
        CheckpointMetaData checkpointMetaData,
        CheckpointOptions checkpointOptions,
        boolean advanceToEndOfEventTime) throws Exception {
    try {
        // No alignment if we inject a checkpoint
        CheckpointMetrics checkpointMetrics = new CheckpointMetrics().setAlignmentDurationNanos(0L);
        subtaskCheckpointCoordinator.initCheckpoint(checkpointMetaData.getCheckpointId(), checkpointOptions);
        boolean success = performCheckpoint(checkpointMetaData, checkpointOptions, checkpointMetrics, advanceToEndOfEventTime);
        if (!success) {
            declineCheckpoint(checkpointMetaData.getCheckpointId());
        }
        return success;
    } catch (Exception e) {
        // propagate exceptions only if the task is still in "running" state
        if (isRunning) {
            throw new Exception("Could not perform checkpoint " + checkpointMetaData.getCheckpointId() +
                " for operator " + getName() + '.', e);
        } else {
            LOG.debug("Could not perform checkpoint {} for operator {} while the " +
                "invokable was not in state running.", checkpointMetaData.getCheckpointId(), getName(), e);
            return false;
        }
    }
}
/**
 * Runs the given action in the task (mailbox) thread: directly when already on it,
 * otherwise by enqueueing it into the main mailbox executor.
 */
@Override
public <E extends Exception> void executeInTaskThread(
        ThrowingRunnable<E> runnable,
        String descriptionFormat,
        Object... descriptionArgs) throws E {
    if (!mailboxProcessor.isMailboxThread()) {
        mainMailboxExecutor.execute(runnable, descriptionFormat, descriptionArgs);
        return;
    }
    runnable.run();
}
/**
 * Performs a barrier-triggered checkpoint; for a synchronous (stop-with-savepoint)
 * checkpoint, additionally blocks in the savepoint mailbox loop until acknowledged.
 */
@Override
public void triggerCheckpointOnBarrier(
        CheckpointMetaData checkpointMetaData,
        CheckpointOptions checkpointOptions,
        CheckpointMetrics checkpointMetrics) throws IOException {
    try {
        if (performCheckpoint(checkpointMetaData, checkpointOptions, checkpointMetrics, false)) {
            if (isSynchronousSavepointId(checkpointMetaData.getCheckpointId())) {
                runSynchronousSavepointMailboxLoop();
            }
        }
    }
    catch (CancelTaskException e) {
        // cancellation is expected during shutdown; rethrow without wrapping
        LOG.info("Operator {} was cancelled while performing checkpoint {}.",
            getName(), checkpointMetaData.getCheckpointId());
        throw e;
    }
    catch (Exception e) {
        throw new IOException("Could not perform checkpoint " + checkpointMetaData.getCheckpointId() + " for operator " +
            getName() + '.', e);
    }
}
/** Forwards a barrier-signaled checkpoint abort to the subtask checkpoint coordinator. */
@Override
public void abortCheckpointOnBarrier(long checkpointId, Throwable cause) throws IOException {
    subtaskCheckpointCoordinator.abortCheckpointOnBarrier(checkpointId, cause, operatorChain);
}
/**
 * Performs the actual checkpoint while the task is running; otherwise broadcasts a
 * cancellation marker downstream. Returns whether the checkpoint was performed.
 */
private boolean performCheckpoint(
        CheckpointMetaData checkpointMetaData,
        CheckpointOptions checkpointOptions,
        CheckpointMetrics checkpointMetrics,
        boolean advanceToEndOfTime) throws Exception {
    LOG.debug("Starting checkpoint ({}) {} on task {}",
        checkpointMetaData.getCheckpointId(), checkpointOptions.getCheckpointType(), getName());
    if (isRunning) {
        actionExecutor.runThrowing(() -> {
            if (checkpointOptions.getCheckpointType().isSynchronous()) {
                // remember the stop-with-savepoint id so the mailbox loop can block on it
                setSynchronousSavepointId(checkpointMetaData.getCheckpointId());
                if (advanceToEndOfTime) {
                    advanceToEndOfEventTime();
                }
            }
            subtaskCheckpointCoordinator.checkpointState(
                checkpointMetaData,
                checkpointOptions,
                checkpointMetrics,
                operatorChain,
                this::isCanceled);
        });
        return true;
    } else {
        actionExecutor.runThrowing(() -> {
            // we cannot perform our checkpoint - let the downstream operators know that they
            // should not wait for any input from this operator
            // we cannot broadcast the cancellation markers on the 'operator chain', because it may not
            // yet be created
            final CancelCheckpointMarker message = new CancelCheckpointMarker(checkpointMetaData.getCheckpointId());
            recordWriter.broadcastEvent(message);
        });
        return false;
    }
}
/**
 * Reports to the environment that this task declined the given checkpoint because
 * it was not yet ready to perform it.
 *
 * @param checkpointId id of the declined checkpoint
 */
protected void declineCheckpoint(long checkpointId) {
    getEnvironment().declineCheckpoint(
        checkpointId,
        // Fixed message: the original concatenated "Task Name" directly onto the task
        // name, yielding e.g. "Task NameMyTask (1/2)"; add the missing separator.
        new CheckpointException("Task name: " + getName(), CheckpointFailureReason.CHECKPOINT_DECLINED_TASK_NOT_READY));
}
/** Thread pool for asynchronous operations (e.g. async checkpoint uploads). */
public final ExecutorService getAsyncOperationsThreadPool() {
    return this.asyncOperationsThreadPool;
}
/** Enqueues a checkpoint-complete notification into the mailbox and returns its future. */
@Override
public Future<Void> notifyCheckpointCompleteAsync(long checkpointId) {
    final String description = String.format("checkpoint %d complete", checkpointId);
    return notifyCheckpointOperation(() -> notifyCheckpointComplete(checkpointId), description);
}
/** Enqueues a checkpoint-aborted notification into the mailbox and returns its future. */
@Override
public Future<Void> notifyCheckpointAbortAsync(long checkpointId) {
    final String description = String.format("checkpoint %d aborted", checkpointId);
    return notifyCheckpointOperation(
        () -> subtaskCheckpointCoordinator.notifyCheckpointAborted(checkpointId, operatorChain, this::isRunning),
        description);
}
/**
 * Runs a checkpoint notification at maximum mailbox priority, reporting failure both
 * to the returned future and (by rethrowing) to the mailbox so the task fails.
 */
private Future<Void> notifyCheckpointOperation(RunnableWithException runnable, String description) {
    CompletableFuture<Void> result = new CompletableFuture<>();
    mailboxProcessor.getMailboxExecutor(TaskMailbox.MAX_PRIORITY).execute(
        () -> {
            try {
                runnable.run();
            }
            catch (Exception ex) {
                result.completeExceptionally(ex);
                throw ex;
            }
            result.complete(null);
        },
        description);
    return result;
}
/**
 * Forwards the completion notification to the checkpoint coordinator; if the
 * completed checkpoint is the pending stop-with-savepoint, finishes the task and
 * releases the synchronous savepoint mailbox loop.
 */
private void notifyCheckpointComplete(long checkpointId) throws Exception {
    subtaskCheckpointCoordinator.notifyCheckpointComplete(checkpointId, operatorChain, this::isRunning);
    if (!isRunning || !isSynchronousSavepointId(checkpointId)) {
        return;
    }
    finishTask();
    // Reset to "notify" the internal synchronous savepoint mailbox loop.
    resetSynchronousSavepointId();
}
/**
 * Shuts down the timer service with the configured cancellation timeout, logging
 * (but not failing) when pending timers outlast the limit.
 */
private void tryShutdownTimerService() {
    if (timerService.isTerminated()) {
        return;
    }
    final long timeoutMs = getEnvironment()
        .getTaskManagerInfo()
        .getConfiguration()
        .getLong(TaskManagerOptions.TASK_CANCELLATION_TIMEOUT_TIMERS);
    if (!timerService.shutdownServiceUninterruptible(timeoutMs)) {
        LOG.warn("Timer service shutdown exceeded time limit of {} ms while waiting for pending " +
            "timers. Will continue with shutdown procedure.", timeoutMs);
    }
}
// ------------------------------------------------------------------------
// Operator Events
// ------------------------------------------------------------------------
/**
 * Dispatches a coordinator-sent operator event via the mailbox to the target
 * operator. Failures inside the mail are reported to the mailbox (failing the task);
 * rejection during shutdown is deliberately ignored.
 */
@Override
public void dispatchOperatorEvent(OperatorID operator, SerializedValue<OperatorEvent> event) throws FlinkException {
    try {
        mainMailboxExecutor.execute(
            () -> {
                try {
                    operatorChain.dispatchOperatorEvent(operator, event);
                } catch (Throwable t) {
                    mailboxProcessor.reportThrowable(t);
                }
            },
            "dispatch operator event");
    }
    catch (RejectedExecutionException e) {
        // this happens during shutdown, we can swallow this
    }
}
// ------------------------------------------------------------------------
// State backend
// ------------------------------------------------------------------------
/**
 * Resolves the state backend: application-level backend first, then cluster
 * configuration, then the loader's default.
 */
private StateBackend createStateBackend() throws Exception {
    final StateBackend applicationBackend = configuration.getStateBackend(getUserCodeClassLoader());
    return StateBackendLoader.fromApplicationOrConfigOrDefault(
        applicationBackend,
        getEnvironment().getTaskManagerInfo().getConfiguration(),
        getUserCodeClassLoader(),
        LOG);
}
/**
 * Returns the {@link TimerService} responsible for telling the current processing time and registering actual timers.
 */
@VisibleForTesting
TimerService getTimerService() {
    return this.timerService;
}
/** Returns the head operator of the chain (for tests). */
@VisibleForTesting
OP getHeadOperator() {
    return headOperator;
}
/** Returns the action executor wrapping task-thread actions (for tests). */
@VisibleForTesting
StreamTaskActionExecutor getActionExecutor() {
    return this.actionExecutor;
}
/**
 * Factory creating processing-time services whose timer callbacks are deferred into
 * the supplied mailbox executor.
 */
public ProcessingTimeServiceFactory getProcessingTimeServiceFactory() {
    return mailboxExecutor ->
        new ProcessingTimeServiceImpl(
            timerService,
            callback -> deferCallbackToMailbox(mailboxExecutor, callback));
}
/**
 * Handles an exception thrown by another thread (e.g. a TriggerTask),
 * other than the one executing the main task by failing the task entirely.
 *
 * <p>In more detail, it marks task execution failed for an external reason
 * (a reason other than the task code itself throwing an exception). If the task
 * is already in a terminal state (such as FINISHED, CANCELED, FAILED), or if the
 * task is already canceling this does nothing. Otherwise it sets the state to
 * FAILED, and, if the invokable code is running, starts an asynchronous thread
 * that aborts that code.
 *
 * <p>This method never blocks.
 */
@Override
public void handleAsyncException(String message, Throwable exception) {
    if (!isRunning) {
        // only fail if the task is still running
        return;
    }
    asyncExceptionHandler.handleAsyncException(message, exception);
}
// ------------------------------------------------------------------------
// Utilities
// ------------------------------------------------------------------------
/** The task's display name doubles as its string representation. */
@Override
public String toString() {
    return getName();
}
// ------------------------------------------------------------------------
/**
 * Utility class to encapsulate the handling of asynchronous exceptions.
 */
static class StreamTaskAsyncExceptionHandler {

    /** Environment through which the failure is reported. */
    private final Environment environment;

    StreamTaskAsyncExceptionHandler(Environment environment) {
        this.environment = environment;
    }

    /** Fails the task externally, wrapping the cause in an {@link AsynchronousException}. */
    void handleAsyncException(String message, Throwable exception) {
        environment.failExternally(new AsynchronousException(message, exception));
    }
}
/** Registry of resources to be closed on task cancellation. */
public final CloseableRegistry getCancelables() {
    return this.cancelables;
}
// ------------------------------------------------------------------------
/**
 * Builds a delegate over the task's record writers: a no-op delegate for zero
 * writers, a single-writer fast path for one, and a multi-writer wrapper otherwise.
 */
@VisibleForTesting
public static <OUT> RecordWriterDelegate<SerializationDelegate<StreamRecord<OUT>>> createRecordWriterDelegate(
        StreamConfig configuration,
        Environment environment) {
    List<RecordWriter<SerializationDelegate<StreamRecord<OUT>>>> writers = createRecordWriters(
        configuration,
        environment);
    switch (writers.size()) {
        case 0:
            return new NonRecordWriter<>();
        case 1:
            return new SingleRecordWriter<>(writers.get(0));
        default:
            return new MultipleRecordWriters<>(writers);
    }
}
/**
 * Creates one record writer per outgoing stream edge, in edge order. The positional
 * index {@code i} must line up with the environment's result-partition writers, so
 * the indexed loop is intentional.
 */
private static <OUT> List<RecordWriter<SerializationDelegate<StreamRecord<OUT>>>> createRecordWriters(
        StreamConfig configuration,
        Environment environment) {
    List<RecordWriter<SerializationDelegate<StreamRecord<OUT>>>> recordWriters = new ArrayList<>();
    List<StreamEdge> outEdgesInOrder = configuration.getOutEdgesInOrder(environment.getUserClassLoader());
    Map<Integer, StreamConfig> chainedConfigs = configuration.getTransitiveChainedTaskConfigsWithSelf(environment.getUserClassLoader());
    for (int i = 0; i < outEdgesInOrder.size(); i++) {
        StreamEdge edge = outEdgesInOrder.get(i);
        recordWriters.add(
            createRecordWriter(
                edge,
                i,
                environment,
                environment.getTaskInfo().getTaskName(),
                // buffer timeout comes from the chained config of the edge's source
                chainedConfigs.get(edge.getSourceId()).getBufferTimeout()));
    }
    return recordWriters;
}
/**
 * Creates the record writer for a single output edge: configures the partitioner
 * (including max-parallelism/key-group setup when applicable) and attaches metrics.
 */
private static <OUT> RecordWriter<SerializationDelegate<StreamRecord<OUT>>> createRecordWriter(
        StreamEdge edge,
        int outputIndex,
        Environment environment,
        String taskName,
        long bufferTimeout) {
    @SuppressWarnings("unchecked")
    StreamPartitioner<OUT> outputPartitioner = (StreamPartitioner<OUT>) edge.getPartitioner();
    LOG.debug("Using partitioner {} for output {} of task {}", outputPartitioner, outputIndex, taskName);
    ResultPartitionWriter bufferWriter = environment.getWriter(outputIndex);
    // we initialize the partitioner here with the number of key groups (aka max. parallelism)
    if (outputPartitioner instanceof ConfigurableStreamPartitioner) {
        int numKeyGroups = bufferWriter.getNumTargetKeyGroups();
        if (0 < numKeyGroups) {
            ((ConfigurableStreamPartitioner) outputPartitioner).configure(numKeyGroups);
        }
    }
    RecordWriter<SerializationDelegate<StreamRecord<OUT>>> output = new RecordWriterBuilder<SerializationDelegate<StreamRecord<OUT>>>()
        .setChannelSelector(outputPartitioner)
        .setTimeout(bufferTimeout)
        .setTaskName(taskName)
        .build(bufferWriter);
    output.setMetricGroup(environment.getMetricGroup().getIOMetricGroup());
    return output;
}
/** Routes timer-thread failures through the async exception handler, wrapped as {@link TimerException}. */
private void handleTimerException(Exception ex) {
    handleAsyncException("Caught exception while processing timer.", new TimerException(ex));
}
/**
 * Wraps a processing-time callback so its execution is deferred into the given
 * mailbox executor instead of running on the timer thread.
 */
@VisibleForTesting
ProcessingTimeCallback deferCallbackToMailbox(MailboxExecutor mailboxExecutor, ProcessingTimeCallback callback) {
    return timestamp ->
        mailboxExecutor.execute(
            () -> invokeProcessingTimeCallback(callback, timestamp),
            "Timer callback for %s @ %d",
            callback,
            timestamp);
}
/** Invokes a timer callback, routing any failure through the async exception handler. */
private void invokeProcessingTimeCallback(ProcessingTimeCallback callback, long timestamp) {
    try {
        callback.onProcessingTime(timestamp);
    } catch (Throwable t) {
        handleAsyncException("Caught exception while processing timer.", new TimerException(t));
    }
}
/** Delay (nanos) between the latest checkpoint trigger request and its mailbox pickup. */
protected long getAsyncCheckpointStartDelayNanos() {
    return this.latestAsyncCheckpointStartDelayNanos;
}
}
| |
/*
* Copyright (C) 2009 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.collect;
import static org.junit.contrib.truth.Truth.ASSERT;
import com.google.common.annotations.GwtCompatible;
import com.google.common.annotations.GwtIncompatible;
import com.google.common.collect.ImmutableSetMultimap.Builder;
import com.google.common.collect.testing.google.UnmodifiableCollectionTests;
import com.google.common.testing.EqualsTester;
import com.google.common.testing.SerializableTester;
import junit.framework.TestCase;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Map.Entry;
/**
* Tests for {@link ImmutableSetMultimap}.
*
* @author Mike Ward
*/
@GwtCompatible(emulated = true)
public class ImmutableSetMultimapTest extends TestCase {
// Building from a pre-made immutable entry should behave like put(key, value).
public void testBuilder_withImmutableEntry() {
    ImmutableSetMultimap<String, Integer> built = new Builder<String, Integer>()
        .put(Maps.immutableEntry("one", 1))
        .build();
    assertEquals(ImmutableSet.of(1), built.get("one"));
}
// The builder must reject null keys and null values eagerly.
public void testBuilder_withImmutableEntryAndNullContents() {
    Builder<String, Integer> builder = new Builder<String, Integer>();
    try {
        builder.put(Maps.immutableEntry("one", (Integer) null));
        fail();
    } catch (NullPointerException expected) {
        // null value rejected as expected
    }
    try {
        builder.put(Maps.immutableEntry((String) null, 1));
        fail();
    } catch (NullPointerException expected) {
        // null key rejected as expected
    }
}
/** Mutable holder whose {@code string} field is mutated after insertion to verify
 * that the builder copies entry contents eagerly (see testBuilder_withMutableEntry). */
private static class StringHolder {
    String string;
}
// Mutating an entry after put() must not affect what the builder stored.
public void testBuilder_withMutableEntry() {
    ImmutableSetMultimap.Builder<String, Integer> builder = new Builder<String, Integer>();
    final StringHolder keyHolder = new StringHolder();
    keyHolder.string = "one";
    Entry<String, Integer> mutableEntry = new AbstractMapEntry<String, Integer>() {
        @Override public String getKey() {
            return keyHolder.string;
        }
        @Override public Integer getValue() {
            return 1;
        }
    };
    builder.put(mutableEntry);
    // Mutate after insertion; the builder must have captured "one" already.
    keyHolder.string = "two";
    assertEquals(ImmutableSet.of(1), builder.build().get("one"));
}
// putAll(key, iterable) accumulates values across repeated calls for the same key.
public void testBuilderPutAllIterable() {
    ImmutableSetMultimap.Builder<String, Integer> builder = ImmutableSetMultimap.builder();
    builder.putAll("foo", Arrays.asList(1, 2, 3));
    builder.putAll("bar", Arrays.asList(4, 5));
    builder.putAll("foo", Arrays.asList(6, 7));
    Multimap<String, Integer> built = builder.build();
    assertEquals(ImmutableSet.of(1, 2, 3, 6, 7), built.get("foo"));
    assertEquals(ImmutableSet.of(4, 5), built.get("bar"));
    assertEquals(7, built.size());
}
// The varargs putAll overload behaves exactly like the iterable overload.
public void testBuilderPutAllVarargs() {
    ImmutableSetMultimap.Builder<String, Integer> builder = ImmutableSetMultimap.builder();
    builder.putAll("foo", 1, 2, 3);
    builder.putAll("bar", 4, 5);
    builder.putAll("foo", 6, 7);
    Multimap<String, Integer> built = builder.build();
    assertEquals(ImmutableSet.of(1, 2, 3, 6, 7), built.get("foo"));
    assertEquals(ImmutableSet.of(4, 5), built.get("bar"));
    assertEquals(7, built.size());
}
  // putAll(Multimap) merges the entries of several source multimaps.
  public void testBuilderPutAllMultimap() {
    Multimap<String, Integer> toPut = LinkedListMultimap.create();
    toPut.put("foo", 1);
    toPut.put("bar", 4);
    toPut.put("foo", 2);
    toPut.put("foo", 3);
    Multimap<String, Integer> moreToPut = LinkedListMultimap.create();
    moreToPut.put("foo", 6);
    moreToPut.put("bar", 5);
    moreToPut.put("foo", 7);
    ImmutableSetMultimap.Builder<String, Integer> builder
        = ImmutableSetMultimap.builder();
    builder.putAll(toPut);
    builder.putAll(moreToPut);
    Multimap<String, Integer> multimap = builder.build();
    assertEquals(ImmutableSet.of(1, 2, 3, 6, 7), multimap.get("foo"));
    assertEquals(ImmutableSet.of(4, 5), multimap.get("bar"));
    assertEquals(7, multimap.size());
  }
  // Duplicate ("foo", 1) across putAll calls is collapsed: 8 puts -> size 7.
  public void testBuilderPutAllWithDuplicates() {
    ImmutableSetMultimap.Builder<String, Integer> builder
        = ImmutableSetMultimap.builder();
    builder.putAll("foo", 1, 2, 3);
    builder.putAll("bar", 4, 5);
    builder.putAll("foo", 1, 6, 7);
    ImmutableSetMultimap<String, Integer> multimap = builder.build();
    assertEquals(7, multimap.size());
  }
  // A single put() duplicating an existing pair is collapsed: 6 puts -> size 5.
  public void testBuilderPutWithDuplicates() {
    ImmutableSetMultimap.Builder<String, Integer> builder
        = ImmutableSetMultimap.builder();
    builder.putAll("foo", 1, 2, 3);
    builder.putAll("bar", 4, 5);
    builder.put("foo", 1);
    ImmutableSetMultimap<String, Integer> multimap = builder.build();
    assertEquals(5, multimap.size());
  }
  // Duplicates inside the SOURCE multimap are also collapsed: 5 entries -> 4.
  public void testBuilderPutAllMultimapWithDuplicates() {
    Multimap<String, Integer> toPut = LinkedListMultimap.create();
    toPut.put("foo", 1);
    toPut.put("bar", 4);
    toPut.put("foo", 2);
    toPut.put("foo", 1);
    toPut.put("bar", 5);
    ImmutableSetMultimap.Builder<String, Integer> builder
        = ImmutableSetMultimap.builder();
    builder.putAll(toPut);
    ImmutableSetMultimap<String, Integer> multimap = builder.build();
    assertEquals(4, multimap.size());
  }
public void testBuilderPutNullKey() {
Multimap<String, Integer> toPut = LinkedListMultimap.create();
toPut.put("foo", null);
ImmutableSetMultimap.Builder<String, Integer> builder
= ImmutableSetMultimap.builder();
try {
builder.put(null, 1);
fail();
} catch (NullPointerException expected) {}
try {
builder.putAll(null, Arrays.asList(1, 2, 3));
fail();
} catch (NullPointerException expected) {}
try {
builder.putAll(null, 1, 2, 3);
fail();
} catch (NullPointerException expected) {}
try {
builder.putAll(toPut);
fail();
} catch (NullPointerException expected) {}
}
public void testBuilderPutNullValue() {
Multimap<String, Integer> toPut = LinkedListMultimap.create();
toPut.put(null, 1);
ImmutableSetMultimap.Builder<String, Integer> builder
= ImmutableSetMultimap.builder();
try {
builder.put("foo", null);
fail();
} catch (NullPointerException expected) {}
try {
builder.putAll("foo", Arrays.asList(1, null, 3));
fail();
} catch (NullPointerException expected) {}
try {
builder.putAll("foo", 4, null, 6);
fail();
} catch (NullPointerException expected) {}
try {
builder.putAll(toPut);
fail();
} catch (NullPointerException expected) {}
}
  // orderKeysBy sorts keys (here: reverse-alphabetical) even when set after
  // some puts; values keep per-key insertion order and the value collections
  // are plain ImmutableSets, not sorted sets.
  public void testBuilderOrderKeysBy() {
    ImmutableSetMultimap.Builder<String, Integer> builder
        = ImmutableSetMultimap.builder();
    builder.put("b", 3);
    builder.put("d", 2);
    builder.put("a", 5);
    builder.orderKeysBy(Collections.reverseOrder());
    builder.put("c", 4);
    builder.put("a", 2);
    builder.put("b", 6);
    ImmutableSetMultimap<String, Integer> multimap = builder.build();
    ASSERT.that(multimap.keySet()).hasContentsInOrder("d", "c", "b", "a");
    ASSERT.that(multimap.values()).hasContentsInOrder(2, 4, 3, 6, 5, 2);
    ASSERT.that(multimap.get("a")).hasContentsInOrder(5, 2);
    ASSERT.that(multimap.get("b")).hasContentsInOrder(3, 6);
    assertFalse(multimap.get("a") instanceof ImmutableSortedSet);
    assertFalse(multimap.get("x") instanceof ImmutableSortedSet);
    assertFalse(multimap.asMap().get("a") instanceof ImmutableSortedSet);
  }
  // orderKeysBy with a comparator that treats equal-length keys as equal
  // (length comparison): ties keep insertion order of first occurrence.
  public void testBuilderOrderKeysByDuplicates() {
    ImmutableSetMultimap.Builder<String, Integer> builder
        = ImmutableSetMultimap.builder();
    builder.put("bb", 3);
    builder.put("d", 2);
    builder.put("a", 5);
    builder.orderKeysBy(new Ordering<String>() {
      @Override
      public int compare(String left, String right) {
        return left.length() - right.length();
      }
    });
    builder.put("cc", 4);
    builder.put("a", 2);
    builder.put("bb", 6);
    ImmutableSetMultimap<String, Integer> multimap = builder.build();
    ASSERT.that(multimap.keySet()).hasContentsInOrder("d", "a", "bb", "cc");
    ASSERT.that(multimap.values()).hasContentsInOrder(2, 5, 2, 3, 6, 4);
    ASSERT.that(multimap.get("a")).hasContentsInOrder(5, 2);
    ASSERT.that(multimap.get("bb")).hasContentsInOrder(3, 6);
    assertFalse(multimap.get("a") instanceof ImmutableSortedSet);
    assertFalse(multimap.get("x") instanceof ImmutableSortedSet);
    assertFalse(multimap.asMap().get("a") instanceof ImmutableSortedSet);
  }
  // orderValuesBy sorts each key's values and makes every value collection an
  // ImmutableSortedSet carrying the supplied comparator (even for absent keys).
  public void testBuilderOrderValuesBy() {
    ImmutableSetMultimap.Builder<String, Integer> builder
        = ImmutableSetMultimap.builder();
    builder.put("b", 3);
    builder.put("d", 2);
    builder.put("a", 5);
    builder.orderValuesBy(Collections.reverseOrder());
    builder.put("c", 4);
    builder.put("a", 2);
    builder.put("b", 6);
    ImmutableSetMultimap<String, Integer> multimap = builder.build();
    ASSERT.that(multimap.keySet()).hasContentsInOrder("b", "d", "a", "c");
    ASSERT.that(multimap.values()).hasContentsInOrder(6, 3, 2, 5, 2, 4);
    ASSERT.that(multimap.get("a")).hasContentsInOrder(5, 2);
    ASSERT.that(multimap.get("b")).hasContentsInOrder(6, 3);
    assertTrue(multimap.get("a") instanceof ImmutableSortedSet);
    assertEquals(Collections.reverseOrder(),
        ((ImmutableSortedSet<Integer>) multimap.get("a")).comparator());
    assertTrue(multimap.get("x") instanceof ImmutableSortedSet);
    assertEquals(Collections.reverseOrder(),
        ((ImmutableSortedSet<Integer>) multimap.get("x")).comparator());
    assertTrue(multimap.asMap().get("a") instanceof ImmutableSortedSet);
    assertEquals(Collections.reverseOrder(),
        ((ImmutableSortedSet<Integer>) multimap.asMap().get("a")).comparator());
  }
  // Combining orderKeysBy and orderValuesBy: keys reverse-sorted AND each
  // key's values reverse-sorted in ImmutableSortedSets.
  public void testBuilderOrderKeysAndValuesBy() {
    ImmutableSetMultimap.Builder<String, Integer> builder
        = ImmutableSetMultimap.builder();
    builder.put("b", 3);
    builder.put("d", 2);
    builder.put("a", 5);
    builder.orderKeysBy(Collections.reverseOrder());
    builder.orderValuesBy(Collections.reverseOrder());
    builder.put("c", 4);
    builder.put("a", 2);
    builder.put("b", 6);
    ImmutableSetMultimap<String, Integer> multimap = builder.build();
    ASSERT.that(multimap.keySet()).hasContentsInOrder("d", "c", "b", "a");
    ASSERT.that(multimap.values()).hasContentsInOrder(2, 4, 6, 3, 5, 2);
    ASSERT.that(multimap.get("a")).hasContentsInOrder(5, 2);
    ASSERT.that(multimap.get("b")).hasContentsInOrder(6, 3);
    assertTrue(multimap.get("a") instanceof ImmutableSortedSet);
    assertEquals(Collections.reverseOrder(),
        ((ImmutableSortedSet<Integer>) multimap.get("a")).comparator());
    assertTrue(multimap.get("x") instanceof ImmutableSortedSet);
    assertEquals(Collections.reverseOrder(),
        ((ImmutableSortedSet<Integer>) multimap.get("x")).comparator());
    assertTrue(multimap.asMap().get("a") instanceof ImmutableSortedSet);
    assertEquals(Collections.reverseOrder(),
        ((ImmutableSortedSet<Integer>) multimap.asMap().get("a")).comparator());
  }
public void testCopyOf() {
HashMultimap<String, Integer> input = HashMultimap.create();
input.put("foo", 1);
input.put("bar", 2);
input.put("foo", 3);
Multimap<String, Integer> multimap = ImmutableSetMultimap.copyOf(input);
assertEquals(multimap, input);
assertEquals(input, multimap);
}
public void testCopyOfWithDuplicates() {
ArrayListMultimap<Object, Object> input = ArrayListMultimap.create();
input.put("foo", 1);
input.put("bar", 2);
input.put("foo", 3);
input.put("foo", 1);
ImmutableSetMultimap<Object, Object> copy
= ImmutableSetMultimap.copyOf(input);
assertEquals(3, copy.size());
}
public void testCopyOfEmpty() {
HashMultimap<String, Integer> input = HashMultimap.create();
Multimap<String, Integer> multimap = ImmutableSetMultimap.copyOf(input);
assertEquals(multimap, input);
assertEquals(input, multimap);
}
  // copyOf of an already-immutable multimap returns the same instance.
  public void testCopyOfImmutableSetMultimap() {
    Multimap<String, Integer> multimap = createMultimap();
    assertSame(multimap, ImmutableSetMultimap.copyOf(multimap));
  }
public void testCopyOfNullKey() {
HashMultimap<String, Integer> input = HashMultimap.create();
input.put(null, 1);
try {
ImmutableSetMultimap.copyOf(input);
fail();
} catch (NullPointerException expected) {}
}
public void testCopyOfNullValue() {
HashMultimap<String, Integer> input = HashMultimap.create();
input.putAll("foo", Arrays.asList(1, null, 3));
try {
ImmutableSetMultimap.copyOf(input);
fail();
} catch (NullPointerException expected) {}
}
  // Exhaustive read-path checks on the canonical empty instance.
  public void testEmptyMultimapReads() {
    Multimap<String, Integer> multimap = ImmutableSetMultimap.of();
    assertFalse(multimap.containsKey("foo"));
    assertFalse(multimap.containsValue(1));
    assertFalse(multimap.containsEntry("foo", 1));
    assertTrue(multimap.entries().isEmpty());
    assertTrue(multimap.equals(HashMultimap.create()));
    assertEquals(Collections.emptySet(), multimap.get("foo"));
    assertEquals(0, multimap.hashCode());
    assertTrue(multimap.isEmpty());
    assertEquals(HashMultiset.create(), multimap.keys());
    assertEquals(Collections.emptySet(), multimap.keySet());
    assertEquals(0, multimap.size());
    assertTrue(multimap.values().isEmpty());
    assertEquals("{}", multimap.toString());
  }
  // The empty instance must reject every mutator.
  public void testEmptyMultimapWrites() {
    Multimap<String, Integer> multimap = ImmutableSetMultimap.of();
    UnmodifiableCollectionTests.assertMultimapIsUnmodifiable(
        multimap, "foo", 1);
  }
  // Read-path checks on the shared fixture {foo=[1, 3], bar=[2]}, including
  // its exact (insertion-ordered) toString.
  public void testMultimapReads() {
    Multimap<String, Integer> multimap = createMultimap();
    assertTrue(multimap.containsKey("foo"));
    assertFalse(multimap.containsKey("cat"));
    assertTrue(multimap.containsValue(1));
    assertFalse(multimap.containsValue(5));
    assertTrue(multimap.containsEntry("foo", 1));
    assertFalse(multimap.containsEntry("cat", 1));
    assertFalse(multimap.containsEntry("foo", 5));
    assertFalse(multimap.entries().isEmpty());
    assertEquals(3, multimap.size());
    assertFalse(multimap.isEmpty());
    assertEquals("{foo=[1, 3], bar=[2]}", multimap.toString());
  }
  // A populated instance must reject every mutator.
  public void testMultimapWrites() {
    Multimap<String, Integer> multimap = createMultimap();
    UnmodifiableCollectionTests.assertMultimapIsUnmodifiable(
        multimap, "bar", 2);
  }
  // equals/hashCode contract: equality ignores both key order and value
  // insertion order, and holds across multimap implementations.
  public void testMultimapEquals() {
    Multimap<String, Integer> multimap = createMultimap();
    Multimap<String, Integer> hashMultimap = HashMultimap.create();
    hashMultimap.putAll("foo", Arrays.asList(1, 3));
    hashMultimap.put("bar", 2);
    new EqualsTester()
        .addEqualityGroup(
            multimap,
            createMultimap(),
            hashMultimap,
            ImmutableSetMultimap.<String, Integer>builder()
                .put("bar", 2).put("foo", 1).put("foo", 3).build(),
            ImmutableSetMultimap.<String, Integer>builder()
                .put("bar", 2).put("foo", 3).put("foo", 1).build())
        .addEqualityGroup(ImmutableSetMultimap.<String, Integer>builder()
            .put("foo", 2).put("foo", 3).put("foo", 1).build())
        .addEqualityGroup(ImmutableSetMultimap.<String, Integer>builder()
            .put("bar", 2).put("foo", 3).build())
        .testEquals();
  }
  // All five of(...) arities produce their entries in argument order.
  public void testOf() {
    assertMultimapEquals(
        ImmutableSetMultimap.of("one", 1),
        "one", 1);
    assertMultimapEquals(
        ImmutableSetMultimap.of("one", 1, "two", 2),
        "one", 1, "two", 2);
    assertMultimapEquals(
        ImmutableSetMultimap.of("one", 1, "two", 2, "three", 3),
        "one", 1, "two", 2, "three", 3);
    assertMultimapEquals(
        ImmutableSetMultimap.of("one", 1, "two", 2, "three", 3, "four", 4),
        "one", 1, "two", 2, "three", 3, "four", 4);
    assertMultimapEquals(
        ImmutableSetMultimap.of(
            "one", 1, "two", 2, "three", 3, "four", 4, "five", 5),
        "one", 1, "two", 2, "three", 3, "four", 4, "five", 5);
  }
  // inverse() swaps keys and values; duplicate inverted keys (e.g. 'o') keep
  // multiple values.
  public void testInverse() {
    assertEquals(
        ImmutableSetMultimap.<Integer, String>of(),
        ImmutableSetMultimap.<String, Integer>of().inverse());
    assertEquals(
        ImmutableSetMultimap.of(1, "one"),
        ImmutableSetMultimap.of("one", 1).inverse());
    assertEquals(
        ImmutableSetMultimap.of(1, "one", 2, "two"),
        ImmutableSetMultimap.of("one", 1, "two", 2).inverse());
    assertEquals(
        ImmutableSetMultimap.of('o', "of", 'f', "of", 't', "to", 'o', "to"),
        ImmutableSetMultimap.of("of", 'o', "of", 'f', "to", 't', "to", 'o').inverse());
  }
  // inverse() is cached (same instance each call) and inverting twice returns
  // the original instance.
  public void testInverseMinimizesWork() {
    ImmutableSetMultimap<String, Character> multimap =
        ImmutableSetMultimap.of("of", 'o', "of", 'f', "to", 't', "to", 'o');
    assertSame(multimap.inverse(), multimap.inverse());
    assertSame(multimap, multimap.inverse().inverse());
  }
private static <K, V> void assertMultimapEquals(Multimap<K, V> multimap,
Object... alternatingKeysAndValues) {
assertEquals(multimap.size(), alternatingKeysAndValues.length / 2);
int i = 0;
for (Entry<K, V> entry : multimap.entries()) {
assertEquals(alternatingKeysAndValues[i++], entry.getKey());
assertEquals(alternatingKeysAndValues[i++], entry.getValue());
}
}
  // Round-trip serialization preserves the multimap and each of its views;
  // values() order may legitimately change, so it is compared as a multiset.
  @GwtIncompatible("SerializableTester")
  public void testSerialization() {
    Multimap<String, Integer> multimap = createMultimap();
    SerializableTester.reserializeAndAssert(multimap);
    assertEquals(multimap.size(),
        SerializableTester.reserialize(multimap).size());
    SerializableTester.reserializeAndAssert(multimap.get("foo"));
    LenientSerializableTester.reserializeAndAssertLenient(multimap.keySet());
    SerializableTester.reserializeAndAssert(multimap.keys());
    SerializableTester.reserializeAndAssert(multimap.asMap());
    Collection<Integer> valuesCopy
        = SerializableTester.reserialize(multimap.values());
    assertEquals(HashMultiset.create(multimap.values()),
        HashMultiset.create(valuesCopy));
  }
  // The empty instance deserializes to the same singleton.
  @GwtIncompatible("SerializableTester")
  public void testEmptySerialization() {
    Multimap<String, Integer> multimap = ImmutableSetMultimap.of();
    assertSame(multimap, SerializableTester.reserialize(multimap));
  }
private ImmutableSetMultimap<String, Integer> createMultimap() {
return ImmutableSetMultimap.<String, Integer>builder()
.put("foo", 1).put("bar", 2).put("foo", 3).build();
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.test.engine;
import org.apache.lucene.index.*;
import org.apache.lucene.search.AssertingIndexSearcher;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.XSearcherManager;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.codec.CodecService;
import org.elasticsearch.index.deletionpolicy.SnapshotDeletionPolicy;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.engine.EngineException;
import org.elasticsearch.index.engine.internal.InternalEngine;
import org.elasticsearch.index.indexing.ShardIndexingService;
import org.elasticsearch.index.merge.policy.MergePolicyProvider;
import org.elasticsearch.index.merge.scheduler.MergeSchedulerProvider;
import org.elasticsearch.index.settings.IndexSettings;
import org.elasticsearch.index.settings.IndexSettingsService;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.index.store.Store;
import org.elasticsearch.index.translog.Translog;
import org.elasticsearch.indices.warmer.IndicesWarmer;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
import org.elasticsearch.threadpool.ThreadPool;
import java.lang.reflect.Constructor;
import java.util.Map.Entry;
import java.util.Random;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
/**
 * Test engine that tracks every acquired {@link Searcher} via
 * {@link AssertingSearcher} so leaked (never-released) searchers can be
 * reported, and that can randomly wrap directory readers in an asserting
 * {@link FilterDirectoryReader} subclass to surface reader misuse early.
 *
 * <p>Fixes over the previous revision: the long literal {@code 0l} (easily
 * misread as {@code 01}) is now {@code 0L}, and {@code close()} carries
 * {@code @Override}.
 */
public final class MockInternalEngine extends InternalEngine implements Engine {

    /** Searchers acquired but not yet released, mapped to their acquisition stack trace. */
    public static final ConcurrentMap<AssertingSearcher, RuntimeException> INFLIGHT_ENGINE_SEARCHERS = new ConcurrentHashMap<AssertingSearcher, RuntimeException>();

    /** Setting: probability [0..1] of wrapping a DirectoryReader in the asserting wrapper. */
    public static final String WRAP_READER_RATIO = "index.engine.mock.random.wrap_reader_ratio";
    /** Setting: FilterDirectoryReader subclass used as the wrapper. */
    public static final String READER_WRAPPER_TYPE = "index.engine.mock.random.wrapper";

    private final Random random;
    private final boolean wrapReader;
    private final Class<? extends FilterDirectoryReader> wrapper;

    @Inject
    public MockInternalEngine(ShardId shardId, @IndexSettings Settings indexSettings, ThreadPool threadPool,
                              IndexSettingsService indexSettingsService, ShardIndexingService indexingService, @Nullable IndicesWarmer warmer, Store store,
                              SnapshotDeletionPolicy deletionPolicy, Translog translog, MergePolicyProvider mergePolicyProvider,
                              MergeSchedulerProvider mergeScheduler, AnalysisService analysisService, SimilarityService similarityService,
                              CodecService codecService) throws EngineException {
        super(shardId, indexSettings, threadPool, indexSettingsService, indexingService, warmer, store,
                deletionPolicy, translog, mergePolicyProvider, mergeScheduler, analysisService, similarityService, codecService);
        // Seed comes from the integration-test framework so failures are reproducible.
        final long seed = indexSettings.getAsLong(ElasticsearchIntegrationTest.INDEX_SEED_SETTING, 0L);
        random = new Random(seed);
        final double ratio = indexSettings.getAsDouble(WRAP_READER_RATIO, 0.0d); // DISABLED by default - AssertingDR is crazy slow
        wrapper = indexSettings.getAsClass(READER_WRAPPER_TYPE, AssertingDirectoryReader.class);
        wrapReader = random.nextDouble() < ratio;
        if (logger.isTraceEnabled()) {
            logger.trace("Using [{}] for shard [{}] seed: [{}] wrapReader: [{}]", this.getClass().getName(), shardId, seed, wrapReader);
        }
    }

    @Override
    public void close() throws ElasticsearchException {
        try {
            super.close();
        } finally {
            if (logger.isTraceEnabled()) {
                // log debug if we have pending searchers
                for (Entry<MockInternalEngine.AssertingSearcher, RuntimeException> entry : MockInternalEngine.INFLIGHT_ENGINE_SEARCHERS.entrySet()) {
                    logger.trace("Unreleased Searchers instance for shard [{}]", entry.getValue(), entry.getKey().shardId);
                }
            }
        }
    }

    @Override
    protected Searcher newSearcher(String source, IndexSearcher searcher, XSearcherManager manager) throws EngineException {
        IndexReader reader = searcher.getIndexReader();
        IndexReader wrappedReader = reader;
        if (reader instanceof DirectoryReader && wrapReader) {
            wrappedReader = wrapReader((DirectoryReader) reader);
        }
        // this executes basic query checks and asserts that weights are normalized only once etc.
        final AssertingIndexSearcher assertingIndexSearcher = new AssertingIndexSearcher(random, wrappedReader);
        assertingIndexSearcher.setSimilarity(searcher.getSimilarity());
        // pass the original searcher to the super.newSearcher() method to make sure this is the searcher that will
        // be released later on. If we wrap an index reader here must not pass the wrapped version to the manager
        // on release otherwise the reader will be closed too early. - good news, stuff will fail all over the place if we don't get this right here
        return new AssertingSearcher(assertingIndexSearcher, super.newSearcher(source, searcher, manager), shardId);
    }

    /**
     * Wraps {@code reader} in the configured FilterDirectoryReader subclass,
     * preferring a (DirectoryReader, Settings) constructor over the plain
     * (DirectoryReader) one. Returns the reader unchanged when no suitable
     * constructor exists.
     */
    private DirectoryReader wrapReader(DirectoryReader reader) {
        try {
            Constructor<?>[] constructors = wrapper.getConstructors();
            Constructor<?> nonRandom = null;
            for (Constructor<?> constructor : constructors) {
                Class<?>[] parameterTypes = constructor.getParameterTypes();
                if (parameterTypes.length > 0 && parameterTypes[0] == DirectoryReader.class) {
                    if (parameterTypes.length == 1) {
                        nonRandom = constructor;
                    } else if (parameterTypes.length == 2 && parameterTypes[1] == Settings.class) {
                        return (DirectoryReader) constructor.newInstance(reader, indexSettings);
                    }
                }
            }
            if (nonRandom != null) {
                return (DirectoryReader) nonRandom.newInstance(reader);
            }
        } catch (Exception e) {
            throw new ElasticsearchException("Can not wrap reader", e);
        }
        return reader;
    }

    /**
     * Searcher wrapper that asserts exactly one release() per acquisition and
     * records the first release stack so a double release can show both callers.
     */
    public final class AssertingSearcher implements Searcher {
        private final Searcher wrappedSearcher;
        private final ShardId shardId;
        private final IndexSearcher indexSearcher;
        private RuntimeException firstReleaseStack;
        private final Object lock = new Object();
        private final int initialRefCount;

        public AssertingSearcher(IndexSearcher indexSearcher, Searcher wrappedSearcher, ShardId shardId) {
            // we only use the given index searcher here instead of the IS of the wrapped searcher. the IS might be a wrapped searcher
            // with a wrapped reader.
            this.wrappedSearcher = wrappedSearcher;
            this.shardId = shardId;
            initialRefCount = wrappedSearcher.reader().getRefCount();
            this.indexSearcher = indexSearcher;
            assert initialRefCount > 0 : "IndexReader#getRefCount() was [" + initialRefCount + "] expected a value > [0] - reader is already closed";
            INFLIGHT_ENGINE_SEARCHERS.put(this, new RuntimeException("Unreleased Searcher, source [" + wrappedSearcher.source() + "]"));
        }

        @Override
        public String source() {
            return wrappedSearcher.source();
        }

        @Override
        public boolean release() throws ElasticsearchException {
            RuntimeException remove = INFLIGHT_ENGINE_SEARCHERS.remove(this);
            synchronized (lock) {
                // make sure we only get this once and store the stack of the first caller!
                if (remove == null) {
                    assert firstReleaseStack != null;
                    AssertionError error = new AssertionError("Released Searcher more than once, source [" + wrappedSearcher.source() + "]");
                    error.initCause(firstReleaseStack);
                    throw error;
                } else {
                    assert firstReleaseStack == null;
                    firstReleaseStack = new RuntimeException("Searcher Released first here, source [" + wrappedSearcher.source() + "]");
                }
            }
            final int refCount = wrappedSearcher.reader().getRefCount();
            // this assert seems to be paranoid but given LUCENE-5362 we better add some assertions here to make sure we catch any potential
            // problems.
            assert refCount > 0 : "IndexReader#getRefCount() was [" + refCount + "] expected a value > [0] - reader is already closed. Initial refCount was: [" + initialRefCount + "]";
            try {
                return wrappedSearcher.release();
            } catch (RuntimeException ex) {
                logger.debug("Failed to release searcher", ex);
                throw ex;
            }
        }

        @Override
        public IndexReader reader() {
            return indexSearcher.getIndexReader();
        }

        @Override
        public IndexSearcher searcher() {
            return indexSearcher;
        }

        public ShardId shardId() {
            return shardId;
        }
    }

    /**
     * Base class for asserting reader wrappers; delegates the cache keys to the
     * wrapped reader so caches keyed on them keep working.
     */
    public static abstract class DirectoryReaderWrapper extends FilterDirectoryReader {
        protected final SubReaderWrapper subReaderWrapper;

        public DirectoryReaderWrapper(DirectoryReader in, SubReaderWrapper subReaderWrapper) {
            super(in, subReaderWrapper);
            this.subReaderWrapper = subReaderWrapper;
        }

        @Override
        public Object getCoreCacheKey() {
            return in.getCoreCacheKey();
        }

        @Override
        public Object getCombinedCoreAndDeletesKey() {
            return in.getCombinedCoreAndDeletesKey();
        }
    }
}
| |
/*
* Copyright 2012 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.channel.socket;
import io.netty.buffer.ByteBufAllocator;
import io.netty.channel.ChannelException;
import io.netty.channel.ChannelOption;
import io.netty.channel.DefaultChannelConfig;
import io.netty.channel.FixedRecvByteBufAllocator;
import io.netty.channel.MessageSizeEstimator;
import io.netty.channel.RecvByteBufAllocator;
import io.netty.util.internal.PlatformDependent;
import io.netty.util.internal.logging.InternalLogger;
import io.netty.util.internal.logging.InternalLoggerFactory;
import java.io.IOException;
import java.net.DatagramSocket;
import java.net.InetAddress;
import java.net.MulticastSocket;
import java.net.NetworkInterface;
import java.net.SocketException;
import java.util.Map;
import static io.netty.channel.ChannelOption.*;
/**
* The default {@link DatagramChannelConfig} implementation.
*/
public class DefaultDatagramChannelConfig extends DefaultChannelConfig implements DatagramChannelConfig {
private static final InternalLogger logger = InternalLoggerFactory.getInstance(DefaultDatagramChannelConfig.class);
private static final RecvByteBufAllocator DEFAULT_RCVBUF_ALLOCATOR = new FixedRecvByteBufAllocator(2048);
private final DatagramSocket javaSocket;
private volatile boolean activeOnOpen;
/**
* Creates a new instance.
*/
public DefaultDatagramChannelConfig(DatagramChannel channel, DatagramSocket javaSocket) {
super(channel);
if (javaSocket == null) {
throw new NullPointerException("javaSocket");
}
this.javaSocket = javaSocket;
setRecvByteBufAllocator(DEFAULT_RCVBUF_ALLOCATOR);
}
@Override
@SuppressWarnings("deprecation")
public Map<ChannelOption<?>, Object> getOptions() {
return getOptions(
super.getOptions(),
SO_BROADCAST, SO_RCVBUF, SO_SNDBUF, SO_REUSEADDR, IP_MULTICAST_LOOP_DISABLED,
IP_MULTICAST_ADDR, IP_MULTICAST_IF, IP_MULTICAST_TTL, IP_TOS, DATAGRAM_CHANNEL_ACTIVE_ON_REGISTRATION);
}
@Override
@SuppressWarnings({ "unchecked", "deprecation" })
public <T> T getOption(ChannelOption<T> option) {
if (option == SO_BROADCAST) {
return (T) Boolean.valueOf(isBroadcast());
}
if (option == SO_RCVBUF) {
return (T) Integer.valueOf(getReceiveBufferSize());
}
if (option == SO_SNDBUF) {
return (T) Integer.valueOf(getSendBufferSize());
}
if (option == SO_REUSEADDR) {
return (T) Boolean.valueOf(isReuseAddress());
}
if (option == IP_MULTICAST_LOOP_DISABLED) {
return (T) Boolean.valueOf(isLoopbackModeDisabled());
}
if (option == IP_MULTICAST_ADDR) {
return (T) getInterface();
}
if (option == IP_MULTICAST_IF) {
return (T) getNetworkInterface();
}
if (option == IP_MULTICAST_TTL) {
return (T) Integer.valueOf(getTimeToLive());
}
if (option == IP_TOS) {
return (T) Integer.valueOf(getTrafficClass());
}
if (option == DATAGRAM_CHANNEL_ACTIVE_ON_REGISTRATION) {
return (T) Boolean.valueOf(activeOnOpen);
}
return super.getOption(option);
}
@Override
@SuppressWarnings("deprecation")
public <T> boolean setOption(ChannelOption<T> option, T value) {
validate(option, value);
if (option == SO_BROADCAST) {
setBroadcast((Boolean) value);
} else if (option == SO_RCVBUF) {
setReceiveBufferSize((Integer) value);
} else if (option == SO_SNDBUF) {
setSendBufferSize((Integer) value);
} else if (option == SO_REUSEADDR) {
setReuseAddress((Boolean) value);
} else if (option == IP_MULTICAST_LOOP_DISABLED) {
setLoopbackModeDisabled((Boolean) value);
} else if (option == IP_MULTICAST_ADDR) {
setInterface((InetAddress) value);
} else if (option == IP_MULTICAST_IF) {
setNetworkInterface((NetworkInterface) value);
} else if (option == IP_MULTICAST_TTL) {
setTimeToLive((Integer) value);
} else if (option == IP_TOS) {
setTrafficClass((Integer) value);
} else if (option == DATAGRAM_CHANNEL_ACTIVE_ON_REGISTRATION) {
setActiveOnOpen((Boolean) value);
} else {
return super.setOption(option, value);
}
return true;
}
private void setActiveOnOpen(boolean activeOnOpen) {
if (channel.isRegistered()) {
throw new IllegalStateException("Can only changed before channel was registered");
}
this.activeOnOpen = activeOnOpen;
}
@Override
public boolean isBroadcast() {
try {
return javaSocket.getBroadcast();
} catch (SocketException e) {
throw new ChannelException(e);
}
}
@Override
public DatagramChannelConfig setBroadcast(boolean broadcast) {
try {
// See: https://github.com/netty/netty/issues/576
if (broadcast &&
!javaSocket.getLocalAddress().isAnyLocalAddress() &&
!PlatformDependent.isWindows() && !PlatformDependent.isRoot()) {
// Warn a user about the fact that a non-root user can't receive a
// broadcast packet on *nix if the socket is bound on non-wildcard address.
logger.warn(
"A non-root user can't receive a broadcast packet if the socket " +
"is not bound to a wildcard address; setting the SO_BROADCAST flag " +
"anyway as requested on the socket which is bound to " +
javaSocket.getLocalSocketAddress() + '.');
}
javaSocket.setBroadcast(broadcast);
} catch (SocketException e) {
throw new ChannelException(e);
}
return this;
}
@Override
public InetAddress getInterface() {
if (javaSocket instanceof MulticastSocket) {
try {
return ((MulticastSocket) javaSocket).getInterface();
} catch (SocketException e) {
throw new ChannelException(e);
}
} else {
throw new UnsupportedOperationException();
}
}
@Override
public DatagramChannelConfig setInterface(InetAddress interfaceAddress) {
if (javaSocket instanceof MulticastSocket) {
try {
((MulticastSocket) javaSocket).setInterface(interfaceAddress);
} catch (SocketException e) {
throw new ChannelException(e);
}
} else {
throw new UnsupportedOperationException();
}
return this;
}
@Override
public boolean isLoopbackModeDisabled() {
if (javaSocket instanceof MulticastSocket) {
try {
return ((MulticastSocket) javaSocket).getLoopbackMode();
} catch (SocketException e) {
throw new ChannelException(e);
}
} else {
throw new UnsupportedOperationException();
}
}
@Override
public DatagramChannelConfig setLoopbackModeDisabled(boolean loopbackModeDisabled) {
if (javaSocket instanceof MulticastSocket) {
try {
((MulticastSocket) javaSocket).setLoopbackMode(loopbackModeDisabled);
} catch (SocketException e) {
throw new ChannelException(e);
}
} else {
throw new UnsupportedOperationException();
}
return this;
}
@Override
public NetworkInterface getNetworkInterface() {
if (javaSocket instanceof MulticastSocket) {
try {
return ((MulticastSocket) javaSocket).getNetworkInterface();
} catch (SocketException e) {
throw new ChannelException(e);
}
} else {
throw new UnsupportedOperationException();
}
}
@Override
public DatagramChannelConfig setNetworkInterface(NetworkInterface networkInterface) {
if (javaSocket instanceof MulticastSocket) {
try {
((MulticastSocket) javaSocket).setNetworkInterface(networkInterface);
} catch (SocketException e) {
throw new ChannelException(e);
}
} else {
throw new UnsupportedOperationException();
}
return this;
}
@Override
public boolean isReuseAddress() {
try {
return javaSocket.getReuseAddress();
} catch (SocketException e) {
throw new ChannelException(e);
}
}
@Override
public DatagramChannelConfig setReuseAddress(boolean reuseAddress) {
try {
javaSocket.setReuseAddress(reuseAddress);
} catch (SocketException e) {
throw new ChannelException(e);
}
return this;
}
@Override
public int getReceiveBufferSize() {
try {
return javaSocket.getReceiveBufferSize();
} catch (SocketException e) {
throw new ChannelException(e);
}
}
@Override
public DatagramChannelConfig setReceiveBufferSize(int receiveBufferSize) {
try {
javaSocket.setReceiveBufferSize(receiveBufferSize);
} catch (SocketException e) {
throw new ChannelException(e);
}
return this;
}
@Override
public int getSendBufferSize() {
try {
return javaSocket.getSendBufferSize();
} catch (SocketException e) {
throw new ChannelException(e);
}
}
@Override
public DatagramChannelConfig setSendBufferSize(int sendBufferSize) {
try {
javaSocket.setSendBufferSize(sendBufferSize);
} catch (SocketException e) {
throw new ChannelException(e);
}
return this;
}
@Override
public int getTimeToLive() {
if (javaSocket instanceof MulticastSocket) {
try {
return ((MulticastSocket) javaSocket).getTimeToLive();
} catch (IOException e) {
throw new ChannelException(e);
}
} else {
throw new UnsupportedOperationException();
}
}
@Override
public DatagramChannelConfig setTimeToLive(int ttl) {
if (javaSocket instanceof MulticastSocket) {
try {
((MulticastSocket) javaSocket).setTimeToLive(ttl);
} catch (IOException e) {
throw new ChannelException(e);
}
} else {
throw new UnsupportedOperationException();
}
return this;
}
/**
 * Returns the traffic class (type-of-service octet) of the underlying datagram socket.
 */
@Override
public int getTrafficClass() {
    final int trafficClass;
    try {
        trafficClass = javaSocket.getTrafficClass();
    } catch (SocketException e) {
        throw new ChannelException(e);
    }
    return trafficClass;
}

/**
 * Sets the traffic class (type-of-service octet) on the underlying datagram socket.
 *
 * @return this config, so calls can be chained
 */
@Override
public DatagramChannelConfig setTrafficClass(int trafficClass) {
    try {
        javaSocket.setTrafficClass(trafficClass);
        return this;
    } catch (SocketException e) {
        throw new ChannelException(e);
    }
}
// The overrides below exist only to narrow the return type of the inherited
// ChannelConfig setters to DatagramChannelConfig so that configuration calls can
// be chained fluently; each one simply delegates to the superclass implementation.

@Override
public DatagramChannelConfig setWriteSpinCount(int writeSpinCount) {
    super.setWriteSpinCount(writeSpinCount);
    return this;
}

@Override
public DatagramChannelConfig setConnectTimeoutMillis(int connectTimeoutMillis) {
    super.setConnectTimeoutMillis(connectTimeoutMillis);
    return this;
}

@Override
@Deprecated
public DatagramChannelConfig setMaxMessagesPerRead(int maxMessagesPerRead) {
    super.setMaxMessagesPerRead(maxMessagesPerRead);
    return this;
}

@Override
public DatagramChannelConfig setAllocator(ByteBufAllocator allocator) {
    super.setAllocator(allocator);
    return this;
}

@Override
public DatagramChannelConfig setRecvByteBufAllocator(RecvByteBufAllocator allocator) {
    super.setRecvByteBufAllocator(allocator);
    return this;
}

@Override
public DatagramChannelConfig setAutoRead(boolean autoRead) {
    super.setAutoRead(autoRead);
    return this;
}

@Override
public DatagramChannelConfig setWriteBufferHighWaterMark(int writeBufferHighWaterMark) {
    super.setWriteBufferHighWaterMark(writeBufferHighWaterMark);
    return this;
}

@Override
public DatagramChannelConfig setWriteBufferLowWaterMark(int writeBufferLowWaterMark) {
    super.setWriteBufferLowWaterMark(writeBufferLowWaterMark);
    return this;
}

@Override
public DatagramChannelConfig setMessageSizeEstimator(MessageSizeEstimator estimator) {
    super.setMessageSizeEstimator(estimator);
    return this;
}
}
| |
/*
* Copyright (c) 2007 BUSINESS OBJECTS SOFTWARE LIMITED
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* * Neither the name of Business Objects nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
/*
* ErrorMessage_Test.java
* Created: Jun 23, 2005
* By: Greg McClement
*/
package org.openquark.cal.compiler;
import junit.extensions.TestSetup;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
import org.openquark.cal.CALPlatformTestModuleNames;
import org.openquark.cal.compiler.CompilerMessage.Severity;
import org.openquark.cal.module.Cal.Core.CAL_Prelude;
import org.openquark.cal.runtime.CALExecutorException;
import org.openquark.cal.runtime.ErrorInfo;
import org.openquark.cal.runtime.MachineType;
import org.openquark.cal.services.BasicCALServices;
import org.openquark.cal.services.CALServicesTestUtilities;
import org.openquark.cal.services.GemCompilationException;
/**
 * The error messages now contain information about the position of the error/trace/assert/undefined call
 * in the source code. This test checks that the correct position is being reported.
 *
 * @author Greg McClement
 */
public class ErrorMessage_Test extends TestCase {

    /**
     * Set this flag to true if debugging output is desired regardless of
     * whether a test fails or succeeds.
     */
    private static final boolean SHOW_DEBUGGING_OUTPUT = false;

    /**
     * @return a test suite containing all the test cases for testing CAL source
     * generation.
     */
    public static Test suite() {
        TestSuite suite = new TestSuite(ErrorMessage_Test.class);

        return new TestSetup(suite) {
            @Override
            protected void setUp() {
                oneTimeSetUp();
            }

            @Override
            protected void tearDown() {
                oneTimeTearDown();
            }
        };
    }

    /**
     * Performs the setup for the test suite.
     * (Currently a no-op; kept so the TestSetup wrapper in {@link #suite()} has a hook point.)
     */
    private static void oneTimeSetUp() {
    }

    /**
     * Performs the tear down for the test suite.
     * (Currently a no-op.)
     */
    private static void oneTimeTearDown() {
    }

    /**
     * Constructor for ErrorMessage_Test.
     *
     * @param name the name of the test.
     */
    public ErrorMessage_Test(String name) {
        super(name);
    }

    /**
     * Tests the source model debugging code in the CALCompiler class by compiling the workspace
     * with the debugging code turned on, and running tests.
     */
    public void testErrorMessage_LECC() throws GemCompilationException, CALExecutorException {
        // NOTE(review): the Javadoc above (and the restoring 'finally' block below) implies this
        // should pass true so every compiled module goes through the semantics-preserving
        // text-to-source-model-to-text transformations; it currently passes false, leaving those
        // transformations disabled. Confirm whether the test was deliberately disabled before
        // flipping the flag.
        CALCompiler.setInUnitTestDebugSourceModelMode(false);

        BasicCALServices privateCopyLeccServices = BasicCALServices.make(
            "org.openquark.cal.test.workspace",
            "ice.default.cws",
            null);
        ((EntryPointGenerator) privateCopyLeccServices.getWorkspaceManager().getCompiler()).setForceCodeRegen(true);
        try {
            MessageLogger logger = new MessageLogger();
            privateCopyLeccServices.compileWorkspace(null, logger);
            // JUnit convention: the expected value is the first argument.
            assertEquals(0, logger.getNErrors());

            addSourceModelCode(privateCopyLeccServices);

            // Run tests and make sure that positions are still reported correctly even after
            // all the source model transformations.
            runCode_test(privateCopyLeccServices);
        } finally {
            // Restore the workspace by compiling the original module definitions
            // without the source model transformations.
            // Turn off the debugging flag before recompilation.
            CALCompiler.setInUnitTestDebugSourceModelMode(false);
            ((EntryPointGenerator) privateCopyLeccServices.getWorkspaceManager().getCompiler()).setForceCodeRegen(true);
            privateCopyLeccServices.compileWorkspace(null, new MessageLogger());
        }
    }

    /**
     * Runs the platform function using the g machine.
     */
    public void testErrorMessage_gMachine() throws GemCompilationException, CALExecutorException {
        // Not working yet
        BasicCALServices bcs = CALServicesTestUtilities.getCommonCALServices(MachineType.G, "cal.platform.test.cws");
        addSourceModelCode(bcs);
        runCode_test(bcs);
    }

    /**
     * Uses the source model to add a small module (MyFunModule) to the workspace whose single
     * function always ends up calling Prelude.error; the error-position tests below run
     * against it in addition to the ErrorTest module.
     */
    private void addSourceModelCode(BasicCALServices bcs) {
        String myFunModule = "module MyFunModule; import " + CAL_Prelude.MODULE_NAME + "; public myFunFunction = if " + CAL_Prelude.MODULE_NAME + ".False then " + CAL_Prelude.MODULE_NAME + ".True else " + CAL_Prelude.MODULE_NAME + ".error \"myFunError\";";
        SourceModel.ModuleDefn moduleDefn = SourceModelUtilities.TextParsing.parseModuleDefnIntoSourceModel(myFunModule);
        ModuleSourceDefinition moduleSourceDefn = new SourceModelModuleSource(moduleDefn);
        CompilerMessageLogger logger = new MessageLogger();
        Severity severity = bcs.getWorkspaceManager().makeModule(moduleSourceDefn, logger);
        // The module must compile without errors (warnings and below are acceptable).
        assertTrue(severity.compareTo(Severity.ERROR) < 0);
    }

    /**
     * Runs the named ErrorTest function, expects it to fail with an error-function exception,
     * and asserts the source position reported for the failure.
     *
     * @param calServices    services (and hence machine type) to run the function with
     * @param functionName   unqualified name of a function in the ErrorTest module
     * @param expectedLine   source line the error should be reported at
     * @param expectedColumn source column the error should be reported at
     */
    private void assertErrorFunctionPosition(BasicCALServices calServices, String functionName, int expectedLine, int expectedColumn) throws GemCompilationException, CALExecutorException {
        QualifiedName name = QualifiedName.make(CALPlatformTestModuleNames.ErrorTest, functionName);
        try {
            CALServicesTestUtilities.runNamedFunction(name, calServices);
            fail("This should have failed");
        } catch (CALExecutorException.ExternalException.ErrorFunctionException e) {
            assertErrorInfo(e.getErrorInfo(), name, expectedLine, expectedColumn);
        }
    }

    /**
     * Like {@link #assertErrorFunctionPosition}, but accepts any CALExecutorException; used
     * for functions whose failure is not raised directly as an error-function exception.
     */
    private void assertExecutorExceptionPosition(BasicCALServices calServices, String functionName, int expectedLine, int expectedColumn) throws GemCompilationException, CALExecutorException {
        QualifiedName name = QualifiedName.make(CALPlatformTestModuleNames.ErrorTest, functionName);
        try {
            CALServicesTestUtilities.runNamedFunction(name, calServices);
            fail("This should have failed");
        } catch (CALExecutorException e) {
            assertErrorInfo(e.getErrorInfo(), name, expectedLine, expectedColumn);
        }
    }

    /**
     * Asserts that the error info names the expected top-level function and reports the
     * expected source line and column.
     */
    private static void assertErrorInfo(ErrorInfo errorInfo, QualifiedName expectedFunction, int expectedLine, int expectedColumn) {
        assertNotNull(errorInfo);
        assertEquals(expectedFunction, errorInfo.getTopLevelFunctionName());
        assertEquals(expectedLine, errorInfo.getLine());
        assertEquals(expectedColumn, errorInfo.getColumn());
    }

    /**
     * Helper function that runs test functions using the specified copy of BasicCALServices,
     * which dictates the type of machine that the test is run on. Each assertion below pairs
     * a function in the ErrorTest module with the source line/column expected in the reported
     * error position.
     *
     * @param calServices the copy of BasicCALServices to use in running the function
     */
    private void runCode_test(BasicCALServices calServices) throws GemCompilationException, CALExecutorException {
        assertErrorFunctionPosition(calServices, "k1", 81, 5);
        assertErrorFunctionPosition(calServices, "k2", 86, 5);
        assertErrorFunctionPosition(calServices, "k3", 96, 5);
        assertErrorFunctionPosition(calServices, "j", 105, 12);
        assertErrorFunctionPosition(calServices, "m", 118, 9);
        assertErrorFunctionPosition(calServices, "n", 122, 8);
        assertErrorFunctionPosition(calServices, "o", 132, 9);
        assertErrorFunctionPosition(calServices, "p", 136, 10);
        assertErrorFunctionPosition(calServices, "q", 143, 10);
        assertErrorFunctionPosition(calServices, "r", 149, 13);
        assertErrorFunctionPosition(calServices, "s", 156, 9);
        assertErrorFunctionPosition(calServices, "t", 165, 8);

        // These two fail with a general executor exception rather than an
        // error-function exception, so a broader catch is required.
        assertExecutorExceptionPosition(calServices, "testCases", 178, 17);
        assertExecutorExceptionPosition(calServices, "testCasesRecord", 187, 9);

        // "f" checks the error message text rather than the reported position.
        try {
            CALServicesTestUtilities.runNamedFunction(QualifiedName.make(CALPlatformTestModuleNames.ErrorTest, "f"), calServices);
            fail("This should have failed");
        } catch (CALExecutorException.ExternalException.ErrorFunctionException e) {
            assertTrue(-1 != e.toString().indexOf("hi"));
        }

        // The dynamically added module (see addSourceModelCode): only the function name is
        // verified; its source position is not asserted.
        try {
            CALServicesTestUtilities.runNamedFunction(QualifiedName.make(ModuleName.make("MyFunModule"), "myFunFunction"), calServices);
            fail("This should have failed");
        } catch (CALExecutorException.ExternalException.ErrorFunctionException e) {
            ErrorInfo errorInfo = e.getErrorInfo();
            assertNotNull(errorInfo);
            assertEquals(QualifiedName.make(ModuleName.make("MyFunModule"), "myFunFunction"), errorInfo.getTopLevelFunctionName());
        }
    }
}
| |
/*
* Copyright 2015 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.server.api.marshalling.json;
import java.io.IOException;
import java.lang.annotation.Annotation;
import java.lang.reflect.AnnotatedElement;
import java.lang.reflect.Array;
import java.lang.reflect.Member;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.ServiceLoader;
import java.util.Set;
import java.util.regex.Pattern;
import javax.xml.bind.annotation.adapters.XmlAdapter;
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapters;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.JsonToken;
import com.fasterxml.jackson.core.Version;
import com.fasterxml.jackson.databind.AnnotationIntrospector;
import com.fasterxml.jackson.databind.BeanProperty;
import com.fasterxml.jackson.databind.DeserializationConfig;
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.JavaType;
import com.fasterxml.jackson.databind.JsonDeserializer;
import com.fasterxml.jackson.databind.JsonSerializer;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.fasterxml.jackson.databind.deser.std.UntypedObjectDeserializer;
import com.fasterxml.jackson.databind.introspect.Annotated;
import com.fasterxml.jackson.databind.introspect.AnnotatedClass;
import com.fasterxml.jackson.databind.introspect.AnnotatedParameter;
import com.fasterxml.jackson.databind.introspect.JacksonAnnotationIntrospector;
import com.fasterxml.jackson.databind.jsontype.NamedType;
import com.fasterxml.jackson.databind.jsontype.TypeDeserializer;
import com.fasterxml.jackson.databind.jsontype.TypeIdResolver;
import com.fasterxml.jackson.databind.jsontype.TypeResolverBuilder;
import com.fasterxml.jackson.databind.jsontype.impl.AsWrapperTypeDeserializer;
import com.fasterxml.jackson.databind.module.SimpleModule;
import com.fasterxml.jackson.databind.util.ClassUtil;
import com.fasterxml.jackson.module.jaxb.JaxbAnnotationIntrospector;
import org.drools.core.xml.jaxb.util.JaxbListAdapter;
import org.drools.core.xml.jaxb.util.JaxbListWrapper;
import org.drools.core.xml.jaxb.util.JaxbUnknownAdapter;
import org.kie.server.api.marshalling.Marshaller;
import org.kie.server.api.marshalling.MarshallerFactory;
import org.kie.server.api.marshalling.MarshallingException;
import org.kie.server.api.marshalling.MarshallingFormat;
import org.kie.server.api.model.Wrapped;
import org.kie.server.api.model.type.JaxbByteArray;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class JSONMarshaller implements Marshaller {
// NOTE(review): the logger is registered under MarshallerFactory.class rather than
// JSONMarshaller.class -- possibly a copy-paste slip; confirm the intended logging category.
private static final Logger logger = LoggerFactory.getLogger( MarshallerFactory.class );

// When true, java.util.Date values are written with dateFormat instead of numeric timestamps.
private static boolean formatDate = Boolean.parseBoolean(System.getProperty("org.kie.server.json.format.date", "false"));
// Configurable date pattern used when formatDate is enabled.
// NOTE(review): the default uses "hh" (12-hour clock, 01-12); an ISO-8601-like timestamp
// would normally use "HH" (24-hour clock) -- confirm before relying on the default pattern.
private static String dateFormatStr = System.getProperty("org.kie.server.json.date_format", "yyyy-MM-dd'T'hh:mm:ss.SSSZ");

// Per-thread flag, reset to false after every unmarshall call; its readers are not
// visible in this part of the file.
private ThreadLocal<Boolean> stripped = new ThreadLocal<Boolean>() {
    @Override
    protected Boolean initialValue() {
        return false;
    }
};

protected ClassLoader classLoader;              // class loader supplied by the caller
protected ObjectMapper objectMapper;            // mapper used for serialization (marshall)
protected Set<Class<?>> classesSet;             // custom model classes registered with this marshaller
protected ObjectMapper deserializeObjectMapper; // mapper used for deserialization (unmarshall)

// NOTE(review): SimpleDateFormat is not thread-safe; this instance is only handed to the
// ObjectMappers via setDateFormat, so direct concurrent use of the field should be avoided.
protected DateFormat dateFormat = new SimpleDateFormat(dateFormatStr);

// Optional Marshaller Extension to handle new types
private static final List<JSONMarshallerExtension> EXTENSIONS;

// Load Marshaller Extension
// Discovers JSONMarshallerExtension implementations via ServiceLoader once, at class load,
// and freezes them in an unmodifiable list shared by all marshaller instances.
static {
    logger.info("Marshaller extensions init");
    ServiceLoader<JSONMarshallerExtension> plugins = ServiceLoader.load(JSONMarshallerExtension.class);
    List<JSONMarshallerExtension> loadedPlugins = new ArrayList<>();
    plugins.forEach( plugin -> {
        logger.info("JSONMarshallerExtension implementation found: {}", plugin.getClass().getName());
        loadedPlugins.add(plugin);
    });
    EXTENSIONS = Collections.unmodifiableList(loadedPlugins);
}
/**
 * Creates a JSON marshaller for the given custom model classes.
 *
 * @param classes     custom classes needing type information during (de)serialization; may be null
 * @param classLoader class loader used to resolve the custom classes
 */
public JSONMarshaller(Set<Class<?>> classes, ClassLoader classLoader) {
    this.classLoader = classLoader;
    buildMarshaller(classes, classLoader);
    configureMarshaller(classes, classLoader);
}

/**
 * Instantiates the serialization and deserialization ObjectMappers.
 * Protected so subclasses can supply pre-configured mappers instead.
 */
protected void buildMarshaller( Set<Class<?>> classes, final ClassLoader classLoader ) {
    objectMapper = new ObjectMapper();
    deserializeObjectMapper = new ObjectMapper();
}
/**
 * Configures both ObjectMappers: installs the JAXB-aware annotation introspector pair,
 * registers the custom classes with wrapper-object type information (serializer on
 * objectMapper, deserializer on deserializeObjectMapper), applies the optional date
 * format, and finally lets every discovered JSONMarshallerExtension extend the mappers.
 *
 * @param classes     custom classes to register; may be null (an empty set is used)
 * @param classLoader unused here; kept for subclass overrides
 */
protected void configureMarshaller( Set<Class<?>> classes, final ClassLoader classLoader ) {
    ObjectMapper customSerializationMapper = new ObjectMapper();
    if (classes == null) {
        classes = new HashSet<Class<?>>();
    }
    // add byte array handling support to allow byte[] to be send as payload
    classes.add(JaxbByteArray.class);
    classes.add(Date.class);

    List<NamedType> customClasses = prepareCustomClasses(classes);
    // this is needed because we need better control of serialization and deserialization
    AnnotationIntrospector primary = new ExtendedJaxbAnnotationIntrospector(customClasses, customSerializationMapper);
    AnnotationIntrospector secondary = new JacksonAnnotationIntrospector();
    AnnotationIntrospector introspectorPair = AnnotationIntrospector.pair(primary, secondary);
    objectMapper.setConfig(
        objectMapper.getSerializationConfig()
            .with(introspectorPair)
            .with(SerializationFeature.INDENT_OUTPUT));
    // NOTE(review): the deserialization config is derived from objectMapper's config rather
    // than deserializeObjectMapper's own. Both mappers are freshly created, so the configs
    // are equivalent defaults here, but confirm this cross-wiring is intentional.
    deserializeObjectMapper.setConfig(objectMapper.getDeserializationConfig()
        .with(introspectorPair)
        .without(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES));

    // setup custom serialization mapper with jaxb adapters
    customSerializationMapper.setConfig(customSerializationMapper.getDeserializationConfig().with(introspectorPair));
    customSerializationMapper.setConfig(customSerializationMapper.getSerializationConfig().with(introspectorPair).with(SerializationFeature.INDENT_OUTPUT));

    // in case there are custom classes register module to deal with them both for serialization and deserialization
    // this module makes sure that only custom classes are equipped with type information
    // (classes can no longer be null at this point -- it was replaced above -- so the null
    // check below is redundant but harmless)
    if (classes != null && !classes.isEmpty()) {
        ObjectMapper customObjectMapper = new ObjectMapper();
        // Type resolver that emits type ids only for registered custom classes.
        TypeResolverBuilder<?> typer = new ObjectMapper.DefaultTypeResolverBuilder(ObjectMapper.DefaultTyping.NON_FINAL){
            @Override
            public boolean useForType(JavaType t) {
                if (classesSet.contains(t.getRawClass())) {
                    return true;
                }
                return false;
            }
        };
        typer = typer.init(JsonTypeInfo.Id.CLASS, null);
        typer = typer.inclusion(JsonTypeInfo.As.WRAPPER_OBJECT);
        customObjectMapper.setDefaultTyping(typer);

        SimpleModule mod = new SimpleModule("custom-object-mapper", Version.unknownVersion());
        CustomObjectSerializer customObjectSerializer = new CustomObjectSerializer(customObjectMapper);
        for (Class<?> clazz : classes) {
            mod.addSerializer(clazz, customObjectSerializer);
        }
        objectMapper.registerModule(mod);

        // Deserialization-side resolver: same type filter, but wrapper-object payloads for
        // custom classes are routed through CustomAsWrapperTypeDeserializer.
        TypeResolverBuilder<?> typer2 = new ObjectMapper.DefaultTypeResolverBuilder(ObjectMapper.DefaultTyping.NON_FINAL){
            @Override
            public boolean useForType(JavaType t) {
                if (classesSet.contains(t.getRawClass())) {
                    return true;
                }
                return false;
            }

            @Override
            public TypeDeserializer buildTypeDeserializer(DeserializationConfig config, JavaType baseType, Collection<NamedType> subtypes) {
                if (useForType(baseType)) {
                    if (_idType == JsonTypeInfo.Id.NONE) {
                        return null;
                    }
                    TypeIdResolver idRes = idResolver(config, baseType, subtypes, false, true);
                    switch (_includeAs) {
                        case WRAPPER_OBJECT:
                            return new CustomAsWrapperTypeDeserializer(baseType, idRes, _typeProperty, true, _defaultImpl);
                    }
                }
                return super.buildTypeDeserializer(config, baseType, subtypes);
            }
        };
        typer2 = typer2.init(JsonTypeInfo.Id.CLASS, null);
        typer2 = typer2.inclusion(JsonTypeInfo.As.WRAPPER_OBJECT);
        deserializeObjectMapper.setDefaultTyping(typer2);

        SimpleModule modDeser = new SimpleModule("custom-object-unmapper", Version.unknownVersion());
        modDeser.addDeserializer(Object.class, new CustomObjectDeserializer(classes));
        deserializeObjectMapper.registerModule(modDeser);
    }

    if (formatDate) {
        objectMapper.setDateFormat(dateFormat);
        customSerializationMapper.setDateFormat(dateFormat);
        deserializeObjectMapper.setDateFormat(dateFormat);
        // NOTE(review): the next line discards its result -- DeserializationConfig.with(...)
        // returns a NEW config and does not mutate the mapper -- so it is a no-op; the
        // setDateFormat call above already applies the format.
        deserializeObjectMapper.getDeserializationConfig().with(dateFormat);
        objectMapper.configure(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS, false);
        customSerializationMapper.configure(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS, false);
    }
    this.classesSet = classes;
    // Extend the marshaller with optional extensions
    for(JSONMarshallerExtension extension : EXTENSIONS){
        extension.extend(this, objectMapper, deserializeObjectMapper);
    }
}
/**
 * Builds the list of Jackson subtype registrations for the given custom classes.
 * Each class is registered twice -- under its simple name and under its fully
 * qualified name -- so either form can appear as a type id in payloads.
 *
 * @param classes custom classes to register; may be null
 * @return the (possibly empty) list of named subtypes
 */
protected List<NamedType> prepareCustomClasses(Set<Class<?>> classes) {
    List<NamedType> customClasses = new ArrayList<NamedType>();
    if (classes == null) {
        return customClasses;
    }
    for (Class<?> clazz : classes) {
        customClasses.add(new NamedType(clazz, clazz.getSimpleName()));
        customClasses.add(new NamedType(clazz, clazz.getName()));
    }
    return customClasses;
}
/**
 * Serializes the given object to a JSON string, wrapping byte[] payloads first.
 *
 * @throws MarshallingException if Jackson fails to write the value
 */
@Override
public String marshall(Object objectInput) {
    Object payload = wrap(objectInput);
    try {
        return objectMapper.writeValueAsString(payload);
    } catch (IOException e) {
        throw new MarshallingException("Error marshalling input", e);
    }
}
/**
 * Deserializes the given JSON string into the requested type.
 * When the target type is one of the registered custom classes, the value is read as
 * Object so the custom wrapper-aware deserializer handles it, then unwrapped.
 * The per-thread "stripped" flag is always reset afterwards.
 *
 * @throws MarshallingException if Jackson fails to read the value
 */
@Override
@SuppressWarnings("unchecked")
public <T> T unmarshall(String serializedInput, Class<T> type) {
    try {
        // Use Class<?> instead of the raw Class type to keep the code warning-clean;
        // the unchecked cast to T is confined to this method.
        Class<?> actualType = classesSet.contains(type) ? Object.class : type;
        return (T) unwrap(deserializeObjectMapper.readValue(serializedInput, actualType));
    } catch (IOException e) {
        throw new MarshallingException("Error unmarshalling input", e);
    } finally {
        stripped.set(false);
    }
}
/**
 * No-op: this marshaller holds no resources that need explicit cleanup.
 */
@Override
public void dispose() {
}

/**
 * @return always {@link MarshallingFormat#JSON}
 */
@Override
public MarshallingFormat getFormat() {
    return MarshallingFormat.JSON;
}
/**
 * Pre-serialization hook: byte[] payloads are wrapped in a JaxbByteArray so they can be
 * marshalled as a regular bean; all other values pass through untouched.
 */
protected Object wrap(Object data) {
    return (data instanceof byte[]) ? new JaxbByteArray((byte[]) data) : data;
}

/**
 * Post-deserialization hook: Wrapped holder instances produced during deserialization
 * are unwrapped to their payload; all other values pass through untouched.
 */
protected Object unwrap(Object data) {
    return (data instanceof Wrapped) ? ((Wrapped) data).unwrap() : data;
}
class ExtendedJaxbAnnotationIntrospector extends JaxbAnnotationIntrospector {
private List<NamedType> customClasses;
private ObjectMapper customObjectMapper;
public ExtendedJaxbAnnotationIntrospector(List<NamedType> customClasses, ObjectMapper anotherCustomObjectMapper) {
this.customClasses = customClasses;
this.customObjectMapper = anotherCustomObjectMapper;
}
@Override
public List<NamedType> findSubtypes(Annotated a) {
List<NamedType> base = super.findSubtypes(a);
List<NamedType> complete = new ArrayList<NamedType>();
if (base != null) {
complete.addAll(base);
}
if (customClasses != null) {
complete.addAll(customClasses);
}
return complete;
}
@Override
public JsonSerializer<?> findSerializer(Annotated am) {
// replace JaxbUnknownAdapter as it breaks JSON marshaller for list and maps with wrapping serializer
XmlJavaTypeAdapter adapterInfo = findAnnotation(XmlJavaTypeAdapter.class, am, true, false, false);
if (adapterInfo != null && adapterInfo.value().isAssignableFrom(JaxbUnknownAdapter.class)) {
if ( findAnnotation(JsonSerialize.class, am, true, false, false) != null ) {
// .. unless there is also an explicitly specified serializer, in such case use the specified one:
return super.findSerializer(am);
}
return new WrappingObjectSerializer(customObjectMapper);
}
return super.findSerializer(am);
}
@Override
public Object findSerializationConverter(Annotated a)
{
Class<?> serType = _rawSerializationType(a);
// Can apply to both container and regular type; no difference yet here
XmlAdapter<?,?> adapter = findAdapter(a, true, serType);
if (adapter != null) {
return _converter(adapter, true);
}
return null;
}
private <A extends Annotation> A findAnnotation(Class<A> annotationClass, Annotated annotated, boolean includePackage, boolean includeClass, boolean includeSuperclasses) {
Annotation annotation = annotated.getAnnotation(annotationClass);
if(annotation != null) {
return (A) annotation;
} else {
Class memberClass = null;
if(annotated instanceof AnnotatedParameter) {
memberClass = ((AnnotatedParameter)annotated).getDeclaringClass();
} else {
AnnotatedElement pkg = annotated.getAnnotated();
if(pkg instanceof Member) {
memberClass = ((Member)pkg).getDeclaringClass();
if(includeClass) {
annotation = memberClass.getAnnotation(annotationClass);
if(annotation != null) {
return (A) annotation;
}
}
} else {
if(!(pkg instanceof Class)) {
throw new IllegalStateException("Unsupported annotated member: " + annotated.getClass().getName());
}
memberClass = (Class)pkg;
}
}
if(memberClass != null) {
if(includeSuperclasses) {
for(Class pkg1 = memberClass.getSuperclass(); pkg1 != null && pkg1 != Object.class; pkg1 = pkg1.getSuperclass()) {
annotation = pkg1.getAnnotation(annotationClass);
if(annotation != null) {
return (A) annotation;
}
}
}
if(includePackage) {
Package pkg2 = memberClass.getPackage();
if(pkg2 != null) {
return memberClass.getPackage().getAnnotation(annotationClass);
}
}
}
return null;
}
}
    /**
     * Resolves the JAXB {@code XmlAdapter} to apply to the given annotated entity.
     * For classes, delegates to {@link #findAdapterForClass}; for members, checks a
     * single {@code @XmlJavaTypeAdapter} first and then each entry of a plural
     * {@code @XmlJavaTypeAdapters}, returning the first adapter whose declared type
     * matches {@code type}. Returns {@code null} when none applies.
     */
    private XmlAdapter<Object,Object> findAdapter(Annotated am, boolean forSerialization, Class<?> type) {
        // First of all, are we looking for annotations for class?
        if (am instanceof AnnotatedClass) {
            return findAdapterForClass((AnnotatedClass) am, forSerialization);
        }
        // Otherwise for a member. First, let's figure out type of property
        // (lookup includes the member's package but not its declaring class/superclasses)
        XmlJavaTypeAdapter adapterInfo = findAnnotation(XmlJavaTypeAdapter.class, am, true, false, false);
        if (adapterInfo != null) {
            XmlAdapter<Object,Object> adapter = checkAdapter(adapterInfo, type, forSerialization);
            if (adapter != null) {
                return adapter;
            }
        }
        // Plural form: first type-compatible entry wins.
        XmlJavaTypeAdapters adapters = findAnnotation(XmlJavaTypeAdapters.class, am, true, false, false);
        if (adapters != null) {
            for (XmlJavaTypeAdapter info : adapters.value()) {
                XmlAdapter<Object,Object> adapter = checkAdapter(info, type, forSerialization);
                if (adapter != null) {
                    return adapter;
                }
            }
        }
        return null;
    }
    /**
     * Instantiates the adapter declared by {@code adapterInfo} if it is applicable
     * to {@code typeNeeded}; returns {@code null} otherwise. When the annotation
     * declares no explicit type, the adapted type is derived from the adapter's
     * {@code XmlAdapter<ValueType, BoundType>} type parameters (index 1 = bound type).
     */
    private final XmlAdapter<Object,Object> checkAdapter(XmlJavaTypeAdapter adapterInfo, Class<?> typeNeeded, boolean forSerialization) {
        // if annotation has no type, it's applicable; if it has, must match
        Class<?> adaptedType = adapterInfo.type();
        // NOTE(review): this is true when the declared adapter class is a SUPERTYPE of
        // JaxbUnknownAdapter (including JaxbUnknownAdapter itself). If the intent was to
        // skip exactly JaxbUnknownAdapter, the check direction looks inverted — confirm.
        if (adapterInfo.value().isAssignableFrom(JaxbUnknownAdapter.class)) {
            return null;
        }
        if (adaptedType == XmlJavaTypeAdapter.DEFAULT.class) {
            // Infer the bound type from the adapter's generic signature.
            JavaType[] params = _typeFactory.findTypeParameters(adapterInfo.value(), XmlAdapter.class);
            adaptedType = params[1].getRawClass();
        }
        if (adaptedType.isAssignableFrom(typeNeeded)) {
            @SuppressWarnings("rawtypes")
            Class<? extends XmlAdapter> cls = adapterInfo.value();
            // 'true' asks ClassUtil to force access to a non-public constructor if needed.
            return ClassUtil.createInstance(cls, true);
        }
        return null;
    }
@SuppressWarnings("unchecked")
private XmlAdapter<Object,Object> findAdapterForClass(AnnotatedClass ac, boolean forSerialization) {
XmlJavaTypeAdapter adapterInfo = ac.getAnnotated().getAnnotation(XmlJavaTypeAdapter.class);
if (adapterInfo != null) {
@SuppressWarnings("rawtypes")
Class<? extends XmlAdapter> cls = adapterInfo.value();
return ClassUtil.createInstance(cls, true);
}
return null;
}
}
/**
* Simple utility Serializer which can be used to override replacement of JaxbUnknownAdapter with WrappingObjectSerializer
*/
public static class PassThruSerializer extends JsonSerializer<Object> {
@Override
public void serialize(Object p0, JsonGenerator p1, SerializerProvider p2) throws IOException, JsonProcessingException {
p1.writeObject(p0);
}
}
class CustomObjectSerializer extends JsonSerializer<Object> {
private ObjectMapper customObjectMapper;
public CustomObjectSerializer(ObjectMapper customObjectMapper) {
this.customObjectMapper = customObjectMapper;
}
@Override
public void serialize(Object value, JsonGenerator jgen, SerializerProvider provider) throws IOException, JsonProcessingException {
String json = customObjectMapper.writeValueAsString(value);
jgen.writeRawValue(json);
}
}
class WrappingObjectSerializer extends JsonSerializer<Object> {
private ObjectMapper customObjectMapper;
public WrappingObjectSerializer(ObjectMapper customObjectMapper) {
this.customObjectMapper = customObjectMapper;
}
@Override
public void serialize(Object value, JsonGenerator jgen, SerializerProvider provider) throws IOException, JsonProcessingException {
String className = value.getClass().getName();
if (value instanceof Collection) {
String collectionJson = writeCollection((Collection) value, customObjectMapper);
jgen.writeRawValue(collectionJson);
} else if (value instanceof Map) {
String mapJson = writeMap((Map) value, customObjectMapper);
jgen.writeRawValue(mapJson);
} else if (value instanceof Object[] || value.getClass().isArray()) {
String arrayJson = writeArray((Object[]) value, customObjectMapper);
jgen.writeRawValue(arrayJson);
} else {
String json = customObjectMapper.writeValueAsString(value);
// don't wrap java and javax classes as they are always available, in addition avoid double wrapping
if (!className.startsWith("java.") && !className.startsWith("javax.") && !json.contains(className)) {
json = "{\"" + className + "\":" + json + "}";
}
jgen.writeRawValue(json);
}
}
private String writeArray(Object[] value, ObjectMapper customObjectMapper) throws IOException{
StringBuilder builder = new StringBuilder();
builder.append("[");
int size = Array.getLength(value);
for (Object element : value) {
size--;
String elementClassName = element.getClass().getName();
String json = customObjectMapper.writeValueAsString(element);
// don't wrap java and javax classes as they are always available, in addition avoid double wrapping
if (!elementClassName.startsWith("java.") && !elementClassName.startsWith("javax.") && !json.contains(elementClassName)) {
json = "{\"" + elementClassName + "\":" + json + "}";
}
builder.append(json);
if (size > 0) {
builder.append(",");
}
}
builder.append("]");
return builder.toString();
}
private String writeMap(Map value, ObjectMapper customObjectMapper) throws IOException{
StringBuilder builder = new StringBuilder();
builder.append("{");
int size = ((Map<?, ?>)value).size();
for (Map.Entry<?, ?> entry : ((Map<?, ?>)value).entrySet()) {
size--;
// handle map key
Object key = entry.getKey();
String keyClassName = key.getClass().getName();
String json = customObjectMapper.writeValueAsString(key);
// don't wrap java and javax classes as they are always available, in addition avoid double wrapping
if (!keyClassName.startsWith("java.") && !keyClassName.startsWith("javax.") && !json.contains(keyClassName)) {
json = "{\"" + keyClassName + "\":" + json + "}";
}
// handle map value
Object mValue = entry.getValue();
String mValueClassName = mValue.getClass().getName();
String jsonValue = customObjectMapper.writeValueAsString(mValue);
// don't wrap java and javax classes as they are always available, in addition avoid double wrapping
if (!mValueClassName.startsWith("java.") && !mValueClassName.startsWith("javax.") && !json.contains(mValueClassName)) {
jsonValue = "{\"" + mValueClassName + "\":" + jsonValue + "}";
}
// add as JSON map
builder.append(json);
builder.append(" : ");
builder.append(jsonValue);
if (size > 0) {
builder.append(",");
}
}
builder.append("}");
return builder.toString();
}
private String writeCollection(Collection collection, ObjectMapper customObjectMapper) throws IOException {
StringBuilder builder = new StringBuilder();
builder.append("[");
int size = collection.size();
Iterator it = collection.iterator();
while (it.hasNext()) {
size--;
Object element = it.next();
String elementClassName = element.getClass().getName();
String json = customObjectMapper.writeValueAsString(element);
// don't wrap java and javax classes as they are always available, in addition avoid double wrapping
if (!elementClassName.startsWith("java.") && !elementClassName.startsWith("javax.") && !json.contains(elementClassName)) {
json = "{\"" + elementClassName + "\":" + json + "}";
}
builder.append(json);
if (size > 0) {
builder.append(",");
}
}
builder.append("]");
return builder.toString();
}
}
    /**
     * Untyped deserializer that recognizes objects wrapped as
     * {@code {"<class name>": {...}}} (the form produced by WrappingObjectSerializer)
     * and materializes them as instances of the named class instead of generic maps.
     * Unrecognized objects fall back to plain LinkedHashMap handling, with a final
     * special case that manually rebuilds JAXB list wrappers.
     */
    class CustomObjectDeserializer extends UntypedObjectDeserializer {
        // Matches dotted chains of valid Java identifiers, e.g. "org.acme.Person".
        private final Pattern VALID_JAVA_IDENTIFIER = Pattern.compile("(\\p{javaJavaIdentifierStart}\\p{javaJavaIdentifierPart}*\\.)*\\p{javaJavaIdentifierStart}\\p{javaJavaIdentifierPart}*");
        private static final long serialVersionUID = 7764405880012867708L;
        // Known classes indexed by both simple name and fully qualified name.
        private Map<String, Class<?>> classes = new HashMap<String, Class<?>>();
        public CustomObjectDeserializer(Set<Class<?>> classes) {
            for (Class<?> c : classes) {
                this.classes.put(c.getSimpleName(), c);
                this.classes.put(c.getName(), c);
            }
        }
        /**
         * Parses a JSON object from the token stream. If the first field name names a
         * known or loadable class, the field's value is deserialized as that class;
         * otherwise the object is collected into a LinkedHashMap as the superclass would.
         * Token-stream position is advanced carefully; do not reorder the jp.nextToken() calls.
         */
        @Override
        protected Object mapObject(JsonParser jp, DeserializationContext ctxt) throws IOException, JsonProcessingException {
            JsonToken t = jp.getCurrentToken();
            if (t == JsonToken.START_OBJECT) {
                t = jp.nextToken();
            }
            // 1.6: minor optimization; let's handle 1 and 2 entry cases separately
            if (t != JsonToken.FIELD_NAME) { // and empty one too
                // empty map might work; but caller may want to modify... so better just give small modifiable
                return new LinkedHashMap<String, Object>(4);
            }
            String field1 = jp.getText();
            jp.nextToken();
            if (classes.containsKey(field1)) {
                // Wrapper detected: record that the type wrapper has been consumed.
                // 'stripped' is read and reset by CustomAsWrapperTypeDeserializer.deserializeTypedFromObject.
                // NOTE(review): 'stripped' and 'deserializeObjectMapper' are fields of the
                // enclosing marshaller, not visible in this chunk — confirm their semantics there.
                stripped.set(true);
                Object value = deserializeObjectMapper.readValue(jp, classes.get(field1));
                jp.nextToken();
                return value;
            } else {
                if (isFullyQualifiedClassname(field1)) {
                    try {
                        // Field name looks like a FQCN not in the registry: try loading it dynamically.
                        Object value = deserializeObjectMapper.readValue(jp, classLoader.loadClass(field1));
                        jp.nextToken();
                        return value;
                    } catch (ClassNotFoundException e) {
                        // Intentionally ignored: fall through to generic map handling below.
                    }
                }
                Object value1 = deserialize(jp, ctxt);
                if (jp.nextToken() != JsonToken.FIELD_NAME) { // single entry; but we want modifiable
                    LinkedHashMap<String, Object> result = new LinkedHashMap<String, Object>(4);
                    result.put(field1, value1);
                    return result;
                }
                String field2 = jp.getText();
                jp.nextToken();
                Object value2 = deserialize(jp, ctxt);
                if (jp.nextToken() != JsonToken.FIELD_NAME) {
                    LinkedHashMap<String, Object> result = new LinkedHashMap<String, Object>(4);
                    result.put(field1, value1);
                    result.put(field2, value2);
                    return result;
                }
                // And then the general case; default map size is 16
                LinkedHashMap<String, Object> result = new LinkedHashMap<String, Object>();
                result.put(field1, value1);
                result.put(field2, value2);
                do {
                    String fieldName = jp.getText();
                    jp.nextToken();
                    result.put(fieldName, deserialize(jp, ctxt));
                } while (jp.nextToken() != JsonToken.END_OBJECT);
                // in case nested jaxb list wrapper was not recognized automatically map it manually
                if (result.containsKey("type") && result.containsKey("componentType") && result.containsKey("element")) {
                    JaxbListWrapper wrapper = new JaxbListWrapper();
                    wrapper.setType(JaxbListWrapper.JaxbWrapperType.valueOf((String)result.get("type")));
                    wrapper.setComponentType((String)result.get("componentType"));
                    wrapper.setElements(toArray(result.get("element")));
                    try {
                        Object data = null;
                        if (wrapper.getType().equals(JaxbListWrapper.JaxbWrapperType.MAP)) {
                            Map<Object, Object> tranformed = new LinkedHashMap<Object, Object>();
                            // this is mapped to JaxbStringObjectPair
                            for (Object element : wrapper.getElements()) {
                                Map<Object, Object> map = (Map<Object, Object>) element;
                                tranformed.put(map.get("key"), map.get("value"));
                            }
                            data = tranformed;
                        } else {
                            data = new JaxbListAdapter().unmarshal(wrapper);
                        }
                        return data;
                    } catch (Exception e) {
                        // Intentionally ignored: if unwrapping fails, return the raw map below.
                    }
                }
                return result;
            }
        }
        // Converts the wrapper's "element" entry to an array; non-collections yield an empty array.
        private Object[] toArray(Object element) {
            if (element != null) {
                if (element instanceof Collection) {
                    return ((Collection) element).toArray();
                }
            }
            return new Object[0];
        }
        // A name qualifies only if it contains a dot and is a dotted chain of Java identifiers.
        private boolean isFullyQualifiedClassname(String classname) {
            if (!classname.contains(".")) {
                return false;
            }
            return VALID_JAVA_IDENTIFIER.matcher(classname).matches();
        }
    }
    /**
     * Wrapper-style type deserializer that temporarily swaps the thread context
     * classloader to the base type's classloader (so domain classes packaged with
     * the model resolve correctly), and that falls back to the plain base-type
     * deserializer whenever standard wrapper handling fails or does not apply.
     * Each method restores the original context classloader in a finally block.
     */
    class CustomAsWrapperTypeDeserializer extends AsWrapperTypeDeserializer {
        public CustomAsWrapperTypeDeserializer(JavaType bt, TypeIdResolver idRes, String typePropertyName, boolean typeIdVisible, Class<?> defaultImpl) {
            super(bt, idRes, typePropertyName, typeIdVisible, defaultImpl);
        }
        protected CustomAsWrapperTypeDeserializer(AsWrapperTypeDeserializer src, BeanProperty property) {
            super(src, property);
        }
        /** Arrays are always deserialized with the base type's own deserializer. */
        @Override
        public Object deserializeTypedFromArray(JsonParser jp, DeserializationContext ctxt) throws IOException, JsonProcessingException {
            ClassLoader current = Thread.currentThread().getContextClassLoader();
            try {
                Thread.currentThread().setContextClassLoader(_baseType.getRawClass().getClassLoader());
                JsonDeserializer<Object> deser = _findDeserializer(ctxt, baseTypeName());
                Object value = deser.deserialize(jp, ctxt);
                return value;
            } finally {
                Thread.currentThread().setContextClassLoader(current);
            }
        }
        /**
         * Tries standard wrapper handling only for registered classes whose wrapper was
         * NOT already consumed ('stripped' is set by CustomObjectDeserializer.mapObject);
         * otherwise clears the flag and deserializes as the plain base type.
         * NOTE(review): 'classesSet' and 'stripped' are fields of the enclosing
         * marshaller, not visible in this chunk.
         */
        @Override
        public Object deserializeTypedFromObject(JsonParser jp, DeserializationContext ctxt) throws IOException, JsonProcessingException {
            ClassLoader current = Thread.currentThread().getContextClassLoader();
            try {
                Thread.currentThread().setContextClassLoader(_baseType.getRawClass().getClassLoader());
                if (classesSet.contains(_baseType.getRawClass()) && !stripped.get()) {
                    try {
                        return super.deserializeTypedFromObject(jp, ctxt);
                    } catch (Exception e) {
                        // Wrapper handling failed: fall back to the base type's deserializer.
                        JsonDeserializer<Object> deser = _findDeserializer(ctxt, baseTypeName());
                        Object value = deser.deserialize(jp, ctxt);
                        return value;
                    }
                }
                stripped.set(false);
                JsonDeserializer<Object> deser = _findDeserializer(ctxt, baseTypeName());
                Object value = deser.deserialize(jp, ctxt);
                return value;
            } finally {
                Thread.currentThread().setContextClassLoader(current);
            }
        }
        /** Scalars: wrapper handling for registered classes, base-type deserializer otherwise. */
        @Override
        public Object deserializeTypedFromScalar(JsonParser jp, DeserializationContext ctxt) throws IOException, JsonProcessingException {
            ClassLoader current = Thread.currentThread().getContextClassLoader();
            try {
                Thread.currentThread().setContextClassLoader(_baseType.getRawClass().getClassLoader());
                if (classesSet.contains(_baseType.getRawClass())) {
                    try {
                        return super.deserializeTypedFromScalar(jp, ctxt);
                    } catch (Exception e) {
                        // Fall back to the base type's deserializer on any failure.
                        JsonDeserializer<Object> deser = _findDeserializer(ctxt, baseTypeName());
                        Object value = deser.deserialize(jp, ctxt);
                        return value;
                    }
                }
                JsonDeserializer<Object> deser = _findDeserializer(ctxt, baseTypeName());
                Object value = deser.deserialize(jp, ctxt);
                return value;
            } finally {
                Thread.currentThread().setContextClassLoader(current);
            }
        }
        /**
         * General case with a special path for map-like base types serialized as arrays:
         * a two-element LinkedHashMap payload is rebuilt as a single key/value entry.
         */
        @Override
        public Object deserializeTypedFromAny(JsonParser jp, DeserializationContext ctxt) throws IOException, JsonProcessingException {
            ClassLoader current = Thread.currentThread().getContextClassLoader();
            try {
                Thread.currentThread().setContextClassLoader(_baseType.getRawClass().getClassLoader());
                if (classesSet.contains(_baseType.getRawClass())) {
                    try {
                        return super.deserializeTypedFromAny(jp, ctxt);
                    } catch (Exception e) {
                        // Fall back to the base type's deserializer on any failure.
                        JsonDeserializer<Object> deser = _findDeserializer(ctxt, baseTypeName());
                        Object value = deser.deserialize(jp, ctxt);
                        return value;
                    }
                }
                if (_baseType.isMapLikeType() && jp.getCurrentToken() == JsonToken.START_ARRAY) {
                    LinkedHashMap<Object, Object> data = new LinkedHashMap<Object, Object>();
                    jp.nextToken();
                    if (jp.getCurrentToken() == JsonToken.END_ARRAY) {
                        return data;
                    }
                    JsonDeserializer<Object> deser = _findDeserializer(ctxt, LinkedHashMap.class.getName());
                    Map<Object, Object> value = (Map) deser.deserialize(jp, ctxt);
                    jp.nextToken();
                    if (value != null) {
                        Collection<Object> values = value.values();
                        // Exactly two values are interpreted as one (key, value) pair.
                        if (values.size() == 2) {
                            Iterator<Object> it = values.iterator();
                            data.put(it.next(), it.next());
                            return data;
                        }
                    }
                    return value;
                } else {
                    JsonDeserializer<Object> deser = _findDeserializer(ctxt, baseTypeName());
                    Object value = deser.deserialize(jp, ctxt);
                    return value;
                }
            } finally {
                Thread.currentThread().setContextClassLoader(current);
            }
        }
        /** Keeps this custom deserializer for properties whose (content) type is registered. */
        @Override
        public TypeDeserializer forProperty(BeanProperty prop) {
            if (prop != null) {
                if (useForType(prop.getType()) || useForType(prop.getType().getContentType()) ) {
                    return new CustomAsWrapperTypeDeserializer(this, prop);
                }
            }
            return super.forProperty(prop);
        }
        // True when the raw class of 't' is one of the registered model classes.
        boolean useForType(JavaType t) {
            if (classesSet.contains(t.getRawClass())) {
                return true;
            }
            return false;
        }
    }
    /** Sets the classloader used to resolve wrapped class names during deserialization. */
    @Override
    public void setClassLoader(ClassLoader classLoader) {
        this.classLoader = classLoader;
    }
    /** Returns the classloader used to resolve wrapped class names during deserialization. */
    @Override
    public ClassLoader getClassLoader() {
        return classLoader;
    }
}
| |
/*
* Copyright 2014-present Open Networking Laboratory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.net.intent;
import com.google.common.annotations.Beta;
import com.google.common.base.MoreObjects;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
import org.onosproject.core.ApplicationId;
import org.onosproject.net.ConnectPoint;
import org.onosproject.net.flow.DefaultTrafficTreatment;
import org.onosproject.net.flow.TrafficSelector;
import org.onosproject.net.flow.TrafficTreatment;
import java.util.Map;
import java.util.Set;
import java.util.List;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
/**
 * Abstraction of single source, multiple destination connectivity intent.
 */
@Beta
public final class SinglePointToMultiPointIntent extends ConnectivityIntent {
    private final ConnectPoint ingressPoint;
    private final Set<ConnectPoint> egressPoints;
    /**
     * To manage multiple treatments use case: optional per-egress treatment,
     * mutually exclusive with the intent-wide treatment (see Builder#build()).
     */
    private final Map<ConnectPoint, TrafficTreatment> egressTreatments;
    /**
     * Creates a new single-to-multi point connectivity intent.
     *
     * @param appId application identifier
     * @param key intent key
     * @param selector traffic selector
     * @param treatment treatment
     * @param ingressPoint port on which traffic will ingress
     * @param egressPoints set of ports on which traffic will egress
     * @param constraints constraints to apply to the intent
     * @param priority priority to use for flows generated by this intent
     * @param egressTreatments map to store the association egress to treatment
     * @throws NullPointerException if {@code ingressPoint} or
     * {@code egressPoints} is null
     * @throws IllegalArgumentException if {@code egressPoints} is empty or
     * contains {@code ingressPoint}
     */
    private SinglePointToMultiPointIntent(ApplicationId appId,
            Key key,
            TrafficSelector selector,
            TrafficTreatment treatment,
            ConnectPoint ingressPoint,
            Set<ConnectPoint> egressPoints,
            List<Constraint> constraints,
            int priority,
            Map<ConnectPoint, TrafficTreatment> egressTreatments) {
        super(appId, key, ImmutableList.of(), selector, treatment, constraints,
                priority);
        checkNotNull(egressPoints);
        checkNotNull(ingressPoint);
        checkArgument(!egressPoints.isEmpty(), "Egress point set cannot be empty");
        checkArgument(!egressPoints.contains(ingressPoint),
                "Set of egresses should not contain ingress (ingress: %s)", ingressPoint);
        this.ingressPoint = ingressPoint;
        // NOTE(review): stored as a mutable HashSet and later returned directly by
        // egressPoints() — consider an immutable copy to avoid exposing internal state.
        this.egressPoints = Sets.newHashSet(egressPoints);
        this.egressTreatments = egressTreatments;
    }
    /**
     * Returns a new single point to multi point intent builder. The application id,
     * ingress point and egress points are required fields. If they are
     * not set by calls to the appropriate methods, an exception will
     * be thrown.
     *
     * @return single point to multi point builder
     */
    public static Builder builder() {
        return new Builder();
    }
    /**
     * Builder of a single point to multi point intent.
     */
    public static final class Builder extends ConnectivityIntent.Builder {
        ConnectPoint ingressPoint;
        Set<ConnectPoint> egressPoints;
        Map<ConnectPoint, TrafficTreatment> egressTreatments = ImmutableMap.of();
        private Builder() {
            // Hide constructor
        }
        @Override
        public Builder appId(ApplicationId appId) {
            return (Builder) super.appId(appId);
        }
        @Override
        public Builder key(Key key) {
            return (Builder) super.key(key);
        }
        @Override
        public Builder selector(TrafficSelector selector) {
            return (Builder) super.selector(selector);
        }
        @Override
        public Builder treatment(TrafficTreatment treatment) {
            return (Builder) super.treatment(treatment);
        }
        @Override
        public Builder constraints(List<Constraint> constraints) {
            return (Builder) super.constraints(constraints);
        }
        @Override
        public Builder priority(int priority) {
            return (Builder) super.priority(priority);
        }
        /**
         * Sets the ingress point of the single point to multi point intent
         * that will be built.
         *
         * @param ingressPoint ingress connect point
         * @return this builder
         */
        public Builder ingressPoint(ConnectPoint ingressPoint) {
            this.ingressPoint = ingressPoint;
            return this;
        }
        /**
         * Sets the egress points of the single point to multi point intent
         * that will be built.
         *
         * @param egressPoints egress connect points
         * @return this builder
         */
        public Builder egressPoints(Set<ConnectPoint> egressPoints) {
            this.egressPoints = ImmutableSet.copyOf(egressPoints);
            return this;
        }
        /**
         * Sets the treatments of the single point to multi point intent
         * that will be built.
         *
         * @param egressTreatments the multiple treatments
         * @return this builder
         */
        public Builder treatments(Map<ConnectPoint, TrafficTreatment> egressTreatments) {
            this.egressTreatments = ImmutableMap.copyOf(egressTreatments);
            return this;
        }
        /**
         * Builds a single point to multi point intent from the
         * accumulated parameters.
         *
         * @return single point to multi point intent
         * @throws IllegalArgumentException if both a non-empty intent-wide
         * treatment and per-egress treatments are set
         */
        public SinglePointToMultiPointIntent build() {
            // An intent-wide treatment and per-egress treatments are mutually exclusive.
            if (treatment != null && !treatment.allInstructions().isEmpty() &&
                    !treatment.equals(DefaultTrafficTreatment.emptyTreatment()) &&
                    egressTreatments != null && !egressTreatments.isEmpty()) {
                throw new IllegalArgumentException("Treatment and Multiple Treatments are both set");
            }
            return new SinglePointToMultiPointIntent(
                    appId,
                    key,
                    selector,
                    treatment,
                    ingressPoint,
                    egressPoints,
                    constraints,
                    priority,
                    egressTreatments
            );
        }
    }
    /**
     * Constructor for serializer.
     */
    protected SinglePointToMultiPointIntent() {
        super();
        this.ingressPoint = null;
        this.egressPoints = null;
        this.egressTreatments = null;
    }
    /**
     * Returns the port on which the ingress traffic should be connected to the
     * egress.
     *
     * @return ingress port
     */
    public ConnectPoint ingressPoint() {
        return ingressPoint;
    }
    /**
     * Returns the set of ports on which the traffic should egress.
     *
     * @return set of egress ports
     */
    public Set<ConnectPoint> egressPoints() {
        return egressPoints;
    }
    /**
     * Returns the multiple treatments jointly with their connection points.
     * @return multiple treatments
     */
    public Map<ConnectPoint, TrafficTreatment> egressTreatments() {
        return egressTreatments;
    }
    @Override
    public String toString() {
        return MoreObjects.toStringHelper(getClass())
                .add("id", id())
                .add("key", key())
                .add("appId", appId())
                .add("priority", priority())
                .add("resources", resources())
                .add("selector", selector())
                .add("treatment", treatment())
                .add("ingress", ingressPoint)
                .add("egress", egressPoints)
                .add("treatments", egressTreatments)
                .add("constraints", constraints())
                .toString();
    }
}
| |
package mvm.rya.accumulo.pig.optimizer;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.openrdf.query.BindingSet;
import org.openrdf.query.Dataset;
import org.openrdf.query.algebra.*;
import org.openrdf.query.algebra.evaluation.QueryOptimizer;
import org.openrdf.query.algebra.evaluation.impl.EvaluationStatistics;
import org.openrdf.query.algebra.helpers.QueryModelVisitorBase;
import org.openrdf.query.algebra.helpers.StatementPatternCollector;
import java.util.*;
/**
 * A query optimizer that re-orders nested Joins according to cardinality,
 * preferring join arguments that share variables with the previously selected
 * argument (so intermediate results stay small and joins remain connected).
 */
public class SimilarVarJoinOptimizer implements QueryOptimizer {

    /** Statistics source used to estimate the cardinality of each join argument. */
    protected final EvaluationStatistics statistics;

    public SimilarVarJoinOptimizer() {
        this(new EvaluationStatistics());
    }

    public SimilarVarJoinOptimizer(EvaluationStatistics statistics) {
        this.statistics = statistics;
    }

    /**
     * Applies generally applicable optimizations: path expressions are sorted
     * from more to less specific.
     *
     * @param tupleExpr the expression tree to optimize in place
     */
    public void optimize(TupleExpr tupleExpr, Dataset dataset, BindingSet bindings) {
        tupleExpr.visit(new JoinVisitor());
    }

    protected class JoinVisitor extends QueryModelVisitorBase<RuntimeException> {

        // Variables bound by expressions already visited on the current path.
        Set<String> boundVars = new HashSet<String>();

        @Override
        public void meet(LeftJoin leftJoin) {
            leftJoin.getLeftArg().visit(this);
            // The right arg sees the left arg's bindings; restore the set afterwards.
            Set<String> origBoundVars = boundVars;
            try {
                boundVars = new HashSet<String>(boundVars);
                boundVars.addAll(leftJoin.getLeftArg().getBindingNames());
                leftJoin.getRightArg().visit(this);
            } finally {
                boundVars = origBoundVars;
            }
        }

        @Override
        public void meet(Join node) {
            Set<String> origBoundVars = boundVars;
            try {
                boundVars = new HashSet<String>(boundVars);
                // Recursively flatten the join tree into its leaf arguments.
                List<TupleExpr> joinArgs = getJoinArgs(node, new ArrayList<TupleExpr>());
                // Estimate the cardinality of every argument up front.
                Map<TupleExpr, Double> cardinalityMap = new HashMap<TupleExpr, Double>();
                for (TupleExpr tupleExpr : joinArgs) {
                    cardinalityMap.put(tupleExpr, statistics.getCardinality(tupleExpr));
                }
                // Greedily pick the next argument: lowest cardinality among those
                // sharing variables with the previously picked argument.
                List<TupleExpr> orderedJoinArgs = new ArrayList<TupleExpr>(joinArgs.size());
                TupleExpr last = null;
                while (!joinArgs.isEmpty()) {
                    TupleExpr tupleExpr = selectNextTupleExpr(joinArgs, cardinalityMap, last);
                    if (tupleExpr == null) {
                        break;
                    }
                    joinArgs.remove(tupleExpr);
                    orderedJoinArgs.add(tupleExpr);
                    last = tupleExpr;
                    // Recursively optimize nested structure inside this argument.
                    tupleExpr.visit(this);
                    boundVars.addAll(tupleExpr.getBindingNames());
                }
                // Build new join hierarchy
                // Note: generated hierarchy is right-recursive to help the
                // IterativeEvaluationOptimizer to factor out the left-most join
                // argument
                int i = 0;
                TupleExpr replacement = orderedJoinArgs.get(i);
                for (i++; i < orderedJoinArgs.size(); i++) {
                    replacement = new Join(replacement, orderedJoinArgs.get(i));
                }
                // Replace old join hierarchy
                node.replaceWith(replacement);
            } finally {
                boundVars = origBoundVars;
            }
        }

        /** Collects the leaf (non-Join) arguments of a join tree into {@code joinArgs}. */
        protected <L extends List<TupleExpr>> L getJoinArgs(TupleExpr tupleExpr, L joinArgs) {
            if (tupleExpr instanceof Join) {
                Join join = (Join) tupleExpr;
                getJoinArgs(join.getLeftArg(), joinArgs);
                getJoinArgs(join.getRightArg(), joinArgs);
            } else {
                joinArgs.add(tupleExpr);
            }
            return joinArgs;
        }

        /**
         * Returns all variables of all statement patterns contained in {@code tupleExpr}
         * (with duplicates), or {@code null} for a null input.
         */
        protected List<Var> getStatementPatternVars(TupleExpr tupleExpr) {
            if (tupleExpr == null) {
                return null;
            }
            List<StatementPattern> stPatterns = StatementPatternCollector.process(tupleExpr);
            List<Var> varList = new ArrayList<Var>(stPatterns.size() * 4);
            for (StatementPattern sp : stPatterns) {
                sp.getVars(varList);
            }
            return varList;
        }

        /** Builds a frequency map of the given variables (retained for subclasses). */
        protected <M extends Map<Var, Integer>> M getVarFreqMap(List<Var> varList, M varFreqMap) {
            for (Var var : varList) {
                Integer freq = varFreqMap.get(var);
                freq = (freq == null) ? 1 : freq + 1;
                varFreqMap.put(var, freq);
            }
            return varFreqMap;
        }

        /**
         * Selects from a list of tuple expressions the next tuple expression that
         * should be evaluated: the lowest-cardinality expression among those that
         * share at least one variable with {@code last} (or among all expressions
         * when {@code last} is null or nothing overlaps).
         */
        protected TupleExpr selectNextTupleExpr(List<TupleExpr> expressions,
                Map<TupleExpr, Double> cardinalityMap,
                TupleExpr last) {
            double lowestCardinality = Double.MAX_VALUE;
            TupleExpr result = expressions.get(0);
            expressions = getExprsWithSameVars(expressions, last);
            for (TupleExpr tupleExpr : expressions) {
                double cardinality = cardinalityMap.get(tupleExpr);
                if (cardinality < lowestCardinality) {
                    // More specific path expression found
                    lowestCardinality = cardinality;
                    result = tupleExpr;
                }
            }
            return result;
        }

        /**
         * Filters {@code expressions} down to those sharing at least one variable
         * with {@code last}; falls back to the full list when none overlap.
         */
        protected List<TupleExpr> getExprsWithSameVars(List<TupleExpr> expressions, TupleExpr last) {
            if (last == null) {
                return expressions;
            }
            // Hoisted out of the loop: 'last' does not change per candidate, so its
            // vars were being recomputed redundantly on every iteration.
            List<Var> lastVars = getStatementPatternVars(last);
            List<TupleExpr> retExprs = new ArrayList<TupleExpr>();
            for (TupleExpr tupleExpr : expressions) {
                // Fresh list each call; retainAll computes the variable overlap in place.
                List<Var> candidateVars = getStatementPatternVars(tupleExpr);
                candidateVars.retainAll(lastVars);
                if (!candidateVars.isEmpty()) {
                    retExprs.add(tupleExpr);
                }
            }
            if (retExprs.isEmpty()) {
                return expressions;
            }
            return retExprs;
        }
    }
}
| |
/*
* Copyright 2015-2018 Jeeva Kandasamy (jkandasa@gmail.com)
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mycontroller.standalone.provider;
import org.mycontroller.standalone.AppProperties;
import org.mycontroller.standalone.AppProperties.RESOURCE_TYPE;
import org.mycontroller.standalone.db.DaoUtils;
import org.mycontroller.standalone.db.ResourcesLogsUtils.LOG_DIRECTION;
import org.mycontroller.standalone.db.ResourcesLogsUtils.LOG_LEVEL;
import org.mycontroller.standalone.db.tables.Node;
import org.mycontroller.standalone.db.tables.ResourcesLogs;
import org.mycontroller.standalone.db.tables.Sensor;
import org.mycontroller.standalone.db.tables.SensorVariable;
import org.mycontroller.standalone.message.IMessage;
import org.mycontroller.standalone.message.McMessageUtils.MESSAGE_TYPE;
import org.mycontroller.standalone.message.McMessageUtils.MESSAGE_TYPE_SET_REQ;
import org.mycontroller.standalone.message.McMessageUtils.MESSAGE_TYPE_STREAM;
import lombok.extern.slf4j.Slf4j;
/**
* @author Jeeva Kandasamy (jkandasa)
* @since 1.2.0
*/
@Slf4j
public class ResourcesLogger implements Runnable {
private static final int MAXIMUN_PAYLOAD_SIZE = 220;
private static final int MAXIMUN_PAYLOAD_SIZE_FIRMWARE = 20;
private IMessage _message = null;
private RESOURCE_TYPE rType = null;
private LOG_DIRECTION logDirection = null;
public ResourcesLogger(IMessage _message) {
this._message = _message;
}
private boolean isInPermittedLogLevel(LOG_LEVEL logLevel) {
if (LOG_LEVEL.fromString(AppProperties.getInstance().getControllerSettings().getResourcesLogLevel())
.ordinal() <= logLevel.ordinal()) {
return true;
}
return false;
}
private Integer getResourceId() {
switch (rType) {
case GATEWAY:
return _message.getGatewayId();
case NODE:
Node _node = DaoUtils.getNodeDao().get(_message.getGatewayId(), _message.getNodeEui());
return _node.getId();
case SENSOR:
_node = DaoUtils.getNodeDao().get(_message.getGatewayId(), _message.getNodeEui());
Sensor _s = DaoUtils.getSensorDao().get(_node.getId(), _message.getSensorId());
return _s.getId();
case SENSOR_VARIABLE:
_node = DaoUtils.getNodeDao().get(_message.getGatewayId(), _message.getNodeEui());
_s = DaoUtils.getSensorDao().get(_node.getId(), _message.getSensorId());
SensorVariable _sv = DaoUtils.getSensorVariableDao().get(_s.getId(),
MESSAGE_TYPE_SET_REQ.fromString(_message.getSubType()));
return _sv.getId();
default:
return null;
}
}
private void log() {
if (!isInPermittedLogLevel(LOG_LEVEL.INFO)) {
return;
}
// update resource type
if (_message.getSensorId().equalsIgnoreCase(IMessage.SENSOR_BROADCAST_ID)) {
if (_message.getNodeEui().equalsIgnoreCase(IMessage.NODE_BROADCAST_ID)) {
rType = RESOURCE_TYPE.GATEWAY;
} else {
rType = RESOURCE_TYPE.NODE;
}
} else if (_message.getType().equalsIgnoreCase(MESSAGE_TYPE.C_PRESENTATION.getText())) {
rType = RESOURCE_TYPE.SENSOR;
} else {
rType = RESOURCE_TYPE.SENSOR_VARIABLE;
}
switch (MESSAGE_TYPE.fromString(_message.getType())) {
case C_INTERNAL:
if (!isInPermittedLogLevel(LOG_LEVEL.NOTICE)) {
return;
}
internal();
break;
case C_PRESENTATION:
if (!isInPermittedLogLevel(LOG_LEVEL.NOTICE)) {
return;
}
presentation();
break;
case C_REQ:
if (!isInPermittedLogLevel(LOG_LEVEL.INFO)) {
return;
}
request();
break;
case C_SET:
if (!isInPermittedLogLevel(LOG_LEVEL.INFO)) {
return;
}
set();
break;
case C_STREAM:
if (!isInPermittedLogLevel(LOG_LEVEL.NOTICE)) {
return;
}
stream();
break;
default:
break;
}
}
private void internal() {
doLog(MESSAGE_TYPE.C_INTERNAL, LOG_LEVEL.NOTICE);
}
private void presentation() {
doLog(MESSAGE_TYPE.C_PRESENTATION, LOG_LEVEL.NOTICE);
}
private void request() {
doLog(MESSAGE_TYPE.C_REQ, LOG_LEVEL.INFO);
}
private void set() {
doLog(MESSAGE_TYPE.C_SET, LOG_LEVEL.INFO);
}
/**
 * Records a STREAM message. Firmware-config exchanges are logged at NOTICE;
 * firmware data chunks only at TRACE (they are very chatty); image/sound
 * streams and unknown sub-types are not logged at all.
 */
private void stream() {
    MESSAGE_TYPE_STREAM subType = MESSAGE_TYPE_STREAM.fromString(_message.getSubType());
    if (subType == MESSAGE_TYPE_STREAM.ST_FIRMWARE_CONFIG_REQUEST
            || subType == MESSAGE_TYPE_STREAM.ST_FIRMWARE_CONFIG_RESPONSE) {
        doLog(MESSAGE_TYPE.C_STREAM, LOG_LEVEL.NOTICE);
    } else if (subType == MESSAGE_TYPE_STREAM.ST_FIRMWARE_REQUEST
            || subType == MESSAGE_TYPE_STREAM.ST_FIRMWARE_RESPONSE) {
        if (isInPermittedLogLevel(LOG_LEVEL.TRACE)) {
            doLog(MESSAGE_TYPE.C_STREAM, LOG_LEVEL.TRACE);
        }
    }
    // ST_IMAGE / ST_SOUND: not supported at this moment; everything else is
    // silently ignored (matches the original switch's default/return cases).
}
/**
 * Builds the log text "[subType] payload" — truncating the payload to the
 * per-type maximum — and persists it as a {@code ResourcesLogs} row.
 *
 * @param type     high-level message type being logged
 * @param logLevel level the entry is recorded at
 */
private void doLog(MESSAGE_TYPE type, LOG_LEVEL logLevel) {
    StringBuilder _builder = new StringBuilder();
    _builder.append("[").append(_message.getSubType()).append("] ");
    String payload = _message.getPayload();
    if (type == MESSAGE_TYPE.C_STREAM
            && _message.getSubType().equalsIgnoreCase(MESSAGE_TYPE_STREAM.ST_FIRMWARE_RESPONSE.getText())
            && payload != null) {
        // Firmware chunks can be huge; keep only a short prefix.
        // (null guard added: the original dereferenced the payload here.)
        if (payload.length() > MAXIMUN_PAYLOAD_SIZE_FIRMWARE) {
            _builder.append(payload.substring(0, MAXIMUN_PAYLOAD_SIZE_FIRMWARE - 3)).append("...");
        } else {
            _builder.append(payload);
        }
    } else if (payload != null && payload.length() > MAXIMUN_PAYLOAD_SIZE) {
        _builder.append(payload.substring(0, MAXIMUN_PAYLOAD_SIZE)).append("...");
    } else {
        // Appends the literal "null" when there is no payload — matches the
        // previous behavior.
        _builder.append(payload);
    }
    // Fix: the original called logDirection(...) twice on the builder; the
    // first value (the logDirection field) was always overwritten by the
    // SENT/RECEIVED value derived from the message, so only that one is kept.
    ResourcesLogs resLog = ResourcesLogs.builder()
            .resourceType(rType)
            .resourceId(getResourceId())
            .logLevel(logLevel)
            .messageType(type)
            .message(_builder.toString())
            .timestamp(_message.getTimestamp())
            .logDirection(_message.isTxMessage() ? LOG_DIRECTION.SENT : LOG_DIRECTION.RECEIVED)
            .build();
    DaoUtils.getResourcesLogsDao().add(resLog);
}
@Override
public void run() {
    // Entry point for the logging task: build and store the log entry, never
    // letting an exception escape and kill the worker thread.
    try {
        log();
    } catch (Exception ex) {
        _logger.error("Error on {},", _message, ex);
    }
}
}
| |
package br.senac.tads3.pi03b.gruposete.dao;
import br.senac.tads3.pi03b.gruposete.models.RelatorioSLA;
import br.senac.tads3.pi03b.gruposete.models.RelatorioMudancas;
import br.senac.tads3.pi03b.gruposete.models.RelatorioValores;
import br.senac.tads3.pi03b.gruposete.utils.DbUtil;
import java.sql.Connection;
import java.sql.Date;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
/**
 * DAO for report queries: change log (relatoriomudancas), SLA messages, and
 * yearly/monthly sales totals per branch (filial).
 *
 * <p>NOTE(review): the original version held Connection/PreparedStatement/
 * ResultSet in <em>static</em> fields shared by every caller — unsafe under
 * concurrency and leak-prone. All JDBC handles are now method-local and closed
 * via try-with-resources.
 */
public class RelatorioDAO {

    /**
     * Loads the 50 most recent change-log entries joined with employee data.
     *
     * @return list of change-log rows (possibly empty)
     * @throws SQLException on any database error
     * @throws ClassNotFoundException if the JDBC driver cannot be loaded
     */
    public ArrayList<RelatorioMudancas> procurarRelatorioMudanca() throws SQLException, ClassNotFoundException {
        ArrayList<RelatorioMudancas> listaResultado = new ArrayList<>();
        String sql = "SELECT * FROM `relatoriomudancas` "
                + "INNER JOIN funcionario "
                + "ON relatoriomudancas.id_funcionario = funcionario.id_funcionario "
                + "ORDER BY data_m DESC LIMIT 50";
        try (Connection connection = DbUtil.getConnection();
                PreparedStatement ps = connection.prepareStatement(sql);
                ResultSet rs = ps.executeQuery()) {
            while (rs.next()) {
                RelatorioMudancas relatorio = new RelatorioMudancas();
                relatorio.setMudanca(rs.getString("mudanca"));
                relatorio.setData(rs.getString("data_m"));
                relatorio.setFuncionario(rs.getString("nome"));
                relatorio.setFilial(rs.getString("filial"));
                relatorio.setCargo(rs.getString("cargo"));
                listaResultado.add(relatorio);
            }
        }
        return listaResultado;
    }

    /**
     * Loads the 50 most recent SLA entries joined with employee data.
     *
     * @return list of SLA rows (possibly empty)
     * @throws SQLException on any database error
     * @throws ClassNotFoundException if the JDBC driver cannot be loaded
     */
    public ArrayList<RelatorioSLA> procurarRelatorioSLA() throws SQLException, ClassNotFoundException {
        ArrayList<RelatorioSLA> listaResultado = new ArrayList<>();
        String sql = "SELECT * FROM `sla` "
                + "INNER JOIN funcionario "
                + "ON sla.id_funcionario = funcionario.id_funcionario "
                + "ORDER BY data_m DESC LIMIT 50";
        try (Connection connection = DbUtil.getConnection();
                PreparedStatement ps = connection.prepareStatement(sql);
                ResultSet rs = ps.executeQuery()) {
            while (rs.next()) {
                listaResultado.add(lerSLA(rs));
            }
        }
        return listaResultado;
    }

    /**
     * Loads the 50 most recent SLA entries for a single branch.
     *
     * @param filial branch name used as the filter
     * @return list of SLA rows (possibly empty)
     * @throws SQLException on any database error
     * @throws ClassNotFoundException if the JDBC driver cannot be loaded
     */
    public ArrayList<RelatorioSLA> procurarRelatorioSLA(String filial) throws SQLException, ClassNotFoundException {
        ArrayList<RelatorioSLA> listaResultado = new ArrayList<>();
        // Fix: the original concatenation produced "...filial = ?ORDER BY..."
        // (missing space), which is invalid SQL.
        String sql = "SELECT * FROM `sla` "
                + "INNER JOIN funcionario "
                + "ON sla.id_funcionario = funcionario.id_funcionario "
                + "WHERE funcionario.filial = ? "
                + "ORDER BY data_m DESC LIMIT 50";
        try (Connection connection = DbUtil.getConnection();
                PreparedStatement ps = connection.prepareStatement(sql)) {
            ps.setString(1, filial);
            try (ResultSet rs = ps.executeQuery()) {
                while (rs.next()) {
                    listaResultado.add(lerSLA(rs));
                }
            }
        }
        return listaResultado;
    }

    /**
     * Deletes the SLA entry with the given id.
     *
     * @param id primary key of the sla row
     * @throws SQLException on any database error
     * @throws ClassNotFoundException if the JDBC driver cannot be loaded
     */
    public void excluirSLA(int id) throws SQLException, ClassNotFoundException {
        String sql = "DELETE FROM sla WHERE id_sla = ?";
        try (Connection connection = DbUtil.getConnection();
                PreparedStatement ps = connection.prepareStatement(sql)) {
            ps.setInt(1, id);
            ps.execute();
        }
    }

    /**
     * Sums sales totals per branch per year (up to 100 rows, newest first).
     *
     * @return list of (year, total, branch) rows
     * @throws SQLException on any database error
     * @throws ClassNotFoundException if the JDBC driver cannot be loaded
     */
    public ArrayList<RelatorioValores> procurarRelatorioAno() throws SQLException, ClassNotFoundException {
        ArrayList<RelatorioValores> listaResultado = new ArrayList<>();
        // Fix: the original concatenation produced "...,1,4))ORDER BY..."
        // (missing space before ORDER BY), which is invalid SQL.
        String sql = "SELECT (SELECT SUBSTRING(venda.data_venda,1,4 )) AS Ano, "
                + "ROUND (SUM(venda.total_preco), 2) AS SOMA, filial "
                + "FROM venda "
                + "INNER JOIN funcionario "
                + "ON funcionario.id_funcionario = venda.id_funcionario "
                + "GROUP BY funcionario.filial, (SELECT SUBSTRING(venda.data_venda,1,4)) "
                + "ORDER BY (SELECT SUBSTRING(venda.data_venda,1,4)) "
                + "DESC LIMIT 100";
        try (Connection connection = DbUtil.getConnection();
                PreparedStatement ps = connection.prepareStatement(sql);
                ResultSet rs = ps.executeQuery()) {
            while (rs.next()) {
                listaResultado.add(lerValores(rs, "Ano"));
            }
        }
        return listaResultado;
    }

    /**
     * Sums sales totals per branch per month (up to 100 rows, newest first).
     *
     * @return list of (month, total, branch) rows
     * @throws SQLException on any database error
     * @throws ClassNotFoundException if the JDBC driver cannot be loaded
     */
    public ArrayList<RelatorioValores> procurarRelatorioMes() throws SQLException, ClassNotFoundException {
        ArrayList<RelatorioValores> listaResultado = new ArrayList<>();
        String sql = "SELECT (SELECT SUBSTRING(venda.data_venda,1,7 )) AS Mes,"
                + "ROUND (SUM(venda.total_preco), 2) AS SOMA, filial "
                + "FROM venda "
                + "INNER JOIN funcionario "
                + "ON funcionario.id_funcionario = venda.id_funcionario "
                + "GROUP BY funcionario.filial, (SELECT SUBSTRING(venda.data_venda,1,7)) "
                + "ORDER BY (SELECT SUBSTRING(venda.data_venda,1,7)) "
                + "DESC LIMIT 100";
        try (Connection connection = DbUtil.getConnection();
                PreparedStatement ps = connection.prepareStatement(sql);
                ResultSet rs = ps.executeQuery()) {
            while (rs.next()) {
                listaResultado.add(lerValores(rs, "Mes"));
            }
        }
        return listaResultado;
    }

    /**
     * Inserts a change-log entry stamped with today's date.
     *
     * <p>Fix: the original swallowed SQLException in an empty catch block, so
     * failed inserts went unnoticed; errors now propagate to the caller.
     *
     * @param relatorios entry to persist (mudanca text and employee id)
     * @throws SQLException on any database error
     */
    public void inserir(RelatorioMudancas relatorios) throws SQLException, Exception {
        String sql = "INSERT INTO `relatoriomudancas`"
                + "(`mudanca`, `id_funcionario`, `data_m`) "
                + "VALUES (?, ?, ?)";
        try (Connection connection = DbUtil.getConnection();
                PreparedStatement ps = connection.prepareStatement(sql)) {
            ps.setString(1, relatorios.getMudanca());
            ps.setInt(2, relatorios.getId_func());
            ps.setString(3, dataAtual());
            ps.executeUpdate();
        }
    }

    /**
     * Inserts an SLA entry stamped with today's date.
     *
     * <p>Fix: the original swallowed SQLException in an empty catch block, so
     * failed inserts went unnoticed; errors now propagate to the caller.
     *
     * @param relatorios entry to persist (mensagem text and employee id)
     * @throws SQLException on any database error
     */
    public void sla(RelatorioSLA relatorios) throws SQLException, Exception {
        String sql = "INSERT INTO `sla`"
                + "(`mensagem`, `id_funcionario`, `data_m`) "
                + "VALUES (?, ?, ?)";
        try (Connection connection = DbUtil.getConnection();
                PreparedStatement ps = connection.prepareStatement(sql)) {
            ps.setString(1, relatorios.getMensagem());
            ps.setInt(2, relatorios.getId_func());
            ps.setString(3, dataAtual());
            ps.executeUpdate();
        }
    }

    /** Maps the current ResultSet row to a RelatorioSLA bean. */
    private static RelatorioSLA lerSLA(ResultSet rs) throws SQLException {
        RelatorioSLA relatorio = new RelatorioSLA();
        relatorio.setId_sla(rs.getInt("id_sla"));
        relatorio.setMensagem(rs.getString("mensagem"));
        relatorio.setData(rs.getString("data_m"));
        relatorio.setFuncionario(rs.getString("nome"));
        relatorio.setFilial(rs.getString("filial"));
        relatorio.setCargo(rs.getString("cargo"));
        return relatorio;
    }

    /** Maps the current ResultSet row to a RelatorioValores bean; {@code periodColumn} is "Ano" or "Mes". */
    private static RelatorioValores lerValores(ResultSet rs, String periodColumn) throws SQLException {
        RelatorioValores relatorio = new RelatorioValores();
        relatorio.setValor(rs.getFloat("SOMA"));
        relatorio.setData(rs.getString(periodColumn));
        relatorio.setFilial(rs.getString("filial"));
        return relatorio;
    }

    /** Today's date formatted as yyyy-MM-dd — the format stored in the data_m columns. */
    private static String dataAtual() {
        SimpleDateFormat formatarDate = new SimpleDateFormat("yyyy-MM-dd");
        return formatarDate.format(new Date(System.currentTimeMillis()));
    }
}
| |
/*
UA CMPUT 301 Project Group: CMPUT301W15T06
Copyright {2015} {Jingjiao Ni
Tianqi Xiao
Jiafeng Wu
Xinyi Pan
Xinyi Wu
Han Wang}
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software distributed under
the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language
governing permissions and limitations under the License.
*/
package ca.ualberta.CMPUT301W15T06;
import java.util.ArrayList;
import android.os.Bundle;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.Dialog;
import android.app.AlertDialog.Builder;
import android.content.DialogInterface;
import android.text.InputType;
import android.text.method.DigitsKeyListener;
import android.view.Menu;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemClickListener;
import android.widget.ArrayAdapter;
import android.widget.EditText;
import android.widget.ListView;
/**
* This <code>ClaimantTagListActivity</code> class is an extended class
* of <code>Activity</code> class. This class controls the User Interface of
 * <code>TagList</code> for claimant. This view displays a list of
 * <code>Tag</code>; its options-menu callback currently performs no
 * inflation and returns false. It will be used when the claimant asks
 * to access the <code>TagList</code>.
*
* @author CMPUT301W15T06
* @version 04/07/2015
* @see android.os.Bundle
* @see android.app.Activity
* @see android.view.Menu
* @see android.app.AlertDialog
* @see android.app.Dialog
* @see android.app.AlertDialog.Builder
* @see android.content.DialogInterface
* @see android.text.InputType
* @see android.text.method.DigitsKeyListener
* @see android.view.View
* @see android.view.View.OnClickListener
* @see android.widget.AdapterView
* @see android.widget.AdapterView.OnItemClickListener
* @see android.widget.ArrayAdapter
* @see android.widget.EditText
* @see android.widget.ListView
*/
public class ClaimantTagListActivity extends Activity {
/**
 * The dialog most recently prepared by the framework (see onPrepareDialog).
 */
private Dialog dialog;
/**
 * Indices into R.array.tag_dialog_array for the per-tag action dialog:
 * EDIT is the first entry (0), DELETE the second (1).
 */
private static final int DELETE = 1;
private static final int EDIT = 0;
/**
 * Set a ClaimantTagListController (the controller of this class) ctlc with default value null.
 */
private ClaimantTagListController ctlc=null;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_claimant_tag_list);
setTitle("User: "+AppSingleton.getInstance().getUserName());
ctlc=new ClaimantTagListController(AppSingleton.getInstance().getCurrentUser());
ListView listView = (ListView) findViewById(R.id.tagListView);
final ArrayList<Tag> list =AppSingleton.getInstance().getCurrentUser().getTagList();
final ArrayAdapter<Tag> adapter=new ArrayAdapter<Tag>(this, android.R.layout.simple_list_item_1,list);
listView.setAdapter(adapter);
AppSingleton.getInstance().getCurrentUser().addListener(new Listener() {
@Override
public void update() {
// Model changed: refresh the visible tag list.
adapter.notifyDataSetChanged();
}
});
listView.setOnItemClickListener(new OnItemClickListener() {
@Override
public void onItemClick(AdapterView<?> parent, View view,
final int position, long id) {
// Tapping a tag offers the edit/delete choice dialog.
AlertDialog.Builder builder = new AlertDialog.Builder(ClaimantTagListActivity.this);
setItem(builder,list,position);
builder.create();
builder.show();
}
});
}
/**
 * Populates the choice dialog with the edit/delete actions for the tag at
 * the given position and wires their click handling.
 *
 * @param builder a Builder object
 * @param list an ArrayList that contains all the tags.
 * @param position the position index in the tag list.
 */
private void setItem(Builder builder, final ArrayList<Tag> list, final int position) {
builder.setItems(R.array.tag_dialog_array, new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
// 'which' is the tapped row: 0 = EDIT, 1 = DELETE,
// matching the order of R.array.tag_dialog_array.
if (which==EDIT){
edit(list,position);
}else if (which==DELETE){
try {
ctlc.delete(list.get(position));
} catch (NetWorkException e) {
// Network failure while deleting: surfaced as an unchecked exception.
throw new RuntimeException(e);
}
}
}
});
}
/**
 * Shows an edit dialog pre-filled with the tag's current name and commits
 * the change through the controller on OK.
 *
 * @param list an ArrayList that contains all the tags.
 * @param position the position index in the tag list.
 */
private void edit(final ArrayList<Tag> list, final int position) {
AlertDialog.Builder builder = new AlertDialog.Builder(ClaimantTagListActivity.this);
builder.setTitle("Edit the Tag Name");
final EditText input=new EditText(ClaimantTagListActivity.this);
input.setText(list.get(position).getName());
// Restrict tag names to digits and lowercase letters.
input.setKeyListener(DigitsKeyListener.getInstance("0123456789zxcvbnmasdfghjklqwertyuiop"));
builder.setView(input);
builder.setPositiveButton("OK", new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
// Commit the rename through the controller.
try {
ctlc.edit(list.get(position),input.getText().toString());
} catch (NetWorkException e) {
// Network failure while renaming: surfaced as an unchecked exception.
throw new RuntimeException(e);
} }
});
builder.setNegativeButton("Cancel",new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
// Cancel: nothing to do, the dialog is simply dismissed.
}
});
builder.create();
builder.show();
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// No menu items are added; returning false hides the options menu.
return false;
}
/**
 * This method will add tag in the item. Catch exceptions to prevent program crushing.
 *
 * @param v the basic building block for user interface components
 * @throws NetWorkException
 * @throws RuntimeException
 */
public void addTag(View v){
EditText addView=(EditText)findViewById(R.id.addTagEditText);
try {
ctlc.addTag(addView.getText().toString());
} catch (NetWorkException e) {
// Network failure while adding: surfaced as an unchecked exception.
throw new RuntimeException(e);
}
addView.setText("");
}
/**
 * Returns the dialog most recently prepared by the framework
 * (captured in onPrepareDialog), or null if none has been shown yet.
 */
public Dialog getDialog() {
return dialog;
}
@SuppressWarnings("deprecation")
@Override
protected void onPrepareDialog(int id, Dialog dialog, Bundle args){
super.onPrepareDialog(id, dialog, args);
// Remember the dialog so it can be retrieved via getDialog()
// (presumably for tests/inspection — confirm against callers).
this.dialog = dialog;
}
}
| |
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.sdklib.internal.repository.archives;
import com.android.annotations.NonNull;
import com.android.annotations.Nullable;
import com.android.sdklib.repository.NoPreviewRevision;
import java.util.Properties;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* @deprecated
* com.android.sdklib.internal.repository has moved into Studio as
* com.android.tools.idea.sdk.remote.internal.
*/
@Deprecated
public class ArchFilter {

    private static final String PROP_HOST_OS   = "Archive.HostOs";      //$NON-NLS-1$
    private static final String PROP_HOST_BITS = "Archive.HostBits";    //$NON-NLS-1$
    private static final String PROP_JVM_BITS  = "Archive.JvmBits";     //$NON-NLS-1$
    private static final String PROP_MIN_JVM_VERSION = "Archive.MinJvmVers";  //$NON-NLS-1$

    /**
     * The legacy property used to serialize {@link LegacyOs} in source.properties files.
     * <p/>
     * Replaced by {@code ArchFilter.PROP_HOST_OS}.
     */
    public static final String LEGACY_PROP_OS = "Archive.Os";       //$NON-NLS-1$

    /**
     * The legacy property used to serialize {@link LegacyArch} in source.properties files.
     * <p/>
     * Replaced by {@code ArchFilter.PROP_HOST_BITS} and {@code ArchFilter.PROP_JVM_BITS}.
     */
    public static final String LEGACY_PROP_ARCH = "Archive.Arch";   //$NON-NLS-1$

    private final HostOs  mHostOs;
    private final BitSize mHostBits;
    private final BitSize mJvmBits;
    private final NoPreviewRevision mMinJvmVersion;

    /**
     * Creates a new {@link ArchFilter} with the specified filter attributes.
     * <p/>
     * This filter represents the attributes required for a package's {@link Archive} to
     * be installable on the current architecture. Not all fields are required -- those that
     * are not specified imply there is no limitation on that particular attribute.
     *
     * @param hostOs The host OS or null if there's no limitation for this package.
     * @param hostBits The host bit size or null if there's no limitation for this package.
     * @param jvmBits The JVM bit size or null if there's no limitation for this package.
     * @param minJvmVersion The minimal JVM version required by this package
     *                      or null if there's no limitation for this package.
     */
    public ArchFilter(@Nullable HostOs hostOs,
                      @Nullable BitSize hostBits,
                      @Nullable BitSize jvmBits,
                      @Nullable NoPreviewRevision minJvmVersion) {
        mHostOs = hostOs;
        mHostBits = hostBits;
        mJvmBits = jvmBits;
        mMinJvmVersion = minJvmVersion;
    }

    /**
     * Creates an {@link ArchFilter} using properties previously saved in a {@link Properties}
     * object, typically by the {@link ArchFilter#saveProperties(Properties)} method.
     * <p/>
     * Missing properties are set to null and will not filter.
     *
     * @param props A properties object previously filled by {@link #saveProperties(Properties)}.
     *              If null, a default empty {@link ArchFilter} is created.
     */
    public ArchFilter(@Nullable Properties props) {
        HostOs  hostOs   = null;
        BitSize hostBits = null;
        BitSize jvmBits  = null;
        NoPreviewRevision minJvmVers = null;

        if (props != null) {
            hostOs   = HostOs .fromXmlName(props.getProperty(PROP_HOST_OS));
            hostBits = BitSize.fromXmlName(props.getProperty(PROP_HOST_BITS));
            jvmBits  = BitSize.fromXmlName(props.getProperty(PROP_JVM_BITS));
            try {
                minJvmVers = NoPreviewRevision.parseRevision(props.getProperty(PROP_MIN_JVM_VERSION));
            } catch (NumberFormatException ignore) {
                // Property absent or malformed: leave minJvmVers null (no filter).
            }

            // Backward compatibility with older PROP_OS and PROP_ARCH values
            if (!props.containsKey(PROP_HOST_OS) && props.containsKey(LEGACY_PROP_OS)) {
                hostOs = HostOs.fromXmlName(props.getProperty(LEGACY_PROP_OS));
            }

            // Fix: the original tested PROP_HOST_BITS twice; the second test
            // was clearly meant for PROP_JVM_BITS.
            if (!props.containsKey(PROP_HOST_BITS) &&
                    !props.containsKey(PROP_JVM_BITS) &&
                    props.containsKey(LEGACY_PROP_ARCH)) {
                // We'll only handle the typical x86 and x86_64 values of the old PROP_ARCH
                // value and ignore the PPC value. "Any" is equivalent to keeping the new
                // attributes to null.
                // Fix: the original used indexOf(...) > 0, which missed a match
                // at index 0 (e.g. a legacy value of exactly "x86" or "x86_64").
                String v = props.getProperty(LEGACY_PROP_ARCH).toLowerCase();
                if (v.contains("x86_64")) {                                 //$NON-NLS-1$
                    // JVM in 64-bit x86_64 mode so host-bits should be 64 too.
                    hostBits = jvmBits = BitSize._64;
                } else if (v.contains("x86")) {                             //$NON-NLS-1$
                    // JVM in 32-bit x86 mode, but host-bits could be either 32 or 64
                    // so we don't set this one.
                    jvmBits = BitSize._32;
                }
            }
        }

        mHostOs = hostOs;
        mHostBits = hostBits;
        mJvmBits = jvmBits;
        mMinJvmVersion = minJvmVers;
    }

    /** @return the host OS or null if there's no limitation for this package. */
    @Nullable
    public HostOs getHostOS() {
        return mHostOs;
    }

    /** @return the host bit size or null if there's no limitation for this package. */
    @Nullable
    public BitSize getHostBits() {
        return mHostBits;
    }

    /** @return the JVM bit size or null if there's no limitation for this package. */
    @Nullable
    public BitSize getJvmBits() {
        return mJvmBits;
    }

    /** @return the minimal JVM version required by this package
     *          or null if there's no limitation for this package. */
    @Nullable
    public NoPreviewRevision getMinJvmVersion() {
        return mMinJvmVersion;
    }

    /**
     * Checks whether {@code this} {@link ArchFilter} is compatible with the right-hand side one.
     * <p/>
     * Typically this is used to check whether "this downloaded package is compatible with the
     * current architecture", which would be expressed as:
     * <pre>
     *   DownloadedArchive.filter.isCompatibleWith(ArchFilter.getCurrent())
     * </pre>
     * For the host OS &amp; bit size attributes, if the attributes are non-null they must be equal.
     * For the min-jvm-version, "this" version (the package we want to install) needs to be lower
     * or equal to the "required" (current host) version.
     *
     * @param required The requirements to meet.
     * @return True if this filter meets or exceeds the given requirements.
     */
    public boolean isCompatibleWith(@NonNull ArchFilter required) {
        if (mHostOs != null
                && required.mHostOs != null
                && !mHostOs.equals(required.mHostOs)) {
            return false;
        }
        if (mHostBits != null
                && required.mHostBits != null
                && !mHostBits.equals(required.mHostBits)) {
            return false;
        }
        if (mJvmBits != null
                && required.mJvmBits != null
                && !mJvmBits.equals(required.mJvmBits)) {
            return false;
        }
        if (mMinJvmVersion != null
                && required.mMinJvmVersion != null
                && mMinJvmVersion.compareTo(required.mMinJvmVersion) > 0) {
            return false;
        }
        return true;
    }

    /**
     * Returns an {@link ArchFilter} that represents the current host platform.
     * @return an {@link ArchFilter} that represents the current host platform.
     */
    @NonNull
    public static ArchFilter getCurrent() {
        String os = System.getProperty("os.name");          //$NON-NLS-1$
        HostOs hostOS = null;
        if (os.startsWith("Mac")) {                         //$NON-NLS-1$
            hostOS = HostOs.MACOSX;
        } else if (os.startsWith("Windows")) {              //$NON-NLS-1$
            hostOS = HostOs.WINDOWS;
        } else if (os.startsWith("Linux")) {                //$NON-NLS-1$
            hostOS = HostOs.LINUX;
        }

        BitSize jvmBits;
        String arch = System.getProperty("os.arch");        //$NON-NLS-1$

        if (arch.equalsIgnoreCase("x86_64") ||              //$NON-NLS-1$
                arch.equalsIgnoreCase("ia64") ||            //$NON-NLS-1$
                arch.equalsIgnoreCase("amd64")) {           //$NON-NLS-1$
            jvmBits = BitSize._64;
        } else {
            jvmBits = BitSize._32;
        }

        // TODO figure out the host bit size.
        // When jvmBits is 64 we know it's surely 64
        // but that's not necessarily obvious when jvmBits is 32.
        BitSize hostBits = jvmBits;

        NoPreviewRevision minJvmVersion = null;
        String javav = System.getProperty("java.version");              //$NON-NLS-1$
        // java Version is typically in the form "1.2.3_45" and we just need to keep up to "1.2.3"
        // since our revision numbers are in 3-parts form (1.2.3).
        Pattern p = Pattern.compile("((\\d+)(\\.\\d+)?(\\.\\d+)?).*");  //$NON-NLS-1$
        Matcher m = p.matcher(javav);
        if (m.matches()) {
            minJvmVersion = NoPreviewRevision.parseRevision(m.group(1));
        }

        return new ArchFilter(hostOS, hostBits, jvmBits, minJvmVersion);
    }

    /**
     * Save this {@link ArchFilter} attributes into the the given {@link Properties} object.
     * These properties can later be given to the constructor that takes a {@link Properties} object.
     * <p/>
     * Null attributes are not saved in the properties.
     *
     * @param props A non-null properties object to fill with non-null attributes.
     */
    void saveProperties(@NonNull Properties props) {
        if (mHostOs != null) {
            props.setProperty(PROP_HOST_OS, mHostOs.getXmlName());
        }
        if (mHostBits != null) {
            props.setProperty(PROP_HOST_BITS, mHostBits.getXmlName());
        }
        if (mJvmBits != null) {
            props.setProperty(PROP_JVM_BITS, mJvmBits.getXmlName());
        }
        if (mMinJvmVersion != null) {
            props.setProperty(PROP_MIN_JVM_VERSION, mMinJvmVersion.toShortString());
        }
    }

    /** String for debug purposes. */
    @Override
    public String toString() {
        return "<ArchFilter mHostOs=" + mHostOs +
               ", mHostBits=" + mHostBits
               + ", mJvmBits=" + mJvmBits +
               ", mMinJvmVersion=" + mMinJvmVersion + ">";
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + ((mHostOs == null) ? 0 : mHostOs.hashCode());
        result = prime * result + ((mHostBits == null) ? 0 : mHostBits.hashCode());
        result = prime * result + ((mJvmBits == null) ? 0 : mJvmBits.hashCode());
        result = prime * result + ((mMinJvmVersion == null) ? 0 : mMinJvmVersion.hashCode());
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null) {
            return false;
        }
        if (getClass() != obj.getClass()) {
            return false;
        }
        ArchFilter other = (ArchFilter) obj;
        if (mHostBits != other.mHostBits) {
            return false;
        }
        if (mHostOs != other.mHostOs) {
            return false;
        }
        if (mJvmBits != other.mJvmBits) {
            return false;
        }
        if (mMinJvmVersion == null) {
            if (other.mMinJvmVersion != null) {
                return false;
            }
        } else if (!mMinJvmVersion.equals(other.mMinJvmVersion)) {
            return false;
        }
        return true;
    }
}
| |
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution
// Automatically generated by LOXI from template of_class.java
// Do not modify
package org.projectfloodlight.openflow.protocol.ver13;
import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Set;
import org.jboss.netty.buffer.ChannelBuffer;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;
class OFBsnTlvActorSystemMacVer13 implements OFBsnTlvActorSystemMac {
private static final Logger logger = LoggerFactory.getLogger(OFBsnTlvActorSystemMacVer13.class);
// version: 1.3
final static byte WIRE_VERSION = 4;
final static int LENGTH = 10;
private final static MacAddress DEFAULT_VALUE = MacAddress.NONE;
// OF message fields
private final MacAddress value;
//
// Immutable default instance
final static OFBsnTlvActorSystemMacVer13 DEFAULT = new OFBsnTlvActorSystemMacVer13(
DEFAULT_VALUE
);
// package private constructor - used by readers, builders, and factory
OFBsnTlvActorSystemMacVer13(MacAddress value) {
if(value == null) {
throw new NullPointerException("OFBsnTlvActorSystemMacVer13: property value cannot be null");
}
this.value = value;
}
// Accessors for OF message fields
@Override
public int getType() {
return 0x29;
}
@Override
public MacAddress getValue() {
return value;
}
@Override
public OFVersion getVersion() {
return OFVersion.OF_13;
}
public OFBsnTlvActorSystemMac.Builder createBuilder() {
return new BuilderWithParent(this);
}
static class BuilderWithParent implements OFBsnTlvActorSystemMac.Builder {
final OFBsnTlvActorSystemMacVer13 parentMessage;
// OF message fields
private boolean valueSet;
private MacAddress value;
BuilderWithParent(OFBsnTlvActorSystemMacVer13 parentMessage) {
this.parentMessage = parentMessage;
}
@Override
public int getType() {
return 0x29;
}
@Override
public MacAddress getValue() {
return value;
}
@Override
public OFBsnTlvActorSystemMac.Builder setValue(MacAddress value) {
this.value = value;
this.valueSet = true;
return this;
}
@Override
public OFVersion getVersion() {
return OFVersion.OF_13;
}
@Override
public OFBsnTlvActorSystemMac build() {
MacAddress value = this.valueSet ? this.value : parentMessage.value;
if(value == null)
throw new NullPointerException("Property value must not be null");
//
return new OFBsnTlvActorSystemMacVer13(
value
);
}
}
static class Builder implements OFBsnTlvActorSystemMac.Builder {
// OF message fields
private boolean valueSet;
private MacAddress value;
@Override
public int getType() {
return 0x29;
}
@Override
public MacAddress getValue() {
return value;
}
@Override
public OFBsnTlvActorSystemMac.Builder setValue(MacAddress value) {
this.value = value;
this.valueSet = true;
return this;
}
@Override
public OFVersion getVersion() {
return OFVersion.OF_13;
}
//
@Override
public OFBsnTlvActorSystemMac build() {
MacAddress value = this.valueSet ? this.value : DEFAULT_VALUE;
if(value == null)
throw new NullPointerException("Property value must not be null");
return new OFBsnTlvActorSystemMacVer13(
value
);
}
}
final static Reader READER = new Reader();
static class Reader implements OFMessageReader<OFBsnTlvActorSystemMac> {
@Override
public OFBsnTlvActorSystemMac readFrom(ChannelBuffer bb) throws OFParseError {
int start = bb.readerIndex();
// fixed value property type == 0x29
short type = bb.readShort();
if(type != (short) 0x29)
throw new OFParseError("Wrong type: Expected=0x29(0x29), got="+type);
int length = U16.f(bb.readShort());
if(length != 10)
throw new OFParseError("Wrong length: Expected=10(10), got="+length);
if(bb.readableBytes() + (bb.readerIndex() - start) < length) {
// Buffer does not have all data yet
bb.readerIndex(start);
return null;
}
if(logger.isTraceEnabled())
logger.trace("readFrom - length={}", length);
MacAddress value = MacAddress.read6Bytes(bb);
OFBsnTlvActorSystemMacVer13 bsnTlvActorSystemMacVer13 = new OFBsnTlvActorSystemMacVer13(
value
);
if(logger.isTraceEnabled())
logger.trace("readFrom - read={}", bsnTlvActorSystemMacVer13);
return bsnTlvActorSystemMacVer13;
}
}
/** Feeds this TLV's wire representation into a hash sink via the shared funnel. */
public void putTo(PrimitiveSink sink) {
FUNNEL.funnel(this, sink);
}
final static OFBsnTlvActorSystemMacVer13Funnel FUNNEL = new OFBsnTlvActorSystemMacVer13Funnel();
/** Funnel that mirrors the wire encoding (type, length, value) for consistent hashing. */
static class OFBsnTlvActorSystemMacVer13Funnel implements Funnel<OFBsnTlvActorSystemMacVer13> {
private static final long serialVersionUID = 1L;
@Override
public void funnel(OFBsnTlvActorSystemMacVer13 message, PrimitiveSink sink) {
// fixed value property type = 0x29
sink.putShort((short) 0x29);
// fixed value property length = 10
sink.putShort((short) 0xa);
message.value.putTo(sink);
}
}
/** Serializes this TLV into the given buffer using the shared writer. */
public void writeTo(ChannelBuffer bb) {
WRITER.write(bb, this);
}
final static Writer WRITER = new Writer();
/** Serializer: 2-byte type (0x29), 2-byte length (10), then the 6-byte MAC value. */
static class Writer implements OFMessageWriter<OFBsnTlvActorSystemMacVer13> {
@Override
public void write(ChannelBuffer bb, OFBsnTlvActorSystemMacVer13 message) {
// fixed value property type = 0x29
bb.writeShort((short) 0x29);
// fixed value property length = 10
bb.writeShort((short) 0xa);
message.value.write6Bytes(bb);
}
}
@Override
public String toString() {
// Renders as "OFBsnTlvActorSystemMacVer13(value=<mac>)".
return "OFBsnTlvActorSystemMacVer13(" + "value=" + value + ")";
}
@Override
public boolean equals(Object obj) {
// Identity short-circuit, then exact-class check, then null-safe value comparison.
if (this == obj)
return true;
if (obj == null || getClass() != obj.getClass())
return false;
OFBsnTlvActorSystemMacVer13 other = (OFBsnTlvActorSystemMacVer13) obj;
return value == null ? other.value == null : value.equals(other.value);
}
@Override
public int hashCode() {
// Single-field hash consistent with equals(): 31 * 1 + hash(value).
final int prime = 31;
int fieldHash = (value == null) ? 0 : value.hashCode();
return prime + fieldHash;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.phoenix.end2end;
import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyList;
import static org.mockito.Matchers.anyListOf;
import static org.mockito.Matchers.anyLong;
import static org.mockito.Matchers.anyMap;
import static org.mockito.Matchers.anySetOf;
import static org.mockito.Matchers.eq;
import static org.mockito.Matchers.isNull;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.reset;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Collections;
import java.util.List;
import java.util.Properties;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.phoenix.exception.SQLExceptionCode;
import org.apache.phoenix.jdbc.PhoenixConnection;
import org.apache.phoenix.jdbc.PhoenixEmbeddedDriver;
import org.apache.phoenix.query.ConnectionQueryServices;
import org.apache.phoenix.schema.ColumnAlreadyExistsException;
import org.apache.phoenix.schema.PColumn;
import org.apache.phoenix.schema.PName;
import org.apache.phoenix.schema.PTable;
import org.apache.phoenix.schema.PTableKey;
import org.apache.phoenix.schema.PTableType;
import org.apache.phoenix.schema.TableAlreadyExistsException;
import org.apache.phoenix.util.PropertiesUtil;
import org.junit.Test;
import org.mockito.Mockito;
/**
 * Integration tests for APPEND_ONLY_SCHEMA tables/views: verifies that repeated
 * CREATE/ALTER statements on an append-only schema avoid redundant RPCs, that
 * added columns are appended (never reordered), and attribute validation rules.
 */
public class AppendOnlySchemaIT extends ParallelStatsDisabledIT {
/**
 * Creates an append-only view twice with the same schema and verifies the expected
 * getTable / createTable / addColumn RPC counts via a Mockito spy.
 * @param notExists whether the DDL uses IF NOT EXISTS (duplicate DDL should then be a no-op)
 * @param sameClient whether both DDLs run through the same client connection (cache shared)
 */
private void testTableWithSameSchema(boolean notExists, boolean sameClient) throws Exception {
// use a spied ConnectionQueryServices so we can verify calls to getTable
ConnectionQueryServices connectionQueryServices =
Mockito.spy(driver.getConnectionQueryServices(getUrl(),
PropertiesUtil.deepCopy(TEST_PROPERTIES)));
Properties props = new Properties();
props.putAll(PhoenixEmbeddedDriver.DEFAULT_PROPS.asMap());
try (Connection conn1 = connectionQueryServices.connect(getUrl(), props);
Connection conn2 = sameClient ? conn1 : connectionQueryServices.connect(getUrl(), props)) {
String metricTableName = generateUniqueName();
String viewName = generateUniqueName();
String metricIdSeqTableName = generateUniqueName();
// create sequence for auto partition
conn1.createStatement().execute("CREATE SEQUENCE " + metricIdSeqTableName + " CACHE 1");
// create base table
conn1.createStatement().execute("CREATE TABLE "+ metricTableName + "(metricId INTEGER NOT NULL, metricVal DOUBLE, CONSTRAINT PK PRIMARY KEY(metricId))"
+ " APPEND_ONLY_SCHEMA = true, UPDATE_CACHE_FREQUENCY=1, AUTO_PARTITION_SEQ=" + metricIdSeqTableName);
// create view
String ddl =
"CREATE VIEW " + (notExists ? "IF NOT EXISTS " : "")
+ viewName + " ( hostName varchar NOT NULL, tagName varChar"
+ " CONSTRAINT HOSTNAME_PK PRIMARY KEY (hostName))"
+ " AS SELECT * FROM " + metricTableName
+ " UPDATE_CACHE_FREQUENCY=300000";
conn1.createStatement().execute(ddl);
conn1.createStatement().execute("UPSERT INTO " + viewName + "(hostName, metricVal) VALUES('host1', 1.0)");
conn1.commit();
reset(connectionQueryServices);
// execute same create ddl
try {
conn2.createStatement().execute(ddl);
if (!notExists) {
fail("Create Table should fail");
}
}
catch (TableAlreadyExistsException e) {
if (notExists) {
fail("Create Table should not fail");
}
}
// verify getTable rpcs: a different client must resolve the view once; the same client hits its cache
verify(connectionQueryServices, sameClient ? never() : times(1))
.getTable((PName) isNull(), eq(new byte[0]),
eq(Bytes.toBytes(viewName)), anyLong(), anyLong());
// verify no create table rpcs
verify(connectionQueryServices, never()).createTable(anyListOf(Mutation.class),
any(byte[].class), any(PTableType.class), anyMap(), anyList(), any(byte[][].class),
eq(false), eq(false), eq(false), any(PTable.class));
reset(connectionQueryServices);
// execute alter table ddl that adds the same column
ddl = "ALTER VIEW " + viewName + " ADD " + (notExists ? "IF NOT EXISTS" : "") + " tagName varchar";
try {
conn2.createStatement().execute(ddl);
if (!notExists) {
fail("Alter Table should fail");
}
}
catch (ColumnAlreadyExistsException e) {
if (notExists) {
fail("Alter Table should not fail");
}
}
// if not verify exists is true one call to add column table with empty mutation list (which does not make a rpc)
// else verify no add column calls
verify(connectionQueryServices, notExists ? times(1) : never() )
.addColumn(eq(Collections.<Mutation>emptyList()), any(PTable.class),
any(PTable.class), anyMap(), anySetOf(String.class),
anyListOf(PColumn.class));
// upsert one row
conn2.createStatement().execute("UPSERT INTO " + viewName + "(hostName, metricVal) VALUES('host2', 2.0)");
conn2.commit();
// verify data in base table
// NOTE: metricId is 1 for all rows because AUTO_PARTITION_SEQ assigns the partition id
// per view (at creation), not per row.
ResultSet rs = conn2.createStatement().executeQuery("SELECT * from " + metricTableName);
assertTrue(rs.next());
assertEquals(1, rs.getInt(1));
assertEquals(1.0, rs.getDouble(2), 1e-6);
assertTrue(rs.next());
assertEquals(1, rs.getInt(1));
assertEquals(2.0, rs.getDouble(2), 1e-6);
assertFalse(rs.next());
// verify data in view
rs = conn2.createStatement().executeQuery("SELECT * from " + viewName);
assertTrue(rs.next());
assertEquals(1, rs.getInt(1));
assertEquals(1.0, rs.getDouble(2), 1e-6);
assertEquals("host1", rs.getString(3));
assertTrue(rs.next());
assertEquals(1, rs.getInt(1));
assertEquals(2.0, rs.getDouble(2), 1e-6);
assertEquals("host2", rs.getString(3));
assertFalse(rs.next());
}
}
// Duplicate CREATE VIEW IF NOT EXISTS through the same client: no failure, no extra RPCs.
@Test
public void testSameSchemaWithNotExistsSameClient() throws Exception {
testTableWithSameSchema(true, true);
}
// Duplicate CREATE VIEW IF NOT EXISTS through a second client: one getTable RPC, no failure.
@Test
public void testSameSchemaWithNotExistsDifferentClient() throws Exception {
testTableWithSameSchema(true, false);
}
// Duplicate CREATE VIEW without IF NOT EXISTS must fail with TableAlreadyExistsException.
@Test
public void testSameSchemaSameClient() throws Exception {
testTableWithSameSchema(false, true);
}
@Test
public void testSameSchemaDifferentClient() throws Exception {
testTableWithSameSchema(false, false);
}
/**
 * Re-creates an append-only view with extra pk/regular columns and verifies the new
 * columns are appended after the existing ones (original pk order and nullability win).
 */
private void testAddColumns(boolean sameClient) throws Exception {
Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
try (Connection conn1 = DriverManager.getConnection(getUrl(), props);
Connection conn2 = sameClient ? conn1 : DriverManager.getConnection(getUrl(), props)) {
String metricTableName = generateUniqueName();
String viewName = generateUniqueName();
String metricIdSeqTableName = generateUniqueName();
// create sequence for auto partition
conn1.createStatement().execute("CREATE SEQUENCE " + metricIdSeqTableName + " CACHE 1");
// create base table
conn1.createStatement().execute("CREATE TABLE " + metricTableName + " (metricId INTEGER NOT NULL, metricVal1 DOUBLE, CONSTRAINT PK PRIMARY KEY(metricId))"
+ " APPEND_ONLY_SCHEMA = true, UPDATE_CACHE_FREQUENCY=1, AUTO_PARTITION_SEQ=" + metricIdSeqTableName);
// create view
String ddl =
"CREATE VIEW IF NOT EXISTS "
+ viewName + "( hostName varchar NOT NULL,"
+ " CONSTRAINT HOSTNAME_PK PRIMARY KEY (hostName))"
+ " AS SELECT * FROM " + metricTableName
+ " UPDATE_CACHE_FREQUENCY=300000";
conn1.createStatement().execute(ddl);
conn1.createStatement().execute("UPSERT INTO " + viewName + "(hostName, metricVal1) VALUES('host1', 1.0)");
conn1.commit();
// execute ddl that creates that same view with an additional pk column and regular column
// and also changes the order of the pk columns (which is not respected since we only
// allow appending columns)
ddl =
"CREATE VIEW IF NOT EXISTS "
+ viewName + "( instanceName varchar, hostName varchar, metricVal2 double, metricVal1 double"
+ " CONSTRAINT HOSTNAME_PK PRIMARY KEY (instancename, hostName))"
+ " AS SELECT * FROM " + metricTableName
+ " UPDATE_CACHE_FREQUENCY=300000";
conn2.createStatement().execute(ddl);
conn2.createStatement().execute(
"UPSERT INTO " + viewName + "(hostName, instanceName, metricVal1, metricval2) VALUES('host2', 'instance2', 21.0, 22.0)");
conn2.commit();
conn1.createStatement().execute("UPSERT INTO " + viewName + "(hostName, metricVal1) VALUES('host3', 3.0)");
conn1.commit();
// verify data exists
ResultSet rs = conn2.createStatement().executeQuery("SELECT * from " + viewName);
// verify the two columns were added correctly
PTable table =
conn2.unwrap(PhoenixConnection.class).getTable(new PTableKey(null, viewName));
List<PColumn> pkColumns = table.getPKColumns();
assertEquals(3,table.getPKColumns().size());
// even though the second create view statement changed the order of the pk, the original order is maintained
PColumn metricId = pkColumns.get(0);
assertEquals("METRICID", metricId.getName().getString());
assertFalse(metricId.isNullable());
PColumn hostName = pkColumns.get(1);
assertEquals("HOSTNAME", hostName.getName().getString());
// hostname name is not nullable even though the second create statement changed it to nullable
// since we only allow appending columns
assertFalse(hostName.isNullable());
PColumn instanceName = pkColumns.get(2);
assertEquals("INSTANCENAME", instanceName.getName().getString());
assertTrue(instanceName.isNullable());
List<PColumn> columns = table.getColumns();
assertEquals("METRICID", columns.get(0).getName().getString());
assertEquals("METRICVAL1", columns.get(1).getName().getString());
assertEquals("HOSTNAME", columns.get(2).getName().getString());
assertEquals("INSTANCENAME", columns.get(3).getName().getString());
assertEquals("METRICVAL2", columns.get(4).getName().getString());
// verify the data
assertTrue(rs.next());
assertEquals(1, rs.getInt(1));
assertEquals(1.0, rs.getDouble(2), 1e-6);
assertEquals("host1", rs.getString(3));
assertEquals(null, rs.getString(4));
assertEquals(0.0, rs.getDouble(5), 1e-6);
assertTrue(rs.next());
assertEquals(1, rs.getInt(1));
assertEquals(21.0, rs.getDouble(2), 1e-6);
assertEquals("host2", rs.getString(3));
assertEquals("instance2", rs.getString(4));
assertEquals(22.0, rs.getDouble(5), 1e-6);
assertTrue(rs.next());
assertEquals(1, rs.getInt(1));
assertEquals(3.0, rs.getDouble(2), 1e-6);
assertEquals("host3", rs.getString(3));
assertEquals(null, rs.getString(4));
assertEquals(0.0, rs.getDouble(5), 1e-6);
assertFalse(rs.next());
}
}
@Test
public void testAddColumnsSameClient() throws Exception {
testAddColumns(true);
}
@Test
public void testTableAddColumnsDifferentClient() throws Exception {
testAddColumns(false);
}
/**
 * APPEND_ONLY_SCHEMA requires an explicit (non-ALWAYS) UPDATE_CACHE_FREQUENCY; also
 * verifies views inherit both attributes from the base table.
 */
@Test
public void testValidateAttributes() throws Exception {
Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
try (Connection conn = DriverManager.getConnection(getUrl(), props)) {
String tableName = generateUniqueName();
String viewName = generateUniqueName();
try {
conn.createStatement().execute(
"create table IF NOT EXISTS " + tableName + " ( id char(1) NOT NULL,"
+ " col1 integer NOT NULL,"
+ " CONSTRAINT NAME_PK PRIMARY KEY (id, col1))"
+ " APPEND_ONLY_SCHEMA = true");
fail("UPDATE_CACHE_FREQUENCY attribute must not be set to ALWAYS if APPEND_ONLY_SCHEMA is true");
} catch (SQLException e) {
assertEquals(SQLExceptionCode.UPDATE_CACHE_FREQUENCY_INVALID.getErrorCode(),
e.getErrorCode());
}
// NOTE(review): unlike the DDL above, there is no comma before CONSTRAINT here —
// confirm this is intentional / accepted by the Phoenix grammar.
conn.createStatement().execute(
"create table IF NOT EXISTS " + tableName + " ( id char(1) NOT NULL,"
+ " col1 integer NOT NULL"
+ " CONSTRAINT NAME_PK PRIMARY KEY (id, col1))"
+ " APPEND_ONLY_SCHEMA = true, UPDATE_CACHE_FREQUENCY=1000");
conn.createStatement().execute(
"create view IF NOT EXISTS " + viewName + " (val1 integer) AS SELECT * FROM " + tableName);
PhoenixConnection pconn = conn.unwrap(PhoenixConnection.class);
PTable view = pconn.getTable(new PTableKey(pconn.getTenantId(), viewName));
assertEquals(true, view.isAppendOnlySchema());
assertEquals(1000, view.getUpdateCacheFrequency());
}
}
/**
 * Upserting through a view whose definition was dropped by another client still
 * succeeds because the base table remains.
 */
@Test
public void testUpsertRowToDeletedTable() throws Exception {
Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
try (Connection conn1 = DriverManager.getConnection(getUrl(), props);
Connection conn2 = DriverManager.getConnection(getUrl(), props)) {
String metricTableName = generateUniqueName();
String viewName = generateUniqueName();
String metricIdSeqTableName = generateUniqueName();
// create sequence for auto partition
conn1.createStatement().execute("CREATE SEQUENCE " + metricIdSeqTableName + " CACHE 1");
// create base table
conn1.createStatement().execute("CREATE TABLE " + metricTableName + " (metricId INTEGER NOT NULL, metricVal DOUBLE, CONSTRAINT PK PRIMARY KEY(metricId))"
+ " APPEND_ONLY_SCHEMA = true, UPDATE_CACHE_FREQUENCY=1, AUTO_PARTITION_SEQ=" + metricIdSeqTableName);
// create view
String ddl =
"CREATE VIEW IF NOT EXISTS "
+ viewName + "( hostName varchar NOT NULL,"
+ " CONSTRAINT HOSTNAME_PK PRIMARY KEY (hostName))"
+ " AS SELECT * FROM " + metricTableName
+ " APPEND_ONLY_SCHEMA = true, UPDATE_CACHE_FREQUENCY=300000";
conn1.createStatement().execute(ddl);
// drop the table using a different connection
conn2.createStatement().execute("DROP VIEW " + viewName);
// upsert one row
conn1.createStatement().execute("UPSERT INTO " + viewName + "(hostName, metricVal) VALUES('host1', 1.0)");
// upsert doesn't fail since base table still exists
conn1.commit();
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.sdk.extensions.sql.impl.interpreter.operator.date;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import com.google.common.collect.ImmutableMap;
import java.util.Arrays;
import org.apache.beam.sdk.extensions.sql.impl.interpreter.operator.BeamSqlExpression;
import org.apache.beam.sdk.extensions.sql.impl.interpreter.operator.BeamSqlPrimitive;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
import org.apache.beam.sdk.values.Row;
import org.apache.calcite.avatica.util.TimeUnit;
import org.apache.calcite.sql.type.SqlTypeName;
import org.joda.time.DateTime;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
/**
 * Unit tests for {@link BeamSqlTimestampMinusTimestampExpression}: operand-acceptance
 * rules (exactly two TIMESTAMP operands, supported interval type) and evaluation of
 * timestamp differences expressed in Calcite interval multiplier units.
 */
public class BeamSqlTimestampMinusTimestampExpressionTest {
// Evaluation does not consult the input row or window, so nulls are sufficient here.
private static final Row NULL_ROW = null;
private static final BoundedWindow NULL_WINDOW = null;
// Fixed reference instant plus offsets, one per interval unit under test.
private static final DateTime DATE = new DateTime().withDate(2017, 3, 4).withTime(3, 2, 1, 0);
private static final DateTime DATE_MINUS_2_SEC = DATE.minusSeconds(2);
private static final DateTime DATE_MINUS_3_MIN = DATE.minusMinutes(3);
private static final DateTime DATE_MINUS_4_HOURS = DATE.minusHours(4);
private static final DateTime DATE_MINUS_7_DAYS = DATE.minusDays(7);
private static final DateTime DATE_MINUS_2_MONTHS = DATE.minusMonths(2);
private static final DateTime DATE_MINUS_1_YEAR = DATE.minusYears(1);
@Rule public ExpectedException thrown = ExpectedException.none();
@Test
public void testOutputTypeIsBigint() {
BeamSqlTimestampMinusTimestampExpression minusExpression =
minusExpression(SqlTypeName.INTERVAL_DAY, timestamp(DATE_MINUS_2_SEC), timestamp(DATE));
assertEquals(SqlTypeName.BIGINT, minusExpression.getOutputType());
}
@Test
public void testAccepts2Timestamps() {
BeamSqlTimestampMinusTimestampExpression minusExpression =
minusExpression(SqlTypeName.INTERVAL_DAY, timestamp(DATE_MINUS_2_SEC), timestamp(DATE));
assertTrue(minusExpression.accept());
}
@Test
public void testDoesNotAccept3Timestamps() {
BeamSqlTimestampMinusTimestampExpression minusExpression =
minusExpression(
SqlTypeName.INTERVAL_DAY,
timestamp(DATE_MINUS_2_SEC),
timestamp(DATE_MINUS_1_YEAR),
timestamp(DATE));
assertFalse(minusExpression.accept());
}
@Test
public void testDoesNotAccept1Timestamp() {
BeamSqlTimestampMinusTimestampExpression minusExpression =
minusExpression(SqlTypeName.INTERVAL_DAY, timestamp(DATE));
assertFalse(minusExpression.accept());
}
@Test
public void testDoesNotAcceptUnsupportedIntervalToCount() {
// Compound intervals (e.g. DAY TO MINUTE) are not supported by this expression.
BeamSqlTimestampMinusTimestampExpression minusExpression =
minusExpression(
SqlTypeName.INTERVAL_DAY_MINUTE, timestamp(DATE_MINUS_2_SEC), timestamp(DATE));
assertFalse(minusExpression.accept());
}
@Test
public void testDoesNotAcceptNotTimestampAsOperandOne() {
BeamSqlTimestampMinusTimestampExpression minusExpression =
minusExpression(
SqlTypeName.INTERVAL_DAY, BeamSqlPrimitive.of(SqlTypeName.INTEGER, 3), timestamp(DATE));
assertFalse(minusExpression.accept());
}
@Test
public void testDoesNotAcceptNotTimestampAsOperandTwo() {
BeamSqlTimestampMinusTimestampExpression minusExpression =
minusExpression(
SqlTypeName.INTERVAL_DAY, timestamp(DATE), BeamSqlPrimitive.of(SqlTypeName.INTEGER, 3));
assertFalse(minusExpression.accept());
}
@Test
public void testEvaluateDiffSeconds() {
BeamSqlTimestampMinusTimestampExpression minusExpression =
minusExpression(SqlTypeName.INTERVAL_SECOND, timestamp(DATE), timestamp(DATE_MINUS_2_SEC));
long expectedResult = applyMultiplier(2L, TimeUnit.SECOND);
assertEquals(expectedResult, eval(minusExpression));
}
@Test
public void testEvaluateDiffMinutes() {
BeamSqlTimestampMinusTimestampExpression minusExpression =
minusExpression(SqlTypeName.INTERVAL_MINUTE, timestamp(DATE), timestamp(DATE_MINUS_3_MIN));
long expectedResult = applyMultiplier(3L, TimeUnit.MINUTE);
assertEquals(expectedResult, eval(minusExpression));
}
@Test
public void testEvaluateDiffHours() {
BeamSqlTimestampMinusTimestampExpression minusExpression =
minusExpression(SqlTypeName.INTERVAL_HOUR, timestamp(DATE), timestamp(DATE_MINUS_4_HOURS));
long expectedResult = applyMultiplier(4L, TimeUnit.HOUR);
assertEquals(expectedResult, eval(minusExpression));
}
@Test
public void testEvaluateDiffDays() {
BeamSqlTimestampMinusTimestampExpression minusExpression =
minusExpression(SqlTypeName.INTERVAL_DAY, timestamp(DATE), timestamp(DATE_MINUS_7_DAYS));
long expectedResult = applyMultiplier(7L, TimeUnit.DAY);
assertEquals(expectedResult, eval(minusExpression));
}
@Test
public void testEvaluateDiffMonths() {
BeamSqlTimestampMinusTimestampExpression minusExpression =
minusExpression(
SqlTypeName.INTERVAL_MONTH, timestamp(DATE), timestamp(DATE_MINUS_2_MONTHS));
long expectedResult = applyMultiplier(2L, TimeUnit.MONTH);
assertEquals(expectedResult, eval(minusExpression));
}
@Test
public void testEvaluateDiffYears() {
BeamSqlTimestampMinusTimestampExpression minusExpression =
minusExpression(SqlTypeName.INTERVAL_YEAR, timestamp(DATE), timestamp(DATE_MINUS_1_YEAR));
long expectedResult = applyMultiplier(1L, TimeUnit.YEAR);
assertEquals(expectedResult, eval(minusExpression));
}
@Test
public void testEvaluateNegativeDiffSeconds() {
// Operand order matters: earlier minus later yields a negative interval count.
BeamSqlTimestampMinusTimestampExpression minusExpression =
minusExpression(SqlTypeName.INTERVAL_SECOND, timestamp(DATE_MINUS_2_SEC), timestamp(DATE));
long expectedResult = applyMultiplier(-2L, TimeUnit.SECOND);
assertEquals(expectedResult, eval(minusExpression));
}
@Test
public void testEvaluateThrowsForUnsupportedIntervalType() {
thrown.expect(IllegalArgumentException.class);
BeamSqlTimestampMinusTimestampExpression minusExpression =
minusExpression(
SqlTypeName.INTERVAL_DAY_MINUTE, timestamp(DATE_MINUS_2_SEC), timestamp(DATE));
eval(minusExpression);
}
// Builds the expression under test from an interval unit and operand list.
private static BeamSqlTimestampMinusTimestampExpression minusExpression(
SqlTypeName intervalsToCount, BeamSqlExpression... operands) {
return new BeamSqlTimestampMinusTimestampExpression(Arrays.asList(operands), intervalsToCount);
}
// Wraps a Joda DateTime as a TIMESTAMP primitive operand.
private BeamSqlExpression timestamp(DateTime date) {
return BeamSqlPrimitive.of(SqlTypeName.TIMESTAMP, date);
}
// Evaluates with a null row/window (not consulted) and unwraps the BIGINT result.
private long eval(BeamSqlTimestampMinusTimestampExpression minusExpression) {
return minusExpression.evaluate(NULL_ROW, NULL_WINDOW, ImmutableMap.of()).getLong();
}
// Calcite encodes interval literals as count * unit multiplier (e.g. millis per second).
private long applyMultiplier(long value, TimeUnit timeUnit) {
return value * timeUnit.multiplier.longValue();
}
}
| |
package org.jsoup.safety;
/*
Thank you to Ryan Grove (wonko.com) for the Ruby HTML cleaner http://github.com/rgrove/sanitize/, which inspired
this whitelist configuration, and the initial defaults.
*/
import org.jsoup.helper.Validate;
import org.jsoup.nodes.Attribute;
import org.jsoup.nodes.Attributes;
import org.jsoup.nodes.Element;

import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
/**
Whitelists define what HTML (elements and attributes) to allow through the cleaner. Everything else is removed.
<p/>
Start with one of the defaults:
<ul>
<li>{@link #none}
<li>{@link #simpleText}
<li>{@link #basic}
<li>{@link #basicWithImages}
<li>{@link #relaxed}
</ul>
<p/>
If you need to allow more through (please be careful!), tweak a base whitelist with:
<ul>
<li>{@link #addTags}
<li>{@link #addAttributes}
<li>{@link #addEnforcedAttribute}
<li>{@link #addProtocols}
</ul>
<p/>
You can remove any setting from an existing whitelist with:
<ul>
<li>{@link #removeTags}
<li>{@link #removeAttributes}
<li>{@link #removeEnforcedAttribute}
<li>{@link #removeProtocols}
</ul>
<p/>
The cleaner and these whitelists assume that you want to clean a <code>body</code> fragment of HTML (to add user
supplied HTML into a templated page), and not to clean a full HTML document. If the latter is the case, either wrap the
document HTML around the cleaned body HTML, or create a whitelist that allows <code>html</code> and <code>head</code>
elements as appropriate.
<p/>
If you are going to extend a whitelist, please be very careful. Make sure you understand what attributes may lead to
XSS attack vectors. URL attributes are particularly vulnerable and require careful validation. See the OWASP
Cross Site Scripting Prevention Cheat Sheet for some XSS attack examples.
@author Jonathan Hedley
*/
public class Whitelist {
// Per-whitelist configuration; all lookups are keyed by normalized (lower-case) tag name.
private Set<TagName> tagNames; // tags allowed, lower case. e.g. [p, br, span]
private Map<TagName, Set<AttributeKey>> attributes; // tag -> attribute[]. allowed attributes [href] for a tag.
private Map<TagName, Map<AttributeKey, AttributeValue>> enforcedAttributes; // always set these attribute values
private Map<TagName, Map<AttributeKey, Set<Protocol>>> protocols; // allowed URL protocols for attributes
private boolean preserveRelativeLinks; // option to preserve relative links
/**
This whitelist allows only text nodes: all HTML (every element and attribute) will be stripped.
@return whitelist
*/
public static Whitelist none() {
return new Whitelist();
}
/**
This whitelist allows only simple text formatting: <code>b, em, i, strong, u</code>. All other HTML (tags and
attributes) will be removed.
@return whitelist
*/
public static Whitelist simpleText() {
return new Whitelist()
.addTags("b", "em", "i", "strong", "u")
;
}
/**
This whitelist allows a fuller range of text nodes: <code>a, b, blockquote, br, cite, code, dd, dl, dt, em, i, li,
ol, p, pre, q, small, span, strike, strong, sub, sup, u, ul</code>, and appropriate attributes.
<p/>
Links (<code>a</code> elements) can point to <code>http, https, ftp, mailto</code>, and have an enforced
<code>rel=nofollow</code> attribute.
<p/>
Does not allow images.
@return whitelist
*/
public static Whitelist basic() {
return new Whitelist()
.addTags(
"a", "b", "blockquote", "br", "cite", "code", "dd", "dl", "dt", "em",
"i", "li", "ol", "p", "pre", "q", "small", "span", "strike", "strong", "sub",
"sup", "u", "ul")
.addAttributes("a", "href")
.addAttributes("blockquote", "cite")
.addAttributes("q", "cite")
.addProtocols("a", "href", "ftp", "http", "https", "mailto")
.addProtocols("blockquote", "cite", "http", "https")
.addProtocols("cite", "cite", "http", "https")
.addEnforcedAttribute("a", "rel", "nofollow")
;
}
/**
This whitelist allows the same text tags as {@link #basic}, and also allows <code>img</code> tags, with appropriate
attributes, with <code>src</code> pointing to <code>http</code> or <code>https</code>.
@return whitelist
*/
public static Whitelist basicWithImages() {
return basic()
.addTags("img")
.addAttributes("img", "align", "alt", "height", "src", "title", "width")
.addProtocols("img", "src", "http", "https")
;
}
/**
This whitelist allows a full range of text and structural body HTML: <code>a, b, blockquote, br, caption, cite,
code, col, colgroup, dd, div, dl, dt, em, h1, h2, h3, h4, h5, h6, i, img, li, ol, p, pre, q, small, span, strike, strong, sub,
sup, table, tbody, td, tfoot, th, thead, tr, u, ul</code>
<p/>
Links do not have an enforced <code>rel=nofollow</code> attribute, but you can add that if desired.
@return whitelist
*/
public static Whitelist relaxed() {
return new Whitelist()
.addTags(
"a", "b", "blockquote", "br", "caption", "cite", "code", "col",
"colgroup", "dd", "div", "dl", "dt", "em", "h1", "h2", "h3", "h4", "h5", "h6",
"i", "img", "li", "ol", "p", "pre", "q", "small", "span", "strike", "strong",
"sub", "sup", "table", "tbody", "td", "tfoot", "th", "thead", "tr", "u",
"ul")
.addAttributes("a", "href", "title")
.addAttributes("blockquote", "cite")
.addAttributes("col", "span", "width")
.addAttributes("colgroup", "span", "width")
.addAttributes("img", "align", "alt", "height", "src", "title", "width")
.addAttributes("ol", "start", "type")
.addAttributes("q", "cite")
.addAttributes("table", "summary", "width")
.addAttributes("td", "abbr", "axis", "colspan", "rowspan", "width")
.addAttributes(
"th", "abbr", "axis", "colspan", "rowspan", "scope",
"width")
.addAttributes("ul", "type")
.addProtocols("a", "href", "ftp", "http", "https", "mailto")
.addProtocols("blockquote", "cite", "http", "https")
.addProtocols("cite", "cite", "http", "https")
.addProtocols("img", "src", "http", "https")
.addProtocols("q", "cite", "http", "https")
;
}
/**
Create a new, empty whitelist. Generally it will be better to start with a default prepared whitelist instead.
@see #basic()
@see #basicWithImages()
@see #simpleText()
@see #relaxed()
*/
public Whitelist() {
// Start with nothing allowed; callers opt tags/attributes/protocols in explicitly.
tagNames = new HashSet<TagName>();
attributes = new HashMap<TagName, Set<AttributeKey>>();
enforcedAttributes = new HashMap<TagName, Map<AttributeKey, AttributeValue>>();
protocols = new HashMap<TagName, Map<AttributeKey, Set<Protocol>>>();
preserveRelativeLinks = false;
}
/**
Add a list of allowed elements to a whitelist. (If a tag is not allowed, it will be removed from the HTML.)
@param tags tag names to allow
@return this (for chaining)
*/
public Whitelist addTags(String... tags) {
Validate.notNull(tags);
// Normalize each name via TagName.valueOf and register it as allowed.
for (String tag : tags) {
Validate.notEmpty(tag);
tagNames.add(TagName.valueOf(tag));
}
return this;
}
/**
Remove a list of allowed elements from a whitelist. (If a tag is not allowed, it will be removed from the HTML.)
@param tags tag names to disallow
@return this (for chaining)
*/
public Whitelist removeTags(String... tags) {
Validate.notNull(tags);
for(String tag: tags) {
Validate.notEmpty(tag);
TagName tagName = TagName.valueOf(tag);
if(tagNames.remove(tagName)) { // Only look in sub-maps if tag was allowed
// Drop all per-tag configuration so a later re-add starts clean.
attributes.remove(tagName);
enforcedAttributes.remove(tagName);
protocols.remove(tagName);
}
}
return this;
}
/**
Add a list of allowed attributes to a tag. (If an attribute is not allowed on an element, it will be removed.)
<p/>
E.g.: <code>addAttributes("a", "href", "class")</code> allows <code>href</code> and <code>class</code> attributes
on <code>a</code> tags.
<p/>
To make an attribute valid for <b>all tags</b>, use the pseudo tag <code>:all</code>, e.g.
<code>addAttributes(":all", "class")</code>.
@param tag The tag the attributes are for. The tag will be added to the allowed tag list if necessary.
@param keys List of valid attributes for the tag
@return this (for chaining)
*/
public Whitelist addAttributes(String tag, String... keys) {
Validate.notEmpty(tag);
Validate.notNull(keys);
Validate.isTrue(keys.length > 0, "No attributes supplied.");
TagName tagName = TagName.valueOf(tag);
// Set.add is a no-op when the tag is already allowed; no contains() pre-check needed.
tagNames.add(tagName);
Set<AttributeKey> attributeSet = new HashSet<AttributeKey>();
for (String key : keys) {
Validate.notEmpty(key);
attributeSet.add(AttributeKey.valueOf(key));
}
// Merge into the existing attribute set, or install this one as the first mapping.
Set<AttributeKey> currentSet = attributes.get(tagName);
if (currentSet != null) {
currentSet.addAll(attributeSet);
} else {
attributes.put(tagName, attributeSet);
}
return this;
}
/**
Remove a list of allowed attributes from a tag. (If an attribute is not allowed on an element, it will be removed.)
<p/>
E.g.: <code>removeAttributes("a", "href", "class")</code> disallows <code>href</code> and <code>class</code>
attributes on <code>a</code> tags.
<p/>
To make an attribute invalid for <b>all tags</b>, use the pseudo tag <code>:all</code>, e.g.
<code>removeAttributes(":all", "class")</code>.
@param tag The tag the attributes are for.
@param keys List of invalid attributes for the tag
@return this (for chaining)
*/
public Whitelist removeAttributes(String tag, String... keys) {
Validate.notEmpty(tag);
Validate.notNull(keys);
Validate.isTrue(keys.length > 0, "No attributes supplied.");
TagName tagName = TagName.valueOf(tag);
Set<AttributeKey> attributeSet = new HashSet<AttributeKey>();
for (String key : keys) {
Validate.notEmpty(key);
attributeSet.add(AttributeKey.valueOf(key));
}
if(tagNames.contains(tagName) && attributes.containsKey(tagName)) { // Only look in sub-maps if tag was allowed
Set<AttributeKey> currentSet = attributes.get(tagName);
currentSet.removeAll(attributeSet);
if(currentSet.isEmpty()) // Remove tag from attribute map if no attributes are allowed for tag
attributes.remove(tagName);
}
if(tag.equals(":all")) { // Attribute needs to be removed from all individually set tags
// BUGFIX: iterate with an explicit Iterator so emptied per-tag entries can be removed
// in place; removing via attributes.remove() while looping over keySet() threw
// ConcurrentModificationException.
Iterator<Map.Entry<TagName, Set<AttributeKey>>> it = attributes.entrySet().iterator();
while (it.hasNext()) {
Map.Entry<TagName, Set<AttributeKey>> entry = it.next();
entry.getValue().removeAll(attributeSet);
if (entry.getValue().isEmpty()) // Remove tag from attribute map if no attributes are allowed for tag
it.remove();
}
}
return this;
}
/**
Add an enforced attribute to a tag. An enforced attribute will always be added to the element. If the element
already has the attribute set, it will be overridden.
<p/>
E.g.: <code>addEnforcedAttribute("a", "rel", "nofollow")</code> will make all <code>a</code> tags output as
<code><a href="..." rel="nofollow"></code>
@param tag The tag the enforced attribute is for. The tag will be added to the allowed tag list if necessary.
@param key The attribute key
@param value The enforced attribute value
@return this (for chaining)
*/
public Whitelist addEnforcedAttribute(String tag, String key, String value) {
Validate.notEmpty(tag);
Validate.notEmpty(key);
Validate.notEmpty(value);
TagName tagName = TagName.valueOf(tag);
if (!tagNames.contains(tagName))
tagNames.add(tagName);
AttributeKey attrKey = AttributeKey.valueOf(key);
AttributeValue attrVal = AttributeValue.valueOf(value);
if (enforcedAttributes.containsKey(tagName)) {
enforcedAttributes.get(tagName).put(attrKey, attrVal);
} else {
Map<AttributeKey, AttributeValue> attrMap = new HashMap<AttributeKey, AttributeValue>();
attrMap.put(attrKey, attrVal);
enforcedAttributes.put(tagName, attrMap);
}
return this;
}
/**
Remove a previously configured enforced attribute from a tag.
@param tag The tag the enforced attribute is for.
@param key The attribute key
@return this (for chaining)
*/
public Whitelist removeEnforcedAttribute(String tag, String key) {
Validate.notEmpty(tag);
Validate.notEmpty(key);
TagName tagName = TagName.valueOf(tag);
if(tagNames.contains(tagName) && enforcedAttributes.containsKey(tagName)) {
AttributeKey attrKey = AttributeKey.valueOf(key);
Map<AttributeKey, AttributeValue> attrMap = enforcedAttributes.get(tagName);
attrMap.remove(attrKey);
if(attrMap.isEmpty()) // Remove tag from enforced attribute map if no enforced attributes are present
enforcedAttributes.remove(tagName);
}
return this;
}
/**
* Configure this Whitelist to preserve relative links in an element's URL attribute, or convert them to absolute
* links. By default, this is <b>false</b>: URLs will be made absolute (e.g. start with an allowed protocol, like
* e.g. {@code http://}.
* <p />
* Note that when handling relative links, the input document must have an appropriate {@code base URI} set when
* parsing, so that the link's protocol can be confirmed. Regardless of the setting of the {@code preserve relative
* links} option, the link must be resolvable against the base URI to an allowed protocol; otherwise the attribute
* will be removed.
*
* @param preserve {@code true} to allow relative links, {@code false} (default) to deny
* @return this Whitelist, for chaining.
* @see #addProtocols
*/
public Whitelist preserveRelativeLinks(boolean preserve) {
preserveRelativeLinks = preserve;
return this;
}
/**
Add allowed URL protocols for an element's URL attribute. This restricts the possible values of the attribute to
URLs with the defined protocol.
<p/>
E.g.: <code>addProtocols("a", "href", "ftp", "http", "https")</code>
<p/>
To allow a link to an in-page URL anchor (i.e. <code><a href="#anchor"></code>, add a <code>#</code>:<br>
E.g.: <code>addProtocols("a", "href", "#")</code>
@param tag Tag the URL protocol is for
@param key Attribute key
@param protocols List of valid protocols
@return this, for chaining
*/
public Whitelist addProtocols(String tag, String key, String... protocols) {
Validate.notEmpty(tag);
Validate.notEmpty(key);
Validate.notNull(protocols);
TagName tagName = TagName.valueOf(tag);
AttributeKey attrKey = AttributeKey.valueOf(key);
Map<AttributeKey, Set<Protocol>> attrMap;
Set<Protocol> protSet;
if (this.protocols.containsKey(tagName)) {
attrMap = this.protocols.get(tagName);
} else {
attrMap = new HashMap<AttributeKey, Set<Protocol>>();
this.protocols.put(tagName, attrMap);
}
if (attrMap.containsKey(attrKey)) {
protSet = attrMap.get(attrKey);
} else {
protSet = new HashSet<Protocol>();
attrMap.put(attrKey, protSet);
}
for (String protocol : protocols) {
Validate.notEmpty(protocol);
Protocol prot = Protocol.valueOf(protocol);
protSet.add(prot);
}
return this;
}
/**
Remove allowed URL protocols for an element's URL attribute.
<p/>
E.g.: <code>removeProtocols("a", "href", "ftp")</code>
@param tag Tag the URL protocol is for
@param key Attribute key
@param protocols List of invalid protocols
@return this, for chaining
*/
public Whitelist removeProtocols(String tag, String key, String... protocols) {
Validate.notEmpty(tag);
Validate.notEmpty(key);
Validate.notNull(protocols);
TagName tagName = TagName.valueOf(tag);
AttributeKey attrKey = AttributeKey.valueOf(key);
if(this.protocols.containsKey(tagName)) {
Map<AttributeKey, Set<Protocol>> attrMap = this.protocols.get(tagName);
if(attrMap.containsKey(attrKey)) {
Set<Protocol> protSet = attrMap.get(attrKey);
for (String protocol : protocols) {
Validate.notEmpty(protocol);
Protocol prot = Protocol.valueOf(protocol);
protSet.remove(prot);
}
if(protSet.isEmpty()) { // Remove protocol set if empty
attrMap.remove(attrKey);
if(attrMap.isEmpty()) // Remove entry for tag if empty
this.protocols.remove(tagName);
}
}
}
return this;
}
/**
* Test if the supplied tag is allowed by this whitelist
* @param tag test tag
* @return true if allowed
*/
protected boolean isSafeTag(String tag) {
return tagNames.contains(TagName.valueOf(tag));
}
/**
* Test if the supplied attribute is allowed by this whitelist for this tag
* @param tagName tag to consider allowing the attribute in
* @param el element under test, to confirm protocol
* @param attr attribute under test
* @return true if allowed
*/
protected boolean isSafeAttribute(String tagName, Element el, Attribute attr) {
TagName tag = TagName.valueOf(tagName);
AttributeKey key = AttributeKey.valueOf(attr.getKey());
if (attributes.containsKey(tag)) {
if (attributes.get(tag).contains(key)) {
if (protocols.containsKey(tag)) {
Map<AttributeKey, Set<Protocol>> attrProts = protocols.get(tag);
// ok if not defined protocol; otherwise test
return !attrProts.containsKey(key) || testValidProtocol(el, attr, attrProts.get(key));
} else { // attribute found, no protocols defined, so OK
return true;
}
}
}
// no attributes defined for tag, try :all tag
return !tagName.equals(":all") && isSafeAttribute(":all", el, attr);
}
private boolean testValidProtocol(Element el, Attribute attr, Set<Protocol> protocols) {
// try to resolve relative urls to abs, and optionally update the attribute so output html has abs.
// rels without a baseuri get removed
String value = el.absUrl(attr.getKey());
if (value.length() == 0)
value = attr.getValue(); // if it could not be made abs, run as-is to allow custom unknown protocols
if (!preserveRelativeLinks)
attr.setValue(value);
for (Protocol protocol : protocols) {
String prot = protocol.toString();
if (prot.equals("#")) { // allows anchor links
if (isValidAnchor(value)) {
return true;
} else {
continue;
}
}
prot += ":";
if (value.toLowerCase().startsWith(prot)) {
return true;
}
}
return false;
}
private boolean isValidAnchor(String value) {
return value.startsWith("#") && !value.matches(".*\\s.*");
}
Attributes getEnforcedAttributes(String tagName) {
Attributes attrs = new Attributes();
TagName tag = TagName.valueOf(tagName);
if (enforcedAttributes.containsKey(tag)) {
Map<AttributeKey, AttributeValue> keyVals = enforcedAttributes.get(tag);
for (Map.Entry<AttributeKey, AttributeValue> entry : keyVals.entrySet()) {
attrs.put(entry.getKey().toString(), entry.getValue().toString());
}
}
return attrs;
}
    // named types for config. All just hold strings, but here for my sanity.
    /** Typed wrapper for an HTML tag name used as a key in the config maps. */
    static class TagName extends TypedValue {
        TagName(String value) {
            super(value);
        }
        // Static factory mirroring valueOf() conventions; no caching, always a new instance.
        static TagName valueOf(String value) {
            return new TagName(value);
        }
    }
    /** Typed wrapper for an attribute name (e.g. "href") used as a config-map key. */
    static class AttributeKey extends TypedValue {
        AttributeKey(String value) {
            super(value);
        }
        // Static factory mirroring valueOf() conventions; no caching, always a new instance.
        static AttributeKey valueOf(String value) {
            return new AttributeKey(value);
        }
    }
    /** Typed wrapper for an enforced attribute value (e.g. "nofollow"). */
    static class AttributeValue extends TypedValue {
        AttributeValue(String value) {
            super(value);
        }
        // Static factory mirroring valueOf() conventions; no caching, always a new instance.
        static AttributeValue valueOf(String value) {
            return new AttributeValue(value);
        }
    }
    /** Typed wrapper for a URL protocol (e.g. "http"), or "#" for anchor links. */
    static class Protocol extends TypedValue {
        Protocol(String value) {
            super(value);
        }
        // Static factory mirroring valueOf() conventions; no caching, always a new instance.
        static Protocol valueOf(String value) {
            return new Protocol(value);
        }
    }
abstract static class TypedValue {
private String value;
TypedValue(String value) {
Validate.notNull(value);
this.value = value;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((value == null) ? 0 : value.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) return true;
if (obj == null) return false;
if (getClass() != obj.getClass()) return false;
TypedValue other = (TypedValue) obj;
if (value == null) {
if (other.value != null) return false;
} else if (!value.equals(other.value)) return false;
return true;
}
@Override
public String toString() {
return value;
}
}
}
| |
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/devtools/containeranalysis/v1beta1/deployment/deployment.proto
package io.grafeas.v1beta1.deployment;
/**
*
*
* <pre>
* An artifact that can be deployed in some runtime.
* </pre>
*
* Protobuf type {@code grafeas.v1beta1.deployment.Deployable}
*/
public final class Deployable extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:grafeas.v1beta1.deployment.Deployable)
    DeployableOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use Deployable.newBuilder() to construct.
  private Deployable(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private Deployable() {
    resourceUri_ = com.google.protobuf.LazyStringArrayList.EMPTY;
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new Deployable();
  }
  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
    return this.unknownFields;
  }
  // Wire-format parsing constructor; invoked by the PARSER singleton below.
  private Deployable(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    int mutable_bitField0_ = 0;
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10:
            {
              java.lang.String s = input.readStringRequireUtf8();
              if (!((mutable_bitField0_ & 0x00000001) != 0)) {
                resourceUri_ = new com.google.protobuf.LazyStringArrayList();
                mutable_bitField0_ |= 0x00000001;
              }
              resourceUri_.add(s);
              break;
            }
          default:
            {
              if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
    } finally {
      if (((mutable_bitField0_ & 0x00000001) != 0)) {
        resourceUri_ = resourceUri_.getUnmodifiableView();
      }
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return io.grafeas.v1beta1.deployment.DeploymentOuterClass
        .internal_static_grafeas_v1beta1_deployment_Deployable_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return io.grafeas.v1beta1.deployment.DeploymentOuterClass
        .internal_static_grafeas_v1beta1_deployment_Deployable_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            io.grafeas.v1beta1.deployment.Deployable.class,
            io.grafeas.v1beta1.deployment.Deployable.Builder.class);
  }
  // repeated string resource_uri = 1 — the only declared field of this message.
  public static final int RESOURCE_URI_FIELD_NUMBER = 1;
  private com.google.protobuf.LazyStringList resourceUri_;
  /**
   *
   *
   * <pre>
   * Required. Resource URI for the artifact being deployed.
   * </pre>
   *
   * <code>repeated string resource_uri = 1;</code>
   *
   * @return A list containing the resourceUri.
   */
  public com.google.protobuf.ProtocolStringList getResourceUriList() {
    return resourceUri_;
  }
  /**
   *
   *
   * <pre>
   * Required. Resource URI for the artifact being deployed.
   * </pre>
   *
   * <code>repeated string resource_uri = 1;</code>
   *
   * @return The count of resourceUri.
   */
  public int getResourceUriCount() {
    return resourceUri_.size();
  }
  /**
   *
   *
   * <pre>
   * Required. Resource URI for the artifact being deployed.
   * </pre>
   *
   * <code>repeated string resource_uri = 1;</code>
   *
   * @param index The index of the element to return.
   * @return The resourceUri at the given index.
   */
  public java.lang.String getResourceUri(int index) {
    return resourceUri_.get(index);
  }
  /**
   *
   *
   * <pre>
   * Required. Resource URI for the artifact being deployed.
   * </pre>
   *
   * <code>repeated string resource_uri = 1;</code>
   *
   * @param index The index of the value to return.
   * @return The bytes of the resourceUri at the given index.
   */
  public com.google.protobuf.ByteString getResourceUriBytes(int index) {
    return resourceUri_.getByteString(index);
  }
  // Memoized initialization check: -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < resourceUri_.size(); i++) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, resourceUri_.getRaw(i));
    }
    unknownFields.writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    {
      int dataSize = 0;
      for (int i = 0; i < resourceUri_.size(); i++) {
        dataSize += computeStringSizeNoTag(resourceUri_.getRaw(i));
      }
      size += dataSize;
      size += 1 * getResourceUriList().size();
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof io.grafeas.v1beta1.deployment.Deployable)) {
      return super.equals(obj);
    }
    io.grafeas.v1beta1.deployment.Deployable other = (io.grafeas.v1beta1.deployment.Deployable) obj;
    if (!getResourceUriList().equals(other.getResourceUriList())) return false;
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getResourceUriCount() > 0) {
      hash = (37 * hash) + RESOURCE_URI_FIELD_NUMBER;
      hash = (53 * hash) + getResourceUriList().hashCode();
    }
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  public static io.grafeas.v1beta1.deployment.Deployable parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static io.grafeas.v1beta1.deployment.Deployable parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static io.grafeas.v1beta1.deployment.Deployable parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static io.grafeas.v1beta1.deployment.Deployable parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static io.grafeas.v1beta1.deployment.Deployable parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static io.grafeas.v1beta1.deployment.Deployable parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static io.grafeas.v1beta1.deployment.Deployable parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static io.grafeas.v1beta1.deployment.Deployable parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static io.grafeas.v1beta1.deployment.Deployable parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static io.grafeas.v1beta1.deployment.Deployable parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static io.grafeas.v1beta1.deployment.Deployable parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static io.grafeas.v1beta1.deployment.Deployable parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(io.grafeas.v1beta1.deployment.Deployable prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * An artifact that can be deployed in some runtime.
   * </pre>
   *
   * Protobuf type {@code grafeas.v1beta1.deployment.Deployable}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:grafeas.v1beta1.deployment.Deployable)
      io.grafeas.v1beta1.deployment.DeployableOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return io.grafeas.v1beta1.deployment.DeploymentOuterClass
          .internal_static_grafeas_v1beta1_deployment_Deployable_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return io.grafeas.v1beta1.deployment.DeploymentOuterClass
          .internal_static_grafeas_v1beta1_deployment_Deployable_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              io.grafeas.v1beta1.deployment.Deployable.class,
              io.grafeas.v1beta1.deployment.Deployable.Builder.class);
    }
    // Construct using io.grafeas.v1beta1.deployment.Deployable.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      resourceUri_ = com.google.protobuf.LazyStringArrayList.EMPTY;
      bitField0_ = (bitField0_ & ~0x00000001);
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return io.grafeas.v1beta1.deployment.DeploymentOuterClass
          .internal_static_grafeas_v1beta1_deployment_Deployable_descriptor;
    }
    @java.lang.Override
    public io.grafeas.v1beta1.deployment.Deployable getDefaultInstanceForType() {
      return io.grafeas.v1beta1.deployment.Deployable.getDefaultInstance();
    }
    @java.lang.Override
    public io.grafeas.v1beta1.deployment.Deployable build() {
      io.grafeas.v1beta1.deployment.Deployable result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public io.grafeas.v1beta1.deployment.Deployable buildPartial() {
      io.grafeas.v1beta1.deployment.Deployable result =
          new io.grafeas.v1beta1.deployment.Deployable(this);
      int from_bitField0_ = bitField0_;
      if (((bitField0_ & 0x00000001) != 0)) {
        resourceUri_ = resourceUri_.getUnmodifiableView();
        bitField0_ = (bitField0_ & ~0x00000001);
      }
      result.resourceUri_ = resourceUri_;
      onBuilt();
      return result;
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof io.grafeas.v1beta1.deployment.Deployable) {
        return mergeFrom((io.grafeas.v1beta1.deployment.Deployable) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(io.grafeas.v1beta1.deployment.Deployable other) {
      if (other == io.grafeas.v1beta1.deployment.Deployable.getDefaultInstance()) return this;
      if (!other.resourceUri_.isEmpty()) {
        if (resourceUri_.isEmpty()) {
          resourceUri_ = other.resourceUri_;
          bitField0_ = (bitField0_ & ~0x00000001);
        } else {
          ensureResourceUriIsMutable();
          resourceUri_.addAll(other.resourceUri_);
        }
        onChanged();
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      io.grafeas.v1beta1.deployment.Deployable parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (io.grafeas.v1beta1.deployment.Deployable) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
    private int bitField0_;
    private com.google.protobuf.LazyStringList resourceUri_ =
        com.google.protobuf.LazyStringArrayList.EMPTY;
    // Replaces a shared/default backing list with a private mutable copy before
    // the first in-place mutation (bit 0 of bitField0_ tracks ownership).
    private void ensureResourceUriIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        resourceUri_ = new com.google.protobuf.LazyStringArrayList(resourceUri_);
        bitField0_ |= 0x00000001;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. Resource URI for the artifact being deployed.
     * </pre>
     *
     * <code>repeated string resource_uri = 1;</code>
     *
     * @return A list containing the resourceUri.
     */
    public com.google.protobuf.ProtocolStringList getResourceUriList() {
      return resourceUri_.getUnmodifiableView();
    }
    /**
     *
     *
     * <pre>
     * Required. Resource URI for the artifact being deployed.
     * </pre>
     *
     * <code>repeated string resource_uri = 1;</code>
     *
     * @return The count of resourceUri.
     */
    public int getResourceUriCount() {
      return resourceUri_.size();
    }
    /**
     *
     *
     * <pre>
     * Required. Resource URI for the artifact being deployed.
     * </pre>
     *
     * <code>repeated string resource_uri = 1;</code>
     *
     * @param index The index of the element to return.
     * @return The resourceUri at the given index.
     */
    public java.lang.String getResourceUri(int index) {
      return resourceUri_.get(index);
    }
    /**
     *
     *
     * <pre>
     * Required. Resource URI for the artifact being deployed.
     * </pre>
     *
     * <code>repeated string resource_uri = 1;</code>
     *
     * @param index The index of the value to return.
     * @return The bytes of the resourceUri at the given index.
     */
    public com.google.protobuf.ByteString getResourceUriBytes(int index) {
      return resourceUri_.getByteString(index);
    }
    /**
     *
     *
     * <pre>
     * Required. Resource URI for the artifact being deployed.
     * </pre>
     *
     * <code>repeated string resource_uri = 1;</code>
     *
     * @param index The index to set the value at.
     * @param value The resourceUri to set.
     * @return This builder for chaining.
     */
    public Builder setResourceUri(int index, java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureResourceUriIsMutable();
      resourceUri_.set(index, value);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Resource URI for the artifact being deployed.
     * </pre>
     *
     * <code>repeated string resource_uri = 1;</code>
     *
     * @param value The resourceUri to add.
     * @return This builder for chaining.
     */
    public Builder addResourceUri(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureResourceUriIsMutable();
      resourceUri_.add(value);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Resource URI for the artifact being deployed.
     * </pre>
     *
     * <code>repeated string resource_uri = 1;</code>
     *
     * @param values The resourceUri to add.
     * @return This builder for chaining.
     */
    public Builder addAllResourceUri(java.lang.Iterable<java.lang.String> values) {
      ensureResourceUriIsMutable();
      com.google.protobuf.AbstractMessageLite.Builder.addAll(values, resourceUri_);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Resource URI for the artifact being deployed.
     * </pre>
     *
     * <code>repeated string resource_uri = 1;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearResourceUri() {
      resourceUri_ = com.google.protobuf.LazyStringArrayList.EMPTY;
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Resource URI for the artifact being deployed.
     * </pre>
     *
     * <code>repeated string resource_uri = 1;</code>
     *
     * @param value The bytes of the resourceUri to add.
     * @return This builder for chaining.
     */
    public Builder addResourceUriBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      ensureResourceUriIsMutable();
      resourceUri_.add(value);
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:grafeas.v1beta1.deployment.Deployable)
  }
  // @@protoc_insertion_point(class_scope:grafeas.v1beta1.deployment.Deployable)
  private static final io.grafeas.v1beta1.deployment.Deployable DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new io.grafeas.v1beta1.deployment.Deployable();
  }
  public static io.grafeas.v1beta1.deployment.Deployable getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Stateless singleton parser; delegates to the wire-format parsing constructor.
  private static final com.google.protobuf.Parser<Deployable> PARSER =
      new com.google.protobuf.AbstractParser<Deployable>() {
        @java.lang.Override
        public Deployable parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new Deployable(input, extensionRegistry);
        }
      };
  public static com.google.protobuf.Parser<Deployable> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<Deployable> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public io.grafeas.v1beta1.deployment.Deployable getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
| |
/*
* Copyright (c) 2005-2010 Grameen Foundation USA
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*
* See also http://www.apache.org/licenses/LICENSE-2.0.html for an
* explanation of the license and how it is applied.
*/
package org.mifos.accounts.business;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import org.joda.time.DateTime;
import org.mifos.accounts.exceptions.AccountException;
import org.mifos.application.master.business.PaymentTypeEntity;
import org.mifos.customers.personnel.business.PersonnelBO;
import org.mifos.framework.business.PersistentObject;
import org.mifos.framework.components.logger.LoggerConstants;
import org.mifos.framework.components.logger.MifosLogManager;
import org.mifos.framework.components.logger.MifosLogger;
import org.mifos.framework.util.helpers.Money;
/*
* Seems to be used to record information about a payment.
*
* Has some duplicate information that is contained in {@link AccountTrxnEntity}
*/
public class AccountPaymentEntity extends PersistentObject {
    private static final MifosLogger logger = MifosLogManager.getLogger(LoggerConstants.ACCOUNTSLOGGER);
    // Primary key; always null in Java code — presumably assigned by the ORM via
    // reflection when the entity is persisted (TODO confirm against the mapping).
    private final Integer paymentId = null;
    private final AccountBO account;
    private final PaymentTypeEntity paymentType;
    private final String receiptNumber;
    private final String voucherNumber;
    private final String checkNumber;
    private final Date receiptDate;
    private final String bankName;
    private final Date paymentDate;
    // Mutable: can be adjusted after construction via setAmount().
    private Money amount;
    // Transactions recorded under this payment; LinkedHashSet preserves insertion order.
    private Set<AccountTrxnEntity> accountTrxns = new LinkedHashSet<AccountTrxnEntity>();
    private PersonnelBO createdByUser;
    private String comment;
protected AccountPaymentEntity() {
this(null, null, null, null, null, new DateTime().toDate());
}
public AccountPaymentEntity(final AccountBO account, final Money amount, final String receiptNumber, final Date receiptDate,
final PaymentTypeEntity paymentType, final Date paymentDate) {
this.paymentDate = paymentDate;
this.account = account;
this.receiptNumber = receiptNumber;
this.paymentType = paymentType;
this.receiptDate = receiptDate;
this.amount = amount;
this.bankName = null;
this.voucherNumber = null;
this.checkNumber = null;
}
public Integer getPaymentId() {
return paymentId;
}
public AccountBO getAccount() {
return account;
}
public PaymentTypeEntity getPaymentType() {
return paymentType;
}
public Date getPaymentDate() {
return paymentDate;
}
public Set<AccountTrxnEntity> getAccountTrxns() {
return accountTrxns;
}
public void setAccountTrxns(final Set<AccountTrxnEntity> accountTrxns) {
this.accountTrxns = accountTrxns;
}
public String getBankName() {
return bankName;
}
public String getCheckNumber() {
return checkNumber;
}
public Date getReceiptDate() {
return receiptDate;
}
public String getReceiptNumber() {
return receiptNumber;
}
public String getVoucherNumber() {
return voucherNumber;
}
public Money getAmount() {
return amount;
}
public void setAmount(final Money amount) {
this.amount = amount;
}
public void addAccountTrxn(final AccountTrxnEntity accountTrxn) {
accountTrxns.add(accountTrxn);
}
public PersonnelBO getCreatedByUser() {
return this.createdByUser;
}
public void setCreatedByUser(final PersonnelBO createdByUser) {
this.createdByUser = createdByUser;
}
/**
* Create reverse entries of all the transactions associated with this
* payment and adds them to the set of transactions associated.
*/
List<AccountTrxnEntity> reversalAdjustment(final PersonnelBO personnel, final String adjustmentComment) throws AccountException {
List<AccountTrxnEntity> newlyAddedTrxns = null;
this.setAmount(getAmount().subtract(getAmount()));
logger.debug("The amount in account payment is " + getAmount());
if (null != getAccountTrxns() && getAccountTrxns().size() > 0) {
newlyAddedTrxns = new ArrayList<AccountTrxnEntity>();
logger.debug("The number of transactions before adjustment are " + getAccountTrxns().size());
Set<AccountTrxnEntity> reverseAccntTrxns = generateReverseAccountTransactions(personnel, adjustmentComment);
for (AccountTrxnEntity reverseAccntTrxn : reverseAccntTrxns) {
addAccountTrxn(reverseAccntTrxn);
}
newlyAddedTrxns.addAll(reverseAccntTrxns);
}
logger.debug("After adding adjustment transactions the total no of transactions are "
+ getAccountTrxns().size());
return newlyAddedTrxns;
}
private Set<AccountTrxnEntity> generateReverseAccountTransactions(final PersonnelBO personnel, final String adjustmentComment)
throws AccountException {
Set<AccountTrxnEntity> reverseAccntTrxns = new HashSet<AccountTrxnEntity>();
for (AccountTrxnEntity accntTrxn : getAccountTrxns()) {
logger.debug("Generating reverse transactions for transaction id " + accntTrxn.getAccountTrxnId());
AccountTrxnEntity reverseAccntTrxn = accntTrxn.generateReverseTrxn(personnel, adjustmentComment);
logger.debug("Amount associated with reverse transaction is "
+ reverseAccntTrxn.getAmount());
reverseAccntTrxns.add(reverseAccntTrxn);
logger.debug("After succesfully adding the reverse transaction");
}
return reverseAccntTrxns;
}
@Override
public String toString() {
return "{" + paymentId + ", " + account + ", " + paymentType + ", " + amount + "}";
}
public String getComment() {
return comment;
}
public void setComment(String comment) {
this.comment = comment;
}
}
| |
/*******************************************************************************
* Copyright SemanticBits, Northwestern University and Akaza Research
*
* Distributed under the OSI-approved BSD 3-Clause License.
* See http://ncip.github.com/caaers/LICENSE.txt for details.
******************************************************************************/
package gov.nih.nci.cabig.caaers.domain.repository;
import gov.nih.nci.cabig.caaers.domain.AdverseEventReportingPeriod;
import gov.nih.nci.cabig.caaers.domain.ExpeditedAdverseEventReport;
import gov.nih.nci.cabig.caaers.domain.Organization;
import gov.nih.nci.cabig.caaers.domain.Participant;
import gov.nih.nci.cabig.caaers.domain.ReportStatus;
import gov.nih.nci.cabig.caaers.domain.ReviewStatus;
import gov.nih.nci.cabig.caaers.domain.Study;
import gov.nih.nci.cabig.caaers.domain.StudySite;
import gov.nih.nci.cabig.caaers.domain.dto.AdverseEventReportingPeriodDTO;
import gov.nih.nci.cabig.caaers.domain.report.Report;
import gov.nih.nci.cabig.caaers.domain.workflow.ReviewComment;
import java.util.List;
/**
 * Repository of routing-and-review operations for adverse event reporting: saving and
 * retrieving {@link ReviewComment}s, and advancing the workflow (review state) of a
 * {@link Report} or an {@link AdverseEventReportingPeriod}.
 *
 * @author Biju Joseph
 */
public interface AdverseEventRoutingAndReviewRepository {

    /**
     * Queries the {@link AdverseEventReportingPeriodDTO} objects matching the criteria.
     * Any parameter may narrow the search; semantics of null criteria are
     * implementation-defined.
     *
     * @param participant the participant
     * @param study the study
     * @param organization the organization
     * @param reviewStatus the review status to match
     * @param reportStatus the report status to match
     * @param userId the id of the user performing the query
     * @param courseWorkflowEnabled whether course (reporting period) workflow is enabled
     * @return the matching reporting period DTOs
     */
    public List<AdverseEventReportingPeriodDTO> findAdverseEventReportingPeriods(Participant participant, Study study,
    Organization organization, ReviewStatus reviewStatus, ReportStatus reportStatus, String userId, Boolean courseWorkflowEnabled);

    /**
     * Lists the review comments associated with a report.
     *
     * @param reportId the report id
     * @return the review comments for that report
     */
    public List<? extends ReviewComment> fetchReviewCommentsForReport(Integer reportId);

    /**
     * Lists the review comments associated with a reporting period.
     *
     * @param rpId the reporting period id
     * @return the review comments for that reporting period
     */
    public List<? extends ReviewComment> fetchReviewCommentsForReportingPeriod(Integer rpId);

    /**
     * Adds a review comment against the {@link Report} identified by id.
     *
     * @param reportId the report id
     * @param comment the comment text
     * @param userId the id of the commenting user
     */
    //TODO This has to change to accept aeReport instead of a report.
    public void addReportReviewComment(Integer reportId, String comment, String userId);

    /**
     * Adds a review comment against the given {@link Report}.
     *
     * @param report the report
     * @param comment the comment text
     * @param userId the id of the commenting user
     */
    public void addReportReviewComment(Report report, String comment, String userId);

    /**
     * Adds a review comment against the reporting period identified by id.
     *
     * @param reportingPeriodId the reporting period id
     * @param comment the comment text
     * @param userId the id of the commenting user
     */
    public void addReportingPeriodReviewComment(Integer reportingPeriodId, String comment, String userId);

    /**
     * Adds a review comment against the given reporting period.
     *
     * @param reportingPeriod the reporting period
     * @param comment the comment text
     * @param userId the id of the commenting user
     */
    public void addReportingPeriodReviewComment(AdverseEventReportingPeriod reportingPeriod, String comment, String userId);

    /**
     * Edits an existing review comment on the {@link ExpeditedAdverseEventReport}
     * associated with the report identified by id.
     *
     * @param reportId the report id
     * @param comment the new comment text
     * @param userId the id of the editing user
     * @param commentId the id of the comment being edited
     */
    //TODO This method needs an implementation once the data model changes have been made.
    public void editReportReviewComment(Integer reportId, String comment, String userId, Integer commentId);

    /**
     * Edits an existing review comment on the given {@link Report}.
     *
     * @param report the report
     * @param comment the new comment text
     * @param userId the id of the editing user
     * @param commentId the id of the comment being edited
     */
    //TODO This method needs an implementation once the data model changes have been made.
    public void editReportReviewComment(Report report, String comment, String userId, Integer commentId);

    /**
     * Edits an existing review comment on the reporting period identified by id.
     *
     * @param reportingPeriodId the reporting period id
     * @param comment the new comment text
     * @param userId the id of the editing user
     * @param commentId the id of the comment being edited
     */
    public void editReportingPeriodReviewComment(Integer reportingPeriodId, String comment, String userId, Integer commentId);

    /**
     * Edits an existing review comment on the given {@link AdverseEventReportingPeriod}.
     *
     * @param reportingPeriod the reporting period
     * @param comment the new comment text
     * @param userId the id of the editing user
     * @param commentId the id of the comment being edited
     */
    public void editReportingPeriodReviewComment(AdverseEventReportingPeriod reportingPeriod, String comment, String userId, Integer commentId);

    /**
     * Deletes the review comment with the given commentId from the report
     * identified by id.
     *
     * @param reportId the report id
     * @param commentId the id of the comment to delete
     */
    public void deleteReportReviewComment(Integer reportId, Integer commentId);

    /**
     * Deletes the review comment with the given commentId from the given report.
     *
     * @param report the report
     * @param commentId the id of the comment to delete
     */
    public void deleteReportReviewComment(Report report, Integer commentId);

    /**
     * Deletes the review comment with the given commentId from the reporting
     * period identified by id.
     *
     * @param reportingPeriodId the reporting period id
     * @param commentId the id of the comment to delete
     */
    public void deleteReportingPeriodReviewComment(Integer reportingPeriodId, Integer commentId);

    /**
     * Deletes the review comment with the given commentId from the given
     * reporting period.
     *
     * @param reportingPeriod the reporting period
     * @param commentId the id of the comment to delete
     */
    public void deleteReportingPeriodReviewComment(AdverseEventReportingPeriod reportingPeriod, Integer commentId);

    /**
     * Advances the report workflow to its next step via the named transition.
     *
     * @param workflowId the workflow id
     * @param transition the transition to take
     * @param id the report id
     * @param userId the id of the user advancing the workflow
     * @return implementation-defined result messages — TODO confirm contents with implementors
     */
    public List<String> advanceReportWorkflow(Integer workflowId, String transition, Integer id, String userId);

    /**
     * Advances the workflow of the given report via the named transition.
     *
     * @param workflowId the workflow id
     * @param transition the transition to take
     * @param report the report
     * @param userId the id of the user advancing the workflow
     * @return implementation-defined result messages — TODO confirm contents with implementors
     */
    public List<String> advanceReportWorkflow(Integer workflowId, String transition, Report report, String userId);

    /**
     * Advances the reporting period workflow to its next step via the named transition.
     *
     * @param workflowId the workflow id
     * @param transition the transition to take
     * @param id the reporting period id
     * @param userId the id of the user advancing the workflow
     * @return implementation-defined result messages — TODO confirm contents with implementors
     */
    public List<String> advanceReportingPeriodWorkflow(Integer workflowId, String transition, Integer id, String userId);

    /**
     * Advances the workflow of the given reporting period via the named transition.
     *
     * @param workflowId the workflow id
     * @param transition the transition to take
     * @param reportingPeriod the reporting period
     * @param userId the id of the user advancing the workflow
     * @return implementation-defined result messages — TODO confirm contents with implementors
     */
    public List<String> advanceReportingPeriodWorkflow(Integer workflowId, String transition, AdverseEventReportingPeriod reportingPeriod, String userId);

    /**
     * Enacts (starts) a new workflow for the expedited report.
     *
     * @param report the report
     * @return the id of the newly enacted workflow
     */
    public Long enactReportWorkflow(Report report);

    /**
     * Enacts (starts) a new workflow for the evaluation (reporting) period.
     *
     * @param reportingPeriod the reporting period
     * @return the id of the newly enacted workflow
     */
    public Long enactReportingPeriodWorkflow(AdverseEventReportingPeriod reportingPeriod);

    /**
     * Returns the next workflow transitions available to the user.
     *
     * @param workflowId the workflow id
     * @param userId the id of the user
     * @return the names of the available transitions
     */
    public List<String> nextTransitionNames(Integer workflowId, String userId);

    /**
     * Returns the next workflow transitions available to the user for the report
     * workflow, filtered according to whether the reports are submittable.
     *
     * @param report the report
     * @param loginId the login id of the user
     * @return the names of the available transitions
     */
    public List<String> nextTransitionNamesForReportWorkflow(Report report, String loginId);
}
| |
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.cloudsearchv2.model;
import java.io.Serializable;
import com.amazonaws.AmazonWebServiceRequest;
/**
* Container for the parameters to the {@link com.amazonaws.services.cloudsearchv2.AmazonCloudSearch#describeSuggesters(DescribeSuggestersRequest) DescribeSuggesters operation}.
* <p>
* Gets the suggesters configured for a domain. A suggester enables you
* to display possible matches before users finish typing their queries.
* Can be limited to specific suggesters by name. By default, shows all
* suggesters and includes any pending changes to the configuration. Set
* the <code>Deployed</code> option to <code>true</code> to show the
* active configuration and exclude pending changes. For more
* information, see
* <a href="http://docs.aws.amazon.com/cloudsearch/latest/developerguide/getting-suggestions.html"> Getting Search Suggestions </a>
* in the <i>Amazon CloudSearch Developer Guide</i> .
* </p>
*
* @see com.amazonaws.services.cloudsearchv2.AmazonCloudSearch#describeSuggesters(DescribeSuggestersRequest)
*/
public class DescribeSuggestersRequest extends AmazonWebServiceRequest implements Serializable, Cloneable {

    /**
     * The name of the domain you want to describe.
     * <p>
     * <b>Constraints:</b><br/>
     * <b>Length: </b>3 - 28<br/>
     * <b>Pattern: </b>[a-z][a-z0-9\-]+<br/>
     */
    private String domainName;

    /**
     * The suggesters you want to describe. Lazily auto-constructed on first
     * read, so {@link #getSuggesterNames()} never returns {@code null}.
     */
    private com.amazonaws.internal.ListWithAutoConstructFlag<String> suggesterNames;

    /**
     * Whether to display the deployed configuration (<code>true</code>) or
     * include any pending changes (<code>false</code>). Defaults to
     * <code>false</code>.
     */
    private Boolean deployed;

    /**
     * Returns the name of the domain you want to describe.
     *
     * @return the domain name, or {@code null} if not set
     */
    public String getDomainName() {
        return domainName;
    }

    /**
     * Sets the name of the domain you want to describe.
     * <p>
     * <b>Constraints:</b> length 3 - 28, pattern [a-z][a-z0-9\-]+
     *
     * @param domainName the domain name
     */
    public void setDomainName(String domainName) {
        this.domainName = domainName;
    }

    /**
     * Fluent variant of {@link #setDomainName(String)}.
     *
     * @param domainName the domain name
     * @return this request, for method chaining
     */
    public DescribeSuggestersRequest withDomainName(String domainName) {
        setDomainName(domainName);
        return this;
    }

    /**
     * Returns the suggesters you want to describe, creating an empty
     * auto-construct list on first access.
     *
     * @return the suggester names (never {@code null})
     */
    public java.util.List<String> getSuggesterNames() {
        if (suggesterNames == null) {
            com.amazonaws.internal.ListWithAutoConstructFlag<String> fresh =
                new com.amazonaws.internal.ListWithAutoConstructFlag<String>();
            fresh.setAutoConstruct(true);
            suggesterNames = fresh;
        }
        return suggesterNames;
    }

    /**
     * Replaces the suggesters you want to describe. Passing {@code null}
     * clears the list.
     *
     * @param suggesterNames the suggester names, or {@code null}
     */
    public void setSuggesterNames(java.util.Collection<String> suggesterNames) {
        if (suggesterNames == null) {
            this.suggesterNames = null;
        } else {
            com.amazonaws.internal.ListWithAutoConstructFlag<String> copy =
                new com.amazonaws.internal.ListWithAutoConstructFlag<String>(suggesterNames.size());
            copy.addAll(suggesterNames);
            this.suggesterNames = copy;
        }
    }

    /**
     * Appends the given suggester names to the existing list (if any). Use
     * {@link #setSuggesterNames(java.util.Collection)} or
     * {@link #withSuggesterNames(java.util.Collection)} to replace the list
     * instead.
     *
     * @param suggesterNames the suggester names to append
     * @return this request, for method chaining
     */
    public DescribeSuggestersRequest withSuggesterNames(String... suggesterNames) {
        if (getSuggesterNames() == null) {
            setSuggesterNames(new java.util.ArrayList<String>(suggesterNames.length));
        }
        java.util.List<String> target = getSuggesterNames();
        for (String name : suggesterNames) {
            target.add(name);
        }
        return this;
    }

    /**
     * Fluent variant of {@link #setSuggesterNames(java.util.Collection)};
     * replaces the list (a {@code null} collection clears it).
     *
     * @param suggesterNames the suggester names, or {@code null}
     * @return this request, for method chaining
     */
    public DescribeSuggestersRequest withSuggesterNames(java.util.Collection<String> suggesterNames) {
        setSuggesterNames(suggesterNames);
        return this;
    }

    /**
     * Returns whether to display the deployed configuration (<code>true</code>)
     * or include any pending changes (<code>false</code>). Defaults to
     * <code>false</code>.
     *
     * @return the deployed flag, or {@code null} if not set
     */
    public Boolean isDeployed() {
        return deployed;
    }

    /**
     * Sets whether to display the deployed configuration (<code>true</code>)
     * or include any pending changes (<code>false</code>).
     *
     * @param deployed the deployed flag
     */
    public void setDeployed(Boolean deployed) {
        this.deployed = deployed;
    }

    /**
     * Fluent variant of {@link #setDeployed(Boolean)}.
     *
     * @param deployed the deployed flag
     * @return this request, for method chaining
     */
    public DescribeSuggestersRequest withDeployed(Boolean deployed) {
        setDeployed(deployed);
        return this;
    }

    /**
     * Alias of {@link #isDeployed()} following the JavaBean getter convention.
     *
     * @return the deployed flag, or {@code null} if not set
     */
    public Boolean getDeployed() {
        return deployed;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getDomainName() != null) {
            sb.append("DomainName: " + getDomainName() + ",");
        }
        if (getSuggesterNames() != null) {
            sb.append("SuggesterNames: " + getSuggesterNames() + ",");
        }
        if (isDeployed() != null) {
            sb.append("Deployed: " + isDeployed() );
        }
        sb.append("}");
        return sb.toString();
    }

    @Override
    public int hashCode() {
        // Same 31-based accumulation (and field order) as the generated code,
        // so hash values are unchanged.
        int hashCode = 1;
        hashCode = 31 * hashCode + ((getDomainName() == null) ? 0 : getDomainName().hashCode());
        hashCode = 31 * hashCode + ((getSuggesterNames() == null) ? 0 : getSuggesterNames().hashCode());
        hashCode = 31 * hashCode + ((isDeployed() == null) ? 0 : isDeployed().hashCode());
        return hashCode;
    }

    // Null-safe equality: true when both are null, or both non-null and equal.
    private static boolean safeEquals(Object a, Object b) {
        return (a == null) ? (b == null) : a.equals(b);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof DescribeSuggestersRequest)) {
            return false;
        }
        DescribeSuggestersRequest that = (DescribeSuggestersRequest) obj;
        return safeEquals(getDomainName(), that.getDomainName())
            && safeEquals(getSuggesterNames(), that.getSuggesterNames())
            && safeEquals(isDeployed(), that.isDeployed());
    }

    @Override
    public DescribeSuggestersRequest clone() {
        return (DescribeSuggestersRequest) super.clone();
    }
}
| |
/*
* Copyright (C) 2015 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.cloud.dataflow.sdk.coders;
import static com.google.common.base.Preconditions.checkArgument;
import com.google.cloud.dataflow.sdk.coders.protobuf.ProtoCoder;
import com.google.cloud.dataflow.sdk.util.CloudObject;
import com.google.cloud.dataflow.sdk.util.Structs;
import com.google.cloud.dataflow.sdk.values.TypeDescriptor;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.protobuf.ExtensionRegistry;
import com.google.protobuf.Message;
import com.google.protobuf.Parser;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import javax.annotation.Nullable;
/**
* A {@link Coder} using Google Protocol Buffers 2 binary format.
*
* <p>To learn more about Protocol Buffers, visit:
* <a href="https://developers.google.com/protocol-buffers">https://developers.google.com/protocol-buffers</a>
*
* <p>To use, specify the {@link Coder} type on a PCollection containing Protocol Buffers messages.
*
* <pre>
* {@code
* PCollection<MyProto.Message> records =
* input.apply(...)
* .setCoder(Proto2Coder.of(MyProto.Message.class));
* }
* </pre>
*
* <p>Custom message extensions are also supported, but the coder must be made
* aware of them explicitly:
*
* <pre>
* {@code
* PCollection<MyProto.Message> records =
* input.apply(...)
* .setCoder(Proto2Coder.of(MyProto.Message.class)
* .addExtensionsFrom(MyProto.class));
* }
* </pre>
*
* @param <T> the type of elements handled by this coder, must extend {@code Message}
* @deprecated Use {@link ProtoCoder}.
*/
@Deprecated
public class Proto2Coder<T extends Message> extends AtomicCoder<T> {

    /** The class of Protobuf message to be encoded. */
    private final Class<T> protoMessageClass;

    /**
     * All extension host classes included in this Proto2Coder. The extensions from
     * these classes will be included in the {@link ExtensionRegistry} used during
     * encoding and decoding.
     *
     * <p>Held as a mutable list because the deprecated {@link #addExtensionsFrom}
     * methods mutate it in place.
     */
    private final List<Class<?>> extensionHostClasses;

    private Proto2Coder(Class<T> protoMessageClass, List<Class<?>> extensionHostClasses) {
        this.protoMessageClass = protoMessageClass;
        // BUGFIX: take a defensive, mutable copy. Every construction path previously
        // stored an immutable list (Collections.emptyList() via of(), or an
        // ImmutableList via withExtensionsFrom), so the deprecated mutating
        // addExtensionsFrom(...) methods threw UnsupportedOperationException.
        this.extensionHostClasses = Lists.newArrayList(extensionHostClasses);
    }

    /** Provider that supplies a Proto2Coder for any subtype of {@link Message}. */
    private static final CoderProvider PROVIDER =
        new CoderProvider() {
            @Override
            public <T> Coder<T> getCoder(TypeDescriptor<T> type) throws CannotProvideCoderException {
                if (type.isSubtypeOf(new TypeDescriptor<Message>() {})) {
                    @SuppressWarnings("unchecked")
                    TypeDescriptor<? extends Message> messageType =
                        (TypeDescriptor<? extends Message>) type;
                    @SuppressWarnings("unchecked")
                    Coder<T> coder = (Coder<T>) Proto2Coder.of(messageType);
                    return coder;
                } else {
                    throw new CannotProvideCoderException(
                        String.format(
                            "Cannot provide Proto2Coder because %s "
                                + "is not a subclass of protocol buffer Message",
                            type));
                }
            }
        };

    public static CoderProvider coderProvider() {
        return PROVIDER;
    }

    /**
     * Returns a {@code Proto2Coder} for the given Protobuf message class,
     * with no extensions registered.
     */
    public static <T extends Message> Proto2Coder<T> of(Class<T> protoMessageClass) {
        return new Proto2Coder<T>(protoMessageClass, Collections.<Class<?>>emptyList());
    }

    /**
     * Returns a {@code Proto2Coder} for the given Protobuf message type descriptor,
     * with no extensions registered.
     */
    public static <T extends Message> Proto2Coder<T> of(TypeDescriptor<T> protoMessageType) {
        @SuppressWarnings("unchecked")
        Class<T> protoMessageClass = (Class<T>) protoMessageType.getRawType();
        return of(protoMessageClass);
    }

    /**
     * Produces a {@code Proto2Coder} like this one, but with the extensions from
     * the given classes registered.
     *
     * @param moreExtensionHosts an iterable of classes that define a static
     *        method {@code registerAllExtensions(ExtensionRegistry)}
     * @throws IllegalArgumentException if a host class lacks that method or it
     *         is not static
     */
    public Proto2Coder<T> withExtensionsFrom(Iterable<Class<?>> moreExtensionHosts) {
        for (Class<?> extensionHost : moreExtensionHosts) {
            // Attempt to access the required method, to make sure it's present.
            try {
                Method registerAllExtensions =
                    extensionHost.getDeclaredMethod("registerAllExtensions", ExtensionRegistry.class);
                checkArgument(
                    Modifier.isStatic(registerAllExtensions.getModifiers()),
                    "Method registerAllExtensions() must be static for use with Proto2Coder");
            } catch (NoSuchMethodException | SecurityException e) {
                throw new IllegalArgumentException(e);
            }
        }
        return new Proto2Coder<T>(
            protoMessageClass,
            new ImmutableList.Builder<Class<?>>()
                .addAll(extensionHostClasses)
                .addAll(moreExtensionHosts)
                .build());
    }

    /**
     * See {@link #withExtensionsFrom(Iterable)}.
     */
    public Proto2Coder<T> withExtensionsFrom(Class<?>... extensionHosts) {
        return withExtensionsFrom(ImmutableList.copyOf(extensionHosts));
    }

    /**
     * Adds custom Protobuf extensions to the coder. Returns {@code this}
     * for method chaining.
     *
     * @param extensionHosts must be a class that defines a static
     *        method name {@code registerAllExtensions}
     * @deprecated use {@link #withExtensionsFrom}
     */
    @Deprecated
    public Proto2Coder<T> addExtensionsFrom(Class<?>... extensionHosts) {
        return addExtensionsFrom(ImmutableList.copyOf(extensionHosts));
    }

    /**
     * Adds custom Protobuf extensions to the coder. Returns {@code this}
     * for method chaining.
     *
     * @param extensionHosts must be a class that defines a static
     *        method name {@code registerAllExtensions}
     * @deprecated use {@link #withExtensionsFrom}
     */
    @Deprecated
    public Proto2Coder<T> addExtensionsFrom(Iterable<Class<?>> extensionHosts) {
        for (Class<?> extensionHost : extensionHosts) {
            try {
                // Attempt to access the declared method, to make sure it's present.
                extensionHost.getDeclaredMethod("registerAllExtensions", ExtensionRegistry.class);
            } catch (NoSuchMethodException e) {
                throw new IllegalArgumentException(e);
            }
            // Safe now that the constructor guarantees a mutable list; previously
            // this threw UnsupportedOperationException on immutable backing lists.
            extensionHostClasses.add(extensionHost);
        }
        // The memoized extension registry needs to be recomputed because we have mutated this object.
        synchronized (this) {
            memoizedExtensionRegistry = null;
            getExtensionRegistry();
        }
        return this;
    }

    @Override
    public void encode(T value, OutputStream outStream, Context context) throws IOException {
        if (value == null) {
            throw new CoderException("cannot encode a null " + protoMessageClass.getSimpleName());
        }
        // Whole-stream contexts need no delimiter; nested contexts are
        // length-prefixed so consecutive messages can be separated.
        if (context.isWholeStream) {
            value.writeTo(outStream);
        } else {
            value.writeDelimitedTo(outStream);
        }
    }

    @Override
    public T decode(InputStream inStream, Context context) throws IOException {
        if (context.isWholeStream) {
            return getParser().parseFrom(inStream, getExtensionRegistry());
        } else {
            return getParser().parseDelimitedFrom(inStream, getExtensionRegistry());
        }
    }

    @Override
    public boolean equals(Object other) {
        if (this == other) {
            return true;
        }
        if (!(other instanceof Proto2Coder)) {
            return false;
        }
        Proto2Coder<?> otherCoder = (Proto2Coder<?>) other;
        // Extension host order is irrelevant to equality, hence the set comparison.
        return protoMessageClass.equals(otherCoder.protoMessageClass)
            && Sets.newHashSet(extensionHostClasses)
                .equals(Sets.newHashSet(otherCoder.extensionHostClasses));
    }

    @Override
    public int hashCode() {
        return Objects.hash(protoMessageClass, extensionHostClasses);
    }

    /**
     * The encoding identifier is designed to support evolution as per the design of Protocol
     * Buffers. In order to use this class effectively, carefully follow the advice in the Protocol
     * Buffers documentation at
     * <a href="https://developers.google.com/protocol-buffers/docs/proto#updating">Updating
     * A Message Type</a>.
     *
     * <p>In particular, the encoding identifier is guaranteed to be the same for {@code Proto2Coder}
     * instances of the same principal message class, and otherwise distinct. Loaded extensions do not
     * affect the id, nor does it encode the full schema.
     *
     * <p>When modifying a message class, here are the broadest guidelines; see the above link
     * for greater detail.
     *
     * <ul>
     * <li>Do not change the numeric tags for any fields.
     * <li>Never remove a <code>required</code> field.
     * <li>Only add <code>optional</code> or <code>repeated</code> fields, with sensible defaults.
     * <li>When changing the type of a field, consult the Protocol Buffers documentation to ensure
     * the new and old types are interchangeable.
     * </ul>
     *
     * <p>Code consuming this message class should be prepared to support <i>all</i> versions of
     * the class until it is certain that no remaining serialized instances exist.
     *
     * <p>If backwards incompatible changes must be made, the best recourse is to change the name
     * of your Protocol Buffers message class.
     */
    @Override
    public String getEncodingId() {
        return protoMessageClass.getName();
    }

    // Lazily initialized; not volatile, so presumably only safe when first used
    // from a single thread per worker -- matches original behavior.
    private transient Parser<T> memoizedParser;

    private Parser<T> getParser() {
        if (memoizedParser == null) {
            try {
                @SuppressWarnings("unchecked")
                T protoMessageInstance = (T) protoMessageClass.getMethod("getDefaultInstance").invoke(null);
                @SuppressWarnings("unchecked")
                Parser<T> tParser = (Parser<T>) protoMessageInstance.getParserForType();
                memoizedParser = tParser;
            } catch (IllegalAccessException | InvocationTargetException | NoSuchMethodException e) {
                throw new IllegalArgumentException(e);
            }
        }
        return memoizedParser;
    }

    // Guarded by "this"; reset to null by addExtensionsFrom when hosts change.
    private transient ExtensionRegistry memoizedExtensionRegistry;

    private synchronized ExtensionRegistry getExtensionRegistry() {
        if (memoizedExtensionRegistry == null) {
            ExtensionRegistry registry = ExtensionRegistry.newInstance();
            for (Class<?> extensionHost : extensionHostClasses) {
                try {
                    extensionHost
                        .getDeclaredMethod("registerAllExtensions", ExtensionRegistry.class)
                        .invoke(null, registry);
                } catch (IllegalAccessException | InvocationTargetException | NoSuchMethodException e) {
                    throw new IllegalStateException(e);
                }
            }
            memoizedExtensionRegistry = registry.getUnmodifiable();
        }
        return memoizedExtensionRegistry;
    }

    ////////////////////////////////////////////////////////////////////////////////////
    // JSON Serialization details below

    private static final String PROTO_MESSAGE_CLASS = "proto_message_class";
    private static final String PROTO_EXTENSION_HOSTS = "proto_extension_hosts";

    /**
     * Constructor for JSON deserialization only.
     */
    @JsonCreator
    public static <T extends Message> Proto2Coder<T> of(
        @JsonProperty(PROTO_MESSAGE_CLASS) String protoMessageClassName,
        @Nullable @JsonProperty(PROTO_EXTENSION_HOSTS) List<String> extensionHostClassNames) {
        try {
            @SuppressWarnings("unchecked")
            Class<T> protoMessageClass = (Class<T>) Class.forName(protoMessageClassName);
            List<Class<?>> extensionHostClasses = Lists.newArrayList();
            if (extensionHostClassNames != null) {
                for (String extensionHostClassName : extensionHostClassNames) {
                    extensionHostClasses.add(Class.forName(extensionHostClassName));
                }
            }
            return of(protoMessageClass).withExtensionsFrom(extensionHostClasses);
        } catch (ClassNotFoundException e) {
            throw new IllegalArgumentException(e);
        }
    }

    @Override
    public CloudObject asCloudObject() {
        CloudObject result = super.asCloudObject();
        Structs.addString(result, PROTO_MESSAGE_CLASS, protoMessageClass.getName());
        List<CloudObject> extensionHostClassNames = Lists.newArrayList();
        for (Class<?> clazz : extensionHostClasses) {
            extensionHostClassNames.add(CloudObject.forString(clazz.getName()));
        }
        Structs.addList(result, PROTO_EXTENSION_HOSTS, extensionHostClassNames);
        return result;
    }
}
| |
package ch.epfl.bbp.uima.ae.output;
import static ch.epfl.bbp.io.LineReader.linesFrom;
import static ch.epfl.bbp.uima.BlueCasUtil.getHeaderDocId;
import static ch.epfl.bbp.uima.BlueUima.PARAM_OUTPUT_FILE;
import static ch.epfl.bbp.uima.typesystem.TypeSystem.KEEP;
import static ch.epfl.bbp.uima.utils.Preconditions.checkFileExists;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.collect.Lists.newLinkedList;
import static com.google.common.collect.Maps.newHashMap;
import static com.google.common.collect.Maps.newLinkedHashMap;
import static org.apache.uima.fit.util.JCasUtil.select;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import org.apache.uima.UimaContext;
import org.apache.uima.analysis_engine.AnalysisEngineProcessException;
import org.apache.uima.fit.component.JCasAnnotator_ImplBase;
import org.apache.uima.fit.descriptor.ConfigurationParameter;
import org.apache.uima.fit.descriptor.TypeCapability;
import org.apache.uima.jcas.JCas;
import org.apache.uima.resource.ResourceInitializationException;
import ch.epfl.bbp.io.TextFileWriter;
import ch.epfl.bbp.uima.types.Keep;
/**
* Output {@link Keep}s into the LDA-C format:<br>
* <code> [M] [term_1]:[count] [term_2]:[count] ... [term_N]:[count]</code><br>
* where [M] is the number of unique terms in the document, and the [count]
* associated with each term is how many times that term appeared in the
* document. Also writes the corresponding vocabulary file, one {@link Keep} per
* line.<br>
* The writer can be used separately:
*
* <pre>
* writer = new LdaCWriter.Writer(" ", corpusFile, vocabFile);
* writer.addDocument(wordList);
* writer.close();
* </pre>
*
* @author renaud.richardet@epfl.ch
*/
@TypeCapability(inputs = KEEP)
public class LdaCWriter extends JCasAnnotator_ImplBase {

    // Path of the LDA-C / DCA corpus file to write.
    @ConfigurationParameter(name = PARAM_OUTPUT_FILE, mandatory = true)
    private String outputFile;

    public static final String PARAM_VOCABULARY_OUTPUT_FILE = "vocabularyOutputFile";
    // Path of a new vocabulary file to create (mutually exclusive with the input file).
    @ConfigurationParameter(name = PARAM_VOCABULARY_OUTPUT_FILE, mandatory = false)
    private String vocabularyOutputFile;

    public static final String PARAM_VOCABULARY_INPUT_FILE = "vocabularyInputFile";
    // Path of an existing vocabulary file to reuse (mutually exclusive with the output file).
    @ConfigurationParameter(name = PARAM_VOCABULARY_INPUT_FILE, mandatory = false)
    private String vocabularyInputFile;

    public static final String PARAM_IDS_OUTPUT_FILE = "idsOutputFile";
    @ConfigurationParameter(name = PARAM_IDS_OUTPUT_FILE, mandatory = false, //
    description = "(optional) outputs a list of pmids alongside")
    private String idsOutputFile;

    public static final String PARAM_DCA_FORMAT = "dcaFormat";
    @ConfigurationParameter(name = PARAM_DCA_FORMAT, defaultValue = "false",//
    description = "whether to output in DCA format (without ':')")
    private boolean dcaFormat;

    private Writer writer;
    private TextFileWriter idsWriter = null;

    /**
     * Validates that exactly one of the vocabulary parameters is set, then
     * opens the corpus writer and the optional ids writer.
     */
    @Override
    public void initialize(UimaContext context)
            throws ResourceInitializationException {
        super.initialize(context);
        // exactly one of the two vocabulary parameters must be provided (XOR)
        checkArgument(vocabularyInputFile == null
                ^ vocabularyOutputFile == null,
                "please provide either input or output vocabulary file ");
        try {
            String separator = dcaFormat ? " " : ":";
            if (idsOutputFile != null)
                idsWriter = new TextFileWriter(new File(idsOutputFile));
            if (vocabularyOutputFile != null) { // create new vocab file
                writer = new Writer(separator, outputFile, vocabularyOutputFile);
            } else {// use existing vocab
                checkFileExists(vocabularyInputFile);
                writer = new Writer(separator, outputFile, linesFrom(new File(
                        vocabularyInputFile).getAbsolutePath()));
            }
        } catch (IOException e) {
            // do not leak the already-opened ids writer if the corpus writer
            // failed to open; the original failure is reported below
            if (idsWriter != null) {
                try {
                    idsWriter.close();
                } catch (Exception ignored) {
                    // best-effort cleanup only
                }
            }
            throw new ResourceInitializationException(e);
        }
    }

    /**
     * Collects all {@link Keep} annotations of the CAS and writes them as one
     * corpus line. Empty documents are skipped; when an ids file is configured,
     * the document id is recorded only for documents that produced a line.
     */
    @Override
    public void process(JCas jCas) throws AnalysisEngineProcessException {
        try {
            List<String> words = newLinkedList();
            for (Keep k : select(jCas, Keep.class)) {
                // replace spaces by _ in multiwords
                words.add(k.getNormalizedText().replace(' ', '_'));
            }
            if (!words.isEmpty()) {
                // addDocument() returns false for documents that end up empty
                // (e.g. all words missing from a provided vocabulary)
                if (writer.addDocument(words) && idsWriter != null) {
                    idsWriter.addLine(getHeaderDocId(jCas));
                }
            }
        } catch (Exception e) {
            throw new AnalysisEngineProcessException(e);
        }
    }

    @Override
    public void collectionProcessComplete()
            throws AnalysisEngineProcessException {
        super.collectionProcessComplete();
        try {
            try {
                writer.close();
            } finally {
                // close the ids writer even when closing the corpus writer failed
                if (idsWriter != null)
                    idsWriter.close();
            }
        } catch (IOException e) {
            throw new AnalysisEngineProcessException(e);
        }
    }

    /**
     * Standalone writer for the LDA-C / DCA corpus format; usable outside the
     * UIMA annotator (see the class Javadoc example).
     */
    public static class Writer {
        /** Either space (DCA format) or colon (LDA-C format) */
        private String separator;
        // key: word; value: its id. Insertion-ordered so ids match the line
        // numbers of the written vocabulary file.
        private Map<String, Integer> vocabulary = newLinkedHashMap(); // ordered!
        /** true=no more words are added, useful when providing the vocabulary */
        private boolean vocabularyClosed = false;
        private PrintWriter corpusWriter;
        private String vocabularyOutputFile = null;

        /**
         * Creates a writer that builds a new vocabulary while writing.
         *
         * @param separator
         *            ':' for LDA-C, ' ' for DCA
         * @param outputFile
         * @param vocabularyOutputFile
         *            new vocabulary file to output
         */
        public Writer(String separator, String outputFile,
                String vocabularyOutputFile) throws IOException {
            this.separator = separator;
            corpusWriter = new PrintWriter(new BufferedWriter(new FileWriter(
                    new File(outputFile))));
            this.vocabularyOutputFile = vocabularyOutputFile;
        }

        /**
         * Creates a writer over a fixed, provided vocabulary; words not in the
         * vocabulary are silently skipped.
         *
         * @param separator
         *            ':' for LDA-C, ' ' for DCA
         * @param outputFile
         * @param providedVocabulary
         *            existing vocabulary file to use. Words not found in this
         *            vocabulary are simply ignored
         */
        public Writer(String separator, String outputFile,
                List<String> providedVocabulary) throws IOException {
            this.separator = separator;
            corpusWriter = new PrintWriter(new BufferedWriter(new FileWriter(
                    new File(outputFile))));
            for (int i = 0; i < providedVocabulary.size(); i++) {
                vocabulary.put(providedVocabulary.get(i), i);
            }
            vocabularyClosed = true;
        }

        /** @return true if the document is not empty */
        public boolean addDocument(List<String> words) {
            // key: vocabulary id; val: count
            Map<Integer, Integer> documentMap = newHashMap();
            for (String word : words) {
                Integer tokenId = vocabulary.get(word);
                if (tokenId == null) {
                    if (vocabularyClosed) {
                        continue; // unknown word with a fixed vocab: skip it
                    }
                    // add new word to vocab
                    tokenId = vocabulary.size();
                    vocabulary.put(word, tokenId);
                }
                // increment the per-document count for this token
                Integer cnt = documentMap.get(tokenId);
                documentMap.put(tokenId, cnt == null ? 1 : cnt + 1);
            }
            if (!documentMap.isEmpty()) { // do not print empty documents
                corpusWriter.print(documentMap.size());
                for (Entry<Integer, Integer> entry : documentMap.entrySet()) {
                    corpusWriter.print(" " + entry.getKey() + separator
                            + entry.getValue());
                }
                corpusWriter.println();
                return true;
            }
            return false;
        }

        /**
         * Closes the corpus writer and, when the vocabulary was built on the
         * fly, writes it out, one word per line.
         */
        public void close() throws IOException {
            corpusWriter.close();
            if (!vocabularyClosed) { // --> write vocab
                // try-with-resources guarantees the vocab file is closed even
                // if writing fails midway
                try (PrintWriter vocabWriter = new PrintWriter(new BufferedWriter(
                        new FileWriter(new File(vocabularyOutputFile))))) {
                    for (String vocabEntry : vocabulary.keySet()) {
                        vocabWriter.println(vocabEntry);
                    }
                }
            }
        }
    }
}
| |
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.structuralsearch.plugin.replace.impl;
import com.intellij.codeInsight.template.Template;
import com.intellij.codeInsight.template.TemplateManager;
import com.intellij.lang.Language;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.impl.ApplicationImpl;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.CodeStyleManager;
import com.intellij.psi.search.LocalSearchScope;
import com.intellij.structuralsearch.*;
import com.intellij.structuralsearch.impl.matcher.MatcherImplUtil;
import com.intellij.structuralsearch.impl.matcher.PatternTreeContext;
import com.intellij.structuralsearch.impl.matcher.predicates.ScriptSupport;
import com.intellij.structuralsearch.plugin.replace.ReplaceOptions;
import com.intellij.structuralsearch.plugin.replace.ReplacementInfo;
import com.intellij.structuralsearch.plugin.util.CollectingMatchResultSink;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.SmartList;
import org.jetbrains.annotations.Nullable;
import java.util.List;
/**
* @author Maxim.Mossienko
* Date: Mar 4, 2004
* Time: 9:19:34 PM
*/
public class Replacer {
  private final Project project;
  // built lazily in buildReplacement()
  private ReplacementBuilder replacementBuilder;
  private ReplaceOptions options;
  // created lazily in initContextAndHandler()
  private ReplacementContext context;
  private StructuralReplaceHandler replaceHandler;
  // last element returned by doReplace(); reformatting is deferred so that
  // consecutive replacements inside the same element reformat it only once
  private PsiElement lastAffectedElement = null;
  public Replacer(Project project, ReplaceOptions options) {
    this.project = project;
    this.options = options;
  }
  // Strips the first and last character of a typed-variable name
  // (its surrounding decoration characters).
  public static String stripTypedVariableDecoration(final String type) {
    return type.substring(1,type.length()-1);
  }
  // Inserts the replacement image of a variable at the position recorded in
  // the ParameterInfo and returns the offset advanced by the image length.
  public static int insertSubstitution(StringBuilder result, int offset, final ParameterInfo info, String image) {
    if (image.length() > 0) result.insert(offset+ info.getStartIndex(),image);
    offset += image.length();
    return offset;
  }
  public String testReplace(String in, String what, String by, ReplaceOptions options) throws IncorrectOperationException {
    return testReplace(in, what, by, options, false);
  }
  public String testReplace(String in, String what, String by, ReplaceOptions options, boolean filePattern) {
    FileType type = options.getMatchOptions().getFileType();
    return testReplace(in, what, by, options, filePattern, false, type, null);
  }
  /**
   * Searches for {@code what} in the text {@code in} and replaces each match by
   * {@code by}, returning the resulting text. Builds a PSI tree from the input
   * text when no search scope is preset in the options; otherwise reuses the
   * first element of the preset {@link LocalSearchScope}.
   *
   * @throws IncorrectOperationException wrapping any failure during matching or replacing
   */
  public String testReplace(String in, String what, String by, ReplaceOptions options, boolean filePattern, boolean createPhysicalFile,
                            FileType sourceFileType, Language sourceDialect) {
    this.options = options;
    final MatchOptions matchOptions = this.options.getMatchOptions();
    this.options.setReplacement(by);
    replacementBuilder=null;
    context = null;
    replaceHandler = null;
    matchOptions.clearVariableConstraints();
    matchOptions.fillSearchCriteria(what);
    Matcher.validate(project, matchOptions);
    checkSupportedReplacementPattern(project, options);
    Matcher matcher = new Matcher(project);
    try {
      PsiElement firstElement, lastElement, parent;
      if (options.getMatchOptions().getScope() == null) {
        PsiElement[] elements = MatcherImplUtil.createTreeFromText(
          in,
          filePattern ? PatternTreeContext.File : PatternTreeContext.Block,
          sourceFileType, sourceDialect, null,
          project,
          createPhysicalFile
        );
        firstElement = elements[0];
        lastElement = elements[elements.length-1];
        parent = firstElement.getParent();
        matchOptions.setScope(new LocalSearchScope(elements));
      } else {
        parent = ((LocalSearchScope)options.getMatchOptions().getScope()).getScope()[0];
        firstElement = parent.getFirstChild();
        lastElement = parent.getLastChild();
      }
      matchOptions.setResultIsContextMatch(true);
      CollectingMatchResultSink sink = new CollectingMatchResultSink();
      matcher.testFindMatches(sink, matchOptions);
      final List<ReplacementInfo> resultPtrList = new SmartList<>();
      for (final MatchResult result : sink.getMatches()) {
        resultPtrList.add(buildReplacement(result));
      }
      int startOffset = firstElement.getTextRange().getStartOffset();
      int endOffset = filePattern ? 0 : parent.getTextLength() - lastElement.getTextRange().getEndOffset();
      // NOTE(review): the tree created from text may be surrounded by extra
      // whitespace siblings; trim them from the offsets so only the original
      // input text is returned — confirm the "- 1" keeps a single separator
      PsiElement prevSibling = firstElement.getPrevSibling();
      if (prevSibling instanceof PsiWhiteSpace) {
        startOffset -= prevSibling.getTextLength() - 1;
      }
      PsiElement nextSibling = lastElement.getNextSibling();
      if (nextSibling instanceof PsiWhiteSpace) {
        endOffset -= nextSibling.getTextLength() - 1;
      }
      replaceAll(resultPtrList);
      String result = parent.getText();
      result = result.substring(startOffset);
      result = result.substring(0,result.length() - endOffset);
      return result;
    }
    catch (Exception e) {
      throw new IncorrectOperationException(e);
    }
    finally {
      // always clear the scope so this Replacer can be reused with fresh input
      options.getMatchOptions().setScope(null);
    }
  }
  /**
   * Applies all prepared replacements inside a single write action, showing
   * progress in the dispatch thread. Each replacement runs in a non-cancelable
   * section; reformatting of an affected element is deferred until the next
   * replacement moves to a different element (and flushed in the finally block).
   */
  public void replaceAll(final List<ReplacementInfo> infos) {
    for (ReplacementInfo info : infos) {
      PsiElement element = info.getMatch(0);
      initContextAndHandler(element);
      if (replaceHandler != null) {
        replaceHandler.prepare(info);
      }
    }
    ((ApplicationImpl)ApplicationManager.getApplication()).runWriteActionWithProgressInDispatchThread(
      SSRBundle.message("structural.replace.title"),
      project,
      null,
      "Stop",
      indicator -> {
        indicator.setIndeterminate(false);
        try {
          final int size = infos.size();
          VirtualFile lastFile = null;
          for (int i = 0; i < size; i++) {
            indicator.checkCanceled();
            indicator.setFraction((float)(i + 1) / size);
            ReplacementInfo info = infos.get(i);
            PsiElement element = info.getMatch(0);
            assert element != null;
            final VirtualFile vFile = element.getContainingFile().getVirtualFile();
            if (vFile != null && !vFile.equals(lastFile)) {
              // show the current file in the progress dialog
              indicator.setText2(vFile.getPresentableUrl());
              lastFile = vFile;
            }
            ProgressManager.getInstance().executeNonCancelableSection(() -> {
              final PsiElement affectedElement = doReplace(info);
              if (affectedElement != lastAffectedElement) {
                if (lastAffectedElement != null) reformatAndPostProcess(lastAffectedElement);
                lastAffectedElement = affectedElement;
              }
            });
          }
        } finally {
          // flush the pending reformat for the last affected element
          ProgressManager.getInstance().executeNonCancelableSection(() -> reformatAndPostProcess(lastAffectedElement));
        }
      }
    );
  }
  /** Applies a single replacement immediately, including reformatting. */
  public void replace(ReplacementInfo info) {
    initContextAndHandler(info.getMatch(0));
    if (replaceHandler != null) {
      replaceHandler.prepare(info);
    }
    reformatAndPostProcess(doReplace(info));
  }
  /**
   * Performs one replacement with the formatter disabled and returns the parent
   * element that was affected, or {@code null} when the match or its parent is
   * no longer valid/writable.
   */
  @Nullable
  private PsiElement doReplace(ReplacementInfo info) {
    final PsiElement element = info.getMatch(0);
    if (element==null || !element.isWritable() || !element.isValid()) return null;
    final PsiElement elementParent = element.getParent();
    CodeStyleManager.getInstance(project).performActionWithFormatterDisabled(
      (Runnable)() -> {
        if (replaceHandler != null) {
          replaceHandler.replace(info, options);
        }
      }
    );
    if (!elementParent.isValid() || !elementParent.isWritable()) {
      return null;
    }
    return elementParent;
  }
  /**
   * Reformats the given element's range (when the options request it) and lets
   * the replace handler run its post-processing.
   */
  private void reformatAndPostProcess(final PsiElement elementParent) {
    if (elementParent == null) return;
    final PsiFile containingFile = elementParent.getContainingFile();
    if (containingFile != null && options.isToReformatAccordingToStyle()) {
      final VirtualFile file = containingFile.getVirtualFile();
      if (file != null) {
        // commit pending document changes before reformatting
        PsiDocumentManager.getInstance(project).commitDocument(FileDocumentManager.getInstance().getDocument(file));
      }
      final int parentOffset = elementParent.getTextRange().getStartOffset();
      CodeStyleManager.getInstance(project).reformatRange(containingFile, parentOffset, parentOffset + elementParent.getTextLength(), true);
    }
    if (replaceHandler != null) {
      replaceHandler.postProcess(elementParent, options);
    }
  }
  // Lazily creates the replacement context and looks up the language-specific
  // replace handler for the given PSI element's profile.
  private void initContextAndHandler(PsiElement psiContext) {
    if (context == null) {
      context = new ReplacementContext(options, project);
    }
    if (replaceHandler == null) {
      final StructuralSearchProfile profile = StructuralSearchUtil.getProfileByPsiElement(psiContext);
      if (profile != null) {
        replaceHandler = profile.getReplaceHandler(context);
      }
    }
  }
  /**
   * Carries trailing and leading comments of the matched element {@code el}
   * over to the {@code replacement} element, unless those comments are bound to
   * a pattern variable or the replacement already ends with a comment.
   */
  public static void handleComments(final PsiElement el, final PsiElement replacement, ReplacementInfo replacementInfo) {
    final PsiElement lastChild = el.getLastChild();
    if (lastChild instanceof PsiComment &&
        replacementInfo.getVariableName(lastChild) == null &&
        !(replacement.getLastChild() instanceof PsiComment)
    ) {
      // walk backwards over the trailing whitespace/comment run
      PsiElement firstElementAfterStatementEnd = lastChild;
      for(PsiElement curElement=firstElementAfterStatementEnd.getPrevSibling();curElement!=null;curElement = curElement.getPrevSibling()) {
        if (!(curElement instanceof PsiWhiteSpace) && !(curElement instanceof PsiComment)) break;
        firstElementAfterStatementEnd = curElement;
      }
      replacement.addRangeAfter(firstElementAfterStatementEnd,lastChild,replacement.getLastChild());
    }
    final PsiElement firstChild = el.getFirstChild();
    if (firstChild instanceof PsiComment &&
        !(firstChild instanceof PsiDocCommentBase) &&
        replacementInfo.getVariableName(firstChild) == null
    ) {
      // walk forwards over the leading whitespace/comment run
      PsiElement lastElementBeforeStatementStart = firstChild;
      for(PsiElement curElement=lastElementBeforeStatementStart.getNextSibling();curElement!=null;curElement = curElement.getNextSibling()) {
        if (!(curElement instanceof PsiWhiteSpace) && !(curElement instanceof PsiComment)) break;
        lastElementBeforeStatementStart = curElement;
      }
      replacement.addRangeBefore(firstChild,lastElementBeforeStatementStart,replacement.getFirstChild());
    }
  }
  /**
   * Verifies that every variable used in the replacement pattern either occurs
   * in the search pattern (possibly as a numbered reference such as
   * {@code name_1}) or has a valid script-based variable definition; then
   * delegates to the file-type profile for language-specific checks.
   *
   * @throws UnsupportedPatternException when a replacement variable is
   *         undefined, its script is invalid, or the pattern itself is incorrect
   */
  public static void checkSupportedReplacementPattern(Project project, ReplaceOptions options) throws UnsupportedPatternException {
    try {
      String search = options.getMatchOptions().getSearchPattern();
      String replacement = options.getReplacement();
      FileType fileType = options.getMatchOptions().getFileType();
      Template template = TemplateManager.getInstance(project).createTemplate("","",search);
      Template template2 = TemplateManager.getInstance(project).createTemplate("","",replacement);
      int segmentCount = template2.getSegmentsCount();
      for(int i=0;i<segmentCount;++i) {
        final String replacementSegmentName = template2.getSegmentName(i);
        final int segmentCount2 = template.getSegmentsCount();
        int j;
        for(j=0;j<segmentCount2;++j) {
          final String searchSegmentName = template.getSegmentName(j);
          if (replacementSegmentName.equals(searchSegmentName)) break;
          // numbered reference to a search variable, e.g. name_1
          if (replacementSegmentName.startsWith(searchSegmentName) &&
              replacementSegmentName.charAt(searchSegmentName.length())=='_'
             ) {
            try {
              Integer.parseInt(replacementSegmentName.substring(searchSegmentName.length()+1));
              break;
            } catch(NumberFormatException ex) { /* suffix not numeric: not a reference, keep scanning */ }
          }
        }
        if (j==segmentCount2) {
          // not found in the search pattern: must be a defined script variable
          ReplacementVariableDefinition definition = options.getVariableDefinition(replacementSegmentName);
          if (definition == null || definition.getScriptCodeConstraint().length() <= 2 /*empty quotes*/) {
            throw new UnsupportedPatternException(
              SSRBundle.message("replacement.variable.is.not.defined.message", replacementSegmentName)
            );
          } else {
            String message = ScriptSupport.checkValidScript(StringUtil.stripQuotesAroundValue(definition.getScriptCodeConstraint()));
            if (message != null) {
              throw new UnsupportedPatternException(
                SSRBundle.message("replacement.variable.is.not.valid", replacementSegmentName, message)
              );
            }
          }
        }
      }
      StructuralSearchProfile profile = StructuralSearchUtil.getProfileByFileType(fileType);
      assert profile != null;
      profile.checkReplacementPattern(project, options);
    } catch(IncorrectOperationException ex) {
      throw new UnsupportedPatternException(SSRBundle.message("incorrect.pattern.message"));
    }
  }
  /** Builds the textual replacement for one match result. */
  public ReplacementInfo buildReplacement(MatchResult result) {
    final ReplacementInfoImpl replacementInfo = new ReplacementInfoImpl(result, project);
    if (replacementBuilder==null) {
      replacementBuilder = new ReplacementBuilder(project, options);
    }
    replacementInfo.setReplacement(replacementBuilder.process(result, replacementInfo, options.getMatchOptions().getFileType()));
    return replacementInfo;
  }
}
| |
/*
* This file is part of React, licensed under the MIT License (MIT).
*
* Copyright (c) 2013 Flow Powered <https://flowpowered.com/>
* Original ReactPhysics3D C++ library by Daniel Chappuis <http://danielchappuis.ch>
* React is re-licensed with permission from ReactPhysics3D author.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.flowpowered.react.constraint;
import com.flowpowered.react.ReactDefaults.JointsPositionCorrectionTechnique;
import com.flowpowered.react.body.RigidBody;
import com.flowpowered.react.constraint.ConstraintSolver.ConstraintSolverData;
import com.flowpowered.react.math.Matrix3x3;
import com.flowpowered.react.math.Quaternion;
import com.flowpowered.react.math.Transform;
import com.flowpowered.react.math.Vector3;
/**
* This class represents a ball-and-socket joint that allows arbitrary rotation between two bodies. This joint has three degrees of freedom. It can be used to create a chain of bodies for instance.
*/
public class BallAndSocketJoint extends Joint {
    // Baumgarte position-correction factor used when building the bias vector
    private static final float BETA = 0.2f;
    // Anchor point expressed in each body's local space (computed once in the constructor)
    private final Vector3 mLocalAnchorPointBody1;
    private final Vector3 mLocalAnchorPointBody2;
    // World-space vectors from each body's position to the anchor, refreshed each step
    private final Vector3 mR1World = new Vector3();
    private final Vector3 mR2World = new Vector3();
    // World-space inverse inertia tensors, refreshed each step
    private final Matrix3x3 mI1 = new Matrix3x3();
    private final Matrix3x3 mI2 = new Matrix3x3();
    private final Vector3 mBiasVector = new Vector3();
    private final Matrix3x3 mInverseMassMatrix = new Matrix3x3();
    // Accumulated impulse, preserved across steps for warm starting
    private final Vector3 mImpulse;
    /**
     * Constructs a new ball and socket joint from provided ball and socket joint info.
     *
     * @param jointInfo The joint info
     */
    public BallAndSocketJoint(BallAndSocketJointInfo jointInfo) {
        super(jointInfo);
        mImpulse = new Vector3(0, 0, 0);
        // convert the world-space anchor into each body's local space
        mLocalAnchorPointBody1 = Transform.multiply(mBody1.getTransform().getInverse(), jointInfo.getAnchorPointWorldSpace());
        mLocalAnchorPointBody2 = Transform.multiply(mBody2.getTransform().getInverse(), jointInfo.getAnchorPointWorldSpace());
    }
    /**
     * Prepares the per-step solver data: velocity indices, world anchors,
     * inverse inertia tensors, the inverse mass matrix K^-1 and the Baumgarte
     * bias vector. Resets the accumulated impulse when warm starting is off.
     */
    @Override
    public void initBeforeSolve(ConstraintSolverData constraintSolverData) {
        mIndexBody1 = constraintSolverData.getMapBodyToConstrainedVelocityIndex().get(mBody1);
        mIndexBody2 = constraintSolverData.getMapBodyToConstrainedVelocityIndex().get(mBody2);
        final Vector3 x1 = mBody1.getTransform().getPosition();
        final Vector3 x2 = mBody2.getTransform().getPosition();
        final Quaternion orientationBody1 = mBody1.getTransform().getOrientation();
        final Quaternion orientationBody2 = mBody2.getTransform().getOrientation();
        mI1.set(mBody1.getInertiaTensorInverseWorld());
        mI2.set(mBody2.getInertiaTensorInverseWorld());
        // rotate the local anchors into world space
        mR1World.set(Quaternion.multiply(orientationBody1, mLocalAnchorPointBody1));
        mR2World.set(Quaternion.multiply(orientationBody2, mLocalAnchorPointBody2));
        final Matrix3x3 skewSymmetricMatrixU1 = Matrix3x3.computeSkewSymmetricMatrixForCrossProduct(mR1World);
        final Matrix3x3 skewSymmetricMatrixU2 = Matrix3x3.computeSkewSymmetricMatrixForCrossProduct(mR2World);
        // only bodies with motion enabled contribute mass terms
        float inverseMassBodies = 0;
        if (mBody1.isMotionEnabled()) {
            inverseMassBodies += mBody1.getMassInverse();
        }
        if (mBody2.isMotionEnabled()) {
            inverseMassBodies += mBody2.getMassInverse();
        }
        final Matrix3x3 massMatrix = new Matrix3x3(
                inverseMassBodies, 0, 0,
                0, inverseMassBodies, 0,
                0, 0, inverseMassBodies);
        if (mBody1.isMotionEnabled()) {
            massMatrix.add(Matrix3x3.multiply(skewSymmetricMatrixU1, Matrix3x3.multiply(mI1, skewSymmetricMatrixU1.getTranspose())));
        }
        if (mBody2.isMotionEnabled()) {
            massMatrix.add(Matrix3x3.multiply(skewSymmetricMatrixU2, Matrix3x3.multiply(mI2, skewSymmetricMatrixU2.getTranspose())));
        }
        mInverseMassMatrix.setToZero();
        if (mBody1.isMotionEnabled() || mBody2.isMotionEnabled()) {
            mInverseMassMatrix.set(massMatrix.getInverse());
        }
        mBiasVector.setToZero();
        if (mPositionCorrectionTechnique == JointsPositionCorrectionTechnique.BAUMGARTE_JOINTS) {
            // bias = (BETA / dt) * positionError
            final float biasFactor = BETA / constraintSolverData.getTimeStep();
            mBiasVector.set(Vector3.multiply(biasFactor, Vector3.subtract(Vector3.subtract(Vector3.add(x2, mR2World), x1), mR1World)));
        }
        if (!constraintSolverData.isWarmStartingActive()) {
            mImpulse.setToZero();
        }
    }
    /**
     * Applies the impulse accumulated during the previous step to both bodies'
     * velocities (warm starting).
     */
    @Override
    public void warmstart(ConstraintSolverData constraintSolverData) {
        final Vector3 v1 = constraintSolverData.getLinearVelocities()[mIndexBody1];
        final Vector3 v2 = constraintSolverData.getLinearVelocities()[mIndexBody2];
        final Vector3 w1 = constraintSolverData.getAngularVelocities()[mIndexBody1];
        final Vector3 w2 = constraintSolverData.getAngularVelocities()[mIndexBody2];
        final float inverseMassBody1 = mBody1.getMassInverse();
        final float inverseMassBody2 = mBody2.getMassInverse();
        if (mBody1.isMotionEnabled()) {
            // body 1 receives the impulse with opposite sign
            final Vector3 linearImpulseBody1 = Vector3.negate(mImpulse);
            final Vector3 angularImpulseBody1 = mImpulse.cross(mR1World);
            v1.add(Vector3.multiply(inverseMassBody1, linearImpulseBody1));
            w1.add(Matrix3x3.multiply(mI1, angularImpulseBody1));
        }
        if (mBody2.isMotionEnabled()) {
            final Vector3 linearImpulseBody2 = mImpulse;
            final Vector3 angularImpulseBody2 = Vector3.negate(mImpulse.cross(mR2World));
            v2.add(Vector3.multiply(inverseMassBody2, linearImpulseBody2));
            w2.add(Matrix3x3.multiply(mI2, angularImpulseBody2));
        }
    }
    /**
     * Solves the velocity constraint: computes the constraint-space velocity
     * Jv, derives the impulse delta via the inverse mass matrix (minus the
     * bias), accumulates it and applies it to both bodies.
     */
    @Override
    public void solveVelocityConstraint(ConstraintSolverData constraintSolverData) {
        final Vector3 v1 = constraintSolverData.getLinearVelocities()[mIndexBody1];
        final Vector3 v2 = constraintSolverData.getLinearVelocities()[mIndexBody2];
        final Vector3 w1 = constraintSolverData.getAngularVelocities()[mIndexBody1];
        final Vector3 w2 = constraintSolverData.getAngularVelocities()[mIndexBody2];
        float inverseMassBody1 = mBody1.getMassInverse();
        float inverseMassBody2 = mBody2.getMassInverse();
        // Jv = (v2 + w2 x r2) - (v1 + w1 x r1): relative anchor velocity
        final Vector3 Jv = Vector3.subtract(Vector3.subtract(Vector3.add(v2, w2.cross(mR2World)), v1), w1.cross(mR1World));
        final Vector3 deltaLambda = Matrix3x3.multiply(mInverseMassMatrix, Vector3.subtract(Vector3.negate(Jv), mBiasVector));
        mImpulse.add(deltaLambda);
        if (mBody1.isMotionEnabled()) {
            final Vector3 linearImpulseBody1 = Vector3.negate(deltaLambda);
            final Vector3 angularImpulseBody1 = deltaLambda.cross(mR1World);
            v1.add(Vector3.multiply(inverseMassBody1, linearImpulseBody1));
            w1.add(Matrix3x3.multiply(mI1, angularImpulseBody1));
        }
        if (mBody2.isMotionEnabled()) {
            final Vector3 linearImpulseBody2 = deltaLambda;
            final Vector3 angularImpulseBody2 = Vector3.negate(deltaLambda.cross(mR2World));
            v2.add(Vector3.multiply(inverseMassBody2, linearImpulseBody2));
            w2.add(Matrix3x3.multiply(mI2, angularImpulseBody2));
        }
    }
    /**
     * Solves the position constraint (non-linear Gauss-Seidel only): rebuilds
     * the world anchors and inverse mass matrix from the current positions and
     * orientations, then corrects positions and orientations directly from the
     * position error. No-op for other correction techniques.
     */
    @Override
    public void solvePositionConstraint(ConstraintSolverData constraintSolverData) {
        if (mPositionCorrectionTechnique != JointsPositionCorrectionTechnique.NON_LINEAR_GAUSS_SEIDEL) {
            return;
        }
        final Vector3 x1 = constraintSolverData.getPositions().get(mIndexBody1);
        final Vector3 x2 = constraintSolverData.getPositions().get(mIndexBody2);
        final Quaternion q1 = constraintSolverData.getOrientations().get(mIndexBody1);
        final Quaternion q2 = constraintSolverData.getOrientations().get(mIndexBody2);
        final float inverseMassBody1 = mBody1.getMassInverse();
        final float inverseMassBody2 = mBody2.getMassInverse();
        mI1.set(mBody1.getInertiaTensorInverseWorld());
        mI2.set(mBody2.getInertiaTensorInverseWorld());
        mR1World.set(Quaternion.multiply(q1, mLocalAnchorPointBody1));
        mR2World.set(Quaternion.multiply(q2, mLocalAnchorPointBody2));
        final Matrix3x3 skewSymmetricMatrixU1 = Matrix3x3.computeSkewSymmetricMatrixForCrossProduct(mR1World);
        final Matrix3x3 skewSymmetricMatrixU2 = Matrix3x3.computeSkewSymmetricMatrixForCrossProduct(mR2World);
        // same mass matrix construction as initBeforeSolve(), but from the
        // solver's current (corrected) positions/orientations
        float inverseMassBodies = 0;
        if (mBody1.isMotionEnabled()) {
            inverseMassBodies += inverseMassBody1;
        }
        if (mBody2.isMotionEnabled()) {
            inverseMassBodies += inverseMassBody2;
        }
        final Matrix3x3 massMatrix = new Matrix3x3(
                inverseMassBodies, 0, 0,
                0, inverseMassBodies, 0,
                0, 0, inverseMassBodies);
        if (mBody1.isMotionEnabled()) {
            massMatrix.add(Matrix3x3.multiply(skewSymmetricMatrixU1, Matrix3x3.multiply(mI1, skewSymmetricMatrixU1.getTranspose())));
        }
        if (mBody2.isMotionEnabled()) {
            massMatrix.add(Matrix3x3.multiply(skewSymmetricMatrixU2, Matrix3x3.multiply(mI2, skewSymmetricMatrixU2.getTranspose())));
        }
        mInverseMassMatrix.setToZero();
        if (mBody1.isMotionEnabled() || mBody2.isMotionEnabled()) {
            mInverseMassMatrix.set(massMatrix.getInverse());
        }
        // position error of the anchor between the two bodies
        final Vector3 constraintError = Vector3.subtract(Vector3.subtract(Vector3.add(x2, mR2World), x1), mR1World);
        final Vector3 lambda = Matrix3x3.multiply(mInverseMassMatrix, Vector3.negate(constraintError));
        if (mBody1.isMotionEnabled()) {
            final Vector3 linearImpulseBody1 = Vector3.negate(lambda);
            final Vector3 angularImpulseBody1 = lambda.cross(mR1World);
            final Vector3 v1 = Vector3.multiply(inverseMassBody1, linearImpulseBody1);
            final Vector3 w1 = Matrix3x3.multiply(mI1, angularImpulseBody1);
            x1.add(v1);
            // integrate the orientation correction: q += 0.5 * (0, w) * q
            q1.add(Quaternion.multiply(Quaternion.multiply(new Quaternion(0, w1), q1), 0.5f));
            q1.normalize();
        }
        if (mBody2.isMotionEnabled()) {
            final Vector3 linearImpulseBody2 = lambda;
            final Vector3 angularImpulseBody2 = Vector3.negate(lambda.cross(mR2World));
            final Vector3 v2 = Vector3.multiply(inverseMassBody2, linearImpulseBody2);
            final Vector3 w2 = Matrix3x3.multiply(mI2, angularImpulseBody2);
            x2.add(v2);
            q2.add(Quaternion.multiply(Quaternion.multiply(new Quaternion(0, w2), q2), 0.5f));
            q2.normalize();
        }
    }
    /**
     * This structure is used to gather the information needed to create a ball-and-socket joint. This structure will be used to create the actual ball-and-socket joint.
     */
    public static class BallAndSocketJointInfo extends JointInfo {
        private final Vector3 anchorPointWorldSpace = new Vector3();
        /**
         * Constructs a new ball and socket joint info from both bodies and the initial anchor point in world space.
         *
         * @param rigidBody1 The first body
         * @param rigidBody2 The second body
         * @param initAnchorPointWorldSpace The anchor point in world space
         */
        public BallAndSocketJointInfo(RigidBody rigidBody1, RigidBody rigidBody2, Vector3 initAnchorPointWorldSpace) {
            super(rigidBody1, rigidBody2, JointType.BALLSOCKETJOINT);
            anchorPointWorldSpace.set(initAnchorPointWorldSpace);
        }
        /**
         * Returns the anchor point in world space.
         *
         * @return The anchor point in world space
         */
        public Vector3 getAnchorPointWorldSpace() {
            return anchorPointWorldSpace;
        }
    }
}
| |
package com.tixon.portlab.app;
import android.content.ContentValues;
import android.content.Context;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteOpenHelper;
import android.util.Log;
import android.widget.Toast;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
/**
 * SQLite helper for the PortLab calculator application.
 *
 * <p>Manages five tables: variables (name/value), functions (name/arguments/expression),
 * arrays (name/semicolon-separated values), history items and expression items
 * (the latter two are kept in lock-step: one row in each per calculation).</p>
 */
public class MyDataHelper extends SQLiteOpenHelper {
    private static final String LOG_TAG = "myLogs";
    private static final String UID = "_id";
    private static final String TABLE_VARIABLES_NAME = "table_variables";
    private static final String TABLE_FUNCTIONS_NAME = "table_functions";
    private static final String TABLE_ARRAYS_NAME = "table_arrays";
    private static final String TABLE_HISTORY_NAME = "table_history";
    private static final String TABLE_EXPRESSIONS_NAME = "table_expressions";
    private static final String FIELD_VARIABLE_NAME = "variable_name";
    private static final String FIELD_VARIABLE_VALUE = "variable_value";
    private static final String FIELD_FUNCTION_NAME = "function_name";
    private static final String FIELD_FUNCTION_ARGUMENTS = "function_arguments";
    private static final String FIELD_FUNCTION_EXPRESSION = "function_expression";
    private static final String FIELD_ARRAY_NAME = "array_name";
    private static final String FIELD_ARRAY_VALUES = "array_values";
    private static final String FIELD_HISTORY_ITEM = "history_item";
    private static final String FIELD_EXPRESSIONS_ITEM = "expressions_item";
    private static final String DATABASE_NAME = "port_lab_database";
    private static final int DATABASE_VERSION = 4;//last stable version: 3
    private static final String SQL_CREATE_TABLE_VARIABLES = "create table " + TABLE_VARIABLES_NAME + " (" +
            UID + " integer primary key autoincrement, " + FIELD_VARIABLE_NAME + " text, " + FIELD_VARIABLE_VALUE + " real" + ");";
    private static final String SQL_CREATE_TABLE_FUNCTIONS = "create table " + TABLE_FUNCTIONS_NAME + " (" +
            UID + " integer primary key autoincrement, " + FIELD_FUNCTION_NAME + " text, " + FIELD_FUNCTION_ARGUMENTS + " text, " +
            FIELD_FUNCTION_EXPRESSION + " text" + ");";
    private static final String SQL_CREATE_TABLE_ARRAYS = "create table " + TABLE_ARRAYS_NAME + " (" +
            UID + " integer primary key autoincrement, " + FIELD_ARRAY_NAME + " text, " + FIELD_ARRAY_VALUES + " text" + ");";
    private static final String SQL_CREATE_TABLE_HISTORY = "create table " + TABLE_HISTORY_NAME + " (" +
            UID + " integer primary key autoincrement, " + FIELD_HISTORY_ITEM + " text" + ");";
    private static final String SQL_CREATE_TABLE_EXPRESSIONS = "create table " + TABLE_EXPRESSIONS_NAME + " (" +
            UID + " integer primary key autoincrement, " + FIELD_EXPRESSIONS_ITEM + " text" + ");";
    private static final String SQL_DELETE_VARIABLES = "DROP TABLE IF EXISTS "
            + TABLE_VARIABLES_NAME;
    private static final String SQL_DELETE_FUNCTIONS = "DROP TABLE IF EXISTS "
            + TABLE_FUNCTIONS_NAME;
    private static final String SQL_DELETE_ARRAYS = "DROP TABLE IF EXISTS "
            + TABLE_ARRAYS_NAME;
    private static final String SQL_DELETE_HISTORY = "DROP TABLE IF EXISTS "
            + TABLE_HISTORY_NAME;
    private static final String SQL_DELETE_EXPRESSIONS = "DROP TABLE IF EXISTS "
            + TABLE_EXPRESSIONS_NAME;

    public MyDataHelper(Context context) {
        super(context, DATABASE_NAME, null, DATABASE_VERSION);
    }

    /** Creates all five tables in a fresh database. */
    @Override
    public void onCreate(SQLiteDatabase db) {
        Log.d(LOG_TAG, "--- onCreate database ---");
        db.execSQL(SQL_CREATE_TABLE_VARIABLES);
        db.execSQL(SQL_CREATE_TABLE_FUNCTIONS);
        db.execSQL(SQL_CREATE_TABLE_HISTORY);
        db.execSQL(SQL_CREATE_TABLE_EXPRESSIONS);
        db.execSQL(SQL_CREATE_TABLE_ARRAYS);
    }

    /**
     * Upgrades the schema by reading all user data, dropping every table,
     * recreating them via {@link #onCreate(SQLiteDatabase)} and re-inserting
     * the data.
     */
    @Override
    public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) {
        Log.d(LOG_TAG, "--- onUpgrade database: old v. = " + oldVersion + ", new v. = " + newVersion + " ---");
        ArrayList<String> historyItems = new ArrayList<>();
        ArrayList<String> expressionItems = new ArrayList<>();
        HashMap<String, Double> variables = new HashMap<>();
        HashMap<String, FunctionsDefinitions> functions = new HashMap<>();
        ArrayList<Array> arrays = new ArrayList<>();
        readHistory(db, historyItems, expressionItems);
        readVariables(db, variables);
        readFunctions(db, functions);
        // The arrays table may not exist in older schema versions; read it best-effort
        // so the upgrade does not crash on a missing table.
        try {
            readArrays(db, arrays);
        } catch (Exception e) {
            Log.d(LOG_TAG, "arrays table not readable on upgrade: " + e.getMessage());
        }
        historyItems = reverse(historyItems);
        expressionItems = reverse(expressionItems);
        db.execSQL(SQL_DELETE_VARIABLES);
        db.execSQL(SQL_DELETE_FUNCTIONS);
        // Fix: the arrays table must be dropped too, otherwise onCreate() fails
        // with "table already exists" when the old schema already contains it.
        db.execSQL(SQL_DELETE_ARRAYS);
        db.execSQL(SQL_DELETE_HISTORY);
        db.execSQL(SQL_DELETE_EXPRESSIONS);
        onCreate(db);
        // Guard against history/expression lists of unequal length.
        int pairCount = Math.min(historyItems.size(), expressionItems.size());
        for (int i = 0; i < pairCount; i++) {
            insertHistory(db, historyItems.get(i), expressionItems.get(i));
        }
        insertVariables(db, variables);
        insertFunctions(db, functions);
        insertArrays(db, arrays);
        Log.d(LOG_TAG, "The database has been upgraded successfully");
    }

    /**
     * Inserts every variable of the given map as a row (name, value).
     *
     * @param db     writable database
     * @param varMap variable name to value mapping
     */
    public void insertVariables(SQLiteDatabase db, HashMap<String, Double> varMap) {
        Log.d(LOG_TAG, "--- Insert in '" + TABLE_VARIABLES_NAME + "': ---");
        ContentValues cv = new ContentValues();
        for (Map.Entry<String, Double> entry : varMap.entrySet()) {
            cv.put(FIELD_VARIABLE_NAME, entry.getKey());
            cv.put(FIELD_VARIABLE_VALUE, entry.getValue());
            long rowID = db.insert(TABLE_VARIABLES_NAME, null, cv);
            Log.d(LOG_TAG, TABLE_VARIABLES_NAME + ": row inserted, ID = " + rowID);
        }
    }

    /**
     * Reads all variables into {@code varMap} (name -> value).
     *
     * @param db     readable database
     * @param varMap destination map, filled by this method
     */
    public void readVariables(SQLiteDatabase db, HashMap<String, Double> varMap) {
        Log.d(LOG_TAG, "--- Read from '" + TABLE_VARIABLES_NAME + "': ---");
        Cursor c = db.query(TABLE_VARIABLES_NAME, null, null, null, null, null, null);
        if (c.moveToFirst()) {
            int idColIndex = c.getColumnIndex(UID);
            int varNameColIndex = c.getColumnIndex(FIELD_VARIABLE_NAME);
            int varValueColIndex = c.getColumnIndex(FIELD_VARIABLE_VALUE);
            do {
                Log.d(LOG_TAG, UID + " = " + c.getInt(idColIndex) + ", " + FIELD_VARIABLE_NAME + " = " + c.getString(varNameColIndex) + ", " + FIELD_VARIABLE_VALUE + " = " + c.getDouble(varValueColIndex));
                varMap.put(c.getString(varNameColIndex), c.getDouble(varValueColIndex));
            } while (c.moveToNext());
        } else {
            Log.d(LOG_TAG, "0 rows");
        }
        c.close();
    }

    /**
     * Finds a variable row by name.
     *
     * @param db     readable database
     * @param name   variable name to look up
     * @param varMap optional map to receive the found name/value pair (may be null)
     * @return the row id of the variable, or 0 when not found
     */
    public int findVariableByName(SQLiteDatabase db, String name, HashMap<String, Double> varMap) {
        int id_result = 0;
        Log.d(LOG_TAG, "--- Read from '" + TABLE_VARIABLES_NAME + "': ---");
        Cursor c = db.query(TABLE_VARIABLES_NAME, null, null, null, null, null, null);
        if (c.moveToFirst()) {
            int idColIndex = c.getColumnIndex(UID);
            int varNameColIndex = c.getColumnIndex(FIELD_VARIABLE_NAME);
            int varValueColIndex = c.getColumnIndex(FIELD_VARIABLE_VALUE);
            do {
                int id = c.getInt(idColIndex);
                String varName = c.getString(varNameColIndex);
                if (varName.equals(name)) {
                    Log.d(LOG_TAG, UID + " = " + id + ", " + FIELD_VARIABLE_NAME + " = " + c.getString(varNameColIndex) + ", " + FIELD_VARIABLE_VALUE + " = " + c.getDouble(varValueColIndex));
                    if (varMap != null) varMap.put(c.getString(varNameColIndex), c.getDouble(varValueColIndex));
                    id_result = id;
                    break;
                }
            } while (c.moveToNext());
        } else {
            Log.d(LOG_TAG, "0 rows");
        }
        c.close();
        return id_result;
    }

    /** Deletes all variables and resets the autoincrement counter. */
    public void deleteVariables(SQLiteDatabase db) {
        int clearCount = db.delete(TABLE_VARIABLES_NAME, null, null);
        db.execSQL("DELETE FROM SQLITE_SEQUENCE WHERE NAME = '" + TABLE_VARIABLES_NAME + "'"); //Resetting ID
        Log.d(LOG_TAG, "Deleted from '" + TABLE_VARIABLES_NAME + "': " + clearCount);
    }

    /**
     * Inserts every function definition as a row (name, comma-joined
     * argument list, expression).
     *
     * @param db      writable database
     * @param funcMap function name to definition mapping
     */
    public void insertFunctions(SQLiteDatabase db, HashMap<String, FunctionsDefinitions> funcMap) {
        Log.d(LOG_TAG, "--- Insert in '" + TABLE_FUNCTIONS_NAME + "': ---");
        ContentValues cv = new ContentValues();
        for (Map.Entry<String, FunctionsDefinitions> entry : funcMap.entrySet()) {
            String funcName = entry.getKey();
            FunctionsDefinitions FD = entry.getValue();
            String funcExpr = FD.expression;
            // Join the argument list with commas; safe for 0, 1 or many arguments
            // (the old code crashed on an empty argument list).
            StringBuilder funcArgs = new StringBuilder();
            for (int i = 0; i < FD.arguments.size(); i++) {
                if (i > 0) funcArgs.append(",");
                funcArgs.append(FD.arguments.get(i));
            }
            cv.put(FIELD_FUNCTION_NAME, funcName);
            cv.put(FIELD_FUNCTION_ARGUMENTS, funcArgs.toString());
            cv.put(FIELD_FUNCTION_EXPRESSION, funcExpr);
            long rowID = db.insert(TABLE_FUNCTIONS_NAME, null, cv);
            Log.d(LOG_TAG, TABLE_FUNCTIONS_NAME + ": row inserted, ID = " + rowID);
        }
    }

    /**
     * Reads all function definitions into {@code funcMap} (name -> definition).
     *
     * @param db      readable database
     * @param funcMap destination map, filled by this method
     */
    public void readFunctions(SQLiteDatabase db, HashMap<String, FunctionsDefinitions> funcMap) {
        Log.d(LOG_TAG, "--- Read from '" + TABLE_FUNCTIONS_NAME + "': ---");
        Cursor c = db.query(TABLE_FUNCTIONS_NAME, null, null, null, null, null, null);
        if (c.moveToFirst()) {
            int idColIndex = c.getColumnIndex(UID);
            int funcNameColIndex = c.getColumnIndex(FIELD_FUNCTION_NAME);
            int funcArgsColIndex = c.getColumnIndex(FIELD_FUNCTION_ARGUMENTS);
            int funcExprColIndex = c.getColumnIndex(FIELD_FUNCTION_EXPRESSION);
            do {
                FunctionsDefinitions FD = new FunctionsDefinitions();
                FD.arguments = new ArrayList<String>();
                int id = c.getInt(idColIndex);
                String funcName = c.getString(funcNameColIndex);
                String funcArgs = c.getString(funcArgsColIndex);
                String funcExpr = c.getString(funcExprColIndex);
                Log.d(LOG_TAG, UID + " = " + id + ", " + FIELD_FUNCTION_NAME + " = " + funcName + ", " + FIELD_FUNCTION_ARGUMENTS + " = " + funcArgs + ", " + FIELD_FUNCTION_EXPRESSION + " = " + funcExpr);
                String[] args_mas = funcArgs.split(",");
                FD.expression = funcExpr;
                Collections.addAll(FD.arguments, args_mas);
                funcMap.put(funcName, FD);
            } while (c.moveToNext());
        } else {
            Log.d(LOG_TAG, "0 rows in " + TABLE_FUNCTIONS_NAME);
        }
        c.close();
    }

    /**
     * Finds a function row by name.
     *
     * @param db      readable database
     * @param name    function name to look up
     * @param funcMap optional map to receive the found definition (may be null)
     * @return the row id of the function, or 0 when not found
     */
    public int findFunctionByName(SQLiteDatabase db, String name, HashMap<String, FunctionsDefinitions> funcMap) {
        int id_result = 0;
        Log.d(LOG_TAG, "--- Read from '" + TABLE_FUNCTIONS_NAME + "': ---");
        Cursor c = db.query(TABLE_FUNCTIONS_NAME, null, null, null, null, null, null);
        if (c.moveToFirst()) {
            int idColIndex = c.getColumnIndex(UID);
            int funcNameColIndex = c.getColumnIndex(FIELD_FUNCTION_NAME);
            int funcArgsColIndex = c.getColumnIndex(FIELD_FUNCTION_ARGUMENTS);
            int funcExprColIndex = c.getColumnIndex(FIELD_FUNCTION_EXPRESSION);
            do {
                int id = c.getInt(idColIndex);
                String funcName = c.getString(funcNameColIndex);
                if (funcName.equals(name)) {
                    id_result = id;
                    FunctionsDefinitions FD = new FunctionsDefinitions();
                    FD.arguments = new ArrayList<String>();
                    String funcArgs = c.getString(funcArgsColIndex);
                    String funcExpr = c.getString(funcExprColIndex);
                    Log.d(LOG_TAG, UID + " = " + id + ", " + FIELD_FUNCTION_NAME + " = " + funcName + ", " + FIELD_FUNCTION_ARGUMENTS + " = " + funcArgs + ", " + FIELD_FUNCTION_EXPRESSION + " = " + funcExpr);
                    String[] args_mas = funcArgs.split(",");
                    FD.expression = funcExpr;
                    Collections.addAll(FD.arguments, args_mas);
                    if (funcMap != null) funcMap.put(funcName, FD);
                }
            } while (c.moveToNext());
        } else {
            Log.d(LOG_TAG, "0 rows in " + TABLE_FUNCTIONS_NAME);
        }
        c.close();
        return id_result;
    }

    /** Deletes all functions and resets the autoincrement counter. */
    public void deleteFunctions(SQLiteDatabase db) {
        int clearCount = db.delete(TABLE_FUNCTIONS_NAME, null, null);
        db.execSQL("DELETE FROM SQLITE_SEQUENCE WHERE NAME = '" + TABLE_FUNCTIONS_NAME + "'"); //Resetting ID
        Log.d(LOG_TAG, "Deleted from '" + TABLE_FUNCTIONS_NAME + "': " + clearCount);
    }

    /**
     * Inserts every array as a row (name, semicolon-joined values).
     *
     * @param db     writable database
     * @param arrays arrays to store
     */
    public void insertArrays(SQLiteDatabase db, ArrayList<Array> arrays) {
        Log.d(LOG_TAG, "--- Insert in '" + TABLE_ARRAYS_NAME + "': ---");
        ContentValues cv = new ContentValues();
        for (int a = 0; a < arrays.size(); a++) {
            String arrayName = arrays.get(a).getName();
            String arrayValues = makeStringFromArray(arrays.get(a).getValues());
            cv.put(FIELD_ARRAY_NAME, arrayName);
            cv.put(FIELD_ARRAY_VALUES, arrayValues);
            long rowID = db.insert(TABLE_ARRAYS_NAME, null, cv);
            Log.d(LOG_TAG, TABLE_ARRAYS_NAME + ": row inserted, ID = " + rowID);
        }
    }

    /**
     * Reads all arrays into the given list.
     *
     * @param db     readable database
     * @param arrays destination list, filled by this method
     */
    public void readArrays(SQLiteDatabase db, ArrayList<Array> arrays) {
        Log.d(LOG_TAG, "--- Read from '" + TABLE_ARRAYS_NAME + "': ---");
        Cursor c = db.query(TABLE_ARRAYS_NAME, null, null, null, null, null, null);
        if (c.moveToFirst()) {
            int idColIndex = c.getColumnIndex(UID);
            int arrayNameColIndex = c.getColumnIndex(FIELD_ARRAY_NAME);
            int arrayValuesColIndex = c.getColumnIndex(FIELD_ARRAY_VALUES);
            do {
                Log.d(LOG_TAG, UID + " = " + c.getInt(idColIndex) + ", " + FIELD_ARRAY_NAME + " = " + c.getString(arrayNameColIndex) + ", " + FIELD_ARRAY_VALUES + " = " + c.getString(arrayValuesColIndex));
                Array array = new Array();
                ArrayList<Double> tempArrayValues = new ArrayList<>();
                String[] values = c.getString(arrayValuesColIndex).split(";");
                StringMasToDoubleList(values, tempArrayValues);
                array.setName(c.getString(arrayNameColIndex));
                array.setValues(tempArrayValues);
                arrays.add(array);
            } while (c.moveToNext());
        } else {
            Log.d(LOG_TAG, "0 rows");
        }
        c.close();
    }

    /**
     * Finds an array row by name.
     *
     * @param db    readable database
     * @param name  array name to look up
     * @param array optional destination for the found name/values (may be null)
     * @return the row id of the array, or 0 when not found
     */
    public int findArrayByName(SQLiteDatabase db, String name, Array array) {
        int id_result = 0;
        Log.d(LOG_TAG, "--- Read from '" + TABLE_ARRAYS_NAME + "': ---");
        Cursor c = db.query(TABLE_ARRAYS_NAME, null, null, null, null, null, null);
        if (c.moveToFirst()) {
            int idColIndex = c.getColumnIndex(UID);
            int arrayNameColIndex = c.getColumnIndex(FIELD_ARRAY_NAME);
            int arrayValuesColIndex = c.getColumnIndex(FIELD_ARRAY_VALUES);
            do {
                int id = c.getInt(idColIndex);
                String arrayName = c.getString(arrayNameColIndex);
                if (arrayName.equals(name)) {
                    Log.d(LOG_TAG, UID + " = " + c.getInt(idColIndex) + ", " + FIELD_ARRAY_NAME + " = " + c.getString(arrayNameColIndex) + ", " + FIELD_ARRAY_VALUES + " = " + c.getString(arrayValuesColIndex));
                    id_result = id;
                    if (array != null) {
                        array.setName(c.getString(arrayNameColIndex));
                        array.setValuesString(c.getString(arrayValuesColIndex));
                    }
                    break;
                }
            } while (c.moveToNext());
        } else {
            Log.d(LOG_TAG, "0 rows");
        }
        c.close();
        return id_result;
    }

    /** Deletes all arrays and resets the autoincrement counter. */
    public void deleteArrays(SQLiteDatabase db) {
        int clearCount = db.delete(TABLE_ARRAYS_NAME, null, null);
        db.execSQL("DELETE FROM SQLITE_SEQUENCE WHERE NAME = '" + TABLE_ARRAYS_NAME + "'"); //Resetting ID
        Log.d(LOG_TAG, "Deleted from '" + TABLE_ARRAYS_NAME + "': " + clearCount);
    }

    /** Clears both the history and the expressions tables and resets their ids. */
    public void deleteHistoryAndExpressions(SQLiteDatabase db) {
        int clearCount = db.delete(TABLE_HISTORY_NAME, null, null);
        int clearExprCount = db.delete(TABLE_EXPRESSIONS_NAME, null, null);
        db.execSQL("DELETE FROM SQLITE_SEQUENCE WHERE NAME = '" + TABLE_HISTORY_NAME + "'");
        db.execSQL("DELETE FROM SQLITE_SEQUENCE WHERE NAME = '" + TABLE_EXPRESSIONS_NAME + "'");
        Log.d(LOG_TAG, "Deleted from '" + TABLE_HISTORY_NAME + "': " + clearCount);
        Log.d(LOG_TAG, "Deleted from '" + TABLE_EXPRESSIONS_NAME + "': " + clearExprCount);
    }

    /**
     * Inserts one history item and the matching expression item; the two
     * tables are kept in lock-step (one row each per calculation).
     */
    public void insertHistory(SQLiteDatabase db, String history_item, String expressions_item) {
        ContentValues cv_history = new ContentValues();
        ContentValues cv_expressions = new ContentValues();
        cv_history.put(FIELD_HISTORY_ITEM, history_item);
        cv_expressions.put(FIELD_EXPRESSIONS_ITEM, expressions_item);
        long history_rowID = db.insert(TABLE_HISTORY_NAME, null, cv_history);
        long expressions_rowID = db.insert(TABLE_EXPRESSIONS_NAME, null, cv_expressions);
        Log.d(LOG_TAG, "row inserted in " + TABLE_HISTORY_NAME + ": ID = " + history_rowID);
        Log.d(LOG_TAG, "row inserted in " + TABLE_EXPRESSIONS_NAME + ": ID = " + expressions_rowID);
    }

    /**
     * Reads history and expressions into the given lists, newest first
     * (cursors are walked from the last row backwards).
     * Both lists are cleared before being filled.
     */
    public void readHistory(SQLiteDatabase db, ArrayList<String> history_list, ArrayList<String> expressions_list) {
        history_list.clear();
        expressions_list.clear();
        Cursor c_history = db.query(TABLE_HISTORY_NAME, null, null, null, null, null, null);
        Cursor c_expressions = db.query(TABLE_EXPRESSIONS_NAME, null, null, null, null, null, null);
        try {
            //for history:
            if (c_history.moveToLast()) {
                int idCI = c_history.getColumnIndex(UID);
                int historyItemCI = c_history.getColumnIndex(FIELD_HISTORY_ITEM);
                do {
                    Log.d(LOG_TAG, "reading history: ID = " + c_history.getInt(idCI) + "; history_item = " + c_history.getString(historyItemCI));
                    history_list.add(c_history.getString(historyItemCI));
                } while (c_history.moveToPrevious());
            } else Log.d(LOG_TAG, "0 rows in " + TABLE_HISTORY_NAME);
            //for expressions:
            if (c_expressions.moveToLast()) {
                int idCI = c_expressions.getColumnIndex(UID);
                int expressionsItemCI = c_expressions.getColumnIndex(FIELD_EXPRESSIONS_ITEM);
                do {
                    Log.d(LOG_TAG, "reading expressions: ID = " + c_expressions.getInt(idCI) + "; expressions_item = " + c_expressions.getString(expressionsItemCI));
                    expressions_list.add(c_expressions.getString(expressionsItemCI));
                } while (c_expressions.moveToPrevious());
            } else Log.d(LOG_TAG, "0 rows in " + TABLE_EXPRESSIONS_NAME);
        } finally {
            // Fix: these cursors were previously never closed (resource leak).
            c_history.close();
            c_expressions.close();
        }
    }

    /**
     * Returns the history item stored under the given row id, or "" when
     * no such row exists.
     */
    public String findHistoryByID(SQLiteDatabase db, int needed_id) {
        String result = "";
        Log.d(LOG_TAG, "--- Read from '" + TABLE_HISTORY_NAME + "', find history by ID: ---");
        Cursor c = db.query(TABLE_HISTORY_NAME, null, null, null, null, null, null);
        if (c.moveToFirst()) {
            int idColIndex = c.getColumnIndex(UID);
            int historyItemColIndex = c.getColumnIndex(FIELD_HISTORY_ITEM);
            do {
                int id = c.getInt(idColIndex);
                if (id == needed_id) {
                    Log.d(LOG_TAG, "IN DATABASE: " + UID + " = " + id + ", " + FIELD_HISTORY_ITEM + " = " + c.getString(historyItemColIndex));
                    result = c.getString(historyItemColIndex);
                    break;
                }
            } while (c.moveToNext());
        }
        c.close();
        return result;
    }

    /**
     * Finds a history row whose text equals {@code item}.
     *
     * @return the row id, or 0 when not found
     */
    public int findHistoryByName(SQLiteDatabase db, String item) {
        int id_result = 0;
        Log.d(LOG_TAG, "--- Read from '" + TABLE_HISTORY_NAME + "': ---");
        Cursor c = db.query(TABLE_HISTORY_NAME, null, null, null, null, null, null);
        if (c.moveToFirst()) {
            int idColIndex = c.getColumnIndex(UID);
            int historyItemColIndex = c.getColumnIndex(FIELD_HISTORY_ITEM);
            do {
                int id = c.getInt(idColIndex);
                String historyItem = c.getString(historyItemColIndex);
                if (historyItem.equals(item)) {
                    Log.d(LOG_TAG, UID + " = " + c.getInt(idColIndex) + ", " + FIELD_HISTORY_ITEM + " = " + c.getString(historyItemColIndex));
                    id_result = id;
                    break;
                }
            } while (c.moveToNext());
        } else {
            Log.d(LOG_TAG, "0 rows");
        }
        c.close();
        return id_result;
    }

    /** Updates the variable row with the given id. */
    public void updateVariables(SQLiteDatabase db, int id, String name, Double value) {
        ContentValues cv = new ContentValues();
        cv.put(FIELD_VARIABLE_NAME, name);
        cv.put(FIELD_VARIABLE_VALUE, value);
        int updCount = db.update(TABLE_VARIABLES_NAME, cv, UID + " = ?", new String[] {String.valueOf(id)});
        Log.d(LOG_TAG, "UPDATE_VARIABLES: updated rows count = " + updCount);
    }

    /** Updates the function row with the given id. */
    public void updateFunctions(SQLiteDatabase db, int id, String name, String args, String expr) {
        ContentValues cv = new ContentValues();
        cv.put(FIELD_FUNCTION_NAME, name);
        cv.put(FIELD_FUNCTION_ARGUMENTS, args);
        cv.put(FIELD_FUNCTION_EXPRESSION, expr);
        int updCount = db.update(TABLE_FUNCTIONS_NAME, cv, UID + " = ?", new String[] {String.valueOf(id)});
        Log.d(LOG_TAG, "UPDATE_FUNCTIONS: updated rows count = " + updCount);
    }

    /** Updates the array row with the given id. */
    public void updateArrays(SQLiteDatabase db, int id, String name, String values) {
        ContentValues cv = new ContentValues();
        cv.put(FIELD_ARRAY_NAME, name);
        cv.put(FIELD_ARRAY_VALUES, values);
        int updCount = db.update(TABLE_ARRAYS_NAME, cv, UID + " = ?", new String[] {String.valueOf(id)});
        Log.d(LOG_TAG, "UPDATE_ARRAYS: updated rows count = " + updCount);
    }

    /** Deletes the variable row with the given id. */
    public void deleteVariable(SQLiteDatabase db, int id) {
        int delCount = db.delete(TABLE_VARIABLES_NAME, UID + " = " + id, null);
        Log.d("myLogs", "deleteVariable: id = " + id + "; count = " + delCount);
    }

    /** Deletes the function row with the given id. */
    public void deleteFunction(SQLiteDatabase db, int id) {
        int delCount = db.delete(TABLE_FUNCTIONS_NAME, UID + " = " + id, null);
        Log.d("myLogs", "deleteFunction: id = " + id + "; count = " + delCount);
    }

    /** Deletes the array row with the given id. */
    public void deleteArray(SQLiteDatabase db, int id) {
        int delCount = db.delete(TABLE_ARRAYS_NAME, UID + " = " + id, null);
        Log.d("myLogs", "deleteArray: id = " + id + "; count = " + delCount);
    }

    /** Deletes the history row with the given id. */
    public void deleteHistoryItem(SQLiteDatabase db, int id) {
        int delCount = db.delete(TABLE_HISTORY_NAME, UID + " = " + id, null);
        Log.d("myLogs", "deleteHistory: id = " + id + "; count = " + delCount);
    }

    /** Returns a new list containing the elements of {@code list} in reverse order. */
    private ArrayList<String> reverse(ArrayList<String> list) {
        ArrayList<String> result = new ArrayList<>(list);
        Collections.reverse(result);
        return result;
    }

    /**
     * Joins the doubles with ';' separators. Returns "" for a null or empty
     * list (the old code threw IndexOutOfBoundsException on an empty list).
     */
    private String makeStringFromArray(ArrayList<Double> L) {
        if (L == null || L.isEmpty()) return "";
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < L.size(); i++) {
            if (i > 0) sb.append(";");
            sb.append(L.get(i));
        }
        return sb.toString();
    }

    /**
     * Parses every string of {@code mas} as a double and appends it to {@code F}.
     * NOTE(review): propagates NumberFormatException on malformed input —
     * callers pass values this class itself serialized, so input is trusted.
     */
    public void StringMasToDoubleList(String[] mas, ArrayList<Double> F) {
        for (int i = 0; i < mas.length; i++) {
            F.add(Double.parseDouble(mas[i]));
        }
    }
}
| |
package cz.metacentrum.perun.wui.registrar.widgets.items;
import com.google.gwt.event.dom.client.BlurEvent;
import com.google.gwt.event.dom.client.BlurHandler;
import com.google.gwt.event.dom.client.ChangeEvent;
import com.google.gwt.event.dom.client.ChangeHandler;
import com.google.gwt.event.logical.shared.ValueChangeEvent;
import com.google.gwt.event.logical.shared.ValueChangeHandler;
import com.google.gwt.user.client.ui.Widget;
import cz.metacentrum.perun.wui.json.Events;
import cz.metacentrum.perun.wui.model.beans.ApplicationFormItemData;
import cz.metacentrum.perun.wui.registrar.widgets.PerunForm;
import cz.metacentrum.perun.wui.registrar.widgets.Select;
import cz.metacentrum.perun.wui.registrar.widgets.items.validators.PerunFormItemValidator;
import cz.metacentrum.perun.wui.registrar.widgets.items.validators.ValidatedEmailValidator;
import cz.metacentrum.perun.wui.widgets.boxes.ExtendedTextBox;
import org.gwtbootstrap3.client.ui.InputGroup;
import org.gwtbootstrap3.client.ui.InputGroupAddon;
import org.gwtbootstrap3.client.ui.InputGroupButton;
import org.gwtbootstrap3.client.ui.constants.IconType;
import org.gwtbootstrap3.client.ui.html.Paragraph;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashSet;
import java.util.List;
/**
 * Represents a form field for e-mail addresses with pre-filled options.
 * The value must be in a correct e-mail format.
 *
 * @author Ondrej Velisek <ondrejvelisek@gmail.com>
 */
public class ValidatedEmail extends PerunFormItemEditable {

    /** Maximum number of characters accepted by the text box. */
    public final static int MAX_LENGTH = 1024;
    /** Select-item value meaning "user enters a custom e-mail". */
    public final static String CUSTOM_ID = "custom";

    private final ValidatedEmailValidator validator;
    private InputGroup widget;

    @Override
    public void setEnabled(boolean enabled) {
        if (getTextBox() != null) {
            getTextBox().setEnabled(enabled);
        }
        if (getSelect() != null) {
            getSelect().setEnabled(enabled);
        }
    }

    public ValidatedEmail(PerunForm form, ApplicationFormItemData item, String lang) {
        super(form, item, lang);
        this.validator = new ValidatedEmailValidator();
        if (PerunForm.FormState.EDIT.equals(getForm().getFormState())) {
            // make sure we consider current value as verified based on LoA when editing form
            item.setPrefilledValue(item.getValue());
        }
    }

    /** Builds the editable widget: an envelope addon plus a text box. */
    protected Widget initWidget() {
        widget = new InputGroup();
        InputGroupAddon addon = new InputGroupAddon();
        addon.setIcon(IconType.ENVELOPE);
        final ExtendedTextBox box = new ExtendedTextBox();
        box.setMaxLength(MAX_LENGTH);
        widget.add(addon);
        widget.add(box);
        if (getItemData().getFormItem().getRegex() != null) {
            box.setRegex(getItemData().getFormItem().getRegex());
        }
        return widget;
    }

    @Override
    public void validate(Events<Boolean> events) {
        validator.validate(this, events);
    }

    @Override
    public boolean validateLocal() {
        return validator.validateLocal(this);
    }

    @Override
    public PerunFormItemValidator.Result getLastValidationResult() {
        return validator.getLastResult();
    }

    @Override
    public boolean focus() {
        if (isOnlyPreview()) {
            return false;
        }
        getTextBox().setFocus(true);
        return true;
    }

    /** Builds the read-only widget: an envelope addon plus a styled paragraph. */
    @Override
    public Widget initWidgetOnlyPreview() {
        InputGroupAddon addon = new InputGroupAddon();
        addon.setIcon(IconType.ENVELOPE);
        Paragraph box = new Paragraph();
        box.addStyleName("form-control");
        widget = new InputGroup();
        widget.add(addon);
        widget.add(box);
        return widget;
    }

    /** Wires blur/value-change handlers that trigger local validation. */
    @Override
    public void setValidationTriggers() {
        if (isOnlyPreview()) {
            return;
        }
        getTextBox().addBlurHandler(new BlurHandler() {
            @Override
            public void onBlur(BlurEvent event) {
                validateLocal();
            }
        });
        getTextBox().addValueChangeHandler(new ValueChangeHandler<String>() {
            private boolean first = true;
            @Override
            public void onValueChange(ValueChangeEvent<String> event) {
                // Skip the very first change fired when switching to the empty
                // "custom" option, so the user is not shown an error immediately.
                if (first && isCustomSelected() && getValue().isEmpty()) {
                    first = false;
                    return;
                }
                validateLocal();
            }
        });
    }

    @Override
    public String getValue() {
        if (isOnlyPreview()) {
            return getPreview().getText();
        }
        return getTextBox().getValue();
    }

    @Override
    public InputGroup getWidget() {
        return widget;
    }

    /**
     * Sets the (possibly multi-valued, ';'-separated) pre-filled value.
     * In preview mode only the first value is shown; otherwise the first
     * value goes to the text box and all unique values plus a "custom"
     * entry are offered in a dropdown.
     */
    @Override
    protected void setValueImpl(String value) {
        if (value.isEmpty()) {
            return;
        }
        if (isOnlyPreview()) {
            // preview should contain only one value
            getPreview().setText(value.split(";")[0]);
            return;
        }
        List<String> prefilledMails = Arrays.asList(value.split(";"));
        // remove duplicates while preserving order
        prefilledMails = new ArrayList<>(new LinkedHashSet<>(prefilledMails));
        getTextBox().setValue(prefilledMails.get(0));
        // NOTE: a second isOnlyPreview() early-return used to sit here; it was
        // unreachable because the preview case already returned above.
        final Select emailSelect = new Select();
        emailSelect.addStyleName("emailFormItem");
        emailSelect.setWidth("38px");
        for (String val : prefilledMails) {
            emailSelect.addItem(val, val);
        }
        emailSelect.addItem(getTranslation().customValueEmail(), CUSTOM_ID);
        emailSelect.addChangeHandler(new ChangeHandler() {
            // last text the user typed while the "custom" option was active
            private String customValueStore = "";
            private boolean customSelected = false;
            @Override
            public void onChange(ChangeEvent event) {
                if (emailSelect.getValue(emailSelect.getSelectedIndex()).equals(CUSTOM_ID)) {
                    getTextBox().setValue(customValueStore);
                    getTextBox().setFocus(true);
                    customSelected = true;
                } else {
                    if (customSelected) {
                        // remember the custom text before overwriting it
                        customValueStore = getTextBox().getValue();
                    }
                    customSelected = false;
                    getTextBox().setValue(emailSelect.getSelectedValue());
                }
                ValueChangeEvent.fire(getTextBox(), getTextBox().getValue());
            }
        });
        InputGroupButton dropdown = new InputGroupButton();
        dropdown.add(emailSelect);
        emailSelect.refresh();
        widget.add(dropdown);
    }

    @Override
    protected void onAttach() {
        super.onAttach();
        if (!isOnlyPreview()) {
            // Selectpicker widget has to have proper form class too, make sure it's not null
            if (getSelect() != null &&
                    getSelect().getElement().getPreviousSiblingElement() != null &&
                    getSelect().getElement().getPreviousSiblingElement().getPreviousSiblingElement() != null) {
                getSelect().getElement().getPreviousSiblingElement().getPreviousSiblingElement().addClassName("form-control");
            }
        }
    }

    /** Returns the text box child of the widget, or null when absent. */
    public ExtendedTextBox getTextBox() {
        for (Widget box : getWidget()) {
            if (box instanceof ExtendedTextBox) {
                return (ExtendedTextBox) box;
            }
        }
        return null;
    }

    /** Returns the e-mail dropdown child of the widget, or null when absent. */
    public Select getSelect() {
        for (Widget box : getWidget()) {
            if (box instanceof InputGroupButton) {
                for (Widget select : (InputGroupButton) box) {
                    if (select instanceof Select) {
                        return (Select) select;
                    }
                }
            }
        }
        return null;
    }

    /** Returns the preview paragraph child of the widget, or null when absent. */
    public Paragraph getPreview() {
        for (Widget par : getWidget()) {
            if (par instanceof Paragraph) {
                return (Paragraph) par;
            }
        }
        return null;
    }

    /** True when the dropdown exists and the "custom" option is selected. */
    private boolean isCustomSelected() {
        if (getSelect() != null) return getSelect().getSelectedValue().equals(CUSTOM_ID);
        return false;
    }
}
| |
/*
* Copyright (c) 2000-2005 Regents of the University of California.
* All rights reserved.
*
* This software was developed at the University of California, Irvine.
*
* Redistribution and use in source and binary forms are permitted
* provided that the above copyright notice and this paragraph are
* duplicated in all such forms and that any documentation,
* advertising materials, and other materials related to such
* distribution and use acknowledge that the software was developed
* by the University of California, Irvine. The name of the
* University may not be used to endorse or promote products derived
* from this software without specific prior written permission.
* THIS SOFTWARE IS PROVIDED ``AS IS'' AND WITHOUT ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, WITHOUT LIMITATION, THE IMPLIED
* WARRANTIES OF MERCHANTIBILITY AND FITNESS FOR A PARTICULAR PURPOSE.
*/
package edu.uci.isr.xarch.variants;
import java.util.*;
import edu.uci.isr.xarch.*;
import org.w3c.dom.*;
import edu.uci.isr.xarch.IXArch;
import edu.uci.isr.xarch.IXArchContext;
/**
* The context object for the variants package.
* This object is used to create objects that are used
* in the variants namespace.
*
* @author Automatically Generated by xArch apigen
*/
public class VariantsContext implements IVariantsContext {
protected static final String DEFAULT_ELT_NAME = "anonymousInstanceTag";
protected Document doc;
protected IXArch xArch;
/**
 * Create a new VariantsContext for the given IXArch object.
 *
 * @param xArch XArch object to contextualize in this namespace; must be DOM-based.
 * @throws IllegalArgumentException if {@code xArch} is not {@link DOMBased}.
 */
public VariantsContext(IXArch xArch){
    if(!(xArch instanceof DOMBased)){
        throw new IllegalArgumentException("Cannot process non-DOM based xArch entities.");
    }
    Node rootNode = ((DOMBased)xArch).getDOMNode();
    // All DOM access is guarded by the document's shared lock.
    synchronized(DOMUtils.getDOMLock(rootNode)){
        this.doc = rootNode.getOwnerDocument();
        xArch.addSchemaLocation("http://www.ics.uci.edu/pub/arch/xArch/variants.xsd", "http://www.ics.uci.edu/pub/arch/xArch/variants.xsd");
        this.xArch = xArch;
    }
}
/**
 * Returns the IXArch object this context was created for.
 *
 * @return The contextualized IXArch object.
 */
public IXArch getXArch(){
    return xArch;
}
/**
 * Creates a new DOM element with the given name in the variants namespace,
 * holding the document's DOM lock for the duration of the call.
 *
 * @param name Local name of the element to create.
 * @return Newly created, namespace-qualified element.
 */
protected Element createElement(String name){
    synchronized(DOMUtils.getDOMLock(doc)){
        return doc.createElementNS(VariantsConstants.NS_URI, name);
    }
}
/**
 * Returns the type metadata describing this context's namespace.
 *
 * @return Shared type metadata constant from {@link IVariantsContext}.
 */
public XArchTypeMetadata getTypeMetadata(){
    return IVariantsContext.TYPE_METADATA;
}
/**
 * Create an IVariant object in this namespace.
 *
 * @return New IVariant object bound to this context's xArch.
 */
public IVariant createVariant(){
    final Element element = createElement(DEFAULT_ELT_NAME);
    // Tag the anonymous element with its XML Schema type.
    DOMUtils.addXSIType(element, VariantImpl.XSD_TYPE_NSURI, VariantImpl.XSD_TYPE_NAME);
    final VariantImpl variant = new VariantImpl(element);
    variant.setXArch(getXArch());
    return variant;
}
/**
 * Brings an IVariant object created in another context into this context.
 *
 * @param value Object to recontextualize; must be DOM-based.
 * @return <code>value</code> object in this namespace.
 * @throws IllegalArgumentException if {@code value} is not {@link DOMBased}.
 */
public IVariant recontextualizeVariant(IVariant value){
    if(!(value instanceof DOMBased)){
        throw new IllegalArgumentException("Cannot process non-DOM based xArch entities.");
    }
    final DOMBased domValue = (DOMBased)value;
    Element element = (Element)domValue.getDOMNode();
    // Clone the node into this context's document under the variants namespace.
    element = DOMUtils.cloneAndRename(element, doc, VariantsConstants.NS_URI, element.getLocalName());
    domValue.setDOMNode(element);
    ((IXArchElement)value).setXArch(getXArch());
    return value;
}
/**
 * Create an edu.uci.isr.xarch.options.IGuard object in this namespace.
 *
 * @return New edu.uci.isr.xarch.options.IGuard object bound to this context's xArch.
 */
public edu.uci.isr.xarch.options.IGuard createGuard(){
    final Element element = createElement(DEFAULT_ELT_NAME);
    // Tag the anonymous element with its XML Schema type.
    DOMUtils.addXSIType(element, edu.uci.isr.xarch.options.GuardImpl.XSD_TYPE_NSURI, edu.uci.isr.xarch.options.GuardImpl.XSD_TYPE_NAME);
    final edu.uci.isr.xarch.options.GuardImpl guard = new edu.uci.isr.xarch.options.GuardImpl(element);
    guard.setXArch(getXArch());
    return guard;
}
/**
 * Brings an edu.uci.isr.xarch.options.IGuard object created in another
 * context into this context.
 *
 * @param value Object to recontextualize; must be DOM-based.
 * @return <code>value</code> object in this namespace.
 * @throws IllegalArgumentException if {@code value} is not {@link DOMBased}.
 */
public edu.uci.isr.xarch.options.IGuard recontextualizeGuard(edu.uci.isr.xarch.options.IGuard value){
    if(!(value instanceof DOMBased)){
        throw new IllegalArgumentException("Cannot process non-DOM based xArch entities.");
    }
    final DOMBased domValue = (DOMBased)value;
    Element element = (Element)domValue.getDOMNode();
    // Clone the node into this context's document under the variants namespace.
    element = DOMUtils.cloneAndRename(element, doc, VariantsConstants.NS_URI, element.getLocalName());
    domValue.setDOMNode(element);
    ((IXArchElement)value).setXArch(getXArch());
    return value;
}
/**
* Create an edu.uci.isr.xarch.instance.IXMLLink object in this namespace.
* @return New edu.uci.isr.xarch.instance.IXMLLink object.
*/
public edu.uci.isr.xarch.instance.IXMLLink createXMLLink(){
	// Fresh DOM element typed as instance:XMLLink via xsi:type.
	Element linkElt = createElement(DEFAULT_ELT_NAME);
	DOMUtils.addXSIType(linkElt,
		edu.uci.isr.xarch.instance.XMLLinkImpl.XSD_TYPE_NSURI,
		edu.uci.isr.xarch.instance.XMLLinkImpl.XSD_TYPE_NAME);
	// Wrap the element and associate the wrapper with this xArch document.
	edu.uci.isr.xarch.instance.XMLLinkImpl link = new edu.uci.isr.xarch.instance.XMLLinkImpl(linkElt);
	link.setXArch(getXArch());
	return link;
}
/**
* Brings an edu.uci.isr.xarch.instance.IXMLLink object created in another
* context into this context.
* @param value Object to recontextualize.
* @return <code>value</code> object in this namespace.
*/
public edu.uci.isr.xarch.instance.IXMLLink recontextualizeXMLLink(edu.uci.isr.xarch.instance.IXMLLink value){
// Only DOM-backed implementations can be moved between documents.
if(!(value instanceof DOMBased)){
throw new IllegalArgumentException("Cannot process non-DOM based xArch entities.");
}
Element elt = (Element)((DOMBased)value).getDOMNode();
// Clone the node into this context's document under this namespace,
// keeping the local name — re-homes the element into this context.
elt = DOMUtils.cloneAndRename(elt, doc, VariantsConstants.NS_URI, elt.getLocalName());
//elt = DOMUtils.cloneAndRename(elt, VariantsConstants.NS_URI, elt.getTagName());
//Removed next line because it causes an illegal character DOM exception
//elt.setPrefix(null);
// Repoint the existing wrapper at the relocated node and rebind its xArch.
((DOMBased)value).setDOMNode(elt);
((IXArchElement)value).setXArch(this.getXArch());
return value;
}
/**
* Create an IVariantComponentType object in this namespace.
* @return New IVariantComponentType object.
*/
public IVariantComponentType createVariantComponentType(){
	// Fresh DOM element tagged with the VariantComponentType xsi:type.
	Element typeElt = createElement(DEFAULT_ELT_NAME);
	DOMUtils.addXSIType(typeElt,
		VariantComponentTypeImpl.XSD_TYPE_NSURI,
		VariantComponentTypeImpl.XSD_TYPE_NAME);
	// Wrap the element and bind the wrapper to this xArch document.
	VariantComponentTypeImpl variantType = new VariantComponentTypeImpl(typeElt);
	variantType.setXArch(getXArch());
	return variantType;
}
/**
* Brings an IVariantComponentType object created in another
* context into this context.
* @param value Object to recontextualize.
* @return <code>value</code> object in this namespace.
*/
public IVariantComponentType recontextualizeVariantComponentType(IVariantComponentType value){
// Only DOM-backed implementations can be moved between documents.
if(!(value instanceof DOMBased)){
throw new IllegalArgumentException("Cannot process non-DOM based xArch entities.");
}
Element elt = (Element)((DOMBased)value).getDOMNode();
// Clone the node into this context's document under this namespace,
// keeping the local name — re-homes the element into this context.
elt = DOMUtils.cloneAndRename(elt, doc, VariantsConstants.NS_URI, elt.getLocalName());
//elt = DOMUtils.cloneAndRename(elt, VariantsConstants.NS_URI, elt.getTagName());
//Removed next line because it causes an illegal character DOM exception
//elt.setPrefix(null);
// Repoint the existing wrapper at the relocated node and rebind its xArch.
((DOMBased)value).setDOMNode(elt);
((IXArchElement)value).setXArch(this.getXArch());
return value;
}
/**
* Promote an object of type <code>edu.uci.isr.xarch.types.IComponentType</code>
* to one of type <code>IVariantComponentType</code>. xArch APIs
* are structured in such a way that a regular cast is not possible
* to change interface types, so casting must be done through these
* promotion functions. If the <code>edu.uci.isr.xarch.types.IComponentType</code>
* object wraps a DOM element that is the base type, then the
* <code>xsi:type</code> of the base element is promoted. Otherwise,
* it is left unchanged.
*
* This function also emits an <CODE>XArchEvent</CODE> with type
 * PROMOTE_TYPE. The source for this event is the pre-promoted
* IXArchElement object (should no longer be used), and the
* target is the post-promotion object. The target name is
* the name of the interface class that was the target of the
* promotion.
*
* @param value Object to promote.
* @return Promoted object.
*/
public IVariantComponentType promoteToVariantComponentType(
edu.uci.isr.xarch.types.IComponentType value){
// Promotion operates directly on the wrapped DOM node.
if(!(value instanceof DOMBased)){
throw new IllegalArgumentException("Cannot process non-DOM based xArch entities.");
}
Element elt = (Element)((DOMBased)value).getDOMNode();
// Rewrite xsi:type only if the element still carries the base
// ComponentType type; an already more-derived type is left unchanged.
if(DOMUtils.hasXSIType(elt,
edu.uci.isr.xarch.types.ComponentTypeImpl.XSD_TYPE_NSURI,
edu.uci.isr.xarch.types.ComponentTypeImpl.XSD_TYPE_NAME)){
DOMUtils.addXSIType(elt, VariantComponentTypeImpl.XSD_TYPE_NSURI,
VariantComponentTypeImpl.XSD_TYPE_NAME);
}
// Wrap the same DOM node with the promoted interface implementation.
VariantComponentTypeImpl newElt = new VariantComponentTypeImpl(elt);
newElt.setXArch(this.getXArch());
// Notify listeners: source is the pre-promotion wrapper (now stale),
// target is the freshly promoted wrapper.
xArch.fireXArchEvent(
new XArchEvent(value,
XArchEvent.PROMOTE_EVENT,
XArchEvent.ELEMENT_CHANGED,
IVariantComponentType.class.getName(), newElt,
XArchUtils.getDefaultXArchImplementation().isContainedIn(xArch, newElt))
);
return newElt;
}
/**
* Create an IVariantConnectorType object in this namespace.
* @return New IVariantConnectorType object.
*/
public IVariantConnectorType createVariantConnectorType(){
	// Fresh DOM element tagged with the VariantConnectorType xsi:type.
	Element typeElt = createElement(DEFAULT_ELT_NAME);
	DOMUtils.addXSIType(typeElt,
		VariantConnectorTypeImpl.XSD_TYPE_NSURI,
		VariantConnectorTypeImpl.XSD_TYPE_NAME);
	// Wrap the element and bind the wrapper to this xArch document.
	VariantConnectorTypeImpl variantType = new VariantConnectorTypeImpl(typeElt);
	variantType.setXArch(getXArch());
	return variantType;
}
/**
* Brings an IVariantConnectorType object created in another
* context into this context.
* @param value Object to recontextualize.
* @return <code>value</code> object in this namespace.
*/
public IVariantConnectorType recontextualizeVariantConnectorType(IVariantConnectorType value){
// Only DOM-backed implementations can be moved between documents.
if(!(value instanceof DOMBased)){
throw new IllegalArgumentException("Cannot process non-DOM based xArch entities.");
}
Element elt = (Element)((DOMBased)value).getDOMNode();
// Clone the node into this context's document under this namespace,
// keeping the local name — re-homes the element into this context.
elt = DOMUtils.cloneAndRename(elt, doc, VariantsConstants.NS_URI, elt.getLocalName());
//elt = DOMUtils.cloneAndRename(elt, VariantsConstants.NS_URI, elt.getTagName());
//Removed next line because it causes an illegal character DOM exception
//elt.setPrefix(null);
// Repoint the existing wrapper at the relocated node and rebind its xArch.
((DOMBased)value).setDOMNode(elt);
((IXArchElement)value).setXArch(this.getXArch());
return value;
}
/**
* Promote an object of type <code>edu.uci.isr.xarch.types.IConnectorType</code>
* to one of type <code>IVariantConnectorType</code>. xArch APIs
* are structured in such a way that a regular cast is not possible
* to change interface types, so casting must be done through these
* promotion functions. If the <code>edu.uci.isr.xarch.types.IConnectorType</code>
* object wraps a DOM element that is the base type, then the
* <code>xsi:type</code> of the base element is promoted. Otherwise,
* it is left unchanged.
*
* This function also emits an <CODE>XArchEvent</CODE> with type
 * PROMOTE_TYPE. The source for this event is the pre-promoted
* IXArchElement object (should no longer be used), and the
* target is the post-promotion object. The target name is
* the name of the interface class that was the target of the
* promotion.
*
* @param value Object to promote.
* @return Promoted object.
*/
public IVariantConnectorType promoteToVariantConnectorType(
edu.uci.isr.xarch.types.IConnectorType value){
// Promotion operates directly on the wrapped DOM node.
if(!(value instanceof DOMBased)){
throw new IllegalArgumentException("Cannot process non-DOM based xArch entities.");
}
Element elt = (Element)((DOMBased)value).getDOMNode();
// Rewrite xsi:type only if the element still carries the base
// ConnectorType type; an already more-derived type is left unchanged.
if(DOMUtils.hasXSIType(elt,
edu.uci.isr.xarch.types.ConnectorTypeImpl.XSD_TYPE_NSURI,
edu.uci.isr.xarch.types.ConnectorTypeImpl.XSD_TYPE_NAME)){
DOMUtils.addXSIType(elt, VariantConnectorTypeImpl.XSD_TYPE_NSURI,
VariantConnectorTypeImpl.XSD_TYPE_NAME);
}
// Wrap the same DOM node with the promoted interface implementation.
VariantConnectorTypeImpl newElt = new VariantConnectorTypeImpl(elt);
newElt.setXArch(this.getXArch());
// Notify listeners: source is the pre-promotion wrapper (now stale),
// target is the freshly promoted wrapper.
xArch.fireXArchEvent(
new XArchEvent(value,
XArchEvent.PROMOTE_EVENT,
XArchEvent.ELEMENT_CHANGED,
IVariantConnectorType.class.getName(), newElt,
XArchUtils.getDefaultXArchImplementation().isContainedIn(xArch, newElt))
);
return newElt;
}
}
| |
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
/**
* DescribePlacementGroupsResponse.java
*
* This file was auto-generated from WSDL
* by the Apache Axis2 version: 1.5.1 Built on : Oct 19, 2009 (10:59:34 EDT)
*/
package com.amazon.ec2;
/**
* DescribePlacementGroupsResponse bean class
*/
public class DescribePlacementGroupsResponse
implements org.apache.axis2.databinding.ADBBean{
// Qualified name of the XML element this bean binds to.
public static final javax.xml.namespace.QName MY_QNAME = new javax.xml.namespace.QName(
"http://ec2.amazonaws.com/doc/2010-11-15/",
"DescribePlacementGroupsResponse",
"ns1");
// Returns the well-known "ns1" prefix for the EC2 namespace; otherwise a
// generated unique prefix.
private static java.lang.String generatePrefix(java.lang.String namespace) {
if(namespace.equals("http://ec2.amazonaws.com/doc/2010-11-15/")){
return "ns1";
}
return org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
}
/**
* field for DescribePlacementGroupsResponse
*/
protected com.amazon.ec2.DescribePlacementGroupsResponseType localDescribePlacementGroupsResponse ;
/**
* Auto generated getter method
* @return com.amazon.ec2.DescribePlacementGroupsResponseType
*/
public com.amazon.ec2.DescribePlacementGroupsResponseType getDescribePlacementGroupsResponse(){
return localDescribePlacementGroupsResponse;
}
/**
* Auto generated setter method
* @param param DescribePlacementGroupsResponse
*/
public void setDescribePlacementGroupsResponse(com.amazon.ec2.DescribePlacementGroupsResponseType param){
this.localDescribePlacementGroupsResponse=param;
}
/**
* isReaderMTOMAware
* @return true if the reader supports MTOM
*/
public static boolean isReaderMTOMAware(javax.xml.stream.XMLStreamReader reader) {
boolean isReaderMTOMAware = false;
try{
isReaderMTOMAware = java.lang.Boolean.TRUE.equals(reader.getProperty(org.apache.axiom.om.OMConstants.IS_DATA_HANDLERS_AWARE));
}catch(java.lang.IllegalArgumentException e){
// Reader does not recognize the property; treat as not MTOM-aware.
isReaderMTOMAware = false;
}
return isReaderMTOMAware;
}
/**
* Builds an OMElement backed by a lazy data source; serialization is
* deferred until the element is actually written.
* @param parentQName
* @param factory
* @return org.apache.axiom.om.OMElement
*/
public org.apache.axiom.om.OMElement getOMElement (
final javax.xml.namespace.QName parentQName,
final org.apache.axiom.om.OMFactory factory) throws org.apache.axis2.databinding.ADBException{
org.apache.axiom.om.OMDataSource dataSource =
new org.apache.axis2.databinding.ADBDataSource(this,MY_QNAME){
public void serialize(org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
DescribePlacementGroupsResponse.this.serialize(MY_QNAME,factory,xmlWriter);
}
};
return new org.apache.axiom.om.impl.llom.OMSourcedElementImpl(
MY_QNAME,factory,dataSource);
}
// Convenience overload: serialize without writing an explicit xsi:type.
public void serialize(final javax.xml.namespace.QName parentQName,
final org.apache.axiom.om.OMFactory factory,
org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter)
throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{
serialize(parentQName,factory,xmlWriter,false);
}
// Delegates serialization of the wrapped element to the payload type.
public void serialize(final javax.xml.namespace.QName parentQName,
final org.apache.axiom.om.OMFactory factory,
org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter,
boolean serializeType)
throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{
//We can safely assume an element has only one type associated with it
if (localDescribePlacementGroupsResponse==null){
throw new org.apache.axis2.databinding.ADBException("Property cannot be null!");
}
localDescribePlacementGroupsResponse.serialize(MY_QNAME,factory,xmlWriter);
}
/**
* Util method to write an attribute with the ns prefix
*/
private void writeAttribute(java.lang.String prefix,java.lang.String namespace,java.lang.String attName,
java.lang.String attValue,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException{
if (xmlWriter.getPrefix(namespace) == null) {
xmlWriter.writeNamespace(prefix, namespace);
xmlWriter.setPrefix(prefix, namespace);
}
xmlWriter.writeAttribute(namespace,attName,attValue);
}
/**
* Util method to write an attribute without the ns prefix
*/
private void writeAttribute(java.lang.String namespace,java.lang.String attName,
java.lang.String attValue,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException{
if (namespace.equals(""))
{
xmlWriter.writeAttribute(attName,attValue);
}
else
{
registerPrefix(xmlWriter, namespace);
xmlWriter.writeAttribute(namespace,attName,attValue);
}
}
/**
* Util method to write a QName-valued attribute, registering a prefix
* for the QName's namespace when needed.
*/
private void writeQNameAttribute(java.lang.String namespace, java.lang.String attName,
javax.xml.namespace.QName qname, javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
java.lang.String attributeNamespace = qname.getNamespaceURI();
java.lang.String attributePrefix = xmlWriter.getPrefix(attributeNamespace);
if (attributePrefix == null) {
attributePrefix = registerPrefix(xmlWriter, attributeNamespace);
}
java.lang.String attributeValue;
if (attributePrefix.trim().length() > 0) {
attributeValue = attributePrefix + ":" + qname.getLocalPart();
} else {
attributeValue = qname.getLocalPart();
}
if (namespace.equals("")) {
xmlWriter.writeAttribute(attName, attributeValue);
} else {
registerPrefix(xmlWriter, namespace);
xmlWriter.writeAttribute(namespace, attName, attributeValue);
}
}
/**
* method to handle Qnames
*/
private void writeQName(javax.xml.namespace.QName qname,
javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
java.lang.String namespaceURI = qname.getNamespaceURI();
if (namespaceURI != null) {
java.lang.String prefix = xmlWriter.getPrefix(namespaceURI);
if (prefix == null) {
prefix = generatePrefix(namespaceURI);
xmlWriter.writeNamespace(prefix, namespaceURI);
xmlWriter.setPrefix(prefix,namespaceURI);
}
if (prefix.trim().length() > 0){
xmlWriter.writeCharacters(prefix + ":" + org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
} else {
// i.e this is the default namespace
xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
}
} else {
xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
}
}
// Writes a space-separated list of QNames as character data, registering
// namespace prefixes first (namespaces cannot be declared after text).
private void writeQNames(javax.xml.namespace.QName[] qnames,
javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
if (qnames != null) {
// we have to store this data until last moment since it is not possible to write any
// namespace data after writing the character data
java.lang.StringBuffer stringToWrite = new java.lang.StringBuffer();
java.lang.String namespaceURI = null;
java.lang.String prefix = null;
for (int i = 0; i < qnames.length; i++) {
if (i > 0) {
stringToWrite.append(" ");
}
namespaceURI = qnames[i].getNamespaceURI();
if (namespaceURI != null) {
prefix = xmlWriter.getPrefix(namespaceURI);
if ((prefix == null) || (prefix.length() == 0)) {
prefix = generatePrefix(namespaceURI);
xmlWriter.writeNamespace(prefix, namespaceURI);
xmlWriter.setPrefix(prefix,namespaceURI);
}
if (prefix.trim().length() > 0){
stringToWrite.append(prefix).append(":").append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
} else {
stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
}
} else {
stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
}
}
xmlWriter.writeCharacters(stringToWrite.toString());
}
}
/**
* Register a namespace prefix
*/
private java.lang.String registerPrefix(javax.xml.stream.XMLStreamWriter xmlWriter, java.lang.String namespace) throws javax.xml.stream.XMLStreamException {
java.lang.String prefix = xmlWriter.getPrefix(namespace);
if (prefix == null) {
prefix = generatePrefix(namespace);
// Keep generating until we find a prefix not already bound in scope.
while (xmlWriter.getNamespaceContext().getNamespaceURI(prefix) != null) {
prefix = org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
}
xmlWriter.writeNamespace(prefix, namespace);
xmlWriter.setPrefix(prefix, namespace);
}
return prefix;
}
/**
* databinding method to get an XML representation of this object
*
*/
public javax.xml.stream.XMLStreamReader getPullParser(javax.xml.namespace.QName qName)
throws org.apache.axis2.databinding.ADBException{
//We can safely assume an element has only one type associated with it
return localDescribePlacementGroupsResponse.getPullParser(MY_QNAME);
}
/**
* Factory class that keeps the parse method
*/
public static class Factory{
/**
* static method to create the object
* Precondition: If this object is an element, the current or next start element starts this object and any intervening reader events are ignorable
* If this object is not an element, it is a complex type and the reader is at the event just after the outer start element
* Postcondition: If this object is an element, the reader is positioned at its end element
* If this object is a complex type, the reader is positioned at the end element of its outer element
*/
public static DescribePlacementGroupsResponse parse(javax.xml.stream.XMLStreamReader reader) throws java.lang.Exception{
DescribePlacementGroupsResponse object =
new DescribePlacementGroupsResponse();
// NOTE(review): the locals below are unused generator artifacts; kept
// verbatim because this file is auto-generated from WSDL.
int event;
java.lang.String nillableValue = null;
java.lang.String prefix ="";
java.lang.String namespaceuri ="";
try {
// Skip any ignorable events until the first start/end element.
while (!reader.isStartElement() && !reader.isEndElement())
reader.next();
// Note all attributes that were handled. Used to distinguish normal
// attributes from anyAttributes.
java.util.Vector handledAttributes = new java.util.Vector();
while(!reader.isEndElement()) {
if (reader.isStartElement() ){
if (reader.isStartElement() && new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2010-11-15/","DescribePlacementGroupsResponse").equals(reader.getName())){
object.setDescribePlacementGroupsResponse(com.amazon.ec2.DescribePlacementGroupsResponseType.Factory.parse(reader));
} // End of if for expected property start element
else{
// A start element we are not expecting indicates an invalid parameter was passed
throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName());
}
} else {
reader.next();
}
} // end of while loop
} catch (javax.xml.stream.XMLStreamException e) {
throw new java.lang.Exception(e);
}
return object;
}
}//end of factory class
}
| |
/*
* Copyright 2019 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.workbench.common.stunner.bpmn.client.forms.widgets;
import com.google.gwt.core.client.GWT;
import com.google.gwt.event.dom.client.BlurEvent;
import com.google.gwt.event.dom.client.BlurHandler;
import com.google.gwt.event.dom.client.KeyCodes;
import com.google.gwt.event.dom.client.KeyPressEvent;
import com.google.gwt.event.dom.client.KeyPressHandler;
import com.google.gwtmockito.GwtMockito;
import org.junit.Before;
import org.junit.Test;
import org.kie.workbench.common.stunner.bpmn.client.forms.util.StringUtils;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.Mock;
import static org.junit.Assert.assertEquals;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyBoolean;
import static org.mockito.ArgumentMatchers.anyChar;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.doCallRealMethod;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
/**
 * Unit tests for CustomDataTypeTextBox: verifies that the regexp-driven
 * validation removes invalid characters on blur, blocks invalid keystrokes,
 * and fires the configured validation-error messages.
 */
public class CustomDataTypeTextBoxTest {
// Error messages handed to setRegExp; tests assert they come back verbatim.
private static final String ERROR_REMOVED = "some error reg exp";
private static final String ERROR_TYPED = "some error reg exp2";
// Capture the handlers the widget registers so tests can invoke them directly.
@Captor
private ArgumentCaptor<BlurHandler> blurCaptor;
@Captor
private ArgumentCaptor<KeyPressHandler> keyPressCaptor;
@Mock
private BlurEvent blurEvent;
@Mock
private KeyPressEvent keyPressEvent;
private CustomDataTypeTextBox textBox;
@Before
public void init() {
GwtMockito.initMocks(this);
// GWT.create returns a GwtMockito mock; selectively re-enable the real
// validation logic under test while keeping GWT widget internals stubbed.
textBox = GWT.create(CustomDataTypeTextBox.class);
doCallRealMethod().when(textBox).setRegExp(anyString(),
anyString(),
anyString());
doCallRealMethod().when(textBox).isValidValue(anyString(),
anyBoolean());
doCallRealMethod().when(textBox).setText(anyString());
doCallRealMethod().when(textBox).makeValidValue(anyString());
doCallRealMethod().when(textBox).getInvalidCharsInName(anyString());
doCallRealMethod().when(textBox).isValidChar(anyChar());
doCallRealMethod().when(textBox).setup();
doCallRealMethod().when(textBox).addBlurHandler(any(BlurHandler.class));
doCallRealMethod().when(textBox).addKeyPressHandler(any(KeyPressHandler.class));
textBox.setRegExp(StringUtils.ALPHA_NUM_UNDERSCORE_DOT_REGEXP,
ERROR_REMOVED,
ERROR_TYPED);
}
// Typing an invalid character ('@') must be blocked and reported, and a
// blur over an invalid value must be cleaned back to the valid text.
@Test
public void testSetup() {
when(textBox.getKeyCodeFromKeyPressEvent(any(KeyPressEvent.class))).thenReturn(64);
when(keyPressEvent.isControlKeyDown()).thenReturn(false);
when(keyPressEvent.isShiftKeyDown()).thenReturn(true);
when(keyPressEvent.getCharCode()).thenReturn('@');
when(textBox.getCursorPos()).thenReturn(4);
when(textBox.getSelectionLength()).thenReturn(0);
when(textBox.getValue()).thenReturn("ab12");
when(textBox.getText()).thenReturn("ab12@");
textBox.setup();
// setup() must register exactly one blur and one key-press handler.
verify(textBox,
times(1)).addBlurHandler(blurCaptor.capture());
verify(textBox,
times(1)).addKeyPressHandler(keyPressCaptor.capture());
BlurHandler blurHandler = blurCaptor.getValue();
blurHandler.onBlur(blurEvent);
verify(textBox,
times(1)).isValidValue("ab12@",
true);
verify(textBox,
times(1)).makeValidValue("ab12@");
verify(textBox,
times(1)).setValue("ab12");
KeyPressHandler keyPressHandler = keyPressCaptor.getValue();
keyPressHandler.onKeyPress(keyPressEvent);
// The invalid keystroke is suppressed and both error paths fire.
verify(keyPressEvent,
times(1)).preventDefault();
verify(textBox,
times(1)).isValidValue("ab12@",
false);
verify(textBox,
times(1)).fireValidationError(ERROR_REMOVED + ": @");
verify(textBox,
times(1)).fireValidationError(ERROR_TYPED + ": @");
}
// makeValidValue strips every character outside [A-Za-z0-9_.].
@Test
public void testMakeValid() {
String makeValidResult;
makeValidResult = textBox.makeValidValue(null);
assertEquals("",
makeValidResult);
makeValidResult = textBox.makeValidValue("");
assertEquals("",
makeValidResult);
makeValidResult = textBox.makeValidValue("c");
assertEquals("c",
makeValidResult);
makeValidResult = textBox.makeValidValue("a#b$2%1");
assertEquals("ab21",
makeValidResult);
makeValidResult = textBox.makeValidValue("<a#b$2%1.3-4_5>");
assertEquals("ab21.34_5",
makeValidResult);
}
// isValidValue returns null for valid input, otherwise the configured
// error message (ERROR_REMOVED on blur, ERROR_TYPED while typing)
// suffixed with the offending characters.
@Test
public void testIsValidValue() {
String isValidResult;
isValidResult = textBox.isValidValue("a",
true);
assertEquals(null,
isValidResult);
isValidResult = textBox.isValidValue("a",
false);
assertEquals(null,
isValidResult);
isValidResult = textBox.isValidValue("_",
true);
assertEquals(null,
isValidResult);
isValidResult = textBox.isValidValue("_",
false);
assertEquals(null,
isValidResult);
isValidResult = textBox.isValidValue("aBc",
false);
assertEquals(null,
isValidResult);
isValidResult = textBox.isValidValue("CdE",
false);
assertEquals(null,
isValidResult);
isValidResult = textBox.isValidValue("-",
true);
assertEquals(ERROR_REMOVED + ": -",
isValidResult);
isValidResult = textBox.isValidValue("a-b",
true);
assertEquals(ERROR_REMOVED + ": -",
isValidResult);
isValidResult = textBox.isValidValue("a#$%1",
false);
assertEquals(ERROR_TYPED + ": #$%",
isValidResult);
isValidResult = textBox.isValidValue("Cd.E",
false);
assertEquals(null,
isValidResult);
isValidResult = textBox.isValidValue("<a#$%1>",
false);
assertEquals(ERROR_TYPED + ": <#$%>",
isValidResult);
}
// Pressing Enter (char 13) must not be reported as an invalid character.
@Test
public void testKeyEnter() {
when(textBox.getKeyCodeFromKeyPressEvent(any(KeyPressEvent.class))).thenReturn(KeyCodes.KEY_ENTER);
when(keyPressEvent.isControlKeyDown()).thenReturn(false);
when(keyPressEvent.isShiftKeyDown()).thenReturn(false);
when(keyPressEvent.getCharCode()).thenReturn((char) 13);
when(textBox.getCursorPos()).thenReturn(4);
when(textBox.getSelectionLength()).thenReturn(0);
when(textBox.getValue()).thenReturn("ab12");
when(textBox.getText()).thenReturn("ab12" + (char) 13);
textBox.setup();
verify(textBox,
times(1)).addBlurHandler(blurCaptor.capture());
verify(textBox,
times(1)).addKeyPressHandler(keyPressCaptor.capture());
BlurHandler blurHandler = blurCaptor.getValue();
blurHandler.onBlur(blurEvent);
verify(textBox,
times(1)).isValidValue("ab12" + (char) 13,
true);
verify(textBox,
times(1)).makeValidValue("ab12" + (char) 13);
verify(textBox,
times(1)).setValue("ab12");
KeyPressHandler keyPressHandler = keyPressCaptor.getValue();
keyPressHandler.onKeyPress(keyPressEvent);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.builder.component.dsl;
import javax.annotation.Generated;
import org.apache.camel.Component;
import org.apache.camel.builder.component.AbstractComponentBuilder;
import org.apache.camel.builder.component.ComponentBuilder;
import org.apache.camel.component.rest.swagger.RestSwaggerComponent;
/**
* Configure REST producers based on a Swagger (OpenAPI) specification document
* delegating to a component implementing the RestProducerFactory interface.
*
* Generated by camel-package-maven-plugin - do not edit this file!
*/
@Generated("org.apache.camel.maven.packaging.ComponentDslMojo")
public interface RestSwaggerComponentBuilderFactory {
    /**
     * REST Swagger (camel-rest-swagger)
     * Configure REST producers based on a Swagger (OpenAPI) specification
     * document delegating to a component implementing the RestProducerFactory
     * interface.
     * 
     * Category: rest,swagger,http
     * Since: 2.19
     * Maven coordinates: org.apache.camel:camel-rest-swagger
     */
    static RestSwaggerComponentBuilder restSwagger() {
        return new RestSwaggerComponentBuilderImpl();
    }
    /**
     * Builder for the REST Swagger component.
     */
    interface RestSwaggerComponentBuilder
            extends
                ComponentBuilder<RestSwaggerComponent> {
        /**
         * API basePath, for example /v2. Default is unset, if set overrides the
         * value present in Swagger specification.
         * 
         * The option is a: <code>java.lang.String</code> type.
         * 
         * Group: producer
         */
        default RestSwaggerComponentBuilder basePath(java.lang.String basePath) {
            doSetProperty("basePath", basePath);
            return this;
        }
        /**
         * Name of the Camel component that will perform the requests. The
         * component must be present in Camel registry and it must implement
         * RestProducerFactory service provider interface. If not set CLASSPATH
         * is searched for single component that implements RestProducerFactory
         * SPI. Can be overridden in endpoint configuration.
         * 
         * The option is a: <code>java.lang.String</code> type.
         * 
         * Group: producer
         */
        default RestSwaggerComponentBuilder componentName(
                java.lang.String componentName) {
            doSetProperty("componentName", componentName);
            return this;
        }
        /**
         * What payload type this component capable of consuming. Could be one
         * type, like application/json or multiple types as application/json,
         * application/xml; q=0.5 according to the RFC7231. This equates to the
         * value of Accept HTTP header. If set overrides any value found in the
         * Swagger specification. Can be overridden in endpoint configuration.
         * 
         * The option is a: <code>java.lang.String</code> type.
         * 
         * Group: producer
         */
        default RestSwaggerComponentBuilder consumes(java.lang.String consumes) {
            doSetProperty("consumes", consumes);
            return this;
        }
        /**
         * Scheme hostname and port to direct the HTTP requests to in the form
         * of https://hostname:port. Can be configured at the endpoint,
         * component or in the corresponding REST configuration in the Camel
         * Context. If you give this component a name (e.g. petstore) that REST
         * configuration is consulted first, rest-swagger next, and global
         * configuration last. If set overrides any value found in the Swagger
         * specification, RestConfiguration. Can be overridden in endpoint
         * configuration.
         * 
         * The option is a: <code>java.lang.String</code> type.
         * 
         * Group: producer
         */
        default RestSwaggerComponentBuilder host(java.lang.String host) {
            doSetProperty("host", host);
            return this;
        }
        /**
         * Whether the producer should be started lazy (on the first message).
         * By starting lazy you can use this to allow CamelContext and routes to
         * startup in situations where a producer may otherwise fail during
         * starting and cause the route to fail being started. By deferring this
         * startup to be lazy then the startup failure can be handled during
         * routing messages via Camel's routing error handlers. Beware that when
         * the first message is processed then creating and starting the
         * producer may take a little time and prolong the total processing time
         * of the processing.
         * 
         * The option is a: <code>boolean</code> type.
         * 
         * Default: false
         * Group: producer
         */
        default RestSwaggerComponentBuilder lazyStartProducer(
                boolean lazyStartProducer) {
            doSetProperty("lazyStartProducer", lazyStartProducer);
            return this;
        }
        /**
         * What payload type this component is producing. For example
         * application/json according to the RFC7231. This equates to the value
         * of Content-Type HTTP header. If set overrides any value present in
         * the Swagger specification. Can be overridden in endpoint
         * configuration.
         * 
         * The option is a: <code>java.lang.String</code> type.
         * 
         * Group: producer
         */
        default RestSwaggerComponentBuilder produces(java.lang.String produces) {
            doSetProperty("produces", produces);
            return this;
        }
        /**
         * Path to the Swagger specification file. The scheme, host base path
         * are taken from this specification, but these can be overridden with
         * properties on the component or endpoint level. If not given the
         * component tries to load swagger.json resource. Note that the host
         * defined on the component and endpoint of this Component should
         * contain the scheme, hostname and optionally the port in the URI
         * syntax (i.e. https://api.example.com:8080). Can be overridden in
         * endpoint configuration.
         * 
         * The option is a: <code>java.net.URI</code> type.
         * 
         * Default: swagger.json
         * Group: producer
         */
        default RestSwaggerComponentBuilder specificationUri(
                java.net.URI specificationUri) {
            doSetProperty("specificationUri", specificationUri);
            return this;
        }
        /**
         * Whether the component should use basic property binding (Camel 2.x)
         * or the newer property binding with additional capabilities.
         * 
         * The option is a: <code>boolean</code> type.
         * 
         * Default: false
         * Group: advanced
         */
        @Deprecated
        default RestSwaggerComponentBuilder basicPropertyBinding(
                boolean basicPropertyBinding) {
            doSetProperty("basicPropertyBinding", basicPropertyBinding);
            return this;
        }
        /**
         * Customize TLS parameters used by the component. If not set defaults
         * to the TLS parameters set in the Camel context.
         * 
         * The option is a:
         * <code>org.apache.camel.support.jsse.SSLContextParameters</code> type.
         * 
         * Group: security
         */
        default RestSwaggerComponentBuilder sslContextParameters(
                org.apache.camel.support.jsse.SSLContextParameters sslContextParameters) {
            doSetProperty("sslContextParameters", sslContextParameters);
            return this;
        }
        /**
         * Enable usage of global SSL context parameters.
         * 
         * The option is a: <code>boolean</code> type.
         * 
         * Default: false
         * Group: security
         */
        default RestSwaggerComponentBuilder useGlobalSslContextParameters(
                boolean useGlobalSslContextParameters) {
            doSetProperty("useGlobalSslContextParameters", useGlobalSslContextParameters);
            return this;
        }
    }
    /**
     * Concrete builder: instantiates the component and applies the collected
     * properties via typed setters (no reflection).
     */
    class RestSwaggerComponentBuilderImpl
            extends
                AbstractComponentBuilder<RestSwaggerComponent>
            implements
                RestSwaggerComponentBuilder {
        @Override
        protected RestSwaggerComponent buildConcreteComponent() {
            return new RestSwaggerComponent();
        }
        // Maps each option name to its strongly-typed setter; returns false
        // for unknown names so the caller can report them.
        @Override
        protected boolean setPropertyOnComponent(
                Component component,
                String name,
                Object value) {
            switch (name) {
            case "basePath": ((RestSwaggerComponent) component).setBasePath((java.lang.String) value); return true;
            case "componentName": ((RestSwaggerComponent) component).setComponentName((java.lang.String) value); return true;
            case "consumes": ((RestSwaggerComponent) component).setConsumes((java.lang.String) value); return true;
            case "host": ((RestSwaggerComponent) component).setHost((java.lang.String) value); return true;
            case "lazyStartProducer": ((RestSwaggerComponent) component).setLazyStartProducer((boolean) value); return true;
            case "produces": ((RestSwaggerComponent) component).setProduces((java.lang.String) value); return true;
            case "specificationUri": ((RestSwaggerComponent) component).setSpecificationUri((java.net.URI) value); return true;
            case "basicPropertyBinding": ((RestSwaggerComponent) component).setBasicPropertyBinding((boolean) value); return true;
            case "sslContextParameters": ((RestSwaggerComponent) component).setSslContextParameters((org.apache.camel.support.jsse.SSLContextParameters) value); return true;
            case "useGlobalSslContextParameters": ((RestSwaggerComponent) component).setUseGlobalSslContextParameters((boolean) value); return true;
            default: return false;
            }
        }
    }
}
| |
// Copyright (c) 2017, Baidu.com, Inc. All Rights Reserved
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.baidu.palo.persist;
import com.baidu.palo.alter.DecommissionBackendJob;
import com.baidu.palo.alter.RollupJob;
import com.baidu.palo.alter.SchemaChangeJob;
import com.baidu.palo.analysis.UserIdentity;
import com.baidu.palo.backup.BackupJob;
import com.baidu.palo.backup.BackupJob_D;
import com.baidu.palo.backup.Repository;
import com.baidu.palo.backup.RestoreJob;
import com.baidu.palo.backup.RestoreJob_D;
import com.baidu.palo.catalog.BrokerMgr;
import com.baidu.palo.catalog.Catalog;
import com.baidu.palo.catalog.Database;
import com.baidu.palo.cluster.BaseParam;
import com.baidu.palo.cluster.Cluster;
import com.baidu.palo.common.Config;
import com.baidu.palo.common.FeConstants;
import com.baidu.palo.common.io.Text;
import com.baidu.palo.common.io.Writable;
import com.baidu.palo.ha.MasterInfo;
import com.baidu.palo.journal.Journal;
import com.baidu.palo.journal.JournalCursor;
import com.baidu.palo.journal.JournalEntity;
import com.baidu.palo.journal.bdbje.BDBJEJournal;
import com.baidu.palo.journal.bdbje.Timestamp;
import com.baidu.palo.journal.local.LocalJournal;
import com.baidu.palo.load.AsyncDeleteJob;
import com.baidu.palo.load.DeleteInfo;
import com.baidu.palo.load.ExportJob;
import com.baidu.palo.load.ExportMgr;
import com.baidu.palo.load.Load;
import com.baidu.palo.load.LoadErrorHub;
import com.baidu.palo.load.LoadJob;
import com.baidu.palo.metric.MetricRepo;
import com.baidu.palo.mysql.privilege.UserProperty;
import com.baidu.palo.qe.SessionVariable;
import com.baidu.palo.system.Backend;
import com.baidu.palo.system.Frontend;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.io.File;
import java.io.IOException;
import java.util.List;
/**
* EditLog maintains a log of the memory modifications.
* Current we support only file editLog.
*/
public class EditLog {
public static final Logger LOG = LogManager.getLogger(EditLog.class);
private EditLogOutputStream editStream = null;
private long txId = 0;
private long numTransactions;
private long totalTimeTransactions;
private Journal journal;
public EditLog(String nodeName) {
String journalType = Config.edit_log_type;
if (journalType.equalsIgnoreCase("bdb")) {
journal = new BDBJEJournal(nodeName);
} else if (journalType.equalsIgnoreCase("local")) {
journal = new LocalJournal(Catalog.IMAGE_DIR);
Catalog.getInstance().setIsMaster(true);
}
}
    /** @return id of the newest journal entry persisted so far. */
    public long getMaxJournalId() {
        return journal.getMaxJournalId();
    }

    /** @return id of the oldest journal entry still retained. */
    public long getMinJournalId() {
        return journal.getMinJournalId();
    }

    /** @return a cursor over journal entries in the id range [fromId, toId]. */
    public JournalCursor read(long fromId, long toId) {
        return journal.read(fromId, toId);
    }

    /** @return id of the last finalized (closed, read-only) journal. */
    public long getFinalizedJournalId() {
        return journal.getFinalizedJournalId();
    }

    /** Removes all journal entries up to (and including) the given id. */
    public void deleteJournals(long deleteToJournalId) {
        journal.deleteJournals(deleteToJournalId);
    }

    /** @return ids of the underlying journal databases (BDB-JE naming). */
    public List<Long> getDatabaseNames() {
        return journal.getDatabaseNames();
    }

    // There is at most one journal backend, so the stream count is 0 or 1.
    public synchronized int getNumEditStreams() {
        return journal == null ? 0 : 1;
    }
public static void loadJournal(Catalog catalog, JournalEntity journal) {
short opCode = journal.getOpCode();
if (opCode != OperationType.OP_SAVE_NEXTID && opCode != OperationType.OP_TIMESTAMP) {
LOG.debug("replay journal op code: {}", opCode);
}
try {
switch (opCode) {
case OperationType.OP_SAVE_NEXTID: {
String idString = ((Text) journal.getData()).toString();
long id = Long.parseLong(idString);
catalog.setNextId(id + 1);
break;
}
case OperationType.OP_CREATE_DB: {
Database db = (Database) journal.getData();
catalog.replayCreateDb(db);
break;
}
case OperationType.OP_DROP_DB: {
String dbName = ((Text) journal.getData()).toString();
catalog.replayDropDb(dbName);
break;
}
case OperationType.OP_ALTER_DB: {
DatabaseInfo dbInfo = (DatabaseInfo) journal.getData();
String dbName = dbInfo.getDbName();
LOG.info("Begin to unprotect alter db info {}", dbName);
catalog.replayAlterDatabaseQuota(dbName, dbInfo.getQuota());
break;
}
case OperationType.OP_ERASE_DB: {
Text dbId = (Text) journal.getData();
catalog.replayEraseDatabase(Long.parseLong(dbId.toString()));
break;
}
case OperationType.OP_RECOVER_DB: {
RecoverInfo info = (RecoverInfo) journal.getData();
catalog.replayRecoverDatabase(info);
break;
}
case OperationType.OP_RENAME_DB: {
DatabaseInfo dbInfo = (DatabaseInfo) journal.getData();
String dbName = dbInfo.getDbName();
LOG.info("Begin to unprotect rename db {}", dbName);
catalog.replayRenameDatabase(dbName, dbInfo.getNewDbName());
break;
}
case OperationType.OP_CREATE_TABLE: {
CreateTableInfo info = (CreateTableInfo) journal.getData();
LOG.info("Begin to unprotect create table. db = "
+ info.getDbName() + " table = " + info.getTable().getId());
catalog.replayCreateTable(info.getDbName(), info.getTable());
break;
}
case OperationType.OP_DROP_TABLE: {
DropInfo info = (DropInfo) journal.getData();
Database db = catalog.getDb(info.getDbId());
if (db == null) {
LOG.warn("failed to get db[{}]", info.getDbId());
break;
}
LOG.info("Begin to unprotect drop table. db = "
+ db.getFullName() + " table = " + info.getTableId());
catalog.replayDropTable(db, info.getTableId());
break;
}
case OperationType.OP_ADD_PARTITION: {
PartitionPersistInfo info = (PartitionPersistInfo) journal.getData();
LOG.info("Begin to unprotect add partition. db = " + info.getDbId()
+ " table = " + info.getTableId()
+ " partitionName = " + info.getPartition().getName());
catalog.replayAddPartition(info);
break;
}
case OperationType.OP_DROP_PARTITION: {
DropPartitionInfo info = (DropPartitionInfo) journal.getData();
LOG.info("Begin to unprotect drop partition. db = " + info.getDbId()
+ " table = " + info.getTableId()
+ " partitionName = " + info.getPartitionName());
catalog.replayDropPartition(info);
break;
}
case OperationType.OP_MODIFY_PARTITION: {
ModifyPartitionInfo info = (ModifyPartitionInfo) journal.getData();
LOG.info("Begin to unprotect modify partition. db = " + info.getDbId()
+ " table = " + info.getTableId() + " partitionId = " + info.getPartitionId());
catalog.replayModifyPartition(info);
break;
}
case OperationType.OP_ERASE_TABLE: {
Text tableId = (Text) journal.getData();
catalog.replayEraseTable(Long.parseLong(tableId.toString()));
break;
}
case OperationType.OP_ERASE_PARTITION: {
Text partitionId = (Text) journal.getData();
catalog.replayErasePartition(Long.parseLong(partitionId.toString()));
break;
}
case OperationType.OP_RECOVER_TABLE: {
RecoverInfo info = (RecoverInfo) journal.getData();
catalog.replayRecoverTable(info);
break;
}
case OperationType.OP_RECOVER_PARTITION: {
RecoverInfo info = (RecoverInfo) journal.getData();
catalog.replayRecoverPartition(info);
break;
}
case OperationType.OP_RENAME_TABLE: {
TableInfo info = (TableInfo) journal.getData();
catalog.replayRenameTable(info);
break;
}
case OperationType.OP_RENAME_PARTITION: {
TableInfo info = (TableInfo) journal.getData();
catalog.replayRenamePartition(info);
break;
}
case OperationType.OP_BACKUP_START:
case OperationType.OP_BACKUP_FINISH_SNAPSHOT:
case OperationType.OP_BACKUP_FINISH: {
BackupJob_D job = (BackupJob_D) journal.getData();
break;
}
case OperationType.OP_RESTORE_START:
case OperationType.OP_RESTORE_FINISH: {
RestoreJob_D job = (RestoreJob_D) journal.getData();
break;
}
case OperationType.OP_BACKUP_JOB: {
BackupJob job = (BackupJob) journal.getData();
catalog.getBackupHandler().replayAddJob(job);
break;
}
case OperationType.OP_RESTORE_JOB: {
RestoreJob job = (RestoreJob) journal.getData();
job.setCatalog(catalog);
catalog.getBackupHandler().replayAddJob(job);
break;
}
case OperationType.OP_START_ROLLUP: {
RollupJob job = (RollupJob) journal.getData();
catalog.getRollupHandler().replayInitJob(job, catalog);
break;
}
case OperationType.OP_FINISH_ROLLUP: {
RollupJob job = (RollupJob) journal.getData();
catalog.getRollupHandler().replayFinish(job, catalog);
break;
}
case OperationType.OP_CANCEL_ROLLUP: {
RollupJob job = (RollupJob) journal.getData();
catalog.getRollupHandler().replayCancel(job, catalog);
break;
}
case OperationType.OP_DROP_ROLLUP: {
DropInfo info = (DropInfo) journal.getData();
catalog.getRollupHandler().replayDropRollup(info, catalog);
break;
}
case OperationType.OP_START_SCHEMA_CHANGE: {
SchemaChangeJob job = (SchemaChangeJob) journal.getData();
LOG.info("Begin to unprotect create schema change job. db = " + job.getDbId()
+ " table = " + job.getTableId());
catalog.getSchemaChangeHandler().replayInitJob(job, catalog);
break;
}
case OperationType.OP_FINISH_SCHEMA_CHANGE: {
SchemaChangeJob job = (SchemaChangeJob) journal.getData();
catalog.getSchemaChangeHandler().replayFinish(job, catalog);
break;
}
case OperationType.OP_CANCEL_SCHEMA_CHANGE: {
SchemaChangeJob job = (SchemaChangeJob) journal.getData();
LOG.debug("Begin to unprotect cancel schema change. db = " + job.getDbId()
+ " table = " + job.getTableId());
catalog.getSchemaChangeHandler().replayCancel(job, catalog);
break;
}
case OperationType.OP_FINISH_CONSISTENCY_CHECK: {
ConsistencyCheckInfo info = (ConsistencyCheckInfo) journal.getData();
catalog.getConsistencyChecker().replayFinishConsistencyCheck(info, catalog);
break;
}
case OperationType.OP_CLEAR_ROLLUP_INFO: {
ReplicaPersistInfo info = (ReplicaPersistInfo) journal.getData();
catalog.getLoadInstance().replayClearRollupInfo(info, catalog);
break;
}
case OperationType.OP_RENAME_ROLLUP: {
TableInfo info = (TableInfo) journal.getData();
catalog.replayRenameRollup(info);
break;
}
case OperationType.OP_LOAD_START: {
LoadJob job = (LoadJob) journal.getData();
catalog.getLoadInstance().replayAddLoadJob(job);
break;
}
case OperationType.OP_LOAD_ETL: {
LoadJob job = (LoadJob) journal.getData();
catalog.getLoadInstance().replayEtlLoadJob(job);
break;
}
case OperationType.OP_LOAD_LOADING: {
LoadJob job = (LoadJob) journal.getData();
catalog.getLoadInstance().replayLoadingLoadJob(job);
break;
}
case OperationType.OP_LOAD_QUORUM: {
LoadJob job = (LoadJob) journal.getData();
Load load = catalog.getLoadInstance();
load.replayQuorumLoadJob(job, catalog);
break;
}
case OperationType.OP_LOAD_DONE: {
LoadJob job = (LoadJob) journal.getData();
Load load = catalog.getLoadInstance();
load.replayFinishLoadJob(job, catalog);
break;
}
case OperationType.OP_LOAD_CANCEL: {
LoadJob job = (LoadJob) journal.getData();
Load load = catalog.getLoadInstance();
load.replayCancelLoadJob(job);
break;
}
case OperationType.OP_EXPORT_CREATE: {
ExportJob job = (ExportJob) journal.getData();
ExportMgr exportMgr = catalog.getExportMgr();
exportMgr.replayCreateExportJob(job);
break;
}
case OperationType.OP_EXPORT_UPDATE_STATE:
ExportJob.StateTransfer op = (ExportJob.StateTransfer) journal.getData();
ExportMgr exportMgr = catalog.getExportMgr();
exportMgr.replayUpdateJobState(op.getJobId(), op.getState());
break;
case OperationType.OP_FINISH_SYNC_DELETE: {
DeleteInfo info = (DeleteInfo) journal.getData();
Load load = catalog.getLoadInstance();
load.replayDelete(info, catalog);
break;
}
case OperationType.OP_FINISH_ASYNC_DELETE: {
AsyncDeleteJob deleteJob = (AsyncDeleteJob) journal.getData();
Load load = catalog.getLoadInstance();
load.replayFinishAsyncDeleteJob(deleteJob, catalog);
break;
}
case OperationType.OP_ADD_REPLICA: {
ReplicaPersistInfo info = (ReplicaPersistInfo) journal.getData();
catalog.replayAddReplica(info);
break;
}
case OperationType.OP_DELETE_REPLICA: {
ReplicaPersistInfo info = (ReplicaPersistInfo) journal.getData();
catalog.replayDeleteReplica(info);
break;
}
case OperationType.OP_ADD_BACKEND: {
Backend be = (Backend) journal.getData();
Catalog.getCurrentSystemInfo().replayAddBackend(be);
break;
}
case OperationType.OP_DROP_BACKEND: {
Backend be = (Backend) journal.getData();
Catalog.getCurrentSystemInfo().replayDropBackend(be);
break;
}
case OperationType.OP_BACKEND_STATE_CHANGE: {
Backend be = (Backend) journal.getData();
Catalog.getCurrentSystemInfo().updateBackendState(be);
break;
}
case OperationType.OP_START_DECOMMISSION_BACKEND: {
DecommissionBackendJob job = (DecommissionBackendJob) journal.getData();
LOG.debug("{}: {}", opCode, job.getTableId());
catalog.getClusterHandler().replayInitJob(job, catalog);
break;
}
case OperationType.OP_FINISH_DECOMMISSION_BACKEND: {
DecommissionBackendJob job = (DecommissionBackendJob) journal.getData();
LOG.debug("{}: {}", opCode, job.getTableId());
catalog.getClusterHandler().replayFinish(job, catalog);
break;
}
case OperationType.OP_ADD_FIRST_FRONTEND:
case OperationType.OP_ADD_FRONTEND: {
Frontend fe = (Frontend) journal.getData();
catalog.addFrontendWithCheck(fe);
break;
}
case OperationType.OP_REMOVE_FRONTEND: {
Frontend fe = (Frontend) journal.getData();
catalog.replayDropFrontend(fe);
if (fe.getNodeName().equals(Catalog.getCurrentCatalog().getNodeName())) {
System.out.println("current fe " + fe + " is removed. will exit");
LOG.info("current fe " + fe + " is removed. will exit");
System.exit(-1);
}
break;
}
case OperationType.OP_ALTER_ACCESS_RESOURCE: {
UserProperty userProperty = (UserProperty) journal.getData();
catalog.getAuth().replayAlterAccess(userProperty);
break;
}
case OperationType.OP_DROP_USER: {
String userName = ((Text) journal.getData()).toString();
catalog.getAuth().replayOldDropUser(userName);
break;
}
case OperationType.OP_CREATE_USER: {
PrivInfo privInfo = (PrivInfo) journal.getData();
catalog.getAuth().replayCreateUser(privInfo);
break;
}
case OperationType.OP_NEW_DROP_USER: {
UserIdentity userIdent = (UserIdentity) journal.getData();
catalog.getAuth().replayDropUser(userIdent);
break;
}
case OperationType.OP_GRANT_PRIV: {
PrivInfo privInfo = (PrivInfo) journal.getData();
catalog.getAuth().replayGrant(privInfo);
break;
}
case OperationType.OP_REVOKE_PRIV: {
PrivInfo privInfo = (PrivInfo) journal.getData();
catalog.getAuth().replayRevoke(privInfo);
break;
}
case OperationType.OP_SET_PASSWORD: {
PrivInfo privInfo = (PrivInfo) journal.getData();
catalog.getAuth().replaySetPassword(privInfo);
break;
}
case OperationType.OP_CREATE_ROLE: {
PrivInfo privInfo = (PrivInfo) journal.getData();
catalog.getAuth().replayCreateRole(privInfo);
break;
}
case OperationType.OP_DROP_ROLE: {
PrivInfo privInfo = (PrivInfo) journal.getData();
catalog.getAuth().replayDropRole(privInfo);
break;
}
case OperationType.OP_TIMESTAMP: {
Timestamp stamp = (Timestamp) journal.getData();
catalog.setSynchronizedTime(stamp.getTimestamp());
break;
}
case OperationType.OP_MASTER_INFO_CHANGE: {
MasterInfo info = (MasterInfo) journal.getData();
catalog.setMaster(info);
break;
}
case OperationType.OP_META_VERSION: {
String versionString = ((Text) journal.getData()).toString();
int version = Integer.parseInt(versionString);
if (catalog.getJournalVersion() > FeConstants.meta_version) {
LOG.error("meta data version is out of date, image: {}. meta: {}."
+ "please update FeConstants.meta_version and restart.",
catalog.getJournalVersion(), FeConstants.meta_version);
System.exit(-1);
}
catalog.setJournalVersion(version);
break;
}
case OperationType.OP_GLOBAL_VARIABLE: {
SessionVariable variable = (SessionVariable) journal.getData();
catalog.replayGlobalVariable(variable);
break;
}
case OperationType.OP_CREATE_CLUSTER: {
final Cluster value = (Cluster) journal.getData();
catalog.replayCreateCluster(value);
break;
}
case OperationType.OP_DROP_CLUSTER: {
final ClusterInfo value = (ClusterInfo) journal.getData();
catalog.replayDropCluster(value);
break;
}
case OperationType.OP_EXPAND_CLUSTER: {
final ClusterInfo info = (ClusterInfo) journal.getData();
catalog.replayExpandCluster(info);
break;
}
case OperationType.OP_LINK_CLUSTER: {
final BaseParam param = (BaseParam) journal.getData();
catalog.replayLinkDb(param);
break;
}
case OperationType.OP_MIGRATE_CLUSTER: {
final BaseParam param = (BaseParam) journal.getData();
catalog.replayMigrateDb(param);
break;
}
case OperationType.OP_UPDATE_DB: {
final DatabaseInfo param = (DatabaseInfo) journal.getData();
catalog.replayUpdateDb(param);
break;
}
case OperationType.OP_DROP_LINKDB: {
final DropLinkDbAndUpdateDbInfo param = (DropLinkDbAndUpdateDbInfo) journal.getData();
catalog.replayDropLinkDb(param);
break;
}
case OperationType.OP_ADD_BROKER: {
final BrokerMgr.ModifyBrokerInfo param = (BrokerMgr.ModifyBrokerInfo) journal.getData();
catalog.getBrokerMgr().replayAddBrokers(param.brokerName, param.brokerAddresses);
break;
}
case OperationType.OP_DROP_BROKER: {
final BrokerMgr.ModifyBrokerInfo param = (BrokerMgr.ModifyBrokerInfo) journal.getData();
catalog.getBrokerMgr().replayDropBrokers(param.brokerName, param.brokerAddresses);
break;
}
case OperationType.OP_DROP_ALL_BROKER: {
final String param = journal.getData().toString();
catalog.getBrokerMgr().replayDropAllBroker(param);
break;
}
case OperationType.OP_SET_LOAD_ERROR_URL: {
final LoadErrorHub.Param param = (LoadErrorHub.Param) journal.getData();
catalog.getLoadInstance().setLoadErrorHubInfo(param);
break;
}
case OperationType.OP_UPDATE_CLUSTER_AND_BACKENDS: {
final BackendIdsUpdateInfo info = (BackendIdsUpdateInfo) journal.getData();
catalog.replayUpdateClusterAndBackends(info);
break;
}
case OperationType.OP_CREATE_REPOSITORY: {
Repository repository = (Repository) journal.getData();
catalog.getBackupHandler().getRepoMgr().addAndInitRepoIfNotExist(repository, true);
break;
}
case OperationType.OP_DROP_REPOSITORY: {
String repoName = ((Text) journal.getData()).toString();
catalog.getBackupHandler().getRepoMgr().removeRepo(repoName, true);
break;
}
default: {
IOException e = new IOException();
LOG.error("UNKNOWN Operation Type {}", opCode, e);
throw e;
}
}
} catch (Exception e) {
LOG.error("Operation Type {}", opCode, e);
}
}
    /**
     * Shutdown the file store.
     */
    public synchronized void close() throws IOException {
        journal.close();
    }

    /** Creates an empty edit-log file at the given path, then closes it. */
    public synchronized void createEditLogFile(File name) throws IOException {
        EditLogOutputStream editLogOutputStream = new EditLogFileOutputStream(name);
        editLogOutputStream.create();
        editLogOutputStream.close();
    }

    /** Opens the underlying journal for writing. */
    public void open() {
        journal.open();
    }

    /**
     * Close current journal and start a new journal
     */
    public void rollEditLog() {
        journal.rollJournal();
    }
/**
* Write an operation to the edit log. Do not sync to persistent store yet.
*/
private synchronized void logEdit(short op, Writable writable) {
if (this.getNumEditStreams() == 0) {
LOG.error("Fatal Error : no editLog stream");
throw new Error("Fatal Error : no editLog stream");
}
long start = System.currentTimeMillis();
try {
journal.write(op, writable);
} catch (Exception e) {
LOG.error("Fatal Error : write stream Exception", e);
Runtime.getRuntime().exit(-1);
}
// get a new transactionId
txId++;
// update statistics
long end = System.currentTimeMillis();
numTransactions++;
totalTimeTransactions += (end - start);
if (LOG.isDebugEnabled()) {
LOG.debug("nextId = {}, numTransactions = {}, totalTimeTransactions = {}, op = {}",
txId, numTransactions, totalTimeTransactions, op);
}
if (txId == Config.edit_log_roll_num) {
LOG.info("txId is equal to edit_log_roll_num {}, will roll edit.", txId);
rollEditLog();
txId = 0;
}
if (MetricRepo.isInit.get()) {
MetricRepo.COUNTER_EDIT_LOG_WRITE.increase(1L);
}
}
    /**
     * Return the size of the current EditLog
     */
    // NOTE(review): 'editStream' is initialized to null and never assigned in
    // this class, so this appears to NPE if called — confirm whether it is
    // dead code left from the file-based edit log.
    synchronized long getEditLogSize() throws IOException {
        return editStream.length();
    }

    /** @return number of transactions written since the last roll. */
    public synchronized long getTxId() {
        return txId;
    }
    // ------------------------------------------------------------------
    // Typed wrappers around logEdit(): one method per operation type,
    // pairing each payload class with its OperationType code. These must
    // stay in sync with the casts performed in loadJournal().
    // ------------------------------------------------------------------

    public void logSaveNextId(long nextId) {
        logEdit(OperationType.OP_SAVE_NEXTID, new Text(Long.toString(nextId)));
    }

    public void logCreateDb(Database db) {
        logEdit(OperationType.OP_CREATE_DB, db);
    }

    public void logDropDb(String dbName) {
        logEdit(OperationType.OP_DROP_DB, new Text(dbName));
    }

    public void logEraseDb(long dbId) {
        logEdit(OperationType.OP_ERASE_DB, new Text(Long.toString(dbId)));
    }

    public void logRecoverDb(RecoverInfo info) {
        logEdit(OperationType.OP_RECOVER_DB, info);
    }

    public void logAlterDb(DatabaseInfo dbInfo) {
        logEdit(OperationType.OP_ALTER_DB, dbInfo);
    }

    public void logCreateTable(CreateTableInfo info) {
        logEdit(OperationType.OP_CREATE_TABLE, info);
    }

    public void logAddPartition(PartitionPersistInfo info) {
        logEdit(OperationType.OP_ADD_PARTITION, info);
    }

    public void logDropPartition(DropPartitionInfo info) {
        logEdit(OperationType.OP_DROP_PARTITION, info);
    }

    public void logErasePartition(long partitionId) {
        logEdit(OperationType.OP_ERASE_PARTITION, new Text(Long.toString(partitionId)));
    }

    public void logRecoverPartition(RecoverInfo info) {
        logEdit(OperationType.OP_RECOVER_PARTITION, info);
    }

    public void logModifyPartition(ModifyPartitionInfo info) {
        logEdit(OperationType.OP_MODIFY_PARTITION, info);
    }

    public void logDropTable(DropInfo info) {
        logEdit(OperationType.OP_DROP_TABLE, info);
    }

    public void logEraseTable(long tableId) {
        logEdit(OperationType.OP_ERASE_TABLE, new Text(Long.toString(tableId)));
    }

    public void logRecoverTable(RecoverInfo info) {
        logEdit(OperationType.OP_RECOVER_TABLE, info);
    }

    public void logLoadStart(LoadJob job) {
        logEdit(OperationType.OP_LOAD_START, job);
    }

    public void logLoadEtl(LoadJob job) {
        logEdit(OperationType.OP_LOAD_ETL, job);
    }

    public void logLoadLoading(LoadJob job) {
        logEdit(OperationType.OP_LOAD_LOADING, job);
    }

    public void logLoadQuorum(LoadJob job) {
        logEdit(OperationType.OP_LOAD_QUORUM, job);
    }

    public void logLoadCancel(LoadJob job) {
        logEdit(OperationType.OP_LOAD_CANCEL, job);
    }

    public void logLoadDone(LoadJob job) {
        logEdit(OperationType.OP_LOAD_DONE, job);
    }

    public void logStartRollup(RollupJob rollupJob) {
        logEdit(OperationType.OP_START_ROLLUP, rollupJob);
    }

    public void logFinishRollup(RollupJob rollupJob) {
        logEdit(OperationType.OP_FINISH_ROLLUP, rollupJob);
    }

    public void logCancelRollup(RollupJob rollupJob) {
        logEdit(OperationType.OP_CANCEL_ROLLUP, rollupJob);
    }

    public void logClearRollupIndexInfo(ReplicaPersistInfo info) {
        logEdit(OperationType.OP_CLEAR_ROLLUP_INFO, info);
    }

    public void logDropRollup(DropInfo info) {
        logEdit(OperationType.OP_DROP_ROLLUP, info);
    }

    public void logStartSchemaChange(SchemaChangeJob schemaChangeJob) {
        logEdit(OperationType.OP_START_SCHEMA_CHANGE, schemaChangeJob);
    }

    public void logFinishSchemaChange(SchemaChangeJob schemaChangeJob) {
        logEdit(OperationType.OP_FINISH_SCHEMA_CHANGE, schemaChangeJob);
    }

    public void logCancelSchemaChange(SchemaChangeJob schemaChangeJob) {
        logEdit(OperationType.OP_CANCEL_SCHEMA_CHANGE, schemaChangeJob);
    }

    public void logFinishConsistencyCheck(ConsistencyCheckInfo info) {
        logEdit(OperationType.OP_FINISH_CONSISTENCY_CHECK, info);
    }

    public void logAddBackend(Backend be) {
        logEdit(OperationType.OP_ADD_BACKEND, be);
    }

    public void logDropBackend(Backend be) {
        logEdit(OperationType.OP_DROP_BACKEND, be);
    }

    public void logAddFrontend(Frontend fe) {
        logEdit(OperationType.OP_ADD_FRONTEND, fe);
    }

    public void logAddFirstFrontend(Frontend fe) {
        logEdit(OperationType.OP_ADD_FIRST_FRONTEND, fe);
    }

    public void logRemoveFrontend(Frontend fe) {
        logEdit(OperationType.OP_REMOVE_FRONTEND, fe);
    }

    public void logFinishSyncDelete(DeleteInfo info) {
        logEdit(OperationType.OP_FINISH_SYNC_DELETE, info);
    }

    public void logFinishAsyncDelete(AsyncDeleteJob job) {
        logEdit(OperationType.OP_FINISH_ASYNC_DELETE, job);
    }

    public void logAddReplica(ReplicaPersistInfo info) {
        logEdit(OperationType.OP_ADD_REPLICA, info);
    }

    public void logDeleteReplica(ReplicaPersistInfo info) {
        logEdit(OperationType.OP_DELETE_REPLICA, info);
    }

    public void logTimestamp(Timestamp stamp) {
        logEdit(OperationType.OP_TIMESTAMP, stamp);
    }

    public void logMasterInfo(MasterInfo info) {
        logEdit(OperationType.OP_MASTER_INFO_CHANGE, info);
    }

    public void logMetaVersion(int version) {
        logEdit(OperationType.OP_META_VERSION, new Text(Integer.toString(version)));
    }

    public void logBackendStateChange(Backend be) {
        logEdit(OperationType.OP_BACKEND_STATE_CHANGE, be);
    }

    public void logAlterAccess(UserProperty userProperty) {
        logEdit(OperationType.OP_ALTER_ACCESS_RESOURCE, userProperty);
    }

    // Superseded by logNewDropUser(UserIdentity); kept for old-format entries.
    @Deprecated
    public void logDropUser(String userName) {
        logEdit(OperationType.OP_DROP_USER, new Text(userName));
    }

    public void logCreateUser(PrivInfo info) {
        logEdit(OperationType.OP_CREATE_USER, info);
    }

    public void logNewDropUser(UserIdentity userIdent) {
        logEdit(OperationType.OP_NEW_DROP_USER, userIdent);
    }

    public void logGrantPriv(PrivInfo info) {
        logEdit(OperationType.OP_GRANT_PRIV, info);
    }

    public void logRevokePriv(PrivInfo info) {
        logEdit(OperationType.OP_REVOKE_PRIV, info);
    }

    public void logSetPassword(PrivInfo info) {
        logEdit(OperationType.OP_SET_PASSWORD, info);
    }

    public void logCreateRole(PrivInfo info) {
        logEdit(OperationType.OP_CREATE_ROLE, info);
    }

    public void logDropRole(PrivInfo info) {
        logEdit(OperationType.OP_DROP_ROLE, info);
    }

    public void logStartDecommissionBackend(DecommissionBackendJob job) {
        logEdit(OperationType.OP_START_DECOMMISSION_BACKEND, job);
    }

    public void logFinishDecommissionBackend(DecommissionBackendJob job) {
        logEdit(OperationType.OP_FINISH_DECOMMISSION_BACKEND, job);
    }

    public void logDatabaseRename(DatabaseInfo databaseInfo) {
        logEdit(OperationType.OP_RENAME_DB, databaseInfo);
    }

    public void logUpdateDatabase(DatabaseInfo databaseInfo) {
        logEdit(OperationType.OP_UPDATE_DB, databaseInfo);
    }

    public void logTableRename(TableInfo tableInfo) {
        logEdit(OperationType.OP_RENAME_TABLE, tableInfo);
    }

    public void logRollupRename(TableInfo tableInfo) {
        logEdit(OperationType.OP_RENAME_ROLLUP, tableInfo);
    }

    public void logPartitionRename(TableInfo tableInfo) {
        logEdit(OperationType.OP_RENAME_PARTITION, tableInfo);
    }

    // The BackupJob_D / RestoreJob_D writers below record the deprecated
    // backup/restore format; loadJournal() deserializes but ignores them.
    public void logBackupStart(BackupJob_D backupJob) {
        logEdit(OperationType.OP_BACKUP_START, backupJob);
    }

    public void logBackupFinishSnapshot(BackupJob_D backupJob) {
        logEdit(OperationType.OP_BACKUP_FINISH_SNAPSHOT, backupJob);
    }

    public void logBackupFinish(BackupJob_D backupJob) {
        logEdit(OperationType.OP_BACKUP_FINISH, backupJob);
    }

    public void logRestoreJobStart(RestoreJob_D restoreJob) {
        logEdit(OperationType.OP_RESTORE_START, restoreJob);
    }

    public void logRestoreFinish(RestoreJob_D restoreJob) {
        logEdit(OperationType.OP_RESTORE_FINISH, restoreJob);
    }

    public void logGlobalVariable(SessionVariable variable) {
        logEdit(OperationType.OP_GLOBAL_VARIABLE, variable);
    }

    public void logCreateCluster(Cluster cluster) {
        logEdit(OperationType.OP_CREATE_CLUSTER, cluster);
    }

    public void logDropCluster(ClusterInfo info) {
        logEdit(OperationType.OP_DROP_CLUSTER, info);
    }

    // NOTE(review): this writes a Text payload under OP_UPDATE_DB, but
    // loadJournal() casts OP_UPDATE_DB data to DatabaseInfo — replaying an
    // entry written here would throw ClassCastException. Confirm whether this
    // method is dead code or the replay side needs to handle both payloads.
    public void logUpdateDbClusterName(String info) {
        logEdit(OperationType.OP_UPDATE_DB, new Text(info));
    }

    public void logExpandCluster(ClusterInfo ci) {
        logEdit(OperationType.OP_EXPAND_CLUSTER, ci);
    }

    public void logLinkCluster(BaseParam param) {
        logEdit(OperationType.OP_LINK_CLUSTER, param);
    }

    public void logMigrateCluster(BaseParam param) {
        logEdit(OperationType.OP_MIGRATE_CLUSTER, param);
    }

    public void logDropLinkDb(DropLinkDbAndUpdateDbInfo info) {
        logEdit(OperationType.OP_DROP_LINKDB, info);
    }

    public void logAddBroker(BrokerMgr.ModifyBrokerInfo info) {
        logEdit(OperationType.OP_ADD_BROKER, info);
    }

    public void logDropBroker(BrokerMgr.ModifyBrokerInfo info) {
        logEdit(OperationType.OP_DROP_BROKER, info);
    }

    public void logDropAllBroker(String brokerName) {
        logEdit(OperationType.OP_DROP_ALL_BROKER, new Text(brokerName));
    }

    public void logSetLoadErrorHub(LoadErrorHub.Param param) {
        logEdit(OperationType.OP_SET_LOAD_ERROR_URL, param);
    }

    public void logExportCreate(ExportJob job) {
        logEdit(OperationType.OP_EXPORT_CREATE, job);
    }

    public void logExportUpdateState(long jobId, ExportJob.JobState newState) {
        ExportJob.StateTransfer transfer = new ExportJob.StateTransfer(jobId, newState);
        logEdit(OperationType.OP_EXPORT_UPDATE_STATE, transfer);
    }

    public void logUpdateClusterAndBackendState(BackendIdsUpdateInfo info) {
        logEdit(OperationType.OP_UPDATE_CLUSTER_AND_BACKENDS, info);
    }

    public void logBackupJob(BackupJob job) {
        logEdit(OperationType.OP_BACKUP_JOB, job);
    }

    public void logCreateRepository(Repository repo) {
        logEdit(OperationType.OP_CREATE_REPOSITORY, repo);
    }

    public void logDropRepository(String repoName) {
        logEdit(OperationType.OP_DROP_REPOSITORY, new Text(repoName));
    }

    public void logRestoreJob(RestoreJob job) {
        logEdit(OperationType.OP_RESTORE_JOB, job);
    }
}
| |
// Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chrome.browser.explore_sites;
import android.app.Activity;
import android.content.Context;
import android.os.Parcel;
import android.os.Parcelable;
import android.text.TextUtils;
import android.text.format.DateUtils;
import android.util.Base64;
import android.view.ViewGroup;
import androidx.annotation.IntDef;
import androidx.annotation.Nullable;
import androidx.annotation.VisibleForTesting;
import androidx.recyclerview.widget.RecyclerView;
import org.chromium.base.ApiCompatibilityUtils;
import org.chromium.base.metrics.RecordHistogram;
import org.chromium.base.metrics.RecordUserAction;
import org.chromium.chrome.R;
import org.chromium.chrome.browser.native_page.ContextMenuManager;
import org.chromium.chrome.browser.native_page.NativePageNavigationDelegate;
import org.chromium.chrome.browser.native_page.NativePageNavigationDelegateImpl;
import org.chromium.chrome.browser.profiles.Profile;
import org.chromium.chrome.browser.tab.EmptyTabObserver;
import org.chromium.chrome.browser.tab.Tab;
import org.chromium.chrome.browser.tab.TabObserver;
import org.chromium.chrome.browser.tabmodel.TabModelSelector;
import org.chromium.chrome.browser.ui.native_page.BasicNativePage;
import org.chromium.chrome.browser.ui.native_page.NativePageHost;
import org.chromium.components.browser_ui.widget.RoundedIconGenerator;
import org.chromium.components.embedder_support.util.UrlConstants;
import org.chromium.content_public.browser.NavigationController;
import org.chromium.content_public.browser.NavigationEntry;
import org.chromium.ui.modelutil.ListModel;
import org.chromium.ui.modelutil.PropertyModel;
import org.chromium.ui.modelutil.RecyclerViewAdapter;
import org.chromium.url.GURL;
import org.chromium.url.URI;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.net.URISyntaxException;
import java.util.List;
/**
* Provides functionality when the user interacts with the explore sites page.
*/
public class ExploreSitesPage extends BasicNativePage {
    // Page-revisit deltas are only recorded to UMA when the user returns within 30 seconds.
    private static final long BACK_NAVIGATION_TIMEOUT_FOR_UMA = DateUtils.SECOND_IN_MILLIS * 30;
    // Prefix for user actions recorded by the context menu created for this page.
    private static final String CONTEXT_MENU_USER_ACTION_PREFIX = "ExploreSites";
    private static final int INITIAL_SCROLL_POSITION = 0;
    // Key under which the serialized PageState is stored in the committed navigation entry.
    private static final String NAVIGATION_ENTRY_PAGE_STATE_KEY = "ExploreSitesPageState";
    // Constants that dictate sizes of rows and columns for each density variation.
    private static final int MAX_COLUMNS_DENSE_TITLE_BOTTOM = 5;
    private static final int MAX_COLUMNS_DENSE_TITLE_RIGHT = 3;
    private static final int MAX_COLUMNS_ORIGINAL = 4;
    private static final int MAX_ROWS_DENSE_TITLE_BOTTOM = 2;
    private static final int MAX_ROWS_DENSE_TITLE_RIGHT = 3;
    private static final int MAX_ROWS_ORIGINAL = 2;
    // Largest rows*columns product across all variations.
    public static final int MAX_TILE_COUNT_ALL_VARIATIONS =
            Math.max(Math.max(MAX_COLUMNS_DENSE_TITLE_BOTTOM * MAX_ROWS_DENSE_TITLE_BOTTOM,
                             MAX_COLUMNS_DENSE_TITLE_RIGHT* MAX_ROWS_DENSE_TITLE_RIGHT),
                    MAX_COLUMNS_ORIGINAL* MAX_ROWS_ORIGINAL);
    // Property-model keys shared with the adapter/view binder.
    static final PropertyModel.WritableIntPropertyKey STATUS_KEY =
            new PropertyModel.WritableIntPropertyKey();
    static final PropertyModel.WritableIntPropertyKey SCROLL_TO_CATEGORY_KEY =
            new PropertyModel.WritableIntPropertyKey();
    static final PropertyModel
            .ReadableObjectPropertyKey<ListModel<ExploreSitesCategory>> CATEGORY_LIST_KEY =
            new PropertyModel.ReadableObjectPropertyKey<>();
    static final PropertyModel.ReadableIntPropertyKey DENSE_VARIATION_KEY =
            new PropertyModel.ReadableIntPropertyKey();
    static final PropertyModel.ReadableIntPropertyKey MAX_COLUMNS_KEY =
            new PropertyModel.ReadableIntPropertyKey();
    static final PropertyModel.ReadableIntPropertyKey MAX_ROWS_KEY =
            new PropertyModel.ReadableIntPropertyKey();
    // Sentinel: the page URL fragment did not name a category to scroll to.
    private static final int UNKNOWN_NAV_CATEGORY = -1;
    /** Loading state of the catalog, published through {@link #STATUS_KEY}. */
    @IntDef({CatalogLoadingState.LOADING, CatalogLoadingState.SUCCESS, CatalogLoadingState.ERROR,
            CatalogLoadingState.LOADING_NET})
    @Retention(RetentionPolicy.SOURCE)
    public @interface CatalogLoadingState {
        int LOADING = 1; // Loading catalog info from disk.
        int SUCCESS = 2;
        int ERROR = 3; // Error retrieving catalog resources from internet.
        int LOADING_NET = 4; // Retrieving catalog resources from internet.
    }
    private NativePageHost mHost;
    private Tab mTab;
    // Non-null only after a successful catalog load; saves page state when navigating away.
    private TabObserver mTabObserver;
    private Profile mProfile;
    private ViewGroup mView;
    private RecyclerView mRecyclerView;
    private StableScrollLayoutManager mLayoutManager;
    private String mTitle;
    private PropertyModel mModel;
    private ContextMenuManager mContextMenuManager;
    // Category id parsed from the URL fragment, or UNKNOWN_NAV_CATEGORY.
    private int mNavigateToCategory;
    // True once the catalog callback has run (whether it succeeded or failed).
    private boolean mIsLoaded;
    private int mInitialScrollPosition;
    // Ensures the "Scrolled" user action is reported at most once per page instance.
    private boolean mScrollUserActionReported;
    @DenseVariation
    private int mDenseVariation;
    private int mMaxColumns;
    /**
     * Parcelable snapshot of this page's UI state: the layout manager's saved state plus the
     * wall-clock time it was captured. Serialized into the navigation entry so that "back"
     * navigation can restore the scroll position.
     */
    @VisibleForTesting
    static class PageState implements Parcelable {
        private Parcelable mStableLayoutManagerState;
        private Long mLastTimestamp;
        public static final Creator<PageState> CREATOR = new Creator<PageState>() {
            @Override
            public PageState createFromParcel(Parcel in) {
                return new PageState(in);
            }
            @Override
            public PageState[] newArray(int size) {
                return new PageState[size];
            }
        };
        PageState(Parcel parcel) {
            // Read order must mirror writeToParcel: timestamp first, then layout state.
            mLastTimestamp = parcel.readLong();
            mStableLayoutManagerState = parcel.readParcelable(getClass().getClassLoader());
        }
        PageState(Long lastTimestamp, Parcelable stableScrollLayoutManagerState) {
            mLastTimestamp = lastTimestamp;
            mStableLayoutManagerState = stableScrollLayoutManagerState;
        }
        @Override
        public int describeContents() {
            return 0;
        }
        @Override
        public void writeToParcel(Parcel dest, int flags) {
            dest.writeLong(mLastTimestamp);
            dest.writeParcelable(mStableLayoutManagerState, flags);
        }
        public long getLastTimestamp() {
            return mLastTimestamp;
        }
        public Parcelable getStableLayoutManagerState() {
            return mStableLayoutManagerState;
        }
    }
    /**
     * Returns whether the given host is the ExploreSitesPage's host. Does not check the
     * scheme, which is required to fully validate a URL.
     * @param host The host to check
     * @return Whether this host is the ExploreSitesPage host.
     */
    public static boolean isExploreSitesHost(String host) {
        return UrlConstants.EXPLORE_HOST.equals(host);
    }
    /**
     * Create a new instance of the explore sites page.
     * @param activity Activity used to inflate the layout and resolve resources.
     * @param host The native page host that owns this page.
     * @param tab The tab this page is displayed in.
     * @param tabModelSelector Used by the navigation delegate to open sites in tabs.
     */
    public ExploreSitesPage(
            Activity activity, NativePageHost host, Tab tab, TabModelSelector tabModelSelector) {
        super(host);
        mHost = host;
        mTab = tab;
        mTitle = activity.getString(R.string.explore_sites_title);
        mView = (ViewGroup) activity.getLayoutInflater().inflate(
                R.layout.explore_sites_page_layout, null);
        mProfile = Profile.fromWebContents(mTab.getWebContents());
        // Pick row/column limits based on the active density variation.
        mDenseVariation = ExploreSitesBridge.getDenseVariation();
        int maxRows;
        switch (mDenseVariation) {
            case DenseVariation.DENSE_TITLE_BOTTOM:
                maxRows = MAX_ROWS_DENSE_TITLE_BOTTOM;
                mMaxColumns = MAX_COLUMNS_DENSE_TITLE_BOTTOM;
                break;
            case DenseVariation.DENSE_TITLE_RIGHT:
                maxRows = MAX_ROWS_DENSE_TITLE_RIGHT;
                mMaxColumns = MAX_COLUMNS_DENSE_TITLE_RIGHT;
                break;
            default:
                maxRows = MAX_ROWS_ORIGINAL;
                mMaxColumns = MAX_COLUMNS_ORIGINAL;
        }
        mModel = new PropertyModel
                         .Builder(STATUS_KEY, SCROLL_TO_CATEGORY_KEY, CATEGORY_LIST_KEY,
                                 MAX_COLUMNS_KEY, MAX_ROWS_KEY, DENSE_VARIATION_KEY)
                         .with(CATEGORY_LIST_KEY, new ListModel<>())
                         .with(STATUS_KEY, CatalogLoadingState.LOADING)
                         .with(MAX_COLUMNS_KEY, mMaxColumns)
                         .with(MAX_ROWS_KEY, maxRows)
                         .with(DENSE_VARIATION_KEY, mDenseVariation)
                         .build();
        Context context = mView.getContext();
        mLayoutManager = new StableScrollLayoutManager(context);
        // Set dimensions for iconGenerator; dense variations use smaller, less rounded icons.
        int iconSizePx;
        int textSizeDimensionResource;
        int iconRadius;
        boolean isDense = ExploreSitesBridge.isDense(mDenseVariation);
        if (isDense) {
            iconSizePx = context.getResources().getDimensionPixelSize(
                    R.dimen.explore_sites_dense_icon_size);
            iconRadius = context.getResources().getDimensionPixelSize(
                    R.dimen.explore_sites_dense_icon_corner_radius);
            textSizeDimensionResource = R.dimen.explore_sites_dense_icon_text_size;
        } else {
            iconSizePx = context.getResources().getDimensionPixelSize(R.dimen.tile_view_icon_size);
            textSizeDimensionResource = R.dimen.tile_view_icon_text_size;
            iconRadius = iconSizePx / 2; // Fully circular icons in the original layout.
        }
        RoundedIconGenerator iconGenerator =
                new RoundedIconGenerator(iconSizePx, iconSizePx, iconRadius,
                        ApiCompatibilityUtils.getColor(
                                context.getResources(), R.color.default_favicon_background_color),
                        context.getResources().getDimensionPixelSize(textSizeDimensionResource));
        NativePageNavigationDelegateImpl navDelegate = new NativePageNavigationDelegateImpl(
                activity, mProfile, host, tabModelSelector, mTab);
        // ExploreSitePage is recreated upon reparenting. Safe to use |activity|.
        Runnable closeContextMenuCallback = activity::closeContextMenu;
        mContextMenuManager = createContextMenuManager(
                navDelegate, closeContextMenuCallback, CONTEXT_MENU_USER_ACTION_PREFIX);
        mTab.getWindowAndroid().addContextMenuCloseListener(mContextMenuManager);
        CategoryCardAdapter adapterDelegate = new CategoryCardAdapter(
                mModel, mLayoutManager, iconGenerator, mContextMenuManager, navDelegate, mProfile);
        mRecyclerView = mView.findViewById(R.id.explore_sites_category_recycler);
        CategoryCardViewHolderFactory factory = createCategoryCardViewHolderFactory();
        RecyclerViewAdapter<CategoryCardViewHolderFactory.CategoryCardViewHolder, Void> adapter =
                new RecyclerViewAdapter<>(adapterDelegate, factory);
        mRecyclerView.setLayoutManager(mLayoutManager);
        mRecyclerView.setAdapter(adapter);
        mRecyclerView.addOnScrollListener(new RecyclerView.OnScrollListener() {
            @Override
            public void onScrolled(RecyclerView v, int x, int y) {
                // y=0 on initial layout, even if the initial scroll position is requested
                // that is not 0. Once user starts scrolling via touch, the onScrolled comes
                // in bunches with |y| having a dY value of every small move, positive (scroll
                // down) or negative (scroll up) number of dps for each move.
                if (!mScrollUserActionReported && (y != 0)) {
                    mScrollUserActionReported = true;
                    RecordUserAction.record("Android.ExploreSitesPage.Scrolled");
                }
            }
        });
        mInitialScrollPosition = INITIAL_SCROLL_POSITION;
        // Kick off the asynchronous catalog load; translateToModel populates the model.
        ExploreSitesBridge.getCatalog(mProfile,
                ExploreSitesCatalogUpdateRequestSource.EXPLORE_SITES_PAGE, this::translateToModel);
        initWithView(mView);
        RecordUserAction.record("Android.ExploreSitesPage.Open");
    }
    /** Creates the context menu manager for this page. Overridable for testing. */
    protected ContextMenuManager createContextMenuManager(NativePageNavigationDelegate navDelegate,
            Runnable closeContextMenuCallback, String contextMenuUserActionPrefix) {
        return new ContextMenuManager(navDelegate,
                (enabled) -> {}, closeContextMenuCallback, contextMenuUserActionPrefix);
    }
    /**
     * Catalog-fetch callback: publishes the loading status and the filtered category list to
     * the model, restores any saved page state, and registers a tab observer that saves state
     * when the user navigates away.
     * @param categoryList Categories from the bridge; may be null on failure.
     */
    private void translateToModel(@Nullable List<ExploreSitesCategory> categoryList) {
        // If list is null or we received an empty catalog from network, show error.
        if (!ExploreSitesBridge.isValidCatalog(categoryList)) {
            mModel.set(STATUS_KEY, CatalogLoadingState.ERROR);
            mIsLoaded = true;
            return;
        }
        mModel.set(STATUS_KEY, CatalogLoadingState.SUCCESS);
        ListModel<ExploreSitesCategory> categoryListModel = mModel.get(CATEGORY_LIST_KEY);
        // Filter empty categories and categories with fewer sites originally than would fill a row.
        for (ExploreSitesCategory category : categoryList) {
            if ((category.getNumDisplayed() > 0) && (category.getMaxRows(mMaxColumns) > 0)) {
                categoryListModel.add(category);
            }
        }
        restorePageState();
        if (mTab != null) {
            // We want to observe page load start so that we can store the recycler view layout
            // state, for making "back" work correctly.
            mTabObserver = new EmptyTabObserver() {
                @Override
                public void onPageLoadStarted(Tab tab, GURL url) {
                    // Do not save state for navigations within the explore page itself.
                    if (UrlConstants.CHROME_NATIVE_SCHEME.equals(url.getScheme())
                            && UrlConstants.EXPLORE_HOST.equals(url.getHost())) {
                        return;
                    }
                    savePageState();
                }
            };
            mTab.addObserver(mTabObserver);
        }
        mIsLoaded = true;
    }
    /**
     * Restore the state of the page from a Base64 Encoded String located in this page's nav entry.
     * Falls back to the initial scroll position when no saved state exists.
     */
    private void restorePageState() {
        // Get Parcelable from the nav entry.
        PageState pageState = getPageStateFromNavigationEntry();
        if (pageState == null) {
            setInitialScrollPosition();
        } else {
            // Extract the previous timestamp.
            long previousTimestamp = pageState.getLastTimestamp();
            recordPageRevisitDelta(previousTimestamp);
            // Restore the scroll position.
            mLayoutManager.onRestoreInstanceState(pageState.getStableLayoutManagerState());
        }
    }
    /*
     * Retrieves the state of the page from the navigation entry and reconstitutes it into a
     * PageState using PageState.CREATOR. Returns null when the tab has no web contents or the
     * last committed entry holds no saved state.
     */
    private PageState getPageStateFromNavigationEntry() {
        if (mTab.getWebContents() == null) return null;
        NavigationController controller = mTab.getWebContents().getNavigationController();
        int index = controller.getLastCommittedEntryIndex();
        String pageStateFromNav =
                controller.getEntryExtraData(index, NAVIGATION_ENTRY_PAGE_STATE_KEY);
        if (TextUtils.isEmpty(pageStateFromNav)) return null;
        // Decode the Base64 payload back into a Parcel and unmarshal the PageState.
        byte[] parcelData = Base64.decode(pageStateFromNav, 0);
        Parcel parcel = Parcel.obtain();
        parcel.unmarshall(parcelData, 0, parcelData.length);
        parcel.setDataPosition(0);
        PageState pageState = PageState.CREATOR.createFromParcel(parcel);
        parcel.recycle();
        return pageState;
    }
    /** Scrolls to the category named by the URL fragment, or to the first position. */
    private void setInitialScrollPosition() {
        int scrollPosition = mInitialScrollPosition;
        if (mNavigateToCategory != UNKNOWN_NAV_CATEGORY) {
            scrollPosition = lookupCategory();
        }
        if (scrollPosition == RecyclerView.NO_POSITION) {
            // Default to first position.
            scrollPosition = 0;
        }
        mModel.set(SCROLL_TO_CATEGORY_KEY, scrollPosition);
    }
    /** Records the time delta between page visits if it is under 30 seconds for UMA usage. */
    private static void recordPageRevisitDelta(long previousTimestamp) {
        long currentTimestamp = System.currentTimeMillis();
        long delta = currentTimestamp - previousTimestamp;
        if (delta < BACK_NAVIGATION_TIMEOUT_FOR_UMA) {
            RecordHistogram.recordCustomTimesHistogram(
                    "ExploreSites.NavBackTime", delta, 1, BACK_NAVIGATION_TIMEOUT_FOR_UMA, 50);
        }
    }
    /** Save the state of the page to a parcelable located in this page's nav entry. */
    private void savePageState() {
        if (mTab == null || mTab.getWebContents() == null) return;
        NavigationController controller = mTab.getWebContents().getNavigationController();
        int index = controller.getLastCommittedEntryIndex();
        NavigationEntry entry = controller.getEntryAtIndex(index);
        if (entry == null) return;
        // Marshal the PageState into a Base64 string the navigation entry can store.
        PageState pageState = createPageState();
        Parcel parcel = Parcel.obtain();
        pageState.writeToParcel(parcel, 0);
        String marshalledState = Base64.encodeToString(parcel.marshall(), 0);
        parcel.recycle();
        controller.setEntryExtraData(index, NAVIGATION_ENTRY_PAGE_STATE_KEY, marshalledState);
    }
    /** Snapshots the current layout-manager state together with the current wall time. */
    private PageState createPageState() {
        Parcelable layoutManagerState = mLayoutManager.onSaveInstanceState();
        long timestamp = System.currentTimeMillis();
        return new PageState(timestamp, layoutManagerState);
    }
    /** Whether the catalog callback has completed (success or error). Test-only. */
    boolean isLoadedForTests() {
        return mIsLoaded;
    }
    /** Initial scroll position used before any saved state is restored. Test-only. */
    int initialScrollPositionForTests() {
        return mInitialScrollPosition;
    }
    @Override
    public String getHost() {
        return UrlConstants.EXPLORE_HOST;
    }
    @Override
    public String getTitle() {
        return mTitle;
    }
    /**
     * Parses the category id out of the URL fragment and, if the catalog has already loaded,
     * scrolls to that category.
     */
    @Override
    public void updateForUrl(String url) {
        super.updateForUrl(url);
        mNavigateToCategory = UNKNOWN_NAV_CATEGORY;
        try {
            mNavigateToCategory = Integer.parseInt(new URI(url).getFragment());
        } catch (URISyntaxException | NumberFormatException ignored) {
            // Missing or malformed fragment: keep UNKNOWN_NAV_CATEGORY.
        }
        if (mModel.get(STATUS_KEY) == CatalogLoadingState.SUCCESS) {
            int category = lookupCategory();
            if (category != RecyclerView.NO_POSITION) {
                mModel.set(SCROLL_TO_CATEGORY_KEY, category);
            }
        }
    }
    @Override
    public void destroy() {
        if (mTabObserver != null) {
            mTab.removeObserver(mTabObserver);
        }
        mTab.getWindowAndroid().removeContextMenuCloseListener(mContextMenuManager);
        super.destroy();
    }
    /**
     * Maps {@link #mNavigateToCategory} to its index in the category list model.
     * @return The adapter position, or {@link RecyclerView#NO_POSITION} if not found.
     */
    private int lookupCategory() {
        if (mNavigateToCategory != UNKNOWN_NAV_CATEGORY) {
            ListModel<ExploreSitesCategory> categoryList = mModel.get(CATEGORY_LIST_KEY);
            for (int i = 0; i < categoryList.size(); i++) {
                if (categoryList.get(i).getId() == mNavigateToCategory) {
                    return i;
                }
            }
        }
        return RecyclerView.NO_POSITION;
    }
    /** Chooses the view-holder factory matching the density variation. Overridable for tests. */
    protected CategoryCardViewHolderFactory createCategoryCardViewHolderFactory() {
        switch (mDenseVariation) {
            case DenseVariation.DENSE_TITLE_BOTTOM:
                return new CategoryCardViewHolderFactoryDenseTitleBottom();
            case DenseVariation.DENSE_TITLE_RIGHT:
                return new CategoryCardViewHolderFactoryDenseTitleRight();
            default:
                return new CategoryCardViewHolderFactory();
        }
    }
}
| |
/*
* Copyright (c) 2010-2021 Allette Systems (Australia)
* http://www.allette.com.au
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.pageseeder.diffx.algorithm;
import org.junit.jupiter.api.Test;
import org.pageseeder.diffx.action.Action;
import org.pageseeder.diffx.action.ActionsBuffer;
import org.pageseeder.diffx.api.DiffAlgorithm;
import org.pageseeder.diffx.api.Operator;
import org.pageseeder.diffx.handler.MuxHandler;
import org.pageseeder.diffx.test.DiffAssertions;
import org.pageseeder.diffx.test.GeneralToken;
import org.pageseeder.diffx.test.TestHandler;
import org.pageseeder.diffx.token.XMLToken;
import java.util.List;
import java.util.stream.Collectors;
/**
* Test case for algorithm implementations.
*
 * <p>To pass this test, an algorithm only needs to find the correct differences in a list of
 * tokens; XML awareness is not required.</p>
*
* <p>All algorithms must pass this test to show that they produce correct results.</p>
*
* @author Christophe Lauret
* @version 0.9.0
*/
public abstract class BasicGeneralDiffTest extends AlgorithmTest<XMLToken> {
  @Test
  public final void testGeneral_Empty() {
    String a = "";
    String b = "";
    String[] exp = new String[]{""};
    assertGeneralDiffOK(a, b, exp);
  }
  // Identical ----------------------------------------------------------------
  @Test
  public final void testGeneral_Identical1() {
    String a = "x";
    String b = "x";
    String[] exp = new String[]{"x"};
    assertGeneralDiffOK(a, b, exp);
  }
  @Test
  public final void testGeneral_Identical2() {
    String a = "xx";
    String b = "xx";
    String[] exp = new String[]{"xx"};
    assertGeneralDiffOK(a, b, exp);
  }
  @Test
  public final void testGeneral_Identical3() {
    String a = "xyz";
    String b = "xyz";
    String[] exp = new String[]{"xyz"};
    assertGeneralDiffOK(a, b, exp);
  }
  @Test
  public final void testGeneral_Identical10() {
    String a = "abcdefghij";
    String b = "abcdefghij";
    String[] exp = new String[]{"abcdefghij"};
    assertGeneralDiffOK(a, b, exp);
  }
  // Inserts and deletes ------------------------------------------------------
  @Test
  public final void testGeneral_Insert1() {
    String a = "";
    String b = "x";
    String[] exp = new String[]{"+x"};
    assertGeneralDiffOK(a, b, exp);
  }
  @Test
  public final void testGeneral_Delete1() {
    String a = "y";
    String b = "";
    String[] exp = new String[]{"-y"};
    assertGeneralDiffOK(a, b, exp);
  }
  @Test
  public final void testGeneral_Insert2() {
    String a = "";
    String b = "xx";
    String[] exp = new String[]{"+x+x"};
    assertGeneralDiffOK(a, b, exp);
  }
  @Test
  public final void testGeneral_Delete2() {
    String a = "yy";
    String b = "";
    String[] exp = new String[]{"-y-y"};
    assertGeneralDiffOK(a, b, exp);
  }
  @Test
  public final void testGeneral_Insert3() {
    String a = "x";
    String b = "xx";
    String[] exp = new String[]{"x+x", "+xx"};
    assertGeneralDiffOK(a, b, exp);
  }
  @Test
  public final void testGeneral_Delete3() {
    String a = "yy";
    String b = "y";
    String[] exp = new String[]{"y-y", "-yy"};
    assertGeneralDiffOK(a, b, exp);
  }
  @Test
  public final void testGeneral_Insert4() {
    String a = "xx";
    String b = "xxx";
    String[] exp = new String[]{"+xxx", "x+xx", "xx+x"};
    assertGeneralDiffOK(a, b, exp);
  }
  @Test
  public final void testGeneral_Delete4() {
    String a = "yyy";
    String b = "yy";
    String[] exp = new String[]{"-yyy", "y-yy", "yy-y"};
    assertGeneralDiffOK(a, b, exp);
  }
  @Test
  public final void testGeneral_Insert5() {
    String a = "   ";
    String b = " x ";
    String[] exp = new String[]{" +x "};
    assertGeneralDiffOK(a, b, exp);
  }
  @Test
  public final void testGeneral_Delete5() {
    String a = " y ";
    String b = "   ";
    String[] exp = new String[]{" -y "};
    assertGeneralDiffOK(a, b, exp);
  }
  @Test
  public final void testGeneral_Insert6() {
    String a = "test";
    String b = "testing";
    String[] exp = new String[]{"test+i+n+g"};
    assertGeneralDiffOK(a, b, exp);
  }
  @Test
  public final void testGeneral_Delete6() {
    String a = "testing";
    String b = "test";
    String[] exp = new String[]{"test-i-n-g"};
    assertGeneralDiffOK(a, b, exp);
  }
  @Test
  public final void testGeneral_Insert7() {
    String a = "taste";
    String b = "foretaste";
    String[] exp = new String[]{"+f+o+r+etaste"};
    assertGeneralDiffOK(a, b, exp);
  }
  @Test
  public final void testGeneral_Delete7() {
    String a = "foretaste";
    String b = "taste";
    String[] exp = new String[]{"-f-o-r-etaste"};
    assertGeneralDiffOK(a, b, exp);
  }
  @Test
  public final void testGeneral_Insert8() {
    String a = "bobb";
    String b = "baobab";
    String[] exp = new String[]{"b+aob+ab"};
    assertGeneralDiffOK(a, b, exp);
  }
  @Test
  public final void testGeneral_Delete8() {
    String a = "baobab";
    String b = "bobb";
    String[] exp = new String[]{"b-aob-ab"};
    assertGeneralDiffOK(a, b, exp);
  }
  @Test
  public final void testGeneral_Insert9() {
    String a = "libb";
    String b = "alibaba";
    String[] exp = new String[]{"+alib+ab+a"};
    assertGeneralDiffOK(a, b, exp);
  }
  @Test
  public final void testGeneral_Delete9() {
    String a = "alibaba";
    String b = "libb";
    String[] exp = new String[]{"-alib-ab-a"};
    assertGeneralDiffOK(a, b, exp);
  }
  @Test
  public final void testGeneral_Insert10() {
    String a = "ink";
    String b = "links";
    String[] exp = new String[]{"+link+s"};
    assertGeneralDiffOK(a, b, exp);
  }
  @Test
  public final void testGeneral_Delete10() {
    String a = "links";
    String b = "ink";
    String[] exp = new String[]{"-link-s"};
    assertGeneralDiffOK(a, b, exp);
  }
  @Test
  public final void testGeneral_Insert11() {
    String a = "ink";
    String b = "inks";
    String[] exp = new String[]{"ink+s"};
    assertGeneralDiffOK(a, b, exp);
  }
  @Test
  public final void testGeneral_Delete11() {
    String a = "inks";
    String b = "ink";
    String[] exp = new String[]{"ink-s"};
    assertGeneralDiffOK(a, b, exp);
  }
  @Test
  public final void testGeneral_Insert12() {
    String a = "ink";
    String b = "link";
    String[] exp = new String[]{"+link"};
    assertGeneralDiffOK(a, b, exp);
  }
  @Test
  public final void testGeneral_Delete12() {
    String a = "link";
    String b = "ink";
    String[] exp = new String[]{"-link"};
    assertGeneralDiffOK(a, b, exp);
  }
  // Replacements -------------------------------------------------------------
  @Test
  public final void testGeneral_Replace1() {
    String a = "y";
    String b = "x";
    String[] exp = new String[]{"+x-y", "-y+x"};
    assertGeneralDiffOK(a, b, exp);
  }
  @Test
  public final void testGeneral_Replace2() {
    String a = "yy";
    String b = "xx";
    String[] exp = new String[]{
        "+x+x-y-y",
        "+x-y+x-y",
        "+x-y-y+x",
        "-y+x-y+x",
        "-y-y+x+x",
        "-y+x+x-y"
    };
    assertGeneralDiffOK(a, b, exp);
  }
  @Test
  public final void testGeneral_Replace3() {
    String a = "xbx";
    String b = "xax";
    String[] exp = new String[]{"x+a-bx", "x-b+ax"};
    assertGeneralDiffOK(a, b, exp);
  }
  @Test
  public final void testGeneral_Replace4() {
    String a = "bxb";
    String b = "axa";
    String[] exp = new String[]{
        "+a-bx+a-b",
        "+a-bx-b+a",
        "-b+ax+a-b",
        "-b+ax-b+a",
    };
    assertGeneralDiffOK(a, b, exp);
  }
  @Test
  public final void testGeneral_Replace5() {
    String a = "bxbx";
    String b = "axax";
    String[] exp = new String[]{
        "+a-bx+a-bx",
        "+a-bx-b+ax",
        "-b+ax+a-bx",
        "-b+ax-b+ax",
    };
    assertGeneralDiffOK(a, b, exp);
  }
  @Test
  public final void testGeneral_Replace6() {
    String a = "xbxb";
    String b = "xaxa";
    String[] exp = new String[]{
        "x+a-bx+a-b",
        "x+a-bx-b+a",
        "x-b+ax-b+a",
        "x-b+ax+a-b"
    };
    assertGeneralDiffOK(a, b, exp);
  }
  @Test
  public final void testGeneral_Replace7() {
    String a = "bxax";
    String b = "axbx";
    String[] exp = new String[]{
        "+a+xbx-a-x",
        "+a+xb-x-ax",
        "+a-bx+b-ax",
        "-b-xax+b+x",
        "-b+ax-a+bx",
        "-b-xa+x+bx",
    };
    assertGeneralDiffOK(a, b, exp);
  }
  @Test
  public final void testGeneral_Replace8() {
    String a = "bxbx";
    String b = "axax";
    String[] exp = new String[]{
        "+a-bx+a-bx",
        "+a-bx-b+ax",
        "-b+ax+a-bx",
        "-b+ax-b+ax"
    };
    assertGeneralDiffOK(a, b, exp);
  }
  @Test
  public final void testGeneral_Replace9() {
    String a = "bxbxb";
    String b = "axaxa";
    String[] exp = new String[]{
        "-b+ax-b+ax-b+a",
        "-b+ax-b+ax+a-b",
        "-b+ax+a-bx+a-b",
        "+a-bx+a-bx-b+a",
        "+a-bx+a-bx+a-b",
    };
    assertGeneralDiffOK(a, b, exp);
  }
  @Test
  public final void testGeneral_Replace10() {
    String a = "bxaxb";
    String b = "axbxa";
    String[] exp = new String[]{
        "+a+xbxa-x-b",
        "-b-xaxb+x+a"
    };
    assertGeneralDiffOK(a, b, exp);
  }
  @Test
  public final void testGeneral_Replace11() {
    String a = "xbxbx";
    String b = "xaxax";
    String[] exp = new String[]{
        "x+a-bx+a-bx",
        "x+a-bx-b+ax",
        "x-b+ax+a-bx",
        "x-b+ax-b+ax"
    };
    assertGeneralDiffOK(a, b, exp);
  }
  // More complex cases -------------------------------------------------------
  @Test
  public final void testGeneral_Complex1() {
    String a = "bab";
    String b = "aba";
    String[] exp = new String[]{
        "+aba-b",
        "-bab+a"
    };
    assertGeneralDiffOK(a, b, exp);
  }
  @Test
  public final void testGeneral_Complex2() {
    String a = "baba";
    String b = "abab";
    String[] exp = new String[]{
        "+abab-a",
        "-baba+b"
    };
    assertGeneralDiffOK(a, b, exp);
  }
  @Test
  public final void testGeneral_Complex3() {
    String a = "babab";
    String b = "ababa";
    String[] exp = new String[]{
        "+ababa-b",
        "-babab+a",
    };
    assertGeneralDiffOK(a, b, exp);
  }
  @Test
  public final void testGeneral_Complex4() {
    String a = "two little";
    String b = "one little";
    String[] exp = new String[]{
        "-t-wo+n+e little"
    };
    assertGeneralDiffOK(a, b, exp);
  }
  @Test
  public final void testGeneral_Complex5() {
    String a = "too little";
    String b = "one little";
    String[] exp = new String[]{
        "-t-oo+n+e little",
        "-to+n+e-o little",
        "-to+n-o+e little",
        "-to-o+n+e little"
    };
    assertGeneralDiffOK(a, b, exp);
  }
  @Test
  public final void testGeneral_Complex6() {
    String a = "bilabial";
    String b = "balaclava";
    String[] exp = new String[]{
        "b+a+l+a+c-ila+v-b-ia-l",
        "b-i+ala+c-b-i+la+v-l+a",
        "b+a-ila+c-b-i-al+a+v+a",
        "b+a-ila+c+l-b-ia+v+a-l",
        "b-i+ala-b-i-a+cl+a+v+a",
        "b-i-la-b-i+la+cl+a+v+a",
        "b+a-ila-b+c+l-ia+v+a-l",
        "b+a-ila+c+l+a+v-b-ia-l"
    };
    assertGeneralDiffOK(a, b, exp);
  }
  @Test
  public final void testGeneral_Complex7() {
    String a = "Sunday";
    String b = "Saturday";
    String[] exp = new String[]{
        "S+a+tu+r-nday",
        "S+a+tu-n+rday"
    };
    assertGeneralDiffOK(a, b, exp);
  }
  @Test
  public final void testGeneral_Complex9() {
    String a = "Monday Sunday";
    String b = "Monday Tuesday Sunday";
    String[] exp = new String[]{
        "Monday +T+u+e+s+d+a+y+ Sunday",
        "Monday+ +T+u+e+s+d+a+y Sunday",
        "Mon+d+a+y+ +T+u+e+sday Sunday"
    };
    assertGeneralDiffOK(a, b, exp);
  }
  @Test
  public final void testGeneral_Complex10() {
    String a = "Monday Tuesday Sunday";
    String b = "Monday Sunday";
    String[] exp = new String[]{
        "Monday -T-u-e-s-d-a-y- Sunday",
        "Monday- -T-u-e-s-d-a-y Sunday",
        "Mon-d-a-y- -T-u-e-sday Sunday"
    };
    assertGeneralDiffOK(a, b, exp);
  }
  @Test
  public final void testGeneral_Complex11() {
    String a = "A train";
    String b = "A car";
    String[] exp = new String[]{
        "A +c+a-tr-a-i-n",
        "A +c-t-ra+r-i-n",
        "A -t+c-ra-i+r-n",
        "A -t-r+ca-i-n+r",
        "A -t-r+ca-i+r-n"
    };
    assertGeneralDiffOK(a, b, exp);
  }
  @Test
  public final void testGeneral_Complex12() {
    String a = "A train";
    String b = "The car";
    String[] exp = new String[]{
        "+T+h-A+e -t-r+ca-i-n+r",
        "+T+h+e-A -t+c-ra-i+r-n",
        "+T+h+e-A +c+a-tr-a-i-n",
        "+T+h+e-A +c-t-ra+r-i-n",
        "-A+T+h+e -t-r+ca-i-n+r",
        "-A+T+h+e -t-r+ca-i+r-n"
    };
    assertGeneralDiffOK(a, b, exp);
  }
  @Test
  public final void testGeneral_Complex13() {
    String a = "A blue train";
    String b = "The red car";
    String[] exp = new String[]{
        "+T+h+e-A +r-b-l-ue+d +c+a-tr-a-i-n",
        "+T+h+e-A -b-l+r-ue+d -t+c-ra-i+r-n",
        "+T+h+e-A +r-b-l-ue+d +c-t-ra+r-i-n",
        "+T-A- -b-l-u+he -tr+e+d+ +ca-i-n+r",
        "+T+h-A- -b-l-ue -tr+e+d+ +ca+r-i-n",
        "+T+h-A+e -b-l-u+re+d +c-t+ar-a-i-n",
        "-A- -b-l-u+T+he -tr+e+d+ +ca-i-n+r",
        "-A- -b-l-u+T+he -tr+e+d+ +ca-i+r-n"
    };
    assertGeneralDiffOK(a, b, exp);
  }
  @Test
  public final void testGeneral_Complex14() {
    String a = "A big train";
    String b = "The little car";
    String[] exp = new String[]{
        "+T+h+e-A +l-bi+t+t+l+e-g +c+a-tr-a-i-n",
        "+T+h-A+e -b+li-g- t+t+l+e+ -r+ca-i-n+r",
        "+T+h+e-A +l-bi-g- t+t+l+e+ +c-ra+r-i-n",
        "-A+T+h+e -b+li-g- +tt-r+l+e+ +ca-i-n+r",
        "-A+T+h+e -b+li-g- t-r+t+l+e+ +ca-i-n+r",
        "-A+T+h+e -b+li-g- t-r+t+l+e+ +ca-i+r-n"
    };
    assertGeneralDiffOK(a, b, exp);
  }
  @Test
  public final void testGeneral_Complex15() {
    String a = "A big blue train";
    String b = "The little red car";
    String[] exp = new String[]{
        "+T+h+e-A +l-bi+t+t-g- -bl-ue -tr+e+d+ +ca+r-i-n",
        "+T+h+e-A +l-bi+t+t-g- -bl-ue -tr+e+d+ +ca-i-n+r",
        "+T+h-A+e -b+li+t-g- -b+tl-ue -tr+e+d+ +ca-i-n+r",
        "+T+h+e-A +l-bi-g- +t+t-bl-ue -tr+e+d+ +ca-i+r-n",
        "-A+T+h+e -b+li-g- -b+t+tl-ue -tr+e+d+ +ca-i-n+r",
        "-A+T+h+e -b+li-g- -b+t+tl-ue -tr+e+d+ +ca-i+r-n"
    };
    assertGeneralDiffOK(a, b, exp);
  }
  @Test
  public final void testGeneral_complex16() {
    String a = "abamx";
    String b = "xabx";
    assertGeneralDiffOK(a, b);
  }
  @Test
  public final void testGeneral_complex17() {
    String a = "abcdefghijkjmnopqrx";
    String b = "xabcdefghijkmnopqrx";
    assertGeneralDiffOK(a, b);
  }
  @Test
  public final void testGeneral_complex18() {
    String a = "rhrwpdpunwx";
    String b = "rhxrwpdunx";
    assertGeneralDiffOK(a, b);
  }
  @Test
  public final void testGeneral_complex19() {
    String a = "bddcdlohjt";
    String b = "tbdcllohjt";
    assertGeneralDiffOK(a, b);
  }
  @Test
  public final void testGeneral_complex20() {
    String a = "bddcdl";
    String b = "tbdcl";
    assertGeneralDiffOK(a, b);
  }
  // helpers
  // --------------------------------------------------------------------------
  /**
   * Asserts that diffing the two strings yields an applicable and correct result,
   * without checking the output against any specific expected form.
   *
   * @param textA The first ("from") string.
   * @param textB The second ("to") string.
   */
  public final void assertGeneralDiffOK(String textA, String textB) {
    // NB: the first parameter was previously misspelled `testA`; renamed for clarity.
    assertGeneralDiffOK(textA, textB, new String[]{});
  }
  /**
   * Asserts that diffing the two strings with the algorithm under test yields an
   * applicable and correct result matching one of the expected outputs (if any).
   *
   * @param textA The first ("from") string.
   * @param textB The second ("to") string.
   * @param exp   Acceptable outputs in test notation; empty to skip output matching.
   */
  public final void assertGeneralDiffOK(String textA, String textB, String[] exp) {
    DiffAlgorithm<XMLToken> algorithm = getDiffAlgorithm();
    assertGeneralDiffOK(textA, textB, algorithm, exp);
  }
  /**
   * Runs the given algorithm on the two strings (tokenized as general tokens) and asserts
   * that the resulting actions are applicable and correct, and match one of the expected
   * outputs when {@code exp} is non-empty. Prints diagnostics to stderr before rethrowing
   * on failure.
   *
   * @param textA     The first ("from") string.
   * @param textB     The second ("to") string.
   * @param algorithm The algorithm implementation to exercise.
   * @param exp       Acceptable outputs in test notation; empty to skip output matching.
   */
  public static void assertGeneralDiffOK(String textA, String textB, DiffAlgorithm<XMLToken> algorithm, String[] exp) {
    List<GeneralToken> seqA = GeneralToken.toList(textA);
    List<GeneralToken> seqB = GeneralToken.toList(textB);
    ActionsBuffer<XMLToken> af = new ActionsBuffer<>();
    TestHandler cf = new TestHandler();
    // Run the diff
    algorithm.diff(seqA, seqB, new MuxHandler<>(cf, af));
    // Extract output and actions
    String got = cf.getOutput();
    List<Action<XMLToken>> actions = af.getActions();
    // Check
    try {
      DiffAssertions.assertIsApplicable(seqA, seqB, actions);
      DiffAssertions.assertIsCorrect(seqA, seqB, actions);
      if (exp.length > 0) {
        DiffAssertions.assertMatchTestOutput(actions, exp);
      }
    } catch (AssertionError ex) {
      printGeneralErrorDetails(textA, textB, exp, got, actions);
      throw ex;
    }
  }
  /**
   * Print the error details: both inputs, the actual output, the acceptable expected
   * outputs (if any) and the raw list of actions, all to stderr.
   */
  private static void printGeneralErrorDetails(String textA, String textB, String[] exp, String got, List<Action<XMLToken>> actions) {
    System.err.println("+------------------------------------------------");
    System.err.println("| Input A: \"" + textA + "\"");
    System.err.println("| Input B: \"" + textB + "\"");
    System.err.println("| Output:  \"" + got + "\"");
    if (exp.length > 0) {
      System.err.print("| Expect:  ");
      for (String s : exp) System.err.print("\"" + s + "\" ");
      System.err.println();
    }
    System.err.print("| Actions: ");
    for (Action<XMLToken> action : actions) {
      System.err.print(action.operator() == Operator.DEL ? '-' : action.operator() == Operator.INS ? '+' : '=');
      System.err.print(action.tokens().stream().map(Object::toString).collect(Collectors.toList()));
    }
    System.err.println();
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.resourcemanager.webapp;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.WebResource;
import org.apache.hadoop.http.JettyUtils;
import org.apache.hadoop.util.Lists;
import org.apache.hadoop.yarn.api.records.ExecutionType;
import org.apache.hadoop.yarn.api.records.ExecutionTypeRequest;
import org.apache.hadoop.yarn.api.records.Priority;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.ResourceSizing;
import org.apache.hadoop.yarn.api.records.SchedulingRequest;
import org.apache.hadoop.yarn.api.resource.PlacementConstraint;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.activities.ActivityDiagnosticConstant;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.activities.GenericDiagnosticsCollector;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.MultivaluedMap;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.function.Predicate;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import static org.junit.Assert.assertEquals;
/**
* Some Utils for activities tests.
*/
/**
 * Utilities for scheduler/app activities tests: field-name constants for the
 * activities REST responses, plus helpers to traverse, verify and query the
 * JSON trees those endpoints return.
 */
public final class ActivitiesTestUtils {

  public static final String TOTAL_RESOURCE_INSUFFICIENT_DIAGNOSTIC_PREFIX =
      ActivityDiagnosticConstant.NODE_TOTAL_RESOURCE_INSUFFICIENT_FOR_REQUEST
          + ", " + GenericDiagnosticsCollector.RESOURCE_DIAGNOSTICS_PREFIX;

  public static final String UNMATCHED_PARTITION_OR_PC_DIAGNOSTIC_PREFIX =
      ActivityDiagnosticConstant.
          NODE_DO_NOT_MATCH_PARTITION_OR_PLACEMENT_CONSTRAINTS + ", "
          + GenericDiagnosticsCollector.PLACEMENT_CONSTRAINT_DIAGNOSTICS_PREFIX;

  /*
   * Field names in response of scheduler/app activities.
   */
  public static final String FN_ACT_ALLOCATIONS = "allocations";

  public static final String FN_ACT_DIAGNOSTIC = "diagnostic";

  public static final String FN_ACT_ALLOCATION_STATE = "allocationState";

  public static final String FN_ACT_FINAL_ALLOCATION_STATE =
      "finalAllocationState";

  public static final String FN_ACT_NODE_ID = "nodeId";

  public static final String FN_ACT_NODE_IDS = "nodeIds";

  public static final String FN_ACT_COUNT = "count";

  public static final String FN_ACT_APP_PRIORITY = "appPriority";

  public static final String FN_ACT_REQUEST_PRIORITY = "requestPriority";

  public static final String FN_ACT_ALLOCATION_REQUEST_ID =
      "allocationRequestId";

  public static final String FN_APP_ACT_CHILDREN = "children";

  public static final String FN_APP_ACT_ROOT = "appActivities";

  public static final String FN_SCHEDULER_ACT_NAME = "name";

  public static final String FN_SCHEDULER_ACT_ALLOCATIONS_ROOT = "root";

  public static final String FN_SCHEDULER_ACT_CHILDREN = "children";

  public static final String FN_SCHEDULER_ACT_ROOT = "activities";

  public static final String FN_SCHEDULER_BULK_ACT_ROOT = "bulkActivities";

  // Pre-compiled once instead of on every format() call; matches "{name}"
  // style placeholders.
  private static final Pattern PLACEHOLDER_PATTERN = Pattern.compile("\\{.*?}");

  private ActivitiesTestUtils() {
  }

  /**
   * Finds all nodes in the allocation tree (rooted at the "root" field of
   * {@code allocationObj}) that satisfy the given predicate.
   *
   * @param allocationObj allocation object containing a "root" activity node
   * @param p predicate applied to every node of the tree
   * @return matching nodes in depth-first order
   * @throws JSONException if expected fields are missing or malformed
   */
  public static List<JSONObject> findInAllocations(JSONObject allocationObj,
      Predicate<JSONObject> p) throws JSONException {
    List<JSONObject> target = new ArrayList<>();
    recursiveFindObj(allocationObj.getJSONObject(
        FN_SCHEDULER_ACT_ALLOCATIONS_ROOT), p,
        target);
    return target;
  }

  /**
   * Depth-first traversal collecting nodes accepted by {@code p}.
   * The "children" field may hold either a single object or an array.
   */
  private static void recursiveFindObj(JSONObject obj, Predicate<JSONObject> p,
      List<JSONObject> target) throws JSONException {
    if (p.test(obj)) {
      target.add(obj);
    }
    if (obj.has(FN_SCHEDULER_ACT_CHILDREN)) {
      JSONArray childrenObjs = obj.optJSONArray(FN_SCHEDULER_ACT_CHILDREN);
      if (childrenObjs != null) {
        for (int i = 0; i < childrenObjs.length(); i++) {
          recursiveFindObj(childrenObjs.getJSONObject(i), p, target);
        }
      } else {
        JSONObject childrenObj = obj.optJSONObject(FN_SCHEDULER_ACT_CHILDREN);
        // optJSONObject returns null when "children" is neither an array nor
        // an object; guard to avoid an NPE on recursion.
        if (childrenObj != null) {
          recursiveFindObj(childrenObj, p, target);
        }
      }
    }
  }

  /**
   * Builds a guaranteed-execution {@link SchedulingRequest} for tests.
   *
   * @param numContainers number of containers requested
   * @param priority request priority
   * @param allocReqId allocation request id
   * @param cores vcores per container
   * @param mem memory (MB) per container
   * @param placementConstraintExpression placement constraint, may be null
   * @param tags allocation tags for the request
   * @return the assembled scheduling request
   */
  public static SchedulingRequest schedulingRequest(int numContainers,
      int priority, long allocReqId, int cores, int mem,
      PlacementConstraint placementConstraintExpression, String... tags) {
    return SchedulingRequest.newBuilder()
        .priority(Priority.newInstance(priority))
        .allocationRequestId(allocReqId)
        .allocationTags(new HashSet<>(Arrays.asList(tags))).executionType(
            ExecutionTypeRequest.newInstance(ExecutionType.GUARANTEED, true))
        .resourceSizing(ResourceSizing
            .newInstance(numContainers, Resource.newInstance(mem, cores)))
        .placementConstraintExpression(placementConstraintExpression).build();
  }

  /**
   * Asserts the number of nodes in an allocation tree (root included)
   * equals {@code expectValue}; a missing root counts as zero nodes.
   */
  public static void verifyNumberOfNodes(JSONObject allocation, int expectValue)
      throws Exception {
    if (allocation.isNull(FN_SCHEDULER_ACT_ALLOCATIONS_ROOT)) {
      assertEquals("State of allocation is wrong", expectValue, 0);
    } else {
      assertEquals("State of allocation is wrong", expectValue,
          1 + getNumberOfNodes(
              allocation.getJSONObject(FN_SCHEDULER_ACT_ALLOCATIONS_ROOT)));
    }
  }

  /**
   * Counts descendants of the given node (the node itself is not counted);
   * "children" may be a single object or an array.
   */
  public static int getNumberOfNodes(JSONObject allocation) throws Exception {
    if (!allocation.isNull(FN_SCHEDULER_ACT_CHILDREN)) {
      Object object = allocation.get(FN_SCHEDULER_ACT_CHILDREN);
      if (object.getClass() == JSONObject.class) {
        return 1 + getNumberOfNodes((JSONObject) object);
      } else {
        int count = 0;
        for (int i = 0; i < ((JSONArray) object).length(); i++) {
          count += (1 + getNumberOfNodes(
              ((JSONArray) object).getJSONObject(i)));
        }
        return count;
      }
    } else {
      return 0;
    }
  }

  /** Asserts that field {@code nameToCheck} of the allocation equals {@code expectState}. */
  public static void verifyStateOfAllocations(JSONObject allocation,
      String nameToCheck, String expectState) throws Exception {
    assertEquals("State of allocation is wrong", expectState,
        allocation.get(nameToCheck));
  }

  /**
   * Asserts the number of allocations in an app/scheduler activities
   * response; a single-object "allocations" field counts as one.
   */
  public static void verifyNumberOfAllocations(JSONObject json, int expectValue)
      throws Exception {
    JSONObject activitiesJson;
    if (json.has(FN_APP_ACT_ROOT)) {
      activitiesJson = json.getJSONObject(FN_APP_ACT_ROOT);
    } else if (json.has(FN_SCHEDULER_ACT_ROOT)) {
      activitiesJson = json.getJSONObject(FN_SCHEDULER_ACT_ROOT);
    } else {
      throw new IllegalArgumentException("Can't parse allocations!");
    }
    if (activitiesJson.isNull(FN_ACT_ALLOCATIONS)) {
      assertEquals("Number of allocations is wrong", expectValue, 0);
    } else {
      Object object = activitiesJson.get(FN_ACT_ALLOCATIONS);
      if (object.getClass() == JSONObject.class) {
        assertEquals("Number of allocations is wrong", expectValue, 1);
      } else if (object.getClass() == JSONArray.class) {
        assertEquals("Number of allocations is wrong in: " + object,
            expectValue, ((JSONArray) object).length());
      }
    }
  }

  /**
   * Asserts the "name" chain of the allocation tree matches
   * {@code expectOrder} (names joined by '-').
   * NOTE: assumes the computed order is non-empty when root is present;
   * the trailing '-' produced by getQueueOrder is stripped here.
   */
  public static void verifyQueueOrder(JSONObject json, String expectOrder)
      throws Exception {
    String order = "";
    if (!json.isNull(FN_SCHEDULER_ACT_ALLOCATIONS_ROOT)) {
      JSONObject root = json.getJSONObject(FN_SCHEDULER_ACT_ALLOCATIONS_ROOT);
      order = root.getString(FN_SCHEDULER_ACT_NAME) + "-" + getQueueOrder(root);
    }
    assertEquals("Order of queue is wrong", expectOrder,
        order.substring(0, order.length() - 1));
  }

  /**
   * Walks the "children" chain collecting queue names (each followed by '-'),
   * stopping when an app node (one with "appPriority") is reached.
   */
  public static String getQueueOrder(JSONObject node) throws Exception {
    if (!node.isNull(FN_SCHEDULER_ACT_CHILDREN)) {
      Object children = node.get(FN_SCHEDULER_ACT_CHILDREN);
      if (children.getClass() == JSONObject.class) {
        if (!((JSONObject) children).isNull(FN_ACT_APP_PRIORITY)) {
          return "";
        }
        return ((JSONObject) children).getString(FN_SCHEDULER_ACT_NAME) + "-"
            + getQueueOrder((JSONObject) children);
      } else if (children.getClass() == JSONArray.class) {
        // StringBuilder instead of String concatenation inside the loop.
        StringBuilder order = new StringBuilder();
        for (int i = 0; i < ((JSONArray) children).length(); i++) {
          JSONObject child = (JSONObject) ((JSONArray) children).get(i);
          if (!child.isNull(FN_ACT_APP_PRIORITY)) {
            return "";
          }
          order.append(child.getString(FN_SCHEDULER_ACT_NAME)).append("-")
              .append(getQueueOrder(child));
        }
        return order.toString();
      }
    }
    return "";
  }

  /**
   * Returns the first node found under the given hierarchy of field names.
   * @throws IllegalArgumentException if any level of the hierarchy is absent
   */
  public static JSONObject getFirstSubNodeFromJson(JSONObject json,
      String... hierarchicalFieldNames) {
    return getSubNodesFromJson(json, hierarchicalFieldNames).get(0);
  }

  /**
   * Returns all nodes reachable by following the given field names level by
   * level; each level may expand a single object or an array of objects.
   * @throws IllegalArgumentException if a level yields no nodes
   */
  public static List<JSONObject> getSubNodesFromJson(JSONObject json,
      String... hierarchicalFieldNames) {
    List<JSONObject> results = Lists.newArrayList(json);
    for (String fieldName : hierarchicalFieldNames) {
      results = results.stream().filter(e -> e.has(fieldName))
          .flatMap(e -> getJSONObjects(e, fieldName).stream())
          .collect(Collectors.toList());
      if (results.isEmpty()) {
        throw new IllegalArgumentException("Can't find hierarchical fields "
            + Arrays.toString(hierarchicalFieldNames));
      }
    }
    return results;
  }

  /**
   * Extracts the object(s) stored under {@code fieldName}: a single object
   * yields a one-element list, an array yields all of its elements.
   */
  private static List<JSONObject> getJSONObjects(JSONObject json,
      String fieldName) {
    List<JSONObject> objects = new ArrayList<>();
    if (json.has(fieldName)) {
      try {
        Object tmpObj = json.get(fieldName);
        if (tmpObj.getClass() == JSONObject.class) {
          objects.add((JSONObject) tmpObj);
        } else if (tmpObj.getClass() == JSONArray.class) {
          for (int i = 0; i < ((JSONArray) tmpObj).length(); i++) {
            objects.add(((JSONArray) tmpObj).getJSONObject(i));
          }
        }
      } catch (JSONException e) {
        throw new RuntimeException(e);
      }
    }
    return objects;
  }

  /**
   * Asserts the number of allocation attempts ("children") of an app
   * allocation; a single object counts as one attempt.
   */
  public static void verifyNumberOfAllocationAttempts(JSONObject allocation,
      int expectValue) throws Exception {
    if (allocation.isNull(FN_APP_ACT_CHILDREN)) {
      assertEquals("Number of allocation attempts is wrong", expectValue, 0);
    } else {
      Object object = allocation.get(FN_APP_ACT_CHILDREN);
      if (object.getClass() == JSONObject.class) {
        assertEquals("Number of allocations attempts is wrong", expectValue, 1);
      } else if (object.getClass() == JSONArray.class) {
        assertEquals("Number of allocations attempts is wrong", expectValue,
            ((JSONArray) object).length());
      }
    }
  }

  /**
   * Issues a GET against the web resource (with optional query params),
   * asserts the response is JSON/UTF-8 and returns its JSON body.
   */
  public static JSONObject requestWebResource(WebResource webResource,
      MultivaluedMap<String, String> params) {
    if (params != null) {
      webResource = webResource.queryParams(params);
    }
    ClientResponse response = webResource.accept(MediaType.APPLICATION_JSON)
        .get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_JSON_TYPE + "; " + JettyUtils.UTF_8,
        response.getType().toString());
    return response.getEntity(JSONObject.class);
  }

  /**
   * Convert format using {name} (HTTP base) into %s (Java based).
   * @param format Initial format using {}.
   * @param args Arguments for the format.
   * @return New format using %s.
   */
  public static String format(String format, Object... args) {
    Matcher m = PLACEHOLDER_PATTERN.matcher(format);
    String newFormat = m.replaceAll("%s");
    return String.format(newFormat, args);
  }
}
| |
/*
* Copyright (c) 2013, SRI International
* All rights reserved.
* Licensed under the The BSD 3-Clause License;
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at:
*
* http://opensource.org/licenses/BSD-3-Clause
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* Neither the name of the aic-expresso nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.sri.ai.grinder.tester;
import static com.sri.ai.expresso.helper.Expressions.TRUE;
import static com.sri.ai.expresso.helper.Expressions.ZERO;
import static com.sri.ai.expresso.helper.Expressions.getVariablesBeingReferenced;
import static com.sri.ai.expresso.helper.Expressions.makeSymbol;
import static com.sri.ai.expresso.helper.Expressions.parse;
import static com.sri.ai.util.Util.in;
import static com.sri.ai.util.Util.join;
import static com.sri.ai.util.Util.list;
import static com.sri.ai.util.Util.pickKElementsWithoutReplacement;
import static com.sri.ai.util.Util.removeFromSetNonDestructively;
import java.util.Collection;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;
import com.google.common.annotations.Beta;
import com.sri.ai.expresso.api.Expression;
import com.sri.ai.expresso.api.QuantifiedExpression;
import com.sri.ai.expresso.api.Type;
import com.sri.ai.expresso.helper.SubExpressionsDepthFirstIterator;
import com.sri.ai.grinder.api.Constraint;
import com.sri.ai.grinder.api.Context;
import com.sri.ai.grinder.api.SingleQuantifierEliminationProblem;
import com.sri.ai.grinder.api.SingleVariableConstraint;
import com.sri.ai.grinder.api.Theory;
import com.sri.ai.grinder.core.constraint.IncompleteMultiVariableConstraint;
import com.sri.ai.grinder.core.solver.DefaultSingleQuantifierEliminationProblem;
import com.sri.ai.grinder.group.AssociativeCommutativeGroup;
import com.sri.ai.grinder.helper.AssignmentMapsIterator;
import com.sri.ai.grinder.helper.GrinderUtil;
import com.sri.ai.grinder.interpreter.Assignment;
import com.sri.ai.grinder.interpreter.BruteForceCommonInterpreter;
import com.sri.ai.grinder.library.boole.And;
import com.sri.ai.grinder.library.boole.ThereExists;
import com.sri.ai.grinder.library.indexexpression.IndexExpressions;
import com.sri.ai.grinder.library.set.Sets;
import com.sri.ai.grinder.rewriter.api.Simplifier;
import com.sri.ai.util.Util;
import com.sri.ai.util.base.BinaryFunction;
import com.sri.ai.util.base.NullaryFunction;
/**
* A class for testing SGDPLL(T) main components.
*
* @author braz
*
*/
@Beta
public class SGDPLLTTester {
	/** Progress is reported every this many completed tests. */
	private static final int NUMBER_OF_TESTS_TO_INDICATE_ON_CONSOLE = 1;
	/**
	 * Sink for detailed trace messages; intentionally a no-op by default.
	 * @param message the trace line that would be printed if enabled
	 */
	private static void output(String message) {
		// System.out.println(message); // uncomment out if detailed output is desired.
	}
/**
* Measures the time taken to run a given number of single-variable constraint tests with a maximum number of literals,
* without brute-force correctness testing.
* Throws an {@link Error} with the failure description if a test fails.
* @param random
* @param theoryTestingSupport
* @param numberOfTests
* @param maxNumberOfLiterals
* @param outputCount
*/
public static long measureTimeSingleVariableConstraints(TheoryTestingSupport theoryTestingSupport, long numberOfTests, int maxNumberOfLiterals, boolean outputCount) {
long start = System.currentTimeMillis();
testSingleVariableConstraints(false, theoryTestingSupport, numberOfTests, maxNumberOfLiterals, false /* no correctness test */);
long result = System.currentTimeMillis() - start;
if (outputCount) {
System.out.println("Total time: " + result + " ms.");
}
return result;
}
/**
* Given a theory and a number <code>n</code> of single-variable constraint tests,
* generates <code>n</code> formulas in the theory
* and see if those detected as unsatisfiable by the corresponding solver
* are indeed unsatisfiable (checked by brute force).
* Throws an {@link Error} with the failure description if a test fails.
* @param theoryTestingSupport
* @param numberOfTests
* @param maxNumberOfLiterals
* @param outputCount
*/
public static void testSingleVariableConstraints(boolean testAgainstBruteForce, TheoryTestingSupport theoryTestingSupport, long numberOfTests, int maxNumberOfLiterals, boolean outputCount) {
Context context = theoryTestingSupport.makeContextWithTestingInformation();
NullaryFunction<Constraint> makeInitialConstraint = () -> theoryTestingSupport.getTheory().makeSingleVariableConstraint(parse(theoryTestingSupport.pickTestingVariableAtRandom()), context);
Function<Constraint, Expression> makeRandomLiteral = c -> theoryTestingSupport.makeRandomLiteralOn(((SingleVariableConstraint)c).getVariable().toString(), context);
boolean isComplete = theoryTestingSupport.getTheory().singleVariableConstraintIsCompleteWithRespectToItsVariable();
TestRunner tester = isComplete? SGDPLLTTester::testCompleteSatisfiability : SGDPLLTTester::testIncompleteSatisfiability;
String problemName = (isComplete? "complete" : "incomplete") + " satisfiability for single-variable constraints";
runTesterGivenOnSuccessiveConjunctionsOfLiterals(problemName, tester, numberOfTests, maxNumberOfLiterals, testAgainstBruteForce, theoryTestingSupport, makeInitialConstraint, makeRandomLiteral, outputCount, context);
}
/**
* Given a theory and a number <code>n</code> of multi-variable constraint tests,
* generates <code>n</code> formulas in the theory
* and see if those detected as unsatisfiable by the corresponding incomplete solver
* are indeed unsatisfiable (checked by brute force).
* Note that we do not test if all unsatisfiable formulas are detected as such, because the solver is assumed to be incomplete.
* Throws an {@link Error} with the failure description if a test fails.
* @param theoryTestingSupport
* @param numberOfTests
* @param maxNumberOfLiterals
* @param outputCount
*/
public static void testIncompleteMultiVariableConstraints(
boolean testAgainstBruteForce, TheoryTestingSupport theoryTestingSupport, long numberOfTests, int maxNumberOfLiterals, boolean outputCount) {
NullaryFunction<Constraint> makeInitialConstraint = () -> new IncompleteMultiVariableConstraint(theoryTestingSupport.getTheory());
Context context = theoryTestingSupport.makeContextWithTestingInformation();
Function<Constraint, Expression> makeRandomLiteral = c -> theoryTestingSupport.makeRandomLiteral(context);
TestRunner tester = SGDPLLTTester::testIncompleteSatisfiability; // {@link IncompleteMultiVariableConstraint} is incomplete
runTesterGivenOnSuccessiveConjunctionsOfLiterals("incomplete satisfiability", tester, numberOfTests, maxNumberOfLiterals, testAgainstBruteForce, theoryTestingSupport, makeInitialConstraint, makeRandomLiteral, outputCount, context);
}
/**
* Given a theory and a number <code>n</code> of multi-variable constraint tests,
* generates <code>n</code> formulas in the theory
* and see if those detected as unsatisfiable by the corresponding solver
* are indeed unsatisfiable (checked by brute force).
* Throws an {@link Error} with the failure description if a test fails.
* @param theoryTestingSupport
* @param numberOfTests
* @param maxNumberOfLiterals
* @param outputCount
*/
public static void testCompleteMultiVariableConstraints(
boolean testAgainstBruteForce, TheoryTestingSupport theoryTestingSupport, long numberOfTests, int maxNumberOfLiterals, boolean outputCount) {
Context context = theoryTestingSupport.makeContextWithTestingInformation();
NullaryFunction<Constraint> makeInitialConstraint = () -> context;
Function<Constraint, Expression> makeRandomLiteral = c -> theoryTestingSupport.makeRandomLiteral(context);
TestRunner tester = SGDPLLTTester::testCompleteSatisfiability; // CompleteMultiVariableContext is complete
runTesterGivenOnSuccessiveConjunctionsOfLiterals("complete satisfiability", tester, numberOfTests, maxNumberOfLiterals, testAgainstBruteForce, theoryTestingSupport, makeInitialConstraint, makeRandomLiteral, outputCount, context);
}
private static interface TestRunner {
void runOneTest(Collection<Expression> literals, Constraint constraint, boolean testAgainstBruteForce, TheoryTestingSupport theoryTestingSupport, Context context) throws Error;
}
/**
* Generates a number of tests based on a conjunction each, which is formed by incrementally adding literals to it.
* The conjunction is provided to a {@link TestRunner} which runs some test based on it.
* Throws an {@link Error} with the failure description if a test fails.
* @param tester the {@link TestRunner}
* @param numberOfTests the number of tests to run
* @param maxNumberOfLiterals the maximum number of literals to add to each test conjunction
* @param theoryTestingSupport
* @param makeInitialConstraint a thunk generating new constraints equivalent to TRUE
* @param makeRandomLiteralGivenConstraint a function generating appropriate new literals (given generated constraint if needed)
* @param outputCount whether to output the test count
* @param context a context
* @throws Error
*/
public static void runTesterGivenOnSuccessiveConjunctionsOfLiterals(
String problemName,
TestRunner tester,
long numberOfTests,
int maxNumberOfLiterals,
boolean testAgainstBruteForce,
TheoryTestingSupport theoryTestingSupport,
NullaryFunction<Constraint> makeInitialConstraint,
Function<Constraint, Expression> makeRandomLiteralGivenConstraint,
boolean outputCount,
Context context) throws Error {
for (int i = 1; i != numberOfTests + 1; i++) {
Constraint constraint = makeInitialConstraint.apply();
Collection<Expression> literals = new LinkedHashSet<>();
output("\n\nStarting new conjunction");
for (int j = 0; !constraint.isContradiction() && j != maxNumberOfLiterals; j++) {
Expression literal = makeRandomLiteralGivenConstraint.apply(constraint);
constraint = addLiteralToConstraintAndTest(tester, literal, constraint, literals, testAgainstBruteForce, theoryTestingSupport, context);
}
indicateCompletionOfTest(outputCount, problemName, i, testAgainstBruteForce, theoryTestingSupport);
}
}
/**
* @param outputCount
* @param problemName
* @param i
* @param testAgainstBruteForce
* @param theoryTestingSupport
*/
public static void indicateCompletionOfTest(boolean outputCount, String problemName, int i, boolean testAgainstBruteForce, TheoryTestingSupport theoryTestingSupport) {
if (outputCount && i % NUMBER_OF_TESTS_TO_INDICATE_ON_CONSOLE == 0) {
if (testAgainstBruteForce) {
System.out.println("Tested (comparing against brute-force solution) " + i + " examples of " + problemName + " for " + theoryTestingSupport);
}
else {
System.out.println("Computed (without comparing against brute-force solution) " + i + " examples of " + problemName + " for " + theoryTestingSupport);
}
}
}
private static Constraint addLiteralToConstraintAndTest(TestRunner tester, Expression literal, Constraint constraint, Collection<Expression> literals, boolean testAgainstBruteForce, TheoryTestingSupport theoryTestingSupport, Context context) throws Error {
output("Constraint is " + constraint);
output("Adding " + literal + " (literals added so far: " + join(literals, " and ") + ")");
literals.add(literal);
Constraint newConstraint = constraint.conjoin(literal, context);
output("New constraint is " + newConstraint);
tester.runOneTest(literals, newConstraint, testAgainstBruteForce, theoryTestingSupport, context);
return newConstraint;
}
private static void testIncompleteSatisfiability(
Collection<Expression> literals,
Constraint constraint,
boolean testAgainstBruteForce,
TheoryTestingSupport theoryTestingSupport,
Context context)
throws Error {
if (constraint.isContradiction()) {
solverSaysItIsUnsatisfiable(literals, testAgainstBruteForce, theoryTestingSupport, context);
}
else {
// if constraint is not null, the conjunction of literals may or may not be satisfiable,
// because solver is incomplete, so in this case we do not check.
output("SolverUnderAssignment does not know yet if it is satisfiable or not. Current constraint is " + constraint);
}
}
private static void testCompleteSatisfiability(
Collection<Expression> literals,
Constraint constraint,
boolean testAgainstBruteForce,
TheoryTestingSupport theoryTestingSupport,
Context context)
throws Error {
if (constraint.isContradiction()) {
solverSaysItIsUnsatisfiable(literals, testAgainstBruteForce, theoryTestingSupport, context);
}
else {
solverSaysItIsSatisfiable(literals, constraint, testAgainstBruteForce, theoryTestingSupport, context);
}
}
/**
* @param literals
* @param constraint
* @param theoryTestingSupport
* @param context
* @throws Error
*/
protected static void solverSaysItIsSatisfiable(Collection<Expression> literals, Constraint constraint, boolean testAgainstBruteForce, TheoryTestingSupport theoryTestingSupport, Context context) throws Error {
if (testAgainstBruteForce) {
output("SolverUnderAssignment thinks it is satisfiable. Current constraint is " + constraint);
Expression literalsConjunction = And.make(literals);
boolean isUnsatisfiable = ! SGDPLLTTester.isSatisfiableByBruteForce(literalsConjunction, theoryTestingSupport, context);;
if (isUnsatisfiable) {
String message = join(literals, " and ") + " is unsatisfiable (by brute-force) but " +
theoryTestingSupport.getClass().getSimpleName() + " says it is satisfiable. " +
"Current constraint is " + constraint;
output(message);
throw new Error(message);
}
else {
output("Brute-force satisfiability test agrees that it is satisfiable. Constraint is " + constraint);
}
}
else {
output("Skipping test againt brute-force.");
}
}
/**
* @param literals
* @param theoryTestingSupport
* @param context
* @throws Error
*/
protected static void solverSaysItIsUnsatisfiable(Collection<Expression> literals, boolean testAgainstBruteForce, TheoryTestingSupport theoryTestingSupport, Context context) throws Error {
if (testAgainstBruteForce) {
output("SolverUnderAssignment thinks it is unsatisfiable.");
Expression formula = And.make(literals);
boolean isSatisfiable = SGDPLLTTester.isSatisfiableByBruteForce(formula, theoryTestingSupport, context);
if (isSatisfiable) {
String message = join(literals, " and ") + " is satisfiable (by brute-force) but " +
theoryTestingSupport.getClass().getSimpleName() + " says it is not. "
// + "Satisfying assignment is " + satisfyingAssignment + "."
;
output(message);
throw new Error(message);
}
else {
output("Brute-force satisfiability test agrees that it is unsatisfiable.");
}
}
else {
output("Skipping test againt brute-force.");
}
}
/**
* Determines whether a formula is satisfiable by adding existential quantifiers for each of its variables
* (according to the theory provided) and evaluating it.
* @param formula
* @param theoryTestingSupport
* @param context
* @return whether the formula is satisfiable.
*/
public static boolean isSatisfiableByBruteForce(Expression formula, TheoryTestingSupport theoryTestingSupport, Context context) {
Map<String, Type> variableNamesAndTypesForTesting = theoryTestingSupport.getVariableNamesAndTypesForTesting();
Expression quantifiedFormula = formula;
Collection<Expression> variables = theoryTestingSupport.getTheory().getVariablesIn(formula, context);
for (Expression variable : variables) {
Expression typeNameExpression = parse(variableNamesAndTypesForTesting.get(variable.toString()).toString());
quantifiedFormula = ThereExists.make(IndexExpressions.makeIndexExpression(variable, typeNameExpression), quantifiedFormula);
}
Expression evaluation = new BruteForceCommonInterpreter().apply(quantifiedFormula, context);
boolean result = evaluation.equals(TRUE);
return result;
}
/**
* Given a theory and a number <code>n</code> of single-variable constraint tests,
* generates <code>n</code> formulas in the theory
* and see if the model counting solver works (checked by brute force).
* Throws an {@link Error} with the failure description if a test fails.
* @param theoryTestingSupport
* @param numberOfTests
* @param maxNumberOfLiterals
* @param outputCount
*/
public static void testModelCountingForSingleVariableConstraints(
boolean testAgainstBruteForce, TheoryTestingSupport theoryTestingSupport, long numberOfTests, int maxNumberOfLiterals, boolean outputCount) {
Context context = theoryTestingSupport.makeContextWithTestingInformation();
Expression variable = parse(theoryTestingSupport.pickTestingVariableAtRandom());
NullaryFunction<Constraint> makeInitialConstraint = () -> theoryTestingSupport.getTheory().makeSingleVariableConstraint(variable, context);
Function<Constraint, Expression> makeRandomLiteral = c -> theoryTestingSupport.makeRandomLiteralOn(((SingleVariableConstraint)c).getVariable().toString(), context);
TestRunner tester = (ls, c, tB, cT, p) -> runModelCountingTestForSingleVariableConstraint(variable, ls, c, tB, cT.getTheory(), p);
runTesterGivenOnSuccessiveConjunctionsOfLiterals("model counting", tester, numberOfTests, maxNumberOfLiterals, testAgainstBruteForce, theoryTestingSupport, makeInitialConstraint, makeRandomLiteral, outputCount, context);
}
private static void runModelCountingTestForSingleVariableConstraint(
Expression variable,
Collection<Expression> literals,
Constraint constraint,
boolean testAgainstBruteForce,
Theory theory,
Context context) {
Expression literalsConjunction = And.make(literals);
String problemDescription = "model counting of " + literalsConjunction + " for variable " + variable;
output("Problem: " + problemDescription);
Simplifier symbolicSolver = (e, p) -> computeModelCountBySolver((SingleVariableConstraint) e, p);
SingleVariableConstraint singleVariableConstraint = (SingleVariableConstraint) constraint;
Expression symbolicSolution = symbolicSolver.apply(singleVariableConstraint, context);
if (Util.thereExists(new SubExpressionsDepthFirstIterator(symbolicSolution), e -> e instanceof QuantifiedExpression || Sets.isIntensionalSet(e))) {
throw new Error("Symbolic solution is not quantifier-free: " + symbolicSolution);
}
output("Symbolic result: " + symbolicSolution);
if (testAgainstBruteForce) {
if (singleVariableConstraint.isContradiction()) {
if (!symbolicSolution.equals(ZERO)) {
throw new Error("Constraint is contradiction, but symbolic solver does not produce 0, but instead " + symbolicSolution);
}
}
else {
Expression testingVariable = singleVariableConstraint.getVariable();
Set<Expression> allVariables = getVariablesBeingReferenced(singleVariableConstraint, context);
Collection<? extends Expression> otherVariables = removeFromSetNonDestructively(allVariables, v -> v.equals(testingVariable));
BinaryFunction<BruteForceCommonInterpreter, Context, Expression> fromInterpreterAndContextWithAssignmentToOtherVariablesToBruteForceSolution =
(interpreter, contextWithAssignmentToOtherVariables)
-> bruteForceModelCounterForVariableGivenInterpreterAndAssignmentToOtherVariables(
variable, literalsConjunction, interpreter, theory, contextWithAssignmentToOtherVariables);
testSymbolicVsBruteForceComputationForEachAssignment(theory, problemDescription, otherVariables, symbolicSolution, fromInterpreterAndContextWithAssignmentToOtherVariablesToBruteForceSolution, context);
}
}
else {
output("Skipping test againt brute-force.");
}
}
private static Expression computeModelCountBySolver(SingleVariableConstraint singleVariableConstraint, Context context) {
Expression symbolicSolution =
singleVariableConstraint.isContradiction()?
ZERO
: singleVariableConstraint.modelCount(context);
return symbolicSolution;
}
private static Expression bruteForceModelCounterForVariableGivenInterpreterAndAssignmentToOtherVariables(Expression variable, Expression conjunction, BruteForceCommonInterpreter interpreter, Theory theory, Context context) {
output("Computing model count by brute force of: " + conjunction);
int modelCount = 0;
Expression testingVariable = variable;
AssignmentMapsIterator testingVariableAssignmentsIterator = new AssignmentMapsIterator(list(testingVariable), context);
for (Map<Expression, Expression> testingVariableAssignment : in(testingVariableAssignmentsIterator)) {
Context extendedContext = Assignment.extendAssignments(testingVariableAssignment, context);
Expression value = interpreter.apply(conjunction, extendedContext);
if (value.equals(TRUE)) {
modelCount++;
}
// output("For " + completeInterpreter.getAssignment() + ",");
// output("value is " + value);
// output("Model count is " + modelCount + "\n");
}
return makeSymbol(modelCount);
}
/**
* Given a list of problem types, a theory and a number <code>n</code> of single-variable constraint tests,
* generates <code>n</code> problems with given body depth (number of levels of if then else expressions)
* and checks if symbolic evaluation works (checked by brute force).
* Throws an {@link Error} with the failure description if a test fails.
* @param problemTypes
* @param theoryTestingSupport
* @param numberOfTests
* @param maxNumberOfLiterals
* @param bodyDepth
* @param outputCount
*/
public static void testGroupProblemSolvingForSingleVariableConstraints(
boolean testAgainstBruteForce,
AssociativeCommutativeGroup group,
TheoryTestingSupport theoryTestingSupport,
long numberOfTests,
int maxNumberOfLiterals,
int bodyDepth,
boolean outputCount) {
String problemName = "quantification of " + group.getClass().getSimpleName() + " with single index";
TestRunner tester = (ls, c, tB, cT, p) -> runGroupProblemSolvingTestForSingleVariableConstraint(c, group, tB, cT, ls, bodyDepth, p);
runGroupProblemSolvingTesterForSuccessiveConstraints(problemName, tester, testAgainstBruteForce, group, theoryTestingSupport, numberOfTests, maxNumberOfLiterals, outputCount);
}
/**
 * Same as {@link #testGroupProblemSolvingForSingleVariableConstraints(boolean, AssociativeCommutativeGroup, TheoryTestingSupport, long, int, int, boolean)},
 * but for theories without constraint literals (usually translation-based theories);
 * each test therefore runs against a single empty constraint instead of successively
 * strengthened ones.
 * @param testAgainstBruteForce whether to validate the symbolic solution by brute force
 * @param group the associative-commutative group defining the quantification operation
 * @param theoryTestingSupport the theory (plus testing helpers) to generate problems in
 * @param numberOfTests how many random problems to generate and check
 * @param bodyDepth number of levels of if then else expressions in the generated body
 * @param outputCount whether to report test completion counts
 */
public static void testGroupProblemSolvingForSingleVariableConstraintsForTheoriesWithoutConstraintLiterals(
boolean testAgainstBruteForce,
AssociativeCommutativeGroup group,
TheoryTestingSupport theoryTestingSupport,
long numberOfTests,
int bodyDepth,
boolean outputCount) {
// Each individual test is delegated to the single-variable-constraint runner.
TestRunner singleVariableTester =
(literals, constraint, bruteForce, testingSupport, context)
-> runGroupProblemSolvingTestForSingleVariableConstraint(constraint, group, bruteForce, testingSupport, literals, bodyDepth, context);
String problemName = "quantification of " + group.getClass().getSimpleName() + " with single index";
runGroupProblemSolvingTesterOnEmptyConstraint(problemName, singleVariableTester, testAgainstBruteForce, group, theoryTestingSupport, numberOfTests, outputCount);
}
/**
 * Runs one random group-problem-solving test whose only index is the variable of the
 * given single-variable constraint.
 */
private static void runGroupProblemSolvingTestForSingleVariableConstraint(
Constraint constraint,
AssociativeCommutativeGroup group,
boolean testAgainstBruteForce,
TheoryTestingSupport theoryTestingSupport,
Collection<Expression> literals,
int bodyDepth,
Context context) {
// The single index is the constraint's own variable.
Expression index = ((SingleVariableConstraint) constraint).getVariable();
runGroupProblemSolvingTestGivenConstraintOnRandomProblem(list(index), constraint, group, testAgainstBruteForce, theoryTestingSupport, bodyDepth, context);
}
/**
 * Generates <code>numberOfTests</code> random quantification problems over the given
 * number of indices, with bodies of the given depth (number of levels of if then else
 * expressions), and checks if symbolic evaluation works (optionally checked by brute force).
 * Throws an {@link Error} with the failure description if a test fails.
 * @param numberOfIndices number of index variables to quantify over
 * @param testAgainstBruteForce whether to validate the symbolic solution by brute force
 * @param group the associative-commutative group defining the quantification operation
 * @param theoryTestingSupport the theory (plus testing helpers) to generate problems in
 * @param numberOfTests how many random problems to generate and check
 * @param maxNumberOfLiterals maximum number of random literals conjoined into each constraint
 * @param bodyDepth number of levels of if then else expressions in the generated body
 * @param outputCount whether to report test completion counts
 */
public static void testGroupProblemSolvingForMultipleIndices(
int numberOfIndices,
boolean testAgainstBruteForce,
AssociativeCommutativeGroup group,
TheoryTestingSupport theoryTestingSupport,
long numberOfTests,
int maxNumberOfLiterals,
int bodyDepth,
boolean outputCount) {
String problemName = "quantification of " + group.getClass().getSimpleName() + " with " + numberOfIndices + " indices";
TestRunner tester = (ls, c, tB, cT, p) -> runGroupProblemSolvingTestForMultipleIndicesGivenConstraint(numberOfIndices, c, group, tB, cT, ls, bodyDepth, p);
runGroupProblemSolvingTesterForSuccessiveConstraints(problemName, tester, testAgainstBruteForce, group, theoryTestingSupport, numberOfTests, maxNumberOfLiterals, outputCount);
}
/**
 * Same as {@link #testGroupProblemSolvingForMultipleIndices(int, boolean, AssociativeCommutativeGroup, TheoryTestingSupport, long, int, int, boolean)},
 * but for theories without constraint literals (usually translation-based theories);
 * each test therefore runs against a single empty constraint instead of successively
 * strengthened ones.
 * @param numberOfIndices number of index variables to quantify over
 * @param testAgainstBruteForce whether to validate the symbolic solution by brute force
 * @param group the associative-commutative group defining the quantification operation
 * @param theoryTestingSupport the theory (plus testing helpers) to generate problems in
 * @param numberOfTests how many random problems to generate and check
 * @param bodyDepth number of levels of if then else expressions in the generated body
 * @param outputCount whether to report test completion counts
 */
public static void testGroupProblemSolvingForMultipleIndicesForTheoriesWithoutConstraintLiterals(
int numberOfIndices,
boolean testAgainstBruteForce,
AssociativeCommutativeGroup group,
TheoryTestingSupport theoryTestingSupport,
long numberOfTests,
int bodyDepth,
boolean outputCount) {
// Each individual test is delegated to the multiple-indices runner.
TestRunner multipleIndicesTester =
(literals, constraint, bruteForce, testingSupport, context)
-> runGroupProblemSolvingTestForMultipleIndicesGivenConstraint(numberOfIndices, constraint, group, bruteForce, testingSupport, literals, bodyDepth, context);
String problemName = "quantification of " + group.getClass().getSimpleName() + " with " + numberOfIndices + " indices";
runGroupProblemSolvingTesterOnEmptyConstraint(problemName, multipleIndicesTester, testAgainstBruteForce, group, theoryTestingSupport, numberOfTests, outputCount);
}
/**
 * Runs one random group-problem-solving test over <code>numberOfIndices</code> indices
 * picked at random (without replacement) from the theory's testing variables.
 * @throws Error if the theory does not declare enough testing variables
 */
private static void runGroupProblemSolvingTestForMultipleIndicesGivenConstraint(
int numberOfIndices,
Constraint constraint,
AssociativeCommutativeGroup group,
boolean testAgainstBruteForce,
TheoryTestingSupport theoryTestingSupport,
Collection<Expression> literals,
int bodyDepth,
Context context) {
// Guard: we cannot pick more indices than there are testing variables.
if (numberOfIndices > theoryTestingSupport.getVariablesForTesting().size()) {
throw new Error("Test specifies " + numberOfIndices + " indices, but there are only " + theoryTestingSupport.getVariablesForTesting().size() + " available for testing in the theory: " + theoryTestingSupport.getVariablesForTesting());
}
Collection<Expression> indices =
pickKElementsWithoutReplacement(theoryTestingSupport.getVariablesForTesting(), numberOfIndices, theoryTestingSupport.getRandom());
runGroupProblemSolvingTestGivenConstraintOnRandomProblem(indices, constraint, group, testAgainstBruteForce, theoryTestingSupport, bodyDepth, context);
}
/**
 * Runs the given tester <code>numberOfTests</code> times; each run starts from a fresh
 * single-variable constraint on a randomly picked testing variable, which is then
 * successively strengthened with up to <code>maxNumberOfLiterals</code> random literals
 * on that same variable.
 * NOTE(review): the <code>group</code> parameter is not referenced in this body;
 * presumably kept for signature symmetry with the other tester drivers — confirm before removing.
 * @throws Error if any test fails
 */
private static void runGroupProblemSolvingTesterForSuccessiveConstraints(String problemName, TestRunner tester, boolean testAgainstBruteForce, AssociativeCommutativeGroup group, TheoryTestingSupport theoryTestingSupport, long numberOfTests, int maxNumberOfLiterals, boolean outputCount) throws Error {
Context context = theoryTestingSupport.makeContextWithTestingInformation();
// Fresh constraint on a random testing variable for each test...
NullaryFunction<Constraint> makeInitialConstraint = () -> theoryTestingSupport.getTheory().makeSingleVariableConstraint(parse(theoryTestingSupport.pickTestingVariableAtRandom()), context);
// ...strengthened with random literals on that constraint's own variable.
Function<Constraint, Expression> makeRandomLiteral = c -> theoryTestingSupport.makeRandomLiteralOn(((SingleVariableConstraint)c).getVariable().toString(), context);
runTesterGivenOnSuccessiveConjunctionsOfLiterals(problemName, tester, numberOfTests, maxNumberOfLiterals, testAgainstBruteForce, theoryTestingSupport, makeInitialConstraint, makeRandomLiteral, outputCount, context);
}
/**
 * Same as {@link #runGroupProblemSolvingTesterForSuccessiveConstraints(String, TestRunner, boolean, AssociativeCommutativeGroup, TheoryTestingSupport, long, int, boolean)},
 * but running the tester on a single, empty constraint for each test.
 * @param problemName description of the problem kind, used for reporting
 * @param tester the per-test runner
 * @param testAgainstBruteForce whether to validate the symbolic solution by brute force
 * @param group the associative-commutative group (not used here; kept for symmetry)
 * @param theoryTestingSupport the theory (plus testing helpers)
 * @param numberOfTests how many times to run the tester
 * @param outputCount whether to report test completion counts
 * @throws Error if any test fails
 */
private static void runGroupProblemSolvingTesterOnEmptyConstraint(String problemName, TestRunner tester, boolean testAgainstBruteForce, AssociativeCommutativeGroup group, TheoryTestingSupport theoryTestingSupport, long numberOfTests, boolean outputCount) throws Error {
Context context = theoryTestingSupport.makeContextWithTestingInformation();
// An empty constraint on a randomly picked testing variable, shared by all runs.
SingleVariableConstraint emptyConstraint =
theoryTestingSupport.getTheory()
.makeSingleVariableConstraint(parse(theoryTestingSupport.pickTestingVariableAtRandom()), context);
for (int testNumber = 1; testNumber <= numberOfTests; testNumber++) {
tester.runOneTest(list(), emptyConstraint, testAgainstBruteForce, theoryTestingSupport, context);
indicateCompletionOfTest(outputCount, problemName, testNumber, testAgainstBruteForce, theoryTestingSupport);
}
}
/**
 * Generates a random problem (random body wrapped in quantifiers over the given indices)
 * and runs the symbolic-vs-brute-force test on it.
 */
private static void runGroupProblemSolvingTestGivenConstraintOnRandomProblem(Collection<Expression> indices, Constraint constraint, AssociativeCommutativeGroup group, boolean testAgainstBruteForce, TheoryTestingSupport theoryTestingSupport, int bodyDepth, Context context) throws Error {
// Random problem generation is kept separate from the test itself so that, while debugging,
// "Drop to frame" can re-run the test without generating a different random problem.
Expression randomBody = makeBody(group, theoryTestingSupport, bodyDepth, context);
Expression randomProblem = makeProblem(indices, constraint, randomBody, group, context);
runGroupProblemSolvingTestGivenConstraintAndProblem(randomProblem, indices, constraint, randomBody, testAgainstBruteForce, theoryTestingSupport, context);
}
/**
 * Solves the given quantification problem symbolically and, if requested, verifies the
 * symbolic solution against a brute-force evaluation under every assignment to the
 * problem's free variables.
 * @param problem the quantified problem expression to solve
 * @param indices the quantified index variables
 * @param constraint the constraint on the (first) index
 * @param body the problem's body expression
 * @param testAgainstBruteForce whether to compare against brute force
 * @param theoryTestingSupport the theory (plus testing helpers)
 * @param context the evaluation context
 * @throws Error if the symbolic solution is not quantifier-free, or disagrees with brute force
 */
public static void runGroupProblemSolvingTestGivenConstraintAndProblem(Expression problem, Collection<Expression> indices, Constraint constraint, Expression body, boolean testAgainstBruteForce, TheoryTestingSupport theoryTestingSupport, Context context) throws Error {
Theory theory = theoryTestingSupport.getTheory();
Collection<? extends Expression> freeVariables = getFreeVariableMinusIndices(indices, constraint, body, context);
String problemDescription = problem.toString();
output(problemDescription);
Simplifier symbolicInterpreter = (e, c) -> theory.evaluate(e, c);
long start = System.currentTimeMillis();
Expression symbolicSolution = symbolicInterpreter.apply(problem, context);
long time = System.currentTimeMillis() - start;
output("Symbolic solution: " + symbolicSolution);
output("Computed in " + time + " ms");
// A correct symbolic solution must be quantifier-free.
if (Util.thereExists(new SubExpressionsDepthFirstIterator(symbolicSolution), e -> e instanceof QuantifiedExpression || Sets.isIntensionalSet(e))) {
throw new Error("Symbolic solution is not quantifier-free: " + symbolicSolution);
}
if (testAgainstBruteForce) {
BinaryFunction<BruteForceCommonInterpreter, Context, Expression>
bruteForceSolutionGivenInterpreterAndContextWithAssignmentToOtherVariables = (i, c) -> i.apply(problem, c);
testSymbolicVsBruteForceComputationForEachAssignment(
theory,
problemDescription,
freeVariables,
symbolicSolution,
bruteForceSolutionGivenInterpreterAndContextWithAssignmentToOtherVariables,
context);
// A more elegant approach would be to create a "for all free variables : symbolic = problem" expression
// and solve it by brute force instead of using testSymbolicVsBruteForceComputation
// which replicates the brute force interpreter to some extent.
// The reason we do not do this is simply due to the fact that the brute force interpreter would return "false"
// in case of failure, without indicating which assignment failed, which is very useful for debugging.
// If interpreters, and in fact the whole framework, provided proofs of its calculations,
// then we could simply use the more elegant approach.
}
else {
// Fixed typo in user-visible message: was "againt".
output("Skipping test against brute-force.");
}
}
/**
 * Wraps the given body in one quantifier-elimination problem per index, innermost first.
 * The given constraint applies only to the first index in the collection; every other
 * index is unconstrained (TRUE).
 */
private static Expression makeProblem(Collection<Expression> indices, Constraint constraint, Expression body, AssociativeCommutativeGroup group, Context context) {
Expression result = body;
boolean isFirstIndex = true;
for (Expression index : indices) {
Expression indexType = GrinderUtil.getTypeExpressionOfExpression(index, context);
Expression constraintOnThisIndex;
if (isFirstIndex) {
constraintOnThisIndex = constraint;
}
else {
constraintOnThisIndex = TRUE;
}
SingleQuantifierEliminationProblem problem = new DefaultSingleQuantifierEliminationProblem(group, index, indexType, constraintOnThisIndex, result);
result = problem.toExpression();
isFirstIndex = false;
}
return result;
}
/**
 * Builds a random conditional expression of the given depth whose leaves are random
 * constants of the group.
 */
private static Expression makeBody(AssociativeCommutativeGroup group, TheoryTestingSupport theoryTestingSupport, int bodyDepth, Context context) {
NullaryFunction<Expression> makeRandomLeaf = () -> group.makeRandomConstant(theoryTestingSupport.getRandom());
RandomConditionalExpressionGenerator generator = new RandomConditionalExpressionGenerator(theoryTestingSupport, bodyDepth, makeRandomLeaf, context);
return generator.apply();
}
/**
 * Collects every variable referenced by the constraint or the body, minus the given indices.
 */
private static Collection<? extends Expression> getFreeVariableMinusIndices(Collection<? extends Expression> indices, Constraint constraint, Expression body, Context context) {
Set<Expression> referencedVariables = getVariablesBeingReferenced(constraint, context);
referencedVariables.addAll(getVariablesBeingReferenced(body, context));
return removeFromSetNonDestructively(referencedVariables, indices::contains);
}
/**
 * Compares, for each assignment to the given free variables, whether the value of a
 * symbolic solution is the same as the solution computed by a brute-force solver, given a
 * {@link BruteForceCommonInterpreter} with that same assignment.
 * @param theory the theory, passed through to the per-assignment check
 * @param problemDescription description of the problem, used in failure messages
 * @param freeVariables the free variables to enumerate assignments for
 * @param symbolicSolution the quantifier-free symbolic solution to validate
 * @param fromInterpreterAndContextWithAssignmentToBruteForceSolution computes the brute-force solution given an interpreter and an assignment-extended context
 * @param context the base evaluation context
 * @throws Error if the symbolic and brute-force values disagree for some assignment
 */
private static void testSymbolicVsBruteForceComputationForEachAssignment(
Theory theory,
String problemDescription,
Collection<? extends Expression> freeVariables,
Expression symbolicSolution,
BinaryFunction<BruteForceCommonInterpreter, Context, Expression> fromInterpreterAndContextWithAssignmentToBruteForceSolution,
Context context) throws Error {
AssignmentMapsIterator assignmentsIterator = new AssignmentMapsIterator(freeVariables, context);
for (Map<Expression, Expression> assignment : in(assignmentsIterator)) {
testSymbolicVsBruteForceComputationForAssignment(assignment, theory, problemDescription, symbolicSolution, fromInterpreterAndContextWithAssignmentToBruteForceSolution, context);
}
}
/**
 * Checks that the symbolic solution and the brute-force computation agree under one
 * particular assignment to the free variables, throwing an {@link Error} otherwise.
 */
private static void testSymbolicVsBruteForceComputationForAssignment(Map<Expression, Expression> assignment, Theory theory, String problemDescription, Expression symbolicSolution, BinaryFunction<BruteForceCommonInterpreter, Context, Expression> fromInterpreterAndContextWithAssignmentToBruteForceSolution, Context context) throws Error {
BruteForceCommonInterpreter bruteForceInterpreter = new BruteForceCommonInterpreter();
Context contextWithAssignment = Assignment.extendAssignments(assignment, context);
Expression bruteForceResultUnderAssignment = fromInterpreterAndContextWithAssignmentToBruteForceSolution.apply(bruteForceInterpreter, contextWithAssignment);
Expression symbolicResultUnderAssignment = bruteForceInterpreter.apply(symbolicSolution, contextWithAssignment);
output("Under free variables assignment " + assignment);
output("Symbolic result becomes " + symbolicResultUnderAssignment);
output("Brute force result becomes " + bruteForceResultUnderAssignment + "\n");
if (symbolicResultUnderAssignment.equals(bruteForceResultUnderAssignment)) {
return;
}
throw new Error(
"Failure in testing of " + problemDescription + "\n"
+ "Symbolic solution: " + symbolicSolution + "\n"
+ "Under assignment to free variables: " + assignment + "\n"
+ "Value of symbolic solution : " + symbolicResultUnderAssignment + "\n"
+ "Value of brute force computation: " + bruteForceResultUnderAssignment + "\n"
+ "Context : " + contextWithAssignment + "\n"
);
}
}
| |
package com.miz.filesources;
import java.io.File;
import java.io.IOException;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Locale;
import jcifs.smb.SmbException;
import jcifs.smb.SmbFile;
import android.content.Context;
import android.database.Cursor;
import com.miz.abstractclasses.MovieFileSource;
import com.miz.db.DbAdapter;
import com.miz.functions.DbMovie;
import com.miz.functions.FileSource;
import com.miz.functions.MizLib;
import com.miz.mizuu.MizuuApplication;
/**
 * Movie file source backed by an SMB/CIFS network share (via jCIFS).
 * Scans the share for movie files and keeps the local movie database in sync with it.
 * All network operations are best-effort: unreachable files/folders are silently skipped.
 */
public class SmbMovie extends MovieFileSource<SmbFile> {

    // Filepaths already present in the database; used as a set (values are unused).
    private HashMap<String, String> existingMovies = new HashMap<String, String>();
    private SmbFile tempSmbFile;

    public SmbMovie(Context context, FileSource fileSource, boolean ignoreRemovedFiles, boolean subFolderSearch, boolean clearLibrary, boolean disableEthernetWiFiCheck) {
        super(context, fileSource, ignoreRemovedFiles, subFolderSearch, clearLibrary, disableEthernetWiFiCheck);
    }

    /**
     * Returns the first configured file source whose base path is a prefix of the given
     * filepath, or {@code null} if none matches.
     * FIX(review): the original loops used {@code continue} right after assigning the match,
     * which kept scanning and effectively returned the <em>last</em> matching source;
     * a first-match {@code break}/return is what was clearly intended.
     */
    private FileSource findFileSource(String filepath, ArrayList<FileSource> filesources) {
        for (int j = 0; j < filesources.size(); j++)
            if (filepath.contains(filesources.get(j).getFilepath()))
                return filesources.get(j);
        return null;
    }

    /**
     * Builds an authenticated {@link SmbFile} for the given file source and path.
     * Credentials are URL-encoded before being embedded in the SMB login string.
     */
    private SmbFile createSmbFile(FileSource source, String filepath, boolean isFolder) throws Exception {
        return new SmbFile(
                MizLib.createSmbLoginString(
                        URLEncoder.encode(source.getDomain(), "utf-8"),
                        URLEncoder.encode(source.getUser(), "utf-8"),
                        URLEncoder.encode(source.getPassword(), "utf-8"),
                        filepath,
                        isFolder
                        ));
    }

    /**
     * Deletes unidentified network movies from the database: entries with no matching
     * file source, and entries whose file still exists but was never identified.
     * Requires network connectivity; does nothing otherwise.
     */
    @Override
    public void removeUnidentifiedFiles() {
        DbAdapter db = MizuuApplication.getMovieAdapter();
        List<DbMovie> dbMovies = getDbMovies();
        ArrayList<FileSource> filesources = MizLib.getFileSources(MizLib.TYPE_MOVIE, true);

        if (!MizLib.isWifiConnected(getContext(), disableEthernetWiFiCheck()))
            return; // Cannot check network files without connectivity

        int count = dbMovies.size();
        for (int i = 0; i < count; i++) {
            DbMovie movie = dbMovies.get(i);
            if (!movie.isNetworkFile())
                continue;
            try {
                FileSource source = findFileSource(movie.getFilepath(), filesources);
                if (source == null) {
                    // No configured source covers this path any more
                    if (movie.isUnidentified())
                        db.deleteMovie(movie.getRowId());
                    continue;
                }
                SmbFile file = createSmbFile(source, movie.getFilepath(), false);
                if (file.exists() && movie.isUnidentified())
                    db.deleteMovie(movie.getRowId());
            } catch (Exception e) {} // Do nothing - the file isn't available (either MalformedURLException or SmbException)
        }
    }

    /**
     * Deletes network movies whose files are no longer reachable (and have no offline copy),
     * then removes orphaned artwork for movies that no longer exist in the database.
     */
    @Override
    public void removeUnavailableFiles() {
        DbAdapter db = MizuuApplication.getMovieAdapter();
        List<DbMovie> dbMovies = getDbMovies(), deletedMovies = new ArrayList<DbMovie>();
        ArrayList<FileSource> filesources = MizLib.getFileSources(MizLib.TYPE_MOVIE, true);

        if (MizLib.isWifiConnected(getContext(), disableEthernetWiFiCheck())) {
            int count = dbMovies.size();
            for (int i = 0; i < count; i++) {
                DbMovie movie = dbMovies.get(i);
                if (!movie.isNetworkFile() || movie.hasOfflineCopy(getContext()))
                    continue;
                try {
                    FileSource source = findFileSource(movie.getFilepath(), filesources);
                    if (source == null) {
                        // No configured source covers this path: the file is unreachable for good
                        if (db.deleteMovie(movie.getRowId()))
                            deletedMovies.add(movie);
                        continue;
                    }
                    if (!createSmbFile(source, movie.getFilepath(), false).exists()) {
                        if (db.deleteMovie(movie.getRowId()))
                            deletedMovies.add(movie);
                    }
                } catch (Exception e) {} // Do nothing - the file isn't available (either MalformedURLException or SmbException)
            }
        }

        // Remove artwork for movies that are now completely gone from the database
        int deletedCount = deletedMovies.size();
        for (int i = 0; i < deletedCount; i++) {
            if (!db.movieExists(deletedMovies.get(i).getTmdbId())) {
                MizLib.deleteFile(new File(deletedMovies.get(i).getThumbnail()));
                MizLib.deleteFile(new File(deletedMovies.get(i).getBackdrop()));
            }
        }

        // Clean up
        deletedMovies.clear();
        filesources.clear();
    }

    /**
     * Scans the source folder recursively and returns the filepaths of all candidate
     * movie files, in discovery order and without duplicates.
     */
    @Override
    public List<String> searchFolder() {
        if (getFolder() == null)
            return new ArrayList<String>(); // Return empty List

        // Load all known filepaths so already-indexed files can be skipped
        DbAdapter dbHelper = MizuuApplication.getMovieAdapter();
        Cursor cursor = dbHelper.fetchAllMovies(DbAdapter.KEY_TITLE + " ASC", ignoreRemovedFiles(), true);
        if (cursor != null) { // Guard against a null cursor before dereferencing/closing it
            try {
                while (cursor.moveToNext()) {
                    existingMovies.put(cursor.getString(cursor.getColumnIndex(DbAdapter.KEY_FILEPATH)), "");
                }
            } catch (Exception e) {
            } finally {
                cursor.close(); // Close cursor
            }
        }

        LinkedHashSet<String> results = new LinkedHashSet<String>();

        // Do a recursive search in the file source folder
        recursiveSearch(getFolder(), results);

        return new ArrayList<String>(results);
    }

    /**
     * Recursively walks the given folder, adding candidate files to {@code results}.
     * DVD folders (VIDEO_TS) contribute their VIDEO_TS.IFO file; Blu-ray folders (BDMV)
     * contribute the largest file in their STREAM folder. Any SMB error silently aborts
     * the current branch (best-effort scan).
     */
    @Override
    public void recursiveSearch(SmbFile folder, LinkedHashSet<String> results) {
        try {
            if (searchSubFolders()) {
                if (folder.isDirectory()) {
                    // Check if this is a DVD folder
                    if (folder.getName().equalsIgnoreCase("video_ts/")) {
                        SmbFile[] children = folder.listFiles();
                        for (int i = 0; i < children.length; i++) {
                            if (children[i].getName().equalsIgnoreCase("video_ts.ifo"))
                                addToResults(children[i], results);
                        }
                    } // Check if this is a Blu-ray folder
                    else if (folder.getName().equalsIgnoreCase("bdmv/")) {
                        SmbFile[] children = folder.listFiles();
                        for (int i = 0; i < children.length; i++) {
                            if (children[i].getName().equalsIgnoreCase("stream/")) {
                                SmbFile[] m2tsVideoFiles = children[i].listFiles();
                                if (m2tsVideoFiles.length > 0) {
                                    // Index only the largest stream file (the main feature)
                                    SmbFile largestFile = m2tsVideoFiles[0];
                                    for (int j = 0; j < m2tsVideoFiles.length; j++)
                                        if (largestFile.length() < m2tsVideoFiles[j].length())
                                            largestFile = m2tsVideoFiles[j];
                                    addToResults(largestFile, results);
                                }
                            }
                        }
                    } else {
                        String[] childNames = folder.list();
                        for (int i = 0; i < childNames.length; i++) {
                            tempSmbFile = new SmbFile(folder.getCanonicalPath() + childNames[i] + "/");
                            if (tempSmbFile.isDirectory()) {
                                recursiveSearch(tempSmbFile, results);
                            } else {
                                tempSmbFile = new SmbFile(folder.getCanonicalPath() + childNames[i]);
                                addToResults(tempSmbFile, results);
                            }
                        }
                    }
                } else {
                    addToResults(folder, results);
                }
            } else {
                // Sub-folder search disabled: only index the top-level files
                SmbFile[] children = folder.listFiles();
                for (int i = 0; i < children.length; i++)
                    addToResults(children[i], results);
            }
        } catch (Exception e) {} // Best-effort: unreadable folders are skipped
    }

    /**
     * Adds a single file to the result set if it qualifies as a movie file.
     * NFO metadata files are registered via {@link #addNfoFile} instead. Files smaller
     * than the configured size limit are skipped (except VIDEO_TS.IFO), as are
     * "part2"/"cd2"-style secondary parts and files already present in the library
     * (unless the library is being cleared).
     */
    @Override
    public void addToResults(SmbFile file, LinkedHashSet<String> results) {
        if (supportsNfo() && MizLib.isNfoFile(file.getCanonicalPath())) {
            try {
                addNfoFile(MizLib.removeExtension(file.getCanonicalPath()), file.getInputStream());
            } catch (IOException ignored) {}
        } else if (MizLib.checkFileTypes(file.getCanonicalPath())) {
            try {
                // Skip files below the size limit (samples, extras); VIDEO_TS.IFO is always kept
                if (file.length() < getFileSizeLimit() && !file.getName().equalsIgnoreCase("video_ts.ifo"))
                    return;
            } catch (SmbException e) {
                return;
            }

            if (!clearLibrary())
                if (existingMovies.get(file.getCanonicalPath()) != null) return;

            String tempFileName = file.getName().substring(0, file.getName().lastIndexOf("."));
            if (tempFileName.toLowerCase(Locale.ENGLISH).matches(".*part[2-9]|cd[2-9]")) return;

            // Add the file if it reaches this point
            results.add(file.getCanonicalPath());
        }
    }

    /**
     * Returns the authenticated root folder of this file source, or {@code null} if the
     * SMB URL cannot be built (e.g. malformed credentials).
     */
    @Override
    public SmbFile getRootFolder() {
        try {
            FileSource fs = getFileSource();
            return createSmbFile(fs, fs.getFilepath(), true);
        } catch (Exception e) {}
        return null;
    }

    @Override
    public String toString() {
        SmbFile root = getRootFolder();
        // FIX(review): guard against a null root instead of throwing NullPointerException
        return root == null ? "" : MizLib.transformSmbPath(root.getCanonicalPath());
    }
}
| |
/* Copyright 2004 The Apache Software Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package scomp.redefine.detailed;
import org.apache.xmlbeans.*;
import org.apache.xmlbeans.impl.xb.xsdschema.SchemaDocument;
import org.junit.jupiter.api.Test;
import javax.xml.namespace.QName;
import static org.junit.jupiter.api.Assertions.*;
import static scomp.common.BaseCase.createOptions;
/**
 * Tests XmlBeans handling of chained {@code xs:redefine} constructs:
 * a valid multi-step redefinition chain, and a circular chain that must be rejected.
 * FIX(review): JUnit 5's {@code assertEquals} signature is (expected, actual); the
 * original calls had the arguments swapped, producing misleading failure messages.
 */
public class MultipleRedefines {
    // Chain A -> B -> D -> E, each redefining type T (and D/E also redefining S from C).
    private static final String[] MULTIPLE_SCHEMAS = {
        "<xs:schema xmlns:xs=\"http://www.w3.org/2001/XMLSchema\">" +
        " <xs:complexType name=\"T\">" +
        " <xs:sequence>" +
        " <xs:element name =\"A\" type=\"xs:string\"/>" +
        " </xs:sequence>" +
        " </xs:complexType>" +
        "</xs:schema>",
        "<xs:schema xmlns:xs=\"http://www.w3.org/2001/XMLSchema\">" +
        " <xs:redefine schemaLocation=\"A.xsd\">" +
        " <xs:complexType name=\"T\">" +
        " <xs:complexContent>" +
        " <xs:extension base=\"T\">" +
        " <xs:sequence>" +
        " <xs:element name=\"B\" type=\"xs:string\"/>" +
        " </xs:sequence>" +
        " </xs:extension>" +
        " </xs:complexContent>" +
        " </xs:complexType>" +
        " </xs:redefine>" +
        " </xs:schema>",
        "<xs:schema xmlns:xs=\"http://www.w3.org/2001/XMLSchema\">" +
        " <xs:complexType name=\"S\">" +
        " <xs:sequence>" +
        " <xs:element name=\"C\" type=\"xs:string\"/>" +
        " </xs:sequence>" +
        " </xs:complexType>" +
        "</xs:schema>",
        "<xs:schema xmlns:xs=\"http://www.w3.org/2001/XMLSchema\">" +
        " <xs:redefine schemaLocation=\"B.xsd\">" +
        " <xs:complexType name=\"T\">" +
        " <xs:complexContent>" +
        " <xs:extension base=\"T\">" +
        " <xs:sequence>" +
        " <xs:element name=\"D\" type=\"xs:string\"/>" +
        " </xs:sequence>" +
        " </xs:extension>" +
        " </xs:complexContent>" +
        " </xs:complexType>" +
        " </xs:redefine>" +
        " <xs:redefine schemaLocation=\"C.xsd\">" +
        " <xs:complexType name=\"S\">" +
        " <xs:complexContent>" +
        " <xs:extension base=\"S\">" +
        " <xs:sequence>" +
        " <xs:element name=\"D\" type=\"xs:string\"/>" +
        " </xs:sequence>" +
        " </xs:extension>" +
        " </xs:complexContent>" +
        " </xs:complexType>" +
        " </xs:redefine>" +
        "</xs:schema>",
        "<xs:schema xmlns:xs=\"http://www.w3.org/2001/XMLSchema\">" +
        " <xs:redefine schemaLocation=\"D.xsd\">" +
        " <xs:complexType name=\"S\">" +
        " <xs:complexContent>" +
        " <xs:extension base=\"S\">" +
        " <xs:sequence>" +
        " <xs:element name=\"E\" type=\"xs:string\"/>" +
        " </xs:sequence>" +
        " </xs:extension>" +
        " </xs:complexContent>" +
        " </xs:complexType>" +
        " <xs:complexType name=\"T\">" +
        " <xs:complexContent>" +
        " <xs:extension base=\"T\">" +
        " <xs:sequence>" +
        " <xs:element name=\"E\" type=\"xs:string\"/>" +
        " </xs:sequence>" +
        " </xs:extension>" +
        " </xs:complexContent>" +
        " </xs:complexType>" +
        " </xs:redefine>" +
        "</xs:schema>"};

    private static final String[] MULTIPLE_SCHEMAS_NAME = {
        "A.xsd", "B.xsd", "C.xsd", "D.xsd", "E.xsd"};

    // A redefines D, B redefines A, C redefines B, D redefines C: a cycle.
    private static final String[] CIRCULAR_SCHEMAS = {
        "<xs:schema xmlns:xs=\"http://www.w3.org/2001/XMLSchema\">" +
        " <xs:redefine schemaLocation=\"D.xsd\">" +
        " <xs:complexType name=\"T\">" +
        " <xs:complexContent>" +
        " <xs:extension base=\"T\">" +
        " <xs:sequence>" +
        " <xs:element name=\"A\" type=\"xs:string\"/>" +
        " </xs:sequence>" +
        " </xs:extension>" +
        " </xs:complexContent>" +
        " </xs:complexType>" +
        " </xs:redefine>" +
        " <xs:complexType name=\"T\">" +
        " <xs:sequence>" +
        " <xs:element name=\"A\" type=\"xs:string\"/>" +
        " </xs:sequence>" +
        " </xs:complexType>" +
        "</xs:schema>",
        "<xs:schema xmlns:xs=\"http://www.w3.org/2001/XMLSchema\">" +
        " <xs:redefine schemaLocation=\"A.xsd\">" +
        " <xs:complexType name=\"T\">" +
        " <xs:complexContent>" +
        " <xs:extension base=\"T\">" +
        " <xs:sequence>" +
        " <xs:element name=\"B\" type=\"xs:string\"/>" +
        " </xs:sequence>" +
        " </xs:extension>" +
        " </xs:complexContent>" +
        " </xs:complexType>" +
        " </xs:redefine>" +
        "</xs:schema>",
        "<xs:schema xmlns:xs=\"http://www.w3.org/2001/XMLSchema\">" +
        " <xs:redefine schemaLocation=\"B.xsd\">" +
        " <xs:complexType name=\"T\">" +
        " <xs:complexContent>" +
        " <xs:extension base=\"T\">" +
        " <xs:sequence>" +
        " <xs:element name=\"C\" type=\"xs:string\"/>" +
        " </xs:sequence>" +
        " </xs:extension>" +
        " </xs:complexContent>" +
        " </xs:complexType>" +
        " </xs:redefine>" +
        "</xs:schema>",
        "<xs:schema xmlns:xs=\"http://www.w3.org/2001/XMLSchema\">" +
        " <xs:redefine schemaLocation=\"C.xsd\">" +
        " <xs:complexType name=\"T\">" +
        " <xs:complexContent>" +
        " <xs:extension base=\"T\">" +
        " <xs:sequence>" +
        " <xs:element name=\"D\" type=\"xs:string\"/>" +
        " </xs:sequence>" +
        " </xs:extension>" +
        " </xs:complexContent>" +
        " </xs:complexType>" +
        " </xs:redefine>" +
        "</xs:schema>"};

    private static final String[] CIRCULAR_SCHEMAS_NAME = {
        "A.xsd", "B.xsd", "C.xsd", "D.xsd"};

    /**
     * Compiles the valid redefinition chain and checks that type T accumulated the
     * elements contributed at each step (A, B, D, E) in order.
     */
    @Test
    void testMultipleRedefines() throws Exception {
        int N = MULTIPLE_SCHEMAS.length;
        SchemaDocument[] sdocs = new SchemaDocument[N];
        for (int i = 0; i < N; i++) {
            sdocs[i] = SchemaDocument.Factory.parse(MULTIPLE_SCHEMAS[i]);
            sdocs[i].documentProperties().setSourceName(MULTIPLE_SCHEMAS_NAME[i]);
        }
        SchemaTypeSystem ts = XmlBeans.compileXsd(sdocs, XmlBeans.getBuiltinTypeSystem(), createOptions());
        assertNotNull(ts);
        SchemaType t = ts.findType(new QName("", "T"));
        assertNotNull(t);
        SchemaParticle p = t.getContentModel();
        assertNotNull(p);
        assertEquals(SchemaParticle.SEQUENCE, p.getParticleType());
        SchemaParticle[] elts = p.getParticleChildren();
        assertEquals(4, elts.length);
        for (SchemaParticle elt : elts) {
            assertEquals(SchemaParticle.ELEMENT, elt.getParticleType());
        }
        assertEquals("A", elts[0].getName().getLocalPart());
        assertEquals("B", elts[1].getName().getLocalPart());
        assertEquals("D", elts[2].getName().getLocalPart());
        assertEquals("E", elts[3].getName().getLocalPart());
    }

    /**
     * Compiles the circular redefinition chain and checks that compilation fails
     * with an error mentioning the circularity.
     */
    @Test
    void testCircularRedefines() throws Exception {
        int N = CIRCULAR_SCHEMAS.length;
        SchemaDocument[] sdocs = new SchemaDocument[N];
        for (int i = 0; i < N; i++) {
            sdocs[i] = SchemaDocument.Factory.parse(CIRCULAR_SCHEMAS[i]);
            sdocs[i].documentProperties().setSourceName(CIRCULAR_SCHEMAS_NAME[i]);
        }
        XmlOptions validateOptions = createOptions();
        XmlException e = assertThrows(XmlException.class, () ->
            XmlBeans.compileXsd(sdocs, XmlBeans.getBuiltinTypeSystem(), validateOptions));
        XmlError xe = validateOptions.getErrorListener().iterator().next();
        assertTrue(xe.getMessage().contains("circular"));
    }
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.search.aggregations.bucket.terms;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.LongHash;
import org.elasticsearch.common.util.LongLongHash;
import org.elasticsearch.core.Releasable;
import org.elasticsearch.search.aggregations.CardinalityUpperBound;
import java.util.Locale;
/**
* Maps owning bucket ordinals and long bucket keys to bucket ordinals.
*/
public abstract class LongKeyedBucketOrds implements Releasable {
/**
 * Build a {@link LongKeyedBucketOrds} whose values have unknown bounds.
 */
public static LongKeyedBucketOrds build(BigArrays bigArrays, CardinalityUpperBound cardinality) {
    return cardinality.map(estimate -> {
        // With at most one owning bucket the cheaper single-key hash suffices.
        if (estimate < 2) {
            return new FromSingle(bigArrays);
        }
        return new FromMany(bigArrays);
    });
}
/**
 * Build a {@link LongKeyedBucketOrds} whose values have known bounds, choosing the
 * most compact implementation that can hold the {@code [min, max]} range for the
 * estimated number of owning buckets.
 */
public static LongKeyedBucketOrds buildForValueRange(BigArrays bigArrays, CardinalityUpperBound cardinality, long min, long max) {
    return cardinality.map((int cardinalityUpperBound) -> {
        if (cardinalityUpperBound < 2) {
            // At most one owning bucket: a single-key hash is enough.
            return new FromSingle(bigArrays);
        }
        if (min < 0 || cardinalityUpperBound == Integer.MAX_VALUE) {
            // cardinalityUpperBound tops out at maxint. If you see maxInt it could be anything above maxint.
            return new FromMany(bigArrays);
        }
        // Leading zeros of the owning-bucket bound = how many high bits of a long the
        // bound does NOT need; presumably FromManySmall packs the owning bucket ord into
        // those bits and the value into the rest — TODO confirm against FromManySmall.
        int owningBucketOrdShift = Long.numberOfLeadingZeros(cardinalityUpperBound);
        // Bits required to represent the largest value.
        int maxBits = 64 - Long.numberOfLeadingZeros(max);
        if (maxBits < owningBucketOrdShift) {
            // There is enough space in a long to contain both the owning bucket and the entire range of values
            return new FromManySmall(bigArrays, owningBucketOrdShift);
        }
        return new FromMany(bigArrays);
    });
}
// Not instantiable outside this file: use the static factory methods, which pick the
// appropriate nested implementation.
private LongKeyedBucketOrds() {}
/**
 * Add the {@code owningBucketOrd, value} pair. Return the ord for
 * their bucket if they have yet to be added, or {@code -1-ord}
 * if they were already present.
 */
public abstract long add(long owningBucketOrd, long value);
/**
 * Count the buckets in {@code owningBucketOrd}.
 * <p>
 * Some aggregations expect this to be fast but most wouldn't
 * mind particularly if it weren't.
 */
public abstract long bucketsInOrd(long owningBucketOrd);
/**
 * Find the {@code owningBucketOrd, value} pair. Return the ord for
 * their bucket if they have been added or {@code -1} if they haven't.
 */
public abstract long find(long owningBucketOrd, long value);
/**
 * Returns the value currently associated with the bucket ordinal.
 */
public abstract long get(long ordinal);
/**
 * The number of collected buckets.
 */
public abstract long size();
/**
 * The maximum possible used {@code owningBucketOrd}.
 */
public abstract long maxOwningBucketOrd();
/**
 * Description used in profile results.
 * <p>
 * NOTE(review): the name is misspelled ("decribe" rather than "describe"), but it is
 * part of this class's public API and is overridden/called elsewhere, so renaming it
 * here would be a breaking change.
 */
public abstract String decribe();
/**
 * Build an iterator for buckets inside {@code owningBucketOrd} in order
 * of increasing ord.
 * <p>
 * When first returned the enum is "unpositioned"; you must call
 * {@link BucketOrdsEnum#next()} to move it to the first value.
 */
public abstract BucketOrdsEnum ordsEnum(long owningBucketOrd);
/**
 * An iterator for buckets inside a particular {@code owningBucketOrd}.
 * Starts unpositioned; call {@link #next()} before reading {@link #ord()} or {@link #value()}.
 */
public interface BucketOrdsEnum {
    /**
     * Advance to the next value.
     * @return {@code true} if there *is* a next value,
     * {@code false} if there isn't
     */
    boolean next();
    /**
     * The ordinal of the current value.
     */
    long ord();
    /**
     * The current value.
     */
    long value();
    /**
     * An {@linkplain BucketOrdsEnum} that is empty: {@link #next()} always returns {@code false}.
     */
    BucketOrdsEnum EMPTY = new BucketOrdsEnum() {
        @Override
        public boolean next() {
            return false;
        }
        @Override
        public long ord() {
            return 0;
        }
        @Override
        public long value() {
            return 0;
        }
    };
}
/**
 * Implementation that only works if it is collecting from a single bucket:
 * the owning bucket ordinal is always {@code 0}, so only the value needs hashing.
 */
public static class FromSingle extends LongKeyedBucketOrds {
    private final LongHash ords;

    public FromSingle(BigArrays bigArrays) {
        ords = new LongHash(1, bigArrays);
    }

    @Override
    public long add(long owningBucketOrd, long value) {
        // This is in the critical path for collecting most aggs. Be careful of performance.
        assert owningBucketOrd == 0;
        return ords.add(value);
    }

    @Override
    public long find(long owningBucketOrd, long value) {
        assert owningBucketOrd == 0;
        return ords.find(value);
    }

    @Override
    public long get(long ordinal) {
        return ords.get(ordinal);
    }

    @Override
    public long bucketsInOrd(long owningBucketOrd) {
        assert owningBucketOrd == 0;
        // Every collected bucket belongs to the single owning bucket.
        return ords.size();
    }

    @Override
    public long size() {
        return ords.size();
    }

    @Override
    public long maxOwningBucketOrd() {
        return 0;
    }

    @Override
    public String decribe() {
        return "single bucket ords";
    }

    @Override
    public BucketOrdsEnum ordsEnum(long owningBucketOrd) {
        assert owningBucketOrd == 0;
        return new BucketOrdsEnum() {
            private long cursor = -1;
            private long currentValue;

            @Override
            public boolean next() {
                cursor++;
                if (cursor < ords.size()) {
                    currentValue = ords.get(cursor);
                    return true;
                }
                return false;
            }

            @Override
            public long ord() {
                return cursor;
            }

            @Override
            public long value() {
                return currentValue;
            }
        };
    }

    @Override
    public void close() {
        ords.close();
    }
}
/**
 * Implementation that works properly when collecting from many buckets.
 * <p>
 * Stores {@code (owningBucketOrd, value)} pairs in a {@link LongLongHash},
 * so per-owner queries scan all collected pairs.
 */
public static class FromMany extends LongKeyedBucketOrds {
    /** Hash of (owningBucketOrd, value) pairs keyed to dense ordinals. */
    private final LongLongHash pairs;

    public FromMany(BigArrays bigArrays) {
        pairs = new LongLongHash(2, bigArrays);
    }

    @Override
    public long add(long owningBucketOrd, long value) {
        // This is in the critical path for collecting most aggs. Be careful of performance.
        return pairs.add(owningBucketOrd, value);
    }

    @Override
    public long find(long owningBucketOrd, long value) {
        return pairs.find(owningBucketOrd, value);
    }

    @Override
    public long get(long ordinal) {
        // Key2 is the value half of the pair.
        return pairs.getKey2(ordinal);
    }

    @Override
    public long bucketsInOrd(long owningBucketOrd) {
        // TODO it'd be faster to count the number of buckets in a list of these ords rather than one at a time
        long matches = 0;
        final long total = pairs.size();
        for (long slot = 0; slot < total; slot++) {
            if (pairs.getKey1(slot) == owningBucketOrd) {
                matches++;
            }
        }
        return matches;
    }

    @Override
    public long size() {
        return pairs.size();
    }

    @Override
    public long maxOwningBucketOrd() {
        // TODO this is fairly expensive to compute. Can we avoid needing it?
        long best = -1;
        final long total = pairs.size();
        for (long slot = 0; slot < total; slot++) {
            best = Math.max(best, pairs.getKey1(slot));
        }
        return best;
    }

    @Override
    public String decribe() {
        return "many bucket ords";
    }

    @Override
    public BucketOrdsEnum ordsEnum(long owningBucketOrd) {
        // TODO it'd be faster to iterate many ords at once rather than one at a time
        return new BucketOrdsEnum() {
            private long pos = -1;
            private long currentValue;

            @Override
            public boolean next() {
                // Skip forward to the next pair owned by owningBucketOrd.
                for (pos++; pos < pairs.size(); pos++) {
                    if (pairs.getKey1(pos) == owningBucketOrd) {
                        currentValue = pairs.getKey2(pos);
                        return true;
                    }
                }
                return false;
            }

            @Override
            public long value() {
                return currentValue;
            }

            @Override
            public long ord() {
                return pos;
            }
        };
    }

    @Override
    public void close() {
        pairs.close();
    }
}
/**
 * Implementation that packs the {@code owningBucketOrd} into the top
 * bits of a {@code long} and uses the bottom {@code owningBucketOrdShift}
 * bits for the value.
 * <p>
 * Only valid when every owning ordinal fits in {@code 64 - shift} bits and
 * every value is non-negative and fits in {@code shift} bits; {@link #add}
 * rejects anything else.
 */
public static class FromManySmall extends LongKeyedBucketOrds {
    private final LongHash ords;
    /** Number of low bits reserved for the value. */
    private final int owningBucketOrdShift;
    /** Mask selecting the high (owning-ordinal) bits: {@code -1L << shift}. */
    private final long owningBucketOrdMask;

    public FromManySmall(BigArrays bigArrays, int owningBucketOrdShift) {
        ords = new LongHash(2, bigArrays);
        this.owningBucketOrdShift = owningBucketOrdShift;
        this.owningBucketOrdMask = -1L << owningBucketOrdShift;
    }

    private long encode(long owningBucketOrd, long value) {
        // This is in the critical path for collecting some aggs. Be careful of performance.
        return (owningBucketOrd << owningBucketOrdShift) | value;
    }

    @Override
    public long add(long owningBucketOrd, long value) {
        // This is in the critical path for collecting lots of aggs. Be careful of performance.
        long enc = encode(owningBucketOrd, value);
        // BUG FIX: the guard must fire when *either* component fails to
        // round-trip through the encoding. The previous "&&" only threw when
        // both halves were corrupted, so an owning ordinal that overflowed its
        // bits (with a valid value) was silently truncated, colliding distinct
        // buckets instead of failing fast.
        if (owningBucketOrd != (enc >>> owningBucketOrdShift) || (enc & ~owningBucketOrdMask) != value) {
            throw new IllegalArgumentException(
                String.format(
                    Locale.ROOT,
                    "[%s] and [%s] must fit in [%s..%s] bits",
                    owningBucketOrd,
                    value,
                    64 - owningBucketOrdShift,
                    owningBucketOrdShift
                )
            );
        }
        return ords.add(enc);
    }

    @Override
    public long find(long owningBucketOrd, long value) {
        // Reject inputs that could not have been encoded rather than
        // accidentally matching another bucket's packed key.
        if (Long.numberOfLeadingZeros(owningBucketOrd) < owningBucketOrdShift) {
            return -1;
        }
        if ((value & owningBucketOrdMask) != 0) {
            return -1;
        }
        return ords.find(encode(owningBucketOrd, value));
    }

    @Override
    public long get(long ordinal) {
        // Strip the owning-ordinal bits; only the value is returned.
        return ords.get(ordinal) & ~owningBucketOrdMask;
    }

    @Override
    public long bucketsInOrd(long owningBucketOrd) {
        // TODO it'd be faster to count the number of buckets in a list of these ords rather than one at a time
        if (Long.numberOfLeadingZeros(owningBucketOrd) < owningBucketOrdShift) {
            return 0;
        }
        long count = 0;
        long enc = owningBucketOrd << owningBucketOrdShift;
        for (long i = 0; i < ords.size(); i++) {
            if ((ords.get(i) & owningBucketOrdMask) == enc) {
                count++;
            }
        }
        return count;
    }

    @Override
    public long size() {
        return ords.size();
    }

    @Override
    public long maxOwningBucketOrd() {
        // TODO this is fairly expensive to compute. Can we avoid needing it?
        long max = -1;
        for (long i = 0; i < ords.size(); i++) {
            max = Math.max(max, (ords.get(i) & owningBucketOrdMask) >>> owningBucketOrdShift);
        }
        return max;
    }

    @Override
    public String decribe() {
        return "many bucket ords packed using [" + (64 - owningBucketOrdShift) + "/" + owningBucketOrdShift + "] bits";
    }

    @Override
    public BucketOrdsEnum ordsEnum(long owningBucketOrd) {
        // TODO it'd be faster to iterate many ords at once rather than one at a time
        if (Long.numberOfLeadingZeros(owningBucketOrd) < owningBucketOrdShift) {
            return BucketOrdsEnum.EMPTY;
        }
        final long encodedOwningBucketOrd = owningBucketOrd << owningBucketOrdShift;
        return new BucketOrdsEnum() {
            private long ord = -1;
            private long value;

            @Override
            public boolean next() {
                while (true) {
                    ord++;
                    if (ord >= ords.size()) {
                        return false;
                    }
                    long encoded = ords.get(ord);
                    if ((encoded & owningBucketOrdMask) == encodedOwningBucketOrd) {
                        value = encoded & ~owningBucketOrdMask;
                        return true;
                    }
                }
            }

            @Override
            public long value() {
                return value;
            }

            @Override
            public long ord() {
                return ord;
            }
        };
    }

    @Override
    public void close() {
        ords.close();
    }
}
}
| |
package apps.threedmanipulation.tools;
import java.nio.FloatBuffer;
import java.util.ArrayList;
import synergynetframework.appsystem.contentsystem.ContentSystem;
import synergynetframework.jme.cursorsystem.elements.twod.OrthoBringToTop;
import apps.threedmanipulation.ThreeDManipulation;
import apps.threedmanipulation.gestures.ControlPanelMoveRotateScale;
import apps.threedmanipulation.gestures.ControlPanelMoveRotateScale.RotateTranslateScaleListener;
import apps.threedmanipulation.gestures.OjbectManipulation;
import com.jme.bounding.OrthogonalBoundingBox;
import com.jme.image.Texture;
import com.jme.image.Texture.ApplyMode;
import com.jme.image.Texture.WrapMode;
import com.jme.math.Vector2f;
import com.jme.math.Vector3f;
import com.jme.renderer.ColorRGBA;
import com.jme.scene.Line;
import com.jme.scene.Node;
import com.jme.scene.Spatial;
import com.jme.scene.shape.Quad;
import com.jme.scene.state.BlendState;
import com.jme.scene.state.TextureState;
import com.jme.system.DisplaySystem;
import com.jme.util.TextureManager;
import com.jme.util.geom.BufferUtils;
/**
 * A floating "touch pad" panel for indirect 3D manipulation: touch gestures
 * on the pad surface are forwarded to a controlled spatial, and a feedback
 * {@link Line} is redrawn from the pad to the manipulated object while the
 * pad itself is moved.
 * <p>
 * Refactored: the texture-state and alpha-blend setup that was duplicated
 * verbatim between {@link #buildScreenFrame()} and {@link #buildScreenQuad()}
 * now lives in the private helpers {@link #applyTexture} and
 * {@link #applyAlphaBlend}. Behavior is unchanged.
 */
public class TouchPadScreen extends Node {

    /** The Constant serialVersionUID. */
    private static final long serialVersionUID = 5768509222221148746L;

    /** The content system. */
    protected ContentSystem contentSystem;

    /** Feedback line drawn from the pad to the manipulated object. */
    protected Line line;

    /** The object being manipulated ("Ojbect" spelling kept — public field). */
    protected Spatial manipulatedOjbect;

    /** Decorative border quad drawn behind/around the pad surface. */
    protected Quad screenFrame;

    /** Interactive pad surface. */
    protected Quad screenQuad;

    /** Gesture handler translating pad touches into object manipulation. */
    protected OjbectManipulation telescopeManipulateOjbect;

    /** Width of the pad surface in pixels; frame adds a fixed margin. */
    protected float width;

    /**
     * Instantiates a new touch pad screen.
     *
     * @param name
     *            the node name
     * @param contentSystem
     *            the content system
     * @param width
     *            the pad surface width
     * @param manipulatabledOjbect
     *            the spatial to manipulate
     * @param line
     *            the feedback line to redraw as the pad moves
     */
    public TouchPadScreen(String name, ContentSystem contentSystem,
            float width, Spatial manipulatabledOjbect, Line line) {
        super(name);
        this.contentSystem = contentSystem;
        this.width = width;
        this.manipulatedOjbect = manipulatabledOjbect;
        this.line = line;
        // Order matters: buildScreenFrame() reads screenQuad and
        // telescopeManipulateOjbect that buildScreenQuad() creates.
        buildScreenQuad();
        buildScreenFrame();
    }

    /**
     * Builds the frame quad, textures it, and wires up the move/rotate/scale
     * gesture handling plus the feedback-line redraw on movement.
     */
    public void buildScreenFrame() {
        screenFrame = new Quad(name + "screenFrame", width + 60, width + 30);
        screenFrame.setModelBound(new OrthogonalBoundingBox());
        screenFrame.updateModelBound();
        this.attachChild(screenFrame);
        // NOTE(review): this offsets the inner surface quad, not the frame
        // just built above — confirm it was not meant to be
        // screenFrame.setLocalTranslation(0, -10, 0).
        screenQuad.setLocalTranslation(0, -10, 0);
        applyTexture(screenFrame, "touchpadbg.png");
        applyAlphaBlend(screenFrame);
        ControlPanelMoveRotateScale monitorScreenMoveRotateScale = new ControlPanelMoveRotateScale(
                screenFrame, this, null, telescopeManipulateOjbect, null);
        monitorScreenMoveRotateScale.setPickMeOnly(true);
        monitorScreenMoveRotateScale
                .addRotateTranslateScaleListener(new RotateTranslateScaleListener() {
                    @Override
                    public void itemMoved(
                            ControlPanelMoveRotateScale multiTouchElement,
                            Spatial targetSpatial, float newLocationX,
                            float newLocationY, float oldLocationX,
                            float oldLocationY) {
                        redrawFeedbackLine(newLocationX, newLocationY);
                    }

                    @Override
                    public void itemRotated(
                            ControlPanelMoveRotateScale multiTouchElement,
                            Spatial targetSpatial, float newAngle,
                            float oldAngle) {
                    }

                    @Override
                    public void itemScaled(
                            ControlPanelMoveRotateScale multiTouchElement,
                            Spatial targetSpatial, float scaleChange) {
                    }
                });
        @SuppressWarnings("unused")
        OrthoBringToTop bringToTop = new OrthoBringToTop(screenFrame, this);
    }

    /**
     * Builds the interactive surface quad, textures it, and attaches the
     * object-manipulation gesture handler.
     */
    public void buildScreenQuad() {
        screenQuad = new Quad(name + "screenQuad", width + 50, width);
        screenQuad.setModelBound(new OrthogonalBoundingBox());
        screenQuad.updateModelBound();
        this.attachChild(screenQuad);
        applyTexture(screenQuad, "touchpad.png");
        applyAlphaBlend(screenQuad);
        telescopeManipulateOjbect = new OjbectManipulation(screenQuad,
                new ArrayList<Spatial>());
        telescopeManipulateOjbect.setPickMeOnly(true);
        telescopeManipulateOjbect.setControlledSpatial(this.manipulatedOjbect);
    }

    /**
     * Gets the screen quad.
     *
     * @return the screen quad
     */
    public Quad getScreenQuad() {
        return screenQuad;
    }

    /**
     * Loads {@code textureFile} (resolved relative to
     * {@link ThreeDManipulation}) and applies it to {@code target} with the
     * same filtering/wrap/apply settings previously duplicated inline.
     */
    private void applyTexture(Quad target, String textureFile) {
        TextureState ts = DisplaySystem.getDisplaySystem().getRenderer()
                .createTextureState();
        ts.setCorrectionType(TextureState.CorrectionType.Perspective);
        Texture texture = TextureManager.loadTexture(
                ThreeDManipulation.class.getResource(textureFile),
                Texture.MinificationFilter.Trilinear,
                Texture.MagnificationFilter.Bilinear);
        texture.setWrap(WrapMode.Repeat);
        texture.setApply(ApplyMode.Replace);
        ts.setTexture(texture);
        ts.apply();
        target.setRenderState(ts);
        target.updateRenderState();
    }

    /**
     * Applies the standard alpha-blend state (source-alpha blending, alpha
     * test greater-than) to {@code target}.
     */
    private void applyAlphaBlend(Quad target) {
        BlendState alpha = DisplaySystem.getDisplaySystem().getRenderer()
                .createBlendState();
        alpha.setEnabled(true);
        alpha.setBlendEnabled(true);
        alpha.setSourceFunction(BlendState.SourceFunction.SourceAlpha);
        alpha.setDestinationFunction(BlendState.DestinationFunction.OneMinusSourceAlpha);
        alpha.setTestEnabled(true);
        alpha.setTestFunction(BlendState.TestFunction.GreaterThan);
        target.setRenderState(alpha);
        target.updateRenderState();
    }

    /**
     * Rebuilds the feedback line from the cursor's world position to the
     * manipulated object's current translation.
     */
    private void redrawFeedbackLine(float newLocationX, float newLocationY) {
        Vector3f cursorWorldStart = DisplaySystem
                .getDisplaySystem()
                .getWorldCoordinates(
                        new Vector2f(newLocationX, newLocationY), 0.9f);
        if ((cursorWorldStart != null) && (manipulatedOjbect != null)) {
            FloatBuffer vectorBuff = BufferUtils.createVector3Buffer(2);
            FloatBuffer colorBuff = BufferUtils
                    .createFloatBuffer(new ColorRGBA[] {
                            ColorRGBA.gray, ColorRGBA.white });
            BufferUtils.setInBuffer(cursorWorldStart, vectorBuff, 0);
            BufferUtils.setInBuffer(
                    manipulatedOjbect.getLocalTranslation(), vectorBuff, 1);
            line.setLineWidth(2f);
            line.reconstruct(vectorBuff, null, colorBuff, null);
            line.updateRenderState();
            line.updateGeometricState(0f, false);
        }
    }
}
| |
package janala.csit6910;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.List;
import janala.config.Config;
/**
 * Writes (and incrementally appends rows to) a generated, parameterized JUnit
 * test class. A {@code /* $count$N *&#47;} marker inside the generated file
 * tracks how many data rows have been appended so far.
 * <p>
 * Fixes in this revision:
 * <ul>
 * <li>{@code mkdir()} replaced with {@code mkdirs()} — the package-derived
 * output directory is nested, so creating only the last path element failed
 * and the subsequent file creation threw {@link IOException}.</li>
 * <li>Two-argument {@link #writeJunitTest(List, List)} emitted an unbalanced
 * {@code "{"} for single-element output arrays (closed only when
 * {@code o.length > 1}); now closed whenever a brace was opened.</li>
 * </ul>
 */
public class JunitTestWriter {
    /** Platform line separator used when assembling generated source text. */
    private static final String linesep = System.getProperty("line.separator");

    /** Simple name of the generated JUnit class (also its file name). */
    private String junitclassname;
    /** Package of the generated class; may be empty for the default package. */
    private String packageName;
    /** Root output directory; empty/null falls back to {@code user.dir}. */
    private String dirName;

    /**
     * @param classname simple name of the JUnit class to generate
     * @param packageName package for the generated class ("" for default)
     * @param dirName root output directory (null/"" means {@code user.dir})
     */
    public JunitTestWriter(String classname, String packageName, String dirName) {
        this.junitclassname = classname;
        this.packageName = packageName;
        this.dirName = dirName;
    }

    /**
     * Write JUnit PUTS by supplied inputs, if file already exists we append it.
     * Inputs are rendered through {@link #toCodeString(Object)} so strings and
     * chars are quoted and numeric literals get their suffix.
     * */
    public void writeJunitTest(List<Object[]> inputs) {
        File workingDir = getDir();
        File file = new File(workingDir, junitclassname + ".java");
        PrintWriter out = null;
        BufferedReader br = null;
        if (!file.exists()) {
            // FIX: mkdirs() — getDir() returns a nested package path, and
            // mkdir() only creates the final path element.
            workingDir.mkdirs();
            createJunitTemplate(file);
        }
        try {
            br = new BufferedReader(new FileReader(file));
            StringBuilder sb = new StringBuilder();
            String line = br.readLine();
            while (line != null) {
                if (line.contains("$count")) {
                    // Marker line looks like "/* $count$N */": N = rows so far.
                    int beginIdx = line.lastIndexOf("$") + 1;
                    int endIdx = line.lastIndexOf("*");
                    int count = Integer.parseInt(line.substring(beginIdx, endIdx).trim());
                    // Later rows need a leading comma to extend the array literal.
                    line = count > 0 ? " ,{" : " {";
                    // TODO(review): multiple input rows are flattened into one
                    // {...} entry — a "}, {" separator between rows appears to
                    // be missing if calls with size() > 1 are intended.
                    for (int i = 0; i < inputs.size(); ++i) {
                        Object[] o = inputs.get(i);
                        for (int j = 0; j < o.length; ++j) {
                            if (j > 0) line += ", ";
                            line += toCodeString(o[j]);
                        }
                    }
                    line += "}" + linesep;
                    line += " /* $count$" + (count + 1) + " */";
                }
                sb.append(line + linesep);
                line = br.readLine();
            }
            out = createPrintWriter(file, false);
            out.print(sb.toString());
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            if (out != null)
                out.close();
            if (br != null)
                try {
                    br.close();
                } catch (IOException e) {}
        }
    }

    /**
     * Write junit PUTS by supplied inputs and outputs, if file already exists we append it.<br />
     * @param inputs set of valid inputs
     * @param output set of return value, we assume at most return can be one-dimensional array.
     * */
    public void writeJunitTest(List<Object[]> inputs, List<Object[]> output) {
        // row count should be equal
        assert inputs.size() == output.size();
        File workingDir = getDir();
        File file = new File(workingDir, junitclassname + ".java");
        PrintWriter out = null;
        BufferedReader br = null;
        if (!file.exists()) {
            // FIX: mkdirs() — see single-argument overload.
            workingDir.mkdirs();
            createJunitTemplate(file);
        }
        try {
            br = new BufferedReader(new FileReader(file));
            StringBuilder sb = new StringBuilder();
            String line = br.readLine();
            while (line != null) {
                if (line.contains("$count")) {
                    int beginIdx = line.lastIndexOf("$") + 1;
                    int endIdx = line.lastIndexOf("*");
                    int count = Integer.parseInt(line.substring(beginIdx, endIdx).trim());
                    int size = inputs.size();
                    line = count > 0 ? " ,{" : " {";
                    for (int i = 0; i < size; ++i) {
                        Object[] o = inputs.get(i);
                        for (int j = 0; j < o.length; ++j) {
                            if (j > 0) line += ", ";
                            line += o[j].toString();
                        }
                        o = output.get(i);
                        // Non-empty outputs are wrapped in a nested {...}.
                        line += (o.length == 0 ? ", " : ", {");
                        for (int j = 0; j < o.length; ++j) {
                            if (j > 0) line += ", ";
                            line += o[j].toString();
                        }
                        // FIX: was "o.length > 1", which left the "{" opened
                        // above unclosed for single-element output arrays.
                        if (o.length > 0)
                            line += "}";
                    }
                    line += "}" + linesep;
                    line += " /* $count$" + (count + 1) + " */";
                }
                sb.append(line + linesep);
                line = br.readLine();
            }
            out = createPrintWriter(file, false);
            out.print(sb.toString());
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            if (out != null)
                out.close();
            if (br != null)
                try {
                    br.close();
                } catch (IOException e) {}
        }
    }

    /**
     * Create initial template file for PUTS: a {@code @RunWith(Parameterized)}
     * JUnit class containing the {@code $count$0} marker the append methods
     * look for. Callers are expected to have created the parent directory.
     * */
    protected void createJunitTemplate(File file) {
        if (!file.exists())
            try {
                file.createNewFile();
            } catch (IOException e) {
                e.printStackTrace();
            }
        PrintWriter out = createPrintWriter(file, false);
        try {
            if (packageName.length() > 0) {
                out.println("package " + packageName + ";");
            }
            out.println();
            out.println("import java.util.Collection;");
            out.println("import java.util.Arrays;");
            out.println();
            out.println("import junit.framework.*;");
            out.println("import org.junit.Test;");
            out.println("import org.junit.runner.RunWith;");
            out.println("import org.junit.runners.Parameterized;");
            out.println("import org.junit.runners.Parameterized.Parameter;");
            out.println("import org.junit.runners.Parameterized.Parameters;");
            out.println();
            out.println("@RunWith(Parameterized.class)");
            out.println("public class " + junitclassname + " extends TestCase {");
            out.println();
            out.println(" @Parameters");
            out.println(" public static Collection<Object[]> data() {");
            out.println(" return Arrays.asList(new Object[][] {");
            out.println(" /* $count$0 */");
            out.println(" });");
            out.println(" }");
            out.println();
            // out.println(" @Parameter(value = 0)");
            // out.println(" public Object fInput;");
            // out.println();
            // out.println(" @Parameter");
            // out.println(" public Object fExpected;");
            // out.println();
            out.println(" public static boolean debug = false;");
            out.println();
            int testCounter = 1;
            out.println(" @Test");
            out.println(" public void test" + testCounter++ + "() throws Throwable {");
            out.println(" if (debug) { System.out.println(); System.out.print(\"" + junitclassname + ".test" + (testCounter - 1) + "\"); }");
            out.println();
            out.println(" // TODO: insert test in here");
            out.println();
            out.println(" }");
            out.println("}");
        } finally {
            if (out != null)
                out.close();
        }
    }

    /**
     * Opens a buffered {@link PrintWriter} over {@code file}; exits the JVM on
     * failure (generation cannot proceed without the output file).
     */
    public static PrintWriter createPrintWriter(File file, boolean append) {
        try {
            return new PrintWriter(new BufferedWriter(new FileWriter(file, append)));
        } catch (IOException e) {
            //Log.out.println("Exception thrown while creating text print stream:" + file.getName());
            e.printStackTrace();
            System.exit(1);
            throw new Error("This can't happen");
        }
    }

    /**
     * Normalize Object string value to match with code representation string.<br />
     * for example: if Object were String, we add double quote in beginning and end.<br />
     * Ints pass through, longs get an {@code L} suffix, floats an {@code f}
     * suffix, single characters are single-quoted, everything else is treated
     * as a string literal.
     * TODO : move to Util class
     * */
    protected String toCodeString(Object o) {
        String val = o.toString();
        Class<?> c;
        // Classify numerically: try int first, then long, then float.
        try {
            Integer.parseInt(val);
            c = Integer.class;
        } catch (NumberFormatException e1) {
            try {
                Long.parseLong(val);
                c = Long.class;
            } catch (NumberFormatException e2) {
                try {
                    Float.parseFloat(val);
                    c = Float.class;
                } catch (NumberFormatException e3) {
                    c = null; // not numeric
                }
            }
        }
        if (c != null) {
            if (c.equals(Integer.class))
                ; // int literals need no suffix
            else if (c.equals(Float.class))
                val = val + "f";
            else if (c.equals(Long.class))
                val = val + "L";
        } else {
            if (val.length() == 1)
                val = "'" + val + "'"; // char
            else
                val = "\"" + val + "\""; //string
        }
        return val;
    }

    /**
     * Resolves the output directory: {@code dirName} (or {@code user.dir})
     * extended by one subdirectory per package-name segment.
     */
    private File getDir() {
        File dir = null;
        if (dirName == null || dirName.length() == 0)
            dir = new File(System.getProperty("user.dir"));
        else
            dir = new File(dirName);
        if (packageName == null)
            return dir;
        packageName = packageName.trim(); // Just in case.
        if (packageName.length() == 0)
            return dir;
        String[] split = packageName.split("\\.");
        for (String s : split) {
            dir = new File(dir, s);
        }
        return dir;
    }
}
| |
/*
* #%L
* xcode-maven-plugin
* %%
* Copyright (C) 2012 SAP AG
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package com.sap.prd.mobile.ios.mios;
import static java.lang.String.format;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.PrintStream;
import java.io.Reader;
import java.io.StringReader;
import java.io.UnsupportedEncodingException;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URL;
import java.nio.charset.Charset;
import java.security.KeyManagementException;
import java.security.NoSuchAlgorithmException;
import java.security.cert.CertificateException;
import java.security.cert.X509Certificate;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.logging.LogManager;
import java.util.logging.Logger;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLException;
import javax.net.ssl.SSLSession;
import javax.net.ssl.SSLSocket;
import javax.net.ssl.TrustManager;
import javax.net.ssl.X509TrustManager;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.CharSet;
import org.apache.commons.lang.StringUtils;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.conn.ClientConnectionManager;
import org.apache.http.conn.scheme.Scheme;
import org.apache.http.conn.scheme.SchemeRegistry;
import org.apache.http.conn.ssl.SSLSocketFactory;
import org.apache.http.conn.ssl.X509HostnameVerifier;
import org.apache.http.impl.client.BasicResponseHandler;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.codehaus.plexus.classworlds.ClassWorld;
import org.codehaus.plexus.classworlds.realm.ClassRealm;
import org.codehaus.plexus.classworlds.realm.DuplicateRealmException;
import org.sonatype.aether.RepositorySystem;
import org.sonatype.aether.RepositorySystemSession;
import org.sonatype.aether.collection.DependencyCollectionException;
import org.sonatype.aether.graph.Dependency;
import org.sonatype.aether.repository.RemoteRepository;
import org.sonatype.aether.util.artifact.DefaultArtifact;
import com.sap.prd.mobile.ios.mios.XCodeContext.SourceCodeLocation;
import com.sap.prd.mobile.ios.mios.verificationchecks.v_1_0_0.Check;
import com.sap.prd.mobile.ios.mios.verificationchecks.v_1_0_0.Checks;
/**
* Provides the possibility to perform verification checks.<br>
* The check classes and their severities are described in an additional xml document, defined in
* <code>xcode.verification.checks.definitionFile</code>.<br>
* The specific checks have to be implemented in separate projects. These projects define dependency
* to Xcode Maven Pugin Verification API and must not reference the xcode-maven-plugin project.
* The Xcode Maven Plugin Verification API project could be found <a href=https://github.com/sap-production/xcode-maven-plugin-verification-api>here</a>
* The coordinates of that projects need to be provided on the
* <code>check</code> node belonging to the test as attributes <code>groupId</code>,
* <code>artifactId</code> and <code>version</code>.<br>
* The classpath for this goal will be extended by the jars found under the specified GAVs. <br>
* Example checks definition:
*
* <pre>
* <checks>
* <check groupId="my.group.id" artifactId="artifactId" version="1.0.0" severity="ERROR" class="com.my.MyVerificationCheck1"/>
* <check groupId="my.group.id" artifactId="artifactId" version="1.0.0" severity="WARNING" class="com.my.MyVerificationCheck2"/>
* </checks>
* </pre>
*
* @goal verification-check
*
*/
public class XCodeVerificationCheckMojo extends BuildContextAwareMojo
{
// Separators used when composing "<protocol>://<location>" URLs for the
// check-definition file.
private final static String COLON = ":", DOUBLE_SLASH = "//";
// NOTE(review): LogManager.getLogger(...) returns null unless a logger with
// this name was already registered (presumably by XCodePluginLogger) —
// confirm initialization order before dereferencing this field.
private static final Logger log = LogManager.getLogManager().getLogger(XCodePluginLogger.getLoggerName());
/**
 * Supported schemes for fetching the check-definition file. Each constant
 * knows how to open a {@link Reader} over "<protocol>://<location>".
 */
private enum Protocol
{
    HTTP() {
        @Override
        Reader getCheckDefinitions(String location) throws IOException
        {
            // Fetch the whole document into memory and hand back a reader
            // over the response body.
            HttpClient httpClient = new DefaultHttpClient();
            HttpGet get = new HttpGet(getName() + COLON + DOUBLE_SLASH + location);
            String response = httpClient.execute(get, new BasicResponseHandler());
            return new StringReader(response);
        }
    },
    HTTPS() {
        @Override
        Reader getCheckDefinitions(String location) throws IOException
        {
            HttpClient httpClient = new DefaultHttpClient();
            try {
                SSLContext sslcontext = SSLContext.getInstance("TLS");
                // SECURITY NOTE(review): this trust manager accepts every
                // certificate and the verifier below accepts every host name,
                // disabling TLS peer authentication entirely. That makes the
                // download vulnerable to man-in-the-middle substitution of the
                // check definitions — confirm this is intentional.
                X509TrustManager trustManager = new X509TrustManager() {
                    @Override
                    public X509Certificate[] getAcceptedIssuers()
                    {
                        return null;
                    }
                    @Override
                    public void checkServerTrusted(X509Certificate[] chain, String authType) throws CertificateException
                    {
                    }
                    @Override
                    public void checkClientTrusted(X509Certificate[] chain, String authType) throws CertificateException
                    {
                    }
                };
                // Accepts any host name (see security note above).
                X509HostnameVerifier hostNameVerifier = new X509HostnameVerifier() {
                    @Override
                    public boolean verify(String arg0, SSLSession arg1)
                    {
                        return true;
                    }
                    @Override
                    public void verify(String host, String[] cns, String[] subjectAlts) throws SSLException
                    {
                    }
                    @Override
                    public void verify(String host, X509Certificate cert) throws SSLException
                    {
                    }
                    @Override
                    public void verify(String host, SSLSocket ssl) throws IOException
                    {
                    }
                };
                final int port = new URL(getName() + COLON + DOUBLE_SLASH + location).getPort();
                sslcontext.init(null, new TrustManager[] { trustManager }, null);
                SSLSocketFactory sslSocketFactory = new SSLSocketFactory(sslcontext);
                sslSocketFactory.setHostnameVerifier(hostNameVerifier);
                // Register the permissive socket factory for this scheme/port
                // on the client's connection manager before issuing the GET.
                ClientConnectionManager clientConnectionManager = httpClient.getConnectionManager();
                SchemeRegistry sr = clientConnectionManager.getSchemeRegistry();
                sr.register(new Scheme(getName(), sslSocketFactory, port));
            }
            catch (NoSuchAlgorithmException e) {
                // NOTE(review): errors here are only printed; the request
                // below then proceeds with the default SSL configuration.
                e.printStackTrace();
            }
            catch (KeyManagementException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            }
            HttpGet get = new HttpGet(getName() + COLON + DOUBLE_SLASH + location);
            String response = httpClient.execute(get, new BasicResponseHandler());
            return new StringReader(response);
        }
    },
    FILE() {
        @Override
        Reader getCheckDefinitions(String location) throws IOException
        {
            // "file://path" and "file:path" are both accepted.
            if (location.startsWith(DOUBLE_SLASH)) location = location.substring(DOUBLE_SLASH.length());
            final File f = new File(location);
            if (!f.canRead()) {
                throw new IOException("Cannot read checkDefintionFile '" + f + "'.");
            }
            return new InputStreamReader((new FileInputStream(f)), "UTF-8");
        }
    };

    /**
     * Open a reader over the check definitions found at {@code location}
     * (the part after "protocol://").
     */
    abstract Reader getCheckDefinitions(String location) throws IOException;

    /** Lower-case scheme name, e.g. "https". */
    String getName()
    {
        return name().toLowerCase(Locale.ENGLISH);
    }

    /** Comma-separated list of all supported scheme names, for messages. */
    static String getProtocols()
    {
        final StringBuilder sb = new StringBuilder(16);
        for (Protocol p : Protocol.values()) {
            if (sb.length() != 0)
                sb.append(", ");
            sb.append(p.getName());
        }
        return sb.toString();
    }

    /**
     * Case-insensitive lookup of a scheme name.
     *
     * @throws InvalidProtocolException if {@code protocol} is not supported
     */
    static Protocol getProtocol(String protocol) throws InvalidProtocolException
    {
        try {
            return Protocol.valueOf(protocol.toUpperCase(Locale.ENGLISH));
        }
        catch (final IllegalArgumentException ex) {
            throw new InvalidProtocolException(protocol, ex);
        }
    }
}
/** Thrown when the check-definition location does not specify a protocol. */
static class NoProtocolException extends XCodeException
{
    private static final long serialVersionUID = -5510547403353575108L;

    NoProtocolException(String message, Throwable cause)
    {
        super(message, cause);
    }
};

/**
 * Thrown when the check-definition location names an unsupported protocol;
 * the message carries the offending protocol name.
 */
static class InvalidProtocolException extends XCodeException
{
    private static final long serialVersionUID = -5510547403353515108L;

    InvalidProtocolException(String message, Throwable cause)
    {
        super(message, cause);
    }
};
// NOTE: the @component/@parameter/@readonly javadoc tags below are Maven
// plugin metadata extracted by plexus at build time — do not remove them.

/**
 * The entry point to Aether, i.e. the component doing all the work.
 *
 * @component
 */
protected RepositorySystem repoSystem;

/**
 * The current repository/network configuration of Maven.
 *
 * @parameter default-value="${repositorySystemSession}"
 * @readonly
 */
protected RepositorySystemSession repoSession;

/**
 * The project's remote repositories to use for the resolution of project dependencies.
 *
 * @parameter default-value="${project.remoteProjectRepositories}"
 * @readonly
 */
protected List<RemoteRepository> projectRepos;

/**
 * Parameter, which controls the verification goal execution. By default, the verification goal
 * will be skipped.
 *
 * @parameter expression="${xcode.verification.checks.skip}" default-value="true"
 * @since 1.9.3
 */
private boolean skip;

/**
 * The location where the check definition file is present. Could be a file on the local file
 * system or a remote located file, accessed via http or https. <br>
 * Examples:
 * <ul>
 * <li>-Dxcode.verification.checks.definitionFile=file:./checkDefinitionFile.xml
 * <li>-Dxcode.verification.checks.definitionFile=http://example.com/checkDefinitionFile.xml
 * <li>-Dxcode.verification.checks.definitionFile=https://example.com/checkDefinitionFile.xml
 * </ul>
 *
 * @parameter expression="${xcode.verification.checks.definitionFile}"
 * @since 1.9.3
 */
private String checkDefinitionFile;
/**
 * Entry point for the {@code verification-check} goal: bails out when the
 * goal is skipped or the packaging type is not subject to checks, then loads
 * the configured check definitions, runs each check in its own class realm,
 * and finally reports all collected failures.
 */
@Override
public void execute() throws MojoExecutionException, MojoFailureException
{
    if (skip) {
        getLog().info(
            String.format(
                "Verification check goal has been skipped intentionally since parameter 'xcode.verification.checks.skip' is '%s'.",
                skip));
        return;
    }

    try {
        PackagingType.getByMavenType(packaging);
    }
    catch (PackagingType.UnknownPackagingTypeException ex)
    {
        getLog().info(
            "Packaging type is " + packaging
                + ". There is no need to apply verification checks for this packaging type.");
        return;
    }

    try {
        final Checks configuredChecks = getChecks(checkDefinitionFile);
        if (configuredChecks.getCheck().isEmpty()) {
            getLog().warn(String.format("No checks configured in '%s'.", checkDefinitionFile));
        }
        final Map<Check, Exception> failures = new HashMap<Check, Exception>();
        for (Check checkDescription : configuredChecks.getCheck()) {
            final Exception failure;
            try {
                final ClassRealm realm = extendClasspath(checkDescription);
                failure = performCheck(realm, checkDescription);
            }
            catch (DuplicateRealmException ex) {
                throw new MojoExecutionException(ex.getMessage(), ex);
            }
            if (failure != null) {
                failures.put(checkDescription, failure);
            }
        }
        handleExceptions(failures);
    }
    catch (XCodeException e) {
        throw new MojoExecutionException(e.getMessage(), e);
    }
    catch (IOException e) {
        throw new MojoExecutionException(e.getMessage(), e);
    }
    catch (JAXBException e) {
        throw new MojoExecutionException(e.getMessage(), e);
    }
    catch (DependencyCollectionException e) {
        throw new MojoExecutionException(e.getMessage(), e);
    }
}
/**
 * Loads the verification check class named in {@code checkDesc} from the given
 * class realm and executes it once per configuration/SDK combination.
 *
 * @param verificationCheckRealm realm that must contain the check class and its
 *          dependencies.
 * @param checkDesc descriptor naming the check class.
 * @return the exception thrown by a failing check execution, or {@code null}
 *         if all executions succeeded.
 * @throws MojoExecutionException if the check class cannot be loaded,
 *           instantiated or accessed.
 */
private Exception performCheck(ClassRealm verificationCheckRealm, final Check checkDesc)
      throws MojoExecutionException
{
  getLog().info(String.format("Performing verification check '%s'.", checkDesc.getClazz()));

  if (getLog().isDebugEnabled()) {
    final Charset defaultCharset = Charset.defaultCharset();
    final ByteArrayOutputStream byteOs = new ByteArrayOutputStream();
    final PrintStream ps;
    try {
      ps = new PrintStream(byteOs, true, defaultCharset.name());
    } catch(UnsupportedEncodingException ex) {
      // Fix: propagate the original exception as cause instead of dropping it.
      throw new MojoExecutionException(String.format("Charset '%s' cannot be found.", defaultCharset.name()), ex);
    }
    try {
      // Dump the realm's class path so class-loading problems can be diagnosed.
      verificationCheckRealm.display(ps);
      getLog().debug(
            String.format("Using classloader for loading verification check '%s':%s%s", checkDesc.getClazz(),
                  System.getProperty("line.separator"), new String(byteOs.toByteArray(), defaultCharset)));
    }
    finally {
      // Fix: the explicit ps.close() before this finally block was redundant
      // (double close); closing once here is sufficient.
      IOUtils.closeQuietly(ps);
    }
  }
  try {
    final Class<?> verificationCheckClass = Class.forName(checkDesc.getClazz(), true, verificationCheckRealm);
    getLog().debug(
          String.format("Verification check class %s has been loaded by %s.", verificationCheckClass.getName(),
                verificationCheckClass.getClassLoader()));
    getLog().debug(
          String.format("Verification check super class %s has been loaded by %s.", verificationCheckClass
                .getSuperclass().getName(), verificationCheckClass.getSuperclass().getClassLoader()));
    getLog().debug(
          String.format("%s class used by this class (%s) has been loaded by %s.", VerificationCheck.class.getName(),
                this.getClass().getName(), VerificationCheck.class.getClassLoader()));

    for (final String configuration : getConfigurations()) {
      for (final String sdk : getSDKs()) {
        getLog().info(
              String.format("Executing verification check: '%s' for configuration '%s' and sdk '%s'.",
                    verificationCheckClass.getName(), configuration, sdk));
        final VerificationCheck verificationCheck = (VerificationCheck) verificationCheckClass.newInstance();
        verificationCheck.setXcodeContext(getXCodeContext(SourceCodeLocation.WORKING_COPY, configuration, sdk));
        verificationCheck.setMavenProject(project);
        verificationCheck.setEffectiveBuildSettings(new EffectiveBuildSettings());
        try {
          verificationCheck.check();
        }
        catch (VerificationException ex) {
          return ex;
        }
        catch (RuntimeException ex) {
          return ex;
        }
      }
    }
    return null;
  }
  catch (ClassNotFoundException ex) {
    throw new MojoExecutionException(
          "Could not load verification check '"
                + checkDesc.getClazz()
                + "'. May be your classpath has not been properly extended. "
                +
                // Fix: typo "defintion" -> "definition".
                "Provide the GAV of the project containing the check as attributes as part of the check definition in the check configuration file.",
          ex);
  }
  catch (NoClassDefFoundError err) {
    // Fix: typo "declard" -> "declared".
    getLog().error(String.format("Could not load verification check '%s'. " +
          "May be your classpath has not been properly extended. " +
          "Additional dependencies need to be declared inside the check definition file: %s",
          checkDesc.getClazz(), err.getMessage()), err);
    throw err;
  }
  catch (InstantiationException ex) {
    // Fix: typo "instanciate" -> "instantiate".
    throw new MojoExecutionException(String.format("Could not instantiate verification check '%s': %s",
          checkDesc.getClazz(), ex.getMessage()), ex);
  }
  catch (IllegalAccessException ex) {
    throw new MojoExecutionException(String.format("Could not access verification check '%s': %s",
          checkDesc.getClazz(), ex.getMessage()), ex);
  }
}
/**
 * Logs every failed check and fails the build if at least one failed check
 * has severity ERROR.
 */
private void handleExceptions(Map<Check, Exception> failedChecks)
      throws MojoExecutionException
{
  boolean buildMustFail = false;
  for (final Map.Entry<Check, Exception> failure : failedChecks.entrySet()) {
    handleException(failure.getKey(), failure.getValue());
    buildMustFail |= failure.getKey().getSeverity().equalsIgnoreCase("ERROR");
  }
  if (buildMustFail) {
    throw new MojoExecutionException("Verification checks failed. See the log file for details.");
  }
}
/**
 * Logs a single failed check: severity WARNING is logged at warn level,
 * everything else at error level.
 *
 * @param failedCheck the check that failed.
 * @param e the exception describing the failure; a {@link VerificationException}
 *          means the check itself failed, anything else is a setup problem.
 */
private void handleException(Check failedCheck, final Exception e)
{
  final String message;
  if (e instanceof VerificationException) {
    // Fix: the opening quote around the class name was never closed.
    message = "Verification check '" + failedCheck.getClazz() + "' failed. " + e.getMessage();
  }
  else {
    message = "Cannot perform check: " + failedCheck.getClazz() + ". Error during test setup " + e.getMessage();
  }
  if (failedCheck.getSeverity().equalsIgnoreCase("WARNING")) {
    getLog().warn(message);
  }
  else {
    getLog().error(message);
  }
}
/**
 * Creates a child class realm containing the check's own dependencies so the
 * check class can be loaded. Dependencies already provided by the
 * verification API are omitted. Returns the current realm unchanged when the
 * check declares no coordinates.
 */
private ClassRealm extendClasspath(Check check) throws XCodeException, DependencyCollectionException,
      DuplicateRealmException, MalformedURLException
{
  final org.sonatype.aether.artifact.Artifact checkArtifact = parseDependency(check);

  final ClassLoader myLoader = this.getClass().getClassLoader();
  if (!(myLoader instanceof ClassRealm)) {
    throw new XCodeException("Could not add jar to classpath. Class loader '" + myLoader
          + "' is not an instance of '" + ClassRealm.class.getName() + "'.");
  }
  final ClassRealm parentRealm = (ClassRealm) myLoader;

  // No coordinates maintained: the check is assumed to be on the classpath already.
  if (checkArtifact == null) {
    return parentRealm;
  }

  // Do not resolve dependencies with scope "test".
  final Set<String> scopes = new HashSet<String>(Arrays.asList(org.apache.maven.artifact.Artifact.SCOPE_COMPILE,
        org.apache.maven.artifact.Artifact.SCOPE_PROVIDED,
        org.apache.maven.artifact.Artifact.SCOPE_RUNTIME,
        org.apache.maven.artifact.Artifact.SCOPE_SYSTEM));

  final XCodeDownloadManager downloadManager = new XCodeDownloadManager(projectRepos, repoSystem, repoSession);

  // Everything reachable from the verification API is already present in the
  // parent realm and must not be added again.
  final Set<org.sonatype.aether.artifact.Artifact> noOmits = Collections.emptySet();
  final Set<org.sonatype.aether.artifact.Artifact> omits = downloadManager.resolveArtifactWithTransitveDependencies(
        new Dependency(getVerificationAPIGav(), org.apache.maven.artifact.Artifact.SCOPE_COMPILE), scopes,
        noOmits);
  omits.add(getVerificationAPIGav());

  final Set<org.sonatype.aether.artifact.Artifact> checkDependencies = downloadManager
    .resolveArtifactWithTransitveDependencies(new Dependency(checkArtifact,
          org.apache.maven.artifact.Artifact.SCOPE_COMPILE), scopes, omits);

  final ClassRealm childRealm = parentRealm.createChildRealm(getUniqueRealmId(parentRealm.getWorld(),
        parentRealm.getId() + "-" + check.getClazz()));
  addDependencies(childRealm, checkDependencies);
  return childRealm;
}
/**
 * Builds a realm id that is not yet registered in the given class world by
 * appending an increasing counter to the prefix.
 */
private String getUniqueRealmId(final ClassWorld world, final String realmIdPrefix)
{
  for (int counter = 0; ; counter++) {
    final String candidate = realmIdPrefix + "-" + counter;
    if (world.getClassRealm(candidate) == null) {
      return candidate;
    }
  }
}
/**
 * Makes each resolved artifact's jar file visible to the given child realm.
 */
private void addDependencies(final ClassRealm childClassRealm, Set<org.sonatype.aether.artifact.Artifact> artifacts)
      throws MalformedURLException
{
  for (final org.sonatype.aether.artifact.Artifact artifact : artifacts) {
    childClassRealm.addURL(artifact.getFile().toURI().toURL());
  }
}
/**
 * Derives the maven coordinates (GAV) of the project providing the given check.
 *
 * @return the artifact described by the check's coordinates, or {@code null}
 *         when no coordinates are maintained at all (the check is then assumed
 *         to be on the classpath already).
 * @throws XCodeException if the coordinates are only partially maintained.
 */
static org.sonatype.aether.artifact.Artifact parseDependency(final Check check)
      throws XCodeException
{
  final String groupId = check.getGroupId();
  final String artifactId = check.getArtifactId();
  final String version = check.getVersion();

  if (StringUtils.isEmpty(groupId) && StringUtils.isEmpty(artifactId) && StringUtils.isEmpty(version)) {
    log.info(
          "No coordinates maintained for check represented by class '" + check.getClazz()
                + "'. Assuming this check is already contained in the classpath.");
    return null;
  }

  // Fix: typo "emtpy" -> "empty" in the three messages below.
  if (StringUtils.isEmpty(groupId))
    throw new XCodeException(String.format("groupId for check %s is null or empty", check.getClazz()));

  if (StringUtils.isEmpty(artifactId))
    throw new XCodeException(String.format("artifactId for check %s is null or empty", check.getClazz()));

  if (StringUtils.isEmpty(version))
    throw new XCodeException(String.format("version for check %s is null or empty", check.getClazz()));

  return new DefaultArtifact(groupId, artifactId, "jar", version);
}
/**
 * Reads the check definition file and unmarshals it into a {@link Checks}
 * instance. The reader is always closed.
 */
static Checks getChecks(final String checkDefinitionFileLocation) throws XCodeException, IOException, JAXBException
{
  final Reader checkDefinitions = getChecksDescriptor(checkDefinitionFileLocation);
  try {
    return (Checks) JAXBContext.newInstance(Checks.class).createUnmarshaller().unmarshal(checkDefinitions);
  }
  finally {
    IOUtils.closeQuietly(checkDefinitions);
  }
}
/**
 * Reads the GAV of the verification API from the plugin's bundled
 * {@code /misc/project.properties} resource.
 *
 * @throws XCodeException if the resource is missing or cannot be read.
 */
org.sonatype.aether.artifact.Artifact getVerificationAPIGav() throws XCodeException
{
  final InputStream is = XCodeVerificationCheckMojo.class.getResourceAsStream("/misc/project.properties");
  if (is == null) {
    throw new XCodeException("Cannot get the GAV of the xcode-maven-plugin");
  }
  try {
    final Properties props = new Properties();
    props.load(is);
    return new DefaultArtifact(
          props.getProperty("verification.api.groupId"),
          props.getProperty("verification.api.artifactId"),
          "jar",
          props.getProperty("verification.api.version"));
  }
  catch (final IOException ex) {
    throw new XCodeException("Cannot get the GAV for the verification API", ex);
  }
  finally {
    IOUtils.closeQuietly(is);
  }
}
/**
 * Resolves the configured check definition file location into a reader.
 *
 * @throws XCodeException if the location is not configured or uses an
 *           unsupported protocol.
 * @throws IOException if the check definitions cannot be retrieved.
 */
static Reader getChecksDescriptor(final String checkDefinitionFileLocation) throws XCodeException, IOException
{
  if (checkDefinitionFileLocation == null || checkDefinitionFileLocation.trim().isEmpty()) {
    // Fix: typo "paramater" -> "parameter".
    throw new XCodeException(
          "CheckDefinitionFile was not configured. Cannot perform verification checks. Define check definition file with parameter 'xcode.verification.checks.definitionFile'.");
  }

  final Location location = Location.getLocation(checkDefinitionFileLocation);

  try {
    Protocol protocol = Protocol.valueOf(location.protocol);
    return protocol.getCheckDefinitions(location.location);
  }
  catch (IllegalArgumentException ex) {
    throw new InvalidProtocolException(format("Invalid protocol provided: '%s'. Supported values are:'%s'.",
          location.protocol, Protocol.getProtocols()), ex);
  }
  catch (IOException ex) {
    throw new IOException(format("Cannot get check definitions from '%s'.", checkDefinitionFileLocation), ex);
  }
}
/**
 * Value object splitting a check definition file location into its protocol
 * and the protocol-specific location part.
 */
static class Location
{
  /**
   * Parses the given location string.
   *
   * @throws NoProtocolException if the string contains no scheme at all.
   * @throws MalformedURLException if the string is not a valid URL.
   * @throws IllegalStateException if the URL uses a protocol other than
   *           FILE, HTTP or HTTPS.
   */
  static Location getLocation(final String locationUriString) throws InvalidProtocolException, NoProtocolException,
        MalformedURLException
  {
    final URL url;
    try {
      url = new URL(locationUriString.trim());
    }
    catch (MalformedURLException ex) {
      //
      // trouble with protocol ???
      //
      try {
        if (URI.create(locationUriString).getScheme() == null)
        {
          throw new NoProtocolException(String.format(
                "Provide a protocol [%s] for parameter 'xcode.verification.checks.definitionFile'",
                Protocol.getProtocols()), ex);
        }
      }
      catch (RuntimeException ignore) {
        //
        // In this case we rethrow the original MalformedURLException below,
        // which indicates a problem with the URL itself.
        //
      }
      throw ex;
    }
    final Protocol protocol = Protocol.getProtocol(url.getProtocol());
    final String location;
    if (protocol == Protocol.FILE)
    {
      location = url.getPath();
    }
    else if (protocol == Protocol.HTTP || protocol == Protocol.HTTPS) {
      // Strip "<protocol>://" from the front of the original string.
      location = locationUriString.trim().substring(
            protocol.getName().length() + COLON.length() + DOUBLE_SLASH.length());
    }
    else {
      // Fix: the protocol was concatenated onto the format string instead of
      // being passed as a format argument, so String.format saw a '%s' with no
      // argument and threw MissingFormatArgumentException at runtime.
      throw new IllegalStateException(String.format("Unknown protocol: '%s'.", url.getProtocol()));
    }
    return new Location(protocol.getName(), location);
  }

  final String protocol;
  final String location;

  public Location(String protocol, String location)
  {
    // Upper-cased so the value matches the Protocol enum constant names
    // used by Protocol.valueOf in getChecksDescriptor.
    this.protocol = protocol.toUpperCase(Locale.ENGLISH);
    this.location = location;
  }
}
}
| |
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.ec2.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
import com.amazonaws.Request;
import com.amazonaws.services.ec2.model.transform.ModifyVpcEndpointServiceConfigurationRequestMarshaller;
/**
*
*/
/**
 * Request object for modifying a VPC endpoint service configuration: the
 * service id, whether endpoint requests must be accepted, and the Network
 * Load Balancer ARNs to add to or remove from the configuration.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ModifyVpcEndpointServiceConfigurationRequest extends AmazonWebServiceRequest implements Serializable, Cloneable,
        DryRunSupportedRequest<ModifyVpcEndpointServiceConfigurationRequest> {

    /** The ID of the service. */
    private String serviceId;

    /** Whether requests to create an endpoint to the service must be accepted. */
    private Boolean acceptanceRequired;

    /** ARNs of Network Load Balancers to add to the service configuration. */
    private com.amazonaws.internal.SdkInternalList<String> addNetworkLoadBalancerArns;

    /** ARNs of Network Load Balancers to remove from the service configuration. */
    private com.amazonaws.internal.SdkInternalList<String> removeNetworkLoadBalancerArns;

    /**
     * Sets the ID of the service.
     *
     * @param serviceId
     *        The ID of the service.
     */
    public void setServiceId(String serviceId) {
        this.serviceId = serviceId;
    }

    /**
     * Returns the ID of the service.
     *
     * @return The ID of the service.
     */
    public String getServiceId() {
        return this.serviceId;
    }

    /**
     * Fluent variant of {@link #setServiceId(String)}.
     *
     * @param serviceId
     *        The ID of the service.
     * @return this request, for method chaining.
     */
    public ModifyVpcEndpointServiceConfigurationRequest withServiceId(String serviceId) {
        setServiceId(serviceId);
        return this;
    }

    /**
     * Sets whether requests to create an endpoint to the service must be accepted.
     *
     * @param acceptanceRequired
     *        Indicate whether requests to create an endpoint to your service must be accepted.
     */
    public void setAcceptanceRequired(Boolean acceptanceRequired) {
        this.acceptanceRequired = acceptanceRequired;
    }

    /**
     * Returns whether requests to create an endpoint to the service must be accepted.
     *
     * @return Indicate whether requests to create an endpoint to your service must be accepted.
     */
    public Boolean getAcceptanceRequired() {
        return this.acceptanceRequired;
    }

    /**
     * Fluent variant of {@link #setAcceptanceRequired(Boolean)}.
     *
     * @param acceptanceRequired
     *        Indicate whether requests to create an endpoint to your service must be accepted.
     * @return this request, for method chaining.
     */
    public ModifyVpcEndpointServiceConfigurationRequest withAcceptanceRequired(Boolean acceptanceRequired) {
        setAcceptanceRequired(acceptanceRequired);
        return this;
    }

    /**
     * Boolean-style accessor, identical in behavior to {@link #getAcceptanceRequired()}.
     *
     * @return Indicate whether requests to create an endpoint to your service must be accepted.
     */
    public Boolean isAcceptanceRequired() {
        return this.acceptanceRequired;
    }

    /**
     * Returns the ARNs of Network Load Balancers to add, lazily creating the
     * backing list on first access.
     *
     * @return The Amazon Resource Names (ARNs) of Network Load Balancers to add to your service configuration.
     */
    public java.util.List<String> getAddNetworkLoadBalancerArns() {
        if (addNetworkLoadBalancerArns == null) {
            addNetworkLoadBalancerArns = new com.amazonaws.internal.SdkInternalList<String>();
        }
        return addNetworkLoadBalancerArns;
    }

    /**
     * Replaces the ARNs of Network Load Balancers to add with a copy of the
     * given collection ({@code null} clears the list).
     *
     * @param addNetworkLoadBalancerArns
     *        The Amazon Resource Names (ARNs) of Network Load Balancers to add to your service configuration.
     */
    public void setAddNetworkLoadBalancerArns(java.util.Collection<String> addNetworkLoadBalancerArns) {
        if (addNetworkLoadBalancerArns == null) {
            this.addNetworkLoadBalancerArns = null;
        } else {
            this.addNetworkLoadBalancerArns = new com.amazonaws.internal.SdkInternalList<String>(addNetworkLoadBalancerArns);
        }
    }

    /**
     * Appends the given ARNs to the list of Network Load Balancers to add.
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setAddNetworkLoadBalancerArns(java.util.Collection)} or
     * {@link #withAddNetworkLoadBalancerArns(java.util.Collection)} if you want to override the existing values.
     * </p>
     *
     * @param addNetworkLoadBalancerArns
     *        The Amazon Resource Names (ARNs) of Network Load Balancers to add to your service configuration.
     * @return this request, for method chaining.
     */
    public ModifyVpcEndpointServiceConfigurationRequest withAddNetworkLoadBalancerArns(String... addNetworkLoadBalancerArns) {
        if (this.addNetworkLoadBalancerArns == null) {
            this.addNetworkLoadBalancerArns = new com.amazonaws.internal.SdkInternalList<String>(addNetworkLoadBalancerArns.length);
        }
        for (String arn : addNetworkLoadBalancerArns) {
            this.addNetworkLoadBalancerArns.add(arn);
        }
        return this;
    }

    /**
     * Replaces the ARNs of Network Load Balancers to add (fluent variant of
     * {@link #setAddNetworkLoadBalancerArns(java.util.Collection)}).
     *
     * @param addNetworkLoadBalancerArns
     *        The Amazon Resource Names (ARNs) of Network Load Balancers to add to your service configuration.
     * @return this request, for method chaining.
     */
    public ModifyVpcEndpointServiceConfigurationRequest withAddNetworkLoadBalancerArns(java.util.Collection<String> addNetworkLoadBalancerArns) {
        setAddNetworkLoadBalancerArns(addNetworkLoadBalancerArns);
        return this;
    }

    /**
     * Returns the ARNs of Network Load Balancers to remove, lazily creating
     * the backing list on first access.
     *
     * @return The Amazon Resource Names (ARNs) of Network Load Balancers to remove from your service configuration.
     */
    public java.util.List<String> getRemoveNetworkLoadBalancerArns() {
        if (removeNetworkLoadBalancerArns == null) {
            removeNetworkLoadBalancerArns = new com.amazonaws.internal.SdkInternalList<String>();
        }
        return removeNetworkLoadBalancerArns;
    }

    /**
     * Replaces the ARNs of Network Load Balancers to remove with a copy of the
     * given collection ({@code null} clears the list).
     *
     * @param removeNetworkLoadBalancerArns
     *        The Amazon Resource Names (ARNs) of Network Load Balancers to remove from your service configuration.
     */
    public void setRemoveNetworkLoadBalancerArns(java.util.Collection<String> removeNetworkLoadBalancerArns) {
        if (removeNetworkLoadBalancerArns == null) {
            this.removeNetworkLoadBalancerArns = null;
        } else {
            this.removeNetworkLoadBalancerArns = new com.amazonaws.internal.SdkInternalList<String>(removeNetworkLoadBalancerArns);
        }
    }

    /**
     * Appends the given ARNs to the list of Network Load Balancers to remove.
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setRemoveNetworkLoadBalancerArns(java.util.Collection)} or
     * {@link #withRemoveNetworkLoadBalancerArns(java.util.Collection)} if you want to override the existing values.
     * </p>
     *
     * @param removeNetworkLoadBalancerArns
     *        The Amazon Resource Names (ARNs) of Network Load Balancers to remove from your service configuration.
     * @return this request, for method chaining.
     */
    public ModifyVpcEndpointServiceConfigurationRequest withRemoveNetworkLoadBalancerArns(String... removeNetworkLoadBalancerArns) {
        if (this.removeNetworkLoadBalancerArns == null) {
            this.removeNetworkLoadBalancerArns = new com.amazonaws.internal.SdkInternalList<String>(removeNetworkLoadBalancerArns.length);
        }
        for (String arn : removeNetworkLoadBalancerArns) {
            this.removeNetworkLoadBalancerArns.add(arn);
        }
        return this;
    }

    /**
     * Replaces the ARNs of Network Load Balancers to remove (fluent variant of
     * {@link #setRemoveNetworkLoadBalancerArns(java.util.Collection)}).
     *
     * @param removeNetworkLoadBalancerArns
     *        The Amazon Resource Names (ARNs) of Network Load Balancers to remove from your service configuration.
     * @return this request, for method chaining.
     */
    public ModifyVpcEndpointServiceConfigurationRequest withRemoveNetworkLoadBalancerArns(java.util.Collection<String> removeNetworkLoadBalancerArns) {
        setRemoveNetworkLoadBalancerArns(removeNetworkLoadBalancerArns);
        return this;
    }

    /**
     * This method is intended for internal use only. Returns the marshaled request configured with additional
     * parameters to enable operation dry-run.
     */
    @Override
    public Request<ModifyVpcEndpointServiceConfigurationRequest> getDryRunRequest() {
        Request<ModifyVpcEndpointServiceConfigurationRequest> dryRunRequest =
                new ModifyVpcEndpointServiceConfigurationRequestMarshaller().marshall(this);
        dryRunRequest.addParameter("DryRun", Boolean.toString(true));
        return dryRunRequest;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder builder = new StringBuilder();
        builder.append("{");
        if (getServiceId() != null) {
            builder.append("ServiceId: ").append(getServiceId()).append(",");
        }
        if (getAcceptanceRequired() != null) {
            builder.append("AcceptanceRequired: ").append(getAcceptanceRequired()).append(",");
        }
        if (getAddNetworkLoadBalancerArns() != null) {
            builder.append("AddNetworkLoadBalancerArns: ").append(getAddNetworkLoadBalancerArns()).append(",");
        }
        if (getRemoveNetworkLoadBalancerArns() != null) {
            builder.append("RemoveNetworkLoadBalancerArns: ").append(getRemoveNetworkLoadBalancerArns());
        }
        builder.append("}");
        return builder.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof ModifyVpcEndpointServiceConfigurationRequest)) {
            return false;
        }
        ModifyVpcEndpointServiceConfigurationRequest other = (ModifyVpcEndpointServiceConfigurationRequest) obj;
        // Objects.equals(a, b) is equivalent to the generated null-xor-then-equals pattern.
        return java.util.Objects.equals(other.getServiceId(), this.getServiceId())
                && java.util.Objects.equals(other.getAcceptanceRequired(), this.getAcceptanceRequired())
                && java.util.Objects.equals(other.getAddNetworkLoadBalancerArns(), this.getAddNetworkLoadBalancerArns())
                && java.util.Objects.equals(other.getRemoveNetworkLoadBalancerArns(), this.getRemoveNetworkLoadBalancerArns());
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        // Objects.hashCode(x) yields 0 for null, x.hashCode() otherwise — same as the generated form.
        result = prime * result + java.util.Objects.hashCode(getServiceId());
        result = prime * result + java.util.Objects.hashCode(getAcceptanceRequired());
        result = prime * result + java.util.Objects.hashCode(getAddNetworkLoadBalancerArns());
        result = prime * result + java.util.Objects.hashCode(getRemoveNetworkLoadBalancerArns());
        return result;
    }

    @Override
    public ModifyVpcEndpointServiceConfigurationRequest clone() {
        return (ModifyVpcEndpointServiceConfigurationRequest) super.clone();
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.operator;
import com.facebook.presto.ExceededMemoryLimitException;
import com.facebook.presto.execution.TaskId;
import com.facebook.presto.operator.WindowOperator.WindowOperatorFactory;
import com.facebook.presto.operator.window.RowNumberFunction;
import com.facebook.presto.operator.window.WindowFunction;
import com.facebook.presto.sql.analyzer.Session;
import com.facebook.presto.util.MaterializedResult;
import com.google.common.collect.ImmutableList;
import io.airlift.units.DataSize;
import io.airlift.units.DataSize.Unit;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import java.util.List;
import java.util.concurrent.ExecutorService;
import static com.facebook.presto.operator.OperatorAssertion.assertOperatorEquals;
import static com.facebook.presto.operator.OperatorAssertion.toPages;
import static com.facebook.presto.operator.RowPagesBuilder.rowPagesBuilder;
import static com.facebook.presto.tuple.TupleInfo.SINGLE_BOOLEAN;
import static com.facebook.presto.tuple.TupleInfo.SINGLE_DOUBLE;
import static com.facebook.presto.tuple.TupleInfo.SINGLE_LONG;
import static com.facebook.presto.tuple.TupleInfo.SINGLE_VARBINARY;
import static com.facebook.presto.tuple.TupleInfo.Type.BOOLEAN;
import static com.facebook.presto.tuple.TupleInfo.Type.DOUBLE;
import static com.facebook.presto.tuple.TupleInfo.Type.FIXED_INT_64;
import static com.facebook.presto.tuple.TupleInfo.Type.VARIABLE_BINARY;
import static com.facebook.presto.util.MaterializedResult.resultBuilder;
import static com.facebook.presto.util.Threads.daemonThreadsNamed;
import static java.util.concurrent.Executors.newCachedThreadPool;
/**
 * Tests for {@link WindowOperator} exercising the ROW_NUMBER window function
 * with and without partitioning/ordering, plus the memory-limit path.
 */
@Test(singleThreaded = true)
public class TestWindowOperator
{
    private static final List<WindowFunction> ROW_NUMBER = ImmutableList.<WindowFunction>of(new RowNumberFunction());

    private ExecutorService executor;
    private DriverContext driverContext;

    @BeforeMethod
    public void setUp()
    {
        executor = newCachedThreadPool(daemonThreadsNamed("test"));
        Session session = new Session("user", "source", "catalog", "schema", "address", "agent");
        driverContext = new TaskContext(new TaskId("query", "stage", "task"), executor, session)
                .addPipelineContext(true, true)
                .addDriverContext();
    }

    @AfterMethod
    public void tearDown()
    {
        executor.shutdownNow();
    }

    @Test
    public void testRowNumber()
            throws Exception
    {
        // Two pages of (long, double) rows, deliberately unsorted on the sort key.
        List<Page> inputPages = rowPagesBuilder(SINGLE_LONG, SINGLE_DOUBLE)
                .row(2, 0.3)
                .row(4, 0.2)
                .row(6, 0.1)
                .pageBreak()
                .row(-1, -0.1)
                .row(5, 0.4)
                .build();

        WindowOperatorFactory factory = new WindowOperatorFactory(
                0,
                ImmutableList.of(SINGLE_LONG, SINGLE_DOUBLE),
                ints(1, 0),
                ROW_NUMBER,
                ints(),
                ints(0),
                sortOrder(SortOrder.ASC_NULLS_LAST),
                10);
        Operator windowOperator = factory.createOperator(driverContext);

        // Rows come back ordered by the first channel with row numbers appended.
        MaterializedResult expectedRows = resultBuilder(DOUBLE, FIXED_INT_64, FIXED_INT_64)
                .row(-0.1, -1, 1)
                .row(0.3, 2, 2)
                .row(0.2, 4, 3)
                .row(0.4, 5, 4)
                .row(0.1, 6, 5)
                .build();

        assertOperatorEquals(windowOperator, inputPages, expectedRows);
    }

    @Test
    public void testRowNumberPartition()
            throws Exception
    {
        List<Page> inputPages = rowPagesBuilder(SINGLE_VARBINARY, SINGLE_LONG, SINGLE_DOUBLE, SINGLE_BOOLEAN)
                .row("b", -1, -0.1, true)
                .row("a", 2, 0.3, false)
                .row("a", 4, 0.2, true)
                .pageBreak()
                .row("b", 5, 0.4, false)
                .row("a", 6, 0.1, true)
                .build();

        WindowOperatorFactory factory = new WindowOperatorFactory(
                0,
                ImmutableList.of(SINGLE_VARBINARY, SINGLE_LONG, SINGLE_DOUBLE, SINGLE_BOOLEAN),
                ints(0, 1, 2, 3),
                ROW_NUMBER,
                ints(0),
                ints(1),
                sortOrder(SortOrder.ASC_NULLS_LAST),
                10);
        Operator windowOperator = factory.createOperator(driverContext);

        // Row numbers restart at 1 within each partition on channel 0.
        MaterializedResult expectedRows = resultBuilder(VARIABLE_BINARY, FIXED_INT_64, DOUBLE, BOOLEAN, FIXED_INT_64)
                .row("a", 2, 0.3, false, 1)
                .row("a", 4, 0.2, true, 2)
                .row("a", 6, 0.1, true, 3)
                .row("b", -1, -0.1, true, 1)
                .row("b", 5, 0.4, false, 2)
                .build();

        assertOperatorEquals(windowOperator, inputPages, expectedRows);
    }

    @Test
    public void testRowNumberArbitrary()
            throws Exception
    {
        List<Page> inputPages = rowPagesBuilder(SINGLE_LONG)
                .row(1)
                .row(3)
                .row(5)
                .row(7)
                .pageBreak()
                .row(2)
                .row(4)
                .row(6)
                .row(8)
                .build();

        WindowOperatorFactory factory = new WindowOperatorFactory(
                0,
                ImmutableList.of(SINGLE_LONG),
                ints(0),
                ROW_NUMBER,
                ints(),
                ints(),
                sortOrder(),
                10);
        Operator windowOperator = factory.createOperator(driverContext);

        // With no partitioning and no sort key, row numbers follow input order.
        MaterializedResult expectedRows = resultBuilder(FIXED_INT_64, FIXED_INT_64)
                .row(1, 1)
                .row(3, 2)
                .row(5, 3)
                .row(7, 4)
                .row(2, 5)
                .row(4, 6)
                .row(6, 7)
                .row(8, 8)
                .build();

        assertOperatorEquals(windowOperator, inputPages, expectedRows);
    }

    @Test(expectedExceptions = ExceededMemoryLimitException.class, expectedExceptionsMessageRegExp = "Task exceeded max memory size of 10B")
    public void testMemoryLimit()
            throws Exception
    {
        List<Page> inputPages = rowPagesBuilder(SINGLE_LONG, SINGLE_DOUBLE)
                .row(1, 0.1)
                .row(2, 0.2)
                .pageBreak()
                .row(-1, -0.1)
                .row(4, 0.4)
                .build();

        // A task context with a tiny (10 byte) memory budget must trip the limit.
        Session session = new Session("user", "source", "catalog", "schema", "address", "agent");
        DriverContext limitedDriverContext = new TaskContext(new TaskId("query", "stage", "task"), executor, session, new DataSize(10, Unit.BYTE))
                .addPipelineContext(true, true)
                .addDriverContext();

        WindowOperatorFactory factory = new WindowOperatorFactory(
                0,
                ImmutableList.of(SINGLE_LONG, SINGLE_DOUBLE),
                ints(1),
                ROW_NUMBER,
                ints(),
                ints(0),
                sortOrder(SortOrder.ASC_NULLS_LAST),
                10);
        Operator windowOperator = factory.createOperator(limitedDriverContext);

        toPages(windowOperator, inputPages);
    }

    /** Varargs-to-array helper for channel index lists. */
    private static int[] ints(int... array)
    {
        return array;
    }

    /** Varargs-to-array helper for sort order lists. */
    private static SortOrder[] sortOrder(SortOrder... array)
    {
        return array;
    }
}
| |
/* Generated By:JavaCC: Do not edit this line. ConditionParserTokenManager.java */
package org.javacc.utils;
import java.io.StringReader;
import java.util.Map;
/** Token Manager. */
public class ConditionParserTokenManager implements ConditionParserConstants
{
/** Debug output. Defaults to {@link System#out}. */
public java.io.PrintStream debugStream = System.out;

/** Set debug output. */
public void setDebugStream(java.io.PrintStream ds) { debugStream = ds; }
/**
 * Generated DFA helper: called when string-literal matching cannot continue
 * after {@code pos} characters. {@code active0} is the bitmask of literal
 * token kinds still viable at that position. Records a fallback match kind
 * (kind 21 — presumably the identifier-like token; TODO confirm against
 * ConditionParserConstants) where applicable and returns the NFA state to
 * continue from, or -1 if no NFA continuation exists.
 */
private final int jjStopStringLiteralDfa_0(int pos, long active0)
{
   switch (pos)
   {
      case 0:
         // Bits 0x80000/0x100000 are the literals spelled out in the
         // jjMoveStringLiteralDfa* chain ("true"/"false").
         if ((active0 & 0x180000L) != 0L)
         {
            jjmatchedKind = 21;
            return 5;
         }
         // Bits 0x40/0x100 correspond to the tokens started by '/' ("//", "/*").
         if ((active0 & 0x140L) != 0L)
            return 2;
         return -1;
      case 1:
         if ((active0 & 0x100L) != 0L)
            return 0;
         if ((active0 & 0x180000L) != 0L)
         {
            jjmatchedKind = 21;
            jjmatchedPos = 1;
            return 5;
         }
         return -1;
      case 2:
         if ((active0 & 0x180000L) != 0L)
         {
            jjmatchedKind = 21;
            jjmatchedPos = 2;
            return 5;
         }
         return -1;
      case 3:
         if ((active0 & 0x100000L) != 0L)
         {
            jjmatchedKind = 21;
            jjmatchedPos = 3;
            return 5;
         }
         if ((active0 & 0x80000L) != 0L)
            return 5;
         return -1;
      default :
         return -1;
   }
}
/**
 * Generated helper: switches from literal matching to the NFA, starting at the
 * state chosen by {@link #jjStopStringLiteralDfa_0} for the failed position.
 */
private final int jjStartNfa_0(int pos, long active0)
{
   return jjMoveNfa_0(jjStopStringLiteralDfa_0(pos, active0), pos + 1);
}
/**
 * Generated helper: records a completed token match of the given kind ending
 * at {@code pos} and returns the length of the match.
 */
private int jjStopAtPos(int pos, int kind)
{
   jjmatchedKind = kind;
   jjmatchedPos = pos;
   return pos + 1;
}
/**
 * Generated helper: records a literal match of {@code kind} at {@code pos},
 * then reads the next character and continues in the NFA from {@code state}
 * (a longer token may still supersede the recorded match). On end of input
 * the recorded match stands and the consumed length is returned.
 */
private int jjStartNfaWithStates_0(int pos, int kind, int state)
{
   jjmatchedKind = kind;
   jjmatchedPos = pos;
   try { curChar = input_stream.readChar(); }
   catch(java.io.IOException e) { return pos + 1; }
   return jjMoveNfa_0(state, pos + 1);
}
/**
 * Generated literal-matching DFA, step 0: dispatches on the first character
 * of a token. Single-character tokens are matched immediately; multi-character
 * literals carry a bitmask of still-possible token kinds into step 1; anything
 * else falls through to the NFA.
 */
private int jjMoveStringLiteralDfa0_0()
{
   switch(curChar)
   {
      case 33:                 // '!'
         return jjStopAtPos(0, 15);
      case 38:                 // '&'  -> possibly "&&" (bit 0x20000 = kind 17)
         return jjMoveStringLiteralDfa1_0(0x20000L);
      case 40:                 // '('
         return jjStopAtPos(0, 13);
      case 41:                 // ')'
         return jjStopAtPos(0, 14);
      case 47:                 // '/'  -> possibly "//" or "/*" (bits 0x40, 0x100)
         return jjMoveStringLiteralDfa1_0(0x140L);
      case 102:                // 'f'  -> possibly "false" (bit 0x100000 = kind 20)
         return jjMoveStringLiteralDfa1_0(0x100000L);
      case 116:                // 't'  -> possibly "true" (bit 0x80000 = kind 19)
         return jjMoveStringLiteralDfa1_0(0x80000L);
      case 124:                // '|'  -> possibly "||" (bit 0x10000 = kind 16)
         return jjMoveStringLiteralDfa1_0(0x10000L);
      case 126:                // '~'
         return jjStopAtPos(0, 18);
      default :
         // Not the start of any literal token: hand over to the NFA.
         return jjMoveNfa_0(3, 0);
   }
}
/**
 * Generated literal-matching DFA, step 1: examines the second character.
 * {@code active0} is the bitmask of literal kinds still viable after the
 * first character. Completes two-character tokens ("&amp;&amp;", "||", "//",
 * "/*") or narrows the mask and continues with step 2.
 */
private int jjMoveStringLiteralDfa1_0(long active0)
{
   try { curChar = input_stream.readChar(); }
   catch(java.io.IOException e) {
      // End of input: fall back to whatever was matchable at position 0.
      jjStopStringLiteralDfa_0(0, active0);
      return 1;
   }
   switch(curChar)
   {
      case 38:                 // '&' -> "&&"
         if ((active0 & 0x20000L) != 0L)
            return jjStopAtPos(1, 17);
         break;
      case 42:                 // '*' -> "/*"
         if ((active0 & 0x100L) != 0L)
            return jjStartNfaWithStates_0(1, 8, 0);
         break;
      case 47:                 // '/' -> "//"
         if ((active0 & 0x40L) != 0L)
            return jjStopAtPos(1, 6);
         break;
      case 97:                 // 'a' -> continue matching "false"
         return jjMoveStringLiteralDfa2_0(active0, 0x100000L);
      case 114:                // 'r' -> continue matching "true"
         return jjMoveStringLiteralDfa2_0(active0, 0x80000L);
      case 124:                // '|' -> "||"
         if ((active0 & 0x10000L) != 0L)
            return jjStopAtPos(1, 16);
         break;
      default :
         break;
   }
   return jjStartNfa_0(0, active0);
}
/**
 * Third character of a DEFAULT-state literal ("true"/"false" continue here).
 * (JavaCC-generated.)
 */
private int jjMoveStringLiteralDfa2_0(long old0, long active0)
{
   if (((active0 &= old0)) == 0L)
      return jjStartNfa_0(0, old0);
   try { curChar = input_stream.readChar(); }
   catch(java.io.IOException e) {
      jjStopStringLiteralDfa_0(1, active0);
      return 2;
   }
   switch(curChar)
   {
      case 108: // 'l' — continues "false"
         return jjMoveStringLiteralDfa3_0(active0, 0x100000L);
      case 117: // 'u' — continues "true"
         return jjMoveStringLiteralDfa3_0(active0, 0x80000L);
      default :
         break;
   }
   return jjStartNfa_0(1, active0);
}
/**
 * Fourth character of a DEFAULT-state literal: completes "true" (kind 19)
 * or continues "false". (JavaCC-generated.)
 */
private int jjMoveStringLiteralDfa3_0(long old0, long active0)
{
   if (((active0 &= old0)) == 0L)
      return jjStartNfa_0(1, old0);
   try { curChar = input_stream.readChar(); }
   catch(java.io.IOException e) {
      jjStopStringLiteralDfa_0(2, active0);
      return 3;
   }
   switch(curChar)
   {
      case 101: // 'e' — completes "true"
         if ((active0 & 0x80000L) != 0L)
            return jjStartNfaWithStates_0(3, 19, 5);
         break;
      case 115: // 's' — continues "false"
         return jjMoveStringLiteralDfa4_0(active0, 0x100000L);
      default :
         break;
   }
   return jjStartNfa_0(2, active0);
}
/**
 * Fifth character of a DEFAULT-state literal: completes "false" (kind 20).
 * (JavaCC-generated.)
 */
private int jjMoveStringLiteralDfa4_0(long old0, long active0)
{
   if (((active0 &= old0)) == 0L)
      return jjStartNfa_0(2, old0);
   try { curChar = input_stream.readChar(); }
   catch(java.io.IOException e) {
      jjStopStringLiteralDfa_0(3, active0);
      return 4;
   }
   switch(curChar)
   {
      case 101: // 'e' — completes "false"
         if ((active0 & 0x100000L) != 0L)
            return jjStartNfaWithStates_0(4, 20, 5);
         break;
      default :
         break;
   }
   return jjStartNfa_0(3, active0);
}
// 256-bit vectors (4 longs each) used by jjCanMove_0 to classify characters
// with code points above 0xFF.
static final long[] jjbitVec0 = {
   0xfffffffffffffffeL, 0xffffffffffffffffL, 0xffffffffffffffffL, 0xffffffffffffffffL
};
static final long[] jjbitVec2 = {
   0x0L, 0x0L, 0xffffffffffffffffL, 0xffffffffffffffffL
};
/**
 * Runs the DEFAULT-state NFA from {@code startState}, consuming characters until no
 * state survives; records the longest match found in jjmatchedKind/jjmatchedPos.
 * jjstateSet is used as a double buffer (two halves of 6 entries) for current vs.
 * next state sets. (JavaCC-generated — do not hand-tune the state numbering.)
 */
private int jjMoveNfa_0(int startState, int curPos)
{
   //int[] nextStates; // not used
   int startsAt = 0;
   jjnewStateCnt = 6;
   int i = 1;
   jjstateSet[0] = startState;
   //int j; // not used
   int kind = 0x7fffffff;
   for (;;)
   {
      if (++jjround == 0x7fffffff)
         ReInitRounds();
      // Three character classes: ASCII < 64, ASCII 64..127, and everything above.
      if (curChar < 64)
      {
         long l = 1L << curChar;
         do
         {
            switch(jjstateSet[--i])
            {
               case 3: // initial state: '$' starts an identifier-like token (kind 21)
                  if (curChar == 36)
                  {
                     if (kind > 21)
                        kind = 21;
                     jjCheckNAdd(5);
                  }
                  else if (curChar == 47) // '/' may begin a comment
                     jjstateSet[jjnewStateCnt++] = 2;
                  break;
               case 0:
                  if (curChar == 42)
                     jjstateSet[jjnewStateCnt++] = 1;
                  break;
               case 1:
                  if ((0xffff7fffffffffffL & l) != 0L && kind > 7)
                     kind = 7;
                  break;
               case 2:
                  if (curChar == 42)
                     jjstateSet[jjnewStateCnt++] = 0;
                  break;
               case 4:
                  if (curChar != 36)
                     break;
                  if (kind > 21)
                     kind = 21;
                  jjCheckNAdd(5);
                  break;
               case 5: // identifier continuation characters
                  if ((0x3ff001000000000L & l) == 0L)
                     break;
                  if (kind > 21)
                     kind = 21;
                  jjCheckNAdd(5);
                  break;
               default : break;
            }
         } while(i != startsAt);
      }
      else if (curChar < 128)
      {
         long l = 1L << (curChar & 077);
         do
         {
            switch(jjstateSet[--i])
            {
               case 3:
               case 5: // letters and '_' extend the kind-21 token
                  if ((0x7fffffe87fffffeL & l) == 0L)
                     break;
                  if (kind > 21)
                     kind = 21;
                  jjCheckNAdd(5);
                  break;
               case 1:
                  if (kind > 7)
                     kind = 7;
                  break;
               default : break;
            }
         } while(i != startsAt);
      }
      else
      {
         // Characters above 0x7F: classify via the jjbitVec tables.
         int hiByte = (int)(curChar >> 8);
         int i1 = hiByte >> 6;
         long l1 = 1L << (hiByte & 077);
         int i2 = (curChar & 0xff) >> 6;
         long l2 = 1L << (curChar & 077);
         do
         {
            switch(jjstateSet[--i])
            {
               case 1:
                  if (jjCanMove_0(hiByte, i1, i2, l1, l2) && kind > 7)
                     kind = 7;
                  break;
               default : break;
            }
         } while(i != startsAt);
      }
      if (kind != 0x7fffffff)
      {
         jjmatchedKind = kind;
         jjmatchedPos = curPos;
         kind = 0x7fffffff;
      }
      ++curPos;
      // Swap the double-buffered state sets; stop when the next set is empty.
      if ((i = jjnewStateCnt) == (startsAt = 6 - (jjnewStateCnt = startsAt)))
         return curPos;
      try { curChar = input_stream.readChar(); }
      catch(java.io.IOException e) { return curPos; }
   }
}
/**
 * First character inside the IN_MULTI_LINE_COMMENT state: only "*" can begin
 * the "*" + "/" terminator. (JavaCC-generated.)
 */
private int jjMoveStringLiteralDfa0_3()
{
   switch(curChar)
   {
      case 42:
         return jjMoveStringLiteralDfa1_3(0x800L);
      default :
         return 1;
   }
}
/**
 * Second character of the multi-line-comment terminator: '/' after '*'
 * closes the comment (kind 11). (JavaCC-generated.)
 */
private int jjMoveStringLiteralDfa1_3(long active0)
{
   try { curChar = input_stream.readChar(); }
   catch(java.io.IOException e) {
      return 1;
   }
   switch(curChar)
   {
      case 47:
         if ((active0 & 0x800L) != 0L)
            return jjStopAtPos(1, 11);
         break;
      default :
         return 2;
   }
   return 2;
}
/** IN_SINGLE_LINE_COMMENT state has no literals; go straight to its NFA. (JavaCC-generated.) */
private int jjMoveStringLiteralDfa0_1()
{
   return jjMoveNfa_1(0, 0);
}
/**
 * NFA for the IN_SINGLE_LINE_COMMENT state: matches the end-of-line terminator
 * ('\n', '\r', or "\r\n") as kind 9. Structure mirrors jjMoveNfa_0 with a
 * 3-state double-buffered state set. (JavaCC-generated.)
 */
private int jjMoveNfa_1(int startState, int curPos)
{
   //int[] nextStates; // not used
   int startsAt = 0;
   jjnewStateCnt = 3;
   int i = 1;
   jjstateSet[0] = startState;
   //int j; // not used
   int kind = 0x7fffffff;
   for (;;)
   {
      if (++jjround == 0x7fffffff)
         ReInitRounds();
      if (curChar < 64)
      {
         long l = 1L << curChar;
         do
         {
            switch(jjstateSet[--i])
            {
               case 0: // '\n' or '\r' ends the comment; '\r' may also pair with '\n'
                  if ((0x2400L & l) != 0L)
                  {
                     if (kind > 9)
                        kind = 9;
                  }
                  if (curChar == 13)
                     jjstateSet[jjnewStateCnt++] = 1;
                  break;
               case 1: // '\n' directly after '\r' — treat "\r\n" as one terminator
                  if (curChar == 10 && kind > 9)
                     kind = 9;
                  break;
               case 2:
                  if (curChar == 13)
                     jjstateSet[jjnewStateCnt++] = 1;
                  break;
               default : break;
            }
         } while(i != startsAt);
      }
      else if (curChar < 128)
      {
         long l = 1L << (curChar & 077);
         do
         {
            switch(jjstateSet[--i])
            {
               default : break;
            }
         } while(i != startsAt);
      }
      else
      {
         int hiByte = (int)(curChar >> 8);
         int i1 = hiByte >> 6;
         long l1 = 1L << (hiByte & 077);
         int i2 = (curChar & 0xff) >> 6;
         long l2 = 1L << (curChar & 077);
         do
         {
            switch(jjstateSet[--i])
            {
               default : break;
            }
         } while(i != startsAt);
      }
      if (kind != 0x7fffffff)
      {
         jjmatchedKind = kind;
         jjmatchedPos = curPos;
         kind = 0x7fffffff;
      }
      ++curPos;
      if ((i = jjnewStateCnt) == (startsAt = 3 - (jjnewStateCnt = startsAt)))
         return curPos;
      try { curChar = input_stream.readChar(); }
      catch(java.io.IOException e) { return curPos; }
   }
}
/**
 * First character inside the IN_FORMAL_COMMENT state: only "*" can begin
 * the "*" + "/" terminator. (JavaCC-generated.)
 */
private int jjMoveStringLiteralDfa0_2()
{
   switch(curChar)
   {
      case 42:
         return jjMoveStringLiteralDfa1_2(0x400L);
      default :
         return 1;
   }
}
/**
 * Second character of the formal-comment terminator: '/' after '*'
 * closes the comment (kind 10). (JavaCC-generated.)
 */
private int jjMoveStringLiteralDfa1_2(long active0)
{
   try { curChar = input_stream.readChar(); }
   catch(java.io.IOException e) {
      return 1;
   }
   switch(curChar)
   {
      case 47:
         if ((active0 & 0x400L) != 0L)
            return jjStopAtPos(1, 10);
         break;
      default :
         return 2;
   }
   return 2;
}
// Shared NFA transition-target table; empty for this grammar (jjAddStates /
// jjCheckNAddStates are therefore never invoked at runtime).
static final int[] jjnextStates = {
};
/**
 * Classifies a character with code point above 0xFF using the jjbitVec tables:
 * returns true when it may extend the current match. (JavaCC-generated.)
 */
private static final boolean jjCanMove_0(int hiByte, int i1, int i2, long l1, long l2)
{
   switch(hiByte)
   {
      case 0:
         return ((jjbitVec2[i2] & l2) != 0L);
      default :
         if ((jjbitVec0[i1] & l1) != 0L)
            return true;
         return false;
   }
}
/** Token literal values (octal escapes; e.g. "\50" = "(", "\164\162\165\145" = "true"). */
public static final String[] jjstrLiteralImages = {
"", null, null, null, null, null, null, null, null, null, null, null, null,
"\50", "\51", "\41", "\174\174", "\46\46", "\176", "\164\162\165\145",
"\146\141\154\163\145", null, null, null, };
/** Lexer state names. */
public static final String[] lexStateNames = {
   "DEFAULT",
   "IN_SINGLE_LINE_COMMENT",
   "IN_FORMAL_COMMENT",
   "IN_MULTI_LINE_COMMENT",
};
/** Lex State array: per token kind, the lexical state to switch to (-1 = no change). */
public static final int[] jjnewLexState = {
   -1, -1, -1, -1, -1, -1, 1, 2, 3, 0, 0, 0, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
};
// Bit masks over token kinds: TOKEN kinds returned to the parser.
static final long[] jjtoToken = {
   0x3fe001L,
};
// Kinds that are skipped (whitespace, comment terminators).
static final long[] jjtoSkip = {
   0xe3eL,
};
// Skipped kinds also attached to the next token as special tokens.
static final long[] jjtoSpecial = {
   0xe00L,
};
// MORE kinds: accumulate into the image and keep scanning.
static final long[] jjtoMore = {
   0x11c0L,
};
/** Character source for the lexer. */
protected JavaCharStream input_stream;
// Round markers (one per NFA state) and the double-buffered state set (2 * 6 entries).
private final int[] jjrounds = new int[6];
private final int[] jjstateSet = new int[12];
// Accumulated image for MORE-token sequences, plus its bookkeeping lengths.
StringBuffer image;
int jjimageLen;
int lengthOfMatch;
/** Current character under examination. */
protected char curChar;
/** Constructor. Rejects static char streams, which cannot back a per-instance lexer. */
public ConditionParserTokenManager(JavaCharStream stream){
   if (JavaCharStream.staticFlag)
      throw new Error("ERROR: Cannot use a static CharStream class with a non-static lexical analyzer.");
   input_stream = stream;
}
/** Constructor starting in the given lexical state (index into lexStateNames). */
public ConditionParserTokenManager(JavaCharStream stream, int lexState){
   this(stream);
   SwitchTo(lexState);
}
/** Reinitialise parser: reset match state and rounds, and swap in a new stream. */
public void ReInit(JavaCharStream stream)
{
   jjmatchedPos = jjnewStateCnt = 0;
   curLexState = defaultLexState;
   input_stream = stream;
   ReInitRounds();
}
/** Resets the per-state round markers; called on init and when jjround wraps. */
private void ReInitRounds()
{
   int i;
   jjround = 0x80000001;
   for (i = 6; i-- > 0;)
      jjrounds[i] = 0x80000000;
}
/** Reinitialise parser and switch to the given lexical state. */
public void ReInit(JavaCharStream stream, int lexState)
{
   ReInit(stream);
   SwitchTo(lexState);
}
/**
 * Switch to specified lex state (0..3, see lexStateNames).
 * @throws TokenMgrError if the state index is out of range
 */
public void SwitchTo(int lexState)
{
   if (lexState >= 4 || lexState < 0)
      throw new TokenMgrError("Error: Ignoring invalid lexical state : " + lexState + ". State unchanged.", TokenMgrError.INVALID_LEXICAL_STATE);
   else
      curLexState = lexState;
}
/**
 * Builds a Token for the most recent match (jjmatchedKind), using the fixed
 * literal image when one exists, otherwise the characters consumed from the
 * stream, and stamps it with the stream's begin/end line and column.
 */
protected Token jjFillToken()
{
   final Token t;
   final String tokenImage;
   final int beginLine;
   final int endLine;
   final int beginColumn;
   final int endColumn;
   String im = jjstrLiteralImages[jjmatchedKind];
   tokenImage = (im == null) ? input_stream.GetImage() : im;
   beginLine = input_stream.getBeginLine();
   beginColumn = input_stream.getBeginColumn();
   endLine = input_stream.getEndLine();
   endColumn = input_stream.getEndColumn();
   t = Token.newToken(jjmatchedKind, tokenImage);
   t.beginLine = beginLine;
   t.endLine = endLine;
   t.beginColumn = beginColumn;
   t.endColumn = endColumn;
   return t;
}
int curLexState = 0;      // active lexical state (index into lexStateNames)
int defaultLexState = 0;  // state restored by ReInit
int jjnewStateCnt;        // fill pointer into the "next" half of jjstateSet
int jjround;              // round counter deduplicating NFA state additions
int jjmatchedPos;         // end position of the best match so far
int jjmatchedKind;        // token kind of the best match so far
/**
 * Get the next Token. Main lexer loop: skips whitespace in the DEFAULT state,
 * runs the literal DFA / NFA for the current lexical state, then classifies the
 * match as TOKEN (returned), SKIP/SPECIAL (attached or dropped, scanning
 * continues) or MORE (image accumulates, scanning continues).
 * @throws TokenMgrError on an unmatchable character or unexpected EOF
 */
public Token getNextToken()
{
  //int kind;
  Token specialToken = null;
  Token matchedToken;
  int curPos = 0;
  EOFLoop :
  for (;;)
  {
   try
   {
      curChar = input_stream.BeginToken();
   }
   catch(java.io.IOException e)
   {
      // Clean EOF: return the synthetic EOF token (kind 0).
      jjmatchedKind = 0;
      matchedToken = jjFillToken();
      matchedToken.specialToken = specialToken;
      return matchedToken;
   }
   image = null;
   jjimageLen = 0;
   for (;;)
   {
     switch(curLexState)
     {
       case 0:
         // DEFAULT state: skip whitespace (TAB, LF, CR, SPACE) before matching.
         try { input_stream.backup(0);
            while (curChar <= 32 && (0x100003600L & (1L << curChar)) != 0L)
               curChar = input_stream.BeginToken();
         }
         catch (java.io.IOException e1) { continue EOFLoop; }
         jjmatchedKind = 0x7fffffff;
         jjmatchedPos = 0;
         curPos = jjMoveStringLiteralDfa0_0();
         break;
       case 1:
         jjmatchedKind = 0x7fffffff;
         jjmatchedPos = 0;
         curPos = jjMoveStringLiteralDfa0_1();
         // Kind 12 is the fallback "any character" match inside comments.
         if (jjmatchedPos == 0 && jjmatchedKind > 12)
         {
            jjmatchedKind = 12;
         }
         break;
       case 2:
         jjmatchedKind = 0x7fffffff;
         jjmatchedPos = 0;
         curPos = jjMoveStringLiteralDfa0_2();
         if (jjmatchedPos == 0 && jjmatchedKind > 12)
         {
            jjmatchedKind = 12;
         }
         break;
       case 3:
         jjmatchedKind = 0x7fffffff;
         jjmatchedPos = 0;
         curPos = jjMoveStringLiteralDfa0_3();
         if (jjmatchedPos == 0 && jjmatchedKind > 12)
         {
            jjmatchedKind = 12;
         }
         break;
     }
     if (jjmatchedKind != 0x7fffffff)
     {
        // Push back any characters read past the end of the best match.
        if (jjmatchedPos + 1 < curPos)
           input_stream.backup(curPos - jjmatchedPos - 1);
        if ((jjtoToken[jjmatchedKind >> 6] & (1L << (jjmatchedKind & 077))) != 0L)
        {
           // Real token: deliver it (after any pending lex-state switch).
           matchedToken = jjFillToken();
           matchedToken.specialToken = specialToken;
           if (jjnewLexState[jjmatchedKind] != -1)
             curLexState = jjnewLexState[jjmatchedKind];
           return matchedToken;
        }
        else if ((jjtoSkip[jjmatchedKind >> 6] & (1L << (jjmatchedKind & 077))) != 0L)
        {
           // Skipped: optionally chain it as a special token, then keep scanning.
           if ((jjtoSpecial[jjmatchedKind >> 6] & (1L << (jjmatchedKind & 077))) != 0L)
           {
              matchedToken = jjFillToken();
              if (specialToken == null)
                 specialToken = matchedToken;
              else
              {
                 matchedToken.specialToken = specialToken;
                 specialToken = (specialToken.next = matchedToken);
              }
              SkipLexicalActions(matchedToken);
           }
           else
              SkipLexicalActions(null);
           if (jjnewLexState[jjmatchedKind] != -1)
             curLexState = jjnewLexState[jjmatchedKind];
           continue EOFLoop;
        }
        // MORE: accumulate the image and continue matching in (possibly) a new state.
        MoreLexicalActions();
        if (jjnewLexState[jjmatchedKind] != -1)
          curLexState = jjnewLexState[jjmatchedKind];
        curPos = 0;
        jjmatchedKind = 0x7fffffff;
        try {
           curChar = input_stream.readChar();
           continue;
        }
        catch (java.io.IOException e1) { }
     }
     // No match: report a lexical error with as much context as possible.
     int error_line = input_stream.getEndLine();
     int error_column = input_stream.getEndColumn();
     String error_after = null;
     boolean EOFSeen = false;
     try { input_stream.readChar(); input_stream.backup(1); }
     catch (java.io.IOException e1) {
        EOFSeen = true;
        error_after = curPos <= 1 ? "" : input_stream.GetImage();
        if (curChar == '\n' || curChar == '\r') {
           error_line++;
           error_column = 0;
        }
        else
           error_column++;
     }
     if (!EOFSeen) {
        input_stream.backup(1);
        error_after = curPos <= 1 ? "" : input_stream.GetImage();
     }
     throw new TokenMgrError(EOFSeen, curLexState, error_line, error_column, error_after, curChar, TokenMgrError.LEXICAL_ERROR);
   }
  }
}
/** Hook for user actions on SKIP tokens; the grammar defines none, so this is a no-op. */
void SkipLexicalActions(Token matchedToken)
{
   switch(jjmatchedKind)
   {
      default :
         break;
   }
}
/**
 * Actions for MORE tokens: extends the accumulated image length; for kind 7
 * (comment content) it also flushes the suffix into {@code image} and backs up
 * one character.
 */
void MoreLexicalActions()
{
   jjimageLen += (lengthOfMatch = jjmatchedPos + 1);
   switch(jjmatchedKind)
   {
      case 7 :
         if (image == null)
            image = new StringBuffer();
         image.append(input_stream.GetSuffix(jjimageLen));
         jjimageLen = 0;
         input_stream.backup(1);
         break;
      default :
         break;
   }
}
/** Adds {@code state} to the next NFA state set unless already added this round. */
private void jjCheckNAdd(int state)
{
   if (jjrounds[state] != jjround)
   {
      jjstateSet[jjnewStateCnt++] = state;
      jjrounds[state] = jjround;
   }
}
/** Adds jjnextStates[start..end] to the next state set (unused: jjnextStates is empty). */
private void jjAddStates(int start, int end)
{
   do {
      jjstateSet[jjnewStateCnt++] = jjnextStates[start];
   } while (start++ != end);
}
/** Deduplicated add of two states to the next state set. */
private void jjCheckNAddTwoStates(int state1, int state2)
{
   jjCheckNAdd(state1);
   jjCheckNAdd(state2);
}
/** Deduplicated add of jjnextStates[start..end] (unused: jjnextStates is empty). */
private void jjCheckNAddStates(int start, int end)
{
   do {
      jjCheckNAdd(jjnextStates[start]);
   } while (start++ != end);
}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.operator;
import com.facebook.presto.block.BlockAssertions;
import com.facebook.presto.common.block.Block;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.Warmup;
import org.openjdk.jmh.runner.Runner;
import org.openjdk.jmh.runner.RunnerException;
import org.openjdk.jmh.runner.options.Options;
import org.openjdk.jmh.runner.options.OptionsBuilder;
import org.openjdk.jmh.runner.options.VerboseMode;
import java.util.Random;
import static com.facebook.presto.block.BlockAssertions.createRandomDictionaryBlock;
import static com.facebook.presto.operator.UncheckedByteArrays.setLongUnchecked;
import static java.util.concurrent.TimeUnit.MICROSECONDS;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static sun.misc.Unsafe.ARRAY_LONG_INDEX_SCALE;
/**
 * JMH microbenchmarks comparing checked vs. unchecked long reads from Presto
 * Blocks (plain, nullable and dictionary-encoded) against raw long[] access.
 * Every benchmark copies POSITIONS_PER_PAGE longs into a byte[] via
 * setLongUnchecked and returns the final write offset so the JIT cannot
 * eliminate the work.
 *
 * Fixes vs. previous revision: the random *WithNulls block benchmarks read
 * from blockNoNulls while null-checking blockWithNulls (copy-paste slip), and
 * randomCopyLongValuesWithDictionaryWithNulls skipped the dictionary
 * indirection on the value read. Each benchmark now reads and null-checks the
 * same source, mirroring its sequential/dictionary counterpart.
 */
@State(Scope.Thread)
@OutputTimeUnit(MICROSECONDS)
@Fork(0)
@Warmup(iterations = 10, time = 500, timeUnit = MILLISECONDS)
@Measurement(iterations = 10, time = 500, timeUnit = MILLISECONDS)
@BenchmarkMode(Mode.AverageTime)
public class BenchmarkReadBlock
{
    // ---- baseline: raw array / plain block, sequential access ----

    @Benchmark
    public int sequentialCopyLongValues(BenchmarkData data)
    {
        int index = 0;
        for (int i = 0; i < data.longValues.length; i++) {
            index = setLongUnchecked(data.bytes, index, data.longValues[i]);
        }
        return index;
    }

    @Benchmark
    public int sequentialCopyLongArrayBlock(BenchmarkData data)
    {
        int index = 0;
        for (int i = 0; i < data.longValues.length; i++) {
            index = setLongUnchecked(data.bytes, index, data.blockNoNulls.getLong(i));
        }
        return index;
    }

    @Benchmark
    public int sequentialCopyUncheckedLongArrayBlock(BenchmarkData data)
    {
        int index = 0;
        for (int i = 0; i < data.longValues.length; i++) {
            index = setLongUnchecked(data.bytes, index, data.blockNoNulls.getLongUnchecked(i));
        }
        return index;
    }

    // ---- random access through the positions permutation ----

    @Benchmark
    public int randomCopyLongValues(BenchmarkData data)
    {
        int index = 0;
        for (int i = 0; i < data.longValues.length; i++) {
            index = setLongUnchecked(data.bytes, index, data.longValues[data.positions[i]]);
        }
        return index;
    }

    @Benchmark
    public int randomCopyLongArrayBlock(BenchmarkData data)
    {
        int index = 0;
        for (int i = 0; i < data.longValues.length; i++) {
            index = setLongUnchecked(data.bytes, index, data.blockNoNulls.getLong(data.positions[i]));
        }
        return index;
    }

    @Benchmark
    public int randomCopyUncheckedLongArrayBlock(BenchmarkData data)
    {
        int index = 0;
        for (int i = 0; i < data.longValues.length; i++) {
            index = setLongUnchecked(data.bytes, index, data.blockNoNulls.getLongUnchecked(data.positions[i]));
        }
        return index;
    }

    // ---- nullable variants: the write offset only advances for non-null rows ----

    @Benchmark
    public int sequentialCopyLongValuesWithNulls(BenchmarkData data)
    {
        int index = 0;
        for (int i = 0; i < data.longValues.length; i++) {
            int newIndex = setLongUnchecked(data.bytes, index, data.longValues[i]);
            if (!data.nulls[i]) {
                index = newIndex;
            }
        }
        return index;
    }

    @Benchmark
    public int sequentialCopyLongArrayBlockWithNulls(BenchmarkData data)
    {
        int index = 0;
        for (int i = 0; i < data.longValues.length; i++) {
            int newIndex = setLongUnchecked(data.bytes, index, data.blockWithNulls.getLong(i));
            if (!data.blockWithNulls.isNull(i)) {
                index = newIndex;
            }
        }
        return index;
    }

    @Benchmark
    public int sequentialCopyUncheckedLongArrayBlockWithNulls(BenchmarkData data)
    {
        int index = 0;
        for (int i = 0; i < data.longValues.length; i++) {
            int newIndex = setLongUnchecked(data.bytes, index, data.blockWithNulls.getLongUnchecked(i));
            if (!data.blockWithNulls.isNullUnchecked(i)) {
                index = newIndex;
            }
        }
        return index;
    }

    @Benchmark
    public int randomCopyLongValuesWithNulls(BenchmarkData data)
    {
        int index = 0;
        for (int i = 0; i < data.longValues.length; i++) {
            int newIndex = setLongUnchecked(data.bytes, index, data.longValues[data.positions[i]]);
            if (!data.nulls[data.positions[i]]) {
                index = newIndex;
            }
        }
        return index;
    }

    @Benchmark
    public int randomCopyLongArrayBlockWithNulls(BenchmarkData data)
    {
        int index = 0;
        for (int i = 0; i < data.longValues.length; i++) {
            // Fixed: read and null-check must target the same (nullable) block;
            // this previously read from blockNoNulls while checking blockWithNulls.
            int newIndex = setLongUnchecked(data.bytes, index, data.blockWithNulls.getLong(data.positions[i]));
            if (!data.blockWithNulls.isNull(data.positions[i])) {
                index = newIndex;
            }
        }
        return index;
    }

    @Benchmark
    public int randomCopyUncheckedLongArrayBlockWithNulls(BenchmarkData data)
    {
        int index = 0;
        for (int i = 0; i < data.longValues.length; i++) {
            // Fixed: same copy-paste slip as the checked variant above.
            int newIndex = setLongUnchecked(data.bytes, index, data.blockWithNulls.getLongUnchecked(data.positions[i]));
            if (!data.blockWithNulls.isNullUnchecked(data.positions[i])) {
                index = newIndex;
            }
        }
        return index;
    }

    // ---- dictionary-encoded variants ----

    @Benchmark
    public int randomCopyLongValuesWithDictionary(BenchmarkData data)
    {
        int index = 0;
        for (int i = 0; i < data.longValues.length; i++) {
            index = setLongUnchecked(data.bytes, index, data.longValues[data.ids[data.positions[i]]]);
        }
        return index;
    }

    @Benchmark
    public int randomCopyDictionaryBlock(BenchmarkData data)
    {
        int index = 0;
        for (int i = 0; i < data.longValues.length; i++) {
            index = setLongUnchecked(data.bytes, index, data.dictionaryBlockNoNulls.getLong(data.positions[i]));
        }
        return index;
    }

    @Benchmark
    public int randomCopyUncheckedDictionaryBlock(BenchmarkData data)
    {
        int index = 0;
        for (int i = 0; i < data.longValues.length; i++) {
            index = setLongUnchecked(data.bytes, index, data.dictionaryBlockNoNulls.getLongUnchecked(data.positions[i]));
        }
        return index;
    }

    @Benchmark
    public int randomCopyLongValuesWithDictionaryWithNulls(BenchmarkData data)
    {
        int index = 0;
        for (int i = 0; i < data.longValues.length; i++) {
            // Fixed: the value read now goes through the same dictionary
            // indirection as the null check (was data.longValues[data.positions[i]]).
            int newIndex = setLongUnchecked(data.bytes, index, data.longValues[data.ids[data.positions[i]]]);
            if (!data.nulls[data.ids[data.positions[i]]]) {
                index = newIndex;
            }
        }
        return index;
    }

    @Benchmark
    public int randomCopyDictionaryBlockWithNulls(BenchmarkData data)
    {
        int index = 0;
        for (int i = 0; i < data.longValues.length; i++) {
            int newIndex = setLongUnchecked(data.bytes, index, data.dictionaryBlockWithNulls.getLong(data.positions[i]));
            if (!data.dictionaryBlockWithNulls.isNull(data.positions[i])) {
                index = newIndex;
            }
        }
        return index;
    }

    @Benchmark
    public int randomCopyUncheckedDictionaryBlockWithNulls(BenchmarkData data)
    {
        int index = 0;
        for (int i = 0; i < data.longValues.length; i++) {
            int newIndex = setLongUnchecked(data.bytes, index, data.dictionaryBlockWithNulls.getLongUnchecked(data.positions[i]));
            if (!data.dictionaryBlockWithNulls.isNullUnchecked(data.positions[i])) {
                index = newIndex;
            }
        }
        return index;
    }

    /** Per-thread fixture: parallel raw arrays and equivalent Presto blocks. */
    @State(Scope.Thread)
    public static class BenchmarkData
    {
        private static final int POSITIONS_PER_PAGE = 10000;

        private final Random random = new Random(0);
        private final long[] longValues = new long[POSITIONS_PER_PAGE];
        private final boolean[] nulls = new boolean[POSITIONS_PER_PAGE];
        private final int[] ids = new int[POSITIONS_PER_PAGE];
        private final int[] positions = new int[POSITIONS_PER_PAGE];
        private final Block blockNoNulls = BlockAssertions.createRandomLongsBlock(POSITIONS_PER_PAGE, 0.0f);
        private final Block blockWithNulls = BlockAssertions.createRandomLongsBlock(POSITIONS_PER_PAGE, 0.2f);
        private final Block dictionaryBlockNoNulls = createRandomDictionaryBlock(blockNoNulls, POSITIONS_PER_PAGE);
        private final Block dictionaryBlockWithNulls = createRandomDictionaryBlock(blockWithNulls, POSITIONS_PER_PAGE);
        private final byte[] bytes = new byte[POSITIONS_PER_PAGE * ARRAY_LONG_INDEX_SCALE];

        @Setup
        public void setup()
        {
            for (int i = 0; i < POSITIONS_PER_PAGE; i++) {
                longValues[i] = random.nextLong();
                // dictionary ids point into the first tenth of the value space
                ids[i] = random.nextInt(POSITIONS_PER_PAGE / 10);
                positions[i] = i;
                // deterministic null pattern for the raw-array baselines
                nulls[i] = i % 7 == 0;
            }
        }
    }

    public static void main(String[] args)
            throws RunnerException
    {
        Options options = new OptionsBuilder()
                .verbosity(VerboseMode.NORMAL)
                .include(".*" + BenchmarkReadBlock.class.getSimpleName() + ".*")
                .build();
        new Runner(options).run();
    }
}
| |
/*
* Copyright (C) 2013 Sebastien Diot.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.blockwithme.examples.chicken_and_egg;
/**
* The chicken-and-egg example, that shows how to create complex graphs of objects,
* referencing each other with final references.
*
* The Builder and GraphBuilder are also in this package. Currently, my testing was
* limited to this example, and so they surely are full of bugs.
*
* @see GraphBuilder for more details.
*
* @author monster
*/
public class ChickenAndEgg {

    // ------------------------------------------------------------------
    // Builders: each declares its own name plus the names of the
    // dependencies it needs resolved before/while its product is created.
    // ------------------------------------------------------------------

    /** Builds a {@link Father}; depends on "mother" and "kid". */
    public static class FatherBuilder extends Builder<Father> {
        public FatherBuilder() {
            super("father", "mother", "kid");
        }

        @Override
        public Father create() {
            return new Father(this);
        }
    }

    /** Builds a {@link Mother}; depends on "father" and "kid". */
    public static class MotherBuilder extends Builder<Mother> {
        public MotherBuilder() {
            super("mother", "father", "kid");
        }

        @Override
        public Mother create() {
            return new Mother(this);
        }
    }

    /**
     * Builds a {@link Kid}; depends on "kid.dog". Besides the special
     * dependency names, a builder may carry ordinary configuration data —
     * here, the kid's age.
     */
    public static class KidBuilder extends Builder<Kid> {
        public KidBuilder() {
            super("kid", "kid.dog");
        }

        @Override
        public Kid create() {
            return new Kid(this);
        }

        private int age;

        /** @return the configured age */
        public int getAge() {
            return age;
        }

        /** @param age the age to configure on the built Kid */
        public void setAge(final int age) {
            this.age = age;
        }
    }

    /** Builds a {@link Dog}; depends on "kid" (its owner). */
    public static class DogBuilder extends Builder<Dog> {
        public DogBuilder() {
            super("kid.dog", "kid");
        }

        @Override
        public Dog create() {
            return new Dog(this);
        }
    }

    // ------------------------------------------------------------------
    // Domain classes: each has a constructor taking its builder, from
    // which it pulls the (final) references to its dependencies.
    // ------------------------------------------------------------------

    public static interface Person {
    }

    public static class Father implements Person {
        public final Kid kid;
        public final Mother mother;

        public Father(final Builder<Father> builder) {
            builder.set(this);
            kid = (Kid) builder.getDep("kid");
            mother = (Mother) builder.getDep("mother");
        }
    }

    public static class Mother implements Person {
        public final Kid kid;
        public final Father father;

        public Mother(final Builder<Mother> builder) {
            builder.set(this);
            kid = (Kid) builder.getDep("kid");
            father = (Father) builder.getDep("father");
        }
    }

    public static class Kid implements Person {
        public final Dog dog;
        public final int age;

        public Kid(final KidBuilder builder) {
            builder.set(this);
            dog = (Dog) builder.getDep("kid.dog");
            age = builder.age;
        }
    }

    public static class Dog {
        public final Person owner;

        public Dog(final Builder<Dog> builder) {
            builder.set(this);
            owner = (Person) builder.getDep("kid");
        }
    }

    /** Throws IllegalStateException with {@code problem} when {@code ok} is false. */
    private static void check(final boolean ok, final String problem) {
        if (!ok) {
            throw new IllegalStateException(problem);
        }
    }

    /**
     * Wires the four builders into a GraphBuilder, runs it, and verifies that
     * every object was created and every cross-reference was populated.
     *
     * @param args unused
     */
    public static void main(final String[] args) {
        final GraphBuilder graphBuilder = new GraphBuilder();

        final KidBuilder kidBuilder = new KidBuilder();
        final DogBuilder dogBuilder = new DogBuilder();
        final FatherBuilder fatherBuilder = new FatherBuilder();
        final MotherBuilder motherBuilder = new MotherBuilder();

        // Ordinary builder configuration, before graph construction.
        kidBuilder.setAge(10);

        // Register everything; in theory createMissingBuilders() would let
        // builders declare their own dependencies.
        graphBuilder.addBuilder(kidBuilder);
        graphBuilder.addBuilder(dogBuilder);
        graphBuilder.addBuilder(fatherBuilder);
        graphBuilder.addBuilder(motherBuilder);

        // Resolve the whole graph — may throw (see GraphBuilder for details).
        graphBuilder.start();

        // All objects should now exist; extract and validate the graph.
        final Kid kid = kidBuilder.get();
        check(kid != null, "kid not created");
        final Dog dog = dogBuilder.get();
        check(dog != null, "dog not created");
        final Father father = fatherBuilder.get();
        check(father != null, "father not created");
        final Mother mother = motherBuilder.get();
        check(mother != null, "mother not created");

        check(kid.dog != null, "dog not set in kid");
        check(dog.owner != null, "owner not set in dog");
        check(father.kid != null, "kid not set in father");
        check(mother.kid != null, "kid not set in mother");
        check(father.mother != null, "mother not set in father");
        check(mother.father != null, "father not set in mother");

        System.out.println("SUCCESS!");
    }
}
| |
/**
* Logback: the reliable, generic, fast and flexible logging framework.
* Copyright (C) 1999-2015, QOS.ch. All rights reserved.
*
* This program and the accompanying materials are dual-licensed under
* either the terms of the Eclipse Public License v1.0 as published by
* the Eclipse Foundation
*
* or (per the licensee's choosing)
*
* under the terms of the GNU Lesser General Public License version 2.1
* as published by the Free Software Foundation.
*/
package ch.qos.logback.core.rolling;
import java.io.File;
import java.util.Date;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import ch.qos.logback.core.CoreConstants;
import ch.qos.logback.core.rolling.helper.*;
/**
* <code>TimeBasedRollingPolicy</code> is both easy to configure and quite
* powerful. It allows the roll over to be made based on time. It is possible to
* specify that the roll over occur once per day, per week or per month.
*
* <p>For more information, please refer to the online manual at
* http://logback.qos.ch/manual/appenders.html#TimeBasedRollingPolicy
*
* @author Ceki Gülcü
*/
public class TimeBasedRollingPolicy<E> extends RollingPolicyBase implements
TriggeringPolicy<E> {
static final String FNP_NOT_SET = "The FileNamePattern option must be set before using TimeBasedRollingPolicy. ";
static final int INFINITE_HISTORY = 0;
// WCS: without compression suffix
FileNamePattern fileNamePatternWCS;
private Compressor compressor;
private RenameUtil renameUtil = new RenameUtil();
Future<?> future;
private int maxHistory = INFINITE_HISTORY;
private ArchiveRemover archiveRemover;
TimeBasedFileNamingAndTriggeringPolicy<E> timeBasedFileNamingAndTriggeringPolicy;
boolean cleanHistoryOnStart = false;
/**
 * Initializes the policy: parses the file name pattern, derives the compression
 * mode, wires the time-based file-naming/triggering policy and (optionally) the
 * archive remover, then marks the policy as started.
 *
 * @throws IllegalStateException if no fileNamePattern has been configured
 */
public void start() {
  // set the LR for our utility object
  renameUtil.setContext(this.context);
  // find out period from the filename pattern
  if (fileNamePatternStr != null) {
    fileNamePattern = new FileNamePattern(fileNamePatternStr, this.context);
    determineCompressionMode();
  } else {
    addWarn(FNP_NOT_SET);
    addWarn(CoreConstants.SEE_FNP_NOT_SET);
    throw new IllegalStateException(FNP_NOT_SET
        + CoreConstants.SEE_FNP_NOT_SET);
  }
  compressor = new Compressor(compressionMode);
  compressor.setContext(context);
  // wcs : without compression suffix
  fileNamePatternWCS = new FileNamePattern(Compressor.computeFileNameStr_WCS(
      fileNamePatternStr, compressionMode), this.context);
  addInfo("Will use the pattern " + fileNamePatternWCS
      + " for the active file");
  // zip archives additionally need a relative entry-name pattern
  if(compressionMode == CompressionMode.ZIP) {
    String zipEntryFileNamePatternStr = transformFileNamePattern2ZipEntry(fileNamePatternStr);
    zipEntryFileNamePattern = new FileNamePattern(zipEntryFileNamePatternStr, context);
  }
  if (timeBasedFileNamingAndTriggeringPolicy == null) {
    timeBasedFileNamingAndTriggeringPolicy = new DefaultTimeBasedFileNamingAndTriggeringPolicy<E>();
  }
  timeBasedFileNamingAndTriggeringPolicy.setContext(context);
  timeBasedFileNamingAndTriggeringPolicy.setTimeBasedRollingPolicy(this);
  timeBasedFileNamingAndTriggeringPolicy.start();
  // the maxHistory property is given to TimeBasedRollingPolicy instead of to
  // the TimeBasedFileNamingAndTriggeringPolicy. This makes it more convenient
  // for the user at the cost of inconsistency here.
  if (maxHistory != INFINITE_HISTORY) {
    archiveRemover = timeBasedFileNamingAndTriggeringPolicy.getArchiveRemover();
    archiveRemover.setMaxHistory(maxHistory);
    if(cleanHistoryOnStart) {
      addInfo("Cleaning on start up");
      archiveRemover.clean(new Date(timeBasedFileNamingAndTriggeringPolicy.getCurrentTime()));
    }
  }
  super.start();
}
/**
 * Stops this policy: a no-op when not started; otherwise waits for any
 * in-flight compression job before delegating to the superclass.
 */
@Override
public void stop() {
  if (isStarted()) {
    waitForAsynchronousJobToStop();
    super.stop();
  }
}
/**
 * Blocks until the pending asynchronous compression job (if any) completes,
 * giving up after {@link CoreConstants#SECONDS_TO_WAIT_FOR_COMPRESSION_JOBS}
 * seconds. Failures are reported through the context's status system rather
 * than propagated.
 */
private void waitForAsynchronousJobToStop() {
  if(future != null) {
    try {
      future.get(CoreConstants.SECONDS_TO_WAIT_FOR_COMPRESSION_JOBS, TimeUnit.SECONDS);
    } catch (TimeoutException e) {
      addError("Timeout while waiting for compression job to finish", e);
    } catch (InterruptedException e) {
      // restore the interrupt flag so callers further up the stack can react
      Thread.currentThread().interrupt();
      addError("Interrupted while waiting for compression job to finish", e);
    } catch (Exception e) {
      addError("Unexpected exception while waiting for compression job to finish", e);
    }
  }
}
/**
 * Derives the zip entry-name pattern from the file name pattern: zip entries
 * must be relative, so only the portion after the final (slashified) separator
 * is kept.
 */
private String transformFileNamePattern2ZipEntry(String fileNamePatternStr) {
  return FileFilterUtil.afterLastSlash(FileFilterUtil.slashify(fileNamePatternStr));
}
/**
 * Injects a custom file-naming/triggering policy; if never called, start()
 * installs a DefaultTimeBasedFileNamingAndTriggeringPolicy.
 */
public void setTimeBasedFileNamingAndTriggeringPolicy(
    TimeBasedFileNamingAndTriggeringPolicy<E> timeBasedTriggering) {
  this.timeBasedFileNamingAndTriggeringPolicy = timeBasedTriggering;
}
/** @return the file-naming/triggering policy currently in use (may be null before start()) */
public TimeBasedFileNamingAndTriggeringPolicy<E> getTimeBasedFileNamingAndTriggeringPolicy() {
  return timeBasedFileNamingAndTriggeringPolicy;
}
/**
 * Rolls the elapsed period's log file over: renames it into place when no
 * compression is configured, otherwise schedules asynchronous compression
 * (storing the job in {@code future}), then prunes old archives when a
 * remover is configured.
 */
public void rollover() throws RolloverFailure {
  // when rollover is called the elapsed period's file has
  // been already closed. This is a working assumption of this method.
  String elapsedPeriodsFileName = timeBasedFileNamingAndTriggeringPolicy
      .getElapsedPeriodsFileName();
  String elapsedPeriodStem = FileFilterUtil.afterLastSlash(elapsedPeriodsFileName);
  if (compressionMode == CompressionMode.NONE) {
    if (getParentsRawFileProperty() != null) {
      renameUtil.rename(getParentsRawFileProperty(), elapsedPeriodsFileName);
    } // else { nothing to do if CompressionMode == NONE and parentsRawFileProperty == null }
  } else {
    if (getParentsRawFileProperty() == null) {
      // appender writes directly to the time-stamped file: compress it in place
      future = asyncCompress(elapsedPeriodsFileName, elapsedPeriodsFileName, elapsedPeriodStem);
    } else {
      // appender writes to a fixed-name file: rename it aside, then compress
      future = renamedRawAndAsyncCompress(elapsedPeriodsFileName, elapsedPeriodStem);
    }
  }
  if (archiveRemover != null) {
    archiveRemover.clean(new Date(timeBasedFileNamingAndTriggeringPolicy.getCurrentTime()));
  }
}
/**
 * Schedules compression of {@code nameOfFile2Compress} into
 * {@code nameOfCompressedFile} on a background thread.
 *
 * @param nameOfFile2Compress  path of the file to compress
 * @param nameOfCompressedFile path of the resulting archive
 * @param innerEntryName       entry name used inside zip archives
 * @return a future tracking completion of the background compression job;
 *         declared {@code Future<?>} (instead of the raw type) since the
 *         result value is never used
 * @throws RolloverFailure declared for symmetry with the rollover API
 */
Future<?> asyncCompress(String nameOfFile2Compress, String nameOfCompressedFile, String innerEntryName)
    throws RolloverFailure {
    AsynchronousCompressor ac = new AsynchronousCompressor(compressor);
    return ac.compressAsynchronously(nameOfFile2Compress, nameOfCompressedFile, innerEntryName);
}
/**
 * Renames the parent's raw (active) file to a unique temporary name and then
 * compresses that temporary file asynchronously.
 *
 * @param nameOfCompressedFile path of the resulting archive
 * @param innerEntryName       entry name used inside zip archives
 * @return a future tracking completion of the background compression job;
 *         declared {@code Future<?>} (instead of the raw type) since the
 *         result value is never used
 * @throws RolloverFailure if renaming the raw file fails
 */
Future<?> renamedRawAndAsyncCompress(String nameOfCompressedFile, String innerEntryName)
    throws RolloverFailure {
    String parentsRawFile = getParentsRawFileProperty();
    // nanoTime() makes the temporary name effectively unique, avoiding a clash
    // with any leftover .tmp file from a previous rollover.
    String tmpTarget = parentsRawFile + System.nanoTime() + ".tmp";
    renameUtil.rename(parentsRawFile, tmpTarget);
    return asyncCompress(tmpTarget, nameOfCompressedFile, innerEntryName);
}
/**
 * Computes the name of the active log file.
 *
 * <p>If the user configured a file name on the parent appender, that name is
 * authoritative and is returned as-is. Otherwise the active file name is
 * derived from the <b>FileNamePattern</b> for the current period, as computed
 * by the collaborating time-based naming/triggering policy.
 */
public String getActiveFileName() {
    String parentsRawFileProperty = getParentsRawFileProperty();
    // A user-supplied file name always wins over the generated one.
    return parentsRawFileProperty != null
            ? parentsRawFileProperty
            : timeBasedFileNamingAndTriggeringPolicy.getCurrentPeriodsFileNameWithoutCompressionSuffix();
}
/**
 * Delegates the rollover decision to the collaborating time-based
 * naming/triggering policy.
 *
 * @param activeFile the currently active log file
 * @param event the event currently being processed
 * @return true if a rollover should occur now
 */
public boolean isTriggeringEvent(File activeFile, final E event) {
return timeBasedFileNamingAndTriggeringPolicy.isTriggeringEvent(activeFile, event);
}
/**
 * Get the number of archive files to keep.
 *
 * @return number of archive files to keep; when left at INFINITE_HISTORY,
 *         no archive remover is installed (see start())
 */
public int getMaxHistory() {
return maxHistory;
}
/**
 * Set the maximum number of archive files to keep. Older archives are
 * removed by the archive remover installed in start().
 *
 * @param maxHistory
 *                number of archive files to keep
 */
public void setMaxHistory(int maxHistory) {
this.maxHistory = maxHistory;
}
/**
 * @return whether archive removal is attempted on application start up
 * @see #setCleanHistoryOnStart(boolean)
 */
public boolean isCleanHistoryOnStart() {
return cleanHistoryOnStart;
}
/**
 * Should archive removal be attempted on application start up? Default is false.
 * @since 1.0.1
 * @param cleanHistoryOnStart true to clean the archive immediately after start()
 */
public void setCleanHistoryOnStart(boolean cleanHistoryOnStart) {
this.cleanHistoryOnStart = cleanHistoryOnStart;
}
@Override
public String toString() {
// Abbreviated fully-qualified class name, used in status/diagnostic output.
return "c.q.l.core.rolling.TimeBasedRollingPolicy";
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.itest.jmh;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import org.apache.camel.util.CaseInsensitiveMap;
import org.junit.Test;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.Level;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.infra.Blackhole;
import org.openjdk.jmh.runner.Runner;
import org.openjdk.jmh.runner.options.Options;
import org.openjdk.jmh.runner.options.OptionsBuilder;
import org.openjdk.jmh.runner.options.TimeValue;
import static org.apache.commons.lang3.RandomStringUtils.randomAlphabetic;
/**
 * JMH micro-benchmarks comparing {@link CaseInsensitiveMap} with the
 * cedarsoftware case-insensitive map and a plain (case-sensitive)
 * {@link HashMap}.
 */
public class CaseInsensitiveMapTest {

    /**
     * Entry point: configures and launches the JMH runner over every
     * benchmark method declared in this class.
     */
    @Test
    public void launchBenchmark() throws Exception {
        Options opt = new OptionsBuilder()
                // Specify which benchmarks to run.
                // You can be more specific if you'd like to run only one benchmark per test.
                .include(this.getClass().getName() + ".*")
                // Set the following options as needed
                .mode(Mode.SampleTime)
                .timeUnit(TimeUnit.MILLISECONDS)
                .warmupTime(TimeValue.seconds(1))
                .warmupIterations(2)
                .measurementTime(TimeValue.seconds(5))
                .measurementIterations(5)
                .threads(1)
                .forks(1)
                .shouldFailOnError(true)
                .shouldDoGC(true)
                .measurementBatchSize(1000000)
                .build();
        new Runner(opt).run();
    }

    // The JMH samples are the best documentation for how to use it
    // http://hg.openjdk.java.net/code-tools/jmh/file/tip/jmh-samples/src/main/java/org/openjdk/jmh/samples/

    /** Per-thread state holding one fresh instance of each map implementation. */
    @State(Scope.Thread)
    public static class MapsBenchmarkState {
        // Maps under test; re-created once per trial by initialize().
        CaseInsensitiveMap camelMap;
        com.cedarsoftware.util.CaseInsensitiveMap cedarsoftMap;
        HashMap hashMap;

        @Setup(Level.Trial)
        public void initialize() {
            camelMap = new CaseInsensitiveMap();
            cedarsoftMap = new com.cedarsoftware.util.CaseInsensitiveMap();
            hashMap = new HashMap();
        }
    }

    /** Benchmark-wide state providing two fixed maps of random source data. */
    @State(Scope.Benchmark)
    public static class MapsSourceDataBenchmarkState {
        Map<String, Object> map1 = generateRandomMap(10);
        Map<String, Object> map2 = generateRandomMap(10);

        // Builds a map of `size` random 10-character alphabetic key/value pairs.
        private Map<String, Object> generateRandomMap(int size) {
            return IntStream.range(0, size)
                    .boxed()
                    .collect(Collectors.toMap(i -> randomAlphabetic(10), i -> randomAlphabetic(10)));
        }
    }

    /** put/get with differing key case to exercise case-insensitive lookup. */
    @Benchmark
    public void camelMapSimpleCase(MapsBenchmarkState state, Blackhole bh) {
        Map map = state.camelMap;
        map.put("foo", "Hello World");
        Object o1 = map.get("foo");
        bh.consume(o1);
        Object o2 = map.get("FOO");
        bh.consume(o2);
        map.put("BAR", "Bye World");
        Object o3 = map.get("bar");
        bh.consume(o3);
        Object o4 = map.get("BAR");
        bh.consume(o4);
    }

    /** Same access pattern as {@link #camelMapSimpleCase} for the cedarsoftware map. */
    @Benchmark
    public void cedarsoftMapSimpleCase(MapsBenchmarkState state, Blackhole bh) {
        Map map = state.cedarsoftMap;
        map.put("foo", "Hello World");
        Object o1 = map.get("foo");
        bh.consume(o1);
        Object o2 = map.get("FOO");
        bh.consume(o2);
        map.put("BAR", "Bye World");
        Object o3 = map.get("bar");
        bh.consume(o3);
        Object o4 = map.get("BAR");
        bh.consume(o4);
    }

    /**
     * Baseline: same access pattern on a plain HashMap. Note the mixed-case
     * lookups ("FOO", "bar") miss here, since HashMap is case-sensitive.
     */
    @Benchmark
    public void hashMapSimpleCase(MapsBenchmarkState state, Blackhole bh) {
        Map map = state.hashMap;
        map.put("foo", "Hello World");
        Object o1 = map.get("foo");
        bh.consume(o1);
        Object o2 = map.get("FOO");
        bh.consume(o2);
        map.put("BAR", "Bye World");
        Object o3 = map.get("bar");
        bh.consume(o3);
        Object o4 = map.get("BAR");
        bh.consume(o4);
    }

    @Benchmark
    public void camelMapComplexCase(MapsBenchmarkState mapsBenchmarkState, MapsSourceDataBenchmarkState sourceDataState, Blackhole blackhole) {
        // step 1 - obtain the map under test from the per-thread benchmark state
        Map map = mapsBenchmarkState.camelMap;
        // step 2 - add elements one by one
        sourceDataState.map2.entrySet().forEach(entry -> blackhole.consume(map.put(entry.getKey(), entry.getValue())));
        // step 3 - look up elements one by one (map1's random keys were not added
        // in step 2, so these are presumably misses — keys are random 10-char strings)
        sourceDataState.map1.keySet().forEach(key -> blackhole.consume(map.get(key)));
        // step 4 - remove elements one by one
        sourceDataState.map1.keySet().forEach(key -> blackhole.consume(map.remove(key)));
        // step 5 - add a couple of elements at once
        map.putAll(sourceDataState.map1);
        blackhole.consume(map);
    }

    @Benchmark
    public void cedarsoftMapComplexCase(MapsBenchmarkState mapsBenchmarkState, MapsSourceDataBenchmarkState sourceDataState, Blackhole blackhole) {
        // step 1 - obtain the map under test from the per-thread benchmark state
        Map map = mapsBenchmarkState.cedarsoftMap;
        // step 2 - add elements one by one
        sourceDataState.map2.entrySet().forEach(entry -> blackhole.consume(map.put(entry.getKey(), entry.getValue())));
        // step 3 - look up elements one by one
        sourceDataState.map1.keySet().forEach(key -> blackhole.consume(map.get(key)));
        // step 4 - remove elements one by one
        sourceDataState.map1.keySet().forEach(key -> blackhole.consume(map.remove(key)));
        // step 5 - add a couple of elements at once
        map.putAll(sourceDataState.map1);
        blackhole.consume(map);
    }

    @Benchmark
    public void hashMapComplexCase(MapsBenchmarkState mapsBenchmarkState, MapsSourceDataBenchmarkState sourceDataState, Blackhole blackhole) {
        // step 1 - obtain the map under test from the per-thread benchmark state
        Map map = mapsBenchmarkState.hashMap;
        // step 2 - add elements one by one
        sourceDataState.map2.entrySet().forEach(entry -> blackhole.consume(map.put(entry.getKey(), entry.getValue())));
        // step 3 - look up elements one by one
        sourceDataState.map1.keySet().forEach(key -> blackhole.consume(map.get(key)));
        // step 4 - remove elements one by one
        sourceDataState.map1.keySet().forEach(key -> blackhole.consume(map.remove(key)));
        // step 5 - add a couple of elements at once
        map.putAll(sourceDataState.map1);
        blackhole.consume(map);
    }
}
| |
/*
* Copyright (C) 2015-2018 SoftIndex LLC.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.datakernel.csp;
import io.datakernel.async.function.AsyncConsumer;
import io.datakernel.async.process.AsyncCloseable;
import io.datakernel.async.process.AsyncExecutor;
import io.datakernel.bytebuf.ByteBuf;
import io.datakernel.common.exception.UncheckedException;
import io.datakernel.csp.dsl.ChannelConsumerTransformer;
import io.datakernel.csp.queue.ChannelQueue;
import io.datakernel.csp.queue.ChannelZeroBuffer;
import io.datakernel.eventloop.Eventloop;
import io.datakernel.net.AsyncTcpSocket;
import io.datakernel.promise.Promise;
import io.datakernel.promise.SettablePromise;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.Arrays;
import java.util.Iterator;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.function.Supplier;
import static io.datakernel.common.Recyclable.tryRecycle;
/**
 * This interface represents a consumer of data items that should be used serially
 * (each consecutive {@link #accept(Object)} operation should be called only after
 * the previous {@link #accept(Object)} operation finishes).
 * <p>
 * After the consumer is closed, all subsequent calls to {@link #accept(Object)} will
 * return a promise completed exceptionally.
 * <p>
 * If any exception is caught while consuming data items, {@link #closeEx(Throwable)}
 * method should be called. All resources should be freed and the caught exception
 * should be propagated to all related processes.
 * <p>
 * If {@link #accept(Object)} takes {@code null} as argument, it represents end-of-stream
 * and means that no additional data should be consumed.
 */
public interface ChannelConsumer<T> extends AsyncCloseable {
    /**
     * Consumes a provided value and returns a
     * {@link Promise} as a marker of success.
     */
    @NotNull
    Promise<Void> accept(@Nullable T value);

    /**
     * Sends the end-of-stream marker ({@code null}) to this consumer.
     */
    default Promise<Void> acceptEndOfStream() {
        return accept(null);
    }

    /**
     * Accepts provided items and returns {@code Promise} as a
     * marker of completion. If one of the items was accepted
     * with an error, subsequent items will be recycled and a
     * {@code Promise} of exception will be returned.
     */
    @NotNull
    @SuppressWarnings("unchecked")
    default Promise<Void> acceptAll(T... items) {
        return acceptAll(Arrays.asList(items));
    }

    /**
     * @see ChannelConsumers#acceptAll(ChannelConsumer, Iterator)
     */
    @NotNull
    default Promise<Void> acceptAll(@NotNull Iterator<? extends T> it) {
        return ChannelConsumers.acceptAll(this, it);
    }

    /**
     * @see #acceptAll(Iterator)
     */
    default Promise<Void> acceptAll(@NotNull Iterable<T> iterable) {
        return acceptAll(iterable.iterator());
    }

    /**
     * Wraps {@link AsyncConsumer} in {@code ChannelConsumer}.
     *
     * @see ChannelConsumer#of(AsyncConsumer, AsyncCloseable)
     */
    static <T> ChannelConsumer<T> of(@NotNull AsyncConsumer<T> consumer) {
        // No-op closeable: nothing extra to release on close.
        return of(consumer, e -> {});
    }

    /**
     * Wraps {@link AsyncConsumer} in {@code ChannelConsumer}.
     *
     * @param consumer  AsyncConsumer to be wrapped
     * @param closeable a Cancellable, which will be set to the returned ChannelConsumer
     * @param <T>       type of data to be consumed
     * @return AbstractChannelConsumer which wraps AsyncConsumer
     */
    static <T> ChannelConsumer<T> of(@NotNull AsyncConsumer<T> consumer, @Nullable AsyncCloseable closeable) {
        return new AbstractChannelConsumer<T>(closeable) {
            final AsyncConsumer<T> thisConsumer = consumer;

            @Override
            protected Promise<Void> doAccept(T value) {
                // null is the end-of-stream marker; the wrapped consumer never sees it.
                if (value != null) {
                    return thisConsumer.accept(value);
                }
                return Promise.complete();
            }
        };
    }

    /**
     * Wraps Java's {@link Consumer} in {@code ChannelConsumer}.
     */
    static <T> ChannelConsumer<T> ofConsumer(@NotNull Consumer<T> consumer) {
        return of(AsyncConsumer.of(consumer));
    }

    /**
     * Creates a consumer which always returns Promise
     * of exception when accepts values.
     *
     * @param e   an exception which is wrapped in returned
     *            Promise when {@code accept()} is called
     * @param <T> type of data to be consumed
     * @return an AbstractChannelConsumer which always
     *         returns Promise of exception when accepts values
     */
    static <T> ChannelConsumer<T> ofException(Throwable e) {
        return new AbstractChannelConsumer<T>() {
            @Override
            protected Promise<Void> doAccept(T value) {
                // Recycle the rejected item so its resources are not leaked.
                tryRecycle(value);
                return Promise.ofException(e);
            }
        };
    }

    /**
     * @see #ofSupplier(Function, ChannelQueue)
     */
    static <T> ChannelConsumer<T> ofSupplier(Function<ChannelSupplier<T>, Promise<Void>> supplier) {
        return ofSupplier(supplier, new ChannelZeroBuffer<>());
    }

    /**
     * Creates a {@code ChannelConsumer} backed by the given queue: values
     * accepted by the returned consumer are delivered to the supplier-side
     * process created by applying {@code supplier} to the queue's supplier.
     * The process's completion promise is folded into the consumer's
     * acknowledgement.
     */
    static <T> ChannelConsumer<T> ofSupplier(Function<ChannelSupplier<T>, Promise<Void>> supplier, ChannelQueue<T> queue) {
        Promise<Void> extraAcknowledge = supplier.apply(queue.getSupplier());
        ChannelConsumer<T> result = queue.getConsumer();
        // NOTE(review): identity comparison — presumably Promise.complete() is a
        // shared singleton, so this detects the trivial acknowledgement; confirm.
        if (extraAcknowledge == Promise.complete()) return result;
        return result
                .withAcknowledgement(ack -> ack.both(extraAcknowledge));
    }

    /**
     * Unwraps {@code ChannelConsumer} of provided {@code Promise}.
     * If provided Promise is already successfully completed, its
     * result will be returned, otherwise an {@code AbstractChannelConsumer}
     * is created, which waits for the Promise to be completed before accepting
     * any value. A Promise of Exception will be returned if Promise was completed
     * with an exception.
     *
     * @param promise Promise of {@code ChannelConsumer}
     * @param <T>     type of data to be consumed
     * @return a ChannelConsumer backed by the given promise
     */
    static <T> ChannelConsumer<T> ofPromise(Promise<? extends ChannelConsumer<T>> promise) {
        if (promise.isResult()) return promise.getResult();
        return new AbstractChannelConsumer<T>() {
            ChannelConsumer<T> consumer;
            Throwable exception;

            @Override
            protected Promise<Void> doAccept(T value) {
                // Fast path once the promised consumer has materialized.
                if (consumer != null) return consumer.accept(value);
                return promise.thenEx((consumer, e) -> {
                    if (e == null) {
                        // Cache the materialized consumer for subsequent accepts.
                        this.consumer = consumer;
                        return consumer.accept(value);
                    } else {
                        tryRecycle(value);
                        return Promise.ofException(e);
                    }
                });
            }

            @Override
            protected void onClosed(@NotNull Throwable e) {
                exception = e;
                // Propagate the close to the consumer once (if) the promise completes.
                promise.whenResult(supplier -> supplier.closeEx(e));
            }
        };
    }

    /**
     * Wraps a consumer that lives on another eventloop so it can be used from
     * the current eventloop: each accepted value is forwarded to the other
     * eventloop, and completion is relayed back to the caller's eventloop.
     * If the other eventloop is the current one, the consumer is returned as-is.
     */
    static <T> ChannelConsumer<T> ofAnotherEventloop(@NotNull Eventloop anotherEventloop,
                                                     @NotNull ChannelConsumer<T> anotherEventloopConsumer) {
        if (Eventloop.getCurrentEventloop() == anotherEventloop) {
            return anotherEventloopConsumer;
        }
        return new AbstractChannelConsumer<T>() {
            @Override
            protected Promise<Void> doAccept(@Nullable T value) {
                SettablePromise<Void> promise = new SettablePromise<>();
                // Keep this eventloop alive while the cross-eventloop accept is in flight.
                eventloop.startExternalTask();
                anotherEventloop.execute(() ->
                        anotherEventloopConsumer.accept(value)
                                .whenComplete((v, e) -> {
                                    eventloop.execute(() -> promise.accept(v, e));
                                    eventloop.completeExternalTask();
                                }));
                return promise;
            }

            @Override
            protected void onClosed(@NotNull Throwable e) {
                eventloop.startExternalTask();
                anotherEventloop.execute(() -> {
                    anotherEventloopConsumer.closeEx(e);
                    eventloop.completeExternalTask();
                });
            }
        };
    }

    /**
     * Returns a {@code ChannelConsumer} wrapped in {@link Supplier}
     * and calls its {@code accept()} when {@code accept()} method is called.
     * The wrapped consumer is created lazily, on the first accepted value.
     *
     * @param provider provider of the {@code ChannelConsumer}
     * @return a {@code ChannelConsumer} which was wrapped in the {@code provider}
     */
    static <T> ChannelConsumer<T> ofLazyProvider(Supplier<? extends ChannelConsumer<T>> provider) {
        return new AbstractChannelConsumer<T>() {
            private ChannelConsumer<T> consumer;

            @Override
            protected Promise<Void> doAccept(@Nullable T value) {
                if (consumer == null) consumer = provider.get();
                return consumer.accept(value);
            }

            @Override
            protected void onClosed(@NotNull Throwable e) {
                // Only close the delegate if it was ever created.
                if (consumer != null) {
                    consumer.closeEx(e);
                }
            }
        };
    }

    /**
     * Wraps {@link AsyncTcpSocket#write(ByteBuf)} operation into {@link ChannelConsumer}.
     *
     * @return {@link ChannelConsumer} of ByteBufs that will be sent to network
     */
    static ChannelConsumer<ByteBuf> ofSocket(AsyncTcpSocket socket) {
        return ChannelConsumer.of(socket::write, socket)
                .withAcknowledgement(ack -> ack
                        // writing null signals end-of-stream to the socket
                        .then(() -> socket.write(null)));
    }

    /**
     * Transforms current {@code ChannelConsumer} with provided {@link ChannelConsumerTransformer}.
     *
     * @param fn  transformer of the {@code ChannelConsumer}
     * @param <R> result value after transformation
     * @return result of transformation applied to the current {@code ChannelConsumer}
     */
    default <R> R transformWith(ChannelConsumerTransformer<T, R> fn) {
        return fn.transform(this);
    }

    /**
     * Returns a wrapper whose {@code accept} completes asynchronously,
     * breaking potentially long synchronous completion chains.
     */
    default ChannelConsumer<T> async() {
        return new AbstractChannelConsumer<T>(this) {
            @Override
            protected Promise<Void> doAccept(T value) {
                return ChannelConsumer.this.accept(value).async();
            }
        };
    }

    /**
     * Creates a wrapper ChannelConsumer which executes current
     * ChannelConsumer's {@code accept(T value)} within the
     * {@code asyncExecutor}.
     *
     * @param asyncExecutor executes ChannelConsumer
     * @return a wrapper of current ChannelConsumer which executes
     *         in provided {@code asyncExecutor}
     */
    default ChannelConsumer<T> withExecutor(AsyncExecutor asyncExecutor) {
        return new AbstractChannelConsumer<T>(this) {
            @Override
            protected Promise<Void> doAccept(T value) {
                return asyncExecutor.execute(() -> ChannelConsumer.this.accept(value));
            }
        };
    }

    /**
     * Creates a wrapper ChannelConsumer - when its {@code accept(T value)}
     * is called, if provided {@code value} doesn't equal {@code null}, it
     * will be accepted by the provided {@code fn} first and then by this
     * ChannelConsumer.
     *
     * @param fn {@link Consumer} which accepts the value passed by {@code apply(T value)}
     * @return a wrapper ChannelConsumer
     */
    default ChannelConsumer<T> peek(Consumer<? super T> fn) {
        return new AbstractChannelConsumer<T>(this) {
            @Override
            protected Promise<Void> doAccept(T value) {
                // end-of-stream (null) bypasses the peeking function
                if (value != null) fn.accept(value);
                return ChannelConsumer.this.accept(value);
            }
        };
    }

    /**
     * Creates a wrapper ChannelConsumer - when its {@code accept(T value)}
     * is called, {@code fn} will be applied to the provided {@code value} first
     * and the result of the {@code fn} will be accepted by current ChannelConsumer.
     * If the provided {@code value} is {@code null}, {@code fn} won't be applied.
     *
     * @param fn  {@link Function} to be applied to the value of {@code apply(T value)}
     * @param <V> type of data accepted and returned by the {@code fn} and accepted by ChannelConsumer
     * @return a wrapper ChannelConsumer
     */
    default <V> ChannelConsumer<V> map(Function<? super V, ? extends T> fn) {
        return new AbstractChannelConsumer<V>(this) {
            @Override
            protected Promise<Void> doAccept(V value) {
                if (value != null) {
                    T newValue;
                    try {
                        newValue = fn.apply(value);
                    } catch (UncheckedException u) {
                        // A failing mapper poisons the consumer and rejects this value.
                        ChannelConsumer.this.closeEx(u.getCause());
                        return Promise.ofException(u.getCause());
                    }
                    return ChannelConsumer.this.accept(newValue);
                } else {
                    return ChannelConsumer.this.acceptEndOfStream();
                }
            }
        };
    }

    /**
     * Creates a wrapper ChannelConsumer - when its {@code accept(T value)}
     * is called, {@code fn} will be applied to the provided {@code value} first
     * and the result of the {@code fn} will be accepted by current ChannelConsumer
     * asynchronously. If provided {@code value} is {@code null}, {@code fn} won't
     * be applied.
     *
     * @param fn  {@link Function} to be applied to the value of {@code apply(T value)}
     * @param <V> type of data accepted by the {@code fn} and ChannelConsumer
     * @return a wrapper ChannelConsumer
     */
    default <V> ChannelConsumer<V> mapAsync(Function<? super V, ? extends Promise<T>> fn) {
        return new AbstractChannelConsumer<V>(this) {
            @Override
            protected Promise<Void> doAccept(V value) {
                return value != null ?
                        fn.apply(value)
                                .then(ChannelConsumer.this::accept) :
                        ChannelConsumer.this.acceptEndOfStream();
            }
        };
    }

    /**
     * Creates a wrapper ChannelConsumer - when its {@code accept(T value)}
     * is called, current ChannelConsumer will accept the value only if it
     * passes the {@link Predicate} test; rejected values are recycled.
     *
     * @param predicate {@link Predicate} which is used to filter accepted value
     * @return a wrapper ChannelConsumer
     */
    default ChannelConsumer<T> filter(Predicate<? super T> predicate) {
        return new AbstractChannelConsumer<T>(this) {
            @Override
            protected Promise<Void> doAccept(T value) {
                if (value != null && predicate.test(value)) {
                    return ChannelConsumer.this.accept(value);
                } else {
                    // Filtered-out item: recycle its resources and report success.
                    tryRecycle(value);
                    return Promise.complete();
                }
            }
        };
    }

    /**
     * Creates a wrapper ChannelConsumer - after its {@code accept(T value)}
     * is called and completed, an acknowledgement is returned. An acknowledgement
     * is a {@link SettablePromise} which is accepted by the provided {@code fn}
     * and then materialized.
     *
     * @param fn a function applied to the {@code SettablePromise} which is then
     *           materialized and returned
     * @return a wrapper ChannelConsumer
     */
    default ChannelConsumer<T> withAcknowledgement(Function<Promise<Void>, Promise<Void>> fn) {
        SettablePromise<Void> acknowledgement = new SettablePromise<>();
        Promise<Void> newAcknowledgement = fn.apply(acknowledgement);
        return new AbstractChannelConsumer<T>(this) {
            @Override
            protected Promise<Void> doAccept(@Nullable T value) {
                if (value != null) {
                    return ChannelConsumer.this.accept(value)
                            .thenEx(($, e) -> {
                                if (e == null) {
                                    return Promise.complete();
                                }
                                // On failure, fail the acknowledgement and surface
                                // the (possibly transformed) acknowledgement promise.
                                acknowledgement.trySetException(e);
                                return newAcknowledgement;
                            });
                } else {
                    // End-of-stream: acknowledgement follows the final accept's outcome.
                    ChannelConsumer.this.accept(null).whenComplete(acknowledgement::trySet);
                    return newAcknowledgement;
                }
            }

            @Override
            protected void onClosed(@NotNull Throwable e) {
                acknowledgement.trySetException(e);
            }
        };
    }

    /**
     * Returns a Promise as a marker of completion: it completes when the
     * acknowledgement registered through {@code fn} completes.
     */
    static Promise<Void> getAcknowledgement(Consumer<Function<Promise<Void>, Promise<Void>>> fn) {
        return Promise.ofCallback(cb ->
                fn.accept(ack -> ack.whenComplete(cb)));
    }
}
| |
/*
* Copyright 2012 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.launcher.cli;
import org.gradle.StartParameter;
import org.gradle.cli.CommandLineParser;
import org.gradle.cli.ParsedCommandLine;
import org.gradle.cli.SystemPropertiesCommandLineConverter;
import org.gradle.configuration.GradleLauncherMetaData;
import org.gradle.initialization.BuildLayoutParameters;
import org.gradle.initialization.DefaultCommandLineConverter;
import org.gradle.initialization.GradleLauncherFactory;
import org.gradle.initialization.LayoutCommandLineConverter;
import org.gradle.internal.SystemProperties;
import org.gradle.internal.nativeintegration.services.NativeServices;
import org.gradle.internal.service.ServiceRegistry;
import org.gradle.internal.service.ServiceRegistryBuilder;
import org.gradle.internal.service.scopes.GlobalScopeServices;
import org.gradle.launcher.cli.converter.DaemonCommandLineConverter;
import org.gradle.launcher.cli.converter.LayoutToPropertiesConverter;
import org.gradle.launcher.cli.converter.PropertiesToDaemonParametersConverter;
import org.gradle.launcher.cli.converter.PropertiesToStartParameterConverter;
import org.gradle.launcher.daemon.bootstrap.ForegroundDaemonAction;
import org.gradle.launcher.daemon.client.DaemonClient;
import org.gradle.launcher.daemon.client.DaemonClientFactory;
import org.gradle.launcher.daemon.client.DaemonClientGlobalServices;
import org.gradle.launcher.daemon.client.DaemonStopClient;
import org.gradle.launcher.daemon.configuration.CurrentProcess;
import org.gradle.launcher.daemon.configuration.DaemonParameters;
import org.gradle.launcher.daemon.configuration.ForegroundDaemonConfiguration;
import org.gradle.launcher.exec.BuildActionExecuter;
import org.gradle.launcher.exec.BuildActionParameters;
import org.gradle.launcher.exec.InProcessBuildActionExecuter;
import org.gradle.logging.internal.OutputEventListener;
import java.lang.management.ManagementFactory;
import java.util.HashMap;
import java.util.Map;
class BuildActionsFactory implements CommandLineAction {
private static final String FOREGROUND = "foreground";
private static final String STOP = "stop";
private final ServiceRegistry loggingServices;
private final LayoutCommandLineConverter layoutConverter;
private final SystemPropertiesCommandLineConverter propertiesConverter;
private final LayoutToPropertiesConverter layoutToPropertiesConverter;
private final PropertiesToStartParameterConverter propertiesToStartParameterConverter;
private final DefaultCommandLineConverter commandLineConverter;
private final DaemonCommandLineConverter daemonConverter;
private final PropertiesToDaemonParametersConverter propertiesToDaemonParametersConverter;
/**
 * Creates a factory wired with a fresh {@link DefaultCommandLineConverter}
 * and default instances of the remaining converters.
 *
 * @param loggingServices registry supplying logging-related services
 */
BuildActionsFactory(ServiceRegistry loggingServices) {
this(loggingServices, new DefaultCommandLineConverter());
}
/**
 * Fully-injectable constructor: every converter used by
 * {@link #createAction} can be supplied explicitly (useful for testing).
 */
BuildActionsFactory(ServiceRegistry loggingServices, DefaultCommandLineConverter commandLineConverter,
DaemonCommandLineConverter daemonConverter, LayoutCommandLineConverter layoutConverter,
SystemPropertiesCommandLineConverter propertiesConverter,
LayoutToPropertiesConverter layoutToPropertiesConverter,
PropertiesToStartParameterConverter propertiesToStartParameterConverter,
PropertiesToDaemonParametersConverter propertiesToDaemonParametersConverter) {
this.loggingServices = loggingServices;
this.commandLineConverter = commandLineConverter;
this.daemonConverter = daemonConverter;
this.layoutConverter = layoutConverter;
this.propertiesConverter = propertiesConverter;
this.layoutToPropertiesConverter = layoutToPropertiesConverter;
this.propertiesToStartParameterConverter = propertiesToStartParameterConverter;
this.propertiesToDaemonParametersConverter = propertiesToDaemonParametersConverter;
}
/**
 * Wires the layout and system-property converters from the given command
 * line converter, and fresh defaults for everything else.
 */
private BuildActionsFactory(ServiceRegistry loggingServices, DefaultCommandLineConverter commandLineConverter) {
this(loggingServices, commandLineConverter, new DaemonCommandLineConverter(),
commandLineConverter.getLayoutConverter(), commandLineConverter.getSystemPropertiesConverter(),
new LayoutToPropertiesConverter(), new PropertiesToStartParameterConverter(), new PropertiesToDaemonParametersConverter());
}
/**
 * Registers all build-related command line options on the given parser:
 * the standard build options, the daemon options, and the daemon-lifecycle
 * options 'foreground' and 'stop'.
 */
public void configureCommandLineParser(CommandLineParser parser) {
commandLineConverter.configure(parser);
daemonConverter.configure(parser);
parser.option(FOREGROUND).hasDescription("Starts the Gradle daemon in the foreground.").incubating();
parser.option(STOP).hasDescription("Stops the Gradle daemon if it is running.");
}
/**
 * Builds the {@link Runnable} action for the parsed command line.
 * Configuration is assembled in layers: build layout first, then properties
 * derived from the layout, then command-line-supplied system properties and
 * explicit options — each layer overriding the previous one. Finally the
 * execution mode (stop / foreground / daemon / in-process / single-use
 * daemon) is selected.
 */
public Runnable createAction(CommandLineParser parser, ParsedCommandLine commandLine) {
BuildLayoutParameters layout = new BuildLayoutParameters();
layoutConverter.convert(commandLine, layout);
// Layout-derived properties go in first so that command-line-supplied
// system properties (converted next) can override them.
Map<String, String> properties = new HashMap<String, String>();
layoutToPropertiesConverter.convert(layout, properties);
propertiesConverter.convert(commandLine, properties);
StartParameter startParameter = new StartParameter();
propertiesToStartParameterConverter.convert(properties, startParameter);
// Explicit command line options take precedence over properties.
commandLineConverter.convert(commandLine, startParameter);
DaemonParameters daemonParameters = new DaemonParameters(layout, startParameter.getSystemPropertiesArgs());
propertiesToDaemonParametersConverter.convert(properties, daemonParameters);
daemonConverter.convert(commandLine, daemonParameters);
if (commandLine.hasOption(STOP)) {
return stopAllDaemons(daemonParameters, loggingServices);
}
if (commandLine.hasOption(FOREGROUND)) {
ForegroundDaemonConfiguration conf = new ForegroundDaemonConfiguration(
daemonParameters.getUid(), daemonParameters.getBaseDir(), daemonParameters.getIdleTimeout());
return new ForegroundDaemonAction(loggingServices, conf);
}
if (daemonParameters.isEnabled()) {
return runBuildWithDaemon(startParameter, daemonParameters, loggingServices);
}
if (canUseCurrentProcess(daemonParameters)) {
return runBuildInProcess(startParameter, daemonParameters, loggingServices);
}
// Current JVM cannot satisfy the requested parameters: spawn a one-shot daemon.
return runBuildInSingleUseDaemon(startParameter, daemonParameters, loggingServices);
}
/**
 * Creates the action that asks all compatible daemons to stop.
 */
private Runnable stopAllDaemons(DaemonParameters daemonParameters, ServiceRegistry loggingServices) {
    ServiceRegistry clientSharedServices = createGlobalClientServices();
    DaemonClientFactory clientFactory = clientSharedServices.get(DaemonClientFactory.class);
    OutputEventListener outputListener = loggingServices.get(OutputEventListener.class);
    ServiceRegistry clientServices = clientFactory.createStopDaemonServices(outputListener, daemonParameters);
    return new StopDaemonAction(clientServices.get(DaemonStopClient.class));
}
/**
 * Creates the action that runs the build in a daemon matching the given
 * daemon startup parameters.
 */
private Runnable runBuildWithDaemon(StartParameter startParameter, DaemonParameters daemonParameters, ServiceRegistry loggingServices) {
    // Create a client that will match based on the daemon startup parameters.
    DaemonClientFactory clientFactory = createGlobalClientServices().get(DaemonClientFactory.class);
    ServiceRegistry clientServices = clientFactory.createBuildClientServices(
            loggingServices.get(OutputEventListener.class), daemonParameters, System.in);
    return daemonBuildAction(startParameter, daemonParameters, clientServices.get(DaemonClient.class));
}
/**
 * Returns true if the currently running JVM can be configured to satisfy the
 * required build parameters, in which case the build may run in-process.
 */
private boolean canUseCurrentProcess(DaemonParameters requiredBuildParameters) {
    return new CurrentProcess().configureForBuild(requiredBuildParameters);
}
private Runnable runBuildInProcess(StartParameter startParameter, DaemonParameters daemonParameters, ServiceRegistry loggingServices) {
ServiceRegistry globalServices = ServiceRegistryBuilder.builder()
.displayName("Global services")
.parent(loggingServices)
.parent(NativeServices.getInstance())
.provider(new GlobalScopeServices(false))
.build();
InProcessBuildActionExecuter executer = new InProcessBuildActionExecuter(globalServices.get(GradleLauncherFactory.class));
return daemonBuildAction(startParameter, daemonParameters, executer);
}
private Runnable runBuildInSingleUseDaemon(StartParameter startParameter, DaemonParameters daemonParameters, ServiceRegistry loggingServices) {
//(SF) this is a workaround until this story is completed. I'm hardcoding setting the idle timeout to be max X mins.
//this way we avoid potential runaway daemons that steal resources on linux and break builds on windows.
//We might leave that in if we decide it's a good idea for an extra safety net.
int maxTimeout = 2 * 60 * 1000;
if (daemonParameters.getIdleTimeout() > maxTimeout) {
daemonParameters.setIdleTimeout(maxTimeout);
}
//end of workaround.
// Create a client that will not match any existing daemons, so it will always startup a new one
ServiceRegistry clientSharedServices = createGlobalClientServices();
ServiceRegistry clientServices = clientSharedServices.get(DaemonClientFactory.class).createSingleUseDaemonClientServices(loggingServices.get(OutputEventListener.class), daemonParameters, System.in);
DaemonClient client = clientServices.get(DaemonClient.class);
return daemonBuildAction(startParameter, daemonParameters, client);
}
    /**
     * Creates the service registry shared by all daemon client variants
     * (stop client, build client, single-use client).
     */
    private ServiceRegistry createGlobalClientServices() {
        return ServiceRegistryBuilder.builder()
                .displayName("Daemon client global services")
                .parent(NativeServices.getInstance())
                .provider(new GlobalScopeServices(false))
                .provider(new DaemonClientGlobalServices())
                .build();
    }
    /**
     * Wraps the given executer in a RunBuildAction, capturing the current
     * working directory, client metadata, build start time, the daemon's
     * effective system properties and this process's environment.
     */
    private Runnable daemonBuildAction(StartParameter startParameter, DaemonParameters daemonParameters, BuildActionExecuter<BuildActionParameters> executer) {
        return new RunBuildAction(executer, startParameter, SystemProperties.getCurrentDir(), clientMetaData(), getBuildStartTime(), daemonParameters.getEffectiveSystemProperties(), System.getenv());
    }
    /**
     * Approximates the build start time with the JVM start timestamp
     * (milliseconds since the epoch, per RuntimeMXBean.getStartTime()).
     */
    private long getBuildStartTime() {
        return ManagementFactory.getRuntimeMXBean().getStartTime();
    }
    /**
     * Creates metadata describing this launcher client, passed to the build action.
     */
    private GradleLauncherMetaData clientMetaData() {
        return new GradleLauncherMetaData();
    }
}
| |
/*
* Copyright 2001-2004 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.axis.configuration;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.io.Writer;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.List;
import javax.xml.namespace.QName;
import org.apache.axis.AxisEngine;
import org.apache.axis.ConfigurationException;
import org.apache.axis.Handler;
import org.apache.axis.WSDDEngineConfiguration;
import org.apache.axis.components.logger.LogFactory;
import org.apache.axis.deployment.wsdd.WSDDDeployment;
import org.apache.axis.deployment.wsdd.WSDDDocument;
import org.apache.axis.deployment.wsdd.WSDDGlobalConfiguration;
import org.apache.axis.encoding.TypeMappingRegistry;
import org.apache.axis.handlers.soap.SOAPService;
import org.apache.axis.utils.Admin;
import org.apache.axis.utils.ClassUtils;
import org.apache.axis.utils.Messages;
import org.apache.axis.utils.XMLUtils;
import org.apache.commons.logging.Log;
import org.w3c.dom.Document;
/**
* A simple ConfigurationProvider that uses the Admin class to read +
* write XML files.
*
* @author Glen Daniels (gdaniels@apache.org)
* @author Glyn Normington (glyn@apache.org)
*/
public class FileProvider implements WSDDEngineConfiguration {
protected static Log log =
LogFactory.getLog(FileProvider.class.getName());
private WSDDDeployment deployment = null;
private String filename;
private File configFile = null;
private InputStream myInputStream = null;
private boolean readOnly = true;
// Should we search the classpath for the file if we don't find it in
// the specified location?
private boolean searchClasspath = true;
/**
* Constructor which accesses a file in the current directory of the
* engine or at an absolute path.
*/
public FileProvider(String filename) {
this.filename = filename;
configFile = new File(filename);
check();
}
/**
* Constructor which accesses a file relative to a specific base
* path.
*/
public FileProvider(String basepath, String filename)
throws ConfigurationException {
this.filename = filename;
File dir = new File(basepath);
/*
* If the basepath is not a readable directory, throw an internal
* exception to make it easier to debug setup problems.
*/
if (!dir.exists() || !dir.isDirectory() || !dir.canRead()) {
throw new ConfigurationException(Messages.getMessage
("invalidConfigFilePath",
basepath));
}
configFile = new File(basepath, filename);
check();
}
/**
* Check the configuration file attributes and remember whether
* or not the file is read-only.
*/
private void check() {
try {
readOnly = configFile.canRead() & !configFile.canWrite();
} catch (SecurityException se){
readOnly = true;
}
/*
* If file is read-only, log informational message
* as configuration changes will not persist.
*/
if (readOnly) {
log.info(Messages.getMessage("readOnlyConfigFile"));
}
}
/**
* Constructor which takes an input stream directly.
* Note: The configuration will be read-only in this case!
*/
public FileProvider(InputStream is) {
setInputStream(is);
}
public void setInputStream(InputStream is) {
myInputStream = is;
}
private InputStream getInputStream() {
return myInputStream;
}
public WSDDDeployment getDeployment() {
return deployment;
}
public void setDeployment(WSDDDeployment deployment) {
this.deployment = deployment;
}
/**
* Determine whether or not we will look for a "*-config.wsdd" file
* on the classpath if we don't find it in the specified location.
*
* @param searchClasspath true if we should search the classpath
*/
public void setSearchClasspath(boolean searchClasspath) {
this.searchClasspath = searchClasspath;
}
public void configureEngine(AxisEngine engine)
throws ConfigurationException {
try {
if (getInputStream() == null) {
try {
setInputStream(new FileInputStream(configFile));
} catch (Exception e) {
if (searchClasspath)
setInputStream(ClassUtils.getResourceAsStream(engine.getClass(), filename, true));
}
}
if (getInputStream() == null) {
throw new ConfigurationException(
Messages.getMessage("noConfigFile"));
}
WSDDDocument doc = new WSDDDocument(XMLUtils.
newDocument(getInputStream()));
deployment = doc.getDeployment();
deployment.configureEngine(engine);
engine.refreshGlobalOptions();
setInputStream(null);
} catch (Exception e) {
throw new ConfigurationException(e);
}
}
/**
* Save the engine configuration. In case there's a problem, we
* write it to a string before saving it out to the actual file so
* we don't screw up the file.
*/
public void writeEngineConfig(AxisEngine engine)
throws ConfigurationException {
if (!readOnly) {
try {
Document doc = Admin.listConfig(engine);
Writer osWriter = new OutputStreamWriter(
new FileOutputStream(configFile),XMLUtils.getEncoding());
PrintWriter writer = new PrintWriter(new BufferedWriter(osWriter));
XMLUtils.DocumentToWriter(doc, writer);
writer.println();
writer.close();
} catch (Exception e) {
throw new ConfigurationException(e);
}
}
}
/**
* retrieve an instance of the named handler
* @param qname XXX
* @return XXX
* @throws ConfigurationException XXX
*/
public Handler getHandler(QName qname) throws ConfigurationException {
return deployment.getHandler(qname);
}
/**
* retrieve an instance of the named service
* @param qname XXX
* @return XXX
* @throws ConfigurationException XXX
*/
public SOAPService getService(QName qname) throws ConfigurationException {
SOAPService service = deployment.getService(qname);
if (service == null) {
throw new ConfigurationException(Messages.getMessage("noService10",
qname.toString()));
}
return service;
}
/**
* Get a service which has been mapped to a particular namespace
*
* @param namespace a namespace URI
* @return an instance of the appropriate Service, or null
*/
public SOAPService getServiceByNamespaceURI(String namespace)
throws ConfigurationException {
return deployment.getServiceByNamespaceURI(namespace);
}
/**
* retrieve an instance of the named transport
* @param qname XXX
* @return XXX
* @throws ConfigurationException XXX
*/
public Handler getTransport(QName qname) throws ConfigurationException {
return deployment.getTransport(qname);
}
public TypeMappingRegistry getTypeMappingRegistry()
throws ConfigurationException {
return deployment.getTypeMappingRegistry();
}
/**
* Returns a global request handler.
*/
public Handler getGlobalRequest() throws ConfigurationException {
return deployment.getGlobalRequest();
}
/**
* Returns a global response handler.
*/
public Handler getGlobalResponse() throws ConfigurationException {
return deployment.getGlobalResponse();
}
/**
* Returns the global configuration options.
*/
public Hashtable getGlobalOptions() throws ConfigurationException {
WSDDGlobalConfiguration globalConfig
= deployment.getGlobalConfiguration();
if (globalConfig != null)
return globalConfig.getParametersTable();
return null;
}
/**
* Get an enumeration of the services deployed to this engine
*/
public Iterator getDeployedServices() throws ConfigurationException {
return deployment.getDeployedServices();
}
/**
* Get a list of roles that this engine plays globally. Services
* within the engine configuration may also add additional roles.
*
* @return a <code>List</code> of the roles for this engine
*/
public List getRoles() {
return deployment.getRoles();
}
}
| |
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chrome.browser.webapps;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.os.Build;
import android.preference.PreferenceManager;
import android.test.suitebuilder.annotation.MediumTest;
import android.view.View;
import org.chromium.base.ApplicationStatus;
import org.chromium.base.ThreadUtils;
import org.chromium.base.test.util.CommandLineFlags;
import org.chromium.base.test.util.DisabledTest;
import org.chromium.base.test.util.UrlUtils;
import org.chromium.chrome.browser.ChromeActivity;
import org.chromium.chrome.browser.ChromeSwitches;
import org.chromium.chrome.browser.ChromeTabbedActivity;
import org.chromium.chrome.browser.ShortcutHelper;
import org.chromium.chrome.browser.ShortcutSource;
import org.chromium.chrome.browser.document.DocumentActivity;
import org.chromium.chrome.browser.tab.Tab;
import org.chromium.chrome.browser.tab.TabIdManager;
import org.chromium.chrome.test.MultiActivityTestBase;
import org.chromium.chrome.test.util.ActivityUtils;
import org.chromium.chrome.test.util.ApplicationTestUtils;
import org.chromium.chrome.test.util.DisableInTabbedMode;
import org.chromium.chrome.test.util.browser.TabLoadObserver;
import org.chromium.content.browser.test.util.Criteria;
import org.chromium.content.browser.test.util.CriteriaHelper;
import org.chromium.content.browser.test.util.JavaScriptUtils;
import org.chromium.content.browser.test.util.TouchCommon;
import org.chromium.content_public.common.ScreenOrientationValues;
import java.lang.ref.WeakReference;
import java.util.List;
/**
* Tests that WebappActivities are launched correctly.
*
* This test seems a little wonky because WebappActivities launched differently, depending on what
* OS the user is on. Pre-L, WebappActivities were manually instanced and assigned by the
* WebappManager. On L and above, WebappActivities are automatically instanced by Android and the
* FLAG_ACTIVITY_NEW_DOCUMENT mechanism. Moreover, we don't have access to the task list pre-L so
* we have to assume that any non-running WebappActivities are not listed in Android's Overview.
*/
public class WebappModeTest extends MultiActivityTestBase {
    private static final String WEBAPP_1_ID = "webapp_id_1";
    private static final String WEBAPP_1_URL =
            UrlUtils.encodeHtmlDataUri("<html><body bgcolor='#011684'>Webapp 1</body></html>");
    private static final String WEBAPP_1_TITLE = "Web app #1";

    private static final String WEBAPP_2_ID = "webapp_id_2";
    private static final String WEBAPP_2_URL =
            UrlUtils.encodeHtmlDataUri("<html><body bgcolor='#840116'>Webapp 2</body></html>");
    private static final String WEBAPP_2_TITLE = "Web app #2";

    // Base64-encoded 1x1 PNG used as the icon in the webapp launch Intents.
    private static final String WEBAPP_ICON = "iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAIAAACQd1PeAAAACXB"
            + "IWXMAAAsTAAALEwEAmpwYAAAAB3RJTUUH3wQIFB4cxOfiSQAAABl0RVh0Q29tbWVudABDcmVhdGVkIHdpdG"
            + "ggR0lNUFeBDhcAAAAMSURBVAjXY2AUawEAALcAnI/TkI8AAAAASUVORK5CYII=";

    /**
     * Polls until the number of running WebappActivities (and, on Lollipop and
     * above, the number of Chrome tasks) matches the expectation; returns
     * whether the criteria were satisfied before the poll gave up.
     */
    private boolean isNumberOfRunningActivitiesCorrect(final int numActivities) throws Exception {
        return CriteriaHelper.pollForCriteria(new Criteria() {
            @Override
            public boolean isSatisfied() {
                Context context = getInstrumentation().getTargetContext();
                if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP
                        && ApplicationTestUtils.getNumChromeTasks(context) != numActivities) {
                    return false;
                }

                // Count the live WebappActivity instances among all running Activities.
                int count = 0;
                List<WeakReference<Activity>> activities = ApplicationStatus.getRunningActivities();
                for (WeakReference<Activity> activity : activities) {
                    if (activity.get() instanceof WebappActivity) count++;
                }
                return count == numActivities;
            }
        });
    }

    /**
     * Fires the Intent that launches a webapp with the given data, then waits
     * for Chrome to come to the foreground. When {@code addMac} is false the
     * Intent fails the security check and the URL opens in a browser window.
     */
    private void fireWebappIntent(String id, String url, String title, String icon,
            boolean addMac) throws Exception {
        Intent intent = new Intent();
        intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
        intent.setPackage(getInstrumentation().getTargetContext().getPackageName());
        intent.setAction(WebappLauncherActivity.ACTION_START_WEBAPP);
        if (addMac) {
            // Needed for security reasons. If the MAC is excluded, the URL of the webapp is opened
            // in a browser window, instead.
            String mac = ShortcutHelper.getEncodedMac(getInstrumentation().getTargetContext(), url);
            intent.putExtra(ShortcutHelper.EXTRA_MAC, mac);
        }

        WebappInfo webappInfo = WebappInfo.create(id, url, icon, title, null,
                ScreenOrientationValues.PORTRAIT, ShortcutSource.UNKNOWN,
                ShortcutHelper.MANIFEST_COLOR_INVALID_OR_MISSING,
                ShortcutHelper.MANIFEST_COLOR_INVALID_OR_MISSING, false);
        webappInfo.setWebappIntentExtras(intent);

        getInstrumentation().getTargetContext().startActivity(intent);
        getInstrumentation().waitForIdleSync();
        ApplicationTestUtils.waitUntilChromeInForeground();
    }

    @Override
    public void setUp() throws Exception {
        super.setUp();

        // Register the webapps so when the data storage is opened, the test doesn't crash. There is
        // no race condition with the retrieval as AsyncTasks are run sequentially on the background
        // thread.
        WebappRegistry.registerWebapp(getInstrumentation().getTargetContext(), WEBAPP_1_ID);
        WebappRegistry.registerWebapp(getInstrumentation().getTargetContext(), WEBAPP_2_ID);
    }

    /**
     * Tests that WebappActivities are started properly.
     */
    @MediumTest
    public void testWebappLaunches() throws Exception {
        final Activity firstActivity =
                startWebappActivity(WEBAPP_1_ID, WEBAPP_1_URL, WEBAPP_1_TITLE, WEBAPP_ICON);
        assertTrue(isNumberOfRunningActivitiesCorrect(1));

        // Firing a different Intent should start a new WebappActivity instance.
        fireWebappIntent(WEBAPP_2_ID, WEBAPP_2_URL, WEBAPP_2_TITLE, WEBAPP_ICON, true);
        assertTrue(CriteriaHelper.pollForUIThreadCriteria(new Criteria() {
            @Override
            public boolean isSatisfied() {
                Activity lastActivity = ApplicationStatus.getLastTrackedFocusedActivity();
                return isWebappActivityReady(lastActivity) && lastActivity != firstActivity;
            }
        }));
        assertTrue(isNumberOfRunningActivitiesCorrect(2));

        // Firing the first Intent should bring back the first WebappActivity instance.
        fireWebappIntent(WEBAPP_1_ID, WEBAPP_1_URL, WEBAPP_1_TITLE, WEBAPP_ICON, true);
        assertTrue(CriteriaHelper.pollForUIThreadCriteria(new Criteria() {
            @Override
            public boolean isSatisfied() {
                Activity lastActivity = ApplicationStatus.getLastTrackedFocusedActivity();
                return isWebappActivityReady(lastActivity) && lastActivity == firstActivity;
            }
        }));
        assertTrue(isNumberOfRunningActivitiesCorrect(2));
    }

    /**
     * Tests that the WebappActivity gets the next available Tab ID instead of 0.
     */
    @MediumTest
    public void testWebappTabIdsProperlyAssigned() throws Exception {
        // Seed the next Tab ID so we can detect whether it is actually honored.
        Context context = getInstrumentation().getTargetContext();
        SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context);
        SharedPreferences.Editor editor = prefs.edit();
        editor.putInt(TabIdManager.PREF_NEXT_ID, 11684);
        editor.apply();

        final WebappActivity webappActivity =
                startWebappActivity(WEBAPP_1_ID, WEBAPP_1_URL, WEBAPP_1_TITLE, WEBAPP_ICON);
        assertTrue(isNumberOfRunningActivitiesCorrect(1));
        assertEquals("Wrong Tab ID was used", 11684, webappActivity.getActivityTab().getId());
    }

    /**
     * Tests that a WebappActivity can be brought forward by calling
     * WebContentsDelegateAndroid.activateContents().
     *
     * Flaky: https://crbug.com/539755
     * @MediumTest
     */
    @DisabledTest
    public void testActivateContents() throws Exception {
        runForegroundingTest(true);
    }

    /**
     * Tests that a WebappActivity can be brought forward by firing an Intent with
     * TabOpenType.BRING_TAB_TO_FRONT.
     *
     * Flaky: https://crbug.com/539755
     * @MediumTest
     */
    @DisabledTest
    public void testBringTabToFront() throws Exception {
        runForegroundingTest(false);
    }

    /**
     * Backgrounds a running WebappActivity, brings it forward again — either
     * via activateContents() or via a bring-tab-to-front Intent — and confirms
     * that the same Activity instance regains window focus.
     */
    private void runForegroundingTest(boolean viaActivateContents) throws Exception {
        // Start the WebappActivity.
        final WebappActivity activity =
                startWebappActivity(WEBAPP_1_ID, WEBAPP_1_URL, WEBAPP_1_TITLE, WEBAPP_ICON);
        assertTrue(isNumberOfRunningActivitiesCorrect(1));

        // Return home.
        final Context context = getInstrumentation().getTargetContext();
        ApplicationTestUtils.fireHomeScreenIntent(context);
        getInstrumentation().waitForIdleSync();

        if (viaActivateContents) {
            // Bring it back via the Tab.
            activity.getActivityTab().getTabWebContentsDelegateAndroid().activateContents();
        } else {
            // Bring the WebappActivity back via an Intent.
            int webappTabId = activity.getActivityTab().getId();
            Intent intent = Tab.createBringTabToFrontIntent(webappTabId);
            intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
            context.startActivity(intent);
        }

        // When Chrome is back in the foreground, confirm that the original Activity was restored.
        getInstrumentation().waitForIdleSync();
        ApplicationTestUtils.waitUntilChromeInForeground();
        assertTrue(CriteriaHelper.pollForCriteria(new Criteria() {
            @Override
            public boolean isSatisfied() {
                return activity == ApplicationStatus.getLastTrackedFocusedActivity()
                        && activity.hasWindowFocus();
            }
        }));
        assertTrue(isNumberOfRunningActivitiesCorrect(1));
    }

    /**
     * Ensure WebappActivities can't be launched without proper security checks.
     */
    @MediumTest
    public void testWebappRequiresValidMac() throws Exception {
        // Try to start a WebappActivity. Fail because the Intent is insecure.
        fireWebappIntent(WEBAPP_1_ID, WEBAPP_1_URL, WEBAPP_1_TITLE, WEBAPP_ICON, false);
        assertTrue(CriteriaHelper.pollForUIThreadCriteria(new Criteria() {
            @Override
            public boolean isSatisfied() {
                Activity lastActivity = ApplicationStatus.getLastTrackedFocusedActivity();
                if (!lastActivity.findViewById(android.R.id.content).hasWindowFocus()) return false;
                return lastActivity instanceof ChromeTabbedActivity
                        || lastActivity instanceof DocumentActivity;
            }
        }));
        final Activity firstActivity = ApplicationStatus.getLastTrackedFocusedActivity();

        // Firing a correct Intent should start a new WebappActivity instance.
        fireWebappIntent(WEBAPP_2_ID, WEBAPP_2_URL, WEBAPP_2_TITLE, WEBAPP_ICON, true);
        assertTrue(CriteriaHelper.pollForUIThreadCriteria(new Criteria() {
            @Override
            public boolean isSatisfied() {
                Activity lastActivity = ApplicationStatus.getLastTrackedFocusedActivity();
                return isWebappActivityReady(lastActivity) && lastActivity != firstActivity;
            }
        }));
    }

    /**
     * Tests that WebappActivities handle window.open() properly in document mode.
     */
    @DisableInTabbedMode
    @MediumTest
    public void testWebappHandlesWindowOpenInDocumentMode() throws Exception {
        triggerWindowOpenAndWaitForLoad(DocumentActivity.class, ONCLICK_LINK, true);
    }

    /**
     * Tests that WebappActivities handle window.open() properly in tabbed mode.
     */
    @CommandLineFlags.Add(ChromeSwitches.DISABLE_DOCUMENT_MODE)
    @MediumTest
    public void testWebappHandlesWindowOpenInTabbedMode() throws Exception {
        triggerWindowOpenAndWaitForLoad(ChromeTabbedActivity.class, ONCLICK_LINK, true);
    }

    /**
     * Tests that WebappActivities handle suppressed window.open() properly in document mode.
     */
    @DisableInTabbedMode
    @MediumTest
    public void testWebappHandlesSuppressedWindowOpenInDocumentMode() throws Exception {
        triggerWindowOpenAndWaitForLoad(DocumentActivity.class, HREF_NO_REFERRER_LINK, false);
    }

    /**
     * Tests that WebappActivities handle suppressed window.open() properly in tabbed mode.
     */
    @CommandLineFlags.Add(ChromeSwitches.DISABLE_DOCUMENT_MODE)
    @MediumTest
    public void testWebappHandlesSuppressedWindowOpenInTabbedMode() throws Exception {
        triggerWindowOpenAndWaitForLoad(ChromeTabbedActivity.class, HREF_NO_REFERRER_LINK, false);
    }

    /**
     * Loads a page with the given link markup into a WebappActivity, clicks it
     * to trigger window.open(), waits for the resulting Activity of the given
     * class to finish loading, then closes the child window and waits for the
     * webapp to come back to the foreground.
     */
    private <T extends ChromeActivity> void triggerWindowOpenAndWaitForLoad(
            Class<T> classToWaitFor, String linkHtml, boolean checkContents) throws Exception {
        final WebappActivity webappActivity =
                startWebappActivity(WEBAPP_1_ID, WEBAPP_1_URL, WEBAPP_1_TITLE, WEBAPP_ICON);
        assertTrue(isNumberOfRunningActivitiesCorrect(1));

        // Load up the test page.
        assertTrue(CriteriaHelper.pollForCriteria(
                new TabLoadObserver(webappActivity.getActivityTab(), linkHtml)));

        // Do a plain click to make the link open in the main browser via a window.open().
        // If the window is opened successfully, javascript on the first page triggers and changes
        // its URL as a signal for this test.
        Runnable fgTrigger = new Runnable() {
            @Override
            public void run() {
                ThreadUtils.runOnUiThreadBlocking(new Runnable() {
                    @Override
                    public void run() {
                        View view = webappActivity.findViewById(android.R.id.content).getRootView();
                        TouchCommon.singleClickView(view);
                    }
                });
            }
        };
        ChromeActivity secondActivity = ActivityUtils.waitForActivity(
                getInstrumentation(), classToWaitFor, fgTrigger);
        waitForFullLoad(secondActivity, "Page 4");
        if (checkContents) {
            assertEquals("New WebContents was not created",
                    SUCCESS_URL, webappActivity.getActivityTab().getUrl());
        }
        assertNotSame("Wrong Activity in foreground",
                webappActivity, ApplicationStatus.getLastTrackedFocusedActivity());

        // Close the child window to kick the user back to the WebappActivity.
        JavaScriptUtils.executeJavaScript(
                secondActivity.getActivityTab().getWebContents(), "window.close()");
        assertTrue(CriteriaHelper.pollForUIThreadCriteria(new Criteria() {
            @Override
            public boolean isSatisfied() {
                return webappActivity == ApplicationStatus.getLastTrackedFocusedActivity();
            }
        }));
        ApplicationTestUtils.waitUntilChromeInForeground();
    }

    /**
     * Starts a WebappActivity for the given data and waits for it to be initialized. We can't use
     * ActivityUtils.waitForActivity() because of the way WebappActivity is instanced on pre-L
     * devices.
     */
    private WebappActivity startWebappActivity(String id, String url, String title, String icon)
            throws Exception {
        fireWebappIntent(id, url, title, icon, true);
        assertTrue(CriteriaHelper.pollForUIThreadCriteria(new Criteria() {
            @Override
            public boolean isSatisfied() {
                Activity lastActivity = ApplicationStatus.getLastTrackedFocusedActivity();
                return isWebappActivityReady(lastActivity);
            }
        }));
        return (WebappActivity) ApplicationStatus.getLastTrackedFocusedActivity();
    }

    /** Returns true when the last Activity is a WebappActivity and is ready for testing. */
    private boolean isWebappActivityReady(Activity lastActivity) {
        if (!(lastActivity instanceof WebappActivity)) return false;
        WebappActivity webappActivity = (WebappActivity) lastActivity;

        // The Activity must have a live Tab and hold window focus before the
        // tests can interact with it.
        if (webappActivity.getActivityTab() == null) return false;

        View rootView = webappActivity.findViewById(android.R.id.content);
        if (!rootView.hasWindowFocus()) return false;

        return true;
    }
}
| |
/*
* Copyright 2012-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.bind;
import java.io.IOException;
import java.util.Collections;
import javax.validation.Validation;
import javax.validation.constraints.NotNull;
import org.junit.Test;
import org.springframework.beans.NotWritablePropertyException;
import org.springframework.boot.context.config.RandomValuePropertySource;
import org.springframework.context.support.StaticMessageSource;
import org.springframework.core.env.MutablePropertySources;
import org.springframework.core.env.StandardEnvironment;
import org.springframework.core.env.SystemEnvironmentPropertySource;
import org.springframework.core.io.ByteArrayResource;
import org.springframework.core.io.support.PropertiesLoaderUtils;
import org.springframework.mock.env.MockPropertySource;
import org.springframework.validation.BindException;
import org.springframework.validation.Validator;
import org.springframework.validation.beanvalidation.SpringValidatorAdapter;
import static org.assertj.core.api.Assertions.assertThat;
/**
* Tests for {@link PropertiesConfigurationFactory}.
*
* @author Dave Syer
*/
public class PropertiesConfigurationFactoryTests {

    // Factory under test; re-created by setupFactory() for each scenario.
    private PropertiesConfigurationFactory<Foo> factory;

    // Optional validator applied during binding; null means no validation.
    private Validator validator;

    // Controls whether unknown properties are tolerated; passed to the factory.
    private boolean ignoreUnknownFields = true;

    // Optional target name (prefix) for the bound bean; null means no prefix.
    private String targetName = null;

    @Test
    public void testValidPropertiesLoadsWithDash() throws Exception {
        // Relaxed binding: "na-me" should bind to the "name" property.
        Foo foo = createFoo("na-me: blah\nbar: blah");
        assertThat(foo.bar).isEqualTo("blah");
        assertThat(foo.name).isEqualTo("blah");
    }

    @Test
    public void testUnknownPropertyOkByDefault() throws Exception {
        // "hi" has no matching property on Foo but must not break the bind.
        Foo foo = createFoo("hi: hello\nname: foo\nbar: blah");
        assertThat(foo.bar).isEqualTo("blah");
    }

    @Test(expected = NotWritablePropertyException.class)
    public void testUnknownPropertyCausesLoadFailure() throws Exception {
        this.ignoreUnknownFields = false;
        createFoo("hi: hello\nname: foo\nbar: blah");
    }

    @Test(expected = BindException.class)
    public void testMissingPropertyCausesValidationError() throws Exception {
        // Foo.name is @NotNull, so binding without it must fail validation.
        this.validator = new SpringValidatorAdapter(
                Validation.buildDefaultValidatorFactory().getValidator());
        createFoo("bar: blah");
    }

    @Test
    public void testValidationErrorCanBeSuppressed() throws Exception {
        this.validator = new SpringValidatorAdapter(
                Validation.buildDefaultValidatorFactory().getValidator());
        setupFactory();
        // With exceptionIfInvalid=false the invalid binding must not throw.
        this.factory.setExceptionIfInvalid(false);
        bindFoo("bar: blah");
    }

    @Test
    public void systemEnvironmentBindingFailuresAreIgnored() throws Exception {
        setupFactory();
        MutablePropertySources propertySources = new MutablePropertySources();
        MockPropertySource propertySource = new MockPropertySource(
                StandardEnvironment.SYSTEM_ENVIRONMENT_PROPERTY_SOURCE_NAME);
        propertySource.setProperty("doesNotExist", "foo");
        propertySource.setProperty("name", "bar");
        propertySources.addFirst(propertySource);
        this.factory.setPropertySources(propertySources);
        this.factory.setIgnoreUnknownFields(false);
        this.factory.afterPropertiesSet();
        Foo foo = this.factory.getObject();
        assertThat(foo.name).isEqualTo("bar");
    }

    @Test
    public void systemPropertyBindingFailuresAreIgnored() throws Exception {
        setupFactory();
        MutablePropertySources propertySources = new MutablePropertySources();
        MockPropertySource propertySource = new MockPropertySource(
                StandardEnvironment.SYSTEM_PROPERTIES_PROPERTY_SOURCE_NAME);
        propertySource.setProperty("doesNotExist", "foo");
        propertySource.setProperty("name", "bar");
        propertySources.addFirst(propertySource);
        this.factory.setPropertySources(propertySources);
        this.factory.setIgnoreUnknownFields(false);
        // Success criterion: binding completes without throwing despite the
        // unknown "doesNotExist" key (no result assertion is intended here).
        this.factory.afterPropertiesSet();
    }

    @Test
    public void testBindWithDashPrefix() throws Exception {
        // gh-4045
        this.targetName = "foo-bar";
        MutablePropertySources propertySources = new MutablePropertySources();
        propertySources.addLast(new SystemEnvironmentPropertySource("systemEnvironment",
                Collections.<String, Object>singletonMap("FOO_BAR_NAME", "blah")));
        propertySources.addLast(new RandomValuePropertySource());
        setupFactory();
        this.factory.setPropertySources(propertySources);
        this.factory.afterPropertiesSet();
        Foo foo = this.factory.getObject();
        assertThat(foo.name).isEqualTo("blah");
    }

    @Test
    public void testBindWithDelimitedPrefixUsingMatchingDelimiter() throws Exception {
        this.targetName = "env_foo";
        this.ignoreUnknownFields = false;
        MutablePropertySources propertySources = new MutablePropertySources();
        propertySources.addLast(new SystemEnvironmentPropertySource("systemEnvironment",
                Collections.<String, Object>singletonMap("ENV_FOO_NAME", "blah")));
        propertySources.addLast(new RandomValuePropertySource("random"));
        setupFactory();
        this.factory.setPropertySources(propertySources);
        this.factory.afterPropertiesSet();
        Foo foo = this.factory.getObject();
        assertThat(foo.name).isEqualTo("blah");
    }

    @Test
    public void testBindWithDelimitedPrefixUsingDifferentDelimiter() throws Exception {
        this.targetName = "env.foo";
        MutablePropertySources propertySources = new MutablePropertySources();
        propertySources.addLast(new SystemEnvironmentPropertySource("systemEnvironment",
                Collections.<String, Object>singletonMap("ENV_FOO_NAME", "blah")));
        propertySources.addLast(new RandomValuePropertySource("random"));
        this.ignoreUnknownFields = false;
        setupFactory();
        this.factory.setPropertySources(propertySources);
        this.factory.afterPropertiesSet();
        Foo foo = this.factory.getObject();
        assertThat(foo.name).isEqualTo("blah");
    }

    /** Builds a fresh factory, then binds the given properties text to a Foo. */
    private Foo createFoo(final String values) throws Exception {
        setupFactory();
        return bindFoo(values);
    }

    /** Binds the given properties text using the currently configured factory. */
    private Foo bindFoo(final String values) throws Exception {
        this.factory.setProperties(PropertiesLoaderUtils
                .loadProperties(new ByteArrayResource(values.getBytes())));
        this.factory.afterPropertiesSet();
        return this.factory.getObject();
    }

    /** Creates the factory, applying the test's current validator/name/flags. */
    private void setupFactory() throws IOException {
        this.factory = new PropertiesConfigurationFactory<Foo>(Foo.class);
        this.factory.setValidator(this.validator);
        this.factory.setTargetName(this.targetName);
        this.factory.setIgnoreUnknownFields(this.ignoreUnknownFields);
        this.factory.setMessageSource(new StaticMessageSource());
    }

    // Foo needs to be public and to have setters for all properties
    public static class Foo {

        // Required: binding with no value for "name" fails validation when a
        // validator is configured (see testMissingPropertyCausesValidationError).
        @NotNull
        private String name;

        private String bar;

        private String spring_foo_baz;

        private String fooBar;

        public String getSpringFooBaz() {
            return this.spring_foo_baz;
        }

        public void setSpringFooBaz(String spring_foo_baz) {
            this.spring_foo_baz = spring_foo_baz;
        }

        public String getName() {
            return this.name;
        }

        public void setName(String name) {
            this.name = name;
        }

        public String getBar() {
            return this.bar;
        }

        public void setBar(String bar) {
            this.bar = bar;
        }

        public String getFooBar() {
            return this.fooBar;
        }

        public void setFooBar(String fooBar) {
            this.fooBar = fooBar;
        }
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.