gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
package org.multiverse.stms.gamma.transactionalobjects.txnref;
import org.junit.Before;
import org.junit.Test;
import org.multiverse.api.LockMode;
import org.multiverse.api.exceptions.LockedException;
import org.multiverse.stms.gamma.GammaStm;
import org.multiverse.stms.gamma.transactionalobjects.GammaTxnRef;
import org.multiverse.stms.gamma.transactions.GammaTxn;
import static org.junit.Assert.*;
import static org.multiverse.TestUtils.assertOrecValue;
import static org.multiverse.api.TxnThreadLocal.clearThreadLocalTxn;
import static org.multiverse.api.TxnThreadLocal.setThreadLocalTxn;
import static org.multiverse.stms.gamma.GammaTestUtils.assertGlobalConflictCount;
import static org.multiverse.stms.gamma.GammaTestUtils.assertVersionAndValue;
import static org.multiverse.stms.gamma.GammaTestUtils.makeReadBiased;
/**
 * Tests for {@code GammaTxnRef.atomicIsNull()}.
 *
 * <p>An atomic read ignores any running transaction and reads the committed value directly.
 * It succeeds under a Read or Write lock and fails with {@link LockedException} only under an
 * Exclusive lock. Every scenario is exercised for both write-biased and read-biased orecs, and
 * each test verifies the call leaves the orec, the version/value and the global conflict
 * counter untouched.
 */
public class GammaTxnRef_atomicIsNullTest {

    private GammaStm stm;

    @Before
    public void setUp() {
        stm = new GammaStm();
        clearThreadLocalTxn();
    }

    /**
     * Calls {@code ref.atomicIsNull()} and asserts the expected result, and that the call had
     * no side effect on the orec, the version/value and the global conflict count.
     */
    private void assertAtomicIsNullYields(
            GammaTxnRef<String> ref, boolean expectedResult, long expectedVersion, String expectedValue) {
        // Capture pre-call state so we can prove the atomic read is side-effect free.
        long orecValue = ref.orec;
        long conflictCount = stm.globalConflictCounter.count();

        boolean result = ref.atomicIsNull();

        assertEquals(expectedResult, result);
        assertOrecValue(ref, orecValue);
        assertVersionAndValue(ref, expectedVersion, expectedValue);
        assertGlobalConflictCount(stm, conflictCount);
    }

    /**
     * Calls {@code ref.atomicIsNull()} expecting a {@link LockedException}, and asserts the
     * failed call had no side effect on the orec, the version/value and the global conflict count.
     */
    private void assertAtomicIsNullThrowsLocked(
            GammaTxnRef<String> ref, long expectedVersion, String expectedValue) {
        long orecValue = ref.orec;
        long conflictCount = stm.globalConflictCounter.count();

        try {
            ref.atomicIsNull();
            fail();
        } catch (LockedException expected) {
            // expected: an exclusive lock blocks atomic reads
        }

        assertOrecValue(ref, orecValue);
        assertVersionAndValue(ref, expectedVersion, expectedValue);
        assertGlobalConflictCount(stm, conflictCount);
    }

    // ==================== write biased ========================================

    @Test
    public void writeBiased_whenReadLocked_thenSuccess() {
        GammaTxnRef<String> ref = new GammaTxnRef<String>(stm, null);
        long initialVersion = ref.getVersion();

        GammaTxn tx = stm.newDefaultTxn();
        ref.getLock().acquire(tx, LockMode.Read);

        // A read lock does not prevent an atomic read.
        assertAtomicIsNullYields(ref, true, initialVersion, null);
    }

    @Test
    public void writeBiased_whenWriteLocked_thenSuccess() {
        GammaTxnRef<String> ref = new GammaTxnRef<String>(stm, null);
        long initialVersion = ref.getVersion();

        GammaTxn tx = stm.newDefaultTxn();
        ref.getLock().acquire(tx, LockMode.Write);

        // A write lock still permits reading; only the exclusive lock blocks it.
        assertAtomicIsNullYields(ref, true, initialVersion, null);
    }

    @Test
    public void writeBiased_whenExclusiveLocked_thenLockedException() {
        GammaTxnRef<String> ref = new GammaTxnRef<String>(stm, null);
        long initialVersion = ref.getVersion();

        GammaTxn tx = stm.newDefaultTxn();
        ref.getLock().acquire(tx, LockMode.Exclusive);

        assertAtomicIsNullThrowsLocked(ref, initialVersion, null);
    }

    @Test
    public void writeBiased_whenNull() {
        GammaTxnRef<String> ref = new GammaTxnRef<String>(stm, null);
        long initialVersion = ref.getVersion();

        assertAtomicIsNullYields(ref, true, initialVersion, null);
    }

    @Test
    public void writeBiased_whenActiveTransactionAvailable_thenIgnored() {
        String initialValue = "foo";
        GammaTxnRef<String> ref = new GammaTxnRef<String>(stm, initialValue);
        long initialVersion = ref.getVersion();

        GammaTxn tx = stm.newDefaultTxn();
        setThreadLocalTxn(tx);
        ref.set(tx, null);

        // The uncommitted transactional write to null must be invisible to the atomic read.
        assertAtomicIsNullYields(ref, false, initialVersion, initialValue);
    }

    @Test
    public void writeBiased_whenNotNull() {
        String initialValue = "foo";
        GammaTxnRef<String> ref = new GammaTxnRef<String>(stm, initialValue);
        long initialVersion = ref.getVersion();

        assertAtomicIsNullYields(ref, false, initialVersion, initialValue);
    }

    // ==================== read biased ========================================

    @Test
    public void readBiased_whenReadLocked_thenSuccess() {
        GammaTxnRef<String> ref = makeReadBiased(new GammaTxnRef<String>(stm, null));
        long initialVersion = ref.getVersion();

        GammaTxn tx = stm.newDefaultTxn();
        ref.getLock().acquire(tx, LockMode.Read);

        assertAtomicIsNullYields(ref, true, initialVersion, null);
    }

    @Test
    public void readBiased_whenWriteLocked_thenSuccess() {
        GammaTxnRef<String> ref = makeReadBiased(new GammaTxnRef<String>(stm, null));
        long initialVersion = ref.getVersion();

        GammaTxn tx = stm.newDefaultTxn();
        ref.getLock().acquire(tx, LockMode.Write);

        assertAtomicIsNullYields(ref, true, initialVersion, null);
    }

    @Test
    public void readBiased_whenExclusiveLocked_thenLockedException() {
        GammaTxnRef<String> ref = makeReadBiased(new GammaTxnRef<String>(stm, null));
        long initialVersion = ref.getVersion();

        GammaTxn tx = stm.newDefaultTxn();
        ref.getLock().acquire(tx, LockMode.Exclusive);

        assertAtomicIsNullThrowsLocked(ref, initialVersion, null);
    }

    @Test
    public void readBiased_whenNull() {
        GammaTxnRef<String> ref = makeReadBiased(new GammaTxnRef<String>(stm, null));
        long initialVersion = ref.getVersion();

        assertAtomicIsNullYields(ref, true, initialVersion, null);
    }

    @Test
    public void readBiased_whenActiveTransactionAvailable_thenIgnored() {
        String initialValue = "foo";
        GammaTxnRef<String> ref = makeReadBiased(new GammaTxnRef<String>(stm, initialValue));
        long initialVersion = ref.getVersion();

        GammaTxn tx = stm.newDefaultTxn();
        setThreadLocalTxn(tx);
        ref.set(tx, null);

        assertAtomicIsNullYields(ref, false, initialVersion, initialValue);
    }

    @Test
    public void readBiased_whenNotNull() {
        String initialValue = "foo";
        GammaTxnRef<String> ref = makeReadBiased(new GammaTxnRef<String>(stm, initialValue));
        long initialVersion = ref.getVersion();

        assertAtomicIsNullYields(ref, false, initialVersion, initialValue);
    }
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dataproc/v1beta2/workflow_templates.proto
package com.google.cloud.dataproc.v1beta2;
/**
*
*
* <pre>
* Configuration for parameter validation.
* </pre>
*
* Protobuf type {@code google.cloud.dataproc.v1beta2.ParameterValidation}
*/
public final class ParameterValidation extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.dataproc.v1beta2.ParameterValidation)
ParameterValidationOrBuilder {
private static final long serialVersionUID = 0L;

// Use ParameterValidation.newBuilder() to construct.
private ParameterValidation(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

// No-arg constructor; leaves the validation_type oneof unset (validationTypeCase_ stays 0).
private ParameterValidation() {}

// Exposes fields that were present on the wire but are unknown to this schema version.
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
  return this.unknownFields;
}
/**
 * Wire-format parsing constructor: reads tag/value pairs from {@code input} until EOF
 * (tag 0), dispatching on the tag to populate the validation_type oneof. Unknown fields
 * are preserved in {@code unknownFields}. Generated code — do not hand-edit.
 */
private ParameterValidation(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  this();
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  int mutable_bitField0_ = 0;
  com.google.protobuf.UnknownFieldSet.Builder unknownFields =
      com.google.protobuf.UnknownFieldSet.newBuilder();
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          // End of stream.
          done = true;
          break;
        case 10:
          {
            // Field 1 (regex). If the oneof already holds a RegexValidation, merge into it.
            com.google.cloud.dataproc.v1beta2.RegexValidation.Builder subBuilder = null;
            if (validationTypeCase_ == 1) {
              subBuilder =
                  ((com.google.cloud.dataproc.v1beta2.RegexValidation) validationType_)
                      .toBuilder();
            }
            validationType_ =
                input.readMessage(
                    com.google.cloud.dataproc.v1beta2.RegexValidation.parser(),
                    extensionRegistry);
            if (subBuilder != null) {
              subBuilder.mergeFrom(
                  (com.google.cloud.dataproc.v1beta2.RegexValidation) validationType_);
              validationType_ = subBuilder.buildPartial();
            }
            validationTypeCase_ = 1;
            break;
          }
        case 18:
          {
            // Field 2 (values). Same merge-if-already-set pattern as field 1.
            com.google.cloud.dataproc.v1beta2.ValueValidation.Builder subBuilder = null;
            if (validationTypeCase_ == 2) {
              subBuilder =
                  ((com.google.cloud.dataproc.v1beta2.ValueValidation) validationType_)
                      .toBuilder();
            }
            validationType_ =
                input.readMessage(
                    com.google.cloud.dataproc.v1beta2.ValueValidation.parser(),
                    extensionRegistry);
            if (subBuilder != null) {
              subBuilder.mergeFrom(
                  (com.google.cloud.dataproc.v1beta2.ValueValidation) validationType_);
              validationType_ = subBuilder.buildPartial();
            }
            validationTypeCase_ = 2;
            break;
          }
        default:
          {
            // Unknown field: keep it (or stop if the helper signals end of group).
            if (!parseUnknownFieldProto3(input, unknownFields, extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
      }
    }
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
  } finally {
    // Always seal the unknown-field set, even on failure, so the partial message is usable.
    this.unknownFields = unknownFields.build();
    makeExtensionsImmutable();
  }
}
// Descriptor for this message type, generated from workflow_templates.proto.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.dataproc.v1beta2.WorkflowTemplatesProto
      .internal_static_google_cloud_dataproc_v1beta2_ParameterValidation_descriptor;
}

// Wires reflective field access to the generated descriptor table.
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.dataproc.v1beta2.WorkflowTemplatesProto
      .internal_static_google_cloud_dataproc_v1beta2_ParameterValidation_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.dataproc.v1beta2.ParameterValidation.class,
          com.google.cloud.dataproc.v1beta2.ParameterValidation.Builder.class);
}
// validation_type oneof state: 0 = unset, 1 = regex, 2 = values; validationType_ holds the
// currently-set message (RegexValidation or ValueValidation), or null when unset.
private int validationTypeCase_ = 0;
private java.lang.Object validationType_;

/** Typed view over the validation_type oneof discriminator. */
public enum ValidationTypeCase implements com.google.protobuf.Internal.EnumLite {
  REGEX(1),
  VALUES(2),
  VALIDATIONTYPE_NOT_SET(0);
  private final int value;

  private ValidationTypeCase(int value) {
    this.value = value;
  }

  /** @deprecated Use {@link #forNumber(int)} instead. */
  @java.lang.Deprecated
  public static ValidationTypeCase valueOf(int value) {
    return forNumber(value);
  }

  /** Maps a field number to its oneof case; returns {@code null} for unrecognized numbers. */
  public static ValidationTypeCase forNumber(int value) {
    switch (value) {
      case 1:
        return REGEX;
      case 2:
        return VALUES;
      case 0:
        return VALIDATIONTYPE_NOT_SET;
      default:
        return null;
    }
  }

  public int getNumber() {
    return this.value;
  }
};

/** Returns which validation_type case is set (or VALIDATIONTYPE_NOT_SET). */
public ValidationTypeCase getValidationTypeCase() {
  return ValidationTypeCase.forNumber(validationTypeCase_);
}
public static final int REGEX_FIELD_NUMBER = 1;
/**
 *
 *
 * <pre>
 * Validation based on regular expressions.
 * </pre>
 *
 * <code>.google.cloud.dataproc.v1beta2.RegexValidation regex = 1;</code>
 *
 * @return true iff the validation_type oneof currently holds the regex case.
 */
public boolean hasRegex() {
  return validationTypeCase_ == 1;
}
/**
 *
 *
 * <pre>
 * Validation based on regular expressions.
 * </pre>
 *
 * <code>.google.cloud.dataproc.v1beta2.RegexValidation regex = 1;</code>
 *
 * @return the regex validation, or the default instance when another oneof case is set.
 */
public com.google.cloud.dataproc.v1beta2.RegexValidation getRegex() {
  if (validationTypeCase_ == 1) {
    return (com.google.cloud.dataproc.v1beta2.RegexValidation) validationType_;
  }
  return com.google.cloud.dataproc.v1beta2.RegexValidation.getDefaultInstance();
}
/**
 *
 *
 * <pre>
 * Validation based on regular expressions.
 * </pre>
 *
 * <code>.google.cloud.dataproc.v1beta2.RegexValidation regex = 1;</code>
 */
public com.google.cloud.dataproc.v1beta2.RegexValidationOrBuilder getRegexOrBuilder() {
  if (validationTypeCase_ == 1) {
    return (com.google.cloud.dataproc.v1beta2.RegexValidation) validationType_;
  }
  return com.google.cloud.dataproc.v1beta2.RegexValidation.getDefaultInstance();
}

public static final int VALUES_FIELD_NUMBER = 2;
/**
 *
 *
 * <pre>
 * Validation based on a list of allowed values.
 * </pre>
 *
 * <code>.google.cloud.dataproc.v1beta2.ValueValidation values = 2;</code>
 *
 * @return true iff the validation_type oneof currently holds the values case.
 */
public boolean hasValues() {
  return validationTypeCase_ == 2;
}
/**
 *
 *
 * <pre>
 * Validation based on a list of allowed values.
 * </pre>
 *
 * <code>.google.cloud.dataproc.v1beta2.ValueValidation values = 2;</code>
 *
 * @return the value-list validation, or the default instance when another oneof case is set.
 */
public com.google.cloud.dataproc.v1beta2.ValueValidation getValues() {
  if (validationTypeCase_ == 2) {
    return (com.google.cloud.dataproc.v1beta2.ValueValidation) validationType_;
  }
  return com.google.cloud.dataproc.v1beta2.ValueValidation.getDefaultInstance();
}
/**
 *
 *
 * <pre>
 * Validation based on a list of allowed values.
 * </pre>
 *
 * <code>.google.cloud.dataproc.v1beta2.ValueValidation values = 2;</code>
 */
public com.google.cloud.dataproc.v1beta2.ValueValidationOrBuilder getValuesOrBuilder() {
  if (validationTypeCase_ == 2) {
    return (com.google.cloud.dataproc.v1beta2.ValueValidation) validationType_;
  }
  return com.google.cloud.dataproc.v1beta2.ValueValidation.getDefaultInstance();
}
// Memoized initialization check: -1 = not computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;

// This message has no required fields, so every instance is initialized.
@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  memoizedIsInitialized = 1;
  return true;
}

// Serializes whichever oneof case is set, followed by any unknown fields.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  if (validationTypeCase_ == 1) {
    output.writeMessage(1, (com.google.cloud.dataproc.v1beta2.RegexValidation) validationType_);
  }
  if (validationTypeCase_ == 2) {
    output.writeMessage(2, (com.google.cloud.dataproc.v1beta2.ValueValidation) validationType_);
  }
  unknownFields.writeTo(output);
}

// Computes (and memoizes) the serialized byte size; must agree with writeTo.
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  if (validationTypeCase_ == 1) {
    size +=
        com.google.protobuf.CodedOutputStream.computeMessageSize(
            1, (com.google.cloud.dataproc.v1beta2.RegexValidation) validationType_);
  }
  if (validationTypeCase_ == 2) {
    size +=
        com.google.protobuf.CodedOutputStream.computeMessageSize(
            2, (com.google.cloud.dataproc.v1beta2.ValueValidation) validationType_);
  }
  size += unknownFields.getSerializedSize();
  memoizedSize = size;
  return size;
}
// Value equality: same oneof case, equal case payload, equal unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.dataproc.v1beta2.ParameterValidation)) {
    return super.equals(obj);
  }
  com.google.cloud.dataproc.v1beta2.ParameterValidation other =
      (com.google.cloud.dataproc.v1beta2.ParameterValidation) obj;
  boolean result = true;
  result = result && getValidationTypeCase().equals(other.getValidationTypeCase());
  if (!result) return false;
  switch (validationTypeCase_) {
    case 1:
      result = result && getRegex().equals(other.getRegex());
      break;
    case 2:
      result = result && getValues().equals(other.getValues());
      break;
    case 0:
    default:
      // oneof unset: nothing further to compare.
  }
  result = result && unknownFields.equals(other.unknownFields);
  return result;
}

// Hash is memoized (0 means "not yet computed") and folds in the set oneof case payload.
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  switch (validationTypeCase_) {
    case 1:
      hash = (37 * hash) + REGEX_FIELD_NUMBER;
      hash = (53 * hash) + getRegex().hashCode();
      break;
    case 2:
      hash = (37 * hash) + VALUES_FIELD_NUMBER;
      hash = (53 * hash) + getValues().hashCode();
      break;
    case 0:
    default:
      // oneof unset: no payload contribution.
  }
  hash = (29 * hash) + unknownFields.hashCode();
  memoizedHashCode = hash;
  return hash;
}
// ---- Static parsing factories: thin overloads delegating to PARSER / GeneratedMessageV3. ----
public static com.google.cloud.dataproc.v1beta2.ParameterValidation parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.dataproc.v1beta2.ParameterValidation parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.dataproc.v1beta2.ParameterValidation parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.dataproc.v1beta2.ParameterValidation parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.dataproc.v1beta2.ParameterValidation parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.dataproc.v1beta2.ParameterValidation parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.dataproc.v1beta2.ParameterValidation parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.dataproc.v1beta2.ParameterValidation parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

// Delimited variants read a varint length prefix first (for streams of messages).
public static com.google.cloud.dataproc.v1beta2.ParameterValidation parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.cloud.dataproc.v1beta2.ParameterValidation parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.dataproc.v1beta2.ParameterValidation parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.dataproc.v1beta2.ParameterValidation parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
// ---- Builder factories. ----
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}

/** Creates a fresh builder seeded from the default instance. */
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

/** Creates a builder pre-populated with {@code prototype}'s fields. */
public static Builder newBuilder(
    com.google.cloud.dataproc.v1beta2.ParameterValidation prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

// toBuilder on the default instance returns an empty builder (avoids a redundant merge).
@java.lang.Override
public Builder toBuilder() {
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
*
*
* <pre>
* Configuration for parameter validation.
* </pre>
*
* Protobuf type {@code google.cloud.dataproc.v1beta2.ParameterValidation}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.dataproc.v1beta2.ParameterValidation)
com.google.cloud.dataproc.v1beta2.ParameterValidationOrBuilder {
// Same descriptor as the message type; builders share the reflective metadata.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.dataproc.v1beta2.WorkflowTemplatesProto
      .internal_static_google_cloud_dataproc_v1beta2_ParameterValidation_descriptor;
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.dataproc.v1beta2.WorkflowTemplatesProto
      .internal_static_google_cloud_dataproc_v1beta2_ParameterValidation_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.dataproc.v1beta2.ParameterValidation.class,
          com.google.cloud.dataproc.v1beta2.ParameterValidation.Builder.class);
}
// Construct using com.google.cloud.dataproc.v1beta2.ParameterValidation.newBuilder()
private Builder() {
  maybeForceBuilderInitialization();
}

private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  super(parent);
  maybeForceBuilderInitialization();
}

// Hook for eager field-builder creation; intentionally empty for this message.
private void maybeForceBuilderInitialization() {
  if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
}

// Resets the builder to the unset state (oneof case 0, no payload).
@java.lang.Override
public Builder clear() {
  super.clear();
  validationTypeCase_ = 0;
  validationType_ = null;
  return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
  return com.google.cloud.dataproc.v1beta2.WorkflowTemplatesProto
      .internal_static_google_cloud_dataproc_v1beta2_ParameterValidation_descriptor;
}

@java.lang.Override
public com.google.cloud.dataproc.v1beta2.ParameterValidation getDefaultInstanceForType() {
  return com.google.cloud.dataproc.v1beta2.ParameterValidation.getDefaultInstance();
}
// Builds the message, throwing if it is uninitialized (never happens here: no required fields).
@java.lang.Override
public com.google.cloud.dataproc.v1beta2.ParameterValidation build() {
  com.google.cloud.dataproc.v1beta2.ParameterValidation result = buildPartial();
  if (!result.isInitialized()) {
    throw newUninitializedMessageException(result);
  }
  return result;
}

// Copies the set oneof case (from either the raw payload or the nested field builder)
// into a new message without an initialization check.
@java.lang.Override
public com.google.cloud.dataproc.v1beta2.ParameterValidation buildPartial() {
  com.google.cloud.dataproc.v1beta2.ParameterValidation result =
      new com.google.cloud.dataproc.v1beta2.ParameterValidation(this);
  if (validationTypeCase_ == 1) {
    if (regexBuilder_ == null) {
      result.validationType_ = validationType_;
    } else {
      result.validationType_ = regexBuilder_.build();
    }
  }
  if (validationTypeCase_ == 2) {
    if (valuesBuilder_ == null) {
      result.validationType_ = validationType_;
    } else {
      result.validationType_ = valuesBuilder_.build();
    }
  }
  result.validationTypeCase_ = validationTypeCase_;
  onBuilt();
  return result;
}
// ---- Generic reflective operations: delegate to GeneratedMessageV3.Builder, narrowing the
// ---- return type to Builder for fluent chaining.
@java.lang.Override
public Builder clone() {
  return (Builder) super.clone();
}

@java.lang.Override
public Builder setField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return (Builder) super.setField(field, value);
}

@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
  return (Builder) super.clearField(field);
}

@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
  return (Builder) super.clearOneof(oneof);
}

@java.lang.Override
public Builder setRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
  return (Builder) super.setRepeatedField(field, index, value);
}

@java.lang.Override
public Builder addRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return (Builder) super.addRepeatedField(field, value);
}

// Dispatches to the typed merge when possible, otherwise merges reflectively.
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
  if (other instanceof com.google.cloud.dataproc.v1beta2.ParameterValidation) {
    return mergeFrom((com.google.cloud.dataproc.v1beta2.ParameterValidation) other);
  } else {
    super.mergeFrom(other);
    return this;
  }
}
// Typed merge: adopts whichever oneof case is set in `other`; a default-instance source
// is a no-op. Also carries over unknown fields.
public Builder mergeFrom(com.google.cloud.dataproc.v1beta2.ParameterValidation other) {
  if (other == com.google.cloud.dataproc.v1beta2.ParameterValidation.getDefaultInstance())
    return this;
  switch (other.getValidationTypeCase()) {
    case REGEX:
      {
        mergeRegex(other.getRegex());
        break;
      }
    case VALUES:
      {
        mergeValues(other.getValues());
        break;
      }
    case VALIDATIONTYPE_NOT_SET:
      {
        // Source oneof unset: leave this builder's oneof alone.
        break;
      }
  }
  this.mergeUnknownFields(other.unknownFields);
  onChanged();
  return this;
}

// No required fields, so a builder is always initialized.
@java.lang.Override
public final boolean isInitialized() {
  return true;
}
// Parses from a stream and merges the result in; on a parse error the partially-parsed
// message (if any) is still merged before the exception is rethrown as IOException.
@java.lang.Override
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  com.google.cloud.dataproc.v1beta2.ParameterValidation parsedMessage = null;
  try {
    parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    parsedMessage =
        (com.google.cloud.dataproc.v1beta2.ParameterValidation) e.getUnfinishedMessage();
    throw e.unwrapIOException();
  } finally {
    if (parsedMessage != null) {
      mergeFrom(parsedMessage);
    }
  }
  return this;
}
// Builder-side mirror of the message's oneof state: 0 = unset, 1 = regex, 2 = values.
private int validationTypeCase_ = 0;
private java.lang.Object validationType_;

/** Returns which validation_type case is currently set on this builder. */
public ValidationTypeCase getValidationTypeCase() {
  return ValidationTypeCase.forNumber(validationTypeCase_);
}

/** Clears the whole validation_type oneof, whichever case was set. */
public Builder clearValidationType() {
  validationTypeCase_ = 0;
  validationType_ = null;
  onChanged();
  return this;
}
// Lazily-created nested builder for the regex case; null until getRegexFieldBuilder()
// is first called, after which validationType_ is managed by this builder instead.
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.cloud.dataproc.v1beta2.RegexValidation,
        com.google.cloud.dataproc.v1beta2.RegexValidation.Builder,
        com.google.cloud.dataproc.v1beta2.RegexValidationOrBuilder>
    regexBuilder_;
/**
 *
 *
 * <pre>
 * Validation based on regular expressions.
 * </pre>
 *
 * <code>.google.cloud.dataproc.v1beta2.RegexValidation regex = 1;</code>
 */
public boolean hasRegex() {
  return validationTypeCase_ == 1;
}
/**
 *
 *
 * <pre>
 * Validation based on regular expressions.
 * </pre>
 *
 * <code>.google.cloud.dataproc.v1beta2.RegexValidation regex = 1;</code>
 *
 * @return the regex validation, or the default instance when another oneof case is set.
 */
public com.google.cloud.dataproc.v1beta2.RegexValidation getRegex() {
  if (regexBuilder_ == null) {
    if (validationTypeCase_ == 1) {
      return (com.google.cloud.dataproc.v1beta2.RegexValidation) validationType_;
    }
    return com.google.cloud.dataproc.v1beta2.RegexValidation.getDefaultInstance();
  } else {
    if (validationTypeCase_ == 1) {
      return regexBuilder_.getMessage();
    }
    return com.google.cloud.dataproc.v1beta2.RegexValidation.getDefaultInstance();
  }
}
/**
 *
 *
 * <pre>
 * Validation based on regular expressions.
 * </pre>
 *
 * <code>.google.cloud.dataproc.v1beta2.RegexValidation regex = 1;</code>
 */
public Builder setRegex(com.google.cloud.dataproc.v1beta2.RegexValidation value) {
  if (regexBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    validationType_ = value;
    onChanged();
  } else {
    regexBuilder_.setMessage(value);
  }
  validationTypeCase_ = 1;
  return this;
}
/**
 *
 *
 * <pre>
 * Validation based on regular expressions.
 * </pre>
 *
 * <code>.google.cloud.dataproc.v1beta2.RegexValidation regex = 1;</code>
 */
public Builder setRegex(
    com.google.cloud.dataproc.v1beta2.RegexValidation.Builder builderForValue) {
  if (regexBuilder_ == null) {
    validationType_ = builderForValue.build();
    onChanged();
  } else {
    regexBuilder_.setMessage(builderForValue.build());
  }
  validationTypeCase_ = 1;
  return this;
}
/**
 *
 *
 * <pre>
 * Validation based on regular expressions.
 * </pre>
 *
 * <code>.google.cloud.dataproc.v1beta2.RegexValidation regex = 1;</code>
 */
public Builder mergeRegex(com.google.cloud.dataproc.v1beta2.RegexValidation value) {
  if (regexBuilder_ == null) {
    // Merge into the existing payload only when this case is already set to a
    // non-default instance; otherwise adopt `value` wholesale.
    if (validationTypeCase_ == 1
        && validationType_
            != com.google.cloud.dataproc.v1beta2.RegexValidation.getDefaultInstance()) {
      validationType_ =
          com.google.cloud.dataproc.v1beta2.RegexValidation.newBuilder(
                  (com.google.cloud.dataproc.v1beta2.RegexValidation) validationType_)
              .mergeFrom(value)
              .buildPartial();
    } else {
      validationType_ = value;
    }
    onChanged();
  } else {
    // NOTE(review): setMessage after mergeFrom appears to discard the merge; this matches
    // the protoc output of this vintage — do not hand-edit generated code.
    if (validationTypeCase_ == 1) {
      regexBuilder_.mergeFrom(value);
    }
    regexBuilder_.setMessage(value);
  }
  validationTypeCase_ = 1;
  return this;
}
/**
 *
 *
 * <pre>
 * Validation based on regular expressions.
 * </pre>
 *
 * <code>.google.cloud.dataproc.v1beta2.RegexValidation regex = 1;</code>
 */
public Builder clearRegex() {
  if (regexBuilder_ == null) {
    if (validationTypeCase_ == 1) {
      validationTypeCase_ = 0;
      validationType_ = null;
      onChanged();
    }
  } else {
    if (validationTypeCase_ == 1) {
      validationTypeCase_ = 0;
      validationType_ = null;
    }
    regexBuilder_.clear();
  }
  return this;
}
/**
 *
 *
 * <pre>
 * Validation based on regular expressions.
 * </pre>
 *
 * <code>.google.cloud.dataproc.v1beta2.RegexValidation regex = 1;</code>
 */
public com.google.cloud.dataproc.v1beta2.RegexValidation.Builder getRegexBuilder() {
  return getRegexFieldBuilder().getBuilder();
}
/**
 *
 *
 * <pre>
 * Validation based on regular expressions.
 * </pre>
 *
 * <code>.google.cloud.dataproc.v1beta2.RegexValidation regex = 1;</code>
 */
public com.google.cloud.dataproc.v1beta2.RegexValidationOrBuilder getRegexOrBuilder() {
  if ((validationTypeCase_ == 1) && (regexBuilder_ != null)) {
    return regexBuilder_.getMessageOrBuilder();
  } else {
    if (validationTypeCase_ == 1) {
      return (com.google.cloud.dataproc.v1beta2.RegexValidation) validationType_;
    }
    return com.google.cloud.dataproc.v1beta2.RegexValidation.getDefaultInstance();
  }
}
/**
 *
 *
 * <pre>
 * Validation based on regular expressions.
 * </pre>
 *
 * <code>.google.cloud.dataproc.v1beta2.RegexValidation regex = 1;</code>
 */
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.cloud.dataproc.v1beta2.RegexValidation,
        com.google.cloud.dataproc.v1beta2.RegexValidation.Builder,
        com.google.cloud.dataproc.v1beta2.RegexValidationOrBuilder>
    getRegexFieldBuilder() {
  // On first use, seed the nested builder from the current payload (or the default
  // instance) and hand ownership of the payload over to the builder.
  if (regexBuilder_ == null) {
    if (!(validationTypeCase_ == 1)) {
      validationType_ = com.google.cloud.dataproc.v1beta2.RegexValidation.getDefaultInstance();
    }
    regexBuilder_ =
        new com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dataproc.v1beta2.RegexValidation,
            com.google.cloud.dataproc.v1beta2.RegexValidation.Builder,
            com.google.cloud.dataproc.v1beta2.RegexValidationOrBuilder>(
            (com.google.cloud.dataproc.v1beta2.RegexValidation) validationType_,
            getParentForChildren(),
            isClean());
    validationType_ = null;
  }
  validationTypeCase_ = 1;
  onChanged();
  ;
  return regexBuilder_;
}
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.dataproc.v1beta2.ValueValidation,
com.google.cloud.dataproc.v1beta2.ValueValidation.Builder,
com.google.cloud.dataproc.v1beta2.ValueValidationOrBuilder>
valuesBuilder_;
/**
*
*
* <pre>
* Validation based on a list of allowed values.
* </pre>
*
* <code>.google.cloud.dataproc.v1beta2.ValueValidation values = 2;</code>
*/
public boolean hasValues() {
return validationTypeCase_ == 2;
}
/**
*
*
* <pre>
* Validation based on a list of allowed values.
* </pre>
*
* <code>.google.cloud.dataproc.v1beta2.ValueValidation values = 2;</code>
*/
public com.google.cloud.dataproc.v1beta2.ValueValidation getValues() {
if (valuesBuilder_ == null) {
if (validationTypeCase_ == 2) {
return (com.google.cloud.dataproc.v1beta2.ValueValidation) validationType_;
}
return com.google.cloud.dataproc.v1beta2.ValueValidation.getDefaultInstance();
} else {
if (validationTypeCase_ == 2) {
return valuesBuilder_.getMessage();
}
return com.google.cloud.dataproc.v1beta2.ValueValidation.getDefaultInstance();
}
}
/**
 *
 *
 * <pre>
 * Validation based on a list of allowed values.
 * </pre>
 *
 * <code>.google.cloud.dataproc.v1beta2.ValueValidation values = 2;</code>
 */
public Builder setValues(com.google.cloud.dataproc.v1beta2.ValueValidation value) {
  if (valuesBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    validationType_ = value;
    onChanged();
  } else {
    // Builder path: setMessage performs its own null check and change tracking.
    valuesBuilder_.setMessage(value);
  }
  validationTypeCase_ = 2; // switch the oneof to the 'values' case
  return this;
}
/**
 *
 *
 * <pre>
 * Validation based on a list of allowed values.
 * </pre>
 *
 * <code>.google.cloud.dataproc.v1beta2.ValueValidation values = 2;</code>
 */
public Builder setValues(
    com.google.cloud.dataproc.v1beta2.ValueValidation.Builder builderForValue) {
  if (valuesBuilder_ == null) {
    validationType_ = builderForValue.build();
    onChanged();
  } else {
    valuesBuilder_.setMessage(builderForValue.build());
  }
  validationTypeCase_ = 2; // switch the oneof to the 'values' case
  return this;
}
/**
 *
 *
 * <pre>
 * Validation based on a list of allowed values.
 * </pre>
 *
 * <code>.google.cloud.dataproc.v1beta2.ValueValidation values = 2;</code>
 */
public Builder mergeValues(com.google.cloud.dataproc.v1beta2.ValueValidation value) {
  if (valuesBuilder_ == null) {
    if (validationTypeCase_ == 2
        && validationType_
            != com.google.cloud.dataproc.v1beta2.ValueValidation.getDefaultInstance()) {
      // Oneof already holds a non-default 'values' message: merge into it.
      validationType_ =
          com.google.cloud.dataproc.v1beta2.ValueValidation.newBuilder(
                  (com.google.cloud.dataproc.v1beta2.ValueValidation) validationType_)
              .mergeFrom(value)
              .buildPartial();
    } else {
      validationType_ = value;
    }
    onChanged();
  } else {
    if (validationTypeCase_ == 2) {
      // BUG FIX: the original called mergeFrom(value) and then unconditionally
      // called setMessage(value), discarding the merged result.  Merge only
      // when this oneof case is already active; otherwise replace the message
      // (matches the code emitted by current protoc versions).
      valuesBuilder_.mergeFrom(value);
    } else {
      valuesBuilder_.setMessage(value);
    }
  }
  validationTypeCase_ = 2; // switch the oneof to the 'values' case
  return this;
}
/**
 *
 *
 * <pre>
 * Validation based on a list of allowed values.
 * </pre>
 *
 * <code>.google.cloud.dataproc.v1beta2.ValueValidation values = 2;</code>
 */
public Builder clearValues() {
  if (valuesBuilder_ == null) {
    // Clear only if this oneof case is the active one.
    if (validationTypeCase_ == 2) {
      validationTypeCase_ = 0;
      validationType_ = null;
      onChanged();
    }
  } else {
    if (validationTypeCase_ == 2) {
      validationTypeCase_ = 0;
      validationType_ = null;
    }
    valuesBuilder_.clear(); // clear() notifies the parent, so no onChanged() here
  }
  return this;
}
/**
 *
 *
 * <pre>
 * Validation based on a list of allowed values.
 * </pre>
 *
 * <code>.google.cloud.dataproc.v1beta2.ValueValidation values = 2;</code>
 */
public com.google.cloud.dataproc.v1beta2.ValueValidation.Builder getValuesBuilder() {
  // Forces creation of the field builder and activates the 'values' case.
  return getValuesFieldBuilder().getBuilder();
}
/**
 *
 *
 * <pre>
 * Validation based on a list of allowed values.
 * </pre>
 *
 * <code>.google.cloud.dataproc.v1beta2.ValueValidation values = 2;</code>
 */
public com.google.cloud.dataproc.v1beta2.ValueValidationOrBuilder getValuesOrBuilder() {
  if ((validationTypeCase_ == 2) && (valuesBuilder_ != null)) {
    return valuesBuilder_.getMessageOrBuilder();
  } else {
    if (validationTypeCase_ == 2) {
      return (com.google.cloud.dataproc.v1beta2.ValueValidation) validationType_;
    }
    return com.google.cloud.dataproc.v1beta2.ValueValidation.getDefaultInstance();
  }
}
/**
 *
 *
 * <pre>
 * Validation based on a list of allowed values.
 * </pre>
 *
 * <code>.google.cloud.dataproc.v1beta2.ValueValidation values = 2;</code>
 */
// Lazily creates (and caches) the SingleFieldBuilderV3 for the 'values'
// member of the validation_type oneof (case 2).  Calling this accessor
// switches the oneof to the values case as a side effect.
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.cloud.dataproc.v1beta2.ValueValidation,
        com.google.cloud.dataproc.v1beta2.ValueValidation.Builder,
        com.google.cloud.dataproc.v1beta2.ValueValidationOrBuilder>
    getValuesFieldBuilder() {
  if (valuesBuilder_ == null) {
    if (!(validationTypeCase_ == 2)) {
      // Oneof currently holds a different case; seed the builder with defaults.
      validationType_ = com.google.cloud.dataproc.v1beta2.ValueValidation.getDefaultInstance();
    }
    valuesBuilder_ =
        new com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dataproc.v1beta2.ValueValidation,
            com.google.cloud.dataproc.v1beta2.ValueValidation.Builder,
            com.google.cloud.dataproc.v1beta2.ValueValidationOrBuilder>(
            (com.google.cloud.dataproc.v1beta2.ValueValidation) validationType_,
            getParentForChildren(),
            isClean());
    validationType_ = null; // builder now owns the message
  }
  validationTypeCase_ = 2;
  onChanged();
  ; // stray semicolon emitted by protoc; harmless
  return valuesBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  // proto3 semantics: replace the unknown field set wholesale.
  return super.setUnknownFieldsProto3(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.dataproc.v1beta2.ParameterValidation)
}
// @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.ParameterValidation)
// Shared immutable default instance; all getDefaultInstance* calls return it.
private static final com.google.cloud.dataproc.v1beta2.ParameterValidation DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new com.google.cloud.dataproc.v1beta2.ParameterValidation();
}
public static com.google.cloud.dataproc.v1beta2.ParameterValidation getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Parser that delegates to the stream-parsing constructor.
private static final com.google.protobuf.Parser<ParameterValidation> PARSER =
    new com.google.protobuf.AbstractParser<ParameterValidation>() {
      @java.lang.Override
      public ParameterValidation parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new ParameterValidation(input, extensionRegistry);
      }
    };
public static com.google.protobuf.Parser<ParameterValidation> parser() {
  return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ParameterValidation> getParserForType() {
  return PARSER;
}
@java.lang.Override
public com.google.cloud.dataproc.v1beta2.ParameterValidation getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
| |
package jpl.mipl.io.vicar;
import java.util.*;
import java.util.ArrayList; // just to make javadoc happy
import java.text.Collator;
import java.io.*;
import org.w3c.dom.*;
import jpl.mipl.io.util.*;
/**
* This class maintains the key/value pair for a single VICAR label item,
* and provides mechanisms to retrieve and modify the key and value.
* <p>
* Items are stored internally as strings in the format required for the
* label (with one exception, see below)... but this fact is transparent
* to the caller, who can use the int/float/double get/set methods as
* appropriate. The exception is strings... the VICAR standard specifies
* that single quotes embedded in a string are doubled when written to the
* file. However, strings are stored internally with the doubles removed
* (which are added again in <code>toLabelString</code>). This is a slight
* inconsistency of design, but should be transparent to the caller.
* Note that <code>toString</code> itself converts to a more human-readable
* format; <code>toLabelString</code> converts to a format suitable for
* the label.
* <p>
* Access to this object is not synchronized in any way. The caller must
* synchronize if necessary. <code>ArrayList</code>s are used instead of
* <code>Vector</code>s for efficiency.
* <p>
* Note that per VICAR convention, keywords are always stored in upper case.
* However, they use case-insensitive comparisons.
*/
public class VicarLabelItem implements Cloneable, Serializable
{
/** Keyword.  Always stored in upper case (see class comment).
 * @serial */
protected String _key;
/** Value.  List of String; all elements are kept in label-string form.
 * @serial */
protected List _value;
/** Data type if known.  One of the TYPE_* constants below.
 * @serial */
protected int _type = TYPE_UNSPECIFIED;
/***********************************************************************
 * These values are used to keep track of the "natural" data type of the
 * label item.  Although all items are stored internally as String, it is
 * often convenient to access them numerically.
 */
public static final int TYPE_UNSPECIFIED = 0;
/** Integers may have only +-0123456789 */
public static final int TYPE_INTEGER = 1;
/** Floats are integers plus .eEdD */
public static final int TYPE_FLOAT = 2;
/** Doubles are not guessed at, but can be set or retrieved */
public static final int TYPE_DOUBLE = 3;
/** Strings are anything else, or if they have quotes */
public static final int TYPE_STRING = 4;
/***********************************************************************
 * The STRING representation of each type, for XML.
 * Indexed by the TYPE_* constants above, so order must match.
 */
public static final String _typeStrings[] = {
    "UNSPECIFIED",
    "INTEGER",
    "FLOAT",
    "DOUBLE",
    "STRING"
};
////////////////////////////////////////////////////////////////////////
// Constructors
////////////////////////////////////////////////////////////////////////
/***********************************************************************
 * Create an empty label item.  Must call <code>set</code> functions to make
 * this useful.  Note: _key remains null until setKeyword() is called.
 */
public VicarLabelItem()
{
    _key = null;
    _value = new ArrayList(0);
    _type = TYPE_UNSPECIFIED;
}
/***********************************************************************
 * Create a label item with keyword only.  Can be useful for equality
 * comparisons (which looks at keyword only, not value).
 */
public VicarLabelItem(String key)
{
    _key = key.toUpperCase();   // VICAR convention: keywords are upper case
    _value = new ArrayList(0);
    _type = TYPE_UNSPECIFIED;
}
/***********************************************************************
 * Create a label item from a single string
 */
public VicarLabelItem(String key, String value)
{
    _key = key.toUpperCase();
    _value = new ArrayList(1);
    _value.add(value);
    _type = TYPE_STRING;
}
/***********************************************************************
 * Create a label item holding an array of string values.  The array
 * contents are copied; later changes to the caller's array are not seen.
 */
public VicarLabelItem(String key, String[] value)
{
    _key = key.toUpperCase();
    _value = new ArrayList(Arrays.asList(value));
    _type = TYPE_STRING;
}
/***********************************************************************
 * Create a label item from a single integer
 */
public VicarLabelItem(String key, int value)
{
    _key = key.toUpperCase();
    _value = new ArrayList(1);
    _value.add(getLabelString(value));  // stored in label-string form
    _type = TYPE_INTEGER;
}
/***********************************************************************
 * Create a label item from an array of integers
 */
public VicarLabelItem(String key, int[] value)
{
    _key = key.toUpperCase();
    _value = new ArrayList(value.length);
    for (int i=0; i < value.length; i++)
        _value.add(getLabelString(value[i]));
    _type = TYPE_INTEGER;
}
/***********************************************************************
 * Create a label item from a single float
 */
public VicarLabelItem(String key, float value)
{
    _key = key.toUpperCase();
    _value = new ArrayList(1);
    _value.add(getLabelString(value));
    _type = TYPE_FLOAT;
}
/***********************************************************************
 * Create a label item from an array of floats
 */
public VicarLabelItem(String key, float[] value)
{
    _key = key.toUpperCase();
    _value = new ArrayList(value.length);
    for (int i=0; i < value.length; i++)
        _value.add(getLabelString(value[i]));
    _type = TYPE_FLOAT;
}
/***********************************************************************
 * Create a label item from a single double
 */
public VicarLabelItem(String key, double value)
{
    _key = key.toUpperCase();
    _value = new ArrayList(1);
    _value.add(getLabelString(value));
    _type = TYPE_DOUBLE;
}
/***********************************************************************
 * Create a label item from an array of doubles
 */
public VicarLabelItem(String key, double[] value)
{
    _key = key.toUpperCase();
    _value = new ArrayList(value.length);
    for (int i=0; i < value.length; i++)
        _value.add(getLabelString(value[i]));
    _type = TYPE_DOUBLE;
}
/***********************************************************************
 * Create a label item from a single string, but we don't necessarily
 * know the type, so "guess" at it.  This form is intended for use by
 * the label reader, not by applications (who should know their type!)
 * @param quote <code>true</code> if a quote was present (forces a string),
 * <code>false</code> if no quote (meaning we guess).
 */
public VicarLabelItem(String key, String value, boolean quote)
{
    _key = key.toUpperCase();
    _value = new ArrayList(1);
    _value.add(value);
    if (quote)
        _type = TYPE_STRING;
    else {
        _type = TYPE_UNSPECIFIED;
        guessType(value);       // infers INTEGER/FLOAT/STRING from content
    }
}
/***********************************************************************
 * Create a label item from an array of strings, but we don't know the type.
 * @see #VicarLabelItem(String, String, boolean)
 */
public VicarLabelItem(String key, String[] value, boolean quote)
{
    _key = key.toUpperCase();
    _value = new ArrayList(value.length);
    if (quote)
        _type = TYPE_STRING;
    else
        _type = TYPE_UNSPECIFIED;
    for (int i=0; i < value.length; i++) {
        _value.add(value[i]);
        if (!quote)
            guessType(value[i]);    // each element can widen the guessed type
    }
}
/***********************************************************************
 * Create a label item from a vector of strings, but we don't know the type.
 * @see #VicarLabelItem(String, String, boolean)
 */
public VicarLabelItem(String key, List value, boolean quote)
{
    _key = key.toUpperCase();
    _value = new ArrayList(value.size());
    if (quote)
        _type = TYPE_STRING;
    else
        _type = TYPE_UNSPECIFIED;
    for (int i=0; i < value.size(); i++) {
        _value.add(value.get(i));
        if (!quote)
            guessType((String)value.get(i));
    }
}
/***********************************************************************
 * Create a label item from an XML Element.  See <code>fromXML()</code>
 * for more information (the arguments are identical).
 * @see #fromXML(Element, List)
 */
public VicarLabelItem(Element item, List errorList)
{
    this();
    fromXML(item, errorList);
}
////////////////////////////////////////////////////////////////////////
// Access methods
////////////////////////////////////////////////////////////////////////
/***********************************************************************
 * Get the keyword.  May be null for a default-constructed item.
 */
public String getKeyword()
{
    return _key;
}
/***********************************************************************
 * Set the keyword.  Stored upper-cased, per VICAR convention.
 */
public void setKeyword(String key)
{
    _key = key.toUpperCase();
}
/***********************************************************************
 * Get the "natural" type.  Will return TYPE_UNSPECIFIED only if there are
 * no values.  There is no <code>setType</code> because that must be
 * consistent with the values themselves.
 */
public int getType()
{
    return _type;
}
/***********************************************************************
 * Get the number of elements in the value.
 */
public int getNumElements()
{
    return _value.size();
}
/***********************************************************************
 * Two VicarLabelItem objects are equal if their Keywords are equal.
 * The values are not considered.  This makes searches in VicarLabelSet
 * much easier.  We also test for equality of the keyword with a String.
 * (Note: the String comparison is intentionally asymmetric —
 * item.equals("KEY") is true while "KEY".equals(item) is false.)
 * An item with no keyword (default constructor) is equal to nothing;
 * previously this case threw NullPointerException.
 */
public boolean equals(Object o)
{
    if (_key == null)
        return false;
    if (o instanceof String)
        return _key.equalsIgnoreCase((String)o);
    if (o instanceof VicarLabelItem)
        return _key.equalsIgnoreCase(((VicarLabelItem)o).getKeyword());
    return false;
}

/***********************************************************************
 * Hash code consistent with <code>equals</code>.  equals() compares
 * keywords case-insensitively, so we hash the upper-cased keyword.
 * (equals was previously overridden without hashCode, breaking use of
 * this class in hash-based collections.)
 */
public int hashCode()
{
    return (_key == null) ? 0 : _key.toUpperCase().hashCode();
}
////////////////////////////////////////////////////////////////////////
// Value retrieval
////////////////////////////////////////////////////////////////////////
/***********************************************************************
 * Get the (first) value as a string.  Useful if there's only one value.
 * @throws ArrayIndexOutOfBoundsException but this need not be declared
 */
public String getString()
{
    return (String)_value.get(0);
}
/***********************************************************************
 * Get the (first) value as an integer.  If the type is not TYPE_INTEGER,
 * 0 is returned.  Useful if there's only one value.
 * @throws ArrayIndexOutOfBoundsException but this need not be declared
 */
public int getInteger()
{
    return getIntegerFromString((String)_value.get(0));
}
/***********************************************************************
 * Get the (first) value as a float.  If the type is not TYPE_INTEGER,
 * TYPE_FLOAT, or TYPE_DOUBLE, 0 is returned.  Useful if there's only one
 * value.
 * @throws ArrayIndexOutOfBoundsException but this need not be declared
 */
public float getFloat()
{
    return getFloatFromString((String)_value.get(0));
}
/***********************************************************************
 * Get the (first) value as a double.  If the type is not TYPE_INTEGER,
 * TYPE_FLOAT, or TYPE_DOUBLE, 0 is returned.  Useful if there's only one
 * value.
 * @throws ArrayIndexOutOfBoundsException but this need not be declared
 */
public double getDouble()
{
    return getDoubleFromString((String)_value.get(0));
}
/***********************************************************************
 * Get the given element as a string.
 * @throws ArrayIndexOutOfBoundsException but this need not be declared
 */
public String getString(int index)
{
    return (String)_value.get(index);
}
/***********************************************************************
 * Get the given element as an integer.  If the type is not TYPE_INTEGER,
 * 0 is returned.
 * @throws ArrayIndexOutOfBoundsException but this need not be declared
 */
public int getInteger(int index)
{
    return getIntegerFromString((String)_value.get(index));
}
/***********************************************************************
 * Get the given element as a float.  If the type is not TYPE_INTEGER,
 * TYPE_FLOAT, or TYPE_DOUBLE, 0 is returned.
 * @throws ArrayIndexOutOfBoundsException but this need not be declared
 */
public float getFloat(int index)
{
    return getFloatFromString((String)_value.get(index));
}
/***********************************************************************
 * Get the given element as a double.  If the type is not TYPE_INTEGER,
 * TYPE_FLOAT, or TYPE_DOUBLE, 0 is returned.
 * @throws ArrayIndexOutOfBoundsException but this need not be declared
 */
public double getDouble(int index)
{
    return getDoubleFromString((String)_value.get(index));
}
/***********************************************************************
 * Get the value as a string array.
 */
public String[] getStringArray()
{
    return (String[])_value.toArray(new String[0]);
}
/***********************************************************************
 * Get the value as an integer array.  If the type is not TYPE_INTEGER,
 * an array full of 0's is returned.
 */
public int[] getIntegerArray()
{
    int n = _value.size();
    int[] result = new int[n];
    for (int i = 0; i < n; i++)
        result[i] = getIntegerFromString((String)_value.get(i));
    return result;
}
/***********************************************************************
 * Get the value as a float array.  If the type is not TYPE_INTEGER,
 * TYPE_FLOAT, or TYPE_DOUBLE, an array full of 0's is returned.
 */
public float[] getFloatArray()
{
    int n = _value.size();
    float[] result = new float[n];
    for (int i = 0; i < n; i++)
        result[i] = getFloatFromString((String)_value.get(i));
    return result;
}
/***********************************************************************
 * Get the value as a double array.  If the type is not TYPE_INTEGER,
 * TYPE_FLOAT, or TYPE_DOUBLE, an array full of 0's is returned.
 */
public double[] getDoubleArray()
{
    int n = _value.size();
    double[] result = new double[n];
    for (int i = 0; i < n; i++)
        result[i] = getDoubleFromString((String)_value.get(i));
    return result;
}
////////////////////////////////////////////////////////////////////////
// Value modification
////////////////////////////////////////////////////////////////////////
/***********************************************************************
 * Set the value from a string.  When done, the vector will have one and
 * only one element in it (any previous elements are discarded).
 */
public void setValue(String value)
{
    _value = new ArrayList(1);
    _value.add(value);
    _type = TYPE_STRING;
}
/***********************************************************************
 * Set the value from an integer.  When done, the vector will have one and
 * only one element in it (any previous elements are discarded).
 */
public void setValue(int value)
{
    _value = new ArrayList(1);
    _value.add(getLabelString(value));  // stored in label-string form
    _type = TYPE_INTEGER;
}
/***********************************************************************
 * Set the value from a float.  When done, the vector will have one and
 * only one element in it (any previous elements are discarded).
 */
public void setValue(float value)
{
    _value = new ArrayList(1);
    _value.add(getLabelString(value));
    _type = TYPE_FLOAT;
}
/***********************************************************************
 * Set the value from a double.  When done, the vector will have one and
 * only one element in it (any previous elements are discarded).
 */
public void setValue(double value)
{
    _value = new ArrayList(1);
    _value.add(getLabelString(value));
    _type = TYPE_DOUBLE;
}
/***********************************************************************
 * Set the given element as a string.  Setting with an index of the current
 * size, or -1, will append the item to the end.  Elements other than the
 * indexed one are left unchanged.
 * NOTE(review): unlike the numeric overloads below, this forces _type to
 * TYPE_STRING without calling checkConsistentType — confirm intended.
 * @throws ArrayIndexOutOfBoundsException but this need not be declared
 */
public void setValue(String value, int index)
{
    if (index == -1 || index == _value.size())
        _value.add(value);
    else
        _value.set(index, value);
    _type = TYPE_STRING;
}
/***********************************************************************
 * Set the given element as an integer.  Setting with an index of the current
 * size, or -1, will append the item to the end.  Elements other than the
 * indexed one are left unchanged.
 * @throws ArrayIndexOutOfBoundsException but this need not be declared
 */
public void setValue(int value, int index)
{
    if (index == -1 || index == _value.size())
        _value.add(getLabelString(value));
    else
        _value.set(index, getLabelString(value));
    checkConsistentType(TYPE_INTEGER);  // reconcile with existing elements
}
/***********************************************************************
 * Set the given element as a float.  Setting with an index of the current
 * size, or -1, will append the item to the end.  Elements other than the
 * indexed one are left unchanged.
 * @throws ArrayIndexOutOfBoundsException but this need not be declared
 */
public void setValue(float value, int index)
{
    if (index == -1 || index == _value.size())
        _value.add(getLabelString(value));
    else
        _value.set(index, getLabelString(value));
    checkConsistentType(TYPE_FLOAT);
}
/***********************************************************************
 * Set the given element as a double.  Setting with an index of the current
 * size, or -1, will append the item to the end.  Elements other than the
 * indexed one are left unchanged.
 * @throws ArrayIndexOutOfBoundsException but this need not be declared
 */
public void setValue(double value, int index)
{
    if (index == -1 || index == _value.size())
        _value.add(getLabelString(value));
    else
        _value.set(index, getLabelString(value));
    checkConsistentType(TYPE_DOUBLE);
}
/***********************************************************************
 * Set the entire value to a string array.  Any previous items are discarded.
 */
public void setValue(String[] value)
{
    _value = new ArrayList(value.length);
    for (int i=0; i < value.length; i++)
        _value.add(value[i]);
    _type = TYPE_STRING;
}
/***********************************************************************
 * Set the entire value to an integer array.  Any previous items are discarded.
 */
public void setValue(int[] value)
{
    _value = new ArrayList(value.length);
    for (int i=0; i < value.length; i++)
        _value.add(getLabelString(value[i]));   // stored in label-string form
    _type = TYPE_INTEGER;
}
/***********************************************************************
 * Set the entire value to a float array.  Any previous items are discarded.
 */
public void setValue(float[] value)
{
    _value = new ArrayList(value.length);
    for (int i=0; i < value.length; i++)
        _value.add(getLabelString(value[i]));
    _type = TYPE_FLOAT;
}
/***********************************************************************
 * Set the entire value to a double array.  Any previous items are discarded.
 */
public void setValue(double[] value)
{
    _value = new ArrayList(value.length);
    for (int i=0; i < value.length; i++)
        _value.add(getLabelString(value[i]));
    _type = TYPE_DOUBLE;
}
////////////////////////////////////////////////////////////////////////
// Structure modification
////////////////////////////////////////////////////////////////////////
/***********************************************************************
 * Removes an element from the value list.  _type is left unchanged.
 * @throws ArrayIndexOutOfBoundsException but this need not be declared
 */
public void remove(int index)
{
    _value.remove(index);
}
/***********************************************************************
 * Inserts a string at the given index.  The new element appears before
 * the element currently at that index.
 * @throws ArrayIndexOutOfBoundsException but this need not be declared
 * @see ArrayList#add(int, Object)
 */
public void insert(String value, int index)
{
    _value.add(index, value);
    _type = TYPE_STRING;
}
/***********************************************************************
 * Inserts an integer at the given index.  The new element appears before
 * the element currently at that index.
 * @throws ArrayIndexOutOfBoundsException but this need not be declared
 * @see ArrayList#add(int, Object)
 */
public void insert(int value, int index)
{
    _value.add(index, getLabelString(value));
    checkConsistentType(TYPE_INTEGER);  // reconcile with existing elements
}
/***********************************************************************
 * Inserts a float at the given index.  The new element appears before
 * the element currently at that index.
 * @throws ArrayIndexOutOfBoundsException but this need not be declared
 * @see ArrayList#add(int, Object)
 */
public void insert(float value, int index)
{
    _value.add(index, getLabelString(value));
    checkConsistentType(TYPE_FLOAT);
}
/***********************************************************************
 * Inserts a double at the given index.  The new element appears before
 * the element currently at that index.
 * @throws ArrayIndexOutOfBoundsException but this need not be declared
 * @see ArrayList#add(int, Object)
 */
public void insert(double value, int index)
{
    _value.add(index, getLabelString(value));
    checkConsistentType(TYPE_DOUBLE);
}
/***********************************************************************
 * Returns the entire Item as a valid VICAR label string, ready to be
 * added to a label, in key=value form.  Specifically, quotes internal
 * to strings are doubled, per the VICAR standard.  (This is not necessary
 * to do for input because <code>VicarLabelParser</code> handles it).
 * An item with no value renders as "KEY=" with nothing after the equals
 * sign (previously an empty value list threw IndexOutOfBoundsException).
 */
public String toLabelString()
{
    StringBuffer buf = new StringBuffer(50);
    buf.append(_key);
    buf.append("=");
    if (_value.size() == 1)
        buf.append(getLabelFormattedString((String)_value.get(0)));
    else if (_value.size() > 1) {       // multivalued element
        buf.append("(");
        for (int i=0; i < _value.size(); i++) {
            if (i != 0)
                buf.append(",");
            buf.append(getLabelFormattedString((String)_value.get(i)));
        }
        buf.append(")");
    }
    // size 0: nothing appended after "="
    return buf.toString();
}
/***********************************************************************
 * Returns the entire Item as a string, intended for human viewing.  This
 * string is <em>not</em> sufficient to be stuffed in an actual label; use
 * <code>toLabelString</code> for that.  The differences: internal quotes
 * in strings are not doubled, and spaces are added for readability.
 * @see #toLabelString()
 */
public String toString()
{
    StringBuffer buf = new StringBuffer(50);
    buf.append(_key);
    buf.append(" = ");          // more spaces than toLabelString()
    if (_value.size() == 0) {
        // No value present; emit a readable placeholder
        buf.append("NO_VALUE");
    }
    else if (_value.size() <= 1) {
        if (_type == TYPE_STRING)
            buf.append("'");
        buf.append((String)_value.get(0));
        if (_type == TYPE_STRING)
            buf.append("'");
    }
    else {                      // multivalued element
        buf.append("(");
        for (int i=0; i < _value.size(); i++) {
            if (i != 0)
                buf.append(", ");       // more spaces
            if (_type == TYPE_STRING)
                buf.append("'");
            buf.append(_value.get(i));
            if (_type == TYPE_STRING)
                buf.append("'");
        }
        buf.append(")");
    }
    return buf.toString();
}
/***********************************************************************
 * Returns the entire Item as an XML DOM <item> node.  Subnodes will be
 * created for multivalued Items as necessary.  See the VICAR label DTD.
 * !!!!TBD: reference to this DTD!!!!
 * An item with an empty value list produces an <item> with neither a
 * "value" attribute nor subitems (previously an empty item threw
 * IndexOutOfBoundsException).
 * @param doc The <code>Document</code> which will contain the node.
 * @see #toLabelString()
 */
public Node toXML(Document doc)
{
    // Create the ITEM node, and all the standard attributes
    Element item = doc.createElement("item");
    item.setAttribute("key", _key);
    if (_type != TYPE_UNSPECIFIED)      // no attr if type unknown
        item.setAttribute("type", _typeStrings[_type]);
    // Check for single or multivalued label value
    if (_value.size() == 1) {
        //!!!! HANDLE QUOTES !!!!
        item.setAttribute("value", (String)_value.get(0));
    }
    else if (_value.size() > 1) {       // multivalued element
        int n = _value.size();
        for (int i=0; i < n; i++) {
            Element sub = doc.createElement("subitem");
            //!!!! HANDLE QUOTES !!!!
            sub.setAttribute("value", (String)_value.get(i));
            item.appendChild(sub);
        }
    }
    return item;
}
/***********************************************************************
 * Sets the entire Item (key and value) based on the supplied XML DOM
 * <item> node.  See the VICAR label DTD.
 * !!!!TBD: reference to this DTD!!!!
 * <p>
 * If anything is wrong with the supplied node, error/warning messages
 * will be appended to the supplied <code>errorList</code>.  However, the
 * parser keeps working as long as possible, despite errors.  The practical
 * upshot of this is that the errors can usually be ignored; the parser will
 * do the best it can.  However, the errors can be displayed to the user
 * if desired.  An exception will be thrown only if something unrecoverable
 * happens.  A missing keyword will default to "UNKNOWN".
 *
 * @param item The <code>Element</code> from which to extract the item info.
 * @param errorList A list of <code>String</code>s to which any parsing
 * errors will be appended.  Can be <code>null</code>, in which case no
 * errors are returned.
 * @throws DOMException if something unrecoverable happens.  Improperly
 * formatted items or violations of the DTD do not necessarily result in
 * exceptions; parsing continues as long as possible and problems are appended
 * to the <code>errorList</code>.
 */
public void fromXML(Element item, List errorList)
{
    // Make sure it's an ITEM node
    DOMutils domUtils = new DOMutils();
    if (errorList != null) {
        if (!item.getTagName().equals("item"))
            errorList.add("Element " + item.getTagName() + " should have a tag of 'item'");
    }

    // Set the key: prefer "key", fall back to "name", else "UNKNOWN".
    // Note: DOM getAttribute() returns "" (not null) for a missing attribute.
    _key = item.getAttribute("key").toUpperCase();
    if (_key == null || _key.length() == 0) {
        _key = item.getAttribute("name").toUpperCase();
        if (_key == null || _key.length() == 0) {
            _key = "UNKNOWN";
            if (errorList != null)
                errorList.add("Item has no 'name' or 'key' attribute: " + item.toString()
                        + ". Keyword of UNKNOWN used");
        }
    }

    // Check for type
    _type = TYPE_UNSPECIFIED;
    String type = item.getAttribute("type");
    if (type == null || type.length() == 0)
        _type = TYPE_UNSPECIFIED;
    else if (type.equalsIgnoreCase("STRING"))
        _type = TYPE_STRING;
    else if (type.equalsIgnoreCase("INTEGER"))
        _type = TYPE_INTEGER;
    else if (type.equalsIgnoreCase("FLOAT"))
        _type = TYPE_FLOAT;
    else if (type.equalsIgnoreCase("DOUBLE"))
        _type = TYPE_DOUBLE;
    else if (type.equalsIgnoreCase("UNSPECIFIED"))
        _type = TYPE_UNSPECIFIED;
    else if (errorList != null)
        errorList.add("Type attribute " + type + " is invalid. UNSPECIFIED assumed");

    // Get the subitem list, if any
    NodeList subitems = item.getElementsByTagName("subitem");
    int nel = subitems.getLength();

    // Check for value
    //!!!! THIS IS WHAT IT SHOULD BE:
    //!!!! if (item.hasAttribute("value")) {
    //!!!! but xerces doesn't support DOM level 2.  When JDK goes to 1.4, replace
    //!!!! the following with the above. }
    if (item.getAttributeNode("value") != null) {
        // Single value exists
        String value = item.getAttribute("value");
        _value = new ArrayList(1);
        _value.add(value);
        int old_type = _type;
        guessType(value);       // Not only guesses, but makes sure it's valid
        if (errorList != null && old_type != _type &&
                old_type != TYPE_UNSPECIFIED)
            errorList.add("Type mismatch for key " + _key + ". Specified type is " + _typeStrings[old_type] + " but value is " + _typeStrings[_type]);
        if (errorList != null && nel != 0)
            errorList.add("Value attribute specified for key " + _key + " but subitems are present. Subitems ignored");
    }
    else {                      // Multiple values
        if (nel == 0) {         // no subitems either!
            _value = new ArrayList(0);
            // Value is not in an attribute; try the node's text content.
            String v = domUtils.getNodeValue(item);
            // BUG FIX: was 'v != ""', a reference (identity) comparison that
            // is almost always true even for empty strings.  Compare length.
            if (v != null && v.length() != 0) {
                _value = new ArrayList(1);
                _value.add(v);
                guessType(v);
            } else {
                if (errorList != null)
                    errorList.add("Empty value for key " + _key);
            }
        }
        else {
            int old_type = _type;
            _value = new ArrayList(nel);
            for (int i=0; i < nel; i++) {
                Node subitem = subitems.item(i);
                String v = "";
                if (!(subitem instanceof Element)) {
                    if (errorList != null)
                        errorList.add("Key " + _key + ", subitem " + i + " is not a DOM Element type");
                }
                else {
                    Element si = (Element)subitem;
                    //!!!! THIS IS WHAT IT SHOULD BE:
                    //!!!! if (errorList != null && !si.hasAttribute("value"))
                    //!!!! but xerces doesn't support DOM level 2.  When JDK goes to 1.4, replace
                    //!!!! the following with the above.
                    v = si.getAttribute("value");   // empty string OK
                    // BUG FIX: same identity-comparison fix as above, and the
                    // original reset _value to a fresh 1-element list on every
                    // iteration here, discarding all earlier subitems so only
                    // the last one survived.  Accumulate instead.
                    if (v != null && v.length() != 0) {
                        _value.add(v);
                        guessType(v);
                    } else {
                        v = domUtils.getNodeValue(si);  // value isn't in the attribute
                        if (v != null && v.length() != 0) {
                            _value.add(v);
                            guessType(v);
                        } else {
                            if (errorList != null && si.getAttributeNode("value") == null)
                                errorList.add("Key " + _key + ", subitem " + i + " has no 'value' attribute");
                        }
                    }
                }
                if (errorList != null && old_type != _type &&
                        old_type != TYPE_UNSPECIFIED)
                    errorList.add("Type mismatch for key " + _key + ". Specified type is " + _typeStrings[old_type] + " but value is " + _typeStrings[_type]);
            }
        }                       // else subitems
    }                           // multiple values
}
////////////////////////////////////////////////////////////////////////
// Internal utilities
////////////////////////////////////////////////////////////////////////
/***********************************************************************
* Returns a String representation of the given integer, suitable for
* a VICAR label.
*/
protected String getLabelString(int value)
{
return Integer.toString(value);
}
/***********************************************************************
* Returns a String representation of the given float, suitable for
* a VICAR label.
*/
protected String getLabelString(float value)
{
String s = Float.toString(value);
// C prints "Inf" or "-Inf" while java prints "Infinity". Change to
// the C form for consistency with the VICAR standard (not that
// infinity is really *allowed*, but...). NaN is the same in both.
if (s.equals("Infinity"))
return "Inf";
else if (s.equals("-Infinity"))
return "-Inf";
return s;
}
/***********************************************************************
* Returns a String representation of the given double, suitable for
* a VICAR label.
*/
protected String getLabelString(double value)
{
String s = Double.toString(value);
// C prints "Inf" or "-Inf" while java prints "Infinity". Change to
// the C form for consistency with the VICAR standard (not that
// infinity is really *allowed*, but...). NaN is the same in both.
if (s.equals("Infinity"))
return "Inf";
else if (s.equals("-Infinity"))
return "-Inf";
return s;
}
/***********************************************************************
* Returns a String representation of the given string, suitable for
* a VICAR label. Quotes are provided around strings, and Internal single
* quotes are doubled. Note that strings are <em>not</em> stored this way
* inside the VicarLabelItem object. This should only be called when the
* string is being formatted for output to a label. This is a slight design
* inconsistency, since everything else is stored in ready-to-go string format.
*/
protected String getLabelFormattedString(String value)
{
if (_type != TYPE_STRING)
return value;
StringBuffer buf = new StringBuffer(value.length());
buf.append("'");
int from = 0;
int to;
while ((to = value.indexOf('\'', from)) != -1) {
if (from != to)
buf.append(value.substring(from, to));
buf.append("''"); // double the quote
from = to + 1;
}
to = value.length(); // get the rest
if (from != to) // last part of string
buf.append(value.substring(from,to));
buf.append("'");
return buf.toString();
}
/***********************************************************************
* Parses the given String according to the VICAR label rules, and returns
* an integer. If the string is not a valid int, 0 is quietly returned.
*/
protected int getIntegerFromString(String s)
{
int value = 0;
if (s.charAt(0) == '+') // parseInt doesn't like +
s = s.substring(1); // delete it
try {
value = Integer.parseInt(s);
} catch (NumberFormatException e) {
value = 0;
}
return value;
}
/***********************************************************************
* Parses the given String according to the VICAR label rules, and returns
* a float. If the string is not a valid float, 0.0 is quietly returned.
*/
protected float getFloatFromString(String s)
{
float value = 0.0f;
s.replace('d', 'e'); // parseFloat doesn't like d/D for
s.replace('D', 'E'); // exponents, so use e/E
try {
value = Float.parseFloat(s);
} catch (NumberFormatException e) {
value = 0.0f;
}
return value;
}
/***********************************************************************
* Parses the given String according to the VICAR label rules, and returns
* a double. If the string is not a valid double, 0.0 is quietly returned.
*/
protected double getDoubleFromString(String s)
{
double value = 0.0;
s.replace('d', 'e'); // parseDouble doesn't like d/D for
s.replace('D', 'E'); // exponents, so use e/E
try {
value = Double.parseDouble(s);
} catch (NumberFormatException e) {
value = 0.0;
}
return value;
}
    /***********************************************************************
     * "Guess" at the type of the string by looking for non-digit characters.
     * Updates <code>_type</code> appropriately. <code>_type</code> will only
     * be "promoted", e.g. unspecified->int->real->string, never the reverse...
     * presumably other elements in the value already constrain it.
     * <p>
     * If the type is already specified, this routine ensures that the string
     * is in fact valid for that type. If not, the type is quietly demoted to
     * the appropriate less-restrictive type.
     * <p>
     * Integer characters are <code>+-0123456789</code>
     * Float characters are <code>+-0123456789.eEdD</code>
     * Double is never guessed by this routine.
     */
    protected void guessType(String s)
    {
        if (_type == TYPE_STRING)	// can't override this...
            return;
        // Assume both numeric interpretations until a character rules one out
        boolean int_okay = true;
        boolean float_okay = true;
        String int_chars = "+-0123456789";
        String float_chars = "+-0123456789.eEdD";
        int len = s.length();
        for (int i=0; i < len; i++) {
            int ch = s.charAt(i);	// char widens to int for indexOf()
            if (float_chars.indexOf(ch) == -1)	// not found!
                float_okay = false;
            if (int_chars.indexOf(ch) == -1)	// not found!
                int_okay = false;
            if (!float_okay && !int_okay) {
                _type = TYPE_STRING;
                return;			// it won't get better...
            }
        }
        // No declared type: promote to the most specific numeric type that
        // fits.  Int is checked last so it wins over float when both apply.
        if (_type == TYPE_UNSPECIFIED) {
            if (float_okay)
                _type = TYPE_FLOAT;
            if (int_okay)
                _type = TYPE_INTEGER;
            return;
        }
        // Declared INTEGER: demote to float (or string) if this element
        // can't actually be an int.
        if (_type == TYPE_INTEGER) {
            if (!int_okay)
                _type = TYPE_FLOAT;
            if (!float_okay)	// oops, shouldn't happen
                _type = TYPE_STRING;
            return;
        }
        // must be TYPE_FLOAT or TYPE_DOUBLE
        if (!float_okay)	// oops, shouldn't happen
            _type = TYPE_STRING;
        return;
    }
/***********************************************************************
* Checks to see if the entire value (all elements) is consistent with the
* given type, and sets _type appropriately. The idea is, someone made a
* change to a single element with the given type, so we see if that type
* *can* be applied to the entire value, in preference to anything already
* there. After all, the user might have just replaced the only value
* that was invalid for TYPE_INTEGER, for example.
*/
protected void checkConsistentType(int type)
{
_type = type; // this is what we want
if (_type == TYPE_STRING)
return; // no point in checking further
for (int i=0; i < _value.size(); i++) {
guessType((String)_value.get(i));
}
}
/***********************************************************************
* Creates a deep copy of the object. Since _value is a vector of String,
* we don't need to copy the strings themselves, just the vector.
*/
public Object clone()
{
VicarLabelItem item;
try {
item = (VicarLabelItem)super.clone();
} catch (Exception e) {
return null;
}
item._value = (ArrayList)((ArrayList)(item._value)).clone();
return item;
}
}
| |
// GenericsNote: Converted.
/*
* Copyright 2003-2004 The Apache Software Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.collections15.map;
import org.apache.commons.collections15.*;
import org.apache.commons.collections15.iterators.EmptyOrderedIterator;
import org.apache.commons.collections15.iterators.EmptyOrderedMapIterator;
import java.util.ConcurrentModificationException;
import java.util.Iterator;
import java.util.Map;
import java.util.NoSuchElementException;
/**
* An abstract implementation of a hash-based map that links entries to create an
* ordered map and which provides numerous points for subclasses to override.
* <p/>
* This class implements all the features necessary for a subclass linked
* hash-based map. Key-value entries are stored in instances of the
* <code>LinkEntry</code> class which can be overridden and replaced.
* The iterators can similarly be replaced, without the need to replace the KeySet,
* EntrySet and Values view classes.
* <p/>
* Overridable methods are provided to change the default hashing behaviour, and
* to change how entries are added to and removed from the map. Hopefully, all you
* need for unusual subclasses is here.
* <p/>
* This implementation maintains order by original insertion, but subclasses
* may work differently. The <code>OrderedMap</code> interface is implemented
* to provide access to bidirectional iteration and extra convenience methods.
* <p/>
* The <code>orderedMapIterator()</code> method provides direct access to a
* bidirectional iterator. The iterators from the other views can also be cast
* to <code>OrderedIterator</code> if required.
* <p/>
* All the available iterators can be reset back to the start by casting to
* <code>ResettableIterator</code> and calling <code>reset()</code>.
* <p/>
* The implementation is also designed to be subclassed, with lots of useful
* methods exposed.
*
* @author java util LinkedHashMap
* @author Matt Hall, John Watkinson, Stephen Colebourne
* @version $Revision: 1.1 $ $Date: 2005/10/11 17:05:32 $
* @since Commons Collections 3.0
*/
public class AbstractLinkedMap <K,V> extends AbstractHashedMap<K, V> implements OrderedMap<K, V> {
    /**
     * Header in the linked list. This is a sentinel entry: it holds no
     * key/value; <code>header.after</code> is the eldest (first inserted)
     * entry and <code>header.before</code> the youngest (most recently
     * inserted).
     */
    protected transient LinkEntry<K, V> header;
    /**
     * Constructor only used in deserialization, do not use otherwise.
     */
    protected AbstractLinkedMap() {
        super();
    }
    /**
     * Constructor which performs no validation on the passed in parameters.
     *
     * @param initialCapacity the initial capacity, must be a power of two
     * @param loadFactor the load factor, must be > 0.0f and generally < 1.0f
     * @param threshold the threshold, must be sensible
     */
    protected AbstractLinkedMap(int initialCapacity, float loadFactor, int threshold) {
        super(initialCapacity, loadFactor, threshold);
    }
    /**
     * Constructs a new, empty map with the specified initial capacity.
     *
     * @param initialCapacity the initial capacity
     * @throws IllegalArgumentException if the initial capacity is less than one
     */
    protected AbstractLinkedMap(int initialCapacity) {
        super(initialCapacity);
    }
    /**
     * Constructs a new, empty map with the specified initial capacity and
     * load factor.
     *
     * @param initialCapacity the initial capacity
     * @param loadFactor the load factor
     * @throws IllegalArgumentException if the initial capacity is less than one
     * @throws IllegalArgumentException if the load factor is less than zero
     */
    protected AbstractLinkedMap(int initialCapacity, float loadFactor) {
        super(initialCapacity, loadFactor);
    }
    /**
     * Constructor copying elements from another map.
     *
     * @param map the map to copy
     * @throws NullPointerException if the map is null
     */
    protected AbstractLinkedMap(Map<? extends K, ? extends V> map) {
        super(map);
    }
    /**
     * Initialise this subclass during construction.
     * Creates the sentinel header entry and links it to itself, which is the
     * representation of an empty list.
     */
    protected void init() {
        header = new LinkEntry<K, V>(null, -1, null, null);
        header.before = header.after = header;
    }
    //-----------------------------------------------------------------------
    /**
     * Checks whether the map contains the specified value.
     *
     * @param value the value to search for
     * @return true if the map contains the value
     */
    public boolean containsValue(Object value) {
        // override uses faster iterator (walks the link chain directly,
        // avoiding bucket traversal and iterator allocation)
        if (value == null) {
            for (LinkEntry entry = header.after; entry != header; entry = entry.after) {
                if (entry.getValue() == null) {
                    return true;
                }
            }
        } else {
            for (LinkEntry entry = header.after; entry != header; entry = entry.after) {
                if (isEqualValue(value, entry.getValue())) {
                    return true;
                }
            }
        }
        return false;
    }
    /**
     * Clears the map, resetting the size to zero and nullifying references
     * to avoid garbage collection issues.
     */
    public void clear() {
        // override to reset the linked list back to the empty (self-linked) state
        super.clear();
        header.before = header.after = header;
    }
    //-----------------------------------------------------------------------
    /**
     * Gets the first key in the map, which is the first inserted
     * (<code>header.after</code> in the link chain).
     *
     * @return the eldest key
     */
    public K firstKey() {
        if (size == 0) {
            throw new NoSuchElementException("Map is empty");
        }
        return header.after.getKey();
    }
    /**
     * Gets the last key in the map, which is the most recently inserted
     * (<code>header.before</code> in the link chain).
     *
     * @return the most recently inserted key
     */
    public K lastKey() {
        if (size == 0) {
            throw new NoSuchElementException("Map is empty");
        }
        return header.before.getKey();
    }
    /**
     * Gets the next key in sequence.
     *
     * @param key the key to get after
     * @return the next key, or null if the key is absent or is the last key
     */
    public K nextKey(K key) {
        LinkEntry<K, V> entry = (LinkEntry<K, V>) getEntry(key);
        return (entry == null || entry.after == header ? null : entry.after.getKey());
    }
    /**
     * Gets the previous key in sequence.
     *
     * @param key the key to get before
     * @return the previous key, or null if the key is absent or is the first key
     */
    public K previousKey(K key) {
        LinkEntry<K, V> entry = (LinkEntry<K, V>) getEntry(key);
        return (entry == null || entry.before == header ? null : entry.before.getKey());
    }
    //-----------------------------------------------------------------------
    /**
     * Gets the entry at the specified index in insertion order.
     * Walks the link chain from whichever end is closer, so this is O(n/2)
     * in the worst case.
     *
     * @param index the index to retrieve
     * @return the entry at the specified index
     * @throws IndexOutOfBoundsException if the index is invalid
     */
    protected LinkEntry<K, V> getEntry(int index) {
        if (index < 0) {
            throw new IndexOutOfBoundsException("Index " + index + " is less than zero");
        }
        if (index >= size) {
            throw new IndexOutOfBoundsException("Index " + index + " is invalid for size " + size);
        }
        LinkEntry<K, V> entry;
        if (index < (size / 2)) {
            // Search forwards
            entry = header.after;
            for (int currentIndex = 0; currentIndex < index; currentIndex++) {
                entry = entry.after;
            }
        } else {
            // Search backwards
            entry = header;
            for (int currentIndex = size; currentIndex > index; currentIndex--) {
                entry = entry.before;
            }
        }
        return entry;
    }
    /**
     * Adds an entry into this map, maintaining insertion order.
     * <p/>
     * This implementation adds the entry to the data storage table and
     * to the end of the linked list.
     *
     * @param entry the entry to add
     * @param hashIndex the index into the data array to store at
     */
    protected void addEntry(HashEntry<K, V> entry, int hashIndex) {
        LinkEntry<K, V> link = (LinkEntry<K, V>) entry;
        // splice the new link in just before the header, i.e. at the tail
        // of the insertion-ordered list
        link.after = header;
        link.before = header.before;
        header.before.after = link;
        header.before = link;
        data[hashIndex] = entry;
    }
    /**
     * Creates an entry to store the data.
     * <p/>
     * This implementation creates a new LinkEntry instance.
     *
     * @param next the next entry in sequence
     * @param hashCode the hash code to use
     * @param key the key to store
     * @param value the value to store
     * @return the newly created entry
     */
    protected HashEntry<K, V> createEntry(HashEntry<K, V> next, int hashCode, K key, V value) {
        return new LinkEntry<K, V>(next, hashCode, key, value);
    }
    /**
     * Removes an entry from the map and the linked list.
     * <p/>
     * This implementation removes the entry from the linked list chain, then
     * calls the superclass implementation.
     *
     * @param entry the entry to remove
     * @param hashIndex the index into the data structure
     * @param previous the previous entry in the chain
     */
    protected void removeEntry(HashEntry<K, V> entry, int hashIndex, HashEntry<K, V> previous) {
        LinkEntry<K, V> link = (LinkEntry<K, V>) entry;
        // unlink from the doubly-linked list first, nulling the references
        // to help garbage collection, then remove from the hash table
        link.before.after = link.after;
        link.after.before = link.before;
        link.after = null;
        link.before = null;
        super.removeEntry(entry, hashIndex, previous);
    }
    //-----------------------------------------------------------------------
    /**
     * Gets the <code>before</code> field from a <code>LinkEntry</code>.
     * Used in subclasses that have no visibility of the field.
     *
     * @param entry the entry to query, must not be null
     * @return the <code>before</code> field of the entry
     * @throws NullPointerException if the entry is null
     * @since Commons Collections 3.1
     */
    protected LinkEntry<K, V> entryBefore(LinkEntry<K, V> entry) {
        return entry.before;
    }
    /**
     * Gets the <code>after</code> field from a <code>LinkEntry</code>.
     * Used in subclasses that have no visibility of the field.
     *
     * @param entry the entry to query, must not be null
     * @return the <code>after</code> field of the entry
     * @throws NullPointerException if the entry is null
     * @since Commons Collections 3.1
     */
    protected LinkEntry<K, V> entryAfter(LinkEntry<K, V> entry) {
        return entry.after;
    }
    //-----------------------------------------------------------------------
    /**
     * Gets an iterator over the map.
     * Changes made to the iterator affect this map.
     * <p/>
     * A MapIterator returns the keys in the map. It also provides convenient
     * methods to get the key and value, and set the value.
     * It avoids the need to create an entrySet/keySet/values object.
     *
     * @return the map iterator
     */
    public MapIterator<K, V> mapIterator() {
        if (size == 0) {
            return EmptyOrderedMapIterator.INSTANCE;
        }
        return new LinkMapIterator<K, V>(this);
    }
    /**
     * Gets a bidirectional iterator over the map.
     * Changes made to the iterator affect this map.
     * <p/>
     * A MapIterator returns the keys in the map. It also provides convenient
     * methods to get the key and value, and set the value.
     * It avoids the need to create an entrySet/keySet/values object.
     *
     * @return the map iterator
     */
    public OrderedMapIterator<K, V> orderedMapIterator() {
        if (size == 0) {
            return EmptyOrderedMapIterator.INSTANCE;
        }
        return new LinkMapIterator<K, V>(this);
    }
    /**
     * MapIterator implementation.
     */
    protected static class LinkMapIterator <K,V> extends LinkIterator<K, V> implements OrderedMapIterator<K, V>, OrderedIterator<K>, ResettableIterator<K> {
        protected LinkMapIterator(AbstractLinkedMap<K, V> parent) {
            super(parent);
        }
        public K next() {
            return super.nextEntry().getKey();
        }
        public K previous() {
            return super.previousEntry().getKey();
        }
        public K getKey() {
            HashEntry<K, V> current = currentEntry();
            if (current == null) {
                throw new IllegalStateException(AbstractHashedMap.GETKEY_INVALID);
            }
            return current.getKey();
        }
        public V getValue() {
            HashEntry<K, V> current = currentEntry();
            if (current == null) {
                throw new IllegalStateException(AbstractHashedMap.GETVALUE_INVALID);
            }
            return current.getValue();
        }
        public V setValue(V value) {
            HashEntry<K, V> current = currentEntry();
            if (current == null) {
                throw new IllegalStateException(AbstractHashedMap.SETVALUE_INVALID);
            }
            return current.setValue(value);
        }
    }
    //-----------------------------------------------------------------------
    /**
     * Creates an entry set iterator.
     * Subclasses can override this to return iterators with different properties.
     *
     * @return the entrySet iterator
     */
    protected Iterator<Map.Entry<K, V>> createEntrySetIterator() {
        if (size() == 0) {
            return EmptyOrderedIterator.INSTANCE;
        }
        return new EntrySetIterator<K, V>(this);
    }
    /**
     * EntrySet iterator.
     */
    protected static class EntrySetIterator <K,V> extends LinkIterator<K, V> implements OrderedIterator<Map.Entry<K, V>>, ResettableIterator<Map.Entry<K, V>> {
        protected EntrySetIterator(AbstractLinkedMap<K, V> parent) {
            super(parent);
        }
        public Map.Entry<K, V> next() {
            return super.nextEntry();
        }
        public Map.Entry<K, V> previous() {
            return super.previousEntry();
        }
    }
    //-----------------------------------------------------------------------
    /**
     * Creates a key set iterator.
     * Subclasses can override this to return iterators with different properties.
     *
     * @return the keySet iterator
     */
    protected Iterator createKeySetIterator() {
        if (size() == 0) {
            return EmptyOrderedIterator.INSTANCE;
        }
        return new KeySetIterator(this);
    }
    /**
     * KeySet iterator.
     */
    protected static class KeySetIterator <K,V> extends LinkIterator<K, V> implements OrderedIterator<K>, ResettableIterator<K> {
        protected KeySetIterator(AbstractLinkedMap<K, V> parent) {
            super(parent);
        }
        public K next() {
            return super.nextEntry().getKey();
        }
        public K previous() {
            return super.previousEntry().getKey();
        }
    }
    //-----------------------------------------------------------------------
    /**
     * Creates a values iterator.
     * Subclasses can override this to return iterators with different properties.
     *
     * @return the values iterator
     */
    protected Iterator<V> createValuesIterator() {
        if (size() == 0) {
            return EmptyOrderedIterator.INSTANCE;
        }
        return new ValuesIterator<K, V>(this);
    }
    /**
     * Values iterator.
     */
    protected static class ValuesIterator <K,V> extends LinkIterator<K, V> implements OrderedIterator<V>, ResettableIterator<V> {
        protected ValuesIterator(AbstractLinkedMap<K, V> parent) {
            super(parent);
        }
        public V next() {
            return super.nextEntry().getValue();
        }
        public V previous() {
            return super.previousEntry().getValue();
        }
    }
    //-----------------------------------------------------------------------
    /**
     * LinkEntry that stores the data.
     * <p/>
     * If you subclass <code>AbstractLinkedMap</code> but not <code>LinkEntry</code>
     * then you will not be able to access the protected fields.
     * The <code>entryXxx()</code> methods on <code>AbstractLinkedMap</code> exist
     * to provide the necessary access.
     */
    protected static class LinkEntry <K,V> extends HashEntry<K, V> {
        /**
         * The entry before this one in the order
         */
        protected LinkEntry<K, V> before;
        /**
         * The entry after this one in the order
         */
        protected LinkEntry<K, V> after;
        /**
         * Constructs a new entry.
         *
         * @param next the next entry in the hash bucket sequence
         * @param hashCode the hash code
         * @param key the key
         * @param value the value
         */
        protected LinkEntry(HashEntry<K, V> next, int hashCode, K key, V value) {
            super(next, hashCode, key, value);
        }
    }
    /**
     * Base Iterator that iterates in link order. Fails fast on concurrent
     * modification of the parent map.
     */
    protected static abstract class LinkIterator <K,V> {
        /**
         * The parent map
         */
        protected final AbstractLinkedMap<K, V> parent;
        /**
         * The current (last returned) entry
         */
        protected LinkEntry<K, V> last;
        /**
         * The next entry
         */
        protected LinkEntry<K, V> next;
        /**
         * The modification count expected
         */
        protected int expectedModCount;
        protected LinkIterator(AbstractLinkedMap<K, V> parent) {
            super();
            this.parent = parent;
            this.next = parent.header.after;
            this.expectedModCount = parent.modCount;
        }
        public boolean hasNext() {
            return (next != parent.header);
        }
        public boolean hasPrevious() {
            return (next.before != parent.header);
        }
        protected LinkEntry<K, V> nextEntry() {
            if (parent.modCount != expectedModCount) {
                throw new ConcurrentModificationException();
            }
            if (next == parent.header) {
                throw new NoSuchElementException(AbstractHashedMap.NO_NEXT_ENTRY);
            }
            last = next;
            next = next.after;
            return last;
        }
        protected LinkEntry<K, V> previousEntry() {
            if (parent.modCount != expectedModCount) {
                throw new ConcurrentModificationException();
            }
            LinkEntry<K, V> previous = next.before;
            if (previous == parent.header) {
                throw new NoSuchElementException(AbstractHashedMap.NO_PREVIOUS_ENTRY);
            }
            next = previous;
            last = previous;
            return last;
        }
        protected LinkEntry<K, V> currentEntry() {
            return last;
        }
        public void remove() {
            if (last == null) {
                throw new IllegalStateException(AbstractHashedMap.REMOVE_INVALID);
            }
            if (parent.modCount != expectedModCount) {
                throw new ConcurrentModificationException();
            }
            // removal goes through the parent so both the hash table and the
            // link chain stay consistent; this bumps modCount, so resync
            parent.remove(last.getKey());
            last = null;
            expectedModCount = parent.modCount;
        }
        public void reset() {
            last = null;
            next = parent.header.after;
        }
        public String toString() {
            if (last != null) {
                return "Iterator[" + last.getKey() + "=" + last.getValue() + "]";
            } else {
                return "Iterator[]";
            }
        }
    }
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v8/common/criteria.proto
package com.google.ads.googleads.v8.common;
/**
* <pre>
* Represents a Carrier Criterion.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v8.common.CarrierInfo}
*/
public final class CarrierInfo extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v8.common.CarrierInfo)
CarrierInfoOrBuilder {
private static final long serialVersionUID = 0L;
// Use CarrierInfo.newBuilder() to construct.
private CarrierInfo(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private CarrierInfo() {
carrierConstant_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new CarrierInfo();
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private CarrierInfo(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 18: {
java.lang.String s = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
carrierConstant_ = s;
break;
}
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v8.common.CriteriaProto.internal_static_google_ads_googleads_v8_common_CarrierInfo_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v8.common.CriteriaProto.internal_static_google_ads_googleads_v8_common_CarrierInfo_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v8.common.CarrierInfo.class, com.google.ads.googleads.v8.common.CarrierInfo.Builder.class);
}
private int bitField0_;
public static final int CARRIER_CONSTANT_FIELD_NUMBER = 2;
private volatile java.lang.Object carrierConstant_;
/**
* <pre>
* The Carrier constant resource name.
* </pre>
*
* <code>optional string carrier_constant = 2;</code>
* @return Whether the carrierConstant field is set.
*/
@java.lang.Override
public boolean hasCarrierConstant() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* <pre>
* The Carrier constant resource name.
* </pre>
*
* <code>optional string carrier_constant = 2;</code>
* @return The carrierConstant.
*/
@java.lang.Override
public java.lang.String getCarrierConstant() {
java.lang.Object ref = carrierConstant_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
carrierConstant_ = s;
return s;
}
}
/**
* <pre>
* The Carrier constant resource name.
* </pre>
*
* <code>optional string carrier_constant = 2;</code>
* @return The bytes for carrierConstant.
*/
@java.lang.Override
public com.google.protobuf.ByteString
getCarrierConstantBytes() {
java.lang.Object ref = carrierConstant_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
carrierConstant_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, carrierConstant_);
}
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, carrierConstant_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.ads.googleads.v8.common.CarrierInfo)) {
return super.equals(obj);
}
com.google.ads.googleads.v8.common.CarrierInfo other = (com.google.ads.googleads.v8.common.CarrierInfo) obj;
if (hasCarrierConstant() != other.hasCarrierConstant()) return false;
if (hasCarrierConstant()) {
if (!getCarrierConstant()
.equals(other.getCarrierConstant())) return false;
}
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasCarrierConstant()) {
hash = (37 * hash) + CARRIER_CONSTANT_FIELD_NUMBER;
hash = (53 * hash) + getCarrierConstant().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
  // Generated parseFrom overloads: byte-backed sources delegate to PARSER;
  // stream-backed sources go through the GeneratedMessageV3 IO helpers, which
  // wrap IOExceptions consistently.
  public static com.google.ads.googleads.v8.common.CarrierInfo parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v8.common.CarrierInfo parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v8.common.CarrierInfo parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v8.common.CarrierInfo parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v8.common.CarrierInfo parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v8.common.CarrierInfo parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v8.common.CarrierInfo parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v8.common.CarrierInfo parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  // Delimited variants read a length prefix first, for streams holding multiple messages.
  public static com.google.ads.googleads.v8.common.CarrierInfo parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v8.common.CarrierInfo parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v8.common.CarrierInfo parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v8.common.CarrierInfo parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  // Builder factory methods (generated boilerplate).
  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.ads.googleads.v8.common.CarrierInfo prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // For the default instance a fresh Builder is equivalent and skips a no-op merge.
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   * Represents a Carrier Criterion.
   * </pre>
   *
   * Protobuf type {@code google.ads.googleads.v8.common.CarrierInfo}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.ads.googleads.v8.common.CarrierInfo)
      com.google.ads.googleads.v8.common.CarrierInfoOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.ads.googleads.v8.common.CriteriaProto.internal_static_google_ads_googleads_v8_common_CarrierInfo_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.ads.googleads.v8.common.CriteriaProto.internal_static_google_ads_googleads_v8_common_CarrierInfo_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.ads.googleads.v8.common.CarrierInfo.class, com.google.ads.googleads.v8.common.CarrierInfo.Builder.class);
    }
    // Construct using com.google.ads.googleads.v8.common.CarrierInfo.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      // No message-typed fields, so there are no nested builders to pre-create.
      if (com.google.protobuf.GeneratedMessageV3
              .alwaysUseFieldBuilders) {
      }
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      // Reset the field value and its has-bit.
      carrierConstant_ = "";
      bitField0_ = (bitField0_ & ~0x00000001);
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.ads.googleads.v8.common.CriteriaProto.internal_static_google_ads_googleads_v8_common_CarrierInfo_descriptor;
    }
    @java.lang.Override
    public com.google.ads.googleads.v8.common.CarrierInfo getDefaultInstanceForType() {
      return com.google.ads.googleads.v8.common.CarrierInfo.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.ads.googleads.v8.common.CarrierInfo build() {
      com.google.ads.googleads.v8.common.CarrierInfo result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.ads.googleads.v8.common.CarrierInfo buildPartial() {
      com.google.ads.googleads.v8.common.CarrierInfo result = new com.google.ads.googleads.v8.common.CarrierInfo(this);
      // Copy the has-bit and the field value into the immutable message.
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        to_bitField0_ |= 0x00000001;
      }
      result.carrierConstant_ = carrierConstant_;
      result.bitField0_ = to_bitField0_;
      onBuilt();
      return result;
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      // Dispatch to the typed merge when possible; otherwise use reflection-based merge.
      if (other instanceof com.google.ads.googleads.v8.common.CarrierInfo) {
        return mergeFrom((com.google.ads.googleads.v8.common.CarrierInfo)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(com.google.ads.googleads.v8.common.CarrierInfo other) {
      if (other == com.google.ads.googleads.v8.common.CarrierInfo.getDefaultInstance()) return this;
      // A carrier_constant set in 'other' overwrites the local value.
      if (other.hasCarrierConstant()) {
        bitField0_ |= 0x00000001;
        carrierConstant_ = other.carrierConstant_;
        onChanged();
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      // No required fields, so any state is valid.
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.ads.googleads.v8.common.CarrierInfo parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        // On failure, keep whatever was parsed so far and rethrow as IOException.
        parsedMessage = (com.google.ads.googleads.v8.common.CarrierInfo) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
    private int bitField0_;
    // Holds either a String or a ByteString; converted lazily (see accessors below).
    private java.lang.Object carrierConstant_ = "";
    /**
     * <pre>
     * The Carrier constant resource name.
     * </pre>
     *
     * <code>optional string carrier_constant = 2;</code>
     * @return Whether the carrierConstant field is set.
     */
    public boolean hasCarrierConstant() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <pre>
     * The Carrier constant resource name.
     * </pre>
     *
     * <code>optional string carrier_constant = 2;</code>
     * @return The carrierConstant.
     */
    public java.lang.String getCarrierConstant() {
      java.lang.Object ref = carrierConstant_;
      // Lazily decode a ByteString-backed value to String and cache the result.
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        carrierConstant_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <pre>
     * The Carrier constant resource name.
     * </pre>
     *
     * <code>optional string carrier_constant = 2;</code>
     * @return The bytes for carrierConstant.
     */
    public com.google.protobuf.ByteString
        getCarrierConstantBytes() {
      java.lang.Object ref = carrierConstant_;
      // Mirror of getCarrierConstant(): encode a String-backed value and cache it.
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        carrierConstant_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <pre>
     * The Carrier constant resource name.
     * </pre>
     *
     * <code>optional string carrier_constant = 2;</code>
     * @param value The carrierConstant to set.
     * @return This builder for chaining.
     */
    public Builder setCarrierConstant(
        java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000001;
      carrierConstant_ = value;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * The Carrier constant resource name.
     * </pre>
     *
     * <code>optional string carrier_constant = 2;</code>
     * @return This builder for chaining.
     */
    public Builder clearCarrierConstant() {
      bitField0_ = (bitField0_ & ~0x00000001);
      carrierConstant_ = getDefaultInstance().getCarrierConstant();
      onChanged();
      return this;
    }
    /**
     * <pre>
     * The Carrier constant resource name.
     * </pre>
     *
     * <code>optional string carrier_constant = 2;</code>
     * @param value The bytes for carrierConstant to set.
     * @return This builder for chaining.
     */
    public Builder setCarrierConstantBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      bitField0_ |= 0x00000001;
      carrierConstant_ = value;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.ads.googleads.v8.common.CarrierInfo)
  }
  // @@protoc_insertion_point(class_scope:google.ads.googleads.v8.common.CarrierInfo)
  // Shared immutable default instance, created eagerly at class-load time.
  private static final com.google.ads.googleads.v8.common.CarrierInfo DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.ads.googleads.v8.common.CarrierInfo();
  }
  public static com.google.ads.googleads.v8.common.CarrierInfo getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser singleton; delegates to the message's stream-reading constructor.
  private static final com.google.protobuf.Parser<CarrierInfo>
      PARSER = new com.google.protobuf.AbstractParser<CarrierInfo>() {
    @java.lang.Override
    public CarrierInfo parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return new CarrierInfo(input, extensionRegistry);
    }
  };
  public static com.google.protobuf.Parser<CarrierInfo> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<CarrierInfo> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.ads.googleads.v8.common.CarrierInfo getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
| |
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// THIS IS MODIFIED COPY OF THE "L" PLATFORM CLASS. BE CAREFUL ABOUT EDITS.
// THIS CODE SHOULD FOLLOW ANDROID STYLE.
//
// Changes:
// Replace ArrayMap (new in Android L) with HashMap
package org.uribeacon.advertise.compat;
import android.os.Parcel;
import android.os.ParcelUuid;
import android.os.Parcelable;
import android.support.annotation.Nullable;
import android.util.SparseArray;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
/**
* Advertise data packet container for Bluetooth LE advertising. This represents the data to be
* advertised as well as the scan response data for active scans.
* <p>
* Use {@link AdvertiseData.Builder} to create an instance of {@link AdvertiseData} to be
* advertised.
*
* @see BluetoothLeAdvertiser
* @see ScanRecord
*/
public final class AdvertiseData implements Parcelable {
    /** Service UUIDs to advertise; @Nullable per the field annotation. */
    @Nullable
    private final List<ParcelUuid> mServiceUuids;
    /** Manufacturer id -> manufacturer specific data; never null (see writeToParcel). */
    private final SparseArray<byte[]> mManufacturerSpecificData;
    /** Service UUID -> service data. */
    private final Map<ParcelUuid, byte[]> mServiceData;
    private final boolean mIncludeTxPowerLevel;
    private final boolean mIncludeDeviceName;
    private AdvertiseData(List<ParcelUuid> serviceUuids,
                          SparseArray<byte[]> manufacturerData,
                          Map<ParcelUuid, byte[]> serviceData,
                          boolean includeTxPowerLevel,
                          boolean includeDeviceName) {
        mServiceUuids = serviceUuids;
        mManufacturerSpecificData = manufacturerData;
        mServiceData = serviceData;
        mIncludeTxPowerLevel = includeTxPowerLevel;
        mIncludeDeviceName = includeDeviceName;
    }
    /**
     * Returns a list of service UUIDs within the advertisement that are used to identify the
     * Bluetooth GATT services.
     */
    public List<ParcelUuid> getServiceUuids() {
        return mServiceUuids;
    }
    /**
     * Returns an array of manufacturer Id and the corresponding manufacturer specific data. The
     * manufacturer id is a non-negative number assigned by Bluetooth SIG.
     */
    public SparseArray<byte[]> getManufacturerSpecificData() {
        return mManufacturerSpecificData;
    }
    /**
     * Returns a map of 16-bit UUID and its corresponding service data.
     */
    public Map<ParcelUuid, byte[]> getServiceData() {
        return mServiceData;
    }
    /**
     * Whether the transmission power level will be included in the advertisement packet.
     */
    public boolean getIncludeTxPowerLevel() {
        return mIncludeTxPowerLevel;
    }
    /**
     * Whether the device name will be included in the advertisement packet.
     */
    public boolean getIncludeDeviceName() {
        return mIncludeDeviceName;
    }
    /**
     * @hide
     */
    @Override
    public int hashCode() {
        // Requires java.util.Objects (API 19+); imported at the top of this file.
        return Objects.hash(mServiceUuids, mManufacturerSpecificData, mServiceData,
                mIncludeDeviceName, mIncludeTxPowerLevel);
    }
    /**
     * @hide
     */
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        AdvertiseData other = (AdvertiseData) obj;
        // SparseArray and byte[]-valued maps need the deep comparison in BluetoothLeUtils.
        return Objects.equals(mServiceUuids, other.mServiceUuids) &&
                BluetoothLeUtils.equals(mManufacturerSpecificData, other.mManufacturerSpecificData) &&
                BluetoothLeUtils.equals(mServiceData, other.mServiceData) &&
                mIncludeDeviceName == other.mIncludeDeviceName &&
                mIncludeTxPowerLevel == other.mIncludeTxPowerLevel;
    }
    @Override
    public String toString() {
        return "AdvertiseData [mServiceUuids=" + mServiceUuids + ", mManufacturerSpecificData="
                + BluetoothLeUtils.toString(mManufacturerSpecificData) + ", mServiceData="
                + BluetoothLeUtils.toString(mServiceData)
                + ", mIncludeTxPowerLevel=" + mIncludeTxPowerLevel + ", mIncludeDeviceName="
                + mIncludeDeviceName + "]";
    }
    @Override
    public int describeContents() {
        return 0;
    }
    @Override
    public void writeToParcel(Parcel dest, int flags) {
        // Parcel layout (mirrored by CREATOR.createFromParcel):
        //   service UUID list, then manufacturer entries as
        //   (id, presence flag, [length, bytes]), then service-data entries as
        //   (uuid, presence flag, [length, bytes]), then the two include flags.
        dest.writeList(mServiceUuids);
        // mManufacturerSpecificData cannot be null.
        dest.writeInt(mManufacturerSpecificData.size());
        for (int i = 0; i < mManufacturerSpecificData.size(); ++i) {
            dest.writeInt(mManufacturerSpecificData.keyAt(i));
            byte[] data = mManufacturerSpecificData.valueAt(i);
            if (data == null) {
                dest.writeInt(0);
            } else {
                dest.writeInt(1);
                dest.writeInt(data.length);
                dest.writeByteArray(data);
            }
        }
        dest.writeInt(mServiceData.size());
        for (ParcelUuid uuid : mServiceData.keySet()) {
            dest.writeParcelable(uuid, flags);
            byte[] data = mServiceData.get(uuid);
            if (data == null) {
                dest.writeInt(0);
            } else {
                dest.writeInt(1);
                dest.writeInt(data.length);
                dest.writeByteArray(data);
            }
        }
        dest.writeByte((byte) (getIncludeTxPowerLevel() ? 1 : 0));
        dest.writeByte((byte) (getIncludeDeviceName() ? 1 : 0));
    }
    public static final Parcelable.Creator<AdvertiseData> CREATOR =
            new Creator<AdvertiseData>() {
                @Override
                public AdvertiseData[] newArray(int size) {
                    return new AdvertiseData[size];
                }
                @Override
                public AdvertiseData createFromParcel(Parcel in) {
                    // Reads the exact layout produced by writeToParcel, rebuilding via Builder.
                    Builder builder = new Builder();
                    @SuppressWarnings("unchecked")
                    List<ParcelUuid> uuids = in.readArrayList(ParcelUuid.class.getClassLoader());
                    if (uuids != null) {
                        for (ParcelUuid uuid : uuids) {
                            builder.addServiceUuid(uuid);
                        }
                    }
                    int manufacturerSize = in.readInt();
                    for (int i = 0; i < manufacturerSize; ++i) {
                        int manufacturerId = in.readInt();
                        // Presence flag: 0 means the entry had a null payload and is dropped.
                        if (in.readInt() == 1) {
                            int manufacturerDataLength = in.readInt();
                            byte[] manufacturerData = new byte[manufacturerDataLength];
                            in.readByteArray(manufacturerData);
                            builder.addManufacturerData(manufacturerId, manufacturerData);
                        }
                    }
                    int serviceDataSize = in.readInt();
                    for (int i = 0; i < serviceDataSize; ++i) {
                        ParcelUuid serviceDataUuid = in.readParcelable(
                                ParcelUuid.class.getClassLoader());
                        if (in.readInt() == 1) {
                            int serviceDataLength = in.readInt();
                            byte[] serviceData = new byte[serviceDataLength];
                            in.readByteArray(serviceData);
                            builder.addServiceData(serviceDataUuid, serviceData);
                        }
                    }
                    builder.setIncludeTxPowerLevel(in.readByte() == 1);
                    builder.setIncludeDeviceName(in.readByte() == 1);
                    return builder.build();
                }
            };
    /**
     * Builder for {@link AdvertiseData}.
     */
    public static final class Builder {
        @Nullable
        private List<ParcelUuid> mServiceUuids = new ArrayList<ParcelUuid>();
        private SparseArray<byte[]> mManufacturerSpecificData = new SparseArray<byte[]>();
        private Map<ParcelUuid, byte[]> mServiceData = new HashMap<ParcelUuid, byte[]>();
        private boolean mIncludeTxPowerLevel;
        private boolean mIncludeDeviceName;
        /**
         * Add a service UUID to advertise data.
         *
         * @param serviceUuid A service UUID to be advertised.
         * @throws IllegalArgumentException If the {@code serviceUuid} is null.
         */
        public Builder addServiceUuid(ParcelUuid serviceUuid) {
            if (serviceUuid == null) {
                // Fixed typo in the original message ("serivceUuids are null").
                throw new IllegalArgumentException("serviceUuid is null");
            }
            mServiceUuids.add(serviceUuid);
            return this;
        }
        /**
         * Add service data to advertise data.
         *
         * @param serviceDataUuid 16-bit UUID of the service the data is associated with
         * @param serviceData Service data
         * @throws IllegalArgumentException If the {@code serviceDataUuid} or {@code serviceData} is
         *             null.
         */
        public Builder addServiceData(ParcelUuid serviceDataUuid, byte[] serviceData) {
            if (serviceDataUuid == null || serviceData == null) {
                // The original message named serviceDataUuid twice; reference both args.
                throw new IllegalArgumentException(
                        "serviceDataUuid or serviceData is null");
            }
            mServiceData.put(serviceDataUuid, serviceData);
            return this;
        }
        /**
         * Add manufacturer specific data.
         * <p>
         * Please refer to the Bluetooth Assigned Numbers document provided by the <a
         * href="https://www.bluetooth.org">Bluetooth SIG</a> for a list of existing company
         * identifiers.
         *
         * @param manufacturerId Manufacturer ID assigned by Bluetooth SIG.
         * @param manufacturerSpecificData Manufacturer specific data
         * @throws IllegalArgumentException If the {@code manufacturerId} is negative or
         *             {@code manufacturerSpecificData} is null.
         */
        public Builder addManufacturerData(int manufacturerId, byte[] manufacturerSpecificData) {
            if (manufacturerId < 0) {
                throw new IllegalArgumentException(
                        "invalid manufacturerId - " + manufacturerId);
            }
            if (manufacturerSpecificData == null) {
                throw new IllegalArgumentException("manufacturerSpecificData is null");
            }
            mManufacturerSpecificData.put(manufacturerId, manufacturerSpecificData);
            return this;
        }
        /**
         * Whether the transmission power level should be included in the advertise packet. Tx power
         * level field takes 3 bytes in advertise packet.
         */
        public Builder setIncludeTxPowerLevel(boolean includeTxPowerLevel) {
            mIncludeTxPowerLevel = includeTxPowerLevel;
            return this;
        }
        /**
         * Set whether the device name should be included in advertise packet.
         */
        public Builder setIncludeDeviceName(boolean includeDeviceName) {
            mIncludeDeviceName = includeDeviceName;
            return this;
        }
        /**
         * Build the {@link AdvertiseData}.
         */
        public AdvertiseData build() {
            return new AdvertiseData(mServiceUuids, mManufacturerSpecificData, mServiceData,
                    mIncludeTxPowerLevel, mIncludeDeviceName);
        }
    }
}
| |
/*
* Copyright 2011 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.errorprone;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.StandardSystemProperty.JAVA_SPECIFICATION_VERSION;
import com.google.common.base.Optional;
import com.google.common.collect.Iterables;
import com.google.errorprone.scanner.BuiltInCheckerSuppliers;
import com.google.errorprone.scanner.ErrorProneScannerTransformer;
import com.google.errorprone.scanner.Scanner;
import com.google.errorprone.scanner.ScannerSupplier;
import com.sun.source.util.TaskEvent;
import com.sun.source.util.TaskListener;
import com.sun.tools.javac.api.JavacTaskImpl;
import com.sun.tools.javac.api.JavacTool;
import com.sun.tools.javac.api.MultiTaskListener;
import com.sun.tools.javac.file.JavacFileManager;
import com.sun.tools.javac.main.Main;
import com.sun.tools.javac.main.Main.Result;
import com.sun.tools.javac.util.Context;
import com.sun.tools.javac.util.JavacMessages;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintWriter;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Properties;
import javax.annotation.processing.Processor;
import javax.tools.DiagnosticListener;
import javax.tools.JavaFileManager;
import javax.tools.JavaFileObject;
/**
* An error-prone compiler that matches the interface of {@link com.sun.tools.javac.main.Main}.
* Used by plexus-java-compiler-errorprone.
*
* @author alexeagle@google.com (Alex Eagle)
*/
public class ErrorProneCompiler {
  /**
   * Entry point for compiling Java code with error-prone enabled.
   * All default checks are run, and the compile fails if they find a bug.
   *
   * @param args the same args which could be passed to javac on the command line
   */
  public static void main(String[] args) {
    System.exit(compile(args).exitCode);
  }
  /**
   * Compiles in-process.
   *
   * @param listener listens to the diagnostics produced by error-prone
   * @param args the same args which would be passed to javac on the command line
   * @return result from the compiler invocation
   */
  public static Result compile(DiagnosticListener<JavaFileObject> listener, String[] args) {
    ErrorProneCompiler compiler = new ErrorProneCompiler.Builder()
        .listenToDiagnostics(listener)
        .build();
    return compiler.run(args);
  }
  /**
   * Programmatic interface to the error-prone Java compiler.
   *
   * @param args the same args which would be passed to javac on the command line
   * @return result from the compiler invocation
   */
  public static Result compile(String[] args) {
    return new Builder().build().run(args);
  }
  /**
   * Programmatic interface to the error-prone Java compiler.
   *
   * @param args the same args which would be passed to javac on the command line
   * @param out a {@link PrintWriter} to which to send diagnostic output
   * @return result from the compiler invocation
   */
  public static Result compile(String[] args, PrintWriter out) {
    ErrorProneCompiler compiler = new ErrorProneCompiler.Builder()
        .redirectOutputTo(out)
        .build();
    return compiler.run(args);
  }
  // May be null; only registered with the javac Context when set (see prepareCompilation).
  private final DiagnosticListener<? super JavaFileObject> diagnosticListener;
  private final PrintWriter errOutput;
  private final String compilerName;
  private final ScannerSupplier scannerSupplier;
  private ErrorProneCompiler(
      String compilerName,
      PrintWriter errOutput,
      DiagnosticListener<? super JavaFileObject> diagnosticListener,
      ScannerSupplier scannerSupplier) {
    this.errOutput = errOutput;
    this.compilerName = compilerName;
    this.diagnosticListener = diagnosticListener;
    this.scannerSupplier = checkNotNull(scannerSupplier);
  }
  /** Fluent builder for {@link ErrorProneCompiler}; all settings have usable defaults. */
  public static class Builder {
    private DiagnosticListener<? super JavaFileObject> diagnosticListener = null;
    private PrintWriter errOutput = new PrintWriter(System.err, true);
    private String compilerName = "javac (with error-prone)";
    private ScannerSupplier scannerSupplier = BuiltInCheckerSuppliers.defaultChecks();
    public ErrorProneCompiler build() {
      return new ErrorProneCompiler(
          compilerName,
          errOutput,
          diagnosticListener,
          scannerSupplier);
    }
    public Builder named(String compilerName) {
      this.compilerName = compilerName;
      return this;
    }
    public Builder redirectOutputTo(PrintWriter errOutput) {
      this.errOutput = errOutput;
      return this;
    }
    public Builder listenToDiagnostics(DiagnosticListener<? super JavaFileObject> listener) {
      this.diagnosticListener = listener;
      return this;
    }
    public Builder report(ScannerSupplier scannerSupplier) {
      this.scannerSupplier = scannerSupplier;
      return this;
    }
  }
  /** Runs a compilation in a fresh javac {@link Context} with a standard file manager. */
  public Result run(String[] args) {
    Context context = new Context();
    JavacFileManager.preRegister(context);
    return run(args, context);
  }
  /**
   * Default to compiling with the same -source and -target as the host's javac.
   *
   * <p>This prevents, e.g., targeting Java 8 by default when using error-prone on JDK7.
   */
  private Iterable<String> defaultToLatestSupportedLanguageLevel(Iterable<String> args) {
    String overrideLanguageLevel;
    switch (JAVA_SPECIFICATION_VERSION.value()) {
      case "1.7":
        overrideLanguageLevel = "7";
        break;
      case "1.8":
        overrideLanguageLevel = "8";
        break;
      default:
        return args;
    }
    // Prepend so that explicit user-supplied -source/-target flags still win.
    return Iterables.concat(
        Arrays.asList(
            // suppress xlint 'options' warnings to avoid diagnostics like:
            // 'bootstrap class path not set in conjunction with -source 1.7'
            "-Xlint:-options",
            "-source", overrideLanguageLevel,
            "-target", overrideLanguageLevel),
        args);
  }
  /**
   * Sets javac's {@code -XDcompilePolicy} flag to {@code byfile}. This ensures that all classes in
   * a file are attributed before any of them are lowered. Error Prone depends on this behavior
   * when analyzing files that contain multiple classes.
   *
   * @throws InvalidCommandLineOptionException if the {@code -XDcompilePolicy} flag is passed
   * in the existing arguments with a value other than {@code byfile}
   */
  private Iterable<String> setCompilePolicyToByFile(Iterable<String> args)
      throws InvalidCommandLineOptionException {
    for (String arg : args) {
      if (arg.startsWith("-XDcompilePolicy")) {
        String value = arg.substring(arg.indexOf('=') + 1);
        if (!value.equals("byfile")) {
          throw new InvalidCommandLineOptionException(
              "-XDcompilePolicy must be byfile for Error Prone to work properly");
        }
        // If there is already an "-XDcompilePolicy=byfile" flag, don't do anything.
        return args;
      }
    }
    return Iterables.concat(
        args,
        Arrays.asList("-XDcompilePolicy=byfile"));
  }
  /**
   * Normalizes the command line, strips Error Prone's own flags, and registers the Error Prone
   * machinery (scanner, message bundle, end positions) on the given {@link Context}.
   *
   * @return the remaining args to hand to javac proper
   * @throws InvalidCommandLineOptionException on a conflicting {@code -XDcompilePolicy} value
   *     or invalid Error Prone flags
   */
  private String[] prepareCompilation(String[] argv, Context context)
      throws InvalidCommandLineOptionException {
    Iterable<String> newArgs = defaultToLatestSupportedLanguageLevel(Arrays.asList(argv));
    newArgs = setCompilePolicyToByFile(newArgs);
    ErrorProneOptions epOptions = ErrorProneOptions.processArgs(newArgs);
    argv = epOptions.getRemainingArgs();
    if (diagnosticListener != null) {
      context.put(DiagnosticListener.class, diagnosticListener);
    }
    Scanner scanner = scannerSupplier.applyOverrides(epOptions).get();
    CodeTransformer transformer = ErrorProneScannerTransformer.create(scanner);
    setupMessageBundle(context);
    enableEndPositions(context);
    ErrorProneJavacJavaCompiler.preRegister(context, transformer, epOptions);
    return argv;
  }
  private Result run(String[] argv, Context context) {
    try {
      argv = prepareCompilation(argv, context);
    } catch (InvalidCommandLineOptionException e) {
      errOutput.println(e.getMessage());
      errOutput.flush();
      return Result.CMDERR;
    }
    return new Main(compilerName, errOutput).compile(argv, context);
  }
  public Result run(
      String[] argv,
      List<JavaFileObject> javaFileObjects) {
    Context context = new Context();
    return run(argv, context, null, javaFileObjects, Collections.<Processor>emptyList());
  }
  public Result run(
      String[] argv,
      Context context,
      JavaFileManager fileManager,
      List<JavaFileObject> javaFileObjects,
      Iterable<? extends Processor> processors) {
    try {
      argv = prepareCompilation(argv, context);
    } catch (InvalidCommandLineOptionException e) {
      errOutput.println(e.getMessage());
      errOutput.flush();
      return Result.CMDERR;
    }
    JavacTool tool = JavacTool.create();
    JavacTaskImpl task = (JavacTaskImpl) tool.getTask(
        errOutput,
        fileManager,
        null,
        Arrays.asList(argv),
        null,
        javaFileObjects,
        context);
    if (processors != null) {
      task.setProcessors(processors);
    }
    return task.doCall();
  }
  /**
   * Registers our message bundle.
   */
  public static void setupMessageBundle(Context context) {
    JavacMessages.instance(context).add("com.google.errorprone.errors");
  }
  private static final String PROPERTIES_RESOURCE =
      "/META-INF/maven/com.google.errorprone/error_prone_core/pom.properties";
  /** Loads the Error Prone version. */
  public static Optional<String> loadVersionFromPom() {
    try (InputStream stream = ErrorProneCompiler.class.getResourceAsStream(PROPERTIES_RESOURCE)) {
      if (stream == null) {
        return Optional.absent();
      }
      Properties mavenProperties = new Properties();
      mavenProperties.load(stream);
      // Properties.getProperty returns null when the key is missing; Optional.of
      // would throw NPE there, so use fromNullable to fall back to absent().
      return Optional.fromNullable(mavenProperties.getProperty("version"));
    } catch (IOException expected) {
      return Optional.absent();
    }
  }
  private static final TaskListener EMPTY_LISTENER = new TaskListener() {
    @Override public void started(TaskEvent e) {}
    @Override public void finished(TaskEvent e) {}
  };
  /** Convinces javac to run in 'API mode', and collect end position information. */
  private static void enableEndPositions(Context context) {
    // Registering any task listener flips javac into API mode.
    MultiTaskListener.instance(context).add(EMPTY_LISTENER);
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.processors.elasticsearch;
import okhttp3.HttpUrl;
import okhttp3.OkHttpClient;
import okhttp3.Response;
import okhttp3.ResponseBody;
import org.apache.commons.lang3.StringUtils;
import org.apache.nifi.annotation.behavior.EventDriven;
import org.apache.nifi.annotation.behavior.InputRequirement;
import org.apache.nifi.annotation.behavior.SupportsBatching;
import org.apache.nifi.annotation.behavior.WritesAttribute;
import org.apache.nifi.annotation.behavior.WritesAttributes;
import org.apache.nifi.annotation.documentation.CapabilityDescription;
import org.apache.nifi.annotation.documentation.Tags;
import org.apache.nifi.annotation.lifecycle.OnScheduled;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.logging.ComponentLog;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.util.StandardValidators;
import org.apache.nifi.stream.io.ByteArrayInputStream;
import org.codehaus.jackson.JsonNode;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import java.util.stream.Stream;
@InputRequirement(InputRequirement.Requirement.INPUT_ALLOWED)
@EventDriven
@SupportsBatching
@Tags({"elasticsearch", "fetch", "read", "get", "http"})
@CapabilityDescription("Retrieves a document from Elasticsearch using the specified connection properties and the "
+ "identifier of the document to retrieve. Note that the full body of the document will be read into memory before being "
+ "written to a Flow File for transfer.")
@WritesAttributes({
@WritesAttribute(attribute = "filename", description = "The filename attribute is set to the document identifier"),
@WritesAttribute(attribute = "es.index", description = "The Elasticsearch index containing the document"),
@WritesAttribute(attribute = "es.type", description = "The Elasticsearch document type")
})
public class FetchElasticsearchHttp extends AbstractElasticsearchHttpProcessor {

    // Query parameter used to restrict which _source fields Elasticsearch returns.
    private static final String FIELD_INCLUDE_QUERY_PARAM = "_source_include";

    public static final Relationship REL_SUCCESS = new Relationship.Builder()
            .name("success")
            .description("All FlowFiles that are read from Elasticsearch are routed to this relationship.")
            .build();

    public static final Relationship REL_FAILURE = new Relationship.Builder()
            .name("failure")
            .description("All FlowFiles that cannot be read from Elasticsearch are routed to this relationship. Note that only incoming "
                    + "flow files will be routed to failure.")
            .build();

    public static final Relationship REL_RETRY = new Relationship.Builder().name("retry")
            .description("A FlowFile is routed to this relationship if the document cannot be fetched but attempting the operation again may "
                    + "succeed. Note that if the processor has no incoming connections, flow files may still be sent to this relationship "
                    + "based on the processor properties and the results of the fetch operation.")
            .build();

    public static final Relationship REL_NOT_FOUND = new Relationship.Builder().name("not found")
            .description("A FlowFile is routed to this relationship if the specified document does not exist in the Elasticsearch cluster. "
                    + "Note that if the processor has no incoming connections, flow files may still be sent to this relationship based "
                    + "on the processor properties and the results of the fetch operation.")
            .build();

    public static final PropertyDescriptor DOC_ID = new PropertyDescriptor.Builder()
            .name("fetch-es-doc-id")
            .displayName("Document Identifier")
            .description("The identifier of the document to be fetched")
            .required(true)
            .expressionLanguageSupported(true)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .build();

    public static final PropertyDescriptor INDEX = new PropertyDescriptor.Builder()
            .name("fetch-es-index")
            .displayName("Index")
            .description("The name of the index to read from.")
            .required(true)
            .expressionLanguageSupported(true)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .build();

    public static final PropertyDescriptor TYPE = new PropertyDescriptor.Builder()
            .name("fetch-es-type")
            .displayName("Type")
            .description("The (optional) type of this document, used by Elasticsearch for indexing and searching. If the property is empty, "
                    + "the first document matching the identifier across all types will be retrieved.")
            .required(false)
            .expressionLanguageSupported(true)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .build();

    public static final PropertyDescriptor FIELDS = new PropertyDescriptor.Builder()
            .name("fetch-es-fields")
            .displayName("Fields")
            .description("A comma-separated list of fields to retrieve from the document. If the Fields property is left blank, "
                    + "then the entire document's source will be retrieved.")
            .required(false)
            .expressionLanguageSupported(true)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .build();

    private static final Set<Relationship> relationships;
    private static final List<PropertyDescriptor> propertyDescriptors;

    static {
        final Set<Relationship> _rels = new HashSet<>();
        _rels.add(REL_SUCCESS);
        _rels.add(REL_FAILURE);
        _rels.add(REL_RETRY);
        _rels.add(REL_NOT_FOUND);
        relationships = Collections.unmodifiableSet(_rels);

        final List<PropertyDescriptor> descriptors = new ArrayList<>();
        descriptors.add(ES_URL);
        descriptors.add(PROP_SSL_CONTEXT_SERVICE);
        descriptors.add(USERNAME);
        descriptors.add(PASSWORD);
        descriptors.add(CONNECT_TIMEOUT);
        descriptors.add(RESPONSE_TIMEOUT);
        descriptors.add(DOC_ID);
        descriptors.add(INDEX);
        descriptors.add(TYPE);
        descriptors.add(FIELDS);
        propertyDescriptors = Collections.unmodifiableList(descriptors);
    }

    @Override
    public Set<Relationship> getRelationships() {
        return relationships;
    }

    @Override
    public final List<PropertyDescriptor> getSupportedPropertyDescriptors() {
        return propertyDescriptors;
    }

    @OnScheduled
    public void setup(ProcessContext context) {
        super.setup(context);
    }

    /**
     * Fetches a single document from Elasticsearch via its HTTP GET API and routes the
     * result to success/not-found/retry/failure according to the HTTP status code.
     * With no incoming connection a FlowFile is created to carry the fetched document.
     */
    @Override
    public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
        FlowFile flowFile = null;
        if (context.hasIncomingConnection()) {
            flowFile = session.get();

            // If we have no FlowFile, and all incoming connections are self-loops then we can continue on.
            // However, if we have no FlowFile and we have connections coming from other Processors, then
            // we know that we should run only if we have a FlowFile.
            if (flowFile == null && context.hasNonLoopConnection()) {
                return;
            }
        }

        OkHttpClient okHttpClient = getClient();

        if (flowFile == null) {
            flowFile = session.create();
        }

        final String index = context.getProperty(INDEX).evaluateAttributeExpressions(flowFile).getValue();
        final String docId = context.getProperty(DOC_ID).evaluateAttributeExpressions(flowFile).getValue();
        final String docType = context.getProperty(TYPE).evaluateAttributeExpressions(flowFile).getValue();
        final String fields = context.getProperty(FIELDS).isSet()
                ? context.getProperty(FIELDS).evaluateAttributeExpressions(flowFile).getValue()
                : null;

        // Authentication. Evaluate the password against the flow file as well, so expression
        // language referencing flow file attributes behaves consistently with the username
        // (previously the password was evaluated without the flow file).
        final String username = context.getProperty(USERNAME).evaluateAttributeExpressions(flowFile).getValue();
        final String password = context.getProperty(PASSWORD).evaluateAttributeExpressions(flowFile).getValue();

        final ComponentLog logger = getLogger();

        Response getResponse = null;
        try {
            logger.debug("Fetching {}/{}/{} from Elasticsearch", new Object[]{index, docType, docId});

            // read the url property from the context
            final String urlstr = StringUtils.trimToEmpty(context.getProperty(ES_URL).evaluateAttributeExpressions().getValue());
            final URL url = buildRequestURL(urlstr, docId, index, docType, fields);
            final long startNanos = System.nanoTime();

            getResponse = sendRequestToElasticsearch(okHttpClient, url, username, password, "GET", null);
            final int statusCode = getResponse.code();

            if (isSuccess(statusCode)) {
                ResponseBody body = getResponse.body();
                final byte[] bodyBytes = body.bytes();
                JsonNode responseJson = parseJsonResponse(new ByteArrayInputStream(bodyBytes));
                boolean found = responseJson.get("found").asBoolean(false);
                String retrievedIndex = responseJson.get("_index").asText();
                String retrievedType = responseJson.get("_type").asText();
                String retrievedId = responseJson.get("_id").asText();

                if (found) {
                    JsonNode source = responseJson.get("_source");
                    flowFile = session.putAttribute(flowFile, "filename", retrievedId);
                    flowFile = session.putAttribute(flowFile, "es.index", retrievedIndex);
                    flowFile = session.putAttribute(flowFile, "es.type", retrievedType);
                    if (source != null) {
                        flowFile = session.write(flowFile, out -> {
                            out.write(source.toString().getBytes());
                        });
                    }
                    logger.debug("Elasticsearch document " + retrievedId + " fetched, routing to success");

                    // emit provenance event
                    final long millis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos);
                    if (context.hasNonLoopConnection()) {
                        session.getProvenanceReporter().fetch(flowFile, url.toExternalForm(), millis);
                    } else {
                        session.getProvenanceReporter().receive(flowFile, url.toExternalForm(), millis);
                    }
                    session.transfer(flowFile, REL_SUCCESS);
                } else {
                    logger.warn("Failed to read {}/{}/{} from Elasticsearch: Document not found",
                            new Object[]{index, docType, docId});

                    // We couldn't find the document, so send it to "not found"
                    session.transfer(flowFile, REL_NOT_FOUND);
                }
            } else {
                if (statusCode == 404) {
                    logger.warn("Failed to read {}/{}/{} from Elasticsearch: Document not found",
                            new Object[]{index, docType, docId});

                    // We couldn't find the document, so penalize it and send it to "not found"
                    session.transfer(flowFile, REL_NOT_FOUND);
                } else {
                    // 5xx -> RETRY, but a server error might last a while, so yield
                    if (statusCode / 100 == 5) {
                        logger.warn("Elasticsearch returned code {} with message {}, transferring flow file to retry. This is likely a server problem, yielding...",
                                new Object[]{statusCode, getResponse.message()});
                        session.transfer(flowFile, REL_RETRY);
                        context.yield();
                    } else if (context.hasIncomingConnection()) { // 1xx, 3xx, 4xx -> NO RETRY
                        logger.warn("Elasticsearch returned code {} with message {}, transferring flow file to failure", new Object[]{statusCode, getResponse.message()});
                        session.transfer(flowFile, REL_FAILURE);
                    } else {
                        // Self-generated flow file with a non-retryable error: nothing useful to route, drop it.
                        logger.warn("Elasticsearch returned code {} with message {}", new Object[]{statusCode, getResponse.message()});
                        session.remove(flowFile);
                    }
                }
            }
        } catch (IOException ioe) {
            // Transport-level problems are treated as potentially transient -> retry (and yield).
            logger.error("Failed to read from Elasticsearch due to {}, this may indicate an error in configuration "
                            + "(hosts, username/password, etc.). Routing to retry",
                    new Object[]{ioe.getLocalizedMessage()}, ioe);
            if (context.hasIncomingConnection()) {
                session.transfer(flowFile, REL_RETRY);
            } else {
                session.remove(flowFile);
            }
            context.yield();

        } catch (Exception e) {
            // Any other error (e.g. malformed URL, JSON parse failure) is not retryable -> failure.
            logger.error("Failed to read {} from Elasticsearch due to {}", new Object[]{flowFile, e.getLocalizedMessage()}, e);
            if (context.hasIncomingConnection()) {
                session.transfer(flowFile, REL_FAILURE);
            } else {
                session.remove(flowFile);
            }
            context.yield();
        } finally {
            // Always release the underlying HTTP connection.
            if (getResponse != null) {
                getResponse.close();
            }
        }
    }

    /**
     * Builds the document GET URL: {@code <base>/<index>/<type|_all>/<docId>}, optionally
     * appending a {@code _source_include} query parameter listing the requested fields.
     *
     * @param baseUrl base Elasticsearch URL (must not be empty)
     * @param docId   document identifier (last path segment)
     * @param index   index name
     * @param type    document type; when empty, "_all" is used so any type matches
     * @param fields  comma-separated field list, or null/empty for the full source
     * @return the assembled request URL
     * @throws MalformedURLException if the base URL is empty
     */
    private URL buildRequestURL(String baseUrl, String docId, String index, String type, String fields) throws MalformedURLException {
        if (StringUtils.isEmpty(baseUrl)) {
            throw new MalformedURLException("Base URL cannot be null");
        }
        HttpUrl.Builder builder = HttpUrl.parse(baseUrl).newBuilder();
        builder.addPathSegment(index);
        builder.addPathSegment((StringUtils.isEmpty(type)) ? "_all" : type);
        builder.addPathSegment(docId);
        if (!StringUtils.isEmpty(fields)) {
            // Normalize whitespace around each field name before joining them back together.
            String trimmedFields = Stream.of(fields.split(",")).map(String::trim).collect(Collectors.joining(","));
            builder.addQueryParameter(FIELD_INCLUDE_QUERY_PARAM, trimmedFields);
        }
        return builder.build().url();
    }
}
| |
/*
* Copyright (c) 2000, 2021, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package org.xml.sax.helpers;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.Iterator;
import java.util.Objects;
import java.util.ServiceConfigurationError;
import java.util.ServiceLoader;
import jdk.xml.internal.SecuritySupport;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
/**
* Factory for creating an XML reader.
*
* <p>This class contains static methods for creating an XML reader
* from an explicit class name, or based on runtime defaults:
*
* <pre>
* try {
* XMLReader myReader = XMLReaderFactory.createXMLReader();
* } catch (SAXException e) {
* System.err.println(e.getMessage());
* }
* </pre>
*
* <p><strong>Note to Distributions bundled with parsers:</strong>
* You should modify the implementation of the no-arguments
* <em>createXMLReader</em> to handle cases where the external
* configuration mechanisms aren't set up. That method should do its
* best to return a parser when one is in the class path, even when
* nothing bound its class name to {@code org.xml.sax.driver} so
* those configuration mechanisms would see it.
*
* @since 1.4, SAX 2.0
* @author David Megginson, David Brownell
* @version 2.0.1 (sax2r2)
*
* @deprecated It is recommended to use {@link javax.xml.parsers.SAXParserFactory}
* instead.
*/
@Deprecated(since="9")
public final class XMLReaderFactory
{
    /**
     * Private constructor.
     *
     * <p>This constructor prevents the class from being instantiated.
     */
    private XMLReaderFactory ()
    {
    }

    // System property naming the XMLReader implementation class.
    private static final String property = "org.xml.sax.driver";

    /**
     * Obtains a new instance of a {@link org.xml.sax.XMLReader}.
     * This method uses the following ordered lookup procedure to find and load
     * the {@link org.xml.sax.XMLReader} implementation class:
     * <ol>
     * <li>If the system property {@code org.xml.sax.driver}
     * has a value, that is used as an XMLReader class name. </li>
     * <li>
     * Use the service-provider loading facility, defined by the
     * {@link java.util.ServiceLoader} class, to attempt to locate and load an
     * implementation of the service {@link org.xml.sax.XMLReader} by using the
     * {@linkplain java.lang.Thread#getContextClassLoader() current thread's context class loader}.
     * If the context class loader is null, the
     * {@linkplain ClassLoader#getSystemClassLoader() system class loader} will
     * be used.
     * </li>
     * <li>
     * Deprecated. Look for a class name in the {@code META-INF/services/org.xml.sax.driver}
     * file in a jar file available to the runtime.</li>
     * <li>
     * <p>
     * Otherwise, the system-default implementation is returned.
     * </li>
     * </ol>
     *
     * @apiNote
     * The process that looks for a class name in the
     * {@code META-INF/services/org.xml.sax.driver} file in a jar file does not
     * conform to the specification of the service-provider loading facility
     * as defined in {@link java.util.ServiceLoader} and therefore does not
     * support modularization. It is deprecated as of Java SE 9 and subject to
     * removal in a future release.
     *
     * @return a new XMLReader.
     * @throws org.xml.sax.SAXException If no default XMLReader class
     *            can be identified and instantiated.
     * @see #createXMLReader(java.lang.String)
     */
    public static XMLReader createXMLReader ()
        throws SAXException
    {
        String className = null;
        ClassLoader cl = SecuritySupport.getClassLoader();

        // 1. try the JVM-instance-wide system property
        try {
            className = SecuritySupport.getSystemProperty(property);
        }
        catch (RuntimeException e) { /* continue searching */ }

        // 2. try the ServiceLoader
        if (className == null) {
            final XMLReader provider = findServiceProvider(XMLReader.class, cl);
            if (provider != null) {
                return provider;
            }
        }

        // 3. try META-INF/services/org.xml.sax.driver. This old process allows
        // legacy providers to be found
        if (className == null) {
            className = jarLookup(cl);
        }

        // 4. Distro-specific fallback
        if (className == null) {
            return new com.sun.org.apache.xerces.internal.parsers.SAXParser();
        }

        return loadClass (cl, className);
    }

    /**
     * Attempt to create an XML reader from a class name.
     *
     * <p>Given a class name, this method attempts to load
     * and instantiate the class as an XML reader.
     *
     * <p>Note that this method will not be usable in environments where
     * the caller (perhaps an applet) is not permitted to load classes
     * dynamically.
     *
     * @param className a class name
     * @return A new XML reader.
     * @throws org.xml.sax.SAXException If the class cannot be
     *            loaded, instantiated, and cast to XMLReader.
     * @see #createXMLReader()
     */
    public static XMLReader createXMLReader (String className)
        throws SAXException
    {
        return loadClass (SecuritySupport.getClassLoader(), className);
    }

    /**
     * Loads, instantiates, and casts the named class, translating each failure
     * mode into a descriptive SAXException (with the original cause preserved).
     */
    private static XMLReader loadClass (ClassLoader loader, String className)
        throws SAXException
    {
        try {
            return NewInstance.newInstance (XMLReader.class, loader, className);
        } catch (ClassNotFoundException e1) {
            throw new SAXException("SAX2 driver class " + className +
                                   " not found", e1);
        } catch (IllegalAccessException e2) {
            throw new SAXException("SAX2 driver class " + className +
                                   " found but cannot be loaded", e2);
        } catch (InstantiationException e3) {
            throw new SAXException("SAX2 driver class " + className +
                                   " loaded but cannot be instantiated (no empty public constructor?)",
                                   e3);
        } catch (ClassCastException e4) {
            throw new SAXException("SAX2 driver class " + className +
                                   " does not implement XMLReader", e4);
        }
    }

    /**
     * Locates a provider by directly reading the jar service file.
     * @param loader the ClassLoader to be used to read the service file
     * @return the name of the provider, or null if nothing is found
     */
    private static String jarLookup(final ClassLoader loader) {
        final ClassLoader cl = Objects.requireNonNull(loader);
        String clsFromJar = null;
        String service = "META-INF/services/" + property;
        try {
            InputStream in = SecuritySupport.getResourceAsStream(cl, service);
            // If no provider found then try the current ClassLoader
            if (in == null) {
                in = SecuritySupport.getResourceAsStream(null, service);
            }
            if (in != null) {
                // try-with-resources guarantees the stream is closed even when
                // readLine() throws (the previous code closed it only on success).
                try (BufferedReader reader =
                        new BufferedReader(new InputStreamReader(in, "UTF8"))) {
                    clsFromJar = reader.readLine();
                }
            }
        } catch (IOException ignored) {
            // An unreadable service file is treated the same as an absent one:
            // fall through and return null so the caller can try the next mechanism.
        }
        return clsFromJar;
    }

    /*
     * Try to find provider using the ServiceLoader API
     *
     * @param type Base class / Service interface of the factory to find.
     *
     * @return instance of provider class if found or null
     */
    @SuppressWarnings("removal")
    private static <T> T findServiceProvider(final Class<T> type, final ClassLoader loader)
            throws SAXException {
        ClassLoader cl = Objects.requireNonNull(loader);
        try {
            return AccessController.doPrivileged((PrivilegedAction<T>) () -> {
                final ServiceLoader<T> serviceLoader;
                serviceLoader = ServiceLoader.load(type, cl);
                final Iterator<T> iterator = serviceLoader.iterator();
                if (iterator.hasNext()) {
                    return iterator.next();
                } else {
                    return null;
                }
            });
        } catch(ServiceConfigurationError e) {
            final RuntimeException x = new RuntimeException(
                "Provider for " + type + " cannot be created", e);
            throw new SAXException("Provider for " + type + " cannot be created", x);
        }
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.operator;
import com.google.common.collect.ImmutableList;
import io.airlift.stats.TestingGcMonitor;
import io.airlift.units.DataSize;
import io.prestosql.RowPagesBuilder;
import io.prestosql.memory.DefaultQueryContext;
import io.prestosql.memory.MemoryPool;
import io.prestosql.spi.Page;
import io.prestosql.spi.QueryId;
import io.prestosql.spi.memory.MemoryPoolId;
import io.prestosql.spi.type.Type;
import io.prestosql.spiller.SpillSpaceTracker;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.ScheduledExecutorService;
import java.util.function.Function;
import static io.airlift.concurrent.Threads.daemonThreadsNamed;
import static io.airlift.testing.Assertions.assertBetweenInclusive;
import static io.airlift.testing.Assertions.assertGreaterThan;
import static io.airlift.testing.Assertions.assertLessThan;
import static io.airlift.units.DataSize.Unit.GIGABYTE;
import static io.airlift.units.DataSize.Unit.MEGABYTE;
import static io.prestosql.RowPagesBuilder.rowPagesBuilder;
import static io.prestosql.SessionTestUtils.TEST_SESSION;
import static io.prestosql.operator.OperatorAssertion.finishOperator;
import static io.prestosql.spi.type.BigintType.BIGINT;
import static io.prestosql.testing.TestingTaskContext.createTaskContext;
import static io.prestosql.testing.assertions.Assert.assertEquals;
import static java.util.Objects.requireNonNull;
import static java.util.concurrent.Executors.newCachedThreadPool;
import static java.util.concurrent.Executors.newScheduledThreadPool;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;
/**
 * Test utility for asserting that operators backed by a GroupByHash yield (stop mid-page)
 * when the memory pool cannot satisfy the reservation required by a hash-table rehash.
 *
 * <p>The core idea: saturate the pool so only {@code additionalMemoryInBytes} is free,
 * feed a page, and check whether the operator either finished the page without growing
 * the hash table, or blocked (yielded) exactly when a rehash would have been needed.
 */
public final class GroupByHashYieldAssertion
{
    // Shared executors for all driver/task contexts created by this helper.
    private static final ExecutorService EXECUTOR = newCachedThreadPool(daemonThreadsNamed("test-executor-%s"));
    private static final ScheduledExecutorService SCHEDULED_EXECUTOR = newScheduledThreadPool(2, daemonThreadsNamed("test-scheduledExecutor-%s"));
    // Static utility class; not instantiable.
    private GroupByHashYieldAssertion() {}
    /**
     * Builds {@code pageCount} sequence pages whose positions are all distinct,
     * so every row adds a new group and the hash table keeps growing.
     */
    public static List<Page> createPagesWithDistinctHashKeys(Type type, int pageCount, int positionCountPerPage)
    {
        RowPagesBuilder rowPagesBuilder = rowPagesBuilder(true, ImmutableList.of(0), type);
        for (int i = 0; i < pageCount; i++) {
            rowPagesBuilder.addSequencePage(positionCountPerPage, positionCountPerPage * i);
        }
        return rowPagesBuilder.build();
    }
    /**
     * @param operatorFactory creates an Operator that should directly or indirectly contain GroupByHash
     * @param getHashCapacity returns the hash table capacity for the input operator
     * @param additionalMemoryInBytes the memory used in addition to the GroupByHash in the operator (e.g., aggregator)
     * @return the number of observed yields, the last expected rehash reservation, and all output pages
     */
    public static GroupByHashYieldResult finishOperatorWithYieldingGroupByHash(List<Page> input, Type hashKeyType, OperatorFactory operatorFactory, Function<Operator, Integer> getHashCapacity, long additionalMemoryInBytes)
    {
        // Keep the slack small so a rehash reliably exceeds the remaining free memory.
        assertLessThan(additionalMemoryInBytes, 1L << 21, "additionalMemoryInBytes should be a relatively small number");
        List<Page> result = new LinkedList<>();
        // mock an adjustable memory pool
        QueryId queryId = new QueryId("test_query");
        MemoryPool memoryPool = new MemoryPool(new MemoryPoolId("test"), new DataSize(1, GIGABYTE));
        DefaultQueryContext queryContext = new DefaultQueryContext(
                queryId,
                new DataSize(512, MEGABYTE),
                new DataSize(1024, MEGABYTE),
                memoryPool,
                new TestingGcMonitor(),
                EXECUTOR,
                SCHEDULED_EXECUTOR,
                new DataSize(512, MEGABYTE),
                new SpillSpaceTracker(new DataSize(512, MEGABYTE)));
        DriverContext driverContext = createTaskContext(queryContext, EXECUTOR, TEST_SESSION)
                .addPipelineContext(0, true, true, false)
                .addDriverContext();
        Operator operator = operatorFactory.createOperator(driverContext);
        // run operator
        int yieldCount = 0;
        long expectedReservedExtraBytes = 0;
        for (Page page : input) {
            // unblocked
            assertTrue(operator.needsInput());
            // saturate the pool with a tiny memory left
            long reservedMemoryInBytes = memoryPool.getFreeBytes() - additionalMemoryInBytes;
            memoryPool.reserve(queryId, "test", reservedMemoryInBytes);
            // Snapshot usage/capacity before the page so the deltas below are attributable to it.
            long oldMemoryUsage = operator.getOperatorContext().getDriverContext().getMemoryUsage();
            int oldCapacity = getHashCapacity.apply(operator);
            // add a page and verify different behaviors
            operator.addInput(page);
            // get output to consume the input
            Page output = operator.getOutput();
            if (output != null) {
                result.add(output);
            }
            long newMemoryUsage = operator.getOperatorContext().getDriverContext().getMemoryUsage();
            // Skip if the memory usage is not large enough since we cannot distinguish
            // between rehash and memory used by aggregator
            if (newMemoryUsage < new DataSize(4, MEGABYTE).toBytes()) {
                // free the pool for the next iteration
                memoryPool.free(queryId, "test", reservedMemoryInBytes);
                // this required in case input is blocked
                operator.getOutput();
                continue;
            }
            long actualIncreasedMemory = newMemoryUsage - oldMemoryUsage;
            if (operator.needsInput()) {
                // We have successfully added a page
                // Assert we are not blocked
                assertTrue(operator.getOperatorContext().isWaitingForMemory().isDone());
                // assert the hash capacity is not changed; otherwise, we should have yielded
                assertTrue(oldCapacity == getHashCapacity.apply(operator));
                // We are not going to rehash; therefore, assert the memory increase only comes from the aggregator
                assertLessThan(actualIncreasedMemory, additionalMemoryInBytes);
                // free the pool for the next iteration
                memoryPool.free(queryId, "test", reservedMemoryInBytes);
            }
            else {
                // We failed to finish the page processing i.e. we yielded
                yieldCount++;
                // Assert we are blocked
                assertFalse(operator.getOperatorContext().isWaitingForMemory().isDone());
                // Hash table capacity should not change
                assertEquals(oldCapacity, (long) getHashCapacity.apply(operator));
                // Increased memory is no smaller than the hash table size and no greater than the hash table size + the memory used by aggregator
                if (hashKeyType == BIGINT) {
                    // groupIds and values double by hashCapacity; while valuesByGroupId double by maxFill = hashCapacity / 0.75
                    expectedReservedExtraBytes = oldCapacity * (long) (Long.BYTES * 1.75 + Integer.BYTES) + page.getRetainedSizeInBytes();
                }
                else {
                    // groupAddressByHash, groupIdsByHash, and rawHashByHashPosition double by hashCapacity; while groupAddressByGroupId double by maxFill = hashCapacity / 0.75
                    expectedReservedExtraBytes = oldCapacity * (long) (Long.BYTES * 1.75 + Integer.BYTES + Byte.BYTES) + page.getRetainedSizeInBytes();
                }
                assertBetweenInclusive(actualIncreasedMemory, expectedReservedExtraBytes, expectedReservedExtraBytes + additionalMemoryInBytes);
                // Output should be blocked as well
                assertNull(operator.getOutput());
                // Free the pool to unblock
                memoryPool.free(queryId, "test", reservedMemoryInBytes);
                // Trigger a process through getOutput() or needsInput()
                output = operator.getOutput();
                if (output != null) {
                    result.add(output);
                }
                assertTrue(operator.needsInput());
                // Hash table capacity has increased
                assertGreaterThan(getHashCapacity.apply(operator), oldCapacity);
                // Assert the estimated reserved memory before rehash is very close to the one after rehash
                // NOTE(review): 1% tolerance — presumably covers rounding in retained-size estimates; confirm if it flakes.
                long rehashedMemoryUsage = operator.getOperatorContext().getDriverContext().getMemoryUsage();
                assertBetweenInclusive(rehashedMemoryUsage * 1.0 / newMemoryUsage, 0.99, 1.01);
                // unblocked
                assertTrue(operator.needsInput());
            }
        }
        result.addAll(finishOperator(operator));
        return new GroupByHashYieldResult(yieldCount, expectedReservedExtraBytes, result);
    }
    /**
     * Immutable result of a yielding run: yield count, the last computed expected
     * rehash reservation, and the operator's complete output.
     */
    public static final class GroupByHashYieldResult
    {
        private final int yieldCount;
        private final long maxReservedBytes;
        private final List<Page> output;
        public GroupByHashYieldResult(int yieldCount, long maxReservedBytes, List<Page> output)
        {
            this.yieldCount = yieldCount;
            this.maxReservedBytes = maxReservedBytes;
            this.output = requireNonNull(output, "output is null");
        }
        public int getYieldCount()
        {
            return yieldCount;
        }
        public long getMaxReservedBytes()
        {
            return maxReservedBytes;
        }
        public List<Page> getOutput()
        {
            return output;
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.io.network.partition;
import org.apache.flink.core.memory.MemorySegment;
import org.apache.flink.core.memory.MemorySegmentFactory;
import org.apache.flink.runtime.io.network.buffer.Buffer;
import org.apache.flink.runtime.io.network.buffer.FreeingBufferRecycler;
import org.apache.flink.runtime.io.network.buffer.NetworkBuffer;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import java.io.IOException;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.List;
import java.util.Queue;
import java.util.Random;
import static org.apache.flink.util.Preconditions.checkNotNull;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
/**
* Tests for writing and reading {@link PartitionedFile} with {@link PartitionedFileWriter} and
* {@link PartitionedFileReader}.
*/
public class PartitionedFileWriteReadTest {
// Per-test scratch directory for the partitioned file (cleaned up by JUnit).
@Rule public final TemporaryFolder temporaryFolder = new TemporaryFolder();

/**
 * Writes randomly-distributed buffers for 10 subpartitions across 10 regions, then
 * reads each subpartition back and checks the buffers match in write order.
 * NOTE: the seeded Random's draw order (buffer content, then target subpartition)
 * is what makes this test deterministic — do not reorder the calls.
 */
@Test
public void testWriteAndReadPartitionedFile() throws Exception {
    int numSubpartitions = 10;
    int bufferSize = 1024;
    int numBuffers = 1000;
    int numRegions = 10;
    Random random = new Random(1111);
    // Raw generic arrays (unchecked): buffersWritten/buffersRead track totals per
    // subpartition; regionBuffers holds the current region's pending writes.
    List<Buffer>[] buffersWritten = new List[numSubpartitions];
    List<Buffer>[] buffersRead = new List[numSubpartitions];
    Queue<Buffer>[] regionBuffers = new Queue[numSubpartitions];
    for (int subpartition = 0; subpartition < numSubpartitions; ++subpartition) {
        buffersWritten[subpartition] = new ArrayList<>();
        buffersRead[subpartition] = new ArrayList<>();
        regionBuffers[subpartition] = new ArrayDeque<>();
    }
    PartitionedFileWriter fileWriter = createPartitionedFileWriter(numSubpartitions);
    for (int region = 0; region < numRegions; ++region) {
        fileWriter.startNewRegion();
        // Generate this region's buffers and assign each to a random subpartition.
        for (int i = 0; i < numBuffers; ++i) {
            Buffer buffer = createBuffer(random, bufferSize);
            int subpartition = random.nextInt(numSubpartitions);
            buffersWritten[subpartition].add(buffer);
            regionBuffers[subpartition].add(buffer);
        }
        // Within a region, each subpartition's buffers are written contiguously,
        // but the subpartitions themselves are visited in a random order.
        int[] writeOrder =
                PartitionSortedBufferTest.getRandomSubpartitionOrder(numSubpartitions);
        for (int index = 0; index < numSubpartitions; ++index) {
            int subpartition = writeOrder[index];
            while (!regionBuffers[subpartition].isEmpty()) {
                fileWriter.writeBuffer(regionBuffers[subpartition].poll(), subpartition);
            }
        }
    }
    PartitionedFile partitionedFile = fileWriter.finish();
    // Read every subpartition back, each buffer into a fresh unpooled segment.
    for (int subpartition = 0; subpartition < numSubpartitions; ++subpartition) {
        PartitionedFileReader fileReader =
                new PartitionedFileReader(partitionedFile, subpartition);
        while (fileReader.hasRemaining()) {
            MemorySegment readBuffer = MemorySegmentFactory.allocateUnpooledSegment(bufferSize);
            Buffer buffer = fileReader.readBuffer(readBuffer, (buf) -> {});
            buffersRead[subpartition].add(buffer);
        }
        fileReader.close();
    }
    // Verify counts and per-buffer equality in write order.
    for (int subpartition = 0; subpartition < numSubpartitions; ++subpartition) {
        assertEquals(buffersWritten[subpartition].size(), buffersRead[subpartition].size());
        for (int i = 0; i < buffersWritten[subpartition].size(); ++i) {
            assertBufferEquals(
                    buffersWritten[subpartition].get(i), buffersRead[subpartition].get(i));
        }
    }
}
/**
 * Exercises regions in which some subpartitions receive no data at all: each
 * subpartition gets a buffer in a region only with 50% probability. Reading back
 * must return exactly the written buffers and nothing for the empty slots.
 */
@Test
public void testWriteAndReadWithEmptySubpartition() throws Exception {
    int numRegions = 10;
    int numSubpartitions = 5;
    int bufferSize = 1024;
    Random random = new Random(1111);
    // Expected buffers per subpartition, in write order (raw generic array, unchecked).
    Queue<Buffer>[] subpartitionBuffers = new ArrayDeque[numSubpartitions];
    for (int subpartition = 0; subpartition < numSubpartitions; ++subpartition) {
        subpartitionBuffers[subpartition] = new ArrayDeque<>();
    }
    PartitionedFileWriter fileWriter = createPartitionedFileWriter(numSubpartitions);
    for (int region = 0; region < numRegions; ++region) {
        fileWriter.startNewRegion();
        for (int subpartition = 0; subpartition < numSubpartitions; ++subpartition) {
            // Coin flip: this subpartition may be left empty for this region.
            if (random.nextBoolean()) {
                Buffer buffer = createBuffer(random, bufferSize);
                subpartitionBuffers[subpartition].add(buffer);
                fileWriter.writeBuffer(buffer, subpartition);
            }
        }
    }
    PartitionedFile partitionedFile = fileWriter.finish();
    for (int subpartition = 0; subpartition < numSubpartitions; ++subpartition) {
        PartitionedFileReader fileReader =
                new PartitionedFileReader(partitionedFile, subpartition);
        while (fileReader.hasRemaining()) {
            MemorySegment readBuffer = MemorySegmentFactory.allocateUnpooledSegment(bufferSize);
            Buffer buffer = checkNotNull(fileReader.readBuffer(readBuffer, (buf) -> {}));
            assertBufferEquals(checkNotNull(subpartitionBuffers[subpartition].poll()), buffer);
        }
        fileReader.close();
        // Every expected buffer must have been consumed by the read loop above.
        assertTrue(subpartitionBuffers[subpartition].isEmpty());
    }
}
/**
 * Asserts that the actual buffer matches the expected one. Compares the data type and
 * the readable content (relies on ByteBuffer equality for the payload comparison).
 */
private void assertBufferEquals(Buffer expected, Buffer actual) {
    assertEquals(expected.getDataType(), actual.getDataType());
    assertEquals(expected.getNioBufferReadable(), actual.getNioBufferReadable());
}
/**
 * Creates a buffer of random data type (data or event) and random size in [1, bufferSize],
 * filled with random bytes.
 *
 * @param random source of randomness (callers seed it for determinism)
 * @param bufferSize upper bound (inclusive) for the generated data size
 * @return a new {@link NetworkBuffer} wrapping the generated data
 */
private Buffer createBuffer(Random random, int bufferSize) {
    boolean isBuffer = random.nextBoolean();
    Buffer.DataType dataType =
            isBuffer ? Buffer.DataType.DATA_BUFFER : Buffer.DataType.EVENT_BUFFER;
    int dataSize = random.nextInt(bufferSize) + 1;
    byte[] data = new byte[dataSize];
    // Fill with random content so assertBufferEquals compares meaningful payloads;
    // an all-zero array would make the content assertion trivially pass.
    random.nextBytes(data);
    return new NetworkBuffer(MemorySegmentFactory.wrap(data), (buf) -> {}, dataType, dataSize);
}
/**
 * The writer expects all buffers of one subpartition within a region to be written
 * consecutively: writing subpartition 1, then 0, then 1 again must fail with an
 * {@link IllegalStateException}.
 */
@Test(expected = IllegalStateException.class)
public void testNotWriteDataOfTheSameSubpartitionTogether() throws Exception {
    PartitionedFileWriter partitionedFileWriter = createPartitionedFileWriter(2);
    try {
        MemorySegment segment = MemorySegmentFactory.allocateUnpooledSegment(1024);
        NetworkBuffer buffer1 = new NetworkBuffer(segment, (buf) -> {});
        partitionedFileWriter.writeBuffer(buffer1, 1);
        NetworkBuffer buffer2 = new NetworkBuffer(segment, (buf) -> {});
        partitionedFileWriter.writeBuffer(buffer2, 0);
        NetworkBuffer buffer3 = new NetworkBuffer(segment, (buf) -> {});
        // Returning to subpartition 1 after having moved on is expected to throw.
        partitionedFileWriter.writeBuffer(buffer3, 1);
    } finally {
        // Always release the underlying file resources, even on the expected failure.
        partitionedFileWriter.finish();
    }
}
/** Writing into an already-finished writer must fail with {@link IllegalStateException}. */
@Test(expected = IllegalStateException.class)
public void testWriteFinishedPartitionedFile() throws Exception {
    PartitionedFileWriter finishedWriter = createAndFinishPartitionedFileWriter();
    MemorySegment memorySegment = MemorySegmentFactory.allocateUnpooledSegment(1024);
    NetworkBuffer networkBuffer = new NetworkBuffer(memorySegment, (buf) -> {});
    finishedWriter.writeBuffer(networkBuffer, 0);
}
/** Finishing a writer a second time must fail with {@link IllegalStateException}. */
@Test(expected = IllegalStateException.class)
public void testFinishPartitionedFileWriterTwice() throws Exception {
    PartitionedFileWriter finishedWriter = createAndFinishPartitionedFileWriter();
    // The writer was already finished by the factory method; this call must throw.
    finishedWriter.finish();
}
/** Reading from an already-closed reader must fail with {@link IllegalStateException}. */
@Test(expected = IllegalStateException.class)
public void testReadClosedPartitionedFile() throws Exception {
    PartitionedFileReader closedReader = createAndClosePartitionedFiledReader();
    MemorySegment segment = MemorySegmentFactory.allocateUnpooledSegment(1024);
    closedReader.readBuffer(segment, FreeingBufferRecycler.INSTANCE);
}
/** Reading a partitioned file that contains no data must yield {@code null}. */
@Test
public void testReadEmptyPartitionedFile() throws Exception {
    try (PartitionedFileReader emptyReader = createPartitionedFiledReader()) {
        MemorySegment segment = MemorySegmentFactory.allocateUnpooledSegment(1024);
        Buffer result = emptyReader.readBuffer(segment, FreeingBufferRecycler.INSTANCE);
        assertNull(result);
    }
}
/** Creates a reader over a freshly created partitioned file and immediately closes it. */
private PartitionedFileReader createAndClosePartitionedFiledReader() throws IOException {
    PartitionedFileReader reader = createPartitionedFiledReader();
    reader.close();
    return reader;
}
/** Creates a reader over subpartition 1 of a newly created (empty) partitioned file. */
private PartitionedFileReader createPartitionedFiledReader() throws IOException {
    return new PartitionedFileReader(createPartitionedFile(), 1);
}
/** Creates an empty, finished partitioned file with two subpartitions. */
private PartitionedFile createPartitionedFile() throws IOException {
    return createPartitionedFileWriter(2).finish();
}
/** Creates a writer that stores its partitioned file under the temporary folder. */
private PartitionedFileWriter createPartitionedFileWriter(int numSubpartitions)
        throws IOException {
    String filePath = temporaryFolder.newFile().getPath();
    return new PartitionedFileWriter(numSubpartitions, 640, filePath);
}
/** Creates a single-subpartition writer and immediately finishes it. */
private PartitionedFileWriter createAndFinishPartitionedFileWriter() throws IOException {
    PartitionedFileWriter writer = createPartitionedFileWriter(1);
    writer.finish();
    return writer;
}
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.search.aggregations.bucket;
import org.apache.lucene.index.LeafReaderContext;
import org.elasticsearch.common.util.LongArray;
import org.elasticsearch.core.Releasable;
import org.elasticsearch.search.aggregations.AggregationExecutionException;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorBase;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.CardinalityUpperBound;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.LeafBucketCollector;
import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregator;
import org.elasticsearch.search.aggregations.bucket.terms.LongKeyedBucketOrds;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.AggregationPath;
import org.elasticsearch.search.sort.SortOrder;
import java.io.IOException;
import java.util.AbstractList;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.function.BiConsumer;
import java.util.function.Function;
import java.util.function.IntConsumer;
import java.util.function.LongUnaryOperator;
import java.util.function.ToLongFunction;
/**
 * Base class for aggregators that sort documents into buckets. It maintains a
 * big-arrays backed {@link LongArray} of per-bucket document counts, indexed by
 * bucket ordinal, and provides utilities for building the sub-aggregation
 * results of the collected buckets.
 */
public abstract class BucketsAggregator extends AggregatorBase {
    // Notified periodically during collection so the coordinator can check
    // available memory and break execution when too many buckets are created.
    private final IntConsumer multiBucketConsumer;
    // Per-bucket document counts, indexed by bucket ordinal; grown on demand.
    private LongArray docCounts;
    protected final DocCountProvider docCountProvider;

    /**
     * @param name the name of this aggregation
     * @param factories factories for the sub-aggregators
     * @param context the context this aggregation runs in
     * @param parent the parent aggregator, or {@code null} at the top level
     * @param bucketCardinality upper bound of the number of owning buckets
     * @param metadata metadata to attach to the produced results
     */
    public BucketsAggregator(
        String name,
        AggregatorFactories factories,
        AggregationContext context,
        Aggregator parent,
        CardinalityUpperBound bucketCardinality,
        Map<String, Object> metadata
    ) throws IOException {
        super(name, factories, context, parent, bucketCardinality, metadata);
        multiBucketConsumer = context.multiBucketConsumer();
        // Start with room for a single bucket; grow() extends it as needed.
        docCounts = bigArrays().newLongArray(1, true);
        docCountProvider = new DocCountProvider();
    }

    /**
     * Return an upper bound of the maximum bucket ordinal seen so far.
     */
    public final long maxBucketOrd() {
        return docCounts.size();
    }

    /**
     * Ensure there are at least <code>maxBucketOrd</code> buckets available.
     */
    public final void grow(long maxBucketOrd) {
        docCounts = bigArrays().grow(docCounts, maxBucketOrd);
    }

    /**
     * Utility method to collect the given doc in the given bucket (identified by the bucket ordinal)
     */
    public final void collectBucket(LeafBucketCollector subCollector, int doc, long bucketOrd) throws IOException {
        grow(bucketOrd + 1);
        collectExistingBucket(subCollector, doc, bucketOrd);
    }

    /**
     * Same as {@link #collectBucket(LeafBucketCollector, int, long)}, but doesn't check if the docCounts needs to be re-sized.
     */
    public final void collectExistingBucket(LeafBucketCollector subCollector, int doc, long bucketOrd) throws IOException {
        int docCount = docCountProvider.getDocCount(doc);
        // increment(...) == docCount means the count was 0 before, i.e. this is
        // the first time this bucket ordinal receives a document.
        if (docCounts.increment(bucketOrd, docCount) == docCount) {
            // We calculate the final number of buckets only during the reduce phase. But we still need to
            // trigger bucket consumer from time to time in order to give it a chance to check available memory and break
            // the execution if we are running out. To achieve that we are passing 0 as a bucket count.
            multiBucketConsumer.accept(0);
        }
        subCollector.collect(doc, bucketOrd);
    }

    /**
     * Merge doc counts. If the {@linkplain Aggregator} is delayed then you must also call
     * {@link BestBucketsDeferringCollector#rewriteBuckets(LongUnaryOperator)} to merge the delayed buckets.
     * @param newNumBuckets the number of buckets after the merge
     * @param mergeMap a unary operator which maps a bucket's ordinal to the ordinal it should be merged with.
     * If a bucket's ordinal is mapped to -1 then the bucket is removed entirely.
     */
    public final void rewriteBuckets(long newNumBuckets, LongUnaryOperator mergeMap) {
        LongArray oldDocCounts = docCounts;
        boolean success = false;
        try {
            docCounts = bigArrays().newLongArray(newNumBuckets, true);
            success = true;
            docCounts.fill(0, newNumBuckets, 0);
            for (long i = 0; i < oldDocCounts.size(); i++) {
                long docCount = oldDocCounts.get(i);
                if (docCount == 0) continue;
                // Skip any in the map which have been "removed", signified with -1
                long destinationOrdinal = mergeMap.applyAsLong(i);
                if (destinationOrdinal != -1) {
                    docCounts.increment(destinationOrdinal, docCount);
                }
            }
        } finally {
            // Only release the old array once the new one was allocated; otherwise
            // docCounts still points at oldDocCounts and must stay alive.
            if (success) {
                oldDocCounts.close();
            }
        }
    }

    /** Returns the per-bucket document counts, indexed by bucket ordinal. */
    public LongArray getDocCounts() {
        return docCounts;
    }

    /**
     * Utility method to increment the doc counts of the given bucket (identified by the bucket ordinal)
     */
    public final void incrementBucketDocCount(long bucketOrd, long inc) {
        docCounts = bigArrays().grow(docCounts, bucketOrd + 1);
        docCounts.increment(bucketOrd, inc);
    }

    /**
     * Utility method to return the number of documents that fell in the given bucket (identified by the bucket ordinal)
     */
    public final long bucketDocCount(long bucketOrd) {
        if (bucketOrd >= docCounts.size()) {
            // This may happen eg. if no document in the highest buckets is accepted by a sub aggregator.
            // For example, if there is a long terms agg on 3 terms 1,2,3 with a sub filter aggregator and if no document with 3 as a value
            // matches the filter, then the filter will never collect bucket ord 3. However, the long terms agg will call
            // bucketAggregations(3) on the filter aggregator anyway to build sub-aggregations.
            return 0;
        } else {
            return docCounts.get(bucketOrd);
        }
    }

    /**
     * Hook to allow taking an action before building the sub agg results.
     * @param ordsToCollect the ordinals of the buckets whose results will be built
     */
    protected void prepareSubAggs(long[] ordsToCollect) throws IOException {}

    /**
     * Build the results of the sub-aggregations of the buckets at each of
     * the provided ordinals.
     * <p>
     * Most aggregations should probably use something like
     * {@link #buildSubAggsForAllBuckets(Object[][], ToLongFunction, BiConsumer)}
     * or {@link #buildAggregationsForVariableBuckets(long[], LongKeyedBucketOrds, BucketBuilderForVariable, ResultBuilderForVariable)}
     * or {@link #buildAggregationsForFixedBucketCount(long[], int, BucketBuilderForFixedCount, Function)}
     * or {@link #buildAggregationsForSingleBucket(long[], SingleBucketResultBuilder)}
     * instead of calling this directly.
     * @return the sub-aggregation results in the same order as the provided
     * array of ordinals
     */
    protected final InternalAggregations[] buildSubAggsForBuckets(long[] bucketOrdsToCollect) throws IOException {
        prepareSubAggs(bucketOrdsToCollect);
        // aggregations[subAggregator][ordinal index] holds the result of one
        // sub-aggregator for one requested bucket ordinal.
        InternalAggregation[][] aggregations = new InternalAggregation[subAggregators.length][];
        for (int i = 0; i < subAggregators.length; i++) {
            aggregations[i] = subAggregators[i].buildAggregations(bucketOrdsToCollect);
        }
        InternalAggregations[] result = new InternalAggregations[bucketOrdsToCollect.length];
        for (int ord = 0; ord < bucketOrdsToCollect.length; ord++) {
            final int thisOrd = ord;
            // Lazy view over the per-sub-aggregator results for this ordinal,
            // avoiding a copy per bucket.
            result[ord] = InternalAggregations.from(new AbstractList<InternalAggregation>() {
                @Override
                public InternalAggregation get(int index) {
                    return aggregations[index][thisOrd];
                }

                @Override
                public int size() {
                    return aggregations.length;
                }
            });
        }
        return result;
    }

    /**
     * Build the sub aggregation results for a list of buckets and set them on
     * the buckets. This is usually used by aggregations that are selective
     * in which bucket they build. They use some mechanism of selecting a list
     * of buckets to build use this method to "finish" building the results.
     * @param buckets the buckets to finish building
     * @param bucketToOrd how to convert a bucket into an ordinal
     * @param setAggs how to set the sub-aggregation results on a bucket
     */
    protected final <B> void buildSubAggsForBuckets(B[] buckets, ToLongFunction<B> bucketToOrd, BiConsumer<B, InternalAggregations> setAggs)
        throws IOException {
        InternalAggregations[] results = buildSubAggsForBuckets(Arrays.stream(buckets).mapToLong(bucketToOrd).toArray());
        for (int i = 0; i < buckets.length; i++) {
            setAggs.accept(buckets[i], results[i]);
        }
    }

    /**
     * Build the sub aggregation results for a list of buckets and set them on
     * the buckets. This is usually used by aggregations that are selective
     * in which bucket they build. They use some mechanism of selecting a list
     * of buckets to build use this method to "finish" building the results.
     * @param buckets the buckets to finish building, one array per owning result
     * @param bucketToOrd how to convert a bucket into an ordinal
     * @param setAggs how to set the sub-aggregation results on a bucket
     */
    protected final <B> void buildSubAggsForAllBuckets(
        B[][] buckets,
        ToLongFunction<B> bucketToOrd,
        BiConsumer<B, InternalAggregations> setAggs
    ) throws IOException {
        // Flatten all buckets into a single ordinal array so the sub aggs are
        // built in one pass, then fan the results back out in the same order.
        int totalBucketOrdsToCollect = 0;
        for (B[] bucketsForOneResult : buckets) {
            totalBucketOrdsToCollect += bucketsForOneResult.length;
        }
        long[] bucketOrdsToCollect = new long[totalBucketOrdsToCollect];
        int s = 0;
        for (B[] bucketsForOneResult : buckets) {
            for (B bucket : bucketsForOneResult) {
                bucketOrdsToCollect[s++] = bucketToOrd.applyAsLong(bucket);
            }
        }
        InternalAggregations[] results = buildSubAggsForBuckets(bucketOrdsToCollect);
        s = 0;
        for (int r = 0; r < buckets.length; r++) {
            for (int b = 0; b < buckets[r].length; b++) {
                setAggs.accept(buckets[r][b], results[s++]);
            }
        }
    }

    /**
     * Build aggregation results for an aggregator that has a fixed number of buckets per owning ordinal.
     * @param <B> the type of the bucket
     * @param owningBucketOrds owning bucket ordinals for which to build the results
     * @param bucketsPerOwningBucketOrd how many buckets there are per ord
     * @param bucketBuilder how to build a bucket
     * @param resultBuilder how to build a result from buckets
     */
    protected final <B> InternalAggregation[] buildAggregationsForFixedBucketCount(
        long[] owningBucketOrds,
        int bucketsPerOwningBucketOrd,
        BucketBuilderForFixedCount<B> bucketBuilder,
        Function<List<B>, InternalAggregation> resultBuilder
    ) throws IOException {
        int totalBuckets = owningBucketOrds.length * bucketsPerOwningBucketOrd;
        long[] bucketOrdsToCollect = new long[totalBuckets];
        int bucketOrdIdx = 0;
        // Bucket ordinals are laid out contiguously per owning ordinal:
        // ord = owningBucketOrd * bucketsPerOwningBucketOrd + offset.
        for (long owningBucketOrd : owningBucketOrds) {
            long ord = owningBucketOrd * bucketsPerOwningBucketOrd;
            for (int offsetInOwningOrd = 0; offsetInOwningOrd < bucketsPerOwningBucketOrd; offsetInOwningOrd++) {
                bucketOrdsToCollect[bucketOrdIdx++] = ord++;
            }
        }
        bucketOrdIdx = 0;
        InternalAggregations[] subAggregationResults = buildSubAggsForBuckets(bucketOrdsToCollect);
        InternalAggregation[] results = new InternalAggregation[owningBucketOrds.length];
        for (int owningOrdIdx = 0; owningOrdIdx < owningBucketOrds.length; owningOrdIdx++) {
            List<B> buckets = new ArrayList<>(bucketsPerOwningBucketOrd);
            for (int offsetInOwningOrd = 0; offsetInOwningOrd < bucketsPerOwningBucketOrd; offsetInOwningOrd++) {
                buckets.add(
                    bucketBuilder.build(
                        offsetInOwningOrd,
                        bucketDocCount(bucketOrdsToCollect[bucketOrdIdx]),
                        subAggregationResults[bucketOrdIdx++]
                    )
                );
            }
            results[owningOrdIdx] = resultBuilder.apply(buckets);
        }
        return results;
    }

    @FunctionalInterface
    protected interface BucketBuilderForFixedCount<B> {
        B build(int offsetInOwningOrd, long docCount, InternalAggregations subAggregationResults);
    }

    /**
     * Build aggregation results for an aggregator that always contain a single bucket.
     * @param owningBucketOrds owning bucket ordinals for which to build the results
     * @param resultBuilder how to build a result from the sub aggregation results
     */
    protected final InternalAggregation[] buildAggregationsForSingleBucket(long[] owningBucketOrds, SingleBucketResultBuilder resultBuilder)
        throws IOException {
        /*
         * It'd be entirely reasonable to call
         * `consumeBucketsAndMaybeBreak(owningBucketOrds.length)`
         * here but we don't because single bucket aggs never have.
         */
        InternalAggregations[] subAggregationResults = buildSubAggsForBuckets(owningBucketOrds);
        InternalAggregation[] results = new InternalAggregation[owningBucketOrds.length];
        for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) {
            results[ordIdx] = resultBuilder.build(owningBucketOrds[ordIdx], subAggregationResults[ordIdx]);
        }
        return results;
    }

    @FunctionalInterface
    protected interface SingleBucketResultBuilder {
        InternalAggregation build(long owningBucketOrd, InternalAggregations subAggregationResults);
    }

    /**
     * Build aggregation results for an aggregator with a varying number of
     * {@code long} keyed buckets.
     * @param owningBucketOrds owning bucket ordinals for which to build the results
     * @param bucketOrds hash of values to the bucket ordinal
     */
    protected final <B> InternalAggregation[] buildAggregationsForVariableBuckets(
        long[] owningBucketOrds,
        LongKeyedBucketOrds bucketOrds,
        BucketBuilderForVariable<B> bucketBuilder,
        ResultBuilderForVariable<B> resultBuilder
    ) throws IOException {
        // First pass: count the buckets per owning ordinal so we can size the
        // flattened ordinal array.
        long totalOrdsToCollect = 0;
        final int[] bucketsInOrd = new int[owningBucketOrds.length];
        for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) {
            final long bucketCount = bucketOrds.bucketsInOrd(owningBucketOrds[ordIdx]);
            bucketsInOrd[ordIdx] = (int) bucketCount;
            totalOrdsToCollect += bucketCount;
        }
        if (totalOrdsToCollect > Integer.MAX_VALUE) {
            throw new AggregationExecutionException(
                "Can't collect more than [" + Integer.MAX_VALUE + "] buckets but attempted [" + totalOrdsToCollect + "]"
            );
        }
        long[] bucketOrdsToCollect = new long[(int) totalOrdsToCollect];
        int b = 0;
        for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) {
            LongKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrds[ordIdx]);
            while (ordsEnum.next()) {
                bucketOrdsToCollect[b++] = ordsEnum.ord();
            }
        }
        InternalAggregations[] subAggregationResults = buildSubAggsForBuckets(bucketOrdsToCollect);
        InternalAggregation[] results = new InternalAggregation[owningBucketOrds.length];
        b = 0;
        for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) {
            List<B> buckets = new ArrayList<>(bucketsInOrd[ordIdx]);
            LongKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrds[ordIdx]);
            while (ordsEnum.next()) {
                // The second enumeration must yield exactly the same order as the
                // first one; otherwise the sub agg results would be misattributed.
                if (bucketOrdsToCollect[b] != ordsEnum.ord()) {
                    throw new AggregationExecutionException(
                        "Iteration order of ["
                            + bucketOrds
                            + "] changed without mutating. ["
                            + ordsEnum.ord()
                            + "] should have been ["
                            + bucketOrdsToCollect[b]
                            + "]"
                    );
                }
                buckets.add(bucketBuilder.build(ordsEnum.value(), bucketDocCount(ordsEnum.ord()), subAggregationResults[b++]));
            }
            results[ordIdx] = resultBuilder.build(owningBucketOrds[ordIdx], buckets);
        }
        return results;
    }

    @FunctionalInterface
    protected interface BucketBuilderForVariable<B> {
        B build(long bucketValue, long docCount, InternalAggregations subAggregationResults);
    }

    @FunctionalInterface
    protected interface ResultBuilderForVariable<B> {
        InternalAggregation build(long owninigBucketOrd, List<B> buckets);
    }

    @Override
    public final void close() {
        // Release docCounts even if super.close() throws.
        try (Releasable releasable = docCounts) {
            super.close();
        }
    }

    @Override
    public Aggregator resolveSortPath(AggregationPath.PathElement next, Iterator<AggregationPath.PathElement> path) {
        // Single-bucket aggregators are transparent for sort paths.
        if (this instanceof SingleBucketAggregator) {
            return resolveSortPathOnValidAgg(next, path);
        }
        return super.resolveSortPath(next, path);
    }

    @Override
    public BucketComparator bucketComparator(String key, SortOrder order) {
        if (false == this instanceof SingleBucketAggregator) {
            return super.bucketComparator(key, order);
        }
        // Single-bucket aggregations can only be ordered by their doc_count.
        if (key == null || "doc_count".equals(key)) {
            return (lhs, rhs) -> order.reverseMul() * Long.compare(bucketDocCount(lhs), bucketDocCount(rhs));
        }
        throw new IllegalArgumentException(
            "Ordering on a single-bucket aggregation can only be done on its doc_count. "
                + "Either drop the key (a la \""
                + name()
                + "\") or change it to \"doc_count\" (a la \""
                + name()
                + ".doc_count\") or \"key\"."
        );
    }

    /** Walks up the parent chain and returns true if any ancestor is a {@link GlobalAggregator}. */
    public static boolean descendsFromGlobalAggregator(Aggregator parent) {
        while (parent != null) {
            if (parent.getClass() == GlobalAggregator.class) {
                return true;
            }
            parent = parent.parent();
        }
        return false;
    }

    @Override
    protected void preGetSubLeafCollectors(LeafReaderContext ctx) throws IOException {
        super.preGetSubLeafCollectors(ctx);
        // Set LeafReaderContext to the doc_count provider
        docCountProvider.setLeafReaderContext(ctx);
    }
}
| |
package org.sagebionetworks.table.worker;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.util.LinkedList;
import java.util.List;
import java.util.UUID;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.sagebionetworks.StackConfiguration;
import org.sagebionetworks.StackConfigurationSingleton;
import org.sagebionetworks.aws.SynapseS3Client;
import org.sagebionetworks.ids.IdGenerator;
import org.sagebionetworks.ids.IdType;
import org.sagebionetworks.repo.manager.EntityManager;
import org.sagebionetworks.repo.manager.SemaphoreManager;
import org.sagebionetworks.repo.manager.UserManager;
import org.sagebionetworks.repo.manager.asynch.AsynchJobStatusManager;
import org.sagebionetworks.repo.manager.table.ColumnModelManager;
import org.sagebionetworks.repo.manager.table.TableEntityManager;
import org.sagebionetworks.repo.model.AuthorizationConstants.BOOTSTRAP_PRINCIPAL;
import org.sagebionetworks.repo.model.DatastoreException;
import org.sagebionetworks.repo.model.InvalidModelException;
import org.sagebionetworks.repo.model.UnauthorizedException;
import org.sagebionetworks.repo.model.UserInfo;
import org.sagebionetworks.repo.model.asynch.AsynchJobState;
import org.sagebionetworks.repo.model.asynch.AsynchronousJobStatus;
import org.sagebionetworks.repo.model.dao.FileHandleDao;
import org.sagebionetworks.repo.model.dbo.dao.table.TableModelTestUtils;
import org.sagebionetworks.repo.model.file.S3FileHandle;
import org.sagebionetworks.repo.model.table.ColumnModel;
import org.sagebionetworks.repo.model.table.ColumnType;
import org.sagebionetworks.repo.model.table.CsvTableDescriptor;
import org.sagebionetworks.repo.model.table.DownloadFromTableRequest;
import org.sagebionetworks.repo.model.table.DownloadFromTableResult;
import org.sagebionetworks.repo.model.table.Query;
import org.sagebionetworks.repo.model.table.QueryBundleRequest;
import org.sagebionetworks.repo.model.table.QueryResultBundle;
import org.sagebionetworks.repo.model.table.Row;
import org.sagebionetworks.repo.model.table.RowReferenceSet;
import org.sagebionetworks.repo.model.table.TableConstants;
import org.sagebionetworks.repo.model.table.TableEntity;
import org.sagebionetworks.repo.model.table.TableRowChange;
import org.sagebionetworks.repo.model.table.TableUpdateTransactionRequest;
import org.sagebionetworks.repo.model.table.UploadToTableRequest;
import org.sagebionetworks.repo.model.table.UploadToTableResult;
import org.sagebionetworks.repo.web.NotFoundException;
import org.sagebionetworks.table.cluster.utils.TableModelUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import com.amazonaws.services.s3.model.GetObjectRequest;
import com.google.common.collect.Lists;
import au.com.bytecode.opencsv.CSVReader;
import au.com.bytecode.opencsv.CSVWriter;
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(locations = { "classpath:test-context.xml" })
public class TableCSVAppenderWorkerIntegrationTest {
public static final int MAX_WAIT_MS = 1000 * 60;
@Autowired
AsynchJobStatusManager asynchJobStatusManager;
@Autowired
StackConfiguration config;
@Autowired
FileHandleDao fileHandleDao;
@Autowired
EntityManager entityManager;
@Autowired
TableEntityManager tableEntityManager;
@Autowired
ColumnModelManager columnManager;
@Autowired
UserManager userManager;
@Autowired
SynapseS3Client s3Client;
@Autowired
SemaphoreManager semphoreManager;
@Autowired
private IdGenerator idGenerator;
private UserInfo adminUserInfo;
RowReferenceSet referenceSet;
List<ColumnModel> schema;
List<String> headers;
private String tableId;
private List<String> toDelete = Lists.newArrayList();
private List<File> tempFiles = Lists.newArrayList();
private List<S3FileHandle> fileHandles = Lists.newArrayList();
/**
 * Resets shared infrastructure before each test: releases leftover semaphore
 * locks, clears the asynchronous job queues, resolves the admin user and
 * starts with an empty schema.
 */
@Before
public void before() throws NotFoundException {
    semphoreManager.releaseAllLocksAsAdmin(new UserInfo(true));
    // Start with an empty queue.
    asynchJobStatusManager.emptyAllQueues();
    // Get the admin user
    adminUserInfo = userManager.getUserInfo(BOOTSTRAP_PRINCIPAL.THE_ADMIN_USER.getPrincipalId());
    this.schema = new LinkedList<ColumnModel>();
}
/**
 * Best-effort cleanup after each test: deletes the entities, temp files and S3
 * file handles created by the test. Each resource is removed independently so
 * one failure does not prevent the remaining resources from being cleaned up.
 */
@After
public void after() {
    if (adminUserInfo != null) {
        for (String id : toDelete) {
            try {
                entityManager.deleteEntity(adminUserInfo, id);
            } catch (Exception ignored) {
                // The entity may already be gone; a failed delete must not fail the test.
            }
        }
    }
    for (File tempFile : tempFiles) {
        tempFile.delete();
    }
    for (S3FileHandle fileHandle : fileHandles) {
        s3Client.deleteObject(fileHandle.getBucketName(), fileHandle.getKey());
        fileHandleDao.delete(fileHandle.getId());
    }
}
/**
 * Round-trip upload where the first CSV line is used as the column header.
 * The scenario itself is implemented in {@link #doTestRoundTrip(boolean)}.
 */
@Test
public void testRoundTripWithCSVHeaders() throws DatastoreException, InvalidModelException, UnauthorizedException, NotFoundException,
        IOException, InterruptedException {
    doTestRoundTrip(true);
}
/**
 * Uploads a generated CSV into a freshly created two-column table and verifies
 * the asynchronous job result as well as the row changes recorded for the table.
 *
 * @param useCSVHeader when true the first CSV line holds the column names;
 *        otherwise the column ids are passed explicitly on the upload request
 */
private void doTestRoundTrip(boolean useCSVHeader) throws DatastoreException, InvalidModelException, UnauthorizedException,
        NotFoundException, IOException,
        InterruptedException {
    // Create a few columns
    // String
    ColumnModel cm = new ColumnModel();
    cm.setColumnType(ColumnType.STRING);
    cm.setName("somestrings");
    cm = columnManager.createColumnModel(adminUserInfo, cm);
    this.schema.add(cm);
    // integer
    cm = new ColumnModel();
    cm.setColumnType(ColumnType.INTEGER);
    cm.setName("someinteger");
    cm = columnManager.createColumnModel(adminUserInfo, cm);
    schema.add(cm);
    headers = TableModelUtils.getIds(schema);
    // create the table
    createTableWithSchema();
    // Create a CSV file to upload
    File tempFile = File.createTempFile("TableCSVAppenderWorkerIntegrationTest", ".csv");
    tempFiles.add(tempFile);
    CSVWriter csv = new CSVWriter(new FileWriter(tempFile));
    int rowCount = 100;
    try {
        if (useCSVHeader) {
            // Write the header. The columns are written in reverse schema order
            // (integer first) to exercise header-based column mapping.
            csv.writeNext(new String[] { schema.get(1).getName(), schema.get(0).getName() });
        }
        // Write some rows
        for (int i = 0; i < rowCount; i++) {
            csv.writeNext(new String[] { "" + i, "stringdata" + i });
        }
    } finally {
        csv.close();
    }
    S3FileHandle fileHandle = uploadFile(tempFile);
    // We are now ready to start the job
    UploadToTableRequest body = new UploadToTableRequest();
    body.setTableId(tableId);
    body.setUploadFileHandleId(fileHandle.getId());
    body.setEntityId(tableId);
    CsvTableDescriptor csvTableDescriptor = new CsvTableDescriptor();
    csvTableDescriptor.setIsFirstLineHeader(useCSVHeader);
    body.setCsvTableDescriptor(csvTableDescriptor);
    if (!useCSVHeader) {
        // Without a header line the request must map the CSV columns to column ids itself.
        body.setColumnIds(Lists.newArrayList(schema.get(1).getId(), schema.get(0).getId()));
    }
    TableUpdateTransactionRequest txRequest = TableModelUtils.wrapInTransactionRequest(body);
    AsynchronousJobStatus status = asynchJobStatusManager.startJob(adminUserInfo, txRequest);
    // Wait for the job to complete.
    status = waitForStatus(adminUserInfo, status);
    assertNotNull(status);
    UploadToTableResult response = TableModelUtils.extractResponseFromTransaction(status.getResponseBody(), UploadToTableResult.class);
    assertNotNull(response.getEtag());
    assertEquals(new Long(rowCount), response.getRowsProcessed());
    // There should be two change set applied to the table
    List<TableRowChange> changes = this.tableEntityManager.listRowSetsKeysForTable(tableId);
    assertNotNull(changes);
    assertEquals(2, changes.size());
    // The second change is expected to hold the uploaded rows — presumably the
    // first one is the schema change; verify against TableEntityManager.
    TableRowChange change = changes.get(1);
    assertEquals(new Long(rowCount), change.getRowCount());
    // the etag of the change should match what the job returned.
    assertEquals(change.getEtag(), response.getEtag());
}
/**
 * Full insert/download/modify/re-upload cycle: uploads rows, downloads them as
 * CSV including row id and version, modifies the string column of every row,
 * uploads the edited file again as an update, then checks the number of row
 * changes recorded for the table.
 */
@Test
public void testUpdateRoundTrip() throws DatastoreException, InvalidModelException, UnauthorizedException, NotFoundException,
        IOException, InterruptedException {
    // Create a few columns
    // String
    ColumnModel cm = new ColumnModel();
    cm.setColumnType(ColumnType.STRING);
    cm.setName("somestrings");
    cm = columnManager.createColumnModel(adminUserInfo, cm);
    this.schema.add(cm);
    // integer
    cm = new ColumnModel();
    cm.setColumnType(ColumnType.INTEGER);
    cm.setName("someinteger");
    cm = columnManager.createColumnModel(adminUserInfo, cm);
    schema.add(cm);
    // create the table.
    createTableWithSchema();
    // Create a CSV file to upload
    File tempFile = File.createTempFile("TableCSVAppenderWorkerIntegrationTest", ".csv");
    tempFiles.add(tempFile);
    CSVWriter csv = new CSVWriter(new FileWriter(tempFile));
    int rowCount = 100;
    try {
        // Write the header
        csv.writeNext(new String[] { schema.get(1).getName(), schema.get(0).getName() });
        // Write some rows
        for (int i = 0; i < rowCount; i++) {
            csv.writeNext(new String[] { "" + i, "stringdata" + i });
        }
    } finally {
        csv.close();
    }
    S3FileHandle fileHandle = uploadFile(tempFile);
    // We are now ready to start the job
    UploadToTableRequest body = new UploadToTableRequest();
    body.setTableId(tableId);
    body.setEntityId(tableId);
    body.setUploadFileHandleId(fileHandle.getId());
    System.out.println("Inserting");
    TableUpdateTransactionRequest txRequest = TableModelUtils.wrapInTransactionRequest(body);
    AsynchronousJobStatus status = asynchJobStatusManager.startJob(adminUserInfo, txRequest);
    // Wait for the job to complete.
    status = waitForStatus(adminUserInfo, status);
    // download the csv
    DownloadFromTableRequest download = new DownloadFromTableRequest();
    download.setSql("select somestrings, someinteger from " + tableId);
    // Row id and version are needed so the file can be re-uploaded as an update.
    download.setIncludeRowIdAndRowVersion(true);
    download.setCsvTableDescriptor(new CsvTableDescriptor());
    download.getCsvTableDescriptor().setIsFirstLineHeader(true);
    System.out.println("Downloading");
    status = asynchJobStatusManager.startJob(adminUserInfo, download);
    status = waitForStatus(adminUserInfo, status);
    DownloadFromTableResult downloadResult = (DownloadFromTableResult) status.getResponseBody();
    S3FileHandle resultFile = (S3FileHandle) fileHandleDao.get(downloadResult.getResultsFileHandleId());
    fileHandles.add(resultFile);
    tempFile = File.createTempFile("DownloadCSV", ".csv");
    tempFiles.add(tempFile);
    s3Client.getObject(new GetObjectRequest(resultFile.getBucketName(), resultFile.getKey()), tempFile);
    // Load the CSV data
    CSVReader csvReader = new CSVReader(new FileReader(tempFile));
    List<String[]> results = csvReader.readAll();
    csvReader.close();
    // modify it
    int i = 3000;
    // Skip the header row; each data row is expected to be: rowId, version, string, integer.
    for (String[] row : results.subList(1, results.size())) {
        assertEquals(4, row.length);
        row[2] += "-changed" + i++;
    }
    tempFile = File.createTempFile("TableCSVAppenderWorkerIntegrationTest2", ".csv");
    tempFiles.add(tempFile);
    csv = new CSVWriter(new FileWriter(tempFile));
    for (String[] row : results) {
        csv.writeNext(row);
    }
    csv.close();
    fileHandle = uploadFile(tempFile);
    // We are now ready to start the job
    body = new UploadToTableRequest();
    body.setTableId(tableId);
    body.setEntityId(tableId);
    body.setUploadFileHandleId(fileHandle.getId());
    System.out.println("Appending");
    txRequest = TableModelUtils.wrapInTransactionRequest(body);
    status = asynchJobStatusManager.startJob(adminUserInfo, txRequest);
    // Wait for the job to complete.
    status = waitForStatus(adminUserInfo, status);
    // There should now be three change sets applied to the table
    // (the assertion below checks for 3, not 2 as the old comment claimed).
    List<TableRowChange> changes = this.tableEntityManager.listRowSetsKeysForTable(tableId);
    assertNotNull(changes);
    assertEquals(3, changes.size());
}
/**
* Test added for support of SYNR-976
* Update a table from a CSV that does not include all of the columns.
* The update should only change the column in the CSV, the other
* column should not be modified.
*/
@Test
public void testPartialCSVUpdate() throws DatastoreException, InvalidModelException, UnauthorizedException, NotFoundException,
        IOException, InterruptedException {
    // Create a few columns
    // String
    ColumnModel cm = new ColumnModel();
    cm.setColumnType(ColumnType.STRING);
    cm.setName("somestrings");
    cm = columnManager.createColumnModel(adminUserInfo, cm);
    this.schema.add(cm);
    // integer
    cm = new ColumnModel();
    cm.setColumnType(ColumnType.INTEGER);
    cm.setName("someinteger");
    cm = columnManager.createColumnModel(adminUserInfo, cm);
    schema.add(cm);
    // create the table.
    createTableWithSchema();
    // Create a CSV file to upload
    File tempFile = File.createTempFile("TableCSVAppenderWorkerIntegrationTest", ".csv");
    tempFiles.add(tempFile);
    CSVWriter csv = new CSVWriter(new FileWriter(tempFile));
    int rowCount = 1;
    try {
        // Write the header. Note: deliberately in reverse schema order (someinteger, somestrings)
        // to show the upload matches columns by header name, not position.
        csv.writeNext(new String[] { schema.get(1).getName(), schema.get(0).getName() });
        // Write some rows
        for (int i = 0; i < rowCount; i++) {
            csv.writeNext(new String[] { "" + i, "stringdata" + i });
        }
    } finally {
        csv.close();
    }
    S3FileHandle fileHandle = uploadFile(tempFile);
    // We are now ready to start the job
    UploadToTableRequest body = new UploadToTableRequest();
    body.setTableId(tableId);
    body.setEntityId(tableId);
    body.setUploadFileHandleId(fileHandle.getId());
    System.out.println("Inserting");
    TableUpdateTransactionRequest txRequest = TableModelUtils.wrapInTransactionRequest(body);
    AsynchronousJobStatus status = asynchJobStatusManager.startJob(adminUserInfo, txRequest);
    // Wait for the job to complete.
    status = waitForStatus(adminUserInfo, status);
    // download the csv containing ONLY the string column; the integer column is intentionally
    // excluded so the subsequent update is partial.
    DownloadFromTableRequest download = new DownloadFromTableRequest();
    download.setSql("select somestrings from " + tableId);
    download.setIncludeRowIdAndRowVersion(true);
    download.setCsvTableDescriptor(new CsvTableDescriptor());
    download.getCsvTableDescriptor().setIsFirstLineHeader(true);
    System.out.println("Downloading");
    status = asynchJobStatusManager.startJob(adminUserInfo, download);
    status = waitForStatus(adminUserInfo, status);
    DownloadFromTableResult downloadResult = (DownloadFromTableResult) status.getResponseBody();
    S3FileHandle resultFile = (S3FileHandle) fileHandleDao.get(downloadResult.getResultsFileHandleId());
    fileHandles.add(resultFile);
    tempFile = File.createTempFile("DownloadCSV", ".csv");
    tempFiles.add(tempFile);
    s3Client.getObject(new GetObjectRequest(resultFile.getBucketName(), resultFile.getKey()), tempFile);
    // Load the CSV data
    CSVReader csvReader = new CSVReader(new FileReader(tempFile));
    List<String[]> results = csvReader.readAll();
    csvReader.close();
    // modify it, skipping the header row (subList(1, ...)).
    int i = 3000;
    for (String[] row : results.subList(1, results.size())) {
        // Each row is ROW_ID, ROW_VERSION, somestrings.
        assertEquals(3, row.length);
        row[2] += "-changed" + i++;
    }
    tempFile = File.createTempFile("TableCSVAppenderWorkerIntegrationTest2", ".csv");
    tempFiles.add(tempFile);
    csv = new CSVWriter(new FileWriter(tempFile));
    for (String[] row : results) {
        csv.writeNext(row);
    }
    csv.close();
    fileHandle = uploadFile(tempFile);
    // We are now ready to start the update job with the partial (single-column) CSV.
    body = new UploadToTableRequest();
    body.setTableId(tableId);
    body.setEntityId(tableId);
    body.setUploadFileHandleId(fileHandle.getId());
    CsvTableDescriptor csvTableDescriptor = new CsvTableDescriptor();
    csvTableDescriptor.setIsFirstLineHeader(true);
    csvTableDescriptor.setSeparator(",");
    body.setCsvTableDescriptor(csvTableDescriptor);
    System.out.println("Appending");
    txRequest = TableModelUtils.wrapInTransactionRequest(body);
    status = asynchJobStatusManager.startJob(adminUserInfo, txRequest);
    // Wait for the job to complete.
    status = waitForStatus(adminUserInfo, status);
    // query for the results
    Query query = new Query();
    query.setSql("select * from "+tableId+" limit 1");
    QueryBundleRequest queryBundle = new QueryBundleRequest();
    queryBundle.setQuery(query);
    status = asynchJobStatusManager.startJob(adminUserInfo, queryBundle);
    // Wait for the job to complete.
    status = waitForStatus(adminUserInfo, status);
    QueryResultBundle bundleResults = (QueryResultBundle) status.getResponseBody();
    assertNotNull(bundleResults);
    assertNotNull(bundleResults.getQueryResult());
    assertNotNull(bundleResults.getQueryResult().getQueryResults());
    assertNotNull(bundleResults.getQueryResult().getQueryResults().getRows());
    List<Row> rows = bundleResults.getQueryResult().getQueryResults().getRows();
    assertEquals(1, rows.size());
    Row row = rows.get(0);
    assertNotNull(row.getValues());
    assertEquals(2, row.getValues().size());
    // the column changed in the CSV should be changed.
    assertEquals("stringdata0-changed3000",row.getValues().get(0));
    // validate the column not included in the update CVS remains unchanged.
    assertEquals("0",row.getValues().get(1));
}
/**
* A complex use case is needed to verify that both PLFM-3155 and PLFM-3355 are fixed.
* For this use case a CSV that includes both ROW_ID and ROW_VERSION is used to create
* a new table. The CSV contains one column of actual string data with a header named 'bar'.
* The new table table has a single string column named 'foo'. Since the column name
* of the table and the CSV header do not match, the CSV will be uploaded with
* linesToSkip=1 and isFirstLineHeader=false. The expected result is all data from
* the 'bar' column in the CSV is successfully added to the 'foo' column of the
* table.
*/
@Test
public void testPLFM_3155andPLFM_3354() throws Exception {
    // the column name is 'foo'
    ColumnModel column = TableModelTestUtils.createColumn(null, "foo", ColumnType.STRING);
    column.setMaximumSize(2L);
    column = columnManager.createColumnModel(adminUserInfo, column);
    schema = Lists.newArrayList(column);
    // create the table
    createTableWithSchema();
    // Create a CSV file to upload
    File tempFile = File.createTempFile("TestPLFM_3155andPLFM_3354", ".csv");
    tempFiles.add(tempFile);
    CSVWriter csv = new CSVWriter(new FileWriter(tempFile));
    int rowCount = 10;
    try {
        // the header name is 'bar', which intentionally does NOT match the table's 'foo' column.
        csv.writeNext(new String[] { TableConstants.ROW_ID, TableConstants.ROW_VERSION, "bar"});
        // Write some rows; each carries a pre-existing ROW_ID (100+i) and ROW_VERSION (99).
        for (int i = 0; i < rowCount; i++) {
            csv.writeNext(new String[] { "" + (100+i), "99","d" + i });
        }
    } finally {
        csv.close();
    }
    S3FileHandle fileHandle = uploadFile(tempFile);
    // Upload the CSV to the table
    UploadToTableRequest body = new UploadToTableRequest();
    body.setTableId(tableId);
    body.setEntityId(tableId);
    body.setUploadFileHandleId(fileHandle.getId());
    // skip the first line
    body.setLinesToSkip(1L);
    CsvTableDescriptor csvDescriptor = new CsvTableDescriptor();
    // the first line is NOT treated as a header; columns map positionally instead.
    csvDescriptor.setIsFirstLineHeader(false);
    body.setCsvTableDescriptor(csvDescriptor);
    TableUpdateTransactionRequest txRequest = TableModelUtils.wrapInTransactionRequest(body);
    AsynchronousJobStatus status = asynchJobStatusManager.startJob(adminUserInfo, txRequest);
    // Wait for the job to complete.
    status = waitForStatus(adminUserInfo, status);
    assertNotNull(status);
    UploadToTableResult response = TableModelUtils.extractResponseFromTransaction(status.getResponseBody(), UploadToTableResult.class);
    assertNotNull(response.getEtag());
    // All data rows (header skipped) must have been processed.
    assertEquals(new Long(rowCount), response.getRowsProcessed());
}
/**
* Create a table using the schema.
*
*/
/**
 * Create a new table entity named with a random UUID, using the current {@code schema},
 * and bind the schema's column ids to it.
 */
void createTableWithSchema() {
    headers = TableModelUtils.getIds(schema);
    // Create the table entity itself and register it for cleanup.
    TableEntity table = new TableEntity();
    table.setName(UUID.randomUUID().toString());
    table.setColumnIds(headers);
    tableId = entityManager.createEntity(adminUserInfo, table, null);
    toDelete.add(tableId);
    // Bind the columns. This is normally done at the service layer but the workers cannot depend on that layer.
    tableEntityManager.setTableSchema(adminUserInfo, headers, tableId);
}
/**
 * Poll an asynchronous job until it reaches the COMPLETE state.
 * Fails the test if the job reports FAILED or if MAX_WAIT_MS elapses.
 *
 * @param user the user owning the job
 * @param status the initial job status to poll from
 * @return the final (COMPLETE) job status
 */
private AsynchronousJobStatus waitForStatus(UserInfo user, AsynchronousJobStatus status) throws InterruptedException, DatastoreException,
        NotFoundException {
    final long deadline = System.currentTimeMillis() + MAX_WAIT_MS;
    while (!AsynchJobState.COMPLETE.equals(status.getJobState())) {
        assertFalse("Job Failed: " + status.getErrorDetails(), AsynchJobState.FAILED.equals(status.getJobState()));
        assertTrue("Timed out waiting for table status", System.currentTimeMillis() < deadline);
        // Back off briefly before polling again.
        Thread.sleep(1000);
        status = this.asynchJobStatusManager.getJobStatus(user, status.getJobId());
    }
    return status;
}
/**
* Upload the given file to S3 and create a S3FileHandle for it.
*
* @param tempFile
* @return
*/
/**
 * Create an S3FileHandle record for the given file and upload the file's bytes to S3.
 *
 * @param tempFile the local file to upload
 * @return the persisted file handle pointing at the uploaded S3 object
 */
private S3FileHandle uploadFile(File tempFile) {
    S3FileHandle handle = new S3FileHandle();
    handle.setId(idGenerator.generateNewId(IdType.FILE_IDS).toString());
    handle.setEtag(UUID.randomUUID().toString());
    handle.setBucketName(StackConfigurationSingleton.singleton().getS3Bucket());
    handle.setKey(UUID.randomUUID() + ".csv");
    handle.setContentType("text/csv");
    handle.setContentSize(tempFile.length());
    handle.setFileName(tempFile.getName());
    handle.setCreatedBy("" + adminUserInfo.getId());
    // The handle is used as its own preview.
    handle.setPreviewId(handle.getId());
    // Persist the handle record first and track it for cleanup...
    handle = (S3FileHandle) fileHandleDao.createFile(handle);
    fileHandles.add(handle);
    // ...then push the actual bytes to S3.
    s3Client.putObject(handle.getBucketName(), handle.getKey(), tempFile);
    return handle;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache license, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the license for the specific language governing permissions and
* limitations under the license.
*/
package org.apache.logging.log4j.core.appender;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;
import java.util.zip.Deflater;
import org.apache.logging.log4j.core.Filter;
import org.apache.logging.log4j.core.Layout;
import org.apache.logging.log4j.core.LogEvent;
import org.apache.logging.log4j.core.appender.rolling.DefaultRolloverStrategy;
import org.apache.logging.log4j.core.appender.rolling.RollingFileManager;
import org.apache.logging.log4j.core.appender.rolling.RolloverStrategy;
import org.apache.logging.log4j.core.appender.rolling.TriggeringPolicy;
import org.apache.logging.log4j.core.config.Configuration;
import org.apache.logging.log4j.core.config.plugins.Plugin;
import org.apache.logging.log4j.core.config.plugins.PluginAttribute;
import org.apache.logging.log4j.core.config.plugins.PluginConfiguration;
import org.apache.logging.log4j.core.config.plugins.PluginElement;
import org.apache.logging.log4j.core.config.plugins.PluginFactory;
import org.apache.logging.log4j.core.layout.PatternLayout;
import org.apache.logging.log4j.core.net.Advertiser;
import org.apache.logging.log4j.core.util.Booleans;
import org.apache.logging.log4j.core.util.Integers;
/**
* An appender that writes to files and can roll over at intervals.
*/
@Plugin(name = "RollingFile", category = "Core", elementType = "appender", printObject = true)
public final class RollingFileAppender extends AbstractOutputStreamAppender<RollingFileManager> {

    /** Buffer size used when bufferedIO is enabled and no explicit size is configured. */
    private static final int DEFAULT_BUFFER_SIZE = 8192;
    private static final long serialVersionUID = 1L;

    /** The file that is actively written to. */
    private final String fileName;
    /** The pattern used to name files on rollover. */
    private final String filePattern;
    /** Opaque token returned by the advertiser; required to unadvertise on stop. */
    private Object advertisement;
    private final Advertiser advertiser;

    private RollingFileAppender(final String name, final Layout<? extends Serializable> layout, final Filter filter,
            final RollingFileManager manager, final String fileName, final String filePattern,
            final boolean ignoreExceptions, final boolean immediateFlush, final Advertiser advertiser) {
        super(name, layout, filter, ignoreExceptions, immediateFlush, manager);
        if (advertiser != null) {
            // Publish the appender's content format, content type and name for discovery.
            final Map<String, String> configuration = new HashMap<>(layout.getContentFormat());
            configuration.put("contentType", layout.getContentType());
            configuration.put("name", name);
            advertisement = advertiser.advertise(configuration);
        }
        this.fileName = fileName;
        this.filePattern = filePattern;
        this.advertiser = advertiser;
    }

    @Override
    public void stop() {
        super.stop();
        if (advertiser != null) {
            // Withdraw the advertisement published by the constructor.
            advertiser.unadvertise(advertisement);
        }
    }

    /**
     * Write the log entry rolling over the file when required.
     * @param event The LogEvent.
     */
    @Override
    public void append(final LogEvent event) {
        getManager().checkRollover(event);
        super.append(event);
    }

    /**
     * Returns the File name for the Appender.
     * @return The file name.
     */
    public String getFileName() {
        return fileName;
    }

    /**
     * Returns the file pattern used when rolling over.
     * @return The file pattern.
     */
    public String getFilePattern() {
        return filePattern;
    }

    /**
     * Returns the triggering policy.
     * @param <T> the expected TriggeringPolicy subtype
     * @return The TriggeringPolicy
     */
    public <T extends TriggeringPolicy> T getTriggeringPolicy() {
        return getManager().getTriggeringPolicy();
    }

    /**
     * Create a RollingFileAppender.
     * @param fileName The name of the file that is actively written to. (required).
     * @param filePattern The pattern of the file name to use on rollover. (required).
     * @param append If true, events are appended to the file. If false, the file
     * is overwritten when opened. Defaults to "true"
     * @param name The name of the Appender (required).
     * @param bufferedIO When true, I/O will be buffered. Defaults to "true".
     * @param bufferSizeStr buffer size for buffered IO (default is 8192).
     * @param immediateFlush When true, events are immediately flushed. Defaults to "true".
     * @param policy The triggering policy. (required).
     * @param strategy The rollover strategy. Defaults to DefaultRolloverStrategy.
     * @param layout The layout to use (defaults to the default PatternLayout).
     * @param filter The Filter or null.
     * @param ignore If {@code "true"} (default) exceptions encountered when appending events are logged; otherwise
     * they are propagated to the caller.
     * @param advertise "true" if the appender configuration should be advertised, "false" otherwise.
     * @param advertiseURI The advertised URI which can be used to retrieve the file contents.
     * @param config The Configuration.
     * @return A RollingFileAppender, or {@code null} if a required attribute is missing.
     */
    @PluginFactory
    public static RollingFileAppender createAppender(
            @PluginAttribute("fileName") final String fileName,
            @PluginAttribute("filePattern") final String filePattern,
            @PluginAttribute("append") final String append,
            @PluginAttribute("name") final String name,
            @PluginAttribute("bufferedIO") final String bufferedIO,
            @PluginAttribute("bufferSize") final String bufferSizeStr,
            @PluginAttribute("immediateFlush") final String immediateFlush,
            @PluginElement("Policy") final TriggeringPolicy policy,
            @PluginElement("Strategy") RolloverStrategy strategy,
            @PluginElement("Layout") Layout<? extends Serializable> layout,
            @PluginElement("Filter") final Filter filter,
            @PluginAttribute("ignoreExceptions") final String ignore,
            @PluginAttribute("advertise") final String advertise,
            @PluginAttribute("advertiseURI") final String advertiseURI,
            @PluginConfiguration final Configuration config) {
        final boolean isAppend = Booleans.parseBoolean(append, true);
        final boolean ignoreExceptions = Booleans.parseBoolean(ignore, true);
        final boolean isBuffered = Booleans.parseBoolean(bufferedIO, true);
        final boolean isFlush = Booleans.parseBoolean(immediateFlush, true);
        final boolean isAdvertise = Boolean.parseBoolean(advertise);
        final int bufferSize = Integers.parseInt(bufferSizeStr, DEFAULT_BUFFER_SIZE);
        // Only warn when the user explicitly configured a buffer size that will be ignored.
        // Previously the warning also fired for the implicit default (8192) whenever
        // bufferedIO was false, even though no bufferSize had been set.
        if (!isBuffered && bufferSizeStr != null && bufferSize > 0) {
            LOGGER.warn("The bufferSize is set to {} but bufferedIO is not true: {}", bufferSize, bufferedIO);
        }
        if (name == null) {
            LOGGER.error("No name provided for RollingFileAppender");
            return null;
        }
        if (fileName == null) {
            LOGGER.error("No fileName provided for RollingFileAppender with name {}", name);
            return null;
        }
        if (filePattern == null) {
            LOGGER.error("No filePattern provided for RollingFileAppender with name {}", name);
            return null;
        }
        if (policy == null) {
            LOGGER.error("A TriggeringPolicy must be provided for RollingFileAppender with name {}", name);
            return null;
        }
        if (strategy == null) {
            // Fall back to the default rollover strategy with the default compression level.
            strategy = DefaultRolloverStrategy.createStrategy(null, null, null,
                    String.valueOf(Deflater.DEFAULT_COMPRESSION), config);
        }
        if (layout == null) {
            layout = PatternLayout.createDefaultLayout();
        }
        final RollingFileManager manager = RollingFileManager.getFileManager(fileName, filePattern, isAppend,
                isBuffered, policy, strategy, advertiseURI, layout, bufferSize);
        if (manager == null) {
            return null;
        }
        return new RollingFileAppender(name, layout, filter, manager, fileName, filePattern,
                ignoreExceptions, isFlush, isAdvertise ? config.getAdvertiser() : null);
    }
}
| |
// Copyright (C) 2017 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.acceptance.server.notedb;
import static com.google.common.collect.ImmutableSortedSet.toImmutableSortedSet;
import static com.google.common.truth.Truth.assertThat;
import static com.google.common.truth.Truth8.assertThat;
import static com.google.common.truth.TruthJUnit.assume;
import static com.google.gerrit.server.notedb.NoteDbChangeState.NOTE_DB_PRIMARY_STATE;
import static com.google.gerrit.server.notedb.NotesMigrationState.NOTE_DB;
import static com.google.gerrit.server.notedb.NotesMigrationState.READ_WRITE_NO_SEQUENCE;
import static com.google.gerrit.server.notedb.NotesMigrationState.READ_WRITE_WITH_SEQUENCE_NOTE_DB_PRIMARY;
import static com.google.gerrit.server.notedb.NotesMigrationState.READ_WRITE_WITH_SEQUENCE_REVIEW_DB_PRIMARY;
import static com.google.gerrit.server.notedb.NotesMigrationState.REVIEW_DB;
import static com.google.gerrit.server.notedb.NotesMigrationState.WRITE;
import static java.nio.charset.StandardCharsets.UTF_8;
import static java.util.Comparator.naturalOrder;
import static org.easymock.EasyMock.createStrictMock;
import static org.easymock.EasyMock.expectLastCall;
import static org.easymock.EasyMock.replay;
import static org.easymock.EasyMock.verify;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.io.MoreFiles;
import com.google.common.io.RecursiveDeleteOption;
import com.google.gerrit.acceptance.AbstractDaemonTest;
import com.google.gerrit.acceptance.GerritConfig;
import com.google.gerrit.acceptance.NoHttpd;
import com.google.gerrit.acceptance.PushOneCommit;
import com.google.gerrit.acceptance.Sandboxed;
import com.google.gerrit.acceptance.UseLocalDisk;
import com.google.gerrit.extensions.registration.DynamicSet;
import com.google.gerrit.extensions.registration.RegistrationHandle;
import com.google.gerrit.reviewdb.client.Change;
import com.google.gerrit.reviewdb.client.PatchSet;
import com.google.gerrit.reviewdb.client.Project;
import com.google.gerrit.reviewdb.client.RefNames;
import com.google.gerrit.reviewdb.server.ReviewDb;
import com.google.gerrit.server.CommentsUtil;
import com.google.gerrit.server.Sequences;
import com.google.gerrit.server.config.SitePaths;
import com.google.gerrit.server.notedb.ChangeBundle;
import com.google.gerrit.server.notedb.ChangeBundleReader;
import com.google.gerrit.server.notedb.NoteDbChangeState;
import com.google.gerrit.server.notedb.NoteDbChangeState.PrimaryStorage;
import com.google.gerrit.server.notedb.NoteDbChangeState.RefState;
import com.google.gerrit.server.notedb.NotesMigrationState;
import com.google.gerrit.server.notedb.rebuild.MigrationException;
import com.google.gerrit.server.notedb.rebuild.NoteDbMigrator;
import com.google.gerrit.server.notedb.rebuild.NotesMigrationStateListener;
import com.google.gerrit.server.schema.ReviewDbFactory;
import com.google.gerrit.testing.ConfigSuite;
import com.google.gerrit.testing.NoteDbMode;
import com.google.gwtorm.server.OrmException;
import com.google.gwtorm.server.SchemaFactory;
import com.google.inject.Inject;
import com.google.inject.Provider;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.stream.Stream;
import org.eclipse.jgit.internal.storage.file.FileRepository;
import org.eclipse.jgit.junit.TestRepository;
import org.eclipse.jgit.lib.Config;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.ObjectLoader;
import org.eclipse.jgit.lib.ObjectReader;
import org.eclipse.jgit.lib.Ref;
import org.eclipse.jgit.lib.RefUpdate;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.lib.RepositoryCache.FileKey;
import org.eclipse.jgit.storage.file.FileBasedConfig;
import org.eclipse.jgit.util.FS;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
@Sandboxed
@UseLocalDisk
@NoHttpd
public class OnlineNoteDbMigrationIT extends AbstractDaemonTest {
private static final String INVALID_STATE = "deadbeefdeadbeefdeadbeefdeadbeefdeadbeef";
@ConfigSuite.Default
public static Config defaultConfig() {
    // Small batch and large gap so the sequence tests can observe both values.
    Config config = new Config();
    config.setInt("noteDb", "changes", "sequenceBatchSize", 10);
    config.setInt("noteDb", "changes", "initialSequenceGap", 500);
    return config;
}
// Tests in this class are generally interested in the actual ReviewDb contents, but the shifting
// migration state may result in various kinds of wrappers showing up unexpectedly.
// Raw ReviewDb schema factory (see comment above: avoids the migration's wrappers).
@Inject @ReviewDbFactory private SchemaFactory<ReviewDb> schemaFactory;
@Inject private ChangeBundleReader changeBundleReader;
@Inject private CommentsUtil commentsUtil;
// Migration state listeners; handles registered by a test are tracked in addedListeners.
@Inject private DynamicSet<NotesMigrationStateListener> listeners;
@Inject private Provider<NoteDbMigrator.Builder> migratorBuilderProvider;
@Inject private Sequences sequences;
@Inject private SitePaths sitePaths;
// Backing config for the site's notedb.config file; loaded/saved directly by tests.
private FileBasedConfig noteDbConfig;
// Listener registrations added during a test; removed again in tearDown.
private List<RegistrationHandle> addedListeners;
@Before
public void setUp() throws Exception {
    // These tests drive the migration state themselves, so only run when the harness
    // is not already forcing a NoteDb mode.
    assume().that(NoteDbMode.get()).isEqualTo(NoteDbMode.OFF);
    // Unlike in the running server, for tests, we don't stack notedb.config on gerrit.config.
    noteDbConfig = new FileBasedConfig(sitePaths.notedb_config.toFile(), FS.detect());
    // Every test starts from pure ReviewDb with autoMigrate and trial both false.
    assertNotesMigrationState(REVIEW_DB, false, false);
    addedListeners = new ArrayList<>();
}
@After
public void tearDown() throws Exception {
    // Unregister any listeners a test added, exactly once.
    List<RegistrationHandle> handles = addedListeners;
    if (handles == null) {
        return;
    }
    addedListeners = null;
    for (RegistrationHandle handle : handles) {
        handle.remove();
    }
}
@Test
public void preconditionsFail() throws Exception {
    List<Change.Id> cs = ImmutableList.of(new Change.Id(1));
    List<Project.NameKey> ps = ImmutableList.of(new Project.NameKey("p"));
    // Rebuilding requires NoteDb writes to be enabled first.
    assertMigrationException(
        "Cannot rebuild without noteDb.changes.write=true", b -> b, NoteDbMigrator::rebuild);
    // changes, projects and skipProjects are mutually exclusive selectors.
    assertMigrationException(
        "Cannot combine changes, projects and skipProjects",
        b -> b.setChanges(cs).setProjects(ps),
        m -> {});
    assertMigrationException(
        "Cannot combine changes, projects and skipProjects",
        b -> b.setChanges(cs).setSkipProjects(ps),
        m -> {});
    assertMigrationException(
        "Cannot combine changes, projects and skipProjects",
        b -> b.setProjects(ps).setSkipProjects(ps),
        m -> {});
    // A full migration may not be narrowed to a subset of changes/projects.
    assertMigrationException(
        "Cannot set changes or projects or skipProjects during full migration",
        b -> b.setChanges(cs),
        NoteDbMigrator::migrate);
    assertMigrationException(
        "Cannot set changes or projects or skipProjects during full migration",
        b -> b.setProjects(ps),
        NoteDbMigrator::migrate);
    assertMigrationException(
        "Cannot set changes or projects or skipProjects during full migration",
        b -> b.setSkipProjects(ps),
        NoteDbMigrator::migrate);
    // Once past the trial-mode endpoint, trial mode can no longer be requested.
    setNotesMigrationState(READ_WRITE_WITH_SEQUENCE_REVIEW_DB_PRIMARY);
    assertMigrationException(
        "Migration has already progressed past the endpoint of the \"trial mode\" state",
        b -> b.setTrialMode(true),
        NoteDbMigrator::migrate);
    // Force-rebuild is disallowed once NoteDb is primary for some changes.
    setNotesMigrationState(READ_WRITE_WITH_SEQUENCE_NOTE_DB_PRIMARY);
    assertMigrationException(
        "Cannot force rebuild changes; NoteDb is already the primary storage for some changes",
        b -> b.setForceRebuild(true),
        NoteDbMigrator::migrate);
}
@Test
@GerritConfig(name = "noteDb.changes.initialSequenceGap", value = "-7")
public void initialSequenceGapMustBeNonNegative() throws Exception {
    // The configured (negative) gap is rejected when the sequence step runs.
    setNotesMigrationState(READ_WRITE_NO_SEQUENCE);
    assertMigrationException("Sequence gap must be non-negative: -7", b -> b, m -> {});
}
@Test
public void rebuildOneChangeTrialModeAndForceRebuild() throws Exception {
    PushOneCommit.Result r = createChange();
    Change.Id id = r.getChange().getId();
    // First trial migration: change is rebuilt into NoteDb, ReviewDb stays primary.
    migrate(b -> b.setTrialMode(true));
    assertNotesMigrationState(READ_WRITE_NO_SEQUENCE, false, true);
    ObjectId oldMetaId;
    try (Repository repo = repoManager.openRepository(project);
            ReviewDb db = schemaFactory.open()) {
        Ref ref = repo.exactRef(RefNames.changeMetaRef(id));
        assertThat(ref).isNotNull();
        oldMetaId = ref.getObjectId();
        Change c = db.changes().get(id);
        assertThat(c).isNotNull();
        NoteDbChangeState state = NoteDbChangeState.parse(c);
        assertThat(state).isNotNull();
        assertThat(state.getPrimaryStorage()).isEqualTo(PrimaryStorage.REVIEW_DB);
        assertThat(state.getRefState()).hasValue(RefState.create(oldMetaId, ImmutableMap.of()));
        // Force change to be out of date, and change topic so it will get rebuilt as something other
        // than oldMetaId.
        c.setNoteDbState(INVALID_STATE);
        c.setTopic(name("a-new-topic"));
        db.changes().update(ImmutableList.of(c));
    }
    // Second trial migration without forceRebuild: the stale change is left alone.
    migrate(b -> b.setTrialMode(true));
    assertNotesMigrationState(READ_WRITE_NO_SEQUENCE, false, true);
    try (Repository repo = repoManager.openRepository(project);
            ReviewDb db = schemaFactory.open()) {
        // Change is out of date, but was not rebuilt without forceRebuild.
        assertThat(repo.exactRef(RefNames.changeMetaRef(id)).getObjectId()).isEqualTo(oldMetaId);
        Change c = db.changes().get(id);
        assertThat(c.getNoteDbState()).isEqualTo(INVALID_STATE);
    }
    // Third trial migration with forceRebuild: the stale change is rebuilt to a new meta SHA.
    migrate(b -> b.setTrialMode(true).setForceRebuild(true));
    assertNotesMigrationState(READ_WRITE_NO_SEQUENCE, false, true);
    try (Repository repo = repoManager.openRepository(project);
            ReviewDb db = schemaFactory.open()) {
        Ref ref = repo.exactRef(RefNames.changeMetaRef(id));
        assertThat(ref).isNotNull();
        ObjectId newMetaId = ref.getObjectId();
        assertThat(newMetaId).isNotEqualTo(oldMetaId);
        NoteDbChangeState state = NoteDbChangeState.parse(db.changes().get(id));
        assertThat(state).isNotNull();
        assertThat(state.getPrimaryStorage()).isEqualTo(PrimaryStorage.REVIEW_DB);
        assertThat(state.getRefState()).hasValue(RefState.create(newMetaId, ImmutableMap.of()));
    }
}
@Test
public void autoMigrateTrialMode() throws Exception {
    PushOneCommit.Result r = createChange();
    Change.Id id = r.getChange().getId();
    // Start an auto-migration in trial mode, stopping early at the WRITE state.
    migrate(b -> b.setAutoMigrate(true).setTrialMode(true).setStopAtStateForTesting(WRITE));
    assertNotesMigrationState(WRITE, true, true);
    // Resume; trial mode halts the migration at READ_WRITE_NO_SEQUENCE.
    migrate(b -> b);
    // autoMigrate is still enabled so that we can continue the migration by only unsetting trial.
    assertNotesMigrationState(READ_WRITE_NO_SEQUENCE, true, true);
    ObjectId metaId;
    try (Repository repo = repoManager.openRepository(project);
            ReviewDb db = schemaFactory.open()) {
        Ref ref = repo.exactRef(RefNames.changeMetaRef(id));
        assertThat(ref).isNotNull();
        metaId = ref.getObjectId();
        NoteDbChangeState state = NoteDbChangeState.parse(db.changes().get(id));
        assertThat(state).isNotNull();
        // During trial mode ReviewDb remains the primary storage.
        assertThat(state.getPrimaryStorage()).isEqualTo(PrimaryStorage.REVIEW_DB);
        assertThat(state.getRefState()).hasValue(RefState.create(metaId, ImmutableMap.of()));
    }
    // Unset trial mode and the next migration runs to completion.
    noteDbConfig.load();
    NoteDbMigrator.setTrialMode(noteDbConfig, false);
    noteDbConfig.save();
    migrate(b -> b);
    assertNotesMigrationState(NOTE_DB, false, false);
    try (Repository repo = repoManager.openRepository(project);
            ReviewDb db = schemaFactory.open()) {
        Ref ref = repo.exactRef(RefNames.changeMetaRef(id));
        assertThat(ref).isNotNull();
        // The meta ref was not rewritten by finishing the migration.
        assertThat(ref.getObjectId()).isEqualTo(metaId);
        NoteDbChangeState state = NoteDbChangeState.parse(db.changes().get(id));
        assertThat(state).isNotNull();
        assertThat(state.getPrimaryStorage()).isEqualTo(PrimaryStorage.NOTE_DB);
    }
}
@Test
public void rebuildSubsetOfChanges() throws Exception {
    setNotesMigrationState(WRITE);
    PushOneCommit.Result r1 = createChange();
    PushOneCommit.Result r2 = createChange();
    Change.Id id1 = r1.getChange().getId();
    Change.Id id2 = r2.getChange().getId();
    // Mark both changes stale, then rebuild only id2.
    invalidateNoteDbState(id1, id2);
    migrate(b -> b.setChanges(ImmutableList.of(id2)), NoteDbMigrator::rebuild);
    assertNotRebuilt(id1);
    assertRebuilt(id2);
}
@Test
public void rebuildSubsetOfProjects() throws Exception {
    setNotesMigrationState(WRITE);
    Project.NameKey p2 = createProject("project2");
    TestRepository<?> tr2 = cloneProject(p2, admin);
    // One change in the default project, one in project2.
    PushOneCommit.Result r1 = createChange();
    PushOneCommit.Result r2 = pushFactory.create(db, admin.getIdent(), tr2).to("refs/for/master");
    Change.Id id1 = r1.getChange().getId();
    Change.Id id2 = r2.getChange().getId();
    // Mark both stale, then rebuild only project2's changes.
    invalidateNoteDbState(id1, id2);
    migrate(b -> b.setProjects(ImmutableList.of(p2)), NoteDbMigrator::rebuild);
    assertNotRebuilt(id1);
    assertRebuilt(id2);
}
@Test
public void rebuildNonSkippedProjects() throws Exception {
    setNotesMigrationState(WRITE);
    Project.NameKey p2 = createProject("project2");
    TestRepository<?> tr2 = cloneProject(p2, admin);
    Project.NameKey p3 = createProject("project3");
    TestRepository<?> tr3 = cloneProject(p3, admin);
    // One change in each of the three projects.
    PushOneCommit.Result r1 = createChange();
    PushOneCommit.Result r2 = pushFactory.create(db, admin.getIdent(), tr2).to("refs/for/master");
    PushOneCommit.Result r3 = pushFactory.create(db, admin.getIdent(), tr3).to("refs/for/master");
    Change.Id id1 = r1.getChange().getId();
    Change.Id id2 = r2.getChange().getId();
    Change.Id id3 = r3.getChange().getId();
    // Mark all three stale, then rebuild everything except project3.
    invalidateNoteDbState(id1, id2, id3);
    migrate(b -> b.setSkipProjects(ImmutableList.of(p3)), NoteDbMigrator::rebuild);
    assertRebuilt(id1, id2);
    assertNotRebuilt(id3);
}
/**
 * Overwrite each change's NoteDb state in ReviewDb with a bogus SHA so the change
 * reads as out of date and is eligible for rebuilding.
 */
private void invalidateNoteDbState(Change.Id... ids) throws OrmException {
    try (ReviewDb db = schemaFactory.open()) {
        List<Change> updated = new ArrayList<>(ids.length);
        for (Change.Id id : ids) {
            Change change = db.changes().get(id);
            change.setNoteDbState(INVALID_STATE);
            updated.add(change);
        }
        // Write all invalidated changes back in a single batch.
        db.changes().update(updated);
    }
}
/** Assert each change's NoteDb state was rewritten away from the invalid marker. */
private void assertRebuilt(Change.Id... ids) throws OrmException {
    try (ReviewDb db = schemaFactory.open()) {
        for (Change.Id id : ids) {
            NoteDbChangeState state = NoteDbChangeState.parse(db.changes().get(id));
            assertThat(state.getChangeMetaId().name()).isNotEqualTo(INVALID_STATE);
        }
    }
}
/** Assert each change still carries the invalid marker, i.e. was NOT rebuilt. */
private void assertNotRebuilt(Change.Id... ids) throws OrmException {
    try (ReviewDb db = schemaFactory.open()) {
        for (Change.Id id : ids) {
            NoteDbChangeState state = NoteDbChangeState.parse(db.changes().get(id));
            assertThat(state.getChangeMetaId().name()).isEqualTo(INVALID_STATE);
        }
    }
}
@Test
public void enableSequencesNoGap() throws Exception {
    // Gap of 0: first NoteDb id is 3, and the ref holds 3 + sequenceBatchSize (10) = "13".
    testEnableSequences(0, 3, "13");
}
@Test
public void enableSequencesWithGap() throws Exception {
    // -1 presumably means "use the configured initialSequenceGap" (500 in defaultConfig):
    // first id 502, ref holds 502 + batch of 10 = "512".
    testEnableSequences(-1, 502, "512");
}
/**
 * Migrates up to the state where the NoteDb change sequence is enabled and verifies the
 * sequence ref contents, then checks that the underlying ReviewDb keeps its own sequence.
 *
 * @param builderOption value passed to {@code setSequenceGap} (negative presumably selects a
 *     default gap — TODO confirm)
 * @param expectedFirstId first change id served by the NoteDb sequence after migration
 * @param expectedRefValue expected blob content of refs/sequences/changes after the first
 *     nextChangeId call (start of the next batch)
 */
private void testEnableSequences(int builderOption, int expectedFirstId, String expectedRefValue)
    throws Exception {
  PushOneCommit.Result r = createChange();
  Change.Id id = r.getChange().getId();
  assertThat(id.get()).isEqualTo(1);
  migrate(
      b ->
          b.setSequenceGap(builderOption)
              .setStopAtStateForTesting(READ_WRITE_WITH_SEQUENCE_REVIEW_DB_PRIMARY));
  assertThat(sequences.nextChangeId()).isEqualTo(expectedFirstId);
  assertThat(sequences.nextChangeId()).isEqualTo(expectedFirstId + 1);
  try (Repository repo = repoManager.openRepository(allProjects);
      ObjectReader reader = repo.newObjectReader()) {
    Ref ref = repo.exactRef("refs/sequences/changes");
    assertThat(ref).isNotNull();
    ObjectLoader loader = reader.open(ref.getObjectId());
    assertThat(loader.getType()).isEqualTo(Constants.OBJ_BLOB);
    // Acquired a block of 10 to serve the first nextChangeId call after migration.
    assertThat(new String(loader.getCachedBytes(), UTF_8)).isEqualTo(expectedRefValue);
  }
  try (ReviewDb db = schemaFactory.open()) {
    // Underlying, unused ReviewDb is still on its own sequence.
    @SuppressWarnings("deprecation")
    int nextFromReviewDb = db.nextChangeId();
    assertThat(nextFromReviewDb).isEqualTo(3);
  }
}
/** Full migration using a single rebuilder thread. */
@Test
public void fullMigrationSameThread() throws Exception {
  testFullMigration(1);
}
/** Full migration using multiple rebuilder threads; the outcome must be the same. */
@Test
public void fullMigrationMultipleThreads() throws Exception {
  testFullMigration(2);
}
/**
 * Runs the full migration to NoteDb-primary and verifies the resulting NoteDb refs, the
 * sequence, and that later updates bypass the now-unused ReviewDb rows.
 *
 * @param threads number of threads passed to the migrator builder
 */
private void testFullMigration(int threads) throws Exception {
  PushOneCommit.Result r1 = createChange();
  PushOneCommit.Result r2 = createChange();
  Change.Id id1 = r1.getChange().getId();
  Change.Id id2 = r2.getChange().getId();
  Set<String> objectFiles = getObjectFiles(project);
  assertThat(objectFiles).isNotEmpty();
  migrate(b -> b.setThreads(threads));
  assertNotesMigrationState(NOTE_DB, false, false);
  assertThat(sequences.nextChangeId()).isEqualTo(503);
  // Migration should not have written any loose objects beyond what already existed.
  assertThat(getObjectFiles(project)).containsExactlyElementsIn(objectFiles);
  ObjectId oldMetaId = null;
  int rowVersion = 0;
  try (ReviewDb db = schemaFactory.open();
      Repository repo = repoManager.openRepository(project)) {
    for (Change.Id id : ImmutableList.of(id1, id2)) {
      String refName = RefNames.changeMetaRef(id);
      Ref ref = repo.exactRef(refName);
      assertThat(ref).named(refName).isNotNull();
      Change c = db.changes().get(id);
      assertThat(c.getTopic()).named("topic of change %s", id).isNull();
      NoteDbChangeState s = NoteDbChangeState.parse(c);
      assertThat(s.getPrimaryStorage())
          .named("primary storage of change %s", id)
          .isEqualTo(PrimaryStorage.NOTE_DB);
      // Pass the id as format argument so the message interpolates %s (was missing before).
      assertThat(s.getRefState()).named("ref state of change %s", id).isEmpty();
      if (id.equals(id1)) {
        oldMetaId = ref.getObjectId();
        rowVersion = c.getRowVersion();
      }
    }
  }
  // Do not open a new context, to simulate races with other threads that opened a context earlier
  // in the migration process; this needs to work.
  gApi.changes().id(id1.get()).topic(name("a-topic"));
  // Of course, it should also work with a new context.
  resetCurrentApiUser();
  gApi.changes().id(id1.get()).topic(name("another-topic"));
  try (ReviewDb db = schemaFactory.open();
      Repository repo = repoManager.openRepository(project)) {
    // Topic updates went to NoteDb (new meta SHA-1) but left the ReviewDb row untouched.
    assertThat(repo.exactRef(RefNames.changeMetaRef(id1)).getObjectId()).isNotEqualTo(oldMetaId);
    Change c = db.changes().get(id1);
    assertThat(c.getTopic()).isNull();
    assertThat(c.getRowVersion()).isEqualTo(rowVersion);
  }
}
/**
 * A change whose patch sets were deleted from ReviewDb must be skipped by the migration
 * without preventing other changes from migrating.
 */
@Test
public void fullMigrationOneChangeWithNoPatchSets() throws Exception {
  PushOneCommit.Result r1 = createChange();
  PushOneCommit.Result r2 = createChange();
  Change.Id id1 = r1.getChange().getId();
  Change.Id id2 = r2.getChange().getId();
  // Corrupt id2 by deleting all of its patch sets directly in ReviewDb.
  db.changes().beginTransaction(id2);
  try {
    db.patchSets().delete(db.patchSets().byChange(id2));
    db.commit();
  } finally {
    db.rollback();
  }
  migrate(b -> b);
  assertNotesMigrationState(NOTE_DB, false, false);
  try (ReviewDb db = schemaFactory.open();
      Repository repo = repoManager.openRepository(project)) {
    assertThat(repo.exactRef(RefNames.changeMetaRef(id1))).isNotNull();
    assertThat(db.changes().get(id1).getNoteDbState()).isEqualTo(NOTE_DB_PRIMARY_STATE);
    // A change with no patch sets is so corrupt that it is completely skipped by the migration
    // process.
    assertThat(repo.exactRef(RefNames.changeMetaRef(id2))).isNull();
    assertThat(db.changes().get(id2).getNoteDbState()).isNull();
  }
}
/**
 * A change whose backing repository was deleted from disk must be skipped by the migration
 * without preventing other changes from migrating.
 */
@Test
public void fullMigrationOneChangeWithNoProject() throws Exception {
  PushOneCommit.Result r1 = createChange();
  Change.Id id1 = r1.getChange().getId();
  Project.NameKey p2 = createProject("project2");
  TestRepository<?> tr2 = cloneProject(p2, admin);
  PushOneCommit.Result r2 = pushFactory.create(db, admin.getIdent(), tr2).to("refs/for/master");
  Change.Id id2 = r2.getChange().getId();
  // TODO(davido): Find an easier way to wipe out a repository from the file system.
  MoreFiles.deleteRecursively(
      FileKey.lenient(
              sitePaths
                  .resolve(cfg.getString("gerrit", null, "basePath"))
                  .resolve(p2.get())
                  .toFile(),
              FS.DETECTED)
          .getFile()
          .toPath(),
      RecursiveDeleteOption.ALLOW_INSECURE);
  migrate(b -> b);
  assertNotesMigrationState(NOTE_DB, false, false);
  try (ReviewDb db = schemaFactory.open();
      Repository repo = repoManager.openRepository(project)) {
    assertThat(repo.exactRef(RefNames.changeMetaRef(id1))).isNotNull();
    assertThat(db.changes().get(id1).getNoteDbState()).isEqualTo(NOTE_DB_PRIMARY_STATE);
  }
  // A change without project is so corrupt that it is completely skipped by the migration
  // process.
  assertThat(db.changes().get(id2).getNoteDbState()).isNull();
}
/**
 * A change whose patch set ref is missing from the repository must still migrate, and the
 * migrated NoteDb data must match the original ReviewDb data.
 */
@Test
public void fullMigrationMissingPatchSetRefs() throws Exception {
  PushOneCommit.Result r = createChange();
  Change.Id id = r.getChange().getId();
  // Force-delete the change's patch set ref to simulate the missing-ref corruption.
  try (Repository repo = repoManager.openRepository(project)) {
    RefUpdate u = repo.updateRef(new PatchSet.Id(id, 1).toRefName());
    u.setForceUpdate(true);
    assertThat(u.delete()).isEqualTo(RefUpdate.Result.FORCED);
  }
  // Snapshot the ReviewDb view of the change for comparison after migration.
  ChangeBundle reviewDbBundle;
  try (ReviewDb db = schemaFactory.open()) {
    reviewDbBundle = changeBundleReader.fromReviewDb(db, id);
  }
  migrate(b -> b);
  assertNotesMigrationState(NOTE_DB, false, false);
  try (ReviewDb db = schemaFactory.open();
      Repository repo = repoManager.openRepository(project)) {
    // Change migrated successfully even though it was missing patch set refs.
    assertThat(repo.exactRef(RefNames.changeMetaRef(id))).isNotNull();
    assertThat(db.changes().get(id).getNoteDbState()).isEqualTo(NOTE_DB_PRIMARY_STATE);
    ChangeBundle noteDbBundle =
        ChangeBundle.fromNotes(commentsUtil, notesFactory.createChecked(db, project, id));
    assertThat(noteDbBundle.differencesFrom(reviewDbBundle)).isEmpty();
  }
}
/**
 * The autoMigrate flag must be persisted only while it is requested for an intermediate
 * state, and cleared once migration completes.
 */
@Test
public void autoMigrationConfig() throws Exception {
  createChange();
  migrate(b -> b.setStopAtStateForTesting(WRITE));
  assertNotesMigrationState(WRITE, false, false);
  migrate(b -> b.setAutoMigrate(true).setStopAtStateForTesting(READ_WRITE_NO_SEQUENCE));
  assertNotesMigrationState(READ_WRITE_NO_SEQUENCE, true, false);
  migrate(b -> b);
  assertNotesMigrationState(NOTE_DB, false, false);
}
/**
 * Listeners must receive one preStateChange notification for every state transition of a
 * full migration, in order (enforced by the strict mock).
 */
@Test
public void notesMigrationStateListener() throws Exception {
  NotesMigrationStateListener listener = createStrictMock(NotesMigrationStateListener.class);
  listener.preStateChange(REVIEW_DB, WRITE);
  expectLastCall();
  listener.preStateChange(WRITE, READ_WRITE_NO_SEQUENCE);
  expectLastCall();
  listener.preStateChange(READ_WRITE_NO_SEQUENCE, READ_WRITE_WITH_SEQUENCE_REVIEW_DB_PRIMARY);
  expectLastCall();
  listener.preStateChange(
      READ_WRITE_WITH_SEQUENCE_REVIEW_DB_PRIMARY, READ_WRITE_WITH_SEQUENCE_NOTE_DB_PRIMARY);
  // expectLastCall() was missing here; added for consistency with the other recordings
  // (it is a no-op for plain void expectations but keeps the recording style uniform).
  expectLastCall();
  listener.preStateChange(READ_WRITE_WITH_SEQUENCE_NOTE_DB_PRIMARY, NOTE_DB);
  expectLastCall();
  replay(listener);
  addListener(listener);
  createChange();
  migrate(b -> b);
  assertNotesMigrationState(NOTE_DB, false, false);
  verify(listener);
}
/**
 * If a listener throws during a state transition, the migration must stop at the last
 * successfully reached state and propagate the listener's exception unchanged.
 */
@Test
public void notesMigrationStateListenerFails() throws Exception {
  NotesMigrationStateListener listener = createStrictMock(NotesMigrationStateListener.class);
  listener.preStateChange(REVIEW_DB, WRITE);
  expectLastCall();
  // Make the second transition fail.
  listener.preStateChange(WRITE, READ_WRITE_NO_SEQUENCE);
  IOException listenerException = new IOException("Listener failed");
  expectLastCall().andThrow(listenerException);
  replay(listener);
  addListener(listener);
  createChange();
  try {
    migrate(b -> b);
    fail("expected IOException");
  } catch (IOException e) {
    assertThat(e).isSameAs(listenerException);
  }
  // Migration stopped in WRITE, the state reached before the failing transition.
  assertNotesMigrationState(WRITE, false, false);
  verify(listener);
}
/**
 * Asserts the migration state as seen both by the live NotesMigration instance and by the
 * persisted notedb config, including the autoMigrate and trial flags.
 */
private void assertNotesMigrationState(
    NotesMigrationState expected, boolean autoMigrate, boolean trialMode) throws Exception {
  assertThat(NotesMigrationState.forNotesMigration(notesMigration)).hasValue(expected);
  // Reload from disk so we check the persisted state, not a cached in-memory one.
  noteDbConfig.load();
  assertThat(NotesMigrationState.forConfig(noteDbConfig)).hasValue(expected);
  assertThat(NoteDbMigrator.getAutoMigrate(noteDbConfig))
      .named("noteDb.changes.autoMigrate")
      .isEqualTo(autoMigrate);
  assertThat(NoteDbMigrator.getTrialMode(noteDbConfig))
      .named("noteDb.changes.trial")
      .isEqualTo(trialMode);
}
/** Sets the migration state both in the persisted notedb config and in the live instance. */
private void setNotesMigrationState(NotesMigrationState state) throws Exception {
  noteDbConfig.load();
  state.setConfigValues(noteDbConfig);
  noteDbConfig.save();
  notesMigration.setFrom(state);
}
/** Configures a migrator builder before it is built. */
@FunctionalInterface
interface PrepareBuilder {
  NoteDbMigrator.Builder prepare(NoteDbMigrator.Builder b) throws Exception;
}
/** Runs one migration entry point (e.g. full migration or rebuild-only) on a migrator. */
@FunctionalInterface
interface RunMigration {
  void run(NoteDbMigrator m) throws Exception;
}
/** Runs a complete migration after applying {@code b} to the builder. */
private void migrate(PrepareBuilder b) throws Exception {
  migrate(b, m -> m.migrate());
}
/** Builds a migrator from the prepared builder, runs {@code m} on it, and closes it. */
private void migrate(PrepareBuilder b, RunMigration m) throws Exception {
  try (NoteDbMigrator migrator = b.prepare(migratorBuilderProvider.get()).build()) {
    m.run(migrator);
  }
}
/** Asserts that running {@code m} throws a MigrationException containing the given text. */
private void assertMigrationException(
    String expectMessageContains, PrepareBuilder b, RunMigration m) throws Exception {
  boolean thrown = false;
  try {
    migrate(b, m);
  } catch (MigrationException expected) {
    thrown = true;
    assertThat(expected).hasMessageThat().contains(expectMessageContains);
  }
  if (!thrown) {
    fail("expected MigrationException");
  }
}
// Registers a listener, keeping the registration handle in addedListeners
// (presumably removed again in teardown — not visible here).
private void addListener(NotesMigrationStateListener listener) {
  addedListeners.add(listeners.add("gerrit", listener));
}
/**
 * Returns the sorted set of loose object file paths in the project's object database,
 * excluding pack files and pack indexes.
 */
private ImmutableSortedSet<String> getObjectFiles(Project.NameKey project) throws Exception {
  try (Repository repo = repoManager.openRepository(project);
      Stream<Path> paths =
          Files.walk(((FileRepository) repo).getObjectDatabase().getDirectory().toPath())) {
    return paths
        .filter(p -> !Files.isDirectory(p))
        .map(Path::toString)
        .filter(n -> !(n.endsWith(".pack") || n.endsWith(".idx")))
        .collect(toImmutableSortedSet(naturalOrder()));
  }
}
}
| |
/*
* Zed Attack Proxy (ZAP) and its related class files.
*
* ZAP is an HTTP/HTTPS proxy for assessing web application security.
*
* Copyright 2014 The ZAP Development Team
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.zaproxy.zap.extension.ascanrulesBeta;
import java.io.IOException;
import java.util.regex.Pattern;
import org.apache.commons.codec.DecoderException;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.codec.binary.Hex;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.parosproxy.paros.Constant;
import org.parosproxy.paros.core.scanner.AbstractAppParamPlugin;
import org.parosproxy.paros.core.scanner.Alert;
import org.parosproxy.paros.core.scanner.Category;
import org.parosproxy.paros.network.HttpMessage;
/** @author yhawke (2014) */
/**
 * Active scan rule that probes for padding oracle vulnerabilities: for parameter values that
 * look encrypted, it flips the last bit of the decoded bytes, re-sends the request, and
 * looks for crypto error leakage in the response.
 *
 * @author yhawke (2014)
 */
public class PaddingOracleScanRule extends AbstractAppParamPlugin {

    // List of all possible errors: substrings whose presence in the tampered response
    // (but not in the unmodified control response) indicates a leaked crypto failure.
    private static final String[] ERROR_PATTERNS = {
        "BadPaddingException",
        "padding",
        "runtime",
        "runtime error",
        "server error",
        "cryptographicexception",
        "crypto"
    };

    // Logger object
    private static final Logger log = LogManager.getLogger(PaddingOracleScanRule.class);

    @Override
    public int getId() {
        return 90024;
    }

    @Override
    public String getName() {
        return Constant.messages.getString("ascanbeta.paddingoracle.name");
    }

    @Override
    public String getDescription() {
        return Constant.messages.getString("ascanbeta.paddingoracle.desc");
    }

    @Override
    public int getCategory() {
        return Category.MISC;
    }

    @Override
    public String getSolution() {
        return Constant.messages.getString("ascanbeta.paddingoracle.soln");
    }

    @Override
    public String getReference() {
        return Constant.messages.getString("ascanbeta.paddingoracle.refs");
    }

    @Override
    public int getCweId() {
        // CWE-209: Information Exposure Through an Error Message
        return 209;
    }

    @Override
    public int getWascId() {
        // There's not a real classification for this
        // so we consider the general "Improper Input Handling" class
        // http://projects.webappsec.org/w/page/13246933/Improper%20Input%20Handling
        return 20;
    }

    @Override
    public int getRisk() {
        return Alert.RISK_HIGH;
    }

    @Override
    public void init() {
        // do nothing
    }

    /**
     * Scan for Padding Oracle Vulnerabilities
     *
     * @param msg a request only copy of the original message (the response isn't copied)
     * @param paramName the parameter name that need to be exploited
     * @param value the original parameter value
     */
    @Override
    public void scan(HttpMessage msg, String paramName, String value) {
        // Get rid of strings that are all numeric
        // (they probably aren't encoded and they pollute results)
        if (!value.matches("^[0-9]+$")) {
            // Try each candidate encoding; stop at the first that yields a finding.
            for (OracleEncoder encoder : OracleEncoder.values()) {
                if (checkPaddingOracle(paramName, value, encoder)) {
                    break;
                }
            }
        }
    }

    /**
     * Probes one parameter for a padding oracle assuming it is encoded with {@code encoder}:
     * decodes the value, verifies it looks encrypted, records an unmodified control response,
     * then flips the last bit of the ciphertext and compares the new response against the
     * control for 500s and crypto error strings.
     *
     * @param paramName the parameter to tamper with
     * @param value the original parameter value
     * @param encoder the candidate encoding to try
     * @return true if an alert was raised for an error-pattern match, or the scan was stopped
     *     (no further encodings need to be tried); false otherwise
     */
    private boolean checkPaddingOracle(String paramName, String value, OracleEncoder encoder) {
        // Get the decoded value
        byte[] oracle = encoder.decode(value);
        if ((oracle != null) && isEncrypted(oracle)) {
            try {
                // First test is for double control
                HttpMessage msg = getNewMsg();
                String encodedValue = encoder.encode(oracle);
                setParameter(msg, paramName, encodedValue);
                sendAndReceive(msg);
                // If the control test returned an error, then keep going
                if (isPage200(msg)) {
                    // Response without any modification
                    String controlResponse = msg.getResponseBody().toString();
                    // The first test is going to change the last bit
                    oracle[oracle.length - 1] ^= 0x1;
                    encodedValue = encoder.encode(oracle);
                    setParameter(msg, paramName, encodedValue);
                    sendAndReceive(msg);
                    // First check if an Internal Server Error was triggered
                    // in this case we found (very) likely Padding Oracle vulnerability
                    if (isPage500(msg)) {
                        // We Found IT!
                        // First do logging
                        log.debug(
                                "[Padding Oracle Found] on parameter [{}] with payload [{}]",
                                paramName,
                                encodedValue);
                        // NOTE(review): this branch raises an alert but does not return, so the
                        // error-pattern check below may raise a second alert for the same
                        // request — confirm that is intended.
                        newAlert()
                                .setConfidence(Alert.CONFIDENCE_MEDIUM)
                                .setParam(paramName)
                                .setAttack(msg.getRequestHeader().getURI().toString())
                                .setEvidence(msg.getResponseHeader().getReasonPhrase())
                                .setMessage(msg)
                                .raise();
                    }
                    // Otherwise check the response with the last bit changed
                    String lastBitResponse = msg.getResponseBody().toString();
                    // Check if changing the last bit produced an error string that
                    // the unmodified control response did not contain. These results
                    // are based on a list of error strings.
                    for (String pattern : ERROR_PATTERNS) {
                        if (lastBitResponse.contains(pattern)
                                && !controlResponse.contains(pattern)) {
                            // We Found IT!
                            // First do logging
                            log.debug(
                                    "[Padding Oracle Found] on parameter [{}] with payload [{}]",
                                    paramName,
                                    encodedValue);
                            newAlert()
                                    .setConfidence(Alert.CONFIDENCE_MEDIUM)
                                    .setParam(paramName)
                                    .setAttack(msg.getRequestHeader().getURI().toString())
                                    .setEvidence(pattern)
                                    .setMessage(msg)
                                    .raise();
                            // All done. No need to look for vulnerabilities on subsequent
                            // parameters on the same request (to reduce performance impact)
                            return true;
                        }
                        // Check if the scan has been stopped
                        // if yes dispose resources and exit
                        if (isStop()) {
                            return true;
                        }
                    }
                }
            } catch (IOException ex) {
                // Do not try to internationalise this.. we need an error message in any event..
                // if it's in English, it's still better than not having it at all.
                log.warn(
                        "Padding Oracle vulnerability check failed for parameter [{}] and payload [{}] due to an I/O error",
                        paramName,
                        encoder.encode(oracle),
                        ex);
            }
        }
        return false;
    }

    /**
     * Decide if the data given in 'data' is encrypted It turns out that this is difficult to do on
     * short strings, so we are going to solve this by cheating. Basically, check if the string
     * contains any non-ascii characters (<0x20 or >0x7F). The odds of a 4-character encrypted
     * string having at least one character that falls outside of ASCII is almost 100%. We also
     * ignore any string shorter than 16 bytes, since those are generally too short to be
     * encrypted.
     *
     * @param value the value that need to be checked
     * @return true if it seems to be encrypted
     */
    private boolean isEncrypted(byte[] value) {
        // Make sure we have a reasonable sized string
        // (encrypted strings tend to be long, and short strings tend to break our numbers)
        if (value.length < 16) {
            return false;
        }
        int notAscii = 0;
        for (int i = 0; i < value.length; i++) {
            // NOTE(review): byte is signed in Java, so values above 0x7F appear negative and
            // are caught by the < 0x20 test; the > 0x7F comparison alone is never true.
            if (value[i] < 0x20 || value[i] > 0x7F) {
                notAscii++;
            }
        }
        // "Encrypted" if more than a quarter of the bytes fall outside printable ASCII.
        return (notAscii > (value.length / 4));
    }

    /**
     * Enumeration Utility which is able to manage all specific encoding/decoding tasks that could
     * be met during the rule's testing
     */
    public enum OracleEncoder {
        HEX {
            // Hex strings are a-fA-F0-9. Although it's technically possible for a
            // base64 string to look like this, it's exceptionally unlikely.
            private final Pattern HEX_PATTERN = Pattern.compile("^([a-fA-F0-9]{2})+$");

            @Override
            public String encode(byte[] value) {
                return Hex.encodeHexString(value);
            }

            @Override
            public byte[] decode(String value) {
                if (HEX_PATTERN.matcher(value).matches()) {
                    try {
                        return Hex.decodeHex(value.toCharArray());
                    } catch (DecoderException ex) {
                        // Not actually hex-decodable; fall through and return null.
                    }
                }
                return null;
            }
        },
        BASE64URL {
            // base64url always has an integer 0, 1, or 2 at the end, and contains letters,
            // numbers, -, and _. The final byte is the number of padding bytes, so the
            // string length with a number of extra bytes equal to the final digit has to be
            // a multiple of 4.
            private final Pattern BASE64URL_PATTERN = Pattern.compile("^[a-zA-Z0-9_-]+[012]$");

            @Override
            public String encode(byte[] value) {
                String encoded = Base64.encodeBase64URLSafeString(value);
                int padding = (4 - (encoded.length() % 4)) % 4;
                // Append the padding count so decode() can validate the length.
                return encoded + Integer.toString(padding);
            }

            @Override
            public byte[] decode(String value) {
                if (BASE64URL_PATTERN.matcher(value).matches()) {
                    // The last letter represents the length
                    int last = value.length() - 1;
                    if (((last + (int) value.charAt(last)) % 4) == 0) {
                        Base64 decoder = new Base64(true);
                        return decoder.decode(value.substring(0, last));
                    }
                }
                return null;
            }
        },
        BASE64 {
            // base64 strings are similar, except they can contain + and /, and end
            // with 0 - 2 '=' signs. They are also a multiple of 4 bytes.
            private final Pattern BASE64_PATTERN = Pattern.compile("^[a-zA-Z0-9/+]+={0,2}$");

            @Override
            public String encode(byte[] value) {
                return Base64.encodeBase64String(value);
            }

            @Override
            public byte[] decode(String value) {
                if (BASE64_PATTERN.matcher(value).matches()) {
                    if ((value.length() % 4) == 0) {
                        return Base64.decodeBase64(value);
                    }
                }
                return null;
            }
        };

        /** Encodes raw bytes back into this encoder's textual form. */
        public abstract String encode(byte[] value);

        /** Decodes the value, or returns null if it does not match this encoding. */
        public abstract byte[] decode(String value);
    }
}
| |
/**
* Copyright 2012 multibit.org
*
* Licensed under the MIT license (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://opensource.org/licenses/mit-license.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.multibit.viewsystem.swing.action;
import java.io.File;
import java.util.Collection;
import java.util.UUID;
import junit.framework.TestCase;
import org.junit.Test;
import org.multibit.controller.Controller;
import org.multibit.controller.bitcoin.BitcoinController;
import org.multibit.controller.BitcoinControllerTest;
import org.multibit.controller.SimpleWalletBusyListener;
import org.multibit.file.PrivateKeyAndDate;
import org.multibit.file.PrivateKeysHandler;
import org.multibit.message.Message;
import org.multibit.message.MessageManager;
import org.multibit.viewsystem.swing.view.components.FontSizer;
import org.multibit.viewsystem.swing.view.panels.ExportPrivateKeysPanel;
import com.google.bitcoin.core.Utils;
import com.google.bitcoin.crypto.EncryptedPrivateKey;
import com.google.bitcoin.crypto.KeyCrypter;
import com.google.bitcoin.crypto.KeyCrypterException;
import org.multibit.CreateControllers;
public class ExportPrivateKeysSubmitActionTest extends TestCase {
// Expected user-facing messages, compared against the export panel's message text below.
private static final String EXPECTED_ENTER_THE_WALLET_PASSWORD = "Enter the wallet password";
private static final String EXPECTED_YOU_MUST_SELECT_AN_OUTPUT_FILE = "You must select an output file";
private static final String EXPECTED_ENTER_THE_EXPORT_FILE_PASSWORD = "Enter the password you want to use for the export file";
private static final String EXPECTED_PASSWORDS_DO_NOT_MATCH = "The password and repeat password do not match";
private static final String EXPECTED_COULD_NOT_DECRYPT_INPUT_STRING = "Could not decrypt input string";
private static final String EXPECTED_THE_PRIVATE_KEYS_WERE_EXPORTED = "The private keys were exported.";
private static final String EXPECTED_THE_EXPORT_FILE_COULD_BE_READ_IN_CORRECTLY = "The export file could be read in correctly and the private keys match the wallet contents";

// Passwords used by the tests.
public static final CharSequence EXPORT_FILE_PASSWORD = "the quick brown fox jumps over the lazy dog 0123456789";
public static final CharSequence WALLET_PASSWORD = "the unbelievable lightness of being";
public static final CharSequence WRONG_PASSWORD = "this is the wrong password";
/**
 * Walks the export flow for an unencrypted wallet: validation errors for a missing output
 * file and missing/mismatched export passwords, then a password-protected export (verified
 * by reading it back with no, wrong, and correct password), then an unencrypted export.
 */
@Test
public void testExportPrivateKeysWithNonEncryptedWallet() throws Exception {
    // Create MultiBit controller.
    final CreateControllers.Controllers controllers = CreateControllers.createControllers();
    BitcoinController controller = controllers.bitcoinController;
    // Create a new wallet and put it in the model as the active wallet.
    ActionTestUtils.createNewActiveWallet(controller, "testExportPrivateKeysWithNonEncryptedWallet", false, null);
    // Hook up a wallet busy listener.
    SimpleWalletBusyListener walletBusyListener = new SimpleWalletBusyListener();
    controller.registerWalletBusyListener(walletBusyListener);
    // Create a new ExportPrivateKeysSubmitAction to test.
    FontSizer.INSTANCE.initialise(controller);
    ExportPrivateKeysPanel exportPanel = new ExportPrivateKeysPanel(controller, null);
    ExportPrivateKeysSubmitAction exportAction = exportPanel.getExportPrivateKeySubmitAction();
    assertNotNull("exportAction was not created successfully", exportAction);
    assertEquals("Wrong number of keys at wallet creation", 1, controller.getModel().getActiveWallet().getKeychain().size());
    // The wallet is unencrypted, so the wallet password field must be disabled.
    assertTrue("Wallet password is enabled when it should not be", !exportPanel.isWalletPasswordFieldEnabled());
    // Execute - this is with an unencrypted wallet and default settings.
    exportAction.actionPerformed(null);
    assertEquals("Wrong message1 after default export execute", EXPECTED_YOU_MUST_SELECT_AN_OUTPUT_FILE, exportPanel.getMessageText1());
    assertEquals("Wrong message2 after default export execute", "", exportPanel.getMessageText2().trim());
    // Set the output file name.
    String outputFilename1 = controller.getModel().getActiveWalletFilename() + "-" + UUID.randomUUID().toString() + ".key";
    exportPanel.setOutputFilename(outputFilename1);
    // Execute - this should now complain that no export file password is set (as password protect export file is selected by default).
    exportAction.actionPerformed(null);
    assertEquals("Wrong message1 after no password set execute", EXPECTED_ENTER_THE_EXPORT_FILE_PASSWORD, exportPanel.getMessageText1());
    assertEquals("Wrong message2 after no password set execute", "", exportPanel.getMessageText2().trim());
    // Set the first export file password.
    exportPanel.setExportPassword(EXPORT_FILE_PASSWORD);
    // Execute = this is with one only of the export file passwords set.
    exportAction.actionPerformed(null);
    assertEquals("Wrong message1 after no password set execute", EXPECTED_PASSWORDS_DO_NOT_MATCH, exportPanel.getMessageText1());
    assertEquals("Wrong message2 after no password set execute", "", exportPanel.getMessageText2().trim());
    // Set the repeat export file password.
    exportPanel.setRepeatExportPassword(EXPORT_FILE_PASSWORD);
    // Check the export file currently does not exist.
    assertTrue("Encrypted export file exists when it should not", !(new File(outputFilename1)).exists());
    // Execute = this should actually write the encrypted export file.
    exportAction.actionPerformed(null);
    BitcoinControllerTest.waitForWalletNotBusy(walletBusyListener);
    assertTrue("Encrypted export file does not exist when it should", (new File(outputFilename1)).exists());
    assertEquals("Wrong message1 after encrypted export is good to go", EXPECTED_THE_PRIVATE_KEYS_WERE_EXPORTED, exportPanel.getMessageText1());
    assertEquals("Wrong message2 after encrypted export is good to go", EXPECTED_THE_EXPORT_FILE_COULD_BE_READ_IN_CORRECTLY, exportPanel.getMessageText2().trim());
    // Try to read in the encrypted exported private key file with no password - this should fail.
    PrivateKeysHandler privateKeysHandler = new PrivateKeysHandler(controller.getModel().getNetworkParameters());
    Collection<PrivateKeyAndDate> privateKeyAndDates = null;
    try {
        privateKeyAndDates = privateKeysHandler.readInPrivateKeys(new File(outputFilename1), null);
        fail("An encrypted export file was read in with no password. Fail.");
    } catch (KeyCrypterException kce) {
        // This is what should happen.
        assertTrue("Unexpected exception thrown when decoding export file with no password", kce.getMessage().indexOf(EXPECTED_COULD_NOT_DECRYPT_INPUT_STRING) > -1);
    }
    // Try to read in the encrypted exported private key file with the wrong password - this should fail.
    try {
        privateKeyAndDates = privateKeysHandler.readInPrivateKeys(new File(outputFilename1), WRONG_PASSWORD);
        fail("An encrypted export file was read in with the wrong password. Fail.");
    } catch (KeyCrypterException kce) {
        // This is what should happen.
        assertTrue("Unexpected exception thrown when decoding export file with wrong password", kce.getMessage().indexOf(EXPECTED_COULD_NOT_DECRYPT_INPUT_STRING) > -1);
    }
    // Read in the encrypted exported private key file with the correct password.
    privateKeysHandler = new PrivateKeysHandler(controller.getModel().getNetworkParameters());
    privateKeyAndDates = privateKeysHandler.readInPrivateKeys(new File(outputFilename1), EXPORT_FILE_PASSWORD);
    assertEquals("Wrong number of keys read in from encrypted export file", 1, privateKeyAndDates.size());
    // The exported key must be byte-identical to the key in the wallet.
    assertEquals("Wrong private key read in from encrypted export file", Utils.bytesToHexString(controller.getModel().getActiveWallet().getKeychain().iterator().next().getPrivKeyBytes()),
            Utils.bytesToHexString(privateKeyAndDates.iterator().next().getKey().getPrivKeyBytes()));
    // Set the export file password protect radio to output unencrypted.
    exportPanel.getDoNotPasswordProtect().setSelected(true);
    // Set the output file name.
    String outputFilename2 = controller.getModel().getActiveWalletFilename() + "-" + UUID.randomUUID().toString() + ".key";
    exportPanel.setOutputFilename(outputFilename2);
    // Check the export file currently does not exist.
    assertTrue("Unencrypted export file exists when it should not", !(new File(outputFilename2)).exists());
    // Execute = this should actually write the unencrypted export file.
    exportAction.actionPerformed(null);
    BitcoinControllerTest.waitForWalletNotBusy(walletBusyListener);
    assertTrue("Unencrypted export file does not exist when it should", (new File(outputFilename2)).exists());
    assertEquals("Wrong message1 after unencrypted export is good to go", EXPECTED_THE_PRIVATE_KEYS_WERE_EXPORTED, exportPanel.getMessageText1());
    assertEquals("Wrong message2 after unencrypted export is good to go", EXPECTED_THE_EXPORT_FILE_COULD_BE_READ_IN_CORRECTLY, exportPanel.getMessageText2().trim());
    // Read in the unencrypted exported private key file.
    privateKeyAndDates = privateKeysHandler.readInPrivateKeys(new File(outputFilename2), null);
    assertEquals("Wrong number of keys read in from unencrypted export file", 1, privateKeyAndDates.size());
    assertEquals("Wrong private key read in from unencrypted export file", Utils.bytesToHexString(controller.getModel().getActiveWallet().getKeychain().iterator().next().getPrivKeyBytes()),
            Utils.bytesToHexString(privateKeyAndDates.iterator().next().getKey().getPrivKeyBytes()));
}
@Test
public void testExportPrivateKeysWithEncryptedWallet() throws Exception {
// Create MultiBit controller.
final CreateControllers.Controllers controllers = CreateControllers.createControllers();
BitcoinController controller = controllers.bitcoinController;
// Create a new encrypted wallet and put it in the model as the active wallet.
ActionTestUtils.createNewActiveWallet(controller, "testExportPrivateKeysWithEncryptedWallet", true, WALLET_PASSWORD);
// Hook up a wallet busy listener.
SimpleWalletBusyListener walletBusyListener = new SimpleWalletBusyListener();
controller.registerWalletBusyListener(walletBusyListener);
// Remember the private keys for the key - for comparision later.
// Copy the private key bytes for checking later.
EncryptedPrivateKey encryptedPrivateKey = controller.getModel().getActiveWallet().getKeychain().get(0).getEncryptedPrivateKey();
KeyCrypter keyCrypter = controller.getModel().getActiveWallet().getKeyCrypter();
byte[] originalPrivateKeyBytes = keyCrypter.decrypt(encryptedPrivateKey, keyCrypter.deriveKey(WALLET_PASSWORD));
// Create a new ExportPrivateKeysSubmitAction to test.
FontSizer.INSTANCE.initialise(controller);
ExportPrivateKeysPanel exportPanel = new ExportPrivateKeysPanel(controller, null);
ExportPrivateKeysSubmitAction exportAction = exportPanel.getExportPrivateKeySubmitAction();
assertNotNull("exportAction was not created successfully", exportAction);
assertEquals("Wrong number of keys at wallet creation", 1, controller.getModel().getActiveWallet().getKeychain().size());
assertTrue("Wallet password is not enabled when it should be", exportPanel.isWalletPasswordFieldEnabled());
// Execute - this is with an encrypted wallet and default settings. (No wallet password set).
exportAction.actionPerformed(null);
assertEquals("Wrong message1 after default export execute", EXPECTED_ENTER_THE_WALLET_PASSWORD, exportPanel.getMessageText1());
assertEquals("Wrong message2 after default export execute", "", exportPanel.getMessageText2().trim());
// Set the wallet password.
exportPanel.setWalletPassword(WALLET_PASSWORD);
// Execute - this should now complain that no export file is set.
exportAction.actionPerformed(null);
assertEquals("Wrong message1 after password set execute", EXPECTED_YOU_MUST_SELECT_AN_OUTPUT_FILE, exportPanel.getMessageText1());
assertEquals("Wrong message2 after password set execute", "", exportPanel.getMessageText2().trim());
// Set the output file name.
String outputFilename1 = controller.getModel().getActiveWalletFilename() + "-" + UUID.randomUUID().toString() + ".key";
exportPanel.setOutputFilename(outputFilename1);
// Execute - this should now complain that no export file password is set (as password protect export file is selected by default).
exportAction.actionPerformed(null);
assertEquals("Wrong message1 after no password set execute", EXPECTED_ENTER_THE_EXPORT_FILE_PASSWORD, exportPanel.getMessageText1());
assertEquals("Wrong message2 after no password set execute", "", exportPanel.getMessageText2().trim());
// Set the first export file password.
exportPanel.setExportPassword(EXPORT_FILE_PASSWORD);
// Execute = this is with one only of the export file passwords set.
exportAction.actionPerformed(null);
assertEquals("Wrong message1 after no password set execute", EXPECTED_PASSWORDS_DO_NOT_MATCH, exportPanel.getMessageText1());
assertEquals("Wrong message2 after no password set execute", "", exportPanel.getMessageText2().trim());
// Set the repeat export file password.
exportPanel.setRepeatExportPassword(EXPORT_FILE_PASSWORD);
// Check the export file currently does not exist.
assertTrue("Encrypted export file exists when it should not", !(new File(outputFilename1)).exists());
// Execute = this should actually write the encrypted export file.
exportAction.actionPerformed(null);
BitcoinControllerTest.waitForWalletNotBusy(walletBusyListener);
assertTrue("Encrypted export file does not exist when it should", (new File(outputFilename1)).exists());
assertEquals("Wrong message1 after encrypted export is good to go", EXPECTED_THE_PRIVATE_KEYS_WERE_EXPORTED, exportPanel.getMessageText1());
assertEquals("Wrong message2 after encrypted export is good to go", EXPECTED_THE_EXPORT_FILE_COULD_BE_READ_IN_CORRECTLY, exportPanel.getMessageText2().trim());
// Try to read in the encrypted exported private key file with no export file password - this should fail.
PrivateKeysHandler privateKeysHandler = new PrivateKeysHandler(controller.getModel().getNetworkParameters());
Collection<PrivateKeyAndDate> privateKeyAndDates = null;
try {
privateKeyAndDates = privateKeysHandler.readInPrivateKeys(new File(outputFilename1), null);
fail("An encrypted export file was read in with no export file password. Fail.");
} catch (KeyCrypterException kce) {
// This is what should happen.
assertTrue("Unexpected exception thrown when decoding export file with no export file password", kce.getMessage().indexOf(EXPECTED_COULD_NOT_DECRYPT_INPUT_STRING) > -1);
}
// Try to read in the encrypted exported private key file with the wrong export file password - this should fail.
try {
privateKeyAndDates = privateKeysHandler.readInPrivateKeys(new File(outputFilename1), WRONG_PASSWORD);
fail("An encrypted export file was read in with the wrong export file password. Fail.");
} catch (KeyCrypterException kce) {
// This is what should happen.
assertTrue("Unexpected exception thrown when decoding export file with wrong export file password", kce.getMessage().indexOf(EXPECTED_COULD_NOT_DECRYPT_INPUT_STRING) > -1);
}
// Read in the encrypted exported private key file with the correct export file password.
privateKeysHandler = new PrivateKeysHandler(controller.getModel().getNetworkParameters());
privateKeyAndDates = privateKeysHandler.readInPrivateKeys(new File(outputFilename1), EXPORT_FILE_PASSWORD);
assertEquals("Wrong number of keys read in from encrypted export file", 1, privateKeyAndDates.size());
assertEquals("Wrong private key read in from encrypted export file", Utils.bytesToHexString(originalPrivateKeyBytes),
Utils.bytesToHexString(privateKeyAndDates.iterator().next().getKey().getPrivKeyBytes()));
// Set the export file password protect radio to output unencrypted.
exportPanel.getDoNotPasswordProtect().setSelected(true);
// Set the wallet password.
exportPanel.setWalletPassword(WALLET_PASSWORD);
// Set the output file name.
String outputFilename2 = controller.getModel().getActiveWalletFilename() + "-" + UUID.randomUUID().toString() + ".key";
exportPanel.setOutputFilename(outputFilename2);
// Check the export file currently does not exist.
assertTrue("Unencrypted export file exists when it should not", !(new File(outputFilename2)).exists());
// Execute = this should actually write the unencrypted export file.
exportAction.actionPerformed(null);
BitcoinControllerTest.waitForWalletNotBusy(walletBusyListener);
assertTrue("Unencrypted export file does not exist when it should", (new File(outputFilename2)).exists());
assertEquals("Wrong message1 after unencrypted export is good to go", EXPECTED_THE_PRIVATE_KEYS_WERE_EXPORTED, exportPanel.getMessageText1());
assertEquals("Wrong message2 after unencrypted export is good to go", EXPECTED_THE_EXPORT_FILE_COULD_BE_READ_IN_CORRECTLY, exportPanel.getMessageText2().trim());
// Read in the unencrypted exported private key file.
privateKeyAndDates = privateKeysHandler.readInPrivateKeys(new File(outputFilename2), null);
assertEquals("Wrong number of keys read in from unencrypted export file", 1, privateKeyAndDates.size());
assertEquals("Wrong private key read in from unencrypted export file", Utils.bytesToHexString(originalPrivateKeyBytes),
Utils.bytesToHexString(privateKeyAndDates.iterator().next().getKey().getPrivKeyBytes()));
}
@Test
public void testNoWalletSelected() throws Exception {
    // Create MultiBit controller.
    final CreateControllers.Controllers controllers = CreateControllers.createControllers();
    BitcoinController controller = controllers.bitcoinController;

    // This test runs against an empty PerWalletModelDataList.
    assertTrue("There was an active wallet when there should not be", controller.getModel().thereIsNoActiveWallet());

    // Create a new ExportPrivateKeysSubmitAction to test.
    FontSizer.INSTANCE.initialise(controller);
    ExportPrivateKeysPanel exportPrivateKeysPanel = new ExportPrivateKeysPanel(controller, null);
    ExportPrivateKeysSubmitAction exportPrivateKeysSubmitAction = exportPrivateKeysPanel.getExportPrivateKeySubmitAction();
    assertNotNull("exportPrivateKeysSubmitAction was not created successfully", exportPrivateKeysSubmitAction);

    // Execute - with no active wallet the action should post a "no wallet is selected" message.
    exportPrivateKeysSubmitAction.actionPerformed(null);
    // Collection.toArray() never returns null, so only the length needs checking.
    Object[] messages = MessageManager.INSTANCE.getMessages().toArray();
    assertTrue("There were no messages but there should have been", messages.length > 0);
    // Fixed assertion message: it previously said "receive monacoin confirm" (copy-paste
    // from another test) although this test exercises the export-private-keys action.
    assertEquals("Wrong message after export private keys with no active wallet",
            ResetTransactionsSubmitActionTest.EXPECTED_NO_WALLET_IS_SELECTED,
            ((Message) messages[messages.length - 1]).getText());
}
}
| |
package osgi.enroute.trains.station.provider;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.stream.Collectors;
import org.osgi.service.component.annotations.Activate;
import org.osgi.service.component.annotations.Component;
import org.osgi.service.component.annotations.Reference;
import org.osgi.service.event.Event;
import org.osgi.service.event.EventAdmin;
import org.osgi.service.metatype.annotations.Designate;
import org.osgi.service.metatype.annotations.ObjectClassDefinition;
import osgi.enroute.dto.api.DTOs;
import osgi.enroute.trains.passenger.api.Passenger;
import osgi.enroute.trains.passenger.api.Person;
import osgi.enroute.trains.passenger.api.PersonDatabase;
import osgi.enroute.trains.station.provider.StationsManagerImpl.Config;
import osgi.enroute.trains.stations.api.Station;
import osgi.enroute.trains.stations.api.StationObservation;
import osgi.enroute.trains.stations.api.StationObservation.Type;
import osgi.enroute.trains.stations.api.StationsManager;
/**
* The StationsManager handles the checking in/out of passengers
* and the arriving/leaving of trains in stations.
*
*
*/
/**
 * The StationsManager handles the checking in/out of passengers
 * and the arriving/leaving of trains in stations.
 */
@Designate(ocd = Config.class, factory = true)
@Component(name = Config.STATION_CONFIGURATION_PID)
public class StationsManagerImpl implements StationsManager {

    @ObjectClassDefinition
    @interface Config {
        final static public String STATION_CONFIGURATION_PID = "osgi.enroute.trains.station.manager";

        /**
         * Array of entries of the form station:segment
         * (e.g. {"station1:segment1", "station2:segment2"}).
         */
        String[] stations();
    }

    @Reference
    private PersonDatabase personDB;

    @Reference
    private EventAdmin ea;

    @Reference
    private DTOs dtos;

    // Station name -> track segment. Populated once in activate(), read-only afterwards.
    private Map<String, String> stations = new HashMap<>();

    // Fair read/write lock guarding passengersInStation and passengersOnTrain.
    private ReentrantReadWriteLock lock = new ReentrantReadWriteLock(true);
    private Map<String, List<Passenger>> passengersInStation = new HashMap<>();
    private Map<String, List<Passenger>> passengersOnTrain = new HashMap<>();

    @Activate
    public void activate(Config c) {
        // Parse each "station:segment" entry and create an empty waiting list per station.
        for (String s : c.stations()) {
            String[] split = s.split(":");
            stations.put(split[0], split[1]);
            passengersInStation.put(split[0], new ArrayList<>());
        }
    }

    /** @return all configured stations as Station DTOs. */
    @Override
    public List<Station> getStations() {
        return stations.entrySet().stream().map(e -> {
            Station s = new Station();
            s.name = e.getKey();
            s.segment = e.getValue();
            return s;
        }).collect(Collectors.toList());
    }

    /** @return the track segment of the given station, or null if unknown. */
    @Override
    public String getStationSegment(String station) {
        return stations.get(station);
    }

    /**
     * @return the station located on the given segment
     * @throws java.util.NoSuchElementException if no station is configured on that segment
     */
    @Override
    public String getStation(String segment) {
        return stations.entrySet().stream()
                .filter(e -> e.getValue().equals(segment))
                .map(e -> e.getKey())
                .findFirst().get();
    }

    /** @return a defensive snapshot of the passengers currently waiting in the given station. */
    @Override
    public List<Passenger> getPassengersWaiting(String station) {
        try {
            lock.readLock().lock();
            return new ArrayList<Passenger>(passengersInStation.get(station));
        } finally {
            lock.readLock().unlock();
        }
    }

    /** @return a defensive snapshot of the passengers currently on the given train. */
    @Override
    public List<Passenger> getPassengersOnTrain(String train) {
        try {
            lock.readLock().lock();
            // Fixed: the previous version inserted a new list into passengersOnTrain while
            // holding only the READ lock, mutating the HashMap concurrently with other
            // readers. An unknown train now simply yields an empty snapshot; leave() and
            // arrive() create the entry lazily under the write lock.
            List<Passenger> onTrain = passengersOnTrain.get(train);
            return onTrain == null ? new ArrayList<Passenger>() : new ArrayList<Passenger>(onTrain);
        } finally {
            lock.readLock().unlock();
        }
    }

    /**
     * Check a person in at a station for travel to a destination.
     *
     * @return the created Passenger, or null when the person is unknown, either station is
     *         not managed by this manager, or the person is already waiting/traveling
     */
    @Override
    public Passenger checkIn(String personId, String station, String destination) {
        // TODO throw exceptions instead of null return?
        Person person = personDB.getPerson(personId);
        if (person == null) {
            System.err.println("Non-existent person <" + personId +
                    "> tried to check in at station <" + station +
                    "> to destination <" + destination + ">");
            return null;
        }
        if (!passengersInStation.containsKey(station)) {
            System.err.println("Station <" + station + "> is not managed by this StationsManager");
            return null;
        }
        if (!passengersInStation.containsKey(destination)) {
            // Fixed: this message used to print the origin station instead of the destination.
            System.err.println("Station <" + destination + "> is not managed by this StationsManager");
            return null;
        }
        if (!checkValidPersonLocation(personId, station)) {
            // System.err.println("Person "+personId+" cannot be at "+station);
            return null;
        }

        Passenger p = new Passenger();
        p.person = person;
        p.inStation = station;
        p.destination = destination;

        System.out.println(p.person.firstName+" "+p.person.lastName+" checked in at "+station+" to travel to "+destination);

        try {
            lock.writeLock().lock();
            List<Passenger> waiting = passengersInStation.get(station);
            waiting.add(p);
            // System.out.println("Now "+waiting.size()+" passengers waiting in "+station);
        } finally {
            lock.writeLock().unlock();
        }

        // Publish the CHECK_IN observation after the state change is committed.
        checkIn(personId, station);
        return p;
    }

    /** Publish a CHECK_IN observation event; failures are logged, never propagated. */
    private void checkIn(String personId, String station) {
        StationObservation checkIn = new StationObservation();
        checkIn.type = Type.CHECK_IN;
        checkIn.personId = personId;
        checkIn.station = station;
        try {
            Event event = new Event(StationObservation.TOPIC, dtos.asMap(checkIn));
            ea.postEvent(event);
        } catch (Exception e) {
            System.err.println("Error sending CheckIn Event: "+e.getMessage());
        }
    }

    /**
     * A train leaves a station: every waiting passenger boards it.
     *
     * @return the passengers now on the train, or null when the train is not in the
     *         station or the station is not managed here
     */
    @Override
    public List<Passenger> leave(String train, String station) {
        // TODO throw exceptions instead of null return?
        if (!checkValidTrainLocation(train, station)) {
            // System.err.println("Cannot board the train as it is not in the station");
            return null;
        }
        if (!passengersInStation.containsKey(station)) {
            // System.err.println("Station "+station+" is not managed by this StationsManager");
            return null;
        }
        try {
            lock.writeLock().lock();
            List<Passenger> onTrain = passengersOnTrain.get(train);
            if (onTrain == null) {
                onTrain = new ArrayList<>();
                passengersOnTrain.put(train, onTrain);
            }
            // Move every waiting passenger onto the train.
            List<Passenger> inStation = passengersInStation.get(station);
            Iterator<Passenger> it = inStation.iterator();
            while (it.hasNext()) {
                Passenger p = it.next();
                // TODO should we check with operator whether this train stops at destination station?!
                System.out.println(p.person.firstName+" "+p.person.lastName+" boards train "+train+" at station "+station);
                onTrain.add(p);
                it.remove();
            }
            return onTrain;
        } finally {
            lock.writeLock().unlock();
            // Publish DEPARTURE after releasing the lock; failures are only logged.
            try {
                StationObservation o = new StationObservation();
                o.type = Type.DEPARTURE;
                o.train = train;
                o.station = station;
                ea.postEvent(new Event(StationObservation.TOPIC, dtos.asMap(o)));
            } catch (Exception e) {
                System.err.println("Error sending departure event");
            }
        }
    }

    /**
     * A train arrives in a station: passengers with this destination are checked out.
     */
    @Override
    public void arrive(String train, String station) {
        // TODO throw exceptions?
        if (!checkValidTrainLocation(train, station)) {
            System.err.println("Cannot unboard the train as it is not in the station");
            return;
        }
        if (!passengersInStation.containsKey(station)) {
            System.err.println("Station "+station+" is not managed by this StationsManager");
            return;
        }
        // Publish ARRIVAL first; failures are only logged.
        try {
            StationObservation o = new StationObservation();
            o.type = Type.ARRIVAL;
            o.train = train;
            o.station = station;
            ea.postEvent(new Event(StationObservation.TOPIC, dtos.asMap(o)));
        } catch (Exception e) {
            System.err.println("Error sending arrival event");
        }
        try {
            lock.writeLock().lock();
            List<Passenger> onTrain = passengersOnTrain.get(train);
            if (onTrain == null) {
                onTrain = new ArrayList<>();
                passengersOnTrain.put(train, onTrain);
            }
            // Remove and check out every passenger whose destination is this station.
            Iterator<Passenger> it = onTrain.iterator();
            while (it.hasNext()) {
                Passenger p = it.next();
                if (p.destination.equals(station)) {
                    System.out.println(p.person.firstName+" "+p.person.lastName+" checked out "+station);
                    it.remove();
                    checkOut(p.person.id, station);
                }
            }
        } finally {
            lock.writeLock().unlock();
        }
    }

    /** Publish a CHECK_OUT observation event; failures are logged, never propagated. */
    private void checkOut(String personId, String station) {
        StationObservation checkOut = new StationObservation();
        checkOut.type = Type.CHECK_OUT;
        checkOut.personId = personId;
        checkOut.station = station;
        try {
            Event event = new Event(StationObservation.TOPIC, dtos.asMap(checkOut));
            ea.postEvent(event);
        } catch (Exception e) {
            System.err.println("Error sending CheckOut Event: "+e.getMessage());
        }
    }

    /**
     * @return true when the person is neither waiting in any station nor on any train,
     *         i.e. the person may legitimately check in now
     */
    private boolean checkValidPersonLocation(String personId, String station) {
        try {
            lock.readLock().lock();
            for (List<Passenger> p : passengersInStation.values()) {
                if (p.stream().filter(passenger -> passenger.person.id.equals(personId)).findFirst().isPresent())
                    return false;
            }
            for (List<Passenger> p : passengersOnTrain.values()) {
                if (p.stream().filter(passenger -> passenger.person.id.equals(personId)).findFirst().isPresent())
                    return false;
            }
        } finally {
            lock.readLock().unlock();
        }
        return true;
    }

    private boolean checkValidTrainLocation(String train, String station) {
        // TODO implement a check whether a train is actually in this station
        return true;
    }
}
| |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInsight.daemon.impl;
import com.intellij.codeHighlighting.BackgroundEditorHighlighter;
import com.intellij.codeHighlighting.HighlightingPass;
import com.intellij.codeHighlighting.Pass;
import com.intellij.codeHighlighting.TextEditorHighlightingPass;
import com.intellij.codeInsight.daemon.DaemonCodeAnalyzerSettings;
import com.intellij.codeInsight.daemon.DaemonCodeAnalyzerSettingsImpl;
import com.intellij.codeInsight.daemon.LineMarkerInfo;
import com.intellij.codeInsight.daemon.ReferenceImporter;
import com.intellij.codeInsight.hint.HintManager;
import com.intellij.codeInsight.intention.impl.FileLevelIntentionComponent;
import com.intellij.codeInsight.intention.impl.IntentionHintComponent;
import com.intellij.ide.PowerSaveMode;
import com.intellij.lang.annotation.HighlightSeverity;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.application.ex.ApplicationEx;
import com.intellij.openapi.application.ex.ApplicationManagerEx;
import com.intellij.openapi.components.PersistentStateComponent;
import com.intellij.openapi.components.State;
import com.intellij.openapi.components.Storage;
import com.intellij.openapi.components.StoragePathMacros;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.RangeMarker;
import com.intellij.openapi.editor.ex.RangeHighlighterEx;
import com.intellij.openapi.editor.impl.DocumentMarkupModel;
import com.intellij.openapi.editor.markup.MarkupModel;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.fileEditor.FileEditor;
import com.intellij.openapi.fileEditor.FileEditorManager;
import com.intellij.openapi.fileEditor.TextEditor;
import com.intellij.openapi.fileEditor.ex.FileEditorManagerEx;
import com.intellij.openapi.fileEditor.impl.text.TextEditorProvider;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.fileTypes.FileTypeManager;
import com.intellij.openapi.fileTypes.impl.FileTypeManagerImpl;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.VirtualFileManager;
import com.intellij.openapi.vfs.newvfs.RefreshQueueImpl;
import com.intellij.packageDependencies.DependencyValidationManager;
import com.intellij.psi.*;
import com.intellij.psi.impl.PsiDocumentManagerBase;
import com.intellij.psi.search.scope.packageSet.NamedScopeManager;
import com.intellij.psi.util.PsiUtilCore;
import com.intellij.util.*;
import com.intellij.util.io.storage.HeavyProcessLatch;
import com.intellij.util.ui.UIUtil;
import gnu.trove.THashMap;
import gnu.trove.THashSet;
import org.jdom.Element;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.TestOnly;
import java.util.*;
/**
* This class also controls the auto-reparse and auto-hints.
*/
@State(
name = "DaemonCodeAnalyzer",
storages = @Storage(file = StoragePathMacros.WORKSPACE_FILE)
)
public class DaemonCodeAnalyzerImpl extends DaemonCodeAnalyzerEx implements PersistentStateComponent<Element>, Disposable {
private static final Logger LOG = Logger.getInstance("#com.intellij.codeInsight.daemon.impl.DaemonCodeAnalyzerImpl");
private static final Key<List<LineMarkerInfo>> MARKERS_IN_EDITOR_DOCUMENT_KEY = Key.create("MARKERS_IN_EDITOR_DOCUMENT");
private static final Key<List<HighlightInfo>> FILE_LEVEL_HIGHLIGHTS = Key.create("FILE_LEVEL_HIGHLIGHTS");
private final Project myProject;
private final DaemonCodeAnalyzerSettings mySettings;
@NotNull private final EditorTracker myEditorTracker;
@NotNull private final PsiDocumentManager myPsiDocumentManager;
private DaemonProgressIndicator myUpdateProgress; //guarded by this
private final Runnable myUpdateRunnable = createUpdateRunnable();
private final Alarm myAlarm = new Alarm();
private boolean myUpdateByTimerEnabled = true;
private final Collection<VirtualFile> myDisabledHintsFiles = new THashSet<VirtualFile>();
private final Collection<VirtualFile> myDisabledHighlightingFiles = new THashSet<VirtualFile>();
private final FileStatusMap myFileStatusMap;
private DaemonCodeAnalyzerSettings myLastSettings;
private volatile IntentionHintComponent myLastIntentionHint;
private volatile boolean myDisposed; // the only possible transition: false -> true
private volatile boolean myInitialized; // the only possible transition: false -> true
@NonNls private static final String DISABLE_HINTS_TAG = "disable_hints";
@NonNls private static final String FILE_TAG = "file";
@NonNls private static final String URL_ATT = "url";
private final PassExecutorService myPassExecutorService;
private volatile boolean allowToInterrupt = true;
// Constructed by the component container; all collaborators are injected.
// namedScopeManager/dependencyValidationManager are unused but declared to force
// their initialization before this component (hence @SuppressWarnings).
public DaemonCodeAnalyzerImpl(@NotNull Project project,
@NotNull DaemonCodeAnalyzerSettings daemonCodeAnalyzerSettings,
@NotNull EditorTracker editorTracker,
@NotNull PsiDocumentManager psiDocumentManager,
@SuppressWarnings("UnusedParameters") @NotNull final NamedScopeManager namedScopeManager,
@SuppressWarnings("UnusedParameters") @NotNull final DependencyValidationManager dependencyValidationManager) {
myProject = project;
mySettings = daemonCodeAnalyzerSettings;
myEditorTracker = editorTracker;
myPsiDocumentManager = psiDocumentManager;
// Keep a private clone so settingsChanged() can later diff against the old values.
myLastSettings = ((DaemonCodeAnalyzerSettingsImpl)daemonCodeAnalyzerSettings).clone();
myFileStatusMap = new FileStatusMap(project);
myPassExecutorService = new PassExecutorService(project);
// Children registered on this Disposable are torn down automatically on dispose.
Disposer.register(this, myPassExecutorService);
Disposer.register(this, myFileStatusMap);
DaemonProgressIndicator.setDebug(LOG.isDebugEnabled());
assert !myInitialized : "Double Initializing";
Disposer.register(this, new StatusBarUpdater(project));
myInitialized = true;
myDisposed = false;
myFileStatusMap.markAllFilesDirty("DCAI init");
// Registered last so its dispose-time assertions observe the fully initialized state.
Disposer.register(this, new Disposable() {
@Override
public void dispose() {
assert myInitialized : "Disposing not initialized component";
assert !myDisposed : "Double dispose";
// Stop in-flight highlighting before flagging the analyzer as dead.
stopProcess(false, "Dispose");
myDisposed = true;
myLastSettings = null;
}
});
}
@Override
public void dispose() {
// Intentionally empty: the actual cleanup runs via the Disposables registered on
// this component in the constructor (pass executor, file status map, stop hook).
}
@NotNull
@TestOnly
public static List<HighlightInfo> getHighlights(@NotNull Document document, HighlightSeverity minSeverity, @NotNull Project project) {
  // Collect every highlight of at least minSeverity over the whole document range.
  final List<HighlightInfo> collected = new ArrayList<HighlightInfo>();
  final CommonProcessors.CollectProcessor<HighlightInfo> collector =
    new CommonProcessors.CollectProcessor<HighlightInfo>(collected);
  processHighlights(document, project, minSeverity, 0, document.getTextLength(), collector);
  return collected;
}
@Override
@NotNull
@TestOnly
public List<HighlightInfo> getFileLevelHighlights(@NotNull Project project, @NotNull PsiFile file) {
  // Gathers the file-level highlights attached to every editor currently showing this file.
  VirtualFile vFile = file.getViewProvider().getVirtualFile();
  final FileEditorManager manager = FileEditorManager.getInstance(project);
  List<HighlightInfo> result = new ArrayList<HighlightInfo>();
  for (FileEditor fileEditor : manager.getEditors(vFile)) {
    final List<HighlightInfo> infos = fileEditor.getUserData(FILE_LEVEL_HIGHLIGHTS);
    if (infos != null) {
      // Bulk copy instead of the previous element-by-element loop.
      result.addAll(infos);
    }
  }
  return result;
}
@Override
public void cleanFileLevelHighlights(@NotNull Project project, final int group, PsiFile psiFile) {
  // Removes every file-level highlight of the given group from all editors showing psiFile,
  // detaching its top component from the editor as well.
  if (psiFile == null) return;
  FileViewProvider provider = psiFile.getViewProvider();
  VirtualFile vFile = provider.getVirtualFile();
  final FileEditorManager manager = FileEditorManager.getInstance(project);
  for (FileEditor fileEditor : manager.getEditors(vFile)) {
    final List<HighlightInfo> infos = fileEditor.getUserData(FILE_LEVEL_HIGHLIGHTS);
    if (infos == null) continue;
    // Iterate with explicit removal instead of collecting into a removal list.
    for (Iterator<HighlightInfo> iterator = infos.iterator(); iterator.hasNext(); ) {
      HighlightInfo info = iterator.next();
      if (info.getGroup() != group) continue;
      manager.removeTopComponent(fileEditor, info.fileLevelComponent);
      iterator.remove();
    }
  }
}
@Override
public void addFileLevelHighlight(@NotNull final Project project,
                                  final int group,
                                  @NotNull final HighlightInfo info,
                                  @NotNull final PsiFile psiFile) {
  // Attaches a file-level (top-of-editor) intention component for the given info
  // to every text editor currently showing psiFile.
  VirtualFile vFile = psiFile.getViewProvider().getVirtualFile();
  final FileEditorManager manager = FileEditorManager.getInstance(project);
  for (FileEditor fileEditor : manager.getEditors(vFile)) {
    if (!(fileEditor instanceof TextEditor)) continue; // only text editors can host the component
    Editor hostEditor = ((TextEditor)fileEditor).getEditor();
    FileLevelIntentionComponent component =
      new FileLevelIntentionComponent(info.getDescription(), info.getSeverity(),
                                      info.getGutterIconRenderer(), info.quickFixActionRanges,
                                      project, psiFile, hostEditor);
    manager.addTopComponent(fileEditor, component);
    // Lazily create the per-editor list of file-level infos.
    List<HighlightInfo> fileLevelInfos = fileEditor.getUserData(FILE_LEVEL_HIGHLIGHTS);
    if (fileLevelInfos == null) {
      fileLevelInfos = new ArrayList<HighlightInfo>();
      fileEditor.putUserData(FILE_LEVEL_HIGHLIGHTS, fileLevelInfos);
    }
    info.fileLevelComponent = component;
    info.setGroup(group);
    fileLevelInfos.add(info);
  }
}
@Override
@NotNull
public List<HighlightInfo> runMainPasses(@NotNull PsiFile psiFile,
                                         @NotNull Document document,
                                         @NotNull final ProgressIndicator progress) {
  // Synchronously runs the "main" highlighting passes for the file and returns their infos.
  // clear status maps to run passes from scratch so that refCountHolder won't conflict and try to restart itself on partially filled maps
  myFileStatusMap.markAllFilesDirty("prepare to run main passes");
  stopProcess(false, "disable background daemon");
  myPassExecutorService.cancelAll(true);
  final List<HighlightInfo> result;
  try {
    result = new ArrayList<HighlightInfo>();
    final VirtualFile virtualFile = psiFile.getVirtualFile();
    if (virtualFile != null && !virtualFile.getFileType().isBinary()) {
      List<TextEditorHighlightingPass> passes =
        TextEditorHighlightingPassRegistrarEx.getInstanceEx(myProject).instantiateMainPasses(psiFile, document,
                                                                                             HighlightInfoProcessor.getEmpty());
      // Run GeneralHighlightingPass first. FIX: the previous comparator returned -1 whenever
      // o1 was a GeneralHighlightingPass even if o2 was one too, so compare(a,b) and
      // compare(b,a) could both be negative — a violation of the Comparator contract that
      // can make Collections.sort throw "Comparison method violates its general contract!".
      Collections.sort(passes, new Comparator<TextEditorHighlightingPass>() {
        @Override
        public int compare(@NotNull TextEditorHighlightingPass o1, @NotNull TextEditorHighlightingPass o2) {
          boolean general1 = o1 instanceof GeneralHighlightingPass;
          boolean general2 = o2 instanceof GeneralHighlightingPass;
          if (general1 == general2) return 0;
          return general1 ? -1 : 1; // general passes sort before all others
        }
      });
      LOG.debug("All passes for " + psiFile.getName()+ " started (" + passes+"). progress canceled: "+progress.isCanceled());
      try {
        for (TextEditorHighlightingPass pass : passes) {
          pass.doCollectInformation(progress);
          result.addAll(pass.getInfos());
        }
      }
      catch (ProcessCanceledException e) {
        LOG.debug("Canceled: " + progress);
        throw e;
      }
      LOG.debug("All passes for " + psiFile.getName()+ " run. progress canceled: "+progress.isCanceled()+"; infos: "+result);
    }
  }
  finally {
    // Always re-enable the background daemon, even when a pass was canceled or failed.
    stopProcess(true, "re-enable background daemon after main passes run");
  }
  return result;
}
/**
 * Test-only convenience overload: runs the highlighting passes for a single editor.
 * Delegates to the multi-editor variant with a one-element editor list.
 */
@NotNull
@TestOnly
public List<HighlightInfo> runPasses(@NotNull PsiFile file,
@NotNull Document document,
@NotNull TextEditor textEditor,
@NotNull int[] toIgnore,
boolean canChangeDocument,
@Nullable Runnable callbackWhileWaiting) throws ProcessCanceledException {
return runPasses(file, document, Collections.singletonList(textEditor), toIgnore, canChangeDocument, callbackWhileWaiting);
}
/**
 * Test-only synchronous run of the highlighting passes for the given editors.
 * Must be called on the EDT and outside a write action; pumps the event queue
 * while waiting for the background pass executor to finish, then returns all
 * highlights collected for the document.
 *
 * @param toIgnore pass ids to skip (marked up-to-date without running)
 * @param canChangeDocument when true, the file status map tolerates document changes during the run
 * @param callbackWhileWaiting invoked repeatedly while waiting for passes to complete
 */
@NotNull
@TestOnly
List<HighlightInfo> runPasses(@NotNull PsiFile file,
@NotNull Document document,
@NotNull List<TextEditor> textEditors,
@NotNull int[] toIgnore,
boolean canChangeDocument,
@Nullable Runnable callbackWhileWaiting) throws ProcessCanceledException {
assert myInitialized;
assert !myDisposed;
ApplicationEx application = ApplicationManagerEx.getApplicationEx();
application.assertIsDispatchThread();
if (application.isWriteAccessAllowed()) {
throw new AssertionError("Must not start highlighting from within write action, or deadlock is imminent");
}
DaemonProgressIndicator.setDebug(true);
((FileTypeManagerImpl)FileTypeManager.getInstance()).drainReDetectQueue();
// pump first so that queued event do not interfere
UIUtil.dispatchAllInvocationEvents();
// refresh will fire write actions interfering with highlighting
while (RefreshQueueImpl.isRefreshInProgress() || HeavyProcessLatch.INSTANCE.isRunning()) {
UIUtil.dispatchAllInvocationEvents();
}
UIUtil.dispatchAllInvocationEvents();
Project project = file.getProject();
FileStatusMap.getAndClearLog();
FileStatusMap fileStatusMap = getFileStatusMap();
fileStatusMap.allowDirt(canChangeDocument);
// Build the pass arrays to submit, one entry per editor.
Map<FileEditor, HighlightingPass[]> map = new HashMap<FileEditor, HighlightingPass[]>();
for (TextEditor textEditor : textEditors) {
TextEditorBackgroundHighlighter highlighter = (TextEditorBackgroundHighlighter)textEditor.getBackgroundHighlighter();
final List<TextEditorHighlightingPass> passes = highlighter.getPasses(toIgnore);
HighlightingPass[] array = passes.toArray(new HighlightingPass[passes.size()]);
assert array.length != 0 : "Highlighting is disabled for the file " + file;
map.put(textEditor, array);
}
// Ignored passes are marked up-to-date so the status map does not wait for them.
for (int ignoreId : toIgnore) {
fileStatusMap.markFileUpToDate(document, ignoreId);
}
myAlarm.cancelAllRequests();
final DaemonProgressIndicator progress = createUpdateProgress();
myPassExecutorService.submitPasses(map, progress);
try {
// Busy-wait on the EDT, pumping events so the passes can post their results back.
while (progress.isRunning()) {
try {
progress.checkCanceled();
if (callbackWhileWaiting != null) {
callbackWhileWaiting.run();
}
myPassExecutorService.waitFor(50);
UIUtil.dispatchAllInvocationEvents();
// Re-throw any exception a background pass recorded on the progress indicator.
Throwable savedException = PassExecutorService.getSavedException(progress);
if (savedException != null) throw savedException;
}
catch (RuntimeException e) {
throw e;
}
catch (Error e) {
throw e;
}
catch (Throwable e) {
throw new RuntimeException(e);
}
}
HighlightingSessionImpl session = (HighlightingSessionImpl)HighlightingSessionImpl.getHighlightingSession(file, progress);
try {
myPassExecutorService.waitFor(50000);
if (session != null) {
session.waitForHighlightInfosApplied();
}
}
catch (Throwable e) {
throw new RuntimeException(e);
}
UIUtil.dispatchAllInvocationEvents();
UIUtil.dispatchAllInvocationEvents();
assert progress.isCanceled() && progress.isDisposed();
// All collected highlights for the document, regardless of severity.
return getHighlights(document, null, project);
}
finally {
DaemonProgressIndicator.setDebug(false);
String log = FileStatusMap.getAndClearLog();
fileStatusMap.allowDirt(true);
try {
waitForTermination();
}
catch (Throwable e) {
LOG.error(log, e);
}
}
}
@TestOnly
public void prepareForTest() {
// Disable the timer-driven restart and wait until all submitted passes are cancelled.
setUpdateByTimerEnabled(false);
waitForTermination();
}
@TestOnly
public void cleanupAfterTest() {
// Only meaningful while the project is still open; otherwise disposal already ran.
if (myProject.isOpen()) {
prepareForTest();
}
}
// Cancels every submitted pass and blocks until the executor has let them go.
void waitForTermination() {
myPassExecutorService.cancelAll(true);
}
@Override
public void settingsChanged() {
  // Restart highlighting only when a highlighting-relevant setting actually changed,
  // then remember the new settings for the next comparison.
  DaemonCodeAnalyzerSettings currentSettings = DaemonCodeAnalyzerSettings.getInstance();
  if (currentSettings.isCodeHighlightingChanged(myLastSettings)) {
    restart();
  }
  myLastSettings = ((DaemonCodeAnalyzerSettingsImpl)currentSettings).clone();
}
@Override
public void updateVisibleHighlighters(@NotNull Editor editor) {
ApplicationManager.getApplication().assertIsDispatchThread();
// no need, will not work anyway
}
@Override
public void setUpdateByTimerEnabled(boolean value) {
myUpdateByTimerEnabled = value;
// Cancel the current run; re-arm the restart alarm only when enabling.
stopProcess(value, "Update by timer change");
}
private int myDisableCount;
// Reference-counted disable: the timer is re-enabled only when every caller's
// parentDisposable has been disposed. Must run on the EDT (count is unsynchronized).
@Override
public void disableUpdateByTimer(@NotNull Disposable parentDisposable) {
setUpdateByTimerEnabled(false);
myDisableCount++;
ApplicationManager.getApplication().assertIsDispatchThread();
Disposer.register(parentDisposable, new Disposable() {
@Override
public void dispose() {
myDisableCount--;
if (myDisableCount == 0) {
setUpdateByTimerEnabled(true);
}
}
});
}
// Whether the daemon automatically restarts after the auto-reparse delay.
boolean isUpdateByTimerEnabled() {
return myUpdateByTimerEnabled;
}
@Override
public void setImportHintsEnabled(@NotNull PsiFile file, boolean value) {
  // Toggles auto-import hints for a single file; disabling also hides visible hints.
  VirtualFile vFile = file.getVirtualFile();
  if (!value) {
    myDisabledHintsFiles.add(vFile);
    HintManager.getInstance().hideAllHints();
    return;
  }
  myDisabledHintsFiles.remove(vFile);
  stopProcess(true, "Import hints change");
}
@Override
public void resetImportHintsEnabledForProject() {
// Forget every per-file opt-out, re-enabling import hints everywhere.
myDisabledHintsFiles.clear();
}
@Override
public void setHighlightingEnabled(@NotNull PsiFile file, boolean value) {
  // Per-file highlighting opt-out, tracked as a set of disabled virtual files:
  // enabling removes the file from the set, disabling adds it.
  VirtualFile virtualFile = PsiUtilCore.getVirtualFile(file);
  if (value) {
    myDisabledHighlightingFiles.remove(virtualFile);
    return;
  }
  myDisabledHighlightingFiles.add(virtualFile);
}
@Override
public boolean isHighlightingAvailable(@Nullable PsiFile file) {
  // Highlighting requires a physical, non-compiled, non-binary file that has not
  // been explicitly disabled.
  if (file == null || !file.isPhysical()) return false;
  boolean explicitlyDisabled = myDisabledHighlightingFiles.contains(PsiUtilCore.getVirtualFile(file));
  if (explicitlyDisabled || file instanceof PsiCompiledElement) return false;
  final FileType fileType = file.getFileType();
  // To enable T.O.D.O. highlighting
  return !fileType.isBinary();
}
@Override
public boolean isImportHintsEnabled(@NotNull PsiFile file) {
// Hints require general autohint availability and no per-file opt-out.
return isAutohintsAvailable(file) && !myDisabledHintsFiles.contains(file.getVirtualFile());
}
@Override
public boolean isAutohintsAvailable(PsiFile file) {
// Compiled elements never get autohints even when highlighting itself is available.
return isHighlightingAvailable(file) && !(file instanceof PsiCompiledElement);
}
@Override
public void restart() {
// Invalidate all cached analysis results and re-arm the daemon.
myFileStatusMap.markAllFilesDirty("Global restart");
stopProcess(true, "Global restart");
}
@Override
public void restart(@NotNull PsiFile file) {
  // Marks the entire document of the given file dirty and reschedules highlighting.
  // No-op when the file has no cached document.
  Document document = myPsiDocumentManager.getCachedDocument(file);
  if (document == null) return;
  final String reason = "Psi file restart: " + file.getName();
  TextRange wholeDocument = new TextRange(0, document.getTextLength());
  myFileStatusMap.markFileScopeDirty(document, wholeDocument, file.getTextLength(), reason);
  stopProcess(true, reason);
}
@NotNull
List<TextEditorHighlightingPass> getPassesToShowProgressFor(Document document) {
  // Collect submitted passes that target this document, plus document-agnostic ones.
  final List<TextEditorHighlightingPass> submitted = myPassExecutorService.getAllSubmittedPasses();
  final List<TextEditorHighlightingPass> matching = new ArrayList<TextEditorHighlightingPass>(submitted.size());
  for (TextEditorHighlightingPass pass : submitted) {
    final Document passDocument = pass.getDocument();
    if (passDocument == null || passDocument == document) {
      matching.add(pass);
    }
  }
  return matching;
}
// True when every highlighting pass has finished for the file: the analyzer is
// alive, the cached document is in sync with the PSI, and no dirty scope remains.
boolean isAllAnalysisFinished(@NotNull PsiFile file) {
  if (myDisposed) {
    return false;
  }
  final Document document = myPsiDocumentManager.getCachedDocument(file);
  if (document == null) {
    return false;
  }
  if (document.getModificationStamp() != file.getViewProvider().getModificationStamp()) {
    return false;
  }
  return myFileStatusMap.allDirtyScopesAreNull(document);
}
@Override
public boolean isErrorAnalyzingFinished(@NotNull PsiFile file) {
  // Error analysis (the UPDATE_ALL pass) is finished only when the analyzer is
  // alive, the cached document matches the PSI, and its dirty scope is cleared.
  if (myDisposed) {
    return false;
  }
  final Document document = myPsiDocumentManager.getCachedDocument(file);
  if (document == null) {
    return false;
  }
  if (document.getModificationStamp() != file.getViewProvider().getModificationStamp()) {
    return false;
  }
  return myFileStatusMap.getFileDirtyScope(document, Pass.UPDATE_ALL) == null;
}
/** @return the map tracking per-document dirty ranges for each highlighting pass. */
@Override
@NotNull
public FileStatusMap getFileStatusMap() {
return myFileStatusMap;
}
// True while a daemon update cycle is in flight and has not been cancelled.
synchronized boolean isRunning() {
return myUpdateProgress != null && !myUpdateProgress.isCanceled();
}
/**
 * Cancels the current highlighting cycle and, when {@code toRestartAlarm} is set
 * and the analyzer is initialized and not disposed, reschedules the update
 * runnable after the autoreparse delay.
 *
 * @throws RuntimeException when interruption has been forbidden via {@link #allowToInterrupt}
 */
synchronized void stopProcess(boolean toRestartAlarm, @NonNls String reason) {
if (!allowToInterrupt) throw new RuntimeException("Cannot interrupt daemon");
cancelUpdateProgress(toRestartAlarm, reason);
myAlarm.cancelAllRequests();
boolean restart = toRestartAlarm && !myDisposed && myInitialized;
if (restart) {
UIUtil.invokeLaterIfNeeded(new Runnable() {
@Override
public void run() {
// Only queue a new request if none is already pending, to avoid flooding the alarm.
if (myAlarm.isEmpty()) {
myAlarm.addRequest(myUpdateRunnable, mySettings.AUTOREPARSE_DELAY);
}
}
});
}
}
// Cancels the in-flight progress indicator (if any) and all scheduled passes.
// 'start' only affects the log message (whether a restart will follow).
private synchronized void cancelUpdateProgress(final boolean start, @NonNls String reason) {
PassExecutorService.log(myUpdateProgress, null, "Cancel", reason, start);
if (myUpdateProgress != null) {
myUpdateProgress.cancel();
myPassExecutorService.cancelAll(false);
myUpdateProgress = null;
}
}
// Walks every highlight in the document and forwards to 'processor' only those
// that cover 'offset' (optionally including the quick-fix range) and meet the
// severity threshold; infos below the threshold are skipped without aborting.
static boolean processHighlightsNearOffset(@NotNull Document document,
                                           @NotNull Project project,
                                           @NotNull final HighlightSeverity minSeverity,
                                           final int offset,
                                           final boolean includeFixRange,
                                           @NotNull final Processor<HighlightInfo> processor) {
  return processHighlights(document, project, null, 0, document.getTextLength(), new Processor<HighlightInfo>() {
    @Override
    public boolean process(@NotNull HighlightInfo info) {
      if (!isOffsetInsideHighlightInfo(offset, info, includeFixRange)) {
        return true;
      }
      if (info.getSeverity().compareTo(minSeverity) < 0) {
        return true;
      }
      return processor.process(info);
    }
  });
}
/** Same as {@link #findHighlightByOffset(Document, int, boolean, HighlightSeverity)} with the lowest severity threshold. */
@Nullable
public HighlightInfo findHighlightByOffset(@NotNull Document document, final int offset, final boolean includeFixRange) {
return findHighlightByOffset(document, offset, includeFixRange, HighlightSeverity.INFORMATION);
}
/**
 * Finds the highlight(s) at the given offset, keeping only those with the
 * highest severity seen; returns a single info, a composite when several
 * highlights tie on severity, or null when none match.
 */
@Nullable
HighlightInfo findHighlightByOffset(@NotNull Document document,
final int offset,
final boolean includeFixRange,
@NotNull HighlightSeverity minSeverity) {
final List<HighlightInfo> foundInfoList = new SmartList<HighlightInfo>();
processHighlightsNearOffset(document, myProject, minSeverity, offset, includeFixRange,
new Processor<HighlightInfo>() {
@Override
public boolean process(@NotNull HighlightInfo info) {
// Caret-tracking highlights are internal and never reported.
if (info.getSeverity() == HighlightInfoType.ELEMENT_UNDER_CARET_SEVERITY) {
return true;
}
if (!foundInfoList.isEmpty()) {
HighlightInfo foundInfo = foundInfoList.get(0);
int compare = foundInfo.getSeverity().compareTo(info.getSeverity());
if (compare < 0) {
// Strictly more severe info found: discard everything collected so far.
foundInfoList.clear();
}
else if (compare > 0) {
// Less severe than what we already have: ignore it.
return true;
}
}
foundInfoList.add(info);
return true;
}
});
if (foundInfoList.isEmpty()) return null;
if (foundInfoList.size() == 1) return foundInfoList.get(0);
return new HighlightInfoComposite(foundInfoList);
}
// An offset counts as inside the info if it falls within the live highlighter
// range, or (when requested) within the separately tracked quick-fix range.
private static boolean isOffsetInsideHighlightInfo(int offset, @NotNull HighlightInfo info, boolean includeFixRange) {
  RangeHighlighterEx highlighter = info.highlighter;
  if (highlighter == null || !highlighter.isValid()) {
    return false;
  }
  if (highlighter.getStartOffset() <= offset && offset <= highlighter.getEndOffset()) {
    return true;
  }
  if (!includeFixRange) {
    return false;
  }
  // A null fixMarker means the fix range coincides with the highlighter range,
  // which was already checked above.
  RangeMarker fixMarker = info.fixMarker;
  if (fixMarker == null || !fixMarker.isValid()) {
    return false;
  }
  return fixMarker.getStartOffset() <= offset && offset <= fixMarker.getEndOffset();
}
/** @return the line markers previously stored on the document's markup model, or an empty list. Must be called on EDT. */
@NotNull
public static List<LineMarkerInfo> getLineMarkers(@NotNull Document document, Project project) {
ApplicationManager.getApplication().assertIsDispatchThread();
MarkupModel markup = DocumentMarkupModel.forDocument(document, project, true);
return ObjectUtils.notNull(markup.getUserData(MARKERS_IN_EDITOR_DOCUMENT_KEY), Collections.<LineMarkerInfo>emptyList());
}
// Stores the computed line markers on the document's markup model. Must be called on EDT.
static void setLineMarkers(@NotNull Document document, List<LineMarkerInfo> lineMarkers, Project project) {
ApplicationManager.getApplication().assertIsDispatchThread();
MarkupModel markup = DocumentMarkupModel.forDocument(document, project, true);
markup.putUserData(MARKERS_IN_EDITOR_DOCUMENT_KEY, lineMarkers);
}
/**
 * Shows the intention bulb for the given editor position and remembers it as the
 * last shown hint. Does nothing when the bulb is disabled in editor settings or
 * multiple carets are active. Must be called on EDT.
 */
void setLastIntentionHint(@NotNull Project project,
@NotNull PsiFile file,
@NotNull Editor editor,
@NotNull ShowIntentionsPass.IntentionsInfo intentions,
boolean hasToRecreate) {
if (!editor.getSettings().isShowIntentionBulb()) {
return;
}
ApplicationManager.getApplication().assertIsDispatchThread();
// Any previously shown hint is dismissed before showing the new one.
hideLastIntentionHint();
if (editor.getCaretModel().getCaretCount() > 1) return;
IntentionHintComponent hintComponent = IntentionHintComponent.showIntentionHint(project, file, editor, intentions, false);
if (hasToRecreate) {
hintComponent.recreate();
}
myLastIntentionHint = hintComponent;
}
// Hides the last shown intention bulb, if it is still visible. The reference is
// cleared only when a visible hint was actually hidden, matching the original
// semantics. Must be called on EDT.
void hideLastIntentionHint() {
  ApplicationManager.getApplication().assertIsDispatchThread();
  IntentionHintComponent hint = myLastIntentionHint;
  if (hint == null || !hint.isVisible()) {
    return;
  }
  hint.hide();
  myLastIntentionHint = null;
}
/** @return the most recently shown intention bulb component, or null if none. */
@Nullable
public IntentionHintComponent getLastIntentionHint() {
return myLastIntentionHint;
}
/**
 * Persists the set of files with disabled import hints as a sorted list of
 * URLs; files that no longer exist are dropped from the saved state.
 */
@Nullable
@Override
public Element getState() {
  final Element state = new Element("state");
  if (myDisabledHintsFiles.isEmpty()) {
    return state;
  }
  final List<String> urls = new SmartList<String>();
  for (VirtualFile file : myDisabledHintsFiles) {
    if (file.isValid()) {
      urls.add(file.getUrl());
    }
  }
  if (urls.isEmpty()) {
    return state;
  }
  Collections.sort(urls);
  final Element disableHintsElement = new Element(DISABLE_HINTS_TAG);
  state.addContent(disableHintsElement);
  for (String url : urls) {
    disableHintsElement.addContent(new Element(FILE_TAG).setAttribute(URL_ATT, url));
  }
  return state;
}
/**
 * Restores the disabled-import-hints set from persisted state, silently
 * skipping URLs that no longer resolve to an existing file.
 */
@Override
public void loadState(Element state) {
  myDisabledHintsFiles.clear();
  Element disableHints = state.getChild(DISABLE_HINTS_TAG);
  if (disableHints == null) {
    return;
  }
  for (Element fileElement : disableHints.getChildren(FILE_TAG)) {
    String url = fileElement.getAttributeValue(URL_ATT);
    if (url == null) {
      continue;
    }
    VirtualFile file = VirtualFileManager.getInstance().findFileByUrl(url);
    if (file != null) {
      myDisabledHintsFiles.add(file);
    }
  }
}
// Collects highlighting passes for all selected editors and submits them to the
// pass executor. Bails out when the timer-driven update is disabled, the analyzer
// is disposed, a write action is in progress, or documents are still being
// committed (in which case it re-runs itself after commit). Must run on EDT.
private final Runnable submitPassesRunnable = new Runnable() {
@Override
public void run() {
PassExecutorService.log(getUpdateProgress(), null, "Update Runnable. myUpdateByTimerEnabled:",
myUpdateByTimerEnabled, " something disposed:",
PowerSaveMode.isEnabled() || myDisposed || !myProject.isInitialized(), " activeEditors:",
myProject.isDisposed() ? null : getSelectedEditors());
if (!myUpdateByTimerEnabled) return;
if (myDisposed) return;
ApplicationManager.getApplication().assertIsDispatchThread();
final Collection<FileEditor> activeEditors = getSelectedEditors();
if (activeEditors.isEmpty()) return;
if (ApplicationManager.getApplication().isWriteAccessAllowed()) {
// makes no sense to start from within write action, will cancel anyway
// we'll restart when the write action finish
return;
}
final PsiDocumentManagerBase documentManager = (PsiDocumentManagerBase)myPsiDocumentManager;
if (documentManager.hasUncommitedDocuments()) {
documentManager.cancelAndRunWhenAllCommitted("restart daemon when all committed", this);
return;
}
if (RefResolveService.ENABLED &&
!RefResolveService.getInstance(myProject).isUpToDate() &&
RefResolveService.getInstance(myProject).getQueueSize() == 1) {
return; // if the user have just typed in something, wait until the file is re-resolved
// (or else it will blink like crazy since unused symbols calculation depends on resolve service)
}
Map<FileEditor, HighlightingPass[]> passes = new THashMap<FileEditor, HighlightingPass[]>(activeEditors.size());
for (FileEditor fileEditor : activeEditors) {
BackgroundEditorHighlighter highlighter = fileEditor.getBackgroundHighlighter();
if (highlighter != null) {
HighlightingPass[] highlightingPasses = highlighter.createPassesForEditor();
passes.put(fileEditor, highlightingPasses);
}
}
// cancel all after calling createPasses() since there are perverts {@link com.intellij.util.xml.ui.DomUIFactoryImpl} who are changing PSI there
cancelUpdateProgress(true, "Cancel by alarm");
myAlarm.cancelAllRequests();
DaemonProgressIndicator progress = createUpdateProgress();
myPassExecutorService.submitPasses(passes, progress);
}
};
/**
 * Builds the runnable that the alarm fires to start a highlighting cycle.
 * It defers itself while a heavy process is running, and when an editor is
 * focused it waits for all documents to be committed before submitting passes.
 */
@NotNull
private Runnable createUpdateRunnable() {
return new Runnable() {
@Override
public void run() {
ApplicationManager.getApplication().assertIsDispatchThread();
if (myDisposed || !myProject.isInitialized() || PowerSaveMode.isEnabled()) {
return;
}
if (HeavyProcessLatch.INSTANCE.isRunning()) {
// Retry later instead of competing with the heavy process.
if (myAlarm.isEmpty()) {
myAlarm.addRequest(myUpdateRunnable, mySettings.AUTOREPARSE_DELAY);
}
return;
}
Editor activeEditor = FileEditorManager.getInstance(myProject).getSelectedTextEditor();
if (activeEditor == null) {
submitPassesRunnable.run();
}
else {
((PsiDocumentManagerBase)myPsiDocumentManager).cancelAndRunWhenAllCommitted("start daemon when all committed",
submitPassesRunnable);
}
}
};
}
/**
 * Cancels any progress indicator still running and installs a fresh, started
 * one that notifies {@code DAEMON_EVENT_TOPIC} subscribers when it stops.
 */
@NotNull
private synchronized DaemonProgressIndicator createUpdateProgress() {
DaemonProgressIndicator old = myUpdateProgress;
if (old != null && !old.isCanceled()) {
old.cancel();
}
DaemonProgressIndicator progress = new DaemonProgressIndicator() {
@Override
public void stopIfRunning() {
super.stopIfRunning();
// Broadcast that this highlighting cycle has finished.
myProject.getMessageBus().syncPublisher(DAEMON_EVENT_TOPIC).daemonFinished();
}
};
progress.start();
myUpdateProgress = progress;
return progress;
}
@Override
public void autoImportReferenceAtCursor(@NotNull Editor editor, @NotNull PsiFile file) {
  // Ask each registered importer in turn; the first one that handles the
  // reference wins and the remaining importers are skipped.
  final ReferenceImporter[] importers = Extensions.getExtensions(ReferenceImporter.EP_NAME);
  for (ReferenceImporter importer : importers) {
    if (importer.autoImportReferenceAtCursor(editor, file)) {
      break;
    }
  }
}
// Test hook: exposes the current update progress indicator.
@TestOnly
synchronized DaemonProgressIndicator getUpdateProgress() {
return myUpdateProgress;
}
// Test hook: forbidding interruption makes stopProcess() throw instead of cancelling.
@TestOnly
void allowToInterrupt(boolean can) {
allowToInterrupt = can;
}
/**
 * Collects the editors to highlight: in a modal context only the active
 * (modal) editors; otherwise the selected tab editors plus any active editors
 * whose file is not already covered by a tab, to avoid duplicating documents.
 */
@NotNull
private Collection<FileEditor> getSelectedEditors() {
// Editors in modal context
List<Editor> editors = getActiveEditors();
Collection<FileEditor> activeTextEditors = new THashSet<FileEditor>(editors.size());
for (Editor editor : editors) {
TextEditor textEditor = TextEditorProvider.getInstance().getTextEditor(editor);
activeTextEditors.add(textEditor);
}
if (ApplicationManager.getApplication().getCurrentModalityState() != ModalityState.NON_MODAL) {
return activeTextEditors;
}
// Editors in tabs.
Collection<FileEditor> result = new THashSet<FileEditor>();
Collection<VirtualFile> files = new THashSet<VirtualFile>(activeTextEditors.size());
final FileEditor[] tabEditors = FileEditorManager.getInstance(myProject).getSelectedEditors();
for (FileEditor tabEditor : tabEditors) {
VirtualFile file = ((FileEditorManagerEx)FileEditorManager.getInstance(myProject)).getFile(tabEditor);
if (file != null) {
files.add(file);
}
result.add(tabEditor);
}
// do not duplicate documents
for (FileEditor fileEditor : activeTextEditors) {
VirtualFile file = ((FileEditorManagerEx)FileEditorManager.getInstance(myProject)).getFile(fileEditor);
if (file != null && files.contains(file)) continue;
result.add(fileEditor);
}
return result;
}
/** @return the editors the tracker currently considers active (focused / modal). */
@NotNull
private List<Editor> getActiveEditors() {
return myEditorTracker.getActiveEditors();
}
}
| |
/*
* YUI Compressor
* http://developer.yahoo.com/yui/compressor/
* Author: Julien Lecomte - http://www.julienlecomte.net/
* Copyright (c) 2011 Yahoo! Inc. All rights reserved.
* The copyrights embodied in the content of this file are licensed
* by Yahoo! Inc. under the BSD (revised) open source license.
*/
package com.yahoo.platform.yui.compressor;
import com.yahoo.platform.yui.compressor.org.mozilla.javascript.*;
import java.io.IOException;
import java.io.Reader;
import java.io.Writer;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class JavaScriptCompressor {
// Candidate replacement identifiers of length 1, 2 and 3 (reserved words and
// short built-in globals removed in the static initializer below).
static final ArrayList ones;
static final ArrayList twos;
static final ArrayList threes;
// Built-in global symbols of 3 characters or less that must never be generated.
static final Set builtin = new HashSet();
// Maps Rhino token type codes to the literal source text they print as.
static final Map literals = new Hashtable();
// JavaScript reserved words plus identifiers that effectively behave as such.
static final Set reserved = new HashSet();
// Populates the identifier pools, the token-to-literal table and the reserved
// word set used throughout the compressor.
static {
// This list contains all the 3 characters or less built-in global
// symbols available in a browser. Please add to this list if you
// see anything missing.
builtin.add("NaN");
builtin.add("top");
ones = new ArrayList();
for (char c = 'a'; c <= 'z'; c++)
ones.add(Character.toString(c));
for (char c = 'A'; c <= 'Z'; c++)
ones.add(Character.toString(c));
twos = new ArrayList();
for (int i = 0; i < ones.size(); i++) {
String one = (String) ones.get(i);
for (char c = 'a'; c <= 'z'; c++)
twos.add(one + Character.toString(c));
for (char c = 'A'; c <= 'Z'; c++)
twos.add(one + Character.toString(c));
for (char c = '0'; c <= '9'; c++)
twos.add(one + Character.toString(c));
}
threes = new ArrayList();
for (int i = 0; i < twos.size(); i++) {
String two = (String) twos.get(i);
for (char c = 'a'; c <= 'z'; c++)
threes.add(two + Character.toString(c));
for (char c = 'A'; c <= 'Z'; c++)
threes.add(two + Character.toString(c));
for (char c = '0'; c <= '9'; c++)
threes.add(two + Character.toString(c));
}
// Remove two-letter JavaScript reserved words and built-in globals...
twos.remove("as");
twos.remove("is");
twos.remove("do");
twos.remove("if");
twos.remove("in");
twos.removeAll(builtin);
// Remove three-letter JavaScript reserved words and built-in globals...
threes.remove("for");
threes.remove("int");
threes.remove("new");
threes.remove("try");
threes.remove("use");
threes.remove("var");
threes.removeAll(builtin);
// That's up to ((26+26)*(1+(26+26+10)))*(1+(26+26+10))-8
// (206,380 symbols per scope)
// The following list comes from org/mozilla/javascript/Decompiler.java...
literals.put(new Integer(Token.GET), "get ");
literals.put(new Integer(Token.SET), "set ");
literals.put(new Integer(Token.TRUE), "true");
literals.put(new Integer(Token.FALSE), "false");
literals.put(new Integer(Token.NULL), "null");
literals.put(new Integer(Token.THIS), "this");
literals.put(new Integer(Token.FUNCTION), "function");
literals.put(new Integer(Token.COMMA), ",");
literals.put(new Integer(Token.LC), "{");
literals.put(new Integer(Token.RC), "}");
literals.put(new Integer(Token.LP), "(");
literals.put(new Integer(Token.RP), ")");
literals.put(new Integer(Token.LB), "[");
literals.put(new Integer(Token.RB), "]");
literals.put(new Integer(Token.DOT), ".");
literals.put(new Integer(Token.NEW), "new ");
literals.put(new Integer(Token.DELPROP), "delete ");
literals.put(new Integer(Token.IF), "if");
literals.put(new Integer(Token.ELSE), "else");
literals.put(new Integer(Token.FOR), "for");
literals.put(new Integer(Token.IN), " in ");
literals.put(new Integer(Token.WITH), "with");
literals.put(new Integer(Token.WHILE), "while");
literals.put(new Integer(Token.DO), "do");
literals.put(new Integer(Token.TRY), "try");
literals.put(new Integer(Token.CATCH), "catch");
literals.put(new Integer(Token.FINALLY), "finally");
literals.put(new Integer(Token.THROW), "throw");
literals.put(new Integer(Token.SWITCH), "switch");
literals.put(new Integer(Token.BREAK), "break");
literals.put(new Integer(Token.CONTINUE), "continue");
literals.put(new Integer(Token.CASE), "case");
literals.put(new Integer(Token.DEFAULT), "default");
literals.put(new Integer(Token.RETURN), "return");
literals.put(new Integer(Token.VAR), "var ");
literals.put(new Integer(Token.SEMI), ";");
literals.put(new Integer(Token.ASSIGN), "=");
literals.put(new Integer(Token.ASSIGN_ADD), "+=");
literals.put(new Integer(Token.ASSIGN_SUB), "-=");
literals.put(new Integer(Token.ASSIGN_MUL), "*=");
literals.put(new Integer(Token.ASSIGN_DIV), "/=");
literals.put(new Integer(Token.ASSIGN_MOD), "%=");
literals.put(new Integer(Token.ASSIGN_BITOR), "|=");
literals.put(new Integer(Token.ASSIGN_BITXOR), "^=");
literals.put(new Integer(Token.ASSIGN_BITAND), "&=");
literals.put(new Integer(Token.ASSIGN_LSH), "<<=");
literals.put(new Integer(Token.ASSIGN_RSH), ">>=");
literals.put(new Integer(Token.ASSIGN_URSH), ">>>=");
literals.put(new Integer(Token.HOOK), "?");
literals.put(new Integer(Token.OBJECTLIT), ":");
literals.put(new Integer(Token.COLON), ":");
literals.put(new Integer(Token.OR), "||");
literals.put(new Integer(Token.AND), "&&");
literals.put(new Integer(Token.BITOR), "|");
literals.put(new Integer(Token.BITXOR), "^");
literals.put(new Integer(Token.BITAND), "&");
literals.put(new Integer(Token.SHEQ), "===");
literals.put(new Integer(Token.SHNE), "!==");
literals.put(new Integer(Token.EQ), "==");
literals.put(new Integer(Token.NE), "!=");
literals.put(new Integer(Token.LE), "<=");
literals.put(new Integer(Token.LT), "<");
literals.put(new Integer(Token.GE), ">=");
literals.put(new Integer(Token.GT), ">");
literals.put(new Integer(Token.INSTANCEOF), " instanceof ");
literals.put(new Integer(Token.LSH), "<<");
literals.put(new Integer(Token.RSH), ">>");
literals.put(new Integer(Token.URSH), ">>>");
literals.put(new Integer(Token.TYPEOF), "typeof");
literals.put(new Integer(Token.VOID), "void ");
literals.put(new Integer(Token.CONST), "const ");
literals.put(new Integer(Token.NOT), "!");
literals.put(new Integer(Token.BITNOT), "~");
literals.put(new Integer(Token.POS), "+");
literals.put(new Integer(Token.NEG), "-");
literals.put(new Integer(Token.INC), "++");
literals.put(new Integer(Token.DEC), "--");
literals.put(new Integer(Token.ADD), "+");
literals.put(new Integer(Token.SUB), "-");
literals.put(new Integer(Token.MUL), "*");
literals.put(new Integer(Token.DIV), "/");
literals.put(new Integer(Token.MOD), "%");
literals.put(new Integer(Token.COLONCOLON), "::");
literals.put(new Integer(Token.DOTDOT), "..");
literals.put(new Integer(Token.DOTQUERY), ".(");
literals.put(new Integer(Token.XMLATTR), "@");
literals.put(new Integer(Token.LET), "let ");
literals.put(new Integer(Token.YIELD), "yield ");
// See http://developer.mozilla.org/en/docs/Core_JavaScript_1.5_Reference:Reserved_Words
// JavaScript 1.5 reserved words
reserved.add("break");
reserved.add("case");
reserved.add("catch");
reserved.add("continue");
reserved.add("default");
reserved.add("delete");
reserved.add("do");
reserved.add("else");
reserved.add("finally");
reserved.add("for");
reserved.add("function");
reserved.add("if");
reserved.add("in");
reserved.add("instanceof");
reserved.add("new");
reserved.add("return");
reserved.add("switch");
reserved.add("this");
reserved.add("throw");
reserved.add("try");
reserved.add("typeof");
reserved.add("var");
reserved.add("void");
reserved.add("while");
reserved.add("with");
// Words reserved for future use
reserved.add("abstract");
reserved.add("boolean");
reserved.add("byte");
reserved.add("char");
reserved.add("class");
reserved.add("const");
reserved.add("debugger");
reserved.add("double");
reserved.add("enum");
reserved.add("export");
reserved.add("extends");
reserved.add("final");
reserved.add("float");
reserved.add("goto");
reserved.add("implements");
reserved.add("import");
reserved.add("int");
reserved.add("interface");
reserved.add("long");
reserved.add("native");
reserved.add("package");
reserved.add("private");
reserved.add("protected");
reserved.add("public");
reserved.add("short");
reserved.add("static");
reserved.add("super");
reserved.add("synchronized");
reserved.add("throws");
reserved.add("transient");
reserved.add("volatile");
// These are not reserved, but should be taken into account
// in isValidIdentifier (See jslint source code)
reserved.add("arguments");
reserved.add("eval");
reserved.add("true");
reserved.add("false");
reserved.add("Infinity");
reserved.add("NaN");
reserved.add("null");
reserved.add("undefined");
}
/*
 * Counts how many times the given character occurs in the string.
 */
private static int countChar(String haystack, char needle) {
    int count = 0;
    for (int i = 0, length = haystack.length(); i < length; i++) {
        if (haystack.charAt(i) == needle) {
            count++;
        }
    }
    return count;
}
/*
 * Decodes one string entry from Rhino's encoded source buffer starting at
 * 'offset'. The first char holds the length; if its high bit is set, the
 * length continues into the next char (15 high bits + 16 low bits). Appends
 * the decoded text to 'sb' when non-null and returns the offset just past
 * the entry.
 */
private static int printSourceString(String source, int offset, StringBuffer sb) {
    int length = source.charAt(offset);
    offset++;
    if ((0x8000 & length) != 0) {
        // Long string: low 15 bits are the high half, next char is the low half.
        length = ((0x7FFF & length) << 16) | source.charAt(offset);
        offset++;
    }
    if (sb != null) {
        sb.append(source, offset, offset + length);
    }
    return offset + length;
}
/*
 * Decodes one number entry from Rhino's encoded source buffer starting at
 * 'offset'. Type 'S' packs a small value into one char; 'J' (integral) and
 * 'D' (double) pack 64 bits across four chars. Appends the decimal rendering
 * to 'sb' when non-null and returns the offset just past the entry.
 */
private static int printSourceNumber(String source,
int offset, StringBuffer sb) {
double number = 0.0;
char type = source.charAt(offset);
++offset;
if (type == 'S') {
if (sb != null) {
number = source.charAt(offset);
}
++offset;
} else if (type == 'J' || type == 'D') {
if (sb != null) {
// Reassemble the 64-bit payload from four 16-bit chars, big-endian.
long lbits;
lbits = (long) source.charAt(offset) << 48;
lbits |= (long) source.charAt(offset + 1) << 32;
lbits |= (long) source.charAt(offset + 2) << 16;
lbits |= (long) source.charAt(offset + 3);
if (type == 'J') {
number = lbits;
} else {
number = Double.longBitsToDouble(lbits);
}
}
offset += 4;
} else {
// Bad source
throw new RuntimeException();
}
if (sb != null) {
sb.append(ScriptRuntime.numberToString(number, 10));
}
return offset;
}
/*
 * Runs Rhino's parser over the input and converts its encoded source back
 * into a flat list of JavaScriptToken objects: tokens with a textual payload
 * (names, strings, regexps, comments) are decoded via printSourceString,
 * numbers via printSourceNumber, and all other token types are mapped to
 * their literal text through the 'literals' table (unknown types are dropped).
 */
private static ArrayList parse(Reader in, ErrorReporter reporter)
throws IOException, EvaluatorException {
CompilerEnvirons env = new CompilerEnvirons();
env.setLanguageVersion(Context.VERSION_1_7);
Parser parser = new Parser(env, reporter);
parser.parse(in, null, 1);
String source = parser.getEncodedSource();
int offset = 0;
int length = source.length();
ArrayList tokens = new ArrayList();
StringBuffer sb = new StringBuffer();
while (offset < length) {
// Each entry starts with its token-type char.
int tt = source.charAt(offset++);
switch (tt) {
case Token.CONDCOMMENT:
case Token.KEEPCOMMENT:
case Token.NAME:
case Token.REGEXP:
case Token.STRING:
sb.setLength(0);
offset = printSourceString(source, offset, sb);
tokens.add(new JavaScriptToken(tt, sb.toString()));
break;
case Token.NUMBER:
sb.setLength(0);
offset = printSourceNumber(source, offset, sb);
tokens.add(new JavaScriptToken(tt, sb.toString()));
break;
default:
String literal = (String) literals.get(new Integer(tt));
if (literal != null) {
tokens.add(new JavaScriptToken(tt, literal));
}
break;
}
}
return tokens;
}
/*
 * Optionally concatenates adjacent string literals joined by '+' (taking care
 * of the "a" + "b".toUpperCase() case), then re-quotes and escapes every
 * string token, choosing the quote character that minimizes escaping. Also
 * rewrites "</script" into "<\/script" so output can be inlined in HTML.
 */
private static void processStringLiterals(ArrayList tokens, boolean merge) {
    String tv;
    int i, length = tokens.size();
    JavaScriptToken token, prevToken, nextToken;
    if (merge) {
        // Concatenate string literals that are being appended wherever
        // it is safe to do so. Note that we take care of the case:
        //     "a" + "b".toUpperCase()
        for (i = 0; i < length; i++) {
            token = (JavaScriptToken) tokens.get(i);
            switch (token.getType()) {
                case Token.ADD:
                    // Bug fix: the upper bound must be length - 1 (and the
                    // look-ahead guard length - 2) so tokens.get(i + 1) and
                    // tokens.get(i + 2) can never index past the end of the
                    // list when '+' appears at or near the last token.
                    if (i > 0 && i < length - 1) {
                        prevToken = (JavaScriptToken) tokens.get(i - 1);
                        nextToken = (JavaScriptToken) tokens.get(i + 1);
                        if (prevToken.getType() == Token.STRING && nextToken.getType() == Token.STRING &&
                                (i == length - 2 || ((JavaScriptToken) tokens.get(i + 2)).getType() != Token.DOT)) {
                            tokens.set(i - 1, new JavaScriptToken(Token.STRING,
                                    prevToken.getValue() + nextToken.getValue()));
                            tokens.remove(i + 1);
                            tokens.remove(i);
                            // Re-examine from the merged token; the list shrank by two.
                            i = i - 1;
                            length = length - 2;
                            break;
                        }
                    }
            }
        }
    }
    // Second pass: quote and escape every string literal.
    for (i = 0; i < length; i++) {
        token = (JavaScriptToken) tokens.get(i);
        if (token.getType() == Token.STRING) {
            tv = token.getValue();
            // Finally, add the quoting characters and escape the string. We use
            // the quoting character that minimizes the amount of escaping to save
            // a few additional bytes.
            char quotechar;
            int singleQuoteCount = countChar(tv, '\'');
            int doubleQuoteCount = countChar(tv, '"');
            if (doubleQuoteCount <= singleQuoteCount) {
                quotechar = '"';
            } else {
                quotechar = '\'';
            }
            tv = quotechar + escapeString(tv, quotechar) + quotechar;
            // String concatenation transforms the old script scheme:
            //     '<scr'+'ipt ...><'+'/script>'
            // into the following:
            //     '<script ...></script>'
            // which breaks if this code is embedded inside an HTML document.
            // Since this is not the right way to do this, let's fix the code by
            // transforming all "</script" into "<\/script"
            if (tv.indexOf("</script") >= 0) {
                tv = tv.replaceAll("<\\/script", "<\\\\/script");
            }
            tokens.set(i, new JavaScriptToken(Token.STRING, tv));
        }
    }
}
// Add necessary escaping that was removed in Rhino's tokenizer: prefix every
// occurrence of the chosen quote character with a backslash. Returns null for
// null input.
private static String escapeString(String s, char quotechar) {
    assert quotechar == '"' || quotechar == '\'';
    if (s == null) {
        return null;
    }
    StringBuffer escaped = new StringBuffer();
    for (int i = 0, L = s.length(); i < L; i++) {
        char c = s.charAt(i);
        if (c == quotechar) {
            escaped.append('\\');
        }
        escaped.append(c);
    }
    return escaped.toString();
}
/*
* Simple check to see whether a string is a valid identifier name.
* If a string matches this pattern, it means it IS a valid
* identifier name. If a string doesn't match it, it does not
* necessarily mean it is not a valid identifier name.
*/
private static final Pattern SIMPLE_IDENTIFIER_NAME_PATTERN = Pattern.compile("^[a-zA-Z_][a-zA-Z0-9_]*$");
// A string that matches the simple pattern and is not reserved is surely a
// valid identifier; strings failing the pattern may still be valid names.
private static boolean isValidIdentifier(String s) {
    return SIMPLE_IDENTIFIER_NAME_PATTERN.matcher(s).matches() && !reserved.contains(s);
}
/*
* Transforms obj["foo"] into obj.foo whenever possible, saving 3 bytes.
*/
/*
 * Transforms obj["foo"] into obj.foo whenever possible, saving 3 bytes:
 * looks for the token pattern NAME [ STRING ] and rewrites it in place
 * when the quoted string is a safe identifier.
 */
private static void optimizeObjectMemberAccess(ArrayList tokens) {
String tv;
int i, length;
JavaScriptToken token;
for (i = 0, length = tokens.size(); i < length; i++) {
if (((JavaScriptToken) tokens.get(i)).getType() == Token.LB &&
i > 0 && i < length - 2 &&
((JavaScriptToken) tokens.get(i - 1)).getType() == Token.NAME &&
((JavaScriptToken) tokens.get(i + 1)).getType() == Token.STRING &&
((JavaScriptToken) tokens.get(i + 2)).getType() == Token.RB) {
token = (JavaScriptToken) tokens.get(i + 1);
tv = token.getValue();
// Strip the surrounding quotes before validating the name.
tv = tv.substring(1, tv.length() - 1);
if (isValidIdentifier(tv)) {
tokens.set(i, new JavaScriptToken(Token.DOT, "."));
tokens.set(i + 1, new JavaScriptToken(Token.NAME, tv));
tokens.remove(i + 2);
// Skip past the rewritten tokens; the list shrank by one.
i = i + 2;
length = length - 1;
}
}
}
}
/*
* Transforms 'foo': ... into foo: ... whenever possible, saving 2 bytes.
*/
/*
 * Transforms 'foo': ... into foo: ... whenever possible, saving 2 bytes:
 * a STRING token immediately before an object-literal colon is unquoted
 * when its content is a safe identifier.
 */
private static void optimizeObjLitMemberDecl(ArrayList tokens) {
String tv;
int i, length;
JavaScriptToken token;
for (i = 0, length = tokens.size(); i < length; i++) {
if (((JavaScriptToken) tokens.get(i)).getType() == Token.OBJECTLIT &&
i > 0 && ((JavaScriptToken) tokens.get(i - 1)).getType() == Token.STRING) {
token = (JavaScriptToken) tokens.get(i - 1);
tv = token.getValue();
// Strip the surrounding quotes before validating the name.
tv = tv.substring(1, tv.length() - 1);
if (isValidIdentifier(tv)) {
tokens.set(i - 1, new JavaScriptToken(Token.NAME, tv));
}
}
}
}
// Sink for warnings emitted while parsing and munging.
private ErrorReporter logger;
// Whether local symbols get renamed to short identifiers.
private boolean munge;
// Whether informational warnings are reported through 'logger'.
private boolean verbose;
// Two-phase traversal modes: first build the symbol tree, then check/munge it.
private static final int BUILDING_SYMBOL_TREE = 1;
private static final int CHECKING_SYMBOL_TREE = 2;
private int mode;
// Index of the next token to consume from 'tokens'.
private int offset;
// Current '{' nesting depth while scanning.
private int braceNesting;
private ArrayList tokens;
// Stack of scopes enclosing the current parse position.
private Stack scopes = new Stack();
private ScriptOrFnScope globalScope = new ScriptOrFnScope(-1, null);
// Maps token offsets of function bodies to their scopes, so the second pass
// can look up the scopes built during the first pass.
private Hashtable indexedScopes = new Hashtable();
/**
 * Parses the JavaScript read from {@code in} into a token list, reporting
 * problems to {@code reporter}. Compression itself happens in {@link #compress}.
 */
public JavaScriptCompressor(Reader in, ErrorReporter reporter)
throws IOException, EvaluatorException {
this.logger = reporter;
this.tokens = parse(in, reporter);
}
/**
 * Runs the full compression pipeline over the parsed tokens and writes the
 * result to {@code out}: string-literal processing, optional member-access
 * optimizations, symbol-tree building, munging, and printing. When
 * {@code mungemap} is non-null the symbol renaming map is written to it.
 */
public void compress(Writer out, Writer mungemap, int linebreak, boolean munge, boolean verbose,
boolean preserveAllSemiColons, boolean disableOptimizations)
throws IOException {
this.munge = munge;
this.verbose = verbose;
processStringLiterals(this.tokens, !disableOptimizations);
if (!disableOptimizations) {
optimizeObjectMemberAccess(this.tokens);
optimizeObjLitMemberDecl(this.tokens);
}
buildSymbolTree();
// DO NOT TOUCH this.tokens BETWEEN THESE TWO PHASES (BECAUSE OF this.indexedScopes)
mungeSymboltree();
StringBuffer sb = printSymbolTree(linebreak, preserveAllSemiColons);
out.write(sb.toString());
if (mungemap != null) {
printMungeMapping(mungemap);
}
}
// The innermost scope enclosing the current parse position.
private ScriptOrFnScope getCurrentScope() {
return (ScriptOrFnScope) scopes.peek();
}
// Pushes a scope when the parser descends into a function body.
private void enterScope(ScriptOrFnScope scope) {
scopes.push(scope);
}
// Pops the scope when the parser leaves a function body.
private void leaveCurrentScope() {
scopes.pop();
}
// Returns the token at the read position and advances past it.
private JavaScriptToken consumeToken() {
return (JavaScriptToken) tokens.get(offset++);
}
/*
 * Peeks at the token 'delta' positions away from the current read position
 * without consuming it, or returns null when the index falls outside the
 * token list.
 */
private JavaScriptToken getToken(int delta) {
    // Explicit bounds check instead of catching IndexOutOfBoundsException:
    // exceptions as control flow are slow and can mask genuine indexing bugs.
    int index = offset + delta;
    if (index < 0 || index >= tokens.size()) {
        return null;
    }
    return (JavaScriptToken) tokens.get(index);
}
/*
 * Returns the identifier for the specified symbol defined in the specified
 * scope or in any scope above it, or null if the symbol does not have a
 * corresponding identifier.
 */
private JavaScriptIdentifier getIdentifier(String symbol, ScriptOrFnScope scope) {
    // Walk the scope chain from the innermost scope outwards.
    for (ScriptOrFnScope s = scope; s != null; s = s.getParentScope()) {
        JavaScriptIdentifier identifier = s.getIdentifier(symbol);
        if (identifier != null) {
            return identifier;
        }
    }
    return null;
}
/*
 * If either 'eval' or 'with' is used in a local scope, we must make sure that
 * all containing local scopes don't get munged. Otherwise, the obfuscation
 * would potentially introduce bugs.
 */
private void protectScopeFromObfuscation(ScriptOrFnScope scope) {
    assert scope != null;
    // The global scope never gets obfuscated, so there is nothing to protect.
    if (scope == globalScope) {
        return;
    }
    // Climb to the outermost local scope (the direct child of the global
    // scope) containing the specified scope and disable munging on it.
    ScriptOrFnScope outermostLocal = scope;
    while (outermostLocal.getParentScope() != globalScope) {
        outermostLocal = outermostLocal.getParentScope();
    }
    assert outermostLocal.getParentScope() == globalScope;
    outermostLocal.preventMunging();
}
/*
 * Builds a short excerpt of the token stream around the current read position
 * (up to 'max' tokens on each side), with the most recently consumed token
 * bracketed by " ---> " / " <--- " markers. Used for warning messages.
 */
private String getDebugString(int max) {
    assert max > 0;
    StringBuffer excerpt = new StringBuffer();
    int from = Math.max(offset - max, 0);
    int to = Math.min(offset + max, tokens.size());
    for (int i = from; i < to; i++) {
        boolean isCurrent = (i == offset - 1);
        if (isCurrent) {
            excerpt.append(" ---> ");
        }
        excerpt.append(((JavaScriptToken) tokens.get(i)).getValue());
        if (isCurrent) {
            excerpt.append(" <--- ");
        }
    }
    return excerpt.toString();
}
// Reports a warning through the error reporter when verbose mode is on,
// optionally appending a debug excerpt of the tokens around the current position.
private void warn(String message, boolean showDebugString) {
    if (!verbose) {
        return;
    }
    String text = showDebugString ? message + "\n" + getDebugString(10) : message;
    logger.warning(text, null, -1, null, -1);
}
/*
 * Parses a function declaration/expression starting just after the 'function'
 * keyword: the optional name, the argument list, an optional "nomunge" hint
 * string statement at the top of the body, and then the body itself via
 * parseScope(). In BUILDING_SYMBOL_TREE mode it declares symbols and creates
 * the function scope (keyed by token offset in indexedScopes); in
 * CHECKING_SYMBOL_TREE mode it looks the scope back up and applies hints.
 */
private void parseFunctionDeclaration() {
String symbol;
JavaScriptToken token;
ScriptOrFnScope currentScope, fnScope;
JavaScriptIdentifier identifier;
currentScope = getCurrentScope();
token = consumeToken();
if (token.getType() == Token.NAME) {
if (mode == BUILDING_SYMBOL_TREE) {
// Get the name of the function and declare it in the current scope.
symbol = token.getValue();
if (currentScope.getIdentifier(symbol) != null) {
warn("The function " + symbol + " has already been declared in the same scope...", true);
}
currentScope.declareIdentifier(symbol);
}
token = consumeToken();
}
assert token.getType() == Token.LP;
if (mode == BUILDING_SYMBOL_TREE) {
fnScope = new ScriptOrFnScope(braceNesting, currentScope);
// Key the scope by the current token offset so the checking pass,
// which replays the same token stream, can retrieve it.
indexedScopes.put(new Integer(offset), fnScope);
} else {
fnScope = (ScriptOrFnScope) indexedScopes.get(new Integer(offset));
}
// Parse function arguments.
int argpos = 0;
while ((token = consumeToken()).getType() != Token.RP) {
assert token.getType() == Token.NAME ||
token.getType() == Token.COMMA;
if (token.getType() == Token.NAME && mode == BUILDING_SYMBOL_TREE) {
symbol = token.getValue();
identifier = fnScope.declareIdentifier(symbol);
if (symbol.equals("$super") && argpos == 0) {
// Exception for Prototype 1.6...
identifier.preventMunging();
}
argpos++;
}
}
token = consumeToken();
assert token.getType() == Token.LC;
braceNesting++;
token = getToken(0);
if (token.getType() == Token.STRING &&
getToken(1).getType() == Token.SEMI) {
// This is a hint. Hints are empty statements that look like
// "localvar1:nomunge, localvar2:nomunge"; They allow developers
// to prevent specific symbols from getting obfuscated (some heretic
// implementations, such as Prototype 1.6, require specific variable
// names, such as $super for example, in order to work appropriately.
// Note: right now, only "nomunge" is supported in the right hand side
// of a hint. However, in the future, the right hand side may contain
// other values.
consumeToken();
String hints = token.getValue();
// Remove the leading and trailing quotes...
hints = hints.substring(1, hints.length() - 1).trim();
StringTokenizer st1 = new StringTokenizer(hints, ",");
while (st1.hasMoreTokens()) {
String hint = st1.nextToken();
int idx = hint.indexOf(':');
if (idx <= 0 || idx >= hint.length() - 1) {
if (mode == BUILDING_SYMBOL_TREE) {
// No need to report the error twice, hence the test...
warn("Invalid hint syntax: " + hint, true);
}
break;
}
String variableName = hint.substring(0, idx).trim();
String variableType = hint.substring(idx + 1).trim();
if (mode == BUILDING_SYMBOL_TREE) {
fnScope.addHint(variableName, variableType);
} else if (mode == CHECKING_SYMBOL_TREE) {
identifier = fnScope.getIdentifier(variableName);
if (identifier != null) {
if (variableType.equals("nomunge")) {
identifier.preventMunging();
} else {
warn("Unsupported hint value: " + hint, true);
}
} else {
warn("Hint refers to an unknown identifier: " + hint, true);
}
}
}
}
parseScope(fnScope);
}
// Parses "catch (name)" whose CATCH token has already been consumed.
// In the symbol-tree pass the exception variable is declared in the
// enclosing function scope (so munging stays consistent); in the checking
// pass its reference count is bumped instead.
private void parseCatch() {
    assert getToken(-1).getType() == Token.CATCH;
    JavaScriptToken token = consumeToken();
    assert token.getType() == Token.LP;
    token = consumeToken();
    assert token.getType() == Token.NAME;
    String exceptionName = token.getValue();
    ScriptOrFnScope scope = getCurrentScope();
    if (mode == BUILDING_SYMBOL_TREE) {
        // Declaring in the containing function scope avoids obfuscation
        // errors; no warning is shown if the symbol already exists here.
        scope.declareIdentifier(exceptionName);
    } else {
        getIdentifier(exceptionName, scope).incrementRefcount();
    }
    token = consumeToken();
    assert token.getType() == Token.RP;
}
// Parses a single expression: consumes tokens until a comma or semi-colon is
// reached at the same brace/bracket/paren nesting level the expression
// started at. Nested function expressions are parsed recursively; free
// identifier references are recorded (declared or ref-counted) depending on
// the current pass (mode).
private void parseExpression() {
// Parse the expression until we encounter a comma or a semi-colon
// in the same brace nesting, bracket nesting and paren nesting.
// Parse functions if any...
String symbol;
JavaScriptToken token;
ScriptOrFnScope currentScope;
JavaScriptIdentifier identifier;
int expressionBraceNesting = braceNesting;
int bracketNesting = 0;
int parensNesting = 0;
int length = tokens.size();
while (offset < length) {
token = consumeToken();
currentScope = getCurrentScope();
switch (token.getType()) {
case Token.SEMI:
case Token.COMMA:
// End of the expression only when we are back at the starting nesting.
if (braceNesting == expressionBraceNesting &&
bracketNesting == 0 &&
parensNesting == 0) {
return;
}
break;
case Token.FUNCTION:
parseFunctionDeclaration();
break;
case Token.LC:
braceNesting++;
break;
case Token.RC:
braceNesting--;
assert braceNesting >= expressionBraceNesting;
break;
case Token.LB:
bracketNesting++;
break;
case Token.RB:
bracketNesting--;
break;
case Token.LP:
parensNesting++;
break;
case Token.RP:
parensNesting--;
break;
case Token.CONDCOMMENT:
// JScript conditional comments defeat static analysis, so the whole
// scope is excluded from obfuscation.
if (mode == BUILDING_SYMBOL_TREE) {
protectScopeFromObfuscation(currentScope);
warn("Using JScript conditional comments is not recommended." + (munge ? " Moreover, using JScript conditional comments reduces the level of compression!" : ""), true);
}
break;
case Token.NAME:
symbol = token.getValue();
if (mode == BUILDING_SYMBOL_TREE) {
if (symbol.equals("eval")) {
protectScopeFromObfuscation(currentScope);
warn("Using 'eval' is not recommended." + (munge ? " Moreover, using 'eval' reduces the level of compression!" : ""), true);
}
} else if (mode == CHECKING_SYMBOL_TREE) {
// Only count free identifiers: skip property accesses (a.b),
// getter/setter names, and object-literal keys.
if ((offset < 2 ||
(getToken(-2).getType() != Token.DOT &&
getToken(-2).getType() != Token.GET &&
getToken(-2).getType() != Token.SET)) &&
getToken(0).getType() != Token.OBJECTLIT) {
identifier = getIdentifier(symbol, currentScope);
if (identifier == null) {
if (symbol.length() <= 3 && !builtin.contains(symbol)) {
// Here, we found an undeclared and un-namespaced symbol that is
// 3 characters or less in length. Declare it in the global scope.
// We don't need to declare longer symbols since they won't cause
// any conflict with other munged symbols.
globalScope.declareIdentifier(symbol);
// I removed the warning since was only being done when
// for identifiers 3 chars or less, and was just causing
// noise for people who happen to rely on an externally
// declared variable that happen to be that short. We either
// should always warn or never warn -- the fact that we
// declare the short symbols in the global space doesn't
// change anything.
// warn("Found an undeclared symbol: " + symbol, true);
}
} else {
identifier.incrementRefcount();
}
}
}
break;
}
}
}
// Parses the body of a script or function scope, dispatching on token type:
// var/const declarations, nested function declarations, catch blocks, and
// identifier references. Returns once the scope's closing brace is reached
// (braceNesting falls back to the scope's own nesting level). The global
// scope has no closing brace, so for it the loop simply runs off the end of
// the token stream.
private void parseScope(ScriptOrFnScope scope) {
String symbol;
JavaScriptToken token;
JavaScriptIdentifier identifier;
int length = tokens.size();
enterScope(scope);
while (offset < length) {
token = consumeToken();
switch (token.getType()) {
case Token.VAR:
if (mode == BUILDING_SYMBOL_TREE && scope.incrementVarCount() > 1) {
warn("Try to use a single 'var' statement per scope.", true);
}
/* FALLSTHROUGH */
case Token.CONST:
// The var keyword is followed by at least one symbol name.
// If several symbols follow, they are comma separated.
for (; ;) {
token = consumeToken();
assert token.getType() == Token.NAME;
if (mode == BUILDING_SYMBOL_TREE) {
symbol = token.getValue();
if (scope.getIdentifier(symbol) == null) {
scope.declareIdentifier(symbol);
} else {
warn("The variable " + symbol + " has already been declared in the same scope...", true);
}
}
token = getToken(0);
assert token.getType() == Token.SEMI ||
token.getType() == Token.ASSIGN ||
token.getType() == Token.COMMA ||
token.getType() == Token.IN;
if (token.getType() == Token.IN) {
// "for (var x in ...)": the loop header continues, stop here.
break;
} else {
// Consume the initializer (if any) up to the next ',' or ';'.
parseExpression();
token = getToken(-1);
if (token.getType() == Token.SEMI) {
break;
}
}
}
break;
case Token.FUNCTION:
parseFunctionDeclaration();
break;
case Token.LC:
braceNesting++;
break;
case Token.RC:
braceNesting--;
assert braceNesting >= scope.getBraceNesting();
if (braceNesting == scope.getBraceNesting()) {
// Closing brace of this scope: hand control back to the caller.
leaveCurrentScope();
return;
}
break;
case Token.WITH:
if (mode == BUILDING_SYMBOL_TREE) {
// Inside a 'with' block, it is impossible to figure out
// statically whether a symbol is a local variable or an
// object member. As a consequence, the only thing we can
// do is turn the obfuscation off for the highest scope
// containing the 'with' block.
protectScopeFromObfuscation(scope);
warn("Using 'with' is not recommended." + (munge ? " Moreover, using 'with' reduces the level of compression!" : ""), true);
}
break;
case Token.CATCH:
parseCatch();
break;
case Token.CONDCOMMENT:
if (mode == BUILDING_SYMBOL_TREE) {
protectScopeFromObfuscation(scope);
warn("Using JScript conditional comments is not recommended." + (munge ? " Moreover, using JScript conditional comments reduces the level of compression." : ""), true);
}
break;
case Token.NAME:
symbol = token.getValue();
if (mode == BUILDING_SYMBOL_TREE) {
if (symbol.equals("eval")) {
protectScopeFromObfuscation(scope);
warn("Using 'eval' is not recommended." + (munge ? " Moreover, using 'eval' reduces the level of compression!" : ""), true);
}
} else if (mode == CHECKING_SYMBOL_TREE) {
// Only count free identifiers; skip property accesses and
// object-literal keys.
if ((offset < 2 || getToken(-2).getType() != Token.DOT) &&
getToken(0).getType() != Token.OBJECTLIT) {
identifier = getIdentifier(symbol, scope);
if (identifier == null) {
if (symbol.length() <= 3 && !builtin.contains(symbol)) {
// Here, we found an undeclared and un-namespaced symbol that is
// 3 characters or less in length. Declare it in the global scope.
// We don't need to declare longer symbols since they won't cause
// any conflict with other munged symbols.
globalScope.declareIdentifier(symbol);
// warn("Found an undeclared symbol: " + symbol, true);
}
} else {
identifier.incrementRefcount();
}
}
}
break;
}
}
}
// Pass 1: walk the entire token stream, recording every scope and the
// identifiers declared in it. Scopes are indexed by the token offset at
// which they start so later passes can find them again.
private void buildSymbolTree() {
    mode = BUILDING_SYMBOL_TREE;
    offset = 0;
    braceNesting = 0;
    scopes.clear();
    indexedScopes.clear();
    indexedScopes.put(new Integer(0), globalScope);
    parseScope(globalScope);
}
// Pass 2 (skipped when munging is disabled): re-walk the token stream to
// (a) declare, in the global scope, any undeclared un-namespaced symbol of
// three characters or less — e.g. a local munged to 'abc' could otherwise
// collide with an undeclared global 'abc' — and (b) count how many times
// each identifier is referenced. This must happen after the whole file has
// been parsed and before the symbol tree is munged; declaring extra symbols
// in the global scope is harmless. Finally, munge the tree.
private void mungeSymboltree() {
    if (!munge) {
        return;
    }
    mode = CHECKING_SYMBOL_TREE;
    offset = 0;
    braceNesting = 0;
    scopes.clear();
    parseScope(globalScope);
    globalScope.munge();
}
// Pass 3: re-emits the token stream as compressed source, substituting munged
// identifier names and dropping whitespace and removable semi-colons/commas.
// linebreakpos >= 0 forces a line break after the first semi-colon emitted
// past that column (for tools that dislike very long lines). Returns the
// output buffer; a trailing semi-colon is always appended (unless the last
// token is a preserved comment) so minified files can be concatenated safely.
private StringBuffer printSymbolTree(int linebreakpos, boolean preserveAllSemiColons)
throws IOException {
offset = 0;
braceNesting = 0;
scopes.clear();
String symbol;
JavaScriptToken token;
JavaScriptToken lastToken = getToken(0);
ScriptOrFnScope currentScope;
JavaScriptIdentifier identifier;
int length = tokens.size();
StringBuffer result = new StringBuffer();
int linestartpos = 0;
enterScope(globalScope);
while (offset < length) {
token = consumeToken();
symbol = token.getValue();
currentScope = getCurrentScope();
switch (token.getType()) {
case Token.GET:
case Token.SET:
lastToken = token;
// Falls through: get/set accessor names are printed like NAME below.
case Token.NAME:
// Property accesses (a.b) and object-literal keys are emitted verbatim;
// only free identifiers are resolved (and possibly munged).
if (offset >= 2 && getToken(-2).getType() == Token.DOT ||
getToken(0).getType() == Token.OBJECTLIT) {
result.append(symbol);
} else {
identifier = getIdentifier(symbol, currentScope);
if (identifier != null) {
if (identifier.getMungedValue() != null) {
result.append(identifier.getMungedValue());
} else {
result.append(symbol);
}
if (currentScope != globalScope && identifier.getRefcount() == 0) {
warn("The symbol " + symbol + " is declared but is apparently never used.\nThis code can probably be written in a more compact way.", true);
}
} else {
result.append(symbol);
}
}
break;
case Token.REGEXP:
case Token.STRING:
result.append(symbol);
break;
case Token.NUMBER:
if (getToken(0).getType() == Token.DOT) {
// calling methods on int requires a leading dot so JS doesn't
// treat the method as the decimal component of a float
result.append('(');
result.append(symbol);
result.append(')');
} else {
result.append(symbol);
}
break;
case Token.ADD:
case Token.SUB:
result.append((String) literals.get(new Integer(token.getType())));
if (offset < length) {
token = getToken(0);
// NOTE(review): 'Token.DEC' appears twice in the test below; the
// symmetry with INC/ADD suggests the last one was meant to be
// Token.SUB. Left untouched (matches upstream behavior) — a binary
// SUB directly after '+'/'-' should not normally occur; confirm
// before changing.
if (token.getType() == Token.INC ||
token.getType() == Token.DEC ||
token.getType() == Token.ADD ||
token.getType() == Token.DEC) {
// Handle the case x +/- ++/-- y
// We must keep a white space here. Otherwise, x +++ y would be
// interpreted as x ++ + y by the compiler, which is a bug (due
// to the implicit assignment being done on the wrong variable)
result.append(' ');
} else if (token.getType() == Token.POS && getToken(-1).getType() == Token.ADD ||
token.getType() == Token.NEG && getToken(-1).getType() == Token.SUB) {
// Handle the case x + + y and x - - y
result.append(' ');
}
}
break;
case Token.FUNCTION:
// After get/set the 'function' keyword is implicit and omitted.
if (lastToken.getType() != Token.GET && lastToken.getType() != Token.SET) {
result.append("function");
}
lastToken = token;
token = consumeToken();
if (token.getType() == Token.NAME) {
result.append(' ');
symbol = token.getValue();
identifier = getIdentifier(symbol, currentScope);
assert identifier != null;
if (identifier.getMungedValue() != null) {
result.append(identifier.getMungedValue());
} else {
result.append(symbol);
}
if (currentScope != globalScope && identifier.getRefcount() == 0) {
warn("The symbol " + symbol + " is declared but is apparently never used.\nThis code can probably be written in a more compact way.", true);
}
token = consumeToken();
}
assert token.getType() == Token.LP;
result.append('(');
// The function scope was registered at this offset during pass 1.
currentScope = (ScriptOrFnScope) indexedScopes.get(new Integer(offset));
enterScope(currentScope);
while ((token = consumeToken()).getType() != Token.RP) {
assert token.getType() == Token.NAME || token.getType() == Token.COMMA;
if (token.getType() == Token.NAME) {
symbol = token.getValue();
identifier = getIdentifier(symbol, currentScope);
assert identifier != null;
if (identifier.getMungedValue() != null) {
result.append(identifier.getMungedValue());
} else {
result.append(symbol);
}
} else if (token.getType() == Token.COMMA) {
result.append(',');
}
}
result.append(')');
token = consumeToken();
assert token.getType() == Token.LC;
result.append('{');
braceNesting++;
token = getToken(0);
if (token.getType() == Token.STRING &&
getToken(1).getType() == Token.SEMI) {
// This is a hint. Skip it!
consumeToken();
consumeToken();
}
break;
case Token.RETURN:
case Token.TYPEOF:
result.append(literals.get(new Integer(token.getType())));
// No space needed after 'return' and 'typeof' when followed
// by '(', '[', '{', a string or a regexp.
if (offset < length) {
token = getToken(0);
if (token.getType() != Token.LP &&
token.getType() != Token.LB &&
token.getType() != Token.LC &&
token.getType() != Token.STRING &&
token.getType() != Token.REGEXP &&
token.getType() != Token.SEMI) {
result.append(' ');
}
}
break;
case Token.CASE:
case Token.THROW:
result.append(literals.get(new Integer(token.getType())));
// White-space needed after 'case' and 'throw' when not followed by a string.
if (offset < length && getToken(0).getType() != Token.STRING) {
result.append(' ');
}
break;
case Token.BREAK:
case Token.CONTINUE:
result.append(literals.get(new Integer(token.getType())));
if (offset < length && getToken(0).getType() != Token.SEMI) {
// If 'break' or 'continue' is not followed by a semi-colon, it must
// be followed by a label, hence the need for a white space.
result.append(' ');
}
break;
case Token.LC:
result.append('{');
braceNesting++;
break;
case Token.RC:
result.append('}');
braceNesting--;
assert braceNesting >= currentScope.getBraceNesting();
if (braceNesting == currentScope.getBraceNesting()) {
leaveCurrentScope();
}
break;
case Token.SEMI:
// No need to output a semi-colon if the next character is a right-curly...
if (preserveAllSemiColons || offset < length && getToken(0).getType() != Token.RC) {
result.append(';');
}
if (linebreakpos >= 0 && result.length() - linestartpos > linebreakpos) {
// Some source control tools don't like it when files containing lines longer
// than, say 8000 characters, are checked in. The linebreak option is used in
// that case to split long lines after a specific column.
result.append('\n');
linestartpos = result.length();
}
break;
case Token.COMMA:
// No need to output a comma if the next character is a right-curly or a right-square bracket
if (offset < length && getToken(0).getType() != Token.RC && getToken(0).getType() != Token.RB) {
result.append(',');
}
break;
case Token.CONDCOMMENT:
case Token.KEEPCOMMENT:
// Preserved comments are re-emitted on their own line; '!' marks a
// user-preserved (KEEPCOMMENT) block.
if (result.length() > 0 && result.charAt(result.length() - 1) != '\n') {
result.append("\n");
}
result.append("/*");
if (token.getType() == Token.KEEPCOMMENT) {
result.append("!");
}
result.append(symbol);
result.append("*/\n");
break;
default:
String literal = (String) literals.get(new Integer(token.getType()));
if (literal != null) {
result.append(literal);
} else {
warn("This symbol cannot be printed: " + symbol, true);
}
break;
}
}
// Append a semi-colon at the end, even if unnecessary semi-colons are
// supposed to be removed. This is especially useful when concatenating
// several minified files (the absence of an ending semi-colon at the
// end of one file may very likely cause a syntax error)
if (!preserveAllSemiColons &&
result.length() > 0 &&
getToken(-1).getType() != Token.CONDCOMMENT &&
getToken(-1).getType() != Token.KEEPCOMMENT) {
if (result.charAt(result.length() - 1) == '\n') {
result.setCharAt(result.length() - 1, ';');
} else {
result.append(';');
}
}
return result;
}
// Writes the complete symbol-to-munged-name mapping of the global scope
// (and, recursively, its child scopes) to the supplied writer.
private void printMungeMapping(Writer map) throws IOException {
    StringBuffer mappingBuffer = new StringBuffer();
    globalScope.getFullMapping(mappingBuffer, "");
    map.write(mappingBuffer.toString());
}
}
| |
package client;
import org.apache.log4j.Logger;
import server.CarDataModel;
import javax.swing.*;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Unmarshaller;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.*;
import java.net.Socket;
import java.util.ArrayList;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.ExecutionException;
/**
 * Swing client simulating a car-park entrance barrier.
 *
 * <p>The client connects to the car-park server on 127.0.0.1:10031, lets the
 * user submit a car (make + licence plate) as a small XML document, and polls
 * the server every two seconds for the number of cars parked on each floor.
 * While both floors are full, submitted cars are queued locally and re-sent
 * once the server reports free spaces again.
 */
public class EntranceClient extends JFrame
{
    /** Number of parking spaces on each floor, as modelled by the server. */
    private static final int FLOOR_CAPACITY = 20;
    /** Delay between polls of the server for floor occupancy, in milliseconds. */
    private static final long POLL_INTERVAL_MS = 2 * 1000;

    private Logger logger;
    // UI widgets, bound to the GUI-designer form (EntranceForm).
    private JButton connectToCarParkButton;
    private JButton disconnectFromCarParkButton;
    private JComboBox carMakeCombo;
    private JComboBox carLicenceCombo;
    private JLabel carsOnGroundFloor;
    private JLabel carsOnFirstFloor;
    private JButton sendCarDetailsButton;
    private JPanel EntranceForm;
    private JLabel connectionStatus;
    private JLabel carsQueued;
    // volatile: written on the EDT, read by the polling timer's worker thread.
    private volatile boolean isConnected;
    private Socket clientSocket;
    private Timer pollTimer;
    /** Cars waiting for a free space; accessed from the Swing EDT only. */
    private final ArrayList<CarDataModel> queuedCarsCollection;

    public EntranceClient()
    {
        super("Entrance Client");
        setContentPane(EntranceForm);
        logger = org.apache.log4j.Logger.getLogger(this.getClass().getCanonicalName());
        connectionStatus.setVisible(false);
        disconnectFromCarParkButton.setEnabled(false);
        // Populate the make selector (display order matters, so keep the list order).
        String[] carMakes = {
                "Acura", "Aston Martin", "Audi", "Bentley", "BMW", "Bugatti",
                "Dodge", "Ferrari", "Fiat", "Ford", "Honda", "Hyundai",
                "Infiniti", "Jaguar", "Jeep", "KIA", "Koenigsegg", "Land Rover",
                "Lexus", "Lotus", "Maserati", "Mazda", "McLaren", "Mercedes",
                "MG", "Mini", "Mitsubishi", "Nissan", "Noble", "Porsche",
                "Rolls-Royce", "SAAB", "Smart", "Subaru", "Suzuki", "Tesla",
                "Toyota", "Volkswagen", "Volvo"
        };
        for (String make : carMakes)
        {
            carMakeCombo.addItem(make);
        }
        // Offer a pool of generated licence plates to choose from.
        for (int i = 0; i < 100; i++)
        {
            carLicenceCombo.addItem(LicencePlate.generateLicensePlate());
        }
        this.addWindowListener(new java.awt.event.WindowAdapter()
        {
            @Override
            public void windowClosing(java.awt.event.WindowEvent windowEvent)
            {
                try
                {
                    Dispose();
                }
                catch (IOException e)
                {
                    logger.error("Error while disposing client resources", e);
                }
            }
        });
        connectToCarParkButton.addActionListener(new ActionListener()
        {
            @Override
            public void actionPerformed(ActionEvent e)
            {
                try
                {
                    ConnectMain();
                    if (isConnected)
                    {
                        connectToCarParkButton.setEnabled(false);
                        disconnectFromCarParkButton.setEnabled(true);
                        connectionStatus.setVisible(true);
                    }
                }
                catch (IOException e1)
                {
                    logger.error("Error while connecting to the server", e1);
                }
            }
        });
        disconnectFromCarParkButton.addActionListener(new ActionListener()
        {
            @Override
            public void actionPerformed(ActionEvent e)
            {
                try
                {
                    Dispose();
                    if (!isConnected)
                    {
                        connectToCarParkButton.setEnabled(true);
                        disconnectFromCarParkButton.setEnabled(false);
                        connectionStatus.setVisible(false);
                    }
                }
                catch (IOException e1)
                {
                    logger.error("Error while disconnecting from the server", e1);
                }
            }
        });
        pack();
        setVisible(true);
        sendCarDetailsButton.addActionListener(new ActionListener()
        {
            @Override
            public void actionPerformed(ActionEvent e)
            {
                int selectedIndexMake = carMakeCombo.getSelectedIndex();
                String carMake = carMakeCombo.getItemAt(selectedIndexMake).toString();
                int selectedIndexLicence = carLicenceCombo.getSelectedIndex();
                String carLicence = carLicenceCombo.getItemAt(selectedIndexLicence).toString();
                try
                {
                    SendData(carMake, carLicence);
                }
                catch (IOException e1)
                {
                    logger.error("Error while sending car details", e1);
                }
                catch (JAXBException e1)
                {
                    logger.error("Error while sending car details", e1);
                }
            }
        });
        queuedCarsCollection = new ArrayList<CarDataModel>();
    }

    /**
     * Opens the socket to the car-park server and starts the polling timer
     * that refreshes the floor-occupancy labels every two seconds.
     */
    private void ConnectMain() throws IOException
    {
        try
        {
            logger.info("Opening Client Connection..");
            clientSocket = new Socket("127.0.0.1", 10031);
            if (clientSocket.isConnected())
            {
                logger.info("Connected to Server on: " + clientSocket.getInetAddress() + ":" + clientSocket.getPort());
                isConnected = clientSocket.isConnected();
                pollTimer = new Timer();
                pollTimer.scheduleAtFixedRate(new WorkerClass(), 0, POLL_INTERVAL_MS);
                logger.info("Polling timer enabled to run at intervals of 2 seconds");
            }
        }
        catch (Exception e)
        {
            logger.error(e.getMessage(), e);
        }
    }

    /**
     * Sends the given car to the server as an XML fragment when the car park
     * has room; otherwise the car is queued locally until spaces free up.
     * Does nothing when not connected.
     */
    private void SendData(String carMake, String carLicence) throws IOException, JAXBException
    {
        if (isConnected)
        {
            logger.info("User Requested to Send Car Data");
            StringBuilder outgoingXml = new StringBuilder();
            outgoingXml.append("<?xml version=\"1.0\" encoding=\"utf-8\"?>");
            outgoingXml.append("<Client Type=\"ENTRANCE\"><Car Make=\"" + carMake);
            outgoingXml.append("\" Licence=\"" + carLicence);
            outgoingXml.append("\"/>");
            outgoingXml.append("</Client>\r\n");
            if (Integer.parseInt(carsOnGroundFloor.getText()) != FLOOR_CAPACITY || Integer.parseInt(carsOnFirstFloor.getText()) != FLOOR_CAPACITY)
            {
                logger.info("Car Park has spaces available to Park in.");
                PrintWriter printWriter = new PrintWriter(clientSocket.getOutputStream(), true);
                printWriter.println(outgoingXml);
            }
            else
            {
                logger.info("No free spaces in Car Park. Car to be queued at Entrance.");
                CarDataModel carModel = new CarDataModel();
                carModel.setCarMake(carMake);
                carModel.setCarLicence(carLicence);
                queuedCarsCollection.add(carModel);
                logger.info("Car Queued - Make: " + carMake + " Licence: " + carLicence);
            }
        }
    }

    /**
     * Closes the socket and stops the polling timer. Safe to call repeatedly;
     * does nothing when already disconnected.
     */
    private void Dispose() throws IOException
    {
        if (isConnected)
        {
            logger.info("Client Disconnected from Server. Disposing all resources.");
            if (pollTimer != null)
            {
                // Stop polling; otherwise the timer keeps firing after disconnect.
                pollTimer.cancel();
                pollTimer = null;
            }
            clientSocket.close();
            isConnected = false;
        }
    }

    /**
     * Re-sends up to {@code freeSpaces} queued cars through {@link #SendData},
     * always draining from the head of the queue. The loop is bounded by the
     * queue size, so it can never index past the end of the collection.
     */
    private void DequeueCars(int freeSpaces) throws IOException, JAXBException
    {
        int carsToSend = Math.min(freeSpaces, queuedCarsCollection.size());
        for (int i = 0; i < carsToSend; i++)
        {
            CarDataModel model = queuedCarsCollection.remove(0);
            SendData(model.CarMake, model.CarLicence);
        }
    }

    /**
     * Timer task that reads a floor-occupancy update from the server off the
     * EDT (doInBackground) and applies it to the UI and the local queue on the
     * EDT (done).
     */
    class WorkerClass extends TimerTask
    {
        @Override
        public void run()
        {
            new SwingWorker<FloorSpaceDataModel, TimerTask>()
            {
                @Override
                protected FloorSpaceDataModel doInBackground() throws Exception
                {
                    try
                    {
                        if (isConnected)
                        {
                            BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(clientSocket.getInputStream()));
                            String incomingParkingSpacesUpdate;
                            while ((incomingParkingSpacesUpdate = bufferedReader.readLine()) != null)
                            {
                                logger.info("Received Space Info from Server: " + incomingParkingSpacesUpdate);
                                JAXBContext jaxbContext = JAXBContext.newInstance(FloorSpaceDataModel.class);
                                Unmarshaller unmarshaller = jaxbContext.createUnmarshaller();
                                StringReader stringReader = new StringReader(incomingParkingSpacesUpdate);
                                // Return the first complete update; the next poll reads the next line.
                                return (FloorSpaceDataModel) unmarshaller.unmarshal(stringReader);
                            }
                        }
                    }
                    catch (JAXBException e)
                    {
                        logger.error(e.getMessage(), e);
                    }
                    catch (IOException e)
                    {
                        logger.error(e.getMessage(), e);
                    }
                    return null;
                }

                @Override
                protected void done()
                {
                    super.done();
                    try
                    {
                        if (isConnected)
                        {
                            FloorSpaceDataModel s = get();
                            carsQueued.setText(Integer.toString(queuedCarsCollection.size()));
                            if (s == null)
                            {
                                // No update was read from the server on this cycle.
                                return;
                            }
                            int groundFloorSpace = 0;
                            int firstFloorSpace = 0;
                            for (final FloorInfo info : s.FloorInfoList)
                            {
                                if (info.Level == FloorLevel.GROUNDFLOOR)
                                {
                                    logger.info("Current Number of Cars Parked on Ground Floor: " + info.SpaceCount);
                                    carsOnGroundFloor.setText(info.SpaceCount);
                                    groundFloorSpace = Integer.parseInt(info.SpaceCount);
                                }
                                else if (info.Level == FloorLevel.FIRSTFLOOR)
                                {
                                    logger.info("Current Number of Cars Parked on First Floor: " + info.SpaceCount);
                                    carsOnFirstFloor.setText(info.SpaceCount);
                                    firstFloorSpace = Integer.parseInt(info.SpaceCount);
                                }
                            }
                            if (!queuedCarsCollection.isEmpty())
                            {
                                logger.info("Queued Cars Found. ");
                                int freeGroundFloorSpaces = FLOOR_CAPACITY - groundFloorSpace;
                                int freeFirstFloorSpaces = FLOOR_CAPACITY - firstFloorSpace;
                                if (freeGroundFloorSpaces > 0)
                                {
                                    logger.info("Spaces available on Ground Floor: " + freeGroundFloorSpaces + ". Dequeueing Cars...");
                                    DequeueCars(freeGroundFloorSpaces);
                                }
                                if (freeFirstFloorSpaces > 0)
                                {
                                    logger.info("Spaces available on First Floor: " + freeFirstFloorSpaces + ". Dequeueing Cars...");
                                    DequeueCars(freeFirstFloorSpaces);
                                }
                            }
                        }
                    }
                    catch (InterruptedException e)
                    {
                        // Restore the interrupt flag so the EDT still sees it.
                        Thread.currentThread().interrupt();
                        logger.error(e.getMessage(), e);
                    }
                    catch (ExecutionException e)
                    {
                        logger.error(e.getMessage(), e);
                    }
                    catch (JAXBException e)
                    {
                        logger.error("XML Parsing Error: " + e.getMessage(), e);
                    }
                    catch (IOException e)
                    {
                        logger.error(e.getMessage(), e);
                    }
                }
            }.execute();
        }
    }
}
| |
/*
* Copyright 2016 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.pubsub.v1;
import com.google.api.client.util.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.protobuf.Empty;
import com.google.pubsub.v1.AcknowledgeRequest;
import com.google.pubsub.v1.GetSubscriptionRequest;
import com.google.pubsub.v1.ModifyAckDeadlineRequest;
import com.google.pubsub.v1.PublisherGrpc.PublisherImplBase;
import com.google.pubsub.v1.PullRequest;
import com.google.pubsub.v1.PullResponse;
import com.google.pubsub.v1.StreamingPullRequest;
import com.google.pubsub.v1.StreamingPullResponse;
import com.google.pubsub.v1.SubscriberGrpc.SubscriberImplBase;
import com.google.pubsub.v1.Subscription;
import io.grpc.Status;
import io.grpc.Status.Code;
import io.grpc.StatusException;
import io.grpc.stub.StreamObserver;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
/**
 * A fake implementation of {@link SubscriberImplBase}, that can be used to test clients of a Cloud
 * Pub/Sub Subscriber.
 */
class FakeSubscriberServiceImpl extends SubscriberImplBase {
// Set once a stream registers a subscription name; threads block on this
// object's monitor in waitForRegistedSubscription().
private final AtomicBoolean subscriptionInitialized = new AtomicBoolean(false);
// Name of the registered subscription; empty until the first stream sets it.
private String subscription = "";
// Ack deadline reported by getSubscription(); updated by streaming requests.
private final AtomicInteger messageAckDeadline =
new AtomicInteger(Subscriber.MIN_ACK_DEADLINE_SECONDS);
// Streams currently open / already closed, for tests to inspect.
private final List<Stream> openedStreams = new ArrayList<>();
private final List<Stream> closedStreams = new ArrayList<>();
// Ack IDs and deadline modifications received so far (streaming or unary).
private final List<String> acks = new ArrayList<>();
private final List<ModifyAckDeadline> modAckDeadlines = new ArrayList<>();
// Unary pull bookkeeping: requests seen, and canned responses to hand out.
private final List<PullRequest> receivedPullRequest = new ArrayList<>();
private final BlockingQueue<PullResponse> pullResponses = new LinkedBlockingDeque<>();
// Index of the open stream the next send/error call targets (round-robin).
private int currentStream;
// Which side of a streaming connection initiated the close.
public static enum CloseSide {
SERVER,
CLIENT
}
/**
 * Value object recording a request to change the ack deadline of a single
 * message, identified by its ack ID. Instances are immutable.
 */
public static final class ModifyAckDeadline {
    private final String ackId;
    private final long seconds;

    public ModifyAckDeadline(String ackId, long seconds) {
        Preconditions.checkNotNull(ackId);
        this.ackId = ackId;
        this.seconds = seconds;
    }

    /** Returns the ack ID of the message whose deadline is being modified. */
    public String getAckId() {
        return ackId;
    }

    /** Returns the requested deadline, in seconds. */
    public long getSeconds() {
        return seconds;
    }

    @Override
    public boolean equals(Object obj) {
        if (!(obj instanceof ModifyAckDeadline)) {
            return false;
        }
        ModifyAckDeadline other = (ModifyAckDeadline) obj;
        return other.ackId.equals(this.ackId) && other.seconds == this.seconds;
    }

    @Override
    public int hashCode() {
        // Combine both fields (equals compares both) so instances differing
        // only in deadline do not all collide in hash-based collections.
        return 31 * ackId.hashCode() + Long.hashCode(seconds);
    }

    @Override
    public String toString() {
        return "Ack ID: " + ackId + ", deadline seconds: " + seconds;
    }
}
// Mutable pair tying a client's request observer to the server's response
// observer for one streaming-pull connection.
private static class Stream {
private StreamObserver<StreamingPullRequest> requestObserver;
private StreamObserver<StreamingPullResponse> responseObserver;
}
// Server-side view of one streaming-pull call. The first onNext() must carry
// the subscription name (and a positive stream ack deadline); subsequent
// messages may carry acks and ack-deadline modifications, which are recorded
// on the enclosing fake so tests can inspect them.
private class StreamingPullRequestObserver implements StreamObserver<StreamingPullRequest> {
private final Stream stream;
private final StreamObserver<StreamingPullResponse> responseObserver;
StreamingPullRequestObserver(
Stream stream, StreamObserver<StreamingPullResponse> responseObserver) {
this.stream = stream;
this.responseObserver = responseObserver;
}
@Override
public void onNext(StreamingPullRequest request) {
synchronized (stream) {
if (!request.getSubscription().isEmpty()) {
// Only one subscription may ever be registered with this fake.
if (!subscription.isEmpty() && !subscription.equals(request.getSubscription())) {
responseObserver.onError(
new StatusException(
Status.fromCode(Code.ABORTED)
.withDescription("Can only set one subscription.")));
return;
}
synchronized (subscriptionInitialized) {
if (subscription.isEmpty()) {
if (request.getStreamAckDeadlineSeconds() == 0) {
// NOTE(review): unlike the other error paths in this method, this
// onError is not followed by a return, so the subscription is
// still registered below — confirm whether that is intended.
responseObserver.onError(
new StatusException(
Status.fromCode(Code.INVALID_ARGUMENT)
.withDescription("A stream must be initialized with a ack deadline.")));
}
subscription = request.getSubscription();
subscriptionInitialized.set(true);
subscriptionInitialized.notifyAll();
}
}
addOpenedStream(stream);
stream.notifyAll();
}
if (request.getStreamAckDeadlineSeconds() > 0) {
// Wake any thread waiting for a deadline update.
synchronized (messageAckDeadline) {
messageAckDeadline.set(request.getStreamAckDeadlineSeconds());
messageAckDeadline.notifyAll();
}
}
if (subscription.isEmpty()) {
closeStream(stream);
responseObserver.onError(
new StatusException(
Status.fromCode(Code.ABORTED)
.withDescription(
"The stream has not been properly initialized with a "
+ "subscription.")));
return;
}
if (request.getAckIdsCount() > 0) {
addReceivedAcks(request.getAckIdsList());
}
if (request.getModifyDeadlineAckIdsCount() > 0) {
// Ack IDs and deadline values are parallel lists and must match up.
if (request.getModifyDeadlineAckIdsCount() != request.getModifyDeadlineSecondsCount()) {
closeStream(stream);
responseObserver.onError(
new StatusException(
Status.fromCode(Code.ABORTED)
.withDescription("Invalid modify ack deadline request.")));
return;
}
Iterator<String> ackIds = request.getModifyDeadlineAckIdsList().iterator();
Iterator<Integer> seconds = request.getModifyDeadlineSecondsList().iterator();
while (ackIds.hasNext() && seconds.hasNext()) {
addReceivedModifyAckDeadline(new ModifyAckDeadline(ackIds.next(), seconds.next()));
}
}
}
}
@Override
public void onError(Throwable error) {
closeStream(stream);
}
@Override
public void onCompleted() {
closeStream(stream);
stream.responseObserver.onCompleted();
}
}
// Creates the paired request/response observers for a new streaming-pull
// call and hands the request observer back to gRPC.
@Override
public StreamObserver<StreamingPullRequest> streamingPull(
    StreamObserver<StreamingPullResponse> responseObserver) {
  Stream newStream = new Stream();
  newStream.responseObserver = responseObserver;
  newStream.requestObserver = new StreamingPullRequestObserver(newStream, responseObserver);
  return newStream.requestObserver;
}
// Pushes a response onto the next open stream (round-robin), blocking until
// a subscription has been registered and at least one stream is open.
public void sendStreamingResponse(StreamingPullResponse pullResponse)
    throws InterruptedException {
  waitForRegistedSubscription();
  synchronized (openedStreams) {
    waitForOpenedStreams(1);
    Stream target = openedStreams.get(getAndAdvanceCurrentStream());
    target.responseObserver.onNext(pullResponse);
  }
}
// Test hook: overrides the ack deadline that getSubscription() will report.
public void setMessageAckDeadlineSeconds(int ackDeadline) {
messageAckDeadline.set(ackDeadline);
}
// Queues a canned response to be returned by the next unary pull() call.
public void enqueuePullResponse(PullResponse response) {
pullResponses.add(response);
}
// Answers a GetSubscription RPC with a synthetic subscription echoing the
// requested name, the current ack deadline, and a fixed fake topic.
@Override
public void getSubscription(
    GetSubscriptionRequest request, StreamObserver<Subscription> responseObserver) {
  Subscription subscriptionInfo =
      Subscription.newBuilder()
          .setName(request.getSubscription())
          .setAckDeadlineSeconds(messageAckDeadline.get())
          .setTopic("fake-topic")
          .build();
  responseObserver.onNext(subscriptionInfo);
  responseObserver.onCompleted();
}
/**
 * Unary pull: records the request for test inspection, then blocks until a
 * response has been enqueued via {@code enqueuePullResponse(...)}.
 */
@Override
public void pull(PullRequest request, StreamObserver<PullResponse> responseObserver) {
  synchronized (receivedPullRequest) {
    receivedPullRequest.add(request);
  }
  try {
    responseObserver.onNext(pullResponses.take());
    responseObserver.onCompleted();
  } catch (InterruptedException e) {
    // Restore the interrupt flag before reporting the failure so the calling
    // thread still observes the interruption (the original swallowed it).
    Thread.currentThread().interrupt();
    responseObserver.onError(e);
  }
}
// Records the acked ids so tests can consume them via
// waitAndConsumeReceivedAcks(...).
@Override
public void acknowledge(
    AcknowledgeRequest request, io.grpc.stub.StreamObserver<Empty> responseObserver) {
  addReceivedAcks(request.getAckIdsList());
  responseObserver.onNext(Empty.getDefaultInstance());
  responseObserver.onCompleted();
}

// Records one ModifyAckDeadline per ack id, all with the request's deadline.
@Override
public void modifyAckDeadline(
    ModifyAckDeadlineRequest request, StreamObserver<Empty> responseObserver) {
  for (String ackId : request.getAckIdsList()) {
    addReceivedModifyAckDeadline(new ModifyAckDeadline(ackId, request.getAckDeadlineSeconds()));
  }
  responseObserver.onNext(Empty.getDefaultInstance());
  responseObserver.onCompleted();
}

// Fails the next open stream (round-robin) with the given error and closes it.
// Blocks until a subscription is registered and at least one stream is open.
public void sendError(Throwable error) throws InterruptedException {
  waitForRegistedSubscription();
  synchronized (openedStreams) {
    waitForOpenedStreams(1);
    Stream stream = openedStreams.get(getAndAdvanceCurrentStream());
    stream.responseObserver.onError(error);
    closeStream(stream);
  }
}

// Blocks until the first streaming request carrying a subscription name has
// been processed, then returns that name. (The typo in the method name is part
// of the public API, so it is left unchanged.)
public String waitForRegistedSubscription() throws InterruptedException {
  synchronized (subscriptionInitialized) {
    while (!subscriptionInitialized.get()) {
      subscriptionInitialized.wait();
    }
  }
  // NOTE(review): subscription is read after leaving the monitor; assumes it
  // is written before the flag is set and not changed afterwards — confirm.
  return subscription;
}
/**
 * Blocks until at least {@code expectedCount} acks have been received, then
 * removes and returns the first {@code expectedCount} of them.
 */
public List<String> waitAndConsumeReceivedAcks(int expectedCount) throws InterruptedException {
  synchronized (acks) {
    while (acks.size() < expectedCount) {
      acks.wait();
    }
    List<String> receivedAcksCopy = ImmutableList.copyOf(acks.subList(0, expectedCount));
    // Clear only the consumed prefix. The original used removeAll(copy), which
    // also deletes later duplicates of the same ack id — acks that were never
    // consumed would silently disappear.
    acks.subList(0, expectedCount).clear();
    return receivedAcksCopy;
  }
}
/**
 * Blocks until at least {@code expectedCount} modify-ack-deadline records have
 * been received, then removes and returns the first {@code expectedCount}.
 */
public List<ModifyAckDeadline> waitAndConsumeModifyAckDeadlines(int expectedCount)
    throws InterruptedException {
  synchronized (modAckDeadlines) {
    while (modAckDeadlines.size() < expectedCount) {
      modAckDeadlines.wait();
    }
    List<ModifyAckDeadline> modAckDeadlinesCopy =
        ImmutableList.copyOf(modAckDeadlines.subList(0, expectedCount));
    // Clear only the consumed prefix; removeAll(copy) would also delete later
    // equal records that have not been consumed yet.
    modAckDeadlines.subList(0, expectedCount).clear();
    return modAckDeadlinesCopy;
  }
}
// Blocks until at least expectedCount streams have been closed; returns the
// actual closed-stream count observed at that moment.
public int waitForClosedStreams(int expectedCount) throws InterruptedException {
  synchronized (closedStreams) {
    while (closedStreams.size() < expectedCount) {
      closedStreams.wait();
    }
    return closedStreams.size();
  }
}

// Blocks until at least expectedCount streams are open; returns the actual
// open-stream count observed at that moment.
public int waitForOpenedStreams(int expectedCount) throws InterruptedException {
  synchronized (openedStreams) {
    while (openedStreams.size() < expectedCount) {
      openedStreams.wait();
    }
    return openedStreams.size();
  }
}

// Blocks until the stream ack deadline equals exactly expectedValue.
public void waitForStreamAckDeadline(int expectedValue) throws InterruptedException {
  synchronized (messageAckDeadline) {
    while (messageAckDeadline.get() != expectedValue) {
      messageAckDeadline.wait();
    }
  }
}

// Unsynchronized size snapshot; may race with concurrent open/close.
public int getOpenedStreamsCount() {
  return openedStreams.size();
}

// Unsynchronized size snapshot; may race with concurrent open/close.
public int getClosedStreamsCount() {
  return closedStreams.size();
}

// Exposes the live internal list (not a copy); callers must not mutate it.
public List<String> getAcks() {
  return acks;
}

// Exposes the live internal list (not a copy); callers must not mutate it.
public List<ModifyAckDeadline> getModifyAckDeadlines() {
  return modAckDeadlines;
}
// Restores the fake to its pristine state between tests. The four monitors are
// nested in a fixed order (subscriptionInitialized -> openedStreams -> acks ->
// modAckDeadlines); any other code nesting these locks must use the same order
// to avoid deadlock.
public void reset() {
  synchronized (subscriptionInitialized) {
    synchronized (openedStreams) {
      synchronized (acks) {
        synchronized (modAckDeadlines) {
          openedStreams.clear();
          closedStreams.clear();
          acks.clear();
          modAckDeadlines.clear();
          subscriptionInitialized.set(false);
          subscription = "";
          pullResponses.clear();
          receivedPullRequest.clear();
          currentStream = 0;
        }
      }
    }
  }
}
// Registers a newly opened stream and wakes threads blocked in
// waitForOpenedStreams(...).
private void addOpenedStream(Stream stream) {
  synchronized (openedStreams) {
    openedStreams.add(stream);
    openedStreams.notifyAll();
  }
}
/**
 * Moves a stream from the open set to the closed set and wakes threads blocked
 * in {@code waitForClosedStreams(...)}.
 */
private void closeStream(Stream stream) {
  synchronized (openedStreams) {
    openedStreams.remove(stream);
  }
  // Mutate and signal closedStreams under its own monitor. The original added
  // to closedStreams while holding only the openedStreams lock, racing with
  // waitForClosedStreams(...), which reads the list under the closedStreams
  // lock.
  synchronized (closedStreams) {
    closedStreams.add(stream);
    closedStreams.notifyAll();
  }
}
/**
 * Returns the current round-robin stream index and advances it, wrapping at
 * the number of open streams. Callers must ensure at least one stream is open
 * (they wait via {@code waitForOpenedStreams(1)}), otherwise the modulo
 * divides by zero.
 */
private int getAndAdvanceCurrentStream() {
  // Read and advance under one lock. The original read currentStream outside
  // the synchronized block, so two concurrent callers could observe the same
  // index or a torn read/advance pair.
  synchronized (openedStreams) {
    int current = currentStream;
    currentStream = (currentStream + 1) % openedStreams.size();
    return current;
  }
}
// Appends received ack ids and wakes consumers blocked in
// waitAndConsumeReceivedAcks(...).
private void addReceivedAcks(Collection<String> newAckIds) {
  synchronized (acks) {
    acks.addAll(newAckIds);
    acks.notifyAll();
  }
}

// Appends one modify-ack-deadline record and wakes consumers blocked in
// waitAndConsumeModifyAckDeadlines(...).
private void addReceivedModifyAckDeadline(ModifyAckDeadline newAckDeadline) {
  synchronized (modAckDeadlines) {
    modAckDeadlines.add(newAckDeadline);
    modAckDeadlines.notifyAll();
  }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.notebook;
import static java.lang.String.format;
import java.io.IOException;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import org.apache.commons.lang.StringUtils;
import org.apache.zeppelin.common.JsonSerializable;
import org.apache.zeppelin.completer.CompletionType;
import org.apache.zeppelin.conf.ZeppelinConfiguration;
import org.apache.zeppelin.display.AngularObject;
import org.apache.zeppelin.display.AngularObjectRegistry;
import org.apache.zeppelin.display.Input;
import org.apache.zeppelin.interpreter.InterpreterFactory;
import org.apache.zeppelin.interpreter.InterpreterGroup;
import org.apache.zeppelin.interpreter.InterpreterInfo;
import org.apache.zeppelin.interpreter.InterpreterResult;
import org.apache.zeppelin.interpreter.InterpreterResultMessage;
import org.apache.zeppelin.interpreter.InterpreterSetting;
import org.apache.zeppelin.interpreter.InterpreterSettingManager;
import org.apache.zeppelin.interpreter.remote.RemoteAngularObjectRegistry;
import org.apache.zeppelin.interpreter.thrift.InterpreterCompletion;
import org.apache.zeppelin.notebook.repo.NotebookRepo;
import org.apache.zeppelin.notebook.utility.IdHashes;
import org.apache.zeppelin.scheduler.Job;
import org.apache.zeppelin.scheduler.Job.Status;
import org.apache.zeppelin.search.SearchService;
import org.apache.zeppelin.user.AuthenticationInfo;
import org.apache.zeppelin.user.Credentials;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
/**
 * Interpreters bound to a note.
 */
public class Note implements ParagraphJobListener, JsonSerializable {
private static final Logger logger = LoggerFactory.getLogger(Note.class);
private static final long serialVersionUID = 7920699076577612429L;
// Shared serializer used by toJson/fromJson; configured once for all notes.
private static Gson gson = new GsonBuilder()
    .setPrettyPrinting()
    .setDateFormat("yyyy-MM-dd HH:mm:ss.SSS")
    .registerTypeAdapter(Date.class, new NotebookImportDeserializer())
    .registerTypeAdapterFactory(Input.TypeAdapterFactory)
    .create();
// threadpool for delayed persist of note
private static final ScheduledThreadPoolExecutor delayedPersistThreadPool =
    new ScheduledThreadPoolExecutor(0);
static {
  // Cancelled delayed-persist tasks are removed from the queue immediately.
  delayedPersistThreadPool.setRemoveOnCancelPolicy(true);
}
// Ordered paragraph list; traversals elsewhere in this class synchronize on it.
final List<Paragraph> paragraphs = new LinkedList<>();
private String name = "";
private String id;
private Map<String, Object> noteParams = new HashMap<>();
private LinkedHashMap<String, Input> noteForms = new LinkedHashMap<>();
// transient collaborators are re-injected after deserialization, not persisted.
private transient ZeppelinConfiguration conf = ZeppelinConfiguration.create();
private Map<String, List<AngularObject>> angularObjects = new HashMap<>();
private transient InterpreterFactory factory;
private transient InterpreterSettingManager interpreterSettingManager;
private transient JobListenerFactory jobListenerFactory;
private transient NotebookRepo repo;
private transient SearchService index;
// Handle of the pending delayed-persist task, if any.
private transient ScheduledFuture delayedPersist;
private transient NoteEventListener noteEventListener;
private transient Credentials credentials;
private transient NoteNameListener noteNameListener;
/*
 * note configurations.
 * - looknfeel - cron
 */
private Map<String, Object> config = new HashMap<>();
/*
 * note information.
 * - cron : cron expression validity.
 */
private Map<String, Object> info = new HashMap<>();
// Creates a detached note (used by deserialization); collaborators must be
// injected afterwards via the set*() methods.
public Note() {
  generateId();
}

// Fully wired constructor used when creating a note inside a running server.
public Note(NotebookRepo repo, InterpreterFactory factory,
    InterpreterSettingManager interpreterSettingManager, JobListenerFactory jlFactory,
    SearchService noteIndex, Credentials credentials, NoteEventListener noteEventListener) {
  this.repo = repo;
  this.factory = factory;
  this.interpreterSettingManager = interpreterSettingManager;
  this.jobListenerFactory = jlFactory;
  this.index = noteIndex;
  this.noteEventListener = noteEventListener;
  this.credentials = credentials;
  generateId();
}

// Assigns a fresh hash-based note id.
private void generateId() {
  id = IdHashes.generateId();
}

// True when the "personalizedMode" config entry is the string "true".
public boolean isPersonalizedMode() {
  Object v = getConfig().get("personalizedMode");
  return null != v && "true".equals(v);
}

// Stores the flag as the string "true"/"false"; when turning the mode off,
// drops every per-user paragraph clone.
public void setPersonalizedMode(Boolean value) {
  String valueString = StringUtils.EMPTY;
  if (value) {
    valueString = "true";
  } else {
    valueString = "false";
  }
  getConfig().put("personalizedMode", valueString);
  clearUserParagraphs(value);
}

// Removes per-user paragraph clones when personalized mode is disabled.
private void clearUserParagraphs(boolean isPersonalized) {
  if (!isPersonalized) {
    for (Paragraph p : paragraphs) {
      p.clearUserParagraphs();
    }
  }
}
public String getId() {
  return id;
}

// Falls back to the note id when no explicit name has been set.
public String getName() {
  if (isNameEmpty()) {
    name = getId();
  }
  return name;
}

public Map<String, Object> getNoteParams() {
  return noteParams;
}

public void setNoteParams(Map<String, Object> noteParams) {
  this.noteParams = noteParams;
}

public LinkedHashMap<String, Input> getNoteForms() {
  return noteForms;
}

public void setNoteForms(LinkedHashMap<String, Input> noteForms) {
  this.noteForms = noteForms;
}
/**
 * Returns the last path segment of the note name, i.e. the display name with
 * any folder prefix stripped. A name without a slash lives in the root folder
 * and is returned unchanged.
 */
public String getNameWithoutPath() {
  String fullName = getName();
  int slash = fullName.lastIndexOf("/");
  return slash < 0 ? fullName : fullName.substring(slash + 1);
}
/**
 * @return normalized folder path, which is folderId
 */
public String getFolderId() {
  String notePath = getName();
  // Ignore first '/'
  // NOTE(review): assumes getName() never returns an empty string (it falls
  // back to the note id when the name is blank); otherwise charAt(0) throws.
  if (notePath.charAt(0) == '/')
    notePath = notePath.substring(1);
  int lastSlashIndex = notePath.lastIndexOf("/");
  // The root folder
  if (lastSlashIndex < 0) {
    return Folder.ROOT_FOLDER_ID;
  }
  String folderId = notePath.substring(0, lastSlashIndex);
  return folderId;
}

// True when the raw name is blank; getName() then falls back to the id.
public boolean isNameEmpty() {
  return this.name.trim().isEmpty();
}
/**
 * Normalizes a user-supplied note name into a canonical path: trims
 * whitespace, converts backslashes to forward slashes, collapses runs of
 * slashes, and maps an empty result to the root path "/".
 *
 * @param name raw note name, possibly containing '\\' or repeated '/'
 * @return normalized name, never empty
 */
private String normalizeNoteName(String name) {
  name = name.trim().replace("\\", "/");
  // One regex collapses any run of two or more slashes, replacing the
  // original's "///" while-loop followed by a separate "//" pass with a single
  // equivalent step.
  name = name.replaceAll("/{2,}", "/");
  if (name.isEmpty()) {
    name = "/";
  }
  return name;
}
// Sets the note name, normalizing path separators when present, and notifies
// the name listener only when the name actually changed.
public void setName(String name) {
  String oldName = this.name;
  if (name.indexOf('/') >= 0 || name.indexOf('\\') >= 0) {
    name = normalizeNoteName(name);
  }
  this.name = name;
  if (this.noteNameListener != null && !oldName.equals(name)) {
    noteNameListener.onNoteNameChanged(this, oldName);
  }
}

public void setNoteNameListener(NoteNameListener listener) {
  this.noteNameListener = listener;
}

// Injects the interpreter factory into the note and all of its paragraphs.
public void setInterpreterFactory(InterpreterFactory factory) {
  this.factory = factory;
  synchronized (paragraphs) {
    for (Paragraph p : paragraphs) {
      p.setInterpreterFactory(factory);
    }
  }
}

void setInterpreterSettingManager(InterpreterSettingManager interpreterSettingManager) {
  this.interpreterSettingManager = interpreterSettingManager;
}
/**
 * Attaches this note's paragraph job listener to the given paragraph.
 *
 * @param paragraph paragraph that must already belong to this note
 * @throws IllegalArgumentException if the paragraph belongs to another note or
 *     is not present in this note's paragraph list
 */
public void initializeJobListenerForParagraph(Paragraph paragraph) {
  final Note paragraphNote = paragraph.getNote();
  if (!paragraphNote.getId().equals(this.getId())) {
    throw new IllegalArgumentException(
        format("The paragraph %s from note %s " + "does not belong to note %s", paragraph.getId(),
            paragraphNote.getId(), this.getId()));
  }
  boolean foundParagraph = false;
  // Lock the list like every other traversal in this class; the original
  // iterated without synchronization, racing with concurrent inserts/removes.
  synchronized (paragraphs) {
    for (Paragraph ownParagraph : paragraphs) {
      if (paragraph.getId().equals(ownParagraph.getId())) {
        paragraph.setListener(this.jobListenerFactory.getParagraphJobListener(this));
        foundParagraph = true;
        // Ids are unique; no need to scan the rest of the list.
        break;
      }
    }
  }
  if (!foundParagraph) {
    throw new IllegalArgumentException(
        format("Cannot find paragraph %s " + "from note %s", paragraph.getId(),
            paragraphNote.getId()));
  }
}
void setJobListenerFactory(JobListenerFactory jobListenerFactory) {
  this.jobListenerFactory = jobListenerFactory;
}

void setNotebookRepo(NotebookRepo repo) {
  this.repo = repo;
}

public void setIndex(SearchService index) {
  this.index = index;
}

public Credentials getCredentials() {
  return credentials;
}

public void setCredentials(Credentials credentials) {
  this.credentials = credentials;
}

// Package-private view of the angular-object snapshot (live map, not a copy).
Map<String, List<AngularObject>> getAngularObjects() {
  return angularObjects;
}
/**
 * Create a new paragraph and add it to the end of the note.
 */
public Paragraph addNewParagraph(AuthenticationInfo authenticationInfo) {
  return insertNewParagraph(paragraphs.size(), authenticationInfo);
}

/**
 * Clone paragraph and add it to note.
 *
 * @param srcParagraph source paragraph
 */
void addCloneParagraph(Paragraph srcParagraph) {
  // Keep paragraph original ID
  final Paragraph newParagraph = new Paragraph(srcParagraph.getId(), this, this, factory);
  Map<String, Object> config = new HashMap<>(srcParagraph.getConfig());
  Map<String, Object> param = srcParagraph.settings.getParams();
  LinkedHashMap<String, Input> form = srcParagraph.settings.getForms();
  newParagraph.setConfig(config);
  newParagraph.settings.setParams(param);
  newParagraph.settings.setForms(form);
  newParagraph.setText(srcParagraph.getText());
  newParagraph.setTitle(srcParagraph.getTitle());
  try {
    // Deep-copy the result via a JSON round trip so the clone does not share
    // mutable result state with the source paragraph.
    Gson gson = new Gson();
    String resultJson = gson.toJson(srcParagraph.getReturn());
    InterpreterResult result = InterpreterResult.fromJson(resultJson);
    newParagraph.setReturn(result, null);
  } catch (Exception e) {
    // 'result' part of Note consists of exception, instead of actual interpreter results
    logger.warn(
        "Paragraph " + srcParagraph.getId() + " has a result with exception. " + e.getMessage());
  }
  synchronized (paragraphs) {
    paragraphs.add(newParagraph);
  }
  if (noteEventListener != null) {
    noteEventListener.onParagraphCreate(newParagraph);
  }
}
/**
 * Create a new paragraph and insert it to the note in given index.
 *
 * @param index index of paragraphs
 */
public Paragraph insertNewParagraph(int index, AuthenticationInfo authenticationInfo) {
  Paragraph paragraph = createParagraph(index, authenticationInfo);
  insertParagraph(paragraph, index);
  return paragraph;
}

// Builds a paragraph whose interpreter magic ("%name") is inherited from the
// paragraph next to the insertion point.
private Paragraph createParagraph(int index, AuthenticationInfo authenticationInfo) {
  Paragraph p = new Paragraph(this, this, factory);
  p.setAuthenticationInfo(authenticationInfo);
  setParagraphMagic(p, index);
  return p;
}

// Appends an existing paragraph to the end of the note.
public void addParagraph(Paragraph paragraph) {
  insertParagraph(paragraph, paragraphs.size());
}

// Inserts the paragraph at the given index and fires the creation event.
public void insertParagraph(Paragraph paragraph, int index) {
  synchronized (paragraphs) {
    paragraphs.add(index, paragraph);
  }
  if (noteEventListener != null) {
    noteEventListener.onParagraphCreate(paragraph);
  }
}
/**
 * Remove paragraph by id.
 *
 * @param user user whose interpreter-scoped angular objects are cleaned up
 * @param paragraphId ID of paragraph
 * @return a paragraph that was deleted, or <code>null</code> otherwise
 */
public Paragraph removeParagraph(String user, String paragraphId) {
  // Tear down angular objects and interpreter resources before touching the
  // paragraph list itself.
  removeAllAngularObjectInParagraph(user, paragraphId);
  interpreterSettingManager.removeResourcesBelongsToParagraph(getId(), paragraphId);
  synchronized (paragraphs) {
    Iterator<Paragraph> i = paragraphs.iterator();
    while (i.hasNext()) {
      Paragraph p = i.next();
      if (p.getId().equals(paragraphId)) {
        index.deleteIndexDoc(this, p);
        i.remove();
        if (noteEventListener != null) {
          noteEventListener.onParagraphRemove(p);
        }
        return p;
      }
    }
  }
  return null;
}

// Clears both the stored result and the runtime info of a paragraph.
public void clearParagraphOutputFields(Paragraph p) {
  p.setReturn(null, null);
  p.clearRuntimeInfo(null);
}

// Clears the personalized (per-user) copy of the given paragraph's output;
// returns the cleared copy, or null when the paragraph id is unknown.
public Paragraph clearPersonalizedParagraphOutput(String paragraphId, String user) {
  synchronized (paragraphs) {
    for (Paragraph p : paragraphs) {
      if (!p.getId().equals(paragraphId)) {
        continue;
      }
      // NOTE(review): assumes a personalized copy exists for this user —
      // get(user) returning null would NPE inside clearParagraphOutputFields.
      p = p.getUserParagraphMap().get(user);
      clearParagraphOutputFields(p);
      return p;
    }
  }
  return null;
}
/**
 * Clear paragraph output by id.
 *
 * @param paragraphId ID of paragraph
 * @return the cleared Paragraph, or null when the id is unknown
 */
public Paragraph clearParagraphOutput(String paragraphId) {
  synchronized (paragraphs) {
    for (Paragraph p : paragraphs) {
      if (!p.getId().equals(paragraphId)) {
        continue;
      }
      clearParagraphOutputFields(p);
      return p;
    }
  }
  return null;
}

/**
 * Clear all paragraph output of note
 */
public void clearAllParagraphOutput() {
  synchronized (paragraphs) {
    for (Paragraph p : paragraphs) {
      // NOTE(review): unlike clearParagraphOutput, this clears only the result
      // and leaves runtime info intact — confirm whether that is intentional.
      p.setReturn(null, null);
    }
  }
}
/**
 * Move paragraph into the new index (order from 0 ~ n-1).
 *
 * @param paragraphId ID of paragraph
 * @param index new index
 */
public void moveParagraph(String paragraphId, int index) {
  moveParagraph(paragraphId, index, false);
}

/**
 * Move paragraph into the new index (order from 0 ~ n-1).
 *
 * @param paragraphId ID of paragraph
 * @param index new index
 * @param throwWhenIndexIsOutOfBound whether throw IndexOutOfBoundException
 *     when index is out of bound
 */
public void moveParagraph(String paragraphId, int index, boolean throwWhenIndexIsOutOfBound) {
  synchronized (paragraphs) {
    Paragraph p = null;
    if (index < 0 || index >= paragraphs.size()) {
      if (throwWhenIndexIsOutOfBound) {
        throw new IndexOutOfBoundsException(
            "paragraph size is " + paragraphs.size() + " , index is " + index);
      } else {
        return;
      }
    }
    for (int i = 0; i < paragraphs.size(); i++) {
      if (paragraphs.get(i).getId().equals(paragraphId)) {
        if (i == index) {
          // Already at the requested position; nothing to do.
          return;
        }
        p = paragraphs.remove(i);
        // Stop scanning once removed: ids are unique, and the original kept
        // iterating over indices that had shifted after the removal.
        break;
      }
    }
    if (p != null) {
      paragraphs.add(index, p);
    }
  }
}
// True when the given id belongs to the last paragraph, or the note is empty.
public boolean isLastParagraph(String paragraphId) {
  if (!paragraphs.isEmpty()) {
    synchronized (paragraphs) {
      if (paragraphId.equals(paragraphs.get(paragraphs.size() - 1).getId())) {
        return true;
      }
    }
    return false;
  }
  /** because empty list, cannot remove nothing right? */
  return true;
}

public int getParagraphCount() {
  return paragraphs.size();
}

// Linear scan for a paragraph by id; null when absent.
public Paragraph getParagraph(String paragraphId) {
  synchronized (paragraphs) {
    for (Paragraph p : paragraphs) {
      if (p.getId().equals(paragraphId)) {
        return p;
      }
    }
  }
  return null;
}

// NOTE(review): throws IndexOutOfBoundsException when the note has no
// paragraphs — callers must check getParagraphCount() first.
public Paragraph getLastParagraph() {
  synchronized (paragraphs) {
    return paragraphs.get(paragraphs.size() - 1);
  }
}
// Snapshot of status info for every paragraph, in note order.
public List<Map<String, String>> generateParagraphsInfo() {
  List<Map<String, String>> paragraphsInfo = new LinkedList<>();
  synchronized (paragraphs) {
    for (Paragraph p : paragraphs) {
      Map<String, String> info = populateParagraphInfo(p);
      paragraphsInfo.add(info);
    }
  }
  return paragraphsInfo;
}

// Status info for a single paragraph; an empty map when the id is unknown.
public Map<String, String> generateSingleParagraphInfo(String paragraphId) {
  synchronized (paragraphs) {
    for (Paragraph p : paragraphs) {
      if (p.getId().equals(paragraphId)) {
        return populateParagraphInfo(p);
      }
    }
    return new HashMap<>();
  }
}
/**
 * Flattens a paragraph's execution state into a string map consumed by the
 * frontend: id, status, optional started/finished timestamps, and a progress
 * percentage (the live value while running, 100 otherwise).
 */
private Map<String, String> populateParagraphInfo(Paragraph p) {
  Map<String, String> details = new HashMap<>();
  details.put("id", p.getId());
  details.put("status", p.getStatus().toString());
  Date started = p.getDateStarted();
  if (started != null) {
    details.put("started", started.toString());
  }
  Date finished = p.getDateFinished();
  if (finished != null) {
    details.put("finished", finished.toString());
  }
  if (p.getStatus().isRunning()) {
    details.put("progress", String.valueOf(p.progress()));
  } else {
    details.put("progress", String.valueOf(100));
  }
  return details;
}
// Pre-fills a new paragraph with the interpreter directive ("%name") of the
// paragraph adjacent to the insertion point, so consecutive paragraphs default
// to the same interpreter.
private void setParagraphMagic(Paragraph p, int index) {
  if (paragraphs.size() > 0) {
    String replName;
    if (index == 0) {
      // Inserting at the top: inherit from the current first paragraph.
      replName = paragraphs.get(0).getIntpText();
    } else {
      // Otherwise inherit from the paragraph just above.
      replName = paragraphs.get(index - 1).getIntpText();
    }
    if (p.isValidInterpreter(replName) && StringUtils.isNotEmpty(replName)) {
      p.setText("%" + replName + "\n");
    }
  }
}
/**
 * Run all paragraphs sequentially (blocking). Used by the cron scheduler:
 * executes as the configured "cronExecutingUser", or "anonymous" by default.
 */
public synchronized void runAll() {
  String cronExecutingUser = (String) getConfig().get("cronExecutingUser");
  if (null == cronExecutingUser) {
    cronExecutingUser = "anonymous";
  }
  AuthenticationInfo authenticationInfo = new AuthenticationInfo();
  authenticationInfo.setUser(cronExecutingUser);
  runAll(authenticationInfo, true);
}

// Runs enabled paragraphs in order, stopping at the first one that fails.
public void runAll(AuthenticationInfo authenticationInfo, boolean blocking) {
  for (Paragraph p : getParagraphs()) {
    if (!p.isEnabled()) {
      continue;
    }
    p.setAuthenticationInfo(authenticationInfo);
    if (!run(p.getId(), blocking)) {
      logger.warn("Skip running the remain notes because paragraph {} fails", p.getId());
      break;
    }
  }
}

// Non-blocking single-paragraph run.
public boolean run(String paragraphId) {
  return run(paragraphId, false);
}

/**
 * Run a single paragraph.
 *
 * @param paragraphId ID of paragraph
 */
public boolean run(String paragraphId, boolean blocking) {
  // NOTE(review): getParagraph returns null for unknown ids, which would NPE
  // below — callers are expected to pass a valid paragraph id.
  Paragraph p = getParagraph(paragraphId);
  p.setListener(jobListenerFactory.getParagraphJobListener(this));
  return p.execute(blocking);
}
/**
 * Check whether every paragraph belonging to this note has terminated.
 */
boolean isTerminated() {
  synchronized (paragraphs) {
    for (Paragraph p : paragraphs) {
      if (!p.isTerminated()) {
        return false;
      }
    }
  }
  return true;
}

/**
 * Return true if there is a running or pending paragraph.
 */
boolean isRunningOrPending() {
  synchronized (paragraphs) {
    for (Paragraph p : paragraphs) {
      Status status = p.getStatus();
      if (status.isRunning() || status.isPending()) {
        return true;
      }
    }
  }
  return false;
}
// True when the note's top-level folder is the trash folder.
public boolean isTrash() {
  String path = getName();
  if (path.charAt(0) == '/') {
    path = path.substring(1);
  }
  return path.split("/")[0].equals(Folder.TRASH_FOLDER_ID);
}

// Delegates code completion at the given cursor to the paragraph's interpreter.
public List<InterpreterCompletion> completion(String paragraphId, String buffer, int cursor) {
  Paragraph p = getParagraph(paragraphId);
  p.setListener(jobListenerFactory.getParagraphJobListener(this));
  return p.completion(buffer, cursor);
}
/**
 * Builds the interpreter-name completions available to this note. Settings
 * with multiple interpreters contribute one "setting.name" entry per
 * interpreter; single-interpreter settings contribute just the setting name.
 *
 * @return completion entries of type {@code CompletionType.setting}
 */
public List<InterpreterCompletion> getInterpreterCompletion() {
  // Parameterized list; the original used the raw LinkedList type, which
  // defeats generic type checking.
  List<InterpreterCompletion> completion = new LinkedList<>();
  for (InterpreterSetting intp : interpreterSettingManager.getInterpreterSettings(getId())) {
    List<InterpreterInfo> intInfo = intp.getInterpreterInfos();
    if (intInfo.size() > 1) {
      for (InterpreterInfo info : intInfo) {
        String name = intp.getName() + "." + info.getName();
        completion.add(new InterpreterCompletion(name, name, CompletionType.setting.name()));
      }
    } else {
      completion.add(new InterpreterCompletion(intp.getName(), intp.getName(),
          CompletionType.setting.name()));
    }
  }
  return completion;
}
// Defensive copy of the paragraph list, taken under the list's lock.
public List<Paragraph> getParagraphs() {
  synchronized (paragraphs) {
    return new LinkedList<>(paragraphs);
  }
}

// Rebuilds the angularObjects snapshot from every interpreter group currently
// bound to this note (including global-scope objects), keyed by group id.
private void snapshotAngularObjectRegistry(String user) {
  angularObjects = new HashMap<>();
  List<InterpreterSetting> settings = interpreterSettingManager.getInterpreterSettings(getId());
  if (settings == null || settings.size() == 0) {
    return;
  }
  for (InterpreterSetting setting : settings) {
    InterpreterGroup intpGroup = setting.getInterpreterGroup(user, id);
    if (intpGroup != null) {
      AngularObjectRegistry registry = intpGroup.getAngularObjectRegistry();
      angularObjects.put(intpGroup.getId(), registry.getAllWithGlobal(id));
    }
  }
}
/**
 * Removes every angular object scoped to the given paragraph (and to any of
 * its application states) from all interpreter groups bound to this note, and
 * resets the local angular-object snapshot.
 *
 * @param user user whose interpreter groups are inspected
 * @param paragraphId paragraph whose angular objects are removed
 */
private void removeAllAngularObjectInParagraph(String user, String paragraphId) {
  angularObjects = new HashMap<>();
  List<InterpreterSetting> settings = interpreterSettingManager.getInterpreterSettings(getId());
  if (settings == null || settings.size() == 0) {
    return;
  }
  for (InterpreterSetting setting : settings) {
    // Fetch the group once; the original called getInterpreterGroup twice per
    // setting (once for the null check, once for use).
    InterpreterGroup intpGroup = setting.getInterpreterGroup(user, id);
    if (intpGroup == null) {
      continue;
    }
    AngularObjectRegistry registry = intpGroup.getAngularObjectRegistry();
    List<ApplicationState> appStates = getParagraph(paragraphId).getAllApplicationStates();
    if (registry instanceof RemoteAngularObjectRegistry) {
      RemoteAngularObjectRegistry remoteRegistry = (RemoteAngularObjectRegistry) registry;
      // remove paragraph scope object
      remoteRegistry.removeAllAndNotifyRemoteProcess(id, paragraphId);
      // remove app scope object
      if (appStates != null) {
        for (ApplicationState app : appStates) {
          remoteRegistry.removeAllAndNotifyRemoteProcess(id, app.getId());
        }
      }
    } else {
      // remove paragraph scope object
      registry.removeAll(id, paragraphId);
      // remove app scope object
      if (appStates != null) {
        for (ApplicationState app : appStates) {
          registry.removeAll(id, app.getId());
        }
      }
    }
  }
}
// Synchronously persists the note: cancels any pending delayed persist,
// snapshots angular objects, refreshes the search index, then saves to the
// notebook repository.
public void persist(AuthenticationInfo subject) throws IOException {
  Preconditions.checkNotNull(subject, "AuthenticationInfo should not be null");
  stopDelayedPersistTimer();
  snapshotAngularObjectRegistry(subject.getUser());
  index.updateIndexDoc(this);
  repo.save(this, subject);
}

/**
 * Persist this note with maximum delay.
 */
public void persist(int maxDelaySec, AuthenticationInfo subject) {
  startDelayedPersistTimer(maxDelaySec, subject);
}

// Deletes the note from the repository.
void unpersist(AuthenticationInfo subject) throws IOException {
  repo.remove(getId(), subject);
}

/**
 * Return new note for specific user. this inserts and replaces user paragraph which doesn't
 * exists in original paragraph
 *
 * @param user specific user
 * @return new Note for the user
 */
public Note getUserNote(String user) {
  Note newNote = new Note();
  newNote.name = getName();
  newNote.id = getId();
  newNote.config = getConfig();
  newNote.angularObjects = getAngularObjects();
  Paragraph newParagraph;
  // NOTE(review): iterates paragraphs without synchronizing, unlike most other
  // traversals in this class — confirm callers hold appropriate guarantees.
  for (Paragraph p : paragraphs) {
    newParagraph = p.getUserParagraph(user);
    if (null == newParagraph) {
      newParagraph = p.cloneParagraphForUser(user);
    }
    newNote.paragraphs.add(newParagraph);
  }
  return newNote;
}
/**
 * Schedules a one-shot background persist of this note unless one is already
 * pending. The pending task handle is tracked in {@code delayedPersist} and
 * cancelled by {@link #stopDelayedPersistTimer()} (e.g. when a synchronous
 * persist runs first).
 */
private void startDelayedPersistTimer(int maxDelaySec, final AuthenticationInfo subject) {
  synchronized (this) {
    if (delayedPersist != null) {
      // A persist is already scheduled; keep the earlier deadline.
      return;
    }
    delayedPersist = delayedPersistThreadPool.schedule(new Runnable() {
      @Override
      public void run() {
        try {
          persist(subject);
        } catch (IOException e) {
          logger.error(e.getMessage(), e);
        }
      }
    }, maxDelaySec, TimeUnit.SECONDS);
  }
}

/** Cancels any pending delayed persist and clears the tracking handle. */
private void stopDelayedPersistTimer() {
  synchronized (this) {
    if (delayedPersist == null) {
      return;
    }
    delayedPersist.cancel(false);
    // Clear the handle so a later persist(maxDelaySec, subject) can schedule
    // again. The original left the cancelled/finished future in place, which
    // made startDelayedPersistTimer a permanent no-op after the first use.
    delayedPersist = null;
  }
}
// Lazily (re)creates the config map; never returns null.
public Map<String, Object> getConfig() {
  if (config == null) {
    config = new HashMap<>();
  }
  return config;
}

public void setConfig(Map<String, Object> config) {
  this.config = config;
}

// Lazily (re)creates the info map; never returns null.
public Map<String, Object> getInfo() {
  if (info == null) {
    info = new HashMap<>();
  }
  return info;
}

public void setInfo(Map<String, Object> info) {
  this.info = info;
}
/**
 * Resolves this note's paragraph job listener, or null when no listener
 * factory has been injected (e.g. for a freshly deserialized note). Extracted
 * so the forwarding callbacks below do not each repeat the two null checks.
 */
private ParagraphJobListener paragraphJobListener() {
  return jobListenerFactory == null ? null : jobListenerFactory.getParagraphJobListener(this);
}

@Override
public void beforeStatusChange(Job job, Status before, Status after) {
  ParagraphJobListener listener = paragraphJobListener();
  if (listener != null) {
    listener.beforeStatusChange(job, before, after);
  }
}

@Override
public void afterStatusChange(Job job, Status before, Status after) {
  ParagraphJobListener listener = paragraphJobListener();
  if (listener != null) {
    listener.afterStatusChange(job, before, after);
  }
  // The note-level event listener is notified regardless of whether a
  // paragraph job listener exists.
  if (noteEventListener != null) {
    noteEventListener.onParagraphStatusChange((Paragraph) job, after);
  }
}

@Override
public void onProgressUpdate(Job job, int progress) {
  ParagraphJobListener listener = paragraphJobListener();
  if (listener != null) {
    listener.onProgressUpdate(job, progress);
  }
}

@Override
public void onOutputAppend(Paragraph paragraph, int idx, String output) {
  ParagraphJobListener listener = paragraphJobListener();
  if (listener != null) {
    listener.onOutputAppend(paragraph, idx, output);
  }
}

@Override
public void onOutputUpdate(Paragraph paragraph, int idx, InterpreterResultMessage msg) {
  ParagraphJobListener listener = paragraphJobListener();
  if (listener != null) {
    listener.onOutputUpdate(paragraph, idx, msg);
  }
}

@Override
public void onOutputUpdateAll(Paragraph paragraph, List<InterpreterResultMessage> msgs) {
  ParagraphJobListener listener = paragraphJobListener();
  if (listener != null) {
    listener.onOutputUpdateAll(paragraph, msgs);
  }
}
void setNoteEventListener(NoteEventListener noteEventListener) {
  this.noteEventListener = noteEventListener;
}

// True when at least one interpreter setting is bound to this note.
boolean hasInterpreterBinded() {
  return !interpreterSettingManager.getInterpreterSettings(getId()).isEmpty();
}
// Serializes this note with the shared, pretty-printing Gson instance.
@Override
public String toJson() {
  return gson.toJson(this);
}

// Deserializes a note, applying legacy input-form conversion and re-parsing
// paragraph text before returning it.
public static Note fromJson(String json) {
  Note note = gson.fromJson(json, Note.class);
  convertOldInput(note);
  note.postProcessParagraphs();
  return note;
}

// Drops transient runtime info and re-parses paragraph text after load.
public void postProcessParagraphs() {
  for (Paragraph p : paragraphs) {
    p.clearRuntimeInfos();
    p.parseText();
  }
}

// Applies legacy input-form conversion to every paragraph's settings.
private static void convertOldInput(Note note) {
  for (Paragraph p : note.paragraphs) {
    p.settings.convertOldInput();
  }
}
// Equality is based on paragraphs, id, angularObjects, config and info; name
// is deliberately excluded (see the TODO below).
@Override
public boolean equals(Object o) {
  if (this == o) {
    return true;
  }
  if (o == null || getClass() != o.getClass()) {
    return false;
  }
  Note note = (Note) o;
  if (paragraphs != null ? !paragraphs.equals(note.paragraphs) : note.paragraphs != null) {
    return false;
  }
  //TODO(zjffdu) exclude name because FolderView.index use Note as key and consider different name
  //as same note
  // if (name != null ? !name.equals(note.name) : note.name != null) return false;
  if (id != null ? !id.equals(note.id) : note.id != null) {
    return false;
  }
  if (angularObjects != null ?
      !angularObjects.equals(note.angularObjects) : note.angularObjects != null) {
    return false;
  }
  if (config != null ? !config.equals(note.config) : note.config != null) {
    return false;
  }
  return info != null ? info.equals(note.info) : note.info == null;
}

// Must stay consistent with equals: same fields, name excluded.
@Override
public int hashCode() {
  int result = paragraphs != null ? paragraphs.hashCode() : 0;
  // result = 31 * result + (name != null ? name.hashCode() : 0);
  result = 31 * result + (id != null ? id.hashCode() : 0);
  result = 31 * result + (angularObjects != null ? angularObjects.hashCode() : 0);
  result = 31 * result + (config != null ? config.hashCode() : 0);
  result = 31 * result + (info != null ? info.hashCode() : 0);
  return result;
}
// Test-only access to the shared serializer.
@VisibleForTesting
public static Gson getGson() {
  return gson;
}
}
| |
/*
* Copyright 2017 LinkedIn Corp. Licensed under the BSD 2-Clause License (the "License"). See License in the project root for license information.
*/
package com.linkedin.kafka.cruisecontrol.monitor;
import com.codahale.metrics.Gauge;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.Timer;
import com.linkedin.cruisecontrol.exception.NotEnoughValidWindowsException;
import com.linkedin.cruisecontrol.metricdef.MetricDef;
import com.linkedin.cruisecontrol.monitor.sampling.aggregator.AggregatedMetricValues;
import com.linkedin.cruisecontrol.monitor.sampling.aggregator.Extrapolation;
import com.linkedin.cruisecontrol.monitor.sampling.aggregator.MetricSampleCompleteness;
import com.linkedin.cruisecontrol.monitor.sampling.aggregator.MetricValues;
import com.linkedin.cruisecontrol.monitor.sampling.aggregator.ValuesAndExtrapolations;
import com.linkedin.kafka.cruisecontrol.common.Resource;
import com.linkedin.kafka.cruisecontrol.analyzer.AnalyzerUtils;
import com.linkedin.kafka.cruisecontrol.common.KafkaCruiseControlThreadFactory;
import com.linkedin.kafka.cruisecontrol.common.MetadataClient;
import com.linkedin.kafka.cruisecontrol.config.BrokerCapacityConfigResolver;
import com.linkedin.kafka.cruisecontrol.config.KafkaCruiseControlConfig;
import com.linkedin.kafka.cruisecontrol.async.progress.GeneratingClusterModel;
import com.linkedin.kafka.cruisecontrol.async.progress.OperationProgress;
import com.linkedin.kafka.cruisecontrol.async.progress.WaitingForClusterModel;
import com.linkedin.kafka.cruisecontrol.model.Broker;
import com.linkedin.kafka.cruisecontrol.model.ClusterModel;
import com.linkedin.kafka.cruisecontrol.monitor.metricdefinition.KafkaMetricDef;
import com.linkedin.kafka.cruisecontrol.monitor.sampling.BrokerEntity;
import com.linkedin.kafka.cruisecontrol.monitor.sampling.PartitionEntity;
import com.linkedin.kafka.cruisecontrol.monitor.sampling.aggregator.KafkaBrokerMetricSampleAggregator;
import com.linkedin.kafka.cruisecontrol.monitor.sampling.aggregator.KafkaPartitionMetricSampleAggregator;
import com.linkedin.cruisecontrol.monitor.sampling.aggregator.MetricSampleAggregationResult;
import com.linkedin.kafka.cruisecontrol.monitor.sampling.PartitionMetricSample;
import com.linkedin.kafka.cruisecontrol.monitor.sampling.aggregator.SampleExtrapolation;
import com.linkedin.kafka.cruisecontrol.monitor.task.LoadMonitorTaskRunner;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.SortedMap;
import java.util.SortedSet;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.kafka.clients.Metadata;
import org.apache.kafka.common.Cluster;
import org.apache.kafka.common.Node;
import org.apache.kafka.common.PartitionInfo;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.internals.ClusterResourceListeners;
import org.apache.kafka.common.utils.Time;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * The LoadMonitor monitors the workload of a Kafka cluster. It periodically triggers the metric sampling and
 * maintains the collected {@link PartitionMetricSample}. It is also responsible for aggregating the metric samples into
 * {@link AggregatedMetricValues} for the analyzer to generate the balancing proposals.
 */
public class LoadMonitor {
// Kafka Load Monitor server log.
private static final Logger LOG = LoggerFactory.getLogger(LoadMonitor.class);
// Milliseconds the MetadataClient treats cached metadata as fresh before refreshing.
private static final long METADATA_TTL = 5000L;
private final int _numPartitionMetricSampleWindows;
private final LoadMonitorTaskRunner _loadMonitorTaskRunner;
private final KafkaPartitionMetricSampleAggregator _partitionMetricSampleAggregator;
private final KafkaBrokerMetricSampleAggregator _brokerMetricSampleAggregator;
// A semaphore to help throttle the simultaneous cluster model creation
private final Semaphore _clusterModelSemaphore;
private final MetadataClient _metadataClient;
private final BrokerCapacityConfigResolver _brokerCapacityConfigResolver;
// Runs SensorUpdater periodically to refresh the volatile sensor fields below.
private final ScheduledExecutorService _sensorUpdaterExecutor;
private final Timer _clusterModelCreationTimer;
// Per-thread flag: true while the current thread holds _clusterModelSemaphore
// (used to reject re-entrant acquisition).
private final ThreadLocal<Boolean> _acquiredClusterModelSemaphore;
private final ModelCompletenessRequirements _defaultModelCompletenessRequirements;
// Sensor values
private volatile int _numValidSnapshotWindows;
private volatile double _monitoredPartitionsPercentage;
private volatile int _totalMonitoredSnapshotWindows;
private volatile int _numPartitionsWithExtrapolations;
// Wall-clock ms of the last successful sensor update; sensor getters treat values
// older than SensorUpdater.UPDATE_TIMEOUT_MS as invalid.
private volatile long _lastUpdate;
// Generation/stats of the most recently built cluster model, cached so callers can
// skip an expensive rebuild when nothing has changed.
private volatile ModelGeneration _cachedBrokerLoadGeneration;
private volatile ClusterModel.BrokerStats _cachedBrokerLoadStats;
/**
 * Construct a load monitor.
 *
 * @param config The load monitor configuration.
 * @param time The time object.
 * @param dropwizardMetricRegistry The sensor registry for cruise control
 * @param metricDef The metric definitions.
 */
public LoadMonitor(KafkaCruiseControlConfig config,
Time time,
MetricRegistry dropwizardMetricRegistry,
MetricDef metricDef) {
// Delegates to the package-private constructor with a freshly built MetadataClient.
this(config,
new MetadataClient(config,
// NOTE(review): 5000L here is the metadata refresh backoff (ms). It happens to
// equal METADATA_TTL's value but is a distinct setting — confirm before unifying.
new Metadata(5000L,
config.getLong(KafkaCruiseControlConfig.METADATA_MAX_AGE_CONFIG),
false,
false,
new ClusterResourceListeners()),
METADATA_TTL,
time),
time,
dropwizardMetricRegistry,
metricDef);
}
/**
 * Package private constructor for unit tests.
 *
 * Wires the aggregators, task runner, throttling semaphore, and Dropwizard sensors.
 * Also starts the background sensor-updater thread as a side effect.
 */
LoadMonitor(KafkaCruiseControlConfig config,
MetadataClient metadataClient,
Time time,
MetricRegistry dropwizardMetricRegistry,
MetricDef metricDef) {
_metadataClient = metadataClient;
_brokerCapacityConfigResolver = config.getConfiguredInstance(KafkaCruiseControlConfig.BROKER_CAPACITY_CONFIG_RESOLVER_CLASS_CONFIG,
BrokerCapacityConfigResolver.class);
_numPartitionMetricSampleWindows = config.getInt(KafkaCruiseControlConfig.NUM_PARTITION_METRICS_WINDOWS_CONFIG);
_partitionMetricSampleAggregator = new KafkaPartitionMetricSampleAggregator(config, metadataClient.metadata());
_brokerMetricSampleAggregator = new KafkaBrokerMetricSampleAggregator(config);
_acquiredClusterModelSemaphore = ThreadLocal.withInitial(() -> false);
// We use the number of proposal precomputing threads config to ensure there is enough
// concurrency if users want that. Fair semaphore, at least one permit.
int numPrecomputingThread = config.getInt(KafkaCruiseControlConfig.NUM_PROPOSAL_PRECOMPUTE_THREADS_CONFIG);
_clusterModelSemaphore = new Semaphore(Math.max(1, numPrecomputingThread), true);
_defaultModelCompletenessRequirements =
MonitorUtils.combineLoadRequirementOptions(AnalyzerUtils.getGoalMapByPriority(config).values());
_loadMonitorTaskRunner =
new LoadMonitorTaskRunner(config, _partitionMetricSampleAggregator, _brokerMetricSampleAggregator,
_metadataClient, metricDef, time, dropwizardMetricRegistry);
_clusterModelCreationTimer = dropwizardMetricRegistry.timer(MetricRegistry.name("LoadMonitor",
"cluster-model-creation-timer"));
// NOTE(review): scheduling the updater inside the constructor lets 'this' escape before
// construction completes. Benign today (the updater only reads fields assigned above),
// but confirm before adding new state it depends on.
SensorUpdater sensorUpdater = new SensorUpdater();
_sensorUpdaterExecutor = Executors.newSingleThreadScheduledExecutor(new KafkaCruiseControlThreadFactory("LoadMonitorSensorUpdater", true, LOG));
_sensorUpdaterExecutor.scheduleAtFixedRate(sensorUpdater, 0, SensorUpdater.UPDATE_INTERVAL_MS, TimeUnit.MILLISECONDS);
// Gauges report -1 / 0.0 when the sensor values are stale (see the private getters).
dropwizardMetricRegistry.register(MetricRegistry.name("LoadMonitor", "valid-windows"),
(Gauge<Integer>) this::numValidSnapshotWindows);
dropwizardMetricRegistry.register(MetricRegistry.name("LoadMonitor", "monitored-partitions-percentage"),
(Gauge<Double>) this::monitoredPartitionsPercentage);
dropwizardMetricRegistry.register(MetricRegistry.name("LoadMonitor", "total-monitored-windows"),
(Gauge<Integer>) this::totalMonitoredSnapshotWindows);
dropwizardMetricRegistry.register(MetricRegistry.name("LoadMonitor", "num-partitions-with-extrapolations"),
(Gauge<Integer>) this::numPartitionsWithExtrapolations);
}
/**
 * Start the load monitor. Delegates to the task runner, which begins metric sampling.
 */
public void startUp() {
_loadMonitorTaskRunner.start();
}
/**
 * Shutdown the load monitor: close the broker capacity resolver, stop the sensor updater,
 * stop the task runner, and release the metadata client.
 *
 * Each step is attempted even if an earlier one fails; a resolver-close failure is logged
 * and must not prevent the executor and task runner from shutting down.
 */
public void shutdown() {
LOG.info("Shutting down load monitor.");
try {
_brokerCapacityConfigResolver.close();
} catch (Exception e) {
// Best-effort close: log and continue with the rest of the shutdown sequence.
LOG.warn("Received exception when closing broker capacity resolver.", e);
}
// Previously this call sat inside the try above, so a resolver failure skipped it.
_sensorUpdaterExecutor.shutdown();
_loadMonitorTaskRunner.shutdown();
_metadataClient.close();
LOG.info("Load Monitor shutdown completed.");
}
/**
 * Get the state of the load monitor.
 *
 * @param operationProgress the progress to report while aggregating metric samples.
 * @return a {@link LoadMonitorState} snapshot matching the task runner's current state.
 */
public LoadMonitorState state(OperationProgress operationProgress) {
LoadMonitorTaskRunner.LoadMonitorTaskRunnerState state = _loadMonitorTaskRunner.state();
MetadataClient.ClusterAndGeneration clusterAndGeneration = _metadataClient.refreshMetadata();
Cluster kafkaCluster = clusterAndGeneration.cluster();
int totalNumPartitions = MonitorUtils.totalNumPartitions(kafkaCluster);
double minMonitoredPartitionsPercentage = _defaultModelCompletenessRequirements.minMonitoredPartitionsPercentage();
// Get the window to monitored partitions percentage mapping.
SortedMap<Long, Float> validPartitionRatio =
_partitionMetricSampleAggregator.partitionCoverageByWindows(clusterAndGeneration);
// Get valid snapshot window number and populate the monitored partition map.
SortedSet<Long> validWindows = _partitionMetricSampleAggregator.validWindows(clusterAndGeneration,
minMonitoredPartitionsPercentage);
int numValidSnapshotWindows = validWindows.size();
// Get the number of valid partitions and sample extrapolations.
// Best-effort: aggregation failures are logged and reported as zero valid partitions.
int numValidPartitions = 0;
Map<TopicPartition, List<SampleExtrapolation>> extrapolations = Collections.emptyMap();
if (_partitionMetricSampleAggregator.numAvailableWindows() >= _numPartitionMetricSampleWindows) {
try {
MetricSampleAggregationResult<String, PartitionEntity> metricSampleAggregationResult =
_partitionMetricSampleAggregator.aggregate(clusterAndGeneration, Long.MAX_VALUE, operationProgress);
Map<PartitionEntity, ValuesAndExtrapolations> loads = metricSampleAggregationResult.valuesAndExtrapolations();
extrapolations = partitionSampleExtrapolations(metricSampleAggregationResult.valuesAndExtrapolations());
numValidPartitions = loads.size();
} catch (Exception e) {
LOG.warn("Received exception when trying to get the load monitor state", e);
}
}
// Map the task-runner state to the matching LoadMonitorState factory.
switch (state) {
case NOT_STARTED:
return LoadMonitorState.notStarted();
case RUNNING:
return LoadMonitorState.running(numValidSnapshotWindows,
validPartitionRatio,
numValidPartitions,
totalNumPartitions,
extrapolations);
case SAMPLING:
return LoadMonitorState.sampling(numValidSnapshotWindows,
validPartitionRatio,
numValidPartitions,
totalNumPartitions,
extrapolations);
case PAUSED:
return LoadMonitorState.paused(numValidSnapshotWindows,
validPartitionRatio,
numValidPartitions,
totalNumPartitions,
extrapolations);
case BOOTSTRAPPING:
double bootstrapProgress = _loadMonitorTaskRunner.bootStrapProgress();
// Handle the race between querying the state and getting the progress.
return LoadMonitorState.bootstrapping(numValidSnapshotWindows,
validPartitionRatio,
numValidPartitions,
totalNumPartitions,
bootstrapProgress >= 0 ? bootstrapProgress : 1.0,
extrapolations);
case TRAINING:
return LoadMonitorState.training(numValidSnapshotWindows,
validPartitionRatio,
numValidPartitions,
totalNumPartitions,
extrapolations);
case LOADING:
return LoadMonitorState.loading(numValidSnapshotWindows,
validPartitionRatio,
numValidPartitions,
totalNumPartitions,
_loadMonitorTaskRunner.sampleLoadingProgress());
default:
throw new IllegalStateException("Should never be here.");
}
}
/**
 * Return the load monitor task runner state (cheap accessor, no metadata refresh).
 */
public LoadMonitorTaskRunner.LoadMonitorTaskRunnerState taskRunnerState() {
return _loadMonitorTaskRunner.state();
}
/**
 * Bootstrap the load monitor for a given period.
 * @param startMs the starting time of the bootstrap period.
 * @param endMs the end time of the bootstrap period.
 * @param clearMetrics whether to clear the existing metric samples first.
 */
public void bootstrap(long startMs, long endMs, boolean clearMetrics) {
_loadMonitorTaskRunner.bootstrap(startMs, endMs, clearMetrics);
}
/**
 * Bootstrap the load monitor from the given timestamp until it catches up.
 * @param startMs the starting time of the bootstrap period.
 * @param clearMetrics whether to clear the existing metric samples first
 *                     (the previous claim that this overload always clears was wrong —
 *                     clearing is controlled by this flag).
 */
public void bootstrap(long startMs, boolean clearMetrics) {
_loadMonitorTaskRunner.bootstrap(startMs, clearMetrics);
}
/**
 * Bootstrap the load monitor with the most recent metric samples until it catches up.
 *
 * @param clearMetrics whether to clear the existing metric samples first.
 */
public void bootstrap(boolean clearMetrics) {
_loadMonitorTaskRunner.bootstrap(clearMetrics);
}
/**
 * Train the load model with metric samples.
 * @param startMs training period starting time.
 * @param endMs training period end time.
 */
public void train(long startMs, long endMs) {
_loadMonitorTaskRunner.train(startMs, endMs);
}
/**
 * Get the cluster information from Kafka metadata (the client's current view; no forced refresh).
 */
public Cluster kafkaCluster() {
return _metadataClient.cluster();
}
/**
 * Pause all the activities of the load monitor. The load monitor can only be paused when it is in
 * RUNNING state.
 */
public void pauseMetricSampling() {
_loadMonitorTaskRunner.pauseSampling();
}
/**
 * Resume the activities of the load monitor after a pause.
 */
public void resumeMetricSampling() {
_loadMonitorTaskRunner.resumeSampling();
}
/**
 * Acquire the semaphore for the cluster model generation. Blocks until a permit is
 * available. A thread may hold at most one permit at a time; re-entrant acquisition
 * throws IllegalStateException.
 *
 * @param operationProgress the progress for the job.
 * @return an {@link AutoCloseableSemaphore} that releases the permit on close
 *         (use with try-with-resources).
 * @throws InterruptedException if interrupted while waiting for a permit.
 */
public AutoCloseableSemaphore acquireForModelGeneration(OperationProgress operationProgress)
throws InterruptedException {
if (_acquiredClusterModelSemaphore.get()) {
throw new IllegalStateException("The thread has already acquired the semaphore for cluster model generation.");
}
WaitingForClusterModel step = new WaitingForClusterModel();
operationProgress.addStep(step);
_clusterModelSemaphore.acquire();
_acquiredClusterModelSemaphore.set(true);
step.done();
return new AutoCloseableSemaphore();
}
/**
 * Get the latest metric values of the brokers. The metric values are from the current active metric window.
 *
 * @return the latest metric values of brokers.
 */
public Map<BrokerEntity, ValuesAndExtrapolations> currentBrokerMetricValues() {
return _brokerMetricSampleAggregator.peekCurrentWindow();
}
/**
 * Get the latest metric values of the partitions. The metric values are from the current active metric window.
 *
 * @return the latest metric values of partitions.
 */
public Map<PartitionEntity, ValuesAndExtrapolations> currentPartitionMetricValues() {
return _partitionMetricSampleAggregator.peekCurrentWindow();
}
/**
 * Get the most recent cluster load model before the given timestamp, and refresh the
 * cached broker stats/generation used by {@link #cachedBrokerLoadStats()}.
 *
 * @param now The current time in millisecond.
 * @param requirements the load requirements for getting the cluster model.
 * @param operationProgress the progress to report.
 * @return A cluster model with the configured number of windows whose timestamp is before given timestamp.
 * @throws NotEnoughValidWindowsException if the requirements cannot be met with available windows.
 */
public ClusterModel clusterModel(long now, ModelCompletenessRequirements requirements, OperationProgress operationProgress)
throws NotEnoughValidWindowsException {
ClusterModel clusterModel = clusterModel(-1L, now, requirements, operationProgress);
// Micro optimization: put the broker stats construction out of the lock.
ClusterModel.BrokerStats brokerStats = clusterModel.brokerStats();
// update the cached brokerLoadStats
synchronized (this) {
_cachedBrokerLoadStats = brokerStats;
_cachedBrokerLoadGeneration = clusterModel.generation();
}
return clusterModel;
}
/**
 * Get the cluster load model for a time range.
 *
 * @param from start of the time window
 * @param to end of the time window
 * @param requirements the load completeness requirements.
 * @param operationProgress the progress of the job to report.
 * @return A cluster model with the available snapshots whose timestamp is in the given window.
 * @throws NotEnoughValidWindowsException if the aggregator cannot satisfy the requirements in [from, to].
 */
public ClusterModel clusterModel(long from,
long to,
ModelCompletenessRequirements requirements,
OperationProgress operationProgress)
throws NotEnoughValidWindowsException {
long start = System.currentTimeMillis();
MetadataClient.ClusterAndGeneration clusterAndGeneration = _metadataClient.refreshMetadata();
Cluster kafkaCluster = clusterAndGeneration.cluster();
// Get the metric aggregation result.
MetricSampleAggregationResult<String, PartitionEntity> metricSampleAggregationResult =
_partitionMetricSampleAggregator.aggregate(clusterAndGeneration, from, to, requirements, operationProgress);
Map<PartitionEntity, ValuesAndExtrapolations> loadSnapshots = metricSampleAggregationResult.valuesAndExtrapolations();
GeneratingClusterModel step = new GeneratingClusterModel(loadSnapshots.size());
operationProgress.addStep(step);
// Create an empty cluster model first.
long currentLoadGeneration = metricSampleAggregationResult.generation();
ModelGeneration modelGeneration = new ModelGeneration(clusterAndGeneration.generation(), currentLoadGeneration);
MetricSampleCompleteness<String, PartitionEntity> completeness = metricSampleAggregationResult.completeness();
ClusterModel clusterModel = new ClusterModel(modelGeneration, completeness.validEntityRatio());
// Time the model construction; the timer context is stopped in the finally block.
final Timer.Context ctx = _clusterModelCreationTimer.time();
try {
// Create the racks and brokers.
// Shuffle nodes before getting their capacity from the capacity resolver.
// This enables a capacity resolver to estimate the capacity of the nodes, for which the capacity retrieval has
// failed.
// The use case for this estimation is that if the capacity of one of the nodes is not available (e.g. due to some
// 3rd party service issue), the capacity resolver may want to use the capacity of a peer node as the capacity for
// that node.
// To this end, Cruise Control handles the case that the first node is problematic so the capacity resolver does
// not have the chance to get the capacity for the other nodes.
// Shuffling the node order helps, as the problematic node is unlikely to always be the first node in the list.
List<Node> shuffledNodes = new ArrayList<>(kafkaCluster.nodes());
Collections.shuffle(shuffledNodes);
for (Node node : shuffledNodes) {
// If the rack is not specified, we use the host info as rack info.
String rack = getRackHandleNull(node);
clusterModel.createRack(rack);
Map<Resource, Double> brokerCapacity =
_brokerCapacityConfigResolver.capacityForBroker(rack, node.host(), node.id());
clusterModel.createBroker(rack, node.host(), node.id(), brokerCapacity);
}
// populate snapshots for the cluster model.
for (Map.Entry<PartitionEntity, ValuesAndExtrapolations> entry : loadSnapshots.entrySet()) {
TopicPartition tp = entry.getKey().tp();
ValuesAndExtrapolations leaderLoad = entry.getValue();
populateLoad(kafkaCluster, clusterModel, tp, leaderLoad);
}
// Get the dead brokers and mark them as dead.
deadBrokers(kafkaCluster).forEach(brokerId -> clusterModel.setBrokerState(brokerId, Broker.State.DEAD));
LOG.debug("Generated cluster model in {} ms", System.currentTimeMillis() - start);
} finally {
ctx.stop();
}
return clusterModel;
}
/**
 * Get the current cluster model generation. This is useful to avoid unnecessary cluster model creation which is
 * expensive. Combines the (refreshed) metadata generation with the partition aggregator generation.
 */
public ModelGeneration clusterModelGeneration() {
int clusterGeneration = _metadataClient.refreshMetadata().generation();
return new ModelGeneration(clusterGeneration, _partitionMetricSampleAggregator.generation());
}
/**
 * Get the cached broker load stats.
 *
 * @return the cached broker stats, or {@code null} when the cache is stale — i.e. the
 *         cluster generation or the load generation has changed since the stats were cached.
 */
public ClusterModel.BrokerStats cachedBrokerLoadStats() {
int clusterGeneration = _metadataClient.refreshMetadata().generation();
// The cache is valid only if both generations still match the cached model generation.
synchronized (this) {
if (_cachedBrokerLoadGeneration != null
&& clusterGeneration == _cachedBrokerLoadGeneration.clusterGeneration()
&& _partitionMetricSampleAggregator.generation() == _cachedBrokerLoadGeneration.loadGeneration()) {
return _cachedBrokerLoadStats;
}
}
return null;
}
/**
 * Get all the active brokers in the cluster based on the partition assignment. If a metadata refresh failed due to
 * timeout, the current metadata information will be used. This is to handle the case that all the brokers are down.
 * @param timeout the timeout in milliseconds.
 * @return All the brokers in the cluster that has at least one replica assigned.
 */
public Set<Integer> brokersWithPartitions(long timeout) {
Cluster kafkaCluster = _metadataClient.refreshMetadata(timeout).cluster();
return brokersWithPartitions(kafkaCluster);
}
/**
 * Check whether the monitored load meets the given completeness requirements,
 * i.e. whether enough windows satisfy the minimum monitored-partition percentage.
 */
public boolean meetCompletenessRequirements(ModelCompletenessRequirements requirements) {
MetadataClient.ClusterAndGeneration clusterAndGeneration = _metadataClient.refreshMetadata();
SortedSet<Long> validWindows =
_partitionMetricSampleAggregator.validWindows(clusterAndGeneration,
requirements.minMonitoredPartitionsPercentage());
return validWindows.size() >= requirements.minRequiredNumWindows();
}
/**
 * @return all the available broker level metrics, aggregated over every broker currently in
 *         the metadata view. Null is returned if nothing is available.
 */
public MetricSampleAggregationResult<String, BrokerEntity> brokerMetrics() {
Set<BrokerEntity> brokerEntities = new HashSet<>();
for (Node node : _metadataClient.cluster().nodes()) {
brokerEntities.add(new BrokerEntity(node.host(), node.id()));
}
return _brokerMetricSampleAggregator.aggregate(brokerEntities);
}
/**
 * Package private for unit test. Exposes the partition metric sample aggregator.
 */
KafkaPartitionMetricSampleAggregator partitionSampleAggregator() {
return _partitionMetricSampleAggregator;
}
/**
 * Populate the cluster model with the per-replica load of one partition.
 *
 * For every replica: ensure its broker exists in the model even if dead (capacity is
 * still needed for host resources), then create the replica and attach its load. Offline
 * partitions (no leader) are logged and skipped after the dead-broker bookkeeping.
 *
 * @param kafkaCluster the metadata snapshot the partition info is read from.
 * @param clusterModel the model being built.
 * @param tp the partition to populate.
 * @param valuesAndExtrapolations the leader replica's aggregated values.
 */
private void populateLoad(Cluster kafkaCluster,
ClusterModel clusterModel,
TopicPartition tp,
ValuesAndExtrapolations valuesAndExtrapolations) {
PartitionInfo partitionInfo = kafkaCluster.partition(tp);
// If partition info does not exist, the topic may have been deleted.
if (partitionInfo == null) {
return;
}
Node[] replicas = partitionInfo.replicas();
for (int index = 0; index < replicas.length; index++) {
Node replica = replicas[index];
String rack = getRackHandleNull(replica);
// Note that we assume the capacity resolver can still return the broker capacity even if the broker
// is dead. We need this to get the host resource capacity.
Map<Resource, Double> brokerCapacity =
_brokerCapacityConfigResolver.capacityForBroker(rack, replica.host(), replica.id());
clusterModel.handleDeadBroker(rack, replica.id(), brokerCapacity);
Node leader = partitionInfo.leader();
if (leader == null) {
LOG.warn("Detected offline partition {}-{}, skipping", partitionInfo.topic(), partitionInfo.partition());
continue;
}
boolean isLeader = replica.id() == leader.id();
clusterModel.createReplica(rack, replica.id(), tp, index, isLeader);
// Reuse the partitionInfo snapshot fetched above instead of looking the partition up
// again — the second lookup was redundant and could observe changed metadata.
clusterModel.setReplicaLoad(rack,
replica.id(),
tp,
getAggregatedMetricValues(valuesAndExtrapolations, partitionInfo, isLeader),
valuesAndExtrapolations.windows());
}
}
/**
 * Get the {@link AggregatedMetricValues} based on the replica role (leader/follower).
 *
 * @param valuesAndExtrapolations the values and extrapolations of the leader replica.
 * @param partitionInfo the partition info.
 * @param isLeader whether the value is created for leader replica or follower replica.
 * @return the {@link AggregatedMetricValues} to use for the given replica: the leader
 *         values with replication-bytes-out filled in, or the derived follower values.
 */
private AggregatedMetricValues getAggregatedMetricValues(ValuesAndExtrapolations valuesAndExtrapolations,
PartitionInfo partitionInfo,
boolean isLeader) {
AggregatedMetricValues leaderValues = valuesAndExtrapolations.metricValues();
return isLeader
? fillInReplicationBytesOut(leaderValues, partitionInfo)
: MonitorUtils.toFollowerMetricValues(leaderValues);
}
/**
 * When the replica is a leader replica, we need to fill in the replication bytes out if it has not been filled in
 * yet. This is because currently Kafka does not report this metric. We simply use the leader bytes in rate multiplied
 * by the number of followers as the replication bytes out rate. The assumption is that all the followers will
 * eventually keep up with the leader.
 *
 * We only fill in the replication bytes out rate when creating the cluster model because the replication factor
 * may have changed since the time the PartitionMetricSample was created.
 *
 * NOTE: this mutates {@code aggregatedMetricValues} in place (adds or overwrites the
 * replication-bytes-out series) and returns the same instance.
 *
 * @param aggregatedMetricValues the {@link AggregatedMetricValues} for the leader replica.
 * @param info the partition info for the partition.
 * @return the {@link AggregatedMetricValues} with the replication bytes out rate filled in.
 */
private AggregatedMetricValues fillInReplicationBytesOut(AggregatedMetricValues aggregatedMetricValues,
PartitionInfo info) {
int numFollowers = info.replicas().length - 1;
int leaderBytesInRateId = KafkaMetricDef.commonMetricDefId(KafkaMetricDef.LEADER_BYTES_IN);
int replicationBytesOutRateId = KafkaMetricDef.commonMetricDefId(KafkaMetricDef.REPLICATION_BYTES_OUT_RATE);
MetricValues leaderBytesInRate = aggregatedMetricValues.valuesFor(leaderBytesInRateId);
MetricValues replicationBytesOutRate = aggregatedMetricValues.valuesFor(replicationBytesOutRateId);
// If the replication bytes out rate is already reported, update it. Otherwise add a new MetricValues.
if (replicationBytesOutRate == null) {
replicationBytesOutRate = new MetricValues(leaderBytesInRate.length());
aggregatedMetricValues.add(replicationBytesOutRateId, replicationBytesOutRate);
}
// replication-out = leader-in * followers, per window.
for (int i = 0; i < leaderBytesInRate.length(); i++) {
replicationBytesOutRate.set(i, leaderBytesInRate.get(i) * numFollowers);
}
return aggregatedMetricValues;
}
/**
 * Resolve the rack of a node, falling back to the host name when the node reports
 * no rack (null or empty).
 */
private String getRackHandleNull(Node node) {
String rack = node.rack();
if (rack == null || rack.isEmpty()) {
return node.host();
}
return rack;
}
/**
 * Collect the id of every broker that hosts at least one replica of any topic in the
 * given cluster metadata.
 */
private Set<Integer> brokersWithPartitions(Cluster kafkaCluster) {
Set<Integer> brokerIds = new HashSet<>();
for (String topic : kafkaCluster.topics()) {
for (PartitionInfo partitionInfo : kafkaCluster.partitionsForTopic(topic)) {
for (Node replica : partitionInfo.replicas()) {
brokerIds.add(replica.id());
}
}
}
return brokerIds;
}
/**
 * Compute the dead brokers: start from every broker that hosts replicas, then remove
 * the ones still present in the cluster metadata (i.e. alive). What remains is dead.
 */
private Set<Integer> deadBrokers(Cluster kafkaCluster) {
Set<Integer> deadBrokerIds = brokersWithPartitions(kafkaCluster);
for (Node aliveNode : kafkaCluster.nodes()) {
deadBrokerIds.remove(aliveNode.id());
}
return deadBrokerIds;
}
/**
 * Flatten the per-partition extrapolation maps into lists of {@link SampleExtrapolation}
 * keyed by {@link TopicPartition}. Partitions without extrapolations are omitted.
 */
private Map<TopicPartition, List<SampleExtrapolation>> partitionSampleExtrapolations(Map<PartitionEntity, ValuesAndExtrapolations> valuesAndExtrapolations) {
Map<TopicPartition, List<SampleExtrapolation>> result = new HashMap<>();
for (Map.Entry<PartitionEntity, ValuesAndExtrapolations> entry : valuesAndExtrapolations.entrySet()) {
Map<Integer, Extrapolation> extrapolationsByWindow = entry.getValue().extrapolations();
if (extrapolationsByWindow.isEmpty()) {
continue;
}
TopicPartition tp = entry.getKey().tp();
List<SampleExtrapolation> extrapolationsForPartition = result.computeIfAbsent(tp, ignored -> new ArrayList<>());
for (Map.Entry<Integer, Extrapolation> windowed : extrapolationsByWindow.entrySet()) {
extrapolationsForPartition.add(new SampleExtrapolation(windowed.getKey(), windowed.getValue()));
}
}
return result;
}
/**
 * True while the sensor updater has refreshed the sensor fields within
 * {@link SensorUpdater#UPDATE_TIMEOUT_MS}; stale values are reported as invalid.
 * Extracted so the four gauges below share one staleness rule instead of four copies.
 */
private boolean sensorValuesFresh() {
return _lastUpdate + SensorUpdater.UPDATE_TIMEOUT_MS > System.currentTimeMillis();
}
// Gauge: number of valid snapshot windows, or -1 when stale.
private int numValidSnapshotWindows() {
return sensorValuesFresh() ? _numValidSnapshotWindows : -1;
}
// Gauge: total monitored snapshot windows, or -1 when stale.
private int totalMonitoredSnapshotWindows() {
return sensorValuesFresh() ? _totalMonitoredSnapshotWindows : -1;
}
// Gauge: monitored partitions percentage, or 0.0 when stale.
private double monitoredPartitionsPercentage() {
return sensorValuesFresh() ? _monitoredPartitionsPercentage : 0.0;
}
// Gauge: partitions needing extrapolation, or -1 when stale.
private int numPartitionsWithExtrapolations() {
return sensorValuesFresh() ? _numPartitionsWithExtrapolations : -1;
}
/**
 * Compute the fraction of partitions whose load is currently valid (0.0 when no windows
 * are valid or the cluster has no partitions).
 *
 * NOTE: also updates the {@code _numPartitionsWithExtrapolations} sensor field as a
 * side effect — called from the sensor updater.
 */
private double getMonitoredPartitionsPercentage() {
MetadataClient.ClusterAndGeneration clusterAndGeneration = _metadataClient.refreshMetadata();
Cluster kafkaCluster = clusterAndGeneration.cluster();
MetricSampleAggregationResult<String, PartitionEntity> metricSampleAggregationResult;
try {
metricSampleAggregationResult = _partitionMetricSampleAggregator.aggregate(clusterAndGeneration,
System.currentTimeMillis(),
new OperationProgress());
} catch (NotEnoughValidWindowsException e) {
// Not an error: simply nothing monitored yet.
return 0.0;
}
Map<PartitionEntity, ValuesAndExtrapolations> partitionLoads = metricSampleAggregationResult.valuesAndExtrapolations();
AtomicInteger numPartitionsWithExtrapolations = new AtomicInteger(0);
partitionLoads.values().forEach(valuesAndExtrapolations -> {
if (!valuesAndExtrapolations.extrapolations().isEmpty()) {
numPartitionsWithExtrapolations.incrementAndGet();
}
});
_numPartitionsWithExtrapolations = numPartitionsWithExtrapolations.get();
int totalNumPartitions = MonitorUtils.totalNumPartitions(kafkaCluster);
return totalNumPartitions > 0 ? metricSampleAggregationResult.completeness().validEntityRatio() : 0.0;
}
/**
 * We have a separate class to update values for sensors. Runs on the single-threaded
 * scheduled executor created in the constructor.
 */
private class SensorUpdater implements Runnable {
// The interval for sensor value update.
static final long UPDATE_INTERVAL_MS = 30000;
// The maximum time allowed to make an update. If the sensor value cannot be updated in time, the sensor value
// will be invalidated.
static final long UPDATE_TIMEOUT_MS = 10 * UPDATE_INTERVAL_MS;
@Override
public void run() {
try {
MetadataClient.ClusterAndGeneration clusterAndGeneration = _metadataClient.clusterAndGeneration();
double minMonitoredPartitionsPercentage = _defaultModelCompletenessRequirements.minMonitoredPartitionsPercentage();
_numValidSnapshotWindows = _partitionMetricSampleAggregator.validWindows(clusterAndGeneration,
minMonitoredPartitionsPercentage)
.size();
_monitoredPartitionsPercentage = getMonitoredPartitionsPercentage();
_totalMonitoredSnapshotWindows = _partitionMetricSampleAggregator.allWindows().size();
// Record the refresh time last, so the gauges only trust fully updated values.
_lastUpdate = System.currentTimeMillis();
} catch (Throwable t) {
// We catch all the throwables because we don't want the sensor updater to die
// (an uncaught throwable would cancel the periodic task).
LOG.warn("Load monitor sensor updater received exception ", t);
}
}
}
/**
 * An {@link AutoCloseable} handle for the cluster model semaphore so callers can use
 * try-with-resources to guarantee the permit and the per-thread flag are released.
 * Idempotent: repeated {@code close()} calls release the permit at most once.
 */
public class AutoCloseableSemaphore implements AutoCloseable {
// final: the guard is assigned once and only flipped via compareAndSet below.
private final AtomicBoolean _closed = new AtomicBoolean(false);
@Override
public void close() {
// Narrowed from 'throws Exception' — nothing here throws a checked exception,
// and the narrower signature is source-compatible for callers.
if (_closed.compareAndSet(false, true)) {
_clusterModelSemaphore.release();
_acquiredClusterModelSemaphore.set(false);
}
}
}
}
| |
package com.cloudhopper.commons.util.codec;
/*
* #%L
* ch-commons-util
* %%
* Copyright (C) 2012 Cloudhopper by Twitter
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import java.nio.ByteBuffer;
import java.text.ParseException;
/**
* Code originally copied from the OpenDS Java project.
*
* http://www.opends.org/
*
* This class removes the dependency on the Jakarta commons-codec library.
* Since this utility package is used in every Cloudhopper Java project,
* providing our own implementation (that's also much faster) allows the removal
* of a transitive dependency for every Cloudhopper Java project :-)
*
* This class provides methods for performing base64 encoding and decoding.
* Base64 is a mechanism for encoding binary data in ASCII form by converting
* sets of three bytes with eight significant bits each to sets of four bytes
* with six significant bits each.
*
* @author joelauer (twitter: @jjlauer or <a href="http://twitter.com/jjlauer" target=window>http://twitter.com/jjlauer</a>)
*/
public class Base64Codec {
/**
 * The set of characters that may be used in base64-encoded values.
 * Standard RFC 4648 alphabet: A-Z, a-z, 0-9, '+', '/'; index = 6-bit value.
 */
private static final char[] BASE64_ALPHABET =
("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" +
"0123456789+/").toCharArray();
/**
 * Prevent instance creation — this is a static utility class.
 */
private Base64Codec() {
// No implementation required.
}
/**
 * Encodes the provided raw data using base64 (standard alphabet, '=' padding).
 * @param rawData The raw data to encode. It must not be <CODE>null</CODE>.
 * @return The base64-encoded representation of the provided raw data.
 */
public static String encode(byte[] rawData) {
// Exact output length: every 3 input bytes (rounded up) become 4 output chars,
// including padding. The old estimate (4*len/3) undercounted for padded tails,
// forcing a StringBuilder reallocation; output is unchanged.
StringBuilder buffer = new StringBuilder(4 * ((rawData.length + 2) / 3));
int pos = 0;
int iterations = rawData.length / 3;
for (int i = 0; i < iterations; i++) {
// Pack three bytes into a 24-bit value, then emit four 6-bit digits.
int value = ((rawData[pos++] & 0xFF) << 16) |
((rawData[pos++] & 0xFF) << 8) | (rawData[pos++] & 0xFF);
buffer.append(BASE64_ALPHABET[(value >>> 18) & 0x3F]);
buffer.append(BASE64_ALPHABET[(value >>> 12) & 0x3F]);
buffer.append(BASE64_ALPHABET[(value >>> 6) & 0x3F]);
buffer.append(BASE64_ALPHABET[value & 0x3F]);
}
// Handle the 1- or 2-byte tail with '=' padding. The sign-extended shifts are safe:
// the masks keep only the bits that came from the input byte(s).
switch (rawData.length % 3) {
case 1:
buffer.append(BASE64_ALPHABET[(rawData[pos] >>> 2) & 0x3F]);
buffer.append(BASE64_ALPHABET[(rawData[pos] << 4) & 0x3F]);
buffer.append("==");
break;
case 2:
int value = ((rawData[pos++] & 0xFF) << 8) | (rawData[pos] & 0xFF);
buffer.append(BASE64_ALPHABET[(value >>> 10) & 0x3F]);
buffer.append(BASE64_ALPHABET[(value >>> 4) & 0x3F]);
buffer.append(BASE64_ALPHABET[(value << 2) & 0x3F]);
buffer.append("=");
break;
}
return buffer.toString();
}
/**
* Decodes the provided set of base64-encoded data.
* @param encodedData The base64-encoded data to decode. It must not be
* <CODE>null</CODE>.
* @return The decoded raw data.
* @throws ParseException If a problem occurs while attempting to decode the
* provided data.
*/
public static byte[] decode(String encodedData) throws ParseException {
// The encoded value must have length that is a multiple of four bytes.
int length = encodedData.length();
if ((length % 4) != 0) {
//Message message = ERR_BASE64_DECODE_INVALID_LENGTH.get(encodedData);
throw new ParseException("Base64 data was not 4-byte aligned", 0);
}
ByteBuffer buffer = ByteBuffer.allocate(length);
for (int i = 0; i < length; i += 4) {
boolean append = true;
int value = 0;
for (int j = 0; j < 4; j++) {
switch (encodedData.charAt(i + j)) {
case 'A':
value <<= 6;
break;
case 'B':
value = (value << 6) | 0x01;
break;
case 'C':
value = (value << 6) | 0x02;
break;
case 'D':
value = (value << 6) | 0x03;
break;
case 'E':
value = (value << 6) | 0x04;
break;
case 'F':
value = (value << 6) | 0x05;
break;
case 'G':
value = (value << 6) | 0x06;
break;
case 'H':
value = (value << 6) | 0x07;
break;
case 'I':
value = (value << 6) | 0x08;
break;
case 'J':
value = (value << 6) | 0x09;
break;
case 'K':
value = (value << 6) | 0x0A;
break;
case 'L':
value = (value << 6) | 0x0B;
break;
case 'M':
value = (value << 6) | 0x0C;
break;
case 'N':
value = (value << 6) | 0x0D;
break;
case 'O':
value = (value << 6) | 0x0E;
break;
case 'P':
value = (value << 6) | 0x0F;
break;
case 'Q':
value = (value << 6) | 0x10;
break;
case 'R':
value = (value << 6) | 0x11;
break;
case 'S':
value = (value << 6) | 0x12;
break;
case 'T':
value = (value << 6) | 0x13;
break;
case 'U':
value = (value << 6) | 0x14;
break;
case 'V':
value = (value << 6) | 0x15;
break;
case 'W':
value = (value << 6) | 0x16;
break;
case 'X':
value = (value << 6) | 0x17;
break;
case 'Y':
value = (value << 6) | 0x18;
break;
case 'Z':
value = (value << 6) | 0x19;
break;
case 'a':
value = (value << 6) | 0x1A;
break;
case 'b':
value = (value << 6) | 0x1B;
break;
case 'c':
value = (value << 6) | 0x1C;
break;
case 'd':
value = (value << 6) | 0x1D;
break;
case 'e':
value = (value << 6) | 0x1E;
break;
case 'f':
value = (value << 6) | 0x1F;
break;
case 'g':
value = (value << 6) | 0x20;
break;
case 'h':
value = (value << 6) | 0x21;
break;
case 'i':
value = (value << 6) | 0x22;
break;
case 'j':
value = (value << 6) | 0x23;
break;
case 'k':
value = (value << 6) | 0x24;
break;
case 'l':
value = (value << 6) | 0x25;
break;
case 'm':
value = (value << 6) | 0x26;
break;
case 'n':
value = (value << 6) | 0x27;
break;
case 'o':
value = (value << 6) | 0x28;
break;
case 'p':
value = (value << 6) | 0x29;
break;
case 'q':
value = (value << 6) | 0x2A;
break;
case 'r':
value = (value << 6) | 0x2B;
break;
case 's':
value = (value << 6) | 0x2C;
break;
case 't':
value = (value << 6) | 0x2D;
break;
case 'u':
value = (value << 6) | 0x2E;
break;
case 'v':
value = (value << 6) | 0x2F;
break;
case 'w':
value = (value << 6) | 0x30;
break;
case 'x':
value = (value << 6) | 0x31;
break;
case 'y':
value = (value << 6) | 0x32;
break;
case 'z':
value = (value << 6) | 0x33;
break;
case '0':
value = (value << 6) | 0x34;
break;
case '1':
value = (value << 6) | 0x35;
break;
case '2':
value = (value << 6) | 0x36;
break;
case '3':
value = (value << 6) | 0x37;
break;
case '4':
value = (value << 6) | 0x38;
break;
case '5':
value = (value << 6) | 0x39;
break;
case '6':
value = (value << 6) | 0x3A;
break;
case '7':
value = (value << 6) | 0x3B;
break;
case '8':
value = (value << 6) | 0x3C;
break;
case '9':
value = (value << 6) | 0x3D;
break;
case '+':
value = (value << 6) | 0x3E;
break;
case '/':
value = (value << 6) | 0x3F;
break;
case '=':
append = false;
switch (j) {
case 2:
buffer.put((byte) ((value >>> 4) & 0xFF));
break;
case 3:
buffer.put((byte) ((value >>> 10) & 0xFF));
buffer.put((byte) ((value >>> 2) & 0xFF));
break;
}
break;
default:
//Message message = ERR_BASE64_DECODE_INVALID_CHARACTER.get(encodedData, encodedData.charAt(i + j));
throw new ParseException("Invalid Base64 character '" + encodedData.charAt(i + j) + "'", i+j);
}
if (!append) {
break;
}
}
if (append) {
buffer.put((byte) ((value >>> 16) & 0xFF));
buffer.put((byte) ((value >>> 8) & 0xFF));
buffer.put((byte) (value & 0xFF));
} else {
break;
}
}
buffer.flip();
byte[] returnArray = new byte[buffer.limit()];
buffer.get(returnArray);
return returnArray;
}
}
| |
/*******************************************************************************
* Copyright 2014 Christoffer Hindelid. http://www.hindelid.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.hindelid.ld.thirtyfour;
import com.badlogic.gdx.ApplicationAdapter;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.Input;
import com.badlogic.gdx.audio.Sound;
import com.badlogic.gdx.graphics.Color;
import com.badlogic.gdx.graphics.GL20;
import com.badlogic.gdx.graphics.OrthographicCamera;
import com.badlogic.gdx.graphics.glutils.ShapeRenderer;
import com.badlogic.gdx.math.Vector2;
import com.badlogic.gdx.utils.TimeUtils;
import com.badlogic.gdx.utils.viewport.ExtendViewport;
import com.badlogic.gdx.utils.viewport.Viewport;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
/**
 * Core game class (libGDX {@link ApplicationAdapter}): a tree branch grows
 * upward while the player collects fish (points and HP) and avoids octopuses
 * (damage). Holds the render loop, per-frame tick logic, spawning, and
 * camera movement.
 */
public class Main extends ApplicationAdapter {
    // Camera/viewport pair; ExtendViewport keeps world units while extending
    // the view to match the window's aspect ratio.
    private Viewport mViewPort;
    private OrthographicCamera mCamera;
    // All world geometry is rendered as line shapes through this renderer.
    private ShapeRenderer mShapeRenderer;
    // Current root of the branch tree; rendering and collision start here.
    private TreeBranch mRoot;
    // Written elsewhere (presumably by TreeBranch when the tree wants a new
    // root to take over — TODO confirm the writer); consumed once per tick
    // by checkForNewRoot().
    public static TreeBranch mNextRoot = null;
    // World coordinate the camera is currently centered on.
    private Vector2 mCurrentViewCord;
    private List<Fish> mFishes = new ArrayList<Fish>();
    private List<Octopus> mOctopuses = new ArrayList<Octopus>();
    private HUDDisplay mHUDDisplay;
    // Ring buffer of the last 10 tree-top x positions; averaged to smooth the
    // camera's horizontal movement (see getAvgOfLast10X()).
    private float mAvgX[] = new float[10];
    private int mAvgXpos = 0;
    private static Sound mStartSound;
    private static Sound mDeadSound;
    private static Sound mOctoSound;
    private static Sound mFishSound;
    // Upward scroll speed per frame; forced to 0 while dead.
    private float mSpeed;
    private float mTotalPoints = 0f;
    private boolean mDead = false;

    /**
     * Creates the renderer, HUD, camera and sounds, then starts a fresh game.
     */
    @Override
    public void create() {
        mShapeRenderer = new ShapeRenderer();
        mHUDDisplay = new HUDDisplay();
        mCamera = new OrthographicCamera();
        mViewPort = new ExtendViewport(Constants.VIEW_SIZE_X, Constants.VIEW_SIZE_Y, mCamera);
        mCurrentViewCord = new Vector2(Constants.VIEW_SIZE_X / 2, Constants.VIEW_SIZE_Y / 2);
        moveAndUpdateCamera();
        mStartSound = Gdx.audio.newSound(Gdx.files.internal("start.wav"));
        mDeadSound = Gdx.audio.newSound(Gdx.files.internal("dead.wav"));
        mOctoSound = Gdx.audio.newSound(Gdx.files.internal("octo.wav"));
        mFishSound = Gdx.audio.newSound(Gdx.files.internal("fish.wav"));
        resetGame();
    }

    /**
     * Releases HUD and sound resources.
     */
    @Override
    public void dispose() {
        mHUDDisplay.dispose();
        mStartSound.dispose();
        mDeadSound.dispose();
        mOctoSound.dispose();
        mFishSound.dispose();
        super.dispose();
    }

    /**
     * Propagates the new window size to the viewport and the HUD.
     */
    @Override
    public void resize(int aWidth, int aHeight) {
        mViewPort.update(aWidth, aHeight);
        mHUDDisplay.resize(aWidth, aHeight);
    }

    /**
     * Resets all mutable game state (tree, camera, speed, HUD, score) and
     * plays the start sound. Called from create() and again after game over.
     */
    private void resetGame() {
        // Zero the camera-smoothing window; until 10 frames have passed the
        // average is therefore biased toward x = 0.
        for(int i=0;i<10;i++) {
            mAvgX[i] = 0;
        }
        // New root branch growing straight up from the bottom center.
        mRoot = new TreeBranch(
                new Vector2(Constants.VIEW_SIZE_X / 2f, 0),
                new Vector2(Constants.VIEW_SIZE_X / 2f, 0.5f),
                true,
                1);
        mNextRoot = null;
        mCamera.zoom = 1.0f;
        TreeBranch.sGlobal.setZero();
        mSpeed = Constants.SPEED;
        mHUDDisplay.reset();
        mDead = false;
        mTotalPoints = 0;
        mStartSound.play();
    }

    /**
     * Per-frame callback: advances the game state via tick(), then draws the
     * background, tree, leaves, fish, octopuses and HUD overlays.
     */
    @Override
    public void render() {
        // 'before'/'after' only feed the commented-out timing log below;
        // leftover debug instrumentation.
        long before = TimeUtils.nanoTime();
        tick();
        // Background color fades with altitude: black->blue (0..1000),
        // blue->magenta (1000..2000), then darkens toward black (..3000).
        if (TreeBranch.sGlobal.y < 1000f) {
            Gdx.gl.glClearColor(0, 0, TreeBranch.sGlobal.y / 1000f, 1);
        } else if (TreeBranch.sGlobal.y < 2000f) {
            Gdx.gl.glClearColor((TreeBranch.sGlobal.y-1000f) / 1000f, 0, 1, 1);
        } else {
            Gdx.gl.glClearColor((3000f - TreeBranch.sGlobal.y) / 1000f, 0, (3000f - TreeBranch.sGlobal.y) / 1000f, 1);
        }
        Gdx.gl.glClear(GL20.GL_COLOR_BUFFER_BIT);
        mShapeRenderer.setProjectionMatrix(mCamera.combined);
        mShapeRenderer.begin(ShapeRenderer.ShapeType.Line);
        mShapeRenderer.setColor(Color.BROWN);
        mRoot.render(mShapeRenderer);
        mShapeRenderer.setColor(Color.GREEN);
        mRoot.renderLeefs(mShapeRenderer);
        mShapeRenderer.setColor(Color.WHITE);
        for (Fish f : mFishes) {
            f.render(mShapeRenderer);
        }
        for (Octopus o : mOctopuses) {
            o.render(mShapeRenderer);
        }
        mShapeRenderer.end();
        mHUDDisplay.render((int)mTotalPoints);
        long after = TimeUtils.nanoTime();
        //System.out.println("speed:" + mSpeed + " y:" + TreeBranch.sGlobal.y + " Time:" + (after - before) / 1000); //TODO remove
        // Show the start screen only near the very beginning of a run.
        if (TreeBranch.sGlobal.y < 15f) {
            mHUDDisplay.renderStartScreen();
        }
        if (mDead) {
            // renderGameOver returns true when the player asked to restart.
            if (mHUDDisplay.renderGameOver((int)mTotalPoints)) {
                resetGame();
            }
        }
        if (Gdx.input.isKeyJustPressed(Input.Keys.ESCAPE)) {
            Gdx.app.exit();
        }
    }

    /**
     * Advances one frame of game logic. Order matters: root swap first, then
     * spawning, then camera/scroll advance, then branch split, then collision
     * handling and cleanup, and finally the speed update for the next frame.
     */
    private void tick() {
        checkForNewRoot();
        spawnNewStuff();
        moveAndUpdateCamera();
        // TreeBranch signals a pending split through the static sNext flag.
        if (TreeBranch.sNext) {
            TreeBranch.sNext = false;
            mRoot.split();
        }
        // Fish collision: +10 points, +1 HP, remove the fish.
        Iterator<Fish> fishIter = mFishes.iterator();
        while (fishIter.hasNext()) {
            if (mRoot.checkCollision(fishIter.next().mBoundingBox)) {
                mTotalPoints += 10;
                mFishSound.play();
                fishIter.remove();
                mHUDDisplay.incHP();
            }
        }
        // Octopus collision: -10 points, -1 HP; death when HP runs out.
        Iterator<Octopus> octoIter = mOctopuses.iterator();
        while (octoIter.hasNext()) {
            if (mRoot.checkCollision(octoIter.next().mBoundingBox)) {
                octoIter.remove();
                mTotalPoints -= 10;
                if (mHUDDisplay.decHP()) {
                    mDead = true;
                    mDeadSound.play();
                } else {
                    mOctoSound.play();
                }
            }
        }
        removeObsticles();
        if (mDead) {
            mSpeed = 0;
        } else {
            // Scroll speed slowly ramps up with altitude.
            mSpeed = Constants.SPEED + TreeBranch.sGlobal.y/2000f;
        }
    }

    /**
     * To avoid unnecessary CPU time when change the root.
     * Swaps in the externally published replacement root, if any.
     */
    private void checkForNewRoot() {
        if (mNextRoot != null && mNextRoot != mRoot) {
            mRoot = mNextRoot;
        }
    }

    /**
     * Randomly spawns fish and octopuses above the current view. Fish become
     * rarer with altitude; octopuses become more common.
     */
    private void spawnNewStuff() {
        if (TreeBranch.sGlobal.y < 10f) { //Don't spawn anything in the beginning.
            return;
        }
        float ypos = TreeBranch.sGlobal.y;
        if ((ypos * 150f / 800f + 30f) * Constants.sRandom.nextFloat() < 1f) {
            spawnFishAbove();
        }
        if (Constants.sRandom.nextFloat() * 100f - (50f * ypos / 1024f) < 1f) {
            spawnOctoAbove();
        }
    }

    /**
     * Despawns fish and octopuses that have scrolled more than 10 units below
     * the current global height and can no longer be reached.
     */
    private void removeObsticles() {
        Iterator<Fish> fishIter = mFishes.iterator();
        while (fishIter.hasNext()) {
            if (fishIter.next().mPos.y < TreeBranch.sGlobal.y - 10f) {
                fishIter.remove();
            }
        }
        Iterator<Octopus> octoIter = mOctopuses.iterator();
        while (octoIter.hasNext()) {
            if (octoIter.next().mPos.y < TreeBranch.sGlobal.y - 10f) {
                octoIter.remove();
            }
        }
    }

    /**
     * Spawns a fish at a random x near the tree top, 10 units above the
     * current height — unless it would overlap an existing fish or octopus.
     */
    private void spawnFishAbove() {
        boolean collided = false;
        Fish fish = new Fish(Constants.sRandom.nextFloat() * 30f - 15f + TreeBranch.sGlobal.x, TreeBranch.sGlobal.y + 10f);
        for (Octopus o : mOctopuses) {
            if (o.mBoundingBox.overlaps(fish.mBoundingBox)) {
                collided = true;
            }
        }
        for (Fish f : mFishes) {
            if (f.mBoundingBox.overlaps(fish.mBoundingBox)) {
                collided = true;
            }
        }
        if (!collided) {
            mFishes.add(fish);
        }
    }

    /**
     * Spawns an octopus at a random x near the tree top, 15 units above the
     * current height — unless it would overlap an existing fish or octopus.
     */
    private void spawnOctoAbove() {
        boolean collided = false;
        Octopus octopus = new Octopus(Constants.sRandom.nextFloat() * 30f - 15f + TreeBranch.sGlobal.x, TreeBranch.sGlobal.y + 15f);
        for (Octopus o : mOctopuses) {
            if (o.mBoundingBox.overlaps(octopus.mBoundingBox)) {
                collided = true;
            }
        }
        for (Fish f : mFishes) {
            if (f.mBoundingBox.overlaps(octopus.mBoundingBox)) {
                collided = true;
            }
        }
        if (!collided) {
            mOctopuses.add(octopus);
        }
    }

    /**
     * Advances the global scroll position by the current speed, accrues
     * distance points, slowly zooms the camera out with altitude, and centers
     * the camera on the smoothed x position.
     */
    private void moveAndUpdateCamera() {
        TreeBranch.sGlobal.y += mSpeed;
        mTotalPoints += mSpeed;
        // NOTE(review): the second and third clauses share the same zoom cap
        // (5f), so the y > 400f test is redundant as written — possibly a
        // different cap was intended; confirm before changing.
        if ((TreeBranch.sGlobal.y > 10f && mCamera.zoom < 3f) ||
                (TreeBranch.sGlobal.y > 200f && mCamera.zoom < 5f)||
                (TreeBranch.sGlobal.y > 400f && mCamera.zoom < 5f)) {
            mCamera.zoom += 0.01f;
        }
        mCurrentViewCord.set(getAvgOfLast10X(),TreeBranch.sGlobal.y);
        mCamera.position.set(mCurrentViewCord, 0);
        mCamera.update();
    }

    /**
     * Records the current tree-top x into the 10-slot ring buffer and returns
     * the buffer's mean, smoothing horizontal camera motion over 10 frames.
     */
    private float getAvgOfLast10X() {
        mAvgX[mAvgXpos++] = TreeBranch.sGlobal.x;
        mAvgXpos %= 10;
        float avg = 0;
        for (int i=0; i< 10; i++) {
            avg += mAvgX[i];
        }
        avg /= 10f;
        return avg;
    }
}
| |
package ca.appsimulations.jlqninterface.lqn.model.writer;
import ca.appsimulations.jlqninterface.lqn.entities.*;
import ca.appsimulations.jlqninterface.lqn.model.LqnModel;
import ca.appsimulations.jlqninterface.lqn.model.LqnXmlDetails;
import ca.appsimulations.jlqninterface.lqn.model.SolverParams;
import lombok.extern.slf4j.Slf4j;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.transform.OutputKeys;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Map;
import java.util.SortedSet;
import java.util.TreeSet;
import static ca.appsimulations.jlqninterface.lqn.model.handler.LqnXmlAttributes.*;
import static ca.appsimulations.jlqninterface.lqn.model.handler.LqnXmlElements.*;
import static java.util.stream.Collectors.toMap;
@Slf4j
public class LqnModelWriter {
public static void write(LqnModel lqnModel, String outputPath) {
DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance();
documentBuilderFactory.setNamespaceAware(true);
DocumentBuilder documentBuilder;
try {
documentBuilder = documentBuilderFactory.newDocumentBuilder();
Document doc = documentBuilder.newDocument();
Element lqnModelRoot = appendLqnModel(doc, lqnModel.xmlDetails());
appendSolverParams(doc, lqnModelRoot, lqnModel.solverParams());
appendProcessors(doc, lqnModelRoot, lqnModel.processors());
writeDocument(doc, outputPath);
}
catch (Exception e) {
e.printStackTrace();
}
}
private static Element appendLqnModel(Document doc, LqnXmlDetails lqnXmlDetails) {
Element lqnModelRoot = doc.createElement(LQN_MODEL.value());
lqnModelRoot.setAttribute(XMLNS_XSI.value(), lqnXmlDetails.xmlnsXsi());
lqnModelRoot.setAttribute(NAME.value(), lqnXmlDetails.name());
lqnModelRoot.setAttribute(DESCRIPTION.value(), lqnXmlDetails.description());
lqnModelRoot.setAttribute(SCHEMA_LOCATION.value(), lqnXmlDetails.schemaLocation());
doc.appendChild(lqnModelRoot);
return lqnModelRoot;
}
private static void appendSolverParams(Document doc,
Element lqnModelRoot,
SolverParams solverParams) {
Element solverParamsElem = doc.createElement(SOLVER_PARAMS.value());
solverParamsElem.setAttribute(COMMENT.value(), solverParams.comment());
solverParamsElem.setAttribute(CONVERGENCE_VALUE.value(), Double.toString(solverParams.convergence()));
solverParamsElem.setAttribute(ITERATION_LIMIT.value(), Integer.toString(solverParams.iterationLimit()));
solverParamsElem.setAttribute(UNDER_RELAX_COEFF.value(), Double.toString(solverParams.underRelaxCoeff()));
solverParamsElem.setAttribute(PRINT_INTERVAL.value(), Integer.toString(solverParams.printInterval()));
lqnModelRoot.appendChild(solverParamsElem);
}
private static void appendProcessors(Document doc, Element lqnModelRoot, ArrayList<Processor> processors) {
processors.forEach(processor -> {
Element processorElem = doc.createElement(PROCESSOR.value());
processorElem.setAttribute(NAME.value(), processor.getName());
processorElem.setAttribute(SCHEDULING.value(), processor.getScheduling().value());
if (processor.getMultiplicity() > 1 &&
processor.getScheduling().equals(ProcessorSchedulingType.INF) == false) {
processorElem.setAttribute(MULTIPLICITY.value(), processor.getMutiplicityString());
}
if (processor.getScheduling().equals(ProcessorSchedulingType.PS)) {
processorElem.setAttribute(QUANTUM.value(), Double.toString(processor.getQuantum()));
}
if (processor.getReplication() > 1 &&
processor.getScheduling().equals(ProcessorSchedulingType.INF) == false) {
processorElem.setAttribute(REPLICATION.value(), Integer.toString(processor.getReplication()));
}
appendTasks(doc, processorElem, processor.getTasks());
lqnModelRoot.appendChild(processorElem);
});
}
private static void appendTasks(Document doc, Element processorElem, ArrayList<Task> tasks) {
tasks.forEach(task -> {
Element taskElem = doc.createElement(TASK.value());
taskElem.setAttribute(NAME.value(), task.getName());
taskElem.setAttribute(MULTIPLICITY.value(), task.getMutiplicityString());
taskElem.setAttribute(SCHEDULING.value(), task.getScheduling().value());
if (task.getReplication() > 1 && task.isRefTask() == false) {
taskElem.setAttribute(REPLICATION.value(), Integer.toString(task.getReplication()));
}
appendEntries(doc, taskElem, task.getEntries(), task.isRefTask());
appendFanIn(doc, taskElem, task.getFanInMap());
appendFanOut(doc, taskElem, task.getFanOutMap());
processorElem.appendChild(taskElem);
});
}
private static void appendFanOut(Document doc, Element taskElem, Map<Task, Integer> fanOutMap) {
Map<String, Integer> fanOuts =
fanOutMap.entrySet().stream().collect(toMap(entry -> entry.getKey().getName(),
entry -> entry.getValue()));
SortedSet<String> keys = new TreeSet<String>(fanOuts.keySet());
keys.stream().forEach(key -> {
Element fanOutElem = doc.createElement(FAN_OUT.value());
fanOutElem.setAttribute(DEST.value(), key);
fanOutElem.setAttribute(VALUE.value(), fanOuts.get(key).toString());
taskElem.appendChild(fanOutElem);
});
}
private static void appendFanIn(Document doc, Element taskElem, Map<Task, Integer> fanInMap) {
Map<String, Integer> fanIns =
fanInMap.entrySet().stream().collect(toMap(entry -> entry.getKey().getName(),
entry -> entry.getValue()));
SortedSet<String> keys = new TreeSet<String>(fanIns.keySet());
keys.stream().forEach(key -> {
Element fanInElem = doc.createElement(FAN_IN.value());
fanInElem.setAttribute(SOURCE.value(), key);
fanInElem.setAttribute(VALUE.value(), fanIns.get(key).toString());
taskElem.appendChild(fanInElem);
});
}
private static void appendEntries(Document doc, Element taskElem, ArrayList<Entry> entries, boolean refTask) {
entries.forEach(entry -> {
Element entryElem = doc.createElement(ENTRY.value());
entryElem.setAttribute(NAME.value(), entry.getName());
entryElem.setAttribute(TYPE.value(), entry.getEntryType().value());
appendEntryPhaseActivities(doc, entryElem, entry.getEntryPhaseActivities(), refTask);
taskElem.appendChild(entryElem);
});
}
private static void appendEntryPhaseActivities(Document doc,
Element entryElem,
EntryPhaseActivities entryPhaseActivities, boolean refTask) {
Element entryPhaseElem = doc.createElement(ENTRY_PHASE_ACTIVITIES.value());
appendActivities(doc, entryPhaseElem, entryPhaseActivities.getActivityAtPhase(1), refTask);
entryElem.appendChild(entryPhaseElem);
}
private static void appendActivities(Document doc,
Element entryPhaseElem,
ActivityPhases activity,
boolean refTask) {
Element activityElem = doc.createElement(ACTIVITY.value());
activityElem.setAttribute(NAME.value(), activity.getName());
activityElem.setAttribute(PHASE.value(), Integer.toString(activity.getPhase()));
activityElem.setAttribute(HOST_DEMAND_MEAN.value(), Double.toString(activity.getHost_demand_mean()));
if (refTask == false && activity.getThinkTime() > 0) {
throw new IllegalArgumentException("non ref task cannot have think time > 0");
}
else if (activity.getThinkTime() > 0) {
activityElem.setAttribute(THINK_TIME.value(), Double.toString(activity.getThinkTime()));
}
appendSynchCalls(doc, activityElem, activity);
entryPhaseElem.appendChild(activityElem);
}
private static void appendSynchCalls(Document doc, Element activityElem, ActivityPhases activity) {
activity.getSynchCalls().forEach(synchCall -> {
Element synchCallElem = doc.createElement(SYNCH_CALL.value());
synchCallElem.setAttribute(DEST.value(), synchCall.getStrDestEntry());
synchCallElem.setAttribute(CALLS_MEAN.value(), Double.toString(synchCall.getCallsMean()));
activityElem.appendChild(synchCallElem);
});
}
private static void writeDocument(Document doc, String outputPath)
throws TransformerException {
Transformer transformer = TransformerFactory.newInstance().newTransformer();
transformer.setOutputProperty(OutputKeys.INDENT, "yes");
transformer.setOutputProperty("{http://xml.apache.org/xslt}indent-amount", "2");
DOMSource source = new DOMSource(doc);
File file = new File(outputPath);
try {
file.createNewFile();
}
catch (IOException e) {
e.printStackTrace();
file = null;
}
FileOutputStream fileOutputStream;
try {
if (file != null) {
fileOutputStream = new FileOutputStream(file);
log.info("writing lqn model to file: " + file.getAbsolutePath());
StreamResult console = new StreamResult(fileOutputStream);
transformer.transform(source, console);
log.info("document created");
}
}
catch (FileNotFoundException e) {
e.printStackTrace();
}
}
}
| |
/**
*
*/
package gov.va.med;
import gov.va.med.imaging.exceptions.OIDFormatException;
import java.io.Serializable;
import java.util.Arrays;
import java.util.regex.Pattern;
/**
* An object identifier implementation consistent with RFC3061
*
* @see http://www.ietf.org/rfc/rfc3061.txt
* @author vhaiswbeckec
*
* From RFC3061:
* The NSS portion of the identifier is based on the string encoding
* rules found in RFC 1778 Section 2.15 [4] which specifies a series
* of digits separated by a period with the most significant digit
* being at the left and the least significant being at the right.
* At no time shall the NSS portion of the URN contain the human
* readable description of a particular node in the OID tree. The
* NSS portion of the name is strictly limited to the digits 0-9 and
* the '.' character with no leading zeros. No other characters are
* permitted. This is all expressed in the following ABNF:
*
* oid = number *( DOT number )
* number = DIGIT / ( LEADDIGIT 1*DIGIT )
* LEADDIGIT = %x31-39 ; 1-9
* DIGIT = %x30 / LEADDIGIT ; 0-9
* DOT = %x2E ; period
*/
public class OID
implements Comparable<OID>, Serializable
{
	private static final long serialVersionUID = 1L;

	/**
	 * A single OID group (arc): either "0" or a decimal number with no
	 * leading zeros, per the ABNF quoted in the class javadoc
	 * ("number = DIGIT / ( LEADDIGIT 1*DIGIT )"). The original pattern
	 * rejected the valid single-digit group "0" (e.g. 2.5.4.0).
	 */
	public final static String groupRegex = "0|[1-9][0-9]*";
	public final static Pattern groupPattern = Pattern.compile(groupRegex);

	/**
	 * A complete OID: one group followed by zero or more dot-separated
	 * groups. The original regex embedded the literal text "0x2E" instead
	 * of the regex escape {@code \x2E} for the '.' character, so this public
	 * pattern could never match a dotted OID.
	 */
	public final static String OID_REGEX = "(0|[1-9][0-9]*)((?:\\x2E(?:0|[1-9][0-9]*))*)";
	public final static Pattern OID_PATTERN = Pattern.compile(OID_REGEX);

	/**
	 * Creates an OID with no description from its dotted-decimal form.
	 *
	 * @param oidAsString dotted-decimal OID, e.g. "1.2.3.4.56"
	 * @return a new OID instance
	 * @throws OIDFormatException if the string is not a valid OID
	 */
	public static OID create(String oidAsString)
	throws OIDFormatException
	{
		return create(oidAsString, null);
	}

	/**
	 * Creates an OID with an optional human-readable description.
	 *
	 * @param oidAsString dotted-decimal OID, e.g. "1.2.3.4.56"
	 * @param description optional description, may be null
	 * @return a new OID instance
	 * @throws OIDFormatException if the string is not a valid OID
	 */
	public static OID create(String oidAsString, String description)
	throws OIDFormatException
	{
		String[] groups = parseOIDString(oidAsString);
		return new OID( groups, description );
	}

	/**
	 * Parse a String in the format n[.n] where n is a base-10
	 * integer number. The resulting String will have each group
	 * in an element of the returned array.
	 *
	 * e.g. 1.2.33.444 returns {"1","2","33","444"}
	 *
	 * @param oidAsString dotted-decimal OID string
	 * @return one array element per group
	 * @throws OIDFormatException if the string, or any group in it, is not
	 *         in valid OID format
	 */
	public static String[] parseOIDString(String oidAsString)
	throws OIDFormatException
	{
		// \x2e is the '.' character; split on the group delimiter.
		String groups[] = oidAsString.split("\\x2e");
		if(groups == null || groups.length < 1)
			throw new OIDFormatException("The string '" + oidAsString + "' is not in valid OID format, e.g. 1.2.3.4.56" );
		for(String group : groups)
			if( !groupPattern.matcher(group).matches() )
				throw new OIDFormatException("The group '" + group + "' of the string '" + oidAsString + "' is not in valid OID format, e.g. 1.2.3.4.56" );
		return groups;
	}

	/**
	 * Validation predicate wrapping {@link #parseOIDString(String)}.
	 *
	 * @param oidAsString candidate OID string
	 * @return true if the string is a parseable OID, else false
	 */
	public static boolean isValidOIDString(String oidAsString)
	{
		try
		{
			parseOIDString(oidAsString);
			return true;
		}
		catch (OIDFormatException x)
		{
			return false;
		}
	}

	// ========================================================================================================
	// Instance Members
	// ========================================================================================================
	// groups holds the canonical decimal form of each arc; description is an
	// optional human-readable label, excluded from equality and ordering.
	private final String[] groups;
	private final String description;

	private OID(String[] groups)
	{
		this(groups, null);
	}

	private OID(String[] groups, String description)
	{
		this.groups = new String[groups.length];
		// make a copy so that the array can't be changed after this instance is created
		// the String array members, are implicitly final
		System.arraycopy(groups, 0, this.groups, 0, groups.length);
		this.description = description;
	}

	public String getDescription()
	{
		return this.description;
	}

	/**
	 * If this OID is an ancestor of the given OID then return true.
	 * An ancestor is an OID where all groups in the ancestor match the
	 * corresponding group in the descendant and the descendant
	 * has more groups than the ancestor.
	 *
	 * @param descendant the candidate descendant OID
	 * @return true if this OID is a strict ancestor of the argument
	 */
	public boolean isAncestorOf(OID descendant)
	{
		// An ancestor must be strictly shorter than its descendant.
		if(this.groups.length >= descendant.groups.length)
			return false;
		for(int index=0; index < this.groups.length; ++index)
			if(! this.groups[index].equals(descendant.groups[index]))
				return false;
		return true;
	}

	@Override
	public String toString()
	{
		StringBuilder sb = new StringBuilder();
		for(String group : this.groups)
		{
			if(sb.length() > 0)
				sb.append('.');
			sb.append(group);
		}
		return sb.toString();
	}

	@Override
	public int hashCode()
	{
		final int prime = 31;
		int result = 1;
		result = prime * result + Arrays.hashCode(this.groups);
		return result;
	}

	@Override
	public boolean equals(Object obj)
	{
		if (this == obj)
			return true;
		if (obj == null)
			return false;
		if (getClass() != obj.getClass())
			return false;
		final OID other = (OID) obj;
		return Arrays.equals(this.groups, other.groups);
	}

	/**
	 * Compares this object with the specified object for order.
	 * Returns a negative integer, zero, or a positive integer as this object is
	 * less than, equal to, or greater than the specified object.
	 *
	 * The natural ordering of the OID class is defined as:
	 * 1.) parent OIDs are less than offspring OID
	 * 2.) groups to the left have precedence over groups to the right
	 * 3.) groups of lower numeric value are less than those of a higher value
	 */
	@Override
	public int compareTo(OID that)
	{
		if(this == that)
			return 0;
		int commonLength = Math.min(this.groups.length, that.groups.length);
		for(int index=0; index < commonLength; ++index)
		{
			int groupCompare = compareGroups(this.groups[index], that.groups[index]);
			if(groupCompare != 0)
				return groupCompare;
		}
		// All shared groups match: the shorter OID is the ancestor and
		// orders before its descendants (rule 1).
		return Integer.compare(this.groups.length, that.groups.length);
	}

	/**
	 * Numerically compares two OID groups. Groups are canonical decimal
	 * strings with no leading zeros, so the longer string is always the
	 * larger number and equal-length strings compare lexicographically.
	 * The original plain String.compareTo ordered "10" before "9",
	 * violating rule 3 of the documented natural ordering.
	 */
	private static int compareGroups(String a, String b)
	{
		if(a.length() != b.length())
			return a.length() < b.length() ? -1 : 1;
		return a.compareTo(b);
	}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.builder.endpoint.dsl;
import java.util.Map;
import javax.annotation.Generated;
import org.apache.camel.builder.EndpointConsumerBuilder;
import org.apache.camel.builder.EndpointProducerBuilder;
import org.apache.camel.builder.endpoint.AbstractEndpointBuilder;
/**
* Interact with ServiceNow via its REST API.
*
* Generated by camel build tools - do NOT edit this file!
*/
@Generated("org.apache.camel.maven.packaging.EndpointDslMojo")
public interface ServiceNowEndpointBuilderFactory {
    /**
     * Builder for endpoint for the ServiceNow component.
     *
     * Exposes the basic endpoint options; call {@link #advanced()} to reach
     * the advanced options. NOTE: generated by camel build tools - manual
     * edits to this interface will be lost on the next build.
     */
    public interface ServiceNowEndpointBuilder
            extends
                EndpointProducerBuilder {
        // Narrowing cast is safe: the runtime builder (see endpointBuilder)
        // implements both the basic and the advanced view.
        default AdvancedServiceNowEndpointBuilder advanced() {
            return (AdvancedServiceNowEndpointBuilder) this;
        }
        /**
         * Set this parameter to true to return only scorecards where the
         * indicator Display field is selected. Set this parameter to all to
         * return scorecards with any Display field value. This parameter is
         * true by default.
         *
         * The option is a: <code>java.lang.String</code> type.
         *
         * Default: true
         * Group: producer
         *
         * @param display the value to set
         * @return the dsl builder
         */
        default ServiceNowEndpointBuilder display(String display) {
            doSetProperty("display", display);
            return this;
        }
        /**
         * Return the display value (true), actual value (false), or both (all)
         * for reference fields (default: false).
         *
         * The option is a: <code>java.lang.String</code> type.
         *
         * Default: false
         * Group: producer
         *
         * @param displayValue the value to set
         * @return the dsl builder
         */
        default ServiceNowEndpointBuilder displayValue(String displayValue) {
            doSetProperty("displayValue", displayValue);
            return this;
        }
        /**
         * True to exclude Table API links for reference fields (default:
         * false).
         *
         * The option is a: <code>java.lang.Boolean</code> type.
         *
         * Group: producer
         *
         * @param excludeReferenceLink the value to set
         * @return the dsl builder
         */
        default ServiceNowEndpointBuilder excludeReferenceLink(
                Boolean excludeReferenceLink) {
            doSetProperty("excludeReferenceLink", excludeReferenceLink);
            return this;
        }
        /**
         * True to exclude Table API links for reference fields (default:
         * false).
         *
         * The option will be converted to a
         * <code>java.lang.Boolean</code> type.
         *
         * Group: producer
         *
         * @param excludeReferenceLink the value to set
         * @return the dsl builder
         */
        default ServiceNowEndpointBuilder excludeReferenceLink(
                String excludeReferenceLink) {
            doSetProperty("excludeReferenceLink", excludeReferenceLink);
            return this;
        }
        /**
         * Set this parameter to true to return only scorecards that are
         * favorites of the querying user.
         *
         * The option is a: <code>java.lang.Boolean</code> type.
         *
         * Group: producer
         *
         * @param favorites the value to set
         * @return the dsl builder
         */
        default ServiceNowEndpointBuilder favorites(Boolean favorites) {
            doSetProperty("favorites", favorites);
            return this;
        }
        /**
         * Set this parameter to true to return only scorecards that are
         * favorites of the querying user.
         *
         * The option will be converted to a
         * <code>java.lang.Boolean</code> type.
         *
         * Group: producer
         *
         * @param favorites the value to set
         * @return the dsl builder
         */
        default ServiceNowEndpointBuilder favorites(String favorites) {
            doSetProperty("favorites", favorites);
            return this;
        }
        /**
         * Set this parameter to true to always return all available aggregates
         * for an indicator, including when an aggregate has already been
         * applied. If a value is not specified, this parameter defaults to
         * false and returns no aggregates.
         *
         * The option is a: <code>java.lang.Boolean</code> type.
         *
         * Group: producer
         *
         * @param includeAggregates the value to set
         * @return the dsl builder
         */
        default ServiceNowEndpointBuilder includeAggregates(
                Boolean includeAggregates) {
            doSetProperty("includeAggregates", includeAggregates);
            return this;
        }
        /**
         * Set this parameter to true to always return all available aggregates
         * for an indicator, including when an aggregate has already been
         * applied. If a value is not specified, this parameter defaults to
         * false and returns no aggregates.
         *
         * The option will be converted to a
         * <code>java.lang.Boolean</code> type.
         *
         * Group: producer
         *
         * @param includeAggregates the value to set
         * @return the dsl builder
         */
        default ServiceNowEndpointBuilder includeAggregates(
                String includeAggregates) {
            doSetProperty("includeAggregates", includeAggregates);
            return this;
        }
        /**
         * Set this parameter to true to return all available aggregates for an
         * indicator when no aggregate has been applied. If a value is not
         * specified, this parameter defaults to false and returns no
         * aggregates.
         *
         * The option is a: <code>java.lang.Boolean</code> type.
         *
         * Group: producer
         *
         * @param includeAvailableAggregates the value to set
         * @return the dsl builder
         */
        default ServiceNowEndpointBuilder includeAvailableAggregates(
                Boolean includeAvailableAggregates) {
            doSetProperty("includeAvailableAggregates", includeAvailableAggregates);
            return this;
        }
        /**
         * Set this parameter to true to return all available aggregates for an
         * indicator when no aggregate has been applied. If a value is not
         * specified, this parameter defaults to false and returns no
         * aggregates.
         *
         * The option will be converted to a
         * <code>java.lang.Boolean</code> type.
         *
         * Group: producer
         *
         * @param includeAvailableAggregates the value to set
         * @return the dsl builder
         */
        default ServiceNowEndpointBuilder includeAvailableAggregates(
                String includeAvailableAggregates) {
            doSetProperty("includeAvailableAggregates", includeAvailableAggregates);
            return this;
        }
        /**
         * Set this parameter to true to return all available breakdowns for an
         * indicator. If a value is not specified, this parameter defaults to
         * false and returns no breakdowns.
         *
         * The option is a: <code>java.lang.Boolean</code> type.
         *
         * Group: producer
         *
         * @param includeAvailableBreakdowns the value to set
         * @return the dsl builder
         */
        default ServiceNowEndpointBuilder includeAvailableBreakdowns(
                Boolean includeAvailableBreakdowns) {
            doSetProperty("includeAvailableBreakdowns", includeAvailableBreakdowns);
            return this;
        }
        /**
         * Set this parameter to true to return all available breakdowns for an
         * indicator. If a value is not specified, this parameter defaults to
         * false and returns no breakdowns.
         *
         * The option will be converted to a
         * <code>java.lang.Boolean</code> type.
         *
         * Group: producer
         *
         * @param includeAvailableBreakdowns the value to set
         * @return the dsl builder
         */
        default ServiceNowEndpointBuilder includeAvailableBreakdowns(
                String includeAvailableBreakdowns) {
            doSetProperty("includeAvailableBreakdowns", includeAvailableBreakdowns);
            return this;
        }
        /**
         * Set this parameter to true to return all notes associated with the
         * score. The note element contains the note text as well as the author
         * and timestamp when the note was added.
         *
         * The option is a: <code>java.lang.Boolean</code> type.
         *
         * Group: producer
         *
         * @param includeScoreNotes the value to set
         * @return the dsl builder
         */
        default ServiceNowEndpointBuilder includeScoreNotes(
                Boolean includeScoreNotes) {
            doSetProperty("includeScoreNotes", includeScoreNotes);
            return this;
        }
        /**
         * Set this parameter to true to return all notes associated with the
         * score. The note element contains the note text as well as the author
         * and timestamp when the note was added.
         *
         * The option will be converted to a
         * <code>java.lang.Boolean</code> type.
         *
         * Group: producer
         *
         * @param includeScoreNotes the value to set
         * @return the dsl builder
         */
        default ServiceNowEndpointBuilder includeScoreNotes(
                String includeScoreNotes) {
            doSetProperty("includeScoreNotes", includeScoreNotes);
            return this;
        }
        /**
         * Set this parameter to true to return all scores for a scorecard. If a
         * value is not specified, this parameter defaults to false and returns
         * only the most recent score value.
         *
         * The option is a: <code>java.lang.Boolean</code> type.
         *
         * Group: producer
         *
         * @param includeScores the value to set
         * @return the dsl builder
         */
        default ServiceNowEndpointBuilder includeScores(Boolean includeScores) {
            doSetProperty("includeScores", includeScores);
            return this;
        }
        /**
         * Set this parameter to true to return all scores for a scorecard. If a
         * value is not specified, this parameter defaults to false and returns
         * only the most recent score value.
         *
         * The option will be converted to a
         * <code>java.lang.Boolean</code> type.
         *
         * Group: producer
         *
         * @param includeScores the value to set
         * @return the dsl builder
         */
        default ServiceNowEndpointBuilder includeScores(String includeScores) {
            doSetProperty("includeScores", includeScores);
            return this;
        }
        /**
         * True to set raw value of input fields (default: false).
         *
         * The option is a: <code>java.lang.Boolean</code> type.
         *
         * Group: producer
         *
         * @param inputDisplayValue the value to set
         * @return the dsl builder
         */
        default ServiceNowEndpointBuilder inputDisplayValue(
                Boolean inputDisplayValue) {
            doSetProperty("inputDisplayValue", inputDisplayValue);
            return this;
        }
        /**
         * True to set raw value of input fields (default: false).
         *
         * The option will be converted to a
         * <code>java.lang.Boolean</code> type.
         *
         * Group: producer
         *
         * @param inputDisplayValue the value to set
         * @return the dsl builder
         */
        default ServiceNowEndpointBuilder inputDisplayValue(
                String inputDisplayValue) {
            doSetProperty("inputDisplayValue", inputDisplayValue);
            return this;
        }
        /**
         * Set this parameter to true to return only scorecards for key
         * indicators.
         *
         * The option is a: <code>java.lang.Boolean</code> type.
         *
         * Group: producer
         *
         * @param key the value to set
         * @return the dsl builder
         */
        default ServiceNowEndpointBuilder key(Boolean key) {
            doSetProperty("key", key);
            return this;
        }
        /**
         * Set this parameter to true to return only scorecards for key
         * indicators.
         *
         * The option will be converted to a
         * <code>java.lang.Boolean</code> type.
         *
         * Group: producer
         *
         * @param key the value to set
         * @return the dsl builder
         */
        default ServiceNowEndpointBuilder key(String key) {
            doSetProperty("key", key);
            return this;
        }
        /**
         * Whether the producer should be started lazy (on the first message).
         * By starting lazy you can use this to allow CamelContext and routes to
         * startup in situations where a producer may otherwise fail during
         * starting and cause the route to fail being started. By deferring this
         * startup to be lazy then the startup failure can be handled during
         * routing messages via Camel's routing error handlers. Beware that when
         * the first message is processed then creating and starting the
         * producer may take a little time and prolong the total processing time
         * of the processing.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Default: false
         * Group: producer
         *
         * @param lazyStartProducer the value to set
         * @return the dsl builder
         */
        default ServiceNowEndpointBuilder lazyStartProducer(
                boolean lazyStartProducer) {
            doSetProperty("lazyStartProducer", lazyStartProducer);
            return this;
        }
        /**
         * Whether the producer should be started lazy (on the first message).
         * By starting lazy you can use this to allow CamelContext and routes to
         * startup in situations where a producer may otherwise fail during
         * starting and cause the route to fail being started. By deferring this
         * startup to be lazy then the startup failure can be handled during
         * routing messages via Camel's routing error handlers. Beware that when
         * the first message is processed then creating and starting the
         * producer may take a little time and prolong the total processing time
         * of the processing.
         *
         * The option will be converted to a <code>boolean</code>
         * type.
         *
         * Default: false
         * Group: producer
         *
         * @param lazyStartProducer the value to set
         * @return the dsl builder
         */
        default ServiceNowEndpointBuilder lazyStartProducer(
                String lazyStartProducer) {
            doSetProperty("lazyStartProducer", lazyStartProducer);
            return this;
        }
        /**
         * Defines both request and response models.
         *
         * The option is a: <code>java.util.Map&lt;java.lang.String,
         * java.lang.Class&lt;java.lang.Object&gt;&gt;</code>
         * type.
         * The option is multivalued, and you can use the models(String, Object)
         * method to add a value (call the method multiple times to set more
         * values).
         *
         * Group: producer
         *
         * @param key the option key
         * @param value the option value
         * @return the dsl builder
         */
        default ServiceNowEndpointBuilder models(String key, Object value) {
            doSetMultiValueProperty("models", "model." + key, value);
            return this;
        }
        /**
         * Defines both request and response models.
         *
         * The option is a: <code>java.util.Map&lt;java.lang.String,
         * java.lang.Class&lt;java.lang.Object&gt;&gt;</code>
         * type.
         * The option is multivalued, and you can use the models(String, Object)
         * method to add a value (call the method multiple times to set more
         * values).
         *
         * Group: producer
         *
         * @param values the values
         * @return the dsl builder
         */
        default ServiceNowEndpointBuilder models(Map values) {
            doSetMultiValueProperties("models", "model.", values);
            return this;
        }
        /**
         * Enter the maximum number of scorecards each query can return. By
         * default this value is 10, and the maximum is 100.
         *
         * The option is a: <code>java.lang.Integer</code> type.
         *
         * Default: 10
         * Group: producer
         *
         * @param perPage the value to set
         * @return the dsl builder
         */
        default ServiceNowEndpointBuilder perPage(Integer perPage) {
            doSetProperty("perPage", perPage);
            return this;
        }
        /**
         * Enter the maximum number of scorecards each query can return. By
         * default this value is 10, and the maximum is 100.
         *
         * The option will be converted to a
         * <code>java.lang.Integer</code> type.
         *
         * Default: 10
         * Group: producer
         *
         * @param perPage the value to set
         * @return the dsl builder
         */
        default ServiceNowEndpointBuilder perPage(String perPage) {
            doSetProperty("perPage", perPage);
            return this;
        }
        /**
         * The ServiceNow release to target, default to Helsinki See
         * https://docs.servicenow.com.
         *
         * The option is a:
         * <code>org.apache.camel.component.servicenow.ServiceNowRelease</code> type.
         *
         * Default: HELSINKI
         * Group: producer
         *
         * @param release the value to set
         * @return the dsl builder
         */
        default ServiceNowEndpointBuilder release(ServiceNowRelease release) {
            doSetProperty("release", release);
            return this;
        }
        /**
         * The ServiceNow release to target, default to Helsinki See
         * https://docs.servicenow.com.
         *
         * The option will be converted to a
         * <code>org.apache.camel.component.servicenow.ServiceNowRelease</code> type.
         *
         * Default: HELSINKI
         * Group: producer
         *
         * @param release the value to set
         * @return the dsl builder
         */
        default ServiceNowEndpointBuilder release(String release) {
            doSetProperty("release", release);
            return this;
        }
        /**
         * Defines the request model.
         *
         * The option is a: <code>java.util.Map&lt;java.lang.String,
         * java.lang.Class&lt;java.lang.Object&gt;&gt;</code>
         * type.
         * The option is multivalued, and you can use the requestModels(String,
         * Object) method to add a value (call the method multiple times to set
         * more values).
         *
         * Group: producer
         *
         * @param key the option key
         * @param value the option value
         * @return the dsl builder
         */
        default ServiceNowEndpointBuilder requestModels(String key, Object value) {
            doSetMultiValueProperty("requestModels", "request-model." + key, value);
            return this;
        }
        /**
         * Defines the request model.
         *
         * The option is a: <code>java.util.Map&lt;java.lang.String,
         * java.lang.Class&lt;java.lang.Object&gt;&gt;</code>
         * type.
         * The option is multivalued, and you can use the requestModels(String,
         * Object) method to add a value (call the method multiple times to set
         * more values).
         *
         * Group: producer
         *
         * @param values the values
         * @return the dsl builder
         */
        default ServiceNowEndpointBuilder requestModels(Map values) {
            doSetMultiValueProperties("requestModels", "request-model.", values);
            return this;
        }
        /**
         * The default resource, can be overridden by header
         * CamelServiceNowResource.
         *
         * The option is a: <code>java.lang.String</code> type.
         *
         * Group: producer
         *
         * @param resource the value to set
         * @return the dsl builder
         */
        default ServiceNowEndpointBuilder resource(String resource) {
            doSetProperty("resource", resource);
            return this;
        }
        /**
         * Defines the response model.
         *
         * The option is a: <code>java.util.Map&lt;java.lang.String,
         * java.lang.Class&lt;java.lang.Object&gt;&gt;</code>
         * type.
         * The option is multivalued, and you can use the responseModels(String,
         * Object) method to add a value (call the method multiple times to set
         * more values).
         *
         * Group: producer
         *
         * @param key the option key
         * @param value the option value
         * @return the dsl builder
         */
        default ServiceNowEndpointBuilder responseModels(
                String key,
                Object value) {
            doSetMultiValueProperty("responseModels", "response-model." + key, value);
            return this;
        }
        /**
         * Defines the response model.
         *
         * The option is a: <code>java.util.Map&lt;java.lang.String,
         * java.lang.Class&lt;java.lang.Object&gt;&gt;</code>
         * type.
         * The option is multivalued, and you can use the responseModels(String,
         * Object) method to add a value (call the method multiple times to set
         * more values).
         *
         * Group: producer
         *
         * @param values the values
         * @return the dsl builder
         */
        default ServiceNowEndpointBuilder responseModels(Map values) {
            doSetMultiValueProperties("responseModels", "response-model.", values);
            return this;
        }
        /**
         * Specify the value to use when sorting results. By default, queries
         * sort records by value.
         *
         * The option is a: <code>java.lang.String</code> type.
         *
         * Group: producer
         *
         * @param sortBy the value to set
         * @return the dsl builder
         */
        default ServiceNowEndpointBuilder sortBy(String sortBy) {
            doSetProperty("sortBy", sortBy);
            return this;
        }
        /**
         * Specify the sort direction, ascending or descending. By default,
         * queries sort records in descending order. Use sysparm_sortdir=asc to
         * sort in ascending order.
         *
         * The option is a: <code>java.lang.String</code> type.
         *
         * Group: producer
         *
         * @param sortDir the value to set
         * @return the dsl builder
         */
        default ServiceNowEndpointBuilder sortDir(String sortDir) {
            doSetProperty("sortDir", sortDir);
            return this;
        }
        /**
         * True to suppress auto generation of system fields (default: false).
         *
         * The option is a: <code>java.lang.Boolean</code> type.
         *
         * Group: producer
         *
         * @param suppressAutoSysField the value to set
         * @return the dsl builder
         */
        default ServiceNowEndpointBuilder suppressAutoSysField(
                Boolean suppressAutoSysField) {
            doSetProperty("suppressAutoSysField", suppressAutoSysField);
            return this;
        }
        /**
         * True to suppress auto generation of system fields (default: false).
         *
         * The option will be converted to a
         * <code>java.lang.Boolean</code> type.
         *
         * Group: producer
         *
         * @param suppressAutoSysField the value to set
         * @return the dsl builder
         */
        default ServiceNowEndpointBuilder suppressAutoSysField(
                String suppressAutoSysField) {
            doSetProperty("suppressAutoSysField", suppressAutoSysField);
            return this;
        }
        /**
         * Set this value to true to remove the Link header from the response.
         * The Link header allows you to request additional pages of data when
         * the number of records matching your query exceeds the query limit.
         *
         * The option is a: <code>java.lang.Boolean</code> type.
         *
         * Group: producer
         *
         * @param suppressPaginationHeader the value to set
         * @return the dsl builder
         */
        default ServiceNowEndpointBuilder suppressPaginationHeader(
                Boolean suppressPaginationHeader) {
            doSetProperty("suppressPaginationHeader", suppressPaginationHeader);
            return this;
        }
        /**
         * Set this value to true to remove the Link header from the response.
         * The Link header allows you to request additional pages of data when
         * the number of records matching your query exceeds the query limit.
         *
         * The option will be converted to a
         * <code>java.lang.Boolean</code> type.
         *
         * Group: producer
         *
         * @param suppressPaginationHeader the value to set
         * @return the dsl builder
         */
        default ServiceNowEndpointBuilder suppressPaginationHeader(
                String suppressPaginationHeader) {
            doSetProperty("suppressPaginationHeader", suppressPaginationHeader);
            return this;
        }
        /**
         * The default table, can be overridden by header CamelServiceNowTable.
         *
         * The option is a: <code>java.lang.String</code> type.
         *
         * Group: producer
         *
         * @param table the value to set
         * @return the dsl builder
         */
        default ServiceNowEndpointBuilder table(String table) {
            doSetProperty("table", table);
            return this;
        }
        /**
         * Set this parameter to true to return only scorecards that have a
         * target.
         *
         * The option is a: <code>java.lang.Boolean</code> type.
         *
         * Group: producer
         *
         * @param target the value to set
         * @return the dsl builder
         */
        default ServiceNowEndpointBuilder target(Boolean target) {
            doSetProperty("target", target);
            return this;
        }
        /**
         * Set this parameter to true to return only scorecards that have a
         * target.
         *
         * The option will be converted to a
         * <code>java.lang.Boolean</code> type.
         *
         * Group: producer
         *
         * @param target the value to set
         * @return the dsl builder
         */
        default ServiceNowEndpointBuilder target(String target) {
            doSetProperty("target", target);
            return this;
        }
        /**
         * Gets only those categories whose parent is a catalog.
         *
         * The option is a: <code>java.lang.Boolean</code> type.
         *
         * Group: producer
         *
         * @param topLevelOnly the value to set
         * @return the dsl builder
         */
        default ServiceNowEndpointBuilder topLevelOnly(Boolean topLevelOnly) {
            doSetProperty("topLevelOnly", topLevelOnly);
            return this;
        }
        /**
         * Gets only those categories whose parent is a catalog.
         *
         * The option will be converted to a
         * <code>java.lang.Boolean</code> type.
         *
         * Group: producer
         *
         * @param topLevelOnly the value to set
         * @return the dsl builder
         */
        default ServiceNowEndpointBuilder topLevelOnly(String topLevelOnly) {
            doSetProperty("topLevelOnly", topLevelOnly);
            return this;
        }
        /**
         * The proxy host name.
         *
         * The option is a: <code>java.lang.String</code> type.
         *
         * Group: proxy
         *
         * @param proxyHost the value to set
         * @return the dsl builder
         */
        default ServiceNowEndpointBuilder proxyHost(String proxyHost) {
            doSetProperty("proxyHost", proxyHost);
            return this;
        }
        /**
         * The proxy port number.
         *
         * The option is a: <code>java.lang.Integer</code> type.
         *
         * Group: proxy
         *
         * @param proxyPort the value to set
         * @return the dsl builder
         */
        default ServiceNowEndpointBuilder proxyPort(Integer proxyPort) {
            doSetProperty("proxyPort", proxyPort);
            return this;
        }
        /**
         * The proxy port number.
         *
         * The option will be converted to a
         * <code>java.lang.Integer</code> type.
         *
         * Group: proxy
         *
         * @param proxyPort the value to set
         * @return the dsl builder
         */
        default ServiceNowEndpointBuilder proxyPort(String proxyPort) {
            doSetProperty("proxyPort", proxyPort);
            return this;
        }
        /**
         * The ServiceNow REST API url.
         *
         * The option is a: <code>java.lang.String</code> type.
         *
         * Group: security
         *
         * @param apiUrl the value to set
         * @return the dsl builder
         */
        default ServiceNowEndpointBuilder apiUrl(String apiUrl) {
            doSetProperty("apiUrl", apiUrl);
            return this;
        }
        /**
         * OAuth2 ClientID.
         *
         * The option is a: <code>java.lang.String</code> type.
         *
         * Group: security
         *
         * @param oauthClientId the value to set
         * @return the dsl builder
         */
        default ServiceNowEndpointBuilder oauthClientId(String oauthClientId) {
            doSetProperty("oauthClientId", oauthClientId);
            return this;
        }
        /**
         * OAuth2 ClientSecret.
         *
         * The option is a: <code>java.lang.String</code> type.
         *
         * Group: security
         *
         * @param oauthClientSecret the value to set
         * @return the dsl builder
         */
        default ServiceNowEndpointBuilder oauthClientSecret(
                String oauthClientSecret) {
            doSetProperty("oauthClientSecret", oauthClientSecret);
            return this;
        }
        /**
         * OAuth token Url.
         *
         * The option is a: <code>java.lang.String</code> type.
         *
         * Group: security
         *
         * @param oauthTokenUrl the value to set
         * @return the dsl builder
         */
        default ServiceNowEndpointBuilder oauthTokenUrl(String oauthTokenUrl) {
            doSetProperty("oauthTokenUrl", oauthTokenUrl);
            return this;
        }
        /**
         * ServiceNow account password, MUST be provided.
         *
         * The option is a: <code>java.lang.String</code> type.
         *
         * Required: true
         * Group: security
         *
         * @param password the value to set
         * @return the dsl builder
         */
        default ServiceNowEndpointBuilder password(String password) {
            doSetProperty("password", password);
            return this;
        }
        /**
         * Password for proxy authentication.
         *
         * The option is a: <code>java.lang.String</code> type.
         *
         * Group: security
         *
         * @param proxyPassword the value to set
         * @return the dsl builder
         */
        default ServiceNowEndpointBuilder proxyPassword(String proxyPassword) {
            doSetProperty("proxyPassword", proxyPassword);
            return this;
        }
        /**
         * Username for proxy authentication.
         *
         * The option is a: <code>java.lang.String</code> type.
         *
         * Group: security
         *
         * @param proxyUserName the value to set
         * @return the dsl builder
         */
        default ServiceNowEndpointBuilder proxyUserName(String proxyUserName) {
            doSetProperty("proxyUserName", proxyUserName);
            return this;
        }
        /**
         * To configure security using SSLContextParameters. See
         * http://camel.apache.org/camel-configuration-utilities.html.
         *
         * The option is a:
         * <code>org.apache.camel.support.jsse.SSLContextParameters</code> type.
         *
         * Group: security
         *
         * @param sslContextParameters the value to set
         * @return the dsl builder
         */
        default ServiceNowEndpointBuilder sslContextParameters(
                Object sslContextParameters) {
            doSetProperty("sslContextParameters", sslContextParameters);
            return this;
        }
        /**
         * To configure security using SSLContextParameters. See
         * http://camel.apache.org/camel-configuration-utilities.html.
         *
         * The option will be converted to a
         * <code>org.apache.camel.support.jsse.SSLContextParameters</code> type.
         *
         * Group: security
         *
         * @param sslContextParameters the value to set
         * @return the dsl builder
         */
        default ServiceNowEndpointBuilder sslContextParameters(
                String sslContextParameters) {
            doSetProperty("sslContextParameters", sslContextParameters);
            return this;
        }
        /**
         * ServiceNow user account name, MUST be provided.
         *
         * The option is a: <code>java.lang.String</code> type.
         *
         * Required: true
         * Group: security
         *
         * @param userName the value to set
         * @return the dsl builder
         */
        default ServiceNowEndpointBuilder userName(String userName) {
            doSetProperty("userName", userName);
            return this;
        }
    }
    /**
     * Advanced builder for endpoint for the ServiceNow component.
     *
     * Exposes the advanced endpoint options; call {@link #basic()} to return
     * to the basic options. NOTE: generated by camel build tools - manual
     * edits to this interface will be lost on the next build.
     */
    public interface AdvancedServiceNowEndpointBuilder
            extends
                EndpointProducerBuilder {
        // Narrowing cast is safe: the runtime builder (see endpointBuilder)
        // implements both the basic and the advanced view.
        default ServiceNowEndpointBuilder basic() {
            return (ServiceNowEndpointBuilder) this;
        }
        /**
         * The ServiceNow REST API version, default latest.
         *
         * The option is a: <code>java.lang.String</code> type.
         *
         * Group: advanced
         *
         * @param apiVersion the value to set
         * @return the dsl builder
         */
        default AdvancedServiceNowEndpointBuilder apiVersion(String apiVersion) {
            doSetProperty("apiVersion", apiVersion);
            return this;
        }
        /**
         * The date format used for Json serialization/deserialization.
         *
         * The option is a: <code>java.lang.String</code> type.
         *
         * Default: yyyy-MM-dd
         * Group: advanced
         *
         * @param dateFormat the value to set
         * @return the dsl builder
         */
        default AdvancedServiceNowEndpointBuilder dateFormat(String dateFormat) {
            doSetProperty("dateFormat", dateFormat);
            return this;
        }
        /**
         * The date-time format used for Json serialization/deserialization.
         *
         * The option is a: <code>java.lang.String</code> type.
         *
         * Default: yyyy-MM-dd HH:mm:ss
         * Group: advanced
         *
         * @param dateTimeFormat the value to set
         * @return the dsl builder
         */
        default AdvancedServiceNowEndpointBuilder dateTimeFormat(
                String dateTimeFormat) {
            doSetProperty("dateTimeFormat", dateTimeFormat);
            return this;
        }
        /**
         * To configure http-client.
         *
         * The option is a:
         * <code>org.apache.cxf.transports.http.configuration.HTTPClientPolicy</code> type.
         *
         * Group: advanced
         *
         * @param httpClientPolicy the value to set
         * @return the dsl builder
         */
        default AdvancedServiceNowEndpointBuilder httpClientPolicy(
                Object httpClientPolicy) {
            doSetProperty("httpClientPolicy", httpClientPolicy);
            return this;
        }
        /**
         * To configure http-client.
         *
         * The option will be converted to a
         * <code>org.apache.cxf.transports.http.configuration.HTTPClientPolicy</code> type.
         *
         * Group: advanced
         *
         * @param httpClientPolicy the value to set
         * @return the dsl builder
         */
        default AdvancedServiceNowEndpointBuilder httpClientPolicy(
                String httpClientPolicy) {
            doSetProperty("httpClientPolicy", httpClientPolicy);
            return this;
        }
        /**
         * Sets Jackson's ObjectMapper to use for request/reply.
         *
         * The option is a:
         * <code>com.fasterxml.jackson.databind.ObjectMapper</code>
         * type.
         *
         * Group: advanced
         *
         * @param mapper the value to set
         * @return the dsl builder
         */
        default AdvancedServiceNowEndpointBuilder mapper(Object mapper) {
            doSetProperty("mapper", mapper);
            return this;
        }
        /**
         * Sets Jackson's ObjectMapper to use for request/reply.
         *
         * The option will be converted to a
         * <code>com.fasterxml.jackson.databind.ObjectMapper</code>
         * type.
         *
         * Group: advanced
         *
         * @param mapper the value to set
         * @return the dsl builder
         */
        default AdvancedServiceNowEndpointBuilder mapper(String mapper) {
            doSetProperty("mapper", mapper);
            return this;
        }
        /**
         * To configure proxy authentication.
         *
         * The option is a:
         * <code>org.apache.cxf.configuration.security.ProxyAuthorizationPolicy</code> type.
         *
         * Group: advanced
         *
         * @param proxyAuthorizationPolicy the value to set
         * @return the dsl builder
         */
        default AdvancedServiceNowEndpointBuilder proxyAuthorizationPolicy(
                Object proxyAuthorizationPolicy) {
            doSetProperty("proxyAuthorizationPolicy", proxyAuthorizationPolicy);
            return this;
        }
        /**
         * To configure proxy authentication.
         *
         * The option will be converted to a
         * <code>org.apache.cxf.configuration.security.ProxyAuthorizationPolicy</code> type.
         *
         * Group: advanced
         *
         * @param proxyAuthorizationPolicy the value to set
         * @return the dsl builder
         */
        default AdvancedServiceNowEndpointBuilder proxyAuthorizationPolicy(
                String proxyAuthorizationPolicy) {
            doSetProperty("proxyAuthorizationPolicy", proxyAuthorizationPolicy);
            return this;
        }
        /**
         * Set this parameter to true to retrieve the target record when using
         * import set api. The import set result is then replaced by the target
         * record.
         *
         * The option is a: <code>java.lang.Boolean</code> type.
         *
         * Default: false
         * Group: advanced
         *
         * @param retrieveTargetRecordOnImport the value to set
         * @return the dsl builder
         */
        default AdvancedServiceNowEndpointBuilder retrieveTargetRecordOnImport(
                Boolean retrieveTargetRecordOnImport) {
            doSetProperty("retrieveTargetRecordOnImport", retrieveTargetRecordOnImport);
            return this;
        }
        /**
         * Set this parameter to true to retrieve the target record when using
         * import set api. The import set result is then replaced by the target
         * record.
         *
         * The option will be converted to a
         * <code>java.lang.Boolean</code> type.
         *
         * Default: false
         * Group: advanced
         *
         * @param retrieveTargetRecordOnImport the value to set
         * @return the dsl builder
         */
        default AdvancedServiceNowEndpointBuilder retrieveTargetRecordOnImport(
                String retrieveTargetRecordOnImport) {
            doSetProperty("retrieveTargetRecordOnImport", retrieveTargetRecordOnImport);
            return this;
        }
        /**
         * The time format used for Json serialization/deserialization.
         *
         * The option is a: <code>java.lang.String</code> type.
         *
         * Default: HH:mm:ss
         * Group: advanced
         *
         * @param timeFormat the value to set
         * @return the dsl builder
         */
        default AdvancedServiceNowEndpointBuilder timeFormat(String timeFormat) {
            doSetProperty("timeFormat", timeFormat);
            return this;
        }
    }
/**
* Proxy enum for
* <code>org.apache.camel.component.servicenow.ServiceNowRelease</code>
* enum.
*/
enum ServiceNowRelease {
FUJI,
GENEVA,
HELSINKI;
}
    /**
     * Entry points for creating ServiceNow endpoint builders from the Java
     * DSL. NOTE: generated by camel build tools - manual edits will be lost
     * on the next build.
     */
    public interface ServiceNowBuilders {
        /**
         * ServiceNow (camel-servicenow)
         * Interact with ServiceNow via its REST API.
         *
         * Category: api,cloud,management
         * Since: 2.18
         * Maven coordinates: org.apache.camel:camel-servicenow
         *
         * Syntax: <code>servicenow:instanceName</code>
         *
         * Path parameter: instanceName (required)
         * The ServiceNow instance name
         *
         * @param path instanceName
         * @return the dsl builder
         */
        default ServiceNowEndpointBuilder servicenow(String path) {
            return ServiceNowEndpointBuilderFactory.endpointBuilder("servicenow", path);
        }
        /**
         * ServiceNow (camel-servicenow)
         * Interact with ServiceNow via its REST API.
         *
         * Category: api,cloud,management
         * Since: 2.18
         * Maven coordinates: org.apache.camel:camel-servicenow
         *
         * Syntax: <code>servicenow:instanceName</code>
         *
         * Path parameter: instanceName (required)
         * The ServiceNow instance name
         *
         * @param componentName to use a custom component name for the endpoint
         * instead of the default name
         * @param path instanceName
         * @return the dsl builder
         */
        default ServiceNowEndpointBuilder servicenow(
                String componentName,
                String path) {
            return ServiceNowEndpointBuilderFactory.endpointBuilder(componentName, path);
        }
    }
    /**
     * Creates a new ServiceNow endpoint builder for the given component name
     * and endpoint path.
     *
     * @param componentName the component scheme to use (e.g. "servicenow")
     * @param path the endpoint path (the ServiceNow instance name)
     * @return a builder implementing both the basic and advanced views, so
     *         advanced()/basic() can cast between them on the same instance
     */
    static ServiceNowEndpointBuilder endpointBuilder(
            String componentName,
            String path) {
        // Local class implements BOTH builder interfaces: the advanced() and
        // basic() default methods rely on this to cast the same object.
        class ServiceNowEndpointBuilderImpl extends AbstractEndpointBuilder implements ServiceNowEndpointBuilder, AdvancedServiceNowEndpointBuilder {
            public ServiceNowEndpointBuilderImpl(String path) {
                super(componentName, path);
            }
        }
        return new ServiceNowEndpointBuilderImpl(path);
    }
}
| |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.java.decompiler.modules.decompiler.exps;
import org.jetbrains.java.decompiler.code.CodeConstants;
import org.jetbrains.java.decompiler.main.TextBuffer;
import org.jetbrains.java.decompiler.main.collectors.BytecodeMappingTracer;
import org.jetbrains.java.decompiler.modules.decompiler.ExprProcessor;
import org.jetbrains.java.decompiler.modules.decompiler.vars.CheckTypesResult;
import org.jetbrains.java.decompiler.struct.gen.VarType;
import org.jetbrains.java.decompiler.struct.match.MatchEngine;
import org.jetbrains.java.decompiler.struct.match.MatchNode;
import org.jetbrains.java.decompiler.util.InterpreterUtil;
import org.jetbrains.java.decompiler.util.ListStack;
import java.util.*;
/**
 * Expression node representing a built-in "function": arithmetic, bitwise and
 * logical operators, primitive conversions, comparisons, increments/decrements,
 * the ternary operator, casts and {@code instanceof}.
 */
public class FunctionExprent extends Exprent {

  // Binary arithmetic / bitwise / shift functions. For funcType <= FUNCTION_USHR
  // the value doubles as a direct index into OPERATORS.
  public static final int FUNCTION_ADD = 0;
  public static final int FUNCTION_SUB = 1;
  public static final int FUNCTION_MUL = 2;
  public static final int FUNCTION_DIV = 3;
  public static final int FUNCTION_AND = 4;
  public static final int FUNCTION_OR = 5;
  public static final int FUNCTION_XOR = 6;
  public static final int FUNCTION_REM = 7;
  public static final int FUNCTION_SHL = 8;
  public static final int FUNCTION_SHR = 9;
  public static final int FUNCTION_USHR = 10;

  // Unary functions.
  public static final int FUNCTION_BIT_NOT = 11;
  public static final int FUNCTION_BOOL_NOT = 12;
  public static final int FUNCTION_NEG = 13;

  // Primitive conversions; (funcType - FUNCTION_I2L) indexes the TYPES table.
  public static final int FUNCTION_I2L = 14;
  public static final int FUNCTION_I2F = 15;
  public static final int FUNCTION_I2D = 16;
  public static final int FUNCTION_L2I = 17;
  public static final int FUNCTION_L2F = 18;
  public static final int FUNCTION_L2D = 19;
  public static final int FUNCTION_F2I = 20;
  public static final int FUNCTION_F2L = 21;
  public static final int FUNCTION_F2D = 22;
  public static final int FUNCTION_D2I = 23;
  public static final int FUNCTION_D2L = 24;
  public static final int FUNCTION_D2F = 25;
  public static final int FUNCTION_I2B = 26;
  public static final int FUNCTION_I2C = 27;
  public static final int FUNCTION_I2S = 28;

  public static final int FUNCTION_CAST = 29;
  public static final int FUNCTION_INSTANCEOF = 30;
  public static final int FUNCTION_ARRAY_LENGTH = 31;

  // Pre/post increments and decrements: i--, --i, i++, ++i.
  public static final int FUNCTION_IMM = 32;
  public static final int FUNCTION_MMI = 33;
  public static final int FUNCTION_IPP = 34;
  public static final int FUNCTION_PPI = 35;

  // Ternary operator (cond ? a : b).
  public static final int FUNCTION_IIF = 36;

  // Raw bytecode comparison opcodes; must not survive into the final output.
  public static final int FUNCTION_LCMP = 37;
  public static final int FUNCTION_FCMPL = 38;
  public static final int FUNCTION_FCMPG = 39;
  public static final int FUNCTION_DCMPL = 40;
  public static final int FUNCTION_DCMPG = 41;

  // Comparisons and short-circuit logic; for funcType >= FUNCTION_EQ the
  // operator string is OPERATORS[funcType - FUNCTION_EQ + 11].
  public static final int FUNCTION_EQ = 42;
  public static final int FUNCTION_NE = 43;
  public static final int FUNCTION_LT = 44;
  public static final int FUNCTION_GE = 45;
  public static final int FUNCTION_GT = 46;
  public static final int FUNCTION_LE = 47;
  public static final int FUNCTION_CADD = 48;
  public static final int FUNCTION_COR = 49;
  public static final int FUNCTION_STR_CONCAT = 50;

  // Result types of the primitive conversions FUNCTION_I2L .. FUNCTION_I2S,
  // indexed by (funcType - FUNCTION_I2L).
  private static final VarType[] TYPES = {
    VarType.VARTYPE_LONG,
    VarType.VARTYPE_FLOAT,
    VarType.VARTYPE_DOUBLE,
    VarType.VARTYPE_INT,
    VarType.VARTYPE_FLOAT,
    VarType.VARTYPE_DOUBLE,
    VarType.VARTYPE_INT,
    VarType.VARTYPE_LONG,
    VarType.VARTYPE_DOUBLE,
    VarType.VARTYPE_INT,
    VarType.VARTYPE_LONG,
    VarType.VARTYPE_FLOAT,
    VarType.VARTYPE_BYTE,
    VarType.VARTYPE_CHAR,
    VarType.VARTYPE_SHORT
  };

  // Indices 0..10: FUNCTION_ADD..FUNCTION_USHR.
  // Indices 11..19: FUNCTION_EQ..FUNCTION_STR_CONCAT (offset by 11 in toJava()).
  private static final String[] OPERATORS = {
    " + ",
    " - ",
    " * ",
    " / ",
    " & ",
    " | ",
    " ^ ",
    " % ",
    " << ",
    " >> ",
    " >>> ",
    " == ",
    " != ",
    " < ",
    " >= ",
    " > ",
    " <= ",
    " && ",
    " || ",
    " + "
  };

  // Java operator precedence per funcType (lower binds tighter); -1 marks the
  // raw bytecode comparison pseudo-functions that never reach the output.
  private static final int[] PRECEDENCE = {
    3,  // FUNCTION_ADD
    3,  // FUNCTION_SUB
    2,  // FUNCTION_MUL
    2,  // FUNCTION_DIV
    7,  // FUNCTION_AND
    9,  // FUNCTION_OR
    8,  // FUNCTION_XOR
    2,  // FUNCTION_REM
    4,  // FUNCTION_SHL
    4,  // FUNCTION_SHR
    4,  // FUNCTION_USHR
    1,  // FUNCTION_BIT_NOT
    1,  // FUNCTION_BOOL_NOT
    1,  // FUNCTION_NEG
    1,  // FUNCTION_I2L
    1,  // FUNCTION_I2F
    1,  // FUNCTION_I2D
    1,  // FUNCTION_L2I
    1,  // FUNCTION_L2F
    1,  // FUNCTION_L2D
    1,  // FUNCTION_F2I
    1,  // FUNCTION_F2L
    1,  // FUNCTION_F2D
    1,  // FUNCTION_D2I
    1,  // FUNCTION_D2L
    1,  // FUNCTION_D2F
    1,  // FUNCTION_I2B
    1,  // FUNCTION_I2C
    1,  // FUNCTION_I2S
    1,  // FUNCTION_CAST
    6,  // FUNCTION_INSTANCEOF
    0,  // FUNCTION_ARRAY_LENGTH
    1,  // FUNCTION_IMM
    1,  // FUNCTION_MMI
    1,  // FUNCTION_IPP
    1,  // FUNCTION_PPI
    12, // FUNCTION_IIF
    -1, // FUNCTION_LCMP
    -1, // FUNCTION_FCMPL
    -1, // FUNCTION_FCMPG
    -1, // FUNCTION_DCMPL
    -1, // FUNCTION_DCMPG
    6,  // FUNCTION_EQ = 42
    6,  // FUNCTION_NE = 43
    5,  // FUNCTION_LT = 44
    5,  // FUNCTION_GE = 45
    5,  // FUNCTION_GT = 46
    5,  // FUNCTION_LE = 47
    10, // FUNCTION_CADD = 48
    11, // FUNCTION_COR = 49
    3   // FUNCTION_STR_CONCAT = 50
  };

  // Functions that are associative, so equal-precedence right operands of the
  // same function may be printed without parentheses.
  private static final Set<Integer> ASSOCIATIVITY = new HashSet<>(Arrays.asList(
    FUNCTION_ADD, FUNCTION_MUL, FUNCTION_AND, FUNCTION_OR, FUNCTION_XOR, FUNCTION_CADD, FUNCTION_COR, FUNCTION_STR_CONCAT));

  private int funcType;
  // Result type of the ++/-- family, set externally via setImplicitType().
  private VarType implicitType;
  private final List<Exprent> lstOperands;

  /**
   * Builds a function exprent by popping its operands off the simulated
   * operand stack. Unary functions pop one operand; binary functions pop two
   * (the pops are re-ordered so lstOperands keeps left-to-right order).
   * FUNCTION_IIF cannot be built this way.
   */
  public FunctionExprent(int funcType, ListStack<Exprent> stack, Set<Integer> bytecodeOffsets) {
    this(funcType, new ArrayList<>(), bytecodeOffsets);
    if (funcType >= FUNCTION_BIT_NOT && funcType <= FUNCTION_PPI && funcType != FUNCTION_CAST && funcType != FUNCTION_INSTANCEOF) {
      lstOperands.add(stack.pop());
    }
    else if (funcType == FUNCTION_IIF) {
      throw new RuntimeException("no direct instantiation possible");
    }
    else {
      Exprent expr = stack.pop();
      lstOperands.add(stack.pop());
      lstOperands.add(expr);
    }
  }

  /**
   * Builds a function exprent over an explicit operand list. The list is
   * stored by reference, not copied.
   */
  public FunctionExprent(int funcType, List<Exprent> operands, Set<Integer> bytecodeOffsets) {
    super(EXPRENT_FUNCTION);
    this.funcType = funcType;
    this.lstOperands = operands;
    addBytecodeOffsets(bytecodeOffsets);
  }

  /** Convenience constructor for a unary function. */
  public FunctionExprent(int funcType, Exprent operand, Set<Integer> bytecodeOffsets) {
    this(funcType, new ArrayList<>(1), bytecodeOffsets);
    lstOperands.add(operand);
  }

  /**
   * Computes the static result type of this function from its operands.
   */
  @Override
  public VarType getExprType() {
    VarType exprType = null;
    if (funcType <= FUNCTION_NEG || funcType == FUNCTION_IPP || funcType == FUNCTION_PPI
        || funcType == FUNCTION_IMM || funcType == FUNCTION_MMI) {
      VarType type1 = lstOperands.get(0).getExprType();
      VarType type2 = null;
      if (lstOperands.size() > 1) {
        type2 = lstOperands.get(1).getExprType();
      }
      switch (funcType) {
        case FUNCTION_IMM:
        case FUNCTION_MMI:
        case FUNCTION_IPP:
        case FUNCTION_PPI:
          exprType = implicitType;
          break;
        case FUNCTION_BOOL_NOT:
          exprType = VarType.VARTYPE_BOOLEAN;
          break;
        case FUNCTION_SHL:
        case FUNCTION_SHR:
        case FUNCTION_USHR:
        case FUNCTION_BIT_NOT:
        case FUNCTION_NEG:
          // shifts and unary ops take the (promoted) type of the left operand only
          exprType = getMaxVarType(new VarType[]{type1});
          break;
        case FUNCTION_ADD:
        case FUNCTION_SUB:
        case FUNCTION_MUL:
        case FUNCTION_DIV:
        case FUNCTION_REM:
          exprType = getMaxVarType(new VarType[]{type1, type2});
          break;
        case FUNCTION_AND:
        case FUNCTION_OR:
        case FUNCTION_XOR:
          // FIX: was the non-short-circuit '&'; '&&' is the correct logical operator here
          if (type1.type == CodeConstants.TYPE_BOOLEAN && type2.type == CodeConstants.TYPE_BOOLEAN) {
            exprType = VarType.VARTYPE_BOOLEAN;
          }
          else {
            exprType = getMaxVarType(new VarType[]{type1, type2});
          }
      }
    }
    else if (funcType == FUNCTION_CAST) {
      // operand 1 is the target-type exprent
      exprType = lstOperands.get(1).getExprType();
    }
    else if (funcType == FUNCTION_IIF) {
      Exprent param1 = lstOperands.get(1);
      Exprent param2 = lstOperands.get(2);
      VarType supertype = VarType.getCommonSupertype(param1.getExprType(), param2.getExprType());
      // two int-family constants (non-boolean) collapse to plain int
      if (param1.type == Exprent.EXPRENT_CONST && param2.type == Exprent.EXPRENT_CONST &&
          supertype.type != CodeConstants.TYPE_BOOLEAN && VarType.VARTYPE_INT.isSuperset(supertype)) {
        exprType = VarType.VARTYPE_INT;
      }
      else {
        exprType = supertype;
      }
    }
    else if (funcType == FUNCTION_STR_CONCAT) {
      exprType = VarType.VARTYPE_STRING;
    }
    else if (funcType >= FUNCTION_EQ || funcType == FUNCTION_INSTANCEOF) {
      exprType = VarType.VARTYPE_BOOLEAN;
    }
    else if (funcType >= FUNCTION_ARRAY_LENGTH) {
      // ARRAY_LENGTH and the raw LCMP/FCMP/DCMP pseudo-functions yield int
      exprType = VarType.VARTYPE_INT;
    }
    else {
      // remaining range: the primitive conversions FUNCTION_I2L..FUNCTION_I2S
      exprType = TYPES[funcType - FUNCTION_I2L];
    }
    return exprType;
  }

  @Override
  public int getExprentUse() {
    // increments/decrements have side effects and therefore no reuse flags
    if (funcType >= FUNCTION_IMM && funcType <= FUNCTION_PPI) {
      return 0;
    }
    else {
      int ret = Exprent.MULTIPLE_USES | Exprent.SIDE_EFFECTS_FREE;
      for (Exprent expr : lstOperands) {
        ret &= expr.getExprentUse();
      }
      return ret;
    }
  }

  /**
   * Collects min/max type constraints the operands must satisfy for this
   * function to type-check.
   */
  @Override
  public CheckTypesResult checkExprTypeBounds() {
    CheckTypesResult result = new CheckTypesResult();
    Exprent param1 = lstOperands.get(0);
    VarType type1 = param1.getExprType();
    Exprent param2 = null;
    VarType type2 = null;
    if (lstOperands.size() > 1) {
      param2 = lstOperands.get(1);
      type2 = param2.getExprType();
    }
    switch (funcType) {
      case FUNCTION_IIF:
        // FIX: removed a dead "if (supertype == null) supertype = getExprType();"
        // re-check that called the very same method again
        VarType supertype = getExprType();
        result.addMinTypeExprent(param1, VarType.VARTYPE_BOOLEAN);
        result.addMinTypeExprent(param2, VarType.getMinTypeInFamily(supertype.typeFamily));
        result.addMinTypeExprent(lstOperands.get(2), VarType.getMinTypeInFamily(supertype.typeFamily));
        break;
      case FUNCTION_I2L:
      case FUNCTION_I2F:
      case FUNCTION_I2D:
      case FUNCTION_I2B:
      case FUNCTION_I2C:
      case FUNCTION_I2S:
        result.addMinTypeExprent(param1, VarType.VARTYPE_BYTECHAR);
        result.addMaxTypeExprent(param1, VarType.VARTYPE_INT);
        break;
      case FUNCTION_IMM:
      case FUNCTION_IPP:
      case FUNCTION_MMI:
      case FUNCTION_PPI:
        result.addMinTypeExprent(param1, implicitType);
        result.addMaxTypeExprent(param1, implicitType);
        break;
      case FUNCTION_ADD:
      case FUNCTION_SUB:
      case FUNCTION_MUL:
      case FUNCTION_DIV:
      case FUNCTION_REM:
      case FUNCTION_SHL:
      case FUNCTION_SHR:
      case FUNCTION_USHR:
      case FUNCTION_LT:
      case FUNCTION_GE:
      case FUNCTION_GT:
      case FUNCTION_LE:
        result.addMinTypeExprent(param2, VarType.VARTYPE_BYTECHAR);
        // fall through: the binary cases above constrain the first operand too
      case FUNCTION_BIT_NOT:
      // case FUNCTION_BOOL_NOT:
      case FUNCTION_NEG:
        result.addMinTypeExprent(param1, VarType.VARTYPE_BYTECHAR);
        break;
      case FUNCTION_AND:
      case FUNCTION_OR:
      case FUNCTION_XOR:
      case FUNCTION_EQ:
      case FUNCTION_NE: {
        if (type1.type == CodeConstants.TYPE_BOOLEAN) {
          if (type2.isStrictSuperset(type1)) {
            result.addMinTypeExprent(param1, VarType.VARTYPE_BYTECHAR);
          }
          else { // both are booleans
            boolean param1FalseBoolean =
              type1.isFalseBoolean() || (param1.type == Exprent.EXPRENT_CONST && !((ConstExprent)param1).hasBooleanValue());
            // FIX: copy-paste bug — this previously tested type1.isFalseBoolean()
            boolean param2FalseBoolean =
              type2.isFalseBoolean() || (param2.type == Exprent.EXPRENT_CONST && !((ConstExprent)param2).hasBooleanValue());
            if (param1FalseBoolean || param2FalseBoolean) {
              result.addMinTypeExprent(param1, VarType.VARTYPE_BYTECHAR);
              result.addMinTypeExprent(param2, VarType.VARTYPE_BYTECHAR);
            }
          }
        }
        else if (type2.type == CodeConstants.TYPE_BOOLEAN) {
          if (type1.isStrictSuperset(type2)) {
            result.addMinTypeExprent(param2, VarType.VARTYPE_BYTECHAR);
          }
        }
      }
    }
    return result;
  }

  @Override
  public List<Exprent> getAllExprents() {
    // defensive copy so callers cannot mutate the operand list through it
    return new ArrayList<>(lstOperands);
  }

  @Override
  public Exprent copy() {
    List<Exprent> lst = new ArrayList<>();
    for (Exprent expr : lstOperands) {
      lst.add(expr.copy());
    }
    FunctionExprent func = new FunctionExprent(funcType, lst, bytecode);
    func.setImplicitType(implicitType);
    return func;
  }

  // NOTE(review): equals() is overridden without hashCode(); deliberately left
  // as-is because adding hashCode() would change HashSet/HashMap behaviour of
  // existing callers — confirm before fixing the contract violation.
  @Override
  public boolean equals(Object o) {
    if (o == this) return true;
    if (!(o instanceof FunctionExprent)) return false; // also covers null
    FunctionExprent fe = (FunctionExprent)o;
    return funcType == fe.getFuncType() &&
           InterpreterUtil.equalLists(lstOperands, fe.getLstOperands()); // TODO: order of operands insignificant
  }

  @Override
  public void replaceExprent(Exprent oldExpr, Exprent newExpr) {
    // identity comparison on purpose: replace the exact operand instance
    for (int i = 0; i < lstOperands.size(); i++) {
      if (oldExpr == lstOperands.get(i)) {
        lstOperands.set(i, newExpr);
      }
    }
  }

  /**
   * Renders this function as Java source text.
   */
  @Override
  public TextBuffer toJava(int indent, BytecodeMappingTracer tracer) {
    tracer.addMapping(bytecode);
    if (funcType <= FUNCTION_USHR) {
      // binary arithmetic/bitwise/shift: funcType indexes OPERATORS directly
      return wrapOperandString(lstOperands.get(0), false, indent, tracer)
        .append(OPERATORS[funcType])
        .append(wrapOperandString(lstOperands.get(1), true, indent, tracer));
    }
    if (funcType >= FUNCTION_EQ) {
      // comparisons/logic/concat start at OPERATORS index 11
      return wrapOperandString(lstOperands.get(0), false, indent, tracer)
        .append(OPERATORS[funcType - FUNCTION_EQ + 11])
        .append(wrapOperandString(lstOperands.get(1), true, indent, tracer));
    }
    switch (funcType) {
      case FUNCTION_BIT_NOT:
        return wrapOperandString(lstOperands.get(0), true, indent, tracer).prepend("~");
      case FUNCTION_BOOL_NOT:
        return wrapOperandString(lstOperands.get(0), true, indent, tracer).prepend("!");
      case FUNCTION_NEG:
        return wrapOperandString(lstOperands.get(0), true, indent, tracer).prepend("-");
      case FUNCTION_CAST:
        return lstOperands.get(1).toJava(indent, tracer).enclose("(", ")").append(wrapOperandString(lstOperands.get(0), true, indent, tracer));
      case FUNCTION_ARRAY_LENGTH:
        Exprent arr = lstOperands.get(0);
        TextBuffer res = wrapOperandString(arr, false, indent, tracer);
        if (arr.getExprType().arrayDim == 0) {
          // operand was not typed as an array: force a cast to Object[]
          VarType objArr = VarType.VARTYPE_OBJECT.resizeArrayDim(1); // type family does not change
          res.enclose("((" + ExprProcessor.getCastTypeName(objArr) + ")", ")");
        }
        return res.append(".length");
      case FUNCTION_IIF:
        return wrapOperandString(lstOperands.get(0), true, indent, tracer)
          .append("?")
          .append(wrapOperandString(lstOperands.get(1), true, indent, tracer))
          .append(":")
          .append(wrapOperandString(lstOperands.get(2), true, indent, tracer));
      case FUNCTION_IPP:
        return wrapOperandString(lstOperands.get(0), true, indent, tracer).append("++");
      case FUNCTION_PPI:
        return wrapOperandString(lstOperands.get(0), true, indent, tracer).prepend("++");
      case FUNCTION_IMM:
        return wrapOperandString(lstOperands.get(0), true, indent, tracer).append("--");
      case FUNCTION_MMI:
        return wrapOperandString(lstOperands.get(0), true, indent, tracer).prepend("--");
      case FUNCTION_INSTANCEOF:
        return wrapOperandString(lstOperands.get(0), true, indent, tracer).append(" instanceof ").append(wrapOperandString(lstOperands.get(1), true, indent, tracer));
      case FUNCTION_LCMP: // shouldn't appear in the final code
        return wrapOperandString(lstOperands.get(0), true, indent, tracer).prepend("__lcmp__(")
          .append(",")
          .append(wrapOperandString(lstOperands.get(1), true, indent, tracer))
          .append(")");
      case FUNCTION_FCMPL: // shouldn't appear in the final code
        return wrapOperandString(lstOperands.get(0), true, indent, tracer).prepend("__fcmpl__(")
          .append(",")
          .append(wrapOperandString(lstOperands.get(1), true, indent, tracer))
          .append(")");
      case FUNCTION_FCMPG: // shouldn't appear in the final code
        return wrapOperandString(lstOperands.get(0), true, indent, tracer).prepend("__fcmpg__(")
          .append(",")
          .append(wrapOperandString(lstOperands.get(1), true, indent, tracer))
          .append(")");
      case FUNCTION_DCMPL: // shouldn't appear in the final code
        return wrapOperandString(lstOperands.get(0), true, indent, tracer).prepend("__dcmpl__(")
          .append(",")
          .append(wrapOperandString(lstOperands.get(1), true, indent, tracer))
          .append(")");
      case FUNCTION_DCMPG: // shouldn't appear in the final code
        return wrapOperandString(lstOperands.get(0), true, indent, tracer).prepend("__dcmpg__(")
          .append(",")
          .append(wrapOperandString(lstOperands.get(1), true, indent, tracer))
          .append(")");
    }
    if (funcType <= FUNCTION_I2S) {
      // primitive conversion rendered as an explicit cast
      return wrapOperandString(lstOperands.get(0), true, indent, tracer).prepend("(" + ExprProcessor.getTypeName(
        TYPES[funcType - FUNCTION_I2L]) + ")");
    }
    throw new RuntimeException("invalid function");
  }

  @Override
  public int getPrecedence() {
    return getPrecedence(funcType);
  }

  public static int getPrecedence(int func) {
    return PRECEDENCE[func];
  }

  /** Target type of a simple primitive conversion (FUNCTION_I2L..FUNCTION_I2S only). */
  public VarType getSimpleCastType() {
    return TYPES[funcType - FUNCTION_I2L];
  }

  /**
   * Renders an operand, parenthesizing it when its precedence requires it.
   *
   * @param eq whether an operand of EQUAL precedence must also be wrapped
   *           (true for right operands of non-associative functions)
   */
  private TextBuffer wrapOperandString(Exprent expr, boolean eq, int indent, BytecodeMappingTracer tracer) {
    int myprec = getPrecedence();
    int exprprec = expr.getPrecedence();
    boolean parentheses = exprprec > myprec;
    if (!parentheses && eq) {
      parentheses = (exprprec == myprec);
      if (parentheses) {
        if (expr.type == Exprent.EXPRENT_FUNCTION &&
            ((FunctionExprent)expr).getFuncType() == funcType) {
          // same associative function needs no parentheses: a + (b + c) == a + b + c
          parentheses = !ASSOCIATIVITY.contains(funcType);
        }
      }
    }
    TextBuffer res = expr.toJava(indent, tracer);
    if (parentheses) {
      res.enclose("(", ")");
    }
    return res;
  }

  /**
   * Widest numeric type present in arr, checked in JLS promotion order
   * double > float > long; defaults to int.
   */
  private static VarType getMaxVarType(VarType[] arr) {
    int[] types = new int[]{CodeConstants.TYPE_DOUBLE, CodeConstants.TYPE_FLOAT, CodeConstants.TYPE_LONG};
    VarType[] vartypes = new VarType[]{VarType.VARTYPE_DOUBLE, VarType.VARTYPE_FLOAT, VarType.VARTYPE_LONG};
    for (int i = 0; i < types.length; i++) {
      for (int j = 0; j < arr.length; j++) {
        if (arr[j].type == types[i]) {
          return vartypes[i];
        }
      }
    }
    return VarType.VARTYPE_INT;
  }

  // *****************************************************************************
  // getter and setter methods
  // *****************************************************************************

  public int getFuncType() {
    return funcType;
  }

  public void setFuncType(int funcType) {
    this.funcType = funcType;
  }

  public List<Exprent> getLstOperands() {
    return lstOperands;
  }

  public void setImplicitType(VarType implicitType) {
    this.implicitType = implicitType;
  }

  // *****************************************************************************
  // IMatchable implementation
  // *****************************************************************************

  @Override
  public boolean match(MatchNode matchNode, MatchEngine engine) {
    if (!super.match(matchNode, engine)) {
      return false;
    }
    Integer type = (Integer)matchNode.getRuleValue(MatchProperties.EXPRENT_FUNCTYPE);
    if (type != null && this.funcType != type.intValue()) {
      return false;
    }
    return true;
  }
}
| |
/*
* Copyright 2019 NAVER Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.navercorp.pinpoint.web.vo.callstacks;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import com.navercorp.pinpoint.common.server.bo.AnnotationBo;
import com.navercorp.pinpoint.common.server.bo.ApiMetaDataBo;
import com.navercorp.pinpoint.common.server.bo.MethodTypeEnum;
import com.navercorp.pinpoint.loader.service.AnnotationKeyRegistryService;
import com.navercorp.pinpoint.loader.service.ServiceTypeRegistryService;
import com.navercorp.pinpoint.common.trace.AnnotationKey;
import com.navercorp.pinpoint.common.trace.AnnotationKeyMatcher;
import com.navercorp.pinpoint.common.trace.ServiceType;
import com.navercorp.pinpoint.common.server.util.AnnotationUtils;
import com.navercorp.pinpoint.common.server.trace.ApiDescription;
import com.navercorp.pinpoint.common.server.trace.ApiDescriptionParser;
import com.navercorp.pinpoint.web.calltree.span.Align;
import com.navercorp.pinpoint.web.calltree.span.CallTreeNode;
import com.navercorp.pinpoint.web.service.AnnotationKeyMatcherService;
import com.navercorp.pinpoint.web.service.ProxyRequestTypeRegistryService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author minwoo.jung
*/
/**
 * Builds {@link Record} instances (call rows, annotations, exceptions,
 * parameters) from call-tree nodes, assigning sequential record ids.
 *
 * Not thread-safe: {@code idGen} is mutated without synchronization, so one
 * instance must be used per call-tree rendering.
 *
 * @author minwoo.jung
 */
public class RecordFactory {

    private final Logger logger = LoggerFactory.getLogger(this.getClass());

    // spans with id = 0 are regarded as root - start at 1
    private int idGen = 1;
    // FIX: constructor-injected collaborators made final (they were mutable fields)
    private final AnnotationKeyMatcherService annotationKeyMatcherService;
    private final ServiceTypeRegistryService registry;
    private final AnnotationKeyRegistryService annotationKeyRegistryService;
    private final ApiDescriptionParser apiDescriptionParser = new ApiDescriptionParser();
    private final AnnotationRecordFormatter annotationRecordFormatter;
    private final ProxyRequestTypeRegistryService proxyRequestTypeRegistryService;

    public RecordFactory(final AnnotationKeyMatcherService annotationKeyMatcherService, final ServiceTypeRegistryService registry, final AnnotationKeyRegistryService annotationKeyRegistryService, final ProxyRequestTypeRegistryService proxyRequestTypeRegistryService) {
        // fail fast on missing collaborators instead of NPE-ing later
        this.annotationKeyMatcherService = Objects.requireNonNull(annotationKeyMatcherService, "annotationKeyMatcherService");
        this.registry = Objects.requireNonNull(registry, "registry");
        this.annotationKeyRegistryService = Objects.requireNonNull(annotationKeyRegistryService, "annotationKeyRegistryService");
        this.proxyRequestTypeRegistryService = Objects.requireNonNull(proxyRequestTypeRegistryService, "proxyRequestTypeRegistryService");
        this.annotationRecordFormatter = new AnnotationRecordFormatter(proxyRequestTypeRegistryService);
    }

    /**
     * Builds the main call record for a node; assigns the node's align a new id
     * as a side effect.
     */
    public Record get(final CallTreeNode node) {
        final Align align = node.getAlign();
        align.setId(getNextId());
        final int parentId = getParentId(node);
        Api api = getApi(align);
        final String argument = getArgument(align);
        final Record record = new DefaultRecord(align.getDepth(),
                align.getId(),
                parentId,
                true,
                api.getTitle(),
                argument,
                align.getStartTime(),
                align.getElapsed(),
                align.getGap(),
                align.getAgentId(),
                align.getApplicationId(),
                registry.findServiceType(align.getServiceType()),
                align.getDestinationId(),
                align.hasChild(),
                false,
                align.getTransactionId(),
                align.getSpanId(),
                align.getExecutionMilliseconds(),
                api.getMethodTypeEnum(),
                true);
        record.setSimpleClassName(api.getClassName());
        record.setFullApiDescription(api.getDescription());
        return record;
    }

    /** Prefers the RPC string as display argument; falls back to annotations. */
    private String getArgument(final Align align) {
        final String rpc = align.getRpc();
        if (rpc != null) {
            return rpc;
        }
        return getDisplayArgument(align);
    }

    private String getDisplayArgument(Align align) {
        final AnnotationBo displayArgument = getDisplayArgument0(align.getServiceType(), align.getAnnotationBoList());
        if (displayArgument == null) {
            return "";
        }
        final AnnotationKey key = findAnnotationKey(displayArgument.getKey());
        return this.annotationRecordFormatter.formatArguments(key, displayArgument, align);
    }

    /**
     * Returns the first annotation matching the display-argument matcher for
     * the service type, or null if the list/matcher is absent or nothing matches.
     */
    private AnnotationBo getDisplayArgument0(final short serviceType, final List<AnnotationBo> annotationBoList) {
        if (annotationBoList == null) {
            return null;
        }
        final AnnotationKeyMatcher matcher = annotationKeyMatcherService.findAnnotationKeyMatcher(serviceType);
        if (matcher == null) {
            return null;
        }
        for (AnnotationBo annotation : annotationBoList) {
            int key = annotation.getKey();
            if (matcher.matches(key)) {
                return annotation;
            }
        }
        return null;
    }

    /**
     * Builds a placeholder record for a filtered-out node: unknown agent and
     * service type, no argument, not authorized for detail.
     */
    public Record getFilteredRecord(final CallTreeNode node, String apiTitle) {
        final Align align = node.getAlign();
        align.setId(getNextId());
        final int parentId = getParentId(node);
        final Record record = new DefaultRecord(align.getDepth(),
                align.getId(),
                parentId,
                true,
                apiTitle,
                "",
                align.getStartTime(),
                align.getElapsed(),
                align.getGap(),
                "UNKNOWN",
                align.getApplicationId(),
                ServiceType.UNKNOWN,
                "",
                false,
                false,
                align.getTransactionId(),
                align.getSpanId(),
                align.getExecutionMilliseconds(),
                MethodTypeEnum.DEFAULT,
                false);
        return record;
    }

    /** Returns an exception record, or null when the align has no exception. */
    public Record getException(final int depth, final int parentId, final Align align) {
        if (!align.hasException()) {
            return null;
        }
        return new ExceptionRecord(depth, getNextId(), parentId, align);
    }

    /**
     * Builds records for all annotations flagged as viewable in the record set.
     * Returns an empty list when the align carries no annotations.
     */
    public List<Record> getAnnotations(final int depth, final int parentId, Align align) {
        List<Record> list = new ArrayList<>();
        // FIX: guard against a null annotation list, consistent with getDisplayArgument0()
        final List<AnnotationBo> annotationBoList = align.getAnnotationBoList();
        if (annotationBoList == null) {
            return list;
        }
        for (AnnotationBo annotation : annotationBoList) {
            final AnnotationKey key = findAnnotationKey(annotation.getKey());
            if (key.isViewInRecordSet()) {
                final String title = this.annotationRecordFormatter.formatTitle(key, annotation, align);
                final String arguments = this.annotationRecordFormatter.formatArguments(key, annotation, align);
                final Record record = new AnnotationRecord(depth, getNextId(), parentId, title, arguments, annotation.isAuthorized());
                list.add(record);
            }
        }
        return list;
    }

    public Record getParameter(final int depth, final int parentId, final String method, final String argument) {
        return new ParameterRecord(depth, getNextId(), parentId, method, argument);
    }

    /**
     * Parent record id of a node; 0 for a root span. A non-span node without a
     * parent indicates a broken call tree.
     */
    int getParentId(final CallTreeNode node) {
        final CallTreeNode parent = node.getParent();
        if (parent == null) {
            if (!node.getAlign().isSpan()) {
                throw new IllegalStateException("parent is null. node=" + node);
            }
            return 0;
        }
        return parent.getAlign().getId();
    }

    /**
     * Extracts API title/class/description from the API_METADATA annotation;
     * falls back to the error-annotation key name when the metadata is absent.
     */
    private Api getApi(final Align align) {
        final AnnotationBo annotation = AnnotationUtils.findAnnotationBo(align.getAnnotationBoList(), AnnotationKey.API_METADATA);
        if (annotation != null) {
            final Api api = new Api();
            final ApiMetaDataBo apiMetaData = (ApiMetaDataBo) annotation.getValue();
            String apiInfo = getApiInfo(apiMetaData);
            api.setTitle(apiInfo);
            api.setDescription(apiInfo);
            if (apiMetaData.getMethodTypeEnum() == MethodTypeEnum.DEFAULT) {
                try {
                    // FIX: use the accessor instead of reaching into the field
                    ApiDescription apiDescription = apiDescriptionParser.parse(api.getDescription());
                    api.setTitle(apiDescription.getSimpleMethodDescription());
                    api.setClassName(apiDescription.getSimpleClassName());
                } catch (Exception e) {
                    // best effort: keep the raw description as the title on parse failure
                    logger.debug("Failed to api parse. {}", api.getDescription(), e);
                }
            }
            api.setMethodTypeEnum(apiMetaData.getMethodTypeEnum());
            return api;
        } else {
            final Api api = new Api();
            AnnotationKey apiMetaDataError = getApiMetaDataError(align.getAnnotationBoList());
            api.setTitle(apiMetaDataError.getName());
            return api;
        }
    }

    /** Appends the line number to the API info when one is recorded. */
    private String getApiInfo(ApiMetaDataBo apiMetaDataBo) {
        if (apiMetaDataBo.getLineNumber() != -1) {
            return apiMetaDataBo.getApiInfo() + ":" + apiMetaDataBo.getLineNumber();
        } else {
            return apiMetaDataBo.getApiInfo();
        }
    }

    public AnnotationKey getApiMetaDataError(List<AnnotationBo> annotationBoList) {
        for (AnnotationBo bo : annotationBoList) {
            AnnotationKey apiErrorCode = annotationKeyRegistryService.findApiErrorCode(bo.getKey());
            if (apiErrorCode != null) {
                return apiErrorCode;
            }
        }
        // could not find a more specific error - returns generalized error
        return AnnotationKey.ERROR_API_METADATA_ERROR;
    }

    private AnnotationKey findAnnotationKey(int key) {
        return annotationKeyRegistryService.findAnnotationKey(key);
    }

    /** Hands out sequential record ids, starting at 1 (0 is reserved for root). */
    private int getNextId() {
        return idGen++;
    }

    /** Mutable holder for API display attributes extracted from metadata. */
    private static class Api {
        private String title = "";
        private String className = "";
        private String description = "";
        private MethodTypeEnum methodTypeEnum = MethodTypeEnum.DEFAULT;

        public Api() {
        }

        public String getTitle() {
            return title;
        }

        public void setTitle(String title) {
            this.title = title;
        }

        public String getClassName() {
            return className;
        }

        public void setClassName(String className) {
            this.className = className;
        }

        public String getDescription() {
            return description;
        }

        public void setDescription(String description) {
            this.description = description;
        }

        public MethodTypeEnum getMethodTypeEnum() {
            return methodTypeEnum;
        }

        public void setMethodTypeEnum(MethodTypeEnum methodTypeEnum) {
            this.methodTypeEnum = Objects.requireNonNull(methodTypeEnum, "methodTypeEnum");
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @author Victor A. Martynov
* @version $Revision: 1.1.2.8 $
*/
package org.apache.harmony.rmi.activation;
import java.io.BufferedOutputStream;
import java.io.EOFException;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.OutputStream;
import java.io.Serializable;
import java.rmi.ConnectException;
import java.rmi.ConnectIOException;
import java.rmi.MarshalException;
import java.rmi.MarshalledObject;
import java.rmi.Naming;
import java.rmi.RMISecurityManager;
import java.rmi.Remote;
import java.rmi.RemoteException;
import java.rmi.activation.ActivationDesc;
import java.rmi.activation.ActivationException;
import java.rmi.activation.ActivationGroup;
import java.rmi.activation.ActivationGroupDesc;
import java.rmi.activation.ActivationGroupID;
import java.rmi.activation.ActivationID;
import java.rmi.activation.ActivationInstantiator;
import java.rmi.activation.ActivationMonitor;
import java.rmi.activation.ActivationSystem;
import java.rmi.activation.Activator;
import java.rmi.activation.UnknownGroupException;
import java.rmi.activation.UnknownObjectException;
import java.rmi.activation.ActivationGroupDesc.CommandEnvironment;
import java.rmi.registry.LocateRegistry;
import java.rmi.registry.Registry;
import java.rmi.server.ObjID;
import java.rmi.server.RemoteObject;
import java.rmi.server.RemoteServer;
import java.rmi.server.RemoteStub;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.security.PrivilegedExceptionAction;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Enumeration;
import java.util.Hashtable;
import java.util.logging.Level;
import org.apache.harmony.rmi.common.GetBooleanPropAction;
import org.apache.harmony.rmi.common.GetLongPropAction;
import org.apache.harmony.rmi.common.GetStringPropAction;
import org.apache.harmony.rmi.common.RMIConstants;
import org.apache.harmony.rmi.common.RMILog;
import org.apache.harmony.rmi.common.RMIProperties;
import org.apache.harmony.rmi.internal.nls.Messages;
import org.apache.harmony.rmi.remoteref.UnicastServerRef;
import org.apache.harmony.rmi.server.ExportManager;
import org.apache.harmony.rmi.transport.RMIObjectOutputStream;
/**
* Represents rmid - RMI Activation Daemon. Implements
* all 3 remote interfaces that are essential for Activation:
* <code>ActivationSystem</code>, <code>ActivationMonitor</code>
* and <code>Activator</code>. This is done to avoid
* the multiplication of references pointing to Hashtables that keep
* information about ActivationGroupIDs and ActivationIDs and their
* mappings to ActivationGroupDescriptors and ActivationDescriptors.
*
* RMID is partially crash proof, which means it saves its state into two
* files: snapshot.rmid and delta.rmid. snapshot.rmid contains the
* snapshot of the structure that contains information about Activation
* Groups and Activatable Objects registered in this RMID and delta.rmid
* reflects the changes occurred since last snapshot.
*
* The objects that are saved in Snapshot:
* <UL>
* <LI>ActivationID: UID uid, String refType, UnicastRef2 RemoteRef</LI>
* <LI>ActivationDesc: ActivationGroupID groupID, String className, String
* location, MarshalledObject data, boolean restart</LI>
* <LI>ActivationGroupID: ActivationSystem system, UID uid</LI>
* <LI>ActivationGroupDesc: String className, String location,
* MarshalledObject data, ActivationGroupDesc.CommandEnvironment env,
* Properties props</LI>
* </UL>
*
* @author Victor A. Martynov
* @version $Revision: 1.1.2.8 $
*/
public class Rmid extends RemoteServer implements ActivationSystem,
ActivationMonitor, Activator, RmidMBean {
    private static final long serialVersionUID = -4936383024184263236L;
    /**
     * Standard logger for RMI Activation.
     *
     * @see org.apache.harmony.rmi.common.RMILog#getActivationLog()
     */
    private static RMILog rLog = RMILog.getActivationLog();
    /**
     * Internal registry in which the Activation System is registered.
     */
    private static Registry internalRegistry = null;
    /**
     * Port for the internal registry.
     *
     * @see java.rmi.activation.ActivationSystem#SYSTEM_PORT
     */
    private static int port = ActivationSystem.SYSTEM_PORT;
    /**
     * The stub for <code>this</code> object, exported in the constructor.
     */
    private Remote thisStub;
    /**
     * Indicates whether this instance of RMID should start the monitor
     * (set by the "-monitor" command line option).
     */
    private static boolean startMonitor = false;
    /**
     * The instance of RmidMonitor of this Rmid; null when monitoring is off.
     */
    private static RmidMonitor rmidMonitor = null;
    /**
     * Mapping from ActivationID to its ActivationGroupID.
     *
     * NOTE(review): these static Hashtables are mutated by concurrent remote
     * calls and rely solely on Hashtable's per-call locking; compound
     * check-then-act sequences on them are not atomic - confirm this is
     * acceptable for the activation protocol.
     */
    public static Hashtable groupIDByActivationID;
    /**
     * Mapping from ActivationGroupID to ActivationGroupInfo.
     */
    static Hashtable groupInfoByGroupId;
    /**
     * The timeout that is given to the ActivationGroup VM to
     * start (milliseconds).
     *
     * @see org.apache.harmony.rmi.common.RMIConstants#DEFAULT_ACTIVATION_EXECTIMEOUT
     * @see org.apache.harmony.rmi.common.RMIProperties#ACTIVATION_EXECTIMEOUT_PROP
     */
    private static long groupStartTimeout =
            RMIConstants.DEFAULT_ACTIVATION_EXECTIMEOUT;
    /**
     * Represents the number of deltas between consecutive snapshots of the
     * RMID current state.
     *
     * @see org.apache.harmony.rmi.common.RMIConstants#DEFAULT_SNAPSHOTINTERVAL
     * @see org.apache.harmony.rmi.common.RMIProperties#ACTIVATION_SNAPSHOTINTERVAL_PROP
     */
    private static long snapshotInterval =
            RMIConstants.DEFAULT_SNAPSHOTINTERVAL;
    /**
     * Indicates whether the debug information about logging - snapshots and
     * deltas of the RMID state - should be printed.
     *
     * @see org.apache.harmony.rmi.common.RMIProperties#ACTIVATION_LOG_DEBUG_PROP
     */
    private static boolean loggingDebug = false;
    /**
     * The maximum number of concurrently starting activation groups (VMs).
     *
     * @see org.apache.harmony.rmi.common.RMIConstants#MAX_CONCURRENT_STARTING_GROUPS
     * @see org.apache.harmony.rmi.common.RMIProperties#MAXSTARTGROUP_PROP
     */
    private static long maxConcurrentStartingGroups =
            RMIConstants.MAX_CONCURRENT_STARTING_GROUPS;
    /**
     * Indicates whether common activation debug information should be printed.
     * @see org.apache.harmony.rmi.common.RMIProperties#ACTIVATION_DEBUGEXEC_PROP
     */
    private static boolean activationDebug;
    /**
     * Arguments passed to every activation group VM of this Activation System.
     * These arguments are passed in the RMID command line using the "-C" option.
     */
    private static String[] groupArgs = null;
    /**
     * Represents the number of deltas that
     * happened after the last snapshot. When this value exceeds
     * snapshotInterval the snapshot is made and this
     * variable is reset to 0.
     */
    private static int deltasCounter = 0;
    /**
     * The flag that indicates that the RMID is in its restore phase. No
     * changes that are made to the RMID database while this flag is set
     * are recorded in DELTA_FILE.
     */
    private static boolean restoreLock = false;
    /**
     * This variable represents the number of groups that can be started
     * immediately. The initial value of this variable is
     * maxConcurrentStartingGroups and when a process of group start is
     * initiated this value is decremented. As soon as a group finishes its
     * starting procedures the value is increased.
     */
    private static long startingGroups = maxConcurrentStartingGroups;
    /**
     * The log level of RMID persistence state activities.
     */
    private static Level persistenceDebugLevel = RMILog.SILENT;
    /**
     * The log level of the general debugging information.
     */
    public static Level commonDebugLevel = RMILog.SILENT;
    /**
     * The folder to hold RMID logging information: snapshot and delta files.
     *
     * @see org.apache.harmony.rmi.common.RMIConstants#DEFAULT_LOG_FOLDER
     */
    private static String logFolder = RMIConstants.DEFAULT_LOG_FOLDER;
    /** Private marker type for the global startup/shutdown lock object. */
    private class Lock {}
    /** Guards startup: held for the whole constructor; see waitStartup(). */
    private Object lock = new Lock();
    /**
     * The name of the monitor class for RMID.
     *
     * @see RmidMonitor
     * @see org.apache.harmony.rmi.common.RMIConstants#DEFAULT_ACTIVATION_MONITOR_CLASS_NAME
     * @see org.apache.harmony.rmi.common.RMIProperties#ACTIVATION_MONITOR_CLASS_NAME_PROP
     */
    static String monitorClassName;
    /**
     * Initializes the activation system. Called in {@link #main(String[]) main}.
     *
     * Creates the internal registry on the given port, exports this object
     * under the well-known activation system object ID, binds it into the
     * registry and replays the persisted state from the log folder. On any
     * failure the whole VM exits with status 1, because RMID cannot operate
     * half-initialized.
     *
     * @param port port for the internal registry and the exported stub
     */
    private Rmid(int port) {
        try {
            /*
             * The process of starting RMID should not be interrupted by any
             * incoming calls so we put this whole process into synchronized
             * block on the global lock object. Remote methods call
             * waitStartup(), which blocks on the same lock until we are done.
             */
            synchronized (lock) {
                internalRegistry = LocateRegistry.createRegistry(port);
                // rmi.log.38=Registry created: {0}
                rLog.log(commonDebugLevel, Messages.getString("rmi.log.38", //$NON-NLS-1$
                        internalRegistry));
                // rmi.log.39=Creating Activation System on port {0}.
                rLog.log(commonDebugLevel, Messages.getString("rmi.log.39", //$NON-NLS-1$
                        port));
                // Export this object under the fixed activation system ObjID.
                UnicastServerRef usr = new UnicastServerRef(port, null,
                        null, new ObjID(RMIConstants.ACTIVATION_SYSTEM_ID));
                thisStub = ExportManager.exportObject(this, usr, false);
                // rmi.log.3A=stub's ref = {0}
                rLog.log(commonDebugLevel, Messages.getString("rmi.log.3A", //$NON-NLS-1$
                        ((RemoteObject) thisStub).getRef()));
                // Adopt the exported stub's ref as this RemoteServer's ref.
                this.ref = ((RemoteStub) thisStub).getRef();
                String activationSystemURL = "rmi://:" + port //$NON-NLS-1$
                        + "/java.rmi.activation.ActivationSystem"; //$NON-NLS-1$
                // rmi.log.3B=URL = {0}
                rLog.log(commonDebugLevel, Messages.getString("rmi.log.3B", activationSystemURL)); //$NON-NLS-1$
                // rmi.log.3C=Stub = {0}
                rLog.log(commonDebugLevel, Messages.getString("rmi.log.3C", thisStub)); //$NON-NLS-1$
                Naming.rebind(activationSystemURL, thisStub);
                // rmi.log.3D=Rebind was successful.
                rLog.log(commonDebugLevel, Messages.getString("rmi.log.3D")); //$NON-NLS-1$
                groupIDByActivationID = new Hashtable();
                groupInfoByGroupId = new Hashtable();
                if (startMonitor) {
                    rmidMonitor = RmidMonitorFactory
                            .getRmidMonitor(monitorClassName);
                    // rmi.log.3E=RmidMonitor created: {0}
                    rLog.log(commonDebugLevel, Messages.getString("rmi.log.3E", //$NON-NLS-1$
                            rmidMonitor));
                    /*
                     * Failed to obtain RmidMonitor: silently fall back to
                     * running without monitoring.
                     */
                    if (rmidMonitor == null) {
                        startMonitor = false;
                    }
                }
                // Replay snapshot.rmid / delta.rmid from the log folder.
                restore();
            }
        } catch (Throwable t) {
            // rmi.log.3F=Exception in RMID: {0}
            rLog.log(commonDebugLevel, Messages.getString("rmi.log.3F", t)); //$NON-NLS-1$
            t.printStackTrace();
            System.exit(1);
        }
    }
    /**
     * Blocks until the startup procedure of RMID is completed.
     *
     * The constructor holds the monitor of {@code lock} for the whole
     * startup sequence, so merely entering (and immediately leaving) a
     * synchronized block on the same lock cannot complete before startup
     * has finished. Called at the beginning of every remote method.
     */
    private void waitStartup() {
        synchronized (lock) {
            // Intentionally empty: acquiring the lock is all that is
            // needed to wait for startup to finish.
        }
    }
/**
* Main method to start activation system.
*
* @see RMIConstants#RMID_USAGE
*/
public static void main(String args[]) {
/* Setting the security manager. */
if (System.getSecurityManager() == null) {
System.setSecurityManager(new RMISecurityManager());
}
/*
* Reading properties.
*/
groupStartTimeout = ((Long) AccessController
.doPrivileged(new GetLongPropAction(
RMIProperties.ACTIVATION_EXECTIMEOUT_PROP,
groupStartTimeout))).longValue();
snapshotInterval = ((Long) AccessController
.doPrivileged(new GetLongPropAction(
RMIProperties.ACTIVATION_SNAPSHOTINTERVAL_PROP,
snapshotInterval))).longValue();
loggingDebug = ((Boolean) AccessController
.doPrivileged(new GetBooleanPropAction(
RMIProperties.ACTIVATION_LOG_DEBUG_PROP,
loggingDebug))).booleanValue();
maxConcurrentStartingGroups = ((Long) AccessController
.doPrivileged(new GetLongPropAction(
RMIProperties.MAXSTARTGROUP_PROP,
maxConcurrentStartingGroups))).longValue();
activationDebug = ((Boolean) AccessController
.doPrivileged(new GetBooleanPropAction(
RMIProperties.ACTIVATION_DEBUGEXEC_PROP, false)))
.booleanValue();
monitorClassName = (String) AccessController
.doPrivileged(new GetStringPropAction(
RMIProperties.ACTIVATION_MONITOR_CLASS_NAME_PROP,
RMIConstants.DEFAULT_ACTIVATION_MONITOR_CLASS_NAME));
if (loggingDebug) {
persistenceDebugLevel = RMILog.VERBOSE;
}
if (activationDebug) {
commonDebugLevel = RMILog.VERBOSE;
}
// rmi.log.40=\nThe following properties were set on RMID:
rLog.log(commonDebugLevel,
Messages.getString("rmi.log.40") + "\n" //$NON-NLS-1$ //$NON-NLS-2$
+ RMIProperties.ACTIVATION_EXECTIMEOUT_PROP
+ " = " + groupStartTimeout + "\n" //$NON-NLS-1$ //$NON-NLS-2$
+ RMIProperties.ACTIVATION_SNAPSHOTINTERVAL_PROP
+ " = " + snapshotInterval + "\n" //$NON-NLS-1$ //$NON-NLS-2$
+ RMIProperties.ACTIVATION_LOG_DEBUG_PROP + " = " //$NON-NLS-1$
+ loggingDebug + "\n" //$NON-NLS-1$
+ RMIProperties.MAXSTARTGROUP_PROP + " = " //$NON-NLS-1$
+ maxConcurrentStartingGroups + "\n" //$NON-NLS-1$
+ RMIProperties.ACTIVATION_DEBUGEXEC_PROP + " = " //$NON-NLS-1$
+ activationDebug + "\n" //$NON-NLS-1$
+ RMIProperties.ACTIVATION_MONITOR_CLASS_NAME_PROP
+ " = " + monitorClassName); //$NON-NLS-1$
/*
* The ArrayList for temporary holding of "-C" options.
*/
ArrayList tmpGroupArgs = new ArrayList();
/*
* Parsing command line arguments.
*/
for (int i = 0; i < args.length; i++) {
String argument = args[i];
if (argument.equals("-port")) { //$NON-NLS-1$
if (i + 1 >= args.length) {
// rmi.console.02=Insufficient arguments: port should be specified.
System.out.println(Messages.getString("rmi.console.02")); //$NON-NLS-1$
printUsage();
System.exit(1);
}
try {
port = Integer.parseInt(args[i + 1]);
} catch (NumberFormatException nfe) {
// rmi.console.03=Malformed port number.
System.out.println(Messages.getString("rmi.console.03")); //$NON-NLS-1$
printUsage();
System.exit(1);
}
i++;
} else if (argument.equals("-log")) { //$NON-NLS-1$
if (i + 1 >= args.length) {
// rmi.console.04=Insufficient arguments: log folder should be specified.
System.out.println(Messages.getString("rmi.console.04")); //$NON-NLS-1$
printUsage();
System.exit(1);
}
logFolder = args[i + 1];
i++;
} else if (argument.equals("-stop")) { //$NON-NLS-1$
try {
ActivationSystem system = ActivationGroup.getSystem();
system.shutdown();
// rmi.log.41=RMID was shut down
rLog.log(commonDebugLevel, Messages.getString("rmi.log.41")); //$NON-NLS-1$
return;
} catch (Throwable t) {
t.printStackTrace();
System.exit(1);
}
} else if (argument.equals("-C")) { //$NON-NLS-1$
tmpGroupArgs.add(args[i].substring(2));
} else if (argument.equals("-help")) { //$NON-NLS-1$
printUsage();
return;
} else if (argument.equals("-monitor")) { //$NON-NLS-1$
// rmi.log.42=Monitor option selected.
rLog.log(commonDebugLevel, Messages.getString("rmi.log.42")); //$NON-NLS-1$
startMonitor = true;
} else {
/*
* Illegal option found.
*/
// rmi.console.05=Illegal option: {0}
System.out.println(Messages.getString("rmi.console.05", argument)); //$NON-NLS-1$
printUsage();
System.exit(1);
}
}
/*
* Extracting collected "-C" options from ArrayList.
*/
groupArgs = (String[]) tmpGroupArgs
.toArray(new String[tmpGroupArgs.size()]);
/*
* Adding separator at the end of log folder.
*/
if (!logFolder.endsWith(File.separator)) {
logFolder = logFolder + File.separator;
}
final File dir = new File(logFolder);
/*
* Creating log folder.
*/
AccessController.doPrivileged(new PrivilegedAction() {
public Object run() {
if (!dir.exists() && !dir.mkdir()) {
//rmi.console.06=Cannot create log folder: {0}
System.out.println(Messages.getString("rmi.console.06", //$NON-NLS-1$
logFolder));
System.exit(1);
}
return null;
}
});
try {
Rmid rmid = new Rmid(port);
// rmi.log.43=RMID instance created: {0}
rLog.log(commonDebugLevel, Messages.getString("rmi.log.43", rmid)); //$NON-NLS-1$
Thread.sleep(Long.MAX_VALUE);
} catch (Throwable t) {
// rmi.log.44=Exception: {0}
rLog.log(commonDebugLevel, Messages.getString("rmi.log.44", t)); //$NON-NLS-1$
t.printStackTrace();
System.exit(1);
}
}
    /**
     * Prints the usage syntax for RMID to standard output.
     *
     * @see RMIConstants#RMID_USAGE
     */
    private static void printUsage() {
        System.out.println(RMIConstants.RMID_USAGE);
    }
/* *********************************************************************
*
* Next methods belong to ActivationSystem remote interface.
*
*********************************************************************/
public ActivationGroupID registerGroup(ActivationGroupDesc agdesc)
throws ActivationException {
waitStartup();
ActivationGroupID agid = new ActivationGroupID(this);
ActivationGroupInfo agi = new ActivationGroupInfo(agid, agdesc);
if (groupInfoByGroupId.containsKey(agid)) {
// rmi.2E=This group is already registered.
throw new ActivationException(Messages.getString("rmi.2E")); //$NON-NLS-1$
}
groupInfoByGroupId.put(agid, agi);
if (!restoreLock) {
writeDelta(Delta.PUT, "group", agid, agdesc); //$NON-NLS-1$
// rmi.log.45=Delta was saved:
rLog.log(persistenceDebugLevel, Messages.getString("rmi.log.45") //$NON-NLS-1$
+ Delta.PUT + "," + "group" + ", " + agid + ", " //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ //$NON-NLS-4$
+ agdesc);
}
return agid;
}
public ActivationMonitor activeGroup(ActivationGroupID gID,
ActivationInstantiator group, long incarnation)
throws UnknownGroupException, ActivationException {
waitStartup();
// rmi.log.46=Rmid.activeGroup: {0}, {1}, {2}
rLog.log(commonDebugLevel, Messages.getString("rmi.log.46", //$NON-NLS-1$
new Object[]{gID, group, incarnation}));
// rmi.log.47=groupID2groupInfo_H = {0}
rLog.log(commonDebugLevel, Messages.getString("rmi.log.47", //$NON-NLS-1$
groupInfoByGroupId));
ActivationGroupInfo agi = (ActivationGroupInfo) groupInfoByGroupId
.get(gID);
// rmi.log.48=Rmid.activeGroup group info = {0}
rLog.log(commonDebugLevel, Messages.getString("rmi.log.48", agi));//$NON-NLS-1$
if (agi == null) {
// rmi.2F=Group is not registered: {0}
throw new UnknownGroupException(Messages.getString("rmi.2F", gID)); //$NON-NLS-1$
} else if (agi.isActive()) {
// rmi.30=Group is already active: {0}
throw new ActivationException(Messages.getString("rmi.30", gID)); //$NON-NLS-1$
}
// rmi.log.49=ready to execute agi.active()
rLog.log(commonDebugLevel, Messages.getString("rmi.log.49")); //$NON-NLS-1$
agi.active(group, incarnation);
// rmi.log.4A=Rmid.activeGroup finished.
rLog.log(commonDebugLevel, Messages.getString("rmi.log.4A")); //$NON-NLS-1$
return this;
}
/**
* This method is absent in Java Remote Method Invocation
* specification.
*
* @param aID
* @throws UnknownObjectException if <code>ActivationID</code>
* is not registered
* @throws ActivationException for general failure
* @throws RemoteException if remote call fails
*/
public ActivationDesc getActivationDesc(ActivationID aID)
throws UnknownObjectException {
waitStartup();
ActivationGroupID agid = (ActivationGroupID) groupIDByActivationID
.get(aID);
ActivationGroupInfo info = (ActivationGroupInfo) groupInfoByGroupId
.get(agid);
ActivationDesc adesc = info.getActivationDesc(aID);
if (adesc == null) {
// rmi.31=No ActivationDesc for ActivationID {0}
throw new UnknownObjectException(Messages.getString("rmi.31", aID)); //$NON-NLS-1$
}
return adesc;
}
/**
* This method is absent in Java Remote Method Invocation
* specification.
*
* @throws UnknownGroupException - if agid is not registered
* @throws ActivationException - for general failure
* @throws RemoteException - if remote call fails
*/
public ActivationGroupDesc getActivationGroupDesc(
ActivationGroupID agid) throws UnknownObjectException {
waitStartup();
ActivationGroupInfo agi = (ActivationGroupInfo) groupInfoByGroupId
.get(agid);
if (agi == null) {
// rmi.32=No ActivationGroupDesc for ActivationGroupID {0}
throw new UnknownObjectException(Messages.getString("rmi.32", agid)); //$NON-NLS-1$
}
return agi.getActivationGroupDesc();
}
public ActivationID registerObject(ActivationDesc adesc) {
waitStartup();
// rmi.log.4B=ActivationSystemImpl.registerObject():
rLog.log(commonDebugLevel,Messages.getString("rmi.log.4B")); //$NON-NLS-1$
ActivationGroupID agid = adesc.getGroupID();
// rmi.log.4C=agid : {0}
rLog.log(commonDebugLevel, Messages.getString("rmi.log.4C", agid)); //$NON-NLS-1$
// rmi.log.4D=Activator stub = {0}
rLog.log(commonDebugLevel, Messages.getString("rmi.log.4D", thisStub)); //$NON-NLS-1$
ActivationID aid = new ActivationID((Activator) thisStub);
// rmi.log.4E=aid : {0}
rLog.log(commonDebugLevel, Messages.getString("rmi.log.4E", aid)); //$NON-NLS-1$
// rmi.log.4C=agid : {0}
rLog.log(commonDebugLevel, Messages.getString("rmi.log.4C", agid)); //$NON-NLS-1$
ActivationGroupInfo info = (ActivationGroupInfo) groupInfoByGroupId
.get(agid);
// rmi.log.50=ActivationGroupInfo = {0}
rLog.log(commonDebugLevel, Messages.getString("rmi.log.50", info)); //$NON-NLS-1$
info.registerObject(aid, adesc);
// rmi.log.51=Activation desc was added.
rLog.log(commonDebugLevel, Messages.getString("rmi.log.51")); //$NON-NLS-1$
return aid;
}
public ActivationDesc setActivationDesc(ActivationID id,
ActivationDesc desc) {
waitStartup();
ActivationGroupID agid = (ActivationGroupID) groupIDByActivationID
.get(id);
ActivationGroupInfo agi = (ActivationGroupInfo) groupInfoByGroupId
.get(agid);
return agi.setActivationDesc(id, desc);
}
public ActivationGroupDesc setActivationGroupDesc(
ActivationGroupID id, ActivationGroupDesc desc) {
waitStartup();
ActivationGroupInfo agi = (ActivationGroupInfo) groupInfoByGroupId
.get(id);
return agi.setActivationGroupDesc(id, desc);
}
public void shutdown() {
synchronized (lock) {
// rmi.log.52=The Rmid is going to shutdown
rLog.log(commonDebugLevel, Messages.getString("rmi.log.52")); //$NON-NLS-1$
Enumeration enumeration = groupInfoByGroupId.elements();
while (enumeration.hasMoreElements()) {
try {
ActivationGroupInfo agi = (ActivationGroupInfo) enumeration
.nextElement();
agi.shutdown();
} catch (Throwable t) {
// rmi.log.53=Exception in Rmid.shutdown: {0}
rLog.log(commonDebugLevel,Messages.getString("rmi.log.53", t)); //$NON-NLS-1$
t.printStackTrace();
}
}
// rmi.log.54=...... Done.
rLog.log(commonDebugLevel, Messages.getString("rmi.log.54")); //$NON-NLS-1$
System.exit(0);
}
}
public void unregisterGroup(ActivationGroupID id)
throws ActivationException, UnknownGroupException,
RemoteException {
waitStartup();
ActivationGroupInfo agi = (ActivationGroupInfo) groupInfoByGroupId
.remove(id);
if (agi == null) {
// rmi.34=Attempt to unregister unknown group {0}
throw new UnknownGroupException(Messages.getString("rmi.34", id)); //$NON-NLS-1$
}
agi.unregister();
}
/**
* @param aID the ActivationID of the object that should be removed.
*/
public void unregisterObject(ActivationID aID) {
waitStartup();
ActivationGroupID gID = (ActivationGroupID) groupIDByActivationID
.get(aID);
ActivationGroupInfo gInfo = (ActivationGroupInfo) groupInfoByGroupId
.get(gID);
gInfo.unregisterObject(aID);
}
/* *********************************************************************
*
* Next methods belong to ActivationMonitor remote interface.
*
*********************************************************************/
public void activeObject(ActivationID id, MarshalledObject obj)
throws RemoteException, UnknownObjectException {
waitStartup();
// rmi.log.56=Rmid.activeObject: {0}; {1}
rLog.log(commonDebugLevel, Messages.getString("rmi.log.56", //$NON-NLS-1$
id, obj));
ActivationGroupID agid = (ActivationGroupID) groupIDByActivationID
.get(id);
// rmi.log.57=agid = {0}
rLog.log(commonDebugLevel, Messages.getString("rmi.log.57", agid)); //$NON-NLS-1$
ActivationGroupInfo agi = (ActivationGroupInfo) groupInfoByGroupId
.get(agid);
// rmi.log.58=agi = {0}
rLog.log(commonDebugLevel, Messages.getString("rmi.log.58", agi)); //$NON-NLS-1$
ObjectInfo oi = (ObjectInfo) agi.objectInfoByActivationID.get(id);
// rmi.log.59=oi= {0}
rLog.log(commonDebugLevel, Messages.getString("rmi.log.59", oi)); //$NON-NLS-1$
oi.active();
// rmi.log.5A=Rmid.activeObject finished.
rLog.log(commonDebugLevel, Messages.getString("rmi.log.5A")); //$NON-NLS-1$
}
public void inactiveGroup(ActivationGroupID id, long incarnation) {
waitStartup();
ActivationGroupInfo agi = (ActivationGroupInfo) groupInfoByGroupId
.get(id);
agi.inactive(incarnation);
}
public void inactiveObject(ActivationID aID) {
waitStartup();
ActivationGroupID gID = (ActivationGroupID) groupIDByActivationID
.get(aID);
ActivationGroupInfo gInfo = (ActivationGroupInfo) groupInfoByGroupId
.get(gID);
gInfo.inactiveObject(aID);
}
/* *********************************************************************
*
* Next methods belong to Activator remote interface.
*
*********************************************************************/
public MarshalledObject activate(ActivationID id, boolean force)
throws ActivationException, UnknownObjectException,
RemoteException {
waitStartup();
// rmi.log.5B=ActivatorImpl.activate({0}; {1})
rLog.log(commonDebugLevel, Messages.getString("rmi.log.5B", //$NON-NLS-1$
id, force));
ActivationGroupID agid = (ActivationGroupID) groupIDByActivationID
.get(id);
// rmi.log.57=agid = {0}
rLog.log(commonDebugLevel, Messages.getString("rmi.log.57", agid)); //$NON-NLS-1$
ActivationGroupInfo info = (ActivationGroupInfo) groupInfoByGroupId
.get(agid);
// rmi.log.5C=info = {0}
rLog.log(commonDebugLevel, Messages.getString("rmi.log.5C", info)); //$NON-NLS-1$
return info.activateObject(id, force);
}
/**
* This class holds all the information needed about ActivationGroup.
* It contains the following information: ActivationGroupID
* ActivationGroupDesc and the array of activatable objects within this
* group. The ActivationSystem holds the array of such objects.
*/
private class ActivationGroupInfo {
private ActivationGroupID agid;
private ActivationGroupDesc agdesc;
private Hashtable objectInfoByActivationID;
private long incarnation;
private boolean isActive;
private ActivationInstantiator activationInstantiator;
private Process process;
public ActivationGroupInfo(ActivationGroupID agid,
ActivationGroupDesc agdesc) {
this.agdesc = agdesc;
this.agid = agid;
objectInfoByActivationID = new Hashtable();
incarnation = 0;
isActive = false;
if (startMonitor) {
rmidMonitor.addGroup(agid);
}
}
public synchronized void inactiveObject(ActivationID aID) {
ObjectInfo oi = (ObjectInfo) objectInfoByActivationID.get(aID);
oi.inactive();
}
public synchronized void unregisterObject(ActivationID aID) {
objectInfoByActivationID.remove(aID);
groupIDByActivationID.remove(aID);
if (startMonitor) {
rmidMonitor.removeObject(aID);
}
}
public synchronized ActivationDesc setActivationDesc(
ActivationID id, ActivationDesc desc) {
ObjectInfo oi = (ObjectInfo) objectInfoByActivationID.get(id);
ActivationDesc oldDesc = oi.getActivationDesc();
oi.setActivationDesc(desc);
return oldDesc;
}
public synchronized ActivationGroupDesc setActivationGroupDesc(
ActivationGroupID id, ActivationGroupDesc desc) {
ActivationGroupDesc oldDesc = agdesc;
agdesc = desc;
return oldDesc;
}
public synchronized void registerObject(ActivationID id,
ActivationDesc desc) {
groupIDByActivationID.put(id, agid);
ObjectInfo oi = new ObjectInfo(id, desc);
objectInfoByActivationID.put(id, oi);
if (!restoreLock) {
writeDelta(Delta.PUT, "object", id, desc); //$NON-NLS-1$
// rmi.log.5D=New delta was generated.
rLog.log(persistenceDebugLevel, Messages
.getString("rmi.log.5D")); //$NON-NLS-1$
}
}
public synchronized MarshalledObject activateObject(
ActivationID id, boolean force)
throws ActivationException, RemoteException {
// rmi.log.5E=GroupInfo: id={0}; force={1}
rLog.log(commonDebugLevel, Messages.getString("rmi.log.5E", //$NON-NLS-1$
id, force));
if (!isActive) {
activateGroup();
// rmi.log.5F=Group was activated.
rLog.log(commonDebugLevel, Messages.getString("rmi.log.5F")); //$NON-NLS-1$
} else {
// rmi.log.60=Group was reused.
rLog.log(commonDebugLevel, Messages.getString("rmi.log.60")); //$NON-NLS-1$
}
ObjectInfo oi = (ObjectInfo) objectInfoByActivationID.get(id);
// rmi.log.61=activation_instantiator = {0}
rLog.log(commonDebugLevel, Messages.getString("rmi.log.61", //$NON-NLS-1$
activationInstantiator));
Exception signalException = null;
try {
return oi.activate(activationInstantiator);
} catch (ConnectException ce) {
} catch (ConnectIOException cioe) {
} catch (MarshalException me) {
} catch (Exception e) {
signalException = e;
}
if (signalException == null) {
// rmi.log.62=The group seems to be dead: Killing process, reactivating group.
rLog.log(commonDebugLevel, Messages.getString("rmi.log.62")); //$NON-NLS-1$
if (process != null) {
process.destroy();
}
isActive = false;
activationInstantiator = null;
activateGroup();
return oi.activate(activationInstantiator);
}
// rmi.35=Exception:
throw new ActivationException(Messages.getString("rmi.35"), signalException); //$NON-NLS-1$
}
public synchronized void activateGroup() {
/*
* Constructing an appropriate commandline to start activation
* group.
*
*/
String args[];
ArrayList al = new ArrayList();
CommandEnvironment ce = agdesc.getCommandEnvironment();
if (ce != null) {
String[] options = ce.getCommandOptions();
String cmd = ce.getCommandPath();
if (cmd != null) {
al.add(cmd);
} else {
al.add("java"); //$NON-NLS-1$
}
al.addAll(Arrays.asList(options));
} else {
/*
* Getting properties that affect group VM execution.
*/
String javaVmNameVal = (String) AccessController.doPrivileged(
new GetStringPropAction("java.vm.name")); //$NON-NLS-1$
String javaHomeVal = (String) AccessController.doPrivileged(
new GetStringPropAction("java.home")); //$NON-NLS-1$
String javaClassPathVal = (String) AccessController.doPrivileged(
new GetStringPropAction("java.class.path")); //$NON-NLS-1$
String policy = (String) AccessController.doPrivileged(
new GetStringPropAction("java.security.policy")); //$NON-NLS-1$
String bootClassPathVal = (String)
AccessController.doPrivileged(new GetStringPropAction(
(javaVmNameVal.equals("J9") //$NON-NLS-1$
? "org.apache.harmony.boot.class.path" //$NON-NLS-1$
: "sun.boot.class.path"))); //$NON-NLS-1$
String executable = new File(new File(
javaHomeVal, "bin"), "java").getPath(); //$NON-NLS-1$ //$NON-NLS-2$
// Add name of Java executable to run.
al.add(executable);
if (bootClassPathVal != null) {
al.add("-Xbootclasspath:" + bootClassPathVal); //$NON-NLS-1$
}
if (javaClassPathVal != null) {
al.add("-classpath"); //$NON-NLS-1$
al.add(javaClassPathVal);
}
if (policy != null) {
// Apply security policy.
al.add("-Djava.security.policy=" + policy); //$NON-NLS-1$
}
}
/*
* Passing the "-C" options to the ActivationGroup VM.
*/
for (int i = 0; i < groupArgs.length; i++) {
// rmi.log.63=Option was passed through '-C': {0}
rLog.log(commonDebugLevel,Messages.getString("rmi.log.63", //$NON-NLS-1$
groupArgs[i]));
al.add(groupArgs[i]);
}
al.add("org.apache.harmony.rmi.activation.ActivationGroupImpl"); //$NON-NLS-1$
args = (String[]) al.toArray(new String[al.size()]);
// rmi.log.64=args = {0}
rLog.log(commonDebugLevel, Messages.getString("rmi.log.64", Arrays.asList(args))); //$NON-NLS-1$
try {
final String[] argsLocal = args;
process = (Process) AccessController
.doPrivileged(new PrivilegedExceptionAction() {
public Object run() throws IOException {
return Runtime.getRuntime()
.exec(argsLocal);
}
});
startingGroups--;
InputStream in = process.getInputStream();
InputStream err = process.getErrorStream();
new DebugThread(in).start();
new DebugThread(err).start();
// rmi.log.65=ActivationGroup started: {0}
rLog.log(commonDebugLevel, Messages.getString("rmi.log.65", //$NON-NLS-1$
process));
incarnation++;
OutputStream os = process.getOutputStream();
RMIObjectOutputStream oos = new RMIObjectOutputStream(
new BufferedOutputStream(os));
oos.writeObject(agid);
// rmi.log.66=Agid written: {0}
rLog.log(commonDebugLevel, Messages.getString("rmi.log.66", agid)); //$NON-NLS-1$
oos.writeObject(agdesc);
// rmi.log.67=Agdesc written: {0}
rLog.log(commonDebugLevel, Messages.getString("rmi.log.67", agdesc)); //$NON-NLS-1$
oos.writeLong(incarnation);
// rmi.log.68=incarnation written: {0}
rLog.log(commonDebugLevel, Messages.getString("rmi.log.68", //$NON-NLS-1$
incarnation));
oos.flush();
// rmi.log.69=flushed
rLog.log(commonDebugLevel, Messages.getString("rmi.log.69")); //$NON-NLS-1$
oos.close();
os.close();
// rmi.log.6A=closed
rLog.log(commonDebugLevel, Messages.getString("rmi.log.6A")); //$NON-NLS-1$
if (activationInstantiator == null) {
try {
this.wait(groupStartTimeout);
} catch (InterruptedException t) {
}
}
startingGroups++;
} catch (Throwable t) {
// rmi.log.6B=Cannot start ActivationGroup.\n Exception: {0}
rLog.log(commonDebugLevel, Messages.getString("rmi.log.6B", t)); //$NON-NLS-1$
t.printStackTrace();
}
}
/**
* Callback from ActivationGroup.createGroup that the groups was
* created.
*/
public synchronized void active(ActivationInstantiator ai,
long incarnation) {
// rmi.log.6C=ActivationGroupInfo.activeGroup[ActInst={0}; incarn={1}]
rLog.log(commonDebugLevel, Messages.getString("rmi.log.6C", ai, //$NON-NLS-1$
incarnation));
activationInstantiator = ai;
notify();
if (this.incarnation != incarnation) {
// rmi.33=Different incarnations of this group happened.
throw new RuntimeException(Messages.getString("rmi.33")); //$NON-NLS-1$
}
activationInstantiator = ai;
isActive = true;
if (startMonitor) {
rmidMonitor.activeGroup(agid);
}
// rmi.log.6D=ActivationGroupInfo.activeGroup finished.
rLog.log(commonDebugLevel, Messages.getString("rmi.log.6D")); //$NON-NLS-1$
}
public ActivationGroupDesc getActivationGroupDesc() {
return agdesc;
}
public boolean isActive() {
return isActive;
}
public ActivationDesc getActivationDesc(ActivationID aid) {
return (ActivationDesc) ((ObjectInfo) objectInfoByActivationID
.get(aid)).getActivationDesc();
}
/**
* Shut the activation group down.
*
* @return The exit value of activation group's VM.
*/
public synchronized int shutdown() {
if (process != null) {
process.destroy();
int val = process.exitValue();
process = null;
return val;
}
return 0;
}
public synchronized void unregister() {
Enumeration keys = objectInfoByActivationID.keys();
while (keys.hasMoreElements()) {
ActivationID id = (ActivationID) keys.nextElement();
objectInfoByActivationID.remove(id);
groupIDByActivationID.remove(id);
}
}
public synchronized void inactive(long incarnation) {
isActive = false;
}
public String toString() {
return "GroupInfo[ ActivationGroupID = " + agid + "]"; //$NON-NLS-1$ //$NON-NLS-2$
}
}
    /**
     * Structure to hold just 1 activated object: its activation id, its
     * descriptor and (nominally) a cached stub of the activated instance.
     */
    private class ObjectInfo {
        // NOTE(review): initialized to false and never set to true anywhere
        // in this file - confirm whether any caller relies on this flag.
        boolean isActive;
        private ActivationDesc desc;
        private ActivationID id;
        // NOTE(review): checked in activate() and cleared in inactive(),
        // but never populated in this file, so the cache branch appears to
        // be dead code. Populating it here would bypass force=true
        // activations (force is not propagated to this class) - confirm
        // the intended caching behavior before changing it.
        MarshalledObject cachedInstance = null;
        public ObjectInfo(ActivationID id, ActivationDesc desc) {
            this.id = id;
            this.desc = desc;
            isActive = false;
            if (startMonitor) {
                rmidMonitor.addObject(id, desc.getGroupID());
            }
        }
        /**
         * Activates the object through the given group instantiator,
         * returning the cached stub when one is present.
         *
         * @param ai the group's ActivationInstantiator
         * @return the marshalled stub of the activated object
         */
        public synchronized MarshalledObject activate(
                ActivationInstantiator ai) throws ActivationException,
                RemoteException {
            // rmi.log.6E=ObjectInfo.activate started. Act Inst = {0}
            rLog.log(commonDebugLevel,Messages.getString("rmi.log.6E", ai)); //$NON-NLS-1$
            if (cachedInstance != null) {
                // rmi.log.6F=Subsequent call to activate, returning cached instance.
                rLog.log(commonDebugLevel, Messages.getString("rmi.log.6F")); //$NON-NLS-1$
                return cachedInstance;
            }
            MarshalledObject mo = ai.newInstance(id, desc);
            // rmi.log.70=ObjectInfo.activate completed: {0}
            rLog.log(commonDebugLevel, Messages.getString("rmi.log.70", mo)); //$NON-NLS-1$
            return mo;
        }
        public ActivationDesc getActivationDesc() {
            return desc;
        }
        public synchronized void setActivationDesc(ActivationDesc desc) {
            this.desc = desc;
        }
        /** Notifies the monitor (if enabled) that the object became active. */
        public void active() {
            if (startMonitor) {
                rmidMonitor.activeObject(id);
            }
        }
        /** Drops the cached stub and notifies the monitor (if enabled). */
        public void inactive() {
            /*
             * When the object is being deactivated, the cached instance of
             * its stub becomes invalid.
             */
            cachedInstance = null;
            if (startMonitor) {
                rmidMonitor.inactiveObject(id);
            }
        }
    }
/**
* DebugThread - the thread that consumes the contents of the given
* InputStream and prints it to console. Usually it is used to read
* information from error and input streams of the ActivationGroup.
*/
private class DebugThread extends Thread {
InputStream is = null;
public DebugThread(InputStream is) {
this.is = is;
}
public void run() {
try {
byte tmp[] = new byte[1000];
while (true) {
int c = is.read(tmp);
if (c == -1) break;
byte buf[] = new byte[c];
System.arraycopy(tmp, 0, buf, 0, c);
if (c != 0) System.out.print(new String(buf));
}
} catch (Throwable t) {
}
}
}
private synchronized void snapshot() {
try {
File f = new File(logFolder
+ RMIConstants.DEFAULT_SNAPSHOT_FILE);
FileOutputStream fos = new FileOutputStream(f, false);
ObjectOutputStream out = new ObjectOutputStream(fos);
out.writeObject(new Snapshot());
out.close();
fos.close();
} catch (Throwable t) {
t.printStackTrace();
throw new RuntimeException(t);
}
}
/**
 * Appends one {@link Delta} record to the delta log file.  Once
 * {@code snapshotInterval} deltas have accumulated, the delta file is
 * discarded and collapsed into a fresh snapshot instead.
 *
 * @param op   Delta.PUT or Delta.REMOVE
 * @param name table name the delta applies to ("group" or "object")
 * @param key  key of the mutated entry
 * @param val  new value (ignored for removals)
 */
private synchronized void writeDelta(final int op, final String name,
        final Object key, final Object val) {
    deltasCounter++;
    AccessController.doPrivileged(new PrivilegedAction() {
        public Object run() {
            if (deltasCounter < snapshotInterval) {
                try {
                    File f = new File(logFolder
                            + RMIConstants.DEFAULT_DELTA_FILE);
                    // Append mode: each delta is written with its own
                    // serialization stream header; restore() mirrors this
                    // by opening a new ObjectInputStream per record.
                    FileOutputStream fos = new FileOutputStream(f,
                            true);
                    try {
                        // Close the file in finally: the original leaked
                        // the descriptor when writeObject() threw.
                        ObjectOutputStream out = new ObjectOutputStream(
                                fos);
                        out.writeObject(new Delta(op, name, key, val));
                        out.close();
                    } finally {
                        fos.close();
                    }
                    // rmi.log.71=Delta was written.
                    rLog.log(persistenceDebugLevel, Messages.getString("rmi.log.71")); //$NON-NLS-1$
                } catch (Throwable t) {
                    // Persistence failure is treated as fatal for rmid.
                    t.printStackTrace();
                    System.exit(1);
                }
            } else {
                // Too many deltas accumulated: drop the delta log and
                // fold everything into a fresh snapshot instead.
                File df = new File(logFolder
                        + RMIConstants.DEFAULT_DELTA_FILE);
                df.delete();
                snapshot();
                deltasCounter = 0;
            }
            return null;
        }
    });
}
/**
 * Restores the Activation System registry from the snapshot file and
 * then replays the delta log, if either exists.
 *
 * NOTE: restoration happens entirely through the SIDE EFFECTS of
 * Snapshot.readObject() and Delta.readObject(), which repopulate the
 * registry hashtables as they deserialize — the return values of
 * readObject() are deliberately discarded here.
 * On any failure the registry is reset to an empty initial state.
 */
private synchronized void restore() throws Exception {
    // Guard flag consulted elsewhere while restoration is in progress.
    restoreLock = true;
    final File sf = new File(logFolder
            + RMIConstants.DEFAULT_SNAPSHOT_FILE);
    final File df = new File(logFolder
            + RMIConstants.DEFAULT_DELTA_FILE);
    try {
        AccessController.doPrivileged(new PrivilegedExceptionAction() {
            public Object run() throws Exception {
                if (sf.exists()) {
                    // Deserializing the Snapshot rebuilds the base state
                    // (see Snapshot.readObject); the object itself is
                    // not needed afterwards.
                    FileInputStream fis = new FileInputStream(sf);
                    ObjectInputStream in = new ObjectInputStream(fis);
                    in.readObject();
                    in.close();
                    fis.close();
                }
                try {
                    if (df.exists()) {
                        FileInputStream fis = new FileInputStream(df);
                        while (true) {
                            // Each delta was appended with its own stream
                            // header (see writeDelta), so a fresh
                            // ObjectInputStream is needed per record.
                            ObjectInputStream in = new ObjectInputStream(
                                    fis);
                            Delta d = (Delta) in.readObject();
                            // rmi.log.72=A delta was restored: {0}
                            rLog.log(persistenceDebugLevel,
                                    Messages.getString("rmi.log.72", d)); //$NON-NLS-1$
                        }
                    }
                } catch (EOFException eofe) {
                    // This section was intentionally left empty.
                    // Indicates that End of File reached -
                    //meaning all deltas were read.
                    return null;
                }
                return null;
            }
        });
    } catch (Throwable t) {
        // rmi.log.73=Exception in restore: {0}
        rLog.log(persistenceDebugLevel, Messages.getString("rmi.log.73", t)); //$NON-NLS-1$
        t.printStackTrace();
        /*
         * Returning Activation System into initial state:
         */
        groupIDByActivationID = new Hashtable();
        groupInfoByGroupId = new Hashtable();
        System.gc();
    }
    restoreLock = false;
}
/**
 * A single persisted mutation (PUT or REMOVE) of the Activation System
 * registry.  Deltas are appended to the delta log by writeDelta() and
 * replayed by restore().
 *
 * NOTE: readObject() does far more than populate this instance — it
 * APPLIES the delta to the enclosing daemon's registry tables
 * (groupInfoByGroupId / groupIDByActivationID) as a side effect.
 */
class Delta implements Serializable {
    private static final long serialVersionUID = 103662164369676173L;
    // Operation codes.
    private static final int PUT = 0;
    private static final int REMOVE = 1;
    public int op;
    // Target table: "group" or "object".
    public String name;
    // Key/value are wrapped in MarshalledObject so codebase annotations
    // survive serialization.
    public MarshalledObject mkey;
    public MarshalledObject mval;

    public Delta(int op, String name, Object key, Object val)
            throws Exception {
        this.op = op;
        this.name = name;
        mkey = new MarshalledObject(key);
        mval = new MarshalledObject(val);
    }

    /** Debug representation; unmarshals key and value eagerly. */
    public String toString() {
        try {
            return "Delta: " + op + "," + name + ", " + mkey.get() //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ //$NON-NLS-4$
                    + ", " + mval.get(); //$NON-NLS-1$
        } catch (Throwable t) {
            t.printStackTrace();
            return "" + t; //$NON-NLS-1$
        }
    }

    /** Custom serial form: op, name, marshalled key, marshalled value. */
    private synchronized void writeObject(ObjectOutputStream out)
            throws IOException {
        out.writeInt(op);
        out.writeUTF(name);
        out.writeObject(mkey);
        out.writeObject(mval);
    }

    /**
     * Reads the serial form and immediately applies the delta to the
     * enclosing registry tables (this is how restore() replays the log).
     */
    private synchronized void readObject(ObjectInputStream in)
            throws IOException, ClassNotFoundException {
        op = in.readInt();
        name = in.readUTF();
        mkey = (MarshalledObject) in.readObject();
        mval = (MarshalledObject) in.readObject();
        // rmi.log.75=Delta: Data read:
        rLog.log(persistenceDebugLevel, Messages.getString("rmi.log.75") + op //$NON-NLS-1$
                + ", " + name + ", " + mkey.get() + ", " + mval.get()); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
        if (op == PUT) {
            if (name.equals("group")) { //$NON-NLS-1$
                // Re-register an activation group.
                ActivationGroupID agid = (ActivationGroupID) mkey
                        .get();
                // rmi.log.76=Restore agid: {0}
                rLog.log(persistenceDebugLevel, Messages.getString("rmi.log.76", //$NON-NLS-1$
                        agid));
                ActivationGroupDesc agdesc = (ActivationGroupDesc) mval
                        .get();
                ActivationGroupInfo agi = new ActivationGroupInfo(
                        agid, agdesc);
                // rmi.log.77=Restore agi: {0}
                rLog.log(persistenceDebugLevel, Messages.getString("rmi.log.77", agi)); //$NON-NLS-1$
                groupInfoByGroupId.put(agid, agi);
                // rmi.log.78=The data were put into groupID2groupInfo_H
                rLog.log(persistenceDebugLevel, Messages.getString("rmi.log.78")); //$NON-NLS-1$
            }
            if (name.equals("object")) { //$NON-NLS-1$
                // Re-register an activatable object inside its
                // (already restored) group.
                // rmi.log.79=Trying to restore ActivationID:
                rLog.log(persistenceDebugLevel, Messages.getString("rmi.log.79")); //$NON-NLS-1$
                ActivationID aid = (ActivationID) mkey.get();
                // rmi.log.0F=aid = {0}
                rLog.log(persistenceDebugLevel, Messages.getString("rmi.log.0F", aid)); //$NON-NLS-1$
                ActivationDesc adesc = (ActivationDesc) mval.get();
                // rmi.log.7A=adesc = {0}
                rLog.log(persistenceDebugLevel, Messages.getString("rmi.log.7A", adesc)); //$NON-NLS-1$
                ActivationGroupID agid = adesc.getGroupID();
                // rmi.log.57=agid = {0}
                rLog.log(persistenceDebugLevel, Messages.getString("rmi.log.57", agid)); //$NON-NLS-1$
                ActivationGroupInfo agi =
                        (ActivationGroupInfo) groupInfoByGroupId.get(agid);
                // rmi.log.58=agi = {0}
                rLog.log(persistenceDebugLevel, Messages.getString("rmi.log.58", agi)); //$NON-NLS-1$
                groupIDByActivationID.put(aid, agid);
                agi.registerObject(aid, adesc);
                // rmi.log.7D=Object was registered.
                rLog.log(persistenceDebugLevel, Messages.getString("rmi.log.7D")); //$NON-NLS-1$
                // rmi.log.7E=Object was put into hashtable.
                rLog.log(persistenceDebugLevel, Messages.getString("rmi.log.7E")); //$NON-NLS-1$
            }
        }
        if (op == REMOVE) {
            if (name.equals("object")) { //$NON-NLS-1$
                groupIDByActivationID.remove(mkey.get());
            }
            if (name.equals("group")) { //$NON-NLS-1$
                groupInfoByGroupId.remove(mkey.get());
            }
        }
    }
}
/**
 * Full serialized image of the Activation System registry.
 *
 * Carries no fields of its own: writeObject() flattens the live
 * registry tables into two hashtables (groups and objects) and
 * readObject() rebuilds the registry from them as a side effect —
 * the deserialized Snapshot instance itself is discarded by restore().
 */
class Snapshot implements Serializable {
    private static final long serialVersionUID = 2006016754895450831L;

    /**
     * Serial form: h0 maps marshalled group IDs to group descriptors,
     * h1 maps activation IDs to activation descriptors.
     */
    private synchronized void writeObject(ObjectOutputStream out)
            throws IOException {
        Hashtable h0 = new Hashtable();
        Hashtable h1 = new Hashtable();
        Enumeration e0 = groupInfoByGroupId.keys();
        while (e0.hasMoreElements()) {
            ActivationGroupID agid = (ActivationGroupID) e0
                    .nextElement();
            // Group IDs are wrapped in MarshalledObject so codebase
            // annotations survive serialization.
            MarshalledObject mo_agid = new MarshalledObject(agid);
            ActivationGroupInfo agi =
                    (ActivationGroupInfo) groupInfoByGroupId.get(agid);
            ActivationGroupDesc agdesc = agi.getActivationGroupDesc();
            h0.put(mo_agid, agdesc);
            Enumeration e1 = agi.objectInfoByActivationID.keys();
            while (e1.hasMoreElements()) {
                ActivationID aid = (ActivationID) e1.nextElement();
                ObjectInfo oi = (ObjectInfo) agi.objectInfoByActivationID
                        .get(aid);
                ActivationDesc adesc = oi.getActivationDesc();
                h1.put(aid, adesc);
            }
        }
        out.writeObject(h0);
        out.writeObject(h1);
    }

    /**
     * Rebuilds the enclosing daemon's registry tables from the serial
     * form: first re-creates every group, then re-registers every
     * object inside its group.
     */
    private synchronized void readObject(ObjectInputStream in)
            throws IOException, ClassNotFoundException {
        Hashtable h0 = (Hashtable) in.readObject();
        Hashtable h1 = (Hashtable) in.readObject();
        // rmi.log.7F=Restore:
        rLog.log(persistenceDebugLevel, Messages.getString("rmi.log.7F")); //$NON-NLS-1$
        // rmi.log.80=h0 = {0}
        rLog.log(persistenceDebugLevel, Messages.getString("rmi.log.80", h0)); //$NON-NLS-1$
        // rmi.log.81=h1 = {0}
        rLog.log(persistenceDebugLevel, Messages.getString("rmi.log.81", h1)); //$NON-NLS-1$
        // Pass 1: restore all activation groups.
        Enumeration e0 = h0.keys();
        while (e0.hasMoreElements()) {
            MarshalledObject mo_agid = (MarshalledObject) e0
                    .nextElement();
            ActivationGroupID agid = (ActivationGroupID) mo_agid.get();
            // rmi.log.76=Restore agid: {0}
            rLog.log(persistenceDebugLevel, Messages.getString("rmi.log.76", agid)); //$NON-NLS-1$
            ActivationGroupDesc agdesc = (ActivationGroupDesc) h0
                    .get(mo_agid);
            // rmi.log.82=Restore agdesc: {0}
            rLog.log(persistenceDebugLevel, Messages.getString("rmi.log.82", //$NON-NLS-1$
                    agdesc));
            ActivationGroupInfo agi = new ActivationGroupInfo(agid,
                    agdesc);
            // rmi.log.77=Restore agi: {0}
            rLog.log(persistenceDebugLevel, Messages.getString("rmi.log.77") + agi); //$NON-NLS-1$
            groupInfoByGroupId.put(agid, agi);
            // rmi.log.78=The data were put into groupID2groupInfo_H
            rLog.log(persistenceDebugLevel, Messages.getString("rmi.log.78")); //$NON-NLS-1$
        }
        // Pass 2: restore all objects into their (now present) groups.
        Enumeration e1 = h1.keys();
        while (e1.hasMoreElements()) {
            ActivationID aid = (ActivationID) e1.nextElement();
            ActivationDesc adesc = (ActivationDesc) h1.get(aid);
            ActivationGroupID agid = adesc.getGroupID();
            ActivationGroupInfo agi =
                    (ActivationGroupInfo) groupInfoByGroupId.get(agid);
            agi.registerObject(aid, adesc);
            groupIDByActivationID.put(aid, agid);
        }
    }
}
}
| |
package io.dropwizard.metrics.graphite;
import io.dropwizard.metrics.Clock;
import io.dropwizard.metrics.Counter;
import io.dropwizard.metrics.Gauge;
import io.dropwizard.metrics.Histogram;
import io.dropwizard.metrics.Meter;
import io.dropwizard.metrics.MetricFilter;
import io.dropwizard.metrics.MetricRegistry;
import io.dropwizard.metrics.Snapshot;
import io.dropwizard.metrics.Timer;
import org.junit.Before;
import org.junit.Test;
import org.mockito.InOrder;
import io.dropwizard.metrics.*;
import java.net.UnknownHostException;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.concurrent.TimeUnit;
import static org.mockito.Mockito.*;
/**
 * Unit tests for {@code GraphiteReporter}.
 *
 * Every metric type is reported through a mocked {@code Graphite} client
 * and the exact sequence of send() calls (metric path, formatted value,
 * timestamp) is verified with Mockito's InOrder API.  The mocked Clock
 * pins the timestamp so expected values are deterministic.
 */
public class GraphiteReporterTest {
    // Reporter converts clock millis to seconds; expected send timestamp.
    private final long timestamp = 1000198;
    private final Clock clock = mock(Clock.class);
    private final Graphite graphite = mock(Graphite.class);
    private final MetricRegistry registry = mock(MetricRegistry.class);
    // Reporter under test: "prefix." prepended to every metric name,
    // rates in seconds, durations in milliseconds, no filtering.
    private final GraphiteReporter reporter = GraphiteReporter.forRegistry(registry)
            .withClock(clock)
            .prefixedWith("prefix")
            .convertRatesTo(TimeUnit.SECONDS)
            .convertDurationsTo(TimeUnit.MILLISECONDS)
            .filter(MetricFilter.ALL)
            .build(graphite);

    @Before
    public void setUp() throws Exception {
        // Clock is in millis; reporter divides by 1000 to get `timestamp`.
        when(clock.getTime()).thenReturn(timestamp * 1000);
    }

    // Non-numeric gauge values must be skipped entirely.
    @Test
    public void doesNotReportStringGaugeValues() throws Exception {
        reporter.report(map("gauge", gauge("value")),
                this.<Counter>map(),
                this.<Histogram>map(),
                this.<Meter>map(),
                this.<Timer>map());
        final InOrder inOrder = inOrder(graphite);
        inOrder.verify(graphite).isConnected();
        inOrder.verify(graphite).connect();
        inOrder.verify(graphite, never()).send("prefix.gauge", "value", timestamp);
        inOrder.verify(graphite).flush();
        verifyNoMoreInteractions(graphite);
    }

    // Integral gauge values are sent without decimals.
    @Test
    public void reportsByteGaugeValues() throws Exception {
        reporter.report(map("gauge", gauge((byte) 1)),
                this.<Counter>map(),
                this.<Histogram>map(),
                this.<Meter>map(),
                this.<Timer>map());
        final InOrder inOrder = inOrder(graphite);
        inOrder.verify(graphite).isConnected();
        inOrder.verify(graphite).connect();
        inOrder.verify(graphite).send("prefix.gauge", "1", timestamp);
        inOrder.verify(graphite).flush();
        verifyNoMoreInteractions(graphite);
    }

    @Test
    public void reportsShortGaugeValues() throws Exception {
        reporter.report(map("gauge", gauge((short) 1)),
                this.<Counter>map(),
                this.<Histogram>map(),
                this.<Meter>map(),
                this.<Timer>map());
        final InOrder inOrder = inOrder(graphite);
        inOrder.verify(graphite).isConnected();
        inOrder.verify(graphite).connect();
        inOrder.verify(graphite).send("prefix.gauge", "1", timestamp);
        inOrder.verify(graphite).flush();
        verifyNoMoreInteractions(graphite);
    }

    @Test
    public void reportsIntegerGaugeValues() throws Exception {
        reporter.report(map("gauge", gauge(1)),
                this.<Counter>map(),
                this.<Histogram>map(),
                this.<Meter>map(),
                this.<Timer>map());
        final InOrder inOrder = inOrder(graphite);
        inOrder.verify(graphite).isConnected();
        inOrder.verify(graphite).connect();
        inOrder.verify(graphite).send("prefix.gauge", "1", timestamp);
        inOrder.verify(graphite).flush();
        verifyNoMoreInteractions(graphite);
    }

    @Test
    public void reportsLongGaugeValues() throws Exception {
        reporter.report(map("gauge", gauge(1L)),
                this.<Counter>map(),
                this.<Histogram>map(),
                this.<Meter>map(),
                this.<Timer>map());
        final InOrder inOrder = inOrder(graphite);
        inOrder.verify(graphite).isConnected();
        inOrder.verify(graphite).connect();
        inOrder.verify(graphite).send("prefix.gauge", "1", timestamp);
        inOrder.verify(graphite).flush();
        verifyNoMoreInteractions(graphite);
    }

    // Floating-point gauge values are formatted with two decimals.
    @Test
    public void reportsFloatGaugeValues() throws Exception {
        reporter.report(map("gauge", gauge(1.1f)),
                this.<Counter>map(),
                this.<Histogram>map(),
                this.<Meter>map(),
                this.<Timer>map());
        final InOrder inOrder = inOrder(graphite);
        inOrder.verify(graphite).isConnected();
        inOrder.verify(graphite).connect();
        inOrder.verify(graphite).send("prefix.gauge", "1.10", timestamp);
        inOrder.verify(graphite).flush();
        verifyNoMoreInteractions(graphite);
    }

    @Test
    public void reportsDoubleGaugeValues() throws Exception {
        reporter.report(map("gauge", gauge(1.1)),
                this.<Counter>map(),
                this.<Histogram>map(),
                this.<Meter>map(),
                this.<Timer>map());
        final InOrder inOrder = inOrder(graphite);
        inOrder.verify(graphite).isConnected();
        inOrder.verify(graphite).connect();
        inOrder.verify(graphite).send("prefix.gauge", "1.10", timestamp);
        inOrder.verify(graphite).flush();
        verifyNoMoreInteractions(graphite);
    }

    // A counter emits a single ".count" datapoint.
    @Test
    public void reportsCounters() throws Exception {
        final Counter counter = mock(Counter.class);
        when(counter.getCount()).thenReturn(100L);
        reporter.report(this.<Gauge>map(),
                this.<Counter>map("counter", counter),
                this.<Histogram>map(),
                this.<Meter>map(),
                this.<Timer>map());
        final InOrder inOrder = inOrder(graphite);
        inOrder.verify(graphite).isConnected();
        inOrder.verify(graphite).connect();
        inOrder.verify(graphite).send("prefix.counter.count", "100", timestamp);
        inOrder.verify(graphite).flush();
        verifyNoMoreInteractions(graphite);
    }

    // A histogram emits count, min/max/mean/stddev and all percentiles.
    @Test
    public void reportsHistograms() throws Exception {
        final Histogram histogram = mock(Histogram.class);
        when(histogram.getCount()).thenReturn(1L);
        final Snapshot snapshot = mock(Snapshot.class);
        when(snapshot.getMax()).thenReturn(2L);
        when(snapshot.getMean()).thenReturn(3.0);
        when(snapshot.getMin()).thenReturn(4L);
        when(snapshot.getStdDev()).thenReturn(5.0);
        when(snapshot.getMedian()).thenReturn(6.0);
        when(snapshot.get75thPercentile()).thenReturn(7.0);
        when(snapshot.get95thPercentile()).thenReturn(8.0);
        when(snapshot.get98thPercentile()).thenReturn(9.0);
        when(snapshot.get99thPercentile()).thenReturn(10.0);
        when(snapshot.get999thPercentile()).thenReturn(11.0);
        when(histogram.getSnapshot()).thenReturn(snapshot);
        reporter.report(this.<Gauge>map(),
                this.<Counter>map(),
                this.<Histogram>map("histogram", histogram),
                this.<Meter>map(),
                this.<Timer>map());
        final InOrder inOrder = inOrder(graphite);
        inOrder.verify(graphite).isConnected();
        inOrder.verify(graphite).connect();
        inOrder.verify(graphite).send("prefix.histogram.count", "1", timestamp);
        inOrder.verify(graphite).send("prefix.histogram.max", "2", timestamp);
        inOrder.verify(graphite).send("prefix.histogram.mean", "3.00", timestamp);
        inOrder.verify(graphite).send("prefix.histogram.min", "4", timestamp);
        inOrder.verify(graphite).send("prefix.histogram.stddev", "5.00", timestamp);
        inOrder.verify(graphite).send("prefix.histogram.p50", "6.00", timestamp);
        inOrder.verify(graphite).send("prefix.histogram.p75", "7.00", timestamp);
        inOrder.verify(graphite).send("prefix.histogram.p95", "8.00", timestamp);
        inOrder.verify(graphite).send("prefix.histogram.p98", "9.00", timestamp);
        inOrder.verify(graphite).send("prefix.histogram.p99", "10.00", timestamp);
        inOrder.verify(graphite).send("prefix.histogram.p999", "11.00", timestamp);
        inOrder.verify(graphite).flush();
        verifyNoMoreInteractions(graphite);
    }

    // A meter emits its count plus the four rates (already in events/sec).
    @Test
    public void reportsMeters() throws Exception {
        final Meter meter = mock(Meter.class);
        when(meter.getCount()).thenReturn(1L);
        when(meter.getOneMinuteRate()).thenReturn(2.0);
        when(meter.getFiveMinuteRate()).thenReturn(3.0);
        when(meter.getFifteenMinuteRate()).thenReturn(4.0);
        when(meter.getMeanRate()).thenReturn(5.0);
        reporter.report(this.<Gauge>map(),
                this.<Counter>map(),
                this.<Histogram>map(),
                this.<Meter>map("meter", meter),
                this.<Timer>map());
        final InOrder inOrder = inOrder(graphite);
        inOrder.verify(graphite).isConnected();
        inOrder.verify(graphite).connect();
        inOrder.verify(graphite).send("prefix.meter.count", "1", timestamp);
        inOrder.verify(graphite).send("prefix.meter.m1_rate", "2.00", timestamp);
        inOrder.verify(graphite).send("prefix.meter.m5_rate", "3.00", timestamp);
        inOrder.verify(graphite).send("prefix.meter.m15_rate", "4.00", timestamp);
        inOrder.verify(graphite).send("prefix.meter.mean_rate", "5.00", timestamp);
        inOrder.verify(graphite).flush();
        verifyNoMoreInteractions(graphite);
    }

    // Timer snapshot values are given in nanos and must be reported in
    // millis (the configured duration unit), hence the x-hundred values.
    @Test
    public void reportsTimers() throws Exception {
        final Timer timer = mock(Timer.class);
        when(timer.getCount()).thenReturn(1L);
        when(timer.getMeanRate()).thenReturn(2.0);
        when(timer.getOneMinuteRate()).thenReturn(3.0);
        when(timer.getFiveMinuteRate()).thenReturn(4.0);
        when(timer.getFifteenMinuteRate()).thenReturn(5.0);
        final Snapshot snapshot = mock(Snapshot.class);
        when(snapshot.getMax()).thenReturn(TimeUnit.MILLISECONDS.toNanos(100));
        when(snapshot.getMean()).thenReturn((double) TimeUnit.MILLISECONDS.toNanos(200));
        when(snapshot.getMin()).thenReturn(TimeUnit.MILLISECONDS.toNanos(300));
        when(snapshot.getStdDev()).thenReturn((double) TimeUnit.MILLISECONDS.toNanos(400));
        when(snapshot.getMedian()).thenReturn((double) TimeUnit.MILLISECONDS.toNanos(500));
        when(snapshot.get75thPercentile()).thenReturn((double) TimeUnit.MILLISECONDS.toNanos(600));
        when(snapshot.get95thPercentile()).thenReturn((double) TimeUnit.MILLISECONDS.toNanos(700));
        when(snapshot.get98thPercentile()).thenReturn((double) TimeUnit.MILLISECONDS.toNanos(800));
        when(snapshot.get99thPercentile()).thenReturn((double) TimeUnit.MILLISECONDS.toNanos(900));
        when(snapshot.get999thPercentile()).thenReturn((double) TimeUnit.MILLISECONDS
                .toNanos(1000));
        when(timer.getSnapshot()).thenReturn(snapshot);
        reporter.report(this.<Gauge>map(),
                this.<Counter>map(),
                this.<Histogram>map(),
                this.<Meter>map(),
                map("timer", timer));
        final InOrder inOrder = inOrder(graphite);
        inOrder.verify(graphite).isConnected();
        inOrder.verify(graphite).connect();
        inOrder.verify(graphite).send("prefix.timer.max", "100.00", timestamp);
        inOrder.verify(graphite).send("prefix.timer.mean", "200.00", timestamp);
        inOrder.verify(graphite).send("prefix.timer.min", "300.00", timestamp);
        inOrder.verify(graphite).send("prefix.timer.stddev", "400.00", timestamp);
        inOrder.verify(graphite).send("prefix.timer.p50", "500.00", timestamp);
        inOrder.verify(graphite).send("prefix.timer.p75", "600.00", timestamp);
        inOrder.verify(graphite).send("prefix.timer.p95", "700.00", timestamp);
        inOrder.verify(graphite).send("prefix.timer.p98", "800.00", timestamp);
        inOrder.verify(graphite).send("prefix.timer.p99", "900.00", timestamp);
        inOrder.verify(graphite).send("prefix.timer.p999", "1000.00", timestamp);
        inOrder.verify(graphite).send("prefix.timer.count", "1", timestamp);
        inOrder.verify(graphite).send("prefix.timer.m1_rate", "3.00", timestamp);
        inOrder.verify(graphite).send("prefix.timer.m5_rate", "4.00", timestamp);
        inOrder.verify(graphite).send("prefix.timer.m15_rate", "5.00", timestamp);
        inOrder.verify(graphite).send("prefix.timer.mean_rate", "2.00", timestamp);
        inOrder.verify(graphite).flush();
        verifyNoMoreInteractions(graphite);
    }

    // A connect failure must close the connection and send nothing.
    @Test
    public void closesConnectionIfGraphiteIsUnavailable() throws Exception {
        doThrow(new UnknownHostException("UNKNOWN-HOST")).when(graphite).connect();
        reporter.report(map("gauge", gauge(1)),
                this.<Counter>map(),
                this.<Histogram>map(),
                this.<Meter>map(),
                this.<Timer>map());
        final InOrder inOrder = inOrder(graphite);
        inOrder.verify(graphite).isConnected();
        inOrder.verify(graphite).connect();
        inOrder.verify(graphite).close();
        verifyNoMoreInteractions(graphite);
    }

    // A runtime failure while reading a metric also closes the connection.
    @Test
    public void closesConnectionIfAnUnexpectedExceptionOccurs() throws Exception {
        final Gauge gauge = mock(Gauge.class);
        when(gauge.getValue()).thenThrow(new RuntimeException("kaboom"));
        reporter.report(map("gauge", gauge),
                this.<Counter>map(),
                this.<Histogram>map(),
                this.<Meter>map(),
                this.<Timer>map());
        final InOrder inOrder = inOrder(graphite);
        inOrder.verify(graphite).isConnected();
        inOrder.verify(graphite).connect();
        inOrder.verify(graphite).close();
        verifyNoMoreInteractions(graphite);
    }

    @Test
    public void closesConnectionOnReporterStop() throws Exception {
        reporter.stop();
        verify(graphite).close();
        verifyNoMoreInteractions(graphite);
    }

    // Helper: empty sorted metric map of the requested value type.
    private <T> SortedMap<MetricName, T> map() {
        return new TreeMap<MetricName, T>();
    }

    // Helper: single-entry sorted metric map keyed by `name`.
    private <T> SortedMap<MetricName, T> map(String name, T metric) {
        final TreeMap<MetricName, T> map = new TreeMap<MetricName, T>();
        map.put(MetricName.build(name), metric);
        return map;
    }

    // Helper: mocked gauge that always returns `value`.
    private <T> Gauge gauge(T value) {
        final Gauge gauge = mock(Gauge.class);
        when(gauge.getValue()).thenReturn(value);
        return gauge;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wali;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.BufferedInputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.EOFException;
import java.io.File;
import java.io.FileFilter;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.text.NumberFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.SortedSet;
import java.util.TreeSet;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
@SuppressWarnings("deprecation")
public class TestMinimalLockingWriteAheadLog {
private static final Logger logger = LoggerFactory.getLogger(TestMinimalLockingWriteAheadLog.class);
/**
 * Recovery regression test: the second partition header read throws
 * EOFException (simulating a truncated partition file).  The second
 * repository instance must still recover without propagating the error.
 */
@Test
public void testTruncatedPartitionHeader() throws IOException {
    final int numPartitions = 4;
    final Path path = Paths.get("target/testTruncatedPartitionHeader");
    deleteRecursively(path.toFile());
    assertTrue(path.toFile().mkdirs());
    // Counts readHeader() calls so exactly the second one fails.
    final AtomicInteger counter = new AtomicInteger(0);
    // Minimal SerDe: every record serializes to a single byte `1`.
    final SerDe<Object> serde = new SerDe<Object>() {
        @Override
        public void readHeader(DataInputStream in) throws IOException {
            // Fail only on the 2nd header read (counter value 1),
            // emulating a partition file truncated mid-header.
            if (counter.getAndIncrement() == 1) {
                throw new EOFException("Intentionally thrown for unit test");
            }
        }
        @Override
        public void serializeEdit(Object previousRecordState, Object newRecordState, DataOutputStream out) throws IOException {
            out.write(1);
        }
        @Override
        public void serializeRecord(Object record, DataOutputStream out) throws IOException {
            out.write(1);
        }
        @Override
        public Object deserializeEdit(DataInputStream in, Map<Object, Object> currentRecordStates, int version) throws IOException {
            final int val = in.read();
            return (val == 1) ? new Object() : null;
        }
        @Override
        public Object deserializeRecord(DataInputStream in, int version) throws IOException {
            final int val = in.read();
            return (val == 1) ? new Object() : null;
        }
        @Override
        public Object getRecordIdentifier(Object record) {
            return 1;
        }
        @Override
        public UpdateType getUpdateType(Object record) {
            return UpdateType.CREATE;
        }
        @Override
        public String getLocation(Object record) {
            return null;
        }
        @Override
        public int getVersion() {
            return 0;
        }
    };
    // Write a few records so there is journal content to recover.
    final WriteAheadRepository<Object> repo = new MinimalLockingWriteAheadLog<>(path, numPartitions, serde, null);
    try {
        final Collection<Object> initialRecs = repo.recoverRecords();
        assertTrue(initialRecs.isEmpty());
        repo.update(Collections.singletonList(new Object()), false);
        repo.update(Collections.singletonList(new Object()), false);
        repo.update(Collections.singletonList(new Object()), false);
    } finally {
        repo.shutdown();
    }
    // Recovery must tolerate the injected EOF on one partition header.
    final WriteAheadRepository<Object> secondRepo = new MinimalLockingWriteAheadLog<>(path, numPartitions, serde, null);
    try {
        secondRepo.recoverRecords();
    } finally {
        secondRepo.shutdown();
    }
}
/**
 * Manual performance benchmark: several writer threads hammer the
 * write-ahead log for two passes (the first is a warm-up) and the test
 * prints events/second and bytes/second for each pass.
 *
 * @throws IOException if the repository cannot be created or recovered
 * @throws InterruptedException if interrupted while joining writers
 */
@Test
@Disabled("For manual performance testing")
public void testUpdatePerformance() throws IOException, InterruptedException {
    final int numPartitions = 16;
    final Path path = Paths.get("target/minimal-locking-repo");
    deleteRecursively(path.toFile());
    assertTrue(path.toFile().mkdirs());
    final DummyRecordSerde serde = new DummyRecordSerde();
    final WriteAheadRepository<DummyRecord> repo = new MinimalLockingWriteAheadLog<>(path, numPartitions, serde, null);
    final Collection<DummyRecord> initialRecs = repo.recoverRecords();
    assertTrue(initialRecs.isEmpty());
    final long updateCountPerThread = 1_000_000;
    final int numThreads = 4;
    final Thread[] threads = new Thread[numThreads];
    final int batchSize = 1;
    // Cumulative journal size observed after the previous pass.
    long previousBytes = 0;
    for (int j = 0; j < 2; j++) {
        for (int i = 0; i < numThreads; i++) {
            final Thread t = new Thread(() -> {
                final List<DummyRecord> batch = new ArrayList<>();
                for (int i1 = 0; i1 < updateCountPerThread / batchSize; i1++) {
                    batch.clear();
                    for (int j1 = 0; j1 < batchSize; j1++) {
                        final DummyRecord record = new DummyRecord(String.valueOf(i1), UpdateType.CREATE);
                        batch.add(record);
                    }
                    assertDoesNotThrow(() -> repo.update(batch, false));
                }
            });
            threads[i] = t;
        }
        final long start = System.nanoTime();
        for (final Thread t : threads) {
            t.start();
        }
        for (final Thread t : threads) {
            t.join();
        }
        // Total on-disk journal size after this pass (cumulative).
        long totalBytes = 0L;
        for (final File file : path.toFile().listFiles()) {
            if (file.getName().startsWith("partition-")) {
                for (final File journalFile : file.listFiles()) {
                    totalBytes += journalFile.length();
                }
            }
        }
        // Bytes written by THIS pass only.  (Bug fix: the original stored
        // the per-pass delta back into previousBytes instead of the
        // cumulative total, which would under-count for any pass after
        // the second if the outer loop were ever extended.)
        final long bytes = totalBytes - previousBytes;
        previousBytes = totalBytes;
        final long millis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - start);
        final long eventsPerSecond = (updateCountPerThread * numThreads * 1000) / millis;
        final String eps = NumberFormat.getInstance().format(eventsPerSecond);
        final long bytesPerSecond = bytes * 1000 / millis;
        final String bps = NumberFormat.getInstance().format(bytesPerSecond);
        // Single print site; only the warm-up marker differs per pass.
        final String passLabel = (j == 0) ? " threads, *as a warmup!* " : " threads, ";
        System.out.println(millis + " ms to insert " + updateCountPerThread * numThreads + " updates using " + numThreads + passLabel
                + eps + " events per second, " + bps + " bytes per second");
    }
}
/**
 * After an OutOfMemoryError is injected during serialization, every
 * further update must fail fast with IOException WITHOUT appending to
 * the journal — the on-disk size must stay constant, both before and
 * after a (also failing) checkpoint attempt.
 */
@Test
public void testRepoDoesntContinuallyGrowOnOutOfMemoryError() throws IOException, InterruptedException {
    final int numPartitions = 8;
    final Path path = Paths.get("target/minimal-locking-repo");
    deleteRecursively(path.toFile());
    assertTrue(path.toFile().mkdirs());
    final DummyRecordSerde serde = new DummyRecordSerde();
    final WriteAheadRepository<DummyRecord> repo = new MinimalLockingWriteAheadLog<>(path, numPartitions, serde, null);
    try {
        final Collection<DummyRecord> initialRecs = repo.recoverRecords();
        assertTrue(initialRecs.isEmpty());
        // Arrange: the serde throws OOME after 100 serialized edits, so
        // some of the 108 updates below hit the error on every partition.
        serde.setThrowOOMEAfterNSerializeEdits(100);
        for (int i = 0; i < 108; i++) {
            try {
                final DummyRecord record = new DummyRecord(String.valueOf(i), UpdateType.CREATE);
                repo.update(Collections.singleton(record), false);
            } catch (final OutOfMemoryError oome) {
                logger.info("Received OOME on record " + i);
            }
        }
        // Once poisoned, updates must fail with IOException and must not
        // grow the repository on disk.
        long expectedSize = sizeOf(path.toFile());
        for (int i = 0; i < 1000; i++) {
            final DummyRecord record = new DummyRecord(String.valueOf(i), UpdateType.CREATE);
            assertThrows(IOException.class, () -> repo.update(Collections.singleton(record), false));
        }
        long newSize = sizeOf(path.toFile());
        assertEquals(expectedSize, newSize);
        // Checkpointing surfaces the stored OOME; afterwards the
        // size-stability guarantee must still hold.
        assertThrows(OutOfMemoryError.class, () -> repo.checkpoint());
        expectedSize = sizeOf(path.toFile());
        for (int i = 0; i < 100000; i++) {
            final DummyRecord record = new DummyRecord(String.valueOf(i), UpdateType.CREATE);
            assertThrows(IOException.class, () -> repo.update(Collections.singleton(record), false));
        }
        newSize = sizeOf(path.toFile());
        assertEquals(expectedSize, newSize);
    } finally {
        repo.shutdown();
    }
}
/**
 * This test is intended to continually update the Write-ahead log using many threads, then
 * stop and restore the repository to check for any corruption. There were reports of potential threading
 * issues leading to repository corruption. This was an attempt to replicate. It should not be run as a
 * unit test, really, but will be left, as it can be valuable to exercise the implementation
 *
 * @throws IOException if unable to read from/write to the write-ahead log
 * @throws InterruptedException if a thread is interrupted
 */
@Test
@Disabled
public void tryToCauseThreadingIssue() throws IOException, InterruptedException {
    System.setProperty("org.slf4j.simpleLogger.log.org.wali", "INFO");
    final int numThreads = 12;
    final long iterationsPerThread = 1000000;
    final int numAttempts = 1000;
    final Path path = Paths.get("D:/dummy/minimal-locking-repo");
    path.toFile().mkdirs();
    final AtomicReference<WriteAheadRepository<DummyRecord>> writeRepoRef = new AtomicReference<>();
    // Flag so the main loop can wait for an in-flight checkpoint before
    // reopening the repository for read-back verification.
    final AtomicBoolean checkpointing = new AtomicBoolean(false);
    // Background daemon: checkpoints the current repository every 5s.
    final Thread bgThread = new Thread(() -> {
        while (true) {
            checkpointing.set(true);
            final WriteAheadRepository<DummyRecord> repo = writeRepoRef.get();
            if (repo != null) {
                assertDoesNotThrow(() -> repo.checkpoint());
            }
            checkpointing.set(false);
            try {
                TimeUnit.SECONDS.sleep(5);
            } catch (InterruptedException e) {
                // Bug fix: the original swallowed the interrupt and kept
                // looping. Restore the interrupt status and let the
                // daemon terminate, per standard interruption handling.
                Thread.currentThread().interrupt();
                return;
            }
        }
    });
    bgThread.setDaemon(true);
    bgThread.start();
    for (int x = 0; x < numAttempts; x++) {
        final DummyRecordSerde serde = new DummyRecordSerde();
        final WriteAheadRepository<DummyRecord> writeRepo = new MinimalLockingWriteAheadLog<>(path, 256, serde, null);
        final Collection<DummyRecord> writeRecords = writeRepo.recoverRecords();
        for (final DummyRecord record : writeRecords) {
            assertEquals("B", record.getProperty("A"));
        }
        writeRepoRef.set(writeRepo);
        final Thread[] threads = new Thread[numThreads];
        for (int i = 0; i < numThreads; i++) {
            final Thread t = new InlineCreationInsertThread(iterationsPerThread, writeRepo);
            t.start();
            threads[i] = t;
        }
        for (final Thread t : threads) {
            t.join();
        }
        writeRepoRef.set(null);
        writeRepo.shutdown();
        // Wait until any in-flight checkpoint finishes before reopening.
        boolean cp = checkpointing.get();
        while (cp) {
            Thread.sleep(100L);
            cp = checkpointing.get();
        }
        final WriteAheadRepository<DummyRecord> readRepo = new MinimalLockingWriteAheadLog<>(path, 256, serde, null);
        // ensure that we are able to recover the records properly
        final Collection<DummyRecord> readRecords = readRepo.recoverRecords();
        for (final DummyRecord record : readRecords) {
            assertEquals("B", record.getProperty("A"));
        }
        readRepo.shutdown();
    }
}
/**
 * Spawns ten writer threads that each insert 10,000 records (100,000
 * total), restarts the repository, and verifies that every record is
 * recovered with its expected single property.
 */
@Test
public void testWrite() throws IOException, InterruptedException {
    final int numPartitions = 8;
    final Path path = Paths.get("target/minimal-locking-repo");
    deleteRecursively(path.toFile());
    assertTrue(path.toFile().mkdirs());
    final DummyRecordSerde serde = new DummyRecordSerde();
    final WriteAheadRepository<DummyRecord> repo = new MinimalLockingWriteAheadLog<>(path, numPartitions, serde, null);
    final Collection<DummyRecord> initialRecs = repo.recoverRecords();
    assertTrue(initialRecs.isEmpty());
    final List<InsertThread> threads = new ArrayList<>();
    for (int i = 0; i < 10; i++) {
        // 10,000 records per thread, ID ranges kept disjoint per thread.
        threads.add(new InsertThread(10000, 1000000 * i, repo));
    }
    final long start = System.nanoTime();
    for (final InsertThread thread : threads) {
        thread.start();
    }
    for (final InsertThread thread : threads) {
        thread.join();
    }
    final long millis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - start);
    // Bug fix: the original message claimed 1,000,000 records, but only
    // 100,000 are inserted (10 threads x 10,000) — as asserted below.
    System.out.println("Took " + millis + " millis to insert 100,000 records each in its own transaction");
    repo.shutdown();
    final WriteAheadRepository<DummyRecord> recoverRepo = new MinimalLockingWriteAheadLog<>(path, numPartitions, serde, null);
    final Collection<DummyRecord> recoveredRecords = recoverRepo.recoverRecords();
    assertFalse(recoveredRecords.isEmpty());
    assertEquals(100000, recoveredRecords.size());
    for (final DummyRecord record : recoveredRecords) {
        final Map<String, String> recoveredProps = record.getProperties();
        assertEquals(1, recoveredProps.size());
        assertEquals("B", recoveredProps.get("A"));
    }
}
@Test
public void testRecoverAfterIOException() throws IOException {
    final int partitionCount = 5;
    final Path repoPath = Paths.get("target/minimal-locking-repo-test-recover-after-ioe");
    deleteRecursively(repoPath.toFile());
    Files.createDirectories(repoPath);

    final DummyRecordSerde serde = new DummyRecordSerde();
    final WriteAheadRepository<DummyRecord> repo = new MinimalLockingWriteAheadLog<>(repoPath, partitionCount, serde, null);
    assertTrue(repo.recoverRecords().isEmpty());

    // Fail on the 8th serialized edit: the first two transactions account for six
    // edits, so the third transaction blows up on its very first edit.
    serde.setThrowIOEAfterNSerializeEdits(7);

    final List<DummyRecord> creates = new ArrayList<>(Arrays.asList(
            new DummyRecord("1", UpdateType.CREATE),
            new DummyRecord("2", UpdateType.CREATE),
            new DummyRecord("3", UpdateType.CREATE)));
    final List<DummyRecord> updates = new ArrayList<>(Arrays.asList(
            new DummyRecord("1", UpdateType.UPDATE).setProperty("abc", "123"),
            new DummyRecord("2", UpdateType.UPDATE).setProperty("cba", "123"),
            new DummyRecord("3", UpdateType.UPDATE).setProperty("aaa", "123")));
    final List<DummyRecord> deletes = new ArrayList<>(Arrays.asList(
            new DummyRecord("1", UpdateType.DELETE),
            new DummyRecord("2", UpdateType.DELETE)));

    repo.update(creates, true);
    repo.update(updates, true);
    assertThrows(IOException.class, () -> repo.update(deletes, true));
    repo.shutdown();

    // The failed DELETE transaction must not be visible after recovery: all three
    // records should come back in their post-UPDATE state.
    serde.setThrowIOEAfterNSerializeEdits(-1);
    final WriteAheadRepository<DummyRecord> recoveryRepo = new MinimalLockingWriteAheadLog<>(repoPath, partitionCount, serde, null);
    final Collection<DummyRecord> recovered = recoveryRepo.recoverRecords();
    assertFalse(recovered.isEmpty());
    assertEquals(3, recovered.size());

    boolean saw1 = false;
    boolean saw2 = false;
    boolean saw3 = false;
    for (final DummyRecord rec : recovered) {
        final String id = rec.getId();
        if ("1".equals(id)) {
            saw1 = true;
            assertEquals("123", rec.getProperty("abc"));
        } else if ("2".equals(id)) {
            saw2 = true;
            assertEquals("123", rec.getProperty("cba"));
        } else if ("3".equals(id)) {
            saw3 = true;
            assertEquals("123", rec.getProperty("aaa"));
        }
    }
    assertTrue(saw1);
    assertTrue(saw2);
    assertTrue(saw3);
}
@Test
public void testRecoverFileThatHasTrailingNULBytesAndTruncation() throws IOException {
    final int partitionCount = 5;
    final Path repoPath = Paths.get("target/testRecoverFileThatHasTrailingNULBytesAndTruncation");
    deleteRecursively(repoPath.toFile());
    Files.createDirectories(repoPath);

    final DummyRecordSerde serde = new DummyRecordSerde();
    final WriteAheadRepository<DummyRecord> repo = new MinimalLockingWriteAheadLog<>(repoPath, partitionCount, serde, null);
    assertTrue(repo.recoverRecords().isEmpty());

    final List<DummyRecord> creates = new ArrayList<>(Arrays.asList(
            new DummyRecord("1", UpdateType.CREATE),
            new DummyRecord("2", UpdateType.CREATE),
            new DummyRecord("3", UpdateType.CREATE)));
    final List<DummyRecord> updates = new ArrayList<>(Arrays.asList(
            new DummyRecord("1", UpdateType.UPDATE).setProperty("abc", "123"),
            new DummyRecord("2", UpdateType.UPDATE).setProperty("cba", "123"),
            new DummyRecord("3", UpdateType.UPDATE).setProperty("aaa", "123")));
    final List<DummyRecord> deletes = new ArrayList<>(Arrays.asList(
            new DummyRecord("1", UpdateType.DELETE),
            new DummyRecord("2", UpdateType.DELETE)));

    repo.update(creates, true);
    repo.update(updates, true);
    repo.update(deletes, true);
    repo.shutdown();

    // Simulate a sudden power loss on one partition's journal: drop the final 8 bytes
    // of real data and replace them with 28 trailing NUL bytes (a new byte[] is
    // already zero-filled, so only the real prefix needs copying).
    final File journalFile = repoPath.resolve("partition-2").toFile().listFiles()[0];
    final byte[] contents = Files.readAllBytes(journalFile.toPath());
    final int keep = contents.length - 8;
    final byte[] corrupted = new byte[keep + 28];
    System.arraycopy(contents, 0, corrupted, 0, keep);
    try (final OutputStream fos = new FileOutputStream(journalFile)) {
        fos.write(corrupted);
    }

    // Recovery must ignore the damaged tail; the truncated partition loses its DELETE
    // transaction, so all three records come back in their post-UPDATE state.
    final WriteAheadRepository<DummyRecord> recoveryRepo = new MinimalLockingWriteAheadLog<>(repoPath, partitionCount, serde, null);
    final Collection<DummyRecord> recovered = recoveryRepo.recoverRecords();
    assertFalse(recovered.isEmpty());
    assertEquals(3, recovered.size());

    boolean saw1 = false;
    boolean saw2 = false;
    boolean saw3 = false;
    for (final DummyRecord rec : recovered) {
        final String id = rec.getId();
        if ("1".equals(id)) {
            saw1 = true;
            assertEquals("123", rec.getProperty("abc"));
        } else if ("2".equals(id)) {
            saw2 = true;
            assertEquals("123", rec.getProperty("cba"));
        } else if ("3".equals(id)) {
            saw3 = true;
            assertEquals("123", rec.getProperty("aaa"));
        }
    }
    assertTrue(saw1);
    assertTrue(saw2);
    assertTrue(saw3);
}
@Test
public void testRecoverFileThatHasTrailingNULBytesNoTruncation() throws IOException {
    final int partitionCount = 5;
    final Path repoPath = Paths.get("target/testRecoverFileThatHasTrailingNULBytesNoTruncation");
    deleteRecursively(repoPath.toFile());
    Files.createDirectories(repoPath);

    final DummyRecordSerde serde = new DummyRecordSerde();
    final WriteAheadRepository<DummyRecord> repo = new MinimalLockingWriteAheadLog<>(repoPath, partitionCount, serde, null);
    assertTrue(repo.recoverRecords().isEmpty());

    final List<DummyRecord> creates = new ArrayList<>(Arrays.asList(
            new DummyRecord("1", UpdateType.CREATE),
            new DummyRecord("2", UpdateType.CREATE),
            new DummyRecord("3", UpdateType.CREATE)));
    final List<DummyRecord> updates = new ArrayList<>(Arrays.asList(
            new DummyRecord("1", UpdateType.UPDATE).setProperty("abc", "123"),
            new DummyRecord("2", UpdateType.UPDATE).setProperty("cba", "123"),
            new DummyRecord("3", UpdateType.UPDATE).setProperty("aaa", "123")));
    final List<DummyRecord> deletes = new ArrayList<>(Arrays.asList(
            new DummyRecord("1", UpdateType.DELETE),
            new DummyRecord("2", UpdateType.DELETE)));

    repo.update(creates, true);
    repo.update(updates, true);
    repo.update(deletes, true);
    repo.shutdown();

    // Append 28 NUL bytes to one partition's otherwise intact journal, as often seen
    // after a sudden power loss. Unlike the truncation variant of this test, no real
    // data is removed, so the DELETE transaction is still fully recoverable.
    final File journalFile = repoPath.resolve("partition-2").toFile().listFiles()[0];
    try (final OutputStream fos = new FileOutputStream(journalFile, true)) {
        fos.write(new byte[28]);
    }

    // Records "1" and "2" were deleted in the final transaction, so only "3" survives.
    final WriteAheadRepository<DummyRecord> recoveryRepo = new MinimalLockingWriteAheadLog<>(repoPath, partitionCount, serde, null);
    final Collection<DummyRecord> recovered = recoveryRepo.recoverRecords();
    assertFalse(recovered.isEmpty());
    assertEquals(1, recovered.size());

    boolean saw1 = false;
    boolean saw2 = false;
    boolean saw3 = false;
    for (final DummyRecord rec : recovered) {
        final String id = rec.getId();
        if ("1".equals(id)) {
            saw1 = rec.getUpdateType() != UpdateType.DELETE;
            assertEquals("123", rec.getProperty("abc"));
        } else if ("2".equals(id)) {
            saw2 = rec.getUpdateType() != UpdateType.DELETE;
            assertEquals("123", rec.getProperty("cba"));
        } else if ("3".equals(id)) {
            saw3 = true;
            assertEquals("123", rec.getProperty("aaa"));
        }
    }
    assertFalse(saw1);
    assertFalse(saw2);
    assertTrue(saw3);
}
@Test
public void testCannotModifyLogAfterAllAreBlackListed() throws IOException {
    final int partitionCount = 5;
    final Path repoPath = Paths.get("target/minimal-locking-repo-test-cannot-modify-after-all-blacklisted");
    deleteRecursively(repoPath.toFile());
    Files.createDirectories(repoPath);

    final DummyRecordSerde serde = new DummyRecordSerde();
    final WriteAheadRepository<DummyRecord> repo = new MinimalLockingWriteAheadLog<>(repoPath, partitionCount, serde, null);
    assertTrue(repo.recoverRecords().isEmpty());

    // Serialize the first transaction's three edits successfully, then fail every
    // subsequent edit so the partitions get blacklisted one after another.
    serde.setThrowIOEAfterNSerializeEdits(3);

    final List<DummyRecord> creates = new ArrayList<>(Arrays.asList(
            new DummyRecord("1", UpdateType.CREATE),
            new DummyRecord("2", UpdateType.CREATE),
            new DummyRecord("3", UpdateType.CREATE)));
    final List<DummyRecord> updates = new ArrayList<>(Arrays.asList(
            new DummyRecord("1", UpdateType.UPDATE).setProperty("abc", "123"),
            new DummyRecord("2", UpdateType.UPDATE).setProperty("cba", "123"),
            new DummyRecord("3", UpdateType.UPDATE).setProperty("aaa", "123")));
    final List<DummyRecord> deletes = new ArrayList<>(Arrays.asList(
            new DummyRecord("1", UpdateType.DELETE),
            new DummyRecord("2", UpdateType.DELETE)));

    repo.update(creates, true);
    assertThrows(IOException.class, () -> repo.update(updates, true));
    for (int i = 0; i < 4; i++) {
        assertThrows(IOException.class, () -> repo.update(deletes, true));
    }

    // Even once serialization works again, every partition is now blacklisted,
    // so any further update must be rejected.
    serde.setThrowIOEAfterNSerializeEdits(-1);
    final List<DummyRecord> finalDelete = new ArrayList<>(Arrays.asList(new DummyRecord("1", UpdateType.DELETE)));
    final IOException failure = assertThrows(IOException.class, () -> repo.update(finalDelete, true));
    assertTrue(failure.getMessage().contains("All Partitions have been blacklisted"));
    repo.shutdown();

    // Only the first (successful) transaction should be recoverable.
    serde.setThrowIOEAfterNSerializeEdits(-1);
    final WriteAheadRepository<DummyRecord> recoveryRepo = new MinimalLockingWriteAheadLog<>(repoPath, partitionCount, serde, null);
    final Collection<DummyRecord> recovered = recoveryRepo.recoverRecords();
    assertFalse(recovered.isEmpty());
    assertEquals(3, recovered.size());
}
@Test
public void testStriping() throws IOException {
    final int partitionCount = 6;
    final Path basePath = Paths.get("target/minimal-locking-repo-striped");
    deleteRecursively(basePath.toFile());
    Files.createDirectories(basePath);

    // Two stripe directories; the repository should spread its six partitions
    // evenly across them.
    final SortedSet<Path> stripes = new TreeSet<>();
    stripes.add(basePath.resolve("stripe-1"));
    stripes.add(basePath.resolve("stripe-2"));

    final DummyRecordSerde serde = new DummyRecordSerde();
    final WriteAheadRepository<DummyRecord> repo = new MinimalLockingWriteAheadLog<>(stripes, partitionCount, serde, null);
    assertTrue(repo.recoverRecords().isEmpty());

    // Run the inserter on this thread (run(), not start()) so that every write has
    // completed before the directory layout is inspected.
    new InsertThread(100000, 0, repo).run();

    // Each stripe should hold half the partitions, each with exactly one journal file.
    for (final Path stripe : stripes) {
        final File[] partitionDirs = stripe.toFile().listFiles(pathname -> pathname.getName().startsWith("partition"));
        assertEquals(3, partitionDirs.length);
        for (final File partitionDir : partitionDirs) {
            assertEquals(1, partitionDir.listFiles().length);
        }
    }
    repo.checkpoint();
}
@Test
public void testShutdownWhileBlacklisted() throws IOException {
    final Path path = Paths.get("target/minimal-locking-repo-shutdown-blacklisted");
    deleteRecursively(path.toFile());
    Files.createDirectories(path);

    // SerDe that writes records as [size:long]['A' x size][id:long], succeeding for
    // the first two records and throwing mid-record on the third -- after the size
    // and payload were already written but before the id.
    final SerDe<SimpleRecord> failOnThirdWriteSerde = new SerDe<SimpleRecord>() {
        private int writes = 0;

        @Override
        public void serializeEdit(SimpleRecord previousRecordState, SimpleRecord newRecordState, DataOutputStream out) throws IOException {
            serializeRecord(newRecordState, out);
        }

        @Override
        public void serializeRecord(SimpleRecord record, DataOutputStream out) throws IOException {
            int size = (int) record.getSize();
            out.writeLong(record.getSize());
            for (int i = 0; i < size; i++) {
                out.write('A');
            }
            // Fail on the third record, leaving a partial entry in the stream.
            if (++writes == 3) {
                throw new IOException("Intentional Exception for Unit Testing");
            }
            out.writeLong(record.getId());
        }

        @Override
        public SimpleRecord deserializeEdit(DataInputStream in, Map<Object, SimpleRecord> currentRecordStates, int version) throws IOException {
            return deserializeRecord(in, version);
        }

        @Override
        public SimpleRecord deserializeRecord(DataInputStream in, int version) throws IOException {
            long size = in.readLong();
            for (int i = 0; i < (int) size; i++) {
                in.read();
            }
            long id = in.readLong();
            return new SimpleRecord(id, size);
        }

        @Override
        public Object getRecordIdentifier(SimpleRecord record) {
            return record.getId();
        }

        @Override
        public UpdateType getUpdateType(SimpleRecord record) {
            return UpdateType.CREATE;
        }

        @Override
        public String getLocation(SimpleRecord record) {
            return null;
        }

        @Override
        public int getVersion() {
            return 0;
        }
    };

    final WriteAheadRepository<SimpleRecord> writeRepo = new MinimalLockingWriteAheadLog<>(path, 1, failOnThirdWriteSerde, null);
    final Collection<SimpleRecord> initialRecs = writeRepo.recoverRecords();
    assertTrue(initialRecs.isEmpty());

    writeRepo.update(Collections.singleton(new SimpleRecord(1L, 1L)), false);
    writeRepo.update(Collections.singleton(new SimpleRecord(2L, 2L)), false);

    // Use a size of 8194 because the BufferedOutputStream has a buffer size of 8192 and we want
    // to exceed this for testing purposes.
    assertThrows(IOException.class,
        () -> writeRepo.update(Collections.singleton(new SimpleRecord(3L, 8194L)), false));

    final Path partitionDir = path.resolve("partition-0");
    final File journalFile = partitionDir.toFile().listFiles()[0];
    final long journalFileSize = journalFile.length();

    verifyBlacklistedJournalContents(journalFile, failOnThirdWriteSerde);

    writeRepo.shutdown();

    // Ensure that calling shutdown() didn't write anything to the journal file.
    // BUG FIX: the original asserted assertEquals(newJournalSize, journalFile.length(), ...),
    // i.e. compared the post-shutdown length against itself, which could never fail.
    // Compare against the size captured BEFORE shutdown() instead.
    final long newJournalSize = journalFile.length();
    assertEquals(journalFileSize, newJournalSize, "Calling Shutdown wrote " + (newJournalSize - journalFileSize) + " bytes to the journal file");
}
/**
 * Reads the given journal file directly and asserts that it contains exactly the
 * header plus the two successfully committed records and nothing more -- in
 * particular, no partial data from the failed third write.
 *
 * @param journalFile the partition journal file to inspect
 * @param serde the serde whose class name and version are expected in the header
 * @throws IOException if the journal cannot be read
 */
private void verifyBlacklistedJournalContents(final File journalFile, final SerDe<?> serde) throws IOException {
    try (final FileInputStream fis = new FileInputStream(journalFile);
        final InputStream bis = new BufferedInputStream(fis);
        final DataInputStream in = new DataInputStream(bis)) {

        // Verify header info: WALI class name + version, then serde class name + version.
        final String waliClassName = in.readUTF();
        assertEquals(MinimalLockingWriteAheadLog.class.getName(), waliClassName);
        final int waliVersion = in.readInt();
        assertTrue(waliVersion > 0);
        final String serdeClassName = in.readUTF();
        assertEquals(serde.getClass().getName(), serdeClassName);
        final int serdeVersion = in.readInt();
        assertEquals(serde.getVersion(), serdeVersion);

        // The two committed records: each is a transaction id, the payload the test's
        // serde wrote (size, 'A' x size, record id), then a one-byte indicator.
        for (int i = 0; i < 2; i++) {
            long transactionId = in.readLong();
            assertEquals(i, transactionId);

            // read what serde wrote
            long size = in.readLong();
            assertEquals((i + 1), size);
            for (int j = 0; j < (int) size; j++) {
                final int c = in.read();
                assertEquals('A', c);
            }
            long id = in.readLong();
            assertEquals((i + 1), id);

            // NOTE(review): 2 is presumably the transaction-commit marker -- confirm
            // against MinimalLockingWriteAheadLog's journal format.
            int transactionIndicator = in.read();
            assertEquals(2, transactionIndicator);
        }

        // In previous implementations, we would still have a partial record written out.
        // In the current version, however, the serde above would result in the data serialization
        // failing and as a result no data would be written to the stream, so the stream should
        // now be out of data
        final int nextByte = in.read();
        assertEquals(-1, nextByte);
    }
}
@Test
public void testDecreaseNumberOfPartitions() throws IOException {
    final Path repoPath = Paths.get("target/minimal-locking-repo-decrease-partitions");
    deleteRecursively(repoPath.toFile());
    Files.createDirectories(repoPath);

    final DummyRecordSerde serde = new DummyRecordSerde();

    // Write one record and eight updates to it using 256 partitions...
    final WriteAheadRepository<DummyRecord> writeRepo = new MinimalLockingWriteAheadLog<>(repoPath, 256, serde, null);
    assertTrue(writeRepo.recoverRecords().isEmpty());

    writeRepo.update(Collections.singleton(new DummyRecord("1", UpdateType.CREATE)), false);
    for (int i = 0; i < 8; i++) {
        final DummyRecord update = new DummyRecord("1", UpdateType.UPDATE);
        update.setProperty("i", String.valueOf(i));
        writeRepo.update(Collections.singleton(update), false);
    }
    writeRepo.shutdown();

    // ...then recover with only 6 partitions: the single record must come back in
    // its latest state (the final update, i == 7).
    final WriteAheadRepository<DummyRecord> recoveryRepo = new MinimalLockingWriteAheadLog<>(repoPath, 6, serde, null);
    final List<DummyRecord> recovered = new ArrayList<>(recoveryRepo.recoverRecords());
    assertEquals(1, recovered.size());

    final DummyRecord recoveredRecord = recovered.get(0);
    assertEquals("1", recoveredRecord.getId());
    assertEquals("7", recoveredRecord.getProperty("i"));
}
/**
 * Writer thread that inserts a fixed number of single-record transactions. All
 * records are built up front so that run() spends its time only in the repository.
 */
private static class InsertThread extends Thread {

    private final List<List<DummyRecord>> records;
    private final WriteAheadRepository<DummyRecord> repo;

    public InsertThread(final int numInsertions, final int startIndex, final WriteAheadRepository<DummyRecord> repo) {
        records = new ArrayList<>();
        for (int i = 0; i < numInsertions; i++) {
            final DummyRecord record = new DummyRecord(String.valueOf(i + startIndex), UpdateType.CREATE);
            record.setProperty("A", "B");
            final List<DummyRecord> transaction = new ArrayList<>();
            transaction.add(record);
            records.add(transaction);
        }
        this.repo = repo;
    }

    @Override
    public void run() {
        // Force a sync only on the very last transaction.
        int remaining = records.size();
        for (final List<DummyRecord> transaction : records) {
            final boolean forceSync = (--remaining == 0);
            assertDoesNotThrow(() -> repo.update(transaction, forceSync));
        }
    }
}
/**
 * Writer thread that creates each record inline (rather than up front) and applies
 * a random update type, reusing a single one-element list for every transaction.
 */
private static class InlineCreationInsertThread extends Thread {

    private final long iterations;
    private final WriteAheadRepository<DummyRecord> repo;

    public InlineCreationInsertThread(final long numInsertions, final WriteAheadRepository<DummyRecord> repo) {
        this.iterations = numInsertions;
        this.repo = repo;
    }

    @Override
    public void run() {
        // Single reusable slot: each iteration overwrites index 0 with a new record.
        final List<DummyRecord> transaction = new ArrayList<>(1);
        transaction.add(null);

        final UpdateType[] updateTypes = new UpdateType[] { UpdateType.CREATE, UpdateType.DELETE, UpdateType.UPDATE };
        final Random random = new Random();

        for (long i = 0; i < iterations; i++) {
            final UpdateType updateType = updateTypes[random.nextInt(updateTypes.length)];
            final DummyRecord record = new DummyRecord(String.valueOf(i), updateType);
            record.setProperty("A", "B");
            transaction.set(0, record);
            try {
                repo.update(transaction, false);
            } catch (final Throwable t) {
                // Deliberate best-effort: this thread only generates load, so failures
                // are printed and the loop keeps going.
                t.printStackTrace();
            }
        }
    }
}
// Deletes the given file, first removing all of its descendants when it is a
// directory. Failures from File.delete() are ignored, as in the original.
private void deleteRecursively(final File file) {
    final File[] children = file.listFiles();
    if (children == null) {
        file.delete();
        return;
    }
    for (final File child : children) {
        deleteRecursively(child);
    }
    file.delete();
}
// Returns the total number of bytes under the given file: its own length plus,
// for directories, the recursive size of every child.
private long sizeOf(final File file) {
    long total = file.length();
    if (!file.isDirectory()) {
        return total;
    }
    final File[] children = file.listFiles();
    if (children != null) {
        for (final File child : children) {
            total += sizeOf(child);
        }
    }
    return total;
}
/**
 * Minimal record used by the blacklisting test: an id plus a payload size that
 * tells the test serde how many bytes to write.
 */
static class SimpleRecord {
    // Both fields are only ever assigned in the constructor; marking them final
    // makes the class immutable and safe to share across the test's threads.
    private final long id;
    private final long size;

    public SimpleRecord(final long id, final long size) {
        this.id = id;
        this.size = size;
    }

    public long getId() {
        return id;
    }

    public long getSize() {
        return size;
    }
}
}
| |
/*
* Copyright 2010-2014 Ning, Inc.
* Copyright 2014-2019 Groupon, Inc
* Copyright 2014-2019 The Billing Project, LLC
*
* The Billing Project licenses this file to you under the Apache License, version 2.0
* (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package org.killbill.billing.util.customfield.api;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import org.killbill.billing.ErrorCode;
import org.killbill.billing.ObjectType;
import org.killbill.billing.account.api.ImmutableAccountData;
import org.killbill.billing.api.TestApiListener.NextEvent;
import org.killbill.billing.callcontext.InternalTenantContext;
import org.killbill.billing.util.UtilTestSuiteWithEmbeddedDB;
import org.killbill.billing.util.api.CustomFieldApiException;
import org.killbill.billing.util.customfield.CustomField;
import org.killbill.billing.util.customfield.StringCustomField;
import org.killbill.billing.util.entity.Pagination;
import org.mockito.Mockito;
import org.skife.jdbi.v2.Handle;
import org.skife.jdbi.v2.tweak.HandleCallback;
import org.testng.Assert;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import com.google.common.collect.ImmutableList;
/**
 * Slow (embedded-DB) tests for the custom field user API: basic CRUD, updates,
 * pagination and population of account_record_id.
 */
public class TestDefaultCustomFieldUserApi extends UtilTestSuiteWithEmbeddedDB {

    private final UUID accountId = UUID.randomUUID();
    private Long accountRecordId = 19384012L;

    @Override
    @BeforeMethod(groups = "slow")
    public void beforeMethod() throws Exception {
        if (hasFailed()) {
            return;
        }
        super.beforeMethod();

        accountRecordId = generateAccountRecordId(accountId);

        // The custom field API resolves the account by record id; a mock is sufficient.
        final ImmutableAccountData immutableAccountData = Mockito.mock(ImmutableAccountData.class);
        Mockito.when(immutableAccountInternalApi.getImmutableAccountDataByRecordId(Mockito.<Long>eq(accountRecordId), Mockito.<InternalTenantContext>any())).thenReturn(immutableAccountData);
    }

    @Test(groups = "slow")
    public void testCustomFieldNoId() throws Exception {
        // Verify that when coming from a plugin, the id isn't required
        final CustomField customField = Mockito.mock(CustomField.class);
        Mockito.when(customField.getObjectId()).thenReturn(accountId);
        Mockito.when(customField.getObjectType()).thenReturn(ObjectType.ACCOUNT);
        Mockito.when(customField.getFieldName()).thenReturn(UUID.randomUUID().toString());
        Mockito.when(customField.getFieldValue()).thenReturn(UUID.randomUUID().toString());

        eventsListener.pushExpectedEvents(NextEvent.CUSTOM_FIELD);
        customFieldUserApi.addCustomFields(ImmutableList.<CustomField>of(customField), callContext);
        assertListenerStatus();
    }

    @Test(groups = "slow")
    public void testCustomFieldBasic() throws Exception {
        final CustomField customField1 = new StringCustomField("some123", "some 456", ObjectType.ACCOUNT, accountId, callContext.getCreatedDate());
        final CustomField customField2 = new StringCustomField("other123", "other 456", ObjectType.ACCOUNT, accountId, callContext.getCreatedDate());
        eventsListener.pushExpectedEvents(NextEvent.CUSTOM_FIELD, NextEvent.CUSTOM_FIELD);
        customFieldUserApi.addCustomFields(ImmutableList.<CustomField>of(customField1, customField2), callContext);
        assertListenerStatus();

        // Verify operation is indeed transactional, and nothing was inserted
        final CustomField customField3 = new StringCustomField("qrqrq123", "qrqrq 456", ObjectType.ACCOUNT, accountId, callContext.getCreatedDate());
        try {
            customFieldUserApi.addCustomFields(ImmutableList.<CustomField>of(customField3, customField1), callContext);
            // Bug fix: this fail() was missing, so the test could pass without the
            // duplicate ever being rejected (matches the style of testCustomFieldUpdate).
            Assert.fail("Adding a duplicate custom field should fail");
        } catch (final CustomFieldApiException e) {
            Assert.assertEquals(e.getCode(), ErrorCode.CUSTOM_FIELD_ALREADY_EXISTS.getCode());
        }

        List<CustomField> all = customFieldUserApi.getCustomFieldsForAccount(accountId, callContext);
        Assert.assertEquals(all.size(), 2);

        eventsListener.pushExpectedEvent(NextEvent.CUSTOM_FIELD);
        customFieldUserApi.addCustomFields(ImmutableList.<CustomField>of(customField3), callContext);
        assertListenerStatus();

        all = customFieldUserApi.getCustomFieldsForAccount(accountId, callContext);
        Assert.assertEquals(all.size(), 3);

        eventsListener.pushExpectedEvents(NextEvent.CUSTOM_FIELD, NextEvent.CUSTOM_FIELD);
        customFieldUserApi.removeCustomFields(ImmutableList.of(customField1, customField3), callContext);
        assertListenerStatus();

        // Only customField2 should remain.
        all = customFieldUserApi.getCustomFieldsForAccount(accountId, callContext);
        Assert.assertEquals(all.size(), 1);
        Assert.assertEquals(all.get(0).getId(), customField2.getId());
        Assert.assertEquals(all.get(0).getObjectId(), accountId);
        Assert.assertEquals(all.get(0).getObjectType(), ObjectType.ACCOUNT);
        Assert.assertEquals(all.get(0).getFieldName(), customField2.getFieldName());
        Assert.assertEquals(all.get(0).getFieldValue(), customField2.getFieldValue());
    }

    @Test(groups = "slow")
    public void testCustomFieldUpdate() throws Exception {
        final CustomField customField1 = new StringCustomField("gtqre", "value1", ObjectType.ACCOUNT, accountId, callContext.getCreatedDate());
        eventsListener.pushExpectedEvents(NextEvent.CUSTOM_FIELD);
        customFieldUserApi.addCustomFields(ImmutableList.<CustomField>of(customField1), callContext);
        assertListenerStatus();

        // Updating by id with the same field name should replace the value.
        final CustomField update1 = new StringCustomField(customField1.getId(), customField1.getFieldName(), "value2", customField1.getObjectType(), customField1.getObjectId(), callContext.getCreatedDate());
        customFieldUserApi.updateCustomFields(ImmutableList.of(update1), callContext);

        List<CustomField> all = customFieldUserApi.getCustomFieldsForAccount(accountId, callContext);
        Assert.assertEquals(all.size(), 1);
        Assert.assertEquals(all.get(0).getId(), update1.getId());
        Assert.assertEquals(all.get(0).getObjectType(), update1.getObjectType());
        Assert.assertEquals(all.get(0).getObjectId(), update1.getObjectId());
        Assert.assertEquals(all.get(0).getFieldName(), update1.getFieldName());
        Assert.assertEquals(all.get(0).getFieldValue(), "value2");

        // Updating a field whose id does not exist must fail.
        try {
            customFieldUserApi.updateCustomFields(ImmutableList.<CustomField>of(new StringCustomField("gtqre", "value1", ObjectType.ACCOUNT, accountId, callContext.getCreatedDate())), callContext);
            Assert.fail("Updating custom field should fail");
        } catch (final CustomFieldApiException e) {
            Assert.assertEquals(e.getCode(), ErrorCode.CUSTOM_FIELD_DOES_NOT_EXISTS_FOR_ID.getCode());
        }

        // Updating an existing id with a mismatched field name must fail.
        try {
            customFieldUserApi.updateCustomFields(ImmutableList.<CustomField>of(new StringCustomField(customField1.getId(), "wrongName", "value2", customField1.getObjectType(), customField1.getObjectId(), callContext.getCreatedDate())), callContext);
            Assert.fail("Updating custom field should fail");
        } catch (final CustomFieldApiException e) {
            Assert.assertEquals(e.getCode(), ErrorCode.CUSTOM_FIELD_INVALID_UPDATE.getCode());
        }
    }

    @Test(groups = "slow")
    public void testSaveCustomFieldWithAccountRecordId() throws Exception {
        checkPagination(0);

        final String cfName = UUID.randomUUID().toString().substring(1, 4);
        final String cfValue = UUID.randomUUID().toString().substring(1, 4);
        final CustomField customField = new StringCustomField(cfName, cfValue, ObjectType.ACCOUNT, accountId, callContext.getCreatedDate());
        eventsListener.pushExpectedEvent(NextEvent.CUSTOM_FIELD);
        customFieldUserApi.addCustomFields(ImmutableList.<CustomField>of(customField), callContext);
        assertListenerStatus();

        checkPagination(1);

        // Verify the field was saved
        final List<CustomField> customFields = customFieldUserApi.getCustomFieldsForObject(accountId, ObjectType.ACCOUNT, callContext);
        Assert.assertEquals(customFields.size(), 1);
        Assert.assertEquals(customFields.get(0).getFieldName(), customField.getFieldName());
        Assert.assertEquals(customFields.get(0).getFieldValue(), customField.getFieldValue());
        Assert.assertEquals(customFields.get(0).getObjectId(), customField.getObjectId());
        Assert.assertEquals(customFields.get(0).getObjectType(), customField.getObjectType());

        // Verify the account_record_id was populated
        dbi.withHandle(new HandleCallback<Void>() {
            @Override
            public Void withHandle(final Handle handle) throws Exception {
                final List<Map<String, Object>> values = handle.select("select account_record_id from custom_fields where object_id = ?", accountId.toString());
                Assert.assertEquals(values.size(), 1);
                Assert.assertEquals(values.get(0).keySet().size(), 1);
                Assert.assertEquals(Long.valueOf(values.get(0).get("account_record_id").toString()), accountRecordId);
                return null;
            }
        });

        eventsListener.pushExpectedEvent(NextEvent.CUSTOM_FIELD);
        customFieldUserApi.removeCustomFields(customFields, callContext);
        assertListenerStatus();
        List<CustomField> remainingCustomFields = customFieldUserApi.getCustomFieldsForObject(accountId, ObjectType.ACCOUNT, callContext);
        Assert.assertEquals(remainingCustomFields.size(), 0);

        checkPagination(0);

        // Add again the custom field
        final CustomField newCustomField = new StringCustomField(cfName, cfValue, ObjectType.ACCOUNT, accountId, callContext.getCreatedDate());
        eventsListener.pushExpectedEvent(NextEvent.CUSTOM_FIELD);
        customFieldUserApi.addCustomFields(ImmutableList.<CustomField>of(newCustomField), callContext);
        assertListenerStatus();

        remainingCustomFields = customFieldUserApi.getCustomFieldsForObject(accountId, ObjectType.ACCOUNT, callContext);
        Assert.assertEquals(remainingCustomFields.size(), 1);

        checkPagination(1);

        // Delete again
        eventsListener.pushExpectedEvent(NextEvent.CUSTOM_FIELD);
        customFieldUserApi.removeCustomFields(remainingCustomFields, callContext);
        assertListenerStatus();
        remainingCustomFields = customFieldUserApi.getCustomFieldsForObject(accountId, ObjectType.ACCOUNT, callContext);
        Assert.assertEquals(remainingCustomFields.size(), 0);

        checkPagination(0);
    }

    /**
     * Asserts that both search and plain pagination report exactly {@code nbRecords}
     * custom fields (max and total counts, plus iterator emptiness).
     */
    private void checkPagination(final long nbRecords) {
        final Pagination<CustomField> foundCustomFields = customFieldUserApi.searchCustomFields("ACCOUNT", 0L, nbRecords + 1L, callContext);
        Assert.assertEquals(foundCustomFields.iterator().hasNext(), nbRecords > 0);
        Assert.assertEquals(foundCustomFields.getMaxNbRecords(), (Long) nbRecords);
        Assert.assertEquals(foundCustomFields.getTotalNbRecords(), (Long) nbRecords);

        final Pagination<CustomField> gotCustomFields = customFieldUserApi.getCustomFields(0L, nbRecords + 1L, callContext);
        Assert.assertEquals(gotCustomFields.iterator().hasNext(), nbRecords > 0);
        Assert.assertEquals(gotCustomFields.getMaxNbRecords(), (Long) nbRecords);
        Assert.assertEquals(gotCustomFields.getTotalNbRecords(), (Long) nbRecords);
    }
}
| |
/*
* Copyright (c) 2017 Martin Pfeffer
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.celox.brutus.activities;
import android.annotation.TargetApi;
import android.content.Context;
import android.content.res.Configuration;
import android.media.Ringtone;
import android.media.RingtoneManager;
import android.net.Uri;
import android.os.Build;
import android.os.Bundle;
import android.preference.ListPreference;
import android.preference.Preference;
import android.preference.PreferenceFragment;
import android.preference.PreferenceManager;
import android.preference.RingtonePreference;
import android.support.v7.app.ActionBar;
import android.text.TextUtils;
import android.view.MenuItem;
import com.pepperonas.aesprefs.AesPrefs;
import com.pepperonas.andbasx.base.ToastUtils;
import io.celox.brutus.R;
import java.util.List;
public class SettingsActivity extends SecuredAppCompatPreferenceActivity {
private String mPanel = "general";
/**
 * Listener that mirrors a preference's new value into its summary line: list
 * preferences show the matching entry label, ringtone preferences show the
 * ringtone title (or the "silent" label), and everything else shows the raw
 * string value. Always returns true so the change is persisted.
 */
private static Preference.OnPreferenceChangeListener sBindPreferenceSummaryToValueListener = new Preference.OnPreferenceChangeListener() {
    @Override
    public boolean onPreferenceChange(Preference preference, Object value) {
        final String newValue = value.toString();

        if (preference instanceof ListPreference) {
            // Show the human-readable entry corresponding to the stored value,
            // or clear the summary if the value is unknown.
            final ListPreference listPreference = (ListPreference) preference;
            final int index = listPreference.findIndexOfValue(newValue);
            preference.setSummary(index >= 0 ? listPreference.getEntries()[index] : null);
            return true;
        }

        if (preference instanceof RingtonePreference) {
            if (TextUtils.isEmpty(newValue)) {
                // An empty value corresponds to 'silent' (no ringtone).
                preference.setSummary(R.string.pref_ringtone_silent);
            } else {
                final Ringtone ringtone = RingtoneManager.getRingtone(
                        preference.getContext(), Uri.parse(newValue));
                if (ringtone == null) {
                    // Lookup failed; clear the summary rather than show a stale name.
                    preference.setSummary(null);
                } else {
                    preference.setSummary(ringtone.getTitle(preference.getContext()));
                }
            }
            return true;
        }

        // Default: the value's plain string representation.
        preference.setSummary(newValue);
        return true;
    }
};
/**
* Helper method to determine if the device has an extra-large screen. For
* example, 10" tablets are extra-large.
*/
private static boolean isXLargeTablet(Context context) {
return (context.getResources().getConfiguration().screenLayout
& Configuration.SCREENLAYOUT_SIZE_MASK) >= Configuration.SCREENLAYOUT_SIZE_XLARGE;
}
/**
* Binds a preference's summary to its value. More specifically, when the
* preference's value is changed, its summary (line of text below the
* preference title) is updated to reflect the value. The summary is also
* immediately updated upon calling this method. The exact display format is
* dependent on the type of preference.
*
* @see #sBindPreferenceSummaryToValueListener
*/
private static void bindPreferenceSummaryToValue(Preference preference) {
// Set the listener to watch for value changes.
preference.setOnPreferenceChangeListener(sBindPreferenceSummaryToValueListener);
// Trigger the listener immediately with the preference's
// current value.
sBindPreferenceSummaryToValueListener.onPreferenceChange(preference,
PreferenceManager
.getDefaultSharedPreferences(preference.getContext())
.getString(preference.getKey(), ""));
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setupActionBar();
ToastUtils.toastShort("last: " + getIntent().getStringExtra("panel"));
}
@Override
public void onBackPressed() {
AesPrefs.putLong("lua", System.currentTimeMillis());
super.onBackPressed();
}
/**
* Set up the {@link android.app.ActionBar}, if the API is available.
*/
private void setupActionBar() {
ActionBar actionBar = getSupportActionBar();
if (actionBar != null) {
// Show the Up button in the action bar.
actionBar.setDisplayHomeAsUpEnabled(true);
}
}
/** {@inheritDoc} */
@Override
public boolean onIsMultiPane() {
return isXLargeTablet(this);
}
/** {@inheritDoc} */
@Override
@TargetApi(Build.VERSION_CODES.HONEYCOMB)
public void onBuildHeaders(List<Header> target) {
loadHeadersFromResource(R.xml.pref_headers, target);
}
/**
* This method stops fragment injection in malicious applications.
* Make sure to deny any unknown fragments here.
*/
protected boolean isValidFragment(String fragmentName) {
return PreferenceFragment.class.getName().equals(fragmentName)
|| GeneralPreferenceFragment.class.getName().equals(fragmentName)
|| DataSyncPreferenceFragment.class.getName().equals(fragmentName)
|| NotificationPreferenceFragment.class.getName().equals(fragmentName);
}
/**
* This fragment shows general preferences only. It is used when the
* activity is showing a two-pane settings UI.
*/
@TargetApi(Build.VERSION_CODES.HONEYCOMB)
public static class GeneralPreferenceFragment extends PreferenceFragment {
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
addPreferencesFromResource(R.xml.pref_general);
setHasOptionsMenu(true);
// Bind the summaries of EditText/List/Dialog/Ringtone preferences
// to their values. When their values change, their summaries are
// updated to reflect the new value, per the Android Design
// guidelines.
bindPreferenceSummaryToValue(findPreference("example_text"));
bindPreferenceSummaryToValue(findPreference("example_list"));
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
int id = item.getItemId();
if (id == android.R.id.home) {
getActivity().onBackPressed();
return true;
}
return super.onOptionsItemSelected(item);
}
}
/**
* This fragment shows notification preferences only. It is used when the
* activity is showing a two-pane settings UI.
*/
@TargetApi(Build.VERSION_CODES.HONEYCOMB)
public static class NotificationPreferenceFragment extends PreferenceFragment {
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
addPreferencesFromResource(R.xml.pref_notification);
setHasOptionsMenu(true);
// Bind the summaries of EditText/List/Dialog/Ringtone preferences
// to their values. When their values change, their summaries are
// updated to reflect the new value, per the Android Design
// guidelines.
bindPreferenceSummaryToValue(findPreference("notifications_new_message_ringtone"));
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
int id = item.getItemId();
if (id == android.R.id.home) {
getActivity().onBackPressed();
return true;
}
return super.onOptionsItemSelected(item);
}
}
/**
* This fragment shows data and sync preferences only. It is used when the
* activity is showing a two-pane settings UI.
*/
@TargetApi(Build.VERSION_CODES.HONEYCOMB)
public static class DataSyncPreferenceFragment extends PreferenceFragment {
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
addPreferencesFromResource(R.xml.pref_data_sync);
setHasOptionsMenu(true);
// Bind the summaries of EditText/List/Dialog/Ringtone preferences
// to their values. When their values change, their summaries are
// updated to reflect the new value, per the Android Design
// guidelines.
bindPreferenceSummaryToValue(findPreference("sync_frequency"));
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
int id = item.getItemId();
if (id == android.R.id.home) {
getActivity().onBackPressed();
return true;
}
return super.onOptionsItemSelected(item);
}
}
public String getPanel() {
return mPanel;
}
}
| |
/*
* Copyright 2015-2017 Realm Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.realm.processor;
import com.squareup.javawriter.JavaWriter;
import java.io.IOException;
import java.io.StringWriter;
import java.net.URI;
import java.util.ArrayList;
import java.util.EnumSet;
import java.util.List;
import javax.lang.model.element.Modifier;
import javax.tools.SimpleJavaFileObject;
// Helper class for creating RealmObject java files
// Helper class for creating RealmObject java files
public class RealmSyntheticTestClass extends SimpleJavaFileObject {

    /**
     * Describes one field of the synthetic class: its declaration (name, type,
     * modifiers, initializer, annotations) and whether getter/setter accessor
     * pairs should be generated for it. All setters return {@code this} for chaining;
     * use {@link #builder()} to get back to the enclosing {@link Builder}.
     */
    public static class Field {
        private final Builder builder;
        private String name;
        private String type;
        private String initializer;
        private boolean hasGetter = true;
        private boolean hasSetter = true;
        private EnumSet<Modifier> modifiers = EnumSet.of(Modifier.PRIVATE);
        private final List<String> annotations = new ArrayList<String>();

        Field(Builder builder) {
            this.builder = builder;
        }

        /**
         * Sets the field name. The first letter is capitalized so that the
         * generated accessors ("get" + name, "set" + name, "realmGet$" + name, ...)
         * and the field declaration stay consistent with each other.
         */
        public Field name(String name) {
            this.name = name.substring(0, 1).toUpperCase() + name.substring(1);
            return this;
        }

        public Field type(String type) {
            this.type = type;
            return this;
        }

        /**
         * Replaces the field's modifiers. An empty argument list produces a field
         * with no modifiers. (The previous implementation built the set with
         * {@code EnumSet.of(modifiers[0], modifiers)}, which duplicated the first
         * element and threw ArrayIndexOutOfBoundsException for an empty array.)
         */
        public Field modifiers(Modifier... modifiers) {
            EnumSet<Modifier> newModifiers = EnumSet.noneOf(Modifier.class);
            for (Modifier modifier : modifiers) {
                newModifiers.add(modifier);
            }
            this.modifiers = newModifiers;
            return this;
        }

        /** Removes all annotations previously added to this field. */
        public Field clearAnnotations() {
            this.annotations.clear();
            return this;
        }

        /** Adds an annotation (e.g. {@code "PrimaryKey"}) to emit above the field. */
        public Field annotation(String annotation) {
            this.annotations.add(annotation);
            return this;
        }

        /** Sets the initializer expression emitted in the field declaration, or null for none. */
        public Field initializer(String initializer) {
            this.initializer = initializer;
            return this;
        }

        public Field hasGetter(boolean hasGetter) {
            this.hasGetter = hasGetter;
            return this;
        }

        public Field hasSetter(boolean hasSetter) {
            this.hasSetter = hasSetter;
            return this;
        }

        /** @return the enclosing builder, for continued fluent configuration. */
        public Builder builder() {
            return builder;
        }
    }

    /**
     * Fluent builder that assembles the synthetic RealmObject subclass and
     * renders it to Java source via JavaWriter in {@link #build()}.
     */
    public static class Builder {
        private final List<Field> fields = new ArrayList<Field>();
        private String name;

        /** Sets the simple class name of the generated type. */
        public Builder name(String name) {
            this.name = name;
            return this;
        }

        // Note: this returns the new field, not the builder.
        // To get the builder back, use Field.builder()
        public Field field() {
            Field f = new Field(this);
            fields.add(f);
            return f;
        }

        // Convenience method to support legacy usage
        public Builder field(String name, String type, String annotation) {
            field().name(name).type(type).annotation(annotation);
            return this;
        }

        /**
         * Renders the configured class to source in package {@code some.test},
         * extending {@code RealmObject}, and wraps it as a compilable file object.
         *
         * @throws IOException if JavaWriter fails to emit the source
         */
        public RealmSyntheticTestClass build() throws IOException {
            StringWriter stringWriter = new StringWriter();
            JavaWriter writer = new JavaWriter(stringWriter);
            // Package name
            writer.emitPackage("some.test");
            // Import Realm classes
            writer.emitImports("io.realm.*");
            writer.emitImports("io.realm.annotations.*");
            // Begin the class definition
            writer.beginType(
                    name,                        // full qualified name of the item to generate
                    "class",                     // the type of the item
                    EnumSet.of(Modifier.PUBLIC), // modifiers to apply
                    "RealmObject")               // class to extend
                    .emitEmptyLine();
            for (Field field : fields) { generateField(writer, field); }
            writer.endType();
            return new RealmSyntheticTestClass(stringWriter, name);
        }

        /** Emits one field declaration plus its configured accessor pairs. */
        private void generateField(JavaWriter writer, Field field) throws IOException {
            if (field.name == null) { throw new IllegalArgumentException("A field must have a name"); }
            if (field.type == null) { throw new IllegalArgumentException("A field must have a type"); }
            // Declaration of field
            for (String annotation : field.annotations) { writer.emitAnnotation(annotation); }
            writer.emitField(field.type, field.name, field.modifiers, field.initializer);
            if (field.hasSetter) { emitSetter(writer, field); }
            if (field.hasGetter) { emitGetter(writer, field); }
        }

        /** Emits the public setter and the realmSet$ accessor the Realm bytecode weaver expects. */
        private void emitSetter(JavaWriter writer, Field field) throws IOException {
            // Setter
            writer.beginMethod(
                    "void",                                              // Return type
                    "set" + field.name,                                  // Method name
                    EnumSet.of(Modifier.PUBLIC), field.type, field.name); // Modifiers
            writer.emitStatement("realmSet$" + field.name + "(" + field.name + ")");
            writer.endMethod();
            // Realm Setter
            writer.beginMethod(
                    "void",                                              // Return type
                    "realmSet$" + field.name,                            // Method name
                    EnumSet.of(Modifier.PUBLIC), field.type, field.name); // Modifiers
            writer.emitStatement("this." + field.name + "=" + field.name);
            writer.endMethod();
        }

        /** Emits the public getter and the realmGet$ accessor the Realm bytecode weaver expects. */
        private void emitGetter(JavaWriter writer, Field field) throws IOException {
            // Getter
            writer.beginMethod(
                    field.type,                   // Return type
                    "get" + field.name,           // Method name
                    EnumSet.of(Modifier.PUBLIC)); // Modifiers
            writer.emitStatement("return realmGet$" + field.name + "()");
            writer.endMethod();
            // Realm Getter
            writer.beginMethod(
                    field.type,                   // Return type
                    "realmGet$" + field.name,     // Method name
                    EnumSet.of(Modifier.PUBLIC)); // Modifiers
            writer.emitStatement("return " + field.name);
            writer.endMethod();
        }
    }

    // Holds the rendered source; exposed through getCharContent().
    private final StringWriter stringWriter;

    private RealmSyntheticTestClass(StringWriter stringWriter, String name) {
        super(URI.create(name + ".java"), Kind.SOURCE);
        this.stringWriter = stringWriter;
    }

    @Override
    public CharSequence getCharContent(boolean ignoreEncodingErrors) throws IOException {
        return stringWriter.getBuffer();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.db.compaction;
import java.io.IOException;
import java.util.*;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Predicate;
import com.google.common.base.Predicates;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Sets;
import com.google.common.primitives.Ints;
import org.apache.cassandra.io.sstable.format.SSTableReader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.db.ColumnFamilyStore;
import org.apache.cassandra.db.RowPosition;
import org.apache.cassandra.dht.Bounds;
import org.apache.cassandra.dht.Range;
import org.apache.cassandra.dht.Token;
import org.apache.cassandra.service.StorageService;
import org.apache.cassandra.utils.Pair;
public class LeveledManifest
{
    private static final Logger logger = LoggerFactory.getLogger(LeveledManifest.class);
    /**
     * limit the number of L0 sstables we do at once, because compaction bloom filter creation
     * uses a pessimistic estimate of how many keys overlap (none), so we risk wasting memory
     * or even OOMing when compacting highly overlapping sstables
     */
    private static final int MAX_COMPACTING_L0 = 32;
    /**
     * If we go this many rounds without compacting
     * in the highest level, we start bringing in sstables from
     * that level into lower level compactions
     */
    private static final int NO_COMPACTION_LIMIT = 25;
    // The column family whose sstables this manifest tracks.
    private final ColumnFamilyStore cfs;
    // generations[i] holds the sstables currently assigned to level i.
    @VisibleForTesting
    protected final List<SSTableReader>[] generations;
    // Last key compacted per level; used to resume candidate selection where the
    // previous compaction of that level left off (see getCandidatesFor).
    private final RowPosition[] lastCompactedKeys;
    // Target sstable size in bytes, converted from the MB value given to the constructor.
    private final long maxSSTableSizeInBytes;
    // STCS options used when falling back to size-tiered compaction in L0.
    private final SizeTieredCompactionStrategyOptions options;
    // Per-level count of compaction rounds since the level last participated;
    // consulted against NO_COMPACTION_LIMIT in getOverlappingStarvedSSTables.
    private final int [] compactionCounter;
LeveledManifest(ColumnFamilyStore cfs, int maxSSTableSizeInMB, SizeTieredCompactionStrategyOptions options)
{
this.cfs = cfs;
this.maxSSTableSizeInBytes = maxSSTableSizeInMB * 1024L * 1024L;
this.options = options;
// allocate enough generations for a PB of data, with a 1-MB sstable size. (Note that if maxSSTableSize is
// updated, we will still have sstables of the older, potentially smaller size. So don't make this
// dependent on maxSSTableSize.)
int n = (int) Math.log10(1000 * 1000 * 1000);
generations = new List[n];
lastCompactedKeys = new RowPosition[n];
for (int i = 0; i < generations.length; i++)
{
generations[i] = new ArrayList<>();
lastCompactedKeys[i] = cfs.partitioner.getMinimumToken().minKeyBound();
}
compactionCounter = new int[n];
}
public static LeveledManifest create(ColumnFamilyStore cfs, int maxSSTableSize, List<SSTableReader> sstables)
{
return create(cfs, maxSSTableSize, sstables, new SizeTieredCompactionStrategyOptions());
}
public static LeveledManifest create(ColumnFamilyStore cfs, int maxSSTableSize, Iterable<SSTableReader> sstables, SizeTieredCompactionStrategyOptions options)
{
LeveledManifest manifest = new LeveledManifest(cfs, maxSSTableSize, options);
// ensure all SSTables are in the manifest
for (SSTableReader ssTableReader : sstables)
{
manifest.add(ssTableReader);
}
for (int i = 1; i < manifest.getAllLevelSize().length; i++)
{
manifest.repairOverlappingSSTables(i);
}
return manifest;
}
    /**
     * Adds {@code reader} to the level recorded in its own metadata, if that can
     * be done without creating a key-range overlap in that level; otherwise the
     * sstable's level metadata is rewritten to 0 and it is added to L0 (which is
     * allowed to contain overlapping sstables).
     */
    public synchronized void add(SSTableReader reader)
    {
        int level = reader.getSSTableLevel();
        assert level < generations.length : "Invalid level " + level + " out of " + (generations.length - 1);
        logDistribution();
        if (canAddSSTable(reader))
        {
            // adding the sstable does not cause overlap in the level
            logger.debug("Adding {} to L{}", reader, level);
            generations[level].add(reader);
        }
        else
        {
            // this can happen if:
            // * a compaction has promoted an overlapping sstable to the given level,
            //   or was also supposed to add an sstable at the given level.
            // * we are moving sstables from unrepaired to repaired and the sstable
            //   would cause overlap
            //
            // The add(..):ed sstable will be sent to level 0
            try
            {
                reader.descriptor.getMetadataSerializer().mutateLevel(reader.descriptor, 0);
                reader.reloadSSTableMetadata();
            }
            catch (IOException e)
            {
                // Best-effort: the in-memory manifest still treats it as L0 even if
                // the on-disk level could not be rewritten; it will be rediscovered
                // (and re-demoted) at restart.
                logger.error("Could not change sstable level - adding it at level 0 anyway, we will find it at restart.", e);
            }
            generations[0].add(reader);
        }
    }
public synchronized void replace(Collection<SSTableReader> removed, Collection<SSTableReader> added)
{
assert !removed.isEmpty(); // use add() instead of promote when adding new sstables
logDistribution();
if (logger.isDebugEnabled())
logger.debug("Replacing [{}]", toString(removed));
// the level for the added sstables is the max of the removed ones,
// plus one if the removed were all on the same level
int minLevel = Integer.MAX_VALUE;
for (SSTableReader sstable : removed)
{
int thisLevel = remove(sstable);
minLevel = Math.min(minLevel, thisLevel);
}
// it's valid to do a remove w/o an add (e.g. on truncate)
if (added.isEmpty())
return;
if (logger.isDebugEnabled())
logger.debug("Adding [{}]", toString(added));
for (SSTableReader ssTableReader : added)
add(ssTableReader);
lastCompactedKeys[minLevel] = SSTableReader.sstableOrdering.max(added).last;
}
public synchronized void repairOverlappingSSTables(int level)
{
SSTableReader previous = null;
Collections.sort(generations[level], SSTableReader.sstableComparator);
List<SSTableReader> outOfOrderSSTables = new ArrayList<>();
for (SSTableReader current : generations[level])
{
if (previous != null && current.first.compareTo(previous.last) <= 0)
{
logger.warn(String.format("At level %d, %s [%s, %s] overlaps %s [%s, %s]. This could be caused by a bug in Cassandra 1.1.0 .. 1.1.3 or due to the fact that you have dropped sstables from another node into the data directory. " +
"Sending back to L0. If you didn't drop in sstables, and have not yet run scrub, you should do so since you may also have rows out-of-order within an sstable",
level, previous, previous.first, previous.last, current, current.first, current.last));
outOfOrderSSTables.add(current);
}
else
{
previous = current;
}
}
if (!outOfOrderSSTables.isEmpty())
{
for (SSTableReader sstable : outOfOrderSSTables)
sendBackToL0(sstable);
}
}
/**
* Checks if adding the sstable creates an overlap in the level
* @param sstable the sstable to add
* @return true if it is safe to add the sstable in the level.
*/
private boolean canAddSSTable(SSTableReader sstable)
{
int level = sstable.getSSTableLevel();
if (level == 0)
return true;
List<SSTableReader> copyLevel = new ArrayList<>(generations[level]);
copyLevel.add(sstable);
Collections.sort(copyLevel, SSTableReader.sstableComparator);
SSTableReader previous = null;
for (SSTableReader current : copyLevel)
{
if (previous != null && current.first.compareTo(previous.last) <= 0)
return false;
previous = current;
}
return true;
}
    /**
     * Demotes an sstable to L0: removes it from its current level, rewrites its
     * on-disk level metadata to 0, reloads the metadata, and re-adds it.
     *
     * @throws RuntimeException if the metadata cannot be rewritten or reloaded
     */
    private synchronized void sendBackToL0(SSTableReader sstable)
    {
        remove(sstable);
        try
        {
            sstable.descriptor.getMetadataSerializer().mutateLevel(sstable.descriptor, 0);
            sstable.reloadSSTableMetadata();
            add(sstable);
        }
        catch (IOException e)
        {
            throw new RuntimeException("Could not reload sstable meta data", e);
        }
    }
private String toString(Collection<SSTableReader> sstables)
{
StringBuilder builder = new StringBuilder();
for (SSTableReader sstable : sstables)
{
builder.append(sstable.descriptor.cfname)
.append('-')
.append(sstable.descriptor.generation)
.append("(L")
.append(sstable.getSSTableLevel())
.append("), ");
}
return builder.toString();
}
public static long maxBytesForLevel(int level, long maxSSTableSizeInBytes)
{
if (level == 0)
return 4L * maxSSTableSizeInBytes;
double bytes = Math.pow(10, level) * maxSSTableSizeInBytes;
if (bytes > Long.MAX_VALUE)
throw new RuntimeException("At most " + Long.MAX_VALUE + " bytes may be in a compaction level; your maxSSTableSize must be absurdly high to compute " + bytes);
return (long) bytes;
}
    /**
     * @return highest-priority sstables to compact, and level to compact them to
     * If no compactions are necessary, will return null
     */
    public synchronized CompactionCandidate getCompactionCandidates()
    {
        // during bootstrap we only do size tiering in L0 to make sure
        // the streamed files can be placed in their original levels
        if (StorageService.instance.isBootstrapMode())
        {
            List<SSTableReader> mostInteresting = getSSTablesForSTCS(getLevel(0));
            if (!mostInteresting.isEmpty())
            {
                logger.info("Bootstrapping - doing STCS in L0");
                return new CompactionCandidate(mostInteresting, 0, Long.MAX_VALUE);
            }
            return null;
        }
        // LevelDB gives each level a score of how much data it contains vs its ideal amount, and
        // compacts the level with the highest score. But this falls apart spectacularly once you
        // get behind. Consider this set of levels:
        // L0: 988 [ideal: 4]
        // L1: 117 [ideal: 10]
        // L2: 12  [ideal: 100]
        //
        // The problem is that L0 has a much higher score (almost 250) than L1 (11), so what we'll
        // do is compact a batch of MAX_COMPACTING_L0 sstables with all 117 L1 sstables, and put the
        // result (say, 120 sstables) in L1. Then we'll compact the next batch of MAX_COMPACTING_L0,
        // and so forth. So we spend most of our i/o rewriting the L1 data with each batch.
        //
        // If we could just do *all* L0 a single time with L1, that would be ideal. But we can't
        // -- see the javadoc for MAX_COMPACTING_L0.
        //
        // LevelDB's way around this is to simply block writes if L0 compaction falls behind.
        // We don't have that luxury.
        //
        // So instead, we
        // 1) force compacting higher levels first, which minimizes the i/o needed to compact
        //    optimially which gives us a long term win, and
        // 2) if L0 falls behind, we will size-tiered compact it to reduce read overhead until
        //    we can catch up on the higher levels.
        //
        // This isn't a magic wand -- if you are consistently writing too fast for LCS to keep
        // up, you're still screwed. But if instead you have intermittent bursts of activity,
        // it can help a lot.
        for (int i = generations.length - 1; i > 0; i--)
        {
            List<SSTableReader> sstables = getLevel(i);
            if (sstables.isEmpty())
                continue; // mostly this just avoids polluting the debug log with zero scores
            // we want to calculate score excluding compacting ones
            Set<SSTableReader> sstablesInLevel = Sets.newHashSet(sstables);
            Set<SSTableReader> remaining = Sets.difference(sstablesInLevel, cfs.getDataTracker().getCompacting());
            double score = (double) SSTableReader.getTotalBytes(remaining) / (double)maxBytesForLevel(i, maxSSTableSizeInBytes);
            logger.debug("Compaction score for level {} is {}", i, score);
            // Threshold is slightly above 1.0 so a level must be measurably over-full
            // before we bother compacting it.
            if (score > 1.001)
            {
                // before proceeding with a higher level, let's see if L0 is far enough behind to warrant STCS
                if (!DatabaseDescriptor.getDisableSTCSInL0() && getLevel(0).size() > MAX_COMPACTING_L0)
                {
                    List<SSTableReader> mostInteresting = getSSTablesForSTCS(getLevel(0));
                    if (!mostInteresting.isEmpty())
                    {
                        logger.debug("L0 is too far behind, performing size-tiering there first");
                        return new CompactionCandidate(mostInteresting, 0, Long.MAX_VALUE);
                    }
                }
                // L0 is fine, proceed with this level
                Collection<SSTableReader> candidates = getCandidatesFor(i);
                if (!candidates.isEmpty())
                {
                    int nextLevel = getNextLevel(candidates);
                    candidates = getOverlappingStarvedSSTables(nextLevel, candidates);
                    if (logger.isDebugEnabled())
                        logger.debug("Compaction candidates for L{} are {}", i, toString(candidates));
                    return new CompactionCandidate(candidates, nextLevel, cfs.getCompactionStrategy().getMaxSSTableBytes());
                }
                else
                {
                    logger.debug("No compaction candidates for L{}", i);
                }
            }
        }
        // Higher levels are happy, time for a standard, non-STCS L0 compaction
        if (getLevel(0).isEmpty())
            return null;
        Collection<SSTableReader> candidates = getCandidatesFor(0);
        if (candidates.isEmpty())
            return null;
        return new CompactionCandidate(candidates, getNextLevel(candidates), cfs.getCompactionStrategy().getMaxSSTableBytes());
    }
private List<SSTableReader> getSSTablesForSTCS(Collection<SSTableReader> sstables)
{
Iterable<SSTableReader> candidates = cfs.getDataTracker().getUncompactingSSTables(sstables);
List<Pair<SSTableReader,Long>> pairs = SizeTieredCompactionStrategy.createSSTableAndLengthPairs(AbstractCompactionStrategy.filterSuspectSSTables(candidates));
List<List<SSTableReader>> buckets = SizeTieredCompactionStrategy.getBuckets(pairs,
options.bucketHigh,
options.bucketLow,
options.minSSTableSize);
return SizeTieredCompactionStrategy.mostInterestingBucket(buckets, 4, 32);
}
    /**
     * If we do something that makes many levels contain too little data (cleanup, change sstable size) we will "never"
     * compact the high levels.
     *
     * This method finds if we have gone many compaction rounds without doing any high-level compaction, if so
     * we start bringing in one sstable from the highest level until that level is either empty or is doing compaction.
     *
     * @param targetLevel the level the candidates will be compacted into
     * @param candidates the original sstables to compact
     * @return {@code candidates}, possibly extended with one starved high-level
     *         sstable that fits entirely within the candidates' key range
     */
    private Collection<SSTableReader> getOverlappingStarvedSSTables(int targetLevel, Collection<SSTableReader> candidates)
    {
        Set<SSTableReader> withStarvedCandidate = new HashSet<>(candidates);
        // Age every level's starvation counter, then reset the one being compacted into.
        for (int i = generations.length - 1; i > 0; i--)
            compactionCounter[i]++;
        compactionCounter[targetLevel] = 0;
        if (logger.isDebugEnabled())
        {
            for (int j = 0; j < compactionCounter.length; j++)
                logger.debug("CompactionCounter: {}: {}", j, compactionCounter[j]);
        }
        // Walk from the highest level down; only the first non-empty level is
        // considered (both branches below return).
        for (int i = generations.length - 1; i > 0; i--)
        {
            if (getLevelSize(i) > 0)
            {
                if (compactionCounter[i] > NO_COMPACTION_LIMIT)
                {
                    // we try to find an sstable that is fully contained within  the boundaries we are compacting;
                    // say we are compacting 3 sstables: 0->30 in L1 and 0->12, 12->33 in L2
                    // this means that we will not create overlap in L2 if we add an sstable
                    // contained within 0 -> 33 to the compaction
                    RowPosition max = null;
                    RowPosition min = null;
                    for (SSTableReader candidate : candidates)
                    {
                        if (min == null || candidate.first.compareTo(min) < 0)
                            min = candidate.first;
                        if (max == null || candidate.last.compareTo(max) > 0)
                            max = candidate.last;
                    }
                    Set<SSTableReader> compacting = cfs.getDataTracker().getCompacting();
                    Range<RowPosition> boundaries = new Range<>(min, max);
                    for (SSTableReader sstable : getLevel(i))
                    {
                        Range<RowPosition> r = new Range<RowPosition>(sstable.first, sstable.last);
                        if (boundaries.contains(r) && !compacting.contains(sstable))
                        {
                            // At most one starved sstable is pulled in per call.
                            logger.info("Adding high-level (L{}) {} to candidates", sstable.getSSTableLevel(), sstable);
                            withStarvedCandidate.add(sstable);
                            return withStarvedCandidate;
                        }
                    }
                }
                return candidates;
            }
        }
        return candidates;
    }
public synchronized int getLevelSize(int i)
{
if (i >= generations.length)
throw new ArrayIndexOutOfBoundsException("Maximum valid generation is " + (generations.length - 1));
return getLevel(i).size();
}
public synchronized int[] getAllLevelSize()
{
int[] counts = new int[generations.length];
for (int i = 0; i < counts.length; i++)
counts[i] = getLevel(i).size();
return counts;
}
private void logDistribution()
{
if (logger.isDebugEnabled())
{
for (int i = 0; i < generations.length; i++)
{
if (!getLevel(i).isEmpty())
{
logger.debug("L{} contains {} SSTables ({} bytes) in {}",
i, getLevel(i).size(), SSTableReader.getTotalBytes(getLevel(i)), this);
}
}
}
}
    /**
     * Removes {@code reader} from the level recorded in its own metadata.
     *
     * @return the level the reader was removed from
     */
    @VisibleForTesting
    public int remove(SSTableReader reader)
    {
        int level = reader.getSSTableLevel();
        assert level >= 0 : reader + " not present in manifest: "+level;
        generations[level].remove(reader);
        return level;
    }
private static Set<SSTableReader> overlapping(Collection<SSTableReader> candidates, Iterable<SSTableReader> others)
{
assert !candidates.isEmpty();
/*
* Picking each sstable from others that overlap one of the sstable of candidates is not enough
* because you could have the following situation:
* candidates = [ s1(a, c), s2(m, z) ]
* others = [ s3(e, g) ]
* In that case, s2 overlaps none of s1 or s2, but if we compact s1 with s2, the resulting sstable will
* overlap s3, so we must return s3.
*
* Thus, the correct approach is to pick sstables overlapping anything between the first key in all
* the candidate sstables, and the last.
*/
Iterator<SSTableReader> iter = candidates.iterator();
SSTableReader sstable = iter.next();
Token first = sstable.first.getToken();
Token last = sstable.last.getToken();
while (iter.hasNext())
{
sstable = iter.next();
first = first.compareTo(sstable.first.getToken()) <= 0 ? first : sstable.first.getToken();
last = last.compareTo(sstable.last.getToken()) >= 0 ? last : sstable.last.getToken();
}
return overlapping(first, last, others);
}
    /**
     * @return sstables from {@code others} whose token ranges overlap {@code sstable}'s.
     */
    @VisibleForTesting
    static Set<SSTableReader> overlapping(SSTableReader sstable, Iterable<SSTableReader> others)
    {
        return overlapping(sstable.first.getToken(), sstable.last.getToken(), others);
    }
/**
* @return sstables from @param sstables that contain keys between @param start and @param end, inclusive.
*/
private static Set<SSTableReader> overlapping(Token start, Token end, Iterable<SSTableReader> sstables)
{
assert start.compareTo(end) <= 0;
Set<SSTableReader> overlapped = new HashSet<>();
Bounds<Token> promotedBounds = new Bounds<Token>(start, end);
for (SSTableReader candidate : sstables)
{
Bounds<Token> candidateBounds = new Bounds<Token>(candidate.first.getToken(), candidate.last.getToken());
if (candidateBounds.intersects(promotedBounds))
overlapped.add(candidate);
}
return overlapped;
}
    // Matches sstables that have been marked suspect, so candidate selection can
    // exclude them (see getCandidatesFor).
    private static final Predicate<SSTableReader> suspectP = new Predicate<SSTableReader>()
    {
        public boolean apply(SSTableReader candidate)
        {
            return candidate.isMarkedSuspect();
        }
    };
    /**
     * @return highest-priority sstables to compact for the given level.
     * If no compactions are possible (because of concurrent compactions or because some sstables are blacklisted
     * for prior failure), will return an empty list. Never returns null.
     */
    private Collection<SSTableReader> getCandidatesFor(int level)
    {
        assert !getLevel(level).isEmpty();
        logger.debug("Choosing candidates for L{}", level);
        final Set<SSTableReader> compacting = cfs.getDataTracker().getCompacting();
        if (level == 0)
        {
            Set<SSTableReader> compactingL0 = getCompacting(0);
            // Compute the key span currently being compacted in L0; new candidates
            // must not overlap it (checked below).
            RowPosition lastCompactingKey = null;
            RowPosition firstCompactingKey = null;
            for (SSTableReader candidate : compactingL0)
            {
                if (firstCompactingKey == null || candidate.first.compareTo(firstCompactingKey) < 0)
                    firstCompactingKey = candidate.first;
                if (lastCompactingKey == null || candidate.last.compareTo(lastCompactingKey) > 0)
                    lastCompactingKey = candidate.last;
            }
            // L0 is the dumping ground for new sstables which thus may overlap each other.
            //
            // We treat L0 compactions specially:
            // 1a. add sstables to the candidate set until we have at least maxSSTableSizeInMB
            // 1b. prefer choosing older sstables as candidates, to newer ones
            // 1c. any L0 sstables that overlap a candidate, will also become candidates
            // 2. At most MAX_COMPACTING_L0 sstables from L0 will be compacted at once
            // 3. If total candidate size is less than maxSSTableSizeInMB, we won't bother compacting with L1,
            //    and the result of the compaction will stay in L0 instead of being promoted (see promote())
            //
            // Note that we ignore suspect-ness of L1 sstables here, since if an L1 sstable is suspect we're
            // basically screwed, since we expect all or most L0 sstables to overlap with each L1 sstable.
            // So if an L1 sstable is suspect we can't do much besides try anyway and hope for the best.
            Set<SSTableReader> candidates = new HashSet<>();
            Set<SSTableReader> remaining = new HashSet<>();
            Iterables.addAll(remaining, Iterables.filter(getLevel(0), Predicates.not(suspectP)));
            for (SSTableReader sstable : ageSortedSSTables(remaining))
            {
                if (candidates.contains(sstable))
                    continue;
                Sets.SetView<SSTableReader> overlappedL0 = Sets.union(Collections.singleton(sstable), overlapping(sstable, remaining));
                if (!Sets.intersection(overlappedL0, compactingL0).isEmpty())
                    continue;
                for (SSTableReader newCandidate : overlappedL0)
                {
                    // Only accept sstables that don't touch the span already being compacted.
                    if (firstCompactingKey == null || lastCompactingKey == null || overlapping(firstCompactingKey.getToken(), lastCompactingKey.getToken(), Arrays.asList(newCandidate)).size() == 0)
                        candidates.add(newCandidate);
                    remaining.remove(newCandidate);
                }
                if (candidates.size() > MAX_COMPACTING_L0)
                {
                    // limit to only the MAX_COMPACTING_L0 oldest candidates
                    candidates = new HashSet<>(ageSortedSSTables(candidates).subList(0, MAX_COMPACTING_L0));
                    break;
                }
            }
            // If we collected more than a full sstable's worth of data, pull in the
            // overlapping L1 sstables so the result can be promoted; otherwise the
            // candidates stay L0-only and the compaction result remains in L0.
            if (SSTableReader.getTotalBytes(candidates) > maxSSTableSizeInBytes)
            {
                // add sstables from L1 that overlap candidates
                // if the overlapping ones are already busy in a compaction, leave it out.
                // TODO try to find a set of L0 sstables that only overlaps with non-busy L1 sstables
                Set<SSTableReader> l1overlapping = overlapping(candidates, getLevel(1));
                if (Sets.intersection(l1overlapping, compacting).size() > 0)
                    return Collections.emptyList();
                if (!overlapping(candidates, compactingL0).isEmpty())
                    return Collections.emptyList();
                candidates = Sets.union(candidates, l1overlapping);
            }
            if (candidates.size() < 2)
                return Collections.emptyList();
            else
                return candidates;
        }
        // for non-L0 compactions, pick up where we left off last time
        Collections.sort(getLevel(level), SSTableReader.sstableComparator);
        int start = 0; // handles case where the prior compaction touched the very last range
        for (int i = 0; i < getLevel(level).size(); i++)
        {
            SSTableReader sstable = getLevel(level).get(i);
            if (sstable.first.compareTo(lastCompactedKeys[level]) > 0)
            {
                start = i;
                break;
            }
        }
        // look for a non-suspect keyspace to compact with, starting with where we left off last time,
        // and wrapping back to the beginning of the generation if necessary
        for (int i = 0; i < getLevel(level).size(); i++)
        {
            SSTableReader sstable = getLevel(level).get((start + i) % getLevel(level).size());
            Set<SSTableReader> candidates = Sets.union(Collections.singleton(sstable), overlapping(sstable, getLevel(level + 1)));
            if (Iterables.any(candidates, suspectP))
                continue;
            if (Sets.intersection(candidates, compacting).isEmpty())
                return candidates;
        }
        // all the sstables were suspect or overlapped with something suspect
        return Collections.emptyList();
    }
/**
 * Returns the sstables on the given level that are currently being compacted.
 */
private Set<SSTableReader> getCompacting(int level)
{
    // Intersect the tracker's global compacting set with this level's sstables.
    Set<SSTableReader> result = new HashSet<>(cfs.getDataTracker().getCompacting());
    result.retainAll(new HashSet<>(getLevel(level)));
    return result;
}
/**
 * Returns a new list containing the given sstables ordered by maximum data
 * timestamp (oldest data first); the caller's collection is left untouched.
 */
private List<SSTableReader> ageSortedSSTables(Collection<SSTableReader> candidates)
{
    List<SSTableReader> sorted = new ArrayList<>(candidates);
    sorted.sort(SSTableReader.maxTimestampComparator);
    return sorted;
}
@Override
public String toString()
{
    // Identity-style tag (default int hashCode) keeps log lines short.
    return String.format("Manifest@%d", hashCode());
}
/**
 * Returns the index of the highest level that currently holds sstables,
 * or 0 when every level is empty.
 * NOTE(review): despite the name this is a level index, not a count;
 * preserved as-is for caller compatibility.
 */
public int getLevelCount()
{
    int level = generations.length - 1;
    while (level >= 0)
    {
        if (!getLevel(level).isEmpty())
            return level;
        level--;
    }
    return 0;
}
/**
 * Returns an immutable snapshot of the given level, sorted by {@code comparator}.
 * Synchronized so the copy is taken against a consistent view of the level.
 */
public synchronized SortedSet<SSTableReader> getLevelSorted(int level, Comparator<SSTableReader> comparator)
{
return ImmutableSortedSet.copyOf(comparator, getLevel(level));
}
/**
 * Returns the live sstable list for level {@code i}.
 * NOTE(review): no bounds check (callers must pass 0 <= i < generations.length),
 * and the internal list is returned directly, not a copy.
 */
public List<SSTableReader> getLevel(int i)
{
return generations[i];
}
/**
 * Estimates the number of pending compaction tasks across all levels.
 * For each level, the bytes in excess of roughly the level's maximum
 * (with a 0.1% tolerance) are divided by the target sstable size and
 * rounded up; the per-level estimates are summed.
 */
public synchronized int getEstimatedTasks()
{
long tasks = 0;
long[] estimated = new long[generations.length];
for (int i = generations.length - 1; i >= 0; i--)
{
List<SSTableReader> sstables = getLevel(i);
// If there is 1 byte over TBL - (MBL * 1.001), there is still a task left, so we need to round up.
estimated[i] = (long)Math.ceil((double)Math.max(0L, SSTableReader.getTotalBytes(sstables) - (long)(maxBytesForLevel(i, maxSSTableSizeInBytes) * 1.001)) / (double)maxSSTableSizeInBytes);
tasks += estimated[i];
}
logger.debug("Estimating {} compactions to do for {}.{}",
Arrays.toString(estimated), cfs.keyspace.getName(), cfs.name);
// checkedCast throws rather than silently truncating a (pathological) overflow
return Ints.checkedCast(tasks);
}
/**
 * Computes the level the given group of sstables should be written to after
 * being compacted together.
 *
 * If every sstable is already in L0 and the group's total size is below one
 * target sstable, the result stays in L0. Otherwise a group from a single
 * level is promoted one level up, and a group spanning levels lands on its
 * maximum level.
 *
 * @param sstables the sstables about to be compacted; must be non-empty
 * @return the destination level (0, or a positive level)
 * @throws IllegalArgumentException if {@code sstables} is empty
 * @throws IllegalStateException if the computed destination level is not positive
 */
public int getNextLevel(Collection<SSTableReader> sstables)
{
    // Guard: an empty group would leave min/max at their sentinel values and
    // silently produce a garbage level below.
    if (sstables.isEmpty())
        throw new IllegalArgumentException("Expected at least one sstable to compute the next level");
    int maximumLevel = Integer.MIN_VALUE;
    int minimumLevel = Integer.MAX_VALUE;
    for (SSTableReader sstable : sstables)
    {
        maximumLevel = Math.max(sstable.getSSTableLevel(), maximumLevel);
        minimumLevel = Math.min(sstable.getSSTableLevel(), minimumLevel);
    }
    int newLevel;
    if (minimumLevel == 0 && minimumLevel == maximumLevel && SSTableReader.getTotalBytes(sstables) < maxSSTableSizeInBytes)
    {
        newLevel = 0;
    }
    else
    {
        newLevel = minimumLevel == maximumLevel ? maximumLevel + 1 : maximumLevel;
        // Explicit check instead of `assert`: assertions are disabled by default
        // at runtime, and promoting to a non-positive level would corrupt the manifest.
        if (newLevel <= 0)
            throw new IllegalStateException("Next compaction level computed as " + newLevel + "; expected a positive level");
    }
    return newLevel;
}
/**
 * Returns every sstable tracked by this manifest, across all levels,
 * flattened into a single set (duplicates collapse).
 */
public Iterable<SSTableReader> getAllSSTables()
{
    Set<SSTableReader> all = new HashSet<>();
    for (int i = 0; i < generations.length; i++)
        all.addAll(generations[i]);
    return all;
}
/**
 * Immutable descriptor of a chosen compaction: which sstables to compact,
 * the destination level, and the maximum size for each produced sstable.
 */
public static class CompactionCandidate
{
// sstables participating in this compaction (final reference; contents are
// presumed not to be mutated by callers -- TODO confirm)
public final Collection<SSTableReader> sstables;
// level the compaction output should be written to
public final int level;
// size cap (in bytes) for each sstable produced by this compaction
public final long maxSSTableBytes;
public CompactionCandidate(Collection<SSTableReader> sstables, int level, long maxSSTableBytes)
{
this.sstables = sstables;
this.level = level;
this.maxSSTableBytes = maxSSTableBytes;
}
}
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/datastore/v1beta3/datastore.proto
package com.google.datastore.v1beta3;
/**
* <pre>
* The request for [Datastore.BeginTransaction][google.datastore.v1beta3.Datastore.BeginTransaction].
* </pre>
*
* Protobuf type {@code google.datastore.v1beta3.BeginTransactionRequest}
*/
public final class BeginTransactionRequest extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.datastore.v1beta3.BeginTransactionRequest)
BeginTransactionRequestOrBuilder {
// Use BeginTransactionRequest.newBuilder() to construct.
private BeginTransactionRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private BeginTransactionRequest() {
projectId_ = "";
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return com.google.protobuf.UnknownFieldSet.getDefaultInstance();
}
/**
 * Parses one serialized {@code BeginTransactionRequest} from {@code input}.
 * Recognized tags: 0 ends the message (end of stream) and 66 is field 8
 * (project_id, wire type 2 / length-delimited). Any other tag is skipped;
 * this generated variant drops unknown fields entirely.
 *
 * NOTE(review): the {@code default} label appears before {@code case 66} --
 * unusual ordering for generated code, but Java switch dispatch is by value,
 * so tag 66 still reaches its case and only genuinely unknown tags hit default.
 */
private BeginTransactionRequest(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
int mutable_bitField0_ = 0; // generated scaffolding; unused for this message
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!input.skipField(tag)) {
done = true;
}
break;
}
case 66: {
java.lang.String s = input.readStringRequireUtf8();
projectId_ = s;
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
// Wrap plain I/O failures so callers see a single exception type.
throw new com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.datastore.v1beta3.DatastoreProto.internal_static_google_datastore_v1beta3_BeginTransactionRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.datastore.v1beta3.DatastoreProto.internal_static_google_datastore_v1beta3_BeginTransactionRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.datastore.v1beta3.BeginTransactionRequest.class, com.google.datastore.v1beta3.BeginTransactionRequest.Builder.class);
}
public static final int PROJECT_ID_FIELD_NUMBER = 8;
private volatile java.lang.Object projectId_;
/**
 * <pre>
 * The ID of the project against which to make the request.
 * </pre>
 *
 * <code>optional string project_id = 8;</code>
 *
 * @return the project id; never null (defaults to the empty string)
 */
public java.lang.String getProjectId() {
java.lang.Object ref = projectId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
// Lazily decode the ByteString and cache the String back into the field.
// The field is volatile and String is immutable, so this race is benign.
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
projectId_ = s;
return s;
}
}
/**
* <pre>
* The ID of the project against which to make the request.
* </pre>
*
* <code>optional string project_id = 8;</code>
*/
public com.google.protobuf.ByteString
getProjectIdBytes() {
java.lang.Object ref = projectId_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
projectId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
// Tri-state memo: -1 = not yet computed, 0 = false, 1 = true.
// proto3 has no required fields, so this always resolves to true.
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
// Serializes this message. Per proto3 semantics a default-valued (empty)
// project_id is not written at all; otherwise it is emitted as field 8.
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (!getProjectIdBytes().isEmpty()) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 8, projectId_);
}
}
// Computes (and memoizes) the wire size of this message; -1 marks
// "not yet computed". Mirrors writeTo(): empty project_id contributes nothing.
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!getProjectIdBytes().isEmpty()) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(8, projectId_);
}
memoizedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
// Generated value equality: two requests are equal iff their project_id
// strings are equal (the only field; unknown fields are dropped by this class).
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.datastore.v1beta3.BeginTransactionRequest)) {
return super.equals(obj);
}
com.google.datastore.v1beta3.BeginTransactionRequest other = (com.google.datastore.v1beta3.BeginTransactionRequest) obj;
boolean result = true;
result = result && getProjectId()
.equals(other.getProjectId());
return result;
}
@java.lang.Override
public int hashCode() {
// Memoized; 0 is the "not yet computed" sentinel, so a hash that happens
// to be exactly 0 is recomputed on every call (benign).
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
hash = (37 * hash) + PROJECT_ID_FIELD_NUMBER;
hash = (53 * hash) + getProjectId().hashCode();
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.datastore.v1beta3.BeginTransactionRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.datastore.v1beta3.BeginTransactionRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.datastore.v1beta3.BeginTransactionRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.datastore.v1beta3.BeginTransactionRequest parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.datastore.v1beta3.BeginTransactionRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.datastore.v1beta3.BeginTransactionRequest parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.datastore.v1beta3.BeginTransactionRequest parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.datastore.v1beta3.BeginTransactionRequest parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.datastore.v1beta3.BeginTransactionRequest parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.datastore.v1beta3.BeginTransactionRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.datastore.v1beta3.BeginTransactionRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* <pre>
* The request for [Datastore.BeginTransaction][google.datastore.v1beta3.Datastore.BeginTransaction].
* </pre>
*
* Protobuf type {@code google.datastore.v1beta3.BeginTransactionRequest}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.datastore.v1beta3.BeginTransactionRequest)
com.google.datastore.v1beta3.BeginTransactionRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.datastore.v1beta3.DatastoreProto.internal_static_google_datastore_v1beta3_BeginTransactionRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.datastore.v1beta3.DatastoreProto.internal_static_google_datastore_v1beta3_BeginTransactionRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.datastore.v1beta3.BeginTransactionRequest.class, com.google.datastore.v1beta3.BeginTransactionRequest.Builder.class);
}
// Construct using com.google.datastore.v1beta3.BeginTransactionRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
public Builder clear() {
super.clear();
projectId_ = "";
return this;
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.datastore.v1beta3.DatastoreProto.internal_static_google_datastore_v1beta3_BeginTransactionRequest_descriptor;
}
public com.google.datastore.v1beta3.BeginTransactionRequest getDefaultInstanceForType() {
return com.google.datastore.v1beta3.BeginTransactionRequest.getDefaultInstance();
}
public com.google.datastore.v1beta3.BeginTransactionRequest build() {
com.google.datastore.v1beta3.BeginTransactionRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public com.google.datastore.v1beta3.BeginTransactionRequest buildPartial() {
com.google.datastore.v1beta3.BeginTransactionRequest result = new com.google.datastore.v1beta3.BeginTransactionRequest(this);
result.projectId_ = projectId_;
onBuilt();
return result;
}
public Builder clone() {
return (Builder) super.clone();
}
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
Object value) {
return (Builder) super.setField(field, value);
}
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return (Builder) super.clearField(field);
}
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return (Builder) super.clearOneof(oneof);
}
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, Object value) {
return (Builder) super.setRepeatedField(field, index, value);
}
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
Object value) {
return (Builder) super.addRepeatedField(field, value);
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.datastore.v1beta3.BeginTransactionRequest) {
return mergeFrom((com.google.datastore.v1beta3.BeginTransactionRequest)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Merges another message into this builder: only non-default (non-empty)
// fields are copied, per protobuf merge semantics.
public Builder mergeFrom(com.google.datastore.v1beta3.BeginTransactionRequest other) {
if (other == com.google.datastore.v1beta3.BeginTransactionRequest.getDefaultInstance()) return this;
if (!other.getProjectId().isEmpty()) {
projectId_ = other.projectId_;
onChanged();
}
// NOTE(review): onChanged() is called a second time unconditionally by the
// generated code -- redundant but harmless. Unknown fields are not merged
// (this generated variant drops them; see setUnknownFields/mergeUnknownFields).
onChanged();
return this;
}
public final boolean isInitialized() {
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.datastore.v1beta3.BeginTransactionRequest parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (com.google.datastore.v1beta3.BeginTransactionRequest) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private java.lang.Object projectId_ = "";
/**
* <pre>
* The ID of the project against which to make the request.
* </pre>
*
* <code>optional string project_id = 8;</code>
*/
public java.lang.String getProjectId() {
java.lang.Object ref = projectId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
projectId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <pre>
* The ID of the project against which to make the request.
* </pre>
*
* <code>optional string project_id = 8;</code>
*/
public com.google.protobuf.ByteString
getProjectIdBytes() {
java.lang.Object ref = projectId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
projectId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <pre>
* The ID of the project against which to make the request.
* </pre>
*
* <code>optional string project_id = 8;</code>
*/
public Builder setProjectId(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
projectId_ = value;
onChanged();
return this;
}
/**
* <pre>
* The ID of the project against which to make the request.
* </pre>
*
* <code>optional string project_id = 8;</code>
*/
public Builder clearProjectId() {
projectId_ = getDefaultInstance().getProjectId();
onChanged();
return this;
}
/**
* <pre>
* The ID of the project against which to make the request.
* </pre>
*
* <code>optional string project_id = 8;</code>
*/
public Builder setProjectIdBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
projectId_ = value;
onChanged();
return this;
}
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return this;
}
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return this;
}
// @@protoc_insertion_point(builder_scope:google.datastore.v1beta3.BeginTransactionRequest)
}
// @@protoc_insertion_point(class_scope:google.datastore.v1beta3.BeginTransactionRequest)
private static final com.google.datastore.v1beta3.BeginTransactionRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.datastore.v1beta3.BeginTransactionRequest();
}
public static com.google.datastore.v1beta3.BeginTransactionRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<BeginTransactionRequest>
PARSER = new com.google.protobuf.AbstractParser<BeginTransactionRequest>() {
public BeginTransactionRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new BeginTransactionRequest(input, extensionRegistry);
}
};
public static com.google.protobuf.Parser<BeginTransactionRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<BeginTransactionRequest> getParserForType() {
return PARSER;
}
public com.google.datastore.v1beta3.BeginTransactionRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
| |
// Copyright (c) 2003-2014, Jodd Team (jodd.org). All Rights Reserved.
package jodd.joy.crypt;
import jodd.util.Bits;
import jodd.util.StringPool;

import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;
/**
* Threefish cipher.
* http://www.schneier.com/threefish.html
*/
public class Threefish extends BlockCipher {
// === GENERAL CONSTANTS ===
private static final long EXTENDED_KEY_SCHEDULE_CONST = 6148914691236517205L;
public static final int BLOCK_SIZE_BITS_256 = 256;
public static final int BLOCK_SIZE_BITS_512 = 512;
public static final int BLOCK_SIZE_BITS_1024 = 1024;
private static final int ROUNDS_72 = 72;
private static final int ROUNDS_80 = 80;
private static final int WORDS_4 = 4;
private static final int WORDS_8 = 8;
private static final int WORDS_16 = 16;
private static final int TWEAK_VALUES = 3;
private static final int SUBKEY_INTERVAL = 4;
// === VALUES FOR THE WORD PERMUTATION ===
/**
* Word permutation constants for PI(i) for Nw = 4.
*/
private static final int[] PI4 = {0, 3, 2, 1};
/**
* Word permutation constants for PI(i) for Nw = 8.
*/
private static final int[] PI8 = {2, 1, 4, 7, 6, 5, 0, 3};
/**
* Word permutation constants for PI(i) for Nw = 16.
*/
private static final int[] PI16 = {0, 9, 2, 13, 6, 11, 4, 15, 10, 7, 12, 3, 14, 5, 8, 1};
// === VALUES FOR THE REVERSE WORD PERMUTATION ===
/**
* Reverse word permutation constants for PI(i) for Nw = 4.
*/
private static final int[] RPI4 = {0, 3, 2, 1}; // note: RPI4 == PI4
/**
* Reverse word permutation constants for PI(i) for Nw = 8.
*/
private static final int[] RPI8 = {6, 1, 0, 7, 2, 5, 4, 3};
/**
* Reverse word permutation constants for PI(i) for Nw = 16.
*/
private static final int[] RPI16 = {0, 15, 2, 11, 6, 13, 4, 9, 14, 1, 8, 5, 10, 3, 12, 7};
// === ROTATION CONSTANTS FOR THE MIX FUNCTION ===
private static final int DEPTH_OF_D_IN_R = 8;
/**
* Rotational constants Rd,j for Nw = 4.
*/
private static final int[][] R4 = {
{5, 56},
{36, 28},
{13, 46},
{58, 44},
{26, 20},
{53, 35},
{11, 42},
{59, 50}
};
/**
* Rotational constants Rd,j for Nw = 8.
*/
private static final int[][] R8 = {
{38, 30, 50, 53},
{48, 20, 43, 31},
{34, 14, 15, 27},
{26, 12, 58, 7},
{33, 49, 8, 42},
{39, 27, 41, 14},
{29, 26, 11, 9},
{33, 51, 39, 35}
};
/**
* Rotation constants Rd,j for Nw = 16.
*/
private static final int[][] R16 = {
{55, 43, 37, 40, 16, 22, 38, 12},
{25, 25, 46, 13, 14, 13, 52, 57},
{33, 8, 18, 57, 21, 12, 32, 54},
{34, 43, 25, 60, 44, 9, 59, 34},
{28, 7, 47, 48, 51, 9, 35, 41},
{17, 6, 18, 25, 43, 42, 40, 15},
{58, 7, 32, 45, 19, 18, 2, 56},
{47, 49, 27, 58, 37, 48, 53, 56},
};
// === FIELDS CREATED DURING INSTANTIATION FOR PERFORMANCE REASONS ===
private final long[] t = new long[TWEAK_VALUES]; // initial tweak words including t2
private final long[] x = new long[2];
private final long[] y = new long[2];
// === FINAL FIELDS DETERMINED BY BLOCKSIZE ===
private final int blockSize; // block size (in bits)
private final int nr; // number of rounds depending on block size
// === FIELDS DETERMINED BY KEY SIZE DURING INIT() ===
private long[] k; // initial key words including knw
private int nw; // number of key words excluding knw
private int[] pi; // word permutation pi (depends on number of words <=> block size)
private int[] rpi; // reverse word permutation rpi (depends on number of words <=> blocksize)
private int[][] r; // rotational constants (depends on number of words <=> block size)
// === FIELDS DETERMINED BY KEY SIZE DURING INIT() FOR PERFORMANCE REASONS ===
// NOTE next fields use lazy instantiation
// NOTE performance/memory: can we even use the same array? let's not before testing
private long[] vd;
private long[] ed;
private long[] fd;
private long[] ksd;
/**
 * Threefish implementation using the specified blocksize in bits.
 * The round count follows the spec: 72 rounds for 256/512-bit blocks,
 * 80 rounds for 1024-bit blocks.
 *
 * @param blockSize either 256, 512 or 1024 (bits)
 */
public Threefish(final int blockSize) {
    super(blockSize);
    this.blockSize = blockSize;
    if (blockSize == BLOCK_SIZE_BITS_256 || blockSize == BLOCK_SIZE_BITS_512) {
        nr = ROUNDS_72;
    } else if (blockSize == BLOCK_SIZE_BITS_1024) {
        nr = ROUNDS_80;
    } else {
        throw new IllegalArgumentException("Illegal blocksize, use 256, 512 or 1024 bit values as blocksize");
    }
}
/**
* Threefish implementation using the specified blocksize in bits, specifying the number of rounds directly instead
* of using the default number of rounds depending on the blockSize. Mainly used for (performance) testing
* purposes.
*
* @param blockSize either 256, 512 or 1024 (bits)
* @param rounds the number of rounds 1..2^31, must be multiple of 4
*/
public Threefish(final int blockSize, final int rounds) {
super(blockSize);
this.blockSize = blockSize;
switch (blockSize) {
case BLOCK_SIZE_BITS_256:
case BLOCK_SIZE_BITS_512:
case BLOCK_SIZE_BITS_1024:
break;
default:
throw new IllegalArgumentException("Illegal blocksize, use 256, 512 or 1024 bit values as blocksize");
}
if (rounds <= 0 || rounds % 4 != 0) {
throw new IllegalArgumentException("Number of rounds should be at least 1 and should be a multiple of 4");
}
nr = rounds;
}
/**
 * Initialize the cipher using the key and the tweak value.
 *
 * Permutation tables, rotation constants and scratch arrays are (re)allocated
 * only when the key word count N{w} changes; repeated init() calls with the
 * same key size reuse them.
 *
 * @param key the Threefish key to use (4, 8 or 16 words)
 * @param tweak the tweak values to use (two words; the third is derived)
 */
public void init(final long[] key, final long[] tweak) {
final int newNw = key.length;
// only create new arrays if the value of N{w} changes (different key size)
if (nw != newNw) {
nw = newNw;
switch (nw) {
case WORDS_4:
pi = PI4;
rpi = RPI4;
r = R4;
break;
case WORDS_8:
pi = PI8;
rpi = RPI8;
r = R8;
break;
case WORDS_16:
pi = PI16;
rpi = RPI16;
r = R16;
break;
default:
throw new RuntimeException("Invalid threefish key");
}
this.k = new long[nw + 1];
// instantiation of these fields here for performance reasons
vd = new long[nw]; // v is the intermediate value v{d} at round d
ed = new long[nw]; // ed is the value of e{d} at round d
fd = new long[nw]; // fd is the value of f{d} at round d
ksd = new long[nw]; // ksd is the value of k{s} at round d
}
System.arraycopy(key, 0, this.k, 0, key.length);
// extended key word: k{Nw} = C xor k{0} xor ... xor k{Nw-1}
long knw = EXTENDED_KEY_SCHEDULE_CONST;
for (int i = 0; i < nw; i++) {
knw ^= this.k[i];
}
this.k[nw] = knw;
// set tweak values
t[0] = tweak[0];
t[1] = tweak[1];
t[2] = t[0] ^ t[1];
}
/**
 * Implementation of the E(K, T, P) function.
 * The K and T values should be set previously using the init() method. This version is the 64 bit implementation
 * of Threefish.
 *
 * NOTE(review): uses the shared per-instance scratch arrays (vd/ed/fd/ksd, x/y),
 * so a Threefish instance is not safe for concurrent use by multiple threads.
 *
 * @param p the initial plain text
 * @param c the final value defined as value v{d} where d = N{r}
 */
public void blockEncrypt(final long[] p, final long[] c) {
// initial value = plain
System.arraycopy(p, 0, vd, 0, nw);
for (int d = 0; d < nr; d++) { // do the rounds
// calculate e{d,i}: a subkey is injected every SUBKEY_INTERVAL (4) rounds
if (d % SUBKEY_INTERVAL == 0) {
final int s = d / SUBKEY_INTERVAL;
keySchedule(s);
for (int i = 0; i < nw; i++) {
ed[i] = vd[i] + ksd[i];
}
} else {
System.arraycopy(vd, 0, ed, 0, nw);
}
// MIX each adjacent pair of words
for (int j = 0; j < nw / 2; j++) {
x[0] = ed[j * 2];
x[1] = ed[j * 2 + 1];
mix(j, d);
fd[j * 2] = y[0];
fd[j * 2 + 1] = y[1];
}
// word permutation PI
for (int i = 0; i < nw; i++) {
vd[i] = fd[pi[i]];
}
}
// do the last keyschedule
keySchedule(nr / SUBKEY_INTERVAL);
for (int i = 0; i < nw; i++) {
c[i] = vd[i] + ksd[i];
}
}
/**
 * Implementation of the MIX function: y0 = x0 + x1 and
 * y1 = rotl(x1, R{d mod 8, j}) xor y0. Reads/writes the shared x/y scratch words.
 *
 * @param j the index in the rotation constants
 * @param d the round
 */
private void mix(final int j, final int d) {
    y[0] = x[0] + x[1];
    final int rot = r[d % DEPTH_OF_D_IN_R][j];
    // Long.rotateLeft is identical to the manual shift-or rotation for any
    // distance (it reduces mod 64, just as Java long shifts do).
    y[1] = Long.rotateLeft(x[1], rot) ^ y[0];
}
/**
 * Implementation of the D(K, T, C) function.
 * The K and T values should be set previously using the init() method. This version is the 64 bit implementation
 * of Threefish.
 *
 * Runs the encryption rounds in reverse: subtract the subkey, undo the word
 * permutation (rpi), then un-MIX each pair. Not thread-safe (shared scratch arrays).
 *
 * @param c the cipher text
 * @param p the plain text
 */
public void blockDecrypt(final long[] c, final long[] p) {
// initial value = plain
System.arraycopy(c, 0, vd, 0, nw);
for (int d = nr; d > 0; d--) { // do the rounds
// calculate e{d,i}
if (d % SUBKEY_INTERVAL == 0) {
final int s = d / SUBKEY_INTERVAL;
keySchedule(s); // calculate same keys
for (int i = 0; i < nw; i++) {
fd[i] = vd[i] - ksd[i];
}
} else {
System.arraycopy(vd, 0, fd, 0, nw);
}
// reverse word permutation
for (int i = 0; i < nw; i++) {
ed[i] = fd[rpi[i]];
}
// un-MIX each adjacent pair of words
for (int j = 0; j < nw / 2; j++) {
y[0] = ed[j * 2];
y[1] = ed[j * 2 + 1];
demix(j, d - 1);
vd[j * 2] = x[0];
vd[j * 2 + 1] = x[1];
}
}
// do the first keyschedule
keySchedule(0);
for (int i = 0; i < nw; i++) {
p[i] = vd[i] - ksd[i];
}
}
/**
 * Implementation of the un-MIX function: x1 = rotr(y1 xor y0, R{d mod 8, j})
 * and x0 = y0 - x1. Reads/writes the shared x/y scratch words.
 */
private void demix(final int j, final int d) {
    y[1] ^= y[0];
    final int rot = r[d % DEPTH_OF_D_IN_R][j];
    // Long.rotateRight is identical to the manual shift-or rotation for any
    // distance (it reduces mod 64, just as Java long shifts do).
    x[1] = Long.rotateRight(y[1], rot);
    x[0] = y[0] - x[1];
}
/**
 * Creates the subkeys.
 *
 * Each subkey word is a rotating pick from the extended key; the last three
 * words additionally absorb the tweak words and the subkey counter.
 *
 * @param s the value of the round divided by 4 (the subkey counter)
 */
private void keySchedule(final int s) {
for (int i = 0; i < nw; i++) {
// just put in the main key first
ksd[i] = k[(s + i) % (nw + 1)];
// don't add anything for i = 0,...,Nw - 4
if (i == nw - 3) { // second to last
ksd[i] += t[s % TWEAK_VALUES];
} else if (i == nw - 2) { // first to last
ksd[i] += t[(s + 1) % TWEAK_VALUES];
} else if (i == nw - 1) { // last
ksd[i] += s; // subkey counter
}
}
}
// ---------------------------------------------------------------- user friendly methods
/**
 * Initializes cipher in a simple way: the key message is UTF-8 encoded and
 * zero-padded (or truncated) to exactly one block of key material.
 */
public void init(String keyMessage, long tweak1, long tweak2) {
    final long[] tweak = {tweak1, tweak2};
    final byte[] key = new byte[blockSize / Byte.SIZE];
    final byte[] keyData = getBytes(keyMessage);
    System.arraycopy(keyData, 0, key, 0, Math.min(key.length, keyData.length));
    init(bytesToLongs(key), tweak);
}
/**
 * Encrypts a single block of {@code blockSizeInBytes} bytes taken from
 * {@code content} starting at {@code offset}.
 */
@Override
public byte[] encryptBlock(byte[] content, int offset) {
    final long[] plainWords = bytesToLongs(content, offset, blockSizeInBytes);
    final long[] cipherWords = new long[blockSize / Long.SIZE];
    blockEncrypt(plainWords, cipherWords);
    return longsToBytes(cipherWords);
}
/**
 * Decrypts a single block of {@code blockSizeInBytes} bytes taken from
 * {@code encryptedContent} starting at {@code offset}.
 */
@Override
public byte[] decryptBlock(byte[] encryptedContent, int offset) {
    final long[] cipherWords = bytesToLongs(encryptedContent, offset, blockSizeInBytes);
    final long[] plainWords = new long[cipherWords.length];
    blockDecrypt(cipherWords, plainWords);
    return longsToBytes(plainWords);
}
/**
 * Encrypts a string.
 * The plain text is UTF-8 encoded (see {@link #getBytes(String)}) before
 * being handed to the block cipher.
 */
public byte[] encryptString(String plain) {
return encrypt(getBytes(plain));
}
/**
 * Decrypts a byte array and decodes the result as a UTF-8 string.
 *
 * Uses {@link java.nio.charset.StandardCharsets#UTF_8} directly: unlike the
 * charset-name overload this constructor cannot throw
 * {@code UnsupportedEncodingException}, so the old catch-and-return-null
 * path (a latent NPE hazard for callers) is gone -- this never returns null.
 *
 * @param encrypted the encrypted bytes
 * @return the decrypted UTF-8 string
 */
public String decryptString(byte[] encrypted) {
    return new String(decrypt(encrypted), StandardCharsets.UTF_8);
}
// ---------------------------------------------------------------- util
/**
 * UTF-8 encodes a string.
 *
 * Uses {@link java.nio.charset.StandardCharsets#UTF_8} directly: UTF-8 is
 * guaranteed on every JVM, and the {@code Charset} overload throws no checked
 * exception, so the old catch-and-return-null path (a latent NPE hazard in
 * {@code init(String, long, long)} and {@code encryptString}) is gone --
 * this never returns null.
 */
protected byte[] getBytes(String string) {
    return string.getBytes(StandardCharsets.UTF_8);
}
// Converts a whole byte array into a long array; the array length is
// expected to be a multiple of 8 (any trailing remainder is ignored).
protected static long[] bytesToLongs(byte[] ba) {
return bytesToLongs(ba, 0, ba.length);
}
/**
 * Converts a segment of a byte array into a long array: {@code size} bytes
 * starting at {@code offset}, packed eight bytes per long ({@code size >> 3}
 * longs; a trailing remainder of fewer than 8 bytes is ignored).
 */
protected static long[] bytesToLongs(byte[] ba, int offset, int size) {
    final long[] out = new long[size >> 3];
    for (int i = 0, pos = offset; i < out.length; i++, pos += 8) {
        out[i] = Bits.getLong(ba, pos);
    }
    return out;
}
/**
 * Converts a long array into a byte array, eight bytes per long.
 */
protected static byte[] longsToBytes(long[] la) {
    final byte[] out = new byte[la.length << 3];
    for (int i = 0; i < la.length; i++) {
        Bits.putLong(out, i << 3, la[i]);
    }
    return out;
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.network.fluent;
import com.azure.core.annotation.ReturnType;
import com.azure.core.annotation.ServiceMethod;
import com.azure.core.http.rest.PagedFlux;
import com.azure.core.http.rest.PagedIterable;
import com.azure.core.http.rest.Response;
import com.azure.core.management.polling.PollResult;
import com.azure.core.util.Context;
import com.azure.core.util.polling.PollerFlux;
import com.azure.core.util.polling.SyncPoller;
import com.azure.resourcemanager.network.fluent.models.ExpressRouteCircuitAuthorizationInner;
import java.nio.ByteBuffer;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
/**
 * An instance of this class provides access to all the operations defined in ExpressRouteCircuitAuthorizationsClient.
 */
public interface ExpressRouteCircuitAuthorizationsClient {
/**
 * Deletes the specified authorization from the specified express route circuit.
 *
 * @param resourceGroupName The name of the resource group.
 * @param circuitName The name of the express route circuit.
 * @param authorizationName The name of the authorization.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return the response body along with {@link Response} on successful completion of {@link Mono}.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
Mono<Response<Flux<ByteBuffer>>> deleteWithResponseAsync(
String resourceGroupName, String circuitName, String authorizationName);
/**
 * Deletes the specified authorization from the specified express route circuit.
 *
 * @param resourceGroupName The name of the resource group.
 * @param circuitName The name of the express route circuit.
 * @param authorizationName The name of the authorization.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return the {@link PollerFlux} for polling of the long-running delete operation.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
PollerFlux<PollResult<Void>, Void> beginDeleteAsync(
String resourceGroupName, String circuitName, String authorizationName);
/**
 * Deletes the specified authorization from the specified express route circuit.
 *
 * @param resourceGroupName The name of the resource group.
 * @param circuitName The name of the express route circuit.
 * @param authorizationName The name of the authorization.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return the {@link SyncPoller} for polling of the long-running delete operation.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
SyncPoller<PollResult<Void>, Void> beginDelete(
String resourceGroupName, String circuitName, String authorizationName);
/**
 * Deletes the specified authorization from the specified express route circuit.
 *
 * @param resourceGroupName The name of the resource group.
 * @param circuitName The name of the express route circuit.
 * @param authorizationName The name of the authorization.
 * @param context The context to associate with this operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return the {@link SyncPoller} for polling of the long-running delete operation.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
SyncPoller<PollResult<Void>, Void> beginDelete(
String resourceGroupName, String circuitName, String authorizationName, Context context);
/**
 * Deletes the specified authorization from the specified express route circuit.
 *
 * @param resourceGroupName The name of the resource group.
 * @param circuitName The name of the express route circuit.
 * @param authorizationName The name of the authorization.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return a {@link Mono} that completes when a successful response is received.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
Mono<Void> deleteAsync(String resourceGroupName, String circuitName, String authorizationName);
/**
 * Deletes the specified authorization from the specified express route circuit.
 *
 * @param resourceGroupName The name of the resource group.
 * @param circuitName The name of the express route circuit.
 * @param authorizationName The name of the authorization.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
void delete(String resourceGroupName, String circuitName, String authorizationName);
/**
 * Deletes the specified authorization from the specified express route circuit.
 *
 * @param resourceGroupName The name of the resource group.
 * @param circuitName The name of the express route circuit.
 * @param authorizationName The name of the authorization.
 * @param context The context to associate with this operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
void delete(String resourceGroupName, String circuitName, String authorizationName, Context context);
/**
 * Gets the specified authorization from the specified express route circuit.
 *
 * @param resourceGroupName The name of the resource group.
 * @param circuitName The name of the express route circuit.
 * @param authorizationName The name of the authorization.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return the specified authorization from the specified express route circuit along with {@link Response} on
 * successful completion of {@link Mono}.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
Mono<Response<ExpressRouteCircuitAuthorizationInner>> getWithResponseAsync(
String resourceGroupName, String circuitName, String authorizationName);
/**
 * Gets the specified authorization from the specified express route circuit.
 *
 * @param resourceGroupName The name of the resource group.
 * @param circuitName The name of the express route circuit.
 * @param authorizationName The name of the authorization.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return the specified authorization from the specified express route circuit on successful completion of
 * {@link Mono}.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
Mono<ExpressRouteCircuitAuthorizationInner> getAsync(
String resourceGroupName, String circuitName, String authorizationName);
/**
 * Gets the specified authorization from the specified express route circuit.
 *
 * @param resourceGroupName The name of the resource group.
 * @param circuitName The name of the express route circuit.
 * @param authorizationName The name of the authorization.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return the specified authorization from the specified express route circuit.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
ExpressRouteCircuitAuthorizationInner get(String resourceGroupName, String circuitName, String authorizationName);
/**
 * Gets the specified authorization from the specified express route circuit.
 *
 * @param resourceGroupName The name of the resource group.
 * @param circuitName The name of the express route circuit.
 * @param authorizationName The name of the authorization.
 * @param context The context to associate with this operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return the specified authorization from the specified express route circuit along with {@link Response}.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
Response<ExpressRouteCircuitAuthorizationInner> getWithResponse(
String resourceGroupName, String circuitName, String authorizationName, Context context);
/**
 * Creates or updates an authorization in the specified express route circuit.
 *
 * @param resourceGroupName The name of the resource group.
 * @param circuitName The name of the express route circuit.
 * @param authorizationName The name of the authorization.
 * @param authorizationParameters Authorization in an ExpressRouteCircuit resource.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return authorization in an ExpressRouteCircuit resource along with {@link Response} on successful completion of
 * {@link Mono}.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
Mono<Response<Flux<ByteBuffer>>> createOrUpdateWithResponseAsync(
String resourceGroupName,
String circuitName,
String authorizationName,
ExpressRouteCircuitAuthorizationInner authorizationParameters);
/**
 * Creates or updates an authorization in the specified express route circuit.
 *
 * @param resourceGroupName The name of the resource group.
 * @param circuitName The name of the express route circuit.
 * @param authorizationName The name of the authorization.
 * @param authorizationParameters Authorization in an ExpressRouteCircuit resource.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return the {@link PollerFlux} for polling of the long-running create-or-update operation.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
PollerFlux<PollResult<ExpressRouteCircuitAuthorizationInner>, ExpressRouteCircuitAuthorizationInner>
beginCreateOrUpdateAsync(
String resourceGroupName,
String circuitName,
String authorizationName,
ExpressRouteCircuitAuthorizationInner authorizationParameters);
/**
 * Creates or updates an authorization in the specified express route circuit.
 *
 * @param resourceGroupName The name of the resource group.
 * @param circuitName The name of the express route circuit.
 * @param authorizationName The name of the authorization.
 * @param authorizationParameters Authorization in an ExpressRouteCircuit resource.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return the {@link SyncPoller} for polling of the long-running create-or-update operation.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
SyncPoller<PollResult<ExpressRouteCircuitAuthorizationInner>, ExpressRouteCircuitAuthorizationInner>
beginCreateOrUpdate(
String resourceGroupName,
String circuitName,
String authorizationName,
ExpressRouteCircuitAuthorizationInner authorizationParameters);
/**
 * Creates or updates an authorization in the specified express route circuit.
 *
 * @param resourceGroupName The name of the resource group.
 * @param circuitName The name of the express route circuit.
 * @param authorizationName The name of the authorization.
 * @param authorizationParameters Authorization in an ExpressRouteCircuit resource.
 * @param context The context to associate with this operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return the {@link SyncPoller} for polling of the long-running create-or-update operation.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
SyncPoller<PollResult<ExpressRouteCircuitAuthorizationInner>, ExpressRouteCircuitAuthorizationInner>
beginCreateOrUpdate(
String resourceGroupName,
String circuitName,
String authorizationName,
ExpressRouteCircuitAuthorizationInner authorizationParameters,
Context context);
/**
 * Creates or updates an authorization in the specified express route circuit.
 *
 * @param resourceGroupName The name of the resource group.
 * @param circuitName The name of the express route circuit.
 * @param authorizationName The name of the authorization.
 * @param authorizationParameters Authorization in an ExpressRouteCircuit resource.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return authorization in an ExpressRouteCircuit resource on successful completion of {@link Mono}.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
Mono<ExpressRouteCircuitAuthorizationInner> createOrUpdateAsync(
String resourceGroupName,
String circuitName,
String authorizationName,
ExpressRouteCircuitAuthorizationInner authorizationParameters);
/**
 * Creates or updates an authorization in the specified express route circuit.
 *
 * @param resourceGroupName The name of the resource group.
 * @param circuitName The name of the express route circuit.
 * @param authorizationName The name of the authorization.
 * @param authorizationParameters Authorization in an ExpressRouteCircuit resource.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return authorization in an ExpressRouteCircuit resource.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
ExpressRouteCircuitAuthorizationInner createOrUpdate(
String resourceGroupName,
String circuitName,
String authorizationName,
ExpressRouteCircuitAuthorizationInner authorizationParameters);
/**
 * Creates or updates an authorization in the specified express route circuit.
 *
 * @param resourceGroupName The name of the resource group.
 * @param circuitName The name of the express route circuit.
 * @param authorizationName The name of the authorization.
 * @param authorizationParameters Authorization in an ExpressRouteCircuit resource.
 * @param context The context to associate with this operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return authorization in an ExpressRouteCircuit resource.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
ExpressRouteCircuitAuthorizationInner createOrUpdate(
String resourceGroupName,
String circuitName,
String authorizationName,
ExpressRouteCircuitAuthorizationInner authorizationParameters,
Context context);
/**
 * Gets all authorizations in an express route circuit.
 *
 * @param resourceGroupName The name of the resource group.
 * @param circuitName The name of the circuit.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return all authorizations in an express route circuit as paginated response with {@link PagedFlux}.
 */
@ServiceMethod(returns = ReturnType.COLLECTION)
PagedFlux<ExpressRouteCircuitAuthorizationInner> listAsync(String resourceGroupName, String circuitName);
/**
 * Gets all authorizations in an express route circuit.
 *
 * @param resourceGroupName The name of the resource group.
 * @param circuitName The name of the circuit.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return all authorizations in an express route circuit as paginated response with {@link PagedIterable}.
 */
@ServiceMethod(returns = ReturnType.COLLECTION)
PagedIterable<ExpressRouteCircuitAuthorizationInner> list(String resourceGroupName, String circuitName);
/**
 * Gets all authorizations in an express route circuit.
 *
 * @param resourceGroupName The name of the resource group.
 * @param circuitName The name of the circuit.
 * @param context The context to associate with this operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return all authorizations in an express route circuit as paginated response with {@link PagedIterable}.
 */
@ServiceMethod(returns = ReturnType.COLLECTION)
PagedIterable<ExpressRouteCircuitAuthorizationInner> list(
String resourceGroupName, String circuitName, Context context);
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.parse;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.common.FileUtils;
import org.apache.hadoop.hive.common.StatsSetupConst;
import org.apache.hadoop.hive.common.ValidTxnList;
import org.apache.hadoop.hive.common.ValidTxnWriteIdList;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.ql.Context;
import org.apache.hadoop.hive.ql.Driver;
import org.apache.hadoop.hive.ql.QueryState;
import org.apache.hadoop.hive.ql.exec.ExplainTask;
import org.apache.hadoop.hive.ql.exec.FetchTask;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.exec.TaskFactory;
import org.apache.hadoop.hive.ql.parse.ExplainConfiguration.AnalyzeState;
import org.apache.hadoop.hive.ql.parse.ExplainConfiguration.VectorizationDetailLevel;
import org.apache.hadoop.hive.ql.plan.ExplainWork;
import org.apache.hadoop.hive.ql.processors.CommandProcessorException;
import org.apache.hadoop.hive.ql.stats.StatsAggregator;
import org.apache.hadoop.hive.ql.stats.StatsCollectionContext;
import org.apache.hadoop.hive.ql.stats.fs.FSStatsAggregator;
/**
 * ExplainSemanticAnalyzer.
 *
 * Semantic analyzer for EXPLAIN statements: decodes the EXPLAIN option
 * keywords attached to the AST, delegates analysis of the wrapped query to
 * the appropriate child semantic analyzer, and produces a single root
 * {@link ExplainTask} that renders the plan.
 */
public class ExplainSemanticAnalyzer extends BaseSemanticAnalyzer {
  // Result schema of the EXPLAIN output, taken from ExplainTask.getResultSchema().
  List<FieldSchema> fieldList;
  // Accumulated EXPLAIN options (EXTENDED, FORMATTED, ANALYZE, VECTORIZATION, ...).
  ExplainConfiguration config;

  public ExplainSemanticAnalyzer(QueryState queryState) throws SemanticException {
    super(queryState);
    config = new ExplainConfiguration();
  }

  @Override
  public void analyzeInternal(ASTNode ast) throws SemanticException {
    final int childCount = ast.getChildCount();
    int i = 1; // Skip TOK_QUERY.
    // Children after the wrapped query node are the EXPLAIN option keywords.
    while (i < childCount) {
      int explainOptions = ast.getChild(i).getType();
      if (explainOptions == HiveParser.KW_FORMATTED) {
        config.setFormatted(true);
      } else if (explainOptions == HiveParser.KW_EXTENDED) {
        config.setExtended(true);
      } else if (explainOptions == HiveParser.KW_DEPENDENCY) {
        config.setDependency(true);
      } else if (explainOptions == HiveParser.KW_CBO) {
        config.setCbo(true);
      } else if (explainOptions == HiveParser.KW_COST) {
        config.setCboCost(true);
      } else if (explainOptions == HiveParser.KW_JOINCOST) {
        config.setCboJoinCost(true);
      } else if (explainOptions == HiveParser.KW_LOGICAL) {
        config.setLogical(true);
      } else if (explainOptions == HiveParser.KW_AUTHORIZATION) {
        config.setAuthorize(true);
      } else if (explainOptions == HiveParser.KW_ANALYZE) {
        config.setAnalyze(AnalyzeState.RUNNING);
        config.setExplainRootPath(ctx.getMRTmpPath());
      } else if (explainOptions == HiveParser.KW_VECTORIZATION) {
        config.setVectorization(true);
        // VECTORIZATION may be followed by [ONLY] and a detail-level token;
        // consume those extra children here.
        if (i + 1 < childCount) {
          int vectorizationOption = ast.getChild(i + 1).getType();
          // [ONLY]
          if (vectorizationOption == HiveParser.TOK_ONLY) {
            config.setVectorizationOnly(true);
            i++;
            if (i + 1 >= childCount) {
              break;
            }
            vectorizationOption = ast.getChild(i + 1).getType();
          }
          // [SUMMARY|OPERATOR|EXPRESSION|DETAIL]
          if (vectorizationOption == HiveParser.TOK_SUMMARY) {
            config.setVectorizationDetailLevel(VectorizationDetailLevel.SUMMARY);
            i++;
          } else if (vectorizationOption == HiveParser.TOK_OPERATOR) {
            config.setVectorizationDetailLevel(VectorizationDetailLevel.OPERATOR);
            i++;
          } else if (vectorizationOption == HiveParser.TOK_EXPRESSION) {
            config.setVectorizationDetailLevel(VectorizationDetailLevel.EXPRESSION);
            i++;
          } else if (vectorizationOption == HiveParser.TOK_DETAIL) {
            config.setVectorizationDetailLevel(VectorizationDetailLevel.DETAIL);
            i++;
          }
        }
      } else if (explainOptions == HiveParser.KW_LOCKS) {
        config.setLocks(true);
      } else if (explainOptions == HiveParser.KW_AST) {
        config.setAst(true);
      } else if (explainOptions == HiveParser.KW_DEBUG) {
        config.setDebug(true);
      } else {
        // UNDONE: UNKNOWN OPTION?
      }
      i++;
    }
    ctx.setExplainConfig(config);
    ctx.setExplainPlan(true);
    ASTNode input = (ASTNode) ast.getChild(0);
    // explain analyze is composed of two steps
    // step 1 (ANALYZE_STATE.RUNNING), run the query and collect the runtime #rows
    // step 2 (ANALYZE_STATE.ANALYZING), explain the query and provide the runtime #rows collected.
    if (config.getAnalyze() == AnalyzeState.RUNNING) {
      String query = ctx.getTokenRewriteStream().toString(input.getTokenStartIndex(),
          input.getTokenStopIndex());
      LOG.info("Explain analyze (running phase) for query " + query);
      conf.unset(ValidTxnList.VALID_TXNS_KEY);
      conf.unset(ValidTxnWriteIdList.VALID_TABLES_WRITEIDS_KEY);
      Context runCtx = null;
      try {
        runCtx = new Context(conf);
        // runCtx and ctx share the configuration, but not isExplainPlan()
        runCtx.setExplainConfig(config);
        try (Driver driver = new Driver(conf, runCtx, queryState.getLineageState())) {
          driver.run(query);
          // Drain all results; only the runtime stats written as a side
          // effect of execution are needed.
          while (driver.getResults(new ArrayList<String>())) {
          }
        } catch (CommandProcessorException e) {
          throw new SemanticException(e.getErrorMessage(), e.getException());
        }
        config.setOpIdToRuntimeNumRows(aggregateStats(config.getExplainRootPath()));
      } catch (IOException e1) {
        throw new SemanticException(e1);
      }
      ctx.resetOpContext();
      ctx.resetStream();
      TaskFactory.resetId();
      LOG.info("Explain analyze (analyzing phase) for query " + query);
      config.setAnalyze(AnalyzeState.ANALYZING);
    }
    //Creating new QueryState unfortunately causes all .q.out to change - do this in a separate ticket
    //Sharing QueryState between generating the plan and executing the query seems bad
    //BaseSemanticAnalyzer sem = SemanticAnalyzerFactory.get(new QueryState(queryState.getConf()), input);
    BaseSemanticAnalyzer sem = SemanticAnalyzerFactory.get(queryState, input);
    sem.analyze(input, ctx);
    sem.validate();
    inputs = sem.getInputs();
    outputs = sem.getOutputs();
    ctx.setResFile(ctx.getLocalTmpPath());
    List<Task<?>> tasks = sem.getAllRootTasks();
    if (tasks == null) {
      tasks = Collections.emptyList();
    }
    FetchTask fetchTask = sem.getFetchTask();
    if (fetchTask != null) {
      // Initialize fetch work such that operator tree will be constructed.
      fetchTask.getWork().initializeForFetch(ctx.getOpContext());
    }
    ParseContext pCtx = null;
    if (sem instanceof SemanticAnalyzer) {
      pCtx = ((SemanticAnalyzer) sem).getParseContext();
    }
    // User-level explain applies only to a plain EXPLAIN (no modifiers)
    // when the matching engine-specific config flag is enabled.
    config.setUserLevelExplain(!config.isExtended()
        && !config.isFormatted()
        && !config.isDependency()
        && !config.isCbo()
        && !config.isLogical()
        && !config.isVectorization()
        && !config.isAuthorize()
        && (
            (
              HiveConf.getBoolVar(ctx.getConf(), HiveConf.ConfVars.HIVE_EXPLAIN_USER)
              &&
              HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_EXECUTION_ENGINE).equals("tez")
            )
            ||
            (
              HiveConf.getBoolVar(ctx.getConf(), HiveConf.ConfVars.HIVE_SPARK_EXPLAIN_USER)
              &&
              HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_EXECUTION_ENGINE).equals("spark")
            )
          )
        );
    ExplainWork work = new ExplainWork(ctx.getResFile(),
        pCtx,
        tasks,
        fetchTask,
        input,
        sem,
        config,
        ctx.getCboInfo(),
        ctx.getOptimizedSql(),
        ctx.getCalcitePlan());
    work.setAppendTaskType(
        HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVEEXPLAINDEPENDENCYAPPENDTASKTYPES));
    ExplainTask explTask = (ExplainTask) TaskFactory.get(work);
    fieldList = ExplainTask.getResultSchema();
    rootTasks.add(explTask);
  }

  /**
   * Aggregates the runtime row-count stats written under {@code localTmpPath}
   * during the RUNNING phase of EXPLAIN ANALYZE.
   *
   * @param localTmpPath root of the stats; each child directory is named after
   *                     an operator id (e.g. SEL_1) and holds its stats files
   * @return map from operator id to runtime row count; -1 means no stats
   */
  private Map<String, Long> aggregateStats(Path localTmpPath) {
    Map<String, Long> opIdToRuntimeNumRows = new HashMap<String, Long>();
    // localTmpPath is the root of all the stats.
    // Under it, there will be SEL_1/statsfiles, SEL_2/statsfiles etc where SEL_1 and SEL_2 are the op ids.
    FileSystem fs;
    FileStatus[] statuses = null;
    try {
      fs = localTmpPath.getFileSystem(conf);
      statuses = fs.listStatus(localTmpPath, FileUtils.HIDDEN_FILES_PATH_FILTER);
      // statuses can be null if it is DDL, etc
    } catch (IOException e) {
      LOG.warn(e.toString());
    }
    if (statuses != null) {
      for (FileStatus status : statuses) {
        // isDirectory() replaces the deprecated isDir().
        if (status.isDirectory()) {
          StatsCollectionContext scc = new StatsCollectionContext(conf);
          String[] names = status.getPath().toString().split(Path.SEPARATOR);
          String opId = names[names.length - 1];
          scc.setStatsTmpDir(status.getPath().toString());
          StatsAggregator statsAggregator = new FSStatsAggregator();
          try {
            if (!statsAggregator.connect(scc)) {
              // -1 means that there is no stats
              opIdToRuntimeNumRows.put(opId, -1L);
            } else {
              String value = statsAggregator.aggregateStats("", StatsSetupConst.RUN_TIME_ROW_COUNT);
              long numRows;
              try {
                // A missing or unparsable counter is treated the same as "no stats"
                // instead of throwing NPE/NumberFormatException.
                numRows = value == null ? -1L : Long.parseLong(value);
              } catch (NumberFormatException e) {
                numRows = -1L;
              }
              opIdToRuntimeNumRows.put(opId, numRows);
            }
          } finally {
            // Always release the aggregator, even if stats parsing fails.
            statsAggregator.closeConnection(scc);
          }
        }
      }
    }
    return opIdToRuntimeNumRows;
  }

  @Override
  public List<FieldSchema> getResultSchema() {
    return fieldList;
  }

  @Override
  public boolean skipAuthorization() {
    // EXPLAIN AUTHORIZATION performs its own authorization reporting, so the
    // regular authorization pass is skipped for it.
    List<Task<?>> rootTasks = getRootTasks();
    assert rootTasks != null && rootTasks.size() == 1;
    Task<?> task = rootTasks.get(0);
    return task instanceof ExplainTask && ((ExplainTask) task).getWork().isAuthorize();
  }
}
| |
package in.projectmanas.manasliaison.tasks;
import android.app.Activity;
import android.app.Dialog;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.os.AsyncTask;
import android.util.Log;
import com.google.android.gms.common.GoogleApiAvailability;
import com.google.api.client.extensions.android.http.AndroidHttp;
import com.google.api.client.googleapis.extensions.android.gms.auth.GooglePlayServicesAvailabilityIOException;
import com.google.api.client.googleapis.extensions.android.gms.auth.UserRecoverableAuthIOException;
import com.google.api.client.http.HttpTransport;
import com.google.api.client.json.JsonFactory;
import com.google.api.client.json.jackson2.JacksonFactory;
import com.google.api.services.sheets.v4.model.BatchGetValuesResponse;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import in.projectmanas.manasliaison.MyCredential;
import in.projectmanas.manasliaison.activities.LoginActivity;
import in.projectmanas.manasliaison.backendless_classes.Sheet;
import in.projectmanas.manasliaison.listeners.DetailsUpdatedListener;
import static in.projectmanas.manasliaison.activities.LoginActivity.REQUEST_GOOGLE_PLAY_SERVICES;
/**
* Created by knnat on 9/30/2017.
*/
public class UpdateAllDetails extends AsyncTask<String, Void, ArrayList<ArrayList<ArrayList<String>>>> {
public DetailsUpdatedListener delegate = null;
private com.google.api.services.sheets.v4.Sheets mService = null;
private Exception mLastError = null;
private List<String> ranges;
private Activity context;
private String regNumber, userName, emailID, interviewStatus1, interviewStatus2, tpStatus, mobileNumber, prefDiv1, prefDiv2, pref1Schedule, pref2Schedule, numInterviewConducted, numTPShortlisted, numApplicants, numSelected;
/**
 * Creates the task and builds a Google Sheets service client.
 *
 * Reads the cached e-mail address from the "UserDetails" shared
 * preferences; the literal "emailID" is the sentinel default meaning no
 * user is logged in, in which case the user is redirected to the login
 * screen and the hosting activity is finished.
 *
 * NOTE(review): after startActivity()/finish() the constructor keeps
 * executing and still builds the Sheets service — confirm whether an
 * early return was intended here.
 */
public UpdateAllDetails(Activity context) {
SharedPreferences sharedPreferences = context.getSharedPreferences("UserDetails", Context.MODE_PRIVATE);
emailID = sharedPreferences.getString("emailID", "emailID");
if (emailID.equals("emailID")) {
context.startActivity(new Intent(context, LoginActivity.class));
context.finish();
}
this.context = context;
// Build the Sheets API client using the app-wide stored credential.
HttpTransport transport = AndroidHttp.newCompatibleTransport();
JsonFactory jsonFactory = JacksonFactory.getDefaultInstance();
mService = new com.google.api.services.sheets.v4.Sheets.Builder(
transport, jsonFactory, new MyCredential())
.setApplicationName("Manas-Liaison")
.build();
}
/**
 * Background step of the task: remembers the requested sheet ranges and
 * fetches them from the spreadsheet API.
 *
 * @param params A1-notation ranges to fetch
 * @return the fetched tables, or {@code null} when the fetch failed; in
 *         that case the error is kept in {@code mLastError} and the task
 *         is cancelled
 */
@Override
protected ArrayList<ArrayList<ArrayList<String>>> doInBackground(String[] params) {
try {
this.ranges = Arrays.asList(params);
return getDataFromApi();
} catch (Exception error) {
Log.e("Error", error.toString());
this.mLastError = error;
cancel(true);
return null;
}
}
/**
 * Fetches the configured {@code ranges} from the spreadsheet identified by
 * the first stored Sheet record and converts every cell to its string form.
 *
 * @return one table per requested range (a table is a list of rows, a row a
 *         list of cell strings); a range whose values are absent yields an
 *         empty table
 * @throws IOException if the Sheets API call fails
 */
private ArrayList<ArrayList<ArrayList<String>>> getDataFromApi() throws IOException {
String spreadsheetId = Sheet.findFirst().getSpreadsheetId();
BatchGetValuesResponse response = this.mService.spreadsheets().values()
.batchGet(spreadsheetId)
.setRanges(ranges)
.execute();
// Collect the raw value tables first, one per requested range.
ArrayList<List<List<Object>>> values = new ArrayList<>();
for (int i = 0; i < response.getValueRanges().size(); i++) {
values.add(response.getValueRanges().get(i).getValues());
}
ArrayList<ArrayList<ArrayList<String>>> valueStrings = new ArrayList<>(values.size());
// Use parameterized types instead of the raw List the original iterated
// with; getValues() is declared as List<List<Object>>.
for (List<List<Object>> table : values) {
ArrayList<ArrayList<String>> currentTable = new ArrayList<>();
if (table != null) {
for (List<Object> row : table) {
ArrayList<String> currentRow = new ArrayList<>(row.size());
for (Object cell : row) {
currentRow.add(cell.toString());
}
currentTable.add(currentRow);
}
}
// A null table (range with no values) becomes an empty table, matching
// the original behavior.
valueStrings.add(currentTable);
}
return valueStrings;
}
@Override
protected void onPostExecute(ArrayList<ArrayList<ArrayList<String>>> outputList) {
int interviewAcceptedCounter = 0, rejectedCounter = 0, maybeCounter = 0, selectedCounter = 0;
SharedPreferences sharedPreferences = context.getSharedPreferences("UserDetails", Context.MODE_PRIVATE);
emailID = sharedPreferences.getString("emailID", "emailID");
if (emailID.equals("emailID")) {
context.startActivity(new Intent(context, LoginActivity.class));
context.finish();
}
try {
int foundIndex = -1;
ArrayList<ArrayList<String>> output = outputList.get(0);
numApplicants = output.size() + "";
for (int i = 0; i < output.size(); i++) {
ArrayList<String> row = output.get(i);
if (row.size() > 0 && row.get(0).equals(emailID)) {
foundIndex = i;
break;
}
}
try {
interviewStatus1 = outputList.get(1).get(foundIndex).get(0);
} catch (Exception e) {
interviewStatus1 = "";
}
try {
interviewStatus2 = outputList.get(2).get(foundIndex).get(0);
} catch (Exception e) {
interviewStatus2 = "";
}
try {
tpStatus = outputList.get(3).get(foundIndex).get(0);
} catch (Exception e) {
tpStatus = "";
}
try {
userName = outputList.get(4).get(foundIndex).get(0);
} catch (Exception e) {
userName = "";
}
try {
regNumber = outputList.get(5).get(foundIndex).get(0);
} catch (Exception e) {
regNumber = "";
}
try {
mobileNumber = outputList.get(6).get(foundIndex).get(0);
} catch (Exception e) {
mobileNumber = "";
}
try {
prefDiv1 = outputList.get(7).get(foundIndex).get(0);
} catch (Exception e) {
prefDiv1 = "";
}
try {
prefDiv2 = outputList.get(8).get(foundIndex).get(0);
} catch (Exception e) {
prefDiv2 = "";
}
try {
if (interviewStatus1.equals("SCHEDULED")) {
pref1Schedule = outputList.get(9).get(foundIndex).get(0);
} else {
pref1Schedule = "NOT SCHEDULED";
}
} catch (Exception ignored) {
}
try {
if (interviewStatus2.equals("SCHEDULED")) {
pref2Schedule = outputList.get(10).get(foundIndex).get(0);
} else {
pref2Schedule = "NOT SCHEDULED";
}
} catch (Exception ignored) {
}
/*
ArrayList<ArrayList<String>> interviewStatus = outputList.get(1);
for (ArrayList<String> arrayList : interviewStatus) {
if (arrayList.size() > 0) {
if (arrayList.get(0).equals("ACCEPTED")) {
interviewAcceptedCounter++;
} else if (arrayList.get(0).equals("REJECTED")) {
rejectedCounter++;
} else if (arrayList.get(0).equals("MAYBE")) {
maybeCounter++;
}
}
}
ArrayList<ArrayList<String>> tpStatus = outputList.get(3);
for (ArrayList<String> arrayList : tpStatus) {
if (arrayList.size() > 0) {
if (arrayList.get(0).equals("ACCEPTED")) {
selectedCounter++;
}
}
}
numInterviewConducted = (interviewAcceptedCounter + rejectedCounter + maybeCounter) + "";
numTPShortlisted = "" + interviewAcceptedCounter;
numSelected = "" + selectedCounter;
*/
} catch (Exception e) {
}
cacheAllData();
delegate.onDetailsUpdated();
}
private void cacheAllData() {
SharedPreferences sharedPreferences = context.getSharedPreferences("UserDetails", Context.MODE_PRIVATE);
SharedPreferences.Editor editor = sharedPreferences.edit();
editor
.putString("name", userName)
.putString("emailID", emailID)
.putString("interviewStatus1", interviewStatus1)
.putString("interviewStatus2", interviewStatus2)
.putString("tpStatus", tpStatus)
.putString("regNumber", regNumber)
.putString("mobileNumber", mobileNumber)
.putString("prefDiv1", prefDiv1)
.putString("prefDiv2", prefDiv2)
.putString("pref1Schedule", pref1Schedule)
.putString("pref2Schedule", pref2Schedule)
/*.putString("numApplicants", numApplicants)
.putString("numInterviewConducted", numInterviewConducted)
.putString("numTPShortlisted", numTPShortlisted)
.putString("numSelected", numSelected)*/
.apply();
}
void showGooglePlayServicesAvailabilityErrorDialog(
final int connectionStatusCode) {
GoogleApiAvailability apiAvailability = GoogleApiAvailability.getInstance();
Dialog dialog = apiAvailability.getErrorDialog(
context,
connectionStatusCode,
REQUEST_GOOGLE_PLAY_SERVICES);
dialog.show();
}
@Override
protected void onCancelled() {
delegate.onUpdationFailed();
if (mLastError != null) {
if (mLastError instanceof GooglePlayServicesAvailabilityIOException) {
showGooglePlayServicesAvailabilityErrorDialog(
((GooglePlayServicesAvailabilityIOException) mLastError)
.getConnectionStatusCode());
} else if (mLastError instanceof UserRecoverableAuthIOException) {
context.startActivityForResult(
((UserRecoverableAuthIOException) mLastError).getIntent(),
LoginActivity.REQUEST_AUTHORIZATION);
} else {
Log.e("Error", "The following error occurred:\n"
+ mLastError.getMessage());
//Toast.makeText(context, mLastError.toString(), Toast.LENGTH_LONG).show();
//context.getPreferences(Context.MODE_PRIVATE).edit().clear().apply();
//context.startActivity(new Intent(context, LoginActivity.class));
}
} else {
Log.e("Error", "Request cancelled.");
}
}
}
| |
/**
* Jobs Plugin for Bukkit
* Copyright (C) 2011 Zak Ford <zak.j.ford@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package me.zford.jobs.bukkit.listeners;
import java.util.List;
import me.zford.jobs.Jobs;
import me.zford.jobs.Player;
import me.zford.jobs.bukkit.BukkitUtil;
import me.zford.jobs.bukkit.JobsPlugin;
import me.zford.jobs.bukkit.actions.BlockActionInfo;
import me.zford.jobs.bukkit.actions.EntityActionInfo;
import me.zford.jobs.bukkit.actions.ItemActionInfo;
import me.zford.jobs.config.ConfigManager;
import me.zford.jobs.container.ActionType;
import me.zford.jobs.container.JobsPlayer;
import org.bukkit.GameMode;
import org.bukkit.Material;
import org.bukkit.block.Block;
import org.bukkit.entity.Item;
import org.bukkit.entity.LivingEntity;
import org.bukkit.entity.Projectile;
import org.bukkit.entity.Tameable;
import org.bukkit.event.EventHandler;
import org.bukkit.event.EventPriority;
import org.bukkit.event.Listener;
import org.bukkit.event.block.BlockBreakEvent;
import org.bukkit.event.block.BlockPlaceEvent;
import org.bukkit.event.enchantment.EnchantItemEvent;
import org.bukkit.event.entity.CreatureSpawnEvent;
import org.bukkit.event.entity.EntityDamageByEntityEvent;
import org.bukkit.event.entity.EntityDeathEvent;
import org.bukkit.event.entity.CreatureSpawnEvent.SpawnReason;
import org.bukkit.event.inventory.BrewEvent;
import org.bukkit.event.inventory.CraftItemEvent;
import org.bukkit.event.inventory.FurnaceSmeltEvent;
import org.bukkit.event.inventory.InventoryAction;
import org.bukkit.event.inventory.InventoryClickEvent;
import org.bukkit.event.inventory.InventoryType.SlotType;
import org.bukkit.event.player.PlayerFishEvent;
import org.bukkit.event.player.PlayerInteractEvent;
import org.bukkit.inventory.AnvilInventory;
import org.bukkit.inventory.CraftingInventory;
import org.bukkit.inventory.EnchantingInventory;
import org.bukkit.inventory.Inventory;
import org.bukkit.inventory.ItemStack;
import org.bukkit.inventory.Recipe;
import org.bukkit.metadata.FixedMetadataValue;
import org.bukkit.metadata.MetadataValue;
/**
 * Listens for payable player actions (block break/place, fishing, crafting,
 * repairing, enchanting, smelting, brewing and kills) and routes each one
 * through {@link Jobs#action} so job income and experience are awarded.
 */
public class JobsPaymentListener implements Listener {
    private final JobsPlugin plugin;
    // Metadata keys used to remember which player owns a furnace / brewing
    // stand, and which mobs came from a spawner (spawner kills are not paid).
    private final String furnaceOwnerMetadata = "jobsFurnaceOwner";
    private final String brewingOwnerMetadata = "jobsBrewingOwner";
    private final String mobSpawnerMetadata = "jobsMobSpawner";
    public JobsPaymentListener(JobsPlugin plugin){
        this.plugin = plugin;
    }
    /**
     * Pays the breaking player and clears any furnace-owner metadata attached
     * to the broken block.
     */
    @EventHandler(priority=EventPriority.MONITOR, ignoreCancelled=true)
    public void onBlockBreak(BlockBreakEvent event) {
        // remove furnace metadata for broken block
        Block block = event.getBlock();
        if (block == null)
            return;
        if (block.getType().equals(Material.FURNACE) && block.hasMetadata(furnaceOwnerMetadata))
            block.removeMetadata(furnaceOwnerMetadata, plugin);
        // make sure plugin is enabled
        if(!plugin.isEnabled()) return;
        Player player = BukkitUtil.wrapPlayer(event.getPlayer());
        if (!player.isOnline())
            return;
        // creative-mode actions are only paid when explicitly enabled
        if (event.getPlayer().getGameMode().equals(GameMode.CREATIVE) && !ConfigManager.getJobsConfiguration().payInCreative())
            return;
        if (!Jobs.getPermissionHandler().hasWorldPermission(player, player.getLocation().getWorld()))
            return;
        // restricted area multiplier scales the payout
        double multiplier = ConfigManager.getJobsConfiguration().getRestrictedMultiplier(player);
        JobsPlayer jPlayer = Jobs.getPlayerManager().getJobsPlayer(player.getName());
        Jobs.action(jPlayer, new BlockActionInfo(block, ActionType.BREAK), multiplier);
    }
    /** Pays the placing player for a successfully placed block. */
    @EventHandler(priority=EventPriority.MONITOR, ignoreCancelled=true)
    public void onBlockPlace(BlockPlaceEvent event) {
        Block block = event.getBlock();
        if (block == null)
            return;
        // make sure plugin is enabled
        if(!plugin.isEnabled()) return;
        // check to make sure you can build
        if(!event.canBuild()) return;
        Player player = BukkitUtil.wrapPlayer(event.getPlayer());
        if (!player.isOnline())
            return;
        // creative-mode actions are only paid when explicitly enabled
        if (event.getPlayer().getGameMode().equals(GameMode.CREATIVE) && !ConfigManager.getJobsConfiguration().payInCreative())
            return;
        if (!Jobs.getPermissionHandler().hasWorldPermission(player, player.getLocation().getWorld()))
            return;
        // restricted area multiplier
        double multiplier = ConfigManager.getJobsConfiguration().getRestrictedMultiplier(player);
        JobsPlayer jPlayer = Jobs.getPlayerManager().getJobsPlayer(player.getName());
        Jobs.action(jPlayer, new BlockActionInfo(block, ActionType.PLACE), multiplier);
    }
    /** Pays the player for a caught fish (only the CAUGHT_FISH state pays). */
    @EventHandler(priority=EventPriority.MONITOR, ignoreCancelled=true)
    public void onPlayerFish(PlayerFishEvent event) {
        // make sure plugin is enabled
        if(!plugin.isEnabled()) return;
        Player player = BukkitUtil.wrapPlayer(event.getPlayer());
        // creative-mode actions are only paid when explicitly enabled
        if (event.getPlayer().getGameMode().equals(GameMode.CREATIVE) && !ConfigManager.getJobsConfiguration().payInCreative())
            return;
        if (!Jobs.getPermissionHandler().hasWorldPermission(player, player.getLocation().getWorld()))
            return;
        // restricted area multiplier
        double multiplier = ConfigManager.getJobsConfiguration().getRestrictedMultiplier(player);
        if (event.getState().equals(PlayerFishEvent.State.CAUGHT_FISH) && event.getCaught() instanceof Item) {
            JobsPlayer jPlayer = Jobs.getPlayerManager().getJobsPlayer(player.getName());
            ItemStack items = ((Item) event.getCaught()).getItemStack();
            Jobs.action(jPlayer, new ItemActionInfo(items, ActionType.FISH), multiplier);
        }
    }
    /**
     * Pays the player for crafting. Bukkit delivers CraftItemEvent as an
     * InventoryClickEvent, so the raw event is filtered and downcast here.
     */
    @EventHandler(priority=EventPriority.MONITOR, ignoreCancelled=true)
    public void onInventoryCraft(InventoryClickEvent e) {
        if (!(e instanceof CraftItemEvent))
            return;
        CraftItemEvent event = (CraftItemEvent) e;
        // make sure plugin is enabled
        if(!plugin.isEnabled()) return;
        // Ignore clicks that do nothing or merely place items into the grid;
        // only taking the crafted result out of the RESULT slot is paid.
        switch (event.getAction()) {
        case NOTHING:
        case PLACE_ONE:
        case PLACE_ALL:
        case PLACE_SOME:
            return;
        default:
            break;
        }
        CraftingInventory inv = event.getInventory();
        if (!(inv instanceof CraftingInventory) || !event.getSlotType().equals(SlotType.RESULT))
            return;
        Recipe recipe = event.getRecipe();
        if (recipe == null)
            return;
        if (!(event.getWhoClicked() instanceof org.bukkit.entity.Player))
            return;
        org.bukkit.entity.Player bukkitPlayer = (org.bukkit.entity.Player) event.getWhoClicked();
        Player player = BukkitUtil.wrapPlayer(bukkitPlayer);
        ItemStack resultStack = recipe.getResult();
        if (resultStack == null)
            return;
        if (!Jobs.getPermissionHandler().hasWorldPermission(player, player.getLocation().getWorld()))
            return;
        // creative-mode actions are only paid when explicitly enabled
        if (bukkitPlayer.getGameMode().equals(GameMode.CREATIVE) && !ConfigManager.getJobsConfiguration().payInCreative())
            return;
        double multiplier = ConfigManager.getJobsConfiguration().getRestrictedMultiplier(player);
        JobsPlayer jPlayer = Jobs.getPlayerManager().getJobsPlayer(player.getName());
        Jobs.action(jPlayer, new ItemActionInfo(resultStack, ActionType.CRAFT), multiplier);
    }
    /**
     * Pays the player for repairing an item: a click on result slot 2 of an
     * anvil inventory.
     */
    @EventHandler(priority=EventPriority.MONITOR, ignoreCancelled=true)
    public void onInventoryRepair(InventoryClickEvent event) {
        // make sure plugin is enabled
        if(!plugin.isEnabled()) return;
        Inventory inv = event.getInventory();
        // Ignore clicks that do nothing or merely place items.
        switch (event.getAction()) {
        case NOTHING:
        case PLACE_ONE:
        case PLACE_ALL:
        case PLACE_SOME:
            return;
        default:
            break;
        }
        // must be anvil inventory
        if (!(inv instanceof AnvilInventory))
            return;
        // must be the anvil's result slot (CONTAINER slot index 2)
        if (!event.getSlotType().equals(SlotType.CONTAINER) || event.getSlot() != 2)
            return;
        if (!(event.getWhoClicked() instanceof org.bukkit.entity.Player))
            return;
        org.bukkit.entity.Player bukkitPlayer = (org.bukkit.entity.Player) event.getWhoClicked();
        Player player = BukkitUtil.wrapPlayer(bukkitPlayer);
        ItemStack resultStack = event.getCurrentItem();
        if (resultStack == null)
            return;
        if (!Jobs.getPermissionHandler().hasWorldPermission(player, player.getLocation().getWorld()))
            return;
        // creative-mode actions are only paid when explicitly enabled
        if (bukkitPlayer.getGameMode().equals(GameMode.CREATIVE) && !ConfigManager.getJobsConfiguration().payInCreative())
            return;
        double multiplier = ConfigManager.getJobsConfiguration().getRestrictedMultiplier(player);
        JobsPlayer jPlayer = Jobs.getPlayerManager().getJobsPlayer(player.getName());
        Jobs.action(jPlayer, new ItemActionInfo(resultStack, ActionType.REPAIR), multiplier);
    }
    /** Pays the player for enchanting the item currently on the enchanting table. */
    @EventHandler(priority=EventPriority.MONITOR, ignoreCancelled=true)
    public void onEnchantItem(EnchantItemEvent event) {
        // make sure plugin is enabled
        if(!plugin.isEnabled()) return;
        Inventory inv = event.getInventory();
        if (!(inv instanceof EnchantingInventory))
            return;
        org.bukkit.entity.Player bukkitPlayer = event.getEnchanter();
        ItemStack resultStack = ((EnchantingInventory) inv).getItem();
        if (resultStack == null)
            return;
        Player player = BukkitUtil.wrapPlayer(bukkitPlayer);
        if (!Jobs.getPermissionHandler().hasWorldPermission(player, player.getLocation().getWorld()))
            return;
        // creative-mode actions are only paid when explicitly enabled
        if (bukkitPlayer.getGameMode().equals(GameMode.CREATIVE) && !ConfigManager.getJobsConfiguration().payInCreative())
            return;
        double multiplier = ConfigManager.getJobsConfiguration().getRestrictedMultiplier(player);
        JobsPlayer jPlayer = Jobs.getPlayerManager().getJobsPlayer(player.getName());
        Jobs.action(jPlayer, new ItemActionInfo(resultStack, ActionType.ENCHANT), multiplier);
    }
    /**
     * Pays the recorded furnace owner when the furnace finishes smelting,
     * even if that player is elsewhere at the time.
     */
    @EventHandler(priority=EventPriority.MONITOR, ignoreCancelled=true)
    public void onFurnaceSmelt(FurnaceSmeltEvent event) {
        if (!plugin.isEnabled())
            return;
        Block block = event.getBlock();
        if (block == null)
            return;
        Player player = getMetadataOwner(block, furnaceOwnerMetadata);
        if (player == null)
            return;
        if (!Jobs.getPermissionHandler().hasWorldPermission(player, player.getLocation().getWorld()))
            return;
        double multiplier = ConfigManager.getJobsConfiguration().getRestrictedMultiplier(player);
        JobsPlayer jPlayer = Jobs.getPlayerManager().getJobsPlayer(player.getName());
        Jobs.action(jPlayer, new ItemActionInfo(event.getResult(), ActionType.SMELT), multiplier);
    }
    /** Pays the recorded brewing-stand owner when a brew completes. */
    @EventHandler(priority=EventPriority.MONITOR, ignoreCancelled=true)
    public void onBrewEvent(BrewEvent event) {
        if (!plugin.isEnabled())
            return;
        Block block = event.getBlock();
        if (block == null)
            return;
        Player player = getMetadataOwner(block, brewingOwnerMetadata);
        if (player == null)
            return;
        if (!Jobs.getPermissionHandler().hasWorldPermission(player, player.getLocation().getWorld()))
            return;
        double multiplier = ConfigManager.getJobsConfiguration().getRestrictedMultiplier(player);
        JobsPlayer jPlayer = Jobs.getPlayerManager().getJobsPlayer(player.getName());
        Jobs.action(jPlayer, new ItemActionInfo(event.getContents().getIngredient(), ActionType.BREW), multiplier);
    }
    /**
     * Resolves the online player recorded under {@code metadataKey} on the
     * given block, or null when no owner is recorded or the owner is offline.
     */
    private Player getMetadataOwner(Block block, String metadataKey) {
        if (!block.hasMetadata(metadataKey))
            return null;
        List<MetadataValue> data = block.getMetadata(metadataKey);
        if (data.isEmpty())
            return null;
        // only the first metadata entry is meaningful
        String playerName = data.get(0).asString();
        Player player = Jobs.getServer().getPlayerExact(playerName);
        if (player == null || !player.isOnline())
            return null;
        return player;
    }
    /**
     * Pays the killer when an entity dies, resolving the responsible player
     * through direct damage, projectiles, or tamed animals.
     */
    @EventHandler(priority=EventPriority.MONITOR)
    public void onEntityDeath(EntityDeathEvent event) {
        // entity that died must be living
        if(!(event.getEntity() instanceof LivingEntity))
            return;
        LivingEntity lVictim = (LivingEntity)event.getEntity();
        // mob spawner kills earn no payment or experience
        if (lVictim.hasMetadata(mobSpawnerMetadata)) {
            lVictim.removeMetadata(mobSpawnerMetadata, plugin);
            return;
        }
        // make sure plugin is enabled
        if(!plugin.isEnabled())
            return;
        if (event.getEntity().getLastDamageCause() instanceof EntityDamageByEntityEvent){
            EntityDamageByEntityEvent e = (EntityDamageByEntityEvent)event.getEntity().getLastDamageCause();
            org.bukkit.entity.Player pDamager = null;
            if(e.getDamager() instanceof org.bukkit.entity.Player) {
                pDamager = (org.bukkit.entity.Player) e.getDamager();
            } else if(e.getDamager() instanceof Projectile && ((Projectile)e.getDamager()).getShooter() instanceof org.bukkit.entity.Player) {
                pDamager = (org.bukkit.entity.Player)((Projectile)e.getDamager()).getShooter();
            } else if(e.getDamager() instanceof Tameable) {
                // a tamed pet's kill is credited to its owner
                Tameable t = (Tameable) e.getDamager();
                if (t.isTamed() && t.getOwner() instanceof org.bukkit.entity.Player) {
                    pDamager = (org.bukkit.entity.Player) t.getOwner();
                }
            }
            if(pDamager != null) {
                // creative-mode actions are only paid when explicitly enabled
                if (pDamager.getGameMode().equals(GameMode.CREATIVE) && !ConfigManager.getJobsConfiguration().payInCreative())
                    return;
                Player player = BukkitUtil.wrapPlayer(pDamager);
                if (!Jobs.getPermissionHandler().hasWorldPermission(player, player.getLocation().getWorld()))
                    return;
                // restricted area multiplier
                double multiplier = ConfigManager.getJobsConfiguration().getRestrictedMultiplier(player);
                JobsPlayer jDamager = Jobs.getPlayerManager().getJobsPlayer(player.getName());
                Jobs.action(jDamager, new EntityActionInfo(lVictim.getType(), ActionType.KILL), multiplier);
            }
        }
    }
    /**
     * Tags spawner-spawned mobs so their deaths are not paid, unless the
     * configuration allows paying near spawners.
     */
    @EventHandler(priority=EventPriority.MONITOR, ignoreCancelled=true)
    public void onCreatureSpawn(CreatureSpawnEvent event) {
        if(!(event.getEntity() instanceof LivingEntity))
            return;
        if(!event.getSpawnReason().equals(SpawnReason.SPAWNER))
            return;
        if(ConfigManager.getJobsConfiguration().payNearSpawner())
            return;
        LivingEntity creature = (LivingEntity)event.getEntity();
        creature.setMetadata(mobSpawnerMetadata, new FixedMetadataValue(plugin, true));
    }
    /**
     * Records the interacting player as the owner of a furnace or brewing
     * stand so later smelt/brew results can be credited to them.
     */
    @EventHandler(priority=EventPriority.MONITOR, ignoreCancelled=true)
    public void onPlayerInteract(PlayerInteractEvent event) {
        if (!plugin.isEnabled())
            return;
        Block block = event.getClickedBlock();
        if (block == null)
            return;
        if (block.getType().equals(Material.FURNACE)) {
            if (block.hasMetadata(furnaceOwnerMetadata))
                block.removeMetadata(furnaceOwnerMetadata, plugin);
            block.setMetadata(furnaceOwnerMetadata, new FixedMetadataValue(plugin, event.getPlayer().getName()));
        } else if (block.getType().equals(Material.BREWING_STAND)) {
            if (block.hasMetadata(brewingOwnerMetadata))
                block.removeMetadata(brewingOwnerMetadata, plugin);
            block.setMetadata(brewingOwnerMetadata, new FixedMetadataValue(plugin, event.getPlayer().getName()));
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.executiongraph;
import org.apache.flink.api.common.JobID;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.configuration.CheckpointingOptions;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.IllegalConfigurationException;
import org.apache.flink.configuration.JobManagerOptions;
import org.apache.flink.configuration.WebOptions;
import org.apache.flink.metrics.MetricGroup;
import org.apache.flink.runtime.JobException;
import org.apache.flink.runtime.blob.BlobWriter;
import org.apache.flink.runtime.checkpoint.CheckpointIDCounter;
import org.apache.flink.runtime.checkpoint.CheckpointRecoveryFactory;
import org.apache.flink.runtime.checkpoint.CheckpointStatsTracker;
import org.apache.flink.runtime.checkpoint.CompletedCheckpointStore;
import org.apache.flink.runtime.checkpoint.MasterTriggerRestoreHook;
import org.apache.flink.runtime.checkpoint.hooks.MasterHooks;
import org.apache.flink.runtime.client.JobExecutionException;
import org.apache.flink.runtime.client.JobSubmissionException;
import org.apache.flink.runtime.executiongraph.failover.FailoverStrategy;
import org.apache.flink.runtime.executiongraph.failover.FailoverStrategyLoader;
import org.apache.flink.runtime.executiongraph.failover.flip1.partitionrelease.PartitionReleaseStrategy;
import org.apache.flink.runtime.executiongraph.failover.flip1.partitionrelease.PartitionReleaseStrategyFactoryLoader;
import org.apache.flink.runtime.executiongraph.metrics.DownTimeGauge;
import org.apache.flink.runtime.executiongraph.metrics.NumberOfFullRestartsGauge;
import org.apache.flink.runtime.executiongraph.metrics.RestartTimeGauge;
import org.apache.flink.runtime.executiongraph.metrics.UpTimeGauge;
import org.apache.flink.runtime.executiongraph.restart.RestartStrategy;
import org.apache.flink.runtime.io.network.partition.PartitionTracker;
import org.apache.flink.runtime.jobgraph.JobGraph;
import org.apache.flink.runtime.jobgraph.JobVertex;
import org.apache.flink.runtime.jobgraph.JobVertexID;
import org.apache.flink.runtime.jobgraph.jsonplan.JsonPlanGenerator;
import org.apache.flink.runtime.jobgraph.tasks.CheckpointCoordinatorConfiguration;
import org.apache.flink.runtime.jobgraph.tasks.JobCheckpointingSettings;
import org.apache.flink.runtime.jobmaster.slotpool.SlotProvider;
import org.apache.flink.runtime.shuffle.ShuffleMaster;
import org.apache.flink.runtime.state.StateBackend;
import org.apache.flink.runtime.state.StateBackendLoader;
import org.apache.flink.util.DynamicCodeLoadingException;
import org.apache.flink.util.SerializedValue;
import org.slf4j.Logger;
import javax.annotation.Nullable;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.Executor;
import java.util.concurrent.ScheduledExecutorService;
import static org.apache.flink.util.Preconditions.checkNotNull;
/**
* Utility class to encapsulate the logic of building an {@link ExecutionGraph} from a {@link JobGraph}.
*/
public class ExecutionGraphBuilder {
/**
* Builds the ExecutionGraph from the JobGraph.
* If a prior execution graph exists, the JobGraph will be attached. If no prior execution
* graph exists, then the JobGraph will become attach to a new empty execution graph.
*/
public static ExecutionGraph buildGraph(
@Nullable ExecutionGraph prior,
JobGraph jobGraph,
Configuration jobManagerConfig,
ScheduledExecutorService futureExecutor,
Executor ioExecutor,
SlotProvider slotProvider,
ClassLoader classLoader,
CheckpointRecoveryFactory recoveryFactory,
Time rpcTimeout,
RestartStrategy restartStrategy,
MetricGroup metrics,
BlobWriter blobWriter,
Time allocationTimeout,
Logger log,
ShuffleMaster<?> shuffleMaster,
PartitionTracker partitionTracker) throws JobExecutionException, JobException {
checkNotNull(jobGraph, "job graph cannot be null");
final String jobName = jobGraph.getName();
final JobID jobId = jobGraph.getJobID();
final FailoverStrategy.Factory failoverStrategy =
FailoverStrategyLoader.loadFailoverStrategy(jobManagerConfig, log);
final JobInformation jobInformation = new JobInformation(
jobId,
jobName,
jobGraph.getSerializedExecutionConfig(),
jobGraph.getJobConfiguration(),
jobGraph.getUserJarBlobKeys(),
jobGraph.getClasspaths());
final int maxPriorAttemptsHistoryLength =
jobManagerConfig.getInteger(JobManagerOptions.MAX_ATTEMPTS_HISTORY_SIZE);
final PartitionReleaseStrategy.Factory partitionReleaseStrategyFactory =
PartitionReleaseStrategyFactoryLoader.loadPartitionReleaseStrategyFactory(jobManagerConfig);
final boolean forcePartitionReleaseOnConsumption =
jobManagerConfig.getBoolean(JobManagerOptions.FORCE_PARTITION_RELEASE_ON_CONSUMPTION);
// create a new execution graph, if none exists so far
final ExecutionGraph executionGraph;
try {
executionGraph = (prior != null) ? prior :
new ExecutionGraph(
jobInformation,
futureExecutor,
ioExecutor,
rpcTimeout,
restartStrategy,
maxPriorAttemptsHistoryLength,
failoverStrategy,
slotProvider,
classLoader,
blobWriter,
allocationTimeout,
partitionReleaseStrategyFactory,
shuffleMaster,
forcePartitionReleaseOnConsumption,
partitionTracker);
} catch (IOException e) {
throw new JobException("Could not create the ExecutionGraph.", e);
}
// set the basic properties
executionGraph.setScheduleMode(jobGraph.getScheduleMode());
executionGraph.setQueuedSchedulingAllowed(jobGraph.getAllowQueuedScheduling());
try {
executionGraph.setJsonPlan(JsonPlanGenerator.generatePlan(jobGraph));
}
catch (Throwable t) {
log.warn("Cannot create JSON plan for job", t);
// give the graph an empty plan
executionGraph.setJsonPlan("{}");
}
// initialize the vertices that have a master initialization hook
// file output formats create directories here, input formats create splits
final long initMasterStart = System.nanoTime();
log.info("Running initialization on master for job {} ({}).", jobName, jobId);
for (JobVertex vertex : jobGraph.getVertices()) {
String executableClass = vertex.getInvokableClassName();
if (executableClass == null || executableClass.isEmpty()) {
throw new JobSubmissionException(jobId,
"The vertex " + vertex.getID() + " (" + vertex.getName() + ") has no invokable class.");
}
try {
vertex.initializeOnMaster(classLoader);
}
catch (Throwable t) {
throw new JobExecutionException(jobId,
"Cannot initialize task '" + vertex.getName() + "': " + t.getMessage(), t);
}
}
log.info("Successfully ran initialization on master in {} ms.",
(System.nanoTime() - initMasterStart) / 1_000_000);
// topologically sort the job vertices and attach the graph to the existing one
List<JobVertex> sortedTopology = jobGraph.getVerticesSortedTopologicallyFromSources();
if (log.isDebugEnabled()) {
log.debug("Adding {} vertices from job graph {} ({}).", sortedTopology.size(), jobName, jobId);
}
executionGraph.attachJobGraph(sortedTopology);
if (log.isDebugEnabled()) {
log.debug("Successfully created execution graph from job graph {} ({}).", jobName, jobId);
}
// configure the state checkpointing
JobCheckpointingSettings snapshotSettings = jobGraph.getCheckpointingSettings();
if (snapshotSettings != null) {
List<ExecutionJobVertex> triggerVertices =
idToVertex(snapshotSettings.getVerticesToTrigger(), executionGraph);
List<ExecutionJobVertex> ackVertices =
idToVertex(snapshotSettings.getVerticesToAcknowledge(), executionGraph);
List<ExecutionJobVertex> confirmVertices =
idToVertex(snapshotSettings.getVerticesToConfirm(), executionGraph);
CompletedCheckpointStore completedCheckpoints;
CheckpointIDCounter checkpointIdCounter;
try {
int maxNumberOfCheckpointsToRetain = jobManagerConfig.getInteger(
CheckpointingOptions.MAX_RETAINED_CHECKPOINTS);
if (maxNumberOfCheckpointsToRetain <= 0) {
// warning and use 1 as the default value if the setting in
// state.checkpoints.max-retained-checkpoints is not greater than 0.
log.warn("The setting for '{} : {}' is invalid. Using default value of {}",
CheckpointingOptions.MAX_RETAINED_CHECKPOINTS.key(),
maxNumberOfCheckpointsToRetain,
CheckpointingOptions.MAX_RETAINED_CHECKPOINTS.defaultValue());
maxNumberOfCheckpointsToRetain = CheckpointingOptions.MAX_RETAINED_CHECKPOINTS.defaultValue();
}
completedCheckpoints = recoveryFactory.createCheckpointStore(jobId, maxNumberOfCheckpointsToRetain, classLoader);
checkpointIdCounter = recoveryFactory.createCheckpointIDCounter(jobId);
}
catch (Exception e) {
throw new JobExecutionException(jobId, "Failed to initialize high-availability checkpoint handler", e);
}
// Maximum number of remembered checkpoints
int historySize = jobManagerConfig.getInteger(WebOptions.CHECKPOINTS_HISTORY_SIZE);
CheckpointStatsTracker checkpointStatsTracker = new CheckpointStatsTracker(
historySize,
ackVertices,
snapshotSettings.getCheckpointCoordinatorConfiguration(),
metrics);
// load the state backend from the application settings
final StateBackend applicationConfiguredBackend;
final SerializedValue<StateBackend> serializedAppConfigured = snapshotSettings.getDefaultStateBackend();
if (serializedAppConfigured == null) {
applicationConfiguredBackend = null;
}
else {
try {
applicationConfiguredBackend = serializedAppConfigured.deserializeValue(classLoader);
} catch (IOException | ClassNotFoundException e) {
throw new JobExecutionException(jobId,
"Could not deserialize application-defined state backend.", e);
}
}
final StateBackend rootBackend;
try {
rootBackend = StateBackendLoader.fromApplicationOrConfigOrDefault(
applicationConfiguredBackend, jobManagerConfig, classLoader, log);
}
catch (IllegalConfigurationException | IOException | DynamicCodeLoadingException e) {
throw new JobExecutionException(jobId, "Could not instantiate configured state backend", e);
}
// instantiate the user-defined checkpoint hooks
final SerializedValue<MasterTriggerRestoreHook.Factory[]> serializedHooks = snapshotSettings.getMasterHooks();
final List<MasterTriggerRestoreHook<?>> hooks;
if (serializedHooks == null) {
hooks = Collections.emptyList();
}
else {
final MasterTriggerRestoreHook.Factory[] hookFactories;
try {
hookFactories = serializedHooks.deserializeValue(classLoader);
}
catch (IOException | ClassNotFoundException e) {
throw new JobExecutionException(jobId, "Could not instantiate user-defined checkpoint hooks", e);
}
final Thread thread = Thread.currentThread();
final ClassLoader originalClassLoader = thread.getContextClassLoader();
thread.setContextClassLoader(classLoader);
try {
hooks = new ArrayList<>(hookFactories.length);
for (MasterTriggerRestoreHook.Factory factory : hookFactories) {
hooks.add(MasterHooks.wrapHook(factory.create(), classLoader));
}
}
finally {
thread.setContextClassLoader(originalClassLoader);
}
}
final CheckpointCoordinatorConfiguration chkConfig = snapshotSettings.getCheckpointCoordinatorConfiguration();
executionGraph.enableCheckpointing(
chkConfig,
triggerVertices,
ackVertices,
confirmVertices,
hooks,
checkpointIdCounter,
completedCheckpoints,
rootBackend,
checkpointStatsTracker);
}
// create all the metrics for the Execution Graph
metrics.gauge(RestartTimeGauge.METRIC_NAME, new RestartTimeGauge(executionGraph));
metrics.gauge(DownTimeGauge.METRIC_NAME, new DownTimeGauge(executionGraph));
metrics.gauge(UpTimeGauge.METRIC_NAME, new UpTimeGauge(executionGraph));
metrics.gauge(NumberOfFullRestartsGauge.METRIC_NAME, new NumberOfFullRestartsGauge(executionGraph));
executionGraph.getFailoverStrategy().registerMetrics(metrics);
return executionGraph;
}
/**
 * Resolves each {@link JobVertexID} in the given list against the execution graph.
 *
 * @param jobVertices ids to resolve (order is preserved in the result)
 * @param executionGraph graph to look the vertices up in
 * @return the matching execution job vertices, in input order
 * @throws IllegalArgumentException if any id does not exist in the graph
 */
private static List<ExecutionJobVertex> idToVertex(
        List<JobVertexID> jobVertices, ExecutionGraph executionGraph) throws IllegalArgumentException {
    final List<ExecutionJobVertex> resolved = new ArrayList<>(jobVertices.size());
    for (JobVertexID vertexId : jobVertices) {
        final ExecutionJobVertex executionVertex = executionGraph.getJobVertex(vertexId);
        // Fail fast on a dangling reference rather than returning a partial list.
        if (executionVertex == null) {
            throw new IllegalArgumentException(
                    "The snapshot checkpointing settings refer to non-existent vertex " + vertexId);
        }
        resolved.add(executionVertex);
    }
    return resolved;
}
// ------------------------------------------------------------------------
/** This class is not supposed to be instantiated; it only exposes static factory methods. */
private ExecutionGraphBuilder() {}
}
| |
/*
* Jitsi, the OpenSource Java VoIP and Instant Messaging client.
*
* Distributable under LGPL license.
* See terms of license at gnu.org.
*/
package net.java.sip.communicator.plugin.otr.authdialog;
import java.awt.*;
import java.util.*;
import java.util.List;
import javax.swing.table.*;
import net.java.sip.communicator.plugin.otr.*;
import net.java.sip.communicator.plugin.otr.OtrContactManager.OtrContact;
import net.java.sip.communicator.service.contactlist.*;
import net.java.sip.communicator.service.protocol.*;
import org.osgi.framework.*;
/**
 * A special {@link Panel} for fingerprints display.
 *
 * Rows of this table model are the flattened fingerprint lists of all known
 * contacts: every (contact, fingerprint) pair occupies one row, in the
 * insertion order of {@link #allContactsFingerprints}.
 *
 * @author George Politis
 * @author Yana Stamcheva
 */
public class KnownFingerprintsTableModel
    extends AbstractTableModel
    implements ScOtrKeyManagerListener
{
    /**
     * Serial version UID.
     */
    private static final long serialVersionUID = 0L;

    /** Index of the column showing the contact display name. */
    public static final int CONTACTNAME_INDEX = 0;

    /** Index of the column showing the verification status. */
    public static final int VERIFIED_INDEX = 1;

    /** Index of the column showing the fingerprint itself. */
    public static final int FINGERPRINT_INDEX = 2;

    /**
     * Maps every known contact to the list of its remote fingerprints.
     * Iteration order of this map defines the row order of the model.
     */
    public final LinkedHashMap<Contact, List<String>> allContactsFingerprints =
        new LinkedHashMap<Contact, List<String>>();

    /**
     * Collects all contacts of all registered protocol providers together
     * with their known remote fingerprints and registers this instance as a
     * key manager listener so the model stays up to date.
     */
    public KnownFingerprintsTableModel()
    {
        // Get the protocol providers currently registered with OSGi.
        ServiceReference[] protocolProviderRefs = null;
        try
        {
            protocolProviderRefs =
                OtrActivator.bundleContext
                    .getServiceReferences(
                        ProtocolProviderService.class.getName(), null);
        }
        catch (InvalidSyntaxException ex)
        {
            // Cannot happen with a null filter; leave the model empty.
            return;
        }
        if (protocolProviderRefs == null
                || protocolProviderRefs.length < 1)
            return;

        // Populate contacts.
        for (int i = 0; i < protocolProviderRefs.length; i++)
        {
            ProtocolProviderService provider
                = (ProtocolProviderService) OtrActivator
                    .bundleContext
                        .getService(protocolProviderRefs[i]);
            Iterator<MetaContact> metaContacts =
                OtrActivator.getContactListService()
                    .findAllMetaContactsForProvider(provider);
            while (metaContacts.hasNext())
            {
                MetaContact metaContact = metaContacts.next();
                Iterator<Contact> contacts = metaContact.getContacts();
                while (contacts.hasNext())
                {
                    Contact contact = contacts.next();
                    allContactsFingerprints.put(
                        contact,
                        OtrActivator.scOtrKeyManager.getAllRemoteFingerprints(
                            contact));
                }
            }
        }
        OtrActivator.scOtrKeyManager.addListener(this);
    }

    /**
     * Implements AbstractTableModel#getColumnName(int).
     */
    @Override
    public String getColumnName(int column)
    {
        switch (column)
        {
        case CONTACTNAME_INDEX:
            return OtrActivator.resourceService
                .getI18NString(
                    "plugin.otr.configform.COLUMN_NAME_CONTACT");
        case VERIFIED_INDEX:
            return OtrActivator.resourceService
                .getI18NString(
                    "plugin.otr.configform.COLUMN_NAME_VERIFIED_STATUS");
        case FINGERPRINT_INDEX:
            return OtrActivator.resourceService
                .getI18NString(
                    "plugin.otr.configform.FINGERPRINT");
        default:
            return null;
        }
    }

    /**
     * Implements AbstractTableModel#getValueAt(int,int).
     */
    public Object getValueAt(int row, int column)
    {
        Contact contact = getContactFromRow(row);
        String fingerprint = getFingerprintFromRow(row);
        switch (column)
        {
        case CONTACTNAME_INDEX:
            return contact.getDisplayName();
        case VERIFIED_INDEX:
            // TODO: Maybe use a CheckBoxColumn?
            return (OtrActivator.scOtrKeyManager
                        .isVerified(contact, fingerprint))
                ? OtrActivator.resourceService.getI18NString(
                    "plugin.otr.configform.COLUMN_VALUE_VERIFIED_TRUE")
                : OtrActivator.resourceService.getI18NString(
                    "plugin.otr.configform.COLUMN_VALUE_VERIFIED_FALSE");
        case FINGERPRINT_INDEX:
            return fingerprint;
        default:
            return null;
        }
    }

    /**
     * Returns the contact that owns the fingerprint displayed at the given
     * row, or <tt>null</tt> if the row is out of range.
     *
     * @param row model row index
     * @return the owning contact, or <tt>null</tt>
     */
    Contact getContactFromRow(int row)
    {
        if (row < 0 || row >= getRowCount())
            return null;

        // Skip whole fingerprint lists until the requested row falls inside
        // the current contact's list.
        int remaining = row;
        for (Map.Entry<Contact, List<String>> entry :
            allContactsFingerprints.entrySet())
        {
            int size = entry.getValue().size();
            if (remaining < size)
                return entry.getKey();
            remaining -= size;
        }
        // Unreachable: the range check above guarantees a match.
        return null;
    }

    /**
     * Returns the fingerprint displayed at the given row, or <tt>null</tt>
     * if the row is out of range.
     *
     * @param row model row index
     * @return the fingerprint string, or <tt>null</tt>
     */
    String getFingerprintFromRow(int row)
    {
        if (row < 0 || row >= getRowCount())
            return null;

        // Same row-to-list mapping as getContactFromRow.
        int remaining = row;
        for (Map.Entry<Contact, List<String>> entry :
            allContactsFingerprints.entrySet())
        {
            List<String> fingerprints = entry.getValue();
            if (remaining < fingerprints.size())
                return fingerprints.get(remaining);
            remaining -= fingerprints.size();
        }
        // Unreachable: the range check above guarantees a match.
        return null;
    }

    /**
     * Implements AbstractTableModel#getRowCount().
     */
    public int getRowCount()
    {
        // One row per (contact, fingerprint) pair.
        int rowCount = 0;
        for (Map.Entry<Contact, List<String>> entry :
            allContactsFingerprints.entrySet())
            rowCount += entry.getValue().size();
        return rowCount;
    }

    /**
     * Implements AbstractTableModel#getColumnCount().
     */
    public int getColumnCount()
    {
        return 3;
    }

    /**
     * Refreshes the cached fingerprint list of the changed contact and
     * notifies table listeners.
     */
    @Override
    public void contactVerificationStatusChanged(OtrContact otrContact)
    {
        Contact contact = otrContact.contact;
        allContactsFingerprints.put(
            contact,
            OtrActivator.scOtrKeyManager.getAllRemoteFingerprints(contact));
        this.fireTableDataChanged();
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ambari.server.controller.internal;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import com.google.common.collect.Lists;
import com.google.gson.Gson;
import com.google.inject.Inject;
import com.google.inject.persist.Transactional;

import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.ObjectNotFoundException;
import org.apache.ambari.server.api.resources.OperatingSystemResourceDefinition;
import org.apache.ambari.server.api.resources.RepositoryResourceDefinition;
import org.apache.ambari.server.api.services.AmbariMetaInfo;
import org.apache.ambari.server.controller.spi.NoSuchParentResourceException;
import org.apache.ambari.server.controller.spi.NoSuchResourceException;
import org.apache.ambari.server.controller.spi.Predicate;
import org.apache.ambari.server.controller.spi.Request;
import org.apache.ambari.server.controller.spi.RequestStatus;
import org.apache.ambari.server.controller.spi.Resource;
import org.apache.ambari.server.controller.spi.Resource.Type;
import org.apache.ambari.server.controller.spi.ResourceAlreadyExistsException;
import org.apache.ambari.server.controller.spi.SystemException;
import org.apache.ambari.server.controller.spi.UnsupportedPropertyException;
import org.apache.ambari.server.controller.utilities.PropertyHelper;
import org.apache.ambari.server.orm.dao.ClusterVersionDAO;
import org.apache.ambari.server.orm.dao.RepositoryVersionDAO;
import org.apache.ambari.server.orm.dao.StackDAO;
import org.apache.ambari.server.orm.entities.ClusterVersionEntity;
import org.apache.ambari.server.orm.entities.OperatingSystemEntity;
import org.apache.ambari.server.orm.entities.RepositoryEntity;
import org.apache.ambari.server.orm.entities.RepositoryVersionEntity;
import org.apache.ambari.server.orm.entities.StackEntity;
import org.apache.ambari.server.state.OperatingSystemInfo;
import org.apache.ambari.server.state.RepositoryVersionState;
import org.apache.ambari.server.state.StackId;
import org.apache.ambari.server.state.StackInfo;
import org.apache.ambari.server.state.stack.upgrade.RepositoryVersionHelper;
import org.apache.commons.lang.ObjectUtils;
import org.apache.commons.lang.StringUtils;
/**
 * Resource provider for repository versions resources.
 */
public class RepositoryVersionResourceProvider extends AbstractResourceProvider {

  // ----- Property ID constants ---------------------------------------------

  public static final String REPOSITORY_VERSION_ID_PROPERTY_ID = PropertyHelper.getPropertyId("RepositoryVersions", "id");
  public static final String REPOSITORY_VERSION_STACK_NAME_PROPERTY_ID = PropertyHelper.getPropertyId("RepositoryVersions", "stack_name");
  public static final String REPOSITORY_VERSION_STACK_VERSION_PROPERTY_ID = PropertyHelper.getPropertyId("RepositoryVersions", "stack_version");
  public static final String REPOSITORY_VERSION_REPOSITORY_VERSION_PROPERTY_ID = PropertyHelper.getPropertyId("RepositoryVersions", "repository_version");
  public static final String REPOSITORY_VERSION_DISPLAY_NAME_PROPERTY_ID = PropertyHelper.getPropertyId("RepositoryVersions", "display_name");
  public static final String REPOSITORY_VERSION_UPGRADE_PACK_PROPERTY_ID = PropertyHelper.getPropertyId("RepositoryVersions", "upgrade_pack");
  public static final String SUBRESOURCE_OPERATING_SYSTEMS_PROPERTY_ID = new OperatingSystemResourceDefinition().getPluralName();
  public static final String SUBRESOURCE_REPOSITORIES_PROPERTY_ID = new RepositoryResourceDefinition().getPluralName();

  /**
   * Primary key property ids. Built with plain collection construction
   * instead of double-brace initialization, which would create an anonymous
   * inner subclass per field and require a serial-version suppression.
   */
  private static Set<String> pkPropertyIds = new HashSet<String>(
      Collections.singletonList(REPOSITORY_VERSION_ID_PROPERTY_ID));

  /** All property ids supported by this provider. */
  public static Set<String> propertyIds = new HashSet<String>(Arrays.asList(
      REPOSITORY_VERSION_ID_PROPERTY_ID,
      REPOSITORY_VERSION_REPOSITORY_VERSION_PROPERTY_ID,
      REPOSITORY_VERSION_DISPLAY_NAME_PROPERTY_ID,
      REPOSITORY_VERSION_STACK_NAME_PROPERTY_ID,
      REPOSITORY_VERSION_STACK_VERSION_PROPERTY_ID,
      REPOSITORY_VERSION_UPGRADE_PACK_PROPERTY_ID,
      SUBRESOURCE_OPERATING_SYSTEMS_PROPERTY_ID));

  /** Mapping of resource types to their key property ids. */
  public static Map<Type, String> keyPropertyIds = new HashMap<Type, String>();
  static {
    keyPropertyIds.put(Type.Stack, REPOSITORY_VERSION_STACK_NAME_PROPERTY_ID);
    keyPropertyIds.put(Type.StackVersion, REPOSITORY_VERSION_STACK_VERSION_PROPERTY_ID);
    keyPropertyIds.put(Type.RepositoryVersion, REPOSITORY_VERSION_ID_PROPERTY_ID);
  }

  @Inject
  private Gson gson;

  @Inject
  private RepositoryVersionDAO repositoryVersionDAO;

  @Inject
  private ClusterVersionDAO clusterVersionDAO;

  @Inject
  private AmbariMetaInfo ambariMetaInfo;

  @Inject
  private RepositoryVersionHelper repositoryVersionHelper;

  /**
   * Data access object used for lookup up stacks.
   */
  @Inject
  private StackDAO stackDAO;

  /**
   * Create a new resource provider.
   *
   */
  public RepositoryVersionResourceProvider() {
    super(propertyIds, keyPropertyIds);
  }

  /**
   * Creates repository version resources. Validates that all required
   * properties are present, that no repository version with the same display
   * name or the same stack+version already exists, and that the submitted
   * repositories pass {@link #validateRepositoryVersion}.
   */
  @Override
  public RequestStatus createResources(final Request request)
      throws SystemException,
      UnsupportedPropertyException,
      ResourceAlreadyExistsException,
      NoSuchParentResourceException {

    for (final Map<String, Object> properties : request.getProperties()) {
      createResources(new Command<Void>() {
        @Override
        public Void invoke() throws AmbariException {
          final String[] requiredProperties = {
              REPOSITORY_VERSION_DISPLAY_NAME_PROPERTY_ID,
              SUBRESOURCE_OPERATING_SYSTEMS_PROPERTY_ID,
              REPOSITORY_VERSION_STACK_NAME_PROPERTY_ID,
              REPOSITORY_VERSION_STACK_VERSION_PROPERTY_ID,
              REPOSITORY_VERSION_REPOSITORY_VERSION_PROPERTY_ID
          };
          // All required properties must be provided explicitly.
          for (String propertyName: requiredProperties) {
            if (properties.get(propertyName) == null) {
              throw new AmbariException("Property " + propertyName + " should be provided");
            }
          }
          final RepositoryVersionEntity entity = toRepositoryVersionEntity(properties);

          // Display name and (stack, version) must each be unique.
          if (repositoryVersionDAO.findByDisplayName(entity.getDisplayName()) != null) {
            throw new AmbariException("Repository version with name " + entity.getDisplayName() + " already exists");
          }
          if (repositoryVersionDAO.findByStackAndVersion(entity.getStack(), entity.getVersion()) != null) {
            throw new AmbariException("Repository version for stack " + entity.getStack() + " and version " + entity.getVersion() + " already exists");
          }
          validateRepositoryVersion(entity);
          repositoryVersionDAO.create(entity);
          notifyCreate(Resource.Type.RepositoryVersion, request);
          return null;
        }
      });
    }

    return getRequestStatus(null);
  }

  /**
   * Retrieves repository version resources matching the predicate. If only a
   * stack is addressed, all versions of that stack are returned; otherwise
   * the resource is looked up by its numerical id.
   */
  @Override
  public Set<Resource> getResources(Request request, Predicate predicate)
      throws SystemException, UnsupportedPropertyException, NoSuchResourceException, NoSuchParentResourceException {
    final Set<Resource> resources = new HashSet<Resource>();
    final Set<String> requestedIds = getRequestPropertyIds(request, predicate);
    final Set<Map<String, Object>> propertyMaps = getPropertyMaps(predicate);

    List<RepositoryVersionEntity> requestedEntities = new ArrayList<RepositoryVersionEntity>();
    for (Map<String, Object> propertyMap: propertyMaps) {
      final StackId stackId = getStackInformationFromUrl(propertyMap);

      if (stackId != null && propertyMaps.size() == 1 && propertyMap.get(REPOSITORY_VERSION_ID_PROPERTY_ID) == null) {
        // Collection request under a stack: return all of its versions.
        requestedEntities.addAll(repositoryVersionDAO.findByStack(stackId));
      } else {
        final Long id;
        try {
          id = Long.parseLong(propertyMap.get(REPOSITORY_VERSION_ID_PROPERTY_ID).toString());
        } catch (Exception ex) {
          throw new SystemException("Repository version should have numerical id");
        }
        final RepositoryVersionEntity entity = repositoryVersionDAO.findByPK(id);
        if (entity == null) {
          throw new NoSuchResourceException("There is no repository version with id " + id);
        } else {
          requestedEntities.add(entity);
        }
      }
    }

    for (RepositoryVersionEntity entity: requestedEntities) {
      final Resource resource = new ResourceImpl(Resource.Type.RepositoryVersion);

      setResourceProperty(resource, REPOSITORY_VERSION_ID_PROPERTY_ID, entity.getId(), requestedIds);
      setResourceProperty(resource, REPOSITORY_VERSION_STACK_NAME_PROPERTY_ID, entity.getStackName(), requestedIds);
      setResourceProperty(resource, REPOSITORY_VERSION_STACK_VERSION_PROPERTY_ID, entity.getStackVersion(), requestedIds);
      setResourceProperty(resource, REPOSITORY_VERSION_DISPLAY_NAME_PROPERTY_ID, entity.getDisplayName(), requestedIds);
      setResourceProperty(resource, REPOSITORY_VERSION_UPGRADE_PACK_PROPERTY_ID, entity.getUpgradePackage(), requestedIds);
      setResourceProperty(resource, REPOSITORY_VERSION_REPOSITORY_VERSION_PROPERTY_ID, entity.getVersion(), requestedIds);

      resources.add(resource);
    }
    return resources;
  }

  /**
   * Updates repository version resources. The upgrade pack may only be
   * changed while no cluster is using the version; operating systems and the
   * display name may be changed as well. Repository base URLs are propagated
   * into the metainfo as part of the same transaction.
   */
  @Override
  @Transactional
  public RequestStatus updateResources(Request request, Predicate predicate)
    throws SystemException, UnsupportedPropertyException, NoSuchResourceException, NoSuchParentResourceException {
    final Set<Map<String, Object>> propertyMaps = request.getProperties();

    modifyResources(new Command<Void>() {
      @Override
      public Void invoke() throws AmbariException {
        for (Map<String, Object> propertyMap : propertyMaps) {
          final Long id;
          try {
            id = Long.parseLong(propertyMap.get(REPOSITORY_VERSION_ID_PROPERTY_ID).toString());
          } catch (Exception ex) {
            throw new AmbariException("Repository version should have numerical id");
          }

          final RepositoryVersionEntity entity = repositoryVersionDAO.findByPK(id);
          if (entity == null) {
            throw new ObjectNotFoundException("There is no repository version with id " + id);
          }

          if (StringUtils.isNotBlank(ObjectUtils.toString(propertyMap.get(REPOSITORY_VERSION_UPGRADE_PACK_PROPERTY_ID)))) {
            StackEntity stackEntity = entity.getStack();
            String stackName = stackEntity.getStackName();
            String stackVersion = stackEntity.getStackVersion();

            // The upgrade pack is frozen once any cluster has this version.
            final List<ClusterVersionEntity> clusterVersionEntities = clusterVersionDAO.findByStackAndVersion(
                stackName, stackVersion, entity.getVersion());

            if (!clusterVersionEntities.isEmpty()) {
              final ClusterVersionEntity firstClusterVersion = clusterVersionEntities.get(0);
              throw new AmbariException("Upgrade pack can't be changed for repository version which is " +
                firstClusterVersion.getState().name() + " on cluster " + firstClusterVersion.getClusterEntity().getClusterName());
            }

            final String upgradePackage = propertyMap.get(REPOSITORY_VERSION_UPGRADE_PACK_PROPERTY_ID).toString();
            entity.setUpgradePackage(upgradePackage);
          }

          List<OperatingSystemEntity> operatingSystemEntities = null;

          if (StringUtils.isNotBlank(ObjectUtils.toString(propertyMap.get(SUBRESOURCE_OPERATING_SYSTEMS_PROPERTY_ID)))) {
            final Object operatingSystems = propertyMap.get(SUBRESOURCE_OPERATING_SYSTEMS_PROPERTY_ID);
            final String operatingSystemsJson = gson.toJson(operatingSystems);
            try {
              // Parse first so malformed JSON never reaches the entity.
              operatingSystemEntities = repositoryVersionHelper.parseOperatingSystems(operatingSystemsJson);
            } catch (Exception ex) {
              throw new AmbariException("Json structure for operating systems is incorrect", ex);
            }
            entity.setOperatingSystems(operatingSystemsJson);
          }

          if (StringUtils.isNotBlank(ObjectUtils.toString(propertyMap.get(REPOSITORY_VERSION_DISPLAY_NAME_PROPERTY_ID)))) {
            entity.setDisplayName(propertyMap.get(REPOSITORY_VERSION_DISPLAY_NAME_PROPERTY_ID).toString());
          }

          validateRepositoryVersion(entity);
          repositoryVersionDAO.merge(entity);

          //
          // Update metaInfo table as well
          //
          if (operatingSystemEntities != null) {
            String stackName = entity.getStackName();
            String stackVersion = entity.getStackVersion();
            for (OperatingSystemEntity osEntity : operatingSystemEntities) {
              List<RepositoryEntity> repositories = osEntity.getRepositories();
              for (RepositoryEntity repository : repositories) {
                ambariMetaInfo.updateRepoBaseURL(stackName, stackVersion, osEntity.getOsType(), repository.getRepositoryId(), repository.getBaseUrl());
              }
            }
          }
        }
        return null;
      }
    });

    return getRequestStatus(null);
  }

  /**
   * Deletes repository version resources. A version may not be deleted while
   * it is in an active state (current, installed/installing, upgraded or
   * upgrading) on any cluster.
   */
  @Override
  public RequestStatus deleteResources(Predicate predicate)
      throws SystemException, UnsupportedPropertyException, NoSuchResourceException, NoSuchParentResourceException {
    final Set<Map<String, Object>> propertyMaps = getPropertyMaps(predicate);

    final List<RepositoryVersionEntity> entitiesToBeRemoved = new ArrayList<RepositoryVersionEntity>();
    for (Map<String, Object> propertyMap : propertyMaps) {
      final Long id;
      try {
        id = Long.parseLong(propertyMap.get(REPOSITORY_VERSION_ID_PROPERTY_ID).toString());
      } catch (Exception ex) {
        throw new SystemException("Repository version should have numerical id");
      }

      final RepositoryVersionEntity entity = repositoryVersionDAO.findByPK(id);
      if (entity == null) {
        throw new NoSuchResourceException("There is no repository version with id " + id);
      }

      StackEntity stackEntity = entity.getStack();
      String stackName = stackEntity.getStackName();
      String stackVersion = stackEntity.getStackVersion();

      final List<ClusterVersionEntity> clusterVersionEntities = clusterVersionDAO.findByStackAndVersion(
          stackName, stackVersion, entity.getVersion());

      final List<RepositoryVersionState> forbiddenToDeleteStates = Lists.newArrayList(
          RepositoryVersionState.CURRENT,
          RepositoryVersionState.INSTALLED,
          RepositoryVersionState.INSTALLING,
          RepositoryVersionState.UPGRADED,
          RepositoryVersionState.UPGRADING);
      for (ClusterVersionEntity clusterVersionEntity : clusterVersionEntities) {
        if (clusterVersionEntity.getRepositoryVersion().getId().equals(id) && forbiddenToDeleteStates.contains(clusterVersionEntity.getState())) {
          throw new SystemException("Repository version can't be deleted as it is " +
              clusterVersionEntity.getState().name() + " on cluster " + clusterVersionEntity.getClusterEntity().getClusterName());
        }
      }

      entitiesToBeRemoved.add(entity);
    }

    // Remove only after every requested entity has passed the state check.
    for (RepositoryVersionEntity entity: entitiesToBeRemoved) {
      repositoryVersionDAO.remove(entity);
    }

    return getRequestStatus(null);
  }

  @Override
  protected Set<String> getPKPropertyIds() {
    return pkPropertyIds;
  }

  /**
   * Validates newly created repository versions to contain actual information.
   *
   * @param repositoryVersion repository version
   * @throws AmbariException exception with error message
   */
  protected void validateRepositoryVersion(RepositoryVersionEntity repositoryVersion) throws AmbariException {
    final StackId requiredStack = new StackId(repositoryVersion.getStack());

    final String stackName = requiredStack.getStackName();
    final String stackMajorVersion = requiredStack.getStackVersion();
    final String stackFullName = requiredStack.getStackId();

    // check that stack exists
    final StackInfo stackInfo = ambariMetaInfo.getStack(stackName, stackMajorVersion);
    if (stackInfo.getUpgradePacks() == null) {
      throw new AmbariException("Stack " + stackFullName + " doesn't have upgrade packages");
    }

    // List of all repo urls that are already added at stack
    Set<String> existingRepoUrls = new HashSet<String>();
    List<RepositoryVersionEntity> existingRepoVersions = repositoryVersionDAO.findByStack(requiredStack);
    for (RepositoryVersionEntity existingRepoVersion : existingRepoVersions) {
      for (OperatingSystemEntity operatingSystemEntity : existingRepoVersion.getOperatingSystems()) {
        for (RepositoryEntity repositoryEntity : operatingSystemEntity.getRepositories()) {
          if (! repositoryEntity.getRepositoryId().startsWith("HDP-UTILS") && // HDP-UTILS is shared between repo versions
              ! existingRepoVersion.getId().equals(repositoryVersion.getId())) { // Allow modifying already defined repo version
            existingRepoUrls.add(repositoryEntity.getBaseUrl());
          }
        }
      }
    }

    // check that repositories contain only supported operating systems
    final Set<String> osSupported = new HashSet<String>();
    for (OperatingSystemInfo osInfo: ambariMetaInfo.getOperatingSystems(stackName, stackMajorVersion)) {
      osSupported.add(osInfo.getOsType());
    }
    final Set<String> osRepositoryVersion = new HashSet<String>();
    for (OperatingSystemEntity os: repositoryVersion.getOperatingSystems()) {
      osRepositoryVersion.add(os.getOsType());

      for (RepositoryEntity repositoryEntity : os.getRepositories()) {
        String baseUrl = repositoryEntity.getBaseUrl();
        if (existingRepoUrls.contains(baseUrl)) {
          throw new AmbariException("Base url " + baseUrl + " is already defined for another repository version. " +
              "Setting up base urls that contain the same versions of components will cause rolling upgrade to fail.");
        }
      }
    }
    if (osRepositoryVersion.isEmpty()) {
      throw new AmbariException("At least one set of repositories for OS should be provided");
    }
    for (String os: osRepositoryVersion) {
      if (!osSupported.contains(os)) {
        throw new AmbariException("Operating system type " + os + " is not supported by stack " + stackFullName);
      }
    }

    if (!RepositoryVersionEntity.isVersionInStack(repositoryVersion.getStackId(), repositoryVersion.getVersion())) {
      throw new AmbariException(MessageFormat.format("Version {0} needs to belong to stack {1}",
          repositoryVersion.getVersion(), repositoryVersion.getStackName() + "-" + repositoryVersion.getStackVersion()));
    }
  }

  /**
   * Transforms map of json properties to repository version entity.
   *
   * @param properties json map
   * @return constructed entity
   * @throws AmbariException if some properties are missing or json has incorrect structure
   */
  protected RepositoryVersionEntity toRepositoryVersionEntity(Map<String, Object> properties) throws AmbariException {
    final RepositoryVersionEntity entity = new RepositoryVersionEntity();

    final String stackName = properties.get(REPOSITORY_VERSION_STACK_NAME_PROPERTY_ID).toString();
    final String stackVersion = properties.get(REPOSITORY_VERSION_STACK_VERSION_PROPERTY_ID).toString();

    StackEntity stackEntity = stackDAO.find(stackName, stackVersion);
    // Guard against a dangling stack reference so we fail with a clear
    // message instead of persisting an entity with a null stack.
    if (stackEntity == null) {
      throw new AmbariException("Could not find stack " + stackName + "-" + stackVersion);
    }

    entity.setDisplayName(properties.get(REPOSITORY_VERSION_DISPLAY_NAME_PROPERTY_ID).toString());
    entity.setStack(stackEntity);

    entity.setVersion(properties.get(REPOSITORY_VERSION_REPOSITORY_VERSION_PROPERTY_ID).toString());

    final Object operatingSystems = properties.get(SUBRESOURCE_OPERATING_SYSTEMS_PROPERTY_ID);
    final String operatingSystemsJson = gson.toJson(operatingSystems);
    try {
      repositoryVersionHelper.parseOperatingSystems(operatingSystemsJson);
    } catch (Exception ex) {
      throw new AmbariException("Json structure for operating systems is incorrect", ex);
    }
    entity.setOperatingSystems(operatingSystemsJson);

    entity.setUpgradePackage(repositoryVersionHelper.getUpgradePackageName(stackName, stackVersion, entity.getVersion()));

    return entity;
  }

  /**
   * Extracts the stack id from the request property map, if both stack name
   * and stack version are present.
   *
   * @param propertyMap request properties
   * @return the stack id, or {@code null} if the stack is not addressed
   */
  protected StackId getStackInformationFromUrl(Map<String, Object> propertyMap) {
    if (propertyMap.containsKey(REPOSITORY_VERSION_STACK_NAME_PROPERTY_ID) && propertyMap.containsKey(REPOSITORY_VERSION_STACK_VERSION_PROPERTY_ID)) {
      return new StackId(propertyMap.get(REPOSITORY_VERSION_STACK_NAME_PROPERTY_ID).toString(), propertyMap.get(REPOSITORY_VERSION_STACK_VERSION_PROPERTY_ID).toString());
    }
    return null;
  }

}
| |
/*
* Copyright 2014 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.overlord.dtgov.ui.server.services;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.List;
import javax.inject.Inject;
import org.apache.commons.lang.StringUtils;
import org.jboss.errai.bus.server.annotations.Service;
import org.oasis_open.docs.s_ramp.ns.s_ramp_v1.BaseArtifactType;
import org.overlord.dtgov.common.model.DtgovModel;
import org.overlord.dtgov.ui.client.shared.beans.ProcessBean;
import org.overlord.dtgov.ui.client.shared.beans.ProcessStatusEnum;
import org.overlord.dtgov.ui.client.shared.beans.ProcessesFilterBean;
import org.overlord.dtgov.ui.client.shared.beans.ProcessesResultSetBean;
import org.overlord.dtgov.ui.client.shared.exceptions.DtgovUiException;
import org.overlord.dtgov.ui.client.shared.services.IProcessService;
import org.overlord.dtgov.ui.server.services.dtgov.DtGovClientAccessor;
import org.overlord.dtgov.ui.server.services.dtgov.IDtgovClient;
import org.overlord.dtgov.ui.server.services.sramp.SrampApiClientAccessor;
import org.overlord.sramp.atom.err.SrampAtomException;
import org.overlord.sramp.client.SrampAtomApiClient;
import org.overlord.sramp.client.SrampClientException;
import org.overlord.sramp.client.SrampClientQuery;
import org.overlord.sramp.client.query.ArtifactSummary;
import org.overlord.sramp.client.query.QueryResultSet;
import org.overlord.sramp.common.SrampModelUtils;
/**
* Concrete implementation of the process service interface.
*
* @author David Virgil Naranjo
*/
@Service
public class ProcessService implements IProcessService {
private static final int PAGE_SIZE = 10;
@Inject
private SrampApiClientAccessor _srampClientAccessor;
@Inject
private DtGovClientAccessor _dtgovClientAccessor;
/**
* @see org.overlord.dtgov.ui.client.shared.services.IProcessService#search(org.overlord.dtgov.ui.client.shared.beans.ProcessesFilterBean, int, java.lang.String, boolean)
*/
@Override
public ProcessesResultSetBean search(ProcessesFilterBean filters, int page, String sortColumnId, boolean sortAscending) throws DtgovUiException {
int pageSize = PAGE_SIZE;
int req_startIndex = (page - 1) * pageSize;
SrampClientQuery query = null;
query = createQuery(filters);
SrampClientQuery scq = query.startIndex(req_startIndex).orderBy(sortColumnId);
if (sortAscending) {
scq = scq.ascending();
} else {
scq = scq.descending();
}
QueryResultSet resultSet = null;
try {
resultSet = scq.count(pageSize + 1).query();
} catch (SrampClientException e) {
throw new DtgovUiException(e);
} catch (SrampAtomException e) {
throw new DtgovUiException(e);
}
ProcessesResultSetBean bean=new ProcessesResultSetBean();
List<ProcessBean> processes = new ArrayList<ProcessBean>();
for (ArtifactSummary summary : resultSet) {
String status = summary.getCustomPropertyValue(DtgovModel.CUSTOM_PROPERTY_STATUS);
String workflow = summary.getCustomPropertyValue(DtgovModel.CUSTOM_PROPERTY_WORKFLOW);
String artifactName = summary.getCustomPropertyValue(DtgovModel.CUSTOM_PROPERTY_ARTIFACT_NAME);
String artifactId = summary.getCustomPropertyValue(DtgovModel.CUSTOM_PROPERTY_ARTIFACT_ID);
ProcessBean processBean = new ProcessBean(summary.getUuid(), workflow, artifactName, artifactId, ProcessStatusEnum.valueOf(status));
processes.add(processBean);
}
boolean hasMorePages = false;
if (processes.size() > pageSize) {
processes.remove(processes.get(processes.size() - 1));
hasMorePages = true;
}
// Does the server support opensearch style attributes? If so,
// use that information. Else figure it out from the request params.
if (resultSet.getTotalResults() != -1) {
bean.setItemsPerPage(pageSize);
bean.setStartIndex(resultSet.getStartIndex());
bean.set_totalResults(resultSet.getTotalResults());
} else {
bean.setItemsPerPage(pageSize);
bean.set_totalResults(hasMorePages ? pageSize + 1 : processes.size());
bean.setStartIndex(req_startIndex);
}
bean.setProcesses(processes);
return bean;
}
/**
* @see org.overlord.dtgov.ui.client.shared.services.IProcessService#abort(java.lang.String)
*/
@Override
public boolean abort(String uuid) throws DtgovUiException {
BaseArtifactType artifact;
try {
artifact = _srampClientAccessor.getClient().getArtifactMetaData(uuid);
} catch (SrampClientException e1) {
throw new DtgovUiException(e1);
} catch (SrampAtomException e1) {
throw new DtgovUiException(e1);
}
if (artifact != null) {
String processId = SrampModelUtils.getCustomProperty(artifact, DtgovModel.CUSTOM_PROPERTY_PROCESS_ID);
String targetUUID = SrampModelUtils.getCustomProperty(artifact, DtgovModel.CUSTOM_PROPERTY_ARTIFACT_ID);
IDtgovClient client = _dtgovClientAccessor.getClient();
try {
client.stopProcess(targetUUID, new Long(processId));
} catch (Exception e) {
throw new DtgovUiException(e);
}
return true;
} else {
return false;
}
}
/**
* Creates the query.
*
* @param filters
* the filters
* @return the sramp client query
*/
private SrampClientQuery createQuery(ProcessesFilterBean filters) {
StringBuilder queryBuilder = new StringBuilder();
// Initial query
queryBuilder.append("/s-ramp/ext/" + DtgovModel.WorkflowInstanceType); //$NON-NLS-1$
List<Object> params = new ArrayList<Object>();
if (filters != null) {
List<String> criteria = new ArrayList<String>();
if (filters.getArtifact() != null && filters.getArtifact().trim().length() > 0) {
criteria.add("fn:matches(@" + DtgovModel.CUSTOM_PROPERTY_ARTIFACT_NAME + ", ?)"); //$NON-NLS-1$ //$NON-NLS-2$
params.add(filters.getArtifact().replace("*", ".*")); //$NON-NLS-1$ //$NON-NLS-2$
}
if (StringUtils.isNotBlank(filters.getWorkflow())) {
criteria.add("@" + DtgovModel.CUSTOM_PROPERTY_WORKFLOW + " = ?"); //$NON-NLS-1$ //$NON-NLS-2$
params.add(filters.getWorkflow());
}
if (filters.getStatus() != null) {
criteria.add("@" + DtgovModel.CUSTOM_PROPERTY_STATUS + " = ?"); //$NON-NLS-1$ //$NON-NLS-2$
params.add(filters.getStatus().name());
}
// Now create the query predicate from the generated criteria
if (criteria.size() > 0) {
queryBuilder.append("["); //$NON-NLS-1$
queryBuilder.append(StringUtils.join(criteria, " and ")); //$NON-NLS-1$
queryBuilder.append("]"); //$NON-NLS-1$
}
}
// Create the query, and parameterize it
SrampAtomApiClient client = _srampClientAccessor.getClient();
SrampClientQuery query = client.buildQuery(queryBuilder.toString());
for (Object param : params) {
if (param instanceof String) {
query.parameter((String) param);
}
if (param instanceof Calendar) {
query.parameter((Calendar) param);
}
}
query.propertyName(DtgovModel.CUSTOM_PROPERTY_ARTIFACT_ID);
query.propertyName(DtgovModel.CUSTOM_PROPERTY_ARTIFACT_NAME);
query.propertyName(DtgovModel.CUSTOM_PROPERTY_WORKFLOW);
query.propertyName(DtgovModel.CUSTOM_PROPERTY_STATUS);
return query;
}
/**
 * Gets the s-ramp client accessor used to obtain the API client when
 * building queries (see {@code createQuery}).
 *
 * @return the sramp client accessor
 */
public SrampApiClientAccessor getSrampClientAccessor() {
    return _srampClientAccessor;
}
/**
 * Sets the s-ramp client accessor.
 *
 * @param srampClientAccessor
 *            the new sramp client accessor
 */
public void setSrampClientAccessor(SrampApiClientAccessor srampClientAccessor) {
    this._srampClientAccessor = srampClientAccessor;
}
/**
 * Gets the dtgov client accessor.
 *
 * @return the dtgov client accessor
 */
public DtGovClientAccessor getDtgovClientAccessor() {
    return _dtgovClientAccessor;
}
/**
 * Sets the dtgov client accessor.
 *
 * @param dtgovClientAccessor
 *            the new dtgov client accessor
 */
public void setDtgovClientAccessor(DtGovClientAccessor dtgovClientAccessor) {
    this._dtgovClientAccessor = dtgovClientAccessor;
}
}
| |
/*
* Copyright 2014, The Sporting Exchange Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.betfair.cougar.transport.socket;
import com.betfair.cougar.api.ExecutionContext;
import com.betfair.cougar.api.DehydratedExecutionContext;
import com.betfair.cougar.api.RequestUUID;
import com.betfair.cougar.api.geolocation.GeoLocationDetails;
import com.betfair.cougar.api.security.IdentityChain;
import com.betfair.cougar.api.security.IdentityToken;
import com.betfair.cougar.core.api.OperationBindingDescriptor;
import com.betfair.cougar.core.api.ServiceBindingDescriptor;
import com.betfair.cougar.core.api.ServiceVersion;
import com.betfair.cougar.core.api.ev.ExecutionResult;
import com.betfair.cougar.core.api.ev.ExecutionVenue;
import com.betfair.cougar.core.api.ev.OperationDefinition;
import com.betfair.cougar.core.api.ev.OperationKey;
import com.betfair.cougar.core.api.ev.TimeConstraints;
import com.betfair.cougar.core.api.exception.CougarException;
import com.betfair.cougar.core.api.exception.CougarServiceException;
import com.betfair.cougar.core.api.exception.ServerFaultCode;
import com.betfair.cougar.core.api.security.IdentityResolverFactory;
import com.betfair.cougar.core.api.tracing.Tracer;
import com.betfair.cougar.core.api.transcription.Parameter;
import com.betfair.cougar.core.api.transcription.ParameterType;
import com.betfair.cougar.core.impl.DefaultTimeConstraints;
import com.betfair.cougar.logging.EventLoggingRegistry;
import com.betfair.cougar.marshalling.api.socket.RemotableMethodInvocationMarshaller;
import com.betfair.cougar.netutil.nio.CougarProtocol;
import com.betfair.cougar.transport.api.CommandResolver;
import com.betfair.cougar.transport.api.CommandValidator;
import com.betfair.cougar.transport.api.ExecutionCommand;
import com.betfair.cougar.transport.api.protocol.CougarObjectInput;
import com.betfair.cougar.transport.api.protocol.CougarObjectOutput;
import com.betfair.cougar.netutil.nio.hessian.HessianObjectIOFactory;
import com.betfair.cougar.transport.api.protocol.socket.InvocationResponse;
import com.betfair.cougar.transport.api.protocol.socket.SocketOperationBindingDescriptor;
import com.betfair.cougar.transport.impl.AbstractCommandProcessor;
import com.betfair.cougar.util.RequestUUIDImpl;
import com.betfair.cougar.util.UUIDGeneratorImpl;
import org.junit.Before;
import org.junit.Test;
import org.mockito.*;
import org.springframework.beans.factory.annotation.Required;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.lang.reflect.Field;
import java.security.cert.X509Certificate;
import java.util.Collections;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.Executor;
import static org.junit.Assert.*;
import static org.mockito.Matchers.*;
import static org.mockito.Mockito.*;
/**
 * Unit test for @see SocketTransportCommandProcessorImpl
 */
public class SocketTransportCommandProcessorTest {

    // Identity of the operation exercised by every test: v1.0 TestingService.TestCall.
    private static final OperationKey key = new OperationKey(new ServiceVersion("v1.0"), "TestingService", "TestCall");
    // The operation under test returns a plain String.
    private static final ParameterType returnType = new ParameterType(String.class, null);
    // Canned invocation arguments handed back by the mocked marshaller.
    private static final Object[] args = { "arg1", new Integer(2), Boolean.TRUE};
    private static final long CORRELATION_ID = 9999L;

    private Date receivedTime = new Date();
    // Mutable so individual tests can back-date the request
    // (see createCommandResolver_WithTimeoutAndOldRequestTime).
    private Date requestTime = new Date();

    // Minimal execution-context stub returned by the mocked marshaller; only the
    // time accessors and request UUID return meaningful values.
    private final DehydratedExecutionContext ctx = new DehydratedExecutionContext() {
        @Override
        public GeoLocationDetails getLocation() {
            return null; //To change body of implemented methods use File | Settings | File Templates.
        }

        @Override
        public IdentityChain getIdentity() {
            return null; //To change body of implemented methods use File | Settings | File Templates.
        }

        @Override
        public RequestUUID getRequestUUID() {
            return new RequestUUIDImpl();
        }

        @Override
        public Date getReceivedTime() {
            return receivedTime;
        }

        @Override
        public Date getRequestTime() {
            return requestTime;
        }

        @Override
        public boolean traceLoggingEnabled() {
            return false;
        }

        @Override
        public List<IdentityToken> getIdentityTokens() {
            return null; //To change body of implemented methods use File | Settings | File Templates.
        }

        @Override
        public void setIdentityChain(IdentityChain chain) {
            //To change body of implemented methods use File | Settings | File Templates.
        }

        @Override
        public int getTransportSecurityStrengthFactor() {
            return 0; //To change body of implemented methods use File | Settings | File Templates.
        }

        @Override
        public boolean isTransportSecure() {
            return false; //To change body of implemented methods use File | Settings | File Templates.
        }
    };

    private SocketTransportCommandProcessor commandProcessor;

    @Mock
    private Tracer tracer;
    @Mock
    private ExecutionVenue ev;
    @Mock
    private Executor executor;
    @Mock
    private RemotableMethodInvocationMarshaller marshaller;
    @Mock
    private EventLoggingRegistry eventLoggingRegistry;

    // Captures everything the command processor writes back to the transport.
    private ByteArrayOutputStream out = new ByteArrayOutputStream();

    /** Wires a real SocketTransportCommandProcessor with mocked collaborators. */
    @Before
    public void init() {
        MockitoAnnotations.initMocks(this);
        RequestUUIDImpl.setGenerator(new UUIDGeneratorImpl());
        commandProcessor = new SocketTransportCommandProcessor();
        commandProcessor.setExecutionVenue(ev);
        commandProcessor.setExecutor(executor);
        commandProcessor.setMarshaller(marshaller);
        commandProcessor.setRegistry(eventLoggingRegistry);
        commandProcessor.setIdentityResolverFactory(Mockito.mock(IdentityResolverFactory.class));
        commandProcessor.setTracer(tracer);
    }

    /**
     * Test harness that delegates bind/start/resolve to the real command
     * processor and performs the detailed assertions inside
     * createCommandResolver(). The remainder of the AbstractCommandProcessor
     * contract is intentionally stubbed out.
     */
    private class SocketTransportCommandProcessorDelegator extends AbstractCommandProcessor<SocketTransportCommand> {
        @Override
        public void bind(ServiceBindingDescriptor operation) {
            commandProcessor.bind(operation);
        }

        @Override
        protected List<CommandValidator<SocketTransportCommand>> getCommandValidators() {
            return Collections.EMPTY_LIST;
        }

        @Override
        public CommandResolver<SocketTransportCommand> createCommandResolver(SocketTransportCommand command, Tracer tracer) {
            CommandResolver<SocketTransportCommand> commandResolver = commandProcessor.createCommandResolver(command, tracer);
            assertNotNull(commandResolver);
            verify(ev, atLeast(1)).getOperationDefinition(eq(key));
            // The resolved context must wrap the context the marshaller produced.
            ExecutionContext resolvedContext = commandResolver.resolveExecutionContext();
            assertTrue(resolvedContext instanceof SocketRequestContextImpl);
            try {
                // Reflection needed: the wrapped context is not exposed via any accessor.
                Field f = SocketRequestContextImpl.class.getDeclaredField("wrapped");
                f.setAccessible(true);
                assertEquals(ctx, f.get(resolvedContext));
            } catch (IllegalAccessException e) {
                e.printStackTrace();
                fail(e.getMessage());
            } catch (NoSuchFieldException e) {
                e.printStackTrace();
                fail(e.getMessage());
            }
            // Exactly one execution command should be resolved for this RPC call.
            Iterable<ExecutionCommand> iterable = commandResolver.resolveExecutionCommands();
            assertNotNull(iterable);
            Iterator<ExecutionCommand> iter = iterable.iterator();
            ExecutionCommand cmd = iter.next();
            assertNotNull(cmd);
            assertFalse(iter.hasNext());
            assertArrayEquals("arguments don't match", args, cmd.getArgs());
            assertEquals("Operation Key doesn't match", key, cmd.getOperationKey());
            try {
                final String success="success";
                // Reference stream: what an empty (flushed, unwritten) Hessian output
                // looks like; compared against what the command processor wrote.
                ByteArrayOutputStream bos = new ByteArrayOutputStream();
                CougarObjectOutput dos = new HessianObjectIOFactory(false).newCougarObjectOutput(bos, CougarProtocol.TRANSPORT_PROTOCOL_VERSION_MAX_SUPPORTED);
                dos.flush();
                //Test onResult
                cmd.onResult(new ExecutionResult(success));
                assertArrayEquals("CorrelationId wasn't written to the output stream correctly", bos.toByteArray(), out.toByteArray());
                verify(marshaller).writeInvocationResponse(argThat(matchesSuccessResponse(success)), any(CougarObjectOutput.class),anyByte());
                out.reset();
                //This is necessary because the mockito gubbins that records an operation cannot handle
                //to calls to the same object (marshaller in this case) so we need to stub a new one and
                //re-add it to the command processor.
                //http://mockito.googlecode.com/svn/branches/1.6/javadoc/org/mockito/Mockito.html
                marshaller = Mockito.mock(RemotableMethodInvocationMarshaller.class);
                commandProcessor.setMarshaller(marshaller);
                //Test onException
                CougarException ex = new CougarServiceException(ServerFaultCode.AcceptTypeNotValid, "BANG!");
                cmd.onResult(new ExecutionResult(ex));
                assertArrayEquals("CorrelationId wasn't written to the output stream correctly", bos.toByteArray(), out.toByteArray());
                verify(marshaller).writeInvocationResponse(argThat(matchesExceptionalResponse(ex)), any(CougarObjectOutput.class),anyByte());
            } catch (IOException ex) {
                fail("Should not have thrown IOException here");
            }
            return commandResolver;
        }

        // Matcher asserting a successful InvocationResponse carrying the given value.
        private ArgumentMatcher<InvocationResponse> matchesSuccessResponse(final Object responseValue) {
            return new ArgumentMatcher<InvocationResponse>() {
                @Override
                public boolean matches(Object argument) {
                    assertTrue(argument instanceof InvocationResponse);
                    InvocationResponse response = (InvocationResponse)argument;
                    assertTrue(response.isSuccess());
                    assertEquals(responseValue, (response.getResult()));
                    assertNull(response.getException());
                    return true;
                }
            };
        }

        // Matcher asserting a failed InvocationResponse carrying the given exception.
        private ArgumentMatcher<InvocationResponse> matchesExceptionalResponse(final Object responseValue) {
            return new ArgumentMatcher<InvocationResponse>() {
                @Override
                public boolean matches(Object argument) {
                    assertTrue(argument instanceof InvocationResponse);
                    InvocationResponse response = (InvocationResponse)argument;
                    assertFalse(response.isSuccess());
                    assertEquals(responseValue, ((InvocationResponse)argument).getException());
                    return true;
                }
            };
        }

        public RemotableMethodInvocationMarshaller getMarshaller() {
            return null;
        }

        public EventLoggingRegistry getRegistry() {
            return null;
        }

        @Required
        public void setMarshaller(RemotableMethodInvocationMarshaller marshaller) {
        }

        @Required
        public void setRegistry(EventLoggingRegistry registry) {
        }

        @Override
        public void writeErrorResponse(SocketTransportCommand command, DehydratedExecutionContext context, CougarException e, boolean traceStarted) {
        }

        @Override
        public ExecutionVenue getExecutionVenue() {
            return null;
        }

        @Override
        public void process(SocketTransportCommand command) {
        }

        @Override
        public void setExecutor(Executor executor) {
        }

        public void onCougarStart() {
            commandProcessor.onCougarStart();
        }
    }

    /**
     * Stubs a complete RPC command (session, marshaller behaviour, operation
     * definition), binds the service and resolves the command through the
     * delegator, which performs the assertions.
     *
     * @param toReturn the time constraints the mocked marshaller reports
     * @param tracer the tracer to resolve with
     * @return the resolver produced by the command processor
     * @throws IOException propagated from the transport plumbing
     */
    private CommandResolver<SocketTransportCommand> createCommandResolver(TimeConstraints toReturn, Tracer tracer) throws IOException {
        SocketTransportRPCCommand command = Mockito.mock(SocketTransportRPCCommand.class);
        when(command.getOutput()).thenReturn(new HessianObjectIOFactory(false).newCougarObjectOutput(out, CougarProtocol.TRANSPORT_PROTOCOL_VERSION_MAX_SUPPORTED));
        MyIoSession session = new MyIoSession("abc");
        session.setAttribute(CougarProtocol.PROTOCOL_VERSION_ATTR_NAME, CougarProtocol.TRANSPORT_PROTOCOL_VERSION_MAX_SUPPORTED);
        when(command.getSession()).thenReturn(session);
        when(marshaller.readExecutionContext(any(CougarObjectInput.class), any(String.class), any(X509Certificate[].class), anyInt(), anyByte())).thenReturn(ctx);
        when(marshaller.readOperationKey(any(CougarObjectInput.class))).thenReturn(key);
        when(marshaller.readArgs(any(Parameter[].class), any(CougarObjectInput.class))).thenReturn(args);
        when(marshaller.readTimeConstraintsIfPresent(any(CougarObjectInput.class), anyByte())).thenReturn(toReturn);
        final OperationKey opKey = new OperationKey(new ServiceVersion(1,0), "TestingService", "TestCall");
        OperationDefinition opDef = Mockito.mock(OperationDefinition.class);
        when(opDef.getReturnType()).thenReturn(returnType);
        when(opDef.getOperationKey()).thenReturn(opKey);
        when(ev.getOperationDefinition(key)).thenReturn(opDef);
        SocketTransportCommandProcessorDelegator d = new SocketTransportCommandProcessorDelegator();
        ServiceBindingDescriptor desc = mock(ServiceBindingDescriptor.class);
        OperationBindingDescriptor[] bindingDescriptors = new OperationBindingDescriptor[1];
        SocketOperationBindingDescriptor opDesc = new SocketOperationBindingDescriptor(opKey);
        opDesc.setOperationKey(opKey);
        bindingDescriptors[0] = opDesc;
        when(desc.getOperationBindings()).thenReturn(bindingDescriptors);
        when(desc.getServiceName()).thenReturn(opKey.getServiceName());
        when(desc.getServiceVersion()).thenReturn(opKey.getVersion());
        d.bind(desc);
        d.onCougarStart();
        return d.createCommandResolver(command, tracer);
    }

    @Test
    public void testCreateCommandResolver() throws IOException {
        createCommandResolver(DefaultTimeConstraints.NO_CONSTRAINTS, tracer);
    }

    @Test
    public void createCommandResolver_NoTimeout() throws IOException {
        // resolve the command
        CommandResolver<SocketTransportCommand> cr = createCommandResolver(DefaultTimeConstraints.NO_CONSTRAINTS, tracer);
        Iterable<ExecutionCommand> executionCommands = cr.resolveExecutionCommands();
        // check the output: no constraints means no expiry time
        ExecutionCommand executionCommand = executionCommands.iterator().next();
        TimeConstraints constraints = executionCommand.getTimeConstraints();
        assertNull(constraints.getExpiryTime());
    }

    @Test
    public void createCommandResolver_WithTimeout() throws IOException {
        // resolve the command
        CommandResolver<SocketTransportCommand> cr = createCommandResolver(DefaultTimeConstraints.fromTimeout(10000), tracer);
        Iterable<ExecutionCommand> executionCommands = cr.resolveExecutionCommands();
        // check the output: a timeout yields a concrete expiry time
        ExecutionCommand executionCommand = executionCommands.iterator().next();
        TimeConstraints constraints = executionCommand.getTimeConstraints();
        assertNotNull(constraints.getExpiryTime());
    }

    @Test
    public void createCommandResolver_WithTimeoutAndOldRequestTime() throws IOException {
        // Back-date the request so the 10s timeout has already elapsed.
        requestTime = new Date(System.currentTimeMillis()-10001);
        // resolve the command
        CommandResolver<SocketTransportCommand> cr = createCommandResolver(DefaultTimeConstraints.fromTimeout(10000), tracer);
        Iterable<ExecutionCommand> executionCommands = cr.resolveExecutionCommands();
        // check the output: expiry is computed from the request time, so it is already in the past
        ExecutionCommand executionCommand = executionCommands.iterator().next();
        TimeConstraints constraints = executionCommand.getTimeConstraints();
        assertTrue(constraints.getExpiryTime() < System.currentTimeMillis());
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.builder.component.dsl;
import javax.annotation.Generated;
import org.apache.camel.Component;
import org.apache.camel.builder.component.AbstractComponentBuilder;
import org.apache.camel.builder.component.ComponentBuilder;
import org.apache.camel.component.seda.SedaComponent;
/**
 * The seda component provides asynchronous call to another endpoint from any
 * CamelContext in the same JVM.
 *
 * Generated by camel-package-maven-plugin - do not edit this file!
 */
@Generated("org.apache.camel.maven.packaging.ComponentDslMojo")
public interface SedaComponentBuilderFactory {

    /**
     * SEDA (camel-seda)
     * The seda component provides asynchronous call to another endpoint from
     * any CamelContext in the same JVM.
     *
     * Category: core,endpoint
     * Since: 1.1
     * Maven coordinates: org.apache.camel:camel-seda
     *
     * @return a new builder for the SEDA component
     */
    static SedaComponentBuilder seda() {
        return new SedaComponentBuilderImpl();
    }

    /**
     * Builder for the SEDA component.
     */
    interface SedaComponentBuilder extends ComponentBuilder<SedaComponent> {
        /**
         * Allows for bridging the consumer to the Camel routing Error Handler,
         * which mean any exceptions occurred while the consumer is trying to
         * pickup incoming messages, or the likes, will now be processed as a
         * message and handled by the routing Error Handler. By default the
         * consumer will use the org.apache.camel.spi.ExceptionHandler to deal
         * with exceptions, that will be logged at WARN or ERROR level and
         * ignored.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Default: false
         * Group: consumer
         */
        default SedaComponentBuilder bridgeErrorHandler(
                boolean bridgeErrorHandler) {
            doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
            return this;
        }
        /**
         * Sets the default number of concurrent threads processing exchanges.
         *
         * The option is a: <code>int</code> type.
         *
         * Default: 1
         * Group: consumer
         */
        default SedaComponentBuilder concurrentConsumers(int concurrentConsumers) {
            doSetProperty("concurrentConsumers", concurrentConsumers);
            return this;
        }
        /**
         * Whether a thread that sends messages to a full SEDA queue will block
         * until the queue's capacity is no longer exhausted. By default, an
         * exception will be thrown stating that the queue is full. By enabling
         * this option, the calling thread will instead block and wait until the
         * message can be accepted.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Default: false
         * Group: producer
         */
        default SedaComponentBuilder defaultBlockWhenFull(
                boolean defaultBlockWhenFull) {
            doSetProperty("defaultBlockWhenFull", defaultBlockWhenFull);
            return this;
        }
        /**
         * Whether a thread that sends messages to a full SEDA queue will be
         * discarded. By default, an exception will be thrown stating that the
         * queue is full. By enabling this option, the calling thread will give
         * up sending and continue, meaning that the message was not sent to the
         * SEDA queue.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Default: false
         * Group: producer
         */
        default SedaComponentBuilder defaultDiscardWhenFull(
                boolean defaultDiscardWhenFull) {
            doSetProperty("defaultDiscardWhenFull", defaultDiscardWhenFull);
            return this;
        }
        /**
         * Whether a thread that sends messages to a full SEDA queue will block
         * until the queue's capacity is no longer exhausted. By default, an
         * exception will be thrown stating that the queue is full. By enabling
         * this option, where a configured timeout can be added to the block
         * case. Utilizing the .offer(timeout) method of the underlining java
         * queue.
         *
         * The option is a: <code>long</code> type.
         *
         * Group: producer
         */
        default SedaComponentBuilder defaultOfferTimeout(
                long defaultOfferTimeout) {
            doSetProperty("defaultOfferTimeout", defaultOfferTimeout);
            return this;
        }
        /**
         * Whether the producer should be started lazy (on the first message).
         * By starting lazy you can use this to allow CamelContext and routes to
         * startup in situations where a producer may otherwise fail during
         * starting and cause the route to fail being started. By deferring this
         * startup to be lazy then the startup failure can be handled during
         * routing messages via Camel's routing error handlers. Beware that when
         * the first message is processed then creating and starting the
         * producer may take a little time and prolong the total processing time
         * of the processing.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Default: false
         * Group: producer
         */
        default SedaComponentBuilder lazyStartProducer(boolean lazyStartProducer) {
            doSetProperty("lazyStartProducer", lazyStartProducer);
            return this;
        }
        /**
         * Whether the component should use basic property binding (Camel 2.x)
         * or the newer property binding with additional capabilities.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Default: false
         * Group: advanced
         */
        default SedaComponentBuilder basicPropertyBinding(
                boolean basicPropertyBinding) {
            doSetProperty("basicPropertyBinding", basicPropertyBinding);
            return this;
        }
        /**
         * Sets the default queue factory.
         *
         * The option is a:
         * <code>org.apache.camel.component.seda.BlockingQueueFactory<org.apache.camel.Exchange></code> type.
         *
         * Group: advanced
         */
        default SedaComponentBuilder defaultQueueFactory(
                org.apache.camel.component.seda.BlockingQueueFactory<org.apache.camel.Exchange> defaultQueueFactory) {
            doSetProperty("defaultQueueFactory", defaultQueueFactory);
            return this;
        }
        /**
         * Sets the default maximum capacity of the SEDA queue (i.e., the number
         * of messages it can hold).
         *
         * The option is a: <code>int</code> type.
         *
         * Default: 1000
         * Group: advanced
         */
        default SedaComponentBuilder queueSize(int queueSize) {
            doSetProperty("queueSize", queueSize);
            return this;
        }
    }

    /**
     * Concrete builder implementation: maps the property names recorded via
     * doSetProperty onto strongly-typed setter calls on the SedaComponent.
     */
    class SedaComponentBuilderImpl
            extends
                AbstractComponentBuilder<SedaComponent>
            implements
                SedaComponentBuilder {
        @Override
        protected SedaComponent buildConcreteComponent() {
            return new SedaComponent();
        }
        @Override
        protected boolean setPropertyOnComponent(
                Component component,
                String name,
                Object value) {
            // One case per builder option; returns false for unknown names.
            switch (name) {
            case "bridgeErrorHandler": ((SedaComponent) component).setBridgeErrorHandler((boolean) value); return true;
            case "concurrentConsumers": ((SedaComponent) component).setConcurrentConsumers((int) value); return true;
            case "defaultBlockWhenFull": ((SedaComponent) component).setDefaultBlockWhenFull((boolean) value); return true;
            case "defaultDiscardWhenFull": ((SedaComponent) component).setDefaultDiscardWhenFull((boolean) value); return true;
            case "defaultOfferTimeout": ((SedaComponent) component).setDefaultOfferTimeout((long) value); return true;
            case "lazyStartProducer": ((SedaComponent) component).setLazyStartProducer((boolean) value); return true;
            case "basicPropertyBinding": ((SedaComponent) component).setBasicPropertyBinding((boolean) value); return true;
            case "defaultQueueFactory": ((SedaComponent) component).setDefaultQueueFactory((org.apache.camel.component.seda.BlockingQueueFactory) value); return true;
            case "queueSize": ((SedaComponent) component).setQueueSize((int) value); return true;
            default: return false;
            }
        }
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapred;
import java.io.IOException;
import java.io.InputStream;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FutureDataInputStreamBuilder;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.Seekable;
import org.apache.hadoop.fs.impl.FutureIOSupport;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.compress.CodecPool;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionCodecFactory;
import org.apache.hadoop.io.compress.Decompressor;
import org.apache.hadoop.io.compress.SplitCompressionInputStream;
import org.apache.hadoop.io.compress.SplittableCompressionCodec;
import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.mapreduce.lib.input.CompressedSplitLineReader;
import org.apache.hadoop.mapreduce.lib.input.SplitLineReader;
import org.apache.hadoop.mapreduce.lib.input.UncompressedSplitLineReader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Treats keys as offset in file and value as line.
*/
@InterfaceAudience.LimitedPrivate({"MapReduce", "Pig"})
@InterfaceStability.Unstable
public class LineRecordReader implements RecordReader<LongWritable, Text> {
private static final Logger LOG =
LoggerFactory.getLogger(LineRecordReader.class.getName());
private CompressionCodecFactory compressionCodecs = null;
private long start;
private long pos;
private long end;
private SplitLineReader in;
private FSDataInputStream fileIn;
private final Seekable filePosition;
int maxLineLength;
private CompressionCodec codec;
private Decompressor decompressor;
/**
* A class that provides a line reader from an input stream.
* @deprecated Use {@link org.apache.hadoop.util.LineReader} instead.
*/
@Deprecated
public static class LineReader extends org.apache.hadoop.util.LineReader {
LineReader(InputStream in) {
super(in);
}
LineReader(InputStream in, int bufferSize) {
super(in, bufferSize);
}
public LineReader(InputStream in, Configuration conf) throws IOException {
super(in, conf);
}
LineReader(InputStream in, byte[] recordDelimiter) {
super(in, recordDelimiter);
}
LineReader(InputStream in, int bufferSize, byte[] recordDelimiter) {
super(in, bufferSize, recordDelimiter);
}
public LineReader(InputStream in, Configuration conf,
byte[] recordDelimiter) throws IOException {
super(in, conf, recordDelimiter);
}
}
public LineRecordReader(Configuration job,
FileSplit split) throws IOException {
this(job, split, null);
}
public LineRecordReader(Configuration job, FileSplit split,
byte[] recordDelimiter) throws IOException {
this.maxLineLength = job.getInt(org.apache.hadoop.mapreduce.lib.input.
LineRecordReader.MAX_LINE_LENGTH, Integer.MAX_VALUE);
start = split.getStart();
end = start + split.getLength();
final Path file = split.getPath();
compressionCodecs = new CompressionCodecFactory(job);
codec = compressionCodecs.getCodec(file);
// open the file and seek to the start of the split
final FutureDataInputStreamBuilder builder =
file.getFileSystem(job).openFile(file);
FutureIOSupport.propagateOptions(builder, job,
MRJobConfig.INPUT_FILE_OPTION_PREFIX,
MRJobConfig.INPUT_FILE_MANDATORY_PREFIX);
fileIn = FutureIOSupport.awaitFuture(builder.build());
if (isCompressedInput()) {
decompressor = CodecPool.getDecompressor(codec);
if (codec instanceof SplittableCompressionCodec) {
final SplitCompressionInputStream cIn =
((SplittableCompressionCodec)codec).createInputStream(
fileIn, decompressor, start, end,
SplittableCompressionCodec.READ_MODE.BYBLOCK);
in = new CompressedSplitLineReader(cIn, job, recordDelimiter);
start = cIn.getAdjustedStart();
end = cIn.getAdjustedEnd();
filePosition = cIn; // take pos from compressed stream
} else {
if (start != 0) {
// So we have a split that is part of a file stored using
// a Compression codec that cannot be split.
throw new IOException("Cannot seek in " +
codec.getClass().getSimpleName() + " compressed stream");
}
in = new SplitLineReader(codec.createInputStream(fileIn,
decompressor), job, recordDelimiter);
filePosition = fileIn;
}
} else {
fileIn.seek(start);
in = new UncompressedSplitLineReader(
fileIn, job, recordDelimiter, split.getLength());
filePosition = fileIn;
}
// If this is not the first split, we always throw away first record
// because we always (except the last split) read one extra line in
// next() method.
if (start != 0) {
start += in.readLine(new Text(), 0, maxBytesToConsume(start));
}
this.pos = start;
}
public LineRecordReader(InputStream in, long offset, long endOffset,
int maxLineLength) {
this(in, offset, endOffset, maxLineLength, null);
}
public LineRecordReader(InputStream in, long offset, long endOffset,
int maxLineLength, byte[] recordDelimiter) {
this.maxLineLength = maxLineLength;
this.in = new SplitLineReader(in, recordDelimiter);
this.start = offset;
this.pos = offset;
this.end = endOffset;
filePosition = null;
}
public LineRecordReader(InputStream in, long offset, long endOffset,
Configuration job)
throws IOException{
this(in, offset, endOffset, job, null);
}
public LineRecordReader(InputStream in, long offset, long endOffset,
Configuration job, byte[] recordDelimiter)
throws IOException{
this.maxLineLength = job.getInt(org.apache.hadoop.mapreduce.lib.input.
LineRecordReader.MAX_LINE_LENGTH, Integer.MAX_VALUE);
this.in = new SplitLineReader(in, job, recordDelimiter);
this.start = offset;
this.pos = offset;
this.end = endOffset;
filePosition = null;
}
public LongWritable createKey() {
return new LongWritable();
}
public Text createValue() {
return new Text();
}
private boolean isCompressedInput() {
return (codec != null);
}
private int maxBytesToConsume(long pos) {
return isCompressedInput()
? Integer.MAX_VALUE
: (int) Math.max(Math.min(Integer.MAX_VALUE, end - pos), maxLineLength);
}
private long getFilePosition() throws IOException {
long retVal;
if (isCompressedInput() && null != filePosition) {
retVal = filePosition.getPos();
} else {
retVal = pos;
}
return retVal;
}
private int skipUtfByteOrderMark(Text value) throws IOException {
// Strip BOM(Byte Order Mark)
// Text only support UTF-8, we only need to check UTF-8 BOM
// (0xEF,0xBB,0xBF) at the start of the text stream.
int newMaxLineLength = (int) Math.min(3L + (long) maxLineLength,
Integer.MAX_VALUE);
int newSize = in.readLine(value, newMaxLineLength, maxBytesToConsume(pos));
// Even we read 3 extra bytes for the first line,
// we won't alter existing behavior (no backwards incompat issue).
// Because the newSize is less than maxLineLength and
// the number of bytes copied to Text is always no more than newSize.
// If the return size from readLine is not less than maxLineLength,
// we will discard the current line and read the next line.
pos += newSize;
int textLength = value.getLength();
byte[] textBytes = value.getBytes();
if ((textLength >= 3) && (textBytes[0] == (byte)0xEF) &&
(textBytes[1] == (byte)0xBB) && (textBytes[2] == (byte)0xBF)) {
// find UTF-8 BOM, strip it.
LOG.info("Found UTF-8 BOM and skipped it");
textLength -= 3;
newSize -= 3;
if (textLength > 0) {
// It may work to use the same buffer and not do the copyBytes
textBytes = value.copyBytes();
value.set(textBytes, 3, textLength);
} else {
value.clear();
}
}
return newSize;
}
/** Read a line. */
public synchronized boolean next(LongWritable key, Text value)
throws IOException {
// We always read one extra line, which lies outside the upper
// split limit i.e. (end - 1)
while (getFilePosition() <= end || in.needAdditionalRecordAfterSplit()) {
key.set(pos);
int newSize = 0;
if (pos == 0) {
newSize = skipUtfByteOrderMark(value);
} else {
newSize = in.readLine(value, maxLineLength, maxBytesToConsume(pos));
pos += newSize;
}
if (newSize == 0) {
return false;
}
if (newSize < maxLineLength) {
return true;
}
// line too long. try again
LOG.info("Skipped line of size " + newSize + " at pos " + (pos - newSize));
}
return false;
}
/**
* Get the progress within the split
*/
public synchronized float getProgress() throws IOException {
if (start == end) {
return 0.0f;
} else {
return Math.min(1.0f, (getFilePosition() - start) / (float)(end - start));
}
}
public synchronized long getPos() throws IOException {
return pos;
}
/**
 * Closes the underlying line reader and returns any borrowed decompressor
 * to the codec pool. The pool return runs in a finally block so the
 * decompressor is not leaked even if closing the stream throws.
 */
public synchronized void close() throws IOException {
  try {
    if (in != null) {
      in.close();
    }
  } finally {
    if (decompressor != null) {
      CodecPool.returnDecompressor(decompressor);
      // Null out so a repeated close() cannot return the same instance twice.
      decompressor = null;
    }
  }
}
}
| |
/*******************************************************************************
* Copyright 2011-2013 Sergey Tarasevich
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
package com.rincliu.library.common.persistence.image.core;
import android.graphics.Bitmap;
import android.os.Handler;
import android.widget.ImageView;
import com.rincliu.library.common.persistence.image.cache.disc.DiscCacheAware;
import com.rincliu.library.common.persistence.image.core.assist.*;
import com.rincliu.library.common.persistence.image.core.assist.FailReason.FailType;
import com.rincliu.library.common.persistence.image.core.decode.ImageDecoder;
import com.rincliu.library.common.persistence.image.core.decode.ImageDecodingInfo;
import com.rincliu.library.common.persistence.image.core.download.ImageDownloader;
import com.rincliu.library.common.persistence.image.core.download.ImageDownloader.Scheme;
import com.rincliu.library.common.persistence.image.utils.IoUtils;
import com.rincliu.library.common.persistence.image.utils.L;
import java.io.*;
import java.lang.ref.Reference;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.locks.ReentrantLock;
/**
* Presents load'n'display image task. Used to load image from Internet or
* file system, decode it to {@link Bitmap}, and display it in
* {@link ImageView} using {@link DisplayBitmapTask}.
*
* @author Sergey Tarasevich (nostra13[at]gmail[dot]com)
* @see ImageLoaderConfiguration
* @see ImageLoadingInfo
* @since 1.3.1
*/
final class LoadAndDisplayImageTask implements Runnable {
    private static final String LOG_WAITING_FOR_RESUME = "ImageLoader is paused. Waiting... [%s]";
    private static final String LOG_RESUME_AFTER_PAUSE = ".. Resume loading [%s]";
    private static final String LOG_DELAY_BEFORE_LOADING = "Delay %d ms before loading... [%s]";
    private static final String LOG_START_DISPLAY_IMAGE_TASK = "Start display image task [%s]";
    private static final String LOG_WAITING_FOR_IMAGE_LOADED = "Image already is loading. Waiting... [%s]";
    private static final String LOG_GET_IMAGE_FROM_MEMORY_CACHE_AFTER_WAITING = "...Get cached bitmap from memory after waiting. [%s]";
    private static final String LOG_LOAD_IMAGE_FROM_NETWORK = "Load image from network [%s]";
    private static final String LOG_LOAD_IMAGE_FROM_DISC_CACHE = "Load image from disc cache [%s]";
    private static final String LOG_PREPROCESS_IMAGE = "PreProcess image before caching in memory [%s]";
    private static final String LOG_POSTPROCESS_IMAGE = "PostProcess image before displaying [%s]";
    private static final String LOG_CACHE_IMAGE_IN_MEMORY = "Cache image in memory [%s]";
    private static final String LOG_CACHE_IMAGE_ON_DISC = "Cache image on disc [%s]";
    private static final String LOG_PROCESS_IMAGE_BEFORE_CACHE_ON_DISC = "Process image before cache on disc [%s]";
    private static final String LOG_TASK_CANCELLED_IMAGEVIEW_REUSED = "ImageView is reused for another image. Task is cancelled. [%s]";
    private static final String LOG_TASK_CANCELLED_IMAGEVIEW_LOST = "ImageView was collected by GC. Task is cancelled. [%s]";
    private static final String LOG_TASK_INTERRUPTED = "Task was interrupted [%s]";
    private static final String ERROR_PRE_PROCESSOR_NULL = "Pre-processor returned null [%s]";
    // FIX: this message previously read "Pre-processor returned null [%s]" — a
    // copy-paste error that made post-processor failures indistinguishable from
    // pre-processor failures in the logs.
    private static final String ERROR_POST_PROCESSOR_NULL = "Post-processor returned null [%s]";
    private static final String ERROR_PROCESSOR_FOR_DISC_CACHE_NULL = "Bitmap processor for disc cache returned null [%s]";
    private static final int BUFFER_SIZE = 32 * 1024; // 32 Kb
    private final ImageLoaderEngine engine;
    private final ImageLoadingInfo imageLoadingInfo;
    private final Handler handler;
    // Helper references (snapshotted from the engine configuration at construction)
    private final ImageLoaderConfiguration configuration;
    private final ImageDownloader downloader;
    private final ImageDownloader networkDeniedDownloader;
    private final ImageDownloader slowNetworkDownloader;
    private final ImageDecoder decoder;
    private final boolean writeLogs;
    final String uri;
    private final String memoryCacheKey;
    final Reference<ImageView> imageViewRef;
    private final ImageSize targetSize;
    final DisplayImageOptions options;
    final ImageLoadingListener listener;
    // State vars
    private LoadedFrom loadedFrom = LoadedFrom.NETWORK;
    private boolean imageViewCollected = false;

    /**
     * @param engine           shared loader engine (pause state, view-to-URI map)
     * @param imageLoadingInfo immutable description of this load request
     * @param handler          handler used to post display/callback work to the UI thread
     */
    public LoadAndDisplayImageTask(ImageLoaderEngine engine, ImageLoadingInfo imageLoadingInfo, Handler handler) {
        this.engine = engine;
        this.imageLoadingInfo = imageLoadingInfo;
        this.handler = handler;
        configuration = engine.configuration;
        downloader = configuration.downloader;
        networkDeniedDownloader = configuration.networkDeniedDownloader;
        slowNetworkDownloader = configuration.slowNetworkDownloader;
        decoder = configuration.decoder;
        writeLogs = configuration.writeLogs;
        uri = imageLoadingInfo.uri;
        memoryCacheKey = imageLoadingInfo.memoryCacheKey;
        imageViewRef = imageLoadingInfo.imageViewRef;
        targetSize = imageLoadingInfo.targetSize;
        options = imageLoadingInfo.options;
        listener = imageLoadingInfo.listener;
    }

    @Override
    public void run() {
        if (waitIfPaused())
            return;
        if (delayIfNeed())
            return;
        // Per-URI lock: prevents the same image from being downloaded/decoded
        // concurrently by several tasks.
        ReentrantLock loadFromUriLock = imageLoadingInfo.loadFromUriLock;
        log(LOG_START_DISPLAY_IMAGE_TASK);
        if (loadFromUriLock.isLocked()) {
            log(LOG_WAITING_FOR_IMAGE_LOADED);
        }
        loadFromUriLock.lock();
        Bitmap bmp;
        try {
            if (checkTaskIsNotActual())
                return;
            bmp = configuration.memoryCache.get(memoryCacheKey);
            if (bmp == null) {
                bmp = tryLoadBitmap();
                if (imageViewCollected)
                    return; // listener callback already was fired
                if (bmp == null)
                    return; // listener callback already was fired
                if (checkTaskIsNotActual() || checkTaskIsInterrupted())
                    return;
                if (options.shouldPreProcess()) {
                    log(LOG_PREPROCESS_IMAGE);
                    bmp = options.getPreProcessor().process(bmp);
                    if (bmp == null) {
                        // FIX: pass memoryCacheKey so the "%s" in the message is
                        // actually filled in (the post-processor branch already did).
                        L.e(ERROR_PRE_PROCESSOR_NULL, memoryCacheKey);
                    }
                }
                if (bmp != null && options.isCacheInMemory()) {
                    log(LOG_CACHE_IMAGE_IN_MEMORY);
                    configuration.memoryCache.put(memoryCacheKey, bmp);
                }
            } else {
                loadedFrom = LoadedFrom.MEMORY_CACHE;
                log(LOG_GET_IMAGE_FROM_MEMORY_CACHE_AFTER_WAITING);
            }
            if (bmp != null && options.shouldPostProcess()) {
                log(LOG_POSTPROCESS_IMAGE);
                bmp = options.getPostProcessor().process(bmp);
                if (bmp == null) {
                    L.e(ERROR_POST_PROCESSOR_NULL, memoryCacheKey);
                }
            }
        } finally {
            loadFromUriLock.unlock();
        }
        if (checkTaskIsNotActual() || checkTaskIsInterrupted())
            return;
        DisplayBitmapTask displayBitmapTask = new DisplayBitmapTask(bmp, imageLoadingInfo, engine, loadedFrom);
        displayBitmapTask.setLoggingEnabled(writeLogs);
        handler.post(displayBitmapTask);
    }

    /** @return true - if task should be interrupted; false - otherwise */
    private boolean waitIfPaused() {
        AtomicBoolean pause = engine.getPause();
        synchronized (pause) {
            if (pause.get()) {
                log(LOG_WAITING_FOR_RESUME);
                try {
                    pause.wait();
                } catch (InterruptedException e) {
                    L.e(LOG_TASK_INTERRUPTED, memoryCacheKey);
                    return true;
                }
                log(LOG_RESUME_AFTER_PAUSE);
            }
        }
        return checkTaskIsNotActual();
    }

    /** @return true - if task should be interrupted; false - otherwise */
    private boolean delayIfNeed() {
        if (options.shouldDelayBeforeLoading()) {
            log(LOG_DELAY_BEFORE_LOADING, options.getDelayBeforeLoading(), memoryCacheKey);
            try {
                Thread.sleep(options.getDelayBeforeLoading());
            } catch (InterruptedException e) {
                L.e(LOG_TASK_INTERRUPTED, memoryCacheKey);
                return true;
            }
            return checkTaskIsNotActual();
        }
        return false;
    }

    /**
     * Check whether target ImageView wasn't collected by GC and the image URI
     * of this task matches to image URI which is actual for current ImageView
     * at this moment and fire
     * {@link ImageLoadingListener#onLoadingCancelled(String, android.view.View)}
     * event if it doesn't.
     */
    private boolean checkTaskIsNotActual() {
        ImageView imageView = checkImageViewRef();
        return imageView == null || checkImageViewReused(imageView);
    }

    /**
     * @return the target ImageView, or null if it was collected by GC
     *         (in which case the cancel event has already been fired)
     */
    private ImageView checkImageViewRef() {
        ImageView imageView = imageViewRef.get();
        if (imageView == null) {
            imageViewCollected = true;
            log(LOG_TASK_CANCELLED_IMAGEVIEW_LOST);
            fireCancelEvent();
        }
        return imageView;
    }

    private boolean checkImageViewReused(ImageView imageView) {
        String currentCacheKey = engine.getLoadingUriForView(imageView);
        // Check whether memory cache key (image URI) for current ImageView is
        // actual.
        // If ImageView is reused for another task then current task should be
        // cancelled.
        boolean imageViewWasReused = !memoryCacheKey.equals(currentCacheKey);
        if (imageViewWasReused) {
            log(LOG_TASK_CANCELLED_IMAGEVIEW_REUSED);
            fireCancelEvent();
        }
        return imageViewWasReused;
    }

    /** Check whether the current task was interrupted */
    private boolean checkTaskIsInterrupted() {
        // NOTE: Thread.interrupted() clears the thread's interrupt flag as a
        // side effect; callers rely only on the boolean result here.
        boolean interrupted = Thread.interrupted();
        if (interrupted)
            log(LOG_TASK_INTERRUPTED);
        return interrupted;
    }

    /**
     * Loads the bitmap from disc cache if present, otherwise from the network
     * (optionally caching it on disc first). Fires a fail event and returns
     * null on any failure.
     */
    private Bitmap tryLoadBitmap() {
        File imageFile = getImageFileInDiscCache();
        Bitmap bitmap = null;
        try {
            if (imageFile.exists()) {
                log(LOG_LOAD_IMAGE_FROM_DISC_CACHE);
                loadedFrom = LoadedFrom.DISC_CACHE;
                bitmap = decodeImage(Scheme.FILE.wrap(imageFile.getAbsolutePath()));
                if (imageViewCollected)
                    return null;
            }
            if (bitmap == null || bitmap.getWidth() <= 0 || bitmap.getHeight() <= 0) {
                log(LOG_LOAD_IMAGE_FROM_NETWORK);
                loadedFrom = LoadedFrom.NETWORK;
                String imageUriForDecoding = options.isCacheOnDisc() ? tryCacheImageOnDisc(imageFile) : uri;
                if (!checkTaskIsNotActual()) {
                    bitmap = decodeImage(imageUriForDecoding);
                    if (imageViewCollected)
                        return null;
                    if (bitmap == null || bitmap.getWidth() <= 0 || bitmap.getHeight() <= 0) {
                        fireFailEvent(FailType.DECODING_ERROR, null);
                    }
                }
            }
        } catch (IllegalStateException e) {
            fireFailEvent(FailType.NETWORK_DENIED, null);
        } catch (IOException e) {
            L.e(e);
            fireFailEvent(FailType.IO_ERROR, e);
            // A partially-written cache file would poison future loads; remove it.
            if (imageFile.exists()) {
                imageFile.delete();
            }
        } catch (OutOfMemoryError e) {
            L.e(e);
            fireFailEvent(FailType.OUT_OF_MEMORY, e);
        } catch (Throwable e) {
            L.e(e);
            fireFailEvent(FailType.UNKNOWN, e);
        }
        return bitmap;
    }

    /**
     * @return the disc-cache file for this task's URI, falling back to the
     *         reserve cache when the primary cache directory cannot be created
     */
    private File getImageFileInDiscCache() {
        DiscCacheAware discCache = configuration.discCache;
        File imageFile = discCache.get(uri);
        File cacheDir = imageFile.getParentFile();
        if (cacheDir == null || (!cacheDir.exists() && !cacheDir.mkdirs())) {
            imageFile = configuration.reserveDiscCache.get(uri);
            cacheDir = imageFile.getParentFile();
            if (cacheDir != null && !cacheDir.exists()) {
                cacheDir.mkdirs();
            }
        }
        return imageFile;
    }

    /** Decodes the image at the given URI scaled/cropped for the target ImageView. */
    private Bitmap decodeImage(String imageUri) throws IOException {
        ImageView imageView = checkImageViewRef();
        if (imageView == null)
            return null;
        ViewScaleType viewScaleType = ViewScaleType.fromImageView(imageView);
        ImageDecodingInfo decodingInfo = new ImageDecodingInfo(memoryCacheKey, imageUri, targetSize, viewScaleType,
                getDownloader(), options);
        return decoder.decode(decodingInfo);
    }

    /** @return Cached image URI; or original image URI if caching failed */
    private String tryCacheImageOnDisc(File targetFile) {
        log(LOG_CACHE_IMAGE_ON_DISC);
        try {
            int width = configuration.maxImageWidthForDiscCache;
            int height = configuration.maxImageHeightForDiscCache;
            boolean saved = false;
            if (width > 0 || height > 0) {
                saved = downloadSizedImage(targetFile, width, height);
            }
            if (!saved) {
                downloadImage(targetFile);
            }
            configuration.discCache.put(uri, targetFile);
            return Scheme.FILE.wrap(targetFile.getAbsolutePath());
        } catch (IOException e) {
            L.e(e);
            return uri;
        }
    }

    /**
     * Downloads the image, decodes it bounded by maxWidth x maxHeight,
     * optionally processes it, and compresses it to targetFile.
     *
     * @return true if the resized image was saved successfully
     */
    private boolean downloadSizedImage(File targetFile, int maxWidth, int maxHeight) throws IOException {
        // Download, decode, compress and save image
        ImageSize targetImageSize = new ImageSize(maxWidth, maxHeight);
        DisplayImageOptions specialOptions = new DisplayImageOptions.Builder().cloneFrom(options).imageScaleType(
                ImageScaleType.IN_SAMPLE_INT).build();
        ImageDecodingInfo decodingInfo = new ImageDecodingInfo(memoryCacheKey, uri, targetImageSize,
                ViewScaleType.FIT_INSIDE, getDownloader(), specialOptions);
        Bitmap bmp = decoder.decode(decodingInfo);
        if (bmp == null)
            return false;
        if (configuration.processorForDiscCache != null) {
            log(LOG_PROCESS_IMAGE_BEFORE_CACHE_ON_DISC);
            bmp = configuration.processorForDiscCache.process(bmp);
            if (bmp == null) {
                L.e(ERROR_PROCESSOR_FOR_DISC_CACHE_NULL, memoryCacheKey);
                return false;
            }
        }
        OutputStream os = new BufferedOutputStream(new FileOutputStream(targetFile), BUFFER_SIZE);
        boolean savedSuccessfully;
        try {
            savedSuccessfully = bmp.compress(configuration.imageCompressFormatForDiscCache,
                    configuration.imageQualityForDiscCache, os);
        } finally {
            IoUtils.closeSilently(os);
        }
        bmp.recycle();
        return savedSuccessfully;
    }

    /** Streams the raw image bytes for this task's URI into targetFile. */
    private void downloadImage(File targetFile) throws IOException {
        InputStream is = getDownloader().getStream(uri, options.getExtraForDownloader());
        try {
            OutputStream os = new BufferedOutputStream(new FileOutputStream(targetFile), BUFFER_SIZE);
            try {
                IoUtils.copyStream(is, os);
            } finally {
                IoUtils.closeSilently(os);
            }
        } finally {
            IoUtils.closeSilently(is);
        }
    }

    /** Posts the fail callback (and fail drawable, if configured) to the UI thread. */
    private void fireFailEvent(final FailType failType, final Throwable failCause) {
        if (!Thread.interrupted()) {
            handler.post(new Runnable() {
                @Override
                public void run() {
                    ImageView imageView = imageViewRef.get();
                    if (imageView != null) {
                        if (options.shouldShowImageResOnFail()) {
                            imageView.setImageResource(options.getImageResOnFail());
                        } else if (options.shouldShowImageOnFail()) {
                            imageView.setImageDrawable(options.getImageOnFail());
                        }
                    }
                    listener.onLoadingFailed(uri, imageView, new FailReason(failType, failCause));
                }
            });
        }
    }

    /** Posts the cancel callback to the UI thread. */
    private void fireCancelEvent() {
        if (!Thread.interrupted()) {
            handler.post(new Runnable() {
                @Override
                public void run() {
                    listener.onLoadingCancelled(uri, imageViewRef.get());
                }
            });
        }
    }

    /** Picks the downloader according to the engine's current network state. */
    private ImageDownloader getDownloader() {
        ImageDownloader d;
        if (engine.isNetworkDenied()) {
            d = networkDeniedDownloader;
        } else if (engine.isSlowNetwork()) {
            d = slowNetworkDownloader;
        } else {
            d = downloader;
        }
        return d;
    }

    String getLoadingUri() {
        return uri;
    }

    private void log(String message) {
        if (writeLogs)
            L.d(message, memoryCacheKey);
    }

    private void log(String message, Object... args) {
        if (writeLogs)
            L.d(message, args);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metrics.influxdb;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.SortedMap;
import java.util.concurrent.TimeUnit;
import org.influxdb.dto.Point;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.codahale.metrics.Counter;
import com.codahale.metrics.Counting;
import com.codahale.metrics.Gauge;
import com.codahale.metrics.Histogram;
import com.codahale.metrics.Meter;
import com.codahale.metrics.Metered;
import com.codahale.metrics.MetricFilter;
import com.codahale.metrics.Snapshot;
import com.codahale.metrics.Timer;
import com.google.common.base.Optional;
import com.google.common.collect.Lists;
import com.typesafe.config.Config;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.metrics.Measurements;
import org.apache.gobblin.metrics.reporter.ConfiguredScheduledReporter;
import org.apache.gobblin.util.ConfigUtils;
import static org.apache.gobblin.metrics.Measurements.*;
/**
* InfluxDB reporter for metrics
*
* @author Lorand Bendig
*
*/
public class InfluxDBReporter extends ConfiguredScheduledReporter {
  // Pushes the built Point batch to InfluxDB; either injected via the builder
  // or constructed from the builder's connection parameters.
  private final InfluxDBPusher influxDBPusher;
  private static final Logger LOGGER = LoggerFactory.getLogger(InfluxDBReporter.class);

  /**
   * Builds a reporter from the given builder. Uses the builder's explicit
   * pusher when one was supplied, otherwise creates one from the builder's
   * url/username/password/database/connectionType.
   */
  public InfluxDBReporter(Builder<?> builder, Config config) {
    super(builder, config);
    if (builder.influxDBPusher.isPresent()) {
      this.influxDBPusher = builder.influxDBPusher.get();
    } else {
      this.influxDBPusher =
          new InfluxDBPusher.Builder(builder.url, builder.username, builder.password, builder.database,
              builder.connectionType).build();
    }
  }

  /**
   * A static factory class for obtaining new {@link org.apache.gobblin.metrics.influxdb.InfluxDBReporter.Builder}s
   *
   * @see org.apache.gobblin.metrics.influxdb.InfluxDBReporter.Builder
   */
  public static class Factory {
    public static BuilderImpl newBuilder() {
      return new BuilderImpl();
    }
  }

  // Concrete builder terminating the self-typed generic builder chain.
  public static class BuilderImpl extends Builder<BuilderImpl> {
    @Override
    protected BuilderImpl self() {
      return this;
    }
  }

  /**
   * Builder for {@link InfluxDBReporter}. Defaults to no filter, reporting rates in seconds and times in
   * milliseconds using TCP sending type
   */
  public static abstract class Builder<T extends ConfiguredScheduledReporter.Builder<T>> extends
      ConfiguredScheduledReporter.Builder<T> {
    // NOTE(review): 'filter' is settable via filter(...) but is never consumed
    // by InfluxDBReporter itself — presumably applied by a superclass or
    // simply unused; confirm before relying on it.
    protected MetricFilter filter;
    protected String url;
    protected String username;
    protected String password;
    protected String database;
    protected InfluxDBConnectionType connectionType;
    // Optional pre-built pusher; when absent one is created in the reporter ctor.
    protected Optional<InfluxDBPusher> influxDBPusher;

    protected Builder() {
      super();
      this.name = "InfluxDBReporter";
      this.influxDBPusher = Optional.absent();
      this.filter = MetricFilter.ALL;
      this.connectionType = InfluxDBConnectionType.TCP;
    }

    /**
     * Set {@link org.apache.gobblin.metrics.influxdb.InfluxDBPusher} to use.
     */
    public T withInfluxDBPusher(InfluxDBPusher pusher) {
      this.influxDBPusher = Optional.of(pusher);
      return self();
    }

    /**
     * Set connection parameters for the {@link org.apache.gobblin.metrics.influxdb.InfluxDBPusher} creation
     */
    public T withConnection(String url, String username, String password, String database) {
      this.url = url;
      this.username = username;
      this.password = password;
      this.database = database;
      return self();
    }

    /**
     * Set {@link org.apache.gobblin.metrics.influxdb.InfluxDBConnectionType} to use.
     */
    public T withConnectionType(InfluxDBConnectionType connectionType) {
      this.connectionType = connectionType;
      return self();
    }

    /**
     * Only report metrics which match the given filter.
     *
     * @param filter a {@link MetricFilter}
     * @return {@code this}
     */
    public T filter(MetricFilter filter) {
      this.filter = filter;
      return self();
    }

    /**
     * Builds and returns {@link InfluxDBReporter}.
     *
     * @return InfluxDBReporter
     */
    public InfluxDBReporter build(Properties props) throws IOException {
      return new InfluxDBReporter(this, ConfigUtils.propertiesToConfig(props,
          Optional.of(ConfigurationKeys.METRICS_CONFIGURATIONS_PREFIX)));
    }
  }

  /**
   * Converts one snapshot of registry metrics into InfluxDB {@link Point}s
   * (all stamped with a single wall-clock timestamp) and pushes them as one
   * batch. IO failures are logged, not rethrown, so the reporting schedule
   * keeps running.
   */
  @Override
  protected void report(SortedMap<String, Gauge> gauges, SortedMap<String, Counter> counters,
      SortedMap<String, Histogram> histograms, SortedMap<String, Meter> meters, SortedMap<String, Timer> timers,
      Map<String, Object> tags) {
    String prefix = getMetricNamePrefix(tags);
    long timestamp = System.currentTimeMillis();
    List<Point> points = Lists.newArrayList();
    try {
      for (Map.Entry<String, Gauge> gauge : gauges.entrySet()) {
        reportGauge(points, prefix, gauge.getKey(), gauge.getValue(), timestamp);
      }
      for (Map.Entry<String, Counter> counter : counters.entrySet()) {
        reportCounter(points, prefix, counter.getKey(), counter.getValue(), timestamp);
      }
      for (Map.Entry<String, Histogram> histogram : histograms.entrySet()) {
        reportHistogram(points, prefix, histogram.getKey(), histogram.getValue(), timestamp);
      }
      for (Map.Entry<String, Meter> meter : meters.entrySet()) {
        reportMetered(points, prefix, meter.getKey(), meter.getValue(), timestamp);
      }
      for (Map.Entry<String, Timer> timer : timers.entrySet()) {
        reportTimer(points, prefix, timer.getKey(), timer.getValue(), timestamp);
      }
      influxDBPusher.push(points);
    } catch (IOException ioe) {
      LOGGER.error("Error sending metrics to InfluxDB", ioe);
    }
  }

  /** Adds a single point holding the gauge's current value. */
  private void reportGauge(List<Point> points, String prefix, String name, Gauge gauge, long timestamp)
      throws IOException {
    String metricName = getKey(prefix, name);
    points.add(buildMetricAsPoint(metricName, gauge.getValue(), timestamp));
  }

  /** Adds a "<name>.count" point; counts are never duration-converted. */
  private void reportCounter(List<Point> points, String prefix, String name, Counting counter, long timestamp)
      throws IOException {
    String metricName = getKey(prefix, name, COUNT.getName());
    points.add(buildMetricAsPoint(metricName, counter.getCount(), false, timestamp));
  }

  /** Adds the histogram's count plus its distribution snapshot (no duration conversion). */
  private void reportHistogram(List<Point> points, String prefix, String name, Histogram histogram, long timestamp)
      throws IOException {
    reportCounter(points, prefix, name, histogram, timestamp);
    reportSnapshot(points, prefix, name, histogram.getSnapshot(), timestamp, false);
  }

  /** Adds the timer's snapshot (duration-converted) plus its rate/count points. */
  private void reportTimer(List<Point> points, String prefix, String name, Timer timer, long timestamp)
      throws IOException {
    reportSnapshot(points, prefix, name, timer.getSnapshot(), timestamp, true);
    reportMetered(points, prefix, name, timer, timestamp);
  }

  /**
   * Adds min/max/mean/stddev/median and the standard percentiles of the
   * snapshot, converting each value to the configured duration unit when
   * {@code convertDuration} is true.
   */
  private void reportSnapshot(List<Point> points, String prefix, String name, Snapshot snapshot, long timestamp,
      boolean convertDuration) throws IOException {
    String baseMetricName = getKey(prefix, name);
    points.add(buildMetricAsPoint(getKey(baseMetricName, MIN), snapshot.getMin(), convertDuration, timestamp));
    points.add(buildMetricAsPoint(getKey(baseMetricName, MAX), snapshot.getMax(), convertDuration, timestamp));
    points.add(buildMetricAsPoint(getKey(baseMetricName, MEAN), snapshot.getMean(), convertDuration, timestamp));
    points.add(buildMetricAsPoint(getKey(baseMetricName, STDDEV), snapshot.getStdDev(), convertDuration, timestamp));
    points.add(buildMetricAsPoint(getKey(baseMetricName, MEDIAN), snapshot.getMedian(), convertDuration, timestamp));
    points.add(buildMetricAsPoint(getKey(baseMetricName, PERCENTILE_75TH), snapshot.get75thPercentile(), convertDuration, timestamp));
    points.add(buildMetricAsPoint(getKey(baseMetricName, PERCENTILE_95TH), snapshot.get95thPercentile(), convertDuration, timestamp));
    points.add(buildMetricAsPoint(getKey(baseMetricName, PERCENTILE_98TH), snapshot.get98thPercentile(), convertDuration, timestamp));
    points.add(buildMetricAsPoint(getKey(baseMetricName, PERCENTILE_99TH), snapshot.get99thPercentile(), convertDuration, timestamp));
    points.add(buildMetricAsPoint(getKey(baseMetricName, PERCENTILE_999TH), snapshot.get999thPercentile(), convertDuration, timestamp));
  }

  /** Adds the meter's count plus 1/5/15-minute and mean rates (rate-converted). */
  private void reportMetered(List<Point> points, String prefix, String name, Metered metered, long timestamp)
      throws IOException {
    reportCounter(points,prefix, name, metered, timestamp);
    String baseMetricName = getKey(prefix, name);
    points.add(buildRateAsPoint(getKey(baseMetricName, RATE_1MIN), metered.getOneMinuteRate(), timestamp));
    points.add(buildRateAsPoint(getKey(baseMetricName, RATE_5MIN), metered.getFiveMinuteRate(), timestamp));
    points.add(buildRateAsPoint(getKey(baseMetricName, RATE_15MIN), metered.getFifteenMinuteRate(), timestamp));
    points.add(buildRateAsPoint(getKey(baseMetricName, MEAN_RATE), metered.getMeanRate(), timestamp));
  }

  /** Builds a point, first converting the value to the configured duration unit when requested. */
  private Point buildMetricAsPoint(String metricName, Number value, boolean toDuration, long timestamp)
      throws IOException {
    Number metricValue = toDuration ? convertDuration(value.doubleValue()) : value;
    return buildMetricAsPoint(metricName, metricValue, timestamp);
  }

  /** Builds a point for a rate value, converting it to the configured rate unit. */
  private Point buildRateAsPoint(String metricName, double value, long timestamp)
      throws IOException {
    return buildMetricAsPoint(metricName, convertRate(value), timestamp);
  }

  /** Builds a single-field ("value") point at millisecond precision. */
  private Point buildMetricAsPoint(String name, Object value, long timestamp) throws IOException {
    return Point.measurement(name).field("value", value).time(timestamp, TimeUnit.MILLISECONDS).build();
  }

  /** Joins the base name with a {@link Measurements} suffix. */
  private String getKey(String baseName, Measurements measurements) {
    return getKey(baseName, measurements.getName());
  }

  /** Joins key segments with the reporter's configured JOINER (inherited). */
  private String getKey(String... keys) {
    return JOINER.join(keys);
  }
}
| |
package com.planet_ink.coffee_mud.core.intermud.i3;
import com.planet_ink.coffee_mud.core.intermud.i3.packets.*;
import com.planet_ink.coffee_mud.core.intermud.i3.persist.*;
import com.planet_ink.coffee_mud.core.intermud.i3.server.*;
import com.planet_ink.coffee_mud.core.intermud.i3.net.*;
import com.planet_ink.coffee_mud.core.intermud.*;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.core.CMSecurity.DbgFlag;
import com.planet_ink.coffee_mud.core.collections.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Libraries.interfaces.ChannelsLibrary.CMChannel;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.util.*;
import java.io.Serializable;
/*
Copyright 2000-2014 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
@SuppressWarnings({"unchecked","rawtypes"})
public class IMudInterface implements ImudServices, Serializable
{
public static final long serialVersionUID=0;
// Mud identity reported to the I3 router; overridable via the constructor.
public String version="CoffeeMud vX.X";
public String name="CoffeeMud";
public String i3state="Development";
// Lazily-created fallback room used as a location for factory MOBs.
public Room universalR=null;
public int port=5555;
// Default I3 channel mappings (local name -> remote I3 channel).
public List<CMChannel> channels=new XVector<CMChannel>(
new CMChannel[]{new CMChannel("I3CHAT","diku_chat",""),
new CMChannel("I3GOSSIP","diku_immortals",""),
new CMChannel("GREET","diku_code","")});
// Wall-clock time of the last packet received from the I3 router;
// volatile because it is written from the receive thread and read elsewhere.
private static volatile long lastPacketReceivedTime = System.currentTimeMillis();
// Translation table between CoffeeMud color codes, Pinkfish codes and raw ANSI.
String[][] i3ansi_conversion=
{
/*
* Conversion Format Below:
*
* { "<MUD TRANSLATION>", "PINKFISH", "ANSI TRANSLATION" }
*
* Foreground Standard Colors
*/
{ "^K", "%^BLACK%^", "\033[0;0;30m" }, // Black
{ "^R", "%^RED%^", "\033[0;0;31m" }, // Dark Red
{ "^G", "%^GREEN%^", "\033[0;0;32m" }, // Dark Green
{ "^Y", "%^ORANGE%^", "\033[0;0;33m" }, // Orange/Brown
{ "^B", "%^BLUE%^", "\033[0;0;34m" }, // Dark Blue
{ "^P", "%^MAGENTA%^", "\033[0;0;35m" }, // Purple/Magenta
{ "^C", "%^CYAN%^", "\033[0;0;36m" }, // Cyan
{ "^W", "%^WHITE%^", "\033[0;0;37m" }, // Grey
/* Background colors */
{ "", "%^B_BLACK%^", "\033[40m" }, // Black
{ "", "%^B_RED%^", "\033[41m" }, // Red
{ "", "%^B_GREEN%^", "\033[42m" }, // Green
{ "", "%^B_ORANGE%^", "\033[43m" }, // Orange
{ "", "%^B_YELLOW%^", "\033[43m" }, // Yellow, which may as well be orange since ANSI doesn't do that
{ "", "%^B_BLUE%^", "\033[44m" }, // Blue
{ "", "%^B_MAGENTA%^", "\033[45m" }, // Purple/Magenta
{ "", "%^B_CYAN%^", "\033[46m" }, // Cyan
{ "", "%^B_WHITE%^", "\033[47m" }, // White
/* Text Affects */
{ "^.", "%^RESET%^", "\033[0m" }, // Reset Text
{ "^.", "%^RESET%^", "\033[0m" }, // Reset Text
{ "^H", "%^BOLD%^", "\033[1m" }, // Bolden Text(Brightens it)
{ "^.", "%^EBOLD%^", "\033[0m" }, // Assumed to be a reset tag to stop bold
{ "^_", "%^UNDERLINE%^", "\033[4m" }, // Underline Text
{ "^*", "%^FLASH%^", "\033[5m" }, // Blink Text
{ "^/", "%^ITALIC%^", "\033[6m" }, // Italic Text
{ "", "%^REVERSE%^", "\033[7m" }, // Reverse Background and Foreground Colors
/* Foreground extended colors */
{ "^k", "%^BLACK%^%^BOLD%^", "\033[0;1;30m" }, // Dark Grey
{ "^r", "%^RED%^%^BOLD%^", "\033[0;1;31m" }, // Red
{ "^g", "%^GREEN%^%^BOLD%^", "\033[0;1;32m" }, // Green
{ "^y", "%^YELLOW%^", "\033[0;1;33m" }, // Yellow
{ "^b", "%^BLUE%^%^BOLD%^", "\033[0;1;34m" }, // Blue
{ "^p", "%^MAGENTA%^%^BOLD%^", "\033[0;1;35m" }, // Pink
{ "^c", "%^CYAN%^%^BOLD%^", "\033[0;1;36m" }, // Light Blue
{ "^w", "%^WHITE%^%^BOLD%^", "\033[0;1;37m" }, // White
/* Blinking foreground standard color */
{ "^K^*", "%^BLACK%^%^FLASH%^", "\033[0;5;30m" }, // Black
{ "^R^*", "%^RED%^%^FLASH%^", "\033[0;5;31m" }, // Dark Red
{ "^G^*", "%^GREEN%^%^FLASH%^", "\033[0;5;32m" }, // Dark Green
{ "^Y^*", "%^ORANGE%^%^FLASH%^", "\033[0;5;33m" }, // Orange/Brown
{ "^B^*", "%^BLUE%^%^FLASH%^", "\033[0;5;34m" }, // Dark Blue
{ "^P^*", "%^MAGENTA%^%^FLASH%^", "\033[0;5;35m" }, // Magenta/Purple
{ "^C^*", "%^CYAN%^%^FLASH%^", "\033[0;5;36m" }, // Cyan
{ "^W^*", "%^WHITE%^%^FLASH%^", "\033[0;5;37m" }, // Grey
{ "^k^*", "%^BLACK%^%^BOLD%^%^FLASH%^", "\033[1;5;30m" }, // Dark Grey
{ "^r^*", "%^RED%^%^BOLD%^%^FLASH%^", "\033[1;5;31m" }, // Red
{ "^g^*", "%^GREEN%^%^BOLD%^%^FLASH%^", "\033[1;5;32m" }, // Green
{ "^y^*", "%^YELLOW%^%^FLASH%^", "\033[1;5;33m" }, // Yellow
{ "^b^*", "%^BLUE%^%^BOLD%^%^FLASH%^", "\033[1;5;34m" }, // Blue
{ "^p^*", "%^MAGENTA%^%^BOLD%^%^FLASH%^", "\033[1;5;35m" }, // Pink
{ "^c^*", "%^CYAN%^%^BOLD%^%^FLASH%^", "\033[1;5;36m" }, // Light Blue
{ "^w^*", "%^WHITE%^%^BOLD%^%^FLASH%^", "\033[1;5;37m" } // White
};
/**
 * Constructs the interface with overrides for the default mud identity.
 * Any null argument leaves the corresponding default field value intact.
 *
 * @param Name     mud name, or null to keep the default
 * @param Version  mud version string, or null to keep the default
 * @param Port     port to report to the I3 router
 * @param i3status I3 state string (e.g. "Development"), or null for default
 * @param Channels channel mappings, or null to keep the defaults
 */
public IMudInterface(String Name, String Version, int Port, String i3status, List<CMChannel> Channels)
{
	port = Port;
	name = (Name != null) ? Name : name;
	version = (Version != null) ? Version : version;
	i3state = (i3status != null) ? i3status : i3state;
	channels = (Channels != null) ? Channels : channels;
}
/**
 * Translates a message through the language library's full session
 * translation, substituting the given variables.
 *
 * @param str the message to translate
 * @param xs  substitution values for the message
 * @return the translated, substituted message
 */
public String _(final String str, final String ... xs)
{
	return CMLib.lang().fullSessionTranslation(str, xs);
}
/**
 * Replaces this interface's I3 channel mappings wholesale.
 *
 * @param channels the new channel list (stored by reference, not copied)
 */
public void setChannelsMap(List<CMChannel> channels)
{
	this.channels=channels;
}
/**
 * Finds a player by name among currently connected sessions.
 *
 * @param mobName the player name to look up (exact-match flag is true)
 * @return the online player's MOB, or null if not connected
 */
protected MOB findSessMob(String mobName)
{
	return CMLib.sessions().findPlayerOnline(mobName, true);
}
/**
 * Lazily creates and returns a generic "StdRoom" used as a stand-in
 * location for factory MOBs created on behalf of remote I3 users.
 *
 * @return the shared universal room instance
 */
protected Room getUniversalRoom()
{
	if(universalR != null)
		return universalR;
	universalR = CMClass.getLocale("StdRoom");
	return universalR;
}
/**
 * Converts Pinkfish color codes (e.g. {@code %^RED%^}) embedded in an
 * incoming I3 string into CoffeeMud color codes (e.g. {@code ^R}) using
 * the i3ansi_conversion table. Codes with no table entry are left as-is.
 *
 * @param str the incoming text possibly containing Pinkfish codes
 * @return the text with recognized codes replaced
 */
public String fixColors(String str)
{
	final StringBuffer buf=new StringBuffer(str);
	// Index of the '%' that opened a candidate code, or -1 when not in one.
	int startedAt=-1;
	for(int i=0;i<buf.length();i++)
	{
		if(buf.charAt(i)=='%')
		{
			if(startedAt<0)
				startedAt=i;
			else
			if(((i+1)<buf.length())&&(buf.charAt(i+1)=='^'))
			{
				// Candidate code spans startedAt .. i+1 inclusive ("%^NAME%^").
				String found=null;
				final String code=buf.substring(startedAt,i+2);
				for (final String[] element : i3ansi_conversion)
				{
					// element[1] is the Pinkfish form; element[0] the MUD form.
					if(code.equals(element[1]))
					{found=element[0]; break;}
				}
				if(found!=null)
				{
					buf.replace(startedAt,i+2,found);
					// Resume scanning just past the replacement.
					// NOTE(review): when the replacement is the empty string
					// this skips two characters of the shifted-in text, which
					// could miss an immediately-following code — presumably
					// harmless in practice; confirm before changing.
					i=startedAt+1;
				}
				startedAt=-1;
			}
		}
	}
	return buf.toString();
}
/**
 * Rewrites MudOS-style social tokens ($N, $t, $m, ...) into CoffeeMud
 * message codes (&lt;S-NAME&gt;, &lt;T-NAMESELF&gt;, ...), converts
 * backquotes to apostrophes, and trims the result.
 *
 * @param str the incoming social string
 * @return the converted string, or "" when the input is just "$"
 */
public String socialFixIn(String str)
{
	// Ordered token -> replacement pairs; order matches the original chain.
	final String[][] swaps = {
		{"$N","<S-NAME>"},
		{"$n","<S-NAME>"},
		{"$T","<T-NAMESELF>"},
		{"$t","<T-NAMESELF>"},
		{"$O","<T-NAMESELF>"},
		{"$o","<T-NAMESELF>"},
		{"$m","<S-HIM-HER>"},
		{"$M","<T-HIM-HER>"},
		{"$s","<S-HIS-HER>"},
		{"$S","<T-HIS-HER>"},
		{"$e","<S-HE-SHE>"},
		{"$E","<T-HE-SHE>"},
		{"`","\'"}
	};
	for(final String[] swap : swaps)
		str=CMStrings.replaceAll(str,swap[0],swap[1]);
	if(str.equals("$"))
		return "";
	return str.trim();
}
/**
 * @return wall-clock millis of the last channel packet received from I3
 */
@Override
public long getLastPacketReceivedTime()
{
	return lastPacketReceivedTime;
}
@Override
public void resetLastPacketReceivedTime()
{
// Treat "now" as the last packet time, e.g. after a reconnect.
lastPacketReceivedTime=System.currentTimeMillis();
}
/**
* Handles an incoming I3 packet asynchronously.
* An implementation should make sure that asynchronously
* processing the incoming packet will not have any
* impact, otherwise you could end up with bizarre
* behaviour like an intermud chat line appearing
* in the middle of a room description. If your
* mudlib is not prepared to handle multiple threads,
* just stack up incoming packets and pull them off
* the stack during your main thread of execution.
* @param packet the incoming packet
*/
@Override
public void receive(Packet packet)
{
switch(packet.type)
{
case Packet.CHAN_EMOTE:
case Packet.CHAN_MESSAGE:
case Packet.CHAN_TARGET:
{
// Channel traffic (message / emote / targeted emote) from a remote mud.
lastPacketReceivedTime=System.currentTimeMillis();
final ChannelPacket ck=(ChannelPacket)packet;
final String channelName=ck.channel;
CMMsg msg=null;
// Ignore our own traffic echoed back, and packets without a channel.
if((ck.sender_mud!=null)&&(ck.sender_mud.equalsIgnoreCase(getMudName())))
return;
if((ck.channel==null)||(ck.channel.length()==0))
return;
String channelColor="";
final int channelInt=CMLib.channels().getChannelIndex(channelName);
int channelCode=channelInt;
if(channelInt >= 0)
{
// NOTE(review): assumes colorOverride is non-null for a known channel -- confirm.
channelColor=CMLib.channels().getChannel(channelInt).colorOverride;
}
else
{
// Channel not configured locally: fall back to a fixed channel code.
channelCode=47;
}
if(channelColor.length()==0)
channelColor="^Q";
ck.message=fixColors(CMProps.applyINIFilter(ck.message,CMProps.Str.CHANNELFILTER));
if(ck.message_target!=null)
ck.message_target=fixColors(CMProps.applyINIFilter(ck.message_target,CMProps.Str.CHANNELFILTER));
// Stand-in MOB representing the remote sender as "name@mud".
final MOB mob=CMClass.getFactoryMOB();
mob.setName(ck.sender_name+"@"+ck.sender_mud);
mob.setLocation(getUniversalRoom());
MOB targetMOB=null;
boolean killtargetmob=false;
if(ck.type==Packet.CHAN_TARGET)
{
// Target on this mud: load the real player; otherwise fabricate a
// temporary factory MOB for the remote target.
if((ck.target_mud!=null)&&(ck.target_mud.equalsIgnoreCase(getMudName())))
targetMOB=CMLib.players().getLoadPlayer(ck.target_name);
if((ck.target_visible_name!=null)&&(ck.target_mud!=null)&&(targetMOB==null))
{
killtargetmob=true;
targetMOB=CMClass.getFactoryMOB();
targetMOB.setName(ck.target_visible_name+"@"+ck.target_mud);
targetMOB.setLocation(getUniversalRoom());
}
String msgs=socialFixIn(ck.message);
msgs=CMProps.applyINIFilter(msgs,CMProps.Str.EMOTEFILTER);
// NOTE(review): message_target may still be null here -- confirm socialFixIn tolerates it.
String targmsgs=socialFixIn(ck.message_target);
targmsgs=CMProps.applyINIFilter(targmsgs,CMProps.Str.EMOTEFILTER);
final String str=channelColor+"^<CHANNEL \""+channelName+"\"^>["+channelName+"] "+msgs+"^</CHANNEL^>^N^.";
final String str2=channelColor+"^<CHANNEL \""+channelName+"\"^>["+channelName+"] "+targmsgs+"^</CHANNEL^>^N^.";
msg=CMClass.getMsg(mob,targetMOB,null,CMMsg.NO_EFFECT,null,CMMsg.MASK_CHANNEL|(CMMsg.TYP_CHANNEL+channelCode),str2,CMMsg.MASK_CHANNEL|(CMMsg.TYP_CHANNEL+channelCode),str);
}
else
if(ck.type==Packet.CHAN_EMOTE)
{
String msgs=socialFixIn(ck.message);
msgs=CMProps.applyINIFilter(msgs,CMProps.Str.EMOTEFILTER);
final String str=channelColor+"^<CHANNEL \""+channelName+"\"^>["+channelName+"] "+msgs+"^</CHANNEL^>^N^.";
msg=CMClass.getMsg(mob,null,null,CMMsg.NO_EFFECT,null,CMMsg.NO_EFFECT,null,CMMsg.MASK_CHANNEL|(CMMsg.TYP_CHANNEL+channelCode),str);
}
else
{
// Plain channel message: "<sender> <channel>(S) '<message>'".
final String str=channelColor+"^<CHANNEL \""+channelName+"\"^>"+mob.name()+" "+channelName+"(S) '"+ck.message+"'^</CHANNEL^>^N^.";
msg=CMClass.getMsg(mob,null,null,CMMsg.NO_EFFECT,null,CMMsg.NO_EFFECT,null,CMMsg.MASK_CHANNEL|(CMMsg.TYP_CHANNEL+channelCode),str);
}
CMLib.commands().monitorGlobalMessage(mob.location(), msg);
if(channelInt>=0) CMLib.channels().channelQueUp(channelInt,msg);
for(final Session S : CMLib.sessions().localOnlineIterable())
{
final MOB M=S.mob();
// FIX: guard against sessions with no MOB (e.g. mid-login); the old code
// dereferenced M.location() unconditionally and could NPE.
if((M!=null)
&&((channelInt<0)||CMLib.channels().mayReadThisChannel(mob,false,S,channelInt))
&&(M.location()!=null)
&&(M.location().okMessage(M,msg)))
M.executeMsg(M,msg);
}
mob.destroy();
if((targetMOB!=null)&&(killtargetmob)) targetMOB.destroy();
}
break;
case Packet.LOCATE_QUERY:
{
// A remote mud asks whether a named player exists / is online here.
lastPacketReceivedTime=System.currentTimeMillis();
final LocateQueryPacket lk=(LocateQueryPacket)packet;
String stat="online";
String name=CMStrings.capitalizeAndLower(lk.user_name);
final MOB smob=findSessMob(lk.user_name);
if(smob!=null)
{
// Cloaked players are reported as not logged in.
if(CMLib.flags().isCloaked(smob))
stat="exists, but not logged in";
}
else
if(CMLib.players().getPlayer(lk.user_name)!=null)
stat="exists, but not logged in";
else
if(CMLib.players().playerExists(lk.user_name))
stat="exists, but is not online";
else
name=null;
// name==null means "unknown player": send no reply at all.
if(name!=null)
{
final LocateReplyPacket lpk=new LocateReplyPacket(lk.sender_name,lk.sender_mud,name,0,stat);
try
{
lpk.send();
}catch(final Exception e){Log.errOut("IMudClient",e);}
}
}
break;
case Packet.LOCATE_REPLY:
{
// Answer to our earlier locate query: relay it to the asking player.
lastPacketReceivedTime=System.currentTimeMillis();
final LocateReplyPacket lk=(LocateReplyPacket)packet;
final MOB smob=findSessMob(lk.target_name);
if(smob!=null)
smob.tell(fixColors(lk.located_visible_name)+"@"+fixColors(lk.located_mud_name)+" ("+lk.idle_time+"): "+fixColors(lk.status));
}
break;
case Packet.FINGER_REQUEST:
{
// A remote mud requests finger info on one of our players.
lastPacketReceivedTime=System.currentTimeMillis();
final FingerRequest lk=(FingerRequest)packet;
Packet pkt;
final MOB M=CMLib.players().getLoadPlayer(lk.target_name);
if(M==null)
pkt=new ErrorPacket(lk.sender_name,lk.sender_mud,"unk-user","User "+lk.target_name+" is not known here.","0");
else
{
// "0" is the I3 convention for "field not available".
final FingerReply fpkt = new FingerReply(lk.sender_name,lk.sender_mud);
pkt=fpkt;
fpkt.e_mail="0";
final Session sess=M.session();
if((sess==null)||(!sess.isAfk()))
fpkt.idle_time="-1";
else
fpkt.idle_time=Long.toString(sess.getIdleMillis()/1000);
fpkt.ip_time="0"; // what IS this?
fpkt.loginout_time=CMLib.time().date2String(M.playerStats().getLastDateTime());
fpkt.real_name="0"; // don't even know this
if(M.titledName().equals(M.name()))
fpkt.title="An ordinary "+M.charStats().displayClassName();
else
fpkt.title=M.titledName();
fpkt.visible_name=M.name();
fpkt.extra=M.name()+" is a "+M.charStats().raceName()+" "+M.charStats().displayClassName();
}
try
{
pkt.send();
}catch(final Exception e){Log.errOut("IMudClient",e);}
}
break;
case Packet.FINGER_REPLY:
{
// Finger info about a remote player arrived: format it for the asker.
lastPacketReceivedTime=System.currentTimeMillis();
final FingerReply lk=(FingerReply)packet;
final MOB smob=findSessMob(lk.target_name);
if(smob!=null)
{
// Fields equal to "0" mean "not available" and are skipped.
final StringBuilder response=new StringBuilder("");
if((lk.visible_name.length()>0)&&(!lk.visible_name.equals("0")))
response.append("^H").append(CMStrings.padRight(_("Name"),10)).append(": ^N").append(lk.visible_name).append("\n\r");
if((lk.title.length()>0)&&(!lk.title.equals("0")))
response.append("^H").append(CMStrings.padRight(_("Title"),10)).append(": ^N").append(lk.title).append("\n\r");
if((lk.real_name.length()>0)&&(!lk.real_name.equals("0")))
response.append("^H").append(CMStrings.padRight(_("Real Name"),10)).append(": ^N").append(lk.real_name).append("\n\r");
if((lk.e_mail.length()>0)&&(!lk.e_mail.equals("0")))
response.append("^H").append(CMStrings.padRight(_("Email"),10)).append(": ^N").append(lk.e_mail).append("\n\r");
if((lk.loginout_time.length()>0)&&(!lk.loginout_time.equals("0")))
response.append("^H").append(CMStrings.padRight(_("Logged"),10)).append(": ^N").append(lk.loginout_time).append("\n\r");
if((lk.ip_time.length()>0)&&(!lk.ip_time.equals("0")))
response.append("^H").append(CMStrings.padRight(_("IP Time"),10)).append(": ^N").append(lk.ip_time).append("\n\r");
if((lk.extra.length()>0)&&(!lk.extra.equals("0")))
response.append("^H").append(CMStrings.padRight(_("Extra"),10)).append(": ^N").append(lk.extra).append("\n\r");
smob.tell(response.toString());
}
}
break;
case Packet.MAUTH_REQUEST:
{
// Mud-auth handshake request; always reply with our key.
lastPacketReceivedTime=System.currentTimeMillis();
final MudAuthRequest lk=(MudAuthRequest)packet;
if(lk.sender_mud.equalsIgnoreCase(I3Server.getMudName()))
{
if(CMSecurity.isDebugging(DbgFlag.I3))
Log.debugOut("I3","Received my own mud-auth.");
}
else
Log.sysOut("I3","MUD "+lk.sender_mud+" wants to mud-auth.");
final MudAuthReply pkt = new MudAuthReply(lk.sender_mud, System.currentTimeMillis());
try
{
pkt.send();
}catch(final Exception e){Log.errOut("IMudClient",e);}
}
break;
case Packet.MAUTH_REPLY:
{
// Mud-auth handshake reply; nothing to do beyond logging.
lastPacketReceivedTime=System.currentTimeMillis();
final MudAuthReply lk=(MudAuthReply)packet;
if(lk.sender_mud.equalsIgnoreCase(I3Server.getMudName()))
{
if(CMSecurity.isDebugging(DbgFlag.I3))
Log.debugOut("I3","I replied to my mud-auth.");
lastPacketReceivedTime=System.currentTimeMillis();
}
else
Log.sysOut("I3","MUD "+lk.sender_mud+" replied to my mud-auth with key "+lk.key+".");
}
break;
case Packet.WHO_REPLY:
{
// Who-list from a remote mud: display it to the requesting player.
lastPacketReceivedTime=System.currentTimeMillis();
final WhoPacket wk=(WhoPacket)packet;
final MOB smob=findSessMob(wk.target_name);
if(smob!=null)
{
final StringBuffer buf=new StringBuffer("\n\rwhois@"+fixColors(wk.sender_mud)+":\n\r");
// wk.who is a raw Vector of [name, idle-seconds, extra] triplets.
final Vector V=wk.who;
if(V.size()==0)
buf.append("Nobody!");
else
for(int v=0;v<V.size();v++)
{
final Vector V2=(Vector)V.elementAt(v);
final String nom = fixColors((String)V2.elementAt(0));
int idle=0;
if(V2.elementAt(1) instanceof Integer)
idle = ((Integer)V2.elementAt(1)).intValue();
final String xtra = fixColors((String)V2.elementAt(2));
buf.append("["+CMStrings.padRight(nom,20)+"] "+xtra+" ("+idle+")\n\r");
}
smob.session().wraplessPrintln(buf.toString());
break;
}
}
break;
case Packet.CHAN_WHO_REP:
{
// Channel listener list from a remote mud: display it to the asker.
lastPacketReceivedTime=System.currentTimeMillis();
final ChannelWhoReply wk=(ChannelWhoReply)packet;
final MOB smob=findSessMob(wk.target_name);
if(smob!=null)
{
final StringBuffer buf=new StringBuffer("\n\rListening on "+wk.channel+"@"+fixColors(wk.sender_mud)+":\n\r");
final Vector V=wk.who;
if(V.size()==0)
buf.append("Nobody!");
else
for(int v=0;v<V.size();v++)
{
final String nom = fixColors((String)V.elementAt(v));
buf.append("["+CMStrings.padRight(nom,20)+"]\n\r");
}
smob.session().wraplessPrintln(buf.toString());
smob.session().setPromptFlag(true);
break;
}
}
break;
case Packet.CHAN_WHO_REQ:
{
// A remote mud asks who is listening on one of our channels.
lastPacketReceivedTime=System.currentTimeMillis();
final ChannelWhoRequest wk=(ChannelWhoRequest)packet;
final ChannelWhoReply wkr=new ChannelWhoReply();
wkr.target_name=wk.sender_name;
wkr.target_mud=wk.sender_mud;
wkr.channel=wk.channel;
final int channelInt=CMLib.channels().getChannelIndex(wk.channel);
final Vector whoV=new Vector();
for(final Session S : CMLib.sessions().localOnlineIterable())
{
final MOB M=S.mob();
// FIX: the M!=null check must come FIRST; previously M was passed to
// mayReadThisChannel() before being null-checked, risking an NPE.
if((M!=null)
&&(CMLib.channels().mayReadThisChannel(M,false,S,channelInt))
&&(!CMLib.flags().isCloaked(M)))
whoV.addElement(M.name());
}
wkr.who=whoV;
try
{
wkr.send();
}catch(final Exception e){Log.errOut("IMudClient",e);}
}
break;
case Packet.CHAN_USER_REQ:
{
// A remote mud asks for a user's visible name and gender.
lastPacketReceivedTime=System.currentTimeMillis();
final ChannelUserRequest wk=(ChannelUserRequest)packet;
final ChannelUserReply wkr=new ChannelUserReply();
wkr.target_name=wk.sender_name;
wkr.target_mud=wk.sender_mud;
wkr.userRequested=wk.userToRequest;
final MOB M=CMLib.players().getLoadPlayer(wk.userToRequest);
if(M!=null)
{
wkr.userVisibleName = M.name();
wkr.gender=(char)M.charStats().getStat(CharStats.STAT_GENDER);
try
{
wkr.send();
}catch(final Exception e){Log.errOut("IMudClient",e);}
}
}
break;
case Packet.WHO_REQUEST:
{
// A remote mud asks for our who-list; report visible, in-game players.
lastPacketReceivedTime=System.currentTimeMillis();
final WhoPacket wk=(WhoPacket)packet;
final WhoPacket wkr=new WhoPacket();
wkr.type=Packet.WHO_REPLY;
wkr.target_name=wk.sender_name;
wkr.target_mud=wk.sender_mud;
final Vector whoV=new Vector();
for(final Session S : CMLib.sessions().localOnlineIterable())
{
MOB smob=S.mob();
// Report the possessed body, not the possessor.
if((smob!=null)&&(smob.soulMate()!=null))
smob=smob.soulMate();
if((!S.isStopped())&&(smob!=null)
&&(!smob.amDead())
&&(CMLib.flags().isInTheGame(smob,true))
&&(!CMLib.flags().isCloaked(smob)))
{
final Vector whoV2=new Vector();
whoV2.addElement(smob.name());
whoV2.addElement(Integer.valueOf((int)(S.getIdleMillis()/1000)));
whoV2.addElement(smob.charStats().displayClassLevel(smob,true));
whoV.addElement(whoV2);
}
}
wkr.who=whoV;
try
{
wkr.send();
}catch(final Exception e){Log.errOut("IMudClient",e);}
}
break;
case Packet.TELL:
{
// Private tell from a remote user: deliver it as a say to the target.
lastPacketReceivedTime=System.currentTimeMillis();
final TellPacket tk=(TellPacket)packet;
final MOB smob=findSessMob(tk.target_name);
if(smob!=null)
{
MOB mob=null;
final PlayerStats pstats=smob.playerStats();
if(pstats!=null)
{
// Reuse the existing remote reply-to MOB (name contains '@') if any.
if((pstats.getReplyToMOB()!=null)&&(pstats.getReplyToMOB().Name().indexOf('@')>=0))
mob=pstats.getReplyToMOB();
else
mob=CMClass.getFactoryMOB();
pstats.setReplyTo(mob, PlayerStats.REPLY_TELL);
}
else
mob=CMClass.getFactoryMOB();
mob.setName(tk.sender_name+"@"+tk.sender_mud);
mob.setLocation(getUniversalRoom());
tk.message=fixColors(CMProps.applyINIFilter(tk.message,CMProps.Str.SAYFILTER));
CMLib.commands().postSay(mob,smob,tk.message,true,true);
}
}
break;
default:
Log.errOut("IMudInterface","Unknown type: "+packet.type);
break;
}
}
/**
 * Returns an enumeration of the I3 names of every channel this mud
 * subscribes to.
 *
 * @return an enumeration of I3 channel name strings
 */
@Override
public java.util.Enumeration getChannels()
{
	final Vector names = new Vector();
	for(int c=0;c<channels.size();c++)
		names.addElement(channels.get(c).i3name);
	return names.elements();
}
/**
 * Registers a new channel mapping, provided neither its I3 name nor its
 * local name is already mapped.
 *
 * @param chan the channel mapping to add
 * @return true if the channel was added, false if either name was taken
 */
@Override
public boolean addChannel(CMChannel chan)
{
	// Both lookups are pure scans of the channel list, so evaluating
	// both up front is side-effect free.
	final boolean i3NameFree = getLocalChannel(chan.i3name).length()==0;
	final boolean localNameFree = getRemoteChannel(chan.name).length()==0;
	if(i3NameFree && localNameFree)
	{
		channels.add(chan);
		return true;
	}
	return false;
}
/**
 * Removes the first channel mapping whose I3 name matches the given
 * name, case-insensitively.
 *
 * @param remoteChannelName the I3 (remote) channel name
 * @return true if a mapping was removed, false otherwise
 */
@Override
public boolean delChannel(String remoteChannelName)
{
	int foundAt = -1;
	for(int i=0;(i<channels.size())&&(foundAt<0);i++)
	{
		if(channels.get(i).i3name.equalsIgnoreCase(remoteChannelName))
			foundAt = i;
	}
	if(foundAt < 0)
		return false;
	channels.remove(foundAt);
	return true;
}
/**
 * Given an I3 channel name, provides the local name for that channel.
 * Example:
 * <PRE>
 * if( str.equals("imud_code") ) return "intercre";
 * </PRE>
 *
 * @param str the remote (I3) name of the desired channel
 * @return the local channel name, or "" when the channel is unknown
 * @see #getRemoteChannel
 */
@Override
public String getLocalChannel(String str)
{
	String localName = "";
	for(final CMChannel chan : channels)
	{
		if(chan.i3name.equalsIgnoreCase(str))
		{
			localName = chan.name;
			break;
		}
	}
	return localName;
}
/**
 * Given a local channel name, provides the access mask configured for
 * that channel.
 * Example:
 * <PRE>
 * if( str.equals("ICODE") ) return "";
 * </PRE>
 *
 * @param str the local name of the desired channel
 * @return the channel's mask, or "" when the channel is unknown
 */
public String getLocalMask(String str)
{
	for(int c=0;c<channels.size();c++)
	{
		final CMChannel chan = channels.get(c);
		if(chan.name.equalsIgnoreCase(str))
			return chan.mask;
	}
	return "";
}
/**
 * @return the I3 name of this mud
 */
@Override
public String getMudName()
{
return name;
}
/**
 * @return the software name and version string of this mud
 */
@Override
public String getMudVersion()
{
return version;
}
/**
 * @return the current I3 state string of this mud
 * (the previous javadoc, "software name and version", was a
 * copy/paste from getMudVersion)
 */
@Override
public String getMudState()
{
return i3state;
}
/**
 * @return the player login port for this mud
 */
@Override
public int getMudPort()
{
return port;
}
/**
 * Given a remote (I3) channel name, returns the mask required.
 * Example:
 * <PRE>
 * if( str.equals("intercre") ) return "";
 * </PRE>
 *
 * @param str the remote (I3) name of the desired channel
 * @return the channel's mask, or "" when the channel is unknown
 */
@Override
public String getRemoteMask(String str)
{
	String mask = "";
	for(final CMChannel chan : channels)
	{
		if(chan.i3name.equalsIgnoreCase(str))
		{
			mask = chan.mask;
			break;
		}
	}
	return mask;
}
/**
 * Given a local channel name, returns the remote (I3) channel name.
 * Example:
 * <PRE>
 * if( str.equals("intercre") ) return "imud_code";
 * </PRE>
 *
 * @param str the local name of the desired channel
 * @return the remote (I3) name, or "" when the channel is unknown
 * @see #getLocalChannel
 */
@Override
public String getRemoteChannel(String str)
{
	for(int c=0;c<channels.size();c++)
	{
		final CMChannel chan = channels.get(c);
		if(chan.name.equalsIgnoreCase(str))
			return chan.i3name;
	}
	return "";
}
}
| |
/**
*
* Copyright 2003-2006 Jive Software.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jivesoftware.smackx.jingleold.mediaimpl.jspeex;
import java.io.IOException;
import java.net.DatagramSocket;
import java.net.InetAddress;
import java.net.ServerSocket;
import java.security.GeneralSecurityException;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.media.NoProcessorException;
import javax.media.format.UnsupportedFormatException;
import javax.media.rtp.rtcp.SenderReport;
import javax.media.rtp.rtcp.SourceDescription;
import mil.jfcom.cie.media.session.MediaSession;
import mil.jfcom.cie.media.session.MediaSessionListener;
import mil.jfcom.cie.media.session.StreamPlayer;
import mil.jfcom.cie.media.srtp.packetizer.SpeexFormat;
import org.jivesoftware.smackx.jingleold.JingleSession;
import org.jivesoftware.smackx.jingleold.media.JingleMediaSession;
import org.jivesoftware.smackx.jingleold.media.PayloadType;
import org.jivesoftware.smackx.jingleold.nat.TransportCandidate;
/**
 * This class implements a complete JingleMediaSession.
 * It should be used to transmit and receive audio captured from the Mic.
 * This class should be automatically controlled by JingleSession,
 * but you could also use it in any VOIP application.
 * For better NAT traversal support this implementation doesn't support
 * receive-only or transmit-only modes: to receive you MUST transmit. So the
 * only implemented and functional methods are startTrasmit() and stopTrasmit().
 *
 * @author Thiago Camargo
 */
public class AudioMediaSession extends JingleMediaSession implements MediaSessionListener {

    private static final Logger LOGGER = Logger.getLogger(AudioMediaSession.class.getName());

    // The underlying (S)RTP session; created in initialize(), closed in stopTrasmit().
    private MediaSession mediaSession;

    /**
     * Create a Session using the Speex codec.
     *
     * @param localhost local host/interface to bind
     * @param localPort preferred local data port
     * @param remoteHost remote host to send media to
     * @param remotePort remote port to send media to
     * @param eventHandler listener notified of stream and report events
     * @param quality Speex encoder quality
     * @param secure whether to enable SRTP with the hardcoded key material below
     * @param micOn microphone flag (NOTE(review): currently unused in this method -- confirm intent)
     * @return MediaSession
     * @throws NoProcessorException if no media processor can handle the format
     * @throws UnsupportedFormatException if the Speex format is unsupported
     * @throws IOException on socket or media I/O failure
     * @throws GeneralSecurityException if SRTP setup fails
     */
    public static MediaSession createSession(String localhost, int localPort, String remoteHost, int remotePort, MediaSessionListener eventHandler, int quality, boolean secure, boolean micOn) throws NoProcessorException, UnsupportedFormatException, IOException, GeneralSecurityException {
        SpeexFormat.setFramesPerPacket(1);
        // The SRTP master key. Hardcoded for now -- NOT suitable for real security.
        byte[] masterKey = new byte[]{(byte) 0xE1, (byte) 0xF9, 0x7A, 0x0D, 0x3E, 0x01, (byte) 0x8B, (byte) 0xE0, (byte) 0xD6, 0x4F, (byte) 0xA3, 0x2C, 0x06, (byte) 0xDE, 0x41, 0x39};
        // The SRTP master salt. Hardcoded for now.
        byte[] masterSalt = new byte[]{0x0E, (byte) 0xC6, 0x75, (byte) 0xAD, 0x49, (byte) 0x8A, (byte) 0xFE, (byte) 0xEB, (byte) 0xB6, (byte) 0x96, 0x0B, 0x3A, (byte) 0xAB, (byte) 0xE6};
        DatagramSocket[] localPorts = MediaSession.getLocalPorts(InetAddress.getByName(localhost), localPort);
        MediaSession session = MediaSession.createInstance(remoteHost, remotePort, localPorts, quality, secure, masterKey, masterSalt);
        session.setListener(eventHandler);
        session.setSourceDescription(new SourceDescription[]{new SourceDescription(SourceDescription.SOURCE_DESC_NAME, "Superman", 1, false), new SourceDescription(SourceDescription.SOURCE_DESC_EMAIL, "cdcie.tester@je.jfcom.mil", 1, false), new SourceDescription(SourceDescription.SOURCE_DESC_LOC, InetAddress.getByName(localhost) + " Port " + session.getLocalDataPort(), 1, false), new SourceDescription(SourceDescription.SOURCE_DESC_TOOL, "JFCOM CDCIE Audio Chat", 1, false)});
        return session;
    }

    /**
     * Creates an AudioMediaSession with defined payload type, remote and local candidates.
     *
     * @param payloadType Payload of the jmf
     * @param remote the remote information; the candidate that the jmf will be sent to
     * @param local the local information; the candidate that will receive the jmf
     * @param locator media locator; defaults to "dsound://" when null
     * @param jingleSession the owning Jingle session
     */
    public AudioMediaSession(final PayloadType payloadType, final TransportCandidate remote,
            final TransportCandidate local, String locator, JingleSession jingleSession) {
        super(payloadType, remote, local, locator == null ? "dsound://" : locator, jingleSession);
        initialize();
    }

    /**
     * Initialize the audio channel so it is able to send and receive audio.
     */
    public void initialize() {
        String ip;
        String localIp;
        int localPort;
        int remotePort;
        if (this.getLocal().getSymmetric() != null) {
            // Symmetric-NAT case: send to the local candidate's IP and the
            // symmetric candidate's port, from a freshly allocated local port.
            ip = this.getLocal().getIp();
            localIp = this.getLocal().getLocalIp();
            localPort = getFreePort();
            remotePort = this.getLocal().getSymmetric().getPort();
            LOGGER.fine(this.getLocal().getConnection() + " " + ip + ": " + localPort + "->" + remotePort);
        }
        else {
            ip = this.getRemote().getIp();
            localIp = this.getLocal().getLocalIp();
            localPort = this.getLocal().getPort();
            remotePort = this.getRemote().getPort();
        }
        try {
            mediaSession = createSession(localIp, localPort, ip, remotePort, this, 2, false, true);
        }
        catch (NoProcessorException | UnsupportedFormatException | IOException | GeneralSecurityException e) {
            // Route failures through the logger instead of printStackTrace so
            // they reach the application's configured log handlers.
            LOGGER.log(Level.SEVERE, "Failed to create audio media session", e);
        }
    }

    /**
     * Starts transmission and, for NAT traversal reasons, starts receiving as well.
     */
    public void startTrasmit() {
        try {
            LOGGER.fine("start");
            mediaSession.start(true);
            this.mediaReceived("");
        }
        catch (IOException e) {
            LOGGER.log(Level.SEVERE, "Failed to start audio media session", e);
        }
    }

    /**
     * Set transmit activity. If active is true, the instance should transmit;
     * if false, the instance should pause transmitting.
     *
     * @param active active state
     */
    public void setTrasmit(boolean active) {
        // Do nothing
    }

    /**
     * For NAT reasons this method does nothing. Use startTrasmit() to start transmit and receive jmf.
     */
    public void startReceive() {
        // Do nothing
    }

    /**
     * Stops transmission and, for NAT traversal reasons, stops receiving as well.
     */
    public void stopTrasmit() {
        if (mediaSession != null)
            mediaSession.close();
    }

    /**
     * For NAT reasons this method does nothing. Use startTrasmit() to start transmit and receive jmf.
     */
    public void stopReceive() {
        // Do nothing
    }

    /** No-op MediaSessionListener callback: a new inbound stream was identified. */
    public void newStreamIdentified(StreamPlayer streamPlayer) {
    }

    /** No-op MediaSessionListener callback: an RTCP sender report was received. */
    public void senderReportReceived(SenderReport report) {
    }

    /** No-op MediaSessionListener callback: a stream closed (possibly by timeout). */
    public void streamClosed(StreamPlayer stream, boolean timeout) {
    }

    /**
     * Obtain a free port we can use (even-numbered, per RTP convention).
     *
     * @return a free port number, or 0 when none could be obtained
     */
    protected int getFreePort() {
        // Probe up to ten random even ports in [10000, 20000].
        for (int i = 0; i < 10; i++) {
            int candidate = (int) (10000 + Math.round(Math.random() * 10000));
            candidate = candidate % 2 == 0 ? candidate : candidate + 1;
            // try-with-resources closes the probe socket even when binding or
            // closing fails (the old code could leak it on exception paths).
            try (ServerSocket ss = new ServerSocket(candidate)) {
                return ss.getLocalPort();
            }
            catch (IOException e) {
                LOGGER.log(Level.FINE, "Port " + candidate + " unavailable, retrying", e);
            }
        }
        // Fall back to an OS-assigned ephemeral port.
        try (ServerSocket ss = new ServerSocket(0)) {
            return ss.getLocalPort();
        }
        catch (IOException e) {
            LOGGER.log(Level.WARNING, "Could not obtain any free port", e);
            return 0;
        }
    }
}
| |
/**
* OLAT - Online Learning and Training<br>
* http://www.olat.org
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Copyright (c) frentix GmbH, Switzerland,<br>
* http://www.frentix.com
* <p>
*/
package org.olat.repository.portlet;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import org.olat.core.commons.persistence.DBFactory;
import org.olat.core.gui.UserRequest;
import org.olat.core.gui.Windows;
import org.olat.core.gui.components.Component;
import org.olat.core.gui.components.link.Link;
import org.olat.core.gui.components.link.LinkFactory;
import org.olat.core.gui.components.table.ColumnDescriptor;
import org.olat.core.gui.components.table.DefaultColumnDescriptor;
import org.olat.core.gui.components.table.Table;
import org.olat.core.gui.components.table.TableController;
import org.olat.core.gui.components.table.TableEvent;
import org.olat.core.gui.components.table.TableGuiConfiguration;
import org.olat.core.gui.components.velocity.VelocityContainer;
import org.olat.core.gui.control.Controller;
import org.olat.core.gui.control.Event;
import org.olat.core.gui.control.WindowControl;
import org.olat.core.gui.control.generic.dtabs.DTab;
import org.olat.core.gui.control.generic.dtabs.DTabs;
import org.olat.core.gui.control.generic.portal.AbstractPortletRunController;
import org.olat.core.gui.control.generic.portal.PortletDefaultTableDataModel;
import org.olat.core.gui.control.generic.portal.PortletEntry;
import org.olat.core.gui.control.generic.portal.PortletToolSortingControllerImpl;
import org.olat.core.gui.control.generic.portal.SortingCriteria;
import org.olat.core.gui.translator.Translator;
import org.olat.core.util.coordinate.CoordinatorManager;
import org.olat.core.util.event.GenericEventListener;
import org.olat.core.util.resource.OresHelper;
import org.olat.group.BusinessGroup;
import org.olat.repository.RepositoryEntry;
import org.olat.repository.RepositoryEntryTypeColumnDescriptor;
import org.olat.repository.RepositoryManager;
import org.olat.repository.RepositoyUIFactory;
import org.olat.repository.site.RepositorySite;
/**
* Description:<br>
* Runtime view that shows a list of courses, either as student or teacher
* <P>
* Initial Date: 06.03.2009 <br>
*
* @author gnaegi
*/
public class RepositoryPortletRunController extends AbstractPortletRunController implements GenericEventListener {
// Table row action id used to launch the clicked repository entry.
private static final String CMD_LAUNCH = "cmd.launch";
// Table showing the portlet's course list.
private final TableController tableCtr;
// Backing model of the table; rebuilt by reloadModel(..).
private RepositoryPortletTableDataModel repoEntryListModel;
// Velocity wrapper around the table plus the "show all" link.
private final VelocityContainer repoEntriesVC;
// true: list courses where the user is student; false: where the user is teacher.
private final boolean studentView;
// Link that jumps to the full "my courses" view in the repository site.
private final Link showAllLink;
/**
 * Constructor
 *
 * @param wControl the window control
 * @param ureq the user request
 * @param trans the translator for portlet labels
 * @param portletName the name of this portlet instance
 * @param studentView true: show courses where I'm student; false: show courses where I'm teacher
 */
public RepositoryPortletRunController(final WindowControl wControl, final UserRequest ureq, final Translator trans, final String portletName,
final boolean studentView) {
super(wControl, ureq, trans, portletName);
this.studentView = studentView;
// Only alphabetical auto-sorting is offered for this portlet.
sortingTermsList.add(SortingCriteria.ALPHABETICAL_SORTING);
repoEntriesVC = this.createVelocityContainer("repositoryPortlet");
showAllLink = LinkFactory.createLink("repositoryPortlet.showAll", repoEntriesVC, this);
// Minimal table: no header, paging, row count or download -- portlet style.
final TableGuiConfiguration tableConfig = new TableGuiConfiguration();
tableConfig.setTableEmptyMessage(trans.translate("repositoryPortlet.noentry"));
tableConfig.setDisplayTableHeader(false);
tableConfig.setCustomCssClass("b_portlet_table");
tableConfig.setDisplayRowCount(false);
tableConfig.setPageingEnabled(false);
tableConfig.setDownloadOffered(false);
// disable the default sorting for this table
tableConfig.setSortingEnabled(false);
tableCtr = new TableController(tableConfig, ureq, getWindowControl(), trans);
listenTo(tableCtr);
// dummy header key, won't be used since setDisplayTableHeader is set to
// false
tableCtr.addColumnDescriptor(new RepositoryEntryTypeColumnDescriptor("repositoryPortlet.img", 2, CMD_LAUNCH, trans.getLocale(), ColumnDescriptor.ALIGNMENT_LEFT));
tableCtr.addColumnDescriptor(new DefaultColumnDescriptor("repositoryPortlet.name", 0, CMD_LAUNCH, trans.getLocale(), ColumnDescriptor.ALIGNMENT_LEFT));
// Restore the user's persisted sorting, then load the table content.
this.sortingCriteria = getPersistentSortingConfiguration(ureq);
reloadModel(this.sortingCriteria);
repoEntriesVC.put("table", tableCtr.getInitialComponent());
putInitialPanel(repoEntriesVC);
// register for businessgroup type events
// FIXME:RH:repo listen to changes
CoordinatorManager.getInstance().getCoordinator().getEventBus().registerFor(this, ureq.getIdentity(), OresHelper.lookupType(BusinessGroup.class));
}
/**
 * Loads every learning resource for the current identity -- as student or
 * as teacher depending on this portlet's view -- and converts the list to
 * portlet entries (closed entries are filtered out by the conversion).
 *
 * @return the portlet entries for this view
 */
private List<PortletEntry> getAllPortletEntries() {
	final RepositoryManager rm = RepositoryManager.getInstance();
	final List<RepositoryEntry> allRepoEntries = studentView
			? rm.getLearningResourcesAsStudent(identity)
			: rm.getLearningResourcesAsTeacher(identity);
	return convertRepositoryEntriesToPortletEntryList(allRepoEntries);
}
/**
 * Wraps repository entries as portlet entries, skipping entries whose
 * status marks them as closed.
 *
 * @param items the repository entries to convert
 * @return portlet entries for all non-closed items
 */
private List<PortletEntry> convertRepositoryEntriesToPortletEntryList(final List<RepositoryEntry> items) {
	final List<PortletEntry> result = new ArrayList<PortletEntry>();
	for (final RepositoryEntry item : items) {
		if (RepositoryManager.getInstance().createRepositoryEntryStatus(item.getStatusCode()).isClosed()) {
			continue; // closed entries are not shown in the portlet
		}
		result.add(new RepositoryPortletEntry(item));
	}
	return result;
}
@Override
protected void reloadModel(final SortingCriteria sortingCriteria) {
	// Manual sorting: delegate to the persisted, manually ordered items.
	if (sortingCriteria.getSortingType() != SortingCriteria.AUTO_SORTING) {
		reloadModel(this.getPersistentManuallySortedItems());
		return;
	}
	// Auto sorting: fetch everything and sort by the given criteria.
	final List<PortletEntry> entries = getSortedList(getAllPortletEntries(), sortingCriteria);
	repoEntryListModel = new RepositoryPortletTableDataModel(entries, getLocale());
	tableCtr.setTableDataModel(repoEntryListModel);
}
@Override
protected void reloadModel(final List<PortletEntry> sortedItems) {
// Rebuild the table model from an already-sorted entry list.
repoEntryListModel = new RepositoryPortletTableDataModel(sortedItems, getLocale());
tableCtr.setTableDataModel(repoEntryListModel);
}
/**
 * Handles clicks on the "show all" link by activating the repository site
 * with the "my courses" view matching this portlet's student/teacher mode.
 *
 * @see org.olat.core.gui.control.DefaultController#event(org.olat.core.gui.UserRequest, org.olat.core.gui.components.Component, org.olat.core.gui.control.Event)
 */
@Override
public void event(final UserRequest ureq, final Component source, final Event event) {
	if (source != showAllLink) {
		return;
	}
	// activate learning resource tab in top navigation and active my courses menu item
	final DTabs dts = (DTabs) Windows.getWindows(ureq).getWindow(ureq).getAttribute("DTabs");
	// attach controller / action extension dynamically to lr-site
	final String viewIdentifier = studentView ? "search.mycourses.student" : "search.mycourses.teacher";
	dts.activateStatic(ureq, RepositorySite.class.getName(), viewIdentifier);
}
/**
 * Handles table row clicks: launches the clicked repository entry in a
 * dynamic tab, reusing an already-open tab when one exists.
 *
 * @see org.olat.core.gui.control.ControllerEventListener#dispatchEvent(org.olat.core.gui.UserRequest, org.olat.core.gui.control.Controller,
 *      org.olat.core.gui.control.Event)
 */
@Override
public void event(final UserRequest ureq, final Controller source, final Event event) {
super.event(ureq, source, event);
if (source == tableCtr) {
if (event.getCommand().equals(Table.COMMANDLINK_ROWACTION_CLICKED)) {
final TableEvent te = (TableEvent) event;
final String actionid = te.getActionId();
if (actionid.equals(CMD_LAUNCH)) {
final int rowId = te.getRowId();
RepositoryEntry repoEntry = repoEntryListModel.getRepositoryEntry(rowId);
// refresh repo entry, attach to hibernate session
repoEntry = (RepositoryEntry) DBFactory.getInstance().loadObject(repoEntry);
// get run controller for this repo entry and launch it in new tab
final DTabs dts = (DTabs) Windows.getWindows(ureq).getWindow(ureq).getAttribute("DTabs");
// was brasato:: DTabs dts = wControl.getDTabs();
DTab dt = dts.getDTab(repoEntry.getOlatResource());
if (dt == null) {
// does not yet exist -> create and add
dt = dts.createDTab(repoEntry.getOlatResource(), repoEntry.getDisplayname());
// createDTab returns null when the tab limit is reached; skip launch then
if (dt != null) {
final Controller runCtr = RepositoyUIFactory.createLaunchController(repoEntry, null, ureq, dt.getWindowControl());
dt.setController(runCtr);
dts.addDTab(dt);
dts.activate(ureq, dt, null); // null: do not activate to a certain view
}
} else {
// tab already open: just bring it to the front
dts.activate(ureq, dt, null);
}
}
}
}
}
/**
 * Disposes this controller. Currently only delegates to the superclass;
 * no event-bus de-registration happens here because this controller does
 * not (yet) register for repository/group change events.
 *
 * @see org.olat.core.gui.control.DefaultController#doDispose(boolean)
 */
@Override
protected void doDispose() {
    super.doDispose();
    // FIXME:RH:repo listen to changes
    // de-register for businessgroup type events
    // CoordinatorManager.getInstance().getCoordinator().getEventBus().deregisterFor(this, OresHelper.lookupType(BusinessGroup.class));
    // POST: all firing event for the source just deregistered are finished
    // (listeners lock in EventAgency)
}
/**
 * Generic event callback (GenericEventListener). Intentionally a no-op:
 * this controller does not currently register for repository or
 * business-group change events (see {@link #doDispose()}).
 *
 * @param event the bus event; ignored for now
 */
@Override
public void event(final Event event) {
    // FIXME:RH:repo listen to changes — once this controller registers for
    // repository/business-group modification events again, apply the change
    // to the portlet's list here and call tableCtr.modelChanged() only when
    // the displayed entries were actually affected. The previous sketch of
    // that handling was removed as commented-out dead code.
}
/**
 * Lazily creates the sorting tool for this portlet: reads the persistent
 * sortingCriteria and any persistent manual sort order, builds the table
 * model for manual sorting, and wires this controller as listener.
 *
 * @param ureq the user request
 * @param wControl the window control
 * @return the (cached) PortletToolSortingControllerImpl instance
 */
protected PortletToolSortingControllerImpl createSortingTool(final UserRequest ureq, final WindowControl wControl) {
    if (portletToolsController != null) {
        return portletToolsController;
    }
    final List<PortletEntry> allEntries = getAllPortletEntries();
    final PortletDefaultTableDataModel manualSortModel = new RepositoryPortletTableDataModel(allEntries, ureq.getLocale());
    final List manuallySortedItems = getPersistentManuallySortedItems();
    portletToolsController = new PortletToolSortingControllerImpl(ureq, wControl, getTranslator(), sortingCriteria, manualSortModel, manuallySortedItems);
    portletToolsController.setConfigManualSorting(true);
    portletToolsController.setConfigAutoSorting(true);
    portletToolsController.addControllerListener(this);
    return portletToolsController;
}
/**
 * Retrieves the persistent manually sorted items for the current portlet,
 * based on the full set of portlet entries.
 *
 * @return the persistently stored manual ordering of this portlet's entries
 */
private List<PortletEntry> getPersistentManuallySortedItems() {
    // Delegate to the superclass overload with the complete entry list.
    return getPersistentManuallySortedItems(getAllPortletEntries());
}
/**
 * Builds a Comparator for repository portlet entries according to the given
 * sortingCriteria. Only alphabetical sorting (by display name, via the
 * locale-aware collator) is supported; any other term yields a tie (0).
 *
 * @param sortingCriteria the criteria (term and direction) to sort by
 * @return a Comparator honoring the given criteria
 */
@Override
protected Comparator getComparator(final SortingCriteria sortingCriteria) {
    return new Comparator() {
        @Override
        public int compare(final Object o1, final Object o2) {
            final RepositoryEntry left = ((RepositoryPortletEntry) o1).getValue();
            final RepositoryEntry right = ((RepositoryPortletEntry) o2).getValue();
            int order = 0;
            if (sortingCriteria.getSortingTerm() == SortingCriteria.ALPHABETICAL_SORTING) {
                order = collator.compare(left.getDisplayname(), right.getDisplayname());
            }
            // Descending order simply negates the ascending comparison.
            return sortingCriteria.isAscending() ? order : -order;
        }
    };
}
}
| |
// This file was generated by Mendix Modeler.
//
// WARNING: Code you write here will be lost the next time you deploy the project.
package myfirstmodule.proxies;
/**
 * Generated Mendix proxy for the 'MyFirstModule.Entity' entity. All member
 * access delegates to the wrapped {@code IMendixObject}.
 * NOTE(review): this file is generated by the Mendix Modeler; manual edits
 * are lost on the next deploy, so only documentation is added here.
 */
public class Entity
{
    // The wrapped Mendix object; never null (validated in the protected constructor).
    private final com.mendix.systemwideinterfaces.core.IMendixObject entityMendixObject;
    // Context used for security checking by the get/set overloads that take no context parameter.
    private final com.mendix.systemwideinterfaces.core.IContext context;
    /**
     * Internal name of this entity
     */
    public static final java.lang.String entityName = "MyFirstModule.Entity";
    /**
     * Enum describing members of this entity
     */
    public enum MemberNames
    {
        EntityID("EntityID"),
        Entity_Container("MyFirstModule.Entity_Container");
        // Meta-model name of the member; exposed via toString().
        private java.lang.String metaName;
        MemberNames(java.lang.String s)
        {
            metaName = s;
        }
        @Override
        public java.lang.String toString()
        {
            return metaName;
        }
    }
    /**
     * Instantiates a new, not-yet-committed 'MyFirstModule.Entity' object in the given context.
     */
    public Entity(com.mendix.systemwideinterfaces.core.IContext context)
    {
        this(context, com.mendix.core.Core.instantiate(context, "MyFirstModule.Entity"));
    }
    /**
     * Wraps an existing Mendix object.
     *
     * @throws java.lang.IllegalArgumentException if the object is null or not a (subtype of) MyFirstModule.Entity
     */
    protected Entity(com.mendix.systemwideinterfaces.core.IContext context, com.mendix.systemwideinterfaces.core.IMendixObject entityMendixObject)
    {
        if (entityMendixObject == null)
            throw new java.lang.IllegalArgumentException("The given object cannot be null.");
        if (!com.mendix.core.Core.isSubClassOf("MyFirstModule.Entity", entityMendixObject.getType()))
            throw new java.lang.IllegalArgumentException("The given object is not a MyFirstModule.Entity");
        this.entityMendixObject = entityMendixObject;
        this.context = context;
    }
    /**
     * @deprecated Use 'Entity.load(IContext, IMendixIdentifier)' instead.
     */
    @Deprecated
    public static myfirstmodule.proxies.Entity initialize(com.mendix.systemwideinterfaces.core.IContext context, com.mendix.systemwideinterfaces.core.IMendixIdentifier mendixIdentifier) throws com.mendix.core.CoreException
    {
        return myfirstmodule.proxies.Entity.load(context, mendixIdentifier);
    }
    /**
     * Initialize a proxy using context (recommended). This context will be used for security checking when the get- and set-methods without context parameters are called.
     * The get- and set-methods with context parameter should be used when for instance sudo access is necessary (IContext.createSudoClone() can be used to obtain sudo access).
     */
    public static myfirstmodule.proxies.Entity initialize(com.mendix.systemwideinterfaces.core.IContext context, com.mendix.systemwideinterfaces.core.IMendixObject mendixObject)
    {
        return new myfirstmodule.proxies.Entity(context, mendixObject);
    }
    /**
     * Loads the object identified by the given identifier from the database.
     */
    public static myfirstmodule.proxies.Entity load(com.mendix.systemwideinterfaces.core.IContext context, com.mendix.systemwideinterfaces.core.IMendixIdentifier mendixIdentifier) throws com.mendix.core.CoreException
    {
        com.mendix.systemwideinterfaces.core.IMendixObject mendixObject = com.mendix.core.Core.retrieveId(context, mendixIdentifier);
        return myfirstmodule.proxies.Entity.initialize(context, mendixObject);
    }
    /**
     * Retrieves all 'MyFirstModule.Entity' objects matching the given XPath constraint
     * (the constraint is appended to the entity's base XPath query).
     */
    public static java.util.List<myfirstmodule.proxies.Entity> load(com.mendix.systemwideinterfaces.core.IContext context, java.lang.String xpathConstraint) throws com.mendix.core.CoreException
    {
        java.util.List<myfirstmodule.proxies.Entity> result = new java.util.ArrayList<myfirstmodule.proxies.Entity>();
        for (com.mendix.systemwideinterfaces.core.IMendixObject obj : com.mendix.core.Core.retrieveXPathQuery(context, "//MyFirstModule.Entity" + xpathConstraint))
            result.add(myfirstmodule.proxies.Entity.initialize(context, obj));
        return result;
    }
    /**
     * Commit the changes made on this proxy object.
     */
    public final void commit() throws com.mendix.core.CoreException
    {
        com.mendix.core.Core.commit(context, getMendixObject());
    }
    /**
     * Commit the changes made on this proxy object using the specified context.
     */
    public final void commit(com.mendix.systemwideinterfaces.core.IContext context) throws com.mendix.core.CoreException
    {
        com.mendix.core.Core.commit(context, getMendixObject());
    }
    /**
     * Delete the object.
     */
    public final void delete()
    {
        com.mendix.core.Core.delete(context, getMendixObject());
    }
    /**
     * Delete the object using the specified context.
     */
    public final void delete(com.mendix.systemwideinterfaces.core.IContext context)
    {
        com.mendix.core.Core.delete(context, getMendixObject());
    }
    /**
     * @return value of EntityID
     */
    public final java.lang.Long getEntityID()
    {
        return getEntityID(getContext());
    }
    /**
     * @param context
     * @return value of EntityID
     */
    public final java.lang.Long getEntityID(com.mendix.systemwideinterfaces.core.IContext context)
    {
        return (java.lang.Long) getMendixObject().getValue(context, MemberNames.EntityID.toString());
    }
    /**
     * Set value of EntityID
     * @param entityid
     */
    public final void setEntityID(java.lang.Long entityid)
    {
        setEntityID(getContext(), entityid);
    }
    /**
     * Set value of EntityID
     * @param context
     * @param entityid
     */
    public final void setEntityID(com.mendix.systemwideinterfaces.core.IContext context, java.lang.Long entityid)
    {
        getMendixObject().setValue(context, MemberNames.EntityID.toString(), entityid);
    }
    /**
     * @return value of Entity_Container association, or null when not set
     */
    public final myfirstmodule.proxies.Container getEntity_Container() throws com.mendix.core.CoreException
    {
        return getEntity_Container(getContext());
    }
    /**
     * @param context
     * @return value of Entity_Container association, or null when not set
     */
    public final myfirstmodule.proxies.Container getEntity_Container(com.mendix.systemwideinterfaces.core.IContext context) throws com.mendix.core.CoreException
    {
        myfirstmodule.proxies.Container result = null;
        com.mendix.systemwideinterfaces.core.IMendixIdentifier identifier = getMendixObject().getValue(context, MemberNames.Entity_Container.toString());
        if (identifier != null)
            result = myfirstmodule.proxies.Container.load(context, identifier);
        return result;
    }
    /**
     * Set value of Entity_Container
     * @param entity_container the referenced Container, or null to clear the association
     */
    public final void setEntity_Container(myfirstmodule.proxies.Container entity_container)
    {
        setEntity_Container(getContext(), entity_container);
    }
    /**
     * Set value of Entity_Container
     * @param context
     * @param entity_container the referenced Container, or null to clear the association
     */
    public final void setEntity_Container(com.mendix.systemwideinterfaces.core.IContext context, myfirstmodule.proxies.Container entity_container)
    {
        if (entity_container == null)
            getMendixObject().setValue(context, MemberNames.Entity_Container.toString(), null);
        else
            getMendixObject().setValue(context, MemberNames.Entity_Container.toString(), entity_container.getMendixObject().getId());
    }
    /**
     * @return the IMendixObject instance of this proxy for use in the Core interface.
     */
    public final com.mendix.systemwideinterfaces.core.IMendixObject getMendixObject()
    {
        return entityMendixObject;
    }
    /**
     * @return the IContext instance of this proxy, or null if no IContext instance was specified at initialization.
     */
    public final com.mendix.systemwideinterfaces.core.IContext getContext()
    {
        return context;
    }
    // Equality is delegated to the wrapped IMendixObject (same class and same underlying object).
    @Override
    public boolean equals(Object obj)
    {
        if (obj == this)
            return true;
        if (obj != null && getClass().equals(obj.getClass()))
        {
            final myfirstmodule.proxies.Entity that = (myfirstmodule.proxies.Entity) obj;
            return getMendixObject().equals(that.getMendixObject());
        }
        return false;
    }
    // Consistent with equals: hash of the wrapped IMendixObject.
    @Override
    public int hashCode()
    {
        return getMendixObject().hashCode();
    }
    /**
     * @return String name of this class
     */
    public static java.lang.String getType()
    {
        return "MyFirstModule.Entity";
    }
    /**
     * @return String GUID from this object, format: ID_0000000000
     * @deprecated Use getMendixObject().getId().toLong() to get a unique identifier for this object.
     */
    @Deprecated
    public java.lang.String getGUID()
    {
        return "ID_" + getMendixObject().getId().toLong();
    }
}
| |
/*
* DBeaver - Universal Database Manager
* Copyright (C) 2010-2021 DBeaver Corp and others
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jkiss.dbeaver.ext.wmi.model;
import org.eclipse.core.runtime.IAdaptable;
import org.jkiss.code.NotNull;
import org.jkiss.code.Nullable;
import org.jkiss.dbeaver.DBException;
import org.jkiss.dbeaver.model.DBPDataSource;
import org.jkiss.dbeaver.model.DBPDataSourceContainer;
import org.jkiss.dbeaver.model.DBPDataSourceInfo;
import org.jkiss.dbeaver.model.DBPExclusiveResource;
import org.jkiss.dbeaver.model.connection.DBPConnectionConfiguration;
import org.jkiss.dbeaver.model.exec.DBCExecutionContext;
import org.jkiss.dbeaver.model.exec.DBCExecutionContextDefaults;
import org.jkiss.dbeaver.model.exec.DBCExecutionPurpose;
import org.jkiss.dbeaver.model.exec.DBCSession;
import org.jkiss.dbeaver.model.impl.AbstractExecutionContext;
import org.jkiss.dbeaver.model.impl.SimpleExclusiveLock;
import org.jkiss.dbeaver.model.meta.Association;
import org.jkiss.dbeaver.model.qm.QMUtils;
import org.jkiss.dbeaver.model.runtime.DBRProgressMonitor;
import org.jkiss.dbeaver.model.sql.SQLDialect;
import org.jkiss.dbeaver.model.struct.DBSInstance;
import org.jkiss.dbeaver.model.struct.DBSObject;
import org.jkiss.dbeaver.model.struct.DBSObjectContainer;
import org.jkiss.wmi.service.WMIService;
import java.util.Collection;
import java.util.Collections;
/**
* WMIDataSource
*/
/**
 * WMIDataSource.
 * <p>
 * A WMI connection is modeled as a single object that is at once the data
 * source, its only instance, and its only execution context. The root WMI
 * namespace is created in {@link #initialize} and torn down in {@link #close}.
 */
public class WMIDataSource implements DBPDataSource, DBSInstance, DBCExecutionContext, IAdaptable
{
    private final DBPDataSourceContainer container;
    // Root namespace; null until initialize() succeeds, and again after close().
    private WMINamespace rootNamespace;
    private final SQLDialect dialect;
    private final long id;
    private final DBPExclusiveResource exclusiveLock = new SimpleExclusiveLock();

    public WMIDataSource(DBPDataSourceContainer container)
        throws DBException
    {
        this.container = container;
        this.dialect = new WMIDialect();
        this.id = AbstractExecutionContext.generateContextId();
        // Register with the query manager; balanced by handleContextClose() in close().
        QMUtils.getDefaultHandler().handleContextOpen(this, false);
    }

    @NotNull
    @Override
    public DBPDataSourceContainer getContainer()
    {
        return container;
    }

    @NotNull
    @Override
    public DBPDataSourceInfo getInfo()
    {
        return new WMIDataSourceInfo();
    }

    @Override
    public Object getDataSourceFeature(String featureId) {
        // No optional features are exposed by the WMI driver.
        return null;
    }

    @NotNull
    @Override
    public DBCExecutionContext getDefaultContext(DBRProgressMonitor monitor, boolean meta) {
        // This object is its own (and only) execution context.
        return this;
    }

    @NotNull
    @Override
    public DBCExecutionContext[] getAllContexts() {
        return new DBCExecutionContext[] { this };
    }

    @Override
    public long getContextId() {
        return this.id;
    }

    @NotNull
    @Override
    public String getContextName() {
        return "WMI Data Source";
    }

    @Nullable
    @Override
    public String getDescription() {
        return null;
    }

    @Nullable
    @Override
    public DBSObject getParentObject() {
        return container;
    }

    @NotNull
    @Override
    public WMIDataSource getDataSource() {
        return this;
    }

    @Override
    public DBSInstance getOwnerInstance() {
        return this;
    }

    @Override
    public boolean isConnected()
    {
        // The WMI service has no cheap liveness probe; the context is assumed alive.
        return true;
    }

    @NotNull
    @Override
    public DBCSession openSession(@NotNull DBRProgressMonitor monitor, @NotNull DBCExecutionPurpose purpose, @NotNull String task)
    {
        return new WMISession(monitor, purpose, task, this);
    }

    @Override
    public void checkContextAlive(DBRProgressMonitor monitor) throws DBException {
        // do nothing
    }

    @NotNull
    @Override
    public DBCExecutionContext openIsolatedContext(@NotNull DBRProgressMonitor monitor, @NotNull String purpose, @Nullable DBCExecutionContext initFrom) throws DBException
    {
        // Isolated contexts are not supported; hand back the shared context.
        return this;
    }

    @NotNull
    @Override
    public InvalidateResult invalidateContext(@NotNull DBRProgressMonitor monitor, boolean closeOnFailure) throws DBException
    {
        throw new DBException("Connection invalidate not supported");
    }

    @Nullable
    @Override
    public DBCExecutionContextDefaults getContextDefaults() {
        return null;
    }

    /**
     * Connects to the WMI service described by the container's connection
     * configuration and creates the root namespace.
     *
     * @throws DBException if the native WMI library cannot be linked or the service connection fails
     */
    @Override
    public void initialize(@NotNull DBRProgressMonitor monitor) throws DBException
    {
        final DBPConnectionConfiguration connectionInfo = container.getActualConnectionConfiguration();
        try {
            WMIService service = WMIService.connect(
                connectionInfo.getServerName(),
                connectionInfo.getHostName(),
                connectionInfo.getUserName(),
                connectionInfo.getUserPassword(),
                null,
                connectionInfo.getDatabaseName());
            this.rootNamespace = new WMINamespace(null, this, connectionInfo.getDatabaseName(), service);
        } catch (UnsatisfiedLinkError e) {
            // The JNI bridge to the native WMI library is missing or incompatible.
            throw new DBException("Can't link with WMI native library", e);
        } catch (Throwable e) {
            throw new DBException("Can't connect to WMI service", e);
        }
    }

    /**
     * Closes the root namespace and its service and deregisters this context
     * from the query manager. Safe to call when never initialized.
     */
    @Override
    public void close() {
        if (rootNamespace != null) {
            rootNamespace.close();
            if (rootNamespace.service != null) {
                rootNamespace.service.close();
            }
            rootNamespace = null;
        }
        QMUtils.getDefaultHandler().handleContextClose(this);
    }

    @NotNull
    @Override
    public DBSInstance getDefaultInstance() {
        return this;
    }

    @NotNull
    @Override
    public Collection<? extends DBSInstance> getAvailableInstances() {
        return Collections.singletonList(this);
    }

    @Override
    public void shutdown(DBRProgressMonitor monitor)
    {
        this.close();
    }

    @NotNull
    @Override
    public DBPExclusiveResource getExclusiveLock() {
        return exclusiveLock;
    }

    /**
     * @return the root namespace as a one-element collection, or an empty
     *         collection when the data source is not initialized (fixes the
     *         previous behavior of returning a list containing {@code null}).
     */
    @Association
    public Collection<WMINamespace> getNamespaces()
    {
        if (rootNamespace == null) {
            return Collections.emptyList();
        }
        return Collections.singletonList(rootNamespace);
    }

    public WMIService getService()
    {
        // NOTE(review): throws NPE if called before initialize() or after close();
        // callers are expected to use an initialized data source.
        return rootNamespace.service;
    }

    @Override
    public <T> T getAdapter(Class<T> adapter)
    {
        if (adapter == DBSObjectContainer.class) {
            return adapter.cast(rootNamespace);
        }
        return null;
    }

    @Override
    public SQLDialect getSQLDialect() {
        return dialect;
    }

    @NotNull
    @Override
    public String getName() {
        return container.getName();
    }

    @Override
    public boolean isPersisted() {
        return true;
    }
}
| |
/***
* ASM: a very small and fast Java bytecode manipulation framework
* Copyright (c) 2000-2011 INRIA, France Telecom
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. Neither the name of the copyright holders nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
* THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.objectweb.asm.commons;
import org.objectweb.asm.AnnotationVisitor;
import org.objectweb.asm.Label;
import org.objectweb.asm.MethodVisitor;
import org.objectweb.asm.Opcodes;
import org.objectweb.asm.Type;
import org.objectweb.asm.TypePath;
/**
* A {@link MethodVisitor} that renumbers local variables in their order of
* appearance. This adapter allows one to easily add new local variables to a
* method. It may be used by inheriting from this class, but the preferred way
* of using it is via delegation: the next visitor in the chain can indeed add
* new locals when needed by calling {@link #newLocal} on this adapter (this
* requires a reference back to this {@link LocalVariablesSorter}).
*
* @author Chris Nokleberg
* @author Eugene Kuleshov
* @author Eric Bruneton
*/
public class LocalVariablesSorter extends MethodVisitor {

    /** Stack map frame type used for every reference (object) local variable. */
    private static final Type OBJECT_TYPE = Type
            .getObjectType("java/lang/Object");

    /**
     * Mapping from old to new local variable indexes. A local variable at index
     * i of size 1 is remapped to 'mapping[2*i]', while a local variable at
     * index i of size 2 is remapped to 'mapping[2*i+1]'. Entries store the new
     * index plus one, so that 0 means "not remapped yet" (see {@link #remap}).
     */
    private int[] mapping = new int[40];

    /**
     * Array used to store stack map local variable types after remapping.
     */
    private Object[] newLocals = new Object[20];

    /**
     * Index of the first local variable, after formal parameters.
     */
    protected final int firstLocal;

    /**
     * Index of the next local variable to be created by {@link #newLocal}.
     */
    protected int nextLocal;

    /**
     * Indicates if at least one local variable has moved due to remapping.
     */
    private boolean changed;

    /**
     * Creates a new {@link LocalVariablesSorter}. <i>Subclasses must not use
     * this constructor</i>. Instead, they must use the
     * {@link #LocalVariablesSorter(int, int, String, MethodVisitor)} version.
     *
     * @param access
     *            access flags of the adapted method.
     * @param desc
     *            the method's descriptor (see {@link Type Type}).
     * @param mv
     *            the method visitor to which this adapter delegates calls.
     */
    public LocalVariablesSorter(final int access, final String desc,
            final MethodVisitor mv) {
        this(Opcodes.ASM5, access, desc, mv);
    }

    /**
     * Creates a new {@link LocalVariablesSorter}.
     *
     * @param api
     *            the ASM API version implemented by this visitor. Must be one
     *            of {@link Opcodes#ASM4} or {@link Opcodes#ASM5}.
     * @param access
     *            access flags of the adapted method.
     * @param desc
     *            the method's descriptor (see {@link Type Type}).
     * @param mv
     *            the method visitor to which this adapter delegates calls.
     */
    protected LocalVariablesSorter(final int api, final int access,
            final String desc, final MethodVisitor mv) {
        super(api, mv);
        Type[] args = Type.getArgumentTypes(desc);
        // Slot 0 holds 'this' for instance methods; formal parameters follow.
        nextLocal = (Opcodes.ACC_STATIC & access) == 0 ? 1 : 0;
        for (int i = 0; i < args.length; i++) {
            nextLocal += args[i].getSize();
        }
        // Parameters are never remapped; only locals at or above firstLocal are.
        firstLocal = nextLocal;
    }

    @Override
    public void visitVarInsn(final int opcode, final int var) {
        // Derive the variable's size category from the opcode so remap() can
        // pick the right mapping slot (size 1 vs size 2).
        Type type;
        switch (opcode) {
        case Opcodes.LLOAD:
        case Opcodes.LSTORE:
            type = Type.LONG_TYPE;
            break;

        case Opcodes.DLOAD:
        case Opcodes.DSTORE:
            type = Type.DOUBLE_TYPE;
            break;

        case Opcodes.FLOAD:
        case Opcodes.FSTORE:
            type = Type.FLOAT_TYPE;
            break;

        case Opcodes.ILOAD:
        case Opcodes.ISTORE:
            type = Type.INT_TYPE;
            break;

        default:
        // case Opcodes.ALOAD:
        // case Opcodes.ASTORE:
        // case RET:
            type = OBJECT_TYPE;
            break;
        }
        mv.visitVarInsn(opcode, remap(var, type));
    }

    @Override
    public void visitIincInsn(final int var, final int increment) {
        mv.visitIincInsn(remap(var, Type.INT_TYPE), increment);
    }

    @Override
    public void visitMaxs(final int maxStack, final int maxLocals) {
        // nextLocal already accounts for all remapped and newly created locals.
        mv.visitMaxs(maxStack, nextLocal);
    }

    @Override
    public void visitLocalVariable(final String name, final String desc,
            final String signature, final Label start, final Label end,
            final int index) {
        int newIndex = remap(index, Type.getType(desc));
        mv.visitLocalVariable(name, desc, signature, start, end, newIndex);
    }

    @Override
    public AnnotationVisitor visitLocalVariableAnnotation(int typeRef,
            TypePath typePath, Label[] start, Label[] end, int[] index,
            String desc, boolean visible) {
        // Remap every local variable index the annotation refers to.
        Type t = Type.getType(desc);
        int[] newIndex = new int[index.length];
        for (int i = 0; i < newIndex.length; ++i) {
            newIndex[i] = remap(index[i], t);
        }
        return mv.visitLocalVariableAnnotation(typeRef, typePath, start, end,
                newIndex, desc, visible);
    }

    @Override
    public void visitFrame(final int type, final int nLocal,
            final Object[] local, final int nStack, final Object[] stack) {
        if (type != Opcodes.F_NEW) { // uncompressed frame
            throw new IllegalStateException(
                    "ClassReader.accept() should be called with EXPAND_FRAMES flag");
        }

        if (!changed) { // optimization for the case where mapping = identity
            mv.visitFrame(type, nLocal, local, nStack, stack);
            return;
        }

        // creates a copy of newLocals so it can be restored after this frame
        Object[] oldLocals = new Object[newLocals.length];
        System.arraycopy(newLocals, 0, oldLocals, 0, oldLocals.length);

        // lets subclasses adjust the types of the locals they added (see javadoc)
        updateNewLocals(newLocals);

        // copies types from 'local' to 'newLocals'
        // 'newLocals' already contains the variables added with 'newLocal'
        int index = 0; // old local variable index
        int number = 0; // old local variable number
        for (; number < nLocal; ++number) {
            Object t = local[number];
            // long/double occupy two slots in the old index space
            int size = t == Opcodes.LONG || t == Opcodes.DOUBLE ? 2 : 1;
            if (t != Opcodes.TOP) {
                // translate the frame tag back to a Type so remap() sees the right size
                Type typ = OBJECT_TYPE;
                if (t == Opcodes.INTEGER) {
                    typ = Type.INT_TYPE;
                } else if (t == Opcodes.FLOAT) {
                    typ = Type.FLOAT_TYPE;
                } else if (t == Opcodes.LONG) {
                    typ = Type.LONG_TYPE;
                } else if (t == Opcodes.DOUBLE) {
                    typ = Type.DOUBLE_TYPE;
                } else if (t instanceof String) {
                    typ = Type.getObjectType((String) t);
                }
                setFrameLocal(remap(index, typ), t);
            }
            index += size;
        }

        // removes TOP after long and double types as well as trailing TOPs
        // (compacts newLocals in place; 'number' ends as the frame's local count)
        index = 0;
        number = 0;
        for (int i = 0; index < newLocals.length; ++i) {
            Object t = newLocals[index++];
            if (t != null && t != Opcodes.TOP) {
                newLocals[i] = t;
                number = i + 1;
                if (t == Opcodes.LONG || t == Opcodes.DOUBLE) {
                    index += 1;
                }
            } else {
                newLocals[i] = Opcodes.TOP;
            }
        }

        // visits remapped frame
        mv.visitFrame(type, number, newLocals, nStack, stack);

        // restores original value of 'newLocals'
        newLocals = oldLocals;
    }

    // -------------

    /**
     * Creates a new local variable of the given type.
     *
     * @param type
     *            the type of the local variable to be created.
     * @return the identifier of the newly created local variable.
     */
    public int newLocal(final Type type) {
        // Compute the stack map frame tag corresponding to 'type'.
        Object t;
        switch (type.getSort()) {
        case Type.BOOLEAN:
        case Type.CHAR:
        case Type.BYTE:
        case Type.SHORT:
        case Type.INT:
            t = Opcodes.INTEGER;
            break;
        case Type.FLOAT:
            t = Opcodes.FLOAT;
            break;
        case Type.LONG:
            t = Opcodes.LONG;
            break;
        case Type.DOUBLE:
            t = Opcodes.DOUBLE;
            break;
        case Type.ARRAY:
            t = type.getDescriptor();
            break;
        // case Type.OBJECT:
        default:
            t = type.getInternalName();
            break;
        }
        int local = newLocalMapping(type);
        setLocalType(local, type);
        setFrameLocal(local, t);
        return local;
    }

    /**
     * Notifies subclasses that a new stack map frame is being visited. The
     * array argument contains the stack map frame types corresponding to the
     * local variables added with {@link #newLocal}. This method can update
     * these types in place for the stack map frame being visited. The default
     * implementation of this method does nothing, i.e. a local variable added
     * with {@link #newLocal} will have the same type in all stack map frames.
     * But this behavior is not always the desired one, for instance if a local
     * variable is added in the middle of a try/catch block: the frame for the
     * exception handler should have a TOP type for this new local.
     *
     * @param newLocals
     *            the stack map frame types corresponding to the local variables
     *            added with {@link #newLocal} (and null for the others). The
     *            format of this array is the same as in
     *            {@link MethodVisitor#visitFrame}, except that long and double
     *            types use two slots. The types for the current stack map frame
     *            must be updated in place in this array.
     */
    protected void updateNewLocals(Object[] newLocals) {
    }

    /**
     * Notifies subclasses that a local variable has been added or remapped. The
     * default implementation of this method does nothing.
     *
     * @param local
     *            a local variable identifier, as returned by {@link #newLocal
     *            newLocal()}.
     * @param type
     *            the type of the value being stored in the local variable.
     */
    protected void setLocalType(final int local, final Type type) {
    }

    // Stores 'type' at index 'local' in newLocals, growing the array as needed.
    private void setFrameLocal(final int local, final Object type) {
        int l = newLocals.length;
        if (local >= l) {
            // grow to at least double the size (amortized O(1) growth)
            Object[] a = new Object[Math.max(2 * l, local + 1)];
            System.arraycopy(newLocals, 0, a, 0, l);
            newLocals = a;
        }
        newLocals[local] = type;
    }

    // Returns the new index for old variable 'var' of the given type,
    // allocating a fresh slot on first sight.
    private int remap(final int var, final Type type) {
        if (var + type.getSize() <= firstLocal) {
            // 'this' and formal parameters keep their original indexes.
            return var;
        }
        // key encodes both the old index and the variable size (1 or 2 slots)
        int key = 2 * var + type.getSize() - 1;
        int size = mapping.length;
        if (key >= size) {
            int[] newMapping = new int[Math.max(2 * size, key + 1)];
            System.arraycopy(mapping, 0, newMapping, 0, size);
            mapping = newMapping;
        }
        int value = mapping[key];
        if (value == 0) {
            // first time this variable is seen: allocate a new slot;
            // store value+1 so that 0 keeps meaning "unmapped"
            value = newLocalMapping(type);
            setLocalType(value, type);
            mapping[key] = value + 1;
        } else {
            value--;
        }
        if (value != var) {
            changed = true;
        }
        return value;
    }

    // Reserves the next free slot(s) for a variable of the given type.
    protected int newLocalMapping(final Type type) {
        int local = nextLocal;
        nextLocal += type.getSize();
        return local;
    }
}
| |
/*
* Copyright (c) 2016-present, RxJava Contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is
* distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See
* the License for the specific language governing permissions and limitations under the License.
*/
package io.reactivex.rxjava3.internal.operators.flowable;
import static org.junit.Assert.assertNull;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.*;
import java.io.IOException;
import java.util.*;
import org.junit.*;
import org.reactivestreams.*;
import io.reactivex.rxjava3.core.*;
import io.reactivex.rxjava3.exceptions.TestException;
import io.reactivex.rxjava3.functions.*;
import io.reactivex.rxjava3.internal.functions.Functions;
import io.reactivex.rxjava3.internal.schedulers.ImmediateThinScheduler;
import io.reactivex.rxjava3.internal.subscriptions.BooleanSubscription;
import io.reactivex.rxjava3.operators.ConditionalSubscriber;
import io.reactivex.rxjava3.operators.QueueFuseable;
import io.reactivex.rxjava3.plugins.RxJavaPlugins;
import io.reactivex.rxjava3.processors.*;
import io.reactivex.rxjava3.schedulers.Schedulers;
import io.reactivex.rxjava3.subscribers.TestSubscriber;
import io.reactivex.rxjava3.testsupport.*;
public class FlowableMapTest extends RxJavaTest {
// Mockito-backed subscribers, re-created before each test (see before()).
Subscriber<String> stringSubscriber;
Subscriber<String> stringSubscriber2;

// Appends the integer index to the string value, e.g. ("a", 1) -> "a1".
static final BiFunction<String, Integer, String> APPEND_INDEX = new BiFunction<String, Integer, String>() {
    @Override
    public String apply(String value, Integer index) {
        return value + index;
    }
};
@Before
public void before() {
    // Fresh mock subscribers for every test so verify() counts start at zero.
    stringSubscriber2 = TestHelper.mockSubscriber();
    stringSubscriber = TestHelper.mockSubscriber();
}
@Test
public void map() {
    // Two maps whose "firstName" values should be extracted by map().
    Flowable<Map<String, String>> source = Flowable.just(getMap("One"), getMap("Two"));
    Flowable<String> firstNames = source.map(map -> map.get("firstName"));

    firstNames.subscribe(stringSubscriber);

    verify(stringSubscriber, never()).onError(any(Throwable.class));
    verify(stringSubscriber, times(1)).onNext("OneFirst");
    verify(stringSubscriber, times(1)).onNext("TwoFirst");
    verify(stringSubscriber, times(1)).onComplete();
}
@Test
public void mapMany() {
    /* simulate a top-level async call which returns IDs */
    Flowable<Integer> ids = Flowable.just(1, 2);

    /* now simulate the behavior to take those IDs and perform nested async calls based on them */
    Flowable<String> firstNames = ids.flatMap(id -> {
        /* simulate making a nested async call which creates another Flowable */
        Flowable<Map<String, String>> inner;
        if (id == 1) {
            inner = Flowable.just(getMap("One"), getMap("Two"));
        } else {
            inner = Flowable.just(getMap("Three"), getMap("Four"));
        }
        /* simulate kicking off the async call and performing a select on it to transform the data */
        return inner.map(map -> map.get("firstName"));
    });

    firstNames.subscribe(stringSubscriber);

    verify(stringSubscriber, never()).onError(any(Throwable.class));
    verify(stringSubscriber, times(1)).onNext("OneFirst");
    verify(stringSubscriber, times(1)).onNext("TwoFirst");
    verify(stringSubscriber, times(1)).onNext("ThreeFirst");
    verify(stringSubscriber, times(1)).onNext("FourFirst");
    verify(stringSubscriber, times(1)).onComplete();
}
@Test
public void mapMany2() {
    // Two inner Flowables, each emitting two maps.
    Flowable<Map<String, String>> part1 = Flowable.just(getMap("One"), getMap("Two"));
    Flowable<Map<String, String>> part2 = Flowable.just(getMap("Three"), getMap("Four"));

    // Flatten the Flowable-of-Flowables and project out "firstName".
    Flowable.just(part1, part2)
            .flatMap(inner -> inner.map(map -> map.get("firstName")))
            .subscribe(stringSubscriber);

    verify(stringSubscriber, never()).onError(any(Throwable.class));
    verify(stringSubscriber, times(1)).onNext("OneFirst");
    verify(stringSubscriber, times(1)).onNext("TwoFirst");
    verify(stringSubscriber, times(1)).onNext("ThreeFirst");
    verify(stringSubscriber, times(1)).onNext("FourFirst");
    verify(stringSubscriber, times(1)).onComplete();
}
@Test
public void mapWithError() {
    final List<Throwable> errors = new ArrayList<>();

    // The mapper crashes on the second element; everything after the crash
    // must be suppressed and the error routed through doOnError.
    Flowable.just("one", "fail", "two", "three", "fail")
            .map(s -> {
                if ("fail".equals(s)) {
                    throw new TestException("Forced Failure");
                }
                return s;
            })
            .doOnError(errors::add)
            .subscribe(stringSubscriber);

    verify(stringSubscriber, times(1)).onNext("one");
    verify(stringSubscriber, never()).onNext("two");
    verify(stringSubscriber, never()).onNext("three");
    verify(stringSubscriber, never()).onComplete();
    verify(stringSubscriber, times(1)).onError(any(TestException.class));

    TestHelper.assertError(errors, 0, TestException.class, "Forced Failure");
}
@Test(expected = IllegalArgumentException.class)
public void mapWithIssue417() {
    // The mapper throws on a computation-scheduler thread; blockingSingle
    // must re-surface that error to the calling thread.
    Flowable.just(1)
            .observeOn(Schedulers.computation())
            .<Integer>map(arg0 -> {
                throw new IllegalArgumentException("any error");
            })
            .blockingSingle();
}
@Test(expected = IllegalArgumentException.class)
public void mapWithErrorInFuncAndThreadPoolScheduler() throws InterruptedException {
    // The error will throw in one of the threads in the thread pool.
    // If map does not handle it, the error will disappear,
    // so map needs to handle the error by itself.
    Flowable<String> source = Flowable.just("one")
            .observeOn(Schedulers.computation())
            .<String>map(arg0 -> {
                throw new IllegalArgumentException("any error");
            });

    // Block for the response, expecting the exception to be rethrown here.
    source.blockingLast();
}
/**
 * Mapping over range(1,0).lastElement() yields an empty Maybe, so the
 * blocking get must produce null rather than a value.
 */
@Test
public void errorPassesThruMap() {
    Integer result = Flowable.range(1, 0)
            .lastElement()
            .map(i -> i)
            .blockingGet();
    assertNull(result);
}
/**
 * We expect IllegalStateException to pass thru map.
 */
@Test(expected = IllegalStateException.class)
public void errorPassesThruMap2() {
    // The upstream error must flow straight through the identity map.
    Flowable.error(new IllegalStateException())
            .map(i -> i)
            .blockingSingle();
}
/**
 * We expect an ArithmeticException here because lastElement() emits a single
 * value which the mapper then divides by zero.
 */
@Test(expected = ArithmeticException.class)
public void mapWithErrorInFunc() {
    Flowable.range(1, 1)
            .lastElement()
            .map(i -> i / 0)
            .blockingGet();
}
/**
 * Builds a two-entry map whose "firstName"/"lastName" values are the given
 * prefix followed by "First"/"Last" respectively.
 */
private static Map<String, String> getMap(String prefix) {
    Map<String, String> result = new HashMap<>();
    result.put("firstName", prefix + "First");
    result.put("lastName", prefix + "Last");
    return result;
}
@Test
public void functionCrashUnsubscribes() {
    PublishProcessor<Integer> pp = PublishProcessor.create();
    TestSubscriber<Integer> subscriber = new TestSubscriber<>();

    // A crashing mapper must cancel the upstream subscription.
    pp.<Integer>map(v -> {
        throw new TestException();
    }).subscribe(subscriber);

    Assert.assertTrue("Not subscribed?", pp.hasSubscribers());

    pp.onNext(1);

    Assert.assertFalse("Subscribed?", pp.hasSubscribers());
    subscriber.assertError(TestException.class);
}
@Test
public void mapFilter() {
    // map followed by an always-true filter keeps every incremented value.
    Flowable.range(1, 2)
            .map(v -> v + 1)
            .filter(v -> true)
            .test()
            .assertResult(2, 3);
}
@Test
public void mapFilterMapperCrash() {
    // A crashing mapper upstream of filter must terminate with the error.
    Flowable.range(1, 2)
            .<Integer>map(v -> {
                throw new TestException();
            })
            .filter(v -> true)
            .test()
            .assertFailure(TestException.class);
}
@Test
public void mapFilterHidden() {
    // Same as mapFilter but with fusion disabled via hide().
    Flowable.range(1, 2)
            .hide()
            .map(v -> v + 1)
            .filter(v -> true)
            .test()
            .assertResult(2, 3);
}
@Test
public void mapFilterFused() {
    TestSubscriberEx<Integer> subscriber =
            new TestSubscriberEx<Integer>().setInitialFusionMode(QueueFuseable.ANY);

    // range is a SYNC-fuseable source, so the map+filter chain should fuse
    // synchronously.
    Flowable.range(1, 2)
            .map(v -> v + 1)
            .filter(v -> true)
            .subscribe(subscriber);

    subscriber.assertFuseable()
            .assertFusionMode(QueueFuseable.SYNC)
            .assertResult(2, 3);
}
@Test
public void mapFilterFusedHidden() {
    TestSubscriberEx<Integer> subscriber =
            new TestSubscriberEx<Integer>().setInitialFusionMode(QueueFuseable.ANY);

    // hide() breaks the fuseable chain, so no fusion mode is established.
    Flowable.range(1, 2)
            .hide()
            .map(v -> v + 1)
            .filter(v -> true)
            .subscribe(subscriber);

    subscriber.assertFuseable()
            .assertFusionMode(QueueFuseable.NONE)
            .assertResult(2, 3);
}
@Test
public void sourceIgnoresCancel() {
    List<Throwable> errors = TestHelper.trackPluginErrors();
    try {
        // A misbehaving source that keeps emitting after cancellation; the
        // post-crash IOException must go to the undeliverable-error plugin.
        Flowable.<Integer>fromPublisher(s -> {
            s.onSubscribe(new BooleanSubscription());
            s.onNext(1);
            s.onNext(2);
            s.onError(new IOException());
            s.onComplete();
        })
        .<Object>map(v -> {
            throw new TestException();
        })
        .test()
        .assertFailure(TestException.class);

        TestHelper.assertUndeliverable(errors, 0, IOException.class);
    } finally {
        RxJavaPlugins.reset();
    }
}
@Test
public void mapFilterMapperCrashFused() {
    TestSubscriberEx<Integer> subscriber =
            new TestSubscriberEx<Integer>().setInitialFusionMode(QueueFuseable.ANY);

    // hide() prevents fusion; the crashing mapper must still fail the chain.
    Flowable.range(1, 2)
            .hide()
            .<Integer>map(v -> {
                throw new TestException();
            })
            .filter(v -> true)
            .subscribe(subscriber);

    subscriber.assertFuseable()
            .assertFusionMode(QueueFuseable.NONE)
            .assertFailure(TestException.class);
}
@Test
public void sourceIgnoresCancelFilter() {
    List<Throwable> errors = TestHelper.trackPluginErrors();
    try {
        // Misbehaving source keeps emitting after cancellation; the
        // post-crash IOException must end up as an undeliverable error.
        Flowable.<Integer>fromPublisher(s -> {
            s.onSubscribe(new BooleanSubscription());
            s.onNext(1);
            s.onNext(2);
            s.onError(new IOException());
            s.onComplete();
        })
        .<Integer>map(v -> {
            throw new TestException();
        })
        .filter(v -> true)
        .test()
        .assertFailure(TestException.class);

        TestHelper.assertUndeliverable(errors, 0, IOException.class);
    } finally {
        RxJavaPlugins.reset();
    }
}
@Test
public void mapFilterFused2() {
    TestSubscriberEx<Integer> subscriber =
            new TestSubscriberEx<Integer>().setInitialFusionMode(QueueFuseable.ANY);
    UnicastProcessor<Integer> up = UnicastProcessor.create();

    // UnicastProcessor supports ASYNC fusion through map+filter.
    up.map(v -> v + 1)
      .filter(v -> true)
      .subscribe(subscriber);

    up.onNext(1);
    up.onNext(2);
    up.onComplete();

    subscriber.assertFuseable()
            .assertFusionMode(QueueFuseable.ASYNC)
            .assertResult(2, 3);
}
@Test
public void sourceIgnoresCancelConditional() {
    List<Throwable> errors = TestHelper.trackPluginErrors();
    try {
        // Same as sourceIgnoresCancel but driving the conditional
        // (tryOnNext) path of the fused map+filter subscriber.
        Flowable.<Integer>fromPublisher(s -> {
            ConditionalSubscriber<? super Integer> cs = (ConditionalSubscriber<? super Integer>)s;
            cs.onSubscribe(new BooleanSubscription());
            cs.tryOnNext(1);
            cs.tryOnNext(2);
            cs.onError(new IOException());
            cs.onComplete();
        })
        .<Integer>map(v -> {
            throw new TestException();
        })
        .filter(v -> true)
        .test()
        .assertFailure(TestException.class);

        TestHelper.assertUndeliverable(errors, 0, IOException.class);
    } finally {
        RxJavaPlugins.reset();
    }
}
@Test
public void dispose() {
    // The map operator must propagate disposal/cancellation upstream;
    // TestHelper.checkDisposed exercises that contract.
    TestHelper.checkDisposed(Flowable.range(1, 5).map(Functions.identity()));
}
@Test
public void doubleOnSubscribe() {
    // map must honor the Reactive Streams rule of accepting only the first
    // onSubscribe call.
    TestHelper.checkDoubleOnSubscribeFlowable(f -> f.map(Functions.identity()));
}
@Test
public void fusedSync() {
    TestSubscriberEx<Integer> subscriber =
            new TestSubscriberEx<Integer>().setInitialFusionMode(QueueFuseable.ANY);

    // range is SYNC-fuseable and identity-map should preserve that mode.
    Flowable.range(1, 5)
            .map(Functions.<Integer>identity())
            .subscribe(subscriber);

    subscriber.assertFusionMode(QueueFuseable.SYNC)
            .assertResult(1, 2, 3, 4, 5);
}
@Test
public void fusedAsync() {
    TestSubscriberEx<Integer> subscriber =
            new TestSubscriberEx<Integer>().setInitialFusionMode(QueueFuseable.ANY);
    UnicastProcessor<Integer> up = UnicastProcessor.create();

    // UnicastProcessor is ASYNC-fuseable and identity-map should preserve it.
    up.map(Functions.<Integer>identity())
      .subscribe(subscriber);

    TestHelper.emit(up, 1, 2, 3, 4, 5);

    subscriber.assertFusionMode(QueueFuseable.ASYNC)
            .assertResult(1, 2, 3, 4, 5);
}
@Test
public void fusedReject() {
    // Requesting a fusion across an async BOUNDARY must be rejected by map.
    TestSubscriberEx<Integer> subscriber = new TestSubscriberEx<Integer>()
            .setInitialFusionMode(QueueFuseable.ANY | QueueFuseable.BOUNDARY);

    Flowable.range(1, 5)
            .map(Functions.<Integer>identity())
            .subscribe(subscriber);

    subscriber.assertFusionMode(QueueFuseable.NONE)
            .assertResult(1, 2, 3, 4, 5);
}
@Test
public void badSource() {
    // map must tolerate a source that violates the protocol (signals after
    // termination) without double-signalling downstream.
    TestHelper.checkBadSourceFlowable(f -> f.map(Functions.identity()), false, 1, 1, 1);
}
@Test
public void conditionalFusionNoNPE() {
    TestSubscriberEx<Object> subscriber = new TestSubscriberEx<>()
            .setInitialFusionMode(QueueFuseable.ANY);

    // An empty, fused, conditional filter->map->filter chain must complete
    // cleanly without a NullPointerException.
    Flowable.empty()
            .observeOn(ImmediateThinScheduler.INSTANCE)
            .filter(v -> true)
            .map(v -> v)
            .filter(v -> true)
            .subscribe(subscriber);

    subscriber.assertResult();
}
}
| |
/*
* Copyright (C) 2009 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.icg.bluetoothmessenger;
import android.app.Activity;
import android.bluetooth.BluetoothAdapter;
import android.bluetooth.BluetoothDevice;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.util.Log;
import android.view.KeyEvent;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.Window;
import android.view.View.OnClickListener;
import android.view.inputmethod.EditorInfo;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ListView;
import android.widget.TextView;
import android.widget.Toast;
/**
 * This is the main Activity that displays the current chat session.
 *
 * It wires a ListView-based conversation log, an EditText compose field and
 * a send Button to a {@link MessengerService}, which performs the Bluetooth
 * work and reports back through {@link #mHandler}.
 */
public class BluetoothMessenger extends Activity {
    // Debugging
    private static final String TAG = "BluetoothMessenger";
    // Compile-time switch for the verbose lifecycle/trace logging below.
    private static final boolean D = true;

    // Message types sent from the BluetoothChatService Handler
    public static final int MESSAGE_STATE_CHANGE = 1;
    public static final int MESSAGE_READ = 2;
    public static final int MESSAGE_WRITE = 3;
    public static final int MESSAGE_DEVICE_NAME = 4;
    public static final int MESSAGE_TOAST = 5;

    // Key names received from the BluetoothChatService Handler
    public static final String DEVICE_NAME = "device_name";
    public static final String TOAST = "toast";

    // Intent request codes
    private static final int REQUEST_CONNECT_DEVICE = 1;
    private static final int REQUEST_ENABLE_BT = 2;

    // Layout Views
    private TextView mTitle;
    private ListView mConversationView;
    private EditText mOutEditText;
    private Button mSendButton;

    // Name of the connected device
    private String mConnectedDeviceName = null;
    // Array adapter for the conversation thread
    private ArrayAdapter<String> mConversationArrayAdapter;
    // String buffer for outgoing messages
    private StringBuffer mOutStringBuffer;
    // Local Bluetooth adapter
    private BluetoothAdapter mBluetoothAdapter = null;
    // Member object for the chat services
    private MessengerService mChatService = null;
    // Application context, retained so the message handler can post
    // notifications without going through the (possibly finishing) Activity.
    private Context cntx;

    /**
     * Sets up the custom title bar and verifies Bluetooth support.
     * Chat setup itself is deferred to onStart()/onActivityResult().
     */
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // NOTE(review): lifecycle traces are logged at error level (Log.e),
        // presumably so they stand out while debugging — consider Log.d.
        if(D) Log.e(TAG, "+++ ON CREATE +++");
        cntx = getApplicationContext();
        // Set up the window layout
        requestWindowFeature(Window.FEATURE_CUSTOM_TITLE);
        setContentView(R.layout.main);
        getWindow().setFeatureInt(Window.FEATURE_CUSTOM_TITLE, R.layout.custom_title);

        // Set up the custom title
        mTitle = (TextView) findViewById(R.id.title_left_text);
        mTitle.setText(R.string.app_name);
        // mTitle is intentionally re-pointed at the right-hand title view so
        // later status updates (see mHandler) only change the right side.
        mTitle = (TextView) findViewById(R.id.title_right_text);

        // Get local Bluetooth adapter
        mBluetoothAdapter = BluetoothAdapter.getDefaultAdapter();
        // If the adapter is null, then Bluetooth is not supported
        if (mBluetoothAdapter == null) {
            Toast.makeText(this, "Bluetooth is not available", Toast.LENGTH_LONG).show();
            finish();
            return;
        }
    }

    /**
     * Requests Bluetooth be enabled if it is off; otherwise starts the chat
     * session. When the enable dialog is shown, setupChat() happens later in
     * onActivityResult() instead.
     */
    @Override
    public void onStart() {
        super.onStart();
        if(D) Log.e(TAG, "++ ON START ++");
        // If BT is not on, request that it be enabled.
        // setupChat() will then be called during onActivityResult
        if (!mBluetoothAdapter.isEnabled()) {
            Intent enableIntent = new Intent(BluetoothAdapter.ACTION_REQUEST_ENABLE);
            startActivityForResult(enableIntent, REQUEST_ENABLE_BT);
        // Otherwise, setup the chat session
        } else {
            if (mChatService == null) setupChat();
        }
    }

    /**
     * Starts the chat service if it exists but has not been started yet.
     */
    @Override
    public synchronized void onResume() {
        super.onResume();
        if(D) Log.e(TAG, "+ ON RESUME +");
        // Performing this check in onResume() covers the case in which BT was
        // not enabled during onStart(), so we were paused to enable it...
        // onResume() will be called when ACTION_REQUEST_ENABLE activity returns.
        if (mChatService != null) {
            // Only if the state is STATE_NONE, do we know that we haven't started already
            if (mChatService.getState() == MessengerService.STATE_NONE) {
                // Start the Bluetooth chat services
                mChatService.start();
            }
        }
    }

    /**
     * Wires up the conversation list, compose field, send button and the
     * MessengerService. Called once Bluetooth is known to be enabled.
     */
    private void setupChat() {
        Log.d(TAG, "setupChat()");

        // Initialize the array adapter for the conversation thread
        mConversationArrayAdapter = new ArrayAdapter<String>(this, R.layout.message);
        mConversationView = (ListView) findViewById(R.id.in);
        mConversationView.setAdapter(mConversationArrayAdapter);

        // Initialize the compose field with a listener for the return key
        mOutEditText = (EditText) findViewById(R.id.edit_text_out);
        mOutEditText.setOnEditorActionListener(mWriteListener);

        // Initialize the send button with a listener that for click events
        mSendButton = (Button) findViewById(R.id.button_send);
        mSendButton.setOnClickListener(new OnClickListener() {
            public void onClick(View v) {
                // Send a message using content of the edit text widget
                TextView view = (TextView) findViewById(R.id.edit_text_out);
                String message = view.getText().toString();
                sendMessage(message);
            }
        });

        // Initialize the BluetoothChatService to perform bluetooth connections
        mChatService = new MessengerService(this, mHandler);

        // Initialize the buffer for outgoing messages
        mOutStringBuffer = new StringBuffer("");
    }

    @Override
    public synchronized void onPause() {
        super.onPause();
        if(D) Log.e(TAG, "- ON PAUSE -");
    }

    @Override
    public void onStop() {
        super.onStop();
        if(D) Log.e(TAG, "-- ON STOP --");
    }

    /** Shuts down the chat service when the Activity is destroyed. */
    @Override
    public void onDestroy() {
        super.onDestroy();
        // Stop the Bluetooth chat services
        if (mChatService != null) mChatService.stop();
        if(D) Log.e(TAG, "--- ON DESTROY ---");
    }

    /**
     * Asks the system to make this device discoverable for 300 seconds,
     * unless it is already discoverable.
     */
    private void ensureDiscoverable() {
        if(D) Log.d(TAG, "ensure discoverable");
        if (mBluetoothAdapter.getScanMode() !=
            BluetoothAdapter.SCAN_MODE_CONNECTABLE_DISCOVERABLE) {
            Intent discoverableIntent = new Intent(BluetoothAdapter.ACTION_REQUEST_DISCOVERABLE);
            discoverableIntent.putExtra(BluetoothAdapter.EXTRA_DISCOVERABLE_DURATION, 300);
            startActivity(discoverableIntent);
        }
    }

    /**
     * Sends a message.
     * Silently ignores empty messages; shows a toast and returns if not
     * connected. On success the compose field is cleared.
     * @param message A string of text to send.
     */
    private void sendMessage(String message) {
        // Check that we're actually connected before trying anything
        if (mChatService.getState() != MessengerService.STATE_CONNECTED) {
            Toast.makeText(this, R.string.not_connected, Toast.LENGTH_SHORT).show();
            return;
        }

        // Check that there's actually something to send
        if (message.length() > 0) {
            // Get the message bytes and tell the BluetoothChatService to write
            // NOTE(review): getBytes() uses the platform default charset —
            // presumably the receiver decodes with the same one; verify.
            byte[] send = message.getBytes();
            mChatService.write(send);

            // Reset out string buffer to zero and clear the edit text field
            mOutStringBuffer.setLength(0);
            mOutEditText.setText(mOutStringBuffer);
        }
    }

    // The action listener for the EditText widget, to listen for the return key
    private TextView.OnEditorActionListener mWriteListener =
        new TextView.OnEditorActionListener() {
        public boolean onEditorAction(TextView view, int actionId, KeyEvent event) {
            // If the action is a key-up event on the return key, send the message
            if (actionId == EditorInfo.IME_NULL && event.getAction() == KeyEvent.ACTION_UP) {
                String message = view.getText().toString();
                sendMessage(message);
            }
            if(D) Log.i(TAG, "END onEditorAction");
            // Returning true consumes the event (the keyboard stays open).
            return true;
        }
    };

    // The Handler that gets information back from the BluetoothChatService.
    // NOTE(review): a non-static anonymous Handler holds an implicit reference
    // to the Activity; messages delivered after destroy could leak it or touch
    // dead views — TODO confirm MessengerService stops posting after stop().
    private final Handler mHandler = new Handler() {
        @Override
        public void handleMessage(Message msg) {
            switch (msg.what) {
            case MESSAGE_STATE_CHANGE:
                if(D) Log.i(TAG, "MESSAGE_STATE_CHANGE: " + msg.arg1);
                switch (msg.arg1) {
                case MessengerService.STATE_CONNECTED:
                    // Show the peer's name and start a fresh conversation log.
                    mTitle.setText(R.string.title_connected_to);
                    mTitle.append(mConnectedDeviceName);
                    mConversationArrayAdapter.clear();
                    break;
                case MessengerService.STATE_CONNECTING:
                    mTitle.setText(R.string.title_connecting);
                    break;
                case MessengerService.STATE_LISTEN:
                case MessengerService.STATE_NONE:
                    mTitle.setText(R.string.title_not_connected);
                    break;
                }
                break;
            case MESSAGE_WRITE:
                byte[] writeBuf = (byte[]) msg.obj;
                // construct a string from the buffer
                String writeMessage = new String(writeBuf);
                mConversationArrayAdapter.add("Me: " + writeMessage);
                break;
            case MESSAGE_READ:
                byte[] readBuf = (byte[]) msg.obj;
                // construct a string from the valid bytes in the buffer
                String readMessage = new String(readBuf, 0, msg.arg1);
                mConversationArrayAdapter.add(mConnectedDeviceName+": " + readMessage);
                // Also raise a system notification for the incoming message.
                ReceiverNotificationService.showNotification(cntx, readMessage, mConnectedDeviceName);
                break;
            case MESSAGE_DEVICE_NAME:
                // save the connected device's name
                mConnectedDeviceName = msg.getData().getString(DEVICE_NAME);
                Toast.makeText(getApplicationContext(), "Connected to "
                               + mConnectedDeviceName, Toast.LENGTH_SHORT).show();
                break;
            case MESSAGE_TOAST:
                Toast.makeText(getApplicationContext(), msg.getData().getString(TOAST),
                               Toast.LENGTH_SHORT).show();
                break;
            }
        }
    };

    /**
     * Handles results from the device-picker and the enable-Bluetooth dialog.
     * NOTE(review): missing @Override; also assumes a successful
     * REQUEST_CONNECT_DEVICE result always carries extras — data.getExtras()
     * would NPE otherwise; presumably DeviceListActivity guarantees it.
     */
    public void onActivityResult(int requestCode, int resultCode, Intent data) {
        if(D) Log.d(TAG, "onActivityResult " + resultCode);
        switch (requestCode) {
        case REQUEST_CONNECT_DEVICE:
            // When DeviceListActivity returns with a device to connect
            if (resultCode == Activity.RESULT_OK) {
                // Get the device MAC address
                String address = data.getExtras()
                                     .getString(DeviceListActivity.EXTRA_DEVICE_ADDRESS);
                // Get the BLuetoothDevice object
                BluetoothDevice device = mBluetoothAdapter.getRemoteDevice(address);
                // Attempt to connect to the device
                mChatService.connect(device);
            }
            break;
        case REQUEST_ENABLE_BT:
            // When the request to enable Bluetooth returns
            if (resultCode == Activity.RESULT_OK) {
                // Bluetooth is now enabled, so set up a chat session
                setupChat();
            } else {
                // User did not enable Bluetooth or an error occured
                Log.d(TAG, "BT not enabled");
                Toast.makeText(this, R.string.bt_not_enabled_leaving, Toast.LENGTH_SHORT).show();
                finish();
            }
        }
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        MenuInflater inflater = getMenuInflater();
        inflater.inflate(R.menu.option_menu, menu);
        return true;
    }

    /** Handles the scan and discoverable option-menu items. */
    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()) {
        case R.id.scan:
            // Launch the DeviceListActivity to see devices and do scan
            Intent serverIntent = new Intent(this, DeviceListActivity.class);
            startActivityForResult(serverIntent, REQUEST_CONNECT_DEVICE);
            return true;
        case R.id.discoverable:
            // Ensure this device is discoverable by others
            ensureDiscoverable();
            return true;
        }
        return false;
    }
}
| |
/*
* $Id$
*/
/*
Copyright (c) 2000-2014 Board of Trustees of Leland Stanford Jr. University,
all rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
STANFORD UNIVERSITY BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
Except as contained in this notice, the name of Stanford University shall not
be used in advertising or otherwise to promote the sale, use or other dealings
in this Software without prior written authorization from Stanford University.
*/
package org.lockss.plugin.mathematicalsciencespublishers;
import java.net.*;
import java.util.*;
import org.lockss.test.*;
import org.lockss.util.ListUtil;
import org.lockss.plugin.*;
import org.lockss.config.Configuration;
import org.lockss.daemon.*;
import org.lockss.extractor.*;
import org.lockss.plugin.ArchivalUnit.*;
import org.lockss.plugin.definable.*;
import org.lockss.plugin.wrapper.*;
/**
 * Tests for the ClockssMathematicalSciencesPublishersPlugin definable plugin:
 * AU creation/configuration, plugin identity and parameters, extractor and
 * filter wiring, and the crawl rules.
 */
public class TestMathematicalSciencesPublishersPlugin extends LockssTestCase {
  static final String BASE_URL_KEY = ConfigParamDescr.BASE_URL.getKey();
  static final String JOURNAL_ID_KEY = ConfigParamDescr.JOURNAL_ID.getKey();
  static final String YEAR_KEY = ConfigParamDescr.YEAR.getKey();

  private MockLockssDaemon theDaemon;
  private DefinablePlugin plugin;

  public TestMathematicalSciencesPublishersPlugin(String msg) {
    super(msg);
  }

  /** Creates and initializes the definable plugin under test. */
  public void setUp() throws Exception {
    super.setUp();
    setUpDiskSpace();
    theDaemon = getMockLockssDaemon();
    plugin = new DefinablePlugin();
    plugin.initPlugin(getMockLockssDaemon(),
        "org.lockss.plugin.mathematicalsciencespublishers." +
        "ClockssMathematicalSciencesPublishersPlugin");
  }

  /** A null configuration must be rejected with a ConfigurationException. */
  public void testGetAuNullConfig()
      throws ArchivalUnit.ConfigurationException {
    try {
      plugin.configureAu(null, null);
      fail("Didn't throw ArchivalUnit.ConfigurationException");
    } catch (ArchivalUnit.ConfigurationException e) {
      // expected
    }
  }

  /** A well-formed configuration must yield a usable AU with a name. */
  public void testCreateAu() {
    Properties props = new Properties();
    props.setProperty(BASE_URL_KEY, "http://www.example.com/");
    props.setProperty(JOURNAL_ID_KEY, "j_id");
    props.setProperty(YEAR_KEY, "2004");
    DefinableArchivalUnit au = null;
    try {
      au = makeAuFromProps(props);
    }
    catch (ConfigurationException ex) {
      // Previously this exception was silently swallowed, and the getName()
      // call below then failed with an unrelated NullPointerException.
      fail("Valid config should not have thrown ConfigurationException: " + ex);
    }
    assertNotNull(au.getName());
  }

  /** Builds a DefinableArchivalUnit from the given configuration properties. */
  private DefinableArchivalUnit makeAuFromProps(Properties props)
      throws ArchivalUnit.ConfigurationException {
    Configuration config = ConfigurationUtil.fromProps(props);
    return (DefinableArchivalUnit)plugin.configureAu(config, null);
  }

  /** A malformed base_url must surface as a ConfigurationException with a cause. */
  public void testGetAuHandlesBadUrl()
      throws ArchivalUnit.ConfigurationException, MalformedURLException {
    Properties props = new Properties();
    props.setProperty(BASE_URL_KEY, "blah");
    props.setProperty(JOURNAL_ID_KEY, "322");
    props.setProperty(YEAR_KEY, "2001");
    try {
      makeAuFromProps(props);
      fail ("Didn't throw InstantiationException when given a bad url");
    } catch (ArchivalUnit.ConfigurationException auie) {
      assertNotNull(auie.getCause());
    }
  }

  /** The AU name must be assembled from the plugin name and the parameters. */
  public void testGetAuConstructsProperAu()
      throws ArchivalUnit.ConfigurationException, MalformedURLException {
    Properties props = new Properties();
    props.setProperty(BASE_URL_KEY, "http://www.example.com/");
    props.setProperty(JOURNAL_ID_KEY, "j_id");
    props.setProperty(YEAR_KEY, "2004");
    DefinableArchivalUnit au = makeAuFromProps(props);
    assertEquals("Mathematical Sciences Publishers Journals Plugin (CLOCKSS), " +
                 "Base URL http://www.example.com/, " +
                 "Journal ID j_id, Year 2004", au.getName());
  }

  public void testGetPluginId() {
    assertEquals("org.lockss.plugin.mathematicalsciencespublishers." +
                 "ClockssMathematicalSciencesPublishersPlugin",
                 plugin.getPluginId());
  }

  /** The plugin must declare exactly base_url, journal_id and year. */
  public void testGetAuConfigProperties() {
    assertEquals(ListUtil.list(ConfigParamDescr.BASE_URL,
                               ConfigParamDescr.JOURNAL_ID,
                               ConfigParamDescr.YEAR),
                 plugin.getLocalAuConfigDescrs());
  }

  /** Article and file metadata extractors must be wired up for text/html. */
  public void testGetArticleMetadataExtractor() {
    Properties props = new Properties();
    props.setProperty(BASE_URL_KEY, "http://www.example.com/");
    props.setProperty(JOURNAL_ID_KEY, "asdf");
    props.setProperty(YEAR_KEY, "2004");
    DefinableArchivalUnit au = null;
    try {
      au = makeAuFromProps(props);
    }
    catch (ConfigurationException ex) {
      // Fail explicitly instead of letting the assertions below NPE on au.
      fail("Valid config should not have thrown ConfigurationException: " + ex);
    }
    assertTrue(""+plugin.getArticleMetadataExtractor(MetadataTarget.Any(), au),
        plugin.getArticleMetadataExtractor(null, au) instanceof ArticleMetadataExtractor);
    assertTrue(""+plugin.getFileMetadataExtractor(MetadataTarget.Any(), "text/html", au),
        plugin.getFileMetadataExtractor(MetadataTarget.Any(), "text/html", au) instanceof
        FileMetadataExtractor
        );
  }

  /** Hash filters must exist only for the (X)HTML content types. */
  public void testGetHashFilterFactory() {
    assertNull(plugin.getHashFilterFactory("BogusFilterFactory"));
    assertNull(plugin.getHashFilterFactory("application/pdf"));
    assertNotNull(plugin.getHashFilterFactory("text/html"));
    assertTrue(WrapperUtil.unwrap(plugin.getHashFilterFactory("application/xhtml+xml"))
        instanceof org.lockss.plugin.mathematicalsciencespublishers.MathematicalSciencesPublishersHtmlFilterFactory);
  }

  public void testGetArticleIteratorFactory() {
    assertTrue(WrapperUtil.unwrap(plugin.getArticleIteratorFactory())
        instanceof org.lockss.plugin.mathematicalsciencespublishers.MathematicalSciencesPublishersArticleIteratorFactory);
  }

  // Test the crawl rules for ClockssMathematicalSciencesPublishersPlugin
  public void testShouldCacheProperPages() throws Exception {
    String ROOT_URL = "http://www.example.com/";
    Properties props = new Properties();
    props.setProperty(BASE_URL_KEY, ROOT_URL);
    props.setProperty(JOURNAL_ID_KEY, "asdf");
    props.setProperty(YEAR_KEY, "2004");
    DefinableArchivalUnit au = null;
    try {
      au = makeAuFromProps(props);
    }
    catch (ConfigurationException ex) {
      // Fail explicitly instead of letting the calls below NPE on au.
      fail("Valid config should not have thrown ConfigurationException: " + ex);
    }
    theDaemon.getLockssRepository(au);

    // Test for pages that should get crawled
    // permission page/start url
    shouldCacheTest(ROOT_URL + "asdf/2004/manifest", true, au);
    // toc page for an issue
    shouldCacheTest(ROOT_URL + "asdf/2004/82/index.xhtml", true, au);
    // article files
    shouldCacheTest(ROOT_URL + "asdf/2004/82/p01.xhtml", true, au);
    shouldCacheTest(ROOT_URL + "asdf/2004/any/awful/stuff", true, au);
    // and not forward citations
    shouldCacheTest(ROOT_URL + "asdf/2004/04/f054.xhtml", false, au);
    // should not get crawled - wrong journal/year
    shouldCacheTest(ROOT_URL + "ecgd/2004/82/p01.xhtml", false, au);
    shouldCacheTest(ROOT_URL + "asdf/2013/82/p01.xhtml", false, au);
    // should not get crawled - LOCKSS
    shouldCacheTest("http://lockss.stanford.edu", false, au);
  }

  /** Asserts that the AU's crawl rules include/exclude the given URL. */
  private void shouldCacheTest(String url, boolean shouldCache, ArchivalUnit au) {
    log.info ("shouldCacheTest url: " + url);
    assertEquals(shouldCache, au.shouldBeCached(url));
  }
}
| |
/*
* #%L
* BroadleafCommerce Framework
* %%
* Copyright (C) 2009 - 2013 Broadleaf Commerce
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package org.broadleafcommerce.core.order.domain;
import org.broadleafcommerce.common.currency.util.BroadleafCurrencyUtils;
import org.broadleafcommerce.common.money.Money;
import org.broadleafcommerce.common.presentation.AdminPresentation;
import org.broadleafcommerce.common.presentation.AdminPresentationClass;
import org.broadleafcommerce.common.presentation.AdminPresentationToOneLookup;
import org.broadleafcommerce.common.presentation.client.SupportedFieldType;
import org.broadleafcommerce.core.catalog.domain.Product;
import org.broadleafcommerce.core.catalog.domain.ProductImpl;
import org.broadleafcommerce.core.catalog.domain.Sku;
import org.broadleafcommerce.core.catalog.domain.SkuBundleItem;
import org.broadleafcommerce.core.catalog.domain.SkuBundleItemImpl;
import org.broadleafcommerce.core.catalog.domain.SkuImpl;
import org.hibernate.annotations.BatchSize;
import org.hibernate.annotations.Cache;
import org.hibernate.annotations.CacheConcurrencyStrategy;
import org.hibernate.annotations.Index;
import org.hibernate.annotations.NotFound;
import org.hibernate.annotations.NotFoundAction;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.persistence.CascadeType;
import javax.persistence.CollectionTable;
import javax.persistence.Column;
import javax.persistence.ElementCollection;
import javax.persistence.Entity;
import javax.persistence.Inheritance;
import javax.persistence.InheritanceType;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.MapKeyColumn;
import javax.persistence.OneToMany;
import javax.persistence.Table;
@Entity
@Inheritance(strategy = InheritanceType.JOINED)
@Table(name = "BLC_DISCRETE_ORDER_ITEM")
@Cache(usage=CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, region="blOrderElements")
@AdminPresentationClass(friendlyName = "DiscreteOrderItemImpl_discreteOrderItem")
public class DiscreteOrderItemImpl extends OrderItemImpl implements DiscreteOrderItem {

    private static final long serialVersionUID = 1L;

    // Snapshot of the sku's retail price taken when the sku was assigned; used by
    // updateRetailPrice() to detect later catalog price changes.
    @Column(name="BASE_RETAIL_PRICE", precision=19, scale=5)
    @AdminPresentation(excluded = true, friendlyName = "DiscreteOrderItemImpl_Base_Retail_Price", order=2,
            group = "DiscreteOrderItemImpl_Pricing", fieldType=SupportedFieldType.MONEY)
    protected BigDecimal baseRetailPrice;

    // Snapshot of the sku's sale price taken when the sku was assigned.
    @Column(name="BASE_SALE_PRICE", precision=19, scale=5)
    @AdminPresentation(excluded = true, friendlyName = "DiscreteOrderItemImpl_Base_Sale_Price", order=2,
            group = "DiscreteOrderItemImpl_Pricing", fieldType= SupportedFieldType.MONEY)
    protected BigDecimal baseSalePrice;

    @ManyToOne(targetEntity = SkuImpl.class, optional=false)
    @JoinColumn(name = "SKU_ID", nullable = false)
    @Index(name="DISCRETE_SKU_INDEX", columnNames={"SKU_ID"})
    @AdminPresentation(friendlyName = "DiscreteOrderItemImpl_Sku", order=Presentation.FieldOrder.SKU,
            group = OrderItemImpl.Presentation.Group.Name.Catalog, groupOrder = OrderItemImpl.Presentation.Group.Order.Catalog)
    @AdminPresentationToOneLookup()
    protected Sku sku;

    // Optional back-reference to the product; NotFoundAction.IGNORE tolerates a
    // dangling PRODUCT_ID (e.g. product removed after the order was placed).
    @ManyToOne(targetEntity = ProductImpl.class)
    @JoinColumn(name = "PRODUCT_ID")
    @Index(name="DISCRETE_PRODUCT_INDEX", columnNames={"PRODUCT_ID"})
    @NotFound(action = NotFoundAction.IGNORE)
    @AdminPresentation(friendlyName = "DiscreteOrderItemImpl_Product", order=Presentation.FieldOrder.PRODUCT,
            group = OrderItemImpl.Presentation.Group.Name.Catalog, groupOrder = OrderItemImpl.Presentation.Group.Order.Catalog)
    @AdminPresentationToOneLookup()
    protected Product product;

    // An item belongs to either an Order or a BundleOrderItem, never both;
    // setOrder/setBundleOrderItem enforce the mutual exclusion.
    @ManyToOne(targetEntity = BundleOrderItemImpl.class)
    @JoinColumn(name = "BUNDLE_ORDER_ITEM_ID")
    @AdminPresentation(excluded = true)
    protected BundleOrderItem bundleOrderItem;

    @ManyToOne(targetEntity = SkuBundleItemImpl.class)
    @JoinColumn(name = "SKU_BUNDLE_ITEM_ID")
    @AdminPresentation(excluded = true)
    protected SkuBundleItem skuBundleItem;

    @ElementCollection
    @MapKeyColumn(name="NAME")
    @Column(name="VALUE")
    @CollectionTable(name="BLC_ORDER_ITEM_ADD_ATTR", joinColumns=@JoinColumn(name="ORDER_ITEM_ID"))
    @BatchSize(size = 50)
    @Deprecated
    protected Map<String, String> additionalAttributes = new HashMap<String, String>();

    @OneToMany(mappedBy = "discreteOrderItem", targetEntity = DiscreteOrderItemFeePriceImpl.class, cascade = { CascadeType.ALL }, orphanRemoval = true)
    @Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, region = "blOrderElements")
    protected List<DiscreteOrderItemFeePrice> discreteOrderItemFeePrices = new ArrayList<DiscreteOrderItemFeePrice>();

    @Override
    public Sku getSku() {
        return sku;
    }

    /**
     * Assigns the sku and snapshots its retail/sale prices, taxable flag and name
     * onto this item. Prices that the sku does not define are left untouched.
     */
    @Override
    public void setSku(Sku sku) {
        this.sku = sku;
        if (sku.hasRetailPrice()) {
            this.baseRetailPrice = sku.getRetailPrice().getAmount();
        }
        if (sku.hasSalePrice()) {
            this.baseSalePrice = sku.getSalePrice().getAmount();
        }
        this.itemTaxable = sku.isTaxable();
        setName(sku.getName());
    }

    /**
     * Taxable unless the sku explicitly reports {@code false}; a missing sku or an
     * unset sku flag defaults to taxable.
     */
    @Override
    public Boolean isTaxable() {
        return (sku == null || sku.isTaxable() == null || sku.isTaxable());
    }

    @Override
    public Product getProduct() {
        return product;
    }

    @Override
    public void setProduct(Product product) {
        this.product = product;
    }

    @Override
    public BundleOrderItem getBundleOrderItem() {
        return bundleOrderItem;
    }

    /**
     * @throws IllegalStateException if this item is already attached to an Order —
     *         an item may belong to an Order or a bundle, not both
     */
    @Override
    public void setBundleOrderItem(BundleOrderItem bundleOrderItem) {
        if (this.order != null && bundleOrderItem != null) {
            throw new IllegalStateException("Cannot set a BundleOrderItem on a DiscreteOrderItem that is already associated with an Order");
        }
        this.bundleOrderItem = bundleOrderItem;
    }

    /**
     * @throws IllegalStateException if this item is already attached to a
     *         BundleOrderItem (mirror of {@link #setBundleOrderItem})
     */
    @Override
    public void setOrder(Order order) {
        if (order != null && bundleOrderItem != null) {
            throw new IllegalStateException("Cannot set an Order on a DiscreteOrderItem that is already associated with a BundleOrderItem");
        }
        this.order = order;
    }

    /**
     * If this item is part of a bundle that was created via a ProductBundle, then this
     * method returns a reference to the corresponding SkuBundleItem.
     * <p/>
     * For manually created
     * <p/>
     * For all others, this method returns null.
     *
     * @return the associated SkuBundleItem, or null
     */
    @Override
    public SkuBundleItem getSkuBundleItem() {
        return skuBundleItem;
    }

    /**
     * Sets the associated SkuBundleItem.
     *
     * @param skuBundleItem the bundle component this item corresponds to
     */
    @Override
    public void setSkuBundleItem(SkuBundleItem skuBundleItem) {
        this.skuBundleItem = skuBundleItem;
    }

    /**
     * Falls back to the sku's name when no explicit item name has been set.
     */
    @Override
    public String getName() {
        String name = super.getName();
        if (name == null) {
            return sku.getName();
        }
        return name;
    }

    /**
     * For bundled items the owning Order is reached through the parent
     * BundleOrderItem.
     */
    @Override
    public Order getOrder() {
        if (order == null) {
            if (getBundleOrderItem() != null) {
                return getBundleOrderItem().getOrder();
            }
        }
        return order;
    }

    /**
     * Refreshes {@code salePrice} from the sku (or an overriding SkuBundleItem) and
     * then adds any attached item fees. Returns true only when the sku-driven part
     * of the price changed; fee additions alone do not set the flag.
     */
    private boolean updateSalePrice() {
        if (isSalePriceOverride()) {
            return false;
        }
        Money skuSalePrice = getSku().getSalePrice();
        // Override retail/sale prices from skuBundle.
        if (skuBundleItem != null) {
            if (skuBundleItem.getSalePrice() != null) {
                skuSalePrice = skuBundleItem.getSalePrice();
            }
        }
        boolean updated = false;
        //use the sku prices - the retail and sale prices could be null
        // NOTE(review): BigDecimal.equals is scale-sensitive (2.0 != 2.00), so a
        // scale-only difference also counts as an update here — confirm intended.
        if (skuSalePrice != null && !skuSalePrice.getAmount().equals(salePrice)) {
            baseSalePrice = skuSalePrice.getAmount();
            salePrice = skuSalePrice.getAmount();
            updated = true;
        }
        // Adjust prices by adding in fees if they are attached.
        if (getDiscreteOrderItemFeePrices() != null) {
            for (DiscreteOrderItemFeePrice fee : getDiscreteOrderItemFeePrices()) {
                Money returnPrice = convertToMoney(salePrice);
                salePrice = returnPrice.add(fee.getAmount()).getAmount();
            }
        }
        return updated;
    }

    /**
     * Same contract as {@link #updateSalePrice()} but for the retail price. Unlike
     * the sale price, the sku retail price is assumed non-null here.
     */
    private boolean updateRetailPrice() {
        if (isRetailPriceOverride()) {
            return false;
        }
        Money skuRetailPrice = getSku().getRetailPrice();
        // Override retail/sale prices from skuBundle.
        if (skuBundleItem != null) {
            if (skuBundleItem.getRetailPrice() != null) {
                skuRetailPrice = skuBundleItem.getRetailPrice();
            }
        }
        boolean updated = false;
        //use the sku prices - the retail and sale prices could be null
        if (!skuRetailPrice.getAmount().equals(retailPrice)) {
            baseRetailPrice = skuRetailPrice.getAmount();
            retailPrice = skuRetailPrice.getAmount();
            updated = true;
        }
        // Adjust prices by adding in fees if they are attached.
        if (getDiscreteOrderItemFeePrices() != null) {
            for (DiscreteOrderItemFeePrice fee : getDiscreteOrderItemFeePrices()) {
                Money returnPrice = convertToMoney(retailPrice);
                retailPrice = returnPrice.add(fee.getAmount()).getAmount();
            }
        }
        return updated;
    }

    /**
     * Re-derives sale and retail prices from the sku and, when no override is in
     * effect, selects the lower of the two as the effective {@code price}.
     *
     * @return true if either price changed
     */
    @Override
    public boolean updateSaleAndRetailPrices() {
        boolean salePriceUpdated = updateSalePrice();
        boolean retailPriceUpdated = updateRetailPrice();
        if (!isRetailPriceOverride() && !isSalePriceOverride()) {
            if (salePrice != null && salePrice.compareTo(retailPrice) <= 0) {
                price = salePrice;
            } else {
                price = retailPrice;
            }
        }
        return salePriceUpdated || retailPriceUpdated;
    }

    @Override
    public Map<String, String> getAdditionalAttributes() {
        return additionalAttributes;
    }

    @Override
    public void setAdditionalAttributes(Map<String, String> additionalAttributes) {
        this.additionalAttributes = additionalAttributes;
    }

    @Override
    public Money getBaseRetailPrice() {
        return convertToMoney(baseRetailPrice);
    }

    /**
     * Null-safe, mirroring {@link #setBaseSalePrice(Money)}. This matters for
     * {@link #clone()}, which passes {@code convertToMoney(baseRetailPrice)} —
     * a value that is null whenever the stored amount is null.
     */
    @Override
    public void setBaseRetailPrice(Money baseRetailPrice) {
        this.baseRetailPrice = baseRetailPrice == null ? null : baseRetailPrice.getAmount();
    }

    @Override
    public Money getBaseSalePrice() {
        return convertToMoney(baseSalePrice);
    }

    @Override
    public void setBaseSalePrice(Money baseSalePrice) {
        this.baseSalePrice = baseSalePrice == null ? null : baseSalePrice.getAmount();
    }

    @Override
    public List<DiscreteOrderItemFeePrice> getDiscreteOrderItemFeePrices() {
        return discreteOrderItemFeePrices;
    }

    @Override
    public void setDiscreteOrderItemFeePrices(List<DiscreteOrderItemFeePrice> discreteOrderItemFeePrices) {
        this.discreteOrderItemFeePrices = discreteOrderItemFeePrices;
    }

    /**
     * Wraps the amount in the order's currency; returns null for a null amount.
     */
    protected Money convertToMoney(BigDecimal amount) {
        return amount == null ? null : BroadleafCurrencyUtils.getMoney(amount, getOrder().getCurrency());
    }

    /**
     * Deep-ish clone: fee prices are cloned and re-parented, additional attributes
     * are copied, and the order/bundle invariant is re-validated on the result.
     */
    @Override
    public OrderItem clone() {
        DiscreteOrderItem orderItem = (DiscreteOrderItem) super.clone();
        if (discreteOrderItemFeePrices != null) {
            for (DiscreteOrderItemFeePrice feePrice : discreteOrderItemFeePrices) {
                DiscreteOrderItemFeePrice cloneFeePrice = feePrice.clone();
                cloneFeePrice.setDiscreteOrderItem(orderItem);
                orderItem.getDiscreteOrderItemFeePrices().add(cloneFeePrice);
            }
        }
        if (additionalAttributes != null) {
            orderItem.getAdditionalAttributes().putAll(additionalAttributes);
        }
        orderItem.setBaseRetailPrice(convertToMoney(baseRetailPrice));
        orderItem.setBaseSalePrice(convertToMoney(baseSalePrice));
        orderItem.setBundleOrderItem(bundleOrderItem);
        orderItem.setProduct(product);
        orderItem.setSku(sku);
        if (orderItem.getOrder() == null) {
            throw new IllegalStateException("Either an Order or a BundleOrderItem must be set on the DiscreteOrderItem");
        }
        return orderItem;
    }

    /**
     * Identity by id when both sides are persisted; otherwise falls back to
     * bundleOrderItem + sku equality.
     * NOTE(review): the isAssignableFrom check is asymmetric across subclasses —
     * kept as-is since it appears to be the file's existing convention.
     */
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null) {
            return false;
        }
        if (!getClass().isAssignableFrom(obj.getClass())) {
            return false;
        }
        DiscreteOrderItemImpl other = (DiscreteOrderItemImpl) obj;
        if (!super.equals(obj)) {
            return false;
        }
        if (id != null && other.id != null) {
            return id.equals(other.id);
        }
        if (bundleOrderItem == null) {
            if (other.bundleOrderItem != null) {
                return false;
            }
        } else if (!bundleOrderItem.equals(other.bundleOrderItem)) {
            return false;
        }
        if (sku == null) {
            if (other.sku != null) {
                return false;
            }
        } else if (!sku.equals(other.sku)) {
            return false;
        }
        return true;
    }

    /**
     * Hash over the same fields equals falls back to (bundleOrderItem, sku),
     * seeded with the superclass hash as the multiplier.
     */
    @Override
    public int hashCode() {
        final int prime = super.hashCode();
        int result = 1;
        result = prime * result + ((bundleOrderItem == null) ? 0 : bundleOrderItem.hashCode());
        result = prime * result + ((sku == null) ? 0 : sku.hashCode());
        return result;
    }

    /**
     * An explicit item-level flag wins; otherwise defer to the sku.
     */
    @Override
    public boolean isDiscountingAllowed() {
        if (discountsAllowed == null) {
            return sku.isDiscountable();
        } else {
            return discountsAllowed.booleanValue();
        }
    }

    /**
     * Scans the order's items for a BundleOrderItem that contains this item.
     *
     * @return the containing bundle, or null if this item is not bundled
     */
    @Override
    public BundleOrderItem findParentItem() {
        for (OrderItem orderItem : getOrder().getOrderItems()) {
            if (orderItem instanceof BundleOrderItem) {
                BundleOrderItem bundleItem = (BundleOrderItem) orderItem;
                for (OrderItem containedItem : bundleItem.getOrderItems()) {
                    if (containedItem.equals(this)) {
                        return bundleItem;
                    }
                }
            }
        }
        return null;
    }

    /** Admin-UI layout constants referenced by the @AdminPresentation annotations. */
    public static class Presentation {
        public static class Tab {
            public static class Name {
                public static final String OrderItems = "OrderImpl_Order_Items_Tab";
            }
            public static class Order {
                public static final int OrderItems = 2000;
            }
        }
        public static class Group {
            public static class Name {
            }
            public static class Order {
            }
        }
        public static class FieldOrder {
            public static final int PRODUCT = 2000;
            public static final int SKU = 3000;
        }
    }

    @Override
    public boolean isSkuActive() {
        return sku.isActive();
    }
}
| |
/*
* Artificial Intelligence for Humans
* Volume 2: Nature Inspired Algorithms
* Java Version
* http://www.aifh.org
* http://www.jeffheaton.com
*
* Code repository:
* https://github.com/jeffheaton/aifh
*
* Copyright 2014 by Jeff Heaton
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* For more information on Heaton Research copyrights, licenses
* and trademarks visit:
* http://www.heatonresearch.com/copyright
*/
package com.heatonresearch.aifh.examples.ca.mergelife.universe;
import com.heatonresearch.aifh.randomize.GenerateRandom;
public class Universe implements Cloneable {

    /**
     * The number of dimensions held by every cell.
     */
    private final int cellSize;

    /**
     * The grid of cells, indexed as [row][column].
     */
    private final UniverseCell[][] grid;

    /**
     * Construct a universe of the requested dimensions, filling every grid
     * position with a fresh cell.
     *
     * @param height  The universe height (row count).
     * @param width   The universe width (column count).
     * @param theSize The number of dimensions in each universe cell.
     */
    public Universe(final int height, final int width, final int theSize) {
        this.cellSize = theSize;
        this.grid = new UniverseCell[height][width];
        for (int r = 0; r < height; r++) {
            for (int c = 0; c < width; c++) {
                this.grid[r][c] = new UniverseCell(theSize);
            }
        }
    }

    /**
     * Add a value to one dimension of one cell.
     *
     * @param row The cell's row.
     * @param col The cell's column.
     * @param i   The dimension index within the cell.
     * @param d   The amount to add.
     */
    public void add(final int row, final int col, final int i, final double d) {
        this.grid[row][col].add(i, d);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Object clone() {
        final Universe duplicate = new Universe(getHeight(), getWidth(), this.cellSize);
        duplicate.copy(this);
        return duplicate;
    }

    /**
     * Measure how different this universe is from another. Zero means the
     * universes look identical; smaller values mean more similar. A cell counts
     * as different when its average value, scaled to 0-255, differs from the
     * other universe's cell by more than 10.
     *
     * @param otherUniverse The universe to compare against.
     * @return The fraction of cells that differ.
     */
    public double compare(final Universe otherUniverse) {
        int mismatched = 0;
        int examined = 0;
        for (int r = 0; r < otherUniverse.getHeight(); r++) {
            for (int c = 0; c < otherUniverse.getWidth(); c++) {
                final int mine = Math.abs((int) (255 * get(r, c).getAvg()));
                final int theirs = Math.abs((int) (255 * otherUniverse.get(r, c).getAvg()));
                if (Math.abs(mine - theirs) > 10) {
                    mismatched++;
                }
                examined++;
            }
        }
        return (double) mismatched / (double) examined;
    }

    /**
     * Overwrite this universe's cell values with those of another universe.
     *
     * @param source The universe to copy from.
     */
    public void copy(final Universe source) {
        for (int r = 0; r < getHeight(); r++) {
            for (int c = 0; c < getWidth(); c++) {
                final UniverseCell target = this.grid[r][c];
                final UniverseCell origin = source.get(r, c);
                for (int dim = 0; dim < this.cellSize; dim++) {
                    target.set(dim, origin.get(dim));
                }
            }
        }
    }

    /**
     * Fetch the cell at the given position.
     *
     * @param row The row.
     * @param col The column.
     * @return The cell at that position.
     */
    public UniverseCell get(final int row, final int col) {
        return this.grid[row][col];
    }

    /**
     * Fetch one dimension's value from the cell at the given position.
     *
     * @param row The row of the cell.
     * @param col The column of the cell.
     * @param i   The dimension index inside the cell.
     * @return The stored value.
     */
    public double get(final int row, final int col, final int i) {
        return this.grid[row][col].get(i);
    }

    /**
     * @return The number of dimensions inside each cell.
     */
    public int getCellSize() {
        return this.cellSize;
    }

    /**
     * @return The underlying grid of cells.
     */
    public UniverseCell[][] getData() {
        return this.grid;
    }

    /**
     * @return The number of rows in the universe.
     */
    public int getHeight() {
        return this.grid.length;
    }

    /**
     * @return The number of columns in the universe.
     */
    public int getWidth() {
        return this.grid[0].length;
    }

    /**
     * Check whether a position lies inside the universe bounds.
     *
     * @param row The row.
     * @param col The column.
     * @return True when both coordinates are non-negative and in range.
     */
    public boolean isValid(final int row, final int col) {
        return row >= 0 && col >= 0 && row < getHeight() && col < getWidth();
    }

    /**
     * Randomize every cell in the universe.
     *
     * @param rnd A random number generator.
     */
    public void randomize(GenerateRandom rnd) {
        for (final UniverseCell[] rowOfCells : this.grid) {
            for (final UniverseCell cell : rowOfCells) {
                // Three randomize passes per cell, preserving the original
                // behavior (and RNG stream consumption) exactly.
                for (int pass = 0; pass < 3; pass++) {
                    cell.randomize(rnd);
                }
            }
        }
    }
}
| |
/*
* Copyright 2007 Soren Davidsen, Tanesha Networks
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.encuestame.utils.captcha;
import java.io.ByteArrayInputStream;
import java.util.Properties;
import junit.framework.TestCase;
import org.encuestame.utils.captcha.http.HttpLoader;
import org.encuestame.utils.categories.test.InternetTest;
import org.junit.experimental.categories.Category;
import org.w3c.dom.Document;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
@Category(InternetTest.class)
public class ReCaptchaImplTest extends TestCase {
ReCaptchaImpl r;
MockHttpLoader l;
protected void setUp() throws Exception {
r = new ReCaptchaImpl();
l = new MockHttpLoader();
r.setIncludeNoscript(false);
r.setPrivateKey("testing");
r.setPublicKey("testing");
r.setRecaptchaServer(ReCaptchaImpl.HTTPS_SERVER);
r.setHttpLoader(l);
}
public void testCreateCaptchaHtml() {
String html = r.createRecaptchaHtml(null, null);
assertTrue(html.indexOf("<script") != -1);
r.setIncludeNoscript(true);
assertTrue(r.createRecaptchaHtml(null, null).indexOf("<noscript>") != -1);
String html2 = r.createRecaptchaHtml("The Error", null);
assertTrue(html2.indexOf("&error=The+Error") != -1);
Properties options = new Properties();
options.setProperty("theme", "mytheme");
options.setProperty("tabindex", "1");
String html3 = r.createRecaptchaHtml("The Error", options);
assertTrue(html3.indexOf("theme:'mytheme'") != -1);
assertTrue(html3.indexOf("tabindex:'1'") != -1);
assertTrue(html3.indexOf(",") != -1);
// check the shortcut
String html4 = r.createRecaptchaHtml("Some Error", "othertheme", new Integer(3));
assertTrue(html4.indexOf("theme:'othertheme'") != -1);
assertTrue(html4.indexOf("tabindex:'3'") != -1);
assertTrue(html4.indexOf(",") != -1);
}
public void testNotReachable() {
r.setVerifyUrl("http://www.example.com22/");
ReCaptchaResponse re = r.checkAnswer("123.123.123.123", "asdfasdfasdf", "zxcvzxcvzxcv");
assertTrue(!re.isValid());
assertEquals("recaptcha-not-reachable", re.getErrorMessage());
}
public void testAlternativeVerifyUrl() {
// check that we hit the "correct" verifyurl
l.setNextUrl("http://www.google.com/recaptcha/api/verify");
r.checkAnswer("123.123.123.123", "asdfasdfasdf", "zxcvzxcvzxcv");
// check that we now hit the new one.
l.setNextUrl("http://www.example.com/");
r.setVerifyUrl("http://www.example.com/");
r.checkAnswer("123.123.123.123", "asdfasdfasdf", "zxcvzxcvzxcv");
}
public void testHtmlIsXhtml() throws Exception {
r.setIncludeNoscript(true);
String html = r.createRecaptchaHtml(null, null);
// wrap the html in a root element.
html = "<root>" + html + "</root>";
DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
DocumentBuilder builder = dbf.newDocumentBuilder();
Document doc = builder.parse(new ByteArrayInputStream(html.getBytes()));
// should be OK here.
}
public void testCheckAnswer() {
l.setNextReply("true\nnone");
ReCaptchaResponse reponse = r.checkAnswer("123.123.123.123", "abcdefghijklmnop", "response");
assertTrue(reponse.isValid());
assertEquals(null, reponse.getErrorMessage());
}
public void testCheckAnswer_02() {
l.setNextReply("true\n");
ReCaptchaResponse reponse = r.checkAnswer("123.123.123.123", "abcdefghijklmnop", "response");
assertTrue(reponse.isValid());
}
public void testCheckAnswer_03() {
l.setNextReply("true");
ReCaptchaResponse reponse = r.checkAnswer("123.123.123.123", "abcdefghijklmnop", "response");
assertTrue(reponse.isValid());
}
public void testCheckAnswer_04() {
l.setNextReply("false");
ReCaptchaResponse reponse = r.checkAnswer("123.123.123.123", "abcdefghijklmnop", "response");
assertFalse(reponse.isValid());
assertEquals("recaptcha4j-missing-error-message", reponse.getErrorMessage());
}
public void testCheckAnswer_05() {
l.setNextReply("nottrue");
ReCaptchaResponse reponse = r.checkAnswer("123.123.123.123", "abcdefghijklmnop", "response");
assertFalse(reponse.isValid());
assertEquals("recaptcha4j-missing-error-message", reponse.getErrorMessage());
}
public void testCheckAnswer_06() {
l.setNextReply("false\nblabla");
ReCaptchaResponse reponse = r.checkAnswer("123.123.123.123", "abcdefghijklmnop", "response");
assertFalse(reponse.isValid());
assertEquals("blabla", reponse.getErrorMessage());
}
public void testCheckAnswer_07() {
l.setNextReply("false\nblabla\n\n");
ReCaptchaResponse reponse = r.checkAnswer("123.123.123.123", "abcdefghijklmnop", "response");
assertFalse(reponse.isValid());
assertEquals("blabla", reponse.getErrorMessage());
}
public class MockHttpLoader implements HttpLoader {
String url;
String postdata;
private String reply;
public void setNextUrl(String url) {
this.url = url;
}
public void setNextPostdata(String postdata) {
this.postdata = postdata;
}
public void setNextReply(String reply) {
this.reply = reply;
}
public String httpGet(String url) {
if (this.url != null)
assertEquals(this.url, url);
return reply;
}
public String httpPost(String url, String postdata) {
if (this.url != null)
assertEquals(this.url, url);
if (this.postdata != null)
assertEquals(this.postdata, postdata);
return reply;
}
}
}
| |
// Copyright (C) 2013 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.acceptance.git;
import static com.google.common.truth.Truth.assertThat;
import com.google.common.collect.Iterables;
import com.google.gerrit.acceptance.AbstractDaemonTest;
import com.google.gerrit.acceptance.NoHttpd;
import com.google.gerrit.acceptance.PushOneCommit;
import com.google.gerrit.common.data.Permission;
import com.google.gerrit.reviewdb.client.Change;
import com.google.gerrit.reviewdb.client.PatchSet;
import com.google.gerrit.reviewdb.client.PatchSetApproval;
import com.google.gerrit.reviewdb.client.Project;
import com.google.gerrit.server.ApprovalsUtil;
import com.google.gerrit.server.git.CommitMergeStatus;
import com.google.gerrit.server.notedb.ChangeNotes;
import com.google.gwtorm.server.OrmException;
import com.google.inject.Inject;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.Ref;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.revwalk.RevCommit;
import org.eclipse.jgit.revwalk.RevObject;
import org.eclipse.jgit.revwalk.RevTag;
import org.eclipse.jgit.revwalk.RevWalk;
import org.eclipse.jgit.transport.RefSpec;
import org.junit.Test;
import java.io.IOException;
@NoHttpd
public class SubmitOnPushIT extends AbstractDaemonTest {
@Inject
private ApprovalsUtil approvalsUtil;
@Inject
private ChangeNotes.Factory changeNotesFactory;
@Test
public void submitOnPush() throws Exception {
grant(Permission.SUBMIT, project, "refs/for/refs/heads/master");
PushOneCommit.Result r = pushTo("refs/for/master%submit");
r.assertOkStatus();
r.assertChange(Change.Status.MERGED, null, admin);
assertSubmitApproval(r.getPatchSetId());
assertCommit(project, "refs/heads/master");
}
@Test
public void submitOnPushWithTag() throws Exception {
grant(Permission.SUBMIT, project, "refs/for/refs/heads/master");
grant(Permission.CREATE, project, "refs/tags/*");
grant(Permission.PUSH, project, "refs/tags/*");
PushOneCommit.Tag tag = new PushOneCommit.Tag("v1.0");
PushOneCommit push = pushFactory.create(db, admin.getIdent(), testRepo);
push.setTag(tag);
PushOneCommit.Result r = push.to("refs/for/master%submit");
r.assertOkStatus();
r.assertChange(Change.Status.MERGED, null, admin);
assertSubmitApproval(r.getPatchSetId());
assertCommit(project, "refs/heads/master");
assertTag(project, "refs/heads/master", tag);
}
@Test
public void submitOnPushWithAnnotatedTag() throws Exception {
grant(Permission.SUBMIT, project, "refs/for/refs/heads/master");
PushOneCommit.AnnotatedTag tag =
new PushOneCommit.AnnotatedTag("v1.0", "annotation", admin.getIdent());
PushOneCommit push = pushFactory.create(db, admin.getIdent(), testRepo);
push.setTag(tag);
PushOneCommit.Result r = push.to("refs/for/master%submit");
r.assertOkStatus();
r.assertChange(Change.Status.MERGED, null, admin);
assertSubmitApproval(r.getPatchSetId());
assertCommit(project, "refs/heads/master");
assertTag(project, "refs/heads/master", tag);
}
@Test
public void submitOnPushToRefsMetaConfig() throws Exception {
grant(Permission.SUBMIT, project, "refs/for/refs/meta/config");
git().fetch().setRefSpecs(new RefSpec("refs/meta/config:refs/meta/config")).call();
testRepo.reset("refs/meta/config");
PushOneCommit.Result r = pushTo("refs/for/refs/meta/config%submit");
r.assertOkStatus();
r.assertChange(Change.Status.MERGED, null, admin);
assertSubmitApproval(r.getPatchSetId());
assertCommit(project, "refs/meta/config");
}
@Test
public void submitOnPushMergeConflict() throws Exception {
ObjectId objectId = repo().getRef("HEAD").getObjectId();
push("refs/heads/master", "one change", "a.txt", "some content");
testRepo.reset(objectId);
grant(Permission.SUBMIT, project, "refs/for/refs/heads/master");
PushOneCommit.Result r =
push("refs/for/master%submit", "other change", "a.txt", "other content");
r.assertErrorStatus();
r.assertChange(Change.Status.NEW, null);
r.assertMessage(CommitMergeStatus.PATH_CONFLICT.getMessage());
}
@Test
public void submitOnPushSuccessfulMerge() throws Exception {
String master = "refs/heads/master";
ObjectId objectId = repo().getRef("HEAD").getObjectId();
push(master, "one change", "a.txt", "some content");
testRepo.reset(objectId);
grant(Permission.SUBMIT, project, "refs/for/refs/heads/master");
PushOneCommit.Result r =
push("refs/for/master%submit", "other change", "b.txt", "other content");
r.assertOkStatus();
r.assertChange(Change.Status.MERGED, null, admin);
assertMergeCommit(master, "other change");
}
@Test
public void submitOnPushNewPatchSet() throws Exception {
PushOneCommit.Result r =
push("refs/for/master", PushOneCommit.SUBJECT, "a.txt", "some content");
grant(Permission.SUBMIT, project, "refs/for/refs/heads/master");
r = push("refs/for/master%submit", PushOneCommit.SUBJECT, "a.txt",
"other content", r.getChangeId());
r.assertOkStatus();
r.assertChange(Change.Status.MERGED, null, admin);
Change c = Iterables.getOnlyElement(
queryProvider.get().byKeyPrefix(r.getChangeId())).change();
assertThat(db.patchSets().byChange(c.getId()).toList()).hasSize(2);
assertSubmitApproval(r.getPatchSetId());
assertCommit(project, "refs/heads/master");
}
@Test
public void submitOnPushNotAllowed_Error() throws Exception {
PushOneCommit.Result r = pushTo("refs/for/master%submit");
r.assertErrorStatus("submit not allowed");
}
@Test
public void submitOnPushNewPatchSetNotAllowed_Error() throws Exception {
PushOneCommit.Result r =
push("refs/for/master", PushOneCommit.SUBJECT, "a.txt", "some content");
r = push("refs/for/master%submit", PushOneCommit.SUBJECT, "a.txt",
"other content", r.getChangeId());
r.assertErrorStatus("submit not allowed");
}
@Test
public void submitOnPushingDraft_Error() throws Exception {
PushOneCommit.Result r = pushTo("refs/for/master%draft,submit");
r.assertErrorStatus("cannot submit draft");
}
@Test
public void submitOnPushToNonExistingBranch_Error() throws Exception {
String branchName = "non-existing";
PushOneCommit.Result r = pushTo("refs/for/" + branchName + "%submit");
r.assertErrorStatus("branch " + branchName + " not found");
}
@Test
public void mergeOnPushToBranch() throws Exception {
grant(Permission.PUSH, project, "refs/heads/master");
PushOneCommit.Result r =
push("refs/for/master", PushOneCommit.SUBJECT, "a.txt", "some content");
r.assertOkStatus();
git().push()
.setRefSpecs(new RefSpec(r.getCommitId().name() + ":refs/heads/master"))
.call();
assertCommit(project, "refs/heads/master");
assertThat(getSubmitter(r.getPatchSetId())).isNull();
Change c = db.changes().get(r.getPatchSetId().getParentKey());
assertThat(c.getStatus()).isEqualTo(Change.Status.MERGED);
}
@Test
public void mergeOnPushToBranchWithNewPatchset() throws Exception {
grant(Permission.PUSH, project, "refs/heads/master");
PushOneCommit.Result r = pushTo("refs/for/master");
r.assertOkStatus();
PushOneCommit push =
pushFactory.create(db, admin.getIdent(), testRepo, PushOneCommit.SUBJECT,
"b.txt", "anotherContent", r.getChangeId());
r = push.to("refs/heads/master");
r.assertOkStatus();
assertCommit(project, "refs/heads/master");
assertThat(getSubmitter(r.getPatchSetId())).isNull();
Change c = db.changes().get(r.getPatchSetId().getParentKey());
assertThat(c.getStatus()).isEqualTo(Change.Status.MERGED);
}
private PatchSetApproval getSubmitter(PatchSet.Id patchSetId)
throws OrmException {
Change c = db.changes().get(patchSetId.getParentKey());
ChangeNotes notes = changeNotesFactory.create(c).load();
return approvalsUtil.getSubmitter(db, notes, patchSetId);
}
private void assertSubmitApproval(PatchSet.Id patchSetId) throws OrmException {
PatchSetApproval a = getSubmitter(patchSetId);
assertThat(a.isSubmit()).isTrue();
assertThat(a.getValue()).isEqualTo((short) 1);
assertThat(a.getAccountId()).isEqualTo(admin.id);
}
private void assertCommit(Project.NameKey project, String branch) throws IOException {
try (Repository r = repoManager.openRepository(project);
RevWalk rw = new RevWalk(r)) {
RevCommit c = rw.parseCommit(r.getRef(branch).getObjectId());
assertThat(c.getShortMessage()).isEqualTo(PushOneCommit.SUBJECT);
assertThat(c.getAuthorIdent().getEmailAddress()).isEqualTo(admin.email);
assertThat(c.getCommitterIdent().getEmailAddress()).isEqualTo(
admin.email);
}
}
private void assertMergeCommit(String branch, String subject) throws IOException {
try (Repository r = repoManager.openRepository(project);
RevWalk rw = new RevWalk(r)) {
RevCommit c = rw.parseCommit(r.getRef(branch).getObjectId());
assertThat(c.getParentCount()).isEqualTo(2);
assertThat(c.getShortMessage()).isEqualTo("Merge \"" + subject + "\"");
assertThat(c.getAuthorIdent().getEmailAddress()).isEqualTo(admin.email);
assertThat(c.getCommitterIdent().getEmailAddress()).isEqualTo(
serverIdent.get().getEmailAddress());
}
}
private void assertTag(Project.NameKey project, String branch,
PushOneCommit.Tag tag) throws IOException {
try (Repository repo = repoManager.openRepository(project)) {
Ref tagRef = repo.getRef(tag.name);
assertThat(tagRef).isNotNull();
ObjectId taggedCommit = null;
if (tag instanceof PushOneCommit.AnnotatedTag) {
PushOneCommit.AnnotatedTag annotatedTag = (PushOneCommit.AnnotatedTag)tag;
try (RevWalk rw = new RevWalk(repo)) {
RevObject object = rw.parseAny(tagRef.getObjectId());
assertThat(object).isInstanceOf(RevTag.class);
RevTag tagObject = (RevTag) object;
assertThat(tagObject.getFullMessage())
.isEqualTo(annotatedTag.message);
assertThat(tagObject.getTaggerIdent()).isEqualTo(annotatedTag.tagger);
taggedCommit = tagObject.getObject();
}
} else {
taggedCommit = tagRef.getObjectId();
}
ObjectId headCommit = repo.getRef(branch).getObjectId();
assertThat(taggedCommit).isNotNull();
assertThat(taggedCommit).isEqualTo(headCommit);
}
}
private PushOneCommit.Result push(String ref, String subject,
String fileName, String content) throws Exception {
PushOneCommit push =
pushFactory.create(db, admin.getIdent(), testRepo, subject, fileName, content);
return push.to(ref);
}
/** Pushes one commit with an explicit {@code changeId} to {@code ref}. */
private PushOneCommit.Result push(String ref, String subject,
    String fileName, String content, String changeId) throws Exception {
  return pushFactory
      .create(db, admin.getIdent(), testRepo, subject, fileName, content, changeId)
      .to(ref);
}
}
| |
/**
* Copyright (c) 2014 Samsung Electronics, Inc.,
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.samsung.sec.dexter.cppcheck.plugin;
import com.google.common.base.Strings;
import com.samsung.sec.dexter.core.analyzer.AnalysisConfig;
import com.samsung.sec.dexter.core.analyzer.AnalysisResult;
import com.samsung.sec.dexter.core.analyzer.ResultFileConstant;
import com.samsung.sec.dexter.core.checker.Checker;
import com.samsung.sec.dexter.core.checker.CheckerConfig;
import com.samsung.sec.dexter.core.checker.IChecker;
import com.samsung.sec.dexter.core.config.DexterConfig;
import com.samsung.sec.dexter.core.config.DexterConfig.LANGUAGE;
import com.samsung.sec.dexter.core.defect.PreOccurence;
import com.samsung.sec.dexter.core.exception.DexterRuntimeException;
import com.samsung.sec.dexter.core.util.DexterUtil;
import com.samsung.sec.dexter.util.CppUtil;
import com.samsung.sec.dexter.util.TranslationUnitFactory;
import java.util.Map;
import org.apache.log4j.Logger;
import org.eclipse.cdt.core.dom.ast.IASTTranslationUnit;
import org.eclipse.cdt.core.parser.ParserLanguage;
import org.xml.sax.Attributes;
import org.xml.sax.SAXException;
import org.xml.sax.helpers.DefaultHandler;
/**
 * SAX handler that parses a cppcheck XML result file and turns each reported
 * defect into a Dexter {@link PreOccurence}, which is added to the shared
 * {@link AnalysisResult}.
 *
 * <p>Element handling: an {@code <error>} element starts a new occurrence, a
 * nested {@code <location>} element supplies line numbers plus class/method
 * names, and the closing {@code </error>} tag submits the occurrence.
 * Checkers reported by cppcheck but unknown to the current
 * {@link CheckerConfig} are registered on the fly with default settings.
 */
public class ResultFileHandler extends DefaultHandler {
    // Occurrence currently being assembled: created on <error>, enriched on
    // <location>, submitted to the result on </error>.
    private PreOccurence currentOccurence;
    private AnalysisResult result;
    private AnalysisConfig config;
    private CheckerConfig checkerConfig;
    private final static Logger logger = Logger.getLogger(ResultFileHandler.class);

    /**
     * @param result        sink collecting the defects parsed from the XML file
     * @param config        analysis configuration (target file name, module path, source code)
     * @param checkerConfig known checkers; consulted for severity/category and
     *                      extended when an unknown checker is encountered
     */
    public ResultFileHandler(final AnalysisResult result, final AnalysisConfig config,
            final CheckerConfig checkerConfig) {
        this.config = config;
        this.checkerConfig = checkerConfig;
        this.result = result;
    }

    /*
     * (non-Javadoc)
     *
     * @see org.xml.sax.helpers.DefaultHandler#startDocument()
     */
    @Override
    public void startDocument() throws SAXException {
        super.startDocument();
    }

    /*
     * (non-Javadoc)
     *
     * @see org.xml.sax.helpers.DefaultHandler#endDocument()
     */
    @Override
    public void endDocument() throws SAXException {
        super.endDocument();
    }

    /*
     * (non-Javadoc)
     *
     * @see org.xml.sax.helpers.DefaultHandler#startElement(java.lang.String, java.lang.String, java.lang.String,
     * org.xml.sax.Attributes)
     */
    @Override
    public void startElement(final String uri, final String localName, final String qName, final Attributes attributes)
            throws SAXException {
        super.startElement(uri, localName, qName, attributes);

        if ("error".equals(qName)) {
            final String checkerCode = attributes.getValue("id").toLowerCase();
            currentOccurence = new PreOccurence();
            currentOccurence.setLanguage(LANGUAGE.CPP.toString());

            // Security checkers carry their text in "msg"; all others in "verbose".
            // NOTE(review): replace("'", "'") is a no-op; it may originally have
            // been meant to decode an XML entity such as &apos; -- confirm against
            // actual cppcheck output before changing it.
            if (checkerCode.startsWith(DexterConfig.SECURITY_CHECK_PREFIX)) {
                currentOccurence.setMessage(attributes.getValue("msg").replace("'", "'"));
            } else {
                currentOccurence.setMessage(attributes.getValue("verbose").replace("'", "'"));
            }

            currentOccurence.setToolName(CppcheckDexterPlugin.PLUGIN_NAME);
            currentOccurence.setFileName(config.getFileName());
            currentOccurence.setModulePath(config.getModulePath());
            currentOccurence.setCheckerCode(checkerCode);

            IChecker checker = checkerConfig.getChecker(checkerCode);
            if (Strings.isNullOrEmpty(checker.getCode())) {
                // cppcheck reported a checker Dexter does not know yet.
                createNewChecker(attributes, checkerCode);
            } else {
                currentOccurence.setSeverityCode(checker.getSeverityCode());
                currentOccurence.setCategoryName(checker.getCategoryName());
            }
        } else if ("location".equals(qName)) {
            final String fileName = attributes.getValue("file");

            // cppcheck may report locations inside included files; only defects
            // located in the analyzed file itself are kept.
            if (!result.getSourceFileFullPath().equals(DexterUtil.refinePath(fileName))) {
                logger.debug("target file and defect detected file are not same");
                logger.debug("target file: " + result.getSourceFileFullPath());
                logger.debug("detected file: " + fileName);
                return;
            }

            currentOccurence.setStartLine(Integer.parseInt(attributes.getValue("line")));
            currentOccurence.setEndLine(Integer.parseInt(attributes.getValue("line")));
            currentOccurence.setCharStart(-1);
            currentOccurence.setCharEnd(-1);

            String locationMsg = attributes.getValue("msg");
            if (!Strings.isNullOrEmpty(locationMsg)) {
                // BUG FIX: previously appended the SAX element local name
                // ("location") instead of the location's own message text.
                currentOccurence.setMessage(currentOccurence.getMessage() + " " + locationMsg);
            }

            try {
                // Parse the analyzed source so class/method names can be attached
                // to the occurrence.
                final CharSequence sourcecode = config.getSourcecodeThatReadIfNotExist();
                IASTTranslationUnit translationUnit = TranslationUnitFactory.getASTTranslationUnit(
                        sourcecode.toString(),
                        ParserLanguage.CPP,
                        config.getSourceFileFullPath());
                Map<String, String> nameMap = CppUtil.extractModuleName(translationUnit, sourcecode.toString(),
                        currentOccurence.getStartLine());

                if (!Strings.isNullOrEmpty(nameMap.get(ResultFileConstant.CLASS_NAME))) {
                    currentOccurence.setClassName(nameMap.get(ResultFileConstant.CLASS_NAME));
                }
                if (!Strings.isNullOrEmpty(nameMap.get(ResultFileConstant.METHOD_NAME))) {
                    currentOccurence.setMethodName(nameMap.get(ResultFileConstant.METHOD_NAME));
                }
            } catch (DexterRuntimeException e) {
                // Name extraction is best-effort; the defect is still reported without it.
                logger.warn(e);
            }
        }
    }

    /**
     * Registers a checker that cppcheck reported but the current configuration
     * does not contain, using default severity "ETC". Skipped entirely when the
     * CLI restricts analysis to explicitly specified checkers.
     */
    private void createNewChecker(final Attributes attributes, final String checkerCode) {
        if (DexterConfig.getInstance().isSpecifiedCheckerOptionEnabledByCli()) {
            return;
        }
        Checker checker = new Checker(checkerCode, checkerCode,
                CppcheckDexterPlugin.PLUGIN_VERSION.getVersion(), true);
        checker.setSeverityCode("ETC");
        // Inconclusive findings are registered but left inactive.
        checker.setActive(!"true".equals(attributes.getValue("inconclusive")));
        currentOccurence.setSeverityCode("ETC");
        currentOccurence.setCategoryName("");
        checkerConfig.addChecker(checker);
        logger.info("Found new checker(" + checkerCode + ") in " + config.getSourceFileFullPath());
    }

    /*
     * (non-Javadoc)
     *
     * @see org.xml.sax.helpers.DefaultHandler#endElement(java.lang.String, java.lang.String, java.lang.String)
     */
    @Override
    public void endElement(final String uri, final String localName, final String qName) throws SAXException {
        super.endElement(uri, localName, qName);

        if ("error".equals(qName)) {
            // Drop occurrences whose checker is disabled.
            if (!checkerConfig.isActiveChecker(currentOccurence.getCheckerCode())) {
                return;
            }
            if (currentOccurence.getStartLine() != -1) {
                result.addDefectWithPreOccurence(currentOccurence);
            } else {
                logger.warn("Not added defect(start line is -1) : " + currentOccurence.toJson());
            }
        }
    }
}
| |
package ca.uhn.fhir.rest.param;
/*
* #%L
* HAPI FHIR - Core Library
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.model.base.composite.BaseCodingDt;
import ca.uhn.fhir.model.base.composite.BaseIdentifierDt;
import ca.uhn.fhir.model.primitive.UriDt;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import org.hl7.fhir.instance.model.api.IBaseCoding;
import static org.apache.commons.lang3.StringUtils.defaultString;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
/**
 * Represents a FHIR "token" search parameter: an optional system plus a value,
 * rendered on the URL as <code>system|value</code>, optionally qualified by a
 * {@link TokenParamModifier modifier} such as <code>:text</code>.
 */
public class TokenParam extends BaseParam /*implements IQueryParameterType*/ {
	// Optional modifier; when set it becomes the query parameter qualifier.
	private TokenParamModifier myModifier;
	private String mySystem;
	private String myValue;
	// Tri-state: null means "not specified" (treated as false by isMdmExpand()).
	private Boolean myMdmExpand;

	/**
	 * Constructor
	 */
	public TokenParam() {
		super();
	}

	/**
	 * Constructor which copies the {@link InternalCodingDt#getSystemElement() system} and
	 * {@link InternalCodingDt#getCodeElement() code} from a {@link InternalCodingDt} instance and adds it as a parameter
	 *
	 * @param theCodingDt The coding
	 */
	public TokenParam(BaseCodingDt theCodingDt) {
		this(toSystemValue(theCodingDt.getSystemElement()), theCodingDt.getCodeElement().getValue());
	}

	/**
	 * Constructor which copies the {@link BaseIdentifierDt#getSystemElement() system} and
	 * {@link BaseIdentifierDt#getValueElement() value} from a {@link BaseIdentifierDt} instance and adds it as a
	 * parameter
	 *
	 * @param theIdentifierDt The identifier
	 */
	public TokenParam(BaseIdentifierDt theIdentifierDt) {
		this(toSystemValue(theIdentifierDt.getSystemElement()), theIdentifierDt.getValueElement().getValue());
	}

	/**
	 * Construct a {@link TokenParam} from the {@link IBaseCoding#getSystem()} () system} and
	 * {@link IBaseCoding#getCode()} () code} of a {@link IBaseCoding} instance.
	 *
	 * @param theCoding The coding
	 */
	public TokenParam(IBaseCoding theCoding) {
		this(theCoding.getSystem(), theCoding.getCode());
	}

	/**
	 * Constructor taking an explicit system (may be null) and value.
	 */
	public TokenParam(String theSystem, String theValue) {
		setSystem(theSystem);
		setValue(theValue);
	}

	/**
	 * Constructor. When {@code theText} is true the parameter becomes a
	 * <code>:text</code> search, which cannot carry a system.
	 *
	 * @throws IllegalArgumentException if theText is true and theSystem is non-blank
	 */
	public TokenParam(String theSystem, String theValue, boolean theText) {
		if (theText && isNotBlank(theSystem)) {
			throw new IllegalArgumentException("theSystem can not be non-blank if theText is true (:text searches do not include a system). In other words, set the first parameter to null for a text search");
		}
		setSystem(theSystem);
		setValue(theValue);
		setText(theText);
	}

	/**
	 * Constructor that takes a code but no system
	 */
	public TokenParam(String theCode) {
		this(null, theCode);
	}

	/**
	 * Returns true only if MDM expansion has been explicitly enabled.
	 */
	public boolean isMdmExpand() {
		return myMdmExpand != null && myMdmExpand;
	}

	public TokenParam setMdmExpand(boolean theMdmExpand) {
		myMdmExpand = theMdmExpand;
		return this;
	}

	@Override
	String doGetQueryParameterQualifier() {
		// The modifier (e.g. ":text"), if any, is the qualifier.
		if (getModifier() != null) {
			return getModifier().getValue();
		}
		return null;
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	String doGetValueAsQueryToken(FhirContext theContext) {
		// A non-null system (even "") forces the "system|value" form.
		if (getSystem() != null) {
			if (getValue() != null) {
				return ParameterUtil.escape(StringUtils.defaultString(getSystem())) + '|' + ParameterUtil.escape(getValue());
			} else {
				return ParameterUtil.escape(StringUtils.defaultString(getSystem())) + '|';
			}
		}
		return ParameterUtil.escape(getValue());
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	void doSetValueAsQueryToken(FhirContext theContext, String theParamName, String theQualifier, String theParameter) {
		setModifier(null);
		if (theQualifier != null) {
			TokenParamModifier modifier = TokenParamModifier.forValue(theQualifier);
			setModifier(modifier);

			if (modifier == TokenParamModifier.TEXT) {
				// :text searches carry no system; the whole parameter is the value.
				setSystem(null);
				setValue(ParameterUtil.unescape(theParameter));
				return;
			}
		}

		setSystem(null);
		if (theParameter == null) {
			setValue(null);
		} else {
			// Split on the first non-escaped '|' into system|value.
			int barIndex = ParameterUtil.nonEscapedIndexOf(theParameter, '|');
			if (barIndex != -1) {
				setSystem(theParameter.substring(0, barIndex));
				setValue(ParameterUtil.unescape(theParameter.substring(barIndex + 1)));
			} else {
				setValue(ParameterUtil.unescape(theParameter));
			}
		}
	}

	/**
	 * Returns the modifier for this token
	 */
	public TokenParamModifier getModifier() {
		return myModifier;
	}

	public TokenParam setModifier(TokenParamModifier theModifier) {
		myModifier = theModifier;
		return this;
	}

	/**
	 * Returns the system for this token. Note that if a {@link #getModifier()} is being used, the entire value of the
	 * parameter will be placed in {@link #getValue() value} and this method will return <code>null</code>.
	 * <p>
	 * Also note that this value may be <code>null</code> or <code>""</code> (empty string) and that
	 * each of these have a different meaning. When a token is passed on a URL and it has no
	 * vertical bar (often meaning "return values that match the given code in any codesystem")
	 * this method will return <code>null</code>. When a token is passed on a URL and it has
	 * a vertical bar but nothing before the bar (often meaning "return values that match the
	 * given code but that have no codesystem") this method will return <code>""</code>
	 * </p>
	 */
	public String getSystem() {
		return mySystem;
	}

	public TokenParam setSystem(String theSystem) {
		mySystem = theSystem;
		return this;
	}

	/**
	 * Returns the value for the token (generally the value to the right of the
	 * vertical bar on the URL)
	 */
	public String getValue() {
		return myValue;
	}

	public TokenParam setValue(String theValue) {
		myValue = theValue;
		return this;
	}

	/**
	 * Returns this token's system and value wrapped in a coding.
	 */
	public InternalCodingDt getValueAsCoding() {
		return new InternalCodingDt(mySystem, myValue);
	}

	/**
	 * Returns the value, or <code>""</code> if the value is null.
	 */
	public String getValueNotNull() {
		return defaultString(myValue);
	}

	public boolean isEmpty() {
		return StringUtils.isBlank(mySystem) && StringUtils.isBlank(myValue) && getMissing() == null;
	}

	/**
	 * Returns true if {@link #getModifier()} returns {@link TokenParamModifier#TEXT}
	 */
	public boolean isText() {
		return myModifier == TokenParamModifier.TEXT;
	}

	/**
	 * @deprecated Use {@link #setModifier(TokenParamModifier)} instead
	 */
	@Deprecated
	public TokenParam setText(boolean theText) {
		if (theText) {
			myModifier = TokenParamModifier.TEXT;
		} else {
			myModifier = null;
		}
		return this;
	}

	@Override
	public String toString() {
		ToStringBuilder builder = new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE);
		builder.append("system", defaultString(getSystem()));
		if (myModifier != null) {
			builder.append(":" + myModifier.getValue());
		}
		builder.append("value", getValue());
		if (getMissing() != null) {
			builder.append(":missing", getMissing());
		}
		return builder.toString();
	}

	@Override
	public boolean equals(Object theO) {
		// Equality is based on modifier, system and value (not myMdmExpand or missing).
		if (this == theO) {
			return true;
		}
		if (theO == null || getClass() != theO.getClass()) {
			return false;
		}
		TokenParam that = (TokenParam) theO;
		EqualsBuilder b = new EqualsBuilder();
		b.append(myModifier, that.myModifier);
		b.append(mySystem, that.mySystem);
		b.append(myValue, that.myValue);
		return b.isEquals();
	}

	@Override
	public int hashCode() {
		HashCodeBuilder b = new HashCodeBuilder(17, 37);
		b.append(myModifier);
		b.append(mySystem);
		b.append(myValue);
		return b.toHashCode();
	}

	// Extracts the raw string from a UriDt system element.
	private static String toSystemValue(UriDt theSystem) {
		return theSystem.getValueAsString();
	}
}
| |
/**********************************************************************
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
**********************************************************************/
/***************************************************
* A TMUDF that executes a generic JDBC query
* and returns the result of the one SQL statement
* in the list that produces results as a table-valued
* output
*
* Invocation (all arguments are strings):
*
* select ... from udf(JDBC(
* <name of JDBC driver jar>, // file name of the JDBC driver jar, stored
* // in $TRAF_VAR/udr/public/external_libs
* <name of JDBC driver class in the jar>,
* <connection string>,
* <user name>,
* <password>,
* <statement_type>,
* <sql statement 1>
* [ , <sql statements 2 ...n> ] )) ...
*
* The first 7 arguments are required and must be
* string literals that are available at compile
* time.
* Statement type:
* 'source': This statement produces a result
* (only type allowed at this time)
* (may support "target" to insert
* into a table via JDBC later)
*
* Note that only one of the SQL statements can be
* a select or other result-producing statements.
* The others can perform setup and cleanup
* operations, if necessary (e.g. create table,
* insert, select, drop table).
*
* For an example, see file
* core/sql/regress/udr/TEST002.
***************************************************/
package org.trafodion.libmgmt;
import org.trafodion.sql.udr.*;
import java.sql.*;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Vector;
import java.lang.Math;
import java.util.Properties;
import java.util.logging.Logger;
/**
 * Table-mapping UDF (TMUDF) that connects to an external database via JDBC,
 * runs a list of SQL statements, and returns the result set of the single
 * result-producing statement as its table-valued output.
 *
 * <p>The first 7 arguments (driver jar, driver class, connection string,
 * user name, password, statement type, first SQL statement) are required and
 * must be compile-time string constants; see the file header for details.
 */
class JDBCUDR extends UDR
{
    // class used to connect, both at compile and at runtime
    static class JdbcConnectionInfo
    {
        String driverJar_;
        String driverClassName_;
        String connectionString_;
        String username_;
        String password_;
        boolean debug_;

        Connection conn_;

        public void setJar(String jar)
        { driverJar_ = jar; }
        public void setClass(String className)
        { driverClassName_ = className; }
        public void setConnString(String connString)
        { connectionString_ = connString; }
        public void setUsername(String userName)
        { username_ = userName; }
        public void setPassword(String password)
        { password_ = password; }
        public void setDebug(boolean debug) { debug_ = debug; }

        /**
         * Loads the JDBC driver jar from the sandboxed external-libs
         * directory, registers it and opens a connection.
         *
         * @return the newly opened connection (also cached in conn_)
         * @throws UDRException if the jar escapes the sandbox, the driver is
         *         the (disallowed) Trafodion T2 driver, the class cannot be
         *         loaded, or the connect itself fails
         */
        public Connection connect() throws UDRException
        {
            try {
                Path driverJarPath = Paths.get(driverJar_);

                // for security reasons, we sandbox the allowed driver jars
                // into $TRAF_VAR/udr/public/external_libs
                driverJarPath = driverJarPath.normalize();
                if (driverJarPath.isAbsolute())
                {
                    if (! driverJarPath.startsWith(
                              LmUtility.getSandboxRootForUser(null)))
                        throw new UDRException(
                            38010,
                            "The jar name of the JDBC driver must be a name relative to %s, got %s",
                            LmUtility.getSandboxRootForUser(null).toString(),
                            driverJar_);
                }
                else
                    driverJarPath = LmUtility.getExternalLibsDirForUser(null).resolve(
                        driverJarPath);

                // for security reasons we also reject the Trafodion T2
                // driver (check both class name and URL)
                if (driverClassName_.equals("org.apache.trafodion.jdbc.t2.T2Driver"))
                    throw new UDRException(
                        38012,
                        "This UDF does not support the Trafodion T2 driver class %s",
                        driverClassName_);
                if (LmT2Driver.checkURL(connectionString_))
                    throw new UDRException(
                        38013,
                        "This UDF does not support the Trafodion T2 driver URL %s",
                        connectionString_);

                // Create a class loader that can access the jar file
                // specified by the caller. Note that this is only needed
                // because the JDBC UDR is a predefined UDR and is loaded
                // by the standard class loader. If it were a regular UDR,
                // it would have been loaded by LmClassLoader and we would
                // not need to create an LmClassLoader here.
                LmClassLoader jdbcJarLoader = LmUtility.createClassLoader(
                    driverJarPath.toString(),0);

                Driver d = (Driver) Class.forName(driverClassName_,
                                                  true,
                                                  jdbcJarLoader).newInstance();

                // go through an intermediary driver, since the DriverManager
                // will not accept classes that are not loaded by the default
                // class loader
                DriverManager.registerDriver(new URLDriver(d));
                conn_ = DriverManager.getConnection(connectionString_,
                                                    username_,
                                                    password_);
                return conn_;
            }
            catch (ClassNotFoundException cnf) {
                throw new UDRException(
                    38020,
                    "JDBC driver class %s not found. Please make sure the JDBC driver jar %s is stored in %s. Message: %s",
                    driverClassName_,
                    driverJar_,
                    LmUtility.getSandboxRootForUser(null).toString(),
                    cnf.getMessage());
            }
            catch (SQLException se) {
                throw new UDRException(
                    38020,
                    "SQL exception during connect. Message: %s",
                    se.getMessage());
            }
            catch (Exception e) {
                if (debug_)
                {
                    System.out.println("Debug: Exception during connect:");
                    try { e.printStackTrace(System.out); }
                    catch (Exception e2) {}
                }
                throw new UDRException(
                    38020,
                    "Exception during connect: %s",
                    e.getMessage());
            }
        }

        public Connection getConnection() { return conn_; }

        public void disconnect() throws SQLException
        {
            conn_.close();
            conn_ = null;
        }
    };

    // list of SQL statements to execute
    static class SQLStatementInfo
    {
        // list of SQL statements to execute
        Vector<String> sqlStrings_;

        // which of the above is the one that
        // produces the table-valued result?
        int resultStatementIndex_;

        // prepared result-producing statement
        PreparedStatement resultStatement_;

        SQLStatementInfo()
        {
            sqlStrings_ = new Vector<String>();
            resultStatementIndex_ = -1;
        }

        void addStatementText(String sqlText)
        {
            sqlStrings_.add(sqlText);
        }

        void addResultProducingStatement(PreparedStatement preparedStmt,
                                         int resultStatementIndex)
        {
            resultStatement_ = preparedStmt;
            resultStatementIndex_ = resultStatementIndex;
        }

        String getStatementText(int ix) { return sqlStrings_.get(ix); }
        PreparedStatement getResultStatement(){ return resultStatement_; }
        int getNumStatements() { return sqlStrings_.size(); }
        int getResultStatementIndex() { return resultStatementIndex_; }
    };

    // Define data that gets passed between compiler phases
    static class JdbcCompileTimeData extends UDRWriterCompileTimeData
    {
        JdbcConnectionInfo jci_;
        SQLStatementInfo sqi_;

        JdbcCompileTimeData()
        {
            jci_ = new JdbcConnectionInfo();
            sqi_ = new SQLStatementInfo();
        }
    };

    // Thin delegating wrapper so a driver loaded by a non-default class
    // loader can be registered with the DriverManager.
    static class URLDriver implements Driver {
        private Driver driver_;

        URLDriver(Driver d) { driver_ = d; }
        public boolean acceptsURL(String u) throws SQLException {
            return driver_.acceptsURL(u);
        }
        public Connection connect(String u, Properties p) throws SQLException {
            return driver_.connect(u, p);
        }
        public int getMajorVersion() {
            return driver_.getMajorVersion();
        }
        public int getMinorVersion() {
            return driver_.getMinorVersion();
        }
        public DriverPropertyInfo[] getPropertyInfo(String u, Properties p) throws SQLException {
            return driver_.getPropertyInfo(u, p);
        }
        public boolean jdbcCompliant() {
            return driver_.jdbcCompliant();
        }
        public Logger getParentLogger() throws SQLFeatureNotSupportedException {
            return driver_.getParentLogger();
        }
    }

    JdbcConnectionInfo getConnectionInfo(UDRInvocationInfo info) throws UDRException
    {
        return ((JdbcCompileTimeData) info.getUDRWriterCompileTimeData()).jci_;
    }

    SQLStatementInfo getSQLStatementInfo(UDRInvocationInfo info) throws UDRException
    {
        return ((JdbcCompileTimeData) info.getUDRWriterCompileTimeData()).sqi_;
    }

    // default constructor
    public JDBCUDR()
    {}

    /**
     * Validates and records the scalar input parameters. Used both at compile
     * time and at runtime; at compile time it additionally prepares all SQL
     * statements, verifies exactly one produces rows, and derives the
     * table-valued output columns from that statement's metadata.
     */
    private void handleInputParams(UDRInvocationInfo info,
                                   JdbcConnectionInfo jci,
                                   SQLStatementInfo sqi,
                                   boolean isCompileTime)
        throws UDRException
    {
        int numInParams = info.par().getNumColumns();

        // Right now we don't support table inputs
        if (isCompileTime && info.getNumTableInputs() != 0)
            throw new UDRException(
                38300,
                "%s must be called with no table-valued inputs",
                info.getUDRName());

        if (numInParams < 7)
            throw new UDRException(
                38310,
                "Expecting at least 7 parameters for %s UDR",
                info.getUDRName());

        // loop over scalar input parameters
        for (int p=0; p<numInParams; p++)
        {
            if (isCompileTime &&
                ! info.par().isAvailable(p))
                throw new UDRException(
                    38320,
                    "Parameter %d of %s must be a compile time constant",
                    p+1,
                    info.getUDRName());

            String paramValue = info.par().getString(p);

            switch (p)
            {
            case 0:
                jci.setJar(paramValue);
                break;
            case 1:
                jci.setClass(paramValue);
                break;
            case 2:
                jci.setConnString(paramValue);
                break;
            case 3:
                jci.setUsername(paramValue);
                break;
            case 4:
                jci.setPassword(paramValue);
                break;
            case 5:
                // Only statement type supported
                // so far is select, we may support insert later
                if (!paramValue.equalsIgnoreCase("source"))
                    throw new UDRException(
                        38330,
                        "The only statement type supported so far is 'source' in parameter 6 of %s",
                        info.getUDRName());
                break;
            default:
                // SQL statement (there could be multiple)
                sqi.addStatementText(paramValue);
                break;
            }

            if (isCompileTime)
                // add the actual parameter as a formal parameter
                // (the formal parameter list is initially empty)
                info.addFormalParameter(info.par().getColumn(p));
        }

        jci.setDebug(info.getDebugFlags() != 0);

        // Prepare each provided statement. We will verify that
        // only one of these statements produces result rows,
        // which will become our table-valued output.
        int numSQLStatements = sqi.getNumStatements();

        // sanity check
        // NOTE(review): error codes 383400/383500 are 6 digits while every
        // other code in this file is 5 digits (38xxx) -- probably typos for
        // 38340/38350, kept as-is for compatibility.
        if (numSQLStatements != numInParams-6)
            throw new UDRException(383400, "internal error");
        if (numSQLStatements < 1)
            throw new UDRException(383500, "At least one SQL statement must be given in parameters 6 and following");

        if (isCompileTime)
        {
            // walk through all statements, check whether they are
            // valid by preparing them, and determine which one is
            // the one that generates a result set
            String currentStmtText = "";

            try
            {
                jci.connect();
                for (int s=0; s<numSQLStatements; s++)
                {
                    currentStmtText = sqi.getStatementText(s);
                    PreparedStatement preparedStmt =
                        jci.getConnection().prepareStatement(currentStmtText);
                    ParameterMetaData pmd = preparedStmt.getParameterMetaData();

                    // statements with input parameters are not supported
                    if (pmd != null && pmd.getParameterCount() != 0)
                        throw new UDRException(
                            38360,
                            "Statement %s requires %d input parameters, which is not supported",
                            currentStmtText, pmd.getParameterCount());

                    ResultSetMetaData desc = preparedStmt.getMetaData();
                    int numResultCols = desc.getColumnCount();

                    if (numResultCols > 0)
                    {
                        if (sqi.getResultStatementIndex() >= 0)
                            throw new UDRException(
                                38370,
                                "More than one of the statements provided produce output, this is not supported (%d and %d)",
                                sqi.getResultStatementIndex()+1,
                                s+1);

                        // we found the statement that is producing the result
                        sqi.addResultProducingStatement(preparedStmt, s);

                        // now add the output columns
                        for (int c=0; c<numResultCols; c++)
                        {
                            String colName = desc.getColumnLabel(c+1);
                            TypeInfo udrType = getUDRTypeFromJDBCType(desc, c+1);
                            info.out().addColumn(new ColumnInfo(colName, udrType));
                        }
                    }
                }
                jci.disconnect();
            }
            catch (SQLException e)
            {
                throw new UDRException(
                    38380,
                    "SQL Exception when preparing SQL statement %s. Exception text: %s",
                    currentStmtText, e.getMessage());
            }
        }
    }

    /**
     * Maps the JDBC type of result column {@code colNumOneBased} (1-based) to
     * the closest Trafodion UDR {@link TypeInfo}. Unsupported binary types are
     * mapped to VARCHAR (ISO88591); large character types to VARCHAR (UCS2).
     */
    TypeInfo getUDRTypeFromJDBCType(ResultSetMetaData desc,
                                    int colNumOneBased) throws UDRException
    {
        TypeInfo result;

        // cap for character/binary column lengths
        final int maxLength = 100000;
        int colJDBCType;

        // the ingredients to make a UDR type and their default values
        TypeInfo.SQLTypeCode sqlType = TypeInfo.SQLTypeCode.UNDEFINED_SQL_TYPE;
        int length = 0;
        boolean nullable = false;
        int scale = 0;
        TypeInfo.SQLCharsetCode charset = TypeInfo.SQLCharsetCode.CHARSET_UCS2;
        TypeInfo.SQLIntervalCode intervalCode = TypeInfo.SQLIntervalCode.UNDEFINED_INTERVAL_CODE;
        int precision = 0;
        TypeInfo.SQLCollationCode collation = TypeInfo.SQLCollationCode.SYSTEM_COLLATION;

        try {
            colJDBCType = desc.getColumnType(colNumOneBased);
            nullable = (desc.isNullable(colNumOneBased) != ResultSetMetaData.columnNoNulls);

            // map the JDBC type to a Trafodion UDR parameter type
            switch (colJDBCType)
            {
            case java.sql.Types.SMALLINT:
            case java.sql.Types.TINYINT:
            case java.sql.Types.BOOLEAN:
                if (desc.isSigned(colNumOneBased))
                    sqlType = TypeInfo.SQLTypeCode.SMALLINT;
                else
                    sqlType = TypeInfo.SQLTypeCode.SMALLINT_UNSIGNED;
                break;

            case java.sql.Types.INTEGER:
                if (desc.isSigned(colNumOneBased))
                    sqlType = TypeInfo.SQLTypeCode.INT;
                else
                    sqlType = TypeInfo.SQLTypeCode.INT_UNSIGNED;
                break;

            case java.sql.Types.BIGINT:
                sqlType = TypeInfo.SQLTypeCode.LARGEINT;
                break;

            case java.sql.Types.DECIMAL:
            case java.sql.Types.NUMERIC:
                if (desc.isSigned(colNumOneBased))
                    sqlType = TypeInfo.SQLTypeCode.NUMERIC;
                else
                    sqlType = TypeInfo.SQLTypeCode.NUMERIC_UNSIGNED;
                precision = desc.getPrecision(colNumOneBased);
                scale = desc.getScale(colNumOneBased);
                break;

            case java.sql.Types.REAL:
                sqlType = TypeInfo.SQLTypeCode.REAL;
                break;

            case java.sql.Types.DOUBLE:
            case java.sql.Types.FLOAT:
                sqlType = TypeInfo.SQLTypeCode.DOUBLE_PRECISION;
                break;

            case java.sql.Types.CHAR:
            case java.sql.Types.NCHAR:
                sqlType = TypeInfo.SQLTypeCode.CHAR;
                length  = Math.min(desc.getPrecision(colNumOneBased), maxLength);
                charset = TypeInfo.SQLCharsetCode.CHARSET_UCS2;
                break;

            case java.sql.Types.VARCHAR:
            case java.sql.Types.NVARCHAR:
                sqlType = TypeInfo.SQLTypeCode.VARCHAR;
                length  = Math.min(desc.getPrecision(colNumOneBased), maxLength);
                charset = TypeInfo.SQLCharsetCode.CHARSET_UCS2;
                break;

            case java.sql.Types.DATE:
                sqlType = TypeInfo.SQLTypeCode.DATE;
                break;

            case java.sql.Types.TIME:
                sqlType = TypeInfo.SQLTypeCode.TIME;
                break;

            case java.sql.Types.TIMESTAMP:
                sqlType = TypeInfo.SQLTypeCode.TIMESTAMP;
                scale   = 3;
                break;

            // BLOB - not supported yet, map to varchar
            // case java.sql.Types.BLOB:
            // sqlType = TypeInfo.SQLTypeCode.BLOB;
            // break;

            // CLOB - not supported yet, map to varchar
            // case java.sql.Types.CLOB:
            // sqlType = TypeInfo.SQLTypeCode.CLOB;
            // break;

            case java.sql.Types.ARRAY:
            case java.sql.Types.BINARY:
            case java.sql.Types.BIT:
            case java.sql.Types.BLOB:
            case java.sql.Types.DATALINK:
            case java.sql.Types.DISTINCT:
            case java.sql.Types.JAVA_OBJECT:
            case java.sql.Types.LONGVARBINARY:
            case java.sql.Types.NULL:
            case java.sql.Types.OTHER:
            case java.sql.Types.REF:
            case java.sql.Types.STRUCT:
            case java.sql.Types.VARBINARY:
                // these types produce a binary result, represented
                // as varchar(n) character set iso88591
                sqlType = TypeInfo.SQLTypeCode.VARCHAR;
                length  = Math.min(desc.getPrecision(colNumOneBased), maxLength);
                charset = TypeInfo.SQLCharsetCode.CHARSET_ISO88591;
                break;

            case java.sql.Types.LONGVARCHAR:
            case java.sql.Types.LONGNVARCHAR:
            case java.sql.Types.CLOB:
            case java.sql.Types.NCLOB:
            case java.sql.Types.ROWID:
            case java.sql.Types.SQLXML:
                // these types produce a varchar(n) character set utf8 result
                sqlType = TypeInfo.SQLTypeCode.VARCHAR;
                length  = Math.min(desc.getPrecision(colNumOneBased), maxLength);
                charset = TypeInfo.SQLCharsetCode.CHARSET_UCS2;
                break;
            }
        } catch (SQLException e) {
            // BUG FIX: the format string previously lacked the %s placeholder
            // for the exception message and misspelled "determining".
            throw new UDRException(
                38500,
                "Error determining the type of output column %d: %s",
                colNumOneBased,
                e.getMessage());
        }

        result = new TypeInfo(
            sqlType,
            length,
            nullable,
            scale,
            charset,
            intervalCode,
            precision,
            collation);

        return result;
    }

    // determine output columns dynamically at compile time
    @Override
    public void describeParamsAndColumns(UDRInvocationInfo info)
        throws UDRException
    {
        // create an object with common info for this
        // UDF invocation that we will carry through the
        // compilation phases
        info.setUDRWriterCompileTimeData(new JdbcCompileTimeData());

        // retrieve the compile time data, we will do this for
        // every compile phase
        JdbcConnectionInfo jci = getConnectionInfo(info);
        SQLStatementInfo sqi = getSQLStatementInfo(info);

        // process input parameters
        handleInputParams(info, jci, sqi, true);
    }

    // override the runtime method
    @Override
    public void processData(UDRInvocationInfo info,
                            UDRPlanInfo plan)
        throws UDRException
    {
        // rebuild connection and statement info from the input
        // parameters (again, now at runtime)
        JdbcConnectionInfo jci = new JdbcConnectionInfo();
        SQLStatementInfo sqi = new SQLStatementInfo();
        int numCols = info.out().getNumColumns();

        handleInputParams(info, jci, sqi, false);

        int numSQLStatements = sqi.getNumStatements();
        int numSQLResultSets = 0;
        String stmtText = null;

        try {
            Connection conn = jci.connect();
            Statement stmt = conn.createStatement();

            for (int s=0; s<numSQLStatements; s++)
            {
                stmtText = sqi.getStatementText(s);
                boolean hasResultSet = stmt.execute(stmtText);

                if (hasResultSet)
                {
                    ResultSet rs = stmt.getResultSet();
                    numSQLResultSets++;

                    if (numSQLResultSets > 1)
                        throw new UDRException(
                            38700,
                            "More than one result set returned by UDF %s",
                            info.getUDRName());

                    if (rs.getMetaData().getColumnCount() != numCols)
                        throw new UDRException(
                            38702,
                            "Number of columns returned by UDF %s (%d) differs from the number determined at compile time (%d)",
                            info.getUDRName(),
                            rs.getMetaData().getColumnCount(),
                            numCols);

                    while (rs.next())
                    {
                        // copy each column of the JDBC row into the UDR
                        // output row, using the type subclass to pick the
                        // right accessor
                        for (int c=0; c<numCols; c++)
                        {
                            TypeInfo typ = info.out().getColumn(c).getType();
                            switch (typ.getSQLTypeSubClass())
                            {
                            case FIXED_CHAR_TYPE:
                            case VAR_CHAR_TYPE:
                                info.out().setString(c, rs.getString(c+1));
                                break;

                            case EXACT_NUMERIC_TYPE:
                                info.out().setLong(c, rs.getLong(c+1));
                                break;

                            case APPROXIMATE_NUMERIC_TYPE:
                                info.out().setDouble(c, rs.getDouble(c+1));
                                break;

                            case DATE_TYPE:
                                info.out().setTime(c, rs.getDate(c+1));
                                break;

                            case TIME_TYPE:
                                info.out().setTime(c, rs.getTime(c+1));
                                break;

                            case TIMESTAMP_TYPE:
                                info.out().setTime(c, rs.getTimestamp(c+1));
                                break;

                            case LOB_SUB_CLASS:
                                throw new UDRException(38710, "LOB parameters not yet supported");

                            default:
                                throw new UDRException(38720, "Unexpected data type encountered");
                            } // switch

                            if (rs.wasNull())
                                info.out().setNull(c);
                        } // loop over columns

                        // produce a result row
                        emitRow(info);
                    } // loop over result rows
                } // statement produces a result set
            } // loop over statements

            jci.disconnect();
        } catch (SQLException e) {
            // BUG FIX: message previously said "preparing", but this path
            // executes statements at runtime.
            throw new UDRException(
                38730,
                "Error executing statement %s at runtime: %s",
                stmtText,
                e.getMessage());
        }
    }
};
| |
/*
* Copyright (c) 2016, WSO2 Inc. (http://wso2.com) All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wso2.andes.kernel.disruptor.inbound;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.andes.kernel.AMQPConstructStore;
import org.wso2.andes.kernel.AndesBinding;
import org.wso2.andes.kernel.AndesContext;
import org.wso2.andes.kernel.AndesContextInformationManager;
import org.wso2.andes.kernel.AndesContextStore;
import org.wso2.andes.kernel.AndesException;
import org.wso2.andes.kernel.router.AndesMessageRouter;
import org.wso2.andes.kernel.subscription.AndesSubscriptionManager;
import org.wso2.andes.kernel.subscription.StorageQueue;
import java.util.ArrayList;
import java.util.List;
/**
 * DB sync requests are handled through this event. On {@link #updateState()}
 * the broker state held in memory (message routers, queues, bindings and
 * subscriptions) is reconciled against the information in the database.
 */
public class InboundDBSyncRequestEvent implements AndesInboundStateEvent {

    // Logger is a constant; declared final (it was mutable before).
    private static final Log log = LogFactory.getLog(InboundDBSyncRequestEvent.class);

    /**
     * Type of this event
     */
    private static final String EVENT_TYPE = "DB_SYNC";

    /**
     * Store keeping control information of broker
     */
    private AndesContextStore andesContextStore;

    /**
     * Manager instance to modify message routers, queues, bindings etc
     */
    private AndesContextInformationManager contextInformationManager;

    /**
     * Manager instance to create/delete subscriptions
     */
    private AndesSubscriptionManager subscriptionManager;

    /**
     * AMQP specific artifact store
     */
    private AMQPConstructStore amqpConstructStore;

    /**
     * {@inheritDoc}
     */
    @Override
    public void updateState() throws AndesException {
        log.info("Running DB sync task.");
        reloadMessageRoutersFromDB();
        reloadQueuesFromDB();
        reloadBindingsFromDB();
        reloadSubscriptions();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String eventInfo() {
        return EVENT_TYPE;
    }

    /**
     * Prepare event for publishing
     *
     * @param contextStore store keeping control information of broker
     * @param amqpConstructStore AMQP specific artifact store
     * @param contextInformationManager manager instance to modify message routers, queues, bindings etc
     * @param subscriptionManager manager instance to create/delete subscriptions
     */
    public void prepareEvent(AndesContextStore contextStore, AMQPConstructStore amqpConstructStore,
                             AndesContextInformationManager contextInformationManager,
                             AndesSubscriptionManager subscriptionManager) {
        this.andesContextStore = contextStore;
        this.contextInformationManager = contextInformationManager;
        this.subscriptionManager = subscriptionManager;
        this.amqpConstructStore = amqpConstructStore;
    }

    /**
     * Reload message routers from DB. Routers stored in DB but missing from
     * memory are created; routers in memory but missing from DB are removed.
     *
     * @throws AndesException on an error syncing a message router
     */
    private void reloadMessageRoutersFromDB() throws AndesException {
        List<AndesMessageRouter> messageRoutersStored = andesContextStore.getAllMessageRoutersStored();
        // Defensive copy: never mutate the list owned by the registry
        // (the original code called removeAll on it directly).
        List<AndesMessageRouter> messageRoutersInMemory = new ArrayList<>(
                AndesContext.getInstance().getMessageRouterRegistry().getAllMessageRouters());
        List<AndesMessageRouter> copyOfMessageRoutersStored = new ArrayList<>(messageRoutersStored);

        // Stored in DB but not in memory: create them locally
        messageRoutersStored.removeAll(messageRoutersInMemory);
        for (AndesMessageRouter messageRouter : messageRoutersStored) {
            log.warn("Recovering node. Adding exchange " + messageRouter.toString());
            InboundExchangeSyncEvent exchangeCreateEvent =
                    new InboundExchangeSyncEvent(messageRouter.encodeAsString());
            exchangeCreateEvent.prepareForCreateExchangeSync(contextInformationManager);
            exchangeCreateEvent.updateState();
        }

        // In memory but not stored in DB: remove them locally
        messageRoutersInMemory.removeAll(copyOfMessageRoutersStored);
        for (AndesMessageRouter messageRouter : messageRoutersInMemory) {
            log.warn("Recovering node. Removing exchange " + messageRouter.toString());
            InboundExchangeSyncEvent exchangeDeleteEvent =
                    new InboundExchangeSyncEvent(messageRouter.encodeAsString());
            exchangeDeleteEvent.prepareForDeleteExchangeSync(contextInformationManager);
            exchangeDeleteEvent.updateState();
        }
    }

    /**
     * Reload queues from DB. Queues stored in DB but missing from memory are
     * created; queues in memory but missing from DB are removed.
     *
     * @throws AndesException on an error syncing a queue
     */
    private void reloadQueuesFromDB() throws AndesException {
        List<StorageQueue> queuesStored = andesContextStore.getAllQueuesStored();
        // Defensive copy: never mutate the list owned by the registry
        List<StorageQueue> queuesInMemory = new ArrayList<>(
                AndesContext.getInstance().getStorageQueueRegistry().getAllStorageQueues());
        List<StorageQueue> copyOfQueuesStored = new ArrayList<>(queuesStored);

        // Stored in DB but not in memory: create them locally
        queuesStored.removeAll(queuesInMemory);
        for (StorageQueue queue : queuesStored) {
            log.warn("Recovering node. Adding queue to queue registry " + queue.toString());
            InboundQueueSyncEvent queueCreateEvent = new InboundQueueSyncEvent(queue.encodeAsString());
            queueCreateEvent.prepareForSyncCreateQueue(contextInformationManager);
            queueCreateEvent.updateState();
        }

        // In memory but not stored in DB: remove them locally
        queuesInMemory.removeAll(copyOfQueuesStored);
        for (StorageQueue queue : queuesInMemory) {
            log.warn("Recovering node. Removing queue from queue registry " + queue.toString());
            InboundQueueSyncEvent queueDeleteEvent = new InboundQueueSyncEvent(queue.encodeAsString());
            queueDeleteEvent.prepareForSyncDeleteQueue(contextInformationManager);
            queueDeleteEvent.updateState();
        }
    }

    /**
     * Reload bindings from DB, exchange by exchange. Bindings stored in DB but
     * missing from memory are created; bindings in memory but missing from DB
     * are removed.
     *
     * @throws AndesException on an error syncing a binding
     */
    private void reloadBindingsFromDB() throws AndesException {
        List<AndesMessageRouter> routersStored = andesContextStore.getAllMessageRoutersStored();
        for (AndesMessageRouter messageRouter : routersStored) {
            List<AndesBinding> bindingsStored =
                    andesContextStore.getBindingsStoredForExchange(messageRouter.getName());
            // Defensive copy: never mutate the list owned by the construct store
            List<AndesBinding> inMemoryBindings = new ArrayList<>(
                    amqpConstructStore.getBindingsForExchange(messageRouter.getName()));
            List<AndesBinding> copyOfBindingsStored = new ArrayList<>(bindingsStored);

            // Stored in DB but not in memory: create them locally
            bindingsStored.removeAll(inMemoryBindings);
            for (AndesBinding binding : bindingsStored) {
                log.warn("Recovering node. Adding binding " + binding.toString());
                InboundBindingSyncEvent bindingCreateEvent =
                        new InboundBindingSyncEvent(binding.encodeAsString());
                bindingCreateEvent.prepareForAddBindingEvent(contextInformationManager);
                bindingCreateEvent.updateState();
            }

            // In memory but not stored in DB: remove them locally
            inMemoryBindings.removeAll(copyOfBindingsStored);
            for (AndesBinding binding : inMemoryBindings) {
                log.warn("Recovering node. removing binding " + binding.toString());
                InboundBindingSyncEvent bindingDeleteEvent =
                        new InboundBindingSyncEvent(binding.encodeAsString());
                bindingDeleteEvent.prepareForRemoveBinding(contextInformationManager);
                bindingDeleteEvent.updateState();
            }
        }
    }

    /**
     * Reload subscriptions from DB. Delegates entirely to the subscription
     * manager's own reconciliation logic.
     *
     * @throws AndesException on an error reloading subscriptions
     */
    private void reloadSubscriptions() throws AndesException {
        subscriptionManager.reloadSubscriptionsFromStorage();
    }
}
| |
/**
* Copyright (C) 2004-2011 Jive Software. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jivesoftware.spark.plugins.transfersettings;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Properties;
import java.util.StringTokenizer;
/**
 * Bean whose properties are the various preference settings for file transfer.
 * Settings can be persisted to and restored from a properties file in the
 * user's home directory via {@link #store()} and {@link #load()}.
 */
public class FileTransferSettings {

    // Blocked file extensions, stored as strings of the form *.{extension}
    private List<String> extensions = new ArrayList<>();

    // JIDs whose file transfer requests are rejected automatically
    private List<String> JIDs = new ArrayList<>();

    // Maximum transfer size in kilobytes; only enforced when checkSize is true
    private int kb;

    // Whether the maximum file size limit is enforced
    private boolean checkSize = false;

    // Canned rejection message; null or empty means no message is sent
    String cannedRejectionMessage;

    // Backing file used by load() and store()
    private static File BACKING_STORE = new File(System.getProperty("user.home") + "/.sparkExt.properties");

    /**
     * Returns a {@link List} of strings - one for each blocked file extension. Strings are in the form <tt>*.{extension}</tt>.
     * @return a {@link List} of blocked file extensions
     */
    public List<String> getBlockedExtensions(){
        return extensions;
    }

    /**
     * Sets the {@link List} of blocked file extensions.
     * @param extensions the {@link List} of blocked file extensions.
     */
    public void setBlockedExtensions(List<String> extensions){
        this.extensions = extensions;
    }

    /**
     * Returns a {@link List} of blocked JIDs. File transfers from users with those JIDs will be automatically rejected.
     * @return a {@link List} of blocked JIDs.
     */
    public List<String> getBlockedJIDs() {
        return JIDs;
    }

    /**
     * Sets the {@link List} of blocked JIDs.
     * @param JIDs the {@link List} of blocked JIDs.
     */
    public void setBlockedJIDS(List<String> JIDs){
        this.JIDs = JIDs;
    }

    /**
     * Returns the maximum file size in kilobytes for file transfers. If {@link #getCheckFileSize} returns true,
     * files larger than this maximum will not be accepted.
     * @return the maximum file size in kilobytes for file transfers.
     */
    public int getMaxFileSize(){
        return kb;
    }

    /**
     * Sets the maximum file size in kilobytes for file transfers.
     * @param kb the maximum file size in kilobytes for file transfers.
     */
    public void setMaxFileSize(int kb){
        this.kb = kb;
    }

    /**
     * Returns true if there is a maximum allowable file size for transfers.
     * @return true if there is a maximum allowable file size for transfers.
     */
    public boolean getCheckFileSize(){
        return checkSize;
    }

    /**
     * If set to true, files larger than the maximum file size as returned by {@link #getMaxFileSize}
     * will not be accepted.
     * @param checkSize true if size should be checked.
     */
    public void setCheckFileSize(boolean checkSize){
        this.checkSize = checkSize;
    }

    /**
     * Returns the text of a canned message sent to requestors whose file transfers were automatically rejected. If this
     * returns null or an empty string, no message will be sent.
     * @return the text of a canned message sent to requestors whose file transfers were automatically rejected.
     */
    public String getCannedRejectionMessage() {
        return cannedRejectionMessage;
    }

    /**
     * Sets the text of a canned message sent to requestors whose file transfers were automatically rejected. If set
     * to null or an empty string, no message will be sent.
     * @param cannedRejectionMessage the canned message text.
     */
    public void setCannedRejectionMessage(String cannedRejectionMessage) {
        this.cannedRejectionMessage = cannedRejectionMessage;
    }

    /**
     * Loads the properties from the filesystem. Missing entries leave the
     * corresponding defaults untouched; an unparsable size value is ignored.
     */
    public void load() {
        if (!BACKING_STORE.exists()) {
            return;
        }
        Properties props = new Properties();
        // try-with-resources closes the stream even if parsing fails
        // (the original code leaked the FileInputStream)
        try (FileInputStream in = new FileInputStream(BACKING_STORE)) {
            props.load(in);

            String types = props.getProperty("extensions");
            if (types != null) {
                this.extensions = convertSettingsStringToList(types);
            }
            String users = props.getProperty("jids");
            if (users != null) {
                this.JIDs = convertSettingsStringToList(users);
            }
            String ignore = props.getProperty("checkFileSize");
            if (ignore != null) {
                this.checkSize = Boolean.parseBoolean(ignore);
            }
            String maxSize = props.getProperty("maxSize");
            if (maxSize != null) {
                try {
                    this.kb = Integer.parseInt(maxSize);
                } catch (NumberFormatException nfe) {
                    // Corrupt value in the backing store; keep the previous size
                    // instead of letting load() throw.
                    System.out.println("Ignoring invalid maxSize value: " + maxSize);
                }
            }
            this.cannedRejectionMessage = props.getProperty("cannedResponse");
        } catch (IOException ioe) {
            System.out.println("Error Loading properties from Filesystem" + ioe);
            //TODO handle error better.
        }
    }

    /**
     * Saves the properties to the filesystem.
     */
    public void store() {
        Properties props = new Properties();
        props.setProperty("extensions", convertSettingsListToString(extensions));
        props.setProperty("jids", convertSettingsListToString(JIDs));
        props.setProperty("checkFileSize", Boolean.toString(checkSize));
        props.setProperty("maxSize", Integer.toString(kb));
        if (cannedRejectionMessage != null) {
            props.setProperty("cannedResponse", cannedRejectionMessage);
        }
        // try-with-resources flushes and closes the stream even on error
        // (the original code leaked the FileOutputStream)
        try (FileOutputStream out = new FileOutputStream(BACKING_STORE)) {
            props.store(out, BACKING_STORE.getAbsolutePath());
        } catch (IOException ioe) {
            System.err.println(ioe);
        }
    }

    /**
     * Converts a list of strings to a single comma separated string
     * @param settings the {@link List} of strings.
     * @return a comma separated string.
     */
    public static String convertSettingsListToString(List<String> settings) {
        // String.join replaces the hand-rolled iterator/StringBuilder loop
        return String.join(",", settings);
    }

    /**
     * Converts the supplied string to a {@link List} of strings. The input is split
     * on the delimiters ',' ';' '\n' '\t' '\r' and ' '.
     * @param settings the string to convert.
     * @return the resultant {@link List}.
     */
    public static List<String> convertSettingsStringToList(String settings) {
        List<String> list = new ArrayList<>();
        StringTokenizer tokenizer = new StringTokenizer(settings, ",;\n\t\r ");
        while (tokenizer.hasMoreTokens()) {
            list.add(tokenizer.nextToken());
        }
        return list;
    }
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.redshift.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
 * <p>
 * Describes the default cluster parameters for a parameter group family.
 * </p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/redshift-2012-12-01/DefaultClusterParameters" target="_top">AWS
 *      API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DefaultClusterParameters implements Serializable, Cloneable {

    /** Name of the cluster parameter group family the engine defaults apply to. */
    private String parameterGroupFamily;

    /**
     * Pagination marker: pass it back in the <code>Marker</code> request
     * parameter to fetch the next page; empty means all records were returned.
     */
    private String marker;

    /** The list of cluster default parameters. */
    private com.amazonaws.internal.SdkInternalList<Parameter> parameters;

    /**
     * Sets the name of the cluster parameter group family to which the engine default parameters apply.
     *
     * @param parameterGroupFamily
     *        the parameter group family name.
     */
    public void setParameterGroupFamily(String parameterGroupFamily) {
        this.parameterGroupFamily = parameterGroupFamily;
    }

    /**
     * Returns the name of the cluster parameter group family to which the engine default parameters apply.
     *
     * @return the parameter group family name.
     */
    public String getParameterGroupFamily() {
        return this.parameterGroupFamily;
    }

    /**
     * Fluent variant of {@link #setParameterGroupFamily(String)}.
     *
     * @param parameterGroupFamily
     *        the parameter group family name.
     * @return this object, for method chaining.
     */
    public DefaultClusterParameters withParameterGroupFamily(String parameterGroupFamily) {
        this.parameterGroupFamily = parameterGroupFamily;
        return this;
    }

    /**
     * Sets the pagination marker. A value returned in a response can be passed back in the
     * <code>Marker</code> parameter of a subsequent request to retrieve the next set of records;
     * an empty marker means all response records have been retrieved.
     *
     * @param marker
     *        the pagination marker.
     */
    public void setMarker(String marker) {
        this.marker = marker;
    }

    /**
     * Returns the pagination marker. Pass it back in the <code>Marker</code> parameter of a
     * subsequent request to retrieve the next set of records; an empty marker means all response
     * records have been retrieved.
     *
     * @return the pagination marker.
     */
    public String getMarker() {
        return this.marker;
    }

    /**
     * Fluent variant of {@link #setMarker(String)}.
     *
     * @param marker
     *        the pagination marker.
     * @return this object, for method chaining.
     */
    public DefaultClusterParameters withMarker(String marker) {
        this.marker = marker;
        return this;
    }

    /**
     * Returns the list of cluster default parameters, lazily creating an empty list on first
     * access so this never returns null.
     *
     * @return the list of cluster default parameters.
     */
    public java.util.List<Parameter> getParameters() {
        if (parameters == null) {
            parameters = new com.amazonaws.internal.SdkInternalList<Parameter>();
        }
        return parameters;
    }

    /**
     * Sets the list of cluster default parameters. The supplied collection is copied; passing
     * null clears the list.
     *
     * @param parameters
     *        the list of cluster default parameters.
     */
    public void setParameters(java.util.Collection<Parameter> parameters) {
        this.parameters = (parameters == null)
                ? null
                : new com.amazonaws.internal.SdkInternalList<Parameter>(parameters);
    }

    /**
     * Appends the given values to the list of cluster default parameters.
     * <p>
     * <b>NOTE:</b> this appends to the existing list (if any); use
     * {@link #setParameters(java.util.Collection)} or {@link #withParameters(java.util.Collection)}
     * to replace the existing values.
     * </p>
     *
     * @param parameters
     *        the parameters to append.
     * @return this object, for method chaining.
     */
    public DefaultClusterParameters withParameters(Parameter... parameters) {
        if (this.parameters == null) {
            this.parameters = new com.amazonaws.internal.SdkInternalList<Parameter>(parameters.length);
        }
        java.util.Collections.addAll(this.parameters, parameters);
        return this;
    }

    /**
     * Replaces the list of cluster default parameters.
     *
     * @param parameters
     *        the list of cluster default parameters.
     * @return this object, for method chaining.
     */
    public DefaultClusterParameters withParameters(java.util.Collection<Parameter> parameters) {
        setParameters(parameters);
        return this;
    }

    /**
     * Returns a string representation of this object, useful for testing and debugging.
     * Sensitive data would be redacted with a placeholder value.
     *
     * @return a string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getParameterGroupFamily() != null) {
            sb.append("ParameterGroupFamily: ").append(getParameterGroupFamily()).append(",");
        }
        if (getMarker() != null) {
            sb.append("Marker: ").append(getMarker()).append(",");
        }
        if (getParameters() != null) {
            sb.append("Parameters: ").append(getParameters());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof is intentional (not getClass) to match the generated
        // SDK equality semantics; it also rejects null.
        if (!(obj instanceof DefaultClusterParameters)) {
            return false;
        }
        DefaultClusterParameters that = (DefaultClusterParameters) obj;
        return java.util.Objects.equals(getParameterGroupFamily(), that.getParameterGroupFamily())
                && java.util.Objects.equals(getMarker(), that.getMarker())
                && java.util.Objects.equals(getParameters(), that.getParameters());
    }

    @Override
    public int hashCode() {
        // Objects.hash uses the same 31-based accumulation as the generated
        // prime-multiplier loop, so values are unchanged.
        return java.util.Objects.hash(getParameterGroupFamily(), getMarker(), getParameters());
    }

    @Override
    public DefaultClusterParameters clone() {
        try {
            return (DefaultClusterParameters) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }
}
| |
/*
* Copyright 2011 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.android.apps.mytracks.content;
import android.content.SharedPreferences;
import android.content.SharedPreferences.OnSharedPreferenceChangeListener;
import android.database.ContentObserver;
import android.os.Handler;
import android.util.Log;
import java.util.EnumSet;
import java.util.Set;
/**
 * Data source manager. Creates the content observers and the preference
 * listener, and manages their registration with {@link DataSource}. The
 * registered observers/listeners forward change notifications to the
 * supplied {@link DataSourceListener}.
 *
 * @author Rodrigo Damazio
 */
public class DataSourceManager {

    private static final String TAG = DataSourceManager.class.getSimpleName();

    /**
     * Observer notified when the tracks table changes.
     *
     * @author Jimmy Shih
     */
    private class TracksTableObserver extends ContentObserver {

        public TracksTableObserver() {
            super(handler);
        }

        @Override
        public void onChange(boolean selfChange) {
            dataSourceListener.notifyTracksTableUpdated();
        }
    }

    /**
     * Observer notified when the waypoints table changes.
     *
     * @author Jimmy Shih
     */
    private class WaypointsTableObserver extends ContentObserver {

        public WaypointsTableObserver() {
            super(handler);
        }

        @Override
        public void onChange(boolean selfChange) {
            dataSourceListener.notifyWaypointsTableUpdated();
        }
    }

    /**
     * Observer notified when the track points table changes.
     *
     * @author Jimmy Shih
     */
    private class TrackPointsTableObserver extends ContentObserver {

        public TrackPointsTableObserver() {
            super(handler);
        }

        @Override
        public void onChange(boolean selfChange) {
            dataSourceListener.notifyTrackPointsTableUpdated();
        }
    }

    /**
     * Listener forwarding shared preference changes.
     *
     * @author Jimmy Shih
     */
    private class PreferenceListener implements OnSharedPreferenceChangeListener {

        @Override
        public void onSharedPreferenceChanged(SharedPreferences sharedPreferences, String key) {
            dataSourceListener.notifyPreferenceChanged(key);
        }
    }

    private final DataSource dataSource;
    private final DataSourceListener dataSourceListener;

    // Listener types currently registered with the data source
    private final Set<TrackDataType> registeredListeners = EnumSet.noneOf(TrackDataType.class);
    private final Handler handler;
    private final TracksTableObserver tracksTableObserver;
    private final WaypointsTableObserver waypointsTableObserver;
    private final TrackPointsTableObserver trackPointsTableObserver;
    private final PreferenceListener preferenceListener;

    public DataSourceManager(DataSource dataSource, DataSourceListener dataSourceListener) {
        this.dataSource = dataSource;
        this.dataSourceListener = dataSourceListener;
        handler = new Handler();
        tracksTableObserver = new TracksTableObserver();
        waypointsTableObserver = new WaypointsTableObserver();
        trackPointsTableObserver = new TrackPointsTableObserver();
        preferenceListener = new PreferenceListener();
    }

    /**
     * Updates listeners with data source.
     *
     * @param listeners the listeners
     */
    public void updateListeners(EnumSet<TrackDataType> listeners) {
        EnumSet<TrackDataType> neededListeners = EnumSet.copyOf(listeners);

        /*
     * SAMPLED_OUT_TRACK_POINTS_TABLE shares the internal listener used for
     * SAMPLED_IN_TRACK_POINTS_TABLE, so fold the former into the latter.
     */
        if (neededListeners.remove(TrackDataType.SAMPLED_OUT_TRACK_POINTS_TABLE)) {
            neededListeners.add(TrackDataType.SAMPLED_IN_TRACK_POINTS_TABLE);
        }
        Log.d(TAG, "Updating listeners " + neededListeners);

        // Obsolete = registered but no longer needed: unregister them
        Set<TrackDataType> obsoleteListeners = EnumSet.copyOf(registeredListeners);
        obsoleteListeners.removeAll(neededListeners);
        for (TrackDataType trackDataType : obsoleteListeners) {
            unregisterListener(trackDataType);
        }

        // Missing = needed but not yet registered: register them
        Set<TrackDataType> missingListeners = EnumSet.copyOf(neededListeners);
        missingListeners.removeAll(registeredListeners);
        for (TrackDataType trackDataType : missingListeners) {
            registerListener(trackDataType);
        }

        // Record the new registration state
        registeredListeners.clear();
        registeredListeners.addAll(neededListeners);
    }

    /**
     * Registers a listener with data source.
     *
     * @param trackDataType the listener data type
     */
    private void registerListener(TrackDataType trackDataType) {
        if (trackDataType == TrackDataType.TRACKS_TABLE) {
            dataSource.registerContentObserver(TracksColumns.CONTENT_URI, tracksTableObserver);
        } else if (trackDataType == TrackDataType.WAYPOINTS_TABLE) {
            dataSource.registerContentObserver(WaypointsColumns.CONTENT_URI, waypointsTableObserver);
        } else if (trackDataType == TrackDataType.SAMPLED_IN_TRACK_POINTS_TABLE) {
            dataSource.registerContentObserver(TrackPointsColumns.CONTENT_URI, trackPointsTableObserver);
        } else if (trackDataType == TrackDataType.PREFERENCE) {
            dataSource.registerOnSharedPreferenceChangeListener(preferenceListener);
        }
        // SAMPLED_OUT_TRACK_POINTS_TABLE needs no registration: it is mapped
        // to SAMPLED_IN_TRACK_POINTS_TABLE in updateListeners().
    }

    /**
     * Unregisters a listener with data source.
     *
     * @param trackDataType listener data type
     */
    private void unregisterListener(TrackDataType trackDataType) {
        if (trackDataType == TrackDataType.TRACKS_TABLE) {
            dataSource.unregisterContentObserver(tracksTableObserver);
        } else if (trackDataType == TrackDataType.WAYPOINTS_TABLE) {
            dataSource.unregisterContentObserver(waypointsTableObserver);
        } else if (trackDataType == TrackDataType.SAMPLED_IN_TRACK_POINTS_TABLE) {
            dataSource.unregisterContentObserver(trackPointsTableObserver);
        } else if (trackDataType == TrackDataType.PREFERENCE) {
            dataSource.unregisterOnSharedPreferenceChangeListener(preferenceListener);
        }
        // SAMPLED_OUT_TRACK_POINTS_TABLE has no registration of its own.
    }

    /**
     * Unregisters all listeners with data source.
     */
    public void unregisterAllListeners() {
        for (TrackDataType trackDataType : TrackDataType.values()) {
            unregisterListener(trackDataType);
        }
    }
}
| |
/* Copyright (c) 2011 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hotf.server;
import com.google.appengine.api.datastore.DatastoreService;
import com.google.appengine.api.datastore.DatastoreServiceFactory;
import com.google.appengine.api.datastore.Entity;
import com.google.appengine.api.datastore.EntityNotFoundException;
import com.google.appengine.api.datastore.Key;
import com.google.appengine.api.datastore.KeyFactory;
import com.google.appengine.api.datastore.Query;
import com.google.appengine.api.datastore.Transaction;
import com.google.appengine.api.memcache.Expiration;
import com.google.appengine.api.memcache.MemcacheService;
import com.google.appengine.api.memcache.MemcacheService.SetPolicy;
import com.google.appengine.api.memcache.MemcacheServiceFactory;
import java.util.Random;
/**
 * A counter which can be incremented rapidly.
 *
 * Capable of incrementing the counter and increasing the number of shards. When
 * incrementing, a random shard is selected to prevent a single shard from being
 * written too frequently. If increments are being made too quickly, increase
 * the number of shards to divide the load. Performs datastore operations using
 * the low level datastore API.
 */
public abstract class ShardedCounter {
    /**
     * Constants related to a named sharded counter. The counter name provided
     * in the constructor is used as the entity key.
     */
    private static final class Counter {
        /** Entity kind representing a named sharded counter. */
        private static final String KIND = "Counter";
        /** Property storing the number of shards of a {@value #KIND} entity. */
        private static final String SHARD_COUNT = "shard_count";
    }

    /**
     * Constants related to the counter shards. The shard number (as a String)
     * is used as the entity key.
     */
    private static final class CounterShard {
        /**
         * Entity kind prefix, concatenated with the counter name to form the
         * final entity kind representing counter shards.
         */
        private static final String KIND_PREFIX = "CounterShard_";
        /** Property storing the current count within a counter shard. */
        private static final String COUNT = "count";
    }

    private static final DatastoreService ds = DatastoreServiceFactory
        .getDatastoreService();

    /** Default number of shards. */
    private static final int INITIAL_SHARDS = 5;

    /** The name of this counter. */
    private final String counterName;

    /** Random number generator for distributing writes across shards. */
    private final Random generator = new Random();

    /** The counter shard kind for this counter; also used as the memcache key. */
    private final String kind;

    private final MemcacheService mc = MemcacheServiceFactory
        .getMemcacheService();

    /**
     * Creates a sharded counter using the provided counter name.
     *
     * @param counterName name of the sharded counter
     */
    public ShardedCounter(String counterName) {
        this.counterName = counterName;
        this.kind = CounterShard.KIND_PREFIX + counterName;
    }

    /**
     * Increase the number of shards for a given sharded counter. Will never
     * decrease the number of shards.
     *
     * @param count Number of new shards to build and store
     */
    public void addShards(int count) {
        Key counterKey = KeyFactory.createKey(Counter.KIND, counterName);
        incrementPropertyTx(counterKey, Counter.SHARD_COUNT, count, INITIAL_SHARDS
            + count);
    }

    /**
     * Retrieve the value of this sharded counter.
     *
     * @return Summed total of all shards' counts
     */
    public long getCount() {
        Long cached = (Long) mc.get(kind);
        if (cached != null) {
            return cached;
        }
        long sum = 0;
        Query query = new Query(kind);
        for (Entity shard : ds.prepare(query).asIterable()) {
            Long count = (Long) shard.getProperty(CounterShard.COUNT);
            if (count != null) { // tolerate a shard written without a count
                sum += count;
            }
        }
        // Cache the total for 60s; ADD_ONLY_IF_NOT_PRESENT avoids clobbering
        // a value that concurrent increment() calls may have bumped meanwhile.
        mc.put(kind, sum, Expiration.byDeltaSeconds(60),
            SetPolicy.ADD_ONLY_IF_NOT_PRESENT);
        return sum;
    }

    /**
     * Increment the value of this sharded counter by one.
     */
    public void increment() {
        // Find how many shards are in this counter.
        int numShards = getShardCount();
        // Choose the shard randomly from the available shards.
        long shardNum = generator.nextInt(numShards);
        Key shardKey = KeyFactory.createKey(kind, Long.toString(shardNum));
        incrementPropertyTx(shardKey, CounterShard.COUNT, 1, 1);
        // No-op when the total is not cached; getCount() recomputes it then.
        mc.increment(kind, 1);
    }

    /**
     * Get the number of shards in this counter.
     *
     * @return shard count, or the default when the counter entity (or its
     *         shard-count property) does not exist yet
     */
    private int getShardCount() {
        try {
            Key counterKey = KeyFactory.createKey(Counter.KIND, counterName);
            Entity counter = ds.get(counterKey);
            Long shardCount = (Long) counter.getProperty(Counter.SHARD_COUNT);
            // Guard against an entity saved without the property (was an NPE).
            return shardCount == null ? INITIAL_SHARDS : shardCount.intValue();
        } catch (EntityNotFoundException ignore) {
            return INITIAL_SHARDS;
        }
    }

    /**
     * Increment a datastore property value inside a transaction. If the entity
     * with the provided key does not exist, instead create an entity with the
     * supplied initial property value.
     *
     * @param key the entity key to update or create
     * @param prop the property name to be incremented
     * @param increment the amount by which to increment
     * @param initialValue the value to use if the entity does not exist
     */
    private void incrementPropertyTx(Key key, String prop, long increment,
        long initialValue) {
        Transaction tx = ds.beginTransaction();
        try {
            Entity thing;
            long value;
            try {
                thing = ds.get(tx, key);
                Long current = (Long) thing.getProperty(prop);
                // Treat a missing property as zero rather than throwing NPE.
                value = (current == null ? 0L : current) + increment;
            } catch (EntityNotFoundException e) {
                thing = new Entity(key);
                value = initialValue;
            }
            thing.setUnindexedProperty(prop, value);
            ds.put(tx, thing);
            tx.commit();
        } finally {
            // Fix: the original leaked an open transaction when put/commit threw.
            if (tx.isActive()) {
                tx.rollback();
            }
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.builder.component.dsl;
import javax.annotation.Generated;
import org.apache.camel.Component;
import org.apache.camel.builder.component.AbstractComponentBuilder;
import org.apache.camel.builder.component.ComponentBuilder;
import org.apache.camel.component.rest.RestComponent;
/**
* Expose REST services or call external REST services.
*
* Generated by camel-package-maven-plugin - do not edit this file!
*/
@Generated("org.apache.camel.maven.packaging.ComponentDslMojo")
public interface RestEndpointComponentBuilderFactory {
    /**
     * REST (camel-rest)
     * Expose REST services or call external REST services.
     *
     * Category: core,rest
     * Since: 2.14
     * Maven coordinates: org.apache.camel:camel-rest
     *
     * @return the dsl builder
     */
    static RestEndpointComponentBuilder restEndpoint() {
        // Each call returns a fresh, independent builder instance.
        return new RestEndpointComponentBuilderImpl();
    }
    /**
     * Builder for the REST component.
     */
    interface RestEndpointComponentBuilder
            extends
                ComponentBuilder<RestComponent> {
        /**
         * Allows for bridging the consumer to the Camel routing Error Handler,
         * which mean any exceptions occurred while the consumer is trying to
         * pickup incoming messages, or the likes, will now be processed as a
         * message and handled by the routing Error Handler. By default the
         * consumer will use the org.apache.camel.spi.ExceptionHandler to deal
         * with exceptions, that will be logged at WARN or ERROR level and
         * ignored.
         *
         * The option is a: &lt;code&gt;boolean&lt;/code&gt; type.
         *
         * Default: false
         * Group: consumer
         *
         * @param bridgeErrorHandler the value to set
         * @return the dsl builder
         */
        default RestEndpointComponentBuilder bridgeErrorHandler(
                boolean bridgeErrorHandler) {
            doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
            return this;
        }
        /**
         * The Camel Rest component to use for (consumer) the REST transport,
         * such as jetty, servlet, undertow. If no component has been explicit
         * configured, then Camel will lookup if there is a Camel component that
         * integrates with the Rest DSL, or if a
         * org.apache.camel.spi.RestConsumerFactory is registered in the
         * registry. If either one is found, then that is being used.
         *
         * The option is a: &lt;code&gt;java.lang.String&lt;/code&gt; type.
         *
         * Group: consumer
         *
         * @param consumerComponentName the value to set
         * @return the dsl builder
         */
        default RestEndpointComponentBuilder consumerComponentName(
                java.lang.String consumerComponentName) {
            doSetProperty("consumerComponentName", consumerComponentName);
            return this;
        }
        /**
         * The swagger api doc resource to use. The resource is loaded from
         * classpath by default and must be in JSON format.
         *
         * The option is a: &lt;code&gt;java.lang.String&lt;/code&gt; type.
         *
         * Group: producer
         *
         * @param apiDoc the value to set
         * @return the dsl builder
         */
        default RestEndpointComponentBuilder apiDoc(java.lang.String apiDoc) {
            doSetProperty("apiDoc", apiDoc);
            return this;
        }
        /**
         * The Camel Rest component to use for (producer) the REST transport,
         * such as http, undertow. If no component has been explicit configured,
         * then Camel will lookup if there is a Camel component that integrates
         * with the Rest DSL, or if a org.apache.camel.spi.RestProducerFactory
         * is registered in the registry. If either one is found, then that is
         * being used.
         *
         * The option is a: &lt;code&gt;java.lang.String&lt;/code&gt; type.
         *
         * Group: producer
         *
         * @param componentName the value to set
         * @return the dsl builder
         */
        @Deprecated
        default RestEndpointComponentBuilder componentName(
                java.lang.String componentName) {
            doSetProperty("componentName", componentName);
            return this;
        }
        /**
         * Host and port of HTTP service to use (override host in swagger
         * schema).
         *
         * The option is a: &lt;code&gt;java.lang.String&lt;/code&gt; type.
         *
         * Group: producer
         *
         * @param host the value to set
         * @return the dsl builder
         */
        default RestEndpointComponentBuilder host(java.lang.String host) {
            doSetProperty("host", host);
            return this;
        }
        /**
         * Whether the producer should be started lazy (on the first message).
         * By starting lazy you can use this to allow CamelContext and routes to
         * startup in situations where a producer may otherwise fail during
         * starting and cause the route to fail being started. By deferring this
         * startup to be lazy then the startup failure can be handled during
         * routing messages via Camel's routing error handlers. Beware that when
         * the first message is processed then creating and starting the
         * producer may take a little time and prolong the total processing time
         * of the processing.
         *
         * The option is a: &lt;code&gt;boolean&lt;/code&gt; type.
         *
         * Default: false
         * Group: producer
         *
         * @param lazyStartProducer the value to set
         * @return the dsl builder
         */
        default RestEndpointComponentBuilder lazyStartProducer(
                boolean lazyStartProducer) {
            doSetProperty("lazyStartProducer", lazyStartProducer);
            return this;
        }
        /**
         * The Camel Rest component to use for (producer) the REST transport,
         * such as http, undertow. If no component has been explicit configured,
         * then Camel will lookup if there is a Camel component that integrates
         * with the Rest DSL, or if a org.apache.camel.spi.RestProducerFactory
         * is registered in the registry. If either one is found, then that is
         * being used.
         *
         * The option is a: &lt;code&gt;java.lang.String&lt;/code&gt; type.
         *
         * Group: producer
         *
         * @param producerComponentName the value to set
         * @return the dsl builder
         */
        default RestEndpointComponentBuilder producerComponentName(
                java.lang.String producerComponentName) {
            doSetProperty("producerComponentName", producerComponentName);
            return this;
        }
        /**
         * Whether autowiring is enabled. This is used for automatic autowiring
         * options (the option must be marked as autowired) by looking up in the
         * registry to find if there is a single instance of matching type,
         * which then gets configured on the component. This can be used for
         * automatic configuring JDBC data sources, JMS connection factories,
         * AWS Clients, etc.
         *
         * The option is a: &lt;code&gt;boolean&lt;/code&gt; type.
         *
         * Default: true
         * Group: advanced
         *
         * @param autowiredEnabled the value to set
         * @return the dsl builder
         */
        default RestEndpointComponentBuilder autowiredEnabled(
                boolean autowiredEnabled) {
            doSetProperty("autowiredEnabled", autowiredEnabled);
            return this;
        }
    }
    /**
     * Default (generated) implementation of {@link RestEndpointComponentBuilder}.
     */
    class RestEndpointComponentBuilderImpl
            extends
                AbstractComponentBuilder<RestComponent>
            implements
                RestEndpointComponentBuilder {
        // Instantiates the concrete component this builder configures.
        @Override
        protected RestComponent buildConcreteComponent() {
            return new RestComponent();
        }
        // Maps each DSL property name to the corresponding RestComponent setter;
        // returns false for unknown names so the caller can report them.
        @Override
        protected boolean setPropertyOnComponent(
                Component component,
                String name,
                Object value) {
            switch (name) {
            case "bridgeErrorHandler": ((RestComponent) component).setBridgeErrorHandler((boolean) value); return true;
            case "consumerComponentName": ((RestComponent) component).setConsumerComponentName((java.lang.String) value); return true;
            case "apiDoc": ((RestComponent) component).setApiDoc((java.lang.String) value); return true;
            case "componentName": ((RestComponent) component).setComponentName((java.lang.String) value); return true;
            case "host": ((RestComponent) component).setHost((java.lang.String) value); return true;
            case "lazyStartProducer": ((RestComponent) component).setLazyStartProducer((boolean) value); return true;
            case "producerComponentName": ((RestComponent) component).setProducerComponentName((java.lang.String) value); return true;
            case "autowiredEnabled": ((RestComponent) component).setAutowiredEnabled((boolean) value); return true;
            default: return false;
            }
        }
    }
}
| |
/*
* JBoss, Home of Professional Open Source
* Copyright 2008, Red Hat, Inc., and individual contributors
* by the @authors tag. See the copyright.txt in the distribution for a
* full listing of individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.weld.util;
import java.io.Serializable;
import java.lang.annotation.Annotation;
import java.lang.annotation.Repeatable;
import java.lang.reflect.GenericArrayType;
import java.lang.reflect.Modifier;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.lang.reflect.TypeVariable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import jakarta.decorator.Decorator;
import jakarta.enterprise.context.ApplicationScoped;
import jakarta.enterprise.context.ConversationScoped;
import jakarta.enterprise.context.Dependent;
import jakarta.enterprise.context.NormalScope;
import jakarta.enterprise.context.RequestScoped;
import jakarta.enterprise.context.SessionScoped;
import jakarta.enterprise.context.spi.Contextual;
import jakarta.enterprise.context.spi.CreationalContext;
import jakarta.enterprise.event.Observes;
import jakarta.enterprise.event.ObservesAsync;
import jakarta.enterprise.inject.Alternative;
import jakarta.enterprise.inject.CreationException;
import jakarta.enterprise.inject.Disposes;
import jakarta.enterprise.inject.Typed;
import jakarta.enterprise.inject.Vetoed;
import jakarta.enterprise.inject.spi.AnnotatedConstructor;
import jakarta.enterprise.inject.spi.AnnotatedMethod;
import jakarta.enterprise.inject.spi.AnnotatedType;
import jakarta.enterprise.inject.spi.Bean;
import jakarta.enterprise.inject.spi.BeanAttributes;
import jakarta.enterprise.inject.spi.Extension;
import jakarta.enterprise.inject.spi.PassivationCapable;
import jakarta.enterprise.inject.spi.Prioritized;
import jakarta.inject.Inject;
import org.jboss.weld.annotated.enhanced.EnhancedAnnotated;
import org.jboss.weld.annotated.enhanced.EnhancedAnnotatedConstructor;
import org.jboss.weld.annotated.enhanced.EnhancedAnnotatedMethod;
import org.jboss.weld.annotated.enhanced.EnhancedAnnotatedType;
import org.jboss.weld.bean.AbstractBean;
import org.jboss.weld.bean.AbstractProducerBean;
import org.jboss.weld.bean.DecoratorImpl;
import org.jboss.weld.bean.ForwardingBean;
import org.jboss.weld.bean.InterceptorImpl;
import org.jboss.weld.bean.RIBean;
import org.jboss.weld.bean.WeldBean;
import org.jboss.weld.bootstrap.api.ServiceRegistry;
import org.jboss.weld.bootstrap.enablement.ModuleEnablement;
import org.jboss.weld.injection.FieldInjectionPoint;
import org.jboss.weld.injection.MethodInjectionPoint;
import org.jboss.weld.injection.ResourceInjection;
import org.jboss.weld.interceptor.spi.model.InterceptionType;
import org.jboss.weld.interceptor.util.InterceptionTypeRegistry;
import org.jboss.weld.logging.BeanLogger;
import org.jboss.weld.logging.MetadataLogger;
import org.jboss.weld.logging.UtilLogger;
import org.jboss.weld.manager.BeanManagerImpl;
import org.jboss.weld.metadata.cache.MergedStereotypes;
import org.jboss.weld.metadata.cache.MetaAnnotationStore;
import org.jboss.weld.resolution.QualifierInstance;
import org.jboss.weld.resources.spi.ClassFileInfo;
import org.jboss.weld.serialization.spi.BeanIdentifier;
import org.jboss.weld.serialization.spi.ContextualStore;
import org.jboss.weld.util.bytecode.BytecodeUtils;
import org.jboss.weld.util.collections.ImmutableSet;
import org.jboss.weld.util.reflection.Formats;
import org.jboss.weld.util.reflection.Reflections;
/**
* Helper class for bean inspection
*
* @author Pete Muir
* @author David Allen
* @author Marius Bogoevici
* @author Ales Justin
* @author Jozef Hartinger
*/
public class Beans {
// Static utility holder; not instantiable.
private Beans() {
}
/**
 * Indicates whether a bean's scope type is passivating.
 *
 * @param bean the bean to inspect; may be null
 * @param manager the bean manager used to look up the scope model
 * @return true if the scope is passivating, false otherwise (and for a null bean)
 */
public static boolean isPassivatingScope(Bean<?> bean, BeanManagerImpl manager) {
    if (bean == null) {
        return false;
    }
    MetaAnnotationStore store = manager.getServices().get(MetaAnnotationStore.class);
    return store.getScopeModel(bean.getScope()).isPassivating();
}
/**
 * Tests whether a bean is capable of having its state temporarily stored to
 * secondary storage.
 *
 * @param bean the bean to inspect
 * @return true if the bean is passivation capable
 */
public static boolean isPassivationCapableBean(Bean<?> bean) {
    // Weld's own beans answer this directly; others via the SPI marker interface.
    return bean instanceof RIBean<?>
            ? ((RIBean<?>) bean).isPassivationCapableBean()
            : bean instanceof PassivationCapable;
}
/**
 * Tests whether a bean may be used as a dependency of a passivation capable
 * bean (a stricter condition than being passivation capable itself).
 *
 * @param bean the bean to inspect
 * @return true if the bean is a passivation capable dependency
 */
public static boolean isPassivationCapableDependency(Bean<?> bean) {
    return bean instanceof RIBean<?>
            ? ((RIBean<?>) bean).isPassivationCapableDependency()
            : bean instanceof PassivationCapable;
}
/**
 * Indicates whether a bean is proxyable.
 *
 * @param bean the bean to test
 * @param manager the bean manager providing services for the type check
 * @return true if proxyable, false otherwise
 */
public static boolean isBeanProxyable(Bean<?> bean, BeanManagerImpl manager) {
    return bean instanceof RIBean<?>
            ? ((RIBean<?>) bean).isProxyable()
            : Proxies.isTypesProxyable(bean.getTypes(), manager.getServices());
}
/**
 * Collects the methods of the given type that are candidates for interception:
 * non-static, non-initializer methods that are not themselves interceptor
 * callbacks. Bridge methods are kept, as they can be interception candidates
 * in rare cases.
 *
 * @param type the enhanced annotated type to scan
 * @return the interceptable methods of the type
 */
public static List<EnhancedAnnotatedMethod<?, ?>> getInterceptableMethods(EnhancedAnnotatedType<?> type) {
    List<EnhancedAnnotatedMethod<?, ?>> result = new ArrayList<EnhancedAnnotatedMethod<?, ?>>();
    for (EnhancedAnnotatedMethod<?, ?> method : type.getEnhancedMethods()) {
        if (method.isStatic() || method.isAnnotationPresent(Inject.class)) {
            continue; // not a business method
        }
        if (isInterceptorMethod(method)) {
            continue; // interceptor callbacks are not intercepted themselves
        }
        result.add(method);
    }
    return result;
}
/**
 * Returns true if the method carries any supported interception-type
 * annotation (e.g. around-invoke or a lifecycle callback marker).
 */
private static boolean isInterceptorMethod(AnnotatedMethod<?> annotatedMethod) {
    for (InterceptionType candidate : InterceptionTypeRegistry.getSupportedInterceptionTypes()) {
        boolean marked = annotatedMethod.isAnnotationPresent(InterceptionTypeRegistry.getAnnotationClass(candidate));
        if (marked) {
            return true;
        }
    }
    return false;
}
/**
 * Checks that all the qualifiers in the set requiredQualifiers are in the set
 * of qualifiers. Qualifier equality rules for annotation members are followed
 * (encoded in {@code QualifierInstance} equality).
 *
 * @param requiredQualifiers the required qualifiers
 * @param qualifiers the set of qualifiers to check
 * @return true if all match, false otherwise
 */
public static boolean containsAllQualifiers(Set<QualifierInstance> requiredQualifiers, Set<QualifierInstance> qualifiers) {
    for (QualifierInstance required : requiredQualifiers) {
        if (!qualifiers.contains(required)) {
            return false;
        }
    }
    return true;
}
/**
 * Checks that every expected interceptor binding is present among the existing
 * bindings, after both sides are reduced to their interceptor-binding subsets.
 *
 * @param expectedBindings the bindings that must all be present
 * @param existingBindings the bindings to check against
 * @param manager the bean manager used to extract interceptor bindings
 * @return true if the non-empty expected set is fully contained; an empty
 *         expected set never matches
 */
public static boolean containsAllInterceptionBindings(Set<Annotation> expectedBindings,
        Set<QualifierInstance> existingBindings, BeanManagerImpl manager) {
    final Set<QualifierInstance> expected = manager.extractInterceptorBindingsForQualifierInstance(
            QualifierInstance.of(expectedBindings, manager.getServices().get(MetaAnnotationStore.class)));
    // Replaces the `isEmpty() ? false : ...` anti-idiom; behavior is unchanged.
    return !expected.isEmpty()
            && manager.extractInterceptorBindingsForQualifierInstance(existingBindings).containsAll(expected);
}
/**
 * Retains only beans which are enabled, removing disabled ones in place.
 *
 * @param beans the mutable set of beans to filter
 * @param beanManager the bean manager supplying the module enablement
 * @return the same (now filtered) mutable set of enabled beans
 */
public static <T extends Bean<?>> Set<T> removeDisabledBeans(Set<T> beans, final BeanManagerImpl beanManager) {
    if (!beans.isEmpty()) {
        final ModuleEnablement enabled = beanManager.getEnabled();
        beans.removeIf(bean -> !isBeanEnabled(bean, enabled));
    }
    return beans;
}
/**
 * Determines whether a bean is enabled in the given module.
 * <p>
 * Alternatives are enabled via class or stereotype selection (or, for
 * synthetic/prioritized alternatives, via a priority). Producer beans inherit
 * the enablement of their declaring bean; decorators and interceptors consult
 * their own enablement lists. All other beans are always enabled.
 *
 * @param bean the bean to check
 * @param enabled the module's enablement configuration
 * @return true if the bean is enabled
 */
public static boolean isBeanEnabled(Bean<?> bean, ModuleEnablement enabled) {
    if (bean.isAlternative()) {
        boolean isEnabled = false;
        if (enabled.isEnabledAlternativeClass(bean.getBeanClass())) {
            isEnabled = true;
        } else {
            // An alternative may also be enabled through one of its stereotypes.
            for (Class<? extends Annotation> stereotype : bean.getStereotypes()) {
                if (enabled.isEnabledAlternativeStereotype(stereotype)) {
                    isEnabled = true;
                    break;
                }
            }
        }
        // For synthetic enabled alternatives, the ModuleEnablement may not yet be aware of them
        if (!isEnabled
                && ((bean instanceof WeldBean && ((WeldBean<?>) bean).getPriority() != null) || bean instanceof Prioritized)) {
            isEnabled = true;
        }
        return isEnabled;
    } else if (bean instanceof AbstractProducerBean<?, ?, ?>) {
        AbstractProducerBean<?, ?, ?> receiverBean = (AbstractProducerBean<?, ?, ?>) bean;
        // A producer is enabled exactly when its declaring bean is enabled.
        return isBeanEnabled(receiverBean.getDeclaringBean(), enabled);
    } else if (bean instanceof DecoratorImpl<?>) {
        return enabled.isDecoratorEnabled(bean.getBeanClass());
    } else if (bean instanceof InterceptorImpl<?>) {
        return enabled.isInterceptorEnabled(bean.getBeanClass());
    } else {
        return true;
    }
}
/**
 * Determines whether the annotated element is an alternative, either directly
 * via {@code @Alternative} or through one of its merged stereotypes.
 *
 * @param annotated the annotated element
 * @param mergedStereotypes the merged stereotypes of the element
 * @return true if alternative, false otherwise
 */
public static boolean isAlternative(EnhancedAnnotated<?, ?> annotated, MergedStereotypes<?, ?> mergedStereotypes) {
    if (annotated.isAnnotationPresent(Alternative.class)) {
        return true;
    }
    return mergedStereotypes.isAlternative();
}
/**
 * Like {@link #getBeanConstructor(EnhancedAnnotatedType)} but fails instead of
 * returning null when no suitable constructor exists.
 *
 * @param type the type to resolve a bean constructor for
 * @return the resolved constructor, never null
 */
public static <T> EnhancedAnnotatedConstructor<T> getBeanConstructorStrict(EnhancedAnnotatedType<T> type) {
    EnhancedAnnotatedConstructor<T> resolved = getBeanConstructor(type);
    if (resolved != null) {
        return resolved;
    }
    throw UtilLogger.LOG.unableToFindConstructor(type);
}
/**
 * Resolves the bean constructor for the given type: the single {@code @Inject}
 * constructor if one exists, otherwise the no-args constructor if present,
 * otherwise null. Fails on ambiguity (multiple {@code @Inject} constructors)
 * and on constructors declaring {@code @Disposes}, {@code @Observes} or
 * {@code @ObservesAsync} parameters.
 *
 * @param type the enhanced annotated type
 * @return the resolved constructor, or null if none qualifies
 */
public static <T> EnhancedAnnotatedConstructor<T> getBeanConstructor(EnhancedAnnotatedType<T> type) {
    Collection<EnhancedAnnotatedConstructor<T>> initializerAnnotatedConstructors = type
            .getEnhancedConstructors(Inject.class);
    BeanLogger.LOG.foundInjectableConstructors(initializerAnnotatedConstructors, type);
    EnhancedAnnotatedConstructor<T> constructor = null;
    if (initializerAnnotatedConstructors.size() > 1) {
        // More than one @Inject constructor is ambiguous and therefore invalid.
        throw UtilLogger.LOG.ambiguousConstructor(type, initializerAnnotatedConstructors);
    } else if (initializerAnnotatedConstructors.size() == 1) {
        constructor = initializerAnnotatedConstructors.iterator().next();
        BeanLogger.LOG.foundOneInjectableConstructor(constructor, type);
    } else if (type.getNoArgsEnhancedConstructor() != null) {
        constructor = type.getNoArgsEnhancedConstructor();
        BeanLogger.LOG.foundDefaultConstructor(constructor, type);
    }
    if (constructor != null) {
        // A bean constructor must not declare disposer or observer parameters.
        // (Deduplicates three copy-pasted checks; "@" + simple name reproduces
        // the original messages "@Disposes", "@Observes", "@ObservesAsync".)
        for (Class<? extends Annotation> forbidden : Arrays.<Class<? extends Annotation>> asList(
                Disposes.class, Observes.class, ObservesAsync.class)) {
            if (!constructor.getEnhancedParameters(forbidden).isEmpty()) {
                throw BeanLogger.LOG.parameterAnnotationNotAllowedOnConstructor(
                        "@" + forbidden.getSimpleName(), constructor,
                        Formats.formatAsStackTraceElement(constructor.getJavaMember()));
            }
        }
    }
    return constructor;
}
/**
 * Injects EJBs and other EE resources into the bean instance, one class
 * hierarchy level at a time.
 *
 * @param resourceInjectionsHierarchy resource injections grouped per hierarchy level
 * @param beanInstance the instance to inject into
 * @param ctx the creational context
 */
public static <T> void injectEEFields(Iterable<Set<ResourceInjection<?>>> resourceInjectionsHierarchy,
        T beanInstance, CreationalContext<T> ctx) {
    for (Set<ResourceInjection<?>> levelInjections : resourceInjectionsHierarchy) {
        for (ResourceInjection<?> injection : levelInjections) {
            injection.injectResourceReference(beanInstance, ctx);
        }
    }
}
/**
 * Gets the declared bean type: the single actual type argument of the class,
 * or null when there is not exactly one.
 *
 * @param clazz the class to inspect
 * @return the bean type, or null
 */
public static Type getDeclaredBeanType(Class<?> clazz) {
    Type[] typeArguments = Reflections.getActualTypeArguments(clazz);
    return typeArguments.length == 1 ? typeArguments[0] : null;
}
/**
 * Injects the given bound field injection points into the instance.
 *
 * @param instance the instance to inject into
 * @param creationalContext the creational context for resolving dependencies
 * @param manager the bean manager
 * @param injectableFields the field injection points to process
 */
public static <T> void injectBoundFields(T instance, CreationalContext<T> creationalContext, BeanManagerImpl manager,
        Iterable<? extends FieldInjectionPoint<?, ?>> injectableFields) {
    for (FieldInjectionPoint<?, ?> fieldInjectionPoint : injectableFields) {
        fieldInjectionPoint.inject(instance, manager, creationalContext);
    }
}
/**
 * Performs field injection and then initializer-method invocation, level by
 * level; both lists must describe the same number of hierarchy levels.
 *
 * @param instance the instance to inject into
 * @param ctx the creational context
 * @param beanManager the bean manager
 * @param injectableFields field injection points, one entry per hierarchy level
 * @param initializerMethods initializer methods, one entry per hierarchy level
 */
public static <T> void injectFieldsAndInitializers(T instance, CreationalContext<T> ctx, BeanManagerImpl beanManager,
        List<? extends Iterable<? extends FieldInjectionPoint<?, ?>>> injectableFields,
        List<? extends Iterable<? extends MethodInjectionPoint<?, ?>>> initializerMethods) {
    final int levels = injectableFields.size();
    if (levels != initializerMethods.size()) {
        throw UtilLogger.LOG.invalidQuantityInjectableFieldsAndInitializerMethods(injectableFields, initializerMethods);
    }
    for (int level = 0; level < levels; level++) {
        injectBoundFields(instance, ctx, beanManager, injectableFields.get(level));
        callInitializers(instance, ctx, beanManager, initializerMethods.get(level));
    }
}
/**
 * Calls all initializer methods of the bean instance.
 *
 * @param instance the bean instance
 * @param creationalContext the creational context
 * @param manager the bean manager
 * @param initializerMethods the initializer methods to invoke
 */
public static <T> void callInitializers(T instance, CreationalContext<T> creationalContext, BeanManagerImpl manager,
        Iterable<? extends MethodInjectionPoint<?, ?>> initializerMethods) {
    for (MethodInjectionPoint<?, ?> initializerMethod : initializerMethods) {
        initializerMethod.invoke(instance, null, manager, creationalContext, CreationException.class);
    }
}
/** Returns true if the annotated type is marked with {@code jakarta.interceptor.Interceptor}. */
public static <T> boolean isInterceptor(AnnotatedType<T> annotatedItem) {
    final boolean marked = annotatedItem.isAnnotationPresent(jakarta.interceptor.Interceptor.class);
    return marked;
}
/** Returns true if the annotated type is marked with {@code @Decorator}. */
public static <T> boolean isDecorator(EnhancedAnnotatedType<T> annotatedItem) {
    final boolean marked = annotatedItem.isAnnotationPresent(Decorator.class);
    return marked;
}
/**
 * Merges an existing qualifier collection with an array of new qualifiers.
 * Each new qualifier is validated as a binding type; a non-{@code @Repeatable}
 * qualifier type may appear at most once among the new qualifiers.
 *
 * @param manager the bean manager used for binding-type validation
 * @param qualifiers the existing qualifiers; may be null or empty
 * @param newQualifiers the qualifiers to merge in; may be null or empty
 * @return a new mutable set containing the merged qualifiers
 */
public static Set<Annotation> mergeInQualifiers(BeanManagerImpl manager, Collection<Annotation> qualifiers, Annotation[] newQualifiers) {
    Set<Annotation> result = new HashSet<Annotation>();
    if (qualifiers != null && !qualifiers.isEmpty()) {
        result.addAll(qualifiers);
    }
    if (newQualifiers != null && newQualifiers.length > 0) {
        final MetaAnnotationStore store = manager.getServices().get(MetaAnnotationStore.class);
        // Track annotation types already seen so duplicate detection is O(n)
        // instead of the original O(n^2) rescan of all prior qualifiers.
        Set<Class<? extends Annotation>> seenTypes = new HashSet<Class<? extends Annotation>>();
        Set<Annotation> checkedNewQualifiers = new HashSet<Annotation>();
        for (Annotation qualifier : newQualifiers) {
            if (!store.getBindingTypeModel(qualifier.annotationType()).isValid()) {
                throw UtilLogger.LOG.annotationNotQualifier(qualifier);
            }
            Class<? extends Annotation> annotationType = qualifier.annotationType();
            // Only @Repeatable qualifier types may occur more than once.
            if (!annotationType.isAnnotationPresent(Repeatable.class) && seenTypes.contains(annotationType)) {
                throw UtilLogger.LOG.redundantQualifier(qualifier, Arrays.toString(newQualifiers));
            }
            seenTypes.add(annotationType);
            checkedNewQualifiers.add(qualifier);
        }
        result.addAll(checkedNewQualifiers);
    }
    return result;
}
/**
 * Computes the set of bean types from an annotated element. Illegal bean types
 * are ignored except for array and primitive types and unless {@link Typed} is
 * used.
 *
 * @param annotated the annotated element
 * @return the set of bean types
 */
public static Set<Type> getTypes(EnhancedAnnotated<?, ?> annotated) {
    Class<?> javaClass = annotated.getJavaClass();
    // Array and primitive types require special treatment.
    if (javaClass.isArray() || javaClass.isPrimitive()) {
        return ImmutableSet.<Type>builder().addAll(annotated.getBaseType(), Object.class).build();
    }
    if (annotated.isAnnotationPresent(Typed.class)) {
        // @Typed restricts the bean types to an explicit list.
        return ImmutableSet.<Type>builder().addAll(getTypedTypes(Reflections.buildTypeMap(annotated.getTypeClosure()),
                javaClass, annotated.getAnnotation(Typed.class))).build();
    }
    if (javaClass.isInterface()) {
        return getLegalBeanTypes(annotated.getTypeClosure(), annotated, Object.class);
    }
    return getLegalBeanTypes(annotated.getTypeClosure(), annotated);
}
/**
 * Computes the bean types of a bean that uses the {@link Typed} annotation:
 * every class listed in the annotation (resolved against the type closure)
 * plus {@code Object}.
 *
 * @param typeClosure raw class to resolved type mapping of the bean's type closure
 * @param rawType the bean's raw type, used for error reporting
 * @param typed the {@code @Typed} annotation instance
 * @return the restricted set of bean types
 */
public static Set<Type> getTypedTypes(Map<Class<?>, Type> typeClosure, Class<?> rawType, Typed typed) {
    Set<Type> types = new HashSet<Type>();
    for (Class<?> specifiedClass : typed.value()) {
        Type resolved = typeClosure.get(specifiedClass);
        if (resolved == null) {
            // A @Typed class must be part of the bean's type hierarchy.
            throw BeanLogger.LOG.typedClassNotInHierarchy(specifiedClass.getName(), rawType,
                    Formats.formatTypes(typeClosure.values()));
        }
        types.add(resolved);
    }
    types.add(Object.class);
    return types;
}
/**
 * Indicates whether the type is a candidate managed bean, decorator or
 * interceptor.
 *
 * @param annotatedType the type to inspect
 * @return true if the type qualifies, false otherwise
 */
public static boolean isTypeManagedBeanOrDecoratorOrInterceptor(AnnotatedType<?> annotatedType) {
    Class<?> javaClass = annotatedType.getJavaClass();
    if (javaClass.isEnum() || Extension.class.isAssignableFrom(javaClass)) {
        return false;
    }
    if (!Reflections.isTopLevelOrStaticNestedClass(javaClass)) {
        return false;
    }
    if (Reflections.isParameterizedTypeWithWildcard(javaClass)) {
        return false;
    }
    return hasSimpleCdiConstructor(annotatedType);
}
/**
 * Variant of {@link #isTypeManagedBeanOrDecoratorOrInterceptor(AnnotatedType)}
 * that works on class file metadata without loading the class.
 *
 * @param classFileInfo class file metadata of the candidate type
 * @param checkTypeModifiers whether nesting/modifier information may be
 *        trusted — this flag reflects whether a Jandex version including the
 *        fix for JANDEX-37 could be used
 * @return true if the type is a candidate managed bean, decorator or interceptor
 */
public static boolean isTypeManagedBeanOrDecoratorOrInterceptor(ClassFileInfo classFileInfo, boolean checkTypeModifiers) {
    // Candidate = not an enum, not an Extension, has a CDI-eligible constructor,
    // and is either non-abstract or declares @Decorator (decorators may be abstract).
    boolean isTypeManagedBean = ((classFileInfo.getModifiers() & BytecodeUtils.ENUM) == 0) && !classFileInfo.isAssignableTo(Extension.class)
            && classFileInfo.hasCdiConstructor()
            && (!Modifier.isAbstract(classFileInfo.getModifiers()) || classFileInfo.isAnnotationDeclared(Decorator.class));
    if (checkTypeModifiers) {
        // Additionally require a top-level class or a static nested class.
        return isTypeManagedBean && (ClassFileInfo.NestingType.TOP_LEVEL.equals(classFileInfo.getNestingType()) || Modifier.isStatic(classFileInfo.getModifiers()));
    } else {
        return isTypeManagedBean;
    }
}
/**
 * Returns true if the class declares {@code @Decorator} but has no
 * CDI-eligible constructor, i.e. a decorator with an inappropriate constructor.
 */
public static boolean isDecoratorDeclaringInAppropriateConstructor(ClassFileInfo classFileInfo) {
    return !classFileInfo.hasCdiConstructor() && classFileInfo.isAnnotationDeclared(Decorator.class);
}
/**
 * Returns true if the annotated type is a {@code @Decorator} without a simple
 * CDI constructor, i.e. a decorator with an inappropriate constructor.
 */
public static boolean isDecoratorDeclaringInAppropriateConstructor(AnnotatedType<?> annotatedType) {
    return !hasSimpleCdiConstructor(annotatedType) && annotatedType.isAnnotationPresent(Decorator.class);
}
/**
 * Returns true if the type has a CDI-eligible constructor: either a no-args
 * constructor or a constructor annotated {@code @Inject}.
 */
public static boolean hasSimpleCdiConstructor(AnnotatedType<?> type) {
    for (AnnotatedConstructor<?> candidate : type.getConstructors()) {
        if (candidate.getParameters().isEmpty() || candidate.isAnnotationPresent(Inject.class)) {
            return true;
        }
    }
    return false;
}
/**
 * Determines if this Java class should be vetoed as a result of presence of
 * {@link Vetoed} annotations on the class itself or on its package.
 */
public static boolean isVetoed(Class<?> javaClass) {
    if (javaClass.isAnnotationPresent(Vetoed.class)) {
        return true;
    }
    return isPackageVetoed(javaClass.getPackage());
}
/**
 * Determines if the given annotated type is vetoed, either directly or via its package.
 */
public static boolean isVetoed(AnnotatedType<?> type) {
    boolean directlyVetoed = type.isAnnotationPresent(Vetoed.class);
    return directlyVetoed || isPackageVetoed(type.getJavaClass().getPackage());
}
// Null-safe: classes in the default package have no Package object.
private static boolean isPackageVetoed(Package pkg) {
    if (pkg == null) {
        return false;
    }
    return pkg.isAnnotationPresent(Vetoed.class);
}
/**
 * Generates a unique string signature for the given {@link BeanAttributes}, combining name,
 * scope, alternative flag, qualifiers, stereotypes and bean types.
 */
public static String createBeanAttributesId(BeanAttributes<?> attributes) {
    StringBuilder signature = new StringBuilder();
    signature.append(attributes.getName())
            .append(",")
            .append(attributes.getScope().getName())
            .append(",")
            .append(attributes.isAlternative())
            .append(AnnotatedTypes.createAnnotationCollectionId(attributes.getQualifiers()))
            .append(createTypeCollectionId(attributes.getStereotypes()))
            .append(createTypeCollectionId(attributes.getTypes()));
    return signature.toString();
}
/**
 * Generates a unique signature of a collection of types. Types are sorted first so the
 * signature does not depend on the collection's iteration order.
 */
public static String createTypeCollectionId(Collection<? extends Type> types) {
    List<? extends Type> sortedTypes = new ArrayList<Type>(types);
    Collections.sort(sortedTypes, TypeComparator.INSTANCE);
    StringBuilder builder = new StringBuilder("[");
    boolean first = true;
    for (Type type : sortedTypes) {
        if (!first) {
            builder.append(",");
        }
        builder.append(createTypeId(type));
        first = false;
    }
    return builder.append("]").toString();
}
/**
 * Creates a unique signature for a {@link Type}. Handles plain classes, parameterized types
 * (recursively, including their type arguments), type variables and generic array types.
 *
 * @throws IllegalArgumentException if the type is of an unsupported kind
 */
private static String createTypeId(Type type) {
    if (type instanceof Class<?>) {
        return Reflections.<Class<?>> cast(type).getName();
    }
    if (type instanceof ParameterizedType) {
        ParameterizedType parameterizedType = (ParameterizedType) type;
        StringBuilder builder = new StringBuilder();
        builder.append(createTypeId(parameterizedType.getRawType()));
        builder.append("<");
        // Hoisted: getActualTypeArguments() returns a fresh array clone on each call,
        // so query it once instead of on every loop iteration.
        Type[] typeArguments = parameterizedType.getActualTypeArguments();
        for (int i = 0; i < typeArguments.length; i++) {
            builder.append(createTypeId(typeArguments[i]));
            if (i != typeArguments.length - 1) {
                builder.append(",");
            }
        }
        builder.append(">");
        return builder.toString();
    }
    if (type instanceof TypeVariable<?>) {
        return Reflections.<TypeVariable<?>> cast(type).getName();
    }
    if (type instanceof GenericArrayType) {
        return createTypeId(Reflections.<GenericArrayType> cast(type).getGenericComponentType());
    }
    // Was needlessly qualified as java.lang.IllegalArgumentException.
    throw new IllegalArgumentException("Unknown type " + type);
}
/**
 * Orders types by their generated signature so type collections produce a stable id.
 */
private static class TypeComparator implements Comparator<Type>, Serializable {
    private static final long serialVersionUID = -2162735176891985078L;
    private static final TypeComparator INSTANCE = new TypeComparator();

    @Override
    public int compare(Type o1, Type o2) {
        String firstId = createTypeId(o1);
        String secondId = createTypeId(o2);
        return firstId.compareTo(secondId);
    }
}
/**
 * Guards against using enhanced metadata after bootstrap, when it has been dropped.
 *
 * @return the given instance, never null
 * @throws IllegalStateException if the enhanced metadata is no longer available
 */
public static <T, S, X extends EnhancedAnnotated<T, S>> X checkEnhancedAnnotatedAvailable(X enhancedAnnotated) {
    if (enhancedAnnotated != null) {
        return enhancedAnnotated;
    }
    throw new IllegalStateException("Enhanced metadata should not be used at runtime.");
}
/**
 * Returns true if the bean uses one of the built-in CDI scopes
 * (request, session, application, conversation or dependent).
 */
public static boolean hasBuiltinScope(Bean<?> bean) {
    // Hoisted: getScope() was previously called up to five times.
    Class<? extends Annotation> scope = bean.getScope();
    return RequestScoped.class.equals(scope) || SessionScoped.class.equals(scope)
            || ApplicationScoped.class.equals(scope) || ConversationScoped.class.equals(scope)
            || Dependent.class.equals(scope);
}
/**
 * Finds the first bean-defining scope annotation declared on the given type: either a
 * normal scope or {@code Dependent}.
 *
 * @return the scope annotation type, or null if the type declares none
 */
public static Class<? extends Annotation> getBeanDefiningAnnotationScope(AnnotatedType<?> annotatedType) {
    for (Annotation annotation : annotatedType.getAnnotations()) {
        Class<? extends Annotation> candidate = annotation.annotationType();
        if (candidate.isAnnotationPresent(NormalScope.class) || candidate.equals(Dependent.class)) {
            return candidate;
        }
    }
    return null;
}
/**
 * Computes the set of legal bean types, filtering out illegal ones and appending any
 * additional types.
 *
 * @param types the initial set of types
 * @param baseType the declaring artifact, used when logging ignored illegal types
 * @param additionalTypes types to add to the initial set
 * @return the set of legal bean types; the original set instance is returned unchanged when
 *         no filtering or additions are needed
 */
public static Set<Type> getLegalBeanTypes(Set<Type> types, Object baseType, Type... additionalTypes) {
    if (additionalTypes != null && additionalTypes.length > 0) {
        // Extra types always force a rebuild - the original set cannot be reused.
        return omitIllegalBeanTypes(types, baseType).addAll(additionalTypes).build();
    }
    // Fast path: reuse the original set when every type is already legal.
    boolean allLegal = true;
    for (Type type : types) {
        if (Types.isIllegalBeanType(type)) {
            allLegal = false;
            break;
        }
    }
    return allLegal ? types : omitIllegalBeanTypes(types, baseType).build();
}
/**
 * Copies the given types into an immutable-set builder, skipping (and logging) any
 * illegal bean types.
 */
static ImmutableSet.Builder<Type> omitIllegalBeanTypes(Set<Type> types, Object baseType) {
    ImmutableSet.Builder<Type> legalTypes = ImmutableSet.builder();
    for (Type candidate : types) {
        if (Types.isIllegalBeanType(candidate)) {
            MetadataLogger.LOG.illegalBeanTypeIgnored(candidate, baseType);
        } else {
            legalTypes.add(candidate);
        }
    }
    return legalTypes;
}
/**
 * Resolves the identifier for the given contextual, using the supplied contextual store
 * for contextuals that do not carry their own identifier.
 *
 * @param contextual the contextual to identify
 * @param contextualStore the store used to look up or register the identifier
 * @return the identifier for the given contextual
 * @see #getIdentifier(Contextual, ContextualStore, ServiceRegistry)
 */
public static BeanIdentifier getIdentifier(Contextual<?> contextual, ContextualStore contextualStore) {
return getIdentifier(contextual, contextualStore, null);
}
/**
 * Resolves the identifier for the given contextual, looking up the contextual store from
 * the given service registry.
 *
 * @param contextual the contextual to identify
 * @param serviceRegistry the registry used to obtain the {@link ContextualStore}
 * @return the identifier for the given contextual
 * @see #getIdentifier(Contextual, ContextualStore, ServiceRegistry)
 */
public static BeanIdentifier getIdentifier(Contextual<?> contextual, ServiceRegistry serviceRegistry) {
return getIdentifier(contextual, null, serviceRegistry);
}
/**
 * @param bean the bean to inspect
 * @return <code>true</code> if final methods should be ignored when checking proxyability
 */
public static boolean shouldIgnoreFinalMethods(Bean<?> bean) {
    // Only RI beans carry this configuration; anything else keeps the strict default.
    if (!(bean instanceof AbstractBean<?, ?>)) {
        return false;
    }
    return ((AbstractBean<?, ?>) bean).isIgnoreFinalMethods();
}
/**
 * Strips a {@link ForwardingBean} wrapper, returning the underlying delegate;
 * non-forwarding beans are returned unchanged.
 */
public static Bean<?> unwrap(Bean<?> bean) {
    if (!(bean instanceof ForwardingBean)) {
        return bean;
    }
    return ((ForwardingBean<?>) bean).delegate();
}
/**
 * A slightly optimized way to get the bean identifier - there is no need to call
 * ContextualStore.putIfAbsent() for passivation capable beans because it's already called
 * during bootstrap. See also {@link BeanManagerImpl#addBean(Bean)}.
 *
 * @param contextual the contextual to identify
 * @param contextualStore the store to use; may be null, in which case it is looked up
 * @param serviceRegistry registry used to look up the store when none was supplied
 * @return the identifier for the given contextual
 */
private static BeanIdentifier getIdentifier(Contextual<?> contextual, ContextualStore contextualStore, ServiceRegistry serviceRegistry) {
    // RI beans know their own identifier - no store interaction needed.
    if (contextual instanceof RIBean<?>) {
        return ((RIBean<?>) contextual).getIdentifier();
    }
    ContextualStore store = (contextualStore != null) ? contextualStore : serviceRegistry.get(ContextualStore.class);
    return store.putIfAbsent(contextual);
}
}
| |
package de.hftstuttgart.projectindoorweb.web;
import de.hftstuttgart.projectindoorweb.web.internal.ResponseWrapper;
import de.hftstuttgart.projectindoorweb.web.internal.requests.building.AddNewBuilding;
import de.hftstuttgart.projectindoorweb.web.internal.requests.building.GetAllBuildings;
import de.hftstuttgart.projectindoorweb.web.internal.requests.building.GetSingleBuilding;
import de.hftstuttgart.projectindoorweb.web.internal.requests.building.UpdateBuilding;
import de.hftstuttgart.projectindoorweb.web.internal.requests.positioning.BatchPositionResult;
import de.hftstuttgart.projectindoorweb.web.internal.requests.positioning.GenerateBatchPositionResults;
import de.hftstuttgart.projectindoorweb.web.internal.requests.positioning.GenerateSinglePositionResult;
import de.hftstuttgart.projectindoorweb.web.internal.requests.positioning.GetAllEvaalEntries;
import de.hftstuttgart.projectindoorweb.web.internal.requests.positioning.GetEvaluationFilesForBuilding;
import de.hftstuttgart.projectindoorweb.web.internal.requests.positioning.GetRadioMapFilesForBuilding;
import de.hftstuttgart.projectindoorweb.web.internal.requests.positioning.SinglePositionResult;
import de.hftstuttgart.projectindoorweb.web.internal.requests.project.AddNewProject;
import de.hftstuttgart.projectindoorweb.web.internal.requests.project.GetAlgorithmParameters;
import de.hftstuttgart.projectindoorweb.web.internal.requests.project.GetAllAlgorithmTypes;
import de.hftstuttgart.projectindoorweb.web.internal.requests.project.GetAllProjects;
import de.hftstuttgart.projectindoorweb.web.internal.requests.project.LoadSelectedProject;
import de.hftstuttgart.projectindoorweb.web.internal.requests.project.UpdateProject;
import org.springframework.web.multipart.MultipartFile;
import java.io.File;
import java.util.List;
/**
 * REST transmission service. Uses all underlying services of the Back End. Works as a gateway between REST controller
 * resources and Back End functionalities. All components need to access <code>this</code> service in order to access
 * Back End functionalities. Do not use other services directly!
 */
public interface RestTransmissionService {
//Evaal file processing and generating position results
/**
 * Processes given Evaal files. Given files can be evaluation files or radio map files.
 *
 * @param buildingId The building id which the given files are associated with. Must not be <code>null</code>.
 * @param evaluationFiles Flag if given files are evaluation files. True if files are evaluation files, otherwise false.
 * @param radioMapFiles Radio map file array. Must not be <code>null</code>.
 * @param transformedPointsFile Transformed points file array. Can be <code>null</code>.
 * @return String message containing information if processing was successful or not.
 */
String processEvaalFiles(String buildingId, boolean evaluationFiles,
MultipartFile[] radioMapFiles, MultipartFile[] transformedPointsFile);
/**
 * Removes an evaal file with the given evaal file identifier.
 *
 * @param evaalFileIdentifier The identifier of the evaal file. Must not be <code>null</code>.
 * @return String message containing information if deletion was successful or not.
 */
String deleteEvaalFile(String evaalFileIdentifier);
/**
 * Generates a list of batch position results from a given batch.
 *
 * @param generateBatchPositionResults Batch containing information needed for position calculation. Must not be
 * <code>null</code>.
 * @return A list of position results or an empty list if given input is invalid.
 */
List<BatchPositionResult> generatePositionResults(GenerateBatchPositionResults generateBatchPositionResults);
/**
 * Generates one position result with a given position information line.
 *
 * @param generateSinglePositionResult Position information line. Must not be <code>null</code>.
 * @return One position result or an empty position result if given data is invalid.
 */
SinglePositionResult getPositionForWifiReading(GenerateSinglePositionResult generateSinglePositionResult);
/**
 * Retrieves the position results of a project based on the given project identifier.
 *
 * @param projectIdentifier The project identifier of the wanted project's position. Must not be <code>null</code>.
 * @return A list of position results or an empty list if given data is invalid.
 */
List<BatchPositionResult> getPositionResultsForProjectIdentifier(String projectIdentifier);
/**
 * Retrieves all evaluation files for a given building.
 *
 * @param buildingIdentifier The building identifier of the wanted evaluation files. Must not be <code>null</code>.
 * @return A list of evaluation files or an empty list if given data was invalid.
 */
List<GetEvaluationFilesForBuilding> getEvaluationFilesForBuilding(String buildingIdentifier);
/**
 * Retrieves all radio map files of a given building.
 *
 * @param buildingIdentifier The identifier of the building which is associated with the wanted radio map files.
 * @return A list of radio map files or an empty list if given data was invalid.
 */
List<GetRadioMapFilesForBuilding> getRadioMapFilesForBuilding(String buildingIdentifier);
/**
 * Retrieves all currently available evaal file entries.
 *
 * @return A list of evaal file entries or an empty list if no files have been found.
 */
List<GetAllEvaalEntries> getAllEvaalEntries();
//Projects
/**
 * Adds a new project to the database.
 *
 * @param addNewProject Project which should be added to the database. Must not be <code>null</code>.
 * @return A response containing a message if addition was successful and an associated project id.
 */
ResponseWrapper addNewProject(AddNewProject addNewProject);
/**
 * Overwrites an already available project with a given project.
 *
 * @param updateProject The project which updates an already available one. Must not be <code>null</code>.
 * @return A String message informing about the update status.
 */
String updateProject(UpdateProject updateProject);
/**
 * Deletes a project with a given identifier.
 *
 * @param projectIdentifier The identifier of the project which should be deleted. Must not be <code>null</code>.
 * @return A String message informing about the deletion status.
 */
String deleteProject(String projectIdentifier);
/**
 * Loads and retrieves a project based on the given project identifier.
 *
 * @param projectIdentifier The project identifier of the project which should be loaded and retrieved.
 * Must not be <code>null</code>.
 * @return The wanted project or an empty result element if given data was invalid.
 */
LoadSelectedProject loadSelectedProject(String projectIdentifier);
/**
 * Retrieves all projects currently available in the database.
 *
 * @return A list of projects currently available or an empty list if no projects have been found.
 */
List<GetAllProjects> getAllProjects();
//Algorithms and parameters
/**
 * Retrieves all currently available algorithm types.
 *
 * @return A list of algorithm types or an empty list if none have been found.
 */
List<GetAllAlgorithmTypes> getAllAlgorithmTypes();
/**
 * Retrieves all parameters currently available in the Back End.
 *
 * @return A list of currently available parameters or an empty list if no parameters have been found.
 */
List<GetAlgorithmParameters> getAllParameters();
/**
 * Retrieves all known parameters which are associated with the given algorithm type.
 *
 * @param algorithmType The algorithm type from which the known parameters should be retrieved. Must not
 * be <code>null</code>.
 * @return A list of known parameters or an empty list if given data is invalid.
 */
List<GetAlgorithmParameters> getParametersForAlgorithm(String algorithmType);
//Buildings
/**
 * Adds a new building to the database.
 *
 * @param addNewBuilding The building which should be added. Must not be <code>null</code>.
 * @return A response containing a String message if addition was successful and an associated building id.
 */
ResponseWrapper addNewBuilding(AddNewBuilding addNewBuilding);
/**
 * Retrieves all available buildings in the database.
 *
 * @return A list of buildings or an empty list if no buildings have been found.
 */
List<GetAllBuildings> getAllBuildings();
/**
 * Retrieves a building based on the given building identifier.
 *
 * @param buildingIdentifier The identifier of the wanted building. Must not be <code>null</code>.
 * @return A building which is associated with the given identifier or an empty result object if
 * given data was invalid.
 */
GetSingleBuilding getSingleBuilding(String buildingIdentifier);
/**
 * Overwrites an already available building based on the given building element.
 *
 * @param updateBuilding The building which should overwrite an already existing building.
 * Must not be <code>null</code>.
 * @return A String message describing the update status of the building.
 */
String updateBuilding(UpdateBuilding updateBuilding);
/**
 * Adds a new floor to a given building.
 *
 * @param buildingIdentifier The identifier of the building which is associated with the given floor.
 * Must not be <code>null</code>.
 * @param floorIdentifier The floor identifier which is associated with the given building. Must
 * not be <code>null</code>.
 * @param floorName The floor name. Must not be <code>null</code>.
 * @param floorMapFile The floor map file. Must not be <code>null</code>.
 * @return A String message describing whether adding the floor was successful or not.
 */
String addFloorToBuilding(String buildingIdentifier, String floorIdentifier, String floorName, MultipartFile floorMapFile);
/**
 * Retrieves a file which represents the floor map of the given floor identifier.
 *
 * @param floorIdentifier The floor identifier. Must not be <code>null</code>.
 * @return A file which represents the floor map, or <code>null</code> if no file has been found.
 */
File getFloorMap(String floorIdentifier);
/**
 * Deletes a building based on the given building identifier.
 *
 * @param buildingIdentifier The identifier of the given building. Must not be <code>null</code>.
 * @return A String message describing the deletion status of the given building.
 */
String deleteBuilding(String buildingIdentifier);
}
| |
package org.apache.hadoop.util.bloom;
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.util.AbstractCollection;
import java.util.Iterator;
import org.apache.hadoop.util.bloom.BloomFilterCommonTester.BloomFilterTestStrategy;
import org.apache.hadoop.util.hash.Hash;
import org.junit.Assert;
import org.junit.Test;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
/**
 * Tests the Hadoop Bloom filter variants ({@code BloomFilter}, {@code CountingBloomFilter},
 * {@code RetouchedBloomFilter} and {@code DynamicBloomFilter}) with both the Jenkins and
 * Murmur hash functions.
 */
public class TestBloomFilters {
    // Shared sizing parameters used by all test cases.
    int numInsertions = 1000;
    int bitSize = BloomFilterCommonTester.optimalNumOfBits(numInsertions, 0.03);
    int hashFunctionNumber = 5;

    // Known false-positive keys (for 1000 insertions) per hash function; used to
    // exercise RetouchedBloomFilter.selectiveClearing().
    private static final ImmutableMap<Integer, ? extends AbstractCollection<Key>> FALSE_POSITIVE_UNDER_1000 = ImmutableMap
        .of(Hash.JENKINS_HASH, new AbstractCollection<Key>() {
            final ImmutableList<Key> falsePositive = ImmutableList.<Key> of(
                new Key("99".getBytes()), new Key("963".getBytes()));

            @Override
            public Iterator<Key> iterator() {
                return falsePositive.iterator();
            }

            @Override
            public int size() {
                return falsePositive.size();
            }
        }, Hash.MURMUR_HASH, new AbstractCollection<Key>() {
            final ImmutableList<Key> falsePositive = ImmutableList.<Key> of(
                new Key("769".getBytes()), new Key("772".getBytes()),
                new Key("810".getBytes()), new Key("874".getBytes()));

            @Override
            public Iterator<Key> iterator() {
                return falsePositive.iterator();
            }

            @Override
            public int size() {
                return falsePositive.size();
            }
        });

    // Parity of the integer keys inserted in a test round: ODD starts at 1, EVEN at 0.
    private enum Digits {
        ODD(1), EVEN(0);

        private final int start;

        Digits(int start) {
            this.start = start;
        }

        int getStart() {
            return start;
        }
    }

    @Test
    public void testDynamicBloomFilter() {
        int hashId = Hash.JENKINS_HASH;
        Filter filter = new DynamicBloomFilter(bitSize, hashFunctionNumber,
            Hash.JENKINS_HASH, 3);
        BloomFilterCommonTester.of(hashId, numInsertions)
            .withFilterInstance(filter)
            .withTestCases(ImmutableSet.of(BloomFilterTestStrategy.KEY_TEST_STRATEGY,
                BloomFilterTestStrategy.ADD_KEYS_STRATEGY,
                BloomFilterTestStrategy.EXCEPTIONS_CHECK_STRATEGY,
                BloomFilterTestStrategy.WRITE_READ_STRATEGY,
                BloomFilterTestStrategy.ODD_EVEN_ABSENT_STRATEGY))
            .test();
        assertNotNull("testDynamicBloomFilter error ", filter.toString());
    }

    @Test
    public void testCountingBloomFilter() {
        int hashId = Hash.JENKINS_HASH;
        CountingBloomFilter filter = new CountingBloomFilter(bitSize,
            hashFunctionNumber, hashId);
        Key key = new Key(new byte[] { 48, 48 });

        filter.add(key);
        assertTrue("CountingBloomFilter.membership error ",
            filter.membershipTest(key));
        // Fixed: use assertEquals instead of assertTrue(x == n) so a failure
        // reports the expected and actual counts.
        Assert.assertEquals("CountingBloomFilter.approximateCount error",
            1, filter.approximateCount(key));

        filter.add(key);
        Assert.assertEquals("CountingBloomFilter.approximateCount error",
            2, filter.approximateCount(key));

        // Deleting one of the two insertions must keep the key a member.
        filter.delete(key);
        assertTrue("CountingBloomFilter.membership error ",
            filter.membershipTest(key));

        // Deleting the second insertion removes the key entirely.
        filter.delete(key);
        assertFalse("CountingBloomFilter.membership error ",
            filter.membershipTest(key));
        Assert.assertEquals("CountingBloomFilter.approximateCount error",
            0, filter.approximateCount(key));

        BloomFilterCommonTester.of(hashId, numInsertions)
            .withFilterInstance(filter)
            .withTestCases(ImmutableSet.of(BloomFilterTestStrategy.KEY_TEST_STRATEGY,
                BloomFilterTestStrategy.ADD_KEYS_STRATEGY,
                BloomFilterTestStrategy.EXCEPTIONS_CHECK_STRATEGY,
                BloomFilterTestStrategy.ODD_EVEN_ABSENT_STRATEGY,
                BloomFilterTestStrategy.WRITE_READ_STRATEGY,
                BloomFilterTestStrategy.FILTER_OR_STRATEGY,
                BloomFilterTestStrategy.FILTER_XOR_STRATEGY)).test();
    }

    @Test
    public void testRetouchedBloomFilterSpecific() {
        int numInsertions = 1000;
        int hashFunctionNumber = 5;

        ImmutableSet<Integer> hashes = ImmutableSet.of(Hash.MURMUR_HASH,
            Hash.JENKINS_HASH);

        for (Integer hashId : hashes) {
            RetouchedBloomFilter filter = new RetouchedBloomFilter(bitSize,
                hashFunctionNumber, hashId);

            // Exercise every removal scheme against both key parities; the and()
            // with an empty filter resets the filter between rounds.
            checkOnAbsentFalsePositive(hashId, numInsertions, filter, Digits.ODD,
                RemoveScheme.MAXIMUM_FP);
            filter.and(new RetouchedBloomFilter(bitSize, hashFunctionNumber, hashId));

            checkOnAbsentFalsePositive(hashId, numInsertions, filter, Digits.EVEN,
                RemoveScheme.MAXIMUM_FP);
            filter.and(new RetouchedBloomFilter(bitSize, hashFunctionNumber, hashId));

            checkOnAbsentFalsePositive(hashId, numInsertions, filter, Digits.ODD,
                RemoveScheme.MINIMUM_FN);
            filter.and(new RetouchedBloomFilter(bitSize, hashFunctionNumber, hashId));

            checkOnAbsentFalsePositive(hashId, numInsertions, filter, Digits.EVEN,
                RemoveScheme.MINIMUM_FN);
            filter.and(new RetouchedBloomFilter(bitSize, hashFunctionNumber, hashId));

            checkOnAbsentFalsePositive(hashId, numInsertions, filter, Digits.ODD,
                RemoveScheme.RATIO);
            filter.and(new RetouchedBloomFilter(bitSize, hashFunctionNumber, hashId));

            checkOnAbsentFalsePositive(hashId, numInsertions, filter, Digits.EVEN,
                RemoveScheme.RATIO);
            filter.and(new RetouchedBloomFilter(bitSize, hashFunctionNumber, hashId));
        }
    }

    /**
     * Inserts all keys of the given parity, selectively clears the known false positives
     * with the given removal scheme, and asserts that keys of the opposite parity are absent.
     */
    private void checkOnAbsentFalsePositive(int hashId, int numInsertions,
        final RetouchedBloomFilter filter, Digits digits, short removeScheme) {
        AbstractCollection<Key> falsePositives = FALSE_POSITIVE_UNDER_1000
            .get(hashId);

        // Fixed: braced the guard and corrected the failure message ("not founded").
        if (falsePositives == null) {
            Assert.fail(String.format("false positives for hash %d not found",
                hashId));
        }

        filter.addFalsePositive(falsePositives);

        for (int i = digits.getStart(); i < numInsertions; i += 2) {
            filter.add(new Key(Integer.toString(i).getBytes()));
        }

        for (Key key : falsePositives) {
            filter.selectiveClearing(key, removeScheme);
        }

        // Keys of the opposite parity were never inserted and must not be reported.
        for (int i = 1 - digits.getStart(); i < numInsertions; i += 2) {
            assertFalse(" testRetouchedBloomFilterAddFalsePositive error " + i,
                filter.membershipTest(new Key(Integer.toString(i).getBytes())));
        }
    }

    @Test
    public void testFiltersWithJenkinsHash() {
        int hashId = Hash.JENKINS_HASH;

        BloomFilterCommonTester.of(hashId, numInsertions)
            .withFilterInstance(new BloomFilter(bitSize, hashFunctionNumber, hashId))
            .withFilterInstance(new RetouchedBloomFilter(bitSize, hashFunctionNumber, hashId))
            .withTestCases(ImmutableSet.of(BloomFilterTestStrategy.KEY_TEST_STRATEGY,
                BloomFilterTestStrategy.ADD_KEYS_STRATEGY,
                BloomFilterTestStrategy.EXCEPTIONS_CHECK_STRATEGY,
                BloomFilterTestStrategy.ODD_EVEN_ABSENT_STRATEGY,
                BloomFilterTestStrategy.WRITE_READ_STRATEGY,
                BloomFilterTestStrategy.FILTER_OR_STRATEGY,
                BloomFilterTestStrategy.FILTER_AND_STRATEGY,
                BloomFilterTestStrategy.FILTER_XOR_STRATEGY)).test();
    }

    @Test
    public void testFiltersWithMurmurHash() {
        int hashId = Hash.MURMUR_HASH;

        BloomFilterCommonTester.of(hashId, numInsertions)
            .withFilterInstance(new BloomFilter(bitSize, hashFunctionNumber, hashId))
            .withFilterInstance(new RetouchedBloomFilter(bitSize, hashFunctionNumber, hashId))
            .withTestCases(ImmutableSet.of(BloomFilterTestStrategy.KEY_TEST_STRATEGY,
                BloomFilterTestStrategy.ADD_KEYS_STRATEGY,
                BloomFilterTestStrategy.EXCEPTIONS_CHECK_STRATEGY,
                BloomFilterTestStrategy.ODD_EVEN_ABSENT_STRATEGY,
                BloomFilterTestStrategy.WRITE_READ_STRATEGY,
                BloomFilterTestStrategy.FILTER_OR_STRATEGY,
                BloomFilterTestStrategy.FILTER_AND_STRATEGY,
                BloomFilterTestStrategy.FILTER_XOR_STRATEGY)).test();
    }
}
| |
/*
* The MIT License
*
* Copyright (c) 2004-2009, Sun Microsystems, Inc., Kohsuke Kawaguchi
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package hudson.slaves;
import com.gargoylesoftware.htmlunit.html.HtmlPage;
import hudson.Proc;
import hudson.Util;
import hudson.model.Computer;
import hudson.model.Node;
import hudson.model.Node.Mode;
import hudson.model.Slave;
import hudson.remoting.Which;
import hudson.util.ArgumentListBuilder;
import jenkins.security.SlaveToMasterCallable;
import jenkins.slaves.RemotingWorkDirSettings;
import org.junit.Assume;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.jvnet.hudson.test.JenkinsRule;
import org.jvnet.hudson.test.SmokeTest;
import org.jvnet.hudson.test.TestExtension;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeUnit;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
import java.awt.*;
import static org.hamcrest.Matchers.instanceOf;
import org.junit.rules.TemporaryFolder;
import org.jvnet.hudson.test.Issue;
import org.jvnet.hudson.test.recipes.LocalData;
/**
* Tests of {@link JNLPLauncher}.
* @author Kohsuke Kawaguchi
*/
@Category(SmokeTest.class)
public class JNLPLauncherTest {
// Embedded Jenkins instance; recreated for every test case.
@Rule public JenkinsRule j = new JenkinsRule();
// Scratch space for agent work directories; cleaned up automatically after each test.
@Rule public TemporaryFolder tmpDir = new TemporaryFolder();
/**
 * Starts a JNLP agent and makes sure it successfully connects to Jenkins.
 */
@Test
public void testLaunch() throws Exception {
    Assume.assumeFalse("Skipping JNLPLauncherTest.testLaunch because we are running headless", GraphicsEnvironment.isHeadless());
    Computer computer = addTestSlave(false);
    launchJnlpAndVerify(computer, buildJnlpArgs(computer));
}
/**
 * Starts a JNLP agent with an explicit {@code -workDir} and makes sure it connects and
 * creates the remoting work directory.
 */
@Test
@Issue("JENKINS-39370")
public void testLaunchWithWorkDir() throws Exception {
    // Fixed: the assume message previously named testLaunch (copy-paste error).
    Assume.assumeFalse("Skipping JNLPLauncherTest.testLaunchWithWorkDir because we are running headless", GraphicsEnvironment.isHeadless());
    File workDir = tmpDir.newFolder("workDir");
    Computer c = addTestSlave(false);
    launchJnlpAndVerify(c, buildJnlpArgs(c).add("-workDir", workDir.getAbsolutePath()));
    assertTrue("Remoting work dir should have been created", new File(workDir, "remoting").exists());
}
/**
 * Tests the '-headless' option.
 * (Although this test doesn't really assert that the agent really is running in a headless mode.)
 */
@Test
public void testHeadlessLaunch() throws Exception {
    Computer computer = addTestSlave(false);
    launchJnlpAndVerify(computer, buildJnlpArgs(computer).add("-arg","-headless"));
    // make sure that onOffline gets called just the right number of times
    assertEquals(1, ComputerListener.all().get(ListenerImpl.class).offlined);
}
@Test
@Issue("JENKINS-44112")
public void testHeadlessLaunchWithWorkDir() throws Exception {
    // Fixed: the assume message previously named testLaunch (copy-paste error).
    Assume.assumeFalse("Skipping JNLPLauncherTest.testHeadlessLaunchWithWorkDir because we are running headless", GraphicsEnvironment.isHeadless());
    Computer c = addTestSlave(true);
    launchJnlpAndVerify(c, buildJnlpArgs(c).add("-arg","-headless"));
    assertEquals(1, ComputerListener.all().get(ListenerImpl.class).offlined);
}
@Test
@Issue("JENKINS-39370")
public void testHeadlessLaunchWithCustomWorkDir() throws Exception {
    // Fixed: the assume message previously named testLaunch (copy-paste error).
    Assume.assumeFalse("Skipping JNLPLauncherTest.testHeadlessLaunchWithCustomWorkDir because we are running headless", GraphicsEnvironment.isHeadless());
    File workDir = tmpDir.newFolder("workDir");
    Computer c = addTestSlave(false);
    launchJnlpAndVerify(c, buildJnlpArgs(c).add("-arg","-headless", "-workDir", workDir.getAbsolutePath()));
    assertEquals(1, ComputerListener.all().get(ListenerImpl.class).offlined);
}
@Test
@LocalData
@Issue("JENKINS-44112")
public void testNoWorkDirMigration() throws Exception {
    // The @LocalData fixture provides a pre-44112 agent named "Foo".
    Computer computer = j.jenkins.getComputer("Foo");
    assertThat(computer, instanceOf(SlaveComputer.class));
    SlaveComputer slaveComputer = (SlaveComputer) computer;
    ComputerLauncher launcher = slaveComputer.getLauncher();
    assertThat(launcher, instanceOf(JNLPLauncher.class));
    JNLPLauncher jnlpLauncher = (JNLPLauncher) launcher;
    assertNotNull("Work Dir Settings should be defined",
        jnlpLauncher.getWorkDirSettings());
    assertTrue("Work directory should be disabled for the migrated agent",
        jnlpLauncher.getWorkDirSettings().isDisabled());
}
@Test
@Issue("JENKINS-44112")
@SuppressWarnings("deprecation")
public void testDefaults() throws Exception {
    // The deprecated no-arg constructor must keep the legacy (disabled) work-dir behavior.
    JNLPLauncher legacyLauncher = new JNLPLauncher();
    assertTrue("Work directory should be disabled for agents created via old API",
        legacyLauncher.getWorkDirSettings().isDisabled());
}
@Test
@Issue("JENKINS-47056")
public void testDelegatingComputerLauncher() throws Exception {
    Assume.assumeFalse("Skipping JNLPLauncherTest.testDelegatingComputerLauncher because we are running headless", GraphicsEnvironment.isHeadless());
    File workDir = tmpDir.newFolder("workDir");
    // Wrap the JNLP launcher so work-dir settings must be discovered through the delegate.
    ComputerLauncher jnlpLauncher = new JNLPLauncher("", "", new RemotingWorkDirSettings(false, workDir.getAbsolutePath(), "internalDir", false));
    Computer computer = addTestSlave(new DelegatingComputerLauncherImpl(jnlpLauncher));
    launchJnlpAndVerify(computer, buildJnlpArgs(computer));
    assertTrue("Remoting work dir should have been created", new File(workDir, "internalDir").exists());
}
@Test
@Issue("JENKINS-47056")
public void testComputerLauncherFilter() throws Exception {
    Assume.assumeFalse("Skipping JNLPLauncherTest.testComputerLauncherFilter because we are running headless", GraphicsEnvironment.isHeadless());
    File workDir = tmpDir.newFolder("workDir");
    // Wrap the JNLP launcher in a filter so work-dir settings must be discovered through it.
    ComputerLauncher jnlpLauncher = new JNLPLauncher("", "", new RemotingWorkDirSettings(false, workDir.getAbsolutePath(), "internalDir", false));
    Computer computer = addTestSlave(new ComputerLauncherFilterImpl(jnlpLauncher));
    launchJnlpAndVerify(computer, buildJnlpArgs(computer));
    assertTrue("Remoting work dir should have been created", new File(workDir, "internalDir").exists());
}
// Registered only for testHeadlessLaunch; counts onOffline callbacks so the test can
// assert the listener fires exactly once.
@TestExtension("testHeadlessLaunch")
public static class ListenerImpl extends ComputerListener {
// Number of onOffline notifications observed; read directly by testHeadlessLaunch.
int offlined = 0;
@Override
public void onOffline(Computer c) {
offlined++;
assertTrue(c.isOffline());
}
}
// Minimal concrete DelegatingComputerLauncher used to verify that work-dir settings
// are found through a delegating wrapper (JENKINS-47056).
private static class DelegatingComputerLauncherImpl extends DelegatingComputerLauncher {
public DelegatingComputerLauncherImpl(ComputerLauncher launcher) {
super(launcher);
}
}
// Minimal concrete ComputerLauncherFilter used to verify that work-dir settings
// are found through a filtering wrapper (JENKINS-47056).
private static class ComputerLauncherFilterImpl extends ComputerLauncherFilter {
public ComputerLauncherFilterImpl(ComputerLauncher launcher) {
super(launcher);
}
}
/**
 * Builds the command line that launches a headless JNLP agent (via the netx runtime)
 * against the given computer's {@code slave-agent.jnlp} endpoint, appending any
 * work-dir flags configured on the computer's {@link JNLPLauncher}.
 */
private ArgumentListBuilder buildJnlpArgs(Computer c) throws Exception {
    ArgumentListBuilder args = new ArgumentListBuilder();
    args.add(new File(new File(System.getProperty("java.home")),"bin/java").getPath(),"-jar");
    // Locate the netx JNLP runtime jar on the test classpath.
    args.add(Which.jarFile(netx.jnlp.runtime.JNLPRuntime.class).getAbsolutePath());
    args.add("-headless","-basedir");
    args.add(j.createTmpDir());
    args.add("-nosecurity","-jnlp", j.getURL() + "computer/"+c.getName()+"/slave-agent.jnlp");
    if (c instanceof SlaveComputer) {
        SlaveComputer sc = (SlaveComputer)c;
        ComputerLauncher launcher = sc.getLauncher();
        if (launcher instanceof JNLPLauncher) {
            // Propagate -workDir/-internalDir etc. so the agent honors the configured settings.
            args.add(((JNLPLauncher)launcher).getWorkDirSettings().toCommandLineArgs(sc));
        }
    }
    return args;
}
/**
 * Launches the JNLP slave agent and asserts its basic operations.
 *
 * Waits up to 20 seconds for the agent to connect, runs a trivial remote task
 * over the channel, then kills the agent process and checks the computer
 * reports offline again.
 */
private void launchJnlpAndVerify(Computer c, ArgumentListBuilder args) throws Exception {
    Proc proc = j.createLocalLauncher().launch().cmds(args).stdout(System.out).pwd(".").start();
    try {
        // verify that the connection is established, up to 20 secs
        for( int i=0; i<200; i++ ) {
            Thread.sleep(100);
            if(!c.isOffline())
                break;
        }
        if (c.isOffline()) {
            // Dump the agent log to aid diagnosing the failed connection.
            System.out.println(c.getLog());
            fail("Slave failed to go online");
        }
        // run some trivial thing
        System.err.println("Calling task...");
        assertEquals("done", c.getChannel().callAsync(new NoopTask()).get(5 * 60, TimeUnit.SECONDS));
        System.err.println("...done.");
    } finally {
        proc.kill();
    }
    // Give the channel a moment to notice the process died before asserting offline.
    Thread.sleep(500);
    assertTrue(c.isOffline());
}
/**
 * Adds a JNLP {@link Slave} to the system and returns it.
 *
 * @param enableWorkDir whether the agent's remoting work directory is enabled.
 */
private Computer addTestSlave(boolean enableWorkDir) throws Exception {
    return addTestSlave(new JNLPLauncher(enableWorkDir));
}
/**
 * Adds a JNLP {@link Slave} named "test" with the given launcher and returns its computer.
 */
private Computer addTestSlave(ComputerLauncher launcher) throws Exception {
    List<Node> slaves = new ArrayList<Node>(j.jenkins.getNodes());
    File dir = Util.createTempDir();
    slaves.add(new DumbSlave("test","dummy",dir.getAbsolutePath(),"1", Mode.NORMAL, "",
            launcher, RetentionStrategy.INSTANCE, new ArrayList<NodeProperty<?>>()));
    // Replace the node list wholesale so Jenkins picks up the new agent.
    j.jenkins.setNodes(slaves);
    Computer c = j.jenkins.getComputer("test");
    assertNotNull(c);
    return c;
}
/** Trivial slave-to-master task used to prove the agent channel is operational. */
private static class NoopTask extends SlaveToMasterCallable<String,RuntimeException> {
    public String call() {
        return "done";
    }
    private static final long serialVersionUID = 1L;
}
@Test
public void testConfigRoundtrip() throws Exception {
    // Round-trips a JNLPLauncher through the agent "configure" web form and checks
    // that tunnel/vmargs and custom work-dir settings survive the submission.
    DumbSlave s = j.createSlave();
    JNLPLauncher original = new JNLPLauncher("a", "b");
    s.setLauncher(original);
    // A newly set launcher starts out with the enabled work-dir defaults.
    j.assertEqualDataBoundBeans(((JNLPLauncher) s.getLauncher()).getWorkDirSettings(), RemotingWorkDirSettings.getEnabledDefaults());
    RemotingWorkDirSettings custom = new RemotingWorkDirSettings(false, null, "custom", false);
    ((JNLPLauncher) s.getLauncher()).setWorkDirSettings(custom);
    HtmlPage p = j.createWebClient().getPage(s, "configure");
    j.submit(p.getFormByName("config"));
    j.assertEqualBeans(original,s.getLauncher(),"tunnel,vmargs");
    // The custom settings must survive the form round-trip unchanged.
    j.assertEqualDataBoundBeans(((JNLPLauncher) s.getLauncher()).getWorkDirSettings(), custom);
}
}
| |
package eu.atos.sla.service.rest;
import java.util.Date;
import java.util.UUID;
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;
import eu.atos.sla.service.rest.helpers.ViolationHelper;
import eu.atos.sla.service.rest.helpers.exception.HelperException;
import eu.atos.sla.service.types.DateParam;
/**
* Rest Service that exposes all the stored information of the SLA core.
*
* A violation is serialized as:
*
*<pre>
* {@code
* <violation>
* <uuid>ce0e148f-dfac-4492-bb26-ad2e9a6965ec</uuid>
* <contract_uuid>agreement04</contract_uuid>
* <service_scope></service_scope>
* <metric_name>Performance</metric_name>
* <datetime>2014-01-14T11:28:22Z</datetime>
* <actual_value>0.09555700123360344</actual_value>
* </violation>
* }
* or
* {
* "uuid": "e0e148f-dfac-4492-bb26-ad2e9a6965ec",
* "contract_uuid": "agreement04",
* "service_scope" : "",
* "metric_name" : "Performance",
* "datetime" : "2014-01-14T11:28:22Z"
* "actual_value": "0.09555700123360344"
* }
*
* </pre>
* @author Pedro Rey
*/
@Deprecated
@Path("/violationso")
@Component
@Scope("request")
public class ViolationRest extends AbstractSLARest {

    @Context
    HttpHeaders headers;

    @Autowired
    private ViolationHelper helper;

    // NOTE(review): kept public and non-final for backward compatibility with any
    // external references; new code would declare this private static final.
    public static Logger logger = LoggerFactory.getLogger(ViolationRest.class);

    public ViolationRest() {
    }

    private ViolationHelper getViolationHelper() {
        return helper;
    }

    /**
     * Returns the information of an specific violation given an uuid If the
     * violation it is not in the database, it returns 404 with empty payload
     *
     * <pre>
     * GET /violations/{violation_uuid}
     *
     * Request:
     *   GET /violation HTTP/1.1
     *   Accept: application/xml
     *
     * Response:
     *   HTTP/1.1 200 OK
     *   Content-type: application/xml
     *
     *   {@code
     *   <?xml version="1.0" encoding="UTF-8"?>
     *   <violation>...</violation>
     *   }
     *
     * </pre>
     *
     * Example:
     * <li>curl
     * http://localhost:8080/sla-service/violations/?agrementId=agreement04</li>
     *
     * @return violations according to parameters in the query string.
     */
    @GET
    @Path("{uuid}")
    @Produces(MediaType.APPLICATION_XML)
    public Response getViolationByUuid(@PathParam("uuid") UUID violationUuid) {
        logger.debug("StartOf getViolationByUuid - REQUEST for /violations/" + violationUuid);
        String serializedViolations = null;
        ViolationHelper violationRestHelper = getViolationHelper();
        try {
            serializedViolations = violationRestHelper.getViolationByUUID(violationUuid);
        } catch (HelperException e) {
            logger.info("getViolationByUuid exception:" + e.getMessage());
            return buildResponse(e);
        }
        logger.debug("EndOf getViolationByUuid");
        return buildResponse(200, serializedViolations);
    }

    /**
     * Search violations given an agreementId as query string.
     *
     * If no parameters specified, return all violations.
     *
     * <pre>
     * GET /violations{?agreementId,guaranteeTerm,providerId,begin,end}
     *
     * Request:
     *   GET /violation HTTP/1.1
     *   Accept: application/xml
     *
     * Response:
     *   HTTP/1.1 200 OK
     *   Content-type: application/xml
     *
     *   {@code
     *   <?xml version="1.0" encoding="UTF-8"?>
     *   <collection href="/violations">
     *     <items offset="0" total="1">
     *       <violation>...</violation>
     *       <violation>...</violation>
     *       ...
     *     </items>
     *   </collection>
     *   }
     *
     * </pre>
     *
     * Examples:
     * <li>curl -H"Accept: application/xml"
     * http://localhost:8080/sla-service/violations/?agrementId=agreement04&guaranteeTerm=gt_uptime</li>
     * <li>curl "Content-type: application/json"
     * http://localhost:8080/sla-service/violations/?providerId=agreement04&begin=2014-03-18T15:23:00</li>
     *
     * @param agreementId
     * @param guaranteeTerm
     * @param providerUuid
     * @param begin
     * @param end
     * @return violations according to parameters in the query string.
     */
    @GET
    @Consumes(MediaType.APPLICATION_XML)
    @Produces(MediaType.APPLICATION_XML)
    public Response getViolationsXML(
            @QueryParam("agreementId") String agreementId,
            @QueryParam("guaranteeTerm") String guaranteeTerm,
            @QueryParam("providerId") String providerUuid,
            @QueryParam("begin") DateParam begin, @QueryParam("end") DateParam end) {
        logger.debug("StartOf getViolationsXML REQUEST for /violations/?agreementId=\"\"&guaranteeTerm=\"\"&providerUuid=\"\"&begin=\"\"&end");
        logger.debug("Accept: application/xml");
        return queryViolations(agreementId, guaranteeTerm, providerUuid, begin, end,
                MediaType.APPLICATION_XML, "getViolationsXML");
    }

    /**
     * Search violations given an agreementId as query string.
     *
     * If no parameters specified, return all violations.
     *
     * <pre>
     * GET /violations
     *
     * Request:
     *   GET /violation HTTP/1.1
     *   Accept: application/json
     *
     * Response:
     *   HTTP/1.1 200 OK
     *   Content-type: application/json
     *
     *   {@code
     *   [
     *     {...},
     *     {...} ...
     *     ...
     *   ]
     *   }
     *
     * </pre>
     *
     * Example:
     * <li>curl "Content-type: application/json"
     * http://localhost:8080/sla-service/violations/?agrementId=agreement04</li>
     *
     * @return violations according to parameters in the query string.
     */
    @GET
    @Consumes(MediaType.APPLICATION_JSON)
    @Produces(MediaType.APPLICATION_JSON)
    public Response getViolationsJson(
            @QueryParam("agreementId") String agreementId,
            @QueryParam("guaranteeTerm") String guaranteeTerm,
            // NOTE(review): the XML endpoint binds this to "providerId" while this one
            // uses "providerUuid" — looks unintentional, but renaming would change the
            // public API, so it is kept as-is. TODO confirm against API consumers.
            @QueryParam("providerUuid") String providerUuid,
            @QueryParam("begin") DateParam begin, @QueryParam("end") DateParam end) {
        logger.debug("StartOf getViolationsJson REQUEST for /violations/?agreementId=\"\"&guaranteeTerm=\"\"&providerUuid=\"\"&begin=\"\"&end");
        logger.debug("Accept: application/json");
        return queryViolations(agreementId, guaranteeTerm, providerUuid, begin, end,
                MediaType.APPLICATION_JSON, "getViolationsJson");
    }

    /**
     * Shared implementation for the XML and JSON violation queries: converts the
     * optional date parameters, delegates to the helper for the given media type
     * and wraps the serialized result (or the helper error) in a Response.
     *
     * @param methodName name of the public endpoint, used only for log messages.
     */
    private Response queryViolations(String agreementId, String guaranteeTerm,
            String providerUuid, DateParam begin, DateParam end,
            String mediaType, String methodName) {
        Date dBegin = (begin == null) ? null : begin.getDate();
        Date dEnd = (end == null) ? null : end.getDate();
        ViolationHelper violationRestHelper = getViolationHelper();
        String serializedViolations;
        try {
            serializedViolations = violationRestHelper.getViolations(
                    agreementId, guaranteeTerm, providerUuid, dBegin, dEnd, mediaType);
        } catch (HelperException e) {
            logger.info(methodName + " exception:" + e.getMessage());
            return buildResponse(e);
        }
        logger.debug("EndOf " + methodName);
        return buildResponse(200, serializedViolations);
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.waveprotocol.wave.client.editor.content.paragraph;
import static org.waveprotocol.wave.client.editor.content.paragraph.Line.DIRTY;
import com.google.common.annotations.VisibleForTesting;
import org.waveprotocol.wave.client.editor.content.ContentElement;
import org.waveprotocol.wave.client.editor.content.ContentNode;
import org.waveprotocol.wave.model.document.indexed.LocationMapper;
import org.waveprotocol.wave.model.util.CollectionUtils;
import org.waveprotocol.wave.model.util.IdentityMap;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
/**
* Encapsulates the logic for renumbering ordered list items. Tries to do the
* minimum amount of work.
*
* When nodes are changed in a way that might possibly require renumbering,
* users must call {@link #markDirty(ContentElement, String)} for new or updated
* nodes and {@link #markRemoved(ContentElement)} for removed nodes. To actually
* do the renumbering, call {@link #renumberAll()}.
*
* Guarantees at worst linear time (in the size of the document) but in most
* cases should be linear in the number of nodes actually requiring renumbering.
*
* @author danilatos@google.com (Daniel Danilatos)
*/
public class OrderedListRenumberer {

  /**
   * Keeps track of the current number at each level of indentation, for a mode
   * of traversal going forward through the document. For example, when going
   * forward, reducing the level of indentation resets the numbers of higher
   * levels of indentation.
   */
  static class LevelNumbers {
    final int[] numbers = new int[Paragraph.MAX_INDENT + 1];
    int currentLevel = 0;

    // Every level starts counting at 1.
    {
      for (int i = 0; i < numbers.length; i++) {
        numbers[i] = 1;
      }
    }

    LevelNumbers(int initialLevel, int initialNumber) {
      setLevel(initialLevel);
      setNumber(initialNumber);
    }

    void setLevel(int level) {
      // Entering a deeper level resets the counters of all levels in between.
      for (int i = currentLevel + 1; i <= level; i++) {
        numbers[i] = 1;
      }
      currentLevel = level;
    }

    void setNumber(int number) {
      numbers[currentLevel] = number;
    }

    int getNumberAndIncrement() {
      assert numbers[currentLevel] != 0;
      return numbers[currentLevel]++;
    }

    @Override
    public String toString() {
      // Fixed: numbers.toString() on an int[] prints an identity hash; use
      // Arrays.toString to show the actual contents.
      return "current=" + currentLevel + ", " + Arrays.toString(numbers);
    }
  }

  /**
   * A wrapper around Line that contains cached information for sorting
   * purposes. (Some of these properties are expensive to compute, so we get a
   * significant speedup by calculating them up front).
   *
   * See {@link #sortedLines()} for details on sorting order.
   */
  static class ComparableLine implements Comparable<ComparableLine> {
    final Line line;
    final int docId;
    final int minIndent;
    final int location;

    private ComparableLine(Line line, int docId, int minIndent, int location) {
      this.line = line;
      this.docId = docId;
      this.minIndent = minIndent;
      this.location = location;
    }

    @Override
    public int compareTo(ComparableLine d2) {
      // Partition by documents first
      if (this.docId != d2.docId) {
        return this.docId - d2.docId;
      }
      // Then by indentation level
      if (this.minIndent != d2.minIndent) {
        return this.minIndent - d2.minIndent;
      }
      // Finally by document order
      return this.location - d2.location;
    }
  }

  /**
   * Renderer for when a renumbering occurs.
   */
  private final ParagraphHtmlRenderer htmlRenderer;

  /**
   * Map of element to minimum affected indentation level. The interpretation is
   * that something happened at the given element, at the 'importance' of the
   * stored level.
   */
  private final IdentityMap<ContentElement, Integer> dirtyElements =
      CollectionUtils.createIdentityMap();

  /** Used to avoid a short-circuit optimisation when testing */
  @VisibleForTesting boolean updateHtmlEvenWhenNullImplNodelet = false;

  OrderedListRenumberer(ParagraphHtmlRenderer htmlRenderer) {
    this.htmlRenderer = htmlRenderer;
  }

  /**
   * Marks the given element as having changed in a way that might affect
   * numbering in some way.
   *
   * @param oldIndent the indent attribute the element had before the change
   *        that necessitated calling this method.
   */
  public void markDirty(ContentElement paraElement, String oldIndent) {
    int indent = Paragraph.getIndent(oldIndent);
    markDirty(paraElement, indent);
  }

  private void markDirty(ContentElement paraElement, int indent) {
    assert paraElement.isContentAttached() : paraElement + " not attached!";
    // Record only the minimum (most important) indent seen for this element.
    if (!dirtyElements.has(paraElement) || dirtyElements.get(paraElement) > indent) {
      dirtyElements.put(paraElement, indent);
    }
  }

  /**
   * Marks the given line as having been removed.
   */
  public void markRemoved(ContentElement elem) {
    Line line = Line.fromParagraph(elem);
    if (line == null) {
      // Not in a line structure. See comment in sortedLines().
      // Unfortunately we need this check in two places.
      return;
    }
    Line next = line.next();
    if (next != null) {
      // Mark the next one dirty, but preserving the minimum indent between the
      // removed line and the next line.
      markDirty(next.getParagraph(),
          dirtyElements.has(line.getParagraph()) ? minIndent(line) : line.getIndent());
    }
  }

  /**
   * @return true if a renumbering is needed.
   */
  public boolean renumberNeeded() {
    return !dirtyElements.isEmpty();
  }

  /**
   * Renumber everything in one go.
   */
  public void renumberAll() {
    List<ComparableLine> lines = sortedLines();
    for (ComparableLine data : lines) {
      Line line = data.line;
      // a renumber might remove several elements from the dirtyElements map
      if (isDirty(line)) {
        renumber(line);
      }
    }
    assert checkDirtyElementsContainsOnlyObsoleteLines();
    dirtyElements.clear();
  }

  /**
   * Perform a renumbering in the vicinity of the given line. This might end up
   * renumbering many other lines that were marked as dirty.
   */
  private void renumber(final Line aroundLine) {
    // Short cut
    if (aroundLine.getParagraph().getImplNodelet() == null && !updateHtmlEvenWhenNullImplNodelet) {
      // bail if no impl nodelet, the node might be shelved
      markClean(aroundLine);
      return;
    }

    assert aroundLine.getCachedNumberValue() == DIRTY && isDirty(aroundLine);

    int minimumIndent = minIndent(aroundLine);
    Line startingLine = aroundLine;
    int startingNumber = 1;

    // Determine starting line and number by going backwards with the loop.
    // If we find a clean, numbered line at the same level, break early and
    // take that as the the starting line and number. Otherwise, break once
    // we hit a superior line, and take the one just after it as the starting
    // line, (and with a starting number of 1).
    Line line = aroundLine;
    while (line != null) {
      RelativeImportance importance =
          importance(minimumIndent, line.isDecimalListItem(), line.getIndent());
      if (importance == RelativeImportance.MATCH) {
        if (line.getCachedNumberValue() != DIRTY) {
          // Note, in this case, the startingLine will be renumbered
          // redundantly (but this makes the logic simpler).
          startingLine = line;
          startingNumber = line.getCachedNumberValue();
          break;
        }
      } else if (importance == RelativeImportance.SUPERIOR) {
        break;
      }
      startingLine = line;
      line = line.previous();
    }
    assert startingLine != null;

    // Do the actual renumbering
    renumberRange(startingLine, minimumIndent, startingNumber);

    assert !isDirty(aroundLine);
  }

  /**
   * Renumbers a contiguous range of lines, marking them as clean.
   *
   * (Split out as a separate inner method so it's easier to see what variables
   * are needed from the previous step).
   *
   * @param startingLine initial line to begin renumbering from.
   * @param minimumIndent minimum indent within which to renumber. Renumbering
   *        will stop when the decision relative to this indent is superior
   *        (exited into a more "important" section).
   * @param startingNumber initial number for the starting line (minimum 1).
   */
  private void renumberRange(Line startingLine, int minimumIndent, int startingNumber) {
    assert startingNumber >= 1;
    Line line = startingLine;
    int currentIndent = line.getIndent();
    LevelNumbers numbers = new LevelNumbers(currentIndent, startingNumber);
    assert minimumIndent == currentIndent || startingNumber == 1;
    while (line != null) {
      int prevIndent = currentIndent;
      currentIndent = line.getIndent();
      boolean isNumbered = line.isDecimalListItem();
      numbers.setLevel(currentIndent);
      if (importance(minimumIndent, isNumbered, currentIndent) == RelativeImportance.SUPERIOR &&
          line != startingLine) {
        // Break if we've reached a higher importance run.
        // Note the special case to avoid breaking for the first element, as
        // it being dirty might have affected subsequent lines that will need
        // to be renumbered.
        break;
      }
      if (isNumbered) {
        // Renumber numbered items
        int num = numbers.getNumberAndIncrement();
        if (line.getCachedNumberValue() != num) {
          line.setCachedNumberValue(num);
          htmlRenderer.updateListValue(line.getParagraph(), num);
        }
        // Expensive assert for debugging
        // assert debugHasCorrectNumber(line);
      } else {
        // Reset the count for anything non-numbered
        numbers.setNumber(1);
      }
      markClean(line);
      line = line.next();
    }
  }

  /**
   * Grab all the dirty lines, culling obsolete ones, and return them in a
   * special order.
   *
   * Order first by owning document, then by indentation order (least indented
   * to most) then by document traversal order. We have this fancy ordering so
   * that the renumbering logic can make assumptions that make the code simpler
   * and linear in complexity.
   */
  private List<ComparableLine> sortedLines() {
    final List<ComparableLine> list = new ArrayList<ComparableLine>();
    final int[] nextDocId = new int[1];
    final IdentityMap<LocationMapper<?>, Integer> docIds = CollectionUtils.createIdentityMap();

    // Add them all to the list in one go at the rendering stage, to avoid
    // potential issues of elements being re-ordered, removed, etc since the
    // time they were originally placed in the map. We're also going to
    // pre-compute the comparison information, as it's relatively expensive
    // and so better to do it once up-front for each line rather than log(n)
    // times during the sort.
    dirtyElements.each(new IdentityMap.ProcV<ContentElement, Integer>() {
      @Override public void apply(ContentElement paraElement, Integer oldIndent) {
        // If there's no line element, then treat it as a default case with no
        // numbering. So we don't do anything about it. Also ignore if the
        // element is no longer attached.
        assert paraElement != null;
        Line line = getLineIfRelevant(paraElement);
        if (line != null) {
          line.setCachedNumberValue(DIRTY);
          LocationMapper<ContentNode> mapper = paraElement.getLocationMapper();
          int docId;
          if (!docIds.has(mapper)) {
            docId = nextDocId[0]++;
            docIds.put(mapper, docId);
          } else {
            docId = docIds.get(mapper);
          }
          list.add(new ComparableLine(line, docId, minIndent(line),
              mapper.getLocation(line.getLineElement())));
        }
      }
    });
    Collections.sort(list);
    return list;
  }

  /**
   * @return the minimum indent the given line ever had since the last
   *         renumbering
   */
  private int minIndent(Line line) {
    assert isDirty(line);
    return Math.min(line.getIndent(), dirtyElements.get(line.getParagraph()));
  }

  private boolean isDirty(Line line) {
    return dirtyElements.has(line.getParagraph());
  }

  private void markClean(Line line) {
    dirtyElements.remove(line.getParagraph());
  }

  /**
   * See {@link #importance(int, boolean, int)} for details on use and each enum
   * instance for details on specifics.
   */
  static enum RelativeImportance {
    /** The element is part of a superior indentation sequence */
    SUPERIOR,
    /** The element is part of an inferior indentation sequence */
    INFERIOR,
    /** The element is a match */
    MATCH
  }

  /**
   * Relative to the {@code relativeToIndent}, decide the 'relative importance'
   * of a line based on its properties (passed as the remaining parameters).
   *
   * See {@link RelativeImportance} for details on each possibility.
   */
  static RelativeImportance importance(int relativeToIndent, boolean isNumberedItem, int indent) {
    if (indent == relativeToIndent && isNumberedItem) {
      return RelativeImportance.MATCH;
    }
    // NOTE(danilatos): Ideally we'd have this behaviour as well, but it requires
    // complicated & potentially expensive re-numbering rules
    // if (!ParagraphBehaviour.isList(type) && line.paragraph.getFirstChild() == null) {
    //   // Skip over non-list, empty paragraphs. I.e. invisible paragraphs.
    // } else if (indent <= startingIndent) {
    if (indent <= relativeToIndent) {
      return RelativeImportance.SUPERIOR;
    }
    return RelativeImportance.INFERIOR;
  }

  /**
   * Decide if a paragraph element is relevant, and if so, return the associated
   * line. Otherwise return null.
   *
   * This method defines the logic of whether or not to ignore a dirty element.
   */
  private Line getLineIfRelevant(ContentElement paraElement) {
    // Check that the element is still in the DOM, and that it has
    // an associated line. If it does not, we'll assume that in this case,
    // we're in some other thing that's using ParagraphRenderer but doesn't
    // have a line structure associated, such as a caption. (Instead of
    // checking and asserting that a paragraph must have an associated line).
    return paraElement.isContentAttached() ? Line.fromParagraph(paraElement) : null;
  }

  //////////////////////////////////
  //// Methods used for testing only

  boolean checkDirtyElementsContainsOnlyObsoleteLines() {
    dirtyElements.each(new IdentityMap.ProcV<ContentElement, Integer>() {
      @Override public void apply(ContentElement paraElement, Integer oldIndent) {
        if (getLineIfRelevant(paraElement) != null) {
          throw new AssertionError("Non-obsolete line found in dirtyElements after renumbering ");
        }
      }
    });
    // return a boolean so we can call this from an assert statement.
    return true;
  }

  @SuppressWarnings("unused") // Called from an expensive assertion that
  // is commented out by default for performance reasons. Generally useful
  // for debugging.
  private boolean debugHasCorrectNumber(Line line) {
    if (!line.isDecimalListItem()) {
      return true;
    }
    Line prev = debugNextNumberedItem(line, false, line.getIndent());
    boolean ret;
    if (prev == null) {
      ret = line.getCachedNumberValue() == 1;
    } else {
      // A preceding numbered item at the same level must already be renumbered.
      assert prev.getCachedNumberValue() != DIRTY;
      ret = line.getCachedNumberValue() == prev.getCachedNumberValue() + 1;
    }
    return ret;
  }

  private Line debugNextNumberedItem(
      Line startingLine, boolean forwards, int startingIndent) {
    Line line = forwards ? startingLine.next(): startingLine.previous();
    assert line != startingLine;
    while (line != null) {
      switch (importance(startingIndent, line.isDecimalListItem(), line.getIndent())) {
        case SUPERIOR:
          return null;
        case MATCH:
          return line;
        case INFERIOR:
          // continue
      }
      line = forwards ? line.next() : line.previous();
      assert line != startingLine;
    }
    return null;
  }
}
| |
package de.darwinspl.importer.featureide;
import java.nio.file.Paths;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.eclipse.core.resources.IFile;
import de.darwinspl.importer.DarwinSPLFeatureModelImporter;
import de.ovgu.featureide.fm.core.ExtensionManager.NoSuchExtensionException;
import de.ovgu.featureide.fm.core.base.IFeature;
import de.ovgu.featureide.fm.core.base.IFeatureModel;
import de.ovgu.featureide.fm.core.base.IFeatureStructure;
import de.ovgu.featureide.fm.core.base.impl.FMFactoryManager;
import de.ovgu.featureide.fm.core.io.IFeatureModelFormat;
import de.ovgu.featureide.fm.core.io.Problem;
import de.ovgu.featureide.fm.core.io.ProblemList;
import de.ovgu.featureide.fm.core.io.manager.FileHandler;
import de.ovgu.featureide.fm.core.io.xml.XmlFeatureModelFormat;
import eu.hyvar.evolution.HyEvolutionFactory;
import eu.hyvar.evolution.HyName;
import eu.hyvar.feature.HyFeature;
import eu.hyvar.feature.HyFeatureChild;
import eu.hyvar.feature.HyFeatureFactory;
import eu.hyvar.feature.HyFeatureModel;
import eu.hyvar.feature.HyFeatureType;
import eu.hyvar.feature.HyFeatureTypeEnum;
import eu.hyvar.feature.HyGroup;
import eu.hyvar.feature.HyGroupComposition;
import eu.hyvar.feature.HyGroupType;
import eu.hyvar.feature.HyGroupTypeEnum;
import eu.hyvar.feature.HyRootFeature;
/**
 * Imports a FeatureIDE feature model (XML or in-memory {@link IFeatureModel})
 * into a DarwinSPL/HyVar {@link HyFeatureModel}, translating features, groups
 * and their variation types.
 */
public class FeatureIDEFeatureModelImporter implements DarwinSPLFeatureModelImporter<IFeatureModel> {

    // Mapping from source FeatureIDE features to the created HyVar features,
    // populated during import and exposed via getFeatureMap().
    private Map<IFeature, HyFeature> featureMap;
    // Last imported source model, kept for later inspection.
    private IFeatureModel featureIDEfeatureModel;
    // Last produced target model, kept for later inspection.
    private HyFeatureModel darwinSPLfeatureModel;

    private static final HyFeatureFactory featureFactory = HyFeatureFactory.eINSTANCE;
    private static final HyEvolutionFactory evolutionFactory = HyEvolutionFactory.eINSTANCE;

    /**
     * Loads the FeatureIDE model from an XML file at the given path and imports it.
     *
     * @return the imported model, or null if loading failed.
     */
    @Override
    public HyFeatureModel importFeatureModel(String pathToFile) {
        XmlFeatureModelFormat format = new XmlFeatureModelFormat();
        try {
            IFeatureModel featureModel = loadFeatureModel(pathToFile, format);
            return importFeatureModel(featureModel);
        } catch (NoSuchExtensionException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
        return null;
    }

    /**
     * Reads a FeatureIDE model from disk in the given format; load problems are
     * printed to stderr but the (possibly partial) model is still returned.
     */
    private static IFeatureModel loadFeatureModel(String filepath, IFeatureModelFormat format)
            throws NoSuchExtensionException {
        IFeatureModel fm = null;
        fm = FMFactoryManager.getFactory(filepath, format).createFeatureModel();
        final ProblemList errors = FileHandler.load(Paths.get(filepath), fm, format).getErrors();
        if (!errors.isEmpty()) {
            for (Problem p : errors) {
                System.err.println(p);
            }
        }
        return fm;
    }

    /**
     * Imports an in-memory FeatureIDE model.
     *
     * @return the resulting HyVar model, or null when the input model or its root is null.
     */
    @Override
    public HyFeatureModel importFeatureModel(IFeatureModel featureModelToImport) {
        this.featureIDEfeatureModel = featureModelToImport;
        if (featureModelToImport == null) {
            // TODO proper logging
            System.err.println("Could not import FeatureIDE feature model as it was null");
            return null;
        }
        if (featureModelToImport.getStructure().getRoot() == null) {
            // TODO proper logging
            System.err.println("Could not import FeatureIDE feature model as its root was null");
            return null;
        }
        featureMap = new HashMap<IFeature, HyFeature>();
        HyFeatureModel hyFeatureModel = featureFactory.createHyFeatureModel();
        IFeature rootFeature = featureModelToImport.getStructure().getRoot().getFeature();
        // Recursively imports the whole tree starting at the root.
        processRoot(hyFeatureModel, rootFeature);
        // processSubTree(hyFeatureModel, hyRootFeature, featureModel, rootFeature);
        this.darwinSPLfeatureModel = hyFeatureModel;
        return hyFeatureModel;
    }

    /**
     * Imports a single feature (and, recursively, its children) into the target
     * model, deriving the HyVar feature/group variation types from the FeatureIDE
     * structure flags.
     */
    private HyFeature doImportFeature(IFeature feature, HyFeatureModel dwFeatureModel) {
        String name = feature.getName();
        HyFeature dwFeature = featureFactory.createHyFeature();
        dwFeatureModel.getFeatures().add(dwFeature);
        featureMap.put(feature, dwFeature);
        HyName dwName = evolutionFactory.createHyName();
        dwName.setName(name);
        dwFeature.getNames().add(dwName);
        IFeatureStructure featureStructure = feature.getStructure();
        IFeatureStructure parentFeatureStructure = featureStructure.getParent();
        IFeature parentFeature = parentFeatureStructure == null ? null : parentFeatureStructure.getFeature();
        //Variation type of feature
        HyFeatureType featureType = featureFactory.createHyFeatureType();
        dwFeature.getTypes().add(featureType);
        if (parentFeature == null) {
            //Root feature is always mandatory
            featureType.setType(HyFeatureTypeEnum.MANDATORY);
        } else if (parentFeatureStructure.isOr() || parentFeatureStructure.isAlternative()) {
            //In dedicated groups, all features are perceived as being optional
            // If the feature's group is alternative or or but only one feature exists in this group, it has to be mandatory.
            if(parentFeatureStructure.getChildren().size() > 1) {
                featureType.setType(HyFeatureTypeEnum.OPTIONAL);
            }
            else {
                featureType.setType(HyFeatureTypeEnum.MANDATORY);
            }
        } else if (featureStructure.isMandatory()) {
            featureType.setType(HyFeatureTypeEnum.MANDATORY);
        } else {
            //Can only be optional
            featureType.setType(HyFeatureTypeEnum.OPTIONAL);
        }
        List<IFeatureStructure> childStructures = featureStructure.getChildren();
        if (!childStructures.isEmpty()) {
            // Each parent feature gets exactly one HyFeatureChild/HyGroup/HyGroupComposition
            // chain that contains all of its children.
            List<HyFeatureChild> dwFeatureChildren = dwFeature.getParentOf();
            HyFeatureChild featureChild = featureFactory.createHyFeatureChild();
            dwFeatureChildren.add(featureChild);
            HyGroup dwGroup = featureFactory.createHyGroup();
            dwGroup.getChildOf().add(featureChild);
            dwFeatureModel.getGroups().add(dwGroup);
            List<HyGroupComposition> groupCompositions = dwGroup.getParentOf();
            HyGroupComposition groupComposition = featureFactory.createHyGroupComposition();
            groupCompositions.add(groupComposition);
            //Variation type of group
            HyGroupType groupType = featureFactory.createHyGroupType();
            dwGroup.getTypes().add(groupType);
            // can only be alternative or or if more than one feature.
            if (childStructures.size() > 1) {
                if (featureStructure.isAlternative()) {
                    groupType.setType(HyGroupTypeEnum.ALTERNATIVE);
                } else if (featureStructure.isOr()) {
                    groupType.setType(HyGroupTypeEnum.OR);
                } else if (featureStructure.isAnd()) {
                    // Minimum is the number of mandatory child features
                    groupType.setType(HyGroupTypeEnum.AND);
                }
            }
            else {
                groupType.setType(HyGroupTypeEnum.AND);
            }
            for (IFeatureStructure childStructure : childStructures) {
                IFeature childFeature = childStructure.getFeature();
                HyFeature dwChildFeature = doImportFeature(childFeature, dwFeatureModel);
                groupComposition.getFeatures().add(dwChildFeature);
            }
        }
        return dwFeature;
    }

    /**
     * Imports the root feature tree and registers the resulting feature as the
     * model's root.
     *
     * @param dwFeatureModel the target model receiving the imported features.
     * @param rootFeature the FeatureIDE root feature to import from.
     * @return the imported HyVar root feature.
     */
    private HyFeature processRoot(HyFeatureModel dwFeatureModel, IFeature rootFeature) {
        HyFeature dwRootFeatureFeature = doImportFeature(rootFeature, dwFeatureModel);
        HyRootFeature hyRootFeature = featureFactory.createHyRootFeature();
        dwFeatureModel.getRootFeature().add(hyRootFeature);
        hyRootFeature.setFeature(dwRootFeatureFeature);
        return dwRootFeatureFeature;
    }

    /** @return the FeatureIDE-to-HyVar feature mapping from the last import (null before any import). */
    public Map<IFeature, HyFeature> getFeatureMap() {
        return featureMap;
    }

    /** @return the source model of the last import (null before any import). */
    public IFeatureModel getFeatureIDEfeatureModel() {
        return featureIDEfeatureModel;
    }

    /** @return the target model produced by the last import (null before any import). */
    public HyFeatureModel getDarwinSPLfeatureModel() {
        return darwinSPLfeatureModel;
    }

    /**
     * Imports the model from an Eclipse workspace file by resolving its absolute
     * OS path and delegating to the path-based overload.
     */
    @Override
    public HyFeatureModel importFeatureModel(IFile file) {
        return importFeatureModel(file.getRawLocation().makeAbsolute().toOSString());
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jmeter.gui.util;
import java.awt.BorderLayout;
import java.awt.Container;
import java.awt.Dimension;
import java.awt.FlowLayout;
import java.awt.Point;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import javax.swing.JButton;
import javax.swing.JComponent;
import javax.swing.JDialog;
import javax.swing.JEditorPane;
import javax.swing.JFrame;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JTable;
import javax.swing.table.TableModel;
import org.apache.jmeter.gui.GuiPackage;
import org.apache.jmeter.gui.action.KeyStrokes;
import org.apache.jmeter.util.JMeterUtils;
import org.apache.jorphan.gui.GuiUtils;
/**
 * Modal dialog box that displays a piece of text, optionally allowing the
 * user to edit it. The nested helper classes install it as a double-click
 * viewer or editor for table cells.
 */
public class TextBoxDialoger implements ActionListener {

    private static final String CANCEL_COMMAND = "cancel_dialog"; // $NON-NLS-1$

    private static final String SAVE_CLOSE_COMMAND = "save_close_dialog"; // $NON-NLS-1$

    private static final String CLOSE_COMMAND = "close_dialog"; // $NON-NLS-1$

    private JDialog dialog;

    private JEditorPane textBox;

    // Backup of the text last set via setTextBox(); restored on cancel.
    private String originalText;

    // Whether the text box allows modification; fixed at construction time.
    private boolean editable = false;

    /**
     * Dialog text box
     */
    public TextBoxDialoger() {
        // Empty box
        init(""); //$NON-NLS-1$
    }

    /**
     * Dialog text box
     * @param text - text to display in a box
     */
    public TextBoxDialoger(String text) {
        init(text);
    }

    /**
     * Dialog text box
     * @param text - text to display in a box
     * @param editable - allow to modify text
     */
    public TextBoxDialoger(String text, boolean editable) {
        this.editable = editable;
        init(text);
    }

    private void init(String text) {
        createDialogBox();
        setTextBox(text);
        dialog.setVisible(true); // modal: blocks until the dialog is closed
    }

    private void createDialogBox() {
        JFrame mainFrame = GuiPackage.getInstance().getMainFrame();
        String title = editable ? JMeterUtils.getResString("textbox_title_edit") //$NON-NLS-1$
                : JMeterUtils.getResString("textbox_title_view"); //$NON-NLS-1$
        dialog = new JDialog(mainFrame, title, true); // modal dialog box

        // Close action dialog box when tapping Escape key.
        // NOTE(review): this registration supplies no action command, so the
        // ESC keystroke reaches actionPerformed() with a null command and is
        // handled by the CLOSE branch, not CANCEL — in editable mode ESC
        // therefore keeps the edited text. Confirm this is intended.
        JPanel content = (JPanel) dialog.getContentPane();
        content.registerKeyboardAction(this, KeyStrokes.ESC,
                JComponent.WHEN_IN_FOCUSED_WINDOW);

        textBox = new JEditorPane();
        textBox.setEditable(editable);
        JScrollPane textBoxScrollPane = GuiUtils.makeScrollPane(textBox);

        JPanel btnBar = new JPanel();
        btnBar.setLayout(new FlowLayout(FlowLayout.RIGHT));
        if (editable) {
            JButton cancelBtn = new JButton(JMeterUtils.getResString("textbox_cancel")); //$NON-NLS-1$
            cancelBtn.setActionCommand(CANCEL_COMMAND);
            cancelBtn.addActionListener(this);
            JButton saveBtn = new JButton(JMeterUtils.getResString("textbox_save_close")); //$NON-NLS-1$
            saveBtn.setActionCommand(SAVE_CLOSE_COMMAND);
            saveBtn.addActionListener(this);
            btnBar.add(cancelBtn);
            btnBar.add(saveBtn);
        } else {
            JButton closeBtn = new JButton(JMeterUtils.getResString("textbox_close")); //$NON-NLS-1$
            closeBtn.setActionCommand(CLOSE_COMMAND);
            closeBtn.addActionListener(this);
            btnBar.add(closeBtn);
        }

        // Prepare dialog box
        Container panel = dialog.getContentPane();
        dialog.setMinimumSize(new Dimension(400, 250));
        panel.add(textBoxScrollPane, BorderLayout.CENTER);
        panel.add(btnBar, BorderLayout.SOUTH);

        // Lay the dialog out first so getSize() reports its real size.
        // Previously the size was read before pack() (when it is still 0x0),
        // which placed the dialog's top-left corner at the frame's center
        // instead of centering the dialog over the frame.
        dialog.pack();

        // determine location on screen: centered over the main frame
        Point p = mainFrame.getLocationOnScreen();
        Dimension d1 = mainFrame.getSize();
        Dimension d2 = dialog.getSize();
        dialog.setLocation(p.x + (d1.width - d2.width) / 2, p.y + (d1.height - d2.height) / 2);
    }

    private void closeDialog() {
        dialog.setVisible(false);
    }

    /**
     * Handles button clicks (and the ESC keystroke): CANCEL restores the
     * backed-up text; every other command simply closes the dialog.
     */
    @Override
    public void actionPerformed(ActionEvent e) {
        String command = e.getActionCommand();
        if (CANCEL_COMMAND.equals(command)) {
            closeDialog();
            setTextBox(originalText); // discard the user's edits
        } else {
            // must be CLOSE or SAVE_CLOSE COMMANDS
            closeDialog();
        }
    }

    /**
     * Sets the displayed text and keeps a backup copy for cancellation.
     * @param text - text to display in the box
     */
    public void setTextBox(String text) {
        originalText = text; // text backup
        textBox.setText(text);
    }

    /**
     * @return the current (possibly edited) content of the text box
     */
    public String getTextBox() {
        return textBox.getText();
    }

    /**
     * Class to display a dialog box and cell's content
     * when double click on a table's cell
     *
     */
    public static class TextBoxDoubleClick extends MouseAdapter {
        private JTable table = null;

        public TextBoxDoubleClick(JTable table) {
            super();
            this.table = table;
        }

        @Override
        public void mouseClicked(MouseEvent e) {
            if (e.getClickCount() == 2) { // double click
                TableModel tm = table.getModel();
                Object value = tm.getValueAt(table.getSelectedRow(), table.getSelectedColumn());
                new TextBoxDialoger(value.toString(), false); // view only NOSONAR this instantiation opens a popup
            }
        }
    }

    /**
     * Class to edit in a dialog box the cell's content
     * when double (pressed) click on a table's cell which is editable
     *
     */
    public static class TextBoxDoubleClickPressed extends MouseAdapter {
        private JTable table = null;

        public TextBoxDoubleClickPressed(JTable table) {
            super();
            this.table = table;
        }

        @Override
        public void mousePressed(MouseEvent e) {
            if (e.getClickCount() == 2) { // double (pressed) click
                TableModel tm = table.getModel();
                Object value = tm.getValueAt(table.getSelectedRow(), table.getSelectedColumn());
                if (value instanceof String) {
                    if (table.getCellEditor() != null) {
                        table.getCellEditor().cancelCellEditing(); // in main table (evt mousePressed because cell is editable)
                    }
                    // The dialog is modal, so construction blocks until it is
                    // closed; afterwards write the (possibly edited) text back.
                    TextBoxDialoger tbd = new TextBoxDialoger(value.toString(), true);
                    tm.setValueAt(tbd.getTextBox(), table.getSelectedRow(), table.getSelectedColumn());
                } // else do nothing (cell isn't a string to edit)
            }
        }
    }
}
| |
/*
* Copyright 2013 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.jenkins.plugins.storage;
import java.io.IOException;
import java.io.Serializable;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import org.jenkinsci.Symbol;
import org.kohsuke.stapler.DataBoundConstructor;
import org.kohsuke.stapler.DataBoundSetter;
import org.kohsuke.stapler.QueryParameter;
import org.kohsuke.stapler.StaplerRequest;
import com.google.jenkins.plugins.credentials.domains.RequiresDomain;
import hudson.Extension;
import hudson.FilePath;
import hudson.Launcher;
import hudson.model.AbstractProject;
import hudson.model.Run;
import hudson.model.TaskListener;
import hudson.tasks.BuildStepDescriptor;
import hudson.tasks.BuildStepMonitor;
import hudson.tasks.Builder;
import hudson.util.FormValidation;
import jenkins.tasks.SimpleBuildStep;
import net.sf.json.JSONObject;
/**
* This upload extension implements the classical upload pattern
* where a user provides an Ant-style glob, e.g. ** /*.java
* relative to the build workspace, and those files are uploaded
* to the storage bucket.
*/
@RequiresDomain(StorageScopeRequirement.class)
public class ClassicUploadStep extends Builder implements SimpleBuildStep,
        Serializable {
    // Delegate that performs the actual GCS upload; configured in the constructor.
    @Nonnull
    private ClassicUpload upload;

    /**
     * DataBound constructor invoked from the job configuration form; uses the
     * default (null) UploadModule.
     *
     * @param credentialsId unique ID of the GCS credentials to authenticate with
     * @param bucket name of the destination storage bucket
     * @param pattern Ant-style glob, relative to the workspace, of files to upload
     */
    @DataBoundConstructor
    public ClassicUploadStep(String credentialsId, String bucket,
            String pattern) {
        this(credentialsId, bucket, null, pattern);
    }

    /**
     * Construct the classic upload step.
     *
     * @param credentialsId unique ID of the GCS credentials to authenticate with
     * @param bucket name of the destination storage bucket
     * @param module upload module to use, or null for the default
     * @param pattern Ant-style glob, relative to the workspace, of files to upload
     * @see ClassicUpload#ClassicUpload
     */
    public ClassicUploadStep(String credentialsId, String bucket,
            @Nullable UploadModule module,
            String pattern) {
        this.credentialsId = credentialsId;
        upload = new ClassicUpload(bucket, module, pattern, null, null);
        // Build steps will not be executed following a failed build.
        // Pipeline steps performed sequentially will not be executed
        // following a failed step.
        // If we ever get to execute this on a failed build, that must
        // have been done intentionally, e.g., using "post" with appropriate
        // flags. This should be allowed.
        upload.setForFailedJobs(true);
    }

    /**
     * Whether to surface the file being uploaded to anyone with the link.
     *
     * @param sharedPublicly true to make the uploaded objects publicly readable
     */
    @DataBoundSetter
    public void setSharedPublicly(boolean sharedPublicly) {
        upload.setSharedPublicly(sharedPublicly);
    }

    /** @return whether uploaded objects are shared publicly */
    public boolean isSharedPublicly() {
        return upload.isSharedPublicly();
    }

    /**
     * Whether to indicate in metadata that the file should be viewable inline
     * in web browsers, rather than requiring it to be downloaded first.
     *
     * @param showInline true to mark uploads for inline display
     */
    @DataBoundSetter
    public void setShowInline(boolean showInline) {
        upload.setShowInline(showInline);
    }

    /** @return whether uploads are marked for inline browser display */
    public boolean isShowInline() {
        return upload.isShowInline();
    }

    /**
     * The path prefix that will be stripped from uploaded files. May be null if
     * no path prefix needs to be stripped.
     *
     * Filenames that do not start with this prefix will not be modified. Trailing
     * slash is automatically added if it is missing.
     *
     * @param pathPrefix prefix to strip, or null for none
     */
    @DataBoundSetter
    public void setPathPrefix(@Nullable String pathPrefix) {
        upload.setPathPrefix(pathPrefix);
    }

    /** @return the configured path prefix, or null if none */
    @Nullable
    public String getPathPrefix() {
        return upload.getPathPrefix();
    }

    /** @return the Ant-style glob of workspace files to upload */
    public String getPattern() {
        return upload.getPattern();
    }

    /** @return the name of the destination storage bucket */
    public String getBucket() {
        return upload.getBucket();
    }

    /**
     * The unique ID for the credentials we are using to
     * authenticate with GCS.
     *
     * @return the credentials ID
     */
    public String getCredentialsId() {
        return credentialsId;
    }

    // Stored directly (not delegated) so it participates in data binding.
    private final String credentialsId;

    /** No build-step synchronization is required for this step. */
    @Override
    public BuildStepMonitor getRequiredMonitorService() {
        return BuildStepMonitor.NONE;
    }

    /**
     * Performs the upload for the given run's workspace.
     *
     * @throws IOException if the underlying upload fails
     */
    @Override
    public void perform(Run<?, ?> run, FilePath workspace, Launcher launcher,
            TaskListener listener)
            throws IOException {
        // setForFailedJobs was set to true in the constructor. However,
        // some jobs might have been created before that bug fix.
        // For those, set this here as well.
        upload.setForFailedJobs(true);

        try {
            upload.perform(getCredentialsId(), run,
                    workspace, listener);
        } catch (UploadException e) {
            throw new IOException("Could not perform upload", e);
        }
    }

    /**
     * Descriptor for the class.
     */
    @Extension
    @Symbol("googleStorageUpload")
    public static class DescriptorImpl extends BuildStepDescriptor<Builder> {
        /**
         * {@inheritDoc}
         */
        @Override
        public String getDisplayName() {
            return Messages.ClassicUpload_BuildStepDisplayName();
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public boolean isApplicable(Class<? extends AbstractProject> jobType) {
            return true;
        }

        /**
         * Creates the build step from the submitted form, sanitizing the
         * optional path-prefix field first.
         */
        @Override
        public Builder newInstance(StaplerRequest req, JSONObject formData)
                throws FormException {
            // Since the config form lists the optional parameter pathPrefix as
            // inline, it will be passed through even if stripPathPrefix is false.
            // This might cause problems if the user, for example, fills in the field
            // and then unchecks the checkbox. So, explicitly remove pathPrefix
            // whenever stripPathPrefix is false.
            if (Boolean.FALSE.equals(formData.remove("stripPathPrefix"))) {
                formData.remove("pathPrefix");
            }
            return super.newInstance(req, formData);
        }

        /**
         * This callback validates the {@code bucketNameWithVars} input field's
         * values.
         */
        public FormValidation doCheckBucket(
                @QueryParameter final String bucket)
                throws IOException {
            return ClassicUpload.DescriptorImpl.staticDoCheckBucket(bucket);
        }

        /**
         * This callback validates the {@code pattern} input field's values.
         */
        public static FormValidation doCheckPattern(
                @QueryParameter final String pattern)
                throws IOException {
            return ClassicUpload.DescriptorImpl.staticDoCheckPattern(pattern);
        }
    }
}
| |
/*
* Copyright (c) 2017, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.siddhi.core.util.parser;
import io.siddhi.core.aggregation.AggregationRuntime;
import io.siddhi.core.aggregation.IncrementalAggregationProcessor;
import io.siddhi.core.aggregation.IncrementalDataPurger;
import io.siddhi.core.aggregation.IncrementalExecutor;
import io.siddhi.core.aggregation.IncrementalExecutorsInitialiser;
import io.siddhi.core.config.SiddhiAppContext;
import io.siddhi.core.config.SiddhiQueryContext;
import io.siddhi.core.event.ComplexEvent;
import io.siddhi.core.event.stream.MetaStreamEvent;
import io.siddhi.core.event.stream.StreamEvent;
import io.siddhi.core.event.stream.StreamEventFactory;
import io.siddhi.core.exception.SiddhiAppCreationException;
import io.siddhi.core.executor.ConstantExpressionExecutor;
import io.siddhi.core.executor.ExpressionExecutor;
import io.siddhi.core.executor.VariableExpressionExecutor;
import io.siddhi.core.query.input.stream.StreamRuntime;
import io.siddhi.core.query.input.stream.single.EntryValveExecutor;
import io.siddhi.core.query.input.stream.single.SingleStreamRuntime;
import io.siddhi.core.query.processor.ProcessingMode;
import io.siddhi.core.query.processor.stream.window.QueryableProcessor;
import io.siddhi.core.query.selector.GroupByKeyGenerator;
import io.siddhi.core.query.selector.attribute.aggregator.incremental.IncrementalAttributeAggregator;
import io.siddhi.core.table.Table;
import io.siddhi.core.util.ExceptionUtil;
import io.siddhi.core.util.Scheduler;
import io.siddhi.core.util.SiddhiAppRuntimeBuilder;
import io.siddhi.core.util.SiddhiClassLoader;
import io.siddhi.core.util.SiddhiConstants;
import io.siddhi.core.util.config.ConfigManager;
import io.siddhi.core.util.extension.holder.FunctionExecutorExtensionHolder;
import io.siddhi.core.util.extension.holder.IncrementalAttributeAggregatorExtensionHolder;
import io.siddhi.core.util.lock.LockWrapper;
import io.siddhi.core.util.parser.helper.QueryParserHelper;
import io.siddhi.core.util.statistics.LatencyTracker;
import io.siddhi.core.util.statistics.ThroughputTracker;
import io.siddhi.core.window.Window;
import io.siddhi.query.api.aggregation.TimePeriod;
import io.siddhi.query.api.annotation.Annotation;
import io.siddhi.query.api.annotation.Element;
import io.siddhi.query.api.definition.AbstractDefinition;
import io.siddhi.query.api.definition.AggregationDefinition;
import io.siddhi.query.api.definition.Attribute;
import io.siddhi.query.api.definition.StreamDefinition;
import io.siddhi.query.api.definition.TableDefinition;
import io.siddhi.query.api.execution.query.selection.OutputAttribute;
import io.siddhi.query.api.expression.AttributeFunction;
import io.siddhi.query.api.expression.Expression;
import io.siddhi.query.api.expression.Variable;
import io.siddhi.query.api.expression.constant.StringConstant;
import io.siddhi.query.api.util.AnnotationHelper;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.locks.ReentrantLock;
import java.util.stream.Collectors;
import static io.siddhi.core.util.SiddhiConstants.AGG_EXTERNAL_TIMESTAMP_COL;
import static io.siddhi.core.util.SiddhiConstants.AGG_LAST_TIMESTAMP_COL;
import static io.siddhi.core.util.SiddhiConstants.AGG_SHARD_ID_COL;
import static io.siddhi.core.util.SiddhiConstants.AGG_START_TIMESTAMP_COL;
import static io.siddhi.core.util.SiddhiConstants.ANNOTATION_PARTITION_BY_ID;
import static io.siddhi.core.util.SiddhiConstants.METRIC_INFIX_AGGREGATIONS;
import static io.siddhi.core.util.SiddhiConstants.METRIC_TYPE_FIND;
import static io.siddhi.core.util.SiddhiConstants.METRIC_TYPE_INSERT;
/**
 * Parser for incremental aggregation definitions.
 */
public class AggregationParser {
/**
 * Parses an incremental aggregation definition and assembles its complete
 * runtime: validates the definition, builds the incoming and processed meta
 * stream events, per-duration expression executors and group-by key
 * generators, the backing aggregation tables, the chained incremental
 * executors, data purging and recovery helpers, and statistics trackers,
 * then wires everything into an {@link AggregationRuntime}.
 *
 * @param aggregationDefinition    definition to parse; must be non-null with a
 *                                 time period and a selector
 * @param siddhiAppContext         context of the enclosing Siddhi app
 * @param streamDefinitionMap      stream definitions keyed by stream id
 * @param tableDefinitionMap       table definitions keyed by table id
 * @param windowDefinitionMap      window definitions keyed by window id
 * @param aggregationDefinitionMap aggregation definitions keyed by id
 * @param tableMap                 runtime tables keyed by table id
 * @param windowMap                runtime windows keyed by window id
 * @param aggregationMap           runtime aggregations keyed by id
 * @param siddhiAppRuntimeBuilder  builder used to create the backing tables
 * @return the fully wired aggregation runtime
 * @throws SiddhiAppCreationException if the definition is null or incomplete,
 *                                    the input stream is undefined, a primary
 *                                    key is user-defined through an annotation,
 *                                    or 'shardId' is missing for a
 *                                    partition-by-id aggregation
 */
public static AggregationRuntime parse(AggregationDefinition aggregationDefinition,
                                       SiddhiAppContext siddhiAppContext,
                                       Map<String, AbstractDefinition> streamDefinitionMap,
                                       Map<String, AbstractDefinition> tableDefinitionMap,
                                       Map<String, AbstractDefinition> windowDefinitionMap,
                                       Map<String, AbstractDefinition> aggregationDefinitionMap,
                                       Map<String, Table> tableMap,
                                       Map<String, Window> windowMap,
                                       Map<String, AggregationRuntime> aggregationMap,
                                       SiddhiAppRuntimeBuilder siddhiAppRuntimeBuilder) {
    // Fail fast on structurally invalid definitions.
    if (aggregationDefinition == null) {
        throw new SiddhiAppCreationException(
                "AggregationDefinition instance is null. " +
                        "Hence, can't create the siddhi app '" + siddhiAppContext.getName() + "'");
    }
    if (aggregationDefinition.getTimePeriod() == null) {
        throw new SiddhiAppCreationException(
                "AggregationDefinition '" + aggregationDefinition.getId() + "'s timePeriod is null. " +
                        "Hence, can't create the siddhi app '" + siddhiAppContext.getName() + "'",
                aggregationDefinition.getQueryContextStartIndex(), aggregationDefinition.getQueryContextEndIndex());
    }
    if (aggregationDefinition.getSelector() == null) {
        throw new SiddhiAppCreationException(
                "AggregationDefinition '" + aggregationDefinition.getId() + "'s selection is not defined. " +
                        "Hence, can't create the siddhi app '" + siddhiAppContext.getName() + "'",
                aggregationDefinition.getQueryContextStartIndex(), aggregationDefinition.getQueryContextEndIndex());
    }
    if (streamDefinitionMap.get(aggregationDefinition.getBasicSingleInputStream().getStreamId()) == null) {
        throw new SiddhiAppCreationException("Stream " + aggregationDefinition.getBasicSingleInputStream().
                getStreamId() + " has not been defined");
    }

    // Aggregations manage their own table primary keys; a user-defined one
    // would conflict with them.
    Element userDefinedPrimaryKey = AnnotationHelper.getAnnotationElement(
            SiddhiConstants.ANNOTATION_PRIMARY_KEY, null, aggregationDefinition.getAnnotations());
    if (userDefinedPrimaryKey != null) {
        throw new SiddhiAppCreationException("Aggregation Tables have predefined primary key, but found '" +
                userDefinedPrimaryKey.getValue() + "' primary key defined though annotation.");
    }

    try {
        List<VariableExpressionExecutor> incomingVariableExpressionExecutors = new ArrayList<>();

        String aggregatorName = aggregationDefinition.getId();
        SiddhiQueryContext siddhiQueryContext = new SiddhiQueryContext(siddhiAppContext, aggregatorName);

        StreamRuntime streamRuntime = InputStreamParser.parse(aggregationDefinition.getBasicSingleInputStream(),
                null, streamDefinitionMap, tableDefinitionMap, windowDefinitionMap, aggregationDefinitionMap,
                tableMap, windowMap, aggregationMap, incomingVariableExpressionExecutors, false,
                siddhiQueryContext);

        // Get original meta for later use.
        MetaStreamEvent incomingMetaStreamEvent = (MetaStreamEvent) streamRuntime.getMetaComplexEvent();
        // Create new meta stream event.
        // This must hold the timestamp, group by attributes (if given) and the incremental attributes, in
        // onAfterWindowData array
        // Example format: AGG_TIMESTAMP, groupByAttribute1, groupByAttribute2, AGG_incAttribute1, AGG_incAttribute2
        // AGG_incAttribute1, AGG_incAttribute2 would have the same attribute names as in
        // finalListOfIncrementalAttributes
        incomingMetaStreamEvent.initializeAfterWindowData(); // To enter data as onAfterWindowData

        List<TimePeriod.Duration> incrementalDurations = getSortedPeriods(aggregationDefinition.getTimePeriod());

        //Incoming executors will be executors for timestamp, externalTimestamp(if used),
        // group by attributes (if given) and the incremental attributes expression executors
        List<ExpressionExecutor> incomingExpressionExecutors = new ArrayList<>();
        List<IncrementalAttributeAggregator> incrementalAttributeAggregators = new ArrayList<>();

        List<Variable> groupByVariableList = aggregationDefinition.getSelector().getGroupByList();

        List<Expression> outputExpressions = new ArrayList<>(); //Expressions to get
        // final aggregate outputs. e.g avg = sum/count

        boolean isProcessingOnExternalTime = aggregationDefinition.getAggregateAttribute() != null;
        boolean isGroupBy = aggregationDefinition.getSelector().getGroupByList().size() != 0;

        // Distribution is enabled either by the @partitionById annotation or
        // by the 'partitionById' configuration property; both require a
        // 'shardId' property to identify this node.
        final boolean isDistributed;
        ConfigManager configManager = siddhiAppContext.getSiddhiContext().getConfigManager();
        final String shardId = configManager.extractProperty("shardId");
        boolean enablePartitioning = false;
        Annotation partitionById = AnnotationHelper.getAnnotation(ANNOTATION_PARTITION_BY_ID,
                aggregationDefinition.getAnnotations());
        if (partitionById != null) {
            String enableElement = partitionById.getElement("enable");
            enablePartitioning = enableElement == null || Boolean.parseBoolean(enableElement);
        }

        boolean shouldPartitionById = Boolean.parseBoolean(configManager.extractProperty("partitionById"));

        if (enablePartitioning || shouldPartitionById) {
            if (shardId == null) {
                throw new SiddhiAppCreationException("Configuration 'shardId' not provided for @partitionById " +
                        "annotation");
            }
            isDistributed = true;
        } else {
            isDistributed = false;
        }

        populateIncomingAggregatorsAndExecutors(aggregationDefinition, siddhiQueryContext, tableMap,
                incomingVariableExpressionExecutors, incomingMetaStreamEvent, incomingExpressionExecutors,
                incrementalAttributeAggregators, groupByVariableList, outputExpressions, isProcessingOnExternalTime,
                isDistributed, shardId);

        // The last-timestamp column is only added by some aggregators; its
        // presence changes executor construction below.
        boolean isLatestEventColAdded = incomingMetaStreamEvent.getOutputData()
                .get(incomingMetaStreamEvent.getOutputData().size() - 1)
                .getName().equals(AGG_LAST_TIMESTAMP_COL);

        int baseAggregatorBeginIndex = incomingMetaStreamEvent.getOutputData().size();

        List<Expression> finalBaseExpressions = new ArrayList<>();
        populateFinalBaseAggregators(tableMap, incomingVariableExpressionExecutors, incomingMetaStreamEvent,
                incomingExpressionExecutors, incrementalAttributeAggregators, siddhiQueryContext,
                finalBaseExpressions);

        // Intermediate stream definition mirroring the incoming meta's output.
        StreamDefinition incomingOutputStreamDefinition = StreamDefinition.id(aggregatorName + "_intermediate");
        incomingOutputStreamDefinition.setQueryContextStartIndex(aggregationDefinition.getQueryContextStartIndex());
        incomingOutputStreamDefinition.setQueryContextEndIndex(aggregationDefinition.getQueryContextEndIndex());
        MetaStreamEvent processedMetaStreamEvent = new MetaStreamEvent();
        for (Attribute attribute : incomingMetaStreamEvent.getOutputData()) {
            incomingOutputStreamDefinition.attribute(attribute.getName(), attribute.getType());
            processedMetaStreamEvent.addOutputData(attribute);
        }
        incomingMetaStreamEvent.setOutputDefinition(incomingOutputStreamDefinition);
        processedMetaStreamEvent.addInputDefinition(incomingOutputStreamDefinition);
        processedMetaStreamEvent.setOutputDefinition(incomingOutputStreamDefinition);

        // Executors of processing meta
        List<VariableExpressionExecutor> processVariableExpressionExecutors = new ArrayList<>();

        // Separate executor lists per duration: one set for processing, one
        // set for find (lookup) queries.
        Map<TimePeriod.Duration, List<ExpressionExecutor>> processExpressionExecutorsMap = new HashMap<>();
        Map<TimePeriod.Duration, List<ExpressionExecutor>> processExpressionExecutorsMapForFind = new HashMap<>();

        incrementalDurations.forEach(
                incrementalDuration -> {
                    processExpressionExecutorsMap.put(
                            incrementalDuration,
                            constructProcessExpressionExecutors(
                                    siddhiQueryContext, tableMap, baseAggregatorBeginIndex, finalBaseExpressions,
                                    incomingOutputStreamDefinition, processedMetaStreamEvent,
                                    processVariableExpressionExecutors, isProcessingOnExternalTime,
                                    incrementalDuration, isDistributed, shardId, isLatestEventColAdded));
                    processExpressionExecutorsMapForFind.put(
                            incrementalDuration,
                            constructProcessExpressionExecutors(
                                    siddhiQueryContext, tableMap, baseAggregatorBeginIndex, finalBaseExpressions,
                                    incomingOutputStreamDefinition, processedMetaStreamEvent,
                                    processVariableExpressionExecutors, isProcessingOnExternalTime,
                                    incrementalDuration, isDistributed, shardId, isLatestEventColAdded));
                });

        ExpressionExecutor shouldUpdateTimestamp = null;
        if (isLatestEventColAdded) {
            Expression shouldUpdateTimestampExp = new Variable(AGG_LAST_TIMESTAMP_COL);
            shouldUpdateTimestamp = ExpressionParser.parseExpression(shouldUpdateTimestampExp,
                    processedMetaStreamEvent, 0, tableMap, processVariableExpressionExecutors,
                    false, 0, ProcessingMode.BATCH, false, siddhiQueryContext);
        }

        // Executors producing the final aggregate outputs (e.g. avg = sum/count).
        List<ExpressionExecutor> outputExpressionExecutors = outputExpressions.stream()
                .map(expression -> ExpressionParser.parseExpression(expression, processedMetaStreamEvent, 0,
                        tableMap, processVariableExpressionExecutors, isGroupBy, 0, ProcessingMode.BATCH, false,
                        siddhiQueryContext))
                .collect(Collectors.toList());

        // Create group by key generator
        Map<TimePeriod.Duration, GroupByKeyGenerator> groupByKeyGeneratorMap = new HashMap<>();
        incrementalDurations.forEach(
                incrementalDuration -> {
                    GroupByKeyGenerator groupByKeyGenerator = null;
                    if (isProcessingOnExternalTime || isGroupBy) {
                        List<Expression> groupByExpressionList = new ArrayList<>();
                        if (isProcessingOnExternalTime) {
                            Expression externalTimestampExpression =
                                    AttributeFunction.function(
                                            "incrementalAggregator", "getAggregationStartTime",
                                            new Variable(AGG_EXTERNAL_TIMESTAMP_COL),
                                            new StringConstant(incrementalDuration.name())
                                    );
                            groupByExpressionList.add(externalTimestampExpression);
                        }
                        groupByExpressionList.addAll(groupByVariableList.stream()
                                .map(groupByVariable -> (Expression) groupByVariable)
                                .collect(Collectors.toList()));
                        groupByKeyGenerator = new GroupByKeyGenerator(groupByExpressionList, processedMetaStreamEvent,
                                SiddhiConstants.UNKNOWN_STATE, tableMap, processVariableExpressionExecutors,
                                siddhiQueryContext);
                    }
                    groupByKeyGeneratorMap.put(incrementalDuration, groupByKeyGenerator);
                }
        );

        // GroupBy for reading
        Map<TimePeriod.Duration, GroupByKeyGenerator> groupByKeyGeneratorMapForReading = new HashMap<>();
        if (isDistributed && !isProcessingOnExternalTime) {
            // In distributed mode, reads must additionally group by the
            // aggregation start time so shard rows merge correctly.
            incrementalDurations.forEach(
                    incrementalDuration -> {
                        List<Expression> groupByExpressionList = new ArrayList<>();
                        Expression timestampExpression =
                                AttributeFunction.function(
                                        "incrementalAggregator", "getAggregationStartTime",
                                        new Variable(AGG_START_TIMESTAMP_COL),
                                        new StringConstant(incrementalDuration.name())
                                );
                        groupByExpressionList.add(timestampExpression);
                        if (isGroupBy) {
                            groupByExpressionList.addAll(groupByVariableList.stream()
                                    .map(groupByVariable -> (Expression) groupByVariable)
                                    .collect(Collectors.toList()));
                        }
                        GroupByKeyGenerator groupByKeyGenerator = new GroupByKeyGenerator(groupByExpressionList,
                                processedMetaStreamEvent, SiddhiConstants.UNKNOWN_STATE, tableMap,
                                processVariableExpressionExecutors, siddhiQueryContext);
                        groupByKeyGeneratorMapForReading.put(incrementalDuration, groupByKeyGenerator);
                    }
            );
        } else {
            groupByKeyGeneratorMapForReading.putAll(groupByKeyGeneratorMap);
        }

        // Create new scheduler
        EntryValveExecutor entryValveExecutor = new EntryValveExecutor(siddhiAppContext);
        LockWrapper lockWrapper = new LockWrapper(aggregatorName);
        lockWrapper.setLock(new ReentrantLock());

        Scheduler scheduler = SchedulerParser.parse(entryValveExecutor, siddhiQueryContext);
        scheduler.init(lockWrapper, aggregatorName);
        scheduler.setStreamEventFactory(new StreamEventFactory(processedMetaStreamEvent));

        QueryParserHelper.reduceMetaComplexEvent(incomingMetaStreamEvent);
        QueryParserHelper.reduceMetaComplexEvent(processedMetaStreamEvent);
        QueryParserHelper.updateVariablePosition(incomingMetaStreamEvent, incomingVariableExpressionExecutors);
        QueryParserHelper.updateVariablePosition(processedMetaStreamEvent, processVariableExpressionExecutors);

        // One backing table per duration, then the chained executors over them.
        Map<TimePeriod.Duration, Table> aggregationTables = initDefaultTables(aggregatorName,
                incrementalDurations, processedMetaStreamEvent.getOutputStreamDefinition(),
                siddhiAppRuntimeBuilder, aggregationDefinition.getAnnotations(), groupByVariableList,
                isProcessingOnExternalTime, isDistributed);

        Map<TimePeriod.Duration, IncrementalExecutor> incrementalExecutorMap = buildIncrementalExecutors(
                processedMetaStreamEvent, processExpressionExecutorsMap, groupByKeyGeneratorMap, incrementalDurations,
                aggregationTables, siddhiQueryContext, aggregatorName, shouldUpdateTimestamp);

        boolean isOptimisedLookup = aggregationTables.get(incrementalDurations.get(0)) instanceof QueryableProcessor;

        List<String> groupByVariablesList = groupByVariableList.stream()
                .map(Variable::getAttributeName)
                .collect(Collectors.toList());

        List<OutputAttribute> defaultSelectorList = new ArrayList<>();
        if (isOptimisedLookup) {
            defaultSelectorList = incomingOutputStreamDefinition.getAttributeList().stream()
                    .map((attribute) -> new OutputAttribute(new Variable(attribute.getName())))
                    .collect(Collectors.toList());
        }

        IncrementalDataPurger incrementalDataPurger = new IncrementalDataPurger();
        incrementalDataPurger.init(aggregationDefinition, new StreamEventFactory(processedMetaStreamEvent)
                , aggregationTables, isProcessingOnExternalTime, siddhiQueryContext);

        //Recreate in-memory data from tables
        IncrementalExecutorsInitialiser incrementalExecutorsInitialiser = new IncrementalExecutorsInitialiser(
                incrementalDurations, aggregationTables, incrementalExecutorMap, isDistributed, shardId,
                siddhiAppContext, processedMetaStreamEvent, tableMap, windowMap, aggregationMap);

        IncrementalExecutor rootIncrementalExecutor = incrementalExecutorMap.get(incrementalDurations.get(0));
        rootIncrementalExecutor.setScheduler(scheduler);
        // Connect entry valve to root incremental executor
        entryValveExecutor.setNextExecutor(rootIncrementalExecutor);

        QueryParserHelper.initStreamRuntime(streamRuntime, incomingMetaStreamEvent, lockWrapper, aggregatorName);

        // Optional statistics trackers for find/insert latency and throughput.
        LatencyTracker latencyTrackerFind = null;
        LatencyTracker latencyTrackerInsert = null;
        ThroughputTracker throughputTrackerFind = null;
        ThroughputTracker throughputTrackerInsert = null;
        if (siddhiAppContext.getStatisticsManager() != null) {
            latencyTrackerFind = QueryParserHelper.createLatencyTracker(siddhiAppContext,
                    aggregationDefinition.getId(), METRIC_INFIX_AGGREGATIONS, METRIC_TYPE_FIND);
            latencyTrackerInsert = QueryParserHelper.createLatencyTracker(siddhiAppContext,
                    aggregationDefinition.getId(), METRIC_INFIX_AGGREGATIONS, METRIC_TYPE_INSERT);
            throughputTrackerFind = QueryParserHelper.createThroughputTracker(siddhiAppContext,
                    aggregationDefinition.getId(), METRIC_INFIX_AGGREGATIONS, METRIC_TYPE_FIND);
            throughputTrackerInsert = QueryParserHelper.createThroughputTracker(siddhiAppContext,
                    aggregationDefinition.getId(), METRIC_INFIX_AGGREGATIONS, METRIC_TYPE_INSERT);
        }

        // Assemble the runtime and attach it as the stream's common processor.
        AggregationRuntime aggregationRuntime = new AggregationRuntime(aggregationDefinition,
                isProcessingOnExternalTime, isDistributed, incrementalDurations, incrementalExecutorMap,
                aggregationTables, outputExpressionExecutors, processExpressionExecutorsMapForFind,
                shouldUpdateTimestamp, groupByKeyGeneratorMapForReading, isOptimisedLookup, defaultSelectorList,
                groupByVariablesList, isLatestEventColAdded, baseAggregatorBeginIndex,
                finalBaseExpressions, incrementalDataPurger, incrementalExecutorsInitialiser,
                ((SingleStreamRuntime) streamRuntime), processedMetaStreamEvent,
                latencyTrackerFind, throughputTrackerFind);

        streamRuntime.setCommonProcessor(new IncrementalAggregationProcessor(aggregationRuntime,
                incomingExpressionExecutors, processedMetaStreamEvent, latencyTrackerInsert,
                throughputTrackerInsert, siddhiAppContext));

        return aggregationRuntime;
    } catch (Throwable t) {
        // Attach query-context information before propagating.
        ExceptionUtil.populateQueryContext(t, aggregationDefinition, siddhiAppContext);
        throw t;
    }
}
private static Map<TimePeriod.Duration, IncrementalExecutor> buildIncrementalExecutors(
MetaStreamEvent processedMetaStreamEvent,
Map<TimePeriod.Duration, List<ExpressionExecutor>> processExpressionExecutorsMap,
Map<TimePeriod.Duration, GroupByKeyGenerator> groupByKeyGeneratorList,
List<TimePeriod.Duration> incrementalDurations,
Map<TimePeriod.Duration, Table> aggregationTables,
SiddhiQueryContext siddhiQueryContext,
String aggregatorName, ExpressionExecutor shouldUpdateTimestamp) {
Map<TimePeriod.Duration, IncrementalExecutor> incrementalExecutorMap = new HashMap<>();
// Create incremental executors
IncrementalExecutor child;
IncrementalExecutor root = null;
for (int i = incrementalDurations.size() - 1; i >= 0; i--) {
// Base incremental expression executors created using new meta
boolean isRoot = false;
if (i == 0) {
isRoot = true;
}
child = root;
TimePeriod.Duration duration = incrementalDurations.get(i);
IncrementalExecutor incrementalExecutor = new IncrementalExecutor(aggregatorName, duration,
processExpressionExecutorsMap.get(duration), shouldUpdateTimestamp,
groupByKeyGeneratorList.get(duration), isRoot, aggregationTables.get(duration),
child, siddhiQueryContext, processedMetaStreamEvent);
incrementalExecutorMap.put(duration, incrementalExecutor);
root = incrementalExecutor;
}
return incrementalExecutorMap;
}
    /**
     * Builds the executors that populate one aggregation-table row for the given
     * {@code duration}: start timestamp, optional shard id and bucketed external
     * timestamp, pass-through group-by/selector attributes, optional latest-event
     * timestamp, and finally the base aggregator expressions.
     * <p>
     * The executor order must match the column order of the table schema derived
     * from {@code incomingOutputStreamDefinition}.
     *
     * @return executors in table-column order for this duration
     */
    private static List<ExpressionExecutor> constructProcessExpressionExecutors(
            SiddhiQueryContext siddhiQueryContext, Map<String, Table> tableMap, int baseAggregatorBeginIndex,
            List<Expression> finalBaseExpressions, StreamDefinition incomingOutputStreamDefinition,
            MetaStreamEvent processedMetaStreamEvent,
            List<VariableExpressionExecutor> processVariableExpressionExecutors, boolean isProcessingOnExternalTime,
            TimePeriod.Duration duration, boolean isDistributed, String shardId, boolean isLatestEventColAdded) {
        List<ExpressionExecutor> processExpressionExecutors = new ArrayList<>();
        List<Attribute> attributeList = incomingOutputStreamDefinition.getAttributeList();
        // 'i' counts how many leading columns (timestamp, shard id, external
        // timestamp) have been consumed before the pass-through attributes begin.
        int i = 1;
        //Add timestamp executor
        Attribute attribute = attributeList.get(0);
        VariableExpressionExecutor variableExpressionExecutor = (VariableExpressionExecutor) ExpressionParser
                .parseExpression(new Variable(attribute.getName()), processedMetaStreamEvent, 0, tableMap,
                        processVariableExpressionExecutors, true, 0, ProcessingMode.BATCH, false, siddhiQueryContext);
        processExpressionExecutors.add(variableExpressionExecutor);
        if (isDistributed) {
            // Shard id is a per-node constant embedded into every row.
            Expression shardIdExpression = Expression.value(shardId);
            ExpressionExecutor shardIdExpressionExecutor = ExpressionParser.parseExpression(shardIdExpression,
                    processedMetaStreamEvent, 0, tableMap, processVariableExpressionExecutors, true, 0,
                    ProcessingMode.BATCH, false, siddhiQueryContext);
            processExpressionExecutors.add(shardIdExpressionExecutor);
            i++;
        }
        if (isProcessingOnExternalTime) {
            // Bucket the external timestamp down to the start of this duration's window.
            Expression externalTimestampExpression =
                    AttributeFunction.function("incrementalAggregator", "getAggregationStartTime",
                            new Variable(AGG_EXTERNAL_TIMESTAMP_COL), new StringConstant(duration.name()));
            ExpressionExecutor externalTimestampExecutor = ExpressionParser.parseExpression(
                    externalTimestampExpression, processedMetaStreamEvent, 0, tableMap,
                    processVariableExpressionExecutors, true, 0, ProcessingMode.BATCH, false, siddhiQueryContext);
            processExpressionExecutors.add(externalTimestampExecutor);
            i++;
        }
        if (isLatestEventColAdded) {
            // The column just before the base aggregators is AGG_LAST_TIMESTAMP; it is
            // handled by the dedicated max() executor below, so stop one column earlier.
            baseAggregatorBeginIndex = baseAggregatorBeginIndex - 1;
        }
        for (; i < baseAggregatorBeginIndex; i++) {
            // Pass-through executors for group-by and plain selector attributes.
            attribute = attributeList.get(i);
            variableExpressionExecutor = (VariableExpressionExecutor) ExpressionParser.parseExpression(
                    new Variable(attribute.getName()), processedMetaStreamEvent, 0, tableMap,
                    processVariableExpressionExecutors, true, 0, ProcessingMode.BATCH, false, siddhiQueryContext);
            processExpressionExecutors.add(variableExpressionExecutor);
        }
        if (isLatestEventColAdded) {
            // Track the newest event seen in the window via max(AGG_LAST_TIMESTAMP).
            Expression lastTimestampExpression =
                    AttributeFunction.function("max", new Variable(AGG_LAST_TIMESTAMP_COL));
            ExpressionExecutor latestTimestampExecutor = ExpressionParser.parseExpression(lastTimestampExpression,
                    processedMetaStreamEvent, 0, tableMap, processVariableExpressionExecutors, true, 0,
                    ProcessingMode.BATCH, false, siddhiQueryContext);
            processExpressionExecutors.add(latestTimestampExecutor);
        }
        for (Expression expression : finalBaseExpressions) {
            // Executors for the base aggregator building blocks (e.g. sum/count for avg).
            ExpressionExecutor expressionExecutor = ExpressionParser.parseExpression(expression,
                    processedMetaStreamEvent, 0, tableMap, processVariableExpressionExecutors,
                    true, 0, ProcessingMode.BATCH, false, siddhiQueryContext);
            processExpressionExecutors.add(expressionExecutor);
        }
        return processExpressionExecutors;
    }
private static void populateFinalBaseAggregators(
Map<String, Table> tableMap, List<VariableExpressionExecutor> incomingVariableExpressionExecutors,
MetaStreamEvent incomingMetaStreamEvent, List<ExpressionExecutor> incomingExpressionExecutors,
List<IncrementalAttributeAggregator> incrementalAttributeAggregators,
SiddhiQueryContext siddhiQueryContext, List<Expression> finalBaseAggregators) {
List<Attribute> finalBaseAttributes = new ArrayList<>();
for (IncrementalAttributeAggregator incrementalAttributeAggregator : incrementalAttributeAggregators) {
Attribute[] baseAttributes = incrementalAttributeAggregator.getBaseAttributes();
Expression[] baseAttributeInitialValues = incrementalAttributeAggregator.getBaseAttributeInitialValues();
Expression[] baseAggregators = incrementalAttributeAggregator.getBaseAggregators();
for (int i = 0; i < baseAttributes.length; i++) {
validateBaseAggregators(incrementalAttributeAggregators,
incrementalAttributeAggregator, baseAttributes,
baseAttributeInitialValues, baseAggregators, i);
if (!finalBaseAttributes.contains(baseAttributes[i])) {
finalBaseAttributes.add(baseAttributes[i]);
finalBaseAggregators.add(baseAggregators[i]);
incomingMetaStreamEvent.addOutputData(baseAttributes[i]);
incomingExpressionExecutors.add(ExpressionParser.parseExpression(baseAttributeInitialValues[i],
incomingMetaStreamEvent, 0, tableMap, incomingVariableExpressionExecutors,
false, 0,
ProcessingMode.BATCH, false, siddhiQueryContext));
}
}
}
}
private static void populateIncomingAggregatorsAndExecutors(
AggregationDefinition aggregationDefinition, SiddhiQueryContext siddhiQueryContext,
Map<String, Table> tableMap, List<VariableExpressionExecutor> incomingVariableExpressionExecutors,
MetaStreamEvent incomingMetaStreamEvent, List<ExpressionExecutor> incomingExpressionExecutors,
List<IncrementalAttributeAggregator> incrementalAttributeAggregators, List<Variable> groupByVariableList,
List<Expression> outputExpressions, boolean isProcessingOnExternalTime, boolean isDistributed,
String shardId) {
boolean isLatestEventAdded = false;
ExpressionExecutor timestampExecutor = getTimeStampExecutor(siddhiQueryContext, tableMap,
incomingVariableExpressionExecutors, incomingMetaStreamEvent);
Attribute timestampAttribute = new Attribute(AGG_START_TIMESTAMP_COL, Attribute.Type.LONG);
incomingMetaStreamEvent.addOutputData(timestampAttribute);
incomingExpressionExecutors.add(timestampExecutor);
if (isDistributed) {
ExpressionExecutor nodeIdExpExecutor = new ConstantExpressionExecutor(shardId, Attribute.Type.STRING);
incomingExpressionExecutors.add(nodeIdExpExecutor);
incomingMetaStreamEvent.addOutputData(new Attribute(AGG_SHARD_ID_COL, Attribute.Type.STRING));
}
ExpressionExecutor externalTimestampExecutor = null;
if (isProcessingOnExternalTime) {
Expression externalTimestampExpression = aggregationDefinition.getAggregateAttribute();
externalTimestampExecutor = ExpressionParser.parseExpression(externalTimestampExpression,
incomingMetaStreamEvent, 0, tableMap, incomingVariableExpressionExecutors,
false, 0, ProcessingMode.BATCH, false, siddhiQueryContext);
if (externalTimestampExecutor.getReturnType() == Attribute.Type.STRING) {
Expression expression = AttributeFunction.function("incrementalAggregator",
"timestampInMilliseconds", externalTimestampExpression);
externalTimestampExecutor = ExpressionParser.parseExpression(expression, incomingMetaStreamEvent,
0, tableMap, incomingVariableExpressionExecutors, false, 0, ProcessingMode.BATCH, false,
siddhiQueryContext);
} else if (externalTimestampExecutor.getReturnType() != Attribute.Type.LONG) {
throw new SiddhiAppCreationException(
"AggregationDefinition '" + aggregationDefinition.getId() + "'s aggregateAttribute expects " +
"long or string, but found " + timestampExecutor.getReturnType() + ". " +
"Hence, can't create the siddhi app '" +
siddhiQueryContext.getSiddhiAppContext().getName() + "'",
externalTimestampExpression.getQueryContextStartIndex(),
externalTimestampExpression.getQueryContextEndIndex());
}
Attribute externalTimestampAttribute = new Attribute(AGG_EXTERNAL_TIMESTAMP_COL, Attribute.Type.LONG);
incomingMetaStreamEvent.addOutputData(externalTimestampAttribute);
incomingExpressionExecutors.add(externalTimestampExecutor);
}
AbstractDefinition incomingLastInputStreamDefinition = incomingMetaStreamEvent.getLastInputDefinition();
for (Variable groupByVariable : groupByVariableList) {
incomingMetaStreamEvent.addOutputData(incomingLastInputStreamDefinition.getAttributeList()
.get(incomingLastInputStreamDefinition.getAttributePosition(
groupByVariable.getAttributeName())));
incomingExpressionExecutors.add(ExpressionParser.parseExpression(groupByVariable,
incomingMetaStreamEvent, 0, tableMap, incomingVariableExpressionExecutors,
false, 0, ProcessingMode.BATCH,
false, siddhiQueryContext));
}
// Add AGG_TIMESTAMP to output as well
aggregationDefinition.getAttributeList().add(timestampAttribute);
//Executors of time is differentiated with modes
if (isProcessingOnExternalTime) {
outputExpressions.add(Expression.variable(AGG_EXTERNAL_TIMESTAMP_COL));
} else {
outputExpressions.add(Expression.variable(AGG_START_TIMESTAMP_COL));
}
for (OutputAttribute outputAttribute : aggregationDefinition.getSelector().getSelectionList()) {
Expression expression = outputAttribute.getExpression();
if (expression instanceof AttributeFunction) {
IncrementalAttributeAggregator incrementalAggregator = null;
try {
incrementalAggregator = (IncrementalAttributeAggregator)
SiddhiClassLoader.loadExtensionImplementation(
new AttributeFunction("incrementalAggregator",
((AttributeFunction) expression).getName(),
((AttributeFunction) expression).getParameters()),
IncrementalAttributeAggregatorExtensionHolder.getInstance(
siddhiQueryContext.getSiddhiAppContext()));
} catch (SiddhiAppCreationException ex) {
try {
SiddhiClassLoader.loadExtensionImplementation((AttributeFunction) expression,
FunctionExecutorExtensionHolder.getInstance(siddhiQueryContext.getSiddhiAppContext()));
processAggregationSelectors(aggregationDefinition, siddhiQueryContext, tableMap,
incomingVariableExpressionExecutors, incomingMetaStreamEvent,
incomingExpressionExecutors, outputExpressions, outputAttribute, expression);
} catch (SiddhiAppCreationException e) {
throw new SiddhiAppCreationException("'" + ((AttributeFunction) expression).getName() +
"' is neither a incremental attribute aggregator extension or a function" +
" extension", expression.getQueryContextStartIndex(),
expression.getQueryContextEndIndex());
}
}
if (incrementalAggregator != null) {
initIncrementalAttributeAggregator(incomingLastInputStreamDefinition,
(AttributeFunction) expression, incrementalAggregator);
incrementalAttributeAggregators.add(incrementalAggregator);
aggregationDefinition.getAttributeList().add(
new Attribute(outputAttribute.getRename(), incrementalAggregator.getReturnType()));
outputExpressions.add(incrementalAggregator.aggregate());
}
} else {
if (expression instanceof Variable && groupByVariableList.contains(expression)) {
Attribute groupByAttribute = null;
for (Attribute attribute : incomingMetaStreamEvent.getOutputData()) {
if (attribute.getName().equals(((Variable) expression).getAttributeName())) {
groupByAttribute = attribute;
break;
}
}
if (groupByAttribute == null) {
throw new SiddhiAppCreationException("Expected GroupBy attribute '" +
((Variable) expression).getAttributeName() + "' not used in aggregation '" +
siddhiQueryContext.getName() + "' processing.", expression.getQueryContextStartIndex(),
expression.getQueryContextEndIndex());
}
aggregationDefinition.getAttributeList().add(
new Attribute(outputAttribute.getRename(), groupByAttribute.getType()));
outputExpressions.add(Expression.variable(groupByAttribute.getName()));
} else {
isLatestEventAdded = true;
processAggregationSelectors(aggregationDefinition, siddhiQueryContext, tableMap,
incomingVariableExpressionExecutors, incomingMetaStreamEvent,
incomingExpressionExecutors, outputExpressions, outputAttribute, expression);
}
}
}
if (isProcessingOnExternalTime && isLatestEventAdded) {
Attribute lastEventTimeStamp = new Attribute(AGG_LAST_TIMESTAMP_COL, Attribute.Type.LONG);
incomingMetaStreamEvent.addOutputData(lastEventTimeStamp);
incomingExpressionExecutors.add(externalTimestampExecutor);
}
}
private static void processAggregationSelectors(AggregationDefinition aggregationDefinition,
SiddhiQueryContext siddhiQueryContext, Map<String, Table> tableMap,
List<VariableExpressionExecutor> incomingVariableExpressionExecutors,
MetaStreamEvent incomingMetaStreamEvent,
List<ExpressionExecutor> incomingExpressionExecutors,
List<Expression> outputExpressions, OutputAttribute outputAttribute,
Expression expression) {
ExpressionExecutor expressionExecutor = ExpressionParser.parseExpression(expression, incomingMetaStreamEvent,
0, tableMap, incomingVariableExpressionExecutors, false, 0, ProcessingMode.BATCH, false,
siddhiQueryContext);
incomingExpressionExecutors.add(expressionExecutor);
incomingMetaStreamEvent.addOutputData(
new Attribute(outputAttribute.getRename(), expressionExecutor.getReturnType()));
aggregationDefinition.getAttributeList().add(
new Attribute(outputAttribute.getRename(), expressionExecutor.getReturnType()));
outputExpressions.add(Expression.variable(outputAttribute.getRename()));
}
    /**
     * Ensures that when two different incremental attribute aggregators share a base
     * attribute (same name), they agree on both its initial value and its base
     * aggregator expression; otherwise the shared table column would silently mean
     * different things for each aggregator.
     *
     * @param i index into this aggregator's base-attribute arrays identifying the
     *          base attribute being checked
     * @throws SiddhiAppCreationException on a conflicting definition
     */
    private static void validateBaseAggregators(List<IncrementalAttributeAggregator> incrementalAttributeAggregators,
                                                IncrementalAttributeAggregator incrementalAttributeAggregator,
                                                Attribute[] baseAttributes, Expression[] baseAttributeInitialValues,
                                                Expression[] baseAggregators, int i) {
        // NOTE(review): the scan starts at index 'i', which is an index into the
        // base-attribute arrays, not into the aggregator list. If the intent was to
        // compare against every other aggregator, this should start at 0 -- confirm
        // before relying on full pairwise validation.
        for (int i1 = i; i1 < incrementalAttributeAggregators.size(); i1++) {
            IncrementalAttributeAggregator otherAttributeAggregator = incrementalAttributeAggregators.get(i1);
            if (otherAttributeAggregator != incrementalAttributeAggregator) {
                Attribute[] otherBaseAttributes = otherAttributeAggregator.getBaseAttributes();
                Expression[] otherBaseAttributeInitialValues = otherAttributeAggregator
                        .getBaseAttributeInitialValues();
                Expression[] otherBaseAggregators = otherAttributeAggregator.getBaseAggregators();
                for (int j = 0; j < otherBaseAttributes.length; j++) {
                    if (baseAttributes[i].equals(otherBaseAttributes[j])) {
                        // Same base attribute name: both its initial value and its
                        // aggregator expression must match across definitions.
                        if (!baseAttributeInitialValues[i].equals(otherBaseAttributeInitialValues[j])) {
                            throw new SiddhiAppCreationException("BaseAttributes having same name should " +
                                    "be defined with same initial values, but baseAttribute '" +
                                    baseAttributes[i] + "' is defined in '" +
                                    incrementalAttributeAggregator.getClass().getName() + "' and '" +
                                    otherAttributeAggregator.getClass().getName() +
                                    "' with different initial values.");
                        }
                        if (!baseAggregators[i].equals(otherBaseAggregators[j])) {
                            throw new SiddhiAppCreationException("BaseAttributes having same name should " +
                                    "be defined with same baseAggregators, but baseAttribute '" +
                                    baseAttributes[i] + "' is defined in '" +
                                    incrementalAttributeAggregator.getClass().getName() + "' and '" +
                                    otherAttributeAggregator.getClass().getName() +
                                    "' with different baseAggregators.");
                        }
                    }
                }
            }
        }
    }
private static void initIncrementalAttributeAggregator(AbstractDefinition lastInputStreamDefinition,
AttributeFunction attributeFunction,
IncrementalAttributeAggregator incrementalAttributeAggregator) {
String attributeName = null;
Attribute.Type attributeType = null;
if (attributeFunction.getParameters() != null && attributeFunction.getParameters()[0] != null) {
if (attributeFunction.getParameters().length != 1) {
throw new SiddhiAppCreationException("Incremental aggregator requires only one parameter. "
+ "Found " + attributeFunction.getParameters().length,
attributeFunction.getQueryContextStartIndex(), attributeFunction.getQueryContextEndIndex());
}
if (!(attributeFunction.getParameters()[0] instanceof Variable)) {
throw new SiddhiAppCreationException("Incremental aggregator expected a variable. " +
"However a parameter of type " + attributeFunction.getParameters()[0].getClass().getTypeName()
+ " was found",
attributeFunction.getParameters()[0].getQueryContextStartIndex(),
attributeFunction.getParameters()[0].getQueryContextEndIndex());
}
attributeName = ((Variable) attributeFunction.getParameters()[0]).getAttributeName();
attributeType = lastInputStreamDefinition.getAttributeType(attributeName);
}
incrementalAttributeAggregator.init(attributeName, attributeType);
Attribute[] baseAttributes = incrementalAttributeAggregator.getBaseAttributes();
Expression[] baseAttributeInitialValues = incrementalAttributeAggregator
.getBaseAttributeInitialValues();
Expression[] baseAggregators = incrementalAttributeAggregator.getBaseAggregators();
if (baseAttributes.length != baseAggregators.length) {
throw new SiddhiAppCreationException("Number of baseAggregators '" +
baseAggregators.length + "' and baseAttributes '" +
baseAttributes.length + "' is not equal for '" + attributeFunction + "'",
attributeFunction.getQueryContextStartIndex(), attributeFunction.getQueryContextEndIndex());
}
if (baseAttributeInitialValues.length != baseAggregators.length) {
throw new SiddhiAppCreationException("Number of baseAggregators '" +
baseAggregators.length + "' and baseAttributeInitialValues '" +
baseAttributeInitialValues.length + "' is not equal for '" +
attributeFunction + "'",
attributeFunction.getQueryContextStartIndex(), attributeFunction.getQueryContextEndIndex());
}
}
private static ExpressionExecutor getTimeStampExecutor(SiddhiQueryContext siddhiQueryContext,
Map<String, Table> tableMap,
List<VariableExpressionExecutor> variableExpressionExecutors,
MetaStreamEvent metaStreamEvent) {
Expression timestampExpression;
ExpressionExecutor timestampExecutor;
// Execution is based on system time, the GMT time zone would be used.
timestampExpression = AttributeFunction.function("currentTimeMillis", null);
timestampExecutor = ExpressionParser.parseExpression(timestampExpression, metaStreamEvent, 0, tableMap,
variableExpressionExecutors, false, 0, ProcessingMode.BATCH, false, siddhiQueryContext);
return timestampExecutor;
}
private static boolean isRange(TimePeriod timePeriod) {
return timePeriod.getOperator() == TimePeriod.Operator.RANGE;
}
private static List<TimePeriod.Duration> getSortedPeriods(TimePeriod timePeriod) {
try {
List<TimePeriod.Duration> durations = timePeriod.getDurations();
if (isRange(timePeriod)) {
durations = fillGap(durations.get(0), durations.get(1));
}
return sortedDurations(durations);
} catch (Throwable t) {
ExceptionUtil.populateQueryContext(t, timePeriod, null);
throw t;
}
}
private static List<TimePeriod.Duration> sortedDurations(List<TimePeriod.Duration> durations) {
List<TimePeriod.Duration> copyDurations = new ArrayList<>(durations);
Comparator periodComparator = (Comparator<TimePeriod.Duration>) (firstDuration, secondDuration) -> {
int firstOrdinal = firstDuration.ordinal();
int secondOrdinal = secondDuration.ordinal();
if (firstOrdinal > secondOrdinal) {
return 1;
} else if (firstOrdinal < secondOrdinal) {
return -1;
}
return 0;
};
copyDurations.sort(periodComparator);
return copyDurations;
}
private static List<TimePeriod.Duration> fillGap(TimePeriod.Duration start, TimePeriod.Duration end) {
TimePeriod.Duration[] durations = TimePeriod.Duration.values();
List<TimePeriod.Duration> filledDurations = new ArrayList<>();
int startIndex = start.ordinal();
int endIndex = end.ordinal();
if (startIndex > endIndex) {
throw new SiddhiAppCreationException(
"Start time period must be less than end time period for range aggregation calculation");
}
if (startIndex == endIndex) {
filledDurations.add(start);
} else {
TimePeriod.Duration[] temp = new TimePeriod.Duration[endIndex - startIndex + 1];
System.arraycopy(durations, startIndex, temp, 0, endIndex - startIndex + 1);
filledDurations = Arrays.asList(temp);
}
return filledDurations;
}
    /**
     * Defines one backing table per duration (named "&lt;aggregator&gt;_&lt;DURATION&gt;")
     * with the schema of {@code streamDefinition}, and returns the created runtime
     * tables keyed by duration. A @PrimaryKey annotation is composed from the start
     * timestamp plus, when applicable, the shard id, the external timestamp and every
     * group-by attribute.
     * <p>
     * Note: mutates {@code annotations} by appending the primary-key annotation before
     * applying all annotations to each table definition.
     */
    private static HashMap<TimePeriod.Duration, Table> initDefaultTables(
            String aggregatorName, List<TimePeriod.Duration> durations,
            StreamDefinition streamDefinition, SiddhiAppRuntimeBuilder siddhiAppRuntimeBuilder,
            List<Annotation> annotations, List<Variable> groupByVariableList, boolean isProcessingOnExternalTime,
            boolean enablePartioning) {
        HashMap<TimePeriod.Duration, Table> aggregationTableMap = new HashMap<>();
        // Create annotations for primary key
        Annotation primaryKeyAnnotation = new Annotation(SiddhiConstants.ANNOTATION_PRIMARY_KEY);
        primaryKeyAnnotation.element(null, AGG_START_TIMESTAMP_COL);
        if (enablePartioning) {
            // Distributed setup: rows from different shards may share a timestamp.
            primaryKeyAnnotation.element(null, AGG_SHARD_ID_COL);
        }
        if (isProcessingOnExternalTime) {
            primaryKeyAnnotation.element(null, AGG_EXTERNAL_TIMESTAMP_COL);
        }
        for (Variable groupByVariable : groupByVariableList) {
            primaryKeyAnnotation.element(null, groupByVariable.getAttributeName());
        }
        annotations.add(primaryKeyAnnotation);
        for (TimePeriod.Duration duration : durations) {
            String tableId = aggregatorName + "_" + duration.toString();
            TableDefinition tableDefinition = TableDefinition.id(tableId);
            for (Attribute attribute : streamDefinition.getAttributeList()) {
                tableDefinition.attribute(attribute.getName(), attribute.getType());
            }
            annotations.forEach(tableDefinition::annotation);
            siddhiAppRuntimeBuilder.defineTable(tableDefinition);
            // defineTable registers the runtime Table in the builder's table map,
            // from which the concrete instance is fetched back by id.
            aggregationTableMap.put(duration, siddhiAppRuntimeBuilder.getTableMap().get(tableId));
        }
        return aggregationTableMap;
    }
public static StreamEvent createRestEvent(MetaStreamEvent metaStreamEvent, StreamEvent streamEvent) {
streamEvent.setTimestamp(0);
streamEvent.setType(ComplexEvent.Type.RESET);
List<Attribute> outputData = metaStreamEvent.getOutputData();
for (int i = 0, outputDataSize = outputData.size(); i < outputDataSize; i++) {
Attribute attribute = outputData.get(i);
switch (attribute.getType()) {
case STRING:
streamEvent.setOutputData("", i);
break;
case INT:
streamEvent.setOutputData(0, i);
break;
case LONG:
streamEvent.setOutputData(0L, i);
break;
case FLOAT:
streamEvent.setOutputData(0f, i);
break;
case DOUBLE:
streamEvent.setOutputData(0.0, i);
break;
case BOOL:
streamEvent.setOutputData(false, i);
break;
case OBJECT:
streamEvent.setOutputData(null, i);
break;
}
}
return streamEvent;
}
}
| |
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package com.PortfolioManager.domain.entities;
import com.PortfolioManager.domain.dao.IOrderStock;
import com.PortfolioManager.domain.dao.IPortfolioHistory;
import com.PortfolioManager.domain.impl.OrderStockDAO;
import com.PortfolioManager.domain.impl.PortfolioHistoryDAO;
import java.io.Serializable;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.StringTokenizer;
import javax.persistence.CascadeType;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.ManyToOne;
import javax.persistence.OneToMany;
import javax.persistence.OneToOne;
/**
*
* @author Ahmed
*/
@Entity
public class Portfolio implements Serializable {

    private static final long serialVersionUID = 1L;

    /** Surrogate primary key generated by the persistence provider. */
    @Id
    @GeneratedValue(strategy = GenerationType.AUTO)
    private Long id;

    /** Owning account; eagerly fetched. */
    @ManyToOne(fetch = FetchType.EAGER)
    private Account account;

    /** Open stock orders held by this portfolio. */
    @OneToMany(cascade = CascadeType.ALL, mappedBy = "portfolio")
    private Collection<OrderStock> orderStocks;

    /** Operations performed on this portfolio. */
    @OneToMany(cascade = CascadeType.ALL, mappedBy = "portfolio")
    private Collection<PortfolioOperation> operations;

    /** Periodic value snapshots of this portfolio. */
    @OneToMany(cascade = CascadeType.ALL, mappedBy = "portfolio")
    private Collection<PortfolioHistory> portfolioHistory;

    /** Market this portfolio trades in. */
    @OneToOne
    private Market market;

    // Monetary figures are kept as double to match the existing schema.
    // NOTE(review): double is lossy for money; migrating the persisted type to
    // BigDecimal or a scaled long would be safer -- confirm before changing.
    private double potentialEarnings;
    private double initialMoney;
    private double liquidMoney;
    private double portfolioBeta;
    private double portfolioPerformance;
    private double portfolioToDayValue;
    private String name;
    private double RIRate;
    private double investedMoney;
    private double portfolioReturn;

    /** @return the risk-free interest rate used for this portfolio */
    public double getRIRate() {
        return RIRate;
    }

    public void setRIRate(double RIRate) {
        this.RIRate = RIRate;
    }

    /** @return the most recently computed total value (see {@link #calculateToDayValue()}) */
    public double getPortfolioToDayValue() {
        return portfolioToDayValue;
    }

    public void setPortfolioToDayValue(double portfolioToDayValue) {
        this.portfolioToDayValue = portfolioToDayValue;
    }

    public Collection<PortfolioHistory> getPortfolioHistory() {
        return portfolioHistory;
    }

    public void setPortfolioHistory(Collection<PortfolioHistory> portfolioHistory) {
        this.portfolioHistory = portfolioHistory;
    }

    public double getPortfolioReturn() {
        return portfolioReturn;
    }

    public void setPortfolioReturn(double portfolioReturn) {
        this.portfolioReturn = portfolioReturn;
    }

    public double getPortfolioBeta() {
        return portfolioBeta;
    }

    public void setPortfolioBeta(double portfolioBeta) {
        this.portfolioBeta = portfolioBeta;
    }

    /** Required by JPA. */
    public Portfolio() {
    }

    /**
     * Creates a portfolio seeded with the given amount of liquid money; the same
     * amount is recorded as the initial-investment baseline.
     */
    public Portfolio(double liquidMoney) {
        orderStocks = new HashSet<OrderStock>();
        operations = new HashSet<PortfolioOperation>();
        this.liquidMoney = liquidMoney;
        this.initialMoney = liquidMoney;
        potentialEarnings = 0.0;
        investedMoney = 0.0;
    }

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public Account getAccount() {
        return account;
    }

    public void setAccount(Account account) {
        this.account = account;
    }

    public Collection<OrderStock> getOrders() {
        return orderStocks;
    }

    public void setOrderStocks(Collection<OrderStock> orderStocks) {
        this.orderStocks = orderStocks;
    }

    /**
     * Adds an order to the portfolio, moving its price from liquid to invested money.
     */
    public void addOrderStock(OrderStock orderStock) {
        orderStock.setPortfolio(this);
        orderStocks.add(orderStock);
        investedMoney += orderStock.getPrice();
        liquidMoney -= orderStock.getPrice();
    }

    /**
     * Removes an order, returning its price plus its potential earnings to liquid
     * money, and deletes the order through the DAO.
     * <p>
     * NOTE(review): persistence access from inside the entity couples it to the DAO
     * layer; kept as-is because callers rely on the delete happening here.
     */
    public void removeOrderStock(OrderStock orderStock) {
        //still more
        orderStocks.remove(orderStock);
        investedMoney -= orderStock.getPrice();
        liquidMoney += orderStock.getPrice();
        liquidMoney += orderStock.getPotentialEarnings();
        IOrderStock iOrderStock = new OrderStockDAO();
        iOrderStock.delete(orderStock.getOrderID());
    }

    public Collection<PortfolioOperation> getOperations() {
        return operations;
    }

    public void setOperations(Collection<PortfolioOperation> operations) {
        this.operations = operations;
    }

    /** Records an operation and links it back to this portfolio. */
    public void addOperation(PortfolioOperation operation) {
        operation.setPortfolio(this);
        operations.add(operation);
    }

    /** Records a history snapshot and links it back to this portfolio. */
    public void addPortfolioHistory(PortfolioHistory portfolioHistory) {
        portfolioHistory.setPortfolio(this);
        this.portfolioHistory.add(portfolioHistory);
    }

    /** Loads this portfolio's history entry for the current year/month via the DAO. */
    public PortfolioHistory getPortfolioHistoryForYearAndMonth() {
        IPortfolioHistory iPortfolioHistory = new PortfolioHistoryDAO();
        return iPortfolioHistory.getHistoryByYearAndMonth(this.id);
    }

    /**
     * @return the potentialEarnings
     */
    public double getPotentialEarnings() {
        return potentialEarnings;
    }

    /**
     * @param potentialEarnings the potentialEarnings to set
     */
    public void setPotentialEarnings(double potentialEarnings) {
        this.potentialEarnings = potentialEarnings;
    }

    /**
     * @return the market
     */
    public Market getMarket() {
        return market;
    }

    /**
     * @param market the market to set
     */
    public void setMarket(Market market) {
        this.market = market;
    }

    /**
     * @return the liquidMoney
     */
    public double getLiquidMoney() {
        return liquidMoney;
    }

    /**
     * @param liquidMoney the liquidMoney to set
     */
    public void setLiquidMoney(double liquidMoney) {
        this.liquidMoney = liquidMoney;
    }

    /**
     * @return the investedMoney
     */
    public double getInvestedMoney() {
        return investedMoney;
    }

    /**
     * @param investedMoney the investedMoney to set
     */
    public void setInvestedMoney(double investedMoney) {
        this.investedMoney = investedMoney;
    }

    /**
     * @return the portfolioPerformance
     */
    public double getPortfolioPerformance() {
        return portfolioPerformance;
    }

    /**
     * @param portfolioPerformance the portfolioPerformance to set
     */
    public void setPortfolioPerformance(double portfolioPerformance) {
        this.portfolioPerformance = portfolioPerformance;
    }

    /**
     * Recomputes {@link #potentialEarnings} as the sum of every open order's
     * potential earnings and returns the new total.
     */
    public double calculateEarnings() {
        double sum = 0.0;
        for (OrderStock orderStock : orderStocks) {
            sum += orderStock.getPotentialEarnings();
        }
        setPotentialEarnings(sum);
        return getPotentialEarnings();
    }

    /**
     * Recomputes and stores the portfolio's current total value:
     * liquid money + invested money + potential earnings.
     */
    public double calculateToDayValue() {
        double value = 0;
        calculateEarnings();
        value += getLiquidMoney() + getInvestedMoney() + getPotentialEarnings();
        setPortfolioToDayValue(value);
        return value;
    }

    /**
     * Formats a monetary amount with two decimal places, truncating (not rounding)
     * any extra precision.
     */
    public String displayMoney(double m) {
        // BigDecimal.valueOf(m) uses the double's canonical decimal string, and
        // toPlainString() never emits scientific notation. The previous
        // StringTokenizer-based parsing produced corrupt output for |m| >= 1.0E7
        // (Double.toString switches to "1.0E7" style) and assumed a '.' token was
        // always present. RoundingMode.DOWN preserves the old truncation
        // behaviour (e.g. 5.259 -> "5.25", -5.259 -> "-5.25").
        return BigDecimal.valueOf(m).setScale(2, RoundingMode.DOWN).toPlainString();
    }

    /**
     * @return the name
     */
    public String getName() {
        return name;
    }

    /**
     * @param name the name to set
     */
    public void setName(String name) {
        this.name = name;
    }

    /** Increases liquid money by the given amount. */
    public void credit(double money) {
        liquidMoney += money;
    }

    /** Decreases liquid money by the given amount. */
    public void debit(double money) {
        liquidMoney -= money;
    }

    @Override
    public int hashCode() {
        int hash = 0;
        hash += (id != null ? id.hashCode() : 0);
        return hash;
    }

    @Override
    public boolean equals(Object object) {
        // TODO: Warning - this method won't work in the case the id fields are not set
        if (!(object instanceof Portfolio)) {
            return false;
        }
        Portfolio other = (Portfolio) object;
        if ((this.id == null && other.id != null) || (this.id != null && !this.id.equals(other.id))) {
            return false;
        }
        return true;
    }

    @Override
    public String toString() {
        return "Portfolio{" + "id=" + id + ", account=" + account + ", returnValue=" + potentialEarnings + ", liquidMoney=" + liquidMoney + ", investedMoney=" + investedMoney + ", performance=" + portfolioPerformance + ", name=" + name + ", market=" + market + '}';
    }

    /**
     * @return the initialMoney
     */
    public double getInitialMoney() {
        return initialMoney;
    }

    /**
     * @param initialMoney the initialMoney to set
     */
    public void setInitialMoney(double initialMoney) {
        this.initialMoney = initialMoney;
    }
}
| |
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package org.jetbrains.plugins.groovy.refactoring.introduce.parameter;
import com.intellij.openapi.application.WriteAction;
import com.intellij.openapi.editor.impl.DocumentMarkupModel;
import com.intellij.openapi.editor.markup.EffectType;
import com.intellij.openapi.editor.markup.HighlighterTargetArea;
import com.intellij.openapi.editor.markup.MarkupModel;
import com.intellij.openapi.editor.markup.TextAttributes;
import com.intellij.openapi.util.TextRange;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiMethod;
import com.intellij.psi.PsiParameter;
import com.intellij.psi.PsiType;
import com.intellij.refactoring.IntroduceParameterRefactoring;
import com.intellij.refactoring.introduce.inplace.OccurrencesChooser;
import com.intellij.ui.JBColor;
import com.intellij.ui.components.JBCheckBox;
import com.intellij.usageView.UsageInfo;
import com.intellij.util.ArrayUtil;
import com.intellij.util.ArrayUtilRt;
import gnu.trove.TIntArrayList;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.plugins.groovy.GroovyBundle;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.GrParameterListOwner;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.GrVariable;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.expressions.GrExpression;
import org.jetbrains.plugins.groovy.refactoring.introduce.GrAbstractInplaceIntroducer;
import org.jetbrains.plugins.groovy.refactoring.introduce.GrIntroduceContext;
import org.jetbrains.plugins.groovy.refactoring.introduce.GrIntroduceHandlerBase;
import javax.swing.*;
import javax.swing.border.EmptyBorder;
import java.awt.*;
import java.util.ArrayList;
import java.util.LinkedHashSet;
import java.util.List;
/**
 * Inplace introducer for the Groovy "Introduce Parameter" refactoring.
 * Shows a live preview of the target method signature (the added parameter is boxed,
 * parameters scheduled for removal are struck out) plus a "delegate" checkbox.
 */
public class GrInplaceParameterIntroducer extends GrAbstractInplaceIntroducer<GrIntroduceParameterSettings> {
  private final IntroduceParameterInfo myInfo;
  // Indices of existing parameters that become unused and should be removed.
  private final TIntArrayList myParametersToRemove;
  private JBCheckBox myDelegateCB;
  private final LinkedHashSet<String> mySuggestedNames;

  public GrInplaceParameterIntroducer(IntroduceParameterInfo info, GrIntroduceContext context, OccurrencesChooser.ReplaceChoice choice) {
    super(GrIntroduceParameterHandler.getRefactoringName(), choice, context);
    myInfo = info;

    GrVariable localVar = GrIntroduceHandlerBase.resolveLocalVar(context);
    mySuggestedNames = GroovyIntroduceParameterUtil.suggestNames(localVar, context.getExpression(), context.getStringPart(), info.getToReplaceIn(), context.getProject());
    myParametersToRemove = new TIntArrayList(GroovyIntroduceParameterUtil.findParametersToRemove(info).getValues());
  }

  @Override
  protected String getActionName() {
    return GrIntroduceParameterHandler.getRefactoringName();
  }

  @Override
  protected String @NotNull [] suggestNames(boolean replaceAll, @Nullable GrVariable variable) {
    return ArrayUtilRt.toStringArray(mySuggestedNames);
  }

  /**
   * Builds the popup component: the signature preview editor on top and the
   * "delegate via overloading method" checkbox below it.
   */
  @Override
  protected JComponent getComponent() {
    JPanel previewPanel = new JPanel(new BorderLayout());
    previewPanel.add(getPreviewEditor().getComponent(), BorderLayout.CENTER);
    previewPanel.setBorder(new EmptyBorder(2, 2, 6, 2));

    myDelegateCB = new JBCheckBox(GroovyBundle.message("checkbox.delegate.via.overloading.method"));
    myDelegateCB.setMnemonic('l');
    myDelegateCB.setFocusable(false);

    JPanel panel = new JPanel(new BorderLayout());
    panel.add(previewPanel, BorderLayout.CENTER);
    panel.add(myDelegateCB, BorderLayout.SOUTH);
    return panel;
  }

  @Override
  protected void saveSettings(@NotNull GrVariable variable) {
    // Nothing to persist for this refactoring.
  }

  @Override
  protected void updateTitle(@Nullable GrVariable variable) {
    if (variable == null) return;
    updateTitle(variable, variable.getName());
  }

  /**
   * Renders the preview "methodName (type1 p1, type2 p2, ...)" for the method being changed.
   * The freshly introduced parameter is highlighted with {@link #getTextAttributesForAdd()},
   * parameters to be removed with {@link #getTextAttributesForRemoval()}.
   */
  @Override
  protected void updateTitle(@Nullable GrVariable variable, String value) {
    if (getPreviewEditor() == null || variable == null) return;

    final PsiElement declarationScope = ((PsiParameter)variable).getDeclarationScope();
    if (declarationScope instanceof PsiMethod) {
      final PsiMethod psiMethod = (PsiMethod)declarationScope;
      final StringBuilder buf = new StringBuilder();
      buf.append(psiMethod.getName()).append(" (");
      boolean first = true;
      final List<TextRange> ranges2Remove = new ArrayList<>();
      TextRange addedRange = null;
      int i = 0;
      for (PsiParameter parameter : psiMethod.getParameterList().getParameters()) {
        if (first) {
          first = false;
        }
        else {
          buf.append(", ");
        }
        int startOffset = buf.length();
        // Substitute the live inplace name for the introduced parameter itself.
        buf.append(parameter.getType().getPresentableText()).append(" ").append(variable == parameter ? value : parameter.getName());
        int endOffset = buf.length();
        if (variable == parameter) {
          addedRange = new TextRange(startOffset, endOffset);
        }
        else if (myParametersToRemove.contains(i)) {
          ranges2Remove.add(new TextRange(startOffset, endOffset));
        }
        i++;
      }

      // The introduced parameter must be part of the method's parameter list.
      assert addedRange != null;
      buf.append(")");
      setPreviewText(buf.toString());

      final MarkupModel markupModel = DocumentMarkupModel.forDocument(getPreviewEditor().getDocument(), myProject, true);
      markupModel.removeAllHighlighters();
      for (TextRange textRange : ranges2Remove) {
        markupModel.addRangeHighlighter(textRange.getStartOffset(), textRange.getEndOffset(), 0, getTextAttributesForRemoval(), HighlighterTargetArea.EXACT_RANGE);
      }
      markupModel.addRangeHighlighter(addedRange.getStartOffset(), addedRange.getEndOffset(), 0, getTextAttributesForAdd(), HighlighterTargetArea.EXACT_RANGE);
    }
  }

  /** Rounded red box: marks the newly added parameter in the preview. */
  private static TextAttributes getTextAttributesForAdd() {
    final TextAttributes textAttributes = new TextAttributes();
    textAttributes.setEffectType(EffectType.ROUNDED_BOX);
    textAttributes.setEffectColor(JBColor.RED);
    return textAttributes;
  }

  /** Strikeout: marks parameters that will be removed. (Renamed from the typo "getTestAttributesForRemoval".) */
  private static TextAttributes getTextAttributesForRemoval() {
    final TextAttributes textAttributes = new TextAttributes();
    textAttributes.setEffectType(EffectType.STRIKEOUT);
    textAttributes.setEffectColor(JBColor.BLACK);
    return textAttributes;
  }

  @Override
  protected GrVariable runRefactoring(GrIntroduceContext context, GrIntroduceParameterSettings settings, boolean processUsages) {
    GrExpressionWrapper wrapper = createExpressionWrapper(context);
    if (processUsages) {
      GrIntroduceExpressionSettingsImpl patchedSettings =
        new GrIntroduceExpressionSettingsImpl(settings, settings.getName(), settings.declareFinal(), settings.parametersToRemove(),
                                              settings.generateDelegate(), settings.replaceFieldsWithGetters(), context.getExpression(),
                                              context.getVar(), settings.getSelectedType(), context.getVar() != null || settings.replaceAllOccurrences(),
                                              context.getVar() != null, settings.isForceReturn());
      GrIntroduceParameterProcessor processor = new GrIntroduceParameterProcessor(patchedSettings, wrapper);
      processor.run();
    }
    else {
      // No usages to process: perform the raw refactoring under a write action.
      WriteAction.run(() -> new GrIntroduceParameterProcessor(settings, wrapper).performRefactoring(UsageInfo.EMPTY_ARRAY));
    }
    GrParameterListOwner owner = settings.getToReplaceIn();
    // The freshly introduced parameter is appended last.
    return ArrayUtil.getLastElement(owner.getParameters());
  }

  @NotNull
  private static GrExpressionWrapper createExpressionWrapper(@NotNull GrIntroduceContext context) {
    GrExpression expression = context.getExpression();
    GrVariable var = context.getVar();
    // The context always carries either a selected expression or a local variable.
    assert expression != null || var != null;

    GrExpression initializer = expression != null ? expression : var.getInitializerGroovy();
    return new GrExpressionWrapper(initializer);
  }

  @Nullable
  @Override
  protected GrIntroduceParameterSettings getInitialSettingsForInplace(@NotNull GrIntroduceContext context,
                                                                     @NotNull OccurrencesChooser.ReplaceChoice choice,
                                                                     String[] names) {
    GrExpression expression = context.getExpression();
    GrVariable var = context.getVar();
    PsiType type = var != null ? var.getDeclaredType() :
                   expression != null ? expression.getType() :
                   null;
    return new GrIntroduceExpressionSettingsImpl(myInfo, names[0], false, new TIntArrayList(), false,
                                                 IntroduceParameterRefactoring.REPLACE_FIELDS_WITH_GETTERS_NONE, expression,
                                                 var, type, false, false, false);
  }

  @Override
  protected GrIntroduceParameterSettings getSettings() {
    return new GrIntroduceExpressionSettingsImpl(myInfo, getInputName(), false, myParametersToRemove, myDelegateCB.isSelected(),
                                                 IntroduceParameterRefactoring.REPLACE_FIELDS_WITH_GETTERS_NONE, null,
                                                 null, getSelectedType(), isReplaceAllOccurrences(), false, false);
  }
}
| |
package org.bds.data;
import java.io.File;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.URLConnection;
import java.util.ArrayList;
import java.util.Date;
import org.apache.http.client.utils.URIBuilder;
import org.bds.Config;
import org.bds.util.Gpr;
import org.bds.util.Timer;
/**
* A file / directory on a web server
*
* @author pcingola
*/
/**
 * A file / directory on a web server, accessed over HTTP(S).
 * Read-only: delete / upload / mkdirs are unsupported.
 *
 * @author pcingola
 */
public class DataHttp extends DataRemote {

	// Copy buffer size for downloads (100 KiB)
	private static final int BUFFER_SIZE = 100 * 1024;

	public final int HTTP_OK = 200; // Connection OK
	public final int HTTP_REDIR = 302; // The requested resource resides temporarily under a different URI
	public final int HTTP_NOTFOUND = 404; // The requested resource could not be found on the server

	protected URL url;

	public DataHttp(String urlStr) {
		super();
		url = parseUrl(urlStr);
		canWrite = false; // HTTP resources are read-only
	}

	/**
	 * Close connection
	 */
	protected void close(URLConnection connection) {
		// Nothing to do: URLConnection has no explicit close; streams are closed by callers
	}

	/**
	 * Connect and follow HTTP redirects (302 only).
	 *
	 * @return an open connection with status 200, or null on 404 / other errors
	 *         (in which case 'canRead' is set to false)
	 */
	protected URLConnection connect() {
		try {
			if (verbose) Timer.showStdErr("Connecting to " + url);
			URLConnection connection = url.openConnection();

			// Follow redirect? (only for http connections)
			if (connection instanceof HttpURLConnection) {
				for (boolean followRedirect = true; followRedirect;) {
					HttpURLConnection httpConnection = (HttpURLConnection) connection;
					int code = httpConnection.getResponseCode();

					switch (code) {
					case HTTP_OK:
						// Status OK
						return connection;

					case HTTP_REDIR:
						// NOTE: only 302 is followed here; 301/303/307 fall through to the error branch
						String newUrl = connection.getHeaderField("Location");
						if (verbose) Timer.showStdErr("Following redirect: " + newUrl);
						url = new URL(newUrl);
						connection = url.openConnection();
						break;

					case HTTP_NOTFOUND:
						canRead = false;
						if (verbose) Timer.showStdErr("File '" + url + "' not found on server.");
						return null;

					default:
						canRead = false;
						if (verbose) Timer.showStdErr("Server error " + code + " for URL '" + url + "'");
						return null;
					}
				}
			}
		} catch (Exception e) {
			Timer.showStdErr("ERROR while connecting to " + this);
			throw new RuntimeException(e);
		}

		return null;
	}

	@Override
	public boolean delete() {
		// Cannot delete a remote HTTP resource
		if (verbose) Timer.showStdErr("Cannot delete file '" + getUrl() + "'");
		return false;
	}

	@Override
	public void deleteOnExit() {
		throw new RuntimeException("Unimplemented!");
	}

	/**
	 * Download the remote resource to 'localFile'.
	 *
	 * @return true on success, false if the connection could not be established
	 */
	@Override
	public boolean download(String localFile) {
		URLConnection connection = null;
		try {
			// Connect and update info
			connection = connect();
			if (connection == null) return false;
			updateInfo(connection);

			if (verbose) Timer.showStdErr("Local file name: '" + localFile + "'");

			// Create local directory if it doesn't exist
			mkdirsLocal(localFile);

			// Copy resource to local file. Try-with-resources guarantees both streams
			// are closed even if the copy fails part-way (the previous version leaked on error).
			try (InputStream is = url.openStream(); FileOutputStream os = new FileOutputStream(localFile)) {
				int count = 0, total = 0, lastShown = 0;
				byte[] data = new byte[BUFFER_SIZE];
				while ((count = is.read(data, 0, BUFFER_SIZE)) != -1) {
					os.write(data, 0, count);
					total += count;
					if (verbose) {
						// Show a progress dot every MB
						if ((total - lastShown) > (1024 * 1024)) {
							System.err.print(".");
							lastShown = total;
						}
					}
				}
				if (verbose) System.err.println("");
				if (verbose) Timer.showStdErr("Download finished. Total " + total + " bytes.");
			}

			// Update file's last modified timestamp to match the remote resource
			updateLocalFileLastModified();
			return true;
		} catch (Exception e) {
			Timer.showStdErr("ERROR while connecting to " + getUrl());
			throw new RuntimeException(e);
		} finally {
			close(connection);
		}
	}

	@Override
	public String getAbsolutePath() {
		return url.toString();
	}

	@Override
	public String getName() {
		// Last component of the URL path
		File path = new File(url.getPath());
		return path.getName();
	}

	@Override
	public String getParent() {
		try {
			String path = url.getPath();
			String paren = (new File(path)).getParent();
			// Rebuild the URL with the parent path, dropping query and fragment
			URI uri = new URI(url.getProtocol(), url.getAuthority(), paren, null, null);
			return uri.toString();
		} catch (URISyntaxException e) {
			throw new RuntimeException("Error parsing URL: " + url, e);
		}
	}

	@Override
	public String getPath() {
		return url.getPath();
	}

	public URL getUrl() {
		return url;
	}

	/**
	 * HTTP has no concept of directory
	 */
	@Override
	public boolean isDirectory() {
		return false;
	}

	@Override
	public boolean isFile() {
		return true;
	}

	@Override
	public ArrayList<String> list() {
		// Directories cannot be listed over plain HTTP
		return new ArrayList<>();
	}

	/**
	 * Build the local cache path for this URL:
	 * tmpDir/TMP_BDS_DATA/protocol/host/port/path.../query...
	 * Each component is sanitized so it is a valid file name.
	 */
	@Override
	protected String localPath() {
		StringBuilder sb = new StringBuilder();
		sb.append(Config.get().getTmpDir() + "/" + TMP_BDS_DATA);
		sb.append("/" + url.getProtocol());

		// Authority: Host and port
		if (url.getAuthority() != null) {
			for (String part : url.getAuthority().split("[:\\.]")) {
				if (!part.isEmpty()) sb.append("/" + Gpr.sanityzeName(part));
			}
		}

		// Path
		if (url.getPath() != null) {
			for (String part : url.getPath().split("/")) {
				if (!part.isEmpty()) sb.append("/" + Gpr.sanityzeName(part));
			}
		}

		// Query
		if (url.getQuery() != null) {
			for (String part : url.getQuery().split("&")) {
				if (!part.isEmpty()) sb.append("/" + Gpr.sanityzeName(part));
			}
		}

		return sb.toString();
	}

	/**
	 * Cannot create remote dirs in http
	 */
	@Override
	public boolean mkdirs() {
		return false;
	}

	/**
	 * Parse (and percent-encode) a URL string; strings without a protocol
	 * are interpreted as 'file' URLs.
	 */
	protected URL parseUrl(String urlStr) {
		try {
			// No protocol: file
			if (urlStr.indexOf(PROTOCOL_SEP) < 0) return new URL("file" + PROTOCOL_SEP + urlStr);

			// Encode the url
			URIBuilder ub = new URIBuilder(urlStr);
			return ub.build().toURL();
		} catch (URISyntaxException | MalformedURLException e) {
			throw new RuntimeException("Cannot parse URL " + urlStr, e);
		}
	}

	/**
	 * Connect and update info
	 */
	@Override
	protected boolean updateInfo() {
		URLConnection connection = connect();
		boolean ok = updateInfo(connection);
		close(connection);
		return ok;
	}

	/**
	 * Update cached metadata (size, last-modified, readability) from an open connection.
	 * A null connection marks the resource as unreadable / non-existent.
	 */
	protected boolean updateInfo(URLConnection connection) {
		latestUpdate = new Timer(CACHE_TIMEOUT);
		boolean ok;
		if (connection == null) {
			// Cannot connect
			canRead = false;
			exists = false;
			lastModified = new Date(0);
			size = 0;
			ok = false;
		} else {
			// Update data
			size = connection.getContentLengthLong(); // Could be negative (unspecified)
			canRead = true;
			exists = true;

			// Last modified
			long lastMod = connection.getLastModified();
			if (lastMod == 0) lastMod = connection.getDate(); // If last_modified is not found, use 'date' (e.g. dynamic content)
			lastModified = new Date(lastMod);

			ok = true;
		}

		// Show information
		if (debug) Timer.showStdErr("Updated information for '" + this + "'"//
				+ "\n\tcanRead : " + canRead //
				+ "\n\texists : " + exists //
				+ "\n\tlast modified: " + lastModified //
				+ "\n\tsize : " + size //
		);

		return ok;
	}

	/**
	 * Cannot upload to a web server
	 */
	@Override
	public boolean upload(String localFileName) {
		return false;
	}
}
| |
/*
* Title: CarGUI
* Author: Matthew Boyette
* Date: 4/13/2013
*
* This class provides a control panel for a Car object.
*/
package api.gui.swing;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.FlowLayout;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import javax.swing.JButton;
import javax.swing.JPanel;
import javax.swing.JSlider;
import javax.swing.SwingConstants;
import javax.swing.Timer;
import api.gui.draw.Car;
import api.util.Support;
/**
 * A Swing control panel for a {@link Car}: motion controls (start/stop/direction/speed),
 * sound controls (horn/radio) and body-color buttons. A periodic timer keeps the car's
 * velocity in sync with the speed slider.
 */
public class CarGUI extends JPanel implements ActionListener
{
    /**
     * Timer callback that copies the slider value into the car's velocity.
     */
    protected static class GUIListener implements ActionListener
    {
        protected CarGUI parent = null;

        public GUIListener(final CarGUI gui)
        {
            this.parent = gui;
        }

        @Override
        public final void actionPerformed(final ActionEvent event)
        {
            // Poll the slider and push the selected speed into the car model.
            final Car vehicle = this.parent.getCar();
            final JSlider speedControl = this.parent.getVelocitySlider();
            vehicle.setVelocity(speedControl.getValue());
        }
    }

    protected static final long serialVersionUID = 1L;
    protected JButton blueBodyButton = null;
    protected Car car = null;
    protected JButton directionButton = null;
    protected JButton greenBodyButton = null;
    // Fires roughly three times per second to keep car velocity in sync with the slider.
    protected final Timer guiTimer = new Timer(333, new GUIListener(this));
    protected JButton hornButton = null;
    protected JButton redBodyButton = null;
    protected JButton startCarButton = null;
    protected JButton startRadioButton = null;
    protected JButton stopCarButton = null;
    protected JButton stopRadioButton = null;
    protected JSlider velocitySlider = null;
    protected JButton yellowBodyButton = null;

    public CarGUI(final Car car)
    {
        this.setCar(car);
        this.drawGUI();
        this.guiTimer.start();
    }

    /**
     * Dispatches button presses to the matching {@link Car} operation,
     * keyed by the button's action command string.
     */
    @Override
    public void actionPerformed(final ActionEvent event)
    {
        final String command = event.getActionCommand();

        if ( command.equals("Start Car") )
        {
            this.getCar().animationStart();
        }
        else if ( command.equals("Stop Car") )
        {
            this.getCar().animationStop();
        }
        else if ( command.equals("Change Direction") )
        {
            // Toggle between the two travel directions.
            if ( this.getCar().getDirection() == Car.DIRECTION_LEFT )
            {
                this.getCar().setDirection(Car.DIRECTION_RIGHT);
            }
            else
            {
                this.getCar().setDirection(Car.DIRECTION_LEFT);
            }
        }
        else if ( command.equals("Honk Horn") )
        {
            this.getCar().honkHorn();
        }
        else if ( command.equals("Start Radio") )
        {
            this.getCar().startRadio();
        }
        else if ( command.equals("Stop Radio") )
        {
            this.getCar().stopRadio();
        }
        else if ( command.equals("Red") )
        {
            this.getCar().setBodyColor(Color.RED);
        }
        else if ( command.equals("Blue") )
        {
            this.getCar().setBodyColor(Color.BLUE);
        }
        else if ( command.equals("Green") )
        {
            this.getCar().setBodyColor(Color.GREEN);
        }
        else if ( command.equals("Yellow") )
        {
            this.getCar().setBodyColor(Color.YELLOW);
        }
        // Any other command is ignored.
    }

    /**
     * Builds the three control panels (motion / sound / color) and wires every
     * button to this panel's action listener.
     */
    public void drawGUI()
    {
        final JPanel motionControls = new JPanel();
        final JPanel soundControls = new JPanel();
        final JPanel colorControls = new JPanel();

        this.startCarButton = new JButton("Start Car");
        this.stopCarButton = new JButton("Stop Car");
        this.directionButton = new JButton("Change Direction");
        this.hornButton = new JButton("Honk Horn");
        this.velocitySlider = new JSlider(SwingConstants.HORIZONTAL, 1, 3, 1);
        this.startRadioButton = new JButton("Start Radio");
        this.stopRadioButton = new JButton("Stop Radio");
        this.redBodyButton = new JButton("Red");
        this.blueBodyButton = new JButton("Blue");
        this.greenBodyButton = new JButton("Green");
        this.yellowBodyButton = new JButton("Yellow");

        // All buttons share the same font and listener wiring.
        final JButton[] allButtons =
        {
            this.startCarButton, this.stopCarButton, this.directionButton, this.hornButton,
            this.startRadioButton, this.stopRadioButton, this.redBodyButton, this.blueBodyButton,
            this.greenBodyButton, this.yellowBodyButton
        };

        for ( final JButton button : allButtons )
        {
            button.setFont(Support.DEFAULT_TEXT_FONT);
            button.addActionListener(this);
        }

        // The slider gets the font but no action listener; the timer polls it instead.
        this.velocitySlider.setFont(Support.DEFAULT_TEXT_FONT);

        this.setLayout(new BorderLayout());
        motionControls.setLayout(new BorderLayout());
        soundControls.setLayout(new FlowLayout());
        colorControls.setLayout(new FlowLayout());

        this.add(motionControls, BorderLayout.NORTH);
        this.add(soundControls, BorderLayout.CENTER);
        this.add(colorControls, BorderLayout.SOUTH);

        motionControls.add(this.startCarButton, BorderLayout.EAST);
        motionControls.add(this.stopCarButton, BorderLayout.WEST);
        motionControls.add(this.directionButton, BorderLayout.CENTER);
        motionControls.add(this.velocitySlider, BorderLayout.SOUTH);

        soundControls.add(this.stopRadioButton);
        soundControls.add(this.hornButton);
        soundControls.add(this.startRadioButton);

        colorControls.add(this.redBodyButton);
        colorControls.add(this.blueBodyButton);
        colorControls.add(this.greenBodyButton);
        colorControls.add(this.yellowBodyButton);
    }

    public final Car getCar()
    {
        return this.car;
    }

    public final JSlider getVelocitySlider()
    {
        return this.velocitySlider;
    }

    public final void setCar(final Car car)
    {
        this.car = car;
    }
}
| |
/*
* Copyright 2015 herd contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.finra.herd.service;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
import java.util.ArrayList;
import org.junit.Assert;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.finra.herd.model.AlreadyExistsException;
import org.finra.herd.model.ObjectNotFoundException;
import org.finra.herd.model.api.xml.BusinessObjectDataKey;
import org.finra.herd.model.api.xml.BusinessObjectDataStorageUnitKey;
import org.finra.herd.model.api.xml.BusinessObjectDefinitionKey;
import org.finra.herd.model.api.xml.BusinessObjectFormat;
import org.finra.herd.model.api.xml.RelationalTableRegistrationCreateRequest;
import org.finra.herd.model.dto.RelationalStorageAttributesDto;
import org.finra.herd.model.dto.RelationalTableRegistrationDto;
import org.finra.herd.model.jpa.FileTypeEntity;
import org.finra.herd.model.jpa.StoragePlatformEntity;
public class RelationalTableRegistrationHelperServiceTest extends AbstractServiceTest
{
// The implementation bean is autowired directly (by qualifier) so tests can invoke the
// methods carrying explicit transaction-propagation annotations without going through
// the regular service interface — see the propagation coverage test at the bottom of this class.
@Autowired
@Qualifier(value = "relationalTableRegistrationHelperServiceImpl")
private RelationalTableRegistrationHelperService relationalTableRegistrationHelperServiceImpl;
@Test
public void testPrepareForRelationalTableRegistrationBusinessObjectDefinitionAlreadyExists()
{
    // Create a namespace.
    namespaceDaoTestHelper.createNamespaceEntity(BDEF_NAMESPACE);

    // Create a business object definition.
    businessObjectDefinitionDaoTestHelper
        .createBusinessObjectDefinitionEntity(new BusinessObjectDefinitionKey(BDEF_NAMESPACE, BDEF_NAME), DATA_PROVIDER_NAME, BDEF_DESCRIPTION);

    // Try to get relational storage attributes when the specified business object definition already exists.
    try
    {
        relationalTableRegistrationHelperService.prepareForRelationalTableRegistration(
            new RelationalTableRegistrationCreateRequest(BDEF_NAMESPACE, BDEF_NAME, BDEF_DISPLAY_NAME, FORMAT_USAGE_CODE, DATA_PROVIDER_NAME,
                RELATIONAL_SCHEMA_NAME, RELATIONAL_TABLE_NAME, STORAGE_NAME), APPEND_TO_EXISTING_BUSINESS_OBJECT_DEFINTION_FALSE);
        fail();
    }
    catch (AlreadyExistsException ex)
    {
        // Use the statically imported assertEquals for consistency with the rest of this test class.
        assertEquals(String.format("Business object definition with name \"%s\" already exists for namespace \"%s\".", BDEF_NAME, BDEF_NAMESPACE),
            ex.getMessage());
    }
}
@Test
public void testPrepareForRelationalTableRegistrationBusinessObjectFormatAlreadyExists()
{
    // Create a namespace.
    namespaceDaoTestHelper.createNamespaceEntity(BDEF_NAMESPACE);

    // Create a business object definition along with a relational-table format for it.
    businessObjectDefinitionDaoTestHelper
        .createBusinessObjectDefinitionEntity(new BusinessObjectDefinitionKey(BDEF_NAMESPACE, BDEF_NAME), DATA_PROVIDER_NAME, BDEF_DESCRIPTION);
    businessObjectFormatDaoTestHelper
        .createBusinessObjectFormatEntity(BDEF_NAMESPACE, BDEF_NAME, FORMAT_USAGE_CODE, FileTypeEntity.RELATIONAL_TABLE_FILE_TYPE, 1, FORMAT_DESCRIPTION,
            FORMAT_DOCUMENT_SCHEMA, FORMAT_DOCUMENT_SCHEMA_URL, true, PARTITION_KEY, PARTITION_KEY_GROUP);

    // Try to get relational storage attributes when a matching business object format already exists.
    try
    {
        relationalTableRegistrationHelperService.prepareForRelationalTableRegistration(
            new RelationalTableRegistrationCreateRequest(BDEF_NAMESPACE, BDEF_NAME, BDEF_DISPLAY_NAME, FORMAT_USAGE_CODE, DATA_PROVIDER_NAME,
                RELATIONAL_SCHEMA_NAME, RELATIONAL_TABLE_NAME, STORAGE_NAME), APPEND_TO_EXISTING_BUSINESS_OBJECT_DEFINTION_TRUE);
        fail();
    }
    catch (AlreadyExistsException alreadyExistsException)
    {
        // Use the statically imported assertEquals for consistency with the rest of this test class.
        assertEquals(String.format("Format with file type \"%s\" and usage \"%s\" already exists for business object definition \"%s\".",
            FileTypeEntity.RELATIONAL_TABLE_FILE_TYPE, FORMAT_USAGE_CODE, BDEF_NAME), alreadyExistsException.getMessage());
    }
}
@Test
public void testPrepareForRelationalTableRegistrationInvalidStoragePlatform()
{
    // Create database entities required for relational table registration testing.
    relationalTableRegistrationServiceTestHelper
        .createDatabaseEntitiesForRelationalTableRegistrationTesting(BDEF_NAMESPACE, DATA_PROVIDER_NAME, STORAGE_NAME);

    // Create another storage of a storage platform type that is not supported by the relational table registration feature.
    storageDaoTestHelper.createStorageEntity(STORAGE_NAME_2, STORAGE_PLATFORM_CODE);

    // Try to get relational storage attributes when the specified storage has an invalid storage platform type.
    try
    {
        relationalTableRegistrationHelperService.prepareForRelationalTableRegistration(
            new RelationalTableRegistrationCreateRequest(BDEF_NAMESPACE, BDEF_NAME, BDEF_DISPLAY_NAME, FORMAT_USAGE_CODE, DATA_PROVIDER_NAME,
                RELATIONAL_SCHEMA_NAME, RELATIONAL_TABLE_NAME, STORAGE_NAME_2), APPEND_TO_EXISTING_BUSINESS_OBJECT_DEFINTION_FALSE);
        fail();
    }
    catch (IllegalArgumentException e)
    {
        // Use the statically imported assertEquals for consistency with the rest of this test class.
        assertEquals(String.format(
            "Cannot register relational table in \"%s\" storage of %s storage platform type. Only %s storage platform type is supported by this feature.",
            STORAGE_NAME_2, STORAGE_PLATFORM_CODE, StoragePlatformEntity.RELATIONAL), e.getMessage());
    }
}
@Test
public void testPrepareForRelationalTableRegistrationRequiredDatabaseEntitiesNoExist()
{
    // Create database entities required for relational table registration testing.
    relationalTableRegistrationServiceTestHelper
        .createDatabaseEntitiesForRelationalTableRegistrationTesting(BDEF_NAMESPACE, DATA_PROVIDER_NAME, STORAGE_NAME);

    // Try to get relational storage attributes when the specified namespace does not exist.
    try
    {
        relationalTableRegistrationHelperService.prepareForRelationalTableRegistration(
            new RelationalTableRegistrationCreateRequest(I_DO_NOT_EXIST, BDEF_NAME, BDEF_DISPLAY_NAME, FORMAT_USAGE_CODE, DATA_PROVIDER_NAME,
                RELATIONAL_SCHEMA_NAME, RELATIONAL_TABLE_NAME, STORAGE_NAME), APPEND_TO_EXISTING_BUSINESS_OBJECT_DEFINTION_FALSE);
        fail();
    }
    catch (ObjectNotFoundException e)
    {
        // Use the statically imported assertEquals for consistency with the rest of this test class.
        assertEquals(String.format("Namespace \"%s\" doesn't exist.", I_DO_NOT_EXIST), e.getMessage());
    }

    // Try to get relational storage attributes when the specified data provider does not exist.
    try
    {
        relationalTableRegistrationHelperService.prepareForRelationalTableRegistration(
            new RelationalTableRegistrationCreateRequest(BDEF_NAMESPACE, BDEF_NAME, BDEF_DISPLAY_NAME, FORMAT_USAGE_CODE, I_DO_NOT_EXIST,
                RELATIONAL_SCHEMA_NAME, RELATIONAL_TABLE_NAME, STORAGE_NAME), APPEND_TO_EXISTING_BUSINESS_OBJECT_DEFINTION_FALSE);
        fail();
    }
    catch (ObjectNotFoundException e)
    {
        assertEquals(String.format("Data provider with name \"%s\" doesn't exist.", I_DO_NOT_EXIST), e.getMessage());
    }

    // Try to get relational storage attributes when the specified storage does not exist.
    try
    {
        relationalTableRegistrationHelperService.prepareForRelationalTableRegistration(
            new RelationalTableRegistrationCreateRequest(BDEF_NAMESPACE, BDEF_NAME, BDEF_DISPLAY_NAME, FORMAT_USAGE_CODE, DATA_PROVIDER_NAME,
                RELATIONAL_SCHEMA_NAME, RELATIONAL_TABLE_NAME, I_DO_NOT_EXIST), APPEND_TO_EXISTING_BUSINESS_OBJECT_DEFINTION_FALSE);
        fail();
    }
    catch (ObjectNotFoundException e)
    {
        assertEquals(String.format("Storage with name \"%s\" doesn't exist.", I_DO_NOT_EXIST), e.getMessage());
    }
}
@Test
public void testRegisterRelationalTableBusinessObjectDefinitionAlreadyExists()
{
    // Create a namespace.
    namespaceDaoTestHelper.createNamespaceEntity(BDEF_NAMESPACE);

    // Create a business object definition.
    businessObjectDefinitionDaoTestHelper
        .createBusinessObjectDefinitionEntity(new BusinessObjectDefinitionKey(BDEF_NAMESPACE, BDEF_NAME), DATA_PROVIDER_NAME, BDEF_DESCRIPTION);

    // Try to register a relational table when specified business object definition already exists.
    try
    {
        relationalTableRegistrationHelperService.registerRelationalTable(
            new RelationalTableRegistrationCreateRequest(BDEF_NAMESPACE, BDEF_NAME, BDEF_DISPLAY_NAME, FORMAT_USAGE_CODE, DATA_PROVIDER_NAME,
                RELATIONAL_SCHEMA_NAME, RELATIONAL_TABLE_NAME, STORAGE_NAME), relationalTableRegistrationServiceTestHelper.getExpectedSchemaColumns(),
            APPEND_TO_EXISTING_BUSINESS_OBJECT_DEFINTION_FALSE);
        fail();
    }
    catch (AlreadyExistsException ex)
    {
        // Use the statically imported assertEquals for consistency with the rest of this test class.
        assertEquals(String
            .format("Unable to create business object definition with name \"%s\" because it already exists for namespace \"%s\".", BDEF_NAME,
                BDEF_NAMESPACE), ex.getMessage());
    }
}
@Test
public void testRegisterRelationalTableBusinessObjectDefinitionDoesNotExist()
{
    // Create a namespace.
    namespaceDaoTestHelper.createNamespaceEntity(BDEF_NAMESPACE);

    // Try to register a relational table (in append mode) when the specified business object definition does not exist.
    try
    {
        relationalTableRegistrationHelperService.registerRelationalTable(
            new RelationalTableRegistrationCreateRequest(BDEF_NAMESPACE, BDEF_NAME, BDEF_DISPLAY_NAME, FORMAT_USAGE_CODE, DATA_PROVIDER_NAME,
                RELATIONAL_SCHEMA_NAME, RELATIONAL_TABLE_NAME, STORAGE_NAME), relationalTableRegistrationServiceTestHelper.getExpectedSchemaColumns(),
            APPEND_TO_EXISTING_BUSINESS_OBJECT_DEFINTION_TRUE);
        fail();
    }
    catch (ObjectNotFoundException objectNotFoundException)
    {
        // Use the statically imported assertEquals for consistency with the rest of this test class.
        assertEquals(String.format("Business object definition with name \"%s\" doesn't exist for namespace \"%s\".", BDEF_NAME, BDEF_NAMESPACE),
            objectNotFoundException.getMessage());
    }
}
@Test
public void testRegisterRelationalTableRequiredDatabaseEntitiesNoExist()
{
    // Create database entities required for relational table registration testing.
    relationalTableRegistrationServiceTestHelper
        .createDatabaseEntitiesForRelationalTableRegistrationTesting(BDEF_NAMESPACE, DATA_PROVIDER_NAME, STORAGE_NAME);

    // Try to register a relational table when the specified namespace does not exist.
    try
    {
        relationalTableRegistrationHelperService.registerRelationalTable(
            new RelationalTableRegistrationCreateRequest(I_DO_NOT_EXIST, BDEF_NAME, BDEF_DISPLAY_NAME, FORMAT_USAGE_CODE, DATA_PROVIDER_NAME,
                RELATIONAL_SCHEMA_NAME, RELATIONAL_TABLE_NAME, STORAGE_NAME), relationalTableRegistrationServiceTestHelper.getExpectedSchemaColumns(),
            APPEND_TO_EXISTING_BUSINESS_OBJECT_DEFINTION_FALSE);
        fail();
    }
    catch (ObjectNotFoundException e)
    {
        // Use the statically imported assertEquals for consistency with the rest of this test class.
        assertEquals(String.format("Namespace \"%s\" doesn't exist.", I_DO_NOT_EXIST), e.getMessage());
    }

    // Try to register a relational table when the specified data provider does not exist.
    try
    {
        relationalTableRegistrationHelperService.registerRelationalTable(
            new RelationalTableRegistrationCreateRequest(BDEF_NAMESPACE, BDEF_NAME, BDEF_DISPLAY_NAME, FORMAT_USAGE_CODE, I_DO_NOT_EXIST,
                RELATIONAL_SCHEMA_NAME, RELATIONAL_TABLE_NAME, STORAGE_NAME), relationalTableRegistrationServiceTestHelper.getExpectedSchemaColumns(),
            APPEND_TO_EXISTING_BUSINESS_OBJECT_DEFINTION_FALSE);
        fail();
    }
    catch (ObjectNotFoundException e)
    {
        assertEquals(String.format("Data provider with name \"%s\" doesn't exist.", I_DO_NOT_EXIST), e.getMessage());
    }

    // Try to register a relational table when the specified storage does not exist.
    try
    {
        relationalTableRegistrationHelperService.registerRelationalTable(
            new RelationalTableRegistrationCreateRequest(BDEF_NAMESPACE, BDEF_NAME, BDEF_DISPLAY_NAME, FORMAT_USAGE_CODE, DATA_PROVIDER_NAME,
                RELATIONAL_SCHEMA_NAME, RELATIONAL_TABLE_NAME, I_DO_NOT_EXIST), relationalTableRegistrationServiceTestHelper.getExpectedSchemaColumns(),
            APPEND_TO_EXISTING_BUSINESS_OBJECT_DEFINTION_FALSE);
        fail();
    }
    catch (ObjectNotFoundException e)
    {
        assertEquals(String.format("Storage with name \"%s\" doesn't exist.", I_DO_NOT_EXIST), e.getMessage());
    }
}
/**
 * This unit test is to get coverage for the methods that have an explicit annotation for transaction propagation.
 *
 * Each helper method below runs in its own new transaction, so it cannot see entities created inside the
 * calling test transaction; every call is therefore expected to fail with a specific, exact error message.
 */
@Test
public void testRelationalTableRegistrationHelperServiceMethodsNewTransactionPropagation()
{
    // prepareForRelationalTableRegistration: the namespace lookup fails in the new transaction.
    try
    {
        relationalTableRegistrationHelperServiceImpl.prepareForRelationalTableRegistration(
            new RelationalTableRegistrationCreateRequest(BDEF_NAMESPACE, BDEF_NAME, BDEF_DISPLAY_NAME, FORMAT_USAGE_CODE, DATA_PROVIDER_NAME,
                RELATIONAL_SCHEMA_NAME, RELATIONAL_TABLE_NAME, STORAGE_NAME), APPEND_TO_EXISTING_BUSINESS_OBJECT_DEFINTION_FALSE);
        fail();
    }
    catch (ObjectNotFoundException e)
    {
        assertEquals(String.format("Namespace \"%s\" doesn't exist.", BDEF_NAMESPACE), e.getMessage());
    }
    // prepareForRelationalTableSchemaUpdate: the storage unit lookup fails in the new transaction.
    try
    {
        relationalTableRegistrationHelperServiceImpl.prepareForRelationalTableSchemaUpdate(
            new BusinessObjectDataStorageUnitKey(BDEF_NAMESPACE, BDEF_NAME, FORMAT_USAGE_CODE, FORMAT_FILE_TYPE_CODE, FORMAT_VERSION, PARTITION_VALUE,
                SUBPARTITION_VALUES, DATA_VERSION, STORAGE_NAME));
        fail();
    }
    catch (ObjectNotFoundException e)
    {
        assertEquals(String.format("Business object data storage unit {%s, storageName: \"%s\"} doesn't exist.", businessObjectDataServiceTestHelper
            .getExpectedBusinessObjectDataKeyAsString(
                new BusinessObjectDataKey(BDEF_NAMESPACE, BDEF_NAME, FORMAT_USAGE_CODE, FORMAT_FILE_TYPE_CODE, FORMAT_VERSION, PARTITION_VALUE,
                    SUBPARTITION_VALUES, DATA_VERSION)), STORAGE_NAME), e.getMessage());
    }
    // registerRelationalTable: an empty create request fails its required-parameter validation.
    try
    {
        relationalTableRegistrationHelperServiceImpl
            .registerRelationalTable(new RelationalTableRegistrationCreateRequest(), new ArrayList<>(), APPEND_TO_EXISTING_BUSINESS_OBJECT_DEFINTION_FALSE);
        fail();
    }
    catch (IllegalArgumentException e)
    {
        assertEquals("A namespace must be specified.", e.getMessage());
    }
    // retrieveRelationalTableColumns: connecting with bad credentials surfaces the JDBC driver error.
    try
    {
        relationalTableRegistrationHelperServiceImpl
            .retrieveRelationalTableColumns(new RelationalStorageAttributesDto(JDBC_URL, USERNAME, NO_USER_CREDENTIAL_NAME), RELATIONAL_SCHEMA_NAME,
                RELATIONAL_TABLE_NAME);
        fail();
    }
    catch (IllegalArgumentException e)
    {
        assertEquals(String.format(
            "Failed to retrieve description of a relational table with \"%s\" name under \"%s\" schema at jdbc.url=\"%s\" using jdbc.username=\"%s\". " +
                "Reason: Wrong user name or password [28000-200]", RELATIONAL_TABLE_NAME, RELATIONAL_SCHEMA_NAME, JDBC_URL, USERNAME), e.getMessage());
    }
    // updateRelationalTableSchema: an update with an empty list of columns fails schema validation.
    try
    {
        relationalTableRegistrationHelperServiceImpl.updateRelationalTableSchema(new RelationalTableRegistrationDto(
            new BusinessObjectDataStorageUnitKey(BDEF_NAMESPACE, BDEF_NAME, FORMAT_USAGE_CODE, FORMAT_FILE_TYPE_CODE, FORMAT_VERSION, PARTITION_VALUE,
                SUBPARTITION_VALUES, DATA_VERSION, STORAGE_NAME), new RelationalStorageAttributesDto(), RELATIONAL_SCHEMA_NAME, RELATIONAL_TABLE_NAME,
            new BusinessObjectFormat(ID, BDEF_NAMESPACE, BDEF_NAME, FORMAT_USAGE_CODE, FORMAT_FILE_TYPE_CODE, FORMAT_VERSION, LATEST_VERSION_FLAG_SET,
                PARTITION_KEY, DESCRIPTION, NO_FORMAT_DOCUMENT_SCHEMA, NO_FORMAT_DOCUMENT_SCHEMA_URL, NO_ATTRIBUTES, NO_ATTRIBUTE_DEFINITIONS, NO_SCHEMA,
                NO_BUSINESS_OBJECT_FORMAT_PARENTS, NO_BUSINESS_OBJECT_FORMAT_CHILDREN, NO_BUSINESS_OBJECT_FORMAT_EXTERNAL_INTERFACES, NO_RECORD_FLAG_SET,
                NO_RETENTION_PERIOD_IN_DAYS, NO_RETENTION_TYPE, NO_ALLOW_NON_BACKWARDS_COMPATIBLE_CHANGES_SET, FORMAT_RELATIONAL_SCHEMA_NAME,
                FORMAT_RELATIONAL_TABLE_NAME)), NO_COLUMNS);
        fail();
    }
    catch (IllegalArgumentException e)
    {
        assertEquals("A schema must have at least one column.", e.getMessage());
    }
    // validateAndTrimRelationalTableRegistrationCreateRequest: an empty request fails on the namespace.
    try
    {
        relationalTableRegistrationHelperServiceImpl
            .validateAndTrimRelationalTableRegistrationCreateRequest(new RelationalTableRegistrationCreateRequest());
        fail();
    }
    catch (IllegalArgumentException e)
    {
        assertEquals("A namespace must be specified.", e.getMessage());
    }
}
@Test
public void testRetrieveRelationalTableColumnsRelationTableNoExists()
{
    // Point the relational storage attributes at the in-memory database set up as part of the DAO mocks.
    RelationalStorageAttributesDto storageAttributes = new RelationalStorageAttributesDto();
    storageAttributes.setJdbcUrl(JDBC_URL);
    storageAttributes.setJdbcUsername(EMPTY_STRING);
    storageAttributes.setJdbcUserCredentialName(NO_USER_CREDENTIAL_NAME);

    // Requesting schema columns for a relational table that does not exist must fail with this exact message.
    String expectedMessage = String
        .format("Relational table with \"%s\" name not found under \"%s\" schema at jdbc.url=\"%s\" for jdbc.username=\"%s\".", I_DO_NOT_EXIST,
            RELATIONAL_SCHEMA_NAME, JDBC_URL, EMPTY_STRING);
    try
    {
        relationalTableRegistrationHelperService.retrieveRelationalTableColumns(storageAttributes, RELATIONAL_SCHEMA_NAME, I_DO_NOT_EXIST);
        fail();
    }
    catch (IllegalArgumentException e)
    {
        assertEquals(expectedMessage, e.getMessage());
    }
}
@Test
public void testRetrieveRelationalTableColumnsSqlException()
{
    // Use an invalid JDBC URL so that no suitable JDBC driver can be located.
    RelationalStorageAttributesDto storageAttributes = new RelationalStorageAttributesDto();
    storageAttributes.setJdbcUrl(INVALID_VALUE);
    storageAttributes.setJdbcUsername(USERNAME);
    storageAttributes.setJdbcUserCredentialName(NO_USER_CREDENTIAL_NAME);

    // The SQL failure is expected to surface as an IllegalArgumentException carrying the driver error as the reason.
    String expectedMessage = String.format(
        "Failed to retrieve description of a relational table with \"%s\" name under \"%s\" schema at jdbc.url=\"%s\" using jdbc.username=\"%s\". " +
            "Reason: No suitable driver found for %s", RELATIONAL_TABLE_NAME, RELATIONAL_SCHEMA_NAME, INVALID_VALUE, USERNAME, INVALID_VALUE);
    try
    {
        relationalTableRegistrationHelperService
            .retrieveRelationalTableColumns(storageAttributes, RELATIONAL_SCHEMA_NAME, RELATIONAL_TABLE_NAME);
        fail();
    }
    catch (IllegalArgumentException e)
    {
        assertEquals(expectedMessage, e.getMessage());
    }
}
@Test
public void testValidateAndTrimRelationalTableRegistrationCreateRequestMissingOptionalParametersAsBlanks()
{
    // Build a create request whose optional display name is passed as a blank string.
    RelationalTableRegistrationCreateRequest request =
        new RelationalTableRegistrationCreateRequest(BDEF_NAMESPACE, BDEF_NAME, BLANK_TEXT, FORMAT_USAGE_CODE, DATA_PROVIDER_NAME, RELATIONAL_SCHEMA_NAME,
            RELATIONAL_TABLE_NAME, STORAGE_NAME);

    // Validate and trim the request in place.
    relationalTableRegistrationHelperService.validateAndTrimRelationalTableRegistrationCreateRequest(request);

    // The blank display name is expected to have been trimmed down to an empty string.
    assertEquals(
        new RelationalTableRegistrationCreateRequest(BDEF_NAMESPACE, BDEF_NAME, EMPTY_STRING, FORMAT_USAGE_CODE, DATA_PROVIDER_NAME, RELATIONAL_SCHEMA_NAME,
            RELATIONAL_TABLE_NAME, STORAGE_NAME), request);
}
@Test
public void testValidateAndTrimRelationalTableRegistrationCreateRequestMissingOptionalParametersAsNulls()
{
    // Build a create request whose optional display name is passed as null.
    RelationalTableRegistrationCreateRequest request =
        new RelationalTableRegistrationCreateRequest(BDEF_NAMESPACE, BDEF_NAME, NO_BDEF_DISPLAY_NAME, FORMAT_USAGE_CODE, DATA_PROVIDER_NAME,
            RELATIONAL_SCHEMA_NAME, RELATIONAL_TABLE_NAME, STORAGE_NAME);

    // Validate and trim the request in place.
    relationalTableRegistrationHelperService.validateAndTrimRelationalTableRegistrationCreateRequest(request);

    // A null display name is expected to pass through unchanged.
    assertEquals(new RelationalTableRegistrationCreateRequest(BDEF_NAMESPACE, BDEF_NAME, NO_BDEF_DISPLAY_NAME, FORMAT_USAGE_CODE, DATA_PROVIDER_NAME,
        RELATIONAL_SCHEMA_NAME, RELATIONAL_TABLE_NAME, STORAGE_NAME), request);
}
@Test
public void testValidateAndTrimRelationalTableRegistrationCreateRequestMissingRequiredParameters()
{
    // A null create request is rejected outright.
    assertCreateRequestValidationFails(null, "A relational table registration create request must be specified.");

    // Each required parameter, when blank, must produce its own dedicated validation error.
    assertCreateRequestValidationFails(
        new RelationalTableRegistrationCreateRequest(BLANK_TEXT, BDEF_NAME, BDEF_DISPLAY_NAME, FORMAT_USAGE_CODE, DATA_PROVIDER_NAME,
            RELATIONAL_SCHEMA_NAME, RELATIONAL_TABLE_NAME, STORAGE_NAME), "A namespace must be specified.");
    assertCreateRequestValidationFails(
        new RelationalTableRegistrationCreateRequest(BDEF_NAMESPACE, BLANK_TEXT, BDEF_DISPLAY_NAME, FORMAT_USAGE_CODE, DATA_PROVIDER_NAME,
            RELATIONAL_SCHEMA_NAME, RELATIONAL_TABLE_NAME, STORAGE_NAME), "A business object definition name must be specified.");
    assertCreateRequestValidationFails(
        new RelationalTableRegistrationCreateRequest(BDEF_NAMESPACE, BDEF_NAME, BDEF_DISPLAY_NAME, BLANK_TEXT, DATA_PROVIDER_NAME,
            RELATIONAL_SCHEMA_NAME, RELATIONAL_TABLE_NAME, STORAGE_NAME), "A business object format usage must be specified.");
    assertCreateRequestValidationFails(
        new RelationalTableRegistrationCreateRequest(BDEF_NAMESPACE, BDEF_NAME, BDEF_DISPLAY_NAME, FORMAT_USAGE_CODE, BLANK_TEXT,
            RELATIONAL_SCHEMA_NAME, RELATIONAL_TABLE_NAME, STORAGE_NAME), "A data provider name must be specified.");
    assertCreateRequestValidationFails(
        new RelationalTableRegistrationCreateRequest(BDEF_NAMESPACE, BDEF_NAME, BDEF_DISPLAY_NAME, FORMAT_USAGE_CODE, DATA_PROVIDER_NAME, BLANK_TEXT,
            RELATIONAL_TABLE_NAME, STORAGE_NAME), "A relational schema name must be specified.");
    assertCreateRequestValidationFails(
        new RelationalTableRegistrationCreateRequest(BDEF_NAMESPACE, BDEF_NAME, BDEF_DISPLAY_NAME, FORMAT_USAGE_CODE, DATA_PROVIDER_NAME,
            RELATIONAL_SCHEMA_NAME, BLANK_TEXT, STORAGE_NAME), "A relational table name must be specified.");
    assertCreateRequestValidationFails(
        new RelationalTableRegistrationCreateRequest(BDEF_NAMESPACE, BDEF_NAME, BDEF_DISPLAY_NAME, FORMAT_USAGE_CODE, DATA_PROVIDER_NAME,
            RELATIONAL_SCHEMA_NAME, RELATIONAL_TABLE_NAME, BLANK_TEXT), "A storage name must be specified.");
}

/**
 * Asserts that validating the specified create request fails with an {@link IllegalArgumentException}
 * carrying exactly the expected message.
 *
 * @param request the create request to validate, may be null
 * @param expectedMessage the expected exception message
 */
private void assertCreateRequestValidationFails(RelationalTableRegistrationCreateRequest request, String expectedMessage)
{
    try
    {
        relationalTableRegistrationHelperService.validateAndTrimRelationalTableRegistrationCreateRequest(request);
        fail();
    }
    catch (IllegalArgumentException e)
    {
        assertEquals(expectedMessage, e.getMessage());
    }
}
@Test
public void testValidateAndTrimRelationalTableRegistrationCreateRequestTrimParameters()
{
    // Build a create request whose every parameter carries leading and trailing whitespace.
    RelationalTableRegistrationCreateRequest request =
        new RelationalTableRegistrationCreateRequest(addWhitespace(BDEF_NAMESPACE), addWhitespace(BDEF_NAME), addWhitespace(BDEF_DISPLAY_NAME),
            addWhitespace(FORMAT_USAGE_CODE), addWhitespace(DATA_PROVIDER_NAME), addWhitespace(RELATIONAL_SCHEMA_NAME),
            addWhitespace(RELATIONAL_TABLE_NAME), addWhitespace(STORAGE_NAME));

    // Validate and trim the request in place.
    relationalTableRegistrationHelperService.validateAndTrimRelationalTableRegistrationCreateRequest(request);

    // Every parameter is expected to come back with its surrounding whitespace removed.
    assertEquals(new RelationalTableRegistrationCreateRequest(BDEF_NAMESPACE, BDEF_NAME, BDEF_DISPLAY_NAME, FORMAT_USAGE_CODE, DATA_PROVIDER_NAME,
        RELATIONAL_SCHEMA_NAME, RELATIONAL_TABLE_NAME, STORAGE_NAME), request);
}
}
| |
package com.makotokw.android.widget;
import android.annotation.TargetApi;
import android.content.Context;
import android.database.DataSetObserver;
import android.os.Build;
import android.util.AttributeSet;
import android.view.KeyEvent;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AbsListView;
import android.widget.ListAdapter;
import android.widget.ListView;
public class CircularListView extends ListView implements AbsListView.OnScrollListener {
private static final String TAG = CircularListView.class.getSimpleName();
private static final int REPEAT_COUNT = 3;
private int mItemHeight = 0;
private CircularListViewListener mCircularListViewListener;
private InfiniteListAdapter mInfiniteListAdapter;
private boolean mEnableInfiniteScrolling = true;
private CircularListViewContentAlignment mCircularListViewContentAlignment = CircularListViewContentAlignment.Left;
private double mRadius = -1;
private int mSmoothScrollDuration = 80;
public CircularListView(Context context) {
this(context, null);
}
public CircularListView(Context context, AttributeSet attrs) {
this(context, attrs, android.R.attr.listViewStyle);
}
public CircularListView(Context context, AttributeSet attrs, int defStyle) {
super(context, attrs, defStyle);
setOnScrollListener(this);
setClipChildren(false);
setEnableInfiniteScrolling(true);
}
public void setAdapter(ListAdapter adapter) {
mInfiniteListAdapter = new InfiniteListAdapter(adapter);
mInfiniteListAdapter.setEnableInfiniteScrolling(mEnableInfiniteScrolling);
super.setAdapter(mInfiniteListAdapter);
}
public CircularListViewListener getCircularListViewListener() {
return mCircularListViewListener;
}
public void setCircularListViewListener(CircularListViewListener circularListViewListener) {
this.mCircularListViewListener = circularListViewListener;
}
public void setEnableInfiniteScrolling(boolean enableInfiniteScrolling) {
mEnableInfiniteScrolling = enableInfiniteScrolling;
if (mInfiniteListAdapter != null) {
mInfiniteListAdapter.setEnableInfiniteScrolling(enableInfiniteScrolling);
}
if (mEnableInfiniteScrolling) {
setHorizontalScrollBarEnabled(false);
setVerticalScrollBarEnabled(false);
}
}
public CircularListViewContentAlignment getCircularListViewContentAlignment() {
return mCircularListViewContentAlignment;
}
public void setCircularListViewContentAlignment(
CircularListViewContentAlignment circularListViewContentAlignment) {
if (mCircularListViewContentAlignment != circularListViewContentAlignment) {
mCircularListViewContentAlignment = circularListViewContentAlignment;
requestLayout();
}
}
public double getRadius() {
return mRadius;
}
public void setRadius(double radius) {
if (this.mRadius != radius) {
this.mRadius = radius;
requestLayout();
}
}
public int getCentralPosition() {
double vCenterPos = getHeight() / 2.0f;
for (int i = 0; i < getChildCount(); i++) {
View child = getChildAt(i);
if (child != null) {
if (child.getTop() <= vCenterPos
&& child.getTop() + child.getHeight() >= vCenterPos) {
return getFirstVisiblePosition() + i;
}
}
}
return -1;
}
public View getCentralChild() {
int pos = getCentralPosition();
if (pos != -1) {
return getChildAt(pos - getFirstVisiblePosition());
}
return null;
}
public void scrollFirstItemToCenter() {
if (!mEnableInfiniteScrolling) {
return;
}
int realTotalItemCount = mInfiniteListAdapter.getRealCount();
if (realTotalItemCount > 0) {
setSelectionFromTop(realTotalItemCount, getBaseCentralChildTop());
}
}
public int getBaseCentralChildTop() {
int itemHeight = getItemHeight();
if (itemHeight > 0) {
return getHeight() / 2 - itemHeight / 2;
}
return 0;
}
public int getItemHeight() {
if (mItemHeight == 0) {
View child = getChildAt(0);
if (child != null) {
mItemHeight = child.getHeight();
}
}
return mItemHeight;
}
public void setSelectionAndMoveToCenter(int position) {
if (!mEnableInfiniteScrolling) {
return;
}
int realTotalItemCount = mInfiniteListAdapter.getRealCount();
if (realTotalItemCount == 0) {
return;
}
position = position % realTotalItemCount;
int centralPosition = getCentralPosition() % realTotalItemCount;
int y = getBaseCentralChildTop();
if (centralPosition == position) {
View centralView = getCentralChild();
y = centralView.getTop();
}
setSelectionFromTop(position + realTotalItemCount, y);
}
@TargetApi(Build.VERSION_CODES.FROYO)
@Override
public boolean dispatchKeyEvent(KeyEvent event) {
if (mEnableInfiniteScrolling) {
if (event.getAction() == KeyEvent.ACTION_DOWN) {
switch (event.getKeyCode()) {
case KeyEvent.KEYCODE_DPAD_UP:
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.FROYO) {
smoothScrollBy(mItemHeight, mSmoothScrollDuration);
return true;
}
break;
case KeyEvent.KEYCODE_DPAD_DOWN:
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.FROYO) {
smoothScrollBy(-mItemHeight, mSmoothScrollDuration);
return true;
}
break;
default:
break;
}
}
}
return super.dispatchKeyEvent(event);
}
@Override
public void onScrollStateChanged(AbsListView view, int scrollState) {
if (scrollState == SCROLL_STATE_IDLE) {
if (!isInTouchMode()) {
setSelectionAndMoveToCenter(getCentralPosition());
}
}
}
@Override
public void onScroll(AbsListView view, int firstVisibleItem, int visibleItemCount,
int totalItemCount) {
if (!mEnableInfiniteScrolling) {
return;
}
View itemView = this.getChildAt(0);
if (itemView == null) {
return;
}
int realTotalItemCount = mInfiniteListAdapter.getRealCount();
if (realTotalItemCount == 0) {
return;
}
if (mItemHeight == 0) {
mItemHeight = itemView.getHeight();
}
if (firstVisibleItem == 0) {
// scroll one unit
this.setSelectionFromTop(realTotalItemCount, itemView.getTop());
}
if (totalItemCount == firstVisibleItem + visibleItemCount) {
// back one unit
this.setSelectionFromTop(firstVisibleItem - realTotalItemCount,
itemView.getTop());
}
if (mCircularListViewContentAlignment != CircularListViewContentAlignment.None) {
double viewHalfHeight = view.getHeight() / 2.0f;
double vRadius = view.getHeight();
double hRadius = view.getWidth();
double yRadius = (view.getHeight() + mItemHeight) / 2.0f;
double xRadius = (vRadius < hRadius) ? vRadius : hRadius;
if (mRadius > 0) {
xRadius = mRadius;
}
for (int i = 0; i < visibleItemCount; i++) {
itemView = this.getChildAt(i);
if (itemView != null) {
double y = Math.abs(viewHalfHeight - (itemView.getTop() + (itemView.getHeight() / 2.0f)));
y = Math.min(y, yRadius);
double angle = Math.asin(y / yRadius);
double x = xRadius * Math.cos(angle);
if (mCircularListViewContentAlignment == CircularListViewContentAlignment.Left) {
x -= xRadius;
} else {
x = xRadius / 2 - x;
}
itemView.scrollTo((int) x, 0);
}
}
} else {
for (int i = 0; i < visibleItemCount; i++) {
itemView = this.getChildAt(i);
if (itemView != null) {
itemView.scrollTo(0, 0);
}
}
}
if (mCircularListViewListener != null) {
mCircularListViewListener.onCircularLayoutFinished(this, firstVisibleItem, visibleItemCount, totalItemCount);
}
}
class InfiniteListAdapter implements ListAdapter {
private boolean mEnableInfiniteScrolling = true;
private ListAdapter mCoreAdapter;
public InfiniteListAdapter(ListAdapter coreAdapter) {
mCoreAdapter = coreAdapter;
}
private void setEnableInfiniteScrolling(boolean enableInfiniteScrolling) {
mEnableInfiniteScrolling = enableInfiniteScrolling;
}
public int getRealCount() {
return mCoreAdapter.getCount();
}
public int positionToIndex(int position) {
int count = mCoreAdapter.getCount();
return (count == 0) ? 0 : position % count;
}
@Override
public void registerDataSetObserver(DataSetObserver observer) {
mCoreAdapter.registerDataSetObserver(observer);
}
@Override
public void unregisterDataSetObserver(DataSetObserver observer) {
mCoreAdapter.unregisterDataSetObserver(observer);
}
@Override
public int getCount() {
int count = mCoreAdapter.getCount();
return (mEnableInfiniteScrolling) ? count * REPEAT_COUNT : count;
}
@Override
public Object getItem(int position) {
return mCoreAdapter.getItem(this.positionToIndex(position));
}
@Override
public long getItemId(int position) {
return mCoreAdapter.getItemId(this.positionToIndex(position));
}
@Override
public boolean hasStableIds() {
return mCoreAdapter.hasStableIds();
}
@Override
public View getView(int position, View convertView, ViewGroup parent) {
return mCoreAdapter.getView(this.positionToIndex(position), convertView, parent);
}
@Override
public int getItemViewType(int position) {
return mCoreAdapter.getItemViewType(this.positionToIndex(position));
}
@Override
public int getViewTypeCount() {
return mCoreAdapter.getViewTypeCount();
}
@Override
public boolean isEmpty() {
return mCoreAdapter.isEmpty();
}
@Override
public boolean areAllItemsEnabled() {
return mCoreAdapter.areAllItemsEnabled();
}
@Override
public boolean isEnabled(int position) {
return mCoreAdapter.isEnabled(this.positionToIndex(position));
}
}
}
| |
package com.nilhcem.devoxxfr.debug.stetho;
import android.annotation.TargetApi;
import android.app.Activity;
import android.app.Application;
import android.app.PendingIntent;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.os.Build;
import android.os.SystemClock;
import com.facebook.stetho.dumpapp.DumpException;
import com.facebook.stetho.dumpapp.DumperContext;
import com.facebook.stetho.dumpapp.DumperPlugin;
import com.google.gson.GsonBuilder;
import com.jakewharton.processphoenix.ProcessPhoenix;
import com.nilhcem.devoxxfr.R;
import com.nilhcem.devoxxfr.data.app.model.Session;
import com.nilhcem.devoxxfr.data.database.dao.SessionsDao;
import com.nilhcem.devoxxfr.data.network.ApiEndpoint;
import com.nilhcem.devoxxfr.debug.lifecycle.ActivityProvider;
import com.nilhcem.devoxxfr.receiver.BootReceiver;
import com.nilhcem.devoxxfr.receiver.reminder.ReminderReceiver;
import com.nilhcem.devoxxfr.receiver.reminder.ReminderReceiverIntentBuilder;
import com.nilhcem.devoxxfr.ui.drawer.DrawerActivity;
import com.nilhcem.devoxxfr.ui.sessions.details.SessionDetailsActivity;
import com.nilhcem.devoxxfr.utils.App;
import org.threeten.bp.format.DateTimeFormatter;
import java.io.PrintStream;
import java.lang.reflect.Field;
import java.util.List;
import java.util.Locale;
import javax.inject.Inject;
import rx.Observable;
/**
 * Stetho "dumpapp" plugin exposing debug commands for the app: listing scheduled reminder
 * alarms, printing build info and boot-receiver state, dumping the currently displayed session,
 * switching the API endpoint, and firing a test notification.
 */
public class AppDumperPlugin implements DumperPlugin {

    private final Context context;
    private final ApiEndpoint endpoint;
    private final SessionsDao sessionsDao;
    private final ActivityProvider activityProvider;

    @Inject
    public AppDumperPlugin(Application app, ApiEndpoint endpoint, SessionsDao sessionsDao, ActivityProvider activityProvider) {
        this.context = app;
        this.endpoint = endpoint;
        this.sessionsDao = sessionsDao;
        this.activityProvider = activityProvider;
    }

    @Override
    public String getName() {
        return "devoxxfr";
    }

    /**
     * Entry point called by the dumpapp tool. The first argument selects a command; remaining
     * arguments are forwarded to the handler. Unknown or missing commands print usage.
     */
    @Override
    public void dump(DumperContext dumpContext) throws DumpException {
        final PrintStream writer = dumpContext.getStdout();
        List<String> args = dumpContext.getArgsAsList();
        String commandName = args.isEmpty() ? "" : args.remove(0);
        switch (commandName) {
            case "alarms":
                displayAlarms(writer);
                break;
            case "appInfo":
                displayAppInfo(writer);
                break;
            case "bootReceiver":
                displayBootReceiverState(writer);
                break;
            case "currentSession":
                displayCurrentSessionData(writer);
                break;
            case "endpoint":
                changeEndpoint(writer, args);
                break;
            case "notif":
                displayNotificationReminder();
                break;
            default:
                doUsage(writer);
                break;
        }
    }

    /**
     * Prints the sessions that currently have a reminder alarm scheduled. A session is considered
     * active when a matching broadcast PendingIntent already exists (probed with FLAG_NO_CREATE).
     */
    @TargetApi(Build.VERSION_CODES.LOLLIPOP)
    private void displayAlarms(PrintStream writer) {
        sessionsDao.getSessions()
                .flatMap(Observable::from)
                .map(session -> {
                    Intent intent = new ReminderReceiverIntentBuilder(session).build(context);
                    // FLAG_NO_CREATE returns null when no alarm PendingIntent is registered for this session.
                    PendingIntent broadcast = PendingIntent.getBroadcast(context, session.getId(), intent, PendingIntent.FLAG_NO_CREATE);
                    if (broadcast != null) {
                        return String.format(Locale.US, "%s - Session(id=%d, title=%s)", session.getFromTime().format(DateTimeFormatter.ISO_DATE_TIME), session.getId(), session.getTitle());
                    }
                    return null;
                })
                .filter(id -> id != null)
                .toList()
                .subscribe(activeAlarms -> {
                    writer.println(activeAlarms.size() + " active alarm(s)");
                    for (String activeAlarm : activeAlarms) {
                        writer.println(activeAlarm);
                    }
                });
    }

    /** Prints the application name and version. */
    private void displayAppInfo(PrintStream writer) {
        writer.println(context.getString(R.string.app_name) + " " + App.getVersion());
    }

    /** Prints the enabled/disabled state of the {@link BootReceiver} component. */
    private void displayBootReceiverState(PrintStream writer) {
        ComponentName componentName = new ComponentName(context, BootReceiver.class);
        PackageManager pm = context.getPackageManager();
        writer.print("Boot receiver state: ");
        int state = pm.getComponentEnabledSetting(componentName);
        switch (state) {
            case PackageManager.COMPONENT_ENABLED_STATE_DEFAULT:
                writer.println("default");
                break;
            case PackageManager.COMPONENT_ENABLED_STATE_ENABLED:
                writer.println("enabled");
                break;
            case PackageManager.COMPONENT_ENABLED_STATE_DISABLED:
                writer.println("disabled");
                break;
            case PackageManager.COMPONENT_ENABLED_STATE_DISABLED_USER:
                writer.println("disabled by user");
                break;
            default:
                writer.println(state);
                break;
        }
    }

    /**
     * Dumps the session shown by the visible {@link SessionDetailsActivity} as pretty-printed JSON.
     * Reads the activity's private "session" field via reflection, debug-build only.
     */
    private void displayCurrentSessionData(PrintStream writer) {
        Activity activity = activityProvider.getCurrentActivity();
        if (activity instanceof SessionDetailsActivity) {
            try {
                // Use reflection to access private "session" field
                Field field = SessionDetailsActivity.class.getDeclaredField("session");
                field.setAccessible(true);
                Session session = (Session) field.get(activity);
                writer.println(new GsonBuilder().setPrettyPrinting().create().toJson(session));
            } catch (Exception e) {
                writer.println(e.getMessage());
            }
        } else {
            writer.println("SessionDetailsActivity not visible");
        }
    }

    /**
     * Handles "endpoint get" and "endpoint set <value>". Setting an endpoint persists it and
     * restarts the app so the change takes effect.
     */
    private void changeEndpoint(PrintStream writer, List<String> args) {
        if (args.size() < 1) {
            doUsage(writer);
        } else {
            switch (args.get(0)) {
                case "get":
                    writer.println(String.format(Locale.US, "Endpoint: %s", endpoint));
                    break;
                case "set":
                    if (args.size() < 2) {
                        doUsage(writer);
                    } else {
                        String arg = args.get(1);
                        try {
                            // Named endpoints (e.g. PROD, MOCK) are resolved case-insensitively.
                            ApiEndpoint newEndpoint = ApiEndpoint.valueOf(arg.toUpperCase(Locale.US));
                            ApiEndpoint.persist(context, newEndpoint);
                        } catch (IllegalArgumentException e) {
                            // Not a named endpoint: persist the raw value as a custom URL.
                            ApiEndpoint.persist(context, arg);
                        }
                        restartApp(writer);
                    }
                    break;
                default:
                    // Bug fix: an unknown sub-command used to throw a bare IllegalArgumentException,
                    // which escaped dump() (declared to throw only DumpException) as an unchecked
                    // crash; print usage instead, matching dump()'s unknown-command handling.
                    doUsage(writer);
                    break;
            }
        }
    }

    /** Fires a reminder notification for the first session that has speakers, for manual testing. */
    private void displayNotificationReminder() {
        sessionsDao.getSessions()
                .flatMap(Observable::from)
                .filter(session -> session.getSpeakers() != null)
                .first()
                .subscribe(session -> {
                    Intent intent = new ReminderReceiverIntentBuilder(session).build(context);
                    new ReminderReceiver().onReceive(context, intent);
                });
    }

    /** Prints the list of supported commands. */
    private void doUsage(PrintStream writer) {
        writer.println("usage: dumpapp [arg]");
        writer.println();
        writer.println("arg:");
        writer.println("* alarms: Display AlarmManager active alarms");
        writer.println("* appInfo: Display current app build info");
        writer.println("* bootReceiver: Display boot receiver state");
        writer.println("* currentSession: Display current session data");
        writer.println("* endpoint get: Display current api endpoint");
        writer.println("* endpoint set (PROD|MOCK|\"https?://<url>\"): Change api endpoint");
        writer.println("* notif: Test a notification reminder");
    }

    /** Restarts the whole process via ProcessPhoenix. */
    private void restartApp(PrintStream writer) {
        writer.println("Restarting app...");
        // Restart after a short delay so stetho has time to flush the previous message.
        new Thread(() -> {
            SystemClock.sleep(500);
            ProcessPhoenix.triggerRebirth(context, new Intent(context, DrawerActivity.class));
        }).start();
    }
}
| |
/*
* This file is provided to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.basho.riak.client.plain;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import com.basho.riak.client.RiakBucketInfo;
import com.basho.riak.client.RiakClient;
import com.basho.riak.client.RiakConfig;
import com.basho.riak.client.RiakObject;
import com.basho.riak.client.request.RequestMeta;
import com.basho.riak.client.request.RiakWalkSpec;
import com.basho.riak.client.response.BucketResponse;
import com.basho.riak.client.response.FetchResponse;
import com.basho.riak.client.response.HttpResponse;
import com.basho.riak.client.response.RiakExceptionHandler;
import com.basho.riak.client.response.RiakResponseRuntimeException;
import com.basho.riak.client.response.StoreResponse;
import com.basho.riak.client.response.StreamHandler;
import com.basho.riak.client.response.WalkResponse;
/**
* An adapter from {@link RiakClient} to a slightly less HTTP, more
* Java-centric, interface. Objects are returned without HTTP specific
* information and exceptions are thrown on unsuccessful responses.
*/
public class PlainClient {
private RiakClient impl;
/** Connects to Riak using the given configuration. */
public static PlainClient getClient(RiakConfig config) {
    RiakClient delegate = new RiakClient(config);
    return new PlainClient(delegate);
}

/** Connects to Riak at the given URL. */
public static PlainClient getClient(String url) {
    RiakClient delegate = new RiakClient(url);
    return new PlainClient(delegate);
}
/**
 * The primary constructor. Wraps an existing {@link RiakClient} and installs a custom
 * {@link RiakExceptionHandler} that converts runtime exceptions into checked ones.
 */
public PlainClient(RiakClient riakClient) {
    this.impl = riakClient;
    this.impl.setExceptionHandler(new ConvertToCheckedExceptions());
}
/**
 * Like {@link RiakClient#setBucketSchema(String, RiakBucketInfo, RequestMeta)}, except throws
 * on a non-204 response.
 *
 * @throws RiakIOException
 *             If an error occurs during communication with the Riak server.
 * @throws RiakResponseException
 *             if the server does not successfully update the bucket schema.
 */
public void setBucketSchema(String bucket, RiakBucketInfo bucketInfo, RequestMeta meta) throws RiakIOException,
        RiakResponseException {
    HttpResponse response = impl.setBucketSchema(bucket, bucketInfo, meta);
    if (response.getStatusCode() != 204) {
        throw new RiakResponseException(new RiakResponseRuntimeException(response, response.getBody()));
    }
}

/** Convenience overload of {@link #setBucketSchema(String, RiakBucketInfo, RequestMeta)} with no request metadata. */
public void setBucketSchema(String bucket, RiakBucketInfo bucketInfo) throws RiakIOException, RiakResponseException {
    setBucketSchema(bucket, bucketInfo, null);
}
/**
 * Like {@link RiakClient#listBucket(String, RequestMeta)}, except throws on a non-200 response.
 *
 * @throws RiakIOException
 *             If an error occurs during communication with the Riak server.
 * @throws RiakResponseException
 *             if the server does not return the bucket information
 */
public RiakBucketInfo listBucket(String bucket, RequestMeta meta) throws RiakIOException, RiakResponseException {
    BucketResponse response = impl.listBucket(bucket, meta);
    if (response.getStatusCode() != 200) {
        throw new RiakResponseException(new RiakResponseRuntimeException(response, response.getBody()));
    }
    return response.getBucketInfo();
}

/** Convenience overload of {@link #listBucket(String, RequestMeta)} with no request metadata. */
public RiakBucketInfo listBucket(String bucket) throws RiakIOException, RiakResponseException {
    return listBucket(bucket, null);
}
/**
 * Like {@link RiakClient#store(RiakObject, RequestMeta)}, except a response
 * other than 200 or 204 is converted into a checked exception. On success the
 * passed-in {@link RiakObject} is updated with the new metadata from Riak.
 *
 * @throws RiakIOException
 *             If an error occurs during communication with the Riak server.
 * @throws RiakResponseException
 *             If the server does not succesfully store the object.
 */
public void store(RiakObject object, RequestMeta meta) throws RiakIOException, RiakResponseException {
    final StoreResponse response = impl.store(object, meta);
    final int status = response.getStatusCode();
    final boolean stored = (status == 200) || (status == 204);
    if (!stored) {
        throw new RiakResponseException(new RiakResponseRuntimeException(response, response.getBody()));
    }
    // Refresh vclock/metadata on the caller's object from the store response.
    object.updateMeta(response);
}
/**
 * Convenience overload of {@link #store(RiakObject, RequestMeta)} with no
 * request meta.
 */
public void store(RiakObject object) throws RiakIOException, RiakResponseException {
store(object, null);
}
/**
 * Like {@link RiakClient#fetchMeta(String, String, RequestMeta)}, except it
 * returns the fetched object metadata directly, or throws if the response is
 * not a 200, 304 or 404.
 *
 * @return {@link RiakObject} or null if object doesn't exist.
 *
 * @throws RiakIOException
 *             If an error occurs during communication with the Riak server.
 * @throws RiakResponseException
 *             If the server does return a valid object
 */
public RiakObject fetchMeta(String bucket, String key, RequestMeta meta) throws RiakIOException,
        RiakResponseException {
    final FetchResponse response = impl.fetchMeta(bucket, key, meta);
    switch (response.getStatusCode()) {
    case 404:
        // Missing object is not an error for a meta fetch.
        return null;
    case 304:
        return response.getObject();
    case 200:
        if (!response.hasObject()) {
            throw new RiakResponseException(new RiakResponseRuntimeException(response, "Failed to parse metadata"));
        }
        return response.getObject();
    default:
        throw new RiakResponseException(new RiakResponseRuntimeException(response, response.getBody()));
    }
}
/**
 * Convenience overload of {@link #fetchMeta(String, String, RequestMeta)}
 * with no request meta.
 */
public RiakObject fetchMeta(String bucket, String key) throws RiakIOException, RiakResponseException {
return fetchMeta(bucket, key, null);
}
/**
 * Like {@link RiakClient#fetch(String, String, RequestMeta)}, except it
 * returns the fetched object directly, or throws if the response is not a
 * 200, 304 or 404.
 *
 * @return {@link RiakObject} or null if object doesn't exist. If siblings
 *         exist, then returns one of the siblings.
 * @throws RiakIOException
 *             If an error occurs during communication with the Riak server.
 * @throws RiakResponseException
 *             If the server does return a valid object
 */
public RiakObject fetch(String bucket, String key, RequestMeta meta) throws RiakIOException, RiakResponseException {
    final FetchResponse response = impl.fetch(bucket, key, meta);
    switch (response.getStatusCode()) {
    case 404:
        // Missing object is reported as null rather than an exception.
        return null;
    case 304:
        return response.getObject();
    case 200:
        if (!response.hasObject()) {
            throw new RiakResponseException(new RiakResponseRuntimeException(response, "Failed to parse object"));
        }
        return response.getObject();
    default:
        throw new RiakResponseException(new RiakResponseRuntimeException(response, response.getBody()));
    }
}
/**
 * Convenience overload of {@link #fetch(String, String, RequestMeta)} with
 * no request meta.
 */
public RiakObject fetch(String bucket, String key) throws RiakIOException, RiakResponseException {
return fetch(bucket, key, null);
}
/**
 * Like {@link RiakClient#fetch(String, String, RequestMeta)}, except it
 * returns all the fetched objects (siblings included) directly, or throws if
 * the response is not a 200, 304 or 404.
 *
 * @return All sibling {@link RiakObject} or null no object exist.
 * @throws RiakIOException
 *             If an error occurs during communication with the Riak server.
 * @throws RiakResponseException
 *             If the server does return any valid objects
 */
public Collection<? extends RiakObject> fetchAll(String bucket, String key, RequestMeta meta)
        throws RiakIOException, RiakResponseException {
    final FetchResponse response = impl.fetch(bucket, key, meta);
    final int status = response.getStatusCode();
    if (status == 404) {
        return null;
    }
    if (status != 200 && status != 304) {
        throw new RiakResponseException(new RiakResponseRuntimeException(response, response.getBody()));
    }
    // A 200 must carry at least one parseable object or sibling set.
    if (status == 200 && !(response.hasObject() || response.hasSiblings())) {
        throw new RiakResponseException(new RiakResponseRuntimeException(response, "Failed to parse object"));
    }
    return response.hasSiblings() ? response.getSiblings() : Arrays.asList(response.getObject());
}
/**
 * Convenience overload of {@link #fetchAll(String, String, RequestMeta)}
 * with no request meta.
 */
public Collection<? extends RiakObject> fetchAll(String bucket, String key) throws RiakIOException,
RiakResponseException {
return fetchAll(bucket, key, null);
}
/**
 * Identical to
 * {@link RiakClient#stream(String, String, StreamHandler, RequestMeta)}.
 * This wrapper performs no status-code checking of its own; the handler sees
 * the raw response.
 *
 * @return the result of the underlying client's stream call
 * @throws IOException if an I/O error occurs while streaming
 */
public boolean stream(String bucket, String key, StreamHandler handler, RequestMeta meta) throws IOException {
return impl.stream(bucket, key, handler, meta);
}
/**
 * Like {@link RiakClient#delete(String, String, RequestMeta)}, except a
 * response other than 204 or 404 is converted into a checked exception. Note
 * that delete succeeds if the object did not previously exist (404 response).
 *
 * @throws RiakIOException
 *             If an error occurs during communication with the Riak server.
 * @throws RiakResponseException
 *             If the object was not deleted.
 */
public void delete(String bucket, String key, RequestMeta meta) throws RiakIOException, RiakResponseException {
    final HttpResponse response = impl.delete(bucket, key, meta);
    final int status = response.getStatusCode();
    // 404 counts as success: the object is gone either way.
    final boolean deleted = (status == 204) || (status == 404);
    if (!deleted) {
        throw new RiakResponseException(new RiakResponseRuntimeException(response, response.getBody()));
    }
}
/**
 * Convenience overload of {@link #delete(String, String, RequestMeta)} with
 * no request meta.
 */
public void delete(String bucket, String key) throws RiakIOException, RiakResponseException {
delete(bucket, key, null);
}
/**
 * Like {@link RiakClient#walk(String, String, String, RequestMeta)}, except
 * a response other than 200 or 404 is converted into a checked exception.
 *
 * @return list of lists of {@link RiakObject}s corresponding to steps of
 *         the walk. Returns null if the source object doesn't exist.
 * @throws RiakIOException
 *             If an error occurs during communication with the Riak server.
 * @throws RiakResponseException
 *             If the links could not be walked or the result steps were not
 *             returned.
 */
public List<? extends List<? extends RiakObject>> walk(String bucket, String key, String walkSpec, RequestMeta meta)
        throws RiakIOException, RiakResponseException {
    final WalkResponse response = impl.walk(bucket, key, walkSpec, meta);
    final int status = response.getStatusCode();
    if (status == 404) {
        // Walking from a nonexistent source object yields null, not an error.
        return null;
    }
    if (status != 200) {
        throw new RiakResponseException(new RiakResponseRuntimeException(response, response.getBody()));
    }
    if (!response.hasSteps()) {
        throw new RiakResponseException(new RiakResponseRuntimeException(response, "Failed to parse walk results"));
    }
    return response.getSteps();
}
/**
 * Convenience overload of
 * {@link #walk(String, String, String, RequestMeta)} with no request meta.
 */
public List<? extends List<? extends RiakObject>> walk(String bucket, String key, String walkSpec)
throws RiakIOException, RiakResponseException {
return walk(bucket, key, walkSpec, null);
}
/**
 * Convenience overload of
 * {@link #walk(String, String, String, RequestMeta)} that takes a
 * {@link RiakWalkSpec}, converted via {@code toString()}, and no request
 * meta.
 */
public List<? extends List<? extends RiakObject>> walk(String bucket, String key, RiakWalkSpec walkSpec)
throws RiakIOException, RiakResponseException {
return walk(bucket, key, walkSpec.toString(), null);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import org.apache.ignite.cache.CacheAtomicityMode;
import org.apache.ignite.cache.CacheMode;
import org.apache.ignite.cache.affinity.Affinity;
import org.apache.ignite.cache.affinity.rendezvous.RendezvousAffinityFunction;
import org.apache.ignite.cluster.ClusterNode;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.configuration.NearCacheConfiguration;
import org.apache.ignite.internal.processors.affinity.GridAffinityProcessor;
import org.apache.ignite.internal.processors.cache.IgniteCacheAbstractTest;
import org.junit.Test;
import static org.apache.ignite.cache.CacheAtomicityMode.TRANSACTIONAL;
import static org.apache.ignite.cache.CacheMode.PARTITIONED;
/**
 * Tests for {@link GridAffinityProcessor.CacheAffinityProxy}.
 *
 * Compares the affinity facade obtained via {@code grid(0).affinity(...)} against the
 * internal cache affinity for every public {@link Affinity} operation, both before and
 * after a topology change (node restart plus one extra node).
 */
public class IgniteCacheAffinitySelfTest extends IgniteCacheAbstractTest {
/** Initial grid count. NOTE: mutable — incremented in testAffinity() after an extra node is started. */
private int GRID_CNT = 3;
/** Name of the second cache, configured with a RendezvousAffinityFunction. */
private final String CACHE2 = "Rendezvous";
/** {@inheritDoc} */
@Override protected int gridCount() {
return GRID_CNT;
}
/** {@inheritDoc} */
@Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception {
IgniteConfiguration cfg = super.getConfiguration(igniteInstanceName);
CacheConfiguration cache0 = cacheConfiguration(null);
CacheConfiguration cache2 = cacheConfiguration(null);
cache2.setName(CACHE2);
cache2.setAffinity(new RendezvousAffinityFunction());
// Node "0" hosts only the default cache; every other node also hosts CACHE2.
if (igniteInstanceName.contains("0"))
cfg.setCacheConfiguration(cache0);
else
cfg.setCacheConfiguration(cache0, cache2);
return cfg;
}
/** {@inheritDoc} */
@Override protected CacheMode cacheMode() {
return PARTITIONED;
}
/** {@inheritDoc} */
@Override protected CacheAtomicityMode atomicityMode() {
return TRANSACTIONAL;
}
/** {@inheritDoc} */
@Override protected NearCacheConfiguration nearConfiguration() {
return new NearCacheConfiguration();
}
/**
 * Checks affinity consistency, then forces a topology change (restart the last
 * node and start one additional node) and checks again.
 *
 * @throws Exception if failed.
 */
@Test
public void testAffinity() throws Exception {
checkAffinity();
stopGrid(gridCount() - 1);
startGrid(gridCount() - 1);
startGrid(gridCount());
// Keep gridCount() consistent with the node just added.
GRID_CNT += 1;
awaitPartitionMapExchange();
checkAffinity();
}
/**
 * Check CacheAffinityProxy methods for both configured caches.
 */
private void checkAffinity() {
checkAffinity(grid(0).affinity(DEFAULT_CACHE_NAME), internalCache(1, DEFAULT_CACHE_NAME).affinity());
checkAffinity(grid(0).affinity(CACHE2), internalCache(1, CACHE2).affinity());
}
/**
 * Runs every per-method comparison between the proxy affinity and the internal one.
 *
 * @param testAff Cache affinity to test (proxy).
 * @param aff Cache affinity (internal, reference).
 */
private void checkAffinity(Affinity testAff, Affinity aff) {
checkAffinityKey(testAff, aff);
checkPartitions(testAff, aff);
checkIsBackupOrPrimary(testAff, aff);
checkMapKeyToNode(testAff, aff);
checkMapKeysToNodes(testAff, aff);
checkMapPartitionToNode(testAff, aff);
checkMapPartitionsToNodes(testAff, aff);
}
/**
 * Check affinityKey method for keys 0..9999.
 *
 * @param testAff Affinity under test.
 * @param aff Reference affinity.
 */
private void checkAffinityKey(Affinity testAff, Affinity aff) {
for (int i = 0; i < 10000; i++)
assertEquals(testAff.affinityKey(i), aff.affinityKey(i));
}
/**
 * Check allPartitions, backupPartitions and primaryPartitions methods for each node.
 *
 * @param testAff Affinity under test.
 * @param aff Reference affinity.
 */
private void checkPartitions(Affinity testAff, Affinity aff) {
for (ClusterNode n : nodes()) {
checkEqualIntArray(testAff.allPartitions(n), aff.allPartitions(n));
checkEqualIntArray(testAff.backupPartitions(n), aff.backupPartitions(n));
checkEqualIntArray(testAff.primaryPartitions(n), aff.primaryPartitions(n));
}
}
/**
 * Check isBackup, isPrimary and isPrimaryOrBackup methods for keys 0..9999 on each node.
 *
 * @param testAff Affinity under test.
 * @param aff Reference affinity.
 */
private void checkIsBackupOrPrimary(Affinity testAff, Affinity aff) {
for (int i = 0; i < 10000; i++)
for (ClusterNode n : nodes()) {
assertEquals(testAff.isBackup(n, i), aff.isBackup(n, i));
assertEquals(testAff.isPrimary(n, i), aff.isPrimary(n, i));
assertEquals(testAff.isPrimaryOrBackup(n, i), aff.isPrimaryOrBackup(n, i));
}
}
/**
 * Check mapKeyToNode, mapKeyToPrimaryAndBackups methods for keys 0..9999.
 *
 * @param testAff Affinity under test.
 * @param aff Reference affinity.
 */
private void checkMapKeyToNode(Affinity testAff, Affinity aff) {
for (int i = 0; i < 10000; i++) {
assertEquals(testAff.mapKeyToNode(i).id(), aff.mapKeyToNode(i).id());
checkEqualCollection(testAff.mapKeyToPrimaryAndBackups(i), aff.mapKeyToPrimaryAndBackups(i));
}
}
/**
 * Check mapPartitionToPrimaryAndBackups and mapPartitionToNode methods for every partition.
 *
 * @param testAff Affinity under test.
 * @param aff Reference affinity.
 */
private void checkMapPartitionToNode(Affinity testAff, Affinity aff) {
assertEquals(aff.partitions(), testAff.partitions());
for (int part = 0; part < aff.partitions(); ++part) {
assertEquals(testAff.mapPartitionToNode(part).id(), aff.mapPartitionToNode(part).id());
checkEqualCollection(testAff.mapPartitionToPrimaryAndBackups(part),
aff.mapPartitionToPrimaryAndBackups(part));
}
}
/**
 * Check mapKeysToNodes methods for keys 0..9999.
 *
 * @param testAff Affinity under test.
 * @param aff Reference affinity.
 */
private void checkMapKeysToNodes(Affinity testAff, Affinity aff) {
List<Integer> keys = new ArrayList<>(10000);
for (int i = 0; i < 10000; ++i)
keys.add(i);
checkEqualMaps(testAff.mapKeysToNodes(keys), aff.mapKeysToNodes(keys));
}
/**
 * Check mapPartitionsToNodes methods over the full partition range.
 *
 * @param testAff Affinity under test.
 * @param aff Reference affinity.
 */
private void checkMapPartitionsToNodes(Affinity testAff, Affinity aff) {
List<Integer> parts = new ArrayList<>(aff.partitions());
for (int i = 0; i < aff.partitions(); ++i)
parts.add(i);
checkEqualPartitionMaps(testAff.mapPartitionsToNodes(parts), aff.mapPartitionsToNodes(parts));
}
/**
 * Check that the two int arrays contain the same elements, ignoring order
 * (elements are compared as a multiset via set insert/remove).
 *
 * @param arr1 Array 1.
 * @param arr2 Array 2.
 */
private static void checkEqualIntArray(int[] arr1, int[] arr2) {
assertEquals(arr1.length, arr2.length);
Collection<Integer> col1 = new HashSet<>();
for (int anArr1 : arr1)
col1.add(anArr1);
for (int anArr2 : arr2) {
assertTrue(col1.contains(anArr2));
col1.remove(anArr2);
}
assertEquals(0, col1.size());
}
/**
 * Check that the two node collections have equal size and col2 contains every
 * node of col1 (order-insensitive).
 *
 * @param col1 Collection 1.
 * @param col2 Collection 2.
 */
private static void checkEqualCollection(Collection<ClusterNode> col1, Collection<ClusterNode> col2) {
assertEquals(col1.size(), col2.size());
for (ClusterNode node : col1)
assertTrue(col2.contains(node));
}
/**
 * Check that the node-to-keys maps have the same keys and per-key collection sizes.
 * NOTE: only sizes of the value collections are compared, not their contents.
 *
 * @param map1 Map1.
 * @param map2 Map2.
 */
private static void checkEqualMaps(Map<ClusterNode, Collection> map1, Map<ClusterNode, Collection> map2) {
assertEquals(map1.size(), map2.size());
for (ClusterNode node : map1.keySet()) {
assertTrue(map2.containsKey(node));
assertEquals(map1.get(node).size(), map2.get(node).size());
}
}
/**
 * Check that the partition-to-node maps are equal key by key.
 *
 * @param map1 Map1.
 * @param map2 Map2.
 */
private static void checkEqualPartitionMaps(Map<Integer, ClusterNode> map1, Map<Integer, ClusterNode> map2) {
assertEquals(map1.size(), map2.size());
for (Integer i : map1.keySet()) {
assertTrue(map2.containsKey(i));
assertEquals(map1.get(i), map2.get(i));
}
}
/**
 * @return Cluster nodes as seen from grid 0.
 */
private Collection<ClusterNode> nodes() {
return grid(0).cluster().nodes();
}
}
| |
/*******************************************************************************
* Copyright (c) 2014 ARM Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
package com.arm.cmsis.pack.rte;
import java.util.Collection;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.Set;
import com.arm.cmsis.pack.CpPlugIn;
import com.arm.cmsis.pack.ICpPackManager;
import com.arm.cmsis.pack.data.CpConditionContext;
import com.arm.cmsis.pack.data.CpItem;
import com.arm.cmsis.pack.data.CpPackFilter;
import com.arm.cmsis.pack.data.ICpComponent;
import com.arm.cmsis.pack.data.ICpConditionContext;
import com.arm.cmsis.pack.data.ICpDeviceItem;
import com.arm.cmsis.pack.data.ICpFile;
import com.arm.cmsis.pack.data.ICpItem;
import com.arm.cmsis.pack.data.ICpPack;
import com.arm.cmsis.pack.data.ICpPackCollection;
import com.arm.cmsis.pack.data.ICpPackFilter;
import com.arm.cmsis.pack.data.ICpTaxonomy;
import com.arm.cmsis.pack.enums.EEvaluationResult;
import com.arm.cmsis.pack.events.RteEvent;
import com.arm.cmsis.pack.events.IRteEventProxy;
import com.arm.cmsis.pack.info.CpComponentInfo;
import com.arm.cmsis.pack.info.CpConfigurationInfo;
import com.arm.cmsis.pack.info.CpFileInfo;
import com.arm.cmsis.pack.info.CpPackInfo;
import com.arm.cmsis.pack.info.ICpComponentInfo;
import com.arm.cmsis.pack.info.ICpConfigurationInfo;
import com.arm.cmsis.pack.info.ICpDeviceInfo;
import com.arm.cmsis.pack.info.ICpFileInfo;
import com.arm.cmsis.pack.info.ICpPackInfo;
import com.arm.cmsis.pack.rte.components.IRteComponent;
import com.arm.cmsis.pack.rte.components.IRteComponentGroup;
import com.arm.cmsis.pack.rte.components.IRteComponentItem;
import com.arm.cmsis.pack.rte.components.RteComponentRoot;
import com.arm.cmsis.pack.rte.devices.IRteDeviceItem;
import com.arm.cmsis.pack.rte.devices.RteDeviceItem;
/**
 * Default implementation of IRteConfiguration interface.
 *
 * Holds the state of an RTE configuration: the pack filter, selected device and
 * toolchain, the filtered component tree, and a dependency solver. Changes to the
 * filter attributes rebuild the component tree and notify listeners through the
 * {@link IRteEventProxy}.
 */
public class RteConfiguration implements IRteConfiguration {
// object to store/load configuration meta data
protected ICpConfigurationInfo fConfigurationInfo = null;
// filtered Packs
protected ICpPackFilter fPackFilter = null;
protected Collection<ICpPack> fFilteredPacks = null;
// selected device
protected ICpDeviceInfo fDeviceInfo = null;
// selected toolchain
protected ICpItem fToolchainInfo = null;
// component filter
protected ICpConditionContext fComponentFilter = null;
// filtered components tree
protected RteComponentRoot fComponentRoot = null;
// filtered device tree
protected IRteDeviceItem fRteDevices = null;
// engine to evaluate/resolve component dependencies
protected IRteDependencySolver fDependencySolver = null;
// instance data -> project
// private IRteProjectData
// event proxy to notify GUI
protected IRteEventProxy fRteEventProxy = null;
// reentrancy guard: suppresses modify events while resolveDependencies() runs
protected boolean tbResolvingComponents = false;
/**
 * Default constructor
 */
public RteConfiguration() {
// default uses "use the latest available components" filter
fPackFilter = new CpPackFilter();
fDependencySolver = new RteDependencySolver(this);
}
@Override
public IRteEventProxy getRteEventProxy() {
return fRteEventProxy;
}
@Override
public void setRteEventProxy(IRteEventProxy rteEventProxy) {
fRteEventProxy = rteEventProxy;
}
@Override
public ICpConfigurationInfo getConfigurationInfo() {
return fConfigurationInfo;
}
// Loads state from the supplied configuration info: sets device/toolchain filter
// attributes (which rebuilds the component tree), then re-adds the previously
// selected components and APIs, and finally re-evaluates dependencies.
@Override
public void setConfigurationInfo(ICpConfigurationInfo info) {
fConfigurationInfo = info;
if(fConfigurationInfo == null) {
clear();
return;
}
ICpDeviceInfo deviceInfo = info.getDeviceInfo();
ICpItem ti = info.getToolchainInfo();
setFilterAttributes(deviceInfo, ti);
// resolve components and select them
// NOTE(review): the aggregated 'result' below is computed but never used or
// returned — looks like a leftover; confirm before removing.
EEvaluationResult result = EEvaluationResult.FULFILLED;
EEvaluationResult res = resolveComponents(info.getChildren("components"));
if(res.ordinal() < result.ordinal())
result = res;
res = resolveComponents(info.getChildren("apis"));
if(res.ordinal() < result.ordinal())
result = res;
evaluateDependencies();
}
// Re-adds stored component/API info items to the component tree and returns the
// worst (lowest-ordinal) evaluation result seen among them.
private EEvaluationResult resolveComponents(Collection<? extends ICpItem> children) {
EEvaluationResult result = EEvaluationResult.FULFILLED;
if(children == null || children.isEmpty())
return result;
for(ICpItem item : children){
if(item instanceof ICpComponentInfo) { // skip doc and description items
ICpComponentInfo ci = (ICpComponentInfo) item;
if(ci.isApi())
fComponentRoot.addCpItem(ci);
else
fComponentRoot.addComponent(ci);
EEvaluationResult res = ci.getEvaluationResult();
if(res.ordinal() < result.ordinal())
result = res;
}
}
return result;
}
// Serializes the current selection state into a fresh ICpConfigurationInfo tree
// (packages, device, toolchain, apis, components) and emits a "saved" event.
@Override
public void apply() {
fConfigurationInfo = new CpConfigurationInfo();
// store pack filter
ICpItem packs = new CpItem(fConfigurationInfo, "packages");
fConfigurationInfo.addChild(packs);
Set<ICpPack> selectedPacks = new HashSet<ICpPack>();
// TODO: add filtered packages
// add device
fConfigurationInfo.addChild(fDeviceInfo);
// add toolchain
fConfigurationInfo.addChild(fToolchainInfo);
ICpItem apiInfos = new CpItem(fConfigurationInfo, "apis");
fConfigurationInfo.addChild(apiInfos);
ICpItem componentInfos = new CpItem(fConfigurationInfo, "components");
fConfigurationInfo.addChild(componentInfos);
Set<ICpComponent> selectedApis = new HashSet<ICpComponent>();
Collection<IRteComponent> selectedComponents = getSelectedComponents();
for(IRteComponent component : selectedComponents){
ICpComponent c = component.getActiveCpComponent();
if(c == null)
continue;
ICpComponentInfo ci = null;
if(c instanceof ICpComponentInfo) {
// already an info item (unresolved component) — just re-parent it
ci = (ICpComponentInfo)c;
ci.setParent(componentInfos);
} else {
// real component from a pack: snapshot it plus its filtered files
ci = new CpComponentInfo(componentInfos, c, component.getSelectedCount());
Collection<ICpFile> filteredFiles = c.getFilteredFiles(fComponentFilter);
for(ICpFile f : filteredFiles) {
ICpFileInfo fi = new CpFileInfo(ci, f);
ci.addChild(fi);
}
ICpPack pack = c.getPack();
selectedPacks.add(pack);
}
componentInfos.addChild(ci);
IRteComponentGroup g = component.getParentGroup();
// collect used APIs
ICpComponent api = g.getApi();
if(api != null) {
selectedApis.add(api);
}
}
for(ICpComponent api : selectedApis){
ICpComponentInfo ai = null;
if(api instanceof ICpComponentInfo) {
ai = (ICpComponentInfo)api;
ai.setParent(apiInfos);
} else {
ai = new CpComponentInfo(apiInfos, api, 1);
Collection<ICpFile> filteredFiles = api.getFilteredFiles(fComponentFilter);
for(ICpFile f : filteredFiles) {
ICpFileInfo fi = new CpFileInfo(ai, f);
ai.addChild(fi);
}
ICpPack pack = api.getPack();
if(pack != null)
selectedPacks.add(pack);
}
apiInfos.addChild(ai);
}
for(ICpPack pack: selectedPacks){
ICpPackInfo pi = new CpPackInfo(packs, pack);
packs.addChild(pi);
}
// NOTE(review): topic string spells "comsis", not "cmsis" — likely a typo, but
// listeners may already match this literal; confirm before changing it.
emitEvent("com.arm.comsis.pack.rte.configuration.saved", this);
}
// NOTE(review): only resets the device tree and filtered packs; fComponentRoot,
// fDeviceInfo, fToolchainInfo etc. are left as-is — confirm this is intended.
@Override
public void clear() {
fRteDevices = null;
fFilteredPacks = null;
}
// Refreshes fFilteredPacks from the pack manager using the current pack filter.
public void collectPacks() {
fFilteredPacks = null;
ICpPackCollection packs = null;
ICpPackManager pm = CpPlugIn.getDefault().getPackManager();
if(pm != null)
packs = pm.getPacks();
if(packs != null)
fFilteredPacks = packs.getFilteredPacks(fPackFilter);
}
/**
 * Sets pack filter for the configuration and invalidates cached pack/device data.
 * @param filter ICpPackFilter to set
 */
void setPackFilter(ICpPackFilter filter) {
clear();
fPackFilter = filter;
}
@Override
public ICpPackFilter getPackFilter() {
return fPackFilter;
}
@Override
public ICpDeviceInfo getDeviceInfo() {
return fDeviceInfo;
}
@Override
public void setDeviceInfo(ICpDeviceInfo deviceInfo) {
fDeviceInfo = deviceInfo;
updateConditionContext();
collectPackData();
emitModifyEvent();
}
@Override
public ICpDeviceItem getDevice() {
if(fDeviceInfo != null)
return fDeviceInfo.getDevice();
return null;
}
@Override
public ICpItem getToolchainInfo() {
return fToolchainInfo;
}
@Override
public void setToolchainInfo(ICpItem toolchainInfo) {
fToolchainInfo = toolchainInfo;
updateConditionContext();
collectPackData();
emitModifyEvent();
}
// Sets device and toolchain together, then rebuilds filter context, pack data
// and component tree, and notifies listeners once.
@Override
public void setFilterAttributes(ICpDeviceInfo deviceInfo, ICpItem toolchainInfo) {
fDeviceInfo = deviceInfo;
fToolchainInfo = toolchainInfo;
updateConditionContext();
collectPackData();
emitModifyEvent();
}
/**
 * Returns hierarchical collection of devices available for this target.
 * Lazily built from the currently filtered packs.
 * @return root of device tree as IRteDeviceItem
 */
public IRteDeviceItem getDevices(){
if(fRteDevices == null){
fRteDevices = RteDeviceItem.createTree(fFilteredPacks);
}
return fRteDevices;
}
@Override
public IRteComponentItem getComponents() {
return fComponentRoot;
}
/**
 * Updates component filter by setting new device information.
 * Rebuilds the condition context from device attributes (resolving the device
 * from the device tree if needed) merged with toolchain attributes.
 */
protected void updateConditionContext() {
fComponentFilter = new CpConditionContext();
collectPacks();
getDevices();
if(fDeviceInfo != null) {
fComponentFilter.setAttributes(fDeviceInfo.attributes().getAttributesAsMap());
if(fDeviceInfo.getDevice() == null) {
IRteDeviceItem rteDevice = getDevices().findItem(fDeviceInfo.attributes());
ICpDeviceItem cpDevice = null;
if(rteDevice != null) {
cpDevice = rteDevice.getDevice();
} else {
// TODO set error state: device not found
}
fDeviceInfo.setDevice(cpDevice);
}
}
if(fToolchainInfo != null) {
fComponentFilter.mergeAttributes(fToolchainInfo.attributes());
}
fComponentFilter.resetResult();
}
/**
 * Collects filtered packs and components from the filtered packs
 */
protected void collectPackData() {
collectPacks();
collectComponents();
}
/**
 * Builds filtered components tree: components first, then APIs and taxonomy
 * items, so groups exist before their metadata is attached.
 */
protected void collectComponents() {
fComponentRoot = new RteComponentRoot();
// create component tree
if(fFilteredPacks == null || fFilteredPacks.isEmpty())
return;
// first add components
for(ICpPack pack : fFilteredPacks ){
Collection<? extends ICpItem> children = pack.getChildren("components");
collectComponents(children);
}
// then add APIs and taxonomy items
for(ICpPack pack : fFilteredPacks ){
Collection<? extends ICpItem> children = pack.getChildren("apis");
collectCpItems(children);
children = pack.getChildren("taxonomy");
collectCpItems(children);
}
}
/**
 * Adds taxonomy and API items directly to the component tree.
 * @param children items to scan; null/empty collections are ignored
 */
protected void collectCpItems( Collection<? extends ICpItem> children) {
if(children == null || children.isEmpty())
return;
for(ICpItem item : children){
if(item instanceof ICpTaxonomy || item instanceof ICpComponent) {
fComponentRoot.addCpItem(item); // add directly to group
}
}
}
/**
 * Collect components that pass the current component filter; bundles are
 * flattened by recursing into their children.
 * @param children pack child items to scan; null/empty collections are ignored
 */
protected void collectComponents(Collection<? extends ICpItem> children) {
if(children == null || children.isEmpty())
return;
for(ICpItem item : children){
if(item.getTag().equals("bundle")){
// insert bundle implicitly since its components can be filtered out
collectComponents(item.getChildren());
} else if(item instanceof ICpComponent) { // skip doc and description items
ICpComponent c = (ICpComponent) item;
EEvaluationResult res = c.evaluate(fComponentFilter);
if(res.ordinal() < EEvaluationResult.FULFILLED.ordinal())
continue; // filtered out
fComponentRoot.addComponent(c);
}
}
}
// Returns selected components in insertion order, or null when no tree exists.
@Override
public Collection<IRteComponent> getSelectedComponents() {
if(fComponentRoot != null) {
return fComponentRoot.getSelectedComponents(new LinkedHashSet<IRteComponent>());
}
return null;
}
// Returns used components in insertion order, or null when no tree exists.
@Override
public Collection<IRteComponent> getUsedComponents() {
if(fComponentRoot != null) {
return fComponentRoot.getUsedComponents(new LinkedHashSet<IRteComponent>());
}
return null;
}
@Override
public void selectComponent(IRteComponent component, int nInstances) {
if(component != null) {
component.setSelected(nInstances);
evaluateDependencies();
}
}
@Override
public void selectActiveChild(IRteComponentItem item, String childName) {
if(item != null) {
item.setActiveChild(childName);
evaluateDependencies();
}
}
@Override
public void selectActiveVariant(IRteComponentItem item, String variant) {
if(item != null) {
item.setActiveVariant(variant);
evaluateDependencies();
}
}
@Override
public void selectActiveVendor(IRteComponentItem item, String vendor) {
if(item != null) {
item.setActiveVendor(vendor);
evaluateDependencies();
}
}
@Override
public void selectActiveVersion(IRteComponentItem item, String version) {
if(item != null) {
item.setActiveVersion(version);
evaluateDependencies();
}
}
// Forwards an event to the proxy, if one is installed; no-op otherwise.
protected void emitEvent(final String topic, Object data) {
if(fRteEventProxy != null) {
fRteEventProxy.processRteEvent(new RteEvent(topic, data));
}
}
protected void emitModifyEvent() {
emitEvent(RteEvent.CONFIGURATION_MODIFIED, this);
}
// Evaluates dependencies; the modify event is suppressed while a bulk
// resolveDependencies() run is in progress (see tbResolvingComponents).
@Override
public EEvaluationResult evaluateDependencies() {
EEvaluationResult result = fDependencySolver.evaluateDependencies();
if(!tbResolvingComponents)
emitModifyEvent();
return result;
}
@Override
public EEvaluationResult getEvaluationResult(IRteComponentItem item) {
return fDependencySolver.getEvaluationResult(item);
}
// Runs the solver once, guarded against re-entrance; emits a single modify
// event for the whole resolution pass.
@Override
public EEvaluationResult resolveDependencies() {
EEvaluationResult res = EEvaluationResult.UNDEFINED;
if(tbResolvingComponents)
return res;
tbResolvingComponents = true;
res = fDependencySolver.resolveDependencies();
emitModifyEvent();
tbResolvingComponents = false;
return res;
}
@Override
public Collection<? extends IRteDependencyItem> getDependencyItems() {
return fDependencySolver.getDependencyItems();
}
@Override
public EEvaluationResult getEvaluationResult() {
return fDependencySolver.getEvaluationResult();
}
@Override
public void setEvaluationResult(EEvaluationResult result) {
fDependencySolver.setEvaluationResult(result);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.axis2.jaxws.util;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.wsdl.Binding;
import javax.wsdl.BindingFault;
import javax.wsdl.BindingInput;
import javax.wsdl.BindingOperation;
import javax.wsdl.BindingOutput;
import javax.wsdl.WSDLElement;
import javax.wsdl.extensions.ExtensibilityElement;
import javax.wsdl.extensions.UnknownExtensibilityElement;
import javax.wsdl.extensions.soap.SOAPBinding;
import javax.wsdl.extensions.soap12.SOAP12Binding;
import javax.xml.namespace.QName;
import org.apache.axis2.jaxws.ExceptionFactory;
import org.apache.axis2.jaxws.common.config.WSDLValidatorElement;
import org.apache.axis2.jaxws.common.config.WSDLValidatorElement.State;
import org.apache.axis2.jaxws.description.EndpointDescription;
import org.apache.axis2.jaxws.description.EndpointDescriptionWSDL;
import org.apache.axis2.jaxws.i18n.Messages;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/**
* This utility class stores methods that can be used to fetch extension elements
* from wsdl and will be used from RespectBindingConfigurator on Client and Server.
*/
public class WSDLExtensionUtils {
private static final Log log = LogFactory.getLog(WSDLExtensionUtils.class);
/**
 * This method will search for all wsdl extensibility elements marked as required=true in wsdl:bindings
 * As per the wsdl 2.2 specification section 2.5 here is how a wsdl:binding is defined:
 * <wsdl:definitions .... >
 *     <wsdl:binding name="nmtoken" type="qname"> *
 *        <-- extensibility element (1) --> *
 *        <wsdl:operation name="nmtoken"> *
 *           <-- extensibility element (2) --> *
 *           <wsdl:input name="nmtoken"? > ?
 *               <-- extensibility element (3) -->
 *           </wsdl:input>
 *           <wsdl:output name="nmtoken"? > ?
 *               <-- extensibility element (4) --> *
 *           </wsdl:output>
 *           <wsdl:fault name="nmtoken"> *
 *               <-- extensibility element (5) --> *
 *           </wsdl:fault>
 *        </wsdl:operation>
 *     </wsdl:binding>
 * </wsdl:definitions>
 * we will look for wsdl extensions in binding root, wsdl:operation, wsdl:input, wsdl:output and wsdl:fault.
 * If the extensibility element is defined outside of these sections it will not be picked up by this method.
 *
 * @param element - WSDL element (typically the wsdl:binding) to search; null is tolerated.
 * @param set - Set that will be filled with list of required=true extension elements.
 * @param unusedExtensions - List that collects QNames of extensions that are not used.
 */
public static void search(WSDLElement element, Set<WSDLValidatorElement> set, List<QName> unusedExtensions) {
    if (log.isDebugEnabled()) {
        log.debug("Start Searching for WSDLExtensions");
    }
    if (element == null) {
        return;
    }
    // Simple BFS over the binding subtree: Binding, BindingOperation, BindingInput,
    // BindingOutput and BindingFault are all WSDLElements. Queue starts at the root
    // and is drained once every reachable element has been processed.
    LinkedList<WSDLElement> queue = new LinkedList<WSDLElement>();
    queue.offer(element);
    while (!queue.isEmpty()) {
        WSDLElement wsdlElement = queue.remove();
        // Find extensibility elements in this element (binding, operation, input,
        // output or fault).
        processWSDLElement(wsdlElement, set, unusedExtensions);
        // A Binding contributes all of its operations to the queue.
        if (wsdlElement instanceof Binding) {
            // TODO: WSDLDef API's don't use generics, hence we use Iterator below and type cast.
            List operations = ((Binding) wsdlElement).getBindingOperations();
            Iterator iter = operations.iterator();
            while (iter.hasNext()) {
                BindingOperation op = (BindingOperation) iter.next();
                queue.offer(op);
            }
        }
        // A BindingOperation contributes its input, output and faults.
        if (wsdlElement instanceof BindingOperation) {
            BindingOperation operation = (BindingOperation) wsdlElement;
            // BUGFIX: one-way / notification style operations have no input or output,
            // so getBindingInput()/getBindingOutput() can return null. Offering null
            // into the queue would NPE later in processWSDLElement; guard first.
            BindingInput bi = operation.getBindingInput();
            if (bi != null) {
                queue.offer(bi);
            }
            BindingOutput bo = operation.getBindingOutput();
            if (bo != null) {
                queue.offer(bo);
            }
            Map map = operation.getBindingFaults();
            if (map != null) {
                Collection c = map.values();
                Iterator iter = c.iterator();
                while (iter.hasNext()) {
                    Object o = iter.next();
                    if (o instanceof BindingFault) {
                        BindingFault bf = (BindingFault) o;
                        queue.offer(bf);
                    }
                }
            }
        }
    }
    if (log.isDebugEnabled()) {
        log.debug("End Searching for WSDLExtensions");
    }
}
private static void processWSDLElement(WSDLElement wsdlElement, Set<WSDLValidatorElement> set, List<QName> unusedExtensions){
if(log.isDebugEnabled()){
log.debug("Start processWSDLElement");
}
List list = wsdlElement.getExtensibilityElements();
if (list == null || list.size() == 0) {
return;
}
Iterator iter = list.iterator();
while (iter.hasNext()) {
ExtensibilityElement e = (ExtensibilityElement) iter.next();
//TODO in JAX-WS 2.1 Respect Binding implementation we are ignoring
//SOAPBinding, review the reason behind this.
if (e instanceof SOAPBinding || e instanceof SOAP12Binding)
continue;
if (e instanceof UnknownExtensibilityElement) {
UnknownExtensibilityElement ue = (UnknownExtensibilityElement) e;
String reqd = ue.getElement().getAttribute("required");
//check if extension element is required.
//one can set extension as required two different ways in wsdl
//lets check both ways here
boolean wsdl_required = e.getRequired() != null && e.getRequired();
boolean wsdl_attribute = reqd!=null && reqd.equalsIgnoreCase("true");
if (wsdl_attribute || wsdl_required) {
if (log.isDebugEnabled()) {
log.debug("Found a required element: " + e.getElementType());
}
WSDLValidatorElement element = new WSDLValidatorElement();
element.setExtensionElement(e);
element.setState(State.NOT_RECOGNIZED);
set.add(element);
}
else {
if (log.isDebugEnabled()) {
log.debug("Found a NOT required element: " + e.getElementType());
}
unusedExtensions.add(e.getElementType());
}
}
}
if(log.isDebugEnabled()){
log.debug("Exit processWSDLElement");
}
}
public static void processExtensions(EndpointDescription endpointDescription){
if(endpointDescription == null){
throw ExceptionFactory.makeWebServiceException(Messages.getMessage("RespectBindingConfiguratorErr1"));
}
EndpointDescriptionWSDL edw = (EndpointDescriptionWSDL) endpointDescription;
if(endpointDescription == null){
throw ExceptionFactory.makeWebServiceException(Messages.getMessage("RespectBindingConfiguratorErr2"));
}
javax.wsdl.Binding wsdlBinding = edw.getWSDLBinding();
Set<WSDLValidatorElement> set = endpointDescription.getRequiredBindings();
if(set.size()>0){
//we have already looked for wsdl extension once, no need to
//find them again. WSDL is shared for all serviceDesc artifacts.
return;
}
List<QName> unusedExtensions = new ArrayList<QName>();
WSDLExtensionUtils.search(wsdlBinding, set, unusedExtensions);
if (log.isDebugEnabled()) {
log.debug("The following extensibility elements were found, but were not required.");
for (int n = 0; n < unusedExtensions.size(); ++n)
log.debug("[" + (n + 1) + "] - " + unusedExtensions.get(n));
}
}
}
| |
/*
* Copyright 2016 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.schemaorg.core;
import com.google.schemaorg.JsonLdContext;
import com.google.schemaorg.SchemaOrgType;
import com.google.schemaorg.core.datatype.Date;
import com.google.schemaorg.core.datatype.DateTime;
import com.google.schemaorg.core.datatype.Text;
import com.google.schemaorg.core.datatype.URL;
import com.google.schemaorg.goog.PopularityScoreSpecification;
import javax.annotation.Nullable;
/** Interface of <a href="http://schema.org/UserLikes">http://schema.org/UserLikes</a>. */
// NOTE: generated schema.org model interface — declarations must stay in sync with the generator.
public interface UserLikes extends UserInteraction {

  /**
   * Builder interface of <a href="http://schema.org/UserLikes">http://schema.org/UserLikes</a>.
   */
  public interface Builder extends UserInteraction.Builder {

    @Override
    Builder addJsonLdContext(@Nullable JsonLdContext context);

    @Override
    Builder addJsonLdContext(@Nullable JsonLdContext.Builder context);

    @Override
    Builder setJsonLdId(@Nullable String value);

    @Override
    Builder setJsonLdReverse(String property, Thing obj);

    @Override
    Builder setJsonLdReverse(String property, Thing.Builder builder);

    /** Add a value to property additionalType. */
    Builder addAdditionalType(URL value);

    /** Add a value to property additionalType. */
    Builder addAdditionalType(String value);

    /** Add a value to property aggregateRating. */
    Builder addAggregateRating(AggregateRating value);

    /** Add a value to property aggregateRating. */
    Builder addAggregateRating(AggregateRating.Builder value);

    /** Add a value to property aggregateRating. */
    Builder addAggregateRating(String value);

    /** Add a value to property alternateName. */
    Builder addAlternateName(Text value);

    /** Add a value to property alternateName. */
    Builder addAlternateName(String value);

    /** Add a value to property attendee. */
    Builder addAttendee(Organization value);

    /** Add a value to property attendee. */
    Builder addAttendee(Organization.Builder value);

    /** Add a value to property attendee. */
    Builder addAttendee(Person value);

    /** Add a value to property attendee. */
    Builder addAttendee(Person.Builder value);

    /** Add a value to property attendee. */
    Builder addAttendee(String value);

    /** Add a value to property attendees. */
    Builder addAttendees(Organization value);

    /** Add a value to property attendees. */
    Builder addAttendees(Organization.Builder value);

    /** Add a value to property attendees. */
    Builder addAttendees(Person value);

    /** Add a value to property attendees. */
    Builder addAttendees(Person.Builder value);

    /** Add a value to property attendees. */
    Builder addAttendees(String value);

    /** Add a value to property description. */
    Builder addDescription(Text value);

    /** Add a value to property description. */
    Builder addDescription(String value);

    /** Add a value to property doorTime. */
    Builder addDoorTime(DateTime value);

    /** Add a value to property doorTime. */
    Builder addDoorTime(String value);

    /** Add a value to property duration. */
    Builder addDuration(Duration value);

    /** Add a value to property duration. */
    Builder addDuration(Duration.Builder value);

    /** Add a value to property duration. */
    Builder addDuration(String value);

    /** Add a value to property endDate. */
    Builder addEndDate(Date value);

    /** Add a value to property endDate. */
    Builder addEndDate(String value);

    /** Add a value to property eventStatus. */
    Builder addEventStatus(EventStatusType value);

    /** Add a value to property eventStatus. */
    Builder addEventStatus(String value);

    /** Add a value to property image. */
    Builder addImage(ImageObject value);

    /** Add a value to property image. */
    Builder addImage(ImageObject.Builder value);

    /** Add a value to property image. */
    Builder addImage(URL value);

    /** Add a value to property image. */
    Builder addImage(String value);

    /** Add a value to property inLanguage. */
    Builder addInLanguage(Language value);

    /** Add a value to property inLanguage. */
    Builder addInLanguage(Language.Builder value);

    /** Add a value to property inLanguage. */
    Builder addInLanguage(Text value);

    /** Add a value to property inLanguage. */
    Builder addInLanguage(String value);

    /** Add a value to property location. */
    Builder addLocation(Place value);

    /** Add a value to property location. */
    Builder addLocation(Place.Builder value);

    /** Add a value to property location. */
    Builder addLocation(PostalAddress value);

    /** Add a value to property location. */
    Builder addLocation(PostalAddress.Builder value);

    /** Add a value to property location. */
    Builder addLocation(Text value);

    /** Add a value to property location. */
    Builder addLocation(String value);

    /** Add a value to property mainEntityOfPage. */
    Builder addMainEntityOfPage(CreativeWork value);

    /** Add a value to property mainEntityOfPage. */
    Builder addMainEntityOfPage(CreativeWork.Builder value);

    /** Add a value to property mainEntityOfPage. */
    Builder addMainEntityOfPage(URL value);

    /** Add a value to property mainEntityOfPage. */
    Builder addMainEntityOfPage(String value);

    /** Add a value to property name. */
    Builder addName(Text value);

    /** Add a value to property name. */
    Builder addName(String value);

    /** Add a value to property offers. */
    Builder addOffers(Offer value);

    /** Add a value to property offers. */
    Builder addOffers(Offer.Builder value);

    /** Add a value to property offers. */
    Builder addOffers(String value);

    /** Add a value to property organizer. */
    Builder addOrganizer(Organization value);

    /** Add a value to property organizer. */
    Builder addOrganizer(Organization.Builder value);

    /** Add a value to property organizer. */
    Builder addOrganizer(Person value);

    /** Add a value to property organizer. */
    Builder addOrganizer(Person.Builder value);

    /** Add a value to property organizer. */
    Builder addOrganizer(String value);

    /** Add a value to property performer. */
    Builder addPerformer(Organization value);

    /** Add a value to property performer. */
    Builder addPerformer(Organization.Builder value);

    /** Add a value to property performer. */
    Builder addPerformer(Person value);

    /** Add a value to property performer. */
    Builder addPerformer(Person.Builder value);

    /** Add a value to property performer. */
    Builder addPerformer(String value);

    /** Add a value to property performers. */
    Builder addPerformers(Organization value);

    /** Add a value to property performers. */
    Builder addPerformers(Organization.Builder value);

    /** Add a value to property performers. */
    Builder addPerformers(Person value);

    /** Add a value to property performers. */
    Builder addPerformers(Person.Builder value);

    /** Add a value to property performers. */
    Builder addPerformers(String value);

    /** Add a value to property potentialAction. */
    Builder addPotentialAction(Action value);

    /** Add a value to property potentialAction. */
    Builder addPotentialAction(Action.Builder value);

    /** Add a value to property potentialAction. */
    Builder addPotentialAction(String value);

    /** Add a value to property previousStartDate. */
    Builder addPreviousStartDate(Date value);

    /** Add a value to property previousStartDate. */
    Builder addPreviousStartDate(String value);

    /** Add a value to property recordedIn. */
    Builder addRecordedIn(CreativeWork value);

    /** Add a value to property recordedIn. */
    Builder addRecordedIn(CreativeWork.Builder value);

    /** Add a value to property recordedIn. */
    Builder addRecordedIn(String value);

    /** Add a value to property review. */
    Builder addReview(Review value);

    /** Add a value to property review. */
    Builder addReview(Review.Builder value);

    /** Add a value to property review. */
    Builder addReview(String value);

    /** Add a value to property sameAs. */
    Builder addSameAs(URL value);

    /** Add a value to property sameAs. */
    Builder addSameAs(String value);

    /** Add a value to property startDate. */
    Builder addStartDate(Date value);

    /** Add a value to property startDate. */
    Builder addStartDate(String value);

    /** Add a value to property subEvent. */
    Builder addSubEvent(Event value);

    /** Add a value to property subEvent. */
    Builder addSubEvent(Event.Builder value);

    /** Add a value to property subEvent. */
    Builder addSubEvent(String value);

    /** Add a value to property subEvents. */
    Builder addSubEvents(Event value);

    /** Add a value to property subEvents. */
    Builder addSubEvents(Event.Builder value);

    /** Add a value to property subEvents. */
    Builder addSubEvents(String value);

    /** Add a value to property superEvent. */
    Builder addSuperEvent(Event value);

    /** Add a value to property superEvent. */
    Builder addSuperEvent(Event.Builder value);

    /** Add a value to property superEvent. */
    Builder addSuperEvent(String value);

    /** Add a value to property typicalAgeRange. */
    Builder addTypicalAgeRange(Text value);

    /** Add a value to property typicalAgeRange. */
    Builder addTypicalAgeRange(String value);

    /** Add a value to property url. */
    Builder addUrl(URL value);

    /** Add a value to property url. */
    Builder addUrl(String value);

    /** Add a value to property workFeatured. */
    Builder addWorkFeatured(CreativeWork value);

    /** Add a value to property workFeatured. */
    Builder addWorkFeatured(CreativeWork.Builder value);

    /** Add a value to property workFeatured. */
    Builder addWorkFeatured(String value);

    /** Add a value to property workPerformed. */
    Builder addWorkPerformed(CreativeWork value);

    /** Add a value to property workPerformed. */
    Builder addWorkPerformed(CreativeWork.Builder value);

    /** Add a value to property workPerformed. */
    Builder addWorkPerformed(String value);

    /** Add a value to property detailedDescription. */
    Builder addDetailedDescription(Article value);

    /** Add a value to property detailedDescription. */
    Builder addDetailedDescription(Article.Builder value);

    /** Add a value to property detailedDescription. */
    Builder addDetailedDescription(String value);

    /** Add a value to property popularityScore. */
    Builder addPopularityScore(PopularityScoreSpecification value);

    /** Add a value to property popularityScore. */
    Builder addPopularityScore(PopularityScoreSpecification.Builder value);

    /** Add a value to property popularityScore. */
    Builder addPopularityScore(String value);

    /**
     * Add a value to property.
     *
     * @param name The property name.
     * @param value The value of the property.
     */
    Builder addProperty(String name, SchemaOrgType value);

    /**
     * Add a value to property.
     *
     * @param name The property name.
     * @param builder The schema.org object builder for the property value.
     */
    Builder addProperty(String name, Thing.Builder builder);

    /**
     * Add a value to property.
     *
     * @param name The property name.
     * @param value The string value of the property.
     */
    Builder addProperty(String name, String value);

    /** Build a {@link UserLikes} object. */
    UserLikes build();
  }
}
| |
/*
* Copyright (C) 2007-2008 Esmertec AG. Copyright (C) 2007-2008 The Android Open
* Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package info.guardianproject.otr.app.im.app;
import info.guardianproject.otr.app.im.IImConnection;
import info.guardianproject.otr.app.im.R;
import info.guardianproject.otr.app.im.engine.ImConnection;
import info.guardianproject.otr.app.im.engine.ImErrorInfo;
import info.guardianproject.otr.app.im.engine.Presence;
import info.guardianproject.otr.app.im.provider.Imps;
import info.guardianproject.util.LogCleaner;
import java.util.ArrayList;
import java.util.List;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.ContentResolver;
import android.content.Context;
import android.content.DialogInterface;
import android.graphics.drawable.Drawable;
import android.os.RemoteException;
import android.text.TextUtils;
import android.util.AttributeSet;
import android.view.KeyEvent;
import android.view.View;
import android.view.ViewGroup;
import android.widget.EditText;
import android.widget.ImageButton;
import android.widget.LinearLayout;
import android.widget.ProgressBar;
import android.widget.TextView;
public class UserPresenceView extends LinearLayout {

    // Opens the status list dialog. Its inflation is currently disabled in
    // onFinishInflate(), so this field stays null — every use must be guarded.
    private ImageButton mStatusDialogButton;

    // views of the popup window; initStatusBar() is currently disabled and returns
    // null, so mStatusBar may be null at any point — guard every use.
    TextView mStatusBar;

    private final SimpleAlertHandler mHandler;
    private IImConnection mConn;
    private long mProviderId;
    Presence mPresence;
    Context mContext;

    private String mLastStatusText;
    final List<StatusItem> mStatusItems = new ArrayList<StatusItem>();

    private ProgressBar mProgressBar;

    public UserPresenceView(Context context, AttributeSet attrs) {
        super(context, attrs);
        mContext = context;
        mHandler = new SimpleAlertHandler((Activity) context);
    }

    @Override
    protected void onFinishInflate() {
        super.onFinishInflate();

        // Skip lookups in the layout-editor preview, where the service views
        // are not meaningful.
        if (isInEditMode())
            return;

        /**
        mStatusDialogButton = (ImageButton) findViewById(R.id.statusDropDownButton);
        mStatusDialogButton.setOnClickListener(new OnClickListener() {
            public void onClick(View v) {
                showStatusListDialog();
            }
        });
        */

        mProgressBar = (ProgressBar) findViewById(R.id.progressBar1);
    }

    /** Shows a dialog listing the supported presence statuses; no-op without a connection. */
    private void showStatusListDialog() {
        if (mConn == null) {
            return;
        }

        AlertDialog.Builder builder = new AlertDialog.Builder(mContext);
        builder.setAdapter(getStatusAdapter(), new DialogInterface.OnClickListener() {
            public void onClick(DialogInterface dialog, int which) {
                StatusItem item = mStatusItems.get(which);
                int oldStatus = mPresence.getStatus();
                // Only push an update when the user actually picked a new status.
                if (item.getStatus() != oldStatus) {
                    updatePresence(item.getStatus(), item.getText().toString());
                }
            }
        });
        builder.show();
    }

    /** Rebuilds mStatusItems from the connection's supported statuses and wraps them in an adapter. */
    private StatusIconAdapter getStatusAdapter() {
        try {
            mStatusItems.clear();
            int[] supportedStatus = mConn.getSupportedPresenceStatus();
            for (int i = 0; i < supportedStatus.length; i++) {
                int s = PresenceUtils.convertStatus(supportedStatus[i]);
                // OFFLINE is presented to the user as INVISIBLE.
                if (s == Imps.Presence.OFFLINE) {
                    s = Imps.Presence.INVISIBLE;
                }
                ImApp app = (ImApp) ((Activity) mContext).getApplication();
                BrandingResources brandingRes = app.getBrandingResource(mProviderId);
                Drawable icon = brandingRes.getDrawable(PresenceUtils.getStatusIconId(s));
                String text = brandingRes.getString(PresenceUtils.getStatusStringRes(s));
                mStatusItems.add(new StatusItem(supportedStatus[i], icon, text));
            }
        } catch (RemoteException e) {
            mHandler.showServiceErrorAlert(e.getLocalizedMessage());
            LogCleaner.error(ImApp.LOG_TAG, "get status adapter error", e);
        }
        return new StatusIconAdapter(mContext, mStatusItems);
    }

    /** Pushes the status text from the (currently disabled) status bar to the server. */
    void updateStatusText() {
        if (mStatusBar == null) {
            // Status-bar UI is disabled (initStatusBar() returns null); nothing to read.
            return;
        }
        String newStatusText = mStatusBar.getText().toString();
        if (TextUtils.isEmpty(newStatusText)) {
            newStatusText = "";
        }
        if (!newStatusText.equals(mLastStatusText)) {
            updatePresence(-1, newStatusText);
        }

        mStatusBar = initStatusBar(mProviderId, false);
    }

    /** Attaches a connection and refreshes the view from its current presence. */
    public void setConnection(IImConnection conn) {
        mConn = conn;
        try {
            mPresence = conn.getUserPresence();
            mProviderId = conn.getProviderId();
        } catch (RemoteException e) {
            mHandler.showServiceErrorAlert(e.getLocalizedMessage());
            LogCleaner.error(ImApp.LOG_TAG, "set connection error", e);
        }
        if (mPresence == null) {
            mPresence = new Presence();
        }
        updateView();
    }

    private void updateView() {
        if (mPresence == null) {
            // No presence yet (e.g. loggingIn() ran before setConnection()).
            return;
        }

        ImApp app = (ImApp) ((Activity) mContext).getApplication();
        BrandingResources brandingRes = app.getBrandingResource(mProviderId);
        int status = PresenceUtils.convertStatus(mPresence.getStatus());

        // The drop-down button is not inflated at the moment (see onFinishInflate),
        // so guard against an NPE.
        if (mStatusDialogButton != null) {
            mStatusDialogButton.setImageDrawable(brandingRes.getDrawable(PresenceUtils
                    .getStatusIconId(status)));
        }

        String statusText = mPresence.getStatusText();
        if (TextUtils.isEmpty(statusText)) {
            statusText = brandingRes.getString(PresenceUtils.getStatusStringRes(status));
        }
        mLastStatusText = statusText;

        if (mStatusBar == null) {
            mStatusBar = initStatusBar(mProviderId, false);
        }
        // initStatusBar() currently returns null, so mStatusBar may still be null here.
        if (mStatusBar != null) {
            mStatusBar.setText(statusText);

            // Disable the user to edit the custom status text because
            // the AIM and MSN server don't support it now.
            ProviderDef provider = app.getProvider(mProviderId);
            String providerName = provider == null ? null : provider.mName;
            if (Imps.ProviderNames.AIM.equals(providerName)
                || Imps.ProviderNames.MSN.equals(providerName)) {
                mStatusBar.setFocusable(false);
            }
        }
    }

    /**
     * Formerly wired up the status edit/view widgets; that UI is disabled for now,
     * so this always returns null. Callers must null-check the result.
     */
    private TextView initStatusBar(long providerId, boolean showEdit) {
        /**
        EditText statusEdit = (EditText) findViewById(R.id.statusEdit);
        statusEdit.setVisibility(View.GONE);
        TextView statusView = (TextView) findViewById(R.id.statusView);
        statusView.setVisibility(View.GONE);
        if (showEdit) {
            statusEdit.setVisibility(View.VISIBLE);
            statusEdit.setOnKeyListener(new OnKeyListener() {
                public boolean onKey(View v, int keyCode, KeyEvent event) {
                    if (KeyEvent.ACTION_DOWN == event.getAction()) {
                        switch (keyCode) {
                        case KeyEvent.KEYCODE_DPAD_CENTER:
                        case KeyEvent.KEYCODE_ENTER:
                            updateStatusText();
                            return true;
                        }
                    }
                    return false;
                }
            });
            statusEdit.setOnFocusChangeListener(new View.OnFocusChangeListener() {
                public void onFocusChange(View v, boolean hasFocus) {
                    if (!hasFocus) {
                        updateStatusText();
                    }
                }
            });
            return statusEdit;
        } else {
            if (mPresence != null) {
                statusView.setText(mPresence.getStatusText());
            }
            statusView.setVisibility(View.VISIBLE);
            statusView.setOnClickListener(new OnClickListener() {
                @Override
                public void onClick(View v) {
                    mStatusBar = initStatusBar(mProviderId, true);
                }
            });
            return statusView;
        }
        */
        return null;
    }

    /**
     * Sends an updated presence to the server and, on success, persists it.
     *
     * @param status     the new status, or -1 to keep the current one.
     * @param statusText the new status text, or null to keep the current one.
     */
    void updatePresence(int status, String statusText) {
        if (mPresence == null) {
            // No connection yet. Don't allow to update presence yet.
            return;
        }

        Presence newPresence = new Presence(mPresence);
        if (status != -1) {
            newPresence.setStatus(status);
        }
        if (statusText != null)
            newPresence.setStatusText(statusText);

        try {
            int res = mConn.updateUserPresence(newPresence);
            if (res != ImErrorInfo.NO_ERROR) {
                mHandler.showAlert(R.string.error, ErrorResUtils.getErrorRes(getResources(), res));
            } else {
                mPresence = newPresence;
                updateView();
                ContentResolver cr = mContext.getContentResolver();
                Imps.ProviderSettings.setPresence(cr, mProviderId, status, statusText);
            }
        } catch (RemoteException e) {
            // Service unreachable; keep the local presence unchanged.
            // mHandler.showServiceErrorAlert();
        }
    }

    /** A presence status entry (status code + branded icon + label) for the list dialog. */
    private static class StatusItem implements ImageListAdapter.ImageListItem {
        private final int mStatus;
        private final Drawable mIcon;
        private final String mText;

        public StatusItem(int status, Drawable icon, String text) {
            mStatus = status;
            mIcon = icon;
            mText = text;
        }

        public Drawable getDrawable() {
            return mIcon;
        }

        public CharSequence getText() {
            return mText;
        }

        public int getStatus() {
            return mStatus;
        }
    }

    /** Adapter whose item ids are the presence status codes. */
    private static class StatusIconAdapter extends ImageListAdapter {
        public StatusIconAdapter(Context context, List<StatusItem> data) {
            super(context, data);
        }

        @Override
        public long getItemId(int position) {
            StatusItem item = (StatusItem) getItem(position);
            return item.getStatus();
        }

        @Override
        public View getView(int position, View convertView, ViewGroup parent) {
            return super.getView(position, convertView, parent);
        }
    }

    // NOTE(review): name keeps its historical typo ("Loggin") because external
    // callers may reference it; consider a deprecating rename later.
    public void refreshLogginInStatus() {
        if (mConn != null) {
            try {
                loggingIn(mConn.getState() == ImConnection.LOGGING_IN);
            } catch (RemoteException e) {
                // Service unreachable; assume we are not logging in.
                loggingIn(false);
                // mHandler.showServiceErrorAlert();
            }
        }
    }

    /** Toggles the login progress indicator and refreshes presence from the service. */
    public void loggingIn(boolean loggingIn) {
        // mProgressBar may be null when the view was inflated in edit mode.
        if (mProgressBar != null) {
            mProgressBar.setVisibility(loggingIn ? View.VISIBLE : View.GONE);
        }

        // mConn may be null if no connection has been attached yet.
        if (mConn != null) {
            try {
                Presence newPresence = mConn.getUserPresence();
                if (newPresence != null)
                    mPresence = newPresence;
            } catch (RemoteException e) {
                // Service unreachable; keep the last known presence.
                // mHandler.showServiceErrorAlert();
            }
        }

        updateView();
    }
}
| |
/*
* Copyright (c) 2017 Mockito contributors
* This program is made available under the terms of the MIT License.
*/
package org.mockitousage.matchers;
import static org.junit.Assert.fail;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.ArgumentMatchers.isNotNull;
import static org.mockito.ArgumentMatchers.isNull;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import org.assertj.core.api.AbstractListAssert;
import org.assertj.core.api.Assertions;
import org.assertj.core.api.Condition;
import org.assertj.core.api.ObjectAssert;
import org.junit.Ignore;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.mockito.ArgumentCaptor;
import org.mockito.ArgumentMatchers;
import org.mockito.Captor;
import org.mockito.Mock;
import org.mockito.exceptions.verification.opentest4j.ArgumentsAreDifferent;
import org.mockito.junit.MockitoJUnit;
import org.mockito.junit.MockitoRule;
import org.mockitousage.IMethods;
/**
 * Tests for matching and capturing varargs arguments with Mockito.
 * Several cases are {@code @Ignore}d pending fixes for github issues #616, #584 and #565.
 */
public class VarargsTest {

    @Rule public MockitoRule mockitoRule = MockitoJUnit.rule();
    @Rule public ExpectedException exception = ExpectedException.none();

    @Captor private ArgumentCaptor<String> captor;
    @Mock private IMethods mock;

    /** Matches only null elements (used with AssertJ's areExactly). */
    private static final Condition<Object> NULL =
            new Condition<Object>() {
                @Override
                public boolean matches(Object value) {
                    return value == null;
                }
            };

    @Test
    public void shouldMatchVarArgs_noArgs() {
        mock.varargs();

        verify(mock).varargs();
    }

    @Test
    @Ignore("This test must succeed but it fails currently, see github issue #616")
    public void shouldMatchEmptyVarArgs_noArgsIsNotNull() {
        mock.varargs();

        verify(mock).varargs(isNotNull());
    }

    @Test
    @Ignore("This test must succeed but it fails currently, see github issue #616")
    public void shouldMatchEmptyVarArgs_noArgsIsNull() {
        mock.varargs();

        verify(mock).varargs(isNull());
    }

    @Test
    @Ignore("This test must succeed but it fails currently, see github issue #616")
    public void shouldMatchEmptyVarArgs_noArgsIsNotNullArray() {
        mock.varargs();

        verify(mock).varargs((String[]) isNotNull());
    }

    @Test
    public void shouldMatchVarArgs_oneNullArg_eqNull() {
        Object arg = null;
        mock.varargs(arg);

        verify(mock).varargs(ArgumentMatchers.<Object[]>eq(null));
    }

    @Test
    public void shouldMatchVarArgs_oneNullArg_isNull() {
        Object arg = null;
        mock.varargs(arg);

        verify(mock).varargs(ArgumentMatchers.<Object[]>isNull());
    }

    @Test
    public void shouldMatchVarArgs_nullArrayArg() {
        Object[] argArray = null;
        mock.varargs(argArray);

        verify(mock).varargs(ArgumentMatchers.<Object[]>isNull());
    }

    @Test
    public void shouldNotMatchVarArgs_twoArgsOneMatcher() {
        mock.varargs("1", "1");

        exception.expectMessage("Argument(s) are different");

        verify(mock).varargs(eq("1"));
    }

    @Test
    public void shouldMatchVarArgs_emptyVarArgsOneAnyMatcher() {
        mock.varargs();

        verify(mock).varargs((String[]) any()); // any() -> VarargMatcher
    }

    @Test
    public void shouldMatchVarArgs_oneArgsOneAnyMatcher() {
        mock.varargs(1);

        verify(mock).varargs(ArgumentMatchers.<Object[]>any()); // any() -> VarargMatcher
    }

    @Test
    public void shouldMatchVarArgs_twoArgsOneAnyMatcher() {
        mock.varargs(1, 2);

        verify(mock).varargs(ArgumentMatchers.<Object[]>any()); // any() -> VarargMatcher
    }

    @Test
    public void shouldMatchVarArgs_twoArgsTwoAnyMatcher() {
        mock.varargs(1, 2);

        verify(mock).varargs(any(), ArgumentMatchers.<Object>any()); // any() -> VarargMatcher
    }

    @Test
    public void shouldMatchVarArgs_twoArgsThreeAnyMatcher() {
        mock.varargs(1, 2);

        exception.expectMessage("Argument(s) are different");

        verify(mock).varargs(any(), any(), any()); // any() -> VarargMatcher
    }

    @Test
    public void shouldMatchVarArgs_oneNullArgument() {
        mock.varargs("1", null);

        verify(mock).varargs(eq("1"), (String) isNull());
    }

    @Test
    public void shouldMatchVarArgs_onebyte() {
        mock.varargsbyte((byte) 1);

        verify(mock).varargsbyte(eq((byte) 1));
    }

    @Test
    public void shouldMatchVarArgs_nullByteArray() {
        mock.varargsbyte(null);

        verify(mock).varargsbyte((byte[]) isNull());
    }

    @Test
    public void shouldMatchVarArgs_emptyByteArray() {
        mock.varargsbyte();

        verify(mock).varargsbyte();
    }

    @Test
    @Ignore
    public void shouldMatchEmptyVarArgs_emptyArrayIsNotNull() {
        mock.varargsbyte();

        verify(mock).varargsbyte((byte[]) isNotNull());
    }

    @Test
    public void shouldMatchVarArgs_oneArgIsNotNull() {
        mock.varargsbyte((byte) 1);

        verify(mock).varargsbyte((byte[]) isNotNull());
    }

    @Test
    public void shouldCaptureVarArgs_noArgs() {
        mock.varargs();

        verify(mock).varargs(captor.capture());

        assertThat(captor).isEmpty();
    }

    @Test
    public void shouldCaptureVarArgs_oneNullArg_eqNull() {
        String arg = null;
        mock.varargs(arg);

        verify(mock).varargs(captor.capture());

        assertThat(captor).areExactly(1, NULL);
    }

    /**
     * Relates to Github issue #583 "ArgumentCaptor: NPE when a null array is
     * passed to a varargs method"
     */
    @Test
    public void shouldCaptureVarArgs_nullArrayArg() {
        String[] argArray = null;
        mock.varargs(argArray);

        verify(mock).varargs(captor.capture());

        assertThat(captor).areExactly(1, NULL);
    }

    @Test
    public void shouldCaptureVarArgs_twoArgsOneCapture() {
        mock.varargs("1", "2");

        verify(mock).varargs(captor.capture());

        assertThat(captor).contains("1", "2");
    }

    @Test
    public void shouldCaptureVarArgs_twoArgsTwoCaptures() {
        mock.varargs("1", "2");

        verify(mock).varargs(captor.capture(), captor.capture());

        assertThat(captor).contains("1", "2");
    }

    @Test
    public void shouldCaptureVarArgs_oneNullArgument() {
        mock.varargs("1", null);

        verify(mock).varargs(captor.capture());

        assertThat(captor).contains("1", (String) null);
    }

    @Test
    public void shouldCaptureVarArgs_oneNullArgument2() {
        mock.varargs("1", null);

        verify(mock).varargs(captor.capture(), captor.capture());

        assertThat(captor).contains("1", (String) null);
    }

    @Test
    public void shouldNotCaptureVarArgs_3args2captures() {
        mock.varargs("1", "2", "3");

        exception.expect(ArgumentsAreDifferent.class);

        verify(mock).varargs(captor.capture(), captor.capture());
    }

    @Test
    public void shouldCaptureVarArgs_3argsCaptorMatcherMix() {
        mock.varargs("1", "2", "3");

        verify(mock).varargs(captor.capture(), eq("2"), captor.capture());

        assertThat(captor).containsExactly("1", "3");
    }

    @Test
    public void shouldNotCaptureVarArgs_3argsCaptorMatcherMix() {
        mock.varargs("1", "2", "3");

        try {
            verify(mock).varargs(captor.capture(), eq("X"), captor.capture());
            fail("The verification must fail, cause the second arg was not 'X' as expected!");
        } catch (ArgumentsAreDifferent expected) {
            // expected: 'X' does not match the recorded '2'
        }

        assertThat(captor).isEmpty();
    }

    @Test
    public void shouldNotCaptureVarArgs_1args2captures() {
        mock.varargs("1");

        exception.expect(ArgumentsAreDifferent.class);

        verify(mock).varargs(captor.capture(), captor.capture());
    }

    /**
     * As of v2.0.0-beta.118 this test fails. Once the github issues:
     * <ul>
     * <li>'#584 ArgumentCaptor can't capture varargs-arrays
     * <li>#565 ArgumentCaptor should be type aware' are fixed this test must
     * succeed
     * </ul>
     */
    @Test
    @Ignore("Blocked by github issue: #584 & #565")
    public void shouldCaptureVarArgsAsArray() {
        mock.varargs("1", "2");

        ArgumentCaptor<String[]> varargCaptor = ArgumentCaptor.forClass(String[].class);

        verify(mock).varargs(varargCaptor.capture());

        assertThat(varargCaptor).containsExactly(new String[] {"1", "2"});
    }

    @Test
    public void shouldNotMatchRegularAndVarArgs() {
        mock.varargsString(1, "a", "b");

        exception.expect(ArgumentsAreDifferent.class);

        verify(mock).varargsString(1);
    }

    @Test
    public void shouldNotMatchVarArgs() {
        when(mock.varargsObject(1, "a", "b")).thenReturn("OK");

        Assertions.assertThat(mock.varargsObject(1)).isNull();
    }

    /** Shortcut: assert over all values recorded by the captor. */
    private static <T> AbstractListAssert<?, ?, T, ObjectAssert<T>> assertThat(
            ArgumentCaptor<T> captor) {
        return Assertions.assertThat(captor.getAllValues());
    }
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: pollservice.proto
package edu.sjsu.cmpe273.lab2;
/**
* Protobuf type {@code edu.sjsu.cmpe273.lab2.PollRequest}
*/
public final class PollRequest extends
com.google.protobuf.GeneratedMessage implements
// @@protoc_insertion_point(message_implements:edu.sjsu.cmpe273.lab2.PollRequest)
PollRequestOrBuilder {
  // Builder-based constructor; generated code, do not edit.
  // Use PollRequest.newBuilder() to construct.
  private PollRequest(com.google.protobuf.GeneratedMessage.Builder builder) {
    super(builder);
  }
private PollRequest() {
moderatorId_ = "";
question_ = "";
startedAt_ = "";
expiredAt_ = "";
choice_ = com.google.protobuf.LazyStringArrayList.EMPTY;
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return com.google.protobuf.UnknownFieldSet.getDefaultInstance();
}
private PollRequest(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
int mutable_bitField0_ = 0;
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!input.skipField(tag)) {
done = true;
}
break;
}
case 10: {
com.google.protobuf.ByteString bs = input.readBytes();
moderatorId_ = bs;
break;
}
case 18: {
com.google.protobuf.ByteString bs = input.readBytes();
question_ = bs;
break;
}
case 26: {
com.google.protobuf.ByteString bs = input.readBytes();
startedAt_ = bs;
break;
}
case 34: {
com.google.protobuf.ByteString bs = input.readBytes();
expiredAt_ = bs;
break;
}
case 42: {
com.google.protobuf.ByteString bs = input.readBytes();
if (!((mutable_bitField0_ & 0x00000010) == 0x00000010)) {
choice_ = new com.google.protobuf.LazyStringArrayList();
mutable_bitField0_ |= 0x00000010;
}
choice_.add(bs);
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000010) == 0x00000010)) {
choice_ = choice_.getUnmodifiableView();
}
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return edu.sjsu.cmpe273.lab2.PollServiceProto.internal_static_edu_sjsu_cmpe273_lab2_PollRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return edu.sjsu.cmpe273.lab2.PollServiceProto.internal_static_edu_sjsu_cmpe273_lab2_PollRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
edu.sjsu.cmpe273.lab2.PollRequest.class, edu.sjsu.cmpe273.lab2.PollRequest.Builder.class);
}
public static final com.google.protobuf.Parser<PollRequest> PARSER =
new com.google.protobuf.AbstractParser<PollRequest>() {
public PollRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new PollRequest(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<PollRequest> getParserForType() {
return PARSER;
}
private int bitField0_;
public static final int MODERATORID_FIELD_NUMBER = 1;
private java.lang.Object moderatorId_;
/**
* <code>optional string moderatorId = 1;</code>
*/
public java.lang.String getModeratorId() {
java.lang.Object ref = moderatorId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
moderatorId_ = s;
}
return s;
}
}
/**
* <code>optional string moderatorId = 1;</code>
*/
public com.google.protobuf.ByteString
getModeratorIdBytes() {
java.lang.Object ref = moderatorId_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
moderatorId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int QUESTION_FIELD_NUMBER = 2;
private java.lang.Object question_;
/**
* <code>optional string question = 2;</code>
*/
public java.lang.String getQuestion() {
java.lang.Object ref = question_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
question_ = s;
}
return s;
}
}
/**
* <code>optional string question = 2;</code>
*/
public com.google.protobuf.ByteString
getQuestionBytes() {
java.lang.Object ref = question_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
question_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int STARTEDAT_FIELD_NUMBER = 3;
private java.lang.Object startedAt_;
/**
* <code>optional string startedAt = 3;</code>
*/
public java.lang.String getStartedAt() {
java.lang.Object ref = startedAt_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
startedAt_ = s;
}
return s;
}
}
/**
* <code>optional string startedAt = 3;</code>
*/
public com.google.protobuf.ByteString
getStartedAtBytes() {
java.lang.Object ref = startedAt_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
startedAt_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int EXPIREDAT_FIELD_NUMBER = 4;
private java.lang.Object expiredAt_;
/**
* <code>optional string expiredAt = 4;</code>
*/
public java.lang.String getExpiredAt() {
java.lang.Object ref = expiredAt_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
expiredAt_ = s;
}
return s;
}
}
/**
* <code>optional string expiredAt = 4;</code>
*/
public com.google.protobuf.ByteString
getExpiredAtBytes() {
java.lang.Object ref = expiredAt_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
expiredAt_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int CHOICE_FIELD_NUMBER = 5;
private com.google.protobuf.LazyStringList choice_;
/**
* <code>repeated string choice = 5;</code>
*/
public com.google.protobuf.ProtocolStringList
getChoiceList() {
return choice_;
}
/**
* <code>repeated string choice = 5;</code>
*/
public int getChoiceCount() {
return choice_.size();
}
/**
* <code>repeated string choice = 5;</code>
*/
public java.lang.String getChoice(int index) {
return choice_.get(index);
}
/**
* <code>repeated string choice = 5;</code>
*/
public com.google.protobuf.ByteString
getChoiceBytes(int index) {
return choice_.getByteString(index);
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (!getModeratorIdBytes().isEmpty()) {
output.writeBytes(1, getModeratorIdBytes());
}
if (!getQuestionBytes().isEmpty()) {
output.writeBytes(2, getQuestionBytes());
}
if (!getStartedAtBytes().isEmpty()) {
output.writeBytes(3, getStartedAtBytes());
}
if (!getExpiredAtBytes().isEmpty()) {
output.writeBytes(4, getExpiredAtBytes());
}
for (int i = 0; i < choice_.size(); i++) {
output.writeBytes(5, choice_.getByteString(i));
}
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (!getModeratorIdBytes().isEmpty()) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(1, getModeratorIdBytes());
}
if (!getQuestionBytes().isEmpty()) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(2, getQuestionBytes());
}
if (!getStartedAtBytes().isEmpty()) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(3, getStartedAtBytes());
}
if (!getExpiredAtBytes().isEmpty()) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(4, getExpiredAtBytes());
}
{
int dataSize = 0;
for (int i = 0; i < choice_.size(); i++) {
dataSize += com.google.protobuf.CodedOutputStream
.computeBytesSizeNoTag(choice_.getByteString(i));
}
size += dataSize;
size += 1 * getChoiceList().size();
}
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
public static edu.sjsu.cmpe273.lab2.PollRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static edu.sjsu.cmpe273.lab2.PollRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static edu.sjsu.cmpe273.lab2.PollRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static edu.sjsu.cmpe273.lab2.PollRequest parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static edu.sjsu.cmpe273.lab2.PollRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static edu.sjsu.cmpe273.lab2.PollRequest parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static edu.sjsu.cmpe273.lab2.PollRequest parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static edu.sjsu.cmpe273.lab2.PollRequest parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static edu.sjsu.cmpe273.lab2.PollRequest parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static edu.sjsu.cmpe273.lab2.PollRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return new Builder(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(edu.sjsu.cmpe273.lab2.PollRequest prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code edu.sjsu.cmpe273.lab2.PollRequest}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:edu.sjsu.cmpe273.lab2.PollRequest)
edu.sjsu.cmpe273.lab2.PollRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return edu.sjsu.cmpe273.lab2.PollServiceProto.internal_static_edu_sjsu_cmpe273_lab2_PollRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return edu.sjsu.cmpe273.lab2.PollServiceProto.internal_static_edu_sjsu_cmpe273_lab2_PollRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
edu.sjsu.cmpe273.lab2.PollRequest.class, edu.sjsu.cmpe273.lab2.PollRequest.Builder.class);
}
// Construct using edu.sjsu.cmpe273.lab2.PollRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
}
}
public Builder clear() {
super.clear();
moderatorId_ = "";
question_ = "";
startedAt_ = "";
expiredAt_ = "";
choice_ = com.google.protobuf.LazyStringArrayList.EMPTY;
bitField0_ = (bitField0_ & ~0x00000010);
return this;
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return edu.sjsu.cmpe273.lab2.PollServiceProto.internal_static_edu_sjsu_cmpe273_lab2_PollRequest_descriptor;
}
public edu.sjsu.cmpe273.lab2.PollRequest getDefaultInstanceForType() {
return edu.sjsu.cmpe273.lab2.PollRequest.getDefaultInstance();
}
public edu.sjsu.cmpe273.lab2.PollRequest build() {
edu.sjsu.cmpe273.lab2.PollRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public edu.sjsu.cmpe273.lab2.PollRequest buildPartial() {
edu.sjsu.cmpe273.lab2.PollRequest result = new edu.sjsu.cmpe273.lab2.PollRequest(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
result.moderatorId_ = moderatorId_;
result.question_ = question_;
result.startedAt_ = startedAt_;
result.expiredAt_ = expiredAt_;
if (((bitField0_ & 0x00000010) == 0x00000010)) {
choice_ = choice_.getUnmodifiableView();
bitField0_ = (bitField0_ & ~0x00000010);
}
result.choice_ = choice_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof edu.sjsu.cmpe273.lab2.PollRequest) {
return mergeFrom((edu.sjsu.cmpe273.lab2.PollRequest)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(edu.sjsu.cmpe273.lab2.PollRequest other) {
if (other == edu.sjsu.cmpe273.lab2.PollRequest.getDefaultInstance()) return this;
if (!other.getModeratorId().isEmpty()) {
moderatorId_ = other.moderatorId_;
onChanged();
}
if (!other.getQuestion().isEmpty()) {
question_ = other.question_;
onChanged();
}
if (!other.getStartedAt().isEmpty()) {
startedAt_ = other.startedAt_;
onChanged();
}
if (!other.getExpiredAt().isEmpty()) {
expiredAt_ = other.expiredAt_;
onChanged();
}
if (!other.choice_.isEmpty()) {
if (choice_.isEmpty()) {
choice_ = other.choice_;
bitField0_ = (bitField0_ & ~0x00000010);
} else {
ensureChoiceIsMutable();
choice_.addAll(other.choice_);
}
onChanged();
}
onChanged();
return this;
}
public final boolean isInitialized() {
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
edu.sjsu.cmpe273.lab2.PollRequest parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (edu.sjsu.cmpe273.lab2.PollRequest) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
private java.lang.Object moderatorId_ = "";
/**
* <code>optional string moderatorId = 1;</code>
*/
public java.lang.String getModeratorId() {
java.lang.Object ref = moderatorId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
moderatorId_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <code>optional string moderatorId = 1;</code>
*/
public com.google.protobuf.ByteString
getModeratorIdBytes() {
java.lang.Object ref = moderatorId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
moderatorId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <code>optional string moderatorId = 1;</code>
*/
public Builder setModeratorId(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
moderatorId_ = value;
onChanged();
return this;
}
/**
* <code>optional string moderatorId = 1;</code>
*/
public Builder clearModeratorId() {
moderatorId_ = getDefaultInstance().getModeratorId();
onChanged();
return this;
}
/**
* <code>optional string moderatorId = 1;</code>
*/
public Builder setModeratorIdBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
moderatorId_ = value;
onChanged();
return this;
}
private java.lang.Object question_ = "";
/**
* <code>optional string question = 2;</code>
*/
public java.lang.String getQuestion() {
java.lang.Object ref = question_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
question_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <code>optional string question = 2;</code>
*/
public com.google.protobuf.ByteString
getQuestionBytes() {
java.lang.Object ref = question_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
question_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <code>optional string question = 2;</code>
*/
public Builder setQuestion(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
question_ = value;
onChanged();
return this;
}
/**
* <code>optional string question = 2;</code>
*/
public Builder clearQuestion() {
question_ = getDefaultInstance().getQuestion();
onChanged();
return this;
}
/**
* <code>optional string question = 2;</code>
*/
public Builder setQuestionBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
question_ = value;
onChanged();
return this;
}
private java.lang.Object startedAt_ = "";
/**
* <code>optional string startedAt = 3;</code>
*/
public java.lang.String getStartedAt() {
java.lang.Object ref = startedAt_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
startedAt_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <code>optional string startedAt = 3;</code>
*/
public com.google.protobuf.ByteString
getStartedAtBytes() {
java.lang.Object ref = startedAt_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
startedAt_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <code>optional string startedAt = 3;</code>
*/
public Builder setStartedAt(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
startedAt_ = value;
onChanged();
return this;
}
/**
* <code>optional string startedAt = 3;</code>
*/
public Builder clearStartedAt() {
startedAt_ = getDefaultInstance().getStartedAt();
onChanged();
return this;
}
/**
* <code>optional string startedAt = 3;</code>
*/
public Builder setStartedAtBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
startedAt_ = value;
onChanged();
return this;
}
private java.lang.Object expiredAt_ = "";
/**
* <code>optional string expiredAt = 4;</code>
*/
public java.lang.String getExpiredAt() {
java.lang.Object ref = expiredAt_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
expiredAt_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <code>optional string expiredAt = 4;</code>
*/
public com.google.protobuf.ByteString
getExpiredAtBytes() {
java.lang.Object ref = expiredAt_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
expiredAt_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <code>optional string expiredAt = 4;</code>
*/
public Builder setExpiredAt(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
expiredAt_ = value;
onChanged();
return this;
}
/**
* <code>optional string expiredAt = 4;</code>
*/
public Builder clearExpiredAt() {
expiredAt_ = getDefaultInstance().getExpiredAt();
onChanged();
return this;
}
/**
* <code>optional string expiredAt = 4;</code>
*/
public Builder setExpiredAtBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
expiredAt_ = value;
onChanged();
return this;
}
private com.google.protobuf.LazyStringList choice_ = com.google.protobuf.LazyStringArrayList.EMPTY;
private void ensureChoiceIsMutable() {
if (!((bitField0_ & 0x00000010) == 0x00000010)) {
choice_ = new com.google.protobuf.LazyStringArrayList(choice_);
bitField0_ |= 0x00000010;
}
}
/**
* <code>repeated string choice = 5;</code>
*/
public com.google.protobuf.ProtocolStringList
getChoiceList() {
return choice_.getUnmodifiableView();
}
/**
* <code>repeated string choice = 5;</code>
*/
public int getChoiceCount() {
return choice_.size();
}
/**
* <code>repeated string choice = 5;</code>
*/
public java.lang.String getChoice(int index) {
return choice_.get(index);
}
/**
* <code>repeated string choice = 5;</code>
*/
public com.google.protobuf.ByteString
getChoiceBytes(int index) {
return choice_.getByteString(index);
}
/**
* <code>repeated string choice = 5;</code>
*/
public Builder setChoice(
int index, java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureChoiceIsMutable();
choice_.set(index, value);
onChanged();
return this;
}
/**
* <code>repeated string choice = 5;</code>
*/
public Builder addChoice(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureChoiceIsMutable();
choice_.add(value);
onChanged();
return this;
}
/**
* <code>repeated string choice = 5;</code>
*/
public Builder addAllChoice(
java.lang.Iterable<java.lang.String> values) {
ensureChoiceIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(
values, choice_);
onChanged();
return this;
}
/**
* <code>repeated string choice = 5;</code>
*/
public Builder clearChoice() {
choice_ = com.google.protobuf.LazyStringArrayList.EMPTY;
bitField0_ = (bitField0_ & ~0x00000010);
onChanged();
return this;
}
/**
* <code>repeated string choice = 5;</code>
*/
public Builder addChoiceBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
ensureChoiceIsMutable();
choice_.add(value);
onChanged();
return this;
}
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return this;
}
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return this;
}
// @@protoc_insertion_point(builder_scope:edu.sjsu.cmpe273.lab2.PollRequest)
}
// @@protoc_insertion_point(class_scope:edu.sjsu.cmpe273.lab2.PollRequest)
private static final edu.sjsu.cmpe273.lab2.PollRequest defaultInstance;static {
defaultInstance = new edu.sjsu.cmpe273.lab2.PollRequest();
}
public static edu.sjsu.cmpe273.lab2.PollRequest getDefaultInstance() {
return defaultInstance;
}
public edu.sjsu.cmpe273.lab2.PollRequest getDefaultInstanceForType() {
return defaultInstance;
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.