gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/**
* Copyright 5AM Solutions Inc, ESAC, ScenPro & SAIC
*
* Distributed under the OSI-approved BSD 3-Clause License.
* See http://ncip.github.com/caintegrator/LICENSE.txt for details.
*/
package gov.nih.nci.caintegrator.web.action.query.form;
import gov.nih.nci.caintegrator.domain.application.AbstractGenomicCriterion;
import gov.nih.nci.caintegrator.domain.application.FoldChangeCriterion;
import gov.nih.nci.caintegrator.domain.application.GenomicCriterionTypeEnum;
import gov.nih.nci.caintegrator.domain.application.RegulationTypeEnum;
import gov.nih.nci.caintegrator.domain.translational.Study;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.math.NumberUtils;
import com.opensymphony.xwork2.ValidationAware;
/**
 * Wraps access to a single <code>FoldChangeCriterion</code>.
 *
 * <p>The criterion is exposed to the query form as a row of parameters: the
 * default genomic parameters, a "Control Sample Set" selector, a
 * "Regulation Type" selector and, depending on the selected regulation type,
 * one or two numeric folds text fields.
 */
class FoldChangeCriterionWrapper extends AbstractGenomicCriterionWrapper {

    private static final int NUMBER_OF_MAX_PARAMETERS = 5;
    private static final int NUMBER_OF_MAX_PARAMETERS_MULTIPLE_PLATFORMS = NUMBER_OF_MAX_PARAMETERS + 1;
    private static final float DEFAULT_FOLDS = 2.0f;
    private static final Float DEFAULT_FOLDS_UNCHANGED_DOWN = 0.8f;
    private static final Float DEFAULT_FOLDS_UNCHANGED_UP = 1.2f;
    private static final String CONTROL_SAMPLE_SET_LABEL = "Control Sample Set";
    private static final String REGULATION_TYPE_LABEL = "Regulation Type";
    static final String FOLD_CHANGE = "Fold Change";

    // Position of the control sample set parameter within getParameters();
    // recorded the first time the parameter is created so it can later be
    // replaced in place (see updateControlParameters()).
    private Integer controlParameterIndex = null;
    private final Study study;
    private final FoldChangeCriterion criterion;

    FoldChangeCriterionWrapper(Study study, GeneExpressionCriterionRow row) {
        this(study, new FoldChangeCriterion(), row);
    }

    @SuppressWarnings("PMD.ConstructorCallsOverridableMethod") // bogus error
    FoldChangeCriterionWrapper(Study study, FoldChangeCriterion criterion, GeneExpressionCriterionRow row) {
        super(row);
        this.study = study;
        this.criterion = criterion;
        // A brand-new criterion has no regulation type yet; default it to UP
        // and seed the corresponding default folds values.
        if (criterion.getRegulationType() == null) {
            criterion.setRegulationType(RegulationTypeEnum.UP);
            setCriterionDefaults();
        }
        setupDefaultGenomicParameters(GenomicCriterionTypeEnum.GENE_EXPRESSION);
        getParameters().add(createControlSampleSetParameter());
        getParameters().add(createRegulationTypeParameter());
        addFoldsParameters();
    }

    /**
     * Rebuilds the folds parameters after the regulation type changes:
     * resets the criterion's folds values to the defaults for the new type,
     * drops the old folds fields and appends the new ones.
     */
    private void setUpFoldsParameters() {
        setCriterionDefaults();
        removeExistingFoldsParameters();
        addFoldsParameters();
    }

    @Override
    protected void updateControlParameters() {
        // Replace the control sample set selector in place so its options
        // reflect the currently selected platform.
        if (controlParameterIndex != null) {
            getParameters().remove(getParameters().get(controlParameterIndex));
            getParameters().add(controlParameterIndex, createControlSampleSetParameter());
        }
    }

    /**
     * Appends the folds text field(s) matching the current regulation type:
     * one field for UP or DOWN, a lower and an upper bound for UP_OR_DOWN
     * and UNCHANGED.
     */
    private void addFoldsParameters() {
        switch (criterion.getRegulationType()) {
            case UP:
                getParameters().add(createFoldsUpParameter());
                break;
            case DOWN:
                getParameters().add(createFoldsDownParameter());
                break;
            case UP_OR_DOWN:
            case UNCHANGED:
                getParameters().add(createFoldsDownParameter());
                getParameters().add(createFoldsUpParameter());
                break;
            default:
                break;
        }
    }

    /**
     * Removes the one or two trailing folds parameters. The two ifs cascade
     * deliberately: when the list is at its maximum size (two folds fields
     * present), the first removal shrinks it so the second if also fires and
     * removes the remaining folds field.
     */
    private void removeExistingFoldsParameters() {
        if (getParameters().size() == getNumberMaxParameters()) {
            getParameters().remove(getNumberMaxParameters() - 1);
        }
        if (getParameters().size() == getNumberMaxParameters() - 1) {
            getParameters().remove(getNumberMaxParameters() - 2);
        }
    }

    // Studies with multiple gene expression platforms carry one extra
    // (platform selector) parameter in the row.
    private int getNumberMaxParameters() {
        return isStudyHasMultipleGeneExpressionPlatforms()
                ? NUMBER_OF_MAX_PARAMETERS_MULTIPLE_PLATFORMS : NUMBER_OF_MAX_PARAMETERS;
    }

    /**
     * Seeds the criterion's folds values with the defaults for its current
     * regulation type (2.0 for regulated types; the 0.8-1.2 band for
     * UNCHANGED).
     */
    private void setCriterionDefaults() {
        switch (criterion.getRegulationType()) {
            case UP:
                criterion.setFoldsUp(DEFAULT_FOLDS);
                break;
            case DOWN:
                criterion.setFoldsDown(DEFAULT_FOLDS);
                break;
            case UP_OR_DOWN:
                criterion.setFoldsUp(DEFAULT_FOLDS);
                criterion.setFoldsDown(DEFAULT_FOLDS);
                break;
            case UNCHANGED:
                criterion.setFoldsDown(DEFAULT_FOLDS_UNCHANGED_DOWN);
                criterion.setFoldsUp(DEFAULT_FOLDS_UNCHANGED_UP);
                break;
            default:
                break;
        }
    }

    /**
     * Builds the "Control Sample Set" drop-down. When the study has multiple
     * gene expression platforms, the option list is restricted to the sample
     * sets of the criterion's platform ("UNKNOWN" when none selected yet).
     */
    private SelectListParameter<String> createControlSampleSetParameter() {
        OptionList<String> options = new OptionList<String>();
        String platformNameToUse = null;
        if (isStudyHasMultipleGeneExpressionPlatforms()) {
            platformNameToUse = StringUtils.isBlank(criterion.getPlatformName())
                    ? "UNKNOWN" : criterion.getPlatformName();
        }
        for (String name : study.getStudyConfiguration().getControlSampleSetNames(platformNameToUse)) {
            options.addOption(name, name);
        }
        ValueSelectedHandler<String> handler = new ValueSelectedHandler<String>() {
            @Override
            public void valueSelected(String value) {
                criterion.setControlSampleSetName(value);
            }
        };
        // Remember the position only the first time; later rebuilds reuse it
        // so updateControlParameters() can swap the parameter in place.
        if (controlParameterIndex == null) {
            controlParameterIndex = getParameters().size();
        }
        SelectListParameter<String> controlSampleSetNameParameter =
                new SelectListParameter<String>(controlParameterIndex, getRow().getRowIndex(),
                        options, handler, criterion.getControlSampleSetName());
        controlSampleSetNameParameter.setLabel(CONTROL_SAMPLE_SET_LABEL);
        controlSampleSetNameParameter.setUpdateFormOnChange(false);
        return controlSampleSetNameParameter;
    }

    /**
     * Builds the "Regulation Type" drop-down. Selecting a value updates the
     * criterion and rebuilds the folds fields, so the form must refresh on
     * change.
     */
    private SelectListParameter<RegulationTypeEnum> createRegulationTypeParameter() {
        OptionList<RegulationTypeEnum> options = new OptionList<RegulationTypeEnum>();
        options.addOption(RegulationTypeEnum.UP.getValue(), RegulationTypeEnum.UP);
        options.addOption(RegulationTypeEnum.DOWN.getValue(), RegulationTypeEnum.DOWN);
        options.addOption(RegulationTypeEnum.UP_OR_DOWN.getValue(), RegulationTypeEnum.UP_OR_DOWN);
        options.addOption(RegulationTypeEnum.UNCHANGED.getValue(), RegulationTypeEnum.UNCHANGED);
        ValueSelectedHandler<RegulationTypeEnum> handler = new ValueSelectedHandler<RegulationTypeEnum>() {
            @Override
            public void valueSelected(RegulationTypeEnum value) {
                criterion.setRegulationType(value);
                setUpFoldsParameters();
            }
        };
        SelectListParameter<RegulationTypeEnum> regulationTypeParameter =
                new SelectListParameter<RegulationTypeEnum>(getParameters().size(), getRow().getRowIndex(),
                        options, handler, criterion.getRegulationType());
        regulationTypeParameter.setLabel(REGULATION_TYPE_LABEL);
        regulationTypeParameter.setUpdateFormOnChange(true);
        return regulationTypeParameter;
    }

    // Lower-bound folds field; labelled "Folds between" when it is the lower
    // edge of the UNCHANGED band.
    private TextFieldParameter createFoldsDownParameter() {
        String label = RegulationTypeEnum.UNCHANGED.equals(criterion.getRegulationType())
                ? "Folds between" : "Down-regulation folds";
        return createFoldsParameter(label, criterion.getFoldsDown().toString(), false);
    }

    // Upper-bound folds field; labelled "And" when it is the upper edge of
    // the UNCHANGED band.
    private TextFieldParameter createFoldsUpParameter() {
        String label = RegulationTypeEnum.UNCHANGED.equals(criterion.getRegulationType())
                ? "And" : "Up-regulation folds";
        return createFoldsParameter(label, criterion.getFoldsUp().toString(), true);
    }

    /**
     * Shared factory for the numeric folds text fields (previously duplicated
     * in the up/down factories).
     *
     * @param label display label, also used in the validation error message
     * @param initialValue current folds value rendered as text
     * @param up true to store changes via setFoldsUp, false via setFoldsDown
     * @return the configured text field parameter
     */
    private TextFieldParameter createFoldsParameter(final String label, String initialValue, final boolean up) {
        TextFieldParameter foldsParameter =
                new TextFieldParameter(getParameters().size(), getRow().getRowIndex(), initialValue);
        foldsParameter.setLabel(label);
        foldsParameter.setValueHandler(new ValueHandlerAdapter() {
            @Override
            public boolean isValid(String value) {
                return NumberUtils.isNumber(value);
            }

            @Override
            public void validate(String formFieldName, String value, ValidationAware action) {
                if (!isValid(value)) {
                    action.addActionError("Numeric value required for " + label);
                }
            }

            @Override
            public void valueChanged(String value) {
                if (up) {
                    criterion.setFoldsUp(Float.valueOf(value));
                } else {
                    criterion.setFoldsDown(Float.valueOf(value));
                }
            }
        });
        return foldsParameter;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    AbstractGenomicCriterion getAbstractGenomicCriterion() {
        return criterion;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    String getFieldName() {
        return FOLD_CHANGE;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    CriterionTypeEnum getCriterionType() {
        return CriterionTypeEnum.FOLD_CHANGE;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    protected boolean platformParameterUpdateOnChange() {
        return true;
    }
}
| |
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.skyframe;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.graph.GraphBuilder;
import com.google.common.graph.ImmutableGraph;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.devtools.build.lib.concurrent.ThreadSafety.ThreadSafe;
import com.google.devtools.build.lib.events.ExtendedEventHandler;
import com.google.devtools.build.lib.util.GroupedList;
import java.util.List;
import java.util.Map;
import javax.annotation.Nullable;
/**
 * Machinery to evaluate a single value.
 *
 * <p>The SkyFunction {@link #compute} implementation is supposed to access only direct dependencies
 * of the value. However, the direct dependencies need not be known in advance. The implementation
 * can request arbitrary values using {@link Environment#getValue}. If the values are not ready, the
 * call will return {@code null}; in that case the implementation should just return {@code null},
 * in which case the missing dependencies will be computed and the {@link #compute} method will be
 * started again.
 */
public interface SkyFunction {
  /**
   * When a value is requested, this method is called with the name of the value and a
   * dependency-tracking environment.
   *
   * <p>This method should return a non-{@code null} value, or {@code null} if any dependencies were
   * missing ({@link Environment#valuesMissing} was true before returning). In that case the missing
   * dependencies will be computed and the {@code compute} method called again.
   *
   * <p>This method should throw if it fails, or if one of its dependencies fails with an exception
   * and this method cannot recover. If one of its dependencies fails and this method can enrich the
   * exception with additional context, then this method should catch that exception and throw
   * another containing that additional context. If it has no such additional context, then it
   * should allow its dependency's exception to be thrown through it.
   *
   * <p>This method may return {@link Restart} in rare circumstances. See its docs. Do not return
   * values of this type unless you know exactly what you are doing.
   *
   * <p>If version information is discovered for the given {@code skyKey}, {@link
   * Environment#injectVersionForNonHermeticFunction(Version)} may be called on {@code env}.
   *
   * @throws SkyFunctionException on failure
   * @throws InterruptedException if interrupted
   */
  @ThreadSafe
  @Nullable
  SkyValue compute(SkyKey skyKey, Environment env)
      throws SkyFunctionException, InterruptedException;

  /**
   * Extracts a tag (target label) from a SkyKey if it has one. Otherwise return {@code null}.
   *
   * <p>The tag is used for filtering out non-error event messages that do not match --output_filter
   * flag. If a SkyFunction returns {@code null} in this method it means that all the info/warning
   * messages associated with this value will be shown, no matter what --output_filter says.
   */
  @Nullable
  String extractTag(SkyKey skyKey);

  /**
   * Sentinel {@link SkyValue} type for {@link #compute} to return, indicating that something went
   * wrong, and that the evaluation returning this value must be restarted, and the nodes associated
   * with other keys in {@link #rewindGraph()} (whose directed edges should correspond to the nodes'
   * direct dependencies) must also be restarted.
   *
   * <p>An intended cause for returning this is external data loss; e.g., if a dependency's
   * "done-ness" is intended to mean that certain data is available in an external system, but
   * during evaluation of a node that depends on that external data, that data has gone missing, and
   * reevaluation of the dependency is expected to repair the discrepancy.
   *
   * <p>Values of this type will <em>never</em> be returned by {@link Environment}'s getValue
   * methods or from {@link NodeEntry#getValue()}.
   *
   * <p>All {@link ListenableFuture}s used in calls to {@link Environment#dependOnFuture} which were
   * not already complete will be cancelled.
   *
   * <p>This may only be returned by {@link #compute} if {@link Environment#restartPermitted} is
   * true. If restarting is not permitted, {@link #compute} should throw an appropriate {@link
   * SkyFunctionException}.
   */
  interface Restart extends SkyValue {
    /** Shared empty rewind graph used by {@link #SELF}: directed, with self loops disallowed. */
    ImmutableGraph<SkyKey> EMPTY_SKYKEY_GRAPH =
        ImmutableGraph.copyOf(GraphBuilder.directed().allowsSelfLoops(false).build());

    /** A {@link Restart} that restarts only the node whose evaluation returned it. */
    Restart SELF = () -> EMPTY_SKYKEY_GRAPH;

    /**
     * Returns a {@link Restart} that restarts the returning node along with the nodes in {@code
     * rewindGraph}. The graph must be directed and must not allow self loops.
     */
    static Restart selfAnd(ImmutableGraph<SkyKey> rewindGraph) {
      Preconditions.checkArgument(
          rewindGraph.isDirected(), "rewindGraph undirected: %s", rewindGraph);
      Preconditions.checkArgument(
          !rewindGraph.allowsSelfLoops(), "rewindGraph allows self loops: %s", rewindGraph);
      return () -> rewindGraph;
    }

    /** The additional nodes to restart; directed edges should mirror direct dependencies. */
    ImmutableGraph<SkyKey> rewindGraph();
  }

  /**
   * The services provided to the {@link SkyFunction#compute} implementation by the Skyframe
   * evaluation framework.
   */
  interface Environment {
    /**
     * Returns a direct dependency. If the specified value is not in the set of already evaluated
     * direct dependencies, returns {@code null}. Also returns {@code null} if the specified value
     * has already been evaluated and found to be in error.
     *
     * <p>On a subsequent evaluation, if any of this value's dependencies have changed they will be
     * re-evaluated in the same order as originally requested by the {@code SkyFunction} using this
     * {@code getValue} call (see {@link #getValues} for when preserving the order is not
     * important).
     *
     * <p>This method and the ones below may throw {@link InterruptedException}. Such exceptions
     * must not be caught by the {@link SkyFunction#compute} implementation. Instead, they should be
     * propagated up to the caller of {@link SkyFunction#compute}.
     */
    @Nullable
    SkyValue getValue(SkyKey valueName) throws InterruptedException;

    /**
     * Returns a direct dependency. If the specified value is not in the set of already evaluated
     * direct dependencies, returns {@code null}. If the specified value has already been evaluated
     * and found to be in error, throws the exception coming from the error, so long as the
     * exception is of one of the specified types. SkyFunction implementations may use this method
     * to continue evaluation even if one of their dependencies is in error by catching the thrown
     * exception and proceeding. The caller must specify the exception type(s) that might be thrown
     * using the {@code exceptionClass} argument(s). If the dependency's exception is not an
     * instance of {@code exceptionClass}, {@code null} is returned.
     *
     * <p>The exception class given cannot be a supertype or a subtype of {@link RuntimeException},
     * or a subtype of {@link InterruptedException}. See {@link
     * SkyFunctionException#validateExceptionType} for details.
     */
    @Nullable
    <E extends Exception> SkyValue getValueOrThrow(SkyKey depKey, Class<E> exceptionClass)
        throws E, InterruptedException;

    /** Variant of {@link #getValueOrThrow(SkyKey, Class)} declaring two exception types. */
    @Nullable
    <E1 extends Exception, E2 extends Exception> SkyValue getValueOrThrow(
        SkyKey depKey, Class<E1> exceptionClass1, Class<E2> exceptionClass2)
        throws E1, E2, InterruptedException;

    /** Variant of {@link #getValueOrThrow(SkyKey, Class)} declaring three exception types. */
    @Nullable
    <E1 extends Exception, E2 extends Exception, E3 extends Exception> SkyValue getValueOrThrow(
        SkyKey depKey,
        Class<E1> exceptionClass1,
        Class<E2> exceptionClass2,
        Class<E3> exceptionClass3)
        throws E1, E2, E3, InterruptedException;

    /** Variant of {@link #getValueOrThrow(SkyKey, Class)} declaring four exception types. */
    @Nullable
    <E1 extends Exception, E2 extends Exception, E3 extends Exception, E4 extends Exception>
        SkyValue getValueOrThrow(
            SkyKey depKey,
            Class<E1> exceptionClass1,
            Class<E2> exceptionClass2,
            Class<E3> exceptionClass3,
            Class<E4> exceptionClass4)
            throws E1, E2, E3, E4, InterruptedException;

    /** Variant of {@link #getValueOrThrow(SkyKey, Class)} declaring five exception types. */
    @Nullable
    <
            E1 extends Exception,
            E2 extends Exception,
            E3 extends Exception,
            E4 extends Exception,
            E5 extends Exception>
        SkyValue getValueOrThrow(
            SkyKey depKey,
            Class<E1> exceptionClass1,
            Class<E2> exceptionClass2,
            Class<E3> exceptionClass3,
            Class<E4> exceptionClass4,
            Class<E5> exceptionClass5)
            throws E1, E2, E3, E4, E5, InterruptedException;

    /**
     * Requests {@code depKeys} "in parallel", independent of each others' values. These keys may be
     * thought of as a "dependency group" -- they are requested together by this value.
     *
     * <p>In general, if the result of one getValue call can affect the argument of a later getValue
     * call, the two calls cannot be merged into a single getValues call, since the result of the
     * first call might change on a later evaluation. Inversely, if the result of one getValue call
     * cannot affect the parameters of the next getValue call, the two keys can form a dependency
     * group and the two getValue calls should be merged into one getValues call. In the latter
     * case, if we fail to combine the _multiple_ getValue (or getValues) calls into one _single_
     * getValues call, it would result in multiple dependency groups with an implicit ordering
     * between them. This would unnecessarily cause sequential evaluations of these groups and could
     * impact overall performance.
     *
     * <p>On subsequent evaluations, when checking to see if dependencies require re-evaluation, all
     * the values within one group may be simultaneously checked. A SkyFunction should request a
     * dependency group if checking the deps serially on a subsequent evaluation would take too
     * long, and if the {@link #compute} method would request all deps anyway as long as no earlier
     * deps had changed. SkyFunction.Environment implementations may also choose to request these
     * deps in parallel on the first evaluation, potentially speeding it up.
     *
     * <p>While re-evaluating every value in the group may take longer than re-evaluating just the
     * first one and finding that it has changed, no extra work is done: the contract of the
     * dependency group means that the {@link #compute} method, when called to re-evaluate this
     * value, will request all values in the group again anyway, so they would have to have been
     * built in any case.
     *
     * <p>Example of when to use getValues: A ListProcessor value is built with key inputListRef.
     * The {@link #compute} method first calls getValue(InputList.key(inputListRef)), and retrieves
     * inputList. It then iterates through inputList, calling getValue on each input. Finally, it
     * processes the whole list and returns. Say inputList is (a, b, c). Since the {@link #compute}
     * method will unconditionally call getValue(a), getValue(b), and getValue (c), the {@link
     * #compute} method can instead just call getValues({a, b, c}). If the value is later dirtied
     * the evaluator will evaluate a, b, and c in parallel (assuming the inputList value was
     * unchanged), and re-evaluate the ListProcessor value only if at least one of them was changed.
     * On the other hand, if the InputList changes to be (a, b, d), then the evaluator will see that
     * the first dep has changed, and call the {@link #compute} method to re-evaluate from scratch,
     * without considering the dep group of {a, b, c}.
     *
     * <p>Example of when not to use getValues: A BestMatch value is built with key
     * <potentialMatchesRef, matchCriterion>. The {@link #compute} method first calls
     * getValue(PotentialMatches.key(potentialMatchesRef) and retrieves potentialMatches. It then
     * iterates through potentialMatches, calling getValue on each potential match until it finds
     * one that satisfies matchCriterion. In this case, if potentialMatches is (a, b, c), it would
     * be <i>incorrect</i> to call getValues({a, b, c}), because it is not known yet whether
     * requesting b or c will be necessary -- if a matches, then we will never call b or c.
     *
     * <p>Returns a map, {@code m}. For all {@code k} in {@code depKeys}, {@code m.containsKey(k)}
     * is {@code true}, and, {@code m.get(k) != null} iff the dependency was already evaluated and
     * was not in error.
     */
    Map<SkyKey, SkyValue> getValues(Iterable<? extends SkyKey> depKeys) throws InterruptedException;

    /**
     * Similar to getValues, but instead of returning a {@code Map<SkyKey, SkyValue>}, returns a
     * {@code List<SkyValue>} in the order of the input {@code Iterable<SkyKey>}. b/172462551
     */
    List<SkyValue> getOrderedValues(Iterable<? extends SkyKey> depKeys) throws InterruptedException;

    /**
     * Similar to {@link #getValues} but allows the caller to specify a set of types that are proper
     * subtypes of Exception (see {@link SkyFunctionException} for more details) to find out whether
     * any of the dependencies' evaluations resulted in exceptions of those types. The returned
     * objects may throw when attempting to retrieve their value.
     *
     * <p>Callers should prioritize their responsibility to detect and handle errors in the returned
     * map over their responsibility to return {@code null} if values are missing. This is because
     * in nokeep_going evaluations, an error from a low level dependency is given a chance to be
     * enriched by its reverse-dependencies, if possible. Callers should also prioritize throwing
     * exceptions over checking for {@link InterruptedException}, since during the error-bubbling
     * enrichment process, the SkyFunction is interrupted after it has received the exception to
     * prevent it from doing too much unnecessary work.
     *
     * <p>Returns a map, {@code m}. For all {@code k} in {@code depKeys}, {@code m.get(k) != null}.
     * For all {@code v} such that there is some {@code k} such that {@code m.get(k) == v}, the
     * following is true: {@code v.get() != null} iff the dependency {@code k} was already evaluated
     * and was not in error. {@code v.get()} throws {@code E} iff the dependency {@code k} was
     * already evaluated with an error in the specified set of {@link Exception} types.
     */
    <E extends Exception> Map<SkyKey, ValueOrException<E>> getValuesOrThrow(
        Iterable<? extends SkyKey> depKeys, Class<E> exceptionClass) throws InterruptedException;

    /** Variant of {@link #getValuesOrThrow(Iterable, Class)} declaring two exception types. */
    <E1 extends Exception, E2 extends Exception>
        Map<SkyKey, ValueOrException2<E1, E2>> getValuesOrThrow(
            Iterable<? extends SkyKey> depKeys,
            Class<E1> exceptionClass1,
            Class<E2> exceptionClass2)
            throws InterruptedException;

    /** Variant of {@link #getValuesOrThrow(Iterable, Class)} declaring three exception types. */
    <E1 extends Exception, E2 extends Exception, E3 extends Exception>
        Map<SkyKey, ValueOrException3<E1, E2, E3>> getValuesOrThrow(
            Iterable<? extends SkyKey> depKeys,
            Class<E1> exceptionClass1,
            Class<E2> exceptionClass2,
            Class<E3> exceptionClass3)
            throws InterruptedException;

    /** Variant of {@link #getValuesOrThrow(Iterable, Class)} declaring four exception types. */
    <E1 extends Exception, E2 extends Exception, E3 extends Exception, E4 extends Exception>
        Map<SkyKey, ValueOrException4<E1, E2, E3, E4>> getValuesOrThrow(
            Iterable<? extends SkyKey> depKeys,
            Class<E1> exceptionClass1,
            Class<E2> exceptionClass2,
            Class<E3> exceptionClass3,
            Class<E4> exceptionClass4)
            throws InterruptedException;

    /** Variant of {@link #getValuesOrThrow(Iterable, Class)} declaring five exception types. */
    <
            E1 extends Exception,
            E2 extends Exception,
            E3 extends Exception,
            E4 extends Exception,
            E5 extends Exception>
        Map<SkyKey, ValueOrException5<E1, E2, E3, E4, E5>> getValuesOrThrow(
            Iterable<? extends SkyKey> depKeys,
            Class<E1> exceptionClass1,
            Class<E2> exceptionClass2,
            Class<E3> exceptionClass3,
            Class<E4> exceptionClass4,
            Class<E5> exceptionClass5)
            throws InterruptedException;

    /**
     * Similar to getValuesOrThrow, but instead of returning a {@code Map<SkyKey,
     * ValueOrException>}, returns a {@code List<SkyValue>} in the order of the input {@code
     * Iterable<SkyKey>}.
     */
    <E extends Exception> List<ValueOrException<E>> getOrderedValuesOrThrow(
        Iterable<? extends SkyKey> depKeys, Class<E> exceptionClass) throws InterruptedException;

    /** Variant of {@link #getOrderedValuesOrThrow(Iterable, Class)} for two exception types. */
    <E1 extends Exception, E2 extends Exception>
        List<ValueOrException2<E1, E2>> getOrderedValuesOrThrow(
            Iterable<? extends SkyKey> depKeys,
            Class<E1> exceptionClass1,
            Class<E2> exceptionClass2)
            throws InterruptedException;

    /** Variant of {@link #getOrderedValuesOrThrow(Iterable, Class)} for three exception types. */
    <E1 extends Exception, E2 extends Exception, E3 extends Exception>
        List<ValueOrException3<E1, E2, E3>> getOrderedValuesOrThrow(
            Iterable<? extends SkyKey> depKeys,
            Class<E1> exceptionClass1,
            Class<E2> exceptionClass2,
            Class<E3> exceptionClass3)
            throws InterruptedException;

    /** Variant of {@link #getOrderedValuesOrThrow(Iterable, Class)} for four exception types. */
    <E1 extends Exception, E2 extends Exception, E3 extends Exception, E4 extends Exception>
        List<ValueOrException4<E1, E2, E3, E4>> getOrderedValuesOrThrow(
            Iterable<? extends SkyKey> depKeys,
            Class<E1> exceptionClass1,
            Class<E2> exceptionClass2,
            Class<E3> exceptionClass3,
            Class<E4> exceptionClass4)
            throws InterruptedException;

    /** Variant of {@link #getOrderedValuesOrThrow(Iterable, Class)} for five exception types. */
    <
            E1 extends Exception,
            E2 extends Exception,
            E3 extends Exception,
            E4 extends Exception,
            E5 extends Exception>
        List<ValueOrException5<E1, E2, E3, E4, E5>> getOrderedValuesOrThrow(
            Iterable<? extends SkyKey> depKeys,
            Class<E1> exceptionClass1,
            Class<E2> exceptionClass2,
            Class<E3> exceptionClass3,
            Class<E4> exceptionClass4,
            Class<E5> exceptionClass5)
            throws InterruptedException;

    /**
     * Returns whether there was a previous getValue[s][OrThrow] that indicated a missing
     * dependency. Formally, returns true iff at least one of the following occurred:
     *
     * <ul>
     *   <li>getValue[OrThrow](k[, c]) returned {@code null} for some k
     *   <li>getValues(ks).get(k) == {@code null} for some ks and k such that ks.contains(k)
     *   <li>getValuesOrThrow(ks, c).get(k).get() == {@code null} for some ks and k such that
     *       ks.contains(k)
     * </ul>
     *
     * <p>If this returns true, the {@link SkyFunction} must return {@code null} or throw a {@link
     * SkyFunctionException} if it detected an error even with values missing.
     */
    boolean valuesMissing();

    /**
     * Returns the {@link ExtendedEventHandler} that a SkyFunction should use to print any errors,
     * warnings, or progress messages during execution of {@link SkyFunction#compute}.
     */
    ExtendedEventHandler getListener();

    /**
     * A live view of deps known to have already been requested either through an earlier call to
     * {@link SkyFunction#compute} or inferred during change pruning. Should return {@code null} if
     * unknown. Only for special use cases: do not use in general unless you know exactly what
     * you're doing!
     */
    @Nullable
    default GroupedList<SkyKey> getTemporaryDirectDeps() {
      return null;
    }

    /**
     * Injects non-hermetic {@link Version} information for this environment.
     *
     * <p>This may be called during the course of {@link SkyFunction#compute(SkyKey, Environment)}
     * if the function discovers version information for the {@link SkyKey}.
     *
     * <p>Environments that either do not need or wish to ignore non-hermetic version information
     * may keep the default no-op implementation.
     */
    default void injectVersionForNonHermeticFunction(Version version) {}

    /**
     * Register dependencies on keys without necessarily requiring their values.
     *
     * <p>WARNING: Dependencies here MUST be done! Only use this function if you know what you're
     * doing.
     *
     * <p>If the {@link EvaluationVersionBehavior} is {@link
     * EvaluationVersionBehavior#MAX_CHILD_VERSIONS} then this method may fall back to just doing a
     * {@link #getValues} call internally. Thus, any graph evaluations that require this method to
     * be performant <i>must</i> run with {@link EvaluationVersionBehavior#GRAPH_VERSION}.
     */
    default void registerDependencies(Iterable<SkyKey> keys) throws InterruptedException {
      // Default implementation simply requests the values, which also registers the deps.
      getValues(keys);
    }

    /** Returns whether we are currently in error bubbling. */
    @VisibleForTesting
    boolean inErrorBubblingForTesting();

    /**
     * Adds a dependency on a Skyframe-external event. If the given future is already complete, this
     * method silently returns without doing anything (to avoid unnecessary function restarts).
     * Otherwise, Skyframe adds a listener to the passed-in future, and only re-enqueues the current
     * node after the future completes and all requested deps are done. The added listener will
     * perform the minimum amount of work on the thread completing the future necessary for Skyframe
     * bookkeeping.
     *
     * <p>Callers of this method must check {@link #valuesMissing} before returning {@code null}
     * from a {@link SkyFunction}.
     *
     * <p>This API is intended for performing async computations (e.g., remote execution) in another
     * thread pool without blocking the current Skyframe thread.
     */
    void dependOnFuture(ListenableFuture<?> future);

    /**
     * A {@link SkyFunction#compute} call may return {@link Restart} only if this returns {@code
     * true}.
     */
    boolean restartPermitted();
  }
}
| |
/**
* Copyright (c) 2008 Perforce Software. All rights reserved.
*/
package com.perforce.p4java.impl.generic.core.file;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import com.perforce.p4java.Log;
import com.perforce.p4java.client.IClient;
import com.perforce.p4java.core.IChangelist;
import com.perforce.p4java.core.file.DiffType;
import com.perforce.p4java.core.file.FileAction;
import com.perforce.p4java.core.file.IFileAnnotation;
import com.perforce.p4java.core.file.IFileRevisionData;
import com.perforce.p4java.core.file.IFileSpec;
import com.perforce.p4java.core.file.FileSpecBuilder;
import com.perforce.p4java.core.file.FileSpecOpStatus;
import com.perforce.p4java.exception.AccessException;
import com.perforce.p4java.exception.ConnectionException;
import com.perforce.p4java.exception.P4JavaError;
import com.perforce.p4java.exception.NullPointerError;
import com.perforce.p4java.exception.P4JavaException;
import com.perforce.p4java.exception.RequestException;
import com.perforce.p4java.impl.generic.core.ServerResource;
import com.perforce.p4java.impl.generic.core.file.FilePath.PathType;
import com.perforce.p4java.option.server.GetFileAnnotationsOptions;
import com.perforce.p4java.option.server.GetFileContentsOptions;
import com.perforce.p4java.option.server.GetRevisionHistoryOptions;
import com.perforce.p4java.option.server.MoveFileOptions;
import com.perforce.p4java.server.IOptionsServer;
import com.perforce.p4java.server.IServer;
/**
* Simple generic default implementation class for the IFileSpec
* interface.
*/
public class FileSpec extends ServerResource implements IFileSpec {
// --- Operation result reported by the server for this spec ---
protected FileSpecOpStatus opStatus = FileSpecOpStatus.VALID;
protected String statusMessage = null;
// NOTE(review): the fields below presumably decompose the raw Perforce server
// message code into generic / severity / subsystem parts -- confirm against
// the Perforce message-code documentation.
protected int genericCode = 0;
protected int severityCode = 0;
protected int rawCode = 0;
protected int uniqueCode = 0;
protected int subCode = 0;
protected int subSystem = 0;
// --- The various forms of this file's path (original / depot / client / local) ---
protected FilePath originalPath = null;
protected FilePath depotPath = null;
protected FilePath clientPath = null;
protected FilePath localPath = null;
// --- File type and revision / changelist / label / date annotations ---
protected String fileType = null;
protected int startRevision = NO_FILE_REVISION;
protected int endRevision = NO_FILE_REVISION;
protected int changeListId = IChangelist.UNKNOWN;
protected String label = null;
protected Date date = null;
// --- Action and ownership metadata ---
protected FileAction action = null;
protected String userName = null;
protected String clientName = null;
// --- Base (integration source) information ---
protected int baseRev = NO_FILE_REVISION;
protected String baseName = null;
protected String baseFile = null;
protected boolean unmap = false;
// --- Integration / move source and target files with revision ranges ---
private String fromFile = null;
private int endFromRev = NO_FILE_REVISION;
private int startFromRev = NO_FILE_REVISION;
private String toFile = null;
private int endToRev = NO_FILE_REVISION;
private int startToRev = NO_FILE_REVISION;
// --- Working revision, resolve, lock, diff and shelve state ---
private int workRev = NO_FILE_REVISION;
private String howResolved = null;
private FileAction otherAction = null;
private boolean locked = false;
private String diffStatus = null;
private String resolveType = null;
private String contentResolveType = null;
private int shelvedChange = IChangelist.UNKNOWN;
// Client associated with this spec, if any.
protected IClient client = null;
/**
 * Default constructor. Sets all paths, labels, dates, etc. to null; revisions
 * to IFileSpec.NO_FILE_REVISION; client and server references to null;
 * changelist ID to IChangelist.UNKNOWN; opStatus to VALID; locked to false, etc.
 */
public FileSpec() {
    // NOTE(review): super(false, false) -- presumably flags this ServerResource
    // as neither refreshable nor updateable; confirm against ServerResource's ctor.
    super(false, false);
}
/**
* Given a candidate path string (which may include version
* and changelist annotations, at least), try to construct
* a corresponding file spec.<p>
*
* Effectively an alias for FileSpec(pathStr, true).
*
* @param pathStr candidate path string
*/
public FileSpec(String pathStr) {
this(pathStr, true);
}
/**
 * Given a candidate path string (which may include version
 * and changelist annotations, at least), try to construct
 * a corresponding file spec.<p>
 *
 * The motivation for the hasAnnotations parameter is to
 * allow path strings to contain "@" and "#" characters;
 * the downside of this that if there's any associated
 * annotation info, it's not parsed at all and any such
 * information must be set up manually.
 *
 * @param pathStr candidate path string
 * @param parseAnnotations if true, attempt to parse the path string
 * 				for revision annotations.
 */
public FileSpec(String pathStr, boolean parseAnnotations) {
    super(false, false);
    this.originalPath = new FilePath(PathType.ORIGINAL, pathStr);
    // Bail out early unless we've been asked to parse annotations and the
    // path actually carries some ("#rev", "@change", label, or date).
    if (!parseAnnotations || (pathStr == null)
            || !PathAnnotations.hasPerforceAnnotations(pathStr)) {
        return;
    }
    PathAnnotations annotations = new PathAnnotations(pathStr);
    this.startRevision = annotations.getStartRevision();
    this.endRevision = annotations.getEndRevision();
    this.changeListId = annotations.getChangelistId();
    this.label = annotations.getLabel();
    this.date = annotations.getDate();
}
/**
 * Construct a FileSpec from a specific FilePath.
 *
 * @param path path to store; routed to the matching slot by its type
 */
public FileSpec(FilePath path) {
super(false, false);
setPath(path);
}
/**
 * Construct a filespec from an opstatus and error message pair.
 *
 * @param status operation status for this spec
 * @param errStr associated status / error message
 */
public FileSpec(FileSpecOpStatus status, String errStr) {
super(false, false);
this.opStatus = status;
this.statusMessage = errStr;
}
/**
 * Construct a FileSpec from an opstatus, error message,
 * Perforce generic code, and Perforce severity code.
 */
public FileSpec(FileSpecOpStatus status, String errStr,
int genericCode, int severityCode) {
super(false, false);
this.opStatus = status;
this.statusMessage = errStr;
this.genericCode = genericCode;
this.severityCode = severityCode;
}
/**
 * Construct a new FileSpec given the op status, an error string, and a raw code
 * string returned from a Perforce server.
 *
 * @param status op status for this spec
 * @param errStr associated error / status message
 * @param codeStr raw code string from the server; parse failures (including
 *          a null codeStr) are logged and otherwise ignored, leaving the
 *          code fields at their defaults
 */
public FileSpec(FileSpecOpStatus status, String errStr, String codeStr) {
    super(false, false);
    this.opStatus = status;
    this.statusMessage = errStr;
    try {
        // Integer.parseInt replaces the deprecated new Integer(String);
        // parseInt(null) throws NumberFormatException just as the boxing
        // constructor did, so failure behavior is unchanged.
        setCodes(Integer.parseInt(codeStr));
    } catch (Throwable thr) {
        Log.exception(thr);
    }
}
/**
 * Construct a new FileSpec given the op status, an error string, and a raw code
 * value returned from a Perforce server.
 *
 * @param rawCode raw code value; decoded into the individual code fields
 *          by setCodes(int)
 */
public FileSpec(FileSpecOpStatus status, String errStr, int rawCode) {
super(false, false);
this.opStatus = status;
this.statusMessage = errStr;
setCodes(rawCode);
}
/**
 * Construct a new filespec from another filespec. In
 * other words, effectively clone it by copying all local
 * fields (reference fields are copied by reference, not deep-copied).<p>
 *
 * Fix: previously the base revision / base name fields and the decoded
 * raw server code fields were silently dropped by this copy.
 *
 * @param impl non-null existing filespec.
 */
public FileSpec(FileSpec impl) {
    super(false, false);
    if (impl == null) {
        throw new NullPointerError("null impl passed to FileSpec constructor");
    }
    // Status and server codes:
    this.opStatus = impl.opStatus;
    this.statusMessage = impl.statusMessage;
    this.genericCode = impl.genericCode;
    this.severityCode = impl.severityCode;
    this.rawCode = impl.rawCode;         // was not copied before
    this.uniqueCode = impl.uniqueCode;   // was not copied before
    this.subCode = impl.subCode;         // was not copied before
    this.subSystem = impl.subSystem;     // was not copied before
    // Paths and basic file metadata:
    this.originalPath = impl.originalPath;
    this.depotPath = impl.depotPath;
    this.clientPath = impl.clientPath;
    this.localPath = impl.localPath;
    this.fileType = impl.fileType;
    this.startRevision = impl.startRevision;
    this.endRevision = impl.endRevision;
    this.changeListId = impl.changeListId;
    this.label = impl.label;
    this.date = impl.date;
    this.action = impl.action;
    this.userName = impl.userName;
    this.clientName = impl.clientName;
    this.unmap = impl.unmap;
    // Integration / resolve data:
    this.fromFile = impl.fromFile;
    this.endFromRev = impl.endFromRev;
    this.startFromRev = impl.startFromRev;
    this.toFile = impl.toFile;
    this.endToRev = impl.endToRev;
    this.startToRev = impl.startToRev;
    this.workRev = impl.workRev;
    this.howResolved = impl.howResolved;
    this.otherAction = impl.otherAction;
    this.locked = impl.locked;
    this.diffStatus = impl.diffStatus;
    this.resolveType = impl.resolveType;
    this.contentResolveType = impl.contentResolveType;
    this.shelvedChange = impl.shelvedChange;
    // Associations:
    this.server = impl.server;
    this.client = impl.client;
    // Base file info:
    this.baseRev = impl.baseRev;         // was not copied before
    this.baseName = impl.baseName;       // was not copied before
    this.baseFile = impl.baseFile;
}
/**
 * Try to construct a FileSpec from a passed-in map as returned from a
 * Perforce server. Tuned to return values from the underlying map-based server
 * interface, which explains the index (set this to zero for normal use).
 *
 * @param map field map as returned by the server; a null map leaves the
 *          spec in its default state
 * @param server server this spec will be associated with
 * @param index non-negative index appended to most map keys (e.g. "rev0")
 *          in multi-spec results; pass a negative value for no suffix
 */
public FileSpec(Map<String, Object> map, IServer server, int index) {
    super(false, false);
    if (map == null) {
        return;
    }
    this.setOpStatus(FileSpecOpStatus.VALID);
    // Most keys are suffixed with the spec's index in multi-spec results.
    String indexStr = "";
    if (index >= 0) {
        indexStr += index;
    }
    this.setServer(server);
    if (map.containsKey("dir" + indexStr)) {
        this.setDepotPath(new FilePath(PathType.DEPOT, (String) map.get("dir" + indexStr), true));
    }
    if (map.containsKey("depotFile" + indexStr)) {
        this.setDepotPath(new FilePath(PathType.DEPOT, (String) map.get("depotFile" + indexStr), true));
    }
    if (map.containsKey("clientFile" + indexStr)) {
        this.setClientPath(new FilePath(PathType.CLIENT, (String) map.get("clientFile" + indexStr), true));
    }
    if (map.containsKey("localFile" + indexStr)) {
        this.setLocalPath(new FilePath(PathType.LOCAL, (String) map.get("localFile" + indexStr), true));
    }
    if (map.containsKey("path" + indexStr)) {
        this.setLocalPath(new FilePath(PathType.LOCAL, (String) map.get("path" + indexStr), true));
    }
    this.setFileType((String) map.get("type" + indexStr));
    this.setAction(FileAction.fromString((String) map.get("action" + indexStr)));
    this.setUserName((String) map.get("user" + indexStr));
    this.setClientName((String) map.get("client" + indexStr));
    String cid = (String) map.get("change" + indexStr);
    String revStr = (String) map.get("rev" + indexStr);
    if (revStr == null) {
        // Sometimes it's the haveRev key...
        revStr = (String) map.get("haveRev" + indexStr);
    }
    // Get submit date from the 'time' (seconds).
    // Multiply by 1000 to get the milliseconds.
    if (map.get("time") != null) {
        try {
            long seconds = Long.parseLong((String) map.get("time"));
            this.setDate(new Date(seconds * 1000));
        } catch (NumberFormatException nfe) {
            Log.error("Error parsing the 'time' in the FileSpec constructor: "
                + nfe.getLocalizedMessage());
            Log.exception(nfe);
        }
    }
    // Locked if we or another client hold a lock on the file
    // (simplified from the old "? false : true" ternary).
    this.setLocked((map.get("ourLock") != null) || (map.get("otherLock") != null));
    this.setEndRevision(getRevFromString(revStr));
    if (cid == null) {
        this.setChangelistId(IChangelist.UNKNOWN);
    } else if (cid.equalsIgnoreCase("default") || cid.equalsIgnoreCase("default change")) {
        this.setChangelistId(IChangelist.DEFAULT);
    } else {
        // Sometimes in format "change nnnnnn", sometimes just "nnnnn". Urgh...
        int i = cid.indexOf(" ");
        if (i < 0) {
            this.setChangelistId(Integer.parseInt(cid)); // was deprecated new Integer(...)
        } else {
            this.setChangelistId(Integer.parseInt(cid.substring(i + 1)));
        }
    }
    this.setEndFromRev(getRevFromString((String) map.get("endFromRev" + indexStr)));
    this.setStartFromRev(getRevFromString((String) map.get("startFromRev" + indexStr)));
    this.setWorkRev(getRevFromString((String) map.get("workRev" + indexStr)));
    this.setHowResolved((String) map.get("how"));
    this.setFromFile((String) map.get("fromFile" + indexStr));
    this.setEndToRev(getRevFromString((String) map.get("endToRev" + indexStr)));
    this.setStartToRev(getRevFromString((String) map.get("startToRev" + indexStr)));
    this.setToFile((String) map.get("toFile" + indexStr));
    this.setBaseRev(getRevFromString((String) map.get("baseRev" + indexStr)));
    this.setBaseName((String) map.get("baseName" + indexStr));
    this.setBaseFile((String) map.get("baseFile" + indexStr));
    this.setOtherAction(
        FileAction.fromString((String) map.get("otherAction" + indexStr)));
    this.setDiffStatus((String) map.get("status"));
    this.setResolveType((String) map.get("resolveType"));
    this.setContentResolveType((String) map.get("contentResolveType"));
    if (map.containsKey("shelvedChange")) {
        try {
            this.setShelvedChange(Integer.parseInt((String) map.get("shelvedChange")));
        } catch (NumberFormatException nfe) {
            Log.error("Error parsing the 'shelvedChange' in the FileSpec constructor: "
                + nfe.getLocalizedMessage());
            Log.exception(nfe);
        }
    }
    // Previously unmap was assigned twice (a direct field write followed by
    // this setter with the identical condition); the setter alone yields
    // exactly the same final state.
    this.setUnmap(map.get("unmap" + indexStr) != null);
}
/**
 * Set the various error codes for this FileSpec to a value returned
 * from the server or the RPC layer. Use this if you're hand-constructing
 * a new FileSpec for an error condition and you have the raw code.
 *
 * Raw code bit layout (least- to most-significant):
 * bits 0-9 sub code, bits 10-15 subsystem (together bits 0-15 form the
 * unique code), bits 16-23 generic code, bits 28-31 severity.
 *
 * @param rawCode raw code value as returned by the server
 * @return this FileSpec, for call chaining
 */
public FileSpec setCodes(int rawCode) {
    this.rawCode = rawCode;
    this.subCode = rawCode & 0x3FF;            // low 10 bits (">> 0" no-op removed)
    this.subSystem = (rawCode >> 10) & 0x3F;   // next 6 bits
    this.uniqueCode = rawCode & 0xFFFF;        // low 16 bits = subCode + subSystem
    this.genericCode = (rawCode >> 16) & 0xFF;
    this.severityCode = (rawCode >> 28) & 0x00F;
    return this;
}
/**
 * Get the path of the given type, falling back to the original path for a
 * null, ORIGINAL, or unrecognized path type.
 *
 * @see com.perforce.p4java.core.file.IFileSpec#getPath(com.perforce.p4java.impl.generic.core.file.FilePath.PathType)
 */
public FilePath getPath(PathType pathType) {
    if (pathType == null) {
        return this.originalPath;
    }
    switch (pathType) {
    case DEPOT:
        return this.depotPath;
    case CLIENT:
        return this.clientPath;
    case LOCAL:
        return this.localPath;
    default:
        return this.originalPath;
    }
}
/**
 * Store the given path in the slot matching its type; paths with a null,
 * ORIGINAL, or unrecognized type (and a null path itself) land in the
 * original-path slot.
 *
 * @see com.perforce.p4java.core.file.IFileSpec#setPath(com.perforce.p4java.impl.generic.core.file.FilePath)
 */
public void setPath(FilePath filePath) {
    if ((filePath != null) && (filePath.getPathType() != null)) {
        switch (filePath.getPathType()) {
        case DEPOT:
            this.depotPath = filePath;
            return;
        case CLIENT:
            this.clientPath = filePath;
            return;
        case LOCAL:
            this.localPath = filePath;
            return;
        default:
            break;
        }
    }
    this.originalPath = filePath;
}
/**
 * @see com.perforce.p4java.core.file.IFileSpec#getAction()
 */
public FileAction getAction() {
return this.action;
}
/**
 * @see com.perforce.p4java.core.file.IFileSpec#getChangelistId()
 */
public int getChangelistId() {
return this.changeListId;
}
/**
 * @see com.perforce.p4java.core.file.IFileSpec#getClientName()
 */
public String getClientName() {
return this.clientName;
}
/**
 * @see com.perforce.p4java.core.file.IFileSpec#getClientPath()
 */
public FilePath getClientPath() {
return this.getPath(PathType.CLIENT);
}
/**
 * Get this file's contents from the associated server; requires a usable
 * server association (see checkServer()).
 *
 * @see com.perforce.p4java.core.file.IFileSpec#getContents(boolean)
 */
public InputStream getContents(boolean noHeaderLine)
throws ConnectionException, RequestException,
AccessException {
checkServer();
List<IFileSpec> fList = new ArrayList<IFileSpec>();
fList.add(this);
return this.server.getFileContents(fList, false, noHeaderLine);
}
/**
 * Get this file's contents using the options-based server call.
 *
 * @see com.perforce.p4java.core.file.IFileSpec#getContents(com.perforce.p4java.option.server.GetFileContentsOptions)
 */
public InputStream getContents(GetFileContentsOptions opts) throws P4JavaException {
checkServer();
List<IFileSpec> fList = new ArrayList<IFileSpec>();
fList.add(this);
return ((IOptionsServer) this.server).getFileContents(fList, opts);
}
/**
 * @see com.perforce.p4java.core.file.IFileSpec#getDepotPath()
 */
public FilePath getDepotPath() {
return this.getPath(PathType.DEPOT);
}
/**
 * @see com.perforce.p4java.core.file.IFileSpec#getEndRevision()
 */
public int getEndRevision() {
return this.endRevision;
}
/**
 * @see com.perforce.p4java.core.file.IFileSpec#getFileType()
 */
public String getFileType() {
return this.fileType;
}
/**
 * @see com.perforce.p4java.core.file.IFileSpec#getLabel()
 */
public String getLabel() {
return this.label;
}
/**
 * @see com.perforce.p4java.core.file.IFileSpec#getLocalPath()
 */
public FilePath getLocalPath() {
return this.getPath(PathType.LOCAL);
}
/**
 * @see com.perforce.p4java.core.file.IFileSpec#getOpStatus()
 */
public FileSpecOpStatus getOpStatus() {
return this.opStatus;
}
/**
 * Return the first non-null path in precedence order: original, depot,
 * client, then local; null if no path is set at all.
 *
 * @see com.perforce.p4java.core.file.IFileSpec#getPreferredPath()
 */
public FilePath getPreferredPath() {
    FilePath[] candidates = {
            this.originalPath, this.depotPath, this.clientPath, this.localPath };
    for (FilePath candidate : candidates) {
        if (candidate != null) {
            return candidate;
        }
    }
    return null;
}
/**
 * Get this file's revision history from the associated server, keyed off
 * the annotated preferred path.
 *
 * @see com.perforce.p4java.core.file.IFileSpec#getRevisionHistory(int, boolean, boolean, boolean, boolean)
 */
public Map<IFileSpec, List<IFileRevisionData>> getRevisionHistory(int maxRevs,
boolean contentHistory, boolean includeInherited,
boolean longOutput, boolean truncatedLongOutput)
throws ConnectionException, RequestException, AccessException {
checkServer();
return this.server.getRevisionHistory(
FileSpecBuilder.makeFileSpecList(new String[] { this.getAnnotatedPreferredPathString() }),
maxRevs, contentHistory, includeInherited, longOutput, truncatedLongOutput);
}
/**
 * @see com.perforce.p4java.core.file.IFileSpec#getRevisionHistory(com.perforce.p4java.option.server.GetRevisionHistoryOptions)
 */
public Map<IFileSpec, List<IFileRevisionData>> getRevisionHistory(GetRevisionHistoryOptions opts)
throws P4JavaException {
checkServer();
return ((IOptionsServer) this.server).getRevisionHistory(
FileSpecBuilder.makeFileSpecList(new String[] { this.getAnnotatedPreferredPathString() }), opts);
}
/**
 * @see com.perforce.p4java.core.file.IFileSpec#getAnnotations(com.perforce.p4java.core.file.DiffType, boolean, boolean, boolean)
 */
public List<IFileAnnotation> getAnnotations(DiffType wsOptions, boolean allResults,
boolean useChangeNumbers, boolean followBranches)
throws ConnectionException, RequestException, AccessException {
checkServer();
List<IFileSpec> specList = new ArrayList<IFileSpec>();
specList.add(this);
return this.server.getFileAnnotations(specList, wsOptions, allResults, useChangeNumbers, followBranches);
}
/**
 * @see com.perforce.p4java.core.file.IFileSpec#getAnnotations(com.perforce.p4java.option.server.GetFileAnnotationsOptions)
 */
public List<IFileAnnotation> getAnnotations(GetFileAnnotationsOptions opts) throws P4JavaException {
checkServer();
List<IFileSpec> specList = new ArrayList<IFileSpec>();
specList.add(this);
return ((IOptionsServer) this.server).getFileAnnotations(specList, opts);
}
/**
 * @see com.perforce.p4java.core.file.IFileSpec#move(int, boolean, boolean, java.lang.String, com.perforce.p4java.core.file.IFileSpec)
 */
public List<IFileSpec> move(int changelistId, boolean listOnly, boolean noClientMove, String fileType, IFileSpec toFile)
throws ConnectionException, RequestException, AccessException {
checkServer();
return this.server.moveFile(changelistId, listOnly, noClientMove, fileType, this, toFile);
}
/**
 * @see com.perforce.p4java.core.file.IFileSpec#move(com.perforce.p4java.core.file.IFileSpec, com.perforce.p4java.option.server.MoveFileOptions)
 */
public List<IFileSpec> move(IFileSpec toFile, MoveFileOptions opts) throws P4JavaException {
checkServer();
return ((IOptionsServer) this.server).moveFile(this, toFile, opts);
}
// Guard used by the server-backed operations above; fails fast when this
// spec has no usable (options-capable) server association.
private void checkServer() throws P4JavaError {
if (this.server == null) {
throw new P4JavaError("File specification is not associated with any server");
}
if (!(this.server instanceof IOptionsServer)) {
// This should be impossible, but you never know... -- HR.
throw new P4JavaError("File specification is not associated with an options server");
}
}
/**
 * @see com.perforce.p4java.core.file.IFileSpec#getStartRevision()
 */
public int getStartRevision() {
return this.startRevision;
}
/**
 * @see com.perforce.p4java.core.file.IFileSpec#getStatusMessage()
 */
public String getStatusMessage() {
return this.statusMessage;
}
/**
 * @see com.perforce.p4java.core.file.IFileSpec#getSeverityCode()
 */
public int getSeverityCode() {
return this.severityCode;
}
/**
 * @see com.perforce.p4java.core.file.IFileSpec#getGenericCode()
 */
public int getGenericCode() {
return this.genericCode;
}
/**
 * @see com.perforce.p4java.core.file.IFileSpec#getUserName()
 */
public String getUserName() {
return this.userName;
}
/**
 * @see com.perforce.p4java.core.file.IFileSpec#isLocked()
 */
public boolean isLocked() {
return this.locked;
}
/**
 * @see com.perforce.p4java.core.file.IFileSpec#getDiffStatus()
 */
public String getDiffStatus() {
return this.diffStatus;
}
// Plain accessors for resolve-related and shelved-change fields.
public String getResolveType() {
return this.resolveType;
}
public String getContentResolveType() {
return this.contentResolveType;
}
public int getShelvedChange() {
return this.shelvedChange;
}
// Plain setters; no validation is performed on any of these.
public void setOpStatus(FileSpecOpStatus opStatus) {
this.opStatus = opStatus;
}
public void setStatusMessage(String statusMessage) {
this.statusMessage = statusMessage;
}
public void setOriginalPath(FilePath path) {
this.originalPath = path;
}
public void setDepotPath(FilePath depotPath) {
this.depotPath = depotPath;
}
public void setClientPath(FilePath clientPath) {
this.clientPath = clientPath;
}
public void setLocalPath(FilePath localPath) {
this.localPath = localPath;
}
public void setFileType(String fileType) {
this.fileType = fileType;
}
public void setStartRevision(int startRevision) {
this.startRevision = startRevision;
}
public void setEndRevision(int endRevision) {
this.endRevision = endRevision;
}
public void setChangelistId(int changeListId) {
this.changeListId = changeListId;
}
public void setLabel(String label) {
this.label = label;
}
public void setAction(FileAction action) {
this.action = action;
}
public void setUserName(String userName) {
this.userName = userName;
}
public void setClientName(String clientName) {
this.clientName = clientName;
}
public void setClient(IClient client) {
this.client = client;
}
/**
 * @see com.perforce.p4java.core.file.IFileSpec#getEndFromRev()
 */
public int getEndFromRev() {
return this.endFromRev;
}
/**
 * @see com.perforce.p4java.core.file.IFileSpec#getEndToRev()
 */
public int getEndToRev() {
return this.endToRev;
}
/**
 * @see com.perforce.p4java.core.file.IFileSpec#getFromFile()
 */
public String getFromFile() {
return this.fromFile;
}
/**
 * @see com.perforce.p4java.core.file.IFileSpec#getHowResolved()
 */
public String getHowResolved() {
return this.howResolved;
}
/**
 * @see com.perforce.p4java.core.file.IFileSpec#getOtherAction()
 */
public FileAction getOtherAction() {
return this.otherAction;
}
/**
 * @see com.perforce.p4java.core.file.IFileSpec#getStartFromRev()
 */
public int getStartFromRev() {
return this.startFromRev;
}
/**
 * @see com.perforce.p4java.core.file.IFileSpec#getStartToRev()
 */
public int getStartToRev() {
return this.startToRev;
}
/**
 * @see com.perforce.p4java.core.file.IFileSpec#getToFile()
 */
public String getToFile() {
return this.toFile;
}
/**
 * @see com.perforce.p4java.core.file.IFileSpec#getWorkRev()
 */
public int getWorkRev() {
return this.workRev;
}
/**
 * @see com.perforce.p4java.core.file.IFileSpec#isUnmap()
 */
public boolean isUnmap() {
return this.unmap;
}
// Plain setters for date and integration-related fields.
public void setDate(Date date) {
this.date = date;
}
public void setFromFile(String fromFile) {
this.fromFile = fromFile;
}
public void setEndFromRev(int endFromRev) {
this.endFromRev = endFromRev;
}
public void setStartFromRev(int startFromRev) {
this.startFromRev = startFromRev;
}
public void setToFile(String toFile) {
this.toFile = toFile;
}
public void setEndToRev(int endToRev) {
this.endToRev = endToRev;
}
public void setStartToRev(int startToRev) {
this.startToRev = startToRev;
}
public void setWorkRev(int workRev) {
this.workRev = workRev;
}
public void setHowResolved(String howResolved) {
this.howResolved = howResolved;
}
public void setOtherAction(FileAction otherAction) {
this.otherAction = otherAction;
}
public void setLocked(boolean locked) {
this.locked = locked;
}
public void setDiffStatus(String diffStatus) {
this.diffStatus = diffStatus;
}
public void setResolveType(String resolveType) {
this.resolveType = resolveType;
}
public void setContentResolveType(String contentResolveType) {
this.contentResolveType = contentResolveType;
}
public void setShelvedChange(int shelvedChange) {
this.shelvedChange = shelvedChange;
}
public void setUnmap(boolean unmap) {
this.unmap = unmap;
}
/**
 * Attempt to convert a Perforce revision string ("#rev" or plain "rev",
 * or the special tokens "head" / "none") to an int revision number.
 * Conversion failures are logged and NO_FILE_REVISION is returned.
 *
 * @param str possibly-null candidate revision string
 * @return the parsed revision, HEAD_REVISION, or NO_FILE_REVISION
 */
public static int getRevFromString(String str) {
    int rev = NO_FILE_REVISION;
    if (str != null) {
        // can be in #rev or rev form, unfortunately...
        if (str.contains("head")) {
            return HEAD_REVISION;
        }
        if (str.contains("none")) {
            return NO_FILE_REVISION;
        }
        try {
            // Integer.parseInt replaces the deprecated new Integer(String);
            // identical parsing and failure semantics.
            if (str.startsWith("#") && (str.length() > 1)) {
                rev = Integer.parseInt(str.substring(1));
            } else if (str.length() > 0) {
                rev = Integer.parseInt(str);
            }
        } catch (Exception exc) {
            Log.error("Conversion error in FileSpec.getRevFromString: "
                + exc.getLocalizedMessage());
            Log.exception(exc);
        }
    }
    return rev;
}
/**
 * @see com.perforce.p4java.core.file.IFileSpec#getClientPathString()
 */
public String getClientPathString() {
return getPathString(PathType.CLIENT);
}
/**
 * @see com.perforce.p4java.core.file.IFileSpec#getDepotPathString()
 */
public String getDepotPathString() {
return getPathString(PathType.DEPOT);
}
/**
 * @see com.perforce.p4java.core.file.IFileSpec#getLocalPathString()
 */
public String getLocalPathString() {
return getPathString(PathType.LOCAL);
}
/**
 * Get the original path, falling back to the client path when no original
 * path was recorded (API backward compatibility).
 *
 * @see com.perforce.p4java.core.file.IFileSpec#getOriginalPath()
 */
public FilePath getOriginalPath() {
if (getPath(PathType.ORIGINAL) != null) { // See job061945
return getPath(PathType.ORIGINAL);
} else { // API backward compatibility - See job070533
return getPath(PathType.CLIENT);
}
}
/**
 * String form of getOriginalPath(), with the same client-path fallback.
 *
 * @see com.perforce.p4java.core.file.IFileSpec#getOriginalPathString()
 */
public String getOriginalPathString() {
if (getPathString(PathType.ORIGINAL) != null) { // See job061945
return getPathString(PathType.ORIGINAL);
} else { // API backward compatibility - See job070533
return getPathString(PathType.CLIENT);
}
}
/**
 * @see com.perforce.p4java.core.file.IFileSpec#getPathString(com.perforce.p4java.impl.generic.core.file.FilePath.PathType)
 */
public String getPathString(PathType pathType) {
FilePath fPath = getPath(pathType);
if (fPath != null) {
return fPath.toString();
}
return null;
}
/**
 * Get the given path annotated with this spec's revision / changelist info.
 *
 * @see com.perforce.p4java.core.file.IFileSpec#getAnnotatedPathString(com.perforce.p4java.impl.generic.core.file.FilePath.PathType)
 */
public String getAnnotatedPathString(PathType pathType) {
FilePath path = getPath(pathType);
if (path != null) {
return path.annotate(this);
}
return null;
}
/**
 * @see com.perforce.p4java.core.file.IFileSpec#getPreferredPathString()
 */
public String getPreferredPathString() {
FilePath prefPath = getPreferredPath();
if ((prefPath != null) && (prefPath.getPathString() != null)) {
return prefPath.toString();
}
return null;
}
/**
 * @see com.perforce.p4java.core.file.IFileSpec#getAnnotatedPreferredPathString()
 */
public String getAnnotatedPreferredPathString() {
FilePath prefPath = getPreferredPath();
if ((prefPath != null) && (prefPath.getPathString() != null)) {
return prefPath.annotate(this);
}
return null;
}
/**
 * Alias for getAnnotatedPreferredPathString(); may return null when no
 * path is set at all.
 *
 * @see java.lang.Object#toString()
 */
@Override // was missing; toString overrides Object.toString()
public String toString() {
    return getAnnotatedPreferredPathString();
}
/**
 * @see com.perforce.p4java.core.file.IFileSpec#setPathFromString(com.perforce.p4java.impl.generic.core.file.FilePath.PathType, java.lang.String)
 */
public void setPathFromString(PathType pathType, String pathStr) {
FilePath path = new FilePath(pathType, pathStr);
setPath(path);
}
/**
 * @see com.perforce.p4java.core.file.IFileSpec#getDate()
 */
public Date getDate() {
return this.date;
}
/**
 * @see com.perforce.p4java.core.file.IFileSpec#setClientPath(java.lang.String)
 */
public void setClientPath(String pathStr) {
setPath(new FilePath(PathType.CLIENT, pathStr));
}
/**
 * @see com.perforce.p4java.core.file.IFileSpec#setDepotPath(java.lang.String)
 */
public void setDepotPath(String pathStr) {
setPath(new FilePath(PathType.DEPOT, pathStr));
}
/**
 * @see com.perforce.p4java.core.file.IFileSpec#setLocalPath(java.lang.String)
 */
public void setLocalPath(String pathStr) {
setPath(new FilePath(PathType.LOCAL, pathStr));
}
/**
 * @see com.perforce.p4java.core.file.IFileSpec#setOriginalPath(java.lang.String)
 */
public void setOriginalPath(String pathStr) {
setPath(new FilePath(PathType.ORIGINAL, pathStr));
}
/**
 * @see com.perforce.p4java.core.file.IFileSpec#getBaseRev()
 */
public int getBaseRev() {
return baseRev;
}
/**
 * @see com.perforce.p4java.core.file.IFileSpec#setBaseRev(int)
 */
public void setBaseRev(int baseRev) {
this.baseRev = baseRev;
}
/**
 * @see com.perforce.p4java.core.file.IFileSpec#getBaseName()
 */
public String getBaseName() {
return baseName;
}
/**
 * @see com.perforce.p4java.core.file.IFileSpec#setBaseName(java.lang.String)
 */
public void setBaseName(String baseName) {
this.baseName = baseName;
}
/**
 * @see com.perforce.p4java.core.file.IFileSpec#getBaseFile()
 */
public String getBaseFile() {
return baseFile;
}
/**
 * @see com.perforce.p4java.core.file.IFileSpec#setBaseFile(java.lang.String)
 */
public void setBaseFile(String baseFile) {
this.baseFile = baseFile;
}
// Accessors for the raw server return code and its decoded parts;
// see setCodes(int) for the bit layout.
public int getRawCode() {
return rawCode;
}
public int getUniqueCode() {
return uniqueCode;
}
public int getSubCode() {
return subCode;
}
public int getSubSystem() {
return subSystem;
}
}
| |
/*
* Copyright 2012 GitHub Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.mobile.ui.repo;
import static android.content.Intent.FLAG_ACTIVITY_CLEAR_TOP;
import static android.content.Intent.FLAG_ACTIVITY_SINGLE_TOP;
import static com.github.mobile.Intents.EXTRA_POSITION;
import static com.github.mobile.Intents.EXTRA_REPOSITORY;
import static com.github.mobile.ResultCodes.RESOURCE_CHANGED;
import static com.github.mobile.ui.repo.RepositoryPagerAdapter.ITEM_CODE;
import android.app.SearchManager;
import android.content.Intent;
import android.net.Uri;
import android.os.Bundle;
import android.support.v7.app.ActionBar;
import android.text.TextUtils;
import android.view.Menu;
import android.view.MenuItem;
import android.widget.ProgressBar;
import com.github.kevinsawicki.wishlist.ViewUtils;
import com.github.mobile.Intents.Builder;
import com.github.mobile.R;
import com.github.mobile.core.repo.ForkRepositoryTask;
import com.github.mobile.core.repo.RefreshRepositoryTask;
import com.github.mobile.core.repo.RepositoryUtils;
import com.github.mobile.core.repo.StarRepositoryTask;
import com.github.mobile.core.repo.StarredRepositoryTask;
import com.github.mobile.core.repo.UnstarRepositoryTask;
import com.github.mobile.ui.TabPagerActivity;
import com.github.mobile.ui.UriLauncherActivity;
import com.github.mobile.ui.user.UserViewActivity;
import com.github.mobile.util.ShareUtils;
import com.github.mobile.util.ToastUtils;
import com.github.mobile.util.TypefaceUtils;
import org.eclipse.egit.github.core.Repository;
import org.eclipse.egit.github.core.User;
/**
* Activity to view a repository
*/
public class RepositoryViewActivity extends TabPagerActivity<RepositoryPagerAdapter> {
/**
 * Create intent for this activity
 *
 * @param repository
 * @return intent
 */
public static Intent createIntent(Repository repository) {
    Builder builder = new Builder("repo.VIEW");
    return builder.repo(repository).toIntent();
}
/**
 * Create intent for this activity and open the issues tab
 *
 * @param repository
 * @return intent
 */
public static Intent createIntentForIssues(Repository repository) {
    // Position 3 is the issues tab (see getIcon()).
    Builder builder = new Builder("repo.VIEW").repo(repository);
    return builder.add(EXTRA_POSITION, 3).toIntent();
}
// Repository being displayed; replaced with the full copy once loaded.
private Repository repository;
// Spinner shown while the full repository is being fetched.
private ProgressBar loadingBar;
// Whether the current user has starred this repository.
private boolean isStarred;
// True once the starred state has been fetched; gates the star menu item.
private boolean starredStatusChecked;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
repository = getSerializableExtra(EXTRA_REPOSITORY);
loadingBar = finder.find(R.id.pb_loading);
User owner = repository.getOwner();
ActionBar actionBar = getSupportActionBar();
actionBar.setTitle(repository.getName());
actionBar.setSubtitle(owner.getLogin());
actionBar.setDisplayHomeAsUpEnabled(true);
// A repository handed over from a list may be incomplete; build the pager
// immediately only when all required fields are present, otherwise show
// the progress bar and fetch the full repository first.
if (owner.getAvatarUrl() != null && RepositoryUtils.isComplete(repository))
configurePager();
else {
ViewUtils.setGone(loadingBar, false);
setGone(true);
new RefreshRepositoryTask(this, repository) {
@Override
protected void onSuccess(Repository fullRepository) throws Exception {
super.onSuccess(fullRepository);
repository = fullRepository;
// Store the full repository on the intent so a recreated activity
// starts with complete data.
getIntent().putExtra(EXTRA_REPOSITORY, repository);
configurePager();
}
@Override
protected void onException(Exception e) throws RuntimeException {
super.onException(e);
ToastUtils.show(RepositoryViewActivity.this, R.string.error_repo_load);
ViewUtils.setGone(loadingBar, true);
}
}.execute();
}
}
@Override
public boolean onCreateOptionsMenu(Menu optionsMenu) {
// Repository actions (star, fork, share, ...) on top of the parent's menu.
getMenuInflater().inflate(R.menu.repository, optionsMenu);
return super.onCreateOptionsMenu(optionsMenu);
}
@Override
public boolean onPrepareOptionsMenu(Menu menu) {
    // Hide the star/unstar entry until the starred state has been fetched.
    MenuItem starItem = menu.findItem(R.id.m_star);
    starItem.setVisible(starredStatusChecked);
    if (isStarred)
        starItem.setTitle(R.string.unstar);
    else
        starItem.setTitle(R.string.star);
    // "Open parent" only applies to forks.
    menu.findItem(R.id.m_parent_repo).setVisible(repository.isFork());
    return super.onPrepareOptionsMenu(menu);
}
@Override
public boolean onSearchRequested() {
// Only handle search on tab position 1 (presumably the code page, given
// getIcon() maps 1 to ICON_FILE_CODE — TODO confirm); attach the
// repository so the search activity can scope its query.
if (pager.getCurrentItem() == 1) {
Bundle args = new Bundle();
args.putSerializable(EXTRA_REPOSITORY, repository);
startSearch(null, false, args, false);
return true;
} else
return false;
}
@Override
public void startActivity(Intent intent) {
    // Inject extra information into search intents
    // Search intents will probably come from the "Issues" fragment
    boolean isSearch = Intent.ACTION_SEARCH.equals(intent.getAction());
    if (isSearch) {
        Bundle appData = new Bundle();
        appData.putSerializable(EXTRA_REPOSITORY, repository);
        intent.putExtra(SearchManager.APP_DATA, appData);
    }
    super.startActivity(intent);
}
@Override
public void onBackPressed() {
// Give the code page (ITEM_CODE) first chance to consume the back press
// (e.g. to navigate up a directory) before letting the activity finish.
if (adapter == null || pager.getCurrentItem() != ITEM_CODE || !adapter.onBackPressed())
super.onBackPressed();
}
// Build the tab pager once a complete repository is available, hide the
// progress bar, and kick off the starred-state check.
private void configurePager() {
configureTabPager();
ViewUtils.setGone(loadingBar, true);
setGone(false);
checkStarredRepositoryStatus();
// Honor a requested initial tab (e.g. from createIntentForIssues()).
int initialPosition = getIntExtra(EXTRA_POSITION);
if (initialPosition != -1) {
pager.setItem(initialPosition);
}
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case R.id.m_star:
starRepository();
return true;
case R.id.m_fork:
forkRepository();
return true;
case R.id.m_contributors:
startActivity(RepositoryContributorsActivity.createIntent(repository));
return true;
case R.id.m_parent_repo:
// Parent may be unset on list-sourced repositories; fetch the full
// repository first in that case.
if (repository.getParent() == null) {
// TODO: save parent in OrganizationRepositories so we don't need to do this
new RefreshRepositoryTask(this, repository) {
@Override
protected void onSuccess(Repository fullRepository) throws Exception {
super.onSuccess(fullRepository);
repository = fullRepository;
if (repository.getParent() != null) {
startActivity(RepositoryViewActivity.createIntent(repository.getParent()));
} else {
ToastUtils.show(RepositoryViewActivity.this, R.string.error_repo_load);
}
}
@Override
protected void onException(Exception e) throws RuntimeException {
super.onException(e);
ToastUtils.show(RepositoryViewActivity.this, R.string.error_repo_load);
}
}.execute();
} else {
startActivity(RepositoryViewActivity.createIntent(repository.getParent()));
}
return true;
case R.id.m_share:
shareRepository();
return true;
case R.id.m_refresh:
checkStarredRepositoryStatus();
// Deliberately delegate to the parent so the page content itself
// is refreshed as well.
return super.onOptionsItemSelected(item);
case R.id.m_open_browser:
Uri repoUri = Uri.parse("https://github.com/" + repository.generateId());
UriLauncherActivity.launchUriInBrowser(this, repoUri);
return true;
case android.R.id.home:
// Up navigation: go to the owner's profile, reusing an existing
// instance on the back stack when possible.
finish();
Intent intent = UserViewActivity.createIntent(repository.getOwner());
intent.addFlags(FLAG_ACTIVITY_CLEAR_TOP | FLAG_ACTIVITY_SINGLE_TOP);
startActivity(intent);
return true;
default:
return super.onOptionsItemSelected(item);
}
}
@Override
public void onDialogResult(int requestCode, int resultCode, Bundle arguments) {
    // Guard against dialog results arriving before the pager has been
    // configured: the adapter is created lazily once the repository loads
    // (onBackPressed applies the same null check on this field).
    if (adapter != null)
        adapter.onDialogResult(pager.getCurrentItem(), requestCode, resultCode, arguments);
}
@Override
protected RepositoryPagerAdapter createAdapter() {
    // The issues tab is only included when the repository has issues enabled.
    final boolean showIssues = repository.isHasIssues();
    return new RepositoryPagerAdapter(this, showIssues);
}
@Override
protected int getContentView() {
    // Layout hosting the tab strip, pager and a progress indicator.
    return R.layout.tabbed_progress_pager;
}
@Override
protected String getIcon(int position) {
    // Octicon glyphs for the tabs: news feed, code, commits, issues.
    if (position == 0) {
        return TypefaceUtils.ICON_RSS;
    }
    if (position == 1) {
        return TypefaceUtils.ICON_FILE_CODE;
    }
    if (position == 2) {
        return TypefaceUtils.ICON_GIT_COMMIT;
    }
    if (position == 3) {
        return TypefaceUtils.ICON_ISSUE_OPENED;
    }
    return super.getIcon(position);
}
/**
 * Star or unstar this repository depending on the current state.
 * On success the flag is flipped and the activity result marks the
 * repository as changed; on failure an error toast is shown.
 */
private void starRepository() {
    if (!isStarred) {
        new StarRepositoryTask(this, repository) {
            @Override
            protected void onSuccess(Void v) throws Exception {
                super.onSuccess(v);
                isStarred = !isStarred;
                setResult(RESOURCE_CHANGED);
            }

            @Override
            protected void onException(Exception e) throws RuntimeException {
                super.onException(e);
                ToastUtils.show(RepositoryViewActivity.this, R.string.error_starring_repository);
            }
        }.start();
        return;
    }

    new UnstarRepositoryTask(this, repository) {
        @Override
        protected void onSuccess(Void v) throws Exception {
            super.onSuccess(v);
            isStarred = !isStarred;
            setResult(RESOURCE_CHANGED);
        }

        @Override
        protected void onException(Exception e) throws RuntimeException {
            super.onException(e);
            ToastUtils.show(RepositoryViewActivity.this, R.string.error_unstarring_repository);
        }
    }.start();
}
/**
 * Refresh whether the current user has starred this repository.
 * Clears the checked flag first so the menu hides the star item until the
 * background lookup completes, then rebuilds the menu with the fresh state.
 */
private void checkStarredRepositoryStatus() {
    starredStatusChecked = false;
    new StarredRepositoryTask(this, repository) {
        @Override
        protected void onSuccess(Boolean watching) throws Exception {
            super.onSuccess(watching);
            isStarred = watching;
            starredStatusChecked = true;
            // Re-show the star/unstar menu item now that the state is known.
            invalidateOptionsMenu();
        }
    }.execute();
}
/**
 * Launch a share chooser for this repository's URL, falling back to a
 * constructed github.com URL when the API did not supply one.
 */
private void shareRepository() {
    String url = repository.getHtmlUrl();
    if (TextUtils.isEmpty(url)) {
        url = "https://github.com/" + repository.generateId();
    }
    startActivity(ShareUtils.create(repository.generateId(), url));
}
/**
 * Fork this repository in the background and open the fork in the browser
 * on success; show an error toast when forking fails.
 */
private void forkRepository() {
    new ForkRepositoryTask(this, repository) {
        @Override
        protected void onSuccess(Repository forked) throws Exception {
            super.onSuccess(forked);
            // A null result means the fork did not materialize.
            if (forked == null) {
                ToastUtils.show(RepositoryViewActivity.this, R.string.error_forking_repository);
            } else {
                UriLauncherActivity.launchUri(getContext(), Uri.parse(forked.getHtmlUrl()));
            }
        }

        @Override
        protected void onException(Exception e) throws RuntimeException {
            super.onException(e);
            ToastUtils.show(RepositoryViewActivity.this, R.string.error_forking_repository);
        }
    }.start();
}
}
| |
package com.bumptech.glide.test;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.graphics.drawable.Drawable;
import android.support.annotation.CheckResult;
import android.support.annotation.DrawableRes;
import android.support.annotation.FloatRange;
import android.support.annotation.IntRange;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import com.bumptech.glide.Priority;
import com.bumptech.glide.load.DecodeFormat;
import com.bumptech.glide.load.Key;
import com.bumptech.glide.load.Option;
import com.bumptech.glide.load.Transformation;
import com.bumptech.glide.load.engine.DiskCacheStrategy;
import com.bumptech.glide.load.resource.bitmap.DownsampleStrategy;
import com.bumptech.glide.request.RequestOptions;
import java.lang.Class;
import java.lang.Cloneable;
import java.lang.Override;
import java.lang.SafeVarargs;
import java.lang.SuppressWarnings;
/**
* Automatically generated from {@link com.bumptech.glide.annotation.GlideExtension} annotated classes.
*
* @see RequestOptions
* @see Extension
*/
@SuppressWarnings("deprecation")
public final class GlideOptions extends RequestOptions implements Cloneable {
  // Cached shared instances for the parameterless factories below; created
  // lazily and frozen with autoClone() so they can be safely reused.
  // NOTE(review): generated code — the lazy init is unsynchronized; presumably
  // first use happens on a single (main) thread. Confirm before hand-editing.
  private static GlideOptions fitCenterTransform0;

  private static GlideOptions centerInsideTransform1;

  private static GlideOptions centerCropTransform2;

  private static GlideOptions circleCropTransform3;

  private static GlideOptions noTransformation4;

  private static GlideOptions noAnimation5;

  // ---- static factory equivalents of RequestOptions' *Of() helpers ----

  /**
   * @see RequestOptions#sizeMultiplierOf(float)
   */
  @CheckResult
  public static GlideOptions sizeMultiplierOf(@FloatRange(from = 0.0, to = 1.0) float arg0) {
    return new GlideOptions().sizeMultiplier(arg0);
  }

  /**
   * @see RequestOptions#diskCacheStrategyOf(DiskCacheStrategy)
   */
  @CheckResult
  public static GlideOptions diskCacheStrategyOf(@NonNull DiskCacheStrategy arg0) {
    return new GlideOptions().diskCacheStrategy(arg0);
  }

  /**
   * @see RequestOptions#priorityOf(Priority)
   */
  @CheckResult
  public static GlideOptions priorityOf(@NonNull Priority arg0) {
    return new GlideOptions().priority(arg0);
  }

  /**
   * @see RequestOptions#placeholderOf(Drawable)
   */
  @CheckResult
  public static GlideOptions placeholderOf(@Nullable Drawable arg0) {
    return new GlideOptions().placeholder(arg0);
  }

  /**
   * @see RequestOptions#placeholderOf(int)
   */
  @CheckResult
  public static GlideOptions placeholderOf(@DrawableRes int arg0) {
    return new GlideOptions().placeholder(arg0);
  }

  /**
   * @see RequestOptions#errorOf(Drawable)
   */
  @CheckResult
  public static GlideOptions errorOf(@Nullable Drawable arg0) {
    return new GlideOptions().error(arg0);
  }

  /**
   * @see RequestOptions#errorOf(int)
   */
  @CheckResult
  public static GlideOptions errorOf(@DrawableRes int arg0) {
    return new GlideOptions().error(arg0);
  }

  /**
   * @see RequestOptions#skipMemoryCacheOf(boolean)
   */
  @CheckResult
  public static GlideOptions skipMemoryCacheOf(boolean skipMemoryCache) {
    return new GlideOptions().skipMemoryCache(skipMemoryCache);
  }

  /**
   * @see RequestOptions#overrideOf(int, int)
   */
  @CheckResult
  public static GlideOptions overrideOf(@IntRange(from = 0) int arg0,
      @IntRange(from = 0) int arg1) {
    return new GlideOptions().override(arg0, arg1);
  }

  /**
   * @see RequestOptions#overrideOf(int)
   */
  @CheckResult
  public static GlideOptions overrideOf(@IntRange(from = 0) int arg0) {
    return new GlideOptions().override(arg0);
  }

  /**
   * @see RequestOptions#signatureOf(Key)
   */
  @CheckResult
  public static GlideOptions signatureOf(@NonNull Key arg0) {
    return new GlideOptions().signature(arg0);
  }

  /**
   * @see RequestOptions#fitCenterTransform()
   */
  @CheckResult
  public static GlideOptions fitCenterTransform() {
    if (GlideOptions.fitCenterTransform0 == null) {
      GlideOptions.fitCenterTransform0 =
          new GlideOptions().fitCenter().autoClone();
    }
    return GlideOptions.fitCenterTransform0;
  }

  /**
   * @see RequestOptions#centerInsideTransform()
   */
  @CheckResult
  public static GlideOptions centerInsideTransform() {
    if (GlideOptions.centerInsideTransform1 == null) {
      GlideOptions.centerInsideTransform1 =
          new GlideOptions().centerInside().autoClone();
    }
    return GlideOptions.centerInsideTransform1;
  }

  /**
   * @see RequestOptions#centerCropTransform()
   */
  @CheckResult
  public static GlideOptions centerCropTransform() {
    if (GlideOptions.centerCropTransform2 == null) {
      GlideOptions.centerCropTransform2 =
          new GlideOptions().centerCrop().autoClone();
    }
    return GlideOptions.centerCropTransform2;
  }

  /**
   * @see RequestOptions#circleCropTransform()
   */
  @CheckResult
  public static GlideOptions circleCropTransform() {
    if (GlideOptions.circleCropTransform3 == null) {
      GlideOptions.circleCropTransform3 =
          new GlideOptions().circleCrop().autoClone();
    }
    return GlideOptions.circleCropTransform3;
  }

  /**
   * @see RequestOptions#bitmapTransform(Transformation)
   */
  @CheckResult
  public static GlideOptions bitmapTransform(@NonNull Transformation<Bitmap> arg0) {
    return new GlideOptions().transform(arg0);
  }

  /**
   * @see RequestOptions#noTransformation()
   */
  @CheckResult
  public static GlideOptions noTransformation() {
    if (GlideOptions.noTransformation4 == null) {
      GlideOptions.noTransformation4 =
          new GlideOptions().dontTransform().autoClone();
    }
    return GlideOptions.noTransformation4;
  }

  /**
   * @see RequestOptions#option(Option, T)
   */
  @CheckResult
  public static <T> GlideOptions option(@NonNull Option<T> arg0, @NonNull T arg1) {
    return new GlideOptions().set(arg0, arg1);
  }

  /**
   * @see RequestOptions#decodeTypeOf(Class)
   */
  @CheckResult
  public static GlideOptions decodeTypeOf(@NonNull Class<?> arg0) {
    return new GlideOptions().decode(arg0);
  }

  /**
   * @see RequestOptions#formatOf(DecodeFormat)
   */
  @CheckResult
  public static GlideOptions formatOf(@NonNull DecodeFormat arg0) {
    return new GlideOptions().format(arg0);
  }

  /**
   * @see RequestOptions#frameOf(long)
   */
  @CheckResult
  public static GlideOptions frameOf(@IntRange(from = 0) long arg0) {
    return new GlideOptions().frame(arg0);
  }

  /**
   * @see RequestOptions#downsampleOf(DownsampleStrategy)
   */
  @CheckResult
  public static GlideOptions downsampleOf(@NonNull DownsampleStrategy arg0) {
    return new GlideOptions().downsample(arg0);
  }

  /**
   * @see RequestOptions#timeoutOf(int)
   */
  @CheckResult
  public static GlideOptions timeoutOf(@IntRange(from = 0) int arg0) {
    return new GlideOptions().timeout(arg0);
  }

  /**
   * @see RequestOptions#encodeQualityOf(int)
   */
  @CheckResult
  public static GlideOptions encodeQualityOf(@IntRange(from = 0, to = 100) int arg0) {
    return new GlideOptions().encodeQuality(arg0);
  }

  /**
   * @see RequestOptions#encodeFormatOf(CompressFormat)
   */
  @CheckResult
  public static GlideOptions encodeFormatOf(@NonNull Bitmap.CompressFormat arg0) {
    return new GlideOptions().encodeFormat(arg0);
  }

  /**
   * @see RequestOptions#noAnimation()
   */
  @CheckResult
  public static GlideOptions noAnimation() {
    if (GlideOptions.noAnimation5 == null) {
      GlideOptions.noAnimation5 =
          new GlideOptions().dontAnimate().autoClone();
    }
    return GlideOptions.noAnimation5;
  }

  // ---- covariant overrides: narrow the return type to GlideOptions so
  // ---- calls chain fluently without casting at call sites.

  @Override
  @CheckResult
  public final GlideOptions sizeMultiplier(@FloatRange(from = 0.0, to = 1.0) float arg0) {
    return (GlideOptions) super.sizeMultiplier(arg0);
  }

  @Override
  @CheckResult
  public final GlideOptions useUnlimitedSourceGeneratorsPool(boolean flag) {
    return (GlideOptions) super.useUnlimitedSourceGeneratorsPool(flag);
  }

  @Override
  @CheckResult
  public final GlideOptions useAnimationPool(boolean flag) {
    return (GlideOptions) super.useAnimationPool(flag);
  }

  @Override
  @CheckResult
  public final GlideOptions onlyRetrieveFromCache(boolean flag) {
    return (GlideOptions) super.onlyRetrieveFromCache(flag);
  }

  @Override
  @CheckResult
  public final GlideOptions diskCacheStrategy(@NonNull DiskCacheStrategy arg0) {
    return (GlideOptions) super.diskCacheStrategy(arg0);
  }

  @Override
  @CheckResult
  public final GlideOptions priority(@NonNull Priority arg0) {
    return (GlideOptions) super.priority(arg0);
  }

  @Override
  @CheckResult
  public final GlideOptions placeholder(@Nullable Drawable arg0) {
    return (GlideOptions) super.placeholder(arg0);
  }

  @Override
  @CheckResult
  public final GlideOptions placeholder(@DrawableRes int arg0) {
    return (GlideOptions) super.placeholder(arg0);
  }

  @Override
  @CheckResult
  public final GlideOptions fallback(@Nullable Drawable arg0) {
    return (GlideOptions) super.fallback(arg0);
  }

  @Override
  @CheckResult
  public final GlideOptions fallback(@DrawableRes int arg0) {
    return (GlideOptions) super.fallback(arg0);
  }

  @Override
  @CheckResult
  public final GlideOptions error(@Nullable Drawable arg0) {
    return (GlideOptions) super.error(arg0);
  }

  @Override
  @CheckResult
  public final GlideOptions error(@DrawableRes int arg0) {
    return (GlideOptions) super.error(arg0);
  }

  @Override
  @CheckResult
  public final GlideOptions theme(@Nullable Resources.Theme arg0) {
    return (GlideOptions) super.theme(arg0);
  }

  @Override
  @CheckResult
  public final GlideOptions skipMemoryCache(boolean skip) {
    return (GlideOptions) super.skipMemoryCache(skip);
  }

  @Override
  @CheckResult
  public final GlideOptions override(int width, int height) {
    return (GlideOptions) super.override(width, height);
  }

  @Override
  @CheckResult
  public final GlideOptions override(int size) {
    return (GlideOptions) super.override(size);
  }

  @Override
  @CheckResult
  public final GlideOptions signature(@NonNull Key arg0) {
    return (GlideOptions) super.signature(arg0);
  }

  @Override
  @CheckResult
  public final GlideOptions clone() {
    return (GlideOptions) super.clone();
  }

  @Override
  @CheckResult
  public final <T> GlideOptions set(@NonNull Option<T> arg0, @NonNull T arg1) {
    return (GlideOptions) super.set(arg0, arg1);
  }

  @Override
  @CheckResult
  public final GlideOptions decode(@NonNull Class<?> arg0) {
    return (GlideOptions) super.decode(arg0);
  }

  @Override
  @CheckResult
  public final GlideOptions encodeFormat(@NonNull Bitmap.CompressFormat arg0) {
    return (GlideOptions) super.encodeFormat(arg0);
  }

  @Override
  @CheckResult
  public final GlideOptions encodeQuality(@IntRange(from = 0, to = 100) int arg0) {
    return (GlideOptions) super.encodeQuality(arg0);
  }

  @Override
  @CheckResult
  public final GlideOptions frame(@IntRange(from = 0) long arg0) {
    return (GlideOptions) super.frame(arg0);
  }

  @Override
  @CheckResult
  public final GlideOptions format(@NonNull DecodeFormat arg0) {
    return (GlideOptions) super.format(arg0);
  }

  @Override
  @CheckResult
  public final GlideOptions disallowHardwareConfig() {
    return (GlideOptions) super.disallowHardwareConfig();
  }

  @Override
  @CheckResult
  public final GlideOptions downsample(@NonNull DownsampleStrategy arg0) {
    return (GlideOptions) super.downsample(arg0);
  }

  @Override
  @CheckResult
  public final GlideOptions timeout(@IntRange(from = 0) int arg0) {
    return (GlideOptions) super.timeout(arg0);
  }

  @Override
  @CheckResult
  public final GlideOptions optionalCenterCrop() {
    return (GlideOptions) super.optionalCenterCrop();
  }

  @Override
  @CheckResult
  public final GlideOptions optionalFitCenter() {
    return (GlideOptions) super.optionalFitCenter();
  }

  @Override
  @CheckResult
  public final GlideOptions fitCenter() {
    return (GlideOptions) super.fitCenter();
  }

  @Override
  @CheckResult
  public final GlideOptions optionalCenterInside() {
    return (GlideOptions) super.optionalCenterInside();
  }

  @Override
  @CheckResult
  public final GlideOptions centerInside() {
    return (GlideOptions) super.centerInside();
  }

  @Override
  @CheckResult
  public final GlideOptions optionalCircleCrop() {
    return (GlideOptions) super.optionalCircleCrop();
  }

  @Override
  @CheckResult
  public final GlideOptions circleCrop() {
    return (GlideOptions) super.circleCrop();
  }

  @Override
  @CheckResult
  public final GlideOptions transform(@NonNull Transformation<Bitmap> arg0) {
    return (GlideOptions) super.transform(arg0);
  }

  @Override
  @SafeVarargs
  @SuppressWarnings("varargs")
  @CheckResult
  public final GlideOptions transforms(@NonNull Transformation<Bitmap>... arg0) {
    return (GlideOptions) super.transforms(arg0);
  }

  @Override
  @CheckResult
  public final GlideOptions optionalTransform(@NonNull Transformation<Bitmap> arg0) {
    return (GlideOptions) super.optionalTransform(arg0);
  }

  @Override
  @CheckResult
  public final <T> GlideOptions optionalTransform(@NonNull Class<T> arg0,
      @NonNull Transformation<T> arg1) {
    return (GlideOptions) super.optionalTransform(arg0, arg1);
  }

  @Override
  @CheckResult
  public final <T> GlideOptions transform(@NonNull Class<T> arg0, @NonNull Transformation<T> arg1) {
    return (GlideOptions) super.transform(arg0, arg1);
  }

  @Override
  @CheckResult
  public final GlideOptions dontTransform() {
    return (GlideOptions) super.dontTransform();
  }

  @Override
  @CheckResult
  public final GlideOptions dontAnimate() {
    return (GlideOptions) super.dontAnimate();
  }

  @Override
  @CheckResult
  public final GlideOptions apply(@NonNull RequestOptions arg0) {
    return (GlideOptions) super.apply(arg0);
  }

  @Override
  public final GlideOptions lock() {
    return (GlideOptions) super.lock();
  }

  @Override
  public final GlideOptions autoClone() {
    return (GlideOptions) super.autoClone();
  }

  // ---- methods generated from @GlideExtension-annotated Extension ----

  /**
   * @see Extension#centerCrop(RequestOptions)
   */
  @CheckResult
  public GlideOptions centerCrop() {
    // Mirror RequestOptions' auto-clone contract: mutate a clone, not this.
    if (isAutoCloneEnabled()) {
      return clone().centerCrop();
    }
    Extension.centerCrop(this);
    return this;
  }

  /**
   * @see Extension#centerCrop(RequestOptions)
   */
  @CheckResult
  public static GlideOptions centerCropOf() {
    return new GlideOptions().centerCrop();
  }
}
| |
package Accessors;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import server.UsersDetails;
public class UsersDetailsAccessor {

    Connection connect = null;
    Statement statement = null;
    ResultSet resultSet = null;
    PreparedStatement preparedStatement = null;
    ConnectionStrings connectionStringMysql = new ConnectionStrings();

    /**
     * Inserts a row into casino.UserDetails for the given user.
     *
     * <p>Uses a parameterized INSERT; the previous implementation concatenated
     * user-supplied values straight into the SQL text, which allowed SQL
     * injection and broke on values containing quotes.
     *
     * @param userDetails details to persist; its getters supply the column values
     */
    public void saveDetails(UsersDetails userDetails) {
        try {
            Class.forName("com.mysql.jdbc.Driver");
            connect = DriverManager.getConnection(connectionStringMysql.getMysqlConnection());
            // Column order matches the original statement: id, first name,
            // last name, gender, birth date, email, street, country, phone,
            // creation timestamp (now()), city id.
            preparedStatement = connect.prepareStatement(
                    "INSERT INTO casino.UserDetails VALUES (?,?,?,?,?,?,?,?,?,now(),?)");
            // setObject lets the driver convert whatever types the DTO getters
            // return, mirroring the string concatenation of the original code.
            preparedStatement.setObject(1, userDetails.getUserId());
            preparedStatement.setObject(2, userDetails.getFirstName());
            preparedStatement.setObject(3, userDetails.getLastName());
            preparedStatement.setObject(4, userDetails.getGender());
            preparedStatement.setObject(5, userDetails.getBirthDate());
            preparedStatement.setObject(6, userDetails.getEmail());
            preparedStatement.setObject(7, userDetails.getStreet());
            preparedStatement.setObject(8, userDetails.getCountry());
            preparedStatement.setObject(9, userDetails.getPhoneNumber());
            preparedStatement.setObject(10, userDetails.getCityId());
            preparedStatement.executeUpdate();
        } catch (Exception e) {
            System.out.println(e.getMessage());
        } finally {
            close();
        }
    }

    /** Closes every JDBC resource held in fields, ignoring close failures. */
    private void close() {
        closeQuietly(resultSet);
        // The original close() never closed preparedStatement, leaking it on
        // every call to saveDetails.
        closeQuietly(preparedStatement);
        closeQuietly(statement);
        closeQuietly(connect);
    }

    /** Best-effort, null-safe close of a single resource. */
    private static void closeQuietly(AutoCloseable resource) {
        if (resource == null) {
            return;
        }
        try {
            resource.close();
        } catch (Exception ignored) {
            // nothing useful to do during cleanup
        }
    }

    /**
     * Looks up the country code for the country name carried by the DTO.
     *
     * @param country DTO whose getCountry() supplies the name to search for
     * @return the matching code, or null when not found or on error
     */
    public String getCountryId(UsersDetails country) {
        String id = null;
        try {
            Class.forName("com.mysql.jdbc.Driver");
            connect = DriverManager.getConnection(connectionStringMysql.getMysqlConnection());
            preparedStatement = connect.prepareStatement(
                    "SELECT code FROM casino.countries WHERE name_en = ?");
            preparedStatement.setObject(1, country.getCountry());
            resultSet = preparedStatement.executeQuery();
            // Guard next(): the original assumed a row was always present and
            // relied on a SQLException when it was not.
            if (resultSet.next()) {
                id = resultSet.getString("code");
            }
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            close();
        }
        return id;
    }

    /**
     * Looks up the numeric city id for the city name carried by the DTO.
     *
     * @param cityId DTO whose getCity() supplies the city name
     * @return the matching CityID, or 0 when not found or on error
     */
    public int getCityIdFromDb(UsersDetails cityId) {
        int id = 0;
        try {
            Class.forName("com.mysql.jdbc.Driver");
            connect = DriverManager.getConnection(connectionStringMysql.getMysqlConnection());
            preparedStatement = connect.prepareStatement(
                    "SELECT CityID FROM casino.City WHERE City = ?");
            preparedStatement.setObject(1, cityId.getCity());
            resultSet = preparedStatement.executeQuery();
            if (resultSet.next()) {
                id = resultSet.getInt("CityID");
            }
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            close();
        }
        return id;
    }

    /**
     * Counts how many UserDetails rows exist for the DTO's user id.
     *
     * @param usersDetails DTO whose getUserId() supplies the key
     * @return the row count (0 or 1 for a primary key), or 0 on error
     */
    public int searchPaymentDetails(UsersDetails usersDetails) {
        int count = 0;
        try {
            Class.forName("com.mysql.jdbc.Driver");
            connect = DriverManager.getConnection(connectionStringMysql.getMysqlConnection());
            preparedStatement = connect.prepareStatement(
                    "SELECT COUNT(*) FROM casino.UserDetails WHERE UserDetailsID = ?");
            preparedStatement.setObject(1, usersDetails.getUserId());
            resultSet = preparedStatement.executeQuery();
            if (resultSet.next()) {
                // Read by position: more robust than the "COUNT(*)" label.
                count = resultSet.getInt(1);
            }
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            close();
        }
        return count;
    }
}
| |
package com.intranet.inscripcion.dao;
import java.util.List;
import org.hibernate.Query;
import org.hibernate.Session;
import com.intranet.bean.CursoCompuestoDTO;
import com.intranet.bean.DetalleInscriCursoDTO;
import com.intranet.bean.HorarioDTO;
import com.intranet.bean.UsuarioDTO;
import com.intranet.util.HbnConexion;
/**
 * Hibernate DAO for student course enrollment ("inscripcion") queries.
 * Each public method opens its own transaction on the current session and
 * commits before returning. The HQL strings are order-sensitive; do not
 * reformat them casually.
 */
public class InscripcionDAO {

    /**
     * Courses the student failed (promedioC &lt; 12.5) in the enrollment of
     * the previous term.
     */
    @SuppressWarnings("unchecked")
    public List<CursoCompuestoDTO> getCursosJalados(UsuarioDTO usuario){
        Session s=HbnConexion.getSessionFactory().getCurrentSession();
        s.beginTransaction();
        // Separate subquery: enrollment id of the previous cycle.
        int qUltInscripcion=inscripcionDelCicloAnterior(s,usuario);
        // Main query
        Query q=s.createQuery("SELECT cc FROM UsuarioDTO u JOIN u.alumnoDTOs a " +
        "JOIN a.inscripcionAlumnoDTOs i JOIN i.detalleInscriCursoDTOs dic " +
        "JOIN dic.cursoCompuestoDTO cc JOIN cc.cursoDTO cu " +
        "WHERE dic.promedioC<12.5 AND u.user=:usuario " +
        "AND i.idInscripcionAlumno=:qUltimaInscripcion");
        q.setParameter("usuario", usuario.getUser());
        q.setParameter("qUltimaInscripcion", qUltInscripcion);
        List<CursoCompuestoDTO> cursos=(List<CursoCompuestoDTO>)q.list();
        s.getTransaction().commit();
        return cursos;
    }

    /**
     * Courses from the student's program that remain to be taken, excluding
     * ones already enrolled, failed-category clashes and cycle mismatches.
     * NOTE(review): the nested NOT IN subqueries reuse aliases (xcu, xdcc)
     * across levels — presumably intentional shadowing; verify before editing.
     */
    @SuppressWarnings("unchecked")
    public List<CursoCompuestoDTO> getCursosRestantes(UsuarioDTO usuario){
        Session s=HbnConexion.getSessionFactory().getCurrentSession();
        s.beginTransaction();
        // Separate subquery: enrollment id of the previous cycle.
        int qUltInscripcion=inscripcionDelCicloAnterior(s,usuario);
        // Main query
        Query q=s.createQuery("SELECT cc FROM UsuarioDTO u JOIN u.alumnoDTOs a " +
        "JOIN a.inscripcionAlumnoDTOs i JOIN a.carreraDTO c " +
        "JOIN c.detalleCursoCarreraCicloDTOs dcc JOIN dcc.cursoDTO cu JOIN cu.cursoCompuestoDTOs cc " +
        "WHERE cu.idCurso NOT IN (SELECT xcc.cursoDTO.idCurso FROM InscripcionAlumnoDTO xi " +
        "JOIN xi.detalleInscriCursoDTOs xdic JOIN xdic.cursoCompuestoDTO xcc JOIN xcc.cursoDTO xcu JOIN xcu.detalleCursoCarreraCicloDTOs xdcc " +
        "WHERE xi.idInscripcionAlumno=:qidInscripcion-1 and xdcc.carreraDTO.idCarrera=c.idCarrera and xdcc.cicloDTO.idCiclo=i.cicloDTO.idCiclo-1) " +
        "AND cu.categoriaCursoDTO.idCategoriaCurso NOT IN (SELECT xcu.categoriaCursoDTO.idCategoriaCurso " +
        "FROM InscripcionAlumnoDTO xi JOIN xi.detalleInscriCursoDTOs dic JOIN dic.cursoCompuestoDTO xcc JOIN xcc.cursoDTO xcu " +
        "WHERE xi.idInscripcionAlumno=:qidInscripcion and dic.promedioC<12.5 and xi.cicloDTO.idCiclo=i.cicloDTO.idCiclo) " +
        "AND cu.idCurso NOT IN (SELECT xcc.cursoDTO.idCurso " +
        "FROM InscripcionAlumnoDTO xi JOIN xi.detalleInscriCursoDTOs xdic JOIN xdic.cursoCompuestoDTO xcc " +
        "WHERE xi.idInscripcionAlumno=:qidInscripcion and xi.cicloDTO.idCiclo=i.cicloDTO.idCiclo) " +
        "AND cu.idCurso NOT IN (SELECT xcu.idCurso " +
        "FROM DetalleCursoCarreraCicloDTO xdcc JOIN xdcc.cursoDTO xcu " +
        "WHERE xcu.categoriaCursoDTO.idCategoriaCurso not in (SELECT xcu.categoriaCursoDTO.idCategoriaCurso FROM InscripcionAlumnoDTO xi " +
        "JOIN xi.detalleInscriCursoDTOs xdic JOIN xdic.cursoCompuestoDTO xcc JOIN xcc.cursoDTO xcu JOIN xcu.detalleCursoCarreraCicloDTOs xdcc " +
        "WHERE xi.idInscripcionAlumno BETWEEN :qidInscripcion-1 AND :qidInscripcion and xdcc.carreraDTO.idCarrera=c.idCarrera and xdcc.cicloDTO.idCiclo=i.cicloDTO.idCiclo-1) " +
        "AND xdcc.cicloDTO.idCiclo=i.cicloDTO.idCiclo) " +
        "AND dcc.cicloDTO.idCiclo BETWEEN i.cicloDTO.idCiclo-1 AND i.cicloDTO.idCiclo AND u.user=:usuario " +
        "AND i.idInscripcionAlumno=:qidInscripcion " +
        "group by cu.idCurso");
        q.setParameter("usuario", usuario.getUser());
        q.setParameter("qidInscripcion", qUltInscripcion);
        List<CursoCompuestoDTO> cursos=(List<CursoCompuestoDTO>)q.list();
        s.getTransaction().commit();
        return cursos;
    }

    /**
     * Courses scheduled for the cycle following the student's current one,
     * excluding categories the student failed or has not yet completed.
     */
    @SuppressWarnings("unchecked")
    public List<CursoCompuestoDTO> getCursosSiguientes(UsuarioDTO usuario){
        Session s=HbnConexion.getSessionFactory().getCurrentSession();
        s.beginTransaction();
        // Separate subquery: enrollment id of the previous cycle.
        int qUltInscripcion=inscripcionDelCicloAnterior(s,usuario);
        // Main query
        Query q=s.createQuery("SELECT cc FROM UsuarioDTO u JOIN u.alumnoDTOs a " +
        "JOIN a.inscripcionAlumnoDTOs i JOIN a.carreraDTO c " +
        "JOIN c.detalleCursoCarreraCicloDTOs dcc JOIN dcc.cursoDTO cu JOIN cu.cursoCompuestoDTOs cc " +
        "WHERE cu.categoriaCursoDTO.idCategoriaCurso NOT IN (SELECT xcu.categoriaCursoDTO.idCategoriaCurso " +
        "FROM DetalleInscriCursoDTO dic JOIN dic.cursoCompuestoDTO cc JOIN cc.cursoDTO xcu " +
        "WHERE dic.inscripcionAlumnoDTO.idInscripcionAlumno=i.idInscripcionAlumno AND dic.promedioC<12.5) " +
        "AND cu.categoriaCursoDTO.idCategoriaCurso NOT IN (SELECT xxcu.categoriaCursoDTO.idCategoriaCurso " +
        "FROM DetalleCursoCarreraCicloDTO xxdcc JOIN xxdcc.cursoDTO xxcu " +
        "WHERE xxcu.idCurso NOT IN (SELECT xcc.cursoDTO.idCurso " +
        "FROM DetalleInscriCursoDTO xdic JOIN xdic.cursoCompuestoDTO xcc " +
        "WHERE xdic.inscripcionAlumnoDTO.idInscripcionAlumno=i.idInscripcionAlumno) " +
        "AND xxdcc.cicloDTO.idCiclo=i.cicloDTO.idCiclo AND xxdcc.carreraDTO.idCarrera=c.idCarrera) " +
        "AND dcc.cicloDTO.idCiclo=(i.cicloDTO.idCiclo+1) AND u.user=:usuario " +
        "AND i.idInscripcionAlumno=:qUltimaInscripcion");
        q.setParameter("usuario", usuario.getUser());
        q.setParameter("qUltimaInscripcion", qUltInscripcion);
        List<CursoCompuestoDTO> cursos=(List<CursoCompuestoDTO>)q.list();
        s.getTransaction().commit();
        return cursos;
    }

    /**
     * Number of program courses in the student's current cycle that are not
     * part of the enrollment identified by the previous-cycle subquery.
     */
    public int getCantidadDeCursosRestantes4Parameter(UsuarioDTO usuario){
        int result=0;
        Session s=HbnConexion.getSessionFactory().getCurrentSession();
        s.beginTransaction();
        // Separate subquery: enrollment id of the previous cycle.
        int qUltInscripcion=inscripcionDelCicloAnterior(s,usuario);
        // Main query
        Query q=s.createQuery("SELECT count(*) FROM UsuarioDTO u JOIN u.alumnoDTOs a " +
        "JOIN a.inscripcionAlumnoDTOs i JOIN a.carreraDTO c " +
        "JOIN c.detalleCursoCarreraCicloDTOs dcc JOIN dcc.cursoDTO cu JOIN cu.cursoCompuestoDTOs cc " +
        "WHERE cu.idCurso NOT IN (SELECT xcc.cursoDTO.idCurso " +
        "FROM DetalleInscriCursoDTO xdic JOIN xdic.cursoCompuestoDTO xcc " +
        "WHERE xdic.inscripcionAlumnoDTO.idInscripcionAlumno=i.idInscripcionAlumno) " +
        "AND dcc.cicloDTO.idCiclo=i.cicloDTO.idCiclo AND u.user=:usuario " +
        "AND i.idInscripcionAlumno=:qUltimaInscripcion");
        q.setParameter("usuario", usuario.getUser());
        q.setParameter("qUltimaInscripcion", qUltInscripcion);
        // count(*) comes back as a Long; narrow it to the int return type.
        result=((Long)q.uniqueResult()).intValue();
        // Value to return
        s.getTransaction().commit();
        return result;
    }

    /**
     * The "aprobado" value recorded on the previous-cycle enrollment; used as
     * the number of courses the student may take.
     */
    public int getCantidadDeCursosQuePuedeLlevar(UsuarioDTO usuario){
        int result=0;
        Session s=HbnConexion.getSessionFactory().getCurrentSession();
        s.beginTransaction();
        // Separate subquery: enrollment id of the previous cycle.
        int qUltInscripcion=inscripcionDelCicloAnterior(s,usuario);
        // Main query
        Query q=s.createQuery("Select i.aprobado " +
        "FROM InscripcionAlumnoDTO i JOIN i.alumnoDTO a JOIN a.usuarioDTO u " +
        "WHERE u.user=:usuario AND i.idInscripcionAlumno=:qUltimaInscripcion");
        q.setParameter("usuario", usuario.getUser());
        q.setParameter("qUltimaInscripcion", qUltInscripcion);
        result=(int)q.uniqueResult();
        s.getTransaction().commit();
        return result;
    }

    /** Courses attached to the student's most recent enrollment. */
    @SuppressWarnings("unchecked")
    public List<CursoCompuestoDTO> getCursosRecienteMatriculado(UsuarioDTO usuario){
        Session s=HbnConexion.getSessionFactory().getCurrentSession();
        s.beginTransaction();
        // Separate subquery: id of the latest enrollment.
        int qUltInscripcion=ultimaInscripcion(s,usuario);
        // Main query
        Query q=s.createQuery("SELECT cc " +
        "FROM CursoCompuestoDTO cc JOIN cc.detalleInscriCursoDTOs dic " +
        "JOIN dic.inscripcionAlumnoDTO xi JOIN xi.alumnoDTO a JOIN a.usuarioDTO u " +
        "WHERE u.user=:usuario AND xi.idInscripcionAlumno=:nidInscripcion " +
        "ORDER BY xi.inscripcionDTO.idInscripcion desc");
        q.setParameter("usuario", usuario.getUser());
        q.setParameter("nidInscripcion", qUltInscripcion);
        List<CursoCompuestoDTO> cursos=(List<CursoCompuestoDTO>)q.list();
        s.getTransaction().commit();
        return cursos;
    }

    /** Schedule entries for the student's most recent enrollment. */
    @SuppressWarnings("unchecked")
    public List<HorarioDTO> getHorarioAlumno(UsuarioDTO usuario) {
        Session s=HbnConexion.getSessionFactory().getCurrentSession();
        s.beginTransaction();
        // Separate subquery: id of the latest enrollment.
        int qUltInscripcion=ultimaInscripcion(s,usuario);
        // Main query
        Query q=s.createQuery("SELECT h " +
        "FROM HorarioDTO h JOIN h.cursoCompuestoDTO cc JOIN cc.detalleInscriCursoDTOs dic JOIN dic.inscripcionAlumnoDTO xi JOIN xi.alumnoDTO a JOIN a.usuarioDTO u " +
        "WHERE u.user=:usuario AND xi.idInscripcionAlumno=:nidInscripcion " +
        "ORDER BY xi.inscripcionDTO.idInscripcion desc");
        q.setParameter("usuario", usuario.getUser());
        q.setParameter("nidInscripcion", qUltInscripcion);
        List<HorarioDTO> cursos=(List<HorarioDTO>)q.list();
        s.getTransaction().commit();
        return cursos;
    }

    /**
     * Schedule entries for one composite course. The usuario parameter is
     * currently unused but kept for interface stability.
     */
    @SuppressWarnings("unchecked")
    public List<HorarioDTO> getHorarioPorCurso(UsuarioDTO usuario,CursoCompuestoDTO cc) {
        Session s=HbnConexion.getSessionFactory().getCurrentSession();
        s.beginTransaction();
        // Main query
        Query q=s.createQuery("SELECT h "+
        "FROM HorarioDTO h JOIN h.cursoCompuestoDTO cc "+
        "WHERE cc.idCursoCompuesto=:qidCursoCompuesto");
        q.setParameter("qidCursoCompuesto", cc.getIdCursoCompuesto());
        List<HorarioDTO> cursos=(List<HorarioDTO>)q.list();
        s.getTransaction().commit();
        return cursos;
    }

    /**
     * Enrollment id associated with the cycle before the current one.
     * NOTE(review): uniqueResult() can return null when the student has no
     * prior enrollment, which would NPE on the (int) unbox — confirm callers
     * only invoke this for students with history.
     */
    public int inscripcionDelCicloAnterior(Session s, UsuarioDTO usuario){
        int result=0;
        Query qUltimaInscripcion=s.createQuery("Select xi.idInscripcionAlumno " +
        "FROM InscripcionDTO ig,InscripcionAlumnoDTO xi JOIN xi.alumnoDTO a JOIN a.usuarioDTO u " +
        "WHERE u.user=:usuario AND xi.inscripcionDTO.idInscripcion=ig.idInscripcion-1 " +
        "ORDER BY xi.inscripcionDTO.idInscripcion desc");
        qUltimaInscripcion.setParameter("usuario", usuario.getUser());
        qUltimaInscripcion.setMaxResults(1);
        result=(int)qUltimaInscripcion.uniqueResult();
        return result;
    }

    /**
     * Id of the student's latest enrollment in the newest period, or 0 when
     * none exists. Accepts a null session, in which case it opens (and
     * commits) its own transaction; esNulo tracks that ownership.
     */
    public int ultimaInscripcion(Session s, UsuarioDTO usuario){
        int esNulo=0;
        if(s==null){
            esNulo=1;
            s=HbnConexion.getSessionFactory().getCurrentSession();
            s.beginTransaction();
        }
        int result=-1;
        Query qtopPeriodo=s.createQuery("select p.idPeriodo " +
        "FROM PeriodoDTO p " +
        "ORDER BY p.idPeriodo desc");
        qtopPeriodo.setMaxResults(1);
        int topPeriodo=(int)qtopPeriodo.uniqueResult();
        Query qUltimaInscripcion=s.createQuery("select xi.idInscripcionAlumno " +
        "FROM PeriodoDTO p JOIN p.inscripcionDTOs ig JOIN ig.inscripcionAlumnoDTOs xi JOIN xi.alumnoDTO a JOIN a.usuarioDTO u " +
        "WHERE u.user=:usuario AND p.idPeriodo=:qidPeriodo " +
        "ORDER BY xi.idInscripcionAlumno desc");
        qUltimaInscripcion.setParameter("usuario", usuario.getUser());
        qUltimaInscripcion.setParameter("qidPeriodo",topPeriodo );
        qUltimaInscripcion.setMaxResults(1);
        try {
            result=(int)qUltimaInscripcion.uniqueResult();
            if(esNulo==1){
                s.getTransaction().commit();
            }
        } catch (NullPointerException n) {
            // uniqueResult() returned null (no enrollment): report 0.
            result=0;
        }
        return result;
    }

    /** Composite courses belonging to any of the given course ids. */
    @SuppressWarnings("unchecked")
    public List<CursoCompuestoDTO> getCursoCompuestoPorCurso(List<Integer> cursosJalados) {
        Session s=HbnConexion.getSessionFactory().getCurrentSession();
        s.beginTransaction();
        Query q=s.createQuery("SELECT cc FROM CursoCompuestoDTO cc JOIN cc.cursoDTO c WHERE c.idCurso IN (:ArrayidCurso)");
        q.setParameterList("ArrayidCurso", cursosJalados);
        List<CursoCompuestoDTO> innerCc=(List<CursoCompuestoDTO>)q.list();
        s.getTransaction().commit();
        return innerCc;
    }

    /** Persists a new enrollment-course detail row. */
    public void add(DetalleInscriCursoDTO dic){
        Session s=HbnConexion.getSessionFactory().getCurrentSession();
        s.beginTransaction();
        s.persist(dic);
        s.getTransaction().commit();
    }

    /** Deletes an enrollment-course detail row. */
    public void remove(DetalleInscriCursoDTO dic){
        Session s=HbnConexion.getSessionFactory().getCurrentSession();
        s.beginTransaction();
        s.delete(dic);
        s.getTransaction().commit();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.test.spring;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import org.apache.camel.CamelContext;
import org.apache.camel.ExtendedCamelContext;
import org.apache.camel.spring.SpringCamelContext;
import org.apache.camel.test.ExcludingPackageScanClassResolver;
import org.apache.camel.test.junit4.CamelTestSupport;
import org.apache.camel.util.IOHelper;
import org.apache.camel.util.ObjectHelper;
import org.junit.After;
import org.springframework.beans.factory.support.RootBeanDefinition;
import org.springframework.context.ApplicationContext;
import org.springframework.context.support.AbstractApplicationContext;
import org.springframework.context.support.GenericApplicationContext;
/**
* Base test-class for classic Spring application such as standalone, web applications.
* Do <tt>not</tt> use this class for Spring Boot testing, instead use <code>@RunWith(CamelSpringBootRunner.class)</code>.
*/
public abstract class CamelSpringTestSupport extends CamelTestSupport {

    // Holds the application context created for the first test method when
    // "create CamelContext per class" mode is enabled, so later test methods reuse it.
    protected static ThreadLocal<AbstractApplicationContext> threadAppContext = new ThreadLocal<>();

    // Serializes context creation across tests; also guards the global
    // SpringCamelContext no-start flag toggled in doPreSetup().
    protected static Object lock = new Object();

    // The Spring application context backing the current test.
    protected AbstractApplicationContext applicationContext;

    /**
     * Creates the Spring application context for this test.
     * <p/>
     * When {@link #activeProfiles()} is used, implementations must create the
     * context <b>without</b> refreshing it (see doCreateApplicationContext()).
     */
    protected abstract AbstractApplicationContext createApplicationContext();

    @Override
    public void postProcessTest() throws Exception {
        // In per-class mode re-read the shared context created in doPreSetup()
        // (possibly by an earlier test method on this thread).
        if (isCreateCamelContextPerClass()) {
            applicationContext = threadAppContext.get();
        }
        super.postProcessTest();
    }

    @Override
    public void doPreSetup() throws Exception {
        if (!"true".equalsIgnoreCase(System.getProperty("skipStartingCamelContext"))) {
            // tell camel-spring it should not trigger starting CamelContext, since we do that later
            // after we are finished setting up the unit test
            synchronized (lock) {
                SpringCamelContext.setNoStart(true);
                if (isCreateCamelContextPerClass()) {
                    // reuse the context created by an earlier test method, if any
                    applicationContext = threadAppContext.get();
                    if (applicationContext == null) {
                        applicationContext = doCreateApplicationContext();
                        threadAppContext.set(applicationContext);
                    }
                } else {
                    applicationContext = doCreateApplicationContext();
                }
                SpringCamelContext.setNoStart(false);
            }
        } else {
            log.info("Skipping starting CamelContext as system property skipStartingCamelContext is set to be true.");
        }
    }

    // Creates the context, applies any active profiles (which requires the context
    // to not yet be refreshed), and ensures the context ends up refreshed exactly once.
    private AbstractApplicationContext doCreateApplicationContext() {
        AbstractApplicationContext context = createApplicationContext();
        assertNotNull("Should have created a valid Spring application context", context);

        String[] profiles = activeProfiles();
        if (profiles != null && profiles.length > 0) {
            // the context must not be active, as profiles cannot be set on a refreshed context
            if (context.isActive()) {
                throw new IllegalStateException("Cannot active profiles: " + Arrays.asList(profiles) + " on active Spring application context: " + context
                    + ". The code in your createApplicationContext() method should be adjusted to create the application context with refresh = false as parameter");
            }
            log.info("Spring activating profiles: {}", Arrays.asList(profiles));
            context.getEnvironment().setActiveProfiles(profiles);
        }

        // ensure the context has been refreshed at least once
        if (!context.isActive()) {
            context.refresh();
        }

        return context;
    }

    @Override
    @After
    public void tearDown() throws Exception {
        super.tearDown();

        // per-method contexts are closed here; per-class contexts stay open
        // until doPostTearDown() runs after the last test of the class
        if (!isCreateCamelContextPerClass()) {
            IOHelper.close(applicationContext);
            applicationContext = null;
        }
    }

    @Override
    public void doPostTearDown() throws Exception {
        super.doPostTearDown();

        // close the shared per-class context (if one was created) and clear the ThreadLocal
        if (threadAppContext.get() != null) {
            IOHelper.close(threadAppContext.get());
            threadAppContext.remove();
        }
    }

    /**
     * Create a parent context that initializes a
     * {@link org.apache.camel.spi.PackageScanClassResolver} to exclude a set of given classes from
     * being resolved. Typically this is used at test time to exclude certain routes,
     * which might otherwise be just noisy, from being discovered and initialized.
     * <p/>
     * To use this filtering mechanism it is necessary to provide the
     * {@link org.springframework.context.ApplicationContext} returned from here as the parent context to
     * your test context e.g.
     *
     * <pre>
     * protected AbstractXmlApplicationContext createApplicationContext() {
     *     return new ClassPathXmlApplicationContext(new String[] {"test-context.xml"}, getRouteExcludingApplicationContext());
     * }
     * </pre>
     *
     * This will, in turn, call the template methods <code>excludedRoutes</code>
     * and <code>excludedRoute</code> to determine the classes to be excluded from scanning.
     *
     * @return ApplicationContext a parent {@link org.springframework.context.ApplicationContext} configured
     *         to exclude certain classes from package scanning
     */
    protected ApplicationContext getRouteExcludingApplicationContext() {
        GenericApplicationContext routeExcludingContext = new GenericApplicationContext();
        routeExcludingContext.registerBeanDefinition("excludingResolver", new RootBeanDefinition(ExcludingPackageScanClassResolver.class));
        // refresh so the resolver bean is instantiated before we configure it
        routeExcludingContext.refresh();

        ExcludingPackageScanClassResolver excludingResolver = routeExcludingContext.getBean("excludingResolver", ExcludingPackageScanClassResolver.class);
        List<Class<?>> excluded = Arrays.asList(excludeRoutes());
        excludingResolver.setExcludedClasses(new HashSet<>(excluded));

        return routeExcludingContext;
    }

    /**
     * Template method used to exclude {@link org.apache.camel.Route} from the test time context
     * route scanning.
     * <p/>
     * By default delegates to {@link #excludeRoute()}, wrapping its result (if any) in an array.
     *
     * @return Class[] the classes to be excluded from test time context route scanning
     */
    protected Class<?>[] excludeRoutes() {
        Class<?> excludedRoute = excludeRoute();
        return excludedRoute != null ? new Class[] {excludedRoute} : new Class[0];
    }

    /**
     * Template method used to exclude a {@link org.apache.camel.Route} from the test camel context.
     * Returns <tt>null</tt> by default, meaning nothing is excluded.
     */
    protected Class<?> excludeRoute() {
        return null;
    }

    /**
     * Looks up the mandatory spring bean of the given name and type, failing if
     * it is not present or the correct type.
     *
     * @param type the expected bean type
     * @param name the bean name to look up
     * @return the bean cast to the requested type (never <tt>null</tt>; the test fails instead)
     */
    public <T> T getMandatoryBean(Class<T> type, String name) {
        Object value = applicationContext.getBean(name);
        assertNotNull("No spring bean found for name <" + name + ">", value);
        if (type.isInstance(value)) {
            return type.cast(value);
        } else {
            // fail() throws, so this return is never reached at runtime
            fail("Spring bean <" + name + "> is not an instanceof " + type.getName() + " but is of type " + ObjectHelper.className(value));
            return null;
        }
    }

    /**
     * Which active profiles should be used.
     * <p/>
     * <b>Important:</b> When using active profiles, then the code in {@link #createApplicationContext()} should create
     * the Spring {@link org.springframework.context.support.AbstractApplicationContext} without refreshing. For example creating an
     * {@link org.springframework.context.support.ClassPathXmlApplicationContext} you would need to pass in
     * <tt>false</tt> in the refresh parameter, in the constructor.
     * Camel will throw an {@link IllegalStateException} if this is not done correctly.
     * The reason is that we cannot activate profiles <b>after</b> a Spring application context has already
     * been refreshed, and is active.
     *
     * @return an array of active profiles to use, use <tt>null</tt> to not use any active profiles.
     */
    protected String[] activeProfiles() {
        return null;
    }

    @Override
    protected CamelContext createCamelContext() throws Exception {
        // don't start the SpringCamelContext here; the surrounding test lifecycle
        // starts it once the unit test is fully set up (see doPreSetup)
        return SpringCamelContext.springCamelContext(applicationContext, false);
    }
}
| |
/*
* ModeShape (http://www.modeshape.org)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.modeshape.jcr;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Set;
import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;
import org.modeshape.common.annotation.Immutable;
import org.modeshape.common.collection.ReadOnlyIterator;
import org.modeshape.common.logging.Logger;
import org.modeshape.jcr.api.index.IndexColumnDefinition;
import org.modeshape.jcr.api.index.IndexDefinition;
@Immutable
final class RepositoryIndexDefinition implements IndexDefinition {

    private static final Logger LOGGER = Logger.getLogger(RepositoryIndexDefinition.class);

    /**
     * Creates a copy of the supplied definition, preserving its enabled flag.
     *
     * @param other the definition to copy; also supplies the column definitions via its Iterable view
     * @return a new immutable definition with the same state
     */
    public static IndexDefinition createFrom( IndexDefinition other ) {
        return new RepositoryIndexDefinition(other.getName(), other.getProviderName(), other.getKind(), other.getNodeTypeName(),
                                             other, other.getIndexProperties(), other.getDescription(), other.isSynchronous(),
                                             other.isEnabled(), other.getWorkspaceMatchRule());
    }

    /**
     * Creates a copy of the supplied definition with an explicit enabled flag.
     *
     * @param other the definition to copy
     * @param isEnabled the enabled state for the copy (overrides {@code other.isEnabled()})
     * @return a new immutable definition
     */
    public static IndexDefinition createFrom( IndexDefinition other,
                                              boolean isEnabled ) {
        return new RepositoryIndexDefinition(other.getName(), other.getProviderName(), other.getKind(), other.getNodeTypeName(),
                                             other, other.getIndexProperties(), other.getDescription(), other.isSynchronous(),
                                             isEnabled, other.getWorkspaceMatchRule());
    }

    private final String name;
    private final String providerName;
    private final IndexKind kind;
    private final String nodeTypeName;
    private final String description;
    private final boolean synchronous;
    private final boolean enabled;
    // column definitions in declaration order, plus a by-property-name lookup map
    private final List<IndexColumnDefinition> columnDefns;
    private final Map<String, IndexColumnDefinition> columnDefnsByName;
    private final Map<String, Object> extendedProperties;
    private final WorkspaceMatchRule workspaceRule;

    /**
     * Builds an immutable definition; the column definitions are copied into
     * internal collections. At least one column definition is required
     * (enforced by assertion only, so it is unchecked when assertions are disabled).
     * Null {@code nodeTypeName} defaults to nt:base; null {@code description} to "".
     */
    RepositoryIndexDefinition( String name,
                               String providerName,
                               IndexKind kind,
                               String nodeTypeName,
                               Iterable<IndexColumnDefinition> columnDefns,
                               Map<String, Object> extendedProperties,
                               String description,
                               boolean synchronous,
                               boolean enabled,
                               WorkspaceMatchRule workspaceRule ) {
        assert name != null;
        assert providerName != null;
        assert columnDefns != null;
        assert extendedProperties != null;
        assert workspaceRule != null;
        this.name = name;
        this.providerName = providerName;
        this.kind = kind;
        this.nodeTypeName = nodeTypeName != null ? nodeTypeName : JcrNtLexicon.BASE.getString();
        this.columnDefns = new ArrayList<>();
        // NOTE(review): the extendedProperties map is stored by reference, not copied —
        // callers must not mutate it afterwards for this class to stay immutable.
        this.extendedProperties = extendedProperties;
        this.description = description != null ? description : "";
        this.enabled = enabled;
        this.synchronous = synchronous;
        this.workspaceRule = workspaceRule;
        this.columnDefnsByName = new HashMap<>();
        for (IndexColumnDefinition columnDefn : columnDefns) {
            assert columnDefn != null;
            this.columnDefns.add(columnDefn);
            // last column wins if two columns share a property name
            this.columnDefnsByName.put(columnDefn.getPropertyName(), columnDefn);
        }
        assert !this.columnDefns.isEmpty();
    }

    @Override
    public String getName() {
        return name;
    }

    @Override
    public String getProviderName() {
        return providerName;
    }

    @Override
    public IndexKind getKind() {
        return kind;
    }

    @Override
    public String getNodeTypeName() {
        return nodeTypeName;
    }

    @Override
    public String getDescription() {
        return description;
    }

    @Override
    public boolean isSynchronous() {
        return synchronous;
    }

    @Override
    public boolean isEnabled() {
        return enabled;
    }

    @Override
    public boolean hasSingleColumn() {
        return columnDefns.size() == 1;
    }

    @Override
    public int size() {
        // number of column definitions
        return columnDefns.size();
    }

    @Override
    public IndexColumnDefinition getColumnDefinition( int position ) throws NoSuchElementException {
        // NOTE(review): List.get throws IndexOutOfBoundsException, not the declared
        // NoSuchElementException, for an invalid position — confirm against the API contract.
        return columnDefns.get(position);
    }

    @Override
    public Object getIndexProperty( String propertyName ) {
        return extendedProperties.get(propertyName);
    }

    @Override
    public Map<String, Object> getIndexProperties() {
        // read-only view; backing map is the one supplied at construction
        return Collections.unmodifiableMap(extendedProperties);
    }

    @Override
    public WorkspaceMatchRule getWorkspaceMatchRule() {
        return workspaceRule;
    }

    @Override
    public Iterator<IndexColumnDefinition> iterator() {
        // read-only iterator so callers cannot remove columns
        return ReadOnlyIterator.around(columnDefns.iterator());
    }

    @Override
    public boolean appliesToProperty( String propertyName ) {
        return columnDefnsByName.containsKey(propertyName);
    }

    @Override
    public String toString() {
        // diagnostic form: name@provider nodeType=... column(s)=... kind=... sync=... workspaces=...
        StringBuilder sb = new StringBuilder();
        sb.append(getName()).append('@').append(getProviderName());
        sb.append(" nodeType=").append(nodeTypeName);
        if (columnDefns.size() == 1) {
            sb.append(" column=");
        } else {
            sb.append(" columns=");
        }
        boolean first = true;
        for (IndexColumnDefinition col : columnDefns) {
            if (first) first = false;
            else sb.append(",");
            sb.append(col);
        }
        sb.append(" kind=").append(getKind());
        sb.append(" sync=").append(isSynchronous());
        sb.append(" workspaces=").append(workspaceRule);
        return sb.toString();
    }

    /**
     * Builds a match rule from explicit workspace names. Blank names are skipped;
     * with no usable names the match-all rule is returned.
     * NOTE(review): duplicate names are de-duplicated in the name set but still
     * appended to the definition string — confirm whether that is intended.
     */
    protected static WorkspaceMatchRule workspaceMatchRule( String... workspaceNames ) {
        if (workspaceNames == null || workspaceNames.length == 0) return MATCH_ALL_WORKSPACES_RULE;
        Set<String> names = new HashSet<>();
        StringBuilder sb = new StringBuilder();
        for (String name : workspaceNames) {
            name = name.trim();
            if (name.length() != 0) {
                if (names.size() != 0) sb.append(",");
                names.add(name);
                sb.append(name);
            }
        }
        if (!names.isEmpty()) {
            return new MultipleWorkspaceMatchRule(sb.toString(), names);
        }
        return MATCH_ALL_WORKSPACES_RULE;
    }

    /**
     * Parses a single rule string, trying in order: the match-all token, a regular
     * expression, a comma-separated list of names, and finally an exact-name match.
     */
    public static WorkspaceMatchRule workspaceMatchRule( String rule ) {
        if (rule == null) return MATCH_ALL_WORKSPACES_RULE;
        rule = rule.trim();
        if (rule.length() == 0 || MATCH_ALL_WORKSPACES.equals(rule)) return MATCH_ALL_WORKSPACES_RULE;
        try {
            return new RegexWorkspaceMatchRule(rule, Pattern.compile(rule));
        } catch (PatternSyntaxException e) {
            // not a valid regex; fall through to the list/exact interpretations
            LOGGER.debug("Unable to parse workspace rule '{0}' into regular expression", rule);
        }
        try {
            String[] names = rule.split(",");
            Set<String> workspaceNames = new HashSet<>();
            for (String name : names) {
                if (name.trim().length() != 0) workspaceNames.add(name.trim());
            }
            if (!workspaceNames.isEmpty()) return new MultipleWorkspaceMatchRule(rule, workspaceNames);
        } catch (PatternSyntaxException e) {
            // NOTE(review): String.split(",") uses a constant, valid pattern, so this
            // catch looks unreachable — confirm before relying on it.
            LOGGER.debug("Unable to parse workspace rule '{0}' into comma-separate list of workspace names", rule);
        }
        return new ExactWorkspaceMatchRule(rule);
    }

    // Token meaning "applies to every workspace".
    public static final String MATCH_ALL_WORKSPACES = "*";
    protected static final WorkspaceMatchRule MATCH_ALL_WORKSPACES_RULE = new MatchAllWorkspaces();

    /** Rule that matches every workspace. */
    protected static class MatchAllWorkspaces implements WorkspaceMatchRule {
        @Override
        public boolean usedInWorkspace( String workspaceName ) {
            return true;
        }

        @Override
        public String getDefinition() {
            return MATCH_ALL_WORKSPACES;
        }

        @Override
        public String toString() {
            return getDefinition();
        }
    }

    /** Rule that matches workspace names against a compiled regular expression. */
    protected static class RegexWorkspaceMatchRule implements WorkspaceMatchRule {
        private final String rule;
        private final Pattern pattern;

        protected RegexWorkspaceMatchRule( String rule,
                                           Pattern pattern ) {
            this.rule = rule;
            this.pattern = pattern;
        }

        @Override
        public boolean usedInWorkspace( String workspaceName ) {
            // whole-name match, not a substring find
            return pattern.matcher(workspaceName).matches();
        }

        @Override
        public String getDefinition() {
            return rule;
        }

        @Override
        public String toString() {
            return getDefinition();
        }
    }

    /** Rule that matches exactly one workspace name. */
    protected static class ExactWorkspaceMatchRule implements WorkspaceMatchRule {
        private final String workspaceName;

        protected ExactWorkspaceMatchRule( String workspaceName ) {
            this.workspaceName = workspaceName;
        }

        @Override
        public boolean usedInWorkspace( String workspaceName ) {
            return this.workspaceName.equals(workspaceName);
        }

        @Override
        public String getDefinition() {
            return workspaceName;
        }

        @Override
        public String toString() {
            return getDefinition();
        }
    }

    /** Rule that matches any workspace name in a fixed set. */
    protected static class MultipleWorkspaceMatchRule implements WorkspaceMatchRule {
        private final String rule;
        private final Set<String> workspaceNames;

        protected MultipleWorkspaceMatchRule( String rule,
                                              Set<String> workspaceNames ) {
            this.rule = rule;
            this.workspaceNames = workspaceNames;
        }

        @Override
        public boolean usedInWorkspace( String workspaceName ) {
            return this.workspaceNames.contains(workspaceName);
        }

        @Override
        public String getDefinition() {
            return rule;
        }

        @Override
        public String toString() {
            return getDefinition();
        }
    }
}
| |
/**
* Copyright 2011-2021 Asakusa Framework Team.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.asakusafw.dmdl.parser;
import static com.asakusafw.dmdl.parser.JjDmdlParserConstants.*;
import java.text.MessageFormat;
import java.util.Collection;
import java.util.Set;
import com.asakusafw.dmdl.Diagnostic;
import com.asakusafw.dmdl.Region;
import com.asakusafw.utils.collections.Sets;
/**
* Analyzes {@link ParseException} and converts it to {@link Diagnostic}s.
* @since 0.5.3
*/
class SyntaxErrorAnalyzer {

    // Token kinds that may legally appear where an identifier/name is expected.
    private static final Set<Integer> NAMES = Sets.freeze(new Integer[] {
            NAME,
            PROJECTIVE,
            JOINED,
            SUMMARIZED,
    });

    // Token kinds that may legally appear where a property type is expected.
    private static final Set<Integer> TYPES = Sets.freeze(new Integer[] {
            INT,
            LONG,
            BYTE,
            SHORT,
            DECIMAL,
            FLOAT,
            DOUBLE,
            TEXT,
            BOOLEAN,
            DATE,
            DATETIME,
    });

    // Statement/property delimiters; used to guess "missing delimiter" errors.
    private static final Set<Integer> DELIMITERS = Sets.freeze(new Integer[] {
            END_OF_DECLARATION,
            PROPERTY_SEPARATOR,
    });

    private final JjDmdlParser parser;

    private final ParseException exception;

    // Source region of the offending token, computed once at construction.
    private final Region region;

    /**
     * Creates a new analyzer for a parse failure.
     *
     * @param parser the parser whose current token marks the error position
     * @param exception the JavaCC parse exception to analyze
     */
    SyntaxErrorAnalyzer(JjDmdlParser parser, ParseException exception) {
        this.parser = parser;
        this.exception = exception;
        this.region = computeRegion();
    }

    // Derives the error region from the parser's lookahead token;
    // falls back to a zero-position region at EOF or when no token is available.
    private Region computeRegion() {
        assert exception != null;
        assert parser != null;
        Token token = parser.getToken(1);
        if (token != null && token.kind != EOF) {
            return new Region(
                    parser.getSourceFile(),
                    token.beginLine, token.beginColumn,
                    token.endLine, token.endColumn);
        }
        return new Region(parser.getSourceFile(), 0, 0, 0, 0);
    }

    /**
     * Classifies the parse failure into the most specific {@link SyntaxErrorKind}
     * and converts it into a {@link Diagnostic}. The checks are ordered from most
     * to least specific, so the order of the branches below is significant.
     *
     * @return the diagnostic describing this syntax error
     */
    public Diagnostic analyze() {
        assert exception != null;
        assert parser != null;
        Token token = parser.getToken(1);
        // Is invalid identifier?
        if ((token.kind == INVALID_IDENTIFIER_LIKE || TYPES.contains(token.kind)) && isExpected(exception, TYPES)) {
            return diagnostic(SyntaxErrorKind.INVALID_TYPE_NAME, getTokenImage(token));
        }
        if ((token.kind == INVALID_IDENTIFIER_LIKE || TYPES.contains(token.kind)) && isExpected(exception, NAMES)) {
            return diagnostic(SyntaxErrorKind.INVALID_IDENTIFIER, getTokenImage(token));
        }
        // Is the next token kind unique?
        String first = findFirstUniqueToken(exception);
        if (first != null) {
            return diagnostic(SyntaxErrorKind.UNEXPECTED_TOKEN_UNIQUE, getTokenImage(token), first);
        }
        // occurred invalid characters
        if (token.kind == UNEXPECTED || token.kind == INVALID_IDENTIFIER_LIKE) {
            return diagnostic(SyntaxErrorKind.INVALID_TOKEN, getTokenImage(token));
        }
        // May the next token be one of delimiter?
        String delimiter = findMissingDelimiterToken(exception);
        if (delimiter != null) {
            return diagnostic(SyntaxErrorKind.UNEXPECTED_TOKEN_GUESS, getTokenImage(token), delimiter);
        }
        if (token.kind == EOF) {
            return diagnostic(SyntaxErrorKind.UNEXPECTED_EOF);
        }
        return diagnostic(SyntaxErrorKind.UNEXPECTED_TOKEN_UNKNOWN, getTokenImage(token));
    }

    // Formats the diagnostic message as "<file>:<line>:<column>: <kind message>";
    // the line/column part is omitted when the region has no valid position.
    private Diagnostic diagnostic(SyntaxErrorKind kind, Object... arguments) {
        String location;
        if (region.beginLine >= 1 && region.beginColumn >= 1) {
            location = MessageFormat.format(
                    "{0}:{1}:{2}", //$NON-NLS-1$
                    parser.getSourceFile(),
                    region.beginLine,
                    region.beginColumn);
        } else {
            location = parser.getSourceFile().toString();
        }
        String message = MessageFormat.format(
                Messages.getString("DmdlSyntaxException.errorSyntax"), //$NON-NLS-1$
                location,
                kind.getMessage(arguments));
        return new Diagnostic(Diagnostic.Level.ERROR, region, message);
    }

    // Returns the token's literal image, falling back to the grammar's token image
    // table (e.g. at EOF, where the token has no image of its own).
    private String getTokenImage(Token token) {
        if (token.image == null || token.image.isEmpty()) {
            return exception.tokenImage[token.kind];
        } else {
            return token.image;
        }
    }

    // Whether any of the parser's expected continuations starts with one of the given kinds.
    private static boolean isExpected(ParseException exception, Collection<Integer> kinds) {
        int[][] nextTokenSequences = exception.expectedTokenSequences;
        for (int[] sequence : nextTokenSequences) {
            Integer first = getFirstTokenKind(sequence);
            if (kinds.contains(first)) {
                return true;
            }
        }
        return false;
    }

    // If some expected continuation starts with a delimiter token, returns that
    // delimiter's image (so we can suggest "did you forget a ';'/','?"); else null.
    private static String findMissingDelimiterToken(ParseException exception) {
        int[][] nextTokenSequences = exception.expectedTokenSequences;
        for (int[] sequence : nextTokenSequences) {
            Integer first = getFirstTokenKind(sequence);
            if (DELIMITERS.contains(first)) {
                return exception.tokenImage[first];
            }
        }
        return null;
    }

    // If every expected continuation starts with the same constant token (a quoted
    // literal in the token-image table), returns that token's image; else null.
    private static String findFirstUniqueToken(ParseException exception) {
        int[][] nextTokenSequences = exception.expectedTokenSequences;
        if (nextTokenSequences.length == 0) {
            return null;
        }
        Integer first = getFirstTokenKind(nextTokenSequences[0]);
        if (first == null) {
            return null;
        }
        // check is constant
        if (0 > first || first >= tokenImage.length) {
            return null;
        }
        String image = tokenImage[first];
        if (image.startsWith("\"") == false || image.endsWith("\"") == false) { //$NON-NLS-1$ //$NON-NLS-2$
            return null;
        }
        // check is identical
        for (int i = 1; i < nextTokenSequences.length; i++) {
            Integer other = getFirstTokenKind(nextTokenSequences[i]);
            if (first.equals(other) == false) {
                return null;
            }
        }
        return image;
    }

    // First token kind of an expected continuation, or null for an empty sequence.
    private static Integer getFirstTokenKind(int[] nextTokenSequence) {
        if (nextTokenSequence.length == 0) {
            return null;
        }
        return nextTokenSequence[0];
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.tools.pulse.controllers;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.*;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.doReturn;
import static org.powermock.api.mockito.PowerMockito.spy;
import static org.powermock.api.mockito.PowerMockito.when;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.*;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.*;
import java.io.File;
import java.security.Principal;
import java.util.ArrayList;
import java.util.HashMap;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.geode.tools.pulse.internal.controllers.PulseController;
import org.apache.geode.tools.pulse.internal.data.Cluster;
import org.apache.geode.tools.pulse.internal.data.PulseConfig;
import org.apache.geode.tools.pulse.internal.data.Repository;
import org.apache.commons.collections.buffer.CircularFifoBuffer;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TemporaryFolder;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.powermock.core.classloader.annotations.PowerMockIgnore;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
import org.powermock.modules.junit4.PowerMockRunnerDelegate;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.MediaType;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.test.context.web.WebAppConfiguration;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;
import org.springframework.web.context.WebApplicationContext;
import org.apache.geode.test.junit.categories.IntegrationTest;
@Category(IntegrationTest.class)
@PrepareForTest(Repository.class)
@RunWith(PowerMockRunner.class)
@PowerMockRunnerDelegate(SpringJUnit4ClassRunner.class)
@WebAppConfiguration
@ContextConfiguration("classpath*:mvc-dispatcher-servlet.xml")
@PowerMockIgnore("*.IntegrationTest")
public class PulseControllerJUnitTest {
// Fixture constants shared by the request/response assertions in the tests below.
private static final String PRINCIPAL_USER = "test-user";
private static final String MEMBER_ID = "member1";
private static final String MEMBER_NAME = "localhost-server";
private static final String CLUSTER_NAME = "mock-cluster";
private static final String REGION_NAME = "mock-region";
private static final String REGION_PATH = "/" + REGION_NAME;
private static final String REGION_TYPE = "PARTITION";
private static final String AEQ_LISTENER = "async-event-listener";
private static final String CLIENT_NAME = "client-1";
private static final String PHYSICAL_HOST_NAME = "physical-host-1";
private static final String GEMFIRE_VERSION = "1.0.0";
// Authenticated principal attached to every mock request (lambda Principal).
private static final Principal principal;
static {
principal = () -> PRINCIPAL_USER;
}
@Rule
public TemporaryFolder tempFolder = new TemporaryFolder();
@Autowired
private WebApplicationContext wac;
// Built fresh in setup() from the autowired web application context.
private MockMvc mockMvc;
// Spy cluster populated in setup() with one member, one region, one client and trend data.
private Cluster cluster;
private final ObjectMapper mapper = new ObjectMapper();
@Before
public void setup() throws Exception {
  this.mockMvc = MockMvcBuilders.webAppContextSetup(this.wac).build();

  // Spy on a real Cluster so unstubbed getters read the state populated below.
  cluster = Mockito.spy(Cluster.class);

  // One region owned by the single member, with fixed rates the tests assert on.
  Cluster.Region region = new Cluster.Region();
  region.setName(REGION_NAME);
  region.setFullPath(REGION_PATH);
  region.setRegionType(REGION_TYPE);
  region.setMemberCount(1);
  region.setMemberName(new ArrayList<String>() {
    {
      add(MEMBER_NAME);
    }
  });
  region.setPutsRate(12.31D);
  region.setGetsRate(27.99D);
  Cluster.RegionOnMember regionOnMember = new Cluster.RegionOnMember();
  regionOnMember.setRegionFullPath(REGION_PATH);
  regionOnMember.setMemberName(MEMBER_NAME);
  region.setRegionOnMembers(new ArrayList<Cluster.RegionOnMember>() {
    {
      add(regionOnMember);
    }
  });
  cluster.addClusterRegion(REGION_PATH, region);

  // Single cluster member hosting the region, an async event queue and one client.
  Cluster.Member member = new Cluster.Member();
  member.setId(MEMBER_ID);
  member.setName(MEMBER_NAME);
  member.setUptime(1L);
  member.setHost(PHYSICAL_HOST_NAME);
  member.setGemfireVersion(GEMFIRE_VERSION);
  member.setCpuUsage(55.77123D);
  member.setMemberRegions(new HashMap<String, Cluster.Region>() {
    {
      put(REGION_NAME, region);
    }
  });
  Cluster.AsyncEventQueue aeq = new Cluster.AsyncEventQueue();
  aeq.setAsyncEventListener(AEQ_LISTENER);
  member.setAsyncEventQueueList(new ArrayList() {
    {
      add(aeq);
    }
  });
  Cluster.Client client = new Cluster.Client();
  client.setId("100");
  client.setName(CLIENT_NAME);
  client.setUptime(1L);
  member.setMemberClientsHMap(new HashMap<String, Cluster.Client>() {
    {
      put(CLIENT_NAME, client);
    }
  });
  cluster.setMembersHMap(new HashMap() {
    {
      put(MEMBER_NAME, member);
    }
  });
  cluster.setPhysicalToMember(new HashMap() {
    {
      put(PHYSICAL_HOST_NAME, new ArrayList() {
        {
          add(member);
        }
      });
    }
  });
  cluster.setServerName(CLUSTER_NAME);

  // Trend buffers read back by the throughput / statistics endpoints under test.
  cluster.setMemoryUsageTrend(new CircularFifoBuffer() {
    {
      add(1);
      add(2);
      add(3);
    }
  });
  cluster.setWritePerSecTrend(new CircularFifoBuffer() {
    {
      add(1.29);
      add(2.3);
      add(3.0);
    }
  });
  cluster.setThroughoutReadsTrend(new CircularFifoBuffer() {
    {
      add(1);
      add(2);
      add(3);
    }
  });
  cluster.setThroughoutWritesTrend(new CircularFifoBuffer() {
    {
      add(4);
      add(5);
      add(6);
    }
  });

  Repository repo = Mockito.spy(Repository.class);

  // Set up a partial mock for some static methods
  spy(Repository.class);
  when(Repository.class, "get").thenReturn(repo);
  doReturn(cluster).when(repo).getCluster();

  // Point the query-history log at a temp file so tests don't touch a real one.
  PulseConfig config = new PulseConfig();
  File tempQueryLog = tempFolder.newFile("query_history.log");
  config.setQueryHistoryFileName(tempQueryLog.toString());
  doReturn(config).when(repo).getPulseConfig();

  // Populate version metadata; presumably the version endpoint expects
  // non-empty values — TODO confirm against PulseController.
  PulseController.pulseVersion.setPulseVersion("not empty");
  PulseController.pulseVersion.setPulseBuildId("not empty");
  PulseController.pulseVersion.setPulseBuildDate("not empty");
  PulseController.pulseVersion.setPulseSourceDate("not empty");
  PulseController.pulseVersion.setPulseSourceRevision("not empty");
  PulseController.pulseVersion.setPulseSourceRepository("not empty");
}
@Test
public void pulseUpdateForClusterDetails() throws Exception {
  // Same polling request the Pulse UI sends for cluster-level details.
  String payload = "{\"ClusterDetails\":\"{}\"}";
  MediaType acceptType = MediaType.parseMediaType(MediaType.APPLICATION_JSON_UTF8_VALUE);
  this.mockMvc
      .perform(post("/pulseUpdate").param("pulseData", payload)
          .principal(principal)
          .accept(acceptType))
      .andExpect(status().isOk())
      .andExpect(jsonPath("$.ClusterDetails.userName").value(PRINCIPAL_USER))
      .andExpect(jsonPath("$.ClusterDetails.totalHeap").value(0D))
      .andExpect(jsonPath("$.ClusterDetails.clusterName").value(CLUSTER_NAME));
}
@Test
public void pulseUpdateForClusterDiskThroughput() throws Exception {
  // Trend arrays come from the CircularFifoBuffers populated in setup().
  String payload = "{\"ClusterDiskThroughput\":\"{}\"}";
  MediaType acceptType = MediaType.parseMediaType(MediaType.APPLICATION_JSON_UTF8_VALUE);
  this.mockMvc
      .perform(post("/pulseUpdate").param("pulseData", payload)
          .principal(principal)
          .accept(acceptType))
      .andExpect(status().isOk())
      .andExpect(jsonPath("$.ClusterDiskThroughput.currentThroughputWrites").value(0D))
      .andExpect(jsonPath("$.ClusterDiskThroughput.throughputReads", contains(1, 2, 3)))
      .andExpect(jsonPath("$.ClusterDiskThroughput.currentThroughputReads").value(0D))
      .andExpect(jsonPath("$.ClusterDiskThroughput.throughputWrites", contains(4, 5, 6)));
}
@Test
public void pulseUpdateForClusterGCPauses() throws Exception {
  // No GC pause data was seeded, so both count and trend should be empty/zero.
  String payload = "{\"ClusterJVMPauses\":\"{}\"}";
  MediaType acceptType = MediaType.parseMediaType(MediaType.APPLICATION_JSON_UTF8_VALUE);
  this.mockMvc
      .perform(post("/pulseUpdate").param("pulseData", payload)
          .principal(principal)
          .accept(acceptType))
      .andExpect(status().isOk())
      .andExpect(jsonPath("$.ClusterJVMPauses.currentGCPauses").value(0))
      .andExpect(jsonPath("$.ClusterJVMPauses.gCPausesTrend").isEmpty());
}
@Test
public void pulseUpdateForClusterKeyStatistics() throws Exception {
  // Only the write-per-second trend was seeded in setup(); the others stay empty.
  String payload = "{\"ClusterKeyStatistics\":\"{}\"}";
  MediaType acceptType = MediaType.parseMediaType(MediaType.APPLICATION_JSON_UTF8_VALUE);
  this.mockMvc
      .perform(post("/pulseUpdate").param("pulseData", payload)
          .principal(principal)
          .accept(acceptType))
      .andExpect(status().isOk())
      .andExpect(jsonPath("$.ClusterKeyStatistics.readPerSecTrend").isEmpty())
      .andExpect(jsonPath("$.ClusterKeyStatistics.queriesPerSecTrend").isEmpty())
      .andExpect(jsonPath("$.ClusterKeyStatistics.writePerSecTrend", contains(1.29, 2.3, 3.0)));
}
@Test
public void pulseUpdateForClusterMember() throws Exception {
  // Verifies the single member seeded in setup() is reported with rounded CPU usage.
  String payload = "{\"ClusterMembers\":\"{}\"}";
  MediaType acceptType = MediaType.parseMediaType(MediaType.APPLICATION_JSON_UTF8_VALUE);
  this.mockMvc
      .perform(post("/pulseUpdate").param("pulseData", payload)
          .principal(principal)
          .accept(acceptType))
      .andExpect(status().isOk())
      .andExpect(jsonPath("$.ClusterMembers.members[0].serverGroups[0]").value("Default"))
      .andExpect(jsonPath("$.ClusterMembers.members[0].cpuUsage").value(55.77D))
      .andExpect(jsonPath("$.ClusterMembers.members[0].clients").value(1))
      .andExpect(jsonPath("$.ClusterMembers.members[0].heapUsage").value(0))
      .andExpect(jsonPath("$.ClusterMembers.members[0].name").value(MEMBER_NAME))
      .andExpect(jsonPath("$.ClusterMembers.members[0].currentHeapUsage").value(0))
      .andExpect(jsonPath("$.ClusterMembers.members[0].isManager").value(false))
      .andExpect(jsonPath("$.ClusterMembers.members[0].threads").value(0))
      .andExpect(jsonPath("$.ClusterMembers.members[0].memberId").value(MEMBER_ID))
      .andExpect(jsonPath("$.ClusterMembers.members[0].redundancyZones[0]").value("Default"));
}
  @Test
  public void pulseUpdateForClusterMembersRGraph() throws Exception {
    // Verifies the topology graph payload: cluster root ("clustor" is the
    // service's own key spelling), one physical host child, and one member
    // grandchild carrying the stubbed member attributes.
    this.mockMvc
        .perform(post("/pulseUpdate").param("pulseData", "{\"ClusterMembersRGraph\":\"{}\"}")
            .principal(principal)
            .accept(MediaType.parseMediaType(MediaType.APPLICATION_JSON_UTF8_VALUE)))
        .andExpect(status().isOk())
        .andExpect(jsonPath("$.ClusterMembersRGraph.memberCount").value(0))
        .andExpect(jsonPath("$.ClusterMembersRGraph.clustor.data").isEmpty())
        .andExpect(jsonPath("$.ClusterMembersRGraph.clustor.name").value(0))
        .andExpect(jsonPath("$.ClusterMembersRGraph.clustor.id").value(0))
        // host-level node
        .andExpect(
            jsonPath("$.ClusterMembersRGraph.clustor.children[0].id").value(PHYSICAL_HOST_NAME))
        .andExpect(
            jsonPath("$.ClusterMembersRGraph.clustor.children[0].name").value(PHYSICAL_HOST_NAME))
        .andExpect(jsonPath("$.ClusterMembersRGraph.clustor.children[0].data.loadAvg").value(0D))
        .andExpect(jsonPath("$.ClusterMembersRGraph.clustor.children[0].data.sockets").value(0))
        .andExpect(jsonPath("$.ClusterMembersRGraph.clustor.children[0].data.threads").value(0))
        .andExpect(jsonPath("$.ClusterMembersRGraph.clustor.children[0].data.cpuUsage").value(0D))
        .andExpect(jsonPath("$.ClusterMembersRGraph.clustor.children[0].data.memoryUsage").value(0))
        .andExpect(
            jsonPath("$.ClusterMembersRGraph.clustor.children[0].data.hostStatus").value("Normal"))
        .andExpect(jsonPath("$.ClusterMembersRGraph.clustor.children[0].data.$type")
            .value("hostNormalNode"))
        // member-level node under the host
        .andExpect(
            jsonPath("$.ClusterMembersRGraph.clustor.children[0].children[0].id").value(MEMBER_ID))
        .andExpect(jsonPath("$.ClusterMembersRGraph.clustor.children[0].children[0].name")
            .value(MEMBER_NAME))
        .andExpect(
            jsonPath("$.ClusterMembersRGraph.clustor.children[0].children[0].data.gemfireVersion")
                .value(GEMFIRE_VERSION))
        .andExpect(
            jsonPath("$.ClusterMembersRGraph.clustor.children[0].children[0].data.memoryUsage")
                .value(0))
        .andExpect(jsonPath("$.ClusterMembersRGraph.clustor.children[0].children[0].data.cpuUsage")
            .value(55.77D))
        .andExpect(jsonPath("$.ClusterMembersRGraph.clustor.children[0].children[0].data.regions")
            .value(1))
        .andExpect(jsonPath("$.ClusterMembersRGraph.clustor.children[0].children[0].data.host")
            .value(PHYSICAL_HOST_NAME))
        .andExpect(
            jsonPath("$.ClusterMembersRGraph.clustor.children[0].children[0].data.port").value("-"))
        .andExpect(jsonPath("$.ClusterMembersRGraph.clustor.children[0].children[0].data.clients")
            .value(1))
        .andExpect(jsonPath("$.ClusterMembersRGraph.clustor.children[0].children[0].data.gcPauses")
            .value(0))
        .andExpect(
            jsonPath("$.ClusterMembersRGraph.clustor.children[0].children[0].data.numThreads")
                .value(0))
        .andExpect(jsonPath("$.ClusterMembersRGraph.clustor.children[0].children[0].data.nodeType")
            .value("memberNormalNode"))
        .andExpect(jsonPath("$.ClusterMembersRGraph.clustor.children[0].children[0].data.$type")
            .value("memberNormalNode"))
        .andExpect(
            jsonPath("$.ClusterMembersRGraph.clustor.children[0].children[0].data.gatewaySender")
                .value(0))
        .andExpect(
            jsonPath("$.ClusterMembersRGraph.clustor.children[0].children[0].data.gatewayReceiver")
                .value(0))
        .andExpect(
            jsonPath("$.ClusterMembersRGraph.clustor.children[0].children[0].children").isEmpty());
  }
@Test
public void pulseUpdateForClusterMemoryUsage() throws Exception {
this.mockMvc
.perform(post("/pulseUpdate").param("pulseData", "{\"ClusterMemoryUsage\":\"{}\"}")
.principal(principal)
.accept(MediaType.parseMediaType(MediaType.APPLICATION_JSON_UTF8_VALUE)))
.andExpect(status().isOk())
.andExpect(jsonPath("$.ClusterMemoryUsage.currentMemoryUsage").value(0))
.andExpect(jsonPath("$.ClusterMemoryUsage.memoryUsageTrend", containsInAnyOrder(1, 2, 3)));
}
  @Test
  public void pulseUpdateForClusterRegion() throws Exception {
    // Verifies the single-region ClusterRegion payload: cluster identity plus
    // the stubbed region's attributes, rates, and (empty) trend arrays.
    this.mockMvc
        .perform(post("/pulseUpdate").param("pulseData", "{\"ClusterRegion\":\"{}\"}")
            .principal(principal)
            .accept(MediaType.parseMediaType(MediaType.APPLICATION_JSON_UTF8_VALUE)))
        .andExpect(status().isOk())
        .andExpect(jsonPath("$.ClusterRegion.clusterName").value(CLUSTER_NAME))
        .andExpect(jsonPath("$.ClusterRegion.userName").value(PRINCIPAL_USER))
        .andExpect(jsonPath("$.ClusterRegion.region[0].regionPath").value(REGION_PATH))
        .andExpect(jsonPath("$.ClusterRegion.region[0].diskReadsTrend").isEmpty())
        .andExpect(jsonPath("$.ClusterRegion.region[0].memoryUsage").value("0.0000"))
        .andExpect(jsonPath("$.ClusterRegion.region[0].getsRate").value(27.99D))
        .andExpect(jsonPath("$.ClusterRegion.region[0].wanEnabled").value(false))
        .andExpect(jsonPath("$.ClusterRegion.region[0].memberCount").value(1))
        .andExpect(jsonPath("$.ClusterRegion.region[0].memberNames[0].name").value(MEMBER_NAME))
        .andExpect(jsonPath("$.ClusterRegion.region[0].memberNames[0].id").value(MEMBER_ID))
        .andExpect(jsonPath("$.ClusterRegion.region[0].emptyNodes").value(0))
        .andExpect(jsonPath("$.ClusterRegion.region[0].type").value(REGION_TYPE))
        .andExpect(jsonPath("$.ClusterRegion.region[0].isEnableOffHeapMemory").value("OFF"))
        .andExpect(jsonPath("$.ClusterRegion.region[0].putsRate").value(12.31D))
        .andExpect(jsonPath("$.ClusterRegion.region[0].totalMemory").value(0))
        .andExpect(jsonPath("$.ClusterRegion.region[0].entryCount").value(0))
        .andExpect(jsonPath("$.ClusterRegion.region[0].compressionCodec").value("NA"))
        .andExpect(jsonPath("$.ClusterRegion.region[0].name").value(REGION_NAME))
        .andExpect(jsonPath("$.ClusterRegion.region[0].systemRegionEntryCount").value(0))
        .andExpect(jsonPath("$.ClusterRegion.region[0].persistence").value("OFF"))
        .andExpect(jsonPath("$.ClusterRegion.region[0].memoryReadsTrend").isEmpty())
        .andExpect(jsonPath("$.ClusterRegion.region[0].diskWritesTrend").isEmpty())
        .andExpect(jsonPath("$.ClusterRegion.region[0].memoryWritesTrend").isEmpty())
        .andExpect(jsonPath("$.ClusterRegion.region[0].dataUsage").value(0))
        .andExpect(jsonPath("$.ClusterRegion.region[0].entrySize").value("0.0000"));
  }
  @Test
  public void pulseUpdateForClusterRegions() throws Exception {
    // Same region attributes as pulseUpdateForClusterRegion, but via the
    // plural ClusterRegions payload (regions[] instead of region[]).
    this.mockMvc
        .perform(post("/pulseUpdate").param("pulseData", "{\"ClusterRegions\":\"{}\"}")
            .principal(principal)
            .accept(MediaType.parseMediaType(MediaType.APPLICATION_JSON_UTF8_VALUE)))
        .andExpect(status().isOk())
        .andExpect(jsonPath("$.ClusterRegions.regions[0].regionPath").value(REGION_PATH))
        .andExpect(jsonPath("$.ClusterRegions.regions[0].diskReadsTrend").isEmpty())
        .andExpect(jsonPath("$.ClusterRegions.regions[0].memoryUsage").value("0.0000"))
        .andExpect(jsonPath("$.ClusterRegions.regions[0].getsRate").value(27.99D))
        .andExpect(jsonPath("$.ClusterRegions.regions[0].wanEnabled").value(false))
        .andExpect(jsonPath("$.ClusterRegions.regions[0].memberCount").value(1))
        .andExpect(jsonPath("$.ClusterRegions.regions[0].memberNames[0].name").value(MEMBER_NAME))
        .andExpect(jsonPath("$.ClusterRegions.regions[0].memberNames[0].id").value(MEMBER_ID))
        .andExpect(jsonPath("$.ClusterRegions.regions[0].emptyNodes").value(0))
        .andExpect(jsonPath("$.ClusterRegions.regions[0].type").value(REGION_TYPE))
        .andExpect(jsonPath("$.ClusterRegions.regions[0].isEnableOffHeapMemory").value("OFF"))
        .andExpect(jsonPath("$.ClusterRegions.regions[0].putsRate").value(12.31D))
        .andExpect(jsonPath("$.ClusterRegions.regions[0].totalMemory").value(0))
        .andExpect(jsonPath("$.ClusterRegions.regions[0].entryCount").value(0))
        .andExpect(jsonPath("$.ClusterRegions.regions[0].compressionCodec").value("NA"))
        .andExpect(jsonPath("$.ClusterRegions.regions[0].name").value(REGION_NAME))
        .andExpect(jsonPath("$.ClusterRegions.regions[0].systemRegionEntryCount").value(0))
        .andExpect(jsonPath("$.ClusterRegions.regions[0].persistence").value("OFF"))
        .andExpect(jsonPath("$.ClusterRegions.regions[0].memoryReadsTrend").isEmpty())
        .andExpect(jsonPath("$.ClusterRegions.regions[0].diskWritesTrend").isEmpty())
        .andExpect(jsonPath("$.ClusterRegions.regions[0].memoryWritesTrend").isEmpty())
        .andExpect(jsonPath("$.ClusterRegions.regions[0].dataUsage").value(0))
        .andExpect(jsonPath("$.ClusterRegions.regions[0].entrySize").value("0.0000"));
  }
  @Test
  public void pulseUpdateForClusterSelectedRegion() throws Exception {
    // Selects the stubbed region by its full path and verifies the detailed
    // selectedRegion view: region attributes, hosting member, and trends.
    this.mockMvc
        .perform(post("/pulseUpdate")
            .param("pulseData",
                "{\"ClusterSelectedRegion\":{\"regionFullPath\":\"" + REGION_PATH + "\"}}")
            .principal(principal)
            .accept(MediaType.parseMediaType(MediaType.APPLICATION_JSON_UTF8_VALUE)))
        .andExpect(status().isOk())
        .andExpect(jsonPath("$.ClusterSelectedRegion.selectedRegion.lruEvictionRate").value(0D))
        .andExpect(jsonPath("$.ClusterSelectedRegion.selectedRegion.getsRate").value(27.99D))
        .andExpect(jsonPath("$.ClusterSelectedRegion.selectedRegion.emptyNodes").value(0))
        .andExpect(jsonPath("$.ClusterSelectedRegion.selectedRegion.type").value(REGION_TYPE))
        .andExpect(
            jsonPath("$.ClusterSelectedRegion.selectedRegion.isEnableOffHeapMemory").value("OFF"))
        .andExpect(jsonPath("$.ClusterSelectedRegion.selectedRegion.path").value(REGION_PATH))
        // hosting member attributes
        .andExpect(
            jsonPath("$.ClusterSelectedRegion.selectedRegion.members[0].cpuUsage").value(55.77D))
        .andExpect(jsonPath("$.ClusterSelectedRegion.selectedRegion.members[0].clients").value(1))
        .andExpect(jsonPath("$.ClusterSelectedRegion.selectedRegion.members[0].heapUsage").value(0))
        .andExpect(
            jsonPath("$.ClusterSelectedRegion.selectedRegion.members[0].name").value(MEMBER_NAME))
        .andExpect(
            jsonPath("$.ClusterSelectedRegion.selectedRegion.members[0].currentHeapUsage").value(0))
        .andExpect(
            jsonPath("$.ClusterSelectedRegion.selectedRegion.members[0].isManager").value(false))
        .andExpect(jsonPath("$.ClusterSelectedRegion.selectedRegion.members[0].threads").value(0))
        .andExpect(
            jsonPath("$.ClusterSelectedRegion.selectedRegion.members[0].memberId").value(MEMBER_ID))
        .andExpect(jsonPath("$.ClusterSelectedRegion.selectedRegion.members[0].uptime")
            .value("0 Hours 0 Mins 1 Secs"))
        .andExpect(jsonPath("$.ClusterSelectedRegion.selectedRegion.memoryReadsTrend").isEmpty())
        .andExpect(jsonPath("$.ClusterSelectedRegion.selectedRegion.diskWritesTrend").isEmpty())
        .andExpect(jsonPath("$.ClusterSelectedRegion.selectedRegion.dataUsage").value(0))
        .andExpect(jsonPath("$.ClusterSelectedRegion.selectedRegion.regionPath").value(REGION_PATH))
        .andExpect(jsonPath("$.ClusterSelectedRegion.selectedRegion.diskReadsTrend").isEmpty())
        .andExpect(jsonPath("$.ClusterSelectedRegion.selectedRegion.memoryUsage").value("0.0000"))
        .andExpect(jsonPath("$.ClusterSelectedRegion.selectedRegion.wanEnabled").value(false))
        .andExpect(jsonPath("$.ClusterSelectedRegion.selectedRegion.memberCount").value(1))
        .andExpect(jsonPath("$.ClusterSelectedRegion.selectedRegion.putsRate").value(12.31D))
        .andExpect(jsonPath("$.ClusterSelectedRegion.selectedRegion.totalMemory").value(0))
        .andExpect(jsonPath("$.ClusterSelectedRegion.selectedRegion.entryCount").value(0))
        .andExpect(jsonPath("$.ClusterSelectedRegion.selectedRegion.compressionCodec").value("NA"))
        .andExpect(jsonPath("$.ClusterSelectedRegion.selectedRegion.name").value(REGION_NAME))
        .andExpect(
            jsonPath("$.ClusterSelectedRegion.selectedRegion.systemRegionEntryCount").value(0))
        .andExpect(jsonPath("$.ClusterSelectedRegion.selectedRegion.persistence").value("OFF"))
        .andExpect(jsonPath("$.ClusterSelectedRegion.selectedRegion.memoryWritesTrend").isEmpty())
        .andExpect(jsonPath("$.ClusterSelectedRegion.selectedRegion.entrySize").value("0.0000"));
  }
  @Test
  public void pulseUpdateForClusterSelectedRegionsMember() throws Exception {
    // Verifies the per-member view of the selected region; the response is
    // keyed by member name, hence the "%s" jsonPath parameterization.
    this.mockMvc
        .perform(
            post("/pulseUpdate")
                .param("pulseData",
                    "{\"ClusterSelectedRegionsMember\":{\"regionFullPath\":\"" + REGION_PATH
                        + "\"}}")
                .principal(principal)
                .accept(MediaType.parseMediaType(MediaType.APPLICATION_JSON_UTF8_VALUE)))
        .andExpect(status().isOk())
        .andExpect(
            jsonPath("$.ClusterSelectedRegionsMember.selectedRegionsMembers.%s.diskReadsTrend",
                MEMBER_NAME).isEmpty())
        .andExpect(
            jsonPath("$.ClusterSelectedRegionsMember.selectedRegionsMembers.%s.regionFullPath",
                MEMBER_NAME).value(REGION_PATH))
        .andExpect(jsonPath("$.ClusterSelectedRegionsMember.selectedRegionsMembers.%s.entryCount",
            MEMBER_NAME).value(0))
        .andExpect(jsonPath("$.ClusterSelectedRegionsMember.selectedRegionsMembers.%s.accessor",
            MEMBER_NAME).value("True"))
        .andExpect(jsonPath("$.ClusterSelectedRegionsMember.selectedRegionsMembers.%s.memberName",
            MEMBER_NAME).value(MEMBER_NAME))
        .andExpect(
            jsonPath("$.ClusterSelectedRegionsMember.selectedRegionsMembers.%s.memoryReadsTrend",
                MEMBER_NAME).isEmpty())
        .andExpect(
            jsonPath("$.ClusterSelectedRegionsMember.selectedRegionsMembers.%s.diskWritesTrend",
                MEMBER_NAME).isEmpty())
        .andExpect(
            jsonPath("$.ClusterSelectedRegionsMember.selectedRegionsMembers.%s.memoryWritesTrend",
                MEMBER_NAME).isEmpty())
        .andExpect(jsonPath("$.ClusterSelectedRegionsMember.selectedRegionsMembers.%s.entrySize",
            MEMBER_NAME).value(0))
        .andExpect(jsonPath("$.ClusterSelectedRegionsMember.clusterName").value(CLUSTER_NAME))
        .andExpect(jsonPath("$.ClusterSelectedRegionsMember.userName").value(PRINCIPAL_USER));
  }
@Test
public void pulseUpdateForClusterWANInfo() throws Exception {
this.mockMvc
.perform(post("/pulseUpdate").param("pulseData", "{\"ClusterWANInfo\":\"{}\"}")
.principal(principal)
.accept(MediaType.parseMediaType(MediaType.APPLICATION_JSON_UTF8_VALUE)))
.andExpect(status().isOk())
.andExpect(jsonPath("$.ClusterWANInfo.connectedClusters").isEmpty());
}
  @Test
  public void pulseUpdateForMemberAsynchEventQueues() throws Exception {
    // Verifies the async-event-queue payload for the stubbed member: one
    // queue present, with default batch settings and the stubbed listener.
    this.mockMvc
        .perform(post("/pulseUpdate")
            .param("pulseData",
                "{\"MemberAsynchEventQueues\":{\"memberName\":\"" + MEMBER_NAME + "\"}}")
            .principal(principal)
            .accept(MediaType.parseMediaType(MediaType.APPLICATION_JSON_UTF8_VALUE)))
        .andExpect(status().isOk())
        .andExpect(jsonPath("$.MemberAsynchEventQueues.isAsyncEventQueuesPresent").value(true))
        .andExpect(
            jsonPath("$.MemberAsynchEventQueues.asyncEventQueues[0].batchTimeInterval").value(0))
        .andExpect(jsonPath("$.MemberAsynchEventQueues.asyncEventQueues[0].batchConflationEnabled")
            .value(false))
        .andExpect(jsonPath("$.MemberAsynchEventQueues.asyncEventQueues[0].queueSize").value(0))
        .andExpect(
            jsonPath("$.MemberAsynchEventQueues.asyncEventQueues[0].senderType").value(false))
        .andExpect(jsonPath("$.MemberAsynchEventQueues.asyncEventQueues[0].asyncEventListener")
            .value(AEQ_LISTENER))
        .andExpect(jsonPath("$.MemberAsynchEventQueues.asyncEventQueues[0].batchSize").value(0))
        .andExpect(jsonPath("$.MemberAsynchEventQueues.asyncEventQueues[0].primary").value(false));
  }
  @Test
  public void pulseUpdateForMemberClients() throws Exception {
    // Verifies the client list for the stubbed member: one disconnected
    // client with zeroed counters and the stubbed id/name/uptime.
    this.mockMvc
        .perform(post("/pulseUpdate")
            .param("pulseData", "{\"MemberClients\":{\"memberName\":\"" + MEMBER_NAME + "\"}}")
            .principal(principal)
            .accept(MediaType.parseMediaType(MediaType.APPLICATION_JSON_UTF8_VALUE)))
        .andExpect(status().isOk()).andExpect(jsonPath("$.MemberClients.name").value(MEMBER_NAME))
        .andExpect(jsonPath("$.MemberClients.memberClients[0].puts").value(0))
        .andExpect(jsonPath("$.MemberClients.memberClients[0].cpuUsage").value("0.0000"))
        .andExpect(jsonPath("$.MemberClients.memberClients[0].clientId").value("100"))
        .andExpect(jsonPath("$.MemberClients.memberClients[0].queueSize").value(0))
        .andExpect(jsonPath("$.MemberClients.memberClients[0].clientCQCount").value(0))
        .andExpect(jsonPath("$.MemberClients.memberClients[0].name").value(CLIENT_NAME))
        .andExpect(jsonPath("$.MemberClients.memberClients[0].isConnected").value("No"))
        .andExpect(jsonPath("$.MemberClients.memberClients[0].threads").value(0))
        .andExpect(jsonPath("$.MemberClients.memberClients[0].isSubscriptionEnabled").value("No"))
        .andExpect(jsonPath("$.MemberClients.memberClients[0].gets").value(0)).andExpect(
            jsonPath("$.MemberClients.memberClients[0].uptime").value("0 Hours 0 Mins 1 Secs"));
  }
@Test
public void pulseUpdateForMemberDetails() throws Exception {
this.mockMvc
.perform(post("/pulseUpdate")
.param("pulseData", "{\"MemberDetails\":{\"memberName\":\"" + MEMBER_NAME + "\"}}")
.principal(principal)
.accept(MediaType.parseMediaType(MediaType.APPLICATION_JSON_UTF8_VALUE)))
.andExpect(status().isOk()).andExpect(jsonPath("$.MemberDetails.name").value(MEMBER_NAME))
.andExpect(jsonPath("$.MemberDetails.offHeapUsedSize").value(0))
.andExpect(jsonPath("$.MemberDetails.diskStorageUsed").value(0D))
.andExpect(jsonPath("$.MemberDetails.regionsCount").value(1))
.andExpect(jsonPath("$.MemberDetails.clusterName").value(CLUSTER_NAME))
.andExpect(jsonPath("$.MemberDetails.name").value(MEMBER_NAME))
.andExpect(jsonPath("$.MemberDetails.threads").value(0))
.andExpect(jsonPath("$.MemberDetails.clusterId").isNotEmpty())
.andExpect(jsonPath("$.MemberDetails.numClients").value(1))
.andExpect(jsonPath("$.MemberDetails.userName").value(PRINCIPAL_USER))
.andExpect(jsonPath("$.MemberDetails.offHeapFreeSize").value(0))
.andExpect(jsonPath("$.MemberDetails.memberId").value(MEMBER_ID))
.andExpect(jsonPath("$.MemberDetails.status").value("Normal"));
}
@Test
public void pulseUpdateForMemberDiskThroughput() throws Exception {
this.mockMvc
.perform(post("/pulseUpdate")
.param("pulseData",
"{\"MemberDiskThroughput\":{\"memberName\":\"" + MEMBER_NAME + "\"}}")
.principal(principal)
.accept(MediaType.parseMediaType(MediaType.APPLICATION_JSON_UTF8_VALUE)))
.andExpect(status().isOk())
.andExpect(jsonPath("$.MemberDiskThroughput.throughputWritesTrend").isEmpty())
.andExpect(jsonPath("$.MemberDiskThroughput.throughputReadsTrend").isEmpty())
.andExpect(jsonPath("$.MemberDiskThroughput.throughputWrites").value(0D))
.andExpect(jsonPath("$.MemberDiskThroughput.throughputReads").value(0D));
}
  @Test
  public void pulseUpdateForMemberGatewayHub() throws Exception {
    // Verifies the gateway-hub payload: member is neither sender nor
    // receiver, but still exposes its async event queue attributes.
    this.mockMvc
        .perform(post("/pulseUpdate")
            .param("pulseData", "{\"MemberGatewayHub\":{\"memberName\":\"" + MEMBER_NAME + "\"}}")
            .principal(principal)
            .accept(MediaType.parseMediaType(MediaType.APPLICATION_JSON_UTF8_VALUE)))
        .andExpect(status().isOk())
        .andExpect(jsonPath("$.MemberGatewayHub.isGatewayReceiver").value(false))
        .andExpect(jsonPath("$.MemberGatewayHub.asyncEventQueues[0].batchTimeInterval").value(0))
        .andExpect(
            jsonPath("$.MemberGatewayHub.asyncEventQueues[0].batchConflationEnabled").value(false))
        .andExpect(jsonPath("$.MemberGatewayHub.asyncEventQueues[0].queueSize").value(0))
        .andExpect(jsonPath("$.MemberGatewayHub.asyncEventQueues[0].senderType").value(false))
        .andExpect(jsonPath("$.MemberGatewayHub.asyncEventQueues[0].asyncEventListener")
            .value(AEQ_LISTENER))
        .andExpect(jsonPath("$.MemberGatewayHub.asyncEventQueues[0].batchSize").value(0))
        .andExpect(jsonPath("$.MemberGatewayHub.asyncEventQueues[0].primary").value(false))
        .andExpect(jsonPath("$.MemberGatewayHub.isGatewaySender").value(false))
        .andExpect(jsonPath("$.MemberGatewayHub.regionsInvolved").isEmpty())
        .andExpect(jsonPath("$.MemberGatewayHub.gatewaySenders").isEmpty());
  }
@Test
public void pulseUpdateForMemberGCPauses() throws Exception {
this.mockMvc
.perform(post("/pulseUpdate")
.param("pulseData", "{\"MemberGCPauses\":{\"memberName\":\"" + MEMBER_NAME + "\"}}")
.principal(principal)
.accept(MediaType.parseMediaType(MediaType.APPLICATION_JSON_UTF8_VALUE)))
.andExpect(status().isOk()).andExpect(jsonPath("$.MemberGCPauses.gcPausesCount").value(0))
.andExpect(jsonPath("$.MemberGCPauses.gcPausesTrend").isEmpty());
}
@Test
public void pulseUpdateForMemberHeapUsage() throws Exception {
this.mockMvc
.perform(post("/pulseUpdate")
.param("pulseData", "{\"MemberHeapUsage\":{\"memberName\":\"" + MEMBER_NAME + "\"}}")
.principal(principal)
.accept(MediaType.parseMediaType(MediaType.APPLICATION_JSON_UTF8_VALUE)))
.andExpect(status().isOk())
.andExpect(jsonPath("$.MemberHeapUsage.heapUsageTrend").isEmpty())
.andExpect(jsonPath("$.MemberHeapUsage.currentHeapUsage").value(0));
}
@Test
public void pulseUpdateForMemberKeyStatistics() throws Exception {
this.mockMvc
.perform(post("/pulseUpdate")
.param("pulseData",
"{\"MemberKeyStatistics\":{\"memberName\":\"" + MEMBER_NAME + "\"}}")
.principal(principal)
.accept(MediaType.parseMediaType(MediaType.APPLICATION_JSON_UTF8_VALUE)))
.andExpect(status().isOk())
.andExpect(jsonPath("$.MemberKeyStatistics.readPerSecTrend").isEmpty())
.andExpect(jsonPath("$.MemberKeyStatistics.cpuUsageTrend").isEmpty())
.andExpect(jsonPath("$.MemberKeyStatistics.memoryUsageTrend").isEmpty())
.andExpect(jsonPath("$.MemberKeyStatistics.writePerSecTrend").isEmpty());
}
  @Test
  public void pulseUpdateForMemberRegions() throws Exception {
    // Verifies the region list hosted by the stubbed member: one region
    // with the stubbed path/name and zeroed usage figures.
    this.mockMvc
        .perform(post("/pulseUpdate")
            .param("pulseData", "{\"MemberRegions\":{\"memberName\":\"" + MEMBER_NAME + "\"}}")
            .principal(principal)
            .accept(MediaType.parseMediaType(MediaType.APPLICATION_JSON_UTF8_VALUE)))
        .andExpect(status().isOk()).andExpect(jsonPath("$.MemberRegions.name").value(MEMBER_NAME))
        .andExpect(jsonPath("$.MemberRegions.memberRegions[0].fullPath").value(REGION_PATH))
        .andExpect(jsonPath("$.MemberRegions.memberRegions[0].entryCount").value(0))
        .andExpect(jsonPath("$.MemberRegions.memberRegions[0].name").value(REGION_NAME))
        .andExpect(jsonPath("$.MemberRegions.memberRegions[0].diskStoreName").value(""))
        .andExpect(jsonPath("$.MemberRegions.memberRegions[0].gatewayEnabled").value(false))
        .andExpect(jsonPath("$.MemberRegions.memberRegions[0].entrySize").value("0.0000"))
        .andExpect(jsonPath("$.MemberRegions.memberId").value(MEMBER_ID))
        .andExpect(jsonPath("$.MemberRegions.status").value("Normal"));
  }
@Test
public void pulseUpdateForMembersList() throws Exception {
this.mockMvc
.perform(post("/pulseUpdate")
.param("pulseData", "{\"MembersList\":{\"memberName\":\"" + MEMBER_NAME + "\"}}")
.principal(principal)
.accept(MediaType.parseMediaType(MediaType.APPLICATION_JSON_UTF8_VALUE)))
.andExpect(status().isOk())
.andExpect(jsonPath("$.MembersList.clusterMembers[0].name").value(MEMBER_NAME))
.andExpect(jsonPath("$.MembersList.clusterMembers[0].memberId").value(MEMBER_ID))
.andExpect(jsonPath("$.MembersList.clusterName").value(CLUSTER_NAME));
}
@Test
public void pulseUpdateForPulseVersion() throws Exception {
this.mockMvc
.perform(post("/pulseUpdate").param("pulseData", "{\"PulseVersion\":\"{}\"}")
.principal(principal)
.accept(MediaType.parseMediaType(MediaType.APPLICATION_JSON_UTF8_VALUE)))
.andExpect(status().isOk())
.andExpect(jsonPath("$.PulseVersion.sourceDate").value("not empty"))
.andExpect(jsonPath("$.PulseVersion.sourceRepository").value("not empty"))
.andExpect(jsonPath("$.PulseVersion.pulseVersion").value("not empty"))
.andExpect(jsonPath("$.PulseVersion.sourceRevision").value("not empty"))
.andExpect(jsonPath("$.PulseVersion.buildId").value("not empty"))
.andExpect(jsonPath("$.PulseVersion.buildDate").value("not empty"));
}
@Test
public void pulseUpdateForQueryStatistics() throws Exception {
this.mockMvc
.perform(post("/pulseUpdate").param("pulseData", "{\"QueryStatistics\":\"{}\"}")
.principal(principal)
.accept(MediaType.parseMediaType(MediaType.APPLICATION_JSON_UTF8_VALUE)))
.andExpect(status().isOk()).andExpect(jsonPath("$.QueryStatistics.queriesList").isEmpty())
.andExpect(jsonPath("$.QueryStatistics.connectedFlag").value(false))
.andExpect(jsonPath("$.QueryStatistics.connectedErrorMsg").value(""));
}
@Test
public void pulseUpdateForSystemAlerts() throws Exception {
this.mockMvc
.perform(post("/pulseUpdate")
.param("pulseData", "{\"SystemAlerts\":{\"pageNumber\":\"1\"}}").principal(principal)
.accept(MediaType.parseMediaType(MediaType.APPLICATION_JSON_UTF8_VALUE)))
.andExpect(status().isOk()).andExpect(jsonPath("$.SystemAlerts.pageNumber").value(1))
.andExpect(jsonPath("$.SystemAlerts.connectedFlag").value(false))
.andExpect(jsonPath("$.SystemAlerts.connectedErrorMsg").value(""))
.andExpect(jsonPath("$.SystemAlerts.systemAlerts").isEmpty());
}
@Test
public void authenticateUserNotLoggedIn() throws Exception {
this.mockMvc
.perform(get("/authenticateUser")
.accept(MediaType.parseMediaType(MediaType.APPLICATION_JSON_UTF8_VALUE)))
.andExpect(status().isOk()).andExpect(jsonPath("$.isUserLoggedIn").value(false));
}
@Test
public void authenticateUserLoggedIn() throws Exception {
this.mockMvc
.perform(get("/authenticateUser").principal(principal)
.accept(MediaType.parseMediaType(MediaType.APPLICATION_JSON_UTF8_VALUE)))
.andExpect(status().isOk()).andExpect(jsonPath("$.isUserLoggedIn").value(true));
}
@Test
public void pulseVersion() throws Exception {
this.mockMvc
.perform(get("/pulseVersion")
.accept(MediaType.parseMediaType(MediaType.APPLICATION_JSON_UTF8_VALUE)))
.andExpect(status().isOk()).andExpect(jsonPath("$.pulseVersion").isNotEmpty())
.andExpect(jsonPath("$.buildId").isNotEmpty())
.andExpect(jsonPath("$.buildDate").isNotEmpty())
.andExpect(jsonPath("$.sourceDate").isNotEmpty())
.andExpect(jsonPath("$.sourceRevision").isNotEmpty())
.andExpect(jsonPath("$.sourceRepository").isNotEmpty());
}
@Test
public void clearAlerts() throws Exception {
this.mockMvc
.perform(get("/clearAlerts").param("alertType", "1")
.accept(MediaType.parseMediaType(MediaType.APPLICATION_JSON_UTF8_VALUE)))
.andExpect(status().isOk()).andExpect(jsonPath("$.pageNumber").value(1))
.andExpect(jsonPath("$.systemAlerts").isEmpty())
.andExpect(jsonPath("$.connectedFlag").value(false))
.andExpect(jsonPath("$.status").value("deleted"));
}
@Test
public void acknowledgeAlert() throws Exception {
this.mockMvc
.perform(get("/acknowledgeAlert").param("alertId", "1")
.accept(MediaType.parseMediaType(MediaType.APPLICATION_JSON_UTF8_VALUE)))
.andExpect(status().isOk()).andExpect(jsonPath("$.status").value("deleted"));
}
@Test
public void dataBrowserRegions() throws Exception {
this.mockMvc
.perform(get("/dataBrowserRegions")
.accept(MediaType.parseMediaType(MediaType.APPLICATION_JSON_UTF8_VALUE)))
.andExpect(status().isOk()).andExpect(jsonPath("$.clusterName").value(CLUSTER_NAME))
.andExpect(jsonPath("$.connectedFlag").value(false))
.andExpect(jsonPath("$.clusterRegions[0].fullPath").value(REGION_PATH))
.andExpect(jsonPath("$.clusterRegions[0].regionType").value(REGION_TYPE));
}
  @Test
  public void dataBrowserQuery() throws Exception {
    // Stub the cluster to answer any query with {"foo":"bar"}, then verify
    // the controller passes that result straight through in the response.
    doReturn(mapper.createObjectNode().put("foo", "bar")).when(cluster).executeQuery(anyString(),
        anyString(), anyInt());
    this.mockMvc
        .perform(get("/dataBrowserQuery").param("query", "SELECT * FROM " + REGION_PATH)
            .param("members", MEMBER_NAME).principal(principal)
            .accept(MediaType.parseMediaType(MediaType.APPLICATION_JSON_UTF8_VALUE)))
        .andExpect(status().isOk()).andExpect(jsonPath("$.foo").value("bar"));
  }
  @Test
  public void dataBrowserQueryHistory() throws Exception {
    // Execute a query first (reuses dataBrowserQuery()) so there is history
    // to view; the history echoes the query text as a quoted JSON string.
    dataBrowserQuery();
    this.mockMvc
        .perform(get("/dataBrowserQueryHistory").param("action", "view").principal(principal)
            .accept(MediaType.parseMediaType(MediaType.APPLICATION_JSON_UTF8_VALUE)))
        .andExpect(status().isOk()).andExpect(
            jsonPath("$.queryHistory[0].queryText").value("\"SELECT * FROM " + REGION_PATH + "\""));
  }
  @Test
  public void getQueryStatisticsGridModel() throws Exception {
    // Verifies the fixed set of column names exposed by the query-statistics
    // grid model endpoint (order-independent match).
    this.mockMvc
        .perform(get("/getQueryStatisticsGridModel").principal(principal)
            .accept(MediaType.parseMediaType(MediaType.APPLICATION_JSON_UTF8_VALUE)))
        .andExpect(status().isOk())
        .andExpect(jsonPath("$.columnNames",
            containsInAnyOrder("Query", "NumExecution", "TotalExecutionTime(ns)",
                "NumExecutionsInProgress", "NumTimesCompiled", "NumTimesGlobalIndexLookup",
                "NumRowsModified", "ParseTime(ms)", "BindTime(ms)", "OptimizeTime(ms)",
                "RoutingInfoTime(ms)", "GenerateTime(ms)", "TotalCompilationTime(ms)",
                "ExecutionTime(ns)", "ProjectionTime(ns)", "RowsModificationTime(ns)",
                "QNNumRowsSeen", "QNMsgSendTime(ns)", "QNMsgSerTime(ns)", "QNRespDeSerTime(ns)")));
  }
}
| |
package org.sdnplatform.sync.internal.rpc;
import java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import java.util.concurrent.LinkedTransferQueue;
import net.floodlightcontroller.core.annotations.LogMessageCategory;
import net.floodlightcontroller.core.annotations.LogMessageDoc;
import net.floodlightcontroller.core.annotations.LogMessageDocs;
import net.floodlightcontroller.core.util.SingletonTask;
import net.floodlightcontroller.debugcounter.IDebugCounterService;
import org.jboss.netty.bootstrap.ClientBootstrap;
import org.jboss.netty.bootstrap.ServerBootstrap;
import org.jboss.netty.channel.Channel;
import org.jboss.netty.channel.ChannelFuture;
import org.jboss.netty.channel.ChannelFutureListener;
import org.jboss.netty.channel.ChannelPipelineFactory;
import org.jboss.netty.channel.group.ChannelGroup;
import org.jboss.netty.channel.group.DefaultChannelGroup;
import org.jboss.netty.channel.socket.nio.NioClientSocketChannelFactory;
import org.jboss.netty.channel.socket.nio.NioServerSocketChannelFactory;
import org.sdnplatform.sync.internal.SyncManager;
import org.sdnplatform.sync.internal.config.Node;
import org.sdnplatform.sync.internal.util.Pair;
import org.sdnplatform.sync.thrift.SyncMessage;
import org.sdnplatform.sync.thrift.MessageType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A lightweight RPC mechanism built on netty.
* @author readams
*/
@LogMessageCategory("State Synchronization")
public class RPCService {
protected static final Logger logger =
LoggerFactory.getLogger(RPCService.class);
/**
* Sync manager associated with this RPC service
*/
protected SyncManager syncManager;
/**
* Debug counter service
*/
protected IDebugCounterService debugCounter;
/**
* Channel group that will hold all our channels
*/
final ChannelGroup cg = new DefaultChannelGroup("Internal RPC");
/**
* {@link ExecutorService} used for netty boss threads
*/
protected ExecutorService bossExecutor;
/**
* {@link ExecutorService} used for netty worker threads
*/
protected ExecutorService workerExecutor;
/**
* Netty {@link ClientBootstrap} used for creating client connections
*/
protected ClientBootstrap clientBootstrap;
/**
* Netty {@link ServerBootstrap} used for creating server connections
*/
protected ServerBootstrap serverBootstrap;
/**
* {@link ChannelPipelineFactory} for creating connections
*/
protected RPCPipelineFactory pipelineFactory;
/**
* Node connections
*/
protected HashMap<Short, NodeConnection> connections =
new HashMap<Short, NodeConnection>();
/**
* Transaction ID used in message headers in the RPC protocol
*/
protected AtomicInteger transactionId = new AtomicInteger();
/**
* Buffer size for sockets
*/
public static final int SEND_BUFFER_SIZE = 4 * 1024 * 1024;
/**
* Connect timeout for client connections
*/
public static final int CONNECT_TIMEOUT = 500;
/**
* True after the {@link RPCService#run()} method is called
*/
protected boolean started = false;
/**
* true after the {@link RPCService#shutdown()} method
* is called.
*/
protected volatile boolean shutDown = false;
/**
* Task to periodically ensure that connections are active
*/
protected SingletonTask reconnectTask;
/**
* If we want to rate-limit certain types of messages, we can do
* so by limiting the overall number of outstanding messages.
* The number of such messages will be stored in the
* {@link MessageWindow}
*/
protected ConcurrentHashMap<Short, MessageWindow> messageWindows;
protected static final EnumSet<MessageType> windowedTypes =
EnumSet.of(MessageType.SYNC_VALUE,
MessageType.SYNC_OFFER);
/**
* A thread pool for handling sync messages. These messages require
* a separate pool since writing to the node can be a blocking operation
* while waiting for window capacity, and blocking the I/O threads could
* lead to deadlock
* @see SyncMessageWorker
*/
protected ExecutorService syncExecutor;
/**
* A queue for holding sync messages that are awaiting being written
* to the channel.
* @see SyncMessageWorker
*/
protected LinkedTransferQueue<NodeMessage> syncQueue =
new LinkedTransferQueue<NodeMessage>();
/**
* Number of workers in the sync message thread pool
*/
protected static final int SYNC_MESSAGE_POOL = 2;
/**
* The maximum number of outstanding pending messages for messages
* that use message windows
*/
protected static final int MAX_PENDING_MESSAGES = 500;
public RPCService(SyncManager syncManager,
IDebugCounterService debugCounter) {
super();
this.syncManager = syncManager;
this.debugCounter = debugCounter;
messageWindows = new ConcurrentHashMap<Short, MessageWindow>();
}
// *************
// public methods
// *************
/**
* Start the RPC service
*/
public void run() {
started = true;
final ThreadGroup tg1 = new ThreadGroup("Sync Message Handlers");
tg1.setMaxPriority(Thread.NORM_PRIORITY - 3);
ThreadFactory f1 = new ThreadFactory() {
AtomicInteger id = new AtomicInteger();
@Override
public Thread newThread(Runnable runnable) {
return new Thread(tg1, runnable,
"SyncMessage-" + id.getAndIncrement());
}
};
syncExecutor = Executors.newCachedThreadPool(f1);
for (int i = 0; i < SYNC_MESSAGE_POOL; i++) {
syncExecutor.execute(new SyncMessageWorker());
}
final ThreadGroup tg2 = new ThreadGroup("Sync I/O Threads");
tg2.setMaxPriority(Thread.NORM_PRIORITY - 1);
ThreadFactory f2 = new ThreadFactory() {
@Override
public Thread newThread(Runnable runnable) {
return new Thread(tg2, runnable);
}
};
bossExecutor = Executors.newCachedThreadPool(f2);
workerExecutor = Executors.newCachedThreadPool(f2);
pipelineFactory = new RPCPipelineFactory(syncManager, this);
startServer(pipelineFactory);
startClients(pipelineFactory);
}
    /**
     * Stop the RPC service: close all channels and release the Netty
     * bootstraps, pipeline factory and executors.
     */
    @LogMessageDocs({
        @LogMessageDoc(level="WARN",
                       message="Failed to cleanly shut down RPC server",
                       explanation="Could not close all open sockets cleanly"),
        @LogMessageDoc(level="WARN",
                       message="Interrupted while shutting down RPC server",
                       explanation="Could not close all open sockets cleanly")
    })
    public void shutdown() {
        shutDown = true;
        try {
            // Close every channel in the group; allow up to 5 seconds.
            if (!cg.close().await(5, TimeUnit.SECONDS)) {
                logger.warn("Failed to cleanly shut down RPC server");
                // NOTE(review): returning here leaves the bootstraps,
                // pipeline factory and executors unreleased -- confirm
                // this early-exit is intentional.
                return;
            }
            if (clientBootstrap != null)
                clientBootstrap.releaseExternalResources();
            clientBootstrap = null;
            if (serverBootstrap != null)
                serverBootstrap.releaseExternalResources();
            serverBootstrap = null;
            if (pipelineFactory != null)
                pipelineFactory.releaseExternalResources();
            pipelineFactory = null;
            if (bossExecutor != null)
                bossExecutor.shutdown();
            bossExecutor = null;
            if (workerExecutor != null)
                workerExecutor.shutdown();
            workerExecutor = null;
        } catch (InterruptedException e) {
            logger.warn("Interrupted while shutting down RPC server");
        }
        logger.debug("Internal floodlight RPC shut down");
    }
/**
* Get a suitable transaction ID for sending a message
* @return the unique transaction iD
*/
public int getTransactionId() {
return transactionId.getAndIncrement();
}
/**
* Write a message to the node specified
* @param nodeId the node ID
* @param bsm the message to write
* @return <code>true</code> if the message was actually written to
* the channel. Note this is not the same as having been sent to the
* other node.
* @throws InterruptedException
*/
public boolean writeToNode(Short nodeId, SyncMessage bsm)
throws InterruptedException {
if (nodeId == null) return false;
NodeConnection nc = connections.get(nodeId);
if (nc != null && nc.state == NodeConnectionState.CONNECTED) {
waitForMessageWindow(bsm.getType(), nodeId, 0);
nc.nodeChannel.write(bsm);
return true;
}
return false;
}
    /**
     * Remove the connection from the connection registry and clean up
     * any remaining shrapnel
     * @param nodeId the node whose connection should be torn down
     */
    public void disconnectNode(short nodeId) {
        synchronized (connections) {
            Short n = Short.valueOf(nodeId);
            // Mark the message window disconnected and wake any writers
            // blocked on it so they don't wait forever on a dead peer.
            MessageWindow mw = messageWindows.get(n);
            if (mw != null) {
                mw.lock.lock();
                mw.disconnected = true;
                try {
                    mw.full.signalAll();
                    messageWindows.remove(n);
                } finally {
                    mw.lock.unlock();
                }
            }
            // Close the channel / cancel any pending connect attempt.
            NodeConnection nc = connections.get(nodeId);
            if (nc != null) {
                nc.nuke();
            }
            connections.remove(nodeId);
        }
    }
/**
* Check whether all links are established
* @return
*/
public boolean isFullyConnected() {
for (Node n : syncManager.getClusterConfig().getNodes()) {
if (n.getNodeId() != syncManager.getLocalNodeId() &&
!isConnected(n.getNodeId())) {
if (logger.isTraceEnabled()) {
logger.trace("[{}->{}] missing connection",
syncManager.getLocalNodeId(),
n.getNodeId());
}
return false;
}
}
return true;
}
/**
* Find out if a particular node is connected
* @param nodeId
* @return true if the node is connected
*/
public boolean isConnected(short nodeId) {
NodeConnection nc = connections.get(nodeId);
return (nc != null && nc.state == NodeConnectionState.CONNECTED);
}
/**
* Called when a message is acknowledged by a remote node
* @param type the message type
* @param nodeId the remote node
*/
public void messageAcked(MessageType type, Short nodeId) {
if (nodeId == null) return;
if (!windowedTypes.contains(type)) return;
MessageWindow mw = messageWindows.get(nodeId);
if (mw == null) return;
int pending = mw.pending.decrementAndGet();
if (pending < MAX_PENDING_MESSAGES) {
mw.lock.lock();
try {
mw.full.signalAll();
} finally {
mw.lock.unlock();
}
}
}
// *************
// Local methods
// *************
/**
* Get the appropriate {@link MessageWindow} object for the given node.
* @param nodeId the remote node
* @return a {@link MessageWindow} object
*/
private MessageWindow getMW(short nodeId) {
if (!isConnected(nodeId)) return null;
Short n = Short.valueOf(nodeId);
MessageWindow mw = messageWindows.get(n);
if (mw == null) {
mw = new MessageWindow();
MessageWindow old = messageWindows.putIfAbsent(n, mw);
if (old != null) mw = old;
}
return mw;
}
/**
* Wait for a message window slow to be available for the given node and
* message type
* @param type the type of the message
* @param nodeId the node Id
* @param maxWait the maximum time to wait in milliseconds
* @throws InterruptedException
* @return <code>true</code> if the message can be safely written
*/
private boolean waitForMessageWindow(MessageType type, short nodeId,
long maxWait)
throws InterruptedException {
if (!windowedTypes.contains(type)) return true;
long start = System.nanoTime();
// note that this can allow slightly more than the maximum number
// of messages. This is fine.
MessageWindow mw = getMW(nodeId);
if (!mw.disconnected &&
mw.pending.get() >= MAX_PENDING_MESSAGES) {
mw.lock.lock();
try {
while (!mw.disconnected &&
mw.pending.get() >= MAX_PENDING_MESSAGES) {
long now = System.nanoTime();
if (maxWait > 0 &&
(now - start) > maxWait * 1000) return false;
mw.full.awaitNanos(now - start);
}
} finally {
mw.lock.unlock();
}
}
mw = getMW(nodeId);
if (mw != null)
mw.pending.getAndIncrement();
return true;
}
/**
* Start listening sockets
*/
@LogMessageDoc(level="INFO",
message="Listening for internal floodlight RPC on {port}",
explanation="The internal RPC service is ready for connections")
protected void startServer(ChannelPipelineFactory pipelineFactory) {
final ServerBootstrap bootstrap =
new ServerBootstrap(
new NioServerSocketChannelFactory(bossExecutor,
workerExecutor));
bootstrap.setOption("reuseAddr", true);
bootstrap.setOption("child.keepAlive", true);
bootstrap.setOption("child.tcpNoDelay", true);
bootstrap.setOption("child.sendBufferSize", SEND_BUFFER_SIZE);
bootstrap.setOption("child.receiveBufferSize", SEND_BUFFER_SIZE);
bootstrap.setPipelineFactory(pipelineFactory);
serverBootstrap = bootstrap;
int port = syncManager.getClusterConfig().getNode().getPort();
InetSocketAddress sa;
String listenAddress =
syncManager.getClusterConfig().getListenAddress();
if (listenAddress != null)
sa = new InetSocketAddress(listenAddress, port);
else
sa = new InetSocketAddress(port);
cg.add(bootstrap.bind(sa));
logger.info("Listening for internal floodlight RPC on {}", sa);
}
/**
* Wait for the client connection
* @author readams
*/
protected class ConnectCFListener implements ChannelFutureListener {
protected Node node;
public ConnectCFListener(Node node) {
super();
this.node = node;
}
@Override
public void operationComplete(ChannelFuture cf) throws Exception {
if (!cf.isSuccess()) {
synchronized (connections) {
NodeConnection c = connections.remove(node.getNodeId());
if (c != null) c.nuke();
cf.getChannel().close();
}
String message = "[unknown error]";
if (cf.isCancelled()) message = "Timed out on connect";
if (cf.getCause() != null) message = cf.getCause().getMessage();
logger.debug("[{}->{}] Could not connect to RPC " +
"node: {}",
new Object[]{syncManager.getLocalNodeId(),
node.getNodeId(),
message});
} else {
logger.trace("[{}->{}] Channel future successful",
syncManager.getLocalNodeId(),
node.getNodeId());
}
}
}
/**
* Add the node connection to the node connection map
* @param nodeId the node ID for the channel
* @param channel the new channel
*/
protected void nodeConnected(short nodeId, Channel channel) {
logger.debug("[{}->{}] Connection established",
syncManager.getLocalNodeId(),
nodeId);
synchronized (connections) {
NodeConnection c = connections.get(nodeId);
if (c == null) {
connections.put(nodeId, c = new NodeConnection());
}
c.nodeChannel = channel;
c.state = NodeConnectionState.CONNECTED;
}
}
/**
* Connect to remote servers. We'll initiate the connection to
* any nodes with a lower ID so that there will be a single connection
* between each pair of nodes which we'll use symmetrically
*/
protected void startClients(ChannelPipelineFactory pipelineFactory) {
final ClientBootstrap bootstrap =
new ClientBootstrap(
new NioClientSocketChannelFactory(bossExecutor,
workerExecutor));
bootstrap.setOption("child.reuseAddr", true);
bootstrap.setOption("child.keepAlive", true);
bootstrap.setOption("child.tcpNoDelay", true);
bootstrap.setOption("child.sendBufferSize", SEND_BUFFER_SIZE);
bootstrap.setOption("child.connectTimeoutMillis", CONNECT_TIMEOUT);
bootstrap.setPipelineFactory(pipelineFactory);
clientBootstrap = bootstrap;
ScheduledExecutorService ses =
syncManager.getThreadPool().getScheduledExecutor();
reconnectTask = new SingletonTask(ses, new ConnectTask());
reconnectTask.reschedule(0, TimeUnit.SECONDS);
}
    /**
     * Connect to a remote node if appropriate.  We only initiate toward
     * nodes with a lower ID than ours, so each pair of nodes shares a
     * single, symmetrically-used connection.
     * @param n the node to connect to
     */
    protected void doNodeConnect(Node n) {
        if (!shutDown && n.getNodeId() < syncManager.getLocalNodeId()) {
            Short nodeId = n.getNodeId();
            synchronized (connections) {
                NodeConnection c = connections.get(n.getNodeId());
                if (c == null) {
                    connections.put(nodeId, c = new NodeConnection());
                }
                if (logger.isTraceEnabled()) {
                    logger.trace("[{}->{}] Connection state: {}",
                                 new Object[]{syncManager.getLocalNodeId(),
                                              nodeId, c.state});
                }
                // Only initiate when no connection exists and none is
                // already pending; the ConnectCFListener resets state on
                // failure so a later run of this task can retry.
                if (c.state.equals(NodeConnectionState.NONE)) {
                    if (logger.isDebugEnabled()) {
                        logger.debug("[{}->{}] Attempting connection {} {}",
                                     new Object[]{syncManager.getLocalNodeId(),
                                                  nodeId,
                                                  n.getHostname(),
                                                  n.getPort()});
                    }
                    SocketAddress sa =
                            new InetSocketAddress(n.getHostname(), n.getPort());
                    c.pendingFuture = clientBootstrap.connect(sa);
                    c.pendingFuture.addListener(new ConnectCFListener(n));
                    c.state = NodeConnectionState.PENDING;
                }
            }
        }
    }
/**
* Ensure that all client connections are active
*/
protected void startClientConnections() {
for (Node n : syncManager.getClusterConfig().getNodes()) {
doNodeConnect(n);
}
}
/**
* Periodically ensure that all the node connections are alive
* @author readams
*/
protected class ConnectTask implements Runnable {
@Override
public void run() {
try {
if (!shutDown)
startClientConnections();
} catch (Exception e) {
logger.error("Error in reconnect task", e);
}
if (!shutDown) {
reconnectTask.reschedule(500, TimeUnit.MILLISECONDS);
}
}
}
    /**
     * Various states for connections
     * @author readams
     */
    protected enum NodeConnectionState {
        /** No connection exists and none is in progress */
        NONE,
        /** A connect attempt has been issued but not yet completed */
        PENDING,
        /** The channel is established and usable */
        CONNECTED
    }
    /**
     * Connection state wrapper for node connections
     * @author readams
     */
    protected static class NodeConnection {
        // volatile: read without holding the connections lock (writeToNode)
        volatile NodeConnectionState state = NodeConnectionState.NONE;
        // Future for an in-flight connect attempt (PENDING state)
        protected ChannelFuture pendingFuture;
        // The established channel (CONNECTED state)
        protected Channel nodeChannel;
        // Tear down this connection: cancel any pending connect, close the
        // channel, and reset to NONE so a reconnect can be attempted.
        protected void nuke() {
            state = NodeConnectionState.NONE;
            if (pendingFuture != null) pendingFuture.cancel();
            if (nodeChannel != null) nodeChannel.close();
            pendingFuture = null;
            nodeChannel = null;
        }
    }
    /**
     * Maintain state for the pending message window for a given message type
     * @author readams
     */
    protected static class MessageWindow {
        // Number of sent-but-unacknowledged windowed messages
        AtomicInteger pending = new AtomicInteger();
        // Set (and waiters signaled) when the node disconnects
        volatile boolean disconnected = false;
        // Guards the "full" condition below
        Lock lock = new ReentrantLock();
        // Signaled when window capacity becomes available or on disconnect
        Condition full = lock.newCondition();
    }
    /**
     * A pending message to be sent to a particular node: pairs the target
     * node ID with the {@link SyncMessage} payload.
     * @author readams
     */
    protected static class NodeMessage extends Pair<Short,SyncMessage> {
        private static final long serialVersionUID = -3443080461324647922L;
        public NodeMessage(Short first, SyncMessage second) {
            super(first, second);
        }
    }
/**
* A worker thread responsible for reading sync messages off the queue
* and writing them to the appropriate node's channel. Because calls
* {@link RPCService#writeToNode(Short, SyncMessage)} can block while
* waiting for available slots in the message window, we do this in a
* separate thread.
* @author readams
*/
protected class SyncMessageWorker implements Runnable {
@Override
public void run() {
while (true) {
try {
NodeMessage m = syncQueue.take();
writeToNode(m.getFirst(), m.getSecond());
} catch (Exception e) {
logger.error("Error while dispatching message", e);
}
}
}
}
}
| |
/*
* Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.autoscaling.model;
import java.io.Serializable;
import com.amazonaws.AmazonWebServiceRequest;
/**
 * Request object for moving Auto Scaling instances out of standby mode.
 * Carries the instance IDs to move and the name of the Auto Scaling group
 * they belong to.
 */
public class ExitStandbyRequest extends AmazonWebServiceRequest implements
        Serializable, Cloneable {

    /**
     * <p>
     * One or more instance IDs. You must specify at least one instance ID.
     * </p>
     */
    private com.amazonaws.internal.SdkInternalList<String> instanceIds;
    /**
     * <p>
     * The name of the Auto Scaling group.
     * </p>
     */
    private String autoScalingGroupName;

    /**
     * <p>
     * One or more instance IDs. You must specify at least one instance ID.
     * </p>
     *
     * @return One or more instance IDs. You must specify at least one instance
     *         ID.
     */
    public java.util.List<String> getInstanceIds() {
        // Lazily initialize so callers never see null.
        if (instanceIds == null) {
            instanceIds = new com.amazonaws.internal.SdkInternalList<String>();
        }
        return instanceIds;
    }

    /**
     * <p>
     * One or more instance IDs. You must specify at least one instance ID.
     * </p>
     *
     * @param instanceIds
     *        One or more instance IDs. You must specify at least one instance
     *        ID.
     */
    public void setInstanceIds(java.util.Collection<String> instanceIds) {
        if (instanceIds == null) {
            this.instanceIds = null;
            return;
        }
        this.instanceIds = new com.amazonaws.internal.SdkInternalList<String>(
                instanceIds);
    }

    /**
     * <p>
     * One or more instance IDs. You must specify at least one instance ID.
     * </p>
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if
     * any). Use {@link #setInstanceIds(java.util.Collection)} or
     * {@link #withInstanceIds(java.util.Collection)} if you want to override
     * the existing values.
     * </p>
     *
     * @param instanceIds
     *        One or more instance IDs. You must specify at least one instance
     *        ID.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public ExitStandbyRequest withInstanceIds(String... instanceIds) {
        if (this.instanceIds == null) {
            setInstanceIds(new com.amazonaws.internal.SdkInternalList<String>(
                    instanceIds.length));
        }
        for (String ele : instanceIds) {
            this.instanceIds.add(ele);
        }
        return this;
    }

    /**
     * <p>
     * One or more instance IDs. You must specify at least one instance ID.
     * </p>
     *
     * @param instanceIds
     *        One or more instance IDs. You must specify at least one instance
     *        ID.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public ExitStandbyRequest withInstanceIds(
            java.util.Collection<String> instanceIds) {
        setInstanceIds(instanceIds);
        return this;
    }

    /**
     * <p>
     * The name of the Auto Scaling group.
     * </p>
     *
     * @param autoScalingGroupName
     *        The name of the Auto Scaling group.
     */
    public void setAutoScalingGroupName(String autoScalingGroupName) {
        this.autoScalingGroupName = autoScalingGroupName;
    }

    /**
     * <p>
     * The name of the Auto Scaling group.
     * </p>
     *
     * @return The name of the Auto Scaling group.
     */
    public String getAutoScalingGroupName() {
        return this.autoScalingGroupName;
    }

    /**
     * <p>
     * The name of the Auto Scaling group.
     * </p>
     *
     * @param autoScalingGroupName
     *        The name of the Auto Scaling group.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public ExitStandbyRequest withAutoScalingGroupName(
            String autoScalingGroupName) {
        setAutoScalingGroupName(autoScalingGroupName);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getInstanceIds() != null)
            sb.append("InstanceIds: " + getInstanceIds() + ",");
        if (getAutoScalingGroupName() != null)
            sb.append("AutoScalingGroupName: " + getAutoScalingGroupName());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (!(obj instanceof ExitStandbyRequest))
            return false;
        ExitStandbyRequest other = (ExitStandbyRequest) obj;
        // Objects.equals reproduces the original null-safe comparison.
        return java.util.Objects.equals(getInstanceIds(),
                                        other.getInstanceIds())
                && java.util.Objects.equals(getAutoScalingGroupName(),
                                            other.getAutoScalingGroupName());
    }

    @Override
    public int hashCode() {
        // Keep the original 31-based accumulation so hash values are
        // unchanged.
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime
                * hashCode
                + ((getInstanceIds() == null) ? 0 : getInstanceIds().hashCode());
        hashCode = prime
                * hashCode
                + ((getAutoScalingGroupName() == null) ? 0
                        : getAutoScalingGroupName().hashCode());
        return hashCode;
    }

    @Override
    public ExitStandbyRequest clone() {
        return (ExitStandbyRequest) super.clone();
    }
}
| |
/**
* Copyright (C) 2013 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.engine.view.worker;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.opengamma.core.id.ExternalSchemes;
import com.opengamma.engine.ComputationTargetSpecification;
import com.opengamma.engine.marketdata.MarketDataProvider;
import com.opengamma.engine.marketdata.resolver.MarketDataProviderResolver;
import com.opengamma.engine.marketdata.spec.LiveMarketDataSpecification;
import com.opengamma.engine.marketdata.spec.MarketDataSpecification;
import com.opengamma.engine.target.ComputationTargetType;
import com.opengamma.engine.value.ValueProperties;
import com.opengamma.engine.value.ValuePropertyNames;
import com.opengamma.engine.value.ValueSpecification;
import com.opengamma.id.UniqueId;
import com.opengamma.livedata.UserPrincipal;
import com.opengamma.util.test.TestGroup;
/**
 * Tests for the subscription tracking logic of the {@link MarketDataManager}.
 */
@Test(groups = TestGroup.UNIT)
public class MarketDataManagerTest {

    // Manager under test; recreated for every test method in setUp()
    private MarketDataManager _manager;

    // Builds a fresh manager with a live market data spec for each test.
    @BeforeMethod
    public void setUp() throws Exception {
        _manager = new MarketDataManager(createChangeListener(), createResolver(), null, null);
        List<MarketDataSpecification> spec = Lists.newArrayList();
        spec.add(LiveMarketDataSpecification.LIVE_SPEC);
        _manager.createSnapshotManagerForCycle(new UserPrincipal("bloggs", "127.0.0.1"), ImmutableList.copyOf(spec));
    }

    @Test(expectedExceptions = IllegalArgumentException.class)
    public void testConstructionWithNullListenerFails() {
        new MarketDataManager(null, createResolver(), null, null);
    }

    @Test(expectedExceptions = IllegalArgumentException.class)
    public void testConstructionWithNullResolverFails() {
        new MarketDataManager(createChangeListener(), null, null, null);
    }

    @Test
    public void testQueryForNonExistentSub() {
        _manager.requestMarketDataSubscriptions(ImmutableSet.of(createValueSpecForMarketValue("AAPL.")));
        assertThat(_manager.querySubscriptionState("BOGUS").size(), is(0));
    }

    @Test
    public void testMarketDataRequestIsInitiallyPending() {
        _manager.requestMarketDataSubscriptions(ImmutableSet.of(createValueSpecForMarketValue("AAPL.")));
        checkSingleSubscriptionState("AAPL.", SubscriptionStateQuery.SubscriptionState.PENDING);
    }

    @Test
    public void testMarketDataSubscriptionSucceeding() {
        ImmutableSet<ValueSpecification> valueSpecs = ImmutableSet.of(createValueSpecForMarketValue("AAPL."));
        _manager.requestMarketDataSubscriptions(valueSpecs);
        _manager.subscriptionsSucceeded(valueSpecs);
        checkSingleSubscriptionState("AAPL.", SubscriptionStateQuery.SubscriptionState.ACTIVE);
    }

    @Test
    public void testMarketDataSubscriptionFailing() {
        ValueSpecification valueSpec = createValueSpecForMarketValue("AAPL.");
        _manager.requestMarketDataSubscriptions(ImmutableSet.of(valueSpec));
        _manager.subscriptionFailed(valueSpec, "Que?");
        checkSingleSubscriptionState("AAPL.", SubscriptionStateQuery.SubscriptionState.FAILED);
    }

    // Requesting a new set of specs implicitly removes subscriptions that
    // are no longer requested.
    @Test
    public void testMarketDataSubscriptionRemoval() {
        ImmutableSet<ValueSpecification> valueSpecs1 = ImmutableSet.of(createValueSpecForMarketValue("AAPL."));
        _manager.requestMarketDataSubscriptions(valueSpecs1);
        _manager.subscriptionsSucceeded(valueSpecs1);
        Set<ValueSpecification> valueSpecs2 = createMarketDataValueSpecs("GOOG.");
        _manager.requestMarketDataSubscriptions(valueSpecs2);
        _manager.subscriptionsSucceeded(valueSpecs2);
        checkSingleSubscriptionState("AAPL.", SubscriptionStateQuery.SubscriptionState.REMOVED);
        checkSingleSubscriptionState("GOOG.", SubscriptionStateQuery.SubscriptionState.ACTIVE);
    }

    // Walks one subscription through PENDING -> ACTIVE -> REMOVED ->
    // PENDING -> FAILED.
    @Test
    public void testMarketDataLifecycle() {
        ValueSpecification spec = createValueSpecForMarketValue("AAPL.");
        ImmutableSet<ValueSpecification> valueSpecs = ImmutableSet.of(spec);
        _manager.requestMarketDataSubscriptions(valueSpecs);
        checkSingleSubscriptionState("AAPL.", SubscriptionStateQuery.SubscriptionState.PENDING);
        _manager.subscriptionsSucceeded(valueSpecs);
        checkSingleSubscriptionState("AAPL.", SubscriptionStateQuery.SubscriptionState.ACTIVE);
        _manager.requestMarketDataSubscriptions(ImmutableSet.<ValueSpecification>of());
        checkSingleSubscriptionState("AAPL.", SubscriptionStateQuery.SubscriptionState.REMOVED);
        _manager.requestMarketDataSubscriptions(valueSpecs);
        checkSingleSubscriptionState("AAPL.", SubscriptionStateQuery.SubscriptionState.PENDING);
        _manager.subscriptionFailed(spec, "Why?");
        checkSingleSubscriptionState("AAPL.", SubscriptionStateQuery.SubscriptionState.FAILED);
    }

    // Query strings act as prefixes, so "AAPL" matches all AAPL tickers.
    @Test
    public void testAllMatchingTickersAreReturned() {
        Set<ValueSpecification> valueSpecs =
                createMarketDataValueSpecs("AAPL.", "AAPL/G4NHG.O", "AAPL/G4G3F.", "GOOG.", "GOOG/GsG~K.");
        _manager.requestMarketDataSubscriptions(valueSpecs);
        assertThat(_manager.querySubscriptionState("AAPL").size(), is(3));
        assertThat(_manager.querySubscriptionState("AAPL.").size(), is(1));
        assertThat(_manager.querySubscriptionState("GOOG").size(), is(2));
        assertThat(_manager.querySubscriptionState("GOOG.").size(), is(1));
    }

    @Test
    public void testNullQueryResultsInAllMatchingTickersReturned() {
        Set<ValueSpecification> valueSpecs =
                createMarketDataValueSpecs("AAPL.", "AAPL/G4NHG.O", "AAPL/G4G3F.", "GOOG.", "GOOG/GsG~K.");
        _manager.requestMarketDataSubscriptions(valueSpecs);
        assertThat(_manager.querySubscriptionState(null).size(), is(5));
    }

    @Test
    public void testEmptyQueryResultsInAllMatchingTickersReturned() {
        Set<ValueSpecification> valueSpecs =
                createMarketDataValueSpecs("AAPL.", "AAPL/G4NHG.O", "AAPL/G4G3F.", "GOOG.", "GOOG/GsG~K.");
        _manager.requestMarketDataSubscriptions(valueSpecs);
        assertThat(_manager.querySubscriptionState("").size(), is(5));
    }

    // Exercises the per-state counters across a mix of pending, failed,
    // removed and active subscriptions.
    @Test
    public void testQueryByType() {
        Set<ValueSpecification> appleSpec = createMarketDataValueSpecs("AAPL.");
        Set<ValueSpecification> appleOptions = createMarketDataValueSpecs("AAPL/G4NHG.O", "AAPL/G4G3F.");
        Set<ValueSpecification> googleOptions = createMarketDataValueSpecs("GOOG.", "GOOG/GsG~K.");
        _manager.requestMarketDataSubscriptions(
                ImmutableSet.<ValueSpecification>builder()
                        .addAll(appleOptions)
                        .addAll(appleSpec)
                        .build());
        assertThat(_manager.getFailedSubscriptionCount(), is(0));
        assertThat(_manager.getPendingSubscriptionCount(), is(3));
        // Drop the AAPL request
        _manager.requestMarketDataSubscriptions(
                ImmutableSet.<ValueSpecification>builder()
                        .addAll(appleOptions)
                        .addAll(googleOptions)
                        .build());
        _manager.subscriptionFailed(createValueSpecForMarketValue("AAPL/G4NHG.O"), "oops");
        _manager.subscriptionsSucceeded(createMarketDataValueSpecs("AAPL/G4G3F."));
        assertThat(_manager.getFailedSubscriptionCount(), is(1));
        assertThat(_manager.getPendingSubscriptionCount(), is(2));
        assertThat(_manager.getRemovedSubscriptionCount(), is(1));
        assertThat(_manager.getActiveSubscriptionCount(), is(1));
    }

    @Test
    public void testValueSpecsOnSameTickerAreDistinguishedByGet() {
        Set<ValueSpecification> specs = ImmutableSet.of(createValueSpecForMarketValue("AAPL."),
                                                        createValueSpecForDividendYield("AAPL."));
        _manager.requestMarketDataSubscriptions(specs);
        assertThat(_manager.getPendingSubscriptionCount(), is(2));
        Set<String> keys = _manager.queryPendingSubscriptions().keySet();
        assertThat(keys.size(), is(2));
        checkKeyMatches(keys);
    }

    @Test
    public void testValueSpecsOnSameTickerAreDistinguishedByQuery() {
        Set<ValueSpecification> specs = ImmutableSet.of(createValueSpecForMarketValue("AAPL."), createValueSpecForDividendYield("AAPL."));
        _manager.requestMarketDataSubscriptions(specs);
        Set<String> keys = _manager.querySubscriptionState("").keySet();
        assertThat(keys.size(), is(2));
        checkKeyMatches(keys);
    }

    // Success/failure callbacks for specs that were never requested must
    // leave all counters untouched.
    @Test
    public void testUnexpectedSubscriptionNotificationsIgnored() {
        Set<ValueSpecification> specs1 = ImmutableSet.of(createValueSpecForMarketValue("AAPL."));
        _manager.subscriptionsSucceeded(specs1);
        assertThat(_manager.querySubscriptionState("AAPL.").size(), is(0));
        assertThat(_manager.getFailedSubscriptionCount(), is(0));
        assertThat(_manager.getPendingSubscriptionCount(), is(0));
        assertThat(_manager.getRemovedSubscriptionCount(), is(0));
        assertThat(_manager.getActiveSubscriptionCount(), is(0));
        ValueSpecification spec2 = createValueSpecForMarketValue("GOOG.");
        _manager.subscriptionFailed(spec2, "Not authorized");
        assertThat(_manager.querySubscriptionState("GOOG.").size(), is(0));
        assertThat(_manager.getFailedSubscriptionCount(), is(0));
        assertThat(_manager.getPendingSubscriptionCount(), is(0));
        assertThat(_manager.getRemovedSubscriptionCount(), is(0));
        assertThat(_manager.getActiveSubscriptionCount(), is(0));
    }

    // An ACTIVE subscription that later fails transitions to FAILED.
    @Test
    public void testUnexpectedChangeOfSubscriptionState() {
        ValueSpecification spec = createValueSpecForMarketValue("AAPL.");
        Set<ValueSpecification> specs = ImmutableSet.of(spec);
        _manager.requestMarketDataSubscriptions(specs);
        assertThat(_manager.getFailedSubscriptionCount(), is(0));
        assertThat(_manager.getPendingSubscriptionCount(), is(1));
        assertThat(_manager.getRemovedSubscriptionCount(), is(0));
        assertThat(_manager.getActiveSubscriptionCount(), is(0));
        _manager.subscriptionsSucceeded(specs);
        assertThat(_manager.getFailedSubscriptionCount(), is(0));
        assertThat(_manager.getPendingSubscriptionCount(), is(0));
        assertThat(_manager.getRemovedSubscriptionCount(), is(0));
        assertThat(_manager.getActiveSubscriptionCount(), is(1));
        _manager.subscriptionFailed(spec, "Not authorized");
        assertThat(_manager.getFailedSubscriptionCount(), is(1));
        assertThat(_manager.getPendingSubscriptionCount(), is(0));
        assertThat(_manager.getRemovedSubscriptionCount(), is(0));
        assertThat(_manager.getActiveSubscriptionCount(), is(0));
    }

    // Asserts the key set contains one Market_Value and one Dividend_Yield
    // entry, both for AAPL.
    private void checkKeyMatches(Set<String> keys) {
        boolean mvMatch = false;
        boolean dyMatch = false;
        for (String key : keys) {
            assertThat(key.contains("AAPL"), is(true));
            mvMatch = mvMatch || key.contains("Market_Value");
            dyMatch = dyMatch || key.contains("Dividend_Yield");
        }
        assertThat(mvMatch, is(true));
        assertThat(dyMatch, is(true));
    }

    // Asserts the query for the ticker returns exactly one subscription in
    // the expected state.
    private void checkSingleSubscriptionState(String ticker, MarketDataManager.SubscriptionState expectedState) {
        Map<String, MarketDataManager.SubscriptionStatus> stateMap = _manager.querySubscriptionState(ticker);
        assertThat(Iterables.getOnlyElement(stateMap.values()).getState(), is(expectedState.name()));
    }

    // Builds Market_Value specs for each of the given tickers.
    private Set<ValueSpecification> createMarketDataValueSpecs(String... tickers) {
        ImmutableSet.Builder<ValueSpecification> builder = ImmutableSet.builder();
        for (String ticker : tickers) {
            builder.add(createValueSpecForMarketValue(ticker));
        }
        return builder.build();
    }

    private ValueSpecification createValueSpecForMarketValue(String ticker) {
        return createValueSpec(ticker, "Market_Value");
    }

    private ValueSpecification createValueSpecForDividendYield(String ticker) {
        return createValueSpec(ticker, "Dividend_Yield");
    }

    // Builds a primitive-target value spec on an ACTIVFEED ticker.
    private ValueSpecification createValueSpec(String ticker, String valueName) {
        UniqueId uniqueId = UniqueId.of(ExternalSchemes.ACTIVFEED_TICKER.getName(), ticker);
        ValueProperties properties = ValueProperties.builder()
                .with(ValuePropertyNames.FUNCTION, "MarketDataSourcingFunction")
                .get();
        ComputationTargetSpecification targetSpecification =
                new ComputationTargetSpecification(ComputationTargetType.PRIMITIVE, uniqueId);
        return new ValueSpecification(valueName, targetSpecification, properties);
    }

    // No-op change listener; the tests only exercise subscription tracking.
    private MarketDataChangeListener createChangeListener() {
        return new MarketDataChangeListener() {
            @Override
            public void onMarketDataValuesChanged(Collection<ValueSpecification> valueSpecifications) { }
        };
    }

    // Resolver returning a mock provider whose snapshots are also mocks.
    private MarketDataProviderResolver createResolver() {
        return new MarketDataProviderResolver() {
            @Override
            public MarketDataProvider resolve(UserPrincipal marketDataUser, MarketDataSpecification snapshotSpec) {
                MarketDataProvider mock = mock(MarketDataProvider.class);
                when(mock.snapshot(any(MarketDataSpecification.class))).thenReturn(mock(CompositeMarketDataSnapshot.class));
                return mock;
            }
        };
    }
}
| |
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package org.pattern.project;
import com.google.gson.Gson;
import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import com.google.gson.reflect.TypeToken;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.io.FileUtils;
import org.netbeans.api.project.FileOwnerQuery;
import org.openide.filesystems.FileObject;
import org.openide.filesystems.FileUtil;
import org.openide.util.Exceptions;
import org.pattern.api.MultiImageImporter;
import org.pattern.api.MultiImageImporterFinder;
import org.pattern.data.MultiImage;
import org.pattern.data.Particle;
import org.pattern.data.ParticleImage;
import org.pattern.data.UniImage;
import org.pattern.serialization.PatternGson;
/**
* Util operations to support files with {@code .pattern} extension.
* @author palas
*/
public class PatternFileSupport {
    // The {@code .pattern} file extension handled by this support class
    private static final String PATTERN_EXT = "pattern";
    // JSON key holding the particle array
    private static final String PARTICLES = "particles";
    // JSON key holding the relative image path
    private static final String IMAGE = "image";
    // JSON key holding the pattern name
    private static final String NAME = "name";
    // Charset used when reading .pattern files
    private static final String ENCODING = "UTF-8";
public static FileObject getImageFileObject(FileObject patternFile){
try {
String json = FileUtils.readFileToString(FileUtil.toFile(patternFile), ENCODING);
JsonObject root = new JsonParser().parse(json).getAsJsonObject();
String imagePath = root.get(IMAGE).getAsString();
FileObject projDir = FileOwnerQuery.getOwner(patternFile).getProjectDirectory();
return projDir.getFileObject(
"images/"+ imagePath);
} catch (IOException ex) {
Exceptions.printStackTrace(ex);
}
return null;
}
/**
* Loads pattern {@link MultiImage} according to information contained in
* given file.
*
* @param fp .pattern file, must exist already
* @return
* @throws IOException
*/
public static MultiImage load(FileObject fp) throws IOException {
if (fp.isFolder()) {
throw new IllegalArgumentException("Given folder. File expected");
}
Gson gson = PatternGson.gson();
String json = FileUtils.readFileToString(FileUtil.toFile(fp), ENCODING);
JsonObject root = new JsonParser().parse(json).getAsJsonObject();
String name = root.get(NAME).getAsString();
String imagePath = root.get(IMAGE).getAsString();
FileObject projectFolder = fp.getParent().getParent(); // comming from .pattern -> images -> project
FileObject imagesFolder = projectFolder.getFileObject(
"images/"+ imagePath);
File imageFile = FileUtil.toFile(imagesFolder);
MultiImageImporter importer = MultiImageImporterFinder.findImporter(imageFile);
MultiImage image = importer.importData(imageFile);
image.image = imagePath;
image.name = name;
if (image instanceof UniImage) {
List<Particle> particleList;
JsonArray particles = root.getAsJsonArray(PARTICLES);
if (particles.size() == 0) {
particleList = new ArrayList<>();
} else {
particleList = gson.fromJson(particles,
new TypeToken<List<Particle>>() {
}.getType());
}
image.getSelectedImage().assign(particleList);
} else {
throw new UnsupportedOperationException("Parsing multi image particles not supported!");
}
return image;
}
/**
* Saves data to file. Extracts data from {@link ParticleImage} contained in
* {@link MultiImage} and saves them to given file
*
* @param fp .pattern where to save, must exist already
* @param image
* @throws IOException
*/
public static void savePatternFile(FileObject fp, MultiImage image) throws IOException {
if (image != null) {
Gson gson = PatternGson.gson();
JsonObject root = new JsonObject();
root.addProperty(NAME, image.name);
root.addProperty(IMAGE, image.image);
List<Particle> particles = image.getSelectedImage().getParticles();
Type type = new TypeToken<List<Particle>>() {}.getType();
root.add(PARTICLES, gson.toJsonTree(particles, type));
String json = gson.toJson(root);
FileUtils.writeStringToFile(FileUtil.toFile(fp), json);
}
}
/**
* Creates new "name".pattern file in project in correct folder.
*
* @param project
* @param name name of image
* @param imageNameExt assigned image name with file extension eg.
* image1.tif
* @throws IOException
*/
public static void createPatternFile(PatternProject project, String name, String imageNameExt) throws IOException {
FileObject patternFile = project.getDataFolder()
.createData(name, PATTERN_EXT);
JsonObject root = new JsonObject();
root.addProperty(NAME, name);
root.addProperty(IMAGE, imageNameExt);
root.add(PARTICLES, new JsonArray());
String json = PatternGson.gson().toJson(root);
FileUtils.writeStringToFile(
FileUtil.toFile(patternFile),
json
);
}
/**
* Loads image data from files.
*
* @param fi
* @param fp
*/
public static MultiImage load(FileObject fi, FileObject fp) {
try {
File file = FileUtil.toFile(fi);
MultiImageImporter importer = MultiImageImporterFinder.findImporter(file);
MultiImage image = importer.importData(file);
if (image instanceof UniImage) {
List<Particle> particles = loadParticlesForUniImage(fp);
image.getSelectedImage().assign(particles);
} else {
throw new UnsupportedOperationException("Parsing multi image particles not supported!");
}
return image;
} catch (Exception ex) {
Exceptions.printStackTrace(ex);
}
return null;
}
public static void save(MultiImage image, FileObject primary, FileObject secondary) {
if (image instanceof UniImage) {
try {
saveParticlesForUniImage(secondary, image.getSelectedImage().getParticles());
} catch (IOException ex) {
Exceptions.printStackTrace(ex);
}
} else {
throw new UnsupportedOperationException("Parsing multi image particles not supported!");
}
}
private static List<Particle> loadParticlesForUniImage(FileObject secondary) throws IOException {
Gson gson = PatternGson.gson();
File file = FileUtil.toFile(secondary);
String json = FileUtils.readFileToString(file, "UTF-8");
if (json.isEmpty()) {
return new ArrayList<Particle>();
}
return gson.fromJson(json,
new TypeToken<List<Particle>>() {
}.getType());
}
private static void saveParticlesForUniImage(FileObject secondary, List<Particle> particles) throws IOException {
Gson gson = PatternGson.gson();
File file = FileUtil.toFile(secondary);
if (file.exists()) {
String json = gson.toJson(particles);
FileUtils.writeStringToFile(file, json);
} else {
throw new FileNotFoundException("Writting to non existing file");
}
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.indices.store;
import org.apache.lucene.store.StoreRateLimiting;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.*;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.routing.IndexRoutingTable;
import org.elasticsearch.cluster.routing.IndexShardRoutingTable;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.FileSystemUtils;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.index.service.IndexService;
import org.elasticsearch.index.shard.IndexShardState;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.shard.service.IndexShard;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.node.settings.NodeSettingsService;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.*;
import java.io.File;
import java.io.IOException;
import java.util.*;
import java.util.concurrent.atomic.AtomicInteger;
/**
 * Cluster-state listener that removes local shard data for shards that are
 * fully started on other nodes (after confirming via a transport round-trip
 * that all copies are active elsewhere), and that applies dynamic updates of
 * the {@code indices.store.throttle.*} settings to the shared
 * {@link StoreRateLimiting} instance.
 */
public class IndicesStore extends AbstractComponent implements ClusterStateListener {

    public static final String INDICES_STORE_THROTTLE_TYPE = "indices.store.throttle.type";
    public static final String INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC = "indices.store.throttle.max_bytes_per_sec";

    // Transport action name used to ask other nodes whether they hold an active copy of a shard.
    private static final String ACTION_SHARD_EXISTS = "index/shard/exists";

    // Shard states that count as "active" when answering a shard-exists request.
    private static final EnumSet<IndexShardState> ACTIVE_STATES = EnumSet.of(IndexShardState.STARTED, IndexShardState.RELOCATED);

    /** Applies runtime updates of the throttle type / max-bytes-per-sec settings. */
    class ApplySettings implements NodeSettingsService.Listener {
        @Override
        public void onRefreshSettings(Settings settings) {
            String rateLimitingType = settings.get(INDICES_STORE_THROTTLE_TYPE, IndicesStore.this.rateLimitingType);
            // try and parse the type (throws if the value is not a valid type)
            StoreRateLimiting.Type.fromString(rateLimitingType);
            if (!rateLimitingType.equals(IndicesStore.this.rateLimitingType)) {
                logger.info("updating indices.store.throttle.type from [{}] to [{}]", IndicesStore.this.rateLimitingType, rateLimitingType);
                IndicesStore.this.rateLimitingType = rateLimitingType;
                IndicesStore.this.rateLimiting.setType(rateLimitingType);
            }
            ByteSizeValue rateLimitingThrottle = settings.getAsBytesSize(INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC, IndicesStore.this.rateLimitingThrottle);
            if (!rateLimitingThrottle.equals(IndicesStore.this.rateLimitingThrottle)) {
                logger.info("updating indices.store.throttle.max_bytes_per_sec from [{}] to [{}], note, type is [{}]", IndicesStore.this.rateLimitingThrottle, rateLimitingThrottle, IndicesStore.this.rateLimitingType);
                IndicesStore.this.rateLimitingThrottle = rateLimitingThrottle;
                IndicesStore.this.rateLimiting.setMaxRate(rateLimitingThrottle);
            }
        }
    }

    private final NodeEnvironment nodeEnv;
    private final NodeSettingsService nodeSettingsService;
    private final IndicesService indicesService;
    private final ClusterService clusterService;
    private final TransportService transportService;

    // Current throttling configuration; mutated at runtime by ApplySettings.
    private volatile String rateLimitingType;
    private volatile ByteSizeValue rateLimitingThrottle;
    private final StoreRateLimiting rateLimiting = new StoreRateLimiting();

    private final ApplySettings applySettings = new ApplySettings();

    @Inject
    public IndicesStore(Settings settings, NodeEnvironment nodeEnv, NodeSettingsService nodeSettingsService, IndicesService indicesService,
                        ClusterService clusterService, TransportService transportService) {
        super(settings);
        this.nodeEnv = nodeEnv;
        this.nodeSettingsService = nodeSettingsService;
        this.indicesService = indicesService;
        this.clusterService = clusterService;
        this.transportService = transportService;
        transportService.registerHandler(ACTION_SHARD_EXISTS, new ShardActiveRequestHandler());
        // we limit with 20MB / sec by default with a default type set to merge since 0.90.1
        this.rateLimitingType = componentSettings.get("throttle.type", StoreRateLimiting.Type.MERGE.name());
        rateLimiting.setType(rateLimitingType);
        this.rateLimitingThrottle = componentSettings.getAsBytesSize("throttle.max_bytes_per_sec", new ByteSizeValue(20, ByteSizeUnit.MB));
        rateLimiting.setMaxRate(rateLimitingThrottle);
        logger.debug("using indices.store.throttle.type [{}], with index.store.throttle.max_bytes_per_sec [{}]", rateLimitingType, rateLimitingThrottle);
        nodeSettingsService.addListener(applySettings);
        clusterService.addLast(this);
    }

    // Package-private no-arg constructor (for testing); leaves dependencies null.
    IndicesStore() {
        super(ImmutableSettings.EMPTY);
        nodeEnv = null;
        nodeSettingsService = null;
        indicesService = null;
        this.clusterService = null;
        this.transportService = null;
    }

    public StoreRateLimiting rateLimiting() {
        return this.rateLimiting;
    }

    /** Unregisters the settings listener and the cluster-state listener. */
    public void close() {
        nodeSettingsService.removeListener(applySettings);
        clusterService.remove(this);
    }

    @Override
    public void clusterChanged(ClusterChangedEvent event) {
        if (!event.routingTableChanged()) {
            return;
        }
        if (event.state().blocks().disableStatePersistence()) {
            return;
        }
        for (IndexRoutingTable indexRoutingTable : event.state().routingTable()) {
            // Note, closed indices will not have any routing information, so won't be deleted
            for (IndexShardRoutingTable indexShardRoutingTable : indexRoutingTable) {
                if (shardCanBeDeleted(event.state(), indexShardRoutingTable)) {
                    ShardId shardId = indexShardRoutingTable.shardId();
                    IndexService indexService = indicesService.indexService(shardId.getIndex());
                    // Only delete when the shard is not allocated on this node.
                    if (indexService == null || !indexService.hasShard(shardId.getId())) {
                        deleteShardIfExistElseWhere(event.state(), indexShardRoutingTable);
                    }
                }
            }
        }
    }

    /**
     * Returns true when every copy of the shard is started on a known node
     * (all running >= 1.3.0) and none of them is on this node.
     */
    boolean shardCanBeDeleted(ClusterState state, IndexShardRoutingTable indexShardRoutingTable) {
        // a shard can be deleted if all its copies are active, and its not allocated on this node
        if (indexShardRoutingTable.size() == 0) {
            // should not really happen, there should always be at least 1 (primary) shard in a
            // shard replication group, in any case, protected from deleting something by mistake
            return false;
        }
        for (ShardRouting shardRouting : indexShardRoutingTable) {
            // be conservative here, check on started, not even active
            if (!shardRouting.started()) {
                return false;
            }
            // if the allocated or relocation node id doesn't exists in the cluster state it may be a stale node,
            // make sure we don't do anything with this until the routing table has properly been rerouted to reflect
            // the fact that the node does not exists
            DiscoveryNode node = state.nodes().get(shardRouting.currentNodeId());
            if (node == null) {
                return false;
            }
            // If all nodes have been upgraded to >= 1.3.0 at some point we get back here and have the chance to
            // run this api. (when cluster state is then updated)
            if (node.getVersion().before(Version.V_1_3_0)) {
                logger.debug("Skip deleting deleting shard instance [{}], a node holding a shard instance is < 1.3.0", shardRouting);
                return false;
            }
            if (shardRouting.relocatingNodeId() != null) {
                node = state.nodes().get(shardRouting.relocatingNodeId());
                if (node == null) {
                    return false;
                }
                if (node.getVersion().before(Version.V_1_3_0)) {
                    logger.debug("Skip deleting deleting shard instance [{}], a node holding a shard instance is < 1.3.0", shardRouting);
                    return false;
                }
            }
            // check if shard is active on the current node or is getting relocated to the our node
            String localNodeId = state.getNodes().localNode().id();
            if (localNodeId.equals(shardRouting.currentNodeId()) || localNodeId.equals(shardRouting.relocatingNodeId())) {
                return false;
            }
        }
        return true;
    }

    /**
     * Asks every node that holds (or is receiving) a copy of the shard whether
     * that copy is active; the response handler performs the actual deletion
     * once all nodes have answered affirmatively.
     */
    private void deleteShardIfExistElseWhere(ClusterState state, IndexShardRoutingTable indexShardRoutingTable) {
        List<Tuple<DiscoveryNode, ShardActiveRequest>> requests = new ArrayList<>(indexShardRoutingTable.size());
        String indexUUID = state.getMetaData().index(indexShardRoutingTable.shardId().getIndex()).getUUID();
        ClusterName clusterName = state.getClusterName();
        for (ShardRouting shardRouting : indexShardRoutingTable) {
            // Node can't be null, because otherwise shardCanBeDeleted() would have returned false
            DiscoveryNode currentNode = state.nodes().get(shardRouting.currentNodeId());
            assert currentNode != null;
            requests.add(new Tuple<>(currentNode, new ShardActiveRequest(clusterName, indexUUID, shardRouting.shardId())));
            if (shardRouting.relocatingNodeId() != null) {
                DiscoveryNode relocatingNode = state.nodes().get(shardRouting.relocatingNodeId());
                assert relocatingNode != null;
                requests.add(new Tuple<>(relocatingNode, new ShardActiveRequest(clusterName, indexUUID, shardRouting.shardId())));
            }
        }
        ShardActiveResponseHandler responseHandler = new ShardActiveResponseHandler(indexShardRoutingTable.shardId(), state, requests.size());
        for (Tuple<DiscoveryNode, ShardActiveRequest> request : requests) {
            transportService.submitRequest(request.v1(), ACTION_SHARD_EXISTS, request.v2(), responseHandler);
        }
    }

    /**
     * Collects shard-active responses; when all expected copies confirm they are
     * active AND the cluster state has not changed since the requests were sent,
     * deletes the local (unallocated) shard data.
     */
    private class ShardActiveResponseHandler implements TransportResponseHandler<ShardActiveResponse> {

        private final ShardId shardId;
        private final int expectedActiveCopies;
        // Cluster state at request time; compared against the latest state before deleting.
        private final ClusterState clusterState;
        private final AtomicInteger awaitingResponses;
        private final AtomicInteger activeCopies;

        public ShardActiveResponseHandler(ShardId shardId, ClusterState clusterState, int expectedActiveCopies) {
            this.shardId = shardId;
            this.expectedActiveCopies = expectedActiveCopies;
            this.clusterState = clusterState;
            this.awaitingResponses = new AtomicInteger(expectedActiveCopies);
            this.activeCopies = new AtomicInteger();
        }

        @Override
        public ShardActiveResponse newInstance() {
            return new ShardActiveResponse();
        }

        @Override
        public void handleResponse(ShardActiveResponse response) {
            if (response.shardActive) {
                logger.trace("[{}] exists on node [{}]", shardId, response.node);
                activeCopies.incrementAndGet();
            }
            if (awaitingResponses.decrementAndGet() == 0) {
                allNodesResponded();
            }
        }

        @Override
        public void handleException(TransportException exp) {
            // A failed request still counts as a response (just not an active copy).
            logger.debug("shards active request failed for {}", exp, shardId);
            if (awaitingResponses.decrementAndGet() == 0) {
                allNodesResponded();
            }
        }

        @Override
        public String executor() {
            return ThreadPool.Names.SAME;
        }

        private void allNodesResponded() {
            if (activeCopies.get() != expectedActiveCopies) {
                logger.trace("not deleting shard [{}], expected {} active copies, but only {} found active copies", shardId, expectedActiveCopies, activeCopies.get());
                return;
            }
            ClusterState latestClusterState = clusterService.state();
            if (clusterState.getVersion() != latestClusterState.getVersion()) {
                logger.trace("not deleting shard [{}], the latest cluster state version[{}] is not equal to cluster state before shard active api call [{}]", shardId, latestClusterState.getVersion(), clusterState.getVersion());
                return;
            }
            IndexService indexService = indicesService.indexService(shardId.getIndex());
            if (indexService == null) {
                // not physical allocation of the index, delete it from the file system if applicable
                if (nodeEnv.hasNodeFile()) {
                    File[] shardLocations = nodeEnv.shardLocations(shardId);
                    if (FileSystemUtils.exists(shardLocations)) {
                        logger.debug("[{}][{}] deleting shard that is no longer used", shardId.index().name(), shardId.id());
                        FileSystemUtils.deleteRecursively(shardLocations);
                    }
                }
            } else {
                if (!indexService.hasShard(shardId.id())) {
                    if (indexService.store().canDeleteUnallocated(shardId)) {
                        logger.debug("[{}][{}] deleting shard that is no longer used", shardId.index().name(), shardId.id());
                        try {
                            indexService.store().deleteUnallocated(shardId);
                        } catch (Exception e) {
                            logger.debug("[{}][{}] failed to delete unallocated shard, ignoring", e, shardId.index().name(), shardId.id());
                        }
                    }
                } else {
                    // this state is weird, should we log?
                    // basically, it means that the shard is not allocated on this node using the routing
                    // but its still physically exists on an IndexService
                    // Note, this listener should run after IndicesClusterStateService...
                }
            }
        }
    }

    /** Answers shard-exists requests from other nodes. */
    private class ShardActiveRequestHandler extends BaseTransportRequestHandler<ShardActiveRequest> {

        @Override
        public ShardActiveRequest newInstance() {
            return new ShardActiveRequest();
        }

        @Override
        public String executor() {
            return ThreadPool.Names.SAME;
        }

        @Override
        public void messageReceived(ShardActiveRequest request, TransportChannel channel) throws Exception {
            channel.sendResponse(new ShardActiveResponse(shardActive(request), clusterService.localNode()));
        }

        /**
         * Returns true when this node holds the requested shard (matching cluster
         * name and index UUID) in the STARTED or RELOCATED state.
         */
        private boolean shardActive(ShardActiveRequest request) {
            ClusterName thisClusterName = clusterService.state().getClusterName();
            if (!thisClusterName.equals(request.clusterName)) {
                logger.trace("shard exists request meant for cluster[{}], but this is cluster[{}], ignoring request", request.clusterName, thisClusterName);
                return false;
            }
            ShardId shardId = request.shardId;
            IndexService indexService = indicesService.indexService(shardId.index().getName());
            if (indexService != null && indexService.indexUUID().equals(request.indexUUID)) {
                IndexShard indexShard = indexService.shard(shardId.getId());
                if (indexShard != null) {
                    return ACTIVE_STATES.contains(indexShard.state());
                }
            }
            return false;
        }
    }

    /** Transport request carrying the cluster name, index UUID and shard id to check. */
    private static class ShardActiveRequest extends TransportRequest {

        private ClusterName clusterName;
        private String indexUUID;
        private ShardId shardId;

        ShardActiveRequest() {
        }

        ShardActiveRequest(ClusterName clusterName, String indexUUID, ShardId shardId) {
            this.shardId = shardId;
            this.indexUUID = indexUUID;
            this.clusterName = clusterName;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            clusterName = ClusterName.readClusterName(in);
            indexUUID = in.readString();
            shardId = ShardId.readShardId(in);
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            clusterName.writeTo(out);
            out.writeString(indexUUID);
            shardId.writeTo(out);
        }
    }

    /** Transport response: whether the shard is active on the responding node. */
    private static class ShardActiveResponse extends TransportResponse {

        private boolean shardActive;
        private DiscoveryNode node;

        ShardActiveResponse() {
        }

        ShardActiveResponse(boolean shardActive, DiscoveryNode node) {
            this.shardActive = shardActive;
            this.node = node;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            shardActive = in.readBoolean();
            node = DiscoveryNode.readNode(in);
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            out.writeBoolean(shardActive);
            node.writeTo(out);
        }
    }
}
| |
/*
*
* Copyright 2014 http://Bither.net
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
 *
*/
package net.bither.viewsystem.froms;
import com.google.common.base.Optional;
import net.bither.BitherUI;
import net.bither.bitherj.core.Address;
import net.bither.bitherj.crypto.SecureCharSequence;
import net.bither.fonts.AwesomeIcon;
import net.bither.languages.MessageKey;
import net.bither.viewsystem.TextBoxes;
import net.bither.viewsystem.action.TextTransfer;
import net.bither.viewsystem.base.AccessibilityDecorator;
import net.bither.viewsystem.base.Buttons;
import net.bither.viewsystem.base.Labels;
import net.bither.viewsystem.base.Panels;
import net.bither.viewsystem.components.ScrollBarUIDecorator;
import net.bither.viewsystem.components.borders.TextBubbleBorder;
import net.bither.viewsystem.themes.Themes;
import net.miginfocom.swing.MigLayout;
import javax.swing.*;
import javax.swing.event.DocumentEvent;
import javax.swing.event.DocumentListener;
import java.awt.event.ActionEvent;
/**
 * Wizard panel that lets the user sign an arbitrary text message with the
 * private key of a single Bitcoin {@link Address}, after entering the wallet
 * password, and displays the resulting signature.
 */
public class SignMessagePanel extends WizardPanel {

    // Address whose private key is used for signing.
    private Address address;

    // Labels (also used in clipboard)
    private JLabel signingAddressLabel;
    private JLabel messageLabel;
    private JLabel signatureLabel;

    // Read-only display of the signing address (pre-filled from the Address).
    private JTextField signingAddress;
    // Output area for the produced signature.
    private JTextArea signature;
    // Input area for the message to be signed.
    private JTextArea messageTextArea;
    private JPasswordField currentPassword;
    // Invisible tar-pit spinner shown next to the password field.
    private JLabel spinner;
    // Status line at the bottom of the panel.
    JLabel reportLabel;

    /**
     * @param address address used for signing; its textual form is shown in the form
     */
    public SignMessagePanel(Address address) {
        super(MessageKey.SIGN_MESSAGE_TITLE, AwesomeIcon.PENCIL);
        this.address = address;
    }

    /** Builds the form: address row, message area, password row, action buttons, signature area, status label. */
    @Override
    public void initialiseContent(JPanel panel) {
        panel.setLayout(new MigLayout(
                Panels.migXYLayout(),
                "[][][][]", // Column constraints
                "[][80][][30][30][20]" // Row constraints
        ));
        // Labels (also used in clipboard)
        signingAddressLabel = Labels.newBitcoinAddress();
        messageLabel = Labels.newMessage();
        signatureLabel = Labels.newSignature();
        signingAddress = TextBoxes.newTextField(43);
        signingAddress.setText(address.getAddress());
        messageTextArea = TextBoxes.newEnterMessage();
        // The message is a wall of text so needs scroll bars in many cases
        messageTextArea.setBorder(null);
        // Message requires its own scroll pane
        JScrollPane messageScrollPane = new JScrollPane();
        messageScrollPane.setOpaque(true);
        messageScrollPane.setBackground(Themes.currentTheme.dataEntryBackground());
        messageScrollPane.setBorder(null);
        messageScrollPane.setHorizontalScrollBarPolicy(JScrollPane.HORIZONTAL_SCROLLBAR_AS_NEEDED);
        messageScrollPane.setVerticalScrollBarPolicy(JScrollPane.VERTICAL_SCROLLBAR_AS_NEEDED);
        // View port requires special handling
        messageScrollPane.setViewportView(messageTextArea);
        messageScrollPane.getViewport().setBackground(Themes.currentTheme.dataEntryBackground());
        messageScrollPane.setViewportBorder(new TextBubbleBorder(Themes.currentTheme.dataEntryBorder()));
        // Ensure we maintain the overall theme
        ScrollBarUIDecorator.apply(messageScrollPane, true);
        // Signature output: read-only, length-limited; the listener is a no-op.
        signature = TextBoxes.newReadOnlyLengthLimitedTextArea(new DocumentListener() {
            @Override
            public void insertUpdate(DocumentEvent e) {
            }
            @Override
            public void removeUpdate(DocumentEvent e) {
            }
            @Override
            public void changedUpdate(DocumentEvent e) {
            }
        }, 5, 40);
        AccessibilityDecorator.apply(signature, MessageKey.SIGNATURE);
        // Add them to the panel
        panel.add(signingAddressLabel);
        panel.add(signingAddress, "growx,span 3,push,wrap");
        panel.add(messageLabel);
        panel.add(messageScrollPane, "grow,span 3,push,wrap");
        panel.add(Labels.newEnterPassword());
        panel.add(getenterPasswordMaV(), "growx,span 3,wrap");
        panel.add(Buttons.newSignMessageButton(getSignMessageAction()), "cell 1 3,align right");
        // panel.add(Buttons.newCopyAllButton(getCopyClipboardAction()), "cell 2 3");
        panel.add(Buttons.newClearAllButton(getClearAllAction()), "cell 3 3,wrap");
        panel.add(signatureLabel);
        panel.add(signature, "grow,span 3,push,wrap");
        reportLabel = Labels.newStatusLabel(Optional.<MessageKey>absent(), null, Optional.<Boolean>absent());
        AccessibilityDecorator.apply(reportLabel, MessageKey.NOTES);
        panel.add(reportLabel, "growx,span 4");
    }

    /**
     * @return A new action for signing the message
     */
    private Action getSignMessageAction() {
        // Sign the message
        return new AbstractAction() {
            @Override
            public void actionPerformed(ActionEvent e) {
                signMessage();
            }
        };
    }

    /**
     * @return A new action for clearing the signing address, message text and signature
     */
    private Action getClearAllAction() {
        // Clear the fields and set focus
        return new AbstractAction() {
            @Override
            public void actionPerformed(ActionEvent e) {
                signingAddress.setText("");
                messageTextArea.setText("");
                currentPassword.setText("");
                signature.setText("");
                reportLabel.setText("");
                reportLabel.setIcon(null);
                // Reset focus
                signingAddress.requestFocusInWindow();
            }
        };
    }

    /**
     * @return the password-entry sub-panel (password field plus hidden spinner)
     */
    private JPanel getenterPasswordMaV() {
        JPanel panel = Panels.newPanel(
                new MigLayout(
                        Panels.migXLayout(), // Layout
                        "[][][][]", // Columns
                        "[]" // Rows
                ));
        // Keep track of the credentials fields
        currentPassword = TextBoxes.newPassword();
        // Provide an invisible tar pit spinner
        spinner = Labels.newSpinner(Themes.currentTheme.fadedText(), BitherUI.NORMAL_PLUS_ICON_SIZE);
        spinner.setVisible(false);
        // Bind a document listener to allow instant update of UI to matched passwords
        currentPassword.getDocument().addDocumentListener(
                new DocumentListener() {
                    @Override
                    public void insertUpdate(DocumentEvent e) {
                        updateModel();
                    }
                    @Override
                    public void removeUpdate(DocumentEvent e) {
                        updateModel();
                    }
                    @Override
                    public void changedUpdate(DocumentEvent e) {
                        updateModel();
                    }
                    /**
                     * Trigger any UI updates
                     */
                    private void updateModel() {
                        // Reset the credentials background
                        currentPassword.setBackground(Themes.currentTheme.dataEntryBackground());
                    }
                });
        panel.add(currentPassword, "growx,h 32,push");
        //panel.add(showButton, "shrink");
        // Ensure the icon label is a size suitable for rotation
        panel.add(spinner, BitherUI.NORMAL_PLUS_ICON_SIZE_MIG + ",wrap");
        return panel;
    }

    /**
     * Sign the message text with the address specified and update UI.
     * NOTE(review): secureCharSequence is never explicitly wiped after use —
     * confirm whether SecureCharSequence expects a wipe() call here.
     */
    private void signMessage() {
        String messageText = messageTextArea.getText();
        SecureCharSequence secureCharSequence = new SecureCharSequence(currentPassword.getPassword());
        String signMessage = this.address.signMessage(messageText, secureCharSequence);
        signature.setText(signMessage);
    }

    /**
     * @return A new action for copying the view contents to the clipboard
     */
    private Action getCopyClipboardAction() {
        return new AbstractAction() {
            @Override
            public void actionPerformed(ActionEvent e) {
                TextTransfer textTransfer = new TextTransfer();
                //getReceiveAddress
                textTransfer.setClipboardContents(signature.getText());
            }
        };
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.syncope.core.policy;
import java.util.regex.Pattern;
import org.springframework.stereotype.Component;
import org.syncope.types.PasswordPolicySpec;
import org.syncope.types.PolicyType;
@Component
public class PasswordPolicyEnforcer
extends PolicyEnforcer<PasswordPolicySpec, String> {
private static final Pattern DIGIT = Pattern.compile(".*\\d+.*");
private static final Pattern ALPHA_LOWERCASE = Pattern.compile(".*[a-z]+.*");
private static final Pattern ALPHA_UPPERCASE = Pattern.compile(".*[A-Z]+.*");
private static final Pattern FIRSTDIGIT = Pattern.compile("\\d.*");
private static final Pattern LASTDIGIT = Pattern.compile(".*\\d");
private static final Pattern ALPHANUMERIC = Pattern.compile(".*\\w.*");
private static final Pattern FIRSTALPHANUMERIC = Pattern.compile("\\w.*");
private static final Pattern LASTALPHANUMERIC = Pattern.compile(".*\\w");
private static final Pattern NONALPHANUMERIC = Pattern.compile(".*\\W.*");
private static final Pattern FIRSTNONALPHANUMERIC = Pattern.compile("\\W.*");
private static final Pattern LASTNONALPHANUMERIC = Pattern.compile(".*\\W");
/**
 * Checks the given clear-text password against every constraint of the
 * password policy, throwing on the first violation encountered (the order
 * of the checks below therefore determines which violation is reported).
 *
 * @param policy   password policy to enforce; must not be null
 * @param type     policy type; not consulted by this implementation
 * @param password clear-text password to validate; must not be null
 * @throws PasswordPolicyException when the password violates a constraint
 * @throws PolicyEnforceException  when password or policy is null
 */
@Override
public void enforce(
        final PasswordPolicySpec policy,
        final PolicyType type,
        final String password)
        throws PasswordPolicyException, PolicyEnforceException {
    if (password == null) {
        throw new PolicyEnforceException("Invalid password");
    }
    if (policy == null) {
        throw new PolicyEnforceException("Invalid policy");
    }
    // check length
    if (policy.getMinLength() > 0
            && policy.getMinLength() > password.length()) {
        throw new PasswordPolicyException("Password too short");
    }
    if (policy.getMaxLength() > 0
            && policy.getMaxLength() < password.length()) {
        throw new PasswordPolicyException("Password too long");
    }
    // check words not permitted (simple substring match)
    for (String word : policy.getWordsNotPermitted()) {
        if (password.contains(word)) {
            throw new PasswordPolicyException("Used word(s) not permitted");
        }
    }
    // check digits occurrence
    if (policy.isDigitRequired()
            && !checkForDigit(password)) {
        throw new PasswordPolicyException("Password must contain digit(s)");
    }
    // check lowercase alphabetic characters occurrence
    if (policy.isLowercaseRequired()
            && !checkForLowercase(password)) {
        throw new PasswordPolicyException(
                "Password must contain lowercase alphabetic character(s)");
    }
    // check uppercase alphabetic characters occurrence
    if (policy.isUppercaseRequired()
            && !checkForUppercase(password)) {
        throw new PasswordPolicyException(
                "Password must contain uppercase alphabetic character(s)");
    }
    // check prefix
    for (String prefix : policy.getPrefixesNotPermitted()) {
        if (password.startsWith(prefix)) {
            throw new PasswordPolicyException("Prefix not permitted");
        }
    }
    // check suffix
    for (String suffix : policy.getSuffixesNotPermitted()) {
        if (password.endsWith(suffix)) {
            throw new PasswordPolicyException("Suffix not permitted");
        }
    }
    // check digit first occurrence
    if (policy.isMustStartWithDigit()
            && !checkForFirstDigit(password)) {
        throw new PasswordPolicyException(
                "Password must start with a digit");
    }
    if (policy.isMustntStartWithDigit()
            && checkForFirstDigit(password)) {
        throw new PasswordPolicyException(
                "Password mustn't start with a digit");
    }
    // check digit last occurrence
    if (policy.isMustEndWithDigit()
            && !checkForLastDigit(password)) {
        throw new PasswordPolicyException("Password must end with a digit");
    }
    if (policy.isMustntEndWithDigit()
            && checkForLastDigit(password)) {
        throw new PasswordPolicyException(
                "Password mustn't end with a digit");
    }
    // check alphanumeric characters occurrence
    if (policy.isAlphanumericRequired()
            && !checkForAlphanumeric(password)) {
        throw new PasswordPolicyException(
                "Password must contain alphanumeric character(s)");
    }
    // check non alphanumeric characters occurrence
    if (policy.isNonAlphanumericRequired()
            && !checkForNonAlphanumeric(password)) {
        throw new PasswordPolicyException(
                "Password must contain non-alphanumeric character(s)");
    }
    // check alphanumeric character first occurrence
    if (policy.isMustStartWithAlpha()
            && !checkForFirstAlphanumeric(password)) {
        throw new PasswordPolicyException(
                "Password must start with an alphanumeric character");
    }
    if (policy.isMustntStartWithAlpha()
            && checkForFirstAlphanumeric(password)) {
        throw new PasswordPolicyException(
                "Password mustn't start with an alphanumeric character");
    }
    // check alphanumeric character last occurrence
    if (policy.isMustEndWithAlpha()
            && !checkForLastAlphanumeric(password)) {
        throw new PasswordPolicyException(
                "Password must end with an alphanumeric character");
    }
    if (policy.isMustntEndWithAlpha()
            && checkForLastAlphanumeric(password)) {
        throw new PasswordPolicyException(
                "Password mustn't end with an alphanumeric character");
    }
    // check non alphanumeric character first occurrence
    if (policy.isMustStartWithNonAlpha()
            && !checkForFirstNonAlphanumeric(password)) {
        throw new PasswordPolicyException(
                "Password must start with a non-alphanumeric character");
    }
    if (policy.isMustntStartWithNonAlpha()
            && checkForFirstNonAlphanumeric(password)) {
        throw new PasswordPolicyException(
                "Password mustn't start with a non-alphanumeric character");
    }
    // check non alphanumeric character last occurrence
    if (policy.isMustEndWithNonAlpha()
            && !checkForLastNonAlphanumeric(password)) {
        throw new PasswordPolicyException(
                "Password must end with a non-alphanumeric character");
    }
    if (policy.isMustntEndWithNonAlpha()
            && checkForLastNonAlphanumeric(password)) {
        throw new PasswordPolicyException(
                "Password mustn't end with a non-alphanumeric character");
    }
}
/**
 * True if {@code str} satisfies the class-level {@code DIGIT} pattern
 * (digit-required policy check). Pattern is declared elsewhere in this class.
 */
private boolean checkForDigit(String str) {
    // String already implements CharSequence — the explicit cast was redundant.
    return DIGIT.matcher(str).matches();
}
/**
 * True if {@code str} satisfies the class-level {@code ALPHA_LOWERCASE}
 * pattern (lowercase-required policy check).
 */
private boolean checkForLowercase(String str) {
    // String already implements CharSequence — the explicit cast was redundant.
    return ALPHA_LOWERCASE.matcher(str).matches();
}
/**
 * True if {@code str} satisfies the class-level {@code ALPHA_UPPERCASE}
 * pattern (uppercase-required policy check).
 */
private boolean checkForUppercase(String str) {
    // String already implements CharSequence — the explicit cast was redundant.
    return ALPHA_UPPERCASE.matcher(str).matches();
}
/**
 * True if {@code str} satisfies the class-level {@code FIRSTDIGIT} pattern
 * (must/mustn't-start-with-digit policy checks).
 */
private boolean checkForFirstDigit(String str) {
    // String already implements CharSequence — the explicit cast was redundant.
    return FIRSTDIGIT.matcher(str).matches();
}
/**
 * True if {@code str} satisfies the class-level {@code LASTDIGIT} pattern
 * (must/mustn't-end-with-digit policy checks).
 */
private boolean checkForLastDigit(String str) {
    // String already implements CharSequence — the explicit cast was redundant.
    return LASTDIGIT.matcher(str).matches();
}
/**
 * Reports whether {@code str} satisfies the class-level
 * {@code ALPHANUMERIC} pattern (alphanumeric-required policy check).
 */
private boolean checkForAlphanumeric(String str) {
    final boolean satisfied = ALPHANUMERIC.matcher(str).matches();
    return satisfied;
}
/**
 * Reports whether {@code str} satisfies the class-level
 * {@code FIRSTALPHANUMERIC} pattern (start-with-alphanumeric policy checks).
 */
private boolean checkForFirstAlphanumeric(String str) {
    final boolean satisfied = FIRSTALPHANUMERIC.matcher(str).matches();
    return satisfied;
}
/**
 * Reports whether {@code str} satisfies the class-level
 * {@code LASTALPHANUMERIC} pattern (end-with-alphanumeric policy checks).
 */
private boolean checkForLastAlphanumeric(String str) {
    final boolean satisfied = LASTALPHANUMERIC.matcher(str).matches();
    return satisfied;
}
/**
 * Reports whether {@code str} satisfies the class-level
 * {@code NONALPHANUMERIC} pattern (non-alphanumeric-required policy check).
 */
private boolean checkForNonAlphanumeric(String str) {
    final boolean satisfied = NONALPHANUMERIC.matcher(str).matches();
    return satisfied;
}
/**
 * Reports whether {@code str} satisfies the class-level
 * {@code FIRSTNONALPHANUMERIC} pattern (start-with-non-alphanumeric checks).
 */
private boolean checkForFirstNonAlphanumeric(String str) {
    final boolean satisfied = FIRSTNONALPHANUMERIC.matcher(str).matches();
    return satisfied;
}
/**
 * Reports whether {@code str} satisfies the class-level
 * {@code LASTNONALPHANUMERIC} pattern (end-with-non-alphanumeric checks).
 */
private boolean checkForLastNonAlphanumeric(String str) {
    final boolean satisfied = LASTNONALPHANUMERIC.matcher(str).matches();
    return satisfied;
}
}
| |
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package android.support.v7.widget;
import android.content.res.Resources;
import android.graphics.Canvas;
import android.graphics.ColorFilter;
import android.graphics.LinearGradient;
import android.graphics.Paint;
import android.graphics.Path;
import android.graphics.PixelFormat;
import android.graphics.RadialGradient;
import android.graphics.Rect;
import android.graphics.RectF;
import android.graphics.Shader;
import android.graphics.drawable.Drawable;
import android.support.v7.cardview.R;
import android.util.Log;
/**
* A rounded rectangle drawable which also includes a shadow around.
*/
class RoundRectDrawableWithShadow extends Drawable {
    // used to calculate content padding: cos(45°), the diagonal reach of a rounded corner
    final static double COS_45 = Math.cos(Math.toRadians(45));

    // Vertical shadow is drawn SHADOW_MULTIPLIER times larger than the horizontal one.
    final static float SHADOW_MULTIPLIER = 1.5f;

    final float mInsetShadow; // extra shadow to avoid gaps between card and shadow

    /*
     * This helper is set by CardView implementations.
     * <p>
     * Prior to API 17, canvas.drawRoundRect is expensive; which is why we need this interface
     * to draw efficient rounded rectangles before 17.
     * */
    static RoundRectHelper sRoundRectHelper;

    Paint mPaint;              // fills the card background
    Paint mCornerShadowPaint;  // radial gradient used for the four corner shadows
    Paint mEdgeShadowPaint;    // linear gradient used for the four edge shadows

    final RectF mCardBounds;   // card rectangle inside getBounds(), shadow excluded
    float mCornerRadius;
    Path mCornerShadowPath;    // one corner's shadow outline; canvas is rotated to reuse it

    // updated value with inset
    float mMaxShadowSize;
    // actual value set by developer
    float mRawMaxShadowSize;
    // multiplied value to account for shadow offset
    float mShadowSize;
    // actual value set by developer
    float mRawShadowSize;

    // When true, shadow path and card bounds are rebuilt on the next draw().
    private boolean mDirty = true;

    private final int mShadowStartColor;
    private final int mShadowEndColor;

    private boolean mAddPaddingForCorners = true;

    /**
     * If shadow size is set to a value above max shadow, we print a warning
     */
    private boolean mPrintedShadowClipWarning = false;

    RoundRectDrawableWithShadow(Resources resources, int backgroundColor, float radius,
            float shadowSize, float maxShadowSize) {
        mShadowStartColor = resources.getColor(R.color.cardview_shadow_start_color);
        mShadowEndColor = resources.getColor(R.color.cardview_shadow_end_color);
        mInsetShadow = resources.getDimension(R.dimen.cardview_compat_inset_shadow);
        // Validates sizes and initializes the raw/derived shadow fields.
        setShadowSize(shadowSize, maxShadowSize);
        mPaint = new Paint(Paint.ANTI_ALIAS_FLAG | Paint.DITHER_FLAG);
        mPaint.setColor(backgroundColor);
        mCornerShadowPaint = new Paint(Paint.ANTI_ALIAS_FLAG | Paint.DITHER_FLAG);
        mCornerShadowPaint.setStyle(Paint.Style.FILL);
        mCornerShadowPaint.setDither(true);
        mCornerRadius = radius;
        mCardBounds = new RectF();
        // Edge shadow paint shares the corner paint's flags; its shader is set later.
        mEdgeShadowPaint = new Paint(mCornerShadowPaint);
    }

    public void setAddPaddingForCorners(boolean addPaddingForCorners) {
        mAddPaddingForCorners = addPaddingForCorners;
        invalidateSelf();
    }

    @Override
    public void setAlpha(int alpha) {
        // Apply the same alpha to background and both shadow paints.
        mPaint.setAlpha(alpha);
        mCornerShadowPaint.setAlpha(alpha);
        mEdgeShadowPaint.setAlpha(alpha);
    }

    @Override
    protected void onBoundsChange(Rect bounds) {
        super.onBoundsChange(bounds);
        mDirty = true; // geometry must be rebuilt before the next draw
    }

    /**
     * Sets the developer-requested shadow size, clamped to {@code maxShadowSize}.
     * Derived fields add {@link #mInsetShadow} and the SHADOW_MULTIPLIER factor.
     *
     * @throws IllegalArgumentException if either size is negative
     */
    void setShadowSize(float shadowSize, float maxShadowSize) {
        if (shadowSize < 0 || maxShadowSize < 0) {
            throw new IllegalArgumentException("invalid shadow size");
        }
        if (shadowSize > maxShadowSize) {
            shadowSize = maxShadowSize;
            if (!mPrintedShadowClipWarning) {
                // Warn only once per drawable instance.
                Log.w("CardView", "Shadow size is being clipped by the max shadow size. See "
                        + "{CardView#setMaxCardElevation}.");
                mPrintedShadowClipWarning = true;
            }
        }
        if (mRawShadowSize == shadowSize && mRawMaxShadowSize == maxShadowSize) {
            return; // nothing changed; avoid an unnecessary rebuild
        }
        mRawShadowSize = shadowSize;
        mRawMaxShadowSize = maxShadowSize;
        mShadowSize = shadowSize * SHADOW_MULTIPLIER + mInsetShadow;
        mMaxShadowSize = maxShadowSize + mInsetShadow;
        mDirty = true;
        invalidateSelf();
    }

    @Override
    public boolean getPadding(Rect padding) {
        // Padding reserves room for the maximum shadow so content never overlaps it.
        int vOffset = (int) Math.ceil(calculateVerticalPadding(mRawMaxShadowSize, mCornerRadius,
                mAddPaddingForCorners));
        int hOffset = (int) Math.ceil(calculateHorizontalPadding(mRawMaxShadowSize, mCornerRadius,
                mAddPaddingForCorners));
        padding.set(hOffset, vOffset, hOffset, vOffset);
        return true;
    }

    /** Vertical padding needed for the (taller) vertical shadow, optionally plus corner reach. */
    static float calculateVerticalPadding(float maxShadowSize, float cornerRadius,
            boolean addPaddingForCorners) {
        if (addPaddingForCorners) {
            return (float) (maxShadowSize * SHADOW_MULTIPLIER + (1 - COS_45) * cornerRadius);
        } else {
            return maxShadowSize * SHADOW_MULTIPLIER;
        }
    }

    /** Horizontal padding needed for the shadow, optionally plus corner reach. */
    static float calculateHorizontalPadding(float maxShadowSize, float cornerRadius,
            boolean addPaddingForCorners) {
        if (addPaddingForCorners) {
            return (float) (maxShadowSize + (1 - COS_45) * cornerRadius);
        } else {
            return maxShadowSize;
        }
    }

    @Override
    public void setColorFilter(ColorFilter cf) {
        mPaint.setColorFilter(cf);
        mCornerShadowPaint.setColorFilter(cf);
        mEdgeShadowPaint.setColorFilter(cf);
    }

    @Override
    public int getOpacity() {
        return PixelFormat.OPAQUE;
    }

    void setCornerRadius(float radius) {
        if (mCornerRadius == radius) {
            return; // unchanged; skip rebuild
        }
        mCornerRadius = radius;
        mDirty = true;
        invalidateSelf();
    }

    @Override
    public void draw(Canvas canvas) {
        if (mDirty) {
            buildComponents(getBounds());
            mDirty = false;
        }
        // Shift the shadow down by half the raw size so it reads as a light-from-above shadow.
        canvas.translate(0, mRawShadowSize / 2);
        drawShadow(canvas);
        canvas.translate(0, -mRawShadowSize / 2);
        sRoundRectHelper.drawRoundRect(canvas, mCardBounds, mCornerRadius, mPaint);
    }

    /**
     * Draws the corner shadow path at each corner (rotating the canvas 0/90/180/270°)
     * and fills the straight edges with the edge shadow gradient where there is room.
     */
    private void drawShadow(Canvas canvas) {
        final float edgeShadowTop = -mCornerRadius - mShadowSize;
        final float inset = mCornerRadius + mInsetShadow + mRawShadowSize / 2;
        // Edges are only drawn when the card is wider/taller than two corner regions.
        final boolean drawHorizontalEdges = mCardBounds.width() - 2 * inset > 0;
        final boolean drawVerticalEdges = mCardBounds.height() - 2 * inset > 0;
        // LT
        int saved = canvas.save();
        canvas.translate(mCardBounds.left + inset, mCardBounds.top + inset);
        canvas.drawPath(mCornerShadowPath, mCornerShadowPaint);
        if (drawHorizontalEdges) {
            canvas.drawRect(0, edgeShadowTop,
                    mCardBounds.width() - 2 * inset, -mCornerRadius,
                    mEdgeShadowPaint);
        }
        canvas.restoreToCount(saved);
        // RB
        saved = canvas.save();
        canvas.translate(mCardBounds.right - inset, mCardBounds.bottom - inset);
        canvas.rotate(180f);
        canvas.drawPath(mCornerShadowPath, mCornerShadowPaint);
        if (drawHorizontalEdges) {
            // Bottom edge uses the extra mShadowSize of the gradient (see buildShadowCorners).
            canvas.drawRect(0, edgeShadowTop,
                    mCardBounds.width() - 2 * inset, -mCornerRadius + mShadowSize,
                    mEdgeShadowPaint);
        }
        canvas.restoreToCount(saved);
        // LB
        saved = canvas.save();
        canvas.translate(mCardBounds.left + inset, mCardBounds.bottom - inset);
        canvas.rotate(270f);
        canvas.drawPath(mCornerShadowPath, mCornerShadowPaint);
        if (drawVerticalEdges) {
            canvas.drawRect(0, edgeShadowTop,
                    mCardBounds.height() - 2 * inset, -mCornerRadius, mEdgeShadowPaint);
        }
        canvas.restoreToCount(saved);
        // RT
        saved = canvas.save();
        canvas.translate(mCardBounds.right - inset, mCardBounds.top + inset);
        canvas.rotate(90f);
        canvas.drawPath(mCornerShadowPath, mCornerShadowPaint);
        if (drawVerticalEdges) {
            canvas.drawRect(0, edgeShadowTop,
                    mCardBounds.height() - 2 * inset, -mCornerRadius, mEdgeShadowPaint);
        }
        canvas.restoreToCount(saved);
    }

    /**
     * Rebuilds the corner shadow path (ring segment between the corner arc and the
     * arc expanded by mShadowSize) and the radial/linear gradient shaders.
     */
    private void buildShadowCorners() {
        RectF innerBounds = new RectF(-mCornerRadius, -mCornerRadius, mCornerRadius, mCornerRadius);
        RectF outerBounds = new RectF(innerBounds);
        outerBounds.inset(-mShadowSize, -mShadowSize);
        if (mCornerShadowPath == null) {
            mCornerShadowPath = new Path();
        } else {
            mCornerShadowPath.reset();
        }
        mCornerShadowPath.setFillType(Path.FillType.EVEN_ODD);
        mCornerShadowPath.moveTo(-mCornerRadius, 0);
        mCornerShadowPath.rLineTo(-mShadowSize, 0);
        // outer arc
        mCornerShadowPath.arcTo(outerBounds, 180f, 90f, false);
        // inner arc
        mCornerShadowPath.arcTo(innerBounds, 270f, -90f, false);
        mCornerShadowPath.close();
        // Gradient stays solid up to the corner radius, then fades to the end color.
        float startRatio = mCornerRadius / (mCornerRadius + mShadowSize);
        mCornerShadowPaint.setShader(new RadialGradient(0, 0, mCornerRadius + mShadowSize,
                new int[]{mShadowStartColor, mShadowStartColor, mShadowEndColor},
                new float[]{0f, startRatio, 1f}
                , Shader.TileMode.CLAMP));
        // we offset the content shadowSize/2 pixels up to make it more realistic.
        // this is why edge shadow shader has some extra space
        // When drawing bottom edge shadow, we use that extra space.
        mEdgeShadowPaint.setShader(new LinearGradient(0, -mCornerRadius + mShadowSize, 0,
                -mCornerRadius - mShadowSize,
                new int[]{mShadowStartColor, mShadowStartColor, mShadowEndColor},
                new float[]{0f, .5f, 1f}, Shader.TileMode.CLAMP));
    }

    private void buildComponents(Rect bounds) {
        // Card is offset SHADOW_MULTIPLIER * maxShadowSize to account for the shadow shift.
        // We could have different top-bottom offsets to avoid extra gap above but in that case
        // center aligning Views inside the CardView would be problematic.
        final float verticalOffset = mMaxShadowSize * SHADOW_MULTIPLIER;
        mCardBounds.set(bounds.left + mMaxShadowSize, bounds.top + verticalOffset,
                bounds.right - mMaxShadowSize, bounds.bottom - verticalOffset);
        buildShadowCorners();
    }

    float getCornerRadius() {
        return mCornerRadius;
    }

    /** Writes the drawable's padding (shadow + corner allowance) into {@code into}. */
    void getMaxShadowAndCornerPadding(Rect into) {
        getPadding(into);
    }

    void setShadowSize(float size) {
        setShadowSize(size, mRawMaxShadowSize);
    }

    void setMaxShadowSize(float size) {
        setShadowSize(mRawShadowSize, size);
    }

    float getShadowSize() {
        return mRawShadowSize;
    }

    float getMaxShadowSize() {
        return mRawMaxShadowSize;
    }

    /** Minimum width that can fit the corners plus shadow on both sides. */
    float getMinWidth() {
        final float content = 2 *
                Math.max(mRawMaxShadowSize, mCornerRadius + mInsetShadow + mRawMaxShadowSize / 2);
        return content + (mRawMaxShadowSize + mInsetShadow) * 2;
    }

    /** Minimum height that can fit the corners plus the (taller) vertical shadow. */
    float getMinHeight() {
        final float content = 2 * Math.max(mRawMaxShadowSize, mCornerRadius + mInsetShadow
                + mRawMaxShadowSize * SHADOW_MULTIPLIER / 2);
        return content + (mRawMaxShadowSize * SHADOW_MULTIPLIER + mInsetShadow) * 2;
    }

    // Abstraction over drawRoundRect so pre-API-17 implementations can draw efficiently.
    static interface RoundRectHelper {
        void drawRoundRect(Canvas canvas, RectF bounds, float cornerRadius, Paint paint);
    }
}
| |
/*
* Copyright 2014 Alexey Plotnik
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.stem.db;
import org.apache.commons.codec.DecoderException;
import org.apache.commons.codec.binary.Hex;
import org.stem.domain.BlobDescriptor;
import org.stem.domain.ExtendedBlobDescriptor;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.channels.FileLock;
import java.util.zip.CRC32;
public class Blob // TODO: integrate all descriptors (mountpoint, fatfile, etc)
{
    Header header;
    byte[] data;
    BlobDescriptor descriptor;

    public Header getHeader() {
        return header;
    }

    public BlobDescriptor getDescriptor() {
        return descriptor;
    }

    /**
     * Reads a full blob (header + payload) from a fat file.
     *
     * @param ff               fat file to read from
     * @param blobHeaderOffset absolute offset of the blob header within the file
     * @return the blob, or {@code null} if the header checksum does not match
     *         (header considered corrupted)
     * @throws IOException on read failure
     */
    public static Blob deserialize(FatFile ff, long blobHeaderOffset) throws IOException {
        FileChannel channel = ff.getReader().getChannel();
        Header header = Header.deserialize(channel, blobHeaderOffset);
        if (header.corrupted()) {
            return null;
        }
        int bodyOffset = (int) blobHeaderOffset + Header.SIZE;
        byte[] data = ff.readBlob(bodyOffset, header.length); // TODO: long to int conversion. Why?
        return new Blob(header, new BlobDescriptor(ff.id, (int) blobHeaderOffset, bodyOffset), data);
    }

    /**
     * Reads only the header at the given offset and converts it to an extended descriptor.
     *
     * @return the descriptor, or {@code null} if the header checksum is invalid
     * @throws IOException on read failure
     */
    public static ExtendedBlobDescriptor deserializeDescriptor(FatFile ff, long blobHeaderOffset) throws IOException {
        FileChannel channel = ff.getReader().getChannel();
        Header header = Header.deserialize(channel, blobHeaderOffset);
        if (!header.valid()) {
            return null;
        }
        return new ExtendedBlobDescriptor(header.key, header.length, ff.id, (int) blobHeaderOffset, (int) blobHeaderOffset + Header.SIZE);
    }

    public Blob(Header header, byte[] data) {
        this.header = header;
        this.data = data;
    }

    public Blob(Header header, BlobDescriptor descriptor, byte[] data) {
        this(header, data);
        this.descriptor = descriptor;
    }

    /** Payload length in bytes (from the header). */
    public int size() {
        return header.length;
    }

    /** 16-byte blob key (from the header). */
    public byte[] key() {
        return header.key;
    }

    public byte[] data() {
        return data;
    }

    /** True when the header's delete flag marks this blob as deleted. */
    public boolean deleted() {
        return header.isDeleted();
    }

    /**
     * On-disk blob header: key (16) + payload length (4) + CRC32 of key+length (4)
     * + delete flag (1).
     */
    public static class Header {
        private static final int KEY_SIZE = 16;
        private static final int LENGTH_SIZE = 4;
        private static final int CRC32_SIZE = 4;
        private static final int DELETE_FLAG_SIZE = 1;
        public static final int SIZE = KEY_SIZE + LENGTH_SIZE + CRC32_SIZE + DELETE_FLAG_SIZE;

        public byte[] key;     // 16-byte blob key
        public Integer length; // payload length in bytes
        public Integer crc32;  // checksum of key + length; computed lazily in serialize()
        public byte deleteFlag;

        public static Header create(byte[] keyBytes, int payloadLength, int crc32, byte deleteFlag) {
            return new Header(keyBytes, payloadLength, crc32, deleteFlag);
        }

        public static Header create(byte[] keyBytes, int payloadLength, byte deleteFlag) {
            return new Header(keyBytes, payloadLength, deleteFlag);
        }

        /**
         * Creates a header from a hex-encoded key string.
         *
         * @throws IOException if {@code key} is not valid hex
         */
        public static Header create(String key, int payloadLength, byte deleteFlag) throws IOException {
            byte[] keyBytes;
            try {
                keyBytes = Hex.decodeHex(key.toCharArray());
            } catch (DecoderException e) {
                throw new IOException("Can not decode the key " + key, e);
            }
            assert keyBytes.length == 16;
            return new Header(keyBytes, payloadLength, deleteFlag);
        }

        public Header(byte[] key, int length, int crc32, byte deleteFlag) {
            this.key = key;
            this.length = length;
            this.crc32 = crc32;
            this.deleteFlag = deleteFlag;
        }

        public Header(byte[] key, int length, byte deleteFlag) {
            this.key = key;
            this.length = length;
            this.deleteFlag = deleteFlag;
        }

        /**
         * 1. Write key (16 bytes)
         * 2. Write blob length (4 bytes)
         * 3. Write CRC32 (4 bytes) of key and blob length
         * 4. Write delete flag (1 byte)
         *
         * @return a buffer of exactly {@link #SIZE} bytes; position is left at the end
         */
        public ByteBuffer serialize() {
            ByteBuffer buf = ByteBuffer.allocate(SIZE);
            buf.put(key);
            buf.putInt(length);
            if (null == crc32)
                crc32 = calculateChecksum();
            // putInt writes the same big-endian bytes the old temporary buffer produced.
            buf.putInt(crc32);
            buf.put(deleteFlag);
            return buf;
        }

        /**
         * Reads a header from {@code channel} at {@code blobHeaderOffset} while holding
         * an exclusive lock on the channel. No validation is performed here; callers
         * check {@link #valid()} / {@link #corrupted()}.
         */
        public static Header deserialize(FileChannel channel, long blobHeaderOffset) throws IOException {
            FileLock lock = channel.lock();
            try {
                channel.position(blobHeaderOffset);
                ByteBuffer buf = ByteBuffer.allocate(SIZE);
                // FileChannel.read may return fewer bytes than requested; keep reading
                // until the buffer is full or EOF. On EOF the buffer stays zero-padded,
                // which fails the checksum and is reported as corrupted (as before).
                while (buf.hasRemaining() && channel.read(buf) > 0) {
                    // keep filling
                }
                buf.position(0);
                byte[] key = new byte[KEY_SIZE];
                buf.get(key);
                int length = buf.getInt();
                int crc32 = buf.getInt();
                byte deleteFlag = buf.get();
                return Header.create(key, length, crc32, deleteFlag);
            } finally {
                lock.release();
            }
        }

        /** CRC32 of the key bytes followed by the big-endian length. */
        private Integer calculateChecksum() {
            ByteBuffer keyAndLengthBuf = ByteBuffer.allocate(KEY_SIZE + LENGTH_SIZE).put(key).putInt(length);
            CRC32 crc = new CRC32();
            crc.update(keyAndLengthBuf.array());
            return (int) crc.getValue();
        }

        // No-op; kept for interface compatibility with existing callers.
        public void nextOffset() {
        }

        /** True when the stored checksum matches the recomputed one. */
        public boolean valid() {
            return calculateChecksum().equals(crc32);
        }

        public boolean corrupted() {
            return !valid();
        }

        /** Converts this header into a fat-file index entry at {@code offset}. */
        public FatFileIndex.Entry toIndexEntry(int offset) {
            return new FatFileIndex.Entry(key, offset, length, deleteFlag);
        }

        public boolean isLive() {
            return FatFileIndex.Entry.FLAG_LIVE == this.deleteFlag;
        }

        public boolean isDeleted() {
            return FatFileIndex.Entry.FLAG_DELETED == this.deleteFlag;
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ambari.server.controller.internal;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.ambari.server.controller.ivory.Instance;
import org.apache.ambari.server.controller.ivory.IvoryService;
import org.apache.ambari.server.controller.spi.NoSuchParentResourceException;
import org.apache.ambari.server.controller.spi.NoSuchResourceException;
import org.apache.ambari.server.controller.spi.Predicate;
import org.apache.ambari.server.controller.spi.Request;
import org.apache.ambari.server.controller.spi.RequestStatus;
import org.apache.ambari.server.controller.spi.Resource;
import org.apache.ambari.server.controller.spi.ResourceAlreadyExistsException;
import org.apache.ambari.server.controller.spi.SystemException;
import org.apache.ambari.server.controller.spi.UnsupportedPropertyException;
import org.apache.ambari.server.controller.utilities.PropertyHelper;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Sets;
/**
* DR instance resource provider.
*/
public class InstanceResourceProvider extends AbstractDRResourceProvider {

    // ----- Property ID constants ---------------------------------------------

    protected static final String INSTANCE_FEED_NAME_PROPERTY_ID = PropertyHelper.getPropertyId("Instance", "feedName");
    protected static final String INSTANCE_ID_PROPERTY_ID = PropertyHelper.getPropertyId("Instance", "id");
    protected static final String INSTANCE_STATUS_PROPERTY_ID = PropertyHelper.getPropertyId("Instance", "status");
    protected static final String INSTANCE_START_TIME_PROPERTY_ID = PropertyHelper.getPropertyId("Instance", "startTime");
    protected static final String INSTANCE_END_TIME_PROPERTY_ID = PropertyHelper.getPropertyId("Instance", "endTime");
    protected static final String INSTANCE_DETAILS_PROPERTY_ID = PropertyHelper.getPropertyId("Instance", "details");
    protected static final String INSTANCE_LOG_PROPERTY_ID = PropertyHelper.getPropertyId("Instance", "log");

    /**
     * The key property ids for a Instance resource.
     */
    private static final Map<Resource.Type, String> keyPropertyIds = ImmutableMap.<Resource.Type, String>builder()
        .put(Resource.Type.DRInstance, INSTANCE_FEED_NAME_PROPERTY_ID)
        .put(Resource.Type.Workflow, INSTANCE_ID_PROPERTY_ID)
        .build();

    /**
     * The property ids for a Instance resource.
     */
    private static final Set<String> propertyIds = Sets.newHashSet(
        INSTANCE_FEED_NAME_PROPERTY_ID,
        INSTANCE_ID_PROPERTY_ID,
        INSTANCE_STATUS_PROPERTY_ID,
        INSTANCE_START_TIME_PROPERTY_ID,
        INSTANCE_END_TIME_PROPERTY_ID,
        INSTANCE_DETAILS_PROPERTY_ID,
        INSTANCE_LOG_PROPERTY_ID);

    /**
     * Construct a provider.
     *
     * @param ivoryService the ivory service
     */
    public InstanceResourceProvider(IvoryService ivoryService) {
        super(propertyIds, keyPropertyIds, ivoryService);
    }

    /**
     * Instances are created by the DR subsystem, not through this API.
     *
     * @throws UnsupportedOperationException always
     */
    @Override
    public RequestStatus createResources(Request request) throws SystemException,
        UnsupportedPropertyException, ResourceAlreadyExistsException, NoSuchParentResourceException {
        // we can't create instances directly
        throw new UnsupportedOperationException("Not supported.");
    }

    /**
     * Returns one resource per instance of every feed named by the predicate
     * (all feeds when the predicate is null or omits the feed name), filtered
     * by the predicate.
     */
    @Override
    public Set<Resource> getResources(Request request, Predicate predicate) throws SystemException,
        UnsupportedPropertyException, NoSuchResourceException, NoSuchParentResourceException {
        Set<String> requestedIds = getRequestPropertyIds(request, predicate);
        Set<Resource> resources = new HashSet<>();
        List<String> feedNames = new LinkedList<>();
        IvoryService service = getService();
        if (predicate == null) {
            feedNames = service.getFeedNames();
        } else {
            for (Map<String, Object> propertyMap : getPropertyMaps(predicate)) {
                String feedName = (String) propertyMap.get(INSTANCE_FEED_NAME_PROPERTY_ID);
                if (feedName == null) {
                    // if any part of the predicate doesn't include feed name then we have to check them all
                    feedNames = service.getFeedNames();
                    break;
                }
                feedNames.add(feedName);
            }
        }
        for (String feedName : feedNames) {
            List<Instance> instances = service.getInstances(feedName);
            for (Instance instance : instances) {
                Resource resource = new ResourceImpl(Resource.Type.DRInstance);
                setResourceProperty(resource, INSTANCE_FEED_NAME_PROPERTY_ID,
                    instance.getFeedName(), requestedIds);
                setResourceProperty(resource, INSTANCE_ID_PROPERTY_ID,
                    instance.getId(), requestedIds);
                setResourceProperty(resource, INSTANCE_STATUS_PROPERTY_ID,
                    instance.getStatus(), requestedIds);
                setResourceProperty(resource, INSTANCE_START_TIME_PROPERTY_ID,
                    instance.getStartTime(), requestedIds);
                setResourceProperty(resource, INSTANCE_END_TIME_PROPERTY_ID,
                    instance.getEndTime(), requestedIds);
                setResourceProperty(resource, INSTANCE_DETAILS_PROPERTY_ID,
                    instance.getDetails(), requestedIds);
                setResourceProperty(resource, INSTANCE_LOG_PROPERTY_ID,
                    instance.getLog(), requestedIds);
                if (predicate == null || predicate.evaluate(resource)) {
                    resources.add(resource);
                }
            }
        }
        return resources;
    }

    /**
     * Suspends or resumes the instances matching the predicate, driven by the
     * desired "status" value of the request's property map.
     */
    @Override
    public RequestStatus updateResources(Request request, Predicate predicate) throws SystemException,
        UnsupportedPropertyException, NoSuchResourceException, NoSuchParentResourceException {
        IvoryService service = getService();
        // NOTE(review): only the first property map of the request is consulted;
        // additional maps are ignored — confirm this is the intended contract.
        Iterator<Map<String,Object>> iterator = request.getProperties().iterator();
        if (iterator.hasNext()) {
            Map<String, Object> propertyMap = iterator.next();
            String desiredStatus = (String) propertyMap.get(INSTANCE_STATUS_PROPERTY_ID);
            if (desiredStatus != null) {
                // get all the instances that pass the predicate check
                Set<Resource> resources = getResources(PropertyHelper.getReadRequest(), predicate);
                // update all the matching instances with the property values from the request
                for (Resource resource : resources) {
                    String status = (String) resource.getPropertyValue(INSTANCE_STATUS_PROPERTY_ID);
                    String feedName = (String) resource.getPropertyValue(INSTANCE_FEED_NAME_PROPERTY_ID);
                    String id = (String) resource.getPropertyValue(INSTANCE_ID_PROPERTY_ID);
                    // Constant-first comparisons: the instance's status property may be
                    // absent (null), which previously risked a NullPointerException.
                    if ("SUSPENDED".equals(desiredStatus)) {
                        service.suspendInstance(feedName, id);
                    } else if ("SUSPENDED".equals(status) && "RUNNING".equals(desiredStatus)) {
                        service.resumeInstance(feedName, id);
                    }
                }
            }
        }
        return new RequestStatusImpl(null);
    }

    /**
     * Kills every instance matching the predicate.
     */
    @Override
    public RequestStatus deleteResources(Request request, Predicate predicate) throws SystemException,
        UnsupportedPropertyException, NoSuchResourceException, NoSuchParentResourceException {
        IvoryService service = getService();
        // get all the instances that pass the predicate check
        Set<Resource> resources = getResources(PropertyHelper.getReadRequest(), predicate);
        for (Resource resource : resources) {
            // delete all the matching instances with the property values from the request
            service.killInstance((String) resource.getPropertyValue(INSTANCE_FEED_NAME_PROPERTY_ID),
                (String) resource.getPropertyValue(INSTANCE_ID_PROPERTY_ID));
        }
        return new RequestStatusImpl(null);
    }

    @Override
    protected Set<String> getPKPropertyIds() {
        return new HashSet<>(keyPropertyIds.values());
    }

    // ----- helper methods -----------------------------------------------------

    /** Builds an {@link Instance} from a property map plus the given feed name and id. */
    protected static Instance getInstance(String feedName, String instanceId, Map<String, Object> propertyMap) {
        return new Instance(
            feedName,
            instanceId,
            (String) propertyMap.get(INSTANCE_STATUS_PROPERTY_ID),
            (String) propertyMap.get(INSTANCE_START_TIME_PROPERTY_ID),
            (String) propertyMap.get(INSTANCE_END_TIME_PROPERTY_ID),
            (String) propertyMap.get(INSTANCE_DETAILS_PROPERTY_ID),
            (String) propertyMap.get(INSTANCE_LOG_PROPERTY_ID));
    }
}
| |
package com.example.erin.sahitscore;
import android.content.Intent;
import android.net.Uri;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.support.v7.widget.Toolbar;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.TextView;
import com.github.mikephil.charting.charts.BarChart;
import com.github.mikephil.charting.components.XAxis;
import com.github.mikephil.charting.components.YAxis;
import com.github.mikephil.charting.data.BarData;
import com.github.mikephil.charting.data.BarDataSet;
import com.github.mikephil.charting.data.BarEntry;
import com.google.android.gms.appindexing.Action;
import com.google.android.gms.appindexing.AppIndex;
import com.google.android.gms.common.api.GoogleApiClient;
import java.util.ArrayList;
/**
* Created by erin on 03/05/16.
* Calculates the statistical probability of mortality and unfavourable outcome
* based on core, neuro, and full characteristics (given by the user in
* InputActivity). This uses the graphing software (MPAndroidChart) made by
* Philipp Jahoda accessible at https://github.com/PhilJay/MPAndroidChart/wiki/Getting-Started
* @author erin
*/
public class ResultsActivity extends AppCompatActivity {
/**
* ATTENTION: This was auto-generated to implement the App Indexing API.
* See https://g.co/AppIndexing/AndroidStudio for more information.
*/
private GoogleApiClient client;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_results);
Toolbar myToolbar = (Toolbar) findViewById(R.id.my_toolbar);
setSupportActionBar(myToolbar);
TextView coreText = (TextView) findViewById(R.id.coreText);
TextView neuroText = (TextView) findViewById(R.id.neuroText);
TextView fullText = (TextView) findViewById(R.id.fullText);
assert coreText != null;
assert neuroText != null;
assert fullText != null;
TextView neuroMsg = (TextView) findViewById(R.id.neuroMessage);
TextView fullMsg = (TextView) findViewById(R.id.fullMessage);
assert neuroMsg != null;
assert fullMsg != null;
BarChart coreChart = (BarChart) findViewById(R.id.coreChart);
BarChart neuroChart = (BarChart) findViewById(R.id.neuroChart);
BarChart fullChart = (BarChart) findViewById(R.id.fullChart);
assert coreChart != null;
assert neuroChart != null;
assert fullChart != null;
Inputs input = InputController.getInput();
Integer age = input.getAge();
Integer ht;
if (input.getHypertension()) {
ht = 1;
} else {
ht = 0;
}
Integer wfns = input.getWfns();
Integer fisher = input.getFisher();
String location = input.getLocation();
Integer size = input.getSize();
String repair = input.getRepair();
SearchFile searchFile = new SearchFile();
Calculations calculations = new Calculations();
double LinPCoreMort = calculations.LPCoreMortality(age, ht, wfns);
double PPCoreMort = (Math.pow(Math.E, LinPCoreMort)) / (1 + Math.pow(Math.E, LinPCoreMort));
double SELinPCoreMort = searchFile.readCSVFileFromAssets(getApplicationContext(), "lp_mort_core.csv", "se_mort_core.csv", LinPCoreMort);
double CIloCoreMort = LinPCoreMort - 1.96 * SELinPCoreMort;
double CIupCoreMort = LinPCoreMort + 1.96 * SELinPCoreMort;
double PPloCoreMort = (Math.pow(Math.E, CIloCoreMort)) / (1 + Math.pow(Math.E, CIloCoreMort));
double PPupCoreMort = (Math.pow(Math.E, CIupCoreMort)) / (1 + Math.pow(Math.E, CIupCoreMort));
double LinPCoreUF = calculations.LPCoreUF(age, ht, wfns);
double PPCoreUF = (Math.pow(Math.E, LinPCoreUF)) / (1 + Math.pow(Math.E, LinPCoreUF));
double SELinPCoreUF = searchFile.readCSVFileFromAssets(getApplicationContext(), "lp_uf_core.csv", "se_uf_core.csv", LinPCoreUF);
double CIloCoreUF = LinPCoreUF - 1.96 * SELinPCoreUF;
double CIupCoreUF = LinPCoreUF + 1.96 * SELinPCoreUF;
double PPloCoreUF = (Math.pow(Math.E, CIloCoreUF)) / (1 + Math.pow(Math.E, CIloCoreUF));
double PPupCoreUF = (Math.pow(Math.E, CIupCoreUF)) / (1 + Math.pow(Math.E, CIupCoreUF));
String coreMessage = getResources().getString(R.string.mortality) + Math.round(PPloCoreMort * 100) + getResources().getString(R.string.and) + Math.round(PPupCoreMort * 100) + getResources().getString(R.string.unfavourable) + Math.round(PPloCoreUF * 100) + getResources().getString(R.string.and) + Math.round(PPupCoreUF * 100) + getResources().getString(R.string.percent);
coreText.setText(coreMessage);
ArrayList<String> labels = new ArrayList<>();
labels.add("Mortality");
labels.add("Unfavourable");
labels.add("Favourable");
ArrayList<BarEntry> core = new ArrayList<>();
core.add(new BarEntry((float) PPCoreMort * 100, 0));
core.add(new BarEntry((float) PPCoreUF * 100, 1));
core.add(new BarEntry((float) (100 - (PPCoreUF * 100)), 2));
BarDataSet coreSet = new BarDataSet(core, "");
coreSet.setValueTextSize(12f);
coreSet.setColors(new int[]{R.color.colorBlack, R.color.colorRed, R.color.colorGreen}, this);
BarData coreData = new BarData(labels, coreSet);
coreChart.setData(coreData);
coreChart.setDescription("");
XAxis coreMortX = coreChart.getXAxis();
coreMortX.setTextSize(12f);
YAxis coreMortY = coreChart.getAxisLeft();
YAxis coreUFY = coreChart.getAxisRight();
coreMortY.setAxisMaxValue(119f);
coreMortY.setAxisMinValue(0f);
coreUFY.setAxisMaxValue(119f);
coreUFY.setAxisMinValue(0f);
coreUFY.setEnabled(false);
if (fisher != null) {
neuroChart.setVisibility(View.VISIBLE);
neuroMsg.setVisibility(View.VISIBLE);
neuroText.setVisibility(View.VISIBLE);
double LinPNeuroMort = calculations.LPNeuroMortality(age, ht, wfns, fisher, location, size);
double PPNeuroMort = (Math.pow(Math.E, LinPNeuroMort)) / (1 + Math.pow(Math.E, LinPNeuroMort));
double SELinPNeuroMort = searchFile.readCSVFileFromAssets(getApplicationContext(), "lp_mort_neuro.csv", "se_mort_neuro.csv", LinPNeuroMort);
double CIloNeuroMort = LinPNeuroMort - 1.96 * SELinPNeuroMort;
double CIupNeuroMort = LinPNeuroMort + 1.96 * SELinPNeuroMort;
double PPloNeuroMort = (Math.pow(Math.E, CIloNeuroMort)) / (1 + Math.pow(Math.E, CIloNeuroMort));
double PPupNeuroMort = (Math.pow(Math.E, CIupNeuroMort)) / (1 + Math.pow(Math.E, CIupNeuroMort));
double LinPNeuroUF = calculations.LPNeuroUF(age, ht, wfns, fisher, location, size);
double PPNeuroUF = (Math.pow(Math.E, LinPNeuroUF)) / (1 + Math.pow(Math.E, LinPNeuroUF));
double SELinPNeuroUF = searchFile.readCSVFileFromAssets(getApplicationContext(), "lp_uf_neuro.csv", "se_uf_neuro.csv", LinPNeuroUF);
double CIloNeuroUF = LinPNeuroUF - 1.96 * SELinPNeuroUF;
double CIupNeuroUF = LinPNeuroUF + 1.96 * SELinPNeuroUF;
double PPloNeuroUF = (Math.pow(Math.E, CIloNeuroUF)) / (1 + Math.pow(Math.E, CIloNeuroUF));
double PPupNeuroUF = (Math.pow(Math.E, CIupNeuroUF)) / (1 + Math.pow(Math.E, CIupNeuroUF));
String neuroMessage = getResources().getString(R.string.mortality) + Math.round(PPloNeuroMort * 100) + getResources().getString(R.string.and) + Math.round(PPupNeuroMort * 100) + getResources().getString(R.string.unfavourable) + Math.round(PPloNeuroUF * 100) + getResources().getString(R.string.and) + Math.round(PPupNeuroUF * 100) + getResources().getString(R.string.percent);
neuroText.setText(neuroMessage);
ArrayList<BarEntry> neuro = new ArrayList<>();
neuro.add(new BarEntry((float) PPNeuroMort * 100, 0));
neuro.add(new BarEntry((float) PPNeuroUF * 100, 1));
neuro.add(new BarEntry((float) (100 - (PPNeuroUF * 100)), 2));
BarDataSet neuroSet = new BarDataSet(neuro, "");
neuroSet.setValueTextSize(12f);
neuroSet.setColors(new int[]{R.color.colorBlack, R.color.colorRed, R.color.colorGreen}, this);
BarData neuroData = new BarData(labels, neuroSet);
neuroChart.setData(neuroData);
neuroChart.setDescription("");
XAxis neuroMortX = neuroChart.getXAxis();
neuroMortX.setTextSize(12f);
YAxis neuroMortY = neuroChart.getAxisLeft();
YAxis neuroUFY = neuroChart.getAxisRight();
neuroMortY.setAxisMaxValue(119f);
neuroMortY.setAxisMinValue(0f);
neuroUFY.setAxisMaxValue(119f);
neuroUFY.setAxisMinValue(0f);
neuroUFY.setEnabled(false);
}
if (repair != null) {
fullChart.setVisibility(View.VISIBLE);
fullMsg.setVisibility(View.VISIBLE);
fullText.setVisibility(View.VISIBLE);
double LinPFullMort = calculations.LPFullMortality(age, ht, wfns, fisher, location, size, repair);
double PPFullMort = (Math.pow(Math.E, LinPFullMort)) / (1 + Math.pow(Math.E, LinPFullMort));
double SELinPFullMort = searchFile.readCSVFileFromAssets(getApplicationContext(), "lp_mort_full.csv", "se_mort_full.csv", LinPFullMort);
double CIloFullMort = LinPFullMort - 1.96 * SELinPFullMort;
double CIupFullMort = LinPFullMort + 1.96 * SELinPFullMort;
double PPloFullMort = (Math.pow(Math.E, CIloFullMort)) / (1 + Math.pow(Math.E, CIloFullMort));
double PPupFullMort = (Math.pow(Math.E, CIupFullMort)) / (1 + Math.pow(Math.E, CIupFullMort));
double LinPFullUF = calculations.LPFullUF(age, ht, wfns, fisher, location, size, repair);
double PPFullUF = (Math.pow(Math.E, LinPFullUF)) / (1 + Math.pow(Math.E, LinPFullUF));
double SELinPFullUF = searchFile.readCSVFileFromAssets(getApplicationContext(), "lp_uf_full.csv", "se_uf_full.csv", LinPFullUF);
double CIloFullUF = LinPFullUF - 1.96 * SELinPFullUF;
double CIupFullUF = LinPFullUF + 1.96 * SELinPFullUF;
double PPloFullUF = (Math.pow(Math.E, CIloFullUF)) / (1 + Math.pow(Math.E, CIloFullUF));
double PPupFullUF = (Math.pow(Math.E, CIupFullUF)) / (1 + Math.pow(Math.E, CIupFullUF));
String fullMessage = getResources().getString(R.string.mortality) + Math.round(PPloFullMort * 100) + getResources().getString(R.string.and) + Math.round(PPupFullMort * 100) + getResources().getString(R.string.unfavourable) + Math.round(PPloFullUF * 100) + getResources().getString(R.string.and) + Math.round(PPupFullUF * 100) + getResources().getString(R.string.percent);
fullText.setText(fullMessage);
ArrayList<BarEntry> full = new ArrayList<>();
full.add(new BarEntry((float) PPFullMort * 100, 0));
full.add(new BarEntry((float) PPFullUF * 100, 1));
full.add(new BarEntry((float) (100 - (PPFullUF * 100)), 2));
BarDataSet fullSet = new BarDataSet(full, "");
fullSet.setValueTextSize(12f);
fullSet.setColors(new int[]{R.color.colorBlack, R.color.colorRed, R.color.colorGreen}, this);
BarData fullData = new BarData(labels, fullSet);
fullChart.setData(fullData);
fullChart.setDescription("");
XAxis fullMortX = fullChart.getXAxis();
fullMortX.setTextSize(12f);
YAxis fullMortY = fullChart.getAxisLeft();
YAxis fullUFY = fullChart.getAxisRight();
fullMortY.setAxisMaxValue(119f);
fullMortY.setAxisMinValue(0f);
fullUFY.setAxisMaxValue(119f);
fullUFY.setAxisMinValue(0f);
fullUFY.setEnabled(false);
}
// ATTENTION: This was auto-generated to implement the App Indexing API.
// See https://g.co/AppIndexing/AndroidStudio for more information.
client = new GoogleApiClient.Builder(this).addApi(AppIndex.API).build();
}
/**
* These two functions display menu options and navigate to different
* activities when an item is clicked.
* @param item Indicates which item the user has selected
* @return Boolean indicating success
*/
// Used https://www.learn2crack.com/2014/06/android-action-bar-example.html
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case R.id.home_button:
Intent homeIntent = new Intent(this, InformationActivity.class);
homeIntent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
startActivity(homeIntent);
return true;
case R.id.definitions_button:
Intent defsIntent = new Intent(this, InformationActivity.class);
defsIntent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
startActivity(defsIntent);
return true;
case R.id.calculate_button:
Intent calcIntent = new Intent(this, InputActivity.class);
calcIntent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
startActivity(calcIntent);
return true;
default:
return super.onOptionsItemSelected(item);
}
}
    /**
     * Inflates the results screen's action-bar menu.
     * @param menu The corresponding menu object for this activity.
     * @return Boolean indicating success
     */
    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        getMenuInflater().inflate(R.menu.results_menu_items, menu);
        return true;
    }
@Override
public void onStart() {
super.onStart();
// ATTENTION: This was auto-generated to implement the App Indexing API.
// See https://g.co/AppIndexing/AndroidStudio for more information.
client.connect();
Action viewAction = Action.newAction(
Action.TYPE_VIEW, // TODO: choose an action type.
"Results Page", // TODO: Define a title for the content shown.
// TODO: If you have web page content that matches this app activity's content,
// make sure this auto-generated web page URL is correct.
// Otherwise, set the URL to null.
Uri.parse("http://host/path"),
// TODO: Make sure this auto-generated app deep link URI is correct.
Uri.parse("android-app://com.example.erin.sahitscore/http/host/path")
);
AppIndex.AppIndexApi.start(client, viewAction);
}
@Override
public void onStop() {
super.onStop();
// ATTENTION: This was auto-generated to implement the App Indexing API.
// See https://g.co/AppIndexing/AndroidStudio for more information.
Action viewAction = Action.newAction(
Action.TYPE_VIEW, // TODO: choose an action type.
"Results Page", // TODO: Define a title for the content shown.
// TODO: If you have web page content that matches this app activity's content,
// make sure this auto-generated web page URL is correct.
// Otherwise, set the URL to null.
Uri.parse("http://host/path"),
// TODO: Make sure this auto-generated app deep link URI is correct.
Uri.parse("android-app://com.example.erin.sahitscore/http/host/path")
);
AppIndex.AppIndexApi.end(client, viewAction);
client.disconnect();
}
}
| |
/*
* Copyright 2013-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.cloudfoundry;
/**
 * A factory for creating (and recognising) the uniquely-prefixed names used
 * by integration tests. Concrete implementations supply the low-level
 * primitives ({@link #getName(String)}, {@link #isName(String, String)},
 * IP address and port generation); the default methods derive every
 * domain-specific name from a well-known prefix.
 */
public interface NameFactory {

    String APPLICATION_PREFIX = "test-application-";
    String BUILDPACK_PREFIX = "test-buildpack-";
    String CLIENT_ID_PREFIX = "test-client-id-";
    String CLIENT_SECRET_PREFIX = "test-client-secret-";
    String DOMAIN_PREFIX = "test.domain.";
    String GROUP_PREFIX = "test-group-";
    String HOST_PREFIX = "test-host-";
    String IDENTITY_PROVIDER_PREFIX = "test-identity-provider-";
    String IDENTITY_ZONE_PREFIX = "test-identity-zone-";
    String ISOLATION_SEGMENT_PREFIX = "test-isolation-segment-";
    String ORGANIZATION_PREFIX = "test-organization-";
    String PASSWORD_PREFIX = "test-password-";
    String PATH_PREFIX = "/test-path-";
    String PLAN_PREFIX = "test-plan-";
    String QUOTA_DEFINITION_PREFIX = "test-quota-definition-";
    String SECURITY_GROUP_PREFIX = "test-security-group-";
    String SERVICE_BROKER_PREFIX = "test-service-broker-";
    String SERVICE_INSTANCE_PREFIX = "test-service-instance-";
    String SERVICE_KEY_PREFIX = "test-service-key-";
    String SERVICE_PREFIX = "test-service-";
    String SPACE_PREFIX = "test-space-";
    String STACK_PREFIX = "test-stack-";
    String TASK_PREFIX = "test-task-";
    String USER_ID_PREFIX = "test-user-id-";
    String USER_PREFIX = "test-user-";
    String VARIABLE_NAME_PREFIX = "test-variable-name-";
    String VARIABLE_VALUE_PREFIX = "test-variable-value-";

    /**
     * Creates a name
     *
     * @param prefix the prefix to the name
     * @return the name
     */
    String getName(String prefix);

    /**
     * Creates an IP address
     *
     * @return the IP address
     */
    String getIpAddress();

    /**
     * Creates a port
     *
     * @return a port
     */
    int getPort();

    /**
     * Tests a name to determine if it starts with a prefix
     *
     * @param prefix the prefix to the name
     * @param candidate the candidate name
     * @return {@code true} if the name starts with the prefix, {@code false} otherwise
     */
    boolean isName(String prefix, String candidate);

    /**
     * Tests a string to determine if it is an IP address
     *
     * @param candidate the candidate string
     * @return {@code true} if the string is an IP address, {@code false} otherwise
     */
    boolean isIpAddress(String candidate);

    /**
     * Tests if an integer is a port
     *
     * @param candidate the candidate integer
     * @return {@code true} if the integer is a port, {@code false} otherwise
     */
    boolean isPort(int candidate);

    /** Creates an application name. */
    default String getApplicationName() { return getName(APPLICATION_PREFIX); }

    /** Creates a buildpack name. */
    default String getBuildpackName() { return getName(BUILDPACK_PREFIX); }

    /** Creates a client id. */
    default String getClientId() { return getName(CLIENT_ID_PREFIX); }

    /** Creates a client secret. */
    default String getClientSecret() { return getName(CLIENT_SECRET_PREFIX); }

    /** Creates a domain name. */
    default String getDomainName() { return getName(DOMAIN_PREFIX); }

    /** Creates a group name. */
    default String getGroupName() { return getName(GROUP_PREFIX); }

    /** Creates a host name. */
    default String getHostName() { return getName(HOST_PREFIX); }

    /** Creates an identity provider name. */
    default String getIdentityProviderName() { return getName(IDENTITY_PROVIDER_PREFIX); }

    /** Creates an identity zone name. */
    default String getIdentityZoneName() { return getName(IDENTITY_ZONE_PREFIX); }

    /** Creates an isolation segment name. */
    default String getIsolationSegmentName() { return getName(ISOLATION_SEGMENT_PREFIX); }

    /** Creates an organization name. */
    default String getOrganizationName() { return getName(ORGANIZATION_PREFIX); }

    /** Creates a password. */
    default String getPassword() { return getName(PASSWORD_PREFIX); }

    /** Creates a path. */
    default String getPath() { return getName(PATH_PREFIX); }

    /** Creates a plan name. */
    default String getPlanName() { return getName(PLAN_PREFIX); }

    /** Creates a quota definition name. */
    default String getQuotaDefinitionName() { return getName(QUOTA_DEFINITION_PREFIX); }

    /** Creates a security group name. */
    default String getSecurityGroupName() { return getName(SECURITY_GROUP_PREFIX); }

    /** Creates a service broker name. */
    default String getServiceBrokerName() { return getName(SERVICE_BROKER_PREFIX); }

    /** Creates a service instance name. */
    default String getServiceInstanceName() { return getName(SERVICE_INSTANCE_PREFIX); }

    /** Creates a service key name. */
    default String getServiceKeyName() { return getName(SERVICE_KEY_PREFIX); }

    /** Creates a service name. */
    default String getServiceName() { return getName(SERVICE_PREFIX); }

    /** Creates a space name. */
    default String getSpaceName() { return getName(SPACE_PREFIX); }

    /** Creates a stack name. */
    default String getStackName() { return getName(STACK_PREFIX); }

    /** Creates a task name. */
    default String getTaskName() { return getName(TASK_PREFIX); }

    /** Creates a user id. */
    default String getUserId() { return getName(USER_ID_PREFIX); }

    /** Creates a user name. */
    default String getUserName() { return getName(USER_PREFIX); }

    /** Creates a variable name. */
    default String getVariableName() { return getName(VARIABLE_NAME_PREFIX); }

    /** Creates a variable value. */
    default String getVariableValue() { return getName(VARIABLE_VALUE_PREFIX); }

    /** Tests whether {@code candidate} is an application name. */
    default boolean isApplicationName(String candidate) { return isName(APPLICATION_PREFIX, candidate); }

    /** Tests whether {@code candidate} is a buildpack name. */
    default boolean isBuildpackName(String candidate) { return isName(BUILDPACK_PREFIX, candidate); }

    /** Tests whether {@code candidate} is a client id. */
    default boolean isClientId(String candidate) { return isName(CLIENT_ID_PREFIX, candidate); }

    /** Tests whether {@code candidate} is a client secret. */
    default boolean isClientSecret(String candidate) { return isName(CLIENT_SECRET_PREFIX, candidate); }

    /** Tests whether {@code candidate} is a domain name. */
    default boolean isDomainName(String candidate) { return isName(DOMAIN_PREFIX, candidate); }

    /** Tests whether {@code candidate} is a group name. */
    default boolean isGroupName(String candidate) { return isName(GROUP_PREFIX, candidate); }

    /** Tests whether {@code candidate} is a host name. */
    default boolean isHostName(String candidate) { return isName(HOST_PREFIX, candidate); }

    /** Tests whether {@code candidate} is an identity provider name. */
    default boolean isIdentityProviderName(String candidate) { return isName(IDENTITY_PROVIDER_PREFIX, candidate); }

    /** Tests whether {@code candidate} is an identity zone name. */
    default boolean isIdentityZoneName(String candidate) { return isName(IDENTITY_ZONE_PREFIX, candidate); }

    /** Tests whether {@code candidate} is an isolation segment name. */
    default boolean isIsolationSegmentName(String candidate) { return isName(ISOLATION_SEGMENT_PREFIX, candidate); }

    /** Tests whether {@code candidate} is an organization name. */
    default boolean isOrganizationName(String candidate) { return isName(ORGANIZATION_PREFIX, candidate); }

    /** Tests whether {@code candidate} is a password. */
    default boolean isPassword(String candidate) { return isName(PASSWORD_PREFIX, candidate); }

    /** Tests whether {@code candidate} is a path. */
    default boolean isPath(String candidate) { return isName(PATH_PREFIX, candidate); }

    /** Tests whether {@code candidate} is a plan name. */
    default boolean isPlanName(String candidate) { return isName(PLAN_PREFIX, candidate); }

    /** Tests whether {@code candidate} is a quota definition name. */
    default boolean isQuotaDefinitionName(String candidate) { return isName(QUOTA_DEFINITION_PREFIX, candidate); }

    /** Tests whether {@code candidate} is a security group name. */
    default boolean isSecurityGroupName(String candidate) { return isName(SECURITY_GROUP_PREFIX, candidate); }

    /** Tests whether {@code candidate} is a service broker name. */
    default boolean isServiceBrokerName(String candidate) { return isName(SERVICE_BROKER_PREFIX, candidate); }

    /** Tests whether {@code candidate} is a service instance name. */
    default boolean isServiceInstanceName(String candidate) { return isName(SERVICE_INSTANCE_PREFIX, candidate); }

    /** Tests whether {@code candidate} is a service key name. */
    default boolean isServiceKeyName(String candidate) { return isName(SERVICE_KEY_PREFIX, candidate); }

    /** Tests whether {@code candidate} is a service name. */
    default boolean isServiceName(String candidate) { return isName(SERVICE_PREFIX, candidate); }

    /** Tests whether {@code candidate} is a space name. */
    default boolean isSpaceName(String candidate) { return isName(SPACE_PREFIX, candidate); }

    /** Tests whether {@code candidate} is a stack name. */
    default boolean isStackName(String candidate) { return isName(STACK_PREFIX, candidate); }

    /** Tests whether {@code candidate} is a task name. */
    default boolean isTaskName(String candidate) { return isName(TASK_PREFIX, candidate); }

    /** Tests whether {@code candidate} is a user id. */
    default boolean isUserId(String candidate) { return isName(USER_ID_PREFIX, candidate); }

    /** Tests whether {@code candidate} is a user name. */
    default boolean isUserName(String candidate) { return isName(USER_PREFIX, candidate); }

    /** Tests whether {@code candidate} is a variable name. */
    default boolean isVariableName(String candidate) { return isName(VARIABLE_NAME_PREFIX, candidate); }

    /** Tests whether {@code candidate} is a variable value. */
    default boolean isVariableValue(String candidate) { return isName(VARIABLE_VALUE_PREFIX, candidate); }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zookeeper.test;
import static org.apache.zookeeper.test.ClientBase.CONNECTION_TIMEOUT;
import static org.apache.zookeeper.test.ClientBase.verifyThreadTerminated;
import java.util.LinkedList;
import org.apache.log4j.Logger;
import org.apache.zookeeper.CreateMode;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.TestableZooKeeper;
import org.apache.zookeeper.WatchedEvent;
import org.apache.zookeeper.ZKTestCase;
import org.apache.zookeeper.AsyncCallback.DataCallback;
import org.apache.zookeeper.AsyncCallback.StringCallback;
import org.apache.zookeeper.AsyncCallback.VoidCallback;
import org.apache.zookeeper.ZooDefs.Ids;
import org.apache.zookeeper.data.Stat;
import org.apache.zookeeper.test.ClientBase.CountdownWatcher;
import org.junit.Assert;
import org.junit.Test;
/**
 * Stress ("hammer") test that drives many asynchronous create/delete
 * operations against a ZooKeeper quorum and then verifies all servers
 * agree on the root's contents, both before and after a quorum restart.
 * The class itself also implements the async callbacks used to collect
 * result codes into a caller-supplied list.
 */
public class AsyncHammerTest extends ZKTestCase
    implements StringCallback, VoidCallback, DataCallback
{
    private static final Logger LOG = Logger.getLogger(AsyncHammerTest.class);
    // Quorum of servers the hammer threads connect to.
    private QuorumBase qb = new QuorumBase();
    // While true, hammer threads keep submitting creates; volatile so the
    // flip in the test methods is seen promptly by all worker threads.
    private volatile boolean bang;
    // Called explicitly by each test (not framework-managed) so the test
    // can choose whether the quorum includes observers.
    public void setUp(boolean withObservers) throws Exception {
        qb.setUp(withObservers);
    }
    // Bounces the quorum on the same ports/dirs so post-restart
    // consistency checks remain meaningful.
    protected void restart() throws Exception {
        LOG.info("RESTARTING " + getTestName());
        qb.tearDown();
        // don't call setup - we don't want to reassign ports/dirs, etc...
        JMXEnv.setUp();
        qb.startServers();
    }
    // Called explicitly at the end of each test (not framework-managed).
    public void tearDown() throws Exception {
        LOG.info("Test clients shutting down");
        qb.tearDown();
    }
    /**
     * Create /test- sequence nodes asynchronously, max 30 outstanding
     */
    class HammerThread extends Thread implements StringCallback, VoidCallback {
        private static final int MAX_OUTSTANDING = 30;
        private TestableZooKeeper zk;
        // Requests currently in flight; guarded by the synchronized
        // inc/dec methods below.
        private int outstanding;
        // Set on any unexpected failure; read by the test thread after join.
        private volatile boolean failed = false;
        public HammerThread(String name) {
            super(name);
        }
        // Issues async creates until 'bang' is cleared, throttled to
        // MAX_OUTSTANDING in-flight requests, then closes the client and
        // flags failure if shutdown does not complete in time.
        public void run() {
            try {
                CountdownWatcher watcher = new CountdownWatcher();
                zk = new TestableZooKeeper(qb.hostPort, CONNECTION_TIMEOUT,
                        watcher);
                watcher.waitForConnected(CONNECTION_TIMEOUT);
                while(bang) {
                    incOutstanding(); // before create otw race
                    zk.create("/test-", new byte[0], Ids.OPEN_ACL_UNSAFE,
                            CreateMode.PERSISTENT_SEQUENTIAL, this, null);
                }
            } catch (InterruptedException e) {
                // Interrupt is the expected stop signal once bang is false;
                // an interrupt while still hammering indicates a problem.
                if (bang) {
                    LOG.error("sanity check Assert.failed!!!"); // sanity check
                    return;
                }
            } catch (Exception e) {
                LOG.error("Client create operation Assert.failed", e);
                return;
            } finally {
                if (zk != null) {
                    try {
                        zk.close();
                        if (!zk.testableWaitForShutdown(CONNECTION_TIMEOUT)) {
                            failed = true;
                            LOG.error("Client did not shutdown");
                        }
                    } catch (InterruptedException e) {
                        LOG.info("Interrupted", e);
                    }
                }
            }
        }
        // Reserves a request slot; blocks while more than MAX_OUTSTANDING
        // requests are in flight.
        private synchronized void incOutstanding() throws InterruptedException {
            outstanding++;
            while(outstanding > MAX_OUTSTANDING) {
                wait();
            }
        }
        // Releases a slot and wakes any producer blocked in incOutstanding().
        private synchronized void decOutstanding() {
            outstanding--;
            Assert.assertTrue("outstanding >= 0", outstanding >= 0);
            notifyAll();
        }
        public void process(WatchedEvent event) {
            // ignore for purposes of this test
        }
        // StringCallback: completion of an async create. On success the
        // freshly created node is immediately deleted (async as well,
        // handled by the VoidCallback below).
        public void processResult(int rc, String path, Object ctx, String name) {
            if (rc != KeeperException.Code.OK.intValue()) {
                // Errors only count as failures while the hammer is still
                // supposed to be running; shutdown-time errors are expected.
                if (bang) {
                    failed = true;
                    LOG.error("Create Assert.failed for 0x"
                            + Long.toHexString(zk.getSessionId())
                            + "with rc:" + rc + " path:" + path);
                }
                decOutstanding();
                return;
            }
            try {
                decOutstanding();
                zk.delete(name, -1, this, null);
            } catch (Exception e) {
                if (bang) {
                    failed = true;
                    LOG.error("Client delete Assert.failed", e);
                }
            }
        }
        // VoidCallback: completion of the async delete issued above.
        public void processResult(int rc, String path, Object ctx) {
            if (rc != KeeperException.Code.OK.intValue()) {
                if (bang) {
                    failed = true;
                    LOG.error("Delete Assert.failed for 0x"
                            + Long.toHexString(zk.getSessionId())
                            + "with rc:" + rc + " path:" + path);
                }
            }
        }
    }
    /**
     * Runs 100 hammer threads against a plain quorum for ~5s, verifies no
     * thread failed, then checks server consistency before and after a
     * quorum restart.
     */
    @Test
    public void testHammer() throws Exception {
        setUp(false);
        bang = true;
        LOG.info("Starting hammers");
        HammerThread[] hammers = new HammerThread[100];
        for (int i = 0; i < hammers.length; i++) {
            hammers[i] = new HammerThread("HammerThread-" + i);
            hammers[i].start();
        }
        LOG.info("Started hammers");
        Thread.sleep(5000); // allow the clients to run for max 5sec
        bang = false;
        LOG.info("Stopping hammers");
        for (int i = 0; i < hammers.length; i++) {
            hammers[i].interrupt();
            verifyThreadTerminated(hammers[i], 60000);
            Assert.assertFalse(hammers[i].failed);
        }
        // before restart
        LOG.info("Hammers stopped, verifying consistency");
        qb.verifyRootOfAllServersMatch(qb.hostPort);
        restart();
        // after restart
        LOG.info("Verifying hammers 2");
        qb.verifyRootOfAllServersMatch(qb.hostPort);
        tearDown();
    }
    /**
     * Same hammer scenario but with observers in the quorum.
     * NOTE(review): the array is declared Thread[] here, so the per-thread
     * 'failed' flag is not asserted as it is in testHammer — confirm
     * whether that omission is intentional.
     */
    @Test
    public void testObserversHammer() throws Exception {
        setUp(true);
        bang = true;
        Thread[] hammers = new Thread[100];
        for (int i = 0; i < hammers.length; i++) {
            hammers[i] = new HammerThread("HammerThread-" + i);
            hammers[i].start();
        }
        Thread.sleep(5000); // allow the clients to run for max 5sec
        bang = false;
        for (int i = 0; i < hammers.length; i++) {
            hammers[i].interrupt();
            verifyThreadTerminated(hammers[i], 60000);
        }
        // before restart
        qb.verifyRootOfAllServersMatch(qb.hostPort);
        tearDown();
    }
    // The three callbacks below record result codes into a caller-supplied
    // LinkedList that doubles as the wait/notify monitor.
    @SuppressWarnings("unchecked")
    public void processResult(int rc, String path, Object ctx, String name) {
        synchronized(ctx) {
            ((LinkedList<Integer>)ctx).add(rc);
            ctx.notifyAll();
        }
    }
    @SuppressWarnings("unchecked")
    public void processResult(int rc, String path, Object ctx) {
        synchronized(ctx) {
            ((LinkedList<Integer>)ctx).add(rc);
            ctx.notifyAll();
        }
    }
    @SuppressWarnings("unchecked")
    public void processResult(int rc, String path, Object ctx, byte[] data,
            Stat stat) {
        synchronized(ctx) {
            ((LinkedList<Integer>)ctx).add(rc);
            ctx.notifyAll();
        }
    }
}
| |
package com.podio.contact;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.List;
import javax.ws.rs.core.MediaType;
import com.podio.BaseAPI;
import com.podio.ResourceFactory;
import com.sun.jersey.api.client.GenericType;
import com.sun.jersey.api.client.WebResource;
/**
* Each user have a profile attached, that holds all the personal details of the
* user. This includes very basic information like the name and mail addresses,
* but can also include more advanced fields like billing address and IM
* addresses. Fields can have either one or multiple values. There can f.ex.
* only be one name, but multiple mail addresses. The value of a field can
* either be a string, a number or a date.
*/
public class ContactAPI extends BaseAPI {
    /**
     * Creates a new contact API backed by the given resource factory.
     *
     * @param resourceFactory
     *            The factory used to build API resources
     */
    public ContactAPI(ResourceFactory resourceFactory) {
        super(resourceFactory);
    }
/**
* Adds a new contact to the given space.
*
* @param spaceId
* The id of the space the contact should be added to
* @param create
* The data for the new contact
* @param silent
* True if the create should be silent, false otherwise
* @return The id of the newly created contact
*/
public int addSpaceContact(int spaceId, ContactCreate create, boolean silent) {
return getResourceFactory().getApiResource("/contact/space/" + spaceId + "/")
.queryParam("silent", silent ? "1" : "0")
.entity(create, MediaType.APPLICATION_JSON_TYPE)
.post(ContactCreateResponse.class).getId();
}
/**
* Updates the entire space contact. Only fields which have values specified
* will be updated. To delete the contents of a field, pass an empty array
* for the value.
*
* @param profileId
* The id of the space contact to be updated
* @param update
* The data for the update
* @param silent
* True if the update should be silent, false otherwise
* @param hook
* True if hooks should be executed for the change, false otherwise
*/
public void updateSpaceContact(int profileId, ContactUpdate update, boolean silent, boolean hook) {
getResourceFactory().getApiResource("/contact/" + profileId)
.queryParam("silent", silent ? "1" : "0")
.queryParam("hook", hook ? "1" : "0")
.entity(update, MediaType.APPLICATION_JSON_TYPE).put();
}
/**
* Deletes a space contact.
*
* @param profileId
* The id of the space contact to be deleted
* @param silent
* True if the deletion should be silent, false otherwise
*/
public void deleteSpaceContact(int profileId, boolean silent) {
getResourceFactory().getApiResource("/contact/" + profileId)
.queryParam("silent", silent ? "1" : "0")
.delete();
}
/**
* Returns all the contact details about the user with the given id.
*
* @param profileId
* The profile id of the user
* @return The contact profile
*/
public Profile getContact(int profileId) {
return getResourceFactory().getApiResource("/contact/" + profileId + "/v2").get(
Profile.class);
}
/**
* Returns the value of a contact with the specific field
*
* @param userId
* The id of the user
* @param field
* The field for which data should be returned
* @return The list of values for the given field
*/
public <T, R> List<T> getContactField(int userId, ProfileField<T, R> field) {
List<R> values = getResourceFactory().getApiResource(
"/contact/" + userId + "/" + field.getName())
.get(new GenericType<List<R>>() {
});
List<T> formatted = new ArrayList<T>();
for (R value : values) {
formatted.add(field.parse(value));
}
return formatted;
}
/**
* Returns the total number of contacts by organization.
*
* @return The list of contact totals by organization
*/
public ContactTotal getContactTotal() {
return getResourceFactory().getApiResource("/contact/totals/").get(
ContactTotal.class);
}
/**
* Used to get a list of contacts for the user.
*
* @param key
* The profile field if the contacts should be filtered
* @param value
* The value for the field if the contacts should be filtered
* @param limit
* The maximum number of contacts to return
* @param offset
* The offset into the list of contacts
* @param type
* The format in which the contacts should be returned
* @param order
* How the contacts should be ordered
* @param contactType
* The type of contacts to be returned
* @return The list of contacts
*/
public <T, F, R> List<T> getContacts(ProfileField<F, R> key, F value,
Integer limit, Integer offset, ProfileType<T> type,
ContactOrder order, ContactType contactType) {
WebResource resource = getResourceFactory().getApiResource("/contact/");
return getContactsCommon(resource, key, value, limit, offset, type,
order, contactType);
}
/**
* Returns all the profiles of the users contacts on the given organization
*
* @param organizationId
* The id of the organization the contacts should be returned
* from
* @param key
* The profile field if the contacts should be filtered
* @param value
* The value for the field if the contacts should be filtered
* @param limit
* The maximum number of contacts to return
* @param offset
* The offset into the list of contacts
* @param type
* The format in which the contacts should be returned
* @param order
* How the contacts should be ordered
* @param contactType
* The type of contacts to be returned
* @return The list of contacts
*/
public <T, F, R> List<T> getOrganizationContacts(int organizationId,
ProfileField<F, R> key, F value, Integer limit, Integer offset,
ProfileType<T> type, ContactOrder order, ContactType contactType) {
WebResource resource = getResourceFactory().getApiResource(
"/contact/org/" + organizationId);
return getContactsCommon(resource, key, value, limit, offset, type,
order, contactType);
}
/**
* Returns all the profiles of the users contacts on the given space
*
* @param spaceId
* The id of the space the contacts should be returned from
* @param key
* The profile field if the contacts should be filtered
* @param value
* The value for the field if the contacts should be filtered
* @param limit
* The maximum number of contacts to return
* @param offset
* The offset into the list of contacts
* @param type
* The format in which the contacts should be returned
* @param order
* How the contacts should be ordered
* @param contactType
* The type of contacts to be returned
* @return The list of contacts
*/
public <T, F, R> List<T> getSpaceContacts(int spaceId,
ProfileField<F, R> key, F value, Integer limit, Integer offset,
ProfileType<T> type, ContactOrder order, ContactType contactType) {
WebResource resource = getResourceFactory().getApiResource(
"/contact/space/" + spaceId);
return getContactsCommon(resource, key, value, limit, offset, type,
order, contactType);
}
private <T, F, R> List<T> getContactsCommon(WebResource resource,
ProfileField<F, R> key, F value, Integer limit, Integer offset,
final ProfileType<T> type, ContactOrder order, ContactType contactType) {
if (key != null && value != null) {
resource = resource.queryParam("key", key.getName().toLowerCase())
.queryParam("value", key.format(value).toString());
}
if (limit != null) {
resource = resource.queryParam("limit", limit.toString());
}
if (offset != null) {
resource = resource.queryParam("offset", offset.toString());
}
resource = resource.queryParam("type", type.getName());
if (order != null) {
resource = resource.queryParam("order", order.name().toLowerCase());
}
if (contactType != null) {
resource = resource.queryParam("contact_type", contactType.name().toLowerCase());
}
return resource.get(getGenericType(type));
}
private <T> GenericType<List<T>> getGenericType(final ProfileType<T> type) {
return new GenericType<List<T>>(new ParameterizedType() {
@Override
public Type getRawType() {
return List.class;
}
@Override
public Type getOwnerType() {
return null;
}
@Override
public Type[] getActualTypeArguments() {
return new Type[] { type.getType() };
}
});
}
}
| |
/*
* Copyright Beijing 58 Information Technology Co.,Ltd.
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.bj58.spat.gaea.server.contract.log;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import com.bj58.spat.gaea.server.contract.context.GaeaContext;
/**
* This class is will log messages to the log file using log4j logging framework
*
* @author Service Platform Architecture Team (spat@58.com)
*/
public final class Log4jLogger implements ILog {
/** the log object to log to */
private transient Logger logger = null;
// ------------------------------------------------------------- Attributes
/** The fully qualified name of the Log4JLogger class. */
private static final String FQCN = Log4jLogger.class.getName();
/**
* Constructor for creating a logger object using jdk 1.4 or higher logging.
*
* @param cls
* the class which wants to log
*/
public Log4jLogger(Class<?> cls) {
logger = Logger.getLogger(cls);
}
/**
*
* @return
*/
private String getLogMsg(String msg) {
StringBuilder sbLog = new StringBuilder();
sbLog.append(msg);
GaeaContext context = GaeaContext.getFromThreadLocal();
if(context != null) {
sbLog.append("--");
sbLog.append("remoteIP:");
sbLog.append(context.getChannel().getRemoteIP());
sbLog.append("--remotePort:");
sbLog.append(context.getChannel().getRemotePort());
}
return sbLog.toString();
}
/**
* Logging a fine message
*
* @param message
* the message to log.
*/
public void fine(String message) {
logger.log(FQCN, Level.DEBUG, getLogMsg(message), null);
}
/**
* Logging a config message.
*
* @param message
* the message to log
*/
public void config(String message) {
logger.log(FQCN, Level.DEBUG, getLogMsg(message), null);
}
/**
* Logging a info message.
*
* @param message
* the message to log
*/
public void info(String message) {
logger.log(FQCN, Level.INFO, getLogMsg(message), null);
}
/**
* Logging a warning message.
*
* @param message
* the message to log
*/
public void warning(String message) {
logger.log(FQCN, Level.WARN, getLogMsg(message), null);
}
// ****************************************************
// * The methods from log4j also implemented below *
// ****************************************************
/**
* Logging a debug message.
*
* @param message
* the message to log
*/
public void debug(String message) {
logger.log(FQCN, Level.DEBUG, getLogMsg(message), null);
}
/**
* Logging a fatal message with the throwable message.
*
* @param message
* the message to log
* @param t
* the exception
*/
public void fatal(String message, Throwable t) {
logger.log(FQCN, Level.FATAL, getLogMsg(message), t);
}
/**
* Logging a debug message with the throwable message.
*
* @param message
* the message to log
* @param t
* the exception
*/
public void debug(String message, Throwable t) {
logger.log(FQCN, Level.DEBUG, getLogMsg(message), t);
}
/**
* Logging an info message with the throwable message.
*
* @param message
* the message to log
* @param t
* the exception
*/
public void info(String message, Throwable t) {
logger.log(FQCN, Level.INFO, getLogMsg(message), t);
}
/**
* Logging a warning message.
*
* @param message
* the message to log
*/
public void warn(String message) {
logger.log(FQCN, Level.WARN, getLogMsg(message), null);
}
/**
* Logging a warning message with the throwable message.
*
* @param message
* the message to log
* @param t
* the exception
*/
public void warn(String message, Throwable t) {
logger.log(FQCN, Level.WARN, getLogMsg(message), t);
}
/**
* Logging an error message.
*
* @param message
* the message to log
*/
public void error(String message) {
logger.log(FQCN, Level.ERROR, getLogMsg(message), null);
}
/**
* Logging an error message with the throwable message.
*
* @param message
* the message to log
* @param t
* the exception
*/
public void error(String message, Throwable t) {
logger.log(FQCN, Level.ERROR, getLogMsg(message), t);
}
public void error(Throwable e) {
logger.log(FQCN, Level.ERROR, getLogMsg(""), e);
}
/**
* Logging a fatal message.
*
* @param message
* the message to log
*/
public void fatal(String message) {
logger.log(FQCN, Level.FATAL, getLogMsg(message), null);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.reef.wake.remote.transport.netty;
import io.netty.bootstrap.Bootstrap;
import io.netty.bootstrap.ServerBootstrap;
import io.netty.channel.Channel;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelOption;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.group.ChannelGroup;
import io.netty.channel.group.ChannelGroupFuture;
import io.netty.channel.group.DefaultChannelGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.nio.NioServerSocketChannel;
import io.netty.channel.socket.nio.NioSocketChannel;
import io.netty.util.concurrent.Future;
import io.netty.util.concurrent.GlobalEventExecutor;
import org.apache.reef.tang.annotations.Parameter;
import org.apache.reef.wake.EStage;
import org.apache.reef.wake.EventHandler;
import org.apache.reef.wake.impl.DefaultThreadFactory;
import org.apache.reef.wake.remote.Encoder;
import org.apache.reef.wake.remote.RemoteConfiguration;
import org.apache.reef.wake.remote.address.LocalAddressProvider;
import org.apache.reef.wake.remote.exception.RemoteRuntimeException;
import org.apache.reef.wake.remote.impl.TransportEvent;
import org.apache.reef.wake.remote.ports.TcpPortProvider;
import org.apache.reef.wake.remote.transport.Link;
import org.apache.reef.wake.remote.transport.LinkListener;
import org.apache.reef.wake.remote.transport.Transport;
import org.apache.reef.wake.remote.transport.exception.TransportRuntimeException;
import javax.inject.Inject;
import java.io.IOException;
import java.net.BindException;
import java.net.ConnectException;
import java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.util.ArrayList;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* Messaging transport implementation with Netty.
*/
/**
 * Messaging transport implementation with Netty.
 */
public final class NettyMessagingTransport implements Transport {

  /**
   * Indicates a hostname that isn't set or known.
   */
  public static final String UNKNOWN_HOST_NAME = "##UNKNOWN##";

  private static final String CLASS_NAME = NettyMessagingTransport.class.getSimpleName();
  private static final Logger LOG = Logger.getLogger(CLASS_NAME);

  // Sizes of the Netty event loop groups.
  private static final int SERVER_BOSS_NUM_THREADS = 3;
  private static final int SERVER_WORKER_NUM_THREADS = 20;
  private static final int CLIENT_WORKER_NUM_THREADS = 10;

  // Per-remote-address cache of links; the LinkReference also carries the
  // AtomicInteger flag used by open() to serialize concurrent connect attempts.
  private final ConcurrentMap<SocketAddress, LinkReference> addrToLinkRefMap = new ConcurrentHashMap<>();

  private final EventLoopGroup clientWorkerGroup;
  private final EventLoopGroup serverBossGroup;
  private final EventLoopGroup serverWorkerGroup;

  private final Bootstrap clientBootstrap;
  // The server's listening channel.
  private final Channel acceptor;

  // Track all open channels so close() can shut them down as a group.
  private final ChannelGroup clientChannelGroup = new DefaultChannelGroup(GlobalEventExecutor.INSTANCE);
  private final ChannelGroup serverChannelGroup = new DefaultChannelGroup(GlobalEventExecutor.INSTANCE);

  private final InetSocketAddress localAddress;

  private final NettyClientEventListener clientEventListener;
  private final NettyServerEventListener serverEventListener;

  // Connect retry policy used by open().
  private final int numberOfTries;
  private final int retryTimeout;

  /**
   * Constructs a messaging transport.
   *
   * @param hostAddress the server host address
   * @param listenPort the server listening port; when it is 0, randomly assign a port number
   * @param clientStage the client-side stage that handles transport events
   * @param serverStage the server-side stage that handles transport events
   * @param numberOfTries the number of tries of connection
   * @param retryTimeout the timeout of reconnection
   * @param tcpPortProvider gives an iterator that produces random tcp ports in a range
   * @throws RemoteRuntimeException if the listen port is negative
   * @throws TransportRuntimeException if the server cannot bind to any port
   */
  @Inject
  private NettyMessagingTransport(
      @Parameter(RemoteConfiguration.HostAddress.class) final String hostAddress,
      @Parameter(RemoteConfiguration.Port.class) final int listenPort,
      @Parameter(RemoteConfiguration.RemoteClientStage.class) final EStage<TransportEvent> clientStage,
      @Parameter(RemoteConfiguration.RemoteServerStage.class) final EStage<TransportEvent> serverStage,
      @Parameter(RemoteConfiguration.NumberOfTries.class) final int numberOfTries,
      @Parameter(RemoteConfiguration.RetryTimeout.class) final int retryTimeout,
      final TcpPortProvider tcpPortProvider,
      final LocalAddressProvider localAddressProvider) {

    if (listenPort < 0) {
      throw new RemoteRuntimeException("Invalid server port: " + listenPort);
    }

    // Resolve the sentinel host name to the local address.
    final String host = UNKNOWN_HOST_NAME.equals(hostAddress) ? localAddressProvider.getLocalAddress() : hostAddress;

    this.numberOfTries = numberOfTries;
    this.retryTimeout = retryTimeout;
    this.clientEventListener = new NettyClientEventListener(this.addrToLinkRefMap, clientStage);
    this.serverEventListener = new NettyServerEventListener(this.addrToLinkRefMap, serverStage);

    this.serverBossGroup = new NioEventLoopGroup(SERVER_BOSS_NUM_THREADS,
        new DefaultThreadFactory(CLASS_NAME + ":ServerBoss"));
    this.serverWorkerGroup = new NioEventLoopGroup(SERVER_WORKER_NUM_THREADS,
        new DefaultThreadFactory(CLASS_NAME + ":ServerWorker"));
    this.clientWorkerGroup = new NioEventLoopGroup(CLIENT_WORKER_NUM_THREADS,
        new DefaultThreadFactory(CLASS_NAME + ":ClientWorker"));

    this.clientBootstrap = new Bootstrap()
        .group(this.clientWorkerGroup)
        .channel(NioSocketChannel.class)
        .handler(new NettyChannelInitializer(new NettyDefaultChannelHandlerFactory("client",
            this.clientChannelGroup, this.clientEventListener)))
        .option(ChannelOption.SO_REUSEADDR, true)
        .option(ChannelOption.SO_KEEPALIVE, true);

    final ServerBootstrap serverBootstrap = new ServerBootstrap()
        .group(this.serverBossGroup, this.serverWorkerGroup)
        .channel(NioServerSocketChannel.class)
        .childHandler(new NettyChannelInitializer(new NettyDefaultChannelHandlerFactory("server",
            this.serverChannelGroup, this.serverEventListener)))
        .option(ChannelOption.SO_BACKLOG, 128)
        .option(ChannelOption.SO_REUSEADDR, true)
        .childOption(ChannelOption.SO_KEEPALIVE, true);

    LOG.log(Level.FINE, "Binding to {0}:{1}", new Object[] {host, listenPort});

    try {
      if (listenPort > 0) {
        // Fixed port requested: bind exactly once.
        this.localAddress = new InetSocketAddress(host, listenPort);
        this.acceptor = serverBootstrap.bind(this.localAddress).sync().channel();
      } else {
        // Port 0: probe ports from the provider until one binds successfully.
        InetSocketAddress socketAddr = null;
        Channel acceptorFound = null;
        for (int port : tcpPortProvider) {
          LOG.log(Level.FINEST, "Try port {0}", port);
          try {
            socketAddr = new InetSocketAddress(host, port);
            acceptorFound = serverBootstrap.bind(socketAddr).sync().channel();
            break;
          } catch (final Exception ex) {
            if (ex instanceof BindException) { // Not visible to catch :(
              // Port already in use: move on to the next candidate.
              LOG.log(Level.FINEST, "The port {0} is already bound. Try again", port);
            } else {
              throw ex;
            }
          }
        }
        if (acceptorFound == null) {
          throw new IllegalStateException("TcpPortProvider could not find a free port.");
        }
        this.localAddress = socketAddr;
        this.acceptor = acceptorFound;
      }
    } catch (final IllegalStateException | InterruptedException ex) {
      // Could not bind: release the event loops before failing construction.
      LOG.log(Level.SEVERE, "Cannot bind to port " + listenPort, ex);
      this.clientWorkerGroup.shutdownGracefully();
      this.serverBossGroup.shutdownGracefully();
      this.serverWorkerGroup.shutdownGracefully();
      throw new TransportRuntimeException("Cannot bind to port " + listenPort, ex);
    }

    LOG.log(Level.FINE, "Starting netty transport socket address: {0}", this.localAddress);
  }

  /**
   * Closes all channels and releases all resources.
   * Order: close client/server channel groups and the acceptor, start graceful
   * shutdown of the event loops, then wait for everything to finish.
   */
  @Override
  public void close() {
    LOG.log(Level.FINE, "Closing netty transport socket address: {0}", this.localAddress);
    final ChannelGroupFuture clientChannelGroupFuture = this.clientChannelGroup.close();
    final ChannelGroupFuture serverChannelGroupFuture = this.serverChannelGroup.close();
    final ChannelFuture acceptorFuture = this.acceptor.close();

    final ArrayList<Future> eventLoopGroupFutures = new ArrayList<>(3);
    eventLoopGroupFutures.add(this.clientWorkerGroup.shutdownGracefully());
    eventLoopGroupFutures.add(this.serverBossGroup.shutdownGracefully());
    eventLoopGroupFutures.add(this.serverWorkerGroup.shutdownGracefully());

    clientChannelGroupFuture.awaitUninterruptibly();
    serverChannelGroupFuture.awaitUninterruptibly();

    try {
      acceptorFuture.sync();
    } catch (final Exception ex) {
      LOG.log(Level.SEVERE, "Error closing the acceptor channel for " + this.localAddress, ex);
    }

    for (final Future eventLoopGroupFuture : eventLoopGroupFutures) {
      eventLoopGroupFuture.awaitUninterruptibly();
    }
    LOG.log(Level.FINE, "Closing netty transport socket address: {0} done", this.localAddress);
  }

  /**
   * Returns a link for the remote address if cached; otherwise opens, caches and returns.
   * When it opens a link for the remote address, only one attempt for the address is made at a given time.
   * The per-address AtomicInteger flag encodes the connect state:
   * 0 = no connect in progress, 1 = a connect is in progress (others wait),
   * 2 = connected. Wait/notify on the flag hands the result to waiters.
   *
   * @param remoteAddr the remote socket address
   * @param encoder the encoder
   * @param listener the link listener
   * @return a link associated with the address
   * @throws ConnectException when all tries are exhausted without success
   */
  @Override
  public <T> Link<T> open(final SocketAddress remoteAddr, final Encoder<? super T> encoder,
                          final LinkListener<? super T> listener) throws IOException {

    Link<T> link = null;

    for (int i = 0; i <= this.numberOfTries; ++i) {

      LinkReference linkRef = this.addrToLinkRefMap.get(remoteAddr);

      if (linkRef != null) {
        // Cache hit: reuse the existing link if one was stored.
        link = (Link<T>) linkRef.getLink();
        if (LOG.isLoggable(Level.FINE)) {
          LOG.log(Level.FINE, "Link {0} for {1} found", new Object[]{link, remoteAddr});
        }
        if (link != null) {
          return link;
        }
      }

      if (i == this.numberOfTries) {
        // Connection failure
        throw new ConnectException("Connection to " + remoteAddr + " refused");
      }

      LOG.log(Level.FINE, "No cached link for {0} thread {1}",
          new Object[]{remoteAddr, Thread.currentThread()});

      // no linkRef: install a fresh reference, or pick up the one another
      // thread installed first — either way share its connect flag.
      final LinkReference newLinkRef = new LinkReference();
      final LinkReference prior = this.addrToLinkRefMap.putIfAbsent(remoteAddr, newLinkRef);
      final AtomicInteger flag = prior != null ?
          prior.getConnectInProgress() : newLinkRef.getConnectInProgress();

      synchronized (flag) {
        if (!flag.compareAndSet(0, 1)) {
          // Someone else is connecting; wait until they finish (flag leaves 1).
          while (flag.get() == 1) {
            try {
              flag.wait();
              // NOTE(review): InterruptedException is swallowed without
              // re-interrupting — consider Thread.currentThread().interrupt().
            } catch (final InterruptedException ex) {
              LOG.log(Level.WARNING, "Wait interrupted", ex);
            }
          }
        }
      }

      // Re-check: the other thread may have completed the connect for us.
      linkRef = this.addrToLinkRefMap.get(remoteAddr);
      link = (Link<T>) linkRef.getLink();
      if (link != null) {
        return link;
      }

      ChannelFuture connectFuture = null;
      try {
        connectFuture = this.clientBootstrap.connect(remoteAddr);
        connectFuture.syncUninterruptibly();

        link = new NettyLink<>(connectFuture.channel(), encoder, listener);
        linkRef.setLink(link);

        synchronized (flag) {
          // Mark connected and wake all waiters.
          flag.compareAndSet(1, 2);
          flag.notifyAll();
        }
        break;
      } catch (final Exception e) {
        if (e instanceof ConnectException) {
          LOG.log(Level.WARNING, "Connection refused. Retry {0} of {1}",
              new Object[]{i + 1, this.numberOfTries});
          synchronized (flag) {
            // Reset to idle so another thread (or the next loop pass) may retry.
            flag.compareAndSet(1, 0);
            flag.notifyAll();
          }
          if (i < this.numberOfTries) {
            try {
              Thread.sleep(retryTimeout);
            } catch (final InterruptedException interrupt) {
              LOG.log(Level.WARNING, "Thread {0} interrupted while sleeping", Thread.currentThread());
            }
          }
        } else {
          throw e;
        }
      }
    }

    return link;
  }

  /**
   * Returns a link for the remote address if already cached; otherwise, returns null.
   *
   * @param remoteAddr the remote address
   * @return a link if already cached; otherwise, null
   */
  public <T> Link<T> get(final SocketAddress remoteAddr) {
    final LinkReference linkRef = this.addrToLinkRefMap.get(remoteAddr);
    return linkRef != null ? (Link<T>) linkRef.getLink() : null;
  }

  /**
   * Gets a server local socket address of this transport.
   *
   * @return a server local socket address
   */
  @Override
  public SocketAddress getLocalAddress() {
    return this.localAddress;
  }

  /**
   * Gets a server listening port of this transport.
   *
   * @return a listening port number
   */
  @Override
  public int getListeningPort() {
    return this.localAddress.getPort();
  }

  /**
   * Registers the exception event handler on both the client and the server
   * side listeners.
   *
   * @param handler the exception event handler
   */
  @Override
  public void registerErrorHandler(final EventHandler<Exception> handler) {
    this.clientEventListener.registerErrorHandler(handler);
    this.serverEventListener.registerErrorHandler(handler);
  }

  @Override
  public String toString() {
    return String.format("NettyMessagingTransport: { address: %s }", this.localAddress);
  }
}
| |
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInsight.completion;
import com.intellij.codeInsight.completion.impl.CompletionServiceImpl;
import com.intellij.codeInsight.lookup.LookupAdapter;
import com.intellij.codeInsight.lookup.LookupElement;
import com.intellij.codeInsight.lookup.LookupEvent;
import com.intellij.codeInsight.lookup.LookupManager;
import com.intellij.codeInsight.lookup.impl.LookupImpl;
import com.intellij.openapi.application.Application;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.Result;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.command.WriteCommandAction;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.CaretModel;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.SelectionModel;
import com.intellij.openapi.editor.event.*;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.progress.util.ProgressIndicatorBase;
import com.intellij.openapi.util.Computable;
import com.intellij.ui.HintListener;
import com.intellij.ui.LightweightHint;
import com.intellij.util.concurrency.Semaphore;
import com.intellij.util.ui.AsyncProcessIcon;
import com.intellij.util.ui.update.MergingUpdateQueue;
import com.intellij.util.ui.update.Update;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.TestOnly;
import javax.swing.*;
import java.awt.event.KeyAdapter;
import java.awt.event.KeyEvent;
import java.util.EventObject;
/**
* @author peter
*/
public class CompletionProgressIndicator extends ProgressIndicatorBase implements CompletionProcess{
private static final Logger LOG = Logger.getInstance("#com.intellij.codeInsight.completion.CompletionProgressIndicator");
private final Editor myEditor;
private final CompletionParameters myParameters;
private final CodeCompletionHandlerBase myHandler;
private final LookupImpl myLookup;
// Coalesces lookup refresh requests so bursts of added items repaint only once.
private final MergingUpdateQueue myQueue;
private boolean myDisposed;
// True once the lookup has been shown for the first time (see updateLookup()).
private boolean myInitialized;
// Number of lookup items added so far (see addItem()).
private int myCount;
// The merged update task: refreshes the lookup UI on the EDT.
private final Update myUpdate = new Update("update") {
  public void run() {
    updateLookup();
  }
};
private LightweightHint myHint;
private final CompletionContext myContextOriginal;
// Released shortly after the first item arrives (see addItem()), letting the
// caller stop "freezing" the UI while waiting for initial results.
private final Semaphore myFreezeSemaphore;
// Set when a modifier key was pressed/released during completion (see trackModifiers()).
private boolean myModifiersReleased;
// Saved document text / caret / selection used by restorePrefix(); null when nothing to restore.
private String myOldDocumentText;
private int myOldCaret;
private int myOldStart;
private int myOldEnd;
/**
 * Creates and wires up the progress indicator for one completion session:
 * builds the lookup, installs the selection/cancel listeners, registers this
 * indicator as the current completion and starts advertising + modifier tracking.
 * Must run on the EDT (asserted below).
 */
public CompletionProgressIndicator(final Editor editor, CompletionParameters parameters, CodeCompletionHandlerBase handler,
                                   final CompletionContext contextOriginal, Semaphore freezeSemaphore) {
  myEditor = editor;
  myParameters = parameters;
  myHandler = handler;
  myContextOriginal = contextOriginal;
  myFreezeSemaphore = freezeSemaphore;

  myLookup = (LookupImpl)LookupManager.getInstance(editor.getProject()).createLookup(editor, LookupElement.EMPTY_ARRAY, "", new CompletionLookupArranger(parameters));

  myLookup.addLookupListener(new LookupAdapter() {
    public void itemSelected(LookupEvent event) {
      // Stop computing and tear down before inserting the chosen item.
      cancel();
      finishCompletion();

      LookupElement item = event.getItem();
      if (item == null) return;

      setMergeCommand();

      // Rewind the start offset to the beginning of the inserted lookup string.
      contextOriginal.setStartOffset(myEditor.getCaretModel().getOffset() - item.getLookupString().length());
      CodeCompletionHandlerBase.selectLookupItem(item, event.getCompletionChar(), contextOriginal, myLookup.getItems());
    }

    public void lookupCanceled(final LookupEvent event) {
      cancel();
      finishCompletion();
    }
  });
  myLookup.setCalculating(true);

  // 200ms merge window for lookup UI refreshes.
  myQueue = new MergingUpdateQueue("completion lookup progress", 200, true, myEditor.getContentComponent());

  ApplicationManager.getApplication().assertIsDispatchThread();
  registerItself();

  scheduleAdvertising();

  trackModifiers();
}
/**
 * On a pooled thread, asks each contributor for an advertisement text and
 * shows the first non-null one in the lookup. Bails out as soon as some text
 * is already set or the lookup is neither calculating nor visible.
 */
private void scheduleAdvertising() {
  ApplicationManager.getApplication().executeOnPooledThread(new Runnable() {
    public void run() {
      if (myEditor.isDisposed()) return; //tests?
      for (final CompletionContributor contributor : CompletionContributor.forParameters(myParameters)) {
        if (myLookup.getAdvertisementText() != null) return;
        if (!myLookup.isCalculating() && !myLookup.isVisible()) return;

        // Contributors may touch PSI, so query them inside a read action.
        String s = ApplicationManager.getApplication().runReadAction(new Computable<String>() {
          public String compute() {
            return contributor.advertise(myParameters);
          }
        });
        if (myLookup.getAdvertisementText() != null) return;

        if (s != null) {
          myLookup.setAdvertisementText(s);
          // Repaint the lookup on the EDT so the new text becomes visible.
          ApplicationManager.getApplication().invokeLater(new Runnable() {
            public void run() {
              if (myEditor.isDisposed() || myEditor.getComponent().getRootPane() == null) {
                return;
              }
              updateLookup();
            }
          }, myQueue.getModalityState());
          return;
        }
      }
    }
  });
}
/**
 * Installs a key listener that records the first modifier key activity
 * (Ctrl/Meta/Alt/Shift) during completion. On the first such event it sets
 * {@code myModifiersReleased}, cleans up any saved prefix state, and removes
 * itself from the editor component.
 */
private void trackModifiers() {
  final JComponent contentComponent = myEditor.getContentComponent();
  contentComponent.addKeyListener(new KeyAdapter() {
    public void keyPressed(KeyEvent e) {
      processModifier(e);
    }

    public void keyReleased(KeyEvent e) {
      processModifier(e);
    }

    private void processModifier(KeyEvent e) {
      final int code = e.getKeyCode();
      if (code == KeyEvent.VK_CONTROL || code == KeyEvent.VK_META || code == KeyEvent.VK_ALT || code == KeyEvent.VK_SHIFT) {
        myModifiersReleased = true;
        if (myOldDocumentText != null) {
          cleanup();
        }
        // One-shot listener: stop tracking after the first modifier event.
        contentComponent.removeKeyListener(this);
      }
    }
  });
}
// Groups subsequent write commands under one undo group unique to this session,
// so completion-related edits merge into a single undo step.
private void setMergeCommand() {
  CommandProcessor.getInstance().setCurrentCommandGroupId("Completion" + hashCode());
}

// Shows (or refreshes) the lookup popup.
public void showLookup() {
  updateLookup();
}

public CompletionParameters getParameters() {
  return myParameters;
}

// Publishes this indicator as the service-wide "current completion".
private void registerItself() {
  CompletionServiceImpl.getCompletionService().setCurrentCompletion(this);
}
/**
 * Keeps this indicator registered as the current completion even after it
 * finished, until the given hint hides or the user edits the document, moves
 * the caret, or changes the selection — any of which triggers cleanup().
 * Skipped entirely when a modifier was pressed or in unit-test mode.
 *
 * @param hint the hint whose hiding ends the after-life; may be null
 */
public void liveAfterDeath(@Nullable final LightweightHint hint) {
  if (myModifiersReleased || ApplicationManager.getApplication().isUnitTestMode()) {
    return;
  }

  registerItself();
  myHint = hint;
  if (hint != null) {
    hint.addHintListener(new HintListener() {
      public void hintHidden(final EventObject event) {
        hint.removeHintListener(this);
        cleanup();
      }
    });
  }
  // Each listener below is one-shot: it removes itself before cleaning up.
  final Document document = myEditor.getDocument();
  document.addDocumentListener(new DocumentAdapter() {
    @Override
    public void beforeDocumentChange(DocumentEvent e) {
      document.removeDocumentListener(this);
      cleanup();
    }
  });
  final SelectionModel selectionModel = myEditor.getSelectionModel();
  selectionModel.addSelectionListener(new SelectionListener() {
    public void selectionChanged(SelectionEvent e) {
      selectionModel.removeSelectionListener(this);
      cleanup();
    }
  });
  final CaretModel caretModel = myEditor.getCaretModel();
  caretModel.addCaretListener(new CaretListener() {
    public void caretPositionChanged(CaretEvent e) {
      caretModel.removeCaretListener(this);
      cleanup();
    }
  });
}
public CodeCompletionHandlerBase getHandler() {
  return myHandler;
}

public LookupImpl getLookup() {
  return myLookup;
}
/**
 * Refreshes the lookup UI; on the first call also shows the lookup and, if
 * items are still being computed, the spinning process icon. EDT only.
 * No-op when the editor or this indicator is already disposed.
 */
private void updateLookup() {
  ApplicationManager.getApplication().assertIsDispatchThread();
  if (myEditor.isDisposed() || myDisposed) return;

  if (!myInitialized) {
    myInitialized = true;
    if (myLookup.isCalculating()) {
      final AsyncProcessIcon processIcon = myLookup.getProcessIcon();
      processIcon.setVisible(true);
      processIcon.resume();
    }
    myLookup.show();
  }
  myLookup.refreshUi();
}
// Number of lookup items added so far.
public int getCount() {
  return myCount;
}

// True when the caret sits before the end of the identifier being completed
// (i.e. completion was invoked in the middle of an identifier).
private boolean isInsideIdentifier() {
  return myContextOriginal.getOffsetMap().getOffset(CompletionInitializationContext.IDENTIFIER_END_OFFSET) != myContextOriginal.getSelectionEndOffset();
}
/**
 * Adds one element to the lookup. Called from the completion computation
 * thread (asserted off-EDT outside unit tests); ignored once the indicator
 * stopped running. After the very first item, a pooled thread releases
 * {@code myFreezeSemaphore} 300 ms later so the caller can stop waiting for
 * initial results. Each addition queues a merged UI refresh.
 */
public synchronized void addItem(final LookupElement item) {
  if (!isRunning()) return;
  ProgressManager.getInstance().checkCanceled();

  final boolean unitTestMode = ApplicationManager.getApplication().isUnitTestMode();
  if (!unitTestMode) {
    assert !ApplicationManager.getApplication().isDispatchThread();
  }

  myLookup.addItem(item);
  myCount++;
  if (unitTestMode) return;

  if (myCount == 1) {
    // Let the first item "freeze window" elapse off the EDT.
    ApplicationManager.getApplication().executeOnPooledThread(new Runnable() {
      public void run() {
        try {
          Thread.sleep(300);
        }
        catch (InterruptedException e) {
          LOG.error(e);
        }
        myFreezeSemaphore.up();
      }
    });
  }
  myQueue.queue(myUpdate);
}
/**
 * Closes the auto-popup hint (if any) and hides the active lookup, which in turn
 * finishes this completion session.
 */
public void closeAndFinish() {
if (myHint != null) {
myHint.hide();
}
LookupManager.getInstance(myEditor.getProject()).hideActiveLookup();
}
/**
 * Marks this completion as disposed and releases its resources. Must be called
 * exactly once, on the EDT (the leading assert enforces single invocation).
 */
private void finishCompletion() {
assert !myDisposed;
myDisposed = true;
ApplicationManager.getApplication().assertIsDispatchThread();
myQueue.dispose();
cleanup();
}
/**
 * Test-only hook: force-finishes whatever completion session is currently active
 * so that state does not leak between tests.
 */
@TestOnly
public static void cleanupForNextTest() {
    final CompletionProgressIndicator current =
            CompletionServiceImpl.getCompletionService().getCurrentCompletion();
    if (current == null) {
        return;
    }
    current.finishCompletion();
}
/**
 * Drops references held by this indicator (hint, document snapshot) and
 * unregisters it as the service's current completion.
 */
private void cleanup() {
myHint = null;
myOldDocumentText = null;
CompletionServiceImpl.getCompletionService().setCurrentCompletion(null);
}
/**
 * Stops the indicator: cancels pending lookup updates, delegates to the
 * superclass, then (on the EDT) hides the process icon and refreshes the lookup
 * one last time — unless the completion was canceled in the meantime.
 */
public void stop() {
myQueue.cancelAllUpdates();
super.stop();
invokeLaterIfNotDispatch(new Runnable() {
public void run() {
// If the user canceled, the lookup is being torn down; don't touch it.
if (isCanceled()) return;
if (myLookup.isVisible()) {
myLookup.getProcessIcon().suspend();
myLookup.getProcessIcon().setVisible(false);
updateLookup();
}
}
});
}
/**
 * Runs {@code runnable} synchronously when already on the EDT (or in unit tests);
 * otherwise defers it via {@code invokeLater} using the update queue's modality.
 */
private void invokeLaterIfNotDispatch(final Runnable runnable) {
    final Application application = ApplicationManager.getApplication();
    final boolean runDirectly = application.isDispatchThread() || application.isUnitTestMode();
    if (runDirectly) {
        runnable.run();
        return;
    }
    application.invokeLater(runnable, myQueue.getModalityState());
}
/**
 * Attempts to insert the common prefix of all lookup items into the editor,
 * wrapped in a write command.
 *
 * <p>Fix: if the lookup call threw, the {@code Result} was never set, so
 * {@code getResultObject()} returned {@code null} and the unconditional
 * {@code booleanValue()} call raised an NPE. The catch block now records
 * {@code false} and the return is additionally null-guarded.
 *
 * @param explicit whether the user explicitly requested prefix completion
 *                 (implicit calls are merged into the previous undo command)
 * @return true if a common prefix was actually inserted
 */
public boolean fillInCommonPrefix(final boolean explicit) {
    if (isInsideIdentifier()) {
        return false;
    }
    final Boolean aBoolean = new WriteCommandAction<Boolean>(myEditor.getProject()) {
        protected void run(Result<Boolean> result) throws Throwable {
            if (!explicit) {
                setMergeCommand();
            }
            try {
                result.setResult(myLookup.fillInCommonPrefix(explicit));
            }
            catch (Exception e) {
                LOG.error(e);
                // Ensure the result is always set so callers never observe null.
                result.setResult(false);
            }
        }
    }.execute().getResultObject();
    return aBoolean != null && aBoolean.booleanValue();
}
/** Returns true once updateLookup() has shown the lookup for the first time. */
public boolean isInitialized() {
return myInitialized;
}
/**
 * Rolls the editor back to the state captured by rememberDocumentState():
 * document text, selection and caret. Falls back to the lookup's own prefix
 * restoration when no document snapshot was taken.
 */
public void restorePrefix() {
setMergeCommand();
if (myOldDocumentText != null) {
myEditor.getDocument().setText(myOldDocumentText);
myEditor.getSelectionModel().setSelection(myOldStart, myOldEnd);
myEditor.getCaretModel().moveToOffset(myOldCaret);
// Clear the snapshot so a second call doesn't re-apply stale text.
myOldDocumentText = null;
return;
}
getLookup().restorePrefix();
}
/** Returns the editor this completion session is running in. */
public Editor getEditor() {
return myEditor;
}
/**
 * Snapshots the document text, caret offset and selection bounds so that
 * restorePrefix() can later roll the editor back. Skipped once the completion
 * modifiers have been released.
 */
public void rememberDocumentState() {
    if (myModifiersReleased) {
        return;
    }
    final Document document = myEditor.getDocument();
    final SelectionModel selection = myEditor.getSelectionModel();
    myOldDocumentText = document.getText();
    myOldCaret = myEditor.getCaretModel().getOffset();
    myOldStart = selection.getSelectionStart();
    myOldEnd = selection.getSelectionEnd();
}
}
| |
/*
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.ads.googleads.v9.services;
import com.google.ads.googleads.v9.resources.ProductGroupView;
import com.google.ads.googleads.v9.resources.ProductGroupViewName;
import com.google.ads.googleads.v9.services.stub.ProductGroupViewServiceStub;
import com.google.ads.googleads.v9.services.stub.ProductGroupViewServiceStubSettings;
import com.google.api.core.BetaApi;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.rpc.UnaryCallable;
import java.io.IOException;
import java.util.concurrent.TimeUnit;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* Service Description: Service to manage product group views.
*
* <p>This class provides the ability to make remote calls to the backing service through method
* calls that map to API methods. Sample code to get started:
*
* <pre>{@code
* try (ProductGroupViewServiceClient productGroupViewServiceClient =
* ProductGroupViewServiceClient.create()) {
* ProductGroupViewName resourceName =
* ProductGroupViewName.of("[CUSTOMER_ID]", "[ADGROUP_ID]", "[CRITERION_ID]");
* ProductGroupView response = productGroupViewServiceClient.getProductGroupView(resourceName);
* }
* }</pre>
*
* <p>Note: close() needs to be called on the ProductGroupViewServiceClient object to clean up
* resources such as threads. In the example above, try-with-resources is used, which automatically
* calls close().
*
* <p>The surface of this class includes several types of Java methods for each of the API's
* methods:
*
* <ol>
* <li> A "flattened" method. With this type of method, the fields of the request type have been
* converted into function parameters. It may be the case that not all fields are available as
* parameters, and not every API method will have a flattened method entry point.
* <li> A "request object" method. This type of method only takes one parameter, a request object,
* which must be constructed before the call. Not every API method will have a request object
* method.
* <li> A "callable" method. This type of method takes no parameters and returns an immutable API
* callable object, which can be used to initiate calls to the service.
* </ol>
*
* <p>See the individual methods for example code.
*
* <p>Many parameters require resource names to be formatted in a particular way. To assist with
* these names, this class includes a format method for each type of name, and additionally a parse
* method to extract the individual identifiers contained within names that are returned.
*
* <p>This class can be customized by passing in a custom instance of
* ProductGroupViewServiceSettings to create(). For example:
*
* <p>To customize credentials:
*
* <pre>{@code
* ProductGroupViewServiceSettings productGroupViewServiceSettings =
* ProductGroupViewServiceSettings.newBuilder()
* .setCredentialsProvider(FixedCredentialsProvider.create(myCredentials))
* .build();
* ProductGroupViewServiceClient productGroupViewServiceClient =
* ProductGroupViewServiceClient.create(productGroupViewServiceSettings);
* }</pre>
*
* <p>To customize the endpoint:
*
* <pre>{@code
* ProductGroupViewServiceSettings productGroupViewServiceSettings =
* ProductGroupViewServiceSettings.newBuilder().setEndpoint(myEndpoint).build();
* ProductGroupViewServiceClient productGroupViewServiceClient =
* ProductGroupViewServiceClient.create(productGroupViewServiceSettings);
* }</pre>
*
* <p>Please refer to the GitHub repository's samples for more quickstart code snippets.
*/
@Generated("by gapic-generator-java")
public class ProductGroupViewServiceClient implements BackgroundResource {
  // Settings are null when the client was constructed directly from a stub.
  private final ProductGroupViewServiceSettings settings;
  private final ProductGroupViewServiceStub stub;

  /** Creates a client using the default settings. */
  public static final ProductGroupViewServiceClient create() throws IOException {
    final ProductGroupViewServiceSettings defaultSettings =
        ProductGroupViewServiceSettings.newBuilder().build();
    return create(defaultSettings);
  }

  /**
   * Creates a client using the supplied settings. Channels are built from those
   * settings, with defaults filled in for anything not set.
   */
  public static final ProductGroupViewServiceClient create(ProductGroupViewServiceSettings settings)
      throws IOException {
    return new ProductGroupViewServiceClient(settings);
  }

  /**
   * Creates a client around an existing stub. Advanced usage only — prefer
   * create(ProductGroupViewServiceSettings).
   */
  @BetaApi("A restructuring of stub classes is planned, so this may break in the future")
  public static final ProductGroupViewServiceClient create(ProductGroupViewServiceStub stub) {
    return new ProductGroupViewServiceClient(stub);
  }

  /**
   * Builds a client from settings. Protected so subclassing is easy; otherwise the
   * static factory methods should be preferred.
   */
  protected ProductGroupViewServiceClient(ProductGroupViewServiceSettings settings)
      throws IOException {
    this.settings = settings;
    this.stub = ((ProductGroupViewServiceStubSettings) settings.getStubSettings()).createStub();
  }

  /** Builds a client that delegates straight to the given stub; no settings retained. */
  @BetaApi("A restructuring of stub classes is planned, so this may break in the future")
  protected ProductGroupViewServiceClient(ProductGroupViewServiceStub stub) {
    this.settings = null;
    this.stub = stub;
  }

  /** Returns the settings this client was created with, or null if stub-constructed. */
  public final ProductGroupViewServiceSettings getSettings() {
    return settings;
  }

  /** Returns the underlying transport stub. */
  @BetaApi("A restructuring of stub classes is planned, so this may break in the future")
  public ProductGroupViewServiceStub getStub() {
    return stub;
  }

  /**
   * Returns the requested product group view in full detail.
   *
   * @param resourceName Required. The resource name of the product group view to fetch.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final ProductGroupView getProductGroupView(ProductGroupViewName resourceName) {
    final String name = resourceName == null ? null : resourceName.toString();
    final GetProductGroupViewRequest request =
        GetProductGroupViewRequest.newBuilder().setResourceName(name).build();
    return getProductGroupView(request);
  }

  /**
   * Returns the requested product group view in full detail.
   *
   * @param resourceName Required. The resource name of the product group view to fetch.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final ProductGroupView getProductGroupView(String resourceName) {
    final GetProductGroupViewRequest request =
        GetProductGroupViewRequest.newBuilder().setResourceName(resourceName).build();
    return getProductGroupView(request);
  }

  /**
   * Returns the requested product group view in full detail.
   *
   * @param request The request object containing all of the parameters for the API call.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final ProductGroupView getProductGroupView(GetProductGroupViewRequest request) {
    return getProductGroupViewCallable().call(request);
  }

  /**
   * Returns the callable backing getProductGroupView; use {@code futureCall(request)}
   * on it for asynchronous invocation.
   */
  public final UnaryCallable<GetProductGroupViewRequest, ProductGroupView>
      getProductGroupViewCallable() {
    return stub.getProductGroupViewCallable();
  }

  // BackgroundResource lifecycle methods all delegate to the stub.

  @Override
  public final void close() {
    stub.close();
  }

  @Override
  public void shutdown() {
    stub.shutdown();
  }

  @Override
  public boolean isShutdown() {
    return stub.isShutdown();
  }

  @Override
  public boolean isTerminated() {
    return stub.isTerminated();
  }

  @Override
  public void shutdownNow() {
    stub.shutdownNow();
  }

  @Override
  public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException {
    return stub.awaitTermination(duration, unit);
  }
}
| |
/*
* Copyright 2014 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.handler.codec.spdy;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufAllocator;
import io.netty.buffer.Unpooled;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
/**
 * Unit tests for {@link SpdyHeaderBlockRawDecoder}. Each test hand-builds a SPDY/3.1
 * raw header block — a 32-bit name/value pair count followed by length-prefixed name
 * and value byte strings, where one value field may hold multiple entries separated
 * by single NUL bytes — feeds it to the decoder and checks the resulting frame.
 *
 * <p>maxHeaderSize is deliberately tiny (16) so the truncation tests can exceed it
 * with short inputs.
 */
public class SpdyHeaderBlockRawDecoderTest {
// Accumulated-header-byte limit; entries beyond it mark the frame truncated.
private static final int maxHeaderSize = 16;
private static final String name = "name";
private static final String value = "value";
private static final byte[] nameBytes = name.getBytes();
private static final byte[] valueBytes = value.getBytes();
private SpdyHeaderBlockRawDecoder decoder;
private SpdyHeadersFrame frame;
@Before
public void setUp() {
decoder = new SpdyHeaderBlockRawDecoder(SpdyVersion.SPDY_3_1, maxHeaderSize);
frame = new DefaultSpdyHeadersFrame(1);
}
@After
public void tearDown() {
decoder.end();
}
// An entirely empty block is invalid: even the 4-byte pair count is missing.
@Test
public void testEmptyHeaderBlock() throws Exception {
ByteBuf headerBlock = Unpooled.EMPTY_BUFFER;
decoder.decode(ByteBufAllocator.DEFAULT, headerBlock, frame);
decoder.endHeaderBlock(frame);
assertFalse(headerBlock.isReadable());
assertTrue(frame.isInvalid());
assertEquals(0, frame.headers().names().size());
headerBlock.release();
}
// A pair count of zero is a legal, empty header block.
@Test
public void testZeroNameValuePairs() throws Exception {
ByteBuf headerBlock = Unpooled.buffer(4);
headerBlock.writeInt(0);
decoder.decode(ByteBufAllocator.DEFAULT, headerBlock, frame);
decoder.endHeaderBlock(frame);
assertFalse(headerBlock.isReadable());
assertFalse(frame.isInvalid());
assertEquals(0, frame.headers().names().size());
headerBlock.release();
}
// A negative pair count is rejected immediately, before endHeaderBlock.
@Test
public void testNegativeNameValuePairs() throws Exception {
ByteBuf headerBlock = Unpooled.buffer(4);
headerBlock.writeInt(-1);
decoder.decode(ByteBufAllocator.DEFAULT, headerBlock, frame);
assertFalse(headerBlock.isReadable());
assertTrue(frame.isInvalid());
assertEquals(0, frame.headers().names().size());
headerBlock.release();
}
// Happy path: one well-formed name/value pair decodes into a single header.
@Test
public void testOneNameValuePair() throws Exception {
ByteBuf headerBlock = Unpooled.buffer(21);
headerBlock.writeInt(1);
headerBlock.writeInt(4);
headerBlock.writeBytes(nameBytes);
headerBlock.writeInt(5);
headerBlock.writeBytes(valueBytes);
decoder.decode(ByteBufAllocator.DEFAULT, headerBlock, frame);
decoder.endHeaderBlock(frame);
assertFalse(headerBlock.isReadable());
assertFalse(frame.isInvalid());
assertEquals(1, frame.headers().names().size());
assertTrue(frame.headers().contains(name));
assertEquals(1, frame.headers().getAll(name).size());
assertEquals(value, frame.headers().get(name));
headerBlock.release();
}
// Count promises one pair but the block ends before the name length field.
@Test
public void testMissingNameLength() throws Exception {
ByteBuf headerBlock = Unpooled.buffer(4);
headerBlock.writeInt(1);
decoder.decode(ByteBufAllocator.DEFAULT, headerBlock, frame);
decoder.endHeaderBlock(frame);
assertFalse(headerBlock.isReadable());
assertTrue(frame.isInvalid());
assertEquals(0, frame.headers().names().size());
headerBlock.release();
}
// Zero-length names are illegal and detected without endHeaderBlock.
@Test
public void testZeroNameLength() throws Exception {
ByteBuf headerBlock = Unpooled.buffer(8);
headerBlock.writeInt(1);
headerBlock.writeInt(0);
decoder.decode(ByteBufAllocator.DEFAULT, headerBlock, frame);
assertFalse(headerBlock.isReadable());
assertTrue(frame.isInvalid());
assertEquals(0, frame.headers().names().size());
headerBlock.release();
}
// Negative name lengths are likewise rejected immediately.
@Test
public void testNegativeNameLength() throws Exception {
ByteBuf headerBlock = Unpooled.buffer(8);
headerBlock.writeInt(1);
headerBlock.writeInt(-1);
decoder.decode(ByteBufAllocator.DEFAULT, headerBlock, frame);
assertFalse(headerBlock.isReadable());
assertTrue(frame.isInvalid());
assertEquals(0, frame.headers().names().size());
headerBlock.release();
}
// Name length says 4 bytes but the name bytes themselves never arrive.
@Test
public void testMissingName() throws Exception {
ByteBuf headerBlock = Unpooled.buffer(8);
headerBlock.writeInt(1);
headerBlock.writeInt(4);
decoder.decode(ByteBufAllocator.DEFAULT, headerBlock, frame);
decoder.endHeaderBlock(frame);
assertFalse(headerBlock.isReadable());
assertTrue(frame.isInvalid());
assertEquals(0, frame.headers().names().size());
headerBlock.release();
}
// A name consisting of a single NUL byte is illegal.
@Test
public void testIllegalNameOnlyNull() throws Exception {
ByteBuf headerBlock = Unpooled.buffer(18);
headerBlock.writeInt(1);
headerBlock.writeInt(1);
headerBlock.writeByte(0);
headerBlock.writeInt(5);
headerBlock.writeBytes(valueBytes);
decoder.decode(ByteBufAllocator.DEFAULT, headerBlock, frame);
assertFalse(headerBlock.isReadable());
assertTrue(frame.isInvalid());
assertEquals(0, frame.headers().names().size());
headerBlock.release();
}
// Block ends right after the name, before the value length field.
@Test
public void testMissingValueLength() throws Exception {
ByteBuf headerBlock = Unpooled.buffer(12);
headerBlock.writeInt(1);
headerBlock.writeInt(4);
headerBlock.writeBytes(nameBytes);
decoder.decode(ByteBufAllocator.DEFAULT, headerBlock, frame);
decoder.endHeaderBlock(frame);
assertFalse(headerBlock.isReadable());
assertTrue(frame.isInvalid());
assertEquals(0, frame.headers().names().size());
headerBlock.release();
}
// Unlike names, zero-length values are legal and decode to an empty string.
@Test
public void testZeroValueLength() throws Exception {
ByteBuf headerBlock = Unpooled.buffer(16);
headerBlock.writeInt(1);
headerBlock.writeInt(4);
headerBlock.writeBytes(nameBytes);
headerBlock.writeInt(0);
decoder.decode(ByteBufAllocator.DEFAULT, headerBlock, frame);
decoder.endHeaderBlock(frame);
assertFalse(headerBlock.isReadable());
assertFalse(frame.isInvalid());
assertEquals(1, frame.headers().names().size());
assertTrue(frame.headers().contains(name));
assertEquals(1, frame.headers().getAll(name).size());
assertEquals("", frame.headers().get(name));
headerBlock.release();
}
// Negative value lengths are rejected immediately.
@Test
public void testNegativeValueLength() throws Exception {
ByteBuf headerBlock = Unpooled.buffer(16);
headerBlock.writeInt(1);
headerBlock.writeInt(4);
headerBlock.writeBytes(nameBytes);
headerBlock.writeInt(-1);
decoder.decode(ByteBufAllocator.DEFAULT, headerBlock, frame);
assertFalse(headerBlock.isReadable());
assertTrue(frame.isInvalid());
assertEquals(0, frame.headers().names().size());
headerBlock.release();
}
// Value length says 5 bytes but the value bytes never arrive.
@Test
public void testMissingValue() throws Exception {
ByteBuf headerBlock = Unpooled.buffer(16);
headerBlock.writeInt(1);
headerBlock.writeInt(4);
headerBlock.writeBytes(nameBytes);
headerBlock.writeInt(5);
decoder.decode(ByteBufAllocator.DEFAULT, headerBlock, frame);
decoder.endHeaderBlock(frame);
assertFalse(headerBlock.isReadable());
assertTrue(frame.isInvalid());
assertEquals(0, frame.headers().names().size());
headerBlock.release();
}
// A value consisting of a single NUL byte is illegal.
@Test
public void testIllegalValueOnlyNull() throws Exception {
ByteBuf headerBlock = Unpooled.buffer(17);
headerBlock.writeInt(1);
headerBlock.writeInt(4);
headerBlock.writeBytes(nameBytes);
headerBlock.writeInt(1);
headerBlock.writeByte(0);
decoder.decode(ByteBufAllocator.DEFAULT, headerBlock, frame);
assertFalse(headerBlock.isReadable());
assertTrue(frame.isInvalid());
assertEquals(0, frame.headers().names().size());
headerBlock.release();
}
// A leading NUL in the value (empty first entry) is illegal.
@Test
public void testIllegalValueStartsWithNull() throws Exception {
ByteBuf headerBlock = Unpooled.buffer(22);
headerBlock.writeInt(1);
headerBlock.writeInt(4);
headerBlock.writeBytes(nameBytes);
headerBlock.writeInt(6);
headerBlock.writeByte(0);
headerBlock.writeBytes(valueBytes);
decoder.decode(ByteBufAllocator.DEFAULT, headerBlock, frame);
assertFalse(headerBlock.isReadable());
assertTrue(frame.isInvalid());
assertEquals(0, frame.headers().names().size());
headerBlock.release();
}
// A trailing NUL in the value (empty last entry) is illegal.
@Test
public void testIllegalValueEndsWithNull() throws Exception {
ByteBuf headerBlock = Unpooled.buffer(22);
headerBlock.writeInt(1);
headerBlock.writeInt(4);
headerBlock.writeBytes(nameBytes);
headerBlock.writeInt(6);
headerBlock.writeBytes(valueBytes);
headerBlock.writeByte(0);
decoder.decode(ByteBufAllocator.DEFAULT, headerBlock, frame);
assertFalse(headerBlock.isReadable());
assertTrue(frame.isInvalid());
assertEquals(0, frame.headers().names().size());
headerBlock.release();
}
// Two values separated by a single NUL decode into two entries for one name.
@Test
public void testMultipleValues() throws Exception {
ByteBuf headerBlock = Unpooled.buffer(27);
headerBlock.writeInt(1);
headerBlock.writeInt(4);
headerBlock.writeBytes(nameBytes);
headerBlock.writeInt(11);
headerBlock.writeBytes(valueBytes);
headerBlock.writeByte(0);
headerBlock.writeBytes(valueBytes);
decoder.decode(ByteBufAllocator.DEFAULT, headerBlock, frame);
decoder.endHeaderBlock(frame);
assertFalse(headerBlock.isReadable());
assertFalse(frame.isInvalid());
assertEquals(1, frame.headers().names().size());
assertTrue(frame.headers().contains(name));
assertEquals(2, frame.headers().getAll(name).size());
assertEquals(value, frame.headers().getAll(name).get(0));
assertEquals(value, frame.headers().getAll(name).get(1));
headerBlock.release();
}
// A NUL after the last entry invalidates the frame; the first value is kept.
@Test
public void testMultipleValuesEndsWithNull() throws Exception {
ByteBuf headerBlock = Unpooled.buffer(28);
headerBlock.writeInt(1);
headerBlock.writeInt(4);
headerBlock.writeBytes(nameBytes);
headerBlock.writeInt(12);
headerBlock.writeBytes(valueBytes);
headerBlock.writeByte(0);
headerBlock.writeBytes(valueBytes);
headerBlock.writeByte(0);
decoder.decode(ByteBufAllocator.DEFAULT, headerBlock, frame);
assertFalse(headerBlock.isReadable());
assertTrue(frame.isInvalid());
assertEquals(1, frame.headers().names().size());
assertTrue(frame.headers().contains(name));
assertEquals(1, frame.headers().getAll(name).size());
assertEquals(value, frame.headers().get(name));
headerBlock.release();
}
// Consecutive NULs (empty middle entry) invalidate and drop the whole header.
@Test
public void testIllegalValueMultipleNulls() throws Exception {
ByteBuf headerBlock = Unpooled.buffer(28);
headerBlock.writeInt(1);
headerBlock.writeInt(4);
headerBlock.writeBytes(nameBytes);
headerBlock.writeInt(12);
headerBlock.writeBytes(valueBytes);
headerBlock.writeByte(0);
headerBlock.writeByte(0);
headerBlock.writeBytes(valueBytes);
decoder.decode(ByteBufAllocator.DEFAULT, headerBlock, frame);
decoder.endHeaderBlock(frame);
assertFalse(headerBlock.isReadable());
assertTrue(frame.isInvalid());
assertEquals(0, frame.headers().names().size());
headerBlock.release();
}
// Count promises two pairs but only one is present: first pair kept, frame invalid.
@Test
public void testMissingNextNameValuePair() throws Exception {
ByteBuf headerBlock = Unpooled.buffer(21);
headerBlock.writeInt(2);
headerBlock.writeInt(4);
headerBlock.writeBytes(nameBytes);
headerBlock.writeInt(5);
headerBlock.writeBytes(valueBytes);
decoder.decode(ByteBufAllocator.DEFAULT, headerBlock, frame);
decoder.endHeaderBlock(frame);
assertFalse(headerBlock.isReadable());
assertTrue(frame.isInvalid());
assertEquals(1, frame.headers().names().size());
assertTrue(frame.headers().contains(name));
assertEquals(1, frame.headers().getAll(name).size());
assertEquals(value, frame.headers().get(name));
headerBlock.release();
}
// The same name appearing twice is illegal: first occurrence kept, frame invalid.
@Test
public void testMultipleNames() throws Exception {
ByteBuf headerBlock = Unpooled.buffer(38);
headerBlock.writeInt(2);
headerBlock.writeInt(4);
headerBlock.writeBytes(nameBytes);
headerBlock.writeInt(5);
headerBlock.writeBytes(valueBytes);
headerBlock.writeInt(4);
headerBlock.writeBytes(nameBytes);
headerBlock.writeInt(5);
headerBlock.writeBytes(valueBytes);
decoder.decode(ByteBufAllocator.DEFAULT, headerBlock, frame);
assertFalse(headerBlock.isReadable());
assertTrue(frame.isInvalid());
assertEquals(1, frame.headers().names().size());
assertTrue(frame.headers().contains(name));
assertEquals(1, frame.headers().getAll(name).size());
assertEquals(value, frame.headers().get(name));
headerBlock.release();
}
// Bytes left over after the promised pair count invalidate the frame; header kept.
@Test
public void testExtraData() throws Exception {
ByteBuf headerBlock = Unpooled.buffer(22);
headerBlock.writeInt(1);
headerBlock.writeInt(4);
headerBlock.writeBytes(nameBytes);
headerBlock.writeInt(5);
headerBlock.writeBytes(valueBytes);
headerBlock.writeByte(0);
decoder.decode(ByteBufAllocator.DEFAULT, headerBlock, frame);
assertFalse(headerBlock.isReadable());
assertTrue(frame.isInvalid());
assertEquals(1, frame.headers().names().size());
assertTrue(frame.headers().contains(name));
assertEquals(1, frame.headers().getAll(name).size());
assertEquals(value, frame.headers().get(name));
headerBlock.release();
}
// Feeding the block one byte at a time exercises the decoder's incremental state.
@Test
public void testMultipleDecodes() throws Exception {
ByteBuf headerBlock = Unpooled.buffer(21);
headerBlock.writeInt(1);
headerBlock.writeInt(4);
headerBlock.writeBytes(nameBytes);
headerBlock.writeInt(5);
headerBlock.writeBytes(valueBytes);
int readableBytes = headerBlock.readableBytes();
for (int i = 0; i < readableBytes; i++) {
ByteBuf headerBlockSegment = headerBlock.slice(i, 1);
decoder.decode(ByteBufAllocator.DEFAULT, headerBlockSegment, frame);
assertFalse(headerBlockSegment.isReadable());
}
decoder.endHeaderBlock(frame);
assertFalse(frame.isInvalid());
assertEquals(1, frame.headers().names().size());
assertTrue(frame.headers().contains(name));
assertEquals(1, frame.headers().getAll(name).size());
assertEquals(value, frame.headers().get(name));
headerBlock.release();
}
// Marking the frame invalid mid-decode must not stop the decoder from
// consuming the remaining input and storing the already-started header.
@Test
public void testContinueAfterInvalidHeaders() throws Exception {
ByteBuf numHeaders = Unpooled.buffer(4);
numHeaders.writeInt(1);
ByteBuf nameBlock = Unpooled.buffer(8);
nameBlock.writeInt(4);
nameBlock.writeBytes(nameBytes);
ByteBuf valueBlock = Unpooled.buffer(9);
valueBlock.writeInt(5);
valueBlock.writeBytes(valueBytes);
decoder.decode(ByteBufAllocator.DEFAULT, numHeaders, frame);
decoder.decode(ByteBufAllocator.DEFAULT, nameBlock, frame);
frame.setInvalid();
decoder.decode(ByteBufAllocator.DEFAULT, valueBlock, frame);
decoder.endHeaderBlock(frame);
assertFalse(numHeaders.isReadable());
assertFalse(nameBlock.isReadable());
assertFalse(valueBlock.isReadable());
assertEquals(1, frame.headers().names().size());
assertTrue(frame.headers().contains(name));
assertEquals(1, frame.headers().getAll(name).size());
assertEquals(value, frame.headers().get(name));
numHeaders.release();
nameBlock.release();
valueBlock.release();
}
// A name longer than maxHeaderSize truncates (not invalidates) and is skipped.
@Test
public void testTruncatedHeaderName() throws Exception {
ByteBuf headerBlock = Unpooled.buffer(maxHeaderSize + 18);
headerBlock.writeInt(1);
headerBlock.writeInt(maxHeaderSize + 1);
for (int i = 0; i < maxHeaderSize + 1; i++) {
headerBlock.writeByte('a');
}
headerBlock.writeInt(5);
headerBlock.writeBytes(valueBytes);
decoder.decode(ByteBufAllocator.DEFAULT, headerBlock, frame);
decoder.endHeaderBlock(frame);
assertFalse(headerBlock.isReadable());
assertTrue(frame.isTruncated());
assertFalse(frame.isInvalid());
assertEquals(0, frame.headers().names().size());
headerBlock.release();
}
// A value that would push name+value past maxHeaderSize also truncates.
@Test
public void testTruncatedHeaderValue() throws Exception {
ByteBuf headerBlock = Unpooled.buffer(maxHeaderSize + 13);
headerBlock.writeInt(1);
headerBlock.writeInt(4);
headerBlock.writeBytes(nameBytes);
headerBlock.writeInt(13);
for (int i = 0; i < maxHeaderSize - 3; i++) {
headerBlock.writeByte('a');
}
decoder.decode(ByteBufAllocator.DEFAULT, headerBlock, frame);
decoder.endHeaderBlock(frame);
assertFalse(headerBlock.isReadable());
assertTrue(frame.isTruncated());
assertFalse(frame.isInvalid());
assertEquals(0, frame.headers().names().size());
headerBlock.release();
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.plan.rules.logical;
import org.apache.flink.table.expressions.ResolvedFieldReference;
import org.apache.flink.table.plan.logical.LogicalWindow;
import org.apache.flink.table.plan.logical.rel.LogicalTableAggregate;
import org.apache.flink.table.plan.logical.rel.LogicalWindowAggregate;
import org.apache.flink.table.plan.logical.rel.LogicalWindowTableAggregate;
import org.apache.flink.table.plan.logical.rel.TableAggregate;
import org.apache.calcite.plan.RelOptRuleCall;
import org.apache.calcite.plan.RelOptRuleOperand;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.SingleRel;
import org.apache.calcite.rel.core.Aggregate;
import org.apache.calcite.rel.core.AggregateCall;
import org.apache.calcite.rel.core.Project;
import org.apache.calcite.rel.core.RelFactories;
import org.apache.calcite.rel.logical.LogicalAggregate;
import org.apache.calcite.rel.rules.AggregateExtractProjectRule;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.tools.RelBuilder;
import org.apache.calcite.tools.RelBuilderFactory;
import org.apache.calcite.util.ImmutableBitSet;
import org.apache.calcite.util.mapping.Mapping;
import org.apache.calcite.util.mapping.MappingType;
import org.apache.calcite.util.mapping.Mappings;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
/**
* Rule to extract a {@link org.apache.calcite.rel.core.Project} from a {@link LogicalAggregate},
* a {@link LogicalWindowAggregate} or a {@link TableAggregate} and push it down towards
* the input.
*
* <p>Note: Most of the logic in this rule is same with {@link AggregateExtractProjectRule}. The
* difference is this rule has also taken the {@link LogicalWindowAggregate} and
* {@link TableAggregate} into consideration. Furthermore, this rule also creates trivial
* {@link Project}s unless the input node is already a {@link Project}.
*/
public class ExtendedAggregateExtractProjectRule extends AggregateExtractProjectRule {
// Singleton instance: matches any SingleRel over any input and uses the default
// logical RelBuilder factory. matches() below narrows to the aggregate variants.
public static final ExtendedAggregateExtractProjectRule INSTANCE =
new ExtendedAggregateExtractProjectRule(
operand(SingleRel.class,
operand(RelNode.class, any())), RelFactories.LOGICAL_BUILDER);
/**
 * Creates the rule.
 *
 * @param operand        the operand tree this rule matches against
 * @param builderFactory factory for the RelBuilders used while rewriting
 */
public ExtendedAggregateExtractProjectRule(
RelOptRuleOperand operand,
RelBuilderFactory builderFactory) {
super(operand, builderFactory);
}
/**
 * Fires only for the aggregate variants this rule can rewrite: window aggregates,
 * plain logical aggregates, and (window) table aggregates.
 */
@Override
public boolean matches(RelOptRuleCall call) {
    final SingleRel relNode = call.rel(0);
    if (relNode instanceof LogicalWindowAggregate) {
        return true;
    }
    if (relNode instanceof LogicalAggregate) {
        return true;
    }
    return relNode instanceof TableAggregate;
}
/**
 * Rewrites the matched node by extracting a project of only the used input fields
 * and pushing it below the aggregate. Dispatches on the concrete node type because
 * Aggregate and TableAggregate take different extraction paths.
 */
@Override
public void onMatch(RelOptRuleCall call) {
final RelNode relNode = call.rel(0);
final RelNode input = call.rel(1);
// Prime the builder with the input so extracted projects are created on top of it.
final RelBuilder relBuilder = call.builder().push(input);
if (relNode instanceof Aggregate) {
call.transformTo(performExtractForAggregate((Aggregate) relNode, input, relBuilder));
} else if (relNode instanceof TableAggregate) {
call.transformTo(performExtractForTableAggregate((TableAggregate) relNode, input, relBuilder));
}
}
/**
* Extract a project from the input aggregate and return a new aggregate.
*/
private RelNode performExtractForAggregate(Aggregate aggregate, RelNode input, RelBuilder relBuilder) {
Mapping mapping = extractProjectsAndMapping(aggregate, input, relBuilder);
return getNewAggregate(aggregate, relBuilder, mapping);
}
/**
* Extract a project from the input table aggregate and return a new table aggregate.
*/
private RelNode performExtractForTableAggregate(TableAggregate aggregate, RelNode input, RelBuilder relBuilder) {
RelNode newAggregate = performExtractForAggregate(aggregate.getCorrespondingAggregate(), input, relBuilder);
if (aggregate instanceof LogicalTableAggregate) {
return LogicalTableAggregate.create((Aggregate) newAggregate);
} else {
return LogicalWindowTableAggregate.create((LogicalWindowAggregate) newAggregate);
}
}
/**
* Extract projects from the Aggregate and return the index mapping between the new projects
* and it's input.
*/
private Mapping extractProjectsAndMapping(
Aggregate aggregate,
RelNode input,
RelBuilder relBuilder) {
// Compute which input fields are used.
final ImmutableBitSet.Builder inputFieldsUsed = getInputFieldUsed(aggregate, input);
final List<RexNode> projects = new ArrayList<>();
final Mapping mapping =
Mappings.create(MappingType.INVERSE_SURJECTION,
aggregate.getInput().getRowType().getFieldCount(),
inputFieldsUsed.cardinality());
int j = 0;
for (int i : inputFieldsUsed.build()) {
projects.add(relBuilder.field(i));
mapping.set(i, j++);
}
if (input instanceof Project) {
// this will not create trivial projects
relBuilder.project(projects);
} else {
relBuilder.project(projects, Collections.emptyList(), true);
}
return mapping;
}
/**
* Compute which input fields are used by the aggregate.
*/
private ImmutableBitSet.Builder getInputFieldUsed(Aggregate aggregate, RelNode input) {
// 1. group fields are always used
final ImmutableBitSet.Builder inputFieldsUsed =
aggregate.getGroupSet().rebuild();
// 2. agg functions
for (AggregateCall aggCall : aggregate.getAggCallList()) {
for (int i : aggCall.getArgList()) {
inputFieldsUsed.set(i);
}
if (aggCall.filterArg >= 0) {
inputFieldsUsed.set(aggCall.filterArg);
}
}
// 3. window time field if the aggregate is a group window aggregate.
if (aggregate instanceof LogicalWindowAggregate) {
inputFieldsUsed.set(getWindowTimeFieldIndex(((LogicalWindowAggregate) aggregate).getWindow(), input));
}
return inputFieldsUsed;
}
private RelNode getNewAggregate(Aggregate oldAggregate, RelBuilder relBuilder, Mapping mapping) {
final ImmutableBitSet newGroupSet =
Mappings.apply(mapping, oldAggregate.getGroupSet());
final Iterable<ImmutableBitSet> newGroupSets =
oldAggregate.getGroupSets().stream()
.map(bitSet -> Mappings.apply(mapping, bitSet))
.collect(Collectors.toList());
final List<RelBuilder.AggCall> newAggCallList =
getNewAggCallList(oldAggregate, relBuilder, mapping);
final RelBuilder.GroupKey groupKey =
relBuilder.groupKey(newGroupSet, newGroupSets);
if (oldAggregate instanceof LogicalWindowAggregate) {
if (newGroupSet.size() == 0 && newAggCallList.size() == 0) {
// Return the old LogicalWindowAggregate directly, as we can't get an empty Aggregate
// from the relBuilder.
return oldAggregate;
} else {
relBuilder.aggregate(groupKey, newAggCallList);
Aggregate newAggregate = (Aggregate) relBuilder.build();
LogicalWindowAggregate oldLogicalWindowAggregate = (LogicalWindowAggregate) oldAggregate;
return LogicalWindowAggregate.create(
oldLogicalWindowAggregate.getWindow(),
oldLogicalWindowAggregate.getNamedProperties(),
newAggregate);
}
} else {
relBuilder.aggregate(groupKey, newAggCallList);
return relBuilder.build();
}
}
private int getWindowTimeFieldIndex(LogicalWindow logicalWindow, RelNode input) {
ResolvedFieldReference timeAttribute = (ResolvedFieldReference) logicalWindow.timeAttribute();
return input.getRowType().getFieldNames().indexOf(timeAttribute.name());
}
private List<RelBuilder.AggCall> getNewAggCallList(
Aggregate oldAggregate,
RelBuilder relBuilder,
Mapping mapping) {
final List<RelBuilder.AggCall> newAggCallList = new ArrayList<>();
for (AggregateCall aggCall : oldAggregate.getAggCallList()) {
final RexNode filterArg = aggCall.filterArg < 0 ? null
: relBuilder.field(Mappings.apply(mapping, aggCall.filterArg));
newAggCallList.add(
relBuilder
.aggregateCall(
aggCall.getAggregation(),
relBuilder.fields(Mappings.apply2(mapping, aggCall.getArgList())))
.distinct(aggCall.isDistinct())
.filter(filterArg)
.approximate(aggCall.isApproximate())
.sort(relBuilder.fields(aggCall.collation))
.as(aggCall.name));
}
return newAggCallList;
}
}
| |
/*
* Copyright (C) 2015 Actor LLC. <https://actor.im>
*/
package im.AfriChat.core.modules.internal;
import java.util.ArrayList;
import im.AfriChat.core.api.base.SeqUpdate;
import im.AfriChat.core.api.rpc.RequestAddContact;
import im.AfriChat.core.api.rpc.RequestRemoveContact;
import im.AfriChat.core.api.rpc.RequestSearchContacts;
import im.AfriChat.core.api.rpc.ResponseSearchContacts;
import im.AfriChat.core.api.rpc.ResponseSeq;
import im.AfriChat.core.api.updates.UpdateContactsAdded;
import im.AfriChat.core.api.updates.UpdateContactsRemoved;
import im.AfriChat.core.entity.User;
import im.AfriChat.core.modules.AbsModule;
import im.AfriChat.core.modules.Modules;
import im.AfriChat.core.viewmodel.Command;
import im.AfriChat.core.viewmodel.CommandCallback;
import im.AfriChat.runtime.Storage;
import im.AfriChat.runtime.actors.ActorCreator;
import im.AfriChat.runtime.actors.ActorRef;
import im.AfriChat.runtime.actors.Props;
import im.AfriChat.runtime.storage.ListEngine;
import im.AfriChat.core.entity.Contact;
import im.AfriChat.core.modules.internal.contacts.BookImportActor;
import im.AfriChat.core.modules.internal.contacts.ContactsSyncActor;
import im.AfriChat.core.modules.updates.internal.UsersFounded;
import im.AfriChat.core.network.RpcCallback;
import im.AfriChat.core.network.RpcException;
import im.AfriChat.core.network.RpcInternalException;
import im.AfriChat.core.viewmodel.UserVM;
import static im.AfriChat.runtime.actors.ActorSystem.system;
public class ContactsModule extends AbsModule {

    // Persistent list of the user's contacts, backed by storage.
    private ListEngine<Contact> contacts;
    // Actor importing the device phone book; created in run().
    private ActorRef bookImportActor;
    // Actor keeping the contact list in sync with the server; created in run().
    private ActorRef contactSyncActor;

    public ContactsModule(final Modules modules) {
        super(modules);
        contacts = Storage.createList(STORAGE_CONTACTS, Contact.CREATOR);
    }

    /**
     * Starts the background actors of this module. Must be called before any
     * phone-book or sync work is dispatched.
     */
    public void run() {
        bookImportActor = system().actorOf(Props.create(BookImportActor.class, new ActorCreator<BookImportActor>() {
            @Override
            public BookImportActor create() {
                return new BookImportActor(context());
            }
        }).changeDispatcher("heavy"), "actor/book_import");
        contactSyncActor = system().actorOf(Props.create(ContactsSyncActor.class, new ActorCreator<ContactsSyncActor>() {
            @Override
            public ContactsSyncActor create() {
                return new ContactsSyncActor(context());
            }
        }).changeDispatcher("heavy"), "actor/contacts_sync");
    }

    /**
     * @return the storage-backed contact list
     */
    public ListEngine<Contact> getContacts() {
        return contacts;
    }

    /**
     * Notifies the module that the device phone book changed, triggering a re-import.
     */
    public void onPhoneBookChanged() {
        bookImportActor.send(new BookImportActor.PerformSync());
    }

    public ActorRef getContactSyncActor() {
        return contactSyncActor;
    }

    /**
     * Marks the given user as a contact in local preferences.
     */
    public void markContact(int uid) {
        preferences().putBool("contact_" + uid, true);
    }

    /**
     * Marks the given user as not a contact in local preferences.
     */
    public void markNonContact(int uid) {
        preferences().putBool("contact_" + uid, false);
    }

    /**
     * @return true if the user was previously marked as a contact (defaults to false)
     */
    public boolean isUserContact(int uid) {
        return preferences().getBool("contact_" + uid, false);
    }

    /**
     * Creates a command that searches contacts on the server.
     * On an empty result or on any RPC error the callback receives an empty array
     * on the UI thread; otherwise the found users are routed through the update
     * pipeline, which completes the callback.
     *
     * @param query search query
     * @return command producing the matching user view-models
     */
    public Command<UserVM[]> findUsers(final String query) {
        return new Command<UserVM[]>() {
            @Override
            public void start(final CommandCallback<UserVM[]> callback) {
                request(new RequestSearchContacts(query), new RpcCallback<ResponseSearchContacts>() {
                    @Override
                    public void onResult(ResponseSearchContacts response) {
                        if (response.getUsers().size() == 0) {
                            runOnUiThread(new Runnable() {
                                @Override
                                public void run() {
                                    callback.onResult(new UserVM[0]);
                                }
                            });
                            return;
                        }
                        // Defer completion to the update handler so user models get created first.
                        updates().onUpdateReceived(new UsersFounded(response.getUsers(), callback));
                    }

                    @Override
                    public void onError(RpcException e) {
                        e.printStackTrace();
                        // Errors are reported as an empty result rather than a failure.
                        runOnUiThread(new Runnable() {
                            @Override
                            public void run() {
                                callback.onResult(new UserVM[0]);
                            }
                        });
                    }
                });
            }
        };
    }

    /**
     * Creates a command that adds the given user to the contact list on the server.
     *
     * @param uid user id to add
     * @return command completing with {@code true} on success
     */
    public Command<Boolean> addContact(final int uid) {
        return changeContact(uid, true);
    }

    /**
     * Creates a command that removes the given user from the contact list on the server.
     *
     * @param uid user id to remove
     * @return command completing with {@code true} on success
     */
    public Command<Boolean> removeContact(final int uid) {
        return changeContact(uid, false);
    }

    /**
     * Shared implementation of {@link #addContact(int)} and {@link #removeContact(int)}:
     * resolves the user, issues the matching RPC, and wraps the change into a
     * {@code SeqUpdate} so it flows through the normal update pipeline before the
     * callback is completed on the UI thread.
     *
     * @param uid user id to change
     * @param add true to add the contact, false to remove it
     */
    private Command<Boolean> changeContact(final int uid, final boolean add) {
        return new Command<Boolean>() {
            @Override
            public void start(final CommandCallback<Boolean> callback) {
                User user = users().getValue(uid);
                if (user == null) {
                    // Unknown user: report an internal error on the UI thread.
                    runOnUiThread(new Runnable() {
                        @Override
                        public void run() {
                            callback.onError(new RpcInternalException());
                        }
                    });
                    return;
                }
                RpcCallback<ResponseSeq> rpcCallback = new RpcCallback<ResponseSeq>() {
                    @Override
                    public void onResult(ResponseSeq response) {
                        ArrayList<Integer> uids = new ArrayList<Integer>();
                        uids.add(uid);
                        SeqUpdate update;
                        if (add) {
                            update = new SeqUpdate(response.getSeq(), response.getState(),
                                    UpdateContactsAdded.HEADER, new UpdateContactsAdded(uids).toByteArray());
                        } else {
                            update = new SeqUpdate(response.getSeq(), response.getState(),
                                    UpdateContactsRemoved.HEADER, new UpdateContactsRemoved(uids).toByteArray());
                        }
                        updates().onUpdateReceived(update);
                        runOnUiThread(new Runnable() {
                            @Override
                            public void run() {
                                callback.onResult(true);
                            }
                        });
                    }

                    @Override
                    public void onError(RpcException e) {
                        runOnUiThread(new Runnable() {
                            @Override
                            public void run() {
                                callback.onError(new RpcInternalException());
                            }
                        });
                    }
                };
                if (add) {
                    request(new RequestAddContact(uid, user.getAccessHash()), rpcCallback);
                } else {
                    request(new RequestRemoveContact(uid, user.getAccessHash()), rpcCallback);
                }
            }
        };
    }

    public void resetModule() {
        // TODO: Implement
    }
}
| |
package etri.sdn.controller.protocol.io;
import java.io.IOException;
import java.nio.channels.CancelledKeyException;
import java.nio.channels.ClosedChannelException;
import java.nio.channels.SelectionKey;
import java.nio.channels.Selector;
import java.nio.channels.SocketChannel;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentSkipListSet;
import org.projectfloodlight.openflow.protocol.OFMessage;
import etri.sdn.controller.util.Logger;
/**
* Watcher thread that monitors channels which are able to be read.
* @author bjlee
*
*/
public final class ClientChannelWatcher extends Thread {

	// Set by shutdown(); checked once per iteration of the select loop in run().
	private volatile boolean quit = false;

	// Monitor used to pause the select loop while addClient() registers a new channel.
	// See the "guard idiom" comment in run().
	private Object guard = new Object();

	// Selector that every accepted client channel is registered with.
	private Selector read_selector;

	// Controllers shared by all watcher instances; a snapshot is attached to each new connection.
	private static Set<IOFHandler> controllers = new ConcurrentSkipListSet<IOFHandler>();

	ClientChannelWatcher() throws IOException {
		this.read_selector = Selector.open();
	}

	/**
	 * Registers a controller so that subsequently accepted connections are handled by it.
	 *
	 * @param controller controller to register
	 */
	public static void registerController(IOFHandler controller) {
		controllers.add(controller);
	}

	/**
	 * Removes a previously registered controller. Existing connections keep the
	 * handler set they were created with.
	 *
	 * @param controller controller to remove
	 */
	public static void deregisterController(IOFHandler controller) {
		controllers.remove(controller);
	}

	/**
	 * Returns the handlers for the given connection.
	 * NOTE(review): currently this returns a copy of ALL registered controllers;
	 * the {@code conn} argument is not consulted.
	 *
	 * @param conn connection (currently unused)
	 * @return a fresh set containing every registered controller
	 */
	public static Set<IOFHandler> getHandlersFor(Connection conn) {
		Set<IOFHandler> ret = new ConcurrentSkipListSet<IOFHandler>();
		for ( IOFHandler h : controllers ) {
			ret.add(h);
		}
		return ret;
	}

	/** Asks the watcher thread to exit after its current select round. */
	void shutdown() {
		quit = true;
	}

	/**
	 * Wraps a newly accepted client channel in a {@link Connection}, attaches the
	 * current handler set, and registers the channel with the read selector.
	 * If no controllers are registered, the channel is closed immediately.
	 * Holding {@code guard} here keeps the run() loop parked (see its guard idiom)
	 * so register() cannot deadlock against a blocked select().
	 *
	 * @param client the accepted socket channel
	 */
	void addClient(SocketChannel client) {
		synchronized ( guard ) {
			try {
				Connection new_conn = new Connection(client);
				Set<IOFHandler> hset = getHandlersFor(new_conn);
				if ( hset == null || hset.isEmpty() ) {
					// No controller is interested in this connection - drop it.
					try {
						// TODO: Does client need some indication for this disconnection?
						client.close();
					} catch (IOException e) {
						// does nothing
					}
					return;
				}
				new_conn.addHandler( hset );
				new_conn.setSelector( read_selector );
				// wakeup() returns the selector itself; waking it here makes a blocked
				// select() return so this register() call can proceed.
				client.register(
					read_selector.wakeup(),
					SelectionKey.OP_READ | SelectionKey.OP_WRITE,
					//	SelectionKey.OP_READ,
					new_conn
				);
			} catch (ClosedChannelException e) {
				// channel is closed.
				try {
					client.close();
				} catch (IOException e1) {
					// does nothing.
				}
			}
		}
	}

	/** Wakes the selector so the run() loop re-evaluates its state. */
	void wakeup() {
		synchronized ( guard ) {
			read_selector.wakeup();
		}
	}

	/**
	 * Main loop: selects ready channels, completes initial connection setup on the
	 * first writable event, flushes pending output while writable, and dispatches
	 * reads. Invalid or disconnected keys are cancelled and their connections closed.
	 */
	@Override
	public void run() {
		while ( !quit ) {
			try {
				// guard idiom to prevent deadlock at client.register() call:
				// if addClient() holds the guard, we park here instead of re-entering select().
				synchronized (guard) {}

				int r = read_selector.select();
				if ( r > 0 ) { // there's something to read.
					Set<SelectionKey> keys = read_selector.selectedKeys();
					for ( Iterator<SelectionKey> i = keys.iterator(); i.hasNext(); ) {
						SelectionKey key = i.next();
						// Remove the key from the selected set; the selector does not do this itself.
						i.remove();

						try {
							Connection conn = (Connection) key.attachment();

							if ( !key.isValid() ) {
								handleDisconnectedEvent( conn );
								key.cancel();
								conn.close();
								continue;
							}

							if ( !conn.isConnected() ) {
								handleDisconnectedEvent( conn );
								key.cancel();
								conn.close();
								continue;
							}

							if ( key.isWritable() ) {
								if ( conn.getStatus() == Connection.STATUS.CONNECTED ) {
									// handle initial connection setup
									if ( !handleConnectedEvent( conn ) ) {
										handleDisconnectedEvent( conn );
										key.cancel();
										conn.close();
										continue;
									}
									conn.setStatus( Connection.STATUS.RUNNING );
								}
								else if ( conn.getStatus() == Connection.STATUS.RUNNING ) {
									// this is for flushing stream whenever possible.
									// without this line, this server will not respond to
									// cbench response-time test (without -t option) because
									// it will not give immediate response to the request
									// until the byte buffer is filled enough to flush.
									conn.flush();
								}
							}

							if ( conn.getStatus() == Connection.STATUS.RUNNING && key.isReadable() ) {
								if ( !handleReadEvent(conn) ) {
									handleDisconnectedEvent( conn );
									key.cancel();
									conn.close();
									continue;
								}
							}
						} catch ( CancelledKeyException e ) {
							e.printStackTrace();
							continue;
						}
					}
				}
			} catch (IOException e) {
				e.printStackTrace();
				// just break this watcher.
				return;
			}
		}
	}

	/**
	 * Performs initial setup for a newly connected channel: creates the switch
	 * object and notifies every handler. Returns false if any handler rejects
	 * the connection.
	 */
	private boolean handleConnectedEvent(Connection conn) {
		// create a switch object and set to the connection.
		IOFSwitch sw = new OFSwitchImpl();
		conn.setSwitch( sw );

		try {
			Logger.stderr("connected with " + conn.getClient().getRemoteAddress());
		} catch (IOException e1) {
			// does nothing
		}

		boolean ret = true;
		Set<IOFHandler> handlers = conn.getHandlers();
		for ( IOFHandler h : handlers ) {
			// All handlers are notified even after one fails; ret stays false once false.
			ret = h.handleConnectedEvent( conn ) == false ? false : ret ;
		}
		return ret;
	}

	/**
	 * Reads OpenFlow messages from the connection and dispatches them to every
	 * handler. Returns false if the read failed (connection should be dropped);
	 * a null message batch is treated as "nothing to do" and returns true.
	 */
	private boolean handleReadEvent(Connection conn) {

		List<OFMessage> msgs = null;
		try {
			msgs = conn.read();
			if ( msgs == null ) { return true; }
		} catch (IOException e) {
			//	e.printStackTrace();
			return false;
		}

		boolean ret = true;
		Set<IOFHandler> handlers = conn.getHandlers();
		for ( IOFHandler h : handlers ) {
			// All handlers see the batch even after one fails; ret stays false once false.
			ret = h.handleReadEvent(conn, msgs) == false ? false : ret ;
		}
		return ret;
	}

	/**
	 * Notifies every handler that the connection has been lost.
	 */
	private void handleDisconnectedEvent(Connection conn) {
		try {
			Logger.stderr("disconnected with " + conn.getClient().getRemoteAddress());
		} catch (IOException e) {
			// getRemoteAddress() can fail on an already-dead channel.
			Logger.stderr("disconnected with a switch (reason is unknown)");
		}

		Set<IOFHandler> handlers = conn.getHandlers();
		for ( IOFHandler h : handlers ) {
			h.handleDisconnectEvent( conn );
		}
	}
}
| |
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.bbg.replay;
import static com.opengamma.bbg.replay.BloombergTick.BUID_KEY;
import static com.opengamma.bbg.replay.BloombergTick.FIELDS_KEY;
import static com.opengamma.bbg.replay.BloombergTick.RECEIVED_TS_KEY;
import static com.opengamma.bbg.replay.BloombergTick.SECURITY_KEY;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.LinkedBlockingQueue;
import org.apache.commons.lang.time.StopWatch;
import org.fudgemsg.FudgeContext;
import org.fudgemsg.FudgeMsg;
import org.fudgemsg.MutableFudgeMsg;
import org.fudgemsg.wire.FudgeMsgWriter;
import org.fudgemsg.wire.FudgeSize;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.threeten.bp.Clock;
import org.threeten.bp.Instant;
import org.threeten.bp.LocalDate;
import org.threeten.bp.ZoneOffset;
import org.threeten.bp.ZonedDateTime;
import com.google.common.collect.ImmutableMap;
import com.opengamma.OpenGammaRuntimeException;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.TerminatableJob;
import com.opengamma.util.fudgemsg.OpenGammaFudgeContext;
/**
 * Job that drains Bloomberg ticks from a queue and appends them to files on disk:
 * always to a single combined file, and additionally to per-security files when
 * running in {@code StorageMode.MULTI}.
 */
public class BloombergTickWriter extends TerminatableJob {
  /** Logger. */
  private static final Logger s_logger = LoggerFactory.getLogger(BloombergTickWriter.class);
  //interval in millis
  private static final long DEFAULT_REPORT_INTERVAL = 10000L;
  /**
   * The default name for the file that contains all ticks.
   */
  public static final String ALL_TICKS_FILENAME = "allTicks.dat";

  private final FudgeContext _fudgeContext;
  // Per-security queues of pending ticks, keyed by security description (MULTI mode only).
  private ConcurrentMap<String, BlockingQueue<FudgeMsg>> _securityMapQueue = new ConcurrentHashMap<String, BlockingQueue<FudgeMsg>>();
  // Source queue shared with the tick collector.
  private final BlockingQueue<FudgeMsg> _allTicksQueue;
  // Immutable ticker -> BUID lookup.
  private final Map<String, String> _ticker2Buid;
  private final String _rootDir;
  // Throughput counters accumulated since the last report.
  private int _nTicks;
  private int _nWrites;
  private int _nBlocks;
  private final StopWatch _stopWatch = new StopWatch();
  private final long _reportInterval;
  private final StorageMode _storageMode;

  /**
   * Creates a writer using the shared Fudge context.
   *
   * @param allTicksQueue queue of incoming ticks, not null
   * @param ticker2Buid map from ticker to BUID, not null
   * @param rootDir root directory to write under, not null
   * @param storageMode storage mode, not null
   * @param ticksGenerator retained for signature compatibility; not used
   */
  public BloombergTickWriter(BlockingQueue<FudgeMsg> allTicksQueue, Map<String, String> ticker2Buid,
      String rootDir, StorageMode storageMode, BloombergTicksCollector ticksGenerator) {
    this(OpenGammaFudgeContext.getInstance(), allTicksQueue, ticker2Buid, rootDir, storageMode);
  }

  /**
   * Creates a writer with the default report interval.
   *
   * @param fudgeContext the Fudge context, not null
   * @param allTicksQueue queue of incoming ticks, not null
   * @param ticker2Buid map from ticker to BUID, not null
   * @param rootDir root directory to write under, not null
   * @param storageMode storage mode, not null
   */
  public BloombergTickWriter(FudgeContext fudgeContext, BlockingQueue<FudgeMsg> allTicksQueue, Map<String, String> ticker2Buid,
      String rootDir, StorageMode storageMode) {
    this(fudgeContext, allTicksQueue, ticker2Buid, rootDir, storageMode, DEFAULT_REPORT_INTERVAL);
  }

  /**
   * Creates a writer.
   *
   * @param fudgeContext the Fudge context, not null
   * @param allTicksQueue queue of incoming ticks, not null
   * @param ticker2Buid map from ticker to BUID, not null
   * @param rootDir root directory to write under, not null
   * @param storageMode storage mode, not null
   * @param reportInterval interval between throughput reports, in milliseconds
   */
  public BloombergTickWriter(
      FudgeContext fudgeContext,
      BlockingQueue<FudgeMsg> allTicksQueue,
      Map<String, String> ticker2Buid,
      String rootDir,
      StorageMode storageMode,
      long reportInterval) {

    ArgumentChecker.notNull(fudgeContext, "fudgeContext");
    ArgumentChecker.notNull(allTicksQueue, "allTicksQueue");
    ArgumentChecker.notNull(ticker2Buid, "ticker2Buid");
    ArgumentChecker.notNull(rootDir, "rootDir");
    ArgumentChecker.notNull(storageMode, "storageMode");

    _fudgeContext = fudgeContext;
    _allTicksQueue = allTicksQueue;
    _rootDir = rootDir;
    _stopWatch.start();
    _reportInterval = reportInterval;
    _storageMode = storageMode;
    // Defensive immutable copy so later mutation by the caller cannot affect us.
    _ticker2Buid = ImmutableMap.<String, String>builder().putAll(ticker2Buid).build();
    s_logger.info("BloombergTickWriter started in {} mode writing to {}", _storageMode, _rootDir);
  }

  private FudgeContext getFudgeContext() {
    return _fudgeContext;
  }

  /**
   * Drains the incoming queue and writes the ticks out: always to the single
   * combined file, and per security when in MULTI mode. Terminates the job when
   * a terminate message is encountered.
   */
  @Override
  protected void runOneCycle() {
    // Andrew 2010-01-27 -- If the queue is empty, this will loop round in a big no-op.
    // Checking for this and including a blocking 'take' on the queue seemed to result in lower throughput.
    // This might not be the case outside of the high load test case where data arrives at high speed and the blocking is a rarity.
    List<FudgeMsg> ticks = new ArrayList<FudgeMsg>(_allTicksQueue.size());
    _allTicksQueue.drainTo(ticks);
    // Returns the terminate message if one was present in this batch, else null.
    FudgeMsg msg = writeAllTicksToSingleFile(ticks);
    if (_storageMode == StorageMode.MULTI) {
      if (msg != null && BloombergTickReplayUtils.isTerminateMsg(msg)) {
        // Don't route the terminate marker into the per-security files.
        ticks.remove(msg);
      }
      buildSecurityMapQueue(ticks);
      writeOutSecurityMapQueue();
    }
    if (s_logger.isDebugEnabled()) {
      writeReport();
    }
    if (msg != null && BloombergTickReplayUtils.isTerminateMsg(msg)) {
      s_logger.info("received terminate message, ..terminating");
      terminate();
    }
  }

  /**
   * Distributes ticks into per-security queues for MULTI mode writing.
   *
   * @param ticks the ticks to distribute
   */
  private void buildSecurityMapQueue(List<FudgeMsg> ticks) {
    for (FudgeMsg fudgeMsg : ticks) {
      String securityDes = fudgeMsg.getString(SECURITY_KEY);
      // Single-threaded access (only the job thread touches this map), so
      // get-then-put without compute is safe here.
      BlockingQueue<FudgeMsg> queue = _securityMapQueue.get(securityDes);
      if (queue == null) {
        queue = new LinkedBlockingQueue<FudgeMsg>();
        _securityMapQueue.put(securityDes, queue);
      }
      try {
        queue.put(fudgeMsg);
      } catch (InterruptedException e) {
        // NOTE(review): clears the interrupt flag rather than restoring it;
        // kept as-is because TerminatableJob's loop behaviour may depend on it.
        Thread.interrupted();
        s_logger.warn("interrupted from putting message on queue");
      }
    }
  }

  /**
   * Appends the given ticks for one security to hour-named files in {@code dir}.
   *
   * @param dir target directory for this security/date
   * @param buid Bloomberg unique id of the security
   * @param securityDes security description (for logging)
   * @param tickMsgList ticks to write
   */
  private void writeSecurityTicks(final File dir, final String buid, final String securityDes, final List<FudgeMsg> tickMsgList) {
    if (tickMsgList.isEmpty()) {
      return;
    }
    s_logger.debug("writing {} messages for {}:{}", new Object[]{tickMsgList.size(), securityDes, buid});
    // Group ticks by target file name (one file per hour).
    Map<String, List<FudgeMsg>> fileTicksMap = new HashMap<String, List<FudgeMsg>>();
    for (FudgeMsg tickMsg : tickMsgList) {
      String filename = makeFileName(tickMsg);
      List<FudgeMsg> fileTicks = fileTicksMap.get(filename);
      if (fileTicks == null) {
        fileTicks = new ArrayList<FudgeMsg>();
        fileTicks.add(tickMsg);
        fileTicksMap.put(filename, fileTicks);
      } else {
        fileTicks.add(tickMsg);
      }
    }
    for (Entry<String, List<FudgeMsg>> entry : fileTicksMap.entrySet()) {
      String filename = entry.getKey();
      List<FudgeMsg> ticks = entry.getValue();
      String fullPath = new StringBuilder(dir.getAbsolutePath()).append(File.separator).append(filename).toString();
      FileOutputStream fos = null;
      try {
        // Append mode: the same hour file is extended across cycles.
        fos = new FileOutputStream(fullPath, true);
        BufferedOutputStream bos = new BufferedOutputStream(fos, 4096);
        FudgeMsgWriter fmsw = getFudgeContext().createMessageWriter(bos);
        for (FudgeMsg tick : ticks) {
          _nBlocks += FudgeSize.calculateMessageSize(tick);
          fmsw.writeMessage(tick, 0);
          // Per-message flush means closing the raw stream below cannot lose data.
          fmsw.flush();
        }
        _nWrites++;
      } catch (FileNotFoundException e) {
        s_logger.warn("cannot open file {} for writing", fullPath);
        throw new OpenGammaRuntimeException("Cannot open " + fullPath + " for writing", e);
      } finally {
        if (fos != null) {
          try {
            fos.close();
          } catch (IOException e) {
            s_logger.warn("cannot close file {}", fullPath);
          }
        }
      }
    }
    _nTicks += tickMsgList.size();
  }

  /**
   * Appends every non-terminate tick to the combined ticks file, adding the BUID
   * to each message on the way.
   *
   * @param ticks batch to write, may be empty
   * @return the terminate message if one was in the batch, null otherwise
   */
  private FudgeMsg writeAllTicksToSingleFile(List<FudgeMsg> ticks) {
    if (ticks.isEmpty()) {
      return null;
    }
    FudgeMsg terminateMsg = null;
    File fullPath = getTicksFile();
    FileOutputStream fos = null;
    try {
      fos = new FileOutputStream(fullPath, true);
      BufferedOutputStream bos = new BufferedOutputStream(fos, 4096);
      FudgeMsgWriter fmsw = getFudgeContext().createMessageWriter(bos);
      for (FudgeMsg tick : ticks) {
        if (BloombergTickReplayUtils.isTerminateMsg(tick)) {
          // Remember but do not persist the terminate marker.
          terminateMsg = tick;
          continue;
        }
        _nBlocks += FudgeSize.calculateMessageSize(tick);
        String securityDes = tick.getString(SECURITY_KEY);
        String buid = getBloombergBUID(securityDes);
        ((MutableFudgeMsg) tick).add(BUID_KEY, buid);
        fmsw.writeMessage(tick, 0);
        // Per-message flush means closing the raw stream below cannot lose data.
        fmsw.flush();
      }
      _nWrites++;
    } catch (FileNotFoundException e) {
      s_logger.warn("cannot open file {} for writing", fullPath);
      throw new OpenGammaRuntimeException("Cannot open file " + fullPath + " for writing", e);
    } finally {
      if (fos != null) {
        try {
          fos.close();
        } catch (IOException e) {
          s_logger.warn("cannot close {}", fullPath);
        }
      }
    }
    _nTicks += ticks.size();
    return terminateMsg;
  }

  /**
   * Returns the combined ticks file for today, creating its directory if needed.
   */
  private File getTicksFile() {
    String baseDirectory = makeBaseDirectoryName();
    File dir = new File(baseDirectory);
    if (!dir.exists()) {
      createDirectory(dir);
    }
    return new File(new StringBuilder().append(baseDirectory).append(File.separator).append(ALL_TICKS_FILENAME).toString());
  }

  /**
   * Builds the date-based base directory name {@code rootDir/yyyy/MM/dd} for the
   * current UTC date.
   *
   * @return the base directory path
   */
  private String makeBaseDirectoryName() {
    Clock clock = Clock.systemUTC();
    LocalDate today = LocalDate.now(clock);
    StringBuilder buf = new StringBuilder();
    buf.append(_rootDir).append(File.separator);
    appendZeroPadded(buf, today.getYear());
    buf.append(File.separator);
    appendZeroPadded(buf, today.getMonthValue());
    buf.append(File.separator);
    appendZeroPadded(buf, today.getDayOfMonth());
    return buf.toString();
  }

  /**
   * Appends {@code value} to {@code buf}, left-padding single-digit values with a zero.
   * Values of 10 or more are appended unchanged.
   */
  private static void appendZeroPadded(StringBuilder buf, int value) {
    if (value < 10) {
      buf.append("0").append(value);
    } else {
      buf.append(value);
    }
  }

  /**
   * Drains each per-security queue and writes its ticks to that security's
   * date-based directory.
   */
  private void writeOutSecurityMapQueue() {
    for (Entry<String, BlockingQueue<FudgeMsg>> entry : _securityMapQueue.entrySet()) {
      String security = entry.getKey();
      BlockingQueue<FudgeMsg> queue = entry.getValue();
      if (queue.isEmpty()) {
        continue;
      }
      List<FudgeMsg> tickMsgList = new ArrayList<FudgeMsg>(queue.size());
      queue.drainTo(tickMsgList);
      String buid = getBloombergBUID(security);
      // Use the first message's received timestamp to pick the directory date.
      FudgeMsg tickMsg = tickMsgList.get(0);
      Long epochMillis = tickMsg.getLong(RECEIVED_TS_KEY);
      File dir = buildSecurityDirectory(buid, epochMillis);
      if (!dir.exists()) {
        createDirectory(dir);
      }
      writeSecurityTicks(dir, buid, security, tickMsgList);
    }
  }

  /**
   * Logs throughput statistics (ticks/s, file operations/s, average block size)
   * once per report interval, then resets the counters.
   */
  private void writeReport() {
    s_logger.debug("writing reports");
    _stopWatch.suspend();
    long time = _stopWatch.getTime();
    if (time >= _reportInterval) {
      double result = ((double) _nTicks / (double) time) * 1000.;
      s_logger.debug("ticks {}/s", result);
      result = ((double) _nWrites / (double) time) * 1000.;
      s_logger.debug("fileOperations {}/s", result);
      result = (double) _nBlocks / (double) _nWrites;
      s_logger.debug("average blocks {}bytes", result);
      _nWrites = 0;
      _nTicks = 0;
      _nBlocks = 0;
      _stopWatch.reset();
      _stopWatch.start();
    } else {
      _stopWatch.resume();
    }
  }

  /**
   * Builds the per-security directory {@code rootDir/buid/yyyy/MM/dd/} for the
   * UTC date of the given received timestamp.
   *
   * @param buid Bloomberg unique id of the security
   * @param receivedTS received timestamp in epoch millis
   * @return the directory (not created on disk by this method)
   */
  private File buildSecurityDirectory(String buid, long receivedTS) {
    Instant instant = Instant.ofEpochMilli(receivedTS);
    ZonedDateTime dateTime = ZonedDateTime.ofInstant(instant, ZoneOffset.UTC);
    LocalDate today = dateTime.toLocalDate();
    StringBuilder buf = new StringBuilder();
    buf.append(_rootDir).append(File.separator);
    buf.append(buid).append(File.separator).append(today.getYear()).append(File.separator);
    appendZeroPadded(buf, today.getMonthValue());
    buf.append(File.separator);
    appendZeroPadded(buf, today.getDayOfMonth());
    buf.append(File.separator);
    return new File(buf.toString());
  }

  /**
   * Looks up the BUID for a security description, falling back to the
   * description itself when unknown.
   */
  private String getBloombergBUID(String securityDes) {
    String buid = _ticker2Buid.get(securityDes);
    if (buid == null) {
      buid = securityDes;
    }
    return buid;
  }

  /**
   * Derives the hour-of-day file name for a tick, preferring the embedded
   * EVENT_TIME/TIME fields and falling back to the received timestamp.
   *
   * @param tickMsg the tick message
   * @return two-digit hour string used as the file name
   */
  private String makeFileName(FudgeMsg tickMsg) {
    String result = null;
    FudgeMsg bbgTickAsMsg = tickMsg.getMessage(FIELDS_KEY);
    String eventTime = bbgTickAsMsg.getString("EVENT_TIME");
    if (eventTime == null) {
      eventTime = bbgTickAsMsg.getString("TIME");
    }
    if (eventTime == null) {
      //use received time stamp
      result = makeFileNameFromReceivedTimeStamp(tickMsg);
    } else {
      result = makeFileNameFromEventTime(eventTime);
      // if time/eventTime not in expected format
      if (result == null) {
        result = makeFileNameFromReceivedTimeStamp(tickMsg);
      }
    }
    return result;
  }

  /**
   * Derives the two-digit UTC hour from the tick's received timestamp.
   *
   * @param tickMsg the tick message
   * @return two-digit hour string
   */
  private String makeFileNameFromReceivedTimeStamp(FudgeMsg tickMsg) {
    //s_logger.warn("cannot determine event time in msg {}, using received timestamp", tickMsg); // Andrew - uncomment before checking back in
    Long epochMillis = tickMsg.getLong(RECEIVED_TS_KEY);
    Instant instant = Instant.ofEpochMilli(epochMillis);
    ZonedDateTime dateTime = ZonedDateTime.ofInstant(instant, ZoneOffset.UTC);
    StringBuilder buf = new StringBuilder();
    appendZeroPadded(buf, dateTime.getHour());
    return buf.toString();
  }

  /**
   * Extracts the hour component from an event time string.
   *
   * @param eventTime expected time like 11:44:18.000+00:00
   * @return the hour part, or null if the string is not in the expected format
   */
  private String makeFileNameFromEventTime(String eventTime) {
    String result = null;
    String[] split = eventTime.split(":");
    // 11:44:18.000+00:00 splits into 4 parts on ':' (offset contributes the 4th).
    if (split.length == 4) {
      result = split[0];
    } else {
      s_logger.warn("time {} is not in expected format", eventTime);
    }
    return result;
  }

  /**
   * Creates a directory (and parents), failing loudly if creation fails.
   *
   * @param dir directory to create
   * @throws OpenGammaRuntimeException if the directory could not be created
   */
  private void createDirectory(File dir) {
    if (!dir.mkdirs()) {
      s_logger.warn("cannot create {}", dir);
      throw new OpenGammaRuntimeException("cannot create directory " + dir);
    }
  }

  /**
   * @return the nTicks
   */
  public int getNTicks() {
    return _nTicks;
  }

  /**
   * @return the nWrites
   */
  public int getNWrites() {
    return _nWrites;
  }

  /**
   * @return the nBlocks
   */
  public int getNBlocks() {
    return _nBlocks;
  }
}
| |
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package org.keycloak.testsuite.broker;
import org.keycloak.models.IdentityProviderModel;
import org.keycloak.models.IdentityProviderSyncMode;
import org.keycloak.protocol.ProtocolMapperUtils;
import org.keycloak.protocol.saml.SamlConfigAttributes;
import org.keycloak.protocol.saml.SamlProtocol;
import org.keycloak.protocol.saml.mappers.AttributeStatementHelper;
import org.keycloak.protocol.saml.mappers.UserAttributeStatementMapper;
import org.keycloak.protocol.saml.mappers.UserPropertyAttributeStatementMapper;
import org.keycloak.representations.idm.ClientRepresentation;
import org.keycloak.representations.idm.IdentityProviderRepresentation;
import org.keycloak.representations.idm.ProtocolMapperRepresentation;
import org.keycloak.representations.idm.RealmRepresentation;
import org.keycloak.testsuite.saml.AbstractSamlTest;
import org.keycloak.testsuite.util.ClientBuilder;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.keycloak.broker.saml.SAMLIdentityProviderConfig.*;
import static org.keycloak.protocol.saml.SamlProtocol.SAML_ASSERTION_CONSUMER_URL_POST_ATTRIBUTE;
import static org.keycloak.protocol.saml.SamlProtocol.SAML_IDP_INITIATED_SSO_URL_NAME;
import static org.keycloak.testsuite.broker.BrokerTestConstants.*;
import static org.keycloak.testsuite.broker.BrokerTestTools.*;
public class KcSamlBrokerConfiguration implements BrokerConfiguration {
public static final KcSamlBrokerConfiguration INSTANCE = new KcSamlBrokerConfiguration();
public static final String ATTRIBUTE_TO_MAP_FRIENDLY_NAME = "user-attribute-friendly";
private final boolean loginHint;
    /** Creates a configuration with the SAML login-hint option disabled. */
    public KcSamlBrokerConfiguration() {
        this(false);
    }
    /**
     * Creates a configuration.
     *
     * @param loginHint whether the broker should pass the login hint to the identity provider
     */
    public KcSamlBrokerConfiguration(boolean loginHint) {
        this.loginHint = loginHint;
    }
@Override
public RealmRepresentation createProviderRealm() {
RealmRepresentation realm = new RealmRepresentation();
realm.setEnabled(true);
realm.setRealm(REALM_PROV_NAME);
return realm;
}
@Override
public RealmRepresentation createConsumerRealm() {
RealmRepresentation realm = new RealmRepresentation();
realm.setEnabled(true);
realm.setRealm(REALM_CONS_NAME);
realm.setResetPasswordAllowed(true);
return realm;
}
@Override
public List<ClientRepresentation> createProviderClients() {
String clientId = getIDPClientIdInProviderRealm();
return Arrays.asList(createProviderClient(clientId));
}
private ClientRepresentation createProviderClient(String clientId) {
ClientRepresentation client = new ClientRepresentation();
client.setClientId(clientId);
client.setEnabled(true);
client.setProtocol(IDP_SAML_PROVIDER_ID);
client.setRedirectUris(Collections.singletonList(
getConsumerRoot() + "/auth/realms/" + REALM_CONS_NAME + "/broker/" + IDP_SAML_ALIAS + "/endpoint"
));
Map<String, String> attributes = new HashMap<>();
attributes.put(SamlConfigAttributes.SAML_AUTHNSTATEMENT, "true");
attributes.put(SamlProtocol.SAML_SINGLE_LOGOUT_SERVICE_URL_POST_ATTRIBUTE,
getConsumerRoot() + "/auth/realms/" + REALM_CONS_NAME + "/broker/" + IDP_SAML_ALIAS + "/endpoint");
attributes.put(SAML_ASSERTION_CONSUMER_URL_POST_ATTRIBUTE,
getConsumerRoot() + "/auth/realms/" + REALM_CONS_NAME + "/broker/" + IDP_SAML_ALIAS + "/endpoint");
attributes.put(SamlConfigAttributes.SAML_FORCE_NAME_ID_FORMAT_ATTRIBUTE, "true");
attributes.put(SamlConfigAttributes.SAML_NAME_ID_FORMAT_ATTRIBUTE, "username");
attributes.put(SamlConfigAttributes.SAML_ASSERTION_SIGNATURE, "false");
attributes.put(SamlConfigAttributes.SAML_SERVER_SIGNATURE, "false");
attributes.put(SamlConfigAttributes.SAML_CLIENT_SIGNATURE_ATTRIBUTE, "false");
attributes.put(SamlConfigAttributes.SAML_ENCRYPT, "false");
attributes.put(IdentityProviderModel.LOGIN_HINT, String.valueOf(loginHint));
client.setAttributes(attributes);
ProtocolMapperRepresentation emailMapper = new ProtocolMapperRepresentation();
emailMapper.setName("email");
emailMapper.setProtocol(SamlProtocol.LOGIN_PROTOCOL);
emailMapper.setProtocolMapper(UserPropertyAttributeStatementMapper.PROVIDER_ID);
Map<String, String> emailMapperConfig = emailMapper.getConfig();
emailMapperConfig.put(ProtocolMapperUtils.USER_ATTRIBUTE, "email");
emailMapperConfig.put(AttributeStatementHelper.SAML_ATTRIBUTE_NAME, "urn:oid:1.2.840.113549.1.9.1");
emailMapperConfig.put(AttributeStatementHelper.SAML_ATTRIBUTE_NAMEFORMAT, "urn:oasis:names:tc:SAML:2.0:attrname-format:uri");
emailMapperConfig.put(AttributeStatementHelper.FRIENDLY_NAME, "email");
ProtocolMapperRepresentation dottedAttrMapper = new ProtocolMapperRepresentation();
dottedAttrMapper.setName("email - dotted");
dottedAttrMapper.setProtocol(SamlProtocol.LOGIN_PROTOCOL);
dottedAttrMapper.setProtocolMapper(UserAttributeStatementMapper.PROVIDER_ID);
Map<String, String> dottedEmailMapperConfig = dottedAttrMapper.getConfig();
dottedEmailMapperConfig.put(ProtocolMapperUtils.USER_ATTRIBUTE, "dotted.email");
dottedEmailMapperConfig.put(AttributeStatementHelper.SAML_ATTRIBUTE_NAME, "dotted.email");
dottedEmailMapperConfig.put(AttributeStatementHelper.SAML_ATTRIBUTE_NAMEFORMAT, "urn:oasis:names:tc:SAML:2.0:attrname-format:uri");
ProtocolMapperRepresentation nestedAttrMapper = new ProtocolMapperRepresentation();
nestedAttrMapper.setName("email - nested");
nestedAttrMapper.setProtocol(SamlProtocol.LOGIN_PROTOCOL);
nestedAttrMapper.setProtocolMapper(UserAttributeStatementMapper.PROVIDER_ID);
Map<String, String> nestedEmailMapperConfig = nestedAttrMapper.getConfig();
nestedEmailMapperConfig.put(ProtocolMapperUtils.USER_ATTRIBUTE, "nested.email");
nestedEmailMapperConfig.put(AttributeStatementHelper.SAML_ATTRIBUTE_NAME, "nested.email");
nestedEmailMapperConfig.put(AttributeStatementHelper.SAML_ATTRIBUTE_NAMEFORMAT, "urn:oasis:names:tc:SAML:2.0:attrname-format:uri");
ProtocolMapperRepresentation userAttrMapper = new ProtocolMapperRepresentation();
userAttrMapper.setName("attribute - name");
userAttrMapper.setProtocol(SamlProtocol.LOGIN_PROTOCOL);
userAttrMapper.setProtocolMapper(UserAttributeStatementMapper.PROVIDER_ID);
Map<String, String> userAttrMapperConfig = userAttrMapper.getConfig();
userAttrMapperConfig.put(ProtocolMapperUtils.USER_ATTRIBUTE, KcOidcBrokerConfiguration.ATTRIBUTE_TO_MAP_NAME);
userAttrMapperConfig.put(AttributeStatementHelper.SAML_ATTRIBUTE_NAME, KcOidcBrokerConfiguration.ATTRIBUTE_TO_MAP_NAME);
userAttrMapperConfig.put(AttributeStatementHelper.SAML_ATTRIBUTE_NAMEFORMAT, AttributeStatementHelper.BASIC);
userAttrMapperConfig.put(AttributeStatementHelper.FRIENDLY_NAME, "");
ProtocolMapperRepresentation userAttrMapper2 = new ProtocolMapperRepresentation();
userAttrMapper2.setName("attribute - name 2");
userAttrMapper2.setProtocol(SamlProtocol.LOGIN_PROTOCOL);
userAttrMapper2.setProtocolMapper(UserAttributeStatementMapper.PROVIDER_ID);
Map<String, String> userAttrMapper2Config = userAttrMapper2.getConfig();
userAttrMapper2Config.put(ProtocolMapperUtils.USER_ATTRIBUTE, KcOidcBrokerConfiguration.ATTRIBUTE_TO_MAP_NAME_2);
userAttrMapper2Config.put(AttributeStatementHelper.SAML_ATTRIBUTE_NAME, KcOidcBrokerConfiguration.ATTRIBUTE_TO_MAP_NAME_2);
userAttrMapper2Config.put(AttributeStatementHelper.SAML_ATTRIBUTE_NAMEFORMAT, AttributeStatementHelper.BASIC);
userAttrMapper2Config.put(AttributeStatementHelper.FRIENDLY_NAME, "");
ProtocolMapperRepresentation userFriendlyAttrMapper = new ProtocolMapperRepresentation();
userFriendlyAttrMapper.setName("attribute - friendly name");
userFriendlyAttrMapper.setProtocol(SamlProtocol.LOGIN_PROTOCOL);
userFriendlyAttrMapper.setProtocolMapper(UserAttributeStatementMapper.PROVIDER_ID);
Map<String, String> userFriendlyAttrMapperConfig = userFriendlyAttrMapper.getConfig();
userFriendlyAttrMapperConfig.put(ProtocolMapperUtils.USER_ATTRIBUTE, ATTRIBUTE_TO_MAP_FRIENDLY_NAME);
userFriendlyAttrMapperConfig.put(AttributeStatementHelper.SAML_ATTRIBUTE_NAME, "urn:oid:1.2.3.4.5.6.7");
userFriendlyAttrMapperConfig.put(AttributeStatementHelper.SAML_ATTRIBUTE_NAMEFORMAT, AttributeStatementHelper.BASIC);
userFriendlyAttrMapperConfig.put(AttributeStatementHelper.FRIENDLY_NAME, ATTRIBUTE_TO_MAP_FRIENDLY_NAME);
client.setProtocolMappers(Arrays.asList(emailMapper, dottedAttrMapper, nestedAttrMapper, userAttrMapper, userAttrMapper2, userFriendlyAttrMapper));
return client;
}
@Override
public List<ClientRepresentation> createConsumerClients() {
return Arrays.asList(
ClientBuilder.create()
.clientId(AbstractSamlTest.SAML_CLIENT_ID_SALES_POST)
.enabled(true)
.fullScopeEnabled(true)
.protocol(SamlProtocol.LOGIN_PROTOCOL)
.baseUrl(getConsumerRoot() + "/sales-post")
.addRedirectUri(getConsumerRoot() + "/sales-post/*")
.attribute(SamlConfigAttributes.SAML_AUTHNSTATEMENT, SamlProtocol.ATTRIBUTE_TRUE_VALUE)
.attribute(SamlConfigAttributes.SAML_CLIENT_SIGNATURE_ATTRIBUTE, SamlProtocol.ATTRIBUTE_FALSE_VALUE)
.build(),
ClientBuilder.create()
.clientId(AbstractSamlTest.SAML_CLIENT_ID_SALES_POST + ".dot/ted")
.enabled(true)
.fullScopeEnabled(true)
.protocol(SamlProtocol.LOGIN_PROTOCOL)
.baseUrl(getConsumerRoot() + "/sales-post")
.addRedirectUri(getConsumerRoot() + "/sales-post/*")
.attribute(SamlConfigAttributes.SAML_AUTHNSTATEMENT, SamlProtocol.ATTRIBUTE_TRUE_VALUE)
.attribute(SamlConfigAttributes.SAML_CLIENT_SIGNATURE_ATTRIBUTE, SamlProtocol.ATTRIBUTE_FALSE_VALUE)
.attribute(SAML_IDP_INITIATED_SSO_URL_NAME, "sales-post")
.attribute(SAML_ASSERTION_CONSUMER_URL_POST_ATTRIBUTE, getConsumerRoot() + "/sales-post/saml")
.build(),
ClientBuilder.create()
.clientId("broker-app")
.name("broker-app")
.secret("broker-app-secret")
.enabled(true)
.directAccessGrants()
.addRedirectUri(getConsumerRoot() + "/auth/*")
.baseUrl(getConsumerRoot() + "/auth/realms/" + REALM_CONS_NAME + "/app")
.build()
);
}
@Override
public IdentityProviderRepresentation setUpIdentityProvider(IdentityProviderSyncMode syncMode) {
IdentityProviderRepresentation idp = createIdentityProvider(IDP_SAML_ALIAS, IDP_SAML_PROVIDER_ID);
idp.setTrustEmail(true);
idp.setAddReadTokenRoleOnCreate(true);
idp.setStoreToken(true);
Map<String, String> config = idp.getConfig();
config.put(IdentityProviderModel.SYNC_MODE, syncMode.toString());
config.put(SINGLE_SIGN_ON_SERVICE_URL, getProviderRoot() + "/auth/realms/" + REALM_PROV_NAME + "/protocol/saml");
config.put(SINGLE_LOGOUT_SERVICE_URL, getProviderRoot() + "/auth/realms/" + REALM_PROV_NAME + "/protocol/saml");
config.put(NAME_ID_POLICY_FORMAT, "urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress");
config.put(FORCE_AUTHN, "false");
config.put(IdentityProviderModel.LOGIN_HINT, String.valueOf(loginHint));
config.put(POST_BINDING_RESPONSE, "true");
config.put(POST_BINDING_AUTHN_REQUEST, "true");
config.put(VALIDATE_SIGNATURE, "false");
config.put(WANT_AUTHN_REQUESTS_SIGNED, "false");
config.put(BACKCHANNEL_SUPPORTED, "false");
return idp;
}
@Override
public String providerRealmName() {
return REALM_PROV_NAME;
}
@Override
public String consumerRealmName() {
return REALM_CONS_NAME;
}
@Override
public String getIDPClientIdInProviderRealm() {
return getConsumerRoot() + "/auth/realms/" + consumerRealmName();
}
@Override
public String getUserLogin() {
return USER_LOGIN;
}
@Override
public String getUserPassword() {
return USER_PASSWORD;
}
@Override
public String getUserEmail() {
return USER_EMAIL;
}
@Override
public String getIDPAlias() {
return IDP_SAML_ALIAS;
}
}
| |
/*
* Copyright 2000-2013 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.plugins.groovy.lang.psi.impl.statements.typedef;
import com.intellij.lang.ASTNode;
import com.intellij.psi.PsiClass;
import com.intellij.psi.PsiElement;
import com.intellij.psi.StubBasedPsiElement;
import com.intellij.psi.stubs.EmptyStub;
import com.intellij.psi.stubs.IStubElementType;
import com.intellij.util.IncorrectOperationException;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.plugins.groovy.lang.lexer.GroovyTokenTypes;
import org.jetbrains.plugins.groovy.lang.lexer.TokenSets;
import org.jetbrains.plugins.groovy.lang.parser.GroovyElementTypes;
import org.jetbrains.plugins.groovy.lang.psi.GroovyElementVisitor;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.*;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.GrEnumDefinitionBody;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.GrTypeDefinition;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.GrTypeDefinitionBody;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.members.GrEnumConstantList;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.members.GrMembersDeclaration;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.members.GrMethod;
import org.jetbrains.plugins.groovy.lang.psi.api.toplevel.GrTopStatement;
import org.jetbrains.plugins.groovy.lang.psi.impl.GrStubElementBase;
import org.jetbrains.plugins.groovy.lang.psi.impl.PsiImplUtil;
import java.util.ArrayList;
import java.util.List;
/**
* @author: Dmitry.Krasilschikov, ilyas
*/
/**
 * Base implementation of a Groovy type-definition body (the {@code { ... }}
 * part of a class/enum declaration), providing stub-aware access to the
 * fields, methods, initializers and inner classes declared inside the braces.
 *
 * @author Dmitry.Krasilschikov
 * @author ilyas
 */
public abstract class GrTypeDefinitionBodyBase extends GrStubElementBase<EmptyStub> implements GrTypeDefinitionBody {
  // Lazily computed field cache; reset to null whenever the subtree changes.
  private GrField[] myFields = null;

  public GrTypeDefinitionBodyBase(@NotNull ASTNode node) {
    super(node);
  }

  public GrTypeDefinitionBodyBase(EmptyStub stub, final IStubElementType classBody) {
    super(stub, classBody);
  }

  @Override
  public PsiElement getParent() {
    return getParentByStub();
  }

  public void subtreeChanged() {
    super.subtreeChanged();
    // Drop the field cache and clear each field's own caches: any PSI below
    // this body may have changed.
    myFields = null;
    for (GrField field : getFields()) {
      field.clearCaches();
    }
  }

  public abstract void accept(GroovyElementVisitor visitor);

  public String toString() {
    return "Type definition body";
  }

  /**
   * Returns all fields declared in this body.
   *
   * <p>The result is cached until the next {@link #subtreeChanged()}. Fix: the
   * empty result is now cached too — previously a body without fields rescanned
   * its stub/PSI children on every call.
   */
  public GrField[] getFields() {
    GrField[] fields = myFields;
    if (fields == null) {
      GrVariableDeclaration[] declarations =
        getStubOrPsiChildren(GroovyElementTypes.VARIABLE_DEFINITION, GrVariableDeclaration.ARRAY_FACTORY);
      if (declarations.length == 0) {
        fields = GrField.EMPTY_ARRAY;
      }
      else {
        List<GrField> result = new ArrayList<GrField>();
        for (GrVariableDeclaration declaration : declarations) {
          GrVariable[] variables = declaration.getVariables();
          for (GrVariable variable : variables) {
            // Declarations may also hold plain locals in error recovery; keep fields only.
            if (variable instanceof GrField) {
              result.add((GrField) variable);
            }
          }
        }
        fields = result.toArray(new GrField[result.size()]);
      }
      myFields = fields;
    }
    return fields;
  }

  public GrMethod[] getMethods() {
    return getStubOrPsiChildren(TokenSets.METHOD_DEFS, GrMethod.ARRAY_FACTORY);
  }

  public GrMembersDeclaration[] getMemberDeclarations() {
    return findChildrenByClass(GrMembersDeclaration.class);
  }

  /** The opening {@code {}, or null while the body is incomplete. */
  @Nullable
  public PsiElement getLBrace() {
    return findChildByType(GroovyTokenTypes.mLCURLY);
  }

  /** The closing {@code }}, or null while the body is incomplete. */
  @Nullable
  public PsiElement getRBrace() {
    return findChildByType(GroovyTokenTypes.mRCURLY);
  }

  @NotNull
  public GrClassInitializer[] getInitializers() {
    return findChildrenByClass(GrClassInitializer.class);
  }

  public PsiClass[] getInnerClasses() {
    return getStubOrPsiChildren(GroovyElementTypes.TYPE_DEFINITION_TYPES, PsiClass.ARRAY_FACTORY);
  }

  public void removeVariable(GrVariable variable) {
    PsiImplUtil.removeVariable(variable);
  }

  /**
   * Inserts a variable declaration before {@code anchor}, or before the
   * closing brace when {@code anchor} is null.
   *
   * @throws IncorrectOperationException if there is neither an anchor nor a
   *         closing brace, or the anchor is not a direct child of this body
   */
  public GrVariableDeclaration addVariableDeclarationBefore(GrVariableDeclaration declaration, GrStatement anchor) throws IncorrectOperationException {
    PsiElement rBrace = getRBrace();
    if (anchor == null && rBrace == null) {
      throw new IncorrectOperationException();
    }
    if (anchor != null && !this.equals(anchor.getParent())) {
      throw new IncorrectOperationException();
    }
    ASTNode elemNode = declaration.getNode();
    final ASTNode anchorNode = anchor != null ? anchor.getNode() : rBrace.getNode();
    getNode().addChild(elemNode, anchorNode);
    // Keep the new declaration on its own line.
    getNode().addLeaf(GroovyTokenTypes.mNLS, "\n", anchorNode);
    return (GrVariableDeclaration) elemNode.getPsi();
  }

  @Override
  public void deleteChildInternal(@NotNull ASTNode child) {
    final PsiElement element = child.getPsi();
    // Remove trailing separators/newlines along with the statement itself.
    if (element instanceof GrTopStatement) {
      PsiImplUtil.deleteStatementTail(this, element);
    }
    super.deleteChildInternal(child);
  }

  @Override
  public void deleteChildRange(PsiElement first, PsiElement last) throws IncorrectOperationException {
    if (last instanceof GrTopStatement) {
      PsiImplUtil.deleteStatementTail(this, last);
    }
    super.deleteChildRange(first, last);
  }

  /** Body of an ordinary class/interface/trait definition. */
  public static class GrClassBody extends GrTypeDefinitionBodyBase implements StubBasedPsiElement<EmptyStub> {
    public GrClassBody(@NotNull ASTNode node) {
      super(node);
    }

    public GrClassBody(EmptyStub stub) {
      super(stub, GroovyElementTypes.CLASS_BODY);
    }

    public void accept(GroovyElementVisitor visitor) {
      visitor.visitTypeDefinitionBody(this);
    }
  }

  /** Body of an enum definition; additionally exposes the enum-constant list. */
  public static class GrEnumBody extends GrTypeDefinitionBodyBase implements GrEnumDefinitionBody, StubBasedPsiElement<EmptyStub> {
    public GrEnumBody(@NotNull ASTNode node) {
      super(node);
    }

    public GrEnumBody(EmptyStub stub) {
      super(stub, GroovyElementTypes.ENUM_BODY);
    }

    @Nullable
    public GrEnumConstantList getEnumConstantList() {
      return getStubOrPsiChild(GroovyElementTypes.ENUM_CONSTANTS);
    }

    public void accept(GroovyElementVisitor visitor) {
      visitor.visitEnumDefinitionBody(this);
    }
  }

  /**
   * When constructor definitions are copied into this body, rewrites their
   * identifier to the name of the enclosing type definition.
   */
  @Override
  public ASTNode addInternal(ASTNode first, ASTNode last, ASTNode anchor, Boolean before) {
    ASTNode afterLast = last.getTreeNext();
    ASTNode next;
    for (ASTNode child = first; child != afterLast; child = next) {
      next = child.getTreeNext();
      if (child.getElementType() == GroovyElementTypes.CONSTRUCTOR_DEFINITION) {
        // NOTE(review): assumes a constructor definition always carries an
        // mIDENT child; findChildByType would return null otherwise — confirm.
        ASTNode oldIdentifier = child.findChildByType(GroovyTokenTypes.mIDENT);
        ASTNode newIdentifier = ((GrTypeDefinition)getParent()).getNameIdentifierGroovy().getNode().copyElement();
        child.replaceChild(oldIdentifier, newIdentifier);
      }
    }
    return super.addInternal(first, last, anchor, before);
  }
}
| |
package seedu.address.logic;
import com.google.common.eventbus.Subscribe;
import seedu.task.commons.core.Config;
import seedu.task.commons.core.EventsCenter;
import seedu.task.commons.events.model.TaskManagerChangedEvent;
import seedu.task.commons.events.ui.JumpToListRequestEvent;
import seedu.task.commons.events.ui.ShowHelpRequestEvent;
import seedu.task.logic.Logic;
import seedu.task.logic.LogicManager;
import seedu.task.logic.commands.*;
import seedu.task.model.TaskManager;
import seedu.task.model.Model;
import seedu.task.model.ModelManager;
import seedu.task.model.ReadOnlyTaskManager;
import seedu.task.model.tag.Tag;
import seedu.task.model.tag.UniqueTagList;
import seedu.task.model.task.*;
import seedu.task.storage.StorageManager;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static seedu.task.commons.core.Messages.*;
public class LogicManagerTest {
/**
 * See https://github.com/junit-team/junit4/wiki/rules#temporaryfolder-rule
 */
@Rule
public TemporaryFolder saveFolder = new TemporaryFolder();
private Model model;
private Logic logic;
// NOTE(review): 'config' is never assigned anywhere in this test class, so
// setup() passes null to LogicManager — confirm that is intended.
private Config config;
// These are for checking the correctness of the events raised
private ReadOnlyTaskManager latestSavedTaskManager;
private boolean helpShown;
private int targetedJumpIndex;
// Records a snapshot of the task manager every time the model reports a change,
// so tests can assert what would have been persisted.
@Subscribe
private void handleLocalModelChangedEvent(TaskManagerChangedEvent abce) {
    latestSavedTaskManager = new TaskManager(abce.data);
}
// Flags that the help window was requested.
@Subscribe
private void handleShowHelpRequestEvent(ShowHelpRequestEvent she) {
    helpShown = true;
}
// Captures the 0-based index targeted by a jump-to-list event (e.g. select).
@Subscribe
private void handleJumpToListRequestEvent(JumpToListRequestEvent je) {
    targetedJumpIndex = je.targetIndex;
}
// Builds a fresh model/logic pair backed by files in the temporary folder and
// registers this test as an event-bus subscriber.
@Before
public void setup() {
    model = new ModelManager();
    String tempTaskManagerFile = saveFolder.getRoot().getPath() + "TempTaskManager.xml";
    String tempPreferencesFile = saveFolder.getRoot().getPath() + "TempPreferences.json";
    // 'config' is still null here — see the field note above. TODO confirm intended.
    logic = new LogicManager(model, new StorageManager(tempTaskManagerFile, tempPreferencesFile), config);
    EventsCenter.getInstance().registerHandler(this);
    latestSavedTaskManager = new TaskManager(model.getTaskManager()); // last saved assumed to be up to date before.
    helpShown = false;
    targetedJumpIndex = -1; // none yet
}
@After
public void teardown() {
    EventsCenter.clearSubscribers();
}
//@@author A0139284X
@Test
public void execute_undoAtStartOfApplication() throws Exception {
    // With no prior mutating command, undo must report failure.
    assertCommandBehavior(UndoCommand.COMMAND_WORD, UndoCommand.MESSAGE_FAIL);
}
@Test
public void execute_undo_multipleUndoAtStartOfApplication() throws Exception {
    TestDataHelper helper = new TestDataHelper();
    // Seed three tasks so exactly three operations are undoable.
    for (int seed = 1; seed <= 3; seed++) {
        model.addTask(helper.generateTask(seed));
    }
    // Asking for four undos can only succeed for the three recorded operations.
    assertCommandBehavior(UndoCommand.COMMAND_WORD + " 4", String.format(UndoCommand.MESSAGE_SUCCESS, 3));
}
/** Runs {@code help <commandWord>} and expects that command's usage message. */
private void assertHelpBehavior(String commandWord, String usageMessage) throws Exception {
    assertCommandBehavior("help " + commandWord, usageMessage);
}
@Test
public void execute_help_addCommand() throws Exception {
    assertHelpBehavior(AddCommand.COMMAND_WORD, AddCommand.MESSAGE_USAGE);
}
@Test
public void execute_help_editCommand() throws Exception {
    assertHelpBehavior(EditCommand.COMMAND_WORD, EditCommand.MESSAGE_USAGE);
}
@Test
public void execute_help_deleteCommand() throws Exception {
    assertHelpBehavior(DeleteCommand.COMMAND_WORD, DeleteCommand.MESSAGE_USAGE);
}
@Test
public void execute_help_deleteMarkedCommand() throws Exception {
    assertHelpBehavior(DeleteMarkedCommand.COMMAND_WORD, DeleteMarkedCommand.MESSAGE_USAGE);
}
@Test
public void execute_help_markCommand() throws Exception {
    assertHelpBehavior(MarkCommand.COMMAND_WORD, MarkCommand.MESSAGE_USAGE);
}
@Test
public void execute_help_undoCommand() throws Exception {
    assertHelpBehavior(UndoCommand.COMMAND_WORD, UndoCommand.MESSAGE_USAGE);
}
@Test
public void execute_help_redoCommand() throws Exception {
    assertHelpBehavior(RedoCommand.COMMAND_WORD, RedoCommand.MESSAGE_USAGE);
}
@Test
public void execute_help_selectCommand() throws Exception {
    assertHelpBehavior(SelectCommand.COMMAND_WORD, SelectCommand.MESSAGE_USAGE);
}
@Test
public void execute_help_clearCommand() throws Exception {
    assertHelpBehavior(ClearCommand.COMMAND_WORD, ClearCommand.MESSAGE_USAGE);
}
@Test
public void execute_help_clearMarkedCommand() throws Exception {
    assertHelpBehavior(ClearMarkedCommand.COMMAND_WORD, ClearMarkedCommand.MESSAGE_USAGE);
}
@Test
public void execute_help_findCommand() throws Exception {
    assertHelpBehavior(FindCommand.COMMAND_WORD, FindCommand.MESSAGE_USAGE);
}
@Test
public void execute_help_listCommand() throws Exception {
    assertHelpBehavior(ListCommand.COMMAND_WORD, ListCommand.MESSAGE_USAGE);
}
@Test
public void execute_help_exitCommand() throws Exception {
    assertHelpBehavior(ExitCommand.COMMAND_WORD, ExitCommand.MESSAGE_USAGE);
}
@Test
public void execute_help_helpCommand() throws Exception {
    assertHelpBehavior(HelpCommand.COMMAND_WORD, HelpCommand.MESSAGE_USAGE);
}
@Test
public void execute_help_bareCommand() throws Exception {
    assertHelpBehavior(BareCommand.COMMAND_WORD, BareCommand.MESSAGE_USAGE);
}
@Test
public void execute_help_changeDirectoryCommand() throws Exception {
    assertHelpBehavior(ChangeDirectoryCommand.COMMAND_WORD, ChangeDirectoryCommand.MESSAGE_USAGE);
}
//@@author
// Blank input is treated as an invalid command format and shows general help.
@Test
public void execute_invalid() throws Exception {
    String invalidCommand = "       ";
    assertCommandBehavior(invalidCommand, String.format(MESSAGE_INVALID_COMMAND_FORMAT, HelpCommand.MESSAGE_USAGE));
}
/**
 * Executes the command and confirms that the result message is correct.
 * Both the task manager and the 'last shown list' are expected to be
 * empty.
 *
 * @see #assertCommandBehavior(String, String, ReadOnlyTaskManager, List)
 */
private void assertCommandBehavior(String inputCommand, String expectedMessage) throws Exception {
    assertCommandBehavior(inputCommand, expectedMessage, new TaskManager(), Collections.emptyList());
}
/**
 * Executes the command and confirms that the result message is correct and
 * also confirms that the following three parts of the LogicManager object's
 * state are as expected:<br>
 * - the internal task manager data are same as those in the
 * {@code expectedTaskManager} <br>
 * - the backing list shown by UI matches the {@code expectedShownList} <br>
 * - {@code expectedTaskManager} was saved to the storage file (as observed
 * through the change-event snapshot). <br>
 */
private void assertCommandBehavior(String inputCommand, String expectedMessage,
        ReadOnlyTaskManager expectedTaskManager, List<? extends ReadOnlyTask> expectedShownList) throws Exception {
    // Execute the command
    CommandResult result = logic.execute(inputCommand);
    // Confirm the ui display elements should contain the right data
    assertEquals(expectedMessage, result.feedbackToUser);
    assertEquals(expectedShownList, model.getFilteredTaskList());
    // Confirm the state of data (saved and in-memory) is as expected
    assertEquals(expectedTaskManager, model.getTaskManager());
    assertEquals(expectedTaskManager, latestSavedTaskManager);
}
// An unrecognized command word yields the generic unknown-command message.
@Test
public void execute_unknownCommandWord() throws Exception {
    String unknownCommand = "uicfhmowqewca";
    assertCommandBehavior(unknownCommand, MESSAGE_UNKNOWN_COMMAND);
}
// Bare 'help' opens the general help window.
@Test
public void execute_help() throws Exception {
    assertCommandBehavior("help", HelpCommand.SHOWING_HELP_MESSAGE);
}
@Test
public void execute_exit() throws Exception {
    assertCommandBehavior("exit", ExitCommand.MESSAGE_EXIT_ACKNOWLEDGEMENT);
}
/*
NOTE(review): disabled test kept for reference — re-enable or delete.
@Test
public void execute_clear() throws Exception {
    model.resetData(new TaskManager());
    TestDataHelper helper = new TestDataHelper();
    model.addTask(helper.generateTask(1));
    model.addTask(helper.generateTask(2));
    model.addTask(helper.generateTask(3));
    assertCommandBehavior("clear", ClearCommand.MESSAGE_SUCCESS, new TaskManager(), Collections.emptyList());
}
*/
// @@author A0139284X
// Adding a floating task (no date/time) succeeds.
@Test
public void execute_add_FloatTasks() throws Exception {
    // setup expectations
    TestDataHelper helper = new TestDataHelper();
    Task toBeAdded = helper.floating();
    TaskManager expectedAB = new TaskManager();
    expectedAB.addTask(toBeAdded);
    // execute command and verify result
    assertCommandBehavior(helper.generateAddCommand(toBeAdded),
            String.format(AddCommand.MESSAGE_SUCCESS, toBeAdded), expectedAB, expectedAB.getTaskList());
}
// @@author
// Each malformed field triggers its own constraint message.
@Test
public void execute_add_invalidTaskData() throws Exception {
    assertCommandBehavior("add []\\[;] d/11125678 e/valid@e.mail i/valid, address",
            TaskName.MESSAGE_NAME_CONSTRAINTS);
    assertCommandBehavior("add Valid TaskName d/not_numbers e/valid@e.mail i/valid, address",
            Date.MESSAGE_DATE_CONSTRAINTS);
    assertCommandBehavior("add Valid TaskName d/31125678 e/notAnEmail i/valid, address",
            Time.MESSAGE_TIME_CONSTRAINTS);
    assertCommandBehavior("add Valid TaskName d/11115678 e/valid@e.mail i/valid, address t/invalid_-[.tag",
            Tag.MESSAGE_TAG_CONSTRAINTS);
}
@Test
public void execute_add_successful() throws Exception {
    // setup expectations
    TestDataHelper helper = new TestDataHelper();
    Task toBeAdded = helper.adam();
    TaskManager expectedAB = new TaskManager();
    expectedAB.addTask(toBeAdded);
    // execute command and verify result
    assertCommandBehavior(helper.generateAddCommand(toBeAdded),
            String.format(AddCommand.MESSAGE_SUCCESS, toBeAdded), expectedAB, expectedAB.getTaskList());
}
// Adding the same task twice is rejected.
@Test
public void execute_addDuplicate_notAllowed() throws Exception {
    // setup expectations
    TestDataHelper helper = new TestDataHelper();
    Task toBeAdded = helper.adam();
    TaskManager expectedAB = new TaskManager();
    expectedAB.addTask(toBeAdded);
    // setup starting state
    model.addTask(toBeAdded); // task already in internal task manager
    // execute command and verify result
    assertCommandBehavior(helper.generateAddCommand(toBeAdded), AddCommand.MESSAGE_DUPLICATE_TASK, expectedAB,
            expectedAB.getTaskList());
}
@Test
public void execute_list_showsAllTasks() throws Exception {
    // prepare expectations
    TestDataHelper helper = new TestDataHelper();
    TaskManager expectedAB = helper.generateTaskManager(2);
    List<? extends ReadOnlyTask> expectedList = expectedAB.getTaskList();
    // prepare task manager state
    helper.addToModel(model, 2);
    assertCommandBehavior("list", ListCommand.MESSAGE_SUCCESS, expectedAB, expectedList);
}
/**
 * Confirms the 'invalid argument index number behaviour' for the given
 * command targeting a single task in the shown list, using visible index.
 *
 * @param commandWord
 *            to test assuming it targets a single task in the last shown
 *            list based on visible index.
 */
private void assertIncorrectIndexFormatBehaviorForCommand(String commandWord, String expectedMessage)
        throws Exception {
    assertCommandBehavior(commandWord, expectedMessage); // index missing
    assertCommandBehavior(commandWord + " +1", expectedMessage); // index should be unsigned
    assertCommandBehavior(commandWord + " -1", expectedMessage); // index should be unsigned
    assertCommandBehavior(commandWord + " 0", expectedMessage); // index cannot be 0
    assertCommandBehavior(commandWord + " not_a_number", expectedMessage);
}
/**
 * Confirms the 'index not found' behaviour for the given command: with two
 * tasks in the model, executing {@code commandWord} with index 3 (just past
 * the end of the shown list) must report the invalid-index message.
 *
 * @param commandWord
 *            to test assuming it targets a single task in the last shown
 *            list based on visible index.
 */
private void assertIndexNotFoundBehaviorForCommand(String commandWord) throws Exception {
    String expectedMessage = MESSAGE_INVALID_TASK_DISPLAYED_INDEX;
    TestDataHelper helper = new TestDataHelper();
    List<Task> personList = helper.generateTaskList(2);
    // set task manager state to 2 tasks
    model.resetData(new TaskManager());
    for (Task p : personList) {
        model.addTask(p);
    }
    assertCommandBehavior(commandWord + " 3", expectedMessage, model.getTaskManager(), personList);
}
@Test
public void execute_selectInvalidArgsFormat_errorMessageShown() throws Exception {
    assertIncorrectIndexFormatBehaviorForCommand("select",
            String.format(MESSAGE_INVALID_COMMAND_FORMAT, SelectCommand.MESSAGE_USAGE));
}
@Test
public void execute_selectIndexNotFound_errorMessageShown() throws Exception {
    assertIndexNotFoundBehaviorForCommand("select");
}
@Test
public void execute_select_jumpsToCorrectPerson() throws Exception {
    TestDataHelper helper = new TestDataHelper();
    List<Task> threeTasks = helper.generateTaskList(3);
    TaskManager expectedManager = helper.generateTaskManager(threeTasks);
    helper.addToModel(model, threeTasks);
    // Selecting reports the 1-based index in the feedback message...
    assertCommandBehavior("select 2", String.format(SelectCommand.MESSAGE_SELECT_TASK_SUCCESS, 2),
            expectedManager, expectedManager.getTaskList());
    // ...while the raised jump event carries the 0-based index.
    assertEquals(1, targetedJumpIndex);
    assertEquals(model.getFilteredTaskList().get(1), threeTasks.get(1));
}
@Test
public void execute_deleteInvalidArgsFormat_errorMessageShown() throws Exception {
    assertIncorrectIndexFormatBehaviorForCommand("delete",
            String.format(MESSAGE_INVALID_COMMAND_FORMAT, DeleteCommand.MESSAGE_USAGE));
}
@Test
public void execute_deleteIndexNotFound_errorMessageShown() throws Exception {
    assertIndexNotFoundBehaviorForCommand("delete");
}
@Test
public void execute_delete_removesCorrectPerson() throws Exception {
    TestDataHelper helper = new TestDataHelper();
    List<Task> threeTasks = helper.generateTaskList(3);
    TaskManager expectedManager = helper.generateTaskManager(threeTasks);
    // The expected state lacks the second task, which "delete 2" removes.
    expectedManager.removeTask(threeTasks.get(1));
    helper.addToModel(model, threeTasks);
    assertCommandBehavior("delete 2",
            String.format(DeleteCommand.MESSAGE_DELETE_TASK_SUCCESS, threeTasks.get(1)),
            expectedManager, expectedManager.getTaskList());
}
@Test
public void execute_find_invalidArgsFormat() throws Exception {
    assertCommandBehavior("find ",
            String.format(MESSAGE_INVALID_COMMAND_FORMAT, FindCommand.MESSAGE_USAGE));
}
@Test
public void execute_find_canMatchPartialWordsInNames() throws Exception {
    TestDataHelper helper = new TestDataHelper();
    Task wholeWordHit1 = helper.generateTaskWithName("bla bla KEY bla");
    Task wholeWordHit2 = helper.generateTaskWithName("bla KEY bla bceofeia");
    Task splitKeywordMiss = helper.generateTaskWithName("KE Y");
    Task partialWordHit = helper.generateTaskWithName("KEYKEYKEY sduauo");
    List<Task> allTasks = helper.generateTaskList(splitKeywordMiss, wholeWordHit1, partialWordHit, wholeWordHit2);
    TaskManager expectedManager = helper.generateTaskManager(allTasks);
    // Partial matches count; a keyword broken by whitespace does not.
    List<Task> expectedHits = helper.generateTaskList(wholeWordHit1, partialWordHit, wholeWordHit2);
    helper.addToModel(model, allTasks);
    assertCommandBehavior("find KEY", Command.getMessageForTaskListShownSummary(expectedHits.size()),
            expectedManager, expectedHits);
}
@Test
public void execute_find_isNotCaseSensitive() throws Exception {
    TestDataHelper helper = new TestDataHelper();
    Task upperHit1 = helper.generateTaskWithName("bla bla KEY bla");
    Task upperHit2 = helper.generateTaskWithName("bla KEY bla bceofeia");
    Task lowerHit = helper.generateTaskWithName("key key");
    Task mixedHit = helper.generateTaskWithName("KEy sduauo");
    List<Task> allTasks = helper.generateTaskList(lowerHit, upperHit1, mixedHit, upperHit2);
    TaskManager expectedManager = helper.generateTaskManager(allTasks);
    // Every task matches regardless of case.
    List<Task> expectedHits = allTasks;
    helper.addToModel(model, allTasks);
    assertCommandBehavior("find KEY", Command.getMessageForTaskListShownSummary(expectedHits.size()),
            expectedManager, expectedHits);
}
@Test
public void execute_find_matchesIfAnyKeywordPresent() throws Exception {
    TestDataHelper helper = new TestDataHelper();
    Task keyHit = helper.generateTaskWithName("bla bla KEY bla");
    Task randomHit = helper.generateTaskWithName("bla rAnDoM bla bceofeia");
    Task lowerKeyHit = helper.generateTaskWithName("key key");
    Task miss = helper.generateTaskWithName("sduauo");
    List<Task> allTasks = helper.generateTaskList(keyHit, miss, randomHit, lowerKeyHit);
    TaskManager expectedManager = helper.generateTaskManager(allTasks);
    // Keywords are OR-ed: a task matching either "key" or "rAnDoM" is shown.
    List<Task> expectedHits = helper.generateTaskList(keyHit, randomHit, lowerKeyHit);
    helper.addToModel(model, allTasks);
    assertCommandBehavior("find key rAnDoM", Command.getMessageForTaskListShownSummary(expectedHits.size()),
            expectedManager, expectedHits);
}
// @@author A0139284X
@Test
public void execute_edit_changeTaskName() throws Exception {
    // Editing task 1 should replace its name and leave the remaining fields untouched.
    TestDataHelper helper = new TestDataHelper();
    Task original = helper.adam();
    Task renamed = helper.adamChangeTaskName();
    TaskManager expectedAB = new TaskManager();
    expectedAB.addTask(renamed);
    // Seed the model with the single task and pre-apply the edit the command is expected to make.
    model.resetData(new TaskManager());
    model.addTask(original);
    model.editTask(original, renamed);
    // Execute the command and verify the resulting state and feedback message.
    assertCommandBehavior("edit 1 Not Adam Brown", String.format(EditCommand.MESSAGE_EDIT_TASK_SUCCESS, renamed),
            expectedAB, expectedAB.getTaskList());
}
@Test
public void execute_edit_changeDeadlineTaskToFloating() throws Exception {
    // Clearing the start date, due date and importance should turn a deadline task into a floating one.
    TestDataHelper helper = new TestDataHelper();
    Task original = helper.deadlineTask();
    Task asFloating = helper.floating();
    TaskManager expectedAB = new TaskManager();
    expectedAB.addTask(asFloating);
    // Seed the model with the single task and pre-apply the edit the command is expected to make.
    model.resetData(new TaskManager());
    model.addTask(original);
    model.editTask(original, asFloating);
    // Execute the command and verify the resulting state and feedback message.
    assertCommandBehavior("edit 1 sd/- d/- i/-", String.format(EditCommand.MESSAGE_EDIT_TASK_SUCCESS, asFloating),
            expectedAB, expectedAB.getTaskList());
}
@Test
public void execute_edit_changeStartAndEndTime() throws Exception {
    // Editing task 1's start and end times should leave every other field untouched.
    TestDataHelper helper = new TestDataHelper();
    Task original = helper.adam();
    Task retimed = helper.adamChangeTime();
    TaskManager expectedAB = new TaskManager();
    expectedAB.addTask(retimed);
    // Seed the model with the single task and pre-apply the edit the command is expected to make.
    model.resetData(new TaskManager());
    model.addTask(original);
    model.editTask(original, retimed);
    // Execute the command and verify the resulting state and feedback message.
    assertCommandBehavior("edit 1 st/0900 e/1000", String.format(EditCommand.MESSAGE_EDIT_TASK_SUCCESS, retimed),
            expectedAB, expectedAB.getTaskList());
}
@Test
public void execute_undo_addCommandToEmptyList() throws Exception {
    // Undoing the only add should leave the task manager empty again.
    TestDataHelper helper = new TestDataHelper();
    Task added = helper.adam();
    // Put the model into the post-add state: exactly one task.
    model.resetData(new TaskManager());
    model.addTask(added);
    // Execute undo and verify the list reverts to empty.
    assertCommandBehavior(UndoCommand.COMMAND_WORD, String.format(UndoCommand.MESSAGE_SUCCESS, 1),
            new TaskManager(), Collections.emptyList());
}
@Test
public void execute_undo_addCommandToNonEmptyList() throws Exception {
    // Undo should remove only the most recently added task, leaving the original two behind.
    TestDataHelper helper = new TestDataHelper();
    Task lastAdded = helper.adam();
    List<Task> existingTasks = helper.generateTaskList(2);
    TaskManager expectedAB = helper.generateTaskManager(existingTasks);
    // Put the model into the post-add state: the two existing tasks plus the new one.
    model.resetData(new TaskManager());
    helper.addToModel(model, existingTasks);
    model.addTask(lastAdded);
    // Execute undo and verify only the pre-existing tasks remain.
    assertCommandBehavior(UndoCommand.COMMAND_WORD, String.format(UndoCommand.MESSAGE_SUCCESS, 1), expectedAB,
            expectedAB.getTaskList());
}
@Test
public void execute_undo_deleteCommand() throws Exception {
    // Undoing a delete should restore the removed task.
    TestDataHelper helper = new TestDataHelper();
    Task removed = helper.adam();
    TaskManager expectedAB = new TaskManager();
    expectedAB.addTask(removed);
    // Put the model into the post-delete state: task added, then deleted.
    model.resetData(new TaskManager());
    model.addTask(removed);
    model.deleteTask(removed);
    // Execute undo and verify the task is back.
    assertCommandBehavior(UndoCommand.COMMAND_WORD, String.format(UndoCommand.MESSAGE_SUCCESS, 1), expectedAB,
            expectedAB.getTaskList());
}
@Test
public void execute_undo_clearCommand() throws Exception {
    // Undoing a clear should restore the task that was wiped out.
    TestDataHelper helper = new TestDataHelper();
    Task wipedOut = helper.adam();
    TaskManager expectedAB = new TaskManager();
    expectedAB.addTask(wipedOut);
    // Put the model into the post-clear state: task added, then everything reset.
    model.resetData(new TaskManager());
    model.addTask(wipedOut);
    model.resetData(new TaskManager());
    // Execute undo and verify the task is back.
    assertCommandBehavior(UndoCommand.COMMAND_WORD, String.format(UndoCommand.MESSAGE_SUCCESS, 1), expectedAB,
            expectedAB.getTaskList());
}
@Test
public void execute_undo_editCommand() throws Exception {
    // Undoing an edit should restore the task's original name.
    TestDataHelper helper = new TestDataHelper();
    Task original = helper.adam();
    Task edited = helper.adamChangeTaskName();
    TaskManager expectedAB = new TaskManager();
    expectedAB.addTask(original);
    // Put the model into the post-edit state: task added, then edited.
    model.resetData(new TaskManager());
    model.addTask(original);
    model.editTask(original, edited);
    // Execute undo and verify the original task is back.
    assertCommandBehavior(UndoCommand.COMMAND_WORD, String.format(UndoCommand.MESSAGE_SUCCESS, 1), expectedAB,
            expectedAB.getTaskList());
}
@Test
public void execute_mark_markFirstTask() throws Exception {
    // Marking task 1 should remove it from the shown list, leaving only the second task.
    TestDataHelper helper = new TestDataHelper();
    TaskManager expectedAB = new TaskManager();
    List<Task> taskList = helper.generateTaskList(2);
    Task toBeMarked = taskList.get(0);
    expectedAB.addTask(taskList.get(1));
    // Set the model state to the two generated tasks.
    model.resetData(new TaskManager());
    for (Task task : taskList) {
        model.addTask(task);
    }
    // Execute the command and verify the result; the success message names the marked task.
    // (Fix: the original declared toBeMarked but never used it, re-fetching taskList.get(0) instead.)
    assertCommandBehavior(MarkCommand.COMMAND_WORD + " 1",
            String.format(MarkCommand.MESSAGE_SUCCESS, toBeMarked), expectedAB, expectedAB.getTaskList());
}
//@@author
/**
* A utility class to generate test data.
*/
/**
 * A utility class to generate test data.
 */
class TestDataHelper {
    /**
     * @return a sample task named "Adam Brown" with an event start (0800), a deadline (0900),
     *         importance "**" and two tags.
     */
    Task adam() throws Exception {
        TaskName taskName = new TaskName("Adam Brown");
        Date date = new Date("11112111");
        Time time = new Time("0900");
        EventStart eventStart = new EventStart(new Date("11112111"), new Time("0800"));
        Deadline deadline = new Deadline(date, time);
        Importance importance = new Importance("**");
        Tag tag1 = new Tag("tag1");
        Tag tag2 = new Tag("tag2");
        UniqueTagList tags = new UniqueTagList(tag1, tag2);
        return new Task(taskName, eventStart, deadline, importance, tags);
    }

    /**
     * @return the {@link #adam()} task with its start time moved to 0900 and its deadline time to 1000.
     */
    Task adamChangeTime() throws Exception {
        TaskName taskName = new TaskName("Adam Brown");
        Date date = new Date("11112111");
        Time time = new Time("1000");
        EventStart eventStart = new EventStart(new Date("11112111"), new Time("0900"));
        Deadline deadline = new Deadline(date, time);
        Importance importance = new Importance("**");
        Tag tag1 = new Tag("tag1");
        Tag tag2 = new Tag("tag2");
        UniqueTagList tags = new UniqueTagList(tag1, tag2);
        return new Task(taskName, eventStart, deadline, importance, tags);
    }

    /**
     * @return the sample task renamed to "Not Adam Brown", with deadline time 2359 and an empty event start.
     */
    Task adamChangeTaskName() throws Exception {
        TaskName taskName = new TaskName("Not Adam Brown");
        Date date = new Date("11112111");
        Time time = new Time("2359");
        EventStart eventStart = new EventStart(new Date(""), new Time(""));
        Deadline deadline = new Deadline(date, time);
        Importance importance = new Importance("**");
        Tag tag1 = new Tag("tag1");
        Tag tag2 = new Tag("tag2");
        UniqueTagList tags = new UniqueTagList(tag1, tag2);
        return new Task(taskName, eventStart, deadline, importance, tags);
    }

    /**
     * @return a floating task (empty dates, times and importance).
     */
    Task floating() throws Exception {
        TaskName taskName = new TaskName("Floater");
        Date date = new Date("");
        Time time = new Time("");
        EventStart eventStart = new EventStart(date, time);
        Deadline deadline = new Deadline(date, time);
        Importance importance = new Importance("");
        UniqueTagList tags = new UniqueTagList();
        return new Task(taskName, eventStart, deadline, importance, tags);
    }

    /**
     * @return a deadline task due 03112016 at 1700 with an empty event start.
     *         (Fixed javadoc: this previously claimed to return a floating task.)
     */
    Task deadlineTask() throws Exception {
        TaskName taskName = new TaskName("Floater");
        Date date = new Date("03112016");
        Time time = new Time("1700");
        EventStart eventStart = new EventStart(new Date(""), new Time(""));
        Deadline deadline = new Deadline(date, time);
        Importance importance = new Importance("");
        UniqueTagList tags = new UniqueTagList();
        return new Task(taskName, eventStart, deadline, importance, tags);
    }

    /**
     * Generates a valid task using the given seed. Running this function
     * with the same parameter value guarantees the returned task will
     * have the same state. Each unique seed will generate a unique Task
     * object.
     *
     * @param seed
     *            used to generate the task data field values
     */
    Task generateTask(int seed) throws Exception {
        return new Task(new TaskName("Task " + seed),
                new EventStart(new Date("" + (31129989 - Math.abs(seed))), new Time("" + (Math.abs(seed) + 1200))),
                new Deadline(new Date("" + (31129999 - Math.abs(seed))), new Time("" + (Math.abs(seed) + 1200))),
                new Importance(new String(new char[seed]).replace("\0", "*")),
                new UniqueTagList(new Tag("tag" + Math.abs(seed)), new Tag("tag" + Math.abs(seed + 1))));
    }

    /** Generates the correct add command based on the task given. */
    String generateAddCommand(Task p) {
        // StringBuilder instead of StringBuffer: no synchronization is needed for a local variable.
        StringBuilder cmd = new StringBuilder();
        cmd.append("add ");
        cmd.append(p.getName().toString());
        cmd.append(" sd/").append(p.getEventStart().getStartDate().toString());
        cmd.append(" st/").append(p.getEventStart().getStartTime().toString());
        cmd.append(" d/").append(p.getDeadline().getDueDate().toString());
        cmd.append(" e/").append(p.getDeadline().getDueTime().toString());
        cmd.append(" i/").append(p.getImportance());
        UniqueTagList tags = p.getTags();
        for (Tag t : tags) {
            cmd.append(" t/").append(t.tagName);
        }
        return cmd.toString();
    }

    /**
     * Generates a TaskManager with auto-generated tasks.
     */
    TaskManager generateTaskManager(int numGenerated) throws Exception {
        TaskManager taskManager = new TaskManager();
        addToTaskManager(taskManager, numGenerated);
        return taskManager;
    }

    /**
     * Generates a TaskManager based on the list of Tasks given.
     */
    TaskManager generateTaskManager(List<Task> tasks) throws Exception {
        TaskManager taskManager = new TaskManager();
        addToTaskManager(taskManager, tasks);
        return taskManager;
    }

    /**
     * Adds auto-generated Task objects to the given TaskManager.
     *
     * @param taskManager
     *            The TaskManager to which the Tasks will be added
     */
    void addToTaskManager(TaskManager taskManager, int numGenerated) throws Exception {
        addToTaskManager(taskManager, generateTaskList(numGenerated));
    }

    /**
     * Adds the given list of Tasks to the given TaskManager.
     */
    void addToTaskManager(TaskManager taskManager, List<Task> tasksToAdd) throws Exception {
        for (Task p : tasksToAdd) {
            taskManager.addTask(p);
        }
    }

    /**
     * Adds auto-generated Task objects to the given model.
     *
     * @param model
     *            The model to which the Tasks will be added
     */
    void addToModel(Model model, int numGenerated) throws Exception {
        addToModel(model, generateTaskList(numGenerated));
    }

    /**
     * Adds the given list of Tasks to the given model.
     */
    void addToModel(Model model, List<Task> tasksToAdd) throws Exception {
        for (Task p : tasksToAdd) {
            model.addTask(p);
        }
    }

    /**
     * Generates a list of Tasks with seeds 1..numGenerated.
     */
    List<Task> generateTaskList(int numGenerated) throws Exception {
        List<Task> tasks = new ArrayList<>();
        for (int i = 1; i <= numGenerated; i++) {
            tasks.add(generateTask(i));
        }
        return tasks;
    }

    /** @return a fixed-size list view of the given tasks. */
    List<Task> generateTaskList(Task... tasks) {
        return Arrays.asList(tasks);
    }

    /**
     * Generates a Task object with given name. Other fields will have some
     * dummy values.
     */
    Task generateTaskWithName(String name) throws Exception {
        return new Task(new TaskName(name), new EventStart(new Date("25124678"), new Time("0000")),
                new Deadline(new Date("25125678"), new Time("0000")), new Importance("**"),
                new UniqueTagList(new Tag("tag")));
    }
}
}
| |
/*
* Copyright 2015-2016 Red Hat, Inc, and individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.hal.resources;
import java.util.ArrayList;
import java.util.List;
import com.google.common.collect.Lists;
import static com.google.common.base.Strings.emptyToNull;
import static com.google.common.base.Strings.isNullOrEmpty;
import static java.util.stream.Collectors.joining;
import static org.jboss.hal.resources.Strings.substringAfterLast;
/**
* IDs used in HTML elements and across multiple classes. Please add IDs to this interface even if there's already an
* equivalent or similar constant in {@code ModelDescriptionConstants} (SoC).
* <p>
* The IDs defined here are reused by QA. So please make sure that IDs are not spread over the code base but gathered in
* this interface. This is not always possible - for instance if the ID contains dynamic parts like a resource name or
* selected server. But IDs which only contain static strings should be part of this interface.
*/
@SuppressWarnings({"DuplicateStringLiteralInspection", "SpellCheckingInspection"})
public interface Ids {
// ------------------------------------------------------ ids (a-z)
// Don't compose IDs with build(), otherwise they cannot be used in annotations.
String ABOUT_MODAL = "about-modal";
String ABOUT_MODAL_TITLE = "about-modal-title";
String ACCESS_CONTROL_BROWSE_BY = "access-control-browse-by";
String ACCESS_CONTROL_BROWSE_BY_GROUPS = "access-control-browse-by-groups";
String ACCESS_CONTROL_BROWSE_BY_ROLES = "access-control-browse-by-roles";
String ACCESS_CONTROL_BROWSE_BY_USERS = "access-control-browse-by-users";
String ACTIVE_OPERATION = "active-operation";
String ACTIVE_OPERATION_EMPTY = "active-operation-empty";
String ADD = "add";
String AFFINITY_LOCAL = "affinity-local";
String AFFINITY_NONE = "affinity-none";
String AFFINITY_PRIMARY_OWNER = "affinity-primary-owner";
String AFFINITY_RANKED = "affinity-ranked";
String ASSIGNMENT = "assignement";
String ASSIGNMENT_EXCLUDE = "assignement-exclude";
String ASSIGNMENT_INCLUDE = "assignement-include";
String ATTRIBUTES = "attributes";
String BADEGE_ICON = "badge-icon";
String BOOT_ERRORS_ADDRESS_COLUMN = "boot-errors-address-column";
String BOOT_ERRORS_EMPTY = "boot-errors-empty";
String BOOT_ERRORS_FORM = "boot-errors-form";
String BOOT_ERRORS_OPERATION_COLUMN = "boot-errors-operation-column";
String BOOT_ERRORS_TABLE = "boot-errors-table";
String BREADCRUMB = "breadcrumb";
String BROWSE_CONTENT_SELECT_EMPTY = "browse-content-select-empty";
String BROWSE_CONTENT_DEPLOYMENT_EMPTY = "browse-content-deployment-empty";
String BROWSE_CONTENT_EXPLODED_EMPTY = "browse-content-exploded-empty";
String BROWSE_CONTENT_UNSUPPORTED_EMPTY = "browse-content-unsupported-empty";
String CACHE = "cache";
String CACHE_ADD_ACTIONS = "cache-add-actions";
String CACHE_REFRESH = "cache-refresh";
String CACHE_COMPONENT_EXPIRATION = "cache-component-expiration";
String CACHE_COMPONENT_LOCKING = "cache-component-locking";
String CACHE_COMPONENT_PARTITION_HANDLING = "cache-component-partition-handling";
String CACHE_COMPONENT_STATE_TRANSFER = "cache-component-state-transfer";
String CACHE_COMPONENT_TRANSACTION = "cache-component-transaction";
String CACHE_CONTAINER = "cc";
String CACHE_CONTAINER_ADD = "cc-add";
String CACHE_CONTAINER_ADD_ACTIONS = "cc-add-actions";
String CACHE_CONTAINER_FORM = "cc-form";
String CACHE_CONTAINER_ITEM = "cc-item";
String CACHE_CONTAINER_REFRESH = "cc-refresh";
String CACHE_CONTAINER_THREAD_POOL_ASYNC_OPERATIONS = "cc-thread-pool-async-operations";
String CACHE_CONTAINER_THREAD_POOL_EXPIRATION = "cc-thread-pool-expiration";
String CACHE_CONTAINER_THREAD_POOL_LISTENER = "cc-thread-pool-listener";
String CACHE_CONTAINER_THREAD_POOL_PERSISTENCE = "cc-thread-pool-persistence";
String CACHE_CONTAINER_THREAD_POOL_REMOTE_COMMAND = "cc-thread-pool-remote-command";
String CACHE_CONTAINER_THREAD_POOL_STATE_TRANSFER = "cc-thread-pool-state-transfer";
String CACHE_CONTAINER_THREAD_POOL_TRANSPORT = "cc-thread-pool-transport";
String CACHE_CONTAINER_THREAD_POOLS_ITEM = "cc-thread-pools-item";
String CACHE_CONTAINER_TRANSPORT_EMPTY = "cc-transport-empty";
String CACHE_CONTAINER_TRANSPORT_FORM = "cc-transport-form";
String CACHE_CONTAINER_TRANSPORT_ITEM = "cc-transport-item";
String CACHE_MEMORY_BINARY = "cache-memory-binary";
String CACHE_MEMORY_HEAP = "cache-memory-heap";
String CACHE_MEMORY_OBJECT = "cache-memory-object";
String CACHE_MEMORY_OFF_HEAP = "cache-memory-off-heap";
String CACHE_STORE_BINARY_JDBC = "cache-store-binary-jdbc";
String CACHE_STORE_BINARY_TABLE = "binary-table";
String CACHE_STORE_CUSTOM = "cache-store-custom";
String CACHE_STORE_FILE = "cache-store-file";
String CACHE_STORE_HOT_ROD = "cache-store-hot-rod";
String CACHE_STORE_JDBC = "cache-store-jdbc";
String CACHE_STORE_MIXED_JDBC = "cache-store-mixed-jdbc";
String CACHE_STORE_STRING_TABLE = "string-table";
String CACHE_STORE_WRITE_BEHIND = "behind";
String CACHE_STORE_WRITE_THROUGH = "write";
String CANCEL_NON_PROGRESSING_OPERATION = "cancel-non-progressing-operation";
String CONFIGURATION = "configuration";
String CONFIGURATION_CHANGES = "configuration-changes";
String CONFIGURATION_CHANGES_EMPTY = "configuration-changes-empty";
String CONFIGURATION_CHANGES_NOT_ENABLED = "configuration-changes-not-enabled";
String CONFIGURATION_SUBSYSTEM = "css";
String CONNECTION = "connection";
String CONNECTION_POOL_ITEM = "connection-pool-item";
String CONNECTION_POOL_FORM = "connection-pool-form";
String CONTENT = "content";
String CONTENT_ADD = "content-add";
String CONTENT_ADD_ACTIONS = "content-add-actions";
String CONTENT_EDITOR = "content-editor";
String CONTENT_NEW = "content-new";
String CONTENT_REFRESH = "content-refresh";
String CONTENT_SEARCH = "content-search";
String CONTENT_TAB = "content-tab";
String CONTENT_TREE = "content-tree";
String CONTENT_TREE_ROOT = "content-tree-root";
String CONTENT_TREE_SEARCH = "content-tree-search";
String CONTENT_UNMANAGED_ADD = "content-unmanaged-add";
String COOKIE = "hal-cookie";
String CUSTOM_LOAD_METRIC = "custom-load-metric";
String CREDENTIAL_REFERENCE = "credential-reference";
String DATA_SOURCE_ADD = "ds-configuration-add";
String DATA_SOURCE_ADD_ACTIONS = "ds-configuration-add-actions";
String DATA_SOURCE_CONFIGURATION = "ds-configuration";
String DATA_SOURCE_CONNECTION_FORM = "ds-configuration-connection-form";
String DATA_SOURCE_DRIVER = "data-source-driver";
String DATA_SOURCE_DRIVER_FORM = "ds-configuration-driver-form";
String DATA_SOURCE_FORM = "ds-configuration-form";
String DATA_SOURCE_NAMES_FORM = "ds-configuration-names-form";
String DATA_SOURCE_PROPERTIES_FORM = "ds-configuration-properties-form";
String DATA_SOURCE_REFRESH = "ds-configuration-refresh";
String DATA_SOURCE_REVIEW_FORM = "ds-configuration-review-form";
String DATA_SOURCE_RUNTIME = "ds-runtime";
String DATA_SOURCE_RUNTIME_JDBC_FORM = "ds-runtime-jdbc-form";
String DATA_SOURCE_RUNTIME_JDBC_TAB = "ds-runtime-jdbc-tab";
String DATA_SOURCE_RUNTIME_POOL_FORM = "ds-runtime-pool-form";
String DATA_SOURCE_RUNTIME_POOL_TAB = "ds-runtime-pool-tab";
String DATA_SOURCE_RUNTIME_STATISTICS_NOT_AVAILABLE = "ds-runtime-statistics-na";
String DATA_SOURCE_RUNTIME_STATISTICS_NOT_ENABLED = "ds-runtime-statistics-disabled";
String DATA_SOURCE_RUNTIME_TAB_CONTAINER = "ds-runtime-tab-container";
String DATA_SOURCE_TEST_CONNECTION = "ds-configuration-test-connection";
String DEPLOYMENT = "deployment";
String DEPLOYMENT_ADD_ACTIONS = "deployment-add-actions";
String DEPLOYMENT_BROWSE_BY = "deployment-browse-by";
String DEPLOYMENT_EMPTY_CREATE = "deployment-empty-create";
String DEPLOYMENT_EMPTY_FORM = "deployment-empty-form";
String DEPLOYMENT_NOT_ENABLED_EMPTY = "deployment-not-enabled-empty";
String DEPLOYMENT_PERMISSIONS_EMPTY = "deployment-permissions-empty";
String DEPLOYMENT_REFRESH = "deployment-refresh";
String DEPLOYMENT_SERVER_GROUP = "deployment-sg";
String DEPLOYMENT_TAB = "deployment-tab";
String DEPLOYMENT_TAB_CONTAINER = "deployment-tab-container";
String DEPLOYMENT_UNMANAGED_ADD = "deployment-unmanaged-add";
String DEPLOYMENT_UPLOAD = "deployment-upload";
String DISTRIBUTABLE_WEB_ROUTING_ITEM = "dw-routing-item";
String DISTRIBUTABLE_WEB_ROUTING_INFINISPAN = "dw-routing-infinispan";
String DISTRIBUTABLE_WEB_ROUTING_LOCAL = "dw-routing-local";
String DISTRIBUTABLE_WEB_ROUTING_SELECT = "dw-routing-select";
String DISTRIBUTED_CACHE = "distributed-cache";
String DISABLE_SSL = "disable-ssl";
String DOMAIN_BROWSE_BY = "domain-browse-by";
String DRAG_AND_DROP_DEPLOYMENT = "drag-and-drop-deployment";
String EE = "ee";
String EE_ATTRIBUTES_FORM = "ee-attributes-form";
String EE_ATTRIBUTES_ITEM = "ee-attributes-item";
String EE_CONTEXT_SERVICE = "ee-service-context-service";
String EE_DEFAULT_BINDINGS_FORM = "ee-default-bindings-form";
String EE_DEFAULT_BINDINGS_ITEM = "ee-default-bindings-item";
String EE_GLOBAL_MODULES_FORM = "ee-global-modules-form";
String EE_GLOBAL_MODULES_ITEM = "ee-global-modules-item";
String EE_GLOBAL_MODULES_TABLE = "ee-global-modules-table";
String EE_MANAGED_EXECUTOR = "ee-service-executor";
String EE_MANAGED_EXECUTOR_SCHEDULED = "ee-service-scheduled-executor";
String EE_MANAGED_THREAD_FACTORY = "ee-service-thread-factories";
String EE_SERVICES_ITEM = "ee-services-item";
String EJB3 = "ejb3";
String EJB3_APPLICATION_SECURITY_DOMAIN_ADD = "ejb3-app-security-domain-add";
String EJB3_APPLICATION_SECURITY_DOMAIN_FORM = "ejb3-app-security-domain-form";
String EJB3_APPLICATION_SECURITY_DOMAIN_ITEM = "ejb3-app-security-domain-item";
String EJB3_APPLICATION_SECURITY_DOMAIN_TABLE = "ejb3-app-security-domain-table";
String EJB3_DEPLOYMENT = "ejb3-deployment";
String ELYTRON = "elytron";
String ELYTRON_ADD_PREFIX_ROLE_MAPPER = "elytron-add-prefix-role-mapper";
String ELYTRON_ADD_SUFFIX_ROLE_MAPPER = "elytron-add-suffix-role-mapper";
String ELYTRON_AGGREGATE_HTTP_SERVER_MECHANISM_FACTORY = "elytron-aggregate-http-server-mechanism-factory";
String ELYTRON_AGGREGATE_PRINCIPAL_DECODER = "elytron-aggregate-principal-decoder";
String ELYTRON_AGGREGATE_PRINCIPAL_TRANSFORMER = "elytron-aggregate-principal-transformer";
String ELYTRON_AGGREGATE_PROVIDERS = "elytron-aggregate-providers";
String ELYTRON_AGGREGATE_REALM = "elytron-aggregate-realm";
String ELYTRON_AGGREGATE_ROLE_MAPPER = "elytron-aggregate-role-mapper";
String ELYTRON_AGGREGATE_SASL_SERVER_FACTORY = "elytron-aggregate-sasl-server-factory";
String ELYTRON_AGGREGATE_SECURITY_EVENT_LISTENER = "elytron-aggregate-security-event-listener";
String ELYTRON_AUTHENTICATION_ITEM = "authentication-item";
String ELYTRON_AUTHENTICATION_CONFIGURATION = "elytron-authentication-configuration";
String ELYTRON_AUTHENTICATION_CONTEXT = "elytron-authentication-context";
String ELYTRON_CACHING_REALM = "elytron-caching-realm";
String ELYTRON_CERTIFICATE_AUTHORITY_ACCOUNT = "elytron-certificate-authority-account";
String ELYTRON_CHAINED_PRINCIPAL_TRANSFORMER = "elytron-chained-principal-transformer";
String ELYTRON_CLIENT_SSL_CONTEXT = "elytron-client-ssl-context";
String ELYTRON_CONCATENATING_PRINCIPAL_DECODER = "elytron-concatenating-principal-decoder";
String ELYTRON_CONFIGURABLE_HTTP_SERVER_MECHANISM_FACTORY = "elytron-configurable-http-server-mechanism-factory";
String ELYTRON_CONFIGURABLE_SASL_SERVER_FACTORY = "elytron-configurable-sasl-server-factory";
String ELYTRON_CONSTANT_PERMISSION_MAPPER = "elytron-constant-permission-mapper";
String ELYTRON_CONSTANT_PRINCIPAL_DECODER = "elytron-constant-principal-decoder";
String ELYTRON_CONSTANT_PRINCIPAL_TRANSFORMER = "elytron-constant-principal-transformer";
String ELYTRON_CONSTANT_REALM_MAPPER = "elytron-constant-realm-mapper";
String ELYTRON_CONSTANT_ROLE_MAPPER = "elytron-constant-role-mapper";
String ELYTRON_CREDENTIAL_STORE = "elytron-credential-store";
String ELYTRON_CUSTOM_CREDENTIAL_SECURITY_FACTORY = "elytron-custom-credential-security-factory";
String ELYTRON_CUSTOM_MODIFIABLE_REALM = "elytron-custom-modifiable-realm";
String ELYTRON_CUSTOM_PERMISSION_MAPPER = "elytron-custom-permission-mapper";
String ELYTRON_CUSTOM_POLICY_EMPTY = "elytron-custom-policy-empty";
String ELYTRON_CUSTOM_POLICY_FORM = "elytron-custom-policy-form";
String ELYTRON_CUSTOM_PRINCIPAL_DECODER = "elytron-custom-principal-decoder";
String ELYTRON_CUSTOM_PRINCIPAL_TRANSFORMER = "elytron-custom-principal-transformer";
String ELYTRON_CUSTOM_REALM = "elytron-custom-realm";
String ELYTRON_CUSTOM_REALM_MAPPER = "elytron-custom-realm-mapper";
String ELYTRON_CUSTOM_ROLE_DECODER = "elytron-custom-role-decoder";
String ELYTRON_CUSTOM_ROLE_MAPPER = "elytron-custom-role-mapper";
String ELYTRON_CUSTOM_SECURITY_EVENT_LISTENER = "elytron-custom-security-event-listener";
String ELYTRON_DIR_CONTEXT = "elytron-dir-context";
String ELYTRON_FACTORIES_TRANSFORMERS = "elytron-factories-transformers";
String ELYTRON_FILE_AUDIT_LOG = "elytron-file-audit-log";
String ELYTRON_FILESYSTEM_REALM = "elytron-filesystem-realm";
String ELYTRON_FILTERING_KEY_STORE = "elytron-filtering-key-store";
String ELYTRON_HTTP_AUTHENTICATION_FACTORY = "elytron-http-authentication-factory";
String ELYTRON_IDENTITY_REALM = "elytron-identity-realm";
String ELYTRON_JACC_POLICY_FORM = "elytron-jacc-policy-form";
String ELYTRON_JASPI = "elytron-jaspi";
String ELYTRON_JDBC_REALM = "elytron-jdbc-realm";
String ELYTRON_KERBEROS_SECURITY_FACTORY = "elytron-kerberos-security-factory";
String ELYTRON_KEY_MANAGER = "elytron-key-manager";
String ELYTRON_KEY_STORE = "elytron-key-store";
String ELYTRON_KEY_STORE_REALM = "elytron-key-store-realm";
String ELYTRON_LDAP_KEY_STORE = "elytron-ldap-key-store";
String ELYTRON_LDAP_REALM = "elytron-ldap-realm";
String ELYTRON_LOGICAL_PERMISSION_MAPPER = "elytron-logical-permission-mapper";
String ELYTRON_LOGICAL_ROLE_MAPPER = "elytron-logical-role-mapper";
String ELYTRON_LOGS_ITEM = "logs-item";
String ELYTRON_MAPPED_REGEX_REALM_MAPPER = "elytron-mapped-regex-realm-mapper";
String ELYTRON_MAPPED_ROLE_MAPPER = "elytron-mapped-role-mapper";
String ELYTRON_MAPPERS_DECODERS = "elytron-mappers-decoders";
String ELYTRON_MECHANISM_PROVIDER_FILTERING_SASL_SERVER_FACTORY = "elytron-mechanism-provider-filtering-sasl-server-factory";
String ELYTRON_OTHER_ITEM = "other-item";
String ELYTRON_PERIODIC_ROTATING_FILE_AUDIT_LOG = "elytron-periodic-rotating-file-audit-log";
String ELYTRON_PERMISSION_MAPPINGS_ADD = "elytron-permission-mappings-add";
String ELYTRON_PERMISSION_MAPPINGS_FORM = "elytron-permission-mappings-form";
String ELYTRON_PERMISSION_MAPPINGS_PAGE = "elytron-permission-mappings-page";
String ELYTRON_PERMISSION_MAPPINGS_TABLE = "elytron-permission-mappings-table";
String ELYTRON_PERMISSION_SET = "elytron-permission-set";
String ELYTRON_PERMISSIONS_ADD = "elytron-permissions-add";
String ELYTRON_PERMISSIONS_FORM = "elytron-permissions-form";
String ELYTRON_PERMISSIONS_PAGE = "elytron-permissions-page";
String ELYTRON_PERMISSIONS_TABLE = "elytron-permissions-table";
String ELYTRON_POLICY = "elytron-policy";
String ELYTRON_PROPERTIES_REALM = "elytron-properties-realm";
String ELYTRON_PROVIDER_HTTP_SERVER_MECHANISM_FACTORY = "elytron-provider-http-server-mechanism-factory";
String ELYTRON_PROVIDER_LOADER = "elytron-provider-loader";
String ELYTRON_PROVIDER_SASL_SERVER_FACTORY = "elytron-provider-sasl-server-factory";
String ELYTRON_REGEX_PRINCIPAL_TRANSFORMER = "elytron-regex-principal-transformer";
String ELYTRON_REGEX_VALIDATING_PRINCIPAL_TRANSFORMER = "elytron-regex-validating-principal-transformer";
String ELYTRON_RUNTIME = "elytron-runtime";
String ELYTRON_SASL_AUTHENTICATION_FACTORY = "elytron-sasl-authentication-factory";
String ELYTRON_SECURITY_DOMAIN = "elytron-security-domain";
String ELYTRON_SECURITY_REALMS = "elytron-security-realms";
String ELYTRON_SERVER_SSL_CONTEXT = "elytron-server-ssl-context";
String ELYTRON_SERVER_SSL_SNI_CONTEXT = "elytron-server-ssl-sni-context";
String ELYTRON_SERVICE_LOADER_HTTP_SERVER_MECHANISM_FACTORY = "elytron-service-loader-http-server-mechanism-factory";
String ELYTRON_SERVICE_LOADER_SASL_SERVER_FACTORY = "elytron-service-loader-sasl-server-factory";
String ELYTRON_SIMPLE_PERMISSION_MAPPER = "elytron-simple-permission-mapper";
String ELYTRON_SIMPLE_PERMISSION_MAPPER_ADD = "elytron-simple-permission-mapper-add";
String ELYTRON_SIMPLE_PERMISSION_MAPPER_FORM = "elytron-simple-permission-mapper-form";
String ELYTRON_SIMPLE_PERMISSION_MAPPER_PAGE = "elytron-simple-permission-mapper-page";
String ELYTRON_SIMPLE_PERMISSION_MAPPER_PAGES = "elytron-simple-permission-mapper-pages";
String ELYTRON_SIMPLE_PERMISSION_MAPPER_TABLE = "elytron-simple-permission-mapper-table";
String ELYTRON_SIMPLE_REGEX_REALM_MAPPER = "elytron-simple-regex-realm-mapper";
String ELYTRON_SIMPLE_ROLE_DECODER = "elytron-simple-role-decoder";
String ELYTRON_SIZE_ROTATING_FILE_AUDIT_LOG = "elytron-size-rotating-file-audit-log";
String ELYTRON_SSL = "ssl";
String ELYTRON_SSL_ITEM = "ssl-item";
String ELYTRON_STORE_ITEM = "stores-item";
String ELYTRON_STORES = "stores";
String ELYTRON_SYSLOG_AUDIT_LOG = "elytron-syslog-audit-log";
String ELYTRON_TOKEN_REALM = "elytron-token-realm";
String ELYTRON_TRUST_MANAGER = "elytron-trust-manager";
String ELYTRON_X500_ATTRIBUTE_PRINCIPAL_DECODER = "elytron-x500-attribute-principal-decoder";
String EMPTY = "empty";
String ENABLE_SSL = "enable-ssl";
String ENDPOINT = "endpoint";
String ENDPOINT_ADD = "endpoint-add";
String ENDPOINT_PING = "endpoint-ping";
String ENDPOINT_REFRESH = "endpoint-refresh";
String ENDPOINT_SELECT = "endpoint-select";
String ENDPOINT_STORAGE = "hal-local-storage-endpoint";
String EXTENSION = "extension";
String EXTENSION_ADD = "extension-add";
String EXTENSION_REVIEW_FORM = "extension-review-form";
String EXTENSION_STORAGE = "hal-local-storage-extension";
String EXTENSION_URL_FORM = "extension-url-form";
String FINDER = "hal-finder";
String FOOTER_EXTENSIONS = "footer-extensions";
String FOOTER_EXTENSIONS_DROPDOWN = "footer-extensions-dropdown";
String FORM = "form";
String GROUP = "group";
String HAL_MODAL = "hal-modal";
String HAL_MODAL_TITLE = "hal-modal-title";
String HAL_WIZARD = "hal-wizard";
String HAL_WIZARD_TITLE = "hal-wizard-title";
String HEADER = "header";
String HEADER_CONNECTED_TO = "header-connected-to";
String HEADER_EXTENSIONS = "header-extensions";
String HEADER_EXTENSIONS_DROPDOWN = "header-extensions-dropdown";
String HEADER_USERNAME = "header-username";
String HOMEPAGE = "homepage";
String HOMEPAGE_ACCESS_CONTROL_MODULE = "homepage-access-control-module";
// UI element id constants (kebab-case strings, apparently used as DOM/element ids
// by the console; see the id-factory methods further below).
String HOMEPAGE_ACCESS_CONTROL_SECTION = "homepage-access-control-section";
String HOMEPAGE_CONFIGURATION_MODULE = "homepage-configuration-module";
String HOMEPAGE_CONFIGURATION_SECTION = "homepage-configuration-section";
String HOMEPAGE_DEPLOYMENTS_MODULE = "homepage-deployments-module";
String HOMEPAGE_DEPLOYMENTS_SECTION = "homepage-deployments-section";
String HOMEPAGE_PATCHING_MODULE = "homepage-patching-module";
String HOMEPAGE_PATCHING_SECTION = "homepage-patching-section";
String HOMEPAGE_RUNTIME_MONITOR_SECTION = "homepage-runtime-monitor-section";
String HOMEPAGE_RUNTIME_MODULE = "homepage-runtime-module";
String HOMEPAGE_RUNTIME_SECTION = "homepage-runtime-section";
String HOMEPAGE_RUNTIME_SERVER_GROUP_SECTION = "homepage-runtime-server-group-section";
String HOMEPAGE_RUNTIME_SERVER_SECTION = "homepage-runtime-server-section";
String HOST = "host";
String HOST_PRUNE_ACTIONS = "host-prune-actions";
String HOST_PRUNE_DISCONNECTED = "host-prune-disconnected";
String HOST_PRUNE_EXPIRED = "host-prune-expired";
String HOST_REFRESH = "host-refresh";
String HTTP_INTERFACE_ITEM = "http-interface-item";
String INLINE_ACTION = "inline-action";
String INLINE_ACTION_DROPDOWN = "inline-action-dropdown";
String INTERFACE = "interface";
String INTERFACE_ADD = "interface-add";
String INTERFACE_REFRESH = "interface-refresh";
String INVALIDATION_CACHE = "invalidation-cache";
String ITEM = "item";
String JCA_ARCHIVE_VALIDATION_FORM = "jca-archive-validation-form";
String JCA_ARCHIVE_VALIDATION_TAB = "jca-archive-validation-tab";
String JCA_BEAN_VALIDATION_FORM = "jca-bean-validation-form";
String JCA_BEAN_VALIDATION_TAB = "jca-bean-validation-tab";
String JCA_BOOTSTRAP_CONTEXT_ADD = "jca-bootstrap-context-add";
String JCA_BOOTSTRAP_CONTEXT_FORM = "jca-bootstrap-context-form";
String JCA_BOOTSTRAP_CONTEXT_ITEM = "jca-bootstrap-context-item";
String JCA_BOOTSTRAP_CONTEXT_TABLE = "jca-bootstrap-context-table";
String JCA_CCM_FORM = "jca-ccm-form";
String JCA_CCM_TAB = "jca-ccm-tab";
String JCA_CONFIGURATION_ITEM = "jca-configuration-item";
String JCA_DISTRIBUTED_WORKMANAGER = "jca-distributed-workmanager";
String JCA_DISTRIBUTED_WORKMANAGER_ADD = "jca-distributed-workmanager-add";
String JCA_DISTRIBUTED_WORKMANAGER_FORM = "jca-distributed-workmanager-form";
String JCA_DISTRIBUTED_WORKMANAGER_ITEM = "jca-distributed-workmanager-item";
String JCA_DISTRIBUTED_WORKMANAGER_PAGE = "jca-distributed-workmanager-page";
String JCA_DISTRIBUTED_WORKMANAGER_PAGES = "jca-distributed-workmanager-pages";
String JCA_DISTRIBUTED_WORKMANAGER_TABLE = "jca-distributed-workmanager-table";
String JCA_TAB_CONTAINER = "jca-tab-container";
// NOTE(review): the JCA_THREAD_POOL_* values below drop the "jca-" prefix; presumably
// intentional (shared with generic thread pool views) — confirm.
String JCA_THREAD_POOL_ADD = "thread-pool-add";
String JCA_THREAD_POOL_ATTRIBUTES_FORM = "thread-pool-attributes-form";
String JCA_THREAD_POOL_ATTRIBUTES_TAB = "thread-pool-attributes-tab";
String JCA_THREAD_POOL_PAGE = "thread-pool-page";
String JCA_THREAD_POOL_SIZING_FORM = "thread-pool-sizing-form";
String JCA_THREAD_POOL_SIZING_TAB = "thread-pool-sizing-tab";
String JCA_THREAD_POOL_TAB_CONTAINER = "thread-pool-tab-container";
String JCA_THREAD_POOL_TABLE = "thread-pool-table";
String JCA_TRACER_FORM = "jca-tracer-form";
String JCA_TRACER_ITEM = "jca-tracer-item";
String JCA_WORKMANAGER = "jca-workmanager";
String JCA_WORKMANAGER_ADD = "jca-workmanager-add";
String JCA_WORKMANAGER_ITEM = "jca-workmanager-item";
String JCA_WORKMANAGER_PAGE = "jca-workmanager-page";
String JCA_WORKMANAGER_PAGES = "jca-workmanager-pages";
String JCA_WORKMANAGER_TABLE = "jca-workmanager-table";
String JDBC = "jdbc";
String JDBC_DRIVER = "jdbc-driver";
String JDBC_DRIVER_ADD = "jdbc-driver-add";
String JDBC_DRIVER_ADD_FORM = "jdbc-driver-add-form";
String JDBC_DRIVER_REFRESH = "jdbc-driver-refresh";
String JGROUPS_CHANNEL_CONFIG = "jgroups-channel";
String JGROUPS_CHANNEL_FORK = "fork";
String JGROUPS_CHANNEL_FORK_ITEM = "jgroups-channel-item-fork-item";
String JGROUPS_CHANNEL_FORK_PROTOCOL = "fork-protocol";
String JGROUPS_CHANNEL_ITEM = "jgroups-channel-item";
String JGROUPS_FORM = "jgroups-form";
String JGROUPS_ITEM = "jgroups-item";
String JGROUPS_PROTOCOL = "protocol";
String JGROUPS_RELAY = "relay";
String JGROUPS_REMOTE_SITE = "remote-site";
String JGROUPS_STACK_COLUMN = "jgroups-stack-column";
String JGROUPS_STACK_CONFIG = "jgroups-stack";
String JGROUPS_STACK_ITEM = "jgroups-stack-item";
String JGROUPS_TRANSPORT = "transport";
String JGROUPS_TRANSPORT_THREADPOOL_DEFAULT_FORM = "jgroups-transport-thread-pool-default-form";
String JGROUPS_TRANSPORT_THREADPOOL_DEFAULT_TAB = "jgroups-transport-thread-pool-default-tab";
String JGROUPS_TRANSPORT_THREADPOOL_INTERNAL_FORM = "jgroups-transport-thread-pool-internal-form";
String JGROUPS_TRANSPORT_THREADPOOL_INTERNAL_TAB = "jgroups-transport-thread-pool-internal-tab";
String JGROUPS_TRANSPORT_THREADPOOL_OOB_FORM = "jgroups-transport-thread-pool-oob-form";
String JGROUPS_TRANSPORT_THREADPOOL_OOB_TAB = "jgroups-transport-thread-pool-oob-tab";
String JGROUPS_TRANSPORT_THREADPOOL_TAB_CONTAINER = "jgroups-transport-thread-pool-tab-container";
String JGROUPS_TRANSPORT_THREADPOOL_TIMER_FORM = "jgroups-transport-thread-pool-timer-form";
String JGROUPS_TRANSPORT_THREADPOOL_TIMER_TAB = "jgroups-transport-thread-pool-timer-tab";
String JMS_BRIDGE = "jms-bridge";
String JMS_BRIDGE_ADD = "jms-bridge-add";
String JMS_BRIDGE_FORM = "jms-bridge-form";
String JMS_BRIDGE_ITEM = "jms-bridge-item";
String JMS_BRIDGE_REFRESH = "jms-bridge-refresh";
String JMS_BRIDGE_TAB = "jms-bridge-tab";
String JMS_MESSAGE_CHANGE_PRIORITY = "jms-message-change-priority";
String JMS_MESSAGE_CHANGE_PRIORITY_FORM = "jms-message-change-priority-form";
String JMS_MESSAGE_EXPIRE = "jms-message-expire";
String JMS_MESSAGE_LIST = "jms-message-list";
String JMS_MESSAGE_LIST_CHANGE_PRIORITY = "jms-message-list-change-priority";
String JMS_MESSAGE_LIST_CLEAR_SELECTION = "jms-message-list-clear-selection";
String JMS_MESSAGE_LIST_EXPIRE = "jms-message-list-expire";
String JMS_MESSAGE_LIST_MOVE = "jms-message-list-move";
String JMS_MESSAGE_LIST_REFRESH = "jms-message-list-refresh";
String JMS_MESSAGE_LIST_REMOVE = "jms-message-list-remove";
String JMS_MESSAGE_LIST_SELECT_ALL = "jms-message-list-select-all";
String JMS_MESSAGE_LIST_SEND_TO_DEAD_LETTER = "jms-message-list-send-to-dead-letter";
String JMS_MESSAGE_LIST_TOO_MANY = "jms-message-list-too-many";
String JMS_MESSAGE_MOVE = "jms-message-move";
String JMS_MESSAGE_MOVE_FORM = "jms-message-move-form";
String JMS_MESSAGE_REMOVE = "jms-message-remove";
String JMS_MESSAGE_SEND_TO_DEAD_LETTER = "jms-message-send-to-dead-letter";
String JMX_AUDIT_LOG_FORM = "jmx-audit-log-form";
String JMX_AUDIT_LOG_ITEM = "jmx-audit-log-item";
String JMX_CONFIGURATION_FORM = "jmx-configuration-form";
String JMX_CONFIGURATION_ITEM = "jmx-configuration-item";
String JMX_REMOTING_CONNECTOR_FORM = "jmx-remoting-connector-form";
String JMX_REMOTING_CONNECTOR_ITEM = "jmx-remoting-connector-item";
String JNDI = "jndi";
String JNDI_DETAILS = "jndi-details";
String JNDI_SEARCH = "jndi-search";
String JNDI_TREE = "jndi-tree";
String JNDI_TREE_APPLICATIONS_ROOT = "jndi-tree-applications-root";
String JNDI_TREE_JAVA_CONTEXTS_ROOT = "jndi-tree-java-contexts-root";
String JOB = "job";
String JOB_LIST = "job-list";
String JOB_REFRESH = "job-refresh";
// NOTE(review): "JOP_EXECUTION_*" looks like a typo for "JOB_EXECUTION_*" (the values
// already say "job-execution-…"). Renaming the constants would break callers — confirm
// usages before fixing.
String JOP_EXECUTION_EMPTY = "job-execution-empty";
String JOP_EXECUTION_REFRESH = "job-execution-refresh";
String JOP_EXECUTION_RESTART = "job-execution-restart";
String JOP_EXECUTION_STOP = "job-execution-stop";
String JPA_RUNTIME = "jpa-runtime-column";
String JPA_RUNTIME_COLLECTION_ITEM = "jpa-runtime-collection-item";
String JPA_RUNTIME_ENTITY_CACHE_ITEM = "jpa-runtime-entity-cache-item";
String JPA_RUNTIME_ENTITY_ITEM = "jpa-runtime-entity-item";
String JPA_RUNTIME_MAIN_ATTRIBUTES_ITEM = "jpa-runtime-main-attributes-item";
String JPA_RUNTIME_QUERY_CACHE_ITEM = "jpa-runtime-query-cache-item";
String JPA_RUNTIME_STATISTICS_DISABLED = "jpa-runtime-statistics-disabled";
String JPA_RUNTIME_TAB_CONTAINER = "jpa-runtime-tab-container";
String LOAD_METRIC = "load-metric";
String LOCAL_CACHE = "local-cache";
String LOG_FILE = "lf";
String LOG_FILE_EDITOR = "lf-editor";
String LOG_FILE_FOLLOW = "lf-follow";
String LOG_FILE_REFRESH = "lf-refresh";
String LOG_FILE_SEARCH = "lf-search";
String LOGGING_CONFIG_AND_PROFILES = "lcap";
String LOGGING_CONFIGURATION = "logging-configuration";
String LOGGING_FORMATTER_ITEM = "logging-formatter-item";
String LOGGING_PROFILE = "logging-profile";
String LOGGING_PROFILE_ADD = "logging-profile-add";
String LOGOUT_LINK = "logout-link";
String MACRO_EDITOR = "macro-editor";
String MACRO_EMPTY = "macro-empty";
String MACRO_LIST = "macro-list";
String MACRO_OPTIONS = "macro-options";
String MACRO_STORAGE = "hal-local-storage-macro";
String MAIL_SERVER = "mail-server";
String MAIL_SERVER_DIALOG = "mail-server-add-form";
String MAIL_SERVER_FORM = "mail-server-form";
String MAIL_SERVER_ITEM = "mail-server-item";
String MAIL_SERVER_TAB_CONTAINER = "mail-server-tab-container";
String MAIL_SERVER_TABLE = "mail-server-table";
String MAIL_SESSION = "mail-session";
String MAIL_SESSION_ADD = "mail-session-add";
// NOTE(review): MAIL_SESSION_DIALOG shares its value with MAIL_SESSION_FORM below
// (unlike MAIL_SERVER_DIALOG, which has a distinct "-add-form" id) — confirm whether
// the dialog needs its own id.
String MAIL_SESSION_DIALOG = "mail-session-form";
String MAIL_SESSION_FORM = "mail-session-form";
String MAIL_SESSION_ITEM = "mail-session-item";
String MAIL_SESSION_REFRESH = "mail-session-refresh";
String MANAGEMENT = "management";
String MANAGEMENT_OPERATIONS = "mgmt-operations";
String MEMBERSHIP = "membership";
String MEMBERSHIP_EXCLUDE = "membership-exclude";
String MEMBERSHIP_INCLUDE = "membership-include";
String MESSAGES_LINK = "messages-link";
String MESSAGING_ACCEPTOR = "messaging-acceptor";
String MESSAGING_ADDRESS_SETTING = "messaging-address-setting";
String MESSAGING_BRIDGE = "messaging-bridge";
String MESSAGING_BROADCAST_GROUP = "messaging-broadcast-group";
String MESSAGING_CATEGORY = "messaging-category";
String MESSAGING_CLUSTER_CONNECTION = "messaging-cluster-connection";
String MESSAGING_CONNECTION_FACTORY = "messaging-connection-factory";
String MESSAGING_CONNECTOR = "messaging-connector";
String MESSAGING_CONNECTOR_SERVICE = "messaging-connector-service";
String MESSAGING_CORE_QUEUE = "messaging-core-queue";
String MESSAGING_DISCOVERY_GROUP = "messaging-discovery-group";
String MESSAGING_DIVERT = "messaging-divert";
String MESSAGING_REMOTE_ACTIVEMQ = "msg-remote-activemq";
String MESSAGING_GLOBAL_SETTINGS = "messaging-global-settings";
String MESSAGING_GROUPING_HANDLER = "messaging-grouping-handler";
String MESSAGING_HA_CHOOSE_STRATEGY = "messaging-ha-choose-strategy";
// Fixed typo: value was "messaging-ha-policy=empty"; "=" is inconsistent with every
// sibling id and cannot be used in a CSS id selector without escaping.
String MESSAGING_HA_POLICY_EMPTY = "messaging-ha-policy-empty";
// Messaging HA replication policy ids.
String MESSAGING_HA_REPLICATION = "messaging-ha-replication";
String MESSAGING_HA_REPLICATION_COLOCATED = "messaging-ha-replication-colocated";
String MESSAGING_HA_REPLICATION_COLOCATED_MASTER = "messaging-ha-replication-colocated-master";
String MESSAGING_HA_REPLICATION_COLOCATED_SLAVE = "messaging-ha-replication-colocated-slave";
String MESSAGING_HA_REPLICATION_LIVE_ONLY = "messaging-ha-replication-live-only";
String MESSAGING_HA_REPLICATION_MASTER = "messaging-ha-replication-master";
// Fixed accidental duplicated segment: value was "messaging-ha-replication-replication-slave",
// inconsistent with MESSAGING_HA_REPLICATION_MASTER ("messaging-ha-replication-master").
String MESSAGING_HA_REPLICATION_SLAVE = "messaging-ha-replication-slave";
// Remaining UI element id constants (kebab-case strings, apparently DOM/element ids).
String MESSAGING_HA_SHARED_STORE = "messaging-ha-shared-store";
String MESSAGING_HA_SHARED_STORE_COLOCATED = "messaging-ha-shared-store-colocated";
String MESSAGING_HA_SHARED_STORE_COLOCATED_MASTER = "messaging-ha-shared-store-colocated-master";
String MESSAGING_HA_SHARED_STORE_COLOCATED_SLAVE = "messaging-ha-shared-store-colocated-slave";
String MESSAGING_HA_SHARED_STORE_MASTER = "messaging-ha-shared-store-master";
String MESSAGING_HA_SHARED_STORE_SLAVE = "messaging-ha-shared-store-slave";
String MESSAGING_HTTP_ACCEPTOR = "messaging-http-acceptor";
String MESSAGING_HTTP_CONNECTOR = "messaging-http-connector";
String MESSAGING_IN_VM_ACCEPTOR = "messaging-in-vm-acceptor";
String MESSAGING_IN_VM_CONNECTOR = "messaging-in-vm-connector";
String MESSAGING_JMS_QUEUE = "messaging-jms-queue";
String MESSAGING_JMS_TOPIC = "messaging-jms-topic";
String MESSAGING_POOLED_CONNECTION_FACTORY = "messaging-pooled-connection-factory";
String MESSAGING_REMOTE_ACCEPTOR = "messaging-remote-acceptor";
String MESSAGING_REMOTE_CONNECTOR = "messaging-remote-connector";
String MESSAGING_SECURITY_SETTING_ROLE_ADD = "messaging-security-setting-role-add";
String MESSAGING_SECURITY_SETTING_ROLE_FORM = "messaging-security-setting-role-form";
String MESSAGING_SECURITY_SETTING_ROLE_ITEM = "messaging-security-setting-role-item";
String MESSAGING_SECURITY_SETTING_ROLE_TABLE = "messaging-security-setting-role-table";
String MESSAGING_SERVER = "msg-server";
String MESSAGING_SERVER_ADD = "msg-server-c-add";
String MESSAGING_SERVER_BINDING_DIRECTORY = "msg-server-bindings-directory";
String MESSAGING_SERVER_BINDING_DIRECTORY_FORM = "msg-server-bindings-directory-form";
String MESSAGING_SERVER_CLUSTERING = "msg-server-clustering";
String MESSAGING_SERVER_CONFIGURATION = "msg-server-c";
String MESSAGING_SERVER_CONFIGURATION_REFRESH = "msg-server-c-refresh";
String MESSAGING_SERVER_CONNECTION = "msg-server-connection";
String MESSAGING_SERVER_CONNECTION_PAGE = "msg-server-connection-page";
String MESSAGING_SERVER_CONNECTION_PAGES = "msg-server-connection-pages";
// NOTE(review): value duplicates MESSAGING_SERVER_CONNECTION_CLOSE_FOR_ADDRESS_FORM
// below; a distinct "…close-tabs" id was likely intended — confirm usages.
String MESSAGING_SERVER_CONNECTION_CLOSE_TABS = "msg-server-connection-close-for-address-form";
String MESSAGING_SERVER_CONNECTION_CLOSE_CONSUMER_FORM = "msg-server-connection-close-consumer-form";
String MESSAGING_SERVER_CONNECTION_CLOSE_CONSUMER_TAB = "msg-server-connection-close-consumer-tab";
String MESSAGING_SERVER_CONNECTION_CLOSE_FOR_ADDRESS_FORM = "msg-server-connection-close-for-address-form";
String MESSAGING_SERVER_CONNECTION_CLOSE_FOR_ADDRESS_TAB = "msg-server-connection-close-for-address-tab";
String MESSAGING_SERVER_CONNECTION_CLOSE_FOR_USER_FORM = "msg-server-connection-close-for-user-form";
String MESSAGING_SERVER_CONNECTION_CLOSE_FOR_USER_TAB = "msg-server-connection-close-for-user-tab";
String MESSAGING_SERVER_CONNECTION_ITEM = "msg-server-connection-item";
String MESSAGING_SERVER_CONNECTION_FORM = "msg-server-connection-form";
String MESSAGING_SERVER_CONNECTION_TABLE = "msg-server-connection-table";
String MESSAGING_SERVER_CONNECTION_CONSUMER_FORM = "msg-server-connection-consumer-form";
String MESSAGING_SERVER_CONNECTION_CONSUMER_PAGE = "msg-server-connection-consumer-page";
String MESSAGING_SERVER_CONNECTION_CONSUMER_TABLE = "msg-server-connection-consumer-table";
String MESSAGING_SERVER_CONSUMER_ITEM = "msg-server-consumer-item";
String MESSAGING_SERVER_CONSUMER_FORM = "msg-server-consumer-form";
String MESSAGING_SERVER_CONSUMER_TABLE = "msg-server-consumer-table";
String MESSAGING_SERVER_CONNECTOR_ITEM = "msg-server-connector-item";
String MESSAGING_SERVER_CONNECTOR_FORM = "msg-server-connector-form";
String MESSAGING_SERVER_CONNECTOR_TABLE = "msg-server-connector-table";
String MESSAGING_SERVER_DESTINATION = "msg-server-destination";
String MESSAGING_SERVER_DESTINATION_RUNTIME = "msg-server-destination-r";
String MESSAGING_SERVER_DESTINATION_REFRESH = "msg-server-destination-refresh";
String MESSAGING_SERVER_DIRECTORY_ITEM = "msg-server-directory-item";
String MESSAGING_SERVER_HA_POLICY = "msg-server-ha-policy";
String MESSAGING_SERVER_JOURNAL_DIRECTORY = "msg-server-journal-directory";
String MESSAGING_SERVER_JOURNAL_DIRECTORY_FORM = "msg-server-journal-directory-form";
String MESSAGING_SERVER_LARGE_MESSAGES_DIRECTORY = "msg-server-large-messages-directory";
String MESSAGING_SERVER_LARGE_MESSAGES_DIRECTORY_FORM = "msg-server-large-messages-directory-form";
String MESSAGING_SERVER_PAGING_DIRECTORY = "msg-server-paging-directory";
String MESSAGING_SERVER_PAGING_DIRECTORY_FORM = "msg-server-paging-directory-form";
String MESSAGING_SERVER_PRODUCER_ITEM = "msg-server-producer-item";
String MESSAGING_SERVER_PRODUCER_FORM = "msg-server-producer-form";
String MESSAGING_SERVER_PRODUCER_TABLE = "msg-server-producer-table";
String MESSAGING_SERVER_ROLE_ITEM = "msg-server-role-item";
String MESSAGING_SERVER_ROLE_FORM = "msg-server-role-form";
String MESSAGING_SERVER_ROLE_TABLE = "msg-server-role-table";
String MESSAGING_SERVER_RUNTIME = "msg-server-r";
String MESSAGING_SERVER_RUNTIME_REFRESH = "msg-server-r-refresh";
String MESSAGING_SERVER_SESSION_FORM = "msg-server-session-form";
String MESSAGING_SERVER_SESSION_PAGE = "msg-server-session-page";
String MESSAGING_SERVER_SESSION_TABLE = "msg-server-session-table";
String MESSAGING_SERVER_SETTINGS = "msg-server-settings";
String MESSAGING_SERVER_TRANSACTION_ITEM = "msg-server-transaction-item";
String MESSAGING_SERVER_TRANSACTION_FORM = "msg-server-transaction-form";
String MESSAGING_SERVER_TRANSACTION_TABLE = "msg-server-transaction-table";
String MICRO_PROFILE_CONFIG_SOURCE = "microprofile-config-source";
String MICRO_PROFILE_HEALTH = "microprofile-health";
String MICRO_PROFILE_METRICS_FORM = "microprofile-metrics-form";
String MODCLUSTER_PROXY = "modcluster-proxy";
String MODCLUSTER_PROXY_ADD = "modcluster-proxy-add";
String MODCLUSTER_PROXY_REFRESH = "modcluster-proxy-refresh";
String MODEL_BROWSER = "model-browser";
String MODEL_BROWSER_CREATE_SINGLETON_FORM = "model-browser-create-singleton-form";
String MODEL_BROWSER_ROOT = "model-browser-root";
String NO_MATCH = "no-match";
String NONE_PROGRESSING_LINK = "none-progressing-link";
String NATIVE_INTERFACE_ITEM = "native-interface-item";
String NEAR_CACHE_NONE = "near-cache-none";
String NEAR_CACHE_INVALIDATION = "near-cache-invalidation";
String NOTIFICATION_DRAWER_CLEAR_ALL = "notification-drawer-clear-all";
String NOTIFICATION_DRAWER_CLOSE = "notification-drawer-close";
String NOTIFICATION_DRAWER_MARK_ALL_READ = "notification-drawer-mark-all-read";
String PAGE = "page";
String PAGES = "pages";
String PATCH_ADD = "patch-add";
String PATCH_UPLOAD_NAMES_FORM = "patch-names-form";
String PATCHES_AGEOUT = "patching-ageout-history";
String PATCHES_REFRESH = "patching-refresh";
String PATCHING = "patching";
String PATCHING_DOMAIN = "patching-domain";
String POOL = "pool";
String PREVIEW_ID = "hal-finder-preview";
String PROFILE = "profile";
String PROFILE_ADD = "profile-add";
String PROFILE_CLONE = "profile-clone";
String PROFILE_REFRESH = "profile-refresh";
String REFERENCE_SERVER_EMPTY = "reference-server-empty";
String REFRESH = "refresh";
String RELOAD_LINK = "reload-link";
String REMOTE_CACHE_CONTAINER_ADD = "rcc-add";
String REMOTE_CACHE_CONTAINER_CONFIGURATION_FORM = "rcc-configuration-form";
String REMOTE_CACHE_CONTAINER_CONFIGURATION_TAB = "rcc-configuration-tab";
String REMOTE_CACHE_CONTAINER_ITEM = "rcc-item";
String REMOTE_CACHE_CONTAINER_FORM = "rcc-form";
String REMOTE_CACHE_CONTAINER_NEAR_CACHE_TAB = "rcc-near-cache-tab";
String REMOTE_CACHE_CONTAINER_TABS = "rcc-tabs";
String REMOTE_CLUSTER_ADD = "rc-add";
String REMOTE_CLUSTER_ITEM = "rc-item";
String REMOTE_CLUSTER_FORM = "rc-form";
String REMOTE_CLUSTER_TABLE = "rc-table";
String REMOTING_CONNECTOR_FORM = "remoting-connector-form";
String REMOTING_CONNECTOR_SECURITY_FORM = "remoting-connector-security-form";
String REMOTING_CONNECTOR_SECURITY_POLICY_FORM = "remoting-connector-security-policy-form";
String REMOTING_CONNECTOR_SECURITY_POLICY_TAB = "remoting-connector-security-policy-tab";
String REMOTING_CONNECTOR_SECURITY_TAB = "remoting-connector-security-tab";
String REMOTING_CONNECTOR_TAB = "remoting-connector-tab";
String REMOTING_CONNECTOR_TAB_CONTAINER = "remoting-connector-tab-container";
String REMOTING_HTTP_CONNECTOR_FORM = "remoting-http-connector-form";
String REMOTING_HTTP_CONNECTOR_SECURITY_FORM = "remoting-http-connector-security-form";
String REMOTING_HTTP_CONNECTOR_SECURITY_POLICY_FORM = "remoting-http-connector-security-policy-form";
String REMOTING_HTTP_CONNECTOR_SECURITY_POLICY_TAB = "remoting-http-connector-security-policy-tab";
String REMOTING_HTTP_CONNECTOR_SECURITY_TAB = "remoting-http-connector-security-tab";
String REMOTING_HTTP_CONNECTOR_TAB = "remoting-http-connector-tab";
String REMOTING_HTTP_CONNECTOR_TAB_CONTAINER = "remoting-http-connector-tab-container";
String REPLICATED_CACHE = "replicated-cache";
String RESET_MESSAGE_COUNTERS = "reset-message-counters";
String RESOLVE_EXPRESSION_FORM = "resolve-expression-form";
String RESOURCE_ADAPTER = "resource-adapter";
String RESOURCE_ADAPTER_ADD = "resource-adapter-add";
String RESOURCE_ADAPTER_ADMIN_OBJECT_ADD = "resource-adapter-admin-object-add";
String RESOURCE_ADAPTER_CONNECTION_DEFINITION_ADD = "resource-adapter-connection-definition-add";
String RESOURCE_ADAPTER_FORM = "resource-adapter-form";
String REST_RESOURCE = "rest-rsc";
String REST_RESOURCE_PATH_PARAM_FORM = "rest-rsc-path-param-form";
String REST_RESOURCE_REFRESH = "rest-rsc-refresh";
String ROLE = "role";
String ROLE_ADD = "role-add";
String ROLE_HOST_SCOPED_ADD = "role-host-add";
String ROLE_HOST_SCOPED_FORM = "role-host-form";
String ROLE_MAPPING_FORM = "role-mapping-form";
String ROLE_REFRESH = "role-refresh";
String ROLE_SERVER_GROUP_SCOPED_ADD = "role-server-group-add";
String ROLE_SERVER_GROUP_SCOPED_FORM = "role-server-group-form";
String ROOT_CONTAINER = "hal-root-container";
String RUNTIME_SUBSYSTEM = "rss";
String SCATTERED_CACHE = "scattered-cache";
String SEARCH = "search";
String SECURITY = "security";
String SECURITY_ITEM = "security-item";
String SECURITY_FORM = "security-form";
String SECURITY_DOMAIN = "sd";
String SECURITY_DOMAIN_ACL_MODULE_ADD = "security-domain-acl-add";
String SECURITY_DOMAIN_ADD = "sd-add";
String SECURITY_DOMAIN_AUDIT_ADD = "security-domain-provider-add";
String SECURITY_DOMAIN_AUTHENTICATION_ADD = "security-domain-authentication-add";
String SECURITY_DOMAIN_AUTHORIZATION_ADD = "security-domain-authorization-add";
String SECURITY_DOMAIN_MAPPING_ADD = "security-domain-mapping-add";
String SECURITY_DOMAIN_TRUST_MODULE_ADD = "security-domain-identity-trust-add";
String SECURITY_MANAGER_MAXIMUM_PERMISSIONS = "sm-max-permissions";
String SECURITY_MANAGER_MINIMUM_PERMISSIONS = "sm-min-permissions";
String SERVER = "server";
String SERVER_ADD = "server-add";
String SERVER_GROUP = "server-group";
String SERVER_GROUP_ADD = "server-group-add";
String SERVER_GROUP_DEPLOYMENT = "server-group-deployment";
String SERVER_GROUP_DEPLOYMENT_ADD = "server-group-deployment-add";
String SERVER_GROUP_DEPLOYMENT_ADD_ACTIONS = "server-group-deployment-add-actions";
String SERVER_GROUP_DEPLOYMENT_ENABLE = "server-group-deployment-enable";
String SERVER_GROUP_DEPLOYMENT_REFRESH = "server-group-deployment-refresh";
String SERVER_GROUP_DEPLOYMENT_TABLE = "server-group-deployment-table";
String SERVER_GROUP_DEPLOYMENT_UNMANAGED_ADD = "server-group-deployment-unmanaged-add";
String SERVER_GROUP_DEPLOYMENT_UPLOAD = "server-group-deployment-upload";
String SERVER_GROUP_REFRESH = "server-group-refresh";
String SERVER_REFRESH = "server-refresh";
String SERVER_RUNTIME_BOOTSTRAP_FORM = "server-runtime-bootstrap-form";
String SERVER_RUNTIME_ITEM = "server-runtime-item";
String SERVER_RUNTIME_JVM_ATTRIBUTES_FORM = "server-runtime-jvm-attributes-form";
String SERVER_RUNTIME_PROPERTIES_TABLE = "server-runtime-properties-table";
String SERVER_RUNTIME_STATUS = "server-runtime-status";
String SERVER_RUNTIME_STATUS_HEAP_COMMITTED = "server-runtime-status-heap-committed";
String SERVER_RUNTIME_STATUS_HEAP_USED = "server-runtime-status-heap-used";
String SERVER_RUNTIME_STATUS_NON_HEAP_COMMITTED = "server-runtime-status-non-heap-committed";
String SERVER_RUNTIME_STATUS_NON_HEAP_USED = "server-runtime-status-non-heap-used";
String SERVER_RUNTIME_STATUS_THREADS = "server-runtime-status-threads";
String SERVER_STATUS_BOOTSTRAP_ITEM = "server-runtime-bootstrap-item";
String SERVER_STATUS_MAIN_ATTRIBUTES_ITEM = "server-runtime-main-attributes-item";
String SERVER_STATUS_SYSTEM_PROPERTIES_ITEM = "server-runtime-system-properties-item";
String SERVER_URL_FORM = "server-url-form";
String SERVER_URL_STORAGE = "hal-local-storage-server-url";
String SESSION = "session";
String SETTINGS_FORM = "settings-form";
String SOCKET_BINDING_GROUP = "socket-binding-group";
String SOCKET_BINDING_GROUP_ADD = "socket-binding-group-add";
String SOCKET_BINDING_GROUP_INBOUND = "socket-binding-group-inbound";
String SOCKET_BINDING_GROUP_INBOUND_CLIENT_MAPPING_ADD = "socket-binding-group-inbound-client-mapping-add";
String SOCKET_BINDING_GROUP_INBOUND_CLIENT_MAPPING_FORM = "socket-binding-group-inbound-client-mapping-form";
String SOCKET_BINDING_GROUP_INBOUND_CLIENT_MAPPING_PAGE = "socket-binding-group-inbound-client-mapping-page";
String SOCKET_BINDING_GROUP_INBOUND_CLIENT_MAPPING_TABLE = "socket-binding-group-inbound-client-mapping-table";
String SOCKET_BINDING_GROUP_OUTBOUND_LOCAL = "socket-binding-group-outbound-local";
String SOCKET_BINDING_GROUP_OUTBOUND_REMOTE = "socket-binding-group-outbound-remote";
String SOCKET_BINDING_GROUP_REFRESH = "socket-binding-group-refresh";
String STANDALONE_HOST = "standalone-host";
String STANDALONE_SERVER_COLUMN = "standalone-server-column";
String STATEMENTS = "statements";
String STORAGE = "hal-local-storage";
String SYSTEM_PROPERTY_ADD = "system-property-add";
String SYSTEM_PROPERTY_FORM = "system-property-form";
String SYSTEM_PROPERTY_TABLE = "system-property-table";
String TAB = "tab";
String TAB_CONTAINER = "tab-container";
String TABLE = "table";
String TASKS_ACTIVE = "tasks-active";
String TASKS_COMPLETED = "tasks-completed";
String TASKS_QUEUE = "tasks-queue";
String TASKS_REJECTED = "tasks-rejected";
String THREAD_POOL_ITEM = "thread-pool-item";
String THREAD_POOL_FORM = "thread-pool-form";
String TIMEOUT = "timeout";
String TLC_ACCESS_CONTROL = "tlc-access-control";
String TLC_ACCESS_CONTROL_SSO = "tlc-access-control-sso";
String TLC_CONFIGURATION = "tlc-configuration";
String TLC_DEPLOYMENTS = "tlc-deployments";
String TLC_HOMEPAGE = "tlc-homepage";
String TLC_MANAGEMENT = "tlc-management";
String TLC_PATCHING = "tlc-patching";
String TLC_RUNTIME = "tlc-runtime";
String TOOLBAR = "toolbar";
String TOOLBAR_ACTION_DROPDOWN = "toolbar-action-dropdown";
String TOOLBAR_FILTER = "toolbar-filter";
String TOUR_BUTTON_BACK = "tour-button-back";
String TOUR_BUTTON_DONE = "tour-button-done";
String TOUR_BUTTON_NEXT = "tour-button-next";
String TRANSACTION = "transaction";
// NOTE(review): capital "P" in "transaction-Participants-page" breaks the all-lowercase
// kebab-case convention — likely a typo, but the id may be referenced elsewhere; confirm.
String TRANSACTION_PARTICIPANTS_PAGE = "transaction-Participants-page";
String TRANSACTION_STATISTICS_DISABLED = "transaction-statistics-disabled";
String UNDERTOW_APP_SECURITY_DOMAIN = "undertow-application-security-domain";
String UNDERTOW_APP_SECURITY_DOMAIN_ADD = "undertow-application-security-domain-add";
String UNDERTOW_APP_SECURITY_DOMAIN_FORM = "undertow-application-security-domain-form";
String UNDERTOW_APP_SECURITY_DOMAIN_REFRESH = "undertow-application-security-domain-refresh";
String UNDERTOW_APP_SECURITY_DOMAIN_TAB = "undertow-application-security-domain-tab";
String UNDERTOW_APP_SECURITY_DOMAIN_TAB_CONTAINER = "undertow-application-security-domain-tab-container";
String UNDERTOW_DEPLOYMENT_STATISTICS_DISABLED = "undertow-deployment-statistics-disabled";
String UNDERTOW_GLOBAL_SETTINGS = "undertow-global-settings";
String UNDERTOW_HOST_ACCESS_LOG = "undertow-host-access-log";
String UNDERTOW_HOST_ACTION_COLUMN = "undertow-host-action-column";
String UNDERTOW_HOST_ADD = "undertow-host-add";
String UNDERTOW_HOST_ATTRIBUTES_FORM = "undertow-host-form";
String UNDERTOW_HOST_ATTRIBUTES_TAB = "undertow-host-tab";
String UNDERTOW_HOST_ATTRIBUTES_TAB_CONTAINER = "undertow-host-tab-container";
String UNDERTOW_HOST_FILTER_REF_ADD = "undertow-host-filter-ref-add";
String UNDERTOW_HOST_FILTER_REF_FORM = "undertow-host-filter-ref-form";
String UNDERTOW_HOST_FILTER_REF_PAGE = "undertow-host-filter-ref-page";
String UNDERTOW_HOST_FILTER_REF_TABLE = "undertow-host-filter-ref-table";
String UNDERTOW_HOST_HTTP_INVOKER = "undertow-host-http-invoker";
String UNDERTOW_HOST_ITEM = "undertow-host-item";
String UNDERTOW_HOST_LOCATION_ADD = "undertow-host-location-add";
String UNDERTOW_HOST_LOCATION_FILTER_REF_ADD = "undertow-host-location-filter-ref-add";
String UNDERTOW_HOST_LOCATION_FILTER_REF_FORM = "undertow-host-location-filter-ref-form";
String UNDERTOW_HOST_LOCATION_FILTER_REF_PAGE = "undertow-host-location-filter-ref-page";
String UNDERTOW_HOST_LOCATION_FILTER_REF_TABLE = "undertow-host-location-filter-ref-table";
String UNDERTOW_HOST_LOCATION_FORM = "undertow-host-location-form";
String UNDERTOW_HOST_LOCATION_PAGE = "undertow-host-location-page";
String UNDERTOW_HOST_LOCATION_TABLE = "undertow-host-location-table";
String UNDERTOW_HOST_MAIN_PAGE = "undertow-host-main-page";
String UNDERTOW_HOST_PAGES = "undertow-host-pages";
String UNDERTOW_HOST_SINGLE_SIGN_ON = "undertow-host-single-sign-on";
String UNDERTOW_HOST_TABLE = "undertow-host-table";
String UNDERTOW_LISTENER_PROCESSING_DISABLED = "undertow-listener-processing-disabled";
String UNDERTOW_LISTENER_REFRESH = "undertow-listener-refresh";
String UNDERTOW_MODCLUSTER_BALANCER_NODE_CONTEXT_REFRESH = "undertow-modcluster-balancer-node-context-refresh";
String UNDERTOW_MODCLUSTER_BALANCER_NODE_REFRESH = "undertow-modcluster-balancer-node-refresh";
String UNDERTOW_MODCLUSTER_BALANCER_REFRESH = "undertow-modcluster-balancer-refresh";
String UNDERTOW_MODCLUSTER_REFRESH = "undertow-modcluster-refresh";
String UNDERTOW_RESPONSE_HEADER_ADD = "undertow-response-header-add";
String UNDERTOW_RUNTIME = "undertow-runtime";
String UNDERTOW_RUNTIME_APP_SEC_DOMAIN = "undertow-runtime-app-sec-domain";
String UNDERTOW_RUNTIME_DEPLOYMENT = "undertow-runtime-deployment-column";
String UNDERTOW_RUNTIME_LISTENER = "undertow-runtime-listener";
String UNDERTOW_RUNTIME_MODCLUSTER = "undertow-runtime-modcluster";
String UNDERTOW_RUNTIME_MODCLUSTER_BALANCER = "undertow-runtime-modcluster-balancer";
String UNDERTOW_RUNTIME_MODCLUSTER_BALANCER_NODE = "undertow-runtime-modcluster-balancer-node";
String UNDERTOW_RUNTIME_MODCLUSTER_BALANCER_NODE_CONTEXT = "undertow-runtime-modcluster-balancer-node-context";
String UNDERTOW_RUNTIME_REFRESH = "undertow-runtime-deployment-refresh";
String UNDERTOW_RUNTIME_SERVER = "undertow-runtime-server";
String UNDERTOW_SERVER = "undertow-server";
String UNDERTOW_SERVER_ADD = "undertow-server-add";
String UNDERTOW_SERVER_AJP_LISTENER = "undertow-server-ajp-listener";
String UNDERTOW_SERVER_CONFIGURATION_FORM = "undertow-server-configuration-form";
String UNDERTOW_SERVER_CONFIGURATION_ITEM = "undertow-server-configuration-item";
String UNDERTOW_SERVER_HTTP_LISTENER = "undertow-server-http-listener";
String UNDERTOW_SERVER_HTTPS_LISTENER = "undertow-server-https-listener";
String UNDERTOW_SERVER_LISTENER_ITEM = "undertow-server-listener-item";
String UNDERTOW_SERVER_REFRESH = "undertow-server-refresh";
String UNDERTOW_SERVLET_CONTAINER = "undertow-servlet-container";
String UNDERTOW_SERVLET_CONTAINER_ADD = "undertow-servlet-container-add";
String UNDERTOW_SERVLET_CONTAINER_CONFIGURATION_FORM = "undertow-servlet-container-configuration-form";
String UNDERTOW_SERVLET_CONTAINER_CONFIGURATION_ITEM = "undertow-servlet-container-configuration-item";
String UNDERTOW_SERVLET_CONTAINER_CONFIGURATION_TAB = "undertow-servlet-container-configuration-tab";
String UNDERTOW_SERVLET_CONTAINER_COOKIE = "undertow-servlet-container-cookie";
String UNDERTOW_SERVLET_CONTAINER_CRAWLER = "undertow-servlet-container-crawler";
String UNDERTOW_SERVLET_CONTAINER_JSP = "undertow-servlet-container-jsp";
String UNDERTOW_SERVLET_CONTAINER_MIME_MAPPING_FORM = "undertow-servlet-container-mime-mapping-form";
String UNDERTOW_SERVLET_CONTAINER_MIME_MAPPING_TAB = "undertow-servlet-container-mime-mapping-tab";
String UNDERTOW_SERVLET_CONTAINER_REFRESH = "undertow-servlet-container-refresh";
String UNDERTOW_SERVLET_CONTAINER_SESSION = "undertow-servlet-container-session";
String UNDERTOW_SERVLET_CONTAINER_TAB_CONTAINER = "undertow-servlet-container-tab-container";
String UNDERTOW_SERVLET_CONTAINER_WEBSOCKET = "undertow-servlet-container-websocket";
String UNDERTOW_SERVLET_CONTAINER_WELCOME_FILE_FORM = "undertow-servlet-container-welcome-file-form";
String UNDERTOW_SERVLET_CONTAINER_WELCOME_FILE_TAB = "undertow-servlet-container-welcome-file-tab";
String UNDERTOW_SETTINGS = "undertow-settings";
String UNDERTOW_SINGLE_SIGN_ON_ADD = "undertow-single-sign-on-add";
String UNDERTOW_SINGLE_SIGN_ON_FORM = "undertow-single-sign-on-form";
// NOTE(review): a "_TAB" constant with a "-table" value — one of the two looks wrong; confirm.
String UNDERTOW_SINGLE_SIGN_ON_TAB = "undertow-single-sign-on-table";
String UNDERTOW_STATISTICS_DISABLED = "undertow-statistics-disabled";
String UNMANAGED_FORM = "unmanaged-form";
String UPLOAD_FILE_INPUT = "upload-file-input";
String UPLOAD_NAMES_FORM = "upload-names-form";
String USER = "user";
String VALIDATION = "validations";
String WEBSERVICES_CLIENT_CONFIG = "webservices-client-config";
String WEBSERVICES_CLIENT_CONFIG_ITEM = "webservices-client-config-item";
String WEBSERVICES_ENDPOINT_CONFIG = "webservices-endpoint-config";
String WEBSERVICES_ENDPOINT_CONFIG_ITEM = "webservices-endpoint-config-item";
String WEBSERVICES_FORM = "webservices-form";
String WEBSERVICES_HANDLER_ADD = "webservices-handler-add";
String WEBSERVICES_HANDLER_CHAIN_ADD = "webservices-handler-chain-add";
String WEBSERVICES_HANDLER_CHAIN_COLUMN = "webservices-handler-chain-column";
String WEBSERVICES_ITEM = "webservices-item";
String WEBSERVICES_STATISTICS_DISABLED = "webservices-statistics-disabled";
String WORKER = "worker";
String XA_DATA_SOURCE = "xa-data-source";
String XA_DATA_SOURCE_ADD = "xa-data-source-add";
String XA_DATA_SOURCE_FORM = "xa-data-source-form";
String XA_DATA_SOURCE_RUNTIME_JDBC_FORM = "xa-data-source-runtime-jdbc-form";
String XA_DATA_SOURCE_RUNTIME_JDBC_TAB = "xa-data-source-runtime-jdbc-tab";
String XA_DATA_SOURCE_RUNTIME_POOL_FORM = "xa-data-source-runtime-pool-form";
String XA_DATA_SOURCE_RUNTIME_POOL_TAB = "xa-data-source-runtime-pool-tab";
String XA_DATA_SOURCE_RUNTIME_TAB_CONTAINER = "xa-data-source-runtime-tab-container";
// ------------------------------------------------------ resource ids (a-z)
/** Builds the id for a cache container resource: {@code cc-<name>}. */
static String cacheContainer(String name) {
    // Unqualified call for consistency with the sibling factory methods (was Ids.build).
    return build("cc", name);
}
static String extractCacheContainer(String id) {
return substringAfterLast(id, "cc-");
}
static String content(String name) {
return name;
}
static String dataSourceConfiguration(String name, boolean xa) {
return build(xa ? "xa" : "non-xa", "dsc", name);
}
static String dataSourceRuntime(String name, boolean xa) {
return build(xa ? "xa" : "non-xa", "dsr", name);
}
static String deployment(String name) {
return Ids.build("dply", name);
}
static String destination(String deployment, String subdeployment, String messageServer, String type, String name) {
if (deployment == null) {
return build(messageServer, type, name);
}
return build(deployment, subdeployment, messageServer, type, name);
}
/** Builds the resource id for an EJB 3 resource inside a (sub)deployment. */
static String ejb3(String deployment, String subdeployment, String type, String name) {
    return build(deployment, subdeployment, type, name);
}

/** Builds the resource id for a host controller. */
static String host(String name) {
    return build(HOST, name);
}

/** Builds the resource id for a server running on a specific host. */
static String hostServer(String host, String server) {
    return build(host, server);
}

/** Builds the resource id for a batch job inside a (sub)deployment. */
static String job(String deployment, String subdeployment, String name) {
    return build(JOB, deployment, subdeployment, name);
}

/** Builds the resource id for a JMS bridge ("jmsb-&lt;name&gt;"). */
static String jmsBridge(String name) {
    return build("jmsb", name);
}

/** Builds the resource id for the JPA statistics of a persistence unit. */
static String jpaStatistic(String deployment, String subdeployment, String persistenceUnit) {
    return build(deployment, subdeployment, persistenceUnit);
}

/** Builds the resource id for a logging profile. */
static String loggingProfile(String name) {
    return build(LOGGING_CONFIG_AND_PROFILES, name);
}

/** Builds the resource id for a mail session ("ms-&lt;name&gt;"). */
static String mailSession(String name) {
    return build("ms", name);
}

/** Builds the resource id for a mod_cluster proxy ("mcp-&lt;name&gt;"). */
static String modclusterProxy(String name) {
    return build("mcp", name);
}

/** Builds the resource id for a messaging server ("msgs-&lt;name&gt;"). */
static String messagingServer(String name) {
    return build("msgs", name);
}

/** Extracts the messaging server name from an id created by {@link #messagingServer(String)}. */
static String extractMessagingServer(String id) {
    return substringAfterLast(id, "msgs-");
}

// NOTE(review): prefix "us" is also used by undertowServer(String) below, so
// extractUndertowServer(String) cannot distinguish the two kinds of id — confirm this overlap is intended.
static String webServer(String name) {
    return build("us", name);
}

/** Builds the resource id for a web listener ("ulst-&lt;name&gt;"). */
static String webListener(String name) {
    return build("ulst", name);
}

/**
 * @param type must be one of "user" or "group"
 */
static String principal(String type, String name) {
    return build(type, name);
}
/** Builds the resource id for a remote cache container ("rcc-&lt;name&gt;"). */
static String remoteCacheContainer(String name) {
    return build("rcc", name); // was Ids.build(...): redundant self-qualification, normalized to match sibling builders
}

/** Builds the resource id for a resource adapter ("ra-&lt;name&gt;"). */
static String resourceAdapter(String name) {
    return build("ra", name);
}

/** Builds the resource id for a REST resource inside a (sub)deployment. */
static String restResource(String deployment, String subdeployment, String name) {
    return build(deployment, subdeployment, name);
}

/** Builds the resource id for a role — the sanitized name only, no prefix. */
static String role(String name) {
    return asId(name);
}

/** Builds the resource id for a security domain ("sd-&lt;name&gt;"). */
static String securityDomain(String name) {
    return build("sd", name);
}

/** Builds the resource id for a server group ("sg-&lt;name&gt;"). */
static String serverGroup(String name) {
    return build("sg", name);
}

/** Builds the resource id for a deployment assigned to a server group. */
static String serverGroupDeployment(String serverGroup, String name) {
    return build(serverGroup, name);
}

/** Builds the resource id for an Undertow application security domain ("uasd-&lt;name&gt;"). */
static String undertowApplicationSecurityDomain(String name) {
    return build("uasd", name);
}

/** Builds the resource id for an Undertow mod_cluster filter ("umc-&lt;name&gt;"). */
static String undertowModcluster(String name) {
    return build("umc", name);
}

/** Extracts the name from an id created by {@link #undertowModcluster(String)}. */
static String extractUndertowModcluster(String id) {
    return substringAfterLast(id, "umc-");
}

// NOTE(review): the balancer builder is not visible in this file section; this long literal prefix
// must match however balancer ids are constructed — verify against the producer.
static String extractUndertowModclusterBalancer(String id) {
    return substringAfterLast(id, "undertow-modcluster-balancer-");
}

// NOTE(review): prefix "us" is also used by webServer(String) elsewhere in this file, making the
// two id families indistinguishable to extractUndertowServer(String) — confirm intended.
static String undertowServer(String name) {
    return build("us", name);
}

/** Extracts the server name from an id created by {@link #undertowServer(String)}. */
static String extractUndertowServer(String id) {
    return substringAfterLast(id, "us-");
}

/** Builds the resource id for an Undertow servlet container ("usc-&lt;name&gt;"). */
static String undertowServletContainer(String name) {
    return build("usc", name);
}
// ------------------------------------------------------ methods

/**
 * Turns a label which can contain whitespace and upper/lower case characters into an all lowercase id separated by
 * "-". Returns {@code null} if the label contains no usable characters at all.
 */
static String asId(String text) {
    List<String> sanitized = new ArrayList<>();
    // Split on hyphens and whitespace; the resulting parts therefore contain neither.
    for (String part : text.split("[-\\s]")) {
        // Keep word characters only, then map '_' to '-'. String.split() never yields null
        // elements and parts cannot contain whitespace, so the former null check and
        // replaceAll("\\s+", "") were dead code and have been removed.
        String s = part.replaceAll("[^a-zA-Z0-9-_]", "").replace('_', '-');
        if (!s.isEmpty()) {
            sanitized.add(s);
        }
    }
    if (sanitized.isEmpty()) {
        return null;
    }
    // sanitized holds only non-empty strings, so no extra filtering is needed here.
    // NOTE(review): toLowerCase() is locale sensitive (e.g. Turkish dotless i); Locale.ROOT would
    // be safer for machine ids — kept as-is to preserve existing behavior.
    return sanitized.stream()
            .map(String::toLowerCase)
            .collect(joining("-"));
}
/** Builds an id from the given segments, separated by '-'. Null or empty additional segments are skipped. */
static String build(String id, String... additionalIds) {
    return build(id, '-', additionalIds);
}

/**
 * Builds an id from the given segments using the given separator. Each segment is sanitized with
 * {@link #asId(String)}; null or empty additional segments are skipped.
 *
 * @throws IllegalArgumentException if {@code id} is null or empty
 */
static String build(String id, char separator, String... additionalIds) {
    if (emptyToNull(id) == null) {
        // Message now matches the actual check: empty ids are rejected as well as null ones.
        throw new IllegalArgumentException("ID must not be null or empty");
    }
    // Plain ArrayList instead of Guava's Lists.newArrayList — the type is already in scope here.
    List<String> ids = new ArrayList<>();
    ids.add(id);
    if (additionalIds != null) {
        for (String additionalId : additionalIds) {
            if (!isNullOrEmpty(additionalId)) {
                ids.add(additionalId);
            }
        }
    }
    return ids.stream().map(Ids::asId).collect(joining(String.valueOf(separator)));
}
// Interface fields are implicitly public static final: the reference is constant,
// but the Counter instance it points to is mutated by uniqueId() below.
Counter counter = new Counter();

// NOTE(review): counter.value++ is not atomic, so concurrent callers may receive duplicate ids.
// If uniqueId() can be called off a single (UI/event) thread, an AtomicInteger would be needed;
// the field type is part of this public interface, so it is left unchanged here.
static String uniqueId() {
    return "hal-uid-" + counter.value++;
}
}
| |
package snisi.entities;
import java.util.Vector;
import snisi.entities.EntityHashTable;
/**
* List of static codes and names for Entities/Locations
* Automatically generated.
*/
public class EntityHashTableYNJ6 extends EntityHashTable {

    /**
     * Builds the static location tree for the YOUWAROU (code YNJ6) health district.
     * Generated data — do not hand-edit; children are health areas, grandchildren are villages.
     */
    public EntityHashTableYNJ6() {
        this.code = "YNJ6";
        this.name = "YOUWAROU";
        this.children = new Vector();
        // Referral health center (leaf — no villages attached).
        EntityHashTable h6770 = new EntityHashTable("6770", "YOUWAROU CSREF");
        this.children.addElement(h6770);
        // Health area HT28: AMBIRI and its villages.
        EntityHashTable hHT28 = new EntityHashTable("HT28", "AMBIRI");
        EntityHashTable v58025001 = new EntityHashTable("58025001", "AMBIRI-HABE");
        hHT28.children.addElement(v58025001);
        EntityHashTable v58025042 = new EntityHashTable("58025042", "AMBIRI-KONG");
        hHT28.children.addElement(v58025042);
        EntityHashTable v58025083 = new EntityHashTable("58025083", "AMBIRI-OURO");
        hHT28.children.addElement(v58025083);
        EntityHashTable v58025124 = new EntityHashTable("58025124", "ARANGA");
        hHT28.children.addElement(v58025124);
        EntityHashTable v58025165 = new EntityHashTable("58025165", "BABA-GOUNGOU");
        hHT28.children.addElement(v58025165);
        EntityHashTable v58025206 = new EntityHashTable("58025206", "DIAMKOKO");
        hHT28.children.addElement(v58025206);
        EntityHashTable v58025247 = new EntityHashTable("58025247", "DIAMWELY");
        hHT28.children.addElement(v58025247);
        EntityHashTable v58025288 = new EntityHashTable("58025288", "DIANGORO");
        hHT28.children.addElement(v58025288);
        EntityHashTable v58025329 = new EntityHashTable("58025329", "DIOMOU");
        hHT28.children.addElement(v58025329);
        EntityHashTable v58025370 = new EntityHashTable("58025370", "DOMA");
        hHT28.children.addElement(v58025370);
        EntityHashTable v58025411 = new EntityHashTable("58025411", "GOUNOUMA");
        hHT28.children.addElement(v58025411);
        EntityHashTable v58025452 = new EntityHashTable("58025452", "KABE");
        hHT28.children.addElement(v58025452);
        EntityHashTable v58025534 = new EntityHashTable("58025534", "KONG");
        hHT28.children.addElement(v58025534);
        EntityHashTable v58025575 = new EntityHashTable("58025575", "KOSSOMBO");
        hHT28.children.addElement(v58025575);
        EntityHashTable v58025657 = new EntityHashTable("58025657", "MODIOKO");
        hHT28.children.addElement(v58025657);
        EntityHashTable v58025698 = new EntityHashTable("58025698", "NIAMA");
        hHT28.children.addElement(v58025698);
        EntityHashTable v58025739 = new EntityHashTable("58025739", "SAREDINA");
        hHT28.children.addElement(v58025739);
        EntityHashTable v58025780 = new EntityHashTable("58025780", "SINGAMA");
        hHT28.children.addElement(v58025780);
        EntityHashTable v58025821 = new EntityHashTable("58025821", "SOUNKAROU");
        hHT28.children.addElement(v58025821);
        EntityHashTable v58025862 = new EntityHashTable("58025862", "TANGA");
        hHT28.children.addElement(v58025862);
        EntityHashTable v58025903 = new EntityHashTable("58025903", "TILKAL");
        hHT28.children.addElement(v58025903);
        EntityHashTable v58025944 = new EntityHashTable("58025944", "YONGONA");
        hHT28.children.addElement(v58025944);
        EntityHashTable v58061914 = new EntityHashTable("58061914", "KOKORO");
        hHT28.children.addElement(v58061914);
        this.children.addElement(hHT28);
        // Health area TTK1: DOGO and its villages.
        EntityHashTable hTTK1 = new EntityHashTable("TTK1", "DOGO");
        EntityHashTable v58001001 = new EntityHashTable("58001001", "BARE");
        hTTK1.children.addElement(v58001001);
        EntityHashTable v58001100 = new EntityHashTable("58001100", "DOGO");
        hTTK1.children.addElement(v58001100);
        EntityHashTable v58001199 = new EntityHashTable("58001199", "FARAYENI");
        hTTK1.children.addElement(v58001199);
        EntityHashTable v58001298 = new EntityHashTable("58001298", "FETODJE");
        hTTK1.children.addElement(v58001298);
        EntityHashTable v58001397 = new EntityHashTable("58001397", "GANDE-TAMA");
        hTTK1.children.addElement(v58001397);
        EntityHashTable v58001496 = new EntityHashTable("58001496", "KARAN");
        hTTK1.children.addElement(v58001496);
        EntityHashTable v58001595 = new EntityHashTable("58001595", "OUROUNDE");
        hTTK1.children.addElement(v58001595);
        EntityHashTable v58001694 = new EntityHashTable("58001694", "SARBERE");
        hTTK1.children.addElement(v58001694);
        EntityHashTable v58001793 = new EntityHashTable("58001793", "SARE-DINA");
        hTTK1.children.addElement(v58001793);
        EntityHashTable v58001892 = new EntityHashTable("58001892", "YERE-YERE");
        hTTK1.children.addElement(v58001892);
        this.children.addElement(hTTK1);
        // Health area 4GZ7: FAOU and its villages.
        EntityHashTable h4GZ7 = new EntityHashTable("4GZ7", "FAOU");
        EntityHashTable v58025594 = new EntityHashTable("58025594", "TIEL-BARO");
        h4GZ7.children.addElement(v58025594);
        EntityHashTable v58061999 = new EntityHashTable("58061999", "DARI-SELOUMA");
        h4GZ7.children.addElement(v58061999);
        EntityHashTable v58061020 = new EntityHashTable("58061020", "BARAOU");
        h4GZ7.children.addElement(v58061020);
        EntityHashTable v58061058 = new EntityHashTable("58061058", "BOEFFA");
        h4GZ7.children.addElement(v58061058);
        EntityHashTable v58061077 = new EntityHashTable("58061077", "DAGAMBE");
        h4GZ7.children.addElement(v58061077);
        EntityHashTable v58061248 = new EntityHashTable("58061248", "FAOU");
        h4GZ7.children.addElement(v58061248);
        EntityHashTable v58061324 = new EntityHashTable("58061324", "GONDA");
        h4GZ7.children.addElement(v58061324);
        EntityHashTable v58061362 = new EntityHashTable("58061362", "GOUMNEWAL");
        h4GZ7.children.addElement(v58061362);
        EntityHashTable v58061400 = new EntityHashTable("58061400", "INGUERI");
        h4GZ7.children.addElement(v58061400);
        EntityHashTable v58061419 = new EntityHashTable("58061419", "ISSEYE");
        h4GZ7.children.addElement(v58061419);
        EntityHashTable v58061438 = new EntityHashTable("58061438", "KEREGOUNA");
        h4GZ7.children.addElement(v58061438);
        EntityHashTable v58061514 = new EntityHashTable("58061514", "KORODINGUE");
        h4GZ7.children.addElement(v58061514);
        EntityHashTable v58061552 = new EntityHashTable("58061552", "MISSE-OURO");
        h4GZ7.children.addElement(v58061552);
        EntityHashTable v58061571 = new EntityHashTable("58061571", "MISSE-SARE");
        h4GZ7.children.addElement(v58061571);
        EntityHashTable v58061590 = new EntityHashTable("58061590", "NAMBO");
        h4GZ7.children.addElement(v58061590);
        EntityHashTable v58061666 = new EntityHashTable("58061666", "ONGOM");
        h4GZ7.children.addElement(v58061666);
        EntityHashTable v58061685 = new EntityHashTable("58061685", "ROUNDE-GOUARD");
        h4GZ7.children.addElement(v58061685);
        EntityHashTable v58061742 = new EntityHashTable("58061742", "SARE-BANI");
        h4GZ7.children.addElement(v58061742);
        EntityHashTable v58061761 = new EntityHashTable("58061761", "SARE-HABE");
        h4GZ7.children.addElement(v58061761);
        EntityHashTable v58061441 = new EntityHashTable("58061441", "GOUNKI");
        h4GZ7.children.addElement(v58061441);
        this.children.addElement(h4GZ7);
        // Health area 84K9: GATHI and its villages.
        EntityHashTable h84K9 = new EntityHashTable("84K9", "GATHI");
        EntityHashTable v58049998 = new EntityHashTable("58049998", "FRACTION ELMAMOR");
        h84K9.children.addElement(v58049998);
        EntityHashTable v58049992 = new EntityHashTable("58049992", "HARDANE KAMANI");
        h84K9.children.addElement(v58049992);
        EntityHashTable v58049103 = new EntityHashTable("58049103", "BANDARE");
        h84K9.children.addElement(v58049103);
        EntityHashTable v58049137 = new EntityHashTable("58049137", "BOUTA");
        h84K9.children.addElement(v58049137);
        EntityHashTable v58049171 = new EntityHashTable("58049171", "DEGUE");
        h84K9.children.addElement(v58049171);
        EntityHashTable v58049205 = new EntityHashTable("58049205", "DJONDO");
        h84K9.children.addElement(v58049205);
        EntityHashTable v58049239 = new EntityHashTable("58049239", "GATHI-LOUMO");
        h84K9.children.addElement(v58049239);
        EntityHashTable v58049273 = new EntityHashTable("58049273", "GOUMBOUMBA");
        h84K9.children.addElement(v58049273);
        EntityHashTable v58049307 = new EntityHashTable("58049307", "HERAO");
        h84K9.children.addElement(v58049307);
        EntityHashTable v58049341 = new EntityHashTable("58049341", "KALASSEGUI");
        h84K9.children.addElement(v58049341);
        EntityHashTable v58049375 = new EntityHashTable("58049375", "KAMPA-KOUNA");
        h84K9.children.addElement(v58049375);
        EntityHashTable v58049409 = new EntityHashTable("58049409", "KERBE-KOUNA");
        h84K9.children.addElement(v58049409);
        EntityHashTable v58049443 = new EntityHashTable("58049443", "KORA");
        h84K9.children.addElement(v58049443);
        EntityHashTable v58049477 = new EntityHashTable("58049477", "KOUBITA");
        h84K9.children.addElement(v58049477);
        EntityHashTable v58049511 = new EntityHashTable("58049511", "LANADJIRI");
        h84K9.children.addElement(v58049511);
        EntityHashTable v58049579 = new EntityHashTable("58049579", "SAMANAMA");
        h84K9.children.addElement(v58049579);
        EntityHashTable v58049613 = new EntityHashTable("58049613", "SELENGOUROU");
        h84K9.children.addElement(v58049613);
        EntityHashTable v58049647 = new EntityHashTable("58049647", "SOKON-DEMA");
        h84K9.children.addElement(v58049647);
        EntityHashTable v58049715 = new EntityHashTable("58049715", "SONKALY");
        h84K9.children.addElement(v58049715);
        EntityHashTable v58049817 = new EntityHashTable("58049817", "TIAL");
        h84K9.children.addElement(v58049817);
        EntityHashTable v58049851 = new EntityHashTable("58049851", "TIOUKY");
        h84K9.children.addElement(v58049851);
        EntityHashTable v58049885 = new EntityHashTable("58049885", "TOULAL");
        h84K9.children.addElement(v58049885);
        EntityHashTable v58049919 = new EntityHashTable("58049919", "WARIO");
        h84K9.children.addElement(v58049919);
        EntityHashTable v58049146 = new EntityHashTable("58049146", "WARIO-NOMADE");
        h84K9.children.addElement(v58049146);
        EntityHashTable v58049202 = new EntityHashTable("58049202", "LALADJI TOMORA");
        h84K9.children.addElement(v58049202);
        EntityHashTable v58049038 = new EntityHashTable("58049038", "BENTIE");
        h84K9.children.addElement(v58049038);
        EntityHashTable v58049863 = new EntityHashTable("58049863", "SOURANGO");
        h84K9.children.addElement(v58049863);
        EntityHashTable v58049695 = new EntityHashTable("58049695", "GNIRIGNARA");
        h84K9.children.addElement(v58049695);
        EntityHashTable v58049927 = new EntityHashTable("58049927", "ALEYDI");
        h84K9.children.addElement(v58049927);
        EntityHashTable v58049136 = new EntityHashTable("58049136", "KELTINAKAWATT");
        h84K9.children.addElement(v58049136);
        EntityHashTable v58049882 = new EntityHashTable("58049882", "SANANGA");
        h84K9.children.addElement(v58049882);
        EntityHashTable v58049323 = new EntityHashTable("58049323", "SOURMOU");
        h84K9.children.addElement(v58049323);
        EntityHashTable v58049339 = new EntityHashTable("58049339", "NEMAT TCHABAHINE");
        h84K9.children.addElement(v58049339);
        this.children.addElement(h84K9);
        // Health area D946: KORMOU-PEULH and its villages.
        EntityHashTable hD946 = new EntityHashTable("D946", "KORMOU-PEULH");
        EntityHashTable v58037001 = new EntityHashTable("58037001", "AYOM");
        hD946.children.addElement(v58037001);
        EntityHashTable v58037077 = new EntityHashTable("58037077", "BIA");
        hD946.children.addElement(v58037077);
        EntityHashTable v58037153 = new EntityHashTable("58037153", "DIOU");
        hD946.children.addElement(v58037153);
        EntityHashTable v58037229 = new EntityHashTable("58037229", "DISKANA");
        hD946.children.addElement(v58037229);
        EntityHashTable v58037305 = new EntityHashTable("58037305", "DOUKOU");
        hD946.children.addElement(v58037305);
        EntityHashTable v58037381 = new EntityHashTable("58037381", "KORMOU-MARKA");
        hD946.children.addElement(v58037381);
        EntityHashTable v58037457 = new EntityHashTable("58037457", "KORMOU-PEULH");
        hD946.children.addElement(v58037457);
        EntityHashTable v58037533 = new EntityHashTable("58037533", "MAYEL-BOURGOU");
        hD946.children.addElement(v58037533);
        EntityHashTable v58037609 = new EntityHashTable("58037609", "OWA");
        hD946.children.addElement(v58037609);
        EntityHashTable v58037685 = new EntityHashTable("58037685", "SAH-TOUSKEL");
        hD946.children.addElement(v58037685);
        EntityHashTable v58037761 = new EntityHashTable("58037761", "SEBY");
        hD946.children.addElement(v58037761);
        EntityHashTable v58037837 = new EntityHashTable("58037837", "SIGUI");
        hD946.children.addElement(v58037837);
        EntityHashTable v58037913 = new EntityHashTable("58037913", "SIO");
        hD946.children.addElement(v58037913);
        this.children.addElement(hD946);
        // Health area NMF0: QUIDIO and its villages.
        EntityHashTable hNMF0 = new EntityHashTable("NMF0", "QUIDIO");
        EntityHashTable v58013040 = new EntityHashTable("58013040", "AWOYE");
        hNMF0.children.addElement(v58013040);
        EntityHashTable v58013079 = new EntityHashTable("58013079", "BAMAGAYE");
        hNMF0.children.addElement(v58013079);
        EntityHashTable v58013118 = new EntityHashTable("58013118", "BANGOU");
        hNMF0.children.addElement(v58013118);
        EntityHashTable v58013162 = new EntityHashTable("58013162", "DEBARE THIOKA");
        hNMF0.children.addElement(v58013162);
        EntityHashTable v58013274 = new EntityHashTable("58013274", "FATOMARE");
        hNMF0.children.addElement(v58013274);
        EntityHashTable v58013391 = new EntityHashTable("58013391", "GOMNI-KOUBOYE");
        hNMF0.children.addElement(v58013391);
        EntityHashTable v58013430 = new EntityHashTable("58013430", "GOURAO BOZO");
        hNMF0.children.addElement(v58013430);
        EntityHashTable v58013469 = new EntityHashTable("58013469", "GOURAO FOULBE");
        hNMF0.children.addElement(v58013469);
        EntityHashTable v58013508 = new EntityHashTable("58013508", "GOURAO SARE");
        hNMF0.children.addElement(v58013508);
        EntityHashTable v58013547 = new EntityHashTable("58013547", "GUIDIO SARE");
        hNMF0.children.addElement(v58013547);
        EntityHashTable v58013586 = new EntityHashTable("58013586", "GUIDIO OURO");
        hNMF0.children.addElement(v58013586);
        EntityHashTable v58013625 = new EntityHashTable("58013625", "KARMA");
        hNMF0.children.addElement(v58013625);
        EntityHashTable v58013742 = new EntityHashTable("58013742", "SAMBERI");
        hNMF0.children.addElement(v58013742);
        EntityHashTable v58013820 = new EntityHashTable("58013820", "SOROBA");
        hNMF0.children.addElement(v58013820);
        EntityHashTable v58013937 = new EntityHashTable("58013937", "WORO");
        hNMF0.children.addElement(v58013937);
        EntityHashTable v58013773 = new EntityHashTable("58013773", "MILALI");
        hNMF0.children.addElement(v58013773);
        this.children.addElement(hNMF0);
        // Health area E6G5: SAH and its villages.
        EntityHashTable hE6G5 = new EntityHashTable("E6G5", "SAH");
        EntityHashTable v58025255 = new EntityHashTable("58025255", "SARE-PATE");
        hE6G5.children.addElement(v58025255);
        EntityHashTable v58061001 = new EntityHashTable("58061001", "BANGA");
        hE6G5.children.addElement(v58061001);
        EntityHashTable v58061039 = new EntityHashTable("58061039", "BIRO");
        hE6G5.children.addElement(v58061039);
        EntityHashTable v58061096 = new EntityHashTable("58061096", "DAKO-MALINKE");
        hE6G5.children.addElement(v58061096);
        EntityHashTable v58061115 = new EntityHashTable("58061115", "DIADDO");
        hE6G5.children.addElement(v58061115);
        EntityHashTable v58061134 = new EntityHashTable("58061134", "DIOGUI-OURO");
        hE6G5.children.addElement(v58061134);
        EntityHashTable v58061153 = new EntityHashTable("58061153", "DIOGUI-SARE");
        hE6G5.children.addElement(v58061153);
        EntityHashTable v58061172 = new EntityHashTable("58061172", "DIRMANDIO");
        hE6G5.children.addElement(v58061172);
        EntityHashTable v58061191 = new EntityHashTable("58061191", "DOKALABOUGOU");
        hE6G5.children.addElement(v58061191);
        EntityHashTable v58061210 = new EntityHashTable("58061210", "DOMBIRA");
        hE6G5.children.addElement(v58061210);
        EntityHashTable v58061229 = new EntityHashTable("58061229", "DRAMBO");
        hE6G5.children.addElement(v58061229);
        EntityHashTable v58061267 = new EntityHashTable("58061267", "GADIA");
        hE6G5.children.addElement(v58061267);
        EntityHashTable v58061286 = new EntityHashTable("58061286", "GAROU");
        hE6G5.children.addElement(v58061286);
        EntityHashTable v58061305 = new EntityHashTable("58061305", "GOGONA");
        hE6G5.children.addElement(v58061305);
        EntityHashTable v58061343 = new EntityHashTable("58061343", "GORE");
        hE6G5.children.addElement(v58061343);
        EntityHashTable v58061381 = new EntityHashTable("58061381", "GUEDIA");
        hE6G5.children.addElement(v58061381);
        EntityHashTable v58061457 = new EntityHashTable("58061457", "KOETAO");
        hE6G5.children.addElement(v58061457);
        EntityHashTable v58061476 = new EntityHashTable("58061476", "KOROBONGO-OURO");
        hE6G5.children.addElement(v58061476);
        EntityHashTable v58061495 = new EntityHashTable("58061495", "KOROBONGO-SARE");
        hE6G5.children.addElement(v58061495);
        EntityHashTable v58061533 = new EntityHashTable("58061533", "MARGOU");
        hE6G5.children.addElement(v58061533);
        EntityHashTable v58061609 = new EntityHashTable("58061609", "NAWAL");
        hE6G5.children.addElement(v58061609);
        EntityHashTable v58061628 = new EntityHashTable("58061628", "NINGO");
        hE6G5.children.addElement(v58061628);
        EntityHashTable v58061704 = new EntityHashTable("58061704", "SAH");
        hE6G5.children.addElement(v58061704);
        EntityHashTable v58061723 = new EntityHashTable("58061723", "SAKA");
        hE6G5.children.addElement(v58061723);
        EntityHashTable v58061780 = new EntityHashTable("58061780", "SARE-MASSI");
        hE6G5.children.addElement(v58061780);
        EntityHashTable v58061837 = new EntityHashTable("58061837", "SOGO-OURO");
        hE6G5.children.addElement(v58061837);
        EntityHashTable v58061856 = new EntityHashTable("58061856", "SOKOURA");
        hE6G5.children.addElement(v58061856);
        EntityHashTable v58061875 = new EntityHashTable("58061875", "TAKOUTALA");
        hE6G5.children.addElement(v58061875);
        EntityHashTable v58061913 = new EntityHashTable("58061913", "YOUGOUNA");
        hE6G5.children.addElement(v58061913);
        EntityHashTable v58061932 = new EntityHashTable("58061932", "YOUMOURA");
        hE6G5.children.addElement(v58061932);
        this.children.addElement(hE6G5);
        // Health area AWK2: YOUWAROU CENTRAL and its villages.
        EntityHashTable hAWK2 = new EntityHashTable("AWK2", "YOUWAROU CENTRAL");
        EntityHashTable v58013001 = new EntityHashTable("58013001", "AKKA");
        hAWK2.children.addElement(v58013001);
        EntityHashTable v58013313 = new EntityHashTable("58013313", "FEROBE");
        hAWK2.children.addElement(v58013313);
        EntityHashTable v58013352 = new EntityHashTable("58013352", "FITTOBE");
        hAWK2.children.addElement(v58013352);
        EntityHashTable v58013781 = new EntityHashTable("58013781", "SOBE");
        hAWK2.children.addElement(v58013781);
        EntityHashTable v58073999 = new EntityHashTable("58073999", "M'BANADJI");
        hAWK2.children.addElement(v58073999);
        EntityHashTable v58073001 = new EntityHashTable("58073001", "AOURE");
        hAWK2.children.addElement(v58073001);
        EntityHashTable v58073048 = new EntityHashTable("58073048", "BANGUITA");
        hAWK2.children.addElement(v58073048);
        EntityHashTable v58073142 = new EntityHashTable("58073142", "DIOLLY");
        hAWK2.children.addElement(v58073142);
        EntityHashTable v58073189 = new EntityHashTable("58073189", "ENGUEM");
        hAWK2.children.addElement(v58073189);
        EntityHashTable v58073236 = new EntityHashTable("58073236", "FAFOU");
        hAWK2.children.addElement(v58073236);
        EntityHashTable v58073283 = new EntityHashTable("58073283", "GOUREYE");
        hAWK2.children.addElement(v58073283);
        EntityHashTable v58073330 = new EntityHashTable("58073330", "KADIGUI");
        hAWK2.children.addElement(v58073330);
        EntityHashTable v58073377 = new EntityHashTable("58073377", "KANGOUROU");
        hAWK2.children.addElement(v58073377);
        EntityHashTable v58073471 = new EntityHashTable("58073471", "M'BARADOU");
        hAWK2.children.addElement(v58073471);
        EntityHashTable v58073518 = new EntityHashTable("58073518", "OUALADOU");
        hAWK2.children.addElement(v58073518);
        EntityHashTable v58073565 = new EntityHashTable("58073565", "OUANAM");
        hAWK2.children.addElement(v58073565);
        EntityHashTable v58073659 = new EntityHashTable("58073659", "ROUBERE");
        hAWK2.children.addElement(v58073659);
        EntityHashTable v58073706 = new EntityHashTable("58073706", "SAKAMARA");
        hAWK2.children.addElement(v58073706);
        EntityHashTable v58073753 = new EntityHashTable("58073753", "SIMASSI");
        hAWK2.children.addElement(v58073753);
        EntityHashTable v58073800 = new EntityHashTable("58073800", "TANA");
        hAWK2.children.addElement(v58073800);
        EntityHashTable v58073847 = new EntityHashTable("58073847", "YOGORO");
        hAWK2.children.addElement(v58073847);
        EntityHashTable v58073894 = new EntityHashTable("58073894", "YOUWAROU OURO");
        hAWK2.children.addElement(v58073894);
        EntityHashTable v58073188 = new EntityHashTable("58073188", "HOMBOLORE");
        hAWK2.children.addElement(v58073188);
        this.children.addElement(hAWK2);
        // Health area B7G4: AMBIRI-HABE (no villages attached in the generated data).
        EntityHashTable hB7G4 = new EntityHashTable("B7G4", "AMBIRI-HABE");
        this.children.addElement(hB7G4);
    }
}
| |
package com.rehivetech.beeeon.gui.adapter.dashboard;
import android.annotation.SuppressLint;
import android.content.Context;
import android.os.Parcel;
import android.os.Parcelable;
import android.support.annotation.IntDef;
import android.support.v4.content.ContextCompat;
import android.support.v7.widget.CardView;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.TextView;
import com.rehivetech.beeeon.IconResourceType;
import com.rehivetech.beeeon.R;
import com.rehivetech.beeeon.controller.Controller;
import com.rehivetech.beeeon.gui.adapter.RecyclerViewSelectableAdapter;
import com.rehivetech.beeeon.household.device.Module;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.util.ArrayList;
import java.util.List;
/**
* @author martin
* @since 27.2.16
*/
public class DashboardModuleSelectAdapter extends RecyclerViewSelectableAdapter {

    /** View type: selectable module card. */
    public static final int LAYOUT_TYPE_MODULE = 0;
    /** View type: device-name header row. */
    public static final int LAYOUT_TYPE_DEVICE_NAME = 1;
    // NOTE(review): "LAYOT" looks like a typo for "LAYOUT"; left as-is because the constant is public API.
    public static final int LAYOT_TYPE_DEVICE_GROUP_NAME = 2;

    // Mixed backing list: HeaderItem entries interleaved with ModuleItem entries.
    private List<Object> mItems = new ArrayList<>();
    private Context mContext;
    private ItemClickListener mClickListener;

    public DashboardModuleSelectAdapter(Context context, ItemClickListener clickListener) {
        super(context);
        mContext = context;
        mClickListener = clickListener;
    }

    // NOTE(review): despite the "set" name this APPENDS to any items already present — callers must
    // not invoke it twice expecting replacement; confirm whether clear-then-add was intended.
    public void setItems(List<Object> items) {
        mItems.addAll(items);
        notifyDataSetChanged();
    }

    /** Selects the first ModuleItem in the list (headers are skipped); no-op if there is none. */
    public void selectFirstModuleItem() {
        int i = 0;
        for (Object o : mItems) {
            if (o instanceof ModuleItem) {
                toggleSelection(i);
                break;
            }
            i++;
        }
    }

    /** Returns the raw item (HeaderItem or ModuleItem) at the given adapter position. */
    public Object getItem(int index) {
        return mItems.get(index);
    }

    @Override
    public RecyclerView.ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
        switch (viewType) {
            // Both header variants share the same layout; only text color differs (see HeaderViewHolder.bind).
            case LAYOUT_TYPE_DEVICE_NAME:
            case LAYOT_TYPE_DEVICE_GROUP_NAME: {
                View view = LayoutInflater.from(parent.getContext()).inflate(R.layout.item_add_dashboard_module_header, parent, false);
                return new HeaderViewHolder(view);
            }
            case LAYOUT_TYPE_MODULE: {
                View view = LayoutInflater.from(parent.getContext()).inflate(R.layout.item_add_dashboard_module, parent, false);
                return new ModuleViewHolder(view);
            }
        }
        // NOTE(review): returning null for an unknown viewType will crash inside RecyclerView;
        // an IllegalArgumentException here would fail faster and clearer.
        return null;
    }

    @Override
    public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) {
        Object item = mItems.get(position);
        if (item instanceof HeaderItem) {
            ((HeaderViewHolder) holder).bind((HeaderItem) item);
        } else {
            ((ModuleViewHolder) holder).bind((ModuleItem) item, position);
        }
    }

    @Override
    public int getItemCount() {
        return mItems.size();
    }

    @Override
    public int getItemViewType(int position) {
        Object item = mItems.get(position);
        if (item instanceof HeaderItem) {
            return ((HeaderItem) item).mHeaderType == HeaderItem.ITEM_TYPE_DEVICE_NAME ? LAYOUT_TYPE_DEVICE_NAME : LAYOT_TYPE_DEVICE_GROUP_NAME;
        }
        else {
            return LAYOUT_TYPE_MODULE;
        }
    }

    /** Holder for a module card: icon + name, whole card clickable and selectable. */
    public class ModuleViewHolder extends SelectableViewHolder {

        public final ImageView mIcon;
        public final TextView mName;
        public final CardView mRoot;

        public ModuleViewHolder(View itemView) {
            super(itemView);
            mRoot = (CardView) itemView;
            mIcon = (ImageView) itemView.findViewById(R.id.item_add_dashboard_module_icon);
            mName = (TextView) itemView.findViewById(R.id.item_add_dashboard_module_name);
        }

        // NOTE(review): `position` is captured at bind time; after dataset changes the captured value
        // can be stale — getAdapterPosition() inside onClick would be the safer idiom.
        public void bind(final ModuleItem item, final int position) {
            Controller controller = Controller.getInstance(mContext);
            Module module = controller.getDevicesModel().getModule(item.mGateId, item.mAbsoluteId);
            // Module may have disappeared from the model since the list was built; show nothing then.
            if (module == null) {
                return;
            }
            // White icon on the highlighted (selected) card, dark icon otherwise.
            mIcon.setImageResource(module.getIconResource(isSelected(position) ? IconResourceType.WHITE : IconResourceType.DARK));
            mName.setText(module.getName(mContext));
            mRoot.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View v) {
                    // Single-selection behavior: clear everything, then select the clicked row.
                    clearSelection();
                    toggleSelection(position);
                    if (mClickListener != null) {
                        mClickListener.onItemClick(item.mAbsoluteId);
                    }
                }
            });
            setSelected(isSelected(position));
        }

        @Override
        protected void setSelectedBackground(boolean isSelected) {
            mRoot.setSelected(isSelected);
            if (isSelected) {
                mRoot.setCardBackgroundColor(ContextCompat.getColor(mContext, R.color.beeeon_primary));
            } else {
                mRoot.setCardBackgroundColor(ContextCompat.getColor(mContext, R.color.cardview_light_background));
            }
        }
    }

    /** Holder for a header row: a single TextView whose color encodes the header kind. */
    public class HeaderViewHolder extends RecyclerView.ViewHolder {

        public final TextView mLabel;

        public HeaderViewHolder(View itemView) {
            super(itemView);
            mLabel = (TextView) itemView;
        }

        @SuppressWarnings("deprecation")
        @SuppressLint("PrivateResource")
        public void bind(HeaderItem item) {
            mLabel.setText(item.mName);
            // Device names in black, group names in the accent color.
            int textColor = item.mHeaderType == HeaderItem.ITEM_TYPE_DEVICE_NAME ? R.color.black : R.color.beeeon_accent;
            mLabel.setTextColor(ContextCompat.getColor(mContext, textColor));
        }
    }

    /** Parcelable value object identifying a module by gate id + absolute module id. */
    public static class ModuleItem implements Parcelable{

        private String mGateId;
        private String mAbsoluteId;

        public ModuleItem(String absoluteId, String gateId) {
            mAbsoluteId = absoluteId;
            mGateId = gateId;
        }

        // Field order here must match writeToParcel (gate id first, then absolute id).
        protected ModuleItem(Parcel in) {
            mGateId = in.readString();
            mAbsoluteId = in.readString();
        }

        public String getAbsoluteId() {
            return mAbsoluteId;
        }

        @Override
        public int describeContents() {
            return 0;
        }

        @Override
        public void writeToParcel(Parcel dest, int flags) {
            dest.writeString(mGateId);
            dest.writeString(mAbsoluteId);
        }

        public static final Creator<ModuleItem> CREATOR = new Creator<ModuleItem>() {
            public ModuleItem createFromParcel(Parcel source) {
                return new ModuleItem(source);
            }

            public ModuleItem[] newArray(int size) {
                return new ModuleItem[size];
            }
        };

        /** Placeholder instance with both ids blank; see {@link #isEmpty()}. */
        public static ModuleItem getEmpty() {
            return new ModuleItem("", "");
        }

        /** True when either id is blank, i.e. the item does not reference a real module. */
        public boolean isEmpty() {
            return mGateId.isEmpty() || mAbsoluteId.isEmpty();
        }
    }

    /** Header row model: label text plus which kind of header it is. */
    public static class HeaderItem {

        public static final int ITEM_TYPE_DEVICE_NAME = 0;
        public static final int ITEM_TYPE_DEVICE_GROUP = 1;

        @Retention(RetentionPolicy.SOURCE)
        @IntDef({ITEM_TYPE_DEVICE_NAME, ITEM_TYPE_DEVICE_GROUP})
        public @interface HeaderType{}

        private String mName;
        private @HeaderType int mHeaderType;

        public HeaderItem(String name, @HeaderType int headerType) {
            mName = name;
            mHeaderType = headerType;
        }
    }

    /** Callback invoked with the absolute module id when a module card is clicked. */
    public interface ItemClickListener {
        void onItemClick(String absoluteModuleId);
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.lens.cli.commands;
import java.io.*;
import java.nio.charset.Charset;
import java.util.List;
import java.util.UUID;
import javax.ws.rs.core.Response;
import org.apache.lens.api.query.*;
import org.apache.lens.api.result.PrettyPrintable;
import org.apache.lens.cli.commands.annotations.UserDocumentation;
import org.apache.lens.client.LensClient;
import org.apache.lens.client.exceptions.LensAPIException;
import org.apache.lens.client.exceptions.LensBriefErrorException;
import org.apache.lens.client.model.BriefError;
import org.apache.lens.client.model.IdBriefErrorTemplate;
import org.apache.lens.client.model.IdBriefErrorTemplateKey;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.springframework.shell.core.annotation.CliCommand;
import org.springframework.shell.core.annotation.CliOption;
import org.springframework.stereotype.Component;
import com.google.common.base.Joiner;
/**
 * The Class LensQueryCommands.
 * SUSPEND CHECKSTYLE CHECK InnerAssignmentCheck
 */
@Component
@UserDocumentation(title = "Query Management",
  description = "This section provides commands for query life cycle - "
    + "submit, check status,\n"
    + "  fetch results, kill or list all the queries. Also provides commands for\n"
    + "  prepare a query, destroy a prepared query and list all prepared queries.\n"
    + "\n"
    + "  Please note that, character <<<\">>> is used as delimiter by the Spring Shell\n"
    + "  framework, which is used to build lens cli. So queries which require <<<\">>>,\n"
    + "  should be prefixed with another double quote. For example\n"
    + "  <<<query execute cube select id,name from dim_table where name != \"\"first\"\">>>,\n"
    + "  will be parsed as <<<cube select id,name from dim_table where name != \"first\">>>")
public class LensQueryCommands extends BaseLensCommand {
  /**
   * Execute query, either synchronously (returning formatted results) or
   * asynchronously (returning the query handle).
   *
   * @param sql       the query string to execute
   * @param async     if true, submit asynchronously and return the handle only
   * @param queryName optional display name for the query
   * @return query handle (async) or formatted result set (sync); on failure,
   *         a pretty-printed error description
   */
  @CliCommand(value = "query execute",
    help = "Execute query <query-string>."
      +
      " If <async> is true, The query is launched in async manner and query handle is returned. It's by default false."
      + " <query name> can also be provided, though not required")
  public String executeQuery(
    @CliOption(key = {"", "query"}, mandatory = true, help = "<query-string>") String sql,
    @CliOption(key = {"async"}, mandatory = false, unspecifiedDefaultValue = "false",
      specifiedDefaultValue = "true", help = "<async>") boolean async,
    @CliOption(key = {"name"}, mandatory = false, help = "<query-name>") String queryName) {
    PrettyPrintable cliOutput;
    try {
      if (async) {
        QueryHandle queryHandle = getClient().executeQueryAsynch(sql, queryName).getData();
        return queryHandle.getHandleIdString();
      } else {
        return formatResultSet(getClient().getResults(sql, queryName));
      }
    } catch (final LensAPIException e) {
      // Wrap the server-side error with the request id so the user can correlate it.
      BriefError briefError = new BriefError(e.getLensAPIErrorCode(), e.getLensAPIErrorMessage());
      cliOutput = new IdBriefErrorTemplate(IdBriefErrorTemplateKey.REQUEST_ID, e.getLensAPIRequestId(), briefError);
    } catch (final LensBriefErrorException e) {
      cliOutput = e.getIdBriefErrorTemplate();
    }
    return cliOutput.toPrettyString();
  }
  /**
   * Format result set as tab-separated rows, followed by a row count and the
   * query's processing time.
   *
   * @param rs the result set (with query statistics) to render
   * @return the rendered string
   */
  private String formatResultSet(LensClient.LensClientResultSetWithStats rs) {
    StringBuilder b = new StringBuilder();
    int numRows = 0;
    if (rs.getResultSet() != null) {
      QueryResultSetMetadata resultSetMetadata = rs.getResultSet().getResultSetMetadata();
      for (ResultColumn column : resultSetMetadata.getColumns()) {
        b.append(column.getName()).append("\t");
      }
      b.append("\n");
      QueryResult r = rs.getResultSet().getResult();
      if (r instanceof InMemoryQueryResult) {
        // In-memory results: print every row inline.
        InMemoryQueryResult temp = (InMemoryQueryResult) r;
        for (ResultRow row : temp.getRows()) {
          for (Object col : row.getValues()) {
            b.append(col).append("\t");
          }
          numRows++;
          b.append("\n");
        }
        b.append(numRows).append(" rows ");
      } else {
        // Persisted results: only report where they were stored.
        PersistentQueryResult temp = (PersistentQueryResult) r;
        b.append("Results of query stored at : ").append(temp.getPersistedURI()).append(" ");
        if (null != temp.getNumRows()) {
          b.append(temp.getNumRows()).append(" rows ");
        }
      }
    }
    if (rs.getQuery() != null) {
      long submissionTime = rs.getQuery().getSubmissionTime();
      long endTime = rs.getQuery().getFinishTime();
      // A non-positive finish time means the query has not finished yet.
      b.append("processed in (").append(endTime > 0 ? ((endTime - submissionTime) / 1000) : 0)
        .append(") seconds.\n");
    }
    return b.toString();
  }
  /**
   * Gets the status of an executed query.
   *
   * @param qh the query handle (UUID string)
   * @return the status, or a not-found message
   */
  @CliCommand(value = "query status", help = "Fetch status of executed query having query handle <query_handle>")
  public String getStatus(
    @CliOption(key = {"", "query_handle"}, mandatory = true, help = "<query_handle>") String qh) {
    QueryStatus status = getClient().getQueryStatus(new QueryHandle(UUID.fromString(qh)));
    if (status == null) {
      return "Unable to find status for " + qh;
    }
    return status.toString();
  }
  /**
   * Gets the query details as pretty-printed JSON.
   *
   * @param qh the query handle (UUID string)
   * @return the query details, or a not-found message
   */
  @CliCommand(value = "query details", help = "Get query details of query with handle <query_handle>")
  public String getDetails(
    @CliOption(key = {"", "query_handle"}, mandatory = true, help
      = "<query_handle>") String qh) {
    LensQuery query = getClient().getQueryDetails(qh);
    if (query == null) {
      return "Unable to find query for " + qh;
    }
    try {
      return formatJson(mapper.writer(pp).writeValueAsString(query));
    } catch (IOException e) {
      throw new IllegalArgumentException(e);
    }
  }
  /**
   * Explain query, optionally saving the plan to a file.
   *
   * @param sql  the query string to explain
   * @param path optional location to save the plan to
   * @return the plan string, or the path the plan was saved to
   * @throws LensAPIException on server-side failure
   * @throws IOException if the plan cannot be written to <code>path</code>
   */
  @CliCommand(value = "query explain", help = "Explain execution plan of query <query-string>. "
    + "Can optionally save the plan to a file by providing <save_location>")
  public String explainQuery(@CliOption(key = { "", "query" }, mandatory = true, help = "<query-string>") String sql,
    @CliOption(key = { "save_location" }, mandatory = false, help = "<save_location>") final File path)
    throws IOException, LensAPIException {
    PrettyPrintable cliOutput;
    try {
      QueryPlan plan = getClient().getQueryPlan(sql).getData();
      if (path != null && StringUtils.isNotBlank(path.getPath())) {
        String validPath = getValidPath(path, false, false);
        try (OutputStreamWriter osw = new OutputStreamWriter(new FileOutputStream(validPath),
          Charset.defaultCharset())) {
          osw.write(plan.getPlanString());
        }
        return "Saved to " + validPath;
      }
      return plan.getPlanString();
    } catch (final LensAPIException e) {
      BriefError briefError = new BriefError(e.getLensAPIErrorCode(), e.getLensAPIErrorMessage());
      cliOutput = new IdBriefErrorTemplate(IdBriefErrorTemplateKey.REQUEST_ID, e.getLensAPIRequestId(), briefError);
    } catch (final LensBriefErrorException e) {
      cliOutput = e.getIdBriefErrorTemplate();
    }
    return cliOutput.toPrettyString();
  }
  /**
   * Gets all queries matching the given (optional) filters.
   *
   * @param state     filter by query status
   * @param queryName filter by query name
   * @param user      filter by submitting user
   * @param fromDate  only queries submitted after this time
   * @param toDate    only queries submitted before this time
   * @return newline-separated handles plus a total count, or "No queries"
   */
  @CliCommand(value = "query list",
    help = "Get all queries. Various filter options can be provided(optionally), "
      + " as can be seen from the command syntax")
  public String getAllQueries(
    @CliOption(key = {"state"}, mandatory = false, help = "<query-status>") String state,
    @CliOption(key = {"name"}, mandatory = false, help = "<query-name>") String queryName,
    @CliOption(key = {"user"}, mandatory = false, help = "<user-who-submitted-query>") String user,
    @CliOption(key = {"fromDate"}, mandatory = false, unspecifiedDefaultValue = "-1", help
      = "<submission-time-is-after>") long fromDate,
    @CliOption(key = {"toDate"}, mandatory = false, unspecifiedDefaultValue = "" + Long.MAX_VALUE, help
      = "<submission-time-is-before>") long toDate) {
    List<QueryHandle> handles = getClient().getQueries(state, queryName, user, fromDate, toDate);
    if (handles != null && !handles.isEmpty()) {
      return Joiner.on("\n").skipNulls().join(handles).concat("\n").concat("Total number of queries: "
        + handles.size());
    } else {
      return "No queries";
    }
  }
  /**
   * Kill query.
   *
   * @param qh the query handle (UUID string)
   * @return a success or failure message
   */
  @CliCommand(value = "query kill", help = "Kill query with handle <query_handle>")
  public String killQuery(
    @CliOption(key = {"", "query_handle"}, mandatory = true, help = "<query_handle>") String qh) {
    boolean status = getClient().killQuery(new QueryHandle(UUID.fromString(qh)));
    if (status) {
      return "Successfully killed " + qh;
    } else {
      return "Failed in killing " + qh;
    }
  }
  /**
   * Gets the query results, either printed inline or saved to a file.
   *
   * @param qh    the query handle (UUID string)
   * @param path  optional location to save the results to
   * @param async if false, wait for the query to complete first
   * @return the formatted results, the save location, or an error message
   */
  @CliCommand(value = "query results",
    help = "get results of query with query handle <query_handle>. If async is false "
      + "then wait till the query execution is completed, it's by default true. "
      + "Can optionally save the results to a file by providing <save_location>.")
  public String getQueryResults(
    @CliOption(key = {"", "query_handle"}, mandatory = true, help = "<query_handle>") String qh,
    @CliOption(key = {"save_location"}, mandatory = false, help = "<save_location>") final File path,
    @CliOption(key = {"async"}, mandatory = false, unspecifiedDefaultValue = "true",
      help = "<async>") boolean async) {
    QueryHandle queryHandle = new QueryHandle(UUID.fromString(qh));
    LensClient.LensClientResultSetWithStats results;
    String location = path != null ? path.getPath() : null;
    try {
      String prefix = "";
      if (StringUtils.isNotBlank(location)) {
        location = getValidPath(path, true, true);
        // Prefer server-side persisted results: stream them straight to disk.
        Response response = getClient().getHttpResults(queryHandle);
        if (response.getStatus() == Response.Status.OK.getStatusCode()) {
          String disposition = (String) response.getHeaders().get("content-disposition").get(0);
          String fileName = disposition.split("=")[1].trim();
          location = getValidPath(new File(location + File.separator + fileName), false, false);
          try (InputStream stream = response.readEntity(InputStream.class);
            FileOutputStream outStream = new FileOutputStream(new File(location))) {
            IOUtils.copy(stream, outStream);
          }
          return "Saved to " + location;
        } else {
          // No downloadable result; fall back to fetching through the client API.
          if (async) {
            results = getClient().getAsyncResults(queryHandle);
          } else {
            results = getClient().getSyncResults(queryHandle);
          }
          if (results.getResultSet() == null) {
            return "Resultset not yet available";
          } else if (results.getResultSet().getResult() instanceof InMemoryQueryResult) {
            location = getValidPath(new File(location + File.separator + qh + ".csv"), false, false);
            try (OutputStreamWriter osw = new OutputStreamWriter(new FileOutputStream(location),
              Charset.defaultCharset())) {
              osw.write(formatResultSet(results));
            }
            return "Saved to " + location;
          } else {
            return "Can't download the result because it's available in driver's persistence.\n"
              + formatResultSet(results);
          }
        }
      } else if (async) {
        return formatResultSet(getClient().getAsyncResults(queryHandle));
      } else {
        return formatResultSet(getClient().getSyncResults(queryHandle));
      }
    } catch (Throwable t) {
      return t.getMessage();
    }
  }
  /**
   * Gets all prepared queries matching the given (optional) filters.
   *
   * @param userName  filter by submitting user
   * @param queryName filter by query name
   * @param fromDate  only queries submitted after this time
   * @param toDate    only queries submitted before this time
   * @return newline-separated prepare handles, or "No prepared queries"
   */
  @CliCommand(value = "prepQuery list",
    help = "Get all prepared queries. Various filters can be provided(optionally)"
      + " as can be seen from command syntax")
  public String getAllPreparedQueries(
    @CliOption(key = {"name"}, mandatory = false, help = "<query-name>") String queryName,
    @CliOption(key = {"user"}, mandatory = false, help = "<user-who-submitted-query>") String userName,
    @CliOption(key = {"fromDate"}, mandatory = false, unspecifiedDefaultValue = "-1", help
      = "<submission-time-is-after>") long fromDate,
    @CliOption(key = {"toDate"}, mandatory = false, unspecifiedDefaultValue = "" + Long.MAX_VALUE, help
      = "<submission-time-is-before>") long toDate) {
    List<QueryPrepareHandle> handles = getClient().getPreparedQueries(userName, queryName, fromDate, toDate);
    if (handles != null && !handles.isEmpty()) {
      return Joiner.on("\n").skipNulls().join(handles);
    } else {
      return "No prepared queries";
    }
  }
  /**
   * Gets the details of a prepared query.
   *
   * @param ph the prepare handle
   * @return a human-readable summary, or "No such handle"
   */
  @CliCommand(value = "prepQuery details", help = "Get prepared query with handle <prepare_handle>")
  public String getPreparedStatus(
    @CliOption(key = {"", "prepare_handle"}, mandatory = true, help = "<prepare_handle>") String ph) {
    LensPreparedQuery prepared = getClient().getPreparedQuery(QueryPrepareHandle.fromString(ph));
    if (prepared != null) {
      StringBuilder sb = new StringBuilder()
        .append("User query:").append(prepared.getUserQuery()).append("\n")
        .append("Prepare handle:").append(prepared.getPrepareHandle()).append("\n")
        .append("User:").append(prepared.getPreparedUser()).append("\n")
        .append("Prepared at:").append(prepared.getPreparedTime()).append("\n")
        .append("Selected driver :").append(prepared.getSelectedDriverClassName()).append("\n")
        .append("Driver query:").append(prepared.getDriverQuery()).append("\n");
      if (prepared.getConf() != null) {
        sb.append("Conf:").append(prepared.getConf().getProperties()).append("\n");
      }
      return sb.toString();
    } else {
      return "No such handle";
    }
  }
  /**
   * Destroy prepared query.
   *
   * @param ph the prepare handle
   * @return a success or failure message
   */
  @CliCommand(value = "prepQuery destroy", help = "Destroy prepared query with handle <prepare_handle>")
  public String destroyPreparedQuery(
    @CliOption(key = {"", "prepare_handle"}, mandatory = true, help = "<prepare_handle>") String ph) {
    boolean status = getClient().destroyPrepared(new QueryPrepareHandle(UUID.fromString(ph)));
    if (status) {
      return "Successfully destroyed " + ph;
    } else {
      return "Failed in destroying " + ph;
    }
  }
  /**
   * Execute prepared query.
   *
   * @param phandle   the prepare handle to execute
   * @param async     if true, run asynchronously and return the handle only
   * @param queryName optional display name for the query
   * @return query handle (async) or formatted result set (sync)
   */
  @CliCommand(value = "prepQuery execute",
    help = "Execute prepared query with handle <prepare_handle>."
      + " If <async> is supplied and is true, query is run in async manner and query handle is returned immediately."
      + " Optionally, <query-name> can be provided, though not required.")
  public String executePreparedQuery(
    @CliOption(key = {"", "prepare_handle"}, mandatory = true, help = "Prepare handle to execute") String phandle,
    @CliOption(key = {"async"}, mandatory = false, unspecifiedDefaultValue = "false",
      specifiedDefaultValue = "true", help = "<async>") boolean async,
    @CliOption(key = {"name"}, mandatory = false, help = "<query-name>") String queryName) {
    if (async) {
      QueryHandle handle = getClient().executePrepared(QueryPrepareHandle.fromString(phandle), queryName);
      return handle.getHandleId().toString();
    } else {
      try {
        LensClient.LensClientResultSetWithStats result = getClient().getResultsFromPrepared(
          QueryPrepareHandle.fromString(phandle), queryName);
        return formatResultSet(result);
      } catch (Throwable t) {
        return t.getMessage();
      }
    }
  }
  /**
   * Prepare a query and return its prepare handle.
   *
   * @param sql       the query string to prepare
   * @param queryName optional display name for the query
   * @return the prepare handle as a string
   * @throws UnsupportedEncodingException the unsupported encoding exception
   * @throws LensAPIException on server-side failure
   */
  @CliCommand(value = "prepQuery prepare",
    help = "Prepare query <query-string> and return prepare handle. Can optionally provide <query-name>")
  public String prepare(@CliOption(key = {"", "query"}, mandatory = true, help = "<query-string>") String sql,
    @CliOption(key = {"name"}, mandatory = false, help = "<query-name>") String queryName)
    throws UnsupportedEncodingException, LensAPIException {
    return getClient().prepare(sql, queryName).getData().toString();
  }
  /**
   * Explain and prepare a query in one call.
   *
   * @param sql       the query string to explain and prepare
   * @param queryName optional display name for the query
   * @return the plan string followed by the prepare handle; on failure, a
   *         pretty-printed error description
   * @throws UnsupportedEncodingException the unsupported encoding exception
   * @throws LensAPIException on server-side failure
   */
  @CliCommand(value = "prepQuery explain", help = "Explain and prepare query <query-string>. "
    + "Can optionally provide <query-name>")
  public String explainAndPrepare(
    @CliOption(key = { "", "query" }, mandatory = true, help = "<query-string>") String sql,
    @CliOption(key = { "name" }, mandatory = false, help = "<query-name>") String queryName)
    throws UnsupportedEncodingException, LensAPIException {
    PrettyPrintable cliOutput;
    try {
      QueryPlan plan = getClient().explainAndPrepare(sql, queryName).getData();
      StringBuilder planStr = new StringBuilder(plan.getPlanString());
      planStr.append("\n").append("Prepare handle:").append(plan.getPrepareHandle());
      return planStr.toString();
    } catch (final LensAPIException e) {
      BriefError briefError = new BriefError(e.getLensAPIErrorCode(), e.getLensAPIErrorMessage());
      cliOutput = new IdBriefErrorTemplate(IdBriefErrorTemplateKey.REQUEST_ID, e.getLensAPIRequestId(), briefError);
    } catch (final LensBriefErrorException e) {
      cliOutput = e.getIdBriefErrorTemplate();
    }
    return cliOutput.toPrettyString();
  }
}
| |
package fr.adrienbrault.idea.symfony2plugin.templating.variable.collector;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiManager;
import com.intellij.psi.PsiRecursiveElementWalkingVisitor;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.tree.IElementType;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.util.indexing.FileBasedIndex;
import com.jetbrains.twig.TwigFile;
import com.jetbrains.twig.TwigFileType;
import com.jetbrains.twig.TwigTokenTypes;
import com.jetbrains.twig.elements.TwigCompositeElement;
import com.jetbrains.twig.elements.TwigElementTypes;
import com.jetbrains.twig.elements.TwigExtendsTag;
import com.jetbrains.twig.elements.TwigTagWithFileReference;
import fr.adrienbrault.idea.symfony2plugin.templating.TwigPattern;
import fr.adrienbrault.idea.symfony2plugin.stubs.indexes.TwigIncludeStubIndex;
import fr.adrienbrault.idea.symfony2plugin.templating.util.TwigTypeResolveUtil;
import fr.adrienbrault.idea.symfony2plugin.templating.util.TwigUtil;
import fr.adrienbrault.idea.symfony2plugin.templating.variable.TwigFileVariableCollector;
import fr.adrienbrault.idea.symfony2plugin.templating.variable.TwigFileVariableCollectorParameter;
import fr.adrienbrault.idea.symfony2plugin.templating.variable.dict.PsiVariable;
import fr.adrienbrault.idea.symfony2plugin.util.PsiElementUtils;
import org.apache.commons.lang.StringUtils;
import org.jetbrains.annotations.NotNull;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
 * Collects Twig variables that flow into a template through {% include %},
 * {% embed %} and {{ include() }} usages found in the templates that
 * reference it.
 *
 * @author Daniel Espendiller <daniel@espendiller.net>
 */
public class IncludeVariableCollector implements TwigFileVariableCollector {
    @Override
    public void collectPsiVariables(@NotNull final TwigFileVariableCollectorParameter parameter, @NotNull final Map<String, PsiVariable> variables) {
        PsiFile psiFile = parameter.getElement().getContainingFile();
        // Only plain Twig files without an "extends" tag are handled; an
        // extending template gets its context through its parent instead.
        if(!(psiFile instanceof TwigFile) || PsiTreeUtil.getChildOfType(psiFile, TwigExtendsTag.class) != null) {
            return;
        }
        // Templates that include/embed the current file, per the include index.
        Collection<VirtualFile> files = getImplements((TwigFile) psiFile);
        if(files.size() == 0) {
            return;
        }
        // Walk each including template and harvest the variables it passes in.
        for(VirtualFile virtualFile: files) {
            PsiFile twigFile = PsiManager.getInstance(parameter.getProject()).findFile(virtualFile);
            if(!(twigFile instanceof TwigFile)) {
                continue;
            }
            twigFile.acceptChildren(new MyPsiRecursiveElementWalkingVisitor(psiFile, variables, parameter));
        }
    }
    /**
     * Resolves the variables an include/embed site hands to the included
     * template: the including scope's context (unless "only"/"with_context =
     * false" suppresses it) plus any explicit "with {...}" aliases.
     */
    private void collectIncludeContextVars(IElementType iElementType, PsiElement tag, PsiElement templatePsiName, Map<String, PsiVariable> variables, Set<VirtualFile> visitedFiles) {
        boolean addContextVar = true;
        Map<String, String> varAliasMap = new HashMap<>();
        if(iElementType == TwigElementTypes.INCLUDE_TAG || iElementType == TwigElementTypes.EMBED_TAG) {
            // {% include 'template.html' with {'foo': 'bar'} only %}
            // {% embed "template.html.twig" with {'foo': 'bar'} only %}
            PsiElement onlyElement = PsiElementUtils.getChildrenOfType(tag, TwigPattern.getIncludeOnlyPattern());
            if(onlyElement != null) {
                addContextVar = false;
            }
            varAliasMap = getIncludeWithVarNames(tag.getText());
        } else if(iElementType == TwigTokenTypes.IDENTIFIER) {
            // {{ include('template.html.twig', {'foo2': foo}, with_context = false) }}
            // not nice but its working :)
            // strip all whitespace psi elements
            String text = tag.getText();
            text = text.replaceAll("\\r|\\n|\\s+", "");
            String regex = "include\\((['|\"].*['|\"],(.*))\\)";
            Matcher matcher = Pattern.compile(regex).matcher(text);
            if (matcher.find()) {
                String[] group = matcher.group(1).split(",");
                if(group.length > 1) {
                    // json alias map: {'foo2': foo}
                    if(group[1].startsWith("{")) {
                        varAliasMap = getVariableAliasMap(group[1]);
                    }
                    // try to find context in one of the parameter:
                    // include('template.html', with_context = false)
                    // include('template.html', {foo: 'bar'}, with_context = false)
                    for (int i = 1; i < group.length; i++) {
                        if(group[i].equals("with_context=false")) {
                            addContextVar = false;
                        }
                    }
                }
            }
        }
        // we dont need to collect foreign file variables
        if(!addContextVar && varAliasMap.size() == 0) {
            return;
        }
        // Variables visible at the include site in the including template.
        Map<String, PsiVariable> stringPsiVariableHashMap = TwigTypeResolveUtil.collectScopeVariables(templatePsiName, visitedFiles);
        // add context vars
        if(addContextVar) {
            for(Map.Entry<String, PsiVariable> entry: stringPsiVariableHashMap.entrySet()) {
                variables.put(entry.getKey(), entry.getValue());
            }
        }
        // add alias vars
        if(varAliasMap.size() > 0) {
            for(Map.Entry<String, String> entry: varAliasMap.entrySet()) {
                if(stringPsiVariableHashMap.containsKey(entry.getValue())) {
                    variables.put(entry.getKey(), stringPsiVariableHashMap.get(entry.getValue()));
                }
            }
        }
    }
    /**
     * Extracts the "with {...}" alias map from the raw text of an
     * include/embed tag. Returns an empty map when no "with" clause exists.
     */
    @NotNull
    private static Map<String, String> getIncludeWithVarNames(String includeText) {
        String regex = "with\\s*\\{\\s*(.*[^%])\\}\\s*";
        Matcher matcher = Pattern.compile(regex).matcher(includeText.replace("\r\n", " ").replace("\n", " "));
        if (matcher.find()) {
            String group = matcher.group(1);
            return getVariableAliasMap("{" + group + "}");
        }
        return new HashMap<>();
    }
    /**
     * Best-effort parse of a JSON-like "{alias: var, ...}" literal into an
     * alias -> source-variable map; quotes around keys/values are stripped.
     * NOTE(review): relies on a regex split, so nested objects beyond one
     * level are not supported — confirm against real-world templates.
     */
    @NotNull
    private static Map<String, String> getVariableAliasMap(@NotNull String jsonLike) {
        Map<String, String> map = new HashMap<>();
        String[] parts = jsonLike.replaceAll("^\\{|\\}$","").split("\"?(:|,)(?![^\\{]*\\})\"?");
        for (int i = 0; i < parts.length -1; i+=2) {
            map.put(StringUtils.trim(parts[i]).replaceAll("^\"|\"$|\'|\'$", ""), StringUtils.trim(parts[i+1]).replaceAll("^\"|\"$|\'|\'$", ""));
        }
        return map;
    }
    @Override
    public void collect(@NotNull TwigFileVariableCollectorParameter parameter, @NotNull Map<String, Set<String>> variables) {
    }
    /**
     * Finds all Twig files that include/embed the given template, by looking
     * up each of its known template names in the include stub index.
     */
    private Collection<VirtualFile> getImplements(TwigFile twigFile) {
        final Set<VirtualFile> targets = new HashSet<>();
        for(String templateName: TwigUtil.getTemplateNamesForFile(twigFile)) {
            FileBasedIndex.getInstance().getFilesWithKey(TwigIncludeStubIndex.KEY, new HashSet<>(Collections.singletonList(templateName)), virtualFile -> {
                targets.add(virtualFile);
                return true;
            }, GlobalSearchScope.getScopeRestrictedByFileTypes(GlobalSearchScope.allScope(twigFile.getProject()), TwigFileType.INSTANCE));
        }
        return targets;
    }
    /**
     * Visitor that walks an including template and, for every include/embed
     * construct that targets {@link #psiFile}, records the passed variables.
     */
    private class MyPsiRecursiveElementWalkingVisitor extends PsiRecursiveElementWalkingVisitor {
        // The template being included (the one variables are collected for).
        @NotNull
        private final PsiFile psiFile;
        // Output map shared with the caller.
        @NotNull
        private final Map<String, PsiVariable> variables;
        @NotNull
        private final TwigFileVariableCollectorParameter parameter;
        private MyPsiRecursiveElementWalkingVisitor(@NotNull PsiFile psiFile, @NotNull Map<String, PsiVariable> variables, @NotNull TwigFileVariableCollectorParameter parameter) {
            this.psiFile = psiFile;
            this.variables = variables;
            this.parameter = parameter;
        }
        @Override
        public void visitElement(PsiElement element) {
            // {% include 'template.html' %}
            if(element instanceof TwigTagWithFileReference && element.getNode().getElementType() == TwigElementTypes.INCLUDE_TAG) {
                PsiElement includeTag = PsiElementUtils.getChildrenOfType(element, TwigPattern.getTemplateFileReferenceTagPattern("include"));
                if(includeTag != null) {
                    collectContextVars(TwigElementTypes.INCLUDE_TAG, element, includeTag);
                }
            }
            if(element instanceof TwigCompositeElement) {
                // {{ include('template.html') }}
                PsiElement includeTag = PsiElementUtils.getChildrenOfType(element, TwigPattern.getPrintBlockOrTagFunctionPattern("include"));
                if(includeTag != null) {
                    collectContextVars(TwigTokenTypes.IDENTIFIER, element, includeTag);
                }
                // {% embed "foo.html.twig"
                PsiElement embedTag = PsiElementUtils.getChildrenOfType(element, TwigPattern.getEmbedPattern());
                if(embedTag != null) {
                    collectContextVars(TwigElementTypes.EMBED_TAG, element, embedTag);
                }
            }
            super.visitElement(element);
        }
        /**
         * Delegates to collectIncludeContextVars only when the referenced
         * template resolves to the file we are collecting variables for.
         */
        private void collectContextVars(IElementType iElementType, @NotNull PsiElement element, @NotNull PsiElement includeTag) {
            String templateName = includeTag.getText();
            if(StringUtils.isNotBlank(templateName)) {
                for(PsiFile templateFile: TwigUtil.getTemplatePsiElements(element.getProject(), templateName)) {
                    if(templateFile.equals(psiFile)) {
                        collectIncludeContextVars(iElementType, element, includeTag, variables, parameter.getVisitedFiles());
                    }
                }
            }
        }
    }
}
| |
/*
* Copyright 2016 LINE Corporation
*
* LINE Corporation licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.linecorp.armeria.client.http.retrofit2;
import java.io.IOException;
import java.net.URI;
import java.util.Map;
import java.util.concurrent.CancellationException;
import java.util.concurrent.CompletionException;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicReferenceFieldUpdater;
import java.util.function.BiFunction;
import java.util.regex.Pattern;
import com.linecorp.armeria.client.ClientFactory;
import com.linecorp.armeria.client.ClientOptionsBuilder;
import com.linecorp.armeria.client.Clients;
import com.linecorp.armeria.client.http.HttpClient;
import com.linecorp.armeria.common.Scheme;
import com.linecorp.armeria.common.SerializationFormat;
import com.linecorp.armeria.common.SessionProtocol;
import com.linecorp.armeria.common.http.HttpHeaderNames;
import com.linecorp.armeria.common.http.HttpHeaders;
import com.linecorp.armeria.common.http.HttpMethod;
import com.linecorp.armeria.common.http.HttpResponse;
import okhttp3.Call;
import okhttp3.Call.Factory;
import okhttp3.Callback;
import okhttp3.HttpUrl;
import okhttp3.MediaType;
import okhttp3.Request;
import okhttp3.RequestBody;
import okhttp3.Response;
import okio.Buffer;
/**
 * A {@link Factory} that creates a {@link Call} instance for {@link HttpClient}.
 */
final class ArmeriaCallFactory implements Factory {
    // Authorities of the form "group_<name>" address an Armeria endpoint
    // group; the prefix is rewritten to "group:" when building the client URI.
    static final String GROUP_PREFIX = "group_";
    private static final Pattern GROUP_PREFIX_MATCHER = Pattern.compile(GROUP_PREFIX);
    // Cache of per-authority clients; ConcurrentHashMap because newCall may
    // be invoked from multiple threads.
    private final Map<String, HttpClient> httpClients = new ConcurrentHashMap<>();
    private final HttpClient baseHttpClient;
    private final ClientFactory clientFactory;
    // Hook that lets the builder customize options of derived clients.
    private final BiFunction<String, ? super ClientOptionsBuilder, ClientOptionsBuilder> configurator;
    private final String baseAuthority;
    ArmeriaCallFactory(HttpClient baseHttpClient,
                       ClientFactory clientFactory,
                       BiFunction<String, ? super ClientOptionsBuilder, ClientOptionsBuilder> configurator) {
        this.baseHttpClient = baseHttpClient;
        this.clientFactory = clientFactory;
        this.configurator = configurator;
        baseAuthority = baseHttpClient.uri().getAuthority();
        httpClients.put(baseAuthority, baseHttpClient);
    }
    @Override
    public Call newCall(Request request) {
        return new ArmeriaCall(this, request);
    }
    private static boolean isGroup(String authority) {
        return authority.startsWith(GROUP_PREFIX);
    }
    /**
     * Returns the client for the given authority, creating and caching a new
     * one (with the configurator applied) when the authority differs from the
     * base client's.
     */
    private HttpClient getHttpClient(String authority, String sessionProtocol) {
        if (baseAuthority.equals(authority)) {
            return baseHttpClient;
        }
        return httpClients.computeIfAbsent(authority, key -> {
            final String finalAuthority = isGroup(key) ?
                    GROUP_PREFIX_MATCHER.matcher(key).replaceFirst("group:") : key;
            final String uriText = Scheme.of(SerializationFormat.NONE, SessionProtocol.of(sessionProtocol))
                                         .uriText() + "://" + finalAuthority;
            return Clients.newClient(clientFactory, uriText, HttpClient.class,
                                     configurator.apply(uriText, new ClientOptionsBuilder()).build());
        });
    }
    /** Adapts a single OkHttp {@link Request} execution onto an Armeria {@link HttpResponse}. */
    static class ArmeriaCall implements Call {
        // Lifecycle of a call; transitions are performed via CAS below.
        private enum ExecutionState {
            IDLE, RUNNING, CANCELED, FINISHED
        }
        private static final AtomicReferenceFieldUpdater<ArmeriaCall, ExecutionState> executionStateUpdater =
                AtomicReferenceFieldUpdater.newUpdater(ArmeriaCall.class, ExecutionState.class,
                                                       "executionState");
        private final ArmeriaCallFactory callFactory;
        private final Request request;
        // Non-null once the request has been dispatched; see createRequest().
        private volatile HttpResponse httpResponse;
        @SuppressWarnings("FieldMayBeFinal")
        private volatile ExecutionState executionState = ExecutionState.IDLE;
        ArmeriaCall(ArmeriaCallFactory callFactory, Request request) {
            this.callFactory = callFactory;
            this.request = request;
        }
        /**
         * Translates the OkHttp request (method, path, query, headers, body)
         * into an Armeria call and returns the resulting response.
         */
        private static HttpResponse doCall(ArmeriaCallFactory callFactory, Request request) {
            HttpUrl httpUrl = request.url();
            URI uri = httpUrl.uri();
            HttpClient httpClient = callFactory.getHttpClient(uri.getAuthority(), uri.getScheme());
            StringBuilder uriBuilder = new StringBuilder(httpUrl.encodedPath());
            if (uri.getQuery() != null) {
                uriBuilder.append('?').append(httpUrl.encodedQuery());
            }
            final String uriString = uriBuilder.toString();
            final HttpHeaders headers;
            switch (request.method()) {
                case "GET":
                    headers = HttpHeaders.of(HttpMethod.GET, uriString);
                    break;
                case "HEAD":
                    headers = HttpHeaders.of(HttpMethod.HEAD, uriString);
                    break;
                case "POST":
                    headers = HttpHeaders.of(HttpMethod.POST, uriString);
                    break;
                case "DELETE":
                    headers = HttpHeaders.of(HttpMethod.DELETE, uriString);
                    break;
                case "PUT":
                    headers = HttpHeaders.of(HttpMethod.PUT, uriString);
                    break;
                case "PATCH":
                    headers = HttpHeaders.of(HttpMethod.PATCH, uriString);
                    break;
                case "OPTIONS":
                    headers = HttpHeaders.of(HttpMethod.OPTIONS, uriString);
                    break;
                default:
                    throw new IllegalArgumentException("Invalid HTTP method:" + request.method());
            }
            request.headers().toMultimap().forEach(
                    (key, values) -> headers.add(HttpHeaderNames.of(key), values));
            final RequestBody body = request.body();
            if (body != null) {
                final MediaType contentType = body.contentType();
                if (contentType != null) {
                    headers.set(HttpHeaderNames.CONTENT_TYPE, contentType.toString());
                }
                // Materialize the body into a byte array via an okio Buffer.
                try (Buffer contentBuffer = new Buffer()) {
                    body.writeTo(contentBuffer);
                    return httpClient.execute(headers, contentBuffer.readByteArray());
                } catch (IOException e) {
                    throw new IllegalArgumentException(
                            "Failed to convert RequestBody to HttpData. " + request.method(), e);
                }
            }
            return httpClient.execute(headers);
        }
        @Override
        public Request request() {
            return request;
        }
        // synchronized so a call can only ever be dispatched once.
        private synchronized void createRequest() {
            if (httpResponse != null) {
                throw new IllegalStateException("executed already");
            }
            executionStateUpdater.compareAndSet(this, ExecutionState.IDLE, ExecutionState.RUNNING);
            httpResponse = doCall(callFactory, request);
        }
        @Override
        public Response execute() throws IOException {
            // Synchronous execute is built on the async path: enqueue and join.
            final CompletableCallback completableCallback = new CompletableCallback();
            enqueue(completableCallback);
            try {
                return completableCallback.join();
            } catch (CancellationException e) {
                throw new IOException(e);
            } catch (CompletionException e) {
                throw new IOException(e.getCause());
            }
        }
        @Override
        public void enqueue(Callback callback) {
            createRequest();
            httpResponse.subscribe(new ArmeriaCallSubscriber(this, callback, request));
        }
        @Override
        public void cancel() {
            // Unconditional set: cancellation wins over IDLE/RUNNING, and
            // tryFinish() below will then fail its CAS and observe it.
            executionStateUpdater.set(this, ExecutionState.CANCELED);
        }
        @Override
        public boolean isExecuted() {
            return httpResponse != null;
        }
        @Override
        public boolean isCanceled() {
            return executionState == ExecutionState.CANCELED;
        }
        /**
         * Atomically moves the call to FINISHED; returns false when the call
         * was already canceled (or finished), letting the caller detect it.
         */
        boolean tryFinish() {
            return executionStateUpdater.compareAndSet(this,
                                                       ExecutionState.IDLE,
                                                       ExecutionState.FINISHED) ||
                   executionStateUpdater.compareAndSet(this,
                                                       ExecutionState.RUNNING,
                                                       ExecutionState.FINISHED);
        }
        @Override
        public Call clone() {
            // Per the okhttp3.Call contract: a fresh, not-yet-executed copy.
            return new ArmeriaCall(callFactory, request);
        }
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zookeeper.server.auth;
import java.security.cert.CertificateException;
import java.security.cert.X509Certificate;
import javax.net.ssl.X509KeyManager;
import javax.net.ssl.X509TrustManager;
import javax.security.auth.x500.X500Principal;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.common.ZKConfig;
import org.apache.zookeeper.common.X509Exception;
import org.apache.zookeeper.common.X509Exception.KeyManagerException;
import org.apache.zookeeper.common.X509Exception.TrustManagerException;
import org.apache.zookeeper.common.X509Util;
import org.apache.zookeeper.data.Id;
import org.apache.zookeeper.server.ServerCnxn;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* An AuthenticationProvider backed by an X509TrustManager and an X509KeyManager
* to perform remote host certificate authentication. The default algorithm is
* SunX509 and a JKS KeyStore. To specify the locations of the key store and
* trust store, set the following system properties:
* <br/><code>zookeeper.ssl.keyStore.location</code>
* <br/><code>zookeeper.ssl.trustStore.location</code>
* <br/>To specify store passwords, set the following system properties:
* <br/><code>zookeeper.ssl.keyStore.password</code>
* <br/><code>zookeeper.ssl.trustStore.password</code>
* <br/>Alternatively, this can be plugged with any X509TrustManager and
* X509KeyManager implementation.
*/
public class X509AuthenticationProvider implements AuthenticationProvider {
    static final String ZOOKEEPER_X509AUTHENTICATIONPROVIDER_SUPERUSER
            = "zookeeper.X509AuthenticationProvider.superUser";
    private static final Logger LOG
            = LoggerFactory.getLogger(X509AuthenticationProvider.class);
    // Either manager may be null when the corresponding store was not
    // configured or could not be loaded (see the no-arg constructor).
    private final X509TrustManager trustManager;
    private final X509KeyManager keyManager;

    /**
     * Initialize the X509AuthenticationProvider with a JKS KeyStore and JKS
     * TrustStore according to the following system properties:
     * <br/><code>zookeeper.ssl.keyStore.location</code>
     * <br/><code>zookeeper.ssl.trustStore.location</code>
     * <br/><code>zookeeper.ssl.keyStore.password</code>
     * <br/><code>zookeeper.ssl.trustStore.password</code>
     *
     * @throws X509Exception if a store location is set without its password
     *         (or vice versa).
     */
    public X509AuthenticationProvider() throws X509Exception {
        String keyStoreLocationProp = System.getProperty(
                ZKConfig.SSL_KEYSTORE_LOCATION);
        String keyStorePasswordProp = System.getProperty(
                ZKConfig.SSL_KEYSTORE_PASSWD);
        X509KeyManager km = null;
        X509TrustManager tm = null;
        if (keyStoreLocationProp == null && keyStorePasswordProp == null) {
            LOG.warn("keystore not specified for client connection");
        } else {
            // Location and password must be configured together.
            if (keyStoreLocationProp == null) {
                throw new X509Exception("keystore location not specified for client connection");
            }
            if (keyStorePasswordProp == null) {
                throw new X509Exception("keystore password not specified for client connection");
            }
            try {
                km = X509Util.createKeyManager(
                        keyStoreLocationProp, keyStorePasswordProp);
            } catch (KeyManagerException e) {
                // Deliberately non-fatal: the provider can still operate
                // (getKeyManager() reports the absence to callers).
                LOG.error("Failed to create key manager", e);
            }
        }
        String trustStoreLocationProp = System.getProperty(
                ZKConfig.SSL_TRUSTSTORE_LOCATION);
        String trustStorePasswordProp = System.getProperty(
                ZKConfig.SSL_TRUSTSTORE_PASSWD);
        if (trustStoreLocationProp == null && trustStorePasswordProp == null) {
            LOG.warn("Truststore not specified for client connection");
        } else {
            if (trustStoreLocationProp == null) {
                throw new X509Exception("Truststore location not specified for client connection");
            }
            if (trustStorePasswordProp == null) {
                throw new X509Exception("Truststore password not specified for client connection");
            }
            try {
                tm = X509Util.createTrustManager(
                        trustStoreLocationProp, trustStorePasswordProp);
            } catch (TrustManagerException e) {
                // Non-fatal for the same reason as the key manager above;
                // handleAuthentication() fails closed when tm is null.
                LOG.error("Failed to create trust manager", e);
            }
        }
        this.keyManager = km;
        this.trustManager = tm;
    }

    /**
     * Initialize the X509AuthenticationProvider with the provided
     * X509TrustManager and X509KeyManager.
     *
     * @param trustManager X509TrustManager implementation to use for remote
     *                     host authentication.
     * @param keyManager   X509KeyManager implementation to use for certificate
     *                     management.
     */
    public X509AuthenticationProvider(X509TrustManager trustManager,
                                      X509KeyManager keyManager) {
        this.trustManager = trustManager;
        this.keyManager = keyManager;
    }

    /** @return the authentication scheme name, {@code "x509"}. */
    @Override
    public String getScheme() {
        return "x509";
    }

    /**
     * Authenticates the connection using the client's certificate chain.
     * Fails closed (AUTHFAILED) when no chain is present or no trust manager
     * is available. On success attaches an {@code x509} Id (and, if the
     * client matches the configured superuser property, a {@code super} Id).
     *
     * @param cnxn     the server connection carrying the TLS client chain.
     * @param authData unused for this scheme.
     * @return {@link KeeperException.Code#OK} on success, otherwise
     *         {@link KeeperException.Code#AUTHFAILED}.
     */
    @Override
    public KeeperException.Code handleAuthentication(ServerCnxn cnxn,
                                                     byte[] authData) {
        X509Certificate[] certChain
                = (X509Certificate[]) cnxn.getClientCertificateChain();
        if (certChain == null || certChain.length == 0) {
            return KeeperException.Code.AUTHFAILED;
        }
        if (trustManager == null) {
            LOG.error("No trust manager available to authenticate session 0x{}",
                    Long.toHexString(cnxn.getSessionId()));
            return KeeperException.Code.AUTHFAILED;
        }
        X509Certificate clientCert = certChain[0];
        try {
            // Authenticate client certificate.
            // NOTE(review): the second argument is the authType; passing the
            // leaf certificate's public-key algorithm mirrors the existing
            // behavior here - confirm it matches what the trust manager expects.
            trustManager.checkClientTrusted(certChain,
                    clientCert.getPublicKey().getAlgorithm());
        } catch (CertificateException ce) {
            // Parameterized logging (was string concatenation) for
            // consistency with the rest of this class.
            LOG.error("Failed to trust certificate for session 0x{}",
                    Long.toHexString(cnxn.getSessionId()), ce);
            return KeeperException.Code.AUTHFAILED;
        }
        String clientId = getClientId(clientCert);
        if (clientId.equals(System.getProperty(
                ZOOKEEPER_X509AUTHENTICATIONPROVIDER_SUPERUSER))) {
            cnxn.addAuthInfo(new Id("super", clientId));
            LOG.info("Authenticated Id '{}' as super user", clientId);
        }
        Id authInfo = new Id(getScheme(), clientId);
        cnxn.addAuthInfo(authInfo);
        LOG.info("Authenticated Id '{}' for Scheme '{}'",
                authInfo.getId(), authInfo.getScheme());
        return KeeperException.Code.OK;
    }

    /**
     * Determine the string to be used as the remote host session Id for
     * authorization purposes. Associate this client identifier with a
     * ServerCnxn that has been authenticated over SSL, and any ACLs that refer
     * to the authenticated client.
     *
     * @param clientCert Authenticated X509Certificate associated with the
     *                   remote host.
     * @return Identifier string to be associated with the client.
     */
    protected String getClientId(X509Certificate clientCert) {
        return clientCert.getSubjectX500Principal().getName();
    }

    /**
     * Checks whether the authenticated {@code id} satisfies the ACL
     * expression. The configured superuser id matches any expression.
     */
    @Override
    public boolean matches(String id, String aclExpr) {
        // Read the superuser property once instead of twice (it could change
        // between the two reads in the original code).
        String superUser
                = System.getProperty(ZOOKEEPER_X509AUTHENTICATIONPROVIDER_SUPERUSER);
        if (superUser != null) {
            return id.equals(superUser) || id.equals(aclExpr);
        }
        return id.equals(aclExpr);
    }

    /** X509 authentication always establishes an authenticated identity. */
    @Override
    public boolean isAuthenticated() {
        return true;
    }

    /**
     * An id is valid for this scheme iff it parses as an X.500 distinguished
     * name.
     */
    @Override
    public boolean isValid(String id) {
        try {
            new X500Principal(id);
            return true;
        } catch (IllegalArgumentException e) {
            return false;
        }
    }

    /**
     * Get the X509TrustManager implementation used for remote host
     * authentication.
     *
     * @return The X509TrustManager.
     * @throws TrustManagerException When there is no trust manager available.
     */
    public X509TrustManager getTrustManager() throws TrustManagerException {
        if (trustManager == null) {
            throw new TrustManagerException("No trust manager available");
        }
        return trustManager;
    }

    /**
     * Get the X509KeyManager implementation used for certificate management.
     *
     * @return The X509KeyManager.
     * @throws KeyManagerException When there is no key manager available.
     */
    public X509KeyManager getKeyManager() throws KeyManagerException {
        if (keyManager == null) {
            throw new KeyManagerException("No key manager available");
        }
        return keyManager;
    }
}
| |
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package View;
import Control.M01pegaCtrl;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.swing.JOptionPane;
/**
*
* @author Thinkpad
*/
public class M01pegaView extends javax.swing.JInternalFrame {
/**
* Creates new form M01pegaView
*/
/**
 * Creates new form M01pegaView: builds the UI, loads the employee list into
 * the table, starts in read-only mode, and shows the frame.
 */
public M01pegaView() {
    initComponents();
    final M01pegaCtrl controller = new M01pegaCtrl();
    tblPegawai.setModel(controller.getDaftarPegawai());
    setEditStatus(false);
    setVisible(true);
}
/**
* This method is called from within the constructor to initialize the form.
* WARNING: Do NOT modify this code. The content of this method is always
* regenerated by the Form Editor.
*/
@SuppressWarnings("unchecked")
// <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
private void initComponents() {
    // NOTE(review): NetBeans GUI-builder generated code - change the form in
    // the designer, not here; hand edits (including these comments) may be
    // lost on regeneration.
    jScrollPane1 = new javax.swing.JScrollPane();
    tblPegawai = new javax.swing.JTable();
    jPanel1 = new javax.swing.JPanel();
    jPanel2 = new javax.swing.JPanel();
    txtNik = new javax.swing.JTextField();
    jLabel6 = new javax.swing.JLabel();
    jLabel2 = new javax.swing.JLabel();
    jLabel1 = new javax.swing.JLabel();
    txtGender = new javax.swing.JTextField();
    txtPassword = new javax.swing.JTextField();
    jLabel5 = new javax.swing.JLabel();
    txtNama = new javax.swing.JTextField();
    jLabel3 = new javax.swing.JLabel();
    txtStatus = new javax.swing.JTextField();
    txtUsername = new javax.swing.JTextField();
    jLabel4 = new javax.swing.JLabel();
    txtTelepon = new javax.swing.JTextField();
    jLabel7 = new javax.swing.JLabel();
    jLabel8 = new javax.swing.JLabel();
    txtAlamat = new javax.swing.JTextField();
    jLabel9 = new javax.swing.JLabel();
    txtAgama = new javax.swing.JTextField();
    jLabel10 = new javax.swing.JLabel();
    txtTglLahir = new javax.swing.JTextField();
    jLabel11 = new javax.swing.JLabel();
    txtEmail = new javax.swing.JTextField();
    jLabel12 = new javax.swing.JLabel();
    txtTglMasuk = new javax.swing.JTextField();
    jLabel13 = new javax.swing.JLabel();
    cmbKdJaba = new javax.swing.JComboBox();
    jPanel3 = new javax.swing.JPanel();
    btnTambah = new javax.swing.JButton();
    btnCari = new javax.swing.JButton();
    btnEdit = new javax.swing.JButton();
    btnClear = new javax.swing.JButton();
    btnPrint = new javax.swing.JButton();
    btnHapus = new javax.swing.JButton();
    btnBatal = new javax.swing.JButton();
    setPreferredSize(new java.awt.Dimension(1132, 600));
    jScrollPane1.setPreferredSize(null);
    // Placeholder model; the constructor replaces it with the real data.
    tblPegawai.setModel(new javax.swing.table.DefaultTableModel(
        new Object [][] {
            {null, null, null, null},
            {null, null, null, null},
            {null, null, null, null},
            {null, null, null, null}
        },
        new String [] {
            "Title 1", "Title 2", "Title 3", "Title 4"
        }
    ));
    tblPegawai.addMouseListener(new java.awt.event.MouseAdapter() {
        public void mouseClicked(java.awt.event.MouseEvent evt) {
            tblPegawaiMouseClicked(evt);
        }
    });
    jScrollPane1.setViewportView(tblPegawai);
    jPanel2.setBorder(javax.swing.BorderFactory.createEtchedBorder());
    jPanel2.setPreferredSize(new java.awt.Dimension(270, 232));
    txtNik.setPreferredSize(new java.awt.Dimension(160, 20));
    txtNik.addActionListener(new java.awt.event.ActionListener() {
        public void actionPerformed(java.awt.event.ActionEvent evt) {
            txtNikActionPerformed(evt);
        }
    });
    txtNik.addKeyListener(new java.awt.event.KeyAdapter() {
        public void keyReleased(java.awt.event.KeyEvent evt) {
            txtNikKeyReleased(evt);
        }
    });
    jLabel6.setText("Status");
    jLabel2.setText("Nama");
    jLabel1.setText("NIK");
    txtGender.setPreferredSize(new java.awt.Dimension(140, 20));
    txtPassword.setPreferredSize(new java.awt.Dimension(140, 20));
    jLabel5.setText("Gender");
    txtNama.setPreferredSize(new java.awt.Dimension(140, 20));
    jLabel3.setText("Username");
    txtStatus.setPreferredSize(new java.awt.Dimension(140, 20));
    txtUsername.setPreferredSize(new java.awt.Dimension(140, 20));
    txtUsername.addActionListener(new java.awt.event.ActionListener() {
        public void actionPerformed(java.awt.event.ActionEvent evt) {
            txtUsernameActionPerformed(evt);
        }
    });
    jLabel4.setText("Password");
    txtTelepon.setPreferredSize(new java.awt.Dimension(140, 20));
    jLabel7.setText("Telepon");
    jLabel8.setText("Alamat");
    txtAlamat.setPreferredSize(new java.awt.Dimension(140, 20));
    jLabel9.setText("Agama");
    txtAgama.setPreferredSize(new java.awt.Dimension(140, 20));
    jLabel10.setText("Tanggal Lahir");
    txtTglLahir.setPreferredSize(new java.awt.Dimension(140, 20));
    jLabel11.setText("Email");
    txtEmail.setPreferredSize(new java.awt.Dimension(140, 20));
    jLabel12.setText("Tanggal Masuk");
    txtTglMasuk.setPreferredSize(new java.awt.Dimension(140, 20));
    jLabel13.setText("Kode Jabatan");
    // Placeholder items; presumably replaced with job codes elsewhere.
    cmbKdJaba.setModel(new javax.swing.DefaultComboBoxModel(new String[] { "Item 1", "Item 2", "Item 3", "Item 4" }));
    javax.swing.GroupLayout jPanel2Layout = new javax.swing.GroupLayout(jPanel2);
    jPanel2.setLayout(jPanel2Layout);
    jPanel2Layout.setHorizontalGroup(
        jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
        .addGroup(jPanel2Layout.createSequentialGroup()
            .addContainerGap()
            .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                .addGroup(jPanel2Layout.createSequentialGroup()
                    .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                        .addComponent(jLabel6)
                        .addComponent(jLabel5)
                        .addComponent(jLabel4)
                        .addComponent(jLabel3)
                        .addComponent(jLabel2)
                        .addComponent(jLabel1)
                        .addComponent(jLabel7)
                        .addComponent(jLabel8)
                        .addComponent(jLabel9))
                    .addGap(33, 33, 33)
                    .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                        .addComponent(txtNama, javax.swing.GroupLayout.Alignment.TRAILING, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                        .addComponent(txtUsername, javax.swing.GroupLayout.Alignment.TRAILING, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                        .addComponent(txtPassword, javax.swing.GroupLayout.Alignment.TRAILING, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                        .addComponent(txtGender, javax.swing.GroupLayout.Alignment.TRAILING, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                        .addComponent(txtStatus, javax.swing.GroupLayout.Alignment.TRAILING, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                        .addComponent(txtTelepon, javax.swing.GroupLayout.Alignment.TRAILING, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                        .addComponent(txtAlamat, javax.swing.GroupLayout.Alignment.TRAILING, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                        .addComponent(txtAgama, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                        .addComponent(txtNik, javax.swing.GroupLayout.DEFAULT_SIZE, 171, Short.MAX_VALUE)))
                .addGroup(jPanel2Layout.createSequentialGroup()
                    .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                        .addComponent(jLabel12)
                        .addComponent(jLabel11)
                        .addComponent(jLabel13)
                        .addComponent(jLabel10))
                    .addGap(10, 10, 10)
                    .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                        .addComponent(txtTglLahir, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                        .addComponent(txtEmail, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                        .addComponent(txtTglMasuk, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                        .addComponent(cmbKdJaba, 0, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))))
            .addContainerGap())
    );
    jPanel2Layout.setVerticalGroup(
        jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
        .addGroup(jPanel2Layout.createSequentialGroup()
            .addContainerGap()
            .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                .addComponent(jLabel1)
                .addComponent(txtNik, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
            .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
            .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                .addComponent(jLabel2)
                .addComponent(txtNama, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
            .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
            .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                .addComponent(jLabel3)
                .addComponent(txtUsername, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
            .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
            .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                .addComponent(jLabel4)
                .addComponent(txtPassword, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
            .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
            .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                .addComponent(jLabel5)
                .addComponent(txtGender, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
            .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
            .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                .addComponent(jLabel6)
                .addComponent(txtStatus, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
            .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
            .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                .addComponent(jLabel7)
                .addComponent(txtTelepon, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
            .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
            .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                .addComponent(jLabel8)
                .addComponent(txtAlamat, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
            .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
            .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                .addComponent(jLabel9)
                .addComponent(txtAgama, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
            .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
            .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                .addComponent(jLabel10)
                .addComponent(txtTglLahir, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
            .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
            .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                .addComponent(jLabel11)
                .addComponent(txtEmail, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
            .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
            .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                .addComponent(jLabel12)
                .addComponent(txtTglMasuk, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
            .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
            .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                .addComponent(jLabel13)
                .addComponent(cmbKdJaba, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
            .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
    );
    jPanel3.setBorder(javax.swing.BorderFactory.createEtchedBorder());
    btnTambah.setText("Tambah");
    btnTambah.setPreferredSize(new java.awt.Dimension(80, 23));
    btnTambah.addActionListener(new java.awt.event.ActionListener() {
        public void actionPerformed(java.awt.event.ActionEvent evt) {
            btnTambahActionPerformed(evt);
        }
    });
    btnCari.setText("Cari");
    btnCari.setPreferredSize(new java.awt.Dimension(80, 23));
    btnCari.addActionListener(new java.awt.event.ActionListener() {
        public void actionPerformed(java.awt.event.ActionEvent evt) {
            btnCariActionPerformed(evt);
        }
    });
    btnEdit.setText("Edit");
    btnEdit.setPreferredSize(new java.awt.Dimension(80, 23));
    btnEdit.addActionListener(new java.awt.event.ActionListener() {
        public void actionPerformed(java.awt.event.ActionEvent evt) {
            btnEditActionPerformed(evt);
        }
    });
    btnClear.setText("Clear");
    btnClear.setPreferredSize(new java.awt.Dimension(80, 23));
    btnClear.addActionListener(new java.awt.event.ActionListener() {
        public void actionPerformed(java.awt.event.ActionEvent evt) {
            btnClearActionPerformed(evt);
        }
    });
    btnPrint.setText("Print");
    btnPrint.setPreferredSize(new java.awt.Dimension(80, 23));
    btnPrint.addActionListener(new java.awt.event.ActionListener() {
        public void actionPerformed(java.awt.event.ActionEvent evt) {
            btnPrintActionPerformed(evt);
        }
    });
    btnHapus.setText("Hapus");
    btnHapus.setPreferredSize(new java.awt.Dimension(80, 23));
    btnHapus.addActionListener(new java.awt.event.ActionListener() {
        public void actionPerformed(java.awt.event.ActionEvent evt) {
            btnHapusActionPerformed(evt);
        }
    });
    btnBatal.setText("Batal");
    btnBatal.setPreferredSize(new java.awt.Dimension(80, 23));
    btnBatal.addActionListener(new java.awt.event.ActionListener() {
        public void actionPerformed(java.awt.event.ActionEvent evt) {
            btnBatalActionPerformed(evt);
        }
    });
    javax.swing.GroupLayout jPanel3Layout = new javax.swing.GroupLayout(jPanel3);
    jPanel3.setLayout(jPanel3Layout);
    jPanel3Layout.setHorizontalGroup(
        jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
        .addGroup(jPanel3Layout.createSequentialGroup()
            .addContainerGap()
            .addGroup(jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                .addGroup(jPanel3Layout.createSequentialGroup()
                    .addComponent(btnTambah, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                    .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                    .addComponent(btnHapus, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                    .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                    .addComponent(btnCari, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
                .addGroup(jPanel3Layout.createSequentialGroup()
                    .addComponent(btnEdit, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                    .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                    .addComponent(btnClear, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                    .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                    .addComponent(btnPrint, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
                .addComponent(btnBatal, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
            .addGap(10, 10, 10))
    );
    jPanel3Layout.setVerticalGroup(
        jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
        .addGroup(jPanel3Layout.createSequentialGroup()
            .addContainerGap()
            .addGroup(jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                .addComponent(btnTambah, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addComponent(btnHapus, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addComponent(btnCari, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
            .addGap(18, 18, 18)
            .addGroup(jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                .addComponent(btnEdit, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addComponent(btnClear, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addComponent(btnPrint, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
            .addGap(18, 18, 18)
            .addComponent(btnBatal, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
            .addContainerGap())
    );
    javax.swing.GroupLayout jPanel1Layout = new javax.swing.GroupLayout(jPanel1);
    jPanel1.setLayout(jPanel1Layout);
    jPanel1Layout.setHorizontalGroup(
        jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
        .addGroup(jPanel1Layout.createSequentialGroup()
            .addContainerGap()
            .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                .addComponent(jPanel2, javax.swing.GroupLayout.PREFERRED_SIZE, 276, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addComponent(jPanel3, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
            .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
    );
    jPanel1Layout.setVerticalGroup(
        jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
        .addGroup(jPanel1Layout.createSequentialGroup()
            .addContainerGap()
            .addComponent(jPanel2, javax.swing.GroupLayout.PREFERRED_SIZE, 358, javax.swing.GroupLayout.PREFERRED_SIZE)
            .addGap(18, 18, 18)
            .addComponent(jPanel3, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
            .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
    );
    javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane());
    getContentPane().setLayout(layout);
    layout.setHorizontalGroup(
        layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
        .addGroup(layout.createSequentialGroup()
            .addComponent(jPanel1, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
            .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
            .addComponent(jScrollPane1, javax.swing.GroupLayout.PREFERRED_SIZE, 804, javax.swing.GroupLayout.PREFERRED_SIZE)
            .addContainerGap())
    );
    layout.setVerticalGroup(
        layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
        .addComponent(jPanel1, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
        .addGroup(layout.createSequentialGroup()
            .addContainerGap()
            .addComponent(jScrollPane1, javax.swing.GroupLayout.DEFAULT_SIZE, 549, Short.MAX_VALUE)
            .addContainerGap())
    );
    pack();
}// </editor-fold>//GEN-END:initComponents
private void tblPegawaiMouseClicked(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_tblPegawaiMouseClicked
    // Copy the clicked row's values into the detail form fields.
    int row = tblPegawai.getSelectedRow();
    if (row < 0) {
        // Click landed outside any row; nothing to display.
        return;
    }
    txtNik.setText(tblPegawai.getValueAt(row, 0).toString());
    txtNama.setText(tblPegawai.getValueAt(row, 1).toString());
    txtUsername.setText(tblPegawai.getValueAt(row, 2).toString());
    txtPassword.setText(tblPegawai.getValueAt(row, 3).toString());
    txtGender.setText(tblPegawai.getValueAt(row, 4).toString());
    txtStatus.setText(tblPegawai.getValueAt(row, 5).toString());
    // BUGFIX: was column 65, an out-of-range typo for column 6 (telepon,
    // between status=5 and alamat=7) that threw on every row click.
    txtTelepon.setText(tblPegawai.getValueAt(row, 6).toString());
    txtAlamat.setText(tblPegawai.getValueAt(row, 7).toString());
    txtAgama.setText(tblPegawai.getValueAt(row, 8).toString());
    txtTglLahir.setText(tblPegawai.getValueAt(row, 9).toString());
    txtEmail.setText(tblPegawai.getValueAt(row, 10).toString());
    txtTglMasuk.setText(tblPegawai.getValueAt(row, 11).toString());
    // TODO(review): txtStatus is overwritten here after being set from
    // column 5 above - one of the two writes is presumably wrong; confirm
    // the table schema before changing.
    txtStatus.setText(tblPegawai.getValueAt(row, 12).toString());
    cmbKdJaba.setSelectedItem(tblPegawai.getValueAt(row, 13).toString());
    setEditStatus(false);
}//GEN-LAST:event_tblPegawaiMouseClicked
// Intentionally empty: pressing Enter in the NIK field currently triggers
// no action. Generated stub kept for the registered ActionListener.
private void txtNikActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_txtNikActionPerformed
    // TODO add your handling code here:
}//GEN-LAST:event_txtNikActionPerformed
private void txtNikKeyReleased(java.awt.event.KeyEvent evt) {//GEN-FIRST:event_txtNikKeyReleased
    // Force the NIK to upper case as the user types. Locale.ROOT makes the
    // conversion locale-independent for this alphanumeric ID (avoids the
    // Turkish dotless-i surprise under a tr_TR default locale).
    // NOTE(review): calling setText on every key release moves the caret to
    // the end of the field; a DocumentFilter would be the cleaner fix.
    txtNik.setText(txtNik.getText().toUpperCase(java.util.Locale.ROOT));
}//GEN-LAST:event_txtNikKeyReleased
// Intentionally empty: pressing Enter in the username field currently
// triggers no action. Generated stub kept for the registered ActionListener.
private void txtUsernameActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_txtUsernameActionPerformed
    // TODO add your handling code here:
}//GEN-LAST:event_txtUsernameActionPerformed
// Creates a new employee record from the form fields.
private void btnTambahActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnTambahActionPerformed
    // BUGFIX: the error message requires BOTH NIK and name ("dan" = and),
    // but the original '||' let a half-filled form through. Require both.
    if (!txtNik.getText().isEmpty() && !txtNama.getText().isEmpty()) {
        M01pegaCtrl cb = new M01pegaCtrl();
        cb.setNik(txtNik.getText());
        cb.setNama(txtNama.getText());
        cb.setUsername(txtUsername.getText());
        cb.setPassword(txtPassword.getText());
        cb.setGender(txtGender.getText());
        cb.setStatus(txtStatus.getText());
        cb.setTelepon(txtTelepon.getText());
        cb.setAlamat(txtAlamat.getText());
        cb.setAgama(txtAgama.getText());
        // Parse the dd-MM-yyyy birth date; on bad input the error is logged
        // and a null date is stored (pre-existing best-effort behavior).
        String tanggal = txtTglLahir.getText();
        DateFormat format = new SimpleDateFormat("dd-MM-yyyy");
        Date date = null;
        try {
            date = format.parse(tanggal);
        } catch (ParseException ex) {
            Logger.getLogger(M01pegaView.class.getName()).log(Level.SEVERE, null, ex);
        }
        cb.setTgllahir(date);
        cb.setEmail(txtEmail.getText());
        // Parse the dd-MM-yyyy hire date.
        String tanggal2 = txtTglMasuk.getText();
        DateFormat format2 = new SimpleDateFormat("dd-MM-yyyy");
        Date date2 = null;
        try {
            // BUGFIX: originally parsed with 'format', leaving 'format2'
            // dead; same pattern, so behavior is unchanged, but intent is
            // now explicit.
            date2 = format2.parse(tanggal2);
        } catch (ParseException ex) {
            Logger.getLogger(M01pegaView.class.getName()).log(Level.SEVERE, null, ex);
        }
        cb.setTglmasuk(date2);
        cb.setKdjab(cmbKdJaba.getSelectedItem().toString());
        cb.tambahPegawai();
        // Reset the form and refresh the table with the new record.
        btnClearActionPerformed(evt);
        tblPegawai.setModel(cb.getDaftarPegawai());
    } else {
        JOptionPane.showInternalMessageDialog(this, "Kode Pegawai dan Nama Pegawai tidak boleh kosong", "Error", JOptionPane.INFORMATION_MESSAGE);
    }
}//GEN-LAST:event_btnTambahActionPerformed
// Searches for employees matching the current form fields.
private void btnCariActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnCariActionPerformed
    M01pegaCtrl cbm = new M01pegaCtrl();
    cbm.setNik(txtNik.getText());
    cbm.setNama(txtNama.getText());
    cbm.setUsername(txtUsername.getText());
    cbm.setGender(txtGender.getText());
    cbm.setStatus(txtStatus.getText());
    // Parse the dd-MM-yyyy hire date; on bad input the error is logged and
    // the criterion stays null (best-effort, matching the other handlers).
    String tanggal = txtTglMasuk.getText();
    DateFormat format = new SimpleDateFormat("dd-MM-yyyy");
    Date date = null;
    try {
        date = format.parse(tanggal);
    } catch (ParseException ex) {
        Logger.getLogger(M01pegaView.class.getName()).log(Level.SEVERE, null, ex);
    }
    // BUGFIX: the original '(java.sql.Date) date' cast threw
    // ClassCastException whenever a date was parsed, because
    // SimpleDateFormat.parse returns java.util.Date. The sibling handlers
    // pass a plain java.util.Date to setTglmasuk, so do the same here.
    cbm.setTglmasuk(date);
    cbm.setKdjab(cmbKdJaba.getSelectedItem().toString());
    tblPegawai.setModel(cbm.getCariPegawai());
}//GEN-LAST:event_btnCariActionPerformed
// Saves edits of the currently loaded employee record.
private void btnEditActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnEditActionPerformed
    M01pegaCtrl cb = new M01pegaCtrl();
    cb.setNik(txtNik.getText());
    cb.setNama(txtNama.getText());
    cb.setUsername(txtUsername.getText());
    cb.setPassword(txtPassword.getText());
    cb.setGender(txtGender.getText());
    cb.setStatus(txtStatus.getText());
    cb.setTelepon(txtTelepon.getText());
    cb.setAlamat(txtAlamat.getText());
    cb.setAgama(txtAgama.getText());
    // Parse the dd-MM-yyyy birth date; on bad input the error is logged
    // and a null date is stored (pre-existing best-effort behavior).
    String tanggal = txtTglLahir.getText();
    DateFormat format = new SimpleDateFormat("dd-MM-yyyy");
    Date date = null;
    try {
        date = format.parse(tanggal);
    } catch (ParseException ex) {
        Logger.getLogger(M01pegaView.class.getName()).log(Level.SEVERE, null, ex);
    }
    cb.setTgllahir(date);
    cb.setEmail(txtEmail.getText());
    // Parse the dd-MM-yyyy hire date.
    String tanggal2 = txtTglMasuk.getText();
    DateFormat format2 = new SimpleDateFormat("dd-MM-yyyy");
    Date date2 = null;
    try {
        // BUGFIX: originally parsed with 'format', leaving 'format2' dead;
        // same pattern, so behavior is unchanged, but intent is now explicit.
        date2 = format2.parse(tanggal2);
    } catch (ParseException ex) {
        Logger.getLogger(M01pegaView.class.getName()).log(Level.SEVERE, null, ex);
    }
    cb.setTglmasuk(date2);
    cb.setKdjab(cmbKdJaba.getSelectedItem().toString());
    cb.editPegawai();
    // Back to read-only mode, clear the form, reload the table.
    setEditStatus(false);
    btnClearActionPerformed(evt);
    M01pegaCtrl cb2 = new M01pegaCtrl();
    tblPegawai.setModel(cb2.getDaftarPegawai());
}//GEN-LAST:event_btnEditActionPerformed
// Reset every input field to its default value; also invoked by the other
// handlers after a successful edit/delete/cancel.
private void btnClearActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnClearActionPerformed
    clearText();
}//GEN-LAST:event_btnClearActionPerformed
private void btnPrintActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnPrintActionPerformed
    // Hand the employee report print job off to the controller.
    new M01pegaCtrl().printPegawai();
}//GEN-LAST:event_btnPrintActionPerformed
private void btnHapusActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnHapusActionPerformed
    // Delete the employee identified by the NIK currently in the form.
    M01pegaCtrl deleter = new M01pegaCtrl();
    deleter.setNik(txtNik.getText());
    deleter.hapusPegawai();
    // Leave delete mode, reset the form, and reload the full table.
    setHapusStatus(false);
    btnClearActionPerformed(evt);
    tblPegawai.setModel(new M01pegaCtrl().getDaftarPegawai());
}//GEN-LAST:event_btnHapusActionPerformed
private void btnBatalActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnBatalActionPerformed
    // Cancel editing: leave edit mode, wipe the form, reload the table.
    setEditStatus(false);
    btnClearActionPerformed(evt);
    M01pegaCtrl reload = new M01pegaCtrl();
    tblPegawai.setModel(reload.getDaftarPegawai());
}//GEN-LAST:event_btnBatalActionPerformed
/**
 * Toggles the form between edit mode ({@code status == true}) and browse
 * mode ({@code status == false}). Each widget's state is a direct function
 * of the flag, which replaces the two duplicated branches of the original.
 */
private void setEditStatus(boolean status) {
    btnCari.setEnabled(!status);
    btnTambah.setEnabled(!status);
    btnHapus.setEnabled(true);   // enabled in both modes
    btnEdit.setEnabled(status);
    btnBatal.setEnabled(true);   // enabled in both modes
    // NIK is the record key and is never editable from this screen.
    txtNik.setEditable(false);
    txtNama.setEditable(status);
    txtUsername.setEditable(status);
    txtPassword.setEditable(status);
    txtGender.setEditable(status);
    txtStatus.setEditable(status);
    txtTelepon.setEditable(status);
    txtAlamat.setEditable(status);
    txtAgama.setEditable(status);
    txtTglLahir.setEditable(status);
    txtEmail.setEditable(status);
    txtTglMasuk.setEditable(status);
    cmbKdJaba.setEnabled(status);
}
/**
 * Clears every input field; the two date fields are pre-filled with today's
 * date in dd-MM-yyyy format.
 */
private void clearText() {
    String today = new SimpleDateFormat("dd-MM-yyyy").format(new Date());
    txtNik.setText("");
    txtNama.setText("");
    txtUsername.setText("");
    txtPassword.setText("");
    txtGender.setText("");
    txtStatus.setText("");
    txtTelepon.setText("");
    txtAlamat.setText("");
    txtAgama.setText("");
    txtTglLahir.setText(today);
    txtEmail.setText("");
    txtTglMasuk.setText(today);
}
/**
 * Toggles the form between delete mode ({@code status == true}) and browse
 * mode ({@code status == false}). In delete mode the text fields are locked
 * and only Edit/Hapus/Batal stay enabled; the duplicated branches of the
 * original are collapsed into boolean-driven assignments.
 */
private void setHapusStatus(boolean status) {
    btnCari.setEnabled(!status);
    btnTambah.setEnabled(!status);
    btnClear.setEnabled(!status);
    btnEdit.setEnabled(status);
    btnHapus.setEnabled(status);
    btnBatal.setEnabled(status);
    // NIK is the record key and is never editable from this screen.
    txtNik.setEditable(false);
    txtNama.setEditable(!status);
    txtUsername.setEditable(!status);
    txtPassword.setEditable(!status);
    txtGender.setEditable(!status);
    txtStatus.setEditable(!status);
    txtTelepon.setEditable(!status);
    txtAlamat.setEditable(!status);
    txtAgama.setEditable(!status);
    txtTglLahir.setEditable(!status);
    txtEmail.setEditable(!status);
    txtTglMasuk.setEditable(!status);
    cmbKdJaba.setEnabled(!status);
}
// Variables declaration - do not modify//GEN-BEGIN:variables
private javax.swing.JButton btnBatal;
private javax.swing.JButton btnCari;
private javax.swing.JButton btnClear;
private javax.swing.JButton btnEdit;
private javax.swing.JButton btnHapus;
private javax.swing.JButton btnPrint;
private javax.swing.JButton btnTambah;
private javax.swing.JComboBox cmbKdJaba;
private javax.swing.JLabel jLabel1;
private javax.swing.JLabel jLabel10;
private javax.swing.JLabel jLabel11;
private javax.swing.JLabel jLabel12;
private javax.swing.JLabel jLabel13;
private javax.swing.JLabel jLabel2;
private javax.swing.JLabel jLabel3;
private javax.swing.JLabel jLabel4;
private javax.swing.JLabel jLabel5;
private javax.swing.JLabel jLabel6;
private javax.swing.JLabel jLabel7;
private javax.swing.JLabel jLabel8;
private javax.swing.JLabel jLabel9;
private javax.swing.JPanel jPanel1;
private javax.swing.JPanel jPanel2;
private javax.swing.JPanel jPanel3;
private javax.swing.JScrollPane jScrollPane1;
private javax.swing.JTable tblPegawai;
private javax.swing.JTextField txtAgama;
private javax.swing.JTextField txtAlamat;
private javax.swing.JTextField txtEmail;
private javax.swing.JTextField txtGender;
private javax.swing.JTextField txtNama;
private javax.swing.JTextField txtNik;
private javax.swing.JTextField txtPassword;
private javax.swing.JTextField txtStatus;
private javax.swing.JTextField txtTelepon;
private javax.swing.JTextField txtTglLahir;
private javax.swing.JTextField txtTglMasuk;
private javax.swing.JTextField txtUsername;
// End of variables declaration//GEN-END:variables
}
| |
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
* Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.inspector.model;
import java.io.Serializable;
import java.util.Objects;
/**
* <p>
* Contains information about an Inspector finding. This data type is used as
* the response element in the <a>DescribeFindings</a> action.
* </p>
*/
public class Finding implements Serializable, Cloneable {

    /** The ARN specifying the finding. */
    private String arn;
    /** The schema version of this data type. */
    private Integer schemaVersion;
    /** The data element is set to "Inspector". */
    private String service;
    /** Service-provided attributes of the finding. */
    private InspectorServiceAttributes serviceAttributes;
    /** The type of the host from which the finding is generated. */
    private String assetType;
    /** A collection of attributes of the host from which the finding is generated. */
    private AssetAttributes assetAttributes;
    /** The ID of the finding. */
    private String id;
    /** The name of the finding. */
    private String title;
    /** The description of the finding. */
    private String description;
    /** The recommendation for the finding. */
    private String recommendation;
    /** The finding severity. Values can be set to High, Medium, Low, and Informational. */
    private String severity;
    /** The numeric value of the finding severity. */
    private Double numericSeverity;
    /** This data element is currently not used. */
    private Integer confidence;
    /** This data element is currently not used. */
    private Boolean indicatorOfCompromise;
    /** The system-defined attributes for the finding. */
    private java.util.List<Attribute> attributes;
    /** The user-defined attributes that are assigned to the finding. */
    private java.util.List<Attribute> userAttributes;
    /** The time when the finding was generated. */
    private java.util.Date createdAt;
    /** The time when the <a>AddAttributesToFindings</a> API is called. */
    private java.util.Date updatedAt;

    /**
     * @param arn The ARN specifying the finding.
     */
    public void setArn(String arn) {
        this.arn = arn;
    }

    /**
     * @return The ARN specifying the finding.
     */
    public String getArn() {
        return this.arn;
    }

    /**
     * @param arn The ARN specifying the finding.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public Finding withArn(String arn) {
        setArn(arn);
        return this;
    }

    /**
     * @param schemaVersion The schema version of this data type.
     */
    public void setSchemaVersion(Integer schemaVersion) {
        this.schemaVersion = schemaVersion;
    }

    /**
     * @return The schema version of this data type.
     */
    public Integer getSchemaVersion() {
        return this.schemaVersion;
    }

    /**
     * @param schemaVersion The schema version of this data type.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public Finding withSchemaVersion(Integer schemaVersion) {
        setSchemaVersion(schemaVersion);
        return this;
    }

    /**
     * @param service The data element is set to "Inspector".
     */
    public void setService(String service) {
        this.service = service;
    }

    /**
     * @return The data element is set to "Inspector".
     */
    public String getService() {
        return this.service;
    }

    /**
     * @param service The data element is set to "Inspector".
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public Finding withService(String service) {
        setService(service);
        return this;
    }

    /**
     * @param serviceAttributes The service-provided attributes of the finding.
     */
    public void setServiceAttributes(
            InspectorServiceAttributes serviceAttributes) {
        this.serviceAttributes = serviceAttributes;
    }

    /**
     * @return The service-provided attributes of the finding.
     */
    public InspectorServiceAttributes getServiceAttributes() {
        return this.serviceAttributes;
    }

    /**
     * @param serviceAttributes The service-provided attributes of the finding.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public Finding withServiceAttributes(
            InspectorServiceAttributes serviceAttributes) {
        setServiceAttributes(serviceAttributes);
        return this;
    }

    /**
     * @param assetType The type of the host from which the finding is generated.
     * @see AssetType
     */
    public void setAssetType(String assetType) {
        this.assetType = assetType;
    }

    /**
     * @return The type of the host from which the finding is generated.
     * @see AssetType
     */
    public String getAssetType() {
        return this.assetType;
    }

    /**
     * @param assetType The type of the host from which the finding is generated.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     * @see AssetType
     */
    public Finding withAssetType(String assetType) {
        setAssetType(assetType);
        return this;
    }

    /**
     * Enum overload; stores {@code assetType.toString()}.
     *
     * @param assetType The type of the host from which the finding is generated.
     * @see AssetType
     */
    public void setAssetType(AssetType assetType) {
        this.assetType = assetType.toString();
    }

    /**
     * @param assetType The type of the host from which the finding is generated.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     * @see AssetType
     */
    public Finding withAssetType(AssetType assetType) {
        setAssetType(assetType);
        return this;
    }

    /**
     * @param assetAttributes A collection of attributes of the host from which
     *        the finding is generated.
     */
    public void setAssetAttributes(AssetAttributes assetAttributes) {
        this.assetAttributes = assetAttributes;
    }

    /**
     * @return A collection of attributes of the host from which the finding is
     *         generated.
     */
    public AssetAttributes getAssetAttributes() {
        return this.assetAttributes;
    }

    /**
     * @param assetAttributes A collection of attributes of the host from which
     *        the finding is generated.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public Finding withAssetAttributes(AssetAttributes assetAttributes) {
        setAssetAttributes(assetAttributes);
        return this;
    }

    /**
     * @param id The ID of the finding.
     */
    public void setId(String id) {
        this.id = id;
    }

    /**
     * @return The ID of the finding.
     */
    public String getId() {
        return this.id;
    }

    /**
     * @param id The ID of the finding.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public Finding withId(String id) {
        setId(id);
        return this;
    }

    /**
     * @param title The name of the finding.
     */
    public void setTitle(String title) {
        this.title = title;
    }

    /**
     * @return The name of the finding.
     */
    public String getTitle() {
        return this.title;
    }

    /**
     * @param title The name of the finding.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public Finding withTitle(String title) {
        setTitle(title);
        return this;
    }

    /**
     * @param description The description of the finding.
     */
    public void setDescription(String description) {
        this.description = description;
    }

    /**
     * @return The description of the finding.
     */
    public String getDescription() {
        return this.description;
    }

    /**
     * @param description The description of the finding.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public Finding withDescription(String description) {
        setDescription(description);
        return this;
    }

    /**
     * @param recommendation The recommendation for the finding.
     */
    public void setRecommendation(String recommendation) {
        this.recommendation = recommendation;
    }

    /**
     * @return The recommendation for the finding.
     */
    public String getRecommendation() {
        return this.recommendation;
    }

    /**
     * @param recommendation The recommendation for the finding.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public Finding withRecommendation(String recommendation) {
        setRecommendation(recommendation);
        return this;
    }

    /**
     * @param severity The finding severity. Values can be set to High, Medium,
     *        Low, and Informational.
     * @see Severity
     */
    public void setSeverity(String severity) {
        this.severity = severity;
    }

    /**
     * @return The finding severity. Values can be set to High, Medium, Low,
     *         and Informational.
     * @see Severity
     */
    public String getSeverity() {
        return this.severity;
    }

    /**
     * @param severity The finding severity. Values can be set to High, Medium,
     *        Low, and Informational.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     * @see Severity
     */
    public Finding withSeverity(String severity) {
        setSeverity(severity);
        return this;
    }

    /**
     * Enum overload; stores {@code severity.toString()}.
     *
     * @param severity The finding severity. Values can be set to High, Medium,
     *        Low, and Informational.
     * @see Severity
     */
    public void setSeverity(Severity severity) {
        this.severity = severity.toString();
    }

    /**
     * @param severity The finding severity. Values can be set to High, Medium,
     *        Low, and Informational.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     * @see Severity
     */
    public Finding withSeverity(Severity severity) {
        setSeverity(severity);
        return this;
    }

    /**
     * @param numericSeverity The numeric value of the finding severity.
     */
    public void setNumericSeverity(Double numericSeverity) {
        this.numericSeverity = numericSeverity;
    }

    /**
     * @return The numeric value of the finding severity.
     */
    public Double getNumericSeverity() {
        return this.numericSeverity;
    }

    /**
     * @param numericSeverity The numeric value of the finding severity.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public Finding withNumericSeverity(Double numericSeverity) {
        setNumericSeverity(numericSeverity);
        return this;
    }

    /**
     * @param confidence This data element is currently not used.
     */
    public void setConfidence(Integer confidence) {
        this.confidence = confidence;
    }

    /**
     * @return This data element is currently not used.
     */
    public Integer getConfidence() {
        return this.confidence;
    }

    /**
     * @param confidence This data element is currently not used.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public Finding withConfidence(Integer confidence) {
        setConfidence(confidence);
        return this;
    }

    /**
     * @param indicatorOfCompromise This data element is currently not used.
     */
    public void setIndicatorOfCompromise(Boolean indicatorOfCompromise) {
        this.indicatorOfCompromise = indicatorOfCompromise;
    }

    /**
     * @return This data element is currently not used.
     */
    public Boolean getIndicatorOfCompromise() {
        return this.indicatorOfCompromise;
    }

    /**
     * @param indicatorOfCompromise This data element is currently not used.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public Finding withIndicatorOfCompromise(Boolean indicatorOfCompromise) {
        setIndicatorOfCompromise(indicatorOfCompromise);
        return this;
    }

    /**
     * @return This data element is currently not used.
     */
    public Boolean isIndicatorOfCompromise() {
        return this.indicatorOfCompromise;
    }

    /**
     * @return The system-defined attributes for the finding.
     */
    public java.util.List<Attribute> getAttributes() {
        return attributes;
    }

    /**
     * Stores a defensive copy of the given collection; {@code null} clears the
     * list.
     *
     * @param attributes The system-defined attributes for the finding.
     */
    public void setAttributes(java.util.Collection<Attribute> attributes) {
        this.attributes = (attributes == null)
                ? null
                : new java.util.ArrayList<Attribute>(attributes);
    }

    /**
     * <b>NOTE:</b> This method appends the values to the existing list (if
     * any). Use {@link #setAttributes(java.util.Collection)} or
     * {@link #withAttributes(java.util.Collection)} if you want to override
     * the existing values.
     *
     * @param attributes The system-defined attributes for the finding.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public Finding withAttributes(Attribute... attributes) {
        if (this.attributes == null) {
            setAttributes(new java.util.ArrayList<Attribute>(attributes.length));
        }
        for (Attribute ele : attributes) {
            this.attributes.add(ele);
        }
        return this;
    }

    /**
     * @param attributes The system-defined attributes for the finding.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public Finding withAttributes(java.util.Collection<Attribute> attributes) {
        setAttributes(attributes);
        return this;
    }

    /**
     * @return The user-defined attributes that are assigned to the finding.
     */
    public java.util.List<Attribute> getUserAttributes() {
        return userAttributes;
    }

    /**
     * Stores a defensive copy of the given collection; {@code null} clears the
     * list.
     *
     * @param userAttributes The user-defined attributes that are assigned to
     *        the finding.
     */
    public void setUserAttributes(java.util.Collection<Attribute> userAttributes) {
        this.userAttributes = (userAttributes == null)
                ? null
                : new java.util.ArrayList<Attribute>(userAttributes);
    }

    /**
     * <b>NOTE:</b> This method appends the values to the existing list (if
     * any). Use {@link #setUserAttributes(java.util.Collection)} or
     * {@link #withUserAttributes(java.util.Collection)} if you want to
     * override the existing values.
     *
     * @param userAttributes The user-defined attributes that are assigned to
     *        the finding.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public Finding withUserAttributes(Attribute... userAttributes) {
        if (this.userAttributes == null) {
            setUserAttributes(new java.util.ArrayList<Attribute>(
                    userAttributes.length));
        }
        for (Attribute ele : userAttributes) {
            this.userAttributes.add(ele);
        }
        return this;
    }

    /**
     * @param userAttributes The user-defined attributes that are assigned to
     *        the finding.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public Finding withUserAttributes(
            java.util.Collection<Attribute> userAttributes) {
        setUserAttributes(userAttributes);
        return this;
    }

    /**
     * @param createdAt The time when the finding was generated.
     */
    public void setCreatedAt(java.util.Date createdAt) {
        this.createdAt = createdAt;
    }

    /**
     * @return The time when the finding was generated.
     */
    public java.util.Date getCreatedAt() {
        return this.createdAt;
    }

    /**
     * @param createdAt The time when the finding was generated.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public Finding withCreatedAt(java.util.Date createdAt) {
        setCreatedAt(createdAt);
        return this;
    }

    /**
     * @param updatedAt The time when <a>AddAttributesToFindings</a> API is
     *        called.
     */
    public void setUpdatedAt(java.util.Date updatedAt) {
        this.updatedAt = updatedAt;
    }

    /**
     * @return The time when <a>AddAttributesToFindings</a> API is called.
     */
    public java.util.Date getUpdatedAt() {
        return this.updatedAt;
    }

    /**
     * @param updatedAt The time when <a>AddAttributesToFindings</a> API is
     *        called.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public Finding withUpdatedAt(java.util.Date updatedAt) {
        setUpdatedAt(updatedAt);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging. Only non-null members are included, preserving the original
     * generated "{Name: value,...}" layout.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getArn() != null)
            sb.append("Arn: " + getArn() + ",");
        if (getSchemaVersion() != null)
            sb.append("SchemaVersion: " + getSchemaVersion() + ",");
        if (getService() != null)
            sb.append("Service: " + getService() + ",");
        if (getServiceAttributes() != null)
            sb.append("ServiceAttributes: " + getServiceAttributes() + ",");
        if (getAssetType() != null)
            sb.append("AssetType: " + getAssetType() + ",");
        if (getAssetAttributes() != null)
            sb.append("AssetAttributes: " + getAssetAttributes() + ",");
        if (getId() != null)
            sb.append("Id: " + getId() + ",");
        if (getTitle() != null)
            sb.append("Title: " + getTitle() + ",");
        if (getDescription() != null)
            sb.append("Description: " + getDescription() + ",");
        if (getRecommendation() != null)
            sb.append("Recommendation: " + getRecommendation() + ",");
        if (getSeverity() != null)
            sb.append("Severity: " + getSeverity() + ",");
        if (getNumericSeverity() != null)
            sb.append("NumericSeverity: " + getNumericSeverity() + ",");
        if (getConfidence() != null)
            sb.append("Confidence: " + getConfidence() + ",");
        if (getIndicatorOfCompromise() != null)
            sb.append("IndicatorOfCompromise: " + getIndicatorOfCompromise()
                    + ",");
        if (getAttributes() != null)
            sb.append("Attributes: " + getAttributes() + ",");
        if (getUserAttributes() != null)
            sb.append("UserAttributes: " + getUserAttributes() + ",");
        if (getCreatedAt() != null)
            sb.append("CreatedAt: " + getCreatedAt() + ",");
        if (getUpdatedAt() != null)
            sb.append("UpdatedAt: " + getUpdatedAt());
        sb.append("}");
        return sb.toString();
    }

    /**
     * Two findings are equal when all eighteen members are equal (null-safe).
     * Replaces the generated null-XOR chains with {@link Objects#equals},
     * which is behaviorally identical.
     */
    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (!(obj instanceof Finding))
            return false;
        Finding other = (Finding) obj;
        return Objects.equals(getArn(), other.getArn())
                && Objects.equals(getSchemaVersion(), other.getSchemaVersion())
                && Objects.equals(getService(), other.getService())
                && Objects.equals(getServiceAttributes(),
                        other.getServiceAttributes())
                && Objects.equals(getAssetType(), other.getAssetType())
                && Objects.equals(getAssetAttributes(),
                        other.getAssetAttributes())
                && Objects.equals(getId(), other.getId())
                && Objects.equals(getTitle(), other.getTitle())
                && Objects.equals(getDescription(), other.getDescription())
                && Objects.equals(getRecommendation(),
                        other.getRecommendation())
                && Objects.equals(getSeverity(), other.getSeverity())
                && Objects.equals(getNumericSeverity(),
                        other.getNumericSeverity())
                && Objects.equals(getConfidence(), other.getConfidence())
                && Objects.equals(getIndicatorOfCompromise(),
                        other.getIndicatorOfCompromise())
                && Objects.equals(getAttributes(), other.getAttributes())
                && Objects.equals(getUserAttributes(),
                        other.getUserAttributes())
                && Objects.equals(getCreatedAt(), other.getCreatedAt())
                && Objects.equals(getUpdatedAt(), other.getUpdatedAt());
    }

    /**
     * {@link Objects#hash} applies the same 31-based accumulation (initial
     * value 1, null contributes 0) as the previous hand-rolled implementation,
     * so hash values are unchanged. Members must be listed in the same order.
     */
    @Override
    public int hashCode() {
        return Objects.hash(getArn(), getSchemaVersion(), getService(),
                getServiceAttributes(), getAssetType(), getAssetAttributes(),
                getId(), getTitle(), getDescription(), getRecommendation(),
                getSeverity(), getNumericSeverity(), getConfidence(),
                getIndicatorOfCompromise(), getAttributes(),
                getUserAttributes(), getCreatedAt(), getUpdatedAt());
    }

    /**
     * Shallow copy via {@link Object#clone}; this class implements
     * {@link Cloneable}, so the exception is unreachable.
     */
    @Override
    public Finding clone() {
        try {
            return (Finding) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException(
                    "Got a CloneNotSupportedException from Object.clone() "
                            + "even though we're Cloneable!", e);
        }
    }
}
| |
// Licensed to the Software Freedom Conservancy (SFC) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The SFC licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.openqa.selenium.support.ui;
import org.openqa.selenium.Alert;
import org.openqa.selenium.By;
import org.openqa.selenium.NoAlertPresentException;
import org.openqa.selenium.NoSuchElementException;
import org.openqa.selenium.NoSuchFrameException;
import org.openqa.selenium.StaleElementReferenceException;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebDriverException;
import org.openqa.selenium.WebElement;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* Canned {@link ExpectedCondition}s which are generally useful within webdriver
* tests.
*/
/**
 * Canned {@link ExpectedCondition}s which are generally useful within webdriver
 * tests.
 *
 * <p>All factory methods are static and side-effect free; the returned
 * conditions are evaluated repeatedly by a wait implementation until they
 * yield a non-null / non-false value.
 */
public class ExpectedConditions {

  // JLS-recommended modifier order is "static final" (was "final static").
  private static final Logger log = Logger.getLogger(ExpectedConditions.class.getName());

  private ExpectedConditions() {
    // Utility class; not meant to be instantiated.
  }

  /**
   * An expectation for checking the title of a page.
   *
   * @param title the expected title, which must be an exact match
   * @return true when the title matches, false otherwise
   */
  public static ExpectedCondition<Boolean> titleIs(final String title) {
    return new ExpectedCondition<Boolean>() {
      private String currentTitle = "";

      @Override
      public Boolean apply(WebDriver driver) {
        currentTitle = driver.getTitle();
        return title.equals(currentTitle);
      }

      @Override
      public String toString() {
        return String.format("title to be \"%s\". Current title: \"%s\"", title, currentTitle);
      }
    };
  }

  /**
   * An expectation for checking that the title contains a case-sensitive
   * substring.
   *
   * @param title the fragment of title expected
   * @return true when the title matches, false otherwise
   */
  public static ExpectedCondition<Boolean> titleContains(final String title) {
    return new ExpectedCondition<Boolean>() {
      private String currentTitle = "";

      @Override
      public Boolean apply(WebDriver driver) {
        currentTitle = driver.getTitle();
        return currentTitle != null && currentTitle.contains(title);
      }

      @Override
      public String toString() {
        return String.format("title to contain \"%s\". Current title: \"%s\"", title, currentTitle);
      }
    };
  }

  /**
   * An expectation for checking that an element is present on the DOM of a
   * page. This does not necessarily mean that the element is visible.
   *
   * @param locator used to find the element
   * @return the WebElement once it is located
   */
  public static ExpectedCondition<WebElement> presenceOfElementLocated(
      final By locator) {
    return new ExpectedCondition<WebElement>() {
      @Override
      public WebElement apply(WebDriver driver) {
        return findElement(locator, driver);
      }

      @Override
      public String toString() {
        return "presence of element located by: " + locator;
      }
    };
  }

  /**
   * An expectation for checking that an element is present on the DOM of a page
   * and visible. Visibility means that the element is not only displayed but
   * also has a height and width that is greater than 0.
   *
   * @param locator used to find the element
   * @return the WebElement once it is located and visible
   */
  public static ExpectedCondition<WebElement> visibilityOfElementLocated(
      final By locator) {
    return new ExpectedCondition<WebElement>() {
      @Override
      public WebElement apply(WebDriver driver) {
        try {
          return elementIfVisible(findElement(locator, driver));
        } catch (StaleElementReferenceException e) {
          // The element went stale between lookup and the visibility check;
          // report "not there yet" so the wait retries.
          return null;
        }
      }

      @Override
      public String toString() {
        return "visibility of element located by " + locator;
      }
    };
  }

  /**
   * An expectation for checking that all elements present on the web page that
   * match the locator are visible. Visibility means that the elements are not
   * only displayed but also have a height and width that is greater than 0.
   *
   * @param locator used to find the elements
   * @return the list of WebElements once they are located and all visible
   */
  public static ExpectedCondition<List<WebElement>> visibilityOfAllElementsLocatedBy(
      final By locator) {
    return new ExpectedCondition<List<WebElement>>() {
      @Override
      public List<WebElement> apply(WebDriver driver) {
        List<WebElement> elements = findElements(locator, driver);
        for (WebElement element : elements) {
          if (!element.isDisplayed()) {
            return null;
          }
        }
        // An empty match is not success: keep waiting until at least one
        // element exists and all matches are displayed.
        return elements.isEmpty() ? null : elements;
      }

      @Override
      public String toString() {
        return "visibility of all elements located by " + locator;
      }
    };
  }

  /**
   * An expectation for checking that all of the given elements are visible.
   * Visibility means that the elements are not only displayed but also have a
   * height and width that is greater than 0.
   *
   * @param elements list of WebElements
   * @return the same list of WebElements once they are all visible
   */
  public static ExpectedCondition<List<WebElement>> visibilityOfAllElements(
      final List<WebElement> elements) {
    return new ExpectedCondition<List<WebElement>>() {
      @Override
      public List<WebElement> apply(WebDriver driver) {
        for (WebElement element : elements) {
          if (!element.isDisplayed()) {
            return null;
          }
        }
        return elements.isEmpty() ? null : elements;
      }

      @Override
      public String toString() {
        return "visibility of all " + elements;
      }
    };
  }

  /**
   * An expectation for checking that an element, known to be present on the DOM
   * of a page, is visible. Visibility means that the element is not only
   * displayed but also has a height and width that is greater than 0.
   *
   * @param element the WebElement
   * @return the (same) WebElement once it is visible
   */
  public static ExpectedCondition<WebElement> visibilityOf(
      final WebElement element) {
    return new ExpectedCondition<WebElement>() {
      @Override
      public WebElement apply(WebDriver driver) {
        return elementIfVisible(element);
      }

      @Override
      public String toString() {
        return "visibility of " + element;
      }
    };
  }

  /**
   * @return the given element if it is visible and has non-zero size, otherwise null.
   */
  private static WebElement elementIfVisible(WebElement element) {
    return element.isDisplayed() ? element : null;
  }

  /**
   * An expectation for checking that there is at least one element present on a
   * web page.
   *
   * @param locator used to find the elements
   * @return the list of WebElements once at least one is located
   */
  public static ExpectedCondition<List<WebElement>> presenceOfAllElementsLocatedBy(
      final By locator) {
    return new ExpectedCondition<List<WebElement>>() {
      @Override
      public List<WebElement> apply(WebDriver driver) {
        List<WebElement> elements = findElements(locator, driver);
        return elements.isEmpty() ? null : elements;
      }

      @Override
      public String toString() {
        return "presence of any elements located by " + locator;
      }
    };
  }

  /**
   * An expectation for checking if the given text is present in the specified element.
   *
   * @param element the WebElement
   * @param text to be present in the element
   * @return true once the element contains the given text
   */
  public static ExpectedCondition<Boolean> textToBePresentInElement(
      final WebElement element, final String text) {
    return new ExpectedCondition<Boolean>() {
      @Override
      public Boolean apply(WebDriver driver) {
        try {
          String elementText = element.getText();
          return elementText.contains(text);
        } catch (StaleElementReferenceException e) {
          return null;
        }
      }

      @Override
      public String toString() {
        return String.format("text ('%s') to be present in element %s", text, element);
      }
    };
  }

  /**
   * An expectation for checking if the given text is present in the element that matches
   * the given locator.
   *
   * @param locator used to find the element
   * @param text to be present in the element found by the locator
   * @return true once the first element located by locator contains the given text
   *
   * @deprecated Use {@link #textToBePresentInElementLocated(By, String)} instead
   */
  @Deprecated
  public static ExpectedCondition<Boolean> textToBePresentInElement(
      final By locator, final String text) {
    return textToBePresentInElementLocated(locator, text);
  }

  /**
   * An expectation for checking if the given text is present in the element that matches
   * the given locator.
   *
   * @param locator used to find the element
   * @param text to be present in the element found by the locator
   * @return true once the first element located by locator contains the given text
   */
  public static ExpectedCondition<Boolean> textToBePresentInElementLocated(
      final By locator, final String text) {
    return new ExpectedCondition<Boolean>() {
      @Override
      public Boolean apply(WebDriver driver) {
        try {
          String elementText = findElement(locator, driver).getText();
          return elementText.contains(text);
        } catch (StaleElementReferenceException e) {
          return null;
        }
      }

      @Override
      public String toString() {
        return String.format("text ('%s') to be present in element found by %s",
            text, locator);
      }
    };
  }

  /**
   * An expectation for checking if the given text is present in the specified
   * elements value attribute.
   *
   * @param element the WebElement
   * @param text to be present in the element's value attribute
   * @return true once the element's value attribute contains the given text
   */
  public static ExpectedCondition<Boolean> textToBePresentInElementValue(
      final WebElement element, final String text) {
    return new ExpectedCondition<Boolean>() {
      @Override
      public Boolean apply(WebDriver driver) {
        try {
          String elementText = element.getAttribute("value");
          if (elementText != null) {
            return elementText.contains(text);
          } else {
            // No "value" attribute yet; condition is simply not met.
            return false;
          }
        } catch (StaleElementReferenceException e) {
          return null;
        }
      }

      @Override
      public String toString() {
        return String.format("text ('%s') to be the value of element %s", text, element);
      }
    };
  }

  /**
   * An expectation for checking if the given text is present in the specified
   * elements value attribute.
   *
   * @param locator used to find the element
   * @param text to be present in the value attribute of the element found by the locator
   * @return true once the value attribute of the first element located by locator contains
   *         the given text
   */
  public static ExpectedCondition<Boolean> textToBePresentInElementValue(
      final By locator, final String text) {
    return new ExpectedCondition<Boolean>() {
      @Override
      public Boolean apply(WebDriver driver) {
        try {
          String elementText = findElement(locator, driver).getAttribute("value");
          if (elementText != null) {
            return elementText.contains(text);
          } else {
            return false;
          }
        } catch (StaleElementReferenceException e) {
          return null;
        }
      }

      @Override
      public String toString() {
        return String.format("text ('%s') to be the value of element located by %s",
            text, locator);
      }
    };
  }

  /**
   * An expectation for checking whether the given frame is available to switch
   * to. <p> If the frame is available it switches the given driver to the
   * specified frame.
   *
   * @param frameLocator used to find the frame (id or name)
   * @return the driver switched to the frame, once the frame is available
   */
  public static ExpectedCondition<WebDriver> frameToBeAvailableAndSwitchToIt(
      final String frameLocator) {
    return new ExpectedCondition<WebDriver>() {
      @Override
      public WebDriver apply(WebDriver driver) {
        try {
          return driver.switchTo().frame(frameLocator);
        } catch (NoSuchFrameException e) {
          return null;
        }
      }

      @Override
      public String toString() {
        return "frame to be available: " + frameLocator;
      }
    };
  }

  /**
   * An expectation for checking whether the given frame is available to switch
   * to. <p> If the frame is available it switches the given driver to the
   * specified frame.
   *
   * @param locator used to find the frame
   * @return the driver switched to the frame, once the frame is available
   */
  public static ExpectedCondition<WebDriver> frameToBeAvailableAndSwitchToIt(
      final By locator) {
    return new ExpectedCondition<WebDriver>() {
      @Override
      public WebDriver apply(WebDriver driver) {
        try {
          return driver.switchTo().frame(findElement(locator, driver));
        } catch (NoSuchFrameException e) {
          return null;
        }
      }

      @Override
      public String toString() {
        return "frame to be available: " + locator;
      }
    };
  }

  /**
   * An expectation for checking that an element is either invisible or not
   * present on the DOM.
   *
   * @param locator used to find the element
   * @return true once the element is invisible or gone from the DOM
   */
  public static ExpectedCondition<Boolean> invisibilityOfElementLocated(
      final By locator) {
    return new ExpectedCondition<Boolean>() {
      @Override
      public Boolean apply(WebDriver driver) {
        try {
          return !(findElement(locator, driver).isDisplayed());
        } catch (NoSuchElementException e) {
          // Returns true because the element is not present in DOM. The
          // try block checks if the element is present but is invisible.
          return true;
        } catch (StaleElementReferenceException e) {
          // Returns true because stale element reference implies that element
          // is no longer visible.
          return true;
        }
      }

      @Override
      public String toString() {
        return "element to no longer be visible: " + locator;
      }
    };
  }

  /**
   * An expectation for checking that an element with text is either invisible
   * or not present on the DOM.
   *
   * @param locator used to find the element
   * @param text of the element
   * @return true once the element no longer shows the given text, is invisible
   *         or gone from the DOM
   */
  public static ExpectedCondition<Boolean> invisibilityOfElementWithText(
      final By locator, final String text) {
    return new ExpectedCondition<Boolean>() {
      @Override
      public Boolean apply(WebDriver driver) {
        try {
          return !findElement(locator, driver).getText().equals(text);
        } catch (NoSuchElementException e) {
          // Returns true because the element with text is not present in DOM. The
          // try block checks if the element is present but is invisible.
          return true;
        } catch (StaleElementReferenceException e) {
          // Returns true because stale element reference implies that element
          // is no longer visible.
          return true;
        }
      }

      @Override
      public String toString() {
        return String.format("element containing '%s' to no longer be visible: %s",
            text, locator);
      }
    };
  }

  /**
   * An expectation for checking an element is visible and enabled such that you
   * can click it.
   *
   * @param locator used to find the element
   * @return the WebElement once it is located and clickable (visible and enabled)
   */
  public static ExpectedCondition<WebElement> elementToBeClickable(
      final By locator) {
    return new ExpectedCondition<WebElement>() {
      // Delegate the locate-and-visibility half of the check; was a mutable
      // public field, now properly encapsulated.
      private final ExpectedCondition<WebElement> visibilityOfElementLocated =
          ExpectedConditions.visibilityOfElementLocated(locator);

      @Override
      public WebElement apply(WebDriver driver) {
        WebElement element = visibilityOfElementLocated.apply(driver);
        try {
          if (element != null && element.isEnabled()) {
            return element;
          } else {
            return null;
          }
        } catch (StaleElementReferenceException e) {
          return null;
        }
      }

      @Override
      public String toString() {
        return "element to be clickable: " + locator;
      }
    };
  }

  /**
   * An expectation for checking an element is visible and enabled such that you
   * can click it.
   *
   * @param element the WebElement
   * @return the (same) WebElement once it is clickable (visible and enabled)
   */
  public static ExpectedCondition<WebElement> elementToBeClickable(
      final WebElement element) {
    return new ExpectedCondition<WebElement>() {
      private final ExpectedCondition<WebElement> visibilityOfElement =
          ExpectedConditions.visibilityOf(element);

      @Override
      public WebElement apply(WebDriver driver) {
        // Renamed from "element" to stop shadowing the captured parameter.
        WebElement visibleElement = visibilityOfElement.apply(driver);
        try {
          if (visibleElement != null && visibleElement.isEnabled()) {
            return visibleElement;
          } else {
            return null;
          }
        } catch (StaleElementReferenceException e) {
          return null;
        }
      }

      @Override
      public String toString() {
        return "element to be clickable: " + element;
      }
    };
  }

  /**
   * Wait until an element is no longer attached to the DOM.
   *
   * @param element The element to wait for.
   * @return false is the element is still attached to the DOM, true
   *         otherwise.
   */
  public static ExpectedCondition<Boolean> stalenessOf(
      final WebElement element) {
    return new ExpectedCondition<Boolean>() {
      @Override
      public Boolean apply(WebDriver ignored) {
        try {
          // Calling any method forces a staleness check
          element.isEnabled();
          return false;
        } catch (StaleElementReferenceException expected) {
          return true;
        }
      }

      @Override
      public String toString() {
        return String.format("element (%s) to become stale", element);
      }
    };
  }

  /**
   * Wrapper for a condition, which allows for elements to update by redrawing.
   *
   * This works around the problem of conditions which have two parts: find an
   * element and then check for some condition on it. For these conditions it is
   * possible that an element is located and then subsequently it is redrawn on
   * the client. When this happens a {@link StaleElementReferenceException} is
   * thrown when the second part of the condition is checked.
   *
   * @param condition the condition to wrap
   * @return the wrapped condition, which treats staleness as "not yet"
   */
  public static <T> ExpectedCondition<T> refreshed(
      final ExpectedCondition<T> condition) {
    return new ExpectedCondition<T>() {
      @Override
      public T apply(WebDriver driver) {
        try {
          return condition.apply(driver);
        } catch (StaleElementReferenceException e) {
          return null;
        }
      }

      @Override
      public String toString() {
        return String.format("condition (%s) to be refreshed", condition);
      }
    };
  }

  /**
   * An expectation for checking if the given element is selected.
   *
   * @param element the WebElement
   * @return true once the element is selected
   */
  public static ExpectedCondition<Boolean> elementToBeSelected(final WebElement element) {
    return elementSelectionStateToBe(element, true);
  }

  /**
   * An expectation for checking the selection state of the given element.
   *
   * @param element the WebElement
   * @param selected the desired selection state
   * @return true once the element's selection state matches
   */
  public static ExpectedCondition<Boolean> elementSelectionStateToBe(final WebElement element,
                                                                    final boolean selected) {
    return new ExpectedCondition<Boolean>() {
      @Override
      public Boolean apply(WebDriver driver) {
        return element.isSelected() == selected;
      }

      @Override
      public String toString() {
        return String.format("element (%s) to %sbe selected", element, (selected ? "" : "not "));
      }
    };
  }

  /**
   * An expectation for checking if the element found by the locator is selected.
   *
   * @param locator used to find the element
   * @return true once the first element located by locator is selected
   */
  public static ExpectedCondition<Boolean> elementToBeSelected(final By locator) {
    return elementSelectionStateToBe(locator, true);
  }

  /**
   * An expectation for checking the selection state of the element found by the
   * locator.
   *
   * @param locator used to find the element
   * @param selected the desired selection state
   * @return true once the first located element's selection state matches
   */
  public static ExpectedCondition<Boolean> elementSelectionStateToBe(final By locator,
                                                                    final boolean selected) {
    return new ExpectedCondition<Boolean>() {
      @Override
      public Boolean apply(WebDriver driver) {
        try {
          WebElement element = driver.findElement(locator);
          return element.isSelected() == selected;
        } catch (StaleElementReferenceException e) {
          return null;
        }
      }

      @Override
      public String toString() {
        return String.format("element found by %s to %sbe selected",
            locator, (selected ? "" : "not "));
      }
    };
  }

  /**
   * An expectation for an alert to be present.
   *
   * @return the Alert once it is present
   */
  public static ExpectedCondition<Alert> alertIsPresent() {
    return new ExpectedCondition<Alert>() {
      @Override
      public Alert apply(WebDriver driver) {
        try {
          return driver.switchTo().alert();
        } catch (NoAlertPresentException e) {
          return null;
        }
      }

      @Override
      public String toString() {
        return "alert to be present";
      }
    };
  }

  /**
   * An expectation with the logical opposite condition of the given condition.
   *
   * Note that if the Condition your are inverting throws an exception that is
   * caught by the Ignored Exceptions, the inversion will not take place and lead
   * to confusing results.
   *
   * @param condition the condition to invert
   * @return true once the wrapped condition yields null or false
   */
  public static ExpectedCondition<Boolean> not(final ExpectedCondition<?> condition) {
    return new ExpectedCondition<Boolean>() {
      @Override
      public Boolean apply(WebDriver driver) {
        Object result = condition.apply(driver);
        return result == null || result == Boolean.FALSE;
      }

      @Override
      public String toString() {
        return "condition to not be valid: " + condition;
      }
    };
  }

  /**
   * Looks up an element. Logs and re-throws WebDriverException if thrown. <p/>
   * Method exists to gather data for http://code.google.com/p/selenium/issues/detail?id=1800
   */
  private static WebElement findElement(By by, WebDriver driver) {
    try {
      return driver.findElement(by);
    } catch (NoSuchElementException e) {
      // Deliberately rethrown without logging: "not found" is an expected,
      // frequent outcome while a condition is polled.
      throw e;
    } catch (WebDriverException e) {
      log.log(Level.WARNING,
          String.format("WebDriverException thrown by findElement(%s)", by), e);
      throw e;
    }
  }

  /**
   * @see #findElement(By, WebDriver)
   */
  private static List<WebElement> findElements(By by, WebDriver driver) {
    try {
      return driver.findElements(by);
    } catch (WebDriverException e) {
      // Fixed log message: previously claimed the exception came from
      // findElement, which made these warnings misleading.
      log.log(Level.WARNING,
          String.format("WebDriverException thrown by findElements(%s)", by), e);
      throw e;
    }
  }
}
| |
/**
* This class is generated by jOOQ
*/
package org.openforis.calc.persistence.jooq.tables;
import java.util.Arrays;
import java.util.List;
import javax.annotation.Generated;
import org.jooq.Field;
import org.jooq.ForeignKey;
import org.jooq.Identity;
import org.jooq.Table;
import org.jooq.TableField;
import org.jooq.UniqueKey;
import org.jooq.impl.TableImpl;
import org.openforis.calc.persistence.jooq.CalcSchema;
import org.openforis.calc.persistence.jooq.Keys;
import org.openforis.calc.persistence.jooq.tables.records.EntityRecord;
/**
* This class is generated by jOOQ.
*/
@Generated(
value = {
"http://www.jooq.org",
"jOOQ version:3.6.2"
},
comments = "This class is generated by jOOQ"
)
@SuppressWarnings({ "all", "unchecked", "rawtypes" })
public class EntityTable extends TableImpl<EntityRecord> {

	// NOTE(review): this file is generated by jOOQ from the calc.entity table.
	// Do not hand-edit the field/key definitions below; regenerate instead.

	private static final long serialVersionUID = 1742217451;

	/**
	 * The reference instance of <code>calc.entity</code>
	 */
	public static final EntityTable ENTITY = new EntityTable();

	/**
	 * The class holding records for this type
	 */
	@Override
	public Class<EntityRecord> getRecordType() {
		return EntityRecord.class;
	}

	/**
	 * The column <code>calc.entity.id</code>. Primary key, database-generated.
	 */
	public final TableField<EntityRecord, Integer> ID = createField("id", org.jooq.impl.SQLDataType.INTEGER.nullable(false).defaulted(true), this, "");

	/**
	 * The column <code>calc.entity.workspace_id</code>.
	 */
	public final TableField<EntityRecord, Integer> WORKSPACE_ID = createField("workspace_id", org.jooq.impl.SQLDataType.INTEGER.nullable(false), this, "");

	/**
	 * The column <code>calc.entity.parent_entity_id</code>.
	 */
	public final TableField<EntityRecord, Integer> PARENT_ENTITY_ID = createField("parent_entity_id", org.jooq.impl.SQLDataType.INTEGER, this, "");

	/**
	 * The column <code>calc.entity.name</code>.
	 */
	public final TableField<EntityRecord, String> NAME = createField("name", org.jooq.impl.SQLDataType.VARCHAR.length(255).nullable(false), this, "");

	/**
	 * The column <code>calc.entity.data_table</code>.
	 */
	public final TableField<EntityRecord, String> DATA_TABLE = createField("data_table", org.jooq.impl.SQLDataType.VARCHAR.length(255).nullable(false), this, "");

	/**
	 * The column <code>calc.entity.x_column</code>.
	 */
	public final TableField<EntityRecord, String> X_COLUMN = createField("x_column", org.jooq.impl.SQLDataType.VARCHAR.length(255), this, "");

	/**
	 * The column <code>calc.entity.y_column</code>.
	 */
	public final TableField<EntityRecord, String> Y_COLUMN = createField("y_column", org.jooq.impl.SQLDataType.VARCHAR.length(255), this, "");

	/**
	 * The column <code>calc.entity.srs_column</code>.
	 */
	public final TableField<EntityRecord, String> SRS_COLUMN = createField("srs_column", org.jooq.impl.SQLDataType.VARCHAR.length(255), this, "");

	/**
	 * The column <code>calc.entity.location_column</code>.
	 */
	public final TableField<EntityRecord, String> LOCATION_COLUMN = createField("location_column", org.jooq.impl.SQLDataType.VARCHAR.length(255), this, "");

	/**
	 * The column <code>calc.entity.input</code>.
	 */
	public final TableField<EntityRecord, Boolean> INPUT = createField("input", org.jooq.impl.SQLDataType.BOOLEAN, this, "");

	/**
	 * The column <code>calc.entity.override</code>.
	 */
	public final TableField<EntityRecord, Boolean> OVERRIDE = createField("override", org.jooq.impl.SQLDataType.BOOLEAN, this, "");

	/**
	 * The column <code>calc.entity.id_column</code>.
	 */
	public final TableField<EntityRecord, String> ID_COLUMN = createField("id_column", org.jooq.impl.SQLDataType.VARCHAR.length(255), this, "");

	/**
	 * The column <code>calc.entity.parent_id_column</code>.
	 */
	public final TableField<EntityRecord, String> PARENT_ID_COLUMN = createField("parent_id_column", org.jooq.impl.SQLDataType.VARCHAR.length(255), this, "");

	/**
	 * The column <code>calc.entity.sampling_unit</code>.
	 */
	public final TableField<EntityRecord, Boolean> SAMPLING_UNIT = createField("sampling_unit", org.jooq.impl.SQLDataType.BOOLEAN.defaulted(true), this, "");

	/**
	 * The column <code>calc.entity.unit_of_analysis</code>.
	 */
	public final TableField<EntityRecord, Boolean> UNIT_OF_ANALYSIS = createField("unit_of_analysis", org.jooq.impl.SQLDataType.BOOLEAN.defaulted(true), this, "");

	/**
	 * The column <code>calc.entity.cluster_variable_id</code>.
	 */
	public final TableField<EntityRecord, Integer> CLUSTER_VARIABLE_ID = createField("cluster_variable_id", org.jooq.impl.SQLDataType.INTEGER, this, "");

	/**
	 * The column <code>calc.entity.unit_no_variable_id</code>.
	 */
	public final TableField<EntityRecord, Integer> UNIT_NO_VARIABLE_ID = createField("unit_no_variable_id", org.jooq.impl.SQLDataType.INTEGER, this, "");

	/**
	 * The column <code>calc.entity.original_id</code>.
	 */
	public final TableField<EntityRecord, Integer> ORIGINAL_ID = createField("original_id", org.jooq.impl.SQLDataType.INTEGER, this, "");

	/**
	 * The column <code>calc.entity.caption</code>.
	 */
	public final TableField<EntityRecord, String> CAPTION = createField("caption", org.jooq.impl.SQLDataType.VARCHAR.length(255), this, "");

	/**
	 * The column <code>calc.entity.description</code>.
	 */
	public final TableField<EntityRecord, String> DESCRIPTION = createField("description", org.jooq.impl.SQLDataType.VARCHAR.length(1024), this, "");

	/**
	 * The column <code>calc.entity.sort_order</code>.
	 */
	public final TableField<EntityRecord, Integer> SORT_ORDER = createField("sort_order", org.jooq.impl.SQLDataType.INTEGER.nullable(false), this, "");

	/**
	 * The column <code>calc.entity.plot_area_script</code>.
	 */
	public final TableField<EntityRecord, String> PLOT_AREA_SCRIPT = createField("plot_area_script", org.jooq.impl.SQLDataType.CLOB, this, "");

	/**
	 * Create a <code>calc.entity</code> table reference
	 */
	public EntityTable() {
		this("entity", null);
	}

	/**
	 * Create an aliased <code>calc.entity</code> table reference
	 */
	public EntityTable(String alias) {
		this(alias, ENTITY);
	}

	// Internal aliasing constructor chain used by as()/rename().
	private EntityTable(String alias, Table<EntityRecord> aliased) {
		this(alias, aliased, null);
	}

	private EntityTable(String alias, Table<EntityRecord> aliased, Field<?>[] parameters) {
		super(alias, CalcSchema.CALC, aliased, parameters, "");
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public Identity<EntityRecord, Integer> getIdentity() {
		return Keys.IDENTITY_ENTITY;
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public UniqueKey<EntityRecord> getPrimaryKey() {
		return Keys.ENTITY_PKEY;
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public List<UniqueKey<EntityRecord>> getKeys() {
		return Arrays.<UniqueKey<EntityRecord>>asList(Keys.ENTITY_PKEY, Keys.ENTITY_WORKSPACE_ID_NAME_KEY);
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public List<ForeignKey<EntityRecord, ?>> getReferences() {
		return Arrays.<ForeignKey<EntityRecord, ?>>asList(Keys.ENTITY__ENTITY_WORKSPACE_FKEY, Keys.ENTITY__ENTITY_PARENT_FKEY);
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public EntityTable as(String alias) {
		return new EntityTable(alias, this);
	}

	/**
	 * Rename this table
	 */
	public EntityTable rename(String name) {
		return new EntityTable(name, null);
	}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.server;
import com.facebook.presto.AbstractTestSampledQueries;
import com.facebook.presto.client.ClientSession;
import com.facebook.presto.client.Column;
import com.facebook.presto.client.QueryError;
import com.facebook.presto.client.QueryResults;
import com.facebook.presto.client.StatementClient;
import com.facebook.presto.metadata.AllNodes;
import com.facebook.presto.metadata.QualifiedTableName;
import com.facebook.presto.metadata.QualifiedTablePrefix;
import com.facebook.presto.server.testing.TestingPrestoServer;
import com.facebook.presto.spi.TableHandle;
import com.facebook.presto.sql.analyzer.Session;
import com.facebook.presto.tpch.SampledTpchPlugin;
import com.facebook.presto.tpch.TpchMetadata;
import com.facebook.presto.tpch.TpchPlugin;
import com.facebook.presto.tuple.TupleInfo;
import com.facebook.presto.tuple.TupleInfo.Type;
import com.facebook.presto.util.MaterializedResult;
import com.facebook.presto.util.MaterializedTuple;
import com.google.common.base.Function;
import com.google.common.base.Joiner;
import com.google.common.base.Optional;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.inject.Module;
import io.airlift.http.client.AsyncHttpClient;
import io.airlift.http.client.HttpClientConfig;
import io.airlift.http.client.jetty.JettyHttpClient;
import io.airlift.json.JsonCodec;
import io.airlift.log.Logger;
import io.airlift.testing.Closeables;
import io.airlift.units.Duration;
import org.intellij.lang.annotations.Language;
import org.testng.annotations.Test;
import java.net.URI;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import static com.facebook.presto.sql.analyzer.Session.DEFAULT_CATALOG;
import static com.facebook.presto.sql.analyzer.Session.DEFAULT_SCHEMA;
import static com.facebook.presto.util.MaterializedResult.DEFAULT_PRECISION;
import static com.facebook.presto.util.Types.checkType;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.collect.Iterables.transform;
import static io.airlift.json.JsonCodec.jsonCodec;
import static io.airlift.testing.Assertions.assertLessThan;
import static io.airlift.units.Duration.nanosSince;
import static java.lang.String.format;
import static java.util.Collections.nCopies;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertTrue;
/**
 * End-to-end tests that run queries against a small multi-node Presto cluster
 * (one coordinator plus two workers) backed by an in-process discovery server.
 * Results are fetched over HTTP via {@code StatementClient} and materialized
 * for comparison against the expected results produced by the base class.
 */
public class TestDistributedQueries
        extends AbstractTestSampledQueries
{
    private static final Session SESSION = new Session("user", "test", DEFAULT_CATALOG, "test", null, null);
    private static final String ENVIRONMENT = "testing";
    private static final Logger log = Logger.get(TestDistributedQueries.class.getSimpleName());

    private final JsonCodec<QueryResults> queryResultsCodec = jsonCodec(QueryResults.class);

    private TestingPrestoServer coordinator;
    private List<TestingPrestoServer> servers;
    private AsyncHttpClient httpClient;
    private TestingDiscoveryServer discoveryServer;

    @Test(expectedExceptions = RuntimeException.class, expectedExceptionsMessageRegExp = "statement is too large \\(stack overflow during analysis\\)")
    public void testLargeQueryFailure()
            throws Exception
    {
        // 1000 conjuncts is deep enough to overflow the analyzer's stack
        assertQuery("SELECT " + Joiner.on(" AND ").join(nCopies(1000, "1 = 1")), "SELECT true");
    }

    @Test
    public void testLargeQuerySuccess()
            throws Exception
    {
        // 500 conjuncts must still be analyzable
        assertQuery("SELECT " + Joiner.on(" AND ").join(nCopies(500, "1 = 1")), "SELECT true");
    }

    @Test
    public void testTableSampleSystem()
            throws Exception
    {
        int total = computeActual("SELECT orderkey FROM orders").getMaterializedTuples().size();

        // System sampling is random: retry until we observe a strict subset (0 < size < total)
        boolean sampleSizeFound = false;
        for (int i = 0; i < 100; i++) {
            int sampleSize = computeActual("SELECT orderkey FROM ORDERS TABLESAMPLE SYSTEM (50)").getMaterializedTuples().size();
            if (sampleSize > 0 && sampleSize < total) {
                sampleSizeFound = true;
                break;
            }
        }
        assertTrue(sampleSizeFound, "Table sample returned unexpected number of rows");
    }

    @Test
    public void testTableSampleSystemBoundaryValues()
            throws Exception
    {
        // 100% sampling must be a subset of all rows; 0% sampling must be empty
        MaterializedResult fullSample = computeActual("SELECT orderkey FROM orders TABLESAMPLE SYSTEM (100)");
        MaterializedResult emptySample = computeActual("SELECT orderkey FROM orders TABLESAMPLE SYSTEM (0)");
        MaterializedResult all = computeActual("SELECT orderkey FROM orders");

        assertTrue(all.getMaterializedTuples().containsAll(fullSample.getMaterializedTuples()));
        assertEquals(emptySample.getMaterializedTuples().size(), 0);
    }

    @Test
    public void testCreateTableAsSelect()
            throws Exception
    {
        assertCreateTable(
                "test_simple",
                "SELECT orderkey, totalprice, orderdate FROM orders",
                "SELECT count(*) FROM orders");
    }

    @Test
    public void testCreateTableAsSelectGroupBy()
            throws Exception
    {
        assertCreateTable(
                "test_group",
                "SELECT orderstatus, sum(totalprice) x FROM orders GROUP BY orderstatus",
                "SELECT count(DISTINCT orderstatus) FROM orders");
    }

    @Test
    public void testCreateTableAsSelectLimit()
            throws Exception
    {
        assertCreateTable(
                "test_limit",
                "SELECT orderkey FROM orders ORDER BY orderkey LIMIT 10",
                "SELECT 10");
    }

    @Test
    public void testCreateTableAsSelectJoin()
            throws Exception
    {
        assertCreateTable(
                "test_join",
                "SELECT count(*) x FROM lineitem JOIN orders ON lineitem.orderkey = orders.orderkey",
                "SELECT 1");
    }

    @Test
    public void testCreateSampledTableAsSelectLimit()
            throws Exception
    {
        // sampled catalog doubles each row, hence the UNION ALL in the expected query
        assertCreateTable(
                "test_limit_sampled",
                "SELECT orderkey FROM tpch_sampled.tiny.orders ORDER BY orderkey LIMIT 10",
                "SELECT orderkey FROM (SELECT orderkey FROM orders) UNION ALL (SELECT orderkey FROM orders) ORDER BY orderkey LIMIT 10",
                "SELECT 10");
    }

    private void assertCreateTable(String table, @Language("SQL") String query, @Language("SQL") String rowCountQuery)
            throws Exception
    {
        assertCreateTable(table, query, query, rowCountQuery);
    }

    /**
     * Runs {@code CREATE TABLE ... AS query}, verifies the reported row count
     * and the table contents, and always drops the table afterwards so tests
     * do not leak state into one another.
     */
    private void assertCreateTable(String table, @Language("SQL") String query, @Language("SQL") String expectedQuery, @Language("SQL") String rowCountQuery)
            throws Exception
    {
        try {
            assertQuery("CREATE TABLE " + table + " AS " + query, rowCountQuery);
            assertQuery("SELECT * FROM " + table, expectedQuery);
        }
        finally {
            QualifiedTableName name = new QualifiedTableName(DEFAULT_CATALOG, DEFAULT_SCHEMA, table);
            Optional<TableHandle> handle = coordinator.getMetadata().getTableHandle(name);
            if (handle.isPresent()) {
                coordinator.getMetadata().dropTable(handle.get());
            }
        }
    }

    @Override
    protected int getNodeCount()
    {
        return 3;
    }

    @Override
    protected Session setUpQueryFramework()
            throws Exception
    {
        try {
            discoveryServer = new TestingDiscoveryServer(ENVIRONMENT);
            coordinator = createTestingPrestoServer(discoveryServer.getBaseUrl(), true);
            servers = ImmutableList.<TestingPrestoServer>builder()
                    .add(coordinator)
                    .add(createTestingPrestoServer(discoveryServer.getBaseUrl(), false))
                    .add(createTestingPrestoServer(discoveryServer.getBaseUrl(), false))
                    .build();
        }
        catch (Exception e) {
            // partial start-up: release whatever was created before re-throwing
            tearDownQueryFramework();
            throw e;
        }

        // generous timeouts so slow CI machines do not produce spurious failures
        this.httpClient = new JettyHttpClient(
                new HttpClientConfig()
                        .setConnectTimeout(new Duration(1, TimeUnit.DAYS))
                        .setReadTimeout(new Duration(10, TimeUnit.DAYS)));

        // wait (bounded) until every node has discovered every other node
        long start = System.nanoTime();
        while (!allNodesGloballyVisible()) {
            assertLessThan(nanosSince(start), new Duration(10, SECONDS));
            MILLISECONDS.sleep(10);
        }

        for (TestingPrestoServer server : servers) {
            server.getMetadata().addFunctions(CUSTOM_FUNCTIONS);
        }

        log.info("Loading data...");
        long startTime = System.nanoTime();
        distributeData("tpch", TpchMetadata.TINY_SCHEMA_NAME, getClientSession());
        distributeData("tpch_sampled", TpchMetadata.TINY_SCHEMA_NAME, getSampledClientSession());
        log.info("Loading complete in %s", nanosSince(startTime).toString(SECONDS));

        return SESSION;
    }

    private boolean allNodesGloballyVisible()
    {
        for (TestingPrestoServer server : servers) {
            AllNodes allNodes = server.refreshNodes();
            if (!allNodes.getInactiveNodes().isEmpty() ||
                    (allNodes.getActiveNodes().size() != servers.size())) {
                return false;
            }
        }
        return true;
    }

    @SuppressWarnings("deprecation")
    @Override
    protected void tearDownQueryFramework()
            throws Exception
    {
        if (servers != null) {
            for (TestingPrestoServer server : servers) {
                Closeables.closeQuietly(server);
            }
        }
        // previously leaked: the HTTP client holds threads and sockets until closed
        Closeables.closeQuietly(httpClient);
        Closeables.closeQuietly(discoveryServer);
    }

    /**
     * Copies every table of the given catalog/schema into native tables via
     * {@code CREATE TABLE ... AS SELECT *}, using the supplied client session.
     */
    private void distributeData(String catalog, String schema, ClientSession session)
            throws Exception
    {
        for (QualifiedTableName table : coordinator.getMetadata().listTables(new QualifiedTablePrefix(catalog, schema))) {
            if (table.getTableName().equalsIgnoreCase("dual")) {
                continue;
            }
            log.info("Running import for %s", table.getTableName());
            @Language("SQL") String sql = format("CREATE TABLE %s AS SELECT * FROM %s", table.getTableName(), table);
            long rows = checkType(compute(sql, session).getMaterializedTuples().get(0).getField(0), Long.class, "rows");
            log.info("Imported %s rows for %s", rows, table.getTableName());
        }
    }

    protected ClientSession getClientSession()
    {
        return new ClientSession(coordinator.getBaseUrl(), SESSION.getUser(), SESSION.getSource(), SESSION.getCatalog(), SESSION.getSchema(), true);
    }

    protected ClientSession getSampledClientSession()
    {
        // same as getClientSession() but targeting the "sampled" schema
        return new ClientSession(coordinator.getBaseUrl(), SESSION.getUser(), SESSION.getSource(), SESSION.getCatalog(), "sampled", true);
    }

    @Override
    protected MaterializedResult computeActualSampled(@Language("SQL") String sql)
    {
        return compute(sql, getSampledClientSession());
    }

    @Override
    protected MaterializedResult computeActual(@Language("SQL") String sql)
    {
        return compute(sql, getClientSession());
    }

    /**
     * Executes the query over HTTP and materializes all result rows.
     * Throws the server-side failure (when available) on query failure.
     */
    private MaterializedResult compute(@Language("SQL") String sql, ClientSession session)
    {
        try (StatementClient client = new StatementClient(httpClient, queryResultsCodec, session, sql)) {
            AtomicBoolean loggedUri = new AtomicBoolean(false);
            ImmutableList.Builder<MaterializedTuple> rows = ImmutableList.builder();
            List<TupleInfo> types = null;

            while (client.isValid()) {
                QueryResults results = client.current();
                // log the query info URI once per query for debugging
                if (!loggedUri.getAndSet(true)) {
                    log.info("Query %s: %s?pretty", results.getId(), results.getInfoUri());
                }
                // column metadata may only become available on a later response
                if ((types == null) && (results.getColumns() != null)) {
                    types = getTupleInfos(results.getColumns());
                }
                if (results.getData() != null) {
                    rows.addAll(transform(results.getData(), dataToTuple(types)));
                }
                client.advance();
            }

            if (!client.isFailed()) {
                return new MaterializedResult(rows.build(), types);
            }

            QueryError error = client.finalResults().getError();
            assert error != null;
            if (error.getFailureInfo() != null) {
                // re-throw the remote exception with its original stack trace
                throw error.getFailureInfo().toException();
            }
            throw new RuntimeException("Query failed: " + error.getMessage());
        }
    }

    private static List<TupleInfo> getTupleInfos(List<Column> columns)
    {
        return ImmutableList.copyOf(transform(columns, columnTupleInfoGetter()));
    }

    /** Maps a wire-protocol column type name to its {@code TupleInfo}. */
    private static Function<Column, TupleInfo> columnTupleInfoGetter()
    {
        return new Function<Column, TupleInfo>()
        {
            @Override
            public TupleInfo apply(Column column)
            {
                String type = column.getType();
                switch (type) {
                    case "boolean":
                        return TupleInfo.SINGLE_BOOLEAN;
                    case "bigint":
                        return TupleInfo.SINGLE_LONG;
                    case "double":
                        return TupleInfo.SINGLE_DOUBLE;
                    case "varchar":
                        return TupleInfo.SINGLE_VARBINARY;
                }
                throw new AssertionError("Unhandled type: " + type);
            }
        };
    }

    /**
     * Converts one row of JSON-decoded values into a {@code MaterializedTuple},
     * normalizing numeric types (JSON numbers may arrive as Integer or Double).
     */
    private static Function<List<Object>, MaterializedTuple> dataToTuple(final List<TupleInfo> tupleInfos)
    {
        return new Function<List<Object>, MaterializedTuple>()
        {
            @Override
            public MaterializedTuple apply(List<Object> data)
            {
                checkArgument(data.size() == tupleInfos.size(), "columns size does not match tuple infos");
                List<Object> row = new ArrayList<>();
                for (int i = 0; i < data.size(); i++) {
                    Object value = data.get(i);
                    if (value == null) {
                        row.add(null);
                        continue;
                    }
                    Type type = tupleInfos.get(i).getType();
                    switch (type) {
                        case BOOLEAN:
                            row.add(value);
                            break;
                        case FIXED_INT_64:
                            row.add(((Number) value).longValue());
                            break;
                        case DOUBLE:
                            row.add(((Number) value).doubleValue());
                            break;
                        case VARIABLE_BINARY:
                            row.add(value);
                            break;
                        default:
                            throw new AssertionError("unhandled type: " + type);
                    }
                }
                return new MaterializedTuple(DEFAULT_PRECISION, row);
            }
        };
    }

    /** Starts a single Presto server (coordinator or worker) registered with discovery. */
    private static TestingPrestoServer createTestingPrestoServer(URI discoveryUri, boolean coordinator)
            throws Exception
    {
        Map<String, String> properties = ImmutableMap.<String, String>builder()
                .put("query.client.timeout", "10m")
                .put("exchange.http-client.read-timeout", "1h")
                .put("datasources", "native,tpch,tpch_sampled")
                .build();

        TestingPrestoServer server = new TestingPrestoServer(coordinator, properties, ENVIRONMENT, discoveryUri, ImmutableList.<Module>of());
        server.installPlugin(new TpchPlugin(), "tpch", "tpch");
        server.installPlugin(new SampledTpchPlugin(), "tpch_sampled", "tpch_sampled");
        return server;
    }
}
| |
/*
* Copyright 2014 JBoss by Red Hat.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jbpm.kie.services.test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.kie.scanner.MavenRepository.getMavenRepository;
import java.io.File;
import java.io.FileOutputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.TimeUnit;
import org.drools.compiler.kie.builder.impl.InternalKieModule;
import org.jbpm.kie.services.impl.KModuleDeploymentUnit;
import org.jbpm.kie.services.impl.store.DeploymentStore;
import org.jbpm.kie.services.impl.store.DeploymentSyncInvoker;
import org.jbpm.kie.services.impl.store.DeploymentSynchronizer;
import org.jbpm.kie.test.util.AbstractKieServicesBaseTest;
import org.jbpm.services.api.model.DeployedUnit;
import org.jbpm.services.api.model.DeploymentUnit;
import org.jbpm.shared.services.impl.TransactionalCommandService;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.kie.api.KieServices;
import org.kie.api.builder.ReleaseId;
import org.kie.scanner.MavenRepository;
/**
 * Tests that the {@code DeploymentSyncInvoker} picks up enable/disable and
 * activate/deactivate changes written to the {@code DeploymentStore} and
 * reflects them in the deployment service, without an explicit deploy call.
 */
public class DeploymentServiceWithSyncTest extends AbstractKieServicesBaseTest {

    // units registered here are undeployed in cleanup()
    protected List<DeploymentUnit> units = new ArrayList<DeploymentUnit>();

    protected DeploymentStore store;
    protected DeploymentSyncInvoker invoker;
    protected TransactionalCommandService commandService;

    public void setCommandService(TransactionalCommandService commandService) {
        this.commandService = commandService;
    }

    @Before
    public void prepare() {
        configureServices();
        KieServices ks = KieServices.Factory.get();

        // first kjar: the general test processes
        ReleaseId releaseId = ks.newReleaseId(GROUP_ID, ARTIFACT_ID, VERSION);
        List<String> processes = new ArrayList<String>();
        processes.add("repo/processes/general/customtask.bpmn");
        processes.add("repo/processes/general/humanTask.bpmn");
        processes.add("repo/processes/general/signal.bpmn");
        processes.add("repo/processes/general/import.bpmn");
        processes.add("repo/processes/general/callactivity.bpmn");

        InternalKieModule kJar1 = createKieJar(ks, releaseId, processes);
        File pom = new File("target/kmodule", "pom.xml");
        writePom(pom, getPom(releaseId));

        MavenRepository repository = getMavenRepository();
        repository.deployArtifact(releaseId, kJar1, pom);

        // second kjar: the support process
        ReleaseId releaseIdSupport = ks.newReleaseId(GROUP_ID, "support", VERSION);
        List<String> processesSupport = new ArrayList<String>();
        processesSupport.add("repo/processes/support/support.bpmn");

        InternalKieModule kJar2 = createKieJar(ks, releaseIdSupport, processesSupport);
        File pom2 = new File("target/kmodule2", "pom.xml");
        writePom(pom2, getPom(releaseIdSupport));
        repository.deployArtifact(releaseIdSupport, kJar2, pom2);

        configureDeploymentSync();
    }

    /**
     * Writes the given pom content to disk. Uses try-with-resources so the
     * stream is closed even when the write fails, and fails fast instead of
     * silently swallowing errors: without the pom the artifact deployment
     * performed right afterwards cannot succeed anyway.
     */
    private static void writePom(File pom, String content) {
        pom.getParentFile().mkdirs();
        try (FileOutputStream fs = new FileOutputStream(pom)) {
            fs.write(content.getBytes());
        } catch (Exception e) {
            throw new RuntimeException("Unable to write pom file " + pom, e);
        }
    }

    /** Wires a store-backed synchronizer and starts polling every few seconds. */
    protected void configureDeploymentSync() {
        assertNotNull(deploymentService);
        store = new DeploymentStore();
        if (commandService == null) {
            commandService = new TransactionalCommandService(emf);
        }
        store.setCommandService(commandService);

        DeploymentSynchronizer sync = new DeploymentSynchronizer();
        sync.setDeploymentService(deploymentService);
        sync.setDeploymentStore(store);

        invoker = new DeploymentSyncInvoker(sync, 2L, 3L, TimeUnit.SECONDS);
        invoker.start();
    }

    @After
    public void cleanup() {
        if (invoker != null) {
            invoker.stop();
        }
        cleanupSingletonSessionId();
        if (units != null && !units.isEmpty()) {
            for (DeploymentUnit unit : units) {
                deploymentService.undeploy(unit);
            }
            units.clear();
        }
        close();
    }

    @Test
    public void testDeploymentOfProcessesBySync() throws Exception {
        Collection<DeployedUnit> deployed = deploymentService.getDeployedUnits();
        assertNotNull(deployed);
        assertEquals(0, deployed.size());

        KModuleDeploymentUnit unit = new KModuleDeploymentUnit(GROUP_ID, ARTIFACT_ID, VERSION);
        // sleeps give the periodic sync invoker time to run at least once
        Thread.sleep(3000);
        store.enableDeploymentUnit(unit);
        units.add(unit);

        Thread.sleep(3000);
        deployed = deploymentService.getDeployedUnits();
        assertNotNull(deployed);
        assertEquals(1, deployed.size());
    }

    @Test
    public void testUndeploymentOfProcessesBySync() throws Exception {
        Collection<DeployedUnit> deployed = deploymentService.getDeployedUnits();
        assertNotNull(deployed);
        assertEquals(0, deployed.size());

        KModuleDeploymentUnit unit = new KModuleDeploymentUnit(GROUP_ID, ARTIFACT_ID, VERSION);
        deploymentService.deploy(unit);
        units.add(unit);

        deployed = deploymentService.getDeployedUnits();
        assertNotNull(deployed);
        assertEquals(1, deployed.size());

        Thread.sleep(3000);
        store.disableDeploymentUnit(unit);

        Thread.sleep(3000);
        deployed = deploymentService.getDeployedUnits();
        assertNotNull(deployed);
        assertEquals(0, deployed.size());
    }

    @Test
    public void testDeactivateAndActivateOfProcessesBySync() throws Exception {
        Collection<DeployedUnit> deployed = deploymentService.getDeployedUnits();
        assertNotNull(deployed);
        assertEquals(0, deployed.size());

        KModuleDeploymentUnit unit = new KModuleDeploymentUnit(GROUP_ID, ARTIFACT_ID, VERSION);
        deploymentService.deploy(unit);
        units.add(unit);

        deployed = deploymentService.getDeployedUnits();
        assertNotNull(deployed);
        assertEquals(1, deployed.size());
        assertTrue(deployed.iterator().next().isActive());

        Thread.sleep(3000);
        store.deactivateDeploymentUnit(unit);

        Thread.sleep(3000);
        // deactivation keeps the unit deployed but marks it inactive
        deployed = deploymentService.getDeployedUnits();
        assertNotNull(deployed);
        assertEquals(1, deployed.size());
        assertFalse(deployed.iterator().next().isActive());

        store.activateDeploymentUnit(unit);

        Thread.sleep(3000);
        deployed = deploymentService.getDeployedUnits();
        assertNotNull(deployed);
        assertEquals(1, deployed.size());
        assertTrue(deployed.iterator().next().isActive());
    }

    @Test
    public void testDeploymentOfProcessesBySyncWithDisabledAttribute() throws Exception {
        Collection<DeployedUnit> deployed = deploymentService.getDeployedUnits();
        assertNotNull(deployed);
        assertEquals(0, deployed.size());

        KModuleDeploymentUnit unit = new KModuleDeploymentUnit(GROUP_ID, ARTIFACT_ID, VERSION);
        // sync=false tells the synchronizer to skip this unit entirely
        unit.addAttribute("sync", "false");
        Thread.sleep(3000);
        store.enableDeploymentUnit(unit);
        units.add(unit);

        Thread.sleep(3000);
        deployed = deploymentService.getDeployedUnits();
        assertNotNull(deployed);
        assertEquals(0, deployed.size());
    }
}
| |
/******************************************************************************
* Confidential Proprietary *
* (c) Copyright Haifeng Li 2011, All Rights Reserved *
******************************************************************************/
package smile.math;
import java.util.Arrays;
/**
 * Histogram utilities. A histogram is a graphical display of tabulated
 * frequencies, shown as bars. It shows what proportion of cases fall into
 * each of several categories: it is a form of data binning. The categories
 * are usually specified as non-overlapping intervals of some variable.
 * <p>
 * There is no "best" number of bins, and different bin sizes can reveal
 * different features of the data. Depending on the actual data distribution
 * and the goals of the analysis, different bin widths may be appropriate,
 * so experimentation is usually needed to determine an appropriate width.
 * <p>
 * Note that this class provides only tools to choose the bin width or the
 * number of bins and frequency counting. It does NOT provide plotting
 * services.
 * <p>
 * NOTE: the unqualified {@code Math} calls below resolve to
 * {@code smile.math.Math} (same package), which provides the array
 * overloads of {@code min}, {@code max}, {@code log2} and {@code sd}.
 *
 * @author Haifeng Li
 */
public class Histogram {

    /**
     * Generate the histogram of given data. The number of bins k is decided by
     * square-root choice.
     * @param data the data points.
     * @return a 3-by-k bins array of which first row is the lower bound of bins,
     * second row is the upper bound of bins, and the third row is the frequency
     * count.
     */
    public static double[][] histogram(int[] data) {
        return histogram(data, bins(data.length));
    }

    /**
     * Generate the histogram of given data. The number of bins k is decided by
     * square-root choice.
     * @param data the data points.
     * @return a 3-by-k bins array of which first row is the lower bound of bins,
     * second row is the upper bound of bins, and the third row is the frequency
     * count.
     */
    public static double[][] histogram(float[] data) {
        return histogram(data, bins(data.length));
    }

    /**
     * Generate the histogram of given data. The number of bins k is decided by
     * square-root choice.
     * @param data the data points.
     * @return a 3-by-k bins array of which first row is the lower bound of bins,
     * second row is the upper bound of bins, and the third row is the frequency
     * count.
     */
    public static double[][] histogram(double[] data) {
        return histogram(data, bins(data.length));
    }

    /**
     * Generate the histogram of k bins. For integer data the bin width is
     * forced to a whole number; k may be grown until the span divides evenly.
     * @param data the data points.
     * @param k the number of bins.
     * @throws IllegalArgumentException if k &lt;= 1.
     * @return a 3-by-k bins array of which first row is the lower bound of bins,
     * second row is the upper bound of bins, and the third row is the frequency
     * count.
     */
    public static double[][] histogram(int[] data, int k) {
        if (k <= 1) {
            throw new IllegalArgumentException("Invalid number of bins: " + k);
        }

        int min = Math.min(data);
        int max = Math.max(data);
        int span = max - min + 1;

        // find an integer bin width; if span is not divisible by k, bump k up
        int width = 1;
        int residual = 1;
        while (residual > 0) {
            width = span / k;
            if (width == 0) {
                width = 1;
            }

            residual = span - k * width;
            if (residual > 0) {
                k += 1;
            }
        }

        // shift breaks by half a bin so integer values fall at bin centers
        double center = width / 2.0;
        double[] breaks = new double[k + 1];
        breaks[0] = min - center;
        for (int i = 1; i <= k; i++) {
            breaks[i] = breaks[i - 1] + width;
        }

        return histogram(data, breaks);
    }

    /**
     * Generate the histogram of n bins.
     * @param data the data points.
     * @param breaks an array of size k+1 giving the breakpoints between
     * histogram cells. Must be in ascending order.
     * @throws IllegalArgumentException if breaks defines fewer than 2 bins.
     * @return a 3-by-n bins array of which first row is the lower bound of bins,
     * second row is the upper bound of bins, and the third row is the frequency
     * count.
     */
    public static double[][] histogram(int[] data, double[] breaks) {
        int k = breaks.length - 1;
        if (k <= 1) {
            throw new IllegalArgumentException("Invalid number of bins: " + k);
        }

        double[][] freq = new double[3][k];
        for (int i = 0; i < k; i++) {
            freq[0][i] = breaks[i];
            freq[1][i] = breaks[i + 1];
            freq[2][i] = 0;
        }

        for (int d : data) {
            int j = Arrays.binarySearch(breaks, d);

            // exact match on the last break belongs to the last bin
            if (j >= k) {
                j = k - 1;
            }

            // negative result encodes the insertion point: map to containing bin
            if (j < -1 && j >= -breaks.length) {
                j = -j - 2;
            }

            // values outside [breaks[0], breaks[k]] are dropped
            if (j >= 0) {
                freq[2][j]++;
            }
        }

        return freq;
    }

    /**
     * Generate the histogram of n bins.
     * @param data the data points.
     * @param k the number of bins.
     * @throws IllegalArgumentException if k &lt;= 1.
     * @return a 3-by-k bins array of which first row is the lower bound of bins,
     * second row is the upper bound of bins, and the third row is the frequency
     * count.
     */
    public static double[][] histogram(float[] data, int k) {
        if (k <= 1) {
            throw new IllegalArgumentException("Invalid number of bins: " + k);
        }

        float min = Math.min(data);
        float max = Math.max(data);
        float span = max - min;
        if (span == 0) {
            // all values identical: pick an arbitrary nonzero span
            span = k;
        }

        float width = span / k;
        float[] breaks = new float[k + 1];
        breaks[0] = min;
        for (int i = 1; i < k; i++) {
            breaks[i] = breaks[i - 1] + width;
        }
        // set the last break exactly to max to avoid float accumulation error
        breaks[k] = max;

        return histogram(data, breaks);
    }

    /**
     * Generate the histogram of n bins.
     * @param data the data points.
     * @param breaks an array of size k+1 giving the breakpoints between
     * histogram cells. Must be in ascending order.
     * @throws IllegalArgumentException if breaks defines fewer than 2 bins.
     * @return a 3-by-k bins array of which first row is the lower bound of bins,
     * second row is the upper bound of bins, and the third row is the frequency
     * count.
     */
    public static double[][] histogram(float[] data, float[] breaks) {
        int k = breaks.length - 1;
        if (k <= 1) {
            throw new IllegalArgumentException("Invalid number of bins: " + k);
        }

        double[][] freq = new double[3][k];
        for (int i = 0; i < k; i++) {
            freq[0][i] = breaks[i];
            freq[1][i] = breaks[i + 1];
            freq[2][i] = 0.0f;
        }

        for (float d : data) {
            int j = Arrays.binarySearch(breaks, d);

            if (j >= k) {
                j = k - 1;
            }

            if (j < -1 && j >= -breaks.length) {
                j = -j - 2;
            }

            if (j >= 0) {
                freq[2][j]++;
            }
        }

        return freq;
    }

    /**
     * Generate the histogram of n bins.
     * @param data the data points.
     * @param k the number of bins.
     * @throws IllegalArgumentException if k &lt;= 1.
     * @return a 3-by-k array of which first row is the lower bound of bins,
     * second row is the upper bound of bins, and the third row is the frequency
     * count.
     */
    public static double[][] histogram(double[] data, int k) {
        // validation added for consistency with the int[] and float[] overloads
        if (k <= 1) {
            throw new IllegalArgumentException("Invalid number of bins: " + k);
        }

        double min = Math.min(data);
        double max = Math.max(data);
        double span = max - min;
        if (span == 0) {
            // all values identical: pick an arbitrary nonzero span
            span = k;
        }

        double width = span / k;
        double[] breaks = new double[k + 1];
        breaks[0] = min;
        for (int i = 1; i < k; i++) {
            breaks[i] = breaks[i - 1] + width;
        }
        // set the last break exactly to max to avoid float accumulation error
        breaks[k] = max;

        return histogram(data, breaks);
    }

    /**
     * Generate the histogram of n bins.
     * @param data the data points.
     * @param breaks an array of size k+1 giving the breakpoints between
     * histogram cells. Must be in ascending order.
     * @throws IllegalArgumentException if breaks defines fewer than 2 bins.
     * @return a 3-by-k bins array of which first row is the lower bound of bins,
     * second row is the upper bound of bins, and the third row is the frequency
     * count.
     */
    public static double[][] histogram(double[] data, double[] breaks) {
        int k = breaks.length - 1;
        if (k <= 1) {
            throw new IllegalArgumentException("Invalid number of bins: " + k);
        }

        double[][] freq = new double[3][k];
        for (int i = 0; i < k; i++) {
            freq[0][i] = breaks[i];
            freq[1][i] = breaks[i + 1];
            freq[2][i] = 0.0f;
        }

        for (double d : data) {
            int j = Arrays.binarySearch(breaks, d);

            if (j >= k) {
                j = k - 1;
            }

            if (j < -1 && j >= -breaks.length) {
                j = -j - 2;
            }

            if (j >= 0) {
                freq[2][j]++;
            }
        }

        return freq;
    }

    /**
     * Returns the breakpoints between histogram cells for a dataset based on a
     * suggested bin width h.
     * @param x the data set.
     * @param h the bin width.
     * @return the breakpoints between histogram cells
     */
    public static double[] breaks(double[] x, double h) {
        return breaks(Math.min(x), Math.max(x), h);
    }

    /**
     * Returns the breakpoints between histogram cells for a given range based
     * on a suggested bin width h.
     * @param min the lower bound of bins.
     * @param max the upper bound of bins.
     * @param h the bin width.
     * @throws IllegalArgumentException if h &lt;= 0 or min &gt; max.
     * @return the breakpoints between histogram cells
     */
    public static double[] breaks(double min, double max, double h) {
        if (h <= 0.0) {
            throw new IllegalArgumentException("Invalid bin width: " + h);
        }

        if (min > max) {
            throw new IllegalArgumentException("Invalid lower and upper bounds: " + min + " > " + max);
        }

        int k = (int) Math.ceil((max - min) / h);
        double[] breaks = new double[k + 1];
        // center the k*h span on [min, max], distributing the slack equally
        breaks[0] = min - (h * k - (max - min)) / 2;
        breaks[k] = max + (h * k - (max - min)) / 2;
        for (int i = 1; i < k; i++) {
            breaks[i] = breaks[i - 1] + h;
        }

        return breaks;
    }

    /**
     * Returns the breakpoints between histogram cells for a dataset.
     * @param x the data set.
     * @param k the number of bins.
     * @return the breakpoints between histogram cells
     */
    public static double[] breaks(double[] x, int k) {
        return breaks(Math.min(x), Math.max(x), k);
    }

    /**
     * Returns the breakpoints between histogram cells for a given range.
     * @param min the lower bound of bins.
     * @param max the upper bound of bins.
     * @param k the number of bins.
     * @throws IllegalArgumentException if k &lt;= 1 or min &gt; max.
     * @return the breakpoints between histogram cells
     */
    public static double[] breaks(double min, double max, int k) {
        if (k <= 1) {
            throw new IllegalArgumentException("Invalid number of bins: " + k);
        }

        if (min > max) {
            throw new IllegalArgumentException("Invalid lower and upper bounds: " + min + " > " + max);
        }

        double h = (max - min) / k;
        return breaks(min, max, h);
    }

    /**
     * Returns the number of bins for a data based on a suggested bin width h.
     * @param x the data set.
     * @param h the bin width.
     * @throws IllegalArgumentException if h &lt;= 0.
     * @return the number of bins k = ceil((max - min) / h)
     */
    public static int bins(double[] x, double h) {
        if (h <= 0.0) {
            throw new IllegalArgumentException("Invalid bin width: " + h);
        }

        double max = Math.max(x);
        double min = Math.min(x);

        return (int) Math.ceil((max - min) / h);
    }

    /**
     * Returns the number of bins by square-root rule, which takes the square
     * root of the number of data points in the sample (used by Excel histograms
     * and many others).
     * @param n the number of data points.
     * @return the number of bins (at least 5)
     */
    public static int bins(int n) {
        int k = (int) Math.sqrt(n);
        if (k < 5) k = 5;
        return k;
    }

    /**
     * Returns the number of bins by Sturges' rule k = ceil(log2(n) + 1).
     * @param n the number of data points.
     * @return the number of bins (at least 5)
     */
    public static int sturges(int n) {
        int k = (int) Math.ceil(Math.log2(n) + 1);
        if (k < 5) k = 5;
        return k;
    }

    /**
     * Returns the number of bins by Scott's rule h = 3.5 * &sigma; / (n<sup>1/3</sup>).
     * @param x the data set.
     * @return the number of bins
     */
    public static int scott(double[] x) {
        double h = Math.ceil(3.5 * Math.sd(x) / Math.pow(x.length, 1.0 / 3));
        return bins(x, h);
    }
}
| |
/* -*- Mode: java; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
*
* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is Rhino code, released
* May 6, 1999.
*
* The Initial Developer of the Original Code is
* Netscape Communications Corporation.
* Portions created by the Initial Developer are Copyright (C) 1997-1999
* the Initial Developer. All Rights Reserved.
*
* Contributor(s):
* Roger Lawrence
* Mike McCabe
* Igor Bukanov
* Ethan Hugg
* Bob Jervis
* Terry Lucas
* Milen Nankov
*
* Alternatively, the contents of this file may be used under the terms of
* the GNU General Public License Version 2 or later (the "GPL"), in which
* case the provisions of the GPL are applicable instead of those above. If
* you wish to allow use of your version of this file only under the terms of
* the GPL and not to allow others to use your version of this file under the
* MPL, indicate your decision by deleting the provisions above and replacing
* them with the notice and other provisions required by the GPL. If you do
* not delete the provisions above, a recipient may use your version of this
* file under either the MPL or the GPL.
*
* ***** END LICENSE BLOCK ***** */
package ed.ext.org.mozilla.javascript;
import java.io.*;
/**
* This class implements the JavaScript scanner.
*
* It is based on the C source files jsscan.c and jsscan.h
* in the jsref package.
*
* @see ed.ext.org.mozilla.javascript.Parser
*
* @author Mike McCabe
* @author Brendan Eich
*/
class TokenStream
{
/*
* For chars - because we need something out-of-range
* to check. (And checking EOF by exception is annoying.)
* Note distinction from EOF token type!
*/
private final static int
EOF_CHAR = -1;
TokenStream(Parser parser, Reader sourceReader, String sourceString,
int lineno)
{
this.parser = parser;
this.lineno = lineno;
if (sourceReader != null) {
if (sourceString != null) Kit.codeBug();
this.sourceReader = sourceReader;
this.sourceBuffer = new char[512];
this.sourceEnd = 0;
} else {
if (sourceString == null) Kit.codeBug();
this.sourceString = sourceString;
this.sourceEnd = sourceString.length();
}
this.sourceCursor = 0;
}
/* This function uses the cached op, string and number fields in
* TokenStream; if getToken has been called since the passed token
* was scanned, the op or string printed may be incorrect.
*/
String tokenToString(int token)
{
if (Token.printTrees) {
String name = Token.name(token);
switch (token) {
case Token.STRING:
case Token.REGEXP:
case Token.NAME:
return name + " `" + this.string + "'";
case Token.NUMBER:
return "NUMBER " + this.number;
}
return name;
}
return "";
}
static boolean isKeyword(String s)
{
return Token.EOF != stringToKeyword(s);
}
    /**
     * Maps an identifier string to its keyword token id, or Token.EOF (0)
     * when the string is not a keyword.  The switch between the
     * {@code #generated#} markers is produced by the string-id-map tool
     * (see the {@code #string_id_map#} marker); do not edit it by hand --
     * regenerate it if the keyword set changes.  Only the low byte of the
     * id is returned (see the final {@code id & 0xff}).
     */
    private static int stringToKeyword(String name)
    {
// #string_id_map#
// The following assumes that Token.EOF == 0
        final int
            Id_break         = Token.BREAK,
            Id_case          = Token.CASE,
            Id_continue      = Token.CONTINUE,
            Id_default       = Token.DEFAULT,
            Id_delete        = Token.DELPROP,
            Id_do            = Token.DO,
            Id_else          = Token.ELSE,
            Id_export        = Token.EXPORT,
            Id_false         = Token.FALSE,
            Id_for           = Token.FOR,
            Id_function      = Token.FUNCTION,
            Id_if            = Token.IF,
            Id_in            = Token.IN,
            Id_let           = Token.LET,
            Id_new           = Token.NEW,
            Id_null          = Token.NULL,
            Id_return        = Token.RETURN,
            Id_switch        = Token.SWITCH,
            Id_this          = Token.THIS,
            Id_true          = Token.TRUE,
            Id_typeof        = Token.TYPEOF,
            Id_var           = Token.VAR,
            Id_void          = Token.VOID,
            Id_while         = Token.WHILE,
            Id_with          = Token.WITH,
            Id_yield         = Token.YIELD,

            // the following are #ifdef RESERVE_JAVA_KEYWORDS in jsscan.c
            Id_abstract      = Token.RESERVED,
            Id_boolean       = Token.RESERVED,
            Id_byte          = Token.RESERVED,
            Id_catch         = Token.CATCH,
            Id_char          = Token.RESERVED,
            Id_class         = Token.RESERVED,
            Id_const         = Token.CONST,
            Id_debugger      = Token.DEBUGGER,
            Id_double        = Token.RESERVED,
            Id_enum          = Token.RESERVED,
            Id_extends       = Token.RESERVED,
            Id_final         = Token.RESERVED,
            Id_finally       = Token.FINALLY,
            Id_float         = Token.RESERVED,
            Id_goto          = Token.RESERVED,
            Id_implements    = Token.RESERVED,
            Id_import        = Token.IMPORT,
            Id_instanceof    = Token.INSTANCEOF,
            Id_int           = Token.RESERVED,
            Id_interface     = Token.RESERVED,
            Id_long          = Token.RESERVED,
            Id_native        = Token.RESERVED,
            Id_package       = Token.RESERVED,
            Id_private       = Token.RESERVED,
            Id_protected     = Token.RESERVED,
            Id_public        = Token.RESERVED,
            Id_short         = Token.RESERVED,
            Id_static        = Token.RESERVED,
            Id_super         = Token.RESERVED,
            Id_synchronized  = Token.RESERVED,
            Id_throw         = Token.THROW,
            Id_throws        = Token.RESERVED,
            Id_transient     = Token.RESERVED,
            Id_try           = Token.TRY,
            Id_volatile      = Token.RESERVED;

        int id;
        String s = name;
// #generated# Last update: 2007-04-18 13:53:30 PDT
        L0: { id = 0; String X = null; int c;
            L: switch (s.length()) {
            case 2: c=s.charAt(1);
                if (c=='f') { if (s.charAt(0)=='i') {id=Id_if; break L0;} }
                else if (c=='n') { if (s.charAt(0)=='i') {id=Id_in; break L0;} }
                else if (c=='o') { if (s.charAt(0)=='d') {id=Id_do; break L0;} }
                break L;
            case 3: switch (s.charAt(0)) {
                case 'f': if (s.charAt(2)=='r' && s.charAt(1)=='o') {id=Id_for; break L0;} break L;
                case 'i': if (s.charAt(2)=='t' && s.charAt(1)=='n') {id=Id_int; break L0;} break L;
                case 'l': if (s.charAt(2)=='t' && s.charAt(1)=='e') {id=Id_let; break L0;} break L;
                case 'n': if (s.charAt(2)=='w' && s.charAt(1)=='e') {id=Id_new; break L0;} break L;
                case 't': if (s.charAt(2)=='y' && s.charAt(1)=='r') {id=Id_try; break L0;} break L;
                case 'v': if (s.charAt(2)=='r' && s.charAt(1)=='a') {id=Id_var; break L0;} break L;
                } break L;
            case 4: switch (s.charAt(0)) {
                case 'b': X="byte";id=Id_byte; break L;
                case 'c': c=s.charAt(3);
                    if (c=='e') { if (s.charAt(2)=='s' && s.charAt(1)=='a') {id=Id_case; break L0;} }
                    else if (c=='r') { if (s.charAt(2)=='a' && s.charAt(1)=='h') {id=Id_char; break L0;} }
                    break L;
                case 'e': c=s.charAt(3);
                    if (c=='e') { if (s.charAt(2)=='s' && s.charAt(1)=='l') {id=Id_else; break L0;} }
                    else if (c=='m') { if (s.charAt(2)=='u' && s.charAt(1)=='n') {id=Id_enum; break L0;} }
                    break L;
                case 'g': X="goto";id=Id_goto; break L;
                case 'l': X="long";id=Id_long; break L;
                case 'n': X="null";id=Id_null; break L;
                case 't': c=s.charAt(3);
                    if (c=='e') { if (s.charAt(2)=='u' && s.charAt(1)=='r') {id=Id_true; break L0;} }
                    else if (c=='s') { if (s.charAt(2)=='i' && s.charAt(1)=='h') {id=Id_this; break L0;} }
                    break L;
                case 'v': X="void";id=Id_void; break L;
                case 'w': X="with";id=Id_with; break L;
                } break L;
            case 5: switch (s.charAt(2)) {
                case 'a': X="class";id=Id_class; break L;
                case 'e': c=s.charAt(0);
                    if (c=='b') { X="break";id=Id_break; }
                    else if (c=='y') { X="yield";id=Id_yield; }
                    break L;
                case 'i': X="while";id=Id_while; break L;
                case 'l': X="false";id=Id_false; break L;
                case 'n': c=s.charAt(0);
                    if (c=='c') { X="const";id=Id_const; }
                    else if (c=='f') { X="final";id=Id_final; }
                    break L;
                case 'o': c=s.charAt(0);
                    if (c=='f') { X="float";id=Id_float; }
                    else if (c=='s') { X="short";id=Id_short; }
                    break L;
                case 'p': X="super";id=Id_super; break L;
                case 'r': X="throw";id=Id_throw; break L;
                case 't': X="catch";id=Id_catch; break L;
                } break L;
            case 6: switch (s.charAt(1)) {
                case 'a': X="native";id=Id_native; break L;
                case 'e': c=s.charAt(0);
                    if (c=='d') { X="delete";id=Id_delete; }
                    else if (c=='r') { X="return";id=Id_return; }
                    break L;
                case 'h': X="throws";id=Id_throws; break L;
                case 'm': X="import";id=Id_import; break L;
                case 'o': X="double";id=Id_double; break L;
                case 't': X="static";id=Id_static; break L;
                case 'u': X="public";id=Id_public; break L;
                case 'w': X="switch";id=Id_switch; break L;
                case 'x': X="export";id=Id_export; break L;
                case 'y': X="typeof";id=Id_typeof; break L;
                } break L;
            case 7: switch (s.charAt(1)) {
                case 'a': X="package";id=Id_package; break L;
                case 'e': X="default";id=Id_default; break L;
                case 'i': X="finally";id=Id_finally; break L;
                case 'o': X="boolean";id=Id_boolean; break L;
                case 'r': X="private";id=Id_private; break L;
                case 'x': X="extends";id=Id_extends; break L;
                } break L;
            case 8: switch (s.charAt(0)) {
                case 'a': X="abstract";id=Id_abstract; break L;
                case 'c': X="continue";id=Id_continue; break L;
                case 'd': X="debugger";id=Id_debugger; break L;
                case 'f': X="function";id=Id_function; break L;
                case 'v': X="volatile";id=Id_volatile; break L;
                } break L;
            case 9: c=s.charAt(0);
                if (c=='i') { X="interface";id=Id_interface; }
                else if (c=='p') { X="protected";id=Id_protected; }
                else if (c=='t') { X="transient";id=Id_transient; }
                break L;
            case 10: c=s.charAt(1);
                if (c=='m') { X="implements";id=Id_implements; }
                else if (c=='n') { X="instanceof";id=Id_instanceof; }
                break L;
            case 12: X="synchronized";id=Id_synchronized; break L;
            }
            if (X!=null && X!=s && !X.equals(s)) id = 0;
        }
// #/generated#
// #/string_id_map#
        if (id == 0) { return Token.EOF; }
        return id & 0xff;
    }
    /** Current source line number (incremented as newlines are consumed by getChar()). */
    final int getLineno() { return lineno; }

    /** Text of the most recently scanned NAME or STRING token. */
    final String getString() { return string; }

    /** Value of the most recently scanned NUMBER token. */
    final double getNumber() { return number; }

    /** True once the underlying source has been fully consumed. */
    final boolean eof() { return hitEOF; }
    /**
     * Scans and returns the next token of the input.  Side effects: sets
     * {@code this.string} for NAME/STRING tokens, {@code this.number} for
     * NUMBER tokens, tracks {@code dirtyLine}, and reports lexical problems
     * through {@code parser.addError}/{@code addWarning} before returning
     * Token.ERROR.  Returns Token.EOL at each newline and Token.EOF at end
     * of input.
     */
    final int getToken() throws IOException
    {
        int c;

    retry:
        for (;;) {
            // Eat whitespace, possibly sensitive to newlines.
            for (;;) {
                c = getChar();
                if (c == EOF_CHAR) {
                    return Token.EOF;
                } else if (c == '\n') {
                    dirtyLine = false;
                    return Token.EOL;
                } else if (!isJSSpace(c)) {
                    if (c != '-') {
                        dirtyLine = true;
                    }
                    break;
                }
            }

            if (c == '@') return Token.XMLATTR;

            // identifier/keyword/instanceof?
            // watch out for starting with a <backslash>
            boolean identifierStart;
            boolean isUnicodeEscapeStart = false;
            if (c == '\\') {
                c = getChar();
                if (c == 'u') {
                    identifierStart = true;
                    isUnicodeEscapeStart = true;
                    stringBufferTop = 0;
                } else {
                    identifierStart = false;
                    ungetChar(c);
                    c = '\\';
                }
            } else {
                identifierStart = Character.isJavaIdentifierStart((char)c);
                if (identifierStart) {
                    stringBufferTop = 0;
                    addToString(c);
                }
            }

            if (identifierStart) {
                boolean containsEscape = isUnicodeEscapeStart;
                for (;;) {
                    if (isUnicodeEscapeStart) {
                        // strictly speaking we should probably push-back
                        // all the bad characters if the <backslash>uXXXX
                        // sequence is malformed. But since there isn't a
                        // correct context(is there?) for a bad Unicode
                        // escape sequence in an identifier, we can report
                        // an error here.
                        int escapeVal = 0;
                        for (int i = 0; i != 4; ++i) {
                            c = getChar();
                            escapeVal = Kit.xDigitToInt(c, escapeVal);
                            // Next check takes care about c < 0 and bad escape
                            if (escapeVal < 0) { break; }
                        }
                        if (escapeVal < 0) {
                            parser.addError("msg.invalid.escape");
                            return Token.ERROR;
                        }
                        addToString(escapeVal);
                        isUnicodeEscapeStart = false;
                    } else {
                        c = getChar();
                        if (c == '\\') {
                            c = getChar();
                            if (c == 'u') {
                                isUnicodeEscapeStart = true;
                                containsEscape = true;
                            } else {
                                parser.addError("msg.illegal.character");
                                return Token.ERROR;
                            }
                        } else {
                            if (c == EOF_CHAR
                                || !Character.isJavaIdentifierPart((char)c))
                            {
                                break;
                            }
                            addToString(c);
                        }
                    }
                }
                ungetChar(c);

                String str = getStringFromBuffer();
                if (!containsEscape) {
                    // OPT we shouldn't have to make a string (object!) to
                    // check if it's a keyword.

                    // Return the corresponding token if it's a keyword
                    int result = stringToKeyword(str);
                    if (result != Token.EOF) {
                        if ((result == Token.LET || result == Token.YIELD) &&
                            parser.compilerEnv.getLanguageVersion()
                                < Context.VERSION_1_7)
                        {
                            // LET and YIELD are tokens only in 1.7 and later
                            result = Token.NAME;
                        }
                        if (result != Token.RESERVED) {
                            return result;
                        } else if (!parser.compilerEnv.
                                        isReservedKeywordAsIdentifier())
                        {
                            keyword = str;
                            return result;
                        } else {
                            // If implementation permits to use future reserved
                            // keywords in violation with the EcmaScript,
                            // treat it as name but issue warning
                            parser.addWarning("msg.reserved.keyword", str);
                        }
                    }
                }
                this.string = (String)allStrings.intern(str);
                return Token.NAME;
            }

            // is it a number?
            if (isDigit(c) || (c == '.' && isDigit(peekChar()))) {

                stringBufferTop = 0;
                int base = 10;

                if (c == '0') {
                    c = getChar();
                    if (c == 'x' || c == 'X') {
                        base = 16;
                        c = getChar();
                    } else if (isDigit(c)) {
                        base = 8;
                    } else {
                        addToString('0');
                    }
                }

                if (base == 16) {
                    while (0 <= Kit.xDigitToInt(c, 0)) {
                        addToString(c);
                        c = getChar();
                    }
                } else {
                    while ('0' <= c && c <= '9') {
                        /*
                         * We permit 08 and 09 as decimal numbers, which
                         * makes our behavior a superset of the ECMA
                         * numeric grammar. We might not always be so
                         * permissive, so we warn about it.
                         */
                        if (base == 8 && c >= '8') {
                            parser.addWarning("msg.bad.octal.literal",
                                              c == '8' ? "8" : "9");
                            base = 10;
                        }
                        addToString(c);
                        c = getChar();
                    }
                }

                boolean isInteger = true;

                // Fraction and/or exponent only make sense in base 10.
                if (base == 10 && (c == '.' || c == 'e' || c == 'E')) {
                    isInteger = false;
                    if (c == '.') {
                        do {
                            addToString(c);
                            c = getChar();
                        } while (isDigit(c));
                    }
                    if (c == 'e' || c == 'E') {
                        addToString(c);
                        c = getChar();
                        if (c == '+' || c == '-') {
                            addToString(c);
                            c = getChar();
                        }
                        if (!isDigit(c)) {
                            parser.addError("msg.missing.exponent");
                            return Token.ERROR;
                        }
                        do {
                            addToString(c);
                            c = getChar();
                        } while (isDigit(c));
                    }
                }
                ungetChar(c);
                String numString = getStringFromBuffer();

                double dval;
                if (base == 10 && !isInteger) {
                    try {
                        // Use Java conversion to number from string...
                        dval = Double.valueOf(numString).doubleValue();
                    }
                    catch (NumberFormatException ex) {
                        parser.addError("msg.caught.nfe");
                        return Token.ERROR;
                    }
                } else {
                    dval = ScriptRuntime.stringToNumber(numString, 0, base);
                }

                this.number = dval;
                return Token.NUMBER;
            }

            // is it a string?
            if (c == '"' || c == '\'') {
                // We attempt to accumulate a string the fast way, by
                // building it directly out of the reader. But if there
                // are any escaped characters in the string, we revert to
                // building it out of a StringBuffer.

                int quoteChar = c;
                stringBufferTop = 0;

                c = getChar();
            strLoop: while (c != quoteChar) {
                    if (c == '\n' || c == EOF_CHAR) {
                        ungetChar(c);
                        parser.addError("msg.unterminated.string.lit");
                        return Token.ERROR;
                    }

                    if (c == '\\') {
                        // We've hit an escaped character
                        int escapeVal;

                        c = getChar();
                        switch (c) {
                        case 'b': c = '\b'; break;
                        case 'f': c = '\f'; break;
                        case 'n': c = '\n'; break;
                        case 'r': c = '\r'; break;
                        case 't': c = '\t'; break;

                        // \v a late addition to the ECMA spec,
                        // it is not in Java, so use 0xb
                        case 'v': c = 0xb; break;

                        case 'u':
                            // Get 4 hex digits; if the u escape is not
                            // followed by 4 hex digits, use 'u' + the
                            // literal character sequence that follows.
                            int escapeStart = stringBufferTop;
                            addToString('u');
                            escapeVal = 0;
                            for (int i = 0; i != 4; ++i) {
                                c = getChar();
                                escapeVal = Kit.xDigitToInt(c, escapeVal);
                                if (escapeVal < 0) {
                                    continue strLoop;
                                }
                                addToString(c);
                            }
                            // prepare for replace of stored 'u' sequence
                            // by escape value
                            stringBufferTop = escapeStart;
                            c = escapeVal;
                            break;
                        case 'x':
                            // Get 2 hex digits, defaulting to 'x'+literal
                            // sequence, as above.
                            c = getChar();
                            escapeVal = Kit.xDigitToInt(c, 0);
                            if (escapeVal < 0) {
                                addToString('x');
                                continue strLoop;
                            } else {
                                int c1 = c;
                                c = getChar();
                                escapeVal = Kit.xDigitToInt(c, escapeVal);
                                if (escapeVal < 0) {
                                    addToString('x');
                                    addToString(c1);
                                    continue strLoop;
                                } else {
                                    // got 2 hex digits
                                    c = escapeVal;
                                }
                            }
                            break;

                        case '\n':
                            // Remove line terminator after escape to follow
                            // SpiderMonkey and C/C++
                            c = getChar();
                            continue strLoop;

                        default:
                            if ('0' <= c && c < '8') {
                                int val = c - '0';
                                c = getChar();
                                if ('0' <= c && c < '8') {
                                    val = 8 * val + c - '0';
                                    c = getChar();
                                    if ('0' <= c && c < '8' && val <= 037) {
                                        // c is 3rd char of octal sequence only
                                        // if the resulting val <= 0377
                                        val = 8 * val + c - '0';
                                        c = getChar();
                                    }
                                }
                                ungetChar(c);
                                c = val;
                            }
                        }
                    }
                    addToString(c);
                    c = getChar();
                }

                String str = getStringFromBuffer();
                this.string = (String)allStrings.intern(str);
                return Token.STRING;
            }

            // Punctuators and operators.
            switch (c) {
            case ';': return Token.SEMI;
            case '[': return Token.LB;
            case ']': return Token.RB;
            case '{': return Token.LC;
            case '}': return Token.RC;
            case '(': return Token.LP;
            case ')': return Token.RP;
            case ',': return Token.COMMA;
            case '?': return Token.HOOK;
            case ':':
                if (matchChar(':')) {
                    return Token.COLONCOLON;
                } else {
                    return Token.COLON;
                }
            case '.':
                if (matchChar('.')) {
                    return Token.DOTDOT;
                } else if (matchChar('(')) {
                    return Token.DOTQUERY;
                } else {
                    return Token.DOT;
                }

            case '|':
                if (matchChar('|')) {
                    return Token.OR;
                } else if (matchChar('=')) {
                    return Token.ASSIGN_BITOR;
                } else {
                    return Token.BITOR;
                }

            case '^':
                if (matchChar('=')) {
                    return Token.ASSIGN_BITXOR;
                } else {
                    return Token.BITXOR;
                }

            case '&':
                if (matchChar('&')) {
                    return Token.AND;
                } else if (matchChar('=')) {
                    return Token.ASSIGN_BITAND;
                } else {
                    return Token.BITAND;
                }

            case '=':
                if (matchChar('=')) {
                    if (matchChar('='))
                        return Token.SHEQ;
                    else
                        return Token.EQ;
                } else {
                    return Token.ASSIGN;
                }

            case '!':
                if (matchChar('=')) {
                    if (matchChar('='))
                        return Token.SHNE;
                    else
                        return Token.NE;
                } else {
                    return Token.NOT;
                }

            case '<':
                /* NB:treat HTML begin-comment as comment-till-eol */
                if (matchChar('!')) {
                    if (matchChar('-')) {
                        if (matchChar('-')) {
                            skipLine();
                            continue retry;
                        }
                        ungetChar('-');
                    }
                    ungetChar('!');
                }
                if (matchChar('<')) {
                    if (matchChar('=')) {
                        return Token.ASSIGN_LSH;
                    } else {
                        return Token.LSH;
                    }
                } else {
                    if (matchChar('=')) {
                        return Token.LE;
                    } else {
                        return Token.LT;
                    }
                }

            case '>':
                if (matchChar('>')) {
                    if (matchChar('>')) {
                        if (matchChar('=')) {
                            return Token.ASSIGN_URSH;
                        } else {
                            return Token.URSH;
                        }
                    } else {
                        if (matchChar('=')) {
                            return Token.ASSIGN_RSH;
                        } else {
                            return Token.RSH;
                        }
                    }
                } else {
                    if (matchChar('=')) {
                        return Token.GE;
                    } else {
                        return Token.GT;
                    }
                }

            case '*':
                if (matchChar('=')) {
                    return Token.ASSIGN_MUL;
                } else {
                    return Token.MUL;
                }

            case '/':
                // is it a // comment?
                if (matchChar('/')) {
                    skipLine();
                    continue retry;
                }
                // is it a /* comment?
                if (matchChar('*')) {
                    boolean lookForSlash = false;
                    for (;;) {
                        c = getChar();
                        if (c == EOF_CHAR) {
                            parser.addError("msg.unterminated.comment");
                            return Token.ERROR;
                        } else if (c == '*') {
                            lookForSlash = true;
                        } else if (c == '/') {
                            if (lookForSlash) {
                                continue retry;
                            }
                        } else {
                            lookForSlash = false;
                        }
                    }
                }

                if (matchChar('=')) {
                    return Token.ASSIGN_DIV;
                } else {
                    return Token.DIV;
                }

            case '%':
                if (matchChar('=')) {
                    return Token.ASSIGN_MOD;
                } else {
                    return Token.MOD;
                }

            case '~':
                return Token.BITNOT;

            case '+':
                if (matchChar('=')) {
                    return Token.ASSIGN_ADD;
                } else if (matchChar('+')) {
                    return Token.INC;
                } else {
                    return Token.ADD;
                }

            case '-':
                if (matchChar('=')) {
                    c = Token.ASSIGN_SUB;
                } else if (matchChar('-')) {
                    if (!dirtyLine) {
                        // treat HTML end-comment after possible whitespace
                        // after line start as comment-utill-eol
                        if (matchChar('>')) {
                            skipLine();
                            continue retry;
                        }
                    }
                    c = Token.DEC;
                } else {
                    c = Token.SUB;
                }
                dirtyLine = true;
                return c;

            default:
                parser.addError("msg.illegal.character");
                return Token.ERROR;
            }
        }
    }
private static boolean isAlpha(int c)
{
// Use 'Z' < 'a'
if (c <= 'Z') {
return 'A' <= c;
} else {
return 'a' <= c && c <= 'z';
}
}
static boolean isDigit(int c)
{
return '0' <= c && c <= '9';
}
/* As defined in ECMA. jsscan.c uses C isspace() (which allows
* \v, I think.) note that code in getChar() implicitly accepts
* '\r' == \u000D as well.
*/
static boolean isJSSpace(int c)
{
if (c <= 127) {
return c == 0x20 || c == 0x9 || c == 0xC || c == 0xB;
} else {
return c == 0xA0
|| Character.getType((char)c) == Character.SPACE_SEPARATOR;
}
}
private static boolean isJSFormatChar(int c)
{
return c > 127 && Character.getType((char)c) == Character.FORMAT;
}
    /**
     * Parser calls the method when it gets / or /= in literal context.
     * Scans the rest of a regular-expression literal into {@code this.string}
     * (the pattern body) and {@code this.regExpFlags} (any trailing g/i/m
     * flags).  Throws via {@code parser.reportError} on an unterminated
     * literal or an unrecognized flag letter.
     */
    void readRegExp(int startToken)
        throws IOException
    {
        stringBufferTop = 0;
        if (startToken == Token.ASSIGN_DIV) {
            // Mis-scanned /= : the '=' actually belongs to the regexp body.
            addToString('=');
        } else {
            if (startToken != Token.DIV) Kit.codeBug();
        }

        int c;
        while ((c = getChar()) != '/') {
            if (c == '\n' || c == EOF_CHAR) {
                ungetChar(c);
                throw parser.reportError("msg.unterminated.re.lit");
            }
            if (c == '\\') {
                // Keep the backslash and the escaped character verbatim.
                addToString(c);
                c = getChar();
            }

            addToString(c);
        }
        int reEnd = stringBufferTop;

        // Flags are appended to the same buffer, after the pattern body.
        while (true) {
            if (matchChar('g'))
                addToString('g');
            else if (matchChar('i'))
                addToString('i');
            else if (matchChar('m'))
                addToString('m');
            else
                break;
        }

        if (isAlpha(peekChar())) {
            throw parser.reportError("msg.invalid.re.flag");
        }

        this.string = new String(stringBuffer, 0, reEnd);
        this.regExpFlags = new String(stringBuffer, reEnd,
                                      stringBufferTop - reEnd);
    }
    /**
     * True when the XML tokenizer last stopped inside an attribute value
     * (set after '=' in tag content, cleared on '>' or other tag text).
     */
    boolean isXMLAttribute()
    {
        return xmlIsAttribute;
    }
    /**
     * Resets the XML tokenizer state, pushes back the '<' that triggered
     * XML mode, and returns the first XML token.
     */
    int getFirstXMLToken() throws IOException
    {
        xmlOpenTagsCount = 0;
        xmlIsAttribute = false;
        xmlIsTagContent = false;
        ungetChar('<');
        return getNextXMLToken();
    }
    /**
     * Scans the next chunk of an E4X XML literal into {@code this.string}.
     * Returns Token.XML when a '{' embedded-expression boundary is reached,
     * Token.XMLEND when the outermost tag closes, and Token.ERROR (after
     * reporting "msg.XML.bad.form") on malformed XML or EOF.  Tracks tag
     * nesting in xmlOpenTagsCount and in/out-of-tag state in
     * xmlIsTagContent/xmlIsAttribute.
     */
    int getNextXMLToken() throws IOException
    {
        stringBufferTop = 0; // remember the XML

        for (int c = getChar(); c != EOF_CHAR; c = getChar()) {
            if (xmlIsTagContent) {
                // Inside <...>: watch for tag end, self-close, quotes, '='.
                switch (c) {
                case '>':
                    addToString(c);
                    xmlIsTagContent = false;
                    xmlIsAttribute = false;
                    break;
                case '/':
                    addToString(c);
                    if (peekChar() == '>') {
                        // Self-closing tag: "/>" also closes one nesting level.
                        c = getChar();
                        addToString(c);
                        xmlIsTagContent = false;
                        xmlOpenTagsCount--;
                    }
                    break;
                case '{':
                    // Embedded JS expression; hand back what we have so far.
                    ungetChar(c);
                    this.string = getStringFromBuffer();
                    return Token.XML;
                case '\'':
                case '"':
                    addToString(c);
                    if (!readQuotedString(c)) return Token.ERROR;
                    break;
                case '=':
                    addToString(c);
                    xmlIsAttribute = true;
                    break;
                case ' ':
                case '\t':
                case '\r':
                case '\n':
                    addToString(c);
                    break;
                default:
                    addToString(c);
                    xmlIsAttribute = false;
                    break;
                }

                if (!xmlIsTagContent && xmlOpenTagsCount == 0) {
                    // Outermost tag just closed: the XML literal is complete.
                    this.string = getStringFromBuffer();
                    return Token.XMLEND;
                }
            } else {
                // Outside tags: text content, comments, CDATA, PIs, entities.
                switch (c) {
                case '<':
                    addToString(c);
                    c = peekChar();
                    switch (c) {
                    case '!':
                        c = getChar(); // Skip !
                        addToString(c);
                        c = peekChar();
                        switch (c) {
                        case '-':
                            c = getChar(); // Skip -
                            addToString(c);
                            c = getChar();
                            if (c == '-') {
                                addToString(c);
                                if(!readXmlComment()) return Token.ERROR;
                            } else {
                                // throw away the string in progress
                                stringBufferTop = 0;
                                this.string = null;
                                parser.addError("msg.XML.bad.form");
                                return Token.ERROR;
                            }
                            break;
                        case '[':
                            c = getChar(); // Skip [
                            addToString(c);
                            if (getChar() == 'C' &&
                                getChar() == 'D' &&
                                getChar() == 'A' &&
                                getChar() == 'T' &&
                                getChar() == 'A' &&
                                getChar() == '[')
                            {
                                addToString('C');
                                addToString('D');
                                addToString('A');
                                addToString('T');
                                addToString('A');
                                addToString('[');
                                if (!readCDATA()) return Token.ERROR;

                            } else {
                                // throw away the string in progress
                                stringBufferTop = 0;
                                this.string = null;
                                parser.addError("msg.XML.bad.form");
                                return Token.ERROR;
                            }
                            break;
                        default:
                            if(!readEntity()) return Token.ERROR;
                            break;
                        }
                        break;
                    case '?':
                        c = getChar(); // Skip ?
                        addToString(c);
                        if (!readPI()) return Token.ERROR;
                        break;
                    case '/':
                        // End tag
                        c = getChar(); // Skip /
                        addToString(c);
                        if (xmlOpenTagsCount == 0) {
                            // throw away the string in progress
                            stringBufferTop = 0;
                            this.string = null;
                            parser.addError("msg.XML.bad.form");
                            return Token.ERROR;
                        }
                        xmlIsTagContent = true;
                        xmlOpenTagsCount--;
                        break;
                    default:
                        // Start tag
                        xmlIsTagContent = true;
                        xmlOpenTagsCount++;
                        break;
                    }
                    break;
                case '{':
                    // Embedded JS expression in text content.
                    ungetChar(c);
                    this.string = getStringFromBuffer();
                    return Token.XML;
                default:
                    addToString(c);
                    break;
                }
            }
        }

        // EOF inside an XML literal.
        stringBufferTop = 0; // throw away the string in progress
        this.string = null;
        parser.addError("msg.XML.bad.form");
        return Token.ERROR;
    }
/**
*
*/
private boolean readQuotedString(int quote) throws IOException
{
for (int c = getChar(); c != EOF_CHAR; c = getChar()) {
addToString(c);
if (c == quote) return true;
}
stringBufferTop = 0; // throw away the string in progress
this.string = null;
parser.addError("msg.XML.bad.form");
return false;
}
    /**
     * Consumes the remainder of an XML comment ("<!--" has already been
     * read and stored) through the closing "-->", copying every character
     * into the string buffer.  Returns false (after reporting
     * "msg.XML.bad.form") if EOF arrives before the terminator.
     */
    private boolean readXmlComment() throws IOException
    {
        for (int c = getChar(); c != EOF_CHAR;) {
            addToString(c);
            if (c == '-' && peekChar() == '-') {
                c = getChar();
                addToString(c);
                if (peekChar() == '>') {
                    c = getChar(); // Skip >
                    addToString(c);
                    return true;
                } else {
                    // "--" not followed by '>': keep scanning.
                    continue;
                }
            }
            c = getChar();
        }

        stringBufferTop = 0; // throw away the string in progress
        this.string = null;
        parser.addError("msg.XML.bad.form");
        return false;
    }
    /**
     * Consumes the remainder of a CDATA section ("<![CDATA[" has already
     * been read and stored) through the closing "]]>", copying every
     * character into the string buffer.  Returns false (after reporting
     * "msg.XML.bad.form") if EOF arrives before the terminator.
     */
    private boolean readCDATA() throws IOException
    {
        for (int c = getChar(); c != EOF_CHAR;) {
            addToString(c);
            if (c == ']' && peekChar() == ']') {
                c = getChar();
                addToString(c);
                if (peekChar() == '>') {
                    c = getChar(); // Skip >
                    addToString(c);
                    return true;
                } else {
                    // "]]" not followed by '>': keep scanning.
                    continue;
                }
            }
            c = getChar();
        }

        stringBufferTop = 0; // throw away the string in progress
        this.string = null;
        parser.addError("msg.XML.bad.form");
        return false;
    }
/**
*
*/
private boolean readEntity() throws IOException
{
int declTags = 1;
for (int c = getChar(); c != EOF_CHAR; c = getChar()) {
addToString(c);
switch (c) {
case '<':
declTags++;
break;
case '>':
declTags--;
if (declTags == 0) return true;
break;
}
}
stringBufferTop = 0; // throw away the string in progress
this.string = null;
parser.addError("msg.XML.bad.form");
return false;
}
/**
*
*/
private boolean readPI() throws IOException
{
for (int c = getChar(); c != EOF_CHAR; c = getChar()) {
addToString(c);
if (c == '?' && peekChar() == '>') {
c = getChar(); // Skip >
addToString(c);
return true;
}
}
stringBufferTop = 0; // throw away the string in progress
this.string = null;
parser.addError("msg.XML.bad.form");
return false;
}
    /** Snapshot of the token text accumulated so far in the string buffer. */
    private String getStringFromBuffer()
    {
        return new String(stringBuffer, 0, stringBufferTop);
    }
private void addToString(int c)
{
int N = stringBufferTop;
if (N == stringBuffer.length) {
char[] tmp = new char[stringBuffer.length * 2];
System.arraycopy(stringBuffer, 0, tmp, 0, N);
stringBuffer = tmp;
}
stringBuffer[N] = (char)c;
stringBufferTop = N + 1;
}
    /**
     * Pushes a character back so the next getChar() returns it.  At most
     * three characters fit in ungetBuffer, and a push-back may not cross a
     * line boundary (asserted below).
     */
    private void ungetChar(int c)
    {
        // can not unread past across line boundary
        if (ungetCursor != 0 && ungetBuffer[ungetCursor - 1] == '\n')
            Kit.codeBug();
        ungetBuffer[ungetCursor++] = c;
    }
private boolean matchChar(int test) throws IOException
{
int c = getChar();
if (c == test) {
return true;
} else {
ungetChar(c);
return false;
}
}
    /** Returns the next character without consuming it (read then push back). */
    private int peekChar() throws IOException
    {
        int c = getChar();
        ungetChar(c);
        return c;
    }
    /**
     * Returns the next source character, serving the push-back buffer first.
     * Normalizes line terminators (CR, CRLF and, above ASCII, anything
     * ScriptRuntime.isJSLineTerminator accepts) to '\n', maintains
     * lineno/lineStart bookkeeping, silently skips Unicode FORMAT (Cf)
     * characters, and returns EOF_CHAR at end of input.
     */
    private int getChar() throws IOException
    {
        if (ungetCursor != 0) {
            return ungetBuffer[--ungetCursor];
        }

        for(;;) {
            int c;
            if (sourceString != null) {
                if (sourceCursor == sourceEnd) {
                    hitEOF = true;
                    return EOF_CHAR;
                }
                c = sourceString.charAt(sourceCursor++);
            } else {
                if (sourceCursor == sourceEnd) {
                    if (!fillSourceBuffer()) {
                        hitEOF = true;
                        return EOF_CHAR;
                    }
                }
                c = sourceBuffer[sourceCursor++];
            }

            if (lineEndChar >= 0) {
                // Previous char ended a line; fold CRLF into a single '\n'
                // and start the new line here.
                if (lineEndChar == '\r' && c == '\n') {
                    lineEndChar = '\n';
                    continue;
                }
                lineEndChar = -1;
                lineStart = sourceCursor - 1;
                lineno++;
            }

            if (c <= 127) {
                if (c == '\n' || c == '\r') {
                    lineEndChar = c;
                    c = '\n';
                }
            } else {
                if (isJSFormatChar(c)) {
                    continue;
                }
                if (ScriptRuntime.isJSLineTerminator(c)) {
                    lineEndChar = c;
                    c = '\n';
                }
            }
            return c;
        }
    }
    /**
     * Skips to the end of the current line, leaving the terminating '\n'
     * (or EOF_CHAR) pushed back for the caller to see.
     */
    private void skipLine() throws IOException
    {
        // skip to end of line
        int c;
        while ((c = getChar()) != EOF_CHAR && c != '\n') { }
        ungetChar(c);
    }
    /** Zero-based offset of the scan cursor within the current line. */
    final int getOffset()
    {
        int n = sourceCursor - lineStart;
        // Don't count a just-consumed line terminator as part of the line.
        if (lineEndChar >= 0) { --n; }
        return n;
    }
    /**
     * Returns the text of the current line (used for error reporting).
     * For String input this is a substring; for Reader input the rest of
     * the line may have to be read into the buffer first, which is why
     * lineStart must be re-read after fillSourceBuffer (it can compact the
     * buffer and shift lineStart).
     */
    final String getLine()
    {
        if (sourceString != null) {
            // String case
            int lineEnd = sourceCursor;
            if (lineEndChar >= 0) {
                --lineEnd;
            } else {
                for (; lineEnd != sourceEnd; ++lineEnd) {
                    int c = sourceString.charAt(lineEnd);
                    if (ScriptRuntime.isJSLineTerminator(c)) {
                        break;
                    }
                }
            }
            return sourceString.substring(lineStart, lineEnd);
        } else {
            // Reader case
            int lineLength = sourceCursor - lineStart;
            if (lineEndChar >= 0) {
                --lineLength;
            } else {
                // Read until the end of line
                for (;; ++lineLength) {
                    int i = lineStart + lineLength;
                    if (i == sourceEnd) {
                        try {
                            if (!fillSourceBuffer()) { break; }
                        } catch (IOException ioe) {
                            // ignore it, we're already displaying an error...
                            break;
                        }
                        // i recalculuation as fillSourceBuffer can move saved
                        // line buffer and change lineStart
                        i = lineStart + lineLength;
                    }
                    int c = sourceBuffer[i];
                    if (ScriptRuntime.isJSLineTerminator(c)) {
                        break;
                    }
                }
            }
            return new String(sourceBuffer, lineStart, lineLength);
        }
    }
    /**
     * Reads more characters from sourceReader into sourceBuffer, first
     * either compacting the buffer (discarding everything before lineStart,
     * which getLine() still needs) or doubling it when the current line
     * already fills the whole buffer.  Returns false at end of input.
     * Reader-mode only: calling this with String input is a coding bug.
     */
    private boolean fillSourceBuffer() throws IOException
    {
        if (sourceString != null) Kit.codeBug();
        if (sourceEnd == sourceBuffer.length) {
            if (lineStart != 0) {
                // Compact: shift the retained tail down to index 0.
                System.arraycopy(sourceBuffer, lineStart, sourceBuffer, 0,
                                 sourceEnd - lineStart);
                sourceEnd -= lineStart;
                sourceCursor -= lineStart;
                lineStart = 0;
            } else {
                // Current line occupies the whole buffer: grow it instead.
                char[] tmp = new char[sourceBuffer.length * 2];
                System.arraycopy(sourceBuffer, 0, tmp, 0, sourceEnd);
                sourceBuffer = tmp;
            }
        }
        int n = sourceReader.read(sourceBuffer, sourceEnd,
                                  sourceBuffer.length - sourceEnd);
        if (n < 0) {
            return false;
        }
        sourceEnd += n;
        return true;
    }
    // Last reserved word scanned, so error reporting knows which reserved
    // word was used.  NOTE(review): static, so it is shared across all
    // tokenizer instances -- confirm this is intentional.
    static String keyword = "";

    // stuff other than whitespace since start of line
    private boolean dirtyLine;

    // flags ("gim") trailing the last regexp literal (set by readRegExp)
    String regExpFlags;

    // Set this to an initial non-null value so that the Parser has
    // something to retrieve even if an error has occurred and no
    // string is found. Fosters one class of error, but saves lots of
    // code.
    private String string = "";
    // value of the last NUMBER token scanned
    private double number;

    // growable accumulation buffer for the text of the current token
    private char[] stringBuffer = new char[128];
    private int stringBufferTop;
    // interning pool so identical identifier/string texts share one String
    private ObjToIntMap allStrings = new ObjToIntMap(50);

    // Room to backtrack from to < on failed match of the last - in <!--
    private final int[] ungetBuffer = new int[3];
    private int ungetCursor;

    private boolean hitEOF = false;

    // current-line bookkeeping maintained by getChar()
    private int lineStart = 0;
    private int lineno;
    private int lineEndChar = -1;

    // exactly one of sourceString / sourceReader is non-null (String vs
    // Reader input; see getChar() and fillSourceBuffer())
    private String sourceString;
    private Reader sourceReader;
    private char[] sourceBuffer;
    private int sourceEnd;
    private int sourceCursor;

    // for xml tokenizer
    private boolean xmlIsAttribute;
    private boolean xmlIsTagContent;
    private int xmlOpenTagsCount;

    private Parser parser;
}
| |
package org.ssh.app.util.spring;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.Enumeration;
import java.util.Map;
import java.util.Vector;
import javax.sql.DataSource;
import liquibase.Liquibase;
import liquibase.database.Database;
import liquibase.database.DatabaseFactory;
import liquibase.database.jvm.JdbcConnection;
import liquibase.exception.DatabaseException;
import liquibase.exception.LiquibaseException;
import liquibase.logging.LogFactory;
import liquibase.logging.Logger;
import liquibase.resource.ResourceAccessor;
import org.springframework.beans.factory.BeanNameAware;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.context.ResourceLoaderAware;
import org.springframework.core.io.Resource;
import org.springframework.core.io.ResourceLoader;
/**
* A Spring-ified wrapper for Liquibase.
*
* Example Configuration:
* <p>
* <p>
* This Spring configuration example will cause liquibase to run
* automatically when the Spring context is initialized. It will load
* <code>db-changelog.xml</code> from the classpath and apply it against
* <code>myDataSource</code>.
* <p>
*
* <pre>
* <bean id="myLiquibase"
* class="liquibase.spring.SpringLiquibase"
* >
*
* <property name="dataSource" ref="myDataSource" />
*
* <property name="changeLog" value="classpath:db-changelog.xml" />
*
* <!-- The following configuration options are optional -->
*
* <property name="executeEnabled" value="true" />
*
* <!--
* If set to true, writeSqlFileEnabled will write the generated
* SQL to a file before executing it.
* -->
* <property name="writeSqlFileEnabled" value="true" />
*
* <!--
* sqlOutputDir specifies the directory into which the SQL file
* will be written, if so configured.
* -->
* <property name="sqlOutputDir" value="c:\sql" />
*
* </bean>
*
* </pre>
*
* @author Rob Schoening
*/
public class SpringLiquibase implements InitializingBean, BeanNameAware, ResourceLoaderAware {

    /**
     * {@link ResourceAccessor} that resolves Liquibase resources through
     * Spring's {@link ResourceLoader}.  When the parent change log was
     * itself loaded with a {@code classpath:} prefix, relative includes are
     * given the same prefix so they resolve consistently.
     */
    public class SpringResourceOpener implements ResourceAccessor {
        private String parentFile;

        public SpringResourceOpener(String parentFile) {
            this.parentFile = parentFile;
        }

        /**
         * @return the resource contents, or {@code null} when the resource
         *         does not exist (Liquibase probes for optional files and
         *         treats null as "absent")
         */
        public InputStream getResourceAsStream(String file) throws IOException {
            try {
                Resource resource = getResource(file);
                return resource.getInputStream();
            } catch (FileNotFoundException ex) {
                return null;
            }
        }

        public Enumeration<URL> getResources(String packageName) throws IOException {
            Vector<URL> tmp = new Vector<URL>();
            tmp.add(getResource(packageName).getURL());
            return tmp.elements();
        }

        public Resource getResource(String file) {
            return getResourceLoader().getResource(adjustClasspath(file));
        }

        // Mirror the classpath: prefix of the parent change log onto
        // relative includes that don't already carry one.
        private String adjustClasspath(String file) {
            return isClasspathPrefixPresent(parentFile) && !isClasspathPrefixPresent(file)
                    ? ResourceLoader.CLASSPATH_URL_PREFIX + file
                    : file;
        }

        public boolean isClasspathPrefixPresent(String file) {
            return file.startsWith(ResourceLoader.CLASSPATH_URL_PREFIX);
        }

        public ClassLoader toClassLoader() {
            return getResourceLoader().getClassLoader();
        }
    }

    private String beanName;

    private ResourceLoader resourceLoader;

    private DataSource dataSource;

    private Logger log = LogFactory.getLogger(SpringLiquibase.class.getName());

    private String changeLog;

    private String contexts;

    // Optional: may legitimately remain null when no changeLogParameters
    // property is configured.
    private Map<String, String> parameters;

    private String defaultSchema;

    public SpringLiquibase() {
        super();
    }

    /**
     * Opens a connection to the configured {@link DataSource} and asks the
     * matching Liquibase {@link Database} implementation for the database
     * product name.  The connection is rolled back (when not auto-commit)
     * and closed before returning.
     *
     * @return the database product name, or "unknown" when it could not be
     *         determined before an exception was raised
     * @throws DatabaseException wrapping any {@link SQLException}
     */
    public String getDatabaseProductName() throws DatabaseException {
        Connection connection = null;
        String name = "unknown";
        try {
            connection = getDataSource().getConnection();
            // BUGFIX: reuse the connection opened above.  The previous code
            // called dataSource.getConnection() a second time here, and
            // that second connection was never closed (connection leak).
            Database database =
                DatabaseFactory.getInstance().findCorrectDatabaseImplementation(new JdbcConnection(connection));
            name = database.getDatabaseProductName();
        } catch (SQLException e) {
            throw new DatabaseException(e);
        } finally {
            if (connection != null) {
                try {
                    if (!connection.getAutoCommit()) {
                        connection.rollback();
                    }
                    connection.close();
                } catch (Exception e) {
                    log.warning("problem closing connection", e);
                }
            }
        }
        return name;
    }

    /**
     * The DataSource that liquibase will use to perform the migration.
     *
     * @return the configured DataSource
     */
    public DataSource getDataSource() {
        return dataSource;
    }

    /**
     * The DataSource that liquibase will use to perform the migration.
     */
    public void setDataSource(DataSource dataSource) {
        this.dataSource = dataSource;
    }

    /**
     * Returns a Resource that is able to resolve to a file or classpath resource.
     *
     * @return the change-log location
     */
    public String getChangeLog() {
        return changeLog;
    }

    /**
     * Sets a Spring Resource that is able to resolve to a file or classpath resource.
     * An example might be <code>classpath:db-changelog.xml</code>.
     */
    public void setChangeLog(String dataModel) {
        this.changeLog = dataModel;
    }

    public String getContexts() {
        return contexts;
    }

    public void setContexts(String contexts) {
        this.contexts = contexts;
    }

    public String getDefaultSchema() {
        return defaultSchema;
    }

    public void setDefaultSchema(String defaultSchema) {
        this.defaultSchema = defaultSchema;
    }

    /**
     * Executed automatically when the bean is initialized.  Runs the
     * Liquibase update unless the {@code liquibase.should.run} system
     * property is set to false.  Locks are force-released and the borrowed
     * connection rolled back and closed in all cases.
     */
    public void afterPropertiesSet() throws LiquibaseException {
        String shouldRunProperty = System.getProperty(Liquibase.SHOULD_RUN_SYSTEM_PROPERTY);
        if (shouldRunProperty != null && !Boolean.valueOf(shouldRunProperty)) {
            System.out.println("Liquibase did not run because '" + Liquibase.SHOULD_RUN_SYSTEM_PROPERTY + "' system property was set to false");
            return;
        }

        Connection c = null;
        Liquibase liquibase = null;
        try {
            c = getDataSource().getConnection();
            liquibase = createLiquibase(c);
            liquibase.update(getContexts());
        } catch (SQLException e) {
            throw new DatabaseException(e);
        } finally {
            if (liquibase != null) {
                liquibase.forceReleaseLocks();
            }
            if (c != null) {
                try {
                    c.rollback();
                    c.close();
                } catch (SQLException e) {
                    // nothing to do
                }
            }
        }
    }

    /**
     * Builds the {@link Liquibase} facade for the given connection and
     * applies any configured change-log parameters.
     */
    protected Liquibase createLiquibase(Connection c) throws LiquibaseException {
        Liquibase liquibase = new Liquibase(getChangeLog(), createResourceOpener(), createDatabase(c));
        // BUGFIX: parameters is an optional property; guard against NPE
        // when no changeLogParameters were configured.
        if (parameters != null) {
            for (Map.Entry<String, String> entry : parameters.entrySet()) {
                liquibase.setChangeLogParameter(entry.getKey(), entry.getValue());
            }
        }
        return liquibase;
    }

    /**
     * Subclasses may override this method add change some database settings such as
     * default schema before returning the database object.
     * @param c
     * @return a Database implementation retrieved from the {@link DatabaseFactory}.
     * @throws DatabaseException
     */
    protected Database createDatabase(Connection c) throws DatabaseException {
        Database database = DatabaseFactory.getInstance().findCorrectDatabaseImplementation(new JdbcConnection(c));
        if (this.defaultSchema != null) {
            database.setDefaultSchemaName(this.defaultSchema);
        }
        return database;
    }

    public void setChangeLogParameters(Map<String, String> parameters) {
        this.parameters = parameters;
    }

    /**
     * Create a new resourceOpener.
     */
    protected SpringResourceOpener createResourceOpener() {
        return new SpringResourceOpener(getChangeLog());
    }

    /**
     * Spring sets this automatically to the instance's configured bean name.
     */
    public void setBeanName(String name) {
        this.beanName = name;
    }

    /**
     * Gets the Spring-name of this instance.
     *
     * @return the bean name
     */
    public String getBeanName() {
        return beanName;
    }

    public void setResourceLoader(ResourceLoader resourceLoader) {
        this.resourceLoader = resourceLoader;
    }

    public ResourceLoader getResourceLoader() {
        return resourceLoader;
    }

    @Override
    public String toString() {
        // Null-safe: the resource loader may not have been injected yet.
        return getClass().getName() + "(" + String.valueOf(getResourceLoader()) + ")";
    }
}
| |
/*
* ModeShape (http://www.modeshape.org)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.modeshape.jcr.cache.document;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.atomic.AtomicReference;
import org.modeshape.common.annotation.Immutable;
import org.modeshape.common.logging.Logger;
import org.modeshape.jcr.ExecutionContext;
import org.modeshape.jcr.api.value.DateTime;
import org.modeshape.jcr.cache.CachedNode;
import org.modeshape.jcr.cache.ChildReference;
import org.modeshape.jcr.cache.NodeKey;
import org.modeshape.jcr.cache.SessionCache;
import org.modeshape.jcr.value.NameFactory;
import org.modeshape.jcr.value.Path;
import org.modeshape.jcr.value.PathFactory;
import org.modeshape.jcr.value.ValueFactories;
/**
 * Abstract base class for {@link SessionCache} implementations. Most read operations
 * are delegated to a {@link WorkspaceCache}; the active cache may be swapped (e.g.,
 * for a transactional cache via {@link #setWorkspaceCache}), while the originally
 * supplied shared cache is retained for identity comparison in the {@code clear}
 * methods.
 */
public abstract class AbstractSessionCache implements SessionCache, DocumentCache {

    /**
     * Immutable {@link SaveContext} that snapshots the save time and the user name
     * from an {@link ExecutionContext} at construction time.
     */
    @Immutable
    protected static final class BasicSaveContext implements SaveContext {
        private final DateTime now;
        private final String userId;

        protected BasicSaveContext( ExecutionContext context ) {
            this.now = context.getValueFactories().getDateFactory().create();
            this.userId = context.getSecurityContext().getUserName();
        }

        @Override
        public DateTime getTime() {
            return now;
        }

        @Override
        public String getUserId() {
            return userId;
        }
    }

    protected final Logger logger;
    // The cache shared across sessions; kept so clear() can detect whether the active
    // cache has been swapped for a transactional one.
    private final WorkspaceCache sharedWorkspaceCache;
    private final AtomicReference<WorkspaceCache> workspaceCache = new AtomicReference<>();
    private final NameFactory nameFactory;
    private final PathFactory pathFactory;
    private final Path rootPath;
    // Not final: addContextData(...) replaces this with an augmented context.
    private ExecutionContext context;

    protected AbstractSessionCache(ExecutionContext context,
                                   WorkspaceCache sharedWorkspaceCache) {
        this.logger = Logger.getLogger(getClass());
        this.context = context;
        this.sharedWorkspaceCache = sharedWorkspaceCache;
        this.workspaceCache.set(sharedWorkspaceCache);
        ValueFactories factories = this.context.getValueFactories();
        this.nameFactory = factories.getNameFactory();
        this.pathFactory = factories.getPathFactory();
        this.rootPath = this.pathFactory.createRootPath();
    }

    @Override
    public final SessionCache unwrap() {
        return this;
    }

    /** @return the name of the workspace backing this session cache */
    protected final String workspaceName() {
        return workspaceCache().getWorkspaceName();
    }

    @Override
    public final ExecutionContext getContext() {
        return context;
    }

    @Override
    public final WorkspaceCache workspaceCache() {
        return workspaceCache.get();
    }

    final DocumentTranslator translator() {
        return workspaceCache().translator();
    }

    final ExecutionContext context() {
        return context;
    }

    final NameFactory nameFactory() {
        return nameFactory;
    }

    final PathFactory pathFactory() {
        return pathFactory;
    }

    final Path rootPath() {
        return rootPath;
    }

    /**
     * Replaces the active workspace cache (e.g., with a transactional cache).
     *
     * @param cache the cache to make active
     * @return this instance, for chaining
     */
    final AbstractSessionCache setWorkspaceCache(WorkspaceCache cache) {
        this.workspaceCache.set(cache);
        return this;
    }

    final WorkspaceCache sharedWorkspaceCache() {
        return sharedWorkspaceCache;
    }

    @Override
    public final void addContextData( String key,
                                      String value ) {
        // ExecutionContext is immutable-style: 'with' returns an augmented copy.
        this.context = context.with(key, value);
    }

    @Override
    public NodeKey createNodeKey() {
        return getRootKey().withId(generateIdentifier());
    }

    @Override
    public NodeKey createNodeKeyWithIdentifier( String identifier ) {
        return getRootKey().withId(identifier);
    }

    @Override
    public NodeKey createNodeKeyWithSource( String sourceName ) {
        String sourceKey = NodeKey.keyForSourceName(sourceName);
        return getRootKey().withSourceKeyAndId(sourceKey, generateIdentifier());
    }

    @Override
    public NodeKey createNodeKey( String sourceName,
                                  String identifier ) {
        String sourceKey = NodeKey.keyForSourceName(sourceName);
        if (identifier == null) identifier = generateIdentifier();
        return getRootKey().withSourceKeyAndId(sourceKey, identifier);
    }

    /** Generates a random identifier for new node keys. */
    protected String generateIdentifier() {
        return UUID.randomUUID().toString();
    }

    @Override
    public NodeKey getRootKey() {
        return workspaceCache().getRootKey();
    }

    @Override
    public WorkspaceCache getWorkspace() {
        return workspaceCache();
    }

    @Override
    public CachedNode getNode( NodeKey key ) {
        return workspaceCache().getNode(key);
    }

    @Override
    public CachedNode getNode( ChildReference reference ) {
        return getNode(reference.getKey());
    }

    /**
     * Returns the key of the given node plus the keys of all resolvable descendants.
     * Keys whose nodes cannot be resolved are skipped (same behavior as the previous
     * per-node recursive implementation, but using a single accumulator set instead
     * of allocating an intermediate set for every node).
     *
     * @param nodeKey the key of the subtree root
     * @return the collected keys, or an empty set if the root node does not exist
     */
    @Override
    public Set<NodeKey> getNodeKeysAtAndBelow( NodeKey nodeKey ) {
        CachedNode node = this.getNode(nodeKey);
        if (node == null) {
            return Collections.emptySet();
        }
        Set<NodeKey> result = new HashSet<>();
        result.add(nodeKey);
        collectDescendantKeys(node, result);
        return result;
    }

    /** Adds the keys of all resolvable descendants of {@code node} into {@code result}. */
    private void collectDescendantKeys( CachedNode node, Set<NodeKey> result ) {
        for (ChildReference reference : node.getChildReferences(this)) {
            NodeKey childKey = reference.getKey();
            CachedNode child = this.getNode(childKey);
            if (child == null) continue; // missing/removed child: its key is not included
            result.add(childKey);
            collectDescendantKeys(child, result);
        }
    }

    @Override
    public abstract SessionNode mutable( NodeKey key );

    @Override
    public Iterator<NodeKey> getAllNodeKeys() {
        return getAllNodeKeysAtAndBelow(getRootKey());
    }

    @Override
    public Iterator<NodeKey> getAllNodeKeysAtAndBelow( NodeKey startingKey ) {
        return new NodeCacheIterator(this, startingKey);
    }

    @Override
    public final void clear( CachedNode node ) {
        doClear(node);
        // If the active cache was swapped for a transactional one, clear it as well.
        WorkspaceCache wscache = workspaceCache.get();
        if (wscache != sharedWorkspaceCache) {
            assert wscache instanceof TransactionalWorkspaceCache;
            wscache.clear();
        }
    }

    @Override
    public final void clear() {
        doClear();
        // If the active cache was swapped for a transactional one, clear it as well.
        WorkspaceCache wscache = workspaceCache.get();
        if (wscache != sharedWorkspaceCache) {
            assert wscache instanceof TransactionalWorkspaceCache;
            wscache.clear();
        }
    }

    /** Subclass hook: clears session state for the given node (and its changes). */
    protected abstract void doClear( CachedNode node );

    /** Subclass hook: clears all session state. */
    protected abstract void doClear();
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.streams.integration;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.Deserializer;
import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.serialization.Serializer;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.KeyValueTimestamp;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.integration.utils.EmbeddedKafkaCluster;
import org.apache.kafka.streams.integration.utils.IntegrationTestUtils;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.processor.TimestampExtractor;
import org.apache.kafka.test.IntegrationTest;
import org.apache.kafka.test.TestUtils;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import java.util.Properties;
import static org.apache.kafka.common.utils.Utils.mkEntry;
import static org.apache.kafka.common.utils.Utils.mkMap;
import static org.apache.kafka.common.utils.Utils.mkProperties;
import static org.apache.kafka.streams.integration.utils.IntegrationTestUtils.cleanStateBeforeTest;
import static org.apache.kafka.streams.integration.utils.IntegrationTestUtils.getStartedStreams;
import static org.apache.kafka.streams.integration.utils.IntegrationTestUtils.quietlyCleanStateAfterTest;
import static org.apache.kafka.streams.integration.utils.IntegrationTestUtils.safeUniqueTestName;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.MatcherAssert.assertThat;
/**
 * Integration test verifying that a Kafka Streams application's partition time
 * survives a restart: after the application is stopped and started again, the
 * timestamp extractor must observe the partition time established before shutdown
 * (here 5000), even when the next record carries a smaller timestamp (4999).
 */
@RunWith(Parameterized.class)
@Category({IntegrationTest.class})
public class ResetPartitionTimeIntegrationTest {
private static final int NUM_BROKERS = 1;
private static final Properties BROKER_CONFIG;
static {
// Single-broker cluster: the transaction log must accept replication factor 1.
BROKER_CONFIG = new Properties();
BROKER_CONFIG.put("transaction.state.log.replication.factor", (short) 1);
BROKER_CONFIG.put("transaction.state.log.min.isr", 1);
}
public static final EmbeddedKafkaCluster CLUSTER =
new EmbeddedKafkaCluster(NUM_BROKERS, BROKER_CONFIG, 0L);
@BeforeClass
public static void startCluster() throws IOException {
CLUSTER.start();
}
@AfterClass
public static void closeCluster() {
CLUSTER.stop();
}
private static final StringDeserializer STRING_DESERIALIZER = new StringDeserializer();
private static final StringSerializer STRING_SERIALIZER = new StringSerializer();
private static final Serde<String> STRING_SERDE = Serdes.String();
// Poll/commit interval (ms); short to keep the test fast.
private static final int DEFAULT_TIMEOUT = 100;
// Partition time observed by MaxTimestampExtractor for the most recent record.
// -2 marks "no record processed yet"; the first record below observes -1, i.e.
// no partition time had been established before it.
private static long lastRecordedTimestamp = -2L;
// Run the whole test once per processing guarantee.
@Parameterized.Parameters(name = "{0}")
public static Collection<String[]> data() {
return Arrays.asList(new String[][] {
{StreamsConfig.AT_LEAST_ONCE},
{StreamsConfig.EXACTLY_ONCE},
{StreamsConfig.EXACTLY_ONCE_BETA}
});
}
@Parameterized.Parameter
public String processingGuarantee;
@Rule
public TestName testName = new TestName();
@Test
public void shouldPreservePartitionTimeOnKafkaStreamRestart() {
final String appId = "app-" + safeUniqueTestName(getClass(), testName);
final String input = "input";
final String outputRaw = "output-raw";
cleanStateBeforeTest(CLUSTER, 2, input, outputRaw);
// Trivial pass-through topology; only the timestamp extractor matters here.
final StreamsBuilder builder = new StreamsBuilder();
builder
.stream(input, Consumed.with(STRING_SERDE, STRING_SERDE))
.to(outputRaw);
final Properties streamsConfig = new Properties();
streamsConfig.put(StreamsConfig.DEFAULT_TIMESTAMP_EXTRACTOR_CLASS_CONFIG, MaxTimestampExtractor.class);
streamsConfig.put(StreamsConfig.APPLICATION_ID_CONFIG, appId);
streamsConfig.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
streamsConfig.put(StreamsConfig.POLL_MS_CONFIG, Integer.toString(DEFAULT_TIMEOUT));
streamsConfig.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, Integer.toString(DEFAULT_TIMEOUT));
streamsConfig.put(StreamsConfig.PROCESSING_GUARANTEE_CONFIG, processingGuarantee);
streamsConfig.put(StreamsConfig.STATE_DIR_CONFIG, TestUtils.tempDirectory().getPath());
KafkaStreams kafkaStreams = getStartedStreams(streamsConfig, builder, true);
try {
// start sending some records to have partition time committed
produceSynchronouslyToPartitionZero(
input,
Collections.singletonList(
new KeyValueTimestamp<>("k3", "v3", 5000)
)
);
verifyOutput(
outputRaw,
Collections.singletonList(
new KeyValueTimestamp<>("k3", "v3", 5000)
)
);
// The first record sees -1: no partition time existed before it.
assertThat(lastRecordedTimestamp, is(-1L));
// Reset the marker so the post-restart observation is unambiguous.
lastRecordedTimestamp = -2L;
// Restart the application; partition time must be restored from committed metadata.
kafkaStreams.close();
assertThat(kafkaStreams.state(), is(KafkaStreams.State.NOT_RUNNING));
kafkaStreams = getStartedStreams(streamsConfig, builder, true);
// resend some records and retrieve the last committed timestamp
produceSynchronouslyToPartitionZero(
input,
Collections.singletonList(
new KeyValueTimestamp<>("k5", "v5", 4999)
)
);
verifyOutput(
outputRaw,
Collections.singletonList(
new KeyValueTimestamp<>("k5", "v5", 4999)
)
);
// The pre-restart maximum (5000) must be the partition time seen for the late record.
assertThat(lastRecordedTimestamp, is(5000L));
} finally {
kafkaStreams.close();
quietlyCleanStateAfterTest(CLUSTER, kafkaStreams);
}
}
/**
 * Extractor that records the partition time supplied by Streams into
 * {@link #lastRecordedTimestamp} and otherwise returns the record's own timestamp.
 */
public static final class MaxTimestampExtractor implements TimestampExtractor {
@Override
public long extract(final ConsumerRecord<Object, Object> record, final long partitionTime) {
lastRecordedTimestamp = partitionTime;
return record.timestamp();
}
}
/** Consumes from {@code topic} and asserts the expected key/value/timestamp triples. */
private void verifyOutput(final String topic, final List<KeyValueTimestamp<String, String>> keyValueTimestamps) {
final Properties properties = mkProperties(
mkMap(
mkEntry(ConsumerConfig.GROUP_ID_CONFIG, "test-group"),
mkEntry(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers()),
mkEntry(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ((Deserializer<String>) STRING_DESERIALIZER).getClass().getName()),
mkEntry(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ((Deserializer<String>) STRING_DESERIALIZER).getClass().getName())
)
);
IntegrationTestUtils.verifyKeyValueTimestamps(properties, topic, keyValueTimestamps);
}
/** Produces the given records synchronously to partition 0 of {@code topic}. */
private static void produceSynchronouslyToPartitionZero(final String topic, final List<KeyValueTimestamp<String, String>> toProduce) {
final Properties producerConfig = mkProperties(mkMap(
mkEntry(ProducerConfig.CLIENT_ID_CONFIG, "anything"),
mkEntry(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, ((Serializer<String>) STRING_SERIALIZER).getClass().getName()),
mkEntry(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, ((Serializer<String>) STRING_SERIALIZER).getClass().getName()),
mkEntry(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers())
));
IntegrationTestUtils.produceSynchronously(producerConfig, false, topic, Optional.of(0), toProduce);
}
}
| |
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.uiDesigner.propertyInspector;
import com.intellij.openapi.actionSystem.DataProvider;
import com.intellij.openapi.components.ProjectComponent;
import com.intellij.openapi.fileEditor.FileEditor;
import com.intellij.openapi.fileEditor.FileEditorManager;
import com.intellij.openapi.fileEditor.FileEditorManagerEvent;
import com.intellij.openapi.fileEditor.FileEditorManagerListener;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.startup.StartupManager;
import com.intellij.openapi.ui.Splitter;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.IconLoader;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.wm.ToolWindow;
import com.intellij.openapi.wm.ToolWindowAnchor;
import com.intellij.openapi.wm.ToolWindowManager;
import com.intellij.uiDesigner.UIDesignerBundle;
import com.intellij.uiDesigner.componentTree.ComponentTree;
import com.intellij.uiDesigner.componentTree.ComponentTreeBuilder;
import com.intellij.uiDesigner.designSurface.GuiEditor;
import com.intellij.uiDesigner.editor.UIFormEditor;
import com.intellij.util.ui.update.MergingUpdateQueue;
import com.intellij.util.ui.update.Update;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.DefaultTreeModel;
import java.awt.*;
/**
 * Project component that owns the UI Designer tool window: a vertical splitter with
 * the form component tree above the property inspector. The tool window is created
 * lazily — only after project startup activities have completed and a UI form editor
 * becomes active — and is retargeted whenever the selected file editor changes.
 *
 * @author yole
 */
public class UIDesignerToolWindowManager implements ProjectComponent {
private final Project myProject;
private MyToolWindowPanel myToolWindowPanel;
private ComponentTree myComponentTree;
// Rebuilt whenever the active form editor changes; the previous one is disposed.
private ComponentTreeBuilder myComponentTreeBuilder;
private PropertyInspector myPropertyInspector;
private final FileEditorManager myFileEditorManager;
private ToolWindow myToolWindow;
// Becomes true after post-startup activities; the window is never created before that.
private boolean myToolWindowReady = false;
// Set on project close to prevent any re-initialization afterwards.
private boolean myToolWindowDisposed = false;
public UIDesignerToolWindowManager(final Project project, final FileEditorManager fileEditorManager) {
myProject = project;
myFileEditorManager = fileEditorManager;
// Track editor open/close/selection so the tool window follows the active form.
MyFileEditorManagerListener listener = new MyFileEditorManagerListener();
myFileEditorManager.addFileEditorManagerListener(listener,project);
}
public void projectOpened() {
// Defer tool-window creation until startup activities have completed.
StartupManager.getInstance(myProject).registerPostStartupActivity(new Runnable() {
public void run() {
myToolWindowReady = true;
}
});
}
// Creates the tool window on demand when it is allowed to exist but does not yet.
private void checkInitToolWindow() {
if (myToolWindowReady && !myToolWindowDisposed && myToolWindow == null) {
initToolWindow();
}
}
// Builds the splitter (component tree on top, property inspector below) and
// registers it as a left-anchored tool window, initially unavailable.
private void initToolWindow() {
myToolWindowPanel = new MyToolWindowPanel();
myComponentTree = new ComponentTree(myProject);
final JScrollPane scrollPane = new JScrollPane(myComponentTree);
scrollPane.setPreferredSize(new Dimension(250, -1));
myComponentTree.initQuickFixManager(scrollPane.getViewport());
myPropertyInspector= new PropertyInspector(myProject, myComponentTree);
myToolWindowPanel.setFirstComponent(scrollPane);
myToolWindowPanel.setSecondComponent(myPropertyInspector);
myToolWindow = ToolWindowManager.getInstance(myProject).registerToolWindow(UIDesignerBundle.message("toolwindow.ui.designer"),
myToolWindowPanel,
ToolWindowAnchor.LEFT, myProject);
myToolWindow.setIcon(IconLoader.getIcon("/com/intellij/uiDesigner/icons/toolWindowUIDesigner.png"));
myToolWindow.setAvailable(false, null);
}
public void projectClosed() {
if (myToolWindowPanel != null) {
if (myComponentTreeBuilder != null) {
Disposer.dispose(myComponentTreeBuilder);
}
myToolWindowPanel = null;
myToolWindow = null;
myToolWindowDisposed = true;
}
}
@NotNull @NonNls
public String getComponentName() {
return "UIDesignerToolWindowManager";
}
public void initComponent() {
}
public void disposeComponent() {
}
// Coalesces rapid editor switches into one update (200 ms quiet period).
private final MergingUpdateQueue myQueue = new MergingUpdateQueue("property.inspector", 200, true, null);
// Retargets the component tree and property inspector at the newly selected form
// editor (debounced through myQueue), creating or hiding the tool window as needed.
private void processFileEditorChange(final UIFormEditor newEditor) {
myQueue.cancelAllUpdates();
myQueue.queue(new Update("update") {
public void run() {
if (!myToolWindowReady || myToolWindowDisposed) return;
GuiEditor activeFormEditor = newEditor != null ? newEditor.getEditor() : null;
if (myToolWindow == null) {
// No window yet: only create it once a form editor actually appears.
if (activeFormEditor == null) return;
initToolWindow();
}
// Dispose the builder tied to the previous editor before re-targeting.
if (myComponentTreeBuilder != null) {
Disposer.dispose(myComponentTreeBuilder);
myComponentTreeBuilder = null;
}
// Reset the tree to an empty model; the new builder repopulates it below.
myComponentTree.setModel(new DefaultTreeModel(new DefaultMutableTreeNode()));
myComponentTree.setEditor(activeFormEditor);
myComponentTree.setFormEditor(newEditor);
myPropertyInspector.setEditor(activeFormEditor);
if (activeFormEditor == null) {
myToolWindow.setAvailable(false, null);
}
else {
myComponentTreeBuilder = new ComponentTreeBuilder(myComponentTree, activeFormEditor);
myToolWindow.setAvailable(true, null);
myToolWindow.show(null);
}
}
});
}
/** @return the currently selected editor if it is a UI form editor, else null */
@Nullable
public UIFormEditor getActiveFormFileEditor() {
FileEditor[] fileEditors = myFileEditorManager.getSelectedEditors();
if (fileEditors.length > 0 && fileEditors [0] instanceof UIFormEditor) {
return (UIFormEditor) fileEditors [0];
}
return null;
}
/** @return the GuiEditor of the active form file editor, or null if none */
@Nullable
public GuiEditor getActiveFormEditor() {
UIFormEditor formEditor = getActiveFormFileEditor();
return formEditor == null ? null : formEditor.getEditor();
}
public static UIDesignerToolWindowManager getInstance(Project project) {
return project.getComponent(UIDesignerToolWindowManager.class);
}
public ComponentTree getComponentTree() {
// Lazily create the tool window so the tree exists when first requested.
checkInitToolWindow();
return myComponentTree;
}
public ComponentTreeBuilder getComponentTreeBuilder() {
return myComponentTreeBuilder;
}
public PropertyInspector getPropertyInspector() {
return myPropertyInspector;
}
// Repaints both panes so error/intention highlighting reflects the current state.
public void refreshErrors() {
myComponentTree.refreshIntentionHint();
myComponentTree.repaint(myComponentTree.getVisibleRect());
// PropertyInspector
myPropertyInspector.refreshIntentionHint();
myPropertyInspector.repaint(myPropertyInspector.getVisibleRect());
}
// NOTE(review): assumes a builder exists; this NPEs if called before any form
// editor has been activated — confirm all callers run after initialization.
public void updateComponentTree() {
myComponentTreeBuilder.updateFromRoot();
}
// Forwards file-editor lifecycle events into processFileEditorChange.
private class MyFileEditorManagerListener implements FileEditorManagerListener {
public void fileOpened(FileEditorManager source, VirtualFile file) {
processFileEditorChange(getActiveFormFileEditor());
}
public void fileClosed(FileEditorManager source, VirtualFile file) {
processFileEditorChange(getActiveFormFileEditor());
}
public void selectionChanged(FileEditorManagerEvent event) {
UIFormEditor newEditor = event.getNewEditor() instanceof UIFormEditor ? (UIFormEditor)event.getNewEditor() : null;
processFileEditorChange(newEditor);
}
}
// Splitter hosting the two panes; exposes the active GuiEditor via DataProvider.
private class MyToolWindowPanel extends Splitter implements DataProvider {
MyToolWindowPanel() {
super(true, 0.33f);
}
@Nullable
public Object getData(@NonNls String dataId) {
if (dataId.equals(GuiEditor.class.getName())) {
return getActiveFormEditor();
}
return null;
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.serde2;
import java.sql.Date;
import java.util.List;
import java.util.Properties;
import java.util.Random;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.binarysortable.MyTestClass;
import org.apache.hadoop.hive.serde2.binarysortable.MyTestInnerStruct;
import org.apache.hadoop.hive.serde2.binarysortable.MyTestPrimitiveClass;
import org.apache.hadoop.hive.serde2.binarysortable.TestBinarySortableSerDe;
import org.apache.hadoop.hive.serde2.binarysortable.MyTestPrimitiveClass.ExtraTypeInfo;
import org.apache.hadoop.hive.serde2.columnar.BytesRefArrayWritable;
import org.apache.hadoop.hive.serde2.columnar.BytesRefWritable;
import org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe;
import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
import org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.ObjectInspectorOptions;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.Text;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotSame;
import org.junit.Test;
/**
 * StatsSerde Test: verifies that the raw-data-size statistics reported by several
 * SerDes (LazySimpleSerDe, LazyBinarySerDe, ColumnarSerDe) are consistent across
 * serialization and deserialization.
 */
public class TestStatsSerde {

    /**
     * Test LazySimpleSerDe: the raw data size reported after deserializing and after
     * re-serializing a row must equal the text length.
     */
    @Test
    public void testLazySimpleSerDe() throws Throwable {
        try {
            // Create the SerDe
            System.out.println("test: testLazySimpleSerDe");
            LazySimpleSerDe serDe = new LazySimpleSerDe();
            Configuration conf = new Configuration();
            Properties tbl = createProperties();
            serDe.initialize(conf, tbl, null);
            // Data
            Text t = new Text("123\t456\t789\t1000\t5.3\thive and hadoop\t1.\tNULL");
            // Test
            deserializeAndSerializeLazySimple(serDe, t);
            System.out.println("test: testLazySimpleSerDe - OK");
        } catch (Throwable e) {
            e.printStackTrace();
            throw e;
        }
    }

    /**
     * Deserializes and re-serializes the given text row, asserting that the SerDe's
     * reported raw data size matches the text length in both directions.
     */
    private void deserializeAndSerializeLazySimple(LazySimpleSerDe serDe, Text t)
            throws SerDeException {
        // Get the row structure
        StructObjectInspector oi = (StructObjectInspector) serDe
            .getObjectInspector();
        // Deserialize
        Object row = serDe.deserialize(t);
        assertEquals("serialized size correct after deserialization", serDe.getSerDeStats()
            .getRawDataSize(), t.getLength());
        // Serialize
        Text serializedText = (Text) serDe.serialize(row, oi);
        assertEquals("serialized size correct after serialization", serDe.getSerDeStats()
            .getRawDataSize(),
            serializedText.toString().length());
    }

    /**
     * Test LazyBinarySerDe with randomly generated rows: the total raw data size
     * accumulated while serializing must equal the total accumulated while
     * deserializing.
     */
    @Test
    public void testLazyBinarySerDe() throws Throwable {
        try {
            System.out.println("test: testLazyBinarySerDe");
            int num = 1000;
            Random r = new Random(1234); // fixed seed keeps the test deterministic
            MyTestClass[] rows = new MyTestClass[num]; // Java-style array declaration
            for (int i = 0; i < num; i++) {
                MyTestClass t = new MyTestClass();
                ExtraTypeInfo extraTypeInfo = new ExtraTypeInfo();
                t.randomFill(r, extraTypeInfo);
                rows[i] = t;
            }
            StructObjectInspector rowOI = (StructObjectInspector) ObjectInspectorFactory
                .getReflectionObjectInspector(MyTestClass.class,
                    ObjectInspectorOptions.JAVA);
            String fieldNames = ObjectInspectorUtils.getFieldNames(rowOI);
            String fieldTypes = ObjectInspectorUtils.getFieldTypes(rowOI);
            Properties schema = new Properties();
            schema.setProperty(serdeConstants.LIST_COLUMNS, fieldNames);
            schema.setProperty(serdeConstants.LIST_COLUMN_TYPES, fieldTypes);
            LazyBinarySerDe serDe = new LazyBinarySerDe();
            serDe.initialize(new Configuration(), schema, null);
            deserializeAndSerializeLazyBinary(serDe, rows, rowOI);
            System.out.println("test: testLazyBinarySerDe - OK");
        } catch (Throwable e) {
            e.printStackTrace();
            throw e;
        }
    }

    /**
     * Serializes every row, then deserializes the results, summing the raw data size
     * reported in each direction; the totals must agree and be non-zero.
     */
    private void deserializeAndSerializeLazyBinary(AbstractSerDe serDe, Object[] rows, ObjectInspector rowOI)
            throws Throwable {
        BytesWritable[] bytes = new BytesWritable[rows.length]; // Java-style array declaration
        int lenS = 0; // total raw size observed while serializing
        int lenD = 0; // total raw size observed while deserializing
        for (int i = 0; i < rows.length; i++) {
            BytesWritable s = (BytesWritable) serDe.serialize(rows[i], rowOI);
            lenS += serDe.getSerDeStats().getRawDataSize();
            bytes[i] = new BytesWritable();
            bytes[i].set(s);
        }
        for (int i = 0; i < rows.length; i++) {
            serDe.deserialize(bytes[i]);
            lenD += serDe.getSerDeStats().getRawDataSize();
        }
        // serialized sizes after serialization and deserialization should be equal
        assertEquals(lenS, lenD);
        // NOTE: relies on Integer caching of boxed 0 — effectively asserts lenS != 0.
        assertNotSame(0, lenS);
    }

    /**
     * Test ColumnarSerDe: the reported raw data size must equal the sum of the
     * column byte lengths after both deserialization and serialization.
     */
    @Test
    public void testColumnarSerDe() throws Throwable {
        try {
            System.out.println("test: testColumnarSerde");
            // Create the SerDe
            ColumnarSerDe serDe = new ColumnarSerDe();
            Configuration conf = new Configuration();
            Properties tbl = createProperties();
            serDe.initialize(conf, tbl, null);
            // Data
            BytesRefArrayWritable braw = new BytesRefArrayWritable(8);
            String[] data = {"123", "456", "789", "1000", "5.3", "hive and hadoop", "1.", "NULL"};
            for (int i = 0; i < 8; i++) {
                // NOTE: getBytes() uses the platform default charset; fine for this
                // ASCII-only fixture.
                braw.set(i, new BytesRefWritable(data[i].getBytes()));
            }
            // Test
            deserializeAndSerializeColumnar(serDe, braw, data);
            System.out.println("test: testColumnarSerde - OK");
        } catch (Throwable e) {
            e.printStackTrace();
            throw e;
        }
    }

    /**
     * Deserializes and re-serializes the columnar row, asserting that the reported
     * raw data size equals the total column length in both directions.
     */
    private void deserializeAndSerializeColumnar(ColumnarSerDe serDe, BytesRefArrayWritable t,
            String[] data) throws SerDeException {
        // Get the row structure
        StructObjectInspector oi = (StructObjectInspector) serDe
            .getObjectInspector();
        // Deserialize
        Object row = serDe.deserialize(t);
        int size = 0;
        for (int i = 0; i < data.length; i++) {
            size += data[i].length();
        }
        assertEquals("serialized size correct after deserialization", size, serDe.getSerDeStats()
            .getRawDataSize());
        // NOTE: relies on Integer caching of boxed 0 — effectively asserts size != 0.
        assertNotSame(0, size);
        BytesRefArrayWritable serializedData = (BytesRefArrayWritable) serDe.serialize(row, oi);
        size = 0;
        for (int i = 0; i < serializedData.size(); i++) {
            size += serializedData.get(i).getLength();
        }
        assertEquals("serialized size correct after serialization", size, serDe.getSerDeStats()
            .getRawDataSize());
        assertNotSame(0, size);
    }

    /** Builds table properties describing the 8-column test schema shared by the tests. */
    private Properties createProperties() {
        Properties tbl = new Properties();
        // Set the configuration parameters
        tbl.setProperty(serdeConstants.SERIALIZATION_FORMAT, "9");
        tbl.setProperty("columns",
            "abyte,ashort,aint,along,adouble,astring,anullint,anullstring");
        tbl.setProperty("columns.types",
            "tinyint:smallint:int:bigint:double:string:int:string");
        tbl.setProperty(serdeConstants.SERIALIZATION_NULL_FORMAT, "NULL");
        return tbl;
    }
}
| |
package com.example.fftrajectorymapper;
import java.net.InetAddress;
import java.util.ArrayList;
import java.util.Arrays;
import com.illposed.osc.OSCMessage;
import com.illposed.osc.OSCPortOut;
import java.net.InetAddress;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.BlurMaskFilter;
import android.graphics.Canvas;
import android.graphics.EmbossMaskFilter;
import android.graphics.MaskFilter;
import android.graphics.Paint;
import android.graphics.PorterDuff;
import android.graphics.PorterDuffXfermode;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Environment;
import android.preference.PreferenceManager;
import android.util.Log;
import android.view.Display;
import android.view.Menu;
import android.view.MenuItem;
import android.view.MotionEvent;
import android.view.View;
/*
* Copyright (C) 2007 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import android.view.Window;
public class MainActivity extends Activity{
//implements ColorPickerDialog.OnColorChangedListener {
//OSC start
// Outgoing OSC connection; stays null if the client address cannot be resolved in onCreate.
OSCPortOut sender = null;
// Most recently built OSC message (see sendMessage).
OSCMessage message;
// Payload buffer for outgoing messages: x, y, submitted-flag.
// Declared Java-style (was the C-style "Object args[]").
Object[] args;
// Player position in map units; overwritten from SharedPreferences in onCreate.
float playerX = 200;
float playerY = 200;
// Map origin offset in bitmap pixels; loaded from SharedPreferences in onCreate.
float originX = 0;
float originY = 0;
// Logical map dimensions used to convert pixels to map units; loaded from preferences.
float mapWidth = 400;
float mapHeight = 400;
// Submit button geometry in pixels (drawn in MyView.onDraw).
int submitButtonSize = 160;
int submitButtonMargin = 10;
// Touch/drag state in screen coordinates; picX/picY is the bitmap's draw offset.
float sX, sY, tmpX, tmpY, prevX, prevY, picX, picY;
// Background map image drawn by MyView.
Bitmap mBitmap;
// UDP port of the OSC receiver.
int OSCPort = 7244;
//OSC end
/**
 * Activity entry point: installs the full-screen drawing view, configures the
 * marker Paint, loads map/player settings from SharedPreferences, and opens the
 * outgoing OSC connection.
 */
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
// Full-screen, title-less window hosting the map view.
this.requestWindowFeature(Window.FEATURE_NO_TITLE);
setContentView(new MyView(this));
View decorView = getWindow().getDecorView();
int uiOptions = View.SYSTEM_UI_FLAG_FULLSCREEN |View.SYSTEM_UI_FLAG_LAYOUT_STABLE | View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN ;
decorView.setSystemUiVisibility(uiOptions);
// Red, round-capped stroke paint used by MyView.onDraw for markers and outlines.
mPaint = new Paint();
mPaint.setAntiAlias(true);
mPaint.setDither(true);
mPaint.setColor(0xFFFF0000);
mPaint.setStyle(Paint.Style.STROKE);
mPaint.setStrokeJoin(Paint.Join.ROUND);
mPaint.setStrokeCap(Paint.Cap.ROUND);
mPaint.setStrokeWidth(12);
// Mask filters prepared up front; their application site is not visible in this chunk.
mEmboss = new EmbossMaskFilter(new float[] { 1, 1, 1 },
0.4f, 6, 3.5f);
mBlur = new BlurMaskFilter(8, BlurMaskFilter.Blur.NORMAL);
//OSC start
SharedPreferences p = PreferenceManager.getDefaultSharedPreferences(this);
// NOTE(review): the settings screen is launched on every create — presumably so the
// user confirms preferences before mapping; confirm the intended UX.
startActivity(new Intent(this, SettingsActivity.class));
String defaultClient = this.getResources().getString(R.string.defaultClient);
//playerX = Float.parseFloat(this.getResources().getString(R.string.playerX));
// Map geometry and player position are stored as strings in preferences.
playerX = Float.parseFloat(p.getString("playerX","0"));
playerY = Float.parseFloat(p.getString("playerY","0"));
originX = Float.parseFloat(p.getString("originX","0"));
originY = Float.parseFloat(p.getString("originY","0"));
mapWidth = Float.parseFloat(p.getString("mapWidth","0"));
mapHeight = Float.parseFloat(p.getString("mapHeight","0"));
String prefClient = p.getString("OSCClient","192.168.");//"192.168.1.64";//
Log.w("osc",prefClient);
try{
//This expects a string w/ a url
// Resolve the receiver address; on failure 'sender' stays null and later send
// attempts log errors instead of transmitting.
sender = new OSCPortOut(InetAddress.getByName(prefClient), OSCPort);
}catch (Exception e){
Log.w("dbg", "oscoutport: " + e.toString() + ": " + prefClient);
}
//OSC end
}
// Stroke paint for the player marker, touch point, and submit-button outline.
private Paint mPaint;
// Optional mask filters created in onCreate; where they are applied is not
// visible in this chunk.
private MaskFilter mEmboss;
private MaskFilter mBlur;
/**
 * Changes the marker/stroke color. Matches the signature of
 * ColorPickerDialog.OnColorChangedListener#colorChanged (see the commented-out
 * "implements" clause on the class).
 *
 * @param color ARGB color to use for subsequent drawing
 */
public void colorChanged(int color) {
mPaint.setColor(color);
}
/**
 * Converts a screen-space touch point into map ("unity") coordinates and transmits
 * it to the OSC address "/coords" together with a submitted flag.
 *
 * @param sX screen x of the touch point
 * @param sY screen y of the touch point
 * @param picX bitmap draw offset x
 * @param picY bitmap draw offset y
 * @param mBitmap the background map image used for scaling
 * @param submitted 1 when the point was submitted, 0 for a preview
 */
public void sendMessage(float sX,float sY,float picX,float picY,Bitmap mBitmap,float submitted){
    // Translate from screen pixels into the bitmap's frame (honoring the
    // configurable origin), then normalize and scale into map units.
    float normalizedX = (sX - picX - originX) / mBitmap.getWidth();
    float normalizedY = (sY - picY - originY) / mBitmap.getHeight();
    float unityStartX = normalizedX * mapWidth;
    float unityStartY = normalizedY * mapHeight;
    args = new Object[] { unityStartX, unityStartY, submitted };
    message = new OSCMessage("/coords", Arrays.asList(args));
    try {
        sender.send(message);
    } catch (Exception e) {
        Log.w("oscthread", "sender: " + e.toString());
    }
}
/**
 * Sends the current selection over OSC off the UI thread. The single string
 * parameter is "submit" when the submit button was tapped; any other value
 * sends the coordinates un-submitted (third OSC argument 0).
 */
private class oscthread extends AsyncTask<String, Integer, String> {
    @Override
    protected String doInBackground(String... params) {
        // Bug fix: the original used `params[0] == "submit"`, which compares
        // object identity, not string content. It only appeared to work
        // because the caller passed an interned literal; use equals().
        boolean submitted = "submit".equals(params[0]);
        if (submitted) {
            sendMessage(sX, sY, picX, picY, mBitmap, 1);
        } else {
            sendMessage(sX, sY, picX, picY, mBitmap, 0);
        }
        return null;
    }
}
/**
 * Interactive map view: draws the background bitmap, the player marker, the
 * current selection point and a submit button, and translates touch gestures
 * into selection moves, two-finger map pans and submit actions.
 */
public class MyView extends View {

    private static final float MINP = 0.25f; // unused; kept from the original sketch
    private static final float MAXP = 0.75f; // unused; kept from the original sketch

    private Canvas mCanvas;
    private Paint mBitmapPaint;
    private float prevX, prevY;
    private float maxLength = 200;
    // True while a second pointer is down: drags then pan the map instead of
    // moving the selection point.
    private boolean mapMove = false;

    public MyView(Context c) {
        super(c);
        mBitmapPaint = new Paint(Paint.DITHER_FLAG);
    }

    /**
     * Returns true when (x, y) falls inside the submit-button hit area.
     * Extracted: this test was copy-pasted in touch_start/touch_move/touch_up.
     *
     * NOTE(review): the button is drawn as (margin, margin, size, size) in
     * onDraw, but the hit region tested here is (margin, margin,
     * size - margin, size - margin); kept as-is to preserve behavior —
     * confirm which rectangle is intended.
     */
    private boolean inSubmitButton(float x, float y) {
        return x > submitButtonMargin
                && y > submitButtonMargin
                && x < submitButtonSize - submitButtonMargin
                && y < submitButtonSize - submitButtonMargin;
    }

    @Override
    protected void onSizeChanged(int w, int h, int oldw, int oldh) {
        super.onSizeChanged(w, h, oldw, oldh);
        Bitmap tempBitmap = BitmapFactory.decodeFile(
                Environment.getExternalStorageDirectory() + "/FFTrajectoryBackground.jpg");
        if (tempBitmap == null) {
            // Bug fix: decodeFile returns null when the file is missing or
            // unreadable; the original then crashed with an NPE on copy().
            // Fall back to a blank, view-sized bitmap.
            Log.w("osc", "FFTrajectoryBackground.jpg missing; using blank background");
            mBitmap = Bitmap.createBitmap(w, h, Bitmap.Config.ARGB_8888);
        } else {
            mBitmap = tempBitmap.copy(Bitmap.Config.ARGB_8888, true);
        }
        mCanvas = new Canvas(mBitmap);
    }

    @Override
    protected void onDraw(Canvas canvas) {
        canvas.drawColor(0xFFAAAAAA);
        canvas.drawBitmap(mBitmap, picX, picY, mBitmapPaint);
        // Player marker: map units scaled into bitmap pixels, then shifted by
        // the pan offset and origin.
        canvas.drawCircle(mBitmap.getWidth() * (playerX / mapWidth) + picX + originX,
                mBitmap.getHeight() * (playerY / mapHeight) + picY + originY, 8, mPaint);
        // Current selection point and the submit button outline.
        canvas.drawCircle(sX, sY, 3, mPaint);
        canvas.drawRect(submitButtonMargin, submitButtonMargin, submitButtonSize, submitButtonSize, mPaint);
    }

    private static final float TOUCH_TOLERANCE = 4; // unused; kept from the original sketch

    private void touch_start(float x, float y) {
        // Outside the submit button (and when not panning), a touch moves the
        // selection point. Touching the button itself leaves the selection alone.
        if (!mapMove && !inSubmitButton(x, y)) {
            sX = x;
            sY = y;
        }
        prevX = x;
        prevY = y;
    }

    int pathListIndex = 0;

    private void touch_move(float x, float y) {
        if (mapMove) {
            // Two-finger drag: pan the map and carry the selection with it.
            float deltX = x - prevX;
            float deltY = y - prevY;
            picX += deltX;
            picY += deltY;
            sX += deltX;
            sY += deltY;
            prevX = x;
            prevY = y;
        } else if (!inSubmitButton(x, y)) {
            sX = x;
            sY = y;
        }
    }

    private void touch_up(float x, float y) {
        // Releasing on the submit button sends the coordinates as submitted.
        if (inSubmitButton(x, y)) {
            new oscthread().execute("submit");
        } else {
            new oscthread().execute("standard");
        }
    }

    @Override
    public boolean onTouchEvent(MotionEvent event) {
        float x = event.getX();
        float y = event.getY();
        switch (event.getActionMasked()) {
        case MotionEvent.ACTION_POINTER_DOWN:
            // Second finger down: switch to pan mode and restore the selection
            // captured at ACTION_DOWN (the first finger moved it).
            Log.w("osc", "down twice");
            mapMove = true;
            sX = tmpX;
            sY = tmpY;
            touch_start(x, y);
            invalidate();
            break;
        case MotionEvent.ACTION_DOWN:
            Log.w("osc", "down");
            // Remember the selection so a two-finger pan can restore it.
            tmpX = sX;
            tmpY = sY;
            touch_start(x, y);
            invalidate();
            break;
        case MotionEvent.ACTION_MOVE:
            touch_move(x, y);
            invalidate();
            break;
        case MotionEvent.ACTION_POINTER_UP:
            mapMove = false;
            touch_up(x, y);
            invalidate();
            break;
        case MotionEvent.ACTION_UP:
            touch_up(x, y);
            invalidate();
            break;
        }
        return true;
    }
}
// Options-menu item ids, assigned sequentially from Menu.FIRST.
private static final int COLOR_MENU_ID = Menu.FIRST;
private static final int EMBOSS_MENU_ID = Menu.FIRST + 1;
private static final int BLUR_MENU_ID = Menu.FIRST + 2;
private static final int ERASE_MENU_ID = Menu.FIRST + 3;
private static final int SRCATOP_MENU_ID = Menu.FIRST + 4;

/**
 * Builds the paint-effect options menu.
 * NOTE(review): Blur, Erase and SrcATop all share the shortcut ('5', 'z') —
 * this looks like a copy-paste slip; confirm the intended shortcuts.
 */
@Override
public boolean onCreateOptionsMenu(Menu menu) {
    super.onCreateOptionsMenu(menu);
    menu.add(0, COLOR_MENU_ID, 0, "Color").setShortcut('3', 'c');
    menu.add(0, EMBOSS_MENU_ID, 0, "Emboss").setShortcut('4', 's');
    menu.add(0, BLUR_MENU_ID, 0, "Blur").setShortcut('5', 'z');
    menu.add(0, ERASE_MENU_ID, 0, "Erase").setShortcut('5', 'z');
    menu.add(0, SRCATOP_MENU_ID, 0, "SrcATop").setShortcut('5', 'z');
    /**** Is this the mechanism to extend with filter effects?
    Intent intent = new Intent(null, getIntent().getData());
    intent.addCategory(Intent.CATEGORY_ALTERNATIVE);
    menu.addIntentOptions(
    Menu.ALTERNATIVE, 0,
    new ComponentName(this, NotesList.class),
    null, intent, 0, null);
    *****/
    return true;
}
/** No per-show menu adjustments; always displays the menu as created. */
@Override
public boolean onPrepareOptionsMenu(Menu menu) {
    super.onPrepareOptionsMenu(menu);
    return true;
}
/**
 * Applies the selected paint effect. Every selection first clears any
 * transfer mode and restores full alpha, so effects never stack; Emboss and
 * Blur toggle their mask filter on repeated selection.
 */
@Override
public boolean onOptionsItemSelected(MenuItem item) {
    mPaint.setXfermode(null);
    mPaint.setAlpha(0xFF);
    int id = item.getItemId();
    if (id == COLOR_MENU_ID) {
        // Color picker is currently disabled.
        //new ColorPickerDialog(this, this, mPaint.getColor()).show();
        return true;
    }
    if (id == EMBOSS_MENU_ID) {
        mPaint.setMaskFilter(mPaint.getMaskFilter() != mEmboss ? mEmboss : null);
        return true;
    }
    if (id == BLUR_MENU_ID) {
        mPaint.setMaskFilter(mPaint.getMaskFilter() != mBlur ? mBlur : null);
        return true;
    }
    if (id == ERASE_MENU_ID) {
        mPaint.setXfermode(new PorterDuffXfermode(PorterDuff.Mode.CLEAR));
        return true;
    }
    if (id == SRCATOP_MENU_ID) {
        mPaint.setXfermode(new PorterDuffXfermode(PorterDuff.Mode.SRC_ATOP));
        mPaint.setAlpha(0x80);
        return true;
    }
    return super.onOptionsItemSelected(item);
}
}
| |
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
* Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.ec2.model;
import java.io.Serializable;
/**
* <p>
* Describes a Reserved Instance listing state.
* </p>
*/
/**
 * <p>
 * Describes a Reserved Instance listing state.
 * </p>
 */
public class InstanceCount implements Serializable, Cloneable {

    /** The states of the listed Reserved Instances. */
    private String state;

    /**
     * The number of listed Reserved Instances in the state specified by the
     * <code>state</code>.
     */
    private Integer instanceCount;

    /**
     * Sets the states of the listed Reserved Instances.
     *
     * @param state the states of the listed Reserved Instances
     * @see ListingState
     */
    public void setState(String state) {
        this.state = state;
    }

    /**
     * Returns the states of the listed Reserved Instances.
     *
     * @return the states of the listed Reserved Instances
     * @see ListingState
     */
    public String getState() {
        return this.state;
    }

    /**
     * Fluent variant of {@link #setState(String)}.
     *
     * @param state the states of the listed Reserved Instances
     * @return this object, for method chaining
     * @see ListingState
     */
    public InstanceCount withState(String state) {
        setState(state);
        return this;
    }

    /**
     * Sets the states of the listed Reserved Instances from the enum value.
     *
     * @param state the states of the listed Reserved Instances
     * @see ListingState
     */
    public void setState(ListingState state) {
        this.state = state.toString();
    }

    /**
     * Fluent variant of {@link #setState(ListingState)}.
     *
     * @param state the states of the listed Reserved Instances
     * @return this object, for method chaining
     * @see ListingState
     */
    public InstanceCount withState(ListingState state) {
        setState(state);
        return this;
    }

    /**
     * Sets the number of listed Reserved Instances in the state specified by
     * the <code>state</code>.
     *
     * @param instanceCount the number of listed Reserved Instances
     */
    public void setInstanceCount(Integer instanceCount) {
        this.instanceCount = instanceCount;
    }

    /**
     * Returns the number of listed Reserved Instances in the state specified
     * by the <code>state</code>.
     *
     * @return the number of listed Reserved Instances
     */
    public Integer getInstanceCount() {
        return this.instanceCount;
    }

    /**
     * Fluent variant of {@link #setInstanceCount(Integer)}.
     *
     * @param instanceCount the number of listed Reserved Instances
     * @return this object, for method chaining
     */
    public InstanceCount withInstanceCount(Integer instanceCount) {
        setInstanceCount(instanceCount);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @return a string representation of this object
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getState() != null) {
            sb.append("State: " + getState() + ",");
        }
        if (getInstanceCount() != null) {
            sb.append("InstanceCount: " + getInstanceCount());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof InstanceCount)) {
            return false;
        }
        InstanceCount other = (InstanceCount) obj;
        // Null-safe field comparison: both null, or both non-null and equal.
        if (getState() == null ? other.getState() != null
                : !other.getState().equals(getState())) {
            return false;
        }
        if (getInstanceCount() == null ? other.getInstanceCount() != null
                : !other.getInstanceCount().equals(getInstanceCount())) {
            return false;
        }
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        // Same 31-based rolling hash as the generated original.
        int hashCode = prime + ((getState() == null) ? 0 : getState().hashCode());
        hashCode = prime * hashCode
                + ((getInstanceCount() == null) ? 0 : getInstanceCount().hashCode());
        return hashCode;
    }

    @Override
    public InstanceCount clone() {
        try {
            return (InstanceCount) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException(
                    "Got a CloneNotSupportedException from Object.clone() "
                            + "even though we're Cloneable!", e);
        }
    }
}
| |
/*
* Licensed to The Apereo Foundation under one or more contributor license
* agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*
* The Apereo Foundation licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.unitime.timetable.gwt.client.solver;
import org.unitime.timetable.gwt.client.ToolBox;
import org.unitime.timetable.gwt.client.ToolBox.Page;
import org.unitime.timetable.gwt.client.page.UniTimeNotifications;
import org.unitime.timetable.gwt.client.widgets.LoadingWidget;
import org.unitime.timetable.gwt.client.widgets.P;
import org.unitime.timetable.gwt.client.widgets.SimpleForm;
import org.unitime.timetable.gwt.client.widgets.UniTimeHeaderPanel;
import org.unitime.timetable.gwt.command.client.GwtRpcService;
import org.unitime.timetable.gwt.command.client.GwtRpcServiceAsync;
import org.unitime.timetable.gwt.resources.GwtMessages;
import org.unitime.timetable.gwt.shared.CourseTimetablingSolverInterface.NotAssignedClassesFilterRequest;
import org.unitime.timetable.gwt.shared.CourseTimetablingSolverInterface.NotAssignedClassesFilterResponse;
import org.unitime.timetable.gwt.shared.CourseTimetablingSolverInterface.NotAssignedClassesRequest;
import org.unitime.timetable.gwt.shared.CourseTimetablingSolverInterface.NotAssignedClassesResponse;
import org.unitime.timetable.gwt.shared.EventInterface.EncodeQueryRpcRequest;
import org.unitime.timetable.gwt.shared.EventInterface.EncodeQueryRpcResponse;
import org.unitime.timetable.gwt.shared.FilterInterface;
import org.unitime.timetable.gwt.shared.SolverInterface.PageMessage;
import org.unitime.timetable.gwt.shared.SolverInterface.PageMessageType;
import com.google.gwt.core.client.GWT;
import com.google.gwt.dom.client.Element;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.dom.client.ClickHandler;
import com.google.gwt.event.logical.shared.ValueChangeEvent;
import com.google.gwt.event.logical.shared.ValueChangeHandler;
import com.google.gwt.user.client.DOM;
import com.google.gwt.user.client.History;
import com.google.gwt.user.client.Window;
import com.google.gwt.user.client.rpc.AsyncCallback;
import com.google.gwt.user.client.ui.Composite;
import com.google.gwt.user.client.ui.HTML;
import com.google.gwt.user.client.ui.RootPanel;
import com.google.gwt.user.client.ui.SimplePanel;
/**
* @author Tomas Muller
*/
public class NotAssignedClassesPage extends Composite {
    private static final GwtMessages MESSAGES = GWT.create(GwtMessages.class);
    protected static GwtRpcServiceAsync RPC = GWT.create(GwtRpcService.class);
    // Collapsible filter panel at the top of the page.
    private PageFilter iFilter;
    private SimplePanel iRootPanel;
    private SimpleForm iPanel;
    // Filter/response of the most recent successful search; print() renders iLastResponse.
    private FilterInterface iLastFilter;
    private NotAssignedClassesResponse iLastResponse;
    private DataTable iTable;
    // Identical note shown above and below the table when the response requests it.
    private HTML iNote1, iNote2;
    private PreferenceLegend iLegend;

    /**
     * Builds the page: filter panel with Search/Print/Export buttons (all
     * disabled until the filter is initialized), wires browser-history
     * integration, then kicks off async initialization via init().
     */
    public NotAssignedClassesPage() {
        iFilter = new PageFilter();
        iFilter.getHeader().setCollapsible(SolverCookie.getInstance().isNotAssignedClassesFilter());
        iFilter.getHeader().addCollapsibleHandler(new ValueChangeHandler<Boolean>() {
            @Override
            public void onValueChange(ValueChangeEvent<Boolean> event) {
                // Remember the collapsed/expanded state across visits.
                if (event.getValue() != null)
                    SolverCookie.getInstance().setNotAssignedClassesFilter(event.getValue());
            }
        });
        iPanel = new SimpleForm(2);
        iPanel.removeStyleName("unitime-NotPrintableBottomLine");
        iPanel.addRow(iFilter);
        iNote1 = new HTML(MESSAGES.notAssignedClassesNote()); iNote1.addStyleName("table-note-top");
        iNote2 = new HTML(MESSAGES.notAssignedClassesNote()); iNote2.addStyleName("table-note-bottom");
        iFilter.getFooter().addButton("search", MESSAGES.buttonSearch(), new ClickHandler() {
            @Override
            public void onClick(ClickEvent event) {
                // Push the filter query into the history token so the search is bookmarkable.
                String token = iFilter.getQuery();
                if (!History.getToken().equals(token))
                    History.newItem(token, false);
                search(null);
            }
        });
        iFilter.getFooter().setEnabled("search", false);
        iFilter.getFooter().addButton("print", MESSAGES.buttonPrint(), new ClickHandler() {
            @Override
            public void onClick(ClickEvent event) {
                print();
            }
        });
        iFilter.getFooter().setEnabled("print", false);
        iFilter.getFooter().addButton("exportCSV", MESSAGES.buttonExportCSV(), new ClickHandler() {
            @Override
            public void onClick(ClickEvent event) {
                exportData("csv");
            }
        });
        iFilter.getFooter().setEnabled("exportCSV", false);
        iFilter.getFooter().addButton("exportPDF", MESSAGES.buttonExportPDF(), new ClickHandler() {
            @Override
            public void onClick(ClickEvent event) {
                exportData("pdf");
            }
        });
        iFilter.getFooter().setEnabled("exportPDF", false);
        iRootPanel = new SimplePanel(iPanel);
        iRootPanel.addStyleName("unitime-NotAssignedClassesPage");
        initWidget(iRootPanel);
        init();
        History.addValueChangeHandler(new ValueChangeHandler<String>() {
            @Override
            public void onValueChange(ValueChangeEvent<String> event) {
                // Back/forward navigation: restore the filter and re-run the
                // search, but only after the page has shown results at least once.
                iFilter.setQuery(event.getValue(), true);
                if (iPanel.getRowCount() > 1)
                    search(null);
            }
        });
    }

    /**
     * Loads the filter definition (and preference legend) from the server,
     * enables the Search button, and auto-searches when the filter panel is
     * configured to start collapsed.
     */
    protected void init() {
        RPC.execute(new NotAssignedClassesFilterRequest(), new AsyncCallback<NotAssignedClassesFilterResponse>() {
            @Override
            public void onFailure(Throwable caught) {
                iFilter.getFooter().setErrorMessage(MESSAGES.failedToInitialize(caught.getMessage()));
                UniTimeNotifications.error(MESSAGES.failedToInitialize(caught.getMessage()), caught);
                ToolBox.checkAccess(caught);
            }
            @Override
            public void onSuccess(NotAssignedClassesFilterResponse result) {
                iLegend = new PreferenceLegend(result.getPreferences());
                iFilter.getFooter().clearMessage();
                iFilter.setValue(result);
                iFilter.getFooter().setEnabled("search", true);
                createTriggers();
                if (iFilter.getHeader().isCollapsible() != null && !iFilter.getHeader().isCollapsible())
                    search(null);
            }
        });
    }

    /**
     * JSNI: exposes a global refreshPage() hook so non-GWT page scripts can
     * trigger a re-search.
     */
    public static native void createTriggers()/*-{
        $wnd.refreshPage = function() {
            @org.unitime.timetable.gwt.client.solver.NotAssignedClassesPage::__search()();
        };
    }-*/;

    /**
     * Re-runs the search on the page instance mounted in UniTimeGWT:Body,
     * restoring the scroll position when results come back non-empty.
     * Invoked from the JSNI refreshPage() trigger.
     */
    public static void __search() {
        final int left = Window.getScrollLeft();
        final int top = Window.getScrollTop();
        NotAssignedClassesPage page = (NotAssignedClassesPage)RootPanel.get("UniTimeGWT:Body").getWidget(0);
        page.search(new AsyncCallback<Boolean>() {
            @Override
            public void onFailure(Throwable caught) {
            }
            @Override
            public void onSuccess(Boolean result) {
                if (result)
                    Window.scrollTo(left, top);
            }
        });
    }

    /**
     * Executes the search with the current filter: clears previous result rows,
     * shows the loading indicator, and on success delegates to populate().
     * The optional callback receives true when the response has rows.
     */
    protected void search(final AsyncCallback<Boolean> callback) {
        final NotAssignedClassesRequest request = new NotAssignedClassesRequest();
        request.setFilter(iFilter.getValue());
        iFilter.getFooter().clearMessage();
        // Remove everything below the filter row (row 0) from a previous search.
        for (int row = iPanel.getRowCount() - 1; row > 0; row--)
            iPanel.removeRow(row);
        iFilter.getFooter().showLoading();
        iFilter.getFooter().setEnabled("search", false);
        LoadingWidget.showLoading(MESSAGES.waitLoadingData());
        RPC.execute(request, new AsyncCallback<NotAssignedClassesResponse>() {
            @Override
            public void onFailure(Throwable caught) {
                LoadingWidget.hideLoading();
                iFilter.getFooter().setErrorMessage(MESSAGES.failedToLoadNotAssignedClasses(caught.getMessage()));
                UniTimeNotifications.error(MESSAGES.failedToLoadNotAssignedClasses(caught.getMessage()), caught);
                iFilter.getFooter().setEnabled("search", true);
                if (callback != null)
                    callback.onFailure(caught);
            }
            @Override
            public void onSuccess(NotAssignedClassesResponse result) {
                LoadingWidget.hideLoading();
                iFilter.getFooter().clearMessage();
                populate(request.getFilter(), result);
                iFilter.getFooter().setEnabled("search", true);
                if (callback != null)
                    callback.onSuccess(!result.getRows().isEmpty());
            }
        });
    }

    /**
     * Prints the last response: builds a fresh table and moves its header row
     * into a THEAD element (presumably so the header repeats on each printed
     * page — confirm), then hands the DOM to ToolBox.print.
     */
    protected void print() {
        final DataTable table = new DataTable(iLastResponse);
        Element headerRow = table.getRowFormatter().getElement(0);
        Element tableElement = table.getElement();
        Element thead = DOM.createTHead();
        tableElement.insertFirst(thead);
        headerRow.getParentElement().removeChild(headerRow);
        thead.appendChild(headerRow);
        Page page = new Page() {
            @Override
            public String getName() {
                return MESSAGES.sectNotAssignedClasses();
            }
            @Override
            public String getUser() {
                return "";
            }
            @Override
            public String getSession() {
                return "";
            }
            @Override
            public Element getBody() {
                return table.getElement();
            }
        };
        ToolBox.print(page);
    }

    /**
     * Opens the export servlet for the current filter in the given format
     * ("csv" or "pdf"), carrying the persisted sort order. The query is
     * encoded server-side first.
     */
    private void exportData(String format) {
        String query = "output=unassigned-classes." + format + iFilter.getQuery() + "&sort=" + SolverCookie.getInstance().getNotAssignedClassesSort();
        RPC.execute(EncodeQueryRpcRequest.encode(query), new AsyncCallback<EncodeQueryRpcResponse>() {
            @Override
            public void onFailure(Throwable caught) {
            }
            @Override
            public void onSuccess(EncodeQueryRpcResponse result) {
                ToolBox.open(GWT.getHostPageBaseURL() + "export?q=" + result.getQuery());
            }
        });
    }

    /**
     * Renders a search response: custom page messages, notes, the result
     * table (created once, repopulated thereafter) and the preference legend.
     * Print/export buttons are enabled only when rows were returned.
     */
    protected void populate(FilterInterface filter, NotAssignedClassesResponse response) {
        iLastFilter = filter;
        iLastResponse = response;
        iFilter.getFooter().setEnabled("print", false);
        iFilter.getFooter().setEnabled("exportCSV", false);
        iFilter.getFooter().setEnabled("exportPDF", false);
        for (int row = iPanel.getRowCount() - 1; row > 0; row--)
            iPanel.removeRow(row);
        RootPanel cpm = RootPanel.get("UniTimeGWT:CustomPageMessages");
        if (cpm != null) {
            cpm.clear();
            if (response.hasPageMessages()) {
                for (final PageMessage pm: response.getPageMessages()) {
                    P p = new P(pm.getType() == PageMessageType.ERROR ? "unitime-PageError" : pm.getType() == PageMessageType.WARNING ? "unitime-PageWarn" : "unitime-PageMessage");
                    p.setHTML(pm.getMessage());
                    if (pm.hasUrl()) {
                        p.addStyleName("unitime-ClickablePageMessage");
                        p.addClickHandler(new ClickHandler() {
                            @Override
                            public void onClick(ClickEvent event) {
                                if (pm.hasUrl()) ToolBox.open(GWT.getHostPageBaseURL() + pm.getUrl());
                            }
                        });
                    }
                    cpm.add(p);
                }
            }
        }
        if (response.getRows().isEmpty()) {
            iFilter.getFooter().setMessage(MESSAGES.errorNotAssignedClassesNoDataReturned());
            return;
        }
        if (response.isShowNote()) iPanel.addRow(iNote1);
        UniTimeHeaderPanel header = new UniTimeHeaderPanel(MESSAGES.sectNotAssignedClasses());
        iPanel.addHeaderRow(header);
        if (iTable == null) {
            iTable = new DataTable(response);
            iTable.addValueChangeHandler(new ValueChangeHandler<Integer>() {
                @Override
                public void onValueChange(ValueChangeEvent<Integer> event) {
                    // Persist the chosen sort column in the solver cookie.
                    SolverCookie.getInstance().setNotAssignedClassesSort(event.getValue() == null ? 0 : event.getValue().intValue());
                }
            });
        } else {
            iTable.populate(response);
        }
        iTable.setValue(SolverCookie.getInstance().getNotAssignedClassesSort());
        iPanel.addRow(iTable);
        iPanel.addRow(iLegend);
        if (response.isShowNote()) iPanel.addRow(iNote2);
        iFilter.getFooter().setEnabled("print", true);
        iFilter.getFooter().setEnabled("exportCSV", true);
        iFilter.getFooter().setEnabled("exportPDF", true);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ambari.server.serveraction.kerberos;
import com.google.inject.Inject;
import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.actionmanager.HostRoleStatus;
import org.apache.ambari.server.agent.CommandReport;
import org.apache.ambari.server.agent.ExecutionCommand;
import org.apache.ambari.server.controller.KerberosHelper;
import org.apache.ambari.server.serveraction.AbstractServerAction;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.apache.ambari.server.serveraction.kerberos.KerberosIdentityDataFileReader.DATA_FILE_NAME;
import java.io.File;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
/**
* KerberosServerAction is an abstract class to be implemented by Kerberos-related
* {@link org.apache.ambari.server.serveraction.ServerAction} implementations.
* <p/>
* This class provides helper methods used to get common properties from the command parameters map
* and iterate through the Kerberos identity metadata file
* (see {@link org.apache.ambari.server.serveraction.kerberos.KerberosIdentityDataFileReader}).
*/
public abstract class KerberosServerAction extends AbstractServerAction {
/**
 * A (command parameter) property name used to hold the authenticated user's name for use in
 * operations that record the acting user.
 */
public static final String AUTHENTICATED_USER_NAME = "authenticated_user_name";

/**
 * A (command parameter) property name used to hold the absolute path to the directory that is to
 * be used to store transient data while the request is being processed. This is expected to be
 * a temporary directory.
 */
public static final String DATA_DIRECTORY = "data_directory";

/**
 * A (command parameter) property name used to hold the default Kerberos realm value.
 */
public static final String DEFAULT_REALM = "default_realm";

/**
 * A (command parameter) property name used to hold the (serialized) service/component filter map.
 */
public static final String SERVICE_COMPONENT_FILTER = "service_component_filter";

/**
 * A (command parameter) property name used to hold the (serialized) identity filter list.
 */
public static final String IDENTITY_FILTER = "identity_filter";

/**
 * A (command parameter) property name used to hold the relevant KDC type value. See
 * {@link org.apache.ambari.server.serveraction.kerberos.KDCType} for valid values
 */
public static final String KDC_TYPE = "kdc_type";

/**
 * A (command parameter) property name used to hold a boolean value indicating whether configurations
 * should be process to see if they need to be updated
 */
public static final String UPDATE_CONFIGURATIONS = "update_configurations";

/**
 * A (command parameter) property name used to hold the note to set when applying any
 * configuration changes
 */
public static final String UPDATE_CONFIGURATION_NOTE = "update_configuration_note";

/**
 * The prefix to use for the data directory name.
 */
public static final String DATA_DIRECTORY_PREFIX = ".ambari_";

/**
 * Kerberos action shared data entry name for the principal-to-password map
 */
private static final String PRINCIPAL_PASSWORD_MAP = "principal_password_map";

/**
 * Kerberos action shared data entry name for the principal-to-key_number map
 */
private static final String PRINCIPAL_KEY_NUMBER_MAP = "principal_key_number_map";

/**
 * Key used in kerberosCommandParams in ExecutionCommand for base64 encoded keytab content
 */
public static final String KEYTAB_CONTENT_BASE64 = "keytab_content_base64";

/**
 * Key used in kerberosCommandParams in ExecutionCommand to indicate whether to generate key keytabs
 * for all principals ("true") or only those that are missing ("false")
 */
public static final String REGENERATE_ALL = "regenerate_all";

private static final Logger LOG = LoggerFactory.getLogger(KerberosServerAction.class);

/**
 * The Cluster that this ServerAction implementation is executing on
 */
@Inject
private Clusters clusters = null;

/**
 * The KerberosOperationHandlerFactory to use to obtain KerberosOperationHandler instances
 * <p/>
 * This is needed to help with test cases to mock a KerberosOperationHandler
 */
@Inject
private KerberosOperationHandlerFactory kerberosOperationHandlerFactory;

/**
 * The KerberosIdentityDataFileReaderFactory to use to obtain KerberosIdentityDataFileReader instances
 */
@Inject
private KerberosIdentityDataFileReaderFactory kerberosIdentityDataFileReaderFactory;

/**
 * KerberosHelper
 */
@Inject
private KerberosHelper kerberosHelper;
/**
 * Safely looks up a named value in a (command parameter) Map.
 *
 * @param commandParameters a Map containing the dictionary of data to interrogate
 * @param propertyName      the name of the item from commandParameters to retrieve
 * @return the mapped value; null when either argument is null or the key is absent
 */
protected static String getCommandParameterValue(Map<String, String> commandParameters, String propertyName) {
  if ((commandParameters == null) || (propertyName == null)) {
    return null;
  }
  return commandParameters.get(propertyName);
}
/**
 * Given a (command parameter) Map, attempts to safely retrieve the "default_realm" property.
 *
 * @param commandParameters a Map containing the dictionary of data to interrogate
 * @return a String indicating the default realm or null (if not found or set)
 */
protected static String getDefaultRealm(Map<String, String> commandParameters) {
  // Simple delegation; null-safety is handled by getCommandParameterValue.
  return getCommandParameterValue(commandParameters, DEFAULT_REALM);
}
/**
 * Determines the KDC type from the "kdc_type" command parameter.
 * <p/>
 * Falls back to {@link org.apache.ambari.server.serveraction.kerberos.KDCType#MIT_KDC}
 * when the parameter is missing or empty.
 *
 * @param commandParameters a Map containing the dictionary of data to interrogate
 * @return the resolved KDCType
 */
protected static KDCType getKDCType(Map<String, String> commandParameters) {
  String kdcType = getCommandParameterValue(commandParameters, KDC_TYPE);
  if ((kdcType == null) || kdcType.isEmpty()) {
    return KDCType.MIT_KDC;
  }
  return KDCType.translate(kdcType);
}
/**
 * Given a (command parameter) Map, attempts to safely retrieve the "data_directory" property.
 *
 * @param commandParameters a Map containing the dictionary of data to interrogate
 * @return a String indicating the data directory or null (if not found or set)
 */
protected static String getDataDirectoryPath(Map<String, String> commandParameters) {
  // Simple delegation; null-safety is handled by getCommandParameterValue.
  return getCommandParameterValue(commandParameters, DATA_DIRECTORY);
}
/**
 * Stores the shared principal-to-password Map in the request's shared data
 * context, replacing any previously stored map. A null context is ignored.
 *
 * @param requestSharedDataContext a Map used as shared data among all ServerActions related
 *                                 to a given request
 * @param principalPasswordMap     a Map of principals and passwords to store
 */
protected static void setPrincipalPasswordMap(Map<String, Object> requestSharedDataContext,
                                              Map<String, String> principalPasswordMap) {
  if (requestSharedDataContext == null) {
    return;
  }
  requestSharedDataContext.put(PRINCIPAL_PASSWORD_MAP, principalPasswordMap);
}
/**
 * Retrieves the shared principal-to-password Map from the request's shared
 * data context, lazily creating and storing an empty map on first access.
 * <p/>
 * A non-null Map is therefore always returned as long as
 * requestSharedDataContext itself is not null.
 *
 * @param requestSharedDataContext a Map used as shared data among all ServerActions related
 *                                 to a given request
 * @return a Map of principals-to-passwords, or null when the context is null
 */
protected static Map<String, String> getPrincipalPasswordMap(Map<String, Object> requestSharedDataContext) {
  if (requestSharedDataContext == null) {
    return null;
  }
  Object map = requestSharedDataContext.get(PRINCIPAL_PASSWORD_MAP);
  if (map == null) {
    map = new HashMap<String, String>();
    requestSharedDataContext.put(PRINCIPAL_PASSWORD_MAP, map);
  }
  return (Map<String, String>) map;
}
/**
 * Gets the shared principal-to-key_number Map used to store principals and key numbers for
 * use within the current request context.
 * <p/>
 * If the requested Map is not found in requestSharedDataContext, one will be created and stored,
 * ensuring that a Map will always be returned, assuming requestSharedDataContext is not null.
 *
 * @param requestSharedDataContext a Map to be used a shared data among all ServerActions related
 *                                 to a given request
 * @return A Map of principals-to-key_numbers, or null when the context is null
 */
protected static Map<String, Integer> getPrincipalKeyNumberMap(Map<String, Object> requestSharedDataContext) {
  if (requestSharedDataContext == null) {
    return null;
  } else {
    Object map = requestSharedDataContext.get(PRINCIPAL_KEY_NUMBER_MAP);
    if (map == null) {
      // Bug fix: the original created a HashMap<String, String> here (a
      // copy-paste from getPrincipalPasswordMap) even though this map holds
      // Integer values; erasure hid the mismatch at runtime.
      map = new HashMap<String, Integer>();
      requestSharedDataContext.put(PRINCIPAL_KEY_NUMBER_MAP, map);
    }
    return (Map<String, Integer>) map;
  }
}
/**
 * Returns the relevant cluster's name, as reported by the execution command.
 *
 * @return a String declaring the relevant cluster's name
 * @throws AmbariException if the execution command is unavailable or carries
 *                         no cluster name
 */
protected String getClusterName() throws AmbariException {
  ExecutionCommand executionCommand = getExecutionCommand();
  if (executionCommand != null) {
    String clusterName = executionCommand.getClusterName();
    if ((clusterName != null) && !clusterName.isEmpty()) {
      return clusterName;
    }
  }
  throw new AmbariException("Failed to retrieve the cluster name from the execution command");
}
/**
 * Returns the relevant Cluster object
 *
 * @return the relevant Cluster
 * @throws AmbariException if the Cluster object cannot be retrieved
 */
protected Cluster getCluster() throws AmbariException {
  // Resolve the name once; the original re-queried the execution command a
  // second time just to build the error message.
  String clusterName = getClusterName();
  Cluster cluster = clusters.getCluster(clusterName);
  if (cluster == null) {
    throw new AmbariException(String.format("Failed to retrieve cluster for %s", clusterName));
  }
  return cluster;
}
/**
 * The Clusters object for this KerberosServerAction
 * <p/>
 * Simple accessor for the injected Clusters business object; never performs I/O.
 *
 * @return a Clusters object
 */
protected Clusters getClusters() {
  return clusters;
}
/**
 * Attempts to safely retrieve the "data_directory" property from the this action's relevant
 * command parameters Map.
 * <p/>
 * Delegates to the static overload using this action's own command parameters.
 *
 * @return a String indicating the data directory or null (if not found or set)
 */
protected String getDataDirectoryPath() {
  return getDataDirectoryPath(getCommandParameters());
}
/**
 * Iterates through the Kerberos identity metadata from the
 * {@link org.apache.ambari.server.serveraction.kerberos.KerberosIdentityDataFileReader} and calls
 * the implementing class to handle each identity found.
 * <p/>
 * Using the "data_directory" value from this action's command parameters map, creates a
 * {@link KerberosIdentityDataFileReader} to parse
 * the relative identity.dat file and iterate through its "records". Each "record" is processed using
 * {@link #processRecord(Map, String, KerberosOperationHandler, Map, Map)}.
 *
 * @param requestSharedDataContext a Map to be used as shared data among all ServerActions related
 *                                 to a given request
 * @return a CommandReport indicating the result of this operation
 * @throws AmbariException if the data directory or identity file is not readable, no operation
 *                         handler exists for the KDC type, the KDC connection cannot be opened,
 *                         or reading/processing an identity record fails
 */
protected CommandReport processIdentities(Map<String, Object> requestSharedDataContext)
    throws AmbariException {
  CommandReport commandReport = null;
  Map<String, String> commandParameters = getCommandParameters();
  actionLog.writeStdOut("Processing identities...");
  LOG.info("Processing identities...");
  if (commandParameters != null) {
    // Grab the relevant data from this action's command parameters map
    KerberosCredential administratorCredential = kerberosHelper.getKDCCredentials();
    String defaultRealm = getDefaultRealm(commandParameters);
    KDCType kdcType = getKDCType(commandParameters);
    String dataDirectoryPath = getDataDirectoryPath(commandParameters);
    if (dataDirectoryPath != null) {
      File dataDirectory = new File(dataDirectoryPath);
      // If the data directory exists, attempt to process further, else assume there is no work to do
      if (dataDirectory.exists()) {
        if (!dataDirectory.isDirectory() || !dataDirectory.canRead()) {
          String message = String.format("Failed to process the identities, the data directory is not accessible: %s",
              dataDirectory.getAbsolutePath());
          actionLog.writeStdErr(message);
          LOG.error(message);
          throw new AmbariException(message);
        }
        // The "identity data" file may or may not exist in the data directory, depending on if
        // there is work to do or not.
        File identityDataFile = new File(dataDirectory, DATA_FILE_NAME);
        if (identityDataFile.exists()) {
          if (!identityDataFile.canRead()) {
            String message = String.format("Failed to process the identities, cannot read the index file: %s",
                identityDataFile.getAbsolutePath());
            actionLog.writeStdErr(message);
            LOG.error(message);
            throw new AmbariException(message);
          }
          // Resolve the KDC-type-specific handler (MIT, Active Directory, ...) before opening
          // any connection; a missing handler is a hard failure.
          KerberosOperationHandler handler = kerberosOperationHandlerFactory.getKerberosOperationHandler(kdcType);
          if (handler == null) {
            String message = String.format("Failed to process the identities, a KDC operation handler was not found for the KDC type of : %s",
                kdcType.toString());
            actionLog.writeStdErr(message);
            LOG.error(message);
            throw new AmbariException(message);
          }
          Map<String, String> kerberosConfiguration = getConfiguration("kerberos-env");
          try {
            handler.open(administratorCredential, defaultRealm, kerberosConfiguration);
          } catch (KerberosOperationException e) {
            String message = String.format("Failed to process the identities, could not properly open the KDC operation handler: %s",
                e.getMessage());
            actionLog.writeStdErr(message);
            LOG.error(message);
            throw new AmbariException(message, e);
          }
          // Create the data file reader to parse and iterate through the records
          KerberosIdentityDataFileReader reader = null;
          try {
            reader = kerberosIdentityDataFileReaderFactory.createKerberosIdentityDataFileReader(identityDataFile);
            for (Map<String, String> record : reader) {
              // Process the current record
              commandReport = processRecord(record, defaultRealm, handler, kerberosConfiguration, requestSharedDataContext);
              // If the principal processor returns a CommandReport, than it is time to stop since
              // an error condition has probably occurred, else all is assumed to be well.
              if (commandReport != null) {
                break;
              }
            }
          } catch (AmbariException e) {
            // Catch this separately from IOException since the reason it was thrown was not the same
            // Note: AmbariException is an IOException, so there may be some confusion
            // NOTE(review): this rewrap discards the original exception's concrete subtype;
            // confirm a plain rethrow would not be preferable.
            throw new AmbariException(e.getMessage(), e);
          } catch (IOException e) {
            String message = String.format("Failed to process the identities, cannot read the index file: %s",
                identityDataFile.getAbsolutePath());
            actionLog.writeStdErr(message);
            LOG.error(message, e);
            throw new AmbariException(message, e);
          } finally {
            if (reader != null) {
              // The reader needs to be closed, if it fails to close ignore the exception since
              // there is little we can or care to do about it now.
              try {
                reader.close();
              } catch (IOException e) {
                // Ignore this...
              }
            }
            // The KerberosOperationHandler needs to be closed, if it fails to close ignore the
            // exception since there is little we can or care to do about it now.
            try {
              handler.close();
            } catch (KerberosOperationException e) {
              // Ignore this...
            }
          }
        }
      }
    }
  }
  actionLog.writeStdOut("Processing identities completed.");
  LOG.info("Processing identities completed.");
  // If commandReport is null, we can assume this operation was a success, so return a successful
  // CommandReport; else return the previously created CommandReport.
  return (commandReport == null)
      ? createCommandReport(0, HostRoleStatus.COMPLETED, "{}", actionLog.getStdOut(), actionLog.getStdErr())
      : commandReport;
}
/**
 * Processes an identity as necessary.
 * <p/>
 * This method is called from {@link #processIdentities(Map)} for each
 * identity "record" found in the Kerberos identity metadata file. After processing, it is expected
 * that the return value is null on success and a CommandReport (indicating the error) on failure.
 *
 * @param identityRecord           a Map containing the data for the current identity record
 * @param evaluatedPrincipal       a String indicating the relevant principal
 * @param operationHandler         a KerberosOperationHandler used to perform Kerberos-related
 *                                 tasks for specific Kerberos implementations
 *                                 (MIT, Active Directory, etc...)
 * @param kerberosConfiguration    a Map of configuration properties from kerberos-env
 * @param requestSharedDataContext a Map to be used as shared data among all ServerActions related
 *                                 to a given request
 * @return a CommandReport, indicating an error condition; or null, indicating a success condition
 * @throws AmbariException if an error occurs while processing the identity record
 */
protected abstract CommandReport processIdentity(Map<String, String> identityRecord,
                                                 String evaluatedPrincipal,
                                                 KerberosOperationHandler operationHandler,
                                                 Map<String, String> kerberosConfiguration,
                                                 Map<String, Object> requestSharedDataContext)
    throws AmbariException;
/**
 * Process and prepares an identity record to be handled by the implementing class.
 * <p/>
 * Given the data from the record Map, attempts to replace variables in the principal pattern to
 * generate a concrete principal value to further process. This "evaluated principal" is then passed to
 * {@link #processIdentity(Map, String, KerberosOperationHandler, Map, Map)}
 * to be handled as needed.
 *
 * @param record                   a Map containing the data for the current identity record
 * @param defaultRealm             a String declaring the default Kerberos realm
 * @param operationHandler         a KerberosOperationHandler used to perform Kerberos-related
 *                                 tasks for specific Kerberos implementations
 *                                 (MIT, Active Directory, etc...)
 * @param kerberosConfiguration    a Map of configuration properties from kerberos-env
 * @param requestSharedDataContext a Map to be used as shared data among all ServerActions related
 *                                 to a given request
 * @return a CommandReport, indicating an error condition; or null, indicating a success condition
 * @throws AmbariException if an error occurs while processing the identity record
 */
private CommandReport processRecord(Map<String, String> record, String defaultRealm,
                                    KerberosOperationHandler operationHandler,
                                    Map<String, String> kerberosConfiguration, Map<String, Object> requestSharedDataContext)
    throws AmbariException {
  CommandReport commandReport = null;
  if (record != null) {
    String principal = record.get(KerberosIdentityDataFileReader.PRINCIPAL);
    String host = record.get(KerberosIdentityDataFileReader.HOSTNAME);
    if (principal != null) {
      // Evaluate the principal "pattern" found in the record to generate the "evaluated principal"
      // by replacing the _HOST and _REALM variables.
      // BUG FIX: String.replace(CharSequence, CharSequence) throws NullPointerException when the
      // replacement is null, and a record is not guaranteed to carry a hostname (nor the command
      // a realm). Only substitute the variables whose values are actually available.
      String evaluatedPrincipal = principal;
      if (host != null) {
        evaluatedPrincipal = evaluatedPrincipal.replace("_HOST", host);
      }
      if (defaultRealm != null) {
        evaluatedPrincipal = evaluatedPrincipal.replace("_REALM", defaultRealm);
      }
      commandReport = processIdentity(record, evaluatedPrincipal, operationHandler, kerberosConfiguration, requestSharedDataContext);
    }
  }
  return commandReport;
}
}
| |
package nanocad.minimize.mm3;
/**
* aterm.java - MM2-style angle energy term
* Copyright (c) 1997,1998 Will Ware, all rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and other materials provided with the distribution.
*
* This software is provided "as is" and any express or implied warranties,
* including, but not limited to, the implied warranties of merchantability
* or fitness for any particular purpose are disclaimed. In no event shall
* Will Ware be liable for any direct, indirect, incidental, special,
* exemplary, or consequential damages (including, but not limited to,
* procurement of substitute goods or services; loss of use, data, or
* profits; or business interruption) however caused and on any theory of
* liability, whether in contract, strict liability, or tort (including
* negligence or otherwise) arising in any way out of the use of this
* software, even if advised of the possibility of such damage.
*/
import java.lang.Math;
import java.util.Vector;
import nanocad.*;
/**
 * MM3-style angle-bending energy term over an atom triple a1-a2-a3, with a2
 * at the vertex. The potential is a polynomial expansion in
 * (theta - theta0); see {@link #computePotential(double)}.
 */
public class aterm extends nanocad.term
{
    public static final String rcsid =
        "$Id: aterm.java,v 1.2 2005/05/14 23:51:02 xli16 Exp $";
    // Degrees-to-radians factor. NOTE(review): uses a truncated pi
    // (3.1415926) rather than Math.PI; kept as-is to preserve the numerics.
    private static final double convert = 3.1415926 / 180; // degrees to radians
    // Force constant (kth) and equilibrium angle in radians (th0), looked up
    // from mm3adata.txt by setCalculationValues().
    private double kth, th0;
    // Coefficients of the MM3 angle-bending polynomial.
    public static final double B = .014;
    public final static double C = .00005;
    public final static double D = .0000007;
    public final static double E = .0000000009;
    /**
     * No-argument constructor; force-field parameters remain uninitialized.
     * Creation date: (6/19/00 4:10:14 PM)
     */
    public aterm() {}
    /**
     * Creates an angle term over a1-a2-a3 (a2 at the vertex) and loads its
     * force-field parameters from the MM3 data table.
     */
    public aterm(atom a1, atom a2, atom a3) {
        myAtoms = new atom[3];
        myAtoms[0] = a1;
        myAtoms[1] = a2;
        myAtoms[2] = a3;
        setCalculationValues();
    }
    /**
     * Adds an aterm for the triple held in v to termList, accepting only one
     * canonical orientation of each angle (ordered by end-atom MM3 type, then
     * by x coordinate on a type tie) so the same angle is not added twice.
     */
    protected void buildTerm (Vector v, Vector termList)
    {
        atom a1 = (atom) v.elementAt(0);
        atom a2 = (atom) v.elementAt(1);
        atom a3 = (atom) v.elementAt(2);
        int type1 = a1.getMM3TypeNum();
        int type3 = a3.getMM3TypeNum();
        // Accept when the first end atom sorts strictly before the last one.
        // (The vertex atom's type does not participate in the ordering.)
        if (type1 < type3 || (type1 == type3 && a1.x[0] < a3.x[0]))
        {
            termList.addElement(new aterm(a1, a2, a3));
        }
    }
    /**
     * Accumulates the angle-bending force contribution onto each of the three
     * atoms' force vectors, updates this term's potential, and returns the
     * x-component of the force factor on atom 0 (0.0 when the term is inert
     * or the geometry is degenerate).
     */
    public double computeForces() {
        if (kth == 0.0)
            return 0.0;
        int i;
        // compute forces on each atom, add it to the atom's force vector
        double[] ab = new double[3];
        double[] bc = new double[3];
        double abdotab = 0.0, abdotbc = 0.0, bcdotbc = 0.0, th, tdif, duDth;
        for (i = 0; i < 3; i++) {
            ab[i] = myAtoms[0].x[i] - myAtoms[1].x[i];
            bc[i] = myAtoms[2].x[i] - myAtoms[1].x[i];
            abdotab += ab[i] * ab[i];
            abdotbc += ab[i] * bc[i];
            bcdotbc += bc[i] * bc[i];
        }
        // Clamp zero-length bond vectors before dividing by them.
        if (abdotab <= 0) abdotab = mm3MinimizeAlgorythm.TINY;
        if (bcdotbc <= 0) bcdotbc = mm3MinimizeAlgorythm.TINY;
        // Bond vectors too long to be a bonded angle; skip the term.
        if (abdotab > 3.0 || bcdotbc > 3.0)
            return 0.0;
        // cos(theta), clamped to [-1, 1] to protect acos from round-off.
        double jtemp1 = abdotbc / Math.sqrt(abdotab * bcdotbc);
        if (jtemp1 < -1) jtemp1 = -1;
        if (jtemp1 > 1) jtemp1 = 1;
        th = Math.acos(jtemp1);
        tdif = th - th0;
        if (tdif == 0.0) {
            potential = 0.0;
            return 0.0;
        }
        potential = computePotential(tdif);
        duDth = computePotentialDerivative(tdif);
        double[] dthda = new double[3];
        double[] dthdc = new double[3];
        // Denominator of d(theta)/dx; clamped to avoid division by zero for
        // collinear geometries.
        double jtemp2 = abdotab * bcdotbc - abdotbc * abdotbc;
        if (jtemp2 <= 0) jtemp2 = mm3MinimizeAlgorythm.TINY;
        double denominator = Math.sqrt(jtemp2);
        for (i = 0; i < 3; i++)
        {
            dthda[i] = -(ab[i] * abdotbc / abdotab - bc[i]) / denominator;
            dthdc[i] = -(bc[i] * abdotbc / bcdotbc - ab[i]) / denominator;
            myAtoms[0].f[i] += duDth * dthda[i];
            // The vertex atom receives the opposite of both end-atom forces.
            myAtoms[1].f[i] += duDth * (-dthda[i] - dthdc[i]);
            myAtoms[2].f[i] += duDth * dthdc[i];
        }
        return duDth * dthda[0];
    }
    /**
     * Derivative of the angle-bending potential with respect to
     * deltaTheta (theta - theta0).
     * Creation date: (6/15/00 12:53:22 PM)
     * @return double
     * @param deltaTheta double
     */
    protected double computePotentialDerivative(double deltaTheta) {
        double deltaThetaSquared = deltaTheta * deltaTheta;
        double bTerm = 3 * B * deltaTheta;
        double cTerm = 4 * C * deltaThetaSquared;
        double dTerm = 5 * D * deltaThetaSquared * deltaTheta;
        double eTerm = 6 * E * deltaThetaSquared * deltaThetaSquared;
        double force = kth * deltaTheta * (2 - bTerm + cTerm - dTerm + eTerm);
        return force;
    }
    /** Angle-bending potential: 0.5*kth*dT^2*(1 - B*dT + C*dT^2 - D*dT^3 + E*dT^4). */
    protected double computePotential(double dT){
        return 0.5*kth*dT*dT*(1 - B*dT + C*dT*dT - D*dT*dT*dT + E*dT*dT*dT*dT);
    }
    /** Parameter summary appended by the base class's representation. */
    protected String repr2()
    {
        // String concatenation of a double already goes through
        // Double.toString, so this matches the old output without the
        // deprecated "new Double(x)" boxing constructors.
        return " angle " + kth + " " + th0;
    }
    /**
     * Looks up kth/th0 for this atom triple in mm3adata.txt, falling back to
     * defaults when any atom type is unknown or no table entry exists.
     * Creation date: (6/15/00 12:41:20 PM)
     */
    protected void setCalculationValues() {
        int atomOneType = myAtoms[0].getMM3TypeNum();
        int atomTwoType = myAtoms[1].getMM3TypeNum();
        int atomThreeType = myAtoms[2].getMM3TypeNum();
        if ((atomOneType < 0) || (atomTwoType < 0) || (atomThreeType < 0)) {
            setDefaultCalculationValues();
            return;
        }
        try {
            AtomDataFile dataFile = new AtomDataFile(newNanocad.txtDir +
                newNanocad.fileSeparator + "mm3adata.txt");
            if (dataFile.findData(atomTwoType, atomOneType, atomThreeType, 1, 0, 2)) {
                kth = dataFile.parseDouble(3);
                th0 = dataFile.parseDouble(4) * convert;
            } else {
                setDefaultCalculationValues();
                return;
            }
        } catch (java.io.IOException e) {
            // NOTE(review): on an I/O failure kth stays 0.0, making
            // computeForces() treat this term as inert. Confirm that this,
            // rather than setDefaultCalculationValues(), is intended.
            System.err.println("Angle data lookup error");
            e.printStackTrace();
        }
    }
    /**
     * Fallback parameters used when no table data exists for this triple.
     * Creation date: (6/19/00 2:10:38 PM)
     */
    protected void setDefaultCalculationValues() {
        defaultsUsed = true;
        kth = 0.3;
        th0 = 120.0 * convert;
    }
    /** Number of atoms participating in this term. */
    public int termLength()
    {
        return 3;
    }
    /** Display name of this term type. */
    public String name()
    {
        return "Angle";
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.metrics;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.packed.PackedInts;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.BitArray;
import org.elasticsearch.common.util.ByteArray;
import org.elasticsearch.common.util.ByteUtils;
import org.elasticsearch.common.util.IntArray;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
/**
* Hyperloglog++ counter, implemented based on pseudo code from
* http://static.googleusercontent.com/media/research.google.com/fr//pubs/archive/40671.pdf and its appendix
* https://docs.google.com/document/d/1gyjfMHy43U9OWBXxfaeG-3MjGzejW1dlpyMwEYAAWEI/view?fullscreen
*
* This implementation is different from the original implementation in that it uses a hash table instead of a sorted list for linear
* counting. Although this requires more space and makes hyperloglog (which is less accurate) used sooner, this is also considerably faster.
*
* Trying to understand what this class does without having read the paper is considered adventurous.
*
* The HyperLogLogPlusPlus contains two algorithms, one for linear counting and the HyperLogLog algorithm. Initially hashes added to the
* data structure are processed using the linear counting until a threshold defined by the precision is reached where the data is replayed
* to the HyperLogLog algorithm and then this is used.
*
* It supports storing several HyperLogLogPlusPlus structures which are identified by a bucket number.
*/
public final class HyperLogLogPlusPlus extends AbstractHyperLogLogPlusPlus {

    private static final float MAX_LOAD_FACTOR = 0.75f;
    public static final int DEFAULT_PRECISION = 14;

    // Per-bucket flag: unset => linear counting, set => HyperLogLog.
    private final BitArray algorithm;
    private final HyperLogLog hll;
    private final LinearCounting lc;

    /**
     * Compute the required precision so that <code>count</code> distinct entries would be counted with linear counting.
     */
    public static int precisionFromThreshold(long count) {
        final long hashTableEntries = (long) Math.ceil(count / MAX_LOAD_FACTOR);
        int precision = PackedInts.bitsRequired(hashTableEntries * Integer.BYTES);
        precision = Math.max(precision, AbstractHyperLogLog.MIN_PRECISION);
        precision = Math.min(precision, AbstractHyperLogLog.MAX_PRECISION);
        return precision;
    }

    /**
     * Return the expected per-bucket memory usage for the given precision.
     */
    public static long memoryUsage(int precision) {
        return 1L << precision;
    }

    public HyperLogLogPlusPlus(int precision, BigArrays bigArrays, long initialBucketCount) {
        super(precision);
        HyperLogLog hll = null;
        LinearCounting lc = null;
        BitArray algorithm = null;
        boolean success = false;
        try {
            hll = new HyperLogLog(bigArrays, initialBucketCount, precision);
            lc = new LinearCounting(bigArrays, initialBucketCount, precision, hll);
            algorithm = new BitArray(1, bigArrays);
            success = true;
        } finally {
            // Release whatever was allocated before the failure so no big
            // arrays leak if a later allocation throws.
            if (success == false) {
                Releasables.close(hll, lc, algorithm);
            }
        }
        this.hll = hll;
        this.lc = lc;
        this.algorithm = algorithm;
    }

    @Override
    public long maxOrd() {
        return hll.maxOrd();
    }

    @Override
    public long cardinality(long bucketOrd) {
        if (getAlgorithm(bucketOrd) == LINEAR_COUNTING) {
            return lc.cardinality(bucketOrd);
        } else {
            return hll.cardinality(bucketOrd);
        }
    }

    @Override
    protected boolean getAlgorithm(long bucketOrd) {
        return algorithm.get(bucketOrd);
    }

    @Override
    protected AbstractLinearCounting.HashesIterator getLinearCounting(long bucketOrd) {
        return lc.values(bucketOrd);
    }

    @Override
    protected AbstractHyperLogLog.RunLenIterator getHyperLogLog(long bucketOrd) {
        return hll.getRunLens(bucketOrd);
    }

    @Override
    public void collect(long bucket, long hash) {
        hll.ensureCapacity(bucket + 1);
        if (algorithm.get(bucket) == LINEAR_COUNTING) {
            final int newSize = lc.collect(bucket, hash);
            // Once the hash set outgrows its load-factor threshold, replay
            // its contents into the HyperLogLog registers.
            if (newSize > lc.threshold) {
                upgradeToHll(bucket);
            }
        } else {
            hll.collect(bucket, hash);
        }
    }

    @Override
    public void close() {
        Releasables.close(algorithm, hll, lc);
    }

    protected void addRunLen(long bucketOrd, int register, int runLen) {
        hll.ensureCapacity(bucketOrd + 1);
        if (algorithm.get(bucketOrd) == LINEAR_COUNTING) {
            upgradeToHll(bucketOrd);
        }
        // BUG FIX: this previously forwarded to bucket 0 ("hll.addRunLen(0, ...)")
        // even though the linear-counting check above was made against bucketOrd;
        // write the run length to the same bucket that was checked/upgraded.
        hll.addRunLen(bucketOrd, register, runLen);
    }

    /**
     * Replays the linear-counting hash set for {@code bucketOrd} into the
     * HyperLogLog registers and flips the bucket's algorithm bit. The encoded
     * hashes must be copied out first because both representations share the
     * same underlying byte array.
     */
    void upgradeToHll(long bucketOrd) {
        hll.ensureCapacity(bucketOrd + 1);
        final AbstractLinearCounting.HashesIterator hashes = lc.values(bucketOrd);
        // We need to copy values into an arrays as we will override
        // the values on the buffer
        final IntArray values = lc.bigArrays.newIntArray(hashes.size());
        try {
            int i = 0;
            while (hashes.next()) {
                values.set(i++, hashes.value());
            }
            assert i == hashes.size();
            hll.reset(bucketOrd);
            for (long j = 0; j < values.size(); ++j) {
                final int encoded = values.get(j);
                hll.collectEncoded(bucketOrd, encoded);
            }
            algorithm.set(bucketOrd);
        } finally {
            Releasables.close(values);
        }
    }

    public void merge(long thisBucket, AbstractHyperLogLogPlusPlus other, long otherBucket) {
        if (precision() != other.precision()) {
            throw new IllegalArgumentException();
        }
        hll.ensureCapacity(thisBucket + 1);
        if (other.getAlgorithm(otherBucket) == LINEAR_COUNTING) {
            merge(thisBucket, other.getLinearCounting(otherBucket));
        } else {
            merge(thisBucket, other.getHyperLogLog(otherBucket));
        }
    }

    private void merge(long thisBucket, AbstractLinearCounting.HashesIterator values) {
        while (values.next()) {
            final int encoded = values.value();
            if (algorithm.get(thisBucket) == LINEAR_COUNTING) {
                final int newSize = lc.addEncoded(thisBucket, encoded);
                if (newSize > lc.threshold) {
                    upgradeToHll(thisBucket);
                }
            } else {
                hll.collectEncoded(thisBucket, encoded);
            }
        }
    }

    private void merge(long thisBucket, AbstractHyperLogLog.RunLenIterator runLens) {
        // Merging HLL registers requires this bucket to be in HLL form too.
        if (algorithm.get(thisBucket) != HYPERLOGLOG) {
            upgradeToHll(thisBucket);
        }
        for (int i = 0; i < hll.m; ++i) {
            runLens.next();
            hll.addRunLen(thisBucket, i, runLens.value());
        }
    }

    private static class HyperLogLog extends AbstractHyperLogLog implements Releasable {

        private final BigArrays bigArrays;
        private final HyperLogLogIterator iterator;
        // array for holding the runlens.
        private ByteArray runLens;

        HyperLogLog(BigArrays bigArrays, long initialBucketCount, int precision) {
            super(precision);
            this.runLens = bigArrays.newByteArray(initialBucketCount << precision);
            this.bigArrays = bigArrays;
            this.iterator = new HyperLogLogIterator(this, precision, m);
        }

        public long maxOrd() {
            return runLens.size() >>> precision();
        }

        @Override
        protected void addRunLen(long bucketOrd, int register, int encoded) {
            final long bucketIndex = (bucketOrd << p) + register;
            // Registers are merged by keeping the maximum run length.
            runLens.set(bucketIndex, (byte) Math.max(encoded, runLens.get(bucketIndex)));
        }

        @Override
        protected RunLenIterator getRunLens(long bucketOrd) {
            // NOTE: a single shared iterator is reused; callers must not
            // interleave iterations over different buckets.
            iterator.reset(bucketOrd);
            return iterator;
        }

        protected void reset(long bucketOrd) {
            runLens.fill(bucketOrd << p, (bucketOrd << p) + m, (byte) 0);
        }

        protected void ensureCapacity(long numBuckets) {
            runLens = bigArrays.grow(runLens, numBuckets << p);
        }

        @Override
        public void close() {
            Releasables.close(runLens);
        }
    }

    private static class HyperLogLogIterator implements AbstractHyperLogLog.RunLenIterator {

        private final HyperLogLog hll;
        private final int m, p;
        int pos;
        long start;
        private byte value;

        HyperLogLogIterator(HyperLogLog hll, int p, int m) {
            this.hll = hll;
            this.m = m;
            this.p = p;
        }

        void reset(long bucket) {
            pos = 0;
            start = bucket << p;
        }

        @Override
        public boolean next() {
            if (pos < m) {
                value = hll.runLens.get(start + pos);
                pos++;
                return true;
            }
            return false;
        }

        @Override
        public byte value() {
            return value;
        }
    }

    private static class LinearCounting extends AbstractLinearCounting implements Releasable {

        protected final int threshold;
        private final int mask;
        private final BytesRef readSpare;
        private final ByteBuffer writeSpare;
        private final BigArrays bigArrays;
        private final LinearCountingIterator iterator;
        // We are actually using HyperLogLog's runLens array but interpreting it as a hash set for linear counting.
        private final HyperLogLog hll;
        // Number of elements stored.
        private IntArray sizes;

        LinearCounting(BigArrays bigArrays, long initialBucketCount, int p, HyperLogLog hll) {
            super(p);
            this.bigArrays = bigArrays;
            this.hll = hll;
            final int capacity = (1 << p) / 4; // because ints take 4 bytes
            threshold = (int) (capacity * MAX_LOAD_FACTOR);
            mask = capacity - 1;
            sizes = bigArrays.newIntArray(initialBucketCount);
            readSpare = new BytesRef();
            writeSpare = ByteBuffer.allocate(4).order(ByteOrder.LITTLE_ENDIAN);
            iterator = new LinearCountingIterator(this, capacity);
        }

        @Override
        protected int addEncoded(long bucketOrd, int encoded) {
            sizes = bigArrays.grow(sizes, bucketOrd + 1);
            assert encoded != 0;
            // Open addressing with linear probing; 0 marks an empty slot.
            for (int i = (encoded & mask);; i = (i + 1) & mask) {
                final int v = get(bucketOrd, i);
                if (v == 0) {
                    // means unused, take it!
                    set(bucketOrd, i, encoded);
                    return sizes.increment(bucketOrd, 1);
                } else if (v == encoded) {
                    // k is already in the set
                    return -1;
                }
            }
        }

        @Override
        protected int size(long bucketOrd) {
            if (bucketOrd >= sizes.size()) {
                return 0;
            }
            final int size = sizes.get(bucketOrd);
            assert size == recomputedSize(bucketOrd);
            return size;
        }

        @Override
        protected HashesIterator values(long bucketOrd) {
            // NOTE: a single shared iterator is reused; see getRunLens above.
            iterator.reset(bucketOrd, size(bucketOrd));
            return iterator;
        }

        private long index(long bucketOrd, int index) {
            return (bucketOrd << p) + (index << 2);
        }

        private int get(long bucketOrd, int index) {
            hll.runLens.get(index(bucketOrd, index), 4, readSpare);
            return ByteUtils.readIntLE(readSpare.bytes, readSpare.offset);
        }

        private void set(long bucketOrd, int index, int value) {
            writeSpare.putInt(0, value);
            hll.runLens.set(index(bucketOrd, index), writeSpare.array(), 0, 4);
        }

        // O(capacity) sanity check used only from an assert in size().
        private int recomputedSize(long bucketOrd) {
            if (bucketOrd >= hll.maxOrd()) {
                return 0;
            }
            int size = 0;
            for (int i = 0; i <= mask; ++i) {
                final int v = get(bucketOrd, i);
                if (v != 0) {
                    ++size;
                }
            }
            return size;
        }

        @Override
        public void close() {
            Releasables.close(sizes);
        }
    }

    private static class LinearCountingIterator implements AbstractLinearCounting.HashesIterator {

        private final LinearCounting lc;
        private final int capacity;
        private int pos, size;
        private long bucketOrd;
        private int value;

        LinearCountingIterator(LinearCounting lc, int capacity) {
            this.lc = lc;
            this.capacity = capacity;
        }

        void reset(long bucketOrd, int size) {
            this.bucketOrd = bucketOrd;
            this.size = size;
            // An empty bucket starts "exhausted" so next() returns false
            // without scanning the (possibly unallocated) table.
            this.pos = size == 0 ? capacity : 0;
        }

        @Override
        public int size() {
            return size;
        }

        @Override
        public boolean next() {
            if (pos < capacity) {
                // Skip empty (zero) slots until the next stored hash.
                for (; pos < capacity; ++pos) {
                    final int k = lc.get(bucketOrd, pos);
                    if (k != 0) {
                        ++pos;
                        value = k;
                        return true;
                    }
                }
            }
            return false;
        }

        @Override
        public int value() {
            return value;
        }
    }
}
| |
/*
* NOTE: This copyright does *not* cover user programs that use HQ
* program services by normal system calls through the application
* program interfaces provided as part of the Hyperic Plug-in Development
* Kit or the Hyperic Client Development Kit - this is merely considered
* normal use of the program, and does *not* fall under the heading of
* "derived work".
*
* Copyright (C) [2004-2007], Hyperic, Inc.
* This file is part of HQ.
*
* HQ is free software; you can redistribute it and/or modify
* it under the terms version 2 of the GNU General Public License as
* published by the Free Software Foundation. This program is distributed
* in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
* even the implied warranty of MERCHANTABILITY or FITNESS FOR A
* PARTICULAR PURPOSE. See the GNU General Public License for more
* details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
* USA.
*/
package org.hyperic.hq.plugin.nagios.parser;
import java.io.PrintStream;
import java.util.regex.Pattern;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.net.UnknownHostException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
public abstract class NagiosObj
{
protected String logCtx = getClass().getName();
protected Log _log = LogFactory.getLog(logCtx);
static final int HOST_TYPE = 0,
SERVICE_TYPE = 1,
COMMAND_TYPE = 2,
CONTACT_TYPE = 3,
CONTACTGROUP_TYPE = 4,
HOSTGROUP_TYPE = 5,
HOSTGROUPESCALATION_TYPE = 6,
SERVICEDEPENDENCY_TYPE = 7,
SERVICESCALATION_TYPE = 8,
TIMEPERIOD_TYPE = 9,
RESOURCE_TYPE = 10,
HOST_TEMPL_TYPE = 11;
private static final Pattern
_host = Pattern.compile("\\s+host\\s*\\{"),
_service = Pattern.compile("\\s+service\\s*\\{"),
_hostgroup = Pattern.compile("\\s+hostgroup\\s*\\{"),
_command = Pattern.compile("\\s+command\\s*\\{"),
_resource = Util.RESOURCE_PATTERN;
protected static final Pattern _comment = Pattern.compile("^\\s*#"),
_blankLine = Pattern.compile("^\\s*$");
protected PrintStream _debugOut;
protected String _filename;
protected NagiosObj()
{
}
public abstract String toString();
public abstract String getKey();
public abstract int hashCode();
public abstract int getType();
abstract void resolveDependencies(NagiosParser parser);
protected abstract void parseCfg(String cfgBlock)
throws NagiosParserException;
public void setDebugInfo(String filename, PrintStream debugOut)
{
_filename = filename;
_debugOut = debugOut;
}
static int getObjectType(String line)
throws NagiosTypeNotSupportedException
{
if (_host.matcher(line).find()) {
return HOST_TYPE;
} else if (_service.matcher(line).find()) {
return SERVICE_TYPE;
} else if (_command.matcher(line).find()) {
return COMMAND_TYPE;
} else if (_resource.matcher(line).find()) {
return RESOURCE_TYPE;
} else if (_hostgroup.matcher(line).find()) {
return HOSTGROUP_TYPE;
}
throw new NagiosTypeNotSupportedException("Type not supported for "+
line);
}
public static final String getTypeName(int type)
throws NagiosTypeNotSupportedException
{
switch (type)
{
case HOST_TYPE:
return "Host Type";
case SERVICE_TYPE:
return "Service Type";
case COMMAND_TYPE:
return "Command Type";
case HOSTGROUP_TYPE:
return "HostGroup Type";
case RESOURCE_TYPE:
return "Resource Type";
default:
throw new NagiosTypeNotSupportedException("Type not supported for "+
"numeric "+type);
}
}
private static NagiosObj getHostObj(String cfgLines,
String filename,
PrintStream _debugOut)
throws NagiosParserException
{
NagiosObj rtn;
try
{
rtn = new NagiosHostObj();
rtn.setDebugInfo(filename, _debugOut);
rtn.parseCfg(cfgLines);
}
catch (NagiosParserException e)
{
rtn = new NagiosTemplateHostObj();
rtn.setDebugInfo(filename, _debugOut);
rtn.parseCfg(cfgLines);
}
return rtn;
}
public static final NagiosObj getObject(String firstLine,
String cfgLines,
String filename,
PrintStream _debugOut)
throws NagiosParserException,
NagiosTypeNotSupportedException,
UnknownHostException
{
NagiosObj rtn;
int objType = getObjectType(firstLine);
switch (objType)
{
case HOST_TYPE:
rtn = getHostObj(cfgLines, filename, _debugOut);
break;
case SERVICE_TYPE:
rtn = new NagiosServiceObj();
rtn.setDebugInfo(filename, _debugOut);
rtn.parseCfg(cfgLines);
break;
case COMMAND_TYPE:
rtn = new NagiosCommandObj();
rtn.setDebugInfo(filename, _debugOut);
rtn.parseCfg(cfgLines);
break;
case HOSTGROUP_TYPE:
rtn = new NagiosHostGroupObj();
rtn.setDebugInfo(filename, _debugOut);
rtn.parseCfg(cfgLines);
break;
case RESOURCE_TYPE:
rtn = new NagiosResourceObj();
rtn.setDebugInfo(filename, _debugOut);
rtn.parseCfg(cfgLines);
break;
default:
throw new NagiosTypeNotSupportedException();
}
return rtn;
}
/**
 * Strips a trailing Nagios inline comment (a ";" and everything after it,
 * plus the whitespace in front of it) from a trimmed name/value string.
 *
 * @param nameValue raw text that may carry an inline comment
 * @return the text with any inline comment removed
 */
protected String removeInlineComments(String nameValue)
{
    String trimmed = nameValue.trim();
    return trimmed.replaceAll("\\s*;.*$", "");
}
/**
 * Logs a message plus throwable: to the configured debug stream when one
 * is set, otherwise to the class logger at debug level.
 */
protected void debug(String buf, Throwable e)
{
    if (_debugOut != null)
        Util.debug(_debugOut, buf, e);
    else
        _log.debug(buf, e);
}

/**
 * Logs a throwable without a message; routing is the same as
 * {@link #debug(String, Throwable)}.
 */
protected void debug(Throwable e)
{
    if (_debugOut != null)
        Util.debug(_debugOut, null, e);
    else
        _log.debug(e);
}

/**
 * Logs a plain message; routing is the same as
 * {@link #debug(String, Throwable)}.
 */
protected void debug(String buf)
{
    if (_debugOut != null)
        Util.debug(_debugOut, buf);
    else
        _log.debug(buf);
}
/**
 * Joins the {@code toString()} values of a list's elements, starting at
 * index {@code start}, separated by {@code delim}.
 *
 * @param delim separator placed between elements
 * @param l     elements to join
 * @param start first index to include
 * @return the joined string, or "" when there is nothing to join
 */
protected String join(String delim, List l, int start)
{
    if (l.isEmpty() || start >= l.size()) {
        return "";
    }
    // StringBuilder: this buffer is method-local, so the synchronized
    // StringBuffer the original used buys nothing.
    StringBuilder buf = new StringBuilder();
    for (int i = start; i < l.size(); i++) {
        buf.append(l.get(i).toString()).append(delim);
    }
    // Drop the whole trailing delimiter. The previous code removed only
    // one character, which left a partial separator behind whenever
    // delim was longer than one character.
    return buf.substring(0, buf.length() - delim.length());
}
/**
 * Joins every element of a collection using {@code delim}, in the
 * collection's iteration order.
 */
protected String join(String delim, Collection c)
{
    List asList = new ArrayList(c);
    return join(delim, asList, 0);
}
/**
 * Joins every element of a list using {@code delim}; convenience overload
 * for {@link #join(String, List, int)} with {@code start == 0}.
 */
protected String join(String delim, List l)
{
    return join(delim, l, 0);
}
}
| |
/*
* The MIT License
*
* Copyright (c) 2012, Dominik Bartholdi, Seiji Sogabe
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package jenkins.model;
import hudson.DescriptorExtensionList;
import hudson.Extension;
import hudson.ExtensionPoint;
import hudson.Util;
import hudson.model.Describable;
import hudson.model.Descriptor;
import hudson.model.Failure;
import jenkins.model.Messages;
import hudson.util.FormValidation;
import java.io.IOException;
import java.io.Serializable;
import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;
import javax.servlet.ServletException;
import org.apache.commons.lang.StringUtils;
import org.jenkinsci.Symbol;
import org.kohsuke.stapler.DataBoundConstructor;
import org.kohsuke.stapler.QueryParameter;
/**
 * This ExtensionPoint allows to enforce the name of projects/jobs.
 *
 * @author Dominik Bartholdi (imod)
 */
public abstract class ProjectNamingStrategy implements Describable<ProjectNamingStrategy>, ExtensionPoint {

    public ProjectNamingStrategyDescriptor getDescriptor() {
        return (ProjectNamingStrategyDescriptor) Jenkins.get().getDescriptor(getClass());
    }

    /**
     * All registered naming-strategy descriptors.
     */
    public static DescriptorExtensionList<ProjectNamingStrategy, ProjectNamingStrategyDescriptor> all() {
        return Jenkins.get().getDescriptorList(ProjectNamingStrategy.class);
    }

    /**
     * Called when creating a new job.
     *
     * @param name
     *            the name given from the UI
     * @throws Failure
     *             if the user has to be informed about an illegal name, forces the user to change the name before submitting. The message of the failure will be presented to the user.
     */
    public void checkName(String name) throws Failure {
        // no op
    }

    /**
     * This flag can be used to force existing jobs to be migrated to a new naming strategy - if this method returns true, the naming will be enforced at every config change. If <code>false</code> is
     * returned, only new jobs have to follow the strategy.
     *
     * @return <code>true</code> if existing jobs should be enforced to confirm to the naming standard.
     */
    public boolean isForceExistingJobs() {
        return false;
    }

    /**
     * The default naming strategy which does not restrict the name of a job.
     */
    public static final ProjectNamingStrategy DEFAULT_NAMING_STRATEGY = new DefaultProjectNamingStrategy();

    /**
     * Default implementation which does not restrict the name to any form.
     */
    public static final class DefaultProjectNamingStrategy extends ProjectNamingStrategy implements Serializable {

        private static final long serialVersionUID = 1L;

        @DataBoundConstructor
        public DefaultProjectNamingStrategy() {
        }

        @Override
        public void checkName(String origName) throws Failure {
            // default - should just do nothing (this is how Jenkins worked before introducing this ExtensionPoint)
        }

        /**
         * DefaultProjectNamingStrategy is stateless, therefore safe to keep the same instance
         */
        private Object readResolve() {
            return DEFAULT_NAMING_STRATEGY;
        }

        @Extension @Symbol("standard")
        public static final class DescriptorImpl extends ProjectNamingStrategyDescriptor {

            @Override
            public String getDisplayName() {
                return Messages.DefaultProjectNamingStrategy_DisplayName();
            }

            @Override
            public String getHelpFile() {
                return "/help/system-config/defaultJobNamingStrategy.html";
            }
        }
    }

    /**
     * Naming strategy which allows the admin to define a pattern a job's name has to follow.
     */
    public static final class PatternProjectNamingStrategy extends ProjectNamingStrategy implements Serializable {

        private static final long serialVersionUID = 1L;

        /**
         * regex pattern a job's name has to follow
         */
        private final String namePattern;

        /**
         * optional message presented to the user when the pattern is violated
         */
        private final String description;

        private boolean forceExistingJobs;

        @Deprecated
        public PatternProjectNamingStrategy(String namePattern, boolean forceExistingJobs) {
            this(namePattern, null, forceExistingJobs);
        }

        /** @since 1.533 */
        @DataBoundConstructor
        public PatternProjectNamingStrategy(String namePattern, String description, boolean forceExistingJobs) {
            this.namePattern = namePattern;
            this.description = description;
            this.forceExistingJobs = forceExistingJobs;
        }

        @Override
        public void checkName(String name) {
            if (StringUtils.isNotBlank(namePattern) && StringUtils.isNotBlank(name)) {
                if (!Pattern.matches(namePattern, name)) {
                    // Prefer the admin-supplied description; fall back to the generic message.
                    throw new Failure(StringUtils.isEmpty(description) ?
                            Messages.Hudson_JobNameConventionNotApplyed(name, namePattern) :
                            description);
                }
            }
        }

        public String getNamePattern() {
            return namePattern;
        }

        /** @since 1.533 */
        public String getDescription() {
            return description;
        }

        @Override
        public boolean isForceExistingJobs() {
            return forceExistingJobs;
        }

        @Extension @Symbol("pattern")
        public static final class DescriptorImpl extends ProjectNamingStrategyDescriptor {

            /**
             * pattern matching every name; used as the form default
             */
            public static final String DEFAULT_PATTERN = ".*";

            @Override
            public String getDisplayName() {
                return Messages.PatternProjectNamingStrategy_DisplayName();
            }

            @Override
            public String getHelpFile() {
                return "/help/system-config/patternJobNamingStrategy.html";
            }

            /**
             * Form validation: a pattern is required and must compile as a regex.
             */
            public FormValidation doCheckNamePattern(@QueryParameter String value)
                    throws IOException, ServletException {
                String pattern = Util.fixEmptyAndTrim(value);
                if (pattern == null) {
                    return FormValidation.error(Messages.PatternProjectNamingStrategy_NamePatternRequired());
                }
                try {
                    Pattern.compile(pattern);
                } catch (PatternSyntaxException e) {
                    return FormValidation.error(Messages.PatternProjectNamingStrategy_NamePatternInvalidSyntax());
                }
                return FormValidation.ok();
            }
        }
    }

    public static abstract class ProjectNamingStrategyDescriptor extends Descriptor<ProjectNamingStrategy> {
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.bookkeeper.client;
import static org.apache.bookkeeper.client.RackawareEnsemblePlacementPolicy.REPP_DNS_RESOLVER_CLASS;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Set;
import junit.framework.TestCase;
import org.apache.bookkeeper.client.BKException.BKNotEnoughBookiesException;
import org.apache.bookkeeper.net.NetworkTopology;
import org.apache.bookkeeper.util.StaticDNSResolver;
import org.apache.commons.configuration.CompositeConfiguration;
import org.apache.commons.configuration.Configuration;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Tests for {@code RackawareEnsemblePlacementPolicy}: bookie replacement and
 * new-ensemble selection under different rack layouts, with the rack
 * topology faked through {@code StaticDNSResolver}.
 */
public class TestRackawareEnsemblePlacementPolicy extends TestCase {

    static final Logger LOG = LoggerFactory.getLogger(TestRackawareEnsemblePlacementPolicy.class);

    RackawareEnsemblePlacementPolicy repp;
    Configuration conf = new CompositeConfiguration();

    @Override
    protected void setUp() throws Exception {
        super.setUp();
        // Start each test from a clean resolver with the local addresses
        // mapped to the default rack.
        StaticDNSResolver.reset();
        StaticDNSResolver.addNodeToRack(InetAddress.getLocalHost().getHostAddress(), NetworkTopology.DEFAULT_RACK);
        StaticDNSResolver.addNodeToRack("127.0.0.1", NetworkTopology.DEFAULT_RACK);
        StaticDNSResolver.addNodeToRack("localhost", NetworkTopology.DEFAULT_RACK);
        LOG.info("Set up static DNS Resolver.");
        conf.setProperty(REPP_DNS_RESOLVER_CLASS, StaticDNSResolver.class.getName());
        repp = new RackawareEnsemblePlacementPolicy();
        repp.initialize(conf);
    }

    @Override
    protected void tearDown() throws Exception {
        repp.uninitalize();
        super.tearDown();
    }

    /**
     * Replacing a bookie should pick one from the same rack when available.
     */
    @Test
    public void testReplaceBookieWithEnoughBookiesInSameRack() throws Exception {
        InetSocketAddress addr1 = new InetSocketAddress("127.0.0.1", 3181);
        InetSocketAddress addr2 = new InetSocketAddress("127.0.0.2", 3181);
        InetSocketAddress addr3 = new InetSocketAddress("127.0.0.3", 3181);
        InetSocketAddress addr4 = new InetSocketAddress("127.0.0.4", 3181);
        // update dns mapping
        StaticDNSResolver.addNodeToRack(addr1.getAddress().getHostAddress(), NetworkTopology.DEFAULT_RACK);
        StaticDNSResolver.addNodeToRack(addr2.getAddress().getHostAddress(), "/r2");
        StaticDNSResolver.addNodeToRack(addr3.getAddress().getHostAddress(), "/r2");
        StaticDNSResolver.addNodeToRack(addr4.getAddress().getHostAddress(), "/r3");
        // Update cluster
        Set<InetSocketAddress> addrs = new HashSet<InetSocketAddress>();
        addrs.add(addr1);
        addrs.add(addr2);
        addrs.add(addr3);
        addrs.add(addr4);
        repp.onClusterChanged(addrs, new HashSet<InetSocketAddress>());
        // replace node under r2 -- addr3 is the only other bookie in r2
        InetSocketAddress replacedBookie = repp.replaceBookie(addr2, new HashSet<InetSocketAddress>());
        assertEquals(addr3, replacedBookie);
    }

    /**
     * With the same-rack candidate excluded, the replacement must come from
     * another rack (and never from the excluded set).
     */
    @Test
    public void testReplaceBookieWithEnoughBookiesInDifferentRack() throws Exception {
        InetSocketAddress addr1 = new InetSocketAddress("127.0.0.1", 3181);
        InetSocketAddress addr2 = new InetSocketAddress("127.0.0.2", 3181);
        InetSocketAddress addr3 = new InetSocketAddress("127.0.0.3", 3181);
        InetSocketAddress addr4 = new InetSocketAddress("127.0.0.4", 3181);
        // update dns mapping
        StaticDNSResolver.addNodeToRack(addr1.getAddress().getHostAddress(), NetworkTopology.DEFAULT_RACK);
        StaticDNSResolver.addNodeToRack(addr2.getAddress().getHostAddress(), "/r2");
        StaticDNSResolver.addNodeToRack(addr3.getAddress().getHostAddress(), "/r3");
        StaticDNSResolver.addNodeToRack(addr4.getAddress().getHostAddress(), "/r4");
        // Update cluster
        Set<InetSocketAddress> addrs = new HashSet<InetSocketAddress>();
        addrs.add(addr1);
        addrs.add(addr2);
        addrs.add(addr3);
        addrs.add(addr4);
        repp.onClusterChanged(addrs, new HashSet<InetSocketAddress>());
        // replace node under r2
        Set<InetSocketAddress> excludedAddrs = new HashSet<InetSocketAddress>();
        excludedAddrs.add(addr1);
        InetSocketAddress replacedBookie = repp.replaceBookie(addr2, excludedAddrs);
        assertFalse(addr1.equals(replacedBookie));
        assertTrue(addr3.equals(replacedBookie) || addr4.equals(replacedBookie));
    }

    /**
     * When every candidate is excluded, replacement must fail with
     * {@code BKNotEnoughBookiesException}.
     */
    @Test
    public void testReplaceBookieWithNotEnoughBookies() throws Exception {
        InetSocketAddress addr1 = new InetSocketAddress("127.0.0.1", 3181);
        InetSocketAddress addr2 = new InetSocketAddress("127.0.0.2", 3181);
        InetSocketAddress addr3 = new InetSocketAddress("127.0.0.3", 3181);
        InetSocketAddress addr4 = new InetSocketAddress("127.0.0.4", 3181);
        // update dns mapping
        StaticDNSResolver.addNodeToRack(addr1.getAddress().getHostAddress(), NetworkTopology.DEFAULT_RACK);
        StaticDNSResolver.addNodeToRack(addr2.getAddress().getHostAddress(), "/r2");
        StaticDNSResolver.addNodeToRack(addr3.getAddress().getHostAddress(), "/r3");
        StaticDNSResolver.addNodeToRack(addr4.getAddress().getHostAddress(), "/r4");
        // Update cluster
        Set<InetSocketAddress> addrs = new HashSet<InetSocketAddress>();
        addrs.add(addr1);
        addrs.add(addr2);
        addrs.add(addr3);
        addrs.add(addr4);
        repp.onClusterChanged(addrs, new HashSet<InetSocketAddress>());
        // replace node under r2
        Set<InetSocketAddress> excludedAddrs = new HashSet<InetSocketAddress>();
        excludedAddrs.add(addr1);
        excludedAddrs.add(addr3);
        excludedAddrs.add(addr4);
        try {
            repp.replaceBookie(addr2, excludedAddrs);
            fail("Should throw BKNotEnoughBookiesException when there is not enough bookies");
        } catch (BKNotEnoughBookiesException bnebe) {
            // expected: no bookie left outside the excluded set
        }
    }

    /**
     * With every bookie in one rack no write quorum can span two racks, but
     * ensemble creation must still succeed.
     */
    @Test
    public void testNewEnsembleWithSingleRack() throws Exception {
        InetSocketAddress addr1 = new InetSocketAddress("127.0.0.1", 3181);
        InetSocketAddress addr2 = new InetSocketAddress("127.0.0.2", 3181);
        InetSocketAddress addr3 = new InetSocketAddress("127.0.0.3", 3181);
        InetSocketAddress addr4 = new InetSocketAddress("127.0.0.4", 3181);
        // Update cluster
        Set<InetSocketAddress> addrs = new HashSet<InetSocketAddress>();
        addrs.add(addr1);
        addrs.add(addr2);
        addrs.add(addr3);
        addrs.add(addr4);
        repp.onClusterChanged(addrs, new HashSet<InetSocketAddress>());
        try {
            ArrayList<InetSocketAddress> ensemble = repp.newEnsemble(3, 2, new HashSet<InetSocketAddress>());
            assertEquals(0, getNumCoveredWriteQuorums(ensemble, 2));
            ArrayList<InetSocketAddress> ensemble2 = repp.newEnsemble(4, 2, new HashSet<InetSocketAddress>());
            assertEquals(0, getNumCoveredWriteQuorums(ensemble2, 2));
        } catch (BKNotEnoughBookiesException bnebe) {
            fail("Should not get not enough bookies exception even there is only one rack.");
        }
    }

    /**
     * With one non-default rack holding most bookies, some but not all write
     * quorums can span two racks.
     */
    @Test
    public void testNewEnsembleWithMultipleRacks() throws Exception {
        InetSocketAddress addr1 = new InetSocketAddress("127.0.0.1", 3181);
        InetSocketAddress addr2 = new InetSocketAddress("127.0.0.2", 3181);
        InetSocketAddress addr3 = new InetSocketAddress("127.0.0.3", 3181);
        InetSocketAddress addr4 = new InetSocketAddress("127.0.0.4", 3181);
        // update dns mapping
        StaticDNSResolver.addNodeToRack(addr1.getAddress().getHostAddress(), NetworkTopology.DEFAULT_RACK);
        StaticDNSResolver.addNodeToRack(addr2.getAddress().getHostAddress(), "/r2");
        StaticDNSResolver.addNodeToRack(addr3.getAddress().getHostAddress(), "/r2");
        StaticDNSResolver.addNodeToRack(addr4.getAddress().getHostAddress(), "/r2");
        // Update cluster
        Set<InetSocketAddress> addrs = new HashSet<InetSocketAddress>();
        addrs.add(addr1);
        addrs.add(addr2);
        addrs.add(addr3);
        addrs.add(addr4);
        repp.onClusterChanged(addrs, new HashSet<InetSocketAddress>());
        try {
            ArrayList<InetSocketAddress> ensemble = repp.newEnsemble(3, 2, new HashSet<InetSocketAddress>());
            int numCovered = getNumCoveredWriteQuorums(ensemble, 2);
            assertTrue(numCovered >= 1 && numCovered < 3);
            ArrayList<InetSocketAddress> ensemble2 = repp.newEnsemble(4, 2, new HashSet<InetSocketAddress>());
            numCovered = getNumCoveredWriteQuorums(ensemble2, 2);
            assertTrue(numCovered >= 1 && numCovered < 3);
        } catch (BKNotEnoughBookiesException bnebe) {
            // Message fixed: this test has multiple racks, not one.
            fail("Should not get not enough bookies exception even though there are multiple racks.");
        }
    }

    /**
     * With enough distinct racks every write quorum should span at least two
     * racks.
     */
    @Test
    public void testNewEnsembleWithEnoughRacks() throws Exception {
        InetSocketAddress addr1 = new InetSocketAddress("127.0.0.1", 3181);
        InetSocketAddress addr2 = new InetSocketAddress("127.0.0.2", 3181);
        InetSocketAddress addr3 = new InetSocketAddress("127.0.0.3", 3181);
        InetSocketAddress addr4 = new InetSocketAddress("127.0.0.4", 3181);
        InetSocketAddress addr5 = new InetSocketAddress("127.0.0.5", 3181);
        InetSocketAddress addr6 = new InetSocketAddress("127.0.0.6", 3181);
        InetSocketAddress addr7 = new InetSocketAddress("127.0.0.7", 3181);
        InetSocketAddress addr8 = new InetSocketAddress("127.0.0.8", 3181);
        // update dns mapping: two bookies per rack across four racks
        StaticDNSResolver.addNodeToRack(addr1.getAddress().getHostAddress(), NetworkTopology.DEFAULT_RACK);
        StaticDNSResolver.addNodeToRack(addr2.getAddress().getHostAddress(), "/r2");
        StaticDNSResolver.addNodeToRack(addr3.getAddress().getHostAddress(), "/r3");
        StaticDNSResolver.addNodeToRack(addr4.getAddress().getHostAddress(), "/r4");
        StaticDNSResolver.addNodeToRack(addr5.getAddress().getHostAddress(), NetworkTopology.DEFAULT_RACK);
        StaticDNSResolver.addNodeToRack(addr6.getAddress().getHostAddress(), "/r2");
        StaticDNSResolver.addNodeToRack(addr7.getAddress().getHostAddress(), "/r3");
        StaticDNSResolver.addNodeToRack(addr8.getAddress().getHostAddress(), "/r4");
        // Update cluster
        Set<InetSocketAddress> addrs = new HashSet<InetSocketAddress>();
        addrs.add(addr1);
        addrs.add(addr2);
        addrs.add(addr3);
        addrs.add(addr4);
        addrs.add(addr5);
        addrs.add(addr6);
        addrs.add(addr7);
        addrs.add(addr8);
        repp.onClusterChanged(addrs, new HashSet<InetSocketAddress>());
        try {
            ArrayList<InetSocketAddress> ensemble1 = repp.newEnsemble(3, 2, new HashSet<InetSocketAddress>());
            assertEquals(3, getNumCoveredWriteQuorums(ensemble1, 2));
            ArrayList<InetSocketAddress> ensemble2 = repp.newEnsemble(4, 2, new HashSet<InetSocketAddress>());
            assertEquals(4, getNumCoveredWriteQuorums(ensemble2, 2));
        } catch (BKNotEnoughBookiesException bnebe) {
            // Message fixed: this test has enough racks, not one.
            fail("Should not get not enough bookies exception even though there are enough racks.");
        }
    }

    /**
     * Counts how many of the ensemble's rolling write quorums contain
     * bookies from more than one rack.
     *
     * @param ensemble        the candidate ensemble, in order
     * @param writeQuorumSize number of consecutive bookies per write quorum
     * @return number of quorums spanning at least two racks
     */
    private int getNumCoveredWriteQuorums(ArrayList<InetSocketAddress> ensemble, int writeQuorumSize)
            throws Exception {
        int ensembleSize = ensemble.size();
        int numCoveredWriteQuorums = 0;
        for (int i = 0; i < ensembleSize; i++) {
            Set<String> racks = new HashSet<String>();
            for (int j = 0; j < writeQuorumSize; j++) {
                int bookieIdx = (i + j) % ensembleSize;
                InetSocketAddress addr = ensemble.get(bookieIdx);
                racks.add(StaticDNSResolver.getRack(addr.getAddress().getHostAddress()));
            }
            numCoveredWriteQuorums += (racks.size() > 1 ? 1 : 0);
        }
        return numCoveredWriteQuorums;
    }
}
| |
/*
* Copyright 2012-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.context.properties;
import java.util.List;
import javax.annotation.PostConstruct;
import javax.validation.constraints.NotNull;
import org.junit.After;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.BeanCreationException;
import org.springframework.beans.factory.FactoryBean;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.beans.factory.support.AbstractBeanDefinition;
import org.springframework.beans.factory.support.GenericBeanDefinition;
import org.springframework.boot.bind.RelaxedBindingNotWritablePropertyException;
import org.springframework.boot.testutil.InternalOutputCapture;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.support.PropertySourcesPlaceholderConfigurer;
import org.springframework.mock.env.MockEnvironment;
import org.springframework.test.context.support.TestPropertySourceUtils;
import org.springframework.test.util.ReflectionTestUtils;
import org.springframework.validation.BindException;
import org.springframework.validation.Errors;
import org.springframework.validation.ValidationUtils;
import org.springframework.validation.Validator;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.fail;
/**
* Tests for {@link ConfigurationPropertiesBindingPostProcessor}.
*
* @author Christian Dupuis
* @author Phillip Webb
* @author Stephane Nicoll
*/
public class ConfigurationPropertiesBindingPostProcessorTests {
@Rule
public ExpectedException thrown = ExpectedException.none();
@Rule
public InternalOutputCapture output = new InternalOutputCapture();
private AnnotationConfigApplicationContext context;
@After
public void close() {
    // Dispose of the context built by the test (if any) so singletons and
    // property sources do not leak between tests.
    if (this.context != null) {
        this.context.close();
    }
}

@Test
public void testValidationWithSetter() {
    // A setter that rejects its bound value should surface exactly one
    // binding error during refresh.
    this.context = new AnnotationConfigApplicationContext();
    TestPropertySourceUtils.addInlinedPropertiesToEnvironment(this.context,
            "test.foo=spam");
    this.context.register(TestConfigurationWithValidatingSetter.class);
    assertBindingFailure(1);
}

@Test
public void unknownFieldFailureMessageContainsDetailsOfPropertyOrigin() {
    // Binding an unknown key (ignoreUnknownFields = false on
    // TestConfiguration) must report where the offending property came from.
    this.context = new AnnotationConfigApplicationContext();
    TestPropertySourceUtils.addInlinedPropertiesToEnvironment(this.context,
            "com.example.baz=spam");
    this.context.register(TestConfiguration.class);
    try {
        this.context.refresh();
        fail("Expected exception");
    }
    catch (BeanCreationException ex) {
        RelaxedBindingNotWritablePropertyException bex = (RelaxedBindingNotWritablePropertyException) ex
                .getRootCause();
        assertThat(bex.getMessage())
                .startsWith("Failed to bind 'com.example.baz' from '"
                        + TestPropertySourceUtils.INLINED_PROPERTIES_PROPERTY_SOURCE_NAME
                        + "' to 'baz' " + "property on '"
                        + TestConfiguration.class.getName());
    }
}

@Test
public void testValidationWithoutJSR303() {
    // Properties class implementing Spring's Validator: one error expected.
    this.context = new AnnotationConfigApplicationContext();
    this.context.register(TestConfigurationWithoutJSR303.class);
    assertBindingFailure(1);
}

@Test
public void testValidationWithJSR303() {
    // Validator implementation plus a @NotNull field: two errors expected.
    this.context = new AnnotationConfigApplicationContext();
    this.context.register(TestConfigurationWithJSR303.class);
    assertBindingFailure(2);
}

@Test
public void testValidationAndNullOutValidator() {
    // After a successful refresh the post-processor should not retain a
    // validator instance.
    this.context = new AnnotationConfigApplicationContext();
    this.context.register(TestConfiguration.class);
    this.context.refresh();
    ConfigurationPropertiesBindingPostProcessor bean = this.context
            .getBean(ConfigurationPropertiesBindingPostProcessor.class);
    assertThat(ReflectionTestUtils.getField(bean, "validator")).isNull();
}
@Test
public void testSuccessfulValidationWithJSR303() {
    // Both constrained properties supplied -> refresh succeeds.
    MockEnvironment env = new MockEnvironment();
    env.setProperty("test.foo", "123456");
    env.setProperty("test.bar", "654321");
    this.context = new AnnotationConfigApplicationContext();
    this.context.setEnvironment(env);
    this.context.register(TestConfigurationWithJSR303.class);
    this.context.refresh();
}

@Test
public void testInitializersSeeBoundProperties() {
    // @PostConstruct in the registered config asserts the property was
    // already bound when initialization callbacks run.
    MockEnvironment env = new MockEnvironment();
    env.setProperty("bar", "foo");
    this.context = new AnnotationConfigApplicationContext();
    this.context.setEnvironment(env);
    this.context.register(TestConfigurationWithInitializer.class);
    this.context.refresh();
}

@Test
public void testValidationWithCustomValidator() {
    // A standalone configurationPropertiesValidator bean is applied.
    this.context = new AnnotationConfigApplicationContext();
    this.context.register(TestConfigurationWithCustomValidator.class);
    assertBindingFailure(1);
}

@Test
public void testValidationWithCustomValidatorNotSupported() {
    // The custom validator only supports PropertyWithCustomValidator, so
    // the other registered properties bean still fails on its own.
    MockEnvironment env = new MockEnvironment();
    env.setProperty("test.foo", "bar");
    this.context = new AnnotationConfigApplicationContext();
    this.context.setEnvironment(env);
    this.context.register(TestConfigurationWithCustomValidator.class,
            PropertyWithValidatingSetter.class);
    assertBindingFailure(1);
}

@Test
public void testPropertyWithEnum() throws Exception {
    doEnumTest("test.theValue=foo");
}

@Test
public void testRelaxedPropertyWithEnum() throws Exception {
    // Relaxed binding: dashed, upper-case, and underscore key variants all
    // resolve to the same enum constant.
    doEnumTest("test.the-value=FoO");
    doEnumTest("TEST_THE_VALUE=FoO");
    doEnumTest("test.THE_VALUE=FoO");
    doEnumTest("test_the_value=FoO");
}

// Binds a single property string and asserts it resolved to FooEnum.FOO.
private void doEnumTest(String property) {
    this.context = new AnnotationConfigApplicationContext();
    TestPropertySourceUtils.addInlinedPropertiesToEnvironment(this.context, property);
    this.context.register(PropertyWithEnum.class);
    this.context.refresh();
    assertThat(this.context.getBean(PropertyWithEnum.class).getTheValue())
            .isEqualTo(FooEnum.FOO);
    this.context.close();
}

@Test
public void testRelaxedPropertyWithSetOfEnum() {
    doEnumSetTest("test.the-values=foo,bar", FooEnum.FOO, FooEnum.BAR);
    doEnumSetTest("test.the-values=foo", FooEnum.FOO);
    doEnumSetTest("TEST_THE_VALUES=FoO", FooEnum.FOO);
    doEnumSetTest("test_the_values=BaR,FoO", FooEnum.BAR, FooEnum.FOO);
}

// Binds a comma-separated enum list and asserts the resulting set contents.
private void doEnumSetTest(String property, FooEnum... expected) {
    this.context = new AnnotationConfigApplicationContext();
    TestPropertySourceUtils.addInlinedPropertiesToEnvironment(this.context, property);
    this.context.register(PropertyWithEnum.class);
    this.context.refresh();
    assertThat(this.context.getBean(PropertyWithEnum.class).getTheValues())
            .contains(expected);
    this.context.close();
}
@Test
public void testValueBindingForDefaults() throws Exception {
    // A @Value("${default.value}") placeholder is resolved during binding.
    this.context = new AnnotationConfigApplicationContext();
    TestPropertySourceUtils.addInlinedPropertiesToEnvironment(this.context,
            "default.value=foo");
    this.context.register(PropertyWithValue.class);
    this.context.refresh();
    assertThat(this.context.getBean(PropertyWithValue.class).getValue())
            .isEqualTo("foo");
}

@Test
public void placeholderResolutionWithCustomLocation() throws Exception {
    this.context = new AnnotationConfigApplicationContext();
    TestPropertySourceUtils.addInlinedPropertiesToEnvironment(this.context,
            "fooValue=bar");
    this.context.register(CustomConfigurationLocation.class);
    this.context.refresh();
    assertThat(this.context.getBean(CustomConfigurationLocation.class).getFoo())
            .isEqualTo("bar");
}

@Test
public void placeholderResolutionWithUnmergedCustomLocation() throws Exception {
    // With merge disabled the environment is not consulted, so the
    // placeholder stays literal.
    this.context = new AnnotationConfigApplicationContext();
    TestPropertySourceUtils.addInlinedPropertiesToEnvironment(this.context,
            "fooValue:bar");
    this.context.register(UnmergedCustomConfigurationLocation.class);
    this.context.refresh();
    assertThat(
            this.context.getBean(UnmergedCustomConfigurationLocation.class).getFoo())
                    .isEqualTo("${fooValue}");
}

@Test
public void configurationPropertiesWithFactoryBean() throws Exception {
    // Registering the post-processor must not force early FactoryBean
    // initialization; onRefresh checks the flag before singletons start.
    ConfigurationPropertiesWithFactoryBean.factoryBeanInit = false;
    this.context = new AnnotationConfigApplicationContext() {
        @Override
        protected void onRefresh() throws BeansException {
            assertThat(ConfigurationPropertiesWithFactoryBean.factoryBeanInit)
                    .as("Init too early").isFalse();
            super.onRefresh();
        }
    };
    this.context.register(ConfigurationPropertiesWithFactoryBean.class);
    GenericBeanDefinition beanDefinition = new GenericBeanDefinition();
    beanDefinition.setBeanClass(FactoryBeanTester.class);
    beanDefinition.setAutowireMode(AbstractBeanDefinition.AUTOWIRE_BY_TYPE);
    this.context.registerBeanDefinition("test", beanDefinition);
    this.context.refresh();
    assertThat(ConfigurationPropertiesWithFactoryBean.factoryBeanInit).as("No init")
            .isTrue();
}

@Test
public void configurationPropertiesWithCharArray() throws Exception {
    // String property converts into a char[] field.
    this.context = new AnnotationConfigApplicationContext();
    TestPropertySourceUtils.addInlinedPropertiesToEnvironment(this.context,
            "test.chars=word");
    this.context.register(PropertyWithCharArray.class);
    this.context.refresh();
    assertThat(this.context.getBean(PropertyWithCharArray.class).getChars())
            .isEqualTo("word".toCharArray());
}

@Test
public void configurationPropertiesWithArrayExpansion() throws Exception {
    // Indexed key "test.chars[4]" grows the pre-populated array.
    this.context = new AnnotationConfigApplicationContext();
    TestPropertySourceUtils.addInlinedPropertiesToEnvironment(this.context,
            "test.chars[4]=s");
    this.context.register(PropertyWithCharArrayExpansion.class);
    this.context.refresh();
    assertThat(this.context.getBean(PropertyWithCharArrayExpansion.class).getChars())
            .isEqualTo("words".toCharArray());
}

@Test
public void notWritablePropertyException() throws Exception {
    // Unknown key on a bean that forbids them -> refresh blows up.
    this.context = new AnnotationConfigApplicationContext();
    TestPropertySourceUtils.addInlinedPropertiesToEnvironment(this.context,
            "test.madeup:word");
    this.context.register(PropertyWithCharArray.class);
    this.thrown.expect(BeanCreationException.class);
    this.thrown.expectMessage("test");
    this.context.refresh();
}
@Test
public void relaxedPropertyNamesSame() throws Exception {
    // Later value for the same relaxed key wins.
    testRelaxedPropertyNames("test.FOO_BAR=test1", "test.FOO_BAR=test2");
}

@Test
public void relaxedPropertyNamesMixed() throws Exception {
    // Underscore and dashed forms refer to the same property; the
    // underscore source here has higher precedence.
    testRelaxedPropertyNames("test.FOO_BAR=test2", "test.foo-bar=test1");
}

// Binds the given inlined properties and asserts fooBar resolved to "test2".
private void testRelaxedPropertyNames(String... environment) {
    this.context = new AnnotationConfigApplicationContext();
    TestPropertySourceUtils.addInlinedPropertiesToEnvironment(this.context,
            environment);
    this.context.register(RelaxedPropertyNames.class);
    this.context.refresh();
    assertThat(this.context.getBean(RelaxedPropertyNames.class).getFooBar())
            .isEqualTo("test2");
}

@Test
public void nestedProperties() throws Exception {
    // gh-3539: underscore environment variables reach nested properties.
    this.context = new AnnotationConfigApplicationContext();
    TestPropertySourceUtils.addInlinedPropertiesToEnvironment(this.context,
            "TEST_NESTED_VALUE=test1");
    this.context.register(PropertyWithNestedValue.class);
    this.context.refresh();
    assertThat(this.context.getBean(PropertyWithNestedValue.class).getNested()
            .getValue()).isEqualTo("test1");
}

@Test
public void bindWithoutConfigurationPropertiesAnnotation() {
    // Binding a bean that lacks @ConfigurationProperties must be rejected
    // with a descriptive message.
    this.context = new AnnotationConfigApplicationContext();
    TestPropertySourceUtils.addInlinedPropertiesToEnvironment(this.context,
            "name:foo");
    this.context.register(ConfigurationPropertiesWithoutAnnotation.class);
    this.thrown.expect(IllegalArgumentException.class);
    this.thrown.expectMessage("No ConfigurationProperties annotation found");
    this.context.refresh();
}

@Test
public void multiplePropertySourcesPlaceholderConfigurer() throws Exception {
    // Two configurer beans should produce a warning in the log output.
    this.context = new AnnotationConfigApplicationContext();
    this.context.register(MultiplePropertySourcesPlaceholderConfigurer.class);
    this.context.refresh();
    assertThat(this.output.toString()).contains(
            "Multiple PropertySourcesPlaceholderConfigurer beans registered");
}

// Refreshes the context and asserts it fails with exactly errorCount
// binding errors wrapped in a BeanCreationException.
private void assertBindingFailure(int errorCount) {
    try {
        this.context.refresh();
        fail("Expected exception");
    }
    catch (BeanCreationException ex) {
        BindException bex = (BindException) ex.getRootCause();
        assertThat(bex.getErrorCount()).isEqualTo(errorCount);
    }
}
// Fixture: exposes a properties bean whose setter validates its input.
@Configuration
@EnableConfigurationProperties
public static class TestConfigurationWithValidatingSetter {

    @Bean
    public PropertyWithValidatingSetter testProperties() {
        return new PropertyWithValidatingSetter();
    }

}

// Fixture: setter throws for any value other than "bar", so binding any
// other "test.foo" value fails during refresh.
@ConfigurationProperties(prefix = "test")
public static class PropertyWithValidatingSetter {

    private String foo;

    public String getFoo() {
        return this.foo;
    }

    public void setFoo(String foo) {
        this.foo = foo;
        if (!foo.equals("bar")) {
            throw new IllegalArgumentException("Wrong value for foo");
        }
    }

}

// Fixture: exposes a properties bean validated via Spring's Validator SPI.
@Configuration
@EnableConfigurationProperties
public static class TestConfigurationWithoutJSR303 {

    @Bean
    public PropertyWithoutJSR303 testProperties() {
        return new PropertyWithoutJSR303();
    }

}

// Fixture: self-validating properties bean (no JSR-303); rejects empty foo.
@ConfigurationProperties(prefix = "test")
public static class PropertyWithoutJSR303 implements Validator {

    private String foo;

    @Override
    public boolean supports(Class<?> clazz) {
        return clazz.isAssignableFrom(getClass());
    }

    @Override
    public void validate(Object target, Errors errors) {
        ValidationUtils.rejectIfEmpty(errors, "foo", "TEST1");
    }

    public String getFoo() {
        return this.foo;
    }

    public void setFoo(String foo) {
        this.foo = foo;
    }

}
@Configuration
@EnableConfigurationProperties
public static class TestConfigurationWithJSR303 {

    // Exposes a bean carrying a JSR-303 constraint (see PropertyWithJSR303).
    @Bean
    public PropertyWithJSR303 testProperties() {
        return new PropertyWithJSR303();
    }

}
/**
 * Verifies ordering: properties must already be bound when {@code @PostConstruct}
 * runs, so {@code init()} can assert that {@code bar} has been populated.
 */
@Configuration
@EnableConfigurationProperties
@ConfigurationProperties
public static class TestConfigurationWithInitializer {

    private String bar;

    public void setBar(String bar) {
        this.bar = bar;
    }

    public String getBar() {
        return this.bar;
    }

    @PostConstruct
    public void init() {
        assertThat(this.bar).isNotNull();
    }

}
// Binds the "com.example" prefix strictly: unknown keys cause a binding failure
// because ignoreUnknownFields is disabled.
@Configuration
@EnableConfigurationProperties
@ConfigurationProperties(prefix = "com.example", ignoreUnknownFields = false)
public static class TestConfiguration {

    private String bar;

    public void setBar(String bar) {
        this.bar = bar;
    }

    public String getBar() {
        return this.bar;
    }

}
/** Adds a JSR-303 {@code @NotNull} constraint on top of the self-validating parent. */
@ConfigurationProperties(prefix = "test")
public static class PropertyWithJSR303 extends PropertyWithoutJSR303 {

    @NotNull
    private String bar;

    public void setBar(String bar) {
        this.bar = bar;
    }

    public String getBar() {
        return this.bar;
    }

}
@Configuration
@EnableConfigurationProperties
public static class TestConfigurationWithCustomValidator {

    @Bean
    public PropertyWithCustomValidator propertyWithCustomValidator() {
        return new PropertyWithCustomValidator();
    }

    // The bean name "configurationPropertiesValidator" is the convention that
    // makes Boot apply this validator during @ConfigurationProperties binding.
    @Bean
    public Validator configurationPropertiesValidator() {
        return new CustomPropertyValidator();
    }

}
/** Plain fixture validated externally by {@code CustomPropertyValidator}. */
@ConfigurationProperties(prefix = "custom")
public static class PropertyWithCustomValidator {

    private String foo;

    public String getFoo() {
        return this.foo;
    }

    public void setFoo(String foo) {
        this.foo = foo;
    }

}
/** Standalone validator that targets only {@code PropertyWithCustomValidator} beans. */
public static class CustomPropertyValidator implements Validator {

    @Override
    public boolean supports(Class<?> aClass) {
        return aClass == PropertyWithCustomValidator.class;
    }

    @Override
    public void validate(Object o, Errors errors) {
        // "foo" is mandatory; rejected with error code TEST1 when empty.
        ValidationUtils.rejectIfEmpty(errors, "foo", "TEST1");
    }

}
// Exercises strict binding of a property directly into a char[] field.
@Configuration
@EnableConfigurationProperties
@ConfigurationProperties(prefix = "test", ignoreUnknownFields = false)
public static class PropertyWithCharArray {

    private char[] chars;

    public char[] getChars() {
        return this.chars;
    }

    public void setChars(char[] chars) {
        this.chars = chars;
    }

}
// Like PropertyWithCharArray but with a pre-populated default value, so tests
// can check how binding interacts with an existing char[] ("word").
@Configuration
@EnableConfigurationProperties
@ConfigurationProperties(prefix = "test", ignoreUnknownFields = false)
public static class PropertyWithCharArrayExpansion {

    private char[] chars = new char[] { 'w', 'o', 'r', 'd' };

    public char[] getChars() {
        return this.chars;
    }

    public void setChars(char[] chars) {
        this.chars = chars;
    }

}
// Exercises binding of a single enum value and of a List of enum values.
@Configuration
@EnableConfigurationProperties
@ConfigurationProperties(prefix = "test")
public static class PropertyWithEnum {

    private FooEnum theValue;

    private List<FooEnum> theValues;

    public void setTheValue(FooEnum value) {
        this.theValue = value;
    }

    public FooEnum getTheValue() {
        return this.theValue;
    }

    public List<FooEnum> getTheValues() {
        return this.theValues;
    }

    public void setTheValues(List<FooEnum> theValues) {
        this.theValues = theValues;
    }

}
/** Sample enum used by {@code PropertyWithEnum} to exercise enum binding. */
enum FooEnum {
    FOO, BAZ, BAR
}
/**
 * Mixes {@code @Value} placeholder injection with {@code @ConfigurationProperties};
 * the static PropertySourcesPlaceholderConfigurer resolves {@code ${default.value}}.
 */
@Configuration
@EnableConfigurationProperties
@ConfigurationProperties(prefix = "test")
public static class PropertyWithValue {

    @Value("${default.value}")
    private String value;

    public void setValue(String value) {
        this.value = value;
    }

    public String getValue() {
        return this.value;
    }

    // static so the configurer is registered before this @Configuration class
    // itself is processed.
    @Bean
    public static PropertySourcesPlaceholderConfigurer configurer() {
        return new PropertySourcesPlaceholderConfigurer();
    }

}
// Binds from an explicit custom file via the (since-removed in later Boot
// versions) "locations" attribute; merged with the environment by default.
@EnableConfigurationProperties
@ConfigurationProperties(locations = "custom-location.yml")
public static class CustomConfigurationLocation {

    private String foo;

    public String getFoo() {
        return this.foo;
    }

    public void setFoo(String foo) {
        this.foo = foo;
    }

}
// Same as CustomConfigurationLocation but with merge = false, so values from
// the environment are NOT merged with the custom file.
@EnableConfigurationProperties
@ConfigurationProperties(locations = "custom-location.yml", merge = false)
public static class UnmergedCustomConfigurationLocation {

    private String foo;

    public String getFoo() {
        return this.foo;
    }

    public void setFoo(String foo) {
        this.foo = foo;
    }

}
// Holder for a flag flipped by FactoryBeanTester.afterPropertiesSet(), letting
// tests assert that the FactoryBean was actually initialized.
@Configuration
@EnableConfigurationProperties
public static class ConfigurationPropertiesWithFactoryBean {

    public static boolean factoryBeanInit;

}
// Exercises relaxed binding: variants such as "foo-bar" or "foo_bar" should
// all bind to the fooBar property.
@Configuration
@EnableConfigurationProperties
@ConfigurationProperties(prefix = "test")
public static class RelaxedPropertyNames {

    private String fooBar;

    public String getFooBar() {
        return this.fooBar;
    }

    public void setFooBar(String fooBar) {
        this.fooBar = fooBar;
    }

}
/**
 * FactoryBean that records its own initialization by setting
 * {@code ConfigurationPropertiesWithFactoryBean.factoryBeanInit}.
 */
@SuppressWarnings("rawtypes")
// Must be a raw type
static class FactoryBeanTester implements FactoryBean, InitializingBean {

    @Override
    public Object getObject() throws Exception {
        return Object.class;
    }

    @Override
    public Class<?> getObjectType() {
        // Deliberately unknown object type.
        return null;
    }

    @Override
    public boolean isSingleton() {
        return true;
    }

    @Override
    public void afterPropertiesSet() throws Exception {
        // Signal to the test that initialization happened.
        ConfigurationPropertiesWithFactoryBean.factoryBeanInit = true;
    }

}
/**
 * Verifies that {@code @Value} placeholders are resolved on a nested object
 * reachable through a getter, not only on the root properties bean.
 */
@Configuration
@EnableConfigurationProperties
@ConfigurationProperties(prefix = "test")
public static class PropertyWithNestedValue {

    private Nested nested = new Nested();

    public Nested getNested() {
        return this.nested;
    }

    // static so the configurer is registered early enough to resolve placeholders.
    @Bean
    public static PropertySourcesPlaceholderConfigurer configurer() {
        return new PropertySourcesPlaceholderConfigurer();
    }

    public static class Nested {

        @Value("${default.value}")
        private String value;

        public void setValue(String value) {
            this.value = value;
        }

        public String getValue() {
            return this.value;
        }

    }

}
// Enables binding for a type that lacks @ConfigurationProperties; refreshing a
// context with this configuration is expected to fail with
// "No ConfigurationProperties annotation found".
@Configuration
@EnableConfigurationProperties(PropertyWithoutConfigurationPropertiesAnnotation.class)
public static class ConfigurationPropertiesWithoutAnnotation {

}
// Declares two placeholder configurers to trigger the
// "Multiple PropertySourcesPlaceholderConfigurer beans registered" warning.
@Configuration
@EnableConfigurationProperties
public static class MultiplePropertySourcesPlaceholderConfigurer {

    @Bean
    public static PropertySourcesPlaceholderConfigurer configurer1() {
        return new PropertySourcesPlaceholderConfigurer();
    }

    @Bean
    public static PropertySourcesPlaceholderConfigurer configurer2() {
        return new PropertySourcesPlaceholderConfigurer();
    }

}
/** Plain POJO deliberately missing {@code @ConfigurationProperties}. */
public static class PropertyWithoutConfigurationPropertiesAnnotation {

    private String name;

    public String getName() {
        return this.name;
    }

    public void setName(String name) {
        this.name = name;
    }

}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.ignite;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.cache.Cache.Entry;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import org.apache.camel.CamelException;
import org.apache.camel.component.ignite.cache.IgniteCacheOperation;
import org.apache.camel.util.ObjectHelper;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.cache.CachePeekMode;
import org.apache.ignite.cache.query.Query;
import org.apache.ignite.cache.query.ScanQuery;
import org.apache.ignite.lang.IgniteBiPredicate;
import org.junit.After;
import org.junit.Test;
import static com.google.common.truth.Truth.assert_;
/**
 * Integration tests for the camel-ignite cache endpoint, exercising the PUT,
 * GET, SIZE, QUERY, REMOVE and CLEAR operations against an embedded Ignite
 * node provided by {@code AbstractIgniteTest}.
 */
public class IgniteCacheTest extends AbstractIgniteTest {

    @Test
    public void testAddEntry() {
        // Key travels in the IGNITE_CACHE_KEY header; the message body is the value.
        template.requestBodyAndHeader("ignite:cache:testcache1?operation=PUT", "1234", IgniteConstants.IGNITE_CACHE_KEY, "abcd");
        assert_().that(ignite().cache("testcache1").size(CachePeekMode.ALL)).isEqualTo(1);
        assert_().that(ignite().cache("testcache1").get("abcd")).isEqualTo("1234");
    }

    @Test
    public void testAddEntrySet() {
        // A Map body performs a bulk put of all its entries.
        template.requestBody("ignite:cache:testcache1?operation=PUT", ImmutableMap.of("abcd", "1234", "efgh", "5678"));
        assert_().that(ignite().cache("testcache1").size(CachePeekMode.ALL)).isEqualTo(2);
        assert_().that(ignite().cache("testcache1").get("abcd")).isEqualTo("1234");
        assert_().that(ignite().cache("testcache1").get("efgh")).isEqualTo("5678");
    }

    @Test
    public void testGetOne() {
        testAddEntry();
        // The key may come from the body, or from the header (header wins over body).
        String result = template.requestBody("ignite:cache:testcache1?operation=GET", "abcd", String.class);
        assert_().that(result).isEqualTo("1234");
        result = template.requestBodyAndHeader("ignite:cache:testcache1?operation=GET", "this value won't be used", IgniteConstants.IGNITE_CACHE_KEY, "abcd", String.class);
        assert_().that(result).isEqualTo("1234");
    }

    @Test
    @SuppressWarnings("unchecked")
    public void testGetMany() {
        IgniteCache<String, String> cache = ignite().getOrCreateCache("testcache1");
        Set<String> keys = new HashSet<>();
        for (int i = 0; i < 100; i++) {
            cache.put("k" + i, "v" + i);
            keys.add("k" + i);
        }
        // A Set body triggers a multi-key get returning a Map of results.
        Map<String, String> result = template.requestBody("ignite:cache:testcache1?operation=GET", keys, Map.class);
        for (String k : keys) {
            assert_().that(result.get(k)).isEqualTo(k.replace("k", "v"));
        }
    }

    @Test
    public void testGetSize() {
        IgniteCache<String, String> cache = ignite().getOrCreateCache("testcache1");
        Set<String> keys = new HashSet<>();
        for (int i = 0; i < 100; i++) {
            cache.put("k" + i, "v" + i);
            keys.add("k" + i);
        }
        // SIZE ignores the body and reports the full cache size.
        Integer result = template.requestBody("ignite:cache:testcache1?operation=SIZE", keys, Integer.class);
        assert_().that(result).isEqualTo(100);
    }

    @Test
    @SuppressWarnings("unchecked")
    public void testQuery() {
        IgniteCache<String, String> cache = ignite().getOrCreateCache("testcache1");
        Set<String> keys = new HashSet<>();
        for (int i = 0; i < 100; i++) {
            cache.put("k" + i, "v" + i);
            keys.add("k" + i);
        }
        // ScanQuery filter keeps only keys k50..k99, so exactly 50 entries match.
        Query<Entry<String, String>> query = new ScanQuery<String, String>(new IgniteBiPredicate<String, String>() {
            private static final long serialVersionUID = 1L;

            @Override
            public boolean apply(String key, String value) {
                return Integer.parseInt(key.replace("k", "")) >= 50;
            }
        });
        List results = template.requestBodyAndHeader("ignite:cache:testcache1?operation=QUERY", keys, IgniteConstants.IGNITE_CACHE_QUERY, query, List.class);
        assert_().that(results.size()).isEqualTo(50);
    }

    @Test
    public void testGetManyTreatCollectionsAsCacheObjects() {
        IgniteCache<Object, String> cache = ignite().getOrCreateCache("testcache1");
        Set<String> keys = new HashSet<>();
        for (int i = 0; i < 100; i++) {
            cache.put("k" + i, "v" + i);
            keys.add("k" + i);
        }
        // Also add a cache entry with the entire Set as a key.
        cache.put(keys, "---");
        // With treatCollectionsAsCacheObjects=true the Set is used as a single
        // key instead of triggering a multi-key get.
        String result = template.requestBody("ignite:cache:testcache1?operation=GET&treatCollectionsAsCacheObjects=true", keys, String.class);
        assert_().that(result).isEqualTo("---");
    }

    @Test
    public void testRemoveEntry() {
        IgniteCache<String, String> cache = ignite().getOrCreateCache("testcache1");
        cache.put("abcd", "1234");
        cache.put("efgh", "5678");
        assert_().that(cache.size(CachePeekMode.ALL)).isEqualTo(2);
        // Key from the body...
        template.requestBody("ignite:cache:testcache1?operation=REMOVE", "abcd");
        assert_().that(cache.size(CachePeekMode.ALL)).isEqualTo(1);
        assert_().that(cache.get("abcd")).isNull();
        // ...and key from the header.
        template.requestBodyAndHeader("ignite:cache:testcache1?operation=REMOVE", "this value won't be used", IgniteConstants.IGNITE_CACHE_KEY, "efgh");
        assert_().that(cache.size(CachePeekMode.ALL)).isEqualTo(0);
        assert_().that(cache.get("efgh")).isNull();
    }

    @Test
    public void testClearCache() {
        IgniteCache<String, String> cache = ignite().getOrCreateCache("testcache1");
        for (int i = 0; i < 100; i++) {
            cache.put("k" + i, "v" + i);
        }
        assert_().that(cache.size(CachePeekMode.ALL)).isEqualTo(100);
        template.requestBody("ignite:cache:testcache1?operation=CLEAR", "this value won't be used");
        assert_().that(cache.size(CachePeekMode.ALL)).isEqualTo(0);
    }

    @Test
    public void testHeaderSetRemoveEntry() {
        testAddEntry();
        String result = template.requestBody("ignite:cache:testcache1?operation=GET", "abcd", String.class);
        assert_().that(result).isEqualTo("1234");
        // The IGNITE_CACHE_OPERATION header overrides the endpoint's operation.
        result = template.requestBodyAndHeader("ignite:cache:testcache1?operation=GET", "abcd", IgniteConstants.IGNITE_CACHE_OPERATION, IgniteCacheOperation.REMOVE, String.class);
        // The body has not changed, but the cache entry is gone.
        assert_().that(result).isEqualTo("abcd");
        assert_().that(ignite().cache("testcache1").size(CachePeekMode.ALL)).isEqualTo(0);
    }

    @Test
    public void testAddEntryNoCacheCreation() {
        // failIfInexistentCache=true must reject operations on an unknown cache.
        try {
            template.requestBodyAndHeader("ignite:cache:testcache2?operation=PUT&failIfInexistentCache=true", "1234", IgniteConstants.IGNITE_CACHE_KEY, "abcd");
        } catch (Exception e) {
            assert_().that(ObjectHelper.getException(CamelException.class, e).getMessage()).startsWith("Ignite cache testcache2 doesn't exist");
            return;
        }
        fail("Should have thrown an exception");
    }

    @Test
    public void testAddEntryDoNotPropagateIncomingBody() {
        // With propagateIncomingBodyIfNoReturnValue=false a PUT returns null
        // rather than echoing the incoming body.
        Object result = template.requestBodyAndHeader("ignite:cache:testcache1?operation=PUT&propagateIncomingBodyIfNoReturnValue=false", "1234", IgniteConstants.IGNITE_CACHE_KEY, "abcd",
                Object.class);
        assert_().that(ignite().cache("testcache1").size(CachePeekMode.ALL)).isEqualTo(1);
        assert_().that(ignite().cache("testcache1").get("abcd")).isEqualTo("1234");
        assert_().that(result).isNull();
    }

    @Override
    public boolean isCreateCamelContextPerClass() {
        // Reuse one CamelContext (and embedded Ignite node) for the whole class.
        return true;
    }

    @After
    public void deleteCaches() {
        // Reset state between tests; testcache2 may never have been created.
        for (String cacheName : ImmutableSet.<String> of("testcache1", "testcache2")) {
            IgniteCache<?, ?> cache = ignite().cache(cacheName);
            if (cache == null) {
                continue;
            }
            cache.clear();
        }
    }

}
| |
package org.opencv.samples.colorblobdetect;
import java.util.List;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame;
import org.opencv.android.JavaCameraView;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint;
import org.opencv.core.Rect;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener2;
import org.opencv.imgproc.Imgproc;
import com.example.bravo.MainActivity;
import com.example.bravo.R;
import android.app.Activity;
import android.os.Bundle;
import android.util.Log;
import android.view.MotionEvent;
import android.view.View;
import android.view.Window;
import android.view.WindowManager;
import android.view.View.OnTouchListener;
import android.widget.FrameLayout;
import bravo.game.manager.GameManager;
/**
 * Camera-driven colour-marker detection controller (despite the name this is a
 * plain class wired into MainActivity, not an android.app.Activity). First it
 * walks the user through touch-based HSV calibration of blue, green and red;
 * afterwards every camera frame is downscaled, colour contours are extracted,
 * the robot and each tower marker are tracked, and results are handed to the
 * GL renderer and the GameManager.
 */
public class MarkerDetectionActivity implements OnTouchListener, CvCameraViewListener2 {
    //-------------------------debug TAGs--------------------------//
    private static final String TAG = "OCVSample::Activity";
    private static final String Draw3Dobject = "draw3Dobject"; // NOTE(review): unused in this class
    //-------------------------Static----------------------------//
    // Colour identifiers shared with MarkerDetector / insertColor.
    private static final int NO_COLOR = -1;
    private static final int BLUE_COLOR = 1;
    private static final int GREEN_COLOR = 2;
    private static final int RED_COLOR = 3;
    private static final int NUM_OF_COLORS = 3; // NOTE(review): unused in this class
    // Per-tower colour signature {front, back1, back2}. The two back entries are
    // identical in every row -- presumably intentional; confirm with MarkerDetector.
    private static int colorPermutation[][] = {
            {BLUE_COLOR,GREEN_COLOR,GREEN_COLOR},
            {GREEN_COLOR,RED_COLOR,RED_COLOR},
            {RED_COLOR,BLUE_COLOR,BLUE_COLOR},
            {BLUE_COLOR,RED_COLOR,RED_COLOR},
            {GREEN_COLOR,BLUE_COLOR,BLUE_COLOR},
            {RED_COLOR,GREEN_COLOR,GREEN_COLOR}
    };
    //-------------------------Fields--------------------------//
    public Mat mRgba;                      // current camera frame (RGBA)
    public MarkerDetector mDetectorArry[]; // one detector per tower
    public MarkerDetector myRobot;         // detector for the player's robot marker
    private int NUM_OF_TOWERS;
    public CameraBridgeViewBase mOpenCvCameraView;
    // Calibration helpers, one per colour.
    private insertColor insertColorBlue;
    private insertColor insertColorRed;
    private insertColor insertColorGreen;
    private int colorToCalibrate, colorToView;
    private boolean updatedLastColor = false;   // true once all three colours were touched
    private boolean colorsAreCalibrated = false; // true once calibration is committed
    private boolean viewCalibration = false;     // true while previewing a calibration result
    private GameManager gameManager;
    private long lastEndFouncTime;   // end time of the previous onCameraFrame, for timing debug
    private int resizeFactor;        // frame downscale factor used for detection
    //-------------------------Activity------------------------//
    MainActivity mMainActivity;
    //-------------------------Methods-------------------------//
    // NOTE(review): this field initializer runs before any constructor body, so
    // BaseLoaderCallback receives mMainActivity while it is still null -- the
    // callback is constructed with a null context even when the
    // MarkerDetectionActivity(MainActivity) constructor is used. Confirm whether
    // BaseLoaderCallback needs the context (e.g. for error dialogs).
    public BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(mMainActivity) {
        @Override
        public void onManagerConnected(int status) {
            switch (status) {
                case LoaderCallbackInterface.SUCCESS:
                {
                    Log.i(TAG, "OpenCV loaded successfully");
                    // Start the camera and route touch events to this class.
                    mOpenCvCameraView.enableView();
                    mOpenCvCameraView.setOnTouchListener(MarkerDetectionActivity.this);
                } break;
                default:
                {
                    super.onManagerConnected(status);
                } break;
            }
        }
    };

    public MarkerDetectionActivity() {
        Log.i(TAG, "Instantiated new " + this.getClass());
    }

    /** Wires the camera preview into the host activity's layout and hooks callbacks. */
    public MarkerDetectionActivity(MainActivity activity) {
        resizeFactor = 4;
        mMainActivity = activity;
        Log.i(TAG, "called colorblob");
        // -1 selects the default (any) camera.
        mOpenCvCameraView = (CameraBridgeViewBase) new JavaCameraView(mMainActivity, -1);
        FrameLayout preview = (FrameLayout) mMainActivity.findViewById(R.id.camera_preview);
        preview.addView(mOpenCvCameraView);
        mOpenCvCameraView.setCvCameraViewListener(this);
        gameManager = mMainActivity.mGameManager;
    }

    /** Allocates per-frame buffers and resets the calibration state machine. */
    public void onCameraViewStarted(int width, int height) {
        mRgba = new Mat(height, width, CvType.CV_8UC4);
        setMarkerDetectors(width, height);
        colorToCalibrate = BLUE_COLOR; // first color to Calibrate
        viewCalibration = false;
        insertColorBlue = new insertColor(BLUE_COLOR);
        insertColorGreen = new insertColor(GREEN_COLOR);
        insertColorRed = new insertColor(RED_COLOR);
        insertColorBlue.prepareGameSize(width, height);
        insertColorGreen.prepareGameSize(width, height);
        insertColorRed.prepareGameSize(width, height);
        mMainActivity.mGLView.mRenderer.setScreenWidthHeight(width, height);
    }

    /** Creates the robot detector plus one detector per tower at the given frame size. */
    public void setMarkerDetectors(int width, int height){
        int i = 0;
        myRobot = new MarkerDetector();
        myRobot.prepareGame(width, height, BLUE_COLOR, RED_COLOR, resizeFactor);
        NUM_OF_TOWERS = mMainActivity.mGameManager.NUM_OF_TOWERS;
        mDetectorArry = new MarkerDetector[NUM_OF_TOWERS];
        for(i = 0; i < NUM_OF_TOWERS; i++){
            mDetectorArry[i] = new MarkerDetector();
            mDetectorArry[i].prepareGame(width, height, BLUE_COLOR, RED_COLOR, resizeFactor);
        }
    }

    public void onCameraViewStopped() {
        mRgba.release();
    }

    /**
     * Touch-driven calibration state machine: the first touch per colour samples
     * that colour (blue -> green -> red); the following touch dismisses the
     * preview. After the red preview is dismissed, calibration is committed and
     * the game starts. Touches are ignored once calibration is complete.
     */
    public boolean onTouch(View v, MotionEvent event) {
        if (colorsAreCalibrated == true){
            return false;
        }
        if ((colorsAreCalibrated == false) && (viewCalibration == false)){
            int xpos = (int) event.getX();
            int ypos = (int) event.getY();
            switch (colorToCalibrate){
                case (BLUE_COLOR):
                    Log.d(TAG, "onTouch: colorToCalibrate = " + colorToCalibrate);
                    insertColorBlue.deliverTouchEvent(xpos, ypos);
                    colorToCalibrate = GREEN_COLOR;
                    colorToView = BLUE_COLOR;
                    viewCalibration = true;
                    break;
                case (GREEN_COLOR):
                    Log.d(TAG, "onTouch: colorToCalibrate = " + colorToCalibrate);
                    insertColorGreen.deliverTouchEvent(xpos, ypos);
                    colorToCalibrate = RED_COLOR;
                    colorToView = GREEN_COLOR;
                    viewCalibration = true;
                    break;
                case (RED_COLOR):
                    Log.d(TAG, "onTouch: colorToCalibrate = " + colorToCalibrate);
                    insertColorRed.deliverTouchEvent(xpos, ypos);
                    colorToCalibrate = NO_COLOR;
                    colorToView = RED_COLOR;
                    viewCalibration = true;
                    updatedLastColor = true;
                    break;
            }
        }else {
            // A touch while previewing dismisses the preview; after the last
            // colour this also commits calibration and starts the game.
            viewCalibration =false;
            if(updatedLastColor == true){
                gameManager.GameStarted();
                setMarkerDetectorsColors();
                colorsAreCalibrated = true;
            }
        }
        return false; // don't need subsequent touch events
    }

    /** Pushes the (de-conflicted) calibrated HSV ranges into every detector. */
    private void setMarkerDetectorsColors(){
        checkCalibratedColors();
        myRobot.updateColors(BLUE_COLOR, insertColorBlue.getHSVarr());
        myRobot.updateColors(GREEN_COLOR, insertColorGreen.getHSVarr());
        myRobot.updateColors(RED_COLOR, insertColorRed.getHSVarr());
        int i = 0;
        for (i = 0; i < NUM_OF_TOWERS; i++){
            mDetectorArry[i].updateColors(BLUE_COLOR, insertColorBlue.getHSVarr());
            mDetectorArry[i].updateColors(GREEN_COLOR, insertColorGreen.getHSVarr());
            mDetectorArry[i].updateColors(RED_COLOR, insertColorRed.getHSVarr());
        }
    }

    /**
     * Resolves overlapping hue ranges between each pair of calibrated colours by
     * splitting the overlap at its midpoint, then writes the adjusted low/high
     * hue bounds back into the insertColor helpers. Indices 0 and 3 of the HSV
     * array are the low and high hue bounds respectively.
     */
    private void checkCalibratedColors() {
        double[] HSVtresholdsBlue = insertColorBlue.getHSVarr();
        double[] HSVtresholdsGreen = insertColorGreen.getHSVarr();
        double[] HSVtresholdsRed = insertColorRed.getHSVarr();
        int lowH = 0;
        int highH = 3;
        double mid = 0;
        boolean flag = false; // set when any overlap was corrected
        if (HSVtresholdsBlue[lowH] < HSVtresholdsGreen[highH] && HSVtresholdsBlue[highH] > HSVtresholdsGreen[highH]){
            mid = (HSVtresholdsBlue[lowH] + HSVtresholdsGreen[highH])/2;
            HSVtresholdsBlue[lowH] = Math.floor(mid);
            HSVtresholdsGreen[highH] = Math.floor(mid);
            flag = true;
        }
        if (HSVtresholdsBlue[lowH] < HSVtresholdsRed[highH] && HSVtresholdsBlue[highH] > HSVtresholdsRed[highH]){
            mid = (HSVtresholdsBlue[lowH] + HSVtresholdsRed[highH])/2;
            HSVtresholdsBlue[lowH] = Math.floor(mid);
            HSVtresholdsRed[highH] = Math.floor(mid);
            flag = true;
        }
        if (HSVtresholdsRed[lowH] < HSVtresholdsGreen[highH] && HSVtresholdsRed[highH] > HSVtresholdsGreen[highH]){
            mid = (HSVtresholdsRed[lowH] + HSVtresholdsGreen[highH])/2;
            HSVtresholdsRed[lowH] = Math.floor(mid);
            HSVtresholdsGreen[highH] = Math.floor(mid);
            flag = true;
        }
        if (HSVtresholdsRed[lowH] < HSVtresholdsBlue[highH] && HSVtresholdsRed[highH] > HSVtresholdsBlue[highH]){
            mid = (HSVtresholdsRed[lowH] + HSVtresholdsBlue[highH])/2;
            HSVtresholdsRed[lowH] = Math.floor(mid);
            HSVtresholdsBlue[highH] = Math.floor(mid);
            flag = true;
        }
        if (HSVtresholdsGreen[lowH] < HSVtresholdsBlue[highH] && HSVtresholdsGreen[highH] > HSVtresholdsBlue[highH]){
            mid = (HSVtresholdsGreen[lowH] + HSVtresholdsBlue[highH])/2;
            HSVtresholdsGreen[lowH] = Math.floor(mid);
            HSVtresholdsBlue[highH] = Math.floor(mid);
            flag = true;
        }
        if (HSVtresholdsGreen[lowH] < HSVtresholdsRed[highH] && HSVtresholdsGreen[highH] > HSVtresholdsRed[highH]){
            mid = (HSVtresholdsGreen[lowH] + HSVtresholdsRed[highH])/2;
            HSVtresholdsGreen[lowH] = Math.floor(mid);
            HSVtresholdsRed[highH] = Math.floor(mid);
            flag = true;
        }
        if (flag) {
            gameManager.printMiddle("checkCalibratedColors");
        }
        insertColorBlue.setMaxH(HSVtresholdsBlue[highH]);
        insertColorBlue.setMinH(HSVtresholdsBlue[lowH]);
        insertColorGreen.setMaxH(HSVtresholdsGreen[highH]);
        insertColorGreen.setMinH(HSVtresholdsGreen[lowH]);
        insertColorRed.setMaxH(HSVtresholdsRed[highH]);
        insertColorRed.setMinH(HSVtresholdsRed[lowH]);
    }

    /**
     * Per-frame pipeline. During calibration: feed the frame to the colour being
     * calibrated and optionally return its preview Mat. Once calibrated:
     * downscale the frame, extract blue/green/red contours, track robot and
     * towers, overlay markers, notify the game and trigger the GL render.
     */
    public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
        long onCameraFrameStartTime = System.currentTimeMillis();//start of the func
        mRgba = inputFrame.rgba();
        if (colorsAreCalibrated == false){
            switch (colorToCalibrate) {
                case (BLUE_COLOR):
                    insertColorBlue.updateFrame(mRgba);
                    break;
                case (GREEN_COLOR):
                    insertColorGreen.updateFrame(mRgba);
                    break;
                case (RED_COLOR):
                    insertColorRed.updateFrame(mRgba);
                    break;
            }
            if (viewCalibration == true) {
                switch (colorToView) {
                    case (BLUE_COLOR):
                        Log.i(TAG, "onCameraFrame: insertColorBlue");
                        return insertColorBlue.getMat();
                    case (GREEN_COLOR):
                        Log.i(TAG, "onCameraFrame: insertColorGreen");
                        return insertColorGreen.getMat();
                    case (RED_COLOR):
                        Log.i(TAG, "onCameraFrame: insertColorRed");
                        double[] arr1 = insertColorBlue.getHSVarr();
                        double[] arr2 = insertColorGreen.getHSVarr();
                        double[] arr3 = insertColorRed.getHSVarr();
                        /*gameManager.printTopLeft("H: " + arr1[0] + "-" + arr1[3] + "   S: " + arr1[1] + "-" + arr1[4] + "   V: " + arr1[2] + "-" + arr1[5] + "\n" +
                        "H: " + arr2[0] + "-" + arr2[3] + "   S: " + arr2[1] + "-" + arr2[4] + "   V: " + arr2[2] + "-" + arr2[5] + "\n" +
                        "H: " + arr3[0] + "-" + arr3[3] + "   S: " + arr3[1] + "-" + arr3[4] + "   V: " + arr3[2] + "-" + arr3[5]);*/
                        return insertColorRed.getMat();
                }
            }
        }
        if (colorsAreCalibrated == true) {
            // Downscale by resizeFactor to cut contour-extraction cost.
            Mat mPyrDownMat = new Mat();
            long PyrDownStart = System.currentTimeMillis();//start of the func
            Imgproc.resize(mRgba, mPyrDownMat, new Size(), 1/((double)resizeFactor), 1/((double)resizeFactor), Imgproc.INTER_NEAREST );
            long PyrDownEnd = System.currentTimeMillis();//start of the func
            String PyrDownText = "\nresize timr: " + (PyrDownEnd - PyrDownStart);
            //----------------------------------------------
            //long PyrDownStart1 = System.currentTimeMillis();//start of the func
            //Imgproc.pyrDown(mRgba, mPyrDownMat);
            //long PyrDownStart2 = System.currentTimeMillis();//start of the func
            //Imgproc.pyrDown(mPyrDownMat, mPyrDownMat);
            //long PyrDownEnd = System.currentTimeMillis();//start of the func
            //String PyrDownText = "\npyrDown1: " + (PyrDownStart2 - PyrDownStart1) +"\npyrDown2: " + (PyrDownEnd - PyrDownStart2) +
            //		"\ntotal: " + (PyrDownEnd - PyrDownStart1);
            //----------------------------------------------
            long startB = System.currentTimeMillis();//start of the func
            List<MatOfPoint> blueContours = myRobot.getContours(mPyrDownMat, BLUE_COLOR);
            long startG = System.currentTimeMillis();//start of the func
            List<MatOfPoint> GreenContours = myRobot.getContours(mPyrDownMat, GREEN_COLOR);
            long startR = System.currentTimeMillis();//start of the func
            List<MatOfPoint> redContours = myRobot.getContours(mPyrDownMat, RED_COLOR);
            long endR = System.currentTimeMillis();//start of the func
            String debugText = "blueContours: " + blueContours.size() +"\nGreenContours: " + GreenContours.size() + "\nredContours: " + redContours.size() +
                    "\nblue time: " + (startG - startB) + "\ngreen time: " + (startR - startG) + "\nred time: " + (endR - startR);
            //gameManager.printTopRight(debugText + PyrDownText);
            //----------------------------------------------
            long detactonStartOld = System.currentTimeMillis();//time before draw3Dobject
            setTrackingColor(blueContours,GreenContours,redContours);
            long detactonEndOld = System.currentTimeMillis();//time before draw3Dobject
            // Overlay detected marker positions on the full-resolution frame.
            mRgba = myRobot.drawObject(myRobot.getMiddleX(), myRobot.getMiddleY(), mRgba, new Scalar(255, 0, 0));
            for (int k = 0; k < NUM_OF_TOWERS; k++){
                mRgba = mDetectorArry[k].drawObject(mDetectorArry[k].getMiddleX(), mDetectorArry[k].getMiddleY(), mRgba, new Scalar(255, 0, 0));
            }
            int num = 0;
            for (int k = 0; k < NUM_OF_TOWERS; k++){
                if (mDetectorArry[k].isDetected()) {
                    num++;
                }
            }
            //gameManager.printTopRight("num of detacted obj : " + num);
            mMainActivity.mGameManager.objCheck();
            long onCameraFrameBeforeDraw3DobjectTime = System.currentTimeMillis();//time before draw3Dobject
            draw3Dobject(); //prints the 3d objects
            long onCameraFrameEndTime = System.currentTimeMillis();//end of the func
            //printFouncTimeForDebug(onCameraFrameStartTime,onCameraFrameBeforeDraw3DobjectTime,onCameraFrameEndTime);
        }
        lastEndFouncTime = System.currentTimeMillis();//End of the func for use in the next start of founc
        return mRgba;
    }

    // NOTE(review): appears unused within this class -- confirm before removing.
    private Scalar converScalarHsv2Rgba(Scalar hsvColor) {
        Mat pointMatRgba = new Mat();
        Mat pointMatHsv = new Mat(1, 1, CvType.CV_8UC3, hsvColor);
        Imgproc.cvtColor(pointMatHsv, pointMatRgba, Imgproc.COLOR_HSV2RGB_FULL, 4);
        return new Scalar(pointMatRgba.get(0, 0));
    }

    /** Pushes the tracked detectors to the GL renderer and requests a redraw. */
    private void draw3Dobject(){
        mMainActivity.mGLView.mRenderer.setTrackObjects(mDetectorArry);
        mMainActivity.mGLView.requestRender();
    }

    /** Formats per-frame timing figures for debug display (output currently commented out). */
    private void printFouncTimeForDebug(long start, long mid, long end){
        long sM = mid-start;
        long sE = end-start;
        long mE = end-mid;
        long lastEtoS = start-lastEndFouncTime;
        long lastEtoE = end-lastEndFouncTime;
        // need to add time from end to next start
        String debugText = "onCameraFrame time: " + sE + "\ndetection time: " + sM + "\nDraw3Dobject time: " + mE
                + "\nbetween time calls: " + lastEtoS + "\nfull cicle time: " + lastEtoE;
        //gameManager.printTopLeft(debugText);
    }

    /** Marks exactly one tower (newCoin) as holding the coin; clears all others. */
    public void setCoinDistribution(int newCoin){
        int i = 0;
        for (i = 0; i < NUM_OF_TOWERS; i++){
            mDetectorArry[i].setCoin(false);
        }
        mDetectorArry[newCoin].setCoin(true);
    }

    /**
     * Feeds the extracted contour lists to the robot detector, then to each
     * tower detector using that tower's {front, back1, back2} colour signature
     * from colorPermutation.
     */
    public void setTrackingColor(List<MatOfPoint> blueContours, List<MatOfPoint> greenContours, List<MatOfPoint> redContours){
        myRobot.trackObject(blueContours, greenContours, redContours);
        int front, back1, back2;
        List<MatOfPoint> frontColor = null, backColor1 = null, backColor2 = null;
        for(int i = 0; i < NUM_OF_TOWERS ; i++){
            front = colorPermutation[i][0];
            back1 = colorPermutation[i][1];
            back2 = colorPermutation[i][2];
            switch(front){
                case BLUE_COLOR: frontColor = blueContours; break;
                case GREEN_COLOR: frontColor = greenContours; break;
                case RED_COLOR: frontColor = redContours; break;
                default: ;
            }
            switch(back1){
                case BLUE_COLOR: backColor1 = blueContours; break;
                case GREEN_COLOR: backColor1 = greenContours; break;
                case RED_COLOR: backColor1 = redContours; break;
                default: ;
            }
            switch(back2){
                case BLUE_COLOR: backColor2 = blueContours; break;
                case GREEN_COLOR: backColor2 = greenContours; break;
                case RED_COLOR: backColor2 = redContours; break;
                default: ;
            }
            mDetectorArry[i].trackObject(frontColor, backColor1, backColor2);
        }
    }
}
| |
/* (The MIT License)
Copyright (c) 2006 Adam Bennett (cruxic@gmail.com)
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
package adamb.ogg;
import java.util.*;
import adamb.util.Util;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
/**
 * A single Ogg page. "Content" is defined as the bytes after the header.
 */
public class Page
{
    /** The size of the page header excluding the variable length segment table. */
    public static final int FIXED_HEADER_SIZE = 27;

    /**
     * The maximum size of one page in the page stream (header size + max number
     * of segments + max content size). The maximum of 255 segments (255 bytes
     * each) caps the physical page size at 65307 bytes, just under 64kB.
     */
    public static final int MAX_PAGE_SIZE = FIXED_HEADER_SIZE + 255 + (255 * 255);

    /** The header offset of the checksum value. */
    public static final int HEADER_CHECKSUM_OFFSET = 22;

    /** Must be zero. "The capture pattern is followed by the stream structure revision". */
    public int streamStructureVersion;

    /** Continued-packet flag: the page begins with the remainder of a packet from the previous page. */
    public boolean isContinued;

    /** First page of the logical bitstream (bos). */
    public boolean isFirst;

    /** Last page of the logical bitstream (eos). */
    public boolean isLast;

    /**
     * Codec-defined "granule" position (packed LSb of LSB first, like the rest of
     * Ogg data). For audio it is typically the total samples encoded after all
     * packets that finish on this page (packets begun here but continuing on the
     * next page do not count); for video it would likely be the frame number.
     * The value in the last page's header therefore gives the stream length, and
     * a truncated stream still reports the samples that can be fully decoded.
     * A special value of '-1' (in two's complement) indicates that no packets
     * finish on this page.
     */
    public long absGranulePos;

    /**
     * Serial number associating physical pages with one logical stream. Ogg
     * allows separate logical bitstreams to be mixed at page granularity in a
     * physical bitstream (usually sequentially, but interleaving is possible);
     * each logical stream must have a unique serial number within a physical stream.
     */
    public int streamSerialNumber;

    /** Page counter; lets us know if a page is lost (useful where packets span page boundaries). */
    public int sequence;

    /** CRC checksum of the whole page (stored at HEADER_CHECKSUM_OFFSET). */
    public int checksum;

    /** The segments on this page. A page may contain from 0 to 255 segments. */
    public ArrayList<Segment> segments;

    /** The page content: the bytes after the header. */
    public byte[] content;
/**
 * Creates an empty page with stream structure revision zero.
 *
 * <p>The Ogg bitstream specification strongly recommends a nominal page size of
 * approximately 4-8kB, so the segment list is pre-sized to avoid repeatedly
 * growing the list in the average case.</p>
 */
public Page()
{
    streamStructureVersion = 0;
    segments = new ArrayList<>(32);
}
/**
 * Serializes the fixed-size portion of the page header — capture pattern,
 * revision, flags, granule position, serial, sequence, checksum and segment
 * count — into a freshly allocated little-endian byte array.
 *
 * @return a FIXED_HEADER_SIZE byte array holding the header
 */
public byte[] getFixedHeaderBytes()
{
    ByteBuffer header = ByteBuffer.allocate(FIXED_HEADER_SIZE);
    header.order(ByteOrder.LITTLE_ENDIAN); // Ogg values are little endian
    header.put(PhysicalPageStream.OGG_STREAM_CAPTURE_PATTERN); // "OggS"
    header.put(Util.ubyte(streamStructureVersion));
    // pack the three header flags into a single byte
    int flags = (isContinued ? 1 : 0) | (isFirst ? 2 : 0) | (isLast ? 4 : 0);
    header.put((byte) flags);
    header.putLong(absGranulePos);
    header.putInt(streamSerialNumber);
    header.putInt(sequence);
    header.putInt(checksum);
    assert segments.size() <= 255;
    header.put(Util.ubyte(segments.size()));
    assert header.position() == FIXED_HEADER_SIZE;
    return header.array();
}
/**
 * Builds the lacing-value table for this page: one byte per segment,
 * holding that segment's size.
 *
 * @return the raw segment table as it appears after the fixed header
 */
public byte[] getSegmentTableBytes()
{
    byte[] table = new byte[segments.size()];
    int index = 0;
    for (Segment segment : segments)
        table[index++] = Util.ubyte(segment.size());
    return table;
}
/**
 * Sums the sizes of every segment on this page.
 *
 * @return the total number of content bytes described by the segment table
 */
public int calculateContentSizeFromSegments()
{
    int total = 0;
    for (int i = 0; i < segments.size(); i++)
        total += segments.get(i).size();
    return total;
}
/**
 Populate the header values from the bytes.
 @param fixedHeaderBytes raw header data. If the array length equals FIXED_HEADER_SIZE it
 must begin with the capture pattern; a shorter array is assumed to have had the
 pattern stripped already.
 @return the segment count
 @throws InvalidHeaderException if the bytes do not begin with the ogg stream capture
 pattern "OggS" or any other header values are incorrect. The page is corrupt in some way.
*/
public int parseFixedHeaderValues(byte[] fixedHeaderBytes)
    throws InvalidHeaderException
{
    boolean hasCapturePattern = fixedHeaderBytes.length == FIXED_HEADER_SIZE;
    if (hasCapturePattern && !Util.startsWith(fixedHeaderBytes, PhysicalPageStream.OGG_STREAM_CAPTURE_PATTERN))
        throw new InvalidHeaderException("Ogg page does not begin with \"OggS\"!");
    ByteBuffer buffer = ByteBuffer.wrap(fixedHeaderBytes);
    buffer.order(ByteOrder.LITTLE_ENDIAN); // Ogg values are LE
    if (hasCapturePattern)
        buffer.position(PhysicalPageStream.OGG_STREAM_CAPTURE_PATTERN.length); // skip "OggS"
    streamStructureVersion = Util.ubyte(buffer.get());
    if (streamStructureVersion != 0)
        throw new InvalidHeaderException("Wrong Ogg stream structure revision " + streamStructureVersion);
    // unpack the three flag bits
    int flags = Util.ubyte(buffer.get());
    isContinued = (flags & 1) != 0;
    isFirst = (flags & 2) != 0;
    isLast = (flags & 4) != 0;
    absGranulePos = buffer.getLong();
    streamSerialNumber = buffer.getInt();
    sequence = buffer.getInt();
    checksum = buffer.getInt();
    return Util.ubyte(buffer.get()); // segment count
}
/**
 Builds the Segment objects from the raw lacing-value table, accumulating
 each segment's offset into the page content as it goes.
 @return the calculated content size for convenience (a by-product of parsing the segment table)
*/
public int parseSegmentTable(byte[] segmentTable)
{
    int contentOffset = 0;
    for (byte lacing : segmentTable)
    {
        int lacingValue = Util.ubyte(lacing);
        segments.add(new Segment(this, contentOffset, lacingValue));
        contentOffset += lacingValue;
    }
    return contentOffset;
}
/**
 * Total on-stream size of this page: the fixed header, the segment table
 * (one byte per segment) and the content bytes.
 */
public int size()
{
    int segmentTableSize = segments.size();
    return FIXED_HEADER_SIZE + segmentTableSize + content.length;
}
/**
 Field-by-field comparison against another page.
 @param p the page to compare with
 @param ignoreCRC when true, checksum differences are tolerated
 @param ignoreSequence when true, sequence-number differences are tolerated
*/
public boolean equals(Page p, boolean ignoreCRC, boolean ignoreSequence)
{
    if (streamStructureVersion != p.streamStructureVersion)
        return false;
    if (isContinued != p.isContinued || isFirst != p.isFirst || isLast != p.isLast)
        return false;
    if (absGranulePos != p.absGranulePos || streamSerialNumber != p.streamSerialNumber)
        return false;
    if (!ignoreSequence && sequence != p.sequence)
        return false;
    if (!ignoreCRC && checksum != p.checksum)
        return false;
    return Arrays.equals(content, p.content)
        && Arrays.equals(getSegmentTableBytes(), p.getSegmentTableBytes());
}
}
| |
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.protocol.saml;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.NameValuePair;
import org.apache.http.client.HttpClient;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.message.BasicNameValuePair;
import org.jboss.logging.Logger;
import org.keycloak.connections.httpclient.HttpClientProvider;
import org.keycloak.dom.saml.v2.assertion.AssertionType;
import org.keycloak.dom.saml.v2.assertion.AttributeStatementType;
import org.keycloak.dom.saml.v2.protocol.ResponseType;
import org.keycloak.events.EventBuilder;
import org.keycloak.models.ClientModel;
import org.keycloak.models.ClientSessionModel;
import org.keycloak.models.KeyManager;
import org.keycloak.models.KeycloakSession;
import org.keycloak.models.ProtocolMapperModel;
import org.keycloak.models.RealmModel;
import org.keycloak.models.UserModel;
import org.keycloak.models.UserSessionModel;
import org.keycloak.protocol.LoginProtocol;
import org.keycloak.protocol.ProtocolMapper;
import org.keycloak.protocol.RestartLoginCookie;
import org.keycloak.protocol.saml.mappers.SAMLAttributeStatementMapper;
import org.keycloak.protocol.saml.mappers.SAMLLoginResponseMapper;
import org.keycloak.protocol.saml.mappers.SAMLRoleListMapper;
import org.keycloak.saml.SAML2ErrorResponseBuilder;
import org.keycloak.saml.SAML2LoginResponseBuilder;
import org.keycloak.saml.SAML2LogoutRequestBuilder;
import org.keycloak.saml.SAML2LogoutResponseBuilder;
import org.keycloak.saml.SignatureAlgorithm;
import org.keycloak.saml.common.constants.GeneralConstants;
import org.keycloak.saml.common.constants.JBossSAMLURIConstants;
import org.keycloak.saml.common.exceptions.ConfigurationException;
import org.keycloak.saml.common.exceptions.ParsingException;
import org.keycloak.saml.common.exceptions.ProcessingException;
import org.keycloak.saml.common.util.XmlKeyInfoKeyNameTransformer;
import org.keycloak.saml.processing.core.util.KeycloakKeySamlExtensionGenerator;
import org.keycloak.services.ErrorPage;
import org.keycloak.services.managers.ClientSessionCode;
import org.keycloak.services.managers.ResourceAdminManager;
import org.keycloak.services.messages.Messages;
import org.keycloak.services.resources.RealmsResource;
import org.w3c.dom.Document;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriBuilder;
import javax.ws.rs.core.UriInfo;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.security.PublicKey;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.UUID;
/**
* @author <a href="mailto:bill@burkecentral.com">Bill Burke</a>
* @version $Revision: 1 $
*/
public class SamlProtocol implements LoginProtocol {
protected static final Logger logger = Logger.getLogger(SamlProtocol.class);
// Canonical string values used for boolean-valued client attributes.
public static final String ATTRIBUTE_TRUE_VALUE = "true";
public static final String ATTRIBUTE_FALSE_VALUE = "false";
// Client attribute keys holding the SAML endpoint URLs (assertion consumer / single logout).
public static final String SAML_ASSERTION_CONSUMER_URL_POST_ATTRIBUTE = "saml_assertion_consumer_url_post";
public static final String SAML_ASSERTION_CONSUMER_URL_REDIRECT_ATTRIBUTE = "saml_assertion_consumer_url_redirect";
public static final String SAML_SINGLE_LOGOUT_SERVICE_URL_POST_ATTRIBUTE = "saml_single_logout_service_url_post";
public static final String SAML_SINGLE_LOGOUT_SERVICE_URL_REDIRECT_ATTRIBUTE = "saml_single_logout_service_url_redirect";
// Identifier under which this LoginProtocol implementation is registered.
public static final String LOGIN_PROTOCOL = "saml";
// Note/attribute keys describing the requested binding and login style.
public static final String SAML_BINDING = "saml_binding";
public static final String SAML_IDP_INITIATED_LOGIN = "saml_idp_initiated_login";
// Binding discriminator values stored under SAML_BINDING.
public static final String SAML_POST_BINDING = "post";
public static final String SAML_SOAP_BINDING = "soap";
public static final String SAML_REDIRECT_BINDING = "get";
public static final String SAML_REQUEST_ID = "SAML_REQUEST_ID";
// Session note keys captured at login and replayed when building the logout messages.
public static final String SAML_LOGOUT_BINDING = "saml.logout.binding";
public static final String SAML_LOGOUT_ADD_EXTENSIONS_ELEMENT_WITH_KEY_INFO = "saml.logout.addExtensionsElementWithKeyInfo";
public static final String SAML_SERVER_SIGNATURE_KEYINFO_KEY_NAME_TRANSFORMER = "SAML_SERVER_SIGNATURE_KEYINFO_KEY_NAME_TRANSFORMER";
public static final String SAML_LOGOUT_REQUEST_ID = "SAML_LOGOUT_REQUEST_ID";
public static final String SAML_LOGOUT_RELAY_STATE = "SAML_LOGOUT_RELAY_STATE";
public static final String SAML_LOGOUT_CANONICALIZATION = "SAML_LOGOUT_CANONICALIZATION";
public static final String SAML_LOGOUT_BINDING_URI = "SAML_LOGOUT_BINDING_URI";
public static final String SAML_LOGOUT_SIGNATURE_ALGORITHM = "saml.logout.signature.algorithm";
// NameID bookkeeping: the value/format issued at login must be echoed back at logout.
public static final String SAML_NAME_ID = "SAML_NAME_ID";
public static final String SAML_NAME_ID_FORMAT = "SAML_NAME_ID_FORMAT";
public static final String SAML_DEFAULT_NAMEID_FORMAT = JBossSAMLURIConstants.NAMEID_FORMAT_UNSPECIFIED.get();
public static final String SAML_PERSISTENT_NAME_ID_FOR = "saml.persistent.name.id.for";
// IDP-initiated SSO configuration keys.
public static final String SAML_IDP_INITIATED_SSO_RELAY_STATE = "saml_idp_initiated_sso_relay_state";
public static final String SAML_IDP_INITIATED_SSO_URL_NAME = "saml_idp_initiated_sso_url_name";
// Per-request collaborators injected via the fluent setters.
protected KeycloakSession session;
protected RealmModel realm;
protected UriInfo uriInfo;
protected HttpHeaders headers;
protected EventBuilder event;
/** Fluent setter for the active Keycloak session. */
@Override
public SamlProtocol setSession(KeycloakSession session) {
    this.session = session;
    return this;
}

/** Fluent setter for the realm this protocol instance operates on. */
@Override
public SamlProtocol setRealm(RealmModel realm) {
    this.realm = realm;
    return this;
}

/** Fluent setter for the URI info of the current request. */
@Override
public SamlProtocol setUriInfo(UriInfo uriInfo) {
    this.uriInfo = uriInfo;
    return this;
}

/** Fluent setter for the current request's HTTP headers. */
@Override
public SamlProtocol setHttpHeaders(HttpHeaders headers) {
    this.headers = headers;
    return this;
}

/** Fluent setter for the audit event builder. */
@Override
public SamlProtocol setEventBuilder(EventBuilder event) {
    this.event = event;
    return this;
}
/**
 * Builds the response for a failed or cancelled login.
 *
 * <p>For IDP-initiated logins there is no SP request to answer: a cancel
 * redirects back to the idp-initiated SSO endpoint, anything else shows an
 * error page. Otherwise a SAML error (status) response is built, signed
 * and/or encrypted per the client's configuration, and returned over the
 * session's binding. In every case the restart cookie is expired and the
 * client session removed.</p>
 */
@Override
public Response sendError(ClientSessionModel clientSession, Error error) {
    try {
        ClientModel client = clientSession.getClient();
        if ("true".equals(client.getAttribute(SAML_IDP_INITIATED_LOGIN))) {
            if (error == Error.CANCELLED_BY_USER) {
                // Cancel: send the browser back to the idp-initiated SSO URL for this client.
                UriBuilder builder = RealmsResource.protocolUrl(uriInfo).path(SamlService.class, "idpInitiatedSSO");
                Map<String, String> params = new HashMap<>();
                params.put("realm", realm.getName());
                params.put("protocol", LOGIN_PROTOCOL);
                params.put("client", client.getAttribute(SAML_IDP_INITIATED_SSO_URL_NAME));
                URI redirect = builder.buildFromMap(params);
                return Response.status(302).location(redirect).build();
            } else {
                return ErrorPage.error(session, translateErrorToIdpInitiatedErrorMessage(error));
            }
        } else {
            // SP-initiated flow: answer with a SAML status (error) response.
            SAML2ErrorResponseBuilder builder = new SAML2ErrorResponseBuilder().destination(clientSession.getRedirectUri()).issuer(getResponseIssuer(realm)).status(translateErrorToSAMLStatus(error).get());
            try {
                JaxrsSAML2BindingBuilder binding = new JaxrsSAML2BindingBuilder().relayState(clientSession.getNote(GeneralConstants.RELAY_STATE));
                SamlClient samlClient = new SamlClient(client);
                KeyManager keyManager = session.keys();
                if (samlClient.requiresRealmSignature()) {
                    // Sign the whole document with the realm's active RSA key.
                    KeyManager.ActiveRsaKey keys = keyManager.getActiveRsaKey(realm);
                    String keyName = samlClient.getXmlSigKeyInfoKeyNameTransformer().getKeyName(keys.getKid(), keys.getCertificate());
                    String canonicalization = samlClient.getCanonicalizationMethod();
                    if (canonicalization != null) {
                        binding.canonicalizationMethod(canonicalization);
                    }
                    binding.signatureAlgorithm(samlClient.getSignatureAlgorithm()).signWith(keyName, keys.getPrivateKey(), keys.getPublicKey(), keys.getCertificate()).signDocument();
                }
                if (samlClient.requiresEncryption()) {
                    PublicKey publicKey;
                    try {
                        publicKey = SamlProtocolUtils.getEncryptionValidationKey(client);
                    } catch (Exception e) {
                        logger.error("failed", e);
                        return ErrorPage.error(session, Messages.FAILED_TO_PROCESS_RESPONSE);
                    }
                    binding.encrypt(publicKey);
                }
                Document document = builder.buildDocument();
                return buildErrorResponse(clientSession, binding, document);
            } catch (Exception e) {
                return ErrorPage.error(session, Messages.FAILED_TO_PROCESS_RESPONSE);
            }
        }
    } finally {
        // Whatever happened, the login flow is over: expire the restart cookie
        // and drop the client session.
        RestartLoginCookie.expireRestartCookie(realm, session.getContext().getConnection(), uriInfo);
        session.sessions().removeClientSession(realm, clientSession);
    }
}
/**
 * Sends the error document to the client's redirect URI over whichever
 * binding (POST or redirect) the client session negotiated.
 */
protected Response buildErrorResponse(ClientSessionModel clientSession, JaxrsSAML2BindingBuilder binding, Document document) throws ConfigurationException, ProcessingException, IOException {
    boolean post = isPostBinding(clientSession);
    String redirectUri = clientSession.getRedirectUri();
    return post
            ? binding.postBinding(document).response(redirectUri)
            : binding.redirectBinding(document).response(redirectUri);
}
/**
 * Maps an internal login {@code Error} to the SAML second-level status code
 * to report to the SP; unknown errors fall back to "request denied".
 */
private JBossSAMLURIConstants translateErrorToSAMLStatus(Error error) {
    if (error == Error.CANCELLED_BY_USER || error == Error.CONSENT_DENIED) {
        return JBossSAMLURIConstants.STATUS_REQUEST_DENIED;
    }
    if (error == Error.PASSIVE_INTERACTION_REQUIRED || error == Error.PASSIVE_LOGIN_REQUIRED) {
        return JBossSAMLURIConstants.STATUS_NO_PASSIVE;
    }
    logger.warn("Untranslated protocol Error: " + error.name() + " so we return default SAML error");
    return JBossSAMLURIConstants.STATUS_REQUEST_DENIED;
}
/**
 * Maps an internal login {@code Error} to the message key shown on the
 * error page during an IDP-initiated login; unknown errors fall back to a
 * generic message.
 */
private String translateErrorToIdpInitiatedErrorMessage(Error error) {
    if (error == Error.CONSENT_DENIED) {
        return Messages.CONSENT_DENIED;
    }
    if (error == Error.PASSIVE_INTERACTION_REQUIRED || error == Error.PASSIVE_LOGIN_REQUIRED) {
        return Messages.UNEXPECTED_ERROR_HANDLING_REQUEST;
    }
    logger.warn("Untranslated protocol Error: " + error.name() + " so we return default error message");
    return Messages.UNEXPECTED_ERROR_HANDLING_REQUEST;
}
/**
 * @return the realm's base URL, used as the issuer on SAML messages built
 *         by this IdP
 */
protected String getResponseIssuer(RealmModel realm) {
    URI issuer = RealmsResource.realmBaseUrl(uriInfo).build(realm.getName());
    return issuer.toString();
}
/**
 * @return true when the login request arrived over the POST binding, or
 *         when the client configuration forces POST
 */
protected boolean isPostBinding(ClientSessionModel clientSession) {
    SamlClient samlClient = new SamlClient(clientSession.getClient());
    String binding = clientSession.getNote(SamlProtocol.SAML_BINDING);
    return SamlProtocol.SAML_POST_BINDING.equals(binding) || samlClient.forcePostBinding();
}
/**
 * @return true when the logout-initiating party used the POST binding, as
 *         recorded in the user session notes at login time
 */
public static boolean isLogoutPostBindingForInitiator(UserSessionModel session) {
    return SamlProtocol.SAML_POST_BINDING.equals(session.getNote(SamlProtocol.SAML_LOGOUT_BINDING));
}
/**
 * Decides whether a client's logout request should go over the POST binding.
 *
 * <p>Preference order: with no POST logout URL configured, POST is used only
 * as the admin-url fallback when there is no redirect URL either; otherwise
 * POST wins when the client forces it, when the login itself was POST, or
 * when no redirect logout URL exists.</p>
 */
protected boolean isLogoutPostBindingForClient(ClientSessionModel clientSession) {
    ClientModel client = clientSession.getClient();
    SamlClient samlClient = new SamlClient(client);
    String logoutPostUrl = client.getAttribute(SAML_SINGLE_LOGOUT_SERVICE_URL_POST_ATTRIBUTE);
    String logoutRedirectUrl = client.getAttribute(SAML_SINGLE_LOGOUT_SERVICE_URL_REDIRECT_ATTRIBUTE);
    boolean hasPostUrl = logoutPostUrl != null && !logoutPostUrl.trim().isEmpty();
    boolean hasRedirectUrl = logoutRedirectUrl != null && !logoutRedirectUrl.trim().isEmpty();
    if (!hasPostUrl) {
        // No POST url: default to POST (against the admin url) only when
        // there is no redirect url to fall back to.
        return !hasRedirectUrl;
    }
    if (samlClient.forcePostBinding()) {
        return true; // configured to force a post binding and post binding logout url is not null
    }
    // Mirror the binding that was used at login when possible.
    if (SAML_POST_BINDING.equals(clientSession.getNote(SAML_BINDING))) {
        return true;
    }
    // Without a redirect binding url, post binding is the only option left.
    return !hasRedirectUrl;
}
/**
 * Resolves the NameID format to issue: the client's configured format wins
 * when it is forced or when the request did not specify one; otherwise the
 * requested format is used, defaulting to "unspecified".
 */
protected String getNameIdFormat(SamlClient samlClient, ClientSessionModel clientSession) {
    String nameIdFormat = clientSession.getNote(GeneralConstants.NAMEID_FORMAT);
    String configuredFormat = samlClient.getNameIDFormat();
    if (configuredFormat != null && (nameIdFormat == null || samlClient.forceNameIDFormat())) {
        nameIdFormat = configuredFormat;
    }
    return nameIdFormat != null ? nameIdFormat : SAML_DEFAULT_NAMEID_FORMAT;
}
/**
 * Produces the NameID value for the given format: the user's email for the
 * email format, a generated value for transient, a persisted identifier for
 * persistent, and the username for unspecified or any unrecognized format.
 */
protected String getNameId(String nameIdFormat, ClientSessionModel clientSession, UserSessionModel userSession) {
    if (nameIdFormat.equals(JBossSAMLURIConstants.NAMEID_FORMAT_EMAIL.get())) {
        return userSession.getUser().getEmail();
    }
    if (nameIdFormat.equals(JBossSAMLURIConstants.NAMEID_FORMAT_TRANSIENT.get())) {
        // "G-" stands for "generated"; added for the slight possibility of collisions.
        return "G-" + UUID.randomUUID().toString();
    }
    if (nameIdFormat.equals(JBossSAMLURIConstants.NAMEID_FORMAT_PERSISTENT.get())) {
        return getPersistentNameId(clientSession, userSession);
    }
    // TODO: Support for persistent NameID (pseudo-random identifier persisted in user object)
    // NAMEID_FORMAT_UNSPECIFIED and every unknown format map to the username.
    return userSession.getUser().getUsername();
}
/**
 * Attempts to retrieve the persistent-type NameId, in order of preference:
 *
 * <ol>
 * <li>the saml.persistent.name.id.for.$clientId user attribute</li>
 * <li>the saml.persistent.name.id.for.* (wildcard) user attribute</li>
 * <li>a freshly generated G-$randomUuid value</li>
 * </ol>
 *
 * Only when a value is generated is any state changed: the new identifier is
 * stored under the client-specific attribute so later logins reuse it.
 *
 * @return the user's persistent NameId
 */
protected String getPersistentNameId(final ClientSessionModel clientSession, final UserSessionModel userSession) {
    final UserModel user = userSession.getUser();
    // 1) client-specific attribute
    final String clientNameIdAttribute = SAML_PERSISTENT_NAME_ID_FOR + "." + clientSession.getClient().getClientId();
    String nameId = user.getFirstAttribute(clientNameIdAttribute);
    if (nameId == null) {
        // 2) wildcard attribute shared across clients
        nameId = user.getFirstAttribute(SAML_PERSISTENT_NAME_ID_FOR + ".*");
    }
    if (nameId == null) {
        // 3) generate and persist one; "G-" stands for "generated"
        nameId = "G-" + UUID.randomUUID().toString();
        user.setSingleAttribute(clientNameIdAttribute, nameId);
    }
    return nameId;
}
/**
 * Builds and returns the SAML login Response for a successfully
 * authenticated session: resolves the NameID, runs the client's protocol
 * mappers, signs/encrypts per the client configuration, and sends the
 * document over the negotiated binding. Any failure along the way results
 * in a generic error page.
 */
@Override
public Response authenticated(UserSessionModel userSession, ClientSessionCode accessCode) {
    ClientSessionModel clientSession = accessCode.getClientSession();
    ClientModel client = clientSession.getClient();
    SamlClient samlClient = new SamlClient(client);
    String requestID = clientSession.getNote(SAML_REQUEST_ID);
    String relayState = clientSession.getNote(GeneralConstants.RELAY_STATE);
    String redirectUri = clientSession.getRedirectUri();
    String responseIssuer = getResponseIssuer(realm);
    String nameIdFormat = getNameIdFormat(samlClient, clientSession);
    String nameId = getNameId(nameIdFormat, clientSession, userSession);
    // save NAME_ID and format in clientSession as they may be persistent or transient or email and not username
    // we'll need to send this back on a logout
    clientSession.setNote(SAML_NAME_ID, nameId);
    clientSession.setNote(SAML_NAME_ID_FORMAT, nameIdFormat);
    SAML2LoginResponseBuilder builder = new SAML2LoginResponseBuilder();
    builder.requestID(requestID).destination(redirectUri).issuer(responseIssuer).assertionExpiration(realm.getAccessCodeLifespan()).subjectExpiration(realm.getAccessTokenLifespan()).sessionIndex(clientSession.getId())
            .requestIssuer(clientSession.getClient().getClientId()).nameIdentifier(nameIdFormat, nameId).authMethod(JBossSAMLURIConstants.AC_UNSPECIFIED.get());
    if (!samlClient.includeAuthnStatement()) {
        builder.disableAuthnStatement(true);
    }
    builder.includeOneTimeUseCondition(samlClient.includeOneTimeUseCondition());
    // Partition the requested protocol mappers by the SAML mapper interface each implements.
    List<ProtocolMapperProcessor<SAMLAttributeStatementMapper>> attributeStatementMappers = new LinkedList<>();
    List<ProtocolMapperProcessor<SAMLLoginResponseMapper>> loginResponseMappers = new LinkedList<>();
    ProtocolMapperProcessor<SAMLRoleListMapper> roleListMapper = null;
    Set<ProtocolMapperModel> mappings = accessCode.getRequestedProtocolMappers();
    for (ProtocolMapperModel mapping : mappings) {
        ProtocolMapper mapper = (ProtocolMapper) session.getKeycloakSessionFactory().getProviderFactory(ProtocolMapper.class, mapping.getProtocolMapper());
        if (mapper == null)
            continue;
        // A single mapper may implement several interfaces; each check is independent.
        if (mapper instanceof SAMLAttributeStatementMapper) {
            attributeStatementMappers.add(new ProtocolMapperProcessor<SAMLAttributeStatementMapper>((SAMLAttributeStatementMapper) mapper, mapping));
        }
        if (mapper instanceof SAMLLoginResponseMapper) {
            loginResponseMappers.add(new ProtocolMapperProcessor<SAMLLoginResponseMapper>((SAMLLoginResponseMapper) mapper, mapping));
        }
        if (mapper instanceof SAMLRoleListMapper) {
            roleListMapper = new ProtocolMapperProcessor<SAMLRoleListMapper>((SAMLRoleListMapper) mapper, mapping);
        }
    }
    Document samlDocument = null;
    KeyManager keyManager = session.keys();
    KeyManager.ActiveRsaKey keys = keyManager.getActiveRsaKey(realm);
    boolean postBinding = isPostBinding(clientSession);
    String keyName = samlClient.getXmlSigKeyInfoKeyNameTransformer().getKeyName(keys.getKid(), keys.getCertificate());
    try {
        // For the non-POST (redirect) binding, advertise the signing key via a
        // <samlp:Extensions> element when the client is configured for it.
        if ((! postBinding) && samlClient.requiresRealmSignature() && samlClient.addExtensionsElementWithKeyInfo()) {
            builder.addExtension(new KeycloakKeySamlExtensionGenerator(keyName));
        }
        ResponseType samlModel = builder.buildModel();
        final AttributeStatementType attributeStatement = populateAttributeStatements(attributeStatementMappers, session, userSession, clientSession);
        populateRoles(roleListMapper, session, userSession, clientSession, attributeStatement);
        // SAML Spec 2.7.3 AttributeStatement must contain one or more Attribute or EncryptedAttribute
        if (attributeStatement.getAttributes().size() > 0) {
            AssertionType assertion = samlModel.getAssertions().get(0).getAssertion();
            assertion.addStatement(attributeStatement);
        }
        samlModel = transformLoginResponse(loginResponseMappers, samlModel, session, userSession, clientSession);
        samlDocument = builder.buildDocument(samlModel);
    } catch (Exception e) {
        logger.error("failed", e);
        return ErrorPage.error(session, Messages.FAILED_TO_PROCESS_RESPONSE);
    }
    JaxrsSAML2BindingBuilder bindingBuilder = new JaxrsSAML2BindingBuilder();
    bindingBuilder.relayState(relayState);
    // Document-level signature, if required by the client.
    if (samlClient.requiresRealmSignature()) {
        String canonicalization = samlClient.getCanonicalizationMethod();
        if (canonicalization != null) {
            bindingBuilder.canonicalizationMethod(canonicalization);
        }
        bindingBuilder.signatureAlgorithm(samlClient.getSignatureAlgorithm()).signWith(keyName, keys.getPrivateKey(), keys.getPublicKey(), keys.getCertificate()).signDocument();
    }
    // Assertion-level signature, if required by the client.
    if (samlClient.requiresAssertionSignature()) {
        String canonicalization = samlClient.getCanonicalizationMethod();
        if (canonicalization != null) {
            bindingBuilder.canonicalizationMethod(canonicalization);
        }
        bindingBuilder.signatureAlgorithm(samlClient.getSignatureAlgorithm()).signWith(keyName, keys.getPrivateKey(), keys.getPublicKey(), keys.getCertificate()).signAssertions();
    }
    if (samlClient.requiresEncryption()) {
        PublicKey publicKey = null;
        try {
            publicKey = SamlProtocolUtils.getEncryptionValidationKey(client);
        } catch (Exception e) {
            logger.error("failed", e);
            return ErrorPage.error(session, Messages.FAILED_TO_PROCESS_RESPONSE);
        }
        bindingBuilder.encrypt(publicKey);
    }
    try {
        return buildAuthenticatedResponse(clientSession, redirectUri, samlDocument, bindingBuilder);
    } catch (Exception e) {
        logger.error("failed", e);
        return ErrorPage.error(session, Messages.FAILED_TO_PROCESS_RESPONSE);
    }
}
/**
 * Sends the finished login document to the client over whichever binding
 * (POST or redirect) the client session negotiated.
 */
protected Response buildAuthenticatedResponse(ClientSessionModel clientSession, String redirectUri, Document samlDocument, JaxrsSAML2BindingBuilder bindingBuilder) throws ConfigurationException, ProcessingException, IOException {
    return isPostBinding(clientSession)
            ? bindingBuilder.postBinding(samlDocument).response(redirectUri)
            : bindingBuilder.redirectBinding(samlDocument).response(redirectUri);
}
/**
 * Pairs a resolved protocol-mapper implementation with the model
 * (configuration) it was instantiated from.
 *
 * @param <T> the SAML mapper interface this processor wraps
 */
public static class ProtocolMapperProcessor<T> {
    public final T mapper;
    public final ProtocolMapperModel model;

    public ProtocolMapperProcessor(T mapper, ProtocolMapperModel model) {
        this.mapper = mapper;
        this.model = model;
    }
}
/**
 * Runs every attribute-statement mapper against a new, initially empty
 * {@link AttributeStatementType} and returns the accumulated result.
 */
public AttributeStatementType populateAttributeStatements(List<ProtocolMapperProcessor<SAMLAttributeStatementMapper>> attributeStatementMappers, KeycloakSession session, UserSessionModel userSession,
        ClientSessionModel clientSession) {
    final AttributeStatementType statement = new AttributeStatementType();
    for (ProtocolMapperProcessor<SAMLAttributeStatementMapper> entry : attributeStatementMappers) {
        entry.mapper.transformAttributeStatement(statement, entry.model, session, userSession, clientSession);
    }
    return statement;
}
/**
 * Threads the response model through every login-response mapper in order,
 * each one receiving the previous mapper's output.
 */
public ResponseType transformLoginResponse(List<ProtocolMapperProcessor<SAMLLoginResponseMapper>> mappers, ResponseType response, KeycloakSession session, UserSessionModel userSession, ClientSessionModel clientSession) {
    ResponseType result = response;
    for (ProtocolMapperProcessor<SAMLLoginResponseMapper> entry : mappers) {
        result = entry.mapper.transformLoginResponse(result, entry.model, session, userSession, clientSession);
    }
    return result;
}
/**
 * Applies the role-list mapper (if one was configured) to the given
 * attribute statement; a null mapper is a no-op.
 */
public void populateRoles(ProtocolMapperProcessor<SAMLRoleListMapper> roleListMapper, KeycloakSession session, UserSessionModel userSession, ClientSessionModel clientSession,
        final AttributeStatementType existingAttributeStatement) {
    if (roleListMapper != null) {
        roleListMapper.mapper.mapRoles(existingAttributeStatement, roleListMapper.model, session, userSession, clientSession);
    }
}
/**
 * Resolves the absolute single-logout endpoint for a client and binding.
 *
 * <p>Looks up the binding-specific SLO attribute, falls back to the client's
 * management URL when none is configured, and resolves the result against
 * the request URI / client root URL. Returns null when no usable URL exists.</p>
 *
 * @param uriInfo     current request URI info, used as the resolution base
 * @param client      the client whose logout endpoint is wanted
 * @param bindingType {@link #SAML_POST_BINDING} or anything else for redirect
 * @return the resolved logout URL, or null if none is configured
 */
public static String getLogoutServiceUrl(UriInfo uriInfo, ClientModel client, String bindingType) {
    String logoutServiceUrl;
    if (SAML_POST_BINDING.equals(bindingType)) {
        logoutServiceUrl = client.getAttribute(SAML_SINGLE_LOGOUT_SERVICE_URL_POST_ATTRIBUTE);
    } else {
        logoutServiceUrl = client.getAttribute(SAML_SINGLE_LOGOUT_SERVICE_URL_REDIRECT_ATTRIBUTE);
    }
    // Fall back to the management url when no explicit SLO url is configured.
    // (The previous "client instanceof ClientModel" guard and cast were tautological:
    // client is statically typed ClientModel and already dereferenced above.)
    if (logoutServiceUrl == null)
        logoutServiceUrl = client.getManagementUrl();
    if (logoutServiceUrl == null || logoutServiceUrl.trim().isEmpty())
        return null;
    return ResourceAdminManager.resolveUri(uriInfo.getRequestUri(), client.getRootUrl(), logoutServiceUrl);
}
/**
 * Builds the browser-mediated (front-channel) LogoutRequest for one client,
 * over POST or redirect binding as decided by
 * {@link #isLogoutPostBindingForClient(ClientSessionModel)}.
 *
 * <p>Returns null when the session has no client. Checked SAML exceptions
 * are wrapped as unchecked, matching the {@code LoginProtocol} contract.</p>
 */
@Override
public Response frontchannelLogout(UserSessionModel userSession, ClientSessionModel clientSession) {
    ClientModel client = clientSession.getClient();
    // The old "!(client instanceof ClientModel)" check could only ever trigger for a
    // null client (the variable is statically a ClientModel); guard explicitly, and
    // do it before constructing SamlClient so a null client can never NPE.
    if (client == null)
        return null;
    SamlClient samlClient = new SamlClient(client);
    try {
        if (isLogoutPostBindingForClient(clientSession)) {
            String bindingUri = getLogoutServiceUrl(uriInfo, client, SAML_POST_BINDING);
            SAML2LogoutRequestBuilder logoutBuilder = createLogoutRequest(bindingUri, clientSession, client);
            // This is POST binding, hence KeyID is included in dsig:KeyInfo/dsig:KeyName, no need to add <samlp:Extensions> element
            JaxrsSAML2BindingBuilder binding = createBindingBuilder(samlClient);
            return binding.postBinding(logoutBuilder.buildDocument()).request(bindingUri);
        } else {
            logger.debug("frontchannel redirect binding");
            String bindingUri = getLogoutServiceUrl(uriInfo, client, SAML_REDIRECT_BINDING);
            SAML2LogoutRequestBuilder logoutBuilder = createLogoutRequest(bindingUri, clientSession, client);
            if (samlClient.requiresRealmSignature() && samlClient.addExtensionsElementWithKeyInfo()) {
                // Redirect binding cannot carry KeyInfo in the signature; advertise the key via Extensions.
                KeyManager.ActiveRsaKey keys = session.keys().getActiveRsaKey(realm);
                String keyName = samlClient.getXmlSigKeyInfoKeyNameTransformer().getKeyName(keys.getKid(), keys.getCertificate());
                logoutBuilder.addExtension(new KeycloakKeySamlExtensionGenerator(keyName));
            }
            JaxrsSAML2BindingBuilder binding = createBindingBuilder(samlClient);
            return binding.redirectBinding(logoutBuilder.buildDocument()).request(bindingUri);
        }
    } catch (ConfigurationException | ProcessingException | IOException | ParsingException e) {
        // Same unchecked wrapping as before, collapsed into a single multi-catch.
        throw new RuntimeException(e);
    }
}
/**
 * Completes a logout by building the SAML LogoutResponse for the initiating
 * party, using the binding URI, relay state and signature settings that were
 * recorded in the user session notes when the logout began.
 *
 * <p>Fails with an error page when no logout binding URI was recorded;
 * checked SAML exceptions are wrapped as unchecked.</p>
 */
@Override
public Response finishLogout(UserSessionModel userSession) {
    logger.debug("finishLogout");
    String logoutBindingUri = userSession.getNote(SAML_LOGOUT_BINDING_URI);
    if (logoutBindingUri == null) {
        logger.error("Can't finish SAML logout as there is no logout binding set. Please configure the logout service url in the admin console for your client applications.");
        return ErrorPage.error(session, Messages.FAILED_LOGOUT);
    }
    String logoutRelayState = userSession.getNote(SAML_LOGOUT_RELAY_STATE);
    SAML2LogoutResponseBuilder builder = new SAML2LogoutResponseBuilder();
    builder.logoutRequestID(userSession.getNote(SAML_LOGOUT_REQUEST_ID));
    builder.destination(logoutBindingUri);
    builder.issuer(getResponseIssuer(realm));
    JaxrsSAML2BindingBuilder binding = new JaxrsSAML2BindingBuilder();
    binding.relayState(logoutRelayState);
    String signingAlgorithm = userSession.getNote(SAML_LOGOUT_SIGNATURE_ALGORITHM);
    boolean postBinding = isLogoutPostBindingForInitiator(userSession);
    if (signingAlgorithm != null) {
        // The initiator required signed logout messages: sign with the realm key,
        // replaying the canonicalization / KeyName settings noted at logout start.
        SignatureAlgorithm algorithm = SignatureAlgorithm.valueOf(signingAlgorithm);
        String canonicalization = userSession.getNote(SAML_LOGOUT_CANONICALIZATION);
        if (canonicalization != null) {
            binding.canonicalizationMethod(canonicalization);
        }
        KeyManager.ActiveRsaKey keys = session.keys().getActiveRsaKey(realm);
        XmlKeyInfoKeyNameTransformer transformer = XmlKeyInfoKeyNameTransformer.from(
                userSession.getNote(SAML_SERVER_SIGNATURE_KEYINFO_KEY_NAME_TRANSFORMER),
                SamlClient.DEFAULT_XML_KEY_INFO_KEY_NAME_TRANSFORMER);
        String keyName = transformer.getKeyName(keys.getKid(), keys.getCertificate());
        binding.signatureAlgorithm(algorithm).signWith(keyName, keys.getPrivateKey(), keys.getPublicKey(), keys.getCertificate()).signDocument();
        boolean addExtension = (! postBinding) && Objects.equals("true", userSession.getNote(SamlProtocol.SAML_LOGOUT_ADD_EXTENSIONS_ELEMENT_WITH_KEY_INFO));
        if (addExtension) { // Only include extension if REDIRECT binding and signing whole SAML protocol message
            builder.addExtension(new KeycloakKeySamlExtensionGenerator(keyName));
        }
    }
    try {
        return buildLogoutResponse(userSession, logoutBindingUri, builder, binding);
    } catch (ConfigurationException | ProcessingException | IOException e) {
        // Same unchecked wrapping as before, collapsed into a single multi-catch.
        throw new RuntimeException(e);
    }
}
/**
 * Sends the logout-response document back to the initiator over the binding
 * (POST or redirect) recorded in the user session.
 */
protected Response buildLogoutResponse(UserSessionModel userSession, String logoutBindingUri, SAML2LogoutResponseBuilder builder, JaxrsSAML2BindingBuilder binding) throws ConfigurationException, ProcessingException, IOException {
    return isLogoutPostBindingForInitiator(userSession)
            ? binding.postBinding(builder.buildDocument()).response(logoutBindingUri)
            : binding.redirectBinding(builder.buildDocument()).response(logoutBindingUri);
}
/**
 * Performs a server-to-server (back-channel) SAML logout against the
 * client's POST-binding single-logout endpoint. Best-effort: failures are
 * logged and swallowed so one unreachable client cannot block the overall
 * logout. A single trailing-slash 302 redirect is followed once.
 */
@Override
public void backchannelLogout(UserSessionModel userSession, ClientSessionModel clientSession) {
    ClientModel client = clientSession.getClient();
    SamlClient samlClient = new SamlClient(client);
    String logoutUrl = getLogoutServiceUrl(uriInfo, client, SAML_POST_BINDING);
    if (logoutUrl == null) {
        // BUGFIX: warnv uses zero-based MessageFormat indices; the original "{1}"
        // never resolved against the single argument, so the client id was dropped.
        logger.warnv("Can't do backchannel logout. No SingleLogoutService POST Binding registered for client: {0}", client.getClientId());
        return;
    }
    SAML2LogoutRequestBuilder logoutBuilder = createLogoutRequest(logoutUrl, clientSession, client);
    String logoutRequestString = null;
    try {
        JaxrsSAML2BindingBuilder binding = createBindingBuilder(samlClient);
        // This is POST binding, hence KeyID is included in dsig:KeyInfo/dsig:KeyName, no need to add <samlp:Extensions> element
        logoutRequestString = binding.postBinding(logoutBuilder.buildDocument()).encoded();
    } catch (Exception e) {
        logger.warn("failed to send saml logout", e);
        return;
    }
    HttpClient httpClient = session.getProvider(HttpClientProvider.class).getHttpClient();
    for (int i = 0; i < 2; i++) { // follow redirects once
        try {
            List<NameValuePair> formparams = new ArrayList<NameValuePair>();
            formparams.add(new BasicNameValuePair(GeneralConstants.SAML_REQUEST_KEY, logoutRequestString));
            formparams.add(new BasicNameValuePair("BACK_CHANNEL_LOGOUT", "BACK_CHANNEL_LOGOUT")); // for Picketlink
            // todo remove
            // this
            UrlEncodedFormEntity form = new UrlEncodedFormEntity(formparams, "UTF-8");
            HttpPost post = new HttpPost(logoutUrl);
            post.setEntity(form);
            HttpResponse response = httpClient.execute(post);
            try {
                int status = response.getStatusLine().getStatusCode();
                // Some containers 302 to the same URL with a trailing slash; retry once against it.
                if (status == 302 && !logoutUrl.endsWith("/")) {
                    String redirect = response.getFirstHeader(HttpHeaders.LOCATION).getValue();
                    String withSlash = logoutUrl + "/";
                    if (withSlash.equals(redirect)) {
                        logoutUrl = withSlash;
                        continue;
                    }
                }
            } finally {
                // Always drain/close the entity stream so the pooled connection is released.
                HttpEntity entity = response.getEntity();
                if (entity != null) {
                    InputStream is = entity.getContent();
                    if (is != null)
                        is.close();
                }
            }
        } catch (IOException e) {
            logger.warn("failed to send saml logout", e);
        }
        break;
    }
}
/**
 * Creates a SAML logout request builder for the given client session.
 * The user principal is rebuilt from the name ID and name ID format notes
 * that were stored on the session at login time.
 *
 * @param logoutUrl     destination of the logout request
 * @param clientSession session whose notes supply the subject used at login
 * @param client        client being logged out
 * @return configured logout request builder
 */
protected SAML2LogoutRequestBuilder createLogoutRequest(String logoutUrl, ClientSessionModel clientSession, ClientModel client) {
    return new SAML2LogoutRequestBuilder()
            .assertionExpiration(realm.getAccessCodeLifespan())
            .issuer(getResponseIssuer(realm))
            .sessionIndex(clientSession.getId())
            .userPrincipal(clientSession.getNote(SAML_NAME_ID), clientSession.getNote(SAML_NAME_ID_FORMAT))
            .destination(logoutUrl);
}
/**
 * Reports whether the user must re-authenticate for this client session.
 * Re-authentication enforcement is not yet implemented for this protocol,
 * so this always answers {@code false}.
 */
@Override
public boolean requireReauthentication(UserSessionModel userSession, ClientSessionModel clientSession) {
    // Not yet supported
    return false;
}
/**
 * Creates a SAML binding builder, configuring document signing with the
 * realm's active RSA key when the client requires realm signatures.
 *
 * @param samlClient client configuration wrapper
 * @return binding builder, signed if the client demands it
 */
private JaxrsSAML2BindingBuilder createBindingBuilder(SamlClient samlClient) {
    JaxrsSAML2BindingBuilder bindingBuilder = new JaxrsSAML2BindingBuilder();
    if (!samlClient.requiresRealmSignature()) {
        return bindingBuilder;
    }
    KeyManager.ActiveRsaKey activeKey = session.keys().getActiveRsaKey(realm);
    // Derive the dsig:KeyName value via the client's configured transformer.
    String keyName = samlClient.getXmlSigKeyInfoKeyNameTransformer().getKeyName(activeKey.getKid(), activeKey.getCertificate());
    bindingBuilder.signatureAlgorithm(samlClient.getSignatureAlgorithm())
            .signWith(keyName, activeKey.getPrivateKey(), activeKey.getPublicKey(), activeKey.getCertificate())
            .signDocument();
    return bindingBuilder;
}
@Override
public void close() {
    // No closeable resources are held by this provider.
}
}
| |
/*
* Copyright (C) 2013 tarent AG
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.osiam.storage.entities;
import java.util.Objects;

import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.Lob;
import javax.persistence.SequenceGenerator;
import javax.persistence.Table;

import org.hibernate.annotations.Type;
/**
 * JPA entity holding the components of a name: the fully formatted string plus
 * its individual parts (family, given, middle names and honorifics).
 * <p>
 * Equality is value-based over all name components; the database {@code id} is
 * deliberately excluded from {@link #equals(Object)} and {@link #hashCode()},
 * matching the previous hand-rolled implementations, so logically identical
 * names compare equal regardless of persistence state.
 */
@Entity
@Table(name = "scim_name")
public class NameEntity {

    @Id
    @SequenceGenerator(name = "sequence_scim_name",
            sequenceName = "resource_server_sequence_scim_name",
            allocationSize = 1,
            initialValue = 100)
    @GeneratedValue(generator = "sequence_scim_name")
    private long id;

    // Stored as a CLOB; formatted names may exceed default VARCHAR limits.
    @Lob
    @Type(type = "org.hibernate.type.StringClobType")
    private String formatted;

    private String familyName;
    private String givenName;
    private String middleName;
    private String honorificPrefix;
    private String honorificSuffix;

    public long getId() {
        return id;
    }

    public void setId(long id) {
        this.id = id;
    }

    public String getFormatted() {
        return formatted;
    }

    public void setFormatted(String formatted) {
        this.formatted = formatted;
    }

    public String getFamilyName() {
        return familyName;
    }

    public void setFamilyName(String familyName) {
        this.familyName = familyName;
    }

    public String getGivenName() {
        return givenName;
    }

    public void setGivenName(String givenName) {
        this.givenName = givenName;
    }

    public String getMiddleName() {
        return middleName;
    }

    public void setMiddleName(String middleName) {
        this.middleName = middleName;
    }

    public String getHonorificPrefix() {
        return honorificPrefix;
    }

    public void setHonorificPrefix(String honorificPrefix) {
        this.honorificPrefix = honorificPrefix;
    }

    public String getHonorificSuffix() {
        return honorificSuffix;
    }

    public void setHonorificSuffix(String honorificSuffix) {
        this.honorificSuffix = honorificSuffix;
    }

    /**
     * Value equality over all name components (id excluded).
     */
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        NameEntity other = (NameEntity) obj;
        return Objects.equals(familyName, other.familyName)
                && Objects.equals(formatted, other.formatted)
                && Objects.equals(givenName, other.givenName)
                && Objects.equals(honorificPrefix, other.honorificPrefix)
                && Objects.equals(honorificSuffix, other.honorificSuffix)
                && Objects.equals(middleName, other.middleName);
    }

    @Override
    public int hashCode() {
        // Same field order as the previous hand-rolled version; Objects.hash
        // uses the identical 31-based scheme, so hash values are unchanged.
        return Objects.hash(familyName, formatted, givenName, honorificPrefix,
                honorificSuffix, middleName);
    }

    @Override
    public String toString() {
        return "NameEntity [formatted=" + formatted + ", familyName=" + familyName
                + ", givenName=" + givenName + ", middleName=" + middleName
                + ", honorificPrefix=" + honorificPrefix + ", honorificSuffix="
                + honorificSuffix + "]";
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.bucket;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.search.aggregations.AggregationBuilders.terms;
import static org.hamcrest.Matchers.equalTo;
/**
 * Integration tests verifying how the {@code shard_size} parameter of the terms
 * aggregation affects document-count accuracy, for string, long and double key
 * fields. Each field type is exercised without a shard size, with shard size
 * equal to size, with a larger shard size, against a single routed shard, and
 * with term ordering.
 *
 * Fix: testWithShardSizeStringSingleShard previously looked buckets up via
 * {@code bucket.getKey()} (an Object) against a {@code Map<String, Long>};
 * it now uses {@code getKeyAsString()} like every other test in this class.
 */
public class ShardSizeTermsIT extends ShardSizeTestCase {
    public void testNoShardSizeString() throws Exception {
        createIdx("type=string,index=not_analyzed");
        indexData();
        SearchResponse response = client().prepareSearch("idx").setTypes("type")
                .setQuery(matchAllQuery())
                .addAggregation(terms("keys").field("key").size(3)
                        .collectMode(randomFrom(SubAggCollectionMode.values())).order(Terms.Order.count(false)))
                .execute().actionGet();
        Terms terms = response.getAggregations().get("keys");
        Collection<Terms.Bucket> buckets = terms.getBuckets();
        assertThat(buckets.size(), equalTo(3));
        Map<String, Long> expected = new HashMap<>();
        expected.put("1", 8L);
        expected.put("3", 8L);
        expected.put("2", 5L);
        for (Terms.Bucket bucket : buckets) {
            assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKeyAsString())));
        }
    }

    public void testShardSizeEqualsSizeString() throws Exception {
        createIdx("type=string,index=not_analyzed");
        indexData();
        SearchResponse response = client().prepareSearch("idx").setTypes("type")
                .setQuery(matchAllQuery())
                .addAggregation(terms("keys").field("key").size(3).shardSize(3)
                        .collectMode(randomFrom(SubAggCollectionMode.values())).order(Terms.Order.count(false)))
                .execute().actionGet();
        Terms terms = response.getAggregations().get("keys");
        Collection<Terms.Bucket> buckets = terms.getBuckets();
        assertThat(buckets.size(), equalTo(3));
        Map<String, Long> expected = new HashMap<>();
        expected.put("1", 8L);
        expected.put("3", 8L);
        expected.put("2", 4L);
        for (Terms.Bucket bucket : buckets) {
            assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKeyAsString())));
        }
    }

    public void testWithShardSizeString() throws Exception {
        createIdx("type=string,index=not_analyzed");
        indexData();
        SearchResponse response = client().prepareSearch("idx").setTypes("type")
                .setQuery(matchAllQuery())
                .addAggregation(terms("keys").field("key").size(3)
                        .collectMode(randomFrom(SubAggCollectionMode.values())).shardSize(5).order(Terms.Order.count(false)))
                .execute().actionGet();
        Terms terms = response.getAggregations().get("keys");
        Collection<Terms.Bucket> buckets = terms.getBuckets();
        assertThat(buckets.size(), equalTo(3)); // we still only return 3 entries (based on the 'size' param)
        Map<String, Long> expected = new HashMap<>();
        expected.put("1", 8L);
        expected.put("3", 8L);
        expected.put("2", 5L); // <-- count is now fixed
        for (Terms.Bucket bucket : buckets) {
            assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKeyAsString())));
        }
    }

    public void testWithShardSizeStringSingleShard() throws Exception {
        createIdx("type=string,index=not_analyzed");
        indexData();
        SearchResponse response = client().prepareSearch("idx").setTypes("type").setRouting(routing1)
                .setQuery(matchAllQuery())
                .addAggregation(terms("keys").field("key").size(3)
                        .collectMode(randomFrom(SubAggCollectionMode.values())).shardSize(5).order(Terms.Order.count(false)))
                .execute().actionGet();
        Terms terms = response.getAggregations().get("keys");
        Collection<Terms.Bucket> buckets = terms.getBuckets();
        assertThat(buckets.size(), equalTo(3)); // we still only return 3 entries (based on the 'size' param)
        Map<String, Long> expected = new HashMap<>();
        expected.put("1", 5L);
        expected.put("2", 4L);
        expected.put("3", 3L);
        for (Terms.Bucket bucket : buckets) {
            // Consistent with the other string tests: getKeyAsString(), not
            // getKey(), so the lookup type always matches the map's String keys.
            assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKeyAsString())));
        }
    }

    public void testNoShardSizeTermOrderString() throws Exception {
        createIdx("type=string,index=not_analyzed");
        indexData();
        SearchResponse response = client().prepareSearch("idx").setTypes("type")
                .setQuery(matchAllQuery())
                .addAggregation(terms("keys").field("key").size(3)
                        .collectMode(randomFrom(SubAggCollectionMode.values())).order(Terms.Order.term(true)))
                .execute().actionGet();
        Terms terms = response.getAggregations().get("keys");
        Collection<Terms.Bucket> buckets = terms.getBuckets();
        assertThat(buckets.size(), equalTo(3));
        Map<String, Long> expected = new HashMap<>();
        expected.put("1", 8L);
        expected.put("2", 5L);
        expected.put("3", 8L);
        for (Terms.Bucket bucket : buckets) {
            assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKeyAsString())));
        }
    }

    public void testNoShardSizeLong() throws Exception {
        createIdx("type=long");
        indexData();
        SearchResponse response = client().prepareSearch("idx").setTypes("type")
                .setQuery(matchAllQuery())
                .addAggregation(terms("keys").field("key").size(3)
                        .collectMode(randomFrom(SubAggCollectionMode.values())).order(Terms.Order.count(false)))
                .execute().actionGet();
        Terms terms = response.getAggregations().get("keys");
        Collection<Terms.Bucket> buckets = terms.getBuckets();
        assertThat(buckets.size(), equalTo(3));
        Map<Integer, Long> expected = new HashMap<>();
        expected.put(1, 8L);
        expected.put(3, 8L);
        expected.put(2, 5L);
        for (Terms.Bucket bucket : buckets) {
            assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKeyAsNumber().intValue())));
        }
    }

    public void testShardSizeEqualsSizeLong() throws Exception {
        createIdx("type=long");
        indexData();
        SearchResponse response = client().prepareSearch("idx").setTypes("type")
                .setQuery(matchAllQuery())
                .addAggregation(terms("keys").field("key").size(3).shardSize(3)
                        .collectMode(randomFrom(SubAggCollectionMode.values())).order(Terms.Order.count(false)))
                .execute().actionGet();
        Terms terms = response.getAggregations().get("keys");
        Collection<Terms.Bucket> buckets = terms.getBuckets();
        assertThat(buckets.size(), equalTo(3));
        Map<Integer, Long> expected = new HashMap<>();
        expected.put(1, 8L);
        expected.put(3, 8L);
        expected.put(2, 4L);
        for (Terms.Bucket bucket : buckets) {
            assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKeyAsNumber().intValue())));
        }
    }

    public void testWithShardSizeLong() throws Exception {
        createIdx("type=long");
        indexData();
        SearchResponse response = client().prepareSearch("idx").setTypes("type")
                .setQuery(matchAllQuery())
                .addAggregation(terms("keys").field("key").size(3)
                        .collectMode(randomFrom(SubAggCollectionMode.values())).shardSize(5).order(Terms.Order.count(false)))
                .execute().actionGet();
        Terms terms = response.getAggregations().get("keys");
        Collection<Terms.Bucket> buckets = terms.getBuckets();
        assertThat(buckets.size(), equalTo(3)); // we still only return 3 entries (based on the 'size' param)
        Map<Integer, Long> expected = new HashMap<>();
        expected.put(1, 8L);
        expected.put(3, 8L);
        expected.put(2, 5L); // <-- count is now fixed
        for (Terms.Bucket bucket : buckets) {
            assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKeyAsNumber().intValue())));
        }
    }

    public void testWithShardSizeLongSingleShard() throws Exception {
        createIdx("type=long");
        indexData();
        SearchResponse response = client().prepareSearch("idx").setTypes("type").setRouting(routing1)
                .setQuery(matchAllQuery())
                .addAggregation(terms("keys").field("key").size(3)
                        .collectMode(randomFrom(SubAggCollectionMode.values())).shardSize(5).order(Terms.Order.count(false)))
                .execute().actionGet();
        Terms terms = response.getAggregations().get("keys");
        Collection<Terms.Bucket> buckets = terms.getBuckets();
        assertThat(buckets.size(), equalTo(3)); // we still only return 3 entries (based on the 'size' param)
        Map<Integer, Long> expected = new HashMap<>();
        expected.put(1, 5L);
        expected.put(2, 4L);
        expected.put(3, 3L);
        for (Terms.Bucket bucket : buckets) {
            assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKeyAsNumber().intValue())));
        }
    }

    public void testNoShardSizeTermOrderLong() throws Exception {
        createIdx("type=long");
        indexData();
        SearchResponse response = client().prepareSearch("idx").setTypes("type")
                .setQuery(matchAllQuery())
                .addAggregation(terms("keys").field("key").size(3)
                        .collectMode(randomFrom(SubAggCollectionMode.values())).order(Terms.Order.term(true)))
                .execute().actionGet();
        Terms terms = response.getAggregations().get("keys");
        Collection<Terms.Bucket> buckets = terms.getBuckets();
        assertThat(buckets.size(), equalTo(3));
        Map<Integer, Long> expected = new HashMap<>();
        expected.put(1, 8L);
        expected.put(2, 5L);
        expected.put(3, 8L);
        for (Terms.Bucket bucket : buckets) {
            assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKeyAsNumber().intValue())));
        }
    }

    public void testNoShardSizeDouble() throws Exception {
        createIdx("type=double");
        indexData();
        SearchResponse response = client().prepareSearch("idx").setTypes("type")
                .setQuery(matchAllQuery())
                .addAggregation(terms("keys").field("key").size(3)
                        .collectMode(randomFrom(SubAggCollectionMode.values())).order(Terms.Order.count(false)))
                .execute().actionGet();
        Terms terms = response.getAggregations().get("keys");
        Collection<Terms.Bucket> buckets = terms.getBuckets();
        assertThat(buckets.size(), equalTo(3));
        Map<Integer, Long> expected = new HashMap<>();
        expected.put(1, 8L);
        expected.put(3, 8L);
        expected.put(2, 5L);
        for (Terms.Bucket bucket : buckets) {
            assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKeyAsNumber().intValue())));
        }
    }

    public void testShardSizeEqualsSizeDouble() throws Exception {
        createIdx("type=double");
        indexData();
        SearchResponse response = client().prepareSearch("idx").setTypes("type")
                .setQuery(matchAllQuery())
                .addAggregation(terms("keys").field("key").size(3).shardSize(3)
                        .collectMode(randomFrom(SubAggCollectionMode.values())).order(Terms.Order.count(false)))
                .execute().actionGet();
        Terms terms = response.getAggregations().get("keys");
        Collection<Terms.Bucket> buckets = terms.getBuckets();
        assertThat(buckets.size(), equalTo(3));
        Map<Integer, Long> expected = new HashMap<>();
        expected.put(1, 8L);
        expected.put(3, 8L);
        expected.put(2, 4L);
        for (Terms.Bucket bucket : buckets) {
            assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKeyAsNumber().intValue())));
        }
    }

    public void testWithShardSizeDouble() throws Exception {
        createIdx("type=double");
        indexData();
        SearchResponse response = client().prepareSearch("idx").setTypes("type")
                .setQuery(matchAllQuery())
                .addAggregation(terms("keys").field("key").size(3)
                        .collectMode(randomFrom(SubAggCollectionMode.values())).shardSize(5).order(Terms.Order.count(false)))
                .execute().actionGet();
        Terms terms = response.getAggregations().get("keys");
        Collection<Terms.Bucket> buckets = terms.getBuckets();
        assertThat(buckets.size(), equalTo(3));
        Map<Integer, Long> expected = new HashMap<>();
        expected.put(1, 8L);
        expected.put(3, 8L);
        expected.put(2, 5L); // <-- count is now fixed
        for (Terms.Bucket bucket : buckets) {
            assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKeyAsNumber().intValue())));
        }
    }

    public void testWithShardSizeDoubleSingleShard() throws Exception {
        createIdx("type=double");
        indexData();
        SearchResponse response = client().prepareSearch("idx").setTypes("type").setRouting(routing1)
                .setQuery(matchAllQuery())
                .addAggregation(terms("keys").field("key").size(3)
                        .collectMode(randomFrom(SubAggCollectionMode.values())).shardSize(5).order(Terms.Order.count(false)))
                .execute().actionGet();
        Terms terms = response.getAggregations().get("keys");
        Collection<Terms.Bucket> buckets = terms.getBuckets();
        assertThat(buckets.size(), equalTo(3));
        Map<Integer, Long> expected = new HashMap<>();
        expected.put(1, 5L);
        expected.put(2, 4L);
        expected.put(3, 3L);
        for (Terms.Bucket bucket : buckets) {
            assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKeyAsNumber().intValue())));
        }
    }

    public void testNoShardSizeTermOrderDouble() throws Exception {
        createIdx("type=double");
        indexData();
        SearchResponse response = client().prepareSearch("idx").setTypes("type")
                .setQuery(matchAllQuery())
                .addAggregation(terms("keys").field("key").size(3)
                        .collectMode(randomFrom(SubAggCollectionMode.values())).order(Terms.Order.term(true)))
                .execute().actionGet();
        Terms terms = response.getAggregations().get("keys");
        Collection<Terms.Bucket> buckets = terms.getBuckets();
        assertThat(buckets.size(), equalTo(3));
        Map<Integer, Long> expected = new HashMap<>();
        expected.put(1, 8L);
        expected.put(2, 5L);
        expected.put(3, 8L);
        for (Terms.Bucket bucket : buckets) {
            assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKeyAsNumber().intValue())));
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.data;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.io.InvalidObjectException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import org.apache.accumulo.core.dataImpl.thrift.TRange;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparable;
/**
* This class is used to specify a range of Accumulo keys.
*
* @see Key
*/
public class Range implements WritableComparable<Range> {
private Key start;
private Key stop;
private boolean startKeyInclusive;
private boolean stopKeyInclusive;
private boolean infiniteStartKey;
private boolean infiniteStopKey;
/**
 * Creates a range that goes from negative to positive infinity, i.e. a range
 * containing every possible key.
 */
public Range() {
  this((Key) null, true, (Key) null, true);
}
/**
 * Creates a range from startKey inclusive to endKey inclusive.
 *
 * @param startKey
 *          starting key; set to null for negative infinity
 * @param endKey
 *          ending key; set to null for positive infinity
 * @throws IllegalArgumentException
 *           if end key is before start key
 * @see #Range(Key, boolean, Key, boolean)
 */
public Range(Key startKey, Key endKey) {
  this(startKey, true, endKey, true);
}
/**
 * Creates a range that covers an entire row (the same row is used for both
 * inclusive endpoints).
 *
 * @param row
 *          row to cover; set to null to cover all rows
 */
public Range(CharSequence row) {
  this(row, true, row, true);
}
/**
 * Creates a range that covers an entire row (the same row is used for both
 * inclusive endpoints).
 *
 * @param row
 *          row to cover; set to null to cover all rows
 */
public Range(Text row) {
  this(row, true, row, true);
}
/**
 * Creates a range from startRow inclusive to endRow inclusive.
 *
 * @param startRow
 *          starting row; set to null for negative infinity
 * @param endRow
 *          ending row; set to null for positive infinity
 * @throws IllegalArgumentException
 *           if end row is before start row
 * @see #Range(Text, boolean, Text, boolean)
 */
public Range(Text startRow, Text endRow) {
  this(startRow, true, endRow, true);
}
/**
 * Creates a range from startRow inclusive to endRow inclusive.
 *
 * @param startRow
 *          starting row; set to null for negative infinity
 * @param endRow
 *          ending row; set to null for positive infinity
 * @throws IllegalArgumentException
 *           if end row is before start row
 * @see #Range(CharSequence, boolean, CharSequence, boolean)
 */
public Range(CharSequence startRow, CharSequence endRow) {
  this(startRow, true, endRow, true);
}
/**
 * Creates a range from startRow to endRow.
 *
 * @param startRow
 *          starting row; set to null for negative infinity
 * @param startRowInclusive
 *          true to include start row, false to skip
 * @param endRow
 *          ending row; set to null for positive infinity
 * @param endRowInclusive
 *          true to include end row, false to skip
 * @throws IllegalArgumentException
 *           if end row is before start row
 */
public Range(Text startRow, boolean startRowInclusive, Text endRow, boolean endRowInclusive) {
  this(
      // An exclusive start row starts at the first key *after* that entire row.
      (startRow == null ? null
          : (startRowInclusive ? new Key(startRow)
              : new Key(startRow).followingKey(PartialKey.ROW))),
      true,
      // An inclusive end row stops (exclusively) at the first key after that
      // entire row; an exclusive end row stops at the row's first key.
      (endRow == null ? null
          : (endRowInclusive ? new Key(endRow).followingKey(PartialKey.ROW) : new Key(endRow))),
      false);
}
/**
 * Creates a range from startRow to endRow.
 *
 * @param startRow
 *          starting row; set to null for negative infinity
 * @param startRowInclusive
 *          true to include start row, false to skip
 * @param endRow
 *          ending row; set to null for positive infinity
 * @param endRowInclusive
 *          true to include end row, false to skip
 * @throws IllegalArgumentException
 *           if end row is before start row
 */
public Range(CharSequence startRow, boolean startRowInclusive, CharSequence endRow,
    boolean endRowInclusive) {
  this(startRow == null ? null : new Text(startRow.toString()), startRowInclusive,
      endRow == null ? null : new Text(endRow.toString()), endRowInclusive);
}
/**
 * Creates a range from startKey to endKey.
 *
 * @param startKey
 *          starting key; set to null for negative infinity
 * @param startKeyInclusive
 *          true to include start key, false to skip
 * @param endKey
 *          ending key; set to null for positive infinity
 * @param endKeyInclusive
 *          true to include end key, false to skip
 * @throws IllegalArgumentException
 *           if end key is before start key
 */
public Range(Key startKey, boolean startKeyInclusive, Key endKey, boolean endKeyInclusive) {
  this.start = startKey;
  this.startKeyInclusive = startKeyInclusive;
  this.infiniteStartKey = startKey == null;
  this.stop = endKey;
  this.stopKeyInclusive = endKeyInclusive;
  this.infiniteStopKey = stop == null;
  // Only validate ordering when both endpoints are finite; uses the private
  // impl so a subclass's beforeStartKey override cannot affect construction.
  if (!infiniteStartKey && !infiniteStopKey && beforeStartKeyImpl(endKey)) {
    throw new IllegalArgumentException(
        "Start key must be less than end key in range (" + startKey + ", " + endKey + ")");
  }
}
/**
 * Copies a range. Keys are shared, not deep-copied.
 *
 * @param range
 *          range to copy
 */
public Range(Range range) {
  this(range.start, range.startKeyInclusive, range.infiniteStartKey, range.stop,
      range.stopKeyInclusive, range.infiniteStopKey);
}
/**
 * Creates a range from start to stop.
 *
 * @param start
 *          set this to null when negative infinity is needed
 * @param stop
 *          set this to null when infinity is needed
 * @param startKeyInclusive
 *          determines if the ranges includes the start key
 * @param stopKeyInclusive
 *          determines if the range includes the end key
 * @param infiniteStartKey
 *          true if start key is negative infinity (null)
 * @param infiniteStopKey
 *          true if stop key is positive infinity (null)
 * @throws IllegalArgumentException
 *           if stop is before start, or infiniteStartKey is true but start is not null, or
 *           infiniteStopKey is true but stop is not null
 */
public Range(Key start, Key stop, boolean startKeyInclusive, boolean stopKeyInclusive,
    boolean infiniteStartKey, boolean infiniteStopKey) {
  this(start, startKeyInclusive, infiniteStartKey, stop, stopKeyInclusive, infiniteStopKey);
  // The protected constructor skips the ordering check, so perform it here.
  if (!infiniteStartKey && !infiniteStopKey && beforeStartKeyImpl(stop)) {
    throw new IllegalArgumentException(
        "Start key must be less than end key in range (" + start + ", " + stop + ")");
  }
}
/**
 * Creates a range from start to stop. Unlike the public six-argument method, this one does not
 * assure that stop is after start, which helps performance in cases where that assurance is
 * already in place.
 *
 * @param start
 *          set this to null when negative infinity is needed
 * @param startKeyInclusive
 *          determines if the ranges includes the start key
 * @param infiniteStartKey
 *          true if start key is negative infinity (null)
 * @param stop
 *          set this to null when infinity is needed
 * @param stopKeyInclusive
 *          determines if the range includes the end key
 * @param infiniteStopKey
 *          true if stop key is positive infinity (null)
 * @throws IllegalArgumentException
 *           if infiniteStartKey is true but start is not null, or infiniteStopKey is true but
 *           stop is not null
 */
protected Range(Key start, boolean startKeyInclusive, boolean infiniteStartKey, Key stop,
    boolean stopKeyInclusive, boolean infiniteStopKey) {
  // Infinite endpoints must be represented by null keys.
  if (infiniteStartKey && start != null)
    throw new IllegalArgumentException();
  if (infiniteStopKey && stop != null)
    throw new IllegalArgumentException();
  this.start = start;
  this.stop = stop;
  this.startKeyInclusive = startKeyInclusive;
  this.stopKeyInclusive = stopKeyInclusive;
  this.infiniteStartKey = infiniteStartKey;
  this.infiniteStopKey = infiniteStopKey;
}
/**
 * Creates a range from a Thrift range.
 *
 * @param trange
 *          Thrift range
 */
public Range(TRange trange) {
  this(trange.start == null ? null : new Key(trange.start), trange.startKeyInclusive,
      trange.infiniteStartKey, trange.stop == null ? null : new Key(trange.stop),
      trange.stopKeyInclusive, trange.infiniteStopKey);
  // Delegation above uses the unchecked protected constructor, so validate
  // endpoint ordering here.
  if (!infiniteStartKey && !infiniteStopKey && beforeStartKeyImpl(stop)) {
    throw new IllegalArgumentException(
        "Start key must be less than end key in range (" + start + ", " + stop + ")");
  }
}
/**
 * Gets the start key, or null if the start is negative infinity.
 *
 * @return start key
 */
public Key getStartKey() {
  return infiniteStartKey ? null : start;
}
/**
 * Determines if the given key is before the start key of this range.
 *
 * @param key
 *          key to check
 * @return true if the given key is before the range, otherwise false
 */
public boolean beforeStartKey(Key key) {
  // Delegates so subclasses can override while constructors stay safe.
  return beforeStartKeyImpl(key);
}
/**
 * Implements logic of {@code #beforeStartKey(Key)}, but in a private method, so that it can be
 * safely used by constructors if a subclass overrides that {@link #beforeStartKey(Key)}
 */
private boolean beforeStartKeyImpl(Key key) {
  if (infiniteStartKey) {
    return false;
  }
  int cmp = key.compareTo(start);
  // Inclusive start: only keys strictly before start are "before" the range;
  // exclusive start: the start key itself is also before the range.
  return startKeyInclusive ? cmp < 0 : cmp <= 0;
}
/**
 * Gets the ending key, or null if the end is positive infinity.
 *
 * @return ending key
 */
public Key getEndKey() {
  return infiniteStopKey ? null : stop;
}
/**
 * Determines if the given key is after the ending key of this range.
 *
 * @param key
 *          key to check
 * @return true if the given key is after the range, otherwise false
 */
public boolean afterEndKey(Key key) {
  if (infiniteStopKey) {
    return false;
  }
  int cmp = stop.compareTo(key);
  // Inclusive stop: only keys strictly after stop are past the range;
  // exclusive stop: the stop key itself is also past the range.
  return stopKeyInclusive ? cmp < 0 : cmp <= 0;
}
@Override
public int hashCode() {
  // Infinite endpoints contribute 0; finite endpoints fold inclusiveness in.
  int result = 0;
  if (!infiniteStartKey) {
    result += start.hashCode() + (startKeyInclusive ? 1 : 0);
  }
  if (!infiniteStopKey) {
    result += stop.hashCode() + (stopKeyInclusive ? 1 : 0);
  }
  return result;
}
@Override
public boolean equals(Object o) {
  // Delegate to the type-specific overload when possible.
  return (o instanceof Range) && equals((Range) o);
}
/**
 * Determines if this range equals another. Defined in terms of
 * {@link #compareTo(Range)}, so equality considers both endpoints and their
 * inclusiveness.
 *
 * @param otherRange
 *          range to compare
 * @return true if ranges are equals, false otherwise
 * @see #compareTo(Range)
 */
public boolean equals(Range otherRange) {
  return compareTo(otherRange) == 0;
}
/**
 * Compares this range to another range. Compares in order: start key, inclusiveness of start key,
 * end key, inclusiveness of end key. Infinite keys sort first, and non-infinite keys are compared
 * with {@link Key#compareTo(Key)}. Inclusive sorts before non-inclusive.
 *
 * @param o
 *          range to compare
 * @return comparison result
 */
@Override
public int compareTo(Range o) {
  // Compare start keys first; an infinite (negative-infinity) start sorts first.
  int comp;
  if (infiniteStartKey) {
    comp = o.infiniteStartKey ? 0 : -1;
  } else if (o.infiniteStartKey) {
    comp = 1;
  } else {
    comp = start.compareTo(o.start);
    if (comp == 0) {
      // Same start key: inclusive start sorts before exclusive.
      if (startKeyInclusive && !o.startKeyInclusive) {
        comp = -1;
      } else if (!startKeyInclusive && o.startKeyInclusive) {
        comp = 1;
      }
    }
  }
  if (comp == 0) {
    // Starts tie: compare stops; an infinite (positive-infinity) stop sorts last.
    if (infiniteStopKey) {
      comp = o.infiniteStopKey ? 0 : 1;
    } else if (o.infiniteStopKey) {
      comp = -1;
    } else {
      comp = stop.compareTo(o.stop);
      if (comp == 0) {
        // Same stop key: inclusive stop sorts after exclusive.
        if (stopKeyInclusive && !o.stopKeyInclusive) {
          comp = 1;
        } else if (!stopKeyInclusive && o.stopKeyInclusive) {
          comp = -1;
        }
      }
    }
  }
  return comp;
}
/**
 * Determines if the given key falls within this range.
 *
 * @param key
 *          key to consider
 * @return true if the given key falls within the range, false otherwise
 */
public boolean contains(Key key) {
  // In range iff the key is neither before the start nor after the end.
  return !(beforeStartKey(key) || afterEndKey(key));
}
/**
 * Merges overlapping and adjacent ranges. For example given the following input:
 *
 * <pre>
 * [a,c], (c, d], (g,m), (j,t]
 * </pre>
 *
 * the following ranges would be returned:
 *
 * <pre>
 * [a,d], (g,t]
 * </pre>
 *
 * The input collection is not modified; the result is a new list sorted by
 * {@link #compareTo(Range)} order.
 *
 * @param ranges
 *          to merge
 * @return list of merged ranges
 */
public static List<Range> mergeOverlapping(Collection<Range> ranges) {
  if (ranges.isEmpty())
    return Collections.emptyList();
  if (ranges.size() == 1)
    return Collections.singletonList(ranges.iterator().next());
  // Sort a copy so merging only ever needs to look at the next range.
  List<Range> ral = new ArrayList<>(ranges);
  Collections.sort(ral);
  ArrayList<Range> ret = new ArrayList<>(ranges.size());
  // currentRange accumulates the merge in progress; its start-inclusiveness is
  // tracked separately because new Range objects are built from getStartKey().
  Range currentRange = ral.get(0);
  boolean currentStartKeyInclusive = ral.get(0).startKeyInclusive;
  for (int i = 1; i < ral.size(); i++) {
    // because of inclusive switch, equal keys may not be seen
    if (currentRange.infiniteStopKey) {
      // this range has the minimal start key and
      // an infinite end key so it will contain all
      // other ranges
      break;
    }
    Range range = ral.get(i);
    boolean startKeysEqual;
    if (range.infiniteStartKey) {
      // previous start key must be infinite because it is sorted
      assert currentRange.infiniteStartKey;
      startKeysEqual = true;
    } else if (currentRange.infiniteStartKey) {
      startKeysEqual = false;
    } else {
      startKeysEqual = currentRange.start.equals(range.start);
    }
    // Merge when the next range starts inside the current one, shares its
    // start key, or is exactly adjacent (exclusive stop meets inclusive start).
    if (startKeysEqual || currentRange.contains(range.start) || (!currentRange.stopKeyInclusive
        && range.startKeyInclusive && range.start.equals(currentRange.stop))) {
      int cmp;
      // Extend the current range only if the next range ends later (or ends at
      // the same key but inclusively where the current end is exclusive).
      if (range.infiniteStopKey || (cmp = range.stop.compareTo(currentRange.stop)) > 0
          || (cmp == 0 && range.stopKeyInclusive)) {
        currentRange = new Range(currentRange.getStartKey(), currentStartKeyInclusive,
            range.getEndKey(), range.stopKeyInclusive);
      } /* else currentRange contains ral.get(i) */
    } else {
      // Disjoint: emit the finished merge and start a new one.
      ret.add(currentRange);
      currentRange = range;
      currentStartKeyInclusive = range.startKeyInclusive;
    }
  }
  ret.add(currentRange);
  return ret;
}
/**
 * Creates a range which represents the intersection of this range and the passed in range. The
 * following example will print true.
 *
 * <pre>
 * Range range1 = new Range("a", "f");
 * Range range2 = new Range("c", "n");
 * Range range3 = range1.clip(range2);
 * System.out.println(range3.equals(new Range("c", "f")));
 * </pre>
 *
 * Convenience overload of {@link #clip(Range, boolean)} that always throws when
 * the ranges are disjoint.
 *
 * @param range
 *          range to clip to
 * @return the intersection of this range and the given range
 * @throws IllegalArgumentException
 *           if ranges does not overlap
 */
public Range clip(Range range) {
  return clip(range, false);
}
/**
 * Creates a range which represents the intersection of this range and the passed in range. Unlike
 * {@link #clip(Range)}, this method can optionally return null if the ranges do not overlap,
 * instead of throwing an exception. The returnNullIfDisjoint parameter controls this behavior.
 *
 * @param range
 *          range to clip to
 * @param returnNullIfDisjoint
 *          true to return null if ranges are disjoint, false to throw an exception
 * @return the intersection of this range and the given range, or null if ranges do not overlap
 *         and returnNullIfDisjoint is true
 * @throws IllegalArgumentException
 *           if ranges does not overlap and returnNullIfDisjoint is false
 * @see Range#clip(Range)
 */
public Range clip(Range range, boolean returnNullIfDisjoint) {
  // Start from the other range's endpoints and tighten each one to this range.
  // NOTE(review): a null start/end key appears to represent an infinite
  // endpoint here — confirm against the Range constructors outside this view.
  Key sk = range.getStartKey();
  boolean ski = range.isStartKeyInclusive();
  Key ek = range.getEndKey();
  boolean eki = range.isEndKeyInclusive();
  if (range.getStartKey() == null) {
    // Other range starts at -inf: use this range's start.
    if (getStartKey() != null) {
      sk = getStartKey();
      ski = isStartKeyInclusive();
    }
  } else if (afterEndKey(range.getStartKey())
      || (getEndKey() != null && range.getStartKey().equals(getEndKey())
          && !(range.isStartKeyInclusive() && isEndKeyInclusive()))) {
    // Other range starts after this one ends (or they touch at a single key
    // that is not included by both sides) — no intersection exists.
    if (returnNullIfDisjoint)
      return null;
    throw new IllegalArgumentException("Range " + range + " does not overlap " + this);
  } else if (beforeStartKey(range.getStartKey())) {
    // Other range starts before this one: clamp to this range's start.
    sk = getStartKey();
    ski = isStartKeyInclusive();
  }
  if (range.getEndKey() == null) {
    // Other range ends at +inf: use this range's end.
    if (getEndKey() != null) {
      ek = getEndKey();
      eki = isEndKeyInclusive();
    }
  } else if (beforeStartKey(range.getEndKey())
      || (getStartKey() != null && range.getEndKey().equals(getStartKey())
          && !(range.isEndKeyInclusive() && isStartKeyInclusive()))) {
    // Other range ends before this one starts — no intersection exists.
    if (returnNullIfDisjoint)
      return null;
    throw new IllegalArgumentException("Range " + range + " does not overlap " + this);
  } else if (afterEndKey(range.getEndKey())) {
    // Other range ends after this one: clamp to this range's end.
    ek = getEndKey();
    eki = isEndKeyInclusive();
  }
  return new Range(sk, ski, ek, eki);
}
/**
 * Creates a new range that is bounded by the columns passed in. The start key in the returned
 * range will have a column >= to the minimum column. The end key in the returned range will
 * have a column <= the max column.
 *
 * @param min
 *          minimum column
 * @param max
 *          maximum column
 * @return a column bounded range
 * @throws IllegalArgumentException
 *           if the minimum column compares greater than the maximum column
 */
public Range bound(Column min, Column max) {
  if (min.compareTo(max) > 0) {
    throw new IllegalArgumentException("min column > max column " + min + " " + max);
  }
  // --- tighten the start key up to the minimum column ---
  Key sk = getStartKey();
  boolean ski = isStartKeyInclusive();
  if (sk != null) {
    ByteSequence cf = sk.getColumnFamilyData();
    ByteSequence cq = sk.getColumnQualifierData();
    ByteSequence mincf = new ArrayByteSequence(min.columnFamily);
    ByteSequence mincq;
    // A null qualifier on the min column means "from the start of the family".
    if (min.columnQualifier != null)
      mincq = new ArrayByteSequence(min.columnQualifier);
    else
      mincq = new ArrayByteSequence(new byte[0]);
    int cmp = cf.compareTo(mincf);
    if (cmp < 0 || (cmp == 0 && cq.compareTo(mincq) < 0)) {
      // Start key's column sorts before the minimum: rebuild the start key in
      // the same row at the minimum column. Long.MAX_VALUE timestamp makes it
      // sort first among keys with that row/column.
      ski = true;
      sk = new Key(sk.getRowData().toArray(), mincf.toArray(), mincq.toArray(), new byte[0],
          Long.MAX_VALUE, true);
    }
  }
  // --- tighten the end key down to the maximum column ---
  Key ek = getEndKey();
  boolean eki = isEndKeyInclusive();
  if (ek != null) {
    ByteSequence row = ek.getRowData();
    ByteSequence cf = ek.getColumnFamilyData();
    ByteSequence cq = ek.getColumnQualifierData();
    ByteSequence cv = ek.getColumnVisibilityData();
    ByteSequence maxcf = new ArrayByteSequence(max.columnFamily);
    ByteSequence maxcq = null;
    if (max.columnQualifier != null)
      maxcq = new ArrayByteSequence(max.columnQualifier);
    boolean set = false;
    int comp = cf.compareTo(maxcf);
    if (comp > 0) {
      // End key's family is past the max column: pull the end key back.
      set = true;
    } else if (comp == 0 && maxcq != null && cq.compareTo(maxcq) > 0) {
      // Same family but qualifier is past the max qualifier.
      set = true;
    } else if (!eki && row.length() > 0 && row.byteAt(row.length() - 1) == 0 && cf.length() == 0
        && cq.length() == 0 && cv.length() == 0 && ek.getTimestamp() == Long.MAX_VALUE) {
      // The end key looks like one produced by followingKey(ROW) (row ends in a
      // 0x00 byte, everything else empty): strip the trailing byte so the
      // rebuilt end key targets the original row. TODO confirm this matches
      // Key.followingKey's encoding.
      row = row.subSequence(0, row.length() - 1);
      set = true;
    }
    if (set) {
      // Rebuild the end key just after the max column, exclusive.
      eki = false;
      if (maxcq == null)
        ek = new Key(row.toArray(), maxcf.toArray(), new byte[0], new byte[0], 0, false)
            .followingKey(PartialKey.ROW_COLFAM);
      else
        ek = new Key(row.toArray(), maxcf.toArray(), maxcq.toArray(), new byte[0], 0, false)
            .followingKey(PartialKey.ROW_COLFAM_COLQUAL);
    }
  }
  return new Range(sk, ski, ek, eki);
}
@Override
public String toString() {
  // Interval notation: '[' / ']' for inclusive finite endpoints,
  // '(' / ')' for exclusive or infinite ones; infinite keys print as -inf/+inf.
  StringBuilder sb = new StringBuilder();
  sb.append((startKeyInclusive && start != null) ? '[' : '(');
  sb.append(start == null ? "-inf" : start);
  sb.append(',');
  sb.append(stop == null ? "+inf" : stop);
  sb.append((stopKeyInclusive && stop != null) ? ']' : ')');
  return sb.toString();
}
@Override
public void readFields(DataInput in) throws IOException {
  // Field order must mirror write(): two infinity flags, the finite keys that
  // are present, then the two inclusiveness flags.
  infiniteStartKey = in.readBoolean();
  infiniteStopKey = in.readBoolean();
  start = null;
  if (!infiniteStartKey) {
    start = new Key();
    start.readFields(in);
  }
  stop = null;
  if (!infiniteStopKey) {
    stop = new Key();
    stop.readFields(in);
  }
  startKeyInclusive = in.readBoolean();
  stopKeyInclusive = in.readBoolean();
  // Reject serialized data describing an inverted range.
  if (!infiniteStartKey && !infiniteStopKey && beforeStartKey(stop)) {
    throw new InvalidObjectException(
        "Start key must be less than end key in range (" + start + ", " + stop + ")");
  }
}
@Override
public void write(DataOutput out) throws IOException {
  // Serialization layout consumed by readFields(): infinity flags first so the
  // reader knows whether key bytes follow.
  out.writeBoolean(infiniteStartKey);
  out.writeBoolean(infiniteStopKey);
  if (!infiniteStartKey) {
    start.write(out);
  }
  if (!infiniteStopKey) {
    stop.write(out);
  }
  out.writeBoolean(startKeyInclusive);
  out.writeBoolean(stopKeyInclusive);
}
/**
 * Gets whether the start key of this range is inclusive. Meaningful only for a
 * finite start key; see {@link #isInfiniteStartKey()}.
 *
 * @return true if start key is inclusive
 */
public boolean isStartKeyInclusive() {
  return startKeyInclusive;
}
/**
 * Gets whether the end key of this range is inclusive. Meaningful only for a
 * finite end key; see {@link #isInfiniteStopKey()}.
 *
 * @return true if end key is inclusive
 */
public boolean isEndKeyInclusive() {
  return stopKeyInclusive;
}
/**
 * Converts this range to Thrift. Infinite endpoints are represented by a null
 * key plus the corresponding infinite flag in the resulting TRange.
 *
 * @return Thrift range
 */
public TRange toThrift() {
  return new TRange(start == null ? null : start.toThrift(),
      stop == null ? null : stop.toThrift(), startKeyInclusive, stopKeyInclusive,
      infiniteStartKey, infiniteStopKey);
}
/**
 * Gets whether the start key is negative infinity (the range is unbounded
 * below).
 *
 * @return true if start key is negative infinity
 */
public boolean isInfiniteStartKey() {
  return infiniteStartKey;
}
/**
 * Gets whether the end key is positive infinity (the range is unbounded
 * above).
 *
 * @return true if end key is positive infinity
 */
public boolean isInfiniteStopKey() {
  return infiniteStopKey;
}
/**
 * Creates a range that covers an exact row. Returns the same Range as {@link #Range(Text)}.
 *
 * @param row
 *          row to cover; set to null to cover all rows
 */
public static Range exact(Text row) {
  return new Range(row);
}
/**
 * Creates a range that covers an exact row and column family. The range runs
 * from the (row, cf) key inclusive up to, but not including, the key
 * immediately following that row/family pair.
 *
 * @param row
 *          row row to cover
 * @param cf
 *          column family to cover
 */
public static Range exact(Text row, Text cf) {
  Key startKey = new Key(row, cf);
  return new Range(startKey, true, startKey.followingKey(PartialKey.ROW_COLFAM), false);
}
/**
 * Creates a range that covers an exact row, column family, and column qualifier. The end of the
 * range is the exclusive key immediately following the given row/family/qualifier.
 *
 * @param row
 *          row row to cover
 * @param cf
 *          column family to cover
 * @param cq
 *          column qualifier to cover
 */
public static Range exact(Text row, Text cf, Text cq) {
  Key startKey = new Key(row, cf, cq);
  return new Range(startKey, true, startKey.followingKey(PartialKey.ROW_COLFAM_COLQUAL), false);
}
/**
 * Creates a range that covers an exact row, column family, column qualifier, and column
 * visibility. The end of the range is the exclusive key immediately following the given
 * row/family/qualifier/visibility.
 *
 * @param row
 *          row row to cover
 * @param cf
 *          column family to cover
 * @param cq
 *          column qualifier to cover
 * @param cv
 *          column visibility to cover
 */
public static Range exact(Text row, Text cf, Text cq, Text cv) {
  Key startKey = new Key(row, cf, cq, cv);
  return new Range(startKey, true, startKey.followingKey(PartialKey.ROW_COLFAM_COLQUAL_COLVIS),
      false);
}
/**
 * Creates a range that covers an exact row, column family, column qualifier, column visibility,
 * and timestamp. The end of the range is the exclusive key immediately following the given
 * fully-specified key.
 *
 * @param row
 *          row row to cover
 * @param cf
 *          column family to cover
 * @param cq
 *          column qualifier to cover
 * @param cv
 *          column visibility to cover
 * @param ts
 *          timestamp to cover
 */
public static Range exact(Text row, Text cf, Text cq, Text cv, long ts) {
  Key startKey = new Key(row, cf, cq, cv, ts);
  return new Range(startKey, true,
      startKey.followingKey(PartialKey.ROW_COLFAM_COLQUAL_COLVIS_TIME), false);
}
/**
 * Returns a Text that sorts just after all Texts beginning with a prefix. This
 * is the prefix with its last non-0xff byte incremented and any trailing 0xff
 * bytes dropped.
 *
 * @param prefix
 *          to follow
 * @return prefix that immediately follows the given prefix when sorted, or null if no prefix can
 *         follow (i.e., the string is all 0xff bytes)
 */
public static Text followingPrefix(Text prefix) {
  byte[] bytes = prefix.getBytes();
  // Scan backwards past trailing 0xff bytes, which cannot be incremented.
  int last = prefix.getLength() - 1;
  while (last >= 0 && bytes[last] == (byte) 0xff) {
    last--;
  }
  if (last < 0) {
    // Every byte is 0xff: nothing sorts after this prefix.
    return null;
  }
  // Keep everything up to and including the last incrementable byte, then bump it.
  byte[] following = new byte[last + 1];
  System.arraycopy(bytes, 0, following, 0, last + 1);
  following[last]++;
  return new Text(following);
}
/**
 * Returns a Range that covers all rows beginning with a prefix. If no text can
 * follow the prefix (all 0xff bytes), the range extends to positive infinity.
 *
 * @param rowPrefix
 *          prefix of rows to cover
 */
public static Range prefix(Text rowPrefix) {
  Text fp = followingPrefix(rowPrefix);
  return new Range(new Key(rowPrefix), true, fp == null ? null : new Key(fp), false);
}
/**
 * Returns a Range that covers all column families beginning with a prefix within a given row.
 * If no family can follow the prefix (all 0xff bytes), the range runs to the end of the row.
 *
 * @param row
 *          row to cover
 * @param cfPrefix
 *          prefix of column families to cover
 */
public static Range prefix(Text row, Text cfPrefix) {
  Text fp = followingPrefix(cfPrefix);
  return new Range(new Key(row, cfPrefix), true,
      fp == null ? new Key(row).followingKey(PartialKey.ROW) : new Key(row, fp), false);
}
/**
 * Returns a Range that covers all column qualifiers beginning with a prefix within a given row
 * and column family. If no qualifier can follow the prefix (all 0xff bytes), the range runs to
 * the end of the column family.
 *
 * @param row
 *          row to cover
 * @param cf
 *          column family to cover
 * @param cqPrefix
 *          prefix of column qualifiers to cover
 */
public static Range prefix(Text row, Text cf, Text cqPrefix) {
  Text fp = followingPrefix(cqPrefix);
  return new Range(new Key(row, cf, cqPrefix), true,
      fp == null ? new Key(row, cf).followingKey(PartialKey.ROW_COLFAM) : new Key(row, cf, fp),
      false);
}
/**
 * Returns a Range that covers all column visibilities beginning with a prefix within a given row,
 * column family, and column qualifier. If no visibility can follow the prefix (all 0xff bytes),
 * the range runs to the end of the column qualifier.
 *
 * @param row
 *          row to cover
 * @param cf
 *          column family to cover
 * @param cq
 *          column qualifier to cover
 * @param cvPrefix
 *          prefix of column visibilities to cover
 */
public static Range prefix(Text row, Text cf, Text cq, Text cvPrefix) {
  Text fp = followingPrefix(cvPrefix);
  return new Range(new Key(row, cf, cq, cvPrefix), true,
      fp == null ? new Key(row, cf, cq).followingKey(PartialKey.ROW_COLFAM_COLQUAL)
          : new Key(row, cf, cq, fp),
      false);
}
/**
 * Creates a range that covers an exact row. Convenience overload that converts
 * the CharSequence to a Text before delegating.
 *
 * @param row
 *          row to cover; set to null to cover all rows
 * @see #exact(Text)
 */
public static Range exact(CharSequence row) {
  Text rowText = new Text(row.toString());
  return exact(rowText);
}
/**
 * Creates a range that covers an exact row and column family. Convenience
 * overload that converts the CharSequences to Text before delegating.
 *
 * @param row
 *          row row to cover
 * @param cf
 *          column family to cover
 * @see #exact(Text, Text)
 */
public static Range exact(CharSequence row, CharSequence cf) {
  Text rowText = new Text(row.toString());
  Text cfText = new Text(cf.toString());
  return exact(rowText, cfText);
}
/**
 * Creates a range that covers an exact row, column family, and column qualifier. Convenience
 * overload that converts the CharSequences to Text before delegating.
 *
 * @param row
 *          row row to cover
 * @param cf
 *          column family to cover
 * @param cq
 *          column qualifier to cover
 * @see #exact(Text, Text, Text)
 */
public static Range exact(CharSequence row, CharSequence cf, CharSequence cq) {
  Text rowText = new Text(row.toString());
  Text cfText = new Text(cf.toString());
  Text cqText = new Text(cq.toString());
  return exact(rowText, cfText, cqText);
}
/**
 * Creates a range that covers an exact row, column family, column qualifier, and column
 * visibility. Convenience overload that converts the CharSequences to Text before delegating.
 *
 * @param row
 *          row row to cover
 * @param cf
 *          column family to cover
 * @param cq
 *          column qualifier to cover
 * @param cv
 *          column visibility to cover
 * @see #exact(Text, Text, Text, Text)
 */
public static Range exact(CharSequence row, CharSequence cf, CharSequence cq, CharSequence cv) {
  Text rowText = new Text(row.toString());
  Text cfText = new Text(cf.toString());
  Text cqText = new Text(cq.toString());
  Text cvText = new Text(cv.toString());
  return exact(rowText, cfText, cqText, cvText);
}
/**
 * Creates a range that covers an exact row, column family, column qualifier, column visibility,
 * and timestamp. Convenience overload that converts the CharSequences to Text before delegating.
 *
 * @param row
 *          row row to cover
 * @param cf
 *          column family to cover
 * @param cq
 *          column qualifier to cover
 * @param cv
 *          column visibility to cover
 * @param ts
 *          timestamp to cover
 * @see #exact(Text, Text, Text, Text, long)
 */
public static Range exact(CharSequence row, CharSequence cf, CharSequence cq, CharSequence cv,
    long ts) {
  Text rowText = new Text(row.toString());
  Text cfText = new Text(cf.toString());
  Text cqText = new Text(cq.toString());
  Text cvText = new Text(cv.toString());
  return exact(rowText, cfText, cqText, cvText, ts);
}
/**
 * Returns a Range that covers all rows beginning with a prefix. Convenience
 * overload that converts the CharSequence to a Text before delegating.
 *
 * @param rowPrefix
 *          prefix of rows to cover
 * @see #prefix(Text)
 */
public static Range prefix(CharSequence rowPrefix) {
  Text rowPrefixText = new Text(rowPrefix.toString());
  return prefix(rowPrefixText);
}
/**
 * Returns a Range that covers all column families beginning with a prefix within a given row.
 * Convenience overload that converts the CharSequences to Text before delegating.
 *
 * @param row
 *          row to cover
 * @param cfPrefix
 *          prefix of column families to cover
 * @see #prefix(Text, Text)
 */
public static Range prefix(CharSequence row, CharSequence cfPrefix) {
  Text rowText = new Text(row.toString());
  Text cfPrefixText = new Text(cfPrefix.toString());
  return prefix(rowText, cfPrefixText);
}
/**
 * Returns a Range that covers all column qualifiers beginning with a prefix within a given row
 * and column family. Convenience overload that converts the CharSequences to Text before
 * delegating.
 *
 * @param row
 *          row to cover
 * @param cf
 *          column family to cover
 * @param cqPrefix
 *          prefix of column qualifiers to cover
 * @see #prefix(Text, Text, Text)
 */
public static Range prefix(CharSequence row, CharSequence cf, CharSequence cqPrefix) {
  Text rowText = new Text(row.toString());
  Text cfText = new Text(cf.toString());
  Text cqPrefixText = new Text(cqPrefix.toString());
  return prefix(rowText, cfText, cqPrefixText);
}
/**
 * Returns a Range that covers all column visibilities beginning with a prefix within a given row,
 * column family, and column qualifier. Convenience overload that converts the CharSequences to
 * Text before delegating.
 *
 * @param row
 *          row to cover
 * @param cf
 *          column family to cover
 * @param cq
 *          column qualifier to cover
 * @param cvPrefix
 *          prefix of column visibilities to cover
 * @see #prefix(Text, Text, Text, Text)
 */
public static Range prefix(CharSequence row, CharSequence cf, CharSequence cq,
    CharSequence cvPrefix) {
  Text rowText = new Text(row.toString());
  Text cfText = new Text(cf.toString());
  Text cqText = new Text(cq.toString());
  Text cvPrefixText = new Text(cvPrefix.toString());
  return prefix(rowText, cfText, cqText, cvPrefixText);
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.regionserver;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.IOException;
import java.io.InputStream;
import java.lang.ref.SoftReference;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FilterFileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PositionedReadable;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.MiniHBaseCluster;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.fs.HFileSystem;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFileContext;
import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
import org.apache.hadoop.hbase.io.hfile.HFileScanner;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Assume;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Test cases that ensure that file system level errors are bubbled up
 * appropriately to clients, rather than swallowed.
 */
@Category({RegionServerTests.class, LargeTests.class})
public class TestFSErrorsExposed {
  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
      HBaseClassTestRule.forClass(TestFSErrorsExposed.class);
  private static final Logger LOG = LoggerFactory.getLogger(TestFSErrorsExposed.class);
  HBaseTestingUtility util = new HBaseTestingUtility();
  @Rule
  public TestName name = new TestName();
  /**
   * Injects errors into the pread calls of an on-disk file, and makes
   * sure those bubble up to the HFile scanner
   */
  @Test
  public void testHFileScannerThrowsErrors() throws IOException {
    Path hfilePath = new Path(new Path(
        util.getDataTestDir("internalScannerExposesErrors"),
        "regionname"), "familyname");
    HFileSystem hfs = (HFileSystem)util.getTestFileSystem();
    // Wrap the backing FS so reads can be made to fail on demand.
    FaultyFileSystem faultyfs = new FaultyFileSystem(hfs.getBackingFs());
    FileSystem fs = new HFileSystem(faultyfs);
    CacheConfig cacheConf = new CacheConfig(util.getConfiguration());
    HFileContext meta = new HFileContextBuilder().withBlockSize(2 * 1024).build();
    StoreFileWriter writer = new StoreFileWriter.Builder(
        util.getConfiguration(), cacheConf, hfs)
            .withOutputDir(hfilePath)
            .withFileContext(meta)
            .build();
    TestHStoreFile.writeStoreFile(
        writer, Bytes.toBytes("cf"), Bytes.toBytes("qual"));
    HStoreFile sf = new HStoreFile(fs, writer.getPath(), util.getConfiguration(), cacheConf,
        BloomType.NONE, true);
    sf.initReader();
    StoreFileReader reader = sf.getReader();
    HFileScanner scanner = reader.getScanner(false, true);
    FaultyInputStream inStream = faultyfs.inStreams.get(0).get();
    assertNotNull(inStream);
    scanner.seekTo();
    // Do at least one successful read
    assertTrue(scanner.next());
    faultyfs.startFaults();
    try {
      // Keep scanning until the injected fault surfaces.
      while (scanner.next()) {
        continue;
      }
      fail("Scanner didn't throw after faults injected");
    } catch (IOException ioe) {
      LOG.info("Got expected exception", ioe);
      assertTrue(ioe.getMessage().contains("Fault"));
    }
    reader.close(true); // end of test so evictOnClose
  }
  /**
   * Injects errors into the pread calls of an on-disk file, and makes
   * sure those bubble up to the StoreFileScanner
   */
  @Test
  public void testStoreFileScannerThrowsErrors() throws IOException {
    Path hfilePath = new Path(new Path(
        util.getDataTestDir("internalScannerExposesErrors"),
        "regionname"), "familyname");
    HFileSystem hfs = (HFileSystem)util.getTestFileSystem();
    FaultyFileSystem faultyfs = new FaultyFileSystem(hfs.getBackingFs());
    HFileSystem fs = new HFileSystem(faultyfs);
    CacheConfig cacheConf = new CacheConfig(util.getConfiguration());
    HFileContext meta = new HFileContextBuilder().withBlockSize(2 * 1024).build();
    StoreFileWriter writer = new StoreFileWriter.Builder(
        util.getConfiguration(), cacheConf, hfs)
            .withOutputDir(hfilePath)
            .withFileContext(meta)
            .build();
    TestHStoreFile.writeStoreFile(
        writer, Bytes.toBytes("cf"), Bytes.toBytes("qual"));
    HStoreFile sf = new HStoreFile(fs, writer.getPath(), util.getConfiguration(), cacheConf,
        BloomType.NONE, true);
    List<StoreFileScanner> scanners = StoreFileScanner.getScannersForStoreFiles(
        Collections.singletonList(sf), false, true, false, false,
        // 0 is passed as readpoint because this test operates on HStoreFile directly
        0);
    KeyValueScanner scanner = scanners.get(0);
    FaultyInputStream inStream = faultyfs.inStreams.get(0).get();
    assertNotNull(inStream);
    scanner.seek(KeyValue.LOWESTKEY);
    // Do at least one successful read
    assertNotNull(scanner.next());
    faultyfs.startFaults();
    try {
      // Keep scanning until the injected fault surfaces.
      while (scanner.next() != null) {
        continue;
      }
      fail("Scanner didn't throw after faults injected");
    } catch (IOException ioe) {
      LOG.info("Got expected exception", ioe);
      assertTrue(ioe.getMessage().contains("Could not iterate"));
    }
    scanner.close();
  }
  /**
   * Cluster test which starts a region server with a region, then
   * removes the data from HDFS underneath it, and ensures that
   * errors are bubbled to the client.
   */
  @Test
  public void testFullSystemBubblesFSErrors() throws Exception {
    // We won't have an error if the datanode is not there if we use short circuit
    // it's a known 'feature'.
    Assume.assumeTrue(!util.isReadShortCircuitOn());
    try {
      // Make it fail faster.
      util.getConfiguration().setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 1);
      util.getConfiguration().setInt(HConstants.HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD, 90000);
      util.getConfiguration().setInt("hbase.lease.recovery.timeout", 10000);
      util.getConfiguration().setInt("hbase.lease.recovery.dfs.timeout", 1000);
      util.startMiniCluster(1);
      final TableName tableName = TableName.valueOf(name.getMethodName());
      byte[] fam = Bytes.toBytes("fam");
      Admin admin = util.getAdmin();
      TableDescriptorBuilder.ModifyableTableDescriptor tableDescriptor =
          new TableDescriptorBuilder.ModifyableTableDescriptor(tableName);
      tableDescriptor.setColumnFamily(
          new ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor(fam)
              .setMaxVersions(1)
              .setBlockCacheEnabled(false)
      );
      admin.createTable(tableDescriptor);
      // Make a new Configuration so it makes a new connection that has the
      // above configuration on it; else we use the old one w/ 10 as default.
      try (Table table = util.getConnection().getTable(tableName)) {
        // Load some data
        util.loadTable(table, fam, false);
        util.flush();
        util.countRows(table);
        // Kill the DFS cluster
        util.getDFSCluster().shutdownDataNodes();
        try {
          util.countRows(table);
          fail("Did not fail to count after removing data");
        } catch (Exception e) {
          LOG.info("Got expected error", e);
          assertTrue(e.getMessage().contains("Could not seek"));
        }
      }
      // Restart data nodes so that HBase can shut down cleanly.
      util.getDFSCluster().restartDataNodes();
    } finally {
      MiniHBaseCluster cluster = util.getMiniHBaseCluster();
      if (cluster != null) cluster.killAll();
      util.shutdownMiniCluster();
    }
  }
  /**
   * FilterFileSystem that records every opened input stream so faults can be
   * injected into them later.
   */
  static class FaultyFileSystem extends FilterFileSystem {
    // SoftReferences so recorded streams do not pin large buffers in memory.
    List<SoftReference<FaultyInputStream>> inStreams = new ArrayList<>();
    public FaultyFileSystem(FileSystem testFileSystem) {
      super(testFileSystem);
    }
    @Override
    public FSDataInputStream open(Path p, int bufferSize) throws IOException {
      FSDataInputStream orig = fs.open(p, bufferSize);
      FaultyInputStream faulty = new FaultyInputStream(orig);
      inStreams.add(new SoftReference<>(faulty));
      return faulty;
    }
    /**
     * Starts to simulate faults on all streams opened so far
     */
    public void startFaults() {
      for (SoftReference<FaultyInputStream> is: inStreams) {
        // BUGFIX: SoftReference.get() may return null if the stream has been
        // garbage collected; previously this threw NPE instead of injecting
        // faults on the remaining live streams.
        FaultyInputStream stream = is.get();
        if (stream != null) {
          stream.startFaults();
        }
      }
    }
  }
  /**
   * Input stream wrapper that throws IOException from positional reads once
   * {@link #startFaults()} has been called.
   */
  static class FaultyInputStream extends FSDataInputStream {
    boolean faultsStarted = false;
    public FaultyInputStream(InputStream in) throws IOException {
      super(in);
    }
    public void startFaults() {
      faultsStarted = true;
    }
    @Override
    public int read(long position, byte[] buffer, int offset, int length)
        throws IOException {
      injectFault();
      return ((PositionedReadable)in).read(position, buffer, offset, length);
    }
    private void injectFault() throws IOException {
      if (faultsStarted) {
        throw new IOException("Fault injected");
      }
    }
  }
}
| |
// Copyright 2016 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.syntax;
import com.google.devtools.build.lib.events.Location;
import com.google.devtools.build.lib.skylarkinterface.SkylarkValue;
import com.google.devtools.build.lib.syntax.Mutability.Freezable;
import com.google.devtools.build.lib.syntax.Mutability.MutabilityException;
import java.util.Collection;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import javax.annotation.Nullable;
/**
 * Base class for data structures that are only mutable with a proper Mutability.
 *
 * <p>Subclasses expose read-only views of their contents directly; every
 * Collection/Map mutation method inherited from the JDK interfaces is disabled
 * and throws {@link UnsupportedOperationException}. Mutation is expected to go
 * through Skylark-specific methods that first call {@link #checkMutable}.
 */
public abstract class SkylarkMutable implements Freezable, SkylarkValue {
  protected SkylarkMutable() {}
  /**
   * Check whether this object is mutable in the current evaluation Environment.
   * @throws EvalException if the object was not mutable.
   */
  protected void checkMutable(Location loc, Environment env) throws EvalException {
    try {
      Mutability.checkMutable(this, env);
    } catch (MutabilityException ex) {
      // Surface the mutability violation as a Skylark evaluation error at the
      // given source location, preserving the cause.
      throw new EvalException(loc, ex);
    }
  }
  @Override
  public boolean isImmutable() {
    // Immutability is derived from the owning Mutability's frozen state rather
    // than tracked per-object.
    return mutability().isFrozen();
  }
  @Override
  public String toString() {
    return Printer.repr(this);
  }
  /**
   * Add a new lock at {@code loc}. No effect if frozen.
   */
  public void lock(Location loc) {
    mutability().lock(this, loc);
  }
  /**
   * Remove the lock at {@code loc}; such a lock must already exist. No effect if frozen.
   */
  public void unlock(Location loc) {
    mutability().unlock(this, loc);
  }
  /**
   * A Collection whose read operations delegate to an underlying contents
   * collection and whose JDK mutation operations are all disabled.
   */
  abstract static class MutableCollection<E> extends SkylarkMutable implements Collection<E> {
    protected MutableCollection() {}
    /**
     * The underlying contents is a (usually) mutable data structure.
     * Read access is forwarded to these contents.
     * This object must not be modified outside an {@link Environment}
     * with a correct matching {@link Mutability},
     * which should be checked beforehand using {@link #checkMutable}.
     * it need not be an instance of {@link com.google.common.collect.ImmutableCollection}.
     */
    protected abstract Collection<E> getContentsUnsafe();
    @Override
    public Iterator<E> iterator() {
      return getContentsUnsafe().iterator();
    }
    @Override
    public int size() {
      return getContentsUnsafe().size();
    }
    @Override
    public final Object[] toArray() {
      return getContentsUnsafe().toArray();
    }
    @Override
    public final <T> T[] toArray(T[] other) {
      return getContentsUnsafe().toArray(other);
    }
    @Override
    public boolean isEmpty() {
      return getContentsUnsafe().isEmpty();
    }
    @Override
    public final boolean contains(@Nullable Object object) {
      return getContentsUnsafe().contains(object);
    }
    @Override
    public final boolean containsAll(Collection<?> collection) {
      return getContentsUnsafe().containsAll(collection);
    }
    // Disable all mutation interfaces without a mutation context.
    @Deprecated
    @Override
    public final boolean add(E element) {
      throw new UnsupportedOperationException();
    }
    @Deprecated
    @Override
    public final boolean addAll(Collection<? extends E> collection) {
      throw new UnsupportedOperationException();
    }
    @Deprecated
    @Override
    public final boolean remove(Object object) {
      throw new UnsupportedOperationException();
    }
    @Deprecated
    @Override
    public final boolean removeAll(Collection<?> collection) {
      throw new UnsupportedOperationException();
    }
    @Deprecated
    @Override
    public final boolean retainAll(Collection<?> collection) {
      throw new UnsupportedOperationException();
    }
    @Deprecated
    @Override
    public final void clear() {
      throw new UnsupportedOperationException();
    }
    // NOTE(review): equals/hashCode delegate to the contents, so this wrapper
    // compares equal to whatever its contents compare equal to; the relation
    // may be asymmetric with plain JDK collections — confirm callers rely on
    // this intentionally.
    @Override
    public boolean equals(Object o) {
      return getContentsUnsafe().equals(o);
    }
    @Override
    public int hashCode() {
      return getContentsUnsafe().hashCode();
    }
  }
  /**
   * A Map whose read operations delegate to an underlying contents map and
   * whose JDK mutation operations are all disabled.
   */
  abstract static class MutableMap<K, V> extends SkylarkMutable implements Map<K, V> {
    MutableMap() {}
    /**
     * The underlying contents is a (usually) mutable data structure.
     * Read access is forwarded to these contents.
     * This object must not be modified outside an {@link Environment}
     * with a correct matching {@link Mutability},
     * which should be checked beforehand using {@link #checkMutable}.
     */
    protected abstract Map<K, V> getContentsUnsafe();
    // A SkylarkDict forwards all read-only access to the contents.
    @Override
    public final V get(Object key) {
      return getContentsUnsafe().get(key);
    }
    @Override
    public boolean containsKey(Object key) {
      return getContentsUnsafe().containsKey(key);
    }
    @Override
    public boolean containsValue(Object value) {
      return getContentsUnsafe().containsValue(value);
    }
    @Override
    public Set<Map.Entry<K, V>> entrySet() {
      return getContentsUnsafe().entrySet();
    }
    @Override
    public Set<K> keySet() {
      return getContentsUnsafe().keySet();
    }
    @Override
    public Collection<V> values() {
      return getContentsUnsafe().values();
    }
    @Override
    public int size() {
      return getContentsUnsafe().size();
    }
    @Override
    public boolean isEmpty() {
      return getContentsUnsafe().isEmpty();
    }
    // Disable all mutation interfaces without a mutation context.
    @Deprecated
    @Override
    public final V put(K key, V value) {
      throw new UnsupportedOperationException();
    }
    @Deprecated
    @Override
    public final void putAll(Map<? extends K, ? extends V> map) {
      throw new UnsupportedOperationException();
    }
    @Deprecated
    @Override
    public final V remove(Object key) {
      throw new UnsupportedOperationException();
    }
    @Deprecated
    @Override
    public final void clear() {
      throw new UnsupportedOperationException();
    }
    @Override
    public boolean equals(Object o) {
      return getContentsUnsafe().equals(o);
    }
    @Override
    public int hashCode() {
      return getContentsUnsafe().hashCode();
    }
  }
}
| |
import net.runelite.mapping.Export;
import net.runelite.mapping.Hook;
import net.runelite.mapping.Implements;
import net.runelite.mapping.ObfuscatedGetter;
import net.runelite.mapping.ObfuscatedName;
import net.runelite.mapping.ObfuscatedSignature;
@ObfuscatedName("cs")
@Implements("Projectile")
public final class Projectile extends Renderable {
	// NOTE(review): the two static fields below are unrelated to projectile
	// state; the deobfuscator appears to have placed them in this class
	// arbitrarily.
	@ObfuscatedName("dt")
	@Export("host")
	static String host;
	@ObfuscatedName("kv")
	@ObfuscatedSignature(
		signature = "Lcm;"
	)
	@Export("topContextMenuRow")
	static ContextMenuRow topContextMenuRow;
	// Spotanim (graphic) id of this projectile.
	@ObfuscatedName("o")
	@ObfuscatedGetter(
		intValue = -694715359
	)
	@Export("id")
	int id;
	@ObfuscatedName("k")
	@ObfuscatedGetter(
		intValue = -218460973
	)
	@Export("floor")
	int floor;
	// Launch tile coordinates; presumably scene-local units -- TODO confirm.
	@ObfuscatedName("t")
	@ObfuscatedGetter(
		intValue = 927536955
	)
	@Export("x1")
	int x1;
	@ObfuscatedName("d")
	@ObfuscatedGetter(
		intValue = 869766095
	)
	@Export("y1")
	int y1;
	@ObfuscatedName("h")
	@ObfuscatedGetter(
		intValue = -715790761
	)
	@Export("height")
	int height;
	@ObfuscatedName("m")
	@ObfuscatedGetter(
		intValue = -1196467841
	)
	@Export("endHeight")
	int endHeight;
	// Game cycles bounding the projectile's flight.
	@ObfuscatedName("z")
	@ObfuscatedGetter(
		intValue = -20443961
	)
	@Export("startMovementCycle")
	int startMovementCycle;
	@ObfuscatedName("i")
	@ObfuscatedGetter(
		intValue = 728016351
	)
	@Export("endCycle")
	int endCycle;
	// Launch pitch; moveProjectile() scales it by ~2*PI/256, so it appears to
	// be expressed in 1/256ths of a circle -- TODO confirm.
	@ObfuscatedName("u")
	@ObfuscatedGetter(
		intValue = 356552955
	)
	@Export("slope")
	int slope;
	// NOTE(review): despite the name, moveProjectile() uses this value as a
	// horizontal start-offset distance along the flight line.
	@ObfuscatedName("x")
	@ObfuscatedGetter(
		intValue = -1949591217
	)
	@Export("startHeight")
	int startHeight;
	// Index of the actor this projectile is targeting, if any.
	@ObfuscatedName("y")
	@ObfuscatedGetter(
		intValue = -1821054993
	)
	@Export("interacting")
	int interacting;
	@ObfuscatedName("a")
	@Export("isMoving")
	boolean isMoving;
	// Current interpolated position (x, y horizontal; z vertical).
	@ObfuscatedName("w")
	@Export("x")
	double x;
	@ObfuscatedName("n")
	@Export("y")
	double y;
	@ObfuscatedName("l")
	@Export("z")
	double z;
	@ObfuscatedName("s")
	@Export("velocityX")
	double velocityX;
	@ObfuscatedName("v")
	@Export("velocityY")
	double velocityY;
	// Magnitude of the horizontal velocity vector.
	@ObfuscatedName("q")
	@Export("scalar")
	double scalar;
	@ObfuscatedName("r")
	@Export("velocityZ")
	double velocityZ;
	// Vertical acceleration: update() integrates z with
	// z += v*t + 0.5 * heightOffset * t^2.
	@ObfuscatedName("j")
	@Export("heightOffset")
	double heightOffset;
	// Yaw/pitch in 2048-unit circle angles (see the 2047 masks in update()).
	@ObfuscatedName("b")
	@ObfuscatedGetter(
		intValue = -197184165
	)
	@Export("rotationX")
	int rotationX;
	@ObfuscatedName("g")
	@ObfuscatedGetter(
		intValue = 226844767
	)
	@Export("rotationY")
	int rotationY;
	@ObfuscatedName("f")
	@ObfuscatedSignature(
		signature = "Lke;"
	)
	@Export("animationSequence")
	Sequence animationSequence;
	// Current animation frame index and elapsed time within that frame.
	@ObfuscatedName("p")
	@ObfuscatedGetter(
		intValue = 747017145
	)
	@Export("int7")
	int int7;
	@ObfuscatedName("e")
	@ObfuscatedGetter(
		intValue = -677208147
	)
	@Export("int6")
	int int6;
	// Constructs a projectile and resolves its animation sequence (if the
	// spotanim defines one).
	Projectile(int var1, int var2, int var3, int var4, int var5, int var6, int var7, int var8, int var9, int var10, int var11) {
		this.isMoving = false;
		this.int7 = 0;
		this.int6 = 0;
		this.id = var1;
		this.floor = var2;
		this.x1 = var3;
		this.y1 = var4;
		this.height = var5;
		this.startMovementCycle = var6;
		this.endCycle = var7;
		this.slope = var8;
		this.startHeight = var9;
		this.interacting = var10;
		this.endHeight = var11;
		// NOTE(review): redundant re-assignment; isMoving is already false.
		this.isMoving = false;
		int var12 = class86.getSpotAnimType(this.id).field3497;
		if(var12 != -1) {
			this.animationSequence = CombatInfo1.getAnimation(var12);
		} else {
			this.animationSequence = null;
		}
	}
	// Re-aims the projectile at target (var1, var2, var3) so that it arrives
	// exactly at cycle endCycle; var4 is the current game cycle.
	@ObfuscatedName("o")
	@ObfuscatedSignature(
		signature = "(IIIIB)V",
		garbageValue = "1"
	)
	@Export("moveProjectile")
	@Hook("projectileMoved")
	final void moveProjectile(int var1, int var2, int var3, int var4) {
		double var5;
		if(!this.isMoving) {
			// First call: place the projectile startHeight units along the
			// line from (x1, y1) toward the target, at launch height.
			var5 = (double)(var1 - this.x1);
			double var7 = (double)(var2 - this.y1);
			double var9 = Math.sqrt(var5 * var5 + var7 * var7);
			this.x = (double)this.x1 + var5 * (double)this.startHeight / var9;
			this.y = (double)this.y1 + (double)this.startHeight * var7 / var9;
			this.z = (double)this.height;
		}
		// Remaining flight time in cycles; horizontal velocity is whatever
		// covers the remaining distance in that time.
		var5 = (double)(this.endCycle + 1 - var4);
		this.velocityX = ((double)var1 - this.x) / var5;
		this.velocityY = ((double)var2 - this.y) / var5;
		this.scalar = Math.sqrt(this.velocityY * this.velocityY + this.velocityX * this.velocityX);
		if(!this.isMoving) {
			// 0.02454369 ~= 2*PI/256: slope appears to be 1/256ths of a
			// circle, converted here to an initial vertical velocity.
			this.velocityZ = -this.scalar * Math.tan((double)this.slope * 0.02454369D);
		}
		// Solve z(t) = z + v*t + 0.5*a*t^2 for the acceleration that lands
		// the projectile at height var3 after var5 cycles.
		this.heightOffset = ((double)var3 - this.z - var5 * this.velocityZ) * 2.0D / (var5 * var5);
	}
	// Advances position/rotation by var1 cycles and steps the frame animation.
	@ObfuscatedName("k")
	@ObfuscatedSignature(
		signature = "(IB)V",
		garbageValue = "37"
	)
	@Export("update")
	final void update(int var1) {
		this.isMoving = true;
		this.x += (double)var1 * this.velocityX;
		this.y += (double)var1 * this.velocityY;
		// Kinematic integration: constant vertical acceleration heightOffset.
		this.z += (double)var1 * (double)var1 * this.heightOffset * 0.5D + this.velocityZ * (double)var1;
		this.velocityZ += this.heightOffset * (double)var1;
		// 325.949 ~= 2048 / (2*PI): convert radians to 2048-unit angles.
		this.rotationX = (int)(Math.atan2(this.velocityX, this.velocityY) * 325.949D) + 1024 & 2047;
		this.rotationY = (int)(Math.atan2(this.velocityZ, this.scalar) * 325.949D) & 2047;
		if(this.animationSequence != null) {
			this.int6 += var1;
			// Consume elapsed time across animation frames, honoring the
			// sequence's frameStep loop-back and resetting if out of range.
			while(true) {
				do {
					do {
						if(this.int6 <= this.animationSequence.frameLengths[this.int7]) {
							return;
						}
						this.int6 -= this.animationSequence.frameLengths[this.int7];
						++this.int7;
					} while(this.int7 < this.animationSequence.frameIDs.length);
					this.int7 -= this.animationSequence.frameStep;
				} while(this.int7 >= 0 && this.int7 < this.animationSequence.frameIDs.length);
				this.int7 = 0;
			}
		}
	}
	// Returns the spotanim model for the current frame, pitched by rotationY.
	// NOTE(review): rotationX (yaw) is not applied here -- presumably applied
	// by the renderer; confirm against the caller.
	@ObfuscatedName("u")
	@ObfuscatedSignature(
		signature = "(I)Lei;",
		garbageValue = "1329079562"
	)
	protected final Model getModel() {
		Spotanim var1 = class86.getSpotAnimType(this.id);
		Model var2 = var1.getModel(this.int7);
		if(var2 == null) {
			return null;
		} else {
			var2.rotateZ(this.rotationY);
			return var2;
		}
	}
	// NOTE(review): the two static methods below are unrelated to Projectile;
	// the deobfuscator placed them here.
	@ObfuscatedName("o")
	@ObfuscatedSignature(
		signature = "(II)V",
		garbageValue = "-1626830208"
	)
	static final void method1944(int var0) {
		class132.Viewport_entityIdsAtMouse[++class132.Viewport_entityCountAtMouse - 1] = var0;
	}
	@ObfuscatedName("o")
	@ObfuscatedSignature(
		signature = "(Ljf;Ljf;Ljf;Ljf;B)V",
		garbageValue = "54"
	)
	public static void method1938(IndexDataBase var0, IndexDataBase var1, IndexDataBase var2, IndexDataBase var3) {
		UrlRequest.widgetIndex = var0;
		Friend.field3864 = var1;
		Widget.field2815 = var2;
		DynamicObject.field1471 = var3;
		MouseRecorder.widgets = new Widget[UrlRequest.widgetIndex.size()][];
		class154.validInterfaces = new boolean[UrlRequest.widgetIndex.size()];
	}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.suggest.phrase;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.suggest.SuggestBuilder.SuggestionBuilder;
import java.io.IOException;
import java.util.*;
import java.util.Map.Entry;
/**
* Defines the actual suggest command for phrase suggestions ( <tt>phrase</tt>).
*/
public final class PhraseSuggestionBuilder extends SuggestionBuilder<PhraseSuggestionBuilder> {
private Float maxErrors;
private String separator;
private Float realWordErrorLikelihood;
private Float confidence;
private final Map<String, List<CandidateGenerator>> generators = new HashMap<>();
private Integer gramSize;
private SmoothingModel model;
private Boolean forceUnigrams;
private Integer tokenLimit;
private String preTag;
private String postTag;
private String collateQuery;
private String collateFilter;
private String collatePreference;
private Map<String, Object> collateParams;
private Boolean collatePrune;
    /**
     * Creates a phrase suggestion with the given name, registered under the
     * <tt>phrase</tt> suggester type.
     */
    public PhraseSuggestionBuilder(String name) {
        super(name, "phrase");
    }
    /**
     * Sets the gram size for the n-gram model used for this suggester. The
     * default value is <tt>1</tt> corresponding to <tt>unigrams</tt>. Use
     * <tt>2</tt> for <tt>bigrams</tt> and <tt>3</tt> for <tt>trigrams</tt>.
     */
    public PhraseSuggestionBuilder gramSize(int gramSize) {
        if (gramSize < 1) {
            throw new IllegalArgumentException("gramSize must be >= 1");
        }
        this.gramSize = gramSize;
        return this;
    }
    /**
     * Sets the maximum number of terms that are at most considered to be
     * misspellings in order to form a correction. This method accepts a float
     * value in the range [0..1) as a fraction of the actual query terms, or a
     * number <tt>>=1</tt> as an absolute number of query terms.
     *
     * The default is set to <tt>1.0</tt> which means that only
     * corrections with at most one misspelled term are returned.
     */
    public PhraseSuggestionBuilder maxErrors(Float maxErrors) {
        this.maxErrors = maxErrors;
        return this;
    }
    /**
     * Sets the separator that is used to separate terms in the bigram field. If
     * not set the whitespace character is used as a separator.
     */
    public PhraseSuggestionBuilder separator(String separator) {
        this.separator = separator;
        return this;
    }
    /**
     * Sets the likelihood of a term being misspelled even if the term exists
     * in the dictionary. The default is <tt>0.95</tt>, corresponding to 5% of
     * the real words being misspelled.
     */
    public PhraseSuggestionBuilder realWordErrorLikelihood(Float realWordErrorLikelihood) {
        this.realWordErrorLikelihood = realWordErrorLikelihood;
        return this;
    }
    /**
     * Sets the confidence level for this suggester. The confidence level
     * defines a factor applied to the input phrases score which is used as a
     * threshold for other suggest candidates. Only candidates that score higher
     * than the threshold will be included in the result. For instance a
     * confidence level of <tt>1.0</tt> will only return suggestions that score
     * higher than the input phrase. If set to <tt>0.0</tt> the top N candidates
     * are returned. The default is <tt>1.0</tt>
     */
    public PhraseSuggestionBuilder confidence(Float confidence) {
        this.confidence = confidence;
        return this;
    }
/**
* Adds a {@link CandidateGenerator} to this suggester. The
* {@link CandidateGenerator} is used to draw candidates for each individual
* phrase term before the candidates are scored.
*/
public PhraseSuggestionBuilder addCandidateGenerator(CandidateGenerator generator) {
List<CandidateGenerator> list = this.generators.get(generator.getType());
if (list == null) {
list = new ArrayList<>();
this.generators.put(generator.getType(), list);
}
list.add(generator);
return this;
}
    /**
     * Clear the candidate generators.
     */
    public PhraseSuggestionBuilder clearCandidateGenerators() {
        this.generators.clear();
        return this;
    }
    /**
     * If set to <code>true</code> the phrase suggester will fail if the analyzer only
     * produces ngrams. The default is <code>true</code>.
     */
    public PhraseSuggestionBuilder forceUnigrams(boolean forceUnigrams) {
        this.forceUnigrams = forceUnigrams;
        return this;
    }
    /**
     * Sets an explicit smoothing model used for this suggester. The default is
     * {@link PhraseSuggester#StupidBackoff}.
     */
    public PhraseSuggestionBuilder smoothingModel(SmoothingModel model) {
        this.model = model;
        return this;
    }
    /**
     * Sets the maximum number of suggest text tokens the suggester will
     * consider.
     */
    public PhraseSuggestionBuilder tokenLimit(int tokenLimit) {
        this.tokenLimit = tokenLimit;
        return this;
    }
    /**
     * Setup highlighting for suggestions. If this is called a highlight field
     * is returned with suggestions wrapping changed tokens with preTag and postTag.
     */
    public PhraseSuggestionBuilder highlight(String preTag, String postTag) {
        // Both tags must be set together; innerToXContent relies on this.
        if ((preTag == null) != (postTag == null)) {
            throw new IllegalArgumentException("Pre and post tag must both be null or both not be null.");
        }
        this.preTag = preTag;
        this.postTag = postTag;
        return this;
    }
    /**
     * Sets a query used for filtering out suggested phrases (collation).
     */
    public PhraseSuggestionBuilder collateQuery(String collateQuery) {
        this.collateQuery = collateQuery;
        return this;
    }
    /**
     * Sets a filter used for filtering out suggested phrases (collation).
     */
    public PhraseSuggestionBuilder collateFilter(String collateFilter) {
        this.collateFilter = collateFilter;
        return this;
    }
    /**
     * Sets routing preferences for executing filter query (collation).
     */
    public PhraseSuggestionBuilder collatePreference(String collatePreference) {
        this.collatePreference = collatePreference;
        return this;
    }
    /**
     * Sets additional params for collate script
     */
    public PhraseSuggestionBuilder collateParams(Map<String, Object> collateParams) {
        this.collateParams = collateParams;
        return this;
    }
    /**
     * Sets whether to prune suggestions after collation
     */
    public PhraseSuggestionBuilder collatePrune(boolean collatePrune) {
        this.collatePrune = collatePrune;
        return this;
    }
    /**
     * Serializes every explicitly-set option of this suggestion into
     * {@code builder}. Unset (null) fields are omitted so server-side
     * defaults apply.
     */
    @Override
    public XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException {
        if (realWordErrorLikelihood != null) {
            builder.field("real_word_error_likelihood", realWordErrorLikelihood);
        }
        if (confidence != null) {
            builder.field("confidence", confidence);
        }
        if (separator != null) {
            builder.field("separator", separator);
        }
        if (maxErrors != null) {
            builder.field("max_errors", maxErrors);
        }
        if (gramSize != null) {
            builder.field("gram_size", gramSize);
        }
        if (forceUnigrams != null) {
            builder.field("force_unigrams", forceUnigrams);
        }
        if (tokenLimit != null) {
            builder.field("token_limit", tokenLimit);
        }
        // One array per generator type, e.g. "direct_generator": [ ... ].
        if (!generators.isEmpty()) {
            Set<Entry<String, List<CandidateGenerator>>> entrySet = generators.entrySet();
            for (Entry<String, List<CandidateGenerator>> entry : entrySet) {
                builder.startArray(entry.getKey());
                for (CandidateGenerator generator : entry.getValue()) {
                    generator.toXContent(builder, params);
                }
                builder.endArray();
            }
        }
        if (model != null) {
            builder.startObject("smoothing");
            model.toXContent(builder, params);
            builder.endObject();
        }
        if (preTag != null) {
            builder.startObject("highlight");
            builder.field("pre_tag", preTag);
            // postTag is non-null here: highlight() enforces that both tags
            // are set together.
            builder.field("post_tag", postTag);
            builder.endObject();
        }
        // NOTE(review): preference/params/prune are only emitted when a
        // collate query or filter is present -- presumably intended, since
        // they modify the collate step; confirm.
        if (collateQuery != null || collateFilter != null) {
            builder.startObject("collate");
            if (collateQuery != null) {
                builder.field("query", collateQuery);
            }
            if (collateFilter != null) {
                builder.field("filter", collateFilter);
            }
            if (collatePreference != null) {
                builder.field("preference", collatePreference);
            }
            if (collateParams != null) {
                builder.field("params", collateParams);
            }
            if (collatePrune != null) {
                builder.field("prune", collatePrune.booleanValue());
            }
            builder.endObject();
        }
        return builder;
    }
    /**
     * Creates a new {@link DirectCandidateGenerator}.
     *
     * @param field
     *            the field this candidate generator operates on.
     */
    public static DirectCandidateGenerator candidateGenerator(String field) {
        return new DirectCandidateGenerator(field);
    }
    /**
     * A "stupid-backoff" smoothing model similar to <a
     * href="http://en.wikipedia.org/wiki/Katz's_back-off_model"> Katz's
     * Backoff</a>. This model is used as the default if no model is configured.
     * <p>
     * See <a
     * href="http://en.wikipedia.org/wiki/N-gram#Smoothing_techniques">N-Gram
     * Smoothing</a> for details.
     * </p>
     */
    public static final class StupidBackoff extends SmoothingModel {
        private final double discount;
        /**
         * Creates a Stupid-Backoff smoothing model.
         *
         * @param discount
         *            the discount given to lower order ngrams if the higher order ngram doesn't exist
         */
        public StupidBackoff(double discount) {
            super("stupid_backoff");
            this.discount = discount;
        }
        @Override
        protected XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException {
            builder.field("discount", discount);
            return builder;
        }
    }
    /**
     * An <a href="http://en.wikipedia.org/wiki/Additive_smoothing">additive
     * smoothing</a> model.
     * <p>
     * See <a
     * href="http://en.wikipedia.org/wiki/N-gram#Smoothing_techniques">N-Gram
     * Smoothing</a> for details.
     * </p>
     */
    public static final class Laplace extends SmoothingModel {
        private final double alpha;
        /**
         * Creates a Laplace smoothing model.
         *
         * @param alpha
         *            the constant added to all counts to balance weights
         */
        public Laplace(double alpha) {
            super("laplace");
            this.alpha = alpha;
        }
        @Override
        protected XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException {
            builder.field("alpha", alpha);
            return builder;
        }
    }
    /**
     * Base class for smoothing models. Serializes as an object keyed by the
     * model type, wrapping the model-specific parameters emitted by
     * {@link #innerToXContent}.
     */
    public static abstract class SmoothingModel implements ToXContent {
        private final String type;
        protected SmoothingModel(String type) {
            this.type = type;
        }
        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.startObject(type);
            innerToXContent(builder,params);
            builder.endObject();
            return builder;
        }
        /** Emits the model-specific parameters inside the type object. */
        protected abstract XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException;
    }
    /**
     * Linear interpolation smoothing model.
     * <p>
     * See <a
     * href="http://en.wikipedia.org/wiki/N-gram#Smoothing_techniques">N-Gram
     * Smoothing</a> for details.
     * </p>
     */
    public static final class LinearInterpolation extends SmoothingModel {
        private final double trigramLambda;
        private final double bigramLambda;
        private final double unigramLambda;
        /**
         * Creates a linear interpolation smoothing model.
         *
         * Note: the lambdas must sum up to one.
         *
         * @param trigramLambda
         *            the trigram lambda
         * @param bigramLambda
         *            the bigram lambda
         * @param unigramLambda
         *            the unigram lambda
         */
        public LinearInterpolation(double trigramLambda, double bigramLambda, double unigramLambda) {
            super("linear");
            // NOTE(review): the sum-to-one constraint is documented but not
            // validated here -- presumably checked server-side; confirm.
            this.trigramLambda = trigramLambda;
            this.bigramLambda = bigramLambda;
            this.unigramLambda = unigramLambda;
        }
        @Override
        protected XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException {
            builder.field("trigram_lambda", trigramLambda);
            builder.field("bigram_lambda", bigramLambda);
            builder.field("unigram_lambda", unigramLambda);
            return builder;
        }
    }
    /**
     * {@link CandidateGenerator} base class. The type string keys the
     * generator group under which instances are serialized.
     */
    public static abstract class CandidateGenerator implements ToXContent {
        private final String type;
        public CandidateGenerator(String type) {
            this.type = type;
        }
        public String getType() {
            return type;
        }
    }
    /**
     * A candidate generator that draws spelling-correction candidates directly
     * from the terms of a single index field. All options are optional; only
     * explicitly-set ones are serialized.
     */
    public static final class DirectCandidateGenerator extends CandidateGenerator {
        private final String field;
        private String preFilter;
        private String postFilter;
        private String suggestMode;
        private Float accuracy;
        private Integer size;
        private String sort;
        private String stringDistance;
        private Integer maxEdits;
        private Integer maxInspections;
        private Float maxTermFreq;
        private Integer prefixLength;
        private Integer minWordLength;
        private Float minDocFreq;
        /**
         * Sets from what field to fetch the candidate suggestions from. This is
         * a required option and needs to be set via this setter or
         * {@link org.elasticsearch.search.suggest.SuggestBuilder.TermSuggestionBuilder#setField(String)}
         * method
         */
        public DirectCandidateGenerator(String field) {
            super("direct_generator");
            this.field = field;
        }
        /**
         * The global suggest mode controls what suggested terms are included or
         * controls for what suggest text tokens, terms should be suggested for.
         * Three possible values can be specified:
         * <ol>
         * <li><code>missing</code> - Only suggest terms in the suggest text
         * that aren't in the index. This is the default.
         * <li><code>popular</code> - Only suggest terms that occur in more docs
         * than the original suggest text term.
         * <li><code>always</code> - Suggest any matching suggest terms based on
         * tokens in the suggest text.
         * </ol>
         */
        public DirectCandidateGenerator suggestMode(String suggestMode) {
            this.suggestMode = suggestMode;
            return this;
        }
        /**
         * Sets how similar the suggested terms at least need to be compared to
         * the original suggest text tokens. A value between 0 and 1 can be
         * specified. This value will be compared to the string distance result
         * of each candidate spelling correction.
         * <p/>
         * Default is <tt>0.5</tt>
         */
        public DirectCandidateGenerator accuracy(float accuracy) {
            this.accuracy = accuracy;
            return this;
        }
        /**
         * Sets the maximum suggestions to be returned per suggest text term.
         */
        public DirectCandidateGenerator size(int size) {
            if (size <= 0) {
                throw new IllegalArgumentException("Size must be positive");
            }
            this.size = size;
            return this;
        }
        /**
         * Sets how to sort the suggest terms per suggest text token. Two
         * possible values:
         * <ol>
         * <li><code>score</code> - Sort should first be based on score, then
         * document frequency and then the term itself.
         * <li><code>frequency</code> - Sort should first be based on document
         * frequency, then score and then the term itself.
         * </ol>
         * <p/>
         * What the score is depends on the suggester being used.
         */
        public DirectCandidateGenerator sort(String sort) {
            this.sort = sort;
            return this;
        }
        /**
         * Sets what string distance implementation to use for comparing how
         * similar suggested terms are. Four possible values can be specified:
         * <ol>
         * <li><code>internal</code> - This is the default and is based on
         * <code>damerau_levenshtein</code>, but highly optimized for comparing
         * string distance for terms inside the index.
         * <li><code>damerau_levenshtein</code> - String distance algorithm
         * based on Damerau-Levenshtein algorithm.
         * <li><code>levenstein</code> - String distance algorithm based on
         * Levenstein edit distance algorithm.
         * <li><code>jarowinkler</code> - String distance algorithm based on
         * Jaro-Winkler algorithm.
         * <li><code>ngram</code> - String distance algorithm based on character
         * n-grams.
         * </ol>
         */
        public DirectCandidateGenerator stringDistance(String stringDistance) {
            this.stringDistance = stringDistance;
            return this;
        }
        /**
         * Sets the maximum edit distance candidate suggestions can have in
         * order to be considered as a suggestion. Can only be a value between 1
         * and 2. Any other value result in an bad request error being thrown.
         * Defaults to <tt>2</tt>.
         */
        public DirectCandidateGenerator maxEdits(Integer maxEdits) {
            this.maxEdits = maxEdits;
            return this;
        }
        /**
         * A factor that is used to multiply with the size in order to inspect
         * more candidate suggestions. Can improve accuracy at the cost of
         * performance. Defaults to <tt>5</tt>.
         */
        public DirectCandidateGenerator maxInspections(Integer maxInspections) {
            this.maxInspections = maxInspections;
            return this;
        }
        /**
         * Sets a maximum threshold in number of documents a suggest text token
         * can exist in order to be corrected. Can be a relative percentage
         * number (e.g 0.4) or an absolute number to represent document
         * frequencies. If a value higher than 1 is specified then fractional
         * can not be specified. Defaults to <tt>0.01</tt>.
         * <p/>
         * This can be used to exclude high frequency terms from being
         * suggested. High frequency terms are usually spelled correctly; on top
         * of this, it also improves the suggest performance.
         */
        public DirectCandidateGenerator maxTermFreq(float maxTermFreq) {
            this.maxTermFreq = maxTermFreq;
            return this;
        }
        /**
         * Sets the number of minimal prefix characters that must match in order
         * be a candidate suggestion. Defaults to 1. Increasing this number
         * improves suggest performance. Usually misspellings don't occur in the
         * beginning of terms.
         */
        public DirectCandidateGenerator prefixLength(int prefixLength) {
            this.prefixLength = prefixLength;
            return this;
        }
        /**
         * The minimum length a suggest text term must have in order to be
         * corrected. Defaults to <tt>4</tt>.
         */
        public DirectCandidateGenerator minWordLength(int minWordLength) {
            this.minWordLength = minWordLength;
            return this;
        }
        /**
         * Sets a minimal threshold in number of documents a suggested term
         * should appear in. This can be specified as an absolute number or as a
         * relative percentage of number of documents. This can improve quality
         * by only suggesting high frequency terms. Defaults to 0f and is not
         * enabled. If a value higher than 1 is specified then the number cannot
         * be fractional.
         */
        public DirectCandidateGenerator minDocFreq(float minDocFreq) {
            this.minDocFreq = minDocFreq;
            return this;
        }
        /**
         * Sets a filter (analyzer) that is applied to each of the tokens passed to this candidate generator.
         * This filter is applied to the original token before candidates are generated.
         */
        public DirectCandidateGenerator preFilter(String preFilter) {
            this.preFilter = preFilter;
            return this;
        }
        /**
         * Sets a filter (analyzer) that is applied to each of the generated tokens
         * before they are passed to the actual phrase scorer.
         */
        public DirectCandidateGenerator postFilter(String postFilter) {
            this.postFilter = postFilter;
            return this;
        }
        /**
         * Serializes the generator as an object, emitting only the options
         * that were explicitly set.
         */
        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.startObject();
            if (field != null) {
                builder.field("field", field);
            }
            if (suggestMode != null) {
                builder.field("suggest_mode", suggestMode);
            }
            if (accuracy != null) {
                builder.field("accuracy", accuracy);
            }
            if (size != null) {
                builder.field("size", size);
            }
            if (sort != null) {
                builder.field("sort", sort);
            }
            if (stringDistance != null) {
                builder.field("string_distance", stringDistance);
            }
            if (maxEdits != null) {
                builder.field("max_edits", maxEdits);
            }
            if (maxInspections != null) {
                builder.field("max_inspections", maxInspections);
            }
            if (maxTermFreq != null) {
                builder.field("max_term_freq", maxTermFreq);
            }
            if (prefixLength != null) {
                builder.field("prefix_length", prefixLength);
            }
            if (minWordLength != null) {
                builder.field("min_word_length", minWordLength);
            }
            if (minDocFreq != null) {
                builder.field("min_doc_freq", minDocFreq);
            }
            if (preFilter != null) {
                builder.field("pre_filter", preFilter);
            }
            if (postFilter != null) {
                builder.field("post_filter", postFilter);
            }
            builder.endObject();
            return builder;
        }
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. The ASF
* licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.hadoop.hdfs.tools;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.AddErasureCodingPolicyResponse;
import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyInfo;
import org.apache.hadoop.hdfs.util.ECPolicyLoader;
import org.apache.hadoop.io.erasurecode.ErasureCodeConstants;
import org.apache.hadoop.tools.TableListing;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
/**
* CLI for the erasure code encoding operations.
*/
@InterfaceAudience.Private
public class ECAdmin extends Configured implements Tool {
public static final String NAME = "ec";
  /**
   * CLI entry point: runs the requested erasure coding subcommand and exits
   * the JVM with the subcommand's status code.
   */
  public static void main(String[] args) throws Exception {
    final ECAdmin admin = new ECAdmin(new Configuration());
    int res = ToolRunner.run(admin, args);
    System.exit(res);
  }
  /** Creates the tool with the given Hadoop configuration. */
  public ECAdmin(Configuration conf) {
    super(conf);
  }
@Override
public int run(String[] args) throws Exception {
if (args.length == 0) {
AdminHelper.printUsage(false, NAME, COMMANDS);
ToolRunner.printGenericCommandUsage(System.err);
return 1;
}
final AdminHelper.Command command = AdminHelper.determineCommand(args[0],
COMMANDS);
if (command == null) {
System.err.println("Can't understand command '" + args[0] + "'");
if (!args[0].startsWith("-")) {
System.err.println("Command names must start with dashes.");
}
AdminHelper.printUsage(false, NAME, COMMANDS);
ToolRunner.printGenericCommandUsage(System.err);
return 1;
}
final List<String> argsList = new LinkedList<>();
argsList.addAll(Arrays.asList(args).subList(1, args.length));
try {
return command.run(getConf(), argsList);
} catch (IllegalArgumentException e) {
System.err.println(AdminHelper.prettifyException(e));
return -1;
}
}
/** Command to list the set of enabled erasure coding policies. */
private static class ListECPoliciesCommand
implements AdminHelper.Command {
@Override
public String getName() {
return "-listPolicies";
}
@Override
public String getShortUsage() {
return "[" + getName() + "]\n";
}
@Override
public String getLongUsage() {
return getShortUsage() + "\n" +
"Get the list of all erasure coding policies.\n";
}
@Override
public int run(Configuration conf, List<String> args) throws IOException {
if (args.size() > 0) {
System.err.println(getName() + ": Too many arguments");
return 1;
}
final DistributedFileSystem dfs = AdminHelper.getDFS(conf);
try {
final Collection<ErasureCodingPolicyInfo> policies =
dfs.getAllErasureCodingPolicies();
if (policies.isEmpty()) {
System.out.println("There is no erasure coding policies in the " +
"cluster.");
} else {
System.out.println("Erasure Coding Policies:");
for (ErasureCodingPolicyInfo policy : policies) {
if (policy != null) {
System.out.println(policy);
}
}
}
} catch (IOException e) {
System.err.println(AdminHelper.prettifyException(e));
return 2;
}
return 0;
}
}
/** Command to add a set of erasure coding policies. */
private static class AddECPoliciesCommand
implements AdminHelper.Command {
@Override
public String getName() {
return "-addPolicies";
}
@Override
public String getShortUsage() {
return "[" + getName() + " -policyFile <file>]\n";
}
@Override
public String getLongUsage() {
final TableListing listing = AdminHelper.getOptionDescriptionListing();
listing.addRow("<file>",
"The path of the xml file which defines the EC policies to add");
return getShortUsage() + "\n" +
"Add a list of erasure coding policies.\n" +
listing.toString();
}
@Override
public int run(Configuration conf, List<String> args) throws IOException {
final String filePath =
StringUtils.popOptionWithArgument("-policyFile", args);
if (filePath == null) {
System.err.println("Please specify the path with -policyFile.\nUsage: "
+ getLongUsage());
return 1;
}
if (args.size() > 0) {
System.err.println(getName() + ": Too many arguments");
return 1;
}
final DistributedFileSystem dfs = AdminHelper.getDFS(conf);
try {
List<ErasureCodingPolicy> policies =
new ECPolicyLoader().loadPolicy(filePath);
if (policies.size() > 0) {
AddErasureCodingPolicyResponse[] responses =
dfs.addErasureCodingPolicies(
policies.toArray(new ErasureCodingPolicy[policies.size()]));
for (AddErasureCodingPolicyResponse response : responses) {
System.out.println(response);
}
} else {
System.out.println("No EC policy parsed out from " + filePath);
}
} catch (IOException e) {
System.err.println(AdminHelper.prettifyException(e));
return 2;
}
return 0;
}
}
  /** Command to get the erasure coding policy for a file or directory. */
  private static class GetECPolicyCommand implements AdminHelper.Command {
    @Override
    public String getName() {
      return "-getPolicy";
    }
    @Override
    public String getShortUsage() {
      return "[" + getName() + " -path <path>]\n";
    }
    @Override
    public String getLongUsage() {
      final TableListing listing = AdminHelper.getOptionDescriptionListing();
      listing.addRow("<path>",
          "The path of the file/directory for getting the erasure coding " +
              "policy");
      return getShortUsage() + "\n" +
          "Get the erasure coding policy of a file/directory.\n\n" +
          listing.toString();
    }
    // Prints the EC policy name for -path, or a note when none is set.
    // Returns 0 on success, 1 on usage error, 2 on failure.
    @Override
    public int run(Configuration conf, List<String> args) throws IOException {
      final String path = StringUtils.popOptionWithArgument("-path", args);
      if (path == null) {
        System.err.println("Please specify the path with -path.\nUsage: " +
            getLongUsage());
        return 1;
      }
      if (args.size() > 0) {
        System.err.println(getName() + ": Too many arguments");
        return 1;
      }
      final Path p = new Path(path);
      final DistributedFileSystem dfs = AdminHelper.getDFS(p.toUri(), conf);
      try {
        ErasureCodingPolicy ecPolicy = dfs.getErasureCodingPolicy(p);
        if (ecPolicy != null) {
          System.out.println(ecPolicy.getName());
        } else {
          System.out.println("The erasure coding policy of " + path + " is " +
              "unspecified");
        }
      } catch (Exception e) {
        // NOTE(review): catches Exception broadly, unlike the sibling
        // commands which catch IOException -- confirm whether
        // getErasureCodingPolicy can throw unchecked exceptions here.
        System.err.println(AdminHelper.prettifyException(e));
        return 2;
      }
      return 0;
    }
  }
/** Command to remove an erasure coding policy. */
private static class RemoveECPolicyCommand implements AdminHelper.Command {
@Override
public String getName() {
return "-removePolicy";
}
@Override
public String getShortUsage() {
return "[" + getName() + " -policy <policy>]\n";
}
@Override
public String getLongUsage() {
TableListing listing = AdminHelper.getOptionDescriptionListing();
listing.addRow("<policy>", "The name of the erasure coding policy");
return getShortUsage() + "\n" +
"Remove an erasure coding policy.\n" +
listing.toString();
}
@Override
public int run(Configuration conf, List<String> args) throws IOException {
final String ecPolicyName = StringUtils.popOptionWithArgument(
"-policy", args);
if (ecPolicyName == null) {
System.err.println("Please specify the policy name.\nUsage: " +
getLongUsage());
return 1;
}
if (args.size() > 0) {
System.err.println(getName() + ": Too many arguments");
return 1;
}
final DistributedFileSystem dfs = AdminHelper.getDFS(conf);
try {
dfs.removeErasureCodingPolicy(ecPolicyName);
System.out.println("Erasure coding policy " + ecPolicyName +
"is removed");
} catch (IOException e) {
System.err.println(AdminHelper.prettifyException(e));
return 2;
}
return 0;
}
}
/** Command to set the erasure coding policy to a file/directory. */
private static class SetECPolicyCommand implements AdminHelper.Command {
@Override
public String getName() {
return "-setPolicy";
}
@Override
public String getShortUsage() {
return "[" + getName() +
" -path <path> [-policy <policy>] [-replicate]]\n";
}
@Override
public String getLongUsage() {
TableListing listing = AdminHelper.getOptionDescriptionListing();
listing.addRow("<path>", "The path of the file/directory to set " +
"the erasure coding policy");
listing.addRow("<policy>", "The name of the erasure coding policy");
listing.addRow("-replicate",
"force 3x replication scheme on the directory");
return getShortUsage() + "\n" +
"Set the erasure coding policy for a file/directory.\n\n" +
listing.toString() + "\n" +
"-replicate and -policy are optional arguments. They cannot been " +
"used at the same time";
}
@Override
public int run(Configuration conf, List<String> args) throws IOException {
final String path = StringUtils.popOptionWithArgument("-path", args);
if (path == null) {
System.err.println("Please specify the path for setting the EC " +
"policy.\nUsage: " + getLongUsage());
return 1;
}
String ecPolicyName = StringUtils.popOptionWithArgument("-policy",
args);
final boolean replicate = StringUtils.popOption("-replicate", args);
if (args.size() > 0) {
System.err.println(getName() + ": Too many arguments");
return 1;
}
if (replicate) {
if (ecPolicyName != null) {
System.err.println(getName() +
": -replicate and -policy cannot been used at the same time");
return 2;
}
ecPolicyName = ErasureCodeConstants.REPLICATION_POLICY_NAME;
}
final Path p = new Path(path);
final DistributedFileSystem dfs = AdminHelper.getDFS(p.toUri(), conf);
try {
dfs.setErasureCodingPolicy(p, ecPolicyName);
if (ecPolicyName == null){
ecPolicyName = "default";
}
System.out.println("Set " + ecPolicyName + " erasure coding policy on" +
" " + path);
RemoteIterator<FileStatus> dirIt = dfs.listStatusIterator(p);
if (dirIt.hasNext()) {
System.out.println("Warning: setting erasure coding policy on a " +
"non-empty directory will not automatically convert existing " +
"files to " + ecPolicyName + " erasure coding policy");
}
} catch (Exception e) {
System.err.println(AdminHelper.prettifyException(e));
return 3;
}
return 0;
}
}
/** Command to unset the erasure coding policy set for a file/directory. */
private static class UnsetECPolicyCommand
implements AdminHelper.Command {
@Override
public String getName() {
return "-unsetPolicy";
}
@Override
public String getShortUsage() {
return "[" + getName() + " -path <path>]\n";
}
@Override
public String getLongUsage() {
TableListing listing = AdminHelper.getOptionDescriptionListing();
listing.addRow("<path>", "The path of the directory "
+ "from which the erasure coding policy will be unset.");
return getShortUsage() + "\n"
+ "Unset the erasure coding policy for a directory.\n\n"
+ listing.toString();
}
@Override
public int run(Configuration conf, List<String> args) throws IOException {
final String path = StringUtils.popOptionWithArgument("-path", args);
if (path == null) {
System.err.println("Please specify a path.\nUsage: " + getLongUsage());
return 1;
}
if (args.size() > 0) {
System.err.println(getName() + ": Too many arguments");
return 1;
}
final Path p = new Path(path);
final DistributedFileSystem dfs = AdminHelper.getDFS(p.toUri(), conf);
try {
dfs.unsetErasureCodingPolicy(p);
System.out.println("Unset erasure coding policy from " + path);
RemoteIterator<FileStatus> dirIt = dfs.listStatusIterator(p);
if (dirIt.hasNext()) {
System.out.println("Warning: unsetting erasure coding policy on a " +
"non-empty directory will not automatically convert existing" +
" files to replicated data.");
}
} catch (Exception e) {
System.err.println(AdminHelper.prettifyException(e));
return 2;
}
return 0;
}
}
/** Command to list the set of supported erasure coding codecs and coders. */
private static class ListECCodecsCommand
implements AdminHelper.Command {
@Override
public String getName() {
return "-listCodecs";
}
@Override
public String getShortUsage() {
return "[" + getName() + "]\n";
}
@Override
public String getLongUsage() {
return getShortUsage() + "\n" +
"Get the list of supported erasure coding codecs and coders.\n" +
"A coder is an implementation of a codec. A codec can have " +
"different implementations, thus different coders.\n" +
"The coders for a codec are listed in a fall back order.\n";
}
@Override
public int run(Configuration conf, List<String> args) throws IOException {
if (args.size() > 0) {
System.err.println(getName() + ": Too many arguments");
return 1;
}
final DistributedFileSystem dfs = AdminHelper.getDFS(conf);
try {
Map<String, String> codecs =
dfs.getAllErasureCodingCodecs();
if (codecs.isEmpty()) {
System.out.println("No erasure coding codecs are supported on the " +
"cluster.");
} else {
System.out.println("Erasure Coding Codecs: Codec [Coder List]");
for (Map.Entry<String, String> codec : codecs.entrySet()) {
if (codec != null) {
System.out.println("\t" + codec.getKey().toUpperCase() + " ["
+ codec.getValue().toUpperCase() +"]");
}
}
}
} catch (IOException e) {
System.err.println(AdminHelper.prettifyException(e));
return 2;
}
return 0;
}
}
/** Command to enable an existing erasure coding policy. */
private static class EnableECPolicyCommand implements AdminHelper.Command {
@Override
public String getName() {
return "-enablePolicy";
}
@Override
public String getShortUsage() {
return "[" + getName() + " -policy <policy>]\n";
}
@Override
public String getLongUsage() {
TableListing listing = AdminHelper.getOptionDescriptionListing();
listing.addRow("<policy>", "The name of the erasure coding policy");
return getShortUsage() + "\n" +
"Enable the erasure coding policy.\n\n" +
listing.toString();
}
@Override
public int run(Configuration conf, List<String> args) throws IOException {
final String ecPolicyName = StringUtils.popOptionWithArgument("-policy",
args);
if (ecPolicyName == null) {
System.err.println("Please specify the policy name.\nUsage: " +
getLongUsage());
return 1;
}
if (args.size() > 0) {
System.err.println(getName() + ": Too many arguments");
return 1;
}
final DistributedFileSystem dfs = AdminHelper.getDFS(conf);
try {
dfs.enableErasureCodingPolicy(ecPolicyName);
System.out.println("Erasure coding policy " + ecPolicyName +
" is enabled");
} catch (IOException e) {
System.err.println(AdminHelper.prettifyException(e));
return 2;
}
return 0;
}
}
  /** Command to disable an existing erasure coding policy. */
  private static class DisableECPolicyCommand implements AdminHelper.Command {
    @Override
    public String getName() {
      return "-disablePolicy";
    }
    @Override
    public String getShortUsage() {
      return "[" + getName() + " -policy <policy>]\n";
    }
    @Override
    public String getLongUsage() {
      TableListing listing = AdminHelper.getOptionDescriptionListing();
      listing.addRow("<policy>", "The name of the erasure coding policy");
      return getShortUsage() + "\n" +
          "Disable the erasure coding policy.\n\n" +
          listing.toString();
    }
    /**
     * Disables the named EC policy on the cluster.
     *
     * @return 0 on success, 1 on bad arguments, 2 when the RPC fails.
     */
    @Override
    public int run(Configuration conf, List<String> args) throws IOException {
      final String ecPolicyName = StringUtils.popOptionWithArgument("-policy",
          args);
      if (ecPolicyName == null) {
        System.err.println("Please specify the policy name.\nUsage: " +
            getLongUsage());
        return 1;
      }
      if (args.size() > 0) {
        System.err.println(getName() + ": Too many arguments");
        return 1;
      }
      final DistributedFileSystem dfs = AdminHelper.getDFS(conf);
      try {
        dfs.disableErasureCodingPolicy(ecPolicyName);
        System.out.println("Erasure coding policy " + ecPolicyName +
            " is disabled");
      } catch (IOException e) {
        System.err.println(AdminHelper.prettifyException(e));
        return 2;
      }
      return 0;
    }
  }
  // Registry of every erasure-coding admin subcommand defined in this file.
  // NOTE(review): presumably consumed by the CLI driver that dispatches on
  // Command.getName() — confirm against the enclosing class's main/run.
  private static final AdminHelper.Command[] COMMANDS = {
      new ListECPoliciesCommand(),
      new AddECPoliciesCommand(),
      new GetECPolicyCommand(),
      new RemoveECPolicyCommand(),
      new SetECPolicyCommand(),
      new UnsetECPolicyCommand(),
      new ListECCodecsCommand(),
      new EnableECPolicyCommand(),
      new DisableECPolicyCommand()
  };
}
| |
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/automl/v1beta1/service.proto
package com.google.cloud.automl.v1beta1;
/**
*
*
* <pre>
* Request message for [AutoMl.GetColumnSpec][google.cloud.automl.v1beta1.AutoMl.GetColumnSpec].
* </pre>
*
* Protobuf type {@code google.cloud.automl.v1beta1.GetColumnSpecRequest}
*/
public final class GetColumnSpecRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.automl.v1beta1.GetColumnSpecRequest)
GetColumnSpecRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use GetColumnSpecRequest.newBuilder() to construct.
private GetColumnSpecRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private GetColumnSpecRequest() {
name_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new GetColumnSpecRequest();
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
return this.unknownFields;
}
  // Wire-format parsing constructor used by the generated PARSER: reads
  // tag/value pairs off the stream until end of message, preserving any
  // unrecognized fields. (Generated code — do not hand-modify logic.)
  private GetColumnSpecRequest(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            // Tag 0 marks the end of the message.
            done = true;
            break;
          case 10:
            {
              // Tag 10 = field 1 (name), wire type 2: length-delimited
              // string that must be valid UTF-8.
              java.lang.String s = input.readStringRequireUtf8();
              name_ = s;
              break;
            }
          case 18:
            {
              // Tag 18 = field 2 (field_mask), wire type 2. If the field
              // occurs more than once on the wire, merge the new value into
              // the previously parsed one.
              com.google.protobuf.FieldMask.Builder subBuilder = null;
              if (fieldMask_ != null) {
                subBuilder = fieldMask_.toBuilder();
              }
              fieldMask_ =
                  input.readMessage(com.google.protobuf.FieldMask.parser(), extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(fieldMask_);
                fieldMask_ = subBuilder.buildPartial();
              }
              break;
            }
          default:
            {
              // Unknown field: retain it in unknownFields; a false return
              // (end-group/invalid tag) terminates parsing.
              if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
    } finally {
      // Always freeze whatever was parsed, even on failure, so the partial
      // message attached to the exception is consistent.
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.automl.v1beta1.AutoMlProto
.internal_static_google_cloud_automl_v1beta1_GetColumnSpecRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.automl.v1beta1.AutoMlProto
.internal_static_google_cloud_automl_v1beta1_GetColumnSpecRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.automl.v1beta1.GetColumnSpecRequest.class,
com.google.cloud.automl.v1beta1.GetColumnSpecRequest.Builder.class);
}
public static final int NAME_FIELD_NUMBER = 1;
private volatile java.lang.Object name_;
/**
*
*
* <pre>
* Required. The resource name of the column spec to retrieve.
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The name.
*/
  @java.lang.Override
  public java.lang.String getName() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // name_ may hold either a String or a ByteString; on first String
      // access, decode the bytes as UTF-8 and cache the result back.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      name_ = s;
      return s;
    }
  }
/**
*
*
* <pre>
* Required. The resource name of the column spec to retrieve.
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for name.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int FIELD_MASK_FIELD_NUMBER = 2;
private com.google.protobuf.FieldMask fieldMask_;
/**
*
*
* <pre>
* Mask specifying which fields to read.
* </pre>
*
* <code>.google.protobuf.FieldMask field_mask = 2;</code>
*
* @return Whether the fieldMask field is set.
*/
@java.lang.Override
public boolean hasFieldMask() {
return fieldMask_ != null;
}
/**
*
*
* <pre>
* Mask specifying which fields to read.
* </pre>
*
* <code>.google.protobuf.FieldMask field_mask = 2;</code>
*
* @return The fieldMask.
*/
@java.lang.Override
public com.google.protobuf.FieldMask getFieldMask() {
return fieldMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : fieldMask_;
}
/**
*
*
* <pre>
* Mask specifying which fields to read.
* </pre>
*
* <code>.google.protobuf.FieldMask field_mask = 2;</code>
*/
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getFieldMaskOrBuilder() {
return getFieldMask();
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Serialize fields in field-number order; proto3 defaults (empty name,
    // unset field_mask) are omitted from the wire.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
    }
    if (fieldMask_ != null) {
      output.writeMessage(2, getFieldMask());
    }
    unknownFields.writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Memoized: -1 means "not yet computed"; safe because the message is
    // immutable after construction.
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
    }
    if (fieldMask_ != null) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getFieldMask());
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Structural equality: name, field_mask presence + value, and the
  // retained unknown fields must all match.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.automl.v1beta1.GetColumnSpecRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.automl.v1beta1.GetColumnSpecRequest other =
        (com.google.cloud.automl.v1beta1.GetColumnSpecRequest) obj;
    if (!getName().equals(other.getName())) return false;
    if (hasFieldMask() != other.hasFieldMask()) return false;
    if (hasFieldMask()) {
      if (!getFieldMask().equals(other.getFieldMask())) return false;
    }
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }
  // Hash over the same components as equals(); memoized with 0 as the
  // "not yet computed" sentinel.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + NAME_FIELD_NUMBER;
    hash = (53 * hash) + getName().hashCode();
    if (hasFieldMask()) {
      hash = (37 * hash) + FIELD_MASK_FIELD_NUMBER;
      hash = (53 * hash) + getFieldMask().hashCode();
    }
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.cloud.automl.v1beta1.GetColumnSpecRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.automl.v1beta1.GetColumnSpecRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.automl.v1beta1.GetColumnSpecRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.automl.v1beta1.GetColumnSpecRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.automl.v1beta1.GetColumnSpecRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.automl.v1beta1.GetColumnSpecRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.automl.v1beta1.GetColumnSpecRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.automl.v1beta1.GetColumnSpecRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.automl.v1beta1.GetColumnSpecRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.automl.v1beta1.GetColumnSpecRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.automl.v1beta1.GetColumnSpecRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.automl.v1beta1.GetColumnSpecRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.automl.v1beta1.GetColumnSpecRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for [AutoMl.GetColumnSpec][google.cloud.automl.v1beta1.AutoMl.GetColumnSpec].
* </pre>
*
* Protobuf type {@code google.cloud.automl.v1beta1.GetColumnSpecRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.automl.v1beta1.GetColumnSpecRequest)
com.google.cloud.automl.v1beta1.GetColumnSpecRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.automl.v1beta1.AutoMlProto
.internal_static_google_cloud_automl_v1beta1_GetColumnSpecRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.automl.v1beta1.AutoMlProto
.internal_static_google_cloud_automl_v1beta1_GetColumnSpecRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.automl.v1beta1.GetColumnSpecRequest.class,
com.google.cloud.automl.v1beta1.GetColumnSpecRequest.Builder.class);
}
// Construct using com.google.cloud.automl.v1beta1.GetColumnSpecRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
}
@java.lang.Override
public Builder clear() {
super.clear();
name_ = "";
if (fieldMaskBuilder_ == null) {
fieldMask_ = null;
} else {
fieldMask_ = null;
fieldMaskBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.automl.v1beta1.AutoMlProto
.internal_static_google_cloud_automl_v1beta1_GetColumnSpecRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.automl.v1beta1.GetColumnSpecRequest getDefaultInstanceForType() {
return com.google.cloud.automl.v1beta1.GetColumnSpecRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.automl.v1beta1.GetColumnSpecRequest build() {
com.google.cloud.automl.v1beta1.GetColumnSpecRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.automl.v1beta1.GetColumnSpecRequest buildPartial() {
com.google.cloud.automl.v1beta1.GetColumnSpecRequest result =
new com.google.cloud.automl.v1beta1.GetColumnSpecRequest(this);
result.name_ = name_;
if (fieldMaskBuilder_ == null) {
result.fieldMask_ = fieldMask_;
} else {
result.fieldMask_ = fieldMaskBuilder_.build();
}
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.automl.v1beta1.GetColumnSpecRequest) {
return mergeFrom((com.google.cloud.automl.v1beta1.GetColumnSpecRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.automl.v1beta1.GetColumnSpecRequest other) {
if (other == com.google.cloud.automl.v1beta1.GetColumnSpecRequest.getDefaultInstance())
return this;
if (!other.getName().isEmpty()) {
name_ = other.name_;
onChanged();
}
if (other.hasFieldMask()) {
mergeFieldMask(other.getFieldMask());
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.cloud.automl.v1beta1.GetColumnSpecRequest parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage =
(com.google.cloud.automl.v1beta1.GetColumnSpecRequest) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private java.lang.Object name_ = "";
/**
*
*
* <pre>
* Required. The resource name of the column spec to retrieve.
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The name.
*/
public java.lang.String getName() {
java.lang.Object ref = name_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The resource name of the column spec to retrieve.
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for name.
*/
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The resource name of the column spec to retrieve.
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The name to set.
* @return This builder for chaining.
*/
public Builder setName(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
name_ = value;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The resource name of the column spec to retrieve.
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearName() {
name_ = getDefaultInstance().getName();
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The resource name of the column spec to retrieve.
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for name to set.
* @return This builder for chaining.
*/
public Builder setNameBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
name_ = value;
onChanged();
return this;
}
private com.google.protobuf.FieldMask fieldMask_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
fieldMaskBuilder_;
/**
*
*
* <pre>
* Mask specifying which fields to read.
* </pre>
*
* <code>.google.protobuf.FieldMask field_mask = 2;</code>
*
* @return Whether the fieldMask field is set.
*/
public boolean hasFieldMask() {
return fieldMaskBuilder_ != null || fieldMask_ != null;
}
/**
*
*
* <pre>
* Mask specifying which fields to read.
* </pre>
*
* <code>.google.protobuf.FieldMask field_mask = 2;</code>
*
* @return The fieldMask.
*/
public com.google.protobuf.FieldMask getFieldMask() {
if (fieldMaskBuilder_ == null) {
return fieldMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : fieldMask_;
} else {
return fieldMaskBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Mask specifying which fields to read.
* </pre>
*
* <code>.google.protobuf.FieldMask field_mask = 2;</code>
*/
public Builder setFieldMask(com.google.protobuf.FieldMask value) {
if (fieldMaskBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
fieldMask_ = value;
onChanged();
} else {
fieldMaskBuilder_.setMessage(value);
}
return this;
}
/**
*
*
* <pre>
* Mask specifying which fields to read.
* </pre>
*
* <code>.google.protobuf.FieldMask field_mask = 2;</code>
*/
public Builder setFieldMask(com.google.protobuf.FieldMask.Builder builderForValue) {
if (fieldMaskBuilder_ == null) {
fieldMask_ = builderForValue.build();
onChanged();
} else {
fieldMaskBuilder_.setMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Mask specifying which fields to read.
* </pre>
*
* <code>.google.protobuf.FieldMask field_mask = 2;</code>
*/
public Builder mergeFieldMask(com.google.protobuf.FieldMask value) {
if (fieldMaskBuilder_ == null) {
if (fieldMask_ != null) {
fieldMask_ =
com.google.protobuf.FieldMask.newBuilder(fieldMask_).mergeFrom(value).buildPartial();
} else {
fieldMask_ = value;
}
onChanged();
} else {
fieldMaskBuilder_.mergeFrom(value);
}
return this;
}
/**
*
*
* <pre>
* Mask specifying which fields to read.
* </pre>
*
* <code>.google.protobuf.FieldMask field_mask = 2;</code>
*/
public Builder clearFieldMask() {
if (fieldMaskBuilder_ == null) {
fieldMask_ = null;
onChanged();
} else {
fieldMask_ = null;
fieldMaskBuilder_ = null;
}
return this;
}
/**
 *
 *
 * <pre>
 * Mask specifying which fields to read.
 * </pre>
 *
 * <code>.google.protobuf.FieldMask field_mask = 2;</code>
 */
public com.google.protobuf.FieldMask.Builder getFieldMaskBuilder() {
  // Handing out a mutable builder may change the field, so mark the message dirty up front.
  onChanged();
  return getFieldMaskFieldBuilder().getBuilder();
}
/**
 *
 *
 * <pre>
 * Mask specifying which fields to read.
 * </pre>
 *
 * <code>.google.protobuf.FieldMask field_mask = 2;</code>
 */
public com.google.protobuf.FieldMaskOrBuilder getFieldMaskOrBuilder() {
  // Read-only view: prefer the live builder if present, otherwise the stored message/default.
  if (fieldMaskBuilder_ != null) {
    return fieldMaskBuilder_.getMessageOrBuilder();
  } else {
    return fieldMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : fieldMask_;
  }
}
/**
 *
 *
 * <pre>
 * Mask specifying which fields to read.
 * </pre>
 *
 * <code>.google.protobuf.FieldMask field_mask = 2;</code>
 */
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.protobuf.FieldMask,
        com.google.protobuf.FieldMask.Builder,
        com.google.protobuf.FieldMaskOrBuilder>
    getFieldMaskFieldBuilder() {
  // Lazily create the nested builder; once created it becomes the single source of
  // truth for the field, so the plain fieldMask_ reference is released.
  if (fieldMaskBuilder_ == null) {
    fieldMaskBuilder_ =
        new com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>(
            getFieldMask(), getParentForChildren(), isClean());
    fieldMask_ = null;
  }
  return fieldMaskBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  // Generated pass-through to the base builder's unknown-field handling.
  return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  // Generated pass-through to the base builder's unknown-field handling.
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.automl.v1beta1.GetColumnSpecRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.automl.v1beta1.GetColumnSpecRequest)
// Singleton default instance shared by all users of this message type.
private static final com.google.cloud.automl.v1beta1.GetColumnSpecRequest DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.automl.v1beta1.GetColumnSpecRequest();
}

public static com.google.cloud.automl.v1beta1.GetColumnSpecRequest getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Wire-format parser; parsePartialFrom delegates to the parsing constructor.
private static final com.google.protobuf.Parser<GetColumnSpecRequest> PARSER =
    new com.google.protobuf.AbstractParser<GetColumnSpecRequest>() {
      @java.lang.Override
      public GetColumnSpecRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new GetColumnSpecRequest(input, extensionRegistry);
      }
    };

public static com.google.protobuf.Parser<GetColumnSpecRequest> parser() {
  return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<GetColumnSpecRequest> getParserForType() {
  // Instance-level accessor for the shared static parser.
  return PARSER;
}
@java.lang.Override
public com.google.cloud.automl.v1beta1.GetColumnSpecRequest getDefaultInstanceForType() {
  // Instance-level accessor for the shared default instance.
  return DEFAULT_INSTANCE;
}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.tests;
import com.facebook.presto.Session;
import com.facebook.presto.spi.security.Identity;
import com.facebook.presto.testing.MaterializedResult;
import com.facebook.presto.testing.MaterializedRow;
import com.facebook.presto.testing.QueryRunner;
import com.facebook.presto.testing.TestingSession;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import io.airlift.testing.Assertions;
import org.intellij.lang.annotations.Language;
import org.testng.annotations.Test;
import java.util.Optional;
import static com.facebook.presto.SystemSessionProperties.QUERY_MAX_MEMORY;
import static com.facebook.presto.connector.informationSchema.InformationSchemaMetadata.INFORMATION_SCHEMA;
import static com.facebook.presto.spi.type.VarcharType.VARCHAR;
import static com.facebook.presto.testing.MaterializedResult.resultBuilder;
import static com.facebook.presto.testing.TestingAccessControlManager.TestingPrivilegeType.ADD_COLUMN;
import static com.facebook.presto.testing.TestingAccessControlManager.TestingPrivilegeType.CREATE_TABLE;
import static com.facebook.presto.testing.TestingAccessControlManager.TestingPrivilegeType.CREATE_VIEW;
import static com.facebook.presto.testing.TestingAccessControlManager.TestingPrivilegeType.CREATE_VIEW_WITH_SELECT_TABLE;
import static com.facebook.presto.testing.TestingAccessControlManager.TestingPrivilegeType.CREATE_VIEW_WITH_SELECT_VIEW;
import static com.facebook.presto.testing.TestingAccessControlManager.TestingPrivilegeType.DROP_TABLE;
import static com.facebook.presto.testing.TestingAccessControlManager.TestingPrivilegeType.RENAME_COLUMN;
import static com.facebook.presto.testing.TestingAccessControlManager.TestingPrivilegeType.RENAME_TABLE;
import static com.facebook.presto.testing.TestingAccessControlManager.TestingPrivilegeType.SELECT_TABLE;
import static com.facebook.presto.testing.TestingAccessControlManager.TestingPrivilegeType.SELECT_VIEW;
import static com.facebook.presto.testing.TestingAccessControlManager.TestingPrivilegeType.SET_SESSION;
import static com.facebook.presto.testing.TestingAccessControlManager.TestingPrivilegeType.SET_USER;
import static com.facebook.presto.testing.TestingAccessControlManager.privilege;
import static com.facebook.presto.testing.TestingSession.TESTING_CATALOG;
import static com.facebook.presto.tests.QueryAssertions.assertContains;
import static com.google.common.collect.Iterables.getOnlyElement;
import static java.lang.String.format;
import static java.util.Collections.nCopies;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertTrue;
public abstract class AbstractTestDistributedQueries
extends AbstractTestQueries
{
/**
 * @param queryRunner runner used to execute all queries in this test suite
 */
protected AbstractTestDistributedQueries(QueryRunner queryRunner)
{
    super(queryRunner);
}
/**
 * Whether the connector under test supports views. Subclasses for connectors
 * without view support override this to skip the view tests.
 */
protected boolean supportsViews()
{
    return true;
}
/**
 * Verifies that SET SESSION succeeds and records the expected property value,
 * including expressions that must be evaluated ('ban' || 'ana', 111 + 111, 111 &lt; 3).
 */
@Test
public void testSetSession()
{
    assertSetSession("SET SESSION test_string = 'bar'", "test_string", "bar");

    assertSetSession(format("SET SESSION %s.connector_long = 999", TESTING_CATALOG), TESTING_CATALOG + ".connector_long", "999");
    assertSetSession(format("SET SESSION %s.connector_string = 'baz'", TESTING_CATALOG), TESTING_CATALOG + ".connector_string", "baz");
    assertSetSession(format("SET SESSION %s.connector_string = 'ban' || 'ana'", TESTING_CATALOG), TESTING_CATALOG + ".connector_string", "banana");
    assertSetSession(format("SET SESSION %s.connector_long = 444", TESTING_CATALOG), TESTING_CATALOG + ".connector_long", "444");
    assertSetSession(format("SET SESSION %s.connector_long = 111 + 111", TESTING_CATALOG), TESTING_CATALOG + ".connector_long", "222");
    assertSetSession(format("SET SESSION %s.connector_boolean = 111 < 3", TESTING_CATALOG), TESTING_CATALOG + ".connector_boolean", "false");
    assertSetSession(format("SET SESSION %s.connector_double = 11.1", TESTING_CATALOG), TESTING_CATALOG + ".connector_double", "11.1");
}

/**
 * Runs a SET SESSION statement and asserts it succeeded and produced exactly
 * the given property/value pair.
 */
private void assertSetSession(@Language("SQL") String sql, String expectedProperty, String expectedValue)
{
    MaterializedResult result = computeActual(sql);
    assertTrue((Boolean) getOnlyElement(result).getField(0));
    assertEquals(result.getSetSessionProperties(), ImmutableMap.of(expectedProperty, expectedValue));
}
/**
 * Verifies that RESET SESSION succeeds and records the reset property name,
 * for both a system property and a catalog-qualified connector property.
 */
@Test
public void testResetSession()
{
    MaterializedResult result = computeActual(getSession(), "RESET SESSION test_string");
    assertTrue((Boolean) getOnlyElement(result).getField(0));
    assertEquals(result.getResetSessionProperties(), ImmutableSet.of("test_string"));

    result = computeActual(getSession(), format("RESET SESSION %s.connector_string", TESTING_CATALOG));
    assertTrue((Boolean) getOnlyElement(result).getField(0));
    assertEquals(result.getResetSessionProperties(), ImmutableSet.of(TESTING_CATALOG + ".connector_string"));
}
/**
 * Exercises CREATE TABLE, CREATE TABLE IF NOT EXISTS (must be a no-op when the
 * table exists), and CREATE TABLE ... LIKE, verifying column names each time.
 */
@Test
public void testCreateTable()
{
    assertUpdate("CREATE TABLE test_create (a bigint, b double, c varchar)");
    assertTrue(queryRunner.tableExists(getSession(), "test_create"));
    assertTableColumnNames("test_create", "a", "b", "c");

    assertUpdate("DROP TABLE test_create");
    assertFalse(queryRunner.tableExists(getSession(), "test_create"));

    assertUpdate("CREATE TABLE test_create_table_if_not_exists (a bigint, b varchar, c double)");
    assertTrue(queryRunner.tableExists(getSession(), "test_create_table_if_not_exists"));
    assertTableColumnNames("test_create_table_if_not_exists", "a", "b", "c");

    // IF NOT EXISTS on an existing table must not replace it: columns stay a, b, c.
    assertUpdate("CREATE TABLE IF NOT EXISTS test_create_table_if_not_exists (d bigint, e varchar)");
    assertTrue(queryRunner.tableExists(getSession(), "test_create_table_if_not_exists"));
    assertTableColumnNames("test_create_table_if_not_exists", "a", "b", "c");

    assertUpdate("DROP TABLE test_create_table_if_not_exists");
    assertFalse(queryRunner.tableExists(getSession(), "test_create_table_if_not_exists"));

    // Test CREATE TABLE LIKE
    assertUpdate("CREATE TABLE test_create_original (a bigint, b double, c varchar)");
    assertTrue(queryRunner.tableExists(getSession(), "test_create_original"));
    assertTableColumnNames("test_create_original", "a", "b", "c");

    // LIKE copies the source columns, then the extra columns are appended.
    assertUpdate("CREATE TABLE test_create_like (LIKE test_create_original, d boolean, e varchar)");
    assertTrue(queryRunner.tableExists(getSession(), "test_create_like"));
    assertTableColumnNames("test_create_like", "a", "b", "c", "d", "e");

    assertUpdate("DROP TABLE test_create_original");
    assertFalse(queryRunner.tableExists(getSession(), "test_create_original"));

    assertUpdate("DROP TABLE test_create_like");
    assertFalse(queryRunner.tableExists(getSession(), "test_create_like"));
}
/**
 * Exercises CREATE TABLE AS SELECT variants: IF NOT EXISTS (no-op on existing
 * table), plain CTAS over several query shapes, WITH [NO] DATA, UNION ALL
 * (with redistribute_writes on and off), and EXPLAIN ANALYZE of a CTAS.
 */
@Test
public void testCreateTableAsSelect()
{
    assertUpdate("CREATE TABLE test_create_table_as_if_not_exists (a bigint, b double)");
    assertTrue(queryRunner.tableExists(getSession(), "test_create_table_as_if_not_exists"));
    assertTableColumnNames("test_create_table_as_if_not_exists", "a", "b");

    // CTAS IF NOT EXISTS on an existing table must write no rows and keep the schema.
    MaterializedResult materializedRows = computeActual("CREATE TABLE IF NOT EXISTS test_create_table_as_if_not_exists AS SELECT orderkey, discount FROM lineitem");
    assertEquals(materializedRows.getRowCount(), 0);
    assertTrue(queryRunner.tableExists(getSession(), "test_create_table_as_if_not_exists"));
    assertTableColumnNames("test_create_table_as_if_not_exists", "a", "b");

    assertUpdate("DROP TABLE test_create_table_as_if_not_exists");
    assertFalse(queryRunner.tableExists(getSession(), "test_create_table_as_if_not_exists"));

    assertCreateTableAsSelect(
            "test_select",
            "SELECT orderdate, orderkey, totalprice FROM orders",
            "SELECT count(*) FROM orders");

    assertCreateTableAsSelect(
            "test_group",
            "SELECT orderstatus, sum(totalprice) x FROM orders GROUP BY orderstatus",
            "SELECT count(DISTINCT orderstatus) FROM orders");

    assertCreateTableAsSelect(
            "test_join",
            "SELECT count(*) x FROM lineitem JOIN orders ON lineitem.orderkey = orders.orderkey",
            "SELECT 1");

    assertCreateTableAsSelect(
            "test_limit",
            "SELECT orderkey FROM orders ORDER BY orderkey LIMIT 10",
            "SELECT 10");

    // Non-ASCII data (snowman) must round-trip through CTAS.
    assertCreateTableAsSelect(
            "test_unicode",
            "SELECT '\u2603' unicode",
            "SELECT 1");

    assertCreateTableAsSelect(
            "test_with_data",
            "SELECT * FROM orders WITH DATA",
            "SELECT * FROM orders",
            "SELECT count(*) FROM orders");

    assertCreateTableAsSelect(
            "test_with_no_data",
            "SELECT * FROM orders WITH NO DATA",
            "SELECT * FROM orders LIMIT 0",
            "SELECT 0");

    // Tests for CREATE TABLE with UNION ALL: exercises PushTableWriteThroughUnion optimizer
    assertCreateTableAsSelect(
            "test_union_all",
            "SELECT orderdate, orderkey, totalprice FROM orders WHERE orderkey % 2 = 0 UNION ALL " +
                    "SELECT orderdate, orderkey, totalprice FROM orders WHERE orderkey % 2 = 1",
            "SELECT orderdate, orderkey, totalprice FROM orders",
            "SELECT count(*) FROM orders");

    assertCreateTableAsSelect(
            Session.builder(getSession()).setSystemProperty("redistribute_writes", "true").build(),
            "test_union_all",
            "SELECT orderdate, orderkey, totalprice FROM orders UNION ALL " +
                    "SELECT DATE '2000-01-01', 1234567890, 1.23",
            "SELECT orderdate, orderkey, totalprice FROM orders UNION ALL " +
                    "SELECT DATE '2000-01-01', 1234567890, 1.23",
            "SELECT count(*) + 1 FROM orders");

    assertCreateTableAsSelect(
            Session.builder(getSession()).setSystemProperty("redistribute_writes", "false").build(),
            "test_union_all",
            "SELECT orderdate, orderkey, totalprice FROM orders UNION ALL " +
                    "SELECT DATE '2000-01-01', 1234567890, 1.23",
            "SELECT orderdate, orderkey, totalprice FROM orders UNION ALL " +
                    "SELECT DATE '2000-01-01', 1234567890, 1.23",
            "SELECT count(*) + 1 FROM orders");

    // EXPLAIN ANALYZE of a CTAS must still create and populate the table.
    assertExplainAnalyze("EXPLAIN ANALYZE CREATE TABLE analyze_test AS SELECT orderstatus FROM orders");
    assertQuery("SELECT * from analyze_test", "SELECT orderstatus FROM orders");
    assertUpdate("DROP TABLE analyze_test");
}
/**
 * Verifies EXPLAIN ANALYZE produces a plan (containing cost output) for a wide
 * range of statement shapes: scans, aggregations, joins, semijoin subqueries,
 * UNION ALL, SHOW statements, and nested EXPLAIN.
 */
@Test
public void testExplainAnalyze()
{
    assertExplainAnalyze("EXPLAIN ANALYZE SELECT * FROM orders");
    assertExplainAnalyze("EXPLAIN ANALYZE SELECT count(*), clerk FROM orders GROUP BY clerk");
    assertExplainAnalyze(
            "EXPLAIN ANALYZE SELECT x + y FROM (" +
                    "   SELECT orderdate, COUNT(*) x FROM orders GROUP BY orderdate) a JOIN (" +
                    "   SELECT orderdate, COUNT(*) y FROM orders GROUP BY orderdate) b ON a.orderdate = b.orderdate");
    // Semijoins in projection, join criteria, WHERE and ORDER BY positions.
    assertExplainAnalyze("" +
            "EXPLAIN ANALYZE SELECT *, o2.custkey\n" +
            "  IN (\n" +
            "    SELECT orderkey\n" +
            "    FROM lineitem\n" +
            "    WHERE orderkey % 5 = 0)\n" +
            "FROM (SELECT * FROM orders WHERE custkey % 256 = 0) o1\n" +
            "JOIN (SELECT * FROM orders WHERE custkey % 256 = 0) o2\n" +
            "  ON (o1.orderkey IN (SELECT orderkey FROM lineitem WHERE orderkey % 4 = 0)) = (o2.orderkey IN (SELECT orderkey FROM lineitem WHERE orderkey % 4 = 0))\n" +
            "WHERE o1.orderkey\n" +
            "  IN (\n" +
            "    SELECT orderkey\n" +
            "    FROM lineitem\n" +
            "    WHERE orderkey % 4 = 0)\n" +
            "ORDER BY o1.orderkey\n" +
            "  IN (\n" +
            "    SELECT orderkey\n" +
            "    FROM lineitem\n" +
            "    WHERE orderkey % 7 = 0)");
    assertExplainAnalyze("EXPLAIN ANALYZE SELECT count(*), clerk FROM orders GROUP BY clerk UNION ALL SELECT sum(orderkey), clerk FROM orders GROUP BY clerk");

    assertExplainAnalyze("EXPLAIN ANALYZE SHOW COLUMNS FROM orders");
    assertExplainAnalyze("EXPLAIN ANALYZE EXPLAIN SELECT count(*) FROM orders");
    assertExplainAnalyze("EXPLAIN ANALYZE EXPLAIN ANALYZE SELECT count(*) FROM orders");
    assertExplainAnalyze("EXPLAIN ANALYZE SHOW FUNCTIONS");
    assertExplainAnalyze("EXPLAIN ANALYZE SHOW TABLES");
    assertExplainAnalyze("EXPLAIN ANALYZE SHOW SCHEMAS");
    assertExplainAnalyze("EXPLAIN ANALYZE SHOW CATALOGS");
    assertExplainAnalyze("EXPLAIN ANALYZE SHOW SESSION");
}
// EXPLAIN ANALYZE of a non-query statement (DDL) must be rejected.
@Test(expectedExceptions = RuntimeException.class, expectedExceptionsMessageRegExp = "EXPLAIN ANALYZE only supported for statements that are queries")
public void testExplainAnalyzeDDL()
{
    computeActual("EXPLAIN ANALYZE DROP TABLE orders");
}
/**
 * Runs an EXPLAIN ANALYZE statement and asserts the rendered plan contains
 * cost output, which indicates the statement was actually analyzed.
 */
private void assertExplainAnalyze(@Language("SQL") String query)
{
    MaterializedResult result = computeActual(query);
    String output = getOnlyElement(result.getOnlyColumnAsSet());

    // TODO: check that rendered plan is as expected, once stats are collected in a consistent way
    assertTrue(output.contains("Cost: "), format("Expected output to contain \"Cost: \", but it is %s", output));
}
// Convenience overload: the CTAS query doubles as the expected-contents query.
protected void assertCreateTableAsSelect(String table, @Language("SQL") String query, @Language("SQL") String rowCountQuery)
{
    assertCreateTableAsSelect(getSession(), table, query, query, rowCountQuery);
}
// Convenience overload using the default test session.
protected void assertCreateTableAsSelect(String table, @Language("SQL") String query, @Language("SQL") String expectedQuery, @Language("SQL") String rowCountQuery)
{
    assertCreateTableAsSelect(getSession(), table, query, expectedQuery, rowCountQuery);
}
/**
 * Creates {@code table} via CTAS from {@code query}, asserts the written row
 * count matches {@code rowCountQuery} and the contents match
 * {@code expectedQuery}, then drops the table and verifies it is gone.
 */
protected void assertCreateTableAsSelect(Session session, String table, @Language("SQL") String query, @Language("SQL") String expectedQuery, @Language("SQL") String rowCountQuery)
{
    assertUpdate(session, "CREATE TABLE " + table + " AS " + query, rowCountQuery);
    assertQuery(session, "SELECT * FROM " + table, expectedQuery);
    assertUpdate(session, "DROP TABLE " + table);

    assertFalse(queryRunner.tableExists(session, table));
}
/**
 * Verifies ALTER TABLE ... RENAME preserves data and that an uppercase target
 * name is treated case-insensitively (readable via its lowercase form).
 */
@Test
public void testRenameTable()
{
    assertUpdate("CREATE TABLE test_rename AS SELECT 123 x", 1);

    assertUpdate("ALTER TABLE test_rename RENAME TO test_rename_new");
    MaterializedResult materializedRows = computeActual("SELECT x FROM test_rename_new");
    assertEquals(getOnlyElement(materializedRows.getMaterializedRows()).getField(0), 123);

    // provide new table name in uppercase
    assertUpdate("ALTER TABLE test_rename_new RENAME TO TEST_RENAME");
    materializedRows = computeActual("SELECT x FROM test_rename");
    assertEquals(getOnlyElement(materializedRows.getMaterializedRows()).getField(0), 123);

    assertUpdate("DROP TABLE test_rename");

    assertFalse(queryRunner.tableExists(getSession(), "test_rename"));
    assertFalse(queryRunner.tableExists(getSession(), "test_rename_new"));
}
/**
 * Verifies ALTER TABLE ... RENAME COLUMN preserves data and that an uppercase
 * target column name is treated case-insensitively.
 */
@Test
public void testRenameColumn()
{
    assertUpdate("CREATE TABLE test_rename_column AS SELECT 123 x", 1);

    assertUpdate("ALTER TABLE test_rename_column RENAME COLUMN x TO y");
    MaterializedResult materializedRows = computeActual("SELECT y FROM test_rename_column");
    assertEquals(getOnlyElement(materializedRows.getMaterializedRows()).getField(0), 123);

    // uppercase target column name: still readable as lowercase z
    assertUpdate("ALTER TABLE test_rename_column RENAME COLUMN y TO Z");
    materializedRows = computeActual("SELECT z FROM test_rename_column");
    assertEquals(getOnlyElement(materializedRows.getMaterializedRows()).getField(0), 123);

    assertUpdate("DROP TABLE test_rename_column");
    assertFalse(queryRunner.tableExists(getSession(), "test_rename_column"));
}
/**
 * Verifies ALTER TABLE ... ADD COLUMN: duplicate names are rejected
 * (case-insensitively), existing rows read NULL for the new column, and
 * inserts after the schema change populate it.
 */
@Test
public void testAddColumn()
{
    assertUpdate("CREATE TABLE test_add_column AS SELECT 123 x", 1);
    assertUpdate("CREATE TABLE test_add_column_a AS SELECT 234 x, 111 a", 1);
    assertUpdate("CREATE TABLE test_add_column_ab AS SELECT 345 x, 222 a, 33.3 b", 1);

    // Adding an existing column must fail, regardless of case.
    assertQueryFails("ALTER TABLE test_add_column ADD COLUMN x bigint", ".* Column 'x' already exists");
    assertQueryFails("ALTER TABLE test_add_column ADD COLUMN X bigint", ".* Column 'X' already exists");

    assertUpdate("ALTER TABLE test_add_column ADD COLUMN a bigint");
    assertUpdate("INSERT INTO test_add_column SELECT * FROM test_add_column_a", 1);
    MaterializedResult materializedRows = computeActual("SELECT x, a FROM test_add_column ORDER BY x");
    // Pre-existing row reads NULL for the new column; inserted row has a value.
    assertEquals(materializedRows.getMaterializedRows().get(0).getField(0), 123);
    assertEquals(materializedRows.getMaterializedRows().get(0).getField(1), null);
    assertEquals(materializedRows.getMaterializedRows().get(1).getField(0), 234);
    assertEquals(materializedRows.getMaterializedRows().get(1).getField(1), 111L);

    assertUpdate("ALTER TABLE test_add_column ADD COLUMN b double");
    assertUpdate("INSERT INTO test_add_column SELECT * FROM test_add_column_ab", 1);
    materializedRows = computeActual("SELECT x, a, b FROM test_add_column ORDER BY x");
    assertEquals(materializedRows.getMaterializedRows().get(0).getField(0), 123);
    assertEquals(materializedRows.getMaterializedRows().get(0).getField(1), null);
    assertEquals(materializedRows.getMaterializedRows().get(0).getField(2), null);
    assertEquals(materializedRows.getMaterializedRows().get(1).getField(0), 234);
    assertEquals(materializedRows.getMaterializedRows().get(1).getField(1), 111L);
    assertEquals(materializedRows.getMaterializedRows().get(1).getField(2), null);
    assertEquals(materializedRows.getMaterializedRows().get(2).getField(0), 345);
    assertEquals(materializedRows.getMaterializedRows().get(2).getField(1), 222L);
    assertEquals(materializedRows.getMaterializedRows().get(2).getField(2), 33.3);

    assertUpdate("DROP TABLE test_add_column");
    assertUpdate("DROP TABLE test_add_column_a");
    assertUpdate("DROP TABLE test_add_column_ab");
    assertFalse(queryRunner.tableExists(getSession(), "test_add_column"));
    assertFalse(queryRunner.tableExists(getSession(), "test_add_column_a"));
    assertFalse(queryRunner.tableExists(getSession(), "test_add_column_ab"));
}
/**
 * Exercises INSERT: full-row inserts, partial column lists (missing columns
 * become NULL), arbitrary column order, UNION ALL sources, and array columns
 * with type-mismatch rejection.
 */
@Test
public void testInsert()
{
    @Language("SQL") String query = "SELECT orderdate, orderkey, totalprice FROM orders";

    assertUpdate("CREATE TABLE test_insert AS " + query + " WITH NO DATA", 0);
    assertQuery("SELECT count(*) FROM test_insert", "SELECT 0");

    assertUpdate("INSERT INTO test_insert " + query, "SELECT count(*) FROM orders");

    assertQuery("SELECT * FROM test_insert", query);

    // Partial column lists: unspecified columns must read back as NULL.
    assertUpdate("INSERT INTO test_insert (orderkey) VALUES (-1)", 1);
    assertUpdate("INSERT INTO test_insert (orderkey) VALUES (null)", 1);
    assertUpdate("INSERT INTO test_insert (orderdate) VALUES (DATE '2001-01-01')", 1);
    assertUpdate("INSERT INTO test_insert (orderkey, orderdate) VALUES (-2, DATE '2001-01-02')", 1);
    assertUpdate("INSERT INTO test_insert (orderdate, orderkey) VALUES (DATE '2001-01-03', -3)", 1);
    assertUpdate("INSERT INTO test_insert (totalprice) VALUES (1234)", 1);

    assertQuery("SELECT * FROM test_insert", query
            + " UNION ALL SELECT null, -1, null"
            + " UNION ALL SELECT null, null, null"
            + " UNION ALL SELECT DATE '2001-01-01', null, null"
            + " UNION ALL SELECT DATE '2001-01-02', -2, null"
            + " UNION ALL SELECT DATE '2001-01-03', -3, null"
            + " UNION ALL SELECT null, null, 1234");

    // UNION query produces columns in the opposite order
    // of how they are declared in the table schema
    assertUpdate(
            "INSERT INTO test_insert (orderkey, orderdate, totalprice) " +
                    "SELECT orderkey, orderdate, totalprice FROM orders " +
                    "UNION ALL " +
                    "SELECT orderkey, orderdate, totalprice FROM orders",
            "SELECT 2 * count(*) FROM orders");

    assertUpdate("DROP TABLE test_insert");

    // Array columns: NULL elements round-trip; mismatched element type is rejected.
    assertUpdate("CREATE TABLE test_insert (a ARRAY<DOUBLE>, b ARRAY<BIGINT>)");

    assertUpdate("INSERT INTO test_insert (a) VALUES (ARRAY[null])", 1);
    assertUpdate("INSERT INTO test_insert (a) VALUES (ARRAY[1234])", 1);
    assertQuery("SELECT a[1] FROM test_insert", "VALUES (null), (1234)");

    assertQueryFails("INSERT INTO test_insert (b) VALUES (ARRAY[1.23E1])", "Insert query has mismatched column types: .*");

    assertUpdate("DROP TABLE test_insert");
}
/**
 * Exercises DELETE in many shapes: simple predicates, successive deletes,
 * constant predicates, zero-match and optimized-to-false predicates, IN
 * subqueries (including multiple semijoins and NULL handling), scalar and
 * EXISTS subqueries, and EXPLAIN ANALYZE of CTAS/INSERT/DELETE.
 */
@Test
public void testDelete()
{
    // delete half the table, then delete the rest
    assertUpdate("CREATE TABLE test_delete AS SELECT * FROM orders", "SELECT count(*) FROM orders");

    assertUpdate("DELETE FROM test_delete WHERE orderkey % 2 = 0", "SELECT count(*) FROM orders WHERE orderkey % 2 = 0");
    assertQuery("SELECT * FROM test_delete", "SELECT * FROM orders WHERE orderkey % 2 <> 0");

    assertUpdate("DELETE FROM test_delete", "SELECT count(*) FROM orders WHERE orderkey % 2 <> 0");
    assertQuery("SELECT * FROM test_delete", "SELECT * FROM orders LIMIT 0");

    assertUpdate("DROP TABLE test_delete");

    // delete successive parts of the table
    assertUpdate("CREATE TABLE test_delete AS SELECT * FROM orders", "SELECT count(*) FROM orders");

    assertUpdate("DELETE FROM test_delete WHERE custkey <= 100", "SELECT count(*) FROM orders WHERE custkey <= 100");
    assertQuery("SELECT * FROM test_delete", "SELECT * FROM orders WHERE custkey > 100");

    assertUpdate("DELETE FROM test_delete WHERE custkey <= 300", "SELECT count(*) FROM orders WHERE custkey > 100 AND custkey <= 300");
    assertQuery("SELECT * FROM test_delete", "SELECT * FROM orders WHERE custkey > 300");

    assertUpdate("DELETE FROM test_delete WHERE custkey <= 500", "SELECT count(*) FROM orders WHERE custkey > 300 AND custkey <= 500");
    assertQuery("SELECT * FROM test_delete", "SELECT * FROM orders WHERE custkey > 500");

    assertUpdate("DROP TABLE test_delete");

    // delete using a constant property
    assertUpdate("CREATE TABLE test_delete AS SELECT * FROM orders", "SELECT count(*) FROM orders");

    assertUpdate("DELETE FROM test_delete WHERE orderstatus = 'O'", "SELECT count(*) FROM orders WHERE orderstatus = 'O'");
    assertQuery("SELECT * FROM test_delete", "SELECT * FROM orders WHERE orderstatus <> 'O'");

    assertUpdate("DROP TABLE test_delete");

    // delete without matching any rows
    assertUpdate("CREATE TABLE test_delete AS SELECT * FROM orders", "SELECT count(*) FROM orders");
    assertUpdate("DELETE FROM test_delete WHERE rand() < 0", 0);
    assertUpdate("DROP TABLE test_delete");

    // delete with a predicate that optimizes to false
    assertUpdate("CREATE TABLE test_delete AS SELECT * FROM orders", "SELECT count(*) FROM orders");
    assertUpdate("DELETE FROM test_delete WHERE orderkey > 5 AND orderkey < 4", 0);
    assertUpdate("DROP TABLE test_delete");

    // delete using a subquery
    assertUpdate("CREATE TABLE test_delete AS SELECT * FROM lineitem", "SELECT count(*) FROM lineitem");

    assertUpdate(
            "DELETE FROM test_delete WHERE orderkey IN (SELECT orderkey FROM orders WHERE orderstatus = 'F')",
            "SELECT count(*) FROM lineitem WHERE orderkey IN (SELECT orderkey FROM orders WHERE orderstatus = 'F')");
    assertQuery(
            "SELECT * FROM test_delete",
            "SELECT * FROM lineitem WHERE orderkey IN (SELECT orderkey FROM orders WHERE orderstatus <> 'F')");

    assertUpdate("DROP TABLE test_delete");

    // delete with multiple SemiJoin
    assertUpdate("CREATE TABLE test_delete AS SELECT * FROM lineitem", "SELECT count(*) FROM lineitem");

    assertUpdate(
            "DELETE FROM test_delete\n" +
                    "WHERE orderkey IN (SELECT orderkey FROM orders WHERE orderstatus = 'F')\n" +
                    "  AND orderkey IN (SELECT orderkey FROM orders WHERE custkey % 5 = 0)\n",
            "SELECT count(*) FROM lineitem\n" +
                    "WHERE orderkey IN (SELECT orderkey FROM orders WHERE orderstatus = 'F')\n" +
                    "  AND orderkey IN (SELECT orderkey FROM orders WHERE custkey % 5 = 0)");
    assertQuery(
            "SELECT * FROM test_delete",
            "SELECT * FROM lineitem\n" +
                    "WHERE orderkey IN (SELECT orderkey FROM orders WHERE orderstatus <> 'F')\n" +
                    "  OR orderkey IN (SELECT orderkey FROM orders WHERE custkey % 5 <> 0)");

    assertUpdate("DROP TABLE test_delete");

    // delete with SemiJoin null handling
    assertUpdate("CREATE TABLE test_delete AS SELECT * FROM orders", "SELECT count(*) FROM orders");

    assertUpdate(
            "DELETE FROM test_delete\n" +
                    "WHERE (orderkey IN (SELECT CASE WHEN orderkey % 3 = 0 THEN NULL ELSE orderkey END FROM lineitem)) IS NULL\n",
            "SELECT count(*) FROM orders\n" +
                    "WHERE (orderkey IN (SELECT CASE WHEN orderkey % 3 = 0 THEN NULL ELSE orderkey END FROM lineitem)) IS NULL\n");
    assertQuery(
            "SELECT * FROM test_delete",
            "SELECT * FROM orders\n" +
                    "WHERE (orderkey IN (SELECT CASE WHEN orderkey % 3 = 0 THEN NULL ELSE orderkey END FROM lineitem)) IS NOT NULL\n");

    assertUpdate("DROP TABLE test_delete");

    // delete using a scalar and EXISTS subquery
    assertUpdate("CREATE TABLE test_delete AS SELECT * FROM orders", "SELECT count(*) FROM orders");
    assertUpdate("DELETE FROM test_delete WHERE orderkey = (SELECT orderkey FROM orders ORDER BY orderkey LIMIT 1)", 1);
    assertUpdate("DELETE FROM test_delete WHERE orderkey = (SELECT orderkey FROM orders WHERE false)", 0);
    assertUpdate("DELETE FROM test_delete WHERE EXISTS(SELECT 1 WHERE false)", 0);
    assertUpdate("DELETE FROM test_delete WHERE EXISTS(SELECT 1)", "SELECT count(*) - 1 FROM orders");
    assertUpdate("DROP TABLE test_delete");

    // test EXPLAIN ANALYZE with CTAS
    assertExplainAnalyze("EXPLAIN ANALYZE CREATE TABLE analyze_test AS SELECT CAST(orderstatus AS VARCHAR(15)) orderstatus FROM orders");
    assertQuery("SELECT * from analyze_test", "SELECT orderstatus FROM orders");
    // check that INSERT works also
    assertExplainAnalyze("EXPLAIN ANALYZE INSERT INTO analyze_test SELECT clerk FROM orders");
    assertQuery("SELECT * from analyze_test", "SELECT orderstatus FROM orders UNION ALL SELECT clerk FROM orders");
    // check DELETE works with EXPLAIN ANALYZE
    assertExplainAnalyze("EXPLAIN ANALYZE DELETE FROM analyze_test WHERE TRUE");
    assertQuery("SELECT COUNT(*) from analyze_test", "SELECT 0");
    assertUpdate("DROP TABLE analyze_test");
}
/**
 * DROP TABLE IF EXISTS must succeed on an absent table and leave it absent.
 */
@Test
public void testDropTableIfExists()
{
    String table = "test_drop_if_exists";

    assertFalse(queryRunner.tableExists(getSession(), table));
    assertUpdate("DROP TABLE IF EXISTS " + table);
    assertFalse(queryRunner.tableExists(getSession(), table));
}
/**
 * Exercises CREATE VIEW / CREATE OR REPLACE VIEW, querying a view directly,
 * self-joining a view, WITH-clause shadowing of the underlying table name,
 * fully-qualified view names, and DROP VIEW.
 */
@Test
public void testView()
{
    skipTestUnless(supportsViews());

    @Language("SQL") String query = "SELECT orderkey, orderstatus, totalprice / 2 half FROM orders";

    assertUpdate("CREATE VIEW test_view AS SELECT 123 x");
    // CREATE OR REPLACE must supersede the previous definition.
    assertUpdate("CREATE OR REPLACE VIEW test_view AS " + query);

    assertQuery("SELECT * FROM test_view", query);

    assertQuery(
            "SELECT * FROM test_view a JOIN test_view b on a.orderkey = b.orderkey",
            format("SELECT * FROM (%s) a JOIN (%s) b ON a.orderkey = b.orderkey", query, query));

    // A WITH alias named "orders" must not shadow the table inside the view definition.
    assertQuery("WITH orders AS (SELECT * FROM orders LIMIT 0) SELECT * FROM test_view", query);

    String name = format("%s.%s.test_view", getSession().getCatalog().get(), getSession().getSchema().get());
    assertQuery("SELECT * FROM " + name, query);

    assertUpdate("DROP VIEW test_view");
}
/**
 * Verifies that uppercase and mixed-case column aliases in a view definition
 * round-trip through view storage and resolution.
 */
@Test
public void testViewCaseSensitivity()
{
    skipTestUnless(supportsViews());

    computeActual("CREATE VIEW test_view_uppercase AS SELECT X FROM (SELECT 123 X)");
    computeActual("CREATE VIEW test_view_mixedcase AS SELECT XyZ FROM (SELECT 456 XyZ)");
    assertQuery("SELECT * FROM test_view_uppercase", "SELECT X FROM (SELECT 123 X)");
    assertQuery("SELECT * FROM test_view_mixedcase", "SELECT XyZ FROM (SELECT 456 XyZ)");

    // Clean up like the other view tests in this class do, so the views do not
    // leak into later tests or repeated runs against the same catalog.
    assertUpdate("DROP VIEW test_view_uppercase");
    assertUpdate("DROP VIEW test_view_mixedcase");
}
/**
 * Verifies a view still works after its underlying table is replaced with one
 * whose column type is implicitly coercible to the original (varchar widths).
 */
@Test
public void testCompatibleTypeChangeForView()
{
    skipTestUnless(supportsViews());

    assertUpdate("CREATE TABLE test_table_1 AS SELECT 'abcdefg' a", 1);
    assertUpdate("CREATE VIEW test_view_1 AS SELECT a FROM test_table_1");

    assertQuery("SELECT * FROM test_view_1", "VALUES 'abcdefg'");

    // replace table with a version that's implicitly coercible to the previous one
    assertUpdate("DROP TABLE test_table_1");
    assertUpdate("CREATE TABLE test_table_1 AS SELECT 'abc' a", 1);

    assertQuery("SELECT * FROM test_view_1", "VALUES 'abc'");

    assertUpdate("DROP VIEW test_view_1");
    assertUpdate("DROP TABLE test_table_1");
}
/**
 * Verifies a view still works after its underlying table is replaced with one
 * whose column type is implicitly coercible (BIGINT -&gt; INTEGER), including
 * predicate evaluation against the coerced column.
 */
@Test
public void testCompatibleTypeChangeForView2()
{
    skipTestUnless(supportsViews());

    assertUpdate("CREATE TABLE test_table_2 AS SELECT BIGINT '1' v", 1);
    assertUpdate("CREATE VIEW test_view_2 AS SELECT * FROM test_table_2");

    assertQuery("SELECT * FROM test_view_2", "VALUES 1");

    // replace table with a version that's implicitly coercible to the previous one
    assertUpdate("DROP TABLE test_table_2");
    assertUpdate("CREATE TABLE test_table_2 AS SELECT INTEGER '1' v", 1);

    assertQuery("SELECT * FROM test_view_2 WHERE v = 1", "VALUES 1");

    assertUpdate("DROP VIEW test_view_2");
    assertUpdate("DROP TABLE test_table_2");
}
/**
 * Verifies a view is visible through every metadata surface:
 * INFORMATION_SCHEMA.TABLES/VIEWS, SHOW TABLES, SHOW COLUMNS, and
 * SHOW CREATE VIEW (which must reproduce the formatted definition).
 */
@Test
public void testViewMetadata()
{
    skipTestUnless(supportsViews());

    @Language("SQL") String query = "SELECT BIGINT '123' x, 'foo' y";
    assertUpdate("CREATE VIEW meta_test_view AS " + query);

    // test INFORMATION_SCHEMA.TABLES
    MaterializedResult actual = computeActual(format(
            "SELECT table_name, table_type FROM information_schema.tables WHERE table_schema = '%s'",
            getSession().getSchema().get()));

    MaterializedResult expected = resultBuilder(getSession(), actual.getTypes())
            .row("customer", "BASE TABLE")
            .row("lineitem", "BASE TABLE")
            .row("meta_test_view", "VIEW")
            .row("nation", "BASE TABLE")
            .row("orders", "BASE TABLE")
            .row("part", "BASE TABLE")
            .row("partsupp", "BASE TABLE")
            .row("region", "BASE TABLE")
            .row("supplier", "BASE TABLE")
            .build();

    assertContains(actual, expected);

    // test SHOW TABLES
    actual = computeActual("SHOW TABLES");

    // SHOW TABLES returns only names: project the expected rows down to column 0.
    MaterializedResult.Builder builder = resultBuilder(getSession(), actual.getTypes());
    for (MaterializedRow row : expected.getMaterializedRows()) {
        builder.row(row.getField(0));
    }
    expected = builder.build();

    assertContains(actual, expected);

    // test INFORMATION_SCHEMA.VIEWS
    actual = computeActual(format(
            "SELECT table_name, view_definition FROM information_schema.views WHERE table_schema = '%s'",
            getSession().getSchema().get()));

    expected = resultBuilder(getSession(), actual.getTypes())
            .row("meta_test_view", formatSqlText(query))
            .build();

    assertContains(actual, expected);

    // test SHOW COLUMNS
    actual = computeActual("SHOW COLUMNS FROM meta_test_view");

    expected = resultBuilder(getSession(), VARCHAR, VARCHAR, VARCHAR, VARCHAR)
            .row("x", "bigint", "", "")
            .row("y", "varchar(3)", "", "")
            .build();

    assertEquals(actual, expected);

    // test SHOW CREATE VIEW
    String expectedSql = formatSqlText(format(
            "CREATE VIEW %s.%s.%s AS %s",
            getSession().getCatalog().get(),
            getSession().getSchema().get(),
            "meta_test_view",
            query)).trim();

    actual = computeActual("SHOW CREATE VIEW meta_test_view");

    assertEquals(getOnlyElement(actual.getOnlyColumnAsSet()), expectedSql);

    assertUpdate("DROP VIEW meta_test_view");
}
/**
 * Smoke test that a query with a very large predicate (500 conjuncts) is
 * planned and executed successfully rather than failing on size limits.
 */
@Test
public void testLargeQuerySuccess()
{
    assertQuery("SELECT " + Joiner.on(" AND ").join(nCopies(500, "1 = 1")), "SELECT true");
}
/**
 * Verifies that SHOW SCHEMAS can target a catalog other than the session
 * default and reports at least the standard tpch schemas.
 */
@Test
public void testShowSchemasFromOther()
{
    MaterializedResult result = computeActual("SHOW SCHEMAS FROM tpch");
    // containsAll (not equality): additional schemas may exist in the catalog
    assertTrue(result.getOnlyColumnAsSet().containsAll(ImmutableSet.of(INFORMATION_SCHEMA, "tiny", "sf1")));
}
/**
 * Checks that TABLESAMPLE SYSTEM (50) can produce a non-empty strict subset of the
 * table. Sampling is probabilistic, so the query is retried up to 100 times before
 * the test gives up.
 */
@Test
public void testTableSampleSystem()
{
    int rowCount = computeActual("SELECT orderkey FROM orders").getMaterializedRows().size();
    boolean observedPartialSample = false;
    int attempts = 0;
    while (attempts < 100 && !observedPartialSample) {
        int sampledRows = computeActual("SELECT orderkey FROM ORDERS TABLESAMPLE SYSTEM (50)").getMaterializedRows().size();
        // a "good" sample is non-empty but smaller than the full table
        observedPartialSample = (sampledRows > 0) && (sampledRows < rowCount);
        attempts++;
    }
    assertTrue(observedPartialSample, "Table sample returned unexpected number of rows");
}
/**
 * Boundary cases for TABLESAMPLE SYSTEM: a 100% sample must be contained in the
 * full result and a 0% sample must be empty.
 */
@Test
public void testTableSampleSystemBoundaryValues()
{
    MaterializedResult everything = computeActual("SELECT orderkey FROM orders");
    MaterializedResult hundredPercent = computeActual("SELECT orderkey FROM orders TABLESAMPLE SYSTEM (100)");
    MaterializedResult zeroPercent = computeActual("SELECT orderkey FROM orders TABLESAMPLE SYSTEM (0)");
    assertContains(everything, hundredPercent);
    assertEquals(zeroPercent.getMaterializedRows().size(), 0);
}
/**
 * Regression test for symbol aliasing: column names that share a common prefix
 * (foo_1, foo_2_4) must not be confused by the planner's symbol allocation.
 */
@Test
public void testSymbolAliasing()
        throws Exception
{
    assertUpdate("CREATE TABLE test_symbol_aliasing AS SELECT 1 foo_1, 2 foo_2_4", 1);
    assertQuery("SELECT foo_1, foo_2_4 FROM test_symbol_aliasing", "SELECT 1, 2");
    assertUpdate("DROP TABLE test_symbol_aliasing");
}
/**
 * Verifies that non-query statements (session properties and DDL) are rejected
 * when the corresponding privilege is denied, and that an impersonation (SET_USER)
 * denial surfaces as an authorization failure.
 */
@Test
public void testNonQueryAccessControl()
        throws Exception
{
    skipTestUnless(supportsViews());
    assertAccessDenied("SET SESSION " + QUERY_MAX_MEMORY + " = '10MB'",
            "Cannot set system session property " + QUERY_MAX_MEMORY,
            privilege(QUERY_MAX_MEMORY, SET_SESSION));
    assertAccessDenied("CREATE TABLE foo (pk bigint)", "Cannot create table .*.foo.*", privilege("foo", CREATE_TABLE));
    assertAccessDenied("DROP TABLE orders", "Cannot drop table .*.orders.*", privilege("orders", DROP_TABLE));
    assertAccessDenied("ALTER TABLE orders RENAME TO foo", "Cannot rename table .*.orders.* to .*.foo.*", privilege("orders", RENAME_TABLE));
    assertAccessDenied("ALTER TABLE orders ADD COLUMN foo bigint", "Cannot add a column to table .*.orders.*", privilege("orders", ADD_COLUMN));
    assertAccessDenied("ALTER TABLE orders RENAME COLUMN orderkey TO foo", "Cannot rename a column in table .*.orders.*", privilege("orders", RENAME_COLUMN));
    assertAccessDenied("CREATE VIEW foo as SELECT * FROM orders", "Cannot create view .*.foo.*", privilege("foo", CREATE_VIEW));
    // todo add DROP VIEW test... not all connectors have view support
    try {
        assertAccessDenied("SELECT 1", "Principal .* cannot become user " + getSession().getUser() + ".*", privilege(getSession().getUser(), SET_USER));
    }
    catch (AssertionError e) {
        // There is no clean exception message for authorization failure. We simply get a 403
        Assertions.assertContains(e.getMessage(), "statusCode=403");
    }
}
/**
 * Verifies the definer-rights security model for views: the view OWNER (not the
 * querying user) must hold the special view-creation privileges on the underlying
 * relations — both for a view over a table and for a view over another view.
 */
@Test
public void testViewAccessControl()
        throws Exception
{
    skipTestUnless(supportsViews());
    // a dedicated identity that will own the (outer) view
    Session viewOwnerSession = TestingSession.testSessionBuilder()
            .setIdentity(new Identity("test_view_access_owner", Optional.empty()))
            .setCatalog(getSession().getCatalog().get())
            .setSchema(getSession().getSchema().get())
            .build();
    // verify creation of view over a table requires special view creation privileges for the table
    assertAccessDenied(
            viewOwnerSession,
            "CREATE VIEW test_view_access AS SELECT * FROM orders",
            "Cannot select from table .*.orders.*",
            privilege("orders", CREATE_VIEW_WITH_SELECT_TABLE));
    // create the view
    assertAccessAllowed(
            viewOwnerSession,
            "CREATE VIEW test_view_access AS SELECT * FROM orders",
            privilege("bogus", "bogus privilege to disable security", SELECT_TABLE));
    // verify selecting from a view over a table requires the view owner to have special view creation privileges for the table
    assertAccessDenied(
            "SELECT * FROM test_view_access",
            "Cannot select from table .*.orders.*",
            privilege(viewOwnerSession.getUser(), "orders", CREATE_VIEW_WITH_SELECT_TABLE));
    // verify selecting from a view over a table does not require the session user to have SELECT privileges on the underlying table
    assertAccessAllowed(
            "SELECT * FROM test_view_access",
            privilege(getSession().getUser(), "orders", CREATE_VIEW_WITH_SELECT_TABLE));
    assertAccessAllowed(
            "SELECT * FROM test_view_access",
            privilege(getSession().getUser(), "orders", SELECT_TABLE));
    // a second identity owning a view that is stacked on top of the first view
    Session nestedViewOwnerSession = TestingSession.testSessionBuilder()
            .setIdentity(new Identity("test_nested_view_access_owner", Optional.empty()))
            .setCatalog(getSession().getCatalog().get())
            .setSchema(getSession().getSchema().get())
            .build();
    // verify creation of view over a view requires special view creation privileges for the view
    assertAccessDenied(
            nestedViewOwnerSession,
            "CREATE VIEW test_nested_view_access AS SELECT * FROM test_view_access",
            "Cannot select from view .*.test_view_access.*",
            privilege("test_view_access", CREATE_VIEW_WITH_SELECT_VIEW));
    // create the nested view
    assertAccessAllowed(
            nestedViewOwnerSession,
            "CREATE VIEW test_nested_view_access AS SELECT * FROM test_view_access",
            privilege("bogus", "bogus privilege to disable security", SELECT_TABLE));
    // verify selecting from a view over a view requires the view owner of the outer view to have special view creation privileges for the inner view
    assertAccessDenied(
            "SELECT * FROM test_nested_view_access",
            "Cannot select from view .*.test_view_access.*",
            privilege(nestedViewOwnerSession.getUser(), "test_view_access", CREATE_VIEW_WITH_SELECT_VIEW));
    // verify selecting from a view over a view does not require the session user to have SELECT privileges for the inner view
    assertAccessAllowed(
            "SELECT * FROM test_nested_view_access",
            privilege(getSession().getUser(), "test_view_access", CREATE_VIEW_WITH_SELECT_VIEW));
    assertAccessAllowed(
            "SELECT * FROM test_nested_view_access",
            privilege(getSession().getUser(), "test_view_access", SELECT_VIEW));
    // cleanup: drop each view as its respective owner
    assertAccessAllowed(nestedViewOwnerSession, "DROP VIEW test_nested_view_access");
    assertAccessAllowed(viewOwnerSession, "DROP VIEW test_view_access");
}
}
| |
/*
* Licensed to GraphHopper GmbH under one or more contributor
* license agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*
* GraphHopper GmbH licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.graphhopper.routing;
import com.carrotsearch.hppc.IntObjectMap;
import com.graphhopper.routing.util.DefaultEdgeFilter;
import com.graphhopper.routing.util.EdgeFilter;
import com.graphhopper.routing.util.FlagEncoder;
import com.graphhopper.routing.util.TraversalMode;
import com.graphhopper.routing.weighting.Weighting;
import com.graphhopper.storage.Graph;
import com.graphhopper.storage.SPTEntry;
import com.graphhopper.util.EdgeExplorer;
import com.graphhopper.util.EdgeIterator;
import com.graphhopper.util.EdgeIteratorState;
import com.graphhopper.util.GHUtility;
import java.util.PriorityQueue;
import static com.graphhopper.util.EdgeIterator.ANY_EDGE;
/**
* Common subclass for bidirectional algorithms.
*
* @author Peter Karich
* @author easbar
* @see AbstractBidirCHAlgo for bidirectional CH algorithms
*/
public abstract class AbstractNonCHBidirAlgo extends AbstractBidirAlgo implements BidirRoutingAlgorithm {
    protected final Graph graph;
    protected final Weighting weighting;
    protected final FlagEncoder flagEncoder;
    protected EdgeExplorer edgeExplorer;
    // access filters used for the backward (in) and forward (out) search direction
    protected EdgeFilter inEdgeFilter;
    protected EdgeFilter outEdgeFilter;
    // extra filter, only non-null temporarily while seeding a search from a specific edge
    protected EdgeFilter additionalEdgeFilter;

    public AbstractNonCHBidirAlgo(Graph graph, Weighting weighting, TraversalMode tMode) {
        super(tMode);
        this.weighting = weighting;
        this.flagEncoder = weighting.getFlagEncoder();
        this.graph = graph;
        this.nodeAccess = graph.getNodeAccess();
        edgeExplorer = graph.createEdgeExplorer();
        outEdgeFilter = DefaultEdgeFilter.outEdges(flagEncoder.getAccessEnc());
        inEdgeFilter = DefaultEdgeFilter.inEdges(flagEncoder.getAccessEnc());
        // scale initial collection capacity with graph size, clamped to [200, 150_000]
        int size = Math.min(Math.max(200, graph.getNodes() / 10), 150_000);
        initCollections(size);
    }

    /**
     * Creates a new entry of the shortest path tree (a {@link SPTEntry} or one of its subclasses) during a dijkstra
     * expansion.
     *
     * @param edge    the edge that is currently processed for the expansion
     * @param incEdge the id of the edge that is incoming to the node the edge is pointed at. usually this is the same as
     *                edge.getEdge(), but for edge-based CH and in case edge is a shortcut incEdge is the original edge
     *                that is incoming to the node
     * @param weight  the weight the shortest path three entry should carry
     * @param parent  the parent entry of in the shortest path tree
     * @param reverse true if we are currently looking at the backward search, false otherwise
     */
    protected abstract SPTEntry createEntry(EdgeIteratorState edge, int incEdge, double weight, SPTEntry parent, boolean reverse);

    // hook allowing subclasses to plug in a different path reconstruction strategy
    protected BidirPathExtractor createPathExtractor(Graph graph, Weighting weighting) {
        return new BidirPathExtractor(graph, weighting);
    }

    /**
     * Runs the first forward expansion. When a specific start edge was requested,
     * only edges whose first original edge matches {@code fromOutEdge} may leave the start node.
     */
    protected void postInitFrom() {
        if (fromOutEdge == ANY_EDGE) {
            fillEdgesFrom();
        } else {
            fillEdgesFromUsingFilter(new EdgeFilter() {
                @Override
                public boolean accept(EdgeIteratorState edgeState) {
                    return edgeState.getOrigEdgeFirst() == fromOutEdge;
                }
            });
        }
    }

    /**
     * Backward-search counterpart of {@link #postInitFrom()}, restricting the first
     * backward expansion to {@code toInEdge} when one was requested.
     */
    protected void postInitTo() {
        if (toInEdge == ANY_EDGE) {
            fillEdgesTo();
        } else {
            fillEdgesToUsingFilter(new EdgeFilter() {
                @Override
                public boolean accept(EdgeIteratorState edgeState) {
                    return edgeState.getOrigEdgeLast() == toInEdge;
                }
            });
        }
    }

    /**
     * Installs the given filter for exactly one forward expansion step, then removes it again.
     *
     * @param edgeFilter edge filter used to filter edges during {@link #fillEdgesFrom()}
     */
    protected void fillEdgesFromUsingFilter(EdgeFilter edgeFilter) {
        additionalEdgeFilter = edgeFilter;
        finishedFrom = !fillEdgesFrom();
        additionalEdgeFilter = null;
    }

    /**
     * @see #fillEdgesFromUsingFilter(EdgeFilter)
     */
    protected void fillEdgesToUsingFilter(EdgeFilter edgeFilter) {
        additionalEdgeFilter = edgeFilter;
        finishedTo = !fillEdgesTo();
        additionalEdgeFilter = null;
    }

    // Polls and expands the next entry of the forward search.
    // Returns false when the forward queue is exhausted or the search may be stopped.
    @Override
    boolean fillEdgesFrom() {
        if (pqOpenSetFrom.isEmpty()) {
            return false;
        }
        currFrom = pqOpenSetFrom.poll();
        visitedCountFrom++;
        if (fromEntryCanBeSkipped()) {
            return true;
        }
        if (fwdSearchCanBeStopped()) {
            return false;
        }
        // the backward map is the "other" side when probing for a meeting point
        bestWeightMapOther = bestWeightMapTo;
        fillEdges(currFrom, pqOpenSetFrom, bestWeightMapFrom, false);
        return true;
    }

    // Backward-search counterpart of fillEdgesFrom().
    @Override
    boolean fillEdgesTo() {
        if (pqOpenSetTo.isEmpty()) {
            return false;
        }
        currTo = pqOpenSetTo.poll();
        visitedCountTo++;
        if (toEntryCanBeSkipped()) {
            return true;
        }
        if (bwdSearchCanBeStopped()) {
            return false;
        }
        bestWeightMapOther = bestWeightMapFrom;
        fillEdges(currTo, pqOpenSetTo, bestWeightMapTo, true);
        return true;
    }

    // Core Dijkstra relaxation: expands all accepted edges at currEdge.adjNode,
    // discovering new SPT entries or improving already discovered ones.
    private void fillEdges(SPTEntry currEdge, PriorityQueue<SPTEntry> prioQueue, IntObjectMap<SPTEntry> bestWeightMap, boolean reverse) {
        EdgeIterator iter = edgeExplorer.setBaseNode(currEdge.adjNode);
        while (iter.next()) {
            if (!accept(iter, currEdge, reverse))
                continue;
            final double weight = calcWeight(iter, currEdge, reverse);
            if (Double.isInfinite(weight)) {
                // edge not accessible in this direction -> skip
                continue;
            }
            final int origEdgeId = getOrigEdgeId(iter, reverse);
            final int traversalId = getTraversalId(iter, origEdgeId, reverse);
            SPTEntry entry = bestWeightMap.get(traversalId);
            if (entry == null) {
                entry = createEntry(iter, origEdgeId, weight, currEdge, reverse);
                bestWeightMap.put(traversalId, entry);
                prioQueue.add(entry);
            } else if (entry.getWeightOfVisitedPath() > weight) {
                // found a shorter path to an existing entry -> re-queue it with the new weight
                prioQueue.remove(entry);
                updateEntry(entry, iter, origEdgeId, weight, currEdge, reverse);
                prioQueue.add(entry);
            } else
                continue;
            if (updateBestPath) {
                // only needed for edge-based -> skip the calculation and use dummy value otherwise
                double edgeWeight = traversalMode.isEdgeBased() ? weighting.calcEdgeWeight(iter, reverse) : Double.POSITIVE_INFINITY;
                // todo: performance - if bestWeightMapOther.get(traversalId) == null, updateBestPath will exit early and we might
                // have calculated the edgeWeight unnecessarily
                updateBestPath(edgeWeight, entry, origEdgeId, traversalId, reverse);
            }
        }
    }

    // Mutates an existing SPT entry in place after a shorter path to it was found.
    protected void updateEntry(SPTEntry entry, EdgeIteratorState edge, int edgeId, double weight, SPTEntry parent, boolean reverse) {
        entry.edge = edge.getEdge();
        entry.weight = weight;
        entry.parent = parent;
    }

    protected boolean accept(EdgeIteratorState edge, SPTEntry currEdge, boolean reverse) {
        return accept(edge, getIncomingEdge(currEdge));
    }

    // for non-CH algorithms the "original" edge id is simply the edge id itself
    protected int getOrigEdgeId(EdgeIteratorState edge, boolean reverse) {
        return edge.getEdge();
    }

    protected int getTraversalId(EdgeIteratorState edge, int origEdgeId, boolean reverse) {
        return traversalMode.createTraversalId(edge, reverse);
    }

    // Returns the accumulated path weight up to and including iter, or +Infinity
    // when the edge is not accessible in the given direction.
    protected double calcWeight(EdgeIteratorState iter, SPTEntry currEdge, boolean reverse) {
        // todo: for #1776/#1835 move access flag checks into weighting
        final boolean access = reverse ? inEdgeFilter.accept(iter) : outEdgeFilter.accept(iter);
        if (!access) {
            return Double.POSITIVE_INFINITY;
        }
        return GHUtility.calcWeightWithTurnWeight(weighting, iter, reverse, getIncomingEdge(currEdge)) + currEdge.getWeightOfVisitedPath();
    }

    @Override
    protected double getInEdgeWeight(SPTEntry entry) {
        return weighting.calcEdgeWeight(graph.getEdgeIteratorState(getIncomingEdge(entry), entry.adjNode), false);
    }

    @Override
    protected int getOtherNode(int edge, int node) {
        return graph.getOtherNode(edge, node);
    }

    @Override
    protected Path extractPath() {
        if (finished())
            return createPathExtractor(graph, weighting).extract(bestFwdEntry, bestBwdEntry, bestWeight);
        return createEmptyPath();
    }

    protected boolean accept(EdgeIteratorState iter, int prevOrNextEdgeId) {
        // for edge-based traversal we leave it for TurnWeighting to decide whether or not a u-turn is acceptable,
        // but for node-based traversal we exclude such a turn for performance reasons already here
        if (!traversalMode.isEdgeBased() && iter.getEdge() == prevOrNextEdgeId)
            return false;
        return additionalEdgeFilter == null || additionalEdgeFilter.accept(iter);
    }

    protected Path createEmptyPath() {
        return new Path(graph);
    }

    @Override
    public String toString() {
        return getName() + "|" + weighting;
    }
}
| |
/*******************************************************************************
*
* Copyright FUJITSU LIMITED 2017
*
* Creation Date: 03.03.2014
*
*******************************************************************************/
package org.oscm.ui.dialog.mp.landingpage;
import org.oscm.ui.common.JSFUtils;
import org.oscm.internal.landingpage.POLandingpageEntry;
import org.oscm.internal.types.enumtypes.ServiceStatus;
import org.oscm.internal.types.enumtypes.SubscriptionStatus;
/**
* @author zankov
*
*/
public class LandingpageEntryModel {

    /** Maximum length of the text returned by {@link #getShortDescriptionLimited()}. */
    private static final int MAX_LEN_LIMITED_SHORT_DESCRIPTION = 120;

    /** Index before which an over-long description is cut (preferably at a blank). */
    private static final int INDEX_LIMIT_SHORT_DESCRIPTION = 100;

    // wrapped value object; most properties below simply delegate to it
    private POLandingpageEntry entry;

    // presentation-only state, not part of the wrapped entry
    private String accessLink;
    private String redirectUrl;
    private String target;
    private boolean showSubscribeButton;

    public LandingpageEntryModel(POLandingpageEntry entry) {
        this.entry = entry;
    }

    public long getKey() {
        return entry.getServiceKey();
    }

    public void setKey(long key) {
        entry.setServiceKey(key);
    }

    public long getSubscriptionKey() {
        return entry.getSubscriptionKey();
    }

    public void setSubcriptionKey(long key) {
        entry.setSubscriptionKey(key);
    }

    /** Returns the subscription key as a hexadecimal string (used as a URL-safe id). */
    public String getSubscriptionHexKey() {
        return Long.toHexString(getSubscriptionKey());
    }

    public int getVersion() {
        return entry.getVersion();
    }

    public void setVersion(int version) {
        entry.setVersion(version);
    }

    public String getServiceId() {
        return entry.getServiceId();
    }

    public void setServiceId(String serviceId) {
        entry.setServiceId(serviceId);
    }

    public void setServiceAccessURL(String url) {
        entry.setServiceAccessURL(url);
    }

    public String getServiceAccessURL() {
        return entry.getServiceAccessURL();
    }

    public void setShortDescription(String shortDescription) {
        entry.setShortDescription(shortDescription);
    }

    public String getShortDescription() {
        return entry.getShortDescription();
    }

    public void setName(String name) {
        entry.setName(name);
    }

    public String getName() {
        return entry.getName();
    }

    public void setServiceStatus(ServiceStatus status) {
        entry.setServiceStatus(status);
    }

    public ServiceStatus getServiceStatus() {
        return entry.getServiceStatus();
    }

    public boolean isSubscribed() {
        return entry.isSubscribed();
    }

    public void setSubscribed(boolean subscribed) {
        entry.setSubscribed(subscribed);
    }

    public String getSubscriptionId() {
        return entry.getSubscriptionId();
    }

    public void setSubscriptionId(String subscriptionId) {
        entry.setSubscriptionId(subscriptionId);
    }

    public void setSubscriptionStatus(SubscriptionStatus subscriptionStatus) {
        entry.setSubscriptionStatus(subscriptionStatus);
    }

    public SubscriptionStatus getSubscriptionStatus() {
        return entry.getSubscriptionStatus();
    }

    /**
     * Returns the service name, or a localized "undefined" text when the name is
     * null or blank.
     */
    public String getNameToDisplay() {
        String name = entry.getName();
        if (name == null || name.trim().length() == 0) {
            return JSFUtils.getText("service.name.undefined", null);
        }
        return name;
    }

    public void setSellerName(String sellerName) {
        entry.setSellerName(sellerName);
    }

    public String getSellerName() {
        return entry.getSellerName();
    }

    /**
     * Returns the short description of this service with a max. length of 120
     * characters. If the description is longer it is cut at the last blank before
     * the 100th character (or hard at the 100th character if there is no blank)
     * and "..." is appended. A missing description yields an empty string.
     */
    public String getShortDescriptionLimited() {
        String shortDescription = getShortDescription();
        if (shortDescription == null) {
            // defensive: the wrapped entry may carry no description at all
            return "";
        }
        if (shortDescription.length() <= MAX_LEN_LIMITED_SHORT_DESCRIPTION) {
            return shortDescription;
        }
        // Shorten as follows: find the last blank before the 100th character,
        // use the part before the blank and add "...".
        String prefix = shortDescription.substring(0, INDEX_LIMIT_SHORT_DESCRIPTION);
        int blankIdx = prefix.lastIndexOf(' ');
        // StringBuilder instead of StringBuffer: no synchronization needed here
        StringBuilder sd = new StringBuilder(prefix);
        if (blankIdx > 0) {
            // cut at the last word boundary
            sd.setLength(blankIdx);
        }
        sd.append("...");
        return sd.toString();
    }

    public void setAccessLink(String accessUrl) {
        this.accessLink = accessUrl;
    }

    public String getAccessLink() {
        return accessLink;
    }

    public String getRedirectUrl() {
        return redirectUrl;
    }

    public void setRedirectUrl(String redirectUrl) {
        this.redirectUrl = redirectUrl;
    }

    public String getTarget() {
        return target;
    }

    public void setTarget(String target) {
        this.target = target;
    }

    public boolean isShowSubscribeButton() {
        return showSubscribeButton;
    }

    public void setShowSubscribeButton(boolean showSubscribeButton) {
        this.showSubscribeButton = showSubscribeButton;
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.procedure;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.errorhandling.ForeignException;
import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
/**
* Demonstrate how Procedure handles single members, multiple members, and errors semantics
*/
@Category({MasterTests.class, SmallTests.class})
public class TestProcedure {
    @ClassRule
    public static final HBaseClassTestRule CLASS_RULE =
            HBaseClassTestRule.forClass(TestProcedure.class);

    // mocked coordinator shared by all tests; re-created before each test
    ProcedureCoordinator coord;

    @Before
    public void setup() {
        coord = mock(ProcedureCoordinator.class);
        final ProcedureCoordinatorRpcs comms = mock(ProcedureCoordinatorRpcs.class);
        when(coord.getRpcs()).thenReturn(comms); // make it not null
    }

    /**
     * Procedure subclass whose barrier callbacks release latches, letting tests wait
     * deterministically for each phase of the barrier protocol.
     */
    static class LatchedProcedure extends Procedure {
        CountDownLatch startedAcquireBarrier = new CountDownLatch(1);
        CountDownLatch startedDuringBarrier = new CountDownLatch(1);
        CountDownLatch completedProcedure = new CountDownLatch(1);

        public LatchedProcedure(ProcedureCoordinator coord, ForeignExceptionDispatcher monitor,
                long wakeFreq, long timeout, String opName, byte[] data,
                List<String> expectedMembers) {
            super(coord, monitor, wakeFreq, timeout, opName, data, expectedMembers);
        }

        @Override
        public void sendGlobalBarrierStart() {
            startedAcquireBarrier.countDown();
        }

        @Override
        public void sendGlobalBarrierReached() {
            startedDuringBarrier.countDown();
        }

        @Override
        public void sendGlobalBarrierComplete() {
            completedProcedure.countDown();
        }
    };

    /**
     * With a single member, verify ordered execution. The Coordinator side is run in a separate
     * thread so we can only trigger from members and wait for particular state latches.
     */
    @Test
    public void testSingleMember() throws Exception {
        // The member
        List<String> members = new ArrayList<>();
        members.add("member");
        LatchedProcedure proc = new LatchedProcedure(coord, new ForeignExceptionDispatcher(), 100,
                Integer.MAX_VALUE, "op", null, members);
        final LatchedProcedure procspy = spy(proc);
        // coordinator: start the barrier procedure
        new Thread() {
            @Override
            public void run() {
                procspy.call();
            }
        }.start();
        // coordinator: wait for the barrier to be acquired, then send start barrier
        proc.startedAcquireBarrier.await();
        // we only know that {@link Procedure#sendStartBarrier()} was called, and others are blocked.
        verify(procspy).sendGlobalBarrierStart();
        verify(procspy, never()).sendGlobalBarrierReached();
        verify(procspy, never()).sendGlobalBarrierComplete();
        verify(procspy, never()).barrierAcquiredByMember(anyString());
        // member: trigger global barrier acquisition
        proc.barrierAcquiredByMember(members.get(0));
        // coordinator: wait for global barrier to be acquired.
        proc.acquiredBarrierLatch.await();
        verify(procspy).sendGlobalBarrierStart(); // old news
        // since two threads, we cannot guarantee that {@link Procedure#sendSatsifiedBarrier()} was
        // or was not called here.
        // member: trigger global barrier release
        proc.barrierReleasedByMember(members.get(0), new byte[0]);
        // coordinator: wait for procedure to be completed
        proc.completedProcedure.await();
        verify(procspy).sendGlobalBarrierReached();
        verify(procspy).sendGlobalBarrierComplete();
        verify(procspy, never()).receive(any());
    }

    /**
     * With two members, the global barrier must only be reached after BOTH members
     * have acquired, and only completed after both have released.
     */
    @Test
    public void testMultipleMember() throws Exception {
        // 2 members
        List<String> members = new ArrayList<>();
        members.add("member1");
        members.add("member2");
        LatchedProcedure proc = new LatchedProcedure(coord, new ForeignExceptionDispatcher(), 100,
                Integer.MAX_VALUE, "op", null, members);
        final LatchedProcedure procspy = spy(proc);
        // start the barrier procedure
        new Thread() {
            @Override
            public void run() {
                procspy.call();
            }
        }.start();
        // coordinator: wait for the barrier to be acquired, then send start barrier
        procspy.startedAcquireBarrier.await();
        // we only know that {@link Procedure#sendStartBarrier()} was called, and others are blocked.
        verify(procspy).sendGlobalBarrierStart();
        verify(procspy, never()).sendGlobalBarrierReached();
        verify(procspy, never()).sendGlobalBarrierComplete();
        verify(procspy, never()).barrierAcquiredByMember(anyString()); // no externals
        // member0: [1/2] trigger global barrier acquisition.
        procspy.barrierAcquiredByMember(members.get(0));
        // coordinator not satisified.
        verify(procspy).sendGlobalBarrierStart();
        verify(procspy, never()).sendGlobalBarrierReached();
        verify(procspy, never()).sendGlobalBarrierComplete();
        // member 1: [2/2] trigger global barrier acquisition.
        procspy.barrierAcquiredByMember(members.get(1));
        // coordinator: wait for global barrier to be acquired.
        procspy.startedDuringBarrier.await();
        verify(procspy).sendGlobalBarrierStart(); // old news
        // member 1, 2: trigger global barrier release
        procspy.barrierReleasedByMember(members.get(0), new byte[0]);
        procspy.barrierReleasedByMember(members.get(1), new byte[0]);
        // coordinator wait for procedure to be completed
        procspy.completedProcedure.await();
        verify(procspy).sendGlobalBarrierReached();
        verify(procspy).sendGlobalBarrierComplete();
        verify(procspy, never()).receive(any());
    }

    /**
     * An error received BEFORE the procedure runs must short-circuit the barrier:
     * neither start nor reached is sent, but complete still is.
     */
    @Test
    public void testErrorPropagation() throws Exception {
        List<String> members = new ArrayList<>();
        members.add("member");
        Procedure proc = new Procedure(coord, new ForeignExceptionDispatcher(), 100,
                Integer.MAX_VALUE, "op", null, members);
        final Procedure procspy = spy(proc);
        ForeignException cause = new ForeignException("SRC", "External Exception");
        proc.receive(cause);
        // start the barrier procedure
        Thread t = new Thread() {
            @Override
            public void run() {
                procspy.call();
            }
        };
        t.start();
        t.join();
        verify(procspy, never()).sendGlobalBarrierStart();
        verify(procspy, never()).sendGlobalBarrierReached();
        verify(procspy).sendGlobalBarrierComplete();
    }

    /**
     * An error received AFTER the acquire phase started must prevent the "reached"
     * notification while still sending "complete".
     */
    @Test
    public void testBarrieredErrorPropagation() throws Exception {
        List<String> members = new ArrayList<>();
        members.add("member");
        LatchedProcedure proc = new LatchedProcedure(coord, new ForeignExceptionDispatcher(), 100,
                Integer.MAX_VALUE, "op", null, members);
        final LatchedProcedure procspy = spy(proc);
        // start the barrier procedure
        Thread t = new Thread() {
            @Override
            public void run() {
                procspy.call();
            }
        };
        t.start();
        // now test that we can put an error in before the commit phase runs
        procspy.startedAcquireBarrier.await();
        ForeignException cause = new ForeignException("SRC", "External Exception");
        procspy.receive(cause);
        procspy.barrierAcquiredByMember(members.get(0));
        t.join();
        // verify state of all the object
        verify(procspy).sendGlobalBarrierStart();
        verify(procspy).sendGlobalBarrierComplete();
        verify(procspy, never()).sendGlobalBarrierReached();
    }
}
| |
/**
* Copyright 2005-2015 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl2.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.rice.krms.api.repository.reference;
import org.kuali.rice.core.api.CoreConstants;
import org.kuali.rice.core.api.mo.AbstractDataTransferObject;
import org.kuali.rice.core.api.mo.ModelBuilder;
import org.kuali.rice.krms.api.KrmsConstants;
import javax.xml.bind.Element;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAnyElement;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
import java.io.Serializable;
import java.util.Collection;
/**
* Generated using JVM arguments -DNOT_BLANK=krmsObjectId,krmsDiscriminatorType,referenceObjectId,referenceDiscriminatorType
* Concrete model object implementation, immutable.
* Instances can be (un)marshalled to and from XML.
*
* @see ReferenceObjectBindingContract
*
* @author Kuali Rice Team (rice.collab@kuali.org)
*
*/
@XmlRootElement(name = ReferenceObjectBinding.Constants.ROOT_ELEMENT_NAME)
@XmlAccessorType(XmlAccessType.NONE)
@XmlType(name = ReferenceObjectBinding.Constants.TYPE_NAME, propOrder = {
ReferenceObjectBinding.Elements.COLLECTION_NAME,
ReferenceObjectBinding.Elements.KRMS_DISCRIMINATOR_TYPE,
ReferenceObjectBinding.Elements.KRMS_OBJECT_ID,
ReferenceObjectBinding.Elements.NAMESPACE,
ReferenceObjectBinding.Elements.REFERENCE_DISCRIMINATOR_TYPE,
ReferenceObjectBinding.Elements.REFERENCE_OBJECT_ID,
ReferenceObjectBinding.Elements.ID,
ReferenceObjectBinding.Elements.ACTIVE,
CoreConstants.CommonElements.VERSION_NUMBER,
CoreConstants.CommonElements.FUTURE_ELEMENTS
})
public final class ReferenceObjectBinding
extends AbstractDataTransferObject
implements ReferenceObjectBindingContract
{
@XmlElement(name = Elements.COLLECTION_NAME, required = false)
private final String collectionName;
@XmlElement(name = Elements.KRMS_DISCRIMINATOR_TYPE, required = false)
private final String krmsDiscriminatorType;
@XmlElement(name = Elements.KRMS_OBJECT_ID, required = false)
private final String krmsObjectId;
@XmlElement(name = Elements.NAMESPACE, required = false)
private final String namespace;
@XmlElement(name = Elements.REFERENCE_DISCRIMINATOR_TYPE, required = false)
private final String referenceDiscriminatorType;
@XmlElement(name = Elements.REFERENCE_OBJECT_ID, required = false)
private final String referenceObjectId;
@XmlElement(name = Elements.ID, required = false)
private final String id;
@XmlElement(name = Elements.ACTIVE, required = false)
private final boolean active;
@XmlElement(name = CoreConstants.CommonElements.VERSION_NUMBER, required = false)
private final Long versionNumber;
@SuppressWarnings("unused")
@XmlAnyElement
private final Collection<Element> _futureElements = null;
/**
* Private constructor used only by JAXB. This constructor should never be called.
* It is only present for use during JAXB unmarshalling.
*
*/
private ReferenceObjectBinding() {
    // Fields start out null; JAXB assigns the real values reflectively during
    // unmarshalling, so these placeholder defaults are never observed by callers.
    this.collectionName = null;
    this.krmsDiscriminatorType = null;
    this.krmsObjectId = null;
    this.namespace = null;
    this.referenceDiscriminatorType = null;
    this.referenceObjectId = null;
    this.id = null;
    // primitive boolean cannot be null; false until JAXB sets it
    this.active = false;
    this.versionNumber = null;
}
/**
* Constructs an object from the given builder. This constructor is private and should only ever be invoked from the builder.
*
* @param builder the Builder from which to construct the object.
*
*/
private ReferenceObjectBinding(Builder builder) {
this.collectionName = builder.getCollectionName();
this.krmsDiscriminatorType = builder.getKrmsDiscriminatorType();
this.krmsObjectId = builder.getKrmsObjectId();
this.namespace = builder.getNamespace();
this.referenceDiscriminatorType = builder.getReferenceDiscriminatorType();
this.referenceObjectId = builder.getReferenceObjectId();
this.id = builder.getId();
this.active = builder.isActive();
this.versionNumber = builder.getVersionNumber();
}
@Override
public String getCollectionName() {
return this.collectionName;
}
@Override
public String getKrmsDiscriminatorType() {
return this.krmsDiscriminatorType;
}
@Override
public String getKrmsObjectId() {
return this.krmsObjectId;
}
@Override
public String getNamespace() {
return this.namespace;
}
@Override
public String getReferenceDiscriminatorType() {
return this.referenceDiscriminatorType;
}
@Override
public String getReferenceObjectId() {
return this.referenceObjectId;
}
@Override
public String getId() {
return this.id;
}
@Override
public boolean isActive() {
return this.active;
}
@Override
public Long getVersionNumber() {
return this.versionNumber;
}
/**
* A builder which can be used to construct {@link ReferenceObjectBinding} instances. Enforces the constraints of the {@link ReferenceObjectBindingContract}.
*
*/
public final static class Builder
implements Serializable, ModelBuilder, ReferenceObjectBindingContract
{
private String collectionName;
private String krmsDiscriminatorType;
private String krmsObjectId;
private String namespace;
private String referenceDiscriminatorType;
private String referenceObjectId;
private String id;
private boolean active;
private Long versionNumber;
private Builder(String krmsDiscriminatorType, String krmsObjectId, String namespace, String referenceDiscriminatorType, String referenceObjectId) {
// TODO modify this constructor as needed to pass any required values and invoke the appropriate 'setter' methods
setKrmsDiscriminatorType(krmsDiscriminatorType);
setKrmsObjectId(krmsObjectId);
setNamespace(namespace);
setReferenceDiscriminatorType(referenceDiscriminatorType);
setReferenceObjectId(referenceObjectId);
}
public static Builder create(String krmsDiscriminatorType, String krmsObjectId, String namespace, String referenceDiscriminatorType, String referenceObjectId) {
// TODO modify as needed to pass any required values and add them to the signature of the 'create' method
return new Builder(krmsDiscriminatorType, krmsObjectId, namespace, referenceDiscriminatorType, referenceObjectId);
}
public static Builder create(ReferenceObjectBindingContract contract) {
if (contract == null) {
throw new IllegalArgumentException("contract was null");
}
// TODO if create() is modified to accept required parameters, this will need to be modified
Builder builder = create(contract.getKrmsDiscriminatorType(), contract.getKrmsObjectId(), contract.getNamespace(), contract.getReferenceDiscriminatorType(), contract.getReferenceObjectId());
builder.setId(contract.getId());
builder.setActive(contract.isActive());
builder.setCollectionName(contract.getCollectionName());
builder.setVersionNumber(contract.getVersionNumber());
return builder;
}
/**
* Builds an instance of a ReferenceObjectBinding based on the current state of the builder.
*
* @return the fully-constructed ReferenceObjectBinding.
*
*/
public ReferenceObjectBinding build() {
return new ReferenceObjectBinding(this);
}
@Override
public boolean isActive() {
return this.active;
}
@Override
public String getCollectionName() {
return this.collectionName;
}
@Override
public String getId() {
return this.id;
}
@Override
public String getKrmsDiscriminatorType() {
return this.krmsDiscriminatorType;
}
@Override
public String getKrmsObjectId() {
return this.krmsObjectId;
}
@Override
public String getNamespace() {
return this.namespace;
}
@Override
public String getReferenceDiscriminatorType() {
return this.referenceDiscriminatorType;
}
@Override
public String getReferenceObjectId() {
return this.referenceObjectId;
}
@Override
public Long getVersionNumber() {
return this.versionNumber;
}
/**
* Sets the value of active on this builder to the given value.
*
* @param active the active value to set.
*
*/
public void setActive(boolean active) {
this.active = active;
}
/**
* Sets the value of collectionName on this builder to the given value.
*
* @param collectionName the collectionName value to set.
*
*/
public void setCollectionName(String collectionName) {
// TODO add validation of input value if required and throw IllegalArgumentException if needed
this.collectionName = collectionName;
}
/**
* Sets the value of id on this builder to the given value.
*
* @param id the id value to set., may be null, representing the Object has not been persisted, but must not be blank.
* @throws IllegalArgumentException if the id is blank
*
*/
public void setId(String id) {
if (id != null && org.apache.commons.lang.StringUtils.isBlank(id)) {
throw new IllegalArgumentException("id is blank");
}
this.id = id;
}
/**
* Sets the value of krmsDiscriminatorType on this builder to the given value.
*
* @param krmsDiscriminatorType the krmsDiscriminatorType value to set., must not be null or blank
* @throws IllegalArgumentException if the krmsDiscriminatorType is null or blank
*
*/
public void setKrmsDiscriminatorType(String krmsDiscriminatorType) {
if (org.apache.commons.lang.StringUtils.isBlank(krmsDiscriminatorType)) {
throw new IllegalArgumentException("krmsDiscriminatorType is null or blank");
}
this.krmsDiscriminatorType = krmsDiscriminatorType;
}
/**
* Sets the value of krmsObjectId on this builder to the given value.
*
* @param krmsObjectId the krmsObjectId value to set., must not be null or blank
* @throws IllegalArgumentException if the krmsObjectId is null or blank
*
*/
public void setKrmsObjectId(String krmsObjectId) {
if (org.apache.commons.lang.StringUtils.isBlank(krmsObjectId)) {
throw new IllegalArgumentException("krmsObjectId is null or blank");
}
this.krmsObjectId = krmsObjectId;
}
/**
* Sets the value of namespace on this builder to the given value.
*
* @param namespace the namespace value to set., must not be null or blank
* @throws IllegalArgumentException if the namespace is null or blank
*
*/
public void setNamespace(String namespace) {
if (org.apache.commons.lang.StringUtils.isBlank(namespace)) {
throw new IllegalArgumentException("namespace is null or blank");
}
this.namespace = namespace;
}
/**
* Sets the value of referenceDiscriminatorType on this builder to the given value.
*
* @param referenceDiscriminatorType the referenceDiscriminatorType value to set., must not be null or blank
* @throws IllegalArgumentException if the referenceDiscriminatorType is null or blank
*
*/
public void setReferenceDiscriminatorType(String referenceDiscriminatorType) {
if (org.apache.commons.lang.StringUtils.isBlank(referenceDiscriminatorType)) {
throw new IllegalArgumentException("referenceDiscriminatorType is null or blank");
}
this.referenceDiscriminatorType = referenceDiscriminatorType;
}
/**
* Sets the value of referenceObjectId on this builder to the given value.
*
* @param referenceObjectId the referenceObjectId value to set., must not be null or blank
* @throws IllegalArgumentException if the referenceObjectId is null or blank
*
*/
public void setReferenceObjectId(String referenceObjectId) {
if (org.apache.commons.lang.StringUtils.isBlank(referenceObjectId)) {
throw new IllegalArgumentException("referenceObjectId is null or blank");
}
this.referenceObjectId = referenceObjectId;
}
/**
* Sets the value of versionNumber on this builder to the given value.
*
* @param versionNumber the versionNumber value to set.
*
*/
public void setVersionNumber(Long versionNumber) {
this.versionNumber = versionNumber;
}
}
/**
* Defines some internal constants used on this class.
*
*/
static class Constants {
final static String ROOT_ELEMENT_NAME = "referenceObjectBinding";
final static String TYPE_NAME = "ReferenceObjectBindingType";
}
/**
* A private class which exposes constants which define the XML element names to use when this object is marshalled to XML.
*
*/
static class Elements {
final static String COLLECTION_NAME = "collectionName";
final static String KRMS_DISCRIMINATOR_TYPE = "krmsDiscriminatorType";
final static String KRMS_OBJECT_ID = "krmsObjectId";
final static String NAMESPACE = "namespace";
final static String REFERENCE_DISCRIMINATOR_TYPE = "referenceDiscriminatorType";
final static String REFERENCE_OBJECT_ID = "referenceObjectId";
final static String ID = "id";
final static String ACTIVE = "active";
}
public static class Cache {
public static final String NAME = KrmsConstants.Namespaces.KRMS_NAMESPACE_2_0 + "/" + ReferenceObjectBinding.Constants.TYPE_NAME;
}
}
| |
/*******************************************************************************
* Copyright 2011 The fangorn project
*
* Author: Sumukh Ghodke
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
******************************************************************************/
package au.edu.unimelb.csse.bit;
public class ByteOps {

	// Reusable scratch state; this makes instances NOT thread-safe.
	private PosMid p = new PosMid();
	private int[] d = new int[4];

	/**
	 * Encodes a node-position 4-tuple as: the {@link Order} id followed by the
	 * four successive differences produced by {@link Order#diffs}, each written
	 * as a variable-length nibble sequence via {@link #intToVarByte}.
	 *
	 * @param values the values in the order right, left, depth, parent
	 * @param bytes destination buffer; must be large enough for the encoding
	 * @return the shared {@link PosMid} holding the end position (position/mid)
	 */
	public PosMid int4ToDiffVarBytes(int[] values, byte[] bytes) {
		final Order order = Order.find(values[0], values[1], values[2],
				values[3]);
		order.diffs(values, d);
		int id = order.id;
		p.clear();
		// id first, then the four diffs, so the decoder can recover the order
		p = intToVarByte(id, bytes, p);
		for (int i = 0; i < d.length; i++) {
			p = intToVarByte(d[i], bytes, p);
		}
		return p;
	}

	/**
	 * Encodes each value directly (no differencing, no order id) as
	 * variable-length nibble sequences.
	 *
	 * @param values the raw values to encode
	 * @param bytes destination buffer
	 * @return the shared {@link PosMid} holding the end position
	 */
	public PosMid int4ToVarBytes(int[] values, byte[] bytes) {
		p.clear();
		for (int i = 0; i < values.length; i++) {
			p = intToVarByte(values[i], bytes, p);
		}
		return p;
	}

	/**
	 * Writes one byte per value into {@code bytes[0..values.length)}.
	 */
	public void int4ToBytes(int[] values, byte[] bytes) {
		for (int i = 0; i < values.length; i++) {
			intToByte(values[i], bytes, i);
		}
	}

	// NOTE(review): keeps only the low 8 bits of i — values outside 0..255 are
	// truncated; presumably callers guarantee byte-sized values. Confirm.
	private void intToByte(int i, byte[] bytes, int pos) {
		bytes[pos] = (byte) (i & 0xFF);
	}

	/**
	 * Appends a variable-length encoding of {@code i} (assumed non-negative;
	 * a negative value writes nothing) starting at the position described by
	 * {@code p}. The value is split into 3-bit groups, least significant first;
	 * each group occupies one nibble, whose 4th bit (0x08 in the low nibble,
	 * 0x80 in the high nibble) is the continuation flag. {@code p.mid == true}
	 * means the low nibble of {@code bytes[p.position]} is already occupied and
	 * the next group goes into the high nibble.
	 *
	 * @param i the value to encode
	 * @param bytes destination buffer
	 * @param p current write position (mutated in place)
	 * @return the same {@code p}, advanced past the written nibbles
	 */
	public static PosMid intToVarByte(int i, byte[] bytes, PosMid p) {
		if (i == 0) {
			// zero is a single all-clear nibble (no continuation bit)
			if (p.mid) {
				bytes[p.position] = (byte) (bytes[p.position] & 0x0F);
				p.mid = false;
				p.position++;
			} else {
				bytes[p.position] = (byte) 0x00;
				p.mid = true;
			}
		}
		while (i > 0) {
			if (p.mid) {
				// fill the high nibble of the current byte
				bytes[p.position] = (byte) ((bytes[p.position] & 0x0F) | (i & 0x7) << 4);
				i = i >>> 3;
				if (i > 0) {
					bytes[p.position] = (byte) (bytes[p.position] | 0x80);
				}
				p.mid = false;
				p.position++;
			} else {
				// start a new byte; data goes in the low nibble
				bytes[p.position] = (byte) (i & 0x7);
				i = i >>> 3;
				if (i > 0) {
					bytes[p.position] = (byte) (bytes[p.position] | 0x08);
				}
				p.mid = true;
			}
		}
		return p;
	}

	/**
	 * Cursor into a nibble stream: a byte index plus a flag telling whether the
	 * low nibble of that byte is already occupied ({@code mid == true}).
	 */
	public static class PosMid {
		public int position;
		public boolean mid;

		void clear() {
			position = 0;
			mid = false;
		}
	}

	// The eight realizable orderings of (right, left, depth, parent); the first
	// Order constructor argument is the id used by OrderLookup.find.
	public static final Order DLRP = new Order(0, 2, 1, 0, 3);
	public static final Order LPRD = new Order(1, 1, 3, 0, 2);
	public static final Order LDRP = new Order(2, 1, 2, 0, 3);
	public static final Order DLPR = new Order(3, 2, 1, 3, 0);
	public static final Order LRPD = new Order(4, 1, 0, 3, 2);
	public static final Order LRDP = new Order(5, 1, 0, 2, 3);
	public static final Order LPDR = new Order(6, 1, 3, 2, 0);
	public static final Order LDPR = new Order(7, 1, 2, 3, 0);

	/**
	 * An ascending ordering of the four tuple slots (indices into the
	 * values array: 0=right, 1=left, 2=depth, 3=parent).
	 */
	public static class Order {
		private final int id;
		private final int[] order = new int[4];

		public Order(int id, int first, int second, int third, int fourth) {
			this.id = id;
			order[0] = first;
			order[1] = second;
			order[2] = third;
			order[3] = fourth;
		}

		// d[0] is the smallest value; d[1..3] are the gaps between successive
		// values in ascending order (non-negative when `order` sorts them).
		public void diffs(int[] values, int[] d) {
			d[0] = values[order[0]];
			d[1] = values[order[1]] - values[order[0]];
			d[2] = values[order[2]] - values[order[1]];
			d[3] = values[order[3]] - values[order[2]];
		}

		// Picks the Order whose slots sort (r, l, d, p) ascending.
		// NOTE(review): the branches assume tree-position invariants (e.g. in
		// the d < l branch only r vs p is tested, relying on d < l < r and
		// d < l < p holding) — confirm against how positions are produced.
		public static Order find(int r, int l, int d, int p) {
			if (l < d) {
				// l < d
				if (p < d) {
					// p cannot be < l, therefore l < p < d
					if (r < p) {
						// l < r < p < d
						return ByteOps.LRPD;
					} else {
						// l < p < d and r > p
						if (r < d) {
							// l < p < r < d
							return ByteOps.LPRD;
						} else {
							// l < p < d < r
							return ByteOps.LPDR;
						}
					}
				} else {
					// l < d < p
					if (p < r) {
						// l < d < p < r
						return ByteOps.LDPR;
					} else {
						// l < d < p and r < p
						if (r < d) {
							// l < r < d < p
							return ByteOps.LRDP;
						} else {
							// l < d < r < p
							return ByteOps.LDRP;
						}
					}
				}
			} else {
				// d < l
				if (r < p) {
					// d < l < r < p
					return ByteOps.DLRP;
				} else {
					// d < l < p < r
					return ByteOps.DLPR;
				}
			}
		}
	}

	// Maps an encoded order id back to its Order constant (inverse of Order.id).
	static class OrderLookup {
		public static Order find(int id) {
			switch (id) {
			case 0:
				return DLRP;
			case 1:
				return LPRD;
			case 2:
				return LDRP;
			case 3:
				return DLPR;
			case 4:
				return LRPD;
			case 5:
				return LRDP;
			case 6:
				return LPDR;
			case 7:
				return LDPR;
			}
			return null;
		}
	}

	/**
	 * Decodes up to five varints produced by {@link #int4ToDiffVarBytes}: the
	 * first selects the {@link Order}; the remaining four are cumulative diffs
	 * written into {@code ints} at {@code intstart + order-slot}. Each byte
	 * carries two 3-bit groups (low nibble first); bits 0x08/0x80 are the
	 * continuation flags for the low/high nibble respectively.
	 */
	public static void convertToInts(byte[] bytes, int[] ints, int intstart) {
		int prev = 0;
		int numberOfInts = 0;
		int moves = 0;
		Order o = null;
		for (int i = 0; i < bytes.length && numberOfInts < 5; i++) {
			byte b = bytes[i];
			// low nibble: 3 data bits accumulated least-significant-first
			prev = prev | (b & 0x07) << (moves * 3);
			moves++;
			if ((b & 0x08) == 0) {
				o = extracted(ints, intstart, prev, numberOfInts, o);
				prev = 0;
				moves = 0;
				numberOfInts++;
			}
			if (numberOfInts == 5)
				break;
			// high nibble of the same byte
			prev = prev | (((b & 0x70) >>> 4) << (moves * 3));
			moves++;
			if ((b & 0x80) == 0) {
				o = extracted(ints, intstart, prev, numberOfInts, o);
				prev = 0;
				moves = 0;
				numberOfInts++;
			}
		}
	}

	// First completed varint (o == null) only selects the Order; the second is
	// stored as-is; later ones are added to the previously reconstructed value
	// (undoing Order.diffs).
	private static Order extracted(int[] ints, int intstart, int prev,
			int numberOfInts, Order o) {
		if (o == null) {
			o = OrderLookup.find(prev);
		} else {
			if (numberOfInts > 1) {
				ints[intstart + o.order[numberOfInts - 1]] = prev
						+ ints[intstart + o.order[numberOfInts - 2]];
			} else {
				ints[intstart + o.order[numberOfInts - 1]] = prev;
			}
		}
		return o;
	}
}
| |
/* The following code was generated by JFlex 1.4.1 on 1/11/17 10:38 AM */
/*
* 11/13/2004
*
* LogoTokenMaker.java - Scanner for the Logo programming language.
*
* This library is distributed under a modified BSD license. See the included
* RSyntaxTextArea.License.txt file for details.
*/
package org.tros.logo.rsyntax;
import java.io.*;
import javax.swing.text.Segment;
import org.fife.ui.rsyntaxtextarea.*;
/**
* Scanner for the Logo programming language.<p>
*
* This implementation was created using
* <a href="http://www.jflex.de/">JFlex</a> 1.4.1; however, the generated file
* was modified for performance. Memory allocation needs to be almost
* completely removed to be competitive with the handwritten lexers (subclasses
* of <code>AbstractTokenMaker</code>, so this class has been modified so that
* Strings are never allocated (via yytext()), and the scanner never has to
* worry about refilling its buffer (needlessly copying chars around).
* We can achieve this because RText always scans exactly 1 line of tokens at a
* time, and hands the scanner this line as an array of characters (a Segment
* really). Since tokens contain pointers to char arrays instead of Strings
* holding their contents, there is no need for allocating new memory for
* Strings.<p>
*
* The actual algorithm generated for scanning has, of course, not been
* modified.<p>
*
* If you wish to regenerate this file yourself, keep in mind the following:
* <ul>
* <li>The generated <code>LogoTokenMaker.java</code> file will contain two
* definitions of both <code>zzRefill</code> and <code>yyreset</code>.
* You should hand-delete the second of each definition (the ones
* generated by the lexer), as these generated methods modify the input
* buffer, which we'll never have to do.</li>
* <li>You should also change the declaration/definition of zzBuffer to NOT
* be initialized. This is a needless memory allocation for us since we
* will be pointing the array somewhere else anyway.</li>
* <li>You should NOT call <code>yylex()</code> on the generated scanner
* directly; rather, you should use <code>getTokenList</code> as you would
* with any other <code>TokenMaker</code> instance.</li>
* </ul>
*
* @author Robert Futrell
* @version 0.5
*
*/
public class LogoTokenMaker extends AbstractJFlexTokenMaker {
/** This character denotes the end of file */
public static final int YYEOF = -1;
/** initial size of the lookahead buffer */
private static final int ZZ_BUFFERSIZE = 16384;
/** lexical states */
public static final int EOL_COMMENT = 1;
public static final int YYINITIAL = 0;
/**
* Translates characters to character classes
*/
private static final String ZZ_CMAP_PACKED =
"\11\0\1\14\1\13\1\0\1\14\1\10\22\0\1\14\1\32\1\10"+
"\1\11\1\1\1\30\1\33\1\34\2\24\1\30\1\22\1\34\1\22"+
"\1\23\1\31\1\4\7\6\2\3\1\45\1\15\1\25\1\26\1\25"+
"\1\34\1\11\3\5\1\20\1\21\1\20\5\1\1\17\13\1\1\16"+
"\2\1\1\34\1\12\1\34\1\27\1\2\1\0\1\51\1\54\1\56"+
"\1\47\1\44\1\41\1\53\1\35\1\42\1\1\1\57\1\43\1\61"+
"\1\52\1\55\1\37\1\1\1\50\1\40\1\36\1\7\1\60\1\46"+
"\1\64\1\63\1\62\1\10\1\27\1\10\1\34\uff81\0";
/**
* Translates characters to character classes
*/
private static final char [] ZZ_CMAP = zzUnpackCMap(ZZ_CMAP_PACKED);
/**
* Translates DFA states to action switch labels.
*/
private static final int [] ZZ_ACTION = zzUnpackAction();
private static final String ZZ_ACTION_PACKED_0 =
"\2\0\1\1\1\2\2\3\1\1\1\4\1\5\1\6"+
"\1\7\1\4\1\1\1\10\1\4\2\1\16\2\1\11"+
"\1\12\3\11\1\1\1\13\1\3\1\14\1\13\1\14"+
"\1\13\1\15\1\13\1\16\7\2\1\16\21\2\4\0"+
"\1\1\1\14\1\0\2\15\11\2\1\16\16\2\4\0"+
"\1\1\16\2\1\16\2\2\2\0\1\17\1\1\14\2"+
"\1\16\1\2\2\0\1\1\21\2";
private static int [] zzUnpackAction() {
int [] result = new int[164];
int offset = 0;
offset = zzUnpackAction(ZZ_ACTION_PACKED_0, offset, result);
return result;
}
private static int zzUnpackAction(String packed, int offset, int [] result) {
int i = 0; /* index in packed string */
int j = offset; /* index in unpacked array */
int l = packed.length();
while (i < l) {
int count = packed.charAt(i++);
int value = packed.charAt(i++);
do result[j++] = value; while (--count > 0);
}
return j;
}
/**
* Translates a state to a row index in the transition table
*/
private static final int [] ZZ_ROWMAP = zzUnpackRowMap();
private static final String ZZ_ROWMAP_PACKED_0 =
"\0\0\0\65\0\152\0\237\0\324\0\u0109\0\u013e\0\152"+
"\0\u013e\0\u0173\0\u013e\0\u013e\0\u01a8\0\u013e\0\u01dd\0\u01dd"+
"\0\u0212\0\u0247\0\u027c\0\u02b1\0\u02e6\0\u031b\0\u0350\0\u0385"+
"\0\u03ba\0\u03ef\0\u0424\0\u0459\0\u048e\0\u04c3\0\u04f8\0\u052d"+
"\0\u013e\0\u0562\0\u0597\0\u05cc\0\u0601\0\u0636\0\u0636\0\u0636"+
"\0\u066b\0\u06a0\0\u06d5\0\u070a\0\u073f\0\237\0\u0774\0\u07a9"+
"\0\u07de\0\u0813\0\u0848\0\u087d\0\u08b2\0\u08e7\0\u091c\0\u0951"+
"\0\u0986\0\u09bb\0\u09f0\0\u0a25\0\u0a5a\0\u0a8f\0\u0ac4\0\u0af9"+
"\0\u0b2e\0\u0b63\0\u0b98\0\u0bcd\0\u0c02\0\u0c37\0\u0c6c\0\u0ca1"+
"\0\u0cd6\0\u0d0b\0\u0d40\0\u0d75\0\u0daa\0\u0ddf\0\u0636\0\u0e14"+
"\0\u0e49\0\u0e7e\0\u0eb3\0\u0ee8\0\u0f1d\0\u0f52\0\u0f87\0\u0fbc"+
"\0\u0ff1\0\u1026\0\u105b\0\u1090\0\u10c5\0\u10fa\0\u112f\0\u1164"+
"\0\u1199\0\u11ce\0\u1203\0\u1238\0\u126d\0\u12a2\0\u12d7\0\u130c"+
"\0\u1341\0\u1376\0\u13ab\0\u13e0\0\u1415\0\u144a\0\u147f\0\u14b4"+
"\0\u14e9\0\u151e\0\u1553\0\u1588\0\u15bd\0\u15f2\0\u1627\0\u165c"+
"\0\u1691\0\u16c6\0\u16fb\0\u1730\0\u1765\0\u179a\0\u17cf\0\u1804"+
"\0\u1839\0\u186e\0\u18a3\0\u18d8\0\u190d\0\u1942\0\u1977\0\u19ac"+
"\0\u19e1\0\u1a16\0\u1a4b\0\u1a80\0\u1ab5\0\u1aea\0\u1b1f\0\u1b54"+
"\0\u1b89\0\u1839\0\u1bbe\0\u1bf3\0\u1c28\0\u1c5d\0\u1c92\0\u1cc7"+
"\0\u1cfc\0\u1d31\0\u1d66\0\u1d9b\0\u1dd0\0\u1e05\0\u1e3a\0\u1e6f"+
"\0\u1ea4\0\u1ed9\0\u1f0e\0\u1f43";
private static int [] zzUnpackRowMap() {
int [] result = new int[164];
int offset = 0;
offset = zzUnpackRowMap(ZZ_ROWMAP_PACKED_0, offset, result);
return result;
}
private static int zzUnpackRowMap(String packed, int offset, int [] result) {
int i = 0; /* index in packed string */
int j = offset; /* index in unpacked array */
int l = packed.length();
while (i < l) {
int high = packed.charAt(i++) << 16;
result[j++] = high | packed.charAt(i++);
}
return j;
}
/**
* The transition table of the DFA
*/
private static final int [] ZZ_TRANS = zzUnpackTrans();
private static final String ZZ_TRANS_PACKED_0 =
"\1\3\2\4\1\5\1\6\1\4\1\5\1\4\1\7"+
"\1\3\1\10\1\11\1\12\1\13\4\4\1\14\1\15"+
"\1\16\1\17\1\20\3\14\1\20\1\21\1\7\1\22"+
"\1\23\1\24\1\25\1\26\1\27\1\30\1\31\1\7"+
"\1\4\1\32\1\33\2\4\1\34\1\35\1\4\1\36"+
"\2\4\1\37\3\4\13\40\1\41\21\40\1\42\3\40"+
"\1\43\4\40\1\44\16\40\10\3\1\0\2\3\3\0"+
"\4\3\13\0\10\3\1\0\20\3\7\4\1\0\1\3"+
"\1\45\3\0\4\4\13\0\10\4\1\0\17\4\3\46"+
"\2\5\1\46\1\5\1\46\1\0\2\46\3\0\1\46"+
"\1\47\1\50\1\51\1\0\1\52\11\0\4\46\1\50"+
"\1\46\1\47\1\51\1\0\1\46\1\50\20\46\1\53"+
"\1\54\1\46\1\54\1\46\1\0\2\46\3\0\1\55"+
"\1\47\1\50\1\51\1\0\1\52\11\0\4\46\1\50"+
"\1\46\1\47\1\51\1\0\1\46\1\50\14\46\1\55"+
"\101\0\1\12\53\0\2\52\1\0\1\52\104\0\1\14"+
"\71\0\1\14\31\0\1\3\7\4\1\0\1\3\1\45"+
"\3\0\4\4\13\0\1\4\1\56\3\4\1\57\2\4"+
"\1\0\7\4\1\60\7\4\1\3\7\4\1\0\1\3"+
"\1\45\3\0\4\4\13\0\10\4\1\0\7\4\1\56"+
"\7\4\1\3\6\4\1\56\1\0\1\3\1\45\3\0"+
"\4\4\13\0\6\4\1\61\1\62\1\0\1\4\1\56"+
"\1\63\1\64\4\4\1\56\6\4\1\3\7\4\1\0"+
"\1\3\1\45\3\0\4\4\13\0\1\65\1\66\5\4"+
"\1\67\1\0\17\4\1\3\7\4\1\0\1\3\1\45"+
"\3\0\4\4\13\0\10\4\1\0\1\4\1\56\5\4"+
"\1\70\7\4\1\3\7\4\1\0\1\3\1\45\3\0"+
"\4\4\13\0\1\4\1\71\2\4\1\56\3\4\1\0"+
"\17\4\1\3\7\4\1\0\1\3\1\45\3\0\4\4"+
"\13\0\1\4\1\56\5\4\1\72\1\0\3\4\1\73"+
"\3\4\1\74\7\4\1\3\7\4\1\0\1\3\1\45"+
"\3\0\4\4\13\0\10\4\1\0\4\4\1\75\12\4"+
"\1\3\7\4\1\0\1\3\1\45\3\0\4\4\13\0"+
"\3\4\1\56\4\4\1\0\2\4\1\76\14\4\1\3"+
"\7\4\1\0\1\3\1\45\3\0\4\4\13\0\1\4"+
"\1\56\3\4\1\77\1\4\1\100\1\0\3\4\1\101"+
"\13\4\1\3\7\4\1\0\1\3\1\45\3\0\4\4"+
"\13\0\7\4\1\102\1\0\17\4\1\3\7\4\1\0"+
"\1\3\1\45\3\0\4\4\13\0\10\4\1\0\3\4"+
"\1\103\3\4\1\104\1\4\1\56\5\4\1\3\7\4"+
"\1\0\1\3\1\45\3\0\4\4\13\0\3\4\1\56"+
"\2\4\1\105\1\4\1\0\3\4\1\106\4\4\1\56"+
"\6\4\1\3\7\4\1\0\1\3\1\45\3\0\4\4"+
"\13\0\10\4\1\0\3\4\1\107\13\4\13\40\1\0"+
"\21\40\1\0\3\40\1\0\4\40\1\0\16\40\36\0"+
"\1\110\64\0\1\111\3\0\1\112\70\0\1\113\16\0"+
"\7\3\1\114\1\0\2\3\3\0\4\3\13\0\10\3"+
"\1\0\17\3\10\46\1\0\2\46\3\0\4\46\13\0"+
"\10\46\1\0\22\46\2\115\1\46\1\115\1\46\1\0"+
"\2\46\3\0\4\46\1\116\12\0\10\46\1\0\22\46"+
"\2\52\1\46\1\52\1\46\1\0\2\46\3\0\2\46"+
"\1\50\1\51\13\0\4\46\1\50\2\46\1\51\1\0"+
"\1\46\1\50\20\46\2\53\1\46\1\53\1\46\1\0"+
"\2\46\3\0\2\46\1\50\1\51\1\0\1\52\11\0"+
"\4\46\1\50\2\46\1\51\1\0\1\46\1\50\20\46"+
"\1\53\1\54\1\46\1\54\1\46\1\0\2\46\3\0"+
"\1\46\1\117\1\50\1\51\1\0\1\52\11\0\4\46"+
"\1\50\1\46\1\117\1\51\1\0\1\46\1\50\20\46"+
"\4\120\1\46\1\0\2\46\3\0\2\46\2\120\13\0"+
"\4\46\1\120\2\46\1\120\1\0\1\46\1\120\1\46"+
"\1\120\2\46\1\120\1\46\1\120\6\46\1\3\7\4"+
"\1\0\1\3\1\45\3\0\4\4\13\0\10\4\1\0"+
"\1\4\1\121\15\4\1\3\7\4\1\0\1\3\1\45"+
"\3\0\4\4\13\0\10\4\1\0\13\4\1\122\3\4"+
"\1\3\7\4\1\0\1\3\1\45\3\0\4\4\13\0"+
"\10\4\1\0\3\4\1\123\13\4\1\3\7\4\1\0"+
"\1\3\1\45\3\0\4\4\13\0\10\4\1\0\4\4"+
"\1\124\12\4\1\3\7\4\1\0\1\3\1\45\3\0"+
"\4\4\13\0\5\4\1\125\2\4\1\0\17\4\1\3"+
"\6\4\1\126\1\0\1\3\1\45\3\0\4\4\13\0"+
"\10\4\1\0\17\4\1\3\7\4\1\0\1\3\1\45"+
"\3\0\4\4\13\0\10\4\1\0\7\4\1\127\7\4"+
"\1\3\7\4\1\0\1\3\1\45\3\0\4\4\13\0"+
"\10\4\1\0\7\4\1\130\7\4\1\3\7\4\1\0"+
"\1\3\1\45\3\0\4\4\13\0\1\4\1\131\6\4"+
"\1\0\17\4\1\3\7\4\1\0\1\3\1\45\3\0"+
"\4\4\13\0\10\4\1\0\2\4\1\132\1\4\1\133"+
"\12\4\1\3\7\4\1\0\1\3\1\45\3\0\4\4"+
"\13\0\10\4\1\0\3\4\1\134\13\4\1\3\7\4"+
"\1\0\1\3\1\45\3\0\4\4\13\0\4\4\1\135"+
"\3\4\1\0\17\4\1\3\7\4\1\0\1\3\1\45"+
"\3\0\4\4\13\0\10\4\1\0\6\4\1\136\10\4"+
"\1\3\7\4\1\0\1\3\1\45\3\0\4\4\13\0"+
"\10\4\1\0\10\4\1\137\6\4\1\3\7\4\1\0"+
"\1\3\1\45\3\0\4\4\13\0\10\4\1\0\1\4"+
"\1\56\15\4\1\3\7\4\1\0\1\3\1\45\3\0"+
"\4\4\13\0\10\4\1\0\3\4\1\140\13\4\1\3"+
"\7\4\1\0\1\3\1\45\3\0\4\4\13\0\10\4"+
"\1\0\5\4\1\141\11\4\1\3\7\4\1\0\1\3"+
"\1\45\3\0\4\4\13\0\2\4\1\142\5\4\1\0"+
"\17\4\1\3\7\4\1\0\1\3\1\45\3\0\4\4"+
"\13\0\10\4\1\0\4\4\1\143\12\4\1\3\7\4"+
"\1\0\1\3\1\45\3\0\4\4\13\0\1\4\1\144"+
"\6\4\1\0\17\4\1\3\7\4\1\0\1\3\1\45"+
"\3\0\4\4\13\0\10\4\1\0\10\4\1\145\6\4"+
"\1\3\7\4\1\0\1\3\1\45\3\0\4\4\13\0"+
"\6\4\1\146\1\4\1\0\17\4\1\3\7\4\1\0"+
"\1\3\1\45\3\0\4\4\13\0\3\4\1\56\3\4"+
"\1\147\1\0\17\4\1\3\7\4\1\0\1\3\1\45"+
"\3\0\4\4\13\0\10\4\1\0\4\4\1\150\12\4"+
"\1\3\7\4\1\0\1\3\1\45\3\0\4\4\13\0"+
"\10\4\1\0\11\4\1\122\5\4\36\0\1\151\65\0"+
"\1\152\70\0\1\153\67\0\1\154\16\0\3\3\4\155"+
"\1\3\1\0\2\3\3\0\2\3\2\155\13\0\4\3"+
"\1\155\2\3\1\155\1\0\1\3\1\155\1\3\1\155"+
"\2\3\1\155\1\3\1\155\6\3\3\46\2\115\1\46"+
"\1\115\1\46\1\0\2\46\3\0\2\46\1\50\1\46"+
"\13\0\4\46\1\50\3\46\1\0\1\46\1\50\15\46"+
"\3\0\2\115\1\0\1\115\56\0\3\46\4\120\1\46"+
"\1\0\2\46\3\0\1\46\1\117\2\120\13\0\4\46"+
"\1\120\1\46\1\117\1\120\1\0\1\46\1\120\1\46"+
"\1\120\2\46\1\120\1\46\1\120\6\46\1\3\7\4"+
"\1\0\1\3\1\45\3\0\4\4\13\0\7\4\1\156"+
"\1\0\17\4\1\3\7\4\1\0\1\3\1\45\3\0"+
"\4\4\13\0\7\4\1\56\1\0\17\4\1\3\7\4"+
"\1\0\1\3\1\45\3\0\4\4\13\0\5\4\1\157"+
"\2\4\1\0\17\4\1\3\6\4\1\130\1\0\1\3"+
"\1\45\3\0\4\4\13\0\10\4\1\0\1\4\1\160"+
"\6\4\1\161\6\4\1\3\7\4\1\0\1\3\1\45"+
"\3\0\4\4\13\0\10\4\1\0\4\4\1\135\12\4"+
"\1\3\7\4\1\0\1\3\1\45\3\0\4\4\13\0"+
"\3\4\1\122\4\4\1\0\17\4\1\3\7\4\1\0"+
"\1\3\1\45\3\0\4\4\13\0\10\4\1\0\1\156"+
"\16\4\1\3\7\4\1\0\1\3\1\45\3\0\4\4"+
"\13\0\2\4\1\56\5\4\1\0\17\4\1\3\7\4"+
"\1\0\1\3\1\45\3\0\4\4\13\0\10\4\1\0"+
"\16\4\1\162\1\3\7\4\1\0\1\3\1\45\3\0"+
"\4\4\13\0\10\4\1\0\1\163\16\4\1\3\7\4"+
"\1\0\1\3\1\45\3\0\4\4\13\0\1\4\1\164"+
"\6\4\1\0\17\4\1\3\7\4\1\0\1\3\1\45"+
"\3\0\4\4\13\0\6\4\1\165\1\4\1\0\17\4"+
"\1\3\7\4\1\0\1\3\1\45\3\0\4\4\13\0"+
"\1\4\1\56\6\4\1\0\17\4\1\3\7\4\1\0"+
"\1\3\1\45\3\0\4\4\13\0\7\4\1\166\1\0"+
"\17\4\1\3\7\4\1\0\1\3\1\45\3\0\4\4"+
"\13\0\10\4\1\0\3\4\1\167\13\4\1\3\7\4"+
"\1\0\1\3\1\45\3\0\4\4\13\0\10\4\1\0"+
"\1\170\16\4\1\3\7\4\1\0\1\3\1\45\3\0"+
"\4\4\13\0\1\135\7\4\1\0\17\4\1\3\7\4"+
"\1\0\1\3\1\45\3\0\4\4\13\0\7\4\1\171"+
"\1\0\17\4\1\3\7\4\1\0\1\3\1\45\3\0"+
"\4\4\13\0\10\4\1\0\1\4\1\172\15\4\1\3"+
"\7\4\1\0\1\3\1\45\3\0\4\4\13\0\10\4"+
"\1\0\3\4\1\173\11\4\2\56\1\3\7\4\1\0"+
"\1\3\1\45\3\0\4\4\13\0\10\4\1\0\11\4"+
"\1\132\5\4\1\3\7\4\1\0\1\3\1\45\3\0"+
"\4\4\13\0\10\4\1\0\1\4\1\174\15\4\1\3"+
"\7\4\1\0\1\3\1\45\3\0\4\4\13\0\10\4"+
"\1\0\3\4\1\175\13\4\1\3\7\4\1\0\1\3"+
"\1\45\3\0\4\4\13\0\10\4\1\0\12\4\1\176"+
"\4\4\37\0\1\177\72\0\1\200\63\0\1\152\43\0"+
"\1\201\41\0\3\3\4\202\1\3\1\0\2\3\3\0"+
"\2\3\2\202\13\0\4\3\1\202\2\3\1\202\1\0"+
"\1\3\1\202\1\3\1\202\2\3\1\202\1\3\1\202"+
"\7\3\7\4\1\0\1\3\1\45\3\0\4\4\13\0"+
"\1\4\1\203\6\4\1\0\17\4\1\3\7\4\1\0"+
"\1\3\1\45\3\0\4\4\13\0\10\4\1\0\4\4"+
"\1\56\12\4\1\3\7\4\1\0\1\3\1\45\3\0"+
"\4\4\13\0\10\4\1\0\7\4\1\204\7\4\1\3"+
"\7\4\1\0\1\3\1\45\3\0\4\4\13\0\10\4"+
"\1\0\7\4\1\205\7\4\1\3\7\4\1\0\1\3"+
"\1\45\3\0\4\4\13\0\10\4\1\0\15\4\1\56"+
"\1\4\1\3\7\4\1\0\1\3\1\45\3\0\4\4"+
"\13\0\10\4\1\0\3\4\1\206\13\4\1\3\7\4"+
"\1\0\1\3\1\45\3\0\4\4\13\0\3\4\1\207"+
"\4\4\1\0\4\4\1\210\12\4\1\3\7\4\1\0"+
"\1\3\1\45\3\0\4\4\13\0\5\4\1\211\2\4"+
"\1\0\17\4\1\3\7\4\1\0\1\3\1\45\3\0"+
"\4\4\13\0\6\4\1\56\1\4\1\0\17\4\1\3"+
"\7\4\1\0\1\3\1\45\3\0\4\4\13\0\6\4"+
"\1\212\1\4\1\0\17\4\1\3\7\4\1\0\1\3"+
"\1\45\3\0\4\4\13\0\3\4\1\213\4\4\1\0"+
"\17\4\1\3\7\4\1\0\1\3\1\45\3\0\4\4"+
"\13\0\10\4\1\0\3\4\1\135\13\4\1\3\7\4"+
"\1\0\1\3\1\45\3\0\4\4\13\0\10\4\1\0"+
"\7\4\1\214\7\4\1\3\7\4\1\0\1\3\1\45"+
"\3\0\4\4\13\0\10\4\1\0\4\4\1\215\12\4"+
"\1\3\1\4\1\216\5\4\1\0\1\3\1\45\3\0"+
"\4\4\13\0\10\4\1\0\17\4\1\3\7\4\1\0"+
"\1\3\1\45\3\0\4\4\13\0\10\4\1\0\2\4"+
"\1\217\14\4\1\3\7\4\1\0\1\3\1\45\3\0"+
"\4\4\13\0\10\4\1\0\3\4\1\220\13\4\40\0"+
"\1\152\4\0\1\200\50\0\1\221\34\0\1\201\1\222"+
"\5\201\1\0\1\222\3\0\1\222\4\201\3\222\1\0"+
"\1\222\1\0\1\222\1\201\3\222\10\201\1\222\17\201"+
"\3\3\4\223\1\3\1\0\2\3\3\0\2\3\2\223"+
"\13\0\4\3\1\223\2\3\1\223\1\0\1\3\1\223"+
"\1\3\1\223\2\3\1\223\1\3\1\223\7\3\6\4"+
"\1\224\1\0\1\3\1\45\3\0\4\4\13\0\10\4"+
"\1\0\17\4\1\3\7\4\1\0\1\3\1\45\3\0"+
"\4\4\13\0\10\4\1\0\1\157\16\4\1\3\7\4"+
"\1\0\1\3\1\45\3\0\4\4\13\0\6\4\1\225"+
"\1\4\1\0\17\4\1\3\7\4\1\0\1\3\1\45"+
"\3\0\4\4\13\0\10\4\1\0\2\4\1\75\14\4"+
"\1\3\7\4\1\0\1\3\1\45\3\0\4\4\13\0"+
"\1\4\1\226\3\4\1\227\2\4\1\0\17\4\1\3"+
"\7\4\1\0\1\3\1\45\3\0\4\4\13\0\10\4"+
"\1\0\3\4\1\60\13\4\1\3\7\4\1\0\1\3"+
"\1\45\3\0\4\4\13\0\10\4\1\0\10\4\1\56"+
"\6\4\1\3\7\4\1\0\1\3\1\45\3\0\4\4"+
"\13\0\10\4\1\0\13\4\1\37\3\4\1\3\7\4"+
"\1\0\1\3\1\45\3\0\4\4\13\0\1\4\1\230"+
"\6\4\1\0\17\4\1\3\7\4\1\0\1\3\1\45"+
"\3\0\4\4\13\0\10\4\1\0\13\4\1\56\3\4"+
"\1\3\7\4\1\0\1\3\1\45\3\0\4\4\13\0"+
"\10\4\1\0\5\4\1\231\11\4\1\3\7\4\1\0"+
"\1\3\1\45\3\0\4\4\13\0\5\4\1\232\2\4"+
"\1\0\17\4\1\3\7\4\1\0\1\3\1\45\3\0"+
"\4\4\13\0\3\4\1\233\4\4\1\0\17\4\1\3"+
"\7\4\1\0\1\3\1\45\3\0\4\4\13\0\3\4"+
"\1\234\4\4\1\0\17\4\31\0\1\201\33\0\3\3"+
"\4\4\1\3\1\0\2\3\3\0\2\3\2\4\13\0"+
"\4\3\1\4\2\3\1\4\1\0\1\3\1\4\1\3"+
"\1\4\2\3\1\4\1\3\1\4\7\3\7\4\1\0"+
"\1\3\1\45\3\0\4\4\13\0\10\4\1\0\2\4"+
"\1\235\14\4\1\3\7\4\1\0\1\3\1\45\3\0"+
"\4\4\13\0\10\4\1\0\7\4\1\236\7\4\1\3"+
"\7\4\1\0\1\3\1\45\3\0\4\4\13\0\10\4"+
"\1\0\15\4\1\231\1\4\1\3\7\4\1\0\1\3"+
"\1\45\3\0\4\4\13\0\10\4\1\0\14\4\1\122"+
"\2\4\1\3\7\4\1\0\1\3\1\45\3\0\4\4"+
"\13\0\10\4\1\0\2\4\1\237\14\4\1\3\7\4"+
"\1\0\1\3\1\45\3\0\4\4\13\0\6\4\1\122"+
"\1\4\1\0\17\4\1\3\7\4\1\0\1\3\1\45"+
"\3\0\4\4\13\0\1\4\1\71\6\4\1\0\17\4"+
"\1\3\7\4\1\0\1\3\1\45\3\0\4\4\13\0"+
"\10\4\1\0\10\4\1\240\6\4\1\3\7\4\1\0"+
"\1\3\1\45\3\0\4\4\13\0\10\4\1\0\10\4"+
"\1\161\6\4\1\3\7\4\1\0\1\3\1\45\3\0"+
"\4\4\13\0\1\4\1\231\6\4\1\0\17\4\1\3"+
"\7\4\1\0\1\3\1\45\3\0\4\4\13\0\10\4"+
"\1\0\2\4\1\56\14\4\1\3\7\4\1\0\1\3"+
"\1\45\3\0\4\4\13\0\5\4\1\241\2\4\1\0"+
"\17\4\1\3\7\4\1\0\1\3\1\45\3\0\4\4"+
"\13\0\10\4\1\0\2\4\1\242\14\4\1\3\7\4"+
"\1\0\1\3\1\45\3\0\4\4\13\0\10\4\1\0"+
"\4\4\1\243\12\4\1\3\7\4\1\0\1\3\1\45"+
"\3\0\4\4\13\0\7\4\1\244\1\0\17\4\1\3"+
"\7\4\1\0\1\3\1\45\3\0\4\4\13\0\10\4"+
"\1\0\5\4\1\56\11\4\1\3\7\4\1\0\1\3"+
"\1\45\3\0\4\4\13\0\7\4\1\157\1\0\17\4";
private static int [] zzUnpackTrans() {
int [] result = new int[8056];
int offset = 0;
offset = zzUnpackTrans(ZZ_TRANS_PACKED_0, offset, result);
return result;
}
private static int zzUnpackTrans(String packed, int offset, int [] result) {
int i = 0; /* index in packed string */
int j = offset; /* index in unpacked array */
int l = packed.length();
while (i < l) {
int count = packed.charAt(i++);
int value = packed.charAt(i++);
value--;
do result[j++] = value; while (--count > 0);
}
return j;
}
/* error codes */
private static final int ZZ_UNKNOWN_ERROR = 0;
private static final int ZZ_NO_MATCH = 1;
private static final int ZZ_PUSHBACK_2BIG = 2;
/* error messages for the codes above */
private static final String ZZ_ERROR_MSG[] = {
"Unkown internal scanner error",
"Error: could not match input",
"Error: pushback value was too large"
};
/**
* ZZ_ATTRIBUTE[aState] contains the attributes of state <code>aState</code>
*/
private static final int [] ZZ_ATTRIBUTE = zzUnpackAttribute();
private static final String ZZ_ATTRIBUTE_PACKED_0 =
"\2\0\4\1\1\11\1\1\1\11\1\1\2\11\1\1"+
"\1\11\22\1\1\11\46\1\4\0\2\1\1\0\32\1"+
"\4\0\22\1\2\0\20\1\2\0\22\1";
private static int [] zzUnpackAttribute() {
int [] result = new int[164];
int offset = 0;
offset = zzUnpackAttribute(ZZ_ATTRIBUTE_PACKED_0, offset, result);
return result;
}
private static int zzUnpackAttribute(String packed, int offset, int [] result) {
int i = 0; /* index in packed string */
int j = offset; /* index in unpacked array */
int l = packed.length();
while (i < l) {
int count = packed.charAt(i++);
int value = packed.charAt(i++);
do result[j++] = value; while (--count > 0);
}
return j;
}
/** the input device (only stored at reset; scanning reads from the Segment) */
private java.io.Reader zzReader;
/** the current state of the DFA */
private int zzState;
/** the current lexical state */
private int zzLexicalState = YYINITIAL;
/** this buffer contains the current text to be matched and is
the source of the yytext() string; yyreset() re-points it at the
Segment's backing array */
private char zzBuffer[] = new char[ZZ_BUFFERSIZE];
/** the text position at the last accepting state */
private int zzMarkedPos;
/** the text position at the last state to be included in yytext
(NOTE(review): appears unused in this scanner — pushback is handled
via zzMarkedPos; confirm before removing) */
private int zzPushbackPos;
/** the current text position in the buffer */
private int zzCurrentPos;
/** startRead marks the beginning of the yytext() string in the buffer */
private int zzStartRead;
/** endRead marks the last character in the buffer, that has been read
from input */
private int zzEndRead;
/** number of newlines encountered up to the start of the matched text */
private int yyline;
/** the number of characters up to the start of the matched text */
private int yychar;
/**
 * the number of characters from the last newline up to the start of the
 * matched text
 */
private int yycolumn;
/**
 * zzAtBOL == true <=> the scanner is currently at the beginning of a line
 */
private boolean zzAtBOL = true;
/** zzAtEOF == true <=> the scanner is at the EOF */
private boolean zzAtEOF;
/* user code: */
/**
 * Constructor. This must be here because JFlex does not generate a
 * no-parameter constructor. Callers must invoke
 * {@code yyreset(Reader)} (via {@code getTokenList}) before scanning.
 */
public LogoTokenMaker() {
}
/**
 * Appends a hyperlink token for the given range to the token list.
 *
 * @param start     offset of the first character, relative to {@code zzBuffer}
 * @param end       offset of the last character, relative to {@code zzBuffer}
 * @param tokenType the token's type
 * @see #addToken(int, int, int)
 */
private void addHyperlinkToken(int start, int end, int tokenType) {
  // Document offset = buffer offset shifted by the segment's start.
  addToken(zzBuffer, start, end, tokenType, start + offsetShift, true);
}
/**
 * Adds the token specified to the current linked list of tokens, using
 * the current match boundaries ({@code zzStartRead} .. {@code zzMarkedPos-1}).
 *
 * @param tokenType The token's type.
 */
private void addToken(int tokenType) {
addToken(zzStartRead, zzMarkedPos-1, tokenType);
}
/**
 * Appends a non-hyperlink token for the given range to the token list.
 *
 * @param start     offset of the first character, relative to {@code zzBuffer}
 * @param end       offset of the last character, relative to {@code zzBuffer}
 * @param tokenType the token's type
 * @see #addHyperlinkToken(int, int, int)
 */
private void addToken(int start, int end, int tokenType) {
  // Document offset = buffer offset shifted by the segment's start.
  addToken(zzBuffer, start, end, tokenType, start + offsetShift, false);
}
/**
 * Adds the token specified to the current linked list of tokens and
 * advances {@code zzStartRead} so the next match starts after it.
 *
 * @param array The character array.
 * @param start The starting offset in the array.
 * @param end The ending offset in the array.
 * @param tokenType The token's type.
 * @param startOffset The offset in the document at which this token
 * occurs.
 * @param hyperlink Whether this token is a hyperlink.
 */
@Override
public void addToken(char[] array, int start, int end, int tokenType,
int startOffset, boolean hyperlink) {
super.addToken(array, start,end, tokenType, startOffset, hyperlink);
// Mark the consumed text as the start of the next token.
zzStartRead = zzMarkedPos;
}
/**
 * {@inheritDoc}
 *
 * Logo line comments start with {@code ";"}; there is no end marker
 * (the comment runs to end of line), hence the {@code null}.
 */
@Override
public String[] getLineCommentStartAndEnd(int languageIndex) {
return new String[] { ";", null };
}
/**
 * Returns the first token in the linked list of tokens generated
 * from <code>text</code>. This method must be implemented by
 * subclasses so they can correctly implement syntax highlighting.
 *
 * @param text The text from which to get tokens.
 * @param initialTokenType The token type we should start with.
 * @param startOffset The offset into the document at which
 * <code>text</code> starts.
 * @return The first <code>Token</code> in a linked list representing
 * the syntax highlighted text.
 */
public Token getTokenList(Segment text, int initialTokenType, int startOffset) {
resetTokenList();
this.offsetShift = -text.offset + startOffset;
// Start off in the proper state.
// NOTE(review): initialTokenType is ignored — presumably because Logo has
// no multi-line constructs so every line starts in the default state, and
// Token.NULL is assumed to map to YYINITIAL. Confirm.
int state = Token.NULL;
s = text;
try {
yyreset(zzReader);
yybegin(state);
return yylex();
} catch (IOException ioe) {
// Should never happen: zzRefill() never touches the Reader.
ioe.printStackTrace();
return new TokenImpl();
}
}
/**
 * Refills the input buffer.
 *
 * The buffer is the Segment's array and is always "full", so this never
 * reads from the Reader; it only reports whether the segment has been
 * fully consumed.
 *
 * @return <code>true</code> if EOF was reached, otherwise
 * <code>false</code>.
 */
private boolean zzRefill() {
return zzCurrentPos>=s.offset+s.count;
}
/**
 * Resets the scanner to read from a new input stream.
 * Does not close the old reader.
 *
 * All internal variables are reset, the old input stream
 * <b>cannot</b> be reused (internal buffer is discarded and lost).
 * Lexical state is set to <tt>YY_INITIAL</tt>.
 *
 * Precondition: the field {@code s} must already point at the Segment
 * to scan (set by {@code getTokenList}).
 *
 * @param reader the new input stream
 */
public final void yyreset(Reader reader) {
// 's' has been updated.
zzBuffer = s.array;
/*
 * We replaced the line below with the two below it because zzRefill
 * no longer "refills" the buffer (since the way we do it, it's always
 * "full" the first time through, since it points to the segment's
 * array). So, we assign zzEndRead here.
 */
//zzStartRead = zzEndRead = s.offset;
zzStartRead = s.offset;
zzEndRead = zzStartRead + s.count - 1;
zzCurrentPos = zzMarkedPos = s.offset;
zzLexicalState = YYINITIAL;
zzReader = reader;
zzAtBOL = true;
zzAtEOF = false;
}
/**
 * Creates a new scanner.
 * There is also a java.io.InputStream version of this constructor.
 *
 * @param in the java.io.Reader to read input from.
 */
public LogoTokenMaker(java.io.Reader in) {
this.zzReader = in;
}
/**
 * Creates a new scanner.
 * There is also a java.io.Reader version of this constructor; the
 * stream is wrapped in an InputStreamReader and delegated to it.
 *
 * @param in the java.io.InputStream to read input from.
 */
public LogoTokenMaker(java.io.InputStream in) {
this(new java.io.InputStreamReader(in));
}
/**
 * Unpacks the compressed character translation table.
 *
 * The map covers the full 16-bit char range (0x10000 entries); the
 * packed string holds (count, value) pairs. The literal 156 is the
 * generated length of the packed string — do not edit by hand.
 *
 * @param packed the packed character translation table
 * @return the unpacked character translation table
 */
private static char [] zzUnpackCMap(String packed) {
char [] map = new char[0x10000];
int i = 0; /* index in packed string */
int j = 0; /* index in unpacked array */
while (i < 156) {
int count = packed.charAt(i++);
char value = packed.charAt(i++);
do map[j++] = value; while (--count > 0);
}
return map;
}
/**
 * Closes the input stream and invalidates the scanner's buffer so that
 * no further input can be matched.
 *
 * @throws java.io.IOException error on close.
 */
public final void yyclose() throws java.io.IOException {
  zzAtEOF = true;          // signal end of file to the scanning loop
  zzEndRead = zzStartRead; // invalidate any buffered text
  if (zzReader != null) {
    zzReader.close();
  }
}
/**
 * Returns the current lexical state.
 *
 * @return the current lexical state.
 * @see #yybegin(int)
 */
public final int yystate() {
return zzLexicalState;
}
/**
 * Enters a new lexical state. The previous state is not saved — there
 * is no state stack.
 *
 * @param newState the new lexical state
 */
public final void yybegin(int newState) {
zzLexicalState = newState;
}
/**
 * Returns the text matched by the current regular expression, i.e. the
 * buffer contents between {@code zzStartRead} and {@code zzMarkedPos}.
 *
 * @return the text matched by the current regular expression.
 */
public final String yytext() {
  return String.valueOf(zzBuffer, zzStartRead, zzMarkedPos - zzStartRead);
}
/**
 * Returns the character at position <tt>pos</tt> from the
 * matched text.
 *
 * It is equivalent to yytext().charAt(pos), but faster.
 * No bounds checking is performed.
 *
 * @param pos the position of the character to fetch.
 * A value from 0 to yylength()-1.
 *
 * @return the character at position pos
 */
public final char yycharat(int pos) {
return zzBuffer[zzStartRead+pos];
}
/**
 * Returns the length of the matched text region.
 *
 * @return the length of the matched text region.
 */
public final int yylength() {
return zzMarkedPos-zzStartRead;
}
/**
 * Reports an error that occurred while scanning.
 *
 * In a well-formed scanner (no or only correct usage of
 * yypushback(int) and a match-all fallback rule) this method
 * will only be called with things that "Can't Possibly Happen".
 * If this method is called, something is seriously wrong
 * (e.g. a JFlex bug producing a faulty scanner etc.).
 *
 * Usual syntax/scanner level error handling should be done
 * in error fallback rules.
 *
 * @param errorCode the code of the error message to display
 */
private void zzScanError(int errorCode) {
  // An out-of-range code falls back to the generic message; this is the
  // same behavior as indexing and catching ArrayIndexOutOfBoundsException.
  String message;
  if (errorCode >= 0 && errorCode < ZZ_ERROR_MSG.length) {
    message = ZZ_ERROR_MSG[errorCode];
  } else {
    message = ZZ_ERROR_MSG[ZZ_UNKNOWN_ERROR];
  }
  throw new Error(message);
}
/**
 * Pushes the specified amount of characters back into the input stream.
 * They will be read again by the next call of the scanning method.
 *
 * @param number the number of characters to be read again.
 *        Must not be greater than yylength(), otherwise a scan error
 *        is raised.
 */
public void yypushback(int number) {
  if (number > yylength()) {
    zzScanError(ZZ_PUSHBACK_2BIG);
  }
  zzMarkedPos -= number;
}
/**
 * Resumes scanning until the next regular expression is matched,
 * the end of input is encountered or an I/O-Error occurs.
 *
 * Generated JFlex DFA driver: repeatedly walks the transition table
 * from the current lexical state, remembering the last accepting
 * state, then dispatches on the matched rule's action number.
 *
 * @return the next token
 * @exception java.io.IOException if any I/O-Error occurs
 */
public org.fife.ui.rsyntaxtextarea.Token yylex() throws java.io.IOException {
int zzInput;
int zzAction;
// cached fields:
int zzCurrentPosL;
int zzMarkedPosL;
int zzEndReadL = zzEndRead;
char [] zzBufferL = zzBuffer;
char [] zzCMapL = ZZ_CMAP;
int [] zzTransL = ZZ_TRANS;
int [] zzRowMapL = ZZ_ROWMAP;
int [] zzAttrL = ZZ_ATTRIBUTE;
while (true) {
zzMarkedPosL = zzMarkedPos;
// zzAction == -1 means "no accepting state seen yet".
zzAction = -1;
zzCurrentPosL = zzCurrentPos = zzStartRead = zzMarkedPosL;
zzState = zzLexicalState;
zzForAction: {
while (true) {
if (zzCurrentPosL < zzEndReadL)
zzInput = zzBufferL[zzCurrentPosL++];
else if (zzAtEOF) {
zzInput = YYEOF;
break zzForAction;
}
else {
// store back cached positions
zzCurrentPos = zzCurrentPosL;
zzMarkedPos = zzMarkedPosL;
boolean eof = zzRefill();
// get translated positions and possibly new buffer
zzCurrentPosL = zzCurrentPos;
zzMarkedPosL = zzMarkedPos;
zzBufferL = zzBuffer;
zzEndReadL = zzEndRead;
if (eof) {
zzInput = YYEOF;
break zzForAction;
}
else {
zzInput = zzBufferL[zzCurrentPosL++];
}
}
// -1 = no transition for this input: fall back to last accepting state.
int zzNext = zzTransL[ zzRowMapL[zzState] + zzCMapL[zzInput] ];
if (zzNext == -1) break zzForAction;
zzState = zzNext;
int zzAttributes = zzAttrL[zzState];
// bit 0: accepting state; bit 3: accept immediately (no lookahead).
if ( (zzAttributes & 1) == 1 ) {
zzAction = zzState;
zzMarkedPosL = zzCurrentPosL;
if ( (zzAttributes & 8) == 8 ) break zzForAction;
}
}
}
// store back cached position
zzMarkedPos = zzMarkedPosL;
// Dispatch on the rule's action number; each action is followed by a
// numbered break case (generated fall-through pattern — do not "fix").
switch (zzAction < 0 ? zzAction : ZZ_ACTION[zzAction]) {
case 14:
{ addToken(Token.RESERVED_WORD);
}
case 16: break;
case 2:
{ addToken(Token.IDENTIFIER);
}
case 17: break;
case 12:
{ addToken(Token.LITERAL_NUMBER_FLOAT);
}
case 18: break;
case 6:
{ addToken(Token.WHITESPACE);
}
case 19: break;
case 15:
{ int temp=zzStartRead; addToken(start,zzStartRead-1, Token.COMMENT_EOL); addHyperlinkToken(temp,zzMarkedPos-1, Token.COMMENT_EOL); start = zzMarkedPos;
}
case 20: break;
case 11:
{ addToken(Token.ERROR_NUMBER_FORMAT);
}
case 21: break;
case 1:
{ addToken(Token.ERROR_IDENTIFIER);
}
case 22: break;
case 7:
{ start = zzMarkedPos-1; yybegin(EOL_COMMENT);
}
case 23: break;
case 13:
{ addToken(Token.LITERAL_NUMBER_HEXADECIMAL);
}
case 24: break;
case 4:
{ addToken(Token.OPERATOR);
}
case 25: break;
case 3:
{ addToken(Token.LITERAL_NUMBER_DECIMAL_INT);
}
case 26: break;
case 5:
{ addNullToken(); return firstToken;
}
case 27: break;
case 10:
{ addToken(start,zzStartRead-1, Token.COMMENT_EOL); addNullToken(); return firstToken;
}
case 28: break;
case 9:
{
}
case 29: break;
case 8:
{ addToken(Token.SEPARATOR);
}
case 30: break;
default:
// No action matched: either end-of-input or a scanner error.
if (zzInput == YYEOF && zzStartRead == zzCurrentPos) {
zzAtEOF = true;
// Per-state EOF handling: close any open end-of-line comment.
switch (zzLexicalState) {
case EOL_COMMENT: {
addToken(start,zzStartRead-1, Token.COMMENT_EOL); addNullToken(); return firstToken;
}
case 165: break;
case YYINITIAL: {
addNullToken(); return firstToken;
}
case 166: break;
default:
return null;
}
}
else {
zzScanError(ZZ_NO_MATCH);
}
}
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.fs.swift.util;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.swift.exceptions.SwiftConfigurationException;
import org.junit.internal.AssumptionViolatedException;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Properties;
/**
 * Utilities used across test cases.
 *
 * All members are static; the class extends {@link org.junit.Assert} so the
 * assertion methods are available to this class and to any subclass without
 * static imports.
 */
public class SwiftTestUtils extends org.junit.Assert {

  private static final Log LOG =
      LogFactory.getLog(SwiftTestUtils.class);

  /** Configuration key naming the swift filesystem URI under test. */
  public static final String TEST_FS_SWIFT = "test.fs.swift.name";

  /** Configuration key for the IO buffer size used when creating files. */
  public static final String IO_FILE_BUFFER_SIZE = "io.file.buffer.size";

  /**
   * Get the test URI.
   * @param conf configuration
   * @return the URI named by {@link #TEST_FS_SWIFT}
   * @throws SwiftConfigurationException missing parameter or bad URI
   */
  public static URI getServiceURI(Configuration conf) throws
      SwiftConfigurationException {
    String instance = conf.get(TEST_FS_SWIFT);
    if (instance == null) {
      throw new SwiftConfigurationException(
          "Missing configuration entry " + TEST_FS_SWIFT);
    }
    try {
      return new URI(instance);
    } catch (URISyntaxException e) {
      throw new SwiftConfigurationException("Bad URI: " + instance);
    }
  }

  /**
   * Probe for the test filesystem URI being defined.
   * @param conf configuration
   * @return true iff {@link #TEST_FS_SWIFT} is set
   */
  public static boolean hasServiceURI(Configuration conf) {
    return conf.get(TEST_FS_SWIFT) != null;
  }

  /**
   * Assert that a property in the property set matches the expected value.
   * @param props property set
   * @param key property name
   * @param expected expected value. If null, the property must not be in the set
   */
  public static void assertPropertyEquals(Properties props,
                                          String key,
                                          String expected) {
    String val = props.getProperty(key);
    if (expected == null) {
      assertNull("Non null property " + key + " = " + val, val);
    } else {
      assertEquals("property " + key + " = " + val,
                   expected,
                   val);
    }
  }

  /**
   * Write a file and read it in, validating the result. Optional flags control
   * whether file overwrite operations should be enabled, and whether the
   * file should be deleted afterwards.
   *
   * If there is a mismatch between what was written and what was expected,
   * a small range of bytes either side of the first error are logged to aid
   * diagnosing what problem occurred -whether it was a previous file
   * or a corrupting of the current file. This assumes that two
   * sequential runs to the same path use datasets with different character
   * moduli.
   *
   * @param fs filesystem
   * @param path path to write to
   * @param src source dataset; must hold at least {@code len} bytes
   * @param len length of data
   * @param blocksize block size for the created file
   * @param overwrite should the create option allow overwrites?
   * @param delete should the file be deleted afterwards? -with a verification
   * that it worked. Deletion is not attempted if an assertion has failed
   * earlier -it is not in a <code>finally{}</code> block.
   * @throws IOException IO problems
   */
  public static void writeAndRead(FileSystem fs,
                                  Path path,
                                  byte[] src,
                                  int len,
                                  int blocksize,
                                  boolean overwrite,
                                  boolean delete) throws IOException {
    fs.mkdirs(path.getParent());
    writeDataset(fs, path, src, len, blocksize, overwrite);
    byte[] dest = readDataset(fs, path, len);
    compareByteArrays(src, dest, len);
    if (delete) {
      boolean deleted = fs.delete(path, false);
      assertTrue("Deleted", deleted);
      assertPathDoesNotExist(fs, "Cleanup failed", path);
    }
  }

  /**
   * Write a file.
   * Optional flags control whether file overwrite operations should be enabled.
   * @param fs filesystem
   * @param path path to write to
   * @param src source dataset; must hold at least {@code len} bytes
   * @param len length of data
   * @param blocksize block size for the created file
   * @param overwrite should the create option allow overwrites?
   * @throws IOException IO problems
   */
  public static void writeDataset(FileSystem fs,
                                  Path path,
                                  byte[] src,
                                  int len,
                                  int blocksize,
                                  boolean overwrite) throws IOException {
    assertTrue(
        "Not enough data in source array to write " + len + " bytes",
        src.length >= len);
    FSDataOutputStream out = fs.create(path,
                                       overwrite,
                                       fs.getConf()
                                         .getInt(IO_FILE_BUFFER_SIZE,
                                                 4096),
                                       (short) 1,
                                       blocksize);
    // close in a finally block so a failed write does not leak the stream
    try {
      out.write(src, 0, len);
    } finally {
      out.close();
    }
    assertFileHasLength(fs, path, len);
  }

  /**
   * Read the file and convert to a byte dataset.
   * @param fs filesystem
   * @param path path to read from
   * @param len length of data to read
   * @return the bytes
   * @throws IOException IO problems
   */
  public static byte[] readDataset(FileSystem fs, Path path, int len)
      throws IOException {
    FSDataInputStream in = fs.open(path);
    byte[] dest = new byte[len];
    try {
      in.readFully(0, dest);
    } finally {
      in.close();
    }
    return dest;
  }

  /**
   * Assert that the array src[0..len] and dest[] are equal.
   * On mismatch, bytes around the first differing position are logged
   * before failing, to aid diagnosis.
   * @param src source data
   * @param dest actual
   * @param len length of bytes to compare
   */
  public static void compareByteArrays(byte[] src,
                                       byte[] dest,
                                       int len) {
    assertEquals("Number of bytes read != number written",
                 len, dest.length);
    int errors = 0;
    int firstErrorByte = -1;
    for (int i = 0; i < len; i++) {
      if (src[i] != dest[i]) {
        if (errors == 0) {
          firstErrorByte = i;
        }
        errors++;
      }
    }
    if (errors > 0) {
      String message = String.format(" %d errors in file of length %d",
                                     errors, len);
      LOG.warn(message);
      // the range either side of the first error to print
      // this is a purely arbitrary number, to aid user debugging
      final int overlap = 10;
      for (int i = Math.max(0, firstErrorByte - overlap);
           i < Math.min(firstErrorByte + overlap, len);
           i++) {
        byte actual = dest[i];
        byte expected = src[i];
        String letter = toChar(actual);
        String line = String.format("[%04d] %2x %s%n", i, actual, letter);
        if (expected != actual) {
          line = String.format("[%04d] %2x %s -expected %2x %s%n",
                               i,
                               actual,
                               letter,
                               expected,
                               toChar(expected));
        }
        LOG.warn(line);
      }
      fail(message);
    }
  }

  /**
   * Convert a byte to a character for printing. If the
   * byte value is < 32 -and hence unprintable- the byte is
   * returned as a two digit hex value.
   * @param b byte
   * @return the printable character string
   */
  public static String toChar(byte b) {
    if (b >= 0x20) {
      return Character.toString((char) b);
    } else {
      return String.format("%02x", b);
    }
  }

  /**
   * Convert a whole buffer to a printable string, one character per byte.
   * @param buffer buffer to convert
   * @return the printable representation
   */
  public static String toChar(byte[] buffer) {
    StringBuilder builder = new StringBuilder(buffer.length);
    for (byte b : buffer) {
      builder.append(toChar(b));
    }
    return builder.toString();
  }

  /**
   * Convert a string to a byte array, keeping only the low 8 bits of
   * each character.
   * @param s source string
   * @return the byte array
   */
  public static byte[] toAsciiByteArray(String s) {
    char[] chars = s.toCharArray();
    int len = chars.length;
    byte[] buffer = new byte[len];
    for (int i = 0; i < len; i++) {
      buffer[i] = (byte) (chars[i] & 0xff);
    }
    return buffer;
  }

  /**
   * Best-effort recursive delete of a path, for use in test teardown.
   * @param fileSystem filesystem; may be null
   * @param cleanupPath path to delete
   */
  public static void cleanupInTeardown(FileSystem fileSystem,
                                       String cleanupPath) {
    cleanup("TEARDOWN", fileSystem, cleanupPath);
  }

  /**
   * Best-effort recursive delete of a path; failures are logged, never thrown.
   * @param action action name, used for logging
   * @param fileSystem filesystem; may be null
   * @param cleanupPath path to delete
   */
  public static void cleanup(String action,
                             FileSystem fileSystem,
                             String cleanupPath) {
    noteAction(action);
    try {
      if (fileSystem != null) {
        fileSystem.delete(new Path(cleanupPath).makeQualified(fileSystem),
                          true);
      }
    } catch (Exception e) {
      // deliberately swallowed: cleanup is best-effort
      LOG.error("Error deleting in "+ action + " - " + cleanupPath + ": " + e, e);
    }
  }

  /**
   * Log a banner for the named action at debug level.
   * @param action action name
   */
  public static void noteAction(String action) {
    if (LOG.isDebugEnabled()) {
      LOG.debug("============== "+ action +" =============");
    }
  }

  /**
   * downgrade a failure to a message and a warning, then an
   * exception for the Junit test runner to mark as failed.
   * @param message text message
   * @param failure what failed
   * @throws AssumptionViolatedException always
   */
  public static void downgrade(String message, Throwable failure) {
    LOG.warn("Downgrading test " + message, failure);
    AssumptionViolatedException ave =
        new AssumptionViolatedException(failure, null);
    throw ave;
  }

  /**
   * report an overridden test as unsupported.
   * @param message message to use in the text
   * @throws AssumptionViolatedException always
   */
  public static void unsupported(String message) {
    throw new AssumptionViolatedException(message);
  }

  /**
   * report a test has been skipped for some reason.
   * @param message message to use in the text
   * @throws AssumptionViolatedException always
   */
  public static void skip(String message) {
    throw new AssumptionViolatedException(message);
  }

  /**
   * Make an assertion about the length of a file.
   * @param fs filesystem
   * @param path path of the file
   * @param expected expected length
   * @throws IOException on File IO problems
   */
  public static void assertFileHasLength(FileSystem fs, Path path,
                                         int expected) throws IOException {
    FileStatus status = fs.getFileStatus(path);
    assertEquals(
        "Wrong file length of file " + path + " status: " + status,
        expected,
        status.getLen());
  }

  /**
   * Assert that a path refers to a directory.
   * @param fs filesystem
   * @param path path of the directory
   * @throws IOException on File IO problems
   */
  public static void assertIsDirectory(FileSystem fs,
                                       Path path) throws IOException {
    FileStatus fileStatus = fs.getFileStatus(path);
    assertIsDirectory(fileStatus);
  }

  /**
   * Assert that a path refers to a directory.
   * @param fileStatus stats to check
   */
  public static void assertIsDirectory(FileStatus fileStatus) {
    assertTrue("Should be a dir -but isn't: " + fileStatus,
               fileStatus.isDirectory());
  }

  /**
   * Write the text to a file, returning the converted byte array
   * for use in validating the round trip.
   * @param fs filesystem
   * @param path path of file
   * @param text text to write; may be null, in which case an empty file
   * is created
   * @param overwrite should the operation overwrite any existing file?
   * @return the written bytes
   * @throws IOException on IO problems
   */
  public static byte[] writeTextFile(FileSystem fs,
                                     Path path,
                                     String text,
                                     boolean overwrite) throws IOException {
    FSDataOutputStream stream = fs.create(path, overwrite);
    byte[] bytes = new byte[0];
    // close in a finally block so a failed write does not leak the stream
    try {
      if (text != null) {
        bytes = toAsciiByteArray(text);
        stream.write(bytes);
      }
    } finally {
      stream.close();
    }
    return bytes;
  }

  /**
   * Touch a file: deletes any existing entry first, then creates an
   * empty file without overwrite semantics.
   * @param fs filesystem
   * @param path path
   * @throws IOException IO problems
   */
  public static void touch(FileSystem fs,
                           Path path) throws IOException {
    fs.delete(path, true);
    writeTextFile(fs, path, null, false);
  }

  /**
   * Assert that a file exists, delete it, and verify it is gone.
   * @param fs filesystem
   * @param file file to delete
   * @param recursive recursive delete flag
   * @throws IOException IO problems
   */
  public static void assertDeleted(FileSystem fs,
                                   Path file,
                                   boolean recursive) throws IOException {
    assertPathExists(fs, "about to be deleted file", file);
    boolean deleted = fs.delete(file, recursive);
    String dir = ls(fs, file.getParent());
    assertTrue("Delete failed on " + file + ": " + dir, deleted);
    assertPathDoesNotExist(fs, "Deleted file", file);
  }

  /**
   * Read in "length" bytes, convert to an ascii string.
   * @param fs filesystem
   * @param path path to read
   * @param length #of bytes to read.
   * @return the bytes read and converted to a string
   * @throws IOException IO problems
   */
  public static String readBytesToString(FileSystem fs,
                                         Path path,
                                         int length) throws IOException {
    FSDataInputStream in = fs.open(path);
    try {
      byte[] buf = new byte[length];
      in.readFully(0, buf);
      return toChar(buf);
    } finally {
      in.close();
    }
  }

  /**
   * Get the default working directory for the current user.
   * @return "/user/" plus the user name
   */
  public static String getDefaultWorkingDirectory() {
    return "/user/" + System.getProperty("user.name");
  }

  /**
   * List a directory as a string.
   * @param fileSystem filesystem
   * @param path path to list
   * @return the listing
   * @throws IOException IO problems
   */
  public static String ls(FileSystem fileSystem, Path path) throws IOException {
    return SwiftUtils.ls(fileSystem, path);
  }

  /**
   * Render a status array as a string, one entry per line.
   * @param pathname path being listed
   * @param stats the status entries
   * @return a string for logging/assertions
   */
  public static String dumpStats(String pathname, FileStatus[] stats) {
    return pathname + SwiftUtils.fileStatsToString(stats,"\n");
  }

  /**
   * Assert that a file exists and whose {@link FileStatus} entry
   * declares that this is a file and not a symlink or directory.
   * @param fileSystem filesystem to resolve path against
   * @param filename name of the file
   * @throws IOException IO problems during file operations
   */
  public static void assertIsFile(FileSystem fileSystem, Path filename) throws
      IOException {
    assertPathExists(fileSystem, "Expected file", filename);
    FileStatus status = fileSystem.getFileStatus(filename);
    String fileInfo = filename + " " + status;
    assertFalse("File claims to be a directory " + fileInfo,
                status.isDirectory());
    /* disabled for Hadoop v1 compatibility
    assertFalse("File claims to be a symlink " + fileInfo,
                status.isSymlink());
    */
  }

  /**
   * Create a dataset for use in the tests; all data is in the range
   * base to (base+modulo-1) inclusive.
   * @param len length of data
   * @param base base of the data
   * @param modulo the modulo
   * @return the newly generated dataset
   */
  public static byte[] dataset(int len, int base, int modulo) {
    byte[] dataset = new byte[len];
    for (int i = 0; i < len; i++) {
      dataset[i] = (byte) (base + (i % modulo));
    }
    return dataset;
  }

  /**
   * Assert that a path exists -but make no assertions as to the
   * type of that entry.
   *
   * @param fileSystem filesystem to examine
   * @param message message to include in the assertion failure message
   * @param path path in the filesystem
   * @throws IOException IO problems
   */
  public static void assertPathExists(FileSystem fileSystem, String message,
                                      Path path) throws IOException {
    if (!fileSystem.exists(path)) {
      // list the parent dir BEFORE failing, so the listing makes it into the
      // failure message (previously the ls() call was unreachable after fail())
      String listing = ls(fileSystem, path.getParent());
      fail(message + ": not found " + path + " in " + path.getParent()
           + " " + listing);
    }
  }

  /**
   * Assert that a path does not exist.
   *
   * @param fileSystem filesystem to examine
   * @param message message to include in the assertion failure message
   * @param path path in the filesystem
   * @throws IOException IO problems
   */
  public static void assertPathDoesNotExist(FileSystem fileSystem,
                                            String message,
                                            Path path) throws IOException {
    try {
      FileStatus status = fileSystem.getFileStatus(path);
      fail(message + ": unexpectedly found " + path + " as " + status);
    } catch (FileNotFoundException expected) {
      //this is expected
    }
  }

  /**
   * Assert that a FileSystem.listStatus on a dir finds the subdir/child entry.
   * @param fs filesystem
   * @param dir directory to scan
   * @param subdir full path to look for
   * @throws IOException IO problems
   */
  public static void assertListStatusFinds(FileSystem fs,
                                           Path dir,
                                           Path subdir) throws IOException {
    FileStatus[] stats = fs.listStatus(dir);
    boolean found = false;
    StringBuilder builder = new StringBuilder();
    for (FileStatus stat : stats) {
      builder.append(stat.toString()).append('\n');
      if (stat.getPath().equals(subdir)) {
        found = true;
      }
    }
    assertTrue("Path " + subdir
               + " not found in directory " + dir + ":" + builder,
               found);
  }
}
| |
package de.tum.bio.sequenceviewer;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import com.compomics.util.experiment.biology.Enzyme;
import de.tum.bio.proteomics.Peptide;
import de.tum.bio.proteomics.PeptideId;
import de.tum.bio.sequenceviewer.aaintensityprofiles.ProfileGenerator;
import javafx.beans.property.BooleanProperty;
import javafx.beans.property.DoubleProperty;
import javafx.beans.property.ReadOnlyDoubleProperty;
import javafx.beans.property.SimpleBooleanProperty;
import javafx.beans.property.SimpleDoubleProperty;
import javafx.beans.property.SimpleStringProperty;
import javafx.beans.property.StringProperty;
import javafx.concurrent.Task;
import javafx.event.ActionEvent;
import javafx.event.EventHandler;
import javafx.geometry.Pos;
import javafx.geometry.VPos;
import javafx.scene.canvas.Canvas;
import javafx.scene.canvas.GraphicsContext;
import javafx.scene.control.Alert;
import javafx.scene.control.Alert.AlertType;
import javafx.scene.control.Button;
import javafx.scene.control.ButtonType;
import javafx.scene.control.Label;
import javafx.scene.control.ScrollPane;
import javafx.scene.control.Tooltip;
import javafx.scene.layout.BorderPane;
import javafx.scene.layout.ColumnConstraints;
import javafx.scene.layout.GridPane;
import javafx.scene.layout.HBox;
import javafx.scene.layout.Priority;
import javafx.scene.layout.VBox;
import javafx.scene.paint.Color;
import javafx.scene.text.Font;
import javafx.scene.text.FontWeight;
import javafx.scene.text.TextAlignment;
import javafx.stage.Stage;
public class SequenceViewer extends BorderPane {
// Pixel width of one amino-acid column in the grids.
private final int COLUMN_WIDTH = 20;
// Number of residues rendered per sequence canvas chunk.
private final int CANVAS_SIZE = 200;
// Grid coordinates of the headline row/column and the content area.
private final int DESCRIPTION_ROW = 1;
private final int DESCRIPTION_COLUMN = 1;
private final int CONTENT_ROW = 2;
private final int CONTENT_COLUMN = 2;
// Vertical gap between grid rows, in pixels.
private final int VGAP = 16;
private Stage owner;
// NOTE(review): "Hight" is a typo for "Height"; renaming would touch
// the constructor parameter and init(), so it is kept as-is here.
private ReadOnlyDoubleProperty parentHightProperty;
private ScrollPane centerScrollPane;
private VBox centerVBox;
private GridPane mainGrid;
private GridPane peptideGrid;
private GridPane intensityGrid;
// Grid for the optional in-silico digest view; null until first shown.
private GridPane inSilicoDigestGrid = null;
private VBox experimentIntensities = null;
private PeptideId peptideId;
private String proteinGroupId;
private String proteinSequence;
/*
 * Holds the peptide Ids for each amino acid position of the protein.
 */
private Map<Integer, List<String>> peptideMap;
/*
 * Holds the intensities for each amino acid position and specific experiment.
 */
private Map<String, Map<Integer, Long>> experimentIntensityMap;
// Bound to the background task's progress/message in init().
private DoubleProperty progressProperty = new SimpleDoubleProperty(0.0);
private StringProperty statusProperty = new SimpleStringProperty();
// Flipped to true once the background build task succeeds.
private BooleanProperty readyProperty = new SimpleBooleanProperty(false);
/**
 * Creates the sequence viewer for one protein group and starts building
 * the view on a background thread (via {@link #init()}).
 *
 * @param owner owning stage (kept for dialogs)
 * @param peptideId peptide identification data source
 * @param id protein group id whose sequence is displayed
 * @param parentHightProperty height property this pane binds its
 *        preferred height to (sic: "Hight")
 */
public SequenceViewer(Stage owner, PeptideId peptideId, String id, ReadOnlyDoubleProperty parentHightProperty) {
this.owner = owner;
setPeptideId(peptideId);
setProteinGroupId(id);
this.parentHightProperty = parentHightProperty;
proteinSequence = peptideId.getProteinGroupById(id).getSequenceAsString();
// Spawns a background task; readyProperty is set when the view is built.
init();
}
/**
 * Shows an in-silico digest of the protein with the given enzyme.
 * Any previously shown digest grid is removed first; the digestion
 * result is computed once per enzyme and cached in the peptide id.
 *
 * @param enzyme enzyme used for the digestion
 */
public void inSilicoDigest(Enzyme enzyme) {
  centerVBox.getChildren().remove(inSilicoDigestGrid);
  if (!peptideId.hasDigestionAssay(enzyme, proteinGroupId)) {
    // NOTE(review): 2, 7, 30 look like missed cleavages and min/max
    // peptide length — confirm against Enzyme.digest documentation.
    List<String> digestedSequences = enzyme.digest(proteinSequence, 2, 7, 30);
    peptideId.addPeptides(digestedSequences, proteinGroupId, enzyme);
  }
  List<Peptide> predictedPeptides = peptideId.getInSilicoPeptides(enzyme, proteinGroupId);
  inSilicoDigestGrid = buildGrid(null, true);
  inSilicoDigestGrid.setVgap(inSilicoDigestGrid.getVgap() * 0.3);
  generateDescription(inSilicoDigestGrid, "In silico digest with " + enzyme.getName(), true);
  generatePeptideRepresentations(CONTENT_COLUMN, CONTENT_ROW, inSilicoDigestGrid, predictedPeptides, false);
}
/**
 * Adds a headline row to the given grid, optionally with an "X" button
 * that removes the grid from the center VBox when pressed.
 *
 * @param targetGrid grid that receives the headline row
 * @param text headline text
 * @param closeButton whether to append a close button
 */
private void generateDescription(GridPane targetGrid, String text, boolean closeButton) {
  HBox hBox = new HBox();
  Label label = new Label(text);
  label.setFont(Font.font("System", FontWeight.EXTRA_BOLD, 15));
  label.setTextFill(Color.valueOf("#3070b3"));
  hBox.getChildren().add(label);
  if (closeButton) {
    Button close = new Button("X");
    close.getStyleClass().add("round-button");
    // Lambda instead of an anonymous EventHandler, consistent with the
    // task handlers used elsewhere in this class.
    close.setOnAction(e -> centerVBox.getChildren().remove(targetGrid));
    hBox.getChildren().add(close);
  }
  hBox.setSpacing(5.0);
  hBox.setAlignment(Pos.CENTER_LEFT);
  targetGrid.add(hBox, DESCRIPTION_COLUMN, DESCRIPTION_ROW, 30, 1);
}
/**
 * Initializes the peptide map with one empty id list per amino-acid
 * position (1-based, one entry per residue of the protein sequence).
 */
private void initPeptideMap() {
  peptideMap = new HashMap<>();
  final int sequenceLength = proteinSequence.length();
  for (int position = 1; position <= sequenceLength; position++) {
    peptideMap.put(position, new ArrayList<>(0));
  }
}
/**
 * Builds the whole sequence view (sequence, detected peptides, total
 * intensities) on a background Task and wires progress/status/ready
 * properties to it. Does nothing when no protein sequence is available.
 *
 * NOTE(review): the Task body creates and attaches JavaFX nodes off the
 * FX Application Thread — presumably safe only because this pane is not
 * yet in a live scene; confirm.
 */
private void init() {
if (proteinSequence != null) {
Task<Void> task = new Task<Void>() {
@Override
protected Void call() throws Exception {
// -1 progress renders as "indeterminate".
updateProgress(-1.0, 1.0);
updateMessage("Compute sequence view...");
// Build view components
centerVBox = new VBox();
centerVBox.setMaxHeight(Double.MAX_VALUE);
centerScrollPane = new ScrollPane(centerVBox);
setCenter(centerScrollPane);
setMaxHeight(Double.MAX_VALUE);
prefHeightProperty().bind(parentHightProperty);
// Initialize initial grid
initPeptideMap();
mainGrid = buildGrid(null, true);
// Main grid
generateSequenceRepresentation(CONTENT_COLUMN, CONTENT_ROW, mainGrid);
peptideGrid = buildGrid(mainGrid, true);
generateDescription(peptideGrid, "Detected peptides", false);
generatePeptideRepresentations(CONTENT_COLUMN, CONTENT_ROW, peptideGrid, peptideId.getPeptidesByProteinGroupsId(proteinGroupId), true);
// Intensity grid
intensityGrid = buildGrid(peptideGrid, true);
generateDescription(intensityGrid, "Total peptide intensities", false);
generateTotalIntensityGraph(CONTENT_COLUMN, CONTENT_ROW, intensityGrid);
return null;
}
@Override
protected void succeeded() {
super.succeeded();
updateProgress(0.0, 1.0);
updateMessage("Done.");
}
@Override
protected void failed() {
super.failed();
updateProgress(0.0, 1.0);
updateMessage("Cancelled.");
}
};
// Runs on the FX thread once the task completes.
task.setOnSucceeded(workerStateEvent -> {
readyProperty.set(true);
});
task.setOnFailed(workerStateEvent -> {
task.getException().printStackTrace(System.out);
Alert alert = new Alert(AlertType.ERROR, task.getException().getMessage(), ButtonType.OK);
alert.showAndWait();
});
// Expose the task's progress and status to observers of this viewer.
progressProperty.bind(task.progressProperty());
statusProperty.bind(task.messageProperty());
new Thread(task).start();
}
}
/**
 * Creates a styled grid with one fixed-width column per residue (plus the
 * leading content columns) and optionally inserts it into the center box
 * directly after the given sibling grid, or first when none is given.
 *
 * @param positionAfter sibling grid to insert after, or null for first place
 * @param putInParent   whether to attach the new grid to the center box
 * @return the newly created grid
 */
private GridPane buildGrid(GridPane positionAfter, boolean putInParent) {
    GridPane grid = new GridPane();
    grid.getStyleClass().add("sequence-viewer");
    int columnCount = proteinSequence.length() + CONTENT_COLUMN + 1;
    for (int c = 0; c < columnCount; c++) {
        grid.getColumnConstraints().add(new ColumnConstraints(COLUMN_WIDTH));
    }
    grid.setVgap(VGAP);
    VBox.setVgrow(grid, Priority.ALWAYS);
    if (putInParent) {
        int insertAt = (positionAfter == null)
                ? 0
                : centerVBox.getChildren().indexOf(positionAfter) + 1;
        centerVBox.getChildren().add(insertAt, grid);
    }
    return grid;
}
/**
 * Draws the amino-acid sequence into the grid as a series of canvases of at
 * most CANVAS_SIZE residues each, followed by a numbering row with a label
 * every 10 residues.
 *
 * @return the first free row below the generated content
 */
private int generateSequenceRepresentation(int column, int row, GridPane grid) {
    // NOTE(review): if CANVAS_SIZE is an int, length()/CANVAS_SIZE is integer
    // division and Math.ceil is a no-op; the tail is still covered because the
    // loop runs one extra iteration (<=) and skips empty substrings — confirm.
    for (int i = 0; i <= ((int) Math.ceil(proteinSequence.length() / CANVAS_SIZE)); i++) {
        if (!proteinSequence.substring((i)*CANVAS_SIZE).isEmpty()) {
            grid.add(createSequenceCanvas(proteinSequence.substring((i)*CANVAS_SIZE)), column+(i*CANVAS_SIZE), row, proteinSequence.substring((i)*CANVAS_SIZE).length(), 1);
        }
    }
    // Numbering row: one small canvas under every 10th residue (1-based).
    for (int i = 1; i <= proteinSequence.length(); i++) {
        if (i % 10 == 0) {
            grid.add(createNumberingCanvas(i), i+column-CONTENT_COLUMN, row+1, 3, 1);
        }
    }
    return row+2;
}
/**
 * Renders up to CANVAS_SIZE residues of the given subsequence onto a canvas,
 * one character centred per COLUMN_WIDTH-wide slot.
 *
 * @param subSequence the residues to draw (only the first CANVAS_SIZE are used)
 * @return the rendered canvas, sized to the number of visible residues
 */
private Canvas createSequenceCanvas(String subSequence) {
    int visible = Math.min(subSequence.length(), CANVAS_SIZE);
    Canvas canvas = new Canvas(visible * COLUMN_WIDTH, 20);
    GraphicsContext gc = canvas.getGraphicsContext2D();
    gc.setTextAlign(TextAlignment.CENTER);
    gc.setTextBaseline(VPos.CENTER);
    gc.setFont(new Font(20.0));
    gc.setFill(Color.BLACK);
    for (int idx = 0; idx < visible; idx++) {
        int slot = idx + 1;
        gc.fillText(
            String.valueOf(subSequence.charAt(idx)),
            Math.round(slot * COLUMN_WIDTH - COLUMN_WIDTH / 2),
            Math.round(canvas.getHeight() / 2)
        );
    }
    return canvas;
}
/**
 * Renders a residue-position label centred on a canvas three columns wide.
 *
 * @param number the 1-based residue position to display
 * @return the rendered numbering canvas
 */
private Canvas createNumberingCanvas(int number) {
    final Canvas canvas = new Canvas(COLUMN_WIDTH * 3, 20);
    final GraphicsContext graphics = canvas.getGraphicsContext2D();
    graphics.setTextAlign(TextAlignment.CENTER);
    graphics.setTextBaseline(VPos.CENTER);
    graphics.setFont(new Font(10.0));
    final double x = Math.round(COLUMN_WIDTH * 3 / 2);
    final double y = Math.round(canvas.getHeight() / 2);
    graphics.fillText(String.valueOf(number), x, y);
    return canvas;
}
/**
 * Adds one representation node per detected peptide to the grid, packing
 * non-overlapping peptides onto the same row, and optionally records which
 * peptide ids cover each residue position in {@code peptideMap}.
 *
 * @param column           first grid column of the sequence content
 * @param row              first grid row to place peptides on
 * @param grid             target grid
 * @param peptides         peptides to represent
 * @param updatePeptideMap whether to record residue coverage
 * @return the first free row below the generated rows
 */
private int generatePeptideRepresentations(int column, int row, GridPane grid, List<Peptide> peptides, boolean updatePeptideMap) {
    List<SequenceViewerPeptide> peptideRepresentations = buildPeptideRepresentationsList(peptides);
    // Sort by start position. The previous anonymous comparator returned 1 for
    // equal start positions, violating the Comparator contract (not
    // antisymmetric), which can make TimSort throw "Comparison method violates
    // its general contract!"; Integer-based comparison is a valid total order.
    peptideRepresentations.sort(Comparator.comparingInt(SequenceViewerPeptide::getStartPosition));
    Map<SequenceViewerPeptide, Integer> peptideRepresentationsMap = assignPeptideToRow(peptideRepresentations, row);
    int lastRow = 0;
    for (Entry<SequenceViewerPeptide, Integer> entry : peptideRepresentationsMap.entrySet()) {
        SequenceViewerPeptide peptide = entry.getKey();
        grid.add(peptide, peptide.getStartPosition() + column - 1, entry.getValue(), peptide.getSequenceAsString().length(), 1);
        if (updatePeptideMap) {
            // Record coverage for every residue the peptide spans (1-based, inclusive).
            for (int i = peptide.getStartPosition(); i <= peptide.getEndPosition(); i++) {
                peptideMap.get(i).add(peptide.getPeptideId());
            }
        }
        if (entry.getValue() > lastRow) {
            lastRow = entry.getValue();
        }
    }
    return lastRow + 1;
}
/**
 * Builds display wrappers for the given peptides. When a peptide's reported
 * start/end positions match the protein sequence they are used directly;
 * otherwise the peptide sequence is located in the protein by search, and
 * peptides that cannot be located are dropped.
 *
 * @param peptides source peptides
 * @return wrappers for all peptides that could be positioned
 */
private List<SequenceViewerPeptide> buildPeptideRepresentationsList(List<Peptide> peptides) {
    List<SequenceViewerPeptide> representations = new ArrayList<>();
    for (Peptide peptide : peptides) {
        String sequence = peptide.getSequenceAsString();
        boolean positionsMatch = proteinSequence
                .substring(peptide.getStartPosition() - 1, peptide.getEndPosition())
                .equals(sequence);
        if (positionsMatch) {
            representations.add(new SequenceViewerPeptide(peptide, COLUMN_WIDTH));
        } else {
            // Reported positions disagree with the protein sequence: fall back
            // to locating the peptide sequence directly.
            int index = proteinSequence.indexOf(sequence);
            if (index >= 0 && (index + sequence.length()) <= proteinSequence.length()) {
                representations.add(new SequenceViewerPeptide(
                        peptide, COLUMN_WIDTH, index + 1, index + sequence.length()));
            }
        }
    }
    return representations;
}
/**
 * Greedily assigns peptides (pre-sorted by start position) to display rows:
 * peptides that do not overlap the previously placed one share this row, and
 * the remainder is assigned recursively to the following rows.
 *
 * @param peptideRepresentations peptides sorted by start position
 * @param row                    row index to fill first
 * @return mapping from each peptide to its assigned row
 */
private Map<SequenceViewerPeptide, Integer> assignPeptideToRow(List<SequenceViewerPeptide> peptideRepresentations, int row) {
    Map<SequenceViewerPeptide, Integer> rowByPeptide = new HashMap<>();
    List<SequenceViewerPeptide> overflow = new ArrayList<>(peptideRepresentations);
    int lastEnd = -1;
    for (SequenceViewerPeptide candidate : peptideRepresentations) {
        if (candidate.getStartPosition() > lastEnd) {
            rowByPeptide.put(candidate, row);
            lastEnd = candidate.getEndPosition();
            overflow.remove(candidate);
        }
    }
    if (!overflow.isEmpty()) {
        rowByPeptide.putAll(assignPeptideToRow(overflow, row + 1));
    }
    return rowByPeptide;
}
/**
 * Sums, per residue position, the total intensities of all peptides covering
 * that position (per {@code peptideMap}) and renders the result as one
 * intensity bar row in the grid.
 *
 * @return the first free row below the intensity row
 */
private int generateTotalIntensityGraph(int column, int row, GridPane grid) {
    Map<Integer, Long> totalIntensityMap = new HashMap<>();
    long maxIntensity = 0;
    long minIntensity = 0;
    boolean firstEntry = true;
    for (Entry<Integer, List<String>> positionEntry : peptideMap.entrySet()) {
        long positionTotal = 0;
        for (String coveringPeptideId : positionEntry.getValue()) {
            positionTotal += peptideId.getPeptideById(coveringPeptideId).getTotalIntensity();
        }
        totalIntensityMap.put(positionEntry.getKey(), positionTotal);
        maxIntensity = Math.max(maxIntensity, positionTotal);
        if (firstEntry || positionTotal < minIntensity) {
            minIntensity = positionTotal;
            firstEntry = false;
        }
    }
    int nextRow = assembleIntensityRow(grid, totalIntensityMap, maxIntensity, minIntensity, column, row);
    return nextRow + 1;
}
/**
 * Computes per-experiment, per-residue intensity totals from the peptides
 * covering each position and renders one intensity bar grid per experiment,
 * replacing any previously shown experiment grids. All experiment rows share
 * a single global maximum so their bars are comparable.
 */
public void showExperimentIntensities() {
    if (experimentIntensities != null) {
        centerVBox.getChildren().remove(experimentIntensities);
    }
    // Initialize experimentIntensityMap
    experimentIntensityMap = new HashMap<>();
    for (String experimentName : peptideId.getSummary().getExperimentNames()) {
        experimentIntensityMap.put(experimentName, new HashMap<>());
    }
    // Compile experimentIntensityMap
    long maxIntensity = 0;
    for (Entry<Integer, List<String>> idList : peptideMap.entrySet()) {
        // for each position
        Map<String, Long> tmpExperimentIntensity = new HashMap<>();
        for (String experimentName : peptideId.getSummary().getExperimentNames()) {
            tmpExperimentIntensity.put(experimentName, (long) 0);
        }
        // Sum the intensities of every covering peptide, per experiment.
        for (String id : idList.getValue()) {
            for (Entry<String, Long> entry : peptideId.getPeptideById(id).getExperimentIntensities().entrySet()) {
                if (tmpExperimentIntensity.containsKey(entry.getKey())) {
                    tmpExperimentIntensity.put(entry.getKey(), entry.getValue() + tmpExperimentIntensity.get(entry.getKey()));
                } else {
                    tmpExperimentIntensity.put(entry.getKey(), entry.getValue());
                }
            }
        }
        // Fold this position's totals into the global map and track the maximum.
        for (Entry<String, Long> experiment : tmpExperimentIntensity.entrySet()) {
            experimentIntensityMap.get(experiment.getKey()).put(idList.getKey(), tmpExperimentIntensity.get(experiment.getKey()));
            if (tmpExperimentIntensity.get(experiment.getKey()) > maxIntensity) {
                maxIntensity = tmpExperimentIntensity.get(experiment.getKey());
            }
        }
    }
    // One grid (with description and tooltip) per experiment, all scaled to
    // the same global maximum.
    experimentIntensities = new VBox();
    for (String experimentName : peptideId.getSummary().getExperimentNames()) {
        GridPane grid = buildGrid(null, false);
        assembleIntensityRow(grid, experimentIntensityMap.get(experimentName), maxIntensity, 0, CONTENT_COLUMN, CONTENT_ROW);
        generateDescription(grid, experimentName, false);
        Tooltip tooltip = new Tooltip();
        tooltip.setText(experimentName);
        Tooltip.install(grid, tooltip);
        experimentIntensities.getChildren().add(grid);
    }
    centerVBox.getChildren().add(experimentIntensities);
}
/**
 * Opens the amino-acid intensity profile generator for the current protein
 * group, computing the per-experiment intensity map first if it has not been
 * built yet.
 */
public void generateAaIntensityProfiles() {
    if (experimentIntensityMap == null) {
        showExperimentIntensities();
    }
    ProfileGenerator aaProfileGenerator = new ProfileGenerator(owner);
    aaProfileGenerator.make(experimentIntensityMap, peptideId.getProteinGroupById(proteinGroupId));
}
/**
 * Adds one row of intensity bar canvases to the grid, splitting the data
 * into chunks of at most CANVAS_SIZE positions.
 *
 * @return the first free row below the added row
 */
private int assembleIntensityRow(GridPane grid, Map<Integer, Long> intensityMap, long maxIntensity, long minIntensity, int column, int row) {
    // NOTE(review): if CANVAS_SIZE is an int, size()/CANVAS_SIZE is integer
    // division, making Math.ceil a no-op; the <= bound runs one extra
    // iteration to compensate.
    for (int i = 0; i <= ((int) Math.ceil(intensityMap.size() / CANVAS_SIZE)); i++) {
        int start = (i*CANVAS_SIZE) + 1;
        // NOTE(review): the remaining-count/column-span arithmetic mixes the
        // grid column offset into the data count ("i*CANVAS_SIZE+column"),
        // unlike generateSequenceRepresentation which counts residues only —
        // verify the intended span against the layout.
        if (intensityMap.size() - (i*CANVAS_SIZE+column) > 0) {
            grid.add(createIntensityCanvas(intensityMap, start, maxIntensity, minIntensity), i*CANVAS_SIZE+column, row, intensityMap.size() - (i*CANVAS_SIZE+column), 1);
        }
    }
    return row+1;
}
/**
 * Renders up to CANVAS_SIZE intensity bars, starting at the given 1-based
 * position, scaled so the canvas height corresponds to {@code max}.
 *
 * @param intensityMap intensity per 1-based position
 * @param start        first position (1-based) to draw
 * @param max          global maximum intensity used for scaling
 * @param min          global minimum intensity (currently unused by drawing)
 * @return the rendered canvas
 */
private Canvas createIntensityCanvas(Map<Integer, Long> intensityMap, int start, long max, long min) {
    int canvasLength = COLUMN_WIDTH * CANVAS_SIZE;
    if ((intensityMap.size() - start) < CANVAS_SIZE) {
        canvasLength = (intensityMap.size() - start + 1) * COLUMN_WIDTH;
    }
    Canvas canvas = new Canvas(canvasLength, 50);
    GraphicsContext gc = canvas.getGraphicsContext2D();
    if (max <= 0) {
        // All intensities are zero: nothing to draw. Also guards the scaling
        // division below, which would otherwise produce 0/0 -> NaN coordinates.
        return canvas;
    }
    gc.setFill(Color.valueOf("#f9423a")); // hoisted: every bar uses the same fill
    int i = 1;
    for (int j = start; j <= intensityMap.size(); j++) {
        double y = canvas.getHeight() - canvas.getHeight() * intensityMap.get(j) / max;
        double height = canvas.getHeight() - y;
        gc.fillRect((i - 1) * COLUMN_WIDTH, y, COLUMN_WIDTH, height);
        if (i >= CANVAS_SIZE) {
            break;
        }
        i++;
    }
    return canvas;
}
/** Sets the peptide identification data backing this view. */
public void setPeptideId(PeptideId peptideId) {
    this.peptideId = peptideId;
}
/** @return the peptide identification data backing this view */
public PeptideId getPeptideId() {
    return peptideId;
}
/** Sets the id of the protein group shown by this view. */
public void setProteinGroupId(String id) {
    this.proteinGroupId = id;
}
/** @return the id of the protein group shown by this view */
public String getProteinGroupId() {
    return proteinGroupId;
}
/** @return progress of the background view-building task */
public DoubleProperty progressProperty() {
    return progressProperty;
}
/** @return status message of the background view-building task */
public StringProperty statusProperty() {
    return statusProperty;
}
/** @return flips to true once the view has been built successfully */
public BooleanProperty readyProperty() {
    return readyProperty;
}
/** @return whether the view has been built and is ready for display */
public boolean isReady() {
    return readyProperty.get();
}
}
| |
package org.bouncycastle.crypto.modes;
import java.io.ByteArrayOutputStream;
import org.bouncycastle.crypto.BlockCipher;
import org.bouncycastle.crypto.CipherParameters;
import org.bouncycastle.crypto.DataLengthException;
import org.bouncycastle.crypto.InvalidCipherTextException;
import org.bouncycastle.crypto.Mac;
import org.bouncycastle.crypto.OutputLengthException;
import org.bouncycastle.crypto.macs.CBCBlockCipherMac;
import org.bouncycastle.crypto.params.AEADParameters;
import org.bouncycastle.crypto.params.ParametersWithIV;
import org.bouncycastle.util.Arrays;
/**
 * Implements the Counter with Cipher Block Chaining mode (CCM) detailed in
 * NIST Special Publication 800-38C.
 * <p>
 * <b>Note</b>: this mode is a packet mode - it needs all the data up front.
 */
public class CCMBlockCipher
    implements AEADBlockCipher
{
    private BlockCipher cipher;
    private int blockSize;                    // always 16 (enforced in the constructor)
    private boolean forEncryption;
    private byte[] nonce;                     // 7..13 octets (validated in init)
    private byte[] initialAssociatedText;
    private int macSize;                      // tag length in bytes
    private CipherParameters keyParam;
    private byte[] macBlock;                  // tag of the last encrypt/decrypt operation
    // Packet mode: AAD and data are buffered until doFinal.
    private ExposedByteArrayOutputStream associatedText = new ExposedByteArrayOutputStream();
    private ExposedByteArrayOutputStream data = new ExposedByteArrayOutputStream();

    /**
     * Basic constructor.
     *
     * @param c the block cipher to be used.
     */
    public CCMBlockCipher(BlockCipher c)
    {
        this.cipher = c;
        this.blockSize = c.getBlockSize();
        this.macBlock = new byte[blockSize];
        // CCM is defined only for 128-bit block ciphers.
        if (blockSize != 16)
        {
            throw new IllegalArgumentException("cipher required with a block size of 16.");
        }
    }

    /**
     * return the underlying block cipher that we are wrapping.
     *
     * @return the underlying block cipher that we are wrapping.
     */
    public BlockCipher getUnderlyingCipher()
    {
        return cipher;
    }

    /**
     * Initializes the cipher for encryption or decryption.
     *
     * @param forEncryption true for encryption, false for decryption.
     * @param params an {@link AEADParameters} (key, mac size, nonce, optional
     *        associated text) or a {@link ParametersWithIV} (key and nonce,
     *        with the mac size defaulting to half the block size).
     * @throws IllegalArgumentException on an unsupported parameter type or a
     *         nonce outside the 7..13 octet range.
     */
    public void init(boolean forEncryption, CipherParameters params)
        throws IllegalArgumentException
    {
        this.forEncryption = forEncryption;
        CipherParameters cipherParameters;
        if (params instanceof AEADParameters)
        {
            AEADParameters param = (AEADParameters)params;
            nonce = param.getNonce();
            initialAssociatedText = param.getAssociatedText();
            macSize = param.getMacSize() / 8;   // AEADParameters carries the mac size in bits
            cipherParameters = param.getKey();
        }
        else if (params instanceof ParametersWithIV)
        {
            ParametersWithIV param = (ParametersWithIV)params;
            nonce = param.getIV();
            initialAssociatedText = null;
            macSize = macBlock.length / 2;      // default tag: half a block (8 bytes)
            cipherParameters = param.getParameters();
        }
        else
        {
            throw new IllegalArgumentException("invalid parameters passed to CCM");
        }
        // NOTE: Very basic support for key re-use, but no performance gain from it
        if (cipherParameters != null)
        {
            keyParam = cipherParameters;
        }
        if (nonce == null || nonce.length < 7 || nonce.length > 13)
        {
            throw new IllegalArgumentException("nonce must have length from 7 to 13 octets");
        }
        reset();
    }

    public String getAlgorithmName()
    {
        return cipher.getAlgorithmName() + "/CCM";
    }

    /** Buffers a single byte of additional authenticated data. */
    public void processAADByte(byte in)
    {
        associatedText.write(in);
    }

    /** Buffers additional authenticated data; processed only at doFinal. */
    public void processAADBytes(byte[] in, int inOff, int len)
    {
        // TODO: Process AAD online
        associatedText.write(in, inOff, len);
    }

    /** Buffers one data byte; CCM produces no output until doFinal. */
    public int processByte(byte in, byte[] out, int outOff)
        throws DataLengthException, IllegalStateException
    {
        data.write(in);
        return 0;
    }

    /** Buffers data bytes; CCM produces no output until doFinal. */
    public int processBytes(byte[] in, int inOff, int inLen, byte[] out, int outOff)
        throws DataLengthException, IllegalStateException
    {
        if (in.length < (inOff + inLen))
        {
            throw new DataLengthException("Input buffer too short");
        }
        data.write(in, inOff, inLen);
        return 0;
    }

    /**
     * Processes the whole buffered packet in one go, writes the result to
     * {@code out} and resets the cipher for the next packet.
     *
     * @return the number of bytes written to {@code out}.
     */
    public int doFinal(byte[] out, int outOff)
        throws IllegalStateException, InvalidCipherTextException
    {
        int len = processPacket(data.getBuffer(), 0, data.size(), out, outOff);
        reset();
        return len;
    }

    /** Clears the buffered data and AAD and resets the underlying cipher. */
    public void reset()
    {
        cipher.reset();
        associatedText.reset();
        data.reset();
    }

    /**
     * Returns a byte array containing the mac calculated as part of the
     * last encrypt or decrypt operation.
     *
     * @return the last mac calculated.
     */
    public byte[] getMac()
    {
        byte[] mac = new byte[macSize];
        System.arraycopy(macBlock, 0, mac, 0, mac.length);
        return mac;
    }

    /** Always 0: CCM is a packet mode and emits nothing before doFinal. */
    public int getUpdateOutputSize(int len)
    {
        return 0;
    }

    /** Output size of doFinal for {@code len} more input bytes: data plus tag
     *  when encrypting, data minus tag when decrypting. */
    public int getOutputSize(int len)
    {
        int totalData = len + data.size();
        if (forEncryption)
        {
            return totalData + macSize;
        }
        return totalData < macSize ? 0 : totalData - macSize;
    }

    /**
     * Process a packet of data for either CCM decryption or encryption.
     *
     * @param in data for processing.
     * @param inOff offset at which data starts in the input array.
     * @param inLen length of the data in the input array.
     * @return a byte array containing the processed input..
     * @throws IllegalStateException if the cipher is not appropriately set up.
     * @throws InvalidCipherTextException if the input data is truncated or the mac check fails.
     */
    public byte[] processPacket(byte[] in, int inOff, int inLen)
        throws IllegalStateException, InvalidCipherTextException
    {
        byte[] output;
        if (forEncryption)
        {
            output = new byte[inLen + macSize];
        }
        else
        {
            if (inLen < macSize)
            {
                throw new InvalidCipherTextException("data too short");
            }
            output = new byte[inLen - macSize];
        }
        processPacket(in, inOff, inLen, output, 0);
        return output;
    }

    /**
     * Process a packet of data for either CCM decryption or encryption.
     *
     * @param in data for processing.
     * @param inOff offset at which data starts in the input array.
     * @param inLen length of the data in the input array.
     * @param output output array.
     * @param outOff offset into output array to start putting processed bytes.
     * @return the number of bytes added to output.
     * @throws IllegalStateException if the cipher is not appropriately set up.
     * @throws InvalidCipherTextException if the input data is truncated or the mac check fails.
     * @throws DataLengthException if output buffer too short.
     */
    public int processPacket(byte[] in, int inOff, int inLen, byte[] output, int outOff)
        throws IllegalStateException, InvalidCipherTextException, DataLengthException
    {
        // TODO: handle null keyParam (e.g. via RepeatedKeySpec)
        // Need to keep the CTR and CBC Mac parts around and reset
        if (keyParam == null)
        {
            throw new IllegalStateException("CCM cipher unitialized.");
        }
        // q is the byte length of the message-length field (SP 800-38C A.1):
        // nonce length n and q must satisfy n + q = 15.
        int n = nonce.length;
        int q = 15 - n;
        if (q < 4)
        {
            // The message length must fit in q bytes.
            int limitLen = 1 << (8 * q);
            if (inLen >= limitLen)
            {
                throw new IllegalStateException("CCM packet too large for choice of q.");
            }
        }
        // Counter block Ctr0: flags octet encodes q-1, then the nonce; the
        // counter field itself starts at zero.
        byte[] iv = new byte[blockSize];
        iv[0] = (byte)((q - 1) & 0x7);
        System.arraycopy(nonce, 0, iv, 1, nonce.length);
        BlockCipher ctrCipher = new SICBlockCipher(cipher);
        ctrCipher.init(forEncryption, new ParametersWithIV(keyParam, iv));
        int outputLen;
        int inIndex = inOff;
        int outIndex = outOff;
        if (forEncryption)
        {
            outputLen = inLen + macSize;
            if (output.length < (outputLen + outOff))
            {
                throw new OutputLengthException("Output buffer too short.");
            }
            // CBC-MAC over B0 || AAD || plaintext, then encrypt the tag with S0.
            calculateMac(in, inOff, inLen, macBlock);
            ctrCipher.processBlock(macBlock, 0, macBlock, 0); // S0
            while (inIndex < (inOff + inLen - blockSize)) // S1...
            {
                ctrCipher.processBlock(in, inIndex, output, outIndex);
                outIndex += blockSize;
                inIndex += blockSize;
            }
            // Final (possibly partial) block: pad into a scratch block so the
            // CTR keystream is applied, then copy back only the real bytes.
            byte[] block = new byte[blockSize];
            System.arraycopy(in, inIndex, block, 0, inLen + inOff - inIndex);
            ctrCipher.processBlock(block, 0, block, 0);
            System.arraycopy(block, 0, output, outIndex, inLen + inOff - inIndex);
            // Append the (encrypted) tag after the ciphertext.
            System.arraycopy(macBlock, 0, output, outOff + inLen, macSize);
        }
        else
        {
            if (inLen < macSize)
            {
                throw new InvalidCipherTextException("data too short");
            }
            outputLen = inLen - macSize;
            if (output.length < (outputLen + outOff))
            {
                throw new OutputLengthException("Output buffer too short.");
            }
            // Recover the transmitted tag: decrypt it with S0, zero the rest
            // of the block for the constant-time comparison below.
            System.arraycopy(in, inOff + outputLen, macBlock, 0, macSize);
            ctrCipher.processBlock(macBlock, 0, macBlock, 0);
            for (int i = macSize; i != macBlock.length; i++)
            {
                macBlock[i] = 0;
            }
            while (inIndex < (inOff + outputLen - blockSize))
            {
                ctrCipher.processBlock(in, inIndex, output, outIndex);
                outIndex += blockSize;
                inIndex += blockSize;
            }
            byte[] block = new byte[blockSize];
            System.arraycopy(in, inIndex, block, 0, outputLen - (inIndex - inOff));
            ctrCipher.processBlock(block, 0, block, 0);
            System.arraycopy(block, 0, output, outIndex, outputLen - (inIndex - inOff));
            // Recompute the CBC-MAC over the recovered plaintext and compare in
            // constant time to avoid leaking where the tags differ.
            byte[] calculatedMacBlock = new byte[blockSize];
            calculateMac(output, outOff, outputLen, calculatedMacBlock);
            if (!Arrays.constantTimeAreEqual(macBlock, calculatedMacBlock))
            {
                throw new InvalidCipherTextException("mac check in CCM failed");
            }
        }
        return outputLen;
    }

    /**
     * Computes the CCM CBC-MAC (tag) over B0, the formatted associated data
     * and the message, writing the result into {@code macBlock}.
     *
     * @return the number of mac bytes produced.
     */
    private int calculateMac(byte[] data, int dataOff, int dataLen, byte[] macBlock)
    {
        Mac cMac = new CBCBlockCipherMac(cipher, macSize * 8);
        cMac.init(keyParam);
        //
        // build b0
        //
        byte[] b0 = new byte[16];
        // Flags octet: Adata bit, encoded tag length (t-2)/2, encoded q-1.
        if (hasAssociatedText())
        {
            b0[0] |= 0x40;
        }
        b0[0] |= (((cMac.getMacSize() - 2) / 2) & 0x7) << 3;
        b0[0] |= ((15 - nonce.length) - 1) & 0x7;
        System.arraycopy(nonce, 0, b0, 1, nonce.length);
        // Message length, big-endian, in the trailing q bytes of B0.
        int q = dataLen;
        int count = 1;
        while (q > 0)
        {
            b0[b0.length - count] = (byte)(q & 0xff);
            q >>>= 8;
            count++;
        }
        cMac.update(b0, 0, b0.length);
        //
        // process associated text
        //
        if (hasAssociatedText())
        {
            int extra;
            int textLength = getAssociatedTextLength();
            // AAD length encoding per SP 800-38C A.2.2: 2 bytes for short
            // lengths, 0xfffe marker plus 4 bytes otherwise.
            if (textLength < ((1 << 16) - (1 << 8)))
            {
                cMac.update((byte)(textLength >> 8));
                cMac.update((byte)textLength);
                extra = 2;
            }
            else // can't go any higher than 2^32
            {
                cMac.update((byte)0xff);
                cMac.update((byte)0xfe);
                cMac.update((byte)(textLength >> 24));
                cMac.update((byte)(textLength >> 16));
                cMac.update((byte)(textLength >> 8));
                cMac.update((byte)textLength);
                extra = 6;
            }
            if (initialAssociatedText != null)
            {
                cMac.update(initialAssociatedText, 0, initialAssociatedText.length);
            }
            if (associatedText.size() > 0)
            {
                cMac.update(associatedText.getBuffer(), 0, associatedText.size());
            }
            // Zero-pad the AAD section to a full 16-byte block boundary.
            extra = (extra + textLength) % 16;
            if (extra != 0)
            {
                for (int i = extra; i != 16; i++)
                {
                    cMac.update((byte)0x00);
                }
            }
        }
        //
        // add the text
        //
        cMac.update(data, dataOff, dataLen);
        return cMac.doFinal(macBlock, 0);
    }

    /** Total AAD length: buffered bytes plus any AAD supplied at init time. */
    private int getAssociatedTextLength()
    {
        return associatedText.size() + ((initialAssociatedText == null) ? 0 : initialAssociatedText.length);
    }

    private boolean hasAssociatedText()
    {
        return getAssociatedTextLength() > 0;
    }

    /**
     * ByteArrayOutputStream subclass exposing the internal buffer so the
     * buffered packet can be processed without an extra copy.
     */
    private class ExposedByteArrayOutputStream
        extends ByteArrayOutputStream
    {
        public ExposedByteArrayOutputStream()
        {
        }
        public byte[] getBuffer()
        {
            return this.buf;
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.maven;
import java.io.File;
import java.util.Collections;
import java.util.List;
import java.util.Locale;
import java.util.regex.Matcher;
import org.apache.camel.util.StringHelper;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugins.annotations.LifecyclePhase;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.Parameter;
import org.apache.maven.plugins.annotations.ResolutionScope;
import org.apache.velocity.VelocityContext;
/**
 * Generates Camel Component based on a collection of APIs.
 */
@Mojo(name = "fromApis", requiresDependencyResolution = ResolutionScope.COMPILE_PLUS_RUNTIME, requiresProject = true,
      defaultPhase = LifecyclePhase.GENERATE_SOURCES, threadSafe = true)
public class ApiComponentGeneratorMojo extends AbstractApiMethodBaseMojo {

    protected static final String DEFAULT_EXCLUDE_PACKAGES = "javax?\\.lang.*";

    /**
     * List of API names, proxies and code generation settings.
     */
    @Parameter(required = true)
    protected ApiProxy[] apis;

    /**
     * Common Javasource code generation settings.
     */
    @Parameter
    protected FromJavasource fromJavasource = new FromJavasource();

    /**
     * Names of options that can be set to null value if not specified.
     */
    @Parameter
    private String[] nullableOptions;

    /**
     * Method alias patterns for all APIs.
     */
    @Parameter
    private List<ApiMethodAlias> aliases = Collections.emptyList();

    /**
     * Validates each configured API proxy, generates its API method classes,
     * propagates common aliases/nullable options, and finally generates the
     * ApiCollection and ApiName sources.
     *
     * @throws MojoExecutionException if no API proxies are configured, a proxy
     *         has neither a signature file nor Javadoc source and is not
     *         generated by another proxy, or generation itself fails.
     */
    @Override
    public void executeInternal() throws MojoExecutionException {
        if (apis == null || apis.length == 0) {
            throw new MojoExecutionException("One or more API proxies are required");
        }
        // fix apiName for single API use-case since Maven configurator sets empty parameters as null!!!
        if (apis.length == 1 && apis[0].getApiName() == null) {
            apis[0].setApiName("");
        }
        // generate API methods for each API proxy
        for (ApiProxy api : apis) {
            // validate API configuration
            api.validate();
            // create the appropriate code generator if signatureFile or fromJavaDoc are specified
            // this way users can skip generating API classes for duplicate proxy class references
            final AbstractApiMethodGeneratorMojo apiMethodGenerator = getApiMethodGenerator(api);
            if (apiMethodGenerator != null) {
                // configure API method properties and generate Proxy classes
                configureMethodGenerator(apiMethodGenerator, api);
                try {
                    apiMethodGenerator.setProjectClassLoader(getProjectClassLoader()); // supply pre-constructed ClassLoader
                    apiMethodGenerator.executeInternal(); // Call internal execute method
                } catch (Exception e) {
                    final String msg = "Error generating source for " + api.getProxyClass() + ": " + e.getMessage();
                    throw new MojoExecutionException(msg, e);
                }
            } else {
                // make sure the proxy class is being generated elsewhere
                final String proxyClass = api.getProxyClass();
                boolean found = false;
                for (ApiProxy other : apis) {
                    if (other != api && proxyClass.equals(other.getProxyClass())) {
                        found = true;
                        break;
                    }
                }
                if (!found) {
                    throw new MojoExecutionException(
                            "Missing one of fromSignatureFile or fromJavadoc for "
                            + proxyClass);
                }
            }
            // set common aliases if needed
            if (!aliases.isEmpty() && api.getAliases().isEmpty()) {
                api.setAliases(aliases);
            }
            // set common nullable options if needed
            if (api.getNullableOptions() == null) {
                api.setNullableOptions(nullableOptions);
            }
        }
        // generate ApiCollection
        mergeTemplate(getApiContext(), getApiCollectionFile(), "/api-collection.vm");
        // generate ApiName
        mergeTemplate(getApiContext(), getApiNameFile(), "/api-name-enum.vm");
    }

    /**
     * Copies shared mojo settings onto the per-API method generator, letting
     * per-API values override the component-wide defaults where present.
     */
    private void configureMethodGenerator(AbstractApiMethodGeneratorMojo mojo, ApiProxy apiProxy) {
        // set AbstractGeneratorMojo properties
        mojo.componentName = componentName;
        mojo.scheme = scheme;
        mojo.outPackage = outPackage;
        mojo.componentPackage = componentPackage;
        mojo.project = project;
        // set AbstractSourceGeneratorMojo properties
        mojo.generatedSrcDir = generatedSrcDir;
        mojo.generatedTestDir = generatedTestDir;
        mojo.addCompileSourceRoots = addCompileSourceRoots;
        // set AbstractAPIMethodBaseMojo properties
        mojo.substitutions = apiProxy.getSubstitutions().length != 0
                ? apiProxy.getSubstitutions() : substitutions;
        mojo.excludeConfigNames = apiProxy.getExcludeConfigNames() != null
                ? apiProxy.getExcludeConfigNames() : excludeConfigNames;
        mojo.excludeConfigTypes = apiProxy.getExcludeConfigTypes() != null
                ? apiProxy.getExcludeConfigTypes() : excludeConfigTypes;
        mojo.extraOptions = apiProxy.getExtraOptions() != null
                ? apiProxy.getExtraOptions() : extraOptions;
        // set AbstractAPIMethodGeneratorMojo properties
        mojo.proxyClass = apiProxy.getProxyClass();
        mojo.classPrefix = apiProxy.getClassPrefix();
        mojo.apiName = apiProxy.getApiName();
        mojo.apiDescription = apiProxy.getApiDescription();
        mojo.consumerOnly = apiProxy.isConsumerOnly();
        mojo.producerOnly = apiProxy.isProducerOnly();
    }

    /**
     * Creates a Java-source-based method generator for the API, or returns
     * null when the API declares no Javasource configuration (in which case
     * the proxy class must be generated by another API entry).
     */
    private AbstractApiMethodGeneratorMojo getApiMethodGenerator(ApiProxy api) {
        AbstractApiMethodGeneratorMojo apiMethodGenerator = null;
        final FromJavasource apiFromJavasource = api.getFromJavasource();
        if (apiFromJavasource != null) {
            // Per-API settings win; otherwise fall back to the shared fromJavasource.
            final JavaSourceApiMethodGeneratorMojo mojo = new JavaSourceApiMethodGeneratorMojo();
            mojo.excludePackages = apiFromJavasource.getExcludePackages() != null
                    ? apiFromJavasource.getExcludePackages() : fromJavasource.getExcludePackages();
            mojo.excludeClasses = apiFromJavasource.getExcludeClasses() != null
                    ? apiFromJavasource.getExcludeClasses() : fromJavasource.getExcludeClasses();
            mojo.includeMethods = apiFromJavasource.getIncludeMethods() != null
                    ? apiFromJavasource.getIncludeMethods() : fromJavasource.getIncludeMethods();
            mojo.excludeMethods = apiFromJavasource.getExcludeMethods() != null
                    ? apiFromJavasource.getExcludeMethods() : fromJavasource.getExcludeMethods();
            mojo.includeStaticMethods = apiFromJavasource.getIncludeStaticMethods() != null
                    ? apiFromJavasource.getIncludeStaticMethods() : fromJavasource.getIncludeStaticMethods();
            mojo.aliases = api.getAliases().isEmpty() ? aliases : api.getAliases();
            mojo.nullableOptions = api.getNullableOptions() != null ? api.getNullableOptions() : nullableOptions;
            apiMethodGenerator = mojo;
        }
        return apiMethodGenerator;
    }

    /** Builds the Velocity context shared by the collection and name templates. */
    private VelocityContext getApiContext() {
        final VelocityContext context = new VelocityContext();
        context.put("componentName", componentName);
        context.put("componentPackage", componentPackage);
        context.put("apis", apis);
        context.put("helper", getClass());
        context.put("collectionName", getApiCollectionName());
        context.put("apiNameEnum", getApiNameEnum());
        return context;
    }

    private String getApiCollectionName() {
        return componentName + "ApiCollection";
    }

    private String getApiNameEnum() {
        return componentName + "ApiName";
    }

    private File getApiCollectionFile() {
        final StringBuilder fileName = getFileBuilder();
        fileName.append(getApiCollectionName()).append(".java");
        return new File(generatedSrcDir, fileName.toString());
    }

    private File getApiNameFile() {
        final StringBuilder fileName = getFileBuilder();
        fileName.append(getApiNameEnum()).append(".java");
        return new File(generatedSrcDir, fileName.toString());
    }

    /** Returns the generated-source directory path for {@code outPackage}. */
    private StringBuilder getFileBuilder() {
        final StringBuilder fileName = new StringBuilder();
        // Fix: String.replace(CharSequence, CharSequence) is a literal (non-regex)
        // replacement, so wrapping File.separator in Matcher.quoteReplacement()
        // inserted the escaping backslashes literally on Windows (doubled '\').
        // A plain char-for-char replace is correct on every platform.
        fileName.append(outPackage.replace('.', File.separatorChar)).append(File.separator);
        return fileName;
    }

    /** Template helper: name of the generated ApiMethod enum for a proxy class. */
    public static String getApiMethod(String proxyClass, String classPrefix) {
        String proxyClassWithCanonicalName = getProxyClassWithCanonicalName(proxyClass);
        String prefix = classPrefix != null ? classPrefix : "";
        return prefix + proxyClassWithCanonicalName.substring(proxyClassWithCanonicalName.lastIndexOf('.') + 1) + "ApiMethod";
    }

    /** Template helper: name of the generated EndpointConfiguration class. */
    public static String getEndpointConfig(String proxyClass, String classPrefix) {
        String proxyClassWithCanonicalName = getProxyClassWithCanonicalName(proxyClass);
        String prefix = classPrefix != null ? classPrefix : "";
        return prefix + proxyClassWithCanonicalName.substring(proxyClassWithCanonicalName.lastIndexOf('.') + 1)
               + "EndpointConfiguration";
    }

    /** Strips inner-class separators so nested proxy classes get flat names. */
    private static String getProxyClassWithCanonicalName(String proxyClass) {
        return proxyClass.replace("$", "");
    }

    /**
     * Template helper: converts a camelCase value to an UPPER_SNAKE_CASE enum
     * constant, or "DEFAULT" for a null/empty value.
     */
    public static String getEnumConstant(String enumValue) {
        if (enumValue == null || enumValue.isEmpty()) {
            return "DEFAULT";
        }
        String value = StringHelper.camelCaseToDash(enumValue);
        // replace dash with underscore and upper case
        value = value.replace('-', '_');
        value = value.toUpperCase(Locale.ENGLISH);
        return value;
    }

    /**
     * Template helper: renders the nullable option names as a comma-separated
     * list of quoted Java string literals, or "" when there are none.
     */
    public static String getNullableOptionValues(String[] nullableOptions) {
        if (nullableOptions == null || nullableOptions.length == 0) {
            return "";
        }
        final StringBuilder builder = new StringBuilder();
        final int nOptions = nullableOptions.length;
        int i = 0;
        for (String option : nullableOptions) {
            builder.append('"').append(option).append('"');
            if (++i < nOptions) {
                builder.append(", ");
            }
        }
        return builder.toString();
    }
}
| |
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution
// Automatically generated by LOXI from template of_class.java
// Do not modify
package org.projectfloodlight.openflow.protocol.ver14;
import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Set;
import org.jboss.netty.buffer.ChannelBuffer;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;
class OFBsnTlvMissPacketsVer14 implements OFBsnTlvMissPackets {
private static final Logger logger = LoggerFactory.getLogger(OFBsnTlvMissPacketsVer14.class);
// version: 1.4
final static byte WIRE_VERSION = 5;
final static int LENGTH = 12;
private final static U64 DEFAULT_VALUE = U64.ZERO;
// OF message fields
private final U64 value;
//
// Immutable default instance
final static OFBsnTlvMissPacketsVer14 DEFAULT = new OFBsnTlvMissPacketsVer14(
DEFAULT_VALUE
);
// package private constructor - used by readers, builders, and factory
// package private constructor - used by readers, builders, and factory.
// value is the only wire field; null is rejected eagerly so the immutable
// instance can never carry an unset property. (Generated code — comments only.)
OFBsnTlvMissPacketsVer14(U64 value) {
    if(value == null) {
        throw new NullPointerException("OFBsnTlvMissPacketsVer14: property value cannot be null");
    }
    this.value = value;
}
// Accessors for OF message fields
/** TLV type on the wire: 0xd identifies the bsn miss-packets TLV. */
@Override
public int getType() {
    return 0xd;
}
/** @return the 64-bit miss-packets counter value carried by this TLV */
@Override
public U64 getValue() {
    return value;
}
/** @return the OpenFlow protocol version (1.4) this serialization targets */
@Override
public OFVersion getVersion() {
    return OFVersion.OF_14;
}
/** @return a builder pre-populated from this instance's field values */
public OFBsnTlvMissPackets.Builder createBuilder() {
    return new BuilderWithParent(this);
}
/**
 * Builder seeded from an existing instance: any field not explicitly set
 * falls back to the parent message's value at build() time.
 */
static class BuilderWithParent implements OFBsnTlvMissPackets.Builder {
    final OFBsnTlvMissPacketsVer14 parentMessage;
    // OF message fields
    private boolean valueSet;   // tracks whether setValue was called
    private U64 value;
    BuilderWithParent(OFBsnTlvMissPacketsVer14 parentMessage) {
        this.parentMessage = parentMessage;
    }
    @Override
    public int getType() {
        return 0xd;
    }
    @Override
    public U64 getValue() {
        return value;
    }
    @Override
    public OFBsnTlvMissPackets.Builder setValue(U64 value) {
        this.value = value;
        this.valueSet = true;
        return this;
    }
    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_14;
    }
    @Override
    public OFBsnTlvMissPackets build() {
        // Unset fields inherit the parent message's values.
        U64 value = this.valueSet ? this.value : parentMessage.value;
        if(value == null)
            throw new NullPointerException("Property value must not be null");
        //
        return new OFBsnTlvMissPacketsVer14(
            value
        );
    }
}
static class Builder implements OFBsnTlvMissPackets.Builder {
// OF message fields
private boolean valueSet;
private U64 value;
@Override
public int getType() {
return 0xd;
}
@Override
public U64 getValue() {
return value;
}
@Override
public OFBsnTlvMissPackets.Builder setValue(U64 value) {
this.value = value;
this.valueSet = true;
return this;
}
@Override
public OFVersion getVersion() {
return OFVersion.OF_14;
}
//
@Override
public OFBsnTlvMissPackets build() {
U64 value = this.valueSet ? this.value : DEFAULT_VALUE;
if(value == null)
throw new NullPointerException("Property value must not be null");
return new OFBsnTlvMissPacketsVer14(
value
);
}
}
final static Reader READER = new Reader();
static class Reader implements OFMessageReader<OFBsnTlvMissPackets> {
@Override
public OFBsnTlvMissPackets readFrom(ChannelBuffer bb) throws OFParseError {
int start = bb.readerIndex();
// fixed value property type == 0xd
short type = bb.readShort();
if(type != (short) 0xd)
throw new OFParseError("Wrong type: Expected=0xd(0xd), got="+type);
int length = U16.f(bb.readShort());
if(length != 12)
throw new OFParseError("Wrong length: Expected=12(12), got="+length);
if(bb.readableBytes() + (bb.readerIndex() - start) < length) {
// Buffer does not have all data yet
bb.readerIndex(start);
return null;
}
if(logger.isTraceEnabled())
logger.trace("readFrom - length={}", length);
U64 value = U64.ofRaw(bb.readLong());
OFBsnTlvMissPacketsVer14 bsnTlvMissPacketsVer14 = new OFBsnTlvMissPacketsVer14(
value
);
if(logger.isTraceEnabled())
logger.trace("readFrom - read={}", bsnTlvMissPacketsVer14);
return bsnTlvMissPacketsVer14;
}
}
public void putTo(PrimitiveSink sink) {
FUNNEL.funnel(this, sink);
}
final static OFBsnTlvMissPacketsVer14Funnel FUNNEL = new OFBsnTlvMissPacketsVer14Funnel();
static class OFBsnTlvMissPacketsVer14Funnel implements Funnel<OFBsnTlvMissPacketsVer14> {
private static final long serialVersionUID = 1L;
@Override
public void funnel(OFBsnTlvMissPacketsVer14 message, PrimitiveSink sink) {
// fixed value property type = 0xd
sink.putShort((short) 0xd);
// fixed value property length = 12
sink.putShort((short) 0xc);
message.value.putTo(sink);
}
}
public void writeTo(ChannelBuffer bb) {
WRITER.write(bb, this);
}
final static Writer WRITER = new Writer();
static class Writer implements OFMessageWriter<OFBsnTlvMissPacketsVer14> {
@Override
public void write(ChannelBuffer bb, OFBsnTlvMissPacketsVer14 message) {
// fixed value property type = 0xd
bb.writeShort((short) 0xd);
// fixed value property length = 12
bb.writeShort((short) 0xc);
bb.writeLong(message.value.getValue());
}
}
@Override
public String toString() {
StringBuilder b = new StringBuilder("OFBsnTlvMissPacketsVer14(");
b.append("value=").append(value);
b.append(")");
return b.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
OFBsnTlvMissPacketsVer14 other = (OFBsnTlvMissPacketsVer14) obj;
if (value == null) {
if (other.value != null)
return false;
} else if (!value.equals(other.value))
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((value == null) ? 0 : value.hashCode());
return result;
}
}
| |
/*
* Copyright (C) 2005-2008 Jive Software. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jivesoftware.database;
import org.apache.commons.dbcp2.ConnectionFactory;
import org.apache.commons.dbcp2.DriverManagerConnectionFactory;
import org.apache.commons.dbcp2.PoolableConnection;
import org.apache.commons.dbcp2.PoolableConnectionFactory;
import org.apache.commons.dbcp2.PoolingDataSource;
import org.apache.commons.pool2.impl.GenericObjectPool;
import org.apache.commons.pool2.impl.GenericObjectPoolConfig;
import org.jivesoftware.util.JiveConstants;
import org.jivesoftware.util.JiveGlobals;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.Connection;
import java.sql.SQLException;
/**
* Default Jive connection provider, which uses an internal connection pool.<p>
*
* @author Jive Software
*/
public class DefaultConnectionProvider implements ConnectionProvider {

    private static final Logger Log = LoggerFactory.getLogger(DefaultConnectionProvider.class);

    private String driver;
    private String serverURL;
    private String username;
    private String password;
    private int minConnections = 3;
    private int maxConnections = 10;
    private String testSQL = "";
    private Boolean testBeforeUse = true;
    private Boolean testAfterUse = true;
    private int testTimeout = (int) JiveConstants.SECOND / 2;
    private long timeBetweenEvictionRuns = 30 * JiveConstants.SECOND;
    private long minIdleTime = 15 * JiveConstants.MINUTE;
    private long maxWaitTime = (int) JiveConstants.SECOND / 2;

    // DBCP doesn't expose the number of refused connections, so they are counted
    // manually in getConnection().
    private long refusedCount = 0;

    // Both are initialised by start(); null until then.
    private PoolingDataSource<PoolableConnection> dataSource;
    private GenericObjectPool<PoolableConnection> connectionPool;

    /**
     * Maximum time a connection can be open before it's reopened (in days)
     */
    private double connectionTimeout = 0.5;

    /**
     * MySQL doesn't currently support Unicode. However, a workaround is
     * implemented in the mm.mysql JDBC driver. Setting the Jive property
     * database.mysql.useUnicode to true will turn this feature on.
     */
    private boolean mysqlUseUnicode;

    /**
     * Creates a new DefaultConnectionProvider, loading its configuration from
     * the Jive XML properties.
     */
    public DefaultConnectionProvider() {
        loadProperties();
    }

    @Override
    public boolean isPooled() {
        return true;
    }

    /**
     * Returns a connection from the pool.
     *
     * @return a pooled database connection.
     * @throws SQLException if {@link #start()} has not been invoked yet, or if no
     *                      connection could be obtained from the pool.
     */
    @Override
    public Connection getConnection() throws SQLException {
        if (dataSource == null) {
            throw new SQLException("Check JDBC properties; data source was not initialised");
        }
        // DBCP doesn't expose the number of refused connections, so count them ourselves
        try {
            return dataSource.getConnection();
        } catch (final SQLException e) {
            refusedCount++;
            throw e;
        }
    }

    /**
     * Loads the JDBC driver and creates the connection pool and data source from
     * the currently configured properties.
     *
     * @throws RuntimeException if the configured JDBC driver class cannot be found.
     */
    @Override
    public void start() {
        try {
            Class.forName(driver);
        } catch (final ClassNotFoundException e) {
            throw new RuntimeException("Unable to find JDBC driver " + driver, e);
        }

        final ConnectionFactory connectionFactory = new DriverManagerConnectionFactory(serverURL, username, password);
        final PoolableConnectionFactory poolableConnectionFactory = new PoolableConnectionFactory(connectionFactory, null);
        poolableConnectionFactory.setValidationQuery(testSQL);
        poolableConnectionFactory.setValidationQueryTimeout(testTimeout);
        // connectionTimeout is expressed in days; DBCP wants milliseconds.
        poolableConnectionFactory.setMaxConnLifetimeMillis((long) (connectionTimeout * JiveConstants.DAY));

        final GenericObjectPoolConfig poolConfig = new GenericObjectPoolConfig();
        poolConfig.setTestOnBorrow(testBeforeUse);
        poolConfig.setTestOnReturn(testAfterUse);
        poolConfig.setMinIdle(minConnections);
        // Keep maxIdle >= minIdle, otherwise the pool would immediately evict
        // connections it is required to keep around.
        if( minConnections > GenericObjectPoolConfig.DEFAULT_MAX_IDLE )
        {
            poolConfig.setMaxIdle(minConnections);
        }
        poolConfig.setMaxTotal(maxConnections);
        poolConfig.setTimeBetweenEvictionRunsMillis(timeBetweenEvictionRuns);
        poolConfig.setSoftMinEvictableIdleTimeMillis(minIdleTime);
        poolConfig.setMaxWaitMillis(maxWaitTime);

        connectionPool = new GenericObjectPool<>(poolableConnectionFactory, poolConfig);
        poolableConnectionFactory.setPool(connectionPool);
        dataSource = new PoolingDataSource<>(connectionPool);
    }

    @Override
    public void restart() {
    }

    /**
     * Closes the data source (and with it, the underlying connection pool).
     * A no-op when {@link #start()} was never invoked.
     */
    @Override
    public void destroy() {
        if (dataSource == null) {
            // start() was never called (or failed before initialisation); nothing to release.
            return;
        }
        try {
            dataSource.close();
        } catch (final Exception e) {
            Log.error("Unable to close the data source", e);
        }
    }

    /**
     * Returns the JDBC driver classname used to make database connections.
     * For example: com.mysql.jdbc.Driver
     *
     * @return the JDBC driver classname.
     */
    public String getDriver() {
        return driver;
    }

    /**
     * Sets the JDBC driver classname used to make database connections.
     * For example: com.mysql.jdbc.Driver
     *
     * @param driver the fully qualified JDBC driver name.
     */
    public void setDriver(String driver) {
        this.driver = driver;
        saveProperties();
    }

    /**
     * Returns the JDBC connection URL used to make database connections.
     *
     * @return the JDBC connection URL.
     */
    public String getServerURL() {
        return serverURL;
    }

    /**
     * Sets the JDBC connection URL used to make database connections.
     *
     * @param serverURL the JDBC connection URL.
     */
    public void setServerURL(String serverURL) {
        this.serverURL = serverURL;
        saveProperties();
    }

    /**
     * Returns the username used to connect to the database. In some cases,
     * a username is not needed so this method will return null.
     *
     * @return the username used to connect to the database.
     */
    public String getUsername() {
        return username;
    }

    /**
     * Sets the username used to connect to the database. In some cases, a
     * username is not needed so null should be passed in.
     *
     * @param username the username used to connect to the database.
     */
    public void setUsername(String username) {
        this.username = username;
        saveProperties();
    }

    /**
     * Returns the password used to connect to the database. In some cases,
     * a password is not needed so this method will return null.
     *
     * @return the password used to connect to the database.
     */
    public String getPassword() {
        return password;
    }

    /**
     * Sets the password used to connect to the database. In some cases, a
     * password is not needed so null should be passed in.
     *
     * @param password the password used to connect to the database.
     */
    public void setPassword(String password) {
        this.password = password;
        saveProperties();
    }

    /**
     * Returns the minimum number of connections that the pool will use. This
     * should probably be at least three.
     *
     * @return the minimum number of connections in the pool.
     */
    public int getMinConnections() {
        return minConnections;
    }

    /**
     * Sets the minimum number of connections that the pool will use. This
     * should probably be at least three.
     *
     * @param minConnections the minimum number of connections in the pool.
     */
    public void setMinConnections(int minConnections) {
        this.minConnections = minConnections;
        saveProperties();
    }

    /**
     * Returns the maximum number of connections that the pool will use. The
     * actual number of connections in the pool will vary between this value
     * and the minimum based on the current load.
     *
     * @return the max possible number of connections in the pool.
     */
    public int getMaxConnections() {
        return maxConnections;
    }

    /**
     * Sets the maximum number of connections that the pool will use. The
     * actual number of connections in the pool will vary between this value
     * and the minimum based on the current load.
     *
     * @param maxConnections the max possible number of connections in the pool.
     */
    public void setMaxConnections(int maxConnections) {
        this.maxConnections = maxConnections;
        saveProperties();
    }

    /**
     * Returns the amount of time between connection recycles in days. For
     * example, a value of .5 would correspond to recycling the connections
     * in the pool once every half day.
     *
     * @return the amount of time in days between connection recycles.
     */
    public double getConnectionTimeout() {
        return connectionTimeout;
    }

    /**
     * Sets the amount of time between connection recycles in days. For
     * example, a value of .5 would correspond to recycling the connections
     * in the pool once every half day.
     *
     * @param connectionTimeout the amount of time in days between connection
     *                          recycles.
     */
    public void setConnectionTimeout(double connectionTimeout) {
        this.connectionTimeout = connectionTimeout;
        saveProperties();
    }

    /**
     * Returns the SQL statement used to test if a connection is valid.
     *
     * @return the SQL statement that will be run to test a connection.
     */
    public String getTestSQL() {
        return testSQL;
    }

    /**
     * Returns the timeout (in seconds, as passed to DBCP's validation query) used
     * when testing connections.
     *
     * @return the connection test timeout.
     */
    public int getTestTimeout() {
        return testTimeout;
    }

    // The following accessors delegate to the pool and therefore require start()
    // to have been invoked first.

    /**
     * @return milliseconds between runs of the pool's idle-connection evictor.
     */
    public long getTimeBetweenEvictionRunsMillis() {
        return connectionPool.getTimeBetweenEvictionRunsMillis();
    }

    /**
     * @return minimum idle time (in milliseconds) before a connection becomes
     *         eligible for soft eviction.
     */
    public long getMinIdleTime() {
        return connectionPool.getSoftMinEvictableIdleTimeMillis();
    }

    /**
     * @return the number of connections currently borrowed from the pool.
     */
    public int getActiveConnections() {
        return connectionPool.getNumActive();
    }

    /**
     * @return the number of connections currently idle in the pool.
     */
    public int getIdleConnections() {
        return connectionPool.getNumIdle();
    }

    /**
     * @return the total number of connections handed out over the pool's lifetime.
     */
    public long getConnectionsServed() {
        return connectionPool.getBorrowedCount();
    }

    /**
     * @return the number of connection requests that failed with an SQLException.
     */
    public long getRefusedCount() {
        return refusedCount;
    }

    /**
     * @return the maximum time (in milliseconds) a caller will wait for a connection.
     */
    public long getMaxWaitTime() {
        return connectionPool.getMaxWaitMillis();
    }

    /**
     * @return the mean time (in milliseconds) callers have waited to borrow a connection.
     */
    public long getMeanBorrowWaitTime() {
        return connectionPool.getMeanBorrowWaitTimeMillis();
    }

    /**
     * @return the maximum time (in milliseconds) any caller has waited to borrow a connection.
     */
    public long getMaxBorrowWaitTime() {
        return connectionPool.getMaxBorrowWaitTimeMillis();
    }

    /**
     * Sets the SQL statement used to test if a connection is valid. House keeping
     * and before/after connection tests make use of this. This
     * should be something that causes the minimal amount of work by the database
     * server and is as quick as possible.
     *
     * @param testSQL the SQL statement that will be run to test a connection.
     */
    public void setTestSQL(String testSQL) {
        this.testSQL = testSQL;
    }

    /**
     * Returns whether returned connections will be tested before being handed over
     * to be used.
     *
     * @return True if connections are tested before use.
     */
    public Boolean getTestBeforeUse() {
        return testBeforeUse;
    }

    /**
     * Sets whether connections will be tested before being handed over to be used.
     *
     * @param testBeforeUse True or false if connections are to be tested before use.
     */
    public void setTestBeforeUse(Boolean testBeforeUse) {
        this.testBeforeUse = testBeforeUse;
    }

    /**
     * Returns whether returned connections will be tested after being returned to
     * the pool.
     *
     * @return True if connections are tested after use.
     */
    public Boolean getTestAfterUse() {
        return testAfterUse;
    }

    /**
     * Sets whether connections will be tested after being returned to the pool.
     *
     * @param testAfterUse True or false if connections are to be tested after use.
     */
    public void setTestAfterUse(Boolean testAfterUse) {
        this.testAfterUse = testAfterUse;
    }

    /**
     * @return true when the database.mysql.useUnicode property is enabled.
     */
    public boolean isMysqlUseUnicode() {
        return mysqlUseUnicode;
    }

    /**
     * Load properties that already exist from Jive properties.
     */
    private void loadProperties() {

        driver = JiveGlobals.getXMLProperty("database.defaultProvider.driver");
        serverURL = JiveGlobals.getXMLProperty("database.defaultProvider.serverURL");
        username = JiveGlobals.getXMLProperty("database.defaultProvider.username");
        password = JiveGlobals.getXMLProperty("database.defaultProvider.password");
        String minCons = JiveGlobals.getXMLProperty("database.defaultProvider.minConnections");
        String maxCons = JiveGlobals.getXMLProperty("database.defaultProvider.maxConnections");
        String conTimeout = JiveGlobals.getXMLProperty("database.defaultProvider.connectionTimeout");
        testSQL = JiveGlobals.getXMLProperty("database.defaultProvider.testSQL", DbConnectionManager.getTestSQL(driver));
        testBeforeUse = JiveGlobals.getXMLProperty("database.defaultProvider.testBeforeUse", false);
        testAfterUse = JiveGlobals.getXMLProperty("database.defaultProvider.testAfterUse", false);
        testTimeout = JiveGlobals.getXMLProperty("database.defaultProvider.testTimeout", (int) JiveConstants.SECOND / 2);
        timeBetweenEvictionRuns = JiveGlobals.getXMLProperty("database.defaultProvider.timeBetweenEvictionRuns", (int) (30 * JiveConstants.SECOND));
        minIdleTime = JiveGlobals.getXMLProperty("database.defaultProvider.minIdleTime", (int) (15 * JiveConstants.MINUTE));
        maxWaitTime = JiveGlobals.getXMLProperty("database.defaultProvider.maxWaitTime", (int) JiveConstants.SECOND / 2);

        // See if we should use Unicode under MySQL
        mysqlUseUnicode = Boolean.valueOf(JiveGlobals.getXMLProperty("database.mysql.useUnicode"));
        try {
            if (minCons != null) {
                minConnections = Integer.parseInt(minCons);
            }
            if (maxCons != null) {
                maxConnections = Integer.parseInt(maxCons);
            }
            if (conTimeout != null) {
                connectionTimeout = Double.parseDouble(conTimeout);
            }
        }
        catch (Exception e) {
            Log.error("Error: could not parse default pool properties. " +
                    "Make sure the values exist and are correct.", e);
        }
    }

    /**
     * Save properties as Jive properties.
     */
    private void saveProperties() {

        JiveGlobals.setXMLProperty("database.defaultProvider.driver", driver);
        JiveGlobals.setXMLProperty("database.defaultProvider.serverURL", serverURL);
        JiveGlobals.setXMLProperty("database.defaultProvider.username", username);
        JiveGlobals.setXMLProperty("database.defaultProvider.password", password);
        JiveGlobals.setXMLProperty("database.defaultProvider.testSQL", testSQL);
        JiveGlobals.setXMLProperty("database.defaultProvider.testBeforeUse", testBeforeUse.toString());
        JiveGlobals.setXMLProperty("database.defaultProvider.testAfterUse", testAfterUse.toString());
        JiveGlobals.setXMLProperty("database.defaultProvider.testTimeout", String.valueOf(testTimeout));
        JiveGlobals.setXMLProperty("database.defaultProvider.timeBetweenEvictionRuns", String.valueOf(timeBetweenEvictionRuns));
        JiveGlobals.setXMLProperty("database.defaultProvider.minIdleTime", String.valueOf(minIdleTime));
        JiveGlobals.setXMLProperty("database.defaultProvider.maxWaitTime", String.valueOf(maxWaitTime));
        JiveGlobals.setXMLProperty("database.defaultProvider.minConnections", Integer.toString(minConnections));
        JiveGlobals.setXMLProperty("database.defaultProvider.maxConnections", Integer.toString(maxConnections));
        JiveGlobals.setXMLProperty("database.defaultProvider.connectionTimeout", Double.toString(connectionTimeout));
    }
}
| |
/**
* Copyright 2005-2014 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl2.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.rice.krad.web.form;
import org.apache.commons.lang.StringUtils;
import org.kuali.rice.krad.lookup.LookupUtils;
import org.kuali.rice.krad.lookup.Lookupable;
import org.kuali.rice.krad.uif.UifConstants.ViewType;
import org.kuali.rice.krad.uif.view.LookupView;
import org.kuali.rice.krad.uif.service.ViewHelperService;
import org.kuali.rice.krad.util.KRADConstants;
import org.kuali.rice.krad.util.KRADUtils;
import javax.servlet.http.HttpServletRequest;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Form class for <code>LookupView</code> screens
*
* @author Kuali Rice Team (rice.collab@kuali.org)
*/
public class LookupForm extends UifFormBase {
    private static final long serialVersionUID = -7323484966538685327L;
    private static final org.apache.log4j.Logger LOG = org.apache.log4j.Logger.getLogger(LookupForm.class);

    private String dataObjectClassName;
    private String docNum;
    private String referencesToRefresh;

    private boolean multipleValuesSelect;
    private String lookupCollectionName;

    private Map<String, String> criteriaFields;
    private Map<String, String> fieldConversions;

    private boolean atLeastOneRowReturnable;
    private boolean atLeastOneRowHasActions;

    private Collection<?> searchResults;

    private boolean redirectedLookup;

    /**
     * Initialises the form for the lookup view type with empty criteria and
     * field-conversion maps.
     */
    public LookupForm() {
        super();

        setViewTypeName(ViewType.LOOKUP);

        atLeastOneRowReturnable = false;
        atLeastOneRowHasActions = false;
        multipleValuesSelect = false;
        redirectedLookup = false;

        criteriaFields = new HashMap<String, String>();
        fieldConversions = new HashMap<String, String>();
    }

    /**
     * Picks out business object name from the request to get retrieve a
     * lookupable and set properties
     */
    @Override
    public void postBind(HttpServletRequest request) {
        super.postBind(request);

        try {
            Lookupable lookupable = getLookupable();
            if (lookupable == null) {
                // assume lookupable will be set by controller or a redirect will happen
                return;
            }

            if (StringUtils.isBlank(getDataObjectClassName())) {
                setDataObjectClassName(((LookupView) getView()).getDataObjectClassName().getName());
            }

            // init lookupable with data object class
            Class<?> dataObjectClass = Class.forName(getDataObjectClassName());
            lookupable.setDataObjectClass(dataObjectClass);

            // if showMaintenanceLinks is not already true, only show maintenance links
            // if the lookup was called from the home application view
            if (!((LookupView) getView()).isShowMaintenanceLinks()) {
                // TODO replace with check to history
                if (StringUtils.contains(getReturnLocation(), "/" + KRADConstants.PORTAL_ACTION) ||
                        StringUtils.contains(getReturnLocation(), "/index.html")) {
                    ((LookupView) getView()).setShowMaintenanceLinks(true);
                }
            }

            // populate lookup read only fields list on lookupable
            lookupable.setReadOnlyFieldsList(getReadOnlyFieldsList());

            // populate field conversions list
            if (request.getParameter(KRADConstants.CONVERSION_FIELDS_PARAMETER) != null) {
                String conversionFields = request.getParameter(KRADConstants.CONVERSION_FIELDS_PARAMETER);
                setFieldConversions(KRADUtils.convertStringParameterToMap(conversionFields));
                lookupable.setFieldConversions(getFieldConversions());
            }

            // perform upper casing of lookup parameters
            Map<String, String> fieldValues = new HashMap<String, String>();
            Map<String, String> formFields = getCriteriaFields();

            if (formFields != null) {
                for (Map.Entry<String, String> entry : formFields.entrySet()) {
                    // check here to see if this field is a criteria element on the form
                    fieldValues.put(entry.getKey(),
                            LookupUtils.forceUppercase(dataObjectClass, entry.getKey(), entry.getValue()));
                }
            }

            // fieldValues.put(UifParameters.RETURN_FORM_KEY, getReturnFormKey());
            // fieldValues.put(UifParameters.RETURN_LOCATION, getReturnLocation());

            if (StringUtils.isNotBlank(getDocNum())) {
                fieldValues.put(KRADConstants.DOC_NUM, getDocNum());
            }

            this.setCriteriaFields(fieldValues);
        } catch (ClassNotFoundException e) {
            // Include the exception so the full stack trace reaches the log.
            LOG.error("Object class " + getDataObjectClassName() + " not found", e);
            throw new RuntimeException("Object class " + getDataObjectClassName() + " not found", e);
        }
    }

    /**
     * Returns the view helper service as a {@link Lookupable} when the view's
     * helper implements that contract, otherwise null.
     *
     * @return the Lookupable for the current view, or null if not available
     */
    public Lookupable getLookupable() {
        if ((getView() != null) && Lookupable.class.isAssignableFrom(getView().getViewHelperService().getClass())) {
            return (Lookupable) getView().getViewHelperService();
        }

        return null;
    }

    /**
     * @return fully qualified class name of the data object being looked up
     */
    public String getDataObjectClassName() {
        return this.dataObjectClassName;
    }

    public void setDataObjectClassName(String dataObjectClassName) {
        this.dataObjectClassName = dataObjectClassName;
    }

    /**
     * @return document number associated with the lookup, if any
     */
    public String getDocNum() {
        return this.docNum;
    }

    public void setDocNum(String docNum) {
        this.docNum = docNum;
    }

    /**
     * @return references that should be refreshed on return from the lookup
     */
    public String getReferencesToRefresh() {
        return referencesToRefresh;
    }

    public void setReferencesToRefresh(String referencesToRefresh) {
        this.referencesToRefresh = referencesToRefresh;
    }

    /**
     * Indicates whether multiple values select should be enabled for the lookup
     *
     * <p>
     * When set to true, the select field is enabled for the lookup results group that allows the user
     * to select one or more rows for returning
     * </p>
     *
     * @return boolean true if multiple values should be enabled, false otherwise
     */
    public boolean isMultipleValuesSelect() {
        return multipleValuesSelect;
    }

    /**
     * Setter for the multiple values select indicator
     *
     * @param multipleValuesSelect
     */
    public void setMultipleValuesSelect(boolean multipleValuesSelect) {
        this.multipleValuesSelect = multipleValuesSelect;
    }

    /**
     * For the case of multi-value lookup, indicates the collection that should be populated with
     * the return results
     *
     * @return String collection name (must be full binding path)
     */
    public String getLookupCollectionName() {
        return lookupCollectionName;
    }

    /**
     * Setter for the name of the collection that should be populated with lookup results
     *
     * @param lookupCollectionName
     */
    public void setLookupCollectionName(String lookupCollectionName) {
        this.lookupCollectionName = lookupCollectionName;
    }

    /**
     * @return map of search criteria field names to values
     */
    public Map<String, String> getCriteriaFields() {
        return this.criteriaFields;
    }

    public void setCriteriaFields(Map<String, String> criteriaFields) {
        this.criteriaFields = criteriaFields;
    }

    /**
     * @return map of lookup field names to the form property they convert to on return
     */
    public Map<String, String> getFieldConversions() {
        return this.fieldConversions;
    }

    public void setFieldConversions(Map<String, String> fieldConversions) {
        this.fieldConversions = fieldConversions;
    }

    /**
     * @return results of the executed search, or null if no search has run
     */
    public Collection<?> getSearchResults() {
        return this.searchResults;
    }

    public void setSearchResults(Collection<?> searchResults) {
        this.searchResults = searchResults;
    }

    /**
     * @return true when at least one result row may be returned to the caller
     */
    public boolean isAtLeastOneRowReturnable() {
        return atLeastOneRowReturnable;
    }

    public void setAtLeastOneRowReturnable(boolean atLeastOneRowReturnable) {
        this.atLeastOneRowReturnable = atLeastOneRowReturnable;
    }

    /**
     * @return true when at least one result row has actions associated with it
     */
    public boolean isAtLeastOneRowHasActions() {
        return atLeastOneRowHasActions;
    }

    public void setAtLeastOneRowHasActions(boolean atLeastOneRowHasActions) {
        this.atLeastOneRowHasActions = atLeastOneRowHasActions;
    }

    /**
     * Indicates whether the requested was redirected from the lookup framework due to an external object
     * request. This prevents the framework from performing another redirect check
     *
     * @return boolean true if request was a redirect, false if not
     */
    public boolean isRedirectedLookup() {
        return redirectedLookup;
    }

    /**
     * Setter for the redirected request indicator
     *
     * @param redirectedLookup
     */
    public void setRedirectedLookup(boolean redirectedLookup) {
        this.redirectedLookup = redirectedLookup;
    }
}
| |
/*
* This file is generated by jOOQ.
*/
package com.oneops.crawler.jooq.cms.tables.records;
import com.oneops.crawler.jooq.cms.tables.DjRfcCiAttributes;
import java.sql.Timestamp;
import javax.annotation.Generated;
import org.jooq.Field;
import org.jooq.Record1;
import org.jooq.Record8;
import org.jooq.Row8;
import org.jooq.impl.UpdatableRecordImpl;
/**
* This class is generated by jOOQ.
*/
@Generated(
value = {
"http://www.jooq.org",
"jOOQ version:3.10.0"
},
comments = "This class is generated by jOOQ"
)
@SuppressWarnings({ "all", "unchecked", "rawtypes" })
public class DjRfcCiAttributesRecord extends UpdatableRecordImpl<DjRfcCiAttributesRecord> implements Record8<Long, Long, Integer, String, String, String, String, Timestamp> {
private static final long serialVersionUID = 625868933;
/**
* Setter for <code>kloopzcm.dj_rfc_ci_attributes.rfc_attr_id</code>.
*/
public void setRfcAttrId(Long value) {
set(0, value);
}
/**
* Getter for <code>kloopzcm.dj_rfc_ci_attributes.rfc_attr_id</code>.
*/
public Long getRfcAttrId() {
return (Long) get(0);
}
/**
* Setter for <code>kloopzcm.dj_rfc_ci_attributes.rfc_id</code>.
*/
public void setRfcId(Long value) {
set(1, value);
}
/**
* Getter for <code>kloopzcm.dj_rfc_ci_attributes.rfc_id</code>.
*/
public Long getRfcId() {
return (Long) get(1);
}
/**
* Setter for <code>kloopzcm.dj_rfc_ci_attributes.attribute_id</code>.
*/
public void setAttributeId(Integer value) {
set(2, value);
}
/**
* Getter for <code>kloopzcm.dj_rfc_ci_attributes.attribute_id</code>.
*/
public Integer getAttributeId() {
return (Integer) get(2);
}
/**
* Setter for <code>kloopzcm.dj_rfc_ci_attributes.old_attribute_value</code>.
*/
public void setOldAttributeValue(String value) {
set(3, value);
}
/**
* Getter for <code>kloopzcm.dj_rfc_ci_attributes.old_attribute_value</code>.
*/
public String getOldAttributeValue() {
return (String) get(3);
}
/**
* Setter for <code>kloopzcm.dj_rfc_ci_attributes.new_attribute_value</code>.
*/
public void setNewAttributeValue(String value) {
set(4, value);
}
/**
* Getter for <code>kloopzcm.dj_rfc_ci_attributes.new_attribute_value</code>.
*/
public String getNewAttributeValue() {
return (String) get(4);
}
/**
* Setter for <code>kloopzcm.dj_rfc_ci_attributes.owner</code>.
*/
public void setOwner(String value) {
set(5, value);
}
/**
* Getter for <code>kloopzcm.dj_rfc_ci_attributes.owner</code>.
*/
public String getOwner() {
return (String) get(5);
}
/**
* Setter for <code>kloopzcm.dj_rfc_ci_attributes.comments</code>.
*/
public void setComments(String value) {
set(6, value);
}
/**
* Getter for <code>kloopzcm.dj_rfc_ci_attributes.comments</code>.
*/
public String getComments() {
return (String) get(6);
}
/**
* Setter for <code>kloopzcm.dj_rfc_ci_attributes.created</code>.
*/
public void setCreated(Timestamp value) {
set(7, value);
}
/**
* Getter for <code>kloopzcm.dj_rfc_ci_attributes.created</code>.
*/
public Timestamp getCreated() {
return (Timestamp) get(7);
}
// -------------------------------------------------------------------------
// Primary key information
// -------------------------------------------------------------------------
/**
* {@inheritDoc}
*/
@Override
public Record1<Long> key() {
return (Record1) super.key();
}
// -------------------------------------------------------------------------
// Record8 type implementation
// -------------------------------------------------------------------------
/**
* {@inheritDoc}
*/
@Override
public Row8<Long, Long, Integer, String, String, String, String, Timestamp> fieldsRow() {
return (Row8) super.fieldsRow();
}
/**
* {@inheritDoc}
*/
@Override
public Row8<Long, Long, Integer, String, String, String, String, Timestamp> valuesRow() {
return (Row8) super.valuesRow();
}
/**
* {@inheritDoc}
*/
@Override
public Field<Long> field1() {
return DjRfcCiAttributes.DJ_RFC_CI_ATTRIBUTES.RFC_ATTR_ID;
}
/**
* {@inheritDoc}
*/
@Override
public Field<Long> field2() {
return DjRfcCiAttributes.DJ_RFC_CI_ATTRIBUTES.RFC_ID;
}
/**
* {@inheritDoc}
*/
@Override
public Field<Integer> field3() {
return DjRfcCiAttributes.DJ_RFC_CI_ATTRIBUTES.ATTRIBUTE_ID;
}
/**
* {@inheritDoc}
*/
@Override
public Field<String> field4() {
return DjRfcCiAttributes.DJ_RFC_CI_ATTRIBUTES.OLD_ATTRIBUTE_VALUE;
}
/**
* {@inheritDoc}
*/
@Override
public Field<String> field5() {
return DjRfcCiAttributes.DJ_RFC_CI_ATTRIBUTES.NEW_ATTRIBUTE_VALUE;
}
/**
* {@inheritDoc}
*/
@Override
public Field<String> field6() {
return DjRfcCiAttributes.DJ_RFC_CI_ATTRIBUTES.OWNER;
}
/**
* {@inheritDoc}
*/
@Override
public Field<String> field7() {
return DjRfcCiAttributes.DJ_RFC_CI_ATTRIBUTES.COMMENTS;
}
/**
* {@inheritDoc}
*/
@Override
public Field<Timestamp> field8() {
return DjRfcCiAttributes.DJ_RFC_CI_ATTRIBUTES.CREATED;
}
/**
* {@inheritDoc}
*/
@Override
public Long component1() {
return getRfcAttrId();
}
/**
* {@inheritDoc}
*/
@Override
public Long component2() {
return getRfcId();
}
/**
* {@inheritDoc}
*/
@Override
public Integer component3() {
return getAttributeId();
}
/**
* {@inheritDoc}
*/
@Override
public String component4() {
return getOldAttributeValue();
}
/**
* {@inheritDoc}
*/
@Override
public String component5() {
return getNewAttributeValue();
}
/**
* {@inheritDoc}
*/
@Override
public String component6() {
return getOwner();
}
/**
* {@inheritDoc}
*/
@Override
public String component7() {
return getComments();
}
/**
* {@inheritDoc}
*/
@Override
public Timestamp component8() {
return getCreated();
}
/**
* {@inheritDoc}
*/
@Override
public Long value1() {
return getRfcAttrId();
}
/**
* {@inheritDoc}
*/
@Override
public Long value2() {
return getRfcId();
}
/**
* {@inheritDoc}
*/
@Override
public Integer value3() {
return getAttributeId();
}
/**
* {@inheritDoc}
*/
@Override
public String value4() {
return getOldAttributeValue();
}
/**
* {@inheritDoc}
*/
@Override
public String value5() {
return getNewAttributeValue();
}
/**
* {@inheritDoc}
*/
@Override
public String value6() {
return getOwner();
}
/**
* {@inheritDoc}
*/
@Override
public String value7() {
return getComments();
}
    /**
     * {@inheritDoc}
     * <p>Value of the {@code CREATED} column (same as {@link #component8()}).</p>
     */
    @Override
    public Timestamp value8() {
        return getCreated();
    }
    /**
     * {@inheritDoc}
     * <p>Sets the {@code RFC_ATTR_ID} column and returns this record for chaining.</p>
     */
    @Override
    public DjRfcCiAttributesRecord value1(Long value) {
        setRfcAttrId(value);
        return this;
    }
    /**
     * {@inheritDoc}
     * <p>Sets the {@code RFC_ID} column and returns this record for chaining.</p>
     */
    @Override
    public DjRfcCiAttributesRecord value2(Long value) {
        setRfcId(value);
        return this;
    }
    /**
     * {@inheritDoc}
     * <p>Sets the {@code ATTRIBUTE_ID} column and returns this record for chaining.</p>
     */
    @Override
    public DjRfcCiAttributesRecord value3(Integer value) {
        setAttributeId(value);
        return this;
    }
    /**
     * {@inheritDoc}
     * <p>Sets the {@code OLD_ATTRIBUTE_VALUE} column and returns this record for chaining.</p>
     */
    @Override
    public DjRfcCiAttributesRecord value4(String value) {
        setOldAttributeValue(value);
        return this;
    }
    /**
     * {@inheritDoc}
     * <p>Sets the {@code NEW_ATTRIBUTE_VALUE} column and returns this record for chaining.</p>
     */
    @Override
    public DjRfcCiAttributesRecord value5(String value) {
        setNewAttributeValue(value);
        return this;
    }
    /**
     * {@inheritDoc}
     * <p>Sets the {@code OWNER} column and returns this record for chaining.</p>
     */
    @Override
    public DjRfcCiAttributesRecord value6(String value) {
        setOwner(value);
        return this;
    }
    /**
     * {@inheritDoc}
     * <p>Sets the {@code COMMENTS} column and returns this record for chaining.</p>
     */
    @Override
    public DjRfcCiAttributesRecord value7(String value) {
        setComments(value);
        return this;
    }
    /**
     * {@inheritDoc}
     * <p>Sets the {@code CREATED} column and returns this record for chaining.</p>
     */
    @Override
    public DjRfcCiAttributesRecord value8(Timestamp value) {
        setCreated(value);
        return this;
    }
    /**
     * {@inheritDoc}
     * <p>Fluently sets all eight column values in positional order and returns
     * this record for chaining.</p>
     */
    @Override
    public DjRfcCiAttributesRecord values(Long value1, Long value2, Integer value3, String value4, String value5, String value6, String value7, Timestamp value8) {
        value1(value1);
        value2(value2);
        value3(value3);
        value4(value4);
        value5(value5);
        value6(value6);
        value7(value7);
        value8(value8);
        return this;
    }
    // -------------------------------------------------------------------------
    // Constructors
    // -------------------------------------------------------------------------
    /**
     * Create a detached DjRfcCiAttributesRecord.
     * <p>All columns start unset; populate them via the value setters.</p>
     */
    public DjRfcCiAttributesRecord() {
        super(DjRfcCiAttributes.DJ_RFC_CI_ATTRIBUTES);
    }
    /**
     * Create a detached, initialised DjRfcCiAttributesRecord.
     * <p>The positional {@code set(index, value)} calls below must stay in the
     * declared column order of {@code DJ_RFC_CI_ATTRIBUTES} (0-based).</p>
     */
    public DjRfcCiAttributesRecord(Long rfcAttrId, Long rfcId, Integer attributeId, String oldAttributeValue, String newAttributeValue, String owner, String comments, Timestamp created) {
        super(DjRfcCiAttributes.DJ_RFC_CI_ATTRIBUTES);
        set(0, rfcAttrId);
        set(1, rfcId);
        set(2, attributeId);
        set(3, oldAttributeValue);
        set(4, newAttributeValue);
        set(5, owner);
        set(6, comments);
        set(7, created);
    }
}
| |
/*
* Copyright 2015 OpenCB
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.opencb.cellbase.mongodb.impl;
import com.mongodb.client.model.Aggregates;
import com.mongodb.client.model.Filters;
import com.mongodb.client.model.Projections;
import org.bson.Document;
import org.bson.conversions.Bson;
import org.opencb.biodata.models.core.Gene;
import org.opencb.biodata.models.core.Region;
import org.opencb.cellbase.core.api.GeneDBAdaptor;
import org.opencb.cellbase.mongodb.MongoDBCollectionConfiguration;
import org.opencb.commons.datastore.core.Query;
import org.opencb.commons.datastore.core.QueryOptions;
import org.opencb.commons.datastore.core.QueryResult;
import org.opencb.commons.datastore.mongodb.MongoDBCollection;
import org.opencb.commons.datastore.mongodb.MongoDataStore;
import java.util.*;
import java.util.function.Consumer;
/**
 * MongoDB-backed implementation of {@link GeneDBAdaptor} over the "gene"
 * collection. Several interface methods are not implemented and return
 * {@code null}; see the per-method notes below.
 *
 * <p>Created by imedina on 25/11/15.</p>
 */
public class GeneMongoDBAdaptor extends MongoDBAdaptor implements GeneDBAdaptor<Gene> {

    /**
     * Builds the adaptor and binds it to the "gene" collection of the given datastore.
     *
     * @param species        species identifier forwarded to the parent adaptor
     * @param assembly       assembly identifier forwarded to the parent adaptor
     * @param mongoDataStore datastore holding the "gene" collection
     */
    public GeneMongoDBAdaptor(String species, String assembly, MongoDataStore mongoDataStore) {
        super(species, assembly, mongoDataStore);
        mongoDBCollection = mongoDataStore.getCollection("gene");
        logger.debug("GeneMongoDBAdaptor: in 'constructor'");
    }

    /** Not implemented: always returns {@code null}. */
    @Override
    public QueryResult<Gene> next(Query query, QueryOptions options) {
        return null;
    }

    /** Not implemented: always returns {@code null}. */
    @Override
    public QueryResult nativeNext(Query query, QueryOptions options) {
        return null;
    }

    /**
     * Computes interval frequencies for the region in {@code query}, or returns
     * {@code null} when no region is present.
     *
     * <p>NOTE(review): this uses the literal key {@code "region"} rather than
     * {@code QueryParams.REGION.key()} as {@link #parseQuery(Query)} does, and
     * {@code Query.getString} may return an empty string rather than
     * {@code null} for a missing key — confirm both against the Query API.</p>
     */
    @Override
    public QueryResult getIntervalFrequencies(Query query, int intervalSize, QueryOptions options) {
        if (query.getString("region") != null) {
            Region region = Region.parseRegion(query.getString("region"));
            Bson bsonDocument = parseQuery(query);
            return getIntervalFrequencies(bsonDocument, region, intervalSize, options);
        }
        return null;
    }

    /** Not implemented: always returns {@code null}. */
    @Override
    public QueryResult<Long> update(List objectList, String field) {
        return null;
    }

    /** Counts gene documents matching {@code query}. */
    @Override
    public QueryResult<Long> count(Query query) {
        Bson bsonDocument = parseQuery(query);
        return mongoDBCollection.count(bsonDocument);
    }

    /** Returns the distinct values of {@code field} among documents matching {@code query}. */
    @Override
    public QueryResult distinct(Query query, String field) {
        Bson bsonDocument = parseQuery(query);
        return mongoDBCollection.distinct(field, bsonDocument);
    }

    /** Not implemented: always returns {@code null}. */
    @Override
    public QueryResult stats(Query query) {
        return null;
    }

    /**
     * Fetches genes matching {@code query}, mapped to {@link Gene} objects.
     * Private fields are excluded from the projection via
     * {@code addPrivateExcludeOptions}.
     */
    @Override
    public QueryResult<Gene> get(Query query, QueryOptions options) {
        Bson bson = parseQuery(query);
        options = addPrivateExcludeOptions(options);
        return mongoDBCollection.find(bson, null, Gene.class, options);
    }

    /** Fetches matching documents without mapping them to {@link Gene}. */
    @Override
    public QueryResult nativeGet(Query query, QueryOptions options) {
        Bson bson = parseQuery(query);
        return mongoDBCollection.find(bson, options);
    }

    /** Not implemented: always returns {@code null}. */
    @Override
    public Iterator<Gene> iterator(Query query, QueryOptions options) {
        return null;
    }

    /** Iterates raw documents matching {@code query}. */
    @Override
    public Iterator nativeIterator(Query query, QueryOptions options) {
        Bson bson = parseQuery(query);
        return mongoDBCollection.nativeQuery().find(bson, options).iterator();
    }

    /** Applies {@code action} to each raw document matching {@code query}. */
    @Override
    public void forEach(Query query, Consumer<? super Object> action, QueryOptions options) {
        Objects.requireNonNull(action);
        Iterator iterator = nativeIterator(query, options);
        while (iterator.hasNext()) {
            action.accept(iterator.next());
        }
    }

    /** Not implemented: always returns {@code null}. */
    @Override
    public QueryResult rank(Query query, String field, int numResults, boolean asc) {
        return null;
    }

    /** Groups matching genes by a single field, using "name" as the feature id. */
    @Override
    public QueryResult groupBy(Query query, String field, QueryOptions options) {
        Bson bsonQuery = parseQuery(query);
        return groupBy(bsonQuery, field, "name", options);
    }

    /** Groups matching genes by several fields, using "name" as the feature id. */
    @Override
    public QueryResult groupBy(Query query, List<String> fields, QueryOptions options) {
        Bson bsonQuery = parseQuery(query);
        return groupBy(bsonQuery, fields, "name", options);
    }

    /**
     * Finds regulatory regions overlapping the genes matched by {@code query}.
     *
     * <p>NOTE(review): {@code queryResult} is overwritten on every loop
     * iteration, so only the regulatory elements of the LAST gene in the
     * result set are returned. Confirm whether results should be merged
     * across genes instead.</p>
     */
    @Override
    public QueryResult getRegulatoryElements(Query query, QueryOptions queryOptions) {
        Bson bson = parseQuery(query);
        QueryResult<Document> queryResult = null;
        QueryResult<Document> gene = mongoDBCollection.find(bson, new QueryOptions(MongoDBCollection.INCLUDE, "chromosome,start,end"));
        if (gene != null) {
            MongoDBCollection regulatoryRegionCollection = mongoDataStore.getCollection("regulatory_region");
            for (Document document : gene.getResult()) {
                // Overlap test: region.start <= gene.end AND region.end >= gene.start
                // on the same chromosome.
                Bson eq = Filters.eq("chromosome", document.getString("chromosome"));
                Bson lte = Filters.lte("start", document.getInteger("end", Integer.MAX_VALUE));
                Bson gte = Filters.gte("end", document.getInteger("start", 1));
                queryResult = regulatoryRegionCollection.find(Filters.and(eq, lte, gte), queryOptions);
            }
        }
        return queryResult;
    }

    /**
     * Returns the transcription factor binding sites (TFBS) of the genes matched
     * by {@code query}, flattened to one document per TFBS via an aggregation
     * pipeline (match -> project -> unwind transcripts -> unwind tfbs -> project).
     */
    @Override
    public QueryResult getTfbs(Query query, QueryOptions queryOptions) {
        Bson bsonQuery = parseQuery(query);
        Bson match = Aggregates.match(bsonQuery);
        // We parse user's exclude options, ONLY _id can be added if exists.
        // NOTE(review): 'exclude' is only ever used as a null/non-null flag;
        // the projection it holds is never added to the pipeline directly.
        Bson includeAndExclude;
        Bson exclude = null;
        if (queryOptions != null && queryOptions.containsKey("exclude")) {
            List<String> stringList = queryOptions.getAsStringList("exclude");
            if (stringList.contains("_id")) {
                exclude = Aggregates.project(Projections.exclude("_id"));
            }
        }
        if (exclude != null) {
            includeAndExclude = Aggregates.project(Projections.fields(Projections.excludeId(), Projections.include("transcripts.tfbs")));
        } else {
            includeAndExclude = Aggregates.project(Projections.include("transcripts.tfbs"));
        }
        Bson unwind = Aggregates.unwind("$transcripts");
        Bson unwind2 = Aggregates.unwind("$transcripts.tfbs");
        // This projects the fields of each TFBS to the top of the object.
        Document document = new Document("tfName", "$transcripts.tfbs.tfName");
        document.put("pwm", "$transcripts.tfbs.pwm");
        document.put("chromosome", "$transcripts.tfbs.chromosome");
        document.put("start", "$transcripts.tfbs.start");
        document.put("end", "$transcripts.tfbs.end");
        document.put("strand", "$transcripts.tfbs.strand");
        document.put("relativeStart", "$transcripts.tfbs.relativeStart");
        document.put("relativeEnd", "$transcripts.tfbs.relativeEnd");
        document.put("score", "$transcripts.tfbs.score");
        Bson project = Aggregates.project(document);
        return mongoDBCollection.aggregate(Arrays.asList(match, includeAndExclude, unwind, unwind2, project), queryOptions);
    }

    /**
     * Translates a {@link Query} into a Bson filter: each supported parameter
     * becomes an OR clause over its values, and all present parameters are
     * AND-ed together. An empty query yields an empty (match-all) document.
     */
    private Bson parseQuery(Query query) {
        List<Bson> andBsonList = new ArrayList<>();
        createRegionQuery(query, QueryParams.REGION.key(), MongoDBCollectionConfiguration.GENE_CHUNK_SIZE, andBsonList);
        createOrQuery(query, QueryParams.ID.key(), "id", andBsonList);
        createOrQuery(query, QueryParams.NAME.key(), "name", andBsonList);
        createOrQuery(query, QueryParams.BIOTYPE.key(), "biotype", andBsonList);
        createOrQuery(query, QueryParams.XREFS.key(), "transcripts.xrefs.id", andBsonList);
        createOrQuery(query, QueryParams.TRANSCRIPT_ID.key(), "transcripts.id", andBsonList);
        createOrQuery(query, QueryParams.TRANSCRIPT_NAME.key(), "transcripts.name", andBsonList);
        createOrQuery(query, QueryParams.TRANSCRIPT_BIOTYPE.key(), "transcripts.biotype", andBsonList);
        createOrQuery(query, QueryParams.TFBS_NAME.key(), "transcripts.tfbs.name", andBsonList);
        createOrQuery(query, QueryParams.ANNOTATION_DISEASE_ID.key(), "annotation.diseases.id", andBsonList);
        createOrQuery(query, QueryParams.ANNOTATION_DISEASE_NAME.key(), "annotation.diseases.name", andBsonList);
        createOrQuery(query, QueryParams.ANNOTATION_EXPRESSION_GENE.key(), "annotation.expression.geneName", andBsonList);
        createOrQuery(query, QueryParams.ANNOTATION_EXPRESSION_TISSUE.key(), "annotation.expression.factorValue", andBsonList);
        createOrQuery(query, QueryParams.ANNOTATION_DRUGS_NAME.key(), "annotation.drugs.drugName", andBsonList);
        createOrQuery(query, QueryParams.ANNOTATION_DRUGS_GENE.key(), "annotation.drugs.geneName", andBsonList);
        if (andBsonList.size() > 0) {
            return Filters.and(andBsonList);
        } else {
            return new Document();
        }
    }
}
| |
/*
* Copyright (c) 2011, Oracle and/or its affiliates. All rights reserved.
* ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*/
// TextCatalogReader.java - Read text/plain Catalog files
/*
* Copyright 2001-2004 The Apache Software Foundation or its licensors,
* as applicable.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sun.org.apache.xml.internal.resolver.readers;
import java.io.InputStream;
import java.io.IOException;
import java.io.FileNotFoundException;
import java.net.URL;
import java.net.URLConnection;
import java.net.MalformedURLException;
import java.util.Vector;
import java.util.Stack;
import com.sun.org.apache.xml.internal.resolver.Catalog;
import com.sun.org.apache.xml.internal.resolver.CatalogEntry;
import com.sun.org.apache.xml.internal.resolver.CatalogException;
import com.sun.org.apache.xml.internal.resolver.readers.CatalogReader;
/**
* Parses plain text Catalog files.
*
* <p>This class reads plain text Open Catalog files.</p>
*
* @see Catalog
*
* @author Norman Walsh
* <a href="mailto:Norman.Walsh@Sun.COM">Norman.Walsh@Sun.COM</a>
*
*/
public class TextCatalogReader implements CatalogReader {
    /** The input stream used to read the catalog; null when no catalog is open. */
    protected InputStream catfile = null;

    /**
     * Character lookahead stack. Reading a catalog sometimes requires
     * up to two characters of lookahead.
     */
    protected int[] stack = new int[3];

    /**
     * Token stack. Recognizing an unexpected catalog entry requires
     * the ability to "push back" a token.
     */
    protected Stack tokenStack = new Stack();

    /** The current position on the lookahead stack (-1 means empty). */
    protected int top = -1;

    /** Are keywords in the catalog case sensitive? */
    protected boolean caseSensitive = false;

    /**
     * Construct a CatalogReader object.
     */
    public TextCatalogReader() { }

    /** Sets whether catalog keywords are matched case-sensitively. */
    public void setCaseSensitive(boolean isCaseSensitive) {
        caseSensitive = isCaseSensitive;
    }

    /** Returns whether catalog keywords are matched case-sensitively. */
    public boolean getCaseSensitive() {
        return caseSensitive;
    }

    /**
     * <p>Start parsing a text catalog file. The file is
     * actually read and parsed
     * as needed by <code>nextEntry</code>.</p>
     *
     * @param fileUrl The URL or filename of the catalog file to process
     *
     * @throws MalformedURLException Improper fileUrl
     * @throws IOException Error reading catalog file
     */
    public void readCatalog(Catalog catalog, String fileUrl)
        throws MalformedURLException, IOException {
        URL catURL = null;
        try {
            catURL = new URL(fileUrl);
        } catch (MalformedURLException e) {
            // Not an absolute URL; fall back to treating it as a local file path.
            catURL = new URL("file:///" + fileUrl);
        }
        URLConnection urlCon = catURL.openConnection();
        try {
            readCatalog(catalog, urlCon.getInputStream());
        } catch (FileNotFoundException e) {
            catalog.getCatalogManager().debug.message(1, "Failed to load catalog, file not found",
                                                      catURL.toString());
        }
    }

    /**
     * Parses a plain-text catalog from the given stream, adding each
     * recognized entry to {@code catalog}. Unrecognized entry types are
     * accumulated and reported via {@code catalog.unknownEntry}; invalid
     * entries are logged and skipped. The stream is closed on EOF.
     *
     * @param catalog The catalog to populate
     * @param is      The stream to read; a null stream is silently ignored
     *
     * @throws MalformedURLException Never thrown here; kept for interface compatibility
     * @throws IOException Error reading from the stream
     */
    public void readCatalog(Catalog catalog, InputStream is)
        throws MalformedURLException, IOException {
        catfile = is;
        if (catfile == null) {
            return;
        }
        Vector unknownEntry = null;
        try {
            while (true) {
                String token = nextToken();
                if (token == null) {
                    // EOF: flush any pending unknown entry, then close up.
                    if (unknownEntry != null) {
                        catalog.unknownEntry(unknownEntry);
                        unknownEntry = null;
                    }
                    catfile.close();
                    catfile = null;
                    return;
                }
                String entryToken = null;
                if (caseSensitive) {
                    entryToken = token;
                } else {
                    entryToken = token.toUpperCase();
                }
                try {
                    int type = CatalogEntry.getEntryType(entryToken);
                    int numArgs = CatalogEntry.getEntryArgCount(type);
                    Vector args = new Vector();
                    // A recognized entry terminates any unknown entry in progress.
                    if (unknownEntry != null) {
                        catalog.unknownEntry(unknownEntry);
                        unknownEntry = null;
                    }
                    for (int count = 0; count < numArgs; count++) {
                        args.addElement(nextToken());
                    }
                    catalog.addEntry(new CatalogEntry(entryToken, args));
                } catch (CatalogException cex) {
                    if (cex.getExceptionType() == CatalogException.INVALID_ENTRY_TYPE) {
                        // Unknown keyword: collect tokens until a known one appears.
                        if (unknownEntry == null) {
                            unknownEntry = new Vector();
                        }
                        unknownEntry.addElement(token);
                    } else if (cex.getExceptionType() == CatalogException.INVALID_ENTRY) {
                        catalog.getCatalogManager().debug.message(1, "Invalid catalog entry", token);
                        unknownEntry = null;
                    } else if (cex.getExceptionType() == CatalogException.UNENDED_COMMENT) {
                        catalog.getCatalogManager().debug.message(1, cex.getMessage());
                    }
                }
            }
        } catch (CatalogException cex2) {
            if (cex2.getExceptionType() == CatalogException.UNENDED_COMMENT) {
                catalog.getCatalogManager().debug.message(1, cex2.getMessage());
            }
        }
    }

    /**
     * The destructor.
     *
     * <p>Makes sure the catalog file is closed.</p>
     *
     * <p>NOTE(review): finalizers are deprecated in modern Java; this is kept
     * only as a last-resort safety net. The stream is normally closed at EOF
     * in {@code readCatalog}.</p>
     */
    protected void finalize() {
        if (catfile != null) {
            try {
                catfile.close();
            } catch (IOException e) {
                // Best-effort cleanup; nothing useful to do on failure.
            }
        }
        catfile = null;
    }

    // -----------------------------------------------------------------
    /**
     * Return the next token in the catalog file.
     *
     * <p>Tokens are delimited by whitespace unless quoted with single or
     * double quotes. Double-hyphen comments are skipped. An unterminated
     * quoted string is silently truncated at end-of-file.</p>
     *
     * @return The Catalog file token from the input stream, or null at EOF.
     * @throws IOException If an error occurs reading from the stream.
     * @throws CatalogException If a comment is left unterminated at EOF.
     */
    protected String nextToken() throws IOException, CatalogException {
        String token = "";
        int ch, nextch;
        if (!tokenStack.empty()) {
            return (String) tokenStack.pop();
        }
        // Skip over leading whitespace and comments
        while (true) {
            // Skip leading whitespace; read() returns -1 at EOF, which is <= ' '
            // and caught by the ch < 0 test below.
            ch = catfile.read();
            while (ch <= ' ') { // all ctrls are whitespace
                ch = catfile.read();
                if (ch < 0) {
                    return null;
                }
            }
            // Now 'ch' is the first non-whitespace char; peek one ahead
            // to detect a "--" comment opener.
            nextch = catfile.read();
            if (nextch < 0) {
                return null;
            }
            if (ch == '-' && nextch == '-') {
                // We've found a comment; consume until the closing "--".
                ch = ' ';
                nextch = nextChar();
                while ((ch != '-' || nextch != '-') && nextch > 0) {
                    ch = nextch;
                    nextch = nextChar();
                }
                if (nextch < 0) {
                    throw new CatalogException(CatalogException.UNENDED_COMMENT,
                        "Unterminated comment in catalog file; EOF treated as end-of-comment.");
                }
                // Ok, we've found the end of the comment,
                // loop back to the top and start again...
            } else {
                // Not a comment: push both chars back for token scanning below.
                stack[++top] = nextch;
                stack[++top] = ch;
                break;
            }
        }
        ch = nextChar();
        if (ch == '"' || ch == '\'') {
            // Quoted token: everything up to the matching quote.
            int quote = ch;
            while ((ch = nextChar()) != quote) {
                if (ch < 0) {
                    // FIX: EOF inside a quoted token previously looped forever,
                    // appending (char) -1 endlessly. Treat EOF as end of token.
                    return token;
                }
                char[] chararr = new char[1];
                chararr[0] = (char) ch;
                String s = new String(chararr);
                token = token.concat(s);
            }
            return token;
        } else {
            // Return the next whitespace- or comment-delimited string.
            while (ch > ' ') {
                nextch = nextChar();
                if (ch == '-' && nextch == '-') {
                    // A "--" terminates the token; push it back as lookahead.
                    stack[++top] = ch;
                    stack[++top] = nextch;
                    return token;
                } else {
                    char[] chararr = new char[1];
                    chararr[0] = (char) ch;
                    String s = new String(chararr);
                    token = token.concat(s);
                    ch = nextch;
                }
            }
            return token;
        }
    }

    /**
     * Return the next logical character from the input stream.
     *
     * @return The next (logical) character from the input stream. The
     * character may be buffered from a previous lookahead.
     *
     * @throws IOException If an error occurs reading from the stream.
     */
    protected int nextChar() throws IOException {
        if (top < 0) {
            return catfile.read();
        } else {
            return stack[top--];
        }
    }
}
| |
package com.joy.webview.ui;
import android.content.Intent;
import android.content.res.TypedArray;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.view.Gravity;
import android.widget.FrameLayout.LayoutParams;
import com.joy.inject.ActivityScope;
import com.joy.share.JoyShare;
import com.joy.share.ShareItem;
import com.joy.share.ShareUtil;
import com.joy.ui.activity.BaseUiActivity;
import com.joy.ui.fragment.BaseUiFragment;
import com.joy.ui.view.viewpager.PageChangeListenerAdapter;
import com.joy.ui.view.viewpager.VerticalViewPager;
import com.joy.utils.LayoutInflater;
import com.joy.webview.R;
import com.joy.webview.ui.interfaces.BaseViewPageWeb;
import com.joy.webview.view.NavigationBar;
import java.util.ArrayList;
import java.util.List;
import javax.inject.Inject;
import static android.view.ViewGroup.LayoutParams.MATCH_PARENT;
import static com.joy.ui.utils.DimenCons.DP;
/**
 * Activity-scoped delegate that hosts a vertical pager of web-page fragments,
 * wiring page-change titles, an optional bottom navigation bar, and sharing.
 *
 * <p>Created by Daisw on 2017/11/17.</p>
 */
@ActivityScope
public class UIPageDelegate<F extends BaseUiFragment> {

    @Inject
    BaseViewPageWeb mBaseView;

    @Inject
    BaseUiActivity mActivity;

    // Pager holding one fragment per page entity.
    VerticalViewPager mVerticalViewPager;
    // Page metadata, parallel to mFragments.
    List<BasePageEntity> mPageEntities;
    List<F> mFragments;
    int mCurrentPosition;
    JoyShare mJoyShare;

    // Optional bottom navigation bar state (resolved from theme attributes).
    NavigationBar mNavBar;
    boolean mNavDisplay = false;
    boolean mNavAnimate = true;
    int mNavHeight;
    int mNavElevation;

    @Inject
    UIPageDelegate() {
    }

    /** Installs the pager container layout as the activity's content view. */
    void onCreate() {
        mActivity.setContentView(R.layout.lib_view_page_container);
    }

    /** Reads the navigation-bar display/animation/size attributes from the theme. */
    void resolveThemeAttribute() {
        TypedArray navTa = mActivity.obtainStyledAttributes(R.styleable.NavigationBar);
        mNavDisplay = navTa.getBoolean(R.styleable.NavigationBar_navDisplay, false);
        mNavAnimate = navTa.getBoolean(R.styleable.NavigationBar_navAnimate, true);
        mNavHeight = navTa.getDimensionPixelSize(R.styleable.NavigationBar_navHeight, 0);
        mNavElevation = navTa.getDimensionPixelSize(R.styleable.NavigationBar_navElevation, 0);
        navTa.recycle();
    }

    /**
     * Builds the fragment list from the intent extras ("currentPosition",
     * "pageEntities") and configures sharing.
     *
     * <p>NOTE(review): the title/subtitle assignment below depends on the raw
     * index i (i == 0, i == mCurrentPosition) rather than on the position
     * relative to mCurrentPosition; confirm this matches the intended
     * prev/next-page labelling.</p>
     */
    void initData() {
        Intent intent = mActivity.getIntent();
        mCurrentPosition = intent.getIntExtra("currentPosition", 0);
        mPageEntities = intent.getParcelableArrayListExtra("pageEntities");
        mFragments = new ArrayList<>();
        int i = 0;
        String title, subtitle;
        for (BasePageEntity entity : mPageEntities) {
            if (i == 0) {
                title = mActivity.getString(R.string.toast_nothing);
                subtitle = null;
            } else if (i == mCurrentPosition) {
                title = mPageEntities.get(i - 1).getTitle();
                subtitle = mActivity.getString(R.string.prev_page);
            } else {
                title = entity.getTitle();
                subtitle = mActivity.getString(R.string.next_page);
            }
            F f = (F) mBaseView.getFragment(entity.getUrl());
            // Titles are only settable on the two known page-fragment types.
            if (f instanceof BasePageWebViewFragment) {
                ((BasePageWebViewFragment) f)
                        .setPageTitle(title)
                        .setPageSubtitle(subtitle);
            } else if (f instanceof BasePageWebX5Fragment) {
                ((BasePageWebX5Fragment) f)
                        .setPageTitle(title)
                        .setPageSubtitle(subtitle);
            }
            mFragments.add(f);
            i++;
        }
        mJoyShare = new JoyShare(mActivity);
        mJoyShare.setData(mBaseView.getShareItems());
        mJoyShare.setOnItemClickListener((position, v, item) -> mBaseView.onShareItemClick(position, v, item));
    }

    /** Shows the current page's title in the activity title bar, if any. */
    void initTitle() {
        if (mActivity.hasTitle()) {
            mActivity.setTitle(mPageEntities.get(mCurrentPosition).getTitle());
        }
    }

    /**
     * Sets up the vertical pager with the fragment adapter, jumps to the
     * initial position, and keeps titles in sync on page changes.
     */
    void initContentView() {
        addNavBarIfNecessary();
        mVerticalViewPager = mActivity.findViewById(R.id.vvpContainer);
        mVerticalViewPager.setPageMargin(DP(80));
        BasePageWebAdapter<F> adapter =
                new BasePageWebAdapter<>(mActivity.getSupportFragmentManager());
        adapter.setFragments(mFragments);
        mVerticalViewPager.setAdapter(adapter);
        if (mCurrentPosition != 0) {
            mVerticalViewPager.setCurrentItem(mCurrentPosition, false);
        }
        mVerticalViewPager.setOnPageChangeListener(new PageChangeListenerAdapter() {
            @Override
            public void onPageSelected(int position) {
                mCurrentPosition = position;
                if (mActivity.hasTitle()) {
                    mActivity.setTitle(mPageEntities.get(position).getTitle());
                }
                // Refresh the prev/next labels of the neighbouring pages.
                if (position > 0) {
                    updateTitles(mFragments.get(position), position - 1);
                }
                if (position + 1 < mFragments.size()) {
                    updateTitles(mFragments.get(position + 1), position + 1);
                }
            }
        });
    }

    /** Pushes the title/subtitle for {@code position} into the given fragment. */
    private void updateTitles(F f, int position) {
        if (f instanceof BasePageWebViewFragment) {
            ((BasePageWebViewFragment) f)
                    .setPageTitle(mPageEntities.get(position).getTitle())
                    .setPageSubtitle(mActivity.getString(position < mCurrentPosition ? R.string.prev_page : R.string.next_page));
        } else if (f instanceof BasePageWebX5Fragment) {
            ((BasePageWebX5Fragment) f)
                    .setPageTitle(mPageEntities.get(position).getTitle())
                    .setPageSubtitle(mActivity.getString(position < mCurrentPosition ? R.string.prev_page : R.string.next_page));
        }
    }

    /** URL of the currently selected page. */
    public String getUrl() {
        return mPageEntities.get(mCurrentPosition).getUrl();
    }

    /** Title of the currently selected page. */
    public String getTitle() {
        return mPageEntities.get(mCurrentPosition).getTitle();
    }

    public JoyShare getJoyShare() {
        return mJoyShare;
    }

    /**
     * Handles the built-in share actions (copy link, open in browser, share
     * more) for the current page.
     *
     * @return true if the item was handled here; false to let the caller handle it.
     */
    boolean onShareItemClick(ShareItem item) {
        getJoyShare().dismiss();
        ShareItem.DEFAULT def = item.mDefault;
        if (def != null) {
            switch (def) {
                case COPY_LINK:
                    ShareUtil.copyUrl(mActivity, getUrl());
                    return true;
                case BROWSER:
                    ShareUtil.openBrowser(mActivity, getUrl());
                    return true;
                case MORE:
                    ShareUtil.shareTextUrl(mActivity, getUrl(), getTitle());
                    return true;
                default:
                    return false;
            }
        }
        return false;
    }

    /** Adds the bottom navigation bar if the theme requested one. */
    private void addNavBarIfNecessary() {
        if (mNavDisplay) {
            mNavBar = mBaseView.initNavigationBar();
            if (mNavBar != null) {
                addNavigationBar(mNavBar, generateNavBarLp(), mNavAnimate, false);
            }
        }
    }

    /**
     * Attaches the navigation bar to the activity content view.
     *
     * <p>NOTE(review): with {@code animate == true} and {@code showLater ==
     * false} the bar is added fully transparent and translated off-screen with
     * no enter animation run here — presumably it is revealed later by
     * {@link #onScrollChanged}; confirm.</p>
     */
    private void addNavigationBar(@NonNull NavigationBar navBar, @NonNull LayoutParams lp, boolean animate, boolean showLater) {
        if (animate) {
            navBar.setAlpha(0.f);
            navBar.setTranslationY(lp.height);
        } else {
            // Non-animated bar permanently reserves space below the content.
            mActivity.getContentViewLp().bottomMargin = lp.height - mNavElevation;
        }
        mActivity.addContentView(navBar, lp);
        if (animate && showLater) {
            navBar.runEnterAnimator();
        }
        mNavDisplay = true;
        mNavBar = navBar;
        mNavHeight = lp.height;
        mNavAnimate = animate;
    }

    /** Layout params pinning the navigation bar to the bottom, full width. */
    private LayoutParams generateNavBarLp() {
        LayoutParams lp = new LayoutParams(MATCH_PARENT, mNavHeight);
        lp.gravity = Gravity.BOTTOM;
        return lp;
    }

    /** Inflates the navigation bar and wires its four item click handlers. */
    @Nullable
    NavigationBar initNavigationBar() {
        NavigationBar navBar = LayoutInflater.inflate(mActivity, R.layout.lib_view_web_navigation_bar);
        navBar.getIvNav(0).setOnClickListener((v1) -> mBaseView.onNavigationItemClick(0));
        navBar.getIvNav(1).setOnClickListener((v1) -> mBaseView.onNavigationItemClick(1));
        navBar.getIvNav(2).setOnClickListener((v1) -> mBaseView.onNavigationItemClick(2));
        navBar.getIvNav(3).setOnClickListener((v1) -> mBaseView.onNavigationItemClick(3));
        return navBar;
    }

    /** Dispatches nav-bar clicks: 1 = page up, 2 = page down, 3 = share. Item 0 is a no-op here. */
    void onNavigationItemClick(int position) {
        switch (position) {
            case 1:
                mVerticalViewPager.pageUp();
                break;
            case 2:
                mVerticalViewPager.pageDown();
                break;
            case 3:
                mJoyShare.show();
                break;
            default:
                break;
        }
    }

    /** Hides the nav bar when scrolling down, shows it when scrolling up. */
    void onScrollChanged(int scrollX, int scrollY, int oldScrollX, int oldScrollY) {
        if (mNavDisplay && mNavAnimate && mNavBar != null) {
            if (scrollY > oldScrollY) {// to down
                mNavBar.runExitAnimator();
            } else {// to up
                mNavBar.runEnterAnimator();
            }
        }
    }

    public F getCurrentFragment() {
        return mFragments.get(mCurrentPosition);
    }

    public int getCurrentPosition() {
        return mCurrentPosition;
    }
}
| |
/*
* Copyright 2010-2012 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.model;
/**
* <p>
* Provides details of the <code>ChildWorkflowExecutionTimedOut</code> event.
* </p>
*/
public class ChildWorkflowExecutionTimedOutEventAttributes {
    /** The child workflow execution that timed out. */
    private WorkflowExecution workflowExecution;

    /** The type of the child workflow execution. */
    private WorkflowType workflowType;

    /**
     * The type of the timeout that caused the child workflow execution to
     * time out.
     * <p>
     * <b>Constraints:</b><br/>
     * <b>Allowed Values: </b>START_TO_CLOSE
     */
    private String timeoutType;

    /**
     * The id of the <code>StartChildWorkflowExecutionInitiated</code> event
     * corresponding to the <code>StartChildWorkflowExecution</code>
     * <a>Decision</a> that started this child workflow execution; useful for
     * tracing back the chain of events leading up to this event.
     */
    private Long initiatedEventId;

    /**
     * The id of the <code>ChildWorkflowExecutionStarted</code> event recorded
     * when this child workflow execution was started; useful for tracing back
     * the chain of events leading up to this event.
     */
    private Long startedEventId;
    /**
     * Returns the child workflow execution that timed out.
     *
     * @return The child workflow execution that timed out.
     */
    public WorkflowExecution getWorkflowExecution() {
        return workflowExecution;
    }
    /**
     * Sets the child workflow execution that timed out.
     *
     * @param workflowExecution The child workflow execution that timed out.
     */
    public void setWorkflowExecution(WorkflowExecution workflowExecution) {
        this.workflowExecution = workflowExecution;
    }
    /**
     * Sets the child workflow execution that timed out and returns this object
     * so that method calls can be chained together.
     *
     * @param workflowExecution The child workflow execution that timed out.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     */
    public ChildWorkflowExecutionTimedOutEventAttributes withWorkflowExecution(WorkflowExecution workflowExecution) {
        this.workflowExecution = workflowExecution;
        return this;
    }
    /**
     * Returns the type of the child workflow execution.
     *
     * @return The type of the child workflow execution.
     */
    public WorkflowType getWorkflowType() {
        return workflowType;
    }
    /**
     * Sets the type of the child workflow execution.
     *
     * @param workflowType The type of the child workflow execution.
     */
    public void setWorkflowType(WorkflowType workflowType) {
        this.workflowType = workflowType;
    }
    /**
     * Sets the type of the child workflow execution and returns this object
     * so that method calls can be chained together.
     *
     * @param workflowType The type of the child workflow execution.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     */
    public ChildWorkflowExecutionTimedOutEventAttributes withWorkflowType(WorkflowType workflowType) {
        this.workflowType = workflowType;
        return this;
    }
    /**
     * Returns the type of the timeout that caused the child workflow execution
     * to time out.
     * <p>
     * <b>Constraints:</b><br/>
     * <b>Allowed Values: </b>START_TO_CLOSE
     *
     * @return The type of the timeout that caused the child workflow execution to
     *         time out.
     *
     * @see WorkflowExecutionTimeoutType
     */
    public String getTimeoutType() {
        return timeoutType;
    }
    /**
     * Sets the type of the timeout that caused the child workflow execution to
     * time out.
     * <p>
     * <b>Constraints:</b><br/>
     * <b>Allowed Values: </b>START_TO_CLOSE
     *
     * @param timeoutType The type of the timeout that caused the child workflow execution to
     *                    time out.
     *
     * @see WorkflowExecutionTimeoutType
     */
    public void setTimeoutType(String timeoutType) {
        this.timeoutType = timeoutType;
    }
    /**
     * Sets the type of the timeout that caused the child workflow execution to
     * time out, returning this object so that method calls can be chained
     * together.
     * <p>
     * <b>Constraints:</b><br/>
     * <b>Allowed Values: </b>START_TO_CLOSE
     *
     * @param timeoutType The type of the timeout that caused the child workflow execution to
     *                    time out.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     *
     * @see WorkflowExecutionTimeoutType
     */
    public ChildWorkflowExecutionTimedOutEventAttributes withTimeoutType(String timeoutType) {
        this.timeoutType = timeoutType;
        return this;
    }
    /**
     * Sets the type of the timeout that caused the child workflow execution to
     * time out, from its enum form (stored as the enum's string value).
     * <p>
     * <b>Constraints:</b><br/>
     * <b>Allowed Values: </b>START_TO_CLOSE
     *
     * @param timeoutType The type of the timeout that caused the child workflow execution to
     *                    time out.
     *
     * @see WorkflowExecutionTimeoutType
     */
    public void setTimeoutType(WorkflowExecutionTimeoutType timeoutType) {
        this.timeoutType = timeoutType.toString();
    }
    /**
     * Sets the type of the timeout that caused the child workflow execution to
     * time out, from its enum form (stored as the enum's string value), and
     * returns this object so that method calls can be chained together.
     * <p>
     * <b>Constraints:</b><br/>
     * <b>Allowed Values: </b>START_TO_CLOSE
     *
     * @param timeoutType The type of the timeout that caused the child workflow execution to
     *                    time out.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     *
     * @see WorkflowExecutionTimeoutType
     */
    public ChildWorkflowExecutionTimedOutEventAttributes withTimeoutType(WorkflowExecutionTimeoutType timeoutType) {
        this.timeoutType = timeoutType.toString();
        return this;
    }
    /**
     * Returns the id of the <code>StartChildWorkflowExecutionInitiated</code>
     * event corresponding to the <code>StartChildWorkflowExecution</code>
     * <a>Decision</a> that started this child workflow execution; useful for
     * tracing back the chain of events leading up to this event.
     *
     * @return The id of the <code>StartChildWorkflowExecutionInitiated</code> event.
     */
    public Long getInitiatedEventId() {
        return initiatedEventId;
    }
/**
* The id of the <code>StartChildWorkflowExecutionInitiated</code> event
* corresponding to the <code>StartChildWorkflowExecution</code>
* <a>Decision</a> to start this child workflow execution. This
* information can be useful for diagnosing problems by tracing back the
* chain of events leading up to this event.
*
* @param initiatedEventId The id of the <code>StartChildWorkflowExecutionInitiated</code> event
* corresponding to the <code>StartChildWorkflowExecution</code>
* <a>Decision</a> to start this child workflow execution. This
* information can be useful for diagnosing problems by tracing back the
* chain of events leading up to this event.
*/
    public void setInitiatedEventId(Long initiatedEventId) {
        // Plain mutator; null is accepted and simply stored.
        this.initiatedEventId = initiatedEventId;
    }
/**
* The id of the <code>StartChildWorkflowExecutionInitiated</code> event
* corresponding to the <code>StartChildWorkflowExecution</code>
* <a>Decision</a> to start this child workflow execution. This
* information can be useful for diagnosing problems by tracing back the
* chain of events leading up to this event.
* <p>
* Returns a reference to this object so that method calls can be chained together.
*
* @param initiatedEventId The id of the <code>StartChildWorkflowExecutionInitiated</code> event
* corresponding to the <code>StartChildWorkflowExecution</code>
* <a>Decision</a> to start this child workflow execution. This
* information can be useful for diagnosing problems by tracing back the
* chain of events leading up to this event.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*/
    public ChildWorkflowExecutionTimedOutEventAttributes withInitiatedEventId(Long initiatedEventId) {
        // Fluent variant of setInitiatedEventId for call chaining.
        this.initiatedEventId = initiatedEventId;
        return this;
    }
/**
* The Id of the <code>ChildWorkflowExecutionStarted</code> event
* recorded when this child workflow execution was started. This
* information can be useful for diagnosing problems by tracing back the
* chain of events leading up to this event.
*
* @return The Id of the <code>ChildWorkflowExecutionStarted</code> event
* recorded when this child workflow execution was started. This
* information can be useful for diagnosing problems by tracing back the
* chain of events leading up to this event.
*/
    public Long getStartedEventId() {
        // Plain accessor; may be null if the id was never set.
        return startedEventId;
    }
/**
* The Id of the <code>ChildWorkflowExecutionStarted</code> event
* recorded when this child workflow execution was started. This
* information can be useful for diagnosing problems by tracing back the
* chain of events leading up to this event.
*
* @param startedEventId The Id of the <code>ChildWorkflowExecutionStarted</code> event
* recorded when this child workflow execution was started. This
* information can be useful for diagnosing problems by tracing back the
* chain of events leading up to this event.
*/
    public void setStartedEventId(Long startedEventId) {
        // Plain mutator; null is accepted and simply stored.
        this.startedEventId = startedEventId;
    }
/**
* The Id of the <code>ChildWorkflowExecutionStarted</code> event
* recorded when this child workflow execution was started. This
* information can be useful for diagnosing problems by tracing back the
* chain of events leading up to this event.
* <p>
* Returns a reference to this object so that method calls can be chained together.
*
* @param startedEventId The Id of the <code>ChildWorkflowExecutionStarted</code> event
* recorded when this child workflow execution was started. This
* information can be useful for diagnosing problems by tracing back the
* chain of events leading up to this event.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*/
    public ChildWorkflowExecutionTimedOutEventAttributes withStartedEventId(Long startedEventId) {
        // Fluent variant of setStartedEventId for call chaining.
        this.startedEventId = startedEventId;
        return this;
    }
/**
* Returns a string representation of this object; useful for testing and
* debugging.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (workflowExecution != null) sb.append("WorkflowExecution: " + workflowExecution + ", ");
if (workflowType != null) sb.append("WorkflowType: " + workflowType + ", ");
if (timeoutType != null) sb.append("TimeoutType: " + timeoutType + ", ");
if (initiatedEventId != null) sb.append("InitiatedEventId: " + initiatedEventId + ", ");
if (startedEventId != null) sb.append("StartedEventId: " + startedEventId + ", ");
sb.append("}");
return sb.toString();
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getWorkflowExecution() == null) ? 0 : getWorkflowExecution().hashCode());
hashCode = prime * hashCode + ((getWorkflowType() == null) ? 0 : getWorkflowType().hashCode());
hashCode = prime * hashCode + ((getTimeoutType() == null) ? 0 : getTimeoutType().hashCode());
hashCode = prime * hashCode + ((getInitiatedEventId() == null) ? 0 : getInitiatedEventId().hashCode());
hashCode = prime * hashCode + ((getStartedEventId() == null) ? 0 : getStartedEventId().hashCode());
return hashCode;
}
@Override
public boolean equals(Object obj) {
if (this == obj) return true;
if (obj == null) return false;
if (obj instanceof ChildWorkflowExecutionTimedOutEventAttributes == false) return false;
ChildWorkflowExecutionTimedOutEventAttributes other = (ChildWorkflowExecutionTimedOutEventAttributes)obj;
if (other.getWorkflowExecution() == null ^ this.getWorkflowExecution() == null) return false;
if (other.getWorkflowExecution() != null && other.getWorkflowExecution().equals(this.getWorkflowExecution()) == false) return false;
if (other.getWorkflowType() == null ^ this.getWorkflowType() == null) return false;
if (other.getWorkflowType() != null && other.getWorkflowType().equals(this.getWorkflowType()) == false) return false;
if (other.getTimeoutType() == null ^ this.getTimeoutType() == null) return false;
if (other.getTimeoutType() != null && other.getTimeoutType().equals(this.getTimeoutType()) == false) return false;
if (other.getInitiatedEventId() == null ^ this.getInitiatedEventId() == null) return false;
if (other.getInitiatedEventId() != null && other.getInitiatedEventId().equals(this.getInitiatedEventId()) == false) return false;
if (other.getStartedEventId() == null ^ this.getStartedEventId() == null) return false;
if (other.getStartedEventId() != null && other.getStartedEventId().equals(this.getStartedEventId()) == false) return false;
return true;
}
}
| |
package com.oregonscientific.meep.app;
import java.io.File;
import java.util.List;
import com.oregonscientific.meep.app.AppManager.OnApkListChangeListener;
import com.oregonscientific.meep.app.FullListViewActivity.EditMode;
import com.oregonscientific.meep.customfont.MyButton;
import com.oregonscientific.meep.customfont.MyTextView;
import com.oregonscientific.meep.global.Global;
import com.oregonscientific.meep.message.common.MeepLogger;
import com.oregonscientific.meep.message.common.OsListViewItem;
import com.oregonscientific.meep.opengl.OSButton;
import com.oregonscientific.meep.tool.ImageDownloader;
import android.app.DialogFragment;
import android.app.Fragment;
import android.content.BroadcastReceiver;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Color;
import android.graphics.drawable.ColorDrawable;
import android.net.Uri;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.util.Log;
import android.view.GestureDetector;
import android.view.Gravity;
import android.view.KeyEvent;
import android.view.LayoutInflater;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;
import android.view.GestureDetector.OnGestureListener;
import android.view.View.OnClickListener;
import android.view.View.OnLongClickListener;
import android.view.View.OnTouchListener;
import android.view.inputmethod.InputMethodManager;
import android.widget.AbsListView;
import android.widget.BaseAdapter;
import android.widget.Button;
import android.widget.EditText;
import android.widget.FrameLayout;
import android.widget.GridView;
import android.widget.ImageView;
import android.widget.PopupWindow;
import android.widget.RelativeLayout;
import android.widget.TextView;
import android.widget.Toast;
import android.widget.AbsListView.OnScrollListener;
import android.widget.RelativeLayout.LayoutParams;
/**
 * Fragment that renders the device's installed apps as a grid of icons
 * ({@code R.layout.full_list_view} / {@code R.id.myApp}).
 * <p>
 * Responsibilities visible in this class:
 * <ul>
 *   <li>Loads the app list off the UI thread ({@link LoadImagesThread}) and
 *       refreshes the grid via {@code mHandlerReadImg}.</li>
 *   <li>Listens for package install/remove/change broadcasts and reloads the
 *       list when they arrive.</li>
 *   <li>Short tap launches an app; long press pops a small "ring menu"
 *       (a {@link PopupWindow}) offering delete/uninstall.</li>
 * </ul>
 */
public class GridViewFragment extends Fragment{
    private AppManager mAppManager;
    // Backing data for the grid adapter; (re)built by LoadImagesThread.
    private List<OsListViewItem> mListViewItemList = null;
    private Thread mThread = null;
    private ViewGroup mViewGroup = null;
    private RelativeLayout mOptionLayout = null;
    private RelativeLayout mRenameLayout = null;
    // Subdirectory name used by ImageDownloader for cached icons.
    private final static String CACHE_DIR = "ebook";
    //private AbsoluteLayout mMainLayout = null;
    private Bitmap frameImage;
    private Bitmap bgImage;
    private String[] mGameArray = null;
    private String[] mBlockArray = null;
    private GestureDetector mGuestureDetector = null;
    private GestureDetector mItemGuestureDetector = null;
    private View mSelectedView = null;
    private EditMode mEditMode = EditMode.VIEW;
    // Posts back to the UI thread when the app list has been (re)loaded.
    private Handler mHandlerReadImg = null;
    private EditText mTxtRename = null;
    private ImageView mSnackIcon = null;
    private Button mBtnConfirm = null;
    private FullListViewActivity mContext;
    private AppsAdapter appAdapter =null;
    private GridView myApps = null;
    private LoadImagesThread appsLoadThread = null;
    //private MeepDbCommunicationCtrl mMeepDbCommunicationCtrl = null;
    private float mInitSpan = 0;
    // Screen coordinates of the last long-pressed item (see btnItemLongClickListener).
    private int mCorX = 0;
    private int mCorY = 0;
    private OSButton button;
    private ImageDownloader imageDownloader = null;
    private DialogFragment popupFragment;
    private ImageView ring_delete;
    private ImageView ring_rename;
    /**
     * Factory method; stores {@code tag} in the fragment's argument bundle.
     */
    public static GridViewFragment newInstance(String tag) {
        GridViewFragment f = new GridViewFragment();
        // Supply index input as an argument.
        Bundle args = new Bundle();
        args.putString("tag", tag);
        f.setArguments(args);
        return f;
    }
    /**
     * Inflates the grid layout, wires up managers/listeners, kicks off the
     * initial app-list load and registers the package-change receiver.
     */
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        View v = inflater.inflate(R.layout.full_list_view, container, false);
        mContext = (FullListViewActivity) this.getActivity();
        mAppManager = new AppManager(mContext);
        mAppManager.setmOnApkListChangeListener(mOnApkListChangeListener);
        mGuestureDetector = new GestureDetector(mContext, mOnGuestureListener);
        mItemGuestureDetector = new GestureDetector(mContext, mOnGuestureListener);
        frameImage = BitmapFactory.decodeResource(getResources(), R.drawable.app_icon_frame);
        bgImage = BitmapFactory.decodeResource(getResources(), R.drawable.app_icon_mask);
        mBlockArray = mContext.getIntent().getStringArrayExtra(Global.STRING_BLOCK_LIST);
        mGameArray = mContext.getIntent().getStringArrayExtra(Global.STRING_GAME_LIST);
        imageDownloader = new ImageDownloader(mContext, CACHE_DIR);
        imageDownloader.setmImageDownloadListener(mDownloadListener);
        //mMeepDbCommunicationCtrl = new MeepDbCommunicationCtrl(mContext, Global.AppType.App);
        initHandler();
        initUIComponent(v);
        // 2013-6-13 - Zoya - update shelves
        appsLoadThread = new LoadImagesThread();
        appsLoadThread.start();
        // 2013-6-13 - Zoya - Monitor application installation and uninstall events
        // NOTE(review): registration happens here (onCreateView, may run more
        // than once) but unregistration happens only in onDestroy — verify the
        // receiver is not registered twice when the view is recreated.
        mInstallReceiver();
        return v;
    }
    // 2013-6-13 - Zoya - MyInstalledReceiver class
    // Reloads the whole app list on any package add/remove broadcast.
    private final BroadcastReceiver MyInstalledReceiver = new BroadcastReceiver() {
        @Override
        public void onReceive(Context context, Intent intent) {
            String action = intent.getAction();
            Log.d("tag", "sdcard action----" + action);
            if (action.equals("android.intent.action.PACKAGE_ADDED")) {// install
                appsLoadThread = new LoadImagesThread();
                appsLoadThread.start();
            } else if (action.equals("android.intent.action.PACKAGE_REMOVED")) {// uninstall
                appsLoadThread = new LoadImagesThread();
                appsLoadThread.start();
            }
        }
    };
    // Registers MyInstalledReceiver for package add/remove/change broadcasts.
    // PACKAGE_CHANGED is registered but ignored in onReceive above.
    private void mInstallReceiver() {
        IntentFilter intentFilter = new IntentFilter();
        intentFilter.addAction(Intent.ACTION_PACKAGE_ADDED);
        intentFilter.addAction(Intent.ACTION_PACKAGE_REMOVED);
        intentFilter.addAction(Intent.ACTION_PACKAGE_CHANGED);
        intentFilter.addDataScheme("package");
        mContext.registerReceiver(MyInstalledReceiver, intentFilter);
    }
    // Unregisters the package-change receiver registered in onCreateView.
    @Override
    public void onDestroy() {
        // TODO Auto-generated method stub
        mContext.unregisterReceiver(MyInstalledReceiver);
        super.onDestroy();
    }
    // Finds all views, attaches the adapter and the scroll/click listeners.
    private void initUIComponent(View v) {
        //2013-3-20 - Amy - update shelves use GridView
        myApps = (GridView)v.findViewById(R.id.myApp);
        myApps.setSelector(new ColorDrawable(Color.TRANSPARENT));
        myApps.setOnScrollListener(new OnScrollListener() {
            @Override
            public void onScrollStateChanged(AbsListView view, int scrollState) {
                // Dismiss the popup ring menu as soon as the user scrolls.
                if(scrollState == SCROLL_STATE_TOUCH_SCROLL){
                    hideRingMenu();
                }
            }
            @Override
            public void onScroll(AbsListView view, int firstVisibleItem,
                    int visibleItemCount, int totalItemCount) {
            }
        });
        appAdapter = new AppsAdapter();
        myApps.setAdapter(appAdapter);
        /*mBtnOptionDelete.setOnClickListener(onDeleteBtnClickListener);
        mBtnOptionEdit.setOnClickListener(btnEditOnClickListener);*/
        mSnackIcon = (ImageView)v.findViewById(R.id.imageViewSnackView);
        mSnackIcon.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                goToMeepAppView();
            }
        });
        /*mBtnConfirm = (Button)v.findViewById(R.id.buttonConfirmButton);
        mBtnConfirm.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                // TODO Auto-generated method stub
                if(mEditMode == EditMode.DELETE)
                {
                    for(int i = 0; i< mListViewItemList.size(); i++)
                    {
                        if(mListViewItemList.get(i).isChecked())
                        {
                            //deleteFile(mListViewItemList.get(i).getPath());
                            deleteItem(mListViewItemList.get(i));
                        }
                    }
                }
            }
        });*/
        mRenameLayout = (RelativeLayout)v.findViewById(R.id.relativeLayoutRename);
        mTxtRename = (EditText)v.findViewById(R.id.editTextRename);
        /*mTxtRename.setOnKeyListener(new View.OnKeyListener() {
            @Override
            public boolean onKey(View v, int keyCode, KeyEvent event) {
                if(keyCode == KeyEvent.KEYCODE_ENTER)
                {
                }
                return false;
            }
        }); */
        mViewGroup = (ViewGroup)v. findViewById(R.id.appFullListViewLayout);
    }
    //2013-3-20 -Amy- Use PopupWindow replace RingMenu
    PopupWindow popWindow = null;
    private View ringMenuShow = null;
    /**
     * Shows the delete "ring menu" popup near screen position (x, y) for the
     * given item. Coordinates are clamped so the popup stays on screen.
     * NOTE(review): the PopupWindow is constructed with the previous (possibly
     * null) ringMenuShow and FILL_PARENT (deprecated) before the new layout is
     * inflated; setContentView below replaces the content, so this works, but
     * the construction order looks accidental — verify.
     */
    private void showRingMenu(float x, float y, OsListViewItem item) {
        if(Global.DISABLE_RING_MENU){
            Log.e("MeepApp-showRingMenu","DISABLE_RING_MENU");
            return;
        }
        int left = 0;
        int right = 1;
        int buttonNum = 1;
        if (y < 0){
            y= -20;
        }
        //2013-4-16 -Amy- modified to change pop images
        FrameLayout two_button_left;
        RelativeLayout contentLayout;
        int xx = (int)x+55;
        int yy = (int)y+5;
        LayoutInflater layoutInflater = (LayoutInflater) mContext.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
        popWindow = new PopupWindow(ringMenuShow, LayoutParams.FILL_PARENT, LayoutParams.FILL_PARENT);
        ringMenuShow = layoutInflater.inflate(R.layout.ring_menu_delete, null);
        two_button_left = (FrameLayout) ringMenuShow.findViewById(R.id.two_button_left);
        contentLayout = (RelativeLayout) ringMenuShow.findViewById(R.id.contentLayoutleft);
        android.widget.FrameLayout.LayoutParams layoutParams = (android.widget.FrameLayout.LayoutParams) contentLayout.getLayoutParams();
        layoutParams.leftMargin = xx;
        //Log.e("cdf",""+yy);
        if(yy>230){
            yy=230;
        }
        layoutParams.topMargin = yy;
        contentLayout.setLayoutParams(layoutParams);
        popWindow.setContentView(ringMenuShow);
        popWindow.update();
        two_button_left.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                // TODO Auto-generated method stub
                ringMenuShow.setVisibility(View.GONE);
            }
        });
        popWindow.showAtLocation(myApps, Gravity.START, (int)x+70, (int)y-120);
        ringMenuHandler(left, buttonNum, item);
    }
    // Dismisses the popup (if showing) and clears its content view reference.
    private void hideRingMenu() {
        /*mViewGroup.removeView(ringMenu);
        ringMenu = null;*/
        //2013-3-20 -Amy- Use PopupWindow replace RingMenu
        if(popWindow != null && popWindow.isShowing()){
            popWindow.dismiss();
        }
        ringMenuShow = null;
    }
    // Dispatches to the concrete menu-wiring routine; currently only the
    // left, one-button variant (0, 1) is implemented.
    private void ringMenuHandler(int leftRight, int buttonNum, OsListViewItem item){
        if (leftRight == 0 && buttonNum == 1) {
            //left menu with two button
            leftMenuOneButton(item);
        }
    }
    // Wires the single delete button of the ring menu to uninstall the item.
    private void leftMenuOneButton(final OsListViewItem item) {
        //2013-4-17 -Zoya- update ring menu layout.
        ring_delete = (ImageView) ringMenuShow.findViewById(R.id.btnDelete);
        //ring_menu_right_one = (ImageView) ringMenuShow.findViewById(R.id.ring_menu_right_one3);
        ring_delete.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View pV) {
                mViewGroup.removeView(ringMenuShow);
                //deleteItem1(item);
                deleteItem(item);
                //notification = new NotificationMessage(mContext, null, DELETE_TITLE, DELETE_MESSAGE);
                //2013-3-20 -Amy- Use PopupWindow replace RingMenu
                appAdapter.notifyDataSetChanged();
                hideRingMenu();
            }
        });
        /*ring_menu_right_one.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View pV) {
                mViewGroup.removeView(ringMenuShow);
                deleteItem(item);
                appAdapter.notifyDataSetChanged();
                hideRingMenu();
            }
        });
        */
        //		ring_rename.setOnClickListener(new View.OnClickListener() {
        //			@Override
        //			public void onClick(View pV) {
        //				mViewGroup.removeView(ringMenu);
        //				//renameLayer(item);
        //			}
        //		});
    }
    //*******load image thread*******
    /**
     * Creates the UI-thread handler that refreshes the adapter (msg.what == 1)
     * and then starts a background thread running {@code runReadImage}.
     */
    private void initHandler() {
        mHandlerReadImg = new Handler() {
            @Override
            public void handleMessage(Message msg) {
                switch (msg.what) {
                case 1:{
                    //mListViewItemList = mAppManager.getAllItems(false);
                    //for (int position = 0; position < list.size(); position ++) {
                    //drawImage(list.get(position), position);
                    //}
                    appAdapter.notifyDataSetChanged();
                    break;
                }
                case 2: //draw shelf only
                    //drawShelfOnly();
                    break;
                default:
                    break;
                }
                //2013-3-20 - Amy - update shelves use GridView
                //mLayout.invalidate();
                super.handleMessage(msg);
            }
        };
        //mLayout.removeAllViews();
        //mListViewItemList.clear();
        //mAppManager.clearAppItems();
        mThread = new Thread(runReadImage);
        mThread.start();
    }
    // Posts message 1 ("data ready, refresh adapter") to the UI handler.
    // NOTE(review): unlike LoadImagesThread, this does not call
    // loadAndDrawImage() first — it only triggers a refresh of whatever list
    // already exists; confirm that is intentional.
    private Runnable runReadImage = new Runnable() {
        @Override
        public void run() {
            mHandlerReadImg.sendEmptyMessage(1);
            //loadAndDrawImage();
            /*try {
                mThread.join();
            } catch (InterruptedException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            }*/
        }
    };
    //2013-3-19 - Amy - update shelves use GridView
    // View-holder pattern for grid cells: icon plus caption.
    private static class ViewHolder {
        ImageView pic;
        TextView pic_title;
    }
    /**
     * Adapter mapping {@code mListViewItemList} onto grid cells. Icons are
     * loaded asynchronously through {@code imageDownloader}.
     */
    private class AppsAdapter extends BaseAdapter
    {
        private LayoutInflater inflater = LayoutInflater.from(mContext);
        @Override
        public int getCount() {
            //Log.e("cdf","mListViewItemList.size() == "+mListViewItemList.size());
            if(mListViewItemList == null) {
                return 0;
            }else {
                return mListViewItemList.size();
            }
        }
        @Override
        public Object getItem(int position) {
            if (mListViewItemList == null) {
                return null;
            }
            return mListViewItemList.get(position);
        }
        @Override
        public long getItemId(int position) {
            if (mListViewItemList == null) {
                return -1;
            }
            // NOTE(review): hashCode() is not a stable id across reloads.
            return mListViewItemList.get(position).hashCode();
        }
        @Override
        public View getView(int position, View convertView, ViewGroup parent) {
            ViewHolder holder = null;
            if(convertView == null){
                holder = new ViewHolder();
                convertView = inflater.inflate(R.layout.gridview_item, null);
                holder.pic = (ImageView)convertView.findViewById(R.id.appic);
                holder.pic_title = (TextView) convertView.findViewById(R.id.appic_title);
                convertView.setTag(holder);
            }else {
                holder = (ViewHolder) convertView.getTag();
            }
            final OsListViewItem item = mListViewItemList.get(position);
            holder.pic_title.setText(item.getName());
            //holder.pic.setImageBitmap(item.getImage());
            imageDownloader.download(item.getPath(), holder.pic, bgImage, null);
            //2013-7-18 -Amy- Apps with same name cannot be launched correctly in grid view
            // Tag the icon with the item itself (not just its name) so click
            // handlers launch the exact app even when names collide.
            holder.pic.setTag(R.id.TAG, item);
            //Log.e("cdf","item.getName() == 2 "+item.getName());
            holder.pic.setOnClickListener(btnItemClickListener);
            holder.pic.setOnLongClickListener(btnItemLongClickListener);
            convertView.setVisibility(View.VISIBLE);
            return convertView;
        }
    }
    //-------end load image thread----------
    //******start define listener******
    // Gesture listener used in VIEW mode: single tap launches the selected
    // app; any tap outside VIEW mode hides the option overlay.
    private OnGestureListener mOnGuestureListener = new OnGestureListener() {
        @Override
        public boolean onSingleTapUp(MotionEvent e) {
            if (mEditMode == EditMode.VIEW) {
                if(mSelectedView!=null)
                {
                    String name = (String) mSelectedView.getTag();
                    OsListViewItem item = mAppManager.findListViewItemByName(name);
                    if (mSelectedView.getTag() != null) {
                        if (item != null) {
                            goToApp(item.getPath());
                        }
                    }
                }
            }
            else
            {
                hideOption();
            }
            return false;
        }
        @Override
        public void onShowPress(MotionEvent e) {
            //do nothing
        }
        @Override
        public boolean onScroll(MotionEvent e1, MotionEvent e2, float distanceX, float distanceY) {
            //do nothing
            return false;
        }
        @Override
        public void onLongPress(MotionEvent e) {
            //			float x = e.getX() - 120;
            //			float y = e.getY() - 90;
            //			showOption(x,y);
            //			setEditMode(EditMode.OPTION_SHOWN);
        }
        @Override
        public boolean onFling(MotionEvent e1, MotionEvent e2, float velocityX, float velocityY) {
            //do nothing
            return false;
        }
        @Override
        public boolean onDown(MotionEvent e) {
            //do nothing
            return false;
        }
    };
    // Launches the tapped app unless the ring menu is currently showing.
    private OnClickListener btnItemClickListener = new OnClickListener() {
        @Override
        public void onClick(View v) {
            //String name = (String) v.getTag(R.id.TAG);
            //OsListViewItem item = mAppManager.findListViewItemByName(name);
            //2013-7-18 -Amy- Apps with same name cannot be launched correctly in grid view
            OsListViewItem item = (OsListViewItem) v.getTag(R.id.TAG);
            if (item != null && ringMenuShow == null) {
                goToApp(item.getPath());
            }
            hideRingMenu();
        }
    };
    // Long press on an icon: remember its screen position and show the ring
    // menu slightly above and left of the icon.
    private OnLongClickListener btnItemLongClickListener = new OnLongClickListener() {
        @Override
        public boolean onLongClick(View v) {
            final int[] location = new int[2];
            v.getLocationOnScreen(location);
            mCorX = location[0];
            mCorY = location[1];
            float x = mCorX - 90;
            float y = mCorY - 75;
            hideRingMenu();
            //String name = (String) v.getTag(R.id.TAG);
            //OsListViewItem item = mAppManager.findListViewItemByName(name);
            //2013-7-18 -Amy- Apps with same name cannot be launched correctly in grid view
            OsListViewItem item = (OsListViewItem) v.getTag(R.id.TAG);
            showRingMenu(x, y, item);
            return true;
        }
    };
    // 2013-6-13 - Zoya - update shelves
    // Background worker: clears and reloads the app list, then tells the UI
    // handler (message 1) to refresh the adapter.
    class LoadImagesThread extends Thread {
        @Override
        public void run() {
            // TODO Auto-generated method stub
            if (mListViewItemList != null) {
                mListViewItemList.clear();
            }
            Message message = new Message();
            //			Log.e("cdf","cdf-----------");
            loadAndDrawImage();
            message.what = 1;
            mHandlerReadImg.sendMessage(message);
            super.run();
        }
    }
    // Replaces the backing list with a fresh snapshot from AppManager.
    private void loadAndDrawImage() {
        mListViewItemList = mAppManager.getAllItems(false);
    }
    // Returns true when "<name>.png" appears in nameArray.
    private boolean isAppExist(String name, String[] nameArray) {
        String tempName = name + Global.FILE_TYPE_PNG;
        if (nameArray != null) {
            for(int i =0; i< nameArray.length; i++) {
                if(nameArray[i].equals(tempName)) {
                    return true;
                }
            }
        }
        return false;
    }
    // Extracts the segment before the first '/' of the item's path —
    // presumably the package part of a "package/activity" component string;
    // verify against how OsListViewItem paths are built.
    private String getPackageName(OsListViewItem item)
    {
        if(item != null)
        {
            String content = item.getPath();
            String[] strArr = content.split("/");
            return strArr[0];
        }
        return null;
    }
    // Strips the extension (text after the last '.') from a file name.
    private String getFileNameOnly(String fileName)
    {
        return fileName.substring(0,fileName.lastIndexOf('.'));
    }
    /**
     * Launches the app whose flattened ComponentName is {@code path}.
     * Any launch failure is surfaced to the user as a toast.
     */
    private void goToApp(String path) {
        Intent intent = new Intent(mContext, FullListViewActivity.class);
        // The explicit component below overrides the class given above.
        intent.setComponent(ComponentName
                .unflattenFromString(path));
        intent.addCategory("android.intent.category.LAUNCHER");
        //intent.putExtra(Global.STRING_PATH, path);
        try {
            startActivity(intent);
            MeepLogger meepLogger = new MeepLogger(mContext);
            meepLogger.p("has opened: " + path);
        } catch (Exception ex) {
            Toast.makeText(mContext, ex.toString(), Toast.LENGTH_SHORT).show();
            ex.printStackTrace();
        }
    }
    // Positions and reveals the (currently unused) option overlay.
    // NOTE(review): mOptionLayout is never assigned in this class — calling
    // this would NPE; appears to be dead code kept from an older UI.
    private void showOption(float x, float y) {
        //Log.d("testOnTouch", "x:" + x + " y:" + y + " mcorX:" + mCorX + " mcorY:" + mCorY );
        mOptionLayout.setX(x+mCorX);
        mOptionLayout.setY(y + mCorY);
        mOptionLayout.setVisibility(View.VISIBLE);
    }
    // Hides ring menu and option overlay, returning to VIEW mode.
    private void hideOption() {
        hideRingMenu();
        mOptionLayout.setVisibility(View.GONE);
        setEditMode(EditMode.VIEW);
    }
    // Switches edit mode and toggles the confirm button for DELETE mode.
    // NOTE(review): mBtnConfirm is only assigned in commented-out code, so
    // this would NPE if reached — confirm before re-enabling edit modes.
    private void setEditMode(EditMode editMode) {
        mEditMode = editMode;
        Toast.makeText(mContext, editMode.toString(), Toast.LENGTH_SHORT).show();
        if(editMode == EditMode.DELETE)
        {
            //2013-3-20 - Amy - update shelves use GridView
            //showAllDeleteIcons();
            mBtnConfirm.setVisibility(View.VISIBLE);
        }
        else
        {
            //hideAllDeleteIcons();
            mBtnConfirm.setVisibility(View.GONE);
        }
    }
    // Intentionally empty placeholder.
    private void changeToDeleteMode()
    {
    }
    public LayoutInflater getLayoutInflater() {
        return this.getActivity().getWindow().getLayoutInflater();
    }
    // Starts the system uninstall flow for the item's package.
    private void deleteItem(OsListViewItem item) {
        //uninstallItemName = item.getPath().substring(0, item.getPath().lastIndexOf("/"));
        //Log.d("delete app", "delete app:" + itemName + ".png");
        uninstallApplication(getPackageName(item));
        //uninstallPackageName = getPackageName(item);
        //		deleteItemFile(PATH_SMALL_ICON_DIR + itemName + ".png");
        //		deleteItemFile(PATH_LARGE_ICON_DIR+ itemName + ".png");
        //		deleteItemFile(PATH_LARGE_DIM_ICON_DIR + itemName + ".png");
        //		mListViewItemList.remove(item);
        //		redrawListView();
    }
    // Best-effort file delete; failures are only logged.
    private void deleteItemFile(String path) {
        // delete the real item
        File file = new File(path);
        try {
            if (file.exists()) {
                file.delete();
            }
        } catch (Exception e) {
            Log.e("MovieFullListView", "delete file error : " + e.toString());
        }
    }
    // Fires the standard ACTION_DELETE intent for the given package.
    private void uninstallApplication(String packageName)
    {
        Uri packageURI = Uri.parse("package:" + packageName);
        Intent uninstallIntent = new Intent(Intent.ACTION_DELETE, packageURI);
        startActivity(uninstallIntent);
    }
    // Switches the hosting activity to the "snack" fragment view.
    private void goToMeepAppView(){
        mContext.showDetails(FullListViewActivity.STATE_FRAG_SNACK);
        /*Intent myIntent = new Intent(this.getActivity(), MeepAppActivity.class);
        myIntent.putExtra(Global.STRING_TYPE, "app");
        myIntent.putExtra(Global.STRING_GAME_LIST, mGameArray);
        myIntent.putExtra(Global.STRING_BLOCK_LIST, mBlockArray);
        try {
            startActivityForResult(myIntent, 0);
        } catch (Exception ex) {
            ex.printStackTrace();
        } */
    }
    // AppManager callbacks; all bodies are intentionally inert (see history
    // comments below for why the reload was removed).
    private OnApkListChangeListener mOnApkListChangeListener = new OnApkListChangeListener() {
        @Override
        public void onApkLoadFail() {
            //Log.d(TAG, "onApkLoadFail");
        }
        @Override
        public void onApkListChanged(List<OsListViewItem> apks) {
            //Log.d(TAG, "onApkListChanged");
            // deleted by aaronli Mar21 throws exceptions
            //initHandler();
        }
        @Override
        public void onGetServiceAccount(String user) {
            //initHandler();
        }
    };
    // Icon loading callbacks: show the fetched bitmap, or the mask bitmap on
    // failure; loadImageFromUrl resolves an app icon from its package name.
    private ImageDownloader.ImageDownloadListener mDownloadListener = new ImageDownloader.ImageDownloadListener() {
        @Override
        public void onDownloadSuccessed(ImageView view, Bitmap bitmap) {
            view.setImageBitmap(bitmap);
        }
        @Override
        public void onDownloadFromCache(ImageView view, Bitmap bitmapFromCache) {
            view.setImageBitmap(bitmapFromCache);
        }
        @Override
        public void onDownloadFail(ImageView view) {
            view.setImageBitmap(bgImage);
        }
        /*@Override
        public Bitmap loadImageFromUrl(String url) {
            // TODO Auto-generated method stub
            return null;
        }*/
        @Override
        public Bitmap loadImageFromUrl(String url) {
            return mAppManager.getAppIconBitmap(AppManager.getPackageName(url),frameImage, bgImage);
        }
    };
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.