gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/*
* Copyright 2014-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.cxx;
import com.facebook.buck.graph.AbstractBreadthFirstThrowingTraversal;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.Flavor;
import com.facebook.buck.parser.NoSuchBuildTargetException;
import com.facebook.buck.rules.BuildRule;
import com.facebook.buck.rules.BuildRuleParams;
import com.facebook.buck.rules.BuildRuleResolver;
import com.facebook.buck.rules.SourcePath;
import com.facebook.buck.rules.SourcePathResolver;
import com.facebook.buck.rules.coercer.FrameworkPath;
import com.google.common.base.Preconditions;
import com.google.common.base.Predicate;
import com.google.common.base.Suppliers;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.collect.ComparisonChain;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.Maps;
import com.google.common.collect.Multimap;
import org.immutables.value.Value;
import java.nio.file.Path;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.Map;
import javax.annotation.Nonnull;
/**
 * Static helpers for the preprocessor side of C/C++ build rules: resolving header
 * names to paths, collecting transitive {@link CxxPreprocessorInput}s from the
 * build-rule graph, and creating header symlink-tree / header-map build rules.
 */
public class CxxPreprocessables {

  // Utility class; never instantiated.
  private CxxPreprocessables() {}

  /** Controls how exported headers are materialized for the preprocessor. */
  public enum HeaderMode {

    /**
     * Creates the tree of symbolic links of headers.
     */
    SYMLINK_TREE_ONLY,

    /**
     * Creates the header map that references the headers directly in the source tree.
     */
    HEADER_MAP_ONLY,

    /**
     * Creates the tree of symbolic links of headers and creates the header map that
     * references the symbolic links to the headers.
     */
    SYMLINK_TREE_WITH_HEADER_MAP,
  }

  /** The kind of include-path flag used to expose header roots to the preprocessor. */
  public enum IncludeType {

    /**
     * Headers should be included with `-I`.
     */
    LOCAL {
      @Override
      public Iterable<String> includeArgs(Preprocessor pp, Iterable<String> includeRoots) {
        return pp.localIncludeArgs(includeRoots);
      }
    },

    /**
     * Headers should be included with `-isystem`.
     */
    SYSTEM {
      @Override
      public Iterable<String> includeArgs(Preprocessor pp, Iterable<String> includeRoots) {
        return pp.systemIncludeArgs(includeRoots);
      }
    },

    /**
     * Headers should be included with `-iquote`.
     */
    IQUOTE {
      @Override
      public Iterable<String> includeArgs(Preprocessor pp, Iterable<String> includeRoots) {
        return pp.quoteIncludeArgs(includeRoots);
      }
    },
    ;

    /** @return the preprocessor command-line arguments exposing the given include roots. */
    public abstract Iterable<String> includeArgs(Preprocessor pp, Iterable<String> includeRoots);
  }

  /**
   * Resolve the map of name to {@link SourcePath} to a map of full header name to
   * {@link SourcePath}.
   *
   * @param basePath directory prefix prepended to every header's logical name
   * @param headers map of header name (relative to {@code basePath}) to its source
   * @return map of fully-resolved header path to source path
   */
  public static ImmutableMap<Path, SourcePath> resolveHeaderMap(
      Path basePath,
      ImmutableMap<String, SourcePath> headers) {
    ImmutableMap.Builder<Path, SourcePath> headerMap = ImmutableMap.builder();

    // Resolve the "names" of the headers to actual paths by prepending the base path
    // specified by the build target.
    for (ImmutableMap.Entry<String, SourcePath> ent : headers.entrySet()) {
      Path path = basePath.resolve(ent.getKey());
      headerMap.put(path, ent.getValue());
    }

    return headerMap.build();
  }

  /**
   * Find and return the {@link CxxPreprocessorInput} objects from {@link CxxPreprocessorDep}
   * found while traversing the dependencies starting from the {@link BuildRule} objects given.
   *
   * @param cxxPlatform platform used to resolve each dep's preprocessor input
   * @param inputs roots of the dependency traversal
   * @param traverse predicate deciding whether to descend past a non-preprocessor rule
   */
  public static Collection<CxxPreprocessorInput> getTransitiveCxxPreprocessorInput(
      final CxxPlatform cxxPlatform,
      Iterable<? extends BuildRule> inputs,
      final Predicate<Object> traverse) throws NoSuchBuildTargetException {

    // We don't really care about the order we get back here, since headers shouldn't
    // conflict. However, we want something that's deterministic, so sort by build
    // target.
    final Map<BuildTarget, CxxPreprocessorInput> deps = Maps.newLinkedHashMap();

    // Build up the map of all C/C++ preprocessable dependencies.
    new AbstractBreadthFirstThrowingTraversal<BuildRule, NoSuchBuildTargetException>(inputs) {
      @Override
      public ImmutableSet<BuildRule> visit(BuildRule rule) throws NoSuchBuildTargetException {
        if (rule instanceof CxxPreprocessorDep) {
          CxxPreprocessorDep dep = (CxxPreprocessorDep) rule;
          deps.putAll(
              dep.getTransitiveCxxPreprocessorInput(
                  cxxPlatform,
                  HeaderVisibility.PUBLIC));
          // The dep already supplies its own transitive closure; don't descend further.
          return ImmutableSet.of();
        }
        return traverse.apply(rule) ? rule.getDeps() : ImmutableSet.of();
      }
    }.start();

    // Grab the cxx preprocessor inputs and return them.
    return deps.values();
  }

  /**
   * Convenience overload of
   * {@link #getTransitiveCxxPreprocessorInput(CxxPlatform, Iterable, Predicate)}
   * that traverses through every rule.
   */
  public static Collection<CxxPreprocessorInput> getTransitiveCxxPreprocessorInput(
      final CxxPlatform cxxPlatform,
      Iterable<? extends BuildRule> inputs) throws NoSuchBuildTargetException {
    return getTransitiveCxxPreprocessorInput(
        cxxPlatform,
        inputs,
        x -> true);
  }

  /**
   * Build the {@link HeaderSymlinkTree} rule using the original build params from a target node.
   * In particular, make sure to drop all dependencies from the original build rule params,
   * as these are modeled via {@link CxxPreprocessAndCompile}.
   *
   * @param root output root for the symlink tree / header map
   * @param links map of resolved header path to its source
   * @param headerMode which concrete rule type to build
   */
  public static HeaderSymlinkTree createHeaderSymlinkTreeBuildRule(
      SourcePathResolver resolver,
      BuildTarget target,
      BuildRuleParams params,
      Path root,
      ImmutableMap<Path, SourcePath> links,
      HeaderMode headerMode) {

    // Symlink trees never need to depend on anything.
    BuildRuleParams paramsWithoutDeps =
        params.copyWithChanges(
            target,
            Suppliers.ofInstance(ImmutableSortedSet.of()),
            Suppliers.ofInstance(ImmutableSortedSet.of()));

    switch (headerMode) {
      case SYMLINK_TREE_WITH_HEADER_MAP:
        return new HeaderSymlinkTreeWithHeaderMap(
            paramsWithoutDeps,
            resolver,
            root,
            links);
      case HEADER_MAP_ONLY:
        return new DirectHeaderMap(
            paramsWithoutDeps,
            resolver,
            root,
            links);
      default:
      case SYMLINK_TREE_ONLY:
        return new HeaderSymlinkTree(
            paramsWithoutDeps,
            resolver,
            root,
            links);
    }
  }

  /**
   * @return adds a the header {@link com.facebook.buck.rules.SymlinkTree} for the given rule to
   *     the {@link CxxPreprocessorInput}.
   */
  public static CxxPreprocessorInput.Builder addHeaderSymlinkTree(
      CxxPreprocessorInput.Builder builder,
      BuildTarget target,
      BuildRuleResolver ruleResolver,
      CxxPlatform platform,
      HeaderVisibility headerVisibility,
      IncludeType includeType) throws NoSuchBuildTargetException {
    // Require the symlink-tree rule flavored for this platform and visibility.
    BuildRule rule = ruleResolver.requireRule(
        BuildTarget.builder(target)
            .addFlavors(
                platform.getFlavor(),
                CxxDescriptionEnhancer.getHeaderSymlinkTreeFlavor(headerVisibility))
            .build());
    Preconditions.checkState(
        rule instanceof HeaderSymlinkTree,
        "Attempt to add %s of type %s and class %s to %s",
        rule.getFullyQualifiedName(),
        rule.getType(),
        rule.getClass().getName(),
        target);
    HeaderSymlinkTree symlinkTree = (HeaderSymlinkTree) rule;
    builder.addIncludes(CxxSymlinkTreeHeaders.from(symlinkTree, includeType));
    return builder;
  }

  /**
   * @return The BuildRule corresponding to the exported (public) header symlink
   *     tree for the provided target.
   */
  public static HeaderSymlinkTree requireHeaderSymlinkTreeForLibraryTarget(
      BuildRuleResolver ruleResolver,
      BuildTarget libraryBuildTarget,
      Flavor platformFlavor) {
    BuildRule rule;
    try {
      rule = ruleResolver.requireRule(
          BuildTarget.builder(libraryBuildTarget)
              .addFlavors(
                  platformFlavor,
                  CxxDescriptionEnhancer.getHeaderSymlinkTreeFlavor(HeaderVisibility.PUBLIC))
              .build());
    } catch (NoSuchBuildTargetException e) {
      // This shouldn't happen; if a library rule exists, its header symlink tree rule
      // should exist.
      throw new IllegalStateException(e);
    }
    Preconditions.checkState(rule instanceof HeaderSymlinkTree);
    return (HeaderSymlinkTree) rule;
  }

  /**
   * Builds a {@link CxxPreprocessorInput} for a rule.
   *
   * @param hasHeaderSymlinkTree when true, the rule's header symlink tree is looked
   *     up via {@link #addHeaderSymlinkTree} and added to the input
   * @param exportedPreprocessorFlags per-source-type flags to export
   * @param frameworks framework paths to export
   */
  public static CxxPreprocessorInput getCxxPreprocessorInput(
      BuildRuleParams params,
      BuildRuleResolver ruleResolver,
      boolean hasHeaderSymlinkTree,
      CxxPlatform platform,
      HeaderVisibility headerVisibility,
      IncludeType includeType,
      Multimap<CxxSource.Type, String> exportedPreprocessorFlags,
      Iterable<FrameworkPath> frameworks) throws NoSuchBuildTargetException {
    CxxPreprocessorInput.Builder builder = CxxPreprocessorInput.builder();
    if (hasHeaderSymlinkTree) {
      addHeaderSymlinkTree(
          builder,
          params.getBuildTarget(),
          ruleResolver,
          platform,
          headerVisibility,
          includeType);
    }
    return builder
        .putAllPreprocessorFlags(exportedPreprocessorFlags)
        .addAllFrameworks(frameworks)
        .build();
  }

  /**
   * @return a cache mapping (platform, visibility) to the transitive preprocessor
   *     input of {@code preprocessorDep}, computed on demand. Each call creates a
   *     fresh cache bound to the given dep.
   */
  public static LoadingCache<
        CxxPreprocessorInputCacheKey,
        ImmutableMap<BuildTarget, CxxPreprocessorInput>
      > getTransitiveCxxPreprocessorInputCache(final CxxPreprocessorDep preprocessorDep) {
    return CacheBuilder.newBuilder()
        .build(
            new CacheLoader<
                CxxPreprocessorInputCacheKey,
                ImmutableMap<BuildTarget, CxxPreprocessorInput>>() {
              @Override
              public ImmutableMap<BuildTarget, CxxPreprocessorInput> load(
                  @Nonnull CxxPreprocessorInputCacheKey key)
                  throws Exception {
                // LinkedHashMap keeps the result deterministic: the dep itself first,
                // followed by its deps in iteration order.
                Map<BuildTarget, CxxPreprocessorInput> builder = new LinkedHashMap<>();
                builder.put(
                    preprocessorDep.getBuildTarget(),
                    preprocessorDep.getCxxPreprocessorInput(
                        key.getPlatform(),
                        key.getVisibility()));
                for (CxxPreprocessorDep dep :
                     preprocessorDep.getCxxPreprocessorDeps(key.getPlatform())) {
                  builder.putAll(
                      dep.getTransitiveCxxPreprocessorInput(
                          key.getPlatform(),
                          key.getVisibility()));
                }
                return ImmutableMap.copyOf(builder);
              }
            });
  }

  /** Cache key for {@link #getTransitiveCxxPreprocessorInputCache}: (platform, visibility). */
  @Value.Immutable
  public abstract static class CxxPreprocessorInputCacheKey
      implements Comparable<CxxPreprocessorInputCacheKey> {

    @Value.Parameter
    public abstract CxxPlatform getPlatform();

    @Value.Parameter
    public abstract HeaderVisibility getVisibility();

    // Ordered by platform flavor first, then visibility.
    @Override
    public int compareTo(@Nonnull CxxPreprocessorInputCacheKey o) {
      return ComparisonChain.start()
          .compare(getPlatform().getFlavor(), o.getPlatform().getFlavor())
          .compare(getVisibility(), o.getVisibility())
          .result();
    }
  }
}
| |
/**
* Copyright 2017 Netflix, Inc.
*
* <p>Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of the License at
*
* <p>http://www.apache.org/licenses/LICENSE-2.0
*
* <p>Unless required by applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.priam.google;
import com.google.api.client.auth.oauth2.Credential;
import com.google.api.client.googleapis.auth.oauth2.GoogleCredential;
import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport;
import com.google.api.client.http.HttpTransport;
import com.google.api.client.json.JsonFactory;
import com.google.api.client.json.jackson2.JacksonFactory;
import com.google.api.services.storage.Storage;
import com.google.api.services.storage.StorageScopes;
import com.google.inject.Inject;
import com.google.inject.Provider;
import com.google.inject.name.Named;
import com.netflix.priam.backup.AbstractBackupPath;
import com.netflix.priam.backup.AbstractFileSystem;
import com.netflix.priam.backup.BackupRestoreException;
import com.netflix.priam.config.IConfiguration;
import com.netflix.priam.cred.ICredentialGeneric;
import com.netflix.priam.cred.ICredentialGeneric.KEY;
import com.netflix.priam.merics.BackupMetrics;
import com.netflix.priam.notification.BackupNotificationMgr;
import java.io.*;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import org.apache.commons.io.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * {@link AbstractFileSystem} implementation backed by Google Cloud Storage, using a
 * service-account credential whose private key is stored encrypted and decrypted to a
 * transient file at authorization time. Upload-side operations are unimplemented.
 */
public class GoogleEncryptedFileSystem extends AbstractFileSystem {
    private static final Logger logger = LoggerFactory.getLogger(GoogleEncryptedFileSystem.class);

    private static final String APPLICATION_NAME = "gdl";
    private static final JsonFactory JSON_FACTORY = JacksonFactory.getDefaultInstance();

    private final HttpTransport httpTransport;
    // Represents our "service account" credentials we will use to access GCS.
    // volatile is required: constructGcsCredential() reads this field outside the
    // synchronized block (double-checked locking), so without volatile another thread
    // could observe a partially-constructed Credential.
    private volatile Credential credential;
    // Lazily-initialized GCS handles.
    // NOTE(review): these two lazy inits are not synchronized; concurrent first calls
    // could build the handle twice (harmless but wasteful) — confirm single-threaded use.
    private Storage gcsStorageHandle;
    private Storage.Objects objectsResourceHandle = null;

    private final String srcBucketName;
    private final IConfiguration config;
    private final ICredentialGeneric gcsCredential;
    private final BackupMetrics backupMetrics;

    @Inject
    public GoogleEncryptedFileSystem(
            Provider<AbstractBackupPath> pathProvider,
            final IConfiguration config,
            @Named("gcscredential") ICredentialGeneric credential,
            BackupMetrics backupMetrics,
            BackupNotificationMgr backupNotificationManager) {
        super(config, backupMetrics, backupNotificationManager, pathProvider);
        this.backupMetrics = backupMetrics;
        this.config = config;
        this.gcsCredential = credential;

        try {
            this.httpTransport = GoogleNetHttpTransport.newTrustedTransport();
        } catch (Exception e) {
            throw new IllegalStateException(
                    "Unable to create a handle to the Google Http transport", e);
        }

        this.srcBucketName = getShard();
    }

    /** @return the lazily-created objects resource handle for GCS object operations. */
    private Storage.Objects constructObjectResourceHandle() {
        if (this.objectsResourceHandle != null) {
            return this.objectsResourceHandle;
        }

        constructGcsStorageHandle();
        this.objectsResourceHandle = this.gcsStorageHandle.objects();
        return this.objectsResourceHandle;
    }

    /*
     * Get a handle to the GCS api to manage our data within their storage. Code derive from
     * https://code.google.com/p/google-api-java-client/source/browse/storage-cmdline-sample/src/main/java/com/google/api/services/samples/storage/cmdline/StorageSample.java?repo=samples
     *
     * Note: GCS storage will use our credential to do auto-refresh of expired tokens
     */
    private Storage constructGcsStorageHandle() {
        if (this.gcsStorageHandle != null) {
            return this.gcsStorageHandle;
        }

        try {
            constructGcsCredential();
        } catch (Exception e) {
            throw new IllegalStateException("Exception during GCS authorization", e);
        }

        this.gcsStorageHandle =
                new Storage.Builder(this.httpTransport, JSON_FACTORY, this.credential)
                        .setApplicationName(APPLICATION_NAME)
                        .build();
        return this.gcsStorageHandle;
    }

    /**
     * Authorizes the installed application to access user's protected data, code from
     * https://developers.google.com/maps-engine/documentation/oauth/serviceaccount and
     * http://javadoc.google-api-java-client.googlecode.com/hg/1.8.0-beta/com/google/api/client/googleapis/auth/oauth2/GoogleCredential.html
     *
     * <p>Thread-safe via double-checked locking on the volatile {@link #credential} field.
     */
    private Credential constructGcsCredential() throws Exception {
        // First (unsynchronized) check — safe because the field is volatile.
        Credential existing = this.credential;
        if (existing != null) {
            return existing;
        }

        synchronized (this) {
            if (this.credential == null) {
                // NOTE(review): platform default charset; the value is an email address,
                // so this is effectively ASCII — confirm if non-ASCII is possible.
                String serviceAcctEmail =
                        new String(this.gcsCredential.getValue(KEY.GCS_SERVICE_ID));

                if (this.config.getGcsServiceAccountPrivateKeyLoc() == null
                        || this.config.getGcsServiceAccountPrivateKeyLoc().isEmpty()) {
                    throw new NullPointerException(
                            "Fast property for the the GCS private key file is null/empty.");
                }

                // Take the encrypted private key, decrypted into an in-transit file which is passed
                // to GCS.
                File gcsPrivateKeyHandle =
                        new File(this.config.getGcsServiceAccountPrivateKeyLoc() + ".output");

                byte[] gcsPrivateKeyPlainText =
                        this.gcsCredential.getValue(KEY.GCS_PRIVATE_KEY_LOC);
                // Write the decrypted key bytes straight to the file; no intermediate
                // in-memory buffer is needed.
                try (BufferedOutputStream bos =
                        new BufferedOutputStream(new FileOutputStream(gcsPrivateKeyHandle))) {
                    bos.write(gcsPrivateKeyPlainText);
                } catch (IOException e) {
                    throw new IOException(
                            "Exception when writing decrypted gcs private key value to disk.", e);
                }

                Collection<String> scopes = new ArrayList<>(1);
                scopes.add(StorageScopes.DEVSTORAGE_READ_ONLY);

                // Cryptex decrypted service account key derive from the GCS console.
                this.credential =
                        new GoogleCredential.Builder()
                                .setTransport(this.httpTransport)
                                .setJsonFactory(JSON_FACTORY)
                                .setServiceAccountId(serviceAcctEmail)
                                .setServiceAccountScopes(scopes)
                                .setServiceAccountPrivateKeyFromP12File(gcsPrivateKeyHandle)
                                .build();
            }
        }

        return this.credential;
    }

    /**
     * Downloads {@code remotePath} from the source bucket into the local restore file
     * (with {@code suffix} appended) in a single direct (non-chunked) request, then
     * records the download size metric.
     */
    @Override
    protected void downloadFileImpl(AbstractBackupPath path, String suffix)
            throws BackupRestoreException {
        String remotePath = path.getRemotePath();
        File localFile = new File(path.newRestoreFile().getAbsolutePath() + suffix);
        // NOTE(review): the object name used in error messages is derived from the
        // backup prefix, not from remotePath — confirm that is intentional.
        String objectName = parseObjectname(getPrefix().toString());

        com.google.api.services.storage.Storage.Objects.Get get;
        try {
            get = constructObjectResourceHandle().get(this.srcBucketName, remotePath);
        } catch (IOException e) {
            throw new BackupRestoreException(
                    "IO error retrieving metadata for: "
                            + objectName
                            + " from bucket: "
                            + this.srcBucketName,
                    e);
        }

        // If you're not using GCS' AppEngine, download the whole thing (instead of chunks) in one
        // request, if possible.
        get.getMediaHttpDownloader().setDirectDownloadEnabled(true);
        try (OutputStream os = new FileOutputStream(localFile);
                InputStream is = get.executeMediaAsInputStream()) {
            IOUtils.copyLarge(is, os);
        } catch (IOException e) {
            throw new BackupRestoreException(
                    "IO error during streaming of object: "
                            + objectName
                            + " from bucket: "
                            + this.srcBucketName,
                    e);
        } catch (Exception ex) {
            throw new BackupRestoreException(
                    "Exception encountered when copying bytes from input to output", ex);
        }

        backupMetrics.recordDownloadRate(get.getLastResponseHeaders().getContentLength());
    }

    @Override
    protected boolean doesRemoteFileExist(Path remotePath) {
        // TODO: Implement based on GCS. Since this is only used for upload, leaving it empty
        return false;
    }

    @Override
    public Iterator<String> listFileSystem(String prefix, String delimiter, String marker) {
        // NOTE(review): 'delimiter' and 'marker' are ignored (null passed instead) —
        // confirm GoogleFileIterator handles paging internally.
        return new GoogleFileIterator(constructGcsStorageHandle(), prefix, null);
    }

    @Override
    public void cleanup() {
        // TODO Auto-generated method stub
    }

    @Override
    public void shutdown() {
        // TODO Auto-generated method stub
    }

    /** Upload is not supported by this filesystem; always throws. */
    @Override
    protected long uploadFileImpl(AbstractBackupPath path) throws BackupRestoreException {
        throw new UnsupportedOperationException();
    }

    /** Not implemented; always reports a size of 0. */
    @Override
    public long getFileSize(String remotePath) throws BackupRestoreException {
        return 0;
    }

    @Override
    public void deleteFiles(List<Path> remotePaths) throws BackupRestoreException {
        // TODO: Delete implementation
    }

    /*
     * @param pathPrefix
     * @return objectName — the final path segment after the last '/' (the whole
     *         string when no '/' is present).
     */
    static String parseObjectname(String pathPrefix) {
        // '/' was previously written as the magic literal 0x2f.
        int offset = pathPrefix.lastIndexOf('/');
        return pathPrefix.substring(offset + 1);
    }
}
| |
/**
* Copyright (C) 2015 ogaclejapan
* Copyright (C) 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ogaclejapan.smarttablayout;
import android.content.Context;
import android.content.res.ColorStateList;
import android.content.res.TypedArray;
import android.graphics.Typeface;
import android.os.Build;
import android.support.v4.view.PagerAdapter;
import android.support.v4.view.ViewPager;
import android.util.AttributeSet;
import android.util.DisplayMetrics;
import android.util.TypedValue;
import android.view.Gravity;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.HorizontalScrollView;
import android.widget.LinearLayout;
import android.widget.TextView;
/**
* To be used with ViewPager to provide a tab indicator component which give constant feedback as
* to
* the user's scroll progress.
* <p>
* To use the component, simply add it to your view hierarchy. Then in your
* {@link android.app.Activity} or {@link android.app.Fragment}, {@link
* android.support.v4.app.Fragment} call
* {@link #setViewPager(android.support.v4.view.ViewPager)} providing it the ViewPager this layout
* is being used for.
* <p>
* The colors can be customized in two ways. The first and simplest is to provide an array of
* colors
* via {@link #setSelectedIndicatorColors(int...)} and {@link #setDividerColors(int...)}. The
* alternative is via the {@link TabColorizer} interface which provides you complete control over
* which color is used for any individual position.
* <p>
* The views used as tabs can be customized by calling {@link #setCustomTabView(int, int)},
* providing the layout ID of your custom layout.
* <p>
* Forked from Google Samples > SlidingTabsBasic >
* <a href="https://developer.android.com/samples/SlidingTabsBasic/src/com.example.android.common/view/SlidingTabLayout.html">SlidingTabLayout</a>
*/
public class SmartTabLayout extends HorizontalScrollView {

  // Default values used when the corresponding styleable attribute is absent.
  private static final boolean DEFAULT_DISTRIBUTE_EVENLY = false;
  private static final int TITLE_OFFSET_DIPS = 24;
  private static final int TAB_VIEW_PADDING_DIPS = 16;
  private static final boolean TAB_VIEW_TEXT_ALL_CAPS = true;
  private static final int TAB_VIEW_TEXT_SIZE_SP = 12;
  private static final int TAB_VIEW_TEXT_COLOR = 0xFC000000;
  private static final int TAB_VIEW_TEXT_MIN_WIDTH = 0;

  /** The strip that holds the individual tab views and draws the indicator. */
  protected final SmartTabStrip tabStrip;
  // Pixel offset applied when scrolling to a non-first tab (non-centered mode).
  private int titleOffset;
  // Default-tab styling, resolved from attributes in the constructor.
  private int tabViewBackgroundResId;
  private boolean tabViewTextAllCaps;
  private ColorStateList tabViewTextColors;
  private float tabViewTextSize;
  private int tabViewTextHorizontalPadding;
  private int tabViewTextMinWidth;
  // Pager wiring and optional callbacks / custom tab factory.
  private ViewPager viewPager;
  private ViewPager.OnPageChangeListener viewPagerPageChangeListener;
  private OnScrollChangeListener onScrollChangeListener;
  private TabProvider tabProvider;
  private boolean distributeEvenly;
/** Creates a SmartTabLayout with no XML attributes (programmatic use). */
public SmartTabLayout(Context context) {
  this(context, null);
}
/** Creates a SmartTabLayout from XML using the default style. */
public SmartTabLayout(Context context, AttributeSet attrs) {
  this(context, attrs, 0);
}
/**
 * Reads the {@code stl_*} styleable attributes (falling back to the class defaults),
 * stores the default-tab styling, and installs the {@link SmartTabStrip} as the
 * single child of this scroll view.
 */
public SmartTabLayout(Context context, AttributeSet attrs, int defStyle) {
  super(context, attrs, defStyle);

  // Disable the Scroll Bar
  setHorizontalScrollBarEnabled(false);

  final DisplayMetrics dm = getResources().getDisplayMetrics();
  final float density = dm.density;

  // Start from the class defaults; each may be overridden by an attribute below.
  int tabBackgroundResId = NO_ID;
  boolean textAllCaps = TAB_VIEW_TEXT_ALL_CAPS;
  ColorStateList textColors;
  float textSize = TypedValue.applyDimension(
      TypedValue.COMPLEX_UNIT_SP, TAB_VIEW_TEXT_SIZE_SP, dm);
  int textHorizontalPadding = (int) (TAB_VIEW_PADDING_DIPS * density);
  int textMinWidth = (int) (TAB_VIEW_TEXT_MIN_WIDTH * density);
  boolean distributeEvenly = DEFAULT_DISTRIBUTE_EVENLY;
  int customTabLayoutId = NO_ID;
  int customTabTextViewId = NO_ID;

  TypedArray a = context.obtainStyledAttributes(
      attrs, R.styleable.stl_SmartTabLayout, defStyle, 0);
  tabBackgroundResId = a.getResourceId(
      R.styleable.stl_SmartTabLayout_stl_defaultTabBackground, tabBackgroundResId);
  textAllCaps = a.getBoolean(
      R.styleable.stl_SmartTabLayout_stl_defaultTabTextAllCaps, textAllCaps);
  textColors = a.getColorStateList(
      R.styleable.stl_SmartTabLayout_stl_defaultTabTextColor);
  textSize = a.getDimension(
      R.styleable.stl_SmartTabLayout_stl_defaultTabTextSize, textSize);
  textHorizontalPadding = a.getDimensionPixelSize(
      R.styleable.stl_SmartTabLayout_stl_defaultTabTextHorizontalPadding, textHorizontalPadding);
  textMinWidth = a.getDimensionPixelSize(
      R.styleable.stl_SmartTabLayout_stl_defaultTabTextMinWidth, textMinWidth);
  customTabLayoutId = a.getResourceId(
      R.styleable.stl_SmartTabLayout_stl_customTabTextLayoutId, customTabLayoutId);
  customTabTextViewId = a.getResourceId(
      R.styleable.stl_SmartTabLayout_stl_customTabTextViewId, customTabTextViewId);
  distributeEvenly = a.getBoolean(
      R.styleable.stl_SmartTabLayout_stl_distributeEvenly, distributeEvenly);
  a.recycle();

  this.titleOffset = (int) (TITLE_OFFSET_DIPS * density);
  this.tabViewBackgroundResId = tabBackgroundResId;
  this.tabViewTextAllCaps = textAllCaps;
  // Fall back to the built-in text color when the attribute is not supplied.
  this.tabViewTextColors = (textColors != null)
      ? textColors
      : ColorStateList.valueOf(TAB_VIEW_TEXT_COLOR);
  this.tabViewTextSize = textSize;
  this.tabViewTextHorizontalPadding = textHorizontalPadding;
  this.tabViewTextMinWidth = textMinWidth;
  this.distributeEvenly = distributeEvenly;

  if (customTabLayoutId != NO_ID) {
    setCustomTabView(customTabLayoutId, customTabTextViewId);
  }

  this.tabStrip = new SmartTabStrip(context, attrs);

  // These two modes compete for horizontal space and cannot be combined.
  if (distributeEvenly && tabStrip.isIndicatorAlwaysInCenter()) {
    throw new UnsupportedOperationException(
        "'distributeEvenly' and 'indicatorAlwaysInCenter' both use does not support");
  }

  // Make sure that the Tab Strips fills this View
  setFillViewport(!tabStrip.isIndicatorAlwaysInCenter());
  addView(tabStrip, LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT);
}
/** Forwards horizontal scroll changes to the registered {@link OnScrollChangeListener}. */
@Override
protected void onScrollChanged(int l, int t, int oldl, int oldt) {
  super.onScrollChanged(l, t, oldl, oldt);
  // Snapshot the listener so a concurrent un-registration cannot NPE between
  // the null check and the call.
  final OnScrollChangeListener listener = onScrollChangeListener;
  if (listener != null) {
    listener.onScrollChanged(l, oldl);
  }
}
/**
 * In indicator-always-in-center mode, pads this view so the first and last tabs can
 * be centered, and pins the strip's minimum width so it doesn't collapse.
 */
@Override
protected void onSizeChanged(int w, int h, int oldw, int oldh) {
  super.onSizeChanged(w, h, oldw, oldh);
  // BUG FIX: the original used getChildCount() — this scroll view's own child count,
  // which is always 1 (the strip) once constructed. That made the guard useless when
  // the strip had no tabs, and made lastTab resolve to getChildAt(0), i.e. the FIRST
  // tab, so the right padding was computed from the wrong view. Use the strip's count.
  if (tabStrip.isIndicatorAlwaysInCenter() && tabStrip.getChildCount() > 0) {
    View firstTab = tabStrip.getChildAt(0);
    View lastTab = tabStrip.getChildAt(tabStrip.getChildCount() - 1);
    int left = (w - Utils.getMeasuredWidth(firstTab)) / 2 - Utils.getMarginStart(firstTab);
    int right = (w - Utils.getMeasuredWidth(lastTab)) / 2 - Utils.getMarginEnd(lastTab);
    tabStrip.setMinimumWidth(tabStrip.getMeasuredWidth());
    setPadding(left, getPaddingTop(), right, getPaddingBottom());
    setClipToPadding(false);
  }
}
/**
 * Set the behavior of the Indicator scrolling feedback.
 *
 * @param interpolator {@link com.ogaclejapan.smarttablayout.SmartTabIndicationInterpolator}
 *     controlling how the indicator edges move between tabs
 */
public void setIndicationInterpolator(SmartTabIndicationInterpolator interpolator) {
  tabStrip.setIndicationInterpolator(interpolator);
}
/**
 * Set the custom {@link TabColorizer} to be used.
 *
 * If you only require simple customisation then you can use
 * {@link #setSelectedIndicatorColors(int...)} and {@link #setDividerColors(int...)} to achieve
 * similar effects.
 */
public void setCustomTabColorizer(TabColorizer tabColorizer) {
  tabStrip.setCustomTabColorizer(tabColorizer);
}
/**
 * Set the color used for styling the tab text. This will need to be called prior to calling
 * {@link #setViewPager(android.support.v4.view.ViewPager)} otherwise it will not get set
 *
 * @param color to use for tab text
 */
public void setDefaultTabTextColor(int color) {
  tabViewTextColors = ColorStateList.valueOf(color);
}
/**
 * Sets the colors used for styling the tab text. This will need to be called prior to calling
 * {@link #setViewPager(android.support.v4.view.ViewPager)} otherwise it will not get set
 *
 * @param colors ColorStateList to use for tab text
 */
public void setDefaultTabTextColor(ColorStateList colors) {
  tabViewTextColors = colors;
}
/**
 * Set the same weight for tab. Takes effect the next time the tab strip is populated
 * (i.e. on the next {@link #setViewPager(ViewPager)} call).
 */
public void setDistributeEvenly(boolean distributeEvenly) {
  this.distributeEvenly = distributeEvenly;
}
/**
 * Sets the colors to be used for indicating the selected tab. These colors are treated as a
 * circular array. Providing one color will mean that all tabs are indicated with the same color.
 */
public void setSelectedIndicatorColors(int... colors) {
  tabStrip.setSelectedIndicatorColors(colors);
}
/**
 * Sets the colors to be used for tab dividers. These colors are treated as a circular array.
 * Providing one color will mean that all tabs are indicated with the same color.
 */
public void setDividerColors(int... colors) {
  tabStrip.setDividerColors(colors);
}
/**
 * Set the {@link ViewPager.OnPageChangeListener}. When using {@link SmartTabLayout} you are
 * required to set any {@link ViewPager.OnPageChangeListener} through this method. This is so
 * that the layout can update it's scroll position correctly.
 *
 * @see ViewPager#setOnPageChangeListener(ViewPager.OnPageChangeListener)
 */
public void setOnPageChangeListener(ViewPager.OnPageChangeListener listener) {
  viewPagerPageChangeListener = listener;
}
/**
 * Set {@link OnScrollChangeListener} for obtaining values of scrolling.
 *
 * @param listener the {@link OnScrollChangeListener} to set
 */
public void setOnScrollChangeListener(OnScrollChangeListener listener) {
  onScrollChangeListener = listener;
}
/**
 * Set the custom layout to be inflated for the tab views.
 *
 * @param layoutResId Layout id to be inflated
 * @param textViewId id of the {@link android.widget.TextView} in the inflated view
 */
public void setCustomTabView(int layoutResId, int textViewId) {
  tabProvider = new SimpleTabProvider(getContext(), layoutResId, textViewId);
}
/**
 * Set the custom layout to be inflated for the tab views.
 *
 * @param provider {@link TabProvider} that creates one view per tab position
 */
public void setCustomTabView(TabProvider provider) {
  tabProvider = provider;
}
/**
 * Sets the associated view pager and (re)builds the tab strip from its adapter.
 * The pager content (number of tabs and tab titles) is assumed not to change after
 * this call has been made.
 */
public void setViewPager(ViewPager viewPager) {
  // Always start from an empty strip, even when detaching (null pager).
  tabStrip.removeAllViews();
  this.viewPager = viewPager;
  if (viewPager == null || viewPager.getAdapter() == null) {
    return;
  }
  viewPager.setOnPageChangeListener(new InternalViewPagerListener());
  populateTabStrip();
}
/**
 * Returns the view at the specified position in the tabs.
 *
 * @param position the position at which to get the view from
 * @return the view at the specified position or null if the position does not exist within the
 *     tabs
 */
public View getTabAt(int position) {
  return tabStrip.getChildAt(position);
}
/**
 * Builds the default tab view used when no custom tab view has been supplied via
 * {@link #setCustomTabView(int, int)}: a bold, centered {@link TextView} styled with
 * the attribute-resolved colors, size, padding and background.
 */
protected TextView createDefaultTabView(CharSequence title) {
  final Context context = getContext();
  final TextView view = new TextView(context);
  view.setGravity(Gravity.CENTER);
  view.setText(title);
  view.setTextColor(tabViewTextColors);
  view.setTextSize(TypedValue.COMPLEX_UNIT_PX, tabViewTextSize);
  view.setTypeface(Typeface.DEFAULT_BOLD);
  view.setLayoutParams(new LinearLayout.LayoutParams(
      LinearLayout.LayoutParams.WRAP_CONTENT, LinearLayout.LayoutParams.MATCH_PARENT));

  if (tabViewBackgroundResId != NO_ID) {
    view.setBackgroundResource(tabViewBackgroundResId);
  } else if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
    // No explicit background: on Honeycomb+ fall back to the theme's
    // selectableItemBackground so the tab shows a pressed state.
    TypedValue outValue = new TypedValue();
    context.getTheme().resolveAttribute(
        android.R.attr.selectableItemBackground, outValue, true);
    view.setBackgroundResource(outValue.resourceId);
  }

  if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH) {
    // Match the Action Bar tab style on ICS+.
    view.setAllCaps(tabViewTextAllCaps);
  }

  view.setPadding(tabViewTextHorizontalPadding, 0, tabViewTextHorizontalPadding, 0);
  if (tabViewTextMinWidth > 0) {
    view.setMinWidth(tabViewTextMinWidth);
  }
  return view;
}
/**
 * Creates one tab view per adapter page (via the custom {@link TabProvider} when set,
 * otherwise {@link #createDefaultTabView}), wires the shared click listener, and marks
 * the pager's current page as selected.
 */
private void populateTabStrip() {
  final PagerAdapter adapter = viewPager.getAdapter();
  final OnClickListener clickListener = new TabClickListener();
  final int pageCount = adapter.getCount();
  final int currentItem = viewPager.getCurrentItem();

  for (int position = 0; position < pageCount; position++) {
    final View tab = (tabProvider == null)
        ? createDefaultTabView(adapter.getPageTitle(position))
        : tabProvider.createTabView(tabStrip, position, adapter);
    if (tab == null) {
      throw new IllegalStateException("tabView is null.");
    }

    if (distributeEvenly) {
      // Give every tab equal weight so they share the strip's width evenly.
      LinearLayout.LayoutParams lp = (LinearLayout.LayoutParams) tab.getLayoutParams();
      lp.width = 0;
      lp.weight = 1;
    }

    tab.setOnClickListener(clickListener);
    tabStrip.addView(tab);
    if (position == currentItem) {
      tab.setSelected(true);
    }
  }
}
@Override
protected void onAttachedToWindow() {
  super.onAttachedToWindow();
  // Re-sync the scroll position with the pager's current page when (re)attached.
  if (viewPager != null) {
    scrollToTab(viewPager.getCurrentItem(), 0);
  }
}
/**
 * Scrolls the layout horizontally so the tab at {@code tabIndex} lands in its
 * resting position, shifted by {@code positionOffset} pixels mid-swipe.
 */
private void scrollToTab(int tabIndex, int positionOffset) {
  final int childCount = tabStrip.getChildCount();
  if (childCount == 0 || tabIndex < 0 || tabIndex >= childCount) {
    return;
  }
  final View selectedTab = tabStrip.getChildAt(tabIndex);
  if (selectedTab == null) {
    return;
  }
  int targetScrollX =
      Utils.getLeft(selectedTab) - Utils.getMarginStart(selectedTab) + positionOffset;
  if (tabStrip.isIndicatorAlwaysInCenter()) {
    // Center mode: compensate for the width difference between the first
    // tab and the selected tab so the indicator stays centered.
    final View firstTab = tabStrip.getChildAt(0);
    final int firstExtent = Utils.getWidth(firstTab) + Utils.getMarginStart(firstTab);
    final int selectedExtent = Utils.getWidth(selectedTab) + Utils.getMarginStart(selectedTab);
    targetScrollX -= (firstExtent - selectedExtent) / 2;
  } else if (tabIndex > 0 || positionOffset > 0) {
    // Not at the first child, or mid-scroll: honor the configured title offset.
    targetScrollX -= titleOffset;
  }
  scrollTo(targetScrollX, 0);
}
/**
 * Allows complete control over the colors drawn in the tab layout. Set with
 * {@link #setCustomTabColorizer(TabColorizer)}.
 */
public interface TabColorizer {
/**
 * @return the color of the indicator used when {@code position} is selected.
 */
int getIndicatorColor(int position);
/**
 * @return the color of the divider drawn to the right of {@code position}.
 */
int getDividerColor(int position);
}
/**
 * Interface definition for a callback to be invoked when the scroll position of a view changes.
 */
public interface OnScrollChangeListener {
/**
 * Called when the horizontal scroll position of this view changes.
 *
 * @param scrollX Current horizontal scroll origin.
 * @param oldScrollX Previous horizontal scroll origin.
 */
void onScrollChanged(int scrollX, int oldScrollX);
}
/**
 * Creates the custom tab views for the tab layout. Set with
 * {@link #setCustomTabView(com.ogaclejapan.smarttablayout.SmartTabLayout.TabProvider)}
 */
public interface TabProvider {
/**
 * @return the View to use as the tab for {@code position}; must not be null.
 */
View createTabView(ViewGroup container, int position, PagerAdapter adapter);
}
/**
 * Default {@link TabProvider}: inflates a caller-supplied layout and, when a
 * text-view id is given (or the inflated root itself is a TextView), binds the
 * page title to it.
 */
private static class SimpleTabProvider implements TabProvider {

  private final LayoutInflater layoutInflater;
  private final int layoutResId;
  private final int textViewResId;

  private SimpleTabProvider(Context context, int layoutResId, int textViewId) {
    this.layoutInflater = LayoutInflater.from(context);
    this.layoutResId = layoutResId;
    this.textViewResId = textViewId;
  }

  @Override
  public View createTabView(ViewGroup container, int position, PagerAdapter adapter) {
    View tabView = (layoutResId != NO_ID)
        ? layoutInflater.inflate(layoutResId, container, false)
        : null;
    TextView titleView = null;
    if (tabView != null && textViewResId != NO_ID) {
      titleView = (TextView) tabView.findViewById(textViewResId);
    }
    if (titleView == null && tabView instanceof TextView) {
      // No explicit text-view id: the inflated root itself carries the title.
      titleView = (TextView) tabView;
    }
    if (titleView != null) {
      titleView.setText(adapter.getPageTitle(position));
    }
    return tabView;
  }
}
/**
 * Bridges ViewPager page-change callbacks to the tab strip: keeps the
 * indicator and scroll position in sync while swiping, and forwards every
 * callback to the user-supplied {@code viewPagerPageChangeListener}.
 */
private class InternalViewPagerListener implements ViewPager.OnPageChangeListener {
// Last scroll state reported by the pager; used to decide whether
// onPageSelected needs to move the strip itself.
private int scrollState;
@Override
public void onPageScrolled(int position, float positionOffset, int positionOffsetPixels) {
int tabStripChildCount = tabStrip.getChildCount();
if ((tabStripChildCount == 0) || (position < 0) || (position >= tabStripChildCount)) {
return;
}
tabStrip.onViewPagerPageChanged(position, positionOffset);
View selectedTab = tabStrip.getChildAt(position);
// Default extra offset: fraction of the current tab's width plus margins.
int widthPlusMargin = Utils.getWidth(selectedTab) + Utils.getMarginHorizontally(selectedTab);
int extraOffset = (int) (positionOffset * widthPlusMargin);
if (0f < positionOffset && positionOffset < 1f
&& tabStrip.isIndicatorAlwaysInCenter()) {
// Center-indicator mode mid-swipe: interpolate between the half-widths
// (plus facing margins) of the current and next tabs instead.
View currentTab = tabStrip.getChildAt(position);
View nextTab = tabStrip.getChildAt(position + 1);
int current = Utils.getWidth(currentTab) / 2 + Utils.getMarginEnd(currentTab);
int next = Utils.getWidth(nextTab) / 2 + Utils.getMarginStart(nextTab);
extraOffset = Math.round(positionOffset * (current + next));
}
scrollToTab(position, extraOffset);
if (viewPagerPageChangeListener != null) {
viewPagerPageChangeListener.onPageScrolled(position, positionOffset,
positionOffsetPixels);
}
}
@Override
public void onPageScrollStateChanged(int state) {
scrollState = state;
if (viewPagerPageChangeListener != null) {
viewPagerPageChangeListener.onPageScrollStateChanged(state);
}
}
@Override
public void onPageSelected(int position) {
// When idle (e.g. setCurrentItem called programmatically) no onPageScrolled
// follows, so snap the strip to the selected page here.
if (scrollState == ViewPager.SCROLL_STATE_IDLE) {
tabStrip.onViewPagerPageChanged(position, 0f);
scrollToTab(position, 0);
}
// Update the selected state of every tab view.
for (int i = 0, size = tabStrip.getChildCount(); i < size; i++) {
tabStrip.getChildAt(i).setSelected(position == i);
}
if (viewPagerPageChangeListener != null) {
viewPagerPageChangeListener.onPageSelected(position);
}
}
}
/**
 * Click handler shared by all tab views: navigates the pager to the page
 * whose tab was tapped.
 */
private class TabClickListener implements OnClickListener {
  @Override
  public void onClick(View v) {
    final int tabCount = tabStrip.getChildCount();
    for (int index = 0; index < tabCount; index++) {
      if (tabStrip.getChildAt(index) != v) {
        continue;
      }
      viewPager.setCurrentItem(index);
      return;
    }
  }
}
}
| |
package org.broadinstitute.sting.gatk.datasources.providers;
import net.sf.picard.reference.ReferenceSequence;
import org.broadinstitute.sting.gatk.contexts.ReferenceContext;
import org.broadinstitute.sting.gatk.walkers.Reference;
import org.broadinstitute.sting.gatk.walkers.Walker;
import org.broadinstitute.sting.gatk.walkers.Window;
import org.broadinstitute.sting.utils.GenomeLoc;
import org.broadinstitute.sting.utils.exceptions.ReviewedStingException;
/*
* Copyright (c) 2009 The Broad Institute
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*/
/**
* Provides access to the portion of the reference covering a single locus.
*/
/**
 * Provides access to the portion of the reference covering a single locus,
 * optionally expanded by a walker-declared {@code @Reference(window=...)}
 * so callbacks can see flanking reference bases.
 */
public class LocusReferenceView extends ReferenceView {
/**
 * Bound the reference view to make sure all accesses are within the shard.
 * May be null when the provider carries no locus (see initializeBounds).
 */
private GenomeLoc bounds;
/**
 * Start of the expanded window for which the reference context should be provided,
 * relative to the locus in question (non-positive: bases before the locus).
 */
private final int windowStart;
/**
 * Stop of the expanded window for which the reference context should be provided,
 * relative to the locus in question (non-negative: bases after the locus).
 */
private final int windowStop;
/**
 * Track the reference sequence and the last point accessed. Used to
 * track state when traversing over the reference.
 */
private ReferenceSequence referenceSequence;
/**
 * Create a LocusReferenceView given no other contextual information about
 * the walkers, etc. Uses a zero-width window around each locus.
 * @param provider source for locus data.
 */
public LocusReferenceView( LocusShardDataProvider provider ) {
super(provider);
initializeBounds(provider);
windowStart = windowStop = 0;
// NOTE(review): if the provider had no locus, bounds is null here and this
// call would NPE inside initializeReferenceSequence — presumably this
// constructor is only used with locus-bearing providers; confirm with callers.
initializeReferenceSequence(bounds);
}
/**
 * Create a new locus reference view, sizing the window from the walker's
 * {@code @Reference} annotation when present.
 * @param walker walker whose annotations determine the window size.
 * @param provider source for locus data.
 */
public LocusReferenceView( Walker walker, LocusShardDataProvider provider ) {
super( provider );
initializeBounds(provider);
// Retrieve information about the window being accessed.
if( walker.getClass().isAnnotationPresent(Reference.class) ) {
Window window = walker.getClass().getAnnotation(Reference.class).window();
// The window must straddle the locus: start <= 0 <= stop.
if( window.start() > 0 ) throw new ReviewedStingException( "Reference window starts after current locus" );
if( window.stop() < 0 ) throw new ReviewedStingException( "Reference window ends before current locus" );
windowStart = window.start();
windowStop = window.stop();
}
else {
windowStart = 0;
windowStop = 0;
}
if(bounds != null) {
// Pre-load the reference sequence for the bounds expanded by the window.
int expandedStart = getWindowStart( bounds );
int expandedStop = getWindowStop( bounds );
initializeReferenceSequence(genomeLocParser.createGenomeLoc(bounds.getContig(), expandedStart, expandedStop));
}
}
/** Returns true if the specified location is fully within the bounds of the reference window loaded into
 * this LocusReferenceView object.
 */
public boolean isLocationWithinBounds(GenomeLoc loc) {
return bounds.containsP(loc);
}
/** Ensures that specified location is within the bounds of the reference window loaded into this
 * LocusReferenceView object. If the location loc is within the current bounds (or if it is null), then nothing is done.
 * Otherwise, the bounds are expanded on either side, as needed, to accommodate the location, and the reference sequence for the
 * new bounds is reloaded (can be costly!). If loc spans beyond the current contig, the expansion is performed
 * to the start/stop of that contig only.
 * @param loc location to accommodate; must be on the same contig as the current bounds.
 */
public void expandBoundsToAccomodateLoc(GenomeLoc loc) {
if ( bounds==null || loc==null) return; // can bounds be null actually???
if ( isLocationWithinBounds(loc) ) return;
if ( loc.getContigIndex() != bounds.getContigIndex() )
throw new ReviewedStingException("Illegal attempt to expand reference view bounds to accommodate location on a different contig.");
// Grow bounds to the union of the old bounds and loc, then reload the sequence.
bounds = genomeLocParser.createGenomeLoc(bounds.getContig(),
Math.min(bounds.getStart(),loc.getStart()),
Math.max(bounds.getStop(),loc.getStop()));
int expandedStart = getWindowStart( bounds );
int expandedStop = getWindowStop( bounds );
initializeReferenceSequence(genomeLocParser.createGenomeLoc(bounds.getContig(), expandedStart, expandedStop));
}
/**
 * Initialize the bounds of this shard, trimming the bounds so that they match the reference.
 * Leaves bounds null when the provider has no locus.
 * @param provider Provider covering the appropriate locus.
 */
private void initializeBounds(LocusShardDataProvider provider) {
if(provider.getLocus() != null) {
// Clamp the shard's locus to [1, contig length].
int sequenceLength = reference.getSequenceDictionary().getSequence(provider.getLocus().getContig()).getSequenceLength();
bounds = genomeLocParser.createGenomeLoc(provider.getLocus().getContig(),
Math.max(provider.getLocus().getStart(),1),
Math.min(provider.getLocus().getStop(),sequenceLength));
}
else
bounds = null;
}
/**
 * Initialize reference sequence data using the given locus.
 * @param locus region whose bases should be fetched and cached.
 */
private void initializeReferenceSequence( GenomeLoc locus ) {
this.referenceSequence = reference.getSubsequenceAt( locus.getContig(), locus.getStart(), locus.getStop() );
}
/**
 * Clips the given location to the window-expanded bounds of this view.
 * @param l location to trim; returned unchanged if already inside.
 */
protected GenomeLoc trimToBounds(GenomeLoc l) {
int expandedStart = getWindowStart( bounds );
int expandedStop = getWindowStop( bounds );
if ( l.getStart() < expandedStart ) l = genomeLocParser.setStart(l, expandedStart);
if ( l.getStop() > expandedStop ) l = genomeLocParser.setStop(l, expandedStop);
return l;
}
/**
 * Lazy base supplier handed to ReferenceContext: copies {@code len} bases
 * out of the cached referenceSequence starting at offset {@code refStart}.
 */
public class Provider implements ReferenceContext.ReferenceContextRefProvider {
// Offset into referenceSequence's base array, and number of bases to expose.
int refStart, len;
public Provider( int refStart, int len ) {
this.refStart = refStart;
this.len = len;
}
public byte[] getBases() {
//System.out.printf("Getting bases for location%n");
byte[] bases = new byte[len];
System.arraycopy(referenceSequence.getBases(), refStart, bases, 0, len);
return bases;
}
}
/**
 * Gets the reference context associated with this particular point or extended interval on the genome.
 * @param genomeLoc Region for which to retrieve the base(s). If region spans beyond contig end or beyond current bounds, it will be trimmed down.
 * @return The base at the position represented by this genomeLoc.
 */
public ReferenceContext getReferenceContext( GenomeLoc genomeLoc ) {
//validateLocation( genomeLoc );
GenomeLoc window = genomeLocParser.createGenomeLoc( genomeLoc.getContig(), getWindowStart(genomeLoc), getWindowStop(genomeLoc) );
int refStart = -1;
if (bounds != null) {
// Bounded mode: offsets are relative to the cached (window-expanded) bounds.
window = trimToBounds(window);
refStart = (int)(window.getStart() - getWindowStart(bounds));
}
else {
// Unbounded mode: lazily load the whole contig and index from its start.
if(referenceSequence == null || referenceSequence.getContigIndex() != genomeLoc.getContigIndex())
referenceSequence = reference.getSequence(genomeLoc.getContig());
refStart = (int)window.getStart()-1;
}
int len = (int)window.size();
return new ReferenceContext( genomeLocParser, genomeLoc, window, new Provider(refStart, len));
}
/**
 * Allow the user to pull reference info from any arbitrary region of the reference.
 * @param genomeLoc The locus.
 * @return A list of the bases starting at the start of the locus (inclusive) and ending
 * at the end of the locus (inclusive).
 */
public byte[] getReferenceBases( GenomeLoc genomeLoc ) {
return super.getReferenceBases(genomeLoc);
}
/**
 * Validates that the genomeLoc is within this view's bounds.
 * Currently unused (call site in getReferenceContext is commented out).
 * @param genomeLoc location to verify.
 */
private void validateLocation( GenomeLoc genomeLoc ) throws InvalidPositionException {
if( bounds != null && !bounds.containsP(genomeLoc) )
throw new InvalidPositionException(
String.format("Requested position %s not within interval %s", genomeLoc, bounds));
}
/**
 * Gets the start of the expanded window, bounded if necessary by the contig.
 * @param locus The locus to expand.
 * @return The expanded window start (1-based, never below 1).
 */
private int getWindowStart( GenomeLoc locus ) {
// If the locus is not within the bounds of the contig it allegedly maps to, expand only as much as we can.
if(locus.getStart() < 1) return 1;
// if(locus.getStart() < 1) return locus.getStart();
return Math.max( locus.getStart() + windowStart, 1 );
}
/**
 * Gets the stop of the expanded window, bounded if necessary by the contig.
 * @param locus The locus to expand.
 * @return The expanded window stop (clamped to the contig length).
 */
private int getWindowStop( GenomeLoc locus ) {
// If the locus is not within the bounds of the contig it allegedly maps to, expand only as much as we can.
int sequenceLength = reference.getSequenceDictionary().getSequence(locus.getContig()).getSequenceLength();
if(locus.getStop() > sequenceLength) return sequenceLength;
return Math.min( locus.getStop() + windowStop, sequenceLength );
}
}
| |
/*
* The MIT License
*
* Copyright (c) 2004-2010, Sun Microsystems, Inc., Alan Harder
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package lib.form;
import com.gargoylesoftware.htmlunit.ElementNotFoundException;
import com.gargoylesoftware.htmlunit.html.HtmlPage;
import com.gargoylesoftware.htmlunit.html.HtmlForm;
import com.gargoylesoftware.htmlunit.html.HtmlSelect;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import com.gargoylesoftware.htmlunit.javascript.background.JavaScriptJob;
import com.gargoylesoftware.htmlunit.javascript.background.JavaScriptJobManagerImpl;
import hudson.DescriptorExtensionList;
import hudson.Extension;
import hudson.ExtensionPoint;
import hudson.model.Describable;
import hudson.model.Descriptor;
import jenkins.model.Jenkins;
import net.sf.json.JSONObject;
import org.jvnet.hudson.test.HudsonTestCase;
import org.kohsuke.stapler.DataBoundConstructor;
import org.kohsuke.stapler.StaplerRequest;
/**
* @author Alan.Harder@sun.com
*/
/**
 * Functional tests for the {@code f:repeatable}/{@code f:repeatableProperty}
 * form controls: drives the generated HTML with HtmlUnit and checks the JSON
 * submitted back (and, where a bindClass is set, the databound result).
 * @author Alan.Harder@sun.com
 */
public class RepeatableTest extends HudsonTestCase {
// JSON form payload captured by doSubmitTest on each submit.
private JSONObject formData;
// When non-null, doSubmitTest also databinds formData["items"] to this type.
private Class<?> bindClass;
private List<?> bindResult;
// Fields read by the test Jelly views (self/*): existing data, defaults, minimum row count.
public List<Object> list = new ArrayList<Object>();
public List<Object> defaults = null;
public Integer minimum = null;
// Submission endpoint the test forms post to; records the submitted JSON.
public void doSubmitTest(StaplerRequest req) throws Exception {
formData = req.getSubmittedForm();
if (bindClass != null)
bindResult = req.bindJSONToList(bindClass, formData.get("items"));
}
// ========================================================================
// Fills three rows via the "Add" button and ticks the third checkbox.
private void doTestSimple() throws Exception {
HtmlPage p = createWebClient().goTo("self/testSimple");
HtmlForm f = p.getFormByName("config");
f.getButtonByCaption("Add").click();
f.getInputByValue("").setValueAttribute("value one");
f.getButtonByCaption("Add").click();
f.getInputByValue("").setValueAttribute("value two");
f.getButtonByCaption("Add").click();
f.getInputByValue("").setValueAttribute("value three");
f.getInputsByName("bool").get(2).click();
submit(f);
}
public void testSimple() throws Exception {
doTestSimple();
assertEquals("[{\"bool\":false,\"txt\":\"value one\"},"
+ "{\"bool\":false,\"txt\":\"value two\"},{\"bool\":true,\"txt\":\"value three\"}]",
formData.get("foos").toString());
}
// ========================================================================
// Simple databindable row type used by most tests.
public static class Foo {
public String txt;
public boolean bool;
@DataBoundConstructor
public Foo(String txt, boolean bool) { this.txt = txt; this.bool = bool; }
@Override public String toString() { return "foo:" + txt + ':' + bool; }
}
// Seeds two pre-existing rows so views render with initial data.
private void addData() {
list.add(new Foo("existing one", true));
list.add(new Foo("existing two", false));
}
public void testSimple_ExistingData() throws Exception {
addData();
doTestSimple();
assertEquals("[{\"bool\":true,\"txt\":\"existing one\"},"
+ "{\"bool\":false,\"txt\":\"existing two\"},{\"bool\":true,\"txt\":\"value one\"},"
+ "{\"bool\":false,\"txt\":\"value two\"},{\"bool\":false,\"txt\":\"value three\"}]",
formData.get("foos").toString());
}
// minimum=3 should pre-render exactly three empty rows (no Add needed).
public void testMinimum() throws Exception {
minimum = 3;
HtmlPage p = createWebClient().goTo("self/testSimple");
HtmlForm f = p.getFormByName("config");
f.getInputByValue("").setValueAttribute("value one");
f.getInputByValue("").setValueAttribute("value two");
f.getInputByValue("").setValueAttribute("value three");
// A fourth empty input must not exist — minimum adds no extras.
try { f.getInputByValue(""); fail("?"); } catch (ElementNotFoundException expected) { }
f.getInputsByName("bool").get(2).click();
submit(f);
assertEquals("[{\"bool\":false,\"txt\":\"value one\"},"
+ "{\"bool\":false,\"txt\":\"value two\"},{\"bool\":true,\"txt\":\"value three\"}]",
formData.get("foos").toString());
}
// With 2 existing rows and minimum=3, exactly one empty row is added.
public void testMinimum_ExistingData() throws Exception {
addData();
minimum = 3;
HtmlPage p = createWebClient().goTo("self/testSimple");
HtmlForm f = p.getFormByName("config");
f.getInputByValue("").setValueAttribute("new one");
try { f.getInputByValue(""); fail("?"); } catch (ElementNotFoundException expected) { }
f.getInputsByName("bool").get(1).click();
submit(f);
assertEquals("[{\"bool\":true,\"txt\":\"existing one\"},"
+ "{\"bool\":true,\"txt\":\"existing two\"},{\"bool\":false,\"txt\":\"new one\"}]",
formData.get("foos").toString());
}
// No data and no defaults: the submitted JSON carries no "list" entry.
public void testNoData() throws Exception {
list = null;
defaults = null;
gotoAndSubmitConfig("defaultForField");
assertNull(formData.get("list"));
gotoAndSubmitConfig("defaultForItems");
assertNull(formData.get("list"));
}
public void testItemsWithDefaults() throws Exception {
assertWithDefaults("defaultForItems");
}
public void testItemsDefaultsIgnoredIfFieldHasData() throws Exception {
assertDefaultsIgnoredIfHaveData("defaultForItems");
}
public void testFieldWithDefaults() throws Exception {
assertWithDefaults("defaultForField");
}
public void testFieldDefaultsIgnoredIfFieldHasData() throws Exception {
assertDefaultsIgnoredIfHaveData("defaultForField");
}
private void addDefaults() {
defaults = new ArrayList<Object>();
defaults.add(new Foo("default one", true));
defaults.add(new Foo("default two", false));
}
// With no field data, the view's defaults are rendered and submitted.
private void assertWithDefaults(final String viewName) throws Exception {
list = null;
addDefaults();
gotoAndSubmitConfig(viewName);
assertNotNull(formData.get("list"));
assertEquals("[{\"bool\":true,\"txt\":\"default one\"},{\"bool\":false,\"txt\":\"default two\"}]",
formData.get("list").toString());
}
// With field data present, defaults are ignored in favor of the data.
private void assertDefaultsIgnoredIfHaveData(final String viewName) throws Exception {
addData();
addDefaults();
gotoAndSubmitConfig(viewName);
assertNotNull(formData.get("list"));
assertEquals("[{\"bool\":true,\"txt\":\"existing one\"},{\"bool\":false,\"txt\":\"existing two\"}]",
formData.get("list").toString());
}
// Loads self/<viewName> and submits its "config" form unchanged.
private void gotoAndSubmitConfig(final String viewName) throws Exception {
HtmlPage p = createWebClient().goTo("self/" + viewName);
HtmlForm f = p.getFormByName("config");
submit(f);
}
// ========================================================================
// hudson-behavior uniquifies radiobutton names so the browser properly handles each group,
// then converts back to original names when submitting form.
public void testRadio() throws Exception {
HtmlPage p = createWebClient().goTo("self/testRadio");
HtmlForm f = p.getFormByName("config");
f.getButtonByCaption("Add").click();
f.getInputByValue("").setValueAttribute("txt one");
f.getElementsByAttribute("INPUT", "type", "radio").get(1).click();
f.getButtonByCaption("Add").click();
f.getInputByValue("").setValueAttribute("txt two");
f.getElementsByAttribute("INPUT", "type", "radio").get(3).click();
submit(f);
assertEquals("[{\"radio\":\"two\",\"txt\":\"txt one\"},"
+ "{\"radio\":\"two\",\"txt\":\"txt two\"}]",
formData.get("foos").toString());
}
// Row type with a radio selection; not databound (no @DataBoundConstructor).
public static class FooRadio {
public String txt, radio;
public FooRadio(String txt, String radio) { this.txt = txt; this.radio = radio; }
}
public void testRadio_ExistingData() throws Exception {
list.add(new FooRadio("1", "one"));
list.add(new FooRadio("2", "two"));
list.add(new FooRadio("three", "one"));
HtmlPage p = createWebClient().goTo("self/testRadio");
HtmlForm f = p.getFormByName("config");
f.getButtonByCaption("Add").click();
f.getInputByValue("").setValueAttribute("txt 4");
// Radio index 7 = second option of the newly added (fourth) row.
f.getElementsByAttribute("INPUT", "type", "radio").get(7).click();
submit(f);
assertEquals("[{\"radio\":\"one\",\"txt\":\"1\"},{\"radio\":\"two\",\"txt\":\"2\"},"
+ "{\"radio\":\"one\",\"txt\":\"three\"},{\"radio\":\"two\",\"txt\":\"txt 4\"}]",
formData.get("foos").toString());
}
// hudson-behavior uniquifies radiobutton names so the browser properly handles each group,
// then converts back to original names when submitting form.
public void testRadioBlock() throws Exception {
HtmlPage p = createWebClient().goTo("self/testRadioBlock");
HtmlForm f = p.getFormByName("config");
f.getButtonByCaption("Add").click();
f.getInputByValue("").setValueAttribute("txt one");
// Filled before switching radio to "two"; must NOT appear in the submission.
f.getInputByValue("").setValueAttribute("avalue do not send");
f.getElementsByAttribute("INPUT", "type", "radio").get(1).click();
f.getInputByValue("").setValueAttribute("bvalue");
f.getButtonByCaption("Add").click();
f.getInputByValue("").setValueAttribute("txt two");
f.getElementsByAttribute("INPUT", "type", "radio").get(2).click();
f.getInputByValue("").setValueAttribute("avalue two");
submit(f);
assertEquals("[{\"radio\":{\"b\":\"bvalue\",\"value\":\"two\"},\"txt\":\"txt one\"},"
+ "{\"radio\":{\"a\":\"avalue two\",\"value\":\"one\"},\"txt\":\"txt two\"}]",
formData.get("foos").toString());
}
// ========================================================================
// Minimal describable hierarchy used by the dropdown-list test.
public static class Fruit implements ExtensionPoint, Describable<Fruit> {
protected String name;
private Fruit(String name) { this.name = name; }
public Descriptor<Fruit> getDescriptor() {
return Jenkins.getInstance().getDescriptor(getClass());
}
}
public static class FruitDescriptor extends Descriptor<Fruit> {
public FruitDescriptor(Class<? extends Fruit> clazz) {
super(clazz);
}
public String getDisplayName() {
return clazz.getSimpleName();
}
}
public static class Apple extends Fruit {
private int seeds;
@DataBoundConstructor public Apple(int seeds) { super("Apple"); this.seeds = seeds; }
@Extension public static final FruitDescriptor D = new FruitDescriptor(Apple.class);
@Override public String toString() { return name + " with " + seeds + " seeds"; }
}
public static class Banana extends Fruit {
private boolean yellow;
@DataBoundConstructor public Banana(boolean yellow) { super("Banana"); this.yellow = yellow; }
@Extension public static final FruitDescriptor D = new FruitDescriptor(Banana.class);
@Override public String toString() { return (yellow ? "Yellow" : "Green") + " " + name; }
}
// Databound wrapper pairing a Fruit with a word; exercised by testDropdownList.
public static class Fruity {
public Fruit fruit;
public String word;
@DataBoundConstructor public Fruity(Fruit fruit, String word) {
this.fruit = fruit;
this.word = word;
}
@Override public String toString() { return fruit + " " + word; }
}
// Exposed to the Jelly view to populate the fruit dropdown.
public DescriptorExtensionList<Fruit,Descriptor<Fruit>> getFruitDescriptors() {
return hudson.<Fruit,Descriptor<Fruit>>getDescriptorList(Fruit.class);
}
public void testDropdownList() throws Exception {
HtmlPage p = createWebClient().goTo("self/testDropdownList");
HtmlForm f = p.getFormByName("config");
f.getButtonByCaption("Add").click();
waitForJavaScript(p);
f.getInputByValue("").setValueAttribute("17"); // seeds
f.getInputByValue("").setValueAttribute("pie"); // word
f.getButtonByCaption("Add").click();
waitForJavaScript(p);
// select banana in 2nd select element:
((HtmlSelect)f.getElementsByTagName("select").get(1)).getOption(1).click();
f.getInputsByName("yellow").get(1).click(); // checkbox
f.getInputsByValue("").get(1).setValueAttribute("split"); // word
String xml = f.asXml();
bindClass = Fruity.class;
submit(f);
assertEquals(formData + "\n" + xml,
"[Apple with 17 seeds pie, Yellow Banana split]", bindResult.toString());
}
// ========================================================================
// Outer row type holding a nested repeatable list of Foo.
public static class FooList {
public String title;
public Foo[] list = new Foo[0];
@DataBoundConstructor public FooList(String title, Foo[] foo) {
this.title = title;
this.list = foo;
}
@Override public String toString() {
StringBuilder buf = new StringBuilder("FooList:" + title + ":[");
for (int i = 0; i < list.length; i++) {
if (i > 0) buf.append(',');
buf.append(list[i].toString());
}
buf.append(']');
return buf.toString();
}
}
/** Tests nested repeatable and use of @DataBoundConstructor to process formData */
public void testNested() throws Exception {
HtmlPage p = createWebClient().goTo("self/testNested");
HtmlForm f = p.getFormByName("config");
try {
clickButton(p, f, "Add");
f.getInputByValue("").setValueAttribute("title one");
clickButton(p,f,"Add Foo");
f.getInputByValue("").setValueAttribute("txt one");
clickButton(p,f,"Add Foo");
f.getInputByValue("").setValueAttribute("txt two");
f.getInputsByName("bool").get(1).click();
clickButton(p, f, "Add");
f.getInputByValue("").setValueAttribute("title two");
f.getElementsByTagName("button").get(1).click(); // 2nd "Add Foo" button
f.getInputByValue("").setValueAttribute("txt 2.1");
} catch (Exception e) {
// Dump the page so a flaky DOM state is diagnosable from the log.
System.err.println("HTML at time of failure:\n" + p.getBody().asXml());
throw e;
}
bindClass = FooList.class;
submit(f);
assertEquals("[FooList:title one:[foo:txt one:false,foo:txt two:true], "
+ "FooList:title two:[foo:txt 2.1:false]]", bindResult.toString());
}
// Clicks the button with the given caption and waits for YUI to finish
// inserting the new repeatable chunk.
private void clickButton(HtmlPage p, HtmlForm f, String caption) throws IOException {
f.getButtonByCaption(caption).click();
waitForJavaScript(p);
}
public void testNestedRadio() throws Exception {
HtmlPage p = createWebClient().goTo("self/testNestedRadio");
HtmlForm f = p.getFormByName("config");
try {
clickButton(p, f, "Add");
f.getElementsByAttribute("input", "type", "radio").get(1).click(); // outer=two
f.getButtonByCaption("Add Moo").click();
waitForJavaScript(p);
f.getElementsByAttribute("input", "type", "radio").get(2).click(); // inner=inone
f.getButtonByCaption("Add").click();
waitForJavaScript(p);
f.getElementsByAttribute("input", "type", "radio").get(4).click(); // outer=one
Thread.sleep(500);
f.getElementsByTagName("button").get(1).click(); // 2nd "Add Moo" button
waitForJavaScript(p);
f.getElementsByAttribute("input", "type", "radio").get(7).click(); // inner=intwo
f.getElementsByTagName("button").get(1).click();
waitForJavaScript(p);
f.getElementsByAttribute("input", "type", "radio").get(8).click(); // inner=inone
} catch (Exception e) {
System.err.println("HTML at time of failure:\n" + p.getBody().asXml());
throw e;
}
submit(f);
assertEquals("[{\"moo\":{\"inner\":\"inone\"},\"outer\":\"two\"},"
+ "{\"moo\":[{\"inner\":\"intwo\"},{\"inner\":\"inone\"}],\"outer\":\"one\"}]",
formData.get("items").toString());
}
/**
 * YUI internally partially relies on setTimeout/setInterval when we add a new chunk of HTML
 * to the page. So wait for the completion of it.
 *
 * <p>
 * To see where such asynchronous activity is happening, set a breakpoint to
 * {@link JavaScriptJobManagerImpl#addJob(JavaScriptJob, Page)} and look at the call stack.
 * Also see {@link #jsDebugger} at that time to see the JavaScript callstack.
 */
private void waitForJavaScript(HtmlPage p) {
p.getEnclosingWindow().getJobManager().waitForJobsStartingBefore(50);
}
}
| |
/*
* Copyright (C) 2015 Giuseppe Cardone <ippatsuman@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.gcardone.junidecode;
/**
 * Character map for Unicode characters with codepoint U+63xx.
 *
 * <p>Static data holder used by the transliterator: {@code map[b]} is the
 * ASCII transliteration of codepoint {@code 0x6300 + b}, where {@code b} is
 * the low byte of the codepoint ({@code 0x00}–{@code 0xff}). Entries that
 * have no mapping are the placeholder {@code "[?] "}.
 *
 * @author Giuseppe Cardone
 * @version 0.1
 */
class X63 {

    /** Transliterations indexed by the low byte of the codepoint (256 entries). */
    public static final String[] map = new String[]{
        "Bo ", // 0x00
        "Chi ", // 0x01
        "Gua ", // 0x02
        "Zhi ", // 0x03
        "Kuo ", // 0x04
        "Duo ", // 0x05
        "Duo ", // 0x06
        "Zhi ", // 0x07
        "Qie ", // 0x08
        "An ", // 0x09
        "Nong ", // 0x0a
        "Zhen ", // 0x0b
        "Ge ", // 0x0c
        "Jiao ", // 0x0d
        "Ku ", // 0x0e
        "Dong ", // 0x0f
        "Ru ", // 0x10
        "Tiao ", // 0x11
        "Lie ", // 0x12
        "Zha ", // 0x13
        "Lu ", // 0x14
        "Die ", // 0x15
        "Wa ", // 0x16
        "Jue ", // 0x17
        "Mushiru ", // 0x18
        "Ju ", // 0x19
        "Zhi ", // 0x1a
        "Luan ", // 0x1b
        "Ya ", // 0x1c
        "Zhua ", // 0x1d
        "Ta ", // 0x1e
        "Xie ", // 0x1f
        "Nao ", // 0x20
        "Dang ", // 0x21
        "Jiao ", // 0x22
        "Zheng ", // 0x23
        "Ji ", // 0x24
        "Hui ", // 0x25
        "Xun ", // 0x26
        "Ku ", // 0x27
        "Ai ", // 0x28
        "Tuo ", // 0x29
        "Nuo ", // 0x2a
        "Cuo ", // 0x2b
        "Bo ", // 0x2c
        "Geng ", // 0x2d
        "Ti ", // 0x2e
        "Zhen ", // 0x2f
        "Cheng ", // 0x30
        "Suo ", // 0x31
        "Suo ", // 0x32
        "Keng ", // 0x33
        "Mei ", // 0x34
        "Long ", // 0x35
        "Ju ", // 0x36
        "Peng ", // 0x37
        "Jian ", // 0x38
        "Yi ", // 0x39
        "Ting ", // 0x3a
        "Shan ", // 0x3b
        "Nuo ", // 0x3c
        "Wan ", // 0x3d
        "Xie ", // 0x3e
        "Cha ", // 0x3f
        "Feng ", // 0x40
        "Jiao ", // 0x41
        "Wu ", // 0x42
        "Jun ", // 0x43
        "Jiu ", // 0x44
        "Tong ", // 0x45
        "Kun ", // 0x46
        "Huo ", // 0x47
        "Tu ", // 0x48
        "Zhuo ", // 0x49
        "Pou ", // 0x4a
        "Le ", // 0x4b
        "Ba ", // 0x4c
        "Han ", // 0x4d
        "Shao ", // 0x4e
        "Nie ", // 0x4f
        "Juan ", // 0x50
        "Ze ", // 0x51
        "Song ", // 0x52
        "Ye ", // 0x53
        "Jue ", // 0x54
        "Bu ", // 0x55
        "Huan ", // 0x56
        "Bu ", // 0x57
        "Zun ", // 0x58
        "Yi ", // 0x59
        "Zhai ", // 0x5a
        "Lu ", // 0x5b
        "Sou ", // 0x5c
        "Tuo ", // 0x5d
        "Lao ", // 0x5e
        "Sun ", // 0x5f
        "Bang ", // 0x60
        "Jian ", // 0x61
        "Huan ", // 0x62
        "Dao ", // 0x63
        "[?] ", // 0x64
        "Wan ", // 0x65
        "Qin ", // 0x66
        "Peng ", // 0x67
        "She ", // 0x68
        "Lie ", // 0x69
        "Min ", // 0x6a
        "Men ", // 0x6b
        "Fu ", // 0x6c
        "Bai ", // 0x6d
        "Ju ", // 0x6e
        "Dao ", // 0x6f
        "Wo ", // 0x70
        "Ai ", // 0x71
        "Juan ", // 0x72
        "Yue ", // 0x73
        "Zong ", // 0x74
        "Chen ", // 0x75
        "Chui ", // 0x76
        "Jie ", // 0x77
        "Tu ", // 0x78
        "Ben ", // 0x79
        "Na ", // 0x7a
        "Nian ", // 0x7b
        "Nuo ", // 0x7c
        "Zu ", // 0x7d
        "Wo ", // 0x7e
        "Xi ", // 0x7f
        "Xian ", // 0x80
        "Cheng ", // 0x81
        "Dian ", // 0x82
        "Sao ", // 0x83
        "Lun ", // 0x84
        "Qing ", // 0x85
        "Gang ", // 0x86
        "Duo ", // 0x87
        "Shou ", // 0x88
        "Diao ", // 0x89
        "Pou ", // 0x8a
        "Di ", // 0x8b
        "Zhang ", // 0x8c
        "Gun ", // 0x8d
        "Ji ", // 0x8e
        "Tao ", // 0x8f
        "Qia ", // 0x90
        "Qi ", // 0x91
        "Pai ", // 0x92
        "Shu ", // 0x93
        "Qian ", // 0x94
        "Ling ", // 0x95
        "Yi ", // 0x96
        "Ya ", // 0x97
        "Jue ", // 0x98
        "Zheng ", // 0x99
        "Liang ", // 0x9a
        "Gua ", // 0x9b
        "Yi ", // 0x9c
        "Huo ", // 0x9d
        "Shan ", // 0x9e
        "Zheng ", // 0x9f
        "Lue ", // 0xa0
        "Cai ", // 0xa1
        "Tan ", // 0xa2
        "Che ", // 0xa3
        "Bing ", // 0xa4
        "Jie ", // 0xa5
        "Ti ", // 0xa6
        "Kong ", // 0xa7
        "Tui ", // 0xa8
        "Yan ", // 0xa9
        "Cuo ", // 0xaa
        "Zou ", // 0xab
        "Ju ", // 0xac
        "Tian ", // 0xad
        "Qian ", // 0xae
        "Ken ", // 0xaf
        "Bai ", // 0xb0
        "Shou ", // 0xb1
        "Jie ", // 0xb2
        "Lu ", // 0xb3
        "Guo ", // 0xb4
        "Haba ", // 0xb5
        "[?] ", // 0xb6
        "Zhi ", // 0xb7
        "Dan ", // 0xb8
        "Mang ", // 0xb9
        "Xian ", // 0xba
        "Sao ", // 0xbb
        "Guan ", // 0xbc
        "Peng ", // 0xbd
        "Yuan ", // 0xbe
        "Nuo ", // 0xbf
        "Jian ", // 0xc0
        "Zhen ", // 0xc1
        "Jiu ", // 0xc2
        "Jian ", // 0xc3
        "Yu ", // 0xc4
        "Yan ", // 0xc5
        "Kui ", // 0xc6
        "Nan ", // 0xc7
        "Hong ", // 0xc8
        "Rou ", // 0xc9
        "Pi ", // 0xca
        "Wei ", // 0xcb
        "Sai ", // 0xcc
        "Zou ", // 0xcd
        "Xuan ", // 0xce
        "Miao ", // 0xcf
        "Ti ", // 0xd0
        "Nie ", // 0xd1
        "Cha ", // 0xd2
        "Shi ", // 0xd3
        "Zong ", // 0xd4
        "Zhen ", // 0xd5
        "Yi ", // 0xd6
        "Shun ", // 0xd7
        "Heng ", // 0xd8
        "Bian ", // 0xd9
        "Yang ", // 0xda
        "Huan ", // 0xdb
        "Yan ", // 0xdc
        "Zuan ", // 0xdd
        "An ", // 0xde
        "Xu ", // 0xdf
        "Ya ", // 0xe0
        "Wo ", // 0xe1
        "Ke ", // 0xe2
        "Chuai ", // 0xe3
        "Ji ", // 0xe4
        "Ti ", // 0xe5
        "La ", // 0xe6
        "La ", // 0xe7
        "Cheng ", // 0xe8
        "Kai ", // 0xe9
        "Jiu ", // 0xea
        "Jiu ", // 0xeb
        "Tu ", // 0xec
        "Jie ", // 0xed
        "Hui ", // 0xee
        "Geng ", // 0xef
        "Chong ", // 0xf0
        "Shuo ", // 0xf1
        "She ", // 0xf2
        "Xie ", // 0xf3
        "Yuan ", // 0xf4
        "Qian ", // 0xf5
        "Ye ", // 0xf6
        "Cha ", // 0xf7
        "Zha ", // 0xf8
        "Bei ", // 0xf9
        "Yao ", // 0xfa
        "[?] ", // 0xfb
        "[?] ", // 0xfc
        "Lan ", // 0xfd
        "Wen ", // 0xfe
        "Qin " // 0xff
    };

    /** Non-instantiable: this class only exposes the static {@link #map} table. */
    private X63() {
    }
}
| |
package gwt.material.design.client.ui;
/*
* #%L
* GwtMaterial
* %%
* Copyright (C) 2015 GwtMaterialDesign
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import com.google.gwt.core.client.JsDate;
import com.google.gwt.dom.client.Document;
import com.google.gwt.dom.client.Element;
import com.google.gwt.event.logical.shared.*;
import com.google.gwt.event.shared.HandlerRegistration;
import com.google.gwt.user.client.ui.HasValue;
import gwt.material.design.client.base.*;
import gwt.material.design.client.base.mixin.ErrorMixin;
import gwt.material.design.client.base.mixin.GridMixin;
import gwt.material.design.client.constants.Orientation;
import gwt.material.design.client.ui.html.DateInput;
import gwt.material.design.client.ui.html.Label;
import java.util.Date;
//@formatter:off
/**
 * Material Date Picker will provide a visual calendar to your apps.
 * <p/>
 * <h3>UiBinder Usage:</h3>
 * {@code
 * <m:MaterialDatePicker ui:field="datePicker">
 * }
 * <h3>Java Usage:</h3>
 * {@code
 * datePicker.setDate(new Date());
 * }
 *
 * @author kevzlou7979
 * @author Ben Dol
 * @see <a href="http://gwt-material-demo.herokuapp.com/#pickers">Material Date Picker</a>
 */
//@formatter:on
public class MaterialDatePicker extends MaterialWidget implements HasGrid, HasError, HasOrientation, HasPlaceholder,
        HasValue<Date>, HasOpenHandlers<MaterialDatePicker>, HasCloseHandlers<MaterialDatePicker> {

    /**
     * Enum for identifying various selection types for the picker.
     */
    public enum MaterialDatePickerType {
        DAY,
        MONTH_DAY,
        YEAR_MONTH_DAY,
        YEAR
    }

    // Values set before attach are cached in these fields and pushed to the
    // underlying pickadate widget during onAttach() (see setDate/setDateMin/...).
    private String placeholder;
    private Date date;
    private Date dateMin;
    private Date dateMax;
    // pickadate.js format string (not java.text.SimpleDateFormat syntax).
    private String format = "dd mmmm yyyy";
    private DateInput dateInput;
    private Label label = new Label();
    private MaterialLabel lblName = new MaterialLabel();
    // The DOM element returned by pickadate() — handle used by all JSNI calls below.
    private Element pickatizedDateInput;
    private MaterialLabel lblError = new MaterialLabel();
    private Orientation orientation = Orientation.PORTRAIT;
    private MaterialDatePickerType selectionType = MaterialDatePickerType.DAY;

    private final GridMixin<MaterialDatePicker> gridMixin = new GridMixin<>(this);
    private final ErrorMixin<MaterialDatePicker, MaterialLabel> errorMixin;

    // Becomes true once onAttach() has run; guards all JSNI picker access.
    private boolean initialized = false;

    /** Builds the widget tree: input, floating label and error label inside an input-field div. */
    public MaterialDatePicker() {
        super(Document.get().createDivElement(), "input-field");
        dateInput = new DateInput();
        add(dateInput);

        label.add(lblName);
        add(label);
        add(lblError);

        errorMixin = new ErrorMixin<>(this, lblError, dateInput);
    }

    /**
     * Initializes the pickadate.js widget on attach and replays any values
     * (date, min/max, placeholder) that were set before initialization.
     */
    @Override
    protected void onAttach() {
        super.onAttach();

        addStyleName(orientation.getCssName());
        pickatizedDateInput = initDatePicker(dateInput.getElement(), selectionType.name(), format);
        initClickHandler(pickatizedDateInput, this);
        label.getElement().setAttribute("for", getPickerId(pickatizedDateInput));

        this.initialized = true;

        setDate(this.date);
        setDateMin(this.dateMin);
        setDateMax(this.dateMax);
        setPlaceholder(this.placeholder);
    }

    /** Unregisters the pickadate event handlers installed in {@link #onAttach()}. */
    @Override
    protected void onDetach() {
        super.onDetach();
        removeClickHandler(pickatizedDateInput, this);
    }

    /** Clears the input value and any error/success styling; no-op before initialization. */
    @Override
    public void clear() {
        if (initialized) {
            clearErrorOrSuccess();
            label.removeStyleName("active");
            dateInput.removeStyleName("valid");
            dateInput.clear();
        }
    }

    /** Strips the red/green error/success markers and marks the input as valid. */
    public void removeErrorModifiers() {
        dateInput.addStyleName("valid");
        dateInput.removeStyleName("invalid");
        lblName.removeStyleName("green-text");
        lblName.removeStyleName("red-text");
    }

    /**
     * Sets the type of selection options (date, month, year,...).
     *
     * @param type if <code>null</code>, {@link MaterialDatePickerType#DAY} will be used as fallback.
     */
    public void setDateSelectionType(MaterialDatePickerType type) {
        if (type != null) {
            this.selectionType = type;
        }
    }

    // Detaches the close/open/set listeners registered by initClickHandler.
    native void removeClickHandler(Element picker, MaterialDatePicker parent) /*-{
        picker.pickadate('picker').off("close", "open", "set");
    }-*/;

    // Bridges pickadate.js close/open/set events to the Java callbacks
    // onClose()/onOpen()/onClear()/onSelect() on this widget.
    native void initClickHandler(Element picker, MaterialDatePicker parent) /*-{
        picker.pickadate('picker').on({
            close: function () {
                parent.@gwt.material.design.client.ui.MaterialDatePicker::onClose()();
                $wnd.jQuery('.picker').blur();
            },
            open: function () {
                parent.@gwt.material.design.client.ui.MaterialDatePicker::onOpen()();
            },
            set: function (thingSet) {
                if (thingSet.hasOwnProperty('clear')) {
                    parent.@gwt.material.design.client.ui.MaterialDatePicker::onClear()();
                }
                else if (thingSet.select) {
                    parent.@gwt.material.design.client.ui.MaterialDatePicker::onSelect()();
                }
            }
        });
    }-*/;

    /** Called from JSNI when the picker popup closes; fires a {@link CloseEvent}. */
    void onClose() {
        CloseEvent.fire(this, this);
    }

    /** Called from JSNI when the picker popup opens; floats the label and fires an {@link OpenEvent}. */
    void onOpen() {
        label.addStyleName("active");
        dateInput.setFocus(true);
        OpenEvent.fire(this, this);
    }

    /** Called from JSNI when a date is selected; fires a value-change event with the new value. */
    void onSelect() {
        label.addStyleName("active");
        dateInput.addStyleName("valid");
        ValueChangeEvent.fire(this, getValue());
    }

    /** Called from JSNI when the picker value is cleared. */
    void onClear() {
        clear();
    }

    // Returns the id pickadate assigned to the popup, used to wire the label's "for" attribute.
    public static native String getPickerId(Element inputSrc) /*-{
        return inputSrc.pickadate('picker').get("id");
    }-*/;

    // Creates the pickadate widget with year/month selectors matching the
    // MaterialDatePickerType name passed in; falls back to plain day selection.
    public static native Element initDatePicker(Element inputSrc, String typeName, String format) /*-{
        var input;
        if (typeName === "MONTH_DAY") {
            input = $wnd.jQuery(inputSrc).pickadate({
                container: 'body',
                selectYears: false,
                selectMonths: true,
                format: format
            });
        } else if (typeName === "YEAR_MONTH_DAY") {
            input = $wnd.jQuery(inputSrc).pickadate({
                container: 'body',
                selectYears: true,
                selectMonths: true,
                format: format
            });
        } else if (typeName === "YEAR") {
            input = $wnd.jQuery(inputSrc).pickadate({
                container: 'body',
                selectYears: true,
                format: format
            });
        } else {
            input = $wnd.jQuery(inputSrc).pickadate({
                container: 'body',
                format: format
            });
        }
        return input;
    }-*/;

    /**
     * Sets the current date of the picker.
     *
     * @param date - must not be <code>null</code>
     */
    public void setDate(Date date) {
        setValue(date);
    }

    public Date getDateMin() {
        return dateMin;
    }

    /** Sets the earliest selectable date; applied to the picker only after initialization. */
    public void setDateMin(Date dateMin) {
        this.dateMin = dateMin;

        if (initialized && dateMin != null) {
            setPickerDateMin(JsDate.create((double) dateMin.getTime()), pickatizedDateInput);
        }
    }

    public native void setPickerDateMin(JsDate date, Element picker) /*-{
        picker.pickadate('picker').set('min', date);
    }-*/;

    public Date getDateMax() {
        return dateMax;
    }

    /** Sets the latest selectable date; applied to the picker only after initialization. */
    public void setDateMax(Date dateMax) {
        this.dateMax = dateMax;

        if (initialized && dateMax != null) {
            setPickerDateMax(JsDate.create((double) dateMax.getTime()), pickatizedDateInput);
        }
    }

    public native void setPickerDateMax(JsDate date, Element picker) /*-{
        picker.pickadate('picker').set('max', date);
    }-*/;

    // 'muted: true' suppresses pickadate's own set-event so onSelect() is not re-fired.
    public native void setPickerDate(JsDate date, Element picker) /*-{
        picker.pickadate('picker').set('select', date, { muted: true });
    }-*/;

    /**
     * Same as calling {@link #getValue()}
     */
    public Date getDate() {
        return getPickerDate();
    }

    /** Reads the selected date back from pickadate; returns null if nothing is selected yet. */
    protected Date getPickerDate() {
        try {
            JsDate selectedDate = getDatePickerValue(pickatizedDateInput);
            return new Date((long) selectedDate.getTime());
        } catch (Exception e) {
            // No selection (or picker not initialized) surfaces here as a JS exception.
            e.printStackTrace();
            return null;
        }
    }

    public static native JsDate getDatePickerValue(Element picker)/*-{
        return picker.pickadate('picker').get('select').obj;
    }-*/;

    /**
     * Clears the values of the picker field.
     */
    public void clearValues() {
        if (pickatizedDateInput != null) {
            clearValues(pickatizedDateInput);
        }
    }

    public native void clearValues(Element picker) /*-{
        picker.pickadate('picker').clear();
    }-*/;

    public String getPlaceholder() {
        return placeholder;
    }

    /** Sets the floating-label text; shown only once the widget is initialized. */
    public void setPlaceholder(String placeholder) {
        this.placeholder = placeholder;

        if (initialized && placeholder != null) {
            lblName.setText(placeholder);
        }
    }

    public MaterialDatePickerType getSelectionType() {
        return selectionType;
    }

    /**
     * Sets the selection type; must be called before the widget is attached.
     *
     * @throws IllegalStateException if the picker is already initialized
     */
    public void setSelectionType(MaterialDatePickerType selectionType) {
        if(initialized) {
            throw new IllegalStateException("setSelectionType can be called only before initialization");
        }
        this.selectionType = selectionType;
    }

    /**
     * @return the orientation
     */
    @Override
    public Orientation getOrientation() {
        return orientation;
    }

    /**
     * @param orientation the orientation to set : can be Vertical or Horizontal
     * @throws IllegalStateException if the picker is already initialized
     */
    @Override
    public void setOrientation(Orientation orientation) {
        if(initialized) {
            throw new IllegalStateException("setOrientation can be called only before initialization");
        }
        this.orientation = orientation;
    }

    @Override
    public void setGrid(String grid) {
        gridMixin.setGrid(grid);
    }

    @Override
    public void setOffset(String offset) {
        gridMixin.setOffset(offset);
    }

    /** Shows an error message and applies red/invalid styling to label and input. */
    @Override
    public void setError(String error) {
        errorMixin.setError(error);
        removeErrorModifiers();
        // NOTE(review): setStyleName replaces the label's entire class attribute, while
        // removeErrorModifiers() undoes it with removeStyleName — addStyleName("red-text")
        // may be what was intended here; confirm before changing.
        lblName.setStyleName("red-text");
        dateInput.addStyleName("invalid");
    }

    /** Shows a success message and applies green/valid styling to label and input. */
    @Override
    public void setSuccess(String success) {
        errorMixin.setSuccess(success);
        // NOTE(review): see setError — setStyleName wipes any other label styles.
        lblName.setStyleName("green-text");
        dateInput.addStyleName("valid");
    }

    @Override
    public void clearErrorOrSuccess() {
        errorMixin.clearErrorOrSuccess();
        removeErrorModifiers();
    }

    public String getFormat() {
        return format;
    }

    /**
     * To call before initialization
     * @param format pickadate.js format string, e.g. "dd mmmm yyyy"
     * @throws IllegalStateException if the picker is already initialized
     */
    public void setFormat(String format) {
        if(initialized) {
            throw new IllegalStateException("setFormat can be called only before initialization");
        }
        this.format = format;
    }

    /** Registers a value-change handler that is only invoked while the widget is enabled. */
    @Override
    public HandlerRegistration addValueChangeHandler(final ValueChangeHandler<Date> handler) {
        return addHandler(new ValueChangeHandler<Date>() {
            @Override
            public void onValueChange(ValueChangeEvent<Date> event) {
                if(isEnabled()){
                    handler.onValueChange(event);
                }
            }
        }, ValueChangeEvent.getType());
    }

    @Override
    public Date getValue() {
        return getPickerDate();
    }

    @Override
    public void setValue(Date value) {
        setValue(value, false);
    }

    /**
     * Stores the value and, once initialized, pushes it into the picker.
     * A <code>null</code> value is silently ignored.
     */
    @Override
    public void setValue(Date value, boolean fireEvents) {
        if (value == null) {
            return;
        }
        this.date = value;

        if (initialized) {
            setPickerDate(JsDate.create((double) value.getTime()), pickatizedDateInput);
            label.addStyleName("active");
        }

        if (fireEvents){
            ValueChangeEvent.fire(this, value);
        }
    }

    /** Registers a close handler that is only invoked while the widget is enabled. */
    @Override
    public HandlerRegistration addCloseHandler(final CloseHandler<MaterialDatePicker> handler) {
        return addHandler(new CloseHandler<MaterialDatePicker>() {
            @Override
            public void onClose(CloseEvent<MaterialDatePicker> event) {
                if(isEnabled()){
                    handler.onClose(event);
                }
            }
        }, CloseEvent.getType());
    }

    /** Registers an open handler that is only invoked while the widget is enabled. */
    @Override
    public HandlerRegistration addOpenHandler(final OpenHandler<MaterialDatePicker> handler) {
        return addHandler(new OpenHandler<MaterialDatePicker>() {
            @Override
            public void onOpen(OpenEvent<MaterialDatePicker> event) {
                if(isEnabled()){
                    handler.onOpen(event);
                }
            }
        }, OpenEvent.getType());
    }

    /** Enables/disables the widget and its backing input element together. */
    @Override
    public void setEnabled(boolean enabled) {
        super.setEnabled(enabled);
        dateInput.setEnabled(enabled);
    }
}
| |
/**
*/
package geometry.provider;
import geometry.GeometryFactory;
import geometry.GeometryPackage;
import geometry.Line;
import java.util.Collection;
import java.util.List;
import org.eclipse.emf.common.notify.AdapterFactory;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.ecore.EStructuralFeature;
import org.eclipse.emf.edit.provider.ComposeableAdapterFactory;
import org.eclipse.emf.edit.provider.IEditingDomainItemProvider;
import org.eclipse.emf.edit.provider.IItemLabelProvider;
import org.eclipse.emf.edit.provider.IItemPropertyDescriptor;
import org.eclipse.emf.edit.provider.IItemPropertySource;
import org.eclipse.emf.edit.provider.IStructuredItemContentProvider;
import org.eclipse.emf.edit.provider.ITreeItemContentProvider;
import org.eclipse.emf.edit.provider.ItemPropertyDescriptor;
import org.eclipse.emf.edit.provider.ViewerNotification;
/**
 * This is the item provider adapter for a {@link geometry.Line} object.
 * <!-- begin-user-doc -->
 * NOTE(review): this class is EMF-generated; hand edits outside the user-doc
 * regions will be discarded on regeneration unless the corresponding
 * {@code @generated} tag is changed to {@code @generated NOT}.
 * <!-- end-user-doc -->
 * @generated
 */
public class LineItemProvider
    extends GObjectItemProvider
    implements
        IEditingDomainItemProvider,
        IStructuredItemContentProvider,
        ITreeItemContentProvider,
        IItemLabelProvider,
        IItemPropertySource {
    /**
     * This constructs an instance from a factory and a notifier.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public LineItemProvider(AdapterFactory adapterFactory) {
        super(adapterFactory);
    }

    /**
     * This returns the property descriptors for the adapted class.
     * <!-- begin-user-doc -->
     * Descriptors are built lazily on first request and cached in
     * {@code itemPropertyDescriptors} (inherited field).
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public List<IItemPropertyDescriptor> getPropertyDescriptors(Object object) {
        if (itemPropertyDescriptors == null) {
            super.getPropertyDescriptors(object);

            addBeginPropertyDescriptor(object);
            addEndPropertyDescriptor(object);
            addAppearanceLabelPropertyDescriptor(object);
            addTokenAppearanceLabelPropertyDescriptor(object);
        }
        return itemPropertyDescriptors;
    }

    /**
     * This adds a property descriptor for the Begin feature.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    protected void addBeginPropertyDescriptor(Object object) {
        itemPropertyDescriptors.add
            (createItemPropertyDescriptor
                (((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
                 getResourceLocator(),
                 getString("_UI_Line_begin_feature"),
                 getString("_UI_PropertyDescriptor_description", "_UI_Line_begin_feature", "_UI_Line_type"),
                 GeometryPackage.Literals.LINE__BEGIN,
                 true,   // isSettable
                 false,  // multiLine
                 true,   // sortChoices
                 null,   // staticImage
                 null,   // category
                 null)); // filterFlags
    }

    /**
     * This adds a property descriptor for the End feature.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    protected void addEndPropertyDescriptor(Object object) {
        itemPropertyDescriptors.add
            (createItemPropertyDescriptor
                (((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
                 getResourceLocator(),
                 getString("_UI_Line_end_feature"),
                 getString("_UI_PropertyDescriptor_description", "_UI_Line_end_feature", "_UI_Line_type"),
                 GeometryPackage.Literals.LINE__END,
                 true,   // isSettable
                 false,  // multiLine
                 true,   // sortChoices
                 null,   // staticImage
                 null,   // category
                 null)); // filterFlags
    }

    /**
     * This adds a property descriptor for the Appearance Label feature.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    protected void addAppearanceLabelPropertyDescriptor(Object object) {
        itemPropertyDescriptors.add
            (createItemPropertyDescriptor
                (((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
                 getResourceLocator(),
                 getString("_UI_Line_appearanceLabel_feature"),
                 getString("_UI_PropertyDescriptor_description", "_UI_Line_appearanceLabel_feature", "_UI_Line_type"),
                 GeometryPackage.Literals.LINE__APPEARANCE_LABEL,
                 true,   // isSettable
                 false,  // multiLine
                 false,  // sortChoices
                 ItemPropertyDescriptor.GENERIC_VALUE_IMAGE,
                 null,   // category
                 null)); // filterFlags
    }

    /**
     * This adds a property descriptor for the Token Appearance Label feature.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    protected void addTokenAppearanceLabelPropertyDescriptor(Object object) {
        itemPropertyDescriptors.add
            (createItemPropertyDescriptor
                (((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
                 getResourceLocator(),
                 getString("_UI_Line_tokenAppearanceLabel_feature"),
                 getString("_UI_PropertyDescriptor_description", "_UI_Line_tokenAppearanceLabel_feature", "_UI_Line_type"),
                 GeometryPackage.Literals.LINE__TOKEN_APPEARANCE_LABEL,
                 true,   // isSettable
                 false,  // multiLine
                 false,  // sortChoices
                 ItemPropertyDescriptor.GENERIC_VALUE_IMAGE,
                 null,   // category
                 null)); // filterFlags
    }

    /**
     * This specifies how to implement {@link #getChildren} and is used to deduce an appropriate feature for an
     * {@link org.eclipse.emf.edit.command.AddCommand}, {@link org.eclipse.emf.edit.command.RemoveCommand} or
     * {@link org.eclipse.emf.edit.command.MoveCommand} in {@link #createCommand}.
     * <!-- begin-user-doc -->
     * A Line's children in the editor tree are its bend points.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public Collection<? extends EStructuralFeature> getChildrenFeatures(Object object) {
        if (childrenFeatures == null) {
            super.getChildrenFeatures(object);
            childrenFeatures.add(GeometryPackage.Literals.LINE__BEND_POINTS);
        }
        return childrenFeatures;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    protected EStructuralFeature getChildFeature(Object object, Object child) {
        // Check the type of the specified child object and return the proper feature to use for
        // adding (see {@link AddCommand}) it as a child.
        return super.getChildFeature(object, child);
    }

    /**
     * This returns Line.gif.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public Object getImage(Object object) {
        return overlayImage(object, getResourceLocator().getImage("full/obj16/Line"));
    }

    /**
     * This returns the label text for the adapted class.
     * <!-- begin-user-doc -->
     * Falls back to the bare type name when the Line has no label.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public String getText(Object object) {
        String label = ((Line)object).getLabel();
        return label == null || label.length() == 0 ?
            getString("_UI_Line_type") :
            getString("_UI_Line_type") + " " + label;
    }

    /**
     * This handles model notifications by calling {@link #updateChildren} to update any cached
     * children and by creating a viewer notification, which it passes to {@link #fireNotifyChanged}.
     * <!-- begin-user-doc -->
     * Label features refresh only this item's label; bend-point changes refresh content.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void notifyChanged(Notification notification) {
        updateChildren(notification);

        switch (notification.getFeatureID(Line.class)) {
            case GeometryPackage.LINE__APPEARANCE_LABEL:
            case GeometryPackage.LINE__TOKEN_APPEARANCE_LABEL:
                fireNotifyChanged(new ViewerNotification(notification, notification.getNotifier(), false, true));
                return;
            case GeometryPackage.LINE__BEND_POINTS:
                fireNotifyChanged(new ViewerNotification(notification, notification.getNotifier(), true, false));
                return;
        }
        super.notifyChanged(notification);
    }

    /**
     * This adds {@link org.eclipse.emf.edit.command.CommandParameter}s describing the children
     * that can be created under this object.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    protected void collectNewChildDescriptors(Collection<Object> newChildDescriptors, Object object) {
        super.collectNewChildDescriptors(newChildDescriptors, object);

        newChildDescriptors.add
            (createChildParameter
                (GeometryPackage.Literals.LINE__BEND_POINTS,
                 GeometryFactory.eINSTANCE.createBendPoint()));
    }
}
| |
package org.springframework.data.simpledb.core.entity;
import static org.junit.Assert.*;

import java.text.ParseException;
import java.util.Map;
import java.util.Set;

import org.apache.commons.lang.builder.EqualsBuilder;
import org.apache.commons.lang.builder.HashCodeBuilder;
import org.junit.Test;
import org.springframework.data.annotation.Id;
import org.springframework.data.simpledb.attributeutil.SimpleDBAttributeConverter;
import org.springframework.data.simpledb.core.domain.SimpleDbSampleEntity;
import org.springframework.data.simpledb.core.entity.EntityWrapperTest.AClass.BClass;
import org.springframework.data.simpledb.core.entity.EntityWrapperTest.AClass.BClass.CClass;
import org.springframework.data.simpledb.util.EntityInformationSupport;
/**
 * Unit tests for {@code EntityWrapper}: id generation, primitive-field
 * serialization, and (de)serialization of nested domain objects.
 */
public class EntityWrapperTest {

    @Test
    public void generateId_should_populate_itemName_of_Item() {
        SimpleDbSampleEntity object = new SimpleDbSampleEntity();
        EntityWrapper<SimpleDbSampleEntity, String> sdbEntity = new EntityWrapper<SimpleDbSampleEntity, String>(SimpleDbSampleEntity.entityInformation(), object);
        sdbEntity.generateIdIfNotSet();
        assertNotNull(object.getItemName());
    }

    @Test
    public void generateId_should_not_overwrite_existing_id() {
        SimpleDbSampleEntity object = new SimpleDbSampleEntity();
        object.setItemName("gigi");
        EntityWrapper<SimpleDbSampleEntity, String> sdbEntity = new EntityWrapper<SimpleDbSampleEntity, String>(SimpleDbSampleEntity.entityInformation(), object);
        sdbEntity.generateIdIfNotSet();
        assertEquals("gigi", object.getItemName());
    }

    @Test
    public void generateId_should_create_distinct_values() {
        SimpleDbSampleEntity object1 = new SimpleDbSampleEntity();
        SimpleDbSampleEntity object2 = new SimpleDbSampleEntity();

        EntityWrapper<SimpleDbSampleEntity, String> sdbEntity1 = new EntityWrapper<SimpleDbSampleEntity, String>(SimpleDbSampleEntity.entityInformation(), object1);
        sdbEntity1.generateIdIfNotSet();
        EntityWrapper<SimpleDbSampleEntity, String> sdbEntity2 = new EntityWrapper<SimpleDbSampleEntity, String>(SimpleDbSampleEntity.entityInformation(), object2);
        sdbEntity2.generateIdIfNotSet();

        assertNotEquals(object1.getItemName(), object2.getItemName());
    }

    /** Every primitive/wrapper field must survive a serialize/decode round-trip. */
    @Test
    public void test_getSerializedPrimitiveAttributes() throws ParseException {
        final SampleEntity entity = new SampleEntity();
        entity.setIntField(11);
        entity.setLongField(123);
        entity.setShortField((short) -12);
        entity.setFloatField(-0.01f);
        entity.setDoubleField(1.2d);
        entity.setByteField((byte) 1);
        entity.setBooleanField(Boolean.TRUE);
        entity.setStringField("string");
        entity.setDoubleWrapper(Double.valueOf("2323.32d"));

        EntityWrapper<SampleEntity, String> sdbEntity = new EntityWrapper<SampleEntity, String>(
                EntityInformationSupport.readEntityInformation(SampleEntity.class), entity);
        assertNotNull(sdbEntity);

        final Map<String, String> attributes = sdbEntity.serialize();
        assertNotNull(attributes);

        /* test int field */
        String intValues = attributes.get("intField");
        assertNotNull(intValues);
        assertEquals(entity.getIntField(),
                ((Integer) SimpleDBAttributeConverter.decodeToFieldOfType(intValues, Integer.class)).intValue());

        /* test long field */
        String longValues = attributes.get("longField");
        assertEquals(entity.getLongField(),
                ((Long) SimpleDBAttributeConverter.decodeToFieldOfType(longValues, Long.class)).longValue());

        /* test short field */
        String shortValues = attributes.get("shortField");
        assertEquals(entity.getShortField(),
                ((Short) SimpleDBAttributeConverter.decodeToFieldOfType(shortValues, Short.class)).shortValue());

        /* test float field */
        String floatValues = attributes.get("floatField");
        assertTrue(entity.getFloatField() == ((Float) SimpleDBAttributeConverter.decodeToFieldOfType(floatValues,
                Float.class)).floatValue());

        /* test double field */
        String doubleValues = attributes.get("doubleField");
        assertTrue(entity.getDoubleField() == ((Double) SimpleDBAttributeConverter.decodeToFieldOfType(doubleValues,
                Double.class)).doubleValue());

        /* test byte field */
        String byteValues = attributes.get("byteField");
        assertTrue(entity.getByteField() == ((Byte) SimpleDBAttributeConverter.decodeToFieldOfType(byteValues,
                Byte.class)).byteValue());

        /* test boolean field */
        String booleanValues = attributes.get("booleanField");
        assertTrue(entity.getBooleanField() == ((Boolean) SimpleDBAttributeConverter.decodeToFieldOfType(booleanValues,
                Boolean.class)).booleanValue());
    }

    /* ***************************** Test serializing nested domain entities ******************* */

    /** Nested fields must serialize under dotted attribute keys ("a.b.c"). */
    @Test
    public void should_generate_attribute_keys_for_nested_domain_fields() {
        final AClass aDomain = new AClass();
        {
            aDomain.nestedB = new BClass();
            {
                aDomain.nestedB.floatField = 21f;
                aDomain.nestedB.nestedNestedC = new CClass();
                {
                    aDomain.nestedB.nestedNestedC.doubleField = 14d;
                }
            }
        }

        EntityWrapper<AClass, String> sdbEntity = new EntityWrapper<AClass, String>(
                EntityInformationSupport.readEntityInformation(AClass.class), aDomain);
        final Map<String, String> attributes = sdbEntity.serialize();

        assertNotNull(attributes);
        assertEquals(3, attributes.size());

        final Set<String> keySet = attributes.keySet();
        assertTrue(keySet.contains("intField"));
        assertTrue(keySet.contains("nestedB.floatField"));
        assertTrue(keySet.contains("nestedB.nestedNestedC.doubleField"));
    }

    /** Serialize then deserialize a nested graph and compare with the original. */
    @Test
    public void should_build_entity_with_nested_domain_entities() {
        final AClass aDomain = new AClass();
        {
            aDomain.intField = 13;
            aDomain.nestedB = new BClass();
            {
                aDomain.nestedB.floatField = 21f;
                aDomain.nestedB.nestedNestedC = new CClass();
                {
                    aDomain.nestedB.nestedNestedC.doubleField = 14d;
                }
            }
        }

        EntityWrapper<AClass, String> sdbEntity = new EntityWrapper<AClass, String>(
                EntityInformationSupport.readEntityInformation(AClass.class), aDomain);
        final Map<String, String> attributes = sdbEntity.serialize();

        /* convert back */
        final EntityWrapper<AClass, String> convertedEntity = new EntityWrapper<AClass, String>(
                EntityInformationSupport.readEntityInformation(AClass.class));
        convertedEntity.deserialize(attributes);

        assertTrue(aDomain.equals(convertedEntity.getItem()));
    }

    /** Test fixture with nested domain objects; fields accessed directly by the tests. */
    @SuppressWarnings("unused")
    public static class AClass {

        @Id
        private String id;
        private int intField;
        private BClass nestedB;
        private Long longField;

        public BClass getNestedB() {
            return nestedB;
        }

        public void setNestedB(BClass nestedB) {
            this.nestedB = nestedB;
        }

        public String getId() {
            return id;
        }

        public void setId(String id) {
            this.id = id;
        }

        public int getIntField() {
            return intField;
        }

        public void setIntField(int intField) {
            this.intField = intField;
        }

        private Long getLongField() {
            return longField;
        }

        public void setLongField(Long longField) {
            this.longField = longField;
        }

        public static class BClass {

            private float floatField;
            private CClass nestedNestedC;

            public CClass getNestedNestedC() {
                return nestedNestedC;
            }

            public void setNestedNestedC(CClass nestedNestedC) {
                this.nestedNestedC = nestedNestedC;
            }

            public float getFloatField() {
                return floatField;
            }

            public void setFloatField(float floatField) {
                this.floatField = floatField;
            }

            @Override
            public boolean equals(Object obj) {
                return EqualsBuilder.reflectionEquals(this, obj);
            }

            // equals() is reflective, so hashCode() must be too (equals/hashCode contract).
            @Override
            public int hashCode() {
                return HashCodeBuilder.reflectionHashCode(this);
            }

            public static class CClass {

                private double doubleField;

                public double getDoubleField() {
                    return doubleField;
                }

                public void setDoubleField(double doubleField) {
                    this.doubleField = doubleField;
                }

                @Override
                public boolean equals(Object obj) {
                    return EqualsBuilder.reflectionEquals(this, obj);
                }

                // equals() is reflective, so hashCode() must be too (equals/hashCode contract).
                @Override
                public int hashCode() {
                    return HashCodeBuilder.reflectionHashCode(this);
                }
            }
        }

        @Override
        public boolean equals(Object obj) {
            return EqualsBuilder.reflectionEquals(this, obj);
        }

        // equals() is reflective, so hashCode() must be too (equals/hashCode contract).
        @Override
        public int hashCode() {
            return HashCodeBuilder.reflectionHashCode(this);
        }
    }

    /** Flat fixture covering every primitive type plus a String and a wrapper. */
    public static class SampleEntity {

        private int intField;
        private float floatField;
        private double doubleField;
        private short shortField;
        private long longField;
        private byte byteField;
        private boolean booleanField;
        private String stringField;
        private Double doubleWrapper;

        public int getIntField() {
            return intField;
        }

        public void setIntField(int intField) {
            this.intField = intField;
        }

        public float getFloatField() {
            return floatField;
        }

        public void setFloatField(float floatField) {
            this.floatField = floatField;
        }

        public double getDoubleField() {
            return doubleField;
        }

        public void setDoubleField(double doubleField) {
            this.doubleField = doubleField;
        }

        public short getShortField() {
            return shortField;
        }

        public void setShortField(short shortField) {
            this.shortField = shortField;
        }

        public long getLongField() {
            return longField;
        }

        public void setLongField(long longField) {
            this.longField = longField;
        }

        public byte getByteField() {
            return byteField;
        }

        public void setByteField(byte byteField) {
            this.byteField = byteField;
        }

        public boolean getBooleanField() {
            return booleanField;
        }

        public void setBooleanField(boolean booleanField) {
            this.booleanField = booleanField;
        }

        public String getStringField() {
            return stringField;
        }

        public void setStringField(String stringField) {
            this.stringField = stringField;
        }

        public Double getDoubleWrapper() {
            return doubleWrapper;
        }

        public void setDoubleWrapper(Double doubleWrapper) {
            this.doubleWrapper = doubleWrapper;
        }
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.execution;
import com.facebook.presto.OutputBuffers;
import com.facebook.presto.Session;
import com.facebook.presto.UnpartitionedPagePartitionFunction;
import com.facebook.presto.execution.SharedBuffer.BufferState;
import com.facebook.presto.execution.StateMachine.StateChangeListener;
import com.facebook.presto.execution.TestSqlTaskManager.MockLocationFactory;
import com.facebook.presto.metadata.ColumnHandle;
import com.facebook.presto.metadata.InMemoryNodeManager;
import com.facebook.presto.metadata.NodeVersion;
import com.facebook.presto.metadata.PrestoNode;
import com.facebook.presto.metadata.Split;
import com.facebook.presto.metadata.TableHandle;
import com.facebook.presto.operator.TaskContext;
import com.facebook.presto.spi.ConnectorSplit;
import com.facebook.presto.spi.FixedSplitSource;
import com.facebook.presto.spi.Node;
import com.facebook.presto.spi.type.Type;
import com.facebook.presto.split.ConnectorAwareSplitSource;
import com.facebook.presto.split.SplitSource;
import com.facebook.presto.sql.planner.PlanFragment;
import com.facebook.presto.sql.planner.PlanFragment.OutputPartitioning;
import com.facebook.presto.sql.planner.PlanFragment.PlanDistribution;
import com.facebook.presto.sql.planner.StageExecutionPlan;
import com.facebook.presto.sql.planner.Symbol;
import com.facebook.presto.sql.planner.TestingColumnHandle;
import com.facebook.presto.sql.planner.TestingTableHandle;
import com.facebook.presto.sql.planner.plan.JoinNode;
import com.facebook.presto.sql.planner.plan.JoinNode.EquiJoinClause;
import com.facebook.presto.sql.planner.plan.PlanFragmentId;
import com.facebook.presto.sql.planner.plan.PlanNodeId;
import com.facebook.presto.sql.planner.plan.RemoteSourceNode;
import com.facebook.presto.sql.planner.plan.TableScanNode;
import com.google.common.base.Supplier;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Multimap;
import io.airlift.units.DataSize;
import io.airlift.units.DataSize.Unit;
import io.airlift.units.Duration;
import org.joda.time.DateTime;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import javax.annotation.concurrent.GuardedBy;
import java.net.URI;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicLong;
import static com.facebook.presto.OutputBuffers.INITIAL_EMPTY_OUTPUT_BUFFERS;
import static com.facebook.presto.SessionTestUtils.TEST_SESSION;
import static com.facebook.presto.spi.type.VarcharType.VARCHAR;
import static com.facebook.presto.util.Failures.toFailures;
import static com.google.common.base.Preconditions.checkNotNull;
import static io.airlift.concurrent.Threads.daemonThreadsNamed;
import static io.airlift.units.DataSize.Unit.MEGABYTE;
import static java.util.concurrent.Executors.newCachedThreadPool;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.fail;
@Test(singleThreaded = true)
public class TestSqlStageExecution
{
// Output buffer id shared by every stage execution these tests create.
public static final TaskId OUT = new TaskId("query", "stage", "out");
// Re-initialized in setUp() before each test method (singleThreaded suite).
private NodeTaskMap nodeTaskMap;
private InMemoryNodeManager nodeManager;
private NodeScheduler nodeScheduler;
private LocationFactory locationFactory;
private Supplier<ConnectorSplit> splitFactory;
@BeforeMethod
public void setUp()
        throws Exception
{
    // Register three worker nodes under the "foo" connector.
    nodeManager = new InMemoryNodeManager();
    ImmutableList<Node> workers = ImmutableList.of(
            new PrestoNode("other1", URI.create("http://127.0.0.1:11"), NodeVersion.UNKNOWN),
            new PrestoNode("other2", URI.create("http://127.0.0.1:12"), NodeVersion.UNKNOWN),
            new PrestoNode("other3", URI.create("http://127.0.0.1:13"), NodeVersion.UNKNOWN));
    nodeManager.addNode("foo", workers);

    // Scheduler limits: at most 20 splits per node, 10 pending per node per task,
    // and never place work on the coordinator.
    NodeSchedulerConfig schedulerConfig = new NodeSchedulerConfig()
            .setMaxSplitsPerNode(20)
            .setIncludeCoordinator(false)
            .setMaxPendingSplitsPerNodePerTask(10);
    nodeTaskMap = new NodeTaskMap();
    nodeScheduler = new NodeScheduler(nodeManager, schedulerConfig, nodeTaskMap);
    locationFactory = new MockLocationFactory();
    splitFactory = TestingSplit::createLocalSplit;
}
@Test(expectedExceptions = ExecutionException.class, expectedExceptionsMessageRegExp = ".*No nodes available to run query")
public void testExcludeCoordinator()
        throws Exception
{
    // An empty node manager plus includeCoordinator=false leaves the scheduler
    // with no eligible nodes at all (locals deliberately distinct from the
    // similarly named fields populated in setUp()).
    InMemoryNodeManager emptyNodeManager = new InMemoryNodeManager();
    NodeScheduler coordinatorFreeScheduler =
            new NodeScheduler(emptyNodeManager, new NodeSchedulerConfig().setIncludeCoordinator(false), nodeTaskMap);

    // Starting the stage must surface the "No nodes available" failure
    // through the scheduling future.
    StageExecutionPlan emptySplitPlan = createTableScanPlan("test", 20, TestingSplit::createEmptySplit);
    SqlStageExecution execution = createSqlStageExecution(coordinatorFreeScheduler, 2, emptySplitPlan);
    execution.start().get(1, TimeUnit.SECONDS);
}
@Test
public void testSplitAssignment()
        throws Exception
{
    // Schedule 15 splits in batches of 2 over the 3 nodes from setUp():
    // each node should end up with exactly 5 splits.
    StageExecutionPlan firstPlan = createTableScanPlan("test", 15, splitFactory);
    SqlStageExecution firstStage = createSqlStageExecution(nodeScheduler, 2, firstPlan);
    firstStage.start().get(1, TimeUnit.SECONDS);
    for (RemoteTask remoteTask : firstStage.getAllTasks()) {
        assertEquals(remoteTask.getPartitionedSplitCount(), 5);
    }

    // Register a fourth, completely idle node.
    Node additionalNode = new PrestoNode("other4", URI.create("http://127.0.0.1:14"), NodeVersion.UNKNOWN);
    nodeManager.addNode("foo", additionalNode);

    // A follow-up query with 5 splits should land entirely on the idle node,
    // since the original three already carry 5 splits each.
    StageExecutionPlan secondPlan = createTableScanPlan("test", 5, splitFactory);
    SqlStageExecution secondStage = createSqlStageExecution(nodeScheduler, 5, secondPlan);
    secondStage.start().get(1, TimeUnit.SECONDS);

    RemoteTask newNodeTask = Iterables.getFirst(secondStage.getTasks(additionalNode), null);
    assertNotNull(newNodeTask);
    assertEquals(newNodeTask.getPartitionedSplitCount(), 5);
}
@Test
public void testSplitAssignmentBatchSizeGreaterThanMaxPending()
        throws Exception
{
    // 100 splits in a single batch, but maxSplitsPerNode is 20, so each of the
    // three nodes can only accept 20 splits.
    StageExecutionPlan plan = createTableScanPlan("test", 100, splitFactory);
    SqlStageExecution stage = createSqlStageExecution(nodeScheduler, 100, plan);
    Future<?> scheduling = stage.start();
    try {
        // The scheduler stays blocked on the surplus splits, so this times out.
        scheduling.get(2, TimeUnit.SECONDS);
    }
    catch (TimeoutException ignored) {
        // expected — scheduling cannot complete while nodes are saturated
    }
    for (RemoteTask task : stage.getAllTasks()) {
        assertEquals(task.getPartitionedSplitCount(), 20);
    }
}
/**
 * Builds a SqlStageExecution over the given plan, using mock remote tasks and
 * the shared locationFactory / nodeTaskMap fields.
 */
private SqlStageExecution createSqlStageExecution(NodeScheduler nodeScheduler, int splitBatchSize, StageExecutionPlan tableScanPlan)
{
    ExecutorService taskExecutor = newCachedThreadPool(daemonThreadsNamed("remoteTaskExecutor-%s"));
    ExecutorService stageExecutor = newCachedThreadPool(daemonThreadsNamed("stageExecutor-%s"));
    OutputBuffers buffers = INITIAL_EMPTY_OUTPUT_BUFFERS
            .withBuffer(OUT, new UnpartitionedPagePartitionFunction())
            .withNoMoreBufferIds();
    return new SqlStageExecution(
            new QueryId("query"),
            locationFactory,
            tableScanPlan,
            nodeScheduler,
            new MockRemoteTaskFactory(taskExecutor),
            TEST_SESSION,
            splitBatchSize,
            8, // initialHashPartitions
            stageExecutor,
            nodeTaskMap,
            buffers);
}
/**
 * Polls a join stage until its table-scan sub-stage reaches RUNNING and then
 * verifies task/buffer state. Disabled; the comment below documents the
 * expected shape. The loop asserts the outer stage stays in SCHEDULING while
 * polling and fails the test if no progress is observed within one second.
 */
@Test(enabled = false)
public void testYieldCausesFullSchedule()
        throws Exception
{
    ExecutorService executor = newCachedThreadPool(daemonThreadsNamed("test-%s"));
    SqlStageExecution stageExecution = null;
    try {
        StageExecutionPlan joinPlan = createJoinPlan("A");

        // Single-node cluster local to this test (intentionally shadows the field).
        InMemoryNodeManager nodeManager = new InMemoryNodeManager();
        nodeManager.addNode("foo", new PrestoNode("other", URI.create("http://127.0.0.1:11"), NodeVersion.UNKNOWN));

        OutputBuffers outputBuffers = INITIAL_EMPTY_OUTPUT_BUFFERS
                .withBuffer(OUT, new UnpartitionedPagePartitionFunction())
                .withNoMoreBufferIds();

        stageExecution = new SqlStageExecution(new QueryId("query"),
                new MockLocationFactory(),
                joinPlan,
                new NodeScheduler(nodeManager, new NodeSchedulerConfig(), nodeTaskMap),
                new MockRemoteTaskFactory(executor),
                TEST_SESSION,
                1000,
                8,
                executor,
                nodeTaskMap,
                outputBuffers);
        Future<?> future = stageExecution.start();
        long start = System.nanoTime();
        while (true) {
            StageInfo stageInfo = stageExecution.getStageInfo();
            // Outer join stage must still be scheduling while we poll.
            assertEquals(stageInfo.getState(), StageState.SCHEDULING);
            StageInfo tableScanInfo = stageInfo.getSubStages().get(0);
            StageState tableScanState = tableScanInfo.getState();
            switch (tableScanState) {
                case PLANNED:
                case SCHEDULING:
                case SCHEDULED:
                    // not there yet — fall through to the timeout check below
                    break;
                case RUNNING:
                    // there should be two tasks (even though only one can ever be used)
                    assertEquals(stageInfo.getTasks().size(), 2);
                    assertEquals(tableScanInfo.getTasks().size(), 1);
                    assertEquals(tableScanInfo.getTasks().get(0).getOutputBuffers().getState(), BufferState.NO_MORE_BUFFERS);
                    return;
                case FINISHED:
                case CANCELED:
                case FAILED:
                    fail("Unexpected state for table scan stage " + tableScanState);
                    break;
            }
            if (TimeUnit.NANOSECONDS.toSeconds(System.nanoTime() - start) > 1) {
                fail("Expected test to complete within 1 second");
            }
            try {
                future.get(50, TimeUnit.MILLISECONDS);
            }
            catch (TimeoutException e) {
                // expected while the stage is still scheduling — keep polling
            }
        }
    }
    finally {
        if (stageExecution != null) {
            stageExecution.cancel();
        }
        executor.shutdownNow();
    }
}
/**
 * Builds a two-input join plan: a single-split "build" side (which will send
 * no-more-buffers immediately) and a ten-split "probe" side (which will not),
 * both read through remote-source exchanges.
 */
private StageExecutionPlan createJoinPlan(String planId)
{
    // create table scan for build data with a single split, so it is only waiting on the no-more buffers call
    StageExecutionPlan build = createTableScanPlan("build", 1, splitFactory);

    // create an exchange to read the build data
    RemoteSourceNode buildSource = new RemoteSourceNode(new PlanNodeId(planId + "-build"),
            build.getFragment().getId(),
            ImmutableList.copyOf(build.getFragment().getSymbols().keySet()));

    // create table scan for probe data with ten splits, so it will not send the no-more buffers call
    StageExecutionPlan probe = createTableScanPlan("probe", 10, splitFactory);

    // create an exchange to read the probe data
    RemoteSourceNode probeSource = new RemoteSourceNode(new PlanNodeId(planId + "-probe"),
            probe.getFragment().getId(),
            ImmutableList.copyOf(probe.getFragment().getSymbols().keySet()));

    // join build and probe
    JoinNode joinNode = new JoinNode(new PlanNodeId(planId), JoinNode.Type.INNER, probeSource, buildSource, ImmutableList.<EquiJoinClause>of(), Optional.empty(), Optional.empty());
    PlanFragment joinPlan = new PlanFragment(
            new PlanFragmentId(planId),
            joinNode,
            probe.getFragment().getSymbols(), // this is wrong, but it works
            joinNode.getOutputSymbols(),
            PlanDistribution.SOURCE,
            new PlanNodeId(planId),
            OutputPartitioning.NONE,
            ImmutableList.<Symbol>of(),
            Optional.empty());
    return new StageExecutionPlan(joinPlan,
            probe.getDataSource(),
            ImmutableList.of(probe, build)
    );
}
/**
 * Builds a single-fragment, SOURCE-distributed plan containing one table scan
 * over a synthetic "test" table, plus {@code splitCount} splits produced by
 * the given factory.
 *
 * @param planId used for both the fragment id and the table-scan node id
 * @param splitCount number of connector splits to generate
 * @param splitFactory produces each split (local or empty, per the caller)
 */
private static StageExecutionPlan createTableScanPlan(String planId, int splitCount, Supplier<ConnectorSplit> splitFactory)
{
    Symbol symbol = new Symbol("column");

    // table scan with splitCount splits
    PlanNodeId tableScanNodeId = new PlanNodeId(planId);
    PlanFragment testFragment = new PlanFragment(
            new PlanFragmentId(planId),
            new TableScanNode(
                    tableScanNodeId,
                    new TableHandle("test", new TestingTableHandle()),
                    ImmutableList.of(symbol),
                    ImmutableMap.of(symbol, new ColumnHandle("test", new TestingColumnHandle("column"))),
                    null,
                    Optional.empty()),
            ImmutableMap.<Symbol, Type>of(symbol, VARCHAR),
            ImmutableList.of(symbol),
            PlanDistribution.SOURCE,
            tableScanNodeId,
            OutputPartitioning.NONE,
            ImmutableList.<Symbol>of(),
            Optional.empty());

    ImmutableList.Builder<ConnectorSplit> splits = ImmutableList.builder();
    for (int i = 0; i < splitCount; i++) {
        splits.add(splitFactory.get());
    }
    SplitSource splitSource = new ConnectorAwareSplitSource("test", new FixedSplitSource(null, splits.build()));

    return new StageExecutionPlan(testFragment,
            Optional.of(splitSource),
            ImmutableList.<StageExecutionPlan>of()
    );
}
/**
 * RemoteTaskFactory stub that fabricates purely in-memory tasks instead of
 * HTTP-backed ones, so stage scheduling can be exercised without workers.
 */
private static class MockRemoteTaskFactory
        implements RemoteTaskFactory
{
    private final Executor executor;

    private MockRemoteTaskFactory(Executor executor)
    {
        this.executor = executor;
    }

    @Override
    public RemoteTask createRemoteTask(
            Session session,
            TaskId taskId,
            Node node,
            PlanFragment fragment,
            Multimap<PlanNodeId, Split> initialSplits,
            OutputBuffers outputBuffers)
    {
        // session, fragment and outputBuffers are ignored by the mock.
        return new MockRemoteTask(taskId, node.getNodeIdentifier(), executor, initialSplits);
    }

    /**
     * In-memory RemoteTask: tracks its splits and buffer state locally and
     * serves TaskInfo snapshots from a TaskStateMachine.
     */
    private static class MockRemoteTask
            implements RemoteTask
    {
        // Monotonically increasing version stamped onto each TaskInfo snapshot.
        private final AtomicLong nextTaskInfoVersion = new AtomicLong(TaskInfo.STARTING_VERSION);

        private final URI location;
        private final TaskStateMachine taskStateMachine;
        private final TaskContext taskContext;
        private final SharedBuffer sharedBuffer;
        private final String nodeId;

        @GuardedBy("this")
        private final Set<PlanNodeId> noMoreSplits = new HashSet<>();

        @GuardedBy("this")
        private final Multimap<PlanNodeId, Split> splits = HashMultimap.create();

        public MockRemoteTask(TaskId taskId,
                String nodeId,
                Executor executor,
                Multimap<PlanNodeId, Split> initialSplits)
        {
            this.taskStateMachine = new TaskStateMachine(checkNotNull(taskId, "taskId is null"), checkNotNull(executor, "executor is null"));
            this.taskContext = new TaskContext(taskStateMachine, executor, TEST_SESSION, new DataSize(256, MEGABYTE), new DataSize(1, MEGABYTE), true, true);
            this.location = URI.create("fake://task/" + taskId);
            // NOTE(review): checkNotNull on a freshly constructed DataSize is a no-op.
            this.sharedBuffer = new SharedBuffer(taskId, executor, checkNotNull(new DataSize(1, Unit.BYTE), "maxBufferSize is null"));
            this.nodeId = nodeId;
            splits.putAll(initialSplits);
        }

        @Override
        public String getNodeId()
        {
            return nodeId;
        }

        @Override
        public TaskInfo getTaskInfo()
        {
            // Snapshot the current state; failures are only populated once FAILED.
            TaskState state = taskStateMachine.getState();
            List<ExecutionFailureInfo> failures = ImmutableList.of();
            if (state == TaskState.FAILED) {
                failures = toFailures(taskStateMachine.getFailureCauses());
            }
            return new TaskInfo(
                    taskStateMachine.getTaskId(),
                    Optional.empty(),
                    nextTaskInfoVersion.getAndIncrement(),
                    state,
                    location,
                    DateTime.now(),
                    sharedBuffer.getInfo(),
                    ImmutableSet.<PlanNodeId>of(),
                    taskContext.getTaskStats(),
                    failures);
        }

        // Test hook: drive the state machine straight to finished.
        public void finished()
        {
            taskStateMachine.finished();
        }

        @Override
        public void start()
        {
            // no-op: mock tasks have no background work to launch
        }

        @Override
        public void addSplits(PlanNodeId sourceId, Iterable<Split> splits)
        {
            checkNotNull(splits, "splits is null");
            for (Split split : splits) {
                this.splits.put(sourceId, split);
            }
        }

        @Override
        public void noMoreSplits(PlanNodeId sourceId)
        {
            noMoreSplits.add(sourceId);
        }

        @Override
        public void setOutputBuffers(OutputBuffers outputBuffers)
        {
            sharedBuffer.setOutputBuffers(outputBuffers);
        }

        @Override
        public void addStateChangeListener(StateChangeListener<TaskInfo> stateChangeListener)
        {
            // Relay state-machine transitions as fresh TaskInfo snapshots.
            taskStateMachine.addStateChangeListener(newValue -> stateChangeListener.stateChanged(getTaskInfo()));
        }

        @Override
        public void cancel()
        {
            taskStateMachine.cancel();
        }

        @Override
        public void abort()
        {
            taskStateMachine.abort();
        }

        @Override
        public Duration waitForTaskToFinish(Duration maxWait)
                throws InterruptedException
        {
            // Wait for state changes until done or the wait budget is exhausted.
            while (true) {
                TaskState currentState = taskStateMachine.getState();
                if (maxWait.toMillis() <= 1 || currentState.isDone()) {
                    return maxWait;
                }
                maxWait = taskStateMachine.waitForStateChange(currentState, maxWait);
            }
        }

        @Override
        public int getPartitionedSplitCount()
        {
            if (taskStateMachine.getState().isDone()) {
                return 0;
            }
            return splits.size();
        }

        @Override
        public int getQueuedPartitionedSplitCount()
        {
            // Mock tasks never actually run splits, so queued == total.
            return getPartitionedSplitCount();
        }
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.handler.dataimport;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.io.FileUtils;
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.SuppressForbidden;
import org.apache.solr.core.SolrCore;
import org.apache.solr.request.LocalSolrQueryRequest;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.update.AddUpdateCommand;
import org.apache.solr.update.CommitUpdateCommand;
import org.apache.solr.update.DeleteUpdateCommand;
import org.apache.solr.update.MergeIndexesCommand;
import org.apache.solr.update.RollbackUpdateCommand;
import org.apache.solr.update.processor.UpdateRequestProcessor;
import org.apache.solr.update.processor.UpdateRequestProcessorFactory;
import org.junit.Before;
/**
 * <p>
 * Abstract base class for DataImportHandler tests.
 * </p>
 * <p>
 * <b>This API is experimental and subject to change.</b>
 * </p>
 *
 * @since solr 1.3
 */
public abstract class AbstractDataImportHandlerTestCase extends
SolrTestCaseJ4 {
// note, a little twisted that we shadow this static method
/**
 * Initializes a Solr core for DIH tests: copies the bundled "dih/solr"
 * configuration into a fresh temporary core home, then delegates to the
 * three-argument {@code initCore} from the superclass.
 */
public static void initCore(String config, String schema) throws Exception {
    File testHome = createTempDir("core-home").toFile();
    FileUtils.copyDirectory(getFile("dih/solr"), testHome);
    initCore(config, schema, testHome.getAbsolutePath());
}
@Override
@Before
public void setUp() throws Exception {
    super.setUp();
    // Point solr.solr.home at a throwaway directory so DIH property files
    // written during a test do not pollute any real Solr home.
    File home = createTempDir("dih-properties").toFile();
    System.setProperty("solr.solr.home", home.getAbsolutePath());
}
/**
 * Loads the named data-config resource through the core's resource loader.
 * On I/O failure the stack trace is printed and {@code null} is returned —
 * callers that don't check will typically fail later with an NPE.
 */
protected String loadDataConfig(String dataConfigFileName) {
    try {
      SolrCore core = h.getCore();
      return SolrWriter.getResourceAsString(core.getResourceLoader()
          .openResource(dataConfigFileName));
    } catch (IOException e) {
      e.printStackTrace();
      return null;
    }
}
/**
 * Issues a debug full-import against /dataimport: clears the index
 * (clean=true) and commits when the import completes.
 */
protected void runFullImport(String dataConfig) throws Exception {
    SolrQueryRequest request = lrf.makeRequest(
        "command", "full-import",
        "debug", "on",
        "clean", "true",
        "commit", "true",
        "dataConfig", dataConfig);
    h.query("/dataimport", request);
}
/**
 * Issues a debug delta-import against /dataimport: keeps the existing index
 * (clean=false) and commits when the import completes.
 */
protected void runDeltaImport(String dataConfig) throws Exception {
    SolrQueryRequest request = lrf.makeRequest(
        "command", "delta-import",
        "debug", "on",
        "clean", "false",
        "commit", "true",
        "dataConfig", dataConfig);
    h.query("/dataimport", request);
}
/**
 * Redirect {@link SimplePropertiesWriter#filename} to a temporary location
 * and return it.
 *
 * @param di importer whose property-writer configuration is rewritten in place
 * @return the temporary file that will receive the import properties
 */
protected File redirectTempProperties(DataImporter di) {
    try {
      File tempFile = createTempFile().toFile();
      di.getConfig().getPropertyWriter().getParameters()
          .put(SimplePropertiesWriter.FILENAME, tempFile.getAbsolutePath());
      return tempFile;
    } catch (IOException e) {
      // surface as unchecked: tests cannot proceed without the redirect
      throw new RuntimeException(e);
    }
}
/**
 * Runs a full-import using the given dataConfig and the provided request parameters.
 *
 * By default, debug=on, clean=true and commit=true are passed which can be overridden.
 *
 * @param dataConfig the data-config xml as a string
 * @param extraParams any extra request parameters needed to be passed to DataImportHandler
 * @throws Exception in case of any error
 */
protected void runFullImport(String dataConfig, Map<String, String> extraParams) throws Exception {
    // Defaults go in first so extraParams can override any of them.
    HashMap<String, String> params = new HashMap<>();
    params.put("command", "full-import");
    params.put("debug", "on");
    params.put("dataConfig", dataConfig);
    params.put("clean", "true");
    params.put("commit", "true");
    params.putAll(extraParams);
    // Parameterized NamedList (the original raw type produced unchecked warnings).
    NamedList<String> args = new NamedList<>();
    for (Map.Entry<String, String> e : params.entrySet()) {
      args.add(e.getKey(), e.getValue());
    }
    LocalSolrQueryRequest request = new LocalSolrQueryRequest(h.getCore(), args);
    h.query("/dataimport", request);
}
/**
 * Helper for creating a Context instance. Useful for testing Transformers.
 *
 * @param parent parent entity wrapper, or null for a root context
 * @param resolver variable resolver; a fresh one is created when null
 * @param entityFields field declarations returned by getAllEntityFields()
 * @param entityAttrs attributes served by getEntityAttribute(); null delegates
 */
@SuppressWarnings("unchecked")
public static TestContext getContext(EntityProcessorWrapper parent,
    VariableResolver resolver, DataSource parentDataSource,
    String currProcess, final List<Map<String, String>> entityFields,
    final Map<String, String> entityAttrs) {
    if (resolver == null) resolver = new VariableResolver();
    final Context delegate = new ContextImpl(parent, resolver,
        parentDataSource, currProcess,
        new HashMap<String, Object>(), null, null);
    return new TestContext(entityAttrs, delegate, entityFields, parent == null);
}
/**
 * Strings at even index are keys, odd-index strings are values in the
 * returned map.
 *
 * <p>Pairs are inserted in argument order (LinkedHashMap). A trailing
 * unpaired argument is silently ignored, matching the historical behavior.
 *
 * @param args alternating keys and values; may be null or empty
 * @return a mutable, insertion-ordered map of the supplied pairs
 */
@SuppressWarnings("unchecked")
public static Map createMap(Object... args) {
    // Parameterized internally; the raw return type is kept for source
    // compatibility with existing callers.
    Map<Object, Object> result = new LinkedHashMap<>();
    if (args == null || args.length == 0) {
      return result;
    }
    for (int i = 0; i < args.length - 1; i += 2) {
      result.put(args[i], args[i + 1]);
    }
    return result;
}
/**
 * Writes {@code content} to a new file under {@code tmpdir} and optionally
 * back-dates its modification time by one hour (so "changed since last
 * import" checks treat it as old). The file is marked deleteOnExit.
 *
 * @throws IOException if the file cannot be created or written
 */
@SuppressForbidden(reason = "Needs currentTimeMillis to set modified time for a file")
public static File createFile(File tmpdir, String name, byte[] content,
    boolean changeModifiedTime) throws IOException {
    File file = new File(tmpdir.getAbsolutePath() + File.separator + name);
    file.deleteOnExit();
    // try-with-resources: the original leaked the stream when write() threw.
    try (FileOutputStream out = new FileOutputStream(file)) {
      out.write(content);
    }
    if (changeModifiedTime) {
      file.setLastModified(System.currentTimeMillis() - 3600000);
    }
    return file;
}
/**
 * Builds a field-declaration attribute map the way DIH config would supply
 * it: column, type, regex, sourceColName and splitBy entries.
 */
public static Map<String, String> getField(String col, String type,
    String re, String srcCol, String splitBy) {
    Map<String, String> attrs = new HashMap<>();
    attrs.put("column", col);
    attrs.put("type", type);
    attrs.put("regex", re);
    attrs.put("sourceColName", srcCol);
    attrs.put("splitBy", splitBy);
    return attrs;
}
/**
 * Context implementation for tests: answers attribute/field/script queries
 * from the values supplied at construction time and delegates everything
 * else to the wrapped Context.
 */
static class TestContext extends Context {
    private final Map<String, String> entityAttrs;
    private final Context delegate;
    private final List<Map<String, String>> entityFields;
    private final boolean root;
    // Optional script overrides; when null the delegate's values are used.
    String script,scriptlang;

    public TestContext(Map<String, String> entityAttrs, Context delegate,
        List<Map<String, String>> entityFields, boolean root) {
      this.entityAttrs = entityAttrs;
      this.delegate = delegate;
      this.entityFields = entityFields;
      this.root = root;
    }

    // Served from the supplied map when present; otherwise delegated.
    @Override
    public String getEntityAttribute(String name) {
      return entityAttrs == null ? delegate.getEntityAttribute(name) : entityAttrs.get(name);
    }

    @Override
    public String getResolvedEntityAttribute(String name) {
      return entityAttrs == null ? delegate.getResolvedEntityAttribute(name) :
          delegate.getVariableResolver().replaceTokens(entityAttrs.get(name));
    }

    @Override
    public List<Map<String, String>> getAllEntityFields() {
      return entityFields == null ? delegate.getAllEntityFields()
          : entityFields;
    }

    @Override
    public VariableResolver getVariableResolver() {
      return delegate.getVariableResolver();
    }

    @Override
    public DataSource getDataSource() {
      return delegate.getDataSource();
    }

    @Override
    public boolean isRootEntity() {
      return root;
    }

    @Override
    public String currentProcess() {
      return delegate.currentProcess();
    }

    @Override
    public Map<String, Object> getRequestParameters() {
      return delegate.getRequestParameters();
    }

    // Intentionally unsupported in the test context.
    @Override
    public EntityProcessor getEntityProcessor() {
      return null;
    }

    @Override
    public void setSessionAttribute(String name, Object val, String scope) {
      delegate.setSessionAttribute(name, val, scope);
    }

    @Override
    public Object getSessionAttribute(String name, String scope) {
      return delegate.getSessionAttribute(name, scope);
    }

    @Override
    public Context getParentContext() {
      return delegate.getParentContext();
    }

    @Override
    public DataSource getDataSource(String name) {
      return delegate.getDataSource(name);
    }

    @Override
    public SolrCore getSolrCore() {
      return delegate.getSolrCore();
    }

    @Override
    public Map<String, Object> getStats() {
      return delegate.getStats();
    }

    @Override
    public String getScript() {
      return script == null ? delegate.getScript() : script;
    }

    @Override
    public String getScriptLanguage() {
      return scriptlang == null ? delegate.getScriptLanguage() : scriptlang;
    }

    // Document deletions are no-ops in tests.
    @Override
    public void deleteDoc(String id) {
    }

    @Override
    public void deleteDocByQuery(String query) {
    }

    @Override
    public Object resolve(String var) {
      return delegate.resolve(var);
    }

    @Override
    public String replaceTokens(String template) {
      return delegate.replaceTokens(template);
    }
}
/** Factory that inserts a {@link TestUpdateRequestProcessor} into the update chain. */
public static class TestUpdateRequestProcessorFactory extends UpdateRequestProcessorFactory {
    @Override
    public UpdateRequestProcessor getInstance(SolrQueryRequest req,
        SolrQueryResponse rsp, UpdateRequestProcessor next) {
      return new TestUpdateRequestProcessor(next);
    }
}
/**
 * UpdateRequestProcessor that records, via static flags, which lifecycle
 * callbacks were invoked and then delegates to the chain.
 * NOTE(review): the flags are static mutable state, so tests relying on them
 * must not run concurrently; constructing an instance resets all flags.
 */
public static class TestUpdateRequestProcessor extends UpdateRequestProcessor {
    public static boolean finishCalled = false;
    public static boolean processAddCalled = false;
    public static boolean processCommitCalled = false;
    public static boolean processDeleteCalled = false;
    public static boolean mergeIndexesCalled = false;
    public static boolean rollbackCalled = false;

    // Clears all recorded-invocation flags.
    public static void reset() {
      finishCalled = false;
      processAddCalled = false;
      processCommitCalled = false;
      processDeleteCalled = false;
      mergeIndexesCalled = false;
      rollbackCalled = false;
    }

    public TestUpdateRequestProcessor(UpdateRequestProcessor next) {
      super(next);
      reset();
    }

    @Override
    public void finish() throws IOException {
      finishCalled = true;
      super.finish();
    }

    @Override
    public void processAdd(AddUpdateCommand cmd) throws IOException {
      processAddCalled = true;
      super.processAdd(cmd);
    }

    @Override
    public void processCommit(CommitUpdateCommand cmd) throws IOException {
      processCommitCalled = true;
      super.processCommit(cmd);
    }

    @Override
    public void processDelete(DeleteUpdateCommand cmd) throws IOException {
      processDeleteCalled = true;
      super.processDelete(cmd);
    }

    @Override
    public void processMergeIndexes(MergeIndexesCommand cmd) throws IOException {
      mergeIndexesCalled = true;
      super.processMergeIndexes(cmd);
    }

    @Override
    public void processRollback(RollbackUpdateCommand cmd) throws IOException {
      rollbackCalled = true;
      super.processRollback(cmd);
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.jndi;
import static org.junit.Assert.*;
import java.util.Hashtable;
import javax.naming.Binding;
import javax.naming.Context;
import javax.naming.ContextNotEmptyException;
import javax.naming.InitialContext;
import javax.naming.NameAlreadyBoundException;
import javax.naming.NameNotFoundException;
import javax.naming.NamingEnumeration;
import javax.naming.NamingException;
import javax.naming.NoPermissionException;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.apache.geode.test.junit.categories.UnitTest;
/**
* Tests all basic methods of ContextImpl.
*/
@Category(UnitTest.class)
public class ContextJUnitTest {
private Context initialContext;
private Context gemfireContext;
private Context envContext;
private Context dataSourceContext;
@Before
public void setUp() throws Exception {
    // Configure JNDI to use GemFire's in-memory initial-context factory.
    Hashtable props = new Hashtable();
    props.put(Context.INITIAL_CONTEXT_FACTORY,
        "org.apache.geode.internal.jndi.InitialContextFactoryImpl");
    initialContext = new InitialContext(props);
    // Binding a leaf implicitly creates the java:gf/env/datasource chain.
    initialContext.bind("java:gf/env/datasource/oracle", "a");
    gemfireContext = (Context) initialContext.lookup("java:gf");
    envContext = (Context) gemfireContext.lookup("env");
    dataSourceContext = (Context) envContext.lookup("datasource");
}
@After
public void tearDown() throws Exception {
    // Empty the JNDI tree, then drop every reference so the next test
    // starts from a clean slate.
    clearContext(initialContext);
    initialContext = null;
    gemfireContext = null;
    envContext = null;
    dataSourceContext = null;
}
/**
 * Recursively removes every binding — including nested subcontexts — from
 * the given context.
 *
 * @param context context to clear
 */
private void clearContext(Context context) throws NamingException {
    NamingEnumeration bindings = context.listBindings("");
    while (bindings.hasMoreElements()) {
      Binding binding = (Binding) bindings.nextElement();
      Object bound = binding.getObject();
      // Depth-first: empty a subcontext before unbinding its name.
      if (bound instanceof Context) {
        clearContext((Context) bound);
      }
      context.unbind(binding.getName());
    }
}
/**
 * Tests inability to create duplicate subcontexts.
 */
@Test
public void testSubcontextCreationOfDuplicates() throws NamingException {
    // Try to create duplicate subcontext
    try {
      initialContext.createSubcontext("java:gf");
      fail();
    } catch (NameAlreadyBoundException expected) {
      // java:gf already exists from setUp()
    }
    // Try to create duplicate subcontext using multi-component name
    try {
      gemfireContext.createSubcontext("env/datasource");
      fail();
    } catch (NameAlreadyBoundException expected) {
      // env/datasource already exists from setUp()
    }
}
/**
 * Tests inability to destroy non empty subcontexts.
 */
@Test
public void testSubcontextNonEmptyDestruction() throws Exception {
    // Bind some object in ejb subcontext
    dataSourceContext.bind("Test", "Object");
    // Attempt to destroy any subcontext — each holds at least one binding,
    // so every call must fail.
    try {
      initialContext.destroySubcontext("java:gf");
      fail();
    } catch (ContextNotEmptyException expected) {
    }
    try {
      initialContext.destroySubcontext("java:gf/env/datasource");
      fail();
    } catch (ContextNotEmptyException expected) {
    }
    try {
      envContext.destroySubcontext("datasource");
      fail();
    } catch (ContextNotEmptyException expected) {
    }
}
/**
 * Tests ability to destroy empty subcontexts.
 */
@Test
public void testSubcontextDestruction() throws Exception {
    // Create three new subcontexts
    dataSourceContext.createSubcontext("sub1");
    dataSourceContext.createSubcontext("sub2");
    envContext.createSubcontext("sub3");
    // Destroy — exercising both absolute names and relative names.
    initialContext.destroySubcontext("java:gf/env/datasource/sub1");
    dataSourceContext.destroySubcontext("sub2");
    envContext.destroySubcontext("sub3");
    // Perform lookup — each destroyed context must now be gone.
    try {
      dataSourceContext.lookup("sub1");
      fail();
    } catch (NameNotFoundException expected) {
    }
    try {
      envContext.lookup("datasource/sub2");
      fail();
    } catch (NameNotFoundException expected) {
    }
    try {
      initialContext.lookup("java:gf/sub3");
      fail();
    } catch (NameNotFoundException expected) {
    }
}
/**
 * Tests inability to invoke methods on destroyed subcontexts: every
 * operation on a destroyed Context must raise NoPermissionException.
 */
@Test
public void testSubcontextInvokingMethodsOnDestroyedContext() throws Exception {
    // Create subcontext and destroy it.
    Context sub = dataSourceContext.createSubcontext("sub4");
    initialContext.destroySubcontext("java:gf/env/datasource/sub4");
    try {
      sub.bind("name", "object");
      fail();
    } catch (NoPermissionException expected) {
    }
    try {
      sub.unbind("name");
      fail();
    } catch (NoPermissionException expected) {
    }
    try {
      sub.createSubcontext("sub5");
      fail();
    } catch (NoPermissionException expected) {
    }
    try {
      sub.destroySubcontext("sub6");
      fail();
    } catch (NoPermissionException expected) {
    }
    try {
      sub.list("");
      fail();
    } catch (NoPermissionException expected) {
    }
    try {
      sub.lookup("name");
      fail();
    } catch (NoPermissionException expected) {
    }
    try {
      sub.composeName("name", "prefix");
      fail();
    } catch (NoPermissionException expected) {
    }
    // composeName with parsed Name arguments must be rejected as well.
    try {
      NameParserImpl parser = new NameParserImpl();
      sub.composeName(parser.parse("a"), parser.parse("b"));
      fail();
    } catch (NoPermissionException expected) {
    }
}
/**
 * Tests ability to bind name to object, including null values and binds
 * into subcontexts that are created implicitly.
 */
@Test
public void testBindLookup() throws Exception {
    // Plain literals replace the redundant "new String(...)" copies; each
    // literal is still a single stable reference, so the assertSame
    // identity checks below are unaffected.
    Object obj1 = "Object1";
    Object obj2 = "Object2";
    Object obj3 = "Object3";
    dataSourceContext.bind("sub21", null);
    dataSourceContext.bind("sub22", obj1);
    initialContext.bind("java:gf/env/sub23", null);
    initialContext.bind("java:gf/env/sub24", obj2);
    // Bind to subcontexts that do not exist
    initialContext.bind("java:gf/env/datasource/sub25/sub26", obj3);
    // Try to lookup
    assertNull(dataSourceContext.lookup("sub21"));
    assertSame(dataSourceContext.lookup("sub22"), obj1);
    assertNull(gemfireContext.lookup("env/sub23"));
    assertSame(initialContext.lookup("java:gf/env/sub24"), obj2);
    assertSame(dataSourceContext.lookup("sub25/sub26"), obj3);
}
/**
* Tests ability to unbind names.
*/
@Test
public void testUnbind() throws Exception {
envContext.bind("sub31", null);
gemfireContext.bind("env/ejb/sub32", new String("UnbindObject"));
// Unbind
initialContext.unbind("java:gf/env/sub31");
dataSourceContext.unbind("sub32");
try {
envContext.lookup("sub31");
fail();
} catch (NameNotFoundException expected) {
}
try {
initialContext.lookup("java:gf/env/sub32");
fail();
} catch (NameNotFoundException expected) {
}
// Unbind non-existing name
dataSourceContext.unbind("doesNotExist");
// Unbind non-existing name, when subcontext does not exists
try {
gemfireContext.unbind("env/x/y");
fail();
} catch (NameNotFoundException expected) {
}
}
/**
* Tests ability to list bindings for a context - specified by name through object reference.
*/
@Test
public void testListBindings() throws Exception {
gemfireContext.bind("env/datasource/sub41", "ListBindings1");
envContext.bind("sub42", "ListBindings2");
dataSourceContext.bind("sub43", null);
// Verify bindings for context specified by reference
verifyListBindings(envContext, "", "ListBindings1", "ListBindings2");
// Verify bindings for context specified by name
verifyListBindings(initialContext, "java:gf/env", "ListBindings1", "ListBindings2");
}
private void verifyListBindings(Context c, String name, Object obj1, Object obj2)
throws NamingException {
boolean datasourceFoundFlg = false;
boolean o2FoundFlg = false;
boolean datasourceO1FoundFlg = false;
boolean datasourceNullFoundFlg = false;
// List bindings for the specified context
for (NamingEnumeration en = c.listBindings(name); en.hasMore();) {
Binding b = (Binding) en.next();
if (b.getName().equals("datasource")) {
assertEquals(b.getObject(), dataSourceContext);
datasourceFoundFlg = true;
Context nextCon = (Context) b.getObject();
for (NamingEnumeration en1 = nextCon.listBindings(""); en1.hasMore();) {
Binding b1 = (Binding) en1.next();
if (b1.getName().equals("sub41")) {
assertEquals(b1.getObject(), obj1);
datasourceO1FoundFlg = true;
} else if (b1.getName().equals("sub43")) {
// check for null object
assertNull(b1.getObject());
datasourceNullFoundFlg = true;
}
}
} else if (b.getName().equals("sub42")) {
assertEquals(b.getObject(), obj2);
o2FoundFlg = true;
}
}
if (!(datasourceFoundFlg && o2FoundFlg && datasourceO1FoundFlg && datasourceNullFoundFlg)) {
fail();
}
}
@Test
public void testCompositeName() throws Exception {
ContextImpl c = new ContextImpl();
Object o = new Object();
c.rebind("/a/b/c/", o);
assertEquals(c.lookup("a/b/c"), o);
assertEquals(c.lookup("///a/b/c///"), o);
}
@Test
public void testLookup() throws Exception {
ContextImpl ctx = new ContextImpl();
Object obj = new Object();
ctx.rebind("a/b/c/d", obj);
assertEquals(obj, ctx.lookup("a/b/c/d"));
ctx.bind("a", obj);
assertEquals(obj, ctx.lookup("a"));
}
/**
* Tests "getCompositeName" method
*/
@Test
public void testGetCompositeName() throws Exception {
ContextImpl ctx = new ContextImpl();
ctx.rebind("a/b/c/d", new Object());
ContextImpl subCtx;
subCtx = (ContextImpl) ctx.lookup("a");
assertEquals("a", subCtx.getCompoundStringName());
subCtx = (ContextImpl) ctx.lookup("a/b/c");
assertEquals("a/b/c", subCtx.getCompoundStringName());
}
/**
* Tests substitution of '.' with '/' when parsing string names.
*/
@Test
public void testTwoSeparatorNames() throws Exception {
ContextImpl ctx = new ContextImpl();
Object obj = new Object();
ctx.bind("a/b.c.d/e", obj);
assertEquals(ctx.lookup("a/b/c/d/e"), obj);
assertEquals(ctx.lookup("a.b/c.d.e"), obj);
assertTrue(ctx.lookup("a.b.c.d") instanceof Context);
}
}
| |
/*******************************************************************************
* Copyright (C) 2015 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
******************************************************************************/
package com.google.cloud.dataflow.sdk.runners.worker;
import static com.google.api.client.util.Preconditions.checkNotNull;
import static com.google.cloud.dataflow.sdk.runners.worker.SourceTranslationUtils.cloudPositionToReaderPosition;
import static com.google.cloud.dataflow.sdk.runners.worker.SourceTranslationUtils.cloudProgressToReaderProgress;
import static com.google.cloud.dataflow.sdk.runners.worker.SourceTranslationUtils.splitRequestToApproximateProgress;
import com.google.api.client.util.Preconditions;
import com.google.api.services.dataflow.model.ApproximateProgress;
import com.google.cloud.dataflow.sdk.coders.Coder;
import com.google.cloud.dataflow.sdk.coders.IterableCoder;
import com.google.cloud.dataflow.sdk.coders.KvCoder;
import com.google.cloud.dataflow.sdk.options.PipelineOptions;
import com.google.cloud.dataflow.sdk.util.BatchModeExecutionContext;
import com.google.cloud.dataflow.sdk.util.CoderUtils;
import com.google.cloud.dataflow.sdk.util.WindowedValue;
import com.google.cloud.dataflow.sdk.util.WindowedValue.WindowedValueCoder;
import com.google.cloud.dataflow.sdk.util.common.CounterSet;
import com.google.cloud.dataflow.sdk.util.common.Reiterable;
import com.google.cloud.dataflow.sdk.util.common.Reiterator;
import com.google.cloud.dataflow.sdk.util.common.worker.AbstractBoundedReaderIterator;
import com.google.cloud.dataflow.sdk.util.common.worker.BatchingShuffleEntryReader;
import com.google.cloud.dataflow.sdk.util.common.worker.GroupingShuffleEntryIterator;
import com.google.cloud.dataflow.sdk.util.common.worker.KeyGroupedShuffleEntries;
import com.google.cloud.dataflow.sdk.util.common.worker.Reader;
import com.google.cloud.dataflow.sdk.util.common.worker.ShuffleEntry;
import com.google.cloud.dataflow.sdk.util.common.worker.ShuffleEntryReader;
import com.google.cloud.dataflow.sdk.util.common.worker.StateSampler;
import com.google.cloud.dataflow.sdk.values.KV;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.Iterator;
import javax.annotation.Nullable;
/**
* A source that reads from a shuffled dataset and yields key-grouped data.
*
* @param <K> the type of the keys read from the shuffle
* @param <V> the type of the values read from the shuffle
*/
public class GroupingShuffleReader<K, V> extends Reader<WindowedValue<KV<K, Reiterable<V>>>> {
  private static final Logger LOG = LoggerFactory.getLogger(GroupingShuffleReader.class);
  // Opaque serialized configuration handed to ApplianceShuffleReader.
  final byte[] shuffleReaderConfig;
  // Base64-encoded bounds of the shuffle range; null means unbounded on that side.
  @Nullable final String startShufflePosition;
  @Nullable final String stopShufflePosition;
  final BatchModeExecutionContext executionContext;
  // Both coders are extracted from the constructor's composite coder by initCoder().
  Coder<K> keyCoder;
  Coder<V> valueCoder;
  public GroupingShuffleReader(
      PipelineOptions options,
      byte[] shuffleReaderConfig,
      @Nullable String startShufflePosition,
      @Nullable String stopShufflePosition,
      Coder<WindowedValue<KV<K, Iterable<V>>>> coder,
      BatchModeExecutionContext executionContext)
      throws Exception {
    this.shuffleReaderConfig = shuffleReaderConfig;
    this.startShufflePosition = startShufflePosition;
    this.stopShufflePosition = stopShufflePosition;
    this.executionContext = executionContext;
    // Validates the coder shape and populates keyCoder/valueCoder.
    initCoder(coder);
  }
  @Override
  public ReaderIterator<WindowedValue<KV<K, Reiterable<V>>>> iterator() throws IOException {
    Preconditions.checkArgument(shuffleReaderConfig != null);
    // Production path: batched, chunked reads from the shuffle appliance.
    return iterator(new BatchingShuffleEntryReader(
        new ChunkingShuffleBatchReader(new ApplianceShuffleReader(shuffleReaderConfig))));
  }
  /**
   * Extracts keyCoder and valueCoder from the composite coder, which must be a
   * WindowedValueCoder wrapping a KvCoder whose value coder is an IterableCoder;
   * any other shape raises an Exception.
   */
  private void initCoder(Coder<WindowedValue<KV<K, Iterable<V>>>> coder) throws Exception {
    if (!(coder instanceof WindowedValueCoder)) {
      throw new Exception("unexpected kind of coder for WindowedValue: " + coder);
    }
    Coder<KV<K, Iterable<V>>> elemCoder =
        ((WindowedValueCoder<KV<K, Iterable<V>>>) coder).getValueCoder();
    if (!(elemCoder instanceof KvCoder)) {
      throw new Exception("unexpected kind of coder for elements read from "
          + "a key-grouping shuffle: " + elemCoder);
    }
    @SuppressWarnings("unchecked")
    KvCoder<K, Iterable<V>> kvCoder = (KvCoder<K, Iterable<V>>) elemCoder;
    this.keyCoder = kvCoder.getKeyCoder();
    Coder<Iterable<V>> kvValueCoder = kvCoder.getValueCoder();
    if (!(kvValueCoder instanceof IterableCoder)) {
      throw new Exception("unexpected kind of coder for values of KVs read from "
          + "a key-grouping shuffle");
    }
    IterableCoder<V> iterCoder = (IterableCoder<V>) kvValueCoder;
    this.valueCoder = iterCoder.getElemCoder();
  }
  // Factory over an arbitrary ShuffleEntryReader; package-visible, presumably
  // also used by tests -- confirm with callers outside this file.
  final ReaderIterator<WindowedValue<KV<K, Reiterable<V>>>> iterator(ShuffleEntryReader reader) {
    return new GroupingShuffleReaderIterator(reader);
  }
  /**
   * A ReaderIterator that reads from a ShuffleEntryReader and groups
   * all the values with the same key.
   *
   * <p>A key limitation of this implementation is that all iterator accesses
   * must by externally synchronized (the iterator objects are not individually
   * thread-safe, and the iterators derived from a single original iterator
   * access shared state that is not thread-safe).
   *
   * <p>To access the current position, the iterator must advance
   * on-demand and cache the next batch of key grouped shuffle
   * entries. The iterator does not advance a second time in @next()
   * to avoid asking the underlying iterator to advance to the next
   * key before the caller/user iterates over the values corresponding
   * to the current key, which would introduce a performance
   * penalty.
   */
  private final class GroupingShuffleReaderIterator
      extends AbstractBoundedReaderIterator<WindowedValue<KV<K, Reiterable<V>>>> {
    // N.B. This class is *not* static; it uses the keyCoder, valueCoder, and
    // executionContext from its enclosing GroupingShuffleReader.
    /** The iterator over shuffle entries, grouped by common key. */
    private final Iterator<KeyGroupedShuffleEntries> groups;
    // Tracks the shuffle range being read and arbitrates dynamic split requests.
    private final GroupingShuffleRangeTracker rangeTracker;
    // Start position of the most recently returned group; used to decide
    // whether the next group begins at a new split point.
    private ByteArrayShufflePosition lastGroupStart;
    /** The next group to be consumed, if available. */
    private KeyGroupedShuffleEntries currentGroup = null;
    protected StateSampler stateSampler = null;
    protected int readState;
    public GroupingShuffleReaderIterator(ShuffleEntryReader reader) {
      // NOTE(review): GroupingShuffleReader.this.stateSampler and
      // stateSamplerOperationName are not declared in the visible part of the
      // enclosing class; presumably inherited from Reader -- confirm.
      if (GroupingShuffleReader.this.stateSampler == null) {
        // No sampler supplied by the enclosing reader: create a local one so
        // scoped state timing still works.
        CounterSet counterSet = new CounterSet();
        this.stateSampler = new StateSampler("local", counterSet.getAddCounterMutator());
        this.readState = stateSampler.stateForName("shuffle");
      } else {
        checkNotNull(GroupingShuffleReader.this.stateSamplerOperationName);
        this.stateSampler = GroupingShuffleReader.this.stateSampler;
        this.readState = stateSampler.stateForName(
            GroupingShuffleReader.this.stateSamplerOperationName + "-process");
      }
      this.rangeTracker =
          new GroupingShuffleRangeTracker(
              ByteArrayShufflePosition.fromBase64(startShufflePosition),
              ByteArrayShufflePosition.fromBase64(stopShufflePosition));
      // Attribute the initial shuffle read to the "read" sampler state.
      try (StateSampler.ScopedState read = stateSampler.scopedState(readState)) {
        this.groups =
            new GroupingShuffleEntryIterator(
                reader.read(rangeTracker.getStartPosition(), rangeTracker.getStopPosition())) {
              @Override
              protected void notifyElementRead(long byteSize) {
                // Forward byte counts to the enclosing reader's accounting.
                GroupingShuffleReader.this.notifyElementRead(byteSize);
              }
            };
      }
    }
    @Override
    protected boolean hasNextImpl() throws IOException {
      try (StateSampler.ScopedState read = stateSampler.scopedState(readState)) {
        if (!groups.hasNext()) {
          return false;
        }
        // Cache the next group here; nextImpl() consumes it without advancing.
        currentGroup = groups.next();
      }
      ByteArrayShufflePosition groupStart = ByteArrayShufflePosition.of(currentGroup.position);
      // A group is a split point iff it starts at a position different from
      // the previously returned group's start.
      boolean isAtSplitPoint = (lastGroupStart == null) || (!groupStart.equals(lastGroupStart));
      lastGroupStart = groupStart;
      return rangeTracker.tryReturnRecordAt(isAtSplitPoint, groupStart);
    }
    @Override
    protected WindowedValue<KV<K, Reiterable<V>>> nextImpl() throws IOException {
      K key = CoderUtils.decodeFromByteArray(keyCoder, currentGroup.key);
      if (executionContext != null) {
        executionContext.setKey(key);
      }
      // Hand off the cached group so the next hasNextImpl() advances again.
      KeyGroupedShuffleEntries group = currentGroup;
      currentGroup = null;
      return WindowedValue.valueInEmptyWindows(
          KV.<K, Reiterable<V>>of(key, new ValuesIterable(group.values)));
    }
    /**
     * Returns the position before the next {@code KV<K, Reiterable<V>>} to be returned by the
     * {@link GroupingShuffleReaderIterator}. Returns null if the
     * {@link GroupingShuffleReaderIterator} is finished.
     */
    @Override
    public Progress getProgress() {
      com.google.api.services.dataflow.model.Position position =
          new com.google.api.services.dataflow.model.Position();
      ApproximateProgress progress = new ApproximateProgress();
      ByteArrayShufflePosition groupStart = rangeTracker.getLastGroupStart();
      if (groupStart != null) {
        position.setShufflePosition(groupStart.encodeBase64());
        progress.setPosition(position);
      }
      return cloudProgressToReaderProgress(progress);
    }
    /**
     * Updates the stop position of the shuffle source to the position proposed. Ignores the
     * proposed stop position if it is smaller than or equal to the position before the next
     * {@code KV<K, Reiterable<V>>} to be returned by the {@link GroupingShuffleReaderIterator}.
     */
    @Override
    public DynamicSplitResult requestDynamicSplit(DynamicSplitRequest splitRequest) {
      checkNotNull(splitRequest);
      ApproximateProgress splitProgress = splitRequestToApproximateProgress(
          splitRequest);
      com.google.api.services.dataflow.model.Position splitPosition = splitProgress.getPosition();
      if (splitPosition == null) {
        LOG.warn("GroupingShuffleReader only supports split at a Position. Requested: {}",
            splitRequest);
        return null;
      }
      String splitShufflePosition = splitPosition.getShufflePosition();
      if (splitShufflePosition == null) {
        LOG.warn("GroupingShuffleReader only supports split at a shuffle position. Requested: {}",
            splitPosition);
        return null;
      }
      ByteArrayShufflePosition newStopPosition =
          ByteArrayShufflePosition.fromBase64(splitShufflePosition);
      // The range tracker decides whether the proposed stop is still ahead of
      // the read position; refusal is logged but not an error.
      if (rangeTracker.trySplitAtPosition(newStopPosition)) {
        LOG.info(
            "Split GroupingShuffleReader at {}, now {}",
            newStopPosition.encodeBase64(),
            rangeTracker);
        return new DynamicSplitResultWithPosition(cloudPositionToReaderPosition(splitPosition));
      } else {
        LOG.info(
            "Refused to split GroupingShuffleReader {} at {}",
            rangeTracker,
            newStopPosition.encodeBase64());
        return null;
      }
    }
    /**
     * Provides the {@link Reiterable} used to iterate through the values part
     * of a {@code KV<K, Reiterable<V>>} entry produced by a
     * {@link GroupingShuffleReader}.
     */
    private final class ValuesIterable implements Reiterable<V> {
      // N.B. This class is *not* static; it uses the valueCoder from
      // its enclosing GroupingShuffleReader.
      private final Reiterable<ShuffleEntry> base;
      public ValuesIterable(Reiterable<ShuffleEntry> base) {
        this.base = checkNotNull(base);
      }
      @Override
      public ValuesIterator iterator() {
        return new ValuesIterator(base.iterator());
      }
    }
    /**
     * Provides the {@link Reiterator} used to iterate through the values part
     * of a {@code KV<K, Reiterable<V>>} entry produced by a
     * {@link GroupingShuffleReader}.
     */
    private final class ValuesIterator implements Reiterator<V> {
      // N.B. This class is *not* static; it uses the valueCoder from
      // its enclosing GroupingShuffleReader.
      private final Reiterator<ShuffleEntry> base;
      public ValuesIterator(Reiterator<ShuffleEntry> base) {
        this.base = checkNotNull(base);
      }
      @Override
      public boolean hasNext() {
        // Attribute value iteration time to the same "read" sampler state.
        try (StateSampler.ScopedState read =
            GroupingShuffleReaderIterator.this.stateSampler.scopedState(
                GroupingShuffleReaderIterator.this.readState)) {
          return base.hasNext();
        }
      }
      @Override
      public V next() {
        try (StateSampler.ScopedState read =
            GroupingShuffleReaderIterator.this.stateSampler.scopedState(
                GroupingShuffleReaderIterator.this.readState)) {
          ShuffleEntry entry = base.next();
          try {
            return CoderUtils.decodeFromByteArray(valueCoder, entry.getValue());
          } catch (IOException exn) {
            // Iterator.next() cannot throw IOException; wrap and rethrow.
            throw new RuntimeException(exn);
          }
        }
      }
      @Override
      public void remove() {
        base.remove();
      }
      @Override
      public ValuesIterator copy() {
        return new ValuesIterator(base.copy());
      }
    }
  }
}
| |
/*
* Copyright 2019 NAVER Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.navercorp.pinpoint.profiler.sender;
import java.util.Objects;
import com.navercorp.pinpoint.common.profiler.concurrent.PinpointThreadFactory;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import java.util.Collection;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
/**
 * Asynchronous single-consumer executor. Producers enqueue items through
 * {@link #execute(Object)}; a dedicated daemon thread drains the queue in
 * batches (up to {@code maxDrainSize}) and hands them to the configured
 * {@link AsyncQueueingExecutorListener}.
 *
 * @author emeroad
 */
public class AsyncQueueingExecutor<T> implements Runnable {
    private final Logger logger;
    private final boolean isWarn;
    private final LinkedBlockingQueue<T> queue;
    private final AtomicBoolean isRun = new AtomicBoolean(true);
    private final Thread executeThread;
    private final String executorName;
    private final int maxDrainSize;
    // Caution. single thread only. this Collection is simpler than ArrayList.
    private final Collection<T> drain;
    private final AsyncQueueingExecutorListener<T> listener;
    public AsyncQueueingExecutor(int queueSize, String executorName, AsyncQueueingExecutorListener<T> listener) {
        Objects.requireNonNull(executorName, "executorName");
        // BUG FIX: assign every field BEFORE starting the consumer thread.
        // Previously the thread was started inside createExecuteThread(), before
        // 'executorName' and 'listener' were assigned, so run()/doExecute()
        // could observe null fields (unsafe publication of 'this').
        this.listener = Objects.requireNonNull(listener, "listener");
        this.logger = LogManager.getLogger(this.getClass().getName() + "@" + executorName);
        this.isWarn = logger.isWarnEnabled();
        this.maxDrainSize = 10;
        this.drain = new UnsafeArrayCollection<T>(maxDrainSize);
        this.queue = new LinkedBlockingQueue<T>(queueSize);
        this.executeThread = createExecuteThread(executorName);
        this.executorName = executeThread.getName();
        // Start last: Thread.start() establishes a happens-before edge, so the
        // worker thread is guaranteed to see all of the assignments above.
        this.executeThread.start();
    }
    /** Creates (but does not start) the daemon consumer thread. */
    private Thread createExecuteThread(String executorName) {
        final ThreadFactory threadFactory = new PinpointThreadFactory(executorName, true);
        return threadFactory.newThread(this);
    }
    @Override
    public void run() {
        logger.info("{} started.", executorName);
        doExecute();
    }
    /**
     * Consumer loop: batch-drain the queue when possible, otherwise block on a
     * timed poll for a single item. Exits when {@link #stop()} clears isRun,
     * then flushes whatever remains in the queue.
     */
    private void doExecute() {
        long timeout = 2000;
        drainStartEntry:
        while (isRun()) {
            try {
                final Collection<T> dtoList = getDrainQueue();
                final int drainSize = takeN(dtoList, this.maxDrainSize);
                if (drainSize > 0) {
                    doExecute(dtoList);
                    continue;
                }
                while (isRun()) {
                    final T dto = takeOne(timeout);
                    if (dto != null) {
                        doExecute(dto);
                        // Data is flowing again; resume batch draining.
                        continue drainStartEntry;
                    } else {
                        pollTimeout(timeout);
                    }
                }
            } catch (Throwable th) {
                // Keep the consumer alive no matter what the listener throws.
                logger.warn("{} doExecute(). Unexpected Error. Cause:{}", executorName, th.getMessage(), th);
            }
        }
        flushQueue();
    }
    /** Drains and dispatches anything left in the queue after shutdown. */
    private void flushQueue() {
        boolean debugEnabled = logger.isDebugEnabled();
        if (debugEnabled) {
            logger.debug("Loop is stop.");
        }
        while (true) {
            final Collection<T> dtoList = getDrainQueue();
            int drainSize = takeN(dtoList, this.maxDrainSize);
            if (drainSize == 0) {
                break;
            }
            if (debugEnabled) {
                logger.debug("flushData size {}", drainSize);
            }
            doExecute(dtoList);
        }
    }
    /** Blocking poll for one item; returns null on timeout or interrupt. */
    private T takeOne(long timeout) {
        try {
            return queue.poll(timeout, TimeUnit.MILLISECONDS);
        } catch (InterruptedException e) {
            // Restore interrupt status so the outer loop / stop() handshake works.
            Thread.currentThread().interrupt();
            return null;
        }
    }
    /** Non-blocking drain of up to maxDrainSize items into the reusable buffer. */
    private int takeN(Collection<T> drain, int maxDrainSize) {
        return queue.drainTo(drain, maxDrainSize);
    }
    /** Hook for subclasses; invoked after every empty timed poll. */
    protected void pollTimeout(long timeout) {
        // do nothing
    }
    /**
     * Enqueues data for asynchronous execution.
     *
     * @param data item to enqueue; null is rejected
     * @return true if accepted; false when data is null, the executor is shut
     *         down, or the queue is full (in which case the item is dropped)
     */
    public boolean execute(T data) {
        if (data == null) {
            if (isWarn) {
                logger.warn("execute(). data is null");
            }
            return false;
        }
        if (!isRun.get()) {
            if (isWarn) {
                logger.warn("{} is shutdown. discard data:{}", executorName, data);
            }
            return false;
        }
        boolean offer = queue.offer(data);
        if (!offer) {
            if (isWarn) {
                logger.warn("{} Drop data. queue is full. size:{}", executorName, queue.size());
            }
        }
        return offer;
    }
    private void doExecute(Collection<T> dtoList) {
        this.listener.execute(dtoList);
    }
    private void doExecute(T dto) {
        this.listener.execute(dto);
    }
    public boolean isEmpty() {
        return queue.isEmpty();
    }
    public boolean isRun() {
        return isRun.get();
    }
    /** Stops the consumer and waits up to 5 seconds for the final flush. */
    public void stop() {
        isRun.set(false);
        if (!isEmpty()) {
            logger.info("Wait 5 seconds. Flushing queued data.");
        }
        executeThread.interrupt();
        try {
            executeThread.join(5000);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            logger.warn("{} stopped incompletely.", executorName);
        }
        logger.info("{} stopped.", executorName);
    }
    /** Clears and returns the reusable drain buffer (consumer thread only). */
    Collection<T> getDrainQueue() {
        this.drain.clear();
        return drain;
    }
}
| |
/**
* Licensed to Apereo under one or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information regarding copyright ownership. Apereo
* licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use
* this file except in compliance with the License. You may obtain a copy of the License at the
* following location:
*
* <p>http://www.apache.org/licenses/LICENSE-2.0
*
* <p>Unless required by applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apereo.portal.groups;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apereo.portal.EntityIdentifier;
import org.apereo.portal.jdbc.RDBMServices;
import org.apereo.portal.services.GroupService;
import org.apereo.portal.spring.locator.CounterStoreLocator;
import org.apereo.portal.spring.locator.EntityTypesLocator;
import org.apereo.portal.utils.SqlTransaction;
/**
 * RDBMS-backed store for <code>EntityGroupImpl</code>: groups live in the
 * UP_GROUP table and group membership in UP_GROUP_MEMBERSHIP.
 */
public class RDBMEntityGroupStore implements IEntityGroupStore, IGroupConstants {
    private static final Log log = LogFactory.getLog(RDBMEntityGroupStore.class);
    private static RDBMEntityGroupStore singleton;
    // Constant SQL strings:
    private static String EQ = " = ";
    private static String QUOTE = "'";
    private static String EQUALS_PARAM = EQ + "?";
    // Constant strings for GROUP table:
    private static String GROUP_TABLE = "UP_GROUP";
    private static String GROUP_TABLE_ALIAS = "T1";
    private static String GROUP_TABLE_WITH_ALIAS = GROUP_TABLE + " " + GROUP_TABLE_ALIAS;
    private static String GROUP_ID_COLUMN = "GROUP_ID";
    private static String GROUP_CREATOR_COLUMN = "CREATOR_ID";
    private static String GROUP_TYPE_COLUMN = "ENTITY_TYPE_ID";
    private static String GROUP_NAME_COLUMN = "GROUP_NAME";
    private static String GROUP_DESCRIPTION_COLUMN = "DESCRIPTION";
    // SQL strings for GROUP crud:
    // NOTE(review): these mutable statics start null and appear to be built
    // lazily by getter methods elsewhere in this class (not visible here) --
    // confirm before relying on them directly.
    private static String allGroupColumns;
    private static String allGroupColumnsWithTableAlias;
    private static String countAMemberGroupSql;
    private static String countMemberGroupsNamedSql;
    private static String countAMemberEntitySql;
    private static String findParentGroupsForEntitySql;
    private static String findParentGroupsForGroupSql;
    private static String findGroupSql;
    private static String findMemberGroupKeysSql;
    private static String findMemberGroupsSql;
    private static String insertGroupSql;
    private static String updateGroupSql;
    // Constant strings for MEMBERS table:
    private static String MEMBER_TABLE = "UP_GROUP_MEMBERSHIP";
    private static String MEMBER_TABLE_ALIAS = "T2";
    private static String MEMBER_TABLE_WITH_ALIAS = MEMBER_TABLE + " " + MEMBER_TABLE_ALIAS;
    private static String MEMBER_GROUP_ID_COLUMN = "GROUP_ID";
    private static String MEMBER_MEMBER_SERVICE_COLUMN = "MEMBER_SERVICE";
    private static String MEMBER_MEMBER_KEY_COLUMN = "MEMBER_KEY";
    private static String MEMBER_IS_GROUP_COLUMN = "MEMBER_IS_GROUP";
    // Discriminator values for MEMBER_IS_GROUP: entity rows vs. nested groups.
    private static String MEMBER_IS_ENTITY = "F";
    private static String MEMBER_IS_GROUP = "T";
    // Set by initialize() from GroupServiceConfiguration (see constructor).
    private static String GROUP_NODE_SEPARATOR;
    // SQL strings for group MEMBERS crud:
    private static String allMemberColumns;
    private static String deleteMembersInGroupSql;
    private static String deleteMemberGroupSql;
    private static String deleteMemberEntitySql;
    private static String insertMemberSql;
    // SQL group search string
    // Case-insensitive LIKE search on group name, constrained by entity type.
    private static String searchGroupsPartial =
            "SELECT "
                    + GROUP_ID_COLUMN
                    + " FROM "
                    + GROUP_TABLE
                    + " WHERE "
                    + GROUP_TYPE_COLUMN
                    + "=? AND UPPER("
                    + GROUP_NAME_COLUMN
                    + ") LIKE UPPER(?)";
    // Case-insensitive exact-match search on group name, constrained by entity type.
    private static String searchGroups =
            "SELECT "
                    + GROUP_ID_COLUMN
                    + " FROM "
                    + GROUP_TABLE
                    + " WHERE "
                    + GROUP_TYPE_COLUMN
                    + "=? AND UPPER("
                    + GROUP_NAME_COLUMN
                    + ") = UPPER(?)";
/** RDBMEntityGroupStore constructor. */
public RDBMEntityGroupStore() {
super();
initialize();
}
    /**
     * Get the node separator character from the GroupServiceConfiguration. Default it to
     * IGroupConstants.NODE_SEPARATOR.
     */
    private void initialize() {
        String sep;
        try {
            sep = GroupServiceConfiguration.getConfiguration().getNodeSeparator();
        } catch (Exception ex) {
            // Deliberate best-effort: any configuration failure falls back to
            // the IGroupConstants default rather than aborting construction.
            sep = NODE_SEPARATOR;
        }
        // NOTE(review): an instance method writing a static field; this relies
        // on the store effectively being a singleton -- confirm before refactoring.
        GROUP_NODE_SEPARATOR = sep;
        if (log.isDebugEnabled()) {
            log.debug("RDBMEntityGroupStore.initialize(): Node separator set to " + sep);
        }
    }
    /**
     * Commits the transaction on the given connection by delegating to
     * {@link SqlTransaction}.
     *
     * @param conn java.sql.Connection
     * @exception java.sql.SQLException if the commit fails
     */
    protected static void commit(Connection conn) throws java.sql.SQLException {
        SqlTransaction.commit(conn);
    }
/**
* Answers if <code>IGroupMember</code> member is a member of <code>group</code>.
*
* @return boolean
* @param group org.apereo.portal.groups.IEntityGroup
* @param member org.apereo.portal.groups.IGroupMember
*/
public boolean contains(IEntityGroup group, IGroupMember member) throws GroupsException {
return (member.isGroup())
? containsGroup(group, (IEntityGroup) member)
: containsEntity(group, member);
}
private boolean containsEntity(IEntityGroup group, IGroupMember member) throws GroupsException {
String groupKey = group.getLocalKey();
String memberKey = member.getKey();
Connection conn = RDBMServices.getConnection();
try {
String sql = getCountAMemberEntitySql();
PreparedStatement ps = conn.prepareStatement(sql);
try {
ps.clearParameters();
ps.setString(1, groupKey);
ps.setString(2, memberKey);
if (log.isDebugEnabled())
log.debug(
"RDBMEntityGroupStore.containsEntity(): "
+ ps
+ " ("
+ groupKey
+ ", "
+ memberKey
+ ")");
ResultSet rs = ps.executeQuery();
try {
return (rs.next()) && (rs.getInt(1) > 0);
} finally {
rs.close();
}
} finally {
ps.close();
}
} catch (Exception e) {
log.error("RDBMEntityGroupStore.containsEntity(): " + e);
throw new GroupsException("Problem retrieving data from store: " + e);
} finally {
RDBMServices.releaseConnection(conn);
}
}
private boolean containsGroup(IEntityGroup group, IEntityGroup member) throws GroupsException {
String memberService = member.getServiceName().toString();
String groupKey = group.getLocalKey();
String memberKey = member.getLocalKey();
Connection conn = RDBMServices.getConnection();
try {
String sql = getCountAMemberGroupSql();
PreparedStatement ps = conn.prepareStatement(sql);
try {
ps.clearParameters();
ps.setString(1, groupKey);
ps.setString(2, memberKey);
ps.setString(3, memberService);
if (log.isDebugEnabled())
log.debug(
"RDBMEntityGroupStore.containsGroup(): "
+ ps
+ " ("
+ groupKey
+ ", "
+ memberKey
+ ", "
+ memberService
+ ")");
ResultSet rs = ps.executeQuery();
try {
return (rs.next()) && (rs.getInt(1) > 0);
} finally {
rs.close();
}
} finally {
ps.close();
}
} catch (Exception e) {
log.error("RDBMEntityGroupStore.containsGroup(): " + e);
throw new GroupsException("Problem retrieving data from store: " + e);
} finally {
RDBMServices.releaseConnection(conn);
}
}
/**
* If this entity exists, delete it.
*
* @param group org.apereo.portal.groups.IEntityGroup
*/
public void delete(IEntityGroup group) throws GroupsException {
if (existsInDatabase(group)) {
try {
primDelete(group);
} catch (SQLException sqle) {
throw new GroupsException("Problem deleting " + group, sqle);
}
}
}
/**
* Answer if the IEntityGroup entity exists in the database.
*
* @return boolean
* @param group IEntityGroup
*/
private boolean existsInDatabase(IEntityGroup group) throws GroupsException {
IEntityGroup ug = this.find(group.getLocalKey());
return ug != null;
}
    /**
     * Find and return an instance of the group.
     *
     * @param groupID the group ID
     * @return org.apereo.portal.groups.IEntityGroup, or null if not found
     */
    public IEntityGroup find(String groupID) throws GroupsException {
        // Delegates to primFind(groupID, false); the boolean flag presumably
        // selects a non-locking find -- confirm against primFind (not visible here).
        return primFind(groupID, false);
    }
/**
* Find the groups that this entity belongs to.
*
* @param ent the entity in question
* @return java.util.Iterator
*/
public java.util.Iterator findParentGroups(IEntity ent) throws GroupsException {
String memberKey = ent.getKey();
Integer type = EntityTypesLocator.getEntityTypes().getEntityIDFromType(ent.getLeafType());
return findParentGroupsForEntity(memberKey, type.intValue());
}
/**
* Find the groups that this group belongs to.
*
* @param group org.apereo.portal.groups.IEntityGroup
* @return java.util.Iterator
*/
public java.util.Iterator findParentGroups(IEntityGroup group) throws GroupsException {
String memberKey = group.getLocalKey();
String serviceName = group.getServiceName().toString();
Integer type = EntityTypesLocator.getEntityTypes().getEntityIDFromType(group.getLeafType());
return findParentGroupsForGroup(serviceName, memberKey, type.intValue());
}
/**
* Find the groups that this group member belongs to.
*
* @param gm the group member in question
* @return java.util.Iterator
*/
public Iterator findParentGroups(IGroupMember gm) throws GroupsException {
if (gm.isGroup()) {
IEntityGroup group = (IEntityGroup) gm;
return findParentGroups(group);
} else {
IEntity ent = (IEntity) gm;
return findParentGroups(ent);
}
}
/**
* Find the groups associated with this member key.
*
* @param memberKey
* @param type
* @return java.util.Iterator
*/
private java.util.Iterator findParentGroupsForEntity(String memberKey, int type)
throws GroupsException {
java.sql.Connection conn = null;
Collection groups = new ArrayList();
IEntityGroup eg = null;
try {
conn = RDBMServices.getConnection();
String sql = getFindParentGroupsForEntitySql();
PreparedStatement ps = conn.prepareStatement(sql);
try {
ps.setString(1, memberKey);
ps.setInt(2, type);
if (log.isDebugEnabled())
log.debug(
"RDBMEntityGroupStore.findParentGroupsForEntity(): "
+ ps
+ " ("
+ memberKey
+ ", "
+ type
+ ", memberIsGroup = F)");
java.sql.ResultSet rs = ps.executeQuery();
try {
while (rs.next()) {
eg = instanceFromResultSet(rs);
groups.add(eg);
}
} finally {
rs.close();
}
} finally {
ps.close();
}
} catch (Exception e) {
log.error("RDBMEntityGroupStore.findParentGroupsForEntity(): " + e);
throw new GroupsException("Problem retrieving containing groups: " + e);
} finally {
RDBMServices.releaseConnection(conn);
}
return groups.iterator();
}
    /**
     * Find the parent groups containing the group identified by ({@code serviceName},
     * {@code memberKey}) with entity-type id {@code type}.
     *
     * @param serviceName group service that owns the member group
     * @param memberKey local key of the member group
     * @param type entity-type ID used to filter the group rows
     * @return java.util.Iterator over the matching IEntityGroup instances
     */
    private java.util.Iterator findParentGroupsForGroup(
            String serviceName, String memberKey, int type) throws GroupsException {
        java.sql.Connection conn = null;
        Collection groups = new ArrayList();
        IEntityGroup eg = null;
        try {
            conn = RDBMServices.getConnection();
            String sql = getFindParentGroupsForGroupSql();
            PreparedStatement ps = conn.prepareStatement(sql);
            try {
                // Group members are keyed by (service, key); the SQL pins memberIsGroup = 'T'.
                ps.setString(1, serviceName);
                ps.setString(2, memberKey);
                ps.setInt(3, type);
                if (log.isDebugEnabled())
                    log.debug(
                            "RDBMEntityGroupStore.findParentGroupsForGroup(): "
                                    + ps
                                    + " ("
                                    + serviceName
                                    + ", "
                                    + memberKey
                                    + ", "
                                    + type
                                    + ", memberIsGroup = T)");
                java.sql.ResultSet rs = ps.executeQuery();
                try {
                    // One IEntityGroup per matching row.
                    while (rs.next()) {
                        eg = instanceFromResultSet(rs);
                        groups.add(eg);
                    }
                } finally {
                    rs.close();
                }
            } finally {
                ps.close();
            }
        } catch (Exception e) {
            log.error("RDBMEntityGroupStore.findParentGroupsForGroup(): " + e);
            throw new GroupsException("Problem retrieving containing groups: " + e);
        } finally {
            // Always hand the connection back to the pool, even after a failure.
            RDBMServices.releaseConnection(conn);
        }
        return groups.iterator();
    }
/**
* Find the <code>IEntities</code> that are members of the <code>IEntityGroup</code>.
*
* @param group the entity group in question
* @return java.util.Iterator
*/
public Iterator findEntitiesForGroup(IEntityGroup group) throws GroupsException {
Collection entities = new ArrayList();
Connection conn = null;
String groupID = group.getLocalKey();
Class cls = group.getLeafType();
try {
conn = RDBMServices.getConnection();
Statement stmnt = conn.createStatement();
try {
String query =
"SELECT "
+ MEMBER_MEMBER_KEY_COLUMN
+ " FROM "
+ MEMBER_TABLE
+ " WHERE "
+ MEMBER_GROUP_ID_COLUMN
+ " = '"
+ groupID
+ "' AND "
+ MEMBER_IS_GROUP_COLUMN
+ " = '"
+ MEMBER_IS_ENTITY
+ "'";
ResultSet rs = stmnt.executeQuery(query);
try {
while (rs.next()) {
String key = rs.getString(1);
IEntity e = newEntity(cls, key);
entities.add(e);
}
} finally {
rs.close();
}
} finally {
stmnt.close();
}
} catch (SQLException sqle) {
log.error("Problem retrieving Entities for Group: " + group, sqle);
throw new GroupsException("Problem retrieving Entities for Group", sqle);
} finally {
RDBMServices.releaseConnection(conn);
}
return entities.iterator();
}
/**
* Find and return an instance of the group.
*
* @param groupID the group ID
* @return org.apereo.portal.groups.ILockableEntityGroup
*/
public ILockableEntityGroup findLockable(String groupID) throws GroupsException {
return (ILockableEntityGroup) primFind(groupID, true);
}
    /**
     * Find the keys of groups that are members of group. Each key is the composite
     * "service" + GROUP_NODE_SEPARATOR + "local key" taken from the membership row.
     *
     * @param group the org.apereo.portal.groups.IEntityGroup
     * @return String[] of composite member-group keys
     */
    public String[] findMemberGroupKeys(IEntityGroup group) throws GroupsException {
        java.sql.Connection conn = null;
        Collection groupKeys = new ArrayList();
        String groupKey = null;
        try {
            conn = RDBMServices.getConnection();
            String sql = getFindMemberGroupKeysSql();
            PreparedStatement ps = conn.prepareStatement(sql);
            try {
                ps.setString(1, group.getLocalKey());
                if (log.isDebugEnabled())
                    log.debug(
                            "RDBMEntityGroupStore.findMemberGroupKeys(): "
                                    + ps
                                    + " ("
                                    + group.getLocalKey()
                                    + ")");
                java.sql.ResultSet rs = ps.executeQuery();
                try {
                    while (rs.next()) {
                        // Column 1 is the member's service, column 2 its local key.
                        groupKey = rs.getString(1) + GROUP_NODE_SEPARATOR + rs.getString(2);
                        groupKeys.add(groupKey);
                    }
                } finally {
                    rs.close();
                }
            } finally {
                ps.close();
            }
        } catch (Exception sqle) {
            log.error("RDBMEntityGroupStore.findMemberGroupKeys(): " + sqle);
            throw new GroupsException("Problem retrieving member group keys: " + sqle);
        } finally {
            RDBMServices.releaseConnection(conn);
        }
        return (String[]) groupKeys.toArray(new String[groupKeys.size()]);
    }
    /**
     * Find the IUserGroups that are members of the group.
     *
     * @param group org.apereo.portal.groups.IEntityGroup
     * @return java.util.Iterator over the member IEntityGroup instances
     */
    public Iterator findMemberGroups(IEntityGroup group) throws GroupsException {
        java.sql.Connection conn = null;
        Collection groups = new ArrayList();
        IEntityGroup eg = null;
        String serviceName = group.getServiceName().toString();
        String localKey = group.getLocalKey();
        try {
            conn = RDBMServices.getConnection();
            String sql = getFindMemberGroupsSql();
            PreparedStatement ps = conn.prepareStatement(sql);
            try {
                // Membership rows are matched on (group key, owning service).
                ps.setString(1, localKey);
                ps.setString(2, serviceName);
                if (log.isDebugEnabled())
                    log.debug(
                            "RDBMEntityGroupStore.findMemberGroups(): "
                                    + ps
                                    + " ("
                                    + localKey
                                    + ", "
                                    + serviceName
                                    + ")");
                java.sql.ResultSet rs = ps.executeQuery();
                try {
                    while (rs.next()) {
                        eg = instanceFromResultSet(rs);
                        groups.add(eg);
                    }
                } finally {
                    rs.close();
                }
            } finally {
                ps.close();
            }
        } catch (Exception sqle) {
            log.error("RDBMEntityGroupStore.findMemberGroups(): " + sqle);
            throw new GroupsException("Problem retrieving member groups: " + sqle);
        } finally {
            RDBMServices.releaseConnection(conn);
        }
        return groups.iterator();
    }
/** @return java.lang.String */
private static java.lang.String getAllGroupColumns() {
if (allGroupColumns == null) {
StringBuffer buff = new StringBuffer(100);
buff.append(GROUP_ID_COLUMN);
buff.append(", ");
buff.append(GROUP_CREATOR_COLUMN);
buff.append(", ");
buff.append(GROUP_TYPE_COLUMN);
buff.append(", ");
buff.append(GROUP_NAME_COLUMN);
buff.append(", ");
buff.append(GROUP_DESCRIPTION_COLUMN);
allGroupColumns = buff.toString();
}
return allGroupColumns;
}
/** @return java.lang.String */
private static java.lang.String getAllGroupColumnsWithTableAlias() {
if (allGroupColumnsWithTableAlias == null) {
StringBuffer buff = new StringBuffer(100);
buff.append(groupAlias(GROUP_ID_COLUMN));
buff.append(", ");
buff.append(groupAlias(GROUP_CREATOR_COLUMN));
buff.append(", ");
buff.append(groupAlias(GROUP_TYPE_COLUMN));
buff.append(", ");
buff.append(groupAlias(GROUP_NAME_COLUMN));
buff.append(", ");
buff.append(groupAlias(GROUP_DESCRIPTION_COLUMN));
allGroupColumnsWithTableAlias = buff.toString();
}
return allGroupColumnsWithTableAlias;
}
/** @return java.lang.String */
private static java.lang.String getAllMemberColumns() {
if (allMemberColumns == null) {
StringBuffer buff = new StringBuffer(100);
buff.append(MEMBER_GROUP_ID_COLUMN);
buff.append(", ");
buff.append(MEMBER_MEMBER_SERVICE_COLUMN);
buff.append(", ");
buff.append(MEMBER_MEMBER_KEY_COLUMN);
buff.append(", ");
buff.append(MEMBER_IS_GROUP_COLUMN);
allMemberColumns = buff.toString();
}
return allMemberColumns;
}
/** @return java.lang.String */
private static java.lang.String getCountAMemberEntitySql() {
if (countAMemberEntitySql == null) {
StringBuffer buff = new StringBuffer(100);
buff.append("SELECT COUNT(*) FROM " + MEMBER_TABLE);
buff.append(" WHERE " + MEMBER_GROUP_ID_COLUMN + EQUALS_PARAM);
buff.append(" AND " + MEMBER_MEMBER_KEY_COLUMN + EQUALS_PARAM);
buff.append(" AND " + MEMBER_IS_GROUP_COLUMN + EQ + sqlQuote(MEMBER_IS_ENTITY));
countAMemberEntitySql = buff.toString();
}
return countAMemberEntitySql;
}
/** @return java.lang.String */
private static java.lang.String getCountAMemberGroupSql() {
if (countAMemberGroupSql == null) {
StringBuffer buff = new StringBuffer(100);
buff.append("SELECT COUNT(*) FROM " + MEMBER_TABLE);
buff.append(" WHERE " + MEMBER_GROUP_ID_COLUMN + EQUALS_PARAM);
buff.append(" AND " + MEMBER_MEMBER_KEY_COLUMN + EQUALS_PARAM);
buff.append(" AND " + MEMBER_MEMBER_SERVICE_COLUMN + EQUALS_PARAM);
buff.append(" AND " + MEMBER_IS_GROUP_COLUMN + EQ + sqlQuote(MEMBER_IS_GROUP));
countAMemberGroupSql = buff.toString();
}
return countAMemberGroupSql;
}
/** @return java.lang.String */
private static java.lang.String getDeleteGroupSql(IEntityGroup group) {
StringBuffer buff = new StringBuffer(100);
buff.append("DELETE FROM ");
buff.append(GROUP_TABLE);
buff.append(" WHERE ");
buff.append(GROUP_ID_COLUMN + EQ + sqlQuote(group.getLocalKey()));
return buff.toString();
}
/** @return java.lang.String */
private static java.lang.String getDeleteMemberEntitySql() {
if (deleteMemberEntitySql == null) {
StringBuffer buff = new StringBuffer(100);
buff.append("DELETE FROM ");
buff.append(MEMBER_TABLE);
buff.append(" WHERE ");
buff.append(MEMBER_GROUP_ID_COLUMN + EQUALS_PARAM);
buff.append(" AND ");
buff.append(MEMBER_MEMBER_KEY_COLUMN + EQUALS_PARAM);
buff.append(" AND ");
buff.append(MEMBER_IS_GROUP_COLUMN + EQ + sqlQuote(MEMBER_IS_ENTITY));
deleteMemberEntitySql = buff.toString();
}
return deleteMemberEntitySql;
}
/** @return java.lang.String */
private static java.lang.String getDeleteMemberGroupSql() {
if (deleteMemberGroupSql == null) {
StringBuffer buff = new StringBuffer(100);
buff.append("DELETE FROM ");
buff.append(MEMBER_TABLE);
buff.append(" WHERE ");
buff.append(MEMBER_GROUP_ID_COLUMN + EQUALS_PARAM);
buff.append(" AND ");
buff.append(MEMBER_MEMBER_SERVICE_COLUMN + EQUALS_PARAM);
buff.append(" AND ");
buff.append(MEMBER_MEMBER_KEY_COLUMN + EQUALS_PARAM);
buff.append(" AND ");
buff.append(MEMBER_IS_GROUP_COLUMN + EQ + sqlQuote(MEMBER_IS_GROUP));
deleteMemberGroupSql = buff.toString();
}
return deleteMemberGroupSql;
}
    /**
     * @return java.lang.String DELETE prefix for all membership rows of a group; the caller
     *     appends the quoted group key (see the one-arg overload).
     */
    private static java.lang.String getDeleteMembersInGroupSql() {
        // NOTE(review): this uses GROUP_ID_COLUMN against MEMBER_TABLE while every other
        // member-table query uses MEMBER_GROUP_ID_COLUMN — presumably both constants name
        // the same column; verify before changing either.
        if (deleteMembersInGroupSql == null) {
            StringBuffer buff = new StringBuffer(100);
            buff.append("DELETE FROM ");
            buff.append(MEMBER_TABLE);
            buff.append(" WHERE ");
            buff.append(GROUP_ID_COLUMN + EQ);
            deleteMembersInGroupSql = buff.toString();
        }
        return deleteMembersInGroupSql;
    }
/** @return java.lang.String */
private static java.lang.String getDeleteMembersInGroupSql(IEntityGroup group) {
return getDeleteMembersInGroupSql() + sqlQuote(group.getLocalKey());
}
/** @return java.lang.String */
private static java.lang.String getFindParentGroupsForEntitySql() {
if (findParentGroupsForEntitySql == null) {
StringBuffer buff = new StringBuffer(500);
buff.append("SELECT ");
buff.append(getAllGroupColumnsWithTableAlias());
buff.append(" FROM " + GROUP_TABLE_WITH_ALIAS + ", " + MEMBER_TABLE_WITH_ALIAS);
buff.append(" WHERE ");
buff.append(groupAlias(GROUP_ID_COLUMN) + EQ);
buff.append(memberAlias(MEMBER_GROUP_ID_COLUMN));
buff.append(" AND ");
buff.append(memberAlias(MEMBER_MEMBER_KEY_COLUMN) + EQUALS_PARAM);
buff.append(" AND ");
buff.append(groupAlias(GROUP_TYPE_COLUMN) + EQUALS_PARAM);
buff.append(" AND ");
buff.append(memberAlias(MEMBER_IS_GROUP_COLUMN) + EQ + sqlQuote(MEMBER_IS_ENTITY));
findParentGroupsForEntitySql = buff.toString();
}
return findParentGroupsForEntitySql;
}
/** @return java.lang.String */
private static java.lang.String getFindParentGroupsForGroupSql() {
if (findParentGroupsForGroupSql == null) {
StringBuffer buff = new StringBuffer(500);
buff.append("SELECT ");
buff.append(getAllGroupColumnsWithTableAlias());
buff.append(" FROM ");
buff.append(GROUP_TABLE_WITH_ALIAS);
buff.append(", ");
buff.append(MEMBER_TABLE_WITH_ALIAS);
buff.append(" WHERE ");
buff.append(groupAlias(GROUP_ID_COLUMN) + EQ);
buff.append(memberAlias(MEMBER_GROUP_ID_COLUMN));
buff.append(" AND ");
buff.append(memberAlias(MEMBER_MEMBER_SERVICE_COLUMN) + EQUALS_PARAM);
buff.append(" AND ");
buff.append(memberAlias(MEMBER_MEMBER_KEY_COLUMN) + EQUALS_PARAM);
buff.append(" AND ");
buff.append(groupAlias(GROUP_TYPE_COLUMN) + EQUALS_PARAM);
buff.append(" AND ");
buff.append(memberAlias(MEMBER_IS_GROUP_COLUMN) + EQ + sqlQuote(MEMBER_IS_GROUP));
findParentGroupsForGroupSql = buff.toString();
}
return findParentGroupsForGroupSql;
}
/** @return java.lang.String */
private static java.lang.String getFindGroupSql() {
if (findGroupSql == null) {
StringBuffer buff = new StringBuffer(200);
buff.append("SELECT ");
buff.append(getAllGroupColumns());
buff.append(" FROM ");
buff.append(GROUP_TABLE);
buff.append(" WHERE ");
buff.append(GROUP_ID_COLUMN + EQUALS_PARAM);
findGroupSql = buff.toString();
}
return findGroupSql;
}
/** @return java.lang.String */
private static java.lang.String getFindMemberGroupKeysSql() {
if (findMemberGroupKeysSql == null) {
StringBuffer buff = new StringBuffer(200);
buff.append("SELECT ");
buff.append(MEMBER_MEMBER_SERVICE_COLUMN + ", " + MEMBER_MEMBER_KEY_COLUMN);
buff.append(" FROM ");
buff.append(MEMBER_TABLE);
buff.append(" WHERE ");
buff.append(MEMBER_GROUP_ID_COLUMN + EQUALS_PARAM);
buff.append(" AND ");
buff.append(MEMBER_IS_GROUP_COLUMN + EQ);
buff.append(sqlQuote(MEMBER_IS_GROUP));
findMemberGroupKeysSql = buff.toString();
}
return findMemberGroupKeysSql;
}
/** @return java.lang.String */
private static java.lang.String getFindMemberGroupsSql() {
if (findMemberGroupsSql == null) {
StringBuffer buff = new StringBuffer(500);
buff.append("SELECT ");
buff.append(getAllGroupColumnsWithTableAlias());
buff.append(" FROM ");
buff.append(GROUP_TABLE + " " + GROUP_TABLE_ALIAS);
buff.append(", ");
buff.append(MEMBER_TABLE + " " + MEMBER_TABLE_ALIAS);
buff.append(" WHERE ");
buff.append(groupAlias(GROUP_ID_COLUMN) + EQ);
buff.append(memberAlias(MEMBER_MEMBER_KEY_COLUMN));
buff.append(" AND ");
buff.append(memberAlias(MEMBER_IS_GROUP_COLUMN) + EQ);
buff.append(sqlQuote(MEMBER_IS_GROUP));
buff.append(" AND ");
buff.append(memberAlias(MEMBER_GROUP_ID_COLUMN) + EQUALS_PARAM);
buff.append(" AND ");
buff.append(memberAlias(MEMBER_MEMBER_SERVICE_COLUMN) + EQUALS_PARAM);
findMemberGroupsSql = buff.toString();
}
return findMemberGroupsSql;
}
/** @return java.lang.String */
private static java.lang.String getInsertGroupSql() {
if (insertGroupSql == null) {
StringBuffer buff = new StringBuffer(200);
buff.append("INSERT INTO ");
buff.append(GROUP_TABLE);
buff.append(" (");
buff.append(getAllGroupColumns());
buff.append(") VALUES (?, ?, ?, ?, ?)");
insertGroupSql = buff.toString();
}
return insertGroupSql;
}
/** @return java.lang.String */
private static java.lang.String getInsertMemberSql() {
if (insertMemberSql == null) {
StringBuffer buff = new StringBuffer(200);
buff.append("INSERT INTO ");
buff.append(MEMBER_TABLE);
buff.append(" (");
buff.append(getAllMemberColumns());
buff.append(") VALUES (?, ?, ?, ? )");
insertMemberSql = buff.toString();
}
return insertMemberSql;
}
/**
* @return java.lang.String
* @exception java.lang.Exception
*/
private String getNextKey() throws java.lang.Exception {
return Integer.toString(CounterStoreLocator.getCounterStore().getNextId(GROUP_TABLE));
}
/** @return java.lang.String */
private static java.lang.String getUpdateGroupSql() {
if (updateGroupSql == null) {
StringBuffer buff = new StringBuffer(200);
buff.append("UPDATE ");
buff.append(GROUP_TABLE);
buff.append(" SET ");
buff.append(GROUP_CREATOR_COLUMN + EQUALS_PARAM);
buff.append(", ");
buff.append(GROUP_TYPE_COLUMN + EQUALS_PARAM);
buff.append(", ");
buff.append(GROUP_NAME_COLUMN + EQUALS_PARAM);
buff.append(", ");
buff.append(GROUP_DESCRIPTION_COLUMN + EQUALS_PARAM);
buff.append(" WHERE ");
buff.append(GROUP_ID_COLUMN + EQUALS_PARAM);
updateGroupSql = buff.toString();
}
return updateGroupSql;
}
/**
* Find and return an instance of the group.
*
* @param rs the SQL result set
* @return org.apereo.portal.groups.IEntityGroup
*/
private IEntityGroup instanceFromResultSet(java.sql.ResultSet rs)
throws SQLException, GroupsException {
IEntityGroup eg = null;
String key = rs.getString(1);
String creatorID = rs.getString(2);
Integer entityTypeID = new Integer(rs.getInt(3));
Class entityType = EntityTypesLocator.getEntityTypes().getEntityTypeFromID(entityTypeID);
String groupName = rs.getString(4);
String description = rs.getString(5);
if (key != null) {
eg = newInstance(key, entityType, creatorID, groupName, description);
}
return eg;
}
/**
* Find and return an instance of the group.
*
* @param rs the SQL result set
* @return org.apereo.portal.groups.ILockableEntityGroup
*/
private ILockableEntityGroup lockableInstanceFromResultSet(java.sql.ResultSet rs)
throws SQLException, GroupsException {
ILockableEntityGroup eg = null;
String key = rs.getString(1);
String creatorID = rs.getString(2);
Integer entityTypeID = new Integer(rs.getInt(3));
Class entityType = EntityTypesLocator.getEntityTypes().getEntityTypeFromID(entityTypeID);
String groupName = rs.getString(4);
String description = rs.getString(5);
if (key != null) {
eg = newLockableInstance(key, entityType, creatorID, groupName, description);
}
return eg;
}
/** @return org.apereo.portal.groups.IEntity */
public IEntity newEntity(Class type, String key) throws GroupsException {
if (EntityTypesLocator.getEntityTypes().getEntityIDFromType(type) == null) {
throw new GroupsException("Invalid group type: " + type);
}
return GroupService.getEntity(key, type);
}
/** @return org.apereo.portal.groups.IEntityGroup */
public IEntityGroup newInstance(Class type) throws GroupsException {
if (EntityTypesLocator.getEntityTypes().getEntityIDFromType(type) == null) {
throw new GroupsException("Invalid group type: " + type);
}
try {
return new EntityGroupImpl(getNextKey(), type);
} catch (Exception ex) {
throw new GroupsException("Could not create new group", ex);
}
}
/** @return org.apereo.portal.groups.IEntityGroup */
private IEntityGroup newInstance(
String newKey,
Class newType,
String newCreatorID,
String newName,
String newDescription)
throws GroupsException {
EntityGroupImpl egi = new EntityGroupImpl(newKey, newType);
egi.setCreatorID(newCreatorID);
egi.primSetName(newName);
egi.setDescription(newDescription);
return egi;
}
/** @return org.apereo.portal.groups.ILockableEntityGroup */
private ILockableEntityGroup newLockableInstance(
String newKey,
Class newType,
String newCreatorID,
String newName,
String newDescription)
throws GroupsException {
LockableEntityGroupImpl group = new LockableEntityGroupImpl(newKey, newType);
group.setCreatorID(newCreatorID);
group.primSetName(newName);
group.setDescription(newDescription);
return group;
}
/** @return java.lang.String */
private static java.lang.String groupAlias(String column) {
return GROUP_TABLE_ALIAS + "." + column;
}
/** @return java.lang.String */
private static java.lang.String memberAlias(String column) {
return MEMBER_TABLE_ALIAS + "." + column;
}
    /**
     * Insert the entity into the database. Runs on the caller's connection; transaction
     * boundaries are managed by the caller (see {@link #update}).
     *
     * @param group org.apereo.portal.groups.IEntityGroup
     * @param conn the database connection
     */
    private void primAdd(IEntityGroup group, Connection conn) throws SQLException, GroupsException {
        try {
            PreparedStatement ps = conn.prepareStatement(getInsertGroupSql());
            try {
                Integer typeID =
                        EntityTypesLocator.getEntityTypes()
                                .getEntityIDFromType(group.getLeafType());
                // Parameter order follows getAllGroupColumns(): id, creator, type, name,
                // description.
                ps.setString(1, group.getLocalKey());
                ps.setString(2, group.getCreatorID());
                ps.setInt(3, typeID.intValue());
                ps.setString(4, group.getName());
                ps.setString(5, group.getDescription());
                if (log.isDebugEnabled())
                    log.debug(
                            "RDBMEntityGroupStore.primAdd(): "
                                    + ps
                                    + "("
                                    + group.getLocalKey()
                                    + ", "
                                    + group.getCreatorID()
                                    + ", "
                                    + typeID
                                    + ", "
                                    + group.getName()
                                    + ", "
                                    + group.getDescription()
                                    + ")");
                int rc = ps.executeUpdate();
                // Exactly one row must be inserted; anything else is a failure.
                if (rc != 1) {
                    String errString = "Problem adding " + group;
                    log.error(errString);
                    throw new GroupsException(errString);
                }
            } finally {
                ps.close();
            }
        } catch (java.sql.SQLException sqle) {
            log.error("Error inserting an entity into the database. Group:" + group, sqle);
            throw sqle;
        }
    }
    /**
     * Delete this entity from the database after first deleting its memberships. If we
     * catch a SQLException, we roll back and re-throw it.
     *
     * @param group org.apereo.portal.groups.IEntityGroup
     */
    private void primDelete(IEntityGroup group) throws SQLException {
        java.sql.Connection conn = null;
        // Both statements embed the group key as a quoted literal (see getDeleteGroupSql).
        String deleteGroupSql = getDeleteGroupSql(group);
        String deleteMembershipSql = getDeleteMembersInGroupSql(group);
        try {
            conn = RDBMServices.getConnection();
            Statement stmnt = conn.createStatement();
            // Both deletes must commit or roll back together.
            setAutoCommit(conn, false);
            try {
                if (log.isDebugEnabled())
                    log.debug("RDBMEntityGroupStore.primDelete(): " + deleteMembershipSql);
                stmnt.executeUpdate(deleteMembershipSql);
                if (log.isDebugEnabled())
                    log.debug("RDBMEntityGroupStore.primDelete(): " + deleteGroupSql);
                stmnt.executeUpdate(deleteGroupSql);
            } finally {
                stmnt.close();
            }
            commit(conn);
        } catch (SQLException sqle) {
            rollback(conn);
            throw sqle;
        } finally {
            // Restore auto-commit before returning the connection to the pool.
            try {
                setAutoCommit(conn, true);
            } finally {
                RDBMServices.releaseConnection(conn);
            }
        }
    }
    /**
     * Find and return an instance of the group.
     *
     * @param groupID the group ID
     * @param lockable when true, materialize the lockable implementation
     * @return org.apereo.portal.groups.IEntityGroup, or null when no row matches
     */
    private IEntityGroup primFind(String groupID, boolean lockable) throws GroupsException {
        IEntityGroup eg = null;
        java.sql.Connection conn = null;
        try {
            conn = RDBMServices.getConnection();
            String sql = getFindGroupSql();
            PreparedStatement ps = conn.prepareStatement(sql);
            try {
                ps.setString(1, groupID);
                if (log.isDebugEnabled())
                    log.debug("RDBMEntityGroupStore.find(): " + ps + " (" + groupID + ")");
                java.sql.ResultSet rs = ps.executeQuery();
                try {
                    // The id is the primary key, so at most one row; the loop keeps the
                    // last materialized instance.
                    while (rs.next()) {
                        eg =
                                (lockable)
                                        ? lockableInstanceFromResultSet(rs)
                                        : instanceFromResultSet(rs);
                    }
                } finally {
                    rs.close();
                }
            } finally {
                ps.close();
            }
        } catch (Exception e) {
            log.error("RDBMEntityGroupStore.find(): ", e);
            throw new GroupsException("Error retrieving " + groupID + ": ", e);
        } finally {
            RDBMServices.releaseConnection(conn);
        }
        return eg;
    }
    /**
     * Update the entity in the database. Runs on the caller's connection; transaction
     * boundaries are managed by the caller (see {@link #update}).
     *
     * @param group org.apereo.portal.groups.IEntityGroup
     * @param conn the database connection
     */
    private void primUpdate(IEntityGroup group, Connection conn)
            throws SQLException, GroupsException {
        try {
            PreparedStatement ps = conn.prepareStatement(getUpdateGroupSql());
            try {
                Integer typeID =
                        EntityTypesLocator.getEntityTypes()
                                .getEntityIDFromType(group.getLeafType());
                // SET columns first (creator, type, name, description), then the WHERE key.
                ps.setString(1, group.getCreatorID());
                ps.setInt(2, typeID.intValue());
                ps.setString(3, group.getName());
                ps.setString(4, group.getDescription());
                ps.setString(5, group.getLocalKey());
                if (log.isDebugEnabled())
                    log.debug(
                            "RDBMEntityGroupStore.primUpdate(): "
                                    + ps
                                    + "("
                                    + group.getCreatorID()
                                    + ", "
                                    + typeID
                                    + ", "
                                    + group.getName()
                                    + ", "
                                    + group.getDescription()
                                    + ", "
                                    + group.getLocalKey()
                                    + ")");
                int rc = ps.executeUpdate();
                // Exactly one row must be updated; anything else is a failure.
                if (rc != 1) {
                    String errString = "Problem updating " + group;
                    log.error(errString);
                    throw new GroupsException(errString);
                }
            } finally {
                ps.close();
            }
        } catch (java.sql.SQLException sqle) {
            log.error("Error updating entity in database. Group: " + group, sqle);
            throw sqle;
        }
    }
    /**
     * Insert and delete group membership rows. The transaction is maintained by the caller.
     *
     * @param egi org.apereo.portal.groups.EntityGroupImpl
     * @param conn the database connection
     */
    private void primUpdateMembers(EntityGroupImpl egi, Connection conn)
            throws java.sql.SQLException {
        String groupKey = egi.getLocalKey();
        String memberKey, isGroup, serviceName = null;
        try {
            // Phase 1: remove rows for members deleted from the group. Removed members are
            // split by kind because group rows are keyed by (service, key) while entity
            // rows are keyed by key alone.
            if (egi.hasDeletes()) {
                List deletedGroups = new ArrayList();
                List deletedEntities = new ArrayList();
                Iterator deletes = egi.getRemovedMembers().values().iterator();
                while (deletes.hasNext()) {
                    IGroupMember gm = (IGroupMember) deletes.next();
                    if (gm.isGroup()) {
                        deletedGroups.add(gm);
                    } else {
                        deletedEntities.add(gm);
                    }
                }
                if (!deletedGroups.isEmpty()) {
                    PreparedStatement psDeleteMemberGroup =
                            conn.prepareStatement(getDeleteMemberGroupSql());
                    try {
                        for (Iterator groups = deletedGroups.iterator(); groups.hasNext(); ) {
                            IEntityGroup removedGroup = (IEntityGroup) groups.next();
                            memberKey = removedGroup.getLocalKey();
                            isGroup = MEMBER_IS_GROUP;
                            serviceName = removedGroup.getServiceName().toString();
                            psDeleteMemberGroup.setString(1, groupKey);
                            psDeleteMemberGroup.setString(2, serviceName);
                            psDeleteMemberGroup.setString(3, memberKey);
                            if (log.isDebugEnabled())
                                log.debug(
                                        "RDBMEntityGroupStore.primUpdateMembers(): "
                                                + psDeleteMemberGroup
                                                + "("
                                                + groupKey
                                                + ", "
                                                + serviceName
                                                + ", "
                                                + memberKey
                                                + ", isGroup = T)");
                            psDeleteMemberGroup.executeUpdate();
                        } // for
                    } // try
                    finally {
                        psDeleteMemberGroup.close();
                    }
                } // if ( ! deletedGroups.isEmpty() )
                if (!deletedEntities.isEmpty()) {
                    PreparedStatement psDeleteMemberEntity =
                            conn.prepareStatement(getDeleteMemberEntitySql());
                    try {
                        for (Iterator entities = deletedEntities.iterator(); entities.hasNext(); ) {
                            IGroupMember removedEntity = (IGroupMember) entities.next();
                            memberKey = removedEntity.getUnderlyingEntityIdentifier().getKey();
                            isGroup = MEMBER_IS_ENTITY;
                            psDeleteMemberEntity.setString(1, groupKey);
                            psDeleteMemberEntity.setString(2, memberKey);
                            if (log.isDebugEnabled())
                                log.debug(
                                        "RDBMEntityGroupStore.primUpdateMembers(): "
                                                + psDeleteMemberEntity
                                                + "("
                                                + groupKey
                                                + ", "
                                                + memberKey
                                                + ", "
                                                + "isGroup = F)");
                            psDeleteMemberEntity.executeUpdate();
                        } // for
                    } // try
                    finally {
                        psDeleteMemberEntity.close();
                    }
                } // if ( ! deletedEntities.isEmpty() )
            }
            // Phase 2: insert one membership row for each newly added member, reusing a
            // single prepared statement for all of them.
            if (egi.hasAdds()) {
                PreparedStatement psAdd = conn.prepareStatement(getInsertMemberSql());
                try {
                    Iterator adds = egi.getAddedMembers().values().iterator();
                    while (adds.hasNext()) {
                        IGroupMember addedGM = (IGroupMember) adds.next();
                        memberKey = addedGM.getKey();
                        if (addedGM.isGroup()) {
                            IEntityGroup addedGroup = (IEntityGroup) addedGM;
                            isGroup = MEMBER_IS_GROUP;
                            serviceName = addedGroup.getServiceName().toString();
                            memberKey = addedGroup.getLocalKey();
                        } else {
                            isGroup = MEMBER_IS_ENTITY;
                            // Entities are recorded under the owning group's service.
                            serviceName = egi.getServiceName().toString();
                            memberKey = addedGM.getUnderlyingEntityIdentifier().getKey();
                        }
                        psAdd.setString(1, groupKey);
                        psAdd.setString(2, serviceName);
                        psAdd.setString(3, memberKey);
                        psAdd.setString(4, isGroup);
                        if (log.isDebugEnabled())
                            log.debug(
                                    "RDBMEntityGroupStore.primUpdateMembers(): "
                                            + psAdd
                                            + "("
                                            + groupKey
                                            + ", "
                                            + memberKey
                                            + ", "
                                            + isGroup
                                            + ")");
                        psAdd.executeUpdate();
                    }
                } finally {
                    psAdd.close();
                }
            }
        } catch (SQLException sqle) {
            log.error("Error inserting/deleting membership rows.", sqle);
            throw sqle;
        }
    }
    /**
     * Roll back the current transaction on the connection.
     *
     * @param conn java.sql.Connection
     * @exception java.sql.SQLException
     */
    protected static void rollback(Connection conn) throws java.sql.SQLException {
        SqlTransaction.rollback(conn);
    }
    /**
     * Search for groups of the given leaf type whose name matches {@code query} according
     * to {@code method} (IS / STARTS_WITH / ENDS_WITH / CONTAINS).
     *
     * <p>NOTE(review): any exception raised during the search is logged and swallowed,
     * returning whatever results were gathered so far — presumably intentional best-effort
     * behavior; confirm.
     *
     * @param query the name text to match
     * @param method one of IS, STARTS_WITH, ENDS_WITH, CONTAINS
     * @param leaftype entity type of the groups' leaves
     * @return EntityIdentifier[] of the matching groups
     */
    public EntityIdentifier[] searchForGroups(String query, int method, Class leaftype)
            throws GroupsException {
        EntityIdentifier[] r = new EntityIdentifier[0];
        ArrayList ar = new ArrayList();
        Connection conn = null;
        PreparedStatement ps = null;
        int type = EntityTypesLocator.getEntityTypes().getEntityIDFromType(leaftype).intValue();
        //System.out.println("Checking out groups of leaftype "+leaftype.getName()+" or "+type);
        try {
            conn = RDBMServices.getConnection();
            // Pick the statement and decorate the query text with SQL LIKE wildcards as
            // required by the match mode; the query value itself is bound as a parameter.
            switch (method) {
                case IS:
                    ps = conn.prepareStatement(RDBMEntityGroupStore.searchGroups);
                    break;
                case STARTS_WITH:
                    query = query + "%";
                    ps = conn.prepareStatement(RDBMEntityGroupStore.searchGroupsPartial);
                    break;
                case ENDS_WITH:
                    query = "%" + query;
                    ps = conn.prepareStatement(RDBMEntityGroupStore.searchGroupsPartial);
                    break;
                case CONTAINS:
                    query = "%" + query + "%";
                    ps = conn.prepareStatement(RDBMEntityGroupStore.searchGroupsPartial);
                    break;
                default:
                    throw new GroupsException("Unknown search type");
            }
            try {
                ps.clearParameters();
                ps.setInt(1, type);
                ps.setString(2, query);
                ResultSet rs = ps.executeQuery();
                try {
                    //System.out.println(ps.toString());
                    while (rs.next()) {
                        //System.out.println("result");
                        ar.add(
                                new EntityIdentifier(
                                        rs.getString(1), ICompositeGroupService.GROUP_ENTITY_TYPE));
                    }
                } finally {
                    close(rs);
                }
            } finally {
                close(ps);
            }
        } catch (Exception e) {
            log.error("RDBMChannelDefSearcher.searchForEntities(): " + ps, e);
        } finally {
            RDBMServices.releaseConnection(conn);
        }
        return (EntityIdentifier[]) ar.toArray(r);
    }
    /**
     * Set the auto-commit mode on the connection.
     *
     * @param conn java.sql.Connection
     * @param newValue boolean the desired auto-commit mode
     * @exception java.sql.SQLException The exception description.
     */
    protected static void setAutoCommit(Connection conn, boolean newValue)
            throws java.sql.SQLException {
        SqlTransaction.setAutoCommit(conn, newValue);
    }
    /**
     * Lazily create and return the single shared store instance; synchronized so only one
     * instance is ever created.
     *
     * @return org.apereo.portal.groups.RDBMEntityGroupStore
     */
    public static synchronized RDBMEntityGroupStore singleton() throws GroupsException {
        if (singleton == null) {
            singleton = new RDBMEntityGroupStore();
        }
        return singleton;
    }
/** @return java.lang.String */
private static java.lang.String sqlQuote(Object o) {
return QUOTE + o + QUOTE;
}
/**
* Commit this entity AND ITS MEMBERSHIPS to the underlying store.
*
* @param group org.apereo.portal.groups.IEntityGroup
*/
public void update(IEntityGroup group) throws GroupsException {
Connection conn = null;
boolean exists = existsInDatabase(group);
try {
conn = RDBMServices.getConnection();
setAutoCommit(conn, false);
try {
if (exists) {
primUpdate(group, conn);
} else {
primAdd(group, conn);
}
primUpdateMembers((EntityGroupImpl) group, conn);
commit(conn);
} catch (Exception ex) {
rollback(conn);
throw new GroupsException("Problem updating " + this + ex);
}
} catch (SQLException sqlex) {
throw new GroupsException(sqlex);
} finally {
if (conn != null) {
try {
setAutoCommit(conn, true);
} catch (SQLException sqle) {
throw new GroupsException(sqle);
} finally {
RDBMServices.releaseConnection(conn);
}
}
}
}
    /**
     * Insert and delete group membership rows inside a transaction. No-op when the group
     * has no pending membership changes.
     *
     * @param eg org.apereo.portal.groups.IEntityGroup
     */
    public void updateMembers(IEntityGroup eg) throws GroupsException {
        Connection conn = null;
        EntityGroupImpl egi = (EntityGroupImpl) eg;
        // Only touch the database when there are pending adds/deletes.
        if (egi.isDirty())
            try {
                conn = RDBMServices.getConnection();
                setAutoCommit(conn, false);
                try {
                    primUpdateMembers(egi, conn);
                    commit(conn);
                } catch (SQLException sqle) {
                    rollback(conn);
                    throw new GroupsException("Problem updating memberships for " + egi, sqle);
                }
            } catch (SQLException sqlex) {
                throw new GroupsException(sqlex);
            } finally {
                // Restore auto-commit before returning the connection to the pool.
                if (conn != null) {
                    try {
                        setAutoCommit(conn, true);
                    } catch (SQLException sqle) {
                        throw new GroupsException(sqle);
                    } finally {
                        RDBMServices.releaseConnection(conn);
                    }
                }
            }
    }
private static final void close(final Statement statement) {
if (statement != null) {
try {
statement.close();
} catch (SQLException e) {
log.warn("problem closing statement", e);
}
}
}
private static final void close(final ResultSet resultset) {
if (resultset != null) {
try {
resultset.close();
} catch (SQLException e) {
log.warn("problem closing resultset", e);
}
}
}
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.ingest.geoip;
import com.maxmind.db.NoCache;
import com.maxmind.db.Reader;
import com.maxmind.geoip2.DatabaseReader;
import com.maxmind.geoip2.exception.AddressNotFoundException;
import com.maxmind.geoip2.model.AbstractResponse;
import com.maxmind.geoip2.model.AsnResponse;
import com.maxmind.geoip2.model.CityResponse;
import com.maxmind.geoip2.model.CountryResponse;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.lucene.util.SetOnce;
import org.elasticsearch.SpecialPermission;
import org.elasticsearch.common.Booleans;
import org.elasticsearch.common.CheckedBiFunction;
import org.elasticsearch.common.CheckedSupplier;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.logging.HeaderWarning;
import org.elasticsearch.core.internal.io.IOUtils;
import java.io.Closeable;
import java.io.IOException;
import java.io.InputStream;
import java.net.InetAddress;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.time.Duration;
import java.util.Objects;
import java.util.concurrent.atomic.AtomicInteger;
/**
 * Facilitates lazy loading of the database reader, so that when the geoip plugin is installed, but not used,
 * no memory is being wasted on the database reader.
 *
 * <p>Instances are reference-counted via {@link #preLookup()} / {@link #postLookup()} so that an in-flight
 * lookup keeps the underlying {@link DatabaseReader} (and its backing file) alive even if the loader is
 * concurrently {@link #close() closed} (e.g. because a newer database file was downloaded).
 */
class DatabaseReaderLazyLoader implements Closeable {

    // Escape hatch: force the MaxMind reader to load the database fully on-heap instead of memory-mapping it.
    private static final boolean LOAD_DATABASE_ON_HEAP =
        Booleans.parseBoolean(System.getProperty("es.geoip.load_db_on_heap", "false"));

    private static final Logger LOGGER = LogManager.getLogger(DatabaseReaderLazyLoader.class);

    // MD5 of the database file, exposed via getMd5(). NOTE(review): not validated by this class itself.
    private final String md5;
    private final GeoIpCache cache;
    private final Path databasePath;
    // Opens the database on demand; invoked at most once, on first get() (see double-checked locking below).
    private final CheckedSupplier<DatabaseReader, IOException> loader;
    // Epoch millis of the last database update; stays 0 for default databases and databases from the config
    // dir, which disables the staleness checks in get().
    private volatile long lastUpdate;

    // Lazily-initialized reader; SetOnce is used as a publish-once holder guarded by synchronized in get().
    final SetOnce<DatabaseReader> databaseReader;

    // cache the database type so that we do not re-read it on every pipeline execution
    final SetOnce<String> databaseType;

    private volatile boolean deleteDatabaseFileOnClose;
    // Reference count with a closed-state marker encoded in the sign:
    //   >= 0  : open; value == number of in-flight lookups
    //   <  0  : close() was called; value == (-1 - inFlightLookups), converging to -1 as lookups finish,
    //           at which point doClose() actually releases resources.
    private final AtomicInteger currentUsages = new AtomicInteger(0);

    DatabaseReaderLazyLoader(GeoIpCache cache, Path databasePath, String md5) {
        this(cache, databasePath, md5, createDatabaseLoader(databasePath));
    }

    DatabaseReaderLazyLoader(GeoIpCache cache, Path databasePath, String md5, CheckedSupplier<DatabaseReader, IOException> loader) {
        this.cache = cache;
        this.databasePath = Objects.requireNonNull(databasePath);
        this.md5 = md5;
        this.loader = Objects.requireNonNull(loader);
        this.databaseReader = new SetOnce<>();
        this.databaseType = new SetOnce<>();
    }

    /**
     * Read the database type from the database. We do this manually instead of relying on the built-in mechanism to avoid reading the
     * entire database into memory merely to read the type. This is especially important to maintain on master nodes where pipelines are
     * validated. If we read the entire database into memory, we could potentially run into low-memory constraints on such nodes where
     * loading this data would otherwise be wasteful if they are not also ingest nodes.
     *
     * @return the database type
     * @throws IOException if an I/O exception occurs reading the database type
     */
    final String getDatabaseType() throws IOException {
        // Double-checked locking on the SetOnce holder: only the first caller pays the parsing cost.
        if (databaseType.get() == null) {
            synchronized (databaseType) {
                if (databaseType.get() == null) {
                    final long fileSize = databaseFileSize();
                    // The metadata we need must fit in the 512-byte tail we read below.
                    if (fileSize <= 512) {
                        throw new IOException("unexpected file length [" + fileSize + "] for [" + databasePath + "]");
                    }
                    // UTF-8 bytes of the metadata key "database_type" we scan for in the file tail.
                    final int[] databaseTypeMarker = {'d', 'a', 't', 'a', 'b', 'a', 's', 'e', '_', 't', 'y', 'p', 'e'};
                    try (InputStream in = databaseInputStream()) {
                        // read the last 512 bytes
                        final long skipped = in.skip(fileSize - 512);
                        if (skipped != fileSize - 512) {
                            throw new IOException("failed to skip [" + (fileSize - 512) + "] bytes while reading [" + databasePath + "]");
                        }
                        final byte[] tail = new byte[512];
                        // InputStream.read may return fewer bytes than requested; loop until the buffer is full.
                        int read = 0;
                        do {
                            final int actualBytesRead = in.read(tail, read, 512 - read);
                            if (actualBytesRead == -1) {
                                throw new IOException("unexpected end of stream [" + databasePath + "] after reading [" + read + "] bytes");
                            }
                            read += actualBytesRead;
                        } while (read != 512);
                        // find the database_type header
                        int metadataOffset = -1;
                        int markerOffset = 0;
                        for (int i = 0; i < tail.length; i++) {
                            byte b = tail[i];
                            if (b == databaseTypeMarker[markerOffset]) {
                                markerOffset++;
                            } else {
                                // mismatch: restart the marker match from its beginning
                                markerOffset = 0;
                            }
                            if (markerOffset == databaseTypeMarker.length) {
                                // metadataOffset points at the control byte following the "database_type" key
                                metadataOffset = i + 1;
                                break;
                            }
                        }
                        if (metadataOffset == -1) {
                            throw new IOException("database type marker not found");
                        }
                        // read the database type
                        // Control byte layout: top 3 bits = value type, low 5 bits = payload size.
                        final int offsetByte = tail[metadataOffset] & 0xFF;
                        final int type = offsetByte >>> 5;
                        // Type 2 is a UTF-8 string in the MaxMind DB format; anything else is unexpected here.
                        if (type != 2) {
                            throw new IOException("type must be UTF-8 string");
                        }
                        // NOTE(review): sizes >= 29 use extended length encoding which this does not handle;
                        // presumably database type names are always shorter -- confirm against the format spec.
                        int size = offsetByte & 0x1f;
                        databaseType.set(new String(tail, metadataOffset + 1, size, StandardCharsets.UTF_8));
                    }
                }
            }
        }
        return databaseType.get();
    }

    // Overridable for tests: size of the database file on disk.
    long databaseFileSize() throws IOException {
        return Files.size(databasePath);
    }

    // Overridable for tests: raw stream over the database file.
    InputStream databaseInputStream() throws IOException {
        return Files.newInputStream(databasePath);
    }

    CityResponse getCity(InetAddress ipAddress) {
        return getResponse(ipAddress, DatabaseReader::city);
    }

    CountryResponse getCountry(InetAddress ipAddress) {
        return getResponse(ipAddress, DatabaseReader::country);
    }

    AsnResponse getAsn(InetAddress ipAddress) {
        return getResponse(ipAddress, DatabaseReader::asn);
    }

    /**
     * Acquires a usage reference before a lookup. Returns {@code false} if the loader has already been
     * closed (count is negative), in which case the caller must not use this loader.
     */
    boolean preLookup() {
        return currentUsages.updateAndGet(current -> current < 0 ? current : current + 1) > 0;
    }

    /**
     * Releases the reference taken by {@link #preLookup()}. While open (count > 0) this simply decrements;
     * after close() the negative count is incremented toward -1, and the last finishing lookup performs the
     * deferred {@link #doClose()}.
     */
    void postLookup() throws IOException {
        if (currentUsages.updateAndGet(current -> current > 0 ? current - 1 : current + 1) == -1) {
            doClose();
        }
    }

    // Visible for tests/monitoring: raw reference count (see field comment for the sign encoding).
    int current() {
        return currentUsages.get();
    }

    // Runs the given lookup against the (lazily loaded) reader, memoizing the result in the GeoIpCache.
    private <T extends AbstractResponse> T getResponse(InetAddress ipAddress,
                                                       CheckedBiFunction<DatabaseReader, InetAddress, T, Exception> responseProvider) {
        // doPrivileged: the MaxMind reader performs file I/O that plugin code is not directly permitted to do.
        SpecialPermission.check();
        return AccessController.doPrivileged((PrivilegedAction<T>) () ->
            cache.putIfAbsent(ipAddress, databasePath.toString(), ip -> {
                try {
                    return responseProvider.apply(get(), ipAddress);
                } catch (AddressNotFoundException e) {
                    // Translate to an unchecked marker exception the processor treats as "no data", not an error.
                    throw new GeoIpProcessor.AddressNotFoundRuntimeException(e);
                } catch (Exception e) {
                    throw new RuntimeException(e);
                }
            }));
    }

    /**
     * Returns the underlying reader, loading it on first use. Also enforces staleness rules for downloaded
     * databases: warn after 25 days without an update, fail after 30.
     */
    DatabaseReader get() throws IOException {
        //only downloaded databases will have lastUpdate != 0, we never update it for default databases or databases from config dir
        if (lastUpdate != 0) {
            Path fileName = databasePath.getFileName();
            if (System.currentTimeMillis() - lastUpdate > Duration.ofDays(30).toMillis()) {
                throw new IllegalStateException("database [" + fileName + "] was not updated for 30 days and is disabled");
            } else if (System.currentTimeMillis() - lastUpdate > Duration.ofDays(25).toMillis()) {
                HeaderWarning.addWarning(
                    "database [{}] was not updated for over 25 days, ingestion will fail if there is no update for 30 days", fileName);
            }
        }
        // Double-checked locking: load the reader at most once, without synchronizing on the fast path.
        if (databaseReader.get() == null) {
            synchronized (databaseReader) {
                if (databaseReader.get() == null) {
                    databaseReader.set(loader.get());
                    LOGGER.debug("loaded [{}] geo-IP database", databasePath);
                }
            }
        }
        return databaseReader.get();
    }

    String getMd5() {
        return md5;
    }

    /**
     * Closes this loader, optionally deleting the database file once the last in-flight lookup finishes.
     */
    public void close(boolean deleteDatabaseFileOnClose) throws IOException {
        this.deleteDatabaseFileOnClose = deleteDatabaseFileOnClose;
        close();
    }

    @Override
    public void close() throws IOException {
        // Flip the count into the negative "closed" encoding (u -> -1 - u). If there were no in-flight
        // lookups the result is -1 and we can release resources immediately; otherwise the last lookup's
        // postLookup() will do it.
        // NOTE(review): a second close() would flip an already-negative count back to >= 0, effectively
        // "re-opening" the counter; this presumably relies on close() being invoked at most once -- confirm
        // with callers.
        if (currentUsages.updateAndGet(u -> -1 - u) == -1) {
            doClose();
        }
    }

    // Actually releases resources: closes the reader, purges cache entries, optionally deletes the file.
    private void doClose() throws IOException {
        IOUtils.close(databaseReader.get());
        int numEntriesEvicted = cache.purgeCacheEntriesForDatabase(databasePath);
        LOGGER.info("evicted [{}] entries from cache after reloading database [{}]", numEntriesEvicted, databasePath);
        if (deleteDatabaseFileOnClose) {
            LOGGER.info("deleting [{}]", databasePath);
            Files.delete(databasePath);
        }
    }

    // Default production loader: opens the file with the no-op record cache (GeoIpCache caches at a higher level).
    private static CheckedSupplier<DatabaseReader, IOException> createDatabaseLoader(Path databasePath) {
        return () -> {
            DatabaseReader.Builder builder = createDatabaseBuilder(databasePath).withCache(NoCache.getInstance());
            if (LOAD_DATABASE_ON_HEAP) {
                builder.fileMode(Reader.FileMode.MEMORY);
            } else {
                builder.fileMode(Reader.FileMode.MEMORY_MAPPED);
            }
            return builder.build();
        };
    }

    @SuppressForbidden(reason = "Maxmind API requires java.io.File")
    private static DatabaseReader.Builder createDatabaseBuilder(Path databasePath) {
        return new DatabaseReader.Builder(databasePath.toFile());
    }

    // Records when the database file was last updated (downloaded databases only); enables staleness checks in get().
    void setLastUpdate(long lastUpdate) {
        this.lastUpdate = lastUpdate;
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.resourcemanager.webapp;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.StringReader;
import javax.ws.rs.core.MediaType;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.service.Service.STATE;
import org.apache.hadoop.util.VersionInfo;
import org.apache.hadoop.yarn.api.records.QueueState;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.resourcemanager.ClusterMetrics;
import org.apache.hadoop.yarn.server.resourcemanager.MockRM;
import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.QueueMetrics;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fifo.FifoScheduler;
import org.apache.hadoop.yarn.util.YarnVersionInfo;
import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
import org.apache.hadoop.yarn.webapp.JerseyTestBase;
import org.apache.hadoop.yarn.webapp.WebServicesTestUtils;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;
import org.xml.sax.InputSource;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.servlet.GuiceServletContextListener;
import com.google.inject.servlet.ServletModule;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.ClientResponse.Status;
import com.sun.jersey.api.client.UniformInterfaceException;
import com.sun.jersey.api.client.WebResource;
import com.sun.jersey.guice.spi.container.servlet.GuiceContainer;
import com.sun.jersey.test.framework.WebAppDescriptor;
public class TestRMWebServices extends JerseyTestBase {
private static MockRM rm;
private Injector injector = Guice.createInjector(new ServletModule() {
@Override
protected void configureServlets() {
bind(JAXBContextResolver.class);
bind(RMWebServices.class);
bind(GenericExceptionHandler.class);
Configuration conf = new Configuration();
conf.setClass(YarnConfiguration.RM_SCHEDULER, FifoScheduler.class,
ResourceScheduler.class);
rm = new MockRM(conf);
bind(ResourceManager.class).toInstance(rm);
serve("/*").with(GuiceContainer.class);
}
});
public class GuiceServletConfig extends GuiceServletContextListener {
@Override
protected Injector getInjector() {
return injector;
}
}
@Before
@Override
public void setUp() throws Exception {
super.setUp();
}
public TestRMWebServices() {
super(new WebAppDescriptor.Builder(
"org.apache.hadoop.yarn.server.resourcemanager.webapp")
.contextListenerClass(GuiceServletConfig.class)
.filterClass(com.google.inject.servlet.GuiceFilter.class)
.contextPath("jersey-guice-filter").servletPath("/").build());
}
@BeforeClass
public static void initClusterMetrics() {
ClusterMetrics clusterMetrics = ClusterMetrics.getMetrics();
clusterMetrics.incrDecommisionedNMs();
clusterMetrics.incrNumActiveNodes();
clusterMetrics.incrNumLostNMs();
clusterMetrics.incrNumRebootedNMs();
clusterMetrics.incrNumUnhealthyNMs();
}
@Test
public void testInfoXML() throws JSONException, Exception {
WebResource r = resource();
ClientResponse response = r.path("ws").path("v1").path("cluster")
.path("info").accept("application/xml").get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
String xml = response.getEntity(String.class);
verifyClusterInfoXML(xml);
}
@Test
public void testInvalidUri() throws JSONException, Exception {
WebResource r = resource();
String responseStr = "";
try {
responseStr = r.path("ws").path("v1").path("cluster").path("bogus")
.accept(MediaType.APPLICATION_JSON).get(String.class);
fail("should have thrown exception on invalid uri");
} catch (UniformInterfaceException ue) {
ClientResponse response = ue.getResponse();
assertEquals(Status.NOT_FOUND, response.getClientResponseStatus());
WebServicesTestUtils.checkStringMatch(
"error string exists and shouldn't", "", responseStr);
}
}
@Test
public void testInvalidUri2() throws JSONException, Exception {
WebResource r = resource();
String responseStr = "";
try {
responseStr = r.accept(MediaType.APPLICATION_JSON).get(String.class);
fail("should have thrown exception on invalid uri");
} catch (UniformInterfaceException ue) {
ClientResponse response = ue.getResponse();
assertEquals(Status.NOT_FOUND, response.getClientResponseStatus());
WebServicesTestUtils.checkStringMatch(
"error string exists and shouldn't", "", responseStr);
}
}
@Test
public void testInvalidAccept() throws JSONException, Exception {
WebResource r = resource();
String responseStr = "";
try {
responseStr = r.path("ws").path("v1").path("cluster")
.accept(MediaType.TEXT_PLAIN).get(String.class);
fail("should have thrown exception on invalid uri");
} catch (UniformInterfaceException ue) {
ClientResponse response = ue.getResponse();
assertEquals(Status.INTERNAL_SERVER_ERROR,
response.getClientResponseStatus());
WebServicesTestUtils.checkStringMatch(
"error string exists and shouldn't", "", responseStr);
}
}
@Test
public void testCluster() throws JSONException, Exception {
WebResource r = resource();
ClientResponse response = r.path("ws").path("v1").path("cluster")
.accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject json = response.getEntity(JSONObject.class);
verifyClusterInfo(json);
}
@Test
public void testClusterSlash() throws JSONException, Exception {
WebResource r = resource();
// test with trailing "/" to make sure acts same as without slash
ClientResponse response = r.path("ws").path("v1").path("cluster/")
.accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject json = response.getEntity(JSONObject.class);
verifyClusterInfo(json);
}
@Test
public void testClusterDefault() throws JSONException, Exception {
WebResource r = resource();
// test with trailing "/" to make sure acts same as without slash
ClientResponse response = r.path("ws").path("v1").path("cluster")
.get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject json = response.getEntity(JSONObject.class);
verifyClusterInfo(json);
}
@Test
public void testInfo() throws JSONException, Exception {
WebResource r = resource();
ClientResponse response = r.path("ws").path("v1").path("cluster")
.path("info").accept(MediaType.APPLICATION_JSON)
.get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject json = response.getEntity(JSONObject.class);
verifyClusterInfo(json);
}
@Test
public void testInfoSlash() throws JSONException, Exception {
// test with trailing "/" to make sure acts same as without slash
WebResource r = resource();
ClientResponse response = r.path("ws").path("v1").path("cluster")
.path("info/").accept(MediaType.APPLICATION_JSON)
.get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject json = response.getEntity(JSONObject.class);
verifyClusterInfo(json);
}
@Test
public void testInfoDefault() throws JSONException, Exception {
WebResource r = resource();
ClientResponse response = r.path("ws").path("v1").path("cluster")
.path("info").get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject json = response.getEntity(JSONObject.class);
verifyClusterInfo(json);
}
public void verifyClusterInfoXML(String xml) throws JSONException, Exception {
DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
DocumentBuilder db = dbf.newDocumentBuilder();
InputSource is = new InputSource();
is.setCharacterStream(new StringReader(xml));
Document dom = db.parse(is);
NodeList nodes = dom.getElementsByTagName("clusterInfo");
assertEquals("incorrect number of elements", 1, nodes.getLength());
for (int i = 0; i < nodes.getLength(); i++) {
Element element = (Element) nodes.item(i);
verifyClusterGeneric(WebServicesTestUtils.getXmlLong(element, "id"),
WebServicesTestUtils.getXmlLong(element, "startedOn"),
WebServicesTestUtils.getXmlString(element, "state"),
WebServicesTestUtils.getXmlString(element, "haState"),
WebServicesTestUtils.getXmlString(element, "hadoopVersionBuiltOn"),
WebServicesTestUtils.getXmlString(element, "hadoopBuildVersion"),
WebServicesTestUtils.getXmlString(element, "hadoopVersion"),
WebServicesTestUtils.getXmlString(element,
"resourceManagerVersionBuiltOn"),
WebServicesTestUtils.getXmlString(element,
"resourceManagerBuildVersion"),
WebServicesTestUtils.getXmlString(element, "resourceManagerVersion"));
}
}
public void verifyClusterInfo(JSONObject json) throws JSONException,
Exception {
assertEquals("incorrect number of elements", 1, json.length());
JSONObject info = json.getJSONObject("clusterInfo");
assertEquals("incorrect number of elements", 11, info.length());
verifyClusterGeneric(info.getLong("id"), info.getLong("startedOn"),
info.getString("state"), info.getString("haState"),
info.getString("hadoopVersionBuiltOn"),
info.getString("hadoopBuildVersion"), info.getString("hadoopVersion"),
info.getString("resourceManagerVersionBuiltOn"),
info.getString("resourceManagerBuildVersion"),
info.getString("resourceManagerVersion"));
}
public void verifyClusterGeneric(long clusterid, long startedon,
String state, String haState, String hadoopVersionBuiltOn,
String hadoopBuildVersion, String hadoopVersion,
String resourceManagerVersionBuiltOn, String resourceManagerBuildVersion,
String resourceManagerVersion) {
assertEquals("clusterId doesn't match: ",
ResourceManager.getClusterTimeStamp(), clusterid);
assertEquals("startedOn doesn't match: ",
ResourceManager.getClusterTimeStamp(), startedon);
assertTrue("stated doesn't match: " + state,
state.matches(STATE.INITED.toString()));
assertTrue("HA state doesn't match: " + haState,
haState.matches("INITIALIZING"));
WebServicesTestUtils.checkStringMatch("hadoopVersionBuiltOn",
VersionInfo.getDate(), hadoopVersionBuiltOn);
WebServicesTestUtils.checkStringEqual("hadoopBuildVersion",
VersionInfo.getBuildVersion(), hadoopBuildVersion);
WebServicesTestUtils.checkStringMatch("hadoopVersion",
VersionInfo.getVersion(), hadoopVersion);
WebServicesTestUtils.checkStringMatch("resourceManagerVersionBuiltOn",
YarnVersionInfo.getDate(), resourceManagerVersionBuiltOn);
WebServicesTestUtils.checkStringEqual("resourceManagerBuildVersion",
YarnVersionInfo.getBuildVersion(), resourceManagerBuildVersion);
WebServicesTestUtils.checkStringMatch("resourceManagerVersion",
YarnVersionInfo.getVersion(), resourceManagerVersion);
}
@Test
public void testClusterMetrics() throws JSONException, Exception {
WebResource r = resource();
ClientResponse response = r.path("ws").path("v1").path("cluster")
.path("metrics").accept(MediaType.APPLICATION_JSON)
.get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject json = response.getEntity(JSONObject.class);
verifyClusterMetricsJSON(json);
}
@Test
public void testClusterMetricsSlash() throws JSONException, Exception {
WebResource r = resource();
ClientResponse response = r.path("ws").path("v1").path("cluster")
.path("metrics/").accept(MediaType.APPLICATION_JSON)
.get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject json = response.getEntity(JSONObject.class);
verifyClusterMetricsJSON(json);
}
@Test
public void testClusterMetricsDefault() throws JSONException, Exception {
WebResource r = resource();
ClientResponse response = r.path("ws").path("v1").path("cluster")
.path("metrics").get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject json = response.getEntity(JSONObject.class);
verifyClusterMetricsJSON(json);
}
@Test
public void testClusterMetricsXML() throws JSONException, Exception {
WebResource r = resource();
ClientResponse response = r.path("ws").path("v1").path("cluster")
.path("metrics").accept("application/xml").get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
String xml = response.getEntity(String.class);
verifyClusterMetricsXML(xml);
}
public void verifyClusterMetricsXML(String xml) throws JSONException,
Exception {
DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
DocumentBuilder db = dbf.newDocumentBuilder();
InputSource is = new InputSource();
is.setCharacterStream(new StringReader(xml));
Document dom = db.parse(is);
NodeList nodes = dom.getElementsByTagName("clusterMetrics");
assertEquals("incorrect number of elements", 1, nodes.getLength());
for (int i = 0; i < nodes.getLength(); i++) {
Element element = (Element) nodes.item(i);
verifyClusterMetrics(
WebServicesTestUtils.getXmlInt(element, "appsSubmitted"),
WebServicesTestUtils.getXmlInt(element, "appsCompleted"),
WebServicesTestUtils.getXmlInt(element, "reservedMB"),
WebServicesTestUtils.getXmlInt(element, "availableMB"),
WebServicesTestUtils.getXmlInt(element, "allocatedMB"),
WebServicesTestUtils.getXmlInt(element, "reservedVirtualCores"),
WebServicesTestUtils.getXmlInt(element, "availableVirtualCores"),
WebServicesTestUtils.getXmlInt(element, "allocatedVirtualCores"),
WebServicesTestUtils.getXmlInt(element, "totalVirtualCores"),
WebServicesTestUtils.getXmlInt(element, "containersAllocated"),
WebServicesTestUtils.getXmlInt(element, "totalMB"),
WebServicesTestUtils.getXmlInt(element, "totalNodes"),
WebServicesTestUtils.getXmlInt(element, "lostNodes"),
WebServicesTestUtils.getXmlInt(element, "unhealthyNodes"),
WebServicesTestUtils.getXmlInt(element, "decommissionedNodes"),
WebServicesTestUtils.getXmlInt(element, "rebootedNodes"),
WebServicesTestUtils.getXmlInt(element, "activeNodes"));
}
}
public void verifyClusterMetricsJSON(JSONObject json) throws JSONException,
Exception {
assertEquals("incorrect number of elements", 1, json.length());
JSONObject clusterinfo = json.getJSONObject("clusterMetrics");
assertEquals("incorrect number of elements", 23, clusterinfo.length());
verifyClusterMetrics(
clusterinfo.getInt("appsSubmitted"), clusterinfo.getInt("appsCompleted"),
clusterinfo.getInt("reservedMB"), clusterinfo.getInt("availableMB"),
clusterinfo.getInt("allocatedMB"),
clusterinfo.getInt("reservedVirtualCores"), clusterinfo.getInt("availableVirtualCores"),
clusterinfo.getInt("allocatedVirtualCores"), clusterinfo.getInt("totalVirtualCores"),
clusterinfo.getInt("containersAllocated"),
clusterinfo.getInt("totalMB"), clusterinfo.getInt("totalNodes"),
clusterinfo.getInt("lostNodes"), clusterinfo.getInt("unhealthyNodes"),
clusterinfo.getInt("decommissionedNodes"),
clusterinfo.getInt("rebootedNodes"),clusterinfo.getInt("activeNodes"));
}
public void verifyClusterMetrics(int submittedApps, int completedApps,
int reservedMB, int availableMB,
int allocMB, int reservedVirtualCores, int availableVirtualCores,
int allocVirtualCores, int totalVirtualCores,
int containersAlloc, int totalMB, int totalNodes,
int lostNodes, int unhealthyNodes, int decommissionedNodes,
int rebootedNodes, int activeNodes) throws JSONException, Exception {
ResourceScheduler rs = rm.getResourceScheduler();
QueueMetrics metrics = rs.getRootQueueMetrics();
ClusterMetrics clusterMetrics = ClusterMetrics.getMetrics();
long totalMBExpect =
metrics.getAvailableMB() + metrics.getAllocatedMB();
long totalVirtualCoresExpect =
metrics.getAvailableVirtualCores() + metrics.getAllocatedVirtualCores();
assertEquals("appsSubmitted doesn't match",
metrics.getAppsSubmitted(), submittedApps);
assertEquals("appsCompleted doesn't match",
metrics.getAppsCompleted(), completedApps);
assertEquals("reservedMB doesn't match",
metrics.getReservedMB(), reservedMB);
assertEquals("availableMB doesn't match",
metrics.getAvailableMB(), availableMB);
assertEquals("allocatedMB doesn't match",
metrics.getAllocatedMB(), allocMB);
assertEquals("reservedVirtualCores doesn't match",
metrics.getReservedVirtualCores(), reservedVirtualCores);
assertEquals("availableVirtualCores doesn't match",
metrics.getAvailableVirtualCores(), availableVirtualCores);
assertEquals("allocatedVirtualCores doesn't match",
totalVirtualCoresExpect, allocVirtualCores);
assertEquals("containersAllocated doesn't match", 0, containersAlloc);
assertEquals("totalMB doesn't match", totalMBExpect, totalMB);
assertEquals(
"totalNodes doesn't match",
clusterMetrics.getNumActiveNMs() + clusterMetrics.getNumLostNMs()
+ clusterMetrics.getNumDecommisionedNMs()
+ clusterMetrics.getNumRebootedNMs()
+ clusterMetrics.getUnhealthyNMs(), totalNodes);
assertEquals("lostNodes doesn't match", clusterMetrics.getNumLostNMs(),
lostNodes);
assertEquals("unhealthyNodes doesn't match",
clusterMetrics.getUnhealthyNMs(), unhealthyNodes);
assertEquals("decommissionedNodes doesn't match",
clusterMetrics.getNumDecommisionedNMs(), decommissionedNodes);
assertEquals("rebootedNodes doesn't match",
clusterMetrics.getNumRebootedNMs(), rebootedNodes);
assertEquals("activeNodes doesn't match", clusterMetrics.getNumActiveNMs(),
activeNodes);
}
@Test
public void testClusterSchedulerFifo() throws JSONException, Exception {
WebResource r = resource();
ClientResponse response = r.path("ws").path("v1").path("cluster")
.path("scheduler").accept(MediaType.APPLICATION_JSON)
.get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject json = response.getEntity(JSONObject.class);
verifyClusterSchedulerFifo(json);
}
@Test
public void testClusterSchedulerFifoSlash() throws JSONException, Exception {
WebResource r = resource();
ClientResponse response = r.path("ws").path("v1").path("cluster")
.path("scheduler/").accept(MediaType.APPLICATION_JSON)
.get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject json = response.getEntity(JSONObject.class);
verifyClusterSchedulerFifo(json);
}
@Test
public void testClusterSchedulerFifoDefault() throws JSONException, Exception {
WebResource r = resource();
ClientResponse response = r.path("ws").path("v1").path("cluster")
.path("scheduler").get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject json = response.getEntity(JSONObject.class);
verifyClusterSchedulerFifo(json);
}
@Test
public void testClusterSchedulerFifoXML() throws JSONException, Exception {
WebResource r = resource();
ClientResponse response = r.path("ws").path("v1").path("cluster")
.path("scheduler").accept(MediaType.APPLICATION_XML)
.get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
String xml = response.getEntity(String.class);
verifySchedulerFifoXML(xml);
}
public void verifySchedulerFifoXML(String xml) throws JSONException,
Exception {
DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
DocumentBuilder db = dbf.newDocumentBuilder();
InputSource is = new InputSource();
is.setCharacterStream(new StringReader(xml));
Document dom = db.parse(is);
NodeList nodesSched = dom.getElementsByTagName("scheduler");
assertEquals("incorrect number of elements", 1, nodesSched.getLength());
NodeList nodes = dom.getElementsByTagName("schedulerInfo");
assertEquals("incorrect number of elements", 1, nodes.getLength());
for (int i = 0; i < nodes.getLength(); i++) {
Element element = (Element) nodes.item(i);
verifyClusterSchedulerFifoGeneric(
WebServicesTestUtils.getXmlAttrString(element, "xsi:type"),
WebServicesTestUtils.getXmlString(element, "qstate"),
WebServicesTestUtils.getXmlFloat(element, "capacity"),
WebServicesTestUtils.getXmlFloat(element, "usedCapacity"),
WebServicesTestUtils.getXmlInt(element, "minQueueMemoryCapacity"),
WebServicesTestUtils.getXmlInt(element, "maxQueueMemoryCapacity"),
WebServicesTestUtils.getXmlInt(element, "numNodes"),
WebServicesTestUtils.getXmlInt(element, "usedNodeCapacity"),
WebServicesTestUtils.getXmlInt(element, "availNodeCapacity"),
WebServicesTestUtils.getXmlInt(element, "totalNodeCapacity"),
WebServicesTestUtils.getXmlInt(element, "numContainers"));
}
}
public void verifyClusterSchedulerFifo(JSONObject json) throws JSONException,
Exception {
assertEquals("incorrect number of elements", 1, json.length());
JSONObject info = json.getJSONObject("scheduler");
assertEquals("incorrect number of elements", 1, info.length());
info = info.getJSONObject("schedulerInfo");
assertEquals("incorrect number of elements", 11, info.length());
verifyClusterSchedulerFifoGeneric(info.getString("type"),
info.getString("qstate"), (float) info.getDouble("capacity"),
(float) info.getDouble("usedCapacity"),
info.getInt("minQueueMemoryCapacity"),
info.getInt("maxQueueMemoryCapacity"), info.getInt("numNodes"),
info.getInt("usedNodeCapacity"), info.getInt("availNodeCapacity"),
info.getInt("totalNodeCapacity"), info.getInt("numContainers"));
}
public void verifyClusterSchedulerFifoGeneric(String type, String state,
float capacity, float usedCapacity, int minQueueCapacity,
int maxQueueCapacity, int numNodes, int usedNodeCapacity,
int availNodeCapacity, int totalNodeCapacity, int numContainers)
throws JSONException, Exception {
assertEquals("type doesn't match", "fifoScheduler", type);
assertEquals("qstate doesn't match", QueueState.RUNNING.toString(), state);
assertEquals("capacity doesn't match", 1.0, capacity, 0.0);
assertEquals("usedCapacity doesn't match", 0.0, usedCapacity, 0.0);
assertEquals(
"minQueueMemoryCapacity doesn't match",
YarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_MB,
minQueueCapacity);
assertEquals("maxQueueMemoryCapacity doesn't match",
YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_MB,
maxQueueCapacity);
assertEquals("numNodes doesn't match", 0, numNodes);
assertEquals("usedNodeCapacity doesn't match", 0, usedNodeCapacity);
assertEquals("availNodeCapacity doesn't match", 0, availNodeCapacity);
assertEquals("totalNodeCapacity doesn't match", 0, totalNodeCapacity);
assertEquals("numContainers doesn't match", 0, numContainers);
}
}
| |
/* Copyright (c) The m-m-m Team, Licensed under the Apache License, Version 2.0
* http://www.apache.org/licenses/LICENSE-2.0 */
package net.sf.mmm.util.nls.base;
import java.io.File;
import java.io.IOException;
import java.io.PrintWriter;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
import java.util.Locale;
import java.util.ResourceBundle;
import java.util.Set;
import net.sf.mmm.util.cli.NlsBundleUtilCliRoot;
import net.sf.mmm.util.cli.api.AbstractMain;
import net.sf.mmm.util.cli.api.AbstractVersionedMain;
import net.sf.mmm.util.cli.api.CliOption;
import net.sf.mmm.util.file.api.FileCreationFailedException;
import net.sf.mmm.util.filter.api.Filter;
import net.sf.mmm.util.nls.api.NlsBundle;
import net.sf.mmm.util.nls.api.NlsBundleOptions;
import net.sf.mmm.util.nls.api.NlsBundleWithLookup;
import net.sf.mmm.util.nls.impl.NlsResourceBundleLocator;
import net.sf.mmm.util.nls.impl.NlsResourceBundleLocatorImpl;
import net.sf.mmm.util.reflect.api.ReflectionUtil;
import net.sf.mmm.util.reflect.base.AssignableFromFilter;
import net.sf.mmm.util.reflect.base.ReflectionUtilImpl;
/**
 * The abstract base class for a {@link AbstractMain CLI program} to process {@link NlsBundle}s or
 * {@link AbstractResourceBundle}s for localization maintenance.
 *
 * @see ResourceBundleSynchronizer
 *
 * @author Joerg Hohwiller (hohwille at users.sourceforge.net)
 * @since 7.3.0
 */
public abstract class AbstractResourceBundleCli extends AbstractVersionedMain {

  /** The command-line option to {@link #setDatePattern(String) set the date-pattern}. */
  public static final String OPTION_DATE_PATTERN = "--date-pattern";

  /** The command-line option to {@link #setEncoding(String) set the encoding}. */
  public static final String OPTION_ENCODING = "--encoding";

  /** The command-line option to {@link #setPath(String) set the path}. */
  public static final String OPTION_PATH = "--path";

  /** The command-line option to set the bundle-class. */
  public static final String OPTION_BUNDLE_CLASS = "--bundle";

  /** The command-line option to set the locales. */
  public static final String OPTION_LOCALE = "--locale";

  /** @see #getPath() */
  protected static final String DEFAULT_BASE_PATH = "src/main/resources";

  private static final String DEFAULT_ENCODING = "UTF-8";

  private static final String DEFAULT_DATE_PATTERN = "yyyy-MM-dd HH:mm:ss Z";

  @CliOption(name = OPTION_PATH, aliases = "-p", operand = "DIR", //
      usage = NlsBundleUtilCliRoot.MSG_SYNCHRONIZER_USAGE_PATH)
  private String path;

  @CliOption(name = OPTION_ENCODING, aliases = "-e", operand = "ENC", //
      usage = NlsBundleUtilCliRoot.MSG_SYNCHRONIZER_USAGE_ENCODING)
  private String encoding;

  // Line terminator used when writing bundle files; not exposed as a CLI option.
  private String newline;

  @CliOption(name = OPTION_DATE_PATTERN, aliases = "-d", operand = "PATTERN", //
      usage = NlsBundleUtilCliRoot.MSG_SYNCHRONIZER_USAGE_DATE_PATTERN)
  private String datePattern;

  @CliOption(name = OPTION_BUNDLE_CLASS, aliases = "-b", operand = "CLASS", //
      usage = NlsBundleUtilCliRoot.MSG_SYNCHRONIZER_USAGE_BUNDLE_CLASS)
  private List<Class<?>> bundleClasses;

  // Lazily initialized collaborators (see the corresponding getters).
  private NlsResourceBundleLocator resourceBundleLocator;

  private ReflectionUtil reflectionUtil;

  private NlsBundleHelper bundleHelper;

  /**
   * The constructor. Initializes all options with their documented defaults.
   */
  public AbstractResourceBundleCli() {
    super();
    this.path = DEFAULT_BASE_PATH;
    this.datePattern = DEFAULT_DATE_PATTERN;
    this.encoding = DEFAULT_ENCODING;
    this.newline = "\n";
  }

  /**
   * This method gets the pattern used to format the date comment.
   *
   * @see SimpleDateFormat
   *
   * @return the date pattern.
   */
  public String getDatePattern() {
    return this.datePattern;
  }

  /**
   * @param datePattern the datePattern to set
   */
  public void setDatePattern(String datePattern) {
    this.datePattern = datePattern;
  }

  /**
   * This method gets the locales of the bundles that should be {@link #synchronize(NlsBundleDescriptor) synchronized}.
   * Examples for locales (entries of the returned array) are {@code ""}, {@code "en"}, or {@code "en_GB"}.
   *
   * @return the locales to create/update.
   */
  public abstract String[] getLocales();

  /**
   * This method sets the {@link #getLocales() locales}.
   *
   * @param locales are the locales to set
   */
  public abstract void setLocales(String[] locales);

  /**
   * This method sets the {@link #getLocales() locales}.
   *
   * @param locales are the locales to set
   */
  public void setLocales(Locale... locales) {
    // Convert Locale objects to their string form (e.g. "en_GB") and delegate.
    String[] array = new String[locales.length];
    for (int i = 0; i < locales.length; i++) {
      array[i] = locales[i].toString();
    }
    setLocales(array);
  }

  /**
   * This method gets the base-path where the bundles are written to. They will appear there under their appropriate
   * classpath. The default is {@link #DEFAULT_BASE_PATH}.
   *
   * @return the basePath is the base path where the resource bundles are written to.
   */
  public String getPath() {
    return this.path;
  }

  /**
   * This method sets the {@link #getPath() base-path}.
   *
   * @param basePath the basePath to set
   */
  public void setPath(String basePath) {
    this.path = basePath;
  }

  /**
   * This method gets the encoding used to read and write the bundles. The default is {@code UTF-8}.
   *
   * @return the encoding.
   */
  public String getEncoding() {
    return this.encoding;
  }

  /**
   * This method sets the {@link #getEncoding() encoding}.
   *
   * @param encoding the encoding to set
   */
  public void setEncoding(String encoding) {
    this.encoding = encoding;
  }

  /**
   * This method gets the newline string used to terminate a line in the resource bundle. The default is LF ({@code \n}
   * ).
   *
   * @return the newline
   */
  public String getNewline() {
    return this.newline;
  }

  /**
   * @param newline the newline to set
   */
  public void setNewline(String newline) {
    this.newline = newline;
  }

  /**
   * This method gets the {@link Class} reflecting the {@link ResourceBundle} to synchronize.
   *
   * @return the bundle-class.
   */
  public List<Class<?>> getBundleClasses() {
    return this.bundleClasses;
  }

  /**
   * This method sets the {@link #getBundleClasses() bundle-classes}.
   *
   * @param bundleClasses is the {@link List} of bundle-classes to set
   */
  public void setBundleClasses(List<Class<?>> bundleClasses) {
    this.bundleClasses = bundleClasses;
  }

  /**
   * This method gets the {@link NlsResourceBundleLocator}. Lazily created and
   * {@code initialize()}d on first access if not injected via the setter.
   *
   * @return the {@link NlsResourceBundleLocator}.
   */
  public NlsResourceBundleLocator getResourceBundleLocator() {
    if (this.resourceBundleLocator == null) {
      NlsResourceBundleLocatorImpl impl = new NlsResourceBundleLocatorImpl();
      impl.initialize();
      this.resourceBundleLocator = impl;
    }
    return this.resourceBundleLocator;
  }

  /**
   * @param resourceBundleFinder is the resourceBundleFinder to set
   */
  public void setResourceBundleLocator(NlsResourceBundleLocator resourceBundleFinder) {
    this.resourceBundleLocator = resourceBundleFinder;
  }

  /**
   * This method gets the {@link ReflectionUtil}. Falls back to the shared singleton instance
   * if none was injected.
   *
   * @return the {@link ReflectionUtil}.
   */
  public ReflectionUtil getReflectionUtil() {
    if (this.reflectionUtil == null) {
      this.reflectionUtil = ReflectionUtilImpl.getInstance();
    }
    return this.reflectionUtil;
  }

  /**
   * @param reflectionUtil is the {@link ReflectionUtil}.
   */
  public void setReflectionUtil(ReflectionUtil reflectionUtil) {
    this.reflectionUtil = reflectionUtil;
  }

  /**
   * @return the {@link NlsBundleHelper}. Falls back to the shared singleton instance if none was injected.
   */
  public NlsBundleHelper getBundleHelper() {
    if (this.bundleHelper == null) {
      this.bundleHelper = NlsBundleHelper.getInstance();
    }
    return this.bundleHelper;
  }

  /**
   * @param bundleHelper is the {@link NlsBundleHelper}.
   */
  public void setBundleHelper(NlsBundleHelper bundleHelper) {
    this.bundleHelper = bundleHelper;
  }

  /**
   * This method synchronizes (creates or updates) the localized bundles (properties). If a bundle already exists, it
   * will NOT just be overwritten but the missing keys are appended to the end of the file. If no keys are missing, the
   * existing file remains untouched.
   *
   * @param bundle is the bundle instance as java object.
   * @throws IOException if the operation failed with an input/output error.
   */
  public void synchronize(NlsBundleDescriptor bundle) throws IOException {
    PrintWriter out = getStandardOutput();
    if (bundle.getMessages().isEmpty()) {
      // Fixed typo: "noting to do" -> "nothing to do".
      out.println(bundle.getQualifiedName() + " is empty - nothing to do!");
      return;
    }
    List<String> locales = getLocales(bundle);
    if (locales.isEmpty()) {
      getStandardError().println("No localized bundles for " + bundle.getQualifiedName() + " on your classpath!");
      return;
    }
    // Format the timestamp once so all locale files of this bundle carry the same date comment.
    SimpleDateFormat sdf = new SimpleDateFormat(this.datePattern);
    String date = sdf.format(new Date());
    for (String locale : locales) {
      File targetFile = getTargetFileMkdirs(bundle, locale);
      synchronize(bundle, locale, targetFile, date);
    }
  }

  /**
   * @param bundle the {@link NlsBundleDescriptor}.
   * @param locale the {@link Locale} to generate as {@link String}.
   * @return the target {@link File} to generate. It is ensured that the parent directory exists.
   * @throws FileCreationFailedException if the parent directory could not be created.
   */
  protected File getTargetFileMkdirs(NlsBundleDescriptor bundle, String locale) {
    File targetFile = getTargetFile(bundle, locale);
    File directory = targetFile.getParentFile();
    if (!directory.exists()) {
      boolean success = directory.mkdirs();
      if (!success) {
        throw new FileCreationFailedException(directory);
      }
    }
    return targetFile;
  }

  /**
   * @param bundle the {@link NlsBundleDescriptor}.
   * @param locale the {@link Locale} to generate as {@link String}.
   * @return the target {@link File} to generate.
   */
  protected abstract File getTargetFile(NlsBundleDescriptor bundle, String locale);

  /**
   * @param bundle the {@link NlsBundleDescriptor}. Ignored by this default implementation;
   *        subclasses may override to select locales per bundle.
   * @return the {@link #getLocales() locales} to process for the bundle.
   */
  protected List<String> getLocales(NlsBundleDescriptor bundle) {
    return Arrays.asList(getLocales());
  }

  /**
   * Like {@link #synchronize(NlsBundleDescriptor)} but for a single {@link Locale}.
   *
   * @param bundle the bundle instance as java object.
   * @param locale the locale to synchronize as string.
   * @param targetFile the {@link File} to write to. May not yet exists but parent folder exists.
   * @param date is the current date as string.
   * @throws IOException if an I/O problem occurred.
   */
  protected abstract void synchronize(NlsBundleDescriptor bundle, String locale, File targetFile, String date) throws IOException;

  @Override
  protected int runDefaultMode() throws Exception {
    if (this.bundleClasses == null) {
      // No explicit bundle classes given: discover both classic ResourceBundles
      // and NlsBundle interfaces on the classpath.
      List<ResourceBundle> bundleList = getResourceBundleLocator().findBundles();
      for (ResourceBundle resourceBundle : bundleList) {
        if (isProductive(resourceBundle.getClass())) {
          synchronize(new NlsBundleDescriptor(resourceBundle));
        }
      }
      Set<String> allClasses = getReflectionUtil().findClassNames("", true);
      Filter<? super Class<?>> filter = new AssignableFromFilter(NlsBundle.class, true);
      @SuppressWarnings({ "unchecked", "rawtypes" })
      Set<Class<? extends NlsBundle>> nlsBundleClasses = (Set) getReflectionUtil().loadClasses(allClasses, filter);
      for (Class<? extends NlsBundle> bundleClass : nlsBundleClasses) {
        // NlsBundleWithLookup is the generic lookup interface itself, not a real bundle.
        if (bundleClass != NlsBundleWithLookup.class) {
          if (isProductive(bundleClass)) {
            synchronize(new NlsBundleDescriptor(bundleClass));
          }
        }
      }
    } else {
      // Explicit bundle classes: each must be a ResourceBundle subclass or an NlsBundle interface.
      for (Class<?> bundleClass : this.bundleClasses) {
        NlsBundleDescriptor bundle;
        if (ResourceBundle.class.isAssignableFrom(bundleClass)) {
          // NOTE(review): Class.newInstance() is deprecated since Java 9; kept here to
          // preserve the exact exception behavior (no InvocationTargetException wrapping).
          ResourceBundle resourceBundle = (ResourceBundle) bundleClass.newInstance();
          bundle = new NlsBundleDescriptor(resourceBundle);
        } else if (NlsBundle.class.isAssignableFrom(bundleClass)) {
          @SuppressWarnings("unchecked")
          Class<? extends NlsBundle> bundleInterface = (Class<? extends NlsBundle>) bundleClass;
          bundle = new NlsBundleDescriptor(bundleInterface);
        } else {
          throw new IllegalArgumentException(bundleClass.getName());
        }
        synchronize(bundle);
      }
    }
    return EXIT_CODE_OK;
  }

  /**
   * Determines if the given {@code bundleClass} is {@link NlsBundleOptions#productive() productive}.
   * Classes without the annotation are considered productive by default.
   *
   * @param bundleClass is the {@link Class} to test.
   * @return {@code true} if {@link NlsBundleOptions#productive() productive}, {@code false} otherwise.
   */
  private boolean isProductive(Class<?> bundleClass) {
    NlsBundleOptions options = bundleClass.getAnnotation(NlsBundleOptions.class);
    if (options != null) {
      return options.productive();
    }
    return true;
  }
}
| |
// Copyright 2016 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.runtime;
import static com.google.common.truth.Truth.assertThat;
import static com.google.common.truth.Truth.assertWithMessage;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.not;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.mockito.Mockito.when;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.devtools.build.lib.actions.Action;
import com.google.devtools.build.lib.actions.ActionCompletionEvent;
import com.google.devtools.build.lib.actions.ActionLookupData;
import com.google.devtools.build.lib.actions.ActionOwner;
import com.google.devtools.build.lib.actions.ActionStartedEvent;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.actions.ArtifactRoot;
import com.google.devtools.build.lib.actions.RunningActionEvent;
import com.google.devtools.build.lib.actions.ScanningActionEvent;
import com.google.devtools.build.lib.actions.SchedulingActionEvent;
import com.google.devtools.build.lib.actions.util.ActionsTestUtil;
import com.google.devtools.build.lib.analysis.ConfiguredTarget;
import com.google.devtools.build.lib.bazel.repository.downloader.DownloadProgressEvent;
import com.google.devtools.build.lib.buildeventstream.AnnounceBuildEventTransportsEvent;
import com.google.devtools.build.lib.buildeventstream.BuildEventTransport;
import com.google.devtools.build.lib.buildeventstream.BuildEventTransportClosedEvent;
import com.google.devtools.build.lib.buildtool.BuildResult;
import com.google.devtools.build.lib.buildtool.buildevent.BuildCompleteEvent;
import com.google.devtools.build.lib.buildtool.buildevent.TestFilteringCompleteEvent;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.events.ExtendedEventHandler.FetchProgress;
import com.google.devtools.build.lib.packages.AspectDescriptor;
import com.google.devtools.build.lib.runtime.UiStateTracker.ProgressMode;
import com.google.devtools.build.lib.runtime.UiStateTracker.StrategyIds;
import com.google.devtools.build.lib.skyframe.LoadingPhaseStartedEvent;
import com.google.devtools.build.lib.skyframe.PackageProgressReceiver;
import com.google.devtools.build.lib.testutil.FoundationTestCase;
import com.google.devtools.build.lib.testutil.ManualClock;
import com.google.devtools.build.lib.util.ExitCode;
import com.google.devtools.build.lib.util.Pair;
import com.google.devtools.build.lib.util.io.LoggingTerminalWriter;
import com.google.devtools.build.lib.vfs.Path;
import com.google.devtools.build.lib.vfs.PathFragment;
import com.google.devtools.build.lib.vfs.Root;
import com.google.devtools.build.lib.view.test.TestStatus.BlazeTestStatus;
import java.io.IOException;
import java.net.URL;
import java.time.Duration;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import org.mockito.Mockito;
/** Tests {@link UiStateTracker}. */
@RunWith(JUnit4.class)
public class UiStateTrackerTest extends FoundationTestCase {
@Test
public void testStrategyIds_getId_idsAreBitmasks() {
  // Each distinct strategy name must be assigned a positive id whose bits do
  // not overlap with any other id, so ids can be OR-ed into a bitmask.
  StrategyIds ids = new StrategyIds();
  Integer fooId = ids.getId("foo");
  Integer barId = ids.getId("bar");
  Integer bazId = ids.getId("baz");
  assertThat(fooId).isGreaterThan(0);
  assertThat(barId).isGreaterThan(0);
  assertThat(bazId).isGreaterThan(0);
  // Pairwise disjoint bit patterns.
  assertThat(fooId & barId).isEqualTo(0);
  assertThat(fooId & bazId).isEqualTo(0);
  assertThat(barId & bazId).isEqualTo(0);
}
@Test
public void testStrategyIds_getId_idsAreReusedIfAlreadyExist() {
  // Requesting an id for an already-registered name returns the previously
  // assigned id instead of allocating a fresh one.
  StrategyIds ids = new StrategyIds();
  Integer firstFoo = ids.getId("foo");
  Integer onlyBar = ids.getId("bar");
  Integer secondFoo = ids.getId("foo");
  assertThat(firstFoo).isNotEqualTo(onlyBar);
  assertThat(firstFoo).isEqualTo(secondFoo);
}
@Test
public void testStrategyIds_getId_exhaustIds() {
  // Keep registering fresh names until the pool of bit positions runs out;
  // from then on every new name maps to the shared fallback id.
  StrategyIds strategyIds = new StrategyIds();
  Set<Integer> distinctIds = new HashSet<>();
  StringBuilder name = new StringBuilder();
  while (true) {
    name.append('a');
    Integer id = strategyIds.getId(name.toString());
    if (id.equals(strategyIds.fallbackId)) {
      break;
    }
    distinctIds.add(id);
  }
  assertThat(distinctIds).hasSize(Integer.SIZE - 1); // Minus 1 for FALLBACK_NAME.
  assertThat(strategyIds.getId("some")).isEqualTo(strategyIds.fallbackId);
  assertThat(strategyIds.getId("more")).isEqualTo(strategyIds.fallbackId);
}
@Test
public void testStrategyIds_formatNames_fallbackExistsByDefault() {
  // A freshly constructed StrategyIds already knows its fallback id and
  // renders it with the canonical fallback name.
  StrategyIds ids = new StrategyIds();
  String rendered = ids.formatNames(ids.fallbackId);
  assertThat(rendered).isEqualTo(StrategyIds.FALLBACK_NAME);
}
@Test
public void testStrategyIds_formatNames_oneHasNoComma() {
  // A mask containing exactly one strategy renders as just that name.
  StrategyIds ids = new StrategyIds();
  assertThat(ids.formatNames(ids.getId("abc"))).isEqualTo("abc");
}
@Test
public void testStrategyIds_formatNames() {
  // Combined masks render as comma-separated lists.
  StrategyIds ids = new StrategyIds();
  Integer abcId = ids.getId("abc");
  Integer xyzId = ids.getId("xyz");
  Integer defId = ids.getId("def");
  // Names are not sorted alphabetically but their order is stable based on prior getId calls.
  assertThat(ids.formatNames(abcId | xyzId)).isEqualTo("abc, xyz");
  assertThat(ids.formatNames(abcId | defId)).isEqualTo("abc, def");
  assertThat(ids.formatNames(xyzId | defId)).isEqualTo("xyz, def");
  assertThat(ids.formatNames(abcId | xyzId | defId)).isEqualTo("abc, xyz, def");
}
/**
 * Creates a Mockito mock of {@link Action} that reports the given progress message and
 * whose primary output is an artifact at {@code primaryOutput} under the output base.
 */
private Action mockAction(String progressMessage, String primaryOutput) {
  Path outputPath = outputBase.getRelative(PathFragment.create(primaryOutput));
  Artifact primary =
      ActionsTestUtil.createArtifact(ArtifactRoot.asSourceRoot(Root.fromPath(outputBase)), outputPath);
  Action mocked = Mockito.mock(Action.class);
  when(mocked.getProgressMessage()).thenReturn(progressMessage);
  when(mocked.getPrimaryOutput()).thenReturn(primary);
  return mocked;
}
/** Returns the length of the longest line in {@code output}, splitting on '\n'. */
private int longestLine(String output) {
  int longest = 0;
  for (String line : output.split("\n")) {
    if (line.length() > longest) {
      longest = line.length();
    }
  }
  return longest;
}
@Test
public void testLoadingActivity() throws IOException {
  // During loading phase, state and activity, as reported by the PackageProgressReceiver,
  // should be visible in the progress bar.
  String state = "42 packages loaded";
  String activity = "currently loading //src/foo/bar and 17 more";
  PackageProgressReceiver progress = Mockito.mock(PackageProgressReceiver.class);
  when(progress.progressState()).thenReturn(new Pair<String, String>(state, activity));

  UiStateTracker tracker = new UiStateTracker(new ManualClock());
  tracker.loadingStarted(new LoadingPhaseStartedEvent(progress));

  LoggingTerminalWriter writer = new LoggingTerminalWriter(/*discardHighlight=*/ true);
  tracker.writeProgressBar(writer);
  String output = writer.getTranscript();
  assertWithMessage(
          "Output should indicate that we are in the loading phase, but was:\n" + output)
      .that(output.contains("Loading"))
      .isTrue();
  assertWithMessage("Output should contain loading state '" + state + "', but was:\n" + output)
      .that(output.contains(state))
      .isTrue();
  assertWithMessage("Output should contain loading state '" + activity + "', but was:\n" + output)
      .that(output.contains(activity))
      .isTrue();
}
@Test
public void testActionVisible() throws IOException {
  // If there is only one action running, it should be visible
  // somewhere in the progress bar, and also the short version thereof.
  String message = "Building foo";
  ManualClock clock = new ManualClock();
  clock.advanceMillis(120000);
  UiStateTracker tracker = new UiStateTracker(clock);
  tracker.actionStarted(new ActionStartedEvent(mockAction(message, "bar/foo"), 123456789));

  LoggingTerminalWriter writer = new LoggingTerminalWriter(/*discardHighlight=*/ true);
  tracker.writeProgressBar(writer);
  String output = writer.getTranscript();
  assertWithMessage("Action message '" + message + "' should be present in output: " + output)
      .that(output.contains(message))
      .isTrue();

  writer = new LoggingTerminalWriter();
  tracker.writeProgressBar(writer, /* shortVersion=*/ true);
  output = writer.getTranscript();
  assertWithMessage(
          "Action message '" + message + "' should be present in short output: " + output)
      .that(output.contains(message))
      .isTrue();
}
@Test
public void testCompletedActionNotShown() throws IOException {
  // Completed actions should not be reported in the progress bar, nor in the
  // short progress bar.
  String messageFast = "Running quick action";
  String messageSlow = "Running slow action";
  Action fastAction = mockAction(messageFast, "foo/fast");
  Action slowAction = mockAction(messageSlow, "bar/slow");

  ManualClock clock = new ManualClock();
  clock.advanceMillis(120000);
  UiStateTracker tracker = new UiStateTracker(clock);
  tracker.actionStarted(new ActionStartedEvent(fastAction, 123456789));
  tracker.actionStarted(new ActionStartedEvent(slowAction, 123456999));
  // Only the fast action completes; the slow one keeps running.
  tracker.actionCompletion(
      new ActionCompletionEvent(20, fastAction, Mockito.mock(ActionLookupData.class)));

  LoggingTerminalWriter writer = new LoggingTerminalWriter(/*discardHighlight=*/ true);
  tracker.writeProgressBar(writer);
  String output = writer.getTranscript();
  assertWithMessage(
          "Completed action '" + messageFast + "' should not be present in output: " + output)
      .that(output.contains(messageFast))
      .isFalse();
  assertWithMessage(
          "Only running action '" + messageSlow + "' should be present in output: " + output)
      .that(output.contains(messageSlow))
      .isTrue();

  writer = new LoggingTerminalWriter();
  tracker.writeProgressBar(writer, /* shortVersion=*/ true);
  output = writer.getTranscript();
  assertWithMessage(
          "Completed action '" + messageFast + "' should not be present in short output: " + output)
      .that(output.contains(messageFast))
      .isFalse();
  assertWithMessage(
          "Only running action '" + messageSlow + "' should be present in short output: " + output)
      .that(output.contains(messageSlow))
      .isTrue();
}
@Test
public void testOldestActionVisible() throws IOException {
  // The earliest-started action is always visible somehow in the progress bar
  // and its short version.
  String messageOld = "Running the first-started action";
  ManualClock clock = new ManualClock();
  clock.advanceMillis(120000);
  UiStateTracker tracker = new UiStateTracker(clock);
  tracker.actionStarted(new ActionStartedEvent(mockAction(messageOld, "bar/foo"), 123456789));
  // Flood the tracker with 30 later actions so the first one would drop out of
  // a naive most-recent-only display.
  for (int index = 0; index < 30; index++) {
    Action later = mockAction("Other action " + index, "some/other/actions/number" + index);
    tracker.actionStarted(new ActionStartedEvent(later, 123456790 + index));
  }

  LoggingTerminalWriter writer = new LoggingTerminalWriter(/*discardHighlight=*/ true);
  tracker.writeProgressBar(writer);
  String output = writer.getTranscript();
  assertWithMessage(
          "Longest running action '" + messageOld + "' should be visible in output: " + output)
      .that(output.contains(messageOld))
      .isTrue();

  writer = new LoggingTerminalWriter(/*discardHighlight=*/ true);
  tracker.writeProgressBar(writer, /* shortVersion=*/ true);
  output = writer.getTranscript();
  assertWithMessage(
          "Longest running action '" + messageOld + "' should be visible in short output: " + output)
      .that(output.contains(messageOld))
      .isTrue();
}
@Test
public void testSampleSize() throws IOException {
  // Verify that the number of actions shown in the progress bar can be set as sample size.
  ManualClock clock = new ManualClock();
  clock.advanceMillis(TimeUnit.SECONDS.toMillis(123));
  UiStateTracker tracker = new UiStateTracker(clock);
  clock.advanceMillis(TimeUnit.SECONDS.toMillis(2));
  // Start 10 actions (numbered 0 to 9), one second apart.
  for (int actionIndex = 0; actionIndex < 10; actionIndex++) {
    clock.advanceMillis(TimeUnit.SECONDS.toMillis(1));
    tracker.actionStarted(
        new ActionStartedEvent(
            mockAction("Performing action A" + actionIndex + ".", "action_A" + actionIndex + ".out"),
            clock.nanoTime()));
  }
  // For various sample sizes verify the progress bar
  for (int sampleSize = 1; sampleSize < 11; sampleSize++) {
    tracker.setProgressMode(ProgressMode.OLDEST_ACTIONS, sampleSize);
    LoggingTerminalWriter writer = new LoggingTerminalWriter(/*discardHighlight=*/ true);
    tracker.writeProgressBar(writer);
    String output = writer.getTranscript();
    assertWithMessage(
            "Action " + (sampleSize - 1) + " should still be shown in the output: '" + output)
        .that(output.contains("A" + (sampleSize - 1) + "."))
        .isTrue();
    assertWithMessage("Action " + sampleSize + " should not be shown in the output: " + output)
        .that(output.contains("A" + sampleSize + "."))
        .isFalse();
    // An ellipsis indicates more actions are running than shown; with all 10
    // visible (sampleSize == 10) no ellipsis is expected.
    if (sampleSize < 10) {
      assertWithMessage("Ellipsis symbol should be shown in output: " + output)
          .that(output.contains("..."))
          .isTrue();
    } else {
      assertWithMessage("Ellipsis symbol should not be shown in output: " + output)
          .that(output.contains("..."))
          .isFalse();
    }
  }
}
@Test
public void testTimesShown() throws IOException {
  // For sufficiently long running actions, the time that has passed since their start is shown.
  // In the short version of the progress bar, this should be true at least for the oldest action.
  ManualClock clock = new ManualClock();
  clock.advanceMillis(TimeUnit.SECONDS.toMillis(123));
  UiStateTracker tracker = new UiStateTracker(clock);
  clock.advanceMillis(TimeUnit.SECONDS.toMillis(2));
  tracker.actionStarted(
      new ActionStartedEvent(mockAction("First action", "foo"), clock.nanoTime()));
  clock.advanceMillis(TimeUnit.SECONDS.toMillis(7));
  tracker.actionStarted(
      new ActionStartedEvent(mockAction("Second action", "bar"), clock.nanoTime()));
  // By now the first action has run for 27s and the second one for 20s.
  clock.advanceMillis(TimeUnit.SECONDS.toMillis(20));

  LoggingTerminalWriter writer = new LoggingTerminalWriter(/*discardHighlight=*/ true);
  tracker.writeProgressBar(writer);
  String output = writer.getTranscript();
  assertWithMessage("Runtime of first action should be visible in output: " + output)
      .that(output.contains("27s"))
      .isTrue();
  assertWithMessage("Runtime of second action should be visible in output: " + output)
      .that(output.contains("20s"))
      .isTrue();

  writer = new LoggingTerminalWriter(/*discardHighlight=*/ true);
  tracker.writeProgressBar(writer, /* shortVersion=*/ true);
  output = writer.getTranscript();
  assertWithMessage("Runtime of first action should be visible in short output: " + output)
      .that(output.contains("27s"))
      .isTrue();
}
@Test
public void initialProgressBarTimeIndependent() {
  // With nothing started yet, the progress bar must not change just because
  // time passes.
  ManualClock clock = new ManualClock();
  clock.advanceMillis(TimeUnit.SECONDS.toMillis(123));
  UiStateTracker tracker = new UiStateTracker(clock);
  assertWithMessage("Initial progress status should be time independent")
      .that(tracker.progressBarTimeDependent())
      .isFalse();
}
@Test
public void runningActionTimeIndependent() {
  // A running action shows its elapsed time, so the bar content now depends on
  // the clock.
  ManualClock clock = new ManualClock();
  clock.advanceMillis(TimeUnit.SECONDS.toMillis(123));
  UiStateTracker tracker = new UiStateTracker(clock);
  clock.advanceMillis(TimeUnit.SECONDS.toMillis(1));
  tracker.actionStarted(
      new ActionStartedEvent(mockAction("Some action", "foo"), clock.nanoTime()));
  assertWithMessage("Progress bar showing a running action should be time dependent")
      .that(tracker.progressBarTimeDependent())
      .isTrue();
}
@Test
public void testCountVisible() throws Exception {
  // The test count should be visible in the status bar, as well as the short status bar
  UiStateTracker tracker = new UiStateTracker(new ManualClock());
  Label labelA = Label.parseAbsolute("//foo/bar:baz", ImmutableMap.of());
  ConfiguredTarget targetA = Mockito.mock(ConfiguredTarget.class);
  when(targetA.getLabel()).thenReturn(labelA);
  ConfiguredTarget targetB = Mockito.mock(ConfiguredTarget.class);
  TestFilteringCompleteEvent filteringComplete = Mockito.mock(TestFilteringCompleteEvent.class);
  when(filteringComplete.getTestTargets()).thenReturn(ImmutableSet.of(targetA, targetB));
  // One summary for two test targets => "1 / 2 tests".
  TestSummary testSummary = Mockito.mock(TestSummary.class);
  when(testSummary.getTarget()).thenReturn(targetA);
  when(testSummary.getLabel()).thenReturn(labelA);
  tracker.testFilteringComplete(filteringComplete);
  tracker.testSummary(testSummary);

  LoggingTerminalWriter writer = new LoggingTerminalWriter(/*discardHighlight=*/ true);
  tracker.writeProgressBar(writer);
  String output = writer.getTranscript();
  assertWithMessage("Test count should be visible in output: " + output)
      .that(output.contains(" 1 / 2 tests"))
      .isTrue();

  writer = new LoggingTerminalWriter(/*discardHighlight=*/ true);
  tracker.writeProgressBar(writer, /* shortVersion=*/ true);
  output = writer.getTranscript();
  assertWithMessage("Test count should be visible in short output: " + output)
      .that(output.contains(" 1 / 2 tests"))
      .isTrue();
}
@Test
public void testPassedVisible() throws Exception {
  // The last test should still be visible in the long status bar, and colored as ok if it passed.
  UiStateTracker tracker = new UiStateTracker(new ManualClock());
  Label labelA = Label.parseAbsolute("//foo/bar:baz", ImmutableMap.of());
  ConfiguredTarget targetA = Mockito.mock(ConfiguredTarget.class);
  when(targetA.getLabel()).thenReturn(labelA);
  ConfiguredTarget targetB = Mockito.mock(ConfiguredTarget.class);
  TestFilteringCompleteEvent filteringComplete = Mockito.mock(TestFilteringCompleteEvent.class);
  when(filteringComplete.getTestTargets()).thenReturn(ImmutableSet.of(targetA, targetB));
  TestSummary summary = Mockito.mock(TestSummary.class);
  when(summary.getStatus()).thenReturn(BlazeTestStatus.PASSED);
  when(summary.getTarget()).thenReturn(targetA);
  when(summary.getLabel()).thenReturn(labelA);
  tracker.testFilteringComplete(filteringComplete);
  tracker.testSummary(summary);

  // Keep highlighting so the OK color marker is part of the transcript.
  LoggingTerminalWriter writer = new LoggingTerminalWriter();
  tracker.writeProgressBar(writer);
  String output = writer.getTranscript();
  String expected = LoggingTerminalWriter.OK + labelA;
  assertWithMessage(
          "Sequence '" + expected + "' should be present in colored progress bar: " + output)
      .that(output.contains(expected))
      .isTrue();
}
@Test
public void testFailedVisible() throws Exception {
  // The last test should still be visible in the long status bar, and colored as fail if it
  // did not pass.
  UiStateTracker tracker = new UiStateTracker(new ManualClock());
  Label labelA = Label.parseAbsolute("//foo/bar:baz", ImmutableMap.of());
  ConfiguredTarget targetA = Mockito.mock(ConfiguredTarget.class);
  when(targetA.getLabel()).thenReturn(labelA);
  ConfiguredTarget targetB = Mockito.mock(ConfiguredTarget.class);
  TestFilteringCompleteEvent filteringComplete = Mockito.mock(TestFilteringCompleteEvent.class);
  when(filteringComplete.getTestTargets()).thenReturn(ImmutableSet.of(targetA, targetB));
  TestSummary summary = Mockito.mock(TestSummary.class);
  when(summary.getStatus()).thenReturn(BlazeTestStatus.FAILED);
  when(summary.getTarget()).thenReturn(targetA);
  when(summary.getLabel()).thenReturn(labelA);
  tracker.testFilteringComplete(filteringComplete);
  tracker.testSummary(summary);

  // Keep highlighting so the FAIL color marker is part of the transcript.
  LoggingTerminalWriter writer = new LoggingTerminalWriter();
  tracker.writeProgressBar(writer);
  String output = writer.getTranscript();
  String expected = LoggingTerminalWriter.FAIL + labelA;
  assertWithMessage(
          "Sequence '" + expected + "' should be present in colored progress bar: " + output)
      .that(output.contains(expected))
      .isTrue();
}
@Test
public void testSensibleShortening() throws Exception {
  // Verify that in the typical case, we shorten the progress message by shortening
  // the path implicit in it, that can also be extracted from the label. In particular,
  // the parts
  ManualClock clock = new ManualClock();
  // A 70-column terminal forces the tracker to shorten the long message below.
  UiStateTracker stateTracker = new UiStateTracker(clock, 70);
  Action action =
      mockAction(
          "Building some/very/very/long/path/for/some/library/directory/foo.jar (42 source"
              + " files)",
          "some/very/very/long/path/for/some/library/directory/foo.jar");
  // The label shares the long path with the progress message, which is what
  // enables the path-based shortening being tested here.
  Label label =
      Label.parseAbsolute(
          "//some/very/very/long/path/for/some/library/directory:libfoo", ImmutableMap.of());
  // NOTE(review): positional args — many optional ActionOwner fields are null here;
  // confirm against ActionOwner.create's signature before reordering anything.
  ActionOwner owner =
      ActionOwner.create(
          label,
          ImmutableList.<AspectDescriptor>of(),
          null,
          null,
          null,
          "fedcba",
          null,
          null,
          ImmutableMap.of(),
          null);
  when(action.getOwner()).thenReturn(owner);
  clock.advanceMillis(TimeUnit.SECONDS.toMillis(3));
  stateTracker.actionStarted(new ActionStartedEvent(action, clock.nanoTime()));
  clock.advanceMillis(TimeUnit.SECONDS.toMillis(5));
  LoggingTerminalWriter terminalWriter = new LoggingTerminalWriter(/*discardHighlight=*/ true);
  stateTracker.writeProgressBar(terminalWriter);
  String output = terminalWriter.getTranscript();
  // Even after shortening, the verb and the file name with its source count must survive.
  assertWithMessage("Progress bar should contain 'Building ', but was:\n" + output)
      .that(output.contains("Building "))
      .isTrue();
  assertWithMessage(
          "Progress bar should contain 'foo.jar (42 source files)', but was:\n" + output)
      .that(output.contains("foo.jar (42 source files)"))
      .isTrue();
}
@Test
public void testActionStrategyVisible() throws Exception {
  // verify that, if a strategy was reported for a shown action, it is visible
  // in the progress bar.
  String strategy = "verySpecialStrategy";
  String primaryOutput = "some/path/to/a/file";
  ManualClock clock = new ManualClock();
  // mockAction already stubs getPrimaryOutput with an artifact for primaryOutput,
  // so the previous manual artifact construction and re-stubbing were redundant.
  Action action = mockAction("Some random action", primaryOutput);
  when(action.getOwner()).thenReturn(Mockito.mock(ActionOwner.class));
  UiStateTracker stateTracker = new UiStateTracker(clock);
  stateTracker.actionStarted(new ActionStartedEvent(action, clock.nanoTime()));
  stateTracker.runningAction(new RunningActionEvent(action, strategy));
  LoggingTerminalWriter terminalWriter = new LoggingTerminalWriter(/*discardHighlight=*/ true);
  stateTracker.writeProgressBar(terminalWriter);
  String output = terminalWriter.getTranscript();
  assertWithMessage("Output should mention strategy '" + strategy + "', but was: " + output)
      .that(output.contains(strategy))
      .isTrue();
}
@Test
public void testMultipleActionStrategiesVisibleForDynamicScheduling() throws Exception {
String strategy1 = "strategy1";
String strategy2 = "stratagy2";
String primaryOutput = "some/path/to/a/file";
ManualClock clock = new ManualClock();
Path path = outputBase.getRelative(PathFragment.create(primaryOutput));
Artifact artifact =
ActionsTestUtil.createArtifact(ArtifactRoot.asSourceRoot(Root.fromPath(outputBase)), path);
Action action = mockAction("Some random action", primaryOutput);
when(action.getOwner()).thenReturn(Mockito.mock(ActionOwner.class));
when(action.getPrimaryOutput()).thenReturn(artifact);
UiStateTracker stateTracker = new UiStateTracker(clock);
stateTracker.actionStarted(new ActionStartedEvent(action, clock.nanoTime()));
stateTracker.runningAction(new RunningActionEvent(action, strategy1));
stateTracker.runningAction(new RunningActionEvent(action, strategy2));
LoggingTerminalWriter terminalWriter = new LoggingTerminalWriter(/*discardHighlight=*/ true);
stateTracker.writeProgressBar(terminalWriter);
String output = terminalWriter.getTranscript();
assertWithMessage(
"Output should mention strategies '"
+ strategy1
+ "' and '"
+ strategy2
+ "', but was: "
+ output)
.that(output.contains(strategy1 + ", " + strategy2))
.isTrue();
}
@Test
public void testActionCountsWithDynamicScheduling() throws Exception {
String primaryOutput1 = "some/path/to/a/file";
String primaryOutput2 = "some/path/to/b/file";
ManualClock clock = new ManualClock();
UiStateTracker stateTracker = new UiStateTracker(clock);
LoggingTerminalWriter terminalWriter = new LoggingTerminalWriter(/*discardHighlight=*/ true);
Path path1 = outputBase.getRelative(PathFragment.create(primaryOutput1));
Artifact artifact1 =
ActionsTestUtil.createArtifact(ArtifactRoot.asSourceRoot(Root.fromPath(outputBase)), path1);
Action action1 = mockAction("First random action", primaryOutput1);
when(action1.getOwner()).thenReturn(Mockito.mock(ActionOwner.class));
when(action1.getPrimaryOutput()).thenReturn(artifact1);
stateTracker.actionStarted(new ActionStartedEvent(action1, clock.nanoTime()));
Path path2 = outputBase.getRelative(PathFragment.create(primaryOutput2));
Artifact artifact2 =
ActionsTestUtil.createArtifact(ArtifactRoot.asSourceRoot(Root.fromPath(outputBase)), path2);
Action action2 = mockAction("First random action", primaryOutput1);
when(action2.getOwner()).thenReturn(Mockito.mock(ActionOwner.class));
when(action2.getPrimaryOutput()).thenReturn(artifact2);
stateTracker.actionStarted(new ActionStartedEvent(action2, clock.nanoTime()));
stateTracker.runningAction(new RunningActionEvent(action1, "strategy1"));
stateTracker.schedulingAction(new SchedulingActionEvent(action2, "strategy1"));
terminalWriter.reset();
stateTracker.writeProgressBar(terminalWriter);
assertThat(terminalWriter.getTranscript()).contains("2 actions, 1 running");
stateTracker.runningAction(new RunningActionEvent(action1, "strategy2"));
terminalWriter.reset();
stateTracker.writeProgressBar(terminalWriter);
assertThat(terminalWriter.getTranscript()).contains("3 actions, 2 running");
stateTracker.runningAction(new RunningActionEvent(action2, "strategy1"));
terminalWriter.reset();
stateTracker.writeProgressBar(terminalWriter);
assertThat(terminalWriter.getTranscript()).contains("3 actions running");
stateTracker.runningAction(new RunningActionEvent(action2, "strategy2"));
terminalWriter.reset();
stateTracker.writeProgressBar(terminalWriter);
assertThat(terminalWriter.getTranscript()).contains("4 actions running");
}
  /**
   * Renders the progress bar at a width of 70 columns with {@code actions}
   * (0-2) running actions whose names exceed the limit and, if {@code
   * withTest} is set, one passed test with an overlong label, then checks that
   * no line exceeds 70 characters while the distinguishing (local) parts of
   * the names stay visible.
   */
  private void doTestOutputLength(boolean withTest, int actions) throws Exception {
    // If we target 70 characters, then there should be enough space to both,
    // keep the line limit, and show the local part of the running actions and
    // the passed test.
    ManualClock clock = new ManualClock();
    UiStateTracker stateTracker = new UiStateTracker(clock, 70);
    // Two actions whose progress messages and output paths are far longer than 70 chars.
    Action foobuildAction =
        mockAction(
            "Building"
                + " //src/some/very/long/path/long/long/long/long/long/long/long/foo/foobuild.jar",
            "src/some/very/long/path/long/long/long/long/long/long/long/foo/foobuild.jar");
    Action bazbuildAction =
        mockAction(
            "Building"
                + " //src/some/very/long/path/long/long/long/long/long/long/long/baz/bazbuild.jar",
            "src/some/very/long/path/long/long/long/long/long/long/long/baz/bazbuild.jar");
    // A passed test whose label is also longer than the terminal width.
    Label bartestLabel =
        Label.parseAbsolute(
            "//src/another/very/long/long/path/long/long/long/long/long/long/long/long/bars:bartest",
            ImmutableMap.of());
    ConfiguredTarget bartestTarget = Mockito.mock(ConfiguredTarget.class);
    when(bartestTarget.getLabel()).thenReturn(bartestLabel);
    TestFilteringCompleteEvent filteringComplete = Mockito.mock(TestFilteringCompleteEvent.class);
    when(filteringComplete.getTestTargets()).thenReturn(ImmutableSet.of(bartestTarget));
    TestSummary testSummary = Mockito.mock(TestSummary.class);
    when(testSummary.getStatus()).thenReturn(BlazeTestStatus.PASSED);
    when(testSummary.getTarget()).thenReturn(bartestTarget);
    when(testSummary.getLabel()).thenReturn(bartestLabel);
    // Start 0, 1, or 2 of the long-named actions, per the caller's request.
    if (actions >= 1) {
      stateTracker.actionStarted(new ActionStartedEvent(foobuildAction, 123456789));
    }
    if (actions >= 2) {
      stateTracker.actionStarted(new ActionStartedEvent(bazbuildAction, 123456900));
    }
    if (withTest) {
      stateTracker.testFilteringComplete(filteringComplete);
      stateTracker.testSummary(testSummary);
    }
    LoggingTerminalWriter terminalWriter = new LoggingTerminalWriter(/*discardHighlight=*/ true);
    stateTracker.writeProgressBar(terminalWriter);
    String output = terminalWriter.getTranscript();
    // Hard requirement: the configured 70-column limit is never exceeded.
    assertWithMessage(
            "Only lines with at most 70 chars should be present in the output:\n" + output)
        .that(longestLine(output) <= 70)
        .isTrue();
    // Despite the shortening, the local name parts must still be recognizable.
    if (actions >= 1) {
      assertWithMessage("Running action 'foobuild' should be mentioned in output:\n" + output)
          .that(output.contains("foobuild"))
          .isTrue();
    }
    if (actions >= 2) {
      assertWithMessage("Running action 'bazbuild' should be mentioned in output:\n" + output)
          .that(output.contains("bazbuild"))
          .isTrue();
    }
    if (withTest) {
      assertWithMessage("Passed test ':bartest' should be mentioned in output:\n" + output)
          .that(output.contains(":bartest"))
          .isTrue();
    }
  }
@Test
public void testOutputLength() throws Exception {
for (int i = 0; i < 3; i++) {
doTestOutputLength(true, i);
doTestOutputLength(false, i);
}
}
  /**
   * Verifies that non-running actions show at least the first three characters
   * of their status (scanning/scheduling) and that the "N running" / "M
   * actions" counters stay accurate as actions move into the running state.
   */
  @Test
  public void testStatusShown() throws Exception {
    // Verify that for non-executing actions, at least the first 3 characters of the
    // status are shown.
    // Also verify that the number of running actions is reported correctly, if there is
    // more than one active action and not all are running.
    ManualClock clock = new ManualClock();
    clock.advanceMillis(120000);
    UiStateTracker stateTracker = new UiStateTracker(clock);
    Action actionFoo = mockAction("Building foo", "foo/foo");
    ActionOwner ownerFoo = Mockito.mock(ActionOwner.class);
    when(actionFoo.getOwner()).thenReturn(ownerFoo);
    Action actionBar = mockAction("Building bar", "bar/bar");
    ActionOwner ownerBar = Mockito.mock(ActionOwner.class);
    when(actionBar.getOwner()).thenReturn(ownerBar);
    LoggingTerminalWriter terminalWriter;
    String output;
    // Action foo being scanned.
    stateTracker.actionStarted(new ActionStartedEvent(actionFoo, 123456700));
    stateTracker.scanningAction(new ScanningActionEvent(actionFoo));
    terminalWriter = new LoggingTerminalWriter(/*discardHighlight=*/ true);
    stateTracker.writeProgressBar(terminalWriter);
    output = terminalWriter.getTranscript();
    // Checks both cases to be independent of capitalization ("scanning"/"Scanning").
    assertWithMessage("Action foo being scanned should be visible in output:\n" + output)
        .that(output.contains("sca") || output.contains("Sca"))
        .isTrue();
    // Then action bar gets scheduled.
    stateTracker.actionStarted(new ActionStartedEvent(actionBar, 123456701));
    stateTracker.schedulingAction(new SchedulingActionEvent(actionBar, "bar-sandbox"));
    terminalWriter = new LoggingTerminalWriter(/*discardHighlight=*/ true);
    stateTracker.writeProgressBar(terminalWriter);
    output = terminalWriter.getTranscript();
    assertWithMessage("Action bar being scheduled should be visible in output:\n" + output)
        .that(output.contains("sch") || output.contains("Sch"))
        .isTrue();
    assertWithMessage("Action foo being scanned should still be visible in output:\n" + output)
        .that(output.contains("sca") || output.contains("Sca"))
        .isTrue();
    assertWithMessage("Indication at no actions are running is missing in output:\n" + output)
        .that(output.contains("0 running"))
        .isTrue();
    assertWithMessage("Total number of actions expected in output:\n" + output)
        .that(output.contains("2 actions"))
        .isTrue();
    // Then foo starts.
    stateTracker.runningAction(new RunningActionEvent(actionFoo, "xyz-sandbox"));
    // NOTE(review): this write goes to the previous (already-read) writer and its
    // output is never inspected; it looks redundant — confirm it is intentional.
    stateTracker.writeProgressBar(terminalWriter);
    terminalWriter = new LoggingTerminalWriter(/*discardHighlight=*/ true);
    stateTracker.writeProgressBar(terminalWriter);
    output = terminalWriter.getTranscript();
    assertWithMessage("Action foo's xyz-sandbox strategy should be shown in output:\n" + output)
        .that(output.contains("xyz-sandbox"))
        .isTrue();
    assertWithMessage("Action foo should no longer be analyzed in output:\n" + output)
        .that(output.contains("ana") || output.contains("Ana"))
        .isFalse();
    assertWithMessage("Action bar being scheduled should still be visible in output:\n" + output)
        .that(output.contains("sch") || output.contains("Sch"))
        .isTrue();
    assertWithMessage("Indication at one action is running is missing in output:\n" + output)
        .that(output.contains("1 running"))
        .isTrue();
    assertWithMessage("Total number of actions expected in output:\n" + output)
        .that(output.contains("2 actions"))
        .isTrue();
  }
  /**
   * Verifies that when an action changes state (e.g., scheduling to running)
   * the per-action timer restarts, while timers of unaffected actions keep
   * counting.
   */
  @Test
  public void testTimerReset() throws Exception {
    // Verify that a change in an action state (e.g., from scheduling to executing) resets
    // the time associated with that action.
    ManualClock clock = new ManualClock();
    clock.advanceMillis(TimeUnit.SECONDS.toMillis(123));
    UiStateTracker stateTracker = new UiStateTracker(clock);
    clock.advanceMillis(TimeUnit.SECONDS.toMillis(2));
    LoggingTerminalWriter terminalWriter;
    String output;
    Action actionFoo = mockAction("Building foo", "foo/foo");
    ActionOwner ownerFoo = Mockito.mock(ActionOwner.class);
    when(actionFoo.getOwner()).thenReturn(ownerFoo);
    Action actionBar = mockAction("Building bar", "bar/bar");
    ActionOwner ownerBar = Mockito.mock(ActionOwner.class);
    when(actionBar.getOwner()).thenReturn(ownerBar);
    // t=0: foo starts running immediately.
    stateTracker.actionStarted(new ActionStartedEvent(actionFoo, clock.nanoTime()));
    stateTracker.runningAction(new RunningActionEvent(actionFoo, "foo-sandbox"));
    clock.advanceMillis(TimeUnit.SECONDS.toMillis(7));
    // t=7s: bar starts, but is only scheduled.
    stateTracker.actionStarted(new ActionStartedEvent(actionBar, clock.nanoTime()));
    stateTracker.schedulingAction(new SchedulingActionEvent(actionBar, "bar-sandbox"));
    clock.advanceMillis(TimeUnit.SECONDS.toMillis(21));
    // t=28s: foo has run 28s, bar has been scheduled for 21s.
    terminalWriter = new LoggingTerminalWriter(/*discardHighlight=*/ true);
    stateTracker.writeProgressBar(terminalWriter);
    output = terminalWriter.getTranscript();
    assertWithMessage("Runtime of first action should be visible in output: " + output)
        .that(output.contains("28s"))
        .isTrue();
    assertWithMessage("Scheduling time of second action should be visible in output: " + output)
        .that(output.contains("21s"))
        .isTrue();
    // bar switches from scheduling to running: its 21s timer must reset.
    stateTracker.runningAction(new RunningActionEvent(actionBar, "bar-sandbox"));
    terminalWriter = new LoggingTerminalWriter(/*discardHighlight=*/ true);
    stateTracker.writeProgressBar(terminalWriter);
    output = terminalWriter.getTranscript();
    assertWithMessage("Runtime of first action should still be visible in output: " + output)
        .that(output.contains("28s"))
        .isTrue();
    assertWithMessage("Time of second action should no longer be visible in output: " + output)
        .that(output.contains("21s"))
        .isFalse();
    clock.advanceMillis(TimeUnit.SECONDS.toMillis(30));
    // t=58s: foo has run 58s total, bar 30s since its reset.
    terminalWriter = new LoggingTerminalWriter(/*discardHighlight=*/ true);
    stateTracker.writeProgressBar(terminalWriter);
    output = terminalWriter.getTranscript();
    assertWithMessage("New runtime of first action should be visible in output: " + output)
        .that(output.contains("58s"))
        .isTrue();
    assertWithMessage("Runtime of second action should be visible in output: " + output)
        .that(output.contains("30s"))
        .isTrue();
  }
@Test
public void testEarlyStatusHandledGracefully() throws Exception {
// On the event bus, events sometimes are sent out of order; verify that we handle an
// early message that an action is running gracefully.
ManualClock clock = new ManualClock();
UiStateTracker stateTracker = new UiStateTracker(clock);
Action actionFoo = mockAction("Building foo", "foo/foo");
ActionOwner ownerFoo = Mockito.mock(ActionOwner.class);
when(actionFoo.getOwner()).thenReturn(ownerFoo);
LoggingTerminalWriter terminalWriter;
String output;
// Early status announcement
stateTracker.runningAction(new RunningActionEvent(actionFoo, "foo-sandbox"));
// Here we don't expect any particular output, just some description; in particular, we do
// not expect the state tracker to hit an internal error.
terminalWriter = new LoggingTerminalWriter(/*discardHighlight=*/ true);
stateTracker.writeProgressBar(terminalWriter);
output = terminalWriter.getTranscript();
assertWithMessage("Expected at least some status bar").that(output.length() != 0).isTrue();
// Action actually started
stateTracker.actionStarted(new ActionStartedEvent(actionFoo, clock.nanoTime()));
terminalWriter = new LoggingTerminalWriter(/*discardHighlight=*/ true);
stateTracker.writeProgressBar(terminalWriter);
output = terminalWriter.getTranscript();
assertWithMessage("Even a strategy announced early should be shown in output:\n" + output)
.that(output.contains("foo-sandbox"))
.isTrue();
}
  /**
   * Verifies that actions which are actually executing are prioritized in the
   * display over merely scheduled ones, even when the executing actions
   * started much later.
   */
  @Test
  public void testExecutingActionsFirst() throws Exception {
    // Verify that executing actions, even if started late, are visible.
    ManualClock clock = new ManualClock();
    UiStateTracker stateTracker = new UiStateTracker(clock);
    clock.advanceMillis(120000);
    // 30 early actions that remain stuck in the scheduling state.
    for (int i = 0; i < 30; i++) {
      Action action = mockAction("Takes long to schedule number " + i, "long/startup" + i);
      ActionOwner owner = Mockito.mock(ActionOwner.class);
      when(action.getOwner()).thenReturn(owner);
      stateTracker.actionStarted(new ActionStartedEvent(action, 123456789 + i));
      stateTracker.schedulingAction(new SchedulingActionEvent(action, "xyz-sandbox"));
    }
    // 3 late actions that go straight to running; each must still be shown.
    for (int i = 0; i < 3; i++) {
      Action action = mockAction("quickstart" + i, "pkg/quickstart" + i);
      ActionOwner owner = Mockito.mock(ActionOwner.class);
      when(action.getOwner()).thenReturn(owner);
      stateTracker.actionStarted(new ActionStartedEvent(action, 123457000 + i));
      stateTracker.runningAction(new RunningActionEvent(action, "xyz-sandbox"));
      LoggingTerminalWriter terminalWriter = new LoggingTerminalWriter(/*discardHighlight=*/ true);
      stateTracker.writeProgressBar(terminalWriter);
      String output = terminalWriter.getTranscript();
      assertWithMessage("Action quickstart" + i + " should be visible in output:\n" + output)
          .that(output.contains("quickstart" + i))
          .isTrue();
      assertWithMessage("Number of running actions should be indicated in output:\n" + output)
          .that(output.contains("" + (i + 1) + " running"))
          .isTrue();
    }
  }
  /**
   * Verifies that all actions belonging to the same test target are aggregated
   * into one progress-bar entry, leaving room for an unrelated action to stay
   * visible.
   */
  @Test
  public void testAggregation() throws Exception {
    // Assert that actions for the same test are aggregated so that an action afterwards
    // is still shown.
    ManualClock clock = new ManualClock();
    clock.advanceMillis(TimeUnit.SECONDS.toMillis(1234));
    UiStateTracker stateTracker = new UiStateTracker(clock, 80);
    // Target and owner for the first test; most ActionOwner.create arguments are
    // irrelevant here and left null.
    Label labelFooTest = Label.parseAbsolute("//foo/bar:footest", ImmutableMap.of());
    ConfiguredTarget targetFooTest = Mockito.mock(ConfiguredTarget.class);
    when(targetFooTest.getLabel()).thenReturn(labelFooTest);
    ActionOwner fooOwner =
        ActionOwner.create(
            labelFooTest,
            ImmutableList.<AspectDescriptor>of(),
            null,
            null,
            null,
            "abcdef",
            null,
            null,
            ImmutableMap.of(),
            null);
    // Target and owner for the second test.
    Label labelBarTest = Label.parseAbsolute("//baz:bartest", ImmutableMap.of());
    ConfiguredTarget targetBarTest = Mockito.mock(ConfiguredTarget.class);
    when(targetBarTest.getLabel()).thenReturn(labelBarTest);
    TestFilteringCompleteEvent filteringComplete = Mockito.mock(TestFilteringCompleteEvent.class);
    when(filteringComplete.getTestTargets())
        .thenReturn(ImmutableSet.of(targetFooTest, targetBarTest));
    ActionOwner barOwner =
        ActionOwner.create(
            labelBarTest,
            ImmutableList.<AspectDescriptor>of(),
            null,
            null,
            null,
            "fedcba",
            null,
            null,
            ImmutableMap.of(),
            null);
    // Announce both targets as tests, so their actions are eligible for aggregation.
    stateTracker.testFilteringComplete(filteringComplete);
    // First produce 10 actions for footest...
    for (int i = 0; i < 10; i++) {
      clock.advanceMillis(TimeUnit.SECONDS.toMillis(1));
      Action action = mockAction("Testing foo, shard " + i, "testlog_foo_" + i);
      when(action.getOwner()).thenReturn(fooOwner);
      stateTracker.actionStarted(new ActionStartedEvent(action, clock.nanoTime()));
    }
    // ...then produce 10 actions for bartest...
    for (int i = 0; i < 10; i++) {
      clock.advanceMillis(TimeUnit.SECONDS.toMillis(1));
      Action action = mockAction("Testing bar, shard " + i, "testlog_bar_" + i);
      when(action.getOwner()).thenReturn(barOwner);
      stateTracker.actionStarted(new ActionStartedEvent(action, clock.nanoTime()));
    }
    // ...and finally a completely unrelated action
    clock.advanceMillis(TimeUnit.SECONDS.toMillis(1));
    stateTracker.actionStarted(
        new ActionStartedEvent(mockAction("Other action", "other/action"), clock.nanoTime()));
    clock.advanceMillis(TimeUnit.SECONDS.toMillis(1));
    LoggingTerminalWriter terminalWriter = new LoggingTerminalWriter(/*discardHighlight=*/ true);
    stateTracker.writeProgressBar(terminalWriter);
    String output = terminalWriter.getTranscript();
    // Despite 21 active actions on an 80-column bar, each test target appears
    // (aggregated) and the unrelated action is still visible.
    assertWithMessage("Progress bar should contain ':footest', but was:\n" + output)
        .that(output.contains(":footest"))
        .isTrue();
    assertWithMessage("Progress bar should contain ':bartest', but was:\n" + output)
        .that(output.contains(":bartest"))
        .isTrue();
    assertWithMessage("Progress bar should contain 'Other action', but was:\n" + output)
        .that(output.contains("Other action"))
        .isTrue();
  }
@Test
public void testSuffix() throws Exception {
assertThat(UiStateTracker.suffix("foobar", 3)).isEqualTo("bar");
assertThat(UiStateTracker.suffix("foo", -2)).isEmpty();
assertThat(UiStateTracker.suffix("foobar", 200)).isEqualTo("foobar");
}
  /**
   * Verifies that a single in-flight download during the loading phase is
   * shown with its URL, elapsed time, and byte progress, and disappears once
   * it finishes.
   */
  @Test
  public void testDownloadShown() throws Exception {
    // Verify that, whenever a single download is running in loading face, it is shown in the status
    // bar.
    ManualClock clock = new ManualClock();
    clock.advanceMillis(TimeUnit.SECONDS.toMillis(1234));
    UiStateTracker stateTracker = new UiStateTracker(clock, 80);
    URL url = new URL("http://example.org/first/dep");
    stateTracker.buildStarted(null);
    stateTracker.downloadProgress(new DownloadProgressEvent(url));
    clock.advanceMillis(TimeUnit.SECONDS.toMillis(6));
    LoggingTerminalWriter terminalWriter = new LoggingTerminalWriter(/*discardHighlight=*/ true);
    stateTracker.writeProgressBar(terminalWriter);
    String output = terminalWriter.getTranscript();
    // After 6 seconds with no progress event, URL and elapsed time are shown.
    assertWithMessage("Progress bar should contain '" + url.toString() + "', but was:\n" + output)
        .that(output.contains(url.toString()))
        .isTrue();
    assertWithMessage("Progress bar should contain '6s', but was:\n" + output)
        .that(output.contains("6s"))
        .isTrue();
    // Progress on the pending download should be reported appropriately
    clock.advanceMillis(TimeUnit.SECONDS.toMillis(1));
    stateTracker.downloadProgress(new DownloadProgressEvent(url, 256));
    terminalWriter = new LoggingTerminalWriter(/*discardHighlight=*/ true);
    stateTracker.writeProgressBar(terminalWriter);
    output = terminalWriter.getTranscript();
    assertWithMessage("Progress bar should contain '" + url.toString() + "', but was:\n" + output)
        .that(output.contains(url.toString()))
        .isTrue();
    assertWithMessage("Progress bar should contain '7s', but was:\n" + output)
        .that(output.contains("7s"))
        .isTrue();
    // The 256 bytes downloaded so far must appear.
    assertWithMessage("Progress bar should contain '256', but was:\n" + output)
        .that(output.contains("256"))
        .isTrue();
    // After finishing the download, it should no longer be reported.
    clock.advanceMillis(TimeUnit.SECONDS.toMillis(1));
    stateTracker.downloadProgress(new DownloadProgressEvent(url, 256, true));
    terminalWriter = new LoggingTerminalWriter(/*discardHighlight=*/ true);
    stateTracker.writeProgressBar(terminalWriter);
    output = terminalWriter.getTranscript();
    assertWithMessage("Progress bar should not contain url, but was:\n" + output)
        .that(output.contains("example.org"))
        .isFalse();
  }
  /**
   * Verifies that on a 60-column terminal, URLs of in-flight downloads are
   * shortened so the width limit holds, while the filename and host of the
   * longest-running download stay recognizable.
   */
  @Test
  public void testDownloadOutputLength() throws Exception {
    // Verify that URLs are shortened in a reasonable way, if the terminal is not wide enough
    // Also verify that the length is respected, even if only a download sample is shown.
    ManualClock clock = new ManualClock();
    clock.advanceMillis(TimeUnit.SECONDS.toMillis(1234));
    UiStateTracker stateTracker = new UiStateTracker(clock, 60);
    URL url = new URL("http://example.org/some/really/very/very/long/path/filename.tar.gz");
    stateTracker.buildStarted(null);
    stateTracker.downloadProgress(new DownloadProgressEvent(url));
    clock.advanceMillis(TimeUnit.SECONDS.toMillis(6));
    // Add 10 more concurrent downloads so only a sample of them can be shown.
    for (int i = 0; i < 10; i++) {
      stateTracker.downloadProgress(
          new DownloadProgressEvent(
              new URL(
                  "http://otherhost.example/another/also/length/path/to/another/download"
                      + i
                      + ".zip")));
      clock.advanceMillis(TimeUnit.SECONDS.toMillis(1));
    }
    LoggingTerminalWriter terminalWriter = new LoggingTerminalWriter(/*discardHighlight=*/ true);
    stateTracker.writeProgressBar(terminalWriter);
    String output = terminalWriter.getTranscript();
    assertWithMessage(
            "Only lines with at most 60 chars should be present in the output:\n" + output)
        .that(longestLine(output) <= 60)
        .isTrue();
    // The oldest download's filename and host must survive the shortening.
    assertWithMessage("Output still should contain the filename, but was:\n" + output)
        .that(output.contains("filename.tar.gz"))
        .isTrue();
    assertWithMessage("Output still should contain the host name, but was:\n" + output)
        .that(output.contains("example.org"))
        .isTrue();
  }
@Test
public void testMultipleBuildEventProtocolTransports() throws Exception {
// Verify that all announced transports are present in the progress bar
// and that as transports are closed they disappear from the progress bar.
// Verify that the wait duration is displayed.
// Verify that after all transports have been closed, the build status is displayed.
ManualClock clock = new ManualClock();
BuildEventTransport transport1 = newBepTransport("BuildEventTransport1");
BuildEventTransport transport2 = newBepTransport("BuildEventTransport2");
BuildEventTransport transport3 = newBepTransport("BuildEventTransport3");
BuildResult buildResult = new BuildResult(clock.currentTimeMillis());
buildResult.setExitCondition(ExitCode.SUCCESS);
clock.advanceMillis(TimeUnit.SECONDS.toMillis(1));
buildResult.setStopTime(clock.currentTimeMillis());
UiStateTracker stateTracker = new UiStateTracker(clock, 80);
stateTracker.buildStarted(null);
stateTracker.buildEventTransportsAnnounced(
new AnnounceBuildEventTransportsEvent(ImmutableList.of(transport1, transport2)));
stateTracker.buildEventTransportsAnnounced(
new AnnounceBuildEventTransportsEvent(ImmutableList.of(transport3)));
stateTracker.buildComplete(new BuildCompleteEvent(buildResult));
LoggingTerminalWriter terminalWriter = new LoggingTerminalWriter(true);
clock.advanceMillis(TimeUnit.SECONDS.toMillis(1));
stateTracker.writeProgressBar(terminalWriter);
String output = terminalWriter.getTranscript();
assertThat(output, containsString("1s"));
assertThat(output, containsString("BuildEventTransport1"));
assertThat(output, containsString("BuildEventTransport2"));
assertThat(output, containsString("BuildEventTransport3"));
assertThat(output, containsString("success"));
assertThat(output, containsString("complete"));
clock.advanceMillis(TimeUnit.SECONDS.toMillis(1));
stateTracker.buildEventTransportClosed(new BuildEventTransportClosedEvent(transport1));
terminalWriter = new LoggingTerminalWriter(true);
stateTracker.writeProgressBar(terminalWriter);
output = terminalWriter.getTranscript();
assertThat(output, containsString("2s"));
assertThat(output, not(containsString("BuildEventTransport1")));
assertThat(output, containsString("BuildEventTransport2"));
assertThat(output, containsString("BuildEventTransport3"));
assertThat(output, containsString("success"));
assertThat(output, containsString("complete"));
clock.advanceMillis(TimeUnit.SECONDS.toMillis(1));
stateTracker.buildEventTransportClosed(new BuildEventTransportClosedEvent(transport3));
terminalWriter = new LoggingTerminalWriter(true);
stateTracker.writeProgressBar(terminalWriter);
output = terminalWriter.getTranscript();
assertThat(output, containsString("3s"));
assertThat(output, not(containsString("BuildEventTransport1")));
assertThat(output, containsString("BuildEventTransport2"));
assertThat(output, not(containsString("BuildEventTransport3")));
assertThat(output, containsString("success"));
assertThat(output, containsString("complete"));
clock.advanceMillis(TimeUnit.SECONDS.toMillis(1));
stateTracker.buildEventTransportClosed(new BuildEventTransportClosedEvent(transport2));
terminalWriter = new LoggingTerminalWriter(true);
stateTracker.writeProgressBar(terminalWriter);
output = terminalWriter.getTranscript();
assertThat(output, not(containsString("3s")));
assertThat(output, not(containsString("BuildEventTransport1")));
assertThat(output, not(containsString("BuildEventTransport2")));
assertThat(output, not(containsString("BuildEventTransport3")));
assertThat(output, containsString("success"));
assertThat(output, containsString("complete"));
assertThat(output.split("\\n")).hasLength(1);
}
@Test
public void testBuildEventTransportsOnNarrowTerminal() throws IOException {
// Verify that the progress bar contains useful information on a 60-character terminal.
// - Too long names should be shortened to reasonably long prefixes of the name.
ManualClock clock = new ManualClock();
BuildEventTransport transport1 = newBepTransport(Strings.repeat("A", 61));
BuildEventTransport transport2 = newBepTransport("BuildEventTransport");
BuildResult buildResult = new BuildResult(clock.currentTimeMillis());
buildResult.setExitCondition(ExitCode.SUCCESS);
LoggingTerminalWriter terminalWriter = new LoggingTerminalWriter(true);
UiStateTracker stateTracker = new UiStateTracker(clock, 60);
stateTracker.buildStarted(null);
stateTracker.buildEventTransportsAnnounced(
new AnnounceBuildEventTransportsEvent(ImmutableList.of(transport1, transport2)));
stateTracker.buildComplete(new BuildCompleteEvent(buildResult));
clock.advanceMillis(TimeUnit.SECONDS.toMillis(1));
stateTracker.writeProgressBar(terminalWriter);
String output = terminalWriter.getTranscript();
assertThat(longestLine(output)).isAtMost(60);
assertThat(output, containsString("1s"));
assertThat(output, containsString(Strings.repeat("A", 30) + "..."));
assertThat(output, containsString("BuildEventTransport"));
assertThat(output, containsString("success"));
assertThat(output, containsString("complete"));
clock.advanceMillis(TimeUnit.SECONDS.toMillis(1));
stateTracker.buildEventTransportClosed(new BuildEventTransportClosedEvent(transport2));
terminalWriter = new LoggingTerminalWriter(true);
stateTracker.writeProgressBar(terminalWriter);
output = terminalWriter.getTranscript();
assertThat(longestLine(output)).isAtMost(60);
assertThat(output, containsString("2s"));
assertThat(output, containsString(Strings.repeat("A", 30) + "..."));
assertThat(output, not(containsString("BuildEventTransport")));
assertThat(output, containsString("success"));
assertThat(output, containsString("complete"));
assertThat(output.split("\\n")).hasLength(2);
}
private BuildEventTransport newBepTransport(String name) {
BuildEventTransport transport = Mockito.mock(BuildEventTransport.class);
when(transport.name()).thenReturn(name);
return transport;
}
@Test
public void testTotalFetchesReported() throws IOException {
ManualClock clock = new ManualClock();
UiStateTracker stateTracker = new UiStateTracker(clock, 80);
stateTracker.buildStarted(null);
for (int i = 0; i < 30; i++) {
stateTracker.downloadProgress(new FetchEvent("@repoFoo" + i));
}
clock.advanceMillis(TimeUnit.SECONDS.toMillis(7));
LoggingTerminalWriter terminalWriter = new LoggingTerminalWriter(true);
stateTracker.writeProgressBar(terminalWriter);
String output = terminalWriter.getTranscript();
assertThat(output, containsString("@repoFoo"));
assertThat(output, containsString("7s"));
assertThat(output, containsString("30 fetches"));
}
private Action mockActionWithMnemonic(String mnemonic, String primaryOutput) {
Path path = outputBase.getRelative(PathFragment.create(primaryOutput));
Artifact artifact =
ActionsTestUtil.createArtifact(ArtifactRoot.asSourceRoot(Root.fromPath(outputBase)), path);
Action action = Mockito.mock(Action.class);
when(action.getMnemonic()).thenReturn(mnemonic);
when(action.getPrimaryOutput()).thenReturn(artifact);
return action;
}
  /**
   * Verifies the MNEMONIC_HISTOGRAM progress mode: with a sample size of k,
   * only the k most frequent mnemonics are shown, each with its count.
   */
  @Test
  public void testMnemonicHistogram() throws IOException {
    // Verify that the number of actions shown in the progress bar can be set as sample size.
    ManualClock clock = new ManualClock();
    clock.advanceMillis(Duration.ofSeconds(123).toMillis());
    UiStateTracker stateTracker = new UiStateTracker(clock);
    clock.advanceMillis(Duration.ofSeconds(2).toMillis());
    // Start actions with 10 different mnemonics Mnemonic0-9, n+1 of each mnemonic.
    for (int i = 0; i < 10; i++) {
      clock.advanceMillis(Duration.ofSeconds(1).toMillis());
      for (int j = 0; j <= i; j++) {
        Action action = mockActionWithMnemonic("Mnemonic" + i, "action-" + i + "-" + j + ".out");
        stateTracker.actionStarted(new ActionStartedEvent(action, clock.nanoTime()));
      }
    }
    // For each sample size k, exactly the k most frequent mnemonics
    // (Mnemonic9 .. Mnemonic(10-k)) are shown; the next one is cut off.
    for (int sampleSize = 1; sampleSize < 11; sampleSize++) {
      stateTracker.setProgressMode(ProgressMode.MNEMONIC_HISTOGRAM, sampleSize);
      LoggingTerminalWriter terminalWriter = new LoggingTerminalWriter(/*discardHighlight=*/ true);
      stateTracker.writeProgressBar(terminalWriter);
      String output = terminalWriter.getTranscript();
      assertThat(output).contains("Mnemonic" + (10 - sampleSize) + " " + (10 - sampleSize + 1));
      assertThat(output).doesNotContain("Mnemonic" + (10 - sampleSize - 1));
    }
  }
private static class FetchEvent implements FetchProgress {
private final String id;
FetchEvent(String id) {
this.id = id;
}
@Override
public String getResourceIdentifier() {
return id;
}
@Override
public String getProgress() {
return "working...";
}
@Override
public boolean isFinished() {
return false;
}
}
}
| |
package org.embulk.output;
import com.google.common.base.Throwables;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.embulk.config.*;
import org.embulk.spi.Buffer;
import org.embulk.spi.Exec;
import org.embulk.spi.FileOutputPlugin;
import org.embulk.spi.TransactionalFileOutput;
import org.jruby.embed.ScriptingContainer;
import org.slf4j.Logger;
import java.io.IOException;
import java.io.OutputStream;
import java.util.List;
import java.util.Map;
public class HdfsOutputPlugin implements FileOutputPlugin
{
private static final Logger logger = Exec.getLogger(HdfsOutputPlugin.class);
    /**
     * User-facing configuration for this plugin, materialized by Embulk's
     * config loader from the YAML config.
     */
    public interface PluginTask extends Task
    {
        // Paths of additional Hadoop configuration files (e.g. core-site.xml)
        // to load as resources; empty by default.
        @Config("config_files")
        @ConfigDefault("[]")
        public List<String> getConfigFiles();
        // Inline Hadoop configuration key/value overrides; applied after the files.
        @Config("config")
        @ConfigDefault("{}")
        public Map<String, String> getConfig();
        // java.lang.String.format pattern for per-task/per-file name parts;
        // formatted with (taskIndex, fileIndex).
        @Config("sequence_format")
        @ConfigDefault("\"%03d.%02d\"")
        public String getSequenceFormat();
        // Final output directory; strftime-style placeholders are expanded.
        @Config("output_path")
        @ConfigDefault("\"/tmp/embulk.output.hdfs_output.%Y%m%d_%s\"")
        public String getOutputPath();
        // Temporary directory files are written to before being committed;
        // strftime-style placeholders are expanded.
        @Config("working_path")
        @ConfigDefault("\"/tmp/embulk.working.hdfs_output.%Y%m%d_%s\"")
        public String getWorkingPath();
    }
@Override
public ConfigDiff transaction(ConfigSource config,
int taskCount,
FileOutputPlugin.Control control)
{
PluginTask task = config.loadConfig(PluginTask.class);
return resume(task.dump(), taskCount, control);
}
    /**
     * Runs the output with an already-materialized task source and returns an
     * empty diff (this plugin carries no state into the next run).
     */
    @Override
    public ConfigDiff resume(TaskSource taskSource,
                             int taskCount,
                             FileOutputPlugin.Control control)
    {
        control.run(taskSource);
        return Exec.newConfigDiff();
    }
    /** No post-transaction cleanup is needed; intentionally a no-op. */
    @Override
    public void cleanup(TaskSource taskSource,
                        int taskCount,
                        List<CommitReport> successCommitReports)
    {
    }
@Override
public TransactionalFileOutput open(TaskSource taskSource, final int taskIndex)
{
PluginTask task = taskSource.loadTask(PluginTask.class);
Configuration configuration = getHdfsConfiguration(task);
FileSystem fs = getFs(configuration);
String workingPath = strftime(task.getWorkingPath());
String outputPath = strftime(task.getOutputPath());
return new TransactionalHdfsFileOutput(task, fs, workingPath, outputPath, taskIndex);
}
private Configuration getHdfsConfiguration(final PluginTask task)
{
Configuration configuration = new Configuration();
List configFiles = task.getConfigFiles();
for (Object configFile : configFiles) {
configuration.addResource(configFile.toString());
}
for (Map.Entry<String, String> entry: task.getConfig().entrySet()) {
configuration.set(entry.getKey(), entry.getValue());
}
return configuration;
}
private FileSystem getFs(final Configuration configuration) {
try {
FileSystem fs = FileSystem.get(configuration);
return fs;
}
catch (IOException e) {
logger.error(e.getMessage());
throw Throwables.propagate(e);
}
}
private String strftime(final String path)
{
// strftime
ScriptingContainer jruby = new ScriptingContainer();
Object result = jruby.runScriptlet("Time.now.strftime('" + path + "')");
return result.toString();
}
static class TransactionalHdfsFileOutput implements TransactionalFileOutput
{
private final int taskIndex;
private final FileSystem fs;
private final String workingPath;
private final String outputPath;
private final String sequenceFormat;
private int fileIndex = 0;
private int callCount = 0;
private Path currentPath = null;
private OutputStream currentStream = null;
public TransactionalHdfsFileOutput(PluginTask task, FileSystem fs, String workingPath, String outputPath, int taskIndex)
{
this.taskIndex = taskIndex;
this.fs = fs;
this.workingPath = workingPath;
this.outputPath = outputPath;
this.sequenceFormat = task.getSequenceFormat();
}
public void nextFile() {
closeCurrentStream();
currentPath = new Path(workingPath + '/' + String.format(sequenceFormat, taskIndex, fileIndex));
try {
if (fs.exists(currentPath)) {
throw new IllegalAccessException(currentPath.toString() + "already exists.");
}
currentStream = fs.create(currentPath);
logger.info("Uploading '{}'", currentPath.toString());
}
catch (IOException | IllegalAccessException e) {
logger.error(e.getMessage());
throw Throwables.propagate(e);
}
fileIndex++;
}
@Override
public void add(Buffer buffer) {
if (currentStream == null) {
throw new IllegalStateException("nextFile() must be called before poll()");
}
try {
logger.debug("#add called {} times for taskIndex {}", callCount, taskIndex);
currentStream.write(buffer.array(), buffer.offset(), buffer.limit());
callCount++;
} catch (IOException e) {
throw new RuntimeException(e);
} finally {
buffer.release();
}
}
@Override
public void finish() {
closeCurrentStream();
}
@Override
public void close() {
closeCurrentStream();
}
@Override
public void abort() {
}
@Override
public CommitReport commit() {
try {
fs.rename(new Path(workingPath), new Path(outputPath));
logger.info("rename {} => {}", workingPath, outputPath);
} catch (IOException e) {
logger.error(e.getMessage());
throw Throwables.propagate(e);
}
CommitReport report = Exec.newCommitReport();
report.set("files", currentPath);
return report;
}
private void closeCurrentStream() {
try {
if (currentStream != null) {
currentStream.close();
currentStream = null;
}
callCount = 0;
} catch (IOException e) {
logger.error(e.getMessage());
throw Throwables.propagate(e);
}
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.jini.iiop;
import java.lang.ref.WeakReference;
import java.rmi.NoSuchObjectException;
import java.rmi.Remote;
import java.rmi.RemoteException;
import java.rmi.server.ExportException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.rmi.PortableRemoteObject;
import javax.rmi.CORBA.Stub;
import javax.rmi.CORBA.Util;
import net.jini.export.Exporter;
import org.omg.CORBA.ORB;
/**
* An <code>IiopExporter</code> can be used to export a single remote object to
* the <a href="http://java.sun.com/j2se/1.4/docs/guide/rmi-iiop/">RMI-IIOP</a>
* runtime. It acts as an adapter between the {@link Exporter} interface and
* existing RMI-IIOP (un)export/utility APIs provided by the {@link javax.rmi}
* and {@link javax.rmi.CORBA} packages.
*
* <p>Note: although this exporter internally makes use of {@link
* javax.rmi.PortableRemoteObject}, it cannot be used to export remote objects
* over JRMP (as <code>PortableRemoteObject</code> can).
*
* @author Sun Microsystems, Inc.
* @since 2.0
*
*
*
* <p>This implementation uses the {@link Logger} named
* <code>net.jini.iiop.IiopExporter</code> to log
* information at the following levels:
*
* <table summary="Describes what is logged by IiopExporter at various
* logging levels" border=1 cellpadding=5>
*
* <tr> <th scope="col"> Level <th scope="col"> Description
*
* <tr> <td> {@link Level#FINE FINE} <td> successful export of object
*
* <tr> <td> {@link Level#FINE FINE} <td> attempted unexport of object
*
* </table>
*/
public final class IiopExporter implements Exporter {
    private static final Logger logger =
        Logger.getLogger("net.jini.iiop.IiopExporter");
    /** ORB to connect stubs to, or null for unconnected stubs. */
    private ORB orb;
    /** Weak ref to the exported object; non-null once export() has succeeded. */
    private WeakReference ref;
    /**
     * Creates a new exporter which can be used to export a remote object over
     * IIOP. The stub resulting from an export of a remote object with this
     * exporter will not be connected to any {@link ORB}.
     */
    public IiopExporter() {
    }
    /**
     * Creates a new exporter which can be used to export a remote object over
     * IIOP. If the given {@link ORB} is non-<code>null</code>, then the stub
     * resulting from an export of a remote object with this exporter will be
     * connected to it; otherwise, the stub will be left unconnected.
     *
     * @param orb if non-<code>null</code>, ORB to which to connect stub of
     * exported object
     */
    public IiopExporter(ORB orb) {
        this.orb = orb;
    }
    /**
     * Exports a remote object, <code>impl</code>, to the RMI-IIOP runtime and
     * returns a proxy (stub) for the remote object. If an {@link ORB} was
     * specified during construction of this exporter, then the returned
     * RMI-IIOP stub will be connected to it. This method cannot be called
     * more than once to export a remote object or an
     * {@link IllegalStateException} will be thrown.
     *
     * @throws NullPointerException {@inheritDoc}
     * @throws IllegalStateException {@inheritDoc}
     */
    public synchronized Remote export(Remote impl)
        throws ExportException
    {
        if (impl == null) {
            throw new NullPointerException();
        } else if (ref != null) {
            throw new IllegalStateException(
                "object already exported via this exporter");
        } else if (getTieClass(impl.getClass()) == null) {
            // Pre-check the tie class so we fail with a clear message instead of
            // whatever PortableRemoteObject.exportObject would report.
            throw new ExportException("tie class unavailable");
        }
        // Record the export before attempting it; unexport() keys off this ref.
        ref = new WeakReference(impl);
        try {
            PortableRemoteObject.exportObject(impl);
            Remote proxy = PortableRemoteObject.toStub(impl);
            if (orb != null) {
                ((Stub) proxy).connect(orb);
            }
            if (logger.isLoggable(Level.FINE)) {
                logger.log(Level.FINE,
                           "export of {0} via {1} returns proxy {2}",
                           new Object[]{ impl, this, proxy });
            }
            return proxy;
        } catch (ExportException ex) {
            throw ex;
        } catch (RemoteException ex) {
            throw new ExportException("export failed", ex);
        }
    }
    /**
     * Unexports the remote object exported via this exporter's
     * {@link #export} method such that the object can no longer
     * accept incoming remote calls that were possible as a result of
     * exporting via this exporter.
     *
     * <p>This method unexports the remote object via a call to
     * {@link PortableRemoteObject#unexportObject}, which only supports the
     * equivalent of a "forced" unexport (i.e., one in which the object is
     * unexported regardless of the presence of pending or in-progress calls).
     * Hence, this method will not consult the value of <code>force</code>,
     * and will always attempt a "forced" unexport of the remote object,
     * returning <code>true</code> upon normal completion.
     *
     * @param force ignored value (normally indicates whether or not to
     * unexport the object in the presence of pending or in-progress
     * calls, but this exporter does not support "unforced" unexports)
     * @return <code>true</code>
     * @throws IllegalStateException {@inheritDoc}
     */
    public synchronized boolean unexport(boolean force) {
        if (ref == null) {
            throw new IllegalStateException(
                "an object has not been exported via this exporter");
        }
        // The referent may already have been collected; then there is nothing
        // left to unexport and we simply report success.
        Remote impl = (Remote) ref.get();
        if (impl != null) {
            try {
                PortableRemoteObject.unexportObject(impl);
                if (logger.isLoggable(Level.FINE)) {
                    logger.log(Level.FINE, "unexport on {0} returns {1}",
                               new Object[]{ this, Boolean.TRUE });
                }
            } catch (NoSuchObjectException ex) {
                // Already unexported elsewhere; a forced unexport is idempotent.
            }
        }
        return true;
    }
    /**
     * Returns the string representation for this exporter.
     *
     * @return the string representation for this exporter
     */
    public String toString() {
        return (orb != null) ? "IiopExporter[" + orb + "]" : "IiopExporter[]";
    }
    /**
     * Returns tie class for the given remote object class, or null if none
     * available. The tie class name is derived by prefixing the simple class
     * name with '_' and appending "_Tie" (RMI-IIOP naming convention), and
     * superclasses are searched if the class itself has no tie.
     */
    private static Class getTieClass(Class implClass) {
        // based on com.sun.corba.se.internal.util.Utility.loadTie()
        // REMIND: cache results?
        String implClassName = implClass.getName();
        // Split at the outermost nested-class separator, else the package dot,
        // so the '_' prefix lands on the simple (possibly nested) name.
        int i = implClassName.indexOf('$');
        if (i < 0) {
            i = implClassName.lastIndexOf('.');
        }
        String tieClassName = (i > 0) ?
            implClassName.substring(0, i + 1) + "_" +
            implClassName.substring(i + 1) + "_Tie" :
            "_" + implClassName + "_Tie";
        // workaround for 4632973
        ArrayList names = new ArrayList(2);
        names.add(tieClassName);
        if (tieClassName.startsWith("java.") ||
            tieClassName.startsWith("com.sun.") ||
            tieClassName.startsWith("net.jini.") ||
            tieClassName.startsWith("jini.") ||
            tieClassName.startsWith("javax."))
        {
            // Ties for restricted packages may have been generated under the
            // org.omg.stub prefix; try that name as well.
            names.add("org.omg.stub." + tieClassName);
        }
        ClassLoader loader = implClass.getClassLoader();
        String codebase = Util.getCodebase(implClass);
        for (Iterator iter = names.iterator(); iter.hasNext();) {
            tieClassName = (String) iter.next();
            try {
                return Util.loadClass(tieClassName, codebase, loader);
            } catch (ClassNotFoundException ex) {
                logger.log(Level.FINE, codebase, ex);
            }
            // second attempt futile, but try anyway to mimic Utility.loadTie()
            if (loader != null) {
                try {
                    return loader.loadClass(tieClassName);
                } catch (ClassNotFoundException ex) {
                    // fall through to the next candidate name
                }
            }
        }
        // No tie for this class: recurse up the hierarchy, stopping before
        // PortableRemoteObject/Object which can never have application ties.
        Class implSuper = implClass.getSuperclass();
        return (implSuper != null &&
                implSuper != PortableRemoteObject.class &&
                implSuper != Object.class) ? getTieClass(implSuper) : null;
    }
}
| |
/*
* Source code generated by Celerio, a Jaxio product.
* Documentation: http://www.jaxio.com/documentation/celerio/
* Follow us on twitter: @jaxiosoft
* Need commercial support ? Contact us: info@jaxio.com
* Template angular-lab:springboot/src/main/java/rest/EntityResource.e.vm.java
*/
package com.jaxio.demo.rest;
import static org.elasticsearch.index.query.QueryBuilders.queryStringQuery;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
import javax.inject.Inject;
import javax.transaction.Transactional;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.jdbc.core.BeanPropertyRowMapper;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.scheduling.annotation.Async;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import com.jaxio.demo.domain.AppParameter;
import com.jaxio.demo.repository.AppParameterRepository;
import com.jaxio.demo.searchrepository.AppParameterSearchRepository;
@RestController
@RequestMapping("/api/appParameters")
public class AppParameterResource {

    private final Logger log = LoggerFactory.getLogger(AppParameterResource.class);

    @Inject
    private AppParameterRepository appParameterRepository;

    @Autowired
    private JdbcTemplate jdbcTemplate;

    @Inject
    private AppParameterSearchRepository appParameterSearchRepository;

    /**
     * Create a new AppParameter and index it for search.
     */
    @RequestMapping(value = "/", method = RequestMethod.POST, produces = MediaType.APPLICATION_JSON_VALUE)
    public ResponseEntity<AppParameter> create(@RequestBody AppParameter appParameter) throws URISyntaxException {
        log.debug("Create AppParameter : {}", appParameter);
        AppParameter result = appParameterRepository.save(appParameter);
        // Index the persisted entity so the search document carries the generated id.
        appParameterSearchRepository.save(result);
        return ResponseEntity.created(new URI("/api/appParameters/" + result.getId())).body(result);
    }

    /**
     * Update AppParameter (creates it when no id is provided) and re-index it.
     */
    @RequestMapping(value = "/", method = RequestMethod.PUT, produces = MediaType.APPLICATION_JSON_VALUE)
    public ResponseEntity<AppParameter> update(@RequestBody AppParameter appParameter) throws URISyntaxException {
        log.debug("Update AppParameter : {}", appParameter);
        if (appParameter.getId() == null) {
            return create(appParameter);
        }
        AppParameter result = appParameterRepository.save(appParameter);
        // Keep the search index in sync with what was actually persisted.
        appParameterSearchRepository.save(result);
        return ResponseEntity.ok().body(result);
    }

    /**
     * Find all AppParameter.
     * WARNING: if your table has got a lot of records, you will face OutOfMemory error.
     */
    @RequestMapping(value = "/",
            method = RequestMethod.GET,
            produces = MediaType.APPLICATION_JSON_VALUE)
    public ResponseEntity<List<AppParameter>> findAll() throws URISyntaxException {
        log.debug("Find all AppParameters");
        List<AppParameter> list = appParameterRepository.findAll();
        return new ResponseEntity<>(list, new HttpHeaders(), HttpStatus.OK);
    }

    /**
     * Find all AppParameter by page.
     */
    @RequestMapping(value = "/bypage", method = RequestMethod.GET, produces = MediaType.APPLICATION_JSON_VALUE)
    public Page<AppParameter> findAllByPage(Pageable pageable) throws URISyntaxException {
        log.debug("Find all by page AppParameters, page: " + pageable.getPageNumber() + ", size: " + pageable.getPageSize());
        Page<AppParameter> page = appParameterRepository.findAll(pageable);
        log.debug("There are " + page.getTotalElements() + " appParameters.");
        return page;
    }

    /**
     * Find by id AppParameter (for simple key).
     */
    @RequestMapping(value = "/{id}",
            method = RequestMethod.GET,
            produces = MediaType.APPLICATION_JSON_VALUE)
    @Transactional
    public ResponseEntity<AppParameter> findById(@PathVariable Integer id) throws URISyntaxException {
        log.debug("Find by id AppParameters : {}.", id);
        AppParameter fullyLoadedAppParameter = appParameterRepository.findOne(id);
        return Optional.ofNullable(fullyLoadedAppParameter)
                .map(appParameter -> new ResponseEntity<>(
                        appParameter,
                        HttpStatus.OK))
                .orElse(new ResponseEntity<>(HttpStatus.NOT_FOUND));
    }

    /**
     * Delete by id AppParameter (for simple key), removing both the row and
     * its search document.
     */
    @RequestMapping(value = "/{id}", method = RequestMethod.DELETE, produces = MediaType.APPLICATION_JSON_VALUE)
    public ResponseEntity<Void> delete(@PathVariable Integer id) throws URISyntaxException {
        log.debug("Delete by id AppParameters : {}.", id);
        appParameterRepository.delete(id);
        appParameterSearchRepository.delete(id);
        return ResponseEntity.ok().build();
    }

    /**
     * Mass deletion (for simple key).
     */
    @RequestMapping(value = "/mass/{id}", method = RequestMethod.DELETE, produces = MediaType.APPLICATION_JSON_VALUE)
    @Transactional
    public ResponseEntity<Void> delete(@PathVariable Integer[] id) throws URISyntaxException {
        log.debug("Delete by id AppParameters : {}.", (Object[]) id);
        Stream.of(id).forEach(item -> {
            appParameterRepository.delete(item);
            appParameterSearchRepository.delete(item);
        });
        return ResponseEntity.ok().build();
    }

    /**
     * Index all AppParameter, page by page, into the search repository.
     * Runs asynchronously so the HTTP request returns immediately.
     */
    @RequestMapping(value = "/indexAll",
            method = RequestMethod.GET,
            produces = MediaType.APPLICATION_JSON_VALUE)
    @Async
    public void indexAllAppParameters() {
        log.debug("REST request to index all AppParameters, START");
        // Index page by page only; a preliminary findAll() pass would both risk
        // OutOfMemory on large tables and index every record twice.
        PageRequest request = new PageRequest(0, 1000);
        try {
            Page<AppParameter> page = findAllByPage(request);
            page.forEach(p -> appParameterSearchRepository.index(p));
            while (page.hasNext()) {
                request = new PageRequest(request.getPageNumber() + 1, 1000);
                log.debug("we are indexing page: " + (request.getPageNumber() + 1));
                page = findAllByPage(request);
                page.forEach(p -> appParameterSearchRepository.index(p));
            }
        } catch (Exception e) {
            log.error("", e);
        }
        log.debug("REST request to index all AppParameters, EXIT");
    }

    /**
     * Search with ElasticSearch.
     */
    @RequestMapping(value = "/esearch/{query}", method = RequestMethod.GET, produces = MediaType.APPLICATION_JSON_VALUE)
    public List<AppParameter> searchAppParameters(@PathVariable String query) {
        return StreamSupport.stream(appParameterSearchRepository.search(queryStringQuery(query)).spliterator(), false).collect(Collectors.toList());
    }

    /**
     * Count AppParameter.
     * FIXME: this method should be asynchronous because it can take times to count all records !
     */
    @RequestMapping(value = "/count",
            method = RequestMethod.GET,
            produces = MediaType.APPLICATION_JSON_VALUE)
    public ResponseEntity<Long> count() throws URISyntaxException {
        log.debug("Count appParameters");
        long count = appParameterRepository.count();
        return new ResponseEntity<>(count, new HttpHeaders(), HttpStatus.OK);
    }

    /**
     * Check if a AppParameter exists via its id.
     */
    @RequestMapping(value = "/exists/{id}",
            method = RequestMethod.GET,
            produces = MediaType.APPLICATION_JSON_VALUE)
    public ResponseEntity<Boolean> exists(@PathVariable Integer id) throws URISyntaxException {
        log.debug("Check appParameter existence via its id: {}.", id);
        Boolean exists = appParameterRepository.exists(id);
        return new ResponseEntity<>(exists, new HttpHeaders(), HttpStatus.OK);
    }

    /**
     * Search appParameters by example via SQL (prefix match on each non-null
     * property, parameterized to avoid injection).
     *
     * NOTE(review): the rownum clause only limits the result to one page's
     * worth of rows; the requested page number is not applied, so every page
     * returns the first {@code pageSize} matches — confirm whether offset
     * pagination is needed here.
     */
    @RequestMapping(value = "/search", method = RequestMethod.POST, produces = MediaType.APPLICATION_JSON_VALUE)
    public Page<AppParameter> search(@RequestBody AppParameter appParameter, Pageable pageable) throws URISyntaxException {
        log.debug("Search appParameters, page: " + pageable.getPageNumber() + ", size: " + pageable.getPageSize());
        log.debug("appParameter: " + appParameter);
        long total = appParameterRepository.count();
        String sqlMainPart = "select * from (select ID, DOMAIN, KEY, VALUE from APP_PARAMETER where 1=1";
        String sqlSecondaryPart = "";
        List<Object> values = new ArrayList<Object>();
        if (appParameter.getId() != null) {
            sqlSecondaryPart += " and id = ? ";
            values.add(appParameter.getId());
        }
        if (appParameter.getDomain() != null) {
            sqlSecondaryPart += " and upper(domain) like ? ";
            values.add(appParameter.getDomain().toUpperCase() + "%");
        }
        if (appParameter.getKey() != null) {
            sqlSecondaryPart += " and upper(key) like ? ";
            values.add(appParameter.getKey().toUpperCase() + "%");
        }
        if (appParameter.getValue() != null) {
            sqlSecondaryPart += " and upper(value) like ? ";
            values.add(appParameter.getValue().toUpperCase() + "%");
        }
        sqlSecondaryPart += ") where rownum <= ?";
        values.add(pageable.getPageSize());
        log.debug("SQL: " + sqlMainPart + " " + sqlSecondaryPart);
        List<AppParameter> appParameters = jdbcTemplate.query(sqlMainPart + " " + sqlSecondaryPart, values.toArray(), new BeanPropertyRowMapper<AppParameter>(
                AppParameter.class));
        Page<AppParameter> page = new PageImpl<AppParameter>(appParameters, pageable, total);
        return page;
    }

    /**
     * Find by domain and key a AppParameter.
     */
    @RequestMapping(value = "/finder/{domain},{key}", method = RequestMethod.GET, produces = MediaType.APPLICATION_JSON_VALUE)
    public ResponseEntity<AppParameter> findById(@PathVariable String domain, @PathVariable String key) throws URISyntaxException {
        log.debug("Find by domain and key AppParameters : " + domain + ", " + key);
        AppParameter appParameter = appParameterRepository.findByDomainAndKey(domain, key);
        return new ResponseEntity<AppParameter>(appParameter, HttpStatus.OK);
    }
}
| |
/**
* Copyright (C) 2014-2015 LinkedIn Corp. (pinot-core@linkedin.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.linkedin.pinot.perf;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.net.URL;
import java.net.URLConnection;
import java.sql.Timestamp;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.commons.configuration.Configuration;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.helix.manager.zk.ZKHelixAdmin;
import org.apache.helix.model.ExternalView;
import org.apache.helix.model.IdealState;
import org.apache.helix.tools.ClusterStateVerifier;
import org.apache.helix.tools.ClusterStateVerifier.Verifier;
import org.codehaus.jackson.JsonGenerationException;
import org.codehaus.jackson.JsonParseException;
import org.codehaus.jackson.JsonProcessingException;
import org.codehaus.jackson.map.JsonMappingException;
import org.json.JSONException;
import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.yaml.snakeyaml.Yaml;
import com.linkedin.pinot.broker.broker.helix.HelixBrokerStarter;
import com.linkedin.pinot.common.config.AbstractTableConfig;
import com.linkedin.pinot.common.config.Tenant;
import com.linkedin.pinot.common.config.Tenant.TenantBuilder;
import com.linkedin.pinot.common.segment.SegmentMetadata;
import com.linkedin.pinot.common.utils.CommonConstants;
import com.linkedin.pinot.common.utils.FileUploadUtils;
import com.linkedin.pinot.common.utils.TenantRole;
import com.linkedin.pinot.controller.ControllerConf;
import com.linkedin.pinot.controller.ControllerStarter;
import com.linkedin.pinot.controller.helix.ControllerRequestBuilderUtil;
import com.linkedin.pinot.controller.helix.core.PinotHelixResourceManager;
import com.linkedin.pinot.controller.helix.core.util.HelixSetupUtils;
import com.linkedin.pinot.server.starter.helix.HelixServerStarter;
public class PerfBenchmarkDriver {
private static final Logger LOGGER = LoggerFactory.getLogger(PerfBenchmarkDriver.class);
private PerfBenchmarkDriverConf conf;
final String zkAddress;
final String clusterName;
private ControllerStarter controllerStarter;
private String controllerHost;
private int controllerPort;
private String controllerDataDir;
private String brokerBaseApiUrl;
private String serverInstanceDataDir;
private String serverInstanceSegmentTarDir;
private String serverInstanceName;
private PinotHelixResourceManager helixResourceManager;
private boolean verbose = false;
public PerfBenchmarkDriver(PerfBenchmarkDriverConf conf) {
this.conf = conf;
zkAddress = conf.getZkHost() + ":" + conf.getZkPort();
clusterName = conf.getClusterName();
init();
}
private void init() {
controllerHost = "localhost";
if (conf.getControllerHost() != null) {
controllerHost = conf.getControllerHost();
}
controllerPort = 8300;
if (conf.getControllerPort() > 0) {
controllerPort = conf.getControllerPort();
}
String controllerInstanceName = controllerHost + ":" + controllerPort;
controllerDataDir = "/tmp/controller/" + controllerInstanceName + "/controller_data_dir";
if (conf.getControllerDataDir() != null) {
controllerDataDir = conf.getControllerDataDir();
}
//broker init
brokerBaseApiUrl = "http://" + conf.getBrokerHost() + ":" + conf.getBrokerPort();
serverInstanceName = "Server_localhost_" + CommonConstants.Helix.DEFAULT_SERVER_NETTY_PORT;
if (conf.getServerInstanceName() != null) {
serverInstanceName = conf.getServerInstanceName();
}
serverInstanceDataDir = "/tmp/server/" + serverInstanceName + "/index_data_dir";
if (conf.getServerInstanceDataDir() != null) {
serverInstanceDataDir = conf.getServerInstanceDataDir();
}
serverInstanceSegmentTarDir = "/tmp/pinot/server/" + serverInstanceName + "/segment_tar_dir";
if (conf.getServerInstanceSegmentTarDir() != null) {
serverInstanceSegmentTarDir = conf.getServerInstanceSegmentTarDir();
}
}
public void run() throws Exception {
startZookeeper();
startController();
startBroker();
startServer();
configureResources();
uploadIndexSegments();
final ZKHelixAdmin helixAdmin = new ZKHelixAdmin(zkAddress);
Verifier customVerifier = new Verifier() {
@Override
public boolean verify() {
List<String> resourcesInCluster = helixAdmin.getResourcesInCluster(clusterName);
LOGGER.info("Waiting for the cluster to be set up and indexes to be loaded on the servers"
+ new Timestamp(System.currentTimeMillis()));
for (String resourceName : resourcesInCluster) {
IdealState idealState = helixAdmin.getResourceIdealState(clusterName, resourceName);
ExternalView externalView = helixAdmin.getResourceExternalView(clusterName, resourceName);
if (idealState == null || externalView == null) {
return false;
}
Set<String> partitionSet = idealState.getPartitionSet();
for (String partition : partitionSet) {
Map<String, String> instanceStateMapIS = idealState.getInstanceStateMap(partition);
Map<String, String> instanceStateMapEV = externalView.getStateMap(partition);
if (instanceStateMapIS == null || instanceStateMapEV == null) {
return false;
}
if (!instanceStateMapIS.equals(instanceStateMapEV)) {
return false;
}
}
}
LOGGER.info("Cluster is ready to serve queries");
return true;
}
};
ClusterStateVerifier.verifyByPolling(customVerifier, 60 * 1000);
postQueries();
}
public void startZookeeper() throws Exception {
int zkPort = conf.getZkPort();
// START ZOOKEEPER
if (!conf.isStartZookeeper()) {
LOGGER.info("Skipping start zookeeper step. Assumes zookeeper is already started");
return;
}
ZookeeperLauncher launcher = new ZookeeperLauncher();
launcher.start(zkPort);
}
public void createAndConfigureHelixCluster() {
HelixSetupUtils.createHelixClusterIfNeeded(clusterName, zkAddress);
}
public void startBroker() throws Exception {
if (!conf.isStartBroker()) {
LOGGER.info("Skipping start broker step. Assumes broker is already started");
return;
}
String brokerInstanceName = "Broker_localhost_" + CommonConstants.Helix.DEFAULT_BROKER_QUERY_PORT;
Configuration brokerConfiguration = new PropertiesConfiguration();
brokerConfiguration.setProperty("instanceId", brokerInstanceName);
HelixBrokerStarter helixBrokerStarter = new HelixBrokerStarter(clusterName, zkAddress, brokerConfiguration);
}
public void startServer() throws Exception {
if (!conf.shouldStartServer()) {
LOGGER.info("Skipping start server step. Assumes server is already started");
return;
}
Configuration serverConfiguration = new PropertiesConfiguration();
serverConfiguration.addProperty(CommonConstants.Server.CONFIG_OF_INSTANCE_DATA_DIR.toString(),
serverInstanceDataDir);
serverConfiguration.addProperty(CommonConstants.Server.CONFIG_OF_INSTANCE_SEGMENT_TAR_DIR.toString(),
serverInstanceSegmentTarDir);
serverConfiguration.setProperty("instanceId", serverInstanceName);
HelixServerStarter helixServerStarter = new HelixServerStarter(clusterName, zkAddress, serverConfiguration);
}
public void startController() {
if (!conf.shouldStartController()) {
LOGGER.info("Skipping start controller step. Assumes controller is already started");
return;
}
ControllerConf conf = getControllerConf();
controllerStarter = new ControllerStarter(conf);
controllerStarter.start();
}
private ControllerConf getControllerConf() {
ControllerConf conf = new ControllerConf();
conf.setHelixClusterName(clusterName);
conf.setZkStr(zkAddress);
conf.setControllerHost(controllerHost);
conf.setControllerPort(String.valueOf(controllerPort));
conf.setDataDir(controllerDataDir);
conf.setControllerVipHost("localhost");
return conf;
}
public void configureResources() throws Exception {
if (!conf.isConfigureResources()) {
LOGGER.info("Skipping configure resources step");
return;
}
String tableName = conf.getTableName();
configureTable(tableName);
}
public void configureTable(String tableName) throws Exception {
//TODO:Get these from configuration
int numInstances = 1;
int numReplicas = 1;
String segmentAssignmentStrategy = "BalanceNumSegmentAssignmentStrategy";
String brokerTenantName = "testBrokerTenant";
String serverTenantName = "testServerTenant";
// create broker tenant
Tenant brokerTenant =
new TenantBuilder(brokerTenantName).setRole(TenantRole.BROKER).setTotalInstances(numInstances).build();
helixResourceManager = new PinotHelixResourceManager(getControllerConf());
helixResourceManager.start();
helixResourceManager.createBrokerTenant(brokerTenant);
// create server tenant
Tenant serverTenant =
new TenantBuilder(serverTenantName).setRole(TenantRole.SERVER).setTotalInstances(numInstances)
.setOfflineInstances(numInstances).build();
helixResourceManager.createServerTenant(serverTenant);
// upload schema
// create table
String jsonString =
ControllerRequestBuilderUtil.buildCreateOfflineTableJSON(tableName, serverTenantName, brokerTenantName,
numReplicas, segmentAssignmentStrategy).toString();
AbstractTableConfig offlineTableConfig = AbstractTableConfig.init(jsonString);
helixResourceManager.addTable(offlineTableConfig);
}
public void addSegment(SegmentMetadata metadata) {
helixResourceManager.addSegment(metadata,
"http://" + controllerHost + ":" + controllerPort + "/" + metadata.getName());
}
public void uploadIndexSegments() throws Exception {
if (!conf.isUploadIndexes()) {
LOGGER.info("Skipping upload Indexes step");
return;
}
String indexDirectory = conf.getIndexDirectory();
File file = new File(indexDirectory);
File[] listFiles = file.listFiles();
for (File indexFile : listFiles) {
LOGGER.info("Uploading index segment " + indexFile.getAbsolutePath());
FileUploadUtils.sendSegmentFile(controllerHost, "" + controllerPort, indexFile.getName(), new FileInputStream(
indexFile), indexFile.length());
}
}
public void postQueries() throws Exception {
if (!conf.isRunQueries()) {
LOGGER.info("Skipping run queries step");
return;
}
String queriesDirectory = conf.getQueriesDirectory();
File[] queryFiles = new File(queriesDirectory).listFiles();
for (File file : queryFiles) {
if (!file.getName().endsWith(".txt")) {
continue;
}
BufferedReader reader = new BufferedReader(new FileReader(file));
String query;
LOGGER.info("Running queries from " + file);
while ((query = reader.readLine()) != null) {
postQuery(query);
}
reader.close();
}
}
/**
 * Posts a single PQL query to the broker and returns the broker's JSON
 * response with an added "totalTime" field holding client-side latency in ms.
 * When verbose is enabled and documents were scanned, logs timing details.
 *
 * @param query the PQL query string to send
 * @return the broker response JSON, augmented with "totalTime"
 * @throws Exception on connection, I/O, or JSON parsing failure
 */
public JSONObject postQuery(String query) throws Exception {
  final JSONObject json = new JSONObject();
  json.put("pql", query);
  final String reqStr = json.toString();
  final long start = System.currentTimeMillis();
  final URLConnection conn = new URL(brokerBaseApiUrl + "/query").openConnection();
  conn.setDoOutput(true);
  // try-with-resources closes the request stream even on failure; the
  // original never closed writer or reader (connection/stream leak).
  try (BufferedWriter writer =
      new BufferedWriter(new OutputStreamWriter(conn.getOutputStream(), "UTF-8"))) {
    writer.write(reqStr, 0, reqStr.length());
    writer.flush();
  }
  final StringBuilder sb = new StringBuilder();
  try (BufferedReader reader =
      new BufferedReader(new InputStreamReader(conn.getInputStream(), "UTF-8"))) {
    String line;
    while ((line = reader.readLine()) != null) {
      sb.append(line);
    }
  }
  final long stop = System.currentTimeMillis();
  final String res = sb.toString();
  final JSONObject ret = new JSONObject(res);
  ret.put("totalTime", (stop - start));
  if ((ret.getLong("numDocsScanned") > 0) && verbose) {
    LOGGER.info("reqStr = " + reqStr);
    LOGGER.info(" Client side time in ms:" + (stop - start));
    LOGGER.info("numDocScanned : " + ret.getLong("numDocsScanned"));
    LOGGER.info("timeUsedMs : " + ret.getLong("timeUsedMs"));
    LOGGER.info("totalTime : " + ret.getLong("totalTime"));
    LOGGER.info("res = " + res);
  }
  return ret;
}
/**
 * Entry point: loads a {@code PerfBenchmarkDriverConf} from the YAML file
 * given as the first argument and runs the benchmark driver.
 *
 * @param args args[0] is the path to the YAML configuration file
 * @throws Exception if the config cannot be read or the driver fails
 */
public static void main(String[] args) throws Exception {
  PerfBenchmarkDriverConf conf;
  // try-with-resources closes the config stream; the original leaked it.
  try (FileInputStream configStream = new FileInputStream(args[0])) {
    conf = (PerfBenchmarkDriverConf) new Yaml().load(configStream);
  }
  PerfBenchmarkDriver perfBenchmarkDriver = new PerfBenchmarkDriver(conf);
  perfBenchmarkDriver.run();
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity;
import static org.junit.Assert.assertEquals;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.when;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.api.records.Priority;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.ResourceInformation;
import org.apache.hadoop.yarn.api.records.ResourceRequest;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
import org.apache.hadoop.yarn.server.resourcemanager.MockAM;
import org.apache.hadoop.yarn.server.resourcemanager.MockNM;
import org.apache.hadoop.yarn.server.resourcemanager.MockRM;
import org.apache.hadoop.yarn.server.resourcemanager.MockRMAppSubmissionData;
import org.apache.hadoop.yarn.server.resourcemanager.MockRMAppSubmitter;
import org.apache.hadoop.yarn.server.resourcemanager.RMContext;
import org.apache.hadoop.yarn.server.resourcemanager.nodelabels.NullRMNodeLabelsManager;
import org.apache.hadoop.yarn.server.resourcemanager.nodelabels.RMNodeLabelsManager;
import org.apache.hadoop.yarn.server.resourcemanager.resource.TestResourceProfiles;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttempt;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceLimits;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceUsage;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerApplicationAttempt.AMState;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.preemption.PreemptionManager;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.common.fica.FiCaSchedulerApp;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.common.fica.FiCaSchedulerNode;
import org.apache.hadoop.yarn.util.resource.DefaultResourceCalculator;
import org.apache.hadoop.yarn.util.resource.DominantResourceCalculator;
import org.apache.hadoop.yarn.util.resource.ResourceCalculator;
import org.apache.hadoop.yarn.util.resource.ResourceUtils;
import org.apache.hadoop.yarn.util.resource.Resources;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.mockito.ArgumentMatchers;
import org.mockito.Mockito;
import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableMap;
import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableSet;
public class TestApplicationLimitsByPartition {
final static int GB = 1024;
LeafQueue queue;
RMNodeLabelsManager mgr;
private YarnConfiguration conf;
private final ResourceCalculator resourceCalculator =
new DefaultResourceCalculator();
/**
 * Per-test setup: builds a fresh YARN configuration pinned to the
 * CapacityScheduler and initializes a no-op-backed node labels manager.
 */
@Before
public void setUp() throws IOException {
  // Fresh configuration for every test, with the scheduler under test.
  YarnConfiguration yarnConf = new YarnConfiguration();
  yarnConf.setClass(YarnConfiguration.RM_SCHEDULER, CapacityScheduler.class,
      ResourceScheduler.class);
  conf = yarnConf;
  // Labels manager with no persistent store, initialized from the same conf.
  NullRMNodeLabelsManager labelsManager = new NullRMNodeLabelsManager();
  labelsManager.init(yarnConf);
  mgr = labelsManager;
}
/**
 * Registers labels "x" and "y" in the labels manager and maps node h1 to
 * label "x" and node h2 to label "y".
 *
 * @throws IOException if the labels manager rejects the update
 */
private void simpleNodeLabelMappingToManager() throws IOException {
  // set node -> label
  mgr.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of("x", "y"));
  mgr.addLabelsToNode(ImmutableMap.of(NodeId.newInstance("h1", 0),
      TestUtils.toSet("x"), NodeId.newInstance("h2", 0),
      TestUtils.toSet("y")));
}
/**
 * Registers labels "x", "y" and "z" in the labels manager and maps:
 * h1 -> x, h2 -> y, h3 -> y, h4 -> z, and h5 -> no label.
 *
 * @throws IOException if the labels manager rejects the update
 */
private void complexNodeLabelMappingToManager() throws IOException {
  // set node -> label
  mgr.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of("x", "y",
      "z"));
  mgr.addLabelsToNode(ImmutableMap.of(NodeId.newInstance("h1", 0),
      TestUtils.toSet("x"), NodeId.newInstance("h2", 0),
      TestUtils.toSet("y"), NodeId.newInstance("h3", 0),
      TestUtils.toSet("y"), NodeId.newInstance("h4", 0),
      TestUtils.toSet("z"), NodeId.newInstance("h5", 0),
      RMNodeLabelsManager.EMPTY_STRING_SET));
}
/**
 * Verifies the AM resource limit at both the partition level (queue a1,
 * label "x", limit 0.2 => 2GB) and the queue level (queue c1, default label,
 * limit 0.2 => 2GB): two 1GB AMs activate under each limit, a third app
 * stays pending with the expected diagnostics, and killing a running app
 * lets the pending one activate.
 */
@Test(timeout = 120000)
public void testAMResourceLimitWithLabels() throws Exception {
  /*
   * Test Case:
   * Verify AM resource limit per partition level and per queue level. So
   * we use 2 queues to verify this case.
   * Queue a1 supports labels (x,y). Configure am-resource-limit as 0.2 (x)
   * Queue c1 supports default label. Configure am-resource-limit as 0.2
   *
   * Queue A1 for label X can only support 2Gb AM resource.
   * Queue C1 (empty label) can support 2Gb AM resource.
   *
   * Verify atleast one AM is launched, and AM resources should not go more
   * than 2GB in each queue.
   */
  simpleNodeLabelMappingToManager();
  CapacitySchedulerConfiguration config = (CapacitySchedulerConfiguration)
      TestUtils.getConfigurationWithQueueLabels(conf);
  // After getting queue conf, configure AM resource percent for Queue A1
  // as 0.2 (Label X) and for Queue C1 as 0.2 (Empty Label)
  final String A1 = CapacitySchedulerConfiguration.ROOT + ".a" + ".a1";
  final String C1 = CapacitySchedulerConfiguration.ROOT + ".c" + ".c1";
  config.setMaximumAMResourcePercentPerPartition(A1, "x", 0.2f);
  config.setMaximumApplicationMasterResourcePerQueuePercent(C1, 0.2f);
  // Now inject node label manager with this updated config
  MockRM rm1 = new MockRM(config) {
    @Override
    public RMNodeLabelsManager createNodeLabelManager() {
      return mgr;
    }
  };
  rm1.getRMContext().setNodeLabelManager(mgr);
  rm1.start();
  // NOTE(review): nm1 is never used below; the registration alone adds
  // 10GB of label-x capacity to the cluster.
  MockNM nm1 = rm1.registerNode("h1:1234", 10 * GB); // label = x
  rm1.registerNode("h2:1234", 10 * GB); // label = y
  MockNM nm3 = rm1.registerNode("h3:1234", 10 * GB); // label = <empty>
  // Submit app1 with 1Gb AM resource to Queue A1 for label X
  MockRMAppSubmissionData data5 =
      MockRMAppSubmissionData.Builder.createWithMemory(GB, rm1)
          .withAppName("app")
          .withUser("user")
          .withAcls(null)
          .withQueue("a1")
          .withAmLabel("x")
          .build();
  RMApp app1 = MockRMAppSubmitter.submit(rm1, data5);
  // Submit app2 with 1Gb AM resource to Queue A1 for label X
  MockRMAppSubmissionData data4 =
      MockRMAppSubmissionData.Builder.createWithMemory(GB, rm1)
          .withAppName("app")
          .withUser("user")
          .withAcls(null)
          .withQueue("a1")
          .withAmLabel("x")
          .build();
  RMApp app2 = MockRMAppSubmitter.submit(rm1, data4);
  // Submit 3rd app to Queue A1 for label X, and this will be pending as
  // AM limit is already crossed for label X. (2GB)
  MockRMAppSubmissionData data3 =
      MockRMAppSubmissionData.Builder.createWithMemory(GB, rm1)
          .withAppName("app")
          .withUser("user")
          .withAcls(null)
          .withQueue("a1")
          .withAmLabel("x")
          .build();
  RMApp pendingApp = MockRMAppSubmitter.submit(rm1, data3);
  CapacityScheduler cs = (CapacityScheduler) rm1.getResourceScheduler();
  LeafQueue leafQueue = (LeafQueue) cs.getQueue("a1");
  Assert.assertNotNull(leafQueue);
  // Two 1GB AMs fit under the 2GB am-limit for label "x"; the third app
  // stays pending.
  Assert.assertEquals(2, leafQueue.getNumActiveApplications());
  Assert.assertEquals(1, leafQueue.getNumPendingApplications());
  Assert.assertTrue("AM diagnostics not set properly", app1.getDiagnostics()
      .toString().contains(AMState.ACTIVATED.getDiagnosticMessage()));
  Assert.assertTrue("AM diagnostics not set properly", app2.getDiagnostics()
      .toString().contains(AMState.ACTIVATED.getDiagnosticMessage()));
  Assert.assertTrue("AM diagnostics not set properly",
      pendingApp.getDiagnostics().toString()
          .contains(AMState.INACTIVATED.getDiagnosticMessage()));
  Assert.assertTrue("AM diagnostics not set properly",
      pendingApp.getDiagnostics().toString().contains(
          CSAMContainerLaunchDiagnosticsConstants.QUEUE_AM_RESOURCE_LIMIT_EXCEED));
  // Now verify the same test case in Queue C1 where label is not configured.
  // Submit an app to Queue C1 with empty label
  MockRMAppSubmissionData data2 =
      MockRMAppSubmissionData.Builder.createWithMemory(GB, rm1)
          .withAppName("app")
          .withUser("user")
          .withAcls(null)
          .withQueue("c1")
          .withUnmanagedAM(false)
          .build();
  RMApp app3 = MockRMAppSubmitter.submit(rm1, data2);
  MockRM.launchAndRegisterAM(app3, rm1, nm3);
  // Submit next app to Queue C1 with empty label
  MockRMAppSubmissionData data1 =
      MockRMAppSubmissionData.Builder.createWithMemory(GB, rm1)
          .withAppName("app")
          .withUser("user")
          .withAcls(null)
          .withQueue("c1")
          .withUnmanagedAM(false)
          .build();
  RMApp app4 = MockRMAppSubmitter.submit(rm1, data1);
  MockRM.launchAndRegisterAM(app4, rm1, nm3);
  // Submit 3rd app to Queue C1. This will be pending as Queue's am-limit
  // is reached.
  MockRMAppSubmissionData data =
      MockRMAppSubmissionData.Builder.createWithMemory(GB, rm1)
          .withAppName("app")
          .withUser("user")
          .withAcls(null)
          .withQueue("c1")
          .withUnmanagedAM(false)
          .build();
  pendingApp = MockRMAppSubmitter.submit(rm1, data);
  leafQueue = (LeafQueue) cs.getQueue("c1");
  Assert.assertNotNull(leafQueue);
  // 2 apps will be activated, third one will be pending as am-limit
  // is reached.
  Assert.assertEquals(2, leafQueue.getNumActiveApplications());
  Assert.assertEquals(1, leafQueue.getNumPendingApplications());
  Assert.assertTrue("AM diagnostics not set properly",
      pendingApp.getDiagnostics().toString()
          .contains(AMState.INACTIVATED.getDiagnosticMessage()));
  Assert.assertTrue("AM diagnostics not set properly",
      pendingApp.getDiagnostics().toString().contains(
          CSAMContainerLaunchDiagnosticsConstants.QUEUE_AM_RESOURCE_LIMIT_EXCEED));
  rm1.killApp(app3.getApplicationId());
  // Give the scheduler time to activate the pending app after the kill.
  // NOTE(review): a fixed sleep can be flaky under load; a poll/wait on the
  // queue counts would be sturdier.
  Thread.sleep(1000);
  // After killing one running app, pending app will also get activated.
  Assert.assertEquals(2, leafQueue.getNumActiveApplications());
  Assert.assertEquals(0, leafQueue.getNumPendingApplications());
  rm1.close();
}
/**
 * Verifies that at least one AM is always activated per partition and per
 * queue even when its AM resource demand exceeds the configured
 * am-resource-limit (0.15 => 1.5GB here), as long as no other app is running
 * in that partition/queue; a subsequent app must then stay pending.
 */
@Test(timeout = 120000)
public void testAtleastOneAMRunPerPartition() throws Exception {
  /*
   * Test Case:
   * Even though am-resource-limit per queue/partition may cross if we
   * activate an app (high am resource demand), we have to activate it
   * since no other apps are running in that Queue/Partition. Here also
   * we run one test case for partition level and one in queue level to
   * ensure no breakage in existing functionality.
   *
   * Queue a1 supports labels (x,y). Configure am-resource-limit as 0.15 (x)
   * Queue c1 supports default label. Configure am-resource-limit as 0.15
   *
   * Queue A1 for label X can only support 1.5Gb AM resource.
   * Queue C1 (empty label) can support 1.5Gb AM resource.
   *
   * Verify atleast one AM is launched in each Queue.
   */
  simpleNodeLabelMappingToManager();
  CapacitySchedulerConfiguration config = (CapacitySchedulerConfiguration)
      TestUtils.getConfigurationWithQueueLabels(conf);
  // After getting queue conf, configure AM resource percent for Queue A1
  // as 0.15 (Label X) and for Queue C1 as 0.15 (Empty Label)
  final String A1 = CapacitySchedulerConfiguration.ROOT + ".a" + ".a1";
  final String C1 = CapacitySchedulerConfiguration.ROOT + ".c" + ".c1";
  config.setMaximumAMResourcePercentPerPartition(A1, "x", 0.15f);
  config.setMaximumApplicationMasterResourcePerQueuePercent(C1, 0.15f);
  // inject node label manager
  MockRM rm1 = new MockRM(config) {
    @Override
    public RMNodeLabelsManager createNodeLabelManager() {
      return mgr;
    }
  };
  rm1.getRMContext().setNodeLabelManager(mgr);
  rm1.start();
  MockNM nm1 = rm1.registerNode("h1:1234", 10 * GB); // label = x
  rm1.registerNode("h2:1234", 10 * GB); // label = y
  MockNM nm3 = rm1.registerNode("h3:1234", 10 * GB); // label = <empty>
  // Submit app1 (2 GB) to Queue A1 and label X
  MockRMAppSubmissionData data3 =
      MockRMAppSubmissionData.Builder.createWithMemory(2 * GB, rm1)
          .withAppName("app")
          .withUser("user")
          .withAcls(null)
          .withQueue("a1")
          .withAmLabel("x")
          .build();
  RMApp app1 = MockRMAppSubmitter.submit(rm1, data3);
  // This app must be activated eventhough the am-resource per-partition
  // limit is only for 1.5GB.
  MockRM.launchAndRegisterAM(app1, rm1, nm1);
  // Submit 2nd app to label "X" with one GB and it must be pending since
  // am-resource per-partition limit is crossed (1.5 GB was the limit).
  MockRMAppSubmissionData data2 =
      MockRMAppSubmissionData.Builder.createWithMemory(GB, rm1)
          .withAppName("app")
          .withUser("user")
          .withAcls(null)
          .withQueue("a1")
          .withAmLabel("x")
          .build();
  MockRMAppSubmitter.submit(rm1, data2);
  CapacityScheduler cs = (CapacityScheduler) rm1.getResourceScheduler();
  LeafQueue leafQueue = (LeafQueue) cs.getQueue("a1");
  Assert.assertNotNull(leafQueue);
  // Only 1 app will be activated as am-limit for partition "x" is 0.15
  Assert.assertEquals(1, leafQueue.getNumActiveApplications());
  Assert.assertEquals(1, leafQueue.getNumPendingApplications());
  // Now verify the same test case in Queue C1 which takes default label
  // to see queue level am-resource-limit is still working as expected.
  // Submit an app to Queue C1 with empty label (2 GB)
  MockRMAppSubmissionData data1 =
      MockRMAppSubmissionData.Builder.createWithMemory(2 * GB, rm1)
          .withAppName("app")
          .withUser("user")
          .withAcls(null)
          .withQueue("c1")
          .withUnmanagedAM(false)
          .build();
  RMApp app3 = MockRMAppSubmitter.submit(rm1, data1);
  // This app must be activated even though the am-resource per-queue
  // limit is only for 1.5GB
  MockRM.launchAndRegisterAM(app3, rm1, nm3);
  // Submit 2nd app to C1 (Default label, hence am-limit per-queue will be
  // considered).
  MockRMAppSubmissionData data =
      MockRMAppSubmissionData.Builder.createWithMemory(GB, rm1)
          .withAppName("app")
          .withUser("user")
          .withAcls(null)
          .withQueue("c1")
          .withUnmanagedAM(false)
          .build();
  MockRMAppSubmitter.submit(rm1, data);
  leafQueue = (LeafQueue) cs.getQueue("c1");
  Assert.assertNotNull(leafQueue);
  // 1 app will be activated (and it has AM resource more than queue limit)
  Assert.assertEquals(1, leafQueue.getNumActiveApplications());
  Assert.assertEquals(1, leafQueue.getNumPendingApplications());
  rm1.close();
}
/**
 * Verifies that when no per-partition AM limit is configured, the
 * CapacityScheduler falls back to the queue-level am-resource limit
 * (0.2 => 2GB for queue a1 here), so one 2GB AM activates and a second
 * 1GB app stays pending with the expected diagnostics.
 */
@Test(timeout = 120000)
public void testDefaultAMLimitFromQueueForPartition() throws Exception {
  /*
   * Test Case:
   * Configure AM resource limit per queue level. If partition level config
   * is not found, we will be considering per-queue level am-limit. Ensure
   * this is working as expected.
   *
   * Queue A1 am-resource limit to be configured as 0.2 (not for partition x)
   *
   * Eventhough per-partition level config is not done, CS should consider
   * the configuration done for queue level.
   */
  simpleNodeLabelMappingToManager();
  CapacitySchedulerConfiguration config = (CapacitySchedulerConfiguration)
      TestUtils.getConfigurationWithQueueLabels(conf);
  // After getting queue conf, configure AM resource percent for Queue A1
  // as 0.2 (not for partition, rather in queue level)
  final String A1 = CapacitySchedulerConfiguration.ROOT + ".a" + ".a1";
  config.setMaximumApplicationMasterResourcePerQueuePercent(A1, 0.2f);
  // inject node label manager
  MockRM rm1 = new MockRM(config) {
    @Override
    public RMNodeLabelsManager createNodeLabelManager() {
      return mgr;
    }
  };
  rm1.getRMContext().setNodeLabelManager(mgr);
  rm1.start();
  // NOTE(review): nm1 is never used below; the registration alone adds
  // 10GB of label-x capacity to the cluster.
  MockNM nm1 = rm1.registerNode("h1:1234", 10 * GB); // label = x
  rm1.registerNode("h2:1234", 10 * GB); // label = y
  rm1.registerNode("h3:1234", 10 * GB); // label = <empty>
  // Submit app1 (2 GB) to Queue A1 and label X
  MockRMAppSubmissionData data1 =
      MockRMAppSubmissionData.Builder.createWithMemory(2 * GB, rm1)
          .withAppName("app")
          .withUser("user")
          .withAcls(null)
          .withQueue("a1")
          .withAmLabel("x")
          .build();
  RMApp app1 = MockRMAppSubmitter.submit(rm1, data1);
  // Submit 2nd app to label "X" with one GB. Since queue am-limit is 2GB,
  // 2nd app will be pending and first one will get activated.
  MockRMAppSubmissionData data =
      MockRMAppSubmissionData.Builder.createWithMemory(GB, rm1)
          .withAppName("app")
          .withUser("user")
          .withAcls(null)
          .withQueue("a1")
          .withAmLabel("x")
          .build();
  RMApp pendingApp = MockRMAppSubmitter.submit(rm1, data);
  CapacityScheduler cs = (CapacityScheduler) rm1.getResourceScheduler();
  LeafQueue leafQueue = (LeafQueue) cs.getQueue("a1");
  Assert.assertNotNull(leafQueue);
  // Only 1 app will be activated as am-limit for queue is 0.2 and same is
  // used for partition "x" also.
  Assert.assertEquals(1, leafQueue.getNumActiveApplications());
  Assert.assertEquals(1, leafQueue.getNumPendingApplications());
  Assert.assertTrue("AM diagnostics not set properly", app1.getDiagnostics()
      .toString().contains(AMState.ACTIVATED.getDiagnosticMessage()));
  Assert.assertTrue("AM diagnostics not set properly",
      pendingApp.getDiagnostics().toString()
          .contains(AMState.INACTIVATED.getDiagnosticMessage()));
  Assert.assertTrue("AM diagnostics not set properly",
      pendingApp.getDiagnostics().toString()
          .contains(CSAMContainerLaunchDiagnosticsConstants.QUEUE_AM_RESOURCE_LIMIT_EXCEED));
  rm1.close();
}
/**
 * Verifies the per-user AM resource limit within a partition: with a 0.4
 * partition am-limit (4GB) and a 50% user limit, each user gets 2GB of AM
 * resource; user_1's third app must stay pending with a user-level
 * am-resource-limit diagnostic even though the partition limit is not hit.
 */
@Test(timeout = 120000)
public void testUserAMResourceLimitWithLabels() throws Exception {
  /*
   * Test Case:
   * Verify user level AM resource limit. This test case is ran with two
   * users. And per-partition level am-resource-limit will be 0.4, which
   * internally will be 4GB. Hence 2GB will be available for each
   * user for its AM resource.
   *
   * Now this test case will create a scenario where AM resource limit per
   * partition is not met, but user level am-resource limit is reached.
   * Hence app will be pending.
   */
  final String user_0 = "user_0";
  final String user_1 = "user_1";
  simpleNodeLabelMappingToManager();
  CapacitySchedulerConfiguration config = (CapacitySchedulerConfiguration)
      TestUtils.getConfigurationWithQueueLabels(conf);
  // After getting queue conf, configure AM resource percent for Queue A1
  // as 0.4 (Label X). Also set userlimit as 50% for this queue. So when we
  // have two users submitting applications, each user will get 50% of AM
  // resource which is available in this partition.
  final String A1 = CapacitySchedulerConfiguration.ROOT + ".a" + ".a1";
  config.setMaximumAMResourcePercentPerPartition(A1, "x", 0.4f);
  config.setUserLimit(A1, 50);
  // Now inject node label manager with this updated config
  MockRM rm1 = new MockRM(config) {
    @Override
    public RMNodeLabelsManager createNodeLabelManager() {
      return mgr;
    }
  };
  rm1.getRMContext().setNodeLabelManager(mgr);
  rm1.start();
  MockNM nm1 = rm1.registerNode("h1:1234", 10 * GB); // label = x
  rm1.registerNode("h2:1234", 10 * GB); // label = y
  rm1.registerNode("h3:1234", 10 * GB); // label = <empty>
  // Submit app1 with 1Gb AM resource to Queue A1 for label X for user0
  MockRMAppSubmissionData data3 =
      MockRMAppSubmissionData.Builder.createWithMemory(GB, rm1)
          .withAppName("app")
          .withUser(user_0)
          .withAcls(null)
          .withQueue("a1")
          .withAmLabel("x")
          .build();
  RMApp app1 = MockRMAppSubmitter.submit(rm1, data3);
  MockAM am1 = MockRM.launchAndRegisterAM(app1, rm1, nm1);
  // Place few allocate requests to make it an active application
  am1.allocate("*", 1 * GB, 15, new ArrayList<ContainerId>(), "");
  // Now submit 2nd app to Queue A1 for label X for user1
  MockRMAppSubmissionData data2 =
      MockRMAppSubmissionData.Builder.createWithMemory(GB, rm1)
          .withAppName("app")
          .withUser(user_1)
          .withAcls(null)
          .withQueue("a1")
          .withAmLabel("x")
          .build();
  RMApp app2 = MockRMAppSubmitter.submit(rm1, data2);
  MockRM.launchAndRegisterAM(app2, rm1, nm1);
  CapacityScheduler cs = (CapacityScheduler) rm1.getResourceScheduler();
  LeafQueue leafQueue = (LeafQueue) cs.getQueue("a1");
  Assert.assertNotNull(leafQueue);
  // Verify active applications count in this queue.
  Assert.assertEquals(2, leafQueue.getNumActiveApplications());
  Assert.assertEquals(1, leafQueue.getNumActiveApplications(user_0));
  Assert.assertEquals(0, leafQueue.getNumPendingApplications());
  // Submit 3rd app to Queue A1 for label X for user1. Now user1 will have
  // 2 applications (2 GB resource) and user0 will have one app (1GB).
  MockRMAppSubmissionData data1 =
      MockRMAppSubmissionData.Builder.createWithMemory(GB, rm1)
          .withAppName("app")
          .withUser(user_1)
          .withAcls(null)
          .withQueue("a1")
          .withAmLabel("x")
          .build();
  RMApp app3 = MockRMAppSubmitter.submit(rm1, data1);
  MockAM am2 = MockRM.launchAndRegisterAM(app3, rm1, nm1);
  // Place few allocate requests to make it an active application. This is
  // to ensure that user1 and user0 are active users.
  am2.allocate("*", 1 * GB, 10, new ArrayList<ContainerId>(), "");
  // Submit final app to Queue A1 for label X. Since we are trying to submit
  // for user1, we need 3Gb resource for AMs.
  // 4Gb -> 40% of label "X" in queue A1
  // Since we have 2 users, 50% of 4Gb will be max for each user. Here user1
  // has already crossed this 2GB limit, hence this app will be pending.
  MockRMAppSubmissionData data =
      MockRMAppSubmissionData.Builder.createWithMemory(GB, rm1)
          .withAppName("app")
          .withUser(user_1)
          .withAcls(null)
          .withQueue("a1")
          .withAmLabel("x")
          .build();
  RMApp pendingApp = MockRMAppSubmitter.submit(rm1, data);
  // Verify active applications count per user and also in queue level.
  Assert.assertEquals(3, leafQueue.getNumActiveApplications());
  Assert.assertEquals(1, leafQueue.getNumActiveApplications(user_0));
  Assert.assertEquals(2, leafQueue.getNumActiveApplications(user_1));
  Assert.assertEquals(1, leafQueue.getNumPendingApplications(user_1));
  Assert.assertEquals(1, leafQueue.getNumPendingApplications());
  //verify Diagnostic messages
  Assert.assertTrue("AM diagnostics not set properly",
      pendingApp.getDiagnostics().toString()
          .contains(AMState.INACTIVATED.getDiagnosticMessage()));
  Assert.assertTrue("AM diagnostics not set properly",
      pendingApp.getDiagnostics().toString().contains(
          CSAMContainerLaunchDiagnosticsConstants.USER_AM_RESOURCE_LIMIT_EXCEED));
  rm1.close();
}
/**
 * Verifies am-resource-limit calculation in a multi-label cluster: queue a1
 * under label "y" (10GB effective, 0.25 limit => ~2.5GB) activates two AMs
 * (2GB + 1GB) and leaves a third pending; queue b1 with no label (5GB
 * effective, 0.15 limit => 1GB) activates one AM and leaves one pending.
 */
@Test
public void testAMResourceLimitForMultipleApplications() throws Exception {
  /*
   * Test Case:
   * In a complex node label setup, verify am-resource-percentage calculation
   * and check whether applications can get activated as per expectation.
   */
  complexNodeLabelMappingToManager();
  CapacitySchedulerConfiguration config = (CapacitySchedulerConfiguration)
      TestUtils.getComplexConfigurationWithQueueLabels(conf);
  /*
   * Queue structure:
   *                      root (*)
   *                  ________________
   *                 /                \
   *               a x(100%), y(50%)   b y(50%), z(100%)
   *               ________________    ______________
   *              /                   /              \
   *             a1 (x,y)         b1(no)              b2(y,z)
   *             100%                          y = 100%, z = 100%
   *
   * Node structure:
   * h1 : x
   * h2 : y
   * h3 : y
   * h4 : z
   * h5 : NO
   *
   * Total resource:
   * x: 10G
   * y: 20G
   * z: 10G
   * *: 10G
   *
   * AM resource percentage config:
   * A1 : 0.25
   * B1 : 0.15
   */
  final String A1 = CapacitySchedulerConfiguration.ROOT + ".a" + ".a1";
  final String B1 = CapacitySchedulerConfiguration.ROOT + ".b" + ".b1";
  config.setMaximumAMResourcePercentPerPartition(A1, "y", 0.25f);
  config.setMaximumApplicationMasterResourcePerQueuePercent(B1, 0.15f);
  // Now inject node label manager with this updated config
  MockRM rm1 = new MockRM(config) {
    @Override
    public RMNodeLabelsManager createNodeLabelManager() {
      return mgr;
    }
  };
  rm1.getRMContext().setNodeLabelManager(mgr);
  rm1.start();
  rm1.registerNode("h1:1234", 10 * GB); // label = x
  MockNM nm2 = rm1.registerNode("h2:1234", 10 * GB); // label = y
  MockNM nm3 = rm1.registerNode("h3:1234", 10 * GB); // label = y
  rm1.registerNode("h4:1234", 10 * GB); // label = z
  MockNM nm5 = rm1.registerNode("h5:1234", 10 * GB); // label = <empty>
  // Submit app1 with 2Gb AM resource to Queue A1 for label Y
  MockRMAppSubmissionData data4 =
      MockRMAppSubmissionData.Builder.createWithMemory(2 * GB, rm1)
          .withAppName("app")
          .withUser("user")
          .withAcls(null)
          .withQueue("a1")
          .withAmLabel("y")
          .build();
  RMApp app1 = MockRMAppSubmitter.submit(rm1, data4);
  MockRM.launchAndRegisterAM(app1, rm1, nm2);
  // Submit app2 with 1Gb AM resource to Queue A1 for label Y
  MockRMAppSubmissionData data3 =
      MockRMAppSubmissionData.Builder.createWithMemory(GB, rm1)
          .withAppName("app")
          .withUser("user")
          .withAcls(null)
          .withQueue("a1")
          .withAmLabel("y")
          .build();
  RMApp app2 = MockRMAppSubmitter.submit(rm1, data3);
  MockRM.launchAndRegisterAM(app2, rm1, nm3);
  // Submit another app with 1Gb AM resource to Queue A1 for label Y
  MockRMAppSubmissionData data2 =
      MockRMAppSubmissionData.Builder.createWithMemory(GB, rm1)
          .withAppName("app")
          .withUser("user")
          .withAcls(null)
          .withQueue("a1")
          .withAmLabel("y")
          .build();
  MockRMAppSubmitter.submit(rm1, data2);
  CapacityScheduler cs = (CapacityScheduler) rm1.getResourceScheduler();
  LeafQueue leafQueue = (LeafQueue) cs.getQueue("a1");
  Assert.assertNotNull(leafQueue);
  /*
   * capacity of queue A  -> 50% for label Y
   * capacity of queue A1 -> 100% for label Y
   *
   * Total resources available for label Y -> 20GB (nm2 and nm3)
   * Hence in queue A1, max resource for label Y is 10GB.
   *
   * AM resource percent config for queue A1 -> 0.25
   *        ==> 2.5Gb (3 Gb) is max-am-resource-limit
   */
  Assert.assertEquals(2, leafQueue.getNumActiveApplications());
  Assert.assertEquals(1, leafQueue.getNumPendingApplications());
  // Submit app3 with 1Gb AM resource to Queue B1 (no_label)
  MockRMAppSubmissionData data1 =
      MockRMAppSubmissionData.Builder.createWithMemory(GB, rm1)
          .withAppName("app")
          .withUser("user")
          .withAcls(null)
          .withQueue("b1")
          .withUnmanagedAM(false)
          .build();
  RMApp app3 = MockRMAppSubmitter.submit(rm1, data1);
  MockRM.launchAndRegisterAM(app3, rm1, nm5);
  // Submit another app with 1Gb AM resource to Queue B1 (no_label)
  MockRMAppSubmissionData data =
      MockRMAppSubmissionData.Builder.createWithMemory(GB, rm1)
          .withAppName("app")
          .withUser("user")
          .withAcls(null)
          .withQueue("b1")
          .withUnmanagedAM(false)
          .build();
  MockRMAppSubmitter.submit(rm1, data);
  leafQueue = (LeafQueue) cs.getQueue("b1");
  Assert.assertNotNull(leafQueue);
  /*
   * capacity of queue B  -> 90% for queue
   *                      -> and 100% for no-label
   * capacity of queue B1 -> 50% for no-label/queue
   *
   * Total resources available for no-label -> 10GB (nm5)
   * Hence in queue B1, max resource for no-label is 5GB.
   *
   * AM resource percent config for queue B1 -> 0.15
   *        ==> 1Gb is max-am-resource-limit
   *
   * Only one app will be activated and all others will be pending.
   */
  Assert.assertEquals(1, leafQueue.getNumActiveApplications());
  Assert.assertEquals(1, leafQueue.getNumPendingApplications());
  rm1.close();
}
@Test
public void testHeadroom() throws Exception {
/*
* Test Case: Verify Headroom calculated is sum of headrooms for each
* partition requested. So submit a app with requests for default partition
* and 'x' partition, so the total headroom for the user should be sum of
* the head room for both labels.
*/
simpleNodeLabelMappingToManager();
CapacitySchedulerConfiguration csConf =
(CapacitySchedulerConfiguration) TestUtils
.getComplexConfigurationWithQueueLabels(conf);
final String A1 = CapacitySchedulerConfiguration.ROOT + ".a" + ".a1";
final String B2 = CapacitySchedulerConfiguration.ROOT + ".b" + ".b2";
csConf.setUserLimit(A1, 25);
csConf.setUserLimit(B2, 25);
YarnConfiguration conf = new YarnConfiguration();
CapacitySchedulerContext csContext = mock(CapacitySchedulerContext.class);
when(csContext.getConfiguration()).thenReturn(csConf);
when(csContext.getConf()).thenReturn(conf);
when(csContext.getMinimumResourceCapability())
.thenReturn(Resources.createResource(GB));
when(csContext.getMaximumResourceCapability())
.thenReturn(Resources.createResource(16 * GB));
when(csContext.getResourceCalculator()).thenReturn(resourceCalculator);
RMContext rmContext = TestUtils.getMockRMContext();
RMContext spyRMContext = spy(rmContext);
when(spyRMContext.getNodeLabelManager()).thenReturn(mgr);
when(csContext.getRMContext()).thenReturn(spyRMContext);
when(csContext.getPreemptionManager()).thenReturn(new PreemptionManager());
mgr.activateNode(NodeId.newInstance("h0", 0),
Resource.newInstance(160 * GB, 16)); // default Label
mgr.activateNode(NodeId.newInstance("h1", 0),
Resource.newInstance(160 * GB, 16)); // label x
mgr.activateNode(NodeId.newInstance("h2", 0),
Resource.newInstance(160 * GB, 16)); // label y
// Say cluster has 100 nodes of 16G each
Resource clusterResource = Resources.createResource(160 * GB);
when(csContext.getClusterResource()).thenReturn(clusterResource);
CSQueueStore queues = new CSQueueStore();
CSQueue rootQueue = CapacitySchedulerQueueManager.parseQueue(csContext,
csConf, null, "root", queues, queues, TestUtils.spyHook);
rootQueue.updateClusterResource(clusterResource,
new ResourceLimits(clusterResource));
ResourceUsage queueResUsage = rootQueue.getQueueResourceUsage();
when(csContext.getClusterResourceUsage())
.thenReturn(queueResUsage);
// Manipulate queue 'a'
LeafQueue queue = TestLeafQueue.stubLeafQueue((LeafQueue) queues.get("b2"));
queue.updateClusterResource(clusterResource,
new ResourceLimits(clusterResource));
String rack_0 = "rack_0";
FiCaSchedulerNode node_0 = TestUtils.getMockNode("h0", rack_0, 0, 160 * GB);
FiCaSchedulerNode node_1 = TestUtils.getMockNode("h1", rack_0, 0, 160 * GB);
final String user_0 = "user_0";
final String user_1 = "user_1";
RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null);
ConcurrentMap<ApplicationId, RMApp> spyApps =
spy(new ConcurrentHashMap<ApplicationId, RMApp>());
RMApp rmApp = mock(RMApp.class);
ResourceRequest amResourceRequest = mock(ResourceRequest.class);
Resource amResource = Resources.createResource(0, 0);
when(amResourceRequest.getCapability()).thenReturn(amResource);
when(rmApp.getAMResourceRequests()).thenReturn(
Collections.singletonList(amResourceRequest));
Mockito.doReturn(rmApp)
.when(spyApps).get(ArgumentMatchers.<ApplicationId>any());
when(spyRMContext.getRMApps()).thenReturn(spyApps);
RMAppAttempt rmAppAttempt = mock(RMAppAttempt.class);
when(rmApp.getRMAppAttempt(any()))
.thenReturn(rmAppAttempt);
when(rmApp.getCurrentAppAttempt()).thenReturn(rmAppAttempt);
Mockito.doReturn(rmApp)
.when(spyApps).get(ArgumentMatchers.<ApplicationId>any());
Mockito.doReturn(true).when(spyApps)
.containsKey(ArgumentMatchers.<ApplicationId>any());
Priority priority_1 = TestUtils.createMockPriority(1);
// Submit first application with some resource-requests from user_0,
// and check headroom
final ApplicationAttemptId appAttemptId_0_0 =
TestUtils.getMockApplicationAttemptId(0, 0);
FiCaSchedulerApp app_0_0 = new FiCaSchedulerApp(appAttemptId_0_0, user_0,
queue, queue.getAbstractUsersManager(), spyRMContext);
queue.submitApplicationAttempt(app_0_0, user_0);
List<ResourceRequest> app_0_0_requests = new ArrayList<ResourceRequest>();
app_0_0_requests.add(TestUtils.createResourceRequest(ResourceRequest.ANY,
1 * GB, 2, true, priority_1, recordFactory));
app_0_0.updateResourceRequests(app_0_0_requests);
// Schedule to compute
queue.assignContainers(clusterResource, node_0,
new ResourceLimits(clusterResource),
SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY);
//head room = queue capacity = 50 % 90% 160 GB * 0.25 (UL)
Resource expectedHeadroom =
Resources.createResource((int) (0.5 * 0.9 * 160 * 0.25) * GB, 1);
assertEquals(expectedHeadroom, app_0_0.getHeadroom());
// Submit second application from user_0, check headroom
final ApplicationAttemptId appAttemptId_0_1 =
TestUtils.getMockApplicationAttemptId(1, 0);
FiCaSchedulerApp app_0_1 = new FiCaSchedulerApp(appAttemptId_0_1, user_0,
queue, queue.getAbstractUsersManager(), spyRMContext);
queue.submitApplicationAttempt(app_0_1, user_0);
List<ResourceRequest> app_0_1_requests = new ArrayList<ResourceRequest>();
app_0_1_requests.add(TestUtils.createResourceRequest(ResourceRequest.ANY,
1 * GB, 2, true, priority_1, recordFactory));
app_0_1.updateResourceRequests(app_0_1_requests);
app_0_1_requests.clear();
app_0_1_requests.add(TestUtils.createResourceRequest(ResourceRequest.ANY,
1 * GB, 2, true, priority_1, recordFactory, "y"));
app_0_1.updateResourceRequests(app_0_1_requests);
// Schedule to compute
queue.assignContainers(clusterResource, node_0,
new ResourceLimits(clusterResource),
SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); // Schedule to compute
queue.assignContainers(clusterResource, node_1,
new ResourceLimits(clusterResource),
SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); // Schedule to compute
assertEquals(expectedHeadroom, app_0_0.getHeadroom());// no change
//head room for default label + head room for y partition
//head room for y partition = 100% 50%(b queue capacity ) * 160 * GB
Resource expectedHeadroomWithReqInY = Resources.add(
Resources.createResource((int) (0.25 * 0.5 * 160) * GB, 1),
expectedHeadroom);
assertEquals(expectedHeadroomWithReqInY, app_0_1.getHeadroom());
// Submit first application from user_1, check for new headroom
final ApplicationAttemptId appAttemptId_1_0 =
TestUtils.getMockApplicationAttemptId(2, 0);
FiCaSchedulerApp app_1_0 = new FiCaSchedulerApp(appAttemptId_1_0, user_1,
queue, queue.getAbstractUsersManager(), spyRMContext);
queue.submitApplicationAttempt(app_1_0, user_1);
List<ResourceRequest> app_1_0_requests = new ArrayList<ResourceRequest>();
app_1_0_requests.add(TestUtils.createResourceRequest(ResourceRequest.ANY,
1 * GB, 2, true, priority_1, recordFactory));
app_1_0.updateResourceRequests(app_1_0_requests);
app_1_0_requests.clear();
app_1_0_requests.add(TestUtils.createResourceRequest(ResourceRequest.ANY,
1 * GB, 2, true, priority_1, recordFactory, "y"));
app_1_0.updateResourceRequests(app_1_0_requests);
// Schedule to compute
queue.assignContainers(clusterResource, node_0,
new ResourceLimits(clusterResource),
SchedulingMode.RESPECT_PARTITION_EXCLUSIVITY); // Schedule to compute
//head room = queue capacity = (50 % 90% 160 GB)/2 (for 2 users)
expectedHeadroom =
Resources.createResource((int) (0.5 * 0.9 * 160 * 0.25) * GB, 1);
//head room for default label + head room for y partition
//head room for y partition = 100% 50%(b queue capacity ) * 160 * GB
expectedHeadroomWithReqInY = Resources.add(
Resources.createResource((int) (0.25 * 0.5 * 160) * GB, 1),
expectedHeadroom);
assertEquals(expectedHeadroom, app_0_0.getHeadroom());
assertEquals(expectedHeadroomWithReqInY, app_0_1.getHeadroom());
assertEquals(expectedHeadroomWithReqInY, app_1_0.getHeadroom());
}
/**
 * {@link LeafQueue#activateApplications()} should validate values of all
 * resourceTypes before activating application.
 *
 * @throws Exception on any unexpected test failure
 */
@Test
public void testAMLimitByAllResources() throws Exception {
    // Single leaf queue "a1" holding 100% of root, with the dominant resource
    // calculator so every resource type participates in the AM-limit check.
    CapacitySchedulerConfiguration csconf =
        new CapacitySchedulerConfiguration();
    csconf.setResourceComparator(DominantResourceCalculator.class);
    String queueName = "a1";
    csconf.setQueues(CapacitySchedulerConfiguration.ROOT,
        new String[] {queueName});
    csconf.setCapacity("root." + queueName, 100);

    // Declare three resource types (memory, vcores, gpu). "gpu" has a zero
    // minimum allocation, so activation must validate it explicitly.
    ResourceInformation res0 = ResourceInformation.newInstance("memory-mb",
        ResourceInformation.MEMORY_MB.getUnits(), GB, Long.MAX_VALUE);
    ResourceInformation res1 = ResourceInformation.newInstance("vcores",
        ResourceInformation.VCORES.getUnits(), 1, Integer.MAX_VALUE);
    ResourceInformation res2 = ResourceInformation.newInstance("gpu",
        ResourceInformation.GPUS.getUnits(), 0, Integer.MAX_VALUE);
    Map<String, ResourceInformation> riMap = new HashMap<>();
    riMap.put(ResourceInformation.MEMORY_URI, res0);
    riMap.put(ResourceInformation.VCORES_URI, res1);
    riMap.put(ResourceInformation.GPU_URI, res2);
    ResourceUtils.initializeResourcesFromResourceInformationMap(riMap);

    YarnConfiguration config = new YarnConfiguration(csconf);
    config.setClass(YarnConfiguration.RM_SCHEDULER, CapacityScheduler.class,
        ResourceScheduler.class);
    config.setBoolean(TestResourceProfiles.TEST_CONF_RESET_RESOURCE_TYPES,
        false);

    MockRM rm = new MockRM(config);
    rm.start();
    try {
        Map<String, Long> res = new HashMap<>();
        res.put("gpu", 0L);
        Resource clusterResource = Resource.newInstance(16 * GB, 64, res);
        // Cluster Resource - 16GB, 64vcores
        // AMLimit 16384 x .1 mb , 64 x .1 vcore
        // Effective AM limit after normalized to minimum resource 2048,7
        rm.registerNode("127.0.0.1:1234", clusterResource);

        String userName = "user_0";
        ResourceScheduler scheduler = rm.getRMContext().getScheduler();
        LeafQueue queueA = (LeafQueue) ((CapacityScheduler) scheduler)
            .getQueue(queueName);
        Resource amResource = Resource.newInstance(GB, 1);

        MockRMAppSubmitter.submit(rm,
            MockRMAppSubmissionData.Builder.createWithResource(amResource, rm)
                .withAppName("app-1")
                .withUser(userName)
                .withAcls(null)
                .withQueue(queueName)
                .build());
        MockRMAppSubmitter.submit(rm,
            MockRMAppSubmissionData.Builder.createWithResource(amResource, rm)
                .withAppName("app-2")
                .withUser(userName)
                .withAcls(null)
                .withQueue(queueName)
                .build());
        // app-3 should not be activated as amLimit will be reached
        // for memory
        MockRMAppSubmitter.submit(rm,
            MockRMAppSubmissionData.Builder.createWithResource(amResource, rm)
                .withAppName("app-3")
                .withUser(userName)
                .withAcls(null)
                .withQueue(queueName)
                .build());

        Assert.assertEquals("PendingApplications should be 1", 1,
            queueA.getNumPendingApplications());
        Assert.assertEquals("Active applications should be 2", 2,
            queueA.getNumActiveApplications());
        // AMLimit is 2048,7
        Assert.assertEquals(2048,
            queueA.getQueueResourceUsage().getAMLimit().getMemorySize());
        Assert.assertEquals(7,
            queueA.getQueueResourceUsage().getAMLimit().getVirtualCores());
        // Used AM Resource is 2048,2
        Assert.assertEquals(2048,
            queueA.getQueueResourceUsage().getAMUsed().getMemorySize());
        Assert.assertEquals(2,
            queueA.getQueueResourceUsage().getAMUsed().getVirtualCores());
    } finally {
        // FIX: stop the RM even when an assertion above fails, so its daemon
        // threads and state do not leak into subsequent tests.
        rm.close();
    }
}
}
| |
/*
* Copyright (C) 2014 Charalampakis Basilis - Blur ActionBarDrawerToggle
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.charbgr.BlurNavigationDrawer.v7;
import android.app.Activity;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.Drawable;
import android.os.Build;
import android.support.v7.app.ActionBarDrawerToggle;
import android.support.v4.widget.DrawerLayout;
import android.support.v7.widget.Toolbar;
import android.view.View;
import android.view.ViewGroup;
import android.view.animation.AlphaAnimation;
import android.widget.ImageView;
import android.widget.RelativeLayout;
import com.charbgr.BlurNavigationDrawer.Blur;
public class BlurActionBarDrawerToggle extends ActionBarDrawerToggle {

    /** Default blur radius, in pixels, applied to the drawer snapshot. */
    public static int DEFAULT_BLUR_RADIUS = 5;

    /** Default factor by which the snapshot is down-scaled before blurring. */
    public static float DEFAULT_DOWNSCALEFACTOR = 5.0f;

    private Context context;

    /**
     * the layout that we take snapshot
     */
    private DrawerLayout mDrawerLayout;

    /**
     * an imageview to display the blurred snapshot/bitmap
     */
    private ImageView mBlurredImageView;

    /**
     * Blur radius used for the background.
     */
    private int mBlurRadius = DEFAULT_BLUR_RADIUS;

    /**
     * Down scale factor to reduce blurring time and memory allocation.
     */
    private float mDownScaleFactor = DEFAULT_DOWNSCALEFACTOR;

    /**
     * Render flag.
     * <p/>
     * If true we must render on the next slide event;
     * if false, we have already blurred the background.
     */
    private boolean prepareToRender = true;

    /**
     * flag for "fake" sliding detection: true while the drawer is moving away
     * from the fully-closed position.
     */
    private boolean isOpening = false;

    public BlurActionBarDrawerToggle(Activity activity, DrawerLayout drawerLayout,
            int openDrawerContentDescRes, int closeDrawerContentDescRes) {
        super(activity, drawerLayout, openDrawerContentDescRes, closeDrawerContentDescRes);
        this.context = activity.getBaseContext();
        this.mDrawerLayout = drawerLayout;
        init();
    }

    public BlurActionBarDrawerToggle(Activity activity, DrawerLayout drawerLayout, Toolbar toolbar,
            int openDrawerContentDescRes, int closeDrawerContentDescRes) {
        super(activity, drawerLayout, toolbar, openDrawerContentDescRes, closeDrawerContentDescRes);
        this.context = activity.getBaseContext();
        this.mDrawerLayout = drawerLayout;
        init();
    }

    /**
     * We make a fake ImageView with width and height MATCH_PARENT.
     * This ImageView will host the blurred snapshot/bitmap.
     */
    private void init() {
        mBlurredImageView = new ImageView(context);
        RelativeLayout.LayoutParams params = new RelativeLayout.LayoutParams(
                ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT);
        mBlurredImageView.setLayoutParams(params);
        mBlurredImageView.setClickable(false);
        mBlurredImageView.setVisibility(View.GONE);
        mBlurredImageView.setScaleType(ImageView.ScaleType.FIT_XY);
        mDrawerLayout.post(new Runnable() {
            @Override
            public void run() {
                // Add the ImageView not in the last position.
                // Otherwise, it will be shown in NavigationDrawer.
                mDrawerLayout.addView(mBlurredImageView, 1);
            }
        });
    }

    @Override
    public void onDrawerSlide(final View drawerView, final float slideOffset) {
        super.onDrawerSlide(drawerView, slideOffset);
        // A zero offset means the drawer is fully closed ("fake" slide).
        isOpening = slideOffset != 0.f;
        render();
        setAlpha(mBlurredImageView, slideOffset, 100);
    }

    @Override
    public void onDrawerClosed(View view) {
        // FIX: call through to the superclass so the ActionBar indicator is
        // synced to the closed state; the other overrides already call super.
        super.onDrawerClosed(view);
        prepareToRender = true;
        mBlurredImageView.setVisibility(View.GONE);
    }

    @Override
    public void onDrawerStateChanged(int newState) {
        super.onDrawerStateChanged(newState);
        // "fake" sliding detection: settled while not opening -> free bitmap.
        if (newState == DrawerLayout.STATE_IDLE && !isOpening) {
            handleRecycle();
        }
    }

    /**
     * Snapshots the specified layout and scales it using scaleBitmap(),
     * then blurs the scaled bitmap with the preferred blur radius.
     * Finally, posts it to our fake {@link android.widget.ImageView}.
     * Runs at most once per drawer opening (guarded by {@link #prepareToRender}).
     */
    private void render() {
        if (!prepareToRender) {
            return;
        }
        prepareToRender = false;
        Bitmap bitmap = loadBitmapFromView(mDrawerLayout);
        bitmap = scaleBitmap(bitmap);
        bitmap = Blur.fastblur(context, bitmap, mBlurRadius, false);
        mBlurredImageView.setVisibility(View.VISIBLE);
        mBlurredImageView.setImageBitmap(bitmap);
    }

    /** Sets the blur radius; values below 1 are clamped to 1. */
    public void setRadius(int radius) {
        mBlurRadius = radius < 1 ? 1 : radius;
    }

    /** Sets the down-scale factor; values below 1 are clamped to 1. */
    public void setDownScaleFactor(float downScaleFactor) {
        mDownScaleFactor = downScaleFactor < 1 ? 1 : downScaleFactor;
    }

    /**
     * Applies an alpha value to a view. Pre-Honeycomb (API < 11) devices lack
     * View#setAlpha, so a fill-after AlphaAnimation is used instead.
     */
    private void setAlpha(View view, float alpha, long durationMillis) {
        if (Build.VERSION.SDK_INT < 11) {
            final AlphaAnimation animation = new AlphaAnimation(alpha, alpha);
            animation.setDuration(durationMillis);
            animation.setFillAfter(true);
            view.startAnimation(animation);
        } else {
            view.setAlpha(alpha);
        }
    }

    /** Draws the given view into a new ARGB_8888 bitmap of the same size. */
    private Bitmap loadBitmapFromView(View mView) {
        Bitmap b = Bitmap.createBitmap(
                mView.getWidth(),
                mView.getHeight(),
                Bitmap.Config.ARGB_8888);
        Canvas c = new Canvas(b);
        // With the following, screen blinks
        //v.layout(0, 0, v.getLayoutParams().width, v.getLayoutParams().height);
        mView.draw(c);
        return b;
    }

    /** Shrinks the bitmap by {@link #mDownScaleFactor} in both dimensions. */
    private Bitmap scaleBitmap(Bitmap myBitmap) {
        int width = (int) (myBitmap.getWidth() / mDownScaleFactor);
        int height = (int) (myBitmap.getHeight() / mDownScaleFactor);
        return Bitmap.createScaledBitmap(myBitmap, width, height, false);
    }

    /**
     * Releases the blurred bitmap once the drawer has settled closed.
     */
    private void handleRecycle() {
        Drawable drawable = mBlurredImageView.getDrawable();
        if (drawable instanceof BitmapDrawable) {
            Bitmap bitmap = ((BitmapDrawable) drawable).getBitmap();
            // FIX: detach the bitmap from the view *before* recycling it so a
            // pending draw pass cannot touch a recycled bitmap.
            mBlurredImageView.setImageBitmap(null);
            if (bitmap != null) {
                bitmap.recycle();
            }
        }
        prepareToRender = true;
    }
}
| |
package com.evgenii.walktocircle.walkMap;
import android.graphics.Point;
import android.location.Location;
import com.evgenii.walktocircle.MainActivity;
import com.evgenii.walktocircle.WalkApplication;
import com.evgenii.walktocircle.utils.WalkGeo;
import com.evgenii.walktocircle.utils.WalkLocation;
import com.evgenii.walktocircle.utils.WalkView;
import com.evgenii.walktocircle.WalkConstants;
import com.google.android.gms.maps.CameraUpdate;
import com.google.android.gms.maps.CameraUpdateFactory;
import com.google.android.gms.maps.GoogleMap;
import com.google.android.gms.maps.Projection;
import com.google.android.gms.maps.model.CameraPosition;
import com.google.android.gms.maps.model.LatLng;
import com.google.android.gms.maps.model.LatLngBounds;
/**
 * Prepares the map camera for showing a pin: first centers the camera on the
 * user's location, then pans so the pin and its surrounding circle are fully
 * visible and not hidden behind the start button.
 */
public class PrepareMapForPin {

    /**
     * Runs the two-step camera preparation.
     *
     * @param userLocation current user location to center on first
     * @param pinLocation location of the pin to reveal afterwards
     * @param map the map object
     * @param mapSizePixels size of the map view in pixels
     * @param startButtonSizePixels size of the start button in pixels
     * @param complete invoked once the user-centering step completes.
     *        NOTE(review): it runs BEFORE the pin animation starts, not after
     *        it finishes — confirm this ordering is intended.
     */
    public void prepare(Location userLocation, final Location pinLocation,
        final GoogleMap map, final Point mapSizePixels,
        final Point startButtonSizePixels, final Runnable complete) {
        animateCameraToUserLocation(userLocation, map, new Runnable() {
            @Override
            public void run() {
                complete.run();
                animateCameraToShowPin(pinLocation, map, mapSizePixels, startButtonSizePixels);
            }
        });
    }

    /**
     * Animates the camera to current user's location with default zoom level.
     * If user's location is already shown on the map with proper zoom the camera animation
     * is not performed and the callback is called right away.
     * @param userLocation location which is used to animate the camera to
     * @param map the map object
     * @param callback called after camera animation is complete
     */
    public void animateCameraToUserLocation(Location userLocation, GoogleMap map, final Runnable callback) {
        CameraUpdate update = getUserCenteredCameraUpdate(userLocation, map);
        if (update == null) {
            // No camera update is necessary
            callback.run();
        } else {
            map.animateCamera(update, WalkConstants.mapPositionAnimationDurationMilliseconds, new GoogleMap.CancelableCallback() {
                @Override
                public void onFinish() {
                    callback.run();
                }
                @Override
                // NOTE(review): a cancelled animation never invokes `callback`,
                // so the pin step in prepare() is skipped — confirm intended.
                public void onCancel() { }
            });
        }
    }

    /**
     * Builds a camera update that fixes zoom, bearing, tilt and target so the
     * user's location is shown at the standard zoom level.
     *
     * @return Returns the update for the camera centered at the user at the right zoom level.
     * Returns null if no camera update is necessary.
     */
    private CameraUpdate getUserCenteredCameraUpdate(Location userLocation, GoogleMap map) {
        LatLng userLatLng = WalkLocation.latLngFromLocation(userLocation);
        // Start from the current camera so unchanged properties are kept.
        CameraPosition.Builder cameraPositionBuilder = new CameraPosition.Builder(map.getCameraPosition());
        boolean didChangeCamera = false;
        // Zoom: compare latitude-normalized zoom levels; only adjust when the
        // difference exceeds the configured tolerance.
        double mapInitialZoom = WalkGeo.normalizedZoomLevelForLatitude(userLocation.getLatitude(),
            WalkConstants.mapInitialZoom);
        double currentZoom = WalkGeo.normalizedZoomLevelForLatitude(userLocation.getLatitude(),
            map.getCameraPosition().zoom);
        if (Math.abs(mapInitialZoom - currentZoom) > WalkConstants.mapZoomLevelDelta) {
            cameraPositionBuilder.zoom((float)mapInitialZoom);
            didChangeCamera = true;
        }
        // Rotate the map back to north if bearing deviates more than the
        // allowed amount in either direction.
        if (map.getCameraPosition().bearing > WalkConstants.mapMaxBearing
            && map.getCameraPosition().bearing < (360 - WalkConstants.mapMaxBearing)) {
            cameraPositionBuilder.bearing(0);
            didChangeCamera = true;
        }
        // Remove map camera tilt
        if (map.getCameraPosition().tilt > WalkConstants.mapMaxTilt) {
            cameraPositionBuilder.tilt(0);
            didChangeCamera = true;
        }
        // Center the map on user location if it is not visible on the map,
        // or if any other camera property was adjusted above.
        LatLngBounds bounds = map.getProjection().getVisibleRegion().latLngBounds;
        if (!bounds.contains(userLatLng) || didChangeCamera) {
            cameraPositionBuilder.target(userLatLng);
            didChangeCamera = true;
        }
        if (!didChangeCamera) { return null; }
        CameraPosition newCameraPosition = cameraPositionBuilder.build();
        return CameraUpdateFactory.newCameraPosition(newCameraPosition);
    }

    /**
     * Move the camera to the position such that the pin and its circle are fully visible.
     * @param pinLocation location of the pin
     * @param map map object
     * @param mapSizePixels size of the map in pixels
     * @param startButtonSizePixels size of the start button in pixels
     */
    private void animateCameraToShowPin(Location pinLocation, GoogleMap map, Point mapSizePixels,
        Point startButtonSizePixels) {
        CameraUpdate update = getCameraUpdateToShowPin(pinLocation, map, mapSizePixels, startButtonSizePixels);
        if (update != null) {
            map.animateCamera(update, WalkConstants.mapPositionAnimationDurationMilliseconds, new GoogleMap.CancelableCallback() {
                @Override
                public void onFinish() {
                }
                @Override
                public void onCancel() {
                }
            });
        }
    }

    /**
     * Computes the screen-pixel scroll needed to bring the pin's circle fully
     * on screen (left/right/top/bottom edge checks), then corrects it so the
     * pin does not end up under the start button.
     *
     * @param pinLocation the location of the pin
     * @param map map object
     * @param mapSizePixels the size of the map in pixels
     * @param startButtonSizePixels the size of the start button in pixels
     * @return Returns the update for the camera needed to show the pin. Returns null if no camera update is necessary to show the pin.
     */
    private CameraUpdate getCameraUpdateToShowPin(Location pinLocation, GoogleMap map,
        Point mapSizePixels, Point startButtonSizePixels) {
        Projection projection = map.getProjection();
        Point pinScreenLocation = projection.toScreenLocation(WalkLocation.latLngFromLocation(pinLocation));
        Point scroll = new Point(0, 0);
        // Circle radius in pixels, padded so the circle is not flush with the
        // map edge after scrolling.
        float circleRadiusInPixels = (float)WalkLocation.fromMetersToMapPixels(pinLocation, map, WalkConstants.mCircleRadiusMeters) *
            WalkConstants.mapPaddingMultiplierFromCircleToMapEdgePixels;
        // Circle is beyond the left edge of the screen (negative overshoot)
        float beyondLeftEdge = pinScreenLocation.x - circleRadiusInPixels;
        if (beyondLeftEdge < 0) {
            scroll.x = (int) beyondLeftEdge;
        }
        // Circle is beyond the right edge of the screen (positive overshoot)
        float beyondRightEdge = pinScreenLocation.x - (mapSizePixels.x - circleRadiusInPixels);
        if (beyondRightEdge > 0) {
            scroll.x = (int) beyondRightEdge;
        }
        // Circle is beyond the top edge of the screen (status bar excluded)
        float beyondToEdge = pinScreenLocation.y - circleRadiusInPixels - WalkView.getStatusBarHeight(WalkApplication.getAppContext());
        if (beyondToEdge < 0) {
            scroll.y = (int) beyondToEdge;
        }
        // Circle is beyond the bottom edge of the screen
        float beyondBottomEdge = pinScreenLocation.y - (mapSizePixels.y - circleRadiusInPixels);
        if (beyondBottomEdge > 0) {
            scroll.y = (int) beyondBottomEdge;
        }
        scroll = correctScrollForStartButton(scroll, pinScreenLocation, mapSizePixels, startButtonSizePixels);
        if (scroll.x == 0 && scroll.y == 0) {
            // No camera update is necessary
            return null;
        }
        return CameraUpdateFactory.scrollBy(scroll.x, scroll.y);
    }

    /**
     * Correct the scroll of the camera in order to avoid the pin showing on top of the start button.
     */
    private Point correctScrollForStartButton(Point scroll, Point pinScreenLocation, Point mapSizePixels, Point startButtonSizePixels) {
        // Predict where the pin will be on screen after the scroll is applied.
        // NOTE(review): x is offset by +scroll.x while y is offset by
        // -scroll.y — the asymmetry presumably follows scrollBy()'s screen
        // coordinate convention, but verify against CameraUpdateFactory docs.
        pinScreenLocation = new Point(pinScreenLocation.x + scroll.x, pinScreenLocation.y - scroll.y);
        // Vertical clearance between the pin and the start button area
        // (negative means the pin would overlap the button zone).
        float yCorrection = (mapSizePixels.y - pinScreenLocation.y) - (startButtonSizePixels.y * (float)1.3);
        if (yCorrection < 0) {
            // Horizontal clearance from the screen center; only correct when
            // the pin is also horizontally within the button's reach.
            float xCorrection = Math.abs(mapSizePixels.x / 2 - pinScreenLocation.x) - startButtonSizePixels.x;
            if (xCorrection < 0) {
                // Push the pin away from the center, toward the nearer side.
                if ((mapSizePixels.x / 2) > pinScreenLocation.x) {
                    xCorrection *= -1;
                }
                scroll.x += xCorrection;
                scroll.y -= yCorrection;
            }
        }
        return scroll;
    }
}
| |
package it.unibz.krdb.obda.model.impl;
/*
* #%L
* ontop-obdalib-core
* %%
* Copyright (C) 2009 - 2014 Free University of Bozen-Bolzano
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import it.unibz.krdb.obda.exception.DuplicateMappingException;
import it.unibz.krdb.obda.io.PrefixManager;
import it.unibz.krdb.obda.io.SimplePrefixManager;
import it.unibz.krdb.obda.model.Function;
import it.unibz.krdb.obda.model.OBDADataFactory;
import it.unibz.krdb.obda.model.OBDADataSource;
import it.unibz.krdb.obda.model.OBDAMappingAxiom;
import it.unibz.krdb.obda.model.OBDAMappingListener;
import it.unibz.krdb.obda.model.OBDAModel;
import it.unibz.krdb.obda.model.OBDAModelListener;
import it.unibz.krdb.obda.model.OBDASQLQuery;
import it.unibz.krdb.obda.model.Predicate;
import it.unibz.krdb.obda.ontology.OntologyVocabulary;
import it.unibz.krdb.obda.ontology.impl.OntologyVocabularyImpl;
import it.unibz.krdb.obda.querymanager.QueryController;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Hashtable;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.jar.Attributes;
import java.util.jar.Manifest;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class OBDAModelImpl implements OBDAModel {
private static final long serialVersionUID = 1L;
private QueryController queryController;
private PrefixManager prefixManager;
private Map<URI, OBDADataSource> datasources;
private List<OBDAModelListener> sourceslisteners;
private Hashtable<URI, ArrayList<OBDAMappingAxiom>> mappings;
private List<OBDAMappingListener> mappinglisteners;
private static OBDADataFactory dfac = OBDADataFactoryImpl.getInstance();
private static final Logger log = LoggerFactory.getLogger(OBDAModelImpl.class);
private final OntologyVocabulary vocabulary = new OntologyVocabularyImpl();
/**
* The default constructor
*/
public OBDAModelImpl() {
log.debug("OBDA model is initialized!");
queryController = new QueryController();
prefixManager = new SimplePrefixManager();
datasources = new HashMap<URI, OBDADataSource>();
sourceslisteners = new ArrayList<OBDAModelListener>();
mappings = new Hashtable<URI, ArrayList<OBDAMappingAxiom>>();
mappinglisteners = new ArrayList<OBDAMappingListener>();
}
@Override
public QueryController getQueryController() {
return queryController;
}
@Override
public String getVersion() {
try {
InputStream stream = getClass().getResourceAsStream("/META-INF/MANIFEST.MF");
Manifest manifest = new Manifest(stream);
Attributes attributes = manifest.getMainAttributes();
String implementationVersion = attributes.getValue("Implementation-Version");
return implementationVersion;
} catch (IOException e) {
return "";
}
}
@Override
public String getBuiltDate() {
try {
InputStream stream = getClass().getResourceAsStream("/META-INF/MANIFEST.MF");
Manifest manifest = new Manifest(stream);
Attributes attributes = manifest.getMainAttributes();
String builtDate = attributes.getValue("Built-Date");
return builtDate;
} catch (IOException e) {
return "";
}
}
@Override
public String getBuiltBy() {
try {
InputStream stream = getClass().getResourceAsStream("/META-INF/MANIFEST.MF");
Manifest manifest = new Manifest(stream);
Attributes attributes = manifest.getMainAttributes();
String builtBy = attributes.getValue("Built-By");
return builtBy;
} catch (IOException e) {
return "";
}
}
@Override
public void setPrefixManager(PrefixManager prefman) {
this.prefixManager = prefman;
}
@Override
public PrefixManager getPrefixManager() {
return prefixManager;
}
@Override
public OBDADataFactory getDataFactory() {
return dfac;
}
@Override
public void addSource(OBDADataSource source) {
datasources.put(source.getSourceID(), source);
fireSourceAdded(source);
}
@Override
public void addSourcesListener(OBDAModelListener listener) {
if (sourceslisteners.contains(listener)) {
return;
}
sourceslisteners.add(listener);
}
private void fireSourceAdded(OBDADataSource source) {
for (OBDAModelListener listener : sourceslisteners) {
listener.datasourceAdded(source);
}
}
private void fireSourceRemoved(OBDADataSource source) {
for (OBDAModelListener listener : sourceslisteners) {
listener.datasourceDeleted(source);
}
}
@Override
public void fireSourceParametersUpdated() {
for (OBDAModelListener listener : sourceslisteners) {
listener.datasourceParametersUpdated();
}
}
private void fireSourceNameUpdated(URI old, OBDADataSource neu) {
for (OBDAModelListener listener : sourceslisteners) {
listener.datasourceUpdated(old.toString(), neu);
}
}
@Override
public List<OBDADataSource> getSources() {
List<OBDADataSource> sources = new LinkedList<OBDADataSource>(datasources.values());
return Collections.unmodifiableList(sources);
}
@Override
public OBDADataSource getSource(URI name) {
return datasources.get(name);
}
@Override
public boolean containsSource(URI name) {
if (getSource(name) != null) {
return true;
}
return false;
}
@Override
public void removeSource(URI id) {
OBDADataSource source = getSource(id);
datasources.remove(id);
fireSourceRemoved(source);
}
@Override
public void removeSourcesListener(OBDAModelListener listener) {
sourceslisteners.remove(listener);
}
@Override
public void updateSource(URI id, OBDADataSource dsd) {
datasources.remove(id);
datasources.put(dsd.getSourceID(), dsd);
fireSourceNameUpdated(id, dsd);
}
@Override
public void addMappingsListener(OBDAMappingListener listener) {
mappinglisteners.add(listener);
}
@Override
public void removeMapping(URI datasource_uri, String mapping_id) {
int index = indexOf(datasource_uri, mapping_id);
if (index != -1) {
ArrayList<OBDAMappingAxiom> current_mappings = mappings.get(datasource_uri);
current_mappings.remove(index);
fireMappingDeleted(datasource_uri, mapping_id);
}
}
@Override
public void removeAllMappings(URI datasource_uri) {
List<OBDAMappingAxiom> mappings = getMappings(datasource_uri);
while (!mappings.isEmpty()) {
mappings.remove(0);
}
fireAllMappingsRemoved();
}
private void fireAllMappingsRemoved() {
for (OBDAMappingListener listener : mappinglisteners) {
listener.allMappingsRemoved();
}
}
/**
* Announces that a mapping has been updated.
*/
private void fireMappigUpdated(URI srcuri, String mapping_id, OBDAMappingAxiom mapping) {
for (OBDAMappingListener listener : mappinglisteners) {
listener.mappingUpdated(srcuri, mapping_id, mapping);
}
}
/**
* Announces to the listeners that a mapping was deleted.
*/
private void fireMappingDeleted(URI srcuri, String mapping_id) {
for (OBDAMappingListener listener : mappinglisteners) {
listener.mappingDeleted(srcuri, mapping_id);
}
}
/**
* Announces to the listeners that a mapping was inserted.
*/
private void fireMappingInserted(URI srcuri, String mapping_id) {
for (OBDAMappingListener listener : mappinglisteners) {
listener.mappingInserted(srcuri, mapping_id);
}
}
@Override
public OBDAMappingAxiom getMapping(URI source_uri, String mapping_id) {
int pos = indexOf(source_uri, mapping_id);
if (pos == -1) {
return null;
}
List<OBDAMappingAxiom> mappings = getMappings(source_uri);
return mappings.get(pos);
}
@Override
public Hashtable<URI, ArrayList<OBDAMappingAxiom>> getMappings() {
return mappings;
}
@Override
public List<OBDAMappingAxiom> getMappings(URI datasource_uri) {
if (datasource_uri == null)
return null;
List<OBDAMappingAxiom> current_mappings = mappings.get(datasource_uri);
if (current_mappings == null) {
initMappingsArray(datasource_uri);
}
return mappings.get(datasource_uri);
}
@Override
public int indexOf(URI datasource_uri, String mapping_id) {
ArrayList<OBDAMappingAxiom> current_mappings = mappings.get(datasource_uri);
if (current_mappings == null) {
initMappingsArray(datasource_uri);
current_mappings = mappings.get(datasource_uri);
}
int position = -1;
for (int i = 0; i < current_mappings.size(); i++) {
if (current_mappings.get(i).getId().equals(mapping_id)) {
position = i;
break;
}
}
return position;
}
private void initMappingsArray(URI datasource_uri) {
mappings.put(datasource_uri, new ArrayList<OBDAMappingAxiom>());
}
@Override
public void addMapping(URI datasource_uri, OBDAMappingAxiom mapping) throws DuplicateMappingException {
int index = indexOf(datasource_uri, mapping.getId());
if (index != -1) {
throw new DuplicateMappingException("ID " + mapping.getId());
}
mappings.get(datasource_uri).add(mapping);
fireMappingInserted(datasource_uri, mapping.getId());
}
@Override
public void removeAllMappings() {
mappings.clear();
mappings = new Hashtable<URI, ArrayList<OBDAMappingAxiom>>();
fireAllMappingsRemoved();
}
@Override
public void removeMappingsListener(OBDAMappingListener listener) {
mappinglisteners.remove(listener);
}
@Override
public void updateMappingsSourceQuery(URI datasource_uri, String mapping_id, OBDASQLQuery sourceQuery) {
OBDAMappingAxiom mapping = getMapping(datasource_uri, mapping_id);
mapping.setSourceQuery(sourceQuery);
fireMappigUpdated(datasource_uri, mapping.getId(), mapping);
}
@Override
public int updateMapping(URI datasource_uri, String mapping_id, String new_mappingid) throws DuplicateMappingException {
OBDAMappingAxiom mapping = getMapping(datasource_uri, mapping_id);
// adds a new mapping
if (!containsMapping(datasource_uri, new_mappingid)) {
mapping.setId(new_mappingid);
fireMappigUpdated(datasource_uri, mapping_id, mapping);
return 0;
}
// updates an existing mapping
else {
// updates the mapping without changing the mapping id
if (new_mappingid.equals(mapping_id)) {
return -1;
}
// changes the mapping id to an existing one
else {
throw new DuplicateMappingException(new_mappingid);
}
}
}
@Override
public void updateTargetQueryMapping(URI datasource_uri, String mapping_id, List<Function> targetQuery) {
OBDAMappingAxiom mapping = getMapping(datasource_uri, mapping_id);
if (mapping == null) {
return;
}
mapping.setTargetQuery(targetQuery);
fireMappigUpdated(datasource_uri, mapping.getId(), mapping);
}
@Override
public boolean containsMapping(URI datasourceUri, String mappingId) {
if (getMapping(datasourceUri, mappingId) != null) {
return true;
}
return false;
}
@Override
public void addMappings(URI datasource_uri, Collection<OBDAMappingAxiom> mappings) throws DuplicateMappingException {
List<String> duplicates = new ArrayList<String>();
for (OBDAMappingAxiom map : mappings) {
try {
addMapping(datasource_uri, map);
} catch (DuplicateMappingException e) {
duplicates.add(map.getId());
}
}
if (duplicates.size() > 0) {
String msg = String.format("Found %d duplicates in the following ids: %s", duplicates.size(), duplicates.toString());
throw new DuplicateMappingException(msg);
}
}
@Override
public Object clone() {
OBDAModel clone = dfac.getOBDAModel();
for (OBDADataSource source : datasources.values()) {
clone.addSource((OBDADataSource) source.clone());
for (ArrayList<OBDAMappingAxiom> mappingList : mappings.values()) {
for (OBDAMappingAxiom mapping : mappingList) {
try {
clone.addMapping(source.getSourceID(), (OBDAMappingAxiom) mapping.clone());
} catch (DuplicateMappingException e) {
// Does nothing
}
}
}
}
return clone;
}
@Override
public int renamePredicate(Predicate oldname, Predicate newName) {
    // Walk every target-query atom of every mapping and swap atoms built on
    // the old predicate for equivalent atoms built on the new one.
    int modifiedCount = 0;
    for (OBDADataSource source : datasources.values()) {
        ArrayList<OBDAMappingAxiom> axioms = mappings.get(source.getSourceID());
        for (OBDAMappingAxiom mapping : axioms) {
            List<Function> body = mapping.getTargetQuery();
            for (int i = 0; i < body.size(); i++) {
                Function atom = body.get(i);
                // Replace only atoms whose function symbol matches the old name.
                if (atom.getFunctionSymbol().equals(oldname)) {
                    body.set(i, dfac.getFunction(newName, atom.getTerms()));
                    modifiedCount++;
                }
            }
            // NOTE: an update event is fired for every mapping of every source,
            // even those left untouched (original behaviour, preserved here).
            fireMappigUpdated(source.getSourceID(), mapping.getId(), mapping);
        }
    }
    return modifiedCount;
}
@Override
public int deletePredicate(Predicate predicate) {
    // Remove every target-query atom built on the given predicate; mappings
    // whose body becomes empty are removed from the model entirely.
    int modifiedCount = 0;
    for (OBDADataSource source : datasources.values()) {
        // Iterate over a snapshot: removeMapping() below mutates the
        // underlying mapping list for this source.
        List<OBDAMappingAxiom> mp = new ArrayList<OBDAMappingAxiom>(mappings.get(source.getSourceID()));
        for (OBDAMappingAxiom mapping : mp) {
            List<Function> body = mapping.getTargetQuery();
            for (int idx = 0; idx < body.size(); idx++) {
                Function oldatom = body.get(idx);
                if (!oldatom.getFunctionSymbol().equals(predicate)) {
                    continue;
                }
                modifiedCount += 1;
                body.remove(idx);
                // BUG FIX: after remove(idx) the next element shifts into slot
                // idx; step back so the loop re-examines that slot instead of
                // skipping consecutive matching atoms.
                idx--;
            }
            if (!body.isEmpty()) {
                fireMappigUpdated(source.getSourceID(), mapping.getId(), mapping);
            } else {
                removeMapping(source.getSourceID(), mapping.getId());
            }
        }
    }
    return modifiedCount;
}
@Override
public void reset() {
    // Return the model to its pristine state by dropping all registered
    // mappings, data sources and prefixes. The three stores are independent,
    // so clearing order does not matter.
    log.debug("OBDA model is reset");
    mappings.clear();
    datasources.clear();
    prefixManager.clear();
}
@Override
public OntologyVocabulary getOntologyVocabulary() {
// Plain accessor for the vocabulary instance backing this OBDA model.
return vocabulary;
}
}
| |
// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.content.browser.font;
import static junit.framework.Assert.assertEquals;
import static org.mockito.AdditionalMatchers.aryEq;
import static org.mockito.ArgumentMatchers.argThat;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.ArgumentMatchers.isNull;
import static org.mockito.ArgumentMatchers.notNull;
import static org.mockito.Mockito.timeout;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static org.mockito.MockitoAnnotations.initMocks;
import android.content.Context;
import android.content.pm.PackageManager.NameNotFoundException;
import android.content.res.AssetFileDescriptor;
import android.net.Uri;
import android.os.Bundle;
import android.os.ParcelFileDescriptor;
import android.test.IsolatedContext;
import android.test.mock.MockContentProvider;
import android.test.mock.MockContentResolver;
import android.test.mock.MockContext;
import androidx.core.provider.FontRequest;
import androidx.core.provider.FontsContractCompat.Columns;
import androidx.core.provider.FontsContractCompat.FontFamilyResult;
import androidx.core.provider.FontsContractCompat.FontInfo;
import androidx.test.filters.SmallTest;
import com.google.common.collect.ImmutableMap;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.stubbing.OngoingStubbing;
import org.chromium.base.metrics.RecordHistogram;
import org.chromium.base.test.BaseJUnit4ClassRunner;
import org.chromium.base.test.util.CriteriaHelper;
import org.chromium.blink.mojom.AndroidFontLookup;
import org.chromium.blink.mojom.AndroidFontLookup.GetUniqueNameLookupTableResponse;
import org.chromium.blink.mojom.AndroidFontLookup.MatchLocalFontByUniqueNameResponse;
import org.chromium.content.browser.font.AndroidFontLookupImpl.FetchFontName;
import org.chromium.content.browser.font.AndroidFontLookupImpl.FetchFontResult;
import org.chromium.content_public.browser.test.NativeLibraryTestUtils;
import org.chromium.mojo.MojoTestRule;
import java.util.Map;
/**
 * Tests the {@link AndroidFontLookup} implementation ({@code AndroidFontLookupImpl}).
 *
 * <p>The fonts provider is replaced by a {@link MockContentProvider} registered under the
 * GMS fonts authority, and {@link FontsContractWrapper} is mocked so each test controls the
 * {@link FontFamilyResult} returned for a given font query.
 */
@RunWith(BaseJUnit4ClassRunner.class)
public final class AndroidFontLookupImplTest {
// Unique full font names and the provider queries they map to.
private static final String FULL_FONT_NAME_1 = "foo";
private static final String FONT_QUERY_1 = "name=Foo&weight=400";
private static final String FULL_FONT_NAME_2 = "bar";
private static final String FONT_QUERY_2 = "name=Bar&weight=400";
private static final String FULL_FONT_NAME_3 = "bar bold";
private static final String FONT_QUERY_3 = "name=Bar&weight=700";
// Authority/URIs of the fake fonts provider registered in setUp().
private static final String AUTHORITY = "com.google.android.gms.fonts";
private static final Uri URI = Uri.parse("content://com.google.android.gms.fonts/123");
private static final Uri URI2 = Uri.parse("content://com.google.android.gms.fonts/456");
// File descriptor values returned by the mocked ParcelFileDescriptors.
private static final int FD = 42;
private static final int FD2 = 43;
// How long each test pumps the Mojo message loop to let async callbacks run.
private static final long RUN_LOOP_TIMEOUT_MS = 50;
@Rule
public MojoTestRule mMojoTestRule = new MojoTestRule(MojoTestRule.MojoCore.INITIALIZE);
@Mock
private FontsContractWrapper mMockFontsContractWrapper;
@Mock
private ParcelFileDescriptor mMockFileDescriptor;
@Mock
private ParcelFileDescriptor mMockFileDescriptor2;
private Context mMockContext;
@Mock
private GetUniqueNameLookupTableResponse mGetUniqueNameLookupTableCallback;
@Mock
private MatchLocalFontByUniqueNameResponse mMatchLocalFontByUniqueNameCallback;
// Object under test, created fresh for each test in setUp().
private AndroidFontLookupImpl mAndroidFontLookup;
@Before
public void setUp() {
initMocks(this);
NativeLibraryTestUtils.loadNativeLibraryNoBrowserProcess();
MockContentResolver resolver = new MockContentResolver();
MockContext mockContext = new MockContext();
when(mMockFileDescriptor.detachFd()).thenReturn(FD);
when(mMockFileDescriptor2.detachFd()).thenReturn(FD2);
// Fake fonts provider: hands back one of the two mock descriptors for the
// two known font URIs, and null for anything else.
resolver.addProvider(AUTHORITY, new MockContentProvider(mockContext) {
@Override
public AssetFileDescriptor openTypedAssetFile(Uri url, String mimeType, Bundle opts) {
if (url.equals(URI)) {
return new AssetFileDescriptor(mMockFileDescriptor, 0, -1);
} else if (url.equals(URI2)) {
return new AssetFileDescriptor(mMockFileDescriptor2, 0, -1);
} else {
return null;
}
}
});
mMockContext = new IsolatedContext(resolver, mockContext);
Map<String, String> fullFontNameToQuery = ImmutableMap.of(FULL_FONT_NAME_1, FONT_QUERY_1,
FULL_FONT_NAME_2, FONT_QUERY_2, FULL_FONT_NAME_3, FONT_QUERY_3);
mAndroidFontLookup = new AndroidFontLookupImpl(
mMockContext, mMockFontsContractWrapper, fullFontNameToQuery);
}
@SmallTest
@Test
public void testGetUniqueNameLookupTable_Available() {
// Expected order is "bar", "bar bold", "foo" — presumably the names sorted
// alphabetically by the implementation; TODO(review) confirm ordering contract.
String[] expected = new String[] {FULL_FONT_NAME_2, FULL_FONT_NAME_3, FULL_FONT_NAME_1};
mAndroidFontLookup.getUniqueNameLookupTable(mGetUniqueNameLookupTableCallback);
mMojoTestRule.runLoop(RUN_LOOP_TIMEOUT_MS);
verify(mGetUniqueNameLookupTableCallback).call(aryEq(expected));
}
@SmallTest
@Test
public void testGetUniqueNameLookupTable_MultipleFonts() throws NameNotFoundException {
// All 3 fonts should be found in results.
mAndroidFontLookup.getUniqueNameLookupTable(mGetUniqueNameLookupTableCallback);
mMojoTestRule.runLoop(RUN_LOOP_TIMEOUT_MS);
verify(mGetUniqueNameLookupTableCallback)
.call(aryEq(new String[] {FULL_FONT_NAME_2, FULL_FONT_NAME_3, FULL_FONT_NAME_1}));
// Bar Bold is not available.
FontFamilyResult result3 =
new FontFamilyResult(FontFamilyResult.STATUS_OK, new FontInfo[0]);
whenFetchFontsWith(FONT_QUERY_3).thenReturn(result3);
mAndroidFontLookup.matchLocalFontByUniqueName(
FULL_FONT_NAME_3, mMatchLocalFontByUniqueNameCallback);
mMojoTestRule.runLoop(RUN_LOOP_TIMEOUT_MS);
verify(mMatchLocalFontByUniqueNameCallback,
timeout(CriteriaHelper.DEFAULT_MAX_TIME_TO_POLL))
.call(isNull());
// Bar Bold should now be excluded from list.
mAndroidFontLookup.getUniqueNameLookupTable(mGetUniqueNameLookupTableCallback);
mMojoTestRule.runLoop(RUN_LOOP_TIMEOUT_MS);
verify(mGetUniqueNameLookupTableCallback)
.call(aryEq(new String[] {FULL_FONT_NAME_2, FULL_FONT_NAME_1}));
}
@SmallTest
@Test
public void testMatchLocalFontByUniqueName_UnsupportedFontName() {
// "baz" is not in the name->query map, so the match must fail fast with a
// null result and record FAILED_UNEXPECTED_NAME.
mAndroidFontLookup.matchLocalFontByUniqueName("baz", mMatchLocalFontByUniqueNameCallback);
mMojoTestRule.runLoop(RUN_LOOP_TIMEOUT_MS);
verify(mMatchLocalFontByUniqueNameCallback,
timeout(CriteriaHelper.DEFAULT_MAX_TIME_TO_POLL))
.call(isNull());
assertEquals(1,
RecordHistogram.getHistogramValueCountForTesting(
AndroidFontLookupImpl.FETCH_FONT_NAME_HISTOGRAM, FetchFontName.OTHER));
assertEquals(1,
RecordHistogram.getHistogramValueCountForTesting(
AndroidFontLookupImpl.FETCH_FONT_RESULT_HISTOGRAM,
FetchFontResult.FAILED_UNEXPECTED_NAME));
assertEquals(1,
RecordHistogram.getHistogramTotalCountForTesting(
AndroidFontLookupImpl.MATCH_LOCAL_FONT_BY_UNIQUE_NAME_HISTOGRAM));
}
@SmallTest
@Test
public void testMatchLocalFontByUniqueName_BadResultStatus() throws NameNotFoundException {
// A non-OK FontFamilyResult status must yield a null match and record
// FAILED_STATUS_CODE.
FontFamilyResult result =
new FontFamilyResult(FontFamilyResult.STATUS_UNEXPECTED_DATA_PROVIDED, null);
whenFetchFontsWith(FONT_QUERY_1).thenReturn(result);
mAndroidFontLookup.matchLocalFontByUniqueName(
FULL_FONT_NAME_1, mMatchLocalFontByUniqueNameCallback);
mMojoTestRule.runLoop(RUN_LOOP_TIMEOUT_MS);
verify(mMatchLocalFontByUniqueNameCallback,
timeout(CriteriaHelper.DEFAULT_MAX_TIME_TO_POLL))
.call(isNull());
assertEquals(1,
RecordHistogram.getHistogramValueCountForTesting(
AndroidFontLookupImpl.FETCH_FONT_NAME_HISTOGRAM, FetchFontName.OTHER));
assertEquals(1,
RecordHistogram.getHistogramValueCountForTesting(
AndroidFontLookupImpl.FETCH_FONT_RESULT_HISTOGRAM,
FetchFontResult.FAILED_STATUS_CODE));
assertEquals(1,
RecordHistogram.getHistogramTotalCountForTesting(
AndroidFontLookupImpl.MATCH_LOCAL_FONT_BY_UNIQUE_NAME_HISTOGRAM));
}
@SmallTest
@Test
public void testMatchLocalFontByUniqueName_EmptyResults() throws NameNotFoundException {
// An OK status with zero FontInfo entries is not a unique match: expect a
// null result and FAILED_NON_UNIQUE_RESULT.
FontFamilyResult result = new FontFamilyResult(FontFamilyResult.STATUS_OK, new FontInfo[0]);
whenFetchFontsWith(FONT_QUERY_1).thenReturn(result);
mAndroidFontLookup.matchLocalFontByUniqueName(
FULL_FONT_NAME_1, mMatchLocalFontByUniqueNameCallback);
mMojoTestRule.runLoop(RUN_LOOP_TIMEOUT_MS);
verify(mMatchLocalFontByUniqueNameCallback,
timeout(CriteriaHelper.DEFAULT_MAX_TIME_TO_POLL))
.call(isNull());
assertEquals(1,
RecordHistogram.getHistogramValueCountForTesting(
AndroidFontLookupImpl.FETCH_FONT_NAME_HISTOGRAM, FetchFontName.OTHER));
assertEquals(1,
RecordHistogram.getHistogramValueCountForTesting(
AndroidFontLookupImpl.FETCH_FONT_RESULT_HISTOGRAM,
FetchFontResult.FAILED_NON_UNIQUE_RESULT));
assertEquals(1,
RecordHistogram.getHistogramTotalCountForTesting(
AndroidFontLookupImpl.MATCH_LOCAL_FONT_BY_UNIQUE_NAME_HISTOGRAM));
}
@SmallTest
@Test
public void testMatchLocalFontByUniqueName_BadFontInfoStatus() throws NameNotFoundException {
// A FontInfo with a non-OK result code must yield a null match and record
// FAILED_RESULT_CODE.
FontInfo fontInfo = new FontInfo(URI, 0, 400, false, Columns.RESULT_CODE_FONT_NOT_FOUND);
FontFamilyResult result =
new FontFamilyResult(FontFamilyResult.STATUS_OK, new FontInfo[] {fontInfo});
whenFetchFontsWith(FONT_QUERY_1).thenReturn(result);
mAndroidFontLookup.matchLocalFontByUniqueName(
FULL_FONT_NAME_1, mMatchLocalFontByUniqueNameCallback);
mMojoTestRule.runLoop(RUN_LOOP_TIMEOUT_MS);
verify(mMatchLocalFontByUniqueNameCallback,
timeout(CriteriaHelper.DEFAULT_MAX_TIME_TO_POLL))
.call(isNull());
assertEquals(1,
RecordHistogram.getHistogramValueCountForTesting(
AndroidFontLookupImpl.FETCH_FONT_NAME_HISTOGRAM, FetchFontName.OTHER));
assertEquals(1,
RecordHistogram.getHistogramValueCountForTesting(
AndroidFontLookupImpl.FETCH_FONT_RESULT_HISTOGRAM,
FetchFontResult.FAILED_RESULT_CODE));
assertEquals(1,
RecordHistogram.getHistogramTotalCountForTesting(
AndroidFontLookupImpl.MATCH_LOCAL_FONT_BY_UNIQUE_NAME_HISTOGRAM));
}
@SmallTest
@Test
public void testMatchLocalFontByUniqueName_Throws() throws NameNotFoundException {
// fetchFonts throwing must be handled gracefully: null match and
// FAILED_EXCEPTION recorded.
whenFetchFontsWith(FONT_QUERY_1).thenThrow(new NameNotFoundException());
mAndroidFontLookup.matchLocalFontByUniqueName(
FULL_FONT_NAME_1, mMatchLocalFontByUniqueNameCallback);
mMojoTestRule.runLoop(RUN_LOOP_TIMEOUT_MS);
verify(mMatchLocalFontByUniqueNameCallback,
timeout(CriteriaHelper.DEFAULT_MAX_TIME_TO_POLL))
.call(isNull());
assertEquals(1,
RecordHistogram.getHistogramValueCountForTesting(
AndroidFontLookupImpl.FETCH_FONT_NAME_HISTOGRAM, FetchFontName.OTHER));
assertEquals(1,
RecordHistogram.getHistogramValueCountForTesting(
AndroidFontLookupImpl.FETCH_FONT_RESULT_HISTOGRAM,
FetchFontResult.FAILED_EXCEPTION));
assertEquals(1,
RecordHistogram.getHistogramTotalCountForTesting(
AndroidFontLookupImpl.MATCH_LOCAL_FONT_BY_UNIQUE_NAME_HISTOGRAM));
}
@SmallTest
@Test
public void testMatchLocalFontByUniqueName_NoRetry() throws NameNotFoundException {
// Request font and fail.
whenFetchFontsWith(FONT_QUERY_1).thenThrow(new NameNotFoundException());
mAndroidFontLookup.matchLocalFontByUniqueName(
FULL_FONT_NAME_1, mMatchLocalFontByUniqueNameCallback);
mMojoTestRule.runLoop(RUN_LOOP_TIMEOUT_MS);
verify(mMatchLocalFontByUniqueNameCallback,
timeout(CriteriaHelper.DEFAULT_MAX_TIME_TO_POLL))
.call(isNull());
assertEquals(1,
RecordHistogram.getHistogramValueCountForTesting(
AndroidFontLookupImpl.FETCH_FONT_RESULT_HISTOGRAM,
FetchFontResult.FAILED_EXCEPTION));
// Second request should early out with FAILED_AVOID_RETRY.
mAndroidFontLookup.matchLocalFontByUniqueName(
FULL_FONT_NAME_1, mMatchLocalFontByUniqueNameCallback);
mMojoTestRule.runLoop(RUN_LOOP_TIMEOUT_MS);
verify(mMatchLocalFontByUniqueNameCallback,
timeout(CriteriaHelper.DEFAULT_MAX_TIME_TO_POLL).times(2))
.call(isNull());
assertEquals(2,
RecordHistogram.getHistogramValueCountForTesting(
AndroidFontLookupImpl.FETCH_FONT_NAME_HISTOGRAM, FetchFontName.OTHER));
assertEquals(1,
RecordHistogram.getHistogramValueCountForTesting(
AndroidFontLookupImpl.FETCH_FONT_RESULT_HISTOGRAM,
FetchFontResult.FAILED_AVOID_RETRY));
assertEquals(2,
RecordHistogram.getHistogramTotalCountForTesting(
AndroidFontLookupImpl.MATCH_LOCAL_FONT_BY_UNIQUE_NAME_HISTOGRAM));
}
@SmallTest
@Test
public void testMatchLocalFontByUniqueName_Success() throws NameNotFoundException {
// Happy path: a unique FontInfo with RESULT_CODE_OK produces a non-null
// match and records SUCCESS.
FontInfo fontInfo = new FontInfo(URI, 0, 400, false, Columns.RESULT_CODE_OK);
FontFamilyResult result =
new FontFamilyResult(FontFamilyResult.STATUS_OK, new FontInfo[] {fontInfo});
whenFetchFontsWith(FONT_QUERY_1).thenReturn(result);
mAndroidFontLookup.matchLocalFontByUniqueName(
FULL_FONT_NAME_1, mMatchLocalFontByUniqueNameCallback);
mMojoTestRule.runLoop(RUN_LOOP_TIMEOUT_MS);
verify(mMatchLocalFontByUniqueNameCallback,
timeout(CriteriaHelper.DEFAULT_MAX_TIME_TO_POLL))
.call(notNull());
assertEquals(1,
RecordHistogram.getHistogramValueCountForTesting(
AndroidFontLookupImpl.FETCH_FONT_NAME_HISTOGRAM, FetchFontName.OTHER));
assertEquals(1,
RecordHistogram.getHistogramValueCountForTesting(
AndroidFontLookupImpl.FETCH_FONT_RESULT_HISTOGRAM,
FetchFontResult.SUCCESS));
assertEquals(1,
RecordHistogram.getHistogramTotalCountForTesting(
AndroidFontLookupImpl.MATCH_LOCAL_FONT_BY_UNIQUE_NAME_HISTOGRAM));
}
// Stubs the mocked FontsContractWrapper for a fetchFonts() call whose
// FontRequest carries exactly the given provider query string.
private OngoingStubbing<FontFamilyResult> whenFetchFontsWith(String query)
throws NameNotFoundException {
return when(mMockFontsContractWrapper.fetchFonts(eq(mMockContext), isNull(),
argThat((FontRequest r) -> r.getQuery().equals(query))));
}
}
| |
/*
* Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
* Copyright [2016-2019] EMBL-European Bioinformatics Institute
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Copyright (C) 2003 EBI, GRL
*
* This library is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License along with this library; if not, write to the Free Software Foundation,
* Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package org.ensembl.healthcheck;
import org.ensembl.healthcheck.util.Utils;
/**
 * Typesafe "enum" to store information about the type of a database. Declared final since it only has private constructors.
 * All instances are created eagerly as public constants, so identity comparison (==) is safe.
 */
public final class DatabaseType {
/** A core database */
public static final DatabaseType CORE = new DatabaseType("core");
/** An EST database */
public static final DatabaseType EST = new DatabaseType("est");
/** An ESTgene database */
public static final DatabaseType ESTGENE = new DatabaseType("estgene");
/** A Vega database - note this actually refers to the Ensembl Vega database */
public static final DatabaseType VEGA = new DatabaseType("vega");
/** A Sanger Vega database - note this is different from the Ensembl Vega database */
public static final DatabaseType SANGER_VEGA = new DatabaseType("sangervega");
/** A Compara database */
public static final DatabaseType COMPARA = new DatabaseType("compara");
/** A Mart database */
public static final DatabaseType MART = new DatabaseType("mart");
/** A variation database */
public static final DatabaseType VARIATION = new DatabaseType("variation");
/** A disease database */
public static final DatabaseType DISEASE = new DatabaseType("disease");
/** A haplotype database */
public static final DatabaseType HAPLOTYPE = new DatabaseType("haplotype");
/** A lite database */
public static final DatabaseType LITE = new DatabaseType("lite");
/** A GO database */
public static final DatabaseType GO = new DatabaseType("go");
/** An expression database */
public static final DatabaseType EXPRESSION = new DatabaseType("expression");
/** An xref database */
public static final DatabaseType XREF = new DatabaseType("xref");
/** An cDNA database */
public static final DatabaseType CDNA = new DatabaseType("cdna");
/** A sequence database */
public static final DatabaseType SEQ = new DatabaseType("seq");
/** A help database. NOTE(review): name is upper-case "HELP", unlike every other type. */
public static final DatabaseType HELP = new DatabaseType("HELP");
/** An otherfeatures database */
public static final DatabaseType OTHERFEATURES = new DatabaseType("otherfeatures");
/** A system database */
public static final DatabaseType SYSTEM = new DatabaseType("system");
/** A taxonomy database */
public static final DatabaseType NCBI_TAXONOMY = new DatabaseType("ncbi_taxonomy");
/** An ensembl_website database */
public static final DatabaseType ENSEMBL_WEBSITE = new DatabaseType("ensembl_website");
/** A healthcheck database */
public static final DatabaseType HEALTHCHECK = new DatabaseType("healthcheck");
/** A functional genomics database */
public static final DatabaseType FUNCGEN = new DatabaseType("funcgen");
/** A production database */
public static final DatabaseType PRODUCTION = new DatabaseType("production");
/** An rnaseq database */
public static final DatabaseType RNASEQ = new DatabaseType("rnaseq");
/** A pre database */
public static final DatabaseType PRE_SITE = new DatabaseType("presite");
/** A database whose type has not been determined */
public static final DatabaseType UNKNOWN = new DatabaseType("unknown");
/** The short lower-case name of this type, e.g. "core". */
private final String name;
private DatabaseType(final String name) {
this.name = name;
}
/**
 * @return a String representation of this DatabaseType object.
 */
@Override
public String toString() {
return this.name;
}
/**
 * @return the short name of this DatabaseType object (same value as {@link #toString()}).
 */
public String getName() {
return this.name;
}
// -----------------------------------------------------------------
/**
 * Resolve an alias to a DatabaseType object.
 *
 * @param alias
 *          The alias (e.g. core).
 * @return The DatabaseType object corresponding to alias, or DatabaseType.UNKNOWN if it cannot be resolved.
 */
public static DatabaseType resolveAlias(final String alias) {
String lcAlias = alias.toLowerCase();
// --------------------------------------
// needs to be before core and est since names
// are of the form homo_sapiens_coreexpressionest_24_34e
if (in(lcAlias, "expression")) {
return EXPRESSION;
}
// --------------------------------------
// EG: treat eg_core as core dbs as well
if (in(lcAlias, "core") || in(lcAlias, "eg_core") || in(lcAlias, "ancestral")) {
return CORE;
}
// --------------------------------------
if (in(lcAlias, "est")) {
return EST;
}
// --------------------------------------
if (in(lcAlias, "estgene")) {
return ESTGENE;
}
// --------------------------------------
if (in(lcAlias, "compara") || in(lcAlias, "eg_compara")) {
return COMPARA;
}
// --------------------------------------
if (in(lcAlias, "mart")) {
return MART;
}
// --------------------------------------
if (in(lcAlias, "vega")) {
return VEGA;
}
// --------------------------------------
if (in(lcAlias, "variation")) {
return VARIATION;
}
// --------------------------------------
if (in(lcAlias, "disease")) {
return DISEASE;
}
// --------------------------------------
if (in(lcAlias, "haplotype")) {
return HAPLOTYPE;
}
// --------------------------------------
if (in(lcAlias, "lite")) {
return LITE;
}
// --------------------------------------
if (in(lcAlias, "go")) {
return GO;
}
// --------------------------------------
if (in(lcAlias, "xref")) {
return XREF;
}
// --------------------------------------
if (in(lcAlias, "cdna")) {
return CDNA;
}
// --------------------------------------
if (in(lcAlias, "seq")) {
return SEQ;
}
// --------------------------------------
if (in(lcAlias, "help")) {
return HELP;
}
// --------------------------------------
if (in(lcAlias, "otherfeatures")) {
return OTHERFEATURES;
}
// --------------------------------------
if (in(lcAlias, "ensembl_website")) {
return ENSEMBL_WEBSITE;
}
// --------------------------------------
if (in(lcAlias, "ncbi_taxonomy")) {
return NCBI_TAXONOMY;
}
// --------------------------------------
if (in(lcAlias, "healthcheck")) {
return HEALTHCHECK;
}
// --------------------------------------
if (in(lcAlias, "funcgen") || in(lcAlias, "eg_funcgen")) {
return FUNCGEN;
}
// --------------------------------------
if (in(lcAlias, "ensembl_production")) {
return PRODUCTION;
}
// --------------------------------------
if (in(lcAlias, "rnaseq")) {
return RNASEQ;
}
// --------------------------------------
if (in(lcAlias, "presite")) {
return PRE_SITE;
}
// --------------------------------------
// treat ensembl genomes collection databases as core
if (in(lcAlias, "collection")) {
return CORE;
}
// --------------------------------------
// and sangervega (generally specified via -type)
if (in(lcAlias, "sangervega")) {
return SANGER_VEGA;
}
// --------------------------------------
// default case
return UNKNOWN;
} // resolveAlias
// -----------------------------------------------------------------
/**
 * Return true if the candidate string contains the alias as a substring.
 * (Note the direction: it is {@code list} that is searched for {@code alias},
 * so e.g. in("core", "eg_core") is true.)
 */
private static boolean in(final String alias, final String list) {
// String.contains is the idiomatic equivalent of indexOf(...) > -1.
return list.contains(alias);
}
// -------------------------------------------------------------------------
/**
 * Check if a DatabaseType is generic (core, est, estgene, vega).
 *
 * @return true if database is core, est, estgene or vega etc.
 */
public boolean isGeneric() {
String[] genericTypes = {"core", "est", "estgene", "vega", "cdna", "otherfeatures", "sangervega", "rnaseq", "presite"};
return Utils.stringInArray(name, genericTypes, false);
}
// -----------------------------------------------------------------
} // DatabaseType
| |
/*
* JBoss, Home of Professional Open Source.
*
* Copyright 2013 Red Hat, Inc. and/or its affiliates, and individual
* contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.xnio.ssl;
import static org.xnio.IoUtils.safeClose;
import static org.xnio._private.Messages.msg;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.nio.ByteBuffer;
import java.security.KeyManagementException;
import java.security.NoSuchAlgorithmException;
import java.security.NoSuchProviderException;
import java.util.concurrent.TimeUnit;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLEngine;
import org.xnio.BufferAllocator;
import org.xnio.ByteBufferSlicePool;
import org.xnio.ChannelListener;
import org.xnio.ChannelListeners;
import org.xnio.FutureResult;
import org.xnio.IoFuture;
import org.xnio.IoUtils;
import org.xnio.Option;
import org.xnio.OptionMap;
import org.xnio.Options;
import org.xnio.Pool;
import org.xnio.StreamConnection;
import org.xnio.Xnio;
import org.xnio.XnioExecutor;
import org.xnio.XnioIoThread;
import org.xnio.XnioWorker;
import org.xnio.channels.AcceptingChannel;
import org.xnio.channels.AssembledConnectedSslStreamChannel;
import org.xnio.channels.BoundChannel;
import org.xnio.channels.ConnectedSslStreamChannel;
import org.xnio.channels.ConnectedStreamChannel;
/**
* An XNIO SSL provider based on JSSE. Works with any XNIO provider.
*
* @author <a href="mailto:david.lloyd@redhat.com">David M. Lloyd</a>
* @author <a href="mailto:frainone@redhat.com">Flavia Rainone</a>
*/
public final class JsseXnioSsl extends XnioSsl {
private static final Pool<ByteBuffer> bufferPool = new ByteBufferSlicePool(BufferAllocator.DIRECT_BYTE_BUFFER_ALLOCATOR, 17 * 1024, 17 * 1024 * 128);
private final SSLContext sslContext;
/**
 * Construct a new instance, building the SSL context from the given options.
 *
 * @param xnio the XNIO instance to associate with
 * @param optionMap the options for this provider
 * @throws NoSuchProviderException if the given SSL provider is not found
 * @throws NoSuchAlgorithmException if the given SSL algorithm is not supported
 * @throws KeyManagementException if the SSL context could not be initialized
 */
public JsseXnioSsl(final Xnio xnio, final OptionMap optionMap) throws NoSuchProviderException, NoSuchAlgorithmException, KeyManagementException {
// Delegate to the main constructor with a context derived from the options.
this(xnio, optionMap, JsseSslUtils.createSSLContext(optionMap));
}
/**
 * Construct a new instance around a caller-supplied SSL context.
 *
 * @param xnio the XNIO instance to associate with
 * @param optionMap the options for this provider
 * @param sslContext the SSL context to use for this instance
 */
public JsseXnioSsl(final Xnio xnio, final OptionMap optionMap, final SSLContext sslContext) {
super(xnio, sslContext, optionMap);
// Keep a local reference; the context is also passed to the superclass above.
this.sslContext = sslContext;
}
/**
 * Get the JSSE SSL context for this provider instance. The same context is used
 * for every connection and server created by this provider.
 *
 * @return the SSL context
 */
@SuppressWarnings("unused")
public SSLContext getSslContext() {
return sslContext;
}
/**
 * Get the SSL engine for a given connection. Only connections created by this
 * JSSE-based provider carry an engine; anything else is rejected.
 *
 * @return the SSL engine
 */
public static SSLEngine getSslEngine(SslConnection connection) {
    // Guard clause: reject connections from other providers up front.
    if (!(connection instanceof JsseSslStreamConnection)) {
        throw msg.notFromThisProvider();
    }
    return ((JsseSslStreamConnection) connection).getEngine();
}
// Deprecated-channel adapter: opens an SSL connection and exposes it through the
// legacy ConnectedSslStreamChannel API. The future wiring is order-sensitive:
// the open listener assembles the channel, the first notifier propagates
// failure/cancellation forward, and the second propagates cancellation back.
@SuppressWarnings("deprecation")
public IoFuture<ConnectedSslStreamChannel> connectSsl(final XnioWorker worker, final InetSocketAddress bindAddress, final InetSocketAddress destination, final ChannelListener<? super ConnectedSslStreamChannel> openListener, final ChannelListener<? super BoundChannel> bindListener, final OptionMap optionMap) {
final FutureResult<ConnectedSslStreamChannel> futureResult = new FutureResult<ConnectedSslStreamChannel>(IoUtils.directExecutor());
final IoFuture<SslConnection> futureSslConnection = openSslConnection(worker, bindAddress, destination, new ChannelListener<SslConnection>() {
public void handleEvent(final SslConnection sslConnection) {
// Wrap the new SSL connection in the legacy channel type.
final ConnectedSslStreamChannel assembledChannel = new AssembledConnectedSslStreamChannel(sslConnection, sslConnection.getSourceChannel(), sslConnection.getSinkChannel());
if (!futureResult.setResult(assembledChannel)) {
// The outer future was already cancelled/failed; don't leak the channel.
safeClose(assembledChannel);
} else {
ChannelListeners.invokeChannelListener(assembledChannel, openListener);
}
}
}, bindListener, optionMap).addNotifier(new IoFuture.HandlingNotifier<SslConnection, FutureResult<ConnectedSslStreamChannel>>() {
// Forward cancellation/failure of the SSL connect into the channel future.
public void handleCancelled(final FutureResult<ConnectedSslStreamChannel> result) {
result.setCancelled();
}
public void handleFailed(final IOException exception, final FutureResult<ConnectedSslStreamChannel> result) {
result.setException(exception);
}
}, futureResult);
// If the caller cancels the channel future, cancel the SSL connect attempt too.
futureResult.getIoFuture().addNotifier(new IoFuture.HandlingNotifier<ConnectedStreamChannel, IoFuture<SslConnection>>() {
public void handleCancelled(final IoFuture<SslConnection> result) {
result.cancel();
}
}, futureSslConnection);
futureResult.addCancelHandler(futureSslConnection);
return futureResult.getIoFuture();
}
/**
 * Open an SSL connection via the given worker. The plain TCP connection is
 * opened first; the stream-connection listener upgrades it to SSL and completes
 * the returned future.
 */
public IoFuture<SslConnection> openSslConnection(final XnioWorker worker, final InetSocketAddress bindAddress, final InetSocketAddress destination, final ChannelListener<? super SslConnection> openListener, final ChannelListener<? super BoundChannel> bindListener, final OptionMap optionMap) {
    final FutureResult<SslConnection> futureResult = new FutureResult<SslConnection>(worker);
    final StreamConnectionChannelListener listener = new StreamConnectionChannelListener(optionMap, destination, futureResult, openListener);
    return setupSslConnection(futureResult, worker.openStreamConnection(bindAddress, destination, listener, bindListener, optionMap));
}
@Override
public IoFuture<SslConnection> openSslConnection(final XnioIoThread ioThread, final InetSocketAddress bindAddress, final InetSocketAddress destination, final ChannelListener<? super SslConnection> openListener, final ChannelListener<? super BoundChannel> bindListener, final OptionMap optionMap) {
    // Same flow as the worker-based variant, but the connect is pinned to the
    // given I/O thread.
    final FutureResult<SslConnection> futureResult = new FutureResult<SslConnection>(ioThread);
    final StreamConnectionChannelListener listener = new StreamConnectionChannelListener(optionMap, destination, futureResult, openListener);
    return setupSslConnection(futureResult, ioThread.openStreamConnection(bindAddress, destination, listener, bindListener, optionMap));
}
/**
 * Wire the future of the underlying plain connection to the SSL connection future
 * so that failure and cancellation propagate in both directions.
 *
 * <p>Made {@code static}: the method touches no instance state.
 *
 * @param futureResult the SSL-connection result completed by the open listener
 * @param connection the future plain stream connection
 * @return the SSL connection future to hand back to the caller
 */
private static IoFuture<SslConnection> setupSslConnection(FutureResult<SslConnection> futureResult, IoFuture<StreamConnection> connection) {
    // Propagate failure/cancellation of the plain connect into the SSL future...
    connection.addNotifier(new IoFuture.HandlingNotifier<StreamConnection, FutureResult<SslConnection>>() {
        public void handleCancelled(final FutureResult<SslConnection> attachment) {
            attachment.setCancelled();
        }
        public void handleFailed(final IOException exception, final FutureResult<SslConnection> attachment) {
            attachment.setException(exception);
        }
    }, futureResult);
    // ...and let cancelling the SSL future abort the plain connect attempt.
    futureResult.addCancelHandler(connection);
    return futureResult.getIoFuture();
}
// Deprecated-channel adapter: creates an SSL connection server and wraps it so it
// accepts legacy ConnectedSslStreamChannel instances. Every member of the anonymous
// AcceptingChannel simply delegates to the underlying SslConnection server; only
// accept() adds behaviour (wrapping the accepted connection).
@SuppressWarnings("deprecation")
public AcceptingChannel<ConnectedSslStreamChannel> createSslTcpServer(final XnioWorker worker, final InetSocketAddress bindAddress, final ChannelListener<? super AcceptingChannel<ConnectedSslStreamChannel>> acceptListener, final OptionMap optionMap) throws IOException {
final AcceptingChannel<SslConnection> server = createSslConnectionServer(worker, bindAddress, null, optionMap);
final AcceptingChannel<ConnectedSslStreamChannel> acceptingChannel = new AcceptingChannel<ConnectedSslStreamChannel>() {
public ConnectedSslStreamChannel accept() throws IOException {
// Wrap each accepted SSL connection in the legacy channel type; null
// (no pending connection) passes through unchanged.
final SslConnection connection = server.accept();
return connection == null ? null : new AssembledConnectedSslStreamChannel(connection, connection.getSourceChannel(), connection.getSinkChannel());
}
public ChannelListener.Setter<? extends AcceptingChannel<ConnectedSslStreamChannel>> getAcceptSetter() {
// Delegating setters re-target listener invocations at this wrapper.
return ChannelListeners.getDelegatingSetter(server.getAcceptSetter(), this);
}
public ChannelListener.Setter<? extends AcceptingChannel<ConnectedSslStreamChannel>> getCloseSetter() {
return ChannelListeners.getDelegatingSetter(server.getCloseSetter(), this);
}
public SocketAddress getLocalAddress() {
return server.getLocalAddress();
}
public <A extends SocketAddress> A getLocalAddress(final Class<A> type) {
return server.getLocalAddress(type);
}
public void suspendAccepts() {
server.suspendAccepts();
}
public void resumeAccepts() {
server.resumeAccepts();
}
public boolean isAcceptResumed() {
return server.isAcceptResumed();
}
public void wakeupAccepts() {
server.wakeupAccepts();
}
public void awaitAcceptable() throws IOException {
server.awaitAcceptable();
}
public void awaitAcceptable(final long time, final TimeUnit timeUnit) throws IOException {
server.awaitAcceptable(time, timeUnit);
}
public XnioWorker getWorker() {
return server.getWorker();
}
@Deprecated
public XnioExecutor getAcceptThread() {
return server.getAcceptThread();
}
public XnioIoThread getIoThread() {
return server.getIoThread();
}
public void close() throws IOException {
server.close();
}
public boolean isOpen() {
return server.isOpen();
}
public boolean supportsOption(final Option<?> option) {
return server.supportsOption(option);
}
public <T> T getOption(final Option<T> option) throws IOException {
return server.getOption(option);
}
public <T> T setOption(final Option<T> option, final T value) throws IllegalArgumentException, IOException {
return server.setOption(option, value);
}
};
// Register the caller's accept listener on the wrapper, not the raw server.
acceptingChannel.getAcceptSetter().set(acceptListener);
return acceptingChannel;
}
/**
 * Creates an accepting SSL server that wraps accepted TCP connections with JSSE.
 *
 * @param worker         the worker to create the server on
 * @param bindAddress    the address to listen on
 * @param acceptListener optional listener invoked when a connection can be accepted
 * @param optionMap      server options (e.g. {@code Options.SSL_STARTTLS})
 * @return the accepting SSL channel
 * @throws IOException if the underlying TCP server cannot be created
 */
public AcceptingChannel<SslConnection> createSslConnectionServer(final XnioWorker worker, final InetSocketAddress bindAddress, final ChannelListener<? super AcceptingChannel<SslConnection>> acceptListener, final OptionMap optionMap) throws IOException {
    // Whether the connection starts in plaintext and upgrades to TLS later.
    final boolean startTls = optionMap.get(Options.SSL_STARTTLS, false);
    final JsseAcceptingSslStreamConnection server = new JsseAcceptingSslStreamConnection(
            sslContext,
            worker.createStreamConnectionServer(bindAddress, null, optionMap),
            optionMap,
            bufferPool,
            bufferPool,
            startTls);
    if (acceptListener != null) {
        server.getAcceptSetter().set(acceptListener);
    }
    return server;
}
/**
 * Completes a pending SSL-connection future once the underlying TCP connection
 * becomes available, wrapping it in a JSSE-backed SSL connection.
 */
private class StreamConnectionChannelListener implements ChannelListener<StreamConnection> {
    private final OptionMap optionMap;
    private final InetSocketAddress destination;
    private final FutureResult<SslConnection> futureResult;
    private final ChannelListener<? super SslConnection> openListener;

    public StreamConnectionChannelListener(OptionMap optionMap, InetSocketAddress destination, FutureResult<SslConnection> futureResult, ChannelListener<? super SslConnection> openListener) {
        this.optionMap = optionMap;
        this.destination = destination;
        this.futureResult = futureResult;
        this.openListener = openListener;
    }

    public void handleEvent(final StreamConnection connection) {
        // Whether the connection starts in plaintext and upgrades to TLS later.
        final boolean startTls = optionMap.get(Options.SSL_STARTTLS, false);
        // Wrap the raw TCP connection with an SSL engine configured for the destination.
        final SslConnection sslConnection = new JsseSslStreamConnection(
                connection,
                JsseSslUtils.createSSLEngine(sslContext, optionMap, destination),
                bufferPool,
                bufferPool,
                startTls);
        if (futureResult.setResult(sslConnection)) {
            // Future is still live: hand the new SSL connection to the open listener.
            ChannelListeners.invokeChannelListener(sslConnection, openListener);
        } else {
            // Future was already cancelled or failed; discard the raw connection.
            IoUtils.safeClose(connection);
        }
    }
}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.sql.planner.optimizations;
import com.facebook.presto.Session;
import com.facebook.presto.metadata.FunctionRegistry;
import com.facebook.presto.metadata.Signature;
import com.facebook.presto.operator.aggregation.InternalAggregationFunction;
import com.facebook.presto.spi.type.Type;
import com.facebook.presto.sql.planner.Partitioning;
import com.facebook.presto.sql.planner.PartitioningScheme;
import com.facebook.presto.sql.planner.PlanNodeIdAllocator;
import com.facebook.presto.sql.planner.Symbol;
import com.facebook.presto.sql.planner.SymbolAllocator;
import com.facebook.presto.sql.planner.plan.AggregationNode;
import com.facebook.presto.sql.planner.plan.ExchangeNode;
import com.facebook.presto.sql.planner.plan.PlanNode;
import com.facebook.presto.sql.planner.plan.ProjectNode;
import com.facebook.presto.sql.planner.plan.SimplePlanRewriter;
import com.facebook.presto.sql.tree.Expression;
import com.facebook.presto.sql.tree.FunctionCall;
import com.facebook.presto.sql.tree.QualifiedName;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import static com.facebook.presto.sql.planner.plan.AggregationNode.Step.FINAL;
import static com.facebook.presto.sql.planner.plan.AggregationNode.Step.PARTIAL;
import static com.facebook.presto.sql.planner.plan.ExchangeNode.Type.GATHER;
import static com.facebook.presto.sql.planner.plan.ExchangeNode.Type.REPARTITION;
import static com.google.common.base.Verify.verify;
import static java.util.Objects.requireNonNull;
/**
 * Optimizer that pushes partial aggregations below exchanges so that partial
 * aggregation runs closer to the data source. SINGLE-step aggregations are
 * first split into a FINAL over a PARTIAL; the PARTIAL is then pushed beneath
 * each source of a gather/repartition exchange.
 */
public class PartialAggregationPushDown
        implements PlanOptimizer
{
    private final FunctionRegistry functionRegistry;

    public PartialAggregationPushDown(FunctionRegistry registry)
    {
        requireNonNull(registry, "registry is null");
        this.functionRegistry = registry;
    }

    @Override
    public PlanNode optimize(PlanNode plan, Session session, Map<Symbol, Type> types, SymbolAllocator symbolAllocator, PlanNodeIdAllocator idAllocator)
    {
        return SimplePlanRewriter.rewriteWith(new Rewriter(symbolAllocator, idAllocator), plan, null);
    }

    private class Rewriter
            extends SimplePlanRewriter<Void>
    {
        private final SymbolAllocator allocator;
        private final PlanNodeIdAllocator idAllocator;

        public Rewriter(SymbolAllocator allocator, PlanNodeIdAllocator idAllocator)
        {
            this.allocator = requireNonNull(allocator, "allocator is null");
            this.idAllocator = requireNonNull(idAllocator, "idAllocator is null");
        }

        @Override
        public PlanNode visitAggregation(AggregationNode node, RewriteContext<Void> context)
        {
            PlanNode child = node.getSource();
            // Push-down only applies when the aggregation sits directly on an exchange.
            if (!(child instanceof ExchangeNode)) {
                return context.defaultRewrite(node);
            }
            // partial aggregation can only be pushed through exchange that doesn't change
            // the cardinality of the stream (i.e., gather or repartition)
            ExchangeNode exchange = (ExchangeNode) child;
            if ((exchange.getType() != GATHER && exchange.getType() != REPARTITION) ||
                    exchange.getPartitioningScheme().isReplicateNulls()) {
                return context.defaultRewrite(node);
            }
            if (exchange.getType() == REPARTITION) {
                // if partitioning columns are not a subset of grouping keys,
                // we can't push this through
                List<Symbol> partitioningColumns = exchange.getPartitioningScheme()
                        .getPartitioning()
                        .getArguments()
                        .stream()
                        .filter(Partitioning.ArgumentBinding::isVariable)
                        .map(Partitioning.ArgumentBinding::getColumn)
                        .collect(Collectors.toList());
                if (!node.getGroupingKeys().containsAll(partitioningColumns)) {
                    return context.defaultRewrite(node);
                }
            }
            // currently, we only support plans that don't use pre-computed hash functions
            if (node.getHashSymbol().isPresent() || exchange.getPartitioningScheme().getHashColumn().isPresent()) {
                return context.defaultRewrite(node);
            }
            // Every aggregate must support split (partial/final) evaluation.
            boolean decomposable = node.getFunctions().values().stream()
                    .map(functionRegistry::getAggregateFunctionImplementation)
                    .allMatch(InternalAggregationFunction::isDecomposable);
            if (!decomposable) {
                return context.defaultRewrite(node);
            }
            switch (node.getStep()) {
                case SINGLE:
                    // Split it into a FINAL on top of a PARTIAL and
                    // reprocess the resulting plan to push the partial
                    // below the exchange (see case below).
                    return context.rewrite(split(node));
                case PARTIAL:
                    // Push it underneath each branch of the exchange
                    // and reprocess in case it can be pushed further down
                    // (e.g., if there are local/remote exchanges stacked)
                    return context.rewrite(pushPartial(node, exchange));
                default:
                    // FINAL / INTERMEDIATE steps are left in place.
                    return context.defaultRewrite(node);
            }
        }

        /**
         * Replicates the partial aggregation onto every source branch of the
         * exchange and returns a new exchange over the per-branch partials.
         */
        private PlanNode pushPartial(AggregationNode partial, ExchangeNode exchange)
        {
            List<PlanNode> partials = new ArrayList<>();
            for (int i = 0; i < exchange.getSources().size(); i++) {
                PlanNode source = exchange.getSources().get(i);
                if (!exchange.getOutputSymbols().equals(exchange.getInputs().get(i))) {
                    // Add an identity projection to preserve the inputs to the aggregation, if necessary.
                    // This allows us to avoid having to rewrite the symbols in the aggregation node below.
                    ImmutableMap.Builder<Symbol, Expression> assignments = ImmutableMap.builder();
                    for (int outputIndex = 0; outputIndex < exchange.getOutputSymbols().size(); outputIndex++) {
                        Symbol output = exchange.getOutputSymbols().get(outputIndex);
                        Symbol input = exchange.getInputs().get(i).get(outputIndex);
                        assignments.put(output, input.toSymbolReference());
                    }
                    source = new ProjectNode(idAllocator.getNextId(), source, assignments.build());
                }
                // Since this exchange source is now guaranteed to have the same symbols as the inputs to the partial
                // aggregation, we can build a new AggregationNode without any further symbol rewrites
                partials.add(new AggregationNode(
                        idAllocator.getNextId(),
                        source,
                        partial.getAggregations(),
                        partial.getFunctions(),
                        partial.getMasks(),
                        partial.getGroupingSets(),
                        partial.getStep(),
                        partial.getHashSymbol(),
                        partial.getGroupIdSymbol()));
            }
            // Sanity check: every pushed-down partial must produce the same outputs.
            for (PlanNode node : partials) {
                verify(partial.getOutputSymbols().equals(node.getOutputSymbols()));
            }
            // Since this exchange source is now guaranteed to have the same symbols as the inputs to the partial
            // aggregation, we don't need to rewrite symbols in the partitioning function
            PartitioningScheme partitioning = new PartitioningScheme(
                    exchange.getPartitioningScheme().getPartitioning(),
                    partial.getOutputSymbols(),
                    exchange.getPartitioningScheme().getHashColumn(),
                    exchange.getPartitioningScheme().isReplicateNulls(),
                    exchange.getPartitioningScheme().getBucketToPartition());
            return new ExchangeNode(
                    idAllocator.getNextId(),
                    exchange.getType(),
                    exchange.getScope(),
                    partitioning,
                    partials,
                    ImmutableList.copyOf(Collections.nCopies(partials.size(), partial.getOutputSymbols())));
        }

        /**
         * Splits a SINGLE-step aggregation into a PARTIAL aggregation feeding a
         * FINAL aggregation, introducing intermediate symbols for each aggregate.
         */
        private PlanNode split(AggregationNode node)
        {
            // otherwise, add a partial and final with an exchange in between
            Map<Symbol, Symbol> masks = node.getMasks();
            Map<Symbol, FunctionCall> finalCalls = new HashMap<>();
            Map<Symbol, FunctionCall> intermediateCalls = new HashMap<>();
            Map<Symbol, Signature> intermediateFunctions = new HashMap<>();
            Map<Symbol, Symbol> intermediateMask = new HashMap<>();
            for (Map.Entry<Symbol, FunctionCall> entry : node.getAggregations().entrySet()) {
                Signature signature = node.getFunctions().get(entry.getKey());
                InternalAggregationFunction function = functionRegistry.getAggregateFunctionImplementation(signature);
                // Fresh symbol typed with the aggregate's intermediate (partial) state type.
                Symbol intermediateSymbol = allocator.newSymbol(signature.getName(), function.getIntermediateType());
                intermediateCalls.put(intermediateSymbol, entry.getValue());
                intermediateFunctions.put(intermediateSymbol, signature);
                if (masks.containsKey(entry.getKey())) {
                    intermediateMask.put(intermediateSymbol, masks.get(entry.getKey()));
                }
                // rewrite final aggregation in terms of intermediate function
                finalCalls.put(entry.getKey(), new FunctionCall(QualifiedName.of(signature.getName()), ImmutableList.of(intermediateSymbol.toSymbolReference())));
            }
            PlanNode partial = new AggregationNode(
                    idAllocator.getNextId(),
                    node.getSource(),
                    intermediateCalls,
                    intermediateFunctions,
                    intermediateMask,
                    node.getGroupingSets(),
                    PARTIAL,
                    node.getHashSymbol(),
                    node.getGroupIdSymbol());
            // The FINAL keeps the original node id so the plan root stays stable;
            // masks were already applied at the partial step, hence the empty map.
            return new AggregationNode(
                    node.getId(),
                    partial,
                    finalCalls,
                    node.getFunctions(),
                    ImmutableMap.of(),
                    node.getGroupingSets(),
                    FINAL,
                    node.getHashSymbol(),
                    node.getGroupIdSymbol());
        }
    }
}
| |
package com.myrippleapps.mynotes.app.database;
import android.content.ContentValues;
import android.content.Context;
import android.content.SharedPreferences;
import android.database.Cursor;
import android.database.SQLException;
import android.database.sqlite.SQLiteDatabase;
import com.myrippleapps.mynotes.app.R;
import com.myrippleapps.mynotes.app.database.NoteContract.Note;
import java.util.ArrayList;
/**
 * Data-access object for the notes and pincode tables.
 *
 * Callers are expected to invoke {@link #open()} before read operations;
 * write operations acquire a writable database handle themselves.
 */
public class NoteDataSource {
    // Sort-preference values stored in SharedPreferences under sortByKey.
    private final String NEWEST = "newest";
    private final String OLDEST = "oldest";
    // Projection used by all note queries; readNote() depends on this column order.
    private final String[] allColumn = new String[]{Note.COLUMN_ID, Note.COLUMN_TITLE, Note.COLUMN_DESCRIPTION, Note.COLUMN_STATUS, Note.COLUMN_DATE_EDITED, Note.COLUMN_TIME_EDITED, Note.COLUMN_DATE_ADDED, Note.COLUMN_TIME_ADDED, Note.COLUMN_LOCK_STATUS, Note.COLUMN_COLOR};
    private final Context mContext;
    private final DatabaseSQLHelper dbHelper;
    private final String[] pincodeCOLUMN = new String[]{Note.COLUMN_PINCODE_ID, Note.COLUMN_PINCODE};
    private final String sharedPreferencesNotes = "mPreferences";
    private final String sortByKey = "sortByKey";
    private SQLiteDatabase database;
    private String orderBy;
    private SharedPreferences sharedPreferences;

    public NoteDataSource(Context context) {
        dbHelper = new DatabaseSQLHelper(context);
        mContext = context;
    }

    /** Ensures {@link #database} refers to an open writable database and returns it. */
    private SQLiteDatabase writableDatabase() {
        database = dbHelper.getWritableDatabase();
        return database;
    }

    /**
     * Reads the user's sort preference and returns the ORDER BY clause.
     * Missing, empty, or unrecognized values fall back to newest-first.
     */
    private String resolveOrderBy() {
        sharedPreferences = mContext.getSharedPreferences(sharedPreferencesNotes, 0);
        String sortBy = sharedPreferences.getString(sortByKey, NEWEST);
        if (OLDEST.equals(sortBy)) {
            return Note.COLUMN_ID + " ASC";
        }
        return Note.COLUMN_ID + " DESC";
    }

    /** Maps the current cursor row to a NoteRecord; optional columns are read null-safely. */
    private NoteRecord readNote(Cursor cursor) {
        NoteRecord noteRecord = new NoteRecord();
        noteRecord.setNote_id(cursor.getInt(0));
        noteRecord.setNote_title(cursor.isNull(1) ? "" : cursor.getString(1));
        noteRecord.setNote_description(cursor.isNull(2) ? "" : cursor.getString(2));
        noteRecord.setNote_status(cursor.getString(3));
        noteRecord.setNote_date_edited(cursor.getString(4));
        noteRecord.setNote_time_edited(cursor.getString(5));
        noteRecord.setNote_date_added(cursor.getString(6));
        noteRecord.setNote_time_added(cursor.getString(7));
        noteRecord.setLock_status(cursor.isNull(8) ? "" : cursor.getString(8));
        // NOTE(review): this default stores the numeric resource id of R.color.primary_color
        // as a string (not a hex color) — preserved from the original; confirm callers expect it.
        noteRecord.setNote_color(cursor.isNull(9) ? String.valueOf(R.color.primary_color) : cursor.getString(9));
        return noteRecord;
    }

    /** Runs a note query with the given WHERE clause and returns every matching row. */
    private ArrayList<NoteRecord> queryNotes(String selection) {
        ArrayList<NoteRecord> result = new ArrayList<>();
        orderBy = resolveOrderBy();
        Cursor cursor = database.query(Note.NOTE_TABLE, allColumn, selection, null, null, null, orderBy);
        try {
            while (cursor.moveToNext()) {
                result.add(readNote(cursor));
            }
        } finally {
            // Always release the cursor, even if a read throws.
            cursor.close();
        }
        return result;
    }

    /** Marks the note with the given id as archived. */
    public void archivedNote(int n) {
        writableDatabase().execSQL("UPDATE " + Note.NOTE_TABLE + " SET " + Note.COLUMN_STATUS + " = 'Archived' WHERE " + Note.COLUMN_ID + " = ?", new Object[]{n});
    }

    /** Closes the underlying database helper. */
    public void close() {
        dbHelper.close();
    }

    /** Deletes the note with the given id. */
    public void deleteNote(int n) {
        // Acquire a writable handle first; the original NPE'd if open() had not been called.
        writableDatabase().execSQL("DELETE FROM " + Note.NOTE_TABLE + " WHERE " + Note.COLUMN_ID + " = ?", new Object[]{n});
    }

    /** Persists the editable fields (title, description, edit timestamps, color) of an existing note. */
    public void editNote(NoteRecord noteRecord) {
        ContentValues contentValues = new ContentValues();
        contentValues.put(Note.COLUMN_TITLE, noteRecord.getNote_title());
        contentValues.put(Note.COLUMN_DESCRIPTION, noteRecord.getNote_description());
        contentValues.put(Note.COLUMN_TIME_EDITED, noteRecord.getNote_time_edited());
        contentValues.put(Note.COLUMN_DATE_EDITED, noteRecord.getNote_date_edited());
        contentValues.put(Note.COLUMN_COLOR, noteRecord.getNote_color());
        // Parameterized WHERE clause instead of string concatenation.
        writableDatabase().update(Note.NOTE_TABLE, contentValues, Note.COLUMN_ID + " = ?", new String[]{String.valueOf(noteRecord.getNote_id())});
    }

    /** Returns all archived notes, sorted per the user's sort preference. */
    public ArrayList<NoteRecord> getAllArchivedNotes() {
        return queryNotes(Note.COLUMN_STATUS + " = 'Archived'");
    }

    /** Returns all unarchived notes, sorted per the user's sort preference. */
    public ArrayList<NoteRecord> getAllNotes() {
        return queryNotes(Note.COLUMN_STATUS + " = 'Unarchived'");
    }

    /** Returns the highest note id in the table, or 1 when the table is empty. */
    public int getLatestID() {
        Cursor cursor = database.rawQuery("SELECT " + Note.COLUMN_ID + " from " + Note.NOTE_TABLE + " order by " + Note.COLUMN_ID + " DESC limit 1", null);
        try {
            if (cursor != null && cursor.moveToFirst()) {
                return cursor.getInt(0);
            }
            // Preserved fallback for an empty table.
            return 1;
        } finally {
            // The original leaked this cursor.
            if (cursor != null) {
                cursor.close();
            }
        }
    }

    /** Loads a single note by id; returns an empty record when no row matches. */
    public NoteRecord getNoteByID(String string) {
        NoteRecord noteRecord = new NoteRecord();
        // Parameterized selection avoids SQL injection through the id argument.
        Cursor cursor = database.query(Note.NOTE_TABLE, allColumn, Note.COLUMN_ID + " = ?", new String[]{string}, null, null, null);
        try {
            if (cursor.moveToFirst()) {
                noteRecord = readNote(cursor);
            }
        } finally {
            cursor.close();
        }
        return noteRecord;
    }

    /** Returns the stored PIN, or the empty string when none is set. */
    public String getPIN() {
        String string = "";
        Cursor cursor = database.rawQuery("SELECT " + Note.COLUMN_PINCODE + " from " + Note.PINCODE_TABLE + " where " + Note.COLUMN_PINCODE_ID + " = 1", null);
        try {
            if (cursor != null && cursor.moveToFirst()) {
                string = cursor.getString(0);
            }
        } finally {
            // The original leaked this cursor.
            if (cursor != null) {
                cursor.close();
            }
        }
        return string;
    }

    /** Inserts a new note row with all fields from the record. */
    public void insertNote(NoteRecord noteRecord) {
        ContentValues contentValues = new ContentValues();
        contentValues.put(Note.COLUMN_TITLE, noteRecord.getNote_title());
        contentValues.put(Note.COLUMN_DESCRIPTION, noteRecord.getNote_description());
        contentValues.put(Note.COLUMN_STATUS, noteRecord.getNote_status());
        contentValues.put(Note.COLUMN_TIME_ADDED, noteRecord.getNote_time_added());
        contentValues.put(Note.COLUMN_TIME_EDITED, noteRecord.getNote_time_edited());
        contentValues.put(Note.COLUMN_DATE_ADDED, noteRecord.getNote_date_added());
        contentValues.put(Note.COLUMN_DATE_EDITED, noteRecord.getNote_date_edited());
        contentValues.put(Note.COLUMN_LOCK_STATUS, noteRecord.getLock_status());
        contentValues.put(Note.COLUMN_COLOR, noteRecord.getNote_color());
        writableDatabase().insert(Note.NOTE_TABLE, null, contentValues);
    }

    /** Stores the PIN under the fixed row id 1. */
    public void insertPincode(String string) {
        ContentValues contentValues = new ContentValues();
        contentValues.put(Note.COLUMN_PINCODE_ID, 1);
        contentValues.put(Note.COLUMN_PINCODE, string);
        writableDatabase().insert(Note.PINCODE_TABLE, null, contentValues);
    }

    /** Returns true when at least one PIN row exists. */
    public boolean isPincodeSet() {
        Cursor cursor = database.query(Note.PINCODE_TABLE, pincodeCOLUMN, null, null, null, null, null);
        try {
            // Existence check only — no need to iterate every row as the original did.
            return cursor.moveToFirst();
        } finally {
            // The original leaked this cursor.
            cursor.close();
        }
    }

    /** Marks the note with the given id as locked. */
    public void lockNote(int n) {
        writableDatabase().execSQL("UPDATE " + Note.NOTE_TABLE + " SET " + Note.COLUMN_LOCK_STATUS + " = 'Locked' WHERE " + Note.COLUMN_ID + " = ?", new Object[]{n});
    }

    /** Opens a writable database handle for subsequent read/write calls. */
    public void open() throws SQLException {
        database = dbHelper.getWritableDatabase();
    }

    /** Removes the stored PIN row. */
    public void removePIN() {
        // Acquire a writable handle first; the original NPE'd if open() had not been called.
        writableDatabase().execSQL("DELETE FROM " + Note.PINCODE_TABLE + " WHERE " + Note.COLUMN_PINCODE_ID + " = 1");
    }

    /** Marks the note with the given id as unarchived. */
    public void unarchivedNote(int n) {
        writableDatabase().execSQL("UPDATE " + Note.NOTE_TABLE + " SET " + Note.COLUMN_STATUS + " = 'Unarchived' WHERE " + Note.COLUMN_ID + " = ?", new Object[]{n});
    }

    /** Unlocks every note. */
    public void unlockALlNotes() {
        writableDatabase().execSQL("UPDATE " + Note.NOTE_TABLE + " SET " + Note.COLUMN_LOCK_STATUS + " = 'Unlocked' ");
    }

    /** Unlocks the note with the given id. */
    public void unlockNote(int n) {
        writableDatabase().execSQL("UPDATE " + Note.NOTE_TABLE + " SET " + Note.COLUMN_LOCK_STATUS + " = 'Unlocked' WHERE " + Note.COLUMN_ID + " = ?", new Object[]{n});
    }
}
| |
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.ec2.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
import com.amazonaws.Request;
import com.amazonaws.services.ec2.model.transform.ModifyVpcAttributeRequestMarshaller;
/**
*
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ModifyVpcAttributeRequest extends AmazonWebServiceRequest implements Serializable, Cloneable, DryRunSupportedRequest<ModifyVpcAttributeRequest> {
/**
 * Whether instances launched in the VPC get DNS hostnames. DNS hostnames can only
 * be enabled when DNS support is also enabled, and this attribute cannot be
 * modified in the same request as the DNS resolution attribute — use separate
 * requests for each attribute.
 */
private Boolean enableDnsHostnames;
/**
 * Whether DNS resolution is supported for the VPC. When enabled, queries to the
 * Amazon-provided DNS server at 169.254.169.253, or at the reserved address at the
 * base of the VPC network range "plus two", succeed; when disabled, the
 * Amazon-provided DNS service that resolves public DNS hostnames to IP addresses
 * is not enabled. Cannot be modified in the same request as the DNS hostnames
 * attribute — use separate requests for each attribute.
 */
private Boolean enableDnsSupport;
/** The ID of the VPC whose attributes are being modified. */
private String vpcId;
/**
 * Sets whether instances launched in the VPC get DNS hostnames.
 *
 * @param enableDnsHostnames
 *        whether instances launched in the VPC get DNS hostnames. This attribute
 *        cannot be modified in the same request as DNS support; DNS hostnames can
 *        only be enabled if DNS support is enabled.
 */
public void setEnableDnsHostnames(Boolean enableDnsHostnames) {
    this.enableDnsHostnames = enableDnsHostnames;
}
/**
 * Returns whether instances launched in the VPC get DNS hostnames.
 *
 * @return whether instances launched in the VPC get DNS hostnames; may be
 *         {@code null} if the attribute was never set on this request
 */
public Boolean getEnableDnsHostnames() {
    return this.enableDnsHostnames;
}
/**
 * Fluent variant of {@link #setEnableDnsHostnames(Boolean)}.
 *
 * @param enableDnsHostnames
 *        whether instances launched in the VPC get DNS hostnames. This attribute
 *        cannot be modified in the same request as DNS support; DNS hostnames can
 *        only be enabled if DNS support is enabled.
 * @return this request, so that method calls can be chained together
 */
public ModifyVpcAttributeRequest withEnableDnsHostnames(Boolean enableDnsHostnames) {
    setEnableDnsHostnames(enableDnsHostnames);
    return this;
}
/**
 * Boolean-style accessor; identical to {@link #getEnableDnsHostnames()}.
 *
 * @return whether instances launched in the VPC get DNS hostnames; may be
 *         {@code null} if the attribute was never set on this request
 */
public Boolean isEnableDnsHostnames() {
    return this.enableDnsHostnames;
}
/**
 * Sets whether DNS resolution is supported for the VPC.
 *
 * @param enableDnsSupport
 *        whether DNS resolution is supported for the VPC. When enabled, queries to
 *        the Amazon-provided DNS server (169.254.169.253, or the reserved address
 *        at the VPC network range base "plus two") succeed. This attribute cannot
 *        be modified in the same request as DNS hostnames.
 */
public void setEnableDnsSupport(Boolean enableDnsSupport) {
    this.enableDnsSupport = enableDnsSupport;
}
/**
 * Returns whether DNS resolution is supported for the VPC.
 *
 * @return whether DNS resolution is supported for the VPC; may be {@code null}
 *         if the attribute was never set on this request
 */
public Boolean getEnableDnsSupport() {
    return this.enableDnsSupport;
}
/**
 * Fluent variant of {@link #setEnableDnsSupport(Boolean)}.
 *
 * @param enableDnsSupport
 *        whether DNS resolution is supported for the VPC. When enabled, queries to
 *        the Amazon-provided DNS server (169.254.169.253, or the reserved address
 *        at the VPC network range base "plus two") succeed. This attribute cannot
 *        be modified in the same request as DNS hostnames.
 * @return this request, so that method calls can be chained together
 */
public ModifyVpcAttributeRequest withEnableDnsSupport(Boolean enableDnsSupport) {
    setEnableDnsSupport(enableDnsSupport);
    return this;
}
/**
* <p>
* Indicates whether the DNS resolution is supported for the VPC. If enabled, queries to the Amazon provided DNS
* server at the 169.254.169.253 IP address, or the reserved IP address at the base of the VPC network range
* "plus two" succeed. If disabled, the Amazon provided DNS service in the VPC that resolves public DNS hostnames to
* IP addresses is not enabled.
* </p>
* <p>
* You cannot modify the DNS resolution and DNS hostnames attributes in the same request. Use separate requests for
* each attribute.
* </p>
*
* @return Indicates whether the DNS resolution is supported for the VPC. If enabled, queries to the Amazon provided
* DNS server at the 169.254.169.253 IP address, or the reserved IP address at the base of the VPC network
* range "plus two" succeed. If disabled, the Amazon provided DNS service in the VPC that resolves public
* DNS hostnames to IP addresses is not enabled.</p>
* <p>
* You cannot modify the DNS resolution and DNS hostnames attributes in the same request. Use separate
* requests for each attribute.
*/
public Boolean isEnableDnsSupport() {
return this.enableDnsSupport;
}
/**
* <p>
* The ID of the VPC.
* </p>
*
* @param vpcId
* The ID of the VPC.
*/
public void setVpcId(String vpcId) {
this.vpcId = vpcId;
}
/**
* <p>
* The ID of the VPC.
* </p>
*
* @return The ID of the VPC.
*/
public String getVpcId() {
return this.vpcId;
}
/**
* <p>
* The ID of the VPC.
* </p>
*
* @param vpcId
* The ID of the VPC.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public ModifyVpcAttributeRequest withVpcId(String vpcId) {
setVpcId(vpcId);
return this;
}
/**
* This method is intended for internal use only. Returns the marshaled request configured with additional
* parameters to enable operation dry-run.
*/
@Override
public Request<ModifyVpcAttributeRequest> getDryRunRequest() {
Request<ModifyVpcAttributeRequest> request = new ModifyVpcAttributeRequestMarshaller().marshall(this);
request.addParameter("DryRun", Boolean.toString(true));
return request;
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getEnableDnsHostnames() != null)
sb.append("EnableDnsHostnames: ").append(getEnableDnsHostnames()).append(",");
if (getEnableDnsSupport() != null)
sb.append("EnableDnsSupport: ").append(getEnableDnsSupport()).append(",");
if (getVpcId() != null)
sb.append("VpcId: ").append(getVpcId());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof ModifyVpcAttributeRequest == false)
return false;
ModifyVpcAttributeRequest other = (ModifyVpcAttributeRequest) obj;
if (other.getEnableDnsHostnames() == null ^ this.getEnableDnsHostnames() == null)
return false;
if (other.getEnableDnsHostnames() != null && other.getEnableDnsHostnames().equals(this.getEnableDnsHostnames()) == false)
return false;
if (other.getEnableDnsSupport() == null ^ this.getEnableDnsSupport() == null)
return false;
if (other.getEnableDnsSupport() != null && other.getEnableDnsSupport().equals(this.getEnableDnsSupport()) == false)
return false;
if (other.getVpcId() == null ^ this.getVpcId() == null)
return false;
if (other.getVpcId() != null && other.getVpcId().equals(this.getVpcId()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getEnableDnsHostnames() == null) ? 0 : getEnableDnsHostnames().hashCode());
hashCode = prime * hashCode + ((getEnableDnsSupport() == null) ? 0 : getEnableDnsSupport().hashCode());
hashCode = prime * hashCode + ((getVpcId() == null) ? 0 : getVpcId().hashCode());
return hashCode;
}
    @Override
    public ModifyVpcAttributeRequest clone() {
        // Shallow copy via super.clone(); the attributes read by this class (Boolean/String) are
        // immutable, so a shallow copy is sufficient here.
        return (ModifyVpcAttributeRequest) super.clone();
    }
}
| |
/*
*Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
*WSO2 Inc. licenses this file to you under the Apache License,
*Version 2.0 (the "License"); you may not use this file except
*in compliance with the License.
*You may obtain a copy of the License at
*
*http://www.apache.org/licenses/LICENSE-2.0
*
*Unless required by applicable law or agreed to in writing,
*software distributed under the License is distributed on an
*"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
*KIND, either express or implied. See the License for the
*specific language governing permissions and limitations
*under the License.
*/
package org.wso2.am.integration.tests.other;
import org.json.JSONObject;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import org.wso2.am.integration.test.utils.base.APIMIntegrationBaseTest;
import org.wso2.am.integration.test.utils.base.APIMIntegrationConstants;
import org.wso2.am.integration.test.utils.bean.APILifeCycleState;
import org.wso2.am.integration.test.utils.bean.APILifeCycleStateRequest;
import org.wso2.am.integration.test.utils.bean.APIRequest;
import org.wso2.am.integration.test.utils.bean.APPKeyRequestGenerator;
import org.wso2.am.integration.test.utils.bean.SubscriptionRequest;
import org.wso2.am.integration.test.utils.clients.APIPublisherRestClient;
import org.wso2.am.integration.test.utils.clients.APIStoreRestClient;
import org.wso2.carbon.automation.test.utils.http.client.HttpRequestUtil;
import org.wso2.carbon.automation.test.utils.http.client.HttpResponse;
import org.wso2.carbon.integration.common.admin.client.UserManagementClient;
import javax.ws.rs.core.Response;
import java.io.File;
import java.net.URL;
import java.util.HashMap;
import java.util.Map;
import static org.testng.Assert.assertEquals;
/**
 * Integration test verifying that two different tenants can each define an API scope with the
 * same scope key ("user_scope") without clashing. For each tenant it publishes an API whose POST
 * resource is protected by "user_scope", subscribes a tenant user, obtains a token carrying the
 * scope, and expects the scope-protected POST invocation to return HTTP 200.
 */
public class APIScopeTestForTenantsTestCase extends APIMIntegrationBaseTest {
    // Details of the first tenant
    private final String TENANT1_DOMAIN = "tenantscope1.com";
    private final String TENANT1_ADMIN_USERNAME = "firstAdmin";
    private final String TENANT1_ADMIN_PASSWORD = "password1";
    private final String TENANT1_API_NAME = "APIScopeTenantAPI1";
    private final String TENANT1_API_VERSION = "1.0.0";
    private final String TENANT1_APP_NAME = "TenantScope1App";
    private final String TENANT1_ADMIN_USER = TENANT1_ADMIN_USERNAME + "@" + TENANT1_DOMAIN;
    private final String TENANT1_API_CONTEXT = "testScopeAPITenant1";
    private UserManagementClient userManagementClient1 = null;
    private final String TENANT1_SUBSCRIBER_ROLE = "subscriberTenant1";
    // "peter" is created in BOTH tenants; the tenant-qualified names below disambiguate logins.
    private final String USER_PETER = "peter";
    private final String USER_PASSWORD = "peter123";
    private final String TENANT1_USER_PETER = USER_PETER + "@" + TENANT1_DOMAIN;
    // Details of the second tenant
    private final String TENANT2_DOMAIN = "tenantscope2.com";
    private final String TENANT2_ADMIN_USERNAME = "secondAdmin";
    private final String TENANT2_ADMIN_PASSWORD = "password2";
    private final String TENANT2_API_NAME = "APIScopeTenantAPI2";
    private final String TENANT2_API_VERSION = "1.0.0";
    private final String TENANT2_APP_NAME = "TenantScope2App";
    private final String TENANT2_ADMIN_USER = TENANT2_ADMIN_USERNAME + "@" + TENANT2_DOMAIN;
    private final String TENANT2_API_CONTEXT = "testScopeAPITenant2";
    private final String TENANT2_SUBSCRIBER_ROLE = "subscriberTenant2";
    private final String TENANT2_USER_PETER = USER_PETER + "@" + TENANT2_DOMAIN;
    private UserManagementClient userManagementClient2 = null;
    private APIPublisherRestClient apiPublisher;
    private APIStoreRestClient apiStore;

    /**
     * Creates both tenants and, inside each, a subscriber role (login + subscribe permissions)
     * plus a user "peter" holding that role.
     *
     * @throws Exception on any failure while provisioning tenants, roles or users.
     */
    @BeforeClass(alwaysRun = true)
    public void setEnvironment() throws Exception {
        super.init();
        apiPublisher = new APIPublisherRestClient(getPublisherURLHttp());
        apiStore = new APIStoreRestClient(getStoreURLHttp());
        String[] userPermissions = new String[]{"/permission/admin/login", "/permission/admin/manage/api/subscribe"};
        // Create the first tenant
        tenantManagementServiceClient.addTenant(TENANT1_DOMAIN, TENANT1_ADMIN_PASSWORD, TENANT1_ADMIN_USERNAME,
                "demo");
        userManagementClient1 = new UserManagementClient(keyManagerContext.getContextUrls().getBackEndUrl(),
                TENANT1_ADMIN_USER, TENANT1_ADMIN_PASSWORD);
        // Adding new role
        userManagementClient1.addRole(TENANT1_SUBSCRIBER_ROLE, new String[]{}, userPermissions);
        userManagementClient1.addUser(USER_PETER, USER_PASSWORD, new String[]{TENANT1_SUBSCRIBER_ROLE}, USER_PETER);
        // Create the second tenant
        tenantManagementServiceClient.addTenant(TENANT2_DOMAIN, TENANT2_ADMIN_PASSWORD, TENANT2_ADMIN_USERNAME,
                "demo");
        userManagementClient2 = new UserManagementClient(keyManagerContext.getContextUrls().getBackEndUrl(),
                TENANT2_ADMIN_USER, TENANT2_ADMIN_PASSWORD);
        // Adding new role
        userManagementClient2.addRole(TENANT2_SUBSCRIBER_ROLE, new String[]{}, userPermissions);
        userManagementClient2.addUser(USER_PETER, USER_PASSWORD, new String[]{TENANT2_SUBSCRIBER_ROLE}, USER_PETER);
        //Load the back-end dummy API
    }

    /**
     * Runs the full publish/subscribe/token/invoke flow for tenant 1 and then repeats it for
     * tenant 2 using the same scope key, asserting HTTP 200 on the scope-protected POST in both
     * tenants. The second run passing demonstrates the scope key is tenant-isolated.
     *
     * @throws Exception on any failure in the flow.
     */
    @Test(groups = {"wso2.am"}, description = "Testing using same scope key in tenants")
    public void testSameScopeInTenants() throws Exception {
        // Publish the API for Tenant1
        publishAPI(TENANT1_ADMIN_USER, TENANT1_ADMIN_PASSWORD, TENANT1_API_NAME, TENANT1_API_CONTEXT,
                TENANT1_API_VERSION, TENANT1_SUBSCRIBER_ROLE);
        apiStore.login(TENANT1_USER_PETER, USER_PASSWORD);
        // App creation and subscribe to the API
        subscribeToAPI(TENANT1_APP_NAME, TENANT1_API_NAME, TENANT1_ADMIN_USER);
        // Generate production token
        JSONObject jsonResponse = getApplicationKeys(TENANT1_APP_NAME);
        String consumerKey = jsonResponse.getJSONObject("data").getJSONObject("key").getString("consumerKey");
        String consumerSecret = jsonResponse.getJSONObject("data").getJSONObject("key").getString("consumerSecret");
        // Generate user access token
        JSONObject accessTokenGenerationResponse = getUserAccessKeys(TENANT1_USER_PETER, USER_PASSWORD, consumerKey,
                consumerSecret);
        apiStore.logout();
        String accessToken = accessTokenGenerationResponse.getString("access_token");
        // Tenant APIs are exposed under the tenant-qualified "t/<domain>/" gateway path.
        String gatewayUrl = gatewayUrlsWrk.getWebAppURLNhttp() + "t/" + TENANT1_DOMAIN + "/";
        // Invoke the API
        HttpResponse response = invokeAPI(accessToken, gatewayUrl, TENANT1_API_CONTEXT, TENANT1_API_VERSION);
        assertEquals(response.getResponseCode(), Response.Status.OK.getStatusCode(),
                TENANT1_USER_PETER + " can access the POST method");
        // Publish the API for Tenant2
        publishAPI(TENANT2_ADMIN_USER, TENANT2_ADMIN_PASSWORD, TENANT2_API_NAME, TENANT2_API_CONTEXT,
                TENANT2_API_VERSION, TENANT2_SUBSCRIBER_ROLE);
        apiStore.login(TENANT2_USER_PETER, USER_PASSWORD);
        // App creation and subscribe to the API
        subscribeToAPI(TENANT2_APP_NAME, TENANT2_API_NAME, TENANT2_ADMIN_USER);
        // Generate production token
        jsonResponse = getApplicationKeys(TENANT2_APP_NAME);
        consumerKey = jsonResponse.getJSONObject("data").getJSONObject("key").getString("consumerKey");
        consumerSecret = jsonResponse.getJSONObject("data").getJSONObject("key").getString("consumerSecret");
        // Generate user access token
        accessTokenGenerationResponse = getUserAccessKeys(TENANT2_USER_PETER, USER_PASSWORD, consumerKey,
                consumerSecret);
        apiStore.logout();
        accessToken = accessTokenGenerationResponse.getString("access_token");
        gatewayUrl = gatewayUrlsWrk.getWebAppURLNhttp() + "t/" + TENANT2_DOMAIN + "/";
        // Invoke the API
        response = invokeAPI(accessToken, gatewayUrl, TENANT2_API_CONTEXT, TENANT2_API_VERSION);
        assertEquals(response.getResponseCode(), Response.Status.OK.getStatusCode(),
                TENANT2_USER_PETER + " can access the POST method");
    }

    /**
     * Creates and publishes an API in the given tenant, then rewrites its resources via a swagger
     * document so that the POST method on "/test" requires the "user_scope" scope bound to
     * {@code role}.
     *
     * @param adminUser     tenant-qualified publisher admin username.
     * @param adminPassword publisher admin password.
     * @param apiName       name of the API to create.
     * @param apiContext    context path for the API.
     * @param apiVersion    version string for the API.
     * @param role          role granted the "user_scope" scope.
     * @throws Exception on any publisher-side failure.
     */
    private void publishAPI(String adminUser, String adminPassword, String apiName, String apiContext,
                            String apiVersion, String role) throws Exception {
        String url = getGatewayURLNhttp() + "response";
        String description = "This is a test API created by API manager integration test";
        apiPublisher.login(adminUser, adminPassword);
        //Wait till CommonConfigDeployer finishes adding the default set of policies to the database after tenant admin
        //login, if not api creation fails since Unlimited resource tier is not available in database.
        waitForAPIDeployment();
        APIRequest apiRequest = new APIRequest(apiName, apiContext, new URL(url));
        apiRequest.setDescription(description);
        apiRequest.setVersion(apiVersion);
        apiRequest.setProvider(adminUser);
        apiPublisher.addAPI(apiRequest);
        // Publishing API
        APILifeCycleStateRequest updateRequest = new APILifeCycleStateRequest(apiName, adminUser,
                APILifeCycleState.PUBLISHED);
        apiPublisher.changeAPILifeCycleStatus(updateRequest);
        waitForAPIDeploymentSync(adminUser, apiName, apiVersion,
                APIMIntegrationConstants.IS_API_EXISTS);
        // Resources are modified using swagger doc.
        // user_scope(used for POST)
        String modifiedResource = "{\"paths\":{ \"/test\":{\"put\":{ \"responses\":{\"200\":{}},\"x-auth-type\":" +
                "\"Application User\",\"x-throttling-tier\":\"Unlimited\" },\"post\":{ \"responses\":{\"200\":{}}," +
                "\"x-auth-type\":\"Application User\",\"x-throttling-tier\":\"Unlimited\",\"x-scope\":" +
                "\"user_scope\"},\"get\":{ \"responses\":{\"200\":{}},\"x-auth-type\":\"Application User\"," +
                "\"x-throttling-tier\":\"Unlimited\" },\"delete\":{ \"responses\":{\"200\":{}},\"x-auth-type\":" +
                "\"Application User\",\"x-throttling-tier\":\"Unlimited\"},\"options\":{ \"responses\":{\"200\":{}}," +
                "\"x-auth-type\":\"None\",\"x-throttling-tier\":\"Unlimited\"}}},\"swagger\":\"2.0\",\"info" +
                "\":{\"title\":\"APIScopeTestAPI\",\"version\":\"1.0.0\"},\"x-wso2-security\":{\"apim" +
                "\":{\"x-wso2-scopes\":[{\"name\":\"user_scope\",\"description\":\"\",\"key\":\"user_scope\",\"roles" +
                "\":\"" + role + "\"}]}}}";
        apiPublisher.updateResourceOfAPI(adminUser, apiName, apiVersion,
                modifiedResource);
        waitForAPIDeployment();
        apiPublisher.logout();
    }

    /**
     * Creates a store application named {@code appName} and subscribes it to {@code apiName}.
     * Assumes the store client is already logged in as the subscribing user.
     *
     * @throws Exception on any store-side failure.
     */
    private void subscribeToAPI(String appName, String apiName, String adminUser) throws Exception {
        apiStore.addApplication(appName, APIMIntegrationConstants.APPLICATION_TIER.UNLIMITED, "some_url",
                "NewApp");
        SubscriptionRequest subscriptionRequest = new SubscriptionRequest(apiName, adminUser);
        subscriptionRequest.setApplicationName(appName);
        apiStore.subscribe(subscriptionRequest);
    }

    /**
     * Generates the application (production) key pair for {@code appName} and returns the store's
     * JSON response, whose data.key object carries consumerKey/consumerSecret.
     *
     * @throws Exception on any store-side failure.
     */
    private JSONObject getApplicationKeys(String appName) throws Exception {
        APPKeyRequestGenerator generateAppKeyRequest = new APPKeyRequestGenerator(appName);
        String responseString = apiStore.generateApplicationKey(generateAppKeyRequest).getData();
        return new JSONObject(responseString);
    }

    /**
     * Obtains a user access token via the password grant, explicitly requesting the "user_scope"
     * scope so the token can invoke the scope-protected POST resource.
     *
     * @return the token endpoint's JSON response (contains "access_token").
     * @throws Exception on any token-endpoint failure.
     */
    private JSONObject getUserAccessKeys(String username, String password, String consumerKey,
                                         String consumerSecret) throws Exception {
        URL tokenEndpointURL = new URL(gatewayUrlsWrk.getWebAppURLNhttp() + "token");
        HttpResponse response;
        String requestBody;
        // Obtain user access token
        requestBody = "grant_type=password&username=" + username +
                "&password=" + password +
                "&scope=user_scope";
        response = apiStore.generateUserAccessKey(consumerKey, consumerSecret,
                requestBody, tokenEndpointURL);
        return new JSONObject(response.getData());
    }

    /**
     * Invokes the scope-protected "/test" resource with a bearer token via POST and returns the
     * raw HTTP response.
     *
     * @throws Exception on any transport failure.
     */
    private HttpResponse invokeAPI(String accessToken, String gatewayUrl, String apiContext, String apiVersion)
            throws Exception {
        Map<String, String> requestHeaders = new HashMap<String, String>();
        URL endPointURL = new URL(gatewayUrl + apiContext + "/" + apiVersion + "/test");
        requestHeaders.put("Authorization", "Bearer " + accessToken);
        // Accessing POST method
        return HttpRequestUtil.doPost(endPointURL, "", requestHeaders);
    }

    /**
     * Removes applications, APIs, users and roles created by this test in both tenants, then
     * delegates to the base-class cleanup. Null checks guard against partial setup failures.
     *
     * @throws Exception on any cleanup failure.
     */
    @AfterClass(alwaysRun = true)
    public void destroy() throws Exception {
        if (apiStore != null) {
            apiStore.login(TENANT1_USER_PETER, USER_PASSWORD);
            apiStore.removeApplication(TENANT1_APP_NAME);
            apiStore.logout();
            apiStore.login(TENANT2_USER_PETER, USER_PASSWORD);
            apiStore.removeApplication(TENANT2_APP_NAME);
            apiStore.logout();
        }
        if (apiPublisher != null) {
            apiPublisher.login(TENANT1_ADMIN_USER, TENANT1_ADMIN_PASSWORD);
            apiPublisher.deleteAPI(TENANT1_API_NAME, TENANT1_API_VERSION, TENANT1_ADMIN_USER);
            apiPublisher.logout();
            apiPublisher.login(TENANT2_ADMIN_USER, TENANT2_ADMIN_PASSWORD);
            apiPublisher.deleteAPI(TENANT2_API_NAME, TENANT2_API_VERSION, TENANT2_ADMIN_USER);
            apiPublisher.logout();
        }
        if (userManagementClient1 != null) {
            userManagementClient1.deleteUser(USER_PETER);
            userManagementClient1.deleteRole(TENANT1_SUBSCRIBER_ROLE);
        }
        if (userManagementClient2 != null) {
            userManagementClient2.deleteUser(USER_PETER);
            userManagementClient2.deleteRole(TENANT2_SUBSCRIBER_ROLE);
        }
        super.cleanUp();
    }
}
| |
/*
* Copyright 2016-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.store.primitives.resources.impl;
import java.util.Map;
import java.util.Set;
import org.onosproject.cluster.Leadership;
import org.onosproject.cluster.NodeId;
import com.google.common.base.MoreObjects;
import com.google.common.base.Strings;
import io.atomix.catalyst.buffer.BufferInput;
import io.atomix.catalyst.buffer.BufferOutput;
import io.atomix.catalyst.serializer.CatalystSerializable;
import io.atomix.catalyst.serializer.SerializableTypeResolver;
import io.atomix.catalyst.serializer.Serializer;
import io.atomix.catalyst.serializer.SerializerRegistry;
import io.atomix.catalyst.util.Assert;
import io.atomix.copycat.Command;
import io.atomix.copycat.Query;
/**
* {@link AtomixLeaderElector} resource state machine operations.
*/
public final class AtomixLeaderElectorCommands {
    // Non-instantiable: this class only namespaces the elector's Command/Query types.
    private AtomixLeaderElectorCommands() {
    }
    /**
     * Abstract election query.
     */
    @SuppressWarnings("serial")
    public abstract static class ElectionQuery<V> implements Query<V>, CatalystSerializable {
        // The base query carries no state, so serialization is a no-op.
        @Override
        public void writeObject(BufferOutput<?> buffer, Serializer serializer) {
        }
        @Override
        public void readObject(BufferInput<?> buffer, Serializer serializer) {
        }
    }
    /**
     * Abstract election topic query.
     */
    @SuppressWarnings("serial")
    public abstract static class TopicQuery<V> extends ElectionQuery<V> implements CatalystSerializable {
        String topic;
        // No-arg constructor; presumably used by the serializer when decoding — TODO confirm.
        public TopicQuery() {
        }
        public TopicQuery(String topic) {
            this.topic = Assert.notNull(topic, "topic");
        }
        /**
         * Returns the topic.
         * @return topic
         */
        public String topic() {
            return topic;
        }
        @Override
        public void writeObject(BufferOutput<?> buffer, Serializer serializer) {
            serializer.writeObject(topic, buffer);
        }
        @Override
        public void readObject(BufferInput<?> buffer, Serializer serializer) {
            topic = serializer.readObject(buffer);
        }
    }
    /**
     * Abstract election command.
     */
    @SuppressWarnings("serial")
    public abstract static class ElectionCommand<V> implements Command<V>, CatalystSerializable {
        // The base command carries no state, so serialization is a no-op.
        @Override
        public void writeObject(BufferOutput<?> buffer, Serializer serializer) {
        }
        @Override
        public void readObject(BufferInput<?> buffer, Serializer serializer) {
        }
    }
    /**
     * Listen command. Registers the session as a listener for leadership-change events.
     */
    @SuppressWarnings("serial")
    public static class Listen extends ElectionCommand<Void> {
        // Compaction mode hints to the replicated log how long this entry must be retained;
        // see the Copycat Command.CompactionMode documentation for the exact semantics.
        @Override
        public CompactionMode compaction() {
            return CompactionMode.QUORUM;
        }
    }
    /**
     * Unlisten command. Reverses a prior {@link Listen}.
     */
    @SuppressWarnings("serial")
    public static class Unlisten extends ElectionCommand<Void> {
        @Override
        public CompactionMode compaction() {
            return CompactionMode.TOMBSTONE;
        }
    }
    /**
     * GetLeader query. Returns the {@link Leadership} for a single topic.
     */
    @SuppressWarnings("serial")
    public static class GetLeadership extends TopicQuery<Leadership> {
        public GetLeadership() {
        }
        public GetLeadership(String topic) {
            super(topic);
        }
        @Override
        public String toString() {
            return MoreObjects.toStringHelper(getClass())
                .add("topic", topic)
                .toString();
        }
    }
    /**
     * GetAllLeaders query. Returns the leadership for every known topic, keyed by topic.
     */
    @SuppressWarnings("serial")
    public static class GetAllLeaderships extends ElectionQuery<Map<String, Leadership>> {
    }
    /**
     * GetElectedTopics query. Returns the topics for which the given node is leader.
     */
    @SuppressWarnings("serial")
    public static class GetElectedTopics extends ElectionQuery<Set<String>> {
        private NodeId nodeId;
        public GetElectedTopics() {
        }
        public GetElectedTopics(NodeId nodeId) {
            this.nodeId = Assert.argNot(nodeId, nodeId == null, "nodeId cannot be null");
        }
        /**
         * Returns the nodeId to check.
         *
         * @return The nodeId to check.
         */
        public NodeId nodeId() {
            return nodeId;
        }
        @Override
        public String toString() {
            return MoreObjects.toStringHelper(getClass())
                .add("nodeId", nodeId)
                .toString();
        }
        // Unlike Run/Withdraw below, this query serializes its field via the Serializer rather
        // than as a raw string; both sides of the wire must agree on this encoding.
        @Override
        public void writeObject(BufferOutput<?> buffer, Serializer serializer) {
            super.writeObject(buffer, serializer);
            serializer.writeObject(nodeId, buffer);
        }
        @Override
        public void readObject(BufferInput<?> buffer, Serializer serializer) {
            super.readObject(buffer, serializer);
            nodeId = serializer.readObject(buffer);
        }
    }
    /**
     * Enter and run for leadership.
     */
    @SuppressWarnings("serial")
    public static class Run extends ElectionCommand<Leadership> {
        private String topic;
        private NodeId nodeId;
        public Run() {
        }
        public Run(String topic, NodeId nodeId) {
            this.topic = Assert.argNot(topic, Strings.isNullOrEmpty(topic), "topic cannot be null or empty");
            this.nodeId = Assert.argNot(nodeId, nodeId == null, "nodeId cannot be null");
        }
        /**
         * Returns the topic.
         *
         * @return topic
         */
        public String topic() {
            return topic;
        }
        /**
         * Returns the nodeId.
         *
         * @return the nodeId
         */
        public NodeId nodeId() {
            return nodeId;
        }
        @Override
        public CompactionMode compaction() {
            return CompactionMode.SNAPSHOT;
        }
        @Override
        public String toString() {
            return MoreObjects.toStringHelper(getClass())
                .add("topic", topic)
                .add("nodeId", nodeId)
                .toString();
        }
        // Fields are encoded as raw strings (nodeId via its toString/String constructor round-trip).
        @Override
        public void writeObject(BufferOutput<?> buffer, Serializer serializer) {
            buffer.writeString(topic);
            buffer.writeString(nodeId.toString());
        }
        @Override
        public void readObject(BufferInput<?> buffer, Serializer serializer) {
            topic = buffer.readString();
            nodeId = new NodeId(buffer.readString());
        }
    }
    /**
     * Withdraw from a leadership contest.
     */
    @SuppressWarnings("serial")
    public static class Withdraw extends ElectionCommand<Void> {
        private String topic;
        public Withdraw() {
        }
        public Withdraw(String topic) {
            this.topic = Assert.argNot(topic, Strings.isNullOrEmpty(topic), "topic cannot be null or empty");
        }
        /**
         * Returns the topic.
         *
         * @return The topic
         */
        public String topic() {
            return topic;
        }
        @Override
        public CompactionMode compaction() {
            return CompactionMode.SNAPSHOT;
        }
        @Override
        public String toString() {
            return MoreObjects.toStringHelper(getClass())
                .add("topic", topic)
                .toString();
        }
        @Override
        public void writeObject(BufferOutput<?> buffer, Serializer serializer) {
            buffer.writeString(topic);
        }
        @Override
        public void readObject(BufferInput<?> buffer, Serializer serializer) {
            topic = buffer.readString();
        }
    }
    /**
     * Command for administratively changing the leadership state for a node.
     * Shared base for {@link Anoint} and {@link Promote}.
     */
    @SuppressWarnings("serial")
    public abstract static class ElectionChangeCommand<V> extends ElectionCommand<V> {
        private String topic;
        private NodeId nodeId;
        // Package-private no-arg constructor; presumably used by the serializer when decoding.
        ElectionChangeCommand() {
            topic = null;
            nodeId = null;
        }
        public ElectionChangeCommand(String topic, NodeId nodeId) {
            this.topic = topic;
            this.nodeId = nodeId;
        }
        /**
         * Returns the topic.
         *
         * @return The topic
         */
        public String topic() {
            return topic;
        }
        /**
         * Returns the nodeId to make leader.
         *
         * @return The nodeId
         */
        public NodeId nodeId() {
            return nodeId;
        }
        @Override
        public CompactionMode compaction() {
            return CompactionMode.SNAPSHOT;
        }
        @Override
        public String toString() {
            return MoreObjects.toStringHelper(getClass())
                .add("topic", topic)
                .add("nodeId", nodeId)
                .toString();
        }
        @Override
        public void writeObject(BufferOutput<?> buffer, Serializer serializer) {
            buffer.writeString(topic);
            buffer.writeString(nodeId.toString());
        }
        @Override
        public void readObject(BufferInput<?> buffer, Serializer serializer) {
            topic = buffer.readString();
            nodeId = new NodeId(buffer.readString());
        }
    }
    /**
     * Command for administratively anoint a node as leader.
     */
    @SuppressWarnings("serial")
    public static class Anoint extends ElectionChangeCommand<Boolean> {
        // Private no-arg constructor; presumably instantiated reflectively by the serializer.
        private Anoint() {
        }
        public Anoint(String topic, NodeId nodeId) {
            super(topic, nodeId);
        }
    }
    /**
     * Command for administratively promote a node as top candidate.
     */
    @SuppressWarnings("serial")
    public static class Promote extends ElectionChangeCommand<Boolean> {
        // Private no-arg constructor; presumably instantiated reflectively by the serializer.
        private Promote() {
        }
        public Promote(String topic, NodeId nodeId) {
            super(topic, nodeId);
        }
    }
    /**
     * Command for administratively evicting a node from all leadership topics.
     */
    @SuppressWarnings("serial")
    public static class Evict extends ElectionCommand<Void> {
        private NodeId nodeId;
        public Evict() {
        }
        public Evict(NodeId nodeId) {
            this.nodeId = nodeId;
        }
        /**
         * Returns the node identifier.
         *
         * @return The nodeId
         */
        public NodeId nodeId() {
            return nodeId;
        }
        @Override
        public CompactionMode compaction() {
            return CompactionMode.SNAPSHOT;
        }
        @Override
        public String toString() {
            return MoreObjects.toStringHelper(getClass())
                .add("nodeId", nodeId)
                .toString();
        }
        @Override
        public void writeObject(BufferOutput<?> buffer, Serializer serializer) {
            buffer.writeString(nodeId.toString());
        }
        @Override
        public void readObject(BufferInput<?> buffer, Serializer serializer) {
            nodeId = new NodeId(buffer.readString());
        }
    }
    /**
     * Map command type resolver. Registers every command/query type with its serialization id.
     * NOTE(review): these ids identify the classes in serialized form; changing them would
     * presumably break compatibility with previously persisted state — keep them stable.
     */
    public static class TypeResolver implements SerializableTypeResolver {
        @Override
        public void resolve(SerializerRegistry registry) {
            registry.register(Run.class, -861);
            registry.register(Withdraw.class, -862);
            registry.register(Anoint.class, -863);
            registry.register(GetAllLeaderships.class, -864);
            registry.register(GetElectedTopics.class, -865);
            registry.register(GetLeadership.class, -866);
            registry.register(Listen.class, -867);
            registry.register(Unlisten.class, -868);
            registry.register(Promote.class, -869);
            registry.register(Evict.class, -870);
        }
    }
}
| |
package pe.com.equifax.mobile;
import java.io.Serializable;
import java.util.HashMap;
import java.util.List;
import org.codehaus.jackson.annotate.JsonIgnore;
import org.codehaus.jackson.annotate.JsonIgnoreProperties;
@JsonIgnoreProperties(ignoreUnknown = true)
public class TramaBean implements Serializable {
    /**
     * Serialization version identifier.
     */
    private static final long serialVersionUID = 1L;
    // Operation identifier for this frame ("trama").
    private String operationId;
    /**
     * Id of the application that emits the frame.
     */
    private String applicationId;
    /**
     * Response state.
     */
    private String state;
    /**
     * User id.
     */
    private String userId;
    /**
     * User password ("clave").
     */
    private String userClave;
    /**
     * Query date.
     */
    private String date;
    /**
     * Flag indicating whether a historical query was previously made.
     */
    private boolean flagCheck = false;
    /**
     * Input parameters.
     */
    private HashMap<String, Object> input;
    /**
     * Service routes.
     */
    private List<HashMap<String, Object>> paths;
    /**
     * Output parameters.
     */
    private HashMap<String, Object> output;
    /**
     * Short operation number.
     */
    private String numeroCorto;
    /**
     * Document type. (Original Javadoc was a copy-paste of the field above; corrected.)
     */
    private String tipoDocumento;
    /**
     * Document number.
     */
    private String numeroDocumento;
    /**
     * Paternal (father's) surname.
     */
    private String apePaterno;
    /**
     * Maternal (mother's) surname.
     */
    private String apeMaterno;
    /**
     * First name.
     */
    private String nombre;
    /**
     * Model — presumably the device model; confirm against callers.
     */
    private String modelo;
    public TramaBean() {
        this.input = new HashMap<String, Object>(); //Instantiated to prevent NullPointerException
        this.output = new HashMap<String, Object>();//Instantiated to prevent NullPointerException
    }
    public String getApplicationId() {
        return applicationId;
    }
    public void setApplicationId(String applicationId) {
        this.applicationId = applicationId;
    }
    public String getState() {
        return state;
    }
    public void setState(String state) {
        this.state = state;
    }
    public String getUserId() {
        return userId;
    }
    public void setUserId(String userId) {
        this.userId = userId;
    }
    public String getUserClave() {
        return userClave;
    }
    public void setUserClave(String userClave) {
        this.userClave = userClave;
    }
    public HashMap<String, Object> getInput() {
        return input;
    }
    public void setInput(HashMap<String, Object> input) {
        this.input = input;
    }
    public List<HashMap<String, Object>> getPaths() {
        return paths;
    }
    public void setPaths(List<HashMap<String, Object>> paths) {
        this.paths = paths;
    }
    public HashMap<String, Object> getOutput() {
        return output;
    }
    public void setOutput(HashMap<String, Object> output) {
        this.output = output;
    }
    public String getOperationId() {
        return operationId;
    }
    public void setOperationId(String operationId) {
        this.operationId = operationId;
    }
    public String getDate() {
        return date;
    }
    public void setDate(String date) {
        this.date = date;
    }
    /**
     * Merges the given entries into the input-parameter map (excluded from JSON serialization).
     */
    @JsonIgnore
    public void addToInput(HashMap<String, Object> map) {
        this.input.putAll(map);
    }
    /**
     * Merges the given entries into the output-parameter map (excluded from JSON serialization).
     */
    @JsonIgnore
    public void addToOutput(HashMap<String, Object> map) {
        this.output.putAll(map);
    }
    public boolean isFlagCheck() {
        return flagCheck;
    }
    public void setFlagCheck(boolean flagCheck) {
        this.flagCheck = flagCheck;
    }
    public String getNumeroCorto() {
        return numeroCorto;
    }
    public void setNumeroCorto(String numeroCorto) {
        this.numeroCorto = numeroCorto;
    }
    public String getTipoDocumento() {
        return tipoDocumento;
    }
    public void setTipoDocumento(String tipoDocumento) {
        this.tipoDocumento = tipoDocumento;
    }
    public String getNumeroDocumento() {
        return numeroDocumento;
    }
    public void setNumeroDocumento(String numeroDocumento) {
        this.numeroDocumento = numeroDocumento;
    }
    public String getApePaterno() {
        return apePaterno;
    }
    public void setApePaterno(String apePaterno) {
        this.apePaterno = apePaterno;
    }
    public String getApeMaterno() {
        return apeMaterno;
    }
    public void setApeMaterno(String apeMaterno) {
        this.apeMaterno = apeMaterno;
    }
    public String getNombre() {
        return nombre;
    }
    public void setNombre(String nombre) {
        this.nombre = nombre;
    }
    public String getModelo() {
        return modelo;
    }
    public void setModelo(String modelo) {
        this.modelo = modelo;
    }
}
| |
// Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chrome.browser.offlinepages.indicator;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.graphics.Color;
import android.graphics.drawable.Drawable;
import android.os.SystemClock;
import android.text.TextUtils;
import androidx.annotation.VisibleForTesting;
import androidx.appcompat.content.res.AppCompatResources;
import org.chromium.base.ApplicationState;
import org.chromium.base.ApplicationStatus;
import org.chromium.base.metrics.RecordHistogram;
import org.chromium.chrome.R;
import org.chromium.chrome.browser.ChromeActivity;
import org.chromium.chrome.browser.download.DownloadOpenSource;
import org.chromium.chrome.browser.download.DownloadUtils;
import org.chromium.chrome.browser.flags.ChromeFeatureList;
import org.chromium.chrome.browser.offlinepages.OfflinePageUtils;
import org.chromium.chrome.browser.tab.EmptyTabObserver;
import org.chromium.chrome.browser.tab.Tab;
import org.chromium.chrome.browser.tab.TabHidingType;
import org.chromium.chrome.browser.tab.TabObserver;
import org.chromium.chrome.browser.ui.messages.snackbar.Snackbar;
import org.chromium.chrome.browser.ui.messages.snackbar.SnackbarManager;
import org.chromium.chrome.browser.ui.messages.snackbar.SnackbarManager.SnackbarController;
import org.chromium.content_public.browser.WebContents;
import org.chromium.content_public.common.ContentUrlConstants;
/**
* Class that controls when to show the offline indicator.
*/
public class OfflineIndicatorController implements ConnectivityDetector.Observer,
SnackbarController,
ApplicationStatus.ApplicationStateListener {
// OfflineIndicatorCTREvent defined in tools/metrics/histograms/enums.xml.
// These values are persisted to logs. Entries should not be renumbered and
// numeric values should never be reused.
public static final int OFFLINE_INDICATOR_CTR_DISPLAYED = 0;
public static final int OFFLINE_INDICATOR_CTR_CLICKED = 1;
public static final int OFFLINE_INDICATOR_CTR_COUNT = 2;
// Field trial params.
public static final String PARAM_BOTTOM_OFFLINE_INDICATOR_ENABLED = "bottom_offline_indicator";
public static final String PARAM_STABLE_OFFLINE_WAIT_SECONDS = "stable_offline_wait_s";
private static final int SNACKBAR_DURATION_MS = 10000;
// Default time in seconds to wait until the offline state is stablized in the case of flaky
// connections.
private static final int STABLE_OFFLINE_DEFAULT_WAIT_SECONDS = 20;
@SuppressLint("StaticFieldLeak")
private static OfflineIndicatorController sInstance;
private static int sTimeToWaitForStableOfflineForTesting;
private boolean mIsShowingOfflineIndicator;
// Set to true if the offline indicator has been shown once since the activity has resumed.
private boolean mHasOfflineIndicatorShownSinceActivityResumed;
// Set to true if the user has been continuously online for the required duration.
private boolean mWasOnlineForRequiredDuration;
private ConnectivityDetector mConnectivityDetector;
private ChromeActivity mObservedActivity;
private boolean mIsOnline;
// Last time when the online state is detected. It is recorded as milliseconds since boot.
private long mLastOnlineTime;
private TopSnackbarManager mTopSnackbarManager;
private OfflineIndicatorController() {
if (isUsingTopSnackbar()) {
mTopSnackbarManager = new TopSnackbarManager();
}
mConnectivityDetector = new ConnectivityDetector(this);
ApplicationStatus.registerApplicationStateListener(this);
}
/**
* Initializes the singleton once.
*/
public static void initialize() {
// No need to create the singleton if the feature is not enabled. Also, if V2 is enabled,
// this version will be disabled.
if (!ChromeFeatureList.isEnabled(ChromeFeatureList.OFFLINE_INDICATOR)
|| ChromeFeatureList.isEnabled(ChromeFeatureList.OFFLINE_INDICATOR_V2)) {
return;
}
if (sInstance == null) {
sInstance = new OfflineIndicatorController();
}
}
/**
* Returns the singleton instance.
*/
public static OfflineIndicatorController getInstance() {
assert sInstance != null;
return sInstance;
}
@Override
public void onConnectionStateChanged(
@ConnectivityDetector.ConnectionState int connectionState) {
if (connectionState == ConnectivityDetector.ConnectionState.NONE) return;
updateOfflineIndicator(connectionState == ConnectivityDetector.ConnectionState.VALIDATED);
}
@Override
public void onAction(Object actionData) {
mIsShowingOfflineIndicator = false;
DownloadUtils.showDownloadManager(
null, null, DownloadOpenSource.OFFLINE_INDICATOR, true /*showPrefetchedContent*/);
RecordHistogram.recordEnumeratedHistogram(
"OfflineIndicator.CTR", OFFLINE_INDICATOR_CTR_CLICKED, OFFLINE_INDICATOR_CTR_COUNT);
}
@Override
public void onDismissNoAction(Object actionData) {
mIsShowingOfflineIndicator = false;
}
@Override
public void onApplicationStateChange(int newState) {
// Note that the paused state can happen when the activity is temporarily covered by another
// activity's Fragment, in which case we should still treat the app as in foreground.
if (newState != ApplicationState.HAS_RUNNING_ACTIVITIES
&& newState != ApplicationState.HAS_PAUSED_ACTIVITIES) {
mHasOfflineIndicatorShownSinceActivityResumed = false;
}
// If the application is resumed, update the connection state and show indicator if needed.
if (newState == ApplicationState.HAS_RUNNING_ACTIVITIES) {
mConnectivityDetector.detect();
updateOfflineIndicator(mConnectivityDetector.getConnectionState()
== ConnectivityDetector.ConnectionState.VALIDATED);
}
}
private void updateOfflineIndicator(boolean isOnline) {
if (isOnline != mIsOnline) {
if (isOnline) {
mWasOnlineForRequiredDuration = false;
mLastOnlineTime = SystemClock.elapsedRealtime();
} else {
mWasOnlineForRequiredDuration = SystemClock.elapsedRealtime() - mLastOnlineTime
>= getTimeToWaitForStableOffline();
}
mIsOnline = isOnline;
}
if (ApplicationStatus.getStateForApplication() != ApplicationState.HAS_RUNNING_ACTIVITIES) {
return;
}
Activity activity = ApplicationStatus.getLastTrackedFocusedActivity();
if (activity == null) return;
if (!(activity instanceof ChromeActivity)) return;
ChromeActivity chromeActivity = (ChromeActivity) activity;
if (chromeActivity.getSnackbarManager() == null) return;
if (isOnline) {
hideOfflineIndicator(chromeActivity);
} else {
showOfflineIndicator(chromeActivity);
}
}
private boolean canShowOfflineIndicator(Activity activity) {
// For now, we only support ChromeActivity.
if (!(activity instanceof ChromeActivity)) return false;
ChromeActivity chromeActivity = (ChromeActivity) activity;
Tab tab = chromeActivity.getActivityTab();
if (tab == null) return false;
if (tab.isShowingErrorPage()) return false;
if (OfflinePageUtils.isOfflinePage(tab)) return false;
if (TextUtils.equals(tab.getUrlString(), ContentUrlConstants.ABOUT_BLANK_DISPLAY_URL)) {
return false;
}
return true;
}
/**
* Delay showing the offline indicator UI under some circumstances, i.e. current tab is still
* being loaded.
* Returns true if the offline indicator UI is delayed to be shown.
*/
private boolean delayShowingOfflineIndicatorIfNeeded(ChromeActivity chromeActivity) {
Tab tab = chromeActivity.getActivityTab();
if (tab == null) return false;
WebContents webContents = tab.getWebContents();
if (webContents != null && !webContents.isLoading()) return false;
// If the tab is still being loaded, we should wait until it finishes.
if (mObservedActivity == chromeActivity) return true;
mObservedActivity = chromeActivity;
TabObserver tabObserver = new EmptyTabObserver() {
@Override
public void onLoadStopped(Tab tab, boolean toDifferentDocument) {
mObservedActivity = null;
tab.removeObserver(this);
doUpdate();
}
@Override
public void onHidden(Tab tab, @TabHidingType int type) {
mObservedActivity = null;
tab.removeObserver(this);
doUpdate();
}
@Override
public void onDestroyed(Tab tab) {
mObservedActivity = null;
tab.removeObserver(this);
doUpdate();
}
private void doUpdate() {
updateOfflineIndicator(mConnectivityDetector.getConnectionState()
== ConnectivityDetector.ConnectionState.VALIDATED);
}
};
tab.addObserver(tabObserver);
return true;
}
private void showOfflineIndicator(ChromeActivity chromeActivity) {
if (mIsShowingOfflineIndicator || !canShowOfflineIndicator(chromeActivity)) return;
if (delayShowingOfflineIndicatorIfNeeded(chromeActivity)) return;
// If this is the first time to show offline indicator, show it. Otherwise, it will only
// be shown if the user has been continuously online for the required duration, then goes
// back to being offline.
// TODO(jianli): keep these values in shared prefernces. (http://crbug.com/879725)
if (mHasOfflineIndicatorShownSinceActivityResumed && !mWasOnlineForRequiredDuration) {
return;
}
Drawable icon =
AppCompatResources.getDrawable(chromeActivity, R.drawable.ic_offline_pin_white);
Snackbar snackbar =
Snackbar.make(chromeActivity.getString(R.string.offline_indicator_offline_title),
this, Snackbar.TYPE_ACTION, Snackbar.UMA_OFFLINE_INDICATOR)
.setSingleLine(true)
.setProfileImage(icon)
.setBackgroundColor(Color.BLACK)
.setTextAppearance(R.style.TextAppearance_TextMedium_Primary_Light)
.setDuration(SNACKBAR_DURATION_MS)
.setAction(chromeActivity.getString(
R.string.offline_indicator_view_offline_content),
null);
if (isUsingTopSnackbar()) {
mTopSnackbarManager.show(snackbar, chromeActivity);
} else {
// Show a bottom snackbar via SnackbarManager.
SnackbarManager snackbarManager = chromeActivity.getSnackbarManager();
snackbarManager.showSnackbar(snackbar);
}
RecordHistogram.recordEnumeratedHistogram("OfflineIndicator.CTR",
OFFLINE_INDICATOR_CTR_DISPLAYED, OFFLINE_INDICATOR_CTR_COUNT);
mIsShowingOfflineIndicator = true;
mHasOfflineIndicatorShownSinceActivityResumed = true;
}
@VisibleForTesting
void hideOfflineIndicator(ChromeActivity chromeActivity) {
if (!mIsShowingOfflineIndicator) return;
if (isUsingTopSnackbar()) {
mTopSnackbarManager.hide();
} else {
chromeActivity.getSnackbarManager().dismissSnackbars(this);
}
}
int getTimeToWaitForStableOffline() {
int seconds;
if (sTimeToWaitForStableOfflineForTesting != 0) {
seconds = sTimeToWaitForStableOfflineForTesting;
} else {
seconds = ChromeFeatureList.getFieldTrialParamByFeatureAsInt(
ChromeFeatureList.OFFLINE_INDICATOR, PARAM_STABLE_OFFLINE_WAIT_SECONDS,
STABLE_OFFLINE_DEFAULT_WAIT_SECONDS);
}
return seconds * 1000;
}
@VisibleForTesting
static boolean isUsingTopSnackbar() {
boolean useBottomSnackbar = ChromeFeatureList.getFieldTrialParamByFeatureAsBoolean(
ChromeFeatureList.OFFLINE_INDICATOR, PARAM_BOTTOM_OFFLINE_INDICATOR_ENABLED, false);
return !useBottomSnackbar;
}
@VisibleForTesting
static void setTimeToWaitForStableOfflineForTesting(int waitSeconds) {
sTimeToWaitForStableOfflineForTesting = waitSeconds;
}
@VisibleForTesting
ConnectivityDetector getConnectivityDetectorForTesting() {
return mConnectivityDetector;
}
@VisibleForTesting
TopSnackbarManager getTopSnackbarManagerForTesting() {
return mTopSnackbarManager;
}
}
| |
//
// This file is part of the Fuel Java SDK.
//
// Copyright (c) 2013, 2014, 2015, ExactTarget, Inc.
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//
// * Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// * Neither the name of ExactTarget, Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
package com.exacttarget.fuelsdk;
import java.util.Date;
import java.util.List;
import org.junit.Assume;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
public class ETObjectTest {
@BeforeClass
public static void setUpBeforeClass()
throws ETSdkException
{
// Assume.assumeNotNull(ETObjectTest.class
// .getResource("/fuelsdk-test.properties"));
}
@Test
public void testGetModified1() {
TestObject testObject = new TestObject();
assertNull(testObject.getModified("test"));
}
@Test
public void testGetModified2() {
TestObject testObject = new TestObject();
assertNull(testObject.setModified("test", true));
assertTrue(testObject.getModified("test"));
}
@Test
public void testGetModified3() {
TestObject testObject = new TestObject();
assertNull(testObject.setModified("test", false));
assertFalse(testObject.getModified("test"));
}
@Test
public void testGetModified4() {
TestObject testObject = new TestObject();
assertNull(testObject.setModified("test", true));
assertTrue(testObject.setModified("test", false));
assertFalse(testObject.getModified("test"));
}
@Test
public void testGetModified5() {
TestObject testObject = new TestObject();
assertNull(testObject.setModified("test", false));
assertFalse(testObject.setModified("test", true));
assertTrue(testObject.getModified("test"));
}
@Test
public void testGetAllModified1() {
TestObject testObject = new TestObject();
assertNull(testObject.setModified("test", true));
List<String> modified = testObject.getAllModified();
assertEquals(1, modified.size());
assertEquals("test", modified.get(0));
}
@Test
public void testGetAllModified2() {
TestObject testObject = new TestObject();
assertNull(testObject.setModified("test1", true));
assertNull(testObject.setModified("test2", true));
List<String> modified = testObject.getAllModified();
assertEquals(2, modified.size());
assertEquals("test1", modified.get(1));
assertEquals("test2", modified.get(0));
}
@Test
public void testGetAllModified3() {
TestObject testObject = new TestObject();
assertNull(testObject.setModified("test1", true));
assertNull(testObject.setModified("test2", true));
assertNull(testObject.setModified("test3", true));
List<String> modified = testObject.getAllModified();
assertEquals(3, modified.size());
assertEquals("test1", modified.get(2));
assertEquals("test2", modified.get(0));
assertEquals("test3", modified.get(1));
}
@Test
public void testGetAllModified4() {
TestObject testObject = new TestObject();
assertNull(testObject.setModified("test1", true));
assertNull(testObject.setModified("test2", false));
assertNull(testObject.setModified("test3", true));
List<String> modified = testObject.getAllModified();
assertEquals(2, modified.size());
assertEquals("test1", modified.get(1));
assertEquals("test3", modified.get(0));
}
@Test
public void testGetAllModified5() {
TestObject testObject = new TestObject();
assertNull(testObject.setModified("test1", true));
assertNull(testObject.setModified("test2", true));
assertNull(testObject.setModified("test3", true));
List<String> modified = testObject.getAllModified();
assertEquals(3, modified.size());
assertEquals("test1", modified.get(2));
assertEquals("test2", modified.get(0));
assertEquals("test3", modified.get(1));
assertTrue(testObject.setModified("test2", false));
modified = testObject.getAllModified();
assertEquals(2, modified.size());
assertEquals("test1", modified.get(1));
assertEquals("test3", modified.get(0));
}
class TestObject extends ETApiObject {
private String id = null;
private String key = null;
private String name = null;
private Date createdDate = null;
private Date modifiedDate = null;
@Override
public String getId() {
return id;
}
@Override
public void setId(String id) {
this.id = id;
}
public String getKey() {
return key;
}
public void setKey(String key) {
this.key = key;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public Date getCreatedDate() {
return createdDate;
}
public void setCreatedDate(Date createdDate) {
this.createdDate = createdDate;
}
public Date getModifiedDate() {
return modifiedDate;
}
public void setModifiedDate(Date modifiedDate) {
this.modifiedDate = modifiedDate;
}
}
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.rds.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
* <p>
* This data type is used as a response element in the following actions:
* </p>
* <ul>
* <li>
* <p>
* <code>AuthorizeDBSecurityGroupIngress</code>
* </p>
* </li>
* <li>
* <p>
* <code>DescribeDBSecurityGroups</code>
* </p>
* </li>
* <li>
* <p>
* <code>RevokeDBSecurityGroupIngress</code>
* </p>
* </li>
* </ul>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/rds-2014-10-31/EC2SecurityGroup" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class EC2SecurityGroup implements Serializable, Cloneable {
/**
* <p>
* Provides the status of the EC2 security group. Status can be "authorizing", "authorized", "revoking", and
* "revoked".
* </p>
*/
private String status;
/**
* <p>
* Specifies the name of the EC2 security group.
* </p>
*/
private String eC2SecurityGroupName;
/**
* <p>
* Specifies the id of the EC2 security group.
* </p>
*/
private String eC2SecurityGroupId;
/**
* <p>
* Specifies the Amazon Web Services ID of the owner of the EC2 security group specified in the
* <code>EC2SecurityGroupName</code> field.
* </p>
*/
private String eC2SecurityGroupOwnerId;
/**
* <p>
* Provides the status of the EC2 security group. Status can be "authorizing", "authorized", "revoking", and
* "revoked".
* </p>
*
* @param status
* Provides the status of the EC2 security group. Status can be "authorizing", "authorized", "revoking", and
* "revoked".
*/
public void setStatus(String status) {
this.status = status;
}
/**
* <p>
* Provides the status of the EC2 security group. Status can be "authorizing", "authorized", "revoking", and
* "revoked".
* </p>
*
* @return Provides the status of the EC2 security group. Status can be "authorizing", "authorized", "revoking", and
* "revoked".
*/
public String getStatus() {
return this.status;
}
/**
* <p>
* Provides the status of the EC2 security group. Status can be "authorizing", "authorized", "revoking", and
* "revoked".
* </p>
*
* @param status
* Provides the status of the EC2 security group. Status can be "authorizing", "authorized", "revoking", and
* "revoked".
* @return Returns a reference to this object so that method calls can be chained together.
*/
public EC2SecurityGroup withStatus(String status) {
setStatus(status);
return this;
}
/**
* <p>
* Specifies the name of the EC2 security group.
* </p>
*
* @param eC2SecurityGroupName
* Specifies the name of the EC2 security group.
*/
public void setEC2SecurityGroupName(String eC2SecurityGroupName) {
this.eC2SecurityGroupName = eC2SecurityGroupName;
}
/**
* <p>
* Specifies the name of the EC2 security group.
* </p>
*
* @return Specifies the name of the EC2 security group.
*/
public String getEC2SecurityGroupName() {
return this.eC2SecurityGroupName;
}
/**
* <p>
* Specifies the name of the EC2 security group.
* </p>
*
* @param eC2SecurityGroupName
* Specifies the name of the EC2 security group.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public EC2SecurityGroup withEC2SecurityGroupName(String eC2SecurityGroupName) {
setEC2SecurityGroupName(eC2SecurityGroupName);
return this;
}
/**
* <p>
* Specifies the id of the EC2 security group.
* </p>
*
* @param eC2SecurityGroupId
* Specifies the id of the EC2 security group.
*/
public void setEC2SecurityGroupId(String eC2SecurityGroupId) {
this.eC2SecurityGroupId = eC2SecurityGroupId;
}
/**
* <p>
* Specifies the id of the EC2 security group.
* </p>
*
* @return Specifies the id of the EC2 security group.
*/
public String getEC2SecurityGroupId() {
return this.eC2SecurityGroupId;
}
/**
* <p>
* Specifies the id of the EC2 security group.
* </p>
*
* @param eC2SecurityGroupId
* Specifies the id of the EC2 security group.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public EC2SecurityGroup withEC2SecurityGroupId(String eC2SecurityGroupId) {
setEC2SecurityGroupId(eC2SecurityGroupId);
return this;
}
/**
* <p>
* Specifies the Amazon Web Services ID of the owner of the EC2 security group specified in the
* <code>EC2SecurityGroupName</code> field.
* </p>
*
* @param eC2SecurityGroupOwnerId
* Specifies the Amazon Web Services ID of the owner of the EC2 security group specified in the
* <code>EC2SecurityGroupName</code> field.
*/
public void setEC2SecurityGroupOwnerId(String eC2SecurityGroupOwnerId) {
this.eC2SecurityGroupOwnerId = eC2SecurityGroupOwnerId;
}
/**
* <p>
* Specifies the Amazon Web Services ID of the owner of the EC2 security group specified in the
* <code>EC2SecurityGroupName</code> field.
* </p>
*
* @return Specifies the Amazon Web Services ID of the owner of the EC2 security group specified in the
* <code>EC2SecurityGroupName</code> field.
*/
public String getEC2SecurityGroupOwnerId() {
return this.eC2SecurityGroupOwnerId;
}
/**
* <p>
* Specifies the Amazon Web Services ID of the owner of the EC2 security group specified in the
* <code>EC2SecurityGroupName</code> field.
* </p>
*
* @param eC2SecurityGroupOwnerId
* Specifies the Amazon Web Services ID of the owner of the EC2 security group specified in the
* <code>EC2SecurityGroupName</code> field.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public EC2SecurityGroup withEC2SecurityGroupOwnerId(String eC2SecurityGroupOwnerId) {
setEC2SecurityGroupOwnerId(eC2SecurityGroupOwnerId);
return this;
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getStatus() != null)
sb.append("Status: ").append(getStatus()).append(",");
if (getEC2SecurityGroupName() != null)
sb.append("EC2SecurityGroupName: ").append(getEC2SecurityGroupName()).append(",");
if (getEC2SecurityGroupId() != null)
sb.append("EC2SecurityGroupId: ").append(getEC2SecurityGroupId()).append(",");
if (getEC2SecurityGroupOwnerId() != null)
sb.append("EC2SecurityGroupOwnerId: ").append(getEC2SecurityGroupOwnerId());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof EC2SecurityGroup == false)
return false;
EC2SecurityGroup other = (EC2SecurityGroup) obj;
if (other.getStatus() == null ^ this.getStatus() == null)
return false;
if (other.getStatus() != null && other.getStatus().equals(this.getStatus()) == false)
return false;
if (other.getEC2SecurityGroupName() == null ^ this.getEC2SecurityGroupName() == null)
return false;
if (other.getEC2SecurityGroupName() != null && other.getEC2SecurityGroupName().equals(this.getEC2SecurityGroupName()) == false)
return false;
if (other.getEC2SecurityGroupId() == null ^ this.getEC2SecurityGroupId() == null)
return false;
if (other.getEC2SecurityGroupId() != null && other.getEC2SecurityGroupId().equals(this.getEC2SecurityGroupId()) == false)
return false;
if (other.getEC2SecurityGroupOwnerId() == null ^ this.getEC2SecurityGroupOwnerId() == null)
return false;
if (other.getEC2SecurityGroupOwnerId() != null && other.getEC2SecurityGroupOwnerId().equals(this.getEC2SecurityGroupOwnerId()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getStatus() == null) ? 0 : getStatus().hashCode());
hashCode = prime * hashCode + ((getEC2SecurityGroupName() == null) ? 0 : getEC2SecurityGroupName().hashCode());
hashCode = prime * hashCode + ((getEC2SecurityGroupId() == null) ? 0 : getEC2SecurityGroupId().hashCode());
hashCode = prime * hashCode + ((getEC2SecurityGroupOwnerId() == null) ? 0 : getEC2SecurityGroupOwnerId().hashCode());
return hashCode;
}
@Override
public EC2SecurityGroup clone() {
try {
return (EC2SecurityGroup) super.clone();
} catch (CloneNotSupportedException e) {
throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
}
}
}
| |
package pamanager;
import java.io.*;
import java.awt.*;
import java.beans.*;
import java.awt.event.*;
import javax.swing.*;
import javax.swing.table.*;
import javax.swing.event.*;
import java.util.*;
import ciagent.*;
/**
* The <code>PAManagerFrame</code> class implements the GUI and the logic
* for the PAManager application.
*
* @author Joseph P. Bigus
* @author Jennifer Bigus
*
* @copyright
* Constructing Intelligent Agents using Java
* (C) Joseph P. Bigus and Jennifer Bigus 2001
*
*/
public class PAManagerFrame extends JFrame implements CIAgentEventListener, PropertyChangeListener, AgentPlatform {
/**
*
*/
private static final long serialVersionUID = -2905675549739365644L;
JMenuBar menuBar1 = new JMenuBar();
JMenu fileMenu = new JMenu();
JMenu editMenu = new JMenu();
JMenuItem cutMenuItem = new JMenuItem();
BorderLayout borderLayout1 = new BorderLayout();
JPanel jPanel1 = new JPanel();
JPanel jPanel2 = new JPanel();
BorderLayout borderLayout2 = new BorderLayout();
JLabel jLabel1 = new JLabel();
JScrollPane jScrollPane1 = new JScrollPane();
JTextArea traceTextArea = new JTextArea();
BorderLayout borderLayout3 = new BorderLayout();
JLabel jLabel2 = new JLabel();
JScrollPane jScrollPane2 = new JScrollPane();
JMenuItem clearMenuItem = new JMenuItem();
JMenuItem exitMenuItem = new JMenuItem();
JMenu createMenu = new JMenu();
JMenu helpMenu = new JMenu();
JMenuItem AboutMenuItem = new JMenuItem();
Vector<CIAgent> agents = new Vector<CIAgent>();
JTable agentTable = new JTable(); // list of alarms and watches
Hashtable<String, String> agentClasses = new Hashtable<String, String>();
private TableModel agentTableModel = null;
protected String[] columnNameList = { COL_NAME, COL_TYPE, COL_STATE, COL_TASK };
protected Object[][] data = null;
final static int NUM_COLS = 4;
final static int COL_NAMEID = 0;
private final static String COL_NAME = "Name";
final static int COL_TYPEID = 1;
private final static String COL_TYPE = "Type";
final static int COL_STATEID = 2;
private final static String COL_STATE = "State";
final static int COL_TASKID = 3;
private final static String COL_TASK = "Task";
JMenuItem propertiesMenuItem = new JMenuItem();
JMenuItem startProcessingMenuItem = new JMenuItem();
JMenuItem suspendProcessingMenuItem = new JMenuItem();
JMenuItem resumeProcessingMenuItem = new JMenuItem();
/**
* Creates a <code>PAManagerFrame</code> object.
*
*/
public PAManagerFrame() {
enableEvents(AWTEvent.WINDOW_EVENT_MASK);
try {
jbInit();
readPropertiesFile(); // load the agent classes and display names
} catch (Exception e) {
e.printStackTrace();
}
}
/**
* Initializes the components of this frame.
*
* @throws Exception if any errors occur during initialization
*
*/
private void jbInit() throws Exception {
this.getContentPane().setLayout(borderLayout1);
this.setSize(new Dimension(400, 400));
this.setTitle("CIAgent Personal Agent Manager Application");
fileMenu.setText("File");
editMenu.setText("Edit");
cutMenuItem.setText("Cut");
cutMenuItem.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
Cut_actionPerformed(e);
}
});
jPanel2.setLayout(borderLayout2);
jLabel1.setText("Activity Log");
jPanel1.setMinimumSize(new Dimension(550, 200));
jPanel1.setPreferredSize(new Dimension(550, 200));
jPanel1.setLayout(borderLayout3);
jLabel2.setText("CIAgent List");
clearMenuItem.setText("Clear");
clearMenuItem.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(ActionEvent e) {
clearMenuItem_actionPerformed(e);
}
});
exitMenuItem.setText("Exit");
exitMenuItem.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(ActionEvent e) {
exitMenuItem_actionPerformed(e);
}
});
createMenu.setText("Create");
helpMenu.setText("Help");
AboutMenuItem.setText("About");
AboutMenuItem.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(ActionEvent e) {
AboutMenuItem_actionPerformed(e);
}
});
agentTable.setPreferredSize(new Dimension(550, 400));
jScrollPane2.setPreferredSize(new Dimension(500, 400));
propertiesMenuItem.setText("Properties...");
propertiesMenuItem.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(ActionEvent e) {
propertiesMenuItem_actionPerformed(e);
}
});
startProcessingMenuItem.setText("Start processing");
startProcessingMenuItem.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(ActionEvent e) {
startProcessingMenuItem_actionPerformed(e);
}
});
suspendProcessingMenuItem.setText("Suspend processing");
suspendProcessingMenuItem.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(ActionEvent e) {
suspendProcessingMenuItem_actionPerformed(e);
}
});
resumeProcessingMenuItem.setText("Resume processing");
resumeProcessingMenuItem.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(ActionEvent e) {
resumeProcessingMenuItem_actionPerformed(e);
}
});
fileMenu.add(clearMenuItem);
fileMenu.addSeparator();
fileMenu.add(exitMenuItem);
editMenu.add(cutMenuItem);
editMenu.addSeparator();
editMenu.add(propertiesMenuItem);
editMenu.addSeparator();
editMenu.add(startProcessingMenuItem);
editMenu.add(suspendProcessingMenuItem);
editMenu.add(resumeProcessingMenuItem);
menuBar1.add(fileMenu);
menuBar1.add(editMenu);
menuBar1.add(createMenu);
menuBar1.add(helpMenu);
this.getContentPane().add(jPanel2, BorderLayout.CENTER);
jPanel2.add(jLabel1, BorderLayout.NORTH);
jPanel2.add(jScrollPane1, BorderLayout.CENTER);
this.getContentPane().add(jPanel1, BorderLayout.NORTH);
jPanel1.add(jLabel2, BorderLayout.NORTH);
jPanel1.add(jScrollPane2, BorderLayout.CENTER);
setUpTheTable();
jScrollPane2.getViewport().add(agentTable, null);
jScrollPane1.getViewport().add(traceTextArea, null);
helpMenu.add(AboutMenuItem);
this.setJMenuBar(menuBar1);
}
/**
* Processes a window event to add exiting on close.
*
* @param e the WindowEvent object that was generated for this frame
*
*/
protected void processWindowEvent(WindowEvent e) {
super.processWindowEvent(e);
if (e.getID() == WindowEvent.WINDOW_CLOSING) {
exitMenuItem_actionPerformed(null);
} else if (e.getID() == WindowEvent.WINDOW_ACTIVATED) {
e.getWindow().repaint();
}
}
/**
* Stops the selected agent and removes it from the PAManager list.
*
* @param e the ActionEvent object that was generated when cut was
* selected
*/
void Cut_actionPerformed(ActionEvent e) {
int selectedRow = agentTable.getSelectedRow();
if ((selectedRow < 0) || (selectedRow >= agents.size())) {
return; // nothing selected
}
CIAgent agent = (CIAgent) agents.elementAt(selectedRow);
agent.stopAgentProcessing(); // first, stop the agent from runnning
agents.removeElementAt(selectedRow); // remove from agent list
refreshTable(); // refresh table with this agent out of there
setEditMenuItemStates(); // turn edit menu items on/off
}
/**
* Adds a message to the trace text area.
*
* @param msg the String object that contains the message to be appended
*
*/
public synchronized void trace(String msg) {
traceTextArea.append(msg);
}
/**
* Processes the CIAgentEvent received by this application by displaying
* information in the trace text area.
*
* @param event the CIAgentEvent object
*
*/
public void processCIAgentEvent(CIAgentEvent event) {
Object source = event.getSource();
String agentName = "";
if (source instanceof CIAgent) {
agentName = ((CIAgent) source).getName();
}
Object arg = event.getArgObject();
Object action = event.getAction();
if (action != null) {
if (action.equals("trace")) {
if (((arg != null) && (arg instanceof String))) {
trace("\n"+ (String) arg); // display the msg
}
} else {
trace("\nPAManager received action event: " + action + " from agent " + agentName);
}
}
}
/**
* Processes an event (does not actually post it to the event queue).
*
* @param event the CIAgentEvent object to be processed
*
*/
public void postCIAgentEvent(CIAgentEvent event) {
processCIAgentEvent(event); // don't queue, just process
}
/**
 * Builds the agent table and its backing read-only table model, and wires a
 * selection listener that keeps the Edit menu items in sync with the
 * current table selection.
 */
public void setUpTheTable() {
  // Get the data from the Data Set
  data = getTableData();
  // Create a model of the data.
  agentTableModel = new AbstractTableModel() {
    private static final long serialVersionUID = -1338782244155162325L;
    // These methods always need to be implemented.
    public int getColumnCount() {
      return columnNameList.length;
    }
    public int getRowCount() {
      return data.length;
    }
    public Object getValueAt(int theRow, int theCol) {
      return data[theRow][theCol];
    }
    public String getColumnName(int theCol) {
      return columnNameList[theCol];
    }
    public Class<? extends Object> getColumnClass(int theCol) {
      // NOTE(review): assumes row 0 has a non-null value in every column;
      // getTableData() guarantees at least one (blank String) row.
      return getValueAt(0, theCol).getClass();
    }
    // don't allow the user to change any values in the JTable
    public boolean isCellEditable(int theRow, int theCol) {
      return false;
    }
    // read-only model: ignore all edits (the original switch was a no-op)
    public void setValueAt(Object theValue, int theRow, int theCol) {
      // intentionally empty
    }
  };
  // note: the original also created a throwaway JTable before building the
  // model; that dead assignment has been removed
  agentTable = new JTable(agentTableModel);
  agentTable.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
  // code to detect table selection events
  ListSelectionModel rowSM = agentTable.getSelectionModel();
  rowSM.addListSelectionListener(new ListSelectionListener() {
    public void valueChanged(ListSelectionEvent e) {
      // ignore extra messages while the selection is still adjusting
      if (e.getValueIsAdjusting()) {
        return;
      }
      // whether or not a row is now selected, refresh the Edit menu state
      // (both branches of the original did exactly this)
      setEditMenuItemStates();
    }
  });
  (agentTable.getColumn(COL_NAME)).setPreferredWidth(50);
  (agentTable.getColumn(COL_TYPE)).setPreferredWidth(50);
  (agentTable.getColumn(COL_STATE)).setPreferredWidth(25);
  (agentTable.getColumn(COL_TASK)).setPreferredWidth(100);
  setEditMenuItemStates();
  agentTable.setRowSelectionAllowed(true);
}
/**
 * Retrieves the data from each agent and fills the table model.
 * Always returns at least one (blank) row so the table model stays valid
 * before any agents exist.
 *
 * @return the Object[][] that contains the table data
 */
private Object[][] getTableData() {
  // The original checked `agents != null` AFTER calling agents.size(), which
  // would already have thrown -- the check was dead and lclArray could have
  // been dereferenced while null. Both issues are fixed by this structure.
  if (agents.size() == 0) {
    // no agents yet: return a single blank row
    Object[][] blank = new Object[1][NUM_COLS];
    for (int col = 0; col < NUM_COLS; col++) {
      blank[0][col] = "";
    }
    return blank;
  }
  Object[][] lclArray = new Object[agents.size()][NUM_COLS];
  for (int i = 0; i < agents.size(); i++) {
    CIAgent agent = (CIAgent) agents.elementAt(i);
    lclArray[i][0] = agent.getName();
    lclArray[i][1] = agent.getDisplayName();
    lclArray[i][2] = agent.getState().toString();
    lclArray[i][3] = agent.getTaskDescription();
  }
  return lclArray;
}
/**
 * Changes the contents of the STATE column in the table only.
 */
private void updateTableData() {
  int count = agents.size();
  if (count == 0) {
    return; // no agent data yet
  }
  // column 2 is the STATE column; all other columns are left untouched
  for (int row = 0; row < count; row++) {
    CIAgent current = (CIAgent) agents.elementAt(row);
    data[row][2] = current.getState().toString();
  }
}
/**
 * Updates the table data then sends an event to refresh the screen.
 */
private void updateTable() {
  updateTableData(); // refresh the STATE column values in the model data
  // fire a model-wide change event so the JTable repaints from the data array
  TableModelEvent e = new TableModelEvent(agentTableModel);
  agentTable.tableChanged(e);
}
/**
 * Refreshes the table data by rebuilding the entire data array from the
 * agent list (all columns, not just STATE) and repainting the table.
 */
private void refreshTable() {
  data = getTableData();
  updateTable();
}
/**
 * Enables and disables the Edit menu items based on whether an agent
 * has been selected and if so, updates the state of the selected agent.
 */
private void setEditMenuItemStates() {
  int row = agentTable.getSelectedRow();
  boolean haveSelection = (row >= 0) && (row < agents.size());
  CIAgent selected = haveSelection ? (CIAgent) agents.elementAt(row) : null;
  if (selected == null) {
    // nothing selected: no edit actions make sense
    cutMenuItem.setEnabled(false);
    propertiesMenuItem.setEnabled(false);
    startProcessingMenuItem.setEnabled(false);
    suspendProcessingMenuItem.setEnabled(false);
    resumeProcessingMenuItem.setEnabled(false);
    return;
  }
  cutMenuItem.setEnabled(true);
  propertiesMenuItem.setEnabled(true);
  // disable all processing actions, then enable only the one that matches
  // the agent's current life-cycle state
  startProcessingMenuItem.setEnabled(false);
  suspendProcessingMenuItem.setEnabled(false);
  resumeProcessingMenuItem.setEnabled(false);
  switch (selected.getState().getState()) {
    case CIAgentState.INITIATED :
      startProcessingMenuItem.setEnabled(true);
      break;
    case CIAgentState.ACTIVE :
      suspendProcessingMenuItem.setEnabled(true);
      break;
    case CIAgentState.SUSPENDED :
      resumeProcessingMenuItem.setEnabled(true);
      break;
    case CIAgentState.UNINITIATED :
      // not yet initialized -- no processing actions allowed
      break;
  }
}
/**
 * Opens the customizer dialog when the user double clicks on an agent.
 *
 * @param agent the CIAgent object that was selected
 * @param modal the boolean flag indicating modality (only applies when the
 *        customizer is a JDialog)
 */
private void openCustomizer(CIAgent agent, boolean modal) {
  Class<?> customizerClass = agent.getCustomizerClass();
  if (customizerClass == null) {
    trace("ERROR - No customizer defined for this agent");
    // tell user, no customizer defined
    return;
  }
  // found a customizer, now instantiate it reflectively
  Customizer customizer = null;
  try {
    // Class.newInstance() is deprecated: use the no-arg constructor instead
    customizer = (Customizer) customizerClass.getDeclaredConstructor().newInstance();
  } catch (Exception exc) {
    trace("\nError opening customizer - " + exc.toString());
    return; // bail out
  }
  // customizer must be JFrame (window) or JDialog
  if (customizer instanceof JFrame) {
    Point pos = this.getLocation();
    JFrame frame = (JFrame) customizer;
    frame.setLocation(pos.x + 20, pos.y + 20);
    customizer.setObject(agent);
    frame.setVisible(true); // show() is deprecated; setVisible(true) is equivalent
  } else if (customizer instanceof JDialog) {
    Point pos = this.getLocation();
    JDialog dlg = (JDialog) customizer;
    dlg.setModal(modal);
    dlg.setLocation(pos.x + 20, pos.y + 20);
    customizer.setObject(agent);
    dlg.setVisible(true); // blocks here when modal, exactly like the deprecated show()
  } else {
    trace("\nError - CIAgent customizer must be JFrame or JDialog");
    // Note: could provide support for JPanel here
  }
  setEditMenuItemStates(); // turn edit menu items on/off
  refreshTable();
  this.invalidate();
  this.repaint();
}
/**
 * Reads the PAManager.properties file that contains a list of supported
 * agents, adding a Create-menu item for each agent class that can be
 * instantiated. Entries that fail to load are logged and skipped.
 */
private void readPropertiesFile() {
  // create a properties object to hold the preferences
  Properties properties = new Properties();
  FileInputStream in = null;
  try {
    // Note: assume that properties file is in the pamanager directory
    in = new FileInputStream("src/PAManager.properties");
    properties.load(new BufferedInputStream(in));
    // AgentClassNames is a ';'-separated list of fully-qualified class names
    String property = properties.getProperty("AgentClassNames");
    StringTokenizer tok = new StringTokenizer(property, ";");
    while (tok.hasMoreTokens()) {
      String agentClassName = tok.nextToken();
      // try to instantiate the agent bean; skip entries that fail
      try {
        Class<?> klas = Class.forName(agentClassName);
        // Class.newInstance() is deprecated: use the no-arg constructor instead
        CIAgent agent = (CIAgent) klas.getDeclaredConstructor().newInstance();
        String displayName = agent.getDisplayName();
        System.out.println("Adding agent class ... " + displayName);
        addAgentMenuItem(displayName); // create menu item and add to Create menu
        agentClasses.put(displayName, agentClassName); // save in hashtable for later
      } catch (Exception exc) {
        System.out.println("Error can't instantiate agent: " + agentClassName + " " + exc.toString());
      }
    }
  } catch (Exception e) {
    System.out.println("Error: cannot find or load PAManager properties file");
  } finally {
    // the original code leaked the stream; always close it
    if (in != null) {
      try {
        in.close();
      } catch (Exception ignored) {
        // nothing useful to do if close fails
      }
    }
  }
}
/**
 * Adds a menu item for an agent to the Create Menu so the user can
 * select the agent for use.
 *
 * @param item the String object that contains the agent type
 */
private void addAgentMenuItem(String item) {
  JMenuItem agentItem = new JMenuItem(item);
  // the action command carries the agent type back to the shared handler
  agentItem.setActionCommand(item);
  agentItem.addActionListener(new java.awt.event.ActionListener() {
    public void actionPerformed(ActionEvent e) {
      CreateMenuItem_actionPerformed(e);
    }
  });
  createMenu.add(agentItem); // create menu
}
/**
 * Creates a new agent of the selected type, registers this manager as its
 * event/property listener, opens its customizer, and adds it to the table.
 * Shows an error dialog if the agent class cannot be instantiated.
 *
 * @param theEvent the ActionEvent object whose action command names the
 *        agent type to create
 */
void CreateMenuItem_actionPerformed(ActionEvent theEvent) {
  setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR));
  String beanName = theEvent.getActionCommand();
  String className = (String) agentClasses.get(beanName); // retrieve the class name
  try {
    Class<?> klas = Class.forName(className); // load or reload the class
    // instantiate via the no-arg constructor (Class.newInstance is deprecated);
    // the original's intermediate Object variable added nothing and is gone
    CIAgent agentBean = (CIAgent) klas.getDeclaredConstructor().newInstance();
    agentBean.setAgentPlatform(this); // pass reference to agent for later use
    agentBean.addCIAgentEventListener(this); // register PAManager as listener
    agentBean.addPropertyChangeListener(this);
    openCustomizer(agentBean, true); // automatically open customizer
    addAgent(agentBean); // add the agent to the platform
  } catch (Exception e) {
    JOptionPane.showMessageDialog(this, // Parent
        e.toString(), // Msg
        "Error: Can't create agent " + beanName, // Title
        JOptionPane.ERROR_MESSAGE // Severity
    );
  }
  setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR));
  refreshTable();
  this.invalidate();
  this.repaint();
}
/**
 * Opens the bean customizer on the selected agent.
 *
 * @param e the ActionEvent object generated when the agent was selected
 */
void propertiesMenuItem_actionPerformed(ActionEvent e) {
  int row = agentTable.getSelectedRow();
  boolean validSelection = (row >= 0) && (row < agents.size());
  if (!validSelection) {
    return; // nothing selected
  }
  // non-modal so the user can keep interacting with the manager window
  openCustomizer((CIAgent) agents.elementAt(row), false);
}
/**
 * Closes the main window on exit.
 *
 * @param e the ActionEvent object generated when exit was selected
 */
void exitMenuItem_actionPerformed(ActionEvent e) {
  // terminates the whole JVM; agents are not stopped individually first
  System.exit(0);
}
/**
 * Clears the PAManager by stopping and removing all agents.
 *
 * @param e the ActionEvent object generated when clear was selected
 */
void clearMenuItem_actionPerformed(ActionEvent e) {
  // repeatedly stop and remove the first agent until the list is empty
  while (!agents.isEmpty()) {
    CIAgent first = (CIAgent) agents.elementAt(0);
    first.stopAgentProcessing(); // stop the agent before discarding it
    agents.removeElementAt(0);
  }
  refreshTable(); // refresh table with agents removed
  traceTextArea.setText(""); // clear the bottom text area
}
/**
 * Refreshes the table when an agent was changed.
 *
 * @param event the PropertyChangeEvent object generated when the agent
 *        was changed
 */
public void propertyChange(PropertyChangeEvent event) {
  refreshTable(); // an agent property changed, refresh the table
}
/**
 * Starts an agent.
 *
 * @param e the ActionEvent object generated when start was selected
 */
void startProcessingMenuItem_actionPerformed(ActionEvent e) {
  int row = agentTable.getSelectedRow();
  boolean validSelection = (row >= 0) && (row < agents.size());
  if (!validSelection) {
    return; // nothing selected
  }
  CIAgent selected = (CIAgent) agents.elementAt(row);
  // only start if agent is in the initialized state
  if (selected.getState().getState() != CIAgentState.INITIATED) {
    return;
  }
  selected.startAgentProcessing();
  setEditMenuItemStates(); // turn edit menu items on/off
  updateTable();
}
/**
 * Suspends agent processing.
 *
 * @param e the ActionEvent object generated when suspend was selected
 */
void suspendProcessingMenuItem_actionPerformed(ActionEvent e) {
  int row = agentTable.getSelectedRow();
  boolean validSelection = (row >= 0) && (row < agents.size());
  if (!validSelection) {
    return; // nothing selected
  }
  CIAgent selected = (CIAgent) agents.elementAt(row);
  selected.suspendAgentProcessing();
  setEditMenuItemStates(); // turn edit menu items on/off
  updateTable();
}
/**
 * Resumes agent processing.
 *
 * @param e the ActionEvent object generated when resume was selected
 */
void resumeProcessingMenuItem_actionPerformed(ActionEvent e) {
  int row = agentTable.getSelectedRow();
  boolean validSelection = (row >= 0) && (row < agents.size());
  if (!validSelection) {
    return; // nothing selected
  }
  CIAgent selected = (CIAgent) agents.elementAt(row);
  selected.resumeAgentProcessing();
  setEditMenuItemStates(); // turn edit menu items on/off
  updateTable();
}
/**
 * Displays the About dialog, offset slightly from the main window.
 *
 * @param e the ActionEvent object generated when About was selected
 */
void AboutMenuItem_actionPerformed(ActionEvent e) {
  AboutDialog dlg = new AboutDialog(this, "About Personal Agent Manager", true);
  Point loc = this.getLocation();
  dlg.setLocation(loc.x + 50, loc.y + 50);
  dlg.setVisible(true); // show() is deprecated; setVisible(true) is equivalent
}
/**
 * Adds an agent bean to this platform, giving it a unique name if its
 * current name collides with an agent already registered here.
 *
 * @param agent the CIAgent object to be added to this container
 */
public void addAgent(CIAgent agent) {
  if (agents.contains(agent)) {
    return; // don't add a second instance
  }
  String name = agent.getName();
  for (int i = 0; i < agents.size(); i++) {
    // see if we have a name collision
    if (name.equals(((CIAgent) agents.elementAt(i)).getName())) {
      // generateUniqueName() checks against every existing agent, so a single
      // rename resolves all collisions -- the original kept scanning and could
      // redundantly regenerate the same name
      agent.setName(generateUniqueName(agent));
      break;
    }
  }
  agents.addElement(agent);
}
/**
 * Generate a unique agent bean name by appending a colon and an integer
 * suffix (name, name:1, name:2, ...).
 *
 * @param agent the CIAgent object for which a new name is generated
 *
 * @return the String object that contains the unique name
 */
private String generateUniqueName(CIAgent agent) {
  // keep a local hash list of used agent names
  Hashtable<String, CIAgent> names = new Hashtable<String, CIAgent>();
  for (int i = 0; i < agents.size(); i++) {
    CIAgent bean = (CIAgent) agents.elementAt(i);
    names.put(bean.getName(), bean);
  }
  String name = agent.getName();
  while (names.containsKey(name)) {
    int inx = name.indexOf(':');
    if (inx != -1) {
      String tmpName = name.substring(0, inx);
      String tmpInx = name.substring(inx + 1);
      try {
        int index = Integer.parseInt(tmpInx);
        name = tmpName + ":" + (index + 1); // bump index
        continue;
      } catch (NumberFormatException nfe) {
        // text after ':' is not a number (e.g. a user-chosen "a:b" name);
        // the original threw here -- instead treat it as having no suffix
      }
    }
    name = name + ":1"; // add numeric suffix
  }
  return name; // returns a unique name
}
//
// AgentPlatform interface methods
//
/**
 * Retrieves a list of the registered agents running on this platform.
 * Returns a shallow copy so callers cannot modify the platform's own list;
 * the CIAgent elements themselves are shared, not copied.
 *
 * @return the Vector object that contains the agents
 */
@SuppressWarnings("unchecked")
public Vector<CIAgent> getAgents() {
  return (Vector<CIAgent>) agents.clone();
}
/**
 * Retrieves the agent that has the specified name.
 *
 * @param agentName the String object that contains the name of the agent
 *        to be retrieved
 *
 * @return the CIAgent object or null if not found
 */
public CIAgent getAgent(String agentName) {
  // scan the agent list for the first agent with a matching name
  for (int i = 0; i < agents.size(); i++) {
    CIAgent candidate = (CIAgent) agents.elementAt(i);
    if (candidate.getName().equals(agentName)) {
      return candidate;
    }
  }
  return null; // agent not found
}
}
| |
/*
* Copyright (c) 2021, Peter Abeles. All Rights Reserved.
*
* This file is part of BoofCV (http://boofcv.org).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package boofcv.alg.transform.ii;
import boofcv.struct.image.GrayF32;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
/**
 * Functions related to image derivatives in integral images. The kernel*
 * functions build {@link IntegralKernel} descriptions (scaled rectangular
 * blocks); the deriv* functions apply the equivalent filters directly to a
 * GrayF32 integral image. Border pixels are not computed by the deriv*
 * functions.
 *
 * @author Peter Abeles
 */
public class DerivativeIntegralImage {
  /**
   * Creates a kernel for a symmetric box derivative along x: a negative block
   * to the left of the pixel and a positive block to the right.
   *
   * @param r Radius of the box. width is 2*r+1
   * @param ret Optional storage for the kernel. If null a new one is created.
   * @return The derivative kernel.
   */
  public static IntegralKernel kernelDerivX( int r, @Nullable IntegralKernel ret ) {
    ret = checkDeclareKernel(ret, 2);
    // block 0 covers x in [-r-1,-1] (weight -1), block 1 covers x in [0,r] (weight +1)
    ret.blocks[0].setTo(-r - 1, -r - 1, -1, r);
    ret.blocks[1].setTo(0, -r - 1, r, r);
    ret.scales[0] = -1;
    ret.scales[1] = 1;
    return ret;
  }
  /**
   * Ensures a non-null kernel with exactly {@code numBlocks} blocks, reusing
   * {@code ret} when provided.
   */
  @NotNull private static IntegralKernel checkDeclareKernel( @Nullable IntegralKernel ret, int numBlocks ) {
    if (ret == null)
      ret = new IntegralKernel(numBlocks);
    else
      ret.resizeBlocks(numBlocks);
    return ret;
  }
  /**
   * Creates a kernel for a symmetric box derivative along y: a negative block
   * above the pixel and a positive block below.
   *
   * @param r Radius of the box. width is 2*r+1
   * @param ret Optional storage for the kernel. If null a new one is created.
   * @return The derivative kernel.
   */
  public static IntegralKernel kernelDerivY( int r, @Nullable IntegralKernel ret ) {
    ret = checkDeclareKernel(ret, 2);
    // same as kernelDerivX with x/y swapped
    ret.blocks[0].setTo(-r - 1, -r - 1, r, -1);
    ret.blocks[1].setTo(-r - 1, 0, r, r);
    ret.scales[0] = -1;
    ret.scales[1] = 1;
    return ret;
  }
  /**
   * Creates a kernel for the Haar wavelet "centered" around the target pixel.
   *
   * @param r Radius of the box. width is 2*r
   * @param ret Optional storage for the kernel. If null a new one is created.
   * @return Kernel for a Haar x-axis wavelet.
   */
  public static IntegralKernel kernelHaarX( int r, @Nullable IntegralKernel ret ) {
    ret = checkDeclareKernel(ret, 2);
    // unlike kernelDerivX there is no gap column: left half negative, right half positive
    ret.blocks[0].setTo(-r, -r, 0, r);
    ret.blocks[1].setTo(0, -r, r, r);
    ret.scales[0] = -1;
    ret.scales[1] = 1;
    return ret;
  }
  /**
   * Creates a kernel for the Haar wavelet "centered" around the target pixel.
   *
   * @param r Radius of the box. width is 2*r
   * @param ret Optional storage for the kernel. If null a new one is created.
   * @return Kernel for a Haar y-axis wavelet.
   */
  public static IntegralKernel kernelHaarY( int r, @Nullable IntegralKernel ret ) {
    ret = checkDeclareKernel(ret, 2);
    // same as kernelHaarX with x/y swapped
    ret.blocks[0].setTo(-r, -r, r, 0);
    ret.blocks[1].setTo(-r, 0, r, r);
    ret.scales[0] = -1;
    ret.scales[1] = 1;
    return ret;
  }
  /**
   * Creates a second-order x-derivative box kernel: a wide outer block at
   * weight +1 with the center lobe counted again at weight -3, giving the
   * classic (+1, -2, +1)-style lobe pattern.
   *
   * @param size Total width of the kernel. The lobe width is size/3.
   * @param ret Optional storage for the kernel. If null a new one is created.
   * @return The second-derivative kernel.
   */
  public static IntegralKernel kernelDerivXX( int size, @Nullable IntegralKernel ret ) {
    ret = checkDeclareKernel(ret, 2);
    // lobe size
    int blockW = size/3;
    // horizontal band size
    int blockH = size - blockW - 1;
    int r1 = blockW/2;
    int r2 = blockW + r1;
    int r3 = blockH/2;
    // block 0 = full 3-lobe span (+1); block 1 = center lobe, weighted -3 so
    // the center's net weight is -2 and the outer lobes are +1
    ret.blocks[0].setTo(-r2 - 1, -r3 - 1, r2, r3);
    ret.blocks[1].setTo(-r1 - 1, -r3 - 1, r1, r3);
    ret.scales[0] = 1;
    ret.scales[1] = -3;
    return ret;
  }
  /**
   * Creates a second-order y-derivative box kernel; kernelDerivXX with x/y
   * swapped.
   *
   * @param size Total width of the kernel. The lobe width is size/3.
   * @param ret Optional storage for the kernel. If null a new one is created.
   * @return The second-derivative kernel.
   */
  public static IntegralKernel kernelDerivYY( int size, @Nullable IntegralKernel ret ) {
    ret = checkDeclareKernel(ret, 2);
    int blockW = size/3;
    int blockH = size - blockW - 1;
    int r1 = blockW/2;
    int r2 = blockW + r1;
    int r3 = blockH/2;
    ret.blocks[0].setTo(-r3 - 1, -r2 - 1, r3, r2);
    ret.blocks[1].setTo(-r3 - 1, -r1 - 1, r3, r1);
    ret.scales[0] = 1;
    ret.scales[1] = -3;
    return ret;
  }
  /**
   * Creates a cross-derivative (xy) kernel: four diagonal quadrant blocks
   * with alternating signs around the target pixel.
   *
   * @param size Total width of the kernel. Each quadrant block is size/3 wide.
   * @param ret Optional storage for the kernel. If null a new one is created.
   * @return The cross-derivative kernel.
   */
  public static IntegralKernel kernelDerivXY( int size, @Nullable IntegralKernel ret ) {
    ret = checkDeclareKernel(ret, 4);
    int block = size/3;
    // quadrants: upper-left (+), upper-right (-), lower-right (+), lower-left (-)
    ret.blocks[0].setTo(-block - 1, -block - 1, -1, -1);
    ret.blocks[1].setTo(0, -block - 1, block, -1);
    ret.blocks[2].setTo(0, 0, block, block);
    ret.blocks[3].setTo(-block - 1, 0, -1, block);
    ret.scales[0] = 1;
    ret.scales[1] = -1;
    ret.scales[2] = 1;
    ret.scales[3] = -1;
    return ret;
  }
  /**
   * Applies the xx second-derivative filter (see {@link #kernelDerivXX})
   * directly to integral image {@code input}, writing into {@code output}.
   * Only interior pixels are computed; a border of roughly size/2 pixels is
   * left untouched.
   *
   * @param input Integral image.
   * @param output Storage for the filter response.
   * @param size Total width of the filter.
   */
  public static void derivXX( GrayF32 input, GrayF32 output, int size ) {
    int blockW = size/3;
    int blockH = size - blockW - 1;
    int radiusW = size/2;
    int radiusH = blockH/2;
    int blockW2 = 2*blockW;
    int blockW3 = 3*blockW;
    int endY = input.height - radiusH;
    int endX = input.width - radiusW;
    for (int y = radiusH + 1; y < endY; y++) {
      // rows bounding the filter band in the integral image
      int indexTop = input.startIndex + (y - radiusH - 1)*input.stride;
      int indexBottom = indexTop + blockH*input.stride;
      int indexDst = output.startIndex + y*output.stride + radiusW + 1;
      for (int x = radiusW + 1; x < endX; x++, indexTop++, indexBottom++, indexDst++) {
        // four-corner sums over the integral image: full span (+1), center lobe (-3)
        float sum = input.data[indexBottom + blockW3] - input.data[indexTop + blockW3] - input.data[indexBottom] + input.data[indexTop];
        sum -= 3*(input.data[indexBottom + blockW2] - input.data[indexTop + blockW2] - input.data[indexBottom + blockW] + input.data[indexTop + blockW]);
        output.data[indexDst] = sum;
      }
    }
  }
  /**
   * Applies the yy second-derivative filter (see {@link #kernelDerivYY})
   * directly to integral image {@code input}; derivXX with x/y swapped.
   *
   * @param input Integral image.
   * @param output Storage for the filter response.
   * @param size Total width of the filter.
   */
  public static void derivYY( GrayF32 input, GrayF32 output, int size ) {
    int blockH = size/3;
    int blockW = size - blockH - 1;
    int radiusH = size/2;
    int radiusW = blockW/2;
    // offsets for one, two, and three lobe-heights down in the integral image
    int rowOff1 = blockH*input.stride;
    int rowOff2 = 2*rowOff1;
    int rowOff3 = 3*rowOff1;
    int endY = input.height - radiusH;
    int endX = input.width - radiusW;
    for (int y = radiusH + 1; y < endY; y++) {
      int indexL = input.startIndex + (y - radiusH - 1)*input.stride;
      int indexR = indexL + blockW;
      int indexDst = output.startIndex + y*output.stride + radiusW + 1;
      for (int x = radiusW + 1; x < endX; x++, indexL++, indexR++, indexDst++) {
        float sum = input.data[indexR + rowOff3] - input.data[indexL + rowOff3] - input.data[indexR] + input.data[indexL];
        sum -= 3*(input.data[indexR + rowOff2] - input.data[indexL + rowOff2] - input.data[indexR + rowOff1] + input.data[indexL + rowOff1]);
        output.data[indexDst] = sum;
      }
    }
  }
  /**
   * Applies the cross-derivative (xy) filter (see {@link #kernelDerivXY})
   * directly to integral image {@code input}: four quadrant box sums with
   * alternating signs.
   *
   * @param input Integral image.
   * @param output Storage for the filter response.
   * @param size Total width of the filter.
   */
  public static void derivXY( GrayF32 input, GrayF32 output, int size ) {
    int block = size/3;
    int endY = input.height - block;
    int endX = input.width - block;
    for (int y = block + 1; y < endY; y++) {
      // the four row indexes bounding the upper and lower quadrant pairs
      int indexY1 = input.startIndex + (y - block - 1)*input.stride;
      int indexY2 = indexY1 + block*input.stride;
      int indexY3 = indexY2 + input.stride;
      int indexY4 = indexY3 + block*input.stride;
      int indexDst = output.startIndex + y*output.stride + block + 1;
      for (int x = block + 1; x < endX; x++, indexY1++, indexY2++, indexY3++, indexY4++, indexDst++) {
        // NOTE(review): x3/x4 are loop-invariant and could be hoisted out of this loop
        int x3 = block + 1;
        int x4 = x3 + block;
        // upper-left (+), upper-right (-), lower-right (+), lower-left (-)
        float sum = input.data[indexY2 + block] - input.data[indexY1 + block] - input.data[indexY2] + input.data[indexY1];
        sum -= input.data[indexY2 + x4] - input.data[indexY1 + x4] - input.data[indexY2 + x3] + input.data[indexY1 + x3];
        sum += input.data[indexY4 + x4] - input.data[indexY3 + x4] - input.data[indexY4 + x3] + input.data[indexY3 + x3];
        sum -= input.data[indexY4 + block] - input.data[indexY3 + block] - input.data[indexY4] + input.data[indexY3];
        output.data[indexDst] = sum;
      }
    }
  }
}
| |
/*
* Copyright 2014-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.cxx;
import com.facebook.buck.core.cell.CellPathResolver;
import com.facebook.buck.core.description.MetadataProvidingDescription;
import com.facebook.buck.core.description.arg.HasContacts;
import com.facebook.buck.core.description.arg.HasTestTimeout;
import com.facebook.buck.core.description.attr.ImplicitDepsInferringDescription;
import com.facebook.buck.core.model.BuildTarget;
import com.facebook.buck.core.model.Flavor;
import com.facebook.buck.core.model.FlavorDomain;
import com.facebook.buck.core.model.Flavored;
import com.facebook.buck.core.model.TargetConfiguration;
import com.facebook.buck.core.model.targetgraph.BuildRuleCreationContextWithTargetGraph;
import com.facebook.buck.core.model.targetgraph.DescriptionWithTargetGraph;
import com.facebook.buck.core.rules.ActionGraphBuilder;
import com.facebook.buck.core.rules.BuildRule;
import com.facebook.buck.core.rules.BuildRuleParams;
import com.facebook.buck.core.rules.SourcePathRuleFinder;
import com.facebook.buck.core.rules.common.BuildableSupport;
import com.facebook.buck.core.sourcepath.PathSourcePath;
import com.facebook.buck.core.sourcepath.SourcePath;
import com.facebook.buck.core.toolchain.ToolchainProvider;
import com.facebook.buck.core.util.immutables.BuckStyleImmutable;
import com.facebook.buck.cxx.toolchain.CxxBuckConfig;
import com.facebook.buck.cxx.toolchain.CxxPlatform;
import com.facebook.buck.cxx.toolchain.CxxPlatformsProvider;
import com.facebook.buck.cxx.toolchain.LinkerMapMode;
import com.facebook.buck.cxx.toolchain.StripStyle;
import com.facebook.buck.cxx.toolchain.UnresolvedCxxPlatform;
import com.facebook.buck.io.filesystem.ProjectFilesystem;
import com.facebook.buck.rules.args.Arg;
import com.facebook.buck.rules.macros.LocationMacroExpander;
import com.facebook.buck.rules.macros.StringWithMacros;
import com.facebook.buck.rules.macros.StringWithMacrosConverter;
import com.facebook.buck.rules.query.QueryUtils;
import com.facebook.buck.test.config.TestBuckConfig;
import com.facebook.buck.versions.Version;
import com.facebook.buck.versions.VersionRoot;
import com.google.common.base.Preconditions;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableCollection;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Maps;
import com.google.common.collect.Ordering;
import com.google.common.collect.Sets;
import java.nio.file.Path;
import java.util.Optional;
import java.util.function.Function;
import org.immutables.value.Value;
public class CxxTestDescription
implements DescriptionWithTargetGraph<CxxTestDescriptionArg>,
Flavored,
ImplicitDepsInferringDescription<CxxTestDescription.AbstractCxxTestDescriptionArg>,
MetadataProvidingDescription<CxxTestDescriptionArg>,
VersionRoot<CxxTestDescriptionArg> {
private static final CxxTestType DEFAULT_TEST_TYPE = CxxTestType.GTEST;
private final ToolchainProvider toolchainProvider;
private final CxxBuckConfig cxxBuckConfig;
private final ImmutableSet<Flavor> declaredPlatforms;
private final CxxBinaryMetadataFactory cxxBinaryMetadataFactory;
/**
 * @param toolchainProvider supplies the configured C/C++ platform toolchains
 * @param cxxBuckConfig the {@code cxx} section of the buck config
 * @param cxxBinaryMetadataFactory answers metadata queries for the underlying
 *     test binary
 */
public CxxTestDescription(
    ToolchainProvider toolchainProvider,
    CxxBuckConfig cxxBuckConfig,
    CxxBinaryMetadataFactory cxxBinaryMetadataFactory) {
  this.toolchainProvider = toolchainProvider;
  this.cxxBuckConfig = cxxBuckConfig;
  // cache the config-declared platform flavors for hasFlavors() checks
  this.declaredPlatforms = cxxBuckConfig.getDeclaredPlatforms();
  this.cxxBinaryMetadataFactory = cxxBinaryMetadataFactory;
}
/**
 * Computes the deps implicitly added by the chosen test framework
 * (gtest/boost), as configured in the buck config. Frameworks with no
 * configured deps contribute nothing.
 */
private ImmutableSet<BuildTarget> getImplicitFrameworkDeps(
    TargetConfiguration targetConfiguration, AbstractCxxTestDescriptionArg constructorArg) {
  ImmutableSet.Builder<BuildTarget> frameworkDeps = ImmutableSet.builder();
  CxxTestType framework = constructorArg.getFramework().orElse(getDefaultTestType());
  if (framework == CxxTestType.GTEST) {
    cxxBuckConfig.getGtestDep(targetConfiguration).ifPresent(frameworkDeps::add);
    // the default main() stub is linked in unless the rule opts out
    if (constructorArg.getUseDefaultTestMain().orElse(true)) {
      cxxBuckConfig.getGtestDefaultTestMainDep(targetConfiguration).ifPresent(frameworkDeps::add);
    }
  } else if (framework == CxxTestType.BOOST) {
    cxxBuckConfig.getBoostTestDep(targetConfiguration).ifPresent(frameworkDeps::add);
  }
  // any other framework value adds no implicit deps (original switch default was empty)
  return frameworkDeps.build();
}
/**
 * Resolves the cxx platform to build against: an explicit platform flavor on
 * the target wins, then the rule's {@code default_platform} argument, then
 * the toolchain-wide default platform.
 */
private UnresolvedCxxPlatform getCxxPlatform(
    BuildTarget target, CxxBinaryDescription.CommonArg constructorArg) {
  CxxPlatformsProvider provider = getCxxPlatformsProvider();
  FlavorDomain<UnresolvedCxxPlatform> platforms = provider.getUnresolvedCxxPlatforms();
  // First check if the build target is setting a particular target.
  Optional<UnresolvedCxxPlatform> flavoredPlatform = platforms.getValue(target.getFlavors());
  if (flavoredPlatform.isPresent()) {
    return flavoredPlatform.get();
  }
  // Next, check for a constructor arg level default platform.
  Optional<Flavor> argDefault = constructorArg.getDefaultPlatform();
  if (argDefault.isPresent()) {
    return platforms.getValue(argDefault.get());
  }
  // Otherwise, fallback to the description-level default platform.
  return platforms.getValue(provider.getDefaultUnresolvedCxxPlatform().getFlavor());
}
/** @return the arg type Buck uses to deserialize this rule's constructor args. */
@Override
public Class<CxxTestDescriptionArg> getConstructorArgType() {
  return CxxTestDescriptionArg.class;
}
/**
 * Creates the build rule for a {@code cxx_test} target. For the
 * compilation-database flavors a database rule is returned instead of a
 * test; otherwise the test binary is built and wrapped in the framework's
 * CxxTest subclass (gtest or boost).
 */
@SuppressWarnings("PMD.PrematureDeclaration")
@Override
public BuildRule createBuildRule(
    BuildRuleCreationContextWithTargetGraph context,
    BuildTarget inputBuildTarget,
    BuildRuleParams params,
    CxxTestDescriptionArg args) {
  // Remember, then strip, the strip-style and linker-map flavors; they are
  // re-applied to the final test target below.
  Optional<StripStyle> flavoredStripStyle = StripStyle.FLAVOR_DOMAIN.getValue(inputBuildTarget);
  Optional<LinkerMapMode> flavoredLinkerMapMode =
      LinkerMapMode.FLAVOR_DOMAIN.getValue(inputBuildTarget);
  inputBuildTarget =
      CxxStrip.removeStripStyleFlavorInTarget(inputBuildTarget, flavoredStripStyle);
  inputBuildTarget =
      LinkerMapMode.removeLinkerMapModeFlavorInTarget(inputBuildTarget, flavoredLinkerMapMode);
  BuildTarget buildTarget = inputBuildTarget;
  ActionGraphBuilder graphBuilder = context.getActionGraphBuilder();
  CxxPlatform cxxPlatform = getCxxPlatform(buildTarget, args).resolve(graphBuilder);
  SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(graphBuilder);
  ProjectFilesystem projectFilesystem = context.getProjectFilesystem();
  CellPathResolver cellRoots = context.getCellPathResolver();
  // #compilation-database flavor: build the compile rules and emit the
  // compilation database rule instead of a runnable test.
  if (buildTarget.getFlavors().contains(CxxCompilationDatabase.COMPILATION_DATABASE)) {
    CxxLinkAndCompileRules cxxLinkAndCompileRules =
        CxxDescriptionEnhancer.createBuildRulesForCxxBinaryDescriptionArg(
            buildTarget.withoutFlavors(CxxCompilationDatabase.COMPILATION_DATABASE),
            projectFilesystem,
            graphBuilder,
            cellRoots,
            cxxBuckConfig,
            cxxPlatform,
            args,
            getImplicitFrameworkDeps(buildTarget.getTargetConfiguration(), args),
            flavoredStripStyle,
            flavoredLinkerMapMode);
    return CxxCompilationDatabase.createCompilationDatabase(
        buildTarget, projectFilesystem, cxxLinkAndCompileRules.compileRules);
  }
  // #uber-compilation-database flavor: a merged database; ensure the target
  // carries an explicit platform flavor first.
  if (buildTarget.getFlavors().contains(CxxCompilationDatabase.UBER_COMPILATION_DATABASE)) {
    return CxxDescriptionEnhancer.createUberCompilationDatabase(
        getCxxPlatformsProvider().getUnresolvedCxxPlatforms().getValue(buildTarget).isPresent()
            ? buildTarget
            : buildTarget.withAppendedFlavors(cxxPlatform.getFlavor()),
        projectFilesystem,
        graphBuilder);
  }
  // Generate the link rule that builds the test binary.
  CxxLinkAndCompileRules cxxLinkAndCompileRules =
      CxxDescriptionEnhancer.createBuildRulesForCxxBinaryDescriptionArg(
          buildTarget,
          projectFilesystem,
          graphBuilder,
          cellRoots,
          cxxBuckConfig,
          cxxPlatform,
          args,
          getImplicitFrameworkDeps(buildTarget.getTargetConfiguration(), args),
          flavoredStripStyle,
          flavoredLinkerMapMode);
  // Construct the actual build params we'll use, notably with an added dependency on the
  // CxxLink rule above which builds the test binary.
  BuildTarget testBuildTarget =
      CxxStrip.restoreStripStyleFlavorInTarget(buildTarget, flavoredStripStyle);
  testBuildTarget =
      LinkerMapMode.restoreLinkerMapModeFlavorInTarget(testBuildTarget, flavoredLinkerMapMode);
  BuildRuleParams testParams =
      params
          .withDeclaredDeps(cxxLinkAndCompileRules.deps)
          .copyAppendingExtraDeps(
              BuildableSupport.getDepsCollection(cxxLinkAndCompileRules.executable, ruleFinder));
  // Converter used to expand $(location ...) macros in `env` and `args`.
  StringWithMacrosConverter macrosConverter =
      StringWithMacrosConverter.builder()
          .setBuildTarget(buildTarget)
          .setCellPathResolver(cellRoots)
          .addExpanders(new LocationMacroExpander())
          .build();
  // Supplier which expands macros in the passed in test environment.
  ImmutableMap<String, Arg> testEnv =
      ImmutableMap.copyOf(
          Maps.transformValues(args.getEnv(), x -> macrosConverter.convert(x, graphBuilder)));
  ImmutableList<Arg> testArgs =
      args.getArgs()
          .stream()
          .map(x -> macrosConverter.convert(x, graphBuilder))
          .collect(ImmutableList.toImmutableList());
  // Deferred computation of the test's runtime deps.
  Function<SourcePathRuleFinder, ImmutableSortedSet<BuildRule>> additionalDeps =
      ruleFinderInner -> {
        ImmutableSortedSet.Builder<BuildRule> deps = ImmutableSortedSet.naturalOrder();
        // It's not uncommon for users to add dependencies onto other binaries that they run
        // during the test, so make sure to add them as runtime deps.
        deps.addAll(
            Sets.difference(
                params.getBuildDeps(), cxxLinkAndCompileRules.getBinaryRule().getBuildDeps()));
        // Add any build-time deps from any macros embedded in the `env` or `args` parameter.
        for (Arg part : Iterables.concat(testArgs, testEnv.values())) {
          deps.addAll(BuildableSupport.getDepsCollection(part, ruleFinderInner));
        }
        return deps.build();
      };
  // Wrap the binary in the framework-specific test rule.
  CxxTest test;
  CxxTestType type = args.getFramework().orElse(getDefaultTestType());
  switch (type) {
    case GTEST:
      {
        test =
            new CxxGtestTest(
                testBuildTarget,
                projectFilesystem,
                testParams,
                cxxLinkAndCompileRules.getBinaryRule(),
                cxxLinkAndCompileRules.executable,
                testEnv,
                testArgs,
                FluentIterable.from(args.getResources())
                    .transform(p -> PathSourcePath.of(projectFilesystem, p))
                    .toSortedSet(Ordering.natural()),
                args.getAdditionalCoverageTargets(),
                additionalDeps,
                args.getLabels(),
                args.getContacts(),
                args.getRunTestSeparately().orElse(false),
                // rule-level timeout, falling back to the config-wide default
                args.getTestRuleTimeoutMs()
                    .map(Optional::of)
                    .orElse(
                        cxxBuckConfig
                            .getDelegate()
                            .getView(TestBuckConfig.class)
                            .getDefaultTestRuleTimeoutMs()),
                cxxBuckConfig.getMaximumTestOutputSize());
        break;
      }
    case BOOST:
      {
        test =
            new CxxBoostTest(
                testBuildTarget,
                projectFilesystem,
                testParams,
                cxxLinkAndCompileRules.getBinaryRule(),
                cxxLinkAndCompileRules.executable,
                testEnv,
                testArgs,
                FluentIterable.from(args.getResources())
                    .transform(p -> PathSourcePath.of(projectFilesystem, p))
                    .toSortedSet(Ordering.natural()),
                args.getAdditionalCoverageTargets(),
                additionalDeps,
                args.getLabels(),
                args.getContacts(),
                args.getRunTestSeparately().orElse(false),
                args.getTestRuleTimeoutMs()
                    .map(Optional::of)
                    .orElse(
                        cxxBuckConfig
                            .getDelegate()
                            .getView(TestBuckConfig.class)
                            .getDefaultTestRuleTimeoutMs()));
        break;
      }
    default:
      {
        // unreachable for the known enum values; checkState documents the invariant
        Preconditions.checkState(false, "Unhandled C++ test type: %s", type);
        throw new RuntimeException();
      }
  }
  return test;
}
@Override
public void findDepsForTargetFromConstructorArgs(
BuildTarget buildTarget,
CellPathResolver cellRoots,
AbstractCxxTestDescriptionArg constructorArg,
ImmutableCollection.Builder<BuildTarget> extraDepsBuilder,
ImmutableCollection.Builder<BuildTarget> targetGraphOnlyDepsBuilder) {
// Get any parse time deps from the C/C++ platforms.
targetGraphOnlyDepsBuilder.addAll(
getCxxPlatform(buildTarget, constructorArg)
.getParseTimeDeps(buildTarget.getTargetConfiguration()));
// Add in any implicit framework deps.
extraDepsBuilder.addAll(
getImplicitFrameworkDeps(buildTarget.getTargetConfiguration(), constructorArg));
constructorArg
.getDepsQuery()
.ifPresent(
depsQuery ->
QueryUtils.extractParseTimeTargets(buildTarget, cellRoots, depsQuery)
.forEach(extraDepsBuilder::add));
}
  /** Returns the test framework assumed when a test rule does not set `framework` explicitly. */
  public CxxTestType getDefaultTestType() {
    return DEFAULT_TEST_TYPE;
  }
@Override
public boolean hasFlavors(ImmutableSet<Flavor> flavors) {
if (flavors.isEmpty()) {
return true;
}
if (flavors.contains(CxxCompilationDatabase.COMPILATION_DATABASE)) {
return true;
}
if (flavors.contains(CxxCompilationDatabase.UBER_COMPILATION_DATABASE)) {
return true;
}
if (StripStyle.FLAVOR_DOMAIN.containsAnyOf(flavors)) {
return true;
}
if (LinkerMapMode.FLAVOR_DOMAIN.containsAnyOf(flavors)) {
return true;
}
return getCxxPlatformsProvider().getUnresolvedCxxPlatforms().containsAnyOf(flavors)
|| !Sets.intersection(declaredPlatforms, flavors).isEmpty();
}
  /**
   * Creates metadata for this rule by delegating wholesale to the shared C++ binary
   * metadata factory, passing along only the target, graph builder, declared deps,
   * and requested metadata class. {@code cellRoots} and {@code selectedVersions}
   * are not consulted here.
   */
  @Override
  public <U> Optional<U> createMetadata(
      BuildTarget buildTarget,
      ActionGraphBuilder graphBuilder,
      CellPathResolver cellRoots,
      CxxTestDescriptionArg args,
      Optional<ImmutableMap<BuildTarget, Version>> selectedVersions,
      Class<U> metadataClass) {
    return cxxBinaryMetadataFactory.createMetadata(
        buildTarget, graphBuilder, args.getDeps(), metadataClass);
  }
  /** Rules produced by this description are safe to cache as a subgraph. */
  @Override
  public boolean producesCacheableSubgraph() {
    return true;
  }
  /** Looks up the default {@link CxxPlatformsProvider} from the toolchain provider. */
  private CxxPlatformsProvider getCxxPlatformsProvider() {
    return toolchainProvider.getByName(
        CxxPlatformsProvider.DEFAULT_NAME, CxxPlatformsProvider.class);
  }
  /**
   * Constructor arg for C++ test rules; an Immutables-generated value class is
   * derived from this interface.
   */
  @BuckStyleImmutable
  @Value.Immutable(copy = true)
  interface AbstractCxxTestDescriptionArg
      extends CxxBinaryDescription.CommonArg, HasContacts, HasTestTimeout {
    /** Test framework (gtest/boost); the description's default test type is used when absent. */
    Optional<CxxTestType> getFramework();

    /** Environment variables for the test invocation; values may contain macros. */
    ImmutableMap<String, StringWithMacros> getEnv();

    /** Command-line arguments for the test invocation; values may contain macros. */
    ImmutableList<StringWithMacros> getArgs();

    /** Whether the test must run in isolation; treated as false when absent. */
    Optional<Boolean> getRunTestSeparately();

    Optional<Boolean> getUseDefaultTestMain();

    /** Resource files made available to the test, sorted naturally. */
    @Value.NaturalOrder
    ImmutableSortedSet<Path> getResources();

    ImmutableSet<SourcePath> getAdditionalCoverageTargets();
  }
}
| |
// Copyright 2012-2013 Fraunhofer FOKUS
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package de.fraunhofer.fokus.fuzzing.fuzzino;
import static de.fraunhofer.fokus.fuzzing.fuzzino.TestUtil.checkGeneratorPartForNumFuzzedValues;
import static de.fraunhofer.fokus.fuzzing.fuzzino.TestUtil.checkOperatorPartForNumFuzzedValues;
import static de.fraunhofer.fokus.fuzzing.fuzzino.TestUtil.checkResponseDocForErrorResponse;
import static de.fraunhofer.fokus.fuzzing.fuzzino.TestUtil.checkResponseDocForNumCollectionResponses;
import static de.fraunhofer.fokus.fuzzing.fuzzino.TestUtil.checkResponseDocForNumStringResponses;
import static de.fraunhofer.fokus.fuzzing.fuzzino.TestUtil.checkResponseDocForNumStructureResponses;
import static de.fraunhofer.fokus.fuzzing.fuzzino.TestUtil.checkResponseForMoreValuesAttribute;
import static de.fraunhofer.fokus.fuzzing.fuzzino.TestUtil.checkResponseForNoMoreValuesWarning;
import static de.fraunhofer.fokus.fuzzing.fuzzino.TestUtil.checkResponseForNumGeneratorParts;
import static de.fraunhofer.fokus.fuzzing.fuzzino.TestUtil.checkResponseForNumIllegalGenerators;
import static de.fraunhofer.fokus.fuzzing.fuzzino.TestUtil.checkResponseForNumIllegalOperators;
import static de.fraunhofer.fokus.fuzzing.fuzzino.TestUtil.checkResponseForNumIllegalRequestFormats;
import static de.fraunhofer.fokus.fuzzing.fuzzino.TestUtil.checkResponseForNumOperatorParts;
import static de.fraunhofer.fokus.fuzzing.fuzzino.TestUtil.checkResponseForWarningsPart;
import static de.fraunhofer.fokus.fuzzing.fuzzino.TestUtil.createContdRequest;
import static de.fraunhofer.fokus.fuzzing.fuzzino.TestUtil.getGeneratorPartFromResponseByName;
import static de.fraunhofer.fokus.fuzzing.fuzzino.TestUtil.getOperatorPartFromNumberResponseByName;
import static de.fraunhofer.fokus.fuzzing.fuzzino.TestUtil.getResponseDocForRequest;
import static org.junit.Assert.assertTrue;
import java.io.File;
import java.util.LinkedList;
import java.util.List;
import java.util.UUID;
import javax.xml.bind.JAXBException;
import org.junit.Before;
import org.junit.Test;
import org.xml.sax.SAXException;
import de.fraunhofer.fokus.fuzzing.fuzzino.exceptions.UnknownFuzzingHeuristicException;
import de.fraunhofer.fokus.fuzzing.fuzzino.heuristics.ComputableFuzzingHeuristic;
import de.fraunhofer.fokus.fuzzing.fuzzino.heuristics.generators.IntegerGenerator;
import de.fraunhofer.fokus.fuzzing.fuzzino.heuristics.generators.IntegerGeneratorFactory;
import de.fraunhofer.fokus.fuzzing.fuzzino.heuristics.operators.IntegerOperator;
import de.fraunhofer.fokus.fuzzing.fuzzino.heuristics.operators.IntegerOperatorFactory;
import de.fraunhofer.fokus.fuzzing.fuzzino.request.IntegerSpecification;
import de.fraunhofer.fokus.fuzzing.fuzzino.request.NumberRequest;
import de.fraunhofer.fokus.fuzzing.fuzzino.request.Request;
import de.fraunhofer.fokus.fuzzing.fuzzino.request.RequestFactory;
import de.fraunhofer.fokus.fuzzing.fuzzino.request.impl.RequestImpl;
import de.fraunhofer.fokus.fuzzing.fuzzino.response.GeneratorSpecificFuzzedValues;
import de.fraunhofer.fokus.fuzzing.fuzzino.response.NumberResponse;
import de.fraunhofer.fokus.fuzzing.fuzzino.response.OperatorSpecificFuzzedValues;
import de.fraunhofer.fokus.fuzzing.fuzzino.response.Response;
import de.fraunhofer.fokus.fuzzing.fuzzino.util.ResourcePath;
import de.fraunhofer.fokus.fuzzing.fuzzino.util.ResourceResolver;
/**
 * Tests {@link IntegerRequestProcessor}: which fuzzing heuristics it selects for a
 * standard integer request, how many fuzzed values it returns, and how continued
 * ("contd") requests page through the remaining values.
 */
public class IntegerRequestProcessorTest extends FuzzinoTest {

    private static final long SEED = 4711;
    private static final String NO_PARAM = null;

    /** 32-bit signed integer specification used when re-creating heuristics directly. */
    private static final IntegerSpecification NUMBER_SPEC = RequestFactory.INSTANCE.createNumberSpecification();
    static {
        NUMBER_SPEC.setBits(32);
        NUMBER_SPEC.setIsSigned(true);
    }

    /** Processor under test; re-created before each test from ValidIntegerRequest.request.xml. */
    protected IntegerRequestProcessor reqProc;

    private static String numRequestRoot = ResourcePath.TEST_RESOURCE + "reworked/integerRequests/";

    @Before
    public void init() throws Exception {
        Request request = RequestImpl.unmarshall(new File(numRequestRoot + "ValidIntegerRequest.request.xml"));
        NumberRequest numberRequest = request.getNumberRequests().get(0);
        reqProc = new IntegerRequestProcessor(numberRequest, UUID.randomUUID());
    }

    /**
     * Asserts that the response document contains no error response and no string,
     * collection, or structure responses. Shared by all tests here, which only ever
     * expect number responses. (Extracted from six identical 4-line sequences.)
     */
    private static void checkNoErrorsAndOnlyNumberResponses(Response response) {
        checkResponseDocForErrorResponse(response, false);
        checkResponseDocForNumStringResponses(response, 0);
        checkResponseDocForNumCollectionResponses(response, 0);
        checkResponseDocForNumStructureResponses(response, 0);
    }

    @Test
    public void testGenerators() {
        // The standard request yields two heuristics in total; the first must be
        // the BoundaryNumbers generator (the second is checked in testOperators).
        int expectedNumOfHeuristics = 2;
        int actualNumOfHeuristics = reqProc.getAllFuzzingHeuristics().size();
        assertTrue("Invalid number of generators: was "+ actualNumOfHeuristics + " instead of " + expectedNumOfHeuristics,
                actualNumOfHeuristics == expectedNumOfHeuristics);
        String expectedGeneratorName = "BoundaryNumbers";
        ComputableFuzzingHeuristic<?> heuristic = reqProc.getAllFuzzingHeuristics().get(0);
        String actualGeneratorName = heuristic.getName();
        assertTrue("Invalid generator: was " + actualGeneratorName + " instead of " + expectedGeneratorName,
                actualGeneratorName.equals(expectedGeneratorName));
    }

    @Test
    public void testOperators() {
        // Same heuristic list as testGenerators; the second entry must be the
        // NumericalVariance operator.
        int expectedNumOfHeuristics = 2;
        int actualNumOfHeuristics = reqProc.getAllFuzzingHeuristics().size();
        assertTrue("Invalid number of operators: was " + actualNumOfHeuristics + " instead of " + expectedNumOfHeuristics,
                actualNumOfHeuristics == expectedNumOfHeuristics);
        String expectedOperatorName = "NumericalVariance";
        ComputableFuzzingHeuristic<?> heuristic = reqProc.getAllFuzzingHeuristics().get(1);
        String actualOperatorName = heuristic.getName();
        assertTrue("Invalid operator: was " + actualOperatorName + " instead of " + expectedOperatorName,
                expectedOperatorName.equals(actualOperatorName));
    }

    @Test
    public void testValues() throws UnknownFuzzingHeuristicException {
        // Re-create the two heuristics the processor is expected to use and check
        // that the processor offers exactly the sum of their value counts.
        List<Long> listOfLongs = new LinkedList<>();
        listOfLongs.add(10L);
        IntegerGenerator badNumbers = IntegerGeneratorFactory.INSTANCE.create("BoundaryNumbers", NO_PARAM, NUMBER_SPEC, SEED);
        IntegerOperator numericalVariance = IntegerOperatorFactory.INSTANCE.create("NumericalVariance", listOfLongs, "10", null, SEED);
        int expectedNumOfValues = badNumbers.size() + numericalVariance.size();
        int actualNumOfValues = reqProc.size();
        assertTrue("Invalid number of values: was " + actualNumOfValues + " instead of " + expectedNumOfValues,
                actualNumOfValues == expectedNumOfValues);
    }

    @Test
    public void testValidIntegerRequest() throws JAXBException, SAXException {
        String requestFilename = numRequestRoot + "ValidIntegerRequest.request.xml";
        Response response = getResponseDocForRequest(requestFilename);
        NumberResponse<?> numberResponse = response.getNumberResponses().get(0);

        checkResponseForNumGeneratorParts(numberResponse, 1);
        GeneratorSpecificFuzzedValues<?> generatorPart = getGeneratorPartFromResponseByName(numberResponse, "BoundaryNumbers");
        checkGeneratorPartForNumFuzzedValues(generatorPart, 17);

        checkResponseForNumOperatorParts(numberResponse, 1);
        OperatorSpecificFuzzedValues<?> operatorPart = getOperatorPartFromNumberResponseByName(numberResponse, "SimpleNumericalVarianceOperator", "10");
        checkOperatorPartForNumFuzzedValues(operatorPart, 20);

        checkResponseForWarningsPart(numberResponse, false);
        checkNoErrorsAndOnlyNumberResponses(response);
    }

    @Test
    public void testValidIntegerRequest_BoundaryNumbers() throws JAXBException, SAXException {
        // Note: "Reqeuest" matches the actual fixture file name on disk.
        String requestFilename = numRequestRoot + "ValidIntegerReqeuest_BoundaryNumbers.request.xml";
        Response response = getResponseDocForRequest(requestFilename);
        NumberResponse<?> numberResponse = response.getNumberResponses().get(0);

        checkResponseForNumGeneratorParts(numberResponse, 1);
        GeneratorSpecificFuzzedValues<?> generatorPart = getGeneratorPartFromResponseByName(numberResponse, "BoundaryNumbers");
        checkGeneratorPartForNumFuzzedValues(generatorPart, 5);
        checkResponseForNumOperatorParts(numberResponse, 0);
        checkResponseForWarningsPart(numberResponse, false);
        checkNoErrorsAndOnlyNumberResponses(response);
    }

    @Test
    public void testValidIntegerRequest_BoundaryNumbersContinued() throws JAXBException, SAXException {
        String requestFilename = numRequestRoot + "ValidIntegerReqeuest_BoundaryNumbers.request.xml";
        Response response = getResponseDocForRequest(requestFilename);
        NumberResponse<?> numberResponse = response.getNumberResponses().get(0);

        // Continue the request: the remaining 12 boundary values should be
        // returned and the moreValues flag cleared.
        String contdRequestFilename = numRequestRoot + "ValidIntegerReqeuest_BoundaryNumbers_contd.request.xml";
        int expectedNumFuzzedValues = 12;
        createContdRequest(numberResponse, contdRequestFilename, expectedNumFuzzedValues);
        Response responseContd = getResponseDocForRequest(contdRequestFilename);
        NumberResponse<?> numberResponseContd = responseContd.getNumberResponses().get(0);

        checkResponseForMoreValuesAttribute(numberResponseContd, false);
        checkResponseForNumGeneratorParts(numberResponseContd, 1);
        GeneratorSpecificFuzzedValues<?> generatorPart = getGeneratorPartFromResponseByName(numberResponseContd, "BoundaryNumbers");
        checkGeneratorPartForNumFuzzedValues(generatorPart, expectedNumFuzzedValues);
        checkResponseForNumOperatorParts(numberResponseContd, 0);
        checkResponseForWarningsPart(numberResponseContd, false);
        checkNoErrorsAndOnlyNumberResponses(responseContd);
    }

    @Test
    public void testValidIntegerRequest_BoundaryNumbersContinuedTwoTimes() throws JAXBException, SAXException {
        String requestFilename = numRequestRoot + "ValidIntegerReqeuest_BoundaryNumbers.request.xml";
        Response response = getResponseDocForRequest(requestFilename);
        NumberResponse<?> numberResponse = response.getNumberResponses().get(0);

        // First continuation takes 7 of the remaining values ...
        String contdRequestFilename = numRequestRoot + "ValidIntegerReqeuest_BoundaryNumbers_contd.request.xml";
        int expectedNumFuzzedValues = 7;
        createContdRequest(numberResponse, contdRequestFilename, expectedNumFuzzedValues);
        Response responseContd = getResponseDocForRequest(contdRequestFilename);
        NumberResponse<?> numberResponseContd = responseContd.getNumberResponses().get(0);

        // ... and the second continuation takes the final 5.
        int expectedNumFuzzedValues2 = 5;
        String contd2RequestFilename = numRequestRoot + "ValidIntegerReqeuest_BoundaryNumbers_contd2.request.xml";
        createContdRequest(numberResponseContd, contd2RequestFilename, expectedNumFuzzedValues2);
        Response responseContd2 = getResponseDocForRequest(contd2RequestFilename);
        NumberResponse<?> numberResponseContd2 = responseContd2.getNumberResponses().get(0);

        checkResponseForMoreValuesAttribute(numberResponseContd2, false);
        checkResponseForNumGeneratorParts(numberResponseContd2, 1);
        GeneratorSpecificFuzzedValues<?> generatorPart = getGeneratorPartFromResponseByName(numberResponseContd2, "BoundaryNumbers");
        checkGeneratorPartForNumFuzzedValues(generatorPart, expectedNumFuzzedValues2);
        checkResponseForNumOperatorParts(numberResponseContd2, 0);
        checkResponseForWarningsPart(numberResponseContd2, false);
        checkNoErrorsAndOnlyNumberResponses(responseContd2);
    }

    @Test
    public void testContdIntegerRequestWithMoreValuesThanAvailable() throws JAXBException, SAXException {
        String requestFilename = numRequestRoot + "ValidIntegerReqeuest_BoundaryNumbers.request.xml";
        Response response = getResponseDocForRequest(requestFilename);
        NumberResponse<?> numberResponse = response.getNumberResponses().get(0);

        // First continuation asks for more values (100) than remain ...
        String contdRequestFilename = numRequestRoot + "ValidIntegerReqeuest_BoundaryNumbers_MoreValuesContd.request.xml";
        createContdRequest(numberResponse, contdRequestFilename, 100);
        Response responseDocContd = getResponseDocForRequest(contdRequestFilename);
        NumberResponse<?> numberResponseContd = responseDocContd.getNumberResponses().get(0);

        // ... so a second continuation must yield no values, only a
        // "no more values" warning.
        String contd2RequestFilename = numRequestRoot + "ValidIntegerReqeuest_BoundaryNumbers_MoreValuesContd2.request.xml";
        createContdRequest(numberResponseContd, contd2RequestFilename, 1);
        Response responseDocContd2 = getResponseDocForRequest(contd2RequestFilename);
        NumberResponse<?> numberResponseContd2 = responseDocContd2.getNumberResponses().get(0);

        checkResponseForNoMoreValuesWarning(numberResponseContd2);
        checkResponseForNumGeneratorParts(numberResponseContd2, 0);
        checkResponseForNumOperatorParts(numberResponseContd2, 0);
        checkResponseForNumIllegalGenerators(numberResponseContd2, 0);
        checkResponseForNumIllegalOperators(numberResponseContd2, 0);
        checkResponseForNumIllegalRequestFormats(numberResponseContd2, 0);
        checkNoErrorsAndOnlyNumberResponses(responseDocContd2);
    }

    @Test
    public void testValidIntegerRequest_NumericalVariance() throws JAXBException, SAXException {
        String requestFilename = numRequestRoot + "ValidIntegerRequest_NumericalVariance.request.xml";
        Response response = getResponseDocForRequest(requestFilename);
        NumberResponse<?> numberResponse = response.getNumberResponses().get(0);

        checkResponseForNumOperatorParts(numberResponse, 1);
        OperatorSpecificFuzzedValues<?> operatorPart = getOperatorPartFromNumberResponseByName(numberResponse, "SimpleNumericalVarianceOperator", "10");
        int expNumFuzzedValues = 20;
        checkOperatorPartForNumFuzzedValues(operatorPart, expNumFuzzedValues);
        checkResponseForNumGeneratorParts(numberResponse, 0);
        checkResponseForWarningsPart(numberResponse, false);
        checkNoErrorsAndOnlyNumberResponses(response);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.harmony.test.func.api.javax.management.monitor.countermonitor;
import javax.management.JMException;
import javax.management.MBeanServer;
import javax.management.MBeanServerFactory;
import javax.management.MalformedObjectNameException;
import javax.management.Notification;
import javax.management.NotificationListener;
import javax.management.ObjectName;
import javax.management.monitor.CounterMonitor;
import org.apache.harmony.share.Test;
/**
* This test is intended to test the functionality of CounterMonitor (and
* supers) getter and setter methods. It calls consequently each setter method
* and then checks that the value returned by getter method is the same. There
* are three categories of values passed to each setter method during test:
* common intermediate values, boundary values (such as Long.MAX_VALUE) and
* invalid value (nulls or negatives). Invalid values are passed to method as
* parameters to check that corresponding exception is raised.
*
*/
/**
 * This test is intended to test the functionality of CounterMonitor (and
 * supers) getter and setter methods. It calls consequently each setter method
 * and then checks that the value returned by getter method is the same. There
 * are three categories of values passed to each setter method during test:
 * common intermediate values, boundary values (such as Long.MAX_VALUE) and
 * invalid value (nulls or negatives). Invalid values are passed to method as
 * parameters to check that corresponding exception is raised.
 */
public class GetterSetterTest extends Test implements NotificationListener {

    /** Flag set when a jmx.monitor.counter.threshold notification is received. */
    private static boolean isNotification = false;

    /** Test result */
    private boolean res;

    /**
     * @param args
     */
    public static void main(String[] args) {
        int res = new GetterSetterTest().test(args);
        System.exit(res);
    }

    /**
     * Records whether the received notification is of type
     * jmx.monitor.counter.threshold.
     *
     * @param notification
     * @param handback
     */
    public void handleNotification(Notification notification, Object handback) {
        log.info(notification.getType() + " notification received");
        // Direct assignment replaces the original if/else over the same condition.
        isNotification = notification.getType().equals("jmx.monitor.counter.threshold");
    }

    /**
     * @see org.apache.harmony.share.Test#test()
     */
    public int test() {
        res = thresholdTest();
        CounterMonitor monitor = new CounterMonitor();

        /* Checking basic functionality of setter/getter methods */

        /* setting difference mode flag */
        monitor.setDifferenceMode(false);

        /* setting granularity period */
        // A freshly constructed monitor is expected to report the default
        // granularity period of 10000 ms before any setter is called.
        if (monitor.getGranularityPeriod() != 10000) {
            res = false;
        }
        try {
            monitor.setGranularityPeriod(-1684629);
            log.info("FAIL: Exception expected when passing"
                + " negative value to setGranularityPeriod");
            res = false;
        } catch (IllegalArgumentException e) {
            if (monitor.getGranularityPeriod() != 10000) {
                log.info("FAIL: Granularity value "
                    + "changed when exception was raised");
                res = false;
            }
        }
        try {
            monitor.setGranularityPeriod(0);
            log.info("FAIL: Exception expected when passing "
                + "zero value to setGranularityPeriod");
            res = false;
        } catch (IllegalArgumentException e) {
            if (monitor.getGranularityPeriod() != 10000) {
                log.info("FAIL: Granularity value changed "
                    + "when exception was raised");
                res = false;
            }
        }
        try {
            monitor.setGranularityPeriod(2459743);
        } catch (Throwable e) {
            log.info("FAIL: Unexpected exception caught "
                + "when setting granularity period");
            e.printStackTrace();
            return fail("FAILED");
        }

        /* setting init threshold */
        try {
            monitor.setInitThreshold(null);
            log.info("FAIL: Exception expected when "
                + "passing null value to setInitThreshold");
            res = false;
        } catch (IllegalArgumentException e) {
            /* Correct condition */
        }
        try {
            Number value = new Integer(-5);
            monitor.setInitThreshold(value);
            log.info("FAIL: Exception expected when passing"
                + " negative value to setInitThreshold");
            res = false;
        } catch (IllegalArgumentException e) {
            /* Correct condition */
        }
        try {
            Number value = new Float(234.0);
            monitor.setInitThreshold(value);
        } catch (Throwable e) {
            log.info("FAIL: Unexpected exception caught"
                + " when setting init threshold");
            e.printStackTrace();
            return fail("FAILED");
        }
        try {
            Number value = new Float(-235.5);
            monitor.setInitThreshold(value);
            log.info("FAIL: Exception expected when "
                + "passing negative value to setInitThreshold");
            res = false;
        } catch (IllegalArgumentException e) {
            // The previously set value (234.0) must survive the failed set.
            Number value = monitor.getInitThreshold();
            if (value.intValue() != 234) {
                log.info("FAIL: InitThreshold value "
                    + "changed when exception raised");
                res = false;
            }
        }
        try {
            Number value = new Integer(436);
            monitor.setInitThreshold(value);
        } catch (Throwable e) {
            log.info("FAIL: Unexpected exception caught "
                + "when setting init threshold");
            e.printStackTrace();
            return fail("FAILED");
        }

        /* setting modulus */
        try {
            monitor.setModulus(null);
            log.info("FAIL: Exception expected when "
                + "passing null value to setModulus");
            res = false;
        } catch (IllegalArgumentException e) {
            /* Correct condition */
        }
        try {
            Number value = new Integer(-5);
            monitor.setModulus(value);
            log.info("FAIL: Exception expected when"
                + " passing negative value to setModulus");
            res = false;
        } catch (IllegalArgumentException e) {
            /* Correct condition */
        }
        try {
            Number value = new Float(234.0);
            monitor.setModulus(value);
        } catch (Throwable e) {
            log.info("FAIL: Unexpected exception caught "
                + "when setting modulus");
            e.printStackTrace();
            return fail("FAILED");
        }
        try {
            Number value = new Float(-235.5);
            monitor.setModulus(value);
            log.info("FAIL: Exception expected when "
                + "passing negative value to setModulus");
            res = false;
        } catch (IllegalArgumentException e) {
            // The previously set value (234.0) must survive the failed set.
            Number value = monitor.getModulus();
            if (value.intValue() != 234) {
                log.info("FAIL: Modulus value "
                    + "changed when exception raised");
                res = false;
            }
        }
        try {
            Number value = new Integer(436);
            monitor.setModulus(value);
        } catch (Throwable e) {
            log.info("FAIL: Unexpected exception "
                + "caught when setting modulus");
            e.printStackTrace();
            return fail("FAILED");
        }

        /* setting notify flag */
        monitor.setNotify(true);

        /* setting offset */
        try {
            monitor.setOffset(null);
            log.info("FAIL: Exception expected "
                + "when passing null value to setOffset");
            res = false;
        } catch (IllegalArgumentException e) {
            /* Correct condition */
        }
        try {
            Number value = new Integer(-5);
            monitor.setOffset(value);
            log.info("FAIL: Exception expected when "
                + "passing negative value to setOffset");
            res = false;
        } catch (IllegalArgumentException e) {
            /* Correct condition */
        }
        try {
            Number value = new Float(234.0);
            monitor.setOffset(value);
        } catch (Throwable e) {
            log.info("FAIL: Unexpected exception "
                + "caught when setting offset");
            e.printStackTrace();
            return fail("FAILED");
        }
        try {
            Number value = new Float(-235.5);
            monitor.setOffset(value);
            log.info("FAIL: Exception expected when "
                + "passing negative value to setOffset");
            res = false;
        } catch (IllegalArgumentException e) {
            // The previously set value (234.0) must survive the failed set.
            Number value = monitor.getOffset();
            if (value.intValue() != 234) {
                // Fixed message: this branch checks the Offset value (the
                // original said "InitThreshold", a copy-paste error).
                log.info("FAIL: Offset value"
                    + " changed when exception raised");
                res = false;
            }
        }
        try {
            Number value = new Integer(436);
            monitor.setOffset(value);
        } catch (Throwable e) {
            log.info("FAIL: Unexpected exception "
                + "caught when setting offset");
            e.printStackTrace();
            return fail("FAILED");
        }

        // Cross-check that none of the later setter calls disturbed the
        // values established above.
        if (monitor.getModulus().intValue() != 436) {
            log.info("FAIL: Modulus value changed");
            res = false;
        }
        if (monitor.getInitThreshold().intValue() != 436) {
            log.info("FAIL: InitThreshold value changed");
            res = false;
        }
        if (monitor.getOffset().intValue() != 436) {
            log.info("FAIL: Offset value changed");
            res = false;
        }
        if (monitor.getDifferenceMode() != false) {
            log.info("FAIL: Difference mode flag changed");
            res = false;
        }
        if (monitor.getGranularityPeriod() != 2459743) {
            log.info("FAIL: Granularity period value changed");
            res = false;
        }
        if (monitor.getNotify() != true) {
            log.info("FAIL: Notify flag changed");
            res = false;
        }

        /* checking functionality on boundary values */
        Number value = null;
        long val;
        try {
            monitor.setInitThreshold(new Long(Long.MAX_VALUE));
        } catch (Throwable e) {
            log.info("FAIL: Unexpected exception caught "
                + "when setting InitThreshold as MAX_LONG");
            e.printStackTrace();
            res = false;
        }
        value = monitor.getInitThreshold();
        val = value.longValue();
        if (val != Long.MAX_VALUE) {
            log.info("FAIL: Invalid value returned by getInitThreshold: " + val
                + " instead of Long.MAX_VALUE");
            res = false;
        }
        try {
            monitor.setOffset(new Long(Long.MAX_VALUE));
        } catch (Throwable e) {
            log.info("FAIL: Unexpected exception "
                + "caught when setting Offset as MAX_LONG");
            e.printStackTrace();
            res = false;
        }
        value = monitor.getOffset();
        val = value.longValue();
        if (val != Long.MAX_VALUE) {
            log.info("FAIL: Invalid value returned by getOffset: " + val
                + " instead of Long.MAX_VALUE");
            res = false;
        }
        try {
            monitor.setModulus(new Long(Long.MAX_VALUE));
        } catch (Throwable e) {
            log.info("FAIL: Unexpected exception "
                + "caught when setting Modulus as MAX_LONG");
            e.printStackTrace();
            res = false;
        }
        value = monitor.getModulus();
        val = value.longValue();
        if (val != Long.MAX_VALUE) {
            log.info("FAIL: Invalid value returned by getModulus: " + val
                + " instead of Long.MAX_VALUE");
            res = false;
        }
        return res ? pass("PASSED") : fail("FAILED");
    }

    /**
     * Checks that changing the init threshold propagates to the per-object
     * thresholds of all observed objects and that no threshold notification
     * is sent for values below the threshold.
     *
     * @return result of the subTest
     */
    public boolean thresholdTest() {
        isNotification = false;
        CounterMonitor monitor = new CounterMonitor();
        MBeanServer server = MBeanServerFactory.createMBeanServer();
        ObjectName obj1, obj2, cm;
        Counter c1, c2;
        try {
            obj1 = new ObjectName(Counter.COUNTER_NAME_TEMPLATE + "1");
            obj2 = new ObjectName(Counter.COUNTER_NAME_TEMPLATE + "2");
            cm = new ObjectName(
                "org.apache.harmony.test.func.api.javax.management.monitor."
                    + "countermonitor:type=CounterMonitor,id=1");
        } catch (MalformedObjectNameException e) {
            e.printStackTrace();
            log.info("INTERNAL ERROR: Invalid object name");
            return false;
        }
        try {
            c1 = new Counter(0, 100);
            c2 = new Counter(0, 100);
            server.registerMBean(c1, obj1);
            server.registerMBean(c2, obj2);
            server.registerMBean(monitor, cm);
        } catch (JMException e) {
            e.printStackTrace();
            log.info("INTERNAL ERROR: MBean registration failed");
            return false;
        }
        monitor.addObservedObject(obj1);
        monitor.addObservedObject(obj2);
        monitor.setObservedAttribute("Value");
        monitor.setGranularityPeriod(10);
        monitor.setOffset(new Integer(1));
        monitor.setInitThreshold(new Integer(10));
        monitor.setNotify(true);
        monitor.addNotificationListener(this, null, null);
        monitor.setDifferenceMode(false);
        monitor.start();
        // Value 5 stays below the threshold (10), so no notification may fire.
        c1.setValue(5);
        try {
            Thread.sleep(5000);
        } catch (InterruptedException e) {
            e.printStackTrace();
            log.info("Thread interrupted");
            return false;
        }
        Number th1 = monitor.getThreshold(obj1);
        Number th2 = monitor.getThreshold(obj2);
        log.info("Obj1: Attribute value is " + c1.getValue());
        log.info("Threshold: Number class is " + th1.getClass().getName()
            + ", value is " + th1.intValue());
        log.info("Obj2: Attribute value is " + c2.getValue());
        log.info("Threshold: Number class is " + th2.getClass().getName()
            + ", value is " + th2.intValue());
        monitor.setInitThreshold(new Integer(5));
        th1 = monitor.getThreshold(obj1);
        log.info("Init threshold changed");
        if (th1.intValue() != 5) {
            log.info("FAIL: Unexpected value of threshold for obj1");
            return false;
        }
        th2 = monitor.getThreshold(obj2);
        if (th2.intValue() != 5) {
            log.info("FAIL: Unexpected value of threshold for obj2");
            return false;
        }
        if (isNotification) {
            log.info("FAIL:Error notification had been sent");
            return false;
        }
        return true;
    }
}
| |
/*
* SNMP Package
*
* Copyright (C) 2004, Jonathan Sevy <jsevy@mcs.drexel.edu>
*
* This is free software. Redistribution and use in source and binary forms, with
* or without modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
* OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/
package br.hugo.sistema.snmp;
import java.util.*;
import java.io.*;
/**
* Class representing ASN.1 object identifiers. These are unbounded sequences (arrays) of
* natural numbers, written as dot-separated strings.
*/
public class SNMPObjectIdentifier extends SNMPObject
{
private long[] digits; // array of longs
protected byte tag = SNMPBERCodec.SNMPOBJECTIDENTIFIER;
    /**
     * Create a new empty object identifier (0-length component array).
     */
    public SNMPObjectIdentifier()
    {
        digits = new long[0];
    }
    /**
     * Create a new object identifier from the supplied string of dot-separated nonnegative
     * decimal integer values (e.g. "1.3.6.1.2.1").
     * @throws SNMPBadValueException Indicates incorrectly-formatted string supplied.
     */
    public SNMPObjectIdentifier(String digitString)
        throws SNMPBadValueException
    {
        // Parsing and validation are delegated to convertDigitString,
        // defined elsewhere in this class.
        convertDigitString(digitString);
    }
/**
* Create a new object identifier from the supplied array of nonegative
* integer values.
* @throws SNMPBadValueException Negative value(s) supplied.
*/
public SNMPObjectIdentifier(int[] digits)
throws SNMPBadValueException
{
long[] longDigits = new long[digits.length];
for (int i = 0; i < digits.length; i++)
{
if (digits[i] < 0)
throw new SNMPBadValueException("Negative value supplied for SNMPObjectIdentifier.");
longDigits[i] = digits[i];
}
this.digits = longDigits;
}
/**
* Create a new object identifier from the supplied array of nonegative
* long values.
* @throws SNMPBadValueException Negative value(s) supplied.
*/
public SNMPObjectIdentifier(long[] digits)
throws SNMPBadValueException
{
for (int i = 0; i < digits.length; i++)
{
if (digits[i] < 0)
throw new SNMPBadValueException("Negative value supplied for SNMPObjectIdentifier.");
}
this.digits = digits;
}
/**
* Used to initialize from the BER encoding, as received in a response from
* an SNMP device responding to an SNMPGetRequest.
* @throws SNMPBadValueException Indicates an invalid BER encoding supplied. Shouldn't
* occur in normal operation, i.e., when valid responses are received from devices.
*/
protected SNMPObjectIdentifier(byte[] enc)
throws SNMPBadValueException
{
extractFromBEREncoding(enc);
}
/**
* Return array of integers corresponding to components of identifier.
*/
public Object getValue()
{
return digits;
}
/**
* Used to set the value from an integer or long array containing the identifier components, or from
* a String containing a dot-separated sequence of nonegative values.
* @throws SNMPBadValueException Indicates an incorrect object type supplied, or negative array
* elements, or an incorrectly formatted String.
*/
public void setValue(Object digits)
throws SNMPBadValueException
{
if (digits instanceof long[])
{
for (int i = 0; i < ((long[])digits).length; i++)
{
if (((long[])digits)[i] < 0)
throw new SNMPBadValueException("Negative value supplied for SNMPObjectIdentifier.");
}
this.digits = (long[])digits;
}
else if (digits instanceof int[])
{
long[] longDigits = new long[((int[])digits).length];
for (int i = 0; i < ((int[])digits).length; i++)
{
if (((int[])digits)[i] < 0)
throw new SNMPBadValueException("Negative value supplied for SNMPObjectIdentifier.");
longDigits[i] = ((int[])digits)[i];
}
this.digits = longDigits;
}
else if (digits instanceof String)
{
convertDigitString((String)digits);
}
else
throw new SNMPBadValueException(" Object Identifier: bad object supplied to set value ");
}
/**
* Return BER encoding for this object identifier.
*/
protected byte[] getBEREncoding()
{
ByteArrayOutputStream outBytes = new ByteArrayOutputStream();
byte type = SNMPBERCodec.SNMPOBJECTIDENTIFIER;
// write contents of array of values
byte[] data = encodeArray();
// calculate encoding for length of data
byte[] len = SNMPBERCodec.encodeLength(data.length);
// encode T,L,V info
outBytes.write(type);
outBytes.write(len, 0, len.length);
outBytes.write(data, 0, data.length);
return outBytes.toByteArray();
}
private byte[] encodeArray()
{
ByteArrayOutputStream outBytes = new ByteArrayOutputStream();
int numElements = digits.length;
// encode first two identifier digits as one byte, using the 40*x + y rule;
// of course, if only one element, just use 40*x; if none, do nothing
if (numElements >= 2)
{
outBytes.write((byte)(40*digits[0] + digits[1]));
}
else if (numElements ==1)
{
outBytes.write((byte)(40*digits[0]));
}
for (int i = 2; i < numElements; ++i)
{
byte[] nextBytes = encodeValue(digits[i]);
outBytes.write(nextBytes, 0, nextBytes.length);
}
return outBytes.toByteArray();
}
private byte[] encodeValue(long v)
{
// see how many bytes are needed: each value uses just
// 7 bits of each byte, with high-order bit functioning as
// a continuation marker
int numBytes = 0;
long temp = v;
do
{
++numBytes;
temp = (long)Math.floor(temp / 128);
}
while (temp > 0);
byte[] enc = new byte[numBytes];
// encode lowest-order byte, without setting high bit
enc[numBytes-1] = (byte)(v % 128);
v = (long)Math.floor(v / 128);
//.encode other bytes with high bit set
for (int i = numBytes-2; i >= 0; --i)
{
enc[i] = (byte)((v % 128) + 128);
v = (long)Math.floor(v / 128);
}
return enc;
}
private void convertDigitString(String digitString)
throws SNMPBadValueException
{
try
{
StringTokenizer st = new StringTokenizer(digitString, " .");
int size = 0;
while (st.hasMoreTokens())
{
// figure out how many values are in string
size++;
st.nextToken();
}
long[] returnDigits = new long[size];
st = new StringTokenizer(digitString, " .");
for (int i = 0; i < size; i++)
{
returnDigits[i] = Long.parseLong(st.nextToken());
if (returnDigits[i] < 0)
throw new SNMPBadValueException(" Object Identifier: bad string supplied to set value ");
}
digits = returnDigits;
}
catch (NumberFormatException e)
{
throw new SNMPBadValueException(" Object Identifier: bad string supplied for object identifier value ");
}
}
private void extractFromBEREncoding(byte[] enc)
throws SNMPBadValueException
{
// note: masks must be ints; byte internal representation issue(?)
int bitTest = 0x80; // test for leading 1
int highBitMask = 0x7F; // mask out high bit for value
// first, compute number of "digits";
// will just be number of bytes with leading 0's
int numInts = 0;
for (int i = 0; i < enc.length; i++)
{
if ((enc[i] & bitTest) == 0) //high-order bit not set; count
numInts++;
}
if (numInts > 0)
{
// create new int array to hold digits; since first value is 40*x + y,
// need one extra entry in array to hold this.
digits = new long[numInts + 1];
int currentByte = -1; // will be incremented to 0
long value = 0;
// read in values 'til get leading 0 in byte
do
{
currentByte++;
value = value*128 + (enc[currentByte] & highBitMask);
}
while ((enc[currentByte] & bitTest) > 0); // implies high bit set!
// now handle 40a + b
digits[0] = (long)Math.floor(value / 40);
digits[1] = value % 40;
// now read in rest!
for (int i = 2; i < numInts + 1; i++)
{
// read in values 'til get leading 0 in byte
value = 0;
do
{
currentByte++;
value = value*128 + (enc[currentByte] & highBitMask);
}
while ((enc[currentByte] & bitTest) > 0);
digits[i] = value;
}
}
else
{
// no digits; create empty digit array
digits = new long[0];
}
}
/*
public boolean equals(SNMPObjectIdentifier other)
{
long[] otherDigits = (long[])(other.getValue());
boolean areEqual = true;
if (digits.length != otherDigits.length)
{
areEqual = false;
}
else
{
for (int i = 0; i < digits.length; i++)
{
if (digits[i] != otherDigits[i])
{
areEqual = false;
break;
}
}
}
return areEqual;
}
*/
/**
* Checks the internal arrays for equality.
*/
public boolean equals(Object other)
{
// false if other is null
if (other == null)
{
return false;
}
// check first to see that they're both of the same class
if (!this.getClass().equals(other.getClass()))
{
return false;
}
SNMPObjectIdentifier otherSNMPObject = (SNMPObjectIdentifier)other;
// see if their embedded arrays are equal
if (java.util.Arrays.equals((long[])this.getValue(),(long[])otherSNMPObject.getValue()))
{
return true;
}
else
{
return false;
}
}
/**
* Generates a hash value so SNMP Object Identifiers can be used in Hashtables.
*/
public int hashCode()
{
int hash = 0;
// generate a hashcode from the embedded array
for (int i = 0; i < digits.length; i++)
{
hash += (int)(digits[i] ^ (digits[i] >> 32));
hash += (hash << 10);
hash ^= (hash >> 6);
}
hash += (hash << 3);
hash ^= (hash >> 11);
hash += (hash << 15);
return hash;
}
/**
* Return dot-separated sequence of decimal values.
*/
public String toString()
{
StringBuffer valueStringBuffer = new StringBuffer();
if (digits.length > 0)
{
valueStringBuffer.append(digits[0]);
for (int i = 1; i < digits.length; ++i)
{
valueStringBuffer.append(".");
valueStringBuffer.append(digits[i]);
}
}
return valueStringBuffer.toString();
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.codehaus.groovy.classgen;
import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import org.codehaus.groovy.ast.*;
import org.codehaus.groovy.ast.expr.BinaryExpression;
import org.codehaus.groovy.ast.expr.ConstantExpression;
import org.codehaus.groovy.ast.expr.DeclarationExpression;
import org.codehaus.groovy.ast.expr.Expression;
import org.codehaus.groovy.ast.expr.GStringExpression;
import org.codehaus.groovy.ast.expr.MapEntryExpression;
import org.codehaus.groovy.ast.expr.MethodCallExpression;
import org.codehaus.groovy.ast.expr.PropertyExpression;
import org.codehaus.groovy.ast.expr.TupleExpression;
import org.codehaus.groovy.ast.expr.VariableExpression;
import org.codehaus.groovy.ast.stmt.CatchStatement;
import org.codehaus.groovy.ast.tools.ClassNodeUtils;
import org.codehaus.groovy.ast.tools.GeneralUtils;
import org.codehaus.groovy.control.SourceUnit;
import org.codehaus.groovy.runtime.MetaClassHelper;
import org.codehaus.groovy.syntax.Types;
import org.codehaus.groovy.transform.trait.Traits;
import static java.lang.reflect.Modifier.*;
import static org.codehaus.groovy.ast.ClassHelper.VOID_TYPE;
import static org.objectweb.asm.Opcodes.*;
/**
* Checks that a class satisfies various conditions including:
* <ul>
* <li>Incorrect class or method access modifiers</li>
* <li>No abstract methods appear in a non-abstract class</li>
* <li>Existence and correct visibility for inherited members</li>
* <li>Invalid attempts to override final members</li>
* </ul>
*/
public class ClassCompletionVerifier extends ClassCodeVisitorSupport {
private static final String[] INVALID_NAME_CHARS = {".", ":", "/", ";", "[", "<", ">"};
// the groovy.compiler.strictNames system property is experimental and may change default value or be removed in a future version of Groovy
private final boolean strictNames = Boolean.parseBoolean(System.getProperty("groovy.compiler.strictNames", "false"));
private ClassNode currentClass;
private final SourceUnit source;
private boolean inConstructor = false;
private boolean inStaticConstructor = false;
/**
 * Creates a verifier that reports errors against the given source unit.
 *
 * @param source the compilation unit being verified; a null source causes
 *               visitClass to skip all checks beyond implements/extends
 */
public ClassCompletionVerifier(SourceUnit source) {
this.source = source;
}
// Returns the class currently being visited (set/restored by visitClass).
public ClassNode getClassNode() {
return currentClass;
}
/**
 * Runs all class-level checks against {@code node}, then visits its members.
 * The previous {@link #currentClass} is saved and restored so nested classes
 * can be visited recursively.
 */
public void visitClass(ClassNode node) {
ClassNode oldClass = currentClass;
currentClass = node;
checkImplementsAndExtends(node);
// the remaining checks assume a structurally sane hierarchy, so they are
// skipped as soon as any error (e.g. extends-an-interface) was recorded
if (source != null && !source.getErrorCollector().hasErrors()) {
checkClassForIncorrectModifiers(node);
checkInterfaceMethodVisibility(node);
checkAbstractMethodVisibility(node);
checkClassForOverwritingFinal(node);
checkMethodsForIncorrectModifiers(node);
checkMethodsForIncorrectName(node);
checkMethodsForWeakerAccess(node);
checkMethodsForOverridingFinal(node);
checkNoAbstractMethodsNonabstractClass(node);
checkClassExtendsAllSelfTypes(node);
checkNoStaticMethodWithSameSignatureAsNonStatic(node);
checkGenericsUsage(node, node.getUnresolvedInterfaces());
checkGenericsUsage(node, node.getUnresolvedSuperClass());
}
super.visitClass(node);
currentClass = oldClass;
}
/**
 * Reports an error when a class ends up with both a static and an instance
 * method of the same signature (directly or via inherited/interface abstract
 * methods). Methods contributed by traits are exempt.
 */
private void checkNoStaticMethodWithSameSignatureAsNonStatic(final ClassNode node) {
ClassNode parent = node.getSuperClass();
Map<String, MethodNode> result;
// start with methods from the parent if any
if (parent != null) {
result = parent.getDeclaredMethodsMap();
} else {
result = new HashMap<String, MethodNode>();
}
// add in unimplemented abstract methods from the interfaces
ClassNodeUtils.addInterfaceMethods(node, result);
for (MethodNode methodNode : node.getMethods()) {
MethodNode mn = result.get(methodNode.getTypeDescriptor());
// XOR: exactly one of the pair is static; static initializers don't count
if (mn != null && (mn.isStatic() ^ methodNode.isStatic()) && !methodNode.isStaticConstructor()) {
// only abstract counterparts are treated as conflicts here
if (!mn.isAbstract()) continue;
ClassNode declaringClass = mn.getDeclaringClass();
ClassNode cn = declaringClass.getOuterClass();
if (cn == null && declaringClass.isResolved()) {
// in case of a precompiled class, the outerclass is unknown
Class typeClass = declaringClass.getTypeClass();
typeClass = typeClass.getEnclosingClass();
if (typeClass != null) {
cn = ClassHelper.make(typeClass);
}
}
// methods whose declaring class is nested in a trait are legitimate
if (cn == null || !Traits.isTrait(cn)) {
ASTNode errorNode = methodNode;
String name = mn.getName();
if (errorNode.getLineNumber() == -1) {
// try to get a better error message location based on the property
for (PropertyNode propertyNode : node.getProperties()) {
if (name.startsWith("set") || name.startsWith("get") || name.startsWith("is")) {
String propName = Verifier.capitalize(propertyNode.getField().getName());
String shortName = name.substring(name.startsWith("is") ? 2 : 3);
if (propName.equals(shortName)) {
errorNode = propertyNode;
break;
}
}
}
}
addError("The " + getDescription(methodNode) + " is already defined in " + getDescription(node) +
". You cannot have both a static and an instance method with the same signature", errorNode);
}
}
result.put(methodNode.getTypeDescriptor(), methodNode);
}
}
/**
 * Interface methods must be public; report any declared private or protected.
 */
private void checkInterfaceMethodVisibility(ClassNode node) {
    if (!node.isInterface()) return;
    for (MethodNode m : node.getMethods()) {
        String badVisibility = null;
        if (m.isPrivate()) {
            badVisibility = "private";
        } else if (m.isProtected()) {
            badVisibility = "protected";
        }
        if (badVisibility != null) {
            addError("Method '" + m.getName() + "' is " + badVisibility + " but should be public in " + getDescription(currentClass) + ".", m);
        }
    }
}
/**
 * In an abstract class (including enums) an abstract method must not be
 * private; interfaces and non-abstract classes are not examined here.
 */
private void checkAbstractMethodVisibility(ClassNode node) {
    int mods = node.getModifiers();
    if (!isAbstract(mods) || isInterface(mods)) return;
    List<MethodNode> abstractMethods = node.getAbstractMethods();
    if (abstractMethods == null || abstractMethods.isEmpty()) return;
    for (MethodNode m : abstractMethods) {
        if (!m.isPrivate()) continue;
        addError("Method '" + m.getName() + "' from " + getDescription(node) +
                " must not be private as it is declared as an abstract method.", m);
    }
}
/**
 * A non-abstract class must implement every abstract method it inherits.
 * When a same-name/same-args method exists but with a different return type,
 * a more specific "different return type" error is produced instead.
 */
private void checkNoAbstractMethodsNonabstractClass(ClassNode node) {
if (isAbstract(node.getModifiers())) return;
List<MethodNode> abstractMethods = node.getAbstractMethods();
if (abstractMethods == null) return;
for (MethodNode method : abstractMethods) {
MethodNode sameArgsMethod = node.getMethod(method.getName(), method.getParameters());
// equal return types here means getMethod found the abstract method itself,
// i.e. no distinct implementation exists
if (sameArgsMethod==null || method.getReturnType().equals(sameArgsMethod.getReturnType())) {
addError("Can't have an abstract method in a non-abstract class." +
" The " + getDescription(node) + " must be declared abstract or" +
" the " + getDescription(method) + " must be implemented.", node);
} else {
addError("Abstract "+getDescription(method)+" is not implemented but a " +
"method of the same name but different return type is defined: "+
(sameArgsMethod.isStatic()?"static ":"")+
getDescription(sameArgsMethod), method
);
}
}
}
/**
 * When a class implements a trait, every self type declared by that trait
 * (via @SelfType) must be satisfied: interface self types must be implemented,
 * class self types must be extended. Interfaces themselves are not checked.
 */
private void checkClassExtendsAllSelfTypes(ClassNode node) {
int modifiers = node.getModifiers();
if (!isInterface(modifiers)) {
for (ClassNode anInterface : GeneralUtils.getInterfacesAndSuperInterfaces(node)) {
if (Traits.isTrait(anInterface)) {
LinkedHashSet<ClassNode> selfTypes = new LinkedHashSet<ClassNode>();
// collectSelfTypes fills and returns selfTypes for the trait
for (ClassNode type : Traits.collectSelfTypes(anInterface, selfTypes, true, false)) {
if (type.isInterface() && !node.implementsInterface(type)) {
addError(getDescription(node)
+ " implements " + getDescription(anInterface)
+ " but does not implement self type " + getDescription(type),
anInterface);
} else if (!type.isInterface() && !node.isDerivedFrom(type)) {
addError(getDescription(node)
+ " implements " + getDescription(anInterface)
+ " but does not extend self type " + getDescription(type),
anInterface);
}
}
}
}
}
}
// Umbrella check for illegal class-level modifiers: the abstract/final
// combination plus the individual forbidden modifiers.
private void checkClassForIncorrectModifiers(ClassNode node) {
checkClassForAbstractAndFinal(node);
checkClassForOtherModifiers(node);
}
/**
 * A type may not be both abstract and final; interfaces get a dedicated
 * message since they are implicitly abstract.
 */
private void checkClassForAbstractAndFinal(ClassNode node) {
    int mods = node.getModifiers();
    if (!(isAbstract(mods) && isFinal(mods))) return;
    String reason = node.isInterface()
            ? " must not be final. It is by definition abstract."
            : " must not be both final and abstract.";
    addError("The " + getDescription(node) + reason, node);
}
/**
 * Rejects modifiers that never make sense on a class; static and private are
 * additionally rejected on non-inner classes.
 */
private void checkClassForOtherModifiers(ClassNode node) {
    int mods = node.getModifiers();
    checkClassForModifier(node, isTransient(mods), "transient");
    checkClassForModifier(node, isVolatile(mods), "volatile");
    checkClassForModifier(node, isNative(mods), "native");
    if (!(node instanceof InnerClassNode)) {
        checkClassForModifier(node, isStatic(mods), "static");
        checkClassForModifier(node, isPrivate(mods), "private");
    }
    // don't check synchronized here as it overlaps with ACC_SUPER
}
// Reports an incorrect-modifier error on the method when the condition holds.
private void checkMethodForModifier(MethodNode node, boolean condition, String modifierName) {
    if (condition) {
        addError("The " + getDescription(node) + " has an incorrect modifier " + modifierName + ".", node);
    }
}
// Reports an incorrect-modifier error on the class when the condition holds.
private void checkClassForModifier(ClassNode node, boolean condition, String modifierName) {
    if (condition) {
        addError("The " + getDescription(node) + " has an incorrect modifier " + modifierName + ".", node);
    }
}
// Human-readable kind + name of the type for error messages.
private static String getDescription(ClassNode node) {
    String kind;
    if (!node.isInterface()) {
        kind = "class";
    } else if (Traits.isTrait(node)) {
        kind = "trait";
    } else {
        kind = "interface";
    }
    return kind + " '" + node.getName() + "'";
}
// Description of a method for error messages, based on its type descriptor.
private static String getDescription(MethodNode node) {
return "method '" + node.getTypeDescriptor() + "'";
}
// Description of a field for error messages.
private static String getDescription(FieldNode node) {
return "field '" + node.getName() + "'";
}
// Description of a parameter for error messages.
private static String getDescription(Parameter node) {
return "parameter '" + node.getName() + "'";
}
/**
 * An abstract method is only legal when the enclosing class is abstract too.
 */
private void checkAbstractDeclaration(MethodNode methodNode) {
    if (!methodNode.isAbstract() || isAbstract(currentClass.getModifiers())) return;
    addError("Can't have an abstract method in a non-abstract class." +
            " The " + getDescription(currentClass) + " must be declared abstract or the method '" +
            methodNode.getTypeDescriptor() + "' must not be abstract.", methodNode);
}
/**
 * A class may not extend a final superclass.
 */
private void checkClassForOverwritingFinal(ClassNode cn) {
    ClassNode superClass = cn.getSuperClass();
    if (superClass == null || !isFinal(superClass.getModifiers())) return;
    addError("You are not allowed to overwrite the final " + getDescription(superClass) + ".", cn);
}
/**
 * Verifies the extends clause names a class and the implements clause names
 * only interfaces, suggesting the correct keyword otherwise.
 */
private void checkImplementsAndExtends(ClassNode node) {
    ClassNode superClass = node.getSuperClass();
    if (superClass.isInterface() && !node.isInterface()) {
        addError("You are not allowed to extend the " + getDescription(superClass) + ", use implements instead.", node);
    }
    for (ClassNode face : node.getInterfaces()) {
        if (!face.isInterface()) {
            addError("You are not allowed to implement the " + getDescription(face) + ", use extends instead.", node);
        }
    }
}
/**
 * Under the experimental groovy.compiler.strictNames flag, rejects method
 * names containing JVM-reserved characters. Synthetic constructor/initializer
 * names are exempt.
 */
private void checkMethodsForIncorrectName(ClassNode cn) {
    if (!strictNames) return;
    for (MethodNode mNode : cn.getAllDeclaredMethods()) {
        String name = mNode.getName();
        if ("<init>".equals(name) || "<clinit>".equals(name)) continue;
        // Groovy allows more characters than Character.isValidJavaIdentifier() would allow
        // if we find a good way to encode special chars we could remove (some of) these checks
        for (String ch : INVALID_NAME_CHARS) {
            if (!name.contains(ch)) continue;
            addError("You are not allowed to have '" + ch + "' in a method name", mNode);
        }
    }
}
/**
 * Interface methods may be neither final nor static (static initializers
 * excepted).
 */
private void checkMethodsForIncorrectModifiers(ClassNode cn) {
    if (!cn.isInterface()) return;
    for (MethodNode m : cn.getMethods()) {
        if (m.isFinal()) {
            addError("The " + getDescription(m) + " from " + getDescription(cn) +
                    " must not be final. It is by definition abstract.", m);
        }
        if (m.isStatic() && !isConstructor(m)) {
            addError("The " + getDescription(m) + " from " + getDescription(cn) +
                    " must not be static. Only fields may be static in an interface.", m);
        }
    }
}
// Runs the weaker-access-privileges check over every method of the class.
private void checkMethodsForWeakerAccess(ClassNode cn) {
    for (MethodNode m : cn.getMethods()) {
        checkMethodForWeakerAccessPrivileges(m, cn);
    }
}
// NOTE(review): despite the name, this matches the *static initializer*
// ("<clinit>"), not instance constructors ("<init>"). Callers use it to skip
// static-init methods; confirm intent before renaming or "fixing".
private static boolean isConstructor(MethodNode method) {
return method.getName().equals("<clinit>");
}
/**
 * Reports an attempt to override a final superclass method.
 * NOTE(review): the `return` below exits after the first offending method, so
 * at most one such error is reported per class; confirm this is intended
 * before changing it to continue with the remaining methods.
 */
private void checkMethodsForOverridingFinal(ClassNode cn) {
for (MethodNode method : cn.getMethods()) {
Parameter[] params = method.getParameters();
for (MethodNode superMethod : cn.getSuperClass().getMethods(method.getName())) {
Parameter[] superParams = superMethod.getParameters();
// only a super method with an identical parameter list is overridden
if (!hasEqualParameterTypes(params, superParams)) continue;
// a matching non-final super method means the override is legal
if (!superMethod.isFinal()) break;
addInvalidUseOfFinalError(method, params, superMethod.getDeclaringClass());
return;
}
}
}
// Builds and records the "override of final method" error message.
private void addInvalidUseOfFinalError(MethodNode method, Parameter[] parameters, ClassNode superCN) {
    StringBuilder sb = new StringBuilder("You are not allowed to override the final method ");
    sb.append(method.getName());
    appendParamsDescription(parameters, sb);
    sb.append(" from ").append(getDescription(superCN)).append(".");
    addError(sb.toString(), method);
}
// Appends "(type1,type2,...)" for the given parameters to the message buffer.
private void appendParamsDescription(Parameter[] parameters, StringBuilder msg) {
    msg.append("(");
    for (int i = 0; i < parameters.length; i++) {
        if (i > 0) {
            msg.append(",");
        }
        msg.append(parameters[i].getType());
    }
    msg.append(")");
}
// Builds and records the javac-style "attempting to assign weaker access
// privileges" error message for an illegal narrowing override.
private void addWeakerAccessError(ClassNode cn, MethodNode method, Parameter[] parameters, MethodNode superMethod) {
    StringBuilder sb = new StringBuilder(method.getName());
    appendParamsDescription(parameters, sb);
    sb.append(" in ").append(cn.getName());
    sb.append(" cannot override ").append(superMethod.getName());
    sb.append(" in ").append(superMethod.getDeclaringClass().getName());
    sb.append("; attempting to assign weaker access privileges; was ");
    sb.append(superMethod.isPublic() ? "public" : "protected");
    addError(sb.toString(), method);
}
// True when both parameter lists have pairwise identical type names.
private static boolean hasEqualParameterTypes(Parameter[] first, Parameter[] second) {
    if (first.length != second.length) return false;
    for (int i = 0; i < first.length; i++) {
        if (!first[i].getType().getName().equals(second[i].getType().getName())) {
            return false;
        }
    }
    return true;
}
// Returns the source unit this verifier reports errors against.
protected SourceUnit getSourceUnit() {
return source;
}
/**
 * Verifies a single method: abstract-in-concrete-class, duplicate signatures,
 * mixed private/public overloads, illegal modifiers, generics usage, and
 * void-typed parameters. Also resets the constructor-context flags consulted
 * by checkFinalFieldAccess.
 */
public void visitMethod(MethodNode node) {
// a plain method body is not a constructor context for final-field assignment
inConstructor = false;
inStaticConstructor = node.isStaticConstructor();
checkAbstractDeclaration(node);
checkRepetitiveMethod(node);
checkOverloadingPrivateAndPublic(node);
checkMethodModifiers(node);
checkGenericsUsage(node, node.getParameters());
checkGenericsUsage(node, node.getReturnType());
for (Parameter param : node.getParameters()) {
if (param.getType().equals(VOID_TYPE)) {
addError("The " + getDescription(param) + " in " + getDescription(node) + " has invalid type void", param);
}
}
super.visitMethod(node);
}
/**
 * Rejects method modifiers that are illegal inside an interface.
 */
private void checkMethodModifiers(MethodNode node) {
    // don't check volatile here as it overlaps with ACC_BRIDGE
    // additional modifiers not allowed for interfaces
    if ((currentClass.getModifiers() & ACC_INTERFACE) == 0) return;
    int mods = node.getModifiers();
    checkMethodForModifier(node, isStrict(mods), "strictfp");
    checkMethodForModifier(node, isSynchronized(mods), "synchronized");
    checkMethodForModifier(node, isNative(mods), "native");
}
/**
 * Reports an override that narrows access: private over a non-private super
 * method, or protected over a public one. Public methods can never narrow,
 * so they are skipped up front.
 */
private void checkMethodForWeakerAccessPrivileges(MethodNode mn, ClassNode cn) {
if (mn.isPublic()) return;
Parameter[] params = mn.getParameters();
for (MethodNode superMethod : cn.getSuperClass().getMethods(mn.getName())) {
Parameter[] superParams = superMethod.getParameters();
// only a super method with the identical parameter list is overridden
if (!hasEqualParameterTypes(params, superParams)) continue;
if ((mn.isPrivate() && !superMethod.isPrivate()) ||
(mn.isProtected() && superMethod.isPublic())) {
addWeakerAccessError(cn, mn, params, superMethod);
return;
}
}
}
/**
 * Forbids mixing private with public/protected overloads of one name in the
 * same declaring class, since that disables Groovy's multimethod dispatch.
 */
private void checkOverloadingPrivateAndPublic(MethodNode node) {
    if (isConstructor(node)) return;
    boolean seenPrivate = node.isPrivate();
    boolean seenPublic = node.isPublic();
    for (MethodNode other : currentClass.getMethods(node.getName())) {
        if (other == node) continue;
        if (!other.getDeclaringClass().equals(node.getDeclaringClass())) continue;
        if (other.isPublic() || other.isProtected()) {
            seenPublic = true;
        } else {
            seenPrivate = true;
        }
        if (seenPrivate && seenPublic) break;
    }
    if (seenPrivate && seenPublic) {
        addError("Mixing private and public/protected methods of the same name causes multimethods to be disabled and is forbidden to avoid surprising behaviour. Renaming the private methods will solve the problem.", node);
    }
}
/**
 * Detects a second declaration of the same name with an identical signature
 * in the same declaring class.
 */
private void checkRepetitiveMethod(MethodNode node) {
    if (isConstructor(node)) return;
    Parameter[] params = node.getParameters();
    for (MethodNode other : currentClass.getMethods(node.getName())) {
        if (other == node) continue;
        if (!other.getDeclaringClass().equals(node.getDeclaringClass())) continue;
        Parameter[] otherParams = other.getParameters();
        if (params.length != otherParams.length) continue;
        addErrorIfParamsAndReturnTypeEqual(otherParams, params, node, other);
    }
}
// Records a repetitive-method error when both parameter lists (already known
// to be the same length) and the return types match exactly.
private void addErrorIfParamsAndReturnTypeEqual(Parameter[] p2, Parameter[] p1,
                                                MethodNode node, MethodNode element) {
    for (int i = 0; i < p2.length; i++) {
        if (!p1[i].getType().equals(p2[i].getType())) return;
    }
    if (!node.getReturnType().equals(element.getReturnType())) return;
    addError("Repetitive method name/signature for " + getDescription(node) +
            " in " + getDescription(currentClass) + ".", node);
}
/**
 * Verifies a field: duplicate declaration, interface field modifiers,
 * generics usage, and the illegal void type.
 */
public void visitField(FieldNode node) {
// a different node registered under this name means a duplicate declaration
if (currentClass.getDeclaredField(node.getName()) != node) {
addError("The " + getDescription(node) + " is declared multiple times.", node);
}
checkInterfaceFieldModifiers(node);
checkGenericsUsage(node, node.getType());
if (node.getType().equals(VOID_TYPE)) {
addError("The " + getDescription(node) + " has invalid type void", node);
}
super.visitField(node);
}
/**
 * Verifies a property: JavaBean-getter clashes and generics usage.
 */
public void visitProperty(PropertyNode node) {
checkDuplicateProperties(node);
checkGenericsUsage(node, node.getType());
super.visitProperty(node);
}
/**
 * A property whose name starts with an uppercase letter can produce the same
 * JavaBean getter as a lowercase-named sibling (e.g. "Foo" and "foo" both map
 * to getFoo); such pairs are reported as duplicate JavaBean properties.
 */
private void checkDuplicateProperties(PropertyNode node) {
    String name = node.getName();
    // only uppercase-initial names can collide with a differently-cased sibling
    if (!Character.isUpperCase(name.charAt(0))) return;
    ClassNode cn = node.getDeclaringClass();
    String getterName = "get" + MetaClassHelper.capitalize(name);
    for (PropertyNode other : cn.getProperties()) {
        if (other == node) continue;
        String otherName = other.getField().getName();
        String otherGetterName = "get" + MetaClassHelper.capitalize(otherName);
        if (getterName.equals(otherGetterName)) {
            addError("The field " + name + " and " + otherName + " on the class " +
                    cn.getName() + " will result in duplicate JavaBean properties, which is not allowed", node);
        }
    }
}
/**
 * Interface fields must be public static final.
 * NOTE(review): the first test fires only when NONE of public/static/final is
 * set, so e.g. a field that is public but not static slips past it (the
 * private/protected test still catches some cases); confirm before tightening.
 */
private void checkInterfaceFieldModifiers(FieldNode node) {
if (!currentClass.isInterface()) return;
if ((node.getModifiers() & (ACC_PUBLIC | ACC_STATIC | ACC_FINAL)) == 0 ||
(node.getModifiers() & (ACC_PRIVATE | ACC_PROTECTED)) != 0) {
addError("The " + getDescription(node) + " is not 'public static final' but is defined in " +
getDescription(currentClass) + ".", node);
}
}
/**
 * Verifies binary expressions: a map entry used as an index operand, and —
 * for assignments — writes to final fields and to 'this'/'super'.
 */
public void visitBinaryExpression(BinaryExpression expression) {
// a[k: v] style indexing is almost always a punctuation mistake
if (expression.getOperation().getType() == Types.LEFT_SQUARE_BRACKET &&
expression.getRightExpression() instanceof MapEntryExpression) {
addError("You tried to use a map entry for an index operation, this is not allowed. " +
"Maybe something should be set in parentheses or a comma is missing?",
expression.getRightExpression());
}
super.visitBinaryExpression(expression);
// only assignment operators can violate final/this/super LHS rules
if (Types.isAssignment(expression.getOperation().getType())) {
checkFinalFieldAccess(expression.getLeftExpression());
checkSuperOrThisOnLHS(expression.getLeftExpression());
}
}
// Rejects 'this' or 'super' appearing as the target of an assignment.
private void checkSuperOrThisOnLHS(Expression expression) {
    if (!(expression instanceof VariableExpression)) return;
    VariableExpression lhs = (VariableExpression) expression;
    if (lhs.isThisExpression()) {
        addError("cannot have 'this' as LHS of an assignment", expression);
    } else if (lhs.isSuperExpression()) {
        addError("cannot have 'super' as LHS of an assignment", expression);
    }
}
/**
 * Reports assignment to a final field outside the place it may legally be
 * initialized: a static final field outside the static initializer, or an
 * instance final field outside a constructor. Handles both bare variable
 * references and 'this.name' property references.
 */
private void checkFinalFieldAccess(Expression expression) {
if (!(expression instanceof VariableExpression) && !(expression instanceof PropertyExpression)) return;
Variable v = null;
if (expression instanceof VariableExpression) {
VariableExpression ve = (VariableExpression) expression;
v = ve.getAccessedVariable();
} else {
// only 'this.prop' is resolvable to a field of the current class here
PropertyExpression propExp = ((PropertyExpression) expression);
Expression objectExpression = propExp.getObjectExpression();
if (objectExpression instanceof VariableExpression) {
VariableExpression varExp = (VariableExpression) objectExpression;
if (varExp.isThisExpression()) {
v = currentClass.getDeclaredField(propExp.getPropertyAsString());
}
}
}
if (v instanceof FieldNode) {
FieldNode fn = (FieldNode) v;
/*
 * if it is static final but not accessed inside a static constructor, or,
 * if it is an instance final but not accessed inside a instance constructor, it is an error
 */
boolean isFinal = fn.isFinal();
boolean isStatic = fn.isStatic();
boolean error = isFinal && ((isStatic && !inStaticConstructor) || (!isStatic && !inConstructor));
if (error) addError("cannot modify" + (isStatic ? " static" : "") + " final field '" + fn.getName() +
"' outside of " + (isStatic ? "static initialization block." : "constructor."), expression);
}
}
/**
 * Verifies a constructor and flags the constructor context so final-field
 * assignments inside it are accepted by checkFinalFieldAccess.
 */
public void visitConstructor(ConstructorNode node) {
inConstructor = true;
inStaticConstructor = node.isStaticConstructor();
checkGenericsUsage(node, node.getParameters());
super.visitConstructor(node);
}
/**
 * A catch parameter must be a Throwable subtype.
 */
public void visitCatchStatement(CatchStatement cs) {
    ClassNode throwableType = ClassHelper.make(Throwable.class);
    if (!cs.getExceptionType().isDerivedFrom(throwableType)) {
        addError("Catch statement parameter type is not a subclass of Throwable.", cs);
    }
    super.visitCatchStatement(cs);
}
/**
 * Rejects declaration expressions used directly as method-call arguments.
 */
public void visitMethodCallExpression(MethodCallExpression mce) {
    super.visitMethodCallExpression(mce);
    Expression args = mce.getArguments();
    if (args instanceof TupleExpression) {
        for (Expression e : ((TupleExpression) args).getExpressions()) {
            checkForInvalidDeclaration(e);
        }
    } else {
        checkForInvalidDeclaration(args);
    }
}
/**
 * Verifies a local variable declaration: no Java member modifiers are allowed
 * on locals, and the declared type may not be void. Multiple-assignment
 * declarations carry no modifiers and are skipped.
 */
@Override
public void visitDeclarationExpression(DeclarationExpression expression) {
super.visitDeclarationExpression(expression);
if (expression.isMultipleAssignmentDeclaration()) return;
checkInvalidDeclarationModifier(expression, ACC_ABSTRACT, "abstract");
checkInvalidDeclarationModifier(expression, ACC_NATIVE, "native");
checkInvalidDeclarationModifier(expression, ACC_PRIVATE, "private");
checkInvalidDeclarationModifier(expression, ACC_PROTECTED, "protected");
checkInvalidDeclarationModifier(expression, ACC_PUBLIC, "public");
checkInvalidDeclarationModifier(expression, ACC_STATIC, "static");
checkInvalidDeclarationModifier(expression, ACC_STRICT, "strictfp");
checkInvalidDeclarationModifier(expression, ACC_SYNCHRONIZED, "synchronized");
checkInvalidDeclarationModifier(expression, ACC_TRANSIENT, "transient");
checkInvalidDeclarationModifier(expression, ACC_VOLATILE, "volatile");
if (expression.getVariableExpression().getOriginType().equals(VOID_TYPE)) {
addError("The variable '" + expression.getVariableExpression().getName() + "' has invalid type void", expression);
}
}
// Reports the modifier as illegal when its bit is set on the declared variable.
private void checkInvalidDeclarationModifier(DeclarationExpression expression, int modifier, String modName) {
    if ((expression.getVariableExpression().getModifiers() & modifier) == 0) return;
    addError("Modifier '" + modName + "' not allowed here.", expression);
}
// A declaration may not appear inside a method-call argument list.
private void checkForInvalidDeclaration(Expression exp) {
    if (exp instanceof DeclarationExpression) {
        addError("Invalid use of declaration inside method call.", exp);
    }
}
@Override
public void visitConstantExpression(ConstantExpression expression) {
    // Visit first, then enforce the 65535-unit string constant limit.
    super.visitConstantExpression(expression);
    checkStringExceedingMaximumLength(expression);
}
@Override
public void visitGStringExpression(GStringExpression expression) {
    super.visitGStringExpression(expression);
    // Each constant segment of the GString is subject to the string length limit.
    for (ConstantExpression segment : expression.getStrings()) {
        checkStringExceedingMaximumLength(segment);
    }
}
// Rejects string constants longer than 65535 units, the maximum the
// class file format can store for a string constant.
private void checkStringExceedingMaximumLength(ConstantExpression expression) {
    Object value = expression.getValue();
    if (!(value instanceof String)) return;
    String s = (String) value;
    if (s.length() <= 65535) return;
    addError("String too long. The given string is " + s.length() + " Unicode code units long, but only a maximum of 65535 is allowed.", expression);
}
// Applies the single-node generics check to every entry of the array.
private void checkGenericsUsage(ASTNode ref, ClassNode[] nodes) {
    for (int i = 0; i < nodes.length; i++) {
        checkGenericsUsage(ref, nodes[i]);
    }
}
// Checks the declared type of each parameter for illegal generics usage.
private void checkGenericsUsage(ASTNode ref, Parameter[] params) {
    for (int i = 0; i < params.length; i++) {
        checkGenericsUsage(ref, params[i].getType());
    }
}
// Flags direct use of a generics-carrying ClassNode that is not a redirect;
// arrays are unwrapped and checked by component type.
private void checkGenericsUsage(ASTNode ref, ClassNode node) {
    if (node.isArray()) {
        checkGenericsUsage(ref, node.getComponentType());
        return;
    }
    if (node.isRedirectNode() || !node.isUsingGenerics()) return;
    addError(
            "A transform used a generics containing ClassNode "+ node + " " +
            "for "+getRefDescriptor(ref) +
            "directly. You are not supposed to do this. " +
            "Please create a new ClassNode referring to the old ClassNode " +
            "and use the new ClassNode instead of the old one. Otherwise " +
            "the compiler will create wrong descriptors and a potential " +
            "NullPointerException in TypeResolver in the OpenJDK. If this is " +
            "not your own doing, please report this bug to the writer of the " +
            "transform.",
            ref);
}
// Produces a human-readable description (with trailing space) of the AST node
// for use inside error messages. The instanceof order is significant —
// presumably ConstructorNode also matches MethodNode, so it is tested first.
private static String getRefDescriptor(ASTNode ref) {
    if (ref instanceof FieldNode) {
        return "the field "+((FieldNode) ref).getName()+" ";
    }
    if (ref instanceof PropertyNode) {
        return "the property "+((PropertyNode) ref).getName()+" ";
    }
    if (ref instanceof ConstructorNode) {
        return "the constructor "+ref.getText()+" ";
    }
    if (ref instanceof MethodNode) {
        return "the method "+ref.getText()+" ";
    }
    if (ref instanceof ClassNode) {
        return "the super class "+ref+" ";
    }
    return "<unknown with class "+ref.getClass()+"> ";
}
}
| |
/*
* The MIT License (MIT)
*
* Copyright (c) 2007-2015 Broad Institute
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package org.broad.igv.ui.util;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.broad.igv.Globals;
import org.broad.igv.ui.IGV;
import javax.swing.*;
import java.awt.*;
import java.lang.reflect.InvocationTargetException;
/**
* Provides thread-safe, Swing-safe, utilities for interacting with JOptionPane. Accounts for
* (1) Swing is not thread safe => synchronize access
* (2) JOptionPane methods must be invoked on event dispatch thread
*
* @author jrobinso
*/
public class MessageUtils {

    private static Logger log = Logger.getLogger(MessageUtils.class);

    // Somewhat silly class, needed to pass values between threads
    static class ValueHolder {
        Object value;
    }

    /**
     * Log the exception and show {@code message} to the user.
     *
     * @param message message to display and log
     * @param e       exception to log alongside the message
     */
    public static void showErrorMessage(String message, Exception e) {
        log.error(message, e);
        showMessage(Level.ERROR, message);
    }

    public static void showMessage(String message) {
        showMessage(Level.INFO, message);
    }

    /**
     * Log {@code message} at {@code level} and, unless dialogs are suppressed
     * (headless, suppress-messages, testing, or batch mode), show it to the user.
     *
     * @param level   log4j level used when logging the message
     * @param message message text; embedded &lt;html&gt; tags are stripped and the
     *                whole message is rendered as HTML
     */
    public static synchronized void showMessage(Level level, String message) {
        log.log(level, message);
        boolean showDialog = !(Globals.isHeadless() || Globals.isSuppressMessages() || Globals.isTesting() || Globals.isBatch());
        if (!showDialog) {
            return;
        }
        // Always use HTML for message displays, but first remove any embedded <html> tags.
        message = "<html>" + message.replaceAll("<html>", "");
        Frame parent = IGV.hasInstance() ? IGV.getMainFrame() : null;
        Color background = parent != null ? parent.getBackground() : Color.lightGray;
        // JEditorPane so users can select text
        JEditorPane content = new JEditorPane();
        content.setContentType("text/html");
        content.setText(message);
        content.setBackground(background);
        content.setEditable(false);
        Component dispMessage = content;
        // Really long messages should be scrollable
        if (message.length() > 200) {
            Dimension size = new Dimension(1000, content.getHeight());
            content.setPreferredSize(size);
            dispMessage = new JScrollPane(content);
        }
        JOptionPane.showMessageDialog(parent, dispMessage);
    }

    public static void setStatusBarMessage(final String message) {
        log.debug("Status bar: " + message);
        if (IGV.hasInstance()) {
            IGV.getInstance().setStatusBarMessage(message);
        }
    }

    /**
     * Show a yes/no confirmation dialog anchored to the main IGV frame.
     * Always returns true when running headless or in batch mode.
     *
     * @param message message to display
     * @return true if the user selected "Yes" (or the dialog was suppressed)
     */
    public static synchronized boolean confirm(final String message) {
        if (Globals.isHeadless()) {
            log.error("Attempted to confirm while running headless with the following message:\n" + message);
            return true;
        }
        if (Globals.isBatch()) {
            return true;
        }
        final Frame parent = IGV.hasInstance() ? IGV.getMainFrame() : null;
        return confirm(parent, message);
    }

    /**
     * Show a yes/no confirmation dialog.
     *
     * @param component parent component for the dialog (may be null)
     * @param message   message to display
     * @return true if the user selected "Yes" (or the dialog was suppressed)
     */
    public static synchronized boolean confirm(final Component component, final String message) {
        if (Globals.isHeadless() || Globals.isBatch()) {
            return true;
        }
        if (SwingUtilities.isEventDispatchThread()) {
            int opt = JOptionPane.showConfirmDialog(component, message, "Confirm", JOptionPane.YES_NO_OPTION);
            return opt == JOptionPane.YES_OPTION;
        }
        final ValueHolder returnValue = new ValueHolder();
        invokeAndWaitOnEDT(new Runnable() {
            public void run() {
                int opt = JOptionPane.showConfirmDialog(component, message, "Confirm", JOptionPane.YES_NO_OPTION);
                returnValue.value = (opt == JOptionPane.YES_OPTION);
            }
        }, "confirm");
        return (Boolean) returnValue.value;
    }

    /**
     * Prompt the user for a string, pre-populated with {@code defaultValue}.
     *
     * @param message      prompt text; padded with spaces so it is at least as wide
     *                     as the default value
     * @param defaultValue initial value of the input field
     * @return the entered string, or null if the dialog was cancelled
     */
    public static String showInputDialog(String message, final String defaultValue) {
        final Frame parent = IGV.hasInstance() ? IGV.getMainFrame() : null;
        // Pad message with spaces so it's as wide as the defaultValue
        if (message.length() < defaultValue.length()) {
            message = String.format("%-" + defaultValue.length() + "s", message);
        }
        final String actMsg = message;
        if (SwingUtilities.isEventDispatchThread()) {
            return JOptionPane.showInputDialog(parent, actMsg, defaultValue);
        }
        final ValueHolder returnValue = new ValueHolder();
        invokeAndWaitOnEDT(new Runnable() {
            public void run() {
                returnValue.value = JOptionPane.showInputDialog(parent, actMsg, defaultValue);
            }
        }, "showInputDialog");
        return (String) returnValue.value;
    }

    /**
     * Prompt the user for a string.
     *
     * @param message prompt text
     * @return the entered string, or null if the dialog was cancelled
     */
    public static String showInputDialog(final String message) {
        final Frame parent = IGV.hasInstance() ? IGV.getMainFrame() : null;
        if (SwingUtilities.isEventDispatchThread()) {
            return JOptionPane.showInputDialog(parent, message);
        }
        final ValueHolder returnValue = new ValueHolder();
        invokeAndWaitOnEDT(new Runnable() {
            public void run() {
                returnValue.value = JOptionPane.showInputDialog(parent, message);
            }
        }, "showInputDialog");
        return (String) returnValue.value;
    }

    /**
     * Run {@code runnable} on the event dispatch thread and wait for completion.
     * Extracted from the previously duplicated blocks in {@code confirm} and the
     * {@code showInputDialog} variants, which all logged the misleading message
     * "Error in showMessage" regardless of the actual caller.
     *
     * @param runnable task to run on the EDT
     * @param context  caller name, used in log messages
     */
    private static void invokeAndWaitOnEDT(Runnable runnable, String context) {
        try {
            SwingUtilities.invokeAndWait(runnable);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();  // restore the interrupt status
            log.error("Interrupted in " + context, e);
            throw new RuntimeException(e);
        } catch (InvocationTargetException e) {
            log.error("Error in " + context, e);
            // Unwrap so callers see the exception thrown inside the runnable.
            throw new RuntimeException(e.getCause());
        }
    }

    /**
     * Test program - call all methods from both main and swing threads
     *
     * @param args unused
     * @throws Exception on failure
     */
    public static void main(String[] args) throws Exception {
        Runnable runnable = new Runnable() {
            public void run() {
                showMessage("showMessage");
                confirm("confirm");
                confirm(null, "confirm with parent");
                showInputDialog("showInputDialog", "default");
                showInputDialog("showInputDialog");
            }
        };
        // Test on main thread
        runnable.run();
        // Test on swing thread
        SwingUtilities.invokeLater(runnable);
    }
}
| |
// ========================================================================
// Copyright 2008-2012 NEXCOM Systems
// ------------------------------------------------------------------------
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// ========================================================================
package org.cipango.server;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.concurrent.atomic.AtomicLong;
import javax.servlet.ServletException;
import javax.servlet.sip.Address;
import javax.servlet.sip.SipServletMessage;
import javax.servlet.sip.SipServletResponse;
import javax.servlet.sip.SipURI;
import javax.servlet.sip.URI;
import org.cipango.server.log.AccessLog;
import org.cipango.sip.NameAddr;
import org.cipango.sip.SipGenerator;
import org.cipango.sip.SipHeaders;
import org.cipango.sip.Via;
import org.eclipse.jetty.io.Buffer;
import org.eclipse.jetty.io.Buffers;
import org.eclipse.jetty.io.ByteArrayBuffer;
import org.eclipse.jetty.util.LazyList;
import org.eclipse.jetty.util.MultiException;
import org.eclipse.jetty.util.component.AbstractLifeCycle;
import org.eclipse.jetty.util.component.LifeCycle;
import org.eclipse.jetty.util.log.Log;
import org.eclipse.jetty.util.log.Logger;
public class ConnectorManager extends AbstractLifeCycle implements Buffers, SipHandler
{
    private static final Logger LOG = Log.getLogger(ConnectorManager.class);

    private static final int DEFAULT_MESSAGE_SIZE = 16*1024; // FIXME
    private static final int MAX_MESSAGE_SIZE = 64*1024;

    // By default set MTU to max message size instead of 1500.
    private static final int DEFAULT_MTU = MAX_MESSAGE_SIZE;

    private Server _server;
    private SipConnector[] _connectors;
    private int _mtu = DEFAULT_MTU;
    private SipGenerator _sipGenerator;
    private AccessLog _accessLog;

    // Global send/receive counters; reset via statsReset().
    private final AtomicLong _receivedStats = new AtomicLong();
    private final AtomicLong _sentStats = new AtomicLong();
    private transient long _nbParseErrors;

    // Pool of reusable generation buffers, all of capacity _messageSize.
    private ArrayList<Buffer> _buffers;
    private int _messageSize = 10000;
    // NOTE(review): _largeMessageSize is not referenced in this class — confirm external use before removing.
    private int _largeMessageSize = MAX_MESSAGE_SIZE;
    private boolean _forceClientRport;

    /** Adds a connector to the managed set. */
    public void addConnector(SipConnector connector)
    {
        setConnectors((SipConnector[]) LazyList.addToArray(getConnectors(), connector, SipConnector.class));
    }

    public SipConnector[] getConnectors()
    {
        return _connectors;
    }

    /** @return the first configured connector, or null when none is configured. */
    public SipConnector getDefaultConnector()
    {
        if (_connectors == null || _connectors.length == 0)
            return null;
        return _connectors[0];
    }

    /**
     * Replaces the connector set, wiring each connector to this server and to
     * this manager as its handler, and notifying the server container.
     */
    public void setConnectors(SipConnector[] connectors)
    {
        if (connectors != null)
        {
            for (int i = 0; i < connectors.length; i++)
            {
                SipConnector connector = connectors[i];
                connector.setServer(_server);
                connector.setHandler(this);
            }
        }
        if (_server != null)
            _server.getContainer().update(this, _connectors, connectors, "connectors");
        _connectors = connectors;
    }

    public void setServer(org.eclipse.jetty.server.Server server)
    {
        _server = (Server) server;
    }

    public Server getServer()
    {
        return _server;
    }

    /** @return a contact address built from the SIP URI of a connector matching the transport type. */
    public Address getContact(int type)
    {
        SipConnector sc = findConnector(type, null);
        return new NameAddr((URI) sc.getSipUri().clone());
        //return (Address) findTransport(type, null).getContact().clone();
    }

    /** Resets the buffer pool, starts the access log (best effort) and all connectors. */
    protected void doStart() throws Exception
    {
        super.doStart();

        if (_buffers != null)
            _buffers.clear();
        else
            _buffers = new ArrayList<Buffer>();

        _sipGenerator = new SipGenerator();

        if (_accessLog instanceof LifeCycle)
        {
            try
            {
                ((LifeCycle) _accessLog).start();
            }
            catch (Exception e)
            {
                // Access log is non-critical: log and continue starting.
                LOG.warn("failed to start access log", e);
            }
        }

        if (_connectors != null)
        {
            for (int i = 0; i < _connectors.length; i++)
            {
                SipConnector connector = _connectors[i];
                connector.start();
            }
        }
    }

    /**
     * Stops connectors in reverse order, collecting failures so that every
     * connector gets a stop attempt before any exception is rethrown.
     */
    protected void doStop() throws Exception
    {
        MultiException mex = new MultiException();

        if (_connectors != null)
        {
            for (int i = _connectors.length; i--> 0;)
            {
                try
                {
                    _connectors[i].stop();
                }
                catch (Throwable e)
                {
                    mex.add(e);
                }
            }
        }

        if (_accessLog instanceof LifeCycle)
            try { ((LifeCycle) _accessLog).stop(); } catch (Throwable t) { LOG.warn(t); }

        super.doStop();
        mex.ifExceptionThrow();
    }

    /**
     * Finds a connector whose transport ordinal matches {@code type}, falling
     * back to the first connector when none matches.
     * NOTE(review): the {@code addr} parameter is currently ignored — confirm intent.
     */
    public SipConnector findConnector(int type, InetAddress addr)
    {
        for (int i = 0; i < _connectors.length; i++)
        {
            SipConnector t = _connectors[i];
            if (t.getTransportOrdinal() == type)
                return t;
        }
        return _connectors[0];
    }

    public void messageReceived()
    {
        _receivedStats.incrementAndGet();
    }

    public void messageSent()
    {
        _sentStats.incrementAndGet();
    }

    /**
     * Entry point for messages coming from connectors: updates statistics and
     * the access log, pre-validates the message, patches the top Via of
     * requests ("received" and "rport" parameters) and dispatches to the
     * server. Invalid messages are counted as parse errors.
     */
    public void handle(SipServletMessage message) throws IOException, ServletException
    {
        SipMessage msg = (SipMessage) message;
        messageReceived();
        if (_accessLog != null)
            _accessLog.messageReceived(msg, msg.getConnection());

        if (preValidateMessage((SipMessage) message))
        {
            if (msg.isRequest())
            {
                Via via = msg.getTopVia();
                String remoteAddr = msg.getRemoteAddr();
                String host = via.getHost();
                if (host.indexOf('[') != -1)
                {
                    // As there is multiple presentation of an IPv6 address, normalize it.
                    host = InetAddress.getByName(host).getHostAddress();
                }
                if (!host.equals(remoteAddr))
                    via.setReceived(remoteAddr);
                if (via.getRport() != null || isForceClientRport())
                    via.setRport(Integer.toString(message.getRemotePort()));
            }
            getServer().handle(msg);
        }
        else
        {
            _nbParseErrors++;
        }
    }

    /**
     * Returns true when {@code uri} is a loose-routing SIP URI whose host/port
     * matches one of the local connectors (IPv6 hosts are normalized first).
     */
    public boolean isLocalUri(URI uri)
    {
        if (!uri.isSipURI())
            return false;

        SipURI sipUri = (SipURI) uri;

        if (!sipUri.getLrParam())
            return false;

        String host = sipUri.getHost();
        // Normalize IPv6 address
        if (host.indexOf("[") != -1)
        {
            try
            {
                host = InetAddress.getByName(host).getHostAddress();
            }
            catch (UnknownHostException e)
            {
                LOG.ignore(e);
            }
        }

        for (int i = 0; i < _connectors.length; i++)
        {
            SipConnector connector = _connectors[i];
            String connectorHost = connector.getSipUri().getHost();
            boolean samePort = connector.getPort() == sipUri.getPort() || sipUri.getPort() == -1;
            if (samePort)
            {
                if ((connectorHost.equals(host) || connector.getAddr().getHostAddress().equals(host)))
                {
                    if (sipUri.getPort() != -1)
                        return true;
                    // match on host address and port is not set ==> NAPTR case
                    if (connector.getAddr().getHostAddress().equals(host)
                            && connector.getPort() != connector.getDefaultPort())
                    {
                        return false;
                    }
                    return true;
                }
            }
        }
        return false;
    }

    /**
     * Sends the message and returns the connection used to sent the message.
     * The returned connection can be different if initial connection is not reliable and
     * message is bigger than MTU.
     */
    public SipConnection send(SipMessage message, SipConnection connection) throws IOException
    {
        Buffer buffer = getBuffer(_messageSize);
        _sipGenerator.generate(buffer, message);
        try
        {
            if (!connection.getConnector().isReliable()
                    && (buffer.putIndex() + 200 > _mtu)
                    && message.isRequest())
            {
                LOG.debug("Message is too large. Switching to TCP");
                try
                {
                    SipConnection newConnection = getConnection((SipRequest) message,
                            SipConnectors.TCP_ORDINAL,
                            connection.getRemoteAddress(),
                            connection.getRemotePort());
                    if (newConnection.getConnector().isReliable())
                    {
                        return send(message, newConnection);
                    }
                }
                catch (IOException e)
                {
                    Via via = message.getTopVia();
                    // Update via to ensure that right value is used in logs
                    SipConnector connector = connection.getConnector();
                    via.setTransport(connector.getTransport());
                    via.setHost(connector.getSipUri().getHost());
                    via.setPort(connector.getSipUri().getPort());
                    LOG.debug("Failed to switch to TCP, return to original connection");
                }
            }
            connection.write(buffer);
            if (_accessLog != null)
                _accessLog.messageSent(message, connection);
            messageSent();
            return connection;
        }
        finally
        {
            // Always recycle the generation buffer, even on write failure.
            returnBuffer(buffer);
        }
    }

    /**
     * Finds a connection for the given transport/address/port, updating the
     * request's top Via to reflect the connector actually used.
     *
     * @throws IOException when no connection can be obtained
     */
    public SipConnection getConnection(SipRequest request, int transport, InetAddress address, int port) throws IOException
    {
        SipConnector connector = findConnector(transport, address);
        Via via = request.getTopVia();
        via.setTransport(connector.getTransport());
        via.setHost(connector.getSipUri().getHost());
        via.setPort(connector.getSipUri().getPort());
        SipConnection connection = connector.getConnection(address, port);
        if (connection == null)
            throw new IOException("Could not find connection to " + address + ":" + port + "/" + connector.getTransport());
        return connection;
    }

    /** Sends a response over the connection of its originating request, when available. */
    public void sendResponse(SipResponse response) throws IOException
    {
        SipRequest request = (SipRequest) response.getRequest();
        SipConnection connection = null;
        if (request != null)
            connection = request.getConnection();
        sendResponse(response, connection);
    }

    /**
     * Sends a response, resolving a fresh connection from the top Via when the
     * given connection is null, unreliable or closed (per the Via "rport",
     * "maddr", host and port values).
     */
    public void sendResponse(SipResponse response, SipConnection connection) throws IOException
    {
        if (connection == null || !connection.getConnector().isReliable() || !connection.isOpen())
        {
            Via via = response.getTopVia();
            SipConnector connector = null;
            InetAddress address = null;

            if (connection != null)
            {
                connector = connection.getConnector();
                address = connection.getRemoteAddress();
            }
            else
            {
                int transport = SipConnectors.getOrdinal(via.getTransport());
                if (via.getMAddr() != null)
                    address = InetAddress.getByName(via.getMAddr());
                else
                    address = InetAddress.getByName(via.getHost());
                connector = findConnector(transport, address);
            }

            int port = -1;
            String srport = via.getRport();
            if (srport != null)
            {
                port = Integer.parseInt(srport);
            }
            else
            {
                port = via.getPort();
                if (port == -1)
                    // FIX: was connection.getConnector().getDefaultPort(), which throws
                    // NullPointerException on the path where connection is null. The
                    // local connector is the same object when connection is non-null.
                    port = connector.getDefaultPort();
            }

            connection = connector.getConnection(address, port);

            if (connection == null)
                throw new IOException("Could not found any SIP connection to "
                        + address + ":" + port + "/" + connector.getTransport());
        }
        send(response, connection);
    }

    /** Takes a pooled buffer when {@code size} matches the pool size; otherwise allocates. */
    public Buffer getBuffer(int size)
    {
        if (size == _messageSize)
        {
            synchronized (_buffers)
            {
                if (_buffers.size() == 0)
                    return newBuffer(size);
                return (Buffer) _buffers.remove(_buffers.size() - 1);
            }
        }
        else
            return newBuffer(size);
    }

    /** Clears the buffer and returns it to the pool when it has the pooled capacity. */
    public void returnBuffer(Buffer buffer)
    {
        buffer.clear();
        int c = buffer.capacity();
        if (c == _messageSize)
        {
            synchronized (_buffers)
            {
                _buffers.add(buffer);
            }
        }
    }

    public Buffer newBuffer(int size)
    {
        return new ByteArrayBuffer(size);
    }

    /** Writes {@code s} to the buffer as UTF-8 bytes. */
    public static void putStringUTF8(Buffer buffer, String s)
    {
        byte[] bytes = null;
        try
        {
            bytes = s.getBytes("UTF-8");
        }
        catch (UnsupportedEncodingException e)
        {
            // UTF-8 is mandated by the JVM spec, so this cannot normally happen;
            // FIX: preserve the cause instead of throwing a bare RuntimeException.
            throw new RuntimeException(e);
        }
        buffer.put(bytes);
    }

    /**
     * Sets the access log, starting it immediately (best effort) when this
     * manager is already running.
     */
    public void setAccessLog(AccessLog accessLog)
    {
        if (getServer() != null)
            getServer().getContainer().update(this, _accessLog, accessLog, "accessLog", false);

        _accessLog = accessLog;

        try
        {
            if (isRunning() && _accessLog instanceof LifeCycle)
                ((LifeCycle) accessLog).start();
        }
        catch (Exception e)
        {
            LOG.warn(e);
        }
    }

    public long getMessagesReceived()
    {
        return _receivedStats.get();
    }

    public long getMessagesSent()
    {
        return _sentStats.get();
    }

    /** @return parse errors counted here plus those reported by each connector. */
    public long getNbParseError()
    {
        long val = _nbParseErrors;
        for (int i = 0; _connectors != null && i < _connectors.length; i++)
        {
            val += _connectors[i].getNbParseError();
        }
        return val;
    }

    /** Resets all statistics on this manager and on every connector. */
    public void statsReset()
    {
        _receivedStats.set(0);
        _sentStats.set(0);
        _nbParseErrors = 0;
        for (int i = 0; _connectors != null && i < _connectors.length; i++)
        {
            _connectors[i].statsReset();
        }
    }

    /**
     * Performs basic sanity checks on an incoming message: uniqueness of
     * From/To/Call-ID/CSeq, presence of required headers, parsable Contact and
     * Route headers, CSeq/request method agreement and response status range.
     * Invalid non-ACK requests are answered with 400 Bad Request (best effort).
     *
     * @return true when the message passed validation
     */
    public boolean preValidateMessage(SipMessage message)
    {
        boolean valid = true;
        try
        {
            if (!isUnique(SipHeaders.FROM_BUFFER, message)
                    || !isUnique(SipHeaders.TO_BUFFER, message)
                    || !isUnique(SipHeaders.CALL_ID_BUFFER, message)
                    || !isUnique(SipHeaders.CSEQ_BUFFER, message))
            {
                valid = false;
            }
            else if (message.getTopVia() == null
                    || message.getFrom() == null
                    || message.getTo() == null
                    || message.getCSeq() == null)
            {
                LOG.info("Received bad message: unparsable required headers");
                valid = false;
            }

            // Force parsing of the Contact header; failures are caught below.
            message.getAddressHeader("contact");
            if (message instanceof SipRequest)
            {
                SipRequest request = (SipRequest) message;
                if (request.getRequestURI() == null)
                    valid = false;
                request.getTopRoute();

                if (!request.getCSeq().getMethod().equals(request.getMethod()))
                {
                    LOG.info("Received bad request: CSeq method does not match");
                    valid = false;
                }
            }
            else
            {
                int status = ((SipResponse) message).getStatus();
                if (status < 100 || status > 699)
                {
                    LOG.info("Received bad response: Invalid status code: " + status);
                    valid = false;
                }
            }
        }
        catch (Exception e)
        {
            LOG.info("Received bad message: Some headers are not parsable: {}", e);
            LOG.debug("Received bad message: Some headers are not parsable", e);
            valid = false;
        }

        try
        {
            if (!valid
                    && message instanceof SipRequest
                    && !message.isAck()
                    && message.getTopVia() != null)
            {
                // TODO send response stateless
                SipResponse response =
                        (SipResponse) ((SipRequest) message).createResponse(SipServletResponse.SC_BAD_REQUEST);
                sendResponse(response);
            }
        }
        catch (Exception e)
        {
            LOG.ignore(e);
        }
        return valid;
    }

    /**
     * @return true when exactly one header with the given name is present;
     *         logs when the header is missing or duplicated.
     */
    private boolean isUnique(Buffer headerName, SipMessage message)
    {
        Iterator<String> it = message.getFields().getValues(headerName);
        if (!it.hasNext())
        {
            LOG.info("Received bad message: Missing required header: " + headerName);
            return false;
        }
        it.next();
        if (it.hasNext())
            LOG.info("Received bad message: Duplicate header: " + headerName);
        return !it.hasNext();
    }

    public AccessLog getAccessLog()
    {
        return _accessLog;
    }

    public Buffer getBuffer() {
        // TODO Auto-generated method stub
        return null;
    }

    public Buffer getHeader() {
        // TODO Auto-generated method stub
        return null;
    }

    public int getMtu()
    {
        return _mtu;
    }

    public void setMtu(int mtu)
    {
        _mtu = mtu;
    }

    public boolean isForceClientRport()
    {
        return _forceClientRport;
    }

    public void setForceClientRport(boolean forceClientRport)
    {
        _forceClientRport = forceClientRport;
    }
}
| |
/*
* Copyright 2012-2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.env;
import java.util.Arrays;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.json.JsonParser;
import org.springframework.boot.json.JsonParserFactory;
import org.springframework.boot.origin.Origin;
import org.springframework.boot.origin.OriginLookup;
import org.springframework.boot.origin.PropertySourceOrigin;
import org.springframework.core.Ordered;
import org.springframework.core.env.ConfigurableEnvironment;
import org.springframework.core.env.Environment;
import org.springframework.core.env.MapPropertySource;
import org.springframework.core.env.MutablePropertySources;
import org.springframework.core.env.PropertySource;
import org.springframework.core.env.StandardEnvironment;
import org.springframework.util.ClassUtils;
import org.springframework.util.CollectionUtils;
import org.springframework.util.StringUtils;
import org.springframework.web.context.support.StandardServletEnvironment;
/**
* An {@link EnvironmentPostProcessor} that parses JSON from
* {@code spring.application.json} or equivalently {@code SPRING_APPLICATION_JSON} and
* adds it as a map property source to the {@link Environment}. The new properties are
* added with higher priority than the system properties.
*
* @author Dave Syer
* @author Phillip Webb
* @author Madhura Bhave
* @author Artsiom Yudovin
* @since 1.3.0
*/
public class SpringApplicationJsonEnvironmentPostProcessor implements EnvironmentPostProcessor, Ordered {

	/**
	 * Name of the {@code spring.application.json} property.
	 */
	public static final String SPRING_APPLICATION_JSON_PROPERTY = "spring.application.json";

	/**
	 * Name of the {@code SPRING_APPLICATION_JSON} environment variable.
	 */
	public static final String SPRING_APPLICATION_JSON_ENVIRONMENT_VARIABLE = "SPRING_APPLICATION_JSON";

	private static final String SERVLET_ENVIRONMENT_CLASS = "org.springframework.web."
			+ "context.support.StandardServletEnvironment";

	private static final Set<String> SERVLET_ENVIRONMENT_PROPERTY_SOURCES = new LinkedHashSet<>(
			Arrays.asList(StandardServletEnvironment.JNDI_PROPERTY_SOURCE_NAME,
					StandardServletEnvironment.SERVLET_CONTEXT_PROPERTY_SOURCE_NAME,
					StandardServletEnvironment.SERVLET_CONFIG_PROPERTY_SOURCE_NAME));

	/**
	 * The default order for the processor.
	 */
	public static final int DEFAULT_ORDER = Ordered.HIGHEST_PRECEDENCE + 5;

	private int order = DEFAULT_ORDER;

	@Override
	public int getOrder() {
		return this.order;
	}

	public void setOrder(int order) {
		this.order = order;
	}

	@Override
	public void postProcessEnvironment(ConfigurableEnvironment environment, SpringApplication application) {
		// Use the first property source that carries an application JSON value.
		for (PropertySource<?> candidate : environment.getPropertySources()) {
			JsonPropertyValue propertyValue = JsonPropertyValue.get(candidate);
			if (propertyValue != null) {
				processJson(environment, propertyValue);
				return;
			}
		}
	}

	private void processJson(ConfigurableEnvironment environment, JsonPropertyValue propertyValue) {
		JsonParser parser = JsonParserFactory.getJsonParser();
		Map<String, Object> parsed = parser.parseMap(propertyValue.getJson());
		if (parsed.isEmpty()) {
			return;
		}
		addJsonPropertySource(environment, new JsonPropertySource(propertyValue, flatten(parsed)));
	}

	/**
	 * Flatten the map keys using period separator.
	 * @param map the map that should be flattened
	 * @return the flattened map
	 */
	private Map<String, Object> flatten(Map<String, Object> map) {
		Map<String, Object> flattened = new LinkedHashMap<>();
		flatten(null, flattened, map);
		return flattened;
	}

	private void flatten(String prefix, Map<String, Object> result, Map<String, Object> map) {
		String namePrefix = (prefix != null) ? prefix + "." : "";
		for (Map.Entry<String, Object> entry : map.entrySet()) {
			extract(namePrefix + entry.getKey(), result, entry.getValue());
		}
	}

	@SuppressWarnings("unchecked")
	private void extract(String name, Map<String, Object> result, Object value) {
		// Non-empty maps and collections are recursively flattened; everything
		// else (including empty containers) is stored under the current name.
		if (value instanceof Map && !CollectionUtils.isEmpty((Map<?, ?>) value)) {
			flatten(name, result, (Map<String, Object>) value);
			return;
		}
		if (value instanceof Collection && !CollectionUtils.isEmpty((Collection<?>) value)) {
			int index = 0;
			for (Object element : (Collection<Object>) value) {
				extract(name + "[" + index + "]", result, element);
				index++;
			}
			return;
		}
		result.put(name, value);
	}

	private void addJsonPropertySource(ConfigurableEnvironment environment, PropertySource<?> source) {
		MutablePropertySources sources = environment.getPropertySources();
		String anchor = findPropertySource(sources);
		if (sources.contains(anchor)) {
			sources.addBefore(anchor, source);
		}
		else {
			sources.addFirst(source);
		}
	}

	private String findPropertySource(MutablePropertySources sources) {
		// In a servlet environment, insert just before the first servlet-related
		// source; otherwise anchor on the system properties source.
		if (ClassUtils.isPresent(SERVLET_ENVIRONMENT_CLASS, null)) {
			for (PropertySource<?> source : sources) {
				if (SERVLET_ENVIRONMENT_PROPERTY_SOURCES.contains(source.getName())) {
					return source.getName();
				}
			}
		}
		return StandardEnvironment.SYSTEM_PROPERTIES_PROPERTY_SOURCE_NAME;
	}

	private static class JsonPropertySource extends MapPropertySource implements OriginLookup<String> {

		private final JsonPropertyValue propertyValue;

		JsonPropertySource(JsonPropertyValue propertyValue, Map<String, Object> source) {
			super(SPRING_APPLICATION_JSON_PROPERTY, source);
			this.propertyValue = propertyValue;
		}

		@Override
		public Origin getOrigin(String key) {
			// All flattened keys share the origin of the JSON-bearing property.
			return this.propertyValue.getOrigin();
		}

	}

	private static class JsonPropertyValue {

		private static final String[] CANDIDATES = { SPRING_APPLICATION_JSON_PROPERTY,
				SPRING_APPLICATION_JSON_ENVIRONMENT_VARIABLE };

		private final PropertySource<?> propertySource;

		private final String propertyName;

		private final String json;

		JsonPropertyValue(PropertySource<?> propertySource, String propertyName, String json) {
			this.propertySource = propertySource;
			this.propertyName = propertyName;
			this.json = json;
		}

		String getJson() {
			return this.json;
		}

		Origin getOrigin() {
			return PropertySourceOrigin.get(this.propertySource, this.propertyName);
		}

		static JsonPropertyValue get(PropertySource<?> propertySource) {
			// Property name takes precedence over the environment variable form.
			for (String candidate : CANDIDATES) {
				Object value = propertySource.getProperty(candidate);
				if (value instanceof String && StringUtils.hasLength((String) value)) {
					return new JsonPropertyValue(propertySource, candidate, (String) value);
				}
			}
			return null;
		}

	}

}
| |
// Licensed to the Software Freedom Conservancy (SFC) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The SFC licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.openqa.selenium.remote.internal;
import static org.hamcrest.Matchers.instanceOf;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import org.openqa.selenium.By;
import org.openqa.selenium.Dimension;
import org.openqa.selenium.OutputType;
import org.openqa.selenium.Point;
import org.openqa.selenium.Rectangle;
import org.openqa.selenium.WebDriverException;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.internal.WrapsElement;
import org.openqa.selenium.remote.Dialect;
import org.openqa.selenium.remote.RemoteWebElement;
import java.util.Collection;
import java.util.List;
import java.util.Map;
/**
 * Unit tests for {@code WebElementToJsonConverter}: primitives pass through
 * unchanged, (wrapped) WebElements become wire-protocol maps carrying both the
 * OSS and W3C element keys, and collections/maps/arrays are converted
 * recursively. Map keys must be Strings; unknown types are rejected.
 */
@RunWith(JUnit4.class)
public class WebElementToJsonConverterTest {

  // Converter under test is stateless, so a single shared instance is fine.
  private static final WebElementToJsonConverter CONVERTER = new WebElementToJsonConverter();

  @Test
  public void returnsPrimitivesAsIs() {
    assertNull(CONVERTER.apply(null));
    assertEquals("abc", CONVERTER.apply("abc"));
    assertEquals(Boolean.TRUE, CONVERTER.apply(Boolean.TRUE));
    assertEquals(Integer.valueOf(123), CONVERTER.apply(123));
    assertEquals(Math.PI, CONVERTER.apply(Math.PI));
  }

  @Test
  public void convertsRemoteWebElementToWireProtocolMap() {
    RemoteWebElement element = new RemoteWebElement();
    element.setId("abc123");
    Object value = CONVERTER.apply(element);
    assertIsWebElementObject(value, "abc123");
  }

  @Test
  public void unwrapsWrappedElements() {
    RemoteWebElement element = new RemoteWebElement();
    element.setId("abc123");
    Object value = CONVERTER.apply(wrapElement(element));
    assertIsWebElementObject(value, "abc123");
  }

  @Test
  public void unwrapsWrappedElements_multipleLevelsOfWrapping() {
    RemoteWebElement element = new RemoteWebElement();
    element.setId("abc123");
    // Four levels of wrapping must still unwrap down to the innermost element.
    WrappingWebElement wrapped = wrapElement(element);
    wrapped = wrapElement(wrapped);
    wrapped = wrapElement(wrapped);
    wrapped = wrapElement(wrapped);
    Object value = CONVERTER.apply(wrapped);
    assertIsWebElementObject(value, "abc123");
  }

  @Test
  public void convertsSimpleCollections() {
    Object converted = CONVERTER.apply(Lists.newArrayList(null, "abc", true, 123, Math.PI));
    assertThat(converted, instanceOf(Collection.class));
    List<?> list = Lists.newArrayList((Collection<?>) converted);
    assertContentsInOrder(list, null, "abc", true, 123, Math.PI);
  }

  @Test
  public void convertsNestedCollections_simpleValues() {
    List<?> innerList = Lists.newArrayList(123, "abc");
    List<Object> outerList = Lists.newArrayList((Object) "apples", "oranges");
    outerList.add(innerList);
    Object converted = CONVERTER.apply(outerList);
    assertThat(converted, instanceOf(Collection.class));
    List<?> list = ImmutableList.copyOf((Collection<?>) converted);
    assertEquals(3, list.size());
    assertEquals("apples", list.get(0));
    assertEquals("oranges", list.get(1));
    // The nested list must itself be converted to a collection.
    assertThat(list.get(2), instanceOf(Collection.class));
    list = ImmutableList.copyOf((Collection<?>) list.get(2));
    assertContentsInOrder(list, 123, "abc");
  }

  @Test
  public void requiresMapsToHaveStringKeys() {
    try {
      CONVERTER.apply(ImmutableMap.of(new Object(), "bunny"));
      fail();
    } catch (IllegalArgumentException expected) {
      // non-String map key must be rejected
    }
  }

  @Test
  public void requiresNestedMapsToHaveStringKeys() {
    try {
      CONVERTER.apply(ImmutableMap.of(
          "one", ImmutableMap.of(
              "two", ImmutableMap.of(
                  Integer.valueOf(3), "not good"))));
      fail();
    } catch (IllegalArgumentException expected) {
      // key check must apply recursively, not only at the top level
    }
  }

  @Test
  public void convertsASimpleMap() {
    Object converted = CONVERTER.apply(ImmutableMap.of(
        "one", 1,
        "fruit", "apples",
        "honest", true));
    assertThat(converted, instanceOf(Map.class));
    @SuppressWarnings("unchecked")
    Map<String, Object> map = (Map<String, Object>) converted;
    assertEquals(3, map.size());
    assertEquals(1, map.get("one"));
    assertEquals("apples", map.get("fruit"));
    assertEquals(true, map.get("honest"));
  }

  @SuppressWarnings("unchecked")
  @Test
  public void convertsANestedMap() {
    Object converted = CONVERTER.apply(ImmutableMap.of(
        "one", 1,
        "fruit", "apples",
        "honest", true,
        "nested", ImmutableMap.of("bugs", "bunny")));
    assertThat(converted, instanceOf(Map.class));
    Map<String, Object> map = (Map<String, Object>) converted;
    assertEquals(4, map.size());
    assertEquals(1, map.get("one"));
    assertEquals("apples", map.get("fruit"));
    assertEquals(true, map.get("honest"));
    assertThat(map.get("nested"), instanceOf(Map.class));
    map = (Map<String, Object>) map.get("nested");
    assertEquals(1, map.size());
    assertEquals("bunny", map.get("bugs"));
  }

  @SuppressWarnings("unchecked")
  @Test
  public void convertsAListWithAWebElement() {
    RemoteWebElement element = new RemoteWebElement();
    element.setId("abc123");
    RemoteWebElement element2 = new RemoteWebElement();
    element2.setId("anotherId");
    Object value = CONVERTER.apply(Lists.newArrayList(element, element2));
    assertThat(value, instanceOf(Collection.class));
    List<Object> list = Lists.newArrayList((Collection<Object>) value);
    assertEquals(2, list.size());
    assertIsWebElementObject(list.get(0), "abc123");
    assertIsWebElementObject(list.get(1), "anotherId");
  }

  @SuppressWarnings("unchecked")
  @Test
  public void convertsAMapWithAWebElement() {
    RemoteWebElement element = new RemoteWebElement();
    element.setId("abc123");
    Object value = CONVERTER.apply(ImmutableMap.of("one", element));
    assertThat(value, instanceOf(Map.class));
    Map<String, Object> map = (Map<String, Object>) value;
    assertEquals(1, map.size());
    assertIsWebElementObject(map.get("one"), "abc123");
  }

  @Test
  public void convertsAnArray() {
    Object value = CONVERTER.apply(new Object[] {
        "abc123", true, 123, Math.PI
    });
    assertThat(value, instanceOf(Collection.class));
    assertContentsInOrder(Lists.newArrayList((Collection<?>) value),
        "abc123", true, 123, Math.PI);
  }

  @Test
  public void convertsAnArrayWithAWebElement() {
    RemoteWebElement element = new RemoteWebElement();
    element.setId("abc123");
    Object value = CONVERTER.apply(new Object[] { element });
    // Expect one wire-protocol map carrying the id under both dialect keys.
    assertContentsInOrder(Lists.newArrayList((Collection<?>) value),
        ImmutableMap.of(
            Dialect.OSS.getEncodedElementKey(), "abc123",
            Dialect.W3C.getEncodedElementKey(), "abc123"));
  }

  @Test
  public void rejectsUnrecognizedTypes() {
    try {
      CONVERTER.apply(new Object());
      fail();
    } catch (IllegalArgumentException expected) {
      // arbitrary objects are not JSON-convertible
    }
  }

  private static WrappingWebElement wrapElement(WebElement element) {
    return new WrappingWebElement(element);
  }

  /**
   * Asserts that {@code value} is a wire-protocol element map: exactly two
   * entries, with both the OSS and W3C element keys mapped to
   * {@code expectedKey}.
   */
  private static void assertIsWebElementObject(Object value, String expectedKey) {
    assertThat(value, instanceOf(Map.class));
    Map<?, ?> map = (Map<?, ?>) value;
    assertEquals(2, map.size());
    assertTrue(map.containsKey(Dialect.OSS.getEncodedElementKey()));
    assertEquals(expectedKey, map.get(Dialect.OSS.getEncodedElementKey()));
    assertTrue(map.containsKey(Dialect.W3C.getEncodedElementKey()));
    assertEquals(expectedKey, map.get(Dialect.W3C.getEncodedElementKey()));
  }

  // Order-sensitive equality check between a converted list and the expected values.
  private static void assertContentsInOrder(List<?> list, Object... expectedContents) {
    List<Object> expected = Lists.newArrayList(expectedContents);
    assertEquals(expected, list);
  }

  /**
   * Minimal {@link WrapsElement} stub: only {@link #getWrappedElement()} is
   * functional; every WebElement method throws since the converter is expected
   * to unwrap the stub rather than interact with it.
   */
  private static class WrappingWebElement implements WebElement, WrapsElement {

    private WebElement element;

    public WrappingWebElement(WebElement element) {
      this.element = element;
    }

    public WebElement getWrappedElement() {
      return element;
    }

    public void click() {
      throw new UnsupportedOperationException();
    }

    public void submit() {
      throw new UnsupportedOperationException();
    }

    public void sendKeys(CharSequence... keysToSend) {
      throw new UnsupportedOperationException();
    }

    public void clear() {
      throw new UnsupportedOperationException();
    }

    public String getTagName() {
      throw new UnsupportedOperationException();
    }

    public String getAttribute(String name) {
      throw new UnsupportedOperationException();
    }

    public boolean isSelected() {
      throw new UnsupportedOperationException();
    }

    public boolean isEnabled() {
      throw new UnsupportedOperationException();
    }

    public String getText() {
      throw new UnsupportedOperationException();
    }

    public List<WebElement> findElements(By by) {
      throw new UnsupportedOperationException();
    }

    public WebElement findElement(By by) {
      throw new UnsupportedOperationException();
    }

    public boolean isDisplayed() {
      throw new UnsupportedOperationException();
    }

    public Point getLocation() {
      throw new UnsupportedOperationException();
    }

    public Dimension getSize() {
      throw new UnsupportedOperationException();
    }

    public Rectangle getRect() {
      throw new UnsupportedOperationException();
    }

    public String getCssValue(String propertyName) {
      throw new UnsupportedOperationException();
    }

    public <X> X getScreenshotAs(OutputType<X> outputType) throws WebDriverException {
      throw new UnsupportedOperationException();
    }
  }
}
| |
package net.novaviper.zeroquest.common.world.biome;
import java.util.Arrays;
import java.util.Random;
import net.minecraft.block.Block;
import net.minecraft.block.BlockColored;
import net.minecraft.block.BlockDirt;
import net.minecraft.block.BlockSand;
import net.minecraft.block.material.Material;
import net.minecraft.block.state.IBlockState;
import net.minecraft.init.Blocks;
import net.minecraft.item.EnumDyeColor;
import net.minecraft.util.BlockPos;
import net.minecraft.world.World;
import net.minecraft.world.biome.BiomeGenBase;
import net.minecraft.world.chunk.ChunkPrimer;
import net.minecraft.world.gen.NoiseGeneratorPerlin;
import net.minecraft.world.gen.feature.WorldGenAbstractTree;
import net.minecraftforge.fml.relauncher.Side;
import net.minecraftforge.fml.relauncher.SideOnly;
import net.novaviper.zeroquest.common.entity.mob.EntityRiggator;
import net.novaviper.zeroquest.common.lib.Constants;
/**
 * Mesa-style biome for this mod, adapted from the vanilla mesa biome
 * decompile (obfuscated MCP field/method names are retained). Generates
 * banded stained-clay terrain; flags select pillar and plateau variants.
 */
public class BiomeGenNileMesa extends BiomeGenBase
{
    // Palette of 64 clay "band" block states, indexed by (y + noise offset) % 64.
    private IBlockState[] field_150621_aC;
    // World seed the band palette / noise generators were last built for.
    private long field_150622_aD;
    // Large-scale noise used for pillar height selection.
    private NoiseGeneratorPerlin field_150623_aE;
    // Fine noise used to cap pillar height.
    private NoiseGeneratorPerlin field_150624_aF;
    // Noise that jitters which band a given column samples in func_180629_a.
    private NoiseGeneratorPerlin field_150625_aG;
    // When true, tall stone pillars are raised in genTerrainBlocks (bryce-like variant — TODO confirm naming).
    private boolean field_150626_aH;
    // When true, high terrain gets coarse-dirt/grass caps and trees are enabled in the constructor.
    private boolean field_150620_aI;
    private static final String __OBFID = "CL_00000176";

    /**
     * @param p_i45380_1_ biome id
     * @param p_i45380_2_ enable tall pillar generation
     * @param p_i45380_3_ enable plateau (grass/tree) variant
     */
    public BiomeGenNileMesa(int p_i45380_1_, boolean p_i45380_2_, boolean p_i45380_3_)
    {
        super(p_i45380_1_);
        this.field_150626_aH = p_i45380_2_;
        this.field_150620_aI = p_i45380_3_;
        this.setDisableRain();
        this.setTemperatureRainfall(2.0F, 0.0F);
        this.spawnableCreatureList.clear();
        this.topBlock = Blocks.sand.getDefaultState();
        this.fillerBlock = Blocks.stained_hardened_clay.getDefaultState();
        // -999 effectively disables tree decoration unless the plateau flag re-enables it below.
        this.theBiomeDecorator.treesPerChunk = -999;
        this.theBiomeDecorator.deadBushPerChunk = 20;
        this.theBiomeDecorator.reedsPerChunk = 3;
        this.theBiomeDecorator.cactiPerChunk = 5;
        this.theBiomeDecorator.flowersPerChunk = 0;
        if (p_i45380_3_)
        {
            this.theBiomeDecorator.treesPerChunk = 5;
        }
    }

    // Always uses the plain tree generator regardless of chance roll.
    public WorldGenAbstractTree genBigTreeChance(Random p_150567_1_)
    {
        return this.worldGeneratorTrees;
    }

    public void decorate(World worldIn, Random p_180624_2_, BlockPos p_180624_3_)
    {
        super.decorate(worldIn, p_180624_2_, p_180624_3_);
    }

    /**
     * Generates one terrain column of the chunk: optional pillar height
     * (d5), bedrock floor, banded clay surface and fillers.
     *
     * @param p_180622_4_ world x of the column
     * @param p_180622_5_ world z of the column
     * @param p_180622_6_ terrain noise value for the column
     */
    public void genTerrainBlocks(World worldIn, Random p_180622_2_, ChunkPrimer p_180622_3_, int p_180622_4_, int p_180622_5_, double p_180622_6_)
    {
        // Lazily (re)build the band palette and noise generators when the seed changes.
        if (this.field_150621_aC == null || this.field_150622_aD != worldIn.getSeed())
        {
            this.func_150619_a(worldIn.getSeed());
        }
        if (this.field_150623_aE == null || this.field_150624_aF == null || this.field_150622_aD != worldIn.getSeed())
        {
            Random random1 = new Random(this.field_150622_aD);
            this.field_150623_aE = new NoiseGeneratorPerlin(random1, 4);
            this.field_150624_aF = new NoiseGeneratorPerlin(random1, 1);
        }
        this.field_150622_aD = worldIn.getSeed();
        double d5 = 0.0D;
        int k;
        int l;
        if (this.field_150626_aH)
        {
            // Mix chunk-aligned and in-chunk coordinates for the pillar noise lookup.
            k = (p_180622_4_ & -16) + (p_180622_5_ & 15);
            l = (p_180622_5_ & -16) + (p_180622_4_ & 15);
            double d1 = Math.min(Math.abs(p_180622_6_), this.field_150623_aE.func_151601_a((double)k * 0.25D, (double)l * 0.25D));
            if (d1 > 0.0D)
            {
                double d2 = 0.001953125D;
                double d3 = Math.abs(this.field_150624_aF.func_151601_a((double)k * d2, (double)l * d2));
                d5 = d1 * d1 * 2.5D;
                // Fine noise caps how tall the pillar may get.
                double d4 = Math.ceil(d3 * 50.0D) + 14.0D;
                if (d5 > d4)
                {
                    d5 = d4;
                }
                d5 += 64.0D;
            }
        }
        k = p_180622_4_ & 15;
        l = p_180622_5_ & 15;
        boolean flag = true;
        IBlockState iblockstate = Blocks.stained_hardened_clay.getDefaultState();
        IBlockState iblockstate3 = this.fillerBlock;
        // Depth of the surface/filler layer for this column.
        int i1 = (int)(p_180622_6_ / 3.0D + 3.0D + p_180622_2_.nextDouble() * 0.25D);
        boolean flag1 = Math.cos(p_180622_6_ / 3.0D * Math.PI) > 0.0D;
        int j1 = -1;
        boolean flag2 = false;
        // Walk the column top-down, replacing stone with surface/filler/band blocks.
        for (int k1 = 255; k1 >= 0; --k1)
        {
            // Raise the pillar: fill air below the pillar height with stone.
            if (p_180622_3_.getBlockState(l, k1, k).getBlock().getMaterial() == Material.air && k1 < (int)d5)
            {
                p_180622_3_.setBlockState(l, k1, k, Blocks.stone.getDefaultState());
            }
            if (k1 <= p_180622_2_.nextInt(5))
            {
                // Randomized bedrock floor in the bottom few layers.
                p_180622_3_.setBlockState(l, k1, k, Blocks.bedrock.getDefaultState());
            }
            else
            {
                IBlockState iblockstate1 = p_180622_3_.getBlockState(l, k1, k);
                if (iblockstate1.getBlock().getMaterial() == Material.air)
                {
                    // Air resets the depth counter; the next stone starts a new surface.
                    j1 = -1;
                }
                else if (iblockstate1.getBlock() == Blocks.stone)
                {
                    IBlockState iblockstate2;
                    if (j1 == -1)
                    {
                        // First stone under air: pick surface and filler for this segment.
                        flag2 = false;
                        if (i1 <= 0)
                        {
                            iblockstate = null;
                            iblockstate3 = Blocks.stone.getDefaultState();
                        }
                        else if (k1 >= 59 && k1 <= 64)
                        {
                            iblockstate = Blocks.stained_hardened_clay.getDefaultState();
                            iblockstate3 = this.fillerBlock;
                        }
                        if (k1 < 63 && (iblockstate == null || iblockstate.getBlock().getMaterial() == Material.air))
                        {
                            // Below sea level with no surface block: water instead.
                            iblockstate = Blocks.water.getDefaultState();
                        }
                        j1 = i1 + Math.max(0, k1 - 63);
                        if (k1 >= 62)
                        {
                            if (this.field_150620_aI && k1 > 86 + i1 * 2)
                            {
                                // Plateau variant: coarse dirt or grass cap on high terrain.
                                if (flag1)
                                {
                                    p_180622_3_.setBlockState(l, k1, k, Blocks.dirt.getDefaultState().withProperty(BlockDirt.VARIANT, BlockDirt.DirtType.COARSE_DIRT));
                                }
                                else
                                {
                                    p_180622_3_.setBlockState(l, k1, k, Blocks.grass.getDefaultState());
                                }
                            }
                            else if (k1 > 66 + i1)
                            {
                                if (k1 >= 64 && k1 <= 127)
                                {
                                    if (flag1)
                                    {
                                        iblockstate2 = Blocks.hardened_clay.getDefaultState();
                                    }
                                    else
                                    {
                                        // Sample the banded clay palette for this height.
                                        iblockstate2 = this.func_180629_a(p_180622_4_, k1, p_180622_5_);
                                    }
                                }
                                else
                                {
                                    iblockstate2 = Blocks.stained_hardened_clay.getDefaultState().withProperty(BlockColored.COLOR, EnumDyeColor.ORANGE);
                                }
                                p_180622_3_.setBlockState(l, k1, k, iblockstate2);
                            }
                            else
                            {
                                p_180622_3_.setBlockState(l, k1, k, this.topBlock);
                                flag2 = true;
                            }
                        }
                        else
                        {
                            p_180622_3_.setBlockState(l, k1, k, iblockstate3);
                            if (iblockstate3.getBlock() == Blocks.stained_hardened_clay)
                            {
                                p_180622_3_.setBlockState(l, k1, k, iblockstate3.getBlock().getDefaultState().withProperty(BlockColored.COLOR, EnumDyeColor.ORANGE));
                            }
                        }
                    }
                    else if (j1 > 0)
                    {
                        // Still within the surface/filler depth below the top.
                        --j1;
                        if (flag2)
                        {
                            p_180622_3_.setBlockState(l, k1, k, Blocks.stained_hardened_clay.getDefaultState().withProperty(BlockColored.COLOR, EnumDyeColor.ORANGE));
                        }
                        else
                        {
                            iblockstate2 = this.func_180629_a(p_180622_4_, k1, p_180622_5_);
                            p_180622_3_.setBlockState(l, k1, k, iblockstate2);
                        }
                    }
                }
            }
        }
    }

    /**
     * (Re)builds the 64-entry clay band palette for the given seed: a base of
     * hardened clay overlaid with orange, yellow, brown, red and
     * white/silver-edged bands at seeded-random heights.
     */
    public void func_150619_a(long p_150619_1_)
    {
        this.field_150621_aC = new IBlockState[64];
        Arrays.fill(this.field_150621_aC, Blocks.hardened_clay.getDefaultState());
        Random random = new Random(p_150619_1_);
        this.field_150625_aG = new NoiseGeneratorPerlin(random, 1);
        int j;
        // Scatter single orange bands across the palette.
        for (j = 0; j < 64; ++j)
        {
            j += random.nextInt(5) + 1;
            if (j < 64)
            {
                this.field_150621_aC[j] = Blocks.stained_hardened_clay.getDefaultState().withProperty(BlockColored.COLOR, EnumDyeColor.ORANGE);
            }
        }
        j = random.nextInt(4) + 2;
        int k;
        int l;
        int i1;
        int j1;
        // Yellow bands, 1-3 entries thick.
        for (k = 0; k < j; ++k)
        {
            l = random.nextInt(3) + 1;
            i1 = random.nextInt(64);
            for (j1 = 0; i1 + j1 < 64 && j1 < l; ++j1)
            {
                this.field_150621_aC[i1 + j1] = Blocks.stained_hardened_clay.getDefaultState().withProperty(BlockColored.COLOR, EnumDyeColor.YELLOW);
            }
        }
        k = random.nextInt(4) + 2;
        int k1;
        // Brown bands, 2-4 entries thick.
        for (l = 0; l < k; ++l)
        {
            i1 = random.nextInt(3) + 2;
            j1 = random.nextInt(64);
            for (k1 = 0; j1 + k1 < 64 && k1 < i1; ++k1)
            {
                this.field_150621_aC[j1 + k1] = Blocks.stained_hardened_clay.getDefaultState().withProperty(BlockColored.COLOR, EnumDyeColor.BROWN);
            }
        }
        l = random.nextInt(4) + 2;
        // Red bands, 1-3 entries thick.
        for (i1 = 0; i1 < l; ++i1)
        {
            j1 = random.nextInt(3) + 1;
            k1 = random.nextInt(64);
            for (int l1 = 0; k1 + l1 < 64 && l1 < j1; ++l1)
            {
                this.field_150621_aC[k1 + l1] = Blocks.stained_hardened_clay.getDefaultState().withProperty(BlockColored.COLOR, EnumDyeColor.RED);
            }
        }
        i1 = random.nextInt(3) + 3;
        j1 = 0;
        // White bands with randomized silver edges above/below.
        for (k1 = 0; k1 < i1; ++k1)
        {
            byte b0 = 1;
            j1 += random.nextInt(16) + 4;
            for (int i2 = 0; j1 + i2 < 64 && i2 < b0; ++i2)
            {
                this.field_150621_aC[j1 + i2] = Blocks.stained_hardened_clay.getDefaultState().withProperty(BlockColored.COLOR, EnumDyeColor.WHITE);
                if (j1 + i2 > 1 && random.nextBoolean())
                {
                    this.field_150621_aC[j1 + i2 - 1] = Blocks.stained_hardened_clay.getDefaultState().withProperty(BlockColored.COLOR, EnumDyeColor.SILVER);
                }
                if (j1 + i2 < 63 && random.nextBoolean())
                {
                    this.field_150621_aC[j1 + i2 + 1] = Blocks.stained_hardened_clay.getDefaultState().withProperty(BlockColored.COLOR, EnumDyeColor.SILVER);
                }
            }
        }
    }

    /**
     * Picks the band block for world position (x, y, z) by jittering y with
     * low-frequency noise and indexing the 64-entry palette.
     * NOTE(review): both noise coordinates use p_180629_1_ (x); this matches
     * the vanilla mesa decompile's quirk — confirm before "fixing", as
     * changing it would alter world generation.
     */
    public IBlockState func_180629_a(int p_180629_1_, int p_180629_2_, int p_180629_3_)
    {
        int l = (int)Math.round(this.field_150625_aG.func_151601_a((double)p_180629_1_ * 1.0D / 512.0D, (double)p_180629_1_ * 1.0D / 512.0D) * 2.0D);
        return this.field_150621_aC[(p_180629_2_ + l + 64) % 64];
    }

    @SideOnly(Side.CLIENT)
    public int getFoliageColorAtPos(BlockPos p_180625_1_)
    {
        return 10387789;
    }

    @SideOnly(Side.CLIENT)
    public int getGrassColorAtPos(BlockPos p_180627_1_)
    {
        return 9470285;
    }

    /**
     * Builds the mutated ("M" / bryce) form of this biome with the given id.
     */
    public BiomeGenBase createMutatedBiome(int p_180277_1_)
    {
        boolean flag = this.biomeID == BiomeGenBase.mesa.biomeID;
        BiomeGenNileMesa biomegenmesa = new BiomeGenNileMesa(p_180277_1_, flag, this.field_150620_aI);
        if (!flag)
        {
            biomegenmesa.setHeight(height_LowHills);
            biomegenmesa.setBiomeName(this.biomeName + " M");
        }
        else
        {
            biomegenmesa.setBiomeName(this.biomeName + " (Bryce)");
        }
        biomegenmesa.func_150557_a(this.color, true);
        return biomegenmesa;
    }

    /**
     * Provides the basic foliage color based on the biome temperature and rainfall
     */
    @SideOnly(Side.CLIENT)
    public int getModdedBiomeFoliageColor(int par1){return 0xCC6600;}

    /**
     * Provides the basic grass color based on the biome temperature and rainfall
     */
    @SideOnly(Side.CLIENT)
    public int getBiomeGrassColor(int par1){return 0xCC3300;}
}
| |
package cz.metacentrum.perun.webgui.tabs.groupstabs;
import com.google.gwt.core.client.JavaScriptObject;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.dom.client.ClickHandler;
import com.google.gwt.resources.client.ImageResource;
import com.google.gwt.user.cellview.client.CellTable;
import com.google.gwt.user.client.ui.*;
import cz.metacentrum.perun.webgui.client.PerunWebSession;
import cz.metacentrum.perun.webgui.client.UiElements;
import cz.metacentrum.perun.webgui.client.localization.ButtonTranslation;
import cz.metacentrum.perun.webgui.client.mainmenu.MainMenu;
import cz.metacentrum.perun.webgui.client.resources.*;
import cz.metacentrum.perun.webgui.json.GetEntityById;
import cz.metacentrum.perun.webgui.json.JsonCallbackEvents;
import cz.metacentrum.perun.webgui.json.JsonUtils;
import cz.metacentrum.perun.webgui.json.extSourcesManager.GetGroupExtSources;
import cz.metacentrum.perun.webgui.json.extSourcesManager.GetVoExtSources;
import cz.metacentrum.perun.webgui.json.extSourcesManager.RemoveExtSource;
import cz.metacentrum.perun.webgui.model.ExtSource;
import cz.metacentrum.perun.webgui.model.Group;
import cz.metacentrum.perun.webgui.model.VirtualOrganization;
import cz.metacentrum.perun.webgui.tabs.*;
import cz.metacentrum.perun.webgui.tabs.vostabs.AddVoExtSourceTabItem;
import cz.metacentrum.perun.webgui.widgets.CustomButton;
import cz.metacentrum.perun.webgui.widgets.ExtendedSuggestBox;
import cz.metacentrum.perun.webgui.widgets.TabMenu;
import java.util.ArrayList;
import java.util.Map;
/**
* Group ext. sources management page
*
* @author Pavel Zlamal <256627@mail.muni.cz>
* @author Vaclav Mach <374430@mail.muni.cz>
*/
public class GroupExtSourcesTabItem implements TabItem, TabItemWithUrl {

	/**
	 * Perun web session
	 */
	private PerunWebSession session = PerunWebSession.getInstance();

	/**
	 * Content widget - should be simple panel
	 */
	private SimplePanel contentWidget = new SimplePanel();

	/**
	 * Title widget
	 */
	private Label titleWidget = new Label("Loading group ext sources");

	// group whose ext sources are managed (may arrive asynchronously)
	private Group group;
	private int groupId;
	// VO the group belongs to; all admin authorization checks run against it
	private int voId;

	/**
	 * Creates a tab instance
	 *
	 * @param group group to show external sources for
	 */
	public GroupExtSourcesTabItem(Group group){
		this.group = group;
		this.groupId = group.getId();
		this.voId = group.getVoId();
	}

	/**
	 * Creates a tab instance
	 *
	 * @param groupId ID of group to show external sources for
	 */
	public GroupExtSourcesTabItem(int groupId){
		this.groupId = groupId;
		JsonCallbackEvents events = new JsonCallbackEvents(){
			public void onFinished(JavaScriptObject jso) {
				group = jso.cast();
				// FIX: keep voId in sync with the loaded group; previously it
				// stayed 0, so session.isVoAdmin(voId) checks in draw() and
				// isAuthorized() always failed when the tab was loaded by ID.
				voId = group.getVoId();
			}
		};
		new GetEntityById(PerunEntity.GROUP, groupId, events).retrieveData();
	}

	public boolean isPrepared(){
		return group != null;
	}

	/**
	 * Builds the tab UI: refresh/add/remove/filter menu plus the table of the
	 * group's external sources.
	 *
	 * @return widget with the tab content
	 */
	public Widget draw() {
		this.titleWidget.setText(Utils.getStrippedStringWithEllipsis(group.getName())+": "+"ext sources");

		// main panel
		VerticalPanel vp = new VerticalPanel();
		vp.setSize("100%", "100%");

		// HORIZONTAL MENU
		TabMenu menu = new TabMenu();
		menu.addWidget(UiElements.getRefreshButton(this));

		// get group ext sources
		final GetGroupExtSources extSources = new GetGroupExtSources(groupId);

		// refresh table event
		final JsonCallbackEvents events = JsonCallbackEvents.refreshTableEvents(extSources);

		// add ext source button
		CustomButton addButton = TabMenu.getPredefinedButton(ButtonType.ADD, true, ButtonTranslation.INSTANCE.addExtSource(), new ClickHandler() {
			public void onClick(ClickEvent event) {
				session.getTabManager().addTabToCurrentTab(new AddGroupExtSourceTabItem(groupId), true);
			}
		});
		if (session.isVoAdmin(voId)) {
			menu.addWidget(addButton);
		}

		final CustomButton removeButton = TabMenu.getPredefinedButton(ButtonType.REMOVE, ButtonTranslation.INSTANCE.removeExtSource());
		removeButton.addClickHandler(new ClickHandler() {
			public void onClick(ClickEvent event) {
				final ArrayList<ExtSource> extSourcesToRemove = extSources.getTableSelectedList();
				String text = "Following external sources will be removed from Group. You won't be able to import members from them anymore.";
				UiElements.showDeleteConfirm(extSourcesToRemove, text, new ClickHandler() {
					@Override
					public void onClick(ClickEvent clickEvent) {
						// TODO - SHOULD HAVE ONLY ONE CALLBACK TO CORE !!
						for (int i=0; i<extSourcesToRemove.size(); i++) {
							RemoveExtSource request;
							// only the last request triggers the table refresh
							if (i == extSourcesToRemove.size()-1) {
								request = new RemoveExtSource(JsonCallbackEvents.disableButtonEvents(removeButton, events));
							} else {
								request = new RemoveExtSource(JsonCallbackEvents.disableButtonEvents(removeButton));
							}
							request.removeGroupExtSource(groupId, extSourcesToRemove.get(i).getId());
						}
					}
				});
			}
		});
		if (session.isVoAdmin(voId)) {
			menu.addWidget(removeButton);
		}

		// authorization - enable buttons for vo admin only.
		if (!session.isVoAdmin(voId)) {
			addButton.setEnabled(false);
			removeButton.setEnabled(false);
			extSources.setCheckable(false);
		}

		menu.addFilterWidget(new ExtendedSuggestBox(extSources.getOracle()), new PerunSearchEvent() {
			@Override
			public void searchFor(String text) {
				extSources.filterTable(text);
			}
		}, "Filter external sources by name or type");

		// add menu to the main panel
		vp.add(menu);
		vp.setCellHeight(menu, "30px");

		CellTable<ExtSource> table = extSources.getTable();
		if (session.isVoAdmin(voId)) {
			// remove button is enabled only while some row is selected
			removeButton.setEnabled(false);
			JsonUtils.addTableManagedButton(extSources, table, removeButton);
		}

		table.addStyleName("perun-table");
		table.setWidth("100%");
		ScrollPanel sp = new ScrollPanel(table);
		sp.addStyleName("perun-tableScrollPanel");
		vp.add(sp);

		session.getUiElements().resizePerunTable(sp, 350, this);

		this.contentWidget.setWidget(vp);

		return getWidget();
	}

	public Widget getWidget() {
		return this.contentWidget;
	}

	public Widget getTitle() {
		return this.titleWidget;
	}

	public ImageResource getIcon() {
		return SmallIcons.INSTANCE.worldIcon();
	}

	@Override
	public int hashCode() {
		final int prime = 1601;
		int result = 1;
		result = prime * result + groupId;
		return result;
	}

	@Override
	public boolean equals(Object obj) {
		if (this == obj)
			return true;
		if (obj == null)
			return false;
		if (getClass() != obj.getClass())
			return false;
		GroupExtSourcesTabItem other = (GroupExtSourcesTabItem) obj;
		return groupId == other.groupId;
	}

	public boolean multipleInstancesEnabled() {
		return false;
	}

	public void open() {
		session.getUiElements().getMenu().openMenu(MainMenu.GROUP_ADMIN);
		session.getUiElements().getBreadcrumbs().setLocation(group, "External sources", getUrlWithParameters());
		if(group != null){
			session.setActiveGroup(group);
			return;
		}
		session.setActiveGroupId(groupId);
	}

	public boolean isAuthorized() {
		return session.isVoAdmin(voId) || session.isVoObserver(voId) || session.isGroupAdmin(groupId);
	}

	public final static String URL = "ext-sources";

	public String getUrl()
	{
		return URL;
	}

	public String getUrlWithParameters() {
		return GroupsTabs.URL + UrlMapper.TAB_NAME_SEPARATOR + getUrl() + "?id=" + groupId;
	}

	/**
	 * Loads the tab from URL parameters.
	 *
	 * @param parameters URL parameter map; expects "id" = group ID
	 */
	static public GroupExtSourcesTabItem load(Map<String, String> parameters) {
		// FIX: the "id" parameter is a group ID, not a VO ID - renamed misleading local
		int groupId = Integer.parseInt(parameters.get("id"));
		return new GroupExtSourcesTabItem(groupId);
	}
}
| |
/*
* Copyright 2019 The Grafeas Authors. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: grafeas/v1/grafeas.proto
package io.grafeas.v1;
/**
*
*
* <pre>
* Request to get an occurrence.
* </pre>
*
* Protobuf type {@code grafeas.v1.GetOccurrenceRequest}
*/
public final class GetOccurrenceRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:grafeas.v1.GetOccurrenceRequest)
GetOccurrenceRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use GetOccurrenceRequest.newBuilder() to construct.
private GetOccurrenceRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private GetOccurrenceRequest() {
name_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new GetOccurrenceRequest();
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
return this.unknownFields;
}
private GetOccurrenceRequest(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
java.lang.String s = input.readStringRequireUtf8();
name_ = s;
break;
}
default:
{
if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return io.grafeas.v1.GrafeasOuterClass
.internal_static_grafeas_v1_GetOccurrenceRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return io.grafeas.v1.GrafeasOuterClass
.internal_static_grafeas_v1_GetOccurrenceRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
io.grafeas.v1.GetOccurrenceRequest.class,
io.grafeas.v1.GetOccurrenceRequest.Builder.class);
}
public static final int NAME_FIELD_NUMBER = 1;
private volatile java.lang.Object name_;
/**
*
*
* <pre>
* The name of the occurrence in the form of
* `projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]`.
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The name.
*/
@java.lang.Override
public java.lang.String getName() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
}
}
/**
*
*
* <pre>
* The name of the occurrence in the form of
* `projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]`.
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for name.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
}
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof io.grafeas.v1.GetOccurrenceRequest)) {
return super.equals(obj);
}
io.grafeas.v1.GetOccurrenceRequest other = (io.grafeas.v1.GetOccurrenceRequest) obj;
if (!getName().equals(other.getName())) return false;
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + NAME_FIELD_NUMBER;
hash = (53 * hash) + getName().hashCode();
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parsing entry points. Each overload delegates to the
// singleton PARSER; variants taking an ExtensionRegistryLite allow extensions
// to be recognized while parsing. Byte-based overloads throw
// InvalidProtocolBufferException on malformed input; stream-based overloads
// additionally propagate IOException from the underlying stream.
public static io.grafeas.v1.GetOccurrenceRequest parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static io.grafeas.v1.GetOccurrenceRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static io.grafeas.v1.GetOccurrenceRequest parseFrom(com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static io.grafeas.v1.GetOccurrenceRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static io.grafeas.v1.GetOccurrenceRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static io.grafeas.v1.GetOccurrenceRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static io.grafeas.v1.GetOccurrenceRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static io.grafeas.v1.GetOccurrenceRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Delimited variants first read a varint length prefix, then that many
// bytes — the framing written by writeDelimitedTo().
public static io.grafeas.v1.GetOccurrenceRequest parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static io.grafeas.v1.GetOccurrenceRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static io.grafeas.v1.GetOccurrenceRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static io.grafeas.v1.GetOccurrenceRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Builder factory methods generated for every message type.
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
// Creates a fresh builder with all fields at their defaults.
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
// Creates a builder pre-populated from an existing message.
public static Builder newBuilder(io.grafeas.v1.GetOccurrenceRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
// The default instance yields an empty builder; any other instance seeds
// the builder with its current field values.
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
 *
 *
 * <pre>
 * Request to get an occurrence.
 * </pre>
 *
 * Protobuf type {@code grafeas.v1.GetOccurrenceRequest}
 */
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:grafeas.v1.GetOccurrenceRequest)
io.grafeas.v1.GetOccurrenceRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return io.grafeas.v1.GrafeasOuterClass
.internal_static_grafeas_v1_GetOccurrenceRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return io.grafeas.v1.GrafeasOuterClass
.internal_static_grafeas_v1_GetOccurrenceRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
io.grafeas.v1.GetOccurrenceRequest.class,
io.grafeas.v1.GetOccurrenceRequest.Builder.class);
}
// Construct using io.grafeas.v1.GetOccurrenceRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
// No sub-message fields, so nothing to eagerly initialize here even when
// field builders are forced on.
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
}
// Resets all fields to their proto3 defaults.
@java.lang.Override
public Builder clear() {
super.clear();
name_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return io.grafeas.v1.GrafeasOuterClass
.internal_static_grafeas_v1_GetOccurrenceRequest_descriptor;
}
@java.lang.Override
public io.grafeas.v1.GetOccurrenceRequest getDefaultInstanceForType() {
return io.grafeas.v1.GetOccurrenceRequest.getDefaultInstance();
}
// Builds the message, throwing if any required field is unset (none here).
@java.lang.Override
public io.grafeas.v1.GetOccurrenceRequest build() {
io.grafeas.v1.GetOccurrenceRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Builds the message without the initialization check, copying the single
// `name` field into the new instance.
@java.lang.Override
public io.grafeas.v1.GetOccurrenceRequest buildPartial() {
io.grafeas.v1.GetOccurrenceRequest result = new io.grafeas.v1.GetOccurrenceRequest(this);
result.name_ = name_;
onBuilt();
return result;
}
// The setField/clearField/clearOneof/setRepeatedField/addRepeatedField
// overrides below simply delegate to the reflective superclass versions;
// they exist so the generated Builder keeps a covariant return type.
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
// Dynamic merge: dispatches to the typed overload when possible.
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof io.grafeas.v1.GetOccurrenceRequest) {
return mergeFrom((io.grafeas.v1.GetOccurrenceRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
// Typed merge: proto3 semantics — a set (non-empty) scalar in `other`
// overwrites this builder's value; unknown fields are merged.
public Builder mergeFrom(io.grafeas.v1.GetOccurrenceRequest other) {
if (other == io.grafeas.v1.GetOccurrenceRequest.getDefaultInstance()) return this;
if (!other.getName().isEmpty()) {
name_ = other.name_;
onChanged();
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
// Parses from a stream and merges into this builder. On a parse failure,
// whatever was successfully read before the error is still merged (via the
// finally block) so callers can inspect partial data from the exception.
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
io.grafeas.v1.GetOccurrenceRequest parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (io.grafeas.v1.GetOccurrenceRequest) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Stored as String or ByteString, mirroring the message class's lazy
// UTF-8 conversion scheme.
private java.lang.Object name_ = "";
/**
 *
 *
 * <pre>
 * The name of the occurrence in the form of
 * `projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]`.
 * </pre>
 *
 * <code>
 * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The name.
 */
public java.lang.String getName() {
java.lang.Object ref = name_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 *
 *
 * <pre>
 * The name of the occurrence in the form of
 * `projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]`.
 * </pre>
 *
 * <code>
 * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The bytes for name.
 */
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
 *
 *
 * <pre>
 * The name of the occurrence in the form of
 * `projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]`.
 * </pre>
 *
 * <code>
 * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @param value The name to set.
 * @return This builder for chaining.
 */
public Builder setName(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
name_ = value;
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * The name of the occurrence in the form of
 * `projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]`.
 * </pre>
 *
 * <code>
 * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return This builder for chaining.
 */
public Builder clearName() {
name_ = getDefaultInstance().getName();
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * The name of the occurrence in the form of
 * `projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]`.
 * </pre>
 *
 * <code>
 * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @param value The bytes for name to set.
 * @return This builder for chaining.
 */
public Builder setNameBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
// proto3 strings must be valid UTF-8; reject invalid bytes up front.
checkByteStringIsUtf8(value);
name_ = value;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:grafeas.v1.GetOccurrenceRequest)
}
// @@protoc_insertion_point(class_scope:grafeas.v1.GetOccurrenceRequest)
// Singleton default instance shared by getDefaultInstance() and the
// builder factory methods; created eagerly at class-initialization time.
private static final io.grafeas.v1.GetOccurrenceRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new io.grafeas.v1.GetOccurrenceRequest();
}
public static io.grafeas.v1.GetOccurrenceRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Singleton parser; each parse call constructs the message directly from a
// CodedInputStream via the parsing constructor.
private static final com.google.protobuf.Parser<GetOccurrenceRequest> PARSER =
new com.google.protobuf.AbstractParser<GetOccurrenceRequest>() {
@java.lang.Override
public GetOccurrenceRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new GetOccurrenceRequest(input, extensionRegistry);
}
};
public static com.google.protobuf.Parser<GetOccurrenceRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<GetOccurrenceRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public io.grafeas.v1.GetOccurrenceRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.mongodb3;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;

import com.mongodb.bulk.BulkWriteResult;
import com.mongodb.client.AggregateIterable;
import com.mongodb.client.DistinctIterable;
import com.mongodb.client.FindIterable;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoCursor;
import com.mongodb.client.MongoDatabase;
import com.mongodb.client.model.BulkWriteOptions;
import com.mongodb.client.model.Filters;
import com.mongodb.client.model.UpdateOptions;
import com.mongodb.client.model.WriteModel;
import com.mongodb.client.result.DeleteResult;
import com.mongodb.client.result.UpdateResult;

import org.apache.camel.Exchange;
import org.apache.camel.InvalidPayloadException;
import org.apache.camel.Processor;
import org.apache.camel.TypeConverter;
import org.apache.camel.support.DefaultProducer;
import org.apache.camel.support.MessageHelper;
import org.apache.camel.util.ObjectHelper;
import org.bson.Document;
import org.bson.conversions.Bson;

import static com.mongodb.client.model.Filters.eq;
import static org.apache.camel.component.mongodb3.MongoDbConstants.BATCH_SIZE;
import static org.apache.camel.component.mongodb3.MongoDbConstants.COLLECTION;
import static org.apache.camel.component.mongodb3.MongoDbConstants.COLLECTION_INDEX;
import static org.apache.camel.component.mongodb3.MongoDbConstants.CRITERIA;
import static org.apache.camel.component.mongodb3.MongoDbConstants.DATABASE;
import static org.apache.camel.component.mongodb3.MongoDbConstants.FIELDS_PROJECTION;
import static org.apache.camel.component.mongodb3.MongoDbConstants.LIMIT;
import static org.apache.camel.component.mongodb3.MongoDbConstants.MONGO_ID;
import static org.apache.camel.component.mongodb3.MongoDbConstants.MULTIUPDATE;
import static org.apache.camel.component.mongodb3.MongoDbConstants.NUM_TO_SKIP;
import static org.apache.camel.component.mongodb3.MongoDbConstants.OID;
import static org.apache.camel.component.mongodb3.MongoDbConstants.OPERATION_HEADER;
import static org.apache.camel.component.mongodb3.MongoDbConstants.RECORDS_AFFECTED;
import static org.apache.camel.component.mongodb3.MongoDbConstants.RECORDS_MATCHED;
import static org.apache.camel.component.mongodb3.MongoDbConstants.RESULT_PAGE_SIZE;
import static org.apache.camel.component.mongodb3.MongoDbConstants.RESULT_TOTAL_SIZE;
import static org.apache.camel.component.mongodb3.MongoDbConstants.SORT_BY;
import static org.apache.camel.component.mongodb3.MongoDbConstants.UPSERT;
import static org.apache.camel.component.mongodb3.MongoDbConstants.WRITERESULT;
/**
* The MongoDb producer.
*/
public class MongoDbProducer extends DefaultProducer {
private final Map<MongoDbOperation, Processor> operations = new HashMap<>();
private MongoDbEndpoint endpoint;
{
bind(MongoDbOperation.aggregate, createDoAggregate());
bind(MongoDbOperation.bulkWrite, createDoBulkWrite());
bind(MongoDbOperation.command, createDoCommand());
bind(MongoDbOperation.count, createDoCount());
bind(MongoDbOperation.findDistinct, createDoDistinct());
bind(MongoDbOperation.findAll, createDoFindAll());
bind(MongoDbOperation.findById, createDoFindById());
bind(MongoDbOperation.findOneByQuery, createDoFindOneByQuery());
bind(MongoDbOperation.getColStats, createDoGetColStats());
bind(MongoDbOperation.getDbStats, createDoGetDbStats());
bind(MongoDbOperation.insert, createDoInsert());
bind(MongoDbOperation.remove, createDoRemove());
bind(MongoDbOperation.save, createDoSave());
bind(MongoDbOperation.update, createDoUpdate());
}
public MongoDbProducer(MongoDbEndpoint endpoint) {
super(endpoint);
this.endpoint = endpoint;
}
public void process(Exchange exchange) throws Exception {
MongoDbOperation operation = endpoint.getOperation();
Object header = exchange.getIn().getHeader(OPERATION_HEADER);
if (header != null) {
log.debug("Overriding default operation with operation specified on header: {}", header);
try {
if (header instanceof MongoDbOperation) {
operation = ObjectHelper.cast(MongoDbOperation.class, header);
} else {
// evaluate as a String
operation = MongoDbOperation.valueOf(exchange.getIn().getHeader(OPERATION_HEADER, String.class));
}
} catch (Exception e) {
throw new CamelMongoDbException("Operation specified on header is not supported. Value: " + header, e);
}
}
try {
invokeOperation(operation, exchange);
} catch (Exception e) {
throw MongoDbComponent.wrapInCamelMongoDbException(e);
}
}
/**
* Entry method that selects the appropriate MongoDB operation and executes it
*/
protected void invokeOperation(MongoDbOperation operation, Exchange exchange) throws Exception {
Processor processor = operations.get(operation);
if (processor != null) {
processor.process(exchange);
} else {
throw new CamelMongoDbException("Operation not supported. Value: " + operation);
}
}
private MongoDbProducer bind(MongoDbOperation operation, Function<Exchange, Object> mongoDbFunction) {
operations.put(operation, wrap(mongoDbFunction, operation));
return this;
}
// ----------- MongoDB operations ----------------
private Document createDbStatsCommand() {
return new Document("dbStats", 1).append("scale", 1);
}
private Document createCollStatsCommand(String collectionName) {
return new Document("collStats", collectionName);
}
// --------- Convenience methods -----------------------
private MongoDatabase calculateDb(Exchange exchange) {
// dynamic calculation is an option. In most cases it won't be used and
// we should not penalise all users with running this
// resolution logic on every Exchange if they won't be using this
// functionality at all
if (!endpoint.isDynamicity()) {
return endpoint.getMongoDatabase();
}
String dynamicDB = exchange.getIn().getHeader(DATABASE, String.class);
MongoDatabase db;
if (dynamicDB == null) {
db = endpoint.getMongoDatabase();
} else {
db = endpoint.getMongoConnection().getDatabase(dynamicDB);
}
if (log.isDebugEnabled()) {
log.debug("Dynamic database selected: {}", db.getName());
}
return db;
}
private String calculateCollectionName(Exchange exchange) {
if (!endpoint.isDynamicity()) {
return endpoint.getCollection();
}
String dynamicCollection = exchange.getIn().getHeader(COLLECTION, String.class);
if (dynamicCollection == null) {
return endpoint.getCollection();
}
return dynamicCollection;
}
private MongoCollection<Document> calculateCollection(Exchange exchange) {
// dynamic calculation is an option. In most cases it won't be used and
// we should not penalise all users with running this
// resolution logic on every Exchange if they won't be using this
// functionality at all
if (!endpoint.isDynamicity()) {
return endpoint.getMongoCollection().withWriteConcern(endpoint.getWriteConcern());
}
String dynamicDB = exchange.getIn().getHeader(DATABASE, String.class);
String dynamicCollection = exchange.getIn().getHeader(COLLECTION, String.class);
@SuppressWarnings("unchecked")
List<Bson> dynamicIndex = exchange.getIn().getHeader(COLLECTION_INDEX, List.class);
MongoCollection<Document> dbCol;
if (dynamicDB == null && dynamicCollection == null) {
dbCol = endpoint.getMongoCollection().withWriteConcern(endpoint.getWriteConcern());
} else {
MongoDatabase db = calculateDb(exchange);
if (dynamicCollection == null) {
dbCol = db.getCollection(endpoint.getCollection(), Document.class);
} else {
dbCol = db.getCollection(dynamicCollection, Document.class);
// on the fly add index
if (dynamicIndex == null) {
endpoint.ensureIndex(dbCol, endpoint.createIndex());
} else {
endpoint.ensureIndex(dbCol, dynamicIndex);
}
}
}
if (log.isDebugEnabled()) {
log.debug("Dynamic database and/or collection selected: {}->{}", endpoint.getDatabase(), endpoint.getCollection());
}
return dbCol;
}
@SuppressWarnings("rawtypes")
private List<Document> attemptConvertToList(List insertList, Exchange exchange) throws CamelMongoDbException {
List<Document> documentList = new ArrayList<>(insertList.size());
TypeConverter converter = exchange.getContext().getTypeConverter();
for (Object item : insertList) {
try {
Document document = converter.mandatoryConvertTo(Document.class, item);
documentList.add(document);
} catch (Exception e) {
throw new CamelMongoDbException("MongoDB operation = insert, Assuming List variant of MongoDB insert operation, but List contains non-Document items", e);
}
}
return documentList;
}
private boolean isWriteOperation(MongoDbOperation operation) {
return MongoDbComponent.WRITE_OPERATIONS.contains(operation);
}
private Processor wrap(Function<Exchange, Object> supplier, MongoDbOperation operation) {
return exchange -> {
Object result = supplier.apply(exchange);
copyHeaders(exchange);
moveBodyToOutIfResultIsReturnedAsHeader(exchange, operation);
processAndTransferResult(result, exchange, operation);
};
}
private void copyHeaders(Exchange exchange) {
MessageHelper.copyHeaders(exchange.getIn(), exchange.getOut(), false);
}
private void moveBodyToOutIfResultIsReturnedAsHeader(Exchange exchange, MongoDbOperation operation) {
if (isWriteOperation(operation) && endpoint.isWriteResultAsHeader()) {
exchange.getOut().setBody(exchange.getIn().getBody());
}
}
private void processAndTransferResult(Object result, Exchange exchange, MongoDbOperation operation) {
// determine where to set the WriteResult: as the OUT body or as an IN
// message header
if (isWriteOperation(operation) && endpoint.isWriteResultAsHeader()) {
exchange.getOut().setHeader(WRITERESULT, result);
} else {
exchange.getOut().setBody(result);
}
}
private Function<Exchange, Object> createDoGetColStats() {
return exch -> calculateDb(exch).runCommand(createCollStatsCommand(calculateCollectionName(exch)));
}
private Function<Exchange, Object> createDoFindOneByQuery() {
return exchange -> {
try {
MongoCollection<Document> dbCol = calculateCollection(exchange);
Bson query = exchange.getIn().getHeader(CRITERIA, Bson.class);
if (null == query) {
query = exchange.getIn().getMandatoryBody(Bson.class);
}
Bson sortBy = exchange.getIn().getHeader(SORT_BY, Bson.class);
Bson fieldFilter = exchange.getIn().getHeader(FIELDS_PROJECTION, Bson.class);
if (fieldFilter == null) {
fieldFilter = new Document();
}
if (sortBy == null) {
sortBy = new Document();
}
Document ret = dbCol.find(query).projection(fieldFilter).sort(sortBy).first();
exchange.getOut().setHeader(RESULT_TOTAL_SIZE, ret == null ? 0 : 1);
return ret;
} catch (InvalidPayloadException e) {
throw new CamelMongoDbException("Payload is no Document", e);
}
};
}
private Function<Exchange, Object> createDoCount() {
return exchange -> {
Bson query = exchange.getIn().getHeader(CRITERIA, Bson.class);
if (query == null) {
query = exchange.getContext().getTypeConverter().tryConvertTo(Bson.class, exchange, exchange.getIn().getBody());
}
if (query == null) {
query = new Document();
}
return calculateCollection(exchange).count(query);
};
}
private Function<Exchange, Object> createDoDistinct() {
return exchange -> {
Iterable<String> result = new ArrayList<>();
MongoCollection<Document> dbCol = calculateCollection(exchange);
// get the parameters out of the Exchange Header
String distinctFieldName = exchange.getIn().getHeader(MongoDbConstants.DISTINCT_QUERY_FIELD, String.class);
Bson query = exchange.getContext().getTypeConverter().tryConvertTo(Bson.class, exchange, exchange.getIn().getBody());
DistinctIterable<String> ret;
if (query != null) {
ret = dbCol.distinct(distinctFieldName, query, String.class);
} else {
ret = dbCol.distinct(distinctFieldName, String.class);
}
try {
ret.iterator().forEachRemaining(((List<String>) result)::add);
exchange.getOut().setHeader(MongoDbConstants.RESULT_PAGE_SIZE, ((List<String>) result).size());
} finally {
ret.iterator().close();
}
return result;
};
}
private Function<Exchange, Object> createDoFindAll() {
return exchange -> {
Iterable<Document> result;
MongoCollection<Document> dbCol = calculateCollection(exchange);
// do not use getMandatoryBody, because if the body is empty we want
// to retrieve all objects in the collection
Bson query = exchange.getIn().getHeader(CRITERIA, Bson.class);
// do not run around looking for a type converter unless there is a
// need for it
if (query == null && exchange.getIn().getBody() != null) {
query = exchange.getContext().getTypeConverter().tryConvertTo(Bson.class, exchange, exchange.getIn().getBody());
}
Bson fieldFilter = exchange.getIn().getHeader(FIELDS_PROJECTION, Bson.class);
// get the batch size and number to skip
Integer batchSize = exchange.getIn().getHeader(BATCH_SIZE, Integer.class);
Integer numToSkip = exchange.getIn().getHeader(NUM_TO_SKIP, Integer.class);
Integer limit = exchange.getIn().getHeader(LIMIT, Integer.class);
Document sortBy = exchange.getIn().getHeader(SORT_BY, Document.class);
FindIterable<Document> ret;
if (query == null && fieldFilter == null) {
ret = dbCol.find();
} else if (fieldFilter == null) {
ret = dbCol.find(query);
} else if (query != null) {
ret = dbCol.find(query).projection(fieldFilter);
} else {
ret = dbCol.find().projection(fieldFilter);
}
if (sortBy != null) {
ret.sort(sortBy);
}
if (batchSize != null) {
ret.batchSize(batchSize);
}
if (numToSkip != null) {
ret.skip(numToSkip);
}
if (limit != null) {
ret.limit(limit);
}
if (!MongoDbOutputType.MongoIterable.equals(endpoint.getOutputType())) {
try {
result = new ArrayList<>();
ret.iterator().forEachRemaining(((List<Document>)result)::add);
exchange.getOut().setHeader(RESULT_PAGE_SIZE, ((List<Document>)result).size());
} finally {
ret.iterator().close();
}
} else {
result = ret;
}
return result;
};
}
private Function<Exchange, Object> createDoInsert() {
return exchange -> {
MongoCollection<Document> dbCol = calculateCollection(exchange);
boolean singleInsert = true;
Object insert = exchange.getContext().getTypeConverter().tryConvertTo(Document.class, exchange, exchange.getIn().getBody());
// body could not be converted to Document, check to see if it's of
// type List<Document>
if (insert == null) {
insert = exchange.getIn().getBody(List.class);
// if the body of type List was obtained, ensure that all items
// are of type Document and cast the List to List<Document>
if (insert != null) {
singleInsert = false;
insert = attemptConvertToList((List<?>)insert, exchange);
} else {
throw new CamelMongoDbException("MongoDB operation = insert, Body is not conversible to type Document nor List<Document>");
}
}
if (singleInsert) {
Document insertObject = Document.class.cast(insert);
dbCol.insertOne(insertObject);
exchange.getIn().setHeader(OID, insertObject.get(MONGO_ID));
} else {
@SuppressWarnings("unchecked")
List<Document> insertObjects = (List<Document>)insert;
dbCol.insertMany(insertObjects);
List<Object> objectIdentification = new ArrayList<>(insertObjects.size());
objectIdentification.addAll(insertObjects.stream().map(insertObject -> insertObject.get(MONGO_ID)).collect(Collectors.toList()));
exchange.getIn().setHeader(OID, objectIdentification);
}
return insert;
};
}
private Function<Exchange, Object> createDoUpdate() {
return exchange -> {
try {
MongoCollection<Document> dbCol = calculateCollection(exchange);
Bson updateCriteria = exchange.getIn().getHeader(CRITERIA, Bson.class);
Bson objNew;
if (null == updateCriteria) {
@SuppressWarnings("unchecked")
List<Bson> saveObj = exchange.getIn().getMandatoryBody((Class<List<Bson>>)Class.class.cast(List.class));
if (saveObj.size() != 2) {
throw new CamelMongoDbException("MongoDB operation = insert, failed because body is not a List of Document objects with size = 2");
}
updateCriteria = saveObj.get(0);
objNew = saveObj.get(1);
} else {
objNew = exchange.getIn().getMandatoryBody(Bson.class);
}
Boolean multi = exchange.getIn().getHeader(MULTIUPDATE, Boolean.class);
Boolean upsert = exchange.getIn().getHeader(UPSERT, Boolean.class);
UpdateResult result;
UpdateOptions options = new UpdateOptions();
if (upsert != null) {
options.upsert(upsert);
}
if (multi == null || !multi) {
result = dbCol.updateOne(updateCriteria, objNew, options);
} else {
result = dbCol.updateMany(updateCriteria, objNew, options);
}
if (result.isModifiedCountAvailable()) {
exchange.getOut().setHeader(RECORDS_AFFECTED, result.getModifiedCount());
}
exchange.getOut().setHeader(RECORDS_MATCHED, result.getMatchedCount());
return result;
} catch (InvalidPayloadException e) {
throw new CamelMongoDbException("Invalid payload for update", e);
}
};
}
private Function<Exchange, Object> createDoRemove() {
return exchange -> {
try {
MongoCollection<Document> dbCol = calculateCollection(exchange);
Bson removeObj = exchange.getIn().getMandatoryBody(Bson.class);
DeleteResult result = dbCol.deleteMany(removeObj);
if (result.wasAcknowledged()) {
exchange.getOut().setHeader(RECORDS_AFFECTED, result.getDeletedCount());
}
return result;
} catch (InvalidPayloadException e) {
throw new CamelMongoDbException("Invalid payload for remove", e);
}
};
}
private Function<Exchange, Object> createDoAggregate() {
return exchange -> {
try {
MongoCollection<Document> dbCol = calculateCollection(exchange);
@SuppressWarnings("unchecked")
List<Bson> query = exchange.getIn().getMandatoryBody((Class<List<Bson>>)Class.class.cast(List.class));
// Allow body to be a pipeline
// @see http://docs.mongodb.org/manual/core/aggregation/
List<Bson> queryList;
if (query != null) {
queryList = query.stream().collect(Collectors.toList());
} else {
queryList = Arrays.asList(Bson.class.cast(exchange.getIn().getMandatoryBody(Bson.class)));
}
// The number to skip must be in body query
AggregateIterable<Document> aggregationResult = dbCol.aggregate(queryList);
// get the batch size
Integer batchSize = exchange.getIn().getHeader(MongoDbConstants.BATCH_SIZE, Integer.class);
if (batchSize != null) {
aggregationResult.batchSize(batchSize);
}
Boolean allowDiskUse = exchange.getIn().getHeader(MongoDbConstants.ALLOW_DISK_USE, Boolean.FALSE, Boolean.class);
aggregationResult.allowDiskUse(allowDiskUse);
Iterable<Document> result;
if (!MongoDbOutputType.MongoIterable.equals(endpoint.getOutputType())) {
try {
result = new ArrayList<>();
aggregationResult.iterator().forEachRemaining(((List<Document>) result)::add);
exchange.getOut().setHeader(MongoDbConstants.RESULT_PAGE_SIZE, ((List<Document>) result).size());
} finally {
aggregationResult.iterator().close();
}
} else {
result = aggregationResult;
}
return result;
} catch (InvalidPayloadException e) {
throw new CamelMongoDbException("Invalid payload for aggregate", e);
}
};
}
private Function<Exchange, Object> createDoCommand() {
return exchange -> {
try {
MongoDatabase db = calculateDb(exchange);
Document cmdObj = exchange.getIn().getMandatoryBody(Document.class);
return db.runCommand(cmdObj);
} catch (InvalidPayloadException e) {
throw new CamelMongoDbException("Invalid payload for command", e);
}
};
}
private Function<Exchange, Object> createDoGetDbStats() {
return exchange1 -> calculateDb(exchange1).runCommand(createDbStatsCommand());
}
private Function<Exchange, Object> createDoFindById() {
return exchange -> {
try {
MongoCollection<Document> dbCol = calculateCollection(exchange);
Object id = exchange.getIn().getMandatoryBody();
Bson o = Filters.eq(MONGO_ID, id);
Document ret;
Bson fieldFilter = exchange.getIn().getHeader(FIELDS_PROJECTION, Bson.class);
if (fieldFilter == null) {
fieldFilter = new Document();
}
ret = dbCol.find(o).projection(fieldFilter).first();
exchange.getOut().setHeader(RESULT_TOTAL_SIZE, ret == null ? 0 : 1);
return ret;
} catch (InvalidPayloadException e) {
throw new CamelMongoDbException("Invalid payload for findById", e);
}
};
}
/**
 * Builds the processor for the "save" (upsert) operation. A body without an
 * _id is inserted (the generated id is surfaced via the OID header); a body
 * with an _id replaces the matching document, upserting if none exists.
 */
private Function<Exchange, Object> createDoSave() {
    return exchange -> {
        try {
            final MongoCollection<Document> collection = calculateCollection(exchange);
            final Document body = exchange.getIn().getMandatoryBody(Document.class);
            final UpdateOptions upsertOptions = new UpdateOptions().upsert(true);
            final UpdateResult result;
            if (body.get(MONGO_ID) == null) {
                // No _id present: use a filter that matches nothing so the upsert
                // always inserts, then expose the server-generated ObjectId.
                result = collection.replaceOne(Filters.where("false"), body, upsertOptions);
                exchange.getIn().setHeader(OID, result.getUpsertedId().asObjectId().getValue());
            } else {
                // _id present: replace (or insert) the document with that id.
                result = collection.replaceOne(eq(MONGO_ID, body.get(MONGO_ID)), body, upsertOptions);
                exchange.getIn().setHeader(OID, body.get(MONGO_ID));
            }
            return result;
        } catch (InvalidPayloadException e) {
            throw new CamelMongoDbException("Body incorrect type for save", e);
        }
    };
}
/**
 * Builds the processor for the "bulkWrite" operation: submits the list of
 * {@code WriteModel<Document>} requests carried in the IN message body as a
 * single bulk write. The BULK_ORDERED header (default TRUE) selects ordered
 * vs. unordered execution.
 */
private Function<Exchange, Object> createDoBulkWrite() {
    return exchange -> {
        try {
            MongoCollection<Document> dbCol = calculateCollection(exchange);
            Boolean ordered = exchange.getIn().getHeader(MongoDbConstants.BULK_ORDERED, Boolean.TRUE, Boolean.class);
            BulkWriteOptions options = new BulkWriteOptions().ordered(ordered);
            // The element type of the List cannot be verified at runtime (type
            // erasure), so this assignment is inherently an unchecked cast.
            // The previous Class.class.cast(List.class) double-cast performed
            // the same unchecked conversion, only more obscurely.
            @SuppressWarnings("unchecked")
            List<WriteModel<Document>> requests = exchange.getIn().getMandatoryBody(List.class);
            return dbCol.bulkWrite(requests, options);
        } catch (InvalidPayloadException e) {
            throw new CamelMongoDbException("Invalid payload for bulk write", e);
        }
    };
}
}
| |
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.ec2.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
import com.amazonaws.Request;
import com.amazonaws.services.ec2.model.transform.CreateNetworkAclEntryRequestMarshaller;
/**
 * Request parameters for creating an entry (rule) in a network ACL.
 * <p>
 * NOTE(review): this class is emitted by the AWS SDK code generator (see the
 * {@code @Generated} annotation below); avoid hand edits, as they will be lost
 * on regeneration.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class CreateNetworkAclEntryRequest extends AmazonWebServiceRequest implements Serializable, Cloneable,
        DryRunSupportedRequest<CreateNetworkAclEntryRequest> {

    /**
     * <p>
     * The IPv4 network range to allow or deny, in CIDR notation (for example <code>172.16.0.0/24</code>).
     * </p>
     */
    private String cidrBlock;
    /**
     * <p>
     * Indicates whether this is an egress rule (rule is applied to traffic leaving the subnet).
     * </p>
     */
    private Boolean egress;
    /**
     * <p>
     * ICMP protocol: The ICMP or ICMPv6 type and code. Required if specifying protocol 1 (ICMP) or protocol 58 (ICMPv6)
     * with an IPv6 CIDR block.
     * </p>
     */
    private IcmpTypeCode icmpTypeCode;
    /**
     * <p>
     * The IPv6 network range to allow or deny, in CIDR notation (for example <code>2001:db8:1234:1a00::/64</code>).
     * </p>
     */
    private String ipv6CidrBlock;
    /**
     * <p>
     * The ID of the network ACL.
     * </p>
     */
    private String networkAclId;
    /**
     * <p>
     * TCP or UDP protocols: The range of ports the rule applies to. Required if specifying protocol 6 (TCP) or 17
     * (UDP).
     * </p>
     */
    private PortRange portRange;
    /**
     * <p>
     * The protocol number. A value of "-1" means all protocols. If you specify "-1" or a protocol number other than "6"
     * (TCP), "17" (UDP), or "1" (ICMP), traffic on all ports is allowed, regardless of any ports or ICMP types or codes
     * that you specify. If you specify protocol "58" (ICMPv6) and specify an IPv4 CIDR block, traffic for all ICMP
     * types and codes allowed, regardless of any that you specify. If you specify protocol "58" (ICMPv6) and specify an
     * IPv6 CIDR block, you must specify an ICMP type and code.
     * </p>
     */
    private String protocol;
    /**
     * <p>
     * Indicates whether to allow or deny the traffic that matches the rule.
     * </p>
     */
    private String ruleAction;
    /**
     * <p>
     * The rule number for the entry (for example, 100). ACL entries are processed in ascending order by rule number.
     * </p>
     * <p>
     * Constraints: Positive integer from 1 to 32766. The range 32767 to 65535 is reserved for internal use.
     * </p>
     */
    private Integer ruleNumber;

    /**
     * <p>
     * The IPv4 network range to allow or deny, in CIDR notation (for example <code>172.16.0.0/24</code>).
     * </p>
     *
     * @param cidrBlock
     *        The IPv4 network range to allow or deny, in CIDR notation (for example <code>172.16.0.0/24</code>).
     */
    public void setCidrBlock(String cidrBlock) {
        this.cidrBlock = cidrBlock;
    }

    /**
     * <p>
     * The IPv4 network range to allow or deny, in CIDR notation (for example <code>172.16.0.0/24</code>).
     * </p>
     *
     * @return The IPv4 network range to allow or deny, in CIDR notation (for example <code>172.16.0.0/24</code>).
     */
    public String getCidrBlock() {
        return this.cidrBlock;
    }

    /**
     * <p>
     * The IPv4 network range to allow or deny, in CIDR notation (for example <code>172.16.0.0/24</code>).
     * </p>
     *
     * @param cidrBlock
     *        The IPv4 network range to allow or deny, in CIDR notation (for example <code>172.16.0.0/24</code>).
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateNetworkAclEntryRequest withCidrBlock(String cidrBlock) {
        setCidrBlock(cidrBlock);
        return this;
    }

    /**
     * <p>
     * Indicates whether this is an egress rule (rule is applied to traffic leaving the subnet).
     * </p>
     *
     * @param egress
     *        Indicates whether this is an egress rule (rule is applied to traffic leaving the subnet).
     */
    public void setEgress(Boolean egress) {
        this.egress = egress;
    }

    /**
     * <p>
     * Indicates whether this is an egress rule (rule is applied to traffic leaving the subnet).
     * </p>
     *
     * @return Indicates whether this is an egress rule (rule is applied to traffic leaving the subnet).
     */
    public Boolean getEgress() {
        return this.egress;
    }

    /**
     * <p>
     * Indicates whether this is an egress rule (rule is applied to traffic leaving the subnet).
     * </p>
     *
     * @param egress
     *        Indicates whether this is an egress rule (rule is applied to traffic leaving the subnet).
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateNetworkAclEntryRequest withEgress(Boolean egress) {
        setEgress(egress);
        return this;
    }

    /**
     * <p>
     * Indicates whether this is an egress rule (rule is applied to traffic leaving the subnet).
     * </p>
     *
     * @return Indicates whether this is an egress rule (rule is applied to traffic leaving the subnet).
     */
    // Boolean-flavored alias of getEgress(); both return the same field.
    public Boolean isEgress() {
        return this.egress;
    }

    /**
     * <p>
     * ICMP protocol: The ICMP or ICMPv6 type and code. Required if specifying protocol 1 (ICMP) or protocol 58 (ICMPv6)
     * with an IPv6 CIDR block.
     * </p>
     *
     * @param icmpTypeCode
     *        ICMP protocol: The ICMP or ICMPv6 type and code. Required if specifying protocol 1 (ICMP) or protocol 58
     *        (ICMPv6) with an IPv6 CIDR block.
     */
    public void setIcmpTypeCode(IcmpTypeCode icmpTypeCode) {
        this.icmpTypeCode = icmpTypeCode;
    }

    /**
     * <p>
     * ICMP protocol: The ICMP or ICMPv6 type and code. Required if specifying protocol 1 (ICMP) or protocol 58 (ICMPv6)
     * with an IPv6 CIDR block.
     * </p>
     *
     * @return ICMP protocol: The ICMP or ICMPv6 type and code. Required if specifying protocol 1 (ICMP) or protocol 58
     *         (ICMPv6) with an IPv6 CIDR block.
     */
    public IcmpTypeCode getIcmpTypeCode() {
        return this.icmpTypeCode;
    }

    /**
     * <p>
     * ICMP protocol: The ICMP or ICMPv6 type and code. Required if specifying protocol 1 (ICMP) or protocol 58 (ICMPv6)
     * with an IPv6 CIDR block.
     * </p>
     *
     * @param icmpTypeCode
     *        ICMP protocol: The ICMP or ICMPv6 type and code. Required if specifying protocol 1 (ICMP) or protocol 58
     *        (ICMPv6) with an IPv6 CIDR block.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateNetworkAclEntryRequest withIcmpTypeCode(IcmpTypeCode icmpTypeCode) {
        setIcmpTypeCode(icmpTypeCode);
        return this;
    }

    /**
     * <p>
     * The IPv6 network range to allow or deny, in CIDR notation (for example <code>2001:db8:1234:1a00::/64</code>).
     * </p>
     *
     * @param ipv6CidrBlock
     *        The IPv6 network range to allow or deny, in CIDR notation (for example
     *        <code>2001:db8:1234:1a00::/64</code>).
     */
    public void setIpv6CidrBlock(String ipv6CidrBlock) {
        this.ipv6CidrBlock = ipv6CidrBlock;
    }

    /**
     * <p>
     * The IPv6 network range to allow or deny, in CIDR notation (for example <code>2001:db8:1234:1a00::/64</code>).
     * </p>
     *
     * @return The IPv6 network range to allow or deny, in CIDR notation (for example
     *         <code>2001:db8:1234:1a00::/64</code>).
     */
    public String getIpv6CidrBlock() {
        return this.ipv6CidrBlock;
    }

    /**
     * <p>
     * The IPv6 network range to allow or deny, in CIDR notation (for example <code>2001:db8:1234:1a00::/64</code>).
     * </p>
     *
     * @param ipv6CidrBlock
     *        The IPv6 network range to allow or deny, in CIDR notation (for example
     *        <code>2001:db8:1234:1a00::/64</code>).
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateNetworkAclEntryRequest withIpv6CidrBlock(String ipv6CidrBlock) {
        setIpv6CidrBlock(ipv6CidrBlock);
        return this;
    }

    /**
     * <p>
     * The ID of the network ACL.
     * </p>
     *
     * @param networkAclId
     *        The ID of the network ACL.
     */
    public void setNetworkAclId(String networkAclId) {
        this.networkAclId = networkAclId;
    }

    /**
     * <p>
     * The ID of the network ACL.
     * </p>
     *
     * @return The ID of the network ACL.
     */
    public String getNetworkAclId() {
        return this.networkAclId;
    }

    /**
     * <p>
     * The ID of the network ACL.
     * </p>
     *
     * @param networkAclId
     *        The ID of the network ACL.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateNetworkAclEntryRequest withNetworkAclId(String networkAclId) {
        setNetworkAclId(networkAclId);
        return this;
    }

    /**
     * <p>
     * TCP or UDP protocols: The range of ports the rule applies to. Required if specifying protocol 6 (TCP) or 17
     * (UDP).
     * </p>
     *
     * @param portRange
     *        TCP or UDP protocols: The range of ports the rule applies to. Required if specifying protocol 6 (TCP) or
     *        17 (UDP).
     */
    public void setPortRange(PortRange portRange) {
        this.portRange = portRange;
    }

    /**
     * <p>
     * TCP or UDP protocols: The range of ports the rule applies to. Required if specifying protocol 6 (TCP) or 17
     * (UDP).
     * </p>
     *
     * @return TCP or UDP protocols: The range of ports the rule applies to. Required if specifying protocol 6 (TCP) or
     *         17 (UDP).
     */
    public PortRange getPortRange() {
        return this.portRange;
    }

    /**
     * <p>
     * TCP or UDP protocols: The range of ports the rule applies to. Required if specifying protocol 6 (TCP) or 17
     * (UDP).
     * </p>
     *
     * @param portRange
     *        TCP or UDP protocols: The range of ports the rule applies to. Required if specifying protocol 6 (TCP) or
     *        17 (UDP).
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateNetworkAclEntryRequest withPortRange(PortRange portRange) {
        setPortRange(portRange);
        return this;
    }

    /**
     * <p>
     * The protocol number. A value of "-1" means all protocols. If you specify "-1" or a protocol number other than "6"
     * (TCP), "17" (UDP), or "1" (ICMP), traffic on all ports is allowed, regardless of any ports or ICMP types or codes
     * that you specify. If you specify protocol "58" (ICMPv6) and specify an IPv4 CIDR block, traffic for all ICMP
     * types and codes allowed, regardless of any that you specify. If you specify protocol "58" (ICMPv6) and specify an
     * IPv6 CIDR block, you must specify an ICMP type and code.
     * </p>
     *
     * @param protocol
     *        The protocol number. A value of "-1" means all protocols. If you specify "-1" or a protocol number other
     *        than "6" (TCP), "17" (UDP), or "1" (ICMP), traffic on all ports is allowed, regardless of any ports or
     *        ICMP types or codes that you specify. If you specify protocol "58" (ICMPv6) and specify an IPv4 CIDR
     *        block, traffic for all ICMP types and codes allowed, regardless of any that you specify. If you specify
     *        protocol "58" (ICMPv6) and specify an IPv6 CIDR block, you must specify an ICMP type and code.
     */
    public void setProtocol(String protocol) {
        this.protocol = protocol;
    }

    /**
     * <p>
     * The protocol number. A value of "-1" means all protocols. If you specify "-1" or a protocol number other than "6"
     * (TCP), "17" (UDP), or "1" (ICMP), traffic on all ports is allowed, regardless of any ports or ICMP types or codes
     * that you specify. If you specify protocol "58" (ICMPv6) and specify an IPv4 CIDR block, traffic for all ICMP
     * types and codes allowed, regardless of any that you specify. If you specify protocol "58" (ICMPv6) and specify an
     * IPv6 CIDR block, you must specify an ICMP type and code.
     * </p>
     *
     * @return The protocol number. A value of "-1" means all protocols. If you specify "-1" or a protocol number other
     *         than "6" (TCP), "17" (UDP), or "1" (ICMP), traffic on all ports is allowed, regardless of any ports or
     *         ICMP types or codes that you specify. If you specify protocol "58" (ICMPv6) and specify an IPv4 CIDR
     *         block, traffic for all ICMP types and codes allowed, regardless of any that you specify. If you specify
     *         protocol "58" (ICMPv6) and specify an IPv6 CIDR block, you must specify an ICMP type and code.
     */
    public String getProtocol() {
        return this.protocol;
    }

    /**
     * <p>
     * The protocol number. A value of "-1" means all protocols. If you specify "-1" or a protocol number other than "6"
     * (TCP), "17" (UDP), or "1" (ICMP), traffic on all ports is allowed, regardless of any ports or ICMP types or codes
     * that you specify. If you specify protocol "58" (ICMPv6) and specify an IPv4 CIDR block, traffic for all ICMP
     * types and codes allowed, regardless of any that you specify. If you specify protocol "58" (ICMPv6) and specify an
     * IPv6 CIDR block, you must specify an ICMP type and code.
     * </p>
     *
     * @param protocol
     *        The protocol number. A value of "-1" means all protocols. If you specify "-1" or a protocol number other
     *        than "6" (TCP), "17" (UDP), or "1" (ICMP), traffic on all ports is allowed, regardless of any ports or
     *        ICMP types or codes that you specify. If you specify protocol "58" (ICMPv6) and specify an IPv4 CIDR
     *        block, traffic for all ICMP types and codes allowed, regardless of any that you specify. If you specify
     *        protocol "58" (ICMPv6) and specify an IPv6 CIDR block, you must specify an ICMP type and code.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateNetworkAclEntryRequest withProtocol(String protocol) {
        setProtocol(protocol);
        return this;
    }

    /**
     * <p>
     * Indicates whether to allow or deny the traffic that matches the rule.
     * </p>
     *
     * @param ruleAction
     *        Indicates whether to allow or deny the traffic that matches the rule.
     * @see RuleAction
     */
    public void setRuleAction(String ruleAction) {
        this.ruleAction = ruleAction;
    }

    /**
     * <p>
     * Indicates whether to allow or deny the traffic that matches the rule.
     * </p>
     *
     * @return Indicates whether to allow or deny the traffic that matches the rule.
     * @see RuleAction
     */
    public String getRuleAction() {
        return this.ruleAction;
    }

    /**
     * <p>
     * Indicates whether to allow or deny the traffic that matches the rule.
     * </p>
     *
     * @param ruleAction
     *        Indicates whether to allow or deny the traffic that matches the rule.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see RuleAction
     */
    public CreateNetworkAclEntryRequest withRuleAction(String ruleAction) {
        setRuleAction(ruleAction);
        return this;
    }

    /**
     * <p>
     * Indicates whether to allow or deny the traffic that matches the rule.
     * </p>
     *
     * @param ruleAction
     *        Indicates whether to allow or deny the traffic that matches the rule.
     * @see RuleAction
     */
    public void setRuleAction(RuleAction ruleAction) {
        // Delegates to the enum-typed "with" variant; the returned reference is
        // intentionally discarded.
        withRuleAction(ruleAction);
    }

    /**
     * <p>
     * Indicates whether to allow or deny the traffic that matches the rule.
     * </p>
     *
     * @param ruleAction
     *        Indicates whether to allow or deny the traffic that matches the rule.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see RuleAction
     */
    public CreateNetworkAclEntryRequest withRuleAction(RuleAction ruleAction) {
        // Stores the enum's string form; throws NullPointerException if ruleAction is null.
        this.ruleAction = ruleAction.toString();
        return this;
    }

    /**
     * <p>
     * The rule number for the entry (for example, 100). ACL entries are processed in ascending order by rule number.
     * </p>
     * <p>
     * Constraints: Positive integer from 1 to 32766. The range 32767 to 65535 is reserved for internal use.
     * </p>
     *
     * @param ruleNumber
     *        The rule number for the entry (for example, 100). ACL entries are processed in ascending order by rule
     *        number.</p>
     *        <p>
     *        Constraints: Positive integer from 1 to 32766. The range 32767 to 65535 is reserved for internal use.
     */
    public void setRuleNumber(Integer ruleNumber) {
        this.ruleNumber = ruleNumber;
    }

    /**
     * <p>
     * The rule number for the entry (for example, 100). ACL entries are processed in ascending order by rule number.
     * </p>
     * <p>
     * Constraints: Positive integer from 1 to 32766. The range 32767 to 65535 is reserved for internal use.
     * </p>
     *
     * @return The rule number for the entry (for example, 100). ACL entries are processed in ascending order by rule
     *         number.</p>
     *         <p>
     *         Constraints: Positive integer from 1 to 32766. The range 32767 to 65535 is reserved for internal use.
     */
    public Integer getRuleNumber() {
        return this.ruleNumber;
    }

    /**
     * <p>
     * The rule number for the entry (for example, 100). ACL entries are processed in ascending order by rule number.
     * </p>
     * <p>
     * Constraints: Positive integer from 1 to 32766. The range 32767 to 65535 is reserved for internal use.
     * </p>
     *
     * @param ruleNumber
     *        The rule number for the entry (for example, 100). ACL entries are processed in ascending order by rule
     *        number.</p>
     *        <p>
     *        Constraints: Positive integer from 1 to 32766. The range 32767 to 65535 is reserved for internal use.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateNetworkAclEntryRequest withRuleNumber(Integer ruleNumber) {
        setRuleNumber(ruleNumber);
        return this;
    }

    /**
     * This method is intended for internal use only. Returns the marshaled request configured with additional
     * parameters to enable operation dry-run.
     */
    @Override
    public Request<CreateNetworkAclEntryRequest> getDryRunRequest() {
        Request<CreateNetworkAclEntryRequest> request = new CreateNetworkAclEntryRequestMarshaller().marshall(this);
        // Marking the request as a dry run makes EC2 validate permissions without
        // actually creating the ACL entry.
        request.addParameter("DryRun", Boolean.toString(true));
        return request;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getCidrBlock() != null)
            sb.append("CidrBlock: ").append(getCidrBlock()).append(",");
        if (getEgress() != null)
            sb.append("Egress: ").append(getEgress()).append(",");
        if (getIcmpTypeCode() != null)
            sb.append("IcmpTypeCode: ").append(getIcmpTypeCode()).append(",");
        if (getIpv6CidrBlock() != null)
            sb.append("Ipv6CidrBlock: ").append(getIpv6CidrBlock()).append(",");
        if (getNetworkAclId() != null)
            sb.append("NetworkAclId: ").append(getNetworkAclId()).append(",");
        if (getPortRange() != null)
            sb.append("PortRange: ").append(getPortRange()).append(",");
        if (getProtocol() != null)
            sb.append("Protocol: ").append(getProtocol()).append(",");
        if (getRuleAction() != null)
            sb.append("RuleAction: ").append(getRuleAction()).append(",");
        if (getRuleNumber() != null)
            sb.append("RuleNumber: ").append(getRuleNumber());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;

        if (obj instanceof CreateNetworkAclEntryRequest == false)
            return false;
        CreateNetworkAclEntryRequest other = (CreateNetworkAclEntryRequest) obj;
        // For each field: XOR is true when exactly one side is null (unequal);
        // otherwise compare values when the first side is non-null.
        if (other.getCidrBlock() == null ^ this.getCidrBlock() == null)
            return false;
        if (other.getCidrBlock() != null && other.getCidrBlock().equals(this.getCidrBlock()) == false)
            return false;
        if (other.getEgress() == null ^ this.getEgress() == null)
            return false;
        if (other.getEgress() != null && other.getEgress().equals(this.getEgress()) == false)
            return false;
        if (other.getIcmpTypeCode() == null ^ this.getIcmpTypeCode() == null)
            return false;
        if (other.getIcmpTypeCode() != null && other.getIcmpTypeCode().equals(this.getIcmpTypeCode()) == false)
            return false;
        if (other.getIpv6CidrBlock() == null ^ this.getIpv6CidrBlock() == null)
            return false;
        if (other.getIpv6CidrBlock() != null && other.getIpv6CidrBlock().equals(this.getIpv6CidrBlock()) == false)
            return false;
        if (other.getNetworkAclId() == null ^ this.getNetworkAclId() == null)
            return false;
        if (other.getNetworkAclId() != null && other.getNetworkAclId().equals(this.getNetworkAclId()) == false)
            return false;
        if (other.getPortRange() == null ^ this.getPortRange() == null)
            return false;
        if (other.getPortRange() != null && other.getPortRange().equals(this.getPortRange()) == false)
            return false;
        if (other.getProtocol() == null ^ this.getProtocol() == null)
            return false;
        if (other.getProtocol() != null && other.getProtocol().equals(this.getProtocol()) == false)
            return false;
        if (other.getRuleAction() == null ^ this.getRuleAction() == null)
            return false;
        if (other.getRuleAction() != null && other.getRuleAction().equals(this.getRuleAction()) == false)
            return false;
        if (other.getRuleNumber() == null ^ this.getRuleNumber() == null)
            return false;
        if (other.getRuleNumber() != null && other.getRuleNumber().equals(this.getRuleNumber()) == false)
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode + ((getCidrBlock() == null) ? 0 : getCidrBlock().hashCode());
        hashCode = prime * hashCode + ((getEgress() == null) ? 0 : getEgress().hashCode());
        hashCode = prime * hashCode + ((getIcmpTypeCode() == null) ? 0 : getIcmpTypeCode().hashCode());
        hashCode = prime * hashCode + ((getIpv6CidrBlock() == null) ? 0 : getIpv6CidrBlock().hashCode());
        hashCode = prime * hashCode + ((getNetworkAclId() == null) ? 0 : getNetworkAclId().hashCode());
        hashCode = prime * hashCode + ((getPortRange() == null) ? 0 : getPortRange().hashCode());
        hashCode = prime * hashCode + ((getProtocol() == null) ? 0 : getProtocol().hashCode());
        hashCode = prime * hashCode + ((getRuleAction() == null) ? 0 : getRuleAction().hashCode());
        hashCode = prime * hashCode + ((getRuleNumber() == null) ? 0 : getRuleNumber().hashCode());
        return hashCode;
    }

    @Override
    public CreateNetworkAclEntryRequest clone() {
        // Shallow copy via AmazonWebServiceRequest.clone(); sufficient here.
        return (CreateNetworkAclEntryRequest) super.clone();
    }
}
| |
/*
* Copyright 2000-2012 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.idea.svn.status;
import com.intellij.openapi.util.Getter;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.CharsetToolkit;
import com.intellij.util.containers.Convertor;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.idea.svn.SvnUtil;
import org.jetbrains.idea.svn.api.BaseSvnClient;
import org.jetbrains.idea.svn.api.Depth;
import org.jetbrains.idea.svn.commandLine.*;
import org.jetbrains.idea.svn.info.Info;
import org.tmatesoft.svn.core.SVNErrorCode;
import org.tmatesoft.svn.core.SVNException;
import org.tmatesoft.svn.core.SVNURL;
import org.tmatesoft.svn.core.internal.util.SVNPathUtil;
import org.tmatesoft.svn.core.wc.*;
import org.tmatesoft.svn.core.wc2.SvnTarget;
import org.xml.sax.SAXException;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
import java.util.*;
/**
 * Created with IntelliJ IDEA.
 * User: Irina.Chernushina
 * Date: 1/25/12
 * Time: 5:21 PM
 *
 * Command-line based {@link StatusClient}: runs {@code svn st --xml} via a
 * {@link CommandExecutor} and SAX-parses its output, delivering each parsed
 * entry to a {@link StatusConsumer}.
 */
public class CmdStatusClient extends BaseSvnClient implements StatusClient {

  /**
   * Runs "svn st" for {@code path} and feeds each resulting status to {@code handler}.
   *
   * @return always 0 — NOTE(review): callers apparently do not rely on the
   *         returned revision; confirm against the StatusClient contract.
   * @throws SvnBindException if the command produced no output or its XML could not be parsed
   */
  @Override
  public long doStatus(final File path,
                       final SVNRevision revision,
                       final Depth depth,
                       boolean remote,
                       boolean reportAll,
                       boolean includeIgnored,
                       boolean collectParentExternals,
                       final StatusConsumer handler,
                       final Collection changeLists) throws SvnBindException {
    // Resolve a base directory that actually exists: a file's parent, walked
    // upward until an existing ancestor is found.
    File base = path.isDirectory() ? path : path.getParentFile();
    base = CommandUtil.correctUpToExistingParent(base);
    // Info of the base is used later to resolve URLs of relative status paths.
    final Info infoBase = myFactory.createInfoClient().doInfo(base, revision);

    List<String> parameters = new ArrayList<String>();

    putParameters(parameters, path, depth, remote, reportAll, includeIgnored, changeLists);

    CommandExecutor command = execute(myVcs, SvnTarget.fromFile(path), SvnCommandName.st, parameters, null);
    parseResult(path, revision, handler, base, infoBase, command);
    return 0;
  }

  /**
   * Parses the XML output of the status command and pushes entries to {@code handler}.
   * If the output parses but reports nothing, either fails (path not versioned)
   * or synthesizes a single "NORMAL" status for the path.
   */
  private void parseResult(final File path,
                           SVNRevision revision,
                           StatusConsumer handler,
                           File base,
                           Info infoBase,
                           CommandExecutor command) throws SvnBindException {
    String result = command.getOutput();

    if (StringUtil.isEmptyOrSpaces(result)) {
      throw new SvnBindException("Status request returned nothing for command: " + command.getCommandText());
    }

    try {
      // The handler needs a reference to itself (to fetch the "pending" status
      // from inside the external-data callback), hence the one-element array.
      final SvnStatusHandler[] svnHandl = new SvnStatusHandler[1];
      svnHandl[0] = createStatusHandler(revision, handler, base, infoBase, svnHandl);
      SAXParser parser = SAXParserFactory.newInstance().newSAXParser();
      parser.parse(new ByteArrayInputStream(result.trim().getBytes(CharsetToolkit.UTF8_CHARSET)), svnHandl[0]);
      if (!svnHandl[0].isAnythingReported()) {
        if (!SvnUtil.isSvnVersioned(myVcs, path)) {
          throw new SvnBindException(SVNErrorCode.WC_NOT_DIRECTORY, "Command - " + command.getCommandText() + ". Result - " + result);
        } else {
          // return status indicating "NORMAL" state
          // typical output would be like
          // <status>
          // <target path="1.txt"></target>
          // </status>
          // so it does not contain any <entry> element and current parsing logic returns null

          PortableStatus status = new PortableStatus();
          status.setFile(path);
          status.setPath(path.getAbsolutePath());
          status.setContentsStatus(StatusType.STATUS_NORMAL);
          // Info is fetched lazily, only if the consumer asks for it.
          status.setInfoGetter(new Getter<Info>() {
            @Override
            public Info get() {
              return createInfoGetter(null).convert(path);
            }
          });
          try {
            handler.consume(status);
          }
          catch (SVNException e) {
            throw new SvnBindException(e);
          }
        }
      }
    }
    catch (SvnExceptionWrapper e) {
      // Unwrap: the callback layer can only throw unchecked wrappers.
      throw new SvnBindException(e.getCause());
    } catch (IOException e) {
      throw new SvnBindException(e);
    }
    catch (ParserConfigurationException e) {
      throw new SvnBindException(e);
    }
    catch (SAXException e) {
      // status parsing errors are logged separately as sometimes there are parsing errors connected to terminal output handling.
      // these errors primarily occur when status output is rather large.
      // and status output could be large, for instance, when working copy is locked (seems that each file is listed in status output).
      command.logCommand();
      throw new SvnBindException(e);
    }
  }

  /** Translates the doStatus flags into "svn st" command-line parameters (always requesting --xml). */
  private static void putParameters(@NotNull List<String> parameters,
                                    @NotNull File path,
                                    @Nullable Depth depth,
                                    boolean remote,
                                    boolean reportAll,
                                    boolean includeIgnored,
                                    @Nullable Collection changeLists) {
    CommandUtil.put(parameters, path);
    CommandUtil.put(parameters, depth);
    CommandUtil.put(parameters, remote, "-u");
    CommandUtil.put(parameters, reportAll, "--verbose");
    CommandUtil.put(parameters, includeIgnored, "--no-ignore");
    // TODO: Fix this check - update corresponding parameters in StatusClient
    CommandUtil.putChangeLists(parameters, changeLists);
    parameters.add("--xml");
  }

  /** Assembles a SAX handler wired to the given consumer; {@code svnHandl} lets the callback reach the handler itself. */
  public SvnStatusHandler createStatusHandler(final SVNRevision revision,
                                              final StatusConsumer handler,
                                              final File base,
                                              final Info infoBase, final SvnStatusHandler[] svnHandl) {
    final SvnStatusHandler.ExternalDataCallback callback = createStatusCallback(handler, base, infoBase, svnHandl);

    return new SvnStatusHandler(callback, base, createInfoGetter(revision));
  }

  /** Lazy per-file Info lookup; SvnBindException is rethrown as the unchecked SvnExceptionWrapper for use inside callbacks. */
  private Convertor<File, Info> createInfoGetter(final SVNRevision revision) {
    return new Convertor<File, Info>() {
      @Override
      public Info convert(File o) {
        try {
          return myFactory.createInfoClient().doInfo(o, revision);
        }
        catch (SvnBindException e) {
          throw new SvnExceptionWrapper(e);
        }
      }
    };
  }

  /**
   * Builds the callback invoked by the SAX handler between entries: fills in the
   * pending status's changelist and URL (resolved against the nearest enclosing
   * external, if any) and forwards it to {@code handler}.
   */
  public static SvnStatusHandler.ExternalDataCallback createStatusCallback(final StatusConsumer handler,
                                                                           final File base,
                                                                           final Info infoBase,
                                                                           final SvnStatusHandler[] svnHandl) {
    // Maps the root file of each encountered external to its Info, so URLs of
    // files under an external are resolved against the external's URL.
    final Map<File, Info> externalsMap = new HashMap<File, Info>();
    final String[] changelistName = new String[1];

    return new SvnStatusHandler.ExternalDataCallback() {
      @Override
      public void switchPath() {
        final PortableStatus pending = svnHandl[0].getPending();
        pending.setChangelistName(changelistName[0]);
        try {
          //if (infoBase != null) {
          Info baseInfo = infoBase;
          File baseFile = base;
          final File pendingFile = new File(pending.getPath());
          if (! externalsMap.isEmpty()) {
            // If the pending file lives under a known external, rebase URL
            // resolution on that external instead of the original base.
            for (File file : externalsMap.keySet()) {
              if (FileUtil.isAncestor(file, pendingFile, false)) {
                baseInfo = externalsMap.get(file);
                baseFile = file;
                break;
              }
            }
          }
          if (baseInfo != null) {
            final String append;
            final String systemIndependentPath = FileUtil.toSystemIndependentName(pending.getPath());
            if (pendingFile.isAbsolute()) {
              final String relativePath =
                FileUtil.getRelativePath(FileUtil.toSystemIndependentName(baseFile.getPath()), systemIndependentPath, '/');
              append = SVNPathUtil.append(baseInfo.getURL().toString(), FileUtil.toSystemIndependentName(relativePath));
            }
            else {
              append = SVNPathUtil.append(baseInfo.getURL().toString(), systemIndependentPath);
            }
            pending.setURL(SVNURL.parseURIEncoded(append));
          }
          if (StatusType.STATUS_EXTERNAL.equals(pending.getNodeStatus())) {
            // Remember this external so later entries beneath it resolve correctly.
            externalsMap.put(pending.getFile(), pending.getInfo());
          }
          handler.consume(pending);
        }
        catch (SVNException e) {
          throw new SvnExceptionWrapper(e);
        }
      }

      @Override
      public void switchChangeList(String newList) {
        changelistName[0] = newList;
      }
    };
  }

  /** Convenience overload: single-path, empty-depth status; returns the one status collected (or null). */
  @Override
  public Status doStatus(File path, boolean remote) throws SvnBindException {
    final Status[] svnStatus = new Status[1];
    doStatus(path, SVNRevision.UNDEFINED, Depth.EMPTY, remote, false, false, false, new StatusConsumer() {
      @Override
      public void consume(Status status) throws SVNException {
        svnStatus[0] = status;
      }
    }, null);
    return svnStatus[0];
  }
}
| |
/* Copyright 2004 The Apache Software Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package misc.detailed;
import junit.framework.Assert;
import org.apache.xmlbeans.*;
import org.apache.xmlbeans.impl.tool.SchemaCompiler;
import org.apache.xmlbeans.impl.xb.xmlconfig.ConfigDocument;
import org.apache.beehive.netui.tools.testrecorder.x2004.session.RecorderSessionDocument;
import org.w3c.dom.Document;
import org.w3c.dom.DocumentType;
import org.w3c.dom.Node;
import javax.xml.stream.XMLStreamReader;
import javax.xml.stream.XMLInputFactory;
import javax.xml.namespace.QName;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import java.io.File;
import java.io.FileInputStream;
import java.util.*;
import java.net.URL;
import tools.util.JarUtil;
import test.xbean.xmlcursor.purchaseOrder.PurchaseOrderDocument;
import testDateAttribute.TestElementWithDateAttributeDocument;
import testDateAttribute.TestDatewTZone;
import misc.common.JiraTestBase;
import dufourrault.DummyDocument;
import dufourrault.Father;
import net.orthogony.xml.sample.structure.ARootDocument;
import net.orthogony.xml.sample.structure.ChildType;
public class JiraRegression50_100Test extends JiraTestBase
{
    /**
     * Creates a named test case, delegating to the JiraTestBase constructor.
     *
     * @param name JUnit test (method) name to run
     */
    public JiraRegression50_100Test(String name)
    {
        super(name);
    }
///**
// * [XMLBEANS-##] <BUG TITLE>
// */
//public void test_jira_XmlBeans45() throws Exception
//{
//
//}
/**
* [XMLBEANS-52] Validator loops when schema has certain conditions
*/
public void test_jira_XmlBeans52() throws Exception{
//reusing code from method test_jira_XmlBeans48()
String correctXml = "<?xml version=\"1.0\" encoding=\"UTF-8\"?> \n" +
"<!--Sample XML file generated by XMLSPY v5 rel. 4 U (http://www.xmlspy.com)--/> \n" +
"<aList xmlns=\"http://pfa.dk/dummy/errorInXmlBeansValidation.xsd\" " +
"xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" " +
"xsi:schemaLocation=\"http://pfa.dk/dummy/errorInXmlBeansValidation.xsd \n" +
"C:\\pfa\\techr3\\TransformationWorkbench\\schema\\errorInXmlBeansValidation.xsd\"> \n" +
"<myListEntry> \n" +
"<HelloWorld>Hello World</HelloWorld> \n" +
"</myListEntry> \n" +
"</aList> ";
}
    /*
     * [XMLBEANS-54]: problem with default value
     *
     * Compiles xmlbeans_54.xsd_ via scomp with download and -nopvr enabled and
     * an enlarged heap; fails if the compiler reports errors.  An
     * OutOfMemoryError is caught and logged rather than failing the test.
     */
    public void test_jira_xmlbeans54() throws Exception {
        List errors = new ArrayList();
        SchemaCompiler.Parameters params = new SchemaCompiler.Parameters();
        // ".xsd_" suffix — presumably keeps this schema out of the default
        // build's schema compilation; TODO confirm.
        params.setXsdFiles(new File[]{new File(scompTestFilesRoot + "xmlbeans_54.xsd_")});
        params.setErrorListener(errors);
        params.setSrcDir(schemaCompSrcDir);
        params.setClassesDir(schemaCompClassesDir);
        params.setDownload(true);
        params.setNoPvr(true);
        // Runs out of Heap Memory
        params.setMemoryMaximumSize("1024m");
        params.setMemoryInitialSize("512m");
        try {
            SchemaCompiler.compile(params);
        } catch (OutOfMemoryError ome) {
            System.out.println(ome.getStackTrace());
            System.out.println("test_jira_xmlbeans54() - out of Heap Memory");
        }
        if (printOptionErrMsgs(errors)) {
            Assert.fail("test_jira_xmlbeans54() : Errors found when executing scomp");
        }
    }
    /**
     * [XMLBEANS-56] samples issue with easypo schema and config file.
     *
     * Builds an .xsdconfig document in-memory (namespace-to-package mapping,
     * prefix/suffix rules, an interface extension and a qname rename), parses
     * it as a ConfigDocument and requires it to validate.
     */
    public void test_jira_XmlBeans56() throws Exception
    {
        String xsdConfig = "<xb:config " +
                " xmlns:xb=\"http://xml.apache.org/xmlbeans/2004/02/xbean/config\"\n" +
                " xmlns:ep=\"http://openuri.org/easypo\">\n" +
                " <xb:namespace uri=\"http://openuri.org/easypo\">\n" +
                " <xb:package>com.easypo</xb:package>\n" +
                " </xb:namespace>\n" +
                " <xb:namespace uri=\"##any\">\n" +
                " <xb:prefix>Xml</xb:prefix>\n" +
                " <xb:suffix>Bean</xb:suffix>\n" +
                " </xb:namespace>\n" +
                " <xb:extension for=\"com.easypo.XmlCustomerBean\">\n" +
                " <xb:interface name=\"myPackage.Foo\">\n" +
                " <xb:staticHandler>myPackage.FooHandler</xb:staticHandler>\n" +
                " </xb:interface>\n" +
                " </xb:extension>\n" +
                " <xb:qname name=\"ep:purchase-order\" javaname=\"purchaseOrderXXX\"/>\n" +
                "</xb:config> ";
        ConfigDocument config =
                ConfigDocument.Factory.parse(xsdConfig);
        // errorList and xmOpts are inherited from JiraTestBase.
        xmOpts.setErrorListener(errorList);
        if (config.validate(xmOpts)) {
            System.out.println("Config Validated");
            return;
        } else {
            System.err.println("Config File did not validate");
            for (Iterator iterator = errorList.iterator(); iterator.hasNext();) {
                System.out.println("Error: " + iterator.next());
            }
            throw new Exception("Config File did not validate");
        }
    }
    /**
     * [XMLBEANS-57] scomp failure for XSD namespace "DAV:".
     *
     * Compiles xmlbeans_57.xml into throwaway src/classes dirs and inspects
     * the error-listener messages.
     */
    public void test_jira_XmlBeans57() throws Exception
    {
        String P = File.separator;
        String outputDir = OUTPUTROOT + P + "dav";
        File srcDir = new File(outputDir + P + "src");
        srcDir.mkdirs();
        File classDir = new File(outputDir + P + "classes");
        classDir.mkdirs();
        SchemaCompiler.Parameters params = new SchemaCompiler.Parameters();
        params.setXsdFiles(new File[]{new File(JIRA_CASES + "xmlbeans_57.xml")});
        params.setErrorListener(errorList);
        params.setSrcDir(srcDir);
        params.setClassesDir(classDir);
        SchemaCompiler.compile(params);
        Collection errs = params.getErrorListener();
        // NOTE(review): the flag name is misleading — it starts true and is
        // cleared when the *success* message "Compiled types to" is seen, so
        // the test throws when compilation apparently succeeded. Confirm the
        // intended semantics before relying on this test.
        boolean outTextPresent = true;
        if (errs.size() != 0) {
            for (Iterator iterator = errs.iterator(); iterator.hasNext();) {
                Object o = iterator.next();
                String out = o.toString();
                System.out.println("Dav: " + out);
                if (out.startsWith("Compiled types to"))
                    outTextPresent = false;
            }
        }
        //cleanup gen'd dirs
        srcDir.deleteOnExit();
        classDir.deleteOnExit();
        if (outTextPresent)
            System.out.println("No errors when running schemacompiler with DAV namespace");
        else
            throw new Exception("There were errors while compiling XSD with DAV " +
                    "namespace. See sys.out for more info");
    }
/*
* [XMLBEANS-58]:resolving transitive <redefine>'d types...
* This is realted to xmlbeans36 - its the same case but the schemas seem to have been updated at the w3c site.
* Hence adding a new testcase with the new schemas
*/
public void test_jira_xmlbeans58() throws Exception {
List errors = new ArrayList();
SchemaCompiler.Parameters params = new SchemaCompiler.Parameters();
// old url has been retired
//params.setUrlFiles(new URL[]{new URL("http://devresource.hp.com/drc/specifications/wsrf/interfaces/WS-BrokeredNotification-1-0.wsdl")});
// this seems to be a url for a WS-BrokeredNotification 1.0 wsdl
params.setUrlFiles(new URL[]{new URL("http://www.ibm.com/developerworks/library/specification/ws-notification/WS-BrokeredN.wsdl")});
params.setErrorListener(errors);
params.setSrcDir(schemaCompSrcDir);
params.setClassesDir(schemaCompClassesDir);
params.setDownload(true);
SchemaCompiler.compile(params);
if (printOptionErrMsgs(errors)) {
Assert.fail("test_jira_xmlbeans55() : Errors found when executing scomp");
}
}
/**
* [XMLBEANS-62] Avoid class cast exception when compiling older schema namespace
*/
public void test_jira_XmlBeans62() throws Exception
{
String P = File.separator;
String outputDir = System.getProperty("xbean.rootdir") + P + "build" +
P + "test" + P + "output" + P + "x1999";
File srcDir = new File(outputDir + P + "src");
srcDir.mkdirs();
File classDir = new File(outputDir + P + "classes");
classDir.mkdirs();
SchemaCompiler.Parameters params = new SchemaCompiler.Parameters();
params.setWsdlFiles(new File[]{new File(JIRA_CASES + "xmlbeans_62.xml")});
params.setErrorListener(errorList);
params.setSrcDir(srcDir);
params.setClassesDir(classDir);
SchemaCompiler.compile(params);
Collection errs = params.getErrorListener();
boolean warningPresent = false;
for (Iterator iterator = errs.iterator(); iterator.hasNext();) {
Object o = iterator.next();
String out = o.toString();
if (out.endsWith("did not have any schema documents in namespace 'http://www.w3.org/2001/XMLSchema'")) ;
warningPresent = true;
}
//cleanup gen'd dirs
srcDir.deleteOnExit();
classDir.deleteOnExit();
//validate error present
if (!warningPresent)
throw new Exception("Warning for 1999 schema was not found when compiling srcs");
else
System.out.println("Warning Present, test Passed");
}
    /**
     * [XMLBEANS-64] ArrayIndexOutOfBoundsException during validation.
     *
     * Parses xmlbeans_64.xml as a typed RecorderSessionDocument and requires
     * it to validate; validation errors are reported with line numbers.
     */
    public void test_jira_XmlBeans64() throws Exception
    {
        // load the document
        File inst = new File(JIRA_CASES + "xmlbeans_64.xml");
        XmlObject doc = RecorderSessionDocument.Factory.parse(inst);
        // validate
        XmlOptions validateOptions = new XmlOptions();
        validateOptions.setLoadLineNumbers();
        // local list intentionally shadows the inherited errorList field
        ArrayList errorList = new ArrayList();
        validateOptions.setErrorListener(errorList);
        boolean isValid = doc.validate(validateOptions);
        if (!isValid)
            throw new Exception("Errors: " + errorList);
    }
    /**
     * [XMLBEANS-66] NullPointerException when restricting a union with one of
     * the union members.
     *
     * Compiles an inline schema in which integerConstantExpr restricts
     * constantExpr's union-typed element down to xsd:integer, then resolves
     * the global Kludge element and validates a matching instance.
     */
    public void test_jira_XmlBeans66() throws Exception
    {
        String reproXsd = "<?xml version=\"1.0\" encoding=\"UTF-8\"?> \n" +
                "<xsd:schema targetNamespace=\"http://www.w3.org/2003/12/XQueryX\" \n" +
                " xmlns=\"http://www.w3.org/2003/12/XQueryX\" \n" +
                " xmlns:xsd=\"http://www.w3.org/2001/XMLSchema\" \n" +
                " elementFormDefault=\"qualified\" \n" +
                " attributeFormDefault=\"qualified\"> \n" +
                " <!-- Kludge for anySimpleType --> \n" +
                " <xsd:simpleType name=\"constantValueType\"> \n" +
                " <xsd:union memberTypes=\"xsd:integer xsd:decimal xsd:string xsd:double\"/> \n" +
                " </xsd:simpleType> \n" +
                " <!-- constant expressions. We have 4 different subclasses for this --> \n" +
                " <xsd:complexType name=\"constantExpr\"> \n" +
                " <xsd:sequence> \n" +
                " <xsd:element name=\"value\" type=\"constantValueType\"/> \n" +
                " </xsd:sequence> \n" +
                " </xsd:complexType> \n" +
                " <xsd:complexType name=\"integerConstantExpr\"> \n" +
                " <xsd:complexContent> \n" +
                " <xsd:restriction base=\"constantExpr\"> \n" +
                " <xsd:sequence> \n" +
                " <xsd:element name=\"value\" type=\"xsd:integer\"/> \n" +
                " </xsd:sequence> \n" +
                " </xsd:restriction> \n" +
                " </xsd:complexContent> \n" +
                " </xsd:complexType>" +
                "<!-- added for element validation -->" +
                "<xsd:element name=\"Kludge\" type=\"integerConstantExpr\" />\n" +
                "</xsd:schema> ";
        // makeSchemaTypeLoader / validateInstance are JiraTestBase helpers.
        SchemaTypeLoader stl = makeSchemaTypeLoader(new String[]{reproXsd});
        QName reproQName = new QName("http://www.w3.org/2003/12/XQueryX", "Kludge");
        SchemaGlobalElement elVal = stl.findElement(reproQName);
        Assert.assertTrue("Element is null or not found", (elVal != null));
        String reproInst = "<Kludge xmlns=\"http://www.w3.org/2003/12/XQueryX\"><value>12</value></Kludge>";
        validateInstance(new String[]{reproXsd}, new String[]{reproInst}, null);
    }
    /**
     * [XMLBEANS-68] GDateBuilder outputs empty string when used without time
     * or timezone.
     *
     * Builds a GDate with time and timezone cleared, sets it as a date
     * attribute and verifies the serialized value is non-empty and that the
     * millisecond value round-trips.
     */
    public void test_jira_XmlBeans68() throws Exception
    {
        Calendar cal = Calendar.getInstance();
        GDateBuilder gdateBuilder = new GDateBuilder(cal);
        // strip time-of-day and timezone so only the date remains
        gdateBuilder.clearTime();
        gdateBuilder.clearTimeZone();
        GDate gdate = gdateBuilder.toGDate();
        TestDatewTZone xdate = TestDatewTZone.Factory.newInstance();
        xdate.setGDateValue(gdate);
        TestElementWithDateAttributeDocument doc =
                TestElementWithDateAttributeDocument.Factory.newInstance();
        TestElementWithDateAttributeDocument.TestElementWithDateAttribute root =
                doc.addNewTestElementWithDateAttribute();
        root.xsetSomeDate(xdate);
        System.out.println("Doc: " + doc);
        System.out.println("Date: " + xdate.getStringValue());
        // the reported bug produced an empty serialized date
        if (xdate.getStringValue().compareTo("") == 0 ||
                xdate.getStringValue().length() <= 1)
            throw new Exception("Date without TimeZone should not be empty");
        if (root.getSomeDate().getTimeInMillis() != gdate.getCalendar().getTimeInMillis())
            throw new Exception("Set Dates were not equal");
    }
    /**
     * This issue needed an elementFormDefault=qualified added to the schema.
     * [XMLBEANS-71] when trying to retrieve data from a XMLBean with Input
     * from a XML Document, we cannot get any data from the Bean.
     *
     * Parses the instance, validates both document and inner element, and
     * checks the "status" value was populated.
     */
    public void test_jira_XmlBeans71() throws Exception
    {
        //schema src lives in cases/xbean/xmlobject/xmlbeans_71.xsd
        // NOTE(review): xmOpts is passed to parse() before the error listener
        // is attached on the next line — parse errors may go unrecorded;
        // confirm whether that ordering is intentional.
        abc.BazResponseDocument doc = abc.BazResponseDocument.Factory.parse(JarUtil.getResourceFromJarasFile("xbean/misc/jira/xmlbeans_71.xml"), xmOpts);
        xmOpts.setErrorListener(errorList);
        abc.BazResponseDocument.BazResponse baz = doc.getBazResponse();
        if (!doc.validate(xmOpts))
            System.out.println("DOC-ERRORS: " + errorList + "\n" + doc.xmlText());
        else
            System.out.println("DOC-XML:\n" + doc.xmlText());
        // reset the shared listener before the second validation pass
        errorList.removeAll(errorList);
        xmOpts.setErrorListener(errorList);
        if (!baz.validate(xmOpts))
            System.out.println("BAZ-ERRORS: " + errorList + "\n" + baz.xmlText());
        //throw new Exception("Response Document did not validate\n"+errorList);
        else
            System.out.println("BAZ-XML:\n" + baz.xmlText());
        if (baz.getStatus().compareTo("SUCCESS") != 0)
            throw new Exception("Status was not loaded properly");
        else
            System.out.println("Sucess was recieved correctly");
    }
    /**
     * [XMLBEANS-72] Document properties are lost.
     *
     * Sets DOCTYPE name/public-id/system-id on a fresh document, saves it to
     * disk, re-parses it with both XmlBeans and DOM, and verifies the doctype
     * information survived the round trip.
     */
    public void test_jira_XmlBeans72() throws Exception
    {
        String docTypeName = "struts-config";
        String docTypePublicID = "-//Apache Software Foundation//DTD Struts Configuration 1.1//EN";
        String docTypeSystemID = "http://jakarta.apache.org/struts/dtds/struts-config_1_1.dtd";
        String fileName = "xmlbeans72.xml";
        //create instance and set doc properties
        PurchaseOrderDocument po = PurchaseOrderDocument.Factory.newInstance();
        org.apache.xmlbeans.XmlDocumentProperties docProps = po.documentProperties();
        docProps.setDoctypeName(docTypeName);
        docProps.setDoctypePublicId(docTypePublicID);
        docProps.setDoctypeSystemId(docTypeSystemID);
        po.addNewPurchaseOrder();
        po.save(new File(fileName));
        //parse saved out file and verify values set above are present
        PurchaseOrderDocument po2 = PurchaseOrderDocument.Factory.parse(new File(fileName));
        //XmlObject po2 = XmlObject.Factory.parse(new File(fileName));
        org.apache.xmlbeans.XmlDocumentProperties doc2Props = po2.documentProperties();
        //verify information using DOM
        DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
        factory.setValidating(false);
        factory.setNamespaceAware(true);
        DocumentBuilder builder = factory.newDocumentBuilder();
        Document document = builder.parse(new File(fileName));
        DocumentType docType = document.getDoctype();
        //System.out.println("Name: "+ doc2Props.getDoctypeName() +" = " + docType.getName());
        //System.out.println("System: "+ doc2Props.getDoctypeSystemId() + " = " + docType.getSystemId());
        //System.out.println("Public: "+ doc2Props.getDoctypePublicId()+ " = " + docType.getPublicId());
        // mismatches are accumulated here and reported in one exception
        StringBuffer compareText = new StringBuffer();
        //check values - compare to expected and DOM
        if (doc2Props != null) {
            if (doc2Props.getDoctypeName() == null ||
                    doc2Props.getDoctypeName().compareTo(docTypeName) != 0 ||
                    doc2Props.getDoctypeName().compareTo(docType.getName()) != 0)
                compareText.append("docTypeName was not as " +
                        "expected in the document properties " +
                        doc2Props.getDoctypeName()+"\n");
            if (doc2Props.getDoctypePublicId() == null ||
                    doc2Props.getDoctypePublicId().compareTo(docTypePublicID) != 0 ||
                    doc2Props.getDoctypePublicId().compareTo(docType.getPublicId()) != 0)
                compareText.append("docTypePublicID was not as " +
                        "expected in the document properties " +
                        doc2Props.getDoctypePublicId()+"\n");
            if (doc2Props.getDoctypeSystemId() == null ||
                    doc2Props.getDoctypeSystemId().compareTo(docTypeSystemID) != 0 ||
                    doc2Props.getDoctypeSystemId().compareTo(docType.getSystemId()) != 0)
                compareText.append("docTypeSystemID was not as " +
                        "expected in the document properties "+
                        doc2Props.getDoctypeSystemId()+"\n" );
        } else {
            compareText.append("Document Properties were null, should have been set");
        }
        //cleanup
        po2 = null;
        po = null;
        new File(fileName).deleteOnExit();
        if (compareText.toString().length() > 1)
            throw new Exception("Doc properties were not saved or read correctly\n" + compareText.toString());
    }
/**
* BUGBUG: XMLBEANS-78 - NPE when processing XMLStreamReader Midstream
* XMLBEANS-78 - NPE when processing XMLStreamReader Midstream
*
* @throws Exception
*/
public void test_jira_xmlbeans78() throws Exception
{
XMLInputFactory factory = XMLInputFactory.newInstance();
FileInputStream fis = new FileInputStream(new File(JIRA_CASES+ "xmlbeans_78.xml"));
XMLStreamReader reader = factory.createXMLStreamReader(fis);
skipToBody(reader);
XmlObject o = XmlObject.Factory.parse(reader);
}
/**
* Move reader to element of SOAP Body
*
* @param reader
* @throws javax.xml.stream.XMLStreamException
*
*/
private void skipToBody(XMLStreamReader reader) throws javax.xml.stream.XMLStreamException
{
while (true) {
int event = reader.next();
switch (event) {
case XMLStreamReader.END_DOCUMENT:
return;
case XMLStreamReader.START_ELEMENT:
if (reader.getLocalName().equals("Body")) {
return;
}
break;
default:
break;
}
}
}
    /**
     * Repro case for jira issue
     * XMLBEANS-80 problems in XPath selecting with namespaces and Predicates.
     *
     * Selects a namespaced element with an attribute predicate and expects
     * exactly one match.
     */
    public void test_jira_xmlbeans80() throws Exception
    {
        String xpathDoc = "<?xml version=\"1.0\"?> \n" +
                "<doc xmlns:ext=\"http://somebody.elses.extension\"> \n" +
                " <ext:a test=\"test\" /> \n" +
                " <b attr1=\"a1\" attr2=\"a2\" \n" +
                " xmlns:java=\"http://xml.apache.org/xslt/java\"> \n" +
                " <a> \n" +
                " </a> \n" +
                " </b> \n" +
                "</doc> ";
        XmlObject xb80 = XmlObject.Factory.parse(xpathDoc);
        // change $this to '.' to avoid XQuery syntax error for $this not being declared
        //XmlObject[] resSet = xb80.selectPath("declare namespace " +
        // "ext='http://somebody.elses.extension'; $this//ext:a[@test='test']");
        XmlObject[] resSet = xb80.selectPath("declare namespace " +
                "ext='http://somebody.elses.extension'; .//ext:a[@test='test']");
        Assert.assertTrue(resSet.length == 1);
        System.out.println("Result was: " + resSet[0].xmlText());
    }
/**
* Repro case for jira issue
* XMLBEANS-81 Cursor selectPath() method not working with predicates
*/
public void test_jira_xmlbeans81() throws Exception
{
String xpathDoc = "<MatchedRecords>" +
" <MatchedRecord>" +
" <TableName>" +
"ABC" +
"</TableName>" +
" </MatchedRecord>" +
" <MatchedRecord>" +
" <TableName>\n" +
" BCD \n" +
" </TableName> \n" +
" </MatchedRecord> \n" +
"</MatchedRecords> ";
XmlObject xb81 = XmlObject.Factory.parse(xpathDoc);
// change $this to '.' to avoid XQuery syntax error for $this not being declared
//XmlObject[] resSet = xb81.selectPath("$this//MatchedRecord[TableName=\"ABC\"]/TableName");
XmlObject[] resSet = xb81.selectPath(".//MatchedRecord[TableName=\"ABC\"]/TableName");
assertEquals(resSet.length , 1);
XmlCursor cursor = xb81.newCursor();
//cursor.selectPath("$this//MatchedRecord[TableName=\"ABC\"]/TableName");
cursor.selectPath(".//MatchedRecord[TableName=\"ABC\"]/TableName");
}
/**
* XMLBeans-84 Cannot run XmlObject.selectPath using Jaxen in multi threaded environment
*/
public void test_jira_XmlBeans84() throws Exception
{
XPathThread[] threads = new XPathThread[15];
for (int i = 0; i < threads.length; i++) {
threads[i] = new XPathThread();
System.out.println("Thread[" + i + "]-starting ");
threads[i].start();
}
Thread.sleep(6000);
System.out.println("Done with XPaths?...");
for (int i = 0; i < threads.length; i++) {
Assert.assertNull(threads[i].getException());
}
}
/*
* [XMLBEANS-88]:Cannot compile eBay schema
*
*/
/*
public void test_jira_xmlbeans88() throws Exception {
List errors = new ArrayList();
SchemaCompiler.Parameters params = new SchemaCompiler.Parameters();
params.setUrlFiles(new URL[]{new URL("http://developer.ebay.com/webservices/latest/eBaySvc.wsdl")});
params.setErrorListener(errors);
params.setSrcDir(schemaCompSrcDir);
params.setClassesDir(schemaCompClassesDir);
params.setDownload(true);
// ignore unique particle rule in order to compile this schema
params.setNoUpa(true);
// runs out of memory..
params.setMemoryMaximumSize("512m");
try {
SchemaCompiler.compile(params);
} catch (java.lang.OutOfMemoryError ome) {
System.out.println(ome.getCause());
System.out.println(ome.getMessage());
System.out.println(ome.getStackTrace());
Assert.fail("test_jira_xmlbeans88(): Out Of Memory Error");
} catch (Throwable t) {
t.getMessage();
System.out.println("Ok Some Exception is caught here");
}
if (printOptionErrMsgs(errors)) {
Assert.fail("test_jira_xmlbeans88() : Errors found when executing scomp");
}
}
*/
/**
* [XMLBEANS-96]:XmlDocumentProperties missing version and encoding
*/
public void test_jira_xmlbeans96() throws Exception {
StringBuffer xmlstringbuf = new StringBuffer("<?xml version=\"1.0\" encoding=\"UTF-8\" ?>");
xmlstringbuf.append("<test>");
xmlstringbuf.append("<testchild attr=\"abcd\"> Jira02 </testchild>");
xmlstringbuf.append("</test>");
XmlObject doc = XmlObject.Factory.parse(xmlstringbuf.toString());
XmlDocumentProperties props = doc.documentProperties();
Assert.assertEquals("test_jira_xmlbeans96() : Xml Version is not picked up", props.getVersion(), "1.0");
Assert.assertEquals("test_jira_xmlbeans96() : Xml Encoding is not picked up", props.getEncoding(), "UTF-8");
}
    /**
     * [XMLBEANS-98] setSaveSuggestedPrefixes doesn't
     * work for QName attribute values.
     *
     * Saves a document with suggested namespace prefixes, re-parses it and
     * checks the root element carries the suggested "s" prefix.
     *
     * @throws Exception
     */
    public void test_jira_xmlbeans98() throws Exception {
        String outfn = outputroot + "xmlbeans_98.xml";
        String structnamespace = "http://www.orthogony.net/xml/sample/structure";
        String datanamespace = "http://www.orthogony.net/xml/sample/data";
        String schemaloc = "xmlbeans_98.xsd";
        String xsinamespace = "http://www.w3.org/2001/XMLSchema-instance";
        File out = new File(outfn);
        XmlOptions options = new XmlOptions();
        // associate namespaces with prefixes
        Map prefixes = new HashMap();
        prefixes.put(structnamespace, "s");
        prefixes.put(datanamespace, "d");
        prefixes.put(xsinamespace, "v");
        options.setSaveSuggestedPrefixes(prefixes);
        options.setSavePrettyPrint();
        // create a sample document
        ARootDocument doc = ARootDocument.Factory.newInstance();
        ARootDocument.ARoot root = doc.addNewARoot();
        ChildType child = root.addNewAChild();
        // This is where the prefix map should take effect
        child.setQualifiedData(new QName(datanamespace, "IAmQualified"));
        // Add a schema location attribute to the doc element
        XmlCursor c = root.newCursor();
        c.toNextToken();
        c.insertAttributeWithValue("schemaLocation", xsinamespace,
                structnamespace + " " + schemaloc);
        //String expXML = doc.xmlText(options.setSavePrettyPrint())
        // save as XML text using the options
        //System.out.println("OUT: \n"+doc.xmlText());
        //doc.save(out, options);
        doc.save(out, options);
        XmlObject xObj = XmlObject.Factory.parse(out);
        // expected serialization kept for reference; only the prefix is asserted below
        String expXText = "<s:a-root v:schemaLocation=\"http://www.orthogony.net/xml/sample/structure xmlbeans_98.xsd\" xmlns:s=\"http://www.orthogony.net/xml/sample/structure\" xmlns:v=\"http://www.w3.org/2001/XMLSchema-instance\">\n" +
                " <s:a-child qualified-data=\"data:IAmQualified\" xmlns:data=\"http://www.orthogony.net/xml/sample/data\"/>\n" +
                "</s:a-root>";
        XmlObject txtXObj = XmlObject.Factory.parse(doc.xmlText());
        System.out.println("xObj: " + xObj.xmlText());
        //NamedNodeMap n = xObj.getDomNode().getAttributes();
        //Assert.assertTrue("Length was not as expected", n.getLength() == 3);
        Node no = xObj.getDomNode();//n.getNamedItem("a-root");
        Assert.assertTrue("Expected Prefix was not present: " + no.getPrefix(), no.getPrefix().compareTo("s") == 0);
        //Assert.assertTrue("s prefix was not found " + no.lookupPrefix(structnamespace), no.lookupPrefix(structnamespace).compareTo("s") == 0);
        //Assert.assertTrue("d prefix was not found " + no.lookupPrefix(datanamespace), no.lookupPrefix(datanamespace).compareTo("s") == 0);
        //Assert.assertTrue("v prefix was not found " + no.lookupPrefix(xsinamespace), no.lookupPrefix(xsinamespace).compareTo("s") == 0);
        // throw new Exception(out.getCanonicalPath());
    }
    /**
     * [XMLBEANS-99] NPE/AssertionFailure in newDomNode().
     *
     * Calls newDomNode() on both a typed (DummyDocument) and an untyped
     * (XmlObject) parse of the same instance; the reported bug was an NPE
     * during DOM conversion.
     *
     * @throws Exception
     */
    public void test_jira_xmlbeans99_a() throws Exception {
        //typed verification
        DummyDocument doc = DummyDocument.Factory.parse(new File(JIRA_CASES + "xmlbeans_99.xml"));
        org.w3c.dom.Node node = doc.newDomNode();
        System.out.println("node = " + node);
        //UnTyped Verification
        XmlObject xObj = XmlObject.Factory.parse(new File(JIRA_CASES +
                "xmlbeans_99.xml"));
        org.w3c.dom.Node xNode = xObj.newDomNode();
        System.out.println("xNode: " + xNode);
    }
/*
* [XMLBEANS-99]: NPE/AssertionFailure in newDomNode()
* refer to [XMLBEANS-14]
*/
public void test_jira_xmlbeans99_b() throws Exception {
StringBuffer xmlstringbuf = new StringBuffer("<?xml version=\"1.0\" encoding=\"ISO-8859-1\" ?> \n");
xmlstringbuf.append(" <x:dummy xmlns:x=\"http://dufourrault\" xmlns:xsi=\"http://www.w3.org/2000/10/XMLSchema-instance\" xsi:SchemaLocation=\"dummy.xsd\">\n");
xmlstringbuf.append(" <x:father>\n");
xmlstringbuf.append(" <x:son>toto</x:son> \n");
xmlstringbuf.append(" </x:father>\n");
xmlstringbuf.append(" </x:dummy>");
try {
//From empty instance
DummyDocument newDoc = DummyDocument.Factory.newInstance();
DummyDocument.Dummy newDummy = newDoc.addNewDummy();
Node newNode = newDummy.newDomNode();
System.out.println("New Node = " + newNode);
//set Item
DummyDocument new2Doc = DummyDocument.Factory.newInstance();
DummyDocument.Dummy new2Dummy = new2Doc.addNewDummy();
Father newFather= Father.Factory.newInstance();
newFather.setSon("son");
new2Dummy.setFather(newFather);
Node new2Node = new2Dummy.newDomNode();
System.out.println("SetFather Node = " + new2Node);
//With Loaded instance Document
DummyDocument doc = DummyDocument.Factory.parse(xmlstringbuf.toString());
Node node = doc.newDomNode();
System.out.println("node = " + node);
//Just Element Type Node
dufourrault.DummyDocument.Dummy dummy = doc.addNewDummy();
Node typeNode = dummy.newDomNode();
System.out.println("TypeNode = "+typeNode);
dufourrault.Father fatherType = Father.Factory.newInstance();
fatherType.setSon("son");
Node fatherTypeNode = fatherType.newDomNode();
System.out.println("New Father Type Node: "+ fatherTypeNode);
} catch (NullPointerException npe) {
Assert.fail("test_jira_xmlbeans99() : Null Pointer Exception when create Dom Node");
} catch (Exception e) {
Assert.fail("test_jira_xmlbeans99() : Exception when create Dom Node");
}
}
    /**
     * For Testing jira issue 84.
     *
     * Worker thread that repeatedly runs XPath selections against two small
     * documents to probe multi-threaded selectPath behavior. Failures are
     * recorded in the inherited _throwable field (read back by the spawning
     * test via getException()) rather than thrown.
     */
    public static class XPathThread extends TestThread
    {
        public XPathThread()
        {
            super();
        }
        // Alternates between the two XPath scenarios ITERATION_COUNT times.
        public void run()
        {
            try {
                for (int i = 0; i < ITERATION_COUNT; i++) {
                    switch (i % 2) {
                        case 0:
                            runStatusXPath();
                            break;
                        case 1:
                            runDocXPath();
                            break;
                        default:
                            // unreachable for i % 2; kept from the original
                            System.out.println("Val: " + i);
                            break;
                    }
                }
                _result = true;
            } catch (Throwable t) {
                _throwable = t;
                t.printStackTrace();
            }
        }
        // Wildcard-namespace XPath with and without a predicate; expects 4 and 2 hits.
        public void runStatusXPath()
        {
            try {
                System.out.println("Testing Status");
                String statusDoc = "<statusreport xmlns=\"http://openuri.org/enumtest\">\n" +
                        " <status name=\"first\" target=\"all\">all</status>\n" +
                        " <status name=\"second\" target=\"all\">few</status>\n" +
                        " <status name=\"third\" target=\"none\">most</status>\n" +
                        " <status name=\"first\" target=\"none\">none</status>\n" +
                        "</statusreport>";
                XmlObject path = XmlObject.Factory.parse(statusDoc, xm);
                XmlObject[] resSet = path.selectPath("//*:status");
                Assert.assertTrue(resSet.length + "", resSet.length == 4);
                resSet = path.selectPath("//*:status[@name='first']");
                Assert.assertTrue(resSet.length == 2);
            } catch (Throwable t) {
                _throwable = t;
                t.printStackTrace();
            }
        }
        // Default-namespace descendant XPath plus an attribute predicate; expects 2 and 1 hits.
        public void runDocXPath()
        {
            try {
                System.out.println("Testing Doc");
                String docDoc = "<?xml version=\"1.0\"?>\n" +
                        "<doc xmlns:ext=\"http://somebody.elses.extension\">\n" +
                        " <a test=\"test\" />\n" +
                        " <b attr1=\"a1\" attr2=\"a2\" \n" +
                        " xmlns:java=\"http://xml.apache.org/xslt/java\">\n" +
                        " <a>\n" +
                        " </a> \n" +
                        " </b>\n" +
                        "</doc><!-- --> ";
                XmlObject path = XmlObject.Factory.parse(docDoc, xm);
                XmlObject[] resSet = path.selectPath("//a");
                Assert.assertTrue(resSet.length == 2);
                resSet = path.selectPath("//b[@attr2]");
                Assert.assertTrue(resSet.length == 1);
            } catch (Throwable t) {
                _throwable = t;
                t.printStackTrace();
            }
        }
    }
}
| |
package org.motechproject.nms.testing.it.utils;
import org.joda.time.DateTime;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import org.motechproject.nms.imi.domain.CallSummaryRecord;
import org.motechproject.nms.imi.domain.FileAuditRecord;
import org.motechproject.nms.imi.domain.FileType;
import org.motechproject.nms.imi.repository.FileAuditRecordDataService;
import org.motechproject.nms.imi.service.SettingsService;
import org.motechproject.nms.imi.web.contract.CdrFileNotificationRequest;
import org.motechproject.nms.imi.web.contract.FileInfo;
import org.motechproject.nms.kilkari.domain.Subscription;
import org.motechproject.nms.kilkari.domain.SubscriptionOrigin;
import org.motechproject.nms.kilkari.dto.CallDetailRecordDto;
import org.motechproject.nms.kilkari.repository.SubscriberDataService;
import org.motechproject.nms.kilkari.repository.SubscriptionPackDataService;
import org.motechproject.nms.kilkari.service.SubscriptionService;
import org.motechproject.nms.props.domain.CallDisconnectReason;
import org.motechproject.nms.props.domain.FinalCallStatus;
import org.motechproject.nms.props.domain.RequestId;
import org.motechproject.nms.props.domain.StatusCode;
import org.motechproject.nms.region.repository.CircleDataService;
import org.motechproject.nms.region.repository.DistrictDataService;
import org.motechproject.nms.region.repository.LanguageDataService;
import org.motechproject.nms.region.repository.StateDataService;
import org.motechproject.nms.region.service.DistrictService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import static org.junit.Assert.assertTrue;
public class CdrHelper {
    // Timestamp format embedded in OBD/CDR file names, e.g. OBD_NMS_20150101123000.csv
    private static final DateTimeFormatter TIME_FORMATTER = DateTimeFormat.forPattern("yyyyMMddHHmmss");
    private static final String OBD_FILENAME_FORMAT = "OBD_NMS_%s.csv";
    // Extracts the timestamp group from an OBD file name
    private static final Pattern OBD_TIMESTAMP_PATTERN = Pattern.compile("OBD_NMS_([0-9]*).csv");
    private static final Logger LOGGER = LoggerFactory.getLogger(CdrHelper.class);
    // Settings keys for the local/remote CDR directories
    public static final String LOCAL_CDR_DIR_PROP = "imi.local_cdr_dir";
    public static final String REMOTE_CDR_DIR_PROP = "imi.remote_cdr_dir";
    // Parameter keys used when building cdrFileNotification requests
    private static final String OBD_FILE_PARAM_KEY = "obdFile";
    private static final String CSR_FILE_PARAM_KEY = "csrFile";
    private static final String CSR_CHECKSUM_PARAM_KEY = "csrChecksum";
    private static final String CSR_COUNT_PARAM_KEY = "csrCount";
    private static final String CDR_FILE_PARAM_KEY = "cdrFile";
    private static final String CDR_CHECKSUM_PARAM_KEY = "cdrChecksum";
    private static final String CDR_COUNT_PARAM_KEY = "cdrCount";
    // Names derived from the OBD file name in the constructor
    private final String TEST_OBD_TIMESTAMP;
    private final String TEST_OBD_FILENAME;
    private final String TEST_CDR_DETAIL_FILENAME;
    private final String TEST_CDR_SUMMARY_FILENAME;
    // NOTE(review): these two constants are not referenced in the visible part
    // of this class — confirm they are still needed.
    private static final int CHILD_PACK_WEEKS = 48;
    private static final int NORMAL_PRIORITY = 0;
    private SettingsService settingsService;
    private FileAuditRecordDataService fileAuditRecordDataService;
    // Helpers for creating subscriptions and region data used by the CDR/CSR factories
    private SubscriptionHelper sh;
    private RegionHelper rh;
    // Records accumulated by the make* methods
    private List<CallDetailRecordDto> cdrs = new ArrayList<>();
    private List<CallSummaryRecord> csrs = new ArrayList<>();
    /**
     * Builds a CdrHelper around an explicit OBD file name.
     *
     * The file name must match OBD_NMS_&lt;timestamp&gt;.csv; its timestamp is
     * extracted and the companion cdrDetail_/cdrSummary_ file names derived.
     *
     * @param obdFileName OBD file name, e.g. "OBD_NMS_20150101123000.csv"
     * @throws IOException declared for subclass/helper use
     */
    public CdrHelper(
            SettingsService settingsService,
            SubscriptionService subscriptionService,
            SubscriberDataService subscriberDataService,
            SubscriptionPackDataService subscriptionPackDataService,
            LanguageDataService languageDataService,
            CircleDataService circleDataService,
            StateDataService stateDataService,
            DistrictDataService districtDataService,
            FileAuditRecordDataService fileAuditRecordDataService,
            DistrictService districtService,
            String obdFileName
    ) throws IOException {
        sh = new SubscriptionHelper(subscriptionService, subscriberDataService, subscriptionPackDataService,
                languageDataService, circleDataService, stateDataService, districtDataService, districtService);
        rh = new RegionHelper(languageDataService, circleDataService, stateDataService, districtDataService,
                districtService);
        this.settingsService = settingsService;
        this.fileAuditRecordDataService = fileAuditRecordDataService;
        TEST_OBD_FILENAME = obdFileName;
        // The file name must embed a timestamp; fail fast if it does not.
        Matcher m = OBD_TIMESTAMP_PATTERN.matcher(obdFileName);
        assertTrue(m.find());
        TEST_OBD_TIMESTAMP = m.group(1);
        TEST_CDR_DETAIL_FILENAME = String.format("cdrDetail_%s", TEST_OBD_FILENAME);
        TEST_CDR_SUMMARY_FILENAME = String.format("cdrSummary_%s", TEST_OBD_FILENAME);
    }
    /**
     * Convenience constructor: delegates to the main constructor with an OBD
     * file name generated from the current time.
     */
    public CdrHelper(
            SettingsService settingsService,
            SubscriptionService subscriptionService,
            SubscriberDataService subscriberDataService,
            SubscriptionPackDataService subscriptionPackDataService,
            LanguageDataService languageDataService,
            CircleDataService circleDataService,
            StateDataService stateDataService,
            DistrictDataService districtDataService,
            FileAuditRecordDataService fileAuditRecordDataService,
            DistrictService districtService
    ) throws IOException {
        this(settingsService, subscriptionService, subscriberDataService, subscriptionPackDataService,
                languageDataService, circleDataService, stateDataService, districtDataService,
                fileAuditRecordDataService, districtService,
                String.format(OBD_FILENAME_FORMAT, DateTime.now().toString(TIME_FORMATTER)));
    }
    /** Returns the live (mutable) list of CDRs accumulated so far. */
    public List<CallDetailRecordDto> getCdrs() {
        return cdrs;
    }
/**
 * Builds a call-detail-record DTO for the given subscription, pre-populated
 * with fixed region defaults (hindi language, delhi circle, dummy operator)
 * and a randomized 110-130s message play duration.
 */
private CallDetailRecordDto makeCdrDto(Subscription sub) {
    CallDetailRecordDto dto = new CallDetailRecordDto();
    // Identity fields tie the record back to the subscription.
    dto.setRequestId(new RequestId(sub.getSubscriptionId(), timestamp()));
    dto.setMsisdn(sub.getSubscriber().getCallingNumber());
    // Call answered five hours ago; play duration 110 + [0,20) seconds.
    dto.setCallAnswerTime(DateTime.now().minusHours(5));
    dto.setMsgPlayDuration(110 + (int) (Math.random() * 20));
    // Region defaults.
    dto.setLanguageLocationId(rh.hindiLanguage().getCode());
    dto.setCircleId(rh.delhiCircle().getName());
    dto.setOperatorId("xx");
    return dto;
}
/**
 * Builds a call-summary record for the given subscription with the same
 * region defaults used by {@code makeCdrDto}.
 */
private CallSummaryRecord makeCsr(Subscription sub) {
    CallSummaryRecord record = new CallSummaryRecord();
    record.setRequestId(new RequestId(sub.getSubscriptionId(), timestamp()).toString());
    record.setMsisdn(sub.getSubscriber().getCallingNumber());
    // Region defaults: hindi language in the delhi circle.
    record.setLanguageLocationCode(rh.hindiLanguage().getCode());
    record.setCircle(rh.delhiCircle().getName());
    return record;
}
// Every OBD failure outcome a generated CDR may randomly carry.
private static final StatusCode[] failureReasons = {
        StatusCode.OBD_FAILED_BUSY,
        StatusCode.OBD_FAILED_NOANSWER,
        StatusCode.OBD_FAILED_NOATTEMPT,
        StatusCode.OBD_FAILED_OTHERS,
        StatusCode.OBD_FAILED_SWITCHEDOFF
};

/** Picks one of the OBD failure status codes uniformly at random. */
private StatusCode randomFailureStatusCode() {
    int choice = (int) (Math.random() * failureReasons.length);
    return failureReasons[choice];
}
/**
 * Appends CDRs for one freshly-created subscription: {@code numFailure}
 * randomly-failed attempts followed, when requested, by a single successful
 * attempt.  Every record references week 5 of the child pack.
 */
public void makeSingleCallCdrs(int numFailure, boolean eventuallySuccessful) {
    if (cdrs == null) { cdrs = new ArrayList<>(); }
    Subscription sub = sh.mksub(SubscriptionOrigin.MCTS_IMPORT, DateTime.now().minusDays(30));
    for (int attempt = 0; attempt < numFailure; attempt++) {
        cdrs.add(singleCallCdr(sub, randomFailureStatusCode()));
    }
    if (eventuallySuccessful) {
        cdrs.add(singleCallCdr(sub, StatusCode.OBD_SUCCESS_CALL_CONNECTED));
    }
}

/** Builds one week-5 child-pack CDR for {@code sub} carrying the given status. */
private CallDetailRecordDto singleCallCdr(Subscription sub, StatusCode status) {
    CallDetailRecordDto cdr = makeCdrDto(sub);
    cdr.setStatusCode(status);
    cdr.setContentFile(sh.childPack().getMessages().get(5).getMessageFileName());
    cdr.setCallDisconnectReason(CallDisconnectReason.NORMAL_DROP);
    cdr.setWeekId("w5_1");
    return cdr;
}
/**
 * Appends four batches of CDRs, each record backed by its own new subscription:
 * successes (week 4), no-answer failures (week 5), pack-completing successes
 * (final child-pack week), and IVR-originated successes (week 6).
 */
public void makeCdrs(int numSuccess, int numFailed, int numComplete, int numIvr) {
    if (cdrs == null) { cdrs = new ArrayList<>(); }
    for (int i = 0; i < numSuccess; i++) {
        appendGeneratedCdr(SubscriptionOrigin.MCTS_IMPORT, 30,
                StatusCode.OBD_SUCCESS_CALL_CONNECTED, 4, "w4_1");
    }
    for (int i = 0; i < numFailed; i++) {
        appendGeneratedCdr(SubscriptionOrigin.MCTS_IMPORT, 30,
                StatusCode.OBD_FAILED_NOANSWER, 5, "w5_1");
    }
    for (int i = 0; i < numComplete; i++) {
        // Subscription old enough to have received the entire child pack.
        appendGeneratedCdr(SubscriptionOrigin.MCTS_IMPORT, CHILD_PACK_WEEKS * 7,
                StatusCode.OBD_SUCCESS_CALL_CONNECTED, CHILD_PACK_WEEKS - 1,
                String.format("w%d_1", CHILD_PACK_WEEKS));
    }
    for (int i = 0; i < numIvr; i++) {
        appendGeneratedCdr(SubscriptionOrigin.IVR, 30,
                StatusCode.OBD_SUCCESS_CALL_CONNECTED, 6, "w6_1");
    }
}

/** Creates a fresh subscription and appends one CDR built from the given attributes. */
private void appendGeneratedCdr(SubscriptionOrigin origin, int daysAgo, StatusCode status,
                                int messageIndex, String weekId) {
    Subscription sub = sh.mksub(origin, DateTime.now().minusDays(daysAgo));
    CallDetailRecordDto cdr = makeCdrDto(sub);
    cdr.setStatusCode(status);
    cdr.setContentFile(sh.childPack().getMessages().get(messageIndex).getMessageFileName());
    cdr.setCallDisconnectReason(CallDisconnectReason.NORMAL_DROP);
    cdr.setWeekId(weekId);
    cdrs.add(cdr);
}
/**
 * Appends {@code numFailed} failed (no-attempt) call-summary records, each
 * backed by its own new subscription and referencing week 7, message 7.
 */
public void makeCsrs(int numFailed) {
    if (csrs == null) { csrs = new ArrayList<>(); }
    for (int count = 0; count < numFailed; count++) {
        Subscription sub = sh.mksub(SubscriptionOrigin.MCTS_IMPORT, DateTime.now().minusDays(30));
        CallSummaryRecord record = makeCsr(sub);
        record.setStatusCode(StatusCode.OBD_FAILED_NOATTEMPT.getValue());
        record.setContentFileName(sh.childPack().getMessages().get(7).getMessageFileName());
        record.setWeekId("w7_1");
        record.setPriority(NORMAL_PRIORITY);
        record.setFinalStatus(FinalCallStatus.FAILED.getValue());
        record.setAttempts(1);
        // Placeholder values for fields the tests do not inspect.
        record.setCallFlowUrl("url");
        record.setCli("cli");
        record.setServiceId("id");
        csrs.add(record);
    }
}
/** @return the timestamp extracted from the OBD file name. */
public String timestamp() {
    return TEST_OBD_TIMESTAMP;
}
/** @return the OBD target file name this helper was built around. */
public String obd() {
    return TEST_OBD_FILENAME;
}
/** @return the derived CDR summary file name. */
public String csr() {
    return TEST_CDR_SUMMARY_FILENAME;
}
/** @return the derived CDR detail file name. */
public String cdr() {
    return TEST_CDR_DETAIL_FILENAME;
}
/** @return number of detail records built so far (NPE if none were built). */
public int cdrCount() {
    return cdrs.size();
}
/** @return number of summary records built so far (NPE if none were built). */
public int csrCount() {
    return csrs.size();
}
/**
 * Serializes a CDR DTO into one IMI detail-file CSV line.  Columns the tests
 * do not inspect (call id, raw timestamps, pulse count, priority) are filled
 * with fixed placeholder values.
 */
public static String csvLineFromCdr(CallDetailRecordDto cdr) {
    Object[] columns = {
            cdr.getRequestId().toString(),               // REQUEST_ID
            cdr.getMsisdn(),                             // MSISDN
            "xxx",                                       // CALL_ID
            1,                                           // ATTEMPT_NO
            1,                                           // CALL_START_TIME
            cdr.getCallAnswerTime().getMillis() / 1000,  // CALL_ANSWER_TIME
            1,                                           // CALL_END_TIME
            1,                                           // CALL_DURATION_IN_PULSE
            cdr.getStatusCode().getValue(),              // CALL_STATUS
            cdr.getLanguageLocationId(),                 // LANGUAGE_LOCATION_ID
            cdr.getContentFile(),                        // CONTENT_FILE
            1,                                           // MSG_PLAY_START_TIME
            1 + cdr.getMsgPlayDuration(),                // MSG_PLAY_END_TIME
            cdr.getCircleId(),                           // CIRCLE_ID
            cdr.getOperatorId(),                         // OPERATOR_ID
            0,                                           // PRIORITY
            cdr.getCallDisconnectReason().getValue(),    // CALL_DISCONNECT_REASON
            cdr.getWeekId()                              // WEEK_ID
    };
    StringBuilder line = new StringBuilder();
    for (int i = 0; i < columns.length; i++) {
        if (i > 0) { line.append(','); }
        line.append(columns[i]);
    }
    return line.toString();
}
/** Serializes a call-summary record into one IMI summary-file CSV line. */
public static String csvLineFromCsr(CallSummaryRecord csr) {
    Object[] columns = {
            csr.getRequestId(),
            csr.getServiceId(),
            csr.getMsisdn(),
            csr.getCli(),
            csr.getPriority(),
            csr.getCallFlowUrl(),
            csr.getContentFileName(),
            csr.getWeekId(),
            csr.getLanguageLocationCode(),
            csr.getCircle(),
            csr.getFinalStatus(),
            csr.getStatusCode(),
            csr.getAttempts()
    };
    StringBuilder line = new StringBuilder();
    for (int i = 0; i < columns.length; i++) {
        if (i > 0) { line.append(','); }
        line.append(columns[i]);
    }
    return line.toString();
}
/** @return the configured remote CDR directory (IMI-side drop location). */
public String remoteDir() {
    return settingsService.getSettingsFacade().getProperty(REMOTE_CDR_DIR_PROP);
}
/** @return the configured local CDR directory (our working copy location). */
public String localDir() {
    return settingsService.getSettingsFacade().getProperty(LOCAL_CDR_DIR_PROP);
}
/**
 * Writes the in-memory CSR list to a summary CSV file (header plus one line
 * per record) in the given directory.
 *
 * @param dir directory to write into
 * @param numInvalidLines how many leading data lines get an extra bogus field
 *                        appended (to simulate corrupt input)
 * @return the file that was written
 * @throws IOException if the file cannot be created or written
 */
private File doMakeCsrFile(String dir, int numInvalidLines) throws IOException {
    File file = new File(dir, csr());
    LOGGER.debug("Creating summary file {}...", file);
    int remainingInvalidLines = numInvalidLines;
    // try-with-resources: the original leaked the writer if any write threw.
    try (BufferedWriter writer = new BufferedWriter(new FileWriter(file))) {
        writer.write(org.motechproject.nms.imi.service.impl.CsrHelper.CSR_HEADER);
        writer.write("\n");
        for (CallSummaryRecord csr : csrs) {
            writer.write(csvLineFromCsr(csr));
            if (remainingInvalidLines > 0) {
                writer.write(",invalid_field");
                remainingInvalidLines--;
            }
            writer.write("\n");
        }
    }
    return file;
}
/** Writes the summary file into the local CDR directory, corrupting the first {@code numInvalidLines} lines. */
public void makeLocalCsrFile(int numInvalidLines) throws IOException {
    doMakeCsrFile(localDir(), numInvalidLines);
}
/** Writes a clean summary file into the local CDR directory. */
public void makeLocalCsrFile() throws IOException {
    doMakeCsrFile(localDir(), 0);
}
/** Writes the summary file into the remote CDR directory, corrupting the first {@code numInvalidLines} lines. */
public void makeRemoteCsrFile(int numInvalidLines) throws IOException {
    doMakeCsrFile(remoteDir(), numInvalidLines);
}
/** Writes a clean summary file into the remote CDR directory. */
public void makeRemoteCsrFile() throws IOException {
    doMakeCsrFile(remoteDir(), 0);
}
/**
 * Creates a target-file audit record so later processing can find the OBD file.
 *
 * @param valid   when false, records a bogus file name ("xxx") instead of the real OBD name
 * @param success when false, records an "ERROR" failure message
 */
public void createObdFileAuditRecord(boolean valid, boolean success) throws IOException, NoSuchAlgorithmException {
    fileAuditRecordDataService.create(new FileAuditRecord(
            FileType.TARGET_FILE,
            valid ? obd() : "xxx",
            success,
            success ? null : "ERROR",
            123,      // record count placeholder
            "123abc"  // checksum placeholder
    ));
}
/**
 * Writes the in-memory CDR list to a detail CSV file (header plus one line
 * per record) in the given directory.
 *
 * @param dir directory to write into
 * @param numInvalidLines how many leading data lines get an extra bogus field
 *                        appended (to simulate corrupt input)
 * @return the file that was written
 * @throws IOException if the file cannot be created or written
 */
private File doMakeCdrFile(String dir, int numInvalidLines) throws IOException {
    File file = new File(dir, cdr());
    LOGGER.debug("Creating detail file {}...", file);
    int remainingInvalidLines = numInvalidLines;
    // try-with-resources: the original leaked the writer if any write threw.
    try (BufferedWriter writer = new BufferedWriter(new FileWriter(file))) {
        writer.write(org.motechproject.nms.imi.service.impl.CdrHelper.CDR_HEADER);
        writer.write("\n");
        for (CallDetailRecordDto cdr : cdrs) {
            writer.write(csvLineFromCdr(cdr));
            if (remainingInvalidLines > 0) {
                writer.write(",invalid_field");
                remainingInvalidLines--;
            }
            writer.write("\n");
        }
    }
    return file;
}
/** Writes a clean detail file into the local CDR directory. */
public File makeLocalCdrFile() throws IOException {
    return doMakeCdrFile(localDir(), 0);
}
/** Writes the detail file into the local CDR directory, corrupting the first {@code numInvalidLines} lines. */
public File makeLocalCdrFile(int numInvalidLines) throws IOException {
    return doMakeCdrFile(localDir(), numInvalidLines);
}
/** Writes a clean detail file into the remote CDR directory. */
public File makeRemoteCdrFile() throws IOException {
    return doMakeCdrFile(remoteDir(), 0);
}
/**
 * Writes the detail file into {@code dir}, corrupting the first
 * {@code numInvalidLines} lines.
 *
 * Bug fix: the {@code dir} argument was previously ignored and the file was
 * always written to {@link #remoteDir()}.
 */
public File makeRemoteCdrFile(String dir, int numInvalidLines) throws IOException {
    return doMakeCdrFile(dir, numInvalidLines);
}
/** @return checksum of the summary file in the local CDR directory. */
public String csrLocalChecksum() throws IOException, NoSuchAlgorithmException {
    return ChecksumHelper.checksum(new File(localDir(), csr()));
}
/**
 * Counts the data records in a CSV file, i.e. all lines after the header.
 *
 * @param file CSV file whose records are counted
 * @return number of non-header lines
 * @throws IOException if the file cannot be opened or read
 */
private int recordCount(File file) throws FileNotFoundException, IOException {
    int recordCount = 0;
    // try-with-resources closes the whole stream chain even when a read throws;
    // the original leaked all three streams on error.
    try (BufferedReader reader = new BufferedReader(new InputStreamReader(new FileInputStream(file)))) {
        reader.readLine(); // skip header
        while (reader.readLine() != null) {
            recordCount++;
        }
    }
    return recordCount;
}
/** @return checksum of the detail file in the local CDR directory. */
public String cdrLocalChecksum() throws IOException, NoSuchAlgorithmException {
    return ChecksumHelper.checksum(new File(localDir(), cdr()));
}
/** @return number of data records in the remote detail file. */
public int cdrRemoteRecordCount() throws IOException, FileNotFoundException {
    return recordCount(new File(remoteDir(), cdr()));
}
/** @return number of data records in the remote summary file. */
public int csrRemoteRecordCount() throws IOException, FileNotFoundException {
    return recordCount(new File(remoteDir(), csr()));
}
/** @return checksum of the summary file in the remote CDR directory. */
public String csrRemoteChecksum() throws IOException, NoSuchAlgorithmException {
    return ChecksumHelper.checksum(new File(remoteDir(), csr()));
}
/** @return checksum of the detail file in the remote CDR directory. */
public String cdrRemoteChecksum() throws IOException, NoSuchAlgorithmException {
    return ChecksumHelper.checksum(new File(remoteDir(), cdr()));
}
/**
 * Builds a cdrFileNotification request describing the local detail and
 * summary files (names, checksums, record counts).
 */
public CdrFileNotificationRequest cdrFileNotificationRequest() throws IOException, NoSuchAlgorithmException {
    FileInfo cdrFileInfo = new FileInfo(cdr(), cdrLocalChecksum(), cdrCount());
    FileInfo csrFileInfo = new FileInfo(csr(), csrLocalChecksum(), csrCount());
    return new CdrFileNotificationRequest(obd(), csrFileInfo, cdrFileInfo);
}
/**
 * Reconstructs a {@link CdrFileNotificationRequest} from the HTTP parameter
 * map shape produced by {@code cdrFileNotificationParams()}.
 */
public static CdrFileNotificationRequest requestFromParams(Map<String, Object> params) {
    String obdFile = (String) params.get(OBD_FILE_PARAM_KEY);
    FileInfo csrInfo = new FileInfo(
            (String) params.get(CSR_FILE_PARAM_KEY),
            (String) params.get(CSR_CHECKSUM_PARAM_KEY),
            (int) params.get(CSR_COUNT_PARAM_KEY));
    FileInfo cdrInfo = new FileInfo(
            (String) params.get(CDR_FILE_PARAM_KEY),
            (String) params.get(CDR_CHECKSUM_PARAM_KEY),
            (int) params.get(CDR_COUNT_PARAM_KEY));
    return new CdrFileNotificationRequest(obdFile, csrInfo, cdrInfo);
}
/**
 * Builds the HTTP parameter map for a cdrFileNotification call; the inverse
 * of {@code requestFromParams(Map)}.
 */
public Map<String, Object> cdrFileNotificationParams() throws IOException, NoSuchAlgorithmException{
    Map<String, Object> params = new HashMap<>();
    // Target (OBD) file name.
    params.put(OBD_FILE_PARAM_KEY, obd());
    // Summary file: name, checksum, record count.
    params.put(CSR_FILE_PARAM_KEY, csr());
    params.put(CSR_CHECKSUM_PARAM_KEY, csrLocalChecksum());
    params.put(CSR_COUNT_PARAM_KEY, csrCount());
    // Detail file: name, checksum, record count.
    params.put(CDR_FILE_PARAM_KEY, cdr());
    params.put(CDR_CHECKSUM_PARAM_KEY, cdrLocalChecksum());
    params.put(CDR_COUNT_PARAM_KEY, cdrCount());
    return params;
}
}
| |
package org.apache.maven.artifact;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.io.File;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import org.apache.maven.artifact.handler.ArtifactHandler;
import org.apache.maven.artifact.metadata.ArtifactMetadata;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.artifact.resolver.filter.ArtifactFilter;
import org.apache.maven.artifact.versioning.ArtifactVersion;
import org.apache.maven.artifact.versioning.DefaultArtifactVersion;
import org.apache.maven.artifact.versioning.OverConstrainedVersionException;
import org.apache.maven.artifact.versioning.VersionRange;
import org.codehaus.plexus.util.StringUtils;
/**
 * Default, mutable implementation of {@link Artifact}.
 *
 * Identity is (groupId, artifactId, version, type, classifier); see
 * {@link #equals(Object)}, {@link #hashCode()} and {@link #compareTo(Artifact)}.
 * The {@code version} may be resolved from a {@link VersionRange}.
 *
 * @author Jason van Zyl
 */
public class DefaultArtifact
    implements Artifact
{
    private String groupId;
    private String artifactId;
    // Unresolved version form, e.g. "1.0-SNAPSHOT" rather than a timestamped snapshot.
    private String baseVersion;
    private final String type;
    private final String classifier;
    private volatile String scope;
    private volatile File file;
    private ArtifactRepository repository;
    private String downloadUrl;
    private ArtifactFilter dependencyFilter;
    private ArtifactHandler artifactHandler;
    private List<String> dependencyTrail;
    private volatile String version;
    private VersionRange versionRange;
    private volatile boolean resolved;
    private boolean release;
    private List<ArtifactVersion> availableVersions;
    // Lazily created; keyed by ArtifactMetadata.getKey().
    private Map<Object, ArtifactMetadata> metadataMap;
    private boolean optional;

    /** Convenience constructor for a concrete, non-optional version. */
    public DefaultArtifact( String groupId, String artifactId, String version, String scope, String type,
                            String classifier, ArtifactHandler artifactHandler )
    {
        this( groupId, artifactId, VersionRange.createFromVersion( version ), scope, type, classifier, artifactHandler,
              false );
    }

    /** Convenience constructor for a non-optional artifact with a version range. */
    public DefaultArtifact( String groupId, String artifactId, VersionRange versionRange, String scope, String type,
                            String classifier, ArtifactHandler artifactHandler )
    {
        this( groupId, artifactId, versionRange, scope, type, classifier, artifactHandler, false );
    }

    /**
     * Main constructor.  A null classifier falls back to the handler's default
     * classifier; identity fields are validated before returning.
     *
     * @throws InvalidArtifactRTException when groupId/artifactId/type/version are missing
     */
    @SuppressWarnings( "checkstyle:parameternumber" )
    public DefaultArtifact( String groupId, String artifactId, VersionRange versionRange, String scope, String type,
                            String classifier, ArtifactHandler artifactHandler, boolean optional )
    {
        this.groupId = groupId;
        this.artifactId = artifactId;
        this.versionRange = versionRange;
        // Pin version/baseVersion now if the range recommends a version.
        selectVersionFromNewRangeIfAvailable();
        this.artifactHandler = artifactHandler;
        this.scope = scope;
        this.type = type;
        if ( classifier == null )
        {
            classifier = artifactHandler.getClassifier();
        }
        this.classifier = classifier;
        this.optional = optional;
        validateIdentity();
    }

    /** Rejects artifacts missing any identity component. */
    private void validateIdentity()
    {
        if ( empty( groupId ) )
        {
            throw new InvalidArtifactRTException( groupId, artifactId, getVersion(), type,
                                                  "The groupId cannot be empty." );
        }
        // NOTE(review): unlike groupId this only checks for null, so an empty-string
        // artifactId passes despite the message — confirm whether that is intended.
        if ( artifactId == null )
        {
            throw new InvalidArtifactRTException( groupId, artifactId, getVersion(), type,
                                                  "The artifactId cannot be empty." );
        }
        if ( type == null )
        {
            throw new InvalidArtifactRTException( groupId, artifactId, getVersion(), type,
                                                  "The type cannot be empty." );
        }
        if ( ( version == null ) && ( versionRange == null ) )
        {
            throw new InvalidArtifactRTException( groupId, artifactId, getVersion(), type,
                                                  "The version cannot be empty." );
        }
    }

    /** @return true when the value is null or whitespace-only */
    private boolean empty( String value )
    {
        return ( value == null ) || ( value.trim().length() < 1 );
    }

    public String getClassifier()
    {
        return classifier;
    }

    public boolean hasClassifier()
    {
        return StringUtils.isNotEmpty( classifier );
    }

    public String getScope()
    {
        return scope;
    }

    public String getGroupId()
    {
        return groupId;
    }

    public String getArtifactId()
    {
        return artifactId;
    }

    public String getVersion()
    {
        return version;
    }

    /** Sets a concrete version and discards any version range. */
    public void setVersion( String version )
    {
        this.version = version;
        setBaseVersionInternal( version );
        versionRange = null;
    }

    public String getType()
    {
        return type;
    }

    public void setFile( File file )
    {
        this.file = file;
    }

    public File getFile()
    {
        return file;
    }

    public ArtifactRepository getRepository()
    {
        return repository;
    }

    public void setRepository( ArtifactRepository repository )
    {
        this.repository = repository;
    }

    // ----------------------------------------------------------------------
    //
    // ----------------------------------------------------------------------

    /** @return groupId:artifactId:type[:classifier]:baseVersion */
    public String getId()
    {
        return getDependencyConflictId() + ":" + getBaseVersion();
    }

    /** @return groupId:artifactId:type[:classifier] — the version-less conflict key */
    public String getDependencyConflictId()
    {
        StringBuilder sb = new StringBuilder( 128 );
        sb.append( getGroupId() );
        sb.append( ':' );
        appendArtifactTypeClassifierString( sb );
        return sb.toString();
    }

    private void appendArtifactTypeClassifierString( StringBuilder sb )
    {
        sb.append( getArtifactId() );
        sb.append( ':' );
        sb.append( getType() );
        if ( hasClassifier() )
        {
            sb.append( ':' );
            sb.append( getClassifier() );
        }
    }

    /** Adds metadata, merging it into any existing entry with the same key. */
    public void addMetadata( ArtifactMetadata metadata )
    {
        if ( metadataMap == null )
        {
            metadataMap = new HashMap<>();
        }

        ArtifactMetadata m = metadataMap.get( metadata.getKey() );
        if ( m != null )
        {
            m.merge( metadata );
        }
        else
        {
            metadataMap.put( metadata.getKey(), metadata );
        }
    }

    /** @return unmodifiable view of the attached metadata (empty if none) */
    public Collection<ArtifactMetadata> getMetadataList()
    {
        if ( metadataMap == null )
        {
            return Collections.emptyList();
        }

        return Collections.unmodifiableCollection( metadataMap.values() );
    }

    // ----------------------------------------------------------------------
    // Object overrides
    // ----------------------------------------------------------------------

    public String toString()
    {
        StringBuilder sb = new StringBuilder();
        if ( getGroupId() != null )
        {
            sb.append( getGroupId() );
            sb.append( ':' );
        }
        appendArtifactTypeClassifierString( sb );
        sb.append( ':' );
        if ( getBaseVersionInternal() != null )
        {
            sb.append( getBaseVersionInternal() );
        }
        else
        {
            // No resolved version yet; show the range instead.
            sb.append( versionRange.toString() );
        }
        if ( scope != null )
        {
            sb.append( ':' );
            sb.append( scope );
        }
        return sb.toString();
    }

    public int hashCode()
    {
        int result = 17;
        result = 37 * result + groupId.hashCode();
        result = 37 * result + artifactId.hashCode();
        result = 37 * result + type.hashCode();
        if ( version != null )
        {
            result = 37 * result + version.hashCode();
        }
        result = 37 * result + ( classifier != null ? classifier.hashCode() : 0 );
        return result;
    }

    public boolean equals( Object o )
    {
        if ( o == this )
        {
            return true;
        }

        if ( !( o instanceof Artifact ) )
        {
            return false;
        }

        Artifact a = (Artifact) o;

        // NOTE(review): these comparisons throw NPE when the other artifact's
        // groupId/version/etc. is null — confirm callers never compare unresolved artifacts.
        if ( !a.getGroupId().equals( groupId ) )
        {
            return false;
        }
        else if ( !a.getArtifactId().equals( artifactId ) )
        {
            return false;
        }
        else if ( !a.getVersion().equals( version ) )
        {
            return false;
        }
        else if ( !a.getType().equals( type ) )
        {
            return false;
        }
        else if ( a.getClassifier() == null ? classifier != null : !a.getClassifier().equals( classifier ) )
        {
            return false;
        }

        // We don't consider the version range in the comparison, just the resolved version
        return true;
    }

    public String getBaseVersion()
    {
        if ( baseVersion == null && version != null )
        {
            setBaseVersionInternal( version );
        }

        return baseVersion;
    }

    protected String getBaseVersionInternal()
    {
        if ( ( baseVersion == null ) && ( version != null ) )
        {
            setBaseVersionInternal( version );
        }

        return baseVersion;
    }

    public void setBaseVersion( String baseVersion )
    {
        setBaseVersionInternal( baseVersion );
    }

    /** Normalizes timestamped snapshot versions (per VERSION_FILE_PATTERN) back to "-SNAPSHOT". */
    protected void setBaseVersionInternal( String baseVersion )
    {
        Matcher m = VERSION_FILE_PATTERN.matcher( baseVersion );

        if ( m.matches() )
        {
            this.baseVersion = m.group( 1 ) + "-" + SNAPSHOT_VERSION;
        }
        else
        {
            this.baseVersion = baseVersion;
        }
    }

    /** Orders by groupId, artifactId, type, classifier (null sorts first), then version. */
    public int compareTo( Artifact a )
    {
        int result = groupId.compareTo( a.getGroupId() );
        if ( result == 0 )
        {
            result = artifactId.compareTo( a.getArtifactId() );
            if ( result == 0 )
            {
                result = type.compareTo( a.getType() );
                if ( result == 0 )
                {
                    if ( classifier == null )
                    {
                        if ( a.getClassifier() != null )
                        {
                            result = 1;
                        }
                    }
                    else
                    {
                        if ( a.getClassifier() != null )
                        {
                            result = classifier.compareTo( a.getClassifier() );
                        }
                        else
                        {
                            result = -1;
                        }
                    }
                    if ( result == 0 )
                    {
                        // We don't consider the version range in the comparison, just the resolved version
                        result = new DefaultArtifactVersion( version ).compareTo(
                            new DefaultArtifactVersion( a.getVersion() ) );
                    }
                }
            }
        }
        return result;
    }

    /** Records the resolved version and points file at the local-repository path. */
    public void updateVersion( String version, ArtifactRepository localRepository )
    {
        setResolvedVersion( version );
        setFile( new File( localRepository.getBasedir(), localRepository.pathOf( this ) ) );
    }

    public String getDownloadUrl()
    {
        return downloadUrl;
    }

    public void setDownloadUrl( String downloadUrl )
    {
        this.downloadUrl = downloadUrl;
    }

    public ArtifactFilter getDependencyFilter()
    {
        return dependencyFilter;
    }

    public void setDependencyFilter( ArtifactFilter artifactFilter )
    {
        dependencyFilter = artifactFilter;
    }

    public ArtifactHandler getArtifactHandler()
    {
        return artifactHandler;
    }

    public List<String> getDependencyTrail()
    {
        return dependencyTrail;
    }

    public void setDependencyTrail( List<String> dependencyTrail )
    {
        this.dependencyTrail = dependencyTrail;
    }

    public void setScope( String scope )
    {
        this.scope = scope;
    }

    public VersionRange getVersionRange()
    {
        return versionRange;
    }

    /** Replaces the range and re-selects the version from its recommendation (or clears it). */
    public void setVersionRange( VersionRange versionRange )
    {
        this.versionRange = versionRange;
        selectVersionFromNewRangeIfAvailable();
    }

    private void selectVersionFromNewRangeIfAvailable()
    {
        if ( ( versionRange != null ) && ( versionRange.getRecommendedVersion() != null ) )
        {
            selectVersion( versionRange.getRecommendedVersion().toString() );
        }
        else
        {
            // No recommendation available: version is unknown until resolution.
            version = null;
            baseVersion = null;
        }
    }

    public void selectVersion( String version )
    {
        this.version = version;
        setBaseVersionInternal( version );
    }

    public void setGroupId( String groupId )
    {
        this.groupId = groupId;
    }

    public void setArtifactId( String artifactId )
    {
        this.artifactId = artifactId;
    }

    /** @return true for "-SNAPSHOT" base versions and the LATEST meta-version */
    public boolean isSnapshot()
    {
        return getBaseVersion() != null
            && ( getBaseVersion().endsWith( SNAPSHOT_VERSION ) || getBaseVersion().equals( LATEST_VERSION ) );
    }

    public void setResolved( boolean resolved )
    {
        this.resolved = resolved;
    }

    public boolean isResolved()
    {
        return resolved;
    }

    public void setResolvedVersion( String version )
    {
        this.version = version;
        // retain baseVersion
    }

    public void setArtifactHandler( ArtifactHandler artifactHandler )
    {
        this.artifactHandler = artifactHandler;
    }

    public void setRelease( boolean release )
    {
        this.release = release;
    }

    public boolean isRelease()
    {
        return release;
    }

    public List<ArtifactVersion> getAvailableVersions()
    {
        return availableVersions;
    }

    public void setAvailableVersions( List<ArtifactVersion> availableVersions )
    {
        this.availableVersions = availableVersions;
    }

    public boolean isOptional()
    {
        return optional;
    }

    /** @throws OverConstrainedVersionException when the range cannot produce a version */
    public ArtifactVersion getSelectedVersion()
        throws OverConstrainedVersionException
    {
        return versionRange.getSelectedVersion( this );
    }

    public boolean isSelectedVersionKnown()
        throws OverConstrainedVersionException
    {
        return versionRange.isSelectedVersionKnown( this );
    }

    public void setOptional( boolean optional )
    {
        this.optional = optional;
    }
}
| |
package var;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import pacbio.SiteR;
import stream.ConcurrentLegacyReadInputStream;
import stream.RTextInputStream;
import stream.Read;
import stream.SiteScore;
import stream.SiteScoreR;
import dna.CoverageArray;
import dna.Data;
import dna.Parser;
import dna.Timer;
import fileIO.ReadWrite;
import fileIO.TextFile;
import align2.ListNum;
import align2.MultiStateAligner9PacBio;
import align2.MultiStateAligner9ts;
import align2.Tools;
import align2.TranslateColorspaceRead;
/** Splits output files across blocks for low memory usage.
* Uses id-sorted site list for even lower memory usage. */
public class GenerateVarlets3 {
/**
 * Entry point.  Positional args: reads1, reads2 (or "null"), outname; the
 * remaining args are key=value flags (genome build, thread count, buffer
 * sizes, sites/coverage files, etc.).  Requires a genome build to be set and
 * that outname contain '#' (checked later in the constructor).
 */
public static void main(String[] args){
    System.err.println("Executing "+(new Object() { }.getClass().getEnclosingClass().getName())+" "+Arrays.toString(args)+"\n");
    Data.GENOME_BUILD=-1;
    String reads1=args[0];
    String reads2=args[1].equalsIgnoreCase("null") ? null : args[1];
    String outname=args[2];
    String pcovFile=null;
    String covFile=null;
    String sitesfile=null;
    int minChrom=-1;
    int maxChrom=-1;
    int distFromDefined=-1;
    ReadWrite.USE_PIGZ=ReadWrite.USE_UNPIGZ=true;
    // Parse key=value flags; a value of "null" is treated as absent.
    for(int i=3; i<args.length; i++){
        final String arg=args[i];
        final String[] split=arg.split("=");
        String a=split[0].toLowerCase();
        String b=split.length>1 ? split[1] : null;
        if("null".equalsIgnoreCase(b)){b=null;}
        if(Parser.isJavaFlag(arg)){
            //jvm argument; do nothing
        }else if(Parser.parseZip(arg, a, b)){
            //do nothing
        }else if(a.equals("condense")){
            CONDENSE=Tools.parseBoolean(b);
        }else if(a.equals("condensesnps")){
            CONDENSE_SNPS=Tools.parseBoolean(b);
        }else if(a.startsWith("splitsubs")){
            SPLIT_SUBS=Tools.parseBoolean(b);
        }else if(a.startsWith("illumina")){
            // "illumina" is simply the inverse of PacBio mode.
            PAC_BIO_MODE=!Tools.parseBoolean(b);
        }else if(a.startsWith("pacbio")){
            PAC_BIO_MODE=Tools.parseBoolean(b);
        }else if(a.equals("tosssolo1")){
            TOSS_SOLO1=Tools.parseBoolean(b);
        }else if(a.equals("tosssolo2")){
            TOSS_SOLO2=Tools.parseBoolean(b);
        }else if(a.startsWith("minchrom")){
            minChrom=Integer.parseInt(b);
        }else if(a.startsWith("maxchrom")){
            maxChrom=Integer.parseInt(b);
        }else if(a.startsWith("build") || a.startsWith("genomebuild") || a.startsWith("genome")){
            Data.setGenome(Integer.parseInt(b));
            System.out.println("Set GENOME_BUILD to "+Data.GENOME_BUILD);
        }else if(a.equals("threads") || a.equals("t")){
            THREADS=(Integer.parseInt(b));
        }else if(a.startsWith("buffer") || a.startsWith("writebuffer")){
            WRITE_BUFFER=(Integer.parseInt(b));
        }else if(a.startsWith("maxreads")){
            MAX_READS=(Long.parseLong(b));
        }else if(a.startsWith("minenddist")){
            MIN_END_DIST=Integer.parseInt(b);
        }else if(a.startsWith("alignrow")){
            ALIGN_ROWS=Integer.parseInt(b);
        }else if(a.startsWith("aligncol")){
            ALIGN_COLUMNS=Integer.parseInt(b);
        }else if(a.startsWith("pcovtipdist")){
            PCOV_TIP_DIST=Integer.parseInt(b);
        }else if(a.equals("blocksize")){
            BLOCKSIZE=(Integer.parseInt(b));
        }else if(a.equals("norefcap") || a.equals("distfromdefined") || a.equals("maxdistfromdefined")){
            distFromDefined=(Integer.parseInt(b));
        }else if(a.startsWith("sites") || a.startsWith("sitesfile")){
            sitesfile=(b==null || b.equalsIgnoreCase("null") ? null : b);
        }else if(a.startsWith("pcov") || a.startsWith("perfectcov")){
            pcovFile=(b==null || b.equalsIgnoreCase("null") ? null : b);
        }else if(a.equals("cov") || a.startsWith("coverage")){
            covFile=(b==null || b.equalsIgnoreCase("null") ? null : b);
        }else{
            throw new RuntimeException("Unknown parameter "+args[i]);
        }
    }
    if(Data.GENOME_BUILD<0){throw new RuntimeException("Please set genome number.");}
    // Default to the full chromosome range when not specified.
    if(minChrom<0){minChrom=1;}
    if(maxChrom<0){maxChrom=Data.numChroms;}
    assert(minChrom<=maxChrom && minChrom>=0);
    if(ReadWrite.ZIPLEVEL<2){ReadWrite.ZIPLEVEL=2;}
    GenerateVarlets3 gv=new GenerateVarlets3(reads1, reads2, outname, MAX_READS, sitesfile, pcovFile, distFromDefined);
    gv.process();
}
/**
 * File-name convenience constructor: wraps the inputs in an RTextInputStream
 * and delegates.  The two input files must be distinct.
 */
public GenerateVarlets3(String fname1, String fname2, String outname_, long maxReads, String sitesfile_, String pcovFile, int distFromDefined_){
    this(new RTextInputStream(fname1, fname2, maxReads), outname_, maxReads, sitesfile_, pcovFile, distFromDefined_);
    assert(fname2==null || !fname1.equals(fname2)) : "Error - input files have same name.";
}
/**
 * Main constructor.  Sets up the sites file reader, the output key map, the
 * optional concurrent input stream, and optionally preloads per-chromosome
 * nearest-defined-base arrays and perfect-coverage arrays.
 *
 * @param distFromDefined_ max distance from a defined reference base for
 *        no-ref calls; &gt;0 triggers the (serializing) chromosome preload
 */
public GenerateVarlets3(RTextInputStream stream_, String outname_, long maxReads, String sitesfile_, String pcovFile, int distFromDefined_){
    sitesfile=sitesfile_;
    // NOTE(review): sitesfile may be null here (main allows it), in which case
    // TextFile receives a null name — confirm TextFile tolerates that.
    sitesTextFile=new TextFile(sitesfile, false, false);
    stream=stream_;
    outname=outname_;
    assert(outname==null || outname.contains("#")) : "Output file name must contain the character '#' to be used for key number.";
    makeKeyMap();
    cris=(USE_CRIS ? new ConcurrentLegacyReadInputStream(stream, maxReads) : null);
    if(CONDENSE_SNPS){assert(!SPLIT_SUBS);}
    maxDistFromDefined=distFromDefined_;
    if(maxDistFromDefined>0){
        //Unfortunately, this serializes the chromosome loading.
        nearestDefinedBase=new char[Data.numChroms+1][];
        for(int i=1; i<=Data.numChroms; i++){
            nearestDefinedBase[i]=Data.getChromosome(i).nearestDefinedBase();
        }
    }else{
        nearestDefinedBase=null;
    }
    if(pcovFile!=null){
        // One coverage array per chromosome; '#' in the name is the chromosome slot.
        assert(pcovFile.contains("#") || Data.numChroms<2);
        pcov=new CoverageArray[Data.numChroms+1];
        for(int i=1; i<=Data.numChroms; i++){
            String fname=pcovFile.replaceFirst("#", ""+i);
            pcov[i]=ReadWrite.read(CoverageArray.class, fname, true);
        }
    }else{
        pcov=null;
    }
}
/**
 * Flushes every buffered varlet list to disk in ascending key order, then
 * closes whichever input wrapper is in use.
 */
public void finish(){
    ArrayList<Long> sortedKeys=new ArrayList<Long>(keymap.keySet());
    Collections.sort(sortedKeys);
    for(long key : sortedKeys){
        ArrayList<Varlet> vars=keymap.remove(key);
        if(vars.isEmpty()){continue;}
        writeList(vars);
    }
    if(cris==null){stream.close();}
    else{ReadWrite.closeStream(cris);}
}
/**
 * Runs the pipeline: starts the input stream and THREADS worker threads,
 * waits for each worker to finish while accumulating its counters, then
 * flushes output and prints a summary.
 */
public void process(){
    Timer t=new Timer();
    t.start();
    if(sitesfile==null){
        sitemap=null;
    }
    cris.start();
    ProcessThread[] threadHandles=new ProcessThread[THREADS];
    for(int i=0; i<THREADS; i++){
        threadHandles[i]=new ProcessThread();
        threadHandles[i].start();
    }
    long varsMade=0;
    long norefsMade=0;
    long snpMade=0;
    long delMade=0;
    long subnMade=0;
    long subdMade=0;
    long subiMade=0;
    long insMade=0;
    long deltaLen=0;
    long sitesProcessed=0;
    long readsProcessed=0;
    for(int i=0; i<threadHandles.length; i++){
        ProcessThread pt=threadHandles[i];
        // Poll-wait until the worker reports itself finished (1s wake-ups).
        while(!pt.finished()){
            synchronized(pt){
                try {
                    pt.wait(1000);
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }
            }
        }
        // Safe to read the worker's counters once it is finished.
        varsMade+=pt.varsMade;
        norefsMade+=pt.norefsMade;
        snpMade+=pt.snpMade;
        delMade+=pt.delMade;
        subnMade+=pt.subnMade;
        subdMade+=pt.subdMade;
        subiMade+=pt.subiMade;
        insMade+=pt.insMade;
        deltaLen+=pt.deltaLen;
        sitesProcessed+=pt.sitesProcessed;
        readsProcessed+=pt.readsProcessed;
    }
    sitesTextFile.close();
    assert(sitemap==null || sitemap.size()==0) : sitemap;
    finish();
    t.stop();
    System.out.println("\nOutput variations count");
    System.out.println("Total (minus no-ref): \t"+(varsMade-norefsMade));
    System.out.println("Deletions:            \t"+(delMade));
    System.out.println("D-type subs:          \t"+(subdMade));
    System.out.println("Insertions:           \t"+(insMade));
    System.out.println("I-type subs:          \t"+(subiMade));
    System.out.println("Snps:                 \t"+(snpMade));
    System.out.println("N-type subs:          \t"+(subnMade));
    System.out.println("No-refs:              \t"+(norefsMade));
    System.out.println("Delta Length:         \t"+(deltaLen));
    System.out.println("Lines Loaded:         \t"+(linesLoaded));
    System.out.println("Lines Retained:       \t"+(linesRetained));
    System.out.println("Reads Processed:      \t"+(readsProcessed));
    System.out.println("Sites Loaded:         \t"+(sitesLoaded));
    System.out.println("Sites Retained:       \t"+(sitesRetained));
    System.out.println("Sites Processed:      \t"+(sitesProcessed));
    System.out.println();
    System.out.println("Max Site Table Size:  \t"+maxSiteTableSize);
    System.out.println();
    System.out.println("Time:\t"+t);
}
/**
 * Reads site lines from the sites file into {@code sitemap} until a site with
 * numeric id greater than {@code maxID} has been loaded (the file is sorted
 * by id), closing the file at EOF.  Synchronized on {@code sitemap} so worker
 * threads can call it concurrently.
 *
 * @param tf the open sites file
 * @param maxID load sites at least up to this numeric id
 * @return the highest site id read so far (Long.MAX_VALUE once the file is exhausted)
 */
private final long readSites(TextFile tf, long maxID) {
    long maxFound=-1;
    // Semiperfect sites are only retained when no-ref calling is enabled.
    final boolean retainSemiperfect=maxDistFromDefined!=0;
    synchronized(sitemap){
        if(maxID>=maxSiteRead && tf.isOpen()){
            String s;
            for(s=tf.nextLine(); s!=null; s=tf.nextLine()){
                SiteR head=toImperfectSites(s, retainSemiperfect);
                if(head!=null){
                    sitemap.put(head.idPairnum, head);
                    long id=head.numericID();
                    // Input must be sorted by numeric id.
                    assert(id>=maxFound);
                    maxFound=id;
                }
                if(maxFound>maxID){break;}
            }
            maxSiteRead=Tools.max(maxFound, maxSiteRead);
            if(s==null){
                // EOF: close and mark everything as read.
                tf.close();
                maxSiteRead=Long.MAX_VALUE;
            }
        }
        if(maxSiteRead<=maxID){assert(!tf.isOpen());}
        maxSiteTableSize=Tools.max(maxSiteTableSize, sitemap.size());
    }
    return maxSiteRead;
}
/**
 * Parses one tab-delimited line of serialized sites into a linked SiteR chain,
 * discarding perfect sites (and optionally semiperfect ones), plus any site
 * fully covered by perfect reads to at least MIN_PCOV_DEPTH_TO_TOSS depth.
 * Updates the sitesLoaded/linesLoaded/sitesRetained/linesRetained counters.
 * @param s one line: tab-separated SiteScoreR text records for a single read
 * @param retainSemiperfect whether semiperfect (but not perfect) sites are kept
 * @return head of the retained-site chain, or null if nothing was retained
 */
public SiteR toImperfectSites(String s, boolean retainSemiperfect){
	SiteR head=null;
	SiteR prev=null;
	String[] split=s.split("\t");
	sitesLoaded+=split.length;
	linesLoaded++;
	for(int i=0; i<split.length; i++){
		SiteScoreR ssr=SiteScoreR.fromText(split[i]);
		boolean retain=true;
		if(ssr.perfect || (ssr.semiperfect && !retainSemiperfect)){retain=false;}
		//Note that this relies on the semiperfect tag being correct in order to generate no-refs from semiperfect reads.
		if(retain && !ssr.semiperfect && pcov!=null){
			CoverageArray ca=pcov[ssr.chrom];
			//Toss the site only if perfect coverage meets the threshold across the
			//entire span, extended by PCOV_TIP_DIST on both ends.
			boolean toss=true;
			for(int j=ssr.start-PCOV_TIP_DIST; toss && j<=ssr.stop+PCOV_TIP_DIST; j++){
				toss=ca.get(j)>=MIN_PCOV_DEPTH_TO_TOSS;
			}
			if(toss){retain=false;}
		}
		if(retain){
			SiteR sr=new SiteR(ssr);
			if(head==null){
				head=sr;
				prev=head;
			}else{
				assert(sr.idPairnum==prev.idPairnum) : "Not sorted correctly.";
				prev.next=sr;
				prev=sr;
			}
		}
	}
	if(head!=null){
		sitesRetained+=head.listLength();
		linesRetained++;
	}
	return head;
}
/**
 * Deserializes a full line of sites into a linked SiteR chain with no
 * filtering. Returns null when the line contains no sites.
 */
public static SiteR toImperfectSites2(String s){
	SiteScoreR[] scores=SiteScoreR.fromTextArray(s);
	if(scores==null || scores.length==0){return null;}
	SiteR first=new SiteR(scores[0]);
	SiteR tail=first;
	for(int i=1; i<scores.length; i++){
		SiteR node=new SiteR(scores[i]);
		tail.next=node;
		tail=node;
	}
	return first;
}
/**
 * Appends every varlet in the (non-empty) list to the output file for the
 * genomic block containing the first varlet's start position.
 */
private void writeList(ArrayList<Varlet> list){
	assert(list!=null && list.size()>0);
	final Varlet first=list.get(0);
	final String fname=fname(key(first.chromosome, first.beginLoc), outname);
	final boolean allowSubprocess=false;
	OutputStream os=ReadWrite.getOutputStream(fname, true, true, allowSubprocess);
	PrintWriter pw=new PrintWriter(os);
	for(Varlet v : list){pw.println(v.toText());}
	ReadWrite.finishWriting(pw, os, fname, allowSubprocess);
}
/**
 * Worker thread: pulls read lists from the input stream, converts each aligned
 * read to varlets (realigning to regenerate the match string when needed),
 * filters them, and buffers survivors for block-wise output.
 */
private final class ProcessThread extends Thread {

	public ProcessThread(){}

	/**
	 * Re-attaches stored alignment sites to each read (and its mate) in the
	 * batch. Sites live in the shared sitemap keyed by numeric read ID; pair
	 * member 2 is stored under the negated ID so both members get distinct keys.
	 * @param reads batch to fix; null or empty is a no-op
	 */
	private void fixReadSites(ArrayList<Read> reads){
		assert(sitemap!=null);
		if(reads==null || reads.size()==0){return;}

		//Find the highest read ID so the sites file can be advanced far enough.
		long max=-2;
		for(Read r : reads){
			max=Tools.max(max, r.numericID);
		}

		synchronized(sitemap){
			if(max>=maxSiteRead){
				readSites(sitesTextFile, max);
			}
			for(Read r : reads){
				fixSingleRead(r);
				Read r2=r.mate;
				if(r2!=null){fixSingleRead(r2);}
			}
		}
	}

	/**
	 * Rebuilds r.sites from the stored SiteR chain for this read, consuming the
	 * sitemap entry. Caller must hold the sitemap lock.
	 */
	private void fixSingleRead(Read r){
		long key=r.numericID;
		if((r.pairnum()&1)==1){
			key=-key; //Pair member 2 is keyed by the negated ID.
			assert(key<0);
		}
		SiteR head=sitemap.get(key);
		ArrayList<SiteScore> old=r.sites;
		r.sites=null;
		if(head!=null){
			r.sites=new ArrayList<SiteScore>();
			sitemap.remove(key);
			while(head!=null){
				//Note - this could be accelerated by sorting SiteR and the read's list by the same metric, e.g. position.
				SiteScore ss=find(head, old);
				assert(ss!=null) : "\nCan't find sr "+head+" in read\n"+r+"\nlist:\n"+old;
				r.sites.add(ss);
				head=head.next; //BUGFIX: the original mate branch omitted this advance, looping forever.
			}
		}
	}

	@Override
	public void run(){
		final boolean processReads=true;
		if(!processReads){System.err.println("Warning: Skipping read processing.");}
		if(cris!=null){
			//Concurrent input stream: each list must be returned after use.
			ListNum<Read> ln=cris.nextList();
			ArrayList<Read> reads=(ln!=null ? ln.list : null);
			while(!terminate && reads!=null && reads.size()>0){
				if(processReads){processReads(reads);}
				cris.returnList(ln.id, ln.list.isEmpty());
				ln=cris.nextList();
				reads=(ln!=null ? ln.list : null);
			}
			//Guard: an immediately-null first list previously caused an NPE here.
			if(ln!=null){cris.returnList(ln.id, ln.list.isEmpty());}
		}else{
			ArrayList<Read> reads=stream.nextList();
			while(!terminate && reads!=null && reads.size()>0){
				if(processReads){processReads(reads);}
				reads=stream.nextList();
			}
		}
		finished=true;
		synchronized(this){this.notifyAll();}
	}

	/** Routes each read (and mate) to processRead or multiprocessRead, honoring the solo-toss flags. */
	private void processReads(ArrayList<Read> reads){
		if(sitemap==null){
			for(Read r : reads){
				Read r2=r.mate;
				assert(r2==null || r.mate.mate==r);
				if(r2==null){
					processRead(r);
				}else{
					if(!TOSS_SOLO1 || r.paired()){processRead(r);}
					if(!TOSS_SOLO2 || r2.paired()){processRead(r2);}
				}
			}
		}else{
			//Sites were externalized; restore them before processing.
			fixReadSites(reads);
			for(Read r : reads){
				Read r2=r.mate;
				assert(r2==null || r.mate.mate==r);
				if(r2==null){
					multiprocessRead(r);
				}else{
					if(!TOSS_SOLO1 || r.paired()){multiprocessRead(r);}
					if(!TOSS_SOLO2 || r2.paired()){multiprocessRead(r2);}
				}
			}
		}
	}

	/** Processes a read once per alignment site, re-pointing the read at each site in turn. */
	private void multiprocessRead(Read r){
		if(r.numSites()==0){return;}
		readsProcessed++;
		for(SiteScore ss : r.sites){
			r.clearSite();
			r.setFromSite(ss);
			r.match=null; //Force realignment for this site.
			r.setPaired(ss.pairedScore>0);
			r.setPerfect(ss.perfect);
			r.setRescued(ss.rescued);
			processRead(r);
		}
	}

	/** Linear scan for the SiteScore equal to ssr; null if absent.
	 * (Kept: private inner-class members are reachable from the enclosing class.) */
	private SiteScore find(SiteScoreR ssr, ArrayList<SiteScore> list) {
		for(SiteScore ss : list){
			if(ssr.equals(ss)){return ss;}
		}
		return null;
	}

	/** Linear scan for the SiteScore equal to sr; null if absent. */
	private SiteScore find(SiteR sr, ArrayList<SiteScore> list) {
		for(SiteScore ss : list){
			if(sr.equals(ss)){return ss;}
		}
		return null;
	}

	/**
	 * Converts one mapped read into varlets and buffers those that survive the
	 * end-distance, no-ref-distance, and perfect-coverage filters.
	 */
	private void processRead(Read r){
		sitesProcessed++;
		assert(r.numericID<Integer.MAX_VALUE) : r.toText(false);
		boolean flag=false; //Per-read debug flag; the enabling block below is compiled out.
		if(false && (r.numericID==42829556)){
			System.err.println("Processing read:");
			System.err.println("\n"+r.toText(false));
			System.err.println("\n"+r.strand());
			System.err.println("\n");
			System.err.println(new String(r.bases));
			System.err.println(r.match==null ? "null" : new String(r.match));
			System.err.println("\n");
			tcr.verbose=true;
			flag=true;
			System.err.println("Mapped Length: "+(r.stop-r.start+1));
		}
		assert((r.chrom>=1)==r.mapped()) : r.toText(false);
		if(!r.mapped()){//Unmapped reads generate no varlets.
			assert(r.sites==null || r.sites.isEmpty()) : r.toText(false);
			return;
		}
		if(r.invalid()){return;} //Probably trimmed too short to be used.

		//Perfect matches contain no variations; skip them.
		if(r.match!=null){
			if(r.perfect()){//Hopefully this will be set correctly...
				assert(TranslateColorspaceRead.perfectMatch(r.match));
				return;
			}else if(TranslateColorspaceRead.perfectMatch(r.match)){
				return;
			}
		}
		if(flag){
			System.err.println("r.match = "+(r.match==null ? null : new String(r.match)));
			System.err.println("Mapped Length: "+(r.stop-r.start+1));
		}

		//No match string: realign against the reference to generate one.
		if(r.match==null){
			if(flag){
				System.err.println("realigning match string");
				System.err.println("Mapped Length: "+(r.stop-r.start+1));
			}
			tcr.realign_new(r, 20, true, 0, false); //Also generates the match string
			if(TranslateColorspaceRead.perfectMatch(r.match)){return;}
			if(flag){
				System.err.println("new match string:\n"+(r.match==null ? null : new String(r.match)));
				System.err.println("Mapped Length: "+(r.stop-r.start+1));
			}
		}
		r.errors=r.estimateErrors();
		if(r.match==null){
			System.err.println("Could not align read "+r.numericID);
			return;
		}else if(r.match[0]=='X'){
			System.err.println("Could not align read "+r.numericID+": "+new String(r.match));
			return;
		}

		ArrayList<Varlet> vars=tcr.toVars(r, CONDENSE, CONDENSE_SNPS, SPLIT_SUBS);
		if(vars==null){return;}

		char[] nearest=(nearestDefinedBase == null ? null : nearestDefinedBase[r.chrom]);
		CoverageArray ca=(pcov==null ? null : pcov[r.chrom]);
		for(Varlet v : vars){
			if(v.endDist>=MIN_END_DIST){ //Ignore varlets too close to the read ends.
				assert(v.numUniqueReads==1);
				assert(v.numSemiUniqueReads==1);
				assert(v.numPlusReads1+v.numMinusReads1+v.numPlusReads2+v.numMinusReads2==1);
				assert(v.numReads>=1);
				assert(v.numReads==r.copies);
				assert(v.readLen==r.length());
				boolean retain=true;
				//Drop NOREF varlets too far from any defined reference base.
				if(maxDistFromDefined>=0 && v.varType==Variation.NOREF){
					char dist=(maxDistFromDefined==0 ? 1 : Tools.min(nearest[v.beginLoc], nearest[v.endLoc]));
					if(dist>maxDistFromDefined){retain=false;}
				}
				//Drop varlets fully covered by perfect reads to sufficient depth.
				if(retain && v.varType!=Variation.NOREF && ca!=null){
					boolean toss=true;
					assert(PCOV_TIP_DIST>0);
					for(int j=v.beginLoc-PCOV_TIP_DIST; toss && j<=v.endLoc+PCOV_TIP_DIST; j++){
						toss=ca.get(j)>=MIN_PCOV_DEPTH_TO_TOSS;
					}
					if(toss){retain=false;}
				}
				if(retain){tallyAndAdd(v);}
			}
		}
	}

	/** Updates the per-type counters for a retained varlet, then buffers it for output. */
	private void tallyAndAdd(Varlet v){
		varsMade++;
		if(v.varType==Variation.NOREF){norefsMade++;}
		else if(v.varType==Variation.SNP){snpMade++;}
		else if(v.varType==Variation.DEL){delMade++;}
		else if(v.varType==Variation.INS){insMade++;}
		else if(v.varType==Variation.DELINS){
			//Classify substitutions by net length change.
			int a=v.lengthRef();
			int b=v.lengthVar();
			if(a==b){subnMade++;}
			else if(a>b){subdMade++;}
			else{subiMade++;}
		}
		deltaLen+=v.lengthDif();
		addVar(v);
	}

	/** Buffers v in its block's list, flushing it (sorted or merged) to disk when full.
	 * TODO: Synchronize once per read, not once per varlet. */
	private void addVar(Varlet v){
		long key=key(v.chromosome, v.beginLoc);
		ArrayList<Varlet> list=keymap.get(key);
		assert(list!=null) : "\nCan't find "+key+" in "+keymap.keySet()+"\n";
		synchronized(list){
			list.add(v);
			if(list.size()>=WRITE_BUFFER){
				if(MERGE_EQUAL_VARLETS){
					mergeEqualVarlets(list);
				}else{
					Collections.sort(list);
				}
				writeList(list);
				list.clear();
			}
		}
	}

	/**
	 * Sorts vars, merges each run of equal varlets into a single record, and
	 * compacts the list in place (nulled slots are removed by condenseStrict).
	 */
	private void mergeEqualVarlets(ArrayList<Varlet> vars){
		Collections.sort(vars);
		ArrayList<Varlet> list=new ArrayList<Varlet>(8);
		for(int i=0; i<vars.size(); i++){
			Varlet a=vars.get(i);
			vars.set(i, null);
			Varlet b=(list.isEmpty() ? null : list.get(0));
			if(b==null || a.equals(b)){
				list.add(a);
			}else{//Purge the completed run of equal varlets.
				Varlet c=StackVariations.mergeEqualVarlets(list);
				vars.set(i-1, c); //Slot i-1 was nulled on the previous iteration.
				list.clear();
				list.add(a);
			}
		}
		if(!list.isEmpty()){
			Varlet c=StackVariations.mergeEqualVarlets(list);
			//NOTE(review): index list.size()-1 looks suspicious (vs vars.size()-1);
			//it is only clearly correct when all elements formed a single run — verify.
			vars.set(list.size()-1, c);
		}
		Tools.condenseStrict(vars);
	}

	/** @return true once run() has completed. */
	protected boolean finished(){return finished;}
	/** Requests that run() stop after the current batch. */
	protected void terminate(){terminate=true;}

	/** Per-thread realigner; aligner variant is selected by PAC_BIO_MODE. */
	private final TranslateColorspaceRead tcr=new TranslateColorspaceRead(PAC_BIO_MODE ?
		new MultiStateAligner9PacBio(ALIGN_ROWS, ALIGN_COLUMNS) : new MultiStateAligner9ts(ALIGN_ROWS, ALIGN_COLUMNS));

	private boolean finished=false;
	private volatile boolean terminate=false; //volatile: set by another thread via terminate().

	//Per-thread statistics. NOTE(review): the enclosing class prints same-named
	//totals in its summary; presumably these are summed into them — verify.
	private long varsMade=0;
	private long norefsMade=0;
	private long snpMade=0;
	private long delMade=0;
	private long subnMade=0;
	private long subdMade=0;
	private long subiMade=0;
	private long insMade=0;
	private long deltaLen=0;
	private long sitesProcessed=0;
	private long readsProcessed=0;
}
/** Packs (chromosome, block index) into one long: chrom in the high 32 bits,
 * start/BLOCKSIZE (clamped at 0) in the low bits. */
protected static final long key(int chrom, int start){
	final long block=Tools.max(start, 0)/BLOCKSIZE;
	return (((long)chrom)<<32)+block;
}
/** Returns every block key for the given chromosome, covering its full length
 * (plus 1000 bases of slack) in BLOCKSIZE steps. */
protected static final long[] keys(final int chrom){
	final int lim=(Data.chromLengths[chrom]+1000)/BLOCKSIZE;
	final long[] array=new long[lim+1];
	for(int i=0; i<=lim; i++){
		array[i]=key(chrom, i*BLOCKSIZE);
	}
	return array;
}
/** Builds the per-block output filename by substituting the '#' in the pattern
 * with the genome build and block key. Compressed extensions are rejected
 * because blocks are appended to incrementally. */
protected static final String fname(long key, String outname){
	if(outname==null){outname="GV2TempFile_#.txt";}
	assert(outname.contains("#")) : outname;
	assert(!outname.endsWith(".gz") && !outname.endsWith(".zip") && !outname.endsWith(".bz2")) : outname;
	final String tag="b"+Data.GENOME_BUILD+"_"+key;
	return outname.replace("#", tag);
}
/** Initializes keymap with an empty buffer per genomic block and writes the
 * varlet text header to every block's output file. */
private final void makeKeyMap(){
	final String header=Varlet.textHeader()+"\n";
	keymap=new HashMap<Long, ArrayList<Varlet>>();
	for(int chrom=1; chrom<=Data.numChroms; chrom++){
		for(long key : keys(chrom)){
			keymap.put(key, new ArrayList<Varlet>(WRITE_BUFFER));
			ReadWrite.writeString(header, fname(key, outname), false);
		}
	}
}
/** Per-block varlet buffers, keyed by key(chrom, loc); populated by makeKeyMap(). */
private HashMap<Long, ArrayList<Varlet>> keymap;
/** Per-chromosome nearest-defined-base distances; null disables the NOREF distance filter. */
private final char[][] nearestDefinedBase;
/** Max distance from a defined base for NOREF varlets; negative disables the filter. */
private final int maxDistFromDefined;
/** Perfect-read coverage per chromosome; null disables the perfect-coverage toss filter. */
private final CoverageArray[] pcov;
/** Output filename pattern; must contain '#' (replaced per block key by fname()). */
public final String outname;
public final String sitesfile;
private TextFile sitesTextFile;
//NOTE(review): these counters are static while sitemap is per-instance —
//presumably only one instance exists at a time; verify before creating a second.
private static long maxSiteRead=-1;
private static long maxSiteTableSize=-1;
private static long sitesLoaded=0;
private static long sitesRetained=0;
private static long linesLoaded=0;
private static long linesRetained=0;
/** Stored sites keyed by signed read ID (negated for pair member 2). */
private HashMap<Long, SiteR> sitemap=new HashMap<Long, SiteR>(4096);
private final RTextInputStream stream;
private final ConcurrentLegacyReadInputStream cris;
public static boolean USE_CRIS=true; //Similar speed either way. "true" may be better with many threads.
public static int THREADS=Data.LOGICAL_PROCESSORS;
public static int WRITE_BUFFER=16000; //Bigger number uses more memory, for less frequent writes.
public static boolean CONDENSE=true;
public static boolean CONDENSE_SNPS=true;
public static boolean SPLIT_SUBS=false;
/** When set, skip read 1 (resp. read 2) of a pair unless it mapped as properly paired. */
public static boolean TOSS_SOLO1=false;
public static boolean TOSS_SOLO2=false;
public static boolean MERGE_EQUAL_VARLETS=false;
public static boolean PAC_BIO_MODE=true;
/** Alignment matrix dimensions for the per-thread realigner. */
public static int ALIGN_ROWS=2020;
public static int ALIGN_COLUMNS=3000;
public static long MAX_READS=-1;
/** Varlets closer than this to a read end are ignored. */
public static int MIN_END_DIST=4;
/** Genomic span (bases) covered by one output block/key. */
public static int BLOCKSIZE=1000000;
/** Imperfect reads fully covered by perfect reads to this depth or more will be tossed. */
public static int MIN_PCOV_DEPTH_TO_TOSS=2;
/** Extend perfect coverage depth requirement by this much of the tips of variations and reads before tossing them.
 * A higher number means more varlets will be retained. */
public static int PCOV_TIP_DIST=8;
}
| |
/*
* Copyright (c) 2000, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package sun.awt.datatransfer;
import java.awt.datatransfer.DataFlavor;
import java.awt.datatransfer.Transferable;
import java.awt.datatransfer.UnsupportedFlavorException;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.ObjectStreamClass;
import java.io.OutputStream;
import java.lang.reflect.Modifier;
import java.lang.reflect.Proxy;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
/**
* Proxies for another Transferable so that Serializable objects are never
* returned directly by DnD or the Clipboard. Instead, a new instance of the
* object is returned.
*
* @author Lawrence P.G. Cable
* @author David Mendenhall
*
* @since 1.4
*/
/**
 * Wraps another Transferable so that Serializable payloads are never handed
 * out directly by DnD or the Clipboard: for local, serialized-object flavors a
 * fresh deep copy is returned instead, insulating applications from each other.
 */
public class TransferableProxy implements Transferable {

    public TransferableProxy(Transferable t, boolean local) {
        transferable = t;
        isLocal = local;
    }

    public DataFlavor[] getTransferDataFlavors() {
        return transferable.getTransferDataFlavors();
    }

    public boolean isDataFlavorSupported(DataFlavor flavor) {
        return transferable.isDataFlavorSupported(flavor);
    }

    public Object getTransferData(DataFlavor df)
        throws UnsupportedFlavorException, IOException
    {
        final Object data = transferable.getTransferData(df);

        // Non-local transfers and non-serialized flavors pass straight through.
        if (data == null || !isLocal || !df.isFlavorSerializedObjectType()) {
            return data;
        }

        // Deep-copy via serialization, preserving each class's defining loader.
        final ByteArrayOutputStream baos = new ByteArrayOutputStream();
        final ClassLoaderObjectOutputStream oos =
            new ClassLoaderObjectOutputStream(baos);
        oos.writeObject(data);

        final ByteArrayInputStream bais =
            new ByteArrayInputStream(baos.toByteArray());
        try {
            final ClassLoaderObjectInputStream ois =
                new ClassLoaderObjectInputStream(bais, oos.getClassLoaderMap());
            return ois.readObject();
        } catch (ClassNotFoundException cnfe) {
            throw (IOException)new IOException().initCause(cnfe);
        }
    }

    protected final Transferable transferable;
    protected final boolean isLocal;
}
final class ClassLoaderObjectOutputStream extends ObjectOutputStream {
private final Map<Set<String>, ClassLoader> map =
new HashMap<Set<String>, ClassLoader>();
ClassLoaderObjectOutputStream(OutputStream os) throws IOException {
super(os);
}
protected void annotateClass(final Class<?> cl) throws IOException {
ClassLoader classLoader =
(ClassLoader)AccessController.doPrivileged(new PrivilegedAction() {
public Object run() {
return cl.getClassLoader();
}
});
Set<String> s = new HashSet<String>(1);
s.add(cl.getName());
map.put(s, classLoader);
}
protected void annotateProxyClass(final Class<?> cl) throws IOException {
ClassLoader classLoader =
(ClassLoader)AccessController.doPrivileged(new PrivilegedAction() {
public Object run() {
return cl.getClassLoader();
}
});
Class[] interfaces = cl.getInterfaces();
Set<String> s = new HashSet<String>(interfaces.length);
for (int i = 0; i < interfaces.length; i++) {
s.add(interfaces[i].getName());
}
map.put(s, classLoader);
}
Map<Set<String>, ClassLoader> getClassLoaderMap() {
return new HashMap(map);
}
}
/**
 * ObjectInputStream that resolves classes (and dynamic proxies) in the
 * ClassLoaders recorded by ClassLoaderObjectOutputStream, falling back to the
 * default resolution when no loader was recorded for a name set.
 */
final class ClassLoaderObjectInputStream extends ObjectInputStream {
    private final Map<Set<String>, ClassLoader> map;

    ClassLoaderObjectInputStream(InputStream is,
                                 Map<Set<String>, ClassLoader> map)
      throws IOException {
        super(is);
        if (map == null) {
            throw new NullPointerException("Null map");
        }
        this.map = map;
    }

    @Override
    protected Class<?> resolveClass(ObjectStreamClass classDesc)
      throws IOException, ClassNotFoundException {
        String className = classDesc.getName();

        Set<String> s = new HashSet<String>(1);
        s.add(className);

        ClassLoader classLoader = map.get(s);
        if (classLoader != null) {
            return Class.forName(className, false, classLoader);
        } else {
            return super.resolveClass(classDesc);
        }
    }

    @Override
    protected Class<?> resolveProxyClass(String[] interfaces)
      throws IOException, ClassNotFoundException {
        Set<String> s = new HashSet<String>(interfaces.length);
        for (int i = 0; i < interfaces.length; i++) {
            s.add(interfaces[i]);
        }

        ClassLoader classLoader = map.get(s);
        if (classLoader == null) {
            return super.resolveProxyClass(interfaces);
        }

        // The code below is mostly copied from the superclass: the proxy must be
        // defined in the loader of any non-public interface, and all such
        // loaders must agree. (Was raw Class/Class[]; now parameterized.)
        ClassLoader nonPublicLoader = null;
        boolean hasNonPublicInterface = false;

        Class<?>[] classObjs = new Class<?>[interfaces.length];
        for (int i = 0; i < interfaces.length; i++) {
            Class<?> cl = Class.forName(interfaces[i], false, classLoader);
            if ((cl.getModifiers() & Modifier.PUBLIC) == 0) {
                if (hasNonPublicInterface) {
                    if (nonPublicLoader != cl.getClassLoader()) {
                        throw new IllegalAccessError(
                            "conflicting non-public interface class loaders");
                    }
                } else {
                    nonPublicLoader = cl.getClassLoader();
                    hasNonPublicInterface = true;
                }
            }
            classObjs[i] = cl;
        }
        try {
            return Proxy.getProxyClass(hasNonPublicInterface ?
                                       nonPublicLoader : classLoader,
                                       classObjs);
        } catch (IllegalArgumentException e) {
            throw new ClassNotFoundException(null, e);
        }
    }
}
| |
/*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.druid.indexing.kafka;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Optional;
import com.google.common.collect.ImmutableMap;
import com.google.common.util.concurrent.Futures;
import com.metamx.common.IAE;
import com.metamx.http.client.HttpClient;
import com.metamx.http.client.Request;
import com.metamx.http.client.response.FullResponseHandler;
import com.metamx.http.client.response.FullResponseHolder;
import io.druid.indexing.common.RetryPolicyConfig;
import io.druid.indexing.common.RetryPolicyFactory;
import io.druid.indexing.common.TaskInfoProvider;
import io.druid.indexing.common.TaskLocation;
import io.druid.indexing.common.TaskStatus;
import io.druid.jackson.DefaultObjectMapper;
import org.easymock.Capture;
import org.easymock.EasyMockRunner;
import org.easymock.EasyMockSupport;
import org.easymock.Mock;
import org.jboss.netty.handler.codec.http.HttpHeaders;
import org.jboss.netty.handler.codec.http.HttpMethod;
import org.jboss.netty.handler.codec.http.HttpResponse;
import org.jboss.netty.handler.codec.http.HttpResponseStatus;
import org.joda.time.DateTime;
import org.joda.time.Period;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import java.io.IOException;
import java.net.URL;
import java.util.Map;
import static org.easymock.EasyMock.anyObject;
import static org.easymock.EasyMock.capture;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.reset;
@RunWith(EasyMockRunner.class)
public class KafkaIndexTaskClientTest extends EasyMockSupport
{
/** Shared mapper used by the client for (de)serializing payloads. */
private static final ObjectMapper objectMapper = new DefaultObjectMapper();
private static final String TEST_ID = "test-id";
private static final String TEST_HOST = "test-host";
private static final int TEST_PORT = 1234;

// Mocks are injected by EasyMockRunner before each test.
@Mock
private HttpClient httpClient;
@Mock
private TaskInfoProvider taskInfoProvider;
@Mock
private FullResponseHolder responseHolder;
@Mock
private HttpResponse response;
@Mock
private HttpHeaders headers;

/** Client under test; rebuilt in setUp() (a retrying variant is substituted by some tests). */
private KafkaIndexTaskClient client;
@Before
public void setUp() throws Exception
{
    // Fresh client per test; most tests rely on these location/status stubs.
    client = new TestableKafkaIndexTaskClient(httpClient, objectMapper, taskInfoProvider);
    expect(taskInfoProvider.getTaskLocation(TEST_ID)).andReturn(new TaskLocation(TEST_HOST, TEST_PORT)).anyTimes();
    expect(taskInfoProvider.getTaskStatus(TEST_ID)).andReturn(Optional.of(TaskStatus.running(TEST_ID))).anyTimes();
}
@Test(expected = KafkaIndexTaskClient.NoTaskLocationException.class)
public void testNoTaskLocationException() throws Exception
{
    // Override setUp's stubs: the task has no known location.
    reset(taskInfoProvider);
    expect(taskInfoProvider.getTaskLocation(TEST_ID)).andReturn(TaskLocation.unknown()).anyTimes();
    expect(taskInfoProvider.getTaskStatus(TEST_ID)).andReturn(Optional.of(TaskStatus.running(TEST_ID))).anyTimes();
    expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.BAD_REQUEST).times(2);
    expect(responseHolder.getContent()).andReturn("");
    expect(httpClient.go(anyObject(Request.class), anyObject(FullResponseHandler.class))).andReturn(
        Futures.immediateFuture(responseHolder)
    );
    replayAll();
    client.getCurrentOffsets(TEST_ID, true); // Expected to throw NoTaskLocationException.
}
@Test(expected = KafkaIndexTaskClient.TaskNotRunnableException.class)
public void testTaskNotRunnableException() throws Exception
{
    // Override setUp's stubs: the task has a location but has already failed.
    reset(taskInfoProvider);
    expect(taskInfoProvider.getTaskLocation(TEST_ID)).andReturn(new TaskLocation(TEST_HOST, TEST_PORT)).anyTimes();
    expect(taskInfoProvider.getTaskStatus(TEST_ID)).andReturn(Optional.of(TaskStatus.failure(TEST_ID))).anyTimes();
    replayAll();
    client.getCurrentOffsets(TEST_ID, true); // Expected to throw TaskNotRunnableException.
    verifyAll();
}
@Test(expected = RuntimeException.class)
public void testInternalServerError() throws Exception
{
    // A 500 response is surfaced to the caller as a RuntimeException.
    expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.INTERNAL_SERVER_ERROR).times(2);
    expect(httpClient.go(anyObject(Request.class), anyObject(FullResponseHandler.class))).andReturn(
        Futures.immediateFuture(responseHolder)
    );
    replayAll();
    client.getCurrentOffsets(TEST_ID, true);
    verifyAll();
}
@Test(expected = IAE.class)
public void testBadRequest() throws Exception
{
    // A 400 response is surfaced to the caller as an IAE.
    expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.BAD_REQUEST).times(2);
    expect(responseHolder.getContent()).andReturn("");
    expect(httpClient.go(anyObject(Request.class), anyObject(FullResponseHandler.class))).andReturn(
        Futures.immediateFuture(responseHolder)
    );
    replayAll();
    client.getCurrentOffsets(TEST_ID, true);
    verifyAll();
}
@Test
public void testTaskLocationMismatch() throws Exception
{
    // First call 404s with an X-Druid-Task-Id header naming a DIFFERENT task,
    // so the client should give up on this location and return an empty map.
    // Chained andReturn values are consumed in order across the two HTTP calls.
    expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.NOT_FOUND).times(3)
        .andReturn(HttpResponseStatus.OK);
    expect(responseHolder.getResponse()).andReturn(response);
    expect(responseHolder.getContent()).andReturn("")
        .andReturn("{}");
    expect(response.headers()).andReturn(headers);
    expect(headers.get("X-Druid-Task-Id")).andReturn("a-different-task-id");
    expect(httpClient.go(anyObject(Request.class), anyObject(FullResponseHandler.class))).andReturn(
        Futures.immediateFuture(responseHolder)
    ).times(2);
    replayAll();
    Map<Integer, Long> results = client.getCurrentOffsets(TEST_ID, true);
    verifyAll();
    Assert.assertEquals(0, results.size());
}
@Test
public void testGetCurrentOffsets() throws Exception
{
    // Record: one successful round-trip returning offsets for partitions 0 and 1.
    Capture<Request> reqCapture = Capture.newInstance();
    expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK);
    expect(responseHolder.getContent()).andReturn("{\"0\":1, \"1\":10}");
    expect(httpClient.go(capture(reqCapture), anyObject(FullResponseHandler.class))).andReturn(
        Futures.immediateFuture(responseHolder)
    );
    replayAll();
    Map<Integer, Long> offsets = client.getCurrentOffsets(TEST_ID, true);
    verifyAll();

    // The client should have issued a GET to the task's current-offsets endpoint.
    Request issued = reqCapture.getValue();
    Assert.assertEquals(HttpMethod.GET, issued.getMethod());
    Assert.assertEquals(
        new URL("http://test-host:1234/druid/worker/v1/chat/test-id/offsets/current"),
        issued.getUrl()
    );
    Assert.assertTrue(issued.getHeaders().get("X-Druid-Task-Id").contains("test-id"));

    Assert.assertEquals(2, offsets.size());
    Assert.assertEquals(1, (long) offsets.get(0));
    Assert.assertEquals(10, (long) offsets.get(1));
}
@Test
public void testGetCurrentOffsetsWithRetry() throws Exception
{
    // Retrying client: the first call 404s, but the header names THIS task, so
    // the location is still considered valid and the call is retried; the
    // second attempt succeeds. Chained andReturns are consumed in order.
    client = new RetryingTestableKafkaIndexTaskClient(httpClient, objectMapper, taskInfoProvider);
    Capture<Request> captured = Capture.newInstance();
    Capture<Request> captured2 = Capture.newInstance();
    expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.NOT_FOUND).times(2)
        .andReturn(HttpResponseStatus.OK).times(2);
    expect(responseHolder.getContent()).andReturn("")
        .andReturn("{\"0\":1, \"1\":10}");
    expect(responseHolder.getResponse()).andReturn(response);
    expect(response.headers()).andReturn(headers);
    expect(headers.get("X-Druid-Task-Id")).andReturn(TEST_ID);
    expect(httpClient.go(capture(captured), anyObject(FullResponseHandler.class))).andReturn(
        Futures.immediateFuture(responseHolder)
    );
    expect(httpClient.go(capture(captured2), anyObject(FullResponseHandler.class))).andReturn(
        Futures.immediateFuture(responseHolder)
    );
    replayAll();
    Map<Integer, Long> results = client.getCurrentOffsets(TEST_ID, true);
    verifyAll();
    // Both attempts should target the same current-offsets endpoint.
    Request request = captured.getValue();
    Assert.assertEquals(HttpMethod.GET, request.getMethod());
    Assert.assertEquals(
        new URL("http://test-host:1234/druid/worker/v1/chat/test-id/offsets/current"),
        request.getUrl()
    );
    Assert.assertTrue(request.getHeaders().get("X-Druid-Task-Id").contains("test-id"));
    request = captured2.getValue();
    Assert.assertEquals(HttpMethod.GET, request.getMethod());
    Assert.assertEquals(
        new URL("http://test-host:1234/druid/worker/v1/chat/test-id/offsets/current"),
        request.getUrl()
    );
    Assert.assertTrue(request.getHeaders().get("X-Druid-Task-Id").contains("test-id"));
    Assert.assertEquals(2, results.size());
    Assert.assertEquals(1, (long) results.get(0));
    Assert.assertEquals(10, (long) results.get(1));
}
@Test
public void testGetEndOffsets() throws Exception
{
    // Record: one successful round-trip against the end-offsets endpoint.
    Capture<Request> reqCapture = Capture.newInstance();
    expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK);
    expect(responseHolder.getContent()).andReturn("{\"0\":1, \"1\":10}");
    expect(httpClient.go(capture(reqCapture), anyObject(FullResponseHandler.class))).andReturn(
        Futures.immediateFuture(responseHolder)
    );
    replayAll();
    Map<Integer, Long> offsets = client.getEndOffsets(TEST_ID);
    verifyAll();

    Request issued = reqCapture.getValue();
    Assert.assertEquals(HttpMethod.GET, issued.getMethod());
    Assert.assertEquals(
        new URL("http://test-host:1234/druid/worker/v1/chat/test-id/offsets/end"),
        issued.getUrl()
    );
    Assert.assertTrue(issued.getHeaders().get("X-Druid-Task-Id").contains("test-id"));

    Assert.assertEquals(2, offsets.size());
    Assert.assertEquals(1, (long) offsets.get(0));
    Assert.assertEquals(10, (long) offsets.get(1));
}
@Test
public void testGetStartTime() throws Exception
{
    // The endpoint returns epoch millis as plain text; the client parses a DateTime.
    DateTime expected = DateTime.now();
    Capture<Request> reqCapture = Capture.newInstance();
    expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK);
    expect(responseHolder.getContent()).andReturn(String.valueOf(expected.getMillis())).anyTimes();
    expect(httpClient.go(capture(reqCapture), anyObject(FullResponseHandler.class))).andReturn(
        Futures.immediateFuture(responseHolder)
    );
    replayAll();
    DateTime actual = client.getStartTime(TEST_ID, false);
    verifyAll();

    Request issued = reqCapture.getValue();
    Assert.assertEquals(HttpMethod.GET, issued.getMethod());
    Assert.assertEquals(
        new URL("http://test-host:1234/druid/worker/v1/chat/test-id/time/start"),
        issued.getUrl()
    );
    Assert.assertTrue(issued.getHeaders().get("X-Druid-Task-Id").contains("test-id"));
    Assert.assertEquals(expected, actual);
}
@Test
public void testGetStatus() throws Exception
{
    // The endpoint returns the status enum name as a JSON string.
    KafkaIndexTask.Status status = KafkaIndexTask.Status.READING;
    Capture<Request> captured = Capture.newInstance();
    expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK);
    expect(responseHolder.getContent()).andReturn(String.format("\"%s\"", status.toString())).anyTimes();
    expect(httpClient.go(capture(captured), anyObject(FullResponseHandler.class))).andReturn(
        Futures.immediateFuture(responseHolder)
    );
    replayAll();
    KafkaIndexTask.Status results = client.getStatus(TEST_ID);
    verifyAll();
    Request request = captured.getValue();
    Assert.assertEquals(HttpMethod.GET, request.getMethod());
    Assert.assertEquals(
        new URL("http://test-host:1234/druid/worker/v1/chat/test-id/status"),
        request.getUrl()
    );
    // Fixed: previously assertTrue(null, ...) passed a spurious null failure
    // message; use the single-argument form like the sibling tests.
    Assert.assertTrue(request.getHeaders().get("X-Druid-Task-Id").contains("test-id"));
    Assert.assertEquals(status, results);
}
@Test
public void testPause() throws Exception
{
    // Pause returns the offsets at which the task stopped consuming.
    Capture<Request> reqCapture = Capture.newInstance();
    expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).times(2);
    expect(responseHolder.getContent()).andReturn("{\"0\":1, \"1\":10}").anyTimes();
    expect(httpClient.go(capture(reqCapture), anyObject(FullResponseHandler.class))).andReturn(
        Futures.immediateFuture(responseHolder)
    );
    replayAll();
    Map<Integer, Long> offsets = client.pause(TEST_ID);
    verifyAll();

    // Pause is a POST to the task's pause endpoint (no timeout parameter).
    Request issued = reqCapture.getValue();
    Assert.assertEquals(HttpMethod.POST, issued.getMethod());
    Assert.assertEquals(
        new URL("http://test-host:1234/druid/worker/v1/chat/test-id/pause"),
        issued.getUrl()
    );
    Assert.assertTrue(issued.getHeaders().get("X-Druid-Task-Id").contains("test-id"));

    Assert.assertEquals(2, offsets.size());
    Assert.assertEquals(1, (long) offsets.get(0));
    Assert.assertEquals(10, (long) offsets.get(1));
}
  @Test
  public void testPauseWithTimeout() throws Exception
  {
    // Same as testPause, but the timeout argument must be propagated as a query parameter.
    Capture<Request> captured = Capture.newInstance();
    expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).times(2);
    expect(responseHolder.getContent()).andReturn("{\"0\":1, \"1\":10}").anyTimes();
    expect(httpClient.go(capture(captured), anyObject(FullResponseHandler.class))).andReturn(
        Futures.immediateFuture(responseHolder)
    );
    replayAll();
    Map<Integer, Long> results = client.pause(TEST_ID, 101);
    verifyAll();
    // Verify the POST carries ?timeout=101 and the offset map is parsed.
    Request request = captured.getValue();
    Assert.assertEquals(HttpMethod.POST, request.getMethod());
    Assert.assertEquals(
        new URL("http://test-host:1234/druid/worker/v1/chat/test-id/pause?timeout=101"),
        request.getUrl()
    );
    Assert.assertTrue(request.getHeaders().get("X-Druid-Task-Id").contains("test-id"));
    Assert.assertEquals(2, results.size());
    Assert.assertEquals(1, (long) results.get(0));
    Assert.assertEquals(10, (long) results.get(1));
  }
  @Test
  public void testPauseWithSubsequentGetOffsets() throws Exception
  {
    // When the pause POST returns ACCEPTED (not yet paused), the client is expected to
    // poll /status until "PAUSED" and then fetch /offsets/current — three requests total.
    Capture<Request> captured = Capture.newInstance();
    Capture<Request> captured2 = Capture.newInstance();
    Capture<Request> captured3 = Capture.newInstance();
    expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.ACCEPTED).times(2)
        .andReturn(HttpResponseStatus.OK).times(2);
    expect(responseHolder.getContent()).andReturn("\"PAUSED\"")
        .andReturn("{\"0\":1, \"1\":10}").anyTimes();
    expect(httpClient.go(capture(captured), anyObject(FullResponseHandler.class))).andReturn(
        Futures.immediateFuture(responseHolder)
    );
    expect(httpClient.go(capture(captured2), anyObject(FullResponseHandler.class))).andReturn(
        Futures.immediateFuture(responseHolder)
    );
    expect(httpClient.go(capture(captured3), anyObject(FullResponseHandler.class))).andReturn(
        Futures.immediateFuture(responseHolder)
    );
    replayAll();
    Map<Integer, Long> results = client.pause(TEST_ID);
    verifyAll();
    // Request 1: the initial POST /pause.
    Request request = captured.getValue();
    Assert.assertEquals(HttpMethod.POST, request.getMethod());
    Assert.assertEquals(
        new URL("http://test-host:1234/druid/worker/v1/chat/test-id/pause"),
        request.getUrl()
    );
    Assert.assertTrue(request.getHeaders().get("X-Druid-Task-Id").contains("test-id"));
    // Request 2: the status poll.
    request = captured2.getValue();
    Assert.assertEquals(HttpMethod.GET, request.getMethod());
    Assert.assertEquals(
        new URL("http://test-host:1234/druid/worker/v1/chat/test-id/status"),
        request.getUrl()
    );
    // Request 3: the follow-up current-offsets fetch.
    request = captured3.getValue();
    Assert.assertEquals(HttpMethod.GET, request.getMethod());
    Assert.assertEquals(
        new URL("http://test-host:1234/druid/worker/v1/chat/test-id/offsets/current"),
        request.getUrl()
    );
    Assert.assertEquals(2, results.size());
    Assert.assertEquals(1, (long) results.get(0));
    Assert.assertEquals(10, (long) results.get(1));
  }
  @Test
  public void testResume() throws Exception
  {
    // resume() returns nothing; only the issued POST /resume request is verified.
    Capture<Request> captured = Capture.newInstance();
    expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK);
    expect(httpClient.go(capture(captured), anyObject(FullResponseHandler.class))).andReturn(
        Futures.immediateFuture(responseHolder)
    );
    replayAll();
    client.resume(TEST_ID);
    verifyAll();
    Request request = captured.getValue();
    Assert.assertEquals(HttpMethod.POST, request.getMethod());
    Assert.assertEquals(
        new URL("http://test-host:1234/druid/worker/v1/chat/test-id/resume"),
        request.getUrl()
    );
    Assert.assertTrue(request.getHeaders().get("X-Druid-Task-Id").contains("test-id"));
  }
  @Test
  public void testSetEndOffsets() throws Exception
  {
    // The offset map must be serialized as JSON into the POST body of /offsets/end.
    Map<Integer, Long> endOffsets = ImmutableMap.of(0, 15L, 1, 120L);
    Capture<Request> captured = Capture.newInstance();
    expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK);
    expect(httpClient.go(capture(captured), anyObject(FullResponseHandler.class))).andReturn(
        Futures.immediateFuture(responseHolder)
    );
    replayAll();
    client.setEndOffsets(TEST_ID, endOffsets);
    verifyAll();
    Request request = captured.getValue();
    Assert.assertEquals(HttpMethod.POST, request.getMethod());
    Assert.assertEquals(
        new URL("http://test-host:1234/druid/worker/v1/chat/test-id/offsets/end"),
        request.getUrl()
    );
    Assert.assertTrue(request.getHeaders().get("X-Druid-Task-Id").contains("test-id"));
    // The serialized request body must match the offset map exactly.
    Assert.assertEquals("{\"0\":15,\"1\":120}", new String(request.getContent().array()));
  }
  @Test
  public void testSetEndOffsetsAndResume() throws Exception
  {
    // Same as testSetEndOffsets, but resume=true must be appended as a query parameter.
    Map<Integer, Long> endOffsets = ImmutableMap.of(0, 15L, 1, 120L);
    Capture<Request> captured = Capture.newInstance();
    expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK);
    expect(httpClient.go(capture(captured), anyObject(FullResponseHandler.class))).andReturn(
        Futures.immediateFuture(responseHolder)
    );
    replayAll();
    client.setEndOffsets(TEST_ID, endOffsets, true);
    verifyAll();
    Request request = captured.getValue();
    Assert.assertEquals(HttpMethod.POST, request.getMethod());
    Assert.assertEquals(
        new URL("http://test-host:1234/druid/worker/v1/chat/test-id/offsets/end?resume=true"),
        request.getUrl()
    );
    Assert.assertTrue(request.getHeaders().get("X-Druid-Task-Id").contains("test-id"));
    Assert.assertEquals("{\"0\":15,\"1\":120}", new String(request.getContent().array()));
  }
  @Test
  public void testStop() throws Exception
  {
    // stop(id, false): plain POST /stop with no publish parameter.
    Capture<Request> captured = Capture.newInstance();
    expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK);
    expect(httpClient.go(capture(captured), anyObject(FullResponseHandler.class))).andReturn(
        Futures.immediateFuture(responseHolder)
    );
    replayAll();
    client.stop(TEST_ID, false);
    verifyAll();
    Request request = captured.getValue();
    Assert.assertEquals(HttpMethod.POST, request.getMethod());
    Assert.assertEquals(
        new URL("http://test-host:1234/druid/worker/v1/chat/test-id/stop"),
        request.getUrl()
    );
    Assert.assertTrue(request.getHeaders().get("X-Druid-Task-Id").contains("test-id"));
  }
  @Test
  public void testStopAndPublish() throws Exception
  {
    // stop(id, true): the publish flag must surface as ?publish=true on the stop URL.
    Capture<Request> captured = Capture.newInstance();
    expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK);
    expect(httpClient.go(capture(captured), anyObject(FullResponseHandler.class))).andReturn(
        Futures.immediateFuture(responseHolder)
    );
    replayAll();
    client.stop(TEST_ID, true);
    verifyAll();
    Request request = captured.getValue();
    Assert.assertEquals(HttpMethod.POST, request.getMethod());
    Assert.assertEquals(
        new URL("http://test-host:1234/druid/worker/v1/chat/test-id/stop?publish=true"),
        request.getUrl()
    );
    Assert.assertTrue(request.getHeaders().get("X-Druid-Task-Id").contains("test-id"));
  }
  /**
   * Test double for {@link KafkaIndexTaskClient}: disables retries (max retry count 0)
   * and turns the connection check into a no-op so tests never touch the network.
   */
  private class TestableKafkaIndexTaskClient extends KafkaIndexTaskClient
  {
    public TestableKafkaIndexTaskClient(
        HttpClient httpClient,
        ObjectMapper jsonMapper,
        TaskInfoProvider taskInfoProvider
    )
    {
      super(httpClient, jsonMapper, taskInfoProvider);
    }
    @Override
    RetryPolicyFactory createRetryPolicyFactory()
    {
      // No retries: requests either succeed or fail immediately.
      return new RetryPolicyFactory(
          new RetryPolicyConfig()
              .setMinWait(new Period("PT1S"))
              .setMaxRetryCount(0)
      );
    }
    @Override
    void checkConnection(String host, int port) throws IOException { }
  }
  /**
   * Variant of {@link TestableKafkaIndexTaskClient} that allows exactly one retry,
   * for tests exercising the retry path.
   */
  private class RetryingTestableKafkaIndexTaskClient extends TestableKafkaIndexTaskClient
  {
    public RetryingTestableKafkaIndexTaskClient(
        HttpClient httpClient,
        ObjectMapper jsonMapper,
        TaskInfoProvider taskInfoProvider
    )
    {
      super(httpClient, jsonMapper, taskInfoProvider);
    }
    @Override
    RetryPolicyFactory createRetryPolicyFactory()
    {
      // Single retry with a 1-second minimum wait.
      return new RetryPolicyFactory(
          new RetryPolicyConfig()
              .setMinWait(new Period("PT1S"))
              .setMaxRetryCount(1)
      );
    }
  }
}
| |
/*
* Copyright (C) 2015 HaiYang Li
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.landawn.abacus.util;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
*
* @since 0.8
*
* @author Haiyang Li
*/
/**
 * Hand-rolled SQL tokenizer.
 *
 * <p>{@link #parse(String)} splits a SQL string into words, operators and quoted
 * literals; {@link #indexWord(String, String, int, boolean)} locates a (possibly
 * composite, e.g. {@code "ORDER BY"}) word inside a SQL string; and
 * {@link #nextWord(String, int)} returns the next token after a given index.
 * Additional separators can be registered at runtime via
 * {@link #registerSeperator(char)} / {@link #registerSeperator(String)}.</p>
 *
 * @since 0.8
 *
 * @author Haiyang Li
 */
public final class SQLParser {
    private static final char TAB = '\t';
    private static final char ENTER = '\n';
    // Separator set holding both Character entries (single-char separators) and String
    // entries (2- and 3-char operators). The scanners below probe the 3-char and 2-char
    // substrings at the current index against this set before falling back to the single
    // character. NOTE(review): a few entries are added more than once (e.g. '~', '!',
    // '#', "?-", "|>>") — harmless duplicates for a Set, kept as-is.
    private static final Set<Object> seperators = new HashSet<>();
    static {
        seperators.add(TAB);
        seperators.add(ENTER);
        seperators.add(' ');
        seperators.add('?');
        seperators.add(',');
        seperators.add('~');
        seperators.add('!');
        seperators.add('@');
        seperators.add('^');
        seperators.add('#');
        seperators.add("!!");
        seperators.add(';');
        seperators.add('(');
        seperators.add(')');
        seperators.add('=');
        seperators.add("==");
        seperators.add(":=");
        seperators.add("^=");
        seperators.add("~=");
        seperators.add("+=");
        seperators.add("-=");
        seperators.add("*=");
        seperators.add("/=");
        seperators.add("%=");
        seperators.add("&=");
        seperators.add("|=");
        seperators.add("!=");
        seperators.add("!<");
        seperators.add("!>");
        seperators.add('>');
        seperators.add(">>");
        seperators.add(">=");
        seperators.add("@>");
        seperators.add("&>");
        seperators.add(">^");
        seperators.add('<');
        seperators.add("<<");
        seperators.add("<=");
        seperators.add("<@");
        seperators.add("&<");
        seperators.add("<^");
        seperators.add('+');
        seperators.add('-');
        seperators.add('%');
        seperators.add('/');
        seperators.add('*');
        seperators.add('&');
        seperators.add("&&");
        seperators.add('|');
        seperators.add("||");
        seperators.add("|/");
        seperators.add("||/");
        seperators.add('^');
        seperators.add('~');
        seperators.add('!');
        seperators.add("->");
        seperators.add('#');
        seperators.add("##");
        seperators.add("@@");
        seperators.add("@-@");
        seperators.add("@@@");
        seperators.add("->>");
        seperators.add("<->");
        seperators.add("<=>");
        seperators.add(">>=");
        seperators.add("<<=");
        seperators.add("<<|");
        seperators.add("|>>");
        seperators.add("&<|");
        seperators.add("|&>");
        seperators.add("|>>");
        seperators.add("(+)");
        seperators.add("?#");
        seperators.add("?-");
        seperators.add("?-");
        seperators.add("?|");
        seperators.add("?-|");
        seperators.add("?||");
        seperators.add("~*");
        seperators.add("!~");
        seperators.add("!~*");
        seperators.add("^-=");
        seperators.add("|*=");
    }
    // Multi-word SQL keywords mapped to their individual sub-words; indexWord() matches
    // composites by locating the first sub-word and then checking the following tokens.
    // Also used as a cache: indexWord() inserts split results for unseen words.
    // NOTE(review): WD.INNER_JOIN is put twice — the second put just overwrites the first.
    private static final Map<String, String[]> compositeWords = new ObjectPool<String, String[]>(64);
    static {
        compositeWords.put(WD.LEFT_JOIN, new String[] { "LEFT", "JOIN" });
        compositeWords.put(WD.RIGHT_JOIN, new String[] { "RIGHT", "JOIN" });
        compositeWords.put(WD.FULL_JOIN, new String[] { "FULL", "JOIN" });
        compositeWords.put(WD.CROSS_JOIN, new String[] { "CROSS", "JOIN" });
        compositeWords.put(WD.INNER_JOIN, new String[] { "INNER", "JOIN" });
        compositeWords.put(WD.NATURAL_JOIN, new String[] { "NATURAL", "JOIN" });
        compositeWords.put(WD.INNER_JOIN, new String[] { "INNER", "JOIN" });
        compositeWords.put(WD.GROUP_BY, new String[] { "GROUP", "BY" });
        compositeWords.put(WD.ORDER_BY, new String[] { "ORDER", "BY" });
        compositeWords.put(WD.FOR_UPDATE, new String[] { "FOR", "UPDATE" });
        compositeWords.put(WD.FETCH_FIRST, new String[] { "FETCH", "FIRST" });
        compositeWords.put(WD.FETCH_NEXT, new String[] { "FETCH", "NEXT" });
        compositeWords.put(WD.ROWS_ONLY, new String[] { "ROWS", "ONLY" });
        compositeWords.put(WD.UNION_ALL, new String[] { "UNION", "ALL" });
        compositeWords.put(WD.IS_NOT, new String[] { "IS", "NOT" });
        compositeWords.put(WD.IS_NULL, new String[] { "IS", "NULL" });
        compositeWords.put(WD.IS_NOT_NULL, new String[] { "IS", "NOT", "NULL" });
        compositeWords.put(WD.IS_EMPTY, new String[] { "IS", "EMPTY" });
        compositeWords.put(WD.IS_NOT_EMPTY, new String[] { "IS", "NOT", "EMPTY" });
        compositeWords.put(WD.IS_BLANK, new String[] { "IS", "BLANK" });
        compositeWords.put(WD.IS_NOT_BLANK, new String[] { "IS", "NOT", "BLANK" });
        compositeWords.put(WD.NOT_IN, new String[] { "NOT", "IN" });
        compositeWords.put(WD.NOT_EXISTS, new String[] { "NOT", "EXISTS" });
        // Register lower- and upper-case variants of every composite word so lookups
        // succeed regardless of the case the caller used.
        List<String> list = new ArrayList<>(compositeWords.keySet());
        for (String e : list) {
            e = e.toLowerCase();
            if (!compositeWords.containsKey(e)) {
                compositeWords.put(e, Splitter.with(WD.SPACE).trim(true).splitToArray(e));
            }
            e = e.toUpperCase();
            if (!compositeWords.containsKey(e)) {
                compositeWords.put(e, Splitter.with(WD.SPACE).trim(true).splitToArray(e));
            }
        }
    }
    // Utility class: no instances.
    private SQLParser() {
    }
    /**
     * Splits {@code sql} into tokens: words, separator/operator strings, quoted
     * literals (returned verbatim, quotes included) and a single {@code WD.SPACE}
     * marker for each run of whitespace.
     *
     * @param sql the SQL string to tokenize
     * @return the tokens in source order
     */
    public static List<String> parse(String sql) {
        final int sqlLength = sql.length();
        final StringBuilder sb = Objectory.createStringBuilder();
        final List<String> words = new ArrayList<>();
        String temp = "";
        char quoteChar = 0;
        for (int index = 0; index < sqlLength; index++) {
            // TODO [performance improvement]. will it improve performance if
            // change to char array?
            // char c = sqlCharArray[charIndex];
            char c = sql.charAt(index);
            // is it in a quoted identifier?
            if (quoteChar != 0) {
                sb.append(c);
                // end in quote.
                if (c == quoteChar) {
                    words.add(sb.toString());
                    sb.setLength(0);
                    quoteChar = 0;
                }
            } else if (isSeperator(sql, sqlLength, index, c)) {
                // Flush the word accumulated so far before emitting the separator.
                if (sb.length() > 0) {
                    words.add(sb.toString());
                    sb.setLength(0);
                }
                // Longest-match: try the 3-char then 2-char operator at this index;
                // note the assignment to temp inside the condition.
                if ((index < (sqlLength - 2)) && seperators.contains(temp = sql.substring(index, index + 3))) {
                    words.add(temp);
                    index += 2;
                } else if ((index < (sqlLength - 1)) && seperators.contains(temp = sql.substring(index, index + 2))) {
                    words.add(temp);
                    index += 1;
                } else if (c == WD._SPACE || c == TAB || c == ENTER) {
                    // Collapse consecutive whitespace into a single space token.
                    if ((words.size() > 0) && !words.get(words.size() - 1).equals(WD.SPACE)) {
                        words.add(WD.SPACE);
                    }
                } else {
                    words.add(String.valueOf(c));
                }
            } else {
                sb.append(c);
                // Opening quote: switch to quoted-literal mode.
                if ((c == WD._QUOTATION_S) || (c == WD._QUOTATION_D)) {
                    quoteChar = c;
                }
            }
        }
        // Flush a trailing word left in the buffer.
        if (sb.length() > 0) {
            words.add(sb.toString());
            sb.setLength(0);
        }
        Objectory.recycle(sb);
        return words;
    }
    /**
     * Returns the index of {@code word} in {@code sql} starting the search at
     * {@code fromIndex}, or {@code N.INDEX_NOT_FOUND} if absent. Composite words
     * (e.g. {@code "ORDER BY"}) are matched sub-word by sub-word.
     *
     * @param sql the SQL string to search
     * @param word the word (simple or composite) to find
     * @param fromIndex index to start searching from
     * @param caseSensitive whether the comparison is case sensitive
     * @return the start index of the match, or {@code N.INDEX_NOT_FOUND}
     */
    public static int indexWord(String sql, String word, int fromIndex, boolean caseSensitive) {
        String[] subWords = compositeWords.get(word);
        if (subWords == null) {
            // Cache the split for future lookups of the same word.
            subWords = Splitter.with(WD.SPACE).trim(true).splitToArray(word);
            compositeWords.put(word, subWords);
        }
        if ((subWords == null) || (subWords.length <= 1)) {
            // Simple (single-token) word: scan with the same tokenizer logic as parse().
            int result = N.INDEX_NOT_FOUND;
            final StringBuilder sb = Objectory.createStringBuilder();
            final int sqlLength = sql.length();
            String temp = "";
            char quoteChar = 0;
            for (int index = fromIndex; index < sqlLength; index++) {
                char c = sql.charAt(index);
                // is it in a quoted identifier?
                if (quoteChar != 0) {
                    sb.append(c);
                    // end in quote.
                    if (c == quoteChar) {
                        temp = sb.toString();
                        if (word.equals(temp) || (!caseSensitive && word.equalsIgnoreCase(temp))) {
                            result = index - word.length() + 1;
                            break;
                        }
                        sb.setLength(0);
                        quoteChar = 0;
                    }
                } else if (isSeperator(sql, sqlLength, index, c)) {
                    // A separator terminates the current token; compare it to word.
                    if (sb.length() > 0) {
                        temp = sb.toString();
                        if (word.equals(temp) || (!caseSensitive && word.equalsIgnoreCase(temp))) {
                            result = index - word.length();
                            break;
                        }
                        sb.setLength(0);
                    } else if (c == WD._SPACE || c == TAB || c == ENTER) {
                        // skip white char
                        continue;
                    }
                    // The separator itself may be the word we are looking for
                    // (longest match first: 3 chars, then 2, then 1).
                    if ((index < (sqlLength - 2)) && seperators.contains(temp = sql.substring(index, index + 3))) {
                        if (word.equals(temp) || (!caseSensitive && word.equalsIgnoreCase(temp))) {
                            result = index;
                            break;
                        }
                        index += 2;
                    } else if ((index < (sqlLength - 1)) && seperators.contains(temp = sql.substring(index, index + 2))) {
                        if (word.equals(temp) || (!caseSensitive && word.equalsIgnoreCase(temp))) {
                            result = index;
                            break;
                        }
                        index += 1;
                    } else if (word.equals(String.valueOf(c)) || (!caseSensitive && word.equalsIgnoreCase(String.valueOf(c)))) {
                        result = index;
                        break;
                    }
                } else {
                    sb.append(c);
                    if ((c == WD._QUOTATION_S) || (c == WD._QUOTATION_D)) {
                        quoteChar = c;
                    }
                }
            }
            // The word may be the final token, with no trailing separator.
            if (result < 0 && sb.length() > 0) {
                temp = sb.toString();
                if (word.equals(temp) || (!caseSensitive && word.equalsIgnoreCase(temp))) {
                    result = sqlLength - word.length();
                }
            }
            Objectory.recycle(sb);
            return result;
        } else {
            // Composite word: find the first sub-word, then require each following
            // token to match the remaining sub-words in order.
            int result = indexWord(sql, subWords[0], fromIndex, caseSensitive);
            if (result >= 0) {
                int tmpIndex = result + subWords[0].length();
                String nextWord = null;
                for (int i = 1; i < subWords.length; i++) {
                    nextWord = nextWord(sql, tmpIndex);
                    if (N.notNullOrEmpty(nextWord) && (nextWord.equals(subWords[i]) || (!caseSensitive && nextWord.equalsIgnoreCase(subWords[i])))) {
                        tmpIndex += (subWords[i].length() + 1);
                    } else {
                        result = -1;
                        break;
                    }
                }
            }
            return result;
        }
    }
    /**
     * Returns the next token in {@code sql} at or after {@code fromIndex}
     * (leading whitespace skipped), or an empty string if there is none.
     *
     * @param sql the SQL string
     * @param fromIndex index to start scanning from
     * @return the next word/operator/quoted literal, or {@code ""}
     */
    public static String nextWord(String sql, int fromIndex) {
        final int sqlLength = sql.length();
        final StringBuilder sb = Objectory.createStringBuilder();
        String temp = "";
        char quoteChar = 0;
        for (int index = fromIndex; index < sqlLength; index++) {
            char c = sql.charAt(index);
            // is it in a quoted identifier?
            if (quoteChar != 0) {
                sb.append(c);
                // end in quote.
                if (c == quoteChar) {
                    break;
                }
            } else if (isSeperator(sql, sqlLength, index, c)) {
                // A separator after accumulated chars ends the word.
                if (sb.length() > 0) {
                    break;
                } else if (c == WD._SPACE || c == TAB || c == ENTER) {
                    // skip white char
                    continue;
                }
                // Otherwise the separator itself is the next word (longest match).
                if (((index < (sqlLength - 2)) && seperators.contains(temp = sql.substring(index, index + 3)))
                        || ((index < (sqlLength - 1)) && seperators.contains(temp = sql.substring(index, index + 2)))) {
                    sb.append(temp);
                } else {
                    sb.append(c);
                }
                break;
            } else {
                sb.append(c);
                if ((c == WD._QUOTATION_S) || (c == WD._QUOTATION_D)) {
                    quoteChar = c;
                }
            }
        }
        String st = (sb.length() == 0) ? "" : sb.toString();
        Objectory.recycle(sb);
        return st;
    }
    /**
     * Registers an additional single-character separator.
     *
     * @param seperator the character to treat as a separator
     */
    public static void registerSeperator(char seperator) {
        N.checkArgPositive(seperator, "seperator");
        seperators.add(seperator);
    }
    /**
     * Registers an additional separator string; a one-character string is also
     * registered as its Character form so both lookups succeed.
     *
     * @param seperator the separator string, must not be null
     */
    public static void registerSeperator(String seperator) {
        N.checkArgNotNull(seperator, "seperator");
        seperators.add(seperator);
        if (seperator.length() == 1) {
            seperators.add(seperator.charAt(0));
        }
    }
    /**
     * Returns whether {@code ch} at {@code index} acts as a separator.
     * {@code '#'} followed by <code>'{'</code> is NOT treated as a separator so
     * iBATIS/MyBatis <code>#{...}</code> placeholders survive tokenization intact.
     */
    public static boolean isSeperator(String str, int len, int index, char ch) {
        // for Ibatis/Mybatis
        if (ch == '#' && index < len - 1 && str.charAt(index + 1) == '{') {
            return false;
        }
        return seperators.contains(ch);
    }
    /**
     * Returns whether the token at {@code index} looks like a function name,
     * i.e. is immediately followed by an opening parenthesis token.
     */
    public static boolean isFunctionName(final List<String> words, int len, int index) {
        // return (i < len - 1 && words.get(i + 1).charAt(0) == WD._PARENTHESES_L)
        //        || (i < len - 2 && WD.SPACE.equals(words.get(i + 1)) && words.get(i + 2).charAt(0) == WD._PARENTHESES_L);
        return (index < len - 1 && words.get(index + 1).charAt(0) == WD._PARENTHESES_L);
    }
}
| |
package php.runtime.invoke;
import php.runtime.Memory;
import php.runtime.common.Messages;
import php.runtime.common.Modifier;
import php.runtime.env.CallStack;
import php.runtime.env.CallStackItem;
import php.runtime.env.Environment;
import php.runtime.env.TraceInfo;
import php.runtime.env.TraceInfoCallCache;
import php.runtime.exceptions.CriticalException;
import php.runtime.exceptions.support.ErrorType;
import php.runtime.invoke.cache.ConstantCallCache;
import php.runtime.invoke.cache.PropertyCallCache;
import php.runtime.lang.Closure;
import php.runtime.lang.IObject;
import php.runtime.memory.ArrayMemory;
import php.runtime.memory.ObjectMemory;
import php.runtime.memory.ReferenceMemory;
import php.runtime.memory.StringMemory;
import php.runtime.reflection.ClassEntity;
import php.runtime.reflection.ConstantEntity;
import php.runtime.reflection.MethodEntity;
import php.runtime.reflection.ParameterEntity;
final public class ObjectInvokeHelper {
    // Static-helper holder; never instantiated.
    private ObjectInvokeHelper() {
    }
    /**
     * Executes a {@code parent::method(...)} call for {@code object}.
     *
     * <p>When {@code object} is NULL (no {@code $this}), the call is re-dispatched as a
     * static call on the parent class. Otherwise the method is resolved on the parent of
     * the class currently on the call stack, falling back to the magic {@code __call}
     * (or {@code __invoke} when {@code methodName} is null).</p>
     *
     * @param object          the receiver (ObjectMemory) or NULL for a static context
     * @param methodName      original method name, or null for an __invoke-style call
     * @param methodLowerName lower-cased method name used for lookup
     * @param args            raw call arguments (pre-conversion)
     * @return the invocation result, or Memory.NULL on error
     */
    public static Memory invokeParentMethod(Memory object, String methodName, String methodLowerName,
                                            Environment env, TraceInfo trace, Memory[] args)
            throws Throwable {
        Memory[] passed = null;
        // doublePop: true when __call adds a second stack frame that must also be popped.
        boolean doublePop = false;
        if (object.isNull()) {
            // No instance: parent::... in a static context becomes a static call.
            ClassEntity parent = env.__getParentClass(trace);
            return InvokeHelper.callStatic(
                    env, trace, parent.getLowerName(), methodLowerName, parent.getName(), methodName, args, null, 0
            );
        }
        IObject iObject = ((ObjectMemory) object).value;
        ClassEntity childClazz = iObject.getReflection();
        ClassEntity clazz = env.getLastClassOnStack().getParent();
        MethodEntity method;
        if (clazz == null) {
            env.error(trace, Messages.ERR_CANNOT_ACCESS_PARENT_WHEN_SCOPE_NO_PARENT.fetch());
            return Memory.NULL;
        }
        if (methodName == null) {
            // __invoke-style: prefer the child's magic invoke, then the parent's.
            method = childClazz.methodMagicInvoke != null ? childClazz.methodMagicInvoke : clazz.methodMagicInvoke;
        } else {
            method = clazz.findMethod(methodLowerName);
            // Not found: fall back to the magic __call($name, $args) handler
            // (note the assignment to `method` inside the condition).
            if (method == null
                    && ((
                    method = childClazz.methodMagicCall != null
                            ? childClazz.methodMagicCall
                            : clazz.methodMagicCall)
                    != null)) {
                passed = new Memory[]{new StringMemory(methodName), ArrayMemory.of(args)};
                doublePop = true;
            }
        }
        String className = clazz.getName();
        if (method == null) {
            if (methodName == null)
                methodName = "__invoke";
            env.error(trace, ErrorType.E_ERROR,
                    Messages.ERR_CALL_TO_UNDEFINED_METHOD.fetch(
                            className + "::" + methodName
                    )
            );
            return Memory.NULL;
        }
        InvokeHelper.checkAccess(env, trace, method);
        if (passed == null) {
            // Normal (non-__call) path: convert arguments against the parameter list.
            passed = InvokeArgumentHelper.makeArguments(
                    env, args, method.getParameters(), className, methodName, className, trace
            );
        }
        // Short-circuit for methods whose result is known to be immutable/constant.
        Memory result = method.getImmutableResultTyped(env, trace);
        if (result != null) {
            return result;
        }
        try {
            if (trace != null) {
                env.pushCall(trace, iObject, args, methodName, method.getClazz().getName(), className);
                if (doublePop)
                    env.pushCall(trace, iObject, passed, method.getName(), method.getClazz().getName(), className);
            }
            result = method.invokeDynamic(iObject, env, trace, passed);
        } catch (ArrayIndexOutOfBoundsException e) {
            // NOTE(review): presumably signals an argument-count mismatch in the generated
            // invoker — surfaced as a CriticalException; confirm against invokeDynamic.
            throw new CriticalException("Unable to call parent:: method " + className + "::" + methodName + "(), error = " + e.getMessage());
        } finally {
            // Pop exactly as many frames as were pushed above.
            if (trace != null) {
                env.popCall();
                if (doublePop)
                    env.popCall();
            }
        }
        return result;
    }
    /**
     * Convenience overload: derives the lower-cased lookup name from {@code methodName}
     * and delegates to the full overload.
     */
    public static Memory invokeMethod(Memory object, String methodName,
                                      Environment env, TraceInfo trace, Memory... args)
            throws Throwable {
        return invokeMethod(object, methodName, methodName.toLowerCase(), env, trace, args);
    }
    /**
     * Convenience overload: uses the environment's current trace and delegates to the
     * full overload with the lower-cased method name.
     */
    public static Memory invokeMethod(Memory object, String methodName, Environment env, Memory... args)
            throws Throwable {
        return invokeMethod(object, methodName, methodName.toLowerCase(), env, env.trace(), args);
    }
    /**
     * Invokes {@code $object->methodName(...)} dynamically.
     *
     * <p>Resolution order: magic {@code __invoke} when {@code methodName} is null;
     * otherwise a regular method lookup (re-resolved against the calling context for
     * context-dependent methods, e.g. visibility-sensitive ones), with a final fallback
     * to the magic {@code __call} handler.</p>
     *
     * @param object          the receiver; a recoverable error is raised if not an object
     * @param methodName      original method name, or null for an __invoke-style call
     * @param methodLowerName lower-cased method name used for lookup
     * @param args            raw call arguments (pre-conversion)
     * @return the invocation result, or Memory.NULL on error
     */
    public static Memory invokeMethod(Memory object, String methodName, String methodLowerName,
                                      Environment env, TraceInfo trace, Memory[] args)
            throws Throwable {
        object = object.toValue();
        Memory[] passed = null;
        // doublePop: true when __call adds a second stack frame that must also be popped.
        boolean doublePop = false;
        if (object.type != Memory.Type.OBJECT) {
            env.error(trace, ErrorType.E_RECOVERABLE_ERROR, Messages.ERR_CANNOT_CALL_OF_NON_OBJECT.fetch(methodName));
            return Memory.NULL;
        }
        IObject iObject = ((ObjectMemory) object).value;
        ClassEntity clazz = iObject.getReflection();
        MethodEntity method;
        if (methodName == null) {
            method = clazz.methodMagicInvoke;
        } else {
            method = clazz.findMethod(methodLowerName);
            /*TraceInfoCallCache callCache = trace == null ? null : trace.getCallCache();
            if (callCache != null && callCache.self == clazz && callCache.callEntity != null) {
                method = (MethodEntity) callCache.callEntity;
            } else {
                method = clazz.findMethod(methodLowerName);
                if (trace != null && !trace.isUnknown()) {
                    if (callCache == null) {
                        callCache = new TraceInfoCallCache();
                    }
                    callCache.self = clazz;
                    callCache.callEntity = method;
                    trace.setCallCache(callCache);
                }
            }*/
            // Context-dependent methods are re-resolved against the class currently on
            // the stack so the correct (e.g. visibility-matching) variant is used.
            if (method != null && method.isContextDepends()) {
                ClassEntity context = env.getLastClassOnStack();
                if (context != null) {
                    MethodEntity contextMethod = context.findMethod(methodLowerName);
                    if (contextMethod != null) {
                        method = contextMethod;
                    }
                }
            }
            // Fallback to __call($name, $args); note the assignment inside the condition.
            if (method == null && ((method = clazz.methodMagicCall) != null)) {
                clazz.methodMagicCall.setModifier(Modifier.PUBLIC);
                passed = new Memory[]{new StringMemory(methodName), ArrayMemory.of(args)};
                doublePop = true;
            }
        }
        String className = clazz.getName();
        if (method == null) {
            if (methodName == null)
                methodName = "__invoke";
            env.error(trace, ErrorType.E_ERROR,
                    Messages.ERR_CALL_TO_UNDEFINED_METHOD.fetch(className + "::" + methodName)
            );
            return Memory.NULL;
        }
        InvokeHelper.checkAccess(env, trace, method);
        // Short-circuit for methods whose result is known to be immutable/constant;
        // argument conversion is still performed when the method takes parameters.
        Memory result = method.getImmutableResultTyped(env, trace);
        if (passed == null) {
            ParameterEntity[] parameters = method.getParameters(args == null ? 0 : args.length);
            if (result != null && args == null && (parameters == null || parameters.length == 0)) {
                return result;
            }
            passed = InvokeArgumentHelper.makeArguments(
                    env, args, parameters, className, methodName, className, trace
            );
        }
        if (result != null) {
            return result;
        }
        // Only maintain the PHP call stack when the method actually uses it.
        CallStack callStack = (trace == null || !method.isUsesStackTrace()) ? null : env.getCallStack();
        try {
            if (callStack != null) {
                String staticClass = className;
                if (iObject instanceof Closure) {
                    staticClass = ((Closure) iObject).getScope();
                }
                if (clazz.isHiddenInCallStack()) {
                    callStack.push(trace, iObject, args, methodName, staticClass, staticClass);
                    //env.pushCall(trace, iObject, args, methodName, staticClass, staticClass);
                    if (doublePop) {
                        callStack.push(trace, iObject, passed, method.getName(), staticClass, staticClass);
                        //env.pushCall(trace, iObject, passed, method.getName(), staticClass, staticClass);
                    }
                } else {
                    callStack.push(trace, iObject, args, methodName, method.getClazz(), staticClass);
                    //env.pushCallEx(trace, iObject, args, methodName, method.getClazz(), staticClass);
                    if (doublePop) {
                        callStack.push(trace, iObject, passed, method.getName(), method.getClazz(), staticClass);
                        //env.pushCallEx(trace, iObject, passed, method.getName(), method.getClazz(), staticClass);
                    }
                }
            }
            return method.invokeDynamic(iObject, env, trace, passed);
        } catch (NoClassDefFoundError e) {
            throw new CriticalException("Unable to call method " + className + "::" + methodName + "(), " + e.getMessage());
        } finally {
            // Pop exactly as many frames as were pushed above.
            if (callStack != null) {
                callStack.pop();
                //env.popCall();
                if (doublePop) {
                    callStack.pop();
                    //env.popCall();
                }
            }
        }
    }
    /**
     * Convenience overload: invokes {@code method} on {@code iObject} with access
     * checking enabled.
     */
    public static Memory invokeMethod(IObject iObject, MethodEntity method,
                                      Environment env, TraceInfo trace, Memory[] args)
            throws Throwable {
        return invokeMethod(iObject, method, env, trace, args, true);
    }
    /**
     * Invokes an already-resolved {@code method} on {@code iObject}.
     *
     * <p>A null {@code method} falls back to the class's magic {@code __invoke};
     * if that is also absent an undefined-method error is raised.</p>
     *
     * @param checkAccess whether to enforce visibility rules before the call
     * @return the invocation result, or Memory.NULL on error
     */
    public static Memory invokeMethod(IObject iObject, MethodEntity method,
                                      Environment env, TraceInfo trace, Memory[] args, boolean checkAccess)
            throws Throwable {
        ClassEntity clazz = iObject.getReflection();
        if (method == null)
            method = clazz.methodMagicInvoke;
        String className = clazz.getName();
        if (method == null) {
            env.error(trace, Messages.ERR_CALL_TO_UNDEFINED_METHOD.fetch(className + "::__invoke"));
            return Memory.NULL;
        }
        if (checkAccess)
            InvokeHelper.checkAccess(env, trace, method);
        Memory[] passed = InvokeArgumentHelper.makeArguments(
                env, args, method.getParameters(args == null ? 0 : args.length), className, method.getName(), className, trace
        );
        // Short-circuit for methods whose result is known to be immutable/constant.
        Memory result = method.getImmutableResultTyped(env, trace);
        if (result != null) {
            return result;
        }
        if (trace != null) {
            // Closures report their bound scope as the "static" class in the stack frame.
            String staticClass = className;
            if (iObject instanceof Closure) {
                staticClass = ((Closure) iObject).getScope();
            }
            if (clazz.isHiddenInCallStack()) {
                env.pushCall(trace, iObject, args, method.getName(), staticClass, staticClass);
            } else {
                env.pushCallEx(trace, iObject, args, method.getName(), method.getClazz(), staticClass);
            }
        }
        try {
            result = method.invokeDynamic(iObject, env, trace, passed);
        } finally {
            // Pop the frame pushed above (only when a trace was supplied).
            if (trace != null)
                env.popCall();
        }
        return result;
    }
public static Memory emptyProperty(Memory object, String property, Environment env, TraceInfo trace,
PropertyCallCache callCache, int cacheIndex)
throws Throwable {
object = object.toValue();
if (!object.isObject()) {
return Memory.NULL;
//env.error(trace, Messages.ERR_CANNOT_GET_PROPERTY_OF_NON_OBJECT.fetch(property));
}
IObject iObject = ((ObjectMemory) object).value;
return iObject.getReflection().emptyProperty(env, trace, iObject, property);
}
public static Memory issetProperty(Memory object, String property, Environment env, TraceInfo trace,
PropertyCallCache callCache, int cacheIndex)
throws Throwable {
object = object.toValue();
if (!object.isObject()) {
return Memory.NULL;
//env.error(trace, Messages.ERR_CANNOT_GET_PROPERTY_OF_NON_OBJECT.fetch(property));
}
IObject iObject = ((ObjectMemory) object).value;
return iObject.getReflection().issetProperty(env, trace, iObject, property, callCache, cacheIndex);
}
public static void unsetProperty(Memory object, String property, Environment env, TraceInfo trace,
PropertyCallCache callCache, int cacheIndex)
throws Throwable {
object = object.toValue();
if (!object.isObject()) {
env.error(trace, Messages.ERR_CANNOT_GET_PROPERTY_OF_NON_OBJECT.fetch(property));
}
IObject iObject = ((ObjectMemory) object).value;
iObject.getReflection().unsetProperty(env, trace, iObject, property, callCache, cacheIndex);
}
    /**
     * Resolves a class constant {@code className::constant}.
     *
     * <p>Uses the per-call-site {@code callCache} when available; access checking is
     * performed only on the first (uncached) resolution.</p>
     *
     * @return the constant value, or Memory.NULL when the class/constant is missing
     */
    public static Memory getConstant(String className, String lowerClassName, String constant,
                                     Environment env, TraceInfo trace, ConstantCallCache callCache, int cacheIndex,
                                     boolean lateStaticCall) {
        ConstantEntity constantEntity = null;
        if (callCache != null) {
            constantEntity = callCache.get(env, cacheIndex);
        }
        boolean alreadyCached = constantEntity != null;
        if (!alreadyCached) {
            // Slow path: resolve the class and the constant, then populate the cache.
            ClassEntity entity = env.fetchClass(className, lowerClassName, true);
            if (entity == null) {
                env.error(trace, Messages.ERR_CLASS_NOT_FOUND.fetch(className));
                return Memory.NULL;
            }
            constantEntity = entity.findConstant(constant);
            if (constantEntity == null) {
                env.error(trace, Messages.ERR_UNDEFINED_CLASS_CONSTANT.fetch(constant));
                return Memory.NULL;
            }
            if (callCache != null) {
                callCache.put(env, cacheIndex, constantEntity);
            }
        }
        Memory value = constantEntity.getValue(env);
        // Access is only checked on first resolution; cached entries were already vetted.
        if (!alreadyCached) {
            InvokeHelper.checkAccess(env, trace, constantEntity, lateStaticCall);
        }
        if (value == null) {
            return Memory.NULL;
        }
        return value;
    }
public static Memory getProperty(Memory object, String property, Environment env, TraceInfo trace,
PropertyCallCache callCache, int cacheIndex)
throws Throwable {
object = object.toValue();
if (!object.isObject()) {
env.error(trace,
Messages.ERR_CANNOT_GET_PROPERTY_OF_NON_OBJECT.fetch(property)
);
return Memory.NULL;
}
IObject iObject = ((ObjectMemory) object).value;
return iObject.getReflection().getProperty(env, trace, iObject, property, callCache, cacheIndex);
}
public static Memory getRefProperty(Memory object, String property, Environment env, TraceInfo trace,
PropertyCallCache callCache, int cacheIndex)
throws Throwable {
object = object.toValue();
if (!object.isObject()) {
env.error(trace,
Messages.ERR_CANNOT_GET_PROPERTY_OF_NON_OBJECT.fetch(property)
);
return Memory.NULL;
}
IObject iObject = ((ObjectMemory) object).value;
return iObject.getReflection().getRefProperty(env, trace, iObject, property, callCache, cacheIndex);
}
public static Memory getStaticProperty(String className, String lowerClassName, String property, Environment env,
TraceInfo trace, PropertyCallCache callCache, int cacheIndex, boolean lateStaticCall) throws Throwable {
ClassEntity entity = env.fetchClass(className, lowerClassName, true);
if (entity == null) {
env.error(trace, Messages.ERR_CLASS_NOT_FOUND.fetch(className));
return Memory.NULL;
}
return entity.getStaticProperty(
env, trace, property, true, true, entity, callCache, cacheIndex, lateStaticCall
);
}
public static Memory issetStaticProperty(String className, String lowerClassName, String property, Environment env,
TraceInfo trace, PropertyCallCache callCache, int cacheIndex, boolean lateStaticCall) throws Throwable {
ClassEntity entity = env.fetchClass(className, lowerClassName, true);
if (entity == null) {
env.error(trace, Messages.ERR_CLASS_NOT_FOUND.fetch(className));
return Memory.NULL;
}
return entity.getStaticProperty(env, trace, property, false, true, entity, callCache, cacheIndex, lateStaticCall);
}
/**
 * Unsets a static property by fetching it and manually unsetting the
 * resulting memory. Always yields NULL.
 */
public static Memory unsetStaticProperty(String className, String lowerClassName, String property, Environment env,
                                         TraceInfo trace, PropertyCallCache callCache, int cacheIndex, boolean lateStaticCall) throws Throwable {
    Memory current =
            getStaticProperty(className, lowerClassName, property, env, trace, callCache, cacheIndex, lateStaticCall);
    current.manualUnset(env);
    return Memory.NULL;
}
/**
 * Extracts the underlying IObject from {@code object}, or reports a
 * cannot-set-property error and returns null when it is not an object.
 */
private static IObject fetchObject(Memory object, String property, Environment env, TraceInfo trace) {
    Memory value = object.toValue();
    if (value.isObject()) {
        return ((ObjectMemory) value).value;
    }
    env.error(trace, Messages.ERR_CANNOT_SET_PROPERTY_OF_NON_OBJECT.fetch(property));
    return null;
}
/**
 * Pre-increment: adds 1 to the property and returns the resulting value
 * (delegates to {@code assignPlusProperty} with a constant 1).
 */
public static Memory incAndGetProperty(Memory object, String property, Environment env, TraceInfo trace,
                                       PropertyCallCache callCache, int cacheIndex)
        throws Throwable {
    return assignPlusProperty(object, Memory.CONST_INT_1, property, env, trace, callCache, cacheIndex);
}
/**
 * Post-increment: adds 1 to the property and returns the value captured in
 * the reference passed to {@code plusProperty} — presumably the old value;
 * TODO(review) confirm against plusProperty's contract.
 */
public static Memory GetAndIncProperty(Memory object, String property, Environment env, TraceInfo trace,
                                       PropertyCallCache callCache, int cacheIndex)
        throws Throwable {
    IObject target = fetchObject(object, property, env, trace);
    if (target == null) {
        return Memory.NULL;
    }
    ReferenceMemory captured = new ReferenceMemory();
    target.getReflection().plusProperty(env, trace, target, property, Memory.CONST_INT_1, captured);
    return captured.getValue();
}
/**
 * Pre-decrement: subtracts 1 from the property and returns the resulting
 * value (delegates to {@code assignMinusProperty} with a constant 1).
 */
public static Memory decAndGetProperty(Memory object, String property, Environment env, TraceInfo trace,
                                       PropertyCallCache callCache, int cacheIndex)
        throws Throwable {
    return assignMinusProperty(object, Memory.CONST_INT_1, property, env, trace, callCache, cacheIndex);
}
/**
 * Post-decrement: subtracts 1 from the property and returns the value
 * captured in the reference passed to {@code minusProperty} — presumably the
 * old value; TODO(review) confirm against minusProperty's contract.
 */
public static Memory GetAndDecProperty(Memory object, String property, Environment env, TraceInfo trace,
                                       PropertyCallCache callCache, int cacheIndex)
        throws Throwable {
    IObject target = fetchObject(object, property, env, trace);
    if (target == null) {
        return Memory.NULL;
    }
    ReferenceMemory captured = new ReferenceMemory();
    target.getReflection().minusProperty(env, trace, target, property, Memory.CONST_INT_1, captured);
    return captured.getValue();
}
/** Assigns {@code value} to the named property; NULL if the target is not an object. */
public static Memory assignProperty(Memory object, Memory value, String property, Environment env, TraceInfo trace,
                                    PropertyCallCache callCache, int cacheIndex)
        throws Throwable {
    IObject target = fetchObject(object, property, env, trace);
    if (target == null) {
        return Memory.NULL;
    }
    return target.getReflection().setProperty(env, trace, target, property, value, null, callCache, cacheIndex);
}
/**
 * Same as {@code assignProperty} but with the object argument last —
 * convenience overload for right-to-left evaluation order in generated code.
 */
public static Memory assignPropertyRight(Memory value, String property, Environment env, TraceInfo trace, Memory object,
                                         PropertyCallCache callCache, int cacheIndex)
        throws Throwable {
    return assignProperty(object, value, property, env, trace, callCache, cacheIndex);
}
/**
 * Compound {@code +=} on the named property; NULL if the target is not an object.
 * Note: plusProperty takes no call cache — the callCache/cacheIndex params are
 * accepted only for signature symmetry with the other assign* helpers.
 */
public static Memory assignPlusProperty(Memory object, Memory value, String property, Environment env, TraceInfo trace,
                                        PropertyCallCache callCache, int cacheIndex)
        throws Throwable {
    IObject target = fetchObject(object, property, env, trace);
    if (target == null) {
        return Memory.NULL;
    }
    return target.getReflection().plusProperty(env, trace, target, property, value, null);
}
/**
 * Compound {@code -=} on the named property; NULL if the target is not an object.
 * Note: minusProperty takes no call cache — the callCache/cacheIndex params are
 * accepted only for signature symmetry with the other assign* helpers.
 */
public static Memory assignMinusProperty(Memory object, Memory value, String property, Environment env, TraceInfo trace,
                                         PropertyCallCache callCache, int cacheIndex)
        throws Throwable {
    IObject target = fetchObject(object, property, env, trace);
    if (target == null) {
        return Memory.NULL;
    }
    return target.getReflection().minusProperty(env, trace, target, property, value, null);
}
/** Compound {@code *=} on the named property; NULL if the target is not an object. */
public static Memory assignMulProperty(Memory object, Memory value, String property, Environment env, TraceInfo trace,
                                       PropertyCallCache callCache, int cacheIndex)
        throws Throwable {
    IObject target = fetchObject(object, property, env, trace);
    if (target == null) {
        return Memory.NULL;
    }
    return target.getReflection().mulProperty(env, trace, target, property, value, callCache, cacheIndex);
}
/** Compound {@code /=} on the named property; NULL if the target is not an object. */
public static Memory assignDivProperty(Memory object, Memory value, String property, Environment env, TraceInfo trace,
                                       PropertyCallCache callCache, int cacheIndex)
        throws Throwable {
    IObject target = fetchObject(object, property, env, trace);
    if (target == null) {
        return Memory.NULL;
    }
    return target.getReflection().divProperty(env, trace, target, property, value, callCache, cacheIndex);
}
/** Compound {@code %=} on the named property; NULL if the target is not an object. */
public static Memory assignModProperty(Memory object, Memory value, String property, Environment env, TraceInfo trace,
                                       PropertyCallCache callCache, int cacheIndex)
        throws Throwable {
    IObject target = fetchObject(object, property, env, trace);
    if (target == null) {
        return Memory.NULL;
    }
    return target.getReflection().modProperty(env, trace, target, property, value, callCache, cacheIndex);
}
/** Compound {@code .=} (concatenation) on the named property; NULL if the target is not an object. */
public static Memory assignConcatProperty(Memory object, Memory value, String property, Environment env, TraceInfo trace,
                                          PropertyCallCache callCache, int cacheIndex)
        throws Throwable {
    IObject target = fetchObject(object, property, env, trace);
    if (target == null) {
        return Memory.NULL;
    }
    return target.getReflection().concatProperty(env, trace, target, property, value, callCache, cacheIndex);
}
/** Compound {@code &=} on the named property; NULL if the target is not an object. */
public static Memory assignBitAndProperty(Memory object, Memory value, String property, Environment env, TraceInfo trace,
                                          PropertyCallCache callCache, int cacheIndex)
        throws Throwable {
    IObject target = fetchObject(object, property, env, trace);
    if (target == null) {
        return Memory.NULL;
    }
    return target.getReflection().bitAndProperty(env, trace, target, property, value, callCache, cacheIndex);
}
/** Compound {@code |=} on the named property; NULL if the target is not an object. */
public static Memory assignBitOrProperty(Memory object, Memory value, String property, Environment env, TraceInfo trace,
                                         PropertyCallCache callCache, int cacheIndex)
        throws Throwable {
    IObject target = fetchObject(object, property, env, trace);
    if (target == null) {
        return Memory.NULL;
    }
    return target.getReflection().bitOrProperty(env, trace, target, property, value, callCache, cacheIndex);
}
/** Compound {@code ^=} on the named property; NULL if the target is not an object. */
public static Memory assignBitXorProperty(Memory object, Memory value, String property, Environment env, TraceInfo trace,
                                          PropertyCallCache callCache, int cacheIndex)
        throws Throwable {
    IObject target = fetchObject(object, property, env, trace);
    if (target == null) {
        return Memory.NULL;
    }
    return target.getReflection().bitXorProperty(env, trace, target, property, value, callCache, cacheIndex);
}
/** Compound {@code >>=} on the named property; NULL if the target is not an object. */
public static Memory assignBitShrProperty(Memory object, Memory value, String property, Environment env, TraceInfo trace,
                                          PropertyCallCache callCache, int cacheIndex)
        throws Throwable {
    IObject target = fetchObject(object, property, env, trace);
    if (target == null) {
        return Memory.NULL;
    }
    return target.getReflection().bitShrProperty(env, trace, target, property, value, callCache, cacheIndex);
}
/** Compound {@code <<=} on the named property; NULL if the target is not an object. */
public static Memory assignBitShlProperty(Memory object, Memory value, String property, Environment env, TraceInfo trace,
                                          PropertyCallCache callCache, int cacheIndex)
        throws Throwable {
    IObject target = fetchObject(object, property, env, trace);
    if (target == null) {
        return Memory.NULL;
    }
    return target.getReflection().bitShlProperty(env, trace, target, property, value, callCache, cacheIndex);
}
/**
 * Checks whether a member with the given visibility modifier may be accessed
 * from the current calling context.
 *
 * Return codes:
 *   0 - success
 *   1 - invalid protected
 *   2 - invalid private
 *
 * @param env current execution environment (used to resolve the calling class
 *            when no explicit context is supplied)
 * @return 0, 1 or 2 as described above (unknown modifiers yield 2)
 */
public static int canAccess(Environment env, Modifier modifier, ClassEntity classEntity, ClassEntity context, boolean lateStaticCall) {
    switch (modifier) {
        case PUBLIC:
            return 0;
        case PRIVATE: {
            // Private access requires the caller to be exactly the owning class.
            ClassEntity caller = context != null
                    ? context
                    : (lateStaticCall ? env.getLateStaticClass() : env.getLastClassOnStack());
            if (caller != null && caller.getId() == classEntity.getId()) {
                return 0;
            }
            return 2;
        }
        case PROTECTED: {
            // Protected access requires the owning class to appear somewhere in
            // the caller's ancestor chain (including the caller itself).
            ClassEntity caller = context != null
                    ? context
                    : (lateStaticCall ? env.getLateStaticClass() : env.getLastClassOnStack());
            long ownerId = classEntity.getId();
            for (ClassEntity cl = caller; cl != null; cl = cl.getParent()) {
                if (cl.getId() == ownerId) {
                    return 0;
                }
            }
            return 1;
        }
    }
    return 2;
}
}
| |
/**
* Copyright (C) 2012-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ninja;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import ninja.utils.NinjaProperties;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Optional;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableList;
import com.google.inject.Inject;
import com.google.inject.Injector;
/**
 * Default {@link Router} implementation.
 *
 * Routes are registered via the HTTP-verb builder methods (GET(), POST(), ...),
 * then frozen into an immutable table by {@link #compileRoutes()}. Reverse
 * routing substitutes {name} / {name: regex} placeholders in a route url with
 * user-supplied values and appends leftover parameters as a query string.
 */
public class RouterImpl implements Router {

    private final NinjaProperties ninjaProperties;

    private final Logger logger = LoggerFactory.getLogger(RouterImpl.class);

    private final List<RouteBuilderImpl> allRouteBuilders = new ArrayList<>();
    private final Injector injector;

    /** The compiled, immutable routing table. Stays null until compileRoutes() runs. */
    private List<Route> routes;

    // This regex works for both {myParam} AND {myParam: .*} (with regex).
    // %s receives the (regex-quoted) parameter name. Constant, so declared
    // static final rather than per-instance.
    private static final String VARIABLE_PART_PATTERN_WITH_PLACEHOLDER = "\\{(%s)(:\\s([^}]*))?\\}";

    @Inject
    public RouterImpl(
            Injector injector,
            NinjaProperties ninjaProperties) {
        this.injector = injector;
        this.ninjaProperties = ninjaProperties;
    }

    /**
     * Returns the first compiled route matching the given HTTP method and uri,
     * or null when nothing matches.
     *
     * @throws IllegalStateException if {@link #compileRoutes()} has not run yet
     */
    @Override
    public Route getRouteFor(String httpMethod, String uri) {
        if (routes == null) {
            throw new IllegalStateException(
                    "Attempt to get route when routes not compiled");
        }

        for (Route route : routes) {
            if (route.matches(httpMethod, uri)) {
                return route;
            }
        }

        return null;
    }

    @Override
    public String getReverseRoute(
            Class<?> controllerClass,
            String controllerMethodName) {

        Optional<Map<String, Object>> parameterMap = Optional.absent();
        return getReverseRoute(controllerClass, controllerMethodName, parameterMap);
    }

    /**
     * Reverse-routes with parameters supplied as alternating key/value varargs.
     * Logs an error and returns null when an odd number of arguments is given.
     */
    @Override
    public String getReverseRoute(Class<?> controllerClass,
                                  String controllerMethodName,
                                  Object... parameterMap) {

        if (parameterMap.length % 2 != 0) {
            logger.error("Always provide key (as String) value (as Object) pairs in parameterMap. That means providing e.g. 2, 4, 6... objects.");
            return null;
        }

        Map<String, Object> map = new HashMap<>(parameterMap.length / 2);
        for (int i = 0; i < parameterMap.length; i += 2) {
            map.put((String) parameterMap[i], parameterMap[i + 1]);
        }

        return getReverseRoute(controllerClass, controllerMethodName, Optional.of(map));
    }

    /**
     * Computes the url for the given controller method, substituting path
     * placeholders from parameterMap and appending any leftover parameters
     * as a query string; the configured context path is prepended.
     *
     * @return the reverse route, or null when no route exists for the method
     * @throws IllegalStateException if routes have not been compiled
     */
    @Override
    public String getReverseRoute(
            Class<?> controllerClass,
            String controllerMethodName,
            Optional<Map<String, Object>> parameterMap) {

        if (routes == null) {
            throw new IllegalStateException(
                    "Attempt to get route when routes not compiled");
        }

        Optional<Route> route
                = getRouteForControllerClassAndMethod(
                        controllerClass,
                        controllerMethodName);

        if (!route.isPresent()) {
            logger.info(
                    "Could not find any reverse route for the method {} of the Controller class {}",
                    controllerMethodName, controllerClass.getSimpleName());
            return null;
        }

        // The original url. Something like route/user/{id}/{email}/userDashboard/{name: .*}
        String urlWithReplacedPlaceholders
                = replaceVariablePartsOfUrlWithValuesProvidedByUser(
                        route.get().getUrl(),
                        parameterMap);

        return addContextPathToUrlIfAvailable(
                urlWithReplacedPlaceholders,
                ninjaProperties);
    }

    @Override
    public String getReverseRoute(Class<?> controllerClass,
                                  String controllerMethodName,
                                  Map<String, Object> parameterMap) {

        return getReverseRoute(
                controllerClass,
                controllerMethodName,
                Optional.fromNullable(parameterMap));
    }

    /**
     * Freezes all registered route builders into the immutable routing table
     * and logs it. Must be called exactly once.
     *
     * @throws IllegalStateException when routes were already compiled
     */
    @Override
    public void compileRoutes() {
        if (this.routes != null) {
            throw new IllegalStateException("Routes already compiled");
        }

        // Local renamed so it does not shadow the 'routes' field.
        List<Route> compiledRoutes = new ArrayList<>();
        for (RouteBuilderImpl routeBuilder : allRouteBuilders) {
            compiledRoutes.add(routeBuilder.buildRoute(injector));
        }
        this.routes = ImmutableList.copyOf(compiledRoutes);

        logRoutes();
    }

    @Override
    public List<Route> getRoutes() {
        if (routes == null) {
            throw new IllegalStateException("Routes have not been compiled");
        }
        return routes;
    }

    @Override
    public RouteBuilder GET() {
        return register(new RouteBuilderImpl().GET());
    }

    @Override
    public RouteBuilder POST() {
        return register(new RouteBuilderImpl().POST());
    }

    @Override
    public RouteBuilder PUT() {
        return register(new RouteBuilderImpl().PUT());
    }

    @Override
    public RouteBuilder DELETE() {
        return register(new RouteBuilderImpl().DELETE());
    }

    @Override
    public RouteBuilder OPTIONS() {
        return register(new RouteBuilderImpl().OPTIONS());
    }

    @Override
    public RouteBuilder HEAD() {
        return register(new RouteBuilderImpl().HEAD());
    }

    @Override
    public RouteBuilder METHOD(String method) {
        return register(new RouteBuilderImpl().METHOD(method));
    }

    /** Records a route builder so it is included when routes are compiled. */
    private RouteBuilder register(RouteBuilderImpl routeBuilder) {
        allRouteBuilders.add(routeBuilder);
        return routeBuilder;
    }

    /** Finds the first compiled route bound to the given controller class and method name. */
    private Optional<Route> getRouteForControllerClassAndMethod(
            Class<?> controllerClass,
            String controllerMethodName) {

        for (Route route : routes) {
            if (route.getControllerClass() != null
                    && route.getControllerClass().equals(controllerClass)
                    && route.getControllerMethod().getName().equals(controllerMethodName)) {
                return Optional.of(route);
            }
        }

        return Optional.absent();
    }

    /**
     * Replaces every {name} / {name: regex} placeholder in the route url with
     * the matching value from parameterMap. Parameters that match no path
     * placeholder are appended as a query string.
     *
     * NOTE(review): query-string keys and values are appended verbatim, without
     * URL encoding — kept as-is to preserve existing behavior; confirm callers
     * pre-encode where needed.
     */
    private String replaceVariablePartsOfUrlWithValuesProvidedByUser(
            String routeUrlWithVariableParts,
            Optional<Map<String, Object>> parameterMap) {

        String urlWithReplacedPlaceholders = routeUrlWithVariableParts;

        if (parameterMap.isPresent()) {

            Map<String, Object> queryParameterMap = new HashMap<>(parameterMap.get().size());

            for (Entry<String, Object> parameterPair : parameterMap.get().entrySet()) {

                boolean foundAsPathParameter = false;

                StringBuffer stringBuffer = new StringBuffer();

                // Quote the key so parameter names containing regex
                // metacharacters cannot break or alter the placeholder pattern.
                String placeholderRegex = String.format(
                        VARIABLE_PART_PATTERN_WITH_PLACEHOLDER,
                        Pattern.quote(parameterPair.getKey()));

                Matcher matcher = Pattern.compile(placeholderRegex)
                        .matcher(urlWithReplacedPlaceholders);

                while (matcher.find()) {
                    // quoteReplacement prevents '$' and '\' in the user-supplied
                    // value from being interpreted as group references (which
                    // would corrupt the url or throw).
                    matcher.appendReplacement(
                            stringBuffer,
                            Matcher.quoteReplacement(parameterPair.getValue().toString()));
                    foundAsPathParameter = true;
                }

                matcher.appendTail(stringBuffer);
                urlWithReplacedPlaceholders = stringBuffer.toString();

                // Not a path placeholder => becomes a query parameter.
                if (!foundAsPathParameter) {
                    queryParameterMap.put(parameterPair.getKey(), parameterPair.getValue());
                }
            }

            // now prepare the query string for this url if we got some query params
            if (!queryParameterMap.isEmpty()) {

                StringBuilder queryString = new StringBuilder();

                // The uri is now replaced => we now have to add potential query parameters
                for (Iterator<Entry<String, Object>> iterator = queryParameterMap.entrySet().iterator();
                        iterator.hasNext();) {

                    Entry<String, Object> queryParameterEntry = iterator.next();
                    queryString
                            .append(queryParameterEntry.getKey())
                            .append("=")
                            .append(queryParameterEntry.getValue());

                    if (iterator.hasNext()) {
                        queryString.append("&");
                    }
                }

                urlWithReplacedPlaceholders = urlWithReplacedPlaceholders
                        + "?"
                        + queryString.toString();
            }
        }

        return urlWithReplacedPlaceholders;
    }

    /** Prepends the configured context path (can only be empty, never null). */
    private String addContextPathToUrlIfAvailable(
            String routeWithoutContextPath,
            NinjaProperties ninjaProperties) {

        // contextPath can only be empty. never null.
        return ninjaProperties.getContextPath()
                + routeWithoutContextPath;
    }

    /** Logs the compiled routing table as an aligned, bordered listing. */
    private void logRoutes() {
        // determine the width of the columns
        int maxMethodLen = 0;
        int maxPathLen = 0;
        int maxControllerLen = 0;

        for (Route route : getRoutes()) {
            maxMethodLen = Math.max(maxMethodLen, route.getHttpMethod().length());
            maxPathLen = Math.max(maxPathLen, route.getUri().length());

            if (route.getControllerClass() != null) {
                int controllerLen = route.getControllerClass().getName().length()
                        + route.getControllerMethod().getName().length();
                maxControllerLen = Math.max(maxControllerLen, controllerLen);
            }
        }

        // log the routing table
        int borderLen = 10 + maxMethodLen + maxPathLen + maxControllerLen;
        String border = Strings.padEnd("", borderLen, '-');

        logger.info(border);
        logger.info("Registered routes");
        logger.info(border);

        for (Route route : getRoutes()) {
            if (route.getControllerClass() != null) {
                logger.info("{} {} => {}.{}()",
                        Strings.padEnd(route.getHttpMethod(), maxMethodLen, ' '),
                        Strings.padEnd(route.getUri(), maxPathLen, ' '),
                        route.getControllerClass().getName(),
                        route.getControllerMethod().getName());
            } else {
                logger.info("{} {}", route.getHttpMethod(), route.getUri());
            }
        }
    }
}
| |
/*
* Copyright (C) 2014-2015 LinkedIn Corp. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use
* this file except in compliance with the License. You may obtain a copy of the
* License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed
* under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied.
*/
package gobblin.metrics;
import java.io.IOException;
import java.util.Map;
import java.util.SortedMap;
import java.util.SortedSet;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import com.codahale.metrics.Counter;
import com.codahale.metrics.Gauge;
import com.codahale.metrics.Histogram;
import com.codahale.metrics.Meter;
import com.codahale.metrics.Metric;
import com.codahale.metrics.MetricFilter;
import com.codahale.metrics.Timer;
import static gobblin.metrics.TestConstants.*;
import gobblin.metrics.reporter.ContextAwareScheduledReporter;
/**
* Unit tests for {@link MetricContext}.
*
* <p>
* This test class also tests classes {@link ContextAwareCounter}, {@link ContextAwareMeter},
* {@link ContextAwareHistogram}, {@link ContextAwareTimer}, {@link ContextAwareGauge},
* {@link gobblin.metrics.reporter.ContextAwareScheduledReporter}, and {@link TagBasedMetricFilter}.
* </p>
*
* @author ynli
*/
@Test(groups = {"gobblin.metrics"})
public class MetricContextTest {

  private static final String CHILD_CONTEXT_NAME = "TestChildContext";
  private static final String JOB_ID_KEY = "job.id";
  private static final String JOB_ID_PREFIX = "TestJob-";
  private static final String TASK_ID_KEY = "task.id";
  private static final String TASK_ID_PREFIX = "TestTask-";
  private static final String METRIC_GROUP_KEY = "metric.group";
  private static final String INPUT_RECORDS_GROUP = "INPUT_RECORDS";
  private static final String TEST_REPORTER_NAME = TestContextAwareScheduledReporter.class.getName();

  // Parent context, created once in setUp; childContext is created lazily by
  // testChildContext and reused by the dependent tests.
  private MetricContext context;
  private MetricContext childContext;

  // Builds the parent context (UUID-suffixed name so repeated runs don't
  // collide under the shared RootMetricContext) and verifies its tags.
  @BeforeClass
  public void setUp() {
    String contextName = CONTEXT_NAME + "_" + UUID.randomUUID().toString();
    this.context = MetricContext.builder(contextName)
        .addTag(new Tag<String>(JOB_ID_KEY, JOB_ID_PREFIX + 0))
        .build();

    Assert.assertEquals(this.context.getName(), contextName);
    Assert.assertTrue(this.context.getParent().isPresent());
    Assert.assertEquals(this.context.getParent().get(), RootMetricContext.get());
    Assert.assertEquals(this.context.getTags().size(), 2); // uuid tag gets added automatically
    Assert.assertEquals(this.context.getTags().get(0).getKey(), JOB_ID_KEY);
    Assert.assertEquals(this.context.getTags().get(0).getValue(), JOB_ID_PREFIX + 0);
    // Second tag should be uuid
    Assert.assertTrue(this.context.getTags().get(1).getValue().toString()
        .matches("[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}"));
  }

  // Creates the child context and checks that it inherits the parent's tags
  // (job id + uuid) followed by its own task-id tag.
  @Test
  public void testChildContext() {
    this.childContext = this.context.childBuilder(CHILD_CONTEXT_NAME)
        .addTag(new Tag<String>(TASK_ID_KEY, TASK_ID_PREFIX + 0))
        .build();

    Assert.assertEquals(this.childContext.getName(), CHILD_CONTEXT_NAME);
    Assert.assertTrue(this.childContext.getParent().isPresent());
    Assert.assertEquals(this.childContext.getParent().get(), this.context);
    Assert.assertEquals(this.childContext.getTags().size(), 3);
    Assert.assertEquals(this.childContext.getTags().get(0).getKey(), JOB_ID_KEY);
    Assert.assertEquals(this.childContext.getTags().get(0).getValue(), JOB_ID_PREFIX + 0);
    Assert.assertEquals(this.childContext.getTags().get(1).getKey(), MetricContext.METRIC_CONTEXT_ID_TAG_NAME);
    Assert.assertEquals(this.childContext.getTags().get(2).getKey(), TASK_ID_KEY);
    Assert.assertEquals(this.childContext.getTags().get(2).getValue(), TASK_ID_PREFIX + 0);
  }

  // Verifies counter registration and that child-counter updates propagate to
  // the parent counter (child inc/dec is mirrored in the parent's count).
  @Test(dependsOnMethods = "testChildContext")
  public void testContextAwareCounter() {
    ContextAwareCounter jobRecordsProcessed = this.context.contextAwareCounter(RECORDS_PROCESSED);
    Assert.assertEquals(this.context.getCounters().get(jobRecordsProcessed.getName()),
        jobRecordsProcessed.getInnerMetric());
    Assert.assertEquals(jobRecordsProcessed.getContext(), this.context);
    Assert.assertEquals(jobRecordsProcessed.getName(), RECORDS_PROCESSED);

    jobRecordsProcessed.inc();
    Assert.assertEquals(jobRecordsProcessed.getCount(), 1l);
    jobRecordsProcessed.inc(5);
    Assert.assertEquals(jobRecordsProcessed.getCount(), 6l);
    jobRecordsProcessed.dec();
    Assert.assertEquals(jobRecordsProcessed.getCount(), 5l);
    jobRecordsProcessed.dec(3);
    Assert.assertEquals(jobRecordsProcessed.getCount(), 2l);

    ContextAwareCounter taskRecordsProcessed = this.childContext.contextAwareCounter(RECORDS_PROCESSED);
    Assert.assertEquals(this.childContext.getCounters()
        .get(taskRecordsProcessed.getName()),
        taskRecordsProcessed.getInnerMetric());
    Assert.assertEquals(taskRecordsProcessed.getContext(), this.childContext);
    Assert.assertEquals(taskRecordsProcessed.getName(), RECORDS_PROCESSED);

    // Child updates are expected to also update the parent counter: the parent
    // is at 2 here, so each child inc/dec shifts the parent by the same delta.
    taskRecordsProcessed.inc();
    Assert.assertEquals(taskRecordsProcessed.getCount(), 1l);
    Assert.assertEquals(jobRecordsProcessed.getCount(), 3l);
    taskRecordsProcessed.inc(3);
    Assert.assertEquals(taskRecordsProcessed.getCount(), 4l);
    Assert.assertEquals(jobRecordsProcessed.getCount(), 6l);
    taskRecordsProcessed.dec(4);
    Assert.assertEquals(taskRecordsProcessed.getCount(), 0l);
    Assert.assertEquals(jobRecordsProcessed.getCount(), 2l);
  }

  // Verifies meter registration and that child-meter marks accumulate into the
  // parent meter's count as well.
  @Test(dependsOnMethods = "testChildContext")
  public void testContextAwareMeter() {
    ContextAwareMeter jobRecordsProcessRate = this.context.contextAwareMeter(RECORD_PROCESS_RATE);
    Assert.assertEquals(this.context.getMeters()
        .get(jobRecordsProcessRate.getName()),
        jobRecordsProcessRate.getInnerMetric());
    Assert.assertEquals(jobRecordsProcessRate.getContext(), this.context);
    Assert.assertEquals(jobRecordsProcessRate.getName(), RECORD_PROCESS_RATE);

    jobRecordsProcessRate.mark();
    jobRecordsProcessRate.mark(3);
    Assert.assertEquals(jobRecordsProcessRate.getCount(), 4l);

    ContextAwareMeter taskRecordsProcessRate = this.childContext.contextAwareMeter(RECORD_PROCESS_RATE);
    Assert.assertEquals(this.childContext.getMeters()
        .get(taskRecordsProcessRate.getName()),
        taskRecordsProcessRate.getInnerMetric());
    Assert.assertEquals(taskRecordsProcessRate.getContext(), this.childContext);
    Assert.assertEquals(taskRecordsProcessRate.getName(), RECORD_PROCESS_RATE);

    taskRecordsProcessRate.mark(2);
    Assert.assertEquals(taskRecordsProcessRate.getCount(), 2l);
    Assert.assertEquals(jobRecordsProcessRate.getCount(), 6l);
    taskRecordsProcessRate.mark(5);
    Assert.assertEquals(taskRecordsProcessRate.getCount(), 7l);
    Assert.assertEquals(jobRecordsProcessRate.getCount(), 11l);
  }

  // Verifies histogram registration and that child-histogram updates are also
  // reflected in the parent histogram's count and min/max snapshot.
  @Test(dependsOnMethods = "testChildContext")
  public void testContextAwareHistogram() {
    ContextAwareHistogram jobRecordSizeDist = this.context.contextAwareHistogram(RECORD_SIZE_DISTRIBUTION);
    Assert.assertEquals(
        this.context.getHistograms().get(
            jobRecordSizeDist.getName()),
        jobRecordSizeDist.getInnerMetric());
    Assert.assertEquals(jobRecordSizeDist.getContext(), this.context);
    Assert.assertEquals(jobRecordSizeDist.getName(), RECORD_SIZE_DISTRIBUTION);

    jobRecordSizeDist.update(2);
    jobRecordSizeDist.update(4);
    jobRecordSizeDist.update(7);
    Assert.assertEquals(jobRecordSizeDist.getCount(), 3l);
    Assert.assertEquals(jobRecordSizeDist.getSnapshot().getMin(), 2l);
    Assert.assertEquals(jobRecordSizeDist.getSnapshot().getMax(), 7l);

    ContextAwareHistogram taskRecordSizeDist = this.childContext.contextAwareHistogram(RECORD_SIZE_DISTRIBUTION);
    Assert.assertEquals(this.childContext.getHistograms().get(taskRecordSizeDist.getName()),
        taskRecordSizeDist.getInnerMetric());
    Assert.assertEquals(taskRecordSizeDist.getContext(), this.childContext);
    Assert.assertEquals(taskRecordSizeDist.getName(), RECORD_SIZE_DISTRIBUTION);

    taskRecordSizeDist.update(3);
    taskRecordSizeDist.update(14);
    taskRecordSizeDist.update(11);
    Assert.assertEquals(taskRecordSizeDist.getCount(), 3l);
    Assert.assertEquals(taskRecordSizeDist.getSnapshot().getMin(), 3l);
    Assert.assertEquals(taskRecordSizeDist.getSnapshot().getMax(), 14l);
    Assert.assertEquals(jobRecordSizeDist.getCount(), 6l);
    Assert.assertEquals(jobRecordSizeDist.getSnapshot().getMin(), 2l);
    Assert.assertEquals(jobRecordSizeDist.getSnapshot().getMax(), 14l);
  }

  // Verifies timer registration, min/max of recorded durations (in nanos),
  // and that a started timer context can be stopped.
  @Test
  public void testContextAwareTimer() {
    ContextAwareTimer jobTotalDuration = this.context.contextAwareTimer(TOTAL_DURATION);
    Assert.assertEquals(this.context.getTimers().get(jobTotalDuration.getName()), jobTotalDuration.getInnerMetric());
    Assert.assertEquals(jobTotalDuration.getContext(), this.context);
    Assert.assertEquals(jobTotalDuration.getName(), TOTAL_DURATION);

    jobTotalDuration.update(50, TimeUnit.SECONDS);
    jobTotalDuration.update(100, TimeUnit.SECONDS);
    jobTotalDuration.update(150, TimeUnit.SECONDS);
    Assert.assertEquals(jobTotalDuration.getCount(), 3l);
    Assert.assertEquals(jobTotalDuration.getSnapshot().getMin(), TimeUnit.SECONDS.toNanos(50l));
    Assert.assertEquals(jobTotalDuration.getSnapshot().getMax(), TimeUnit.SECONDS.toNanos(150l));

    Assert.assertTrue(jobTotalDuration.time().stop() >= 0l);
  }

  // Registers a constant-valued gauge and checks registration + reported value.
  @Test
  public void testTaggableGauge() {
    ContextAwareGauge<Long> queueSize = this.context.newContextAwareGauge(
        QUEUE_SIZE,
        new Gauge<Long>() {
          @Override
          public Long getValue() {
            return 1000l;
          }
        });
    this.context.register(QUEUE_SIZE, queueSize);

    Assert.assertEquals(queueSize.getValue().longValue(), 1000l);

    Assert.assertEquals(
        this.context.getGauges().get(queueSize.getName()),
        queueSize.getInnerMetric());
    Assert.assertEquals(queueSize.getContext(), this.context);
    Assert.assertEquals(queueSize.getName(), QUEUE_SIZE);
  }

  // Verifies the aggregate metric views (names, per-type maps) after all the
  // metric-creating tests above have run. Sizes include the context's built-in
  // notifications timer, hence timers.size() == 2.
  @Test(dependsOnMethods = {
      "testContextAwareCounter",
      "testContextAwareMeter",
      "testContextAwareHistogram",
      "testContextAwareTimer",
      "testTaggableGauge"
  })
  public void testGetMetrics() {
    SortedSet<String> names = this.context.getNames();
    Assert.assertEquals(names.size(), 6);
    Assert.assertTrue(names.contains(RECORDS_PROCESSED));
    Assert.assertTrue(names.contains(RECORD_PROCESS_RATE));
    Assert.assertTrue(
        names.contains(RECORD_SIZE_DISTRIBUTION));
    Assert.assertTrue(names.contains(TOTAL_DURATION));
    Assert.assertTrue(names.contains(QUEUE_SIZE));

    SortedSet<String> childNames = this.childContext.getNames();
    Assert.assertEquals(childNames.size(), 4);
    Assert.assertTrue(
        childNames.contains(RECORDS_PROCESSED));
    Assert.assertTrue(
        childNames.contains(RECORD_PROCESS_RATE));
    Assert.assertTrue(
        childNames.contains(RECORD_SIZE_DISTRIBUTION));

    Map<String, Metric> metrics = this.context.getMetrics();
    Assert.assertEquals(metrics.size(), 6);
    Assert.assertTrue(
        metrics.containsKey(RECORDS_PROCESSED));
    Assert.assertTrue(
        metrics.containsKey(RECORD_PROCESS_RATE));
    Assert.assertTrue(
        metrics.containsKey(RECORD_SIZE_DISTRIBUTION));
    Assert.assertTrue(metrics.containsKey(TOTAL_DURATION));
    Assert.assertTrue(metrics.containsKey(QUEUE_SIZE));

    Map<String, Counter> counters = this.context.getCounters();
    Assert.assertEquals(counters.size(), 1);
    Assert.assertTrue(
        counters.containsKey(RECORDS_PROCESSED));

    Map<String, Meter> meters = this.context.getMeters();
    Assert.assertEquals(meters.size(), 1);
    Assert.assertTrue(
        meters.containsKey(RECORD_PROCESS_RATE));

    Map<String, Histogram> histograms = this.context.getHistograms();
    Assert.assertEquals(histograms.size(), 1);
    Assert.assertTrue(
        histograms.containsKey(RECORD_SIZE_DISTRIBUTION));

    Map<String, Timer> timers = this.context.getTimers();
    Assert.assertEquals(timers.size(), 2);
    Assert.assertTrue(timers.containsKey(TOTAL_DURATION));

    Map<String, Gauge> gauges = this.context.getGauges();
    Assert.assertEquals(gauges.size(), 1);
    Assert.assertTrue(gauges.containsKey(QUEUE_SIZE));
  }

  // Same views as testGetMetrics but filtered to exclude the built-in
  // notifications timer, so timers.size() drops to 1.
  @Test(dependsOnMethods = "testGetMetrics")
  @SuppressWarnings("unchecked")
  public void testGetMetricsWithFilter() {
    MetricFilter filter = new MetricFilter() {
      @Override public boolean matches(String name, Metric metric) {
        return !name.equals(MetricContext.GOBBLIN_METRICS_NOTIFICATIONS_TIMER_NAME);
      }
    };

    Map<String, Counter> counters = this.context.getCounters(filter);
    Assert.assertEquals(counters.size(), 1);
    Assert.assertTrue(
        counters.containsKey(RECORDS_PROCESSED));

    Map<String, Meter> meters = this.context.getMeters(filter);
    Assert.assertEquals(meters.size(), 1);
    Assert.assertTrue(
        meters.containsKey(RECORD_PROCESS_RATE));

    Map<String, Histogram> histograms = this.context.getHistograms(filter);
    Assert.assertEquals(histograms.size(), 1);
    Assert.assertTrue(
        histograms.containsKey(RECORD_SIZE_DISTRIBUTION));

    Map<String, Timer> timers = this.context.getTimers(filter);
    Assert.assertEquals(timers.size(), 1);
    Assert.assertTrue(timers.containsKey(TOTAL_DURATION));

    Map<String, Gauge> gauges = this.context.getGauges(filter);
    Assert.assertEquals(gauges.size(), 1);
    Assert.assertTrue(gauges.containsKey(QUEUE_SIZE));
  }

  // Removes the child context's metrics one type at a time; only one name
  // (presumably the built-in notifications timer) should remain afterwards.
  @Test(dependsOnMethods = {
      "testGetMetricsWithFilter"
  })
  public void testRemoveMetrics() {
    Assert.assertTrue(this.childContext.remove(RECORDS_PROCESSED));
    Assert.assertTrue(this.childContext.getCounters().isEmpty());

    Assert.assertTrue(this.childContext.remove(RECORD_PROCESS_RATE));
    Assert.assertTrue(this.childContext.getMeters().isEmpty());

    Assert.assertTrue(this.childContext.remove(RECORD_SIZE_DISTRIBUTION));
    Assert.assertTrue(this.childContext.getHistograms().isEmpty());

    Assert.assertEquals(this.childContext.getNames().size(), 1);
  }

  @AfterClass
  public void tearDown() throws IOException {
    if (this.context != null) {
      this.context.close();
    }
  }

  // Scheduled reporter stub asserting the exact metric snapshot it receives.
  // NOTE(review): it asserts context.getName() == CONTEXT_NAME, while setUp
  // builds this.context with a UUID-suffixed name — presumably this reporter
  // is attached to a differently-named context elsewhere; confirm before reuse.
  private static class TestContextAwareScheduledReporter extends ContextAwareScheduledReporter {

    protected TestContextAwareScheduledReporter(MetricContext context, String name, MetricFilter filter,
        TimeUnit rateUnit, TimeUnit durationUnit) {
      super(context, name, filter, rateUnit, durationUnit);
    }

    @Override
    protected void reportInContext(MetricContext context,
                                   SortedMap<String, Gauge> gauges,
                                   SortedMap<String, Counter> counters,
                                   SortedMap<String, Histogram> histograms,
                                   SortedMap<String, Meter> meters,
                                   SortedMap<String, Timer> timers) {

      Assert.assertEquals(context.getName(), CONTEXT_NAME);

      Assert.assertEquals(gauges.size(), 1);
      Assert.assertTrue(gauges.containsKey(QUEUE_SIZE));

      Assert.assertEquals(counters.size(), 1);
      Assert.assertTrue(counters.containsKey(RECORDS_PROCESSED));

      Assert.assertEquals(histograms.size(), 1);
      Assert.assertTrue(
          histograms.containsKey(RECORD_SIZE_DISTRIBUTION));

      Assert.assertEquals(meters.size(), 1);
      Assert.assertTrue(meters.containsKey(RECORD_PROCESS_RATE));

      // Includes the context's built-in notifications timer.
      Assert.assertEquals(timers.size(), 2);
      Assert.assertTrue(timers.containsKey(TOTAL_DURATION));
    }

    // Builder that constructs the test reporter with the configured settings.
    private static class TestContextAwareScheduledReporterBuilder extends Builder {

      public TestContextAwareScheduledReporterBuilder(String name) {
        super(name);
      }

      @Override
      public ContextAwareScheduledReporter build(MetricContext context) {
        return new MetricContextTest.TestContextAwareScheduledReporter(
            context, this.name, this.filter, this.rateUnit, this.durationUnit);
      }
    }
  }
}
| |
/*
* JDBReport Designer
*
* Copyright (C) 2006-2011 Andrey Kholmanskih
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package jdbreport.design.grid;
import java.awt.Component;
import java.awt.Dialog;
import java.awt.Frame;
import java.awt.HeadlessException;
import java.awt.Window;
import java.awt.event.MouseEvent;
import java.beans.PropertyChangeEvent;
import java.util.Iterator;
import javax.swing.Icon;
import javax.swing.JTable;
import javax.swing.SwingUtilities;
import javax.swing.table.JTableHeader;
import javax.swing.table.TableModel;
import org.xml.sax.XMLReader;
import org.xml.sax.helpers.DefaultHandler;
import jdbreport.design.grid.undo.FunctionUndoItem;
import jdbreport.design.grid.undo.TemplateGridHandler;
import jdbreport.design.grid.undo.TemplateGridParser;
import jdbreport.design.model.CellObject;
import jdbreport.design.model.TemplateModel;
import jdbreport.design.view.clipboard.TemplateClipboardParser;
import jdbreport.design.view.clipboard.TemplateFragmentHandler;
import jdbreport.grid.CellPropertiesDlg;
import jdbreport.grid.JReportGrid;
import jdbreport.grid.ReportCellRenderer;
import jdbreport.grid.RowHeader;
import jdbreport.grid.undo.BackupItem;
import jdbreport.grid.undo.CellUndoItem;
import jdbreport.grid.undo.GridParser;
import jdbreport.grid.undo.UndoItem;
import jdbreport.model.Cell;
import jdbreport.model.GridRect;
import jdbreport.model.TableRow;
import jdbreport.view.clipboard.ClipboardParser;
import jdbreport.util.Utils;
/**
* @version 3.1 14.12.2014
* @author Andrey Kholmanskih
*
*/
public class TemplateGrid extends JReportGrid {
private static final long serialVersionUID = 1L;
private static Icon functionIcon;
private static Icon totalIcon;
private transient ReportCellRenderer templateReportRenderer;
public TemplateGrid(TableModel tm) {
super(tm);
}
protected static Icon getTotalIcon() {
if (totalIcon == null) {
totalIcon = TemplateReportResources.getInstance().getIcon(
"total_ovr.png");
}
return totalIcon;
}
protected static Icon getFunctionIcon() {
if (functionIcon == null) {
functionIcon = TemplateReportResources.getInstance().getIcon(
"function.gif");
}
return functionIcon;
}
/**
* Sets the function's name for the selected cells
*
* @param functionName
* - the function name
*/
public void setFunctionName(String functionName) {
pushUndo(new FunctionUndoItem(this));
getTemplateModel().setFunctionName(getSelectionRect(), functionName);
repaint();
}
/**
*
* @return the TemplateModel
*/
public TemplateModel getTemplateModel() {
return (TemplateModel) getReportModel();
}
protected boolean canEdit(Cell cell) {
return true;
}
/**
*
* @return the first selected CellObject
*/
public CellObject getSelectedCellObject() {
return getTemplateModel().getCellObject(getSelectedRow(),
getSelectedColumn());
}
protected void createDefaultRenderers() {
super.createDefaultRenderers();
setLazyRenderer(Object.class,
"jdbreport.design.grid.TemplateGrid$TemplateReportRenderer"); //$NON-NLS-1$
}
protected RowHeader createDefaultRowHeader() {
return new TemplateRowHeader(this);
}
protected JTableHeader createDefaultTableHeader() {
return new TemplateHeader(columnModel);
}
public ReportCellRenderer getTextReportRenderer() {
if (templateReportRenderer == null) {
templateReportRenderer = new TemplateReportRenderer();
}
return templateReportRenderer;
}
/**
* Adds rows' count of the defined type to the index of the model. The new
* rows will contain null values. Notification of the row being added will
* be generated.
*
* @param count
* - rows' count
* @param index
* - the row index of the rows to be inserted
* @param rowType
* - the row's type
*/
public void addRows(int count, int index, int rowType) {
try {
pushUndo(new BackupItem(this, UndoItem.ADD_ROWS));
} catch (Throwable e) {
Utils.showError(e);
}
int row = getTemplateModel().addRows(count, index, rowType);
if (row >= 0)
setSelectedRect(new GridRect(row, 0, row + count - 1,
getColumnCount() - 1));
else
getSelectionModel().clearSelection();
}
/**
* Sets type of the selected rows
*
* @param rows
* - the selected rows
* @param rowType
* - the row's type
*/
public void setRowType(int[] rows, int rowType) {
if (rows.length == 0) {
return;
}
try {
pushUndo(new BackupItem(this, UndoItem.CHANGE_ROWTYPE));
} catch (Throwable e) {
Utils.showError(e);
}
getTemplateModel().setRowType(addSpanedRows(rows), rowType);
getRowHeader().repaint();
repaint();
}
private int[] addSpanedRows(int[] rows) {
int l = rows.length;
for (int i = 0; i < rows.length; i++) {
TableRow tableRow = getReportModel().getRowModel().getRow(rows[i]);
for (Cell cell : tableRow) {
if (cell.isSpan() && i + cell.getRowSpan() >= rows.length) {
l = i + cell.getRowSpan() + 1;
}
}
}
if (l > rows.length) {
int[] newrows = new int[l];
System.arraycopy(rows, 0, newrows, 0, rows.length);
for (int i = rows.length; i < newrows.length; i++) {
newrows[i] = newrows[i - 1] + 1;
}
return newrows;
}
return rows;
}
/**
* Inserts detail group
*
*/
public void insertDetailGroup() {
try {
pushUndo(new BackupItem(this, UndoItem.ADD_GROUP));
} catch (Throwable e) {
Utils.showError(e);
}
int row = getTemplateModel().insertDetailGroup(getSelectedRow());
if (row >= 0) {
setSelectedRect(new GridRect(row, 0, row + 2, getColumnCount() - 1));
} else {
getSelectionModel().clearSelection();
}
}
/**
* Sets total functions for the selected cells between CellObject.AF_NONE
* and CellObject.AF_AVG
*
* @param kind
* - the total functions kind
*/
public void setAgrFunc(int kind) {
GridRect selectionRect = getSelectionRect();
if (selectionRect == null)
return;
pushUndo(new CellUndoItem(this, UndoItem.TOTAL_FUNCTION));
CellObject cell = (CellObject) getSelectedCell();
if (cell.getTotalFunction() == kind)
kind = CellObject.AF_NONE;
Iterator<Cell> it = getReportModel().getSelectedCells(selectionRect);
while (it.hasNext()) {
((CellObject) it.next()).setTotalFunction(kind);
}
repaint();
}
@Override
public String getToolTipText(MouseEvent event) {
int row = getReportModel().getRowModel().getRowIndexAtY(
event.getPoint().y);
int column = getColumnModel().getColumnIndexAtX(event.getPoint().x);
String result = getReportModel().getToolTipText(row, column);
if (!"".equals(result))
return result;
return super.getToolTipText(event);
}
protected ClipboardParser createClipboardWriter() {
return new TemplateClipboardParser();
}
protected DefaultHandler createPasteHandler(XMLReader reader,
int selectRow, int selectCol) {
return new TemplateFragmentHandler(getTemplateModel(), reader,
selectRow, selectCol);
}
public GridParser createGridWriter() {
return new TemplateGridParser();
}
public DefaultHandler createGridHandler(XMLReader reader) {
return new TemplateGridHandler(getReportModel(), reader);
}
public void setNotRepeate() {
GridRect selectionRect = getSelectionRect();
if (selectionRect == null)
return;
pushUndo(new CellUndoItem(this, UndoItem.NOT_REPEATE));
CellObject cell = (CellObject) getSelectedCell();
boolean notrepeate = !cell.isNotRepeat();
Iterator<Cell> it = getReportModel().getSelectedCells(selectionRect);
while (it.hasNext()) {
((CellObject) it.next()).setNotRepeat(notrepeate);
}
}
@Override
protected CellPropertiesDlg createCellProperties() throws HeadlessException {
Window w = SwingUtilities.getWindowAncestor(this);
if (w instanceof Frame) {
return new TemplCellPropertiesDlg((Frame) w, this);
} else {
return new TemplCellPropertiesDlg((Dialog) w, this);
}
}
public void propertyChange(PropertyChangeEvent evt) {
if (evt.getSource() == getReportModel()) {
if (evt.getPropertyName().equals("colSizing")) { //$NON-NLS-1$
return;
} else if (evt.getPropertyName().equals("colMoving")) { //$NON-NLS-1$
return;
}
}
super.propertyChange(evt);
}
protected static class TemplateReportRenderer extends TextReportRenderer {
private static final long serialVersionUID = 1L;
public TemplateReportRenderer() {
super();
}
public Component getTableCellRendererComponent(JTable table,
Object value, boolean isSelected, boolean hasFocus, int row,
int column) {
super.getTableCellRendererComponent(table, value, isSelected,
hasFocus, row, column);
CellObject cellObject = (CellObject) cell;
setSelectionStart(0);
setSelectionEnd(0);
if (cellObject.getFunctionName() != null) {
insertIcon(TemplateGrid.getFunctionIcon());
}
if (cellObject.getTotalFunction() != CellObject.AF_NONE) {
insertIcon(TemplateGrid.getTotalIcon());
}
return this;
}
}
}
| |
/*
* Copyright 2010 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import static com.google.common.truth.Truth.assertThat;
import static com.google.common.truth.Truth.assertWithMessage;
import static com.google.javascript.rhino.testing.NodeSubject.assertNode;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.MultimapBuilder;
import com.google.javascript.jscomp.CompilerOptions.AliasTransformation;
import com.google.javascript.jscomp.CompilerOptions.AliasTransformationHandler;
import com.google.javascript.jscomp.ScopedAliases.InvalidModuleGetHandling;
import com.google.javascript.rhino.JSDocInfo;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.SourcePosition;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/**
* Tests for {@link ScopedAliases}
*
*/
@RunWith(JUnit4.class)
public final class ScopedAliasesTest extends CompilerTestCase {
// Boilerplate wrapped around every testScoped(...) snippet.
private static final String GOOG_SCOPE_START_BLOCK =
    "goog.scope(function() {";
private static final String GOOG_SCOPE_END_BLOCK = "});";
// Preamble emitted when scoped locals are hoisted onto $jscomp.scope.
private static final String SCOPE_NAMESPACE =
    "/** @const */ var $jscomp = $jscomp || {}; /** @const */ $jscomp.scope = {};";
private static final String EXTERNS = lines(
    MINIMAL_EXTERNS,
    "var window;");
private InvalidModuleGetHandling invalidModuleGetHandling;
private AliasTransformationHandler transformationHandler =
    CompilerOptions.NULL_ALIAS_TRANSFORMATION_HANDLER;
public ScopedAliasesTest() {
  super(EXTERNS);
}
@Override
@Before
public void setUp() throws Exception {
  super.setUp();
  // Type checking is opted into per-test; by default only verify that the
  // output still type-checks after the pass runs.
  disableTypeCheck();
  enableRunTypeCheckAfterProcessing();
  enableCreateModuleMap();
  this.invalidModuleGetHandling = InvalidModuleGetHandling.DELETE;
}
/**
 * Wraps {@code code} in a goog.scope(function() {...}) block and checks the
 * pass rewrites it to {@code expected}, then runs any extra postconditions.
 */
private void testScoped(String code, String expected, Postcondition... postconditions) {
  List<TestPart> parts = new ArrayList<>();
  parts.add(srcs(GOOG_SCOPE_START_BLOCK + code + GOOG_SCOPE_END_BLOCK));
  parts.add(expected(expected));
  parts.addAll(Arrays.asList(postconditions));
  test(parts.toArray(new TestPart[0]));
}
// Asserts the pass leaves {@code code} untouched when run with the given
// alias declarations in scope.
private void testScopedNoChanges(String aliases, String code, Postcondition... postconditions) {
  testScoped(aliases + code, code, postconditions);
}
// Postcondition that re-runs the type-verification pass over the output.
private static final Postcondition VERIFY_TYPES =
    (Compiler compiler) ->
        new TypeVerifyingPass(compiler).process(compiler.externsRoot, compiler.jsRoot);
// --- Basic alias expansion: var/let/const aliases, transitivity, closures ---
@Test
public void testLet() {
  testScoped(
      "let d = goog.dom; d.createElement(DIV);",
      "goog.dom.createElement(DIV)");
}
@Test
public void testConst() {
  testScoped(
      "const d = goog.dom; d.createElement(DIV);",
      "goog.dom.createElement(DIV)");
}
@Test
public void testOneLevel() {
  testScoped("var g = goog;g.dom.createElement(g.dom.TagName.DIV);",
      "goog.dom.createElement(goog.dom.TagName.DIV);");
}
@Test
public void testTwoLevel() {
  testScoped("var d = goog.dom;d.createElement(d.TagName.DIV);",
      "goog.dom.createElement(goog.dom.TagName.DIV);");
}
@Test
public void testSourceInfo() {
  testScoped("var d = dom;\n" +
      "var e = event;\n" +
      "alert(e.EventType.MOUSEUP);\n" +
      "alert(d.TagName.DIV);\n",
      "alert(event.EventType.MOUSEUP); alert(dom.TagName.DIV);");
  // The rewritten nodes must keep the source position of the *use*, not of
  // the alias declaration.
  Node root = getLastCompiler().getRoot();
  Node dom = findQualifiedNameNode("dom", root);
  Node event = findQualifiedNameNode("event", root);
  assertWithMessage("Dom line should be after event line.")
      .that(dom.getLineno())
      .isGreaterThan(event.getLineno());
}
@Test
public void testTransitive() {
  testScoped("var d = goog.dom;var DIV = d.TagName.DIV;d.createElement(DIV);",
      "goog.dom.createElement(goog.dom.TagName.DIV);");
}
@Test
public void testTransitiveInSameVar() {
  testScoped("var d = goog.dom, DIV = d.TagName.DIV;d.createElement(DIV);",
      "goog.dom.createElement(goog.dom.TagName.DIV);");
}
@Test
public void testMultipleTransitive() {
  testScoped(
      "var g=goog;var d=g.dom;var t=d.TagName;var DIV=t.DIV;" +
      "d.createElement(DIV);",
      "goog.dom.createElement(goog.dom.TagName.DIV);");
}
@Test
public void testFourLevel() {
  testScoped("var DIV = goog.dom.TagName.DIV;goog.dom.createElement(DIV);",
      "goog.dom.createElement(goog.dom.TagName.DIV);");
}
@Test
public void testWorksInClosures() {
  testScoped(
      "var DIV = goog.dom.TagName.DIV;" +
      "goog.x = function() {goog.dom.createElement(DIV);};",
      "goog.x = function() {goog.dom.createElement(goog.dom.TagName.DIV);};");
}
@Test
public void testOverridden() {
  // Test that the alias doesn't get unaliased when it's overridden by a
  // parameter.
  testScopedNoChanges(
      "var g = goog;", "goog.x = function(g) {g.z()};");
  // Same for a local.
  testScopedNoChanges(
      "var g = goog;", "goog.x = function() {var g = {}; g.z()};");
}
@Test
public void testTwoScopes() {
  test(
      "goog.scope(function() {var g = goog;g.method()});" +
      "goog.scope(function() {g.method();});",
      "goog.method();g.method();");
}
// --- Multiple scopes and alias/variable shadowing ---
@Test
public void testTwoSymbolsInTwoScopes() {
  test(
      "var goog = {};" +
      "goog.scope(function() { var g = goog; g.Foo = function() {}; });" +
      "goog.scope(function() { " +
      "  var Foo = goog.Foo; goog.bar = function() { return new Foo(); };" +
      "});",
      "var goog = {};" +
      "goog.Foo = function() {};" +
      "goog.bar = function() { return new goog.Foo(); };");
}
@Test
public void testAliasOfSymbolInGoogScope() {
  test(
      "var goog = {};" +
      "goog.scope(function() {" +
      "  var g = goog;" +
      "  g.Foo = function() {};" +
      "  var Foo = g.Foo;" +
      "  Foo.prototype.bar = function() {};" +
      "});",
      "var goog = {}; goog.Foo = function() {};" +
      "goog.Foo.prototype.bar = function() {};");
}
@Test
public void testScopedFunctionReturnThis() {
  test("goog.scope(function() { " +
      "  var g = goog; g.f = function() { return this; };" +
      "});",
      "goog.f = function() { return this; };");
}
@Test
public void testScopedFunctionAssignsToVar() {
  test("goog.scope(function() { " +
      "  var g = goog; g.f = function(x) { x = 3; return x; };" +
      "});",
      "goog.f = function(x) { x = 3; return x; };");
}
@Test
public void testScopedFunctionThrows() {
  test("goog.scope(function() { " +
      "  var g = goog; g.f = function() { throw 'error'; };" +
      "});",
      "goog.f = function() { throw 'error'; };");
}
@Test
public void testPropertiesNotChanged() {
  testScopedNoChanges("var x = goog.dom;", "y.x();");
}
@Test
public void testShadowedVar() {
  test("var Popup = {};" +
      "var OtherPopup = {};" +
      "goog.scope(function() {" +
      "  var Popup = OtherPopup;" +
      "  Popup.newMethod = function() { return new Popup(); };" +
      "});",
      "var Popup = {};" +
      "var OtherPopup = {};" +
      "OtherPopup.newMethod = function() { return new OtherPopup(); };");
}
@Test
public void testShadowedScopedVar() {
  test("var goog = {};" +
      "goog.bar = {};" +
      "goog.scope(function() {" +
      "  var bar = goog.bar;" +
      // This is bogus, because when the aliases are expanded, goog will
      // shadow goog.bar.
      "  bar.newMethod = function(goog) { return goog + bar; };" +
      "});",
      "var goog={};" +
      "goog.bar={};" +
      "goog.bar.newMethod=function(goog$jscomp$1){return goog$jscomp$1 + goog.bar}");
}
@Test
public void testShadowedScopedVarTwoScopes() {
  test("var goog = {};" +
      "goog.bar = {};" +
      "goog.scope(function() {" +
      "  var bar = goog.bar;" +
      "  bar.newMethod = function(goog, a) { return bar + a; };" +
      "});" +
      "goog.scope(function() {" +
      "  var bar = goog.bar;" +
      "  bar.newMethod2 = function(goog, b) { return bar + b; };" +
      "});",
      "var goog={};" +
      "goog.bar={};" +
      "goog.bar.newMethod=function(goog$jscomp$1, a){return goog.bar + a};" +
      "goog.bar.newMethod2=function(goog$jscomp$1, b){return goog.bar + b};");
}
// --- Non-alias locals hoisted onto $jscomp.scope; ES6 constructs ---
@Test
public void testFunctionDeclarationInScope() {
  testScoped("var foo = function() {};", SCOPE_NAMESPACE + "$jscomp.scope.foo = function() {};");
}
@Test
public void testFunctionDeclarationInScope_letConst() {
  testScoped(
      "var baz = goog.bar; let foo = function() {return baz;};",
      SCOPE_NAMESPACE + "$jscomp.scope.foo = function() {return goog.bar;};");
  testScoped(
      "var baz = goog.bar; const foo = function() {return baz;};",
      SCOPE_NAMESPACE + "$jscomp.scope.foo = function() {return goog.bar;};");
}
@Test
public void testLetConstShadowing() {
  testScoped(
      "var foo = goog.bar; var f = function() {" + "let foo = baz; return foo;};",
      SCOPE_NAMESPACE + "$jscomp.scope.f = function() {" + "let foo = baz; return foo;};");
  testScoped(
      "var foo = goog.bar; var f = function() {" + "const foo = baz; return foo;};",
      SCOPE_NAMESPACE + "$jscomp.scope.f = function() {" + "const foo = baz; return foo;};");
}
@Test
public void testYieldExpression() {
  testScoped(
      "var foo = goog.bar; var f = function*() {yield foo;};",
      SCOPE_NAMESPACE + "$jscomp.scope.f = function*() {yield goog.bar;};");
}
// NOTE(review): "Descructuring" in the next two test names is a typo for
// "Destructuring"; kept as-is since renaming public test methods is churn.
@Test
public void testDestructuringError() {
  testScopedError("var [x] = [1];",
      ScopedAliases.GOOG_SCOPE_NON_ALIAS_LOCAL);
}
@Test
public void testObjectDescructuringError1() {
  testScopedError("var {x} = {x: 1};",
      ScopedAliases.GOOG_SCOPE_NON_ALIAS_LOCAL);
}
@Test
public void testObjectDescructuringError2() {
  testScopedError("var {x: y} = {x: 1};",
      ScopedAliases.GOOG_SCOPE_NON_ALIAS_LOCAL);
}
@Test
public void testNonTopLevelDestructuring() {
  testScoped(
      "var f = function() {var [x, y] = [1, 2];};",
      SCOPE_NAMESPACE + "$jscomp.scope.f = function() {var [x, y] = [1, 2];};");
}
@Test
public void testArrowFunction() {
  testScoped(
      "var foo = goog.bar; var v = (x => x + foo);",
      SCOPE_NAMESPACE + "$jscomp.scope.v = (x => x + goog.bar)");
}
@Test
public void testClassDefinition1() {
  testScoped(
      "class Foo {}",
      SCOPE_NAMESPACE + "$jscomp.scope.Foo=class{}");
}
@Test
public void testClassDefinition2() {
  testScoped(
      "var bar = goog.bar;" + "class Foo { constructor() { this.x = bar; }}",
      SCOPE_NAMESPACE + "$jscomp.scope.Foo = class { constructor() { this.x = goog.bar; } }");
}
@Test
public void testClassDefinition3() {
  testScoped(
      "var bar = {};" + "bar.Foo = class {};",
      SCOPE_NAMESPACE + "$jscomp.scope.bar = {}; $jscomp.scope.bar.Foo = class {}");
}
@Test
public void testClassDefinition_letConst() {
  testScoped(
      "let bar = {};" + "bar.Foo = class {};",
      SCOPE_NAMESPACE + "$jscomp.scope.bar = {}; $jscomp.scope.bar.Foo = class {}");
  testScoped(
      "const bar = {};" + "bar.Foo = class {};",
      SCOPE_NAMESPACE + "$jscomp.scope.bar = {}; $jscomp.scope.bar.Foo = class {}");
}
@Test
public void testDefaultParameter() {
  testScoped(
      "var foo = goog.bar; var f = function(y=foo) {};",
      SCOPE_NAMESPACE + "$jscomp.scope.f = function(y=goog.bar) {};");
}
/**
 * Make sure we don't hit an IllegalStateException for this case.
 *
 * @see https://github.com/google/closure-compiler/issues/400
 */
@Test
public void testObjectLiteral() {
  testScoped(
      lines(
          "var Foo = goog.Foo;",
          "goog.x = {",
          "  /** @param {Foo} foo */",
          "  y: function(foo) { }",
          "};"),
      lines(
          "goog.x = {",
          "  /** @param {goog.Foo} foo */",
          "  y: function(foo) {}",
          "};"));
  testScoped(
      lines(
          "var Foo = goog.Foo;",
          "goog.x = {",
          "  y: /** @param {Foo} foo */ function(foo) {}",
          "};"),
      lines(
          "goog.x = {",
          "  y: /** @param {goog.Foo} foo */ function(foo) {}",
          "};"));
  testScoped(
      lines(
          "var Foo = goog.Foo;",
          "goog.x = {",
          "  y: /** @type {function(Foo)} */ (function(foo) {})",
          "};"),
      lines(
          "goog.x = {",
          "  y: /** @type {function(goog.Foo)} */ (function(foo) {})",
          "};"));
}
@Test
public void testObjectLiteralShorthand() {
  // Shorthand {bar} must expand to {bar: goog.bar} when bar is an alias.
  testScoped(
      "var bar = goog.bar; var Foo = {bar};",
      SCOPE_NAMESPACE + "$jscomp.scope.Foo={bar: goog.bar};");
}
@Test
public void testObjectLiteralMethods() {
  testScoped(
      "var foo = goog.bar; var obj = {toString() {return foo}};",
      SCOPE_NAMESPACE + "$jscomp.scope.obj = {toString() {return goog.bar}};");
}
@Test
public void testObjectLiteralComputedPropertyNames() {
  testScoped(
      "var foo = goog.bar; var obj = {[(() => foo)()]: baz};",
      SCOPE_NAMESPACE + "$jscomp.scope.obj = {[(() => goog.bar)()]:baz};");
  testScoped(
      "var foo = goog.bar; var obj = {[x => x + foo]: baz};",
      SCOPE_NAMESPACE + "$jscomp.scope.obj = {[x => x + goog.bar]:baz};");
}
// --- JSDoc handling and deliberate scope-escaping misuse ---
@Test
public void testJsDocNotIgnored() {
  enableTypeCheck();
  String externs =
      lines(
          MINIMAL_EXTERNS,
          "/** @const */ var ns = {};",
          "/** @constructor */",
          "ns.Foo;",
          "",
          "var goog;",
          "/** @param {function()} fn */",
          "goog.scope = function(fn) {}");
  String js =
      lines(
          "goog.scope(function() {",
          "  var Foo = ns.Foo;",
          "  var x = {",
          "    /** @param {Foo} foo */ y: function(foo) {}",
          "  };",
          "  x.y('');",
          "});");
  // Passing '' where a Foo is expected must still warn — the JSDoc on the
  // object-literal member is not dropped by the rewrite.
  test(
      externs(externs),
      srcs(js),
      warning(TypeValidator.TYPE_MISMATCH_WARNING));
  js =
      lines(
          "goog.scope(function() {",
          "  var Foo = ns.Foo;",
          "  var x = {",
          "    y: /** @param {Foo} foo */ function(foo) {}",
          "  };",
          "  x.y('');",
          "});");
  test(
      externs(externs),
      srcs(js),
      warning(TypeValidator.TYPE_MISMATCH_WARNING));
}
@Test
public void testUsingObjectLiteralToEscapeScoping() {
  // There are many ways to shoot yourself in the foot with goog.scope
  // and make the compiler generate bad code. We generally don't care.
  //
  // We only try to protect against accidental mis-use, not deliberate
  // mis-use.
  test(
      "var goog = {};" +
      "goog.bar = {};" +
      "goog.scope(function() {" +
      "  var bar = goog.bar;" +
      "  var baz = goog.bar.baz;" +
      "  goog.foo = function() {" +
      "    goog.bar = {baz: 3};" +
      "    return baz;" +
      "  };" +
      "});",
      "var goog = {};" +
      "goog.bar = {};" +
      "goog.foo = function(){" +
      "  goog.bar = {baz:3};" +
      "  return goog.bar.baz;" +
      "};");
}
// Runs the no-changes test plus the type-verification postcondition.
private void testTypes(String aliases, String code) {
  testScopedNoChanges(aliases, code, VERIFY_TYPES);
}
// --- Alias expansion inside every flavour of JSDoc type annotation ---
@Test
public void testJsDocType() {
  testTypes(
      "var x = goog.Timer;",
      lines(
          "/** @type {goog.Timer} */ types.actual;",
          "/** @type {goog.Timer} */ types.expected;"));
}
@Test
public void testJsDocParameter() {
  testTypes(
      "var x = goog.Timer;",
      lines(
          "/** @param {goog.Timer} a */ types.actual;",
          "/** @param {goog.Timer} a */ types.expected;"));
}
@Test
public void testJsDocExtends() {
  testTypes(
      "var x = goog.Timer;",
      lines(
          "/** @extends {goog.Timer} */ types.actual;",
          "/** @extends {goog.Timer} */ types.expected;"));
}
@Test
public void testJsDocImplements() {
  testTypes(
      "var x = goog.Timer;",
      lines(
          "/** @implements {goog.Timer} */ types.actual;",
          "/** @implements {goog.Timer} */ types.expected;"));
}
@Test
public void testJsDocEnum() {
  testTypes(
      "var x = goog.Timer;",
      lines(
          "",
          "/** @enum {goog.Timer} */ types.actual;",
          "/** @enum {goog.Timer} */ types.expected;"));
}
@Test
public void testJsDocReturn() {
  testTypes(
      "var x = goog.Timer;",
      lines(
          "/** @return {goog.Timer} */ types.actual;",
          "/** @return {goog.Timer} */ types.expected;"));
}
@Test
public void testJsDocThis() {
  testTypes(
      "var x = goog.Timer;",
      lines(
          "/** @this {goog.Timer} */ types.actual;",
          "/** @this {goog.Timer} */ types.expected;"));
}
@Test
public void testJsDocThrows() {
  testTypes(
      "var x = goog.Timer;",
      lines(
          "/** @throws {goog.Timer} */ types.actual;",
          "/** @throws {goog.Timer} */ types.expected;"));
}
@Test
public void testJsDocSubType() {
  testTypes(
      "var x = goog.Timer;",
      lines(
          "/** @type {goog.Timer.Enum} */ types.actual;",
          "/** @type {goog.Timer.Enum} */ types.expected;"));
}
@Test
public void testJsDocTypedef() {
  testTypes(
      "var x = goog.Timer;",
      lines(
          "/** @typedef {goog.Timer} */ types.actual;",
          "/** @typedef {goog.Timer} */ types.expected;"));
  testScoped(
      lines(
          "/** @typedef {string} */ var s;",
          "/** @type {s} */ var t;"),
      lines(
          SCOPE_NAMESPACE,
          "/** @typedef {string} */ $jscomp.scope.s;",
          "/** @type {$jscomp.scope.s} */ $jscomp.scope.t;"));
  testScoped(
      lines("/** @typedef {string} */ let s;", "/** @type {s} */ var t;"),
      lines(
          SCOPE_NAMESPACE,
          "/** @typedef {string} */ $jscomp.scope.s;",
          "/** @type {$jscomp.scope.s} */ $jscomp.scope.t;"));
}
@Test
public void testJsDocRecord() {
  enableTypeCheck();
  // 'goog.Timer' inside a string key must NOT be expanded — it is data,
  // not a type reference; the mismatch warning comes from the record type.
  test(
      lines(
          "/** @const */ var ns = {};",
          "goog.scope(function () {",
          "  var x = goog.Timer;",
          "  /** @type {{x: string}} */ ns.y = {'goog.Timer': 'x'};",
          "});"),
      lines(
          "/** @const */ var ns = {};",
          "/** @type {{x: string}} */ ns.y = {'goog.Timer': 'x'};"),
      warning(TypeValidator.TYPE_MISMATCH_WARNING));
}
@Test
public void testArrayJsDoc() {
  testTypes(
      "var x = goog.Timer;",
      lines(
          "/** @type {Array.<goog.Timer>} */ types.actual;",
          "/** @type {Array.<goog.Timer>} */ types.expected;"));
}
@Test
public void testObjectJsDoc() {
  testTypes(
      "var x = goog.Timer;",
      lines(
          "/** @type {{someKey: goog.Timer}} */ types.actual;",
          "/** @type {{someKey: goog.Timer}} */ types.expected;"));
  testTypes(
      "var x = goog.Timer;",
      lines(
          "/** @type {{x: number}} */ types.actual;",
          "/** @type {{x: number}} */ types.expected;"));
}
@Test
public void testObjectJsDoc2() {
  testTypes(
      "var x = goog$Timer;",
      lines(
          "/** @type {{someKey: goog$Timer}} */ types.actual;",
          "/** @type {{someKey: goog$Timer}} */ types.expected;"));
}
@Test
public void testUnionJsDoc() {
  testTypes(
      "var x = goog.Timer;",
      ""
          + "/** @type {goog.Timer|Object} */ types.actual;"
          + "/** @type {goog.Timer|Object} */ types.expected;");
}
@Test
public void testFunctionJsDoc() {
  testTypes(
      "var x = goog.Timer;",
      ""
          + "/** @type {function(goog.Timer) : void} */ types.actual;"
          + "/** @type {function(goog.Timer) : void} */ types.expected;");
  testTypes(
      "var x = goog.Timer;",
      ""
          + "/** @type {function() : goog.Timer} */ types.actual;"
          + "/** @type {function() : goog.Timer} */ types.expected;");
}
@Test
public void testForwardJsDoc() {
  // An alias used in JSDoc before its declaration line must still expand.
  testScoped(
      lines(
          "/**",
          " * @constructor",
          " */",
          "foo.Foo = function() {};",
          "/** @param {Foo.Bar} x */ foo.Foo.actual = function(x) {3};",
          "var Foo = foo.Foo;",
          "/** @constructor */ Foo.Bar = function() {};",
          "/** @param {foo.Foo.Bar} x */ foo.Foo.expected = function(x) {};"),
      lines(
          "/**",
          " * @constructor",
          " */",
          "foo.Foo = function() {};",
          "/** @param {foo.Foo.Bar} x */ foo.Foo.actual = function(x) {3};",
          "/** @constructor */ foo.Foo.Bar = function() {};",
          "/** @param {foo.Foo.Bar} x */ foo.Foo.expected = function(x) {};"),
      VERIFY_TYPES);
}
@Test
public void testTestTypes() {
  // Meta-test: the testTypes harness itself must fail on mismatched types.
  try {
    testTypes(
        "var x = goog.Timer;",
        ""
            + "/** @type {function() : x} */ types.actual;"
            + "/** @type {function() : wrong.wrong} */ types.expected;");
    throw new Error("Test types should fail here.");
  } catch (AssertionError expected) {
    // Expected: the harness flagged the deliberate mismatch.
  }
}
@Test
public void testNullType() {
  testTypes(
      "var x = goog.Timer;",
      "/** @param draggable */ types.actual;"
          + "/** @param draggable */ types.expected;");
}
@Test
public void testJSDocCopiedForFunctions() {
  testScoped(
      "/** @export */ function Foo() {}",
      SCOPE_NAMESPACE + "/** @export */ $jscomp.scope.Foo =/** @export */ function() {}");
}
@Test
public void testJSDocCopiedForClasses() {
  testScoped(
      "/** @export */ class Foo {}",
      SCOPE_NAMESPACE + "/** @export */ $jscomp.scope.Foo = /** @export */ class {}");
}
@Test
public void testIssue772() {
  testTypes(
      "var b = a.b;" +
      "var c = b.c;",
      "/** @param {a.b.c.MyType} x */ types.actual;" +
      "/** @param {a.b.c.MyType} x */ types.expected;");
}
@Test
public void testInlineJsDoc() {
  enableTypeCheck();
  // Inline /** ?A */ annotations on vars must be rewritten too.
  test(
      srcs(lines(
          "/** @const */ var ns = {};",
          "/** @constructor */ ns.A = function() {};",
          "goog.scope(function() {",
          "  /** @const */ var A = ns.A;",
          "  var /** ?A */ b = null;",
          "});")),
      expected(lines(
          "/** @const */ var $jscomp = $jscomp || {};",
          "/** @const */ $jscomp.scope = {};",
          "/** @const */ var ns = {};",
          "/** @constructor */ ns.A = function() {};",
          "/** @type {?ns.A} */ $jscomp.scope.b = null;")),
      VERIFY_TYPES);
}
@Test
public void testInlineReturn() {
  enableTypeCheck();
  test(
      srcs(lines(
          "/** @const */ var ns = {};",
          "/** @constructor */ ns.A = function() {};",
          "goog.scope(function() {",
          "  /** @const */ var A = ns.A;",
          "  function /** ?A */ b() { return null; }",
          "});")),
      expected(lines(
          "/** @const */ var $jscomp = $jscomp || {};",
          "/** @const */ $jscomp.scope = {};",
          "/** @const */ var ns = {};",
          "/** @constructor */ ns.A = function() {};",
          // TODO(moz): See if we can avoid generating duplicate @return's
          "/** @return {?ns.A} */ $jscomp.scope.b = ",
          "    /** @return {?ns.A} */ function() { return null; };")),
      VERIFY_TYPES);
}
@Test
public void testInlineParam() {
  enableTypeCheck();
  test(
      srcs(lines(
          "/** @const */ var ns = {};",
          "/** @constructor */ ns.A = function() {};",
          "goog.scope(function() {",
          "  /** @const */ var A = ns.A;",
          "  function b(/** ?A */ bee) {}",
          "});")),
      expected(lines(
          "/** @const */ var $jscomp = $jscomp || {};",
          "/** @const */ $jscomp.scope = {};",
          "/** @const */ var ns = {};",
          "/** @constructor */ ns.A = function() {};",
          "$jscomp.scope.b = function(/** ?ns.A */ bee) {};")),
      VERIFY_TYPES);
}
// TODO(robbyw): What if it's recursive? var goog = goog.dom;
// FAILURE CASES
/** Expects {@code expectedError} when {@code code} runs inside goog.scope. */
private void testScopedError(String code, DiagnosticType expectedError) {
  String wrapped = "goog.scope(function() {" + code + "});";
  testError(wrapped, expectedError);
}
// `this` must not be referenced inside a goog.scope body.
@Test
public void testScopedThis() {
testScopedError("this.y = 10;", ScopedAliases.GOOG_SCOPE_REFERENCES_THIS);
testScopedError("var x = this;", ScopedAliases.GOOG_SCOPE_REFERENCES_THIS);
testScopedError("fn(this);", ScopedAliases.GOOG_SCOPE_REFERENCES_THIS);
}
// Reassigning an alias after its declaration is an error.
@Test
public void testAliasRedefinition() {
testScopedError("var x = goog.dom; x = goog.events;", ScopedAliases.GOOG_SCOPE_ALIAS_REDEFINED);
}
// Assigning TO a qualified name (not declaring an alias) is allowed.
@Test
public void testAliasNonRedefinition() {
test("var y = {}; goog.scope(function() { goog.dom = y; });",
"var y = {}; goog.dom = y;");
}
// An alias of a property defined inside the same scope body is resolved correctly.
@Test
public void testCtorAlias() {
test("var x = {y: {}};" +
"goog.scope(function() {" +
" var y = x.y;" +
" y.ClassA = function() { this.b = new ClassB(); };" +
" y.ClassB = function() {};" +
" var ClassB = y.ClassB;" +
"});",
"var x = {y: {}};" +
"x.y.ClassA = function() { this.b = new x.y.ClassB(); };" +
"x.y.ClassB = function() { };");
}
// Mutually-referential alias declarations must be rejected, not looped on.
@Test
public void testAliasCycle() {
testError("var x = {y: {}};" +
"goog.scope(function() {" +
" var y = z.x;" +
" var z = y.x;" +
" y.ClassA = function() {};" +
" z.ClassB = function() {};" +
"});",
ScopedAliases.GOOG_SCOPE_ALIAS_CYCLE);
}
// return/throw statements are not permitted directly in a goog.scope body.
@Test
public void testScopedReturn() {
testScopedError("return;", ScopedAliases.GOOG_SCOPE_USES_RETURN);
testScopedError("var x = goog.dom; return;", ScopedAliases.GOOG_SCOPE_USES_RETURN);
}
@Test
public void testScopedThrow() {
testScopedError("throw 'error';", ScopedAliases.GOOG_SCOPE_USES_THROW);
}
// goog.scope must be a bare statement in the global scope.
@Test
public void testUsedImproperly() {
testError("var x = goog.scope(function() {});", ScopedAliases.GOOG_SCOPE_MUST_BE_ALONE);
testError("var f = function() { goog.scope(function() {}); }",
ScopedAliases.GOOG_SCOPE_MUST_BE_IN_GLOBAL_SCOPE);
}
// A goog.scope call nested in an if-block is still unwrapped in place.
@Test
public void testScopeCallInIf() {
test("if (true) { goog.scope(function() {});}", "if (true) {}");
test("if (true) { goog.scope(function() { var x = foo; });}", "if (true) { }");
test("if (true) { goog.scope(function() { var x = foo; console.log(x); });}",
"if (true) { console.log(foo); }");
}
// goog.scope accepts exactly one anonymous, parameterless function literal.
@Test
public void testBadParameters() {
testError("goog.scope()", ScopedAliases.GOOG_SCOPE_HAS_BAD_PARAMETERS);
testError("goog.scope(10)", ScopedAliases.GOOG_SCOPE_HAS_BAD_PARAMETERS);
testError("goog.scope(function() {}, 10)", ScopedAliases.GOOG_SCOPE_HAS_BAD_PARAMETERS);
testError("goog.scope(function z() {})", ScopedAliases.GOOG_SCOPE_HAS_BAD_PARAMETERS);
testError("goog.scope(function(a, b, c) {})", ScopedAliases.GOOG_SCOPE_HAS_BAD_PARAMETERS);
}
// Loop variables are locals that are not aliases — disallowed at scope top level.
@Test
public void testNonAliasLocal() {
testScopedError("for (var k in { a: 1, b: 2 }) {}", ScopedAliases.GOOG_SCOPE_NON_ALIAS_LOCAL);
testScopedError("for (var k of [1, 2, 3]) {}", ScopedAliases.GOOG_SCOPE_NON_ALIAS_LOCAL);
}
// Function declarations inside nested blocks of a scope body are invalid.
@Test
public void testInvalidVariableInScope() {
testScopedError("if (true) { function f() {}}", ScopedAliases.GOOG_SCOPE_INVALID_VARIABLE);
testScopedError("for (;;) { function f() {}}", ScopedAliases.GOOG_SCOPE_INVALID_VARIABLE);
}
// Non-alias locals are hoisted onto $jscomp.scope even when try/catch is present.
@Test
public void testWithCatch1() {
testScoped(
"var x = foo(); try { } catch (e) {}",
SCOPE_NAMESPACE + "$jscomp.scope.x = foo(); try { } catch (e) {}");
}
@Test
public void testWithCatch2() {
testScoped(
"try { } catch (e) {var x = foo();}",
SCOPE_NAMESPACE + "try { } catch (e) {$jscomp.scope.x = foo();}");
}
// Block-scoped declarations inside a catch block are left untouched.
@Test
public void testVariablesInCatchBlock() {
testScopedNoChanges("", "try {} catch (e) {}");
testScopedNoChanges("", "try {} catch (e) { let x = foo }");
testScopedNoChanges("", "try {} catch (e) { const x = foo }");
}
// let/const inside nested blocks are genuinely block-scoped, so no rewrite occurs.
@Test
public void testLetConstInBlock() {
testScopedNoChanges("", "if (true) {let x = foo;}");
testScopedNoChanges("", "if (true) {const x = foo;}");
}
// A var alias declared inside an if-block is still treated as a scope-level alias.
@Test
public void testHoistedAliases() {
testScoped("if (true) { var x = foo;}", "if (true) {}");
testScoped("if (true) { var x = foo; console.log(x); }",
"if (true) { console.log(foo); }");
}
// Locals that are not aliases (literals, bracket access) move to $jscomp.scope.
@Test
public void testOkAliasLocal() {
testScoped("var x = 10;",
SCOPE_NAMESPACE + "$jscomp.scope.x = 10");
testScoped("var x = goog['dom'];",
SCOPE_NAMESPACE + "$jscomp.scope.x = goog['dom']");
testScoped("var x = 10, y = 9;",
SCOPE_NAMESPACE + "$jscomp.scope.x = 10; $jscomp.scope.y = 9;");
testScoped("var x = 10, y = 9; goog.getX = function () { return x + y; }",
SCOPE_NAMESPACE + "$jscomp.scope.x = 10; $jscomp.scope.y = 9;" +
"goog.getX = function () { " +
" return $jscomp.scope.x + $jscomp.scope.y; }");
}
// Same hoisting applies to let and const locals.
@Test
public void testOkAliasLocal_letConst() {
testScoped(
"let x = 10;", SCOPE_NAMESPACE + "$jscomp.scope.x = 10");
testScoped(
"const x = 10;", SCOPE_NAMESPACE + "$jscomp.scope.x = 10");
}
// A hoisted function declaration becomes a $jscomp.scope assignment placed first.
@Test
public void testHoistedFunctionDeclaration() {
testScoped(" g(f); function f() {} ",
SCOPE_NAMESPACE +
" $jscomp.scope.f = function () {}; " +
"g($jscomp.scope.f); ");
}
// Reassignment of any scope-level local is reported as an alias redefinition.
@Test
public void testAliasReassign() {
testScopedError("var x = 3; x = 5;", ScopedAliases.GOOG_SCOPE_ALIAS_REDEFINED);
}
// Same-named locals from different goog.scope calls get uniquified names.
@Test
public void testMultipleLocals() {
test("goog.scope(function () { var x = 3; });" +
"goog.scope(function () { var x = 4; });",
SCOPE_NAMESPACE + "$jscomp.scope.x = 3; $jscomp.scope.x$jscomp$1 = 4");
}
// An uninitialized local that is only assigned later is still rewritten (issue 1103).
@Test
public void testIssue1103a() {
test("goog.scope(function () {" +
" var a;" +
" foo.bar = function () { a = 1; };" +
"});",
SCOPE_NAMESPACE + "foo.bar = function () { $jscomp.scope.a = 1; }");
}
// In a mixed declaration, unused alias/uninitialized parts are dropped.
@Test
public void testIssue1103b() {
test("goog.scope(function () {" +
" var a = foo, b, c = 1;" +
"});",
SCOPE_NAMESPACE + "$jscomp.scope.c=1");
}
// JSDoc attached to a rewritten local declaration is preserved.
@Test
public void testIssue1103c() {
test("goog.scope(function () {" +
" /** @type {number} */ var a;" +
"});",
SCOPE_NAMESPACE + "/** @type {number} */ $jscomp.scope.a;");
}
// Aliases used inside JSDoc type annotations are expanded too (issue 1144).
@Test
public void testIssue1144() {
test("var ns = {};" +
"ns.sub = {};" +
"/** @constructor */ ns.sub.C = function () {};" +
"goog.scope(function () {" +
" var sub = ns.sub;" +
" /** @type {sub.C} */" +
" var x = null;" +
"});",
SCOPE_NAMESPACE +
"var ns = {};" +
"ns.sub = {};" +
"/** @constructor */ ns.sub.C = function () {};" +
"/** @type {ns.sub.C} */" +
"$jscomp.scope.x = null;");
}
// Named function expressions whose name shadows an outer var get a unique name.
// https://github.com/google/closure-compiler/issues/2211
@Test
public void testIssue2211() {
test(
lines(
"var ns = {};",
"var y = 1;",
"goog.scope(function () {",
" ns.fact = function y(n) {",
" return n == 1 ? 1 : n * y(n - 1);",
" };",
"});"),
lines(
"var ns = {};",
"var y = 1;",
"ns.fact = function y$jscomp$scopedAliases$0(n) {",
" return n == 1 ? 1 : n * y$jscomp$scopedAliases$0(n - 1);",
"};"));
}
// Same renaming works when a sibling function parameter also uses the name.
// https://github.com/google/closure-compiler/issues/2211
@Test
public void testIssue2211b() {
test(
lines(
"var ns = {};",
"var y = 1;",
"goog.scope(function () {",
" function x(y) {}",
" ns.fact = function y(n) {",
" return n == 1 ? 1 : n * y(n - 1);",
" };",
"});"),
lines(
SCOPE_NAMESPACE,
"var ns = {};",
"var y = 1;",
"$jscomp.scope.x = function (y) {};",
"ns.fact = function y$jscomp$scopedAliases$0(n) {",
" return n == 1 ? 1 : n * y$jscomp$scopedAliases$0(n - 1);",
"};"));
}
// Renaming also applies inside arrow-function bodies nested in the scope.
// https://github.com/google/closure-compiler/issues/2211
@Test
public void testIssue2211c() {
testScoped(
lines(
"foo(() => {",
" const y = function y() {",
" use(y);",
" };",
"});"),
lines(
"foo(() => {",
" const y = function y$jscomp$scopedAliases$0() {",
" use(y$jscomp$scopedAliases$0);",
" };",
"});"));
}
// A goog.module.get alias is inlined at its use sites, including in JSDoc.
@Test
public void testGoogModuleGet1() {
test(
lines(
"goog.provide('provided');",
"goog.provide('other.name.Foo');",
"goog.scope(function() {",
" var Foo = goog.module.get('other.name.Foo')",
" /** @type {!Foo} */",
" provided.f = new Foo;",
"});",
""),
lines(
"goog.provide('provided');",
"goog.provide('other.name.Foo');",
"/** @type {!other.name.Foo} */",
"provided.f = new (goog.module.get('other.name.Foo'));",
""));
}
// Chains of aliases rooted in a goog.module.get collapse to a single expression.
@Test
public void testGoogModuleGet2() {
test(
lines(
"goog.provide('foo.baz');",
"goog.provide('other.thing');",
"",
"goog.scope(function() {",
"",
"const a = goog.module.get('other.thing');",
"const b = a.b;",
"foo.baz = b",
"",
"}); // goog.scope"),
lines(
"goog.provide('foo.baz');",
"goog.provide('other.thing');",
"foo.baz = goog.module.get('other.thing').b;",
""));
}
// An alias of a property read off a goog.module.get is inlined the same way.
@Test
public void testGoogModuleGet3() {
test(
lines(
"goog.provide('foo.baz');",
"goog.provide('other.thing');",
"",
"goog.scope(function() {",
"",
"const a = goog.module.get('other.thing').b;",
"foo.baz = a",
"",
"}); // goog.scope"),
lines(
"goog.provide('foo.baz');",
"goog.provide('other.thing');",
"foo.baz = goog.module.get('other.thing').b;",
""));
}
// goog.module.get of a real goog.module in another file is still inlined.
@Test
public void testGoogModuleGet_ofModule() {
test(
srcs(
"goog.module('other.name.Foo');",
lines(
"goog.provide('provided');",
"goog.scope(function() {",
" var Foo = goog.module.get('other.name.Foo')",
" /** @type {!Foo} */",
" provided.f = new Foo;",
"});",
"")),
expected(
"goog.module('other.name.Foo');",
lines(
"goog.provide('provided');",
"/** @type {!other.name.Foo} */",
"provided.f = new (goog.module.get('other.name.Foo'));",
"")));
}
// With PRESERVE handling, a get of a nonexistent module is kept and inlined.
@Test
public void testGoogModuleGet_missing_noDeletion() {
invalidModuleGetHandling = InvalidModuleGetHandling.PRESERVE;
test(
lines(
"goog.provide('provided');",
"",
"goog.scope(function() {",
" var Foo = goog.module.get('missing.name.Foo');",
" /** @type {!Foo} */",
" provided.f = new Foo;",
"});",
""),
lines(
"goog.provide('provided');", //
"",
"/** @type {!missing.name.Foo} */",
"provided.f = new (goog.module.get('missing.name.Foo'));",
""));
}
// With the default handling, the invalid get is deleted; uses keep the alias name.
@Test
public void testGoogModuleGet_missing() {
test(
lines(
"goog.provide('provided');",
"",
"goog.scope(function() {",
" var Foo = goog.module.get('missing.name.Foo')",
" /** @type {!Foo} */",
" provided.f = new Foo;",
"});",
""),
lines(
"goog.provide('provided');", //
"",
"/** @type {!Foo} */",
"provided.f = new Foo();",
""));
}
// A deleted invalid get leaves a bare reference in value positions too.
@Test
public void testGoogModuleGet_missingUsedInRhs() {
test(
lines(
"goog.provide('provided');",
"",
"goog.scope(function() {",
" var Foo = goog.module.get('missing.name.Foo')",
" provided.Bar = class extends Foo {};",
"});",
""),
lines(
"goog.provide('provided');", //
"",
"provided.Bar=class extends Foo {}",
""));
}
// Property chains rooted in a deleted invalid get keep the original alias root.
@Test
public void testGoogModuleGet_missingRecursive() {
test(
lines(
"goog.provide('provided');",
"",
"goog.scope(function() {",
" var Foo = goog.module.get('missing.name.Foo')",
" /** @type {!Foo.Bar} */",
" provided.f = new Foo.Bar;",
"});",
""),
lines(
"goog.provide('provided');",
"",
"/** @type {!Foo.Bar} */",
"provided.f = new Foo.Bar();",
""));
}
// An alias of a property read off an invalid get is left as an unresolved name.
@Test
public void testGoogModuleGet_missingProperty() {
test(
lines(
"goog.provide('provided');",
"",
"goog.scope(function() {",
" var Bar = goog.module.get('missing.name.Foo').Bar",
" /** @type {!Bar} */",
" provided.f = new Bar;",
"});",
""),
lines(
"goog.provide('provided');", //
"",
"/** @type {!Bar} */",
"provided.f = new Bar;",
""));
}
// An alias of (a property of) a deleted module-get alias expands through it.
@Test
public void testGoogModuleGet_aliasOfDeletedModuleGetAlias() {
test(
lines(
"goog.provide('provided');",
"",
"goog.scope(function() {",
" var Foo = goog.module.get('missing.name.Foo');",
" var Bar = Foo.Bar",
" /** @type {!Bar} */",
" provided.f = new Bar;",
"});",
""),
lines(
"goog.provide('provided');",
"",
"/** @type {!Foo.Bar} */",
"provided.f = new Foo.Bar();",
""));
}
// Destructuring parameters inside an object literal are not treated as aliases.
@Test
public void testObjectPattern() {
testScopedNoChanges("", "{foo: ({bar}) => baz};");
}
// Type checking still runs on the rewritten output and reports the mismatch.
@Test
public void testTypeCheck() {
enableTypeCheck();
test(
lines(
"goog.scope(function () {",
" /** @constructor */ function F() {}",
" /** @return {F} */ function createFoo() { return 1; }",
"});"),
lines(
SCOPE_NAMESPACE,
"/** @return {$jscomp.scope.F} */",
"$jscomp.scope.createFoo = /** @return {$jscomp.scope.F} */ function() { return 1; };",
"/** @constructor */ $jscomp.scope.F = /** @constructor */ function() { };"),
warning(TypeValidator.TYPE_MISMATCH_WARNING));
}
// Alias Recording Tests
// TODO(tylerg): update these to EasyMock style tests once available
// Without a goog.scope call the transformation handler is never invoked.
@Test
public void testNoGoogScope() {
String fullJsCode =
"var g = goog;\n g.dom.createElement(g.dom.TagName.DIV);";
TransformationHandlerSpy spy = new TransformationHandlerSpy();
transformationHandler = spy;
testSame(fullJsCode);
assertThat(spy.observedPositions).isEmpty();
}
// A single alias definition is reported once, with its source position recorded.
@Test
public void testRecordOneAlias() {
String fullJsCode = GOOG_SCOPE_START_BLOCK
+ "var g = goog;\n g.dom.createElement(g.dom.TagName.DIV);\n"
+ GOOG_SCOPE_END_BLOCK;
String expectedJsCode = "goog.dom.createElement(goog.dom.TagName.DIV);\n";
TransformationHandlerSpy spy = new TransformationHandlerSpy();
transformationHandler = spy;
test(fullJsCode, expectedJsCode);
assertThat(spy.observedPositions).containsKey("testcode");
List<SourcePosition<AliasTransformation>> positions = spy.observedPositions.get("testcode");
assertThat(positions).hasSize(1);
verifyAliasTransformationPosition(1, 0, 2, 1, positions.get(0));
assertThat(spy.constructedAliases).hasSize(1);
AliasSpy aliasSpy = (AliasSpy) spy.constructedAliases.get(0);
assertThat(aliasSpy.observedDefinitions).containsEntry("g", "goog");
}
// Alias names containing `$` characters are recorded verbatim.
@Test
public void testRecordOneAlias2() {
String fullJsCode = GOOG_SCOPE_START_BLOCK
+ "var g$1 = goog;\n g$1.dom.createElement(g$1.dom.TagName.DIV);\n"
+ GOOG_SCOPE_END_BLOCK;
String expectedJsCode = "goog.dom.createElement(goog.dom.TagName.DIV);\n";
TransformationHandlerSpy spy = new TransformationHandlerSpy();
transformationHandler = spy;
test(fullJsCode, expectedJsCode);
assertThat(spy.observedPositions).containsKey("testcode");
List<SourcePosition<AliasTransformation>> positions = spy.observedPositions.get("testcode");
assertThat(positions).hasSize(1);
verifyAliasTransformationPosition(1, 0, 2, 1, positions.get(0));
assertThat(spy.constructedAliases).hasSize(1);
AliasSpy aliasSpy = (AliasSpy) spy.constructedAliases.get(0);
assertThat(aliasSpy.observedDefinitions).containsEntry("g$1", "goog");
}
// All aliases of one goog.scope call land in a single AliasTransformation,
// with definitions recorded as written (unexpanded, e.g. "g.bar").
@Test
public void testRecordMultipleAliases() {
String fullJsCode = GOOG_SCOPE_START_BLOCK
+ "var g = goog;\n var b= g.bar;\n var f = goog.something.foo;"
+ "g.dom.createElement(g.dom.TagName.DIV);\n b.foo();"
+ GOOG_SCOPE_END_BLOCK;
String expectedJsCode =
"goog.dom.createElement(goog.dom.TagName.DIV);\n goog.bar.foo();";
TransformationHandlerSpy spy = new TransformationHandlerSpy();
transformationHandler = spy;
test(fullJsCode, expectedJsCode);
assertThat(spy.observedPositions).containsKey("testcode");
List<SourcePosition<AliasTransformation>> positions = spy.observedPositions.get("testcode");
assertThat(positions).hasSize(1);
verifyAliasTransformationPosition(1, 0, 3, 1, positions.get(0));
assertThat(spy.constructedAliases).hasSize(1);
AliasSpy aliasSpy = (AliasSpy) spy.constructedAliases.get(0);
assertThat(aliasSpy.observedDefinitions).containsEntry("g", "goog");
assertThat(aliasSpy.observedDefinitions).containsEntry("b", "g.bar");
assertThat(aliasSpy.observedDefinitions).containsEntry("f", "goog.something.foo");
}
// Each goog.scope call produces its own AliasTransformation and position record.
@Test
public void testRecordAliasFromMultipleGoogScope() {
String firstGoogScopeBlock = GOOG_SCOPE_START_BLOCK
+ "\n var g = goog;\n g.dom.createElement(g.dom.TagName.DIV);\n"
+ GOOG_SCOPE_END_BLOCK;
String fullJsCode = firstGoogScopeBlock + "\n\nvar l = abc.def;\n\n"
+ GOOG_SCOPE_START_BLOCK
+ "\n var z = namespace.Zoo;\n z.getAnimals(l);\n"
+ GOOG_SCOPE_END_BLOCK;
String expectedJsCode = "goog.dom.createElement(goog.dom.TagName.DIV);\n"
+ "\n\nvar l = abc.def;\n\n" + "\n namespace.Zoo.getAnimals(l);\n";
TransformationHandlerSpy spy = new TransformationHandlerSpy();
transformationHandler = spy;
test(fullJsCode, expectedJsCode);
assertThat(spy.observedPositions).containsKey("testcode");
List<SourcePosition<AliasTransformation>> positions = spy.observedPositions.get("testcode");
assertThat(positions).hasSize(2);
verifyAliasTransformationPosition(1, 0, 6, 0, positions.get(0));
verifyAliasTransformationPosition(8, 0, 11, 4, positions.get(1));
assertThat(spy.constructedAliases).hasSize(2);
AliasSpy aliasSpy = (AliasSpy) spy.constructedAliases.get(0);
assertThat(aliasSpy.observedDefinitions).containsEntry("g", "goog");
aliasSpy = (AliasSpy) spy.constructedAliases.get(1);
assertThat(aliasSpy.observedDefinitions).containsEntry("z", "namespace.Zoo");
}
/**
 * Asserts that {@code pos} starts exactly at (startLine, startChar) and ends at or
 * after (endLine, endChar). The end coordinates are lower bounds only, since the
 * recorded range may extend past the last alias definition.
 */
private void verifyAliasTransformationPosition(int startLine, int startChar,
int endLine, int endChar, SourcePosition<AliasTransformation> pos) {
assertThat(pos.getStartLine()).isEqualTo(startLine);
assertThat(pos.getPositionOnStartLine()).isEqualTo(startChar);
// Fixed failure messages: the originals read "Endline smaller than expected." and
// "Endchar is smaller thatn expected." (typo).
assertWithMessage("End line smaller than expected.").that(pos.getEndLine()).isAtLeast(endLine);
assertWithMessage("End char smaller than expected.")
.that(pos.getPositionOnEndLine())
.isAtLeast(endChar);
}
/**
 * Builds the ScopedAliases pass under test, wiring in the test's (possibly spy)
 * transformation handler and the configured invalid-module-get handling.
 */
@Override
protected ScopedAliases getProcessor(Compiler compiler) {
return ScopedAliases.builder(compiler)
.setAliasTransformationHandler(transformationHandler)
.setModuleMetadataMap(compiler.getModuleMetadataMap())
.setInvalidModuleGetHandling(invalidModuleGetHandling)
.build();
}
/**
 * Recording AliasTransformationHandler: keeps every reported source position
 * (keyed by file name) and every AliasTransformation it hands out, so tests can
 * inspect what the pass reported.
 */
private static class TransformationHandlerSpy implements AliasTransformationHandler {
// Positions reported per source file, in observation order.
private final ListMultimap<String, SourcePosition<AliasTransformation>> observedPositions =
MultimapBuilder.hashKeys().arrayListValues().build();
// Every transformation handed out by this handler, in creation order.
public final List<AliasTransformation> constructedAliases = new ArrayList<>();
@Override
public AliasTransformation logAliasTransformation(
String sourceFile, SourcePosition<AliasTransformation> position) {
AliasTransformation transformation = new AliasSpy();
constructedAliases.add(transformation);
observedPositions.put(sourceFile, position);
return transformation;
}
}
/** Recording AliasTransformation: remembers each alias -> definition pair it is told about. */
private static class AliasSpy implements AliasTransformation {
// Alias name -> definition string, exactly as reported via addAlias().
public final Map<String, String> observedDefinitions = new HashMap<>();
@Override
public void addAlias(String alias, String definition) {
this.observedDefinitions.put(alias, definition);
}
}
/**
 * Traversal pass that collects JSDoc type nodes and compares them across runs.
 * On the first annotated node encountered it records the types; on subsequent
 * annotated nodes it asserts their types equal the recorded set.
 * NOTE(review): the record-then-compare behavior is order-dependent — the first
 * visited JSDoc seeds the expectation; confirm this matches the intended
 * pre/post-pass type comparison semantics.
 */
private static class TypeVerifyingPass
implements CompilerPass, NodeTraversal.Callback {
private final Compiler compiler;
// Null until the first annotated node is visited; then holds its type nodes.
private List<Node> actualTypes = null;
public TypeVerifyingPass(Compiler compiler) {
this.compiler = compiler;
}
@Override
public void process(Node externs, Node root) {
NodeTraversal.traverse(compiler, root, this);
}
@Override
public boolean shouldTraverse(NodeTraversal nodeTraversal, Node n,
Node parent) {
// Visit every node; filtering happens in visit() based on JSDoc presence.
return true;
}
@Override
public void visit(NodeTraversal t, Node n, Node parent) {
JSDocInfo info = n.getJSDocInfo();
if (info != null) {
Collection<Node> typeNodes = info.getTypeNodes();
if (!typeNodes.isEmpty()) {
if (actualTypes != null) {
// Compare this node's types against the previously recorded set.
List<Node> expectedTypes = new ArrayList<>();
expectedTypes.addAll(info.getTypeNodes());
assertWithMessage("Wrong number of JsDoc types")
.that(actualTypes.size())
.isEqualTo(expectedTypes.size());
for (int i = 0; i < expectedTypes.size(); i++) {
assertNode(actualTypes.get(i)).isEqualTo(expectedTypes.get(i));
}
} else {
// First annotated node: record its types as the baseline.
actualTypes = new ArrayList<>();
actualTypes.addAll(info.getTypeNodes());
}
}
}
}
}
}
| |
package org.hisp.dhis.preheat;
/*
* Copyright (c) 2004-2018, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import com.google.common.collect.Lists;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.hisp.dhis.attribute.Attribute;
import org.hisp.dhis.attribute.AttributeService;
import org.hisp.dhis.category.CategoryDimension;
import org.hisp.dhis.common.AnalyticalObject;
import org.hisp.dhis.common.BaseAnalyticalObject;
import org.hisp.dhis.common.BaseIdentifiableObject;
import org.hisp.dhis.common.CodeGenerator;
import org.hisp.dhis.common.DataDimensionItem;
import org.hisp.dhis.common.EmbeddedObject;
import org.hisp.dhis.common.IdentifiableObject;
import org.hisp.dhis.common.IdentifiableObjectManager;
import org.hisp.dhis.commons.collection.CollectionUtils;
import org.hisp.dhis.commons.timer.SystemTimer;
import org.hisp.dhis.commons.timer.Timer;
import org.hisp.dhis.dataelement.DataElementOperand;
import org.hisp.dhis.dataset.DataSetElement;
import org.hisp.dhis.period.Period;
import org.hisp.dhis.period.PeriodService;
import org.hisp.dhis.period.PeriodStore;
import org.hisp.dhis.query.Query;
import org.hisp.dhis.query.QueryService;
import org.hisp.dhis.query.Restrictions;
import org.hisp.dhis.schema.MergeParams;
import org.hisp.dhis.schema.MergeService;
import org.hisp.dhis.schema.Property;
import org.hisp.dhis.schema.PropertyType;
import org.hisp.dhis.schema.Schema;
import org.hisp.dhis.schema.SchemaService;
import org.hisp.dhis.system.util.ReflectionUtils;
import org.hisp.dhis.trackedentity.TrackedEntityAttributeDimension;
import org.hisp.dhis.trackedentity.TrackedEntityDataElementDimension;
import org.hisp.dhis.trackedentity.TrackedEntityProgramIndicatorDimension;
import org.hisp.dhis.user.CurrentUserService;
import org.hisp.dhis.user.User;
import org.hisp.dhis.user.UserCredentials;
import org.hisp.dhis.user.UserGroup;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.annotation.Transactional;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
/**
* @author Morten Olav Hansen <mortenoh@gmail.com>
*/
@Transactional
public class DefaultPreheatService implements PreheatService
{
private static final Log log = LogFactory.getLog( DefaultPreheatService.class );
@Autowired
private SchemaService schemaService;
@Autowired
private QueryService queryService;
@Autowired
private IdentifiableObjectManager manager;
@Autowired
private CurrentUserService currentUserService;
@Autowired
private PeriodStore periodStore;
@Autowired
private PeriodService periodService;
@Autowired
private AttributeService attributeService;
@Autowired
private MergeService mergeService;
/**
 * Builds a Preheat cache for a metadata import: preloads existing objects (by UID
 * and/or CODE, depending on the identifier and preheat mode), seeds uniqueness and
 * attribute metadata, security references, and period lookups.
 */
@Override
@SuppressWarnings( "unchecked" )
public Preheat preheat( PreheatParams params )
{
Timer timer = new SystemTimer().start();
Preheat preheat = new Preheat();
preheat.setUser( params.getUser() );
preheat.setDefaults( manager.getDefaults() );
// Fall back to the currently authenticated user when the caller supplied none.
if ( preheat.getUser() == null )
{
preheat.setUser( currentUserService.getCurrentUser() );
}
preheat.put( PreheatIdentifier.UID, preheat.getUser() );
preheat.put( PreheatIdentifier.CODE, preheat.getUser() );
// Assign generated UIDs to incoming objects that do not have one yet (mutates the input).
for ( Class<? extends IdentifiableObject> klass : params.getObjects().keySet() )
{
params.getObjects().get( klass ).stream()
.filter( identifiableObject -> StringUtils.isEmpty( identifiableObject.getUid() ) )
.forEach( identifiableObject -> ((BaseIdentifiableObject) identifiableObject).setUid( CodeGenerator.generateUid() ) );
}
// Per-class snapshots of existing objects, later used for uniqueness validation.
Map<Class<? extends IdentifiableObject>, List<IdentifiableObject>> uniqueCollectionMap = new HashMap<>();
Set<Class<? extends IdentifiableObject>> klasses = new HashSet<>( params.getObjects().keySet() );
if ( PreheatMode.ALL == params.getPreheatMode() )
{
// ALL mode: load every metadata class (or only those explicitly requested).
if ( params.getClasses().isEmpty() )
{
schemaService.getMetadataSchemas().stream().filter( Schema::isIdentifiableObject )
.forEach( schema -> params.getClasses().add( (Class<? extends IdentifiableObject>) schema.getKlass() ) );
}
for ( Class<? extends IdentifiableObject> klass : params.getClasses() )
{
Query query = Query.from( schemaService.getDynamicSchema( klass ) );
query.setUser( preheat.getUser() );
List<? extends IdentifiableObject> objects = queryService.query( query );
// AUTO indexes by both UID and CODE.
if ( PreheatIdentifier.UID == params.getPreheatIdentifier() || PreheatIdentifier.AUTO == params.getPreheatIdentifier() )
{
preheat.put( PreheatIdentifier.UID, objects );
}
if ( PreheatIdentifier.CODE == params.getPreheatIdentifier() || PreheatIdentifier.AUTO == params.getPreheatIdentifier() )
{
preheat.put( PreheatIdentifier.CODE, objects );
}
if ( klasses.contains( klass ) && !objects.isEmpty() )
{
uniqueCollectionMap.put( klass, new ArrayList<>( objects ) );
}
}
}
else if ( PreheatMode.REFERENCE == params.getPreheatMode() )
{
// REFERENCE mode: only load objects actually referenced by the payload.
Map<PreheatIdentifier, Map<Class<? extends IdentifiableObject>, Set<String>>> references = collectReferences( params.getObjects() );
Map<Class<? extends IdentifiableObject>, Set<String>> uidMap = references.get( PreheatIdentifier.UID );
Map<Class<? extends IdentifiableObject>, Set<String>> codeMap = references.get( PreheatIdentifier.CODE );
if ( uidMap != null && (PreheatIdentifier.UID == params.getPreheatIdentifier() || PreheatIdentifier.AUTO == params.getPreheatIdentifier()) )
{
for ( Class<? extends IdentifiableObject> klass : uidMap.keySet() )
{
// Chunk the id list to keep individual IN-queries bounded.
List<List<String>> identifiers = Lists.partition( Lists.newArrayList( uidMap.get( klass ) ), 20000 );
if ( !identifiers.isEmpty() )
{
for ( List<String> ids : identifiers )
{
Query query = Query.from( schemaService.getDynamicSchema( klass ) );
query.setUser( preheat.getUser() );
query.add( Restrictions.in( "id", ids ) );
List<? extends IdentifiableObject> objects = queryService.query( query );
preheat.put( PreheatIdentifier.UID, objects );
}
}
}
}
if ( codeMap != null && (PreheatIdentifier.CODE == params.getPreheatIdentifier() || PreheatIdentifier.AUTO == params.getPreheatIdentifier()) )
{
for ( Class<? extends IdentifiableObject> klass : codeMap.keySet() )
{
List<List<String>> identifiers = Lists.partition( Lists.newArrayList( codeMap.get( klass ) ), 20000 );
if ( !identifiers.isEmpty() )
{
for ( List<String> ids : identifiers )
{
Query query = Query.from( schemaService.getDynamicSchema( klass ) );
query.setUser( preheat.getUser() );
query.add( Restrictions.in( "code", ids ) );
List<? extends IdentifiableObject> objects = queryService.query( query );
preheat.put( PreheatIdentifier.CODE, objects );
}
}
}
}
// Uniqueness validation still needs full snapshots of the imported classes.
for ( Class<? extends IdentifiableObject> klass : klasses )
{
Query query = Query.from( schemaService.getDynamicSchema( klass ) );
query.setUser( preheat.getUser() );
List<? extends IdentifiableObject> objects = queryService.query( query );
if ( !objects.isEmpty() )
{
uniqueCollectionMap.put( klass, new ArrayList<>( objects ) );
}
}
}
// UserCredentials ride along with User objects and need their own uniqueness entry.
if ( uniqueCollectionMap.containsKey( User.class ) )
{
List<IdentifiableObject> userCredentials = new ArrayList<>();
for ( IdentifiableObject identifiableObject : uniqueCollectionMap.get( User.class ) )
{
User user = (User) identifiableObject;
if ( user.getUserCredentials() != null )
{
userCredentials.add( user.getUserCredentials() );
}
}
uniqueCollectionMap.put( UserCredentials.class, userCredentials );
}
preheat.setUniquenessMap( collectUniqueness( uniqueCollectionMap ) );
// add preheat placeholders for objects that will be created and set mandatory/unique attributes
for ( Class<? extends IdentifiableObject> klass : params.getObjects().keySet() )
{
List<IdentifiableObject> objects = params.getObjects().get( klass );
preheat.put( params.getPreheatIdentifier(), objects );
}
handleAttributes( params.getObjects(), preheat );
handleSecurity( params.getObjects(), params.getPreheatIdentifier(), preheat );
// Preload all periods and period types for name-based lookup during import.
periodStore.getAll().forEach( period -> preheat.getPeriodMap().put( period.getName(), period ) );
periodStore.getAllPeriodTypes().forEach( periodType -> preheat.getPeriodTypeMap().put( periodType.getName(), periodType ) );
log.info( "(" + preheat.getUsername() + ") Import:Preheat[" + params.getPreheatMode() + "] took " + timer.toString() );
return preheat;
}
/**
 * Rewires the user/userGroup references of each object's sharing (user accesses and
 * user-group accesses) to the preheated instances, resolving by the import identifier
 * when a reference object is present, otherwise by the stored UID.
 */
private void handleSecurity( Map<Class<? extends IdentifiableObject>, List<IdentifiableObject>> objects, PreheatIdentifier identifier, Preheat preheat )
{
objects.forEach( ( klass, list ) -> list.forEach( object ->
{
object.getUserAccesses().forEach( ua ->
{
User user = null;
if ( ua.getUser() != null )
{
// Resolve via the identifier the import is keyed on (UID or CODE).
if ( PreheatIdentifier.UID == identifier )
{
user = preheat.get( identifier, User.class, ua.getUser().getUid() );
}
else if ( PreheatIdentifier.CODE == identifier )
{
user = preheat.get( identifier, User.class, ua.getUser().getCode() );
}
}
else
{
// No embedded user object: fall back to the raw UID on the access entry.
user = preheat.get( PreheatIdentifier.UID, User.class, ua.getUserUid() );
}
// Leave the access untouched when the user could not be resolved.
if ( user != null )
{
ua.setUser( user );
}
} );
object.getUserGroupAccesses().forEach( uga ->
{
UserGroup userGroup = null;
if ( uga.getUserGroup() != null )
{
if ( PreheatIdentifier.UID == identifier )
{
userGroup = preheat.get( identifier, UserGroup.class, uga.getUserGroup().getUid() );
}
else if ( PreheatIdentifier.CODE == identifier )
{
userGroup = preheat.get( identifier, UserGroup.class, uga.getUserGroup().getCode() );
}
}
else
{
userGroup = preheat.get( PreheatIdentifier.UID, UserGroup.class, uga.getUserGroupUid() );
}
if ( userGroup != null )
{
uga.setUserGroup( userGroup );
}
} );
} ) );
}
/**
 * Seeds the preheat with attribute metadata for each imported class: the UIDs of
 * mandatory and unique attributes, and the current values of unique attributes so
 * duplicates can be detected. Attributes that are part of the import payload itself
 * contribute their mandatory/unique rules as well.
 */
private void handleAttributes( Map<Class<? extends IdentifiableObject>, List<IdentifiableObject>> objects, Preheat preheat )
{
    for ( Class<? extends IdentifiableObject> klass : objects.keySet() )
    {
        List<Attribute> mandatoryAttributes = attributeService.getMandatoryAttributes( klass );
        if ( !mandatoryAttributes.isEmpty() )
        {
            // computeIfAbsent replaces the previous put-then-get pair.
            Set<String> mandatory = preheat.getMandatoryAttributes().computeIfAbsent( klass, k -> new HashSet<>() );
            mandatoryAttributes.forEach( attribute -> mandatory.add( attribute.getUid() ) );
        }
        List<Attribute> uniqueAttributes = attributeService.getUniqueAttributes( klass );
        if ( !uniqueAttributes.isEmpty() )
        {
            Set<String> unique = preheat.getUniqueAttributes().computeIfAbsent( klass, k -> new HashSet<>() );
            uniqueAttributes.forEach( attribute -> unique.add( attribute.getUid() ) );
        }
        // Existing values of unique attributes are needed to validate uniqueness of imports.
        List<? extends IdentifiableObject> uniqueAttributeValues = manager.getAllByAttributes( klass, uniqueAttributes );
        handleUniqueAttributeValues( klass, uniqueAttributeValues, preheat );
    }
    if ( objects.containsKey( Attribute.class ) )
    {
        // Attributes created in this same import also contribute mandatory/unique rules.
        for ( IdentifiableObject identifiableObject : objects.get( Attribute.class ) )
        {
            Attribute attribute = (Attribute) identifiableObject;
            if ( attribute.isMandatory() )
            {
                attribute.getSupportedClasses().forEach( klass ->
                    preheat.getMandatoryAttributes().computeIfAbsent( klass, k -> new HashSet<>() ).add( attribute.getUid() ) );
            }
            if ( attribute.isUnique() )
            {
                attribute.getSupportedClasses().forEach( klass ->
                    preheat.getUniqueAttributes().computeIfAbsent( klass, k -> new HashSet<>() ).add( attribute.getUid() ) );
            }
        }
    }
}
/**
 * Records, for every unique attribute of {@code klass}, the values currently present
 * on {@code objects}, mapped value -> owning object UID. Used later to detect
 * duplicate unique-attribute values during import.
 */
private void handleUniqueAttributeValues( Class<? extends IdentifiableObject> klass, List<? extends IdentifiableObject> objects, Preheat preheat )
{
    if ( objects.isEmpty() )
    {
        return;
    }
    preheat.getUniqueAttributeValues().put( klass, new HashMap<>() );
    objects.forEach( object -> object.getAttributeValues().forEach( attributeValue ->
    {
        Set<String> uniqueAttributeUids = preheat.getUniqueAttributes().get( klass );
        String attributeUid = attributeValue.getAttribute().getUid();
        if ( uniqueAttributeUids != null && uniqueAttributeUids.contains( attributeUid ) )
        {
            // Group value -> UID per attribute; computeIfAbsent replaces the previous
            // containsKey/put/get triple lookup.
            preheat.getUniqueAttributeValues().get( klass )
                .computeIfAbsent( attributeUid, uid -> new HashMap<>() )
                .put( attributeValue.getValue(), object.getUid() );
        }
    } ) );
}
/**
 * Validates the preheat parameters. REFERENCE mode requires at least one object to
 * scan; ALL and NONE need no validation (empty classes means "all metadata classes");
 * any other mode is rejected.
 *
 * @throws PreheatException when the parameters are inconsistent with the mode.
 */
@Override
public void validate( PreheatParams params ) throws PreheatException
{
    // REFERENCE is the only mode with a precondition: it needs objects to scan.
    if ( PreheatMode.REFERENCE == params.getPreheatMode() )
    {
        if ( params.getObjects().isEmpty() )
        {
            throw new PreheatException( "PreheatMode.REFERENCE, but no objects were provided." );
        }
        return;
    }
    // Anything that is neither ALL nor NONE at this point is an unknown mode.
    if ( PreheatMode.ALL != params.getPreheatMode() && PreheatMode.NONE != params.getPreheatMode() )
    {
        throw new PreheatException( "Invalid preheat mode." );
    }
}
/**
 * Collects UID and CODE references from an arbitrary input: dispatches to the
 * collection/map overloads when applicable, otherwise wraps the single object in a
 * one-entry map keyed by its concrete class. A null input yields an empty result.
 */
@Override
@SuppressWarnings( "unchecked" )
public Map<PreheatIdentifier, Map<Class<? extends IdentifiableObject>, Set<String>>> collectReferences( Object object )
{
    if ( object == null )
    {
        return new HashMap<>();
    }
    if ( object instanceof Collection )
    {
        return collectReferences( (Collection<?>) object );
    }
    if ( object instanceof Map )
    {
        return collectReferences( (Map<Class<?>, List<?>>) object );
    }
    Map<Class<?>, List<?>> wrapper = new HashMap<>();
    wrapper.put( object.getClass(), Lists.newArrayList( object ) );
    return collectReferences( wrapper );
}
/**
 * Collects references from a flat collection by keying it on the concrete class of
 * its first element (all elements are assumed to share that class — inherited
 * behavior, confirm against callers). Null or empty input yields an empty result.
 */
private Map<PreheatIdentifier, Map<Class<? extends IdentifiableObject>, Set<String>>> collectReferences( Collection<?> objects )
{
    if ( objects == null || objects.isEmpty() )
    {
        return new HashMap<>();
    }
    Class<?> elementClass = objects.iterator().next().getClass();
    Map<Class<?>, List<?>> wrapper = new HashMap<>();
    wrapper.put( elementClass, Lists.newArrayList( objects ) );
    return collectReferences( wrapper );
}
    /**
     * Scans the given class-to-objects map and collects, per identifier type
     * (UID and CODE), the identifiers of every object referenced by the scanned
     * objects: owned REFERENCE properties, attribute values, sharing accesses,
     * and - for analytical objects - all dimensional items.
     *
     * @param objects map of class to instances to scan (not modified)
     * @return map keyed by PreheatIdentifier, then by referenced class, to identifier sets
     */
    @SuppressWarnings( "unchecked" )
    private Map<PreheatIdentifier, Map<Class<? extends IdentifiableObject>, Set<String>>> collectReferences( Map<Class<?>, List<?>> objects )
    {
        Map<PreheatIdentifier, Map<Class<? extends IdentifiableObject>, Set<String>>> map = new HashMap<>();
        map.put( PreheatIdentifier.UID, new HashMap<>() );
        map.put( PreheatIdentifier.CODE, new HashMap<>() );
        Map<Class<? extends IdentifiableObject>, Set<String>> uidMap = map.get( PreheatIdentifier.UID );
        Map<Class<? extends IdentifiableObject>, Set<String>> codeMap = map.get( PreheatIdentifier.CODE );
        if ( objects.isEmpty() )
        {
            return map;
        }
        Map<Class<?>, List<?>> targets = new HashMap<>();
        targets.putAll( objects ); // clone objects list, we don't want to modify it
        // pull embedded objects (and user credentials) into the scan set
        collectScanTargets( targets );
        for ( Class<?> klass : targets.keySet() )
        {
            Schema schema = schemaService.getDynamicSchema( klass );
            // owned, persisted reference properties (single-valued or collections)
            List<Property> referenceProperties = schema.getProperties().stream()
                .filter( p -> p.isPersisted() && p.isOwner() && (PropertyType.REFERENCE == p.getPropertyType() || PropertyType.REFERENCE == p.getItemPropertyType()) )
                .collect( Collectors.toList() );
            for ( Object object : targets.get( klass ) )
            {
                if ( schema.isIdentifiableObject() )
                {
                    IdentifiableObject identifiableObject = (IdentifiableObject) object;
                    // attribute values, sharing accesses, and the object's own identifiers
                    identifiableObject.getAttributeValues().forEach( av -> addIdentifiers( map, av.getAttribute() ) );
                    identifiableObject.getUserGroupAccesses().forEach( uga -> addIdentifiers( map, uga.getUserGroup() ) );
                    identifiableObject.getUserAccesses().forEach( ua -> addIdentifiers( map, ua.getUser() ) );
                    addIdentifiers( map, identifiableObject );
                }
                referenceProperties.forEach( p ->
                {
                    if ( !p.isCollection() )
                    {
                        Class<? extends IdentifiableObject> itemKlass = (Class<? extends IdentifiableObject>) p.getKlass();
                        // ensure buckets exist even when the reference turns out to be null
                        if ( !uidMap.containsKey( itemKlass ) ) uidMap.put( itemKlass, new HashSet<>() );
                        if ( !codeMap.containsKey( itemKlass ) ) codeMap.put( itemKlass, new HashSet<>() );
                        Object reference = ReflectionUtils.invokeMethod( object, p.getGetterMethod() );
                        if ( reference != null )
                        {
                            IdentifiableObject identifiableObject = (IdentifiableObject) reference;
                            addIdentifiers( map, identifiableObject );
                        }
                    }
                    else
                    {
                        Collection<IdentifiableObject> reference = ReflectionUtils.invokeMethod( object, p.getGetterMethod() );
                        reference.forEach( identifiableObject -> addIdentifiers( map, identifiableObject ) );
                        // operands additionally reference their data element and category option combo
                        if ( DataElementOperand.class.isAssignableFrom( p.getItemKlass() ) )
                        {
                            CollectionUtils.nullSafeForEach( reference, identifiableObject ->
                            {
                                DataElementOperand dataElementOperand = (DataElementOperand) identifiableObject;
                                addIdentifiers( map, dataElementOperand.getDataElement() );
                                addIdentifiers( map, dataElementOperand.getCategoryOptionCombo() );
                            } );
                        }
                    }
                } );
                // analytical objects carry dimensional references not exposed as plain reference properties
                if ( AnalyticalObject.class.isInstance( object ) )
                {
                    BaseAnalyticalObject analyticalObject = (BaseAnalyticalObject) object;
                    List<DataDimensionItem> dataDimensionItems = analyticalObject.getDataDimensionItems();
                    List<CategoryDimension> categoryDimensions = analyticalObject.getCategoryDimensions();
                    List<TrackedEntityDataElementDimension> trackedEntityDataElementDimensions = analyticalObject.getDataElementDimensions();
                    List<TrackedEntityAttributeDimension> attributeDimensions = analyticalObject.getAttributeDimensions();
                    List<TrackedEntityProgramIndicatorDimension> programIndicatorDimensions = analyticalObject.getProgramIndicatorDimensions();
                    CollectionUtils.nullSafeForEach( dataDimensionItems, dataDimensionItem ->
                    {
                        addIdentifiers( map, dataDimensionItem.getDimensionalItemObject() );
                        if ( dataDimensionItem.getDataElementOperand() != null )
                        {
                            addIdentifiers( map, dataDimensionItem.getDataElementOperand().getDataElement() );
                            addIdentifiers( map, dataDimensionItem.getDataElementOperand().getCategoryOptionCombo() );
                        }
                        if ( dataDimensionItem.getReportingRate() != null )
                        {
                            addIdentifiers( map, dataDimensionItem.getReportingRate().getDataSet() );
                        }
                        if ( dataDimensionItem.getProgramDataElement() != null )
                        {
                            addIdentifiers( map, dataDimensionItem.getProgramDataElement().getDataElement() );
                            addIdentifiers( map, dataDimensionItem.getProgramDataElement().getProgram() );
                        }
                        if ( dataDimensionItem.getProgramAttribute() != null )
                        {
                            addIdentifiers( map, dataDimensionItem.getProgramAttribute().getAttribute() );
                            addIdentifiers( map, dataDimensionItem.getProgramAttribute().getProgram() );
                        }
                    } );
                    CollectionUtils.nullSafeForEach( categoryDimensions, categoryDimension ->
                    {
                        addIdentifiers( map, categoryDimension.getDimension() );
                        categoryDimension.getItems().forEach( item -> addIdentifiers( map, item ) );
                    } );
                    CollectionUtils.nullSafeForEach( trackedEntityDataElementDimensions, trackedEntityDataElementDimension ->
                    {
                        addIdentifiers( map, trackedEntityDataElementDimension.getDataElement() );
                        addIdentifiers( map, trackedEntityDataElementDimension.getLegendSet() );
                    } );
                    CollectionUtils.nullSafeForEach( attributeDimensions, trackedEntityAttributeDimension ->
                    {
                        addIdentifiers( map, trackedEntityAttributeDimension.getAttribute() );
                        addIdentifiers( map, trackedEntityAttributeDimension.getLegendSet() );
                    } );
                    CollectionUtils.nullSafeForEach( programIndicatorDimensions, programIndicatorDimension ->
                    {
                        addIdentifiers( map, programIndicatorDimension.getProgramIndicator() );
                        addIdentifiers( map, programIndicatorDimension.getLegendSet() );
                    } );
                }
            }
        }
        // drop classes that ended up with no collected identifiers
        cleanEmptyEntries( uidMap );
        cleanEmptyEntries( codeMap );
        return map;
    }
@Override
@SuppressWarnings( "unchecked" )
public Map<Class<?>, Map<String, Map<String, Object>>> collectObjectReferences( Object object )
{
if ( object == null )
{
return new HashMap<>();
}
if ( Collection.class.isInstance( object ) )
{
return collectObjectReferences( (Collection<?>) object );
}
else if ( Map.class.isInstance( object ) )
{
return collectObjectReferences( (Map<Class<?>, List<?>>) object );
}
Map<Class<?>, List<?>> map = new HashMap<>();
map.put( object.getClass(), Lists.newArrayList( object ) );
return collectObjectReferences( map );
}
private Map<Class<?>, Map<String, Map<String, Object>>> collectObjectReferences( Collection<?> objects )
{
if ( objects == null || objects.isEmpty() )
{
return new HashMap<>();
}
Map<Class<?>, List<?>> map = new HashMap<>();
map.put( objects.iterator().next().getClass(), Lists.newArrayList( objects ) );
return collectObjectReferences( map );
}
    /**
     * For every identifiable object in the scan set, creates detached copies of
     * its owned REFERENCE properties (via mergeService) and indexes them by the
     * owner's UID and the property (or collection) name.
     *
     * @param objects map of class to instances to scan (not modified)
     * @return map of class to owner UID to property name to reference copy
     */
    @SuppressWarnings( "unchecked" )
    private Map<Class<?>, Map<String, Map<String, Object>>> collectObjectReferences( Map<Class<?>, List<?>> objects )
    {
        Map<Class<?>, Map<String, Map<String, Object>>> map = new HashMap<>();
        if ( objects.isEmpty() )
        {
            return map;
        }
        Map<Class<?>, List<?>> targets = new HashMap<>();
        targets.putAll( objects ); // clone objects list, we don't want to modify it
        // pull embedded objects (and user credentials) into the scan set
        collectScanTargets( targets );
        for ( Class<?> objectClass : targets.keySet() )
        {
            Schema schema = schemaService.getDynamicSchema( objectClass );
            // only identifiable objects can be indexed by UID
            if ( !schema.isIdentifiableObject() )
            {
                continue;
            }
            // owned, persisted reference properties (single-valued or collections)
            List<Property> properties = schema.getProperties().stream()
                .filter( p -> p.isPersisted() && p.isOwner() && (PropertyType.REFERENCE == p.getPropertyType() || PropertyType.REFERENCE == p.getItemPropertyType()) )
                .collect( Collectors.toList() );
            List<IdentifiableObject> identifiableObjects = (List<IdentifiableObject>) targets.get( objectClass );
            Map<String, Map<String, Object>> refMap = new HashMap<>();
            map.put( objectClass, refMap );
            for ( IdentifiableObject object : identifiableObjects )
            {
                refMap.put( object.getUid(), new HashMap<>() );
                properties.forEach( p ->
                {
                    if ( !p.isCollection() )
                    {
                        IdentifiableObject reference = ReflectionUtils.invokeMethod( object, p.getGetterMethod() );
                        if ( reference != null )
                        {
                            try
                            {
                                // detached copy so callers can't mutate the original reference
                                IdentifiableObject identifiableObject = (IdentifiableObject) p.getKlass().newInstance();
                                mergeService.merge( new MergeParams<>( reference, identifiableObject ) );
                                refMap.get( object.getUid() ).put( p.getName(), identifiableObject );
                            }
                            catch ( InstantiationException | IllegalAccessException ignored )
                            {
                                // non-instantiable reference types are silently skipped
                            }
                        }
                    }
                    else
                    {
                        Collection<IdentifiableObject> refObjects = ReflectionUtils.newCollectionInstance( p.getKlass() );
                        Collection<IdentifiableObject> references = ReflectionUtils.invokeMethod( object, p.getGetterMethod() );
                        if ( references != null )
                        {
                            for ( IdentifiableObject reference : references )
                            {
                                try
                                {
                                    IdentifiableObject identifiableObject = (IdentifiableObject) p.getItemKlass().newInstance();
                                    mergeService.merge( new MergeParams<>( reference, identifiableObject ) );
                                    refObjects.add( identifiableObject );
                                }
                                catch ( InstantiationException | IllegalAccessException ignored )
                                {
                                    // non-instantiable item types are silently skipped
                                }
                            }
                        }
                        refMap.get( object.getUid() ).put( p.getCollectionName(), refObjects );
                    }
                } );
            }
        }
        return map;
    }
@SuppressWarnings( "unchecked" )
private void collectScanTargets( Map<Class<?>, List<?>> targets )
{
if ( targets.containsKey( User.class ) )
{
List<User> users = (List<User>) targets.get( User.class );
List<UserCredentials> userCredentials = new ArrayList<>();
for ( User user : users )
{
if ( user.getUserCredentials() != null )
{
userCredentials.add( user.getUserCredentials() );
}
}
targets.put( UserCredentials.class, userCredentials );
}
for ( Map.Entry<Class<?>, List<?>> entry : new HashMap<>( targets ).entrySet() )
{
Class<?> klass = entry.getKey();
List<?> objects = entry.getValue();
Schema schema = schemaService.getDynamicSchema( klass );
Map<String, Property> properties = schema.getEmbeddedObjectProperties();
if ( properties.isEmpty() )
{
return;
}
for ( Property property : properties.values() )
{
if ( property.isCollection() )
{
List<Object> list = new ArrayList<>();
if ( targets.containsKey( property.getItemKlass() ) )
{
list.addAll( targets.get( property.getItemKlass() ) );
}
objects.forEach( o -> list.addAll( ReflectionUtils.invokeMethod( o, property.getGetterMethod() ) ) );
targets.put( property.getItemKlass(), list );
}
else
{
List<Object> list = new ArrayList<>();
if ( targets.containsKey( property.getKlass() ) )
{
list.addAll( targets.get( property.getKlass() ) );
}
objects.forEach( o -> list.add( ReflectionUtils.invokeMethod( o, property.getGetterMethod() ) ) );
targets.put( property.getKlass(), list );
}
}
}
}
@Override
public Map<Class<? extends IdentifiableObject>, Map<String, Map<Object, String>>> collectUniqueness( Map<Class<? extends IdentifiableObject>, List<IdentifiableObject>> objects )
{
Map<Class<? extends IdentifiableObject>, Map<String, Map<Object, String>>> uniqueMap = new HashMap<>();
if ( objects.isEmpty() )
{
return uniqueMap;
}
for ( Class<? extends IdentifiableObject> objectClass : objects.keySet() )
{
Schema schema = schemaService.getDynamicSchema( objectClass );
List<IdentifiableObject> identifiableObjects = objects.get( objectClass );
uniqueMap.put( objectClass, handleUniqueProperties( schema, identifiableObjects ) );
}
return uniqueMap;
}
    /**
     * Rewires every owned REFERENCE property on the given object to the
     * corresponding persisted instance from the preheat, resolved by the given
     * identifier. Unresolvable single references are nulled out; unresolvable
     * collection members are dropped.
     *
     * @param object the object whose references are connected; no-op when null
     * @param preheat cache of persisted objects
     * @param identifier identifier type (UID/CODE) used for lookups
     */
    @Override
    public void connectReferences( Object object, Preheat preheat, PreheatIdentifier identifier )
    {
        if ( object == null )
        {
            return;
        }
        Schema schema = schemaService.getDynamicSchema( object.getClass() );
        // owned, persisted reference properties (single-valued or collections)
        List<Property> properties = schema.getProperties().stream()
            .filter( p -> p.isPersisted() && p.isOwner() && (PropertyType.REFERENCE == p.getPropertyType() || PropertyType.REFERENCE == p.getItemPropertyType()) )
            .collect( Collectors.toList() );
        for ( Property property : properties )
        {
            // user credentials and embedded objects are owned by their parent, never reconnected
            if ( skipConnect( property.getKlass() ) || skipConnect( property.getItemKlass() ) )
            {
                continue;
            }
            if ( !property.isCollection() )
            {
                IdentifiableObject refObject = ReflectionUtils.invokeMethod( object, property.getGetterMethod() );
                IdentifiableObject ref = getPersistedObject( preheat, identifier, refObject );
                // default objects (e.g. default category combo) may substitute a missing reference
                ref = connectDefaults( preheat, property, object, refObject, ref );
                // id == 0 means the resolved object is not persisted; clear the reference
                if ( ref != null && ref.getId() == 0 )
                {
                    ReflectionUtils.invokeMethod( object, property.getSetterMethod(), (Object) null );
                }
                else
                {
                    ReflectionUtils.invokeMethod( object, property.getSetterMethod(), ref );
                }
            }
            else
            {
                Collection<IdentifiableObject> objects = ReflectionUtils.newCollectionInstance( property.getKlass() );
                Collection<IdentifiableObject> refObjects = ReflectionUtils.invokeMethod( object, property.getGetterMethod() );
                for ( IdentifiableObject refObject : refObjects )
                {
                    IdentifiableObject ref = getPersistedObject( preheat, identifier, refObject );
                    // keep only references that resolved to a persisted object
                    if ( ref != null && ref.getId() != 0 ) objects.add( ref );
                }
                ReflectionUtils.invokeMethod( object, property.getSetterMethod(), objects );
            }
        }
    }
@Override
public void refresh( IdentifiableObject object )
{
PreheatParams preheatParams = new PreheatParams();
preheatParams.setUser( currentUserService.getCurrentUser() );
preheatParams.addObject( object );
Preheat preheat = preheat( preheatParams );
connectReferences( object, preheat, PreheatIdentifier.UID );
}
//-----------------------------------------------------------------------------------
// Utility Methods
//-----------------------------------------------------------------------------------
    /**
     * Substitutes the preheat's default object (e.g. default category combo) for
     * a missing or default-valued reference, for properties whose type has a
     * default instance. Data set elements keep a null reference untouched.
     *
     * @param preheat cache holding the default objects per class
     * @param property the reference property being connected
     * @param object the owning object
     * @param refObject the raw reference as read from the owner (may be null)
     * @param ref the reference as resolved from the preheat (may be null)
     * @return the (possibly defaulted) reference to assign
     */
    private IdentifiableObject connectDefaults( Preheat preheat, Property property, Object object,
        IdentifiableObject refObject, IdentifiableObject ref )
    {
        Map<Class<? extends IdentifiableObject>, IdentifiableObject> defaults = preheat.getDefaults();
        // data set elements deliberately keep missing references as null
        if ( refObject == null && DataSetElement.class.isInstance( object ) )
        {
            return null;
        }
        IdentifiableObject defaultObject = defaults.get( property.getKlass() );
        if ( Preheat.isDefaultClass( property.getKlass() ) )
        {
            // NOTE(review): assumes defaults always contains an entry for every default
            // class - defaultObject.getUid() would NPE otherwise; confirm upstream.
            if ( refObject == null || ( refObject.getUid() != null && refObject.getUid().equals( defaultObject.getUid() ) ) )
            {
                ref = defaultObject;
            }
        }
        return ref;
    }
private void cleanEmptyEntries( Map<Class<? extends IdentifiableObject>, Set<String>> map )
{
Set<Class<? extends IdentifiableObject>> classes = new HashSet<>( map.keySet() );
classes.stream().filter( klass -> map.get( klass ).isEmpty() ).forEach( map::remove );
}
@SuppressWarnings( "unchecked" )
private void addIdentifiers( Map<PreheatIdentifier, Map<Class<? extends IdentifiableObject>, Set<String>>> map, IdentifiableObject identifiableObject )
{
if ( identifiableObject == null ) return;
Map<Class<? extends IdentifiableObject>, Set<String>> uidMap = map.get( PreheatIdentifier.UID );
Map<Class<? extends IdentifiableObject>, Set<String>> codeMap = map.get( PreheatIdentifier.CODE );
Class<? extends IdentifiableObject> klass = (Class<? extends IdentifiableObject>) ReflectionUtils.getRealClass( identifiableObject.getClass() );
if ( !uidMap.containsKey( klass ) ) uidMap.put( klass, new HashSet<>() );
if ( !codeMap.containsKey( klass ) ) codeMap.put( klass, new HashSet<>() );
if ( !StringUtils.isEmpty( identifiableObject.getUid() ) ) uidMap.get( klass ).add( identifiableObject.getUid() );
if ( !StringUtils.isEmpty( identifiableObject.getCode() ) ) codeMap.get( klass ).add( identifiableObject.getCode() );
}
private Map<String, Map<Object, String>> handleUniqueProperties( Schema schema, List<IdentifiableObject> objects )
{
List<Property> uniqueProperties = schema.getProperties().stream()
.filter( p -> p.isPersisted() && p.isOwner() && p.isUnique() && p.isSimple() )
.collect( Collectors.toList() );
Map<String, Map<Object, String>> map = new HashMap<>();
for ( IdentifiableObject object : objects )
{
uniqueProperties.forEach( property ->
{
if ( !map.containsKey( property.getName() ) ) map.put( property.getName(), new HashMap<>() );
Object value = ReflectionUtils.invokeMethod( object, property.getGetterMethod() );
if ( value != null ) map.get( property.getName() ).put( value, object.getUid() );
} );
}
return map;
}
private IdentifiableObject getPersistedObject( Preheat preheat, PreheatIdentifier identifier, IdentifiableObject ref )
{
if ( Period.class.isInstance( ref ) )
{
IdentifiableObject period = preheat.getPeriodMap().get( ref.getName() );
if ( period == null )
{
period = periodService.reloadIsoPeriod( ref.getName() );
}
if ( period != null )
{
preheat.getPeriodMap().put( period.getName(), (Period) period );
}
return period;
}
return preheat.get( identifier, ref );
}
private boolean skipConnect( Class<?> klass )
{
return klass != null && (UserCredentials.class.isAssignableFrom( klass ) || EmbeddedObject.class.isAssignableFrom( klass ));
}
}
| |
package org.tolweb.hivemind;
import java.awt.Dimension;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashSet;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.hivemind.util.PropertyUtils;
import org.apache.log4j.Logger;
import org.apache.tapestry.IMarkupWriter;
import org.apache.tapestry.IPage;
import org.apache.tapestry.IRender;
import org.apache.tapestry.IRequestCycle;
import org.apache.tapestry.engine.ExternalServiceParameter;
import org.apache.tapestry.engine.IEngineService;
import org.apache.tapestry.request.IUploadFile;
import org.jdom.Document;
import org.jdom.Element;
import org.tolweb.dao.ImageDAO;
import org.tolweb.dao.NodeDAO;
import org.tolweb.dao.PageDAO;
import org.tolweb.hibernate.MappedNode;
import org.tolweb.hibernate.Movie;
import org.tolweb.hibernate.Sound;
import org.tolweb.hibernate.Student;
import org.tolweb.misc.ImageUtils;
import org.tolweb.misc.UsePermissionHelper;
import org.tolweb.tapestry.EditImageData;
import org.tolweb.tapestry.EditTitleIllustrations;
import org.tolweb.tapestry.IImageCallback;
import org.tolweb.treegrow.main.Contributor;
import org.tolweb.treegrow.main.ImageVersion;
import org.tolweb.treegrow.main.NodeImage;
import org.tolweb.treegrow.main.StringUtils;
import org.tolweb.treegrow.main.XMLConstants;
import org.tolweb.treegrowserver.ServerXMLReader;
/**
 * Hivemind service that handles media (image/movie/sound) persistence for the
 * Tree of Life web project: saving uploads (single files and zip batches),
 * generating versions, deleting media, and building Tapestry page callbacks
 * for editing/deleting/copying media. Collaborators are injected via the
 * setter methods at the bottom of the class.
 */
public class ImageHelperImpl extends AppStateManagerAware implements ImageHelper {
    // injected collaborators (wired by Hivemind through the setters below)
    private ImageDAO imageDAO;
    private PageDAO workingPageDAO;
    private PageDAO publicPageDAO;
    private NodeDAO workingNodeDAO;
    private ImageUtils imageUtils;
    private UsePermissionHelper usePermissionHelper;
    private ServerXMLReader serverXMLReader;
    // lazily created, cached callbacks.
    // NOTE(review): no synchronization around the lazy init - assumes
    // single-threaded access or an idempotent race; confirm container threading.
    private IImageCallback editCallback;
    private IImageCallback deleteCallback;
    private IImageCallback copyDataCallback;
    private IEngineService externalService;
    private static Logger logger;
    static {
        logger = Logger.getLogger(ImageHelperImpl.class);
    }
    /**
     * Callback that checks the image out to the current contributor and
     * activates the appropriate edit page for the media type.
     */
    public IImageCallback getEditCallback() {
        if (editCallback == null) {
            editCallback = new IImageCallback() {
                public void actOnImage(NodeImage img, IRequestCycle cycle) {
                    String editPageName;
                    editPageName = EditImageData.getEditPageNameForMedia(img);
                    EditImageData editPage = (EditImageData) cycle.getPage(editPageName);
                    editPage.setImage(img);
                    // mark the image as checked out by the current contributor
                    img.setCheckedOut(true);
                    img.setOnlineCheckedOut(true);
                    img.setCheckedOutContributor(getContributor());
                    img.setCheckoutDate(new Date());
                    getImageDAO().saveImage(img);
                    cycle.activate(editPage);
                }
            };
        }
        return editCallback;
    }
    /**
     * Callback that deletes the image and returns to the given search page.
     * NOTE(review): the callback is cached on first call, so later calls with a
     * DIFFERENT editedObjectId/searchPageName still get the first callback's
     * captured values - confirm this is intended.
     */
    public IImageCallback getDeleteCallback(final Long editedObjectId, final String searchPageName) {
        if (deleteCallback == null) {
            deleteCallback = new IImageCallback() {
                public void actOnImage(NodeImage img, IRequestCycle cycle) {
                    if (logger.isDebugEnabled()) {
                        Contributor contr = getContributor();
                        logger.debug("Contributor : " + contr.getNameOrInstitution() + " just deleted image: " +
                                img.getLocation() + " with id: " + img.getId());
                    }
                    deleteImage(img);
                    IPage page = cycle.getPage(searchPageName);
                    try {
                        // best effort: not every search page exposes editedObjectId
                        PropertyUtils.write(page, "editedObjectId", editedObjectId);
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                    cycle.activate(page);
                }
            };
        }
        return deleteCallback;
    }
    /**
     * Callback that copies the data of the selected image onto the image
     * currently being edited in EditImageData.
     */
    public IImageCallback getCopyDataCallback() {
        if (copyDataCallback == null) {
            copyDataCallback = new IImageCallback() {
                public void actOnImage(NodeImage img, IRequestCycle cycle) {
                    EditImageData editPage = (EditImageData) cycle.getPage("EditImageData");
                    editPage.getImage().setValues(img, false, false);
                    cycle.activate(editPage);
                }
            };
        }
        return copyDataCallback;
    }
    /**
     * Callback that adds the usable versions of the selected image as a title
     * illustration on the given edit page. Unlike the other callbacks this one
     * is created fresh on every call (it is not cached in a field).
     */
    @SuppressWarnings("unchecked")
    public IImageCallback getTillusCallback(final Long editedObjectId, final String pageName) {
        IImageCallback addTillusCallback = new IImageCallback() {
            public void actOnImage(NodeImage img, IRequestCycle cycle) {
                EditTitleIllustrations editPage = (EditTitleIllustrations) cycle.getPage(pageName);
                editPage.setEditedObjectId(editedObjectId);
                ImageDAO dao = getImageDAO();
                List versions = dao.getUsableVersionsForImage(img);
                editPage.addTitleIllustration(versions);
                cycle.activate(editPage);
            }
        };
        return addTillusCallback;
    }
    /**
     * Deletes the image together with its versions and any title illustrations
     * (working and public) that point at those versions.
     */
    @SuppressWarnings("unchecked")
    public void deleteImage(NodeImage img) {
        List versions = getImageDAO().getVersionsForImage(img);
        List versionIds = new ArrayList();
        // dont need to bother looking for versions if it's a document, sound, movie, etc.
        if (versions.size() > 0) {
            for (Iterator iter = versions.iterator(); iter.hasNext();) {
                ImageVersion version = (ImageVersion) iter.next();
                versionIds.add(version.getVersionId());
            }
            // check to see if there are any title illustrations
            getWorkingPageDAO().deleteTitleIllustrationsPointingAtVersionIds(versionIds);
            getPublicPageDAO().deleteTitleIllustrationsPointingAtVersionIds(versionIds);
            for (Iterator iter = versions.iterator(); iter.hasNext();) {
                ImageVersion nextVersion = (ImageVersion) iter.next();
                imageDAO.deleteImageVersion(nextVersion);
            }
        }
        getImageDAO().deleteImage(img);
    }
    // Convenience wrapper: save/write without associating a node.
    public void writeOutImage(NodeImage img, IUploadFile file) {
        saveAndWriteOutImage(img, file, null);
    }
    /**
     * Treat things a little differently if the contributor is
     * using the simple media form
     * @param contr
     * @return
     */
    public boolean getContributorShouldUseSimpleMedia(Contributor contr) {
        return (contr.getContributorType() == Contributor.TREEHOUSE_CONTRIBUTOR ||
                Student.class.isInstance(contr) && !getUseRegularImageForm());
    }
    public boolean getContributorShouldUseSimpleMedia() {
        return getContributorShouldUseSimpleMedia(getContributor());
    }
    /**
     * used for flickr importer
     */
    public void saveAndWriteOutImageStream(NodeImage img, InputStream stream, String filename, Long nodeId) {
        saveAndWriteOutImageStream(img, stream, filename, nodeId, getContributor());
    }
    /**
     * used for flickr importer
     */
    public void saveAndWriteOutImageStream(NodeImage img, InputStream stream, String filename,
            Long nodeId, Contributor contr) {
        // location must be non-null before the initial save
        img.setLocation("");
        initNode(img, nodeId);
        getImageDAO().addImage(img, contr, false);
        saveToDisk(img, stream, filename);
        getImageDAO().saveImage(img);
        initVersions(img);
    }
    /**
     * Utility method for writing out a new image and its thumbnail
     * @param img The image object to write out
     * @param file The uploaded image file -- if null, called from zip uploading
     * @param isPreview Whether to write out a preview thumbnail
     */
    public void saveAndWriteOutImage(NodeImage img, IUploadFile file, Long nodeId) {
        Contributor contr = getContributor();
        // id <= 0 means the image has not been persisted yet
        if (img.getId() <= 0) {
            // only init permissions on non zip uploads
            if (file != null) {
                img.setUsePermission(NodeImage.EVERYWHERE_USE);
            }
            String location = img.getLocation();
            if (StringUtils.isEmpty(location)) {
                // Going to set this so we don't get exceptions on the initial save
                img.setLocation("");
            }
            initNode(img, nodeId);
            if (getContributorShouldUseSimpleMedia()) {
                img.setIsUnapproved(true);
            }
            // Create the image but only set the contributor as
            // the copyright holder if this is a single image upload
            getImageDAO().addImage(img, contr, file != null && !getContributorShouldUseSimpleMedia());
        }
        if (file != null) {
            saveToDisk(img, file.getStream(), file.getFileName());
            getUsePermissionHelper().initializeNewPermissions(contr, img, true);
        }
        ImageDAO dao = getImageDAO();
        dao.saveImage(img);
        initVersions(img);
    }
    // Writes the stream to disk and records the (slash-stripped) filename on the image.
    private void saveToDisk(NodeImage img, InputStream stream, String filename) {
        String serverImgFile = getImageUtils().writeInputStreamToDisk(stream, filename);
        serverImgFile = getImageUtils().stripSlashesFromFilename(serverImgFile);
        img.setLocation(serverImgFile);
    }
    // Associates the image with the node identified by nodeId, when given.
    private void initNode(NodeImage img, Long nodeId) {
        if (nodeId != null) {
            MappedNode node = getWorkingNodeDAO().getNodeWithId(nodeId);
            img.addToNodesSet(node);
        }
    }
    /**
     * Creates and saves the derived versions for the media: autogenerated
     * thumbnails for plain images, flash-video versions for movies.
     * NOTE(review): with the imports in this file, unqualified Document is
     * org.jdom.Document, so the Document.class check can never match a
     * NodeImage - confirm which Document class was intended here.
     */
    @SuppressWarnings("unchecked")
    private void initVersions(NodeImage img) {
        List versions = new ArrayList();
        if (!Movie.class.isInstance(img) && !Sound.class.isInstance(img) && !Document.class.isInstance(img)) {
            getImageUtils().reinitializeAutogeneratedVersions(img, versions);
            saveVersions(versions);
        } else if (Movie.class.isInstance(img)) {
            Movie newMovie = (Movie) img;
            List newVersions = getImageUtils().writeMovieFilesToDisk(newMovie);
            if (newVersions != null) {
                saveVersions(newVersions);
                newMovie.setUseFlashVideo(true);
                getImageDAO().saveImage(newMovie);
            }
        }
    }
    // Persists each version in the list via the image DAO.
    @SuppressWarnings("unchecked")
    private void saveVersions(List versions) {
        for (Iterator iter = versions.iterator(); iter.hasNext();) {
            ImageVersion nextVersion = (ImageVersion) iter.next();
            getImageDAO().saveImageVersion(nextVersion);
        }
    }
    /**
     * Writes the uploaded file to disk as a new image version and records its
     * filename, dimensions and file size before saving it.
     */
    public void writeOutImageVersion(ImageVersion version, IUploadFile file) {
        ImageUtils utils = getImageUtils();
        String serverImgFile = getImageUtils().writeImageFileToDisk(file);
        version.setFileName(getImageUtils().stripSlashesFromFilename(serverImgFile));
        version.setVersionName("new image version");
        Dimension dims = utils.getVersionDimensions(version);
        version.setHeight(Integer.valueOf(dims.height));
        version.setWidth(Integer.valueOf(dims.width));
        int numBytes = utils.getVersionFilesize(version);
        version.setFileSize(utils.getFileSizeStringFromInt(numBytes));
        ImageDAO dao = getImageDAO();
        dao.saveImageVersion(version);
    }
    /**
     * Unzips the zip file and attempts to apply the image data contained in the
     * xml document to each individual image
     * @param file The upload file containing the image zip
     * @param doc The xml document that contains the information for each image
     * @param contr
     * @return A 3-element array with the first element being a list of the images
     * created, the second being a set of image filenames that weren't matched in
     * the xml, and the third being a set of image filenames present in the xml
     * but not present in the zip file
     */
    @SuppressWarnings("unchecked")
    public Object[] saveAndWriteOutZipFile(IUploadFile file, Document doc, Contributor contr) {
        Map filenames = getImageUtils().createImagesFromZip(file);
        // set of filenames that were found in the zip file
        Set originalImageFiles = new HashSet(filenames.keySet());
        // list from which filenames will be removed as they are seen in the zip file
        List strayXMLFilenames = getAllImageElementFilenames(doc);
        // the filenames in the xml document (as they were uploaded)
        List preservedXMLFilenames = new ArrayList(strayXMLFilenames);
        Hashtable newImages = new Hashtable();
        // TODO consider changes this to filenames.entrySet() to get the key/value
        // Map.Entry iterator
        for (Iterator iter = filenames.keySet().iterator(); iter.hasNext();) {
            boolean removeFromLists = false;
            String oldImageName = (String) iter.next();
            String newImageName = (String) filenames.get(oldImageName);
            // skip hidden files and Thumbs.db-style artifacts from the zip
            if (!oldImageName.startsWith(".") && !oldImageName.endsWith(".db")) {
                NodeImage newImage = new NodeImage();
                newImage.setLocation(newImageName);
                // Store things in a hashtable for easy lookup later when we do the sort.
                newImages.put(newImageName, newImage);
                // this would be the part where we'd find the xml record and decode stuff
                if (doc != null) {
                    Element imgElement = getServerXMLReader().getImageElementWithFilename(oldImageName, doc);
                    if (imgElement != null) {
                        getServerXMLReader().getNodeImageFromElement(newImage, imgElement, contr);
                        // Remove it from the list of images since we know we've seen it
                        removeFromLists = true;
                    }
                }
                saveAndWriteOutImage(newImage, null, null);
            } else {
                // we're ignoring it so make sure it gets removed from the lists
                removeFromLists = true;
            }
            if (removeFromLists) {
                originalImageFiles.remove(oldImageName);
                strayXMLFilenames.remove(oldImageName);
            }
        }
        // Before we return, perform a sort so that the image order matches the order in the
        // xml file
        List returnImages = new ArrayList();
        for (Iterator it = preservedXMLFilenames.iterator(); it.hasNext();) {
            String filename = (String) it.next();
            String newFilename = (String) filenames.get(filename);
            if (newFilename != null) {
                NodeImage img = (NodeImage) newImages.get(newFilename);
                // if for whatever reason the image didn't get created,
                // then don't add null to the list
                if (img != null) {
                    returnImages.add(img);
                }
                newImages.remove(newFilename);
            }
        }
        // append any images that were not mentioned in the xml, in no particular order
        for (Iterator iter = newImages.keySet().iterator(); iter.hasNext();) {
            String filename = (String) iter.next();
            NodeImage img = (NodeImage) newImages.get(filename);
            if (img != null) {
                returnImages.add(img);
            }
        }
        Object[] returnArray = new Object[3];
        returnArray[0] = returnImages;
        returnArray[1] = originalImageFiles;
        returnArray[2] = new HashSet(strayXMLFilenames);
        return returnArray;
    }
    // Extracts the filename child of every image element in the xml document.
    @SuppressWarnings("unchecked")
    private List getAllImageElementFilenames(org.jdom.Document doc) {
        List filenames = new ArrayList();
        for (Iterator iter = doc.getRootElement().getChildren(XMLConstants.image).iterator(); iter.hasNext();) {
            Element imgElement = (Element) iter.next();
            String filename = imgElement.getChildText(XMLConstants.filename);
            filenames.add(filename);
        }
        return filenames;
    }
    public String getEditUrlForMedia(NodeImage media, IRequestCycle cycle, Contributor contr) {
        return getEditUrlForMedia(media, cycle, contr, false);
    }
    /**
     * Builds an external-service URL to the edit page for the given media,
     * embedding the contributor's email/password as service parameters.
     * NOTE(review): credentials in the URL - confirm this is acceptable here.
     */
    public String getEditUrlForMedia(NodeImage media, IRequestCycle cycle, Contributor contr, boolean someBool) {
        Object[] params = {contr.getEmail(), contr.getPassword(), Integer.valueOf(media.getId()), someBool};
        ExternalServiceParameter parameters = new ExternalServiceParameter(EditImageData.getEditPageNameForMedia(media), params);
        return getExternalService().getLink(false, parameters).getURL();
    }
    // Renders a meta-refresh tag that redirects the browser to url after 3 seconds.
    public IRender getRedirectDelegate(final String url) {
        IRender delegate = new IRender() {
            public void render(IMarkupWriter writer, IRequestCycle cycle) {
                writer.printRaw("<meta http-equiv=\"refresh\" content=\"3;url=" + url + "\">");
            }
        };
        return delegate;
    }
    // ---- plain Hivemind accessors/mutators below ----
    public ImageDAO getImageDAO() {
        return imageDAO;
    }
    public void setImageDAO(ImageDAO imageDAO) {
        this.imageDAO = imageDAO;
    }
    public ImageUtils getImageUtils() {
        return imageUtils;
    }
    public void setImageUtils(ImageUtils imageUtils) {
        this.imageUtils = imageUtils;
    }
    public PageDAO getPublicPageDAO() {
        return publicPageDAO;
    }
    public void setPublicPageDAO(PageDAO publicPageDAO) {
        this.publicPageDAO = publicPageDAO;
    }
    public PageDAO getWorkingPageDAO() {
        return workingPageDAO;
    }
    public void setWorkingPageDAO(PageDAO workingPageDAO) {
        this.workingPageDAO = workingPageDAO;
    }
    public NodeDAO getWorkingNodeDAO() {
        return workingNodeDAO;
    }
    public void setWorkingNodeDAO(NodeDAO workingNodeDAO) {
        this.workingNodeDAO = workingNodeDAO;
    }
    public UsePermissionHelper getUsePermissionHelper() {
        return usePermissionHelper;
    }
    public void setUsePermissionHelper(UsePermissionHelper usePermissionHelper) {
        this.usePermissionHelper = usePermissionHelper;
    }
    public ServerXMLReader getServerXMLReader() {
        return serverXMLReader;
    }
    public void setServerXMLReader(ServerXMLReader serverXMLReader) {
        this.serverXMLReader = serverXMLReader;
    }
    public IEngineService getExternalService() {
        return externalService;
    }
    public void setExternalService(IEngineService externalService) {
        this.externalService = externalService;
    }
}
| |
package erp.mtrn.data;
import erp.data.SDataConstants;
import erp.lib.SLibConstants;
import erp.lib.SLibUtilities;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.Statement;
import java.util.Date;
import java.util.HashMap;
import java.util.Vector;
import sa.lib.SLibConsts;
public class SDataDpsMinorChanges extends erp.lib.data.SDataRegistry{
    // Shared lookup map; presumably authorization/rejection labels by id - TODO confirm usage.
    public static final HashMap<Integer, String> AutAuthornRejMap = new HashMap<>();
    // composite primary key: year + document id
    protected int mnPkYearId;
    protected int mnPkDocId;
    // transport/shipping details for the document
    protected java.lang.String msDriver;
    protected java.lang.String msPlate;
    protected java.lang.String msTicket;
    // optional (nullable, '_n') foreign keys to the sales agent/supervisor
    protected int mnFkSalesAgentId_n;
    protected int mnFkSalesSupervisorId_n;
    // audit foreign keys: creating, editing and deleting users
    protected int mnFkUserNewId;
    protected int mnFkUserEditId;
    protected int mnFkUserDeleteId;
    // child rows: document entries and notes
    protected java.util.Vector<SDataDpsEntryMinorChanges> mvDbmsDpsEntries;
    protected java.util.Vector<SDataDpsNotes> mvDbmsDpsNotes;
    // setters for the fields above
    public void setPkYearId(int n) { mnPkYearId = n; }
    public void setPkDocId(int n) { mnPkDocId = n; }
    public void setDriver(java.lang.String s) { msDriver = s; }
    public void setPlate(java.lang.String s) { msPlate = s; }
    public void setTicket(java.lang.String s) { msTicket = s; }
    public void setFkSalesAgentId_n(int n) { mnFkSalesAgentId_n = n; }
    public void setFkSalesSupervisorId_n(int n) { mnFkSalesSupervisorId_n = n; }
    public void setFkUserNewId(int n) { mnFkUserNewId = n; }
    public void setFkUserEditId(int n) { mnFkUserEditId = n; }
    public void setFkUserDeleteId(int n) { mnFkUserDeleteId = n; }
    // getters for the fields above
    public int getPkYearId() { return mnPkYearId; }
    public int getPkDocId() { return mnPkDocId; }
    public java.lang.String getDriver() { return msDriver; }
    public java.lang.String getPlate() { return msPlate; }
    public java.lang.String getTicket() { return msTicket; }
    public int getFkSalesAgentId_n() { return mnFkSalesAgentId_n; }
    public int getFkSalesSupervisorId_n() { return mnFkSalesSupervisorId_n; }
    public int getFkUserNewId() { return mnFkUserNewId; }
    public int getFkUserEditId() { return mnFkUserEditId; }
    // live child collections are exposed directly (callers can mutate them)
    public int getFkUserDeleteId() { return mnFkUserDeleteId; }
    public java.util.Vector<erp.mtrn.data.SDataDpsEntryMinorChanges> getDbmsDpsEntries() { return mvDbmsDpsEntries; }
    public java.util.Vector<erp.mtrn.data.SDataDpsNotes> getDbmsDpsNotes() { return mvDbmsDpsNotes; }
public SDataDpsMinorChanges() {
super(SDataConstants.TRN_DPS);
mlRegistryTimeout = 1000 * 60 * 60 * 2; // 2 hr
mvDbmsDpsEntries = new Vector<>();
mvDbmsDpsNotes = new Vector<>();
reset();
}
public void setEntry(SDataDpsEntryMinorChanges v){
mvDbmsDpsEntries.add(v);
}
@Override
public void setPrimaryKey(Object pk) {
mnPkYearId = ((int[]) pk)[0];
mnPkDocId = ((int[]) pk)[1];
}
@Override
public Object getPrimaryKey() {
return new int[] { mnPkYearId, mnPkDocId };
}
@Override
public void reset() {
super.resetRegistry();
mnPkYearId = 0;
mnPkDocId = 0;
msDriver = "";
msPlate = "";
msTicket = "";
mnFkSalesAgentId_n = 0;
mnFkSalesSupervisorId_n = 0;
mnFkUserNewId = 0;
mnFkUserEditId = 0;
mnFkUserDeleteId = 0;
mvDbmsDpsEntries.clear();
mvDbmsDpsNotes.clear();
}
public void setData(SDataDps Data){
int sizeEntry = 0;
int sizeNota = 0;
mnPkYearId = Data.getPkYearId();
mnPkDocId = Data.getPkDocId();
mnFkUserEditId = Data.getFkUserEditId();
msDriver = Data.getDriver();
msPlate = Data.getPlate();
msTicket = Data.getTicket();
mnFkSalesAgentId_n = Data.getFkSalesAgentId_n();
mnFkSalesSupervisorId_n = Data.getFkSalesSupervisorId_n();
Vector<SDataDpsEntry> entries = Data.getDbmsDpsEntries();
sizeEntry = entries.size();
Vector<SDataDpsNotes> notas = Data.getDbmsDpsNotes();
sizeNota = notas.size();
for(int i = 0; i < sizeEntry; i++){
SDataDpsEntryMinorChanges entry = new SDataDpsEntryMinorChanges();
entry.setData(entries.get(i));
mvDbmsDpsEntries.add(entry);
}
for(int i = 0; i < sizeNota; i++){
SDataDpsNotes notes = new SDataDpsNotes();
mvDbmsDpsNotes.add(notas.get(i));
}
}
@Override
public int read(Object pk, Statement statement) {
throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
}
@Override
public int save(Connection connection) {
mnLastDbActionResult = SLibConsts.UNDEFINED;
String Agent;
String Super;
Statement oStatement = null;
String sSql = "";
ResultSet oResultSet = null;
try {
oStatement = connection.createStatement();
for (SDataDpsEntryMinorChanges entry : mvDbmsDpsEntries) {
if (entry.getIsRegistryNew() || entry.getIsRegistryEdited()) {
entry.setPkYearId(mnPkYearId);
entry.setPkDocId(mnPkDocId);
//if(entry.getIsEdited()){
if (entry.save(connection) != SLibConstants.DB_ACTION_SAVE_OK) {
throw new Exception(SLibConstants.MSG_ERR_DB_REG_SAVE_DEP);
}
//}
}
}
for (SDataDpsNotes notes : mvDbmsDpsNotes) {
if (notes.getIsRegistryNew() || notes.getIsRegistryEdited()) {
notes.setPkYearId(mnPkYearId);
notes.setPkDocId(mnPkDocId);
if (notes.save(connection) != SLibConstants.DB_ACTION_SAVE_OK) {
throw new Exception(SLibConstants.MSG_ERR_DB_REG_SAVE_DEP);
}
}
}
if(mnFkSalesAgentId_n != 0){
Agent = " fid_sal_agt_n = " + mnFkSalesAgentId_n;
}else{
Agent = "fid_sal_agt_n = NULL";
}
if(mnFkSalesSupervisorId_n != 0){
Super = " fid_sal_sup_n = " + mnFkSalesSupervisorId_n;
}else{
Super = " fid_sal_sup_n = NULL";
}
sSql = "UPDATE trn_dps SET" +
" driver = " + '"' + msDriver +'"' + "," +
" plate = " + '"' + msPlate + '"' + "," +
" ticket = " + '"' + msTicket + '"' + "," +
Agent + "," +
Super + "," +
"fid_usr_edit = " + mnFkUserEditId + "," +
"ts_edit = NOW()" +
" WHERE id_year = " + mnPkYearId + " AND id_doc = " + mnPkDocId + " ";
oStatement.execute(sSql);
mnLastDbActionResult = SLibConstants.DB_ACTION_SAVE_OK;
}
catch (Exception e) {
mnLastDbActionResult = SLibConstants.DB_ACTION_SAVE_ERROR;
if (msDbmsError.isEmpty()) {
msDbmsError = SLibConstants.MSG_ERR_DB_REG_SAVE;
}
msDbmsError += "\n" + e.toString();
SLibUtilities.printOutException(this, e);
}
return mnLastDbActionResult;
}
@Override
public Date getLastDbUpdate() {
throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
}
}
| |
package org.myrobotlab.kinematics;
import java.io.Serializable;
import java.util.ArrayList;
import org.myrobotlab.logging.LoggerFactory;
import org.myrobotlab.service.InverseKinematics3D;
import org.slf4j.Logger;
/**
 * A serial robot arm described by Denavit-Hartenberg links, with numeric
 * (pseudo-inverse Jacobian) inverse kinematics.
 */
public class DHRobotArm implements Serializable {

    private static final long serialVersionUID = 1L;

    transient public final static Logger log = LoggerFactory.getLogger(DHRobotArm.class);

    // Hard cap on gradient-descent iterations before moveToGoal() gives up.
    private int maxIterations = 10000;

    private ArrayList<DHLink> links;

    public String name;

    // for debugging .. hmmm
    public transient InverseKinematics3D ik3D = null;

    /** Creates an arm with no links. */
    public DHRobotArm() {
        super();
        links = new ArrayList<DHLink>();
    }

    /** Deep-copy constructor: duplicates every link of {@code copy}. */
    public DHRobotArm(DHRobotArm copy) {
        super();
        name = copy.name;
        links = new ArrayList<DHLink>();
        for (DHLink link : copy.links) {
            links.add(new DHLink(link));
        }
    }

    /**
     * Appends a link to the arm.
     *
     * @param link the link to add
     * @return the (live) list of links, including the new one
     */
    public ArrayList<DHLink> addLink(DHLink link) {
        links.add(link);
        return links;
    }

    /**
     * Numerically approximates the pseudo-inverse of the arm's Jacobian by
     * perturbing each joint by a small delta and measuring the palm movement.
     * Only the x/y/z rows are populated; orientation rows are a TODO.
     *
     * @return a pseudo-inverse mapping palm x/y/z deltas to joint deltas, or a
     *         zero matrix when the pseudo-inverse could not be computed
     */
    public Matrix getJInverse() {
        // finite-difference step size (something small)
        double delta = 0.0001;
        int numLinks = this.getNumLinks();

        // 3 x numLinks Jacobian: rows are x, y, z. Rotation information can be
        // added as rows 4..6 once the algorithm supports it.
        Matrix jacobian = new Matrix(3, numLinks);
        Point basePosition = this.getPalmPosition();

        // For each joint: rotate forward by delta, observe the palm displacement,
        // rotate back, and record the finite-difference slope (approx gradient of P).
        for (int j = 0; j < numLinks; j++) {
            this.getLink(j).incrRotate(delta);
            Point palmPoint = this.getPalmPosition();
            Point deltaPoint = palmPoint.subtract(basePosition);
            this.getLink(j).incrRotate(-delta);
            double dXdj = deltaPoint.getX() / delta;
            double dYdj = deltaPoint.getY() / delta;
            double dZdj = deltaPoint.getZ() / delta;
            jacobian.elements[0][j] = dXdj;
            jacobian.elements[1][j] = dYdj;
            jacobian.elements[2][j] = dZdj;
            // TODO: get orientation roll/pitch/yaw
        }

        // The pseudo-inverse maps deltaTheta[i] to delta[x,y,z].
        Matrix jInverse = jacobian.pseudoInverse();
        if (jInverse == null) {
            // fall back to a zero matrix so callers get a no-op step instead of an NPE
            jInverse = new Matrix(3, numLinks);
        }
        return jInverse;
    }

    /**
     * @param i zero-based link index
     * @return the link at {@code i}, or null when the index is out of range
     */
    public DHLink getLink(int i) {
        // Fixed bounds check: the original condition (links.size() >= i) let
        // i == links.size() — and any negative i — reach links.get(i), which
        // threw IndexOutOfBoundsException instead of returning null.
        if (i >= 0 && i < links.size()) {
            return links.get(i);
        }
        log.warn("getLink({}) out of range, arm has {} links", i, links.size());
        return null;
    }

    public ArrayList<DHLink> getLinks() {
        return links;
    }

    public int getNumLinks() {
        return links.size();
    }

    /**
     * Computes the world position of the joint at {@code index} by chaining the
     * DH transform matrices of links 0..index.
     *
     * @param index zero-based joint index
     * @return the joint position with orientation components zeroed, or null
     *         when the index is out of range
     */
    public synchronized Point getJointPosition(int index) {
        // Fixed bounds check: the original used (index > links.size()), letting
        // index == links.size() through and throwing IndexOutOfBoundsException
        // inside the loop below.
        if (index >= this.links.size() || index < 0) {
            return null;
        }
        // start from the 4x4 identity (initial frame oriented around x)
        Matrix m = new Matrix(4, 4);
        m.elements[0][0] = 1;
        m.elements[1][1] = 1;
        m.elements[2][2] = 1;
        m.elements[3][3] = 1;
        for (int i = 0; i <= index; i++) {
            DHLink link = links.get(i);
            Matrix s = link.resolveMatrix();
            m = m.multiply(s);
        }
        // the last column of the accumulated transform is the translation
        double x = m.elements[0][3];
        double y = m.elements[1][3];
        double z = m.elements[2][3];
        return new Point(x, y, z, 0, 0, 0);
    }

    /**
     * Computes the palm position by chaining link transforms up to (and
     * including) the link named {@code lastDHLink}.
     *
     * @param lastDHLink the name of the link to stop after, or null to use the
     *          whole chain
     * @return the palm x,y,z plus pitch/roll/yaw in degrees
     */
    public Point getPalmPosition(String lastDHLink) {
        // accumulate the chain transform, starting from the 4x4 identity
        Matrix m = new Matrix(4, 4);
        m.elements[0][0] = 1;
        m.elements[1][1] = 1;
        m.elements[2][2] = 1;
        m.elements[3][3] = 1;
        for (int i = 0; i < links.size(); i++) {
            Matrix s = links.get(i).resolveMatrix();
            m = m.multiply(s);
            if (links.get(i).getName() != null && links.get(i).getName().equals(lastDHLink)) {
                break;
            }
        }
        // translation part of the accumulated transform
        double x = m.elements[0][3];
        double y = m.elements[1][3];
        double z = m.elements[2][3];
        double pitch = Math.atan2(-1.0 * (m.elements[2][0]), Math.sqrt(m.elements[0][0] * m.elements[0][0] + m.elements[1][0] * m.elements[1][0]));
        double roll = 0;
        double yaw = 0;
        if (pitch == Math.PI / 2) {
            roll = Math.atan2(m.elements[0][1], m.elements[1][1]);
        } else if (pitch == -1 * Math.PI / 2) {
            roll = Math.atan2(m.elements[0][1], m.elements[1][1]) * -1;
        } else {
            // NOTE(review): dividing the atan2 *result* by cos(pitch) (not just its
            // arguments) is unusual for a rotation-matrix-to-Euler conversion —
            // preserved as-is, but worth confirming against the intended convention.
            roll = Math.atan2(m.elements[2][1] / Math.cos(pitch), m.elements[2][2]) / Math.cos(pitch);
            yaw = Math.atan2(m.elements[1][0] / Math.cos(pitch), m.elements[0][0] / Math.cos(pitch)) - Math.PI / 2;
        }
        return new Point(x, y, z, pitch * 180 / Math.PI, roll * 180 / Math.PI, yaw * 180 / Math.PI);
    }

    /** Moves every joint to the midpoint of its min/max range. */
    public void centerAllJoints() {
        for (DHLink link : links) {
            double center = (link.getMax() + link.getMin()) / 2.0;
            log.debug("Centering Servo {} to {} degrees", link.getName(), center);
            link.setTheta(center);
        }
    }

    /**
     * Iteratively moves the palm toward {@code goal} using pseudo-inverse
     * Jacobian gradient descent.
     *
     * @param goal target palm position (millimeters)
     * @return true when the palm got within the error threshold, false when the
     *         iteration cap was hit first
     */
    public boolean moveToGoal(Point goal) {
        int numSteps = 0;
        double iterStep = 0.05;      // fraction of the remaining delta applied per step
        double errorThreshold = 2.0; // millimeters
        while (true) {
            numSteps++;
            if (numSteps >= maxIterations) {
                log.info("Attempted to iterate, didn't make it. Current Position: {} Goal: {} Distance: {}", getPalmPosition(), goal, goal.distanceTo(getPalmPosition()));
                // we shouldn't publish if we don't solve!
                return false;
            }
            // TODO: what if its unreachable!
            Point currentPos = this.getPalmPosition();
            log.debug("Current Position " + currentPos);
            // vector from the current position to the goal
            Point deltaPoint = goal.subtract(currentPos);
            Matrix dP = new Matrix(3, 1);
            dP.elements[0][0] = deltaPoint.getX();
            dP.elements[1][0] = deltaPoint.getY();
            dP.elements[2][0] = deltaPoint.getZ();
            // scale the vector towards the goal by the increment step
            dP = dP.multiply(iterStep);
            Matrix jInverse = this.getJInverse();
            Matrix dTheta = jInverse.multiply(dP);
            log.debug("delta Theta + " + dTheta);
            for (int i = 0; i < dTheta.getNumRows(); i++) {
                // update joint positions! move towards the goal!
                double d = dTheta.elements[i][0];
                // TODO: incrRotate needs to be min/max aware here
                this.getLink(i).incrRotate(d);
            }
            if (deltaPoint.magnitude() < errorThreshold) {
                log.info("Final Position {} Number of Iterations {}", getPalmPosition(), numSteps);
                break;
            }
        }
        return true;
    }

    public void setLinks(ArrayList<DHLink> links) {
        this.links = links;
    }

    public void setIk3D(InverseKinematics3D ik3d) {
        ik3D = ik3d;
    }

    /**
     * @return always true — the per-link servo state check was never implemented
     *         (the original iterated the links with a fully commented-out body)
     */
    public boolean armMovementEnds() {
        // TODO: return false while any link's servo is still moving,
        // previously sketched as: link.getState() != Servo.SERVO_EVENT_STOPPED
        return true;
    }

    /**
     * @return a (numLinks + 1) x 3 array of joint x/y/z positions; row 0 is the
     *         origin, row i is the end of link i-1
     */
    public double[][] createJointPositionMap() {
        double[][] jointPositionMap = new double[getNumLinks() + 1][3];
        // first position is the origin... second is the end of the first link
        jointPositionMap[0][0] = 0;
        jointPositionMap[0][1] = 0;
        jointPositionMap[0][2] = 0;
        for (int i = 1; i <= getNumLinks(); i++) {
            Point jp = getJointPosition(i - 1);
            jointPositionMap[i][0] = jp.getX();
            jointPositionMap[i][1] = jp.getY();
            jointPositionMap[i][2] = jp.getZ();
        }
        return jointPositionMap;
    }

    /**
     * @return the direction vector of the last link segment.
     *         NOTE(review): assumes at least two links — with fewer,
     *         getJointPosition returns null and this throws NPE; confirm callers
     *         guarantee that.
     */
    public Point getVector() {
        Point lastJoint = getJointPosition(links.size() - 1);
        Point previousJoint = getJointPosition(links.size() - 2);
        Point retval = lastJoint.subtract(previousJoint);
        return retval;
    }

    /** @return the palm position of the full chain (see getPalmPosition(String)). */
    public Point getPalmPosition() {
        return getPalmPosition(null);
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package groovy.text;
import groovy.lang.*;
import org.codehaus.groovy.control.CompilationFailedException;
import org.codehaus.groovy.control.ErrorCollector;
import org.codehaus.groovy.control.MultipleCompilationErrorsException;
import org.codehaus.groovy.control.messages.Message;
import org.codehaus.groovy.control.messages.SyntaxErrorMessage;
import org.codehaus.groovy.runtime.StackTraceUtils;
import org.codehaus.groovy.syntax.SyntaxException;
import java.io.IOException;
import java.io.LineNumberReader;
import java.io.Reader;
import java.io.StringReader;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
* Processes template source files substituting variables and expressions into
* placeholders in a template source text to produce the desired output using a
* closure based approach. This engine has equivalent functionality to the
* {@link groovy.text.SimpleTemplateEngine} but creates the template using
* writable closures making it more scalable for large templates.
* <p>
* Specifically this template engine can handle strings larger than 64k which
* still causes problems for the other groovy template engines.
* </p>
* <p>
* The template engine uses JSP style <% %> script and <%= %>
* expression syntax or GString style expressions. The variable
* '<code>out</code>' is bound to the writer that the template is being written
* to.
* </p>
* Frequently, the template source will be in a file but here is a simple
* example providing the template as a string:
* <pre>
*
* def binding = [
* firstname : "Grace",
* lastname : "Hopper",
* accepted : true,
* title : 'Groovy for COBOL programmers'
* ]
* def text = '''\
* Dear <% out.print firstname %> ${lastname},
*
* We <% if (accepted) out.print 'are pleased' else out.print 'regret' %> \
* to inform you that your paper entitled
* '$title' was ${ accepted ? 'accepted' : 'rejected' }.
*
* The conference committee.
* '''
*
* def template = new groovy.text.StreamingTemplateEngine().createTemplate(text)
*
* print template.make(binding)
* </pre>
*
* This example uses a mix of the JSP style and GString style
* placeholders but you can typically use just one style if you wish. Running
* this example will produce this output:
*
* <pre>
*
* Dear Grace Hopper,
*
* We are pleased to inform you that your paper entitled
* 'Groovy for COBOL programmers' was accepted.
*
* The conference committee.
* </pre>
* <br />
* <h3>StreamingTemplateEngine as a servlet engine</h3>
* The template engine can also be used as the engine for
* {@link groovy.servlet.TemplateServlet} by placing the following in your
* <code>web.xml</code> file (plus a corresponding servlet-mapping element):
* <pre>
*
* <servlet>
* <servlet-name>StreamingTemplate</servlet-name>
* <servlet-class>groovy.servlet.TemplateServlet</servlet-class>
* <init-param>
* <param-name>template.engine</param-name>
* <param-value>groovy.text.StreamingTemplateEngine</param-value>
* </init-param>
* </servlet>
* </pre> In this case, your template source file should be HTML with the
* appropriate embedded placeholders.
*
* <h3>Debugging Template Code</h3>
* <p>The template engine makes an effort to throw descriptive exceptions with
* context lines, ie:
* <pre>
* groovy.text.TemplateExecutionException: Template parse error at line 4:
* 3: We <% if (accepted) out.print 'are pleased' else out.print 'regret' %> to inform you that your paper entitled
* --> 4: '$txitle' was ${ accepted ? 'accepted' : 'rejected' }.
* 5:
* at test.run(test.groovy:18)
*
* Caused by: groovy.lang.MissingPropertyException: No such property: txitle for class: groovy.tmp.templates.StreamingTemplateScript1
* ... 1 more
* </pre>
* and sanitize the exceptions to make things readable.
* </p>
* <p>When the exceptions are not enough, it might sometimes be useful to view the actual script source
* generated by the template engine. This would conceptually be equivalent to viewing the
* .java file generated for a jsp page. The source is not currently very readable and
* until we get a built in groovy code pretty printer, we will probably continue to opt for compactness
* rather than readability.</p>
*
* <p>With that being said, viewing the source might still have some value. For this reason the script
* source is accessible via the template.scriptSource property, i.e.:
* <pre>
* println template.scriptSource
* </pre>
* In the above example.
* </p>
*
* @author mbjarland@gmail.com
* @author Matias Bjarland
*/
public class StreamingTemplateEngine extends TemplateEngine {
private static final String TEMPLATE_SCRIPT_PREFIX = "StreamingTemplateScript";
private final ClassLoader parentLoader;
private static int counter = 1;
/**
* Create a streaming template engine instance using the default class loader
*/
public StreamingTemplateEngine() {
    // delegate to the loader that loaded the template implementation class
    this(StreamingTemplate.class.getClassLoader());
}
/**
* Create a streaming template engine instance using a custom class loader
*
* <p>The custom loader is used when parsing the template code</p>
*
* @param parentLoader The class loader to use when parsing the template code.
*/
public StreamingTemplateEngine(ClassLoader parentLoader) {
    // retained for later use when the generated template script is compiled
    this.parentLoader = parentLoader;
}
/**
* <p>Creates a template instance using the template source from the provided Reader.</p>
*
* <p>The template can be applied repeatedly on different bindings to produce custom
* output.</p>
*
*
* <strong>Technical detail</strong><br />
* Under the hood the returned template is represented as a four argument
* closure where the three first arguments are {@link groovy.lang.Closure#curry curried} in
* while generating the template. <br />
* <br />
* In essence we start with a closure on the form:
*
* <pre>
* { parentClass, stringSectionList, binding, out ->
* //code generated by parsing the template data
* } *
* </pre>
*
* , we then curry in the parentClass and stringSectionList arguments so that the StreamingTemplate
* instance returned from 'createTemplate' internally contains a template closure on the form:
*
* <pre>
* { binding, out ->
* //code generated by parsing the template data
* } *
* </pre>
*
* Calling template.make(binding), curries in the 'binding' argument:
*
* <pre>
* public Writable make(final Map map) {
* final Closure template = this.template.curry(new Object[]{map});
* return (Writable) template;
* }
* </pre>
*
* which only leaves the 'out' argument unbound. The only method on the {@link groovy.lang.Writable writable} interface is
* {@link groovy.lang.Writable#writeTo writeTo(Writer out)} so groovy rules about casting a closure to a one-method-interface
* apply and the above works. I.e. we return the now one argument closure as the Writable
* which can be serialized to System.out, a file, etc according to the Writable interface contract.
* </p>
* @see groovy.text.TemplateEngine#createTemplate(java.io.Reader)
*/
@Override
public Template createTemplate(final Reader reader) throws CompilationFailedException, ClassNotFoundException, IOException {
    // all parsing and script compilation happens eagerly in the StreamingTemplate constructor
    return new StreamingTemplate(reader, parentLoader);
}
/**
* The class used to implement the Template interface for the StreamingTemplateEngine
*
*/
private static class StreamingTemplate implements Template {
/**
* The 'header' we use for the resulting groovy script source.
*/
private static final String SCRIPT_HEAD
= "package groovy.tmp.templates;"
+ "def getTemplate() { "
//the below params are:
// _p - parent class, for handling exceptions
// _s - sections, string sections list
// _b - binding map
// out - out stream
//the three first parameters will be curried in as we move along
+ "return { _p, _s, _b, out -> "
+ "int _i = 0;"
+ "try {"
+ "delegate = new Binding(_b);";
/**
* The 'footer' we use for the resulting groovy script source
*/
private static final String SCRIPT_TAIL
= "} catch (Throwable e) { "
+ "_p.error(_i, _s, e);"
+ "}"
+ "}.asWritable()"
+ "}";
private StringBuilder templateSource;
// we use a hard index instead of incrementing the _i variable due to previous
// bug where the increment was not executed when hitting non-executed if branch
private int index = 0;
final Closure template;
String scriptSource;
/** Internal sentinel exception used to exit the read loop at end of template source. */
private static class FinishedReadingException extends Exception {}
//WE USE THIS AS REUSABLE
//CHECKSTYLE.OFF: ConstantNameCheck - special case with a reusable exception
private static final FinishedReadingException finishedReadingException;
//CHECKSTYLE.ON: ConstantNameCheck
public static final StackTraceElement[] EMPTY_STACKTRACE = new StackTraceElement[0];
static {
finishedReadingException = new FinishedReadingException();
finishedReadingException.setStackTrace(EMPTY_STACKTRACE);
}
/** Mutable row/column cursor into a character stream. */
private static final class Position {
    //CHECKSTYLE.OFF: VisibilityModifierCheck - special case, direct access for performance
    public int row;
    public int column;
    //CHECKSTYLE.ON: VisibilityModifierCheck

    private Position(int row, int column) {
        this.row = row;
        this.column = column;
    }

    /** Copy constructor. */
    private Position(Position p) {
        set(p);
    }

    /** Copies the coordinates of {@code p} into this instance. */
    private void set(Position p) {
        this.row = p.row;
        this.column = p.column;
    }

    public String toString() {
        return row + ":" + column;
    }
}
/**
* A StringSection represent a section in the template source
* with only string data (i.e. no branching, GString references, etc).
* As an example, the following template string:
*
* <pre>
* Alice why is a $bird like a writing desk
* </pre>
*
* Would produce a string section "Alice why is a " followed by
* a dollar identifier expression followed by another string
* section " like a writing desk".
*/
private static final class StringSection {
    StringBuilder data;            // accumulated literal template text
    Position firstSourcePosition;  // where this section starts in the template source
    Position lastSourcePosition;   // set once, when the section is finished (see finishStringSection)
    Position lastTargetPosition;   // position written to in the generated script

    private StringSection(Position firstSourcePosition) {
        this.data = new StringBuilder();
        // defensive copy — the caller's Position keeps mutating as parsing continues
        this.firstSourcePosition = new Position(firstSourcePosition);
    }

    @Override
    public String toString() {
        return data.toString();
    }
}
/**
* Called to handle the ending of a string section.
*
* @param sections The list of string sections. The current section gets added to this section.
* @param currentSection The current string section.
* @param templateExpressions Template expressions
* @param lastSourcePosition The last read position in the source template stream.
* @param targetPosition The last written to position in the target script stream.
*/
private void finishStringSection(List<StringSection> sections, StringSection currentSection,
        StringBuilder templateExpressions,
        Position lastSourcePosition, Position targetPosition) {
    //when we get exceptions from the parseXXX methods in the main loop, we might try to
    //re-finish a section
    if (currentSection.lastSourcePosition != null) {
        return; // already finished — this makes the operation idempotent
    }
    // defensive copy: the caller's Position keeps mutating as parsing continues
    currentSection.lastSourcePosition = new Position(lastSourcePosition);
    sections.add(currentSection);
    // Emit script code that writes this section and records its index in _i;
    // _i is handed to error() by SCRIPT_TAIL to map failures back to the source.
    append(templateExpressions, targetPosition, "out<<_s[_i=" + index++ + "];");
    currentSection.lastTargetPosition = new Position(targetPosition.row, targetPosition.column);
}
/**
 * Maps a runtime failure inside the generated template script back to the
 * template source and rethrows it as a {@code TemplateExecutionException}
 * with a few lines of source context. Invoked from the generated script's
 * catch block (see SCRIPT_TAIL, which passes {@code _i} as the index).
 *
 * @param index index of the last string section emitted before the failure
 * @param sections all string sections of the template
 * @param e the original failure
 * @throws Throwable always — either the sanitized wrapper or the original error
 */
public void error(int index, List<StringSection> sections, Throwable e) throws Throwable {
    // No sections means there is no source context to attribute the failure to;
    // propagate the original error rather than failing on sections.get(...).
    if (sections.isEmpty()) {
        throw e;
    }
    // Clamp into range: the original floor-at-zero alone allowed an index past
    // the end to raise IndexOutOfBoundsException here, masking the real error.
    int i = Math.min(sections.size() - 1, Math.max(0, index));
    StringSection precedingSection = sections.get(i);
    int traceLine = -1;
    // find the first stack frame that belongs to the generated template script
    for (StackTraceElement element : e.getStackTrace()) {
        if (element.getClassName().contains(TEMPLATE_SCRIPT_PREFIX)) {
            traceLine = element.getLineNumber();
            break;
        }
    }
    if (traceLine != -1) {
        int actualLine = precedingSection.lastSourcePosition.row + traceLine - 1;
        String message = "Template execution error at line " + actualLine + ":\n" + getErrorContext(actualLine);
        TemplateExecutionException unsanitized = new TemplateExecutionException(actualLine, message, StackTraceUtils.sanitize(e));
        throw StackTraceUtils.sanitize(unsanitized);
    } else {
        // no template frame found — nothing to translate, rethrow as-is
        throw e;
    }
}
/**
 * Counts the number of lines in the template source.
 *
 * @return the line count of {@code templateSource}
 * @throws IOException if reading the in-memory source fails
 */
private int getLinesInSource() throws IOException {
    LineNumberReader lineCounter = null;
    try {
        lineCounter = new LineNumberReader(new StringReader(templateSource.toString()));
        // consume the whole stream so the reader's counter reflects the total
        lineCounter.skip(Long.MAX_VALUE);
        return lineCounter.getLineNumber();
    } finally {
        if (lineCounter != null) {
            lineCounter.close();
        }
    }
}
/**
 * Renders a small window of template source (the failing line plus one line of
 * context on each side) with line numbers and a {@code -->} marker on the
 * failing line.
 * Fix: the LineNumberReader is now closed in a finally block (it previously
 * leaked, unlike the reader in getLinesInSource).
 *
 * @param actualLine 1-based template source line the error was mapped to
 * @return the formatted context block
 * @throws IOException if reading the in-memory source fails
 */
private String getErrorContext(int actualLine) throws IOException {
    int minLine = Math.max(0, actualLine - 1);
    int maxLine = Math.min(getLinesInSource(), actualLine + 1);
    LineNumberReader r = new LineNumberReader(new StringReader(templateSource.toString()));
    StringBuilder result = new StringBuilder();
    try {
        int lineNr;
        while ((lineNr = r.getLineNumber() + 1) <= maxLine) {
            String line = r.readLine();
            if (lineNr < minLine) continue; // before the context window — skip
            String nr = Integer.toString(lineNr);
            if (lineNr == actualLine) {
                nr = " --> " + nr; // mark the failing line
            }
            result.append(padLeft(nr, 10));
            result.append(": ");
            result.append(line);
            result.append('\n');
        }
    } finally {
        r.close();
    }
    return result.toString();
}
/** Left-pads {@code s} with spaces up to a minimum width of {@code len}. */
private String padLeft(String s, int len) {
    StringBuilder padded = new StringBuilder();
    for (int remaining = len - s.length(); remaining > 0; remaining--) {
        padded.append(' ');
    }
    return padded.append(s).toString();
}
/**
* Turn the template into a writable Closure. When executed the closure
* evaluates all the code embedded in the template and then writes a
* GString containing the fixed and variable items to the writer passed
* as a parameter
* <p/>
* For example:
* <pre>
* '<%= "test" %> of expr and <% test = 1 %>${test} script.'
* </pre>
* would compile into:
* <pre>
* { out -> out << "${"test"} of expr and "; test = 1 ; out << "${test} script."}.asWritable()
* </pre>
* @param source A reader into the template source data
* @param parentLoader A class loader we use
* @throws CompilationFailedException
* @throws ClassNotFoundException
* @throws IOException
*/
StreamingTemplate(final Reader source, final ClassLoader parentLoader) throws CompilationFailedException, ClassNotFoundException, IOException {
    final StringBuilder target = new StringBuilder();
    List<StringSection> sections = new ArrayList<StringSection>();
    Position sourcePosition = new Position(1, 1);
    Position targetPosition = new Position(1, 1);
    Position lastSourcePosition = new Position(1, 1);
    StringSection currentSection = new StringSection(sourcePosition);
    templateSource = new StringBuilder();
    //we use the lookAhead to make sure that a template file ending in say "abcdef\\"
    //will give a result of "abcdef\\" even though we have special handling for \\
    StringBuilder lookAhead = new StringBuilder(10);
    append(target, targetPosition, SCRIPT_HEAD);
    try {
        // skipRead holds a character pushed back by parseDollarIdentifier
        int skipRead = -1;
        //noinspection InfiniteLoopStatement
        while (true) {
            lastSourcePosition.set(sourcePosition);
            int c = (skipRead != -1) ? skipRead : read(source, sourcePosition, lookAhead);
            skipRead = -1;
            if (c == '\\') {
                handleEscaping(source, sourcePosition, currentSection, lookAhead);
                continue;
            } else if (c == '<') {
                // possible start of a <% ... %> scriptlet or <%= ... %> expression
                c = read(source, sourcePosition, lookAhead);
                if (c == '%') {
                    c = read(source, sourcePosition);
                    clear(lookAhead);
                    if (c == '=') {
                        // <%= expr %> — close the literal section and parse the expression
                        finishStringSection(sections, currentSection, target, lastSourcePosition, targetPosition);
                        parseExpression(source, target, sourcePosition, targetPosition);
                        currentSection = new StringSection(sourcePosition);
                        continue;
                    } else {
                        // <% code %> — close the literal section and parse the scriptlet
                        finishStringSection(sections, currentSection, target, lastSourcePosition, targetPosition);
                        parseSection(c, source, target, sourcePosition, targetPosition);
                        currentSection = new StringSection(sourcePosition);
                        continue;
                    }
                } else {
                    // lone '<' — treat it as literal text
                    currentSection.data.append('<');
                }
            } else if (c == '$') {
                // possible start of ${expr} or $identifier
                c = read(source, sourcePosition);
                clear(lookAhead);
                if (c == '{') {
                    finishStringSection(sections, currentSection, target, lastSourcePosition, targetPosition);
                    parseDollarCurlyIdentifier(source, target, sourcePosition, targetPosition);
                    currentSection = new StringSection(sourcePosition);
                    continue;
                } else if (Character.isJavaIdentifierStart(c)) {
                    // $identifier — the terminating character is pushed back via skipRead
                    finishStringSection(sections, currentSection, target, lastSourcePosition, targetPosition);
                    skipRead = parseDollarIdentifier(c, source, target, sourcePosition, targetPosition);
                    currentSection = new StringSection(sourcePosition);
                    continue;
                } else {
                    // lone '$' — treat it as literal text
                    currentSection.data.append('$');
                }
            }
            currentSection.data.append((char) c);
            clear(lookAhead);
        }
    } catch (FinishedReadingException e) {
        // flush any characters still pending when the source ended
        if (lookAhead.length() > 0) {
            currentSection.data.append(lookAhead);
        }
        //Ignored here, just used for exiting the read loop. Yeah I know we don't like
        //empty catch blocks or expected behavior throwing exceptions, but this just cleaned out the code
        //_so_ much that I thought it worth it...this once -Matias Bjarland 20100126
    }
    finishStringSection(sections, currentSection, target, sourcePosition, targetPosition);
    append(target, targetPosition, SCRIPT_TAIL);
    scriptSource = target.toString();
    this.template = createTemplateClosure(sections, parentLoader, target);
}
/** Empties the look-ahead buffer. */
private static void clear(StringBuilder lookAhead) {
    // setLength(0) is the idiomatic, allocation-free way to empty a
    // StringBuilder; equivalent to delete(0, length()).
    lookAhead.setLength(0);
}
/**
 * Handles a backslash just read from the template source, implementing the
 * limited escaping rules documented in the block comment below. Literal output
 * is appended to {@code currentSection}.
 *
 * @param source template source reader
 * @param sourcePosition current read position (advanced by reads)
 * @param currentSection literal section receiving the un-escaped output
 * @param lookAhead pending characters, flushed by the caller at end of source
 * @throws IOException on read failure
 * @throws FinishedReadingException when the source is exhausted
 */
private void handleEscaping(final Reader source,
        final Position sourcePosition,
        final StringSection currentSection,
        final StringBuilder lookAhead) throws IOException, FinishedReadingException {
    //if we get here, we just read in a back-slash from the source, now figure out what to do with it
    int c = read(source, sourcePosition, lookAhead);
    /*
     The _only_ special escaping this template engine allows is to escape the sequences:
     ${ and <% and potential slashes in front of these. Escaping in any other sections of the
     source string is ignored. The following is a source -> result mapping of a few values, assume a
     binding of [alice: 'rabbit'].
     Note: we don't do java escaping of slashes in the below
     example, i.e. the source string is what you would see in a text editor when looking at your template
     file:
     source string     result
     'bob'            -> 'bob'
     '\bob'           -> '\bob'
     '\\bob'          -> '\\bob'
     '${alice}'       -> 'rabbit'
     '\${alice}'      -> '${alice}'
     '\\${alice}'     -> '\rabbit'
     '\\$bob'         -> '\\$bob'
     '\\'             -> '\\'
     '\\\'            -> '\\\'
     '%<= alice %>'   -> 'rabbit'
     '\%<= alice %>'  -> '%<= alice %>'
     */
    if (c == '\\') {
        //this means we have received a double backslash sequence
        //if this is followed by ${ or <% we output one backslash
        //and interpret the following sequences with groovy, if followed by anything
        //else we output the two backslashes and continue as usual
        source.mark(3); // so we can push the two peeked characters back
        int d = read(source, sourcePosition, lookAhead);
        c = read(source, sourcePosition, lookAhead);
        clear(lookAhead);
        if ((d == '$' && c == '{') ||
            (d == '<' && c == '%')) {
            source.reset();
            currentSection.data.append('\\');
            return; // the main loop re-reads and interprets ${ / <%
        } else {
            currentSection.data.append('\\');
            currentSection.data.append('\\');
            currentSection.data.append((char) d);
            // c is appended by the shared fall-through below
        }
    } else if (c == '$') {
        c = read(source, sourcePosition, lookAhead);
        if (c == '{') {
            // \${ — drop the backslash, emit literal "${"
            currentSection.data.append('$');
        } else {
            // \$x — not an escape, keep both characters
            currentSection.data.append('\\');
            currentSection.data.append('$');
        }
    } else if (c == '<') {
        c = read(source, sourcePosition, lookAhead);
        if (c == '%') {
            // \<% — drop the backslash, emit literal "<%"
            currentSection.data.append('<');
        } else {
            // \<x — not an escape, keep both characters
            currentSection.data.append('\\');
            currentSection.data.append('<');
        }
    } else {
        // backslash followed by anything else — keep it verbatim
        currentSection.data.append('\\');
    }
    currentSection.data.append((char) c);
    clear(lookAhead);
}
/**
 * Compiles the generated groovy script and extracts the template closure,
 * curried with this engine instance and the parsed string sections.
 *
 * @param sections the string sections parsed out of the template (used for
 *                 error-position mangling on compile failures)
 * @param parentLoader class loader used as parent of the script's loader
 * @param target the generated groovy script source
 * @return the template closure, ready to be curried with a binding map
 * @throws ClassNotFoundException if the compiled script cannot be instantiated
 */
private Closure createTemplateClosure(List<StringSection> sections, final ClassLoader parentLoader, StringBuilder target) throws ClassNotFoundException {
    final GroovyClassLoader loader = AccessController.doPrivileged(new PrivilegedAction<GroovyClassLoader>() {
        public GroovyClassLoader run() {
            return new GroovyClassLoader(parentLoader);
        }
    });
    final Class groovyClass;
    try {
        groovyClass = loader.parseClass(new GroovyCodeSource(target.toString(), TEMPLATE_SCRIPT_PREFIX + counter++ + ".groovy", "x"));
    } catch (MultipleCompilationErrorsException e) {
        // rewrite row/column so they refer to the template source, not the
        // generated script
        throw mangleMultipleCompilationErrorsException(e, sections);
    } catch (Exception e) {
        // preserve the underlying exception as the cause instead of flattening
        // it to a message string
        throw new GroovyRuntimeException("Failed to parse template script (your template may contain an error or be trying to use expressions not currently supported): " + e.getMessage(), e);
    }

    Closure result;
    try {
        final GroovyObject object = (GroovyObject) groovyClass.newInstance();
        Closure chicken = (Closure) object.invokeMethod("getTemplate", null);
        //bind the two first parameters of the generated closure to this class and the sections list
        result = chicken.curry(this, sections);
    } catch (InstantiationException e) {
        throw new ClassNotFoundException(e.getMessage(), e);
    } catch (IllegalAccessException e) {
        throw new ClassNotFoundException(e.getMessage(), e);
    }

    return result;
}
/**
 * Parses a non curly dollar preceded identifier of the type
 * '$bird' in the following template example:
 *
 * <pre>
 * Alice why is a $bird like a writing desk
 * </pre>
 *
 * which would produce the following template data:
 *
 * <pre>
 * out << "Alice why is a ";
 * out << bird;
 * out << " like a writing desk";
 * </pre>
 *
 * This method is given the 'b' in 'bird' in argument c, assumed to be a valid
 * java identifier start (we assume groovy did not mangle the java
 * identifier rules). It proceeds to parse characters from the input
 * until it encounters a non-java-identifier character (or a '$'), at which
 * point the identifier is complete and the terminating character is handed
 * back to the caller for further processing.
 *
 * @param c The first letter of the potential identifier, 'b' in the above example
 * @param reader The reader reading from the template source
 * @param target The target groovy script source we write to
 * @param sourcePosition The reader position in the source stream
 * @param targetPosition The writer position in the target stream
 * @return the character that terminated the identifier; the caller must
 *         process this character itself (at the call site it is stored as a
 *         pending character and re-examined). Note: despite what an older
 *         version of this doc said, the return value is a character, not a
 *         boolean.
 *
 * @throws IOException If reading from the template source fails
 * @throws FinishedReadingException If we encountered the end of the source stream.
 */
private int parseDollarIdentifier(int c,
                                  final Reader reader,
                                  final StringBuilder target,
                                  final Position sourcePosition,
                                  final Position targetPosition) throws IOException, FinishedReadingException {
    append(target, targetPosition, "out<<");
    append(target, targetPosition, (char) c);

    while (true) {
        c = read(reader, sourcePosition);
        if (!Character.isJavaIdentifierPart(c) || c == '$') {
            break;
        }
        append(target, targetPosition, (char) c);
    }

    append(target, targetPosition, ";");
    return c;
}
/**
 * Parses a dollar curly preceded expression of the type
 * '${bird}' in the following template example:
 *
 * <pre>
 * Alice why is a ${bird} like a writing desk
 * </pre>
 *
 * which would produce the following template data:
 *
 * <pre>
 * out << "Alice why is a ";
 * out << """${bird}""";
 * out << " like a writing desk";
 * </pre>
 *
 * The '${' prefix has already been consumed by the caller; this method copies
 * everything up to and including the first '}' verbatim into a GString
 * expression in the target script.
 * NOTE(review): a '}' inside the expression itself (e.g. in a nested closure)
 * terminates parsing early — confirm whether nested braces need support.
 *
 * @param reader The reader reading from the template source
 * @param target The target groovy script source we write to
 * @param sourcePosition The reader position in the source stream
 * @param targetPosition The writer position in the target stream
 * @throws IOException If reading from the template source fails
 * @throws FinishedReadingException If we encountered the end of the source stream
 */
private void parseDollarCurlyIdentifier(final Reader reader,
                                        final StringBuilder target,
                                        final Position sourcePosition,
                                        final Position targetPosition) throws IOException, FinishedReadingException {
    append(target, targetPosition, "out<<\"\"\"${");

    while (true) {
        int c = read(reader, sourcePosition);
        append(target, targetPosition, (char) c);
        if (c == '}') break;
    }

    append(target, targetPosition, "\"\"\";");
}
/**
 * Copies a {@code <% ... %>} scriptlet section verbatim into the target
 * script, terminated with ';' so it executes as a groovy statement.
 */
private void parseSection(final int pendingC,
                          final Reader reader,
                          final StringBuilder target,
                          final Position sourcePosition,
                          final Position targetPosition) throws IOException, FinishedReadingException {
    //the below is a quirk, we do this so that every non-string-section is prefixed by
    //the same number of characters (the others have "out<<\"\"\"${"), this allows us to
    //figure out the exception row and column later on
    append(target, targetPosition, " ");
    append(target, targetPosition, (char) pendingC);

    while (true) {
        int ch = read(reader, sourcePosition);
        if (ch == '%') {
            final int next = read(reader, sourcePosition);
            if (next == '>') {
                break;
            }
            // a lone '%' is ordinary content: emit it, then fall through to
            // emit the character that followed it
            append(target, targetPosition, '%');
            ch = next;
        }
        append(target, targetPosition, (char) ch);
    }

    append(target, targetPosition, ';');
}
/**
 * Copies a {@code <%= ... %>} expression section into the target script,
 * wrapped in a triple-quoted GString so its value is written to the output.
 */
private void parseExpression(final Reader reader,
                             final StringBuilder target,
                             final Position sourcePosition,
                             final Position targetPosition) throws IOException, FinishedReadingException {
    append(target, targetPosition, "out<<\"\"\"${");

    while (true) {
        int ch = read(reader, sourcePosition);
        if (ch == '%') {
            final int next = read(reader, sourcePosition);
            if (next == '>') {
                break;
            }
            // a lone '%' is ordinary content: emit it and the following char
            append(target, targetPosition, '%');
            ch = next;
        }
        append(target, targetPosition, (char) ch);
    }

    append(target, targetPosition, "}\"\"\";");
}
/** Creates a template Writable with no bindings. */
@Override
public Writable make() {
    // delegate to the binding-aware variant
    return make((Map) null);
}
/** Creates a template Writable bound to the given binding map. */
@Override
public Writable make(final Map map) {
    // curry() clones the closure internally, so no explicit template.clone()
    // is needed; binding the map yields the finished Writable closure
    final Closure boundTemplate = this.template.curry(new Object[]{map});
    return (Writable) boundTemplate;
}
/*
 * Create groovy assertion style error message for template error. Example:
 *
 * Error parsing expression on line 71 column 15, message: no such property jboss for class DUMMY
 * templatedata${jboss}templateddatatemplateddata
 *             ^------^
 *                |
 *          syntax error
 */
private RuntimeException mangleMultipleCompilationErrorsException(MultipleCompilationErrorsException e, List<StringSection> sections) {
    RuntimeException result = e;

    ErrorCollector collector = e.getErrorCollector();
    @SuppressWarnings({"unchecked"})
    List<Message> errors = (List<Message>) collector.getErrors();
    if (errors.size() > 0) {
        // only the first error is translated; subsequent errors keep the
        // original (generated-script) positions
        Message firstMessage = errors.get(0);
        if (firstMessage instanceof SyntaxErrorMessage) {
            @SuppressWarnings({"ThrowableResultOfMethodCallIgnored"})
            SyntaxException syntaxException = ((SyntaxErrorMessage) firstMessage).getCause();
            Position errorPosition = new Position(syntaxException.getLine(), syntaxException.getStartColumn());

            //find the string section which precedes the row/col of the thrown exception
            StringSection precedingSection = findPrecedingSection(errorPosition, sections);

            //and now use the string section to mangle the line numbers so that they refer to the
            //appropriate line in the source template data
            if (precedingSection != null) {
                //if the error was thrown on the same row as where the last string section
                //ended, fix column value
                offsetPositionFromSection(errorPosition, precedingSection);

                //the below being true indicates that we had an unterminated ${ or <% sequence and
                //the column is thus meaningless, we reset it to where the ${ or <% starts to at
                //least give the user a sporting chance
                if (sections.get(sections.size() - 1) == precedingSection) {
                    errorPosition.column = precedingSection.lastSourcePosition.column;
                }

                String message = mangleExceptionMessage(e.getMessage(), errorPosition);
                result = new TemplateParseException(message, e, errorPosition.row, errorPosition.column);
            }
        }
    }

    return result;
}
/**
 * Rewrites a groovy compiler message so it refers to the template source:
 * strips groovy's own location suffix and file prefix, then appends our
 * translated line/column plus (best effort) the offending source context.
 */
private String mangleExceptionMessage(String original, Position p) {
    // drop groovy's trailing "@ line ..." location info; we report our own
    String text = original;
    int at = text.indexOf("@ line ");
    if (at != -1) {
        text = text.substring(0, at);
    }

    // strip everything up to and including the third ':' and the blank
    // that follows it (the "file: line: kind:" style prefix)
    int colons = 0;
    for (int i = 0; i < text.length(); i++) {
        if (text.charAt(i) == ':' && ++colons == 3) {
            text = text.substring(i + 2);
            break;
        }
    }

    String msg = "Template parse error '" + text + "' at line " + p.row + ", column " + p.column;
    try {
        msg += "\n" + getErrorContext(p.row);
    } catch (IOException ignored) {
        // context is best effort only; without it we still return the
        // basic message
    }
    return msg;
}
/**
 * Translates an error position from generated-script coordinates into
 * template-source coordinates, using the string section that precedes it.
 */
private void offsetPositionFromSection(Position p, StringSection s) {
    if (p.row == s.lastTargetPosition.row) {
        // the error sits on the same target row where the preceding string
        // section ended, so the column needs translating too; 8 is the width
        // of the header written before each non-string section (out<<""" is
        // 8 characters)
        p.column += s.lastSourcePosition.column - (s.lastTargetPosition.column + 8);
    }
    p.row += s.lastSourcePosition.row - 1;
}
/**
 * Returns the last string section whose end lies at or before the given
 * target position, or null if every section ends after it.
 */
private StringSection findPrecedingSection(Position p, List<StringSection> sections) {
    StringSection candidate = null;
    for (StringSection section : sections) {
        final Position end = section.lastTargetPosition;
        final boolean endsAfterError =
            end.row > p.row || (end.row == p.row && end.column > p.column);
        if (endsAfterError) {
            break;
        }
        candidate = section;
    }
    return candidate;
}
/**
 * Appends one character to the target script while keeping the writer's
 * row/column position in sync (needed to map errors back to the template).
 */
private void append(final StringBuilder target, Position targetPosition, char c) {
    if (c != '\n') {
        targetPosition.column++;
    } else {
        targetPosition.row++;
        targetPosition.column = 1;
    }
    target.append(c);
}
/**
 * Appends a whole string to the target script, routing every character
 * through the char overload so position tracking stays consistent.
 */
private void append(final StringBuilder target, Position targetPosition, String s) {
    for (int i = 0, len = s.length(); i < len; i++) {
        append(target, targetPosition, s.charAt(i));
    }
}
/**
 * Same as {@code read(reader, position)} but also records the character in
 * the caller's look-ahead buffer so it can be replayed if EOF interrupts a
 * multi-character sequence.
 */
private int read(final Reader reader, Position position, StringBuilder lookAhead) throws IOException, FinishedReadingException {
    final int ch = read(reader, position);
    lookAhead.append((char) ch);
    return ch;
}
// Single-character pushback state for read(Reader, Position) below,
// used to collapse \r\n sequences into a single \n.
boolean useLastRead = false;
private int lastRead = -1;

/* All \r\n sequences are treated as a single \n. By doing this we
 * produce the same output as the GStringTemplateEngine. Otherwise, some
 * of our output is on a newline when it should not be.
 *
 * Instead of using a pushback reader, we just keep a private instance
 * variable 'lastRead'.
 */
private int read(final Reader reader, Position position) throws IOException, FinishedReadingException {
    int c;

    if (useLastRead) {
        // replay the pushed-back character
        c = lastRead;
        // reset pushback state
        useLastRead = false;
        lastRead = -1;
    } else {
        c = read(reader);
        if (c == '\r') {
            // if CRLF, keep only the LF
            c = read(reader);
            if (c != '\n') {
                // a lone \r: keep it, and push back the character we just
                // read so the next call returns it
                lastRead = c;
                useLastRead = true;
                c = '\r';
            }
        }
    }
    if (c == -1) {
        // pre-allocated exception used purely as an end-of-input signal for
        // the parse loop
        throw finishedReadingException;
    }

    // keep the source position in sync for error reporting
    if (c == '\n') {
        position.row++;
        position.column = 1;
    } else {
        position.column++;
    }

    return c;
}
/**
 * Reads one character from the source and records it in {@code templateSource}
 * (kept so error messages can show the offending template context).
 *
 * @return the character read, or -1 at end of stream
 */
private int read(final Reader reader) throws IOException {
    int c = reader.read();
    // Only record real characters: casting the -1 EOF sentinel to char would
    // append a bogus '\uFFFF' to the saved template source.
    if (c != -1) {
        templateSource.append((char) c);
    }
    return c;
}
}
}
| |
/*
* Copyright 2005 Sascha Weinreuter
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.intellij.lang.xpath.xslt;
import com.intellij.openapi.fileTypes.StdFileTypes;
import com.intellij.openapi.util.IconLoader;
import com.intellij.openapi.util.Key;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiLanguageInjectionHost;
import com.intellij.psi.impl.PsiFileEx;
import com.intellij.psi.util.CachedValueProvider;
import com.intellij.psi.util.ParameterizedCachedValue;
import com.intellij.psi.util.ParameterizedCachedValueProvider;
import com.intellij.psi.util.CachedValuesManager;
import com.intellij.psi.xml.*;
import com.intellij.ui.LayeredIcon;
import com.intellij.util.SmartList;
import com.intellij.util.xml.NanoXmlUtil;
import gnu.trove.THashMap;
import gnu.trove.THashSet;
import org.intellij.lang.xpath.XPathFile;
import org.intellij.lang.xpath.xslt.impl.XsltChecker;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Set;
public class XsltSupport {
public static final String XALAN_EXTENSION_PREFIX = "http://xml.apache.org/xalan/";
public static final String XSLT_NS = "http://www.w3.org/1999/XSL/Transform";
public static final String PLUGIN_EXTENSIONS_NS = "urn:idea:xslt-plugin#extensions";
public static final Key<ParameterizedCachedValue<XsltChecker.SupportLevel, PsiFile>> FORCE_XSLT_KEY = Key.create("FORCE_XSLT");
private static final Icon XSLT_OVERLAY = IconLoader.getIcon("/icons/xslt-filetype-overlay.png");
private static final Map<String, String> XPATH_ATTR_MAP = new THashMap<String, String>(10);
private static final Map<String, Set<String>> XPATH_AVT_MAP = new THashMap<String, Set<String>>(10);
static {
XPATH_ATTR_MAP.put("select", "");
XPATH_ATTR_MAP.put("match", "");
XPATH_ATTR_MAP.put("test", "");
XPATH_ATTR_MAP.put("count", "number");
XPATH_ATTR_MAP.put("from", "number");
XPATH_ATTR_MAP.put("value", "number");
XPATH_ATTR_MAP.put("use", "key");
XPATH_AVT_MAP.put("element", new THashSet<String>(Arrays.asList("name", "namespace")));
XPATH_AVT_MAP.put("attribute", new THashSet<String>(Arrays.asList("name", "namespace")));
XPATH_AVT_MAP.put("processing-instruction", new THashSet<String>(Arrays.asList("name")));
XPATH_AVT_MAP.put("number", new THashSet<String>(Arrays.asList("format", "lang", "letter-value", "grouping-separator", "grouping-size")));
XPATH_AVT_MAP.put("sort", new THashSet<String>(Arrays.asList("lang", "data-type", "order", "case-order")));
}
@NotNull
public static PsiFile[] getFiles(XmlAttribute attribute) {
final XmlAttributeValue value = attribute.getValueElement();
if (value != null) {
final List<PsiFile> files = new SmartList<PsiFile>();
((PsiLanguageInjectionHost)value).processInjectedPsi(new PsiLanguageInjectionHost.InjectedPsiVisitor() {
public void visit(@NotNull PsiFile injectedPsi, @NotNull List<PsiLanguageInjectionHost.Shred> places) {
if (injectedPsi instanceof XPathFile) {
files.add(injectedPsi);
}
}
});
return files.isEmpty() ? PsiFile.EMPTY_ARRAY : files.toArray(new PsiFile[files.size()]);
}
return PsiFile.EMPTY_ARRAY;
}
public static boolean isXsltAttribute(@NotNull XmlAttribute attribute) {
return isXsltTag(attribute.getParent());
}
public static boolean isXsltTag(@NotNull XmlTag tag) {
final String s = tag.getNamespace();
return XSLT_NS.equals(s) || s.startsWith(XALAN_EXTENSION_PREFIX);
}
public static boolean isXPathAttribute(@NotNull XmlAttribute attribute) {
if (attribute.getValueElement() == null) return false;
final String name = attribute.getName();
if (isXsltAttribute(attribute)) {
final String tagName = attribute.getParent().getLocalName();
final String s = XPATH_ATTR_MAP.get(name);
//noinspection StringEquality
if (s != "" && !tagName.equals(s)) {
if (!isAttributeValueTemplate(attribute, true)) {
return false;
}
}
} else {
if (!isAttributeValueTemplate(attribute, false)) {
return false;
}
}
final PsiFile file = attribute.getContainingFile();
return file != null && getXsltSupportLevel(file) == XsltChecker.SupportLevel.FULL;
}
private static boolean isAttributeValueTemplate(@NotNull XmlAttribute attribute, boolean isXsltAttribute) {
return (!isXsltAttribute || mayBeAVT(attribute)) && getAVTOffset(attribute.getValue(), 0) != -1;
}
public static boolean isVariableOrParamName(@NotNull XmlAttribute attribute) {
return isXsltNameAttribute(attribute) && isVariableOrParam(attribute.getParent());
}
public static boolean isVariableOrParam(@NotNull XmlTag tag) {
final String localName = tag.getLocalName();
return ("variable".equals(localName) || "param".equals(localName)) && isXsltTag(tag);
}
public static boolean isVariable(@NotNull XmlAttribute attribute) {
return isXsltNameAttribute(attribute) && isVariable(attribute.getParent());
}
public static boolean isVariable(@NotNull XmlTag tag) {
final String localName = tag.getLocalName();
return "variable".equals(localName) && isXsltTag(tag);
}
public static boolean isParam(@NotNull XmlAttribute attribute) {
return isXsltNameAttribute(attribute) && isParam(attribute.getParent());
}
public static boolean isParam(@NotNull XmlTag tag) {
final String localName = tag.getLocalName();
return "param".equals(localName) && isXsltTag(tag);
}
public static boolean isPatternAttribute(@NotNull XmlAttribute attribute) {
if (!isXsltAttribute(attribute)) return false;
final String name = attribute.getName();
if ("match".equals(name)) {
return true;
} else if ("count".equals(name) || "from".equals(name)) {
return "number".equals(attribute.getParent().getLocalName());
}
return false;
}
public static boolean isTemplateCall(@NotNull XmlTag tag) {
return "call-template".equals(tag.getLocalName()) && hasNameAttribute(tag) && isXsltTag(tag);
}
public static boolean isApplyTemplates(@NotNull XmlTag tag) {
final String localName = tag.getLocalName();
return "apply-templates".equals(localName) && isXsltTag(tag);
}
private static boolean hasNameAttribute(@NotNull XmlTag tag) {
return tag.getAttribute("name", null) != null;
}
public static boolean isTemplateCallName(@NotNull XmlAttribute attribute) {
return isXsltNameAttribute(attribute) && isTemplateCall(attribute.getParent());
}
private static boolean isXsltNameAttribute(@NotNull XmlAttribute attribute) {
return "name".equals(attribute.getName()) && isXsltAttribute(attribute);
}
public static boolean isTemplateName(@NotNull XmlAttribute attribute) {
return isXsltNameAttribute(attribute) && isTemplate(attribute.getParent());
}
public static boolean isTemplate(@NotNull XmlTag element) {
return isTemplate(element, true);
}
public static boolean isTemplate(@NotNull XmlTag element, boolean requireName) {
return "template".equals(element.getLocalName()) && (!requireName || hasNameAttribute(element)) && isXsltTag(element);
}
public static boolean isXsltFile(@NotNull PsiFile psiFile) {
if (psiFile.getFileType() != StdFileTypes.XML) return false;
if (!(psiFile instanceof XmlFile)) return false;
final XsltChecker.SupportLevel level = getXsltSupportLevel(psiFile);
return level == XsltChecker.SupportLevel.FULL || level == XsltChecker.SupportLevel.PARTIAL;
}
public static XsltChecker.SupportLevel getXsltSupportLevel(PsiFile psiFile) {
final CachedValuesManager mgr = CachedValuesManager.getManager(psiFile.getProject());
return mgr.getParameterizedCachedValue(psiFile, FORCE_XSLT_KEY, XsltSupportProvider.INSTANCE, false, psiFile);
}
public static boolean isXsltRootTag(@NotNull XmlTag tag) {
final String localName = tag.getLocalName();
return ("stylesheet".equals(localName) || "transform".equals(localName)) && XSLT_NS.equals(tag.getNamespace());
}
public static boolean isTemplateCallParamName(@NotNull XmlAttribute attribute) {
return isXsltNameAttribute(attribute) && isTemplateCallParam(attribute.getParent());
}
private static boolean isTemplateCallParam(@NotNull XmlTag parent) {
return "with-param".equals(parent.getLocalName()) && hasNameAttribute(parent) && isXsltTag(parent);
}
public static boolean isTopLevelElement(XmlTag tag) {
XmlTag p = tag;
// not really necessary, XSLT doesn't allow literal result elements on top level anyway
while ((p = p.getParentTag()) != null) {
if (isXsltTag(p)) {
return isXsltRootTag(p);
}
}
return false;
}
public static boolean isIncludeOrImportHref(XmlAttribute xmlattribute) {
if (xmlattribute == null || !isXsltAttribute(xmlattribute)) return false;
final String localName = xmlattribute.getParent().getLocalName();
return isIncludeOrImport(localName) && "href".equals(xmlattribute.getName());
}
private static boolean isIncludeOrImport(String localName) {
// treat import and include the same. right now it doesn't seem necessary to distinguish them
return ("import".equals(localName) || "include".equals(localName));
}
public static boolean isIncludeOrImport(XmlTag tag) {
if (tag == null || !isXsltTag(tag)) return false;
final String localName = tag.getLocalName();
return isIncludeOrImport(localName) && tag.getAttribute("href", null) != null;
}
public static boolean isImport(XmlTag tag) {
if (tag == null || !isXsltTag(tag)) return false;
final String localName = tag.getLocalName();
return "import".equals(localName) && tag.getAttribute("href", null) != null;
}
@Nullable
public static PsiElement getAttValueToken(@NotNull XmlAttribute attribute) {
final XmlAttributeValue valueElement = attribute.getValueElement();
if (valueElement != null) {
final PsiElement firstChild = valueElement.getFirstChild();
if (firstChild != null) {
final PsiElement nextSibling = firstChild.getNextSibling();
return nextSibling instanceof XmlToken && ((XmlToken)nextSibling).getTokenType() == XmlTokenType.XML_ATTRIBUTE_VALUE_TOKEN ? nextSibling : null;
}
}
return null;
}
public static boolean isMode(XmlAttribute xmlattribute) {
if ("mode".equals(xmlattribute.getName())) {
final XmlTag parent = xmlattribute.getParent();
return isApplyTemplates(parent) || isTemplate(parent, false);
}
return false;
}
public static int getAVTOffset(String value, int i) {
do {
i = value.indexOf('{', i);
if (i != -1 && i == value.indexOf("{{", i)) {
i += 2;
} else {
break;
}
} while (i != -1);
return i;
}
public static boolean mayBeAVT(@NotNull XmlAttribute attribute) {
if (XsltSupport.isXsltAttribute(attribute)) {
final String tagName = attribute.getParent().getLocalName();
final Set<String> allowedAttrs = XPATH_AVT_MAP.get(tagName);
if (allowedAttrs == null) return isExtensionAvtAttribute(attribute);
return allowedAttrs.contains(attribute.getName());
} else {
return true;
}
}
private static boolean isExtensionAvtAttribute(XmlAttribute attribute) {
final String namespace = attribute.getParent().getNamespace();
return namespace.startsWith(XALAN_EXTENSION_PREFIX) && "file".equals(attribute.getName());
}
public static Icon createXsltIcon(Icon icon) {
return LayeredIcon.create(icon, XSLT_OVERLAY);
}
private static class XsltSupportProvider implements ParameterizedCachedValueProvider<XsltChecker.SupportLevel, PsiFile> {
public static final ParameterizedCachedValueProvider<XsltChecker.SupportLevel, PsiFile> INSTANCE = new XsltSupportProvider();
public CachedValueProvider.Result<XsltChecker.SupportLevel> compute(PsiFile psiFile) {
if (psiFile instanceof PsiFileEx) {
if (((PsiFileEx)psiFile).isContentsLoaded()) {
final XmlDocument doc = ((XmlFile)psiFile).getDocument();
if (doc != null) {
final XmlTag rootTag = doc.getRootTag();
if (rootTag != null) {
XmlAttribute v;
XsltChecker.SupportLevel level;
if (isXsltRootTag(rootTag)) {
v = rootTag.getAttribute("version");
level = v != null ? XsltChecker.getSupportLevel(v.getValue()) : XsltChecker.SupportLevel.NONE;
} else {
v = rootTag.getAttribute("version", XSLT_NS);
level = v != null ? XsltChecker.getSupportLevel(v.getValue()) : XsltChecker.SupportLevel.NONE;
}
return CachedValueProvider.Result.create(level, rootTag);
}
}
}
}
final XsltChecker xsltChecker = new XsltChecker();
NanoXmlUtil.parseFile(psiFile, xsltChecker);
return CachedValueProvider.Result.create(xsltChecker.getSupportLevel(), psiFile);
}
}
}
| |
/*
* Copyright (C) 2007 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.collect;
import static com.google.common.collect.Lists.newArrayList;
import static com.google.common.collect.Sets.newHashSet;
import static com.google.common.collect.Sets.newLinkedHashSet;
import static com.google.common.collect.testing.IteratorFeature.MODIFIABLE;
import static com.google.common.collect.testing.IteratorFeature.SUPPORTS_REMOVE;
import static com.google.common.collect.testing.IteratorFeature.SUPPORTS_SET;
import static com.google.common.truth.Truth.assertThat;
import static java.util.Arrays.asList;
import com.google.common.annotations.GwtCompatible;
import com.google.common.annotations.GwtIncompatible;
import com.google.common.collect.testing.IteratorTester;
import com.google.common.collect.testing.ListIteratorTester;
import com.google.common.collect.testing.features.CollectionFeature;
import com.google.common.collect.testing.features.CollectionSize;
import com.google.common.collect.testing.features.MapFeature;
import com.google.common.collect.testing.google.ListMultimapTestSuiteBuilder;
import com.google.common.collect.testing.google.TestStringListMultimapGenerator;
import com.google.common.testing.EqualsTester;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.ListIterator;
import java.util.Map;
import java.util.Map.Entry;
import java.util.RandomAccess;
import java.util.Set;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
/**
* Tests for {@code LinkedListMultimap}.
*
* @author Mike Bostock
*/
@GwtCompatible(emulated = true)
public class LinkedListMultimapTest extends TestCase {
@GwtIncompatible // suite
public static Test suite() {
    TestSuite suite = new TestSuite();
    // Generator that fills a fresh LinkedListMultimap from the given entries.
    TestStringListMultimapGenerator generator = new TestStringListMultimapGenerator() {
        @Override
        protected ListMultimap<String, String> create(Entry<String, String>[] entries) {
            ListMultimap<String, String> multimap = LinkedListMultimap.create();
            for (Entry<String, String> entry : entries) {
                multimap.put(entry.getKey(), entry.getValue());
            }
            return multimap;
        }
    };
    suite.addTest(ListMultimapTestSuiteBuilder.using(generator)
        .named("LinkedListMultimap")
        .withFeatures(
            MapFeature.ALLOWS_NULL_KEYS,
            MapFeature.ALLOWS_NULL_VALUES,
            MapFeature.ALLOWS_ANY_NULL_QUERIES,
            MapFeature.GENERAL_PURPOSE,
            CollectionFeature.SUPPORTS_ITERATOR_REMOVE,
            CollectionFeature.SERIALIZABLE,
            CollectionFeature.KNOWN_ORDER,
            CollectionSize.ANY)
        .createTestSuite());
    suite.addTestSuite(LinkedListMultimapTest.class);
    return suite;
}
/** Creates the multimap implementation under test; subclasses may override. */
protected LinkedListMultimap<String, Integer> create() {
    return LinkedListMultimap.create();
}
/**
 * get() must return a sequential (non-RandomAccess) view — both for keys
 * that are present and for keys that are absent.
 */
public void testGetRandomAccess() {
    Multimap<String, Integer> map = create();
    map.put("foo", 1);
    map.put("foo", 3);
    assertFalse(map.get("foo") instanceof RandomAccess);
    assertFalse(map.get("bar") instanceof RandomAccess);
}
/**
 * removeAll() hands back a detached snapshot list, which — unlike the live
 * get() view — does implement RandomAccess.
 */
public void testRemoveAllRandomAccess() {
    Multimap<String, Integer> map = create();
    map.put("foo", 1);
    map.put("foo", 3);
    assertTrue(map.removeAll("foo") instanceof RandomAccess);
    assertTrue(map.removeAll("bar") instanceof RandomAccess);
}
/**
 * replaceValues() returns a snapshot of the replaced values, which — unlike
 * the live get() view — implements RandomAccess.
 */
public void testReplaceValuesRandomAccess() {
    Multimap<String, Integer> map = create();
    map.put("foo", 1);
    map.put("foo", 3);
    // asList is the statically imported java.util.Arrays.asList
    assertTrue(map.replaceValues("foo", asList(2, 4)) instanceof RandomAccess);
    assertTrue(map.replaceValues("bar", asList(2, 4)) instanceof RandomAccess);
}
public void testCreateFromMultimap() {
    Multimap<String, Integer> source = LinkedListMultimap.create();
    source.put("foo", 1);
    source.put("bar", 3);
    source.put("foo", 2);
    LinkedListMultimap<String, Integer> copy = LinkedListMultimap.create(source);
    // the copy must be equal AND preserve the source's global insertion order
    assertEquals(source, copy);
    assertThat(copy.entries()).containsExactlyElementsIn(source.entries()).inOrder();
}
public void testCreateFromSize() {
    // the size argument is only a capacity hint; semantics are unchanged
    LinkedListMultimap<String, Integer> map = LinkedListMultimap.create(20);
    map.put("foo", 1);
    map.put("bar", 2);
    map.put("foo", 3);
    assertEquals(ImmutableList.of(1, 3), map.get("foo"));
}
/** A negative expected-size hint must be rejected eagerly. */
public void testCreateFromIllegalSize() {
    try {
        LinkedListMultimap.create(-20);
        // give a diagnostic instead of a bare fail() when no exception is thrown
        fail("expected IllegalArgumentException for negative expected size");
    } catch (IllegalArgumentException expected) {
    }
}
public void testLinkedGetAdd() {
    LinkedListMultimap<String, Integer> multimap = create();
    multimap.put("bar", 1);
    // additions through the live get() view must respect global insertion order
    Collection<Integer> fooValues = multimap.get("foo");
    fooValues.add(2);
    fooValues.add(3);
    multimap.put("bar", 4);
    multimap.put("foo", 5);
    assertEquals("{bar=[1, 4], foo=[2, 3, 5]}", multimap.toString());
    assertEquals("[bar=1, foo=2, foo=3, bar=4, foo=5]", multimap.entries().toString());
}
public void testLinkedGetInsert() {
    ListMultimap<String, Integer> multimap = create();
    multimap.put("bar", 1);
    // positional inserts through the live get() view must also respect
    // global insertion order
    List<Integer> fooValues = multimap.get("foo");
    fooValues.add(2);
    fooValues.add(0, 3);
    multimap.put("bar", 4);
    multimap.put("foo", 5);
    assertEquals("{bar=[1, 4], foo=[3, 2, 5]}", multimap.toString());
    assertEquals("[bar=1, foo=3, foo=2, bar=4, foo=5]", multimap.entries().toString());
}
public void testLinkedPutInOrder() {
    Multimap<String, Integer> multimap = create();
    multimap.put("foo", 1);
    multimap.put("bar", 2);
    multimap.put("bar", 3);
    // keys iterate in first-insertion order; entries in full insertion order
    assertEquals("{foo=[1], bar=[2, 3]}", multimap.toString());
    assertEquals("[foo=1, bar=2, bar=3]", multimap.entries().toString());
}
public void testLinkedPutOutOfOrder() {
    Multimap<String, Integer> multimap = create();
    multimap.put("bar", 1);
    multimap.put("foo", 2);
    multimap.put("bar", 3);
    // interleaved puts: the map view groups by key, entries stay interleaved
    assertEquals("{bar=[1, 3], foo=[2]}", multimap.toString());
    assertEquals("[bar=1, foo=2, bar=3]", multimap.entries().toString());
}
/**
 * putAll(Multimap) copies all entries in source order into the destination,
 * and leaves the source multimap untouched.
 */
public void testLinkedPutAllMultimap() {
Multimap<String, Integer> src = create();
src.put("bar", 1);
src.put("foo", 2);
src.put("bar", 3);
Multimap<String, Integer> dst = create();
dst.putAll(src);
assertEquals("{bar=[1, 3], foo=[2]}", dst.toString());
// Deliberately checks src (not dst) here: the copy must not mutate the source.
assertEquals("[bar=1, foo=2, bar=3]", src.entries().toString());
}
/**
 * replaceValues() substitutes a key's values in place: replaced entries keep
 * their original positions in the global entry order.
 */
public void testLinkedReplaceValues() {
Multimap<String, Integer> map = create();
map.put("bar", 1);
map.put("foo", 2);
map.put("bar", 3);
map.put("bar", 4);
assertEquals("{bar=[1, 3, 4], foo=[2]}", map.toString());
// Three "bar" values replaced by two; the new values occupy the slots of the
// first two old entries (positions of bar=1 and bar=3).
map.replaceValues("bar", asList(1, 2));
assertEquals("[bar=1, foo=2, bar=2]", map.entries().toString());
assertEquals("{bar=[1, 2], foo=[2]}", map.toString());
}
/**
 * clear() empties the multimap and all previously obtained live views
 * (per-key lists and the values collection) in one step.
 */
public void testLinkedClear() {
ListMultimap<String, Integer> map = create();
map.put("foo", 1);
map.put("foo", 2);
map.put("bar", 3);
List<Integer> foos = map.get("foo");
Collection<Integer> values = map.values();
assertEquals(asList(1, 2), foos);
assertThat(values).containsExactly(1, 2, 3).inOrder();
map.clear();
// The views obtained before clear() must now be empty too.
assertEquals(Collections.emptyList(), foos);
assertThat(values).isEmpty();
assertEquals("[]", map.entries().toString());
assertEquals("{}", map.toString());
}
/**
 * keySet() lists distinct keys in first-insertion order, and removing a key
 * from the view drops every entry for that key.
 */
public void testLinkedKeySet() {
  Multimap<String, Integer> multimap = create();
  multimap.put("bar", 1);
  multimap.put("foo", 2);
  multimap.put("bar", 3);
  multimap.put("bar", 4);

  assertEquals("[bar, foo]", multimap.keySet().toString());

  // Removing "bar" through the key-set view removes all three "bar" entries.
  multimap.keySet().remove("bar");
  assertEquals("{foo=[2]}", multimap.toString());
}
/**
 * keys() is a multiset view in entry order; removing one occurrence of a key
 * removes only that key's first entry, which can reshuffle which key appears
 * first in toString().
 */
public void testLinkedKeys() {
Multimap<String, Integer> map = create();
map.put("bar", 1);
map.put("foo", 2);
map.put("bar", 3);
map.put("bar", 4);
assertEquals("[bar=1, foo=2, bar=3, bar=4]",
    map.entries().toString());
assertThat(map.keys()).containsExactly("bar", "foo", "bar", "bar").inOrder();
map.keys().remove("bar"); // bar is no longer the first key!
assertEquals("{foo=[2], bar=[3, 4]}", map.toString());
}
/**
 * values() is a live view in entry order; removing a value through the view
 * removes the corresponding entry (here the only "foo" entry).
 */
public void testLinkedValues() {
  Multimap<String, Integer> multimap = create();
  multimap.put("bar", 1);
  multimap.put("foo", 2);
  multimap.put("bar", 3);
  multimap.put("bar", 4);

  assertEquals("[1, 2, 3, 4]", multimap.values().toString());

  multimap.values().remove(2);
  assertEquals("{bar=[1, 3, 4]}", multimap.toString());
}
/**
 * The entries() iterator walks in insertion order; Entry.setValue writes
 * through to the multimap, and Iterator.remove deletes the last returned
 * entry.
 */
public void testLinkedEntries() {
Multimap<String, Integer> map = create();
map.put("bar", 1);
map.put("foo", 2);
map.put("bar", 3);
Iterator<Map.Entry<String, Integer>> entries = map.entries().iterator();
Map.Entry<String, Integer> entry = entries.next();
assertEquals("bar", entry.getKey());
assertEquals(1, (int) entry.getValue());
entry = entries.next();
assertEquals("foo", entry.getKey());
assertEquals(2, (int) entry.getValue());
// setValue on the entry mutates the underlying multimap (foo: 2 -> 4).
entry.setValue(4);
entry = entries.next();
assertEquals("bar", entry.getKey());
assertEquals(3, (int) entry.getValue());
assertFalse(entries.hasNext());
// Removes the last entry returned by next(), i.e. bar=3.
entries.remove();
assertEquals("{bar=[1], foo=[4]}", map.toString());
}
/**
 * asMap().entrySet() iterates keys in first-insertion order; its entries
 * reject setValue, and Iterator.remove clears all values for that key.
 */
public void testLinkedAsMapEntries() {
Multimap<String, Integer> map = create();
map.put("bar", 1);
map.put("foo", 2);
map.put("bar", 3);
Iterator<Map.Entry<String, Collection<Integer>>> entries
    = map.asMap().entrySet().iterator();
Map.Entry<String, Collection<Integer>> entry = entries.next();
assertEquals("bar", entry.getKey());
assertThat(entry.getValue()).containsExactly(1, 3).inOrder();
try {
// asMap entries are read-through but not write-through via setValue.
entry.setValue(Arrays.<Integer>asList());
fail("UnsupportedOperationException expected");
} catch (UnsupportedOperationException expected) {}
entries.remove(); // clear
entry = entries.next();
assertEquals("foo", entry.getKey());
assertThat(entry.getValue()).contains(2);
assertFalse(entries.hasNext());
assertEquals("{foo=[2]}", map.toString());
}
/**
 * Entry objects previously obtained from entries() keep reflecting the
 * multimap's state after it is mutated via get().set() and put().
 */
public void testEntriesAfterMultimapUpdate() {
ListMultimap<String, Integer> multimap = create();
multimap.put("foo", 2);
multimap.put("bar", 3);
Collection<Map.Entry<String, Integer>> entries = multimap.entries();
Iterator<Map.Entry<String, Integer>> iterator = entries.iterator();
Map.Entry<String, Integer> entrya = iterator.next();
Map.Entry<String, Integer> entryb = iterator.next();
// set(0, 4) replaces foo's first value and returns the old value 2.
assertEquals(2, (int) multimap.get("foo").set(0, 4));
assertFalse(multimap.containsEntry("foo", 2));
assertTrue(multimap.containsEntry("foo", 4));
assertTrue(multimap.containsEntry("bar", 3));
// The previously fetched entries see the updated values.
assertEquals(4, (int) entrya.getValue());
assertEquals(3, (int) entryb.getValue());
assertTrue(multimap.put("foo", 5));
assertTrue(multimap.containsEntry("foo", 5));
assertTrue(multimap.containsEntry("foo", 4));
assertTrue(multimap.containsEntry("bar", 3));
assertEquals(4, (int) entrya.getValue());
assertEquals(3, (int) entryb.getValue());
}
@SuppressWarnings("unchecked")
@GwtIncompatible // unreasonably slow
/**
 * Exhaustively exercises the entries() list-iterator contract (via
 * ListIteratorTester) from several starting indices, allowing removal only.
 */
public void testEntriesIteration() {
// Entries the tester may try to add; addition is not in the supported-feature
// list below, so these exercise the failure path.
List<Entry<String, Integer>> addItems = ImmutableList.of(
    Maps.immutableEntry("foo", 99),
    Maps.immutableEntry("foo", 88),
    Maps.immutableEntry("bar", 77));
for (final int startIndex : new int[] {0, 3, 5}) {
List<Entry<String, Integer>> list = Lists.newArrayList(
    Maps.immutableEntry("foo", 2),
    Maps.immutableEntry("foo", 3),
    Maps.immutableEntry("bar", 4),
    Maps.immutableEntry("bar", 5),
    Maps.immutableEntry("foo", 6));
new ListIteratorTester<Entry<String, Integer>>(3, addItems,
    ImmutableList.of(SUPPORTS_REMOVE), list, startIndex) {
private LinkedListMultimap<String, Integer> multimap;
@Override protected ListIterator<Entry<String, Integer>> newTargetIterator() {
// A fresh multimap per iteration so each tester run starts from the
// same known state.
multimap = create();
multimap.putAll("foo", asList(2, 3));
multimap.putAll("bar", asList(4, 5));
multimap.put("foo", 6);
return multimap.entries().listIterator(startIndex);
}
@Override protected void verify(List<Entry<String, Integer>> elements) {
assertEquals(elements, multimap.entries());
}
}.test();
}
}
@GwtIncompatible // unreasonably slow
/**
 * Exercises the keys() multiset iterator contract (via IteratorTester):
 * keys appear once per entry, in entry order.
 */
public void testKeysIteration() {
new IteratorTester<String>(6, MODIFIABLE, newArrayList("foo", "foo", "bar",
    "bar", "foo"), IteratorTester.KnownOrder.KNOWN_ORDER) {
private Multimap<String, Integer> multimap;
@Override protected Iterator<String> newTargetIterator() {
multimap = create();
multimap.putAll("foo", asList(2, 3));
multimap.putAll("bar", asList(4, 5));
multimap.putAll("foo", asList(6));
return multimap.keys().iterator();
}
@Override protected void verify(List<String> elements) {
assertEquals(elements, Lists.newArrayList(multimap.keys()));
}
}.test();
}
@GwtIncompatible // unreasonably slow
/**
 * Exercises the values() list-iterator contract (via ListIteratorTester)
 * from several starting indices; both remove and set are supported.
 */
public void testValuesIteration() {
List<Integer> addItems = ImmutableList.of(99, 88, 77);
for (final int startIndex : new int[] {0, 3, 5}) {
new ListIteratorTester<Integer>(3, addItems,
    ImmutableList.of(SUPPORTS_REMOVE, SUPPORTS_SET),
    Lists.newArrayList(2, 3, 4, 5, 6), startIndex) {
private LinkedListMultimap<String, Integer> multimap;
@Override protected ListIterator<Integer> newTargetIterator() {
multimap = create();
multimap.put("bar", 2);
multimap.putAll("foo", Arrays.asList(3, 4));
multimap.put("bar", 5);
multimap.put("foo", 6);
return multimap.values().listIterator(startIndex);
}
@Override protected void verify(List<Integer> elements) {
assertEquals(elements, multimap.values());
}
}.test();
}
}
@GwtIncompatible // unreasonably slow
/**
 * Exercises the keySet() iterator contract (via IteratorTester): distinct
 * keys, iterated in first-insertion order.
 */
public void testKeySetIteration() {
new IteratorTester<String>(6, MODIFIABLE, newLinkedHashSet(asList(
    "foo", "bar", "baz", "dog", "cat")),
    IteratorTester.KnownOrder.KNOWN_ORDER) {
private Multimap<String, Integer> multimap;
@Override protected Iterator<String> newTargetIterator() {
multimap = create();
multimap.putAll("foo", asList(2, 3));
multimap.putAll("bar", asList(4, 5));
multimap.putAll("foo", asList(6));
multimap.putAll("baz", asList(7, 8));
multimap.putAll("dog", asList(9));
multimap.putAll("bar", asList(10, 11));
multimap.putAll("cat", asList(12, 13, 14));
return multimap.keySet().iterator();
}
@Override protected void verify(List<String> elements) {
assertEquals(newHashSet(elements), multimap.keySet());
}
}.test();
}
@SuppressWarnings("unchecked")
@GwtIncompatible // unreasonably slow
/**
 * Exercises the asMap().entrySet() iterator contract (via IteratorTester):
 * one entry per distinct key, keys in first-insertion order, with all values
 * for a key collected together.
 */
public void testAsSetIteration() {
// Expected view contents: every value put for a key, in insertion order.
Set<Entry<String, Collection<Integer>>> set = Sets.newLinkedHashSet(asList(
    Maps.immutableEntry("foo",
        (Collection<Integer>) asList(2, 3, 6)),
    Maps.immutableEntry("bar",
        (Collection<Integer>) asList(4, 5, 10, 11)),
    Maps.immutableEntry("baz",
        (Collection<Integer>) asList(7, 8)),
    Maps.immutableEntry("dog",
        (Collection<Integer>) asList(9)),
    Maps.immutableEntry("cat",
        (Collection<Integer>) asList(12, 13, 14))
    ));
new IteratorTester<Entry<String, Collection<Integer>>>(6, MODIFIABLE, set,
    IteratorTester.KnownOrder.KNOWN_ORDER) {
private Multimap<String, Integer> multimap;
@Override protected Iterator<Entry<String, Collection<Integer>>>
    newTargetIterator() {
multimap = create();
multimap.putAll("foo", asList(2, 3));
multimap.putAll("bar", asList(4, 5));
multimap.putAll("foo", asList(6));
multimap.putAll("baz", asList(7, 8));
multimap.putAll("dog", asList(9));
multimap.putAll("bar", asList(10, 11));
multimap.putAll("cat", asList(12, 13, 14));
return multimap.asMap().entrySet().iterator();
}
@Override protected void verify(
    List<Entry<String, Collection<Integer>>> elements) {
assertEquals(newHashSet(elements), multimap.asMap().entrySet());
}
}.test();
}
/**
 * Equality is based on contents only: empty multimaps are equal regardless
 * of the expected-size hint passed to create(int).
 */
public void testEquals() {
new EqualsTester()
    .addEqualityGroup(
        LinkedListMultimap.create(),
        LinkedListMultimap.create(),
        LinkedListMultimap.create(1))
    .testEquals();
}
}
| |
package uk.co.jemos.podam.typeManufacturers;
import org.apache.commons.lang3.ArrayUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import uk.co.jemos.podam.api.DataProviderStrategy;
import uk.co.jemos.podam.api.ObjectStrategy;
import uk.co.jemos.podam.api.PodamUtils;
import uk.co.jemos.podam.common.*;
import javax.validation.Constraint;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size;
import java.lang.annotation.Annotation;
import java.lang.reflect.*;
import java.util.*;
import java.util.concurrent.atomic.AtomicReference;
/**
* Type Manufacturer utility class.
*
* Created by tedonema on 01/07/2015.
*
* @since 6.0.0.RELEASE
*/
/**
 * Reflection helpers shared by type manufacturers: attribute-strategy lookup,
 * factory-method discovery, and generic-type resolution/merging.
 *
 * <p>All methods are static; the class is abstract to prevent instantiation.</p>
 */
public abstract class TypeManufacturerUtil {
/** The application logger */
private static final Logger LOG = LoggerFactory.getLogger(TypeManufacturerUtil.class);
/**
 * It returns a {@link AttributeStrategy} if one was specified in
 * annotations, or {@code null} otherwise.
 *
 * @param strategy
 *            The data provider strategy
 * @param annotations
 *            The list of annotations, irrelevant annotations will be removed
 * @param attributeType
 *            Type of attribute expected to be returned
 * @return {@link AttributeStrategy}, if {@link PodamStrategyValue} or bean
 *         validation constraint annotation was found among annotations
 * @throws IllegalAccessException
 *             if attribute strategy cannot be instantiated
 * @throws InstantiationException
 *             if attribute strategy cannot be instantiated
 * @throws SecurityException
 *             if access security is violated
 * @throws InvocationTargetException
 *             if invocation failed
 * @throws IllegalArgumentException
 *             if illegal argument provided to a constructor
 */
public static AttributeStrategy<?> findAttributeStrategy(DataProviderStrategy strategy,
List<Annotation> annotations, Class<?> attributeType)
throws InstantiationException, IllegalAccessException, SecurityException, IllegalArgumentException, InvocationTargetException {
// Work on a copy so the caller's list is only mutated through 'annotations'
// semantics documented above (irrelevant annotations removed).
// NOTE(review): removals below operate on the local copy, not on the
// caller-supplied list — confirm whether the javadoc claim about removal
// from 'annotations' still holds for callers.
List<Annotation> localAnnotations = new ArrayList<Annotation>(annotations);
Iterator<Annotation> iter = localAnnotations.iterator();
while (iter.hasNext()) {
Annotation annotation = iter.next();
if (annotation instanceof PodamStrategyValue) {
// Explicit per-attribute strategy wins immediately.
PodamStrategyValue strategyAnnotation = (PodamStrategyValue) annotation;
return strategyAnnotation.value().newInstance();
}
/* Podam annotation is present, this will be handled later by type manufacturers */
if (annotation.annotationType().getAnnotation(PodamAnnotation.class) != null) {
return null;
}
/* Find real class out of proxy */
// Annotations are usually JDK dynamic proxies; unwrap to the declared
// annotation interface so strategy lookup by class works.
Class<? extends Annotation> annotationClass = annotation.getClass();
if (Proxy.isProxyClass(annotationClass)) {
Class<?>[] interfaces = annotationClass.getInterfaces();
if (interfaces.length == 1) {
@SuppressWarnings("unchecked")
Class<? extends Annotation> tmp = (Class<? extends Annotation>) interfaces[0];
annotationClass = tmp;
}
}
AttributeStrategy<?> attrStrategy = strategy.getStrategyForAnnotation(annotationClass);
if (null != attrStrategy) {
return attrStrategy;
}
// Bean-validation constraints (meta-annotated with @Constraint) are kept
// for the BeanValidationStrategy fallback below, except the "not null/empty/
// blank" family which needs no special handling.
if (annotation.annotationType().getAnnotation(Constraint.class) != null) {
if (annotation instanceof NotNull ||
annotation.annotationType().getName().equals("org.hibernate.validator.constraints.NotEmpty") ||
annotation.annotationType().getName().equals("org.hibernate.validator.constraints.NotBlank")) {
/* We don't need to do anything for NotNull constraint */
iter.remove();
} else if (!NotNull.class.getPackage().equals(annotationClass.getPackage())) {
// NOTE(review): "AttributeStratergy" below is misspelled in the log
// message ("Strategy"); left unchanged here since it is runtime output.
LOG.warn("Please, register AttributeStratergy for custom "
+ "constraint {}, in DataProviderStrategy! Value "
+ "will be left to null", annotation);
}
} else {
iter.remove();
}
}
AttributeStrategy<?> retValue = null;
// Remaining constraint annotations drive bean-validation-based filling, but
// only for scalar attributes (not collections/maps/arrays).
if (!localAnnotations.isEmpty()
&& !Collection.class.isAssignableFrom(attributeType)
&& !Map.class.isAssignableFrom(attributeType)
&& !attributeType.isArray()) {
retValue = new BeanValidationStrategy(attributeType);
}
return retValue;
}
/**
 * Finds suitable factory methods for POJO instantiation.
 * <p>
 * A candidate is any declared method of {@code factoryClass} whose return
 * type is {@code pojoClass}; it must be static when the factory class is the
 * POJO class itself.
 * </p>
 *
 * @param factoryClass
 *            Factory class to produce the POJO
 * @param pojoClass
 *            Typed class
 * @return an array of suitable factory methods found (possibly empty)
 */
public static Method[] findSuitableConstructors(final Class<?> factoryClass,
final Class<?> pojoClass) {
// If no publicly accessible constructors are available,
// the best we can do is to find a constructor (e.g.
// getInstance())
Method[] declaredMethods = factoryClass.getDeclaredMethods();
List<Method> constructors = new ArrayList<Method>();
// A candidate factory method is a method which returns the
// Class type
for (Method candidateConstructor : declaredMethods) {
if (candidateConstructor.getReturnType().equals(pojoClass)) {
if (Modifier.isStatic(candidateConstructor.getModifiers())
|| !factoryClass.equals(pojoClass)) {
constructors.add(candidateConstructor);
}
}
}
return constructors.toArray(new Method[constructors.size()]);
}
/**
 * Fills type arguments map
 * <p>
 * This method places required and provided types for object creation into a
 * map, which will be used for type mapping.
 * </p>
 *
 * @param typeArgsMap
 *            a map to fill
 * @param pojoClass
 *            Typed class
 * @param genericTypeArgs
 *            Type arguments provided for a generics object by caller
 * @return Array of unused provided generic type arguments
 * @throws IllegalArgumentException
 *             If number of typed parameters doesn't match number of
 *             provided generic types
 */
public static Type[] fillTypeArgMap(final Map<String, Type> typeArgsMap,
final Class<?> pojoClass, final Type[] genericTypeArgs) {
TypeVariable<?>[] typeArray = pojoClass.getTypeParameters();
List<TypeVariable<?>> typeParameters = new ArrayList<TypeVariable<?>>(Arrays.asList(typeArray));
List<Type> genericTypes = new ArrayList<Type>(Arrays.asList(genericTypeArgs));
Iterator<TypeVariable<?>> iterator = typeParameters.iterator();
Iterator<Type> iterator2 = genericTypes.iterator();
// Walk both lists in lock-step: drop type parameters already resolved in
// typeArgsMap, and drop the matching provided argument when it is itself
// an unresolved type variable.
while (iterator.hasNext()) {
Type genericType = (iterator2.hasNext() ? iterator2.next() : null);
/* Removing types, which are already in typeArgsMap */
if (typeArgsMap.containsKey(iterator.next().getName())) {
iterator.remove();
/* Removing types, which are type variables */
if (genericType instanceof TypeVariable) {
iterator2.remove();
}
}
}
if (typeParameters.size() > genericTypes.size()) {
String msg = pojoClass.getCanonicalName()
+ " is missing generic type arguments, expected "
+ Arrays.toString(typeArray) + ", provided "
+ Arrays.toString(genericTypeArgs);
throw new IllegalArgumentException(msg);
}
// Factory methods declared on the POJO itself may carry their own type
// parameters; map those names to the provided arguments when the counts match.
final Method[] suitableConstructors
= TypeManufacturerUtil.findSuitableConstructors(pojoClass, pojoClass);
for (Method constructor : suitableConstructors) {
TypeVariable<Method>[] ctorTypeParams = constructor.getTypeParameters();
if (ctorTypeParams.length == genericTypes.size()) {
for (int i = 0; i < ctorTypeParams.length; i++) {
Type foundType = genericTypes.get(i);
typeArgsMap.put(ctorTypeParams[i].getName(), foundType);
}
}
}
// Consume one provided argument per remaining class-level type parameter.
for (int i = 0; i < typeParameters.size(); i++) {
Type foundType = genericTypes.remove(0);
typeArgsMap.put(typeParameters.get(i).getName(), foundType);
}
Type[] genericTypeArgsExtra;
if (genericTypes.size() > 0) {
genericTypeArgsExtra = genericTypes.toArray(new Type[genericTypes.size()]);
} else {
genericTypeArgsExtra = PodamConstants.NO_TYPES;
}
/* Adding types, which were specified during inheritance */
// e.g. class Child extends Parent<String> fixes Parent's T to String.
Class<?> clazz = pojoClass;
while (clazz != null) {
Type superType = clazz.getGenericSuperclass();
clazz = clazz.getSuperclass();
if (superType instanceof ParameterizedType) {
ParameterizedType paramType = (ParameterizedType) superType;
Type[] actualParamTypes = paramType.getActualTypeArguments();
TypeVariable<?>[] paramTypes = clazz.getTypeParameters();
for (int i = 0; i < actualParamTypes.length
&& i < paramTypes.length; i++) {
if (actualParamTypes[i] instanceof Class) {
typeArgsMap.put(paramTypes[i].getName(),
actualParamTypes[i]);
}
}
}
}
return genericTypeArgsExtra;
}
/**
 * Searches for annotation with information about collection/map size
 * and filling strategies
 *
 * @param strategy
 *            a data provider strategy
 * @param annotations
 *            a list of annotations to inspect
 * @param collectionElementType
 *            a collection element type
 * @param elementStrategyHolder
 *            a holder to pass found element strategy back to the caller,
 *            can be null
 * @param keyStrategyHolder
 *            a holder to pass found key strategy back to the caller,
 *            can be null
 * @return
 *            A number of elements in the collection; never null (falls back
 *            to the strategy's default, clamped to any @Size bounds)
 * @throws InstantiationException
 *            A strategy cannot be instantiated
 * @throws IllegalAccessException
 *            A strategy cannot be instantiated
 */
public static Integer findCollectionSize( DataProviderStrategy strategy,
List<Annotation> annotations,
Class<?> collectionElementType,
Holder<AttributeStrategy<?>> elementStrategyHolder,
Holder<AttributeStrategy<?>> keyStrategyHolder)
throws InstantiationException, IllegalAccessException {
// If the user defined a strategy to fill the collection elements,
// we use it
Size size = null;
for (Annotation annotation : annotations) {
if (annotation instanceof PodamCollection) {
// @PodamCollection wins outright: report its strategies and size.
PodamCollection collectionAnnotation = (PodamCollection) annotation;
if (null != elementStrategyHolder) {
Class<? extends AttributeStrategy<?>> attributeStrategy
= collectionAnnotation.collectionElementStrategy();
// Fall back to the map-element strategy when the collection-element
// one is absent or is just the generic ObjectStrategy default.
// NOTE(review): annotation methods cannot return null, so the
// null-check here is defensive only.
if (null == attributeStrategy || ObjectStrategy.class.isAssignableFrom(attributeStrategy)) {
attributeStrategy = collectionAnnotation.mapElementStrategy();
}
if (null != attributeStrategy) {
elementStrategyHolder.setValue(attributeStrategy.newInstance());
}
}
if (null != keyStrategyHolder) {
Class<? extends AttributeStrategy<?>> attributeStrategy
= collectionAnnotation.mapKeyStrategy();
if (null != attributeStrategy) {
keyStrategyHolder.setValue(attributeStrategy.newInstance());
}
}
return collectionAnnotation.nbrElements();
} else if (annotation instanceof Size) {
size = (Size) annotation;
}
}
// No @PodamCollection: use the strategy default, clamped into @Size bounds.
Integer nbrElements = strategy
.getNumberOfCollectionElements(collectionElementType);
if (null != size) {
if (nbrElements > size.max()) {
nbrElements = size.max();
}
if (nbrElements < size.min()) {
nbrElements = size.min();
}
}
return nbrElements;
}
/**
 * Utility to merge actual types with supplied array of generic type
 * substitutions
 *
 * @param attributeType
 *            actual type of object
 * @param genericAttributeType
 *            generic type of object
 * @param suppliedTypes
 *            an array of supplied types for generic type substitution
 * @param typeArgsMap
 *            a map relating the generic class arguments ("&lt;T, V&gt;" for
 *            example) with their actual types
 * @return An array of merged actual and supplied types with generic types
 *         resolved
 */
public static Type[] mergeActualAndSuppliedGenericTypes(
Class<?> attributeType, Type genericAttributeType, Type[] suppliedTypes,
Map<String, Type> typeArgsMap) {
TypeVariable<?>[] actualTypes = attributeType.getTypeParameters();
// Enough substitutions supplied already: nothing to merge.
if (actualTypes.length <= suppliedTypes.length) {
return suppliedTypes;
}
Type[] genericTypes = null;
if (genericAttributeType instanceof ParameterizedType) {
ParameterizedType paramType = (ParameterizedType) genericAttributeType;
genericTypes = paramType.getActualTypeArguments();
} else if (genericAttributeType instanceof WildcardType) {
// Prefer lower bounds (? super X); fall back to upper bounds (? extends X).
WildcardType wildcardType = (WildcardType) genericAttributeType;
genericTypes = wildcardType.getLowerBounds();
if (ArrayUtils.isEmpty(genericTypes)) {
genericTypes = wildcardType.getUpperBounds();
}
}
List<Type> resolvedTypes = new ArrayList<Type>();
List<Type> substitutionTypes = new ArrayList<Type>(Arrays.asList(suppliedTypes));
for (int i = 0; i < actualTypes.length; i++) {
Type type = null;
// First try to resolve each declared type parameter from the map.
if (actualTypes[i] instanceof TypeVariable) {
type = typeArgsMap.get(((TypeVariable<?>)actualTypes[i]).getName());
} else if (actualTypes[i] instanceof WildcardType) {
AtomicReference<Type[]> methodGenericTypeArgs
= new AtomicReference<Type[]>(PodamConstants.NO_TYPES);
type = TypeManufacturerUtil.resolveGenericParameter(actualTypes[i], typeArgsMap,
methodGenericTypeArgs);
}
// Otherwise fall back to the declared generic argument at that position.
// NOTE(review): assumes genericTypes has at least actualTypes.length
// entries when non-null — confirm for exotic generic declarations.
if ((type == null) && (genericTypes != null)) {
if (genericTypes[i] instanceof Class) {
type = genericTypes[i];
} else if (genericTypes[i] instanceof WildcardType) {
AtomicReference<Type[]> methodGenericTypeArgs
= new AtomicReference<Type[]>(PodamConstants.NO_TYPES);
type = resolveGenericParameter(genericTypes[i], typeArgsMap,
methodGenericTypeArgs);
} else if (genericTypes[i] instanceof ParameterizedType) {
type = genericTypes[i];
} else {
LOG.debug("Skipping type {} {}", actualTypes[i], genericTypes[i]);
}
}
if (type != null) {
resolvedTypes.add(type);
// Drop a supplied substitution that we just resolved to, so it is not
// appended twice below.
if (!substitutionTypes.isEmpty() && substitutionTypes.get(0).equals(type)) {
substitutionTypes.remove(0);
}
}
}
Type[] resolved = resolvedTypes.toArray(new Type[resolvedTypes.size()]);
Type[] supplied = substitutionTypes.toArray(new Type[substitutionTypes.size()]);
return ArrayUtils.addAll(resolved, supplied);
}
/**
 * It resolves generic parameter type
 *
 *
 * @param paramType
 *            The generic parameter type
 * @param typeArgsMap
 *            A map of resolved types
 * @param methodGenericTypeArgs
 *            Return value: possible generic types of the generic parameter
 *            type
 * @return value for class representing the generic parameter type
 */
public static Class<?> resolveGenericParameter(Type paramType,
Map<String, Type> typeArgsMap,
AtomicReference<Type[]> methodGenericTypeArgs) {
Class<?> parameterType = null;
//Safe copy
Map<String, Type> localMap = new HashMap<String, Type>(typeArgsMap);
methodGenericTypeArgs.set(PodamConstants.NO_TYPES);
if (paramType instanceof Class) {
parameterType = (Class<?>) paramType;
} else if (paramType instanceof TypeVariable<?>) {
// Look the variable up by name and recurse on whatever it maps to.
final TypeVariable<?> typeVariable = (TypeVariable<?>) paramType;
final Type type = localMap.get(typeVariable.getName());
if (type != null) {
parameterType = resolveGenericParameter(type, localMap,
methodGenericTypeArgs);
}
} else if (paramType instanceof ParameterizedType) {
ParameterizedType pType = (ParameterizedType) paramType;
parameterType = (Class<?>) pType.getRawType();
Type[] actualTypeArgs = pType.getActualTypeArguments();
if (!typeArgsMap.isEmpty()) {
for (int i = 0; i < actualTypeArgs.length; i++) {
Class<?> tmp = resolveGenericParameter(actualTypeArgs[i],
localMap, methodGenericTypeArgs);
if (tmp != actualTypeArgs[i]) {
/* If actual type argument has its own arguments,
 * we will loose them now, so we will leave type unresolved
 * until lower levels of type resolution */
if (ArrayUtils.isEmpty(methodGenericTypeArgs.get())) {
actualTypeArgs[i] = tmp;
}
}
}
}
methodGenericTypeArgs.set(actualTypeArgs);
} else if (paramType instanceof WildcardType) {
// Resolve against the first lower bound if present, else the first upper bound.
WildcardType wType = (WildcardType) paramType;
Type[] bounds = wType.getLowerBounds();
String msg;
if (ArrayUtils.isNotEmpty(bounds)) {
msg = "Lower bounds:";
} else {
bounds = wType.getUpperBounds();
msg = "Upper bounds:";
}
if (ArrayUtils.isNotEmpty(bounds)) {
LOG.debug(msg + Arrays.toString(bounds));
parameterType = resolveGenericParameter(bounds[0], localMap,
methodGenericTypeArgs);
}
}
// Last resort: unresolved types degrade to Object rather than failing.
if (parameterType == null) {
LOG.warn("Unrecognized type {}. Will use Object instead",
paramType);
parameterType = Object.class;
}
return parameterType;
}
/**
 * It retrieves the value for the {@link PodamStrategyValue} annotation with
 * which the attribute was annotated
 *
 * @param attributeType
 *            The attribute type, used for type checking
 * @param annotations
 *            Annotations attached to the attribute
 * @param attributeStrategy
 *            The {@link AttributeStrategy} to use
 * @return The value for the {@link PodamStrategyValue} annotation with
 *         which the attribute was annotated, or null when no strategy was
 *         given or the strategy produced null
 * @throws IllegalArgumentException
 *             If the type of the data strategy defined for the
 *             {@link PodamStrategyValue} annotation is not assignable to
 *             the annotated attribute. This de facto guarantees type
 *             safety.
 */
public static Object returnAttributeDataStrategyValue(Class<?> attributeType,
List<Annotation> annotations,
AttributeStrategy<?> attributeStrategy)
throws IllegalArgumentException {
if (null == attributeStrategy) {
return null;
}
Object retValue = attributeStrategy.getValue(attributeType, annotations);
if (retValue != null) {
// Compare against the boxed type so primitive attributes accept their
// wrapper values.
Class<?> desiredType = attributeType.isPrimitive() ?
PodamUtils.primitiveToBoxedType(attributeType) : attributeType;
if (!desiredType.isAssignableFrom(retValue.getClass())) {
String errMsg = "The AttributeStrategy "
+ attributeStrategy.getClass().getName()
+ " produced value of type "
+ retValue.getClass().getName()
+ " incompatible with attribute type "
+ attributeType.getName();
throw new IllegalArgumentException(errMsg);
} else {
LOG.debug("The parameter {} will be filled using the following strategy {}",
attributeType, attributeStrategy);
}
}
return retValue;
}
}
| |
/*
* #%L
* =====================================================
* _____ _ ____ _ _ _ _
* |_ _|_ __ _ _ ___| |_ / __ \| | | | ___ | | | |
* | | | '__| | | / __| __|/ / _` | |_| |/ __|| |_| |
* | | | | | |_| \__ \ |_| | (_| | _ |\__ \| _ |
* |_| |_| \__,_|___/\__|\ \__,_|_| |_||___/|_| |_|
* \____/
*
* =====================================================
*
* Hochschule Hannover
* (University of Applied Sciences and Arts, Hannover)
* Faculty IV, Dept. of Computer Science
* Ricklinger Stadtweg 118, 30459 Hannover, Germany
*
* Email: trust@f4-i.fh-hannover.de
* Website: http://trust.f4.hs-hannover.de/
*
* This file is part of visitmeta-visualization, version 0.5.0,
* implemented by the Trust@HsH research group at the Hochschule Hannover.
* %%
* Copyright (C) 2012 - 2015 Trust@HsH
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package de.hshannover.f4.trust.visitmeta.util;
import java.io.StringReader;
import java.io.StringWriter;
import java.util.Arrays;
import java.util.Iterator;
import javax.xml.XMLConstants;
import javax.xml.namespace.NamespaceContext;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.transform.OutputKeys;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import org.apache.log4j.Logger;
import org.w3c.dom.Document;
import org.xml.sax.InputSource;
import de.hshannover.f4.trust.visitmeta.IfmapStrings;
import de.hshannover.f4.trust.visitmeta.interfaces.Identifier;
/**
* Factory for {@link IdentifierWrapper} instances.
*
* @author Bastian Hellmann
*
*/
public class IdentifierHelper {

	private static Logger LOGGER = Logger.getLogger(IdentifierHelper.class);

	private static final XPathFactory XPATH_FACTORY = XPathFactory.newInstance();
	private static final TransformerFactory TRANSFORMER_FACTORY =
			TransformerFactory.newInstance();
	private static final DocumentBuilderFactory DOCUMENT_BUILDER_FACTORY = DocumentBuilderFactory.newInstance();

	/**
	 * Default namespace context which uses the prefixes 'meta' and 'ifmap'
	 * as specified in TNC IF-MAP Binding for SOAP version 2.2.
	 * TODO extended identifier namespaces?
	 */
	public static final NamespaceContext DEFAULT_NAMESPACE_CONTEXT = new NamespaceContext() {

		@Override
		public Iterator<String> getPrefixes(String namespaceURI) {
			// NOTE(review): returns both known prefixes regardless of the
			// requested namespace URI — confirm callers do not rely on filtering.
			return Arrays.asList(
					IfmapStrings.STD_METADATA_PREFIX,
					IfmapStrings.BASE_PREFIX)
					.iterator();
		}

		@Override
		public String getPrefix(String namespaceURI) {
			if (namespaceURI.equals(IfmapStrings.STD_METADATA_NS_URI)) {
				return IfmapStrings.STD_METADATA_PREFIX;
			} else if (namespaceURI.equals(IfmapStrings.BASE_NS_URI)) {
				return IfmapStrings.BASE_PREFIX;
			} else {
				return null;
			}
		}

		@Override
		public String getNamespaceURI(String prefix) {
			if (prefix.equals(IfmapStrings.STD_METADATA_PREFIX)) {
				return IfmapStrings.STD_METADATA_NS_URI;
			} else if (prefix.equals(IfmapStrings.BASE_PREFIX)) {
				return IfmapStrings.BASE_NS_URI;
			} else {
				return XMLConstants.NULL_NS_URI;
			}
		}
	};

	/**
	 * Create an {@link IdentifierWrapper} instance for the given identifier.
	 *
	 * @param identifier the identifier whose raw XML data should be wrapped
	 * @return the wrapped identifier, or {@code null} if the identifier's raw
	 *         data could not be parsed
	 */
	public static IdentifierWrapper identifier(Identifier identifier) {
		try {
			// Transformer used by toFormattedString() for pretty printing.
			Transformer printFormatTransformer = TRANSFORMER_FACTORY.newTransformer();
			printFormatTransformer.setOutputProperty(OutputKeys.OMIT_XML_DECLARATION, "yes");
			printFormatTransformer.setOutputProperty(OutputKeys.METHOD, "xml");
			printFormatTransformer.setOutputProperty(OutputKeys.INDENT, "yes");
			printFormatTransformer.setOutputProperty(OutputKeys.ENCODING, "UTF-8");
			printFormatTransformer.setOutputProperty("{http://xml.apache.org/xslt}indent-amount", "2");

			// The original code also built a second "equals" transformer here,
			// but it was never used — removed as dead code.

			XPath xPath = XPATH_FACTORY.newXPath();
			try {
				DocumentBuilder builder = DOCUMENT_BUILDER_FACTORY.newDocumentBuilder();
				Document document = builder.parse(
						new InputSource(new StringReader(identifier.getRawData())));
				return new IdentifierWrapperImpl(
						identifier.getTypeName(),
						document,
						xPath,
						printFormatTransformer,
						DEFAULT_NAMESPACE_CONTEXT);
			} catch (Exception e) {
				// Best effort, preserving the original contract of returning
				// null on parse failure — but log instead of printing to stderr.
				LOGGER.error("could not parse raw data of identifier '" + identifier + "'", e);
				return null;
			}
		} catch (TransformerConfigurationException e) {
			// Misconfigured transformer factory is a programming/environment
			// error; fail fast with the cause preserved.
			throw new RuntimeException(e);
		}
	}

	/**
	 * Wrapper implementation which uses {@link XPath} to extract values
	 * from {@link Document} instances representing Identifier.
	 */
	private static class IdentifierWrapperImpl implements IdentifierWrapper {

		final String mTypeName;
		final Document mDocument;
		final XPath mXpath;
		final Transformer mPrintTransformer;

		/**
		 * Create a wrapper instance for the given document.
		 *
		 * @param typeName the identifier's type name
		 * @param document the document to wrap
		 * @param xpath the XPath instance for this wrapper
		 * @param printFormatTransformer the transformer to use for pretty printing
		 * @param namespaceContext the namespace context for XPath operations
		 */
		public IdentifierWrapperImpl(
				String typeName,
				Document document,
				XPath xpath,
				Transformer printFormatTransformer,
				NamespaceContext namespaceContext) {
			mTypeName = typeName;
			mDocument = document;
			mXpath = xpath;
			mPrintTransformer = printFormatTransformer;
			mXpath.setNamespaceContext(namespaceContext);
		}

		/**
		 * Evaluate the given XPath expression on the given document. Returns
		 * the result as a string, or null if the expression could not be
		 * evaluated.
		 */
		private String getValueFromExpression(String expression, Document doc) {
			try {
				// Use the passed-in document (the original ignored this
				// parameter and always used mDocument; callers pass mDocument,
				// so behavior is unchanged).
				return mXpath.evaluate(expression, doc.getDocumentElement());
			} catch (XPathExpressionException e) {
				LOGGER.error("could not evaluate '" + expression + "' on '" + doc + "'");
				return null;
			}
		}

		@Override
		public String getValueForXpathExpression(String xPathExpression) {
			return getValueFromExpression(xPathExpression, mDocument);
		}

		@Override
		public String getValueForXpathExpressionOrElse(String xPathExpression,
				String defaultValue) {
			String result = getValueForXpathExpression(xPathExpression);
			if (result == null) {
				return defaultValue;
			} else {
				return result;
			}
		}

		@Override
		public String toFormattedString() {
			StringWriter writer = new StringWriter();
			try {
				mPrintTransformer.transform(
						new DOMSource(mDocument), new StreamResult(writer));
			} catch (TransformerException e) {
				throw new RuntimeException(e);
			}
			return writer.toString();
		}

		@Override
		public void setNamespaceContext(NamespaceContext context) {
			mXpath.setNamespaceContext(context);
		}

		@Override
		public String getTypeName() {
			return mTypeName;
		}
	}
}
| |
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.ui;
import com.intellij.ide.util.treeView.NodeDescriptor;
import com.intellij.openapi.actionSystem.ActionManager;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.actionSystem.IdeActions;
import com.intellij.openapi.project.DumbAwareAction;
import com.intellij.ui.treeStructure.Tree;
import com.intellij.util.ArrayUtil;
import com.intellij.util.ArrayUtilRt;
import com.intellij.util.ObjectUtils;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.Convertor;
import com.intellij.util.ui.tree.TreeUtil;
import org.jetbrains.annotations.NotNull;
import javax.swing.*;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.TreeNode;
import javax.swing.tree.TreePath;
import javax.swing.tree.TreeSelectionModel;
import java.util.ArrayList;
import java.util.List;
import java.util.ListIterator;
import static javax.swing.tree.TreeSelectionModel.DISCONTIGUOUS_TREE_SELECTION;
public class TreeSpeedSearch extends SpeedSearchBase<JTree> {
private boolean myCanExpand;
private static final Convertor<TreePath, String> TO_STRING = new Convertor<TreePath, String>() {
@Override
public String convert(TreePath object) {
DefaultMutableTreeNode node = (DefaultMutableTreeNode)object.getLastPathComponent();
return node.toString();
}
};
private final Convertor<TreePath, String> myToStringConvertor;
public static final Convertor<TreePath, String> NODE_DESCRIPTOR_TOSTRING = new Convertor<TreePath, String>() {
@Override
public String convert(TreePath path) {
final DefaultMutableTreeNode node = (DefaultMutableTreeNode)path.getLastPathComponent();
final Object userObject = node.getUserObject();
if (userObject instanceof NodeDescriptor) {
NodeDescriptor descr = (NodeDescriptor)userObject;
return descr.toString();
}
return TO_STRING.convert(path);
}
};
public TreeSpeedSearch(JTree tree, Convertor<TreePath, String> toStringConvertor) {
this(tree, toStringConvertor, false);
}
public TreeSpeedSearch(JTree tree) {
this(tree, TO_STRING);
}
public TreeSpeedSearch(Tree tree, Convertor<TreePath, String> toString) {
this(tree, toString, false);
}
public TreeSpeedSearch(Tree tree, Convertor<TreePath, String> toString, boolean canExpand) {
this((JTree)tree, toString, canExpand);
}
public TreeSpeedSearch(JTree tree, Convertor<TreePath, String> toString, boolean canExpand) {
super(tree);
setComparator(new SpeedSearchComparator(false, true));
myToStringConvertor = toString;
myCanExpand = canExpand;
new MySelectAllAction(tree, this).registerCustomShortcutSet(tree, null);
}
@Override
protected void selectElement(Object element, String selectedText) {
TreeUtil.selectPath(myComponent, (TreePath)element);
}
@Override
protected int getSelectedIndex() {
if (myCanExpand) {
return ArrayUtilRt.find(getAllElements(), myComponent.getSelectionPath());
}
int[] selectionRows = myComponent.getSelectionRows();
return selectionRows == null || selectionRows.length == 0 ? -1 : selectionRows[0];
}
@Override
protected Object[] getAllElements() {
if (myCanExpand) {
final Object root = myComponent.getModel().getRoot();
if (root instanceof DefaultMutableTreeNode || root instanceof PathAwareTreeNode) {
final List<TreePath> paths = new ArrayList<>();
TreeUtil.traverseDepth((TreeNode)root, node -> {
if (node instanceof DefaultMutableTreeNode) {
paths.add(new TreePath(((DefaultMutableTreeNode)node).getPath()));
}
else if (node instanceof PathAwareTreeNode) {
paths.add(((PathAwareTreeNode)node).getPath());
}
return true;
});
return paths.toArray(new TreePath[paths.size()]);
}
}
TreePath[] paths = new TreePath[myComponent.getRowCount()];
for (int i = 0; i < paths.length; i++) {
paths[i] = myComponent.getPathForRow(i);
}
return paths;
}
@Override
protected String getElementText(Object element) {
TreePath path = (TreePath)element;
String string = myToStringConvertor.convert(path);
if (string == null) return TO_STRING.convert(path);
return string;
}
public interface PathAwareTreeNode extends TreeNode {
TreePath getPath();
}
@NotNull
private List<TreePath> findAllFilteredElements(String s) {
List<TreePath> paths = new ArrayList<>();
String _s = s.trim();
ListIterator<Object> it = getElementIterator(0);
while (it.hasNext()) {
Object element = it.next();
if (isMatchingElement(element, _s)) paths.add((TreePath)element);
}
return paths;
}
private static class MySelectAllAction extends DumbAwareAction {
@NotNull private final JTree myTree;
@NotNull private final TreeSpeedSearch mySearch;
public MySelectAllAction(@NotNull JTree tree, @NotNull TreeSpeedSearch search) {
myTree = tree;
mySearch = search;
copyShortcutFrom(ActionManager.getInstance().getAction(IdeActions.ACTION_SELECT_ALL));
setEnabledInModalContext(true);
}
@Override
public void update(AnActionEvent e) {
e.getPresentation().setEnabled(mySearch.isPopupActive() &&
myTree.getSelectionModel().getSelectionMode() == DISCONTIGUOUS_TREE_SELECTION);
}
@Override
public void actionPerformed(AnActionEvent e) {
TreeSelectionModel sm = myTree.getSelectionModel();
String query = mySearch.getEnteredPrefix();
if (query == null) return;
List<TreePath> filtered = mySearch.findAllFilteredElements(query);
if (filtered.isEmpty()) return;
boolean alreadySelected = sm.getSelectionCount() == filtered.size() &&
ContainerUtil.and(filtered, (path) -> sm.isPathSelected(path));
if (alreadySelected) {
TreePath anchor = myTree.getAnchorSelectionPath();
sm.setSelectionPath(anchor);
myTree.setAnchorSelectionPath(anchor);
mySearch.findAndSelectElement(query);
}
else {
TreePath currentElement = (TreePath)mySearch.findElement(query);
TreePath anchor = ObjectUtils.chooseNotNull(currentElement, filtered.get(0));
sm.setSelectionPaths(ArrayUtil.toObjectArray(filtered, TreePath.class));
myTree.setAnchorSelectionPath(anchor);
}
}
}
}
| |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInsight.documentation;
import com.intellij.ide.BrowserUtil;
import com.intellij.openapi.application.Application;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.util.Computable;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.util.text.StringUtilRt;
import com.intellij.openapi.vfs.CharsetToolkit;
import com.intellij.openapi.vfs.VfsUtilCore;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.VirtualFileManager;
import com.intellij.psi.PsiElement;
import com.intellij.util.io.HttpRequests;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.*;
import java.net.URL;
import java.util.Locale;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
 * Base class for filters that turn an external javadoc page into the HTML fragment
 * shown in the documentation popup: the page is fetched (from a {@code jar:} URL or
 * over HTTP), the section relevant to the requested class or member is cut out,
 * navigation chrome is dropped, and relative references are rewritten via
 * {@link RefConvertor}s.
 */
public abstract class AbstractExternalFilter {
  private static final Logger LOG = Logger.getInstance(AbstractExternalFilter.class);

  // Markers emitted by the standard javadoc doclet delimiting the class data section.
  private static final Pattern ourClassDataStartPattern = Pattern.compile("START OF CLASS DATA", Pattern.CASE_INSENSITIVE);
  private static final Pattern ourClassDataEndPattern = Pattern.compile("SUMMARY ========", Pattern.CASE_INSENSITIVE);
  private static final Pattern ourNonClassDataEndPattern = Pattern.compile("<A NAME=", Pattern.CASE_INSENSITIVE);
  @NonNls
  protected static final Pattern ourAnchorSuffix = Pattern.compile("#(.*)$");
  protected static @NonNls final Pattern ourHtmlFileSuffix = Pattern.compile("/([^/]*[.][hH][tT][mM][lL]?)$");
  private static @NonNls final Pattern ourAnnihilator = Pattern.compile("/[^/^.]*/[.][.]/");
  private static @NonNls final String JAR_PROTOCOL = "jar:";
  @NonNls private static final String HR = "<HR>";
  @NonNls private static final String P = "<P>";
  @NonNls private static final String DL = "<DL>";
  @NonNls protected static final String H2 = "</H2>";
  @NonNls protected static final String HTML_CLOSE = "</HTML>";
  @NonNls protected static final String HTML = "<HTML>";
  @NonNls private static final String BR = "<BR>";
  @NonNls private static final String DT = "<DT>";
  private static final Pattern CHARSET_META_PATTERN =
    Pattern.compile("<meta[^>]+\\s*charset=\"?([\\w\\-]*)\\s*\">", Pattern.CASE_INSENSITIVE);
  private static final String FIELD_SUMMARY = "<!-- =========== FIELD SUMMARY =========== -->";
  private static final String CLASS_SUMMARY = "<div class=\"summary\">";

  /**
   * Rewrites hyperlink targets matched by {@code selector}: every quoted reference
   * captured by group 1 is replaced with the result of {@link #convertReference}.
   */
  protected static abstract class RefConvertor {
    @NotNull
    private final Pattern mySelector;

    public RefConvertor(@NotNull Pattern selector) {
      mySelector = selector;
    }

    protected abstract String convertReference(String root, String href);

    public CharSequence refFilter(final String root, @NotNull CharSequence read) {
      // Match case-insensitively by scanning an upper-cased copy, but copy the
      // original-cased text into the output.
      CharSequence toMatch = StringUtilRt.toUpperCase(read);
      StringBuilder ready = new StringBuilder();
      int prev = 0;
      Matcher matcher = mySelector.matcher(toMatch);
      while (matcher.find()) {
        CharSequence before = read.subSequence(prev, matcher.start(1) - 1); // Before reference
        final CharSequence href = read.subSequence(matcher.start(1), matcher.end(1)); // The URL
        prev = matcher.end(1) + 1;
        ready.append(before);
        ready.append("\"");
        // Reference conversion may touch PSI/VFS, so it runs under a read action.
        ready.append(ApplicationManager.getApplication().runReadAction(
          new Computable<String>() {
            @Override
            public String compute() {
              return convertReference(root, href.toString());
            }
          }
        ));
        ready.append("\"");
      }
      ready.append(read, prev, read.length());
      return ready;
    }
  }

  /** Collapses "/dir/../" segments repeatedly until the path stops shrinking. */
  protected static String doAnnihilate(String path) {
    int len = path.length();
    do {
      path = ourAnnihilator.matcher(path).replaceAll("/");
    }
    while (len > (len = path.length()));
    return path;
  }

  /** Runs every registered {@link RefConvertor} over the text, in order. */
  public CharSequence correctRefs(String root, CharSequence read) {
    CharSequence result = read;
    for (RefConvertor myReferenceConvertor : getRefConverters()) {
      result = myReferenceConvertor.refFilter(root, result);
    }
    return result;
  }

  protected abstract RefConvertor[] getRefConverters();

  /**
   * Fetches and filters the documentation at {@code url} on a pooled thread.
   * Returns null when the URL is missing, a fetch is already in flight, or the
   * fetch could not complete. Must not be called on EDT or under a write lock.
   */
  @Nullable
  @SuppressWarnings({"HardCodedStringLiteral"})
  public String getExternalDocInfo(final String url) throws Exception {
    Application app = ApplicationManager.getApplication();
    if (!app.isUnitTestMode() && app.isDispatchThread() || app.isWriteAccessAllowed()) {
      LOG.error("May block indefinitely: shouldn't be called from EDT or under write lock");
      return null;
    }

    // NOTE(review): ourFree is a plain static flag acting as a single-flight
    // guard; concurrent callers simply get null.
    if (url == null || !MyJavadocFetcher.ourFree) {
      return null;
    }

    MyJavadocFetcher fetcher = new MyJavadocFetcher(url, new MyDocBuilder() {
      @Override
      public void buildFromStream(String url, Reader input, StringBuilder result) throws IOException {
        doBuildFromStream(url, input, result);
      }
    });
    try {
      // Block until the pooled fetch completes; interruption/cancellation yields null.
      app.executeOnPooledThread(fetcher).get();
    }
    catch (Exception e) {
      return null;
    }

    Exception exception = fetcher.myException;
    if (exception != null) {
      fetcher.myException = null;
      throw exception;
    }

    return correctDocText(url, fetcher.data);
  }

  /** Applies reference correction (relative to the anchor-less URL) and final fixup. */
  @NotNull
  protected String correctDocText(@NotNull String url, @NotNull CharSequence data) {
    CharSequence docText = correctRefs(ourAnchorSuffix.matcher(url).replaceAll(""), data);
    if (LOG.isDebugEnabled()) {
      LOG.debug("Filtered JavaDoc: " + docText + "\n");
    }
    return PlatformDocumentationUtil.fixupText(docText);
  }

  @Nullable
  public String getExternalDocInfoForElement(final String docURL, final PsiElement element) throws Exception {
    return getExternalDocInfo(docURL);
  }

  protected void doBuildFromStream(String url, Reader input, StringBuilder data) throws IOException {
    doBuildFromStream(url, input, data, true, true);
  }

  /**
   * Reads the HTML page from {@code input} and appends the fragment between the
   * start/end patterns (per {@link #getParseSettings}) to {@code data}. Restarts
   * itself with a re-encoded reader when the page declares a different charset.
   */
  protected void doBuildFromStream(final String url, Reader input, final StringBuilder data, boolean searchForEncoding, boolean matchStart) throws IOException {
    ParseSettings settings = getParseSettings(url);
    @NonNls Pattern startSection = settings.startPattern;
    @NonNls Pattern endSection = settings.endPattern;
    boolean useDt = settings.useDt;
    @NonNls String greatestEndSection = "<!-- ========= END OF CLASS DATA ========= -->";
    data.append(HTML);
    URL baseUrl = VfsUtilCore.convertToURL(url);
    if (baseUrl != null) {
      // Make relative links on the extracted fragment resolve against the original page.
      data.append("<base href=\"").append(baseUrl).append("\">");
    }
    data.append("<style type=\"text/css\">" +
                "  ul.inheritance {\n" +
                "      margin:0;\n" +
                "      padding:0;\n" +
                "  }\n" +
                "  ul.inheritance li {\n" +
                "       display:inline;\n" +
                "       list-style:none;\n" +
                "  }\n" +
                "  ul.inheritance li ul.inheritance {\n" +
                "    margin-left:15px;\n" +
                "    padding-left:15px;\n" +
                "    padding-top:1px;\n" +
                "  }\n" +
                "</style>");

    String read;
    String contentEncoding = null;
    @SuppressWarnings("IOResourceOpenedButNotSafelyClosed")
    BufferedReader buf = new BufferedReader(input);
    // Skip forward to the start marker, sniffing for a charset declaration on the way.
    do {
      read = buf.readLine();
      if (read != null && searchForEncoding && read.contains("charset")) {
        String foundEncoding = parseContentEncoding(read);
        if (foundEncoding != null) {
          contentEncoding = foundEncoding;
        }
      }
    }
    while (read != null && matchStart && !startSection.matcher(StringUtil.toUpperCase(read)).find());

    if (input instanceof MyReader && contentEncoding != null && !contentEncoding.equalsIgnoreCase(CharsetToolkit.UTF8) &&
        !contentEncoding.equals(((MyReader)input).getEncoding())) {
      //restart page parsing with correct encoding
      try {
        data.setLength(0);
        doBuildFromStream(url, new MyReader(((MyReader)input).myInputStream, contentEncoding), data, false, true);
      }
      catch (ProcessCanceledException e) {
        return;
      }
      return;
    }

    if (read == null) {
      // Start marker never found; optionally retry processing the whole page.
      data.setLength(0);
      if (matchStart && !settings.forcePatternSearch && input instanceof MyReader) {
        try {
          final MyReader reader = contentEncoding != null ? new MyReader(((MyReader)input).myInputStream, contentEncoding)
                                                          : new MyReader(((MyReader)input).myInputStream, ((MyReader)input).getEncoding());
          doBuildFromStream(url, reader, data, false, false);
        }
        catch (ProcessCanceledException ignored) {}
      }
      return;
    }

    if (useDt) {
      boolean skip = false;

      // FIX: the previous condition also required !contains("H2"), which can never
      // hold when the line contains "</H2>", so the branch was unreachable.
      do {
        if (StringUtil.toUpperCase(read).contains(H2)) { // read=class name in <H2>
          data.append(H2);
          skip = true;
        }
        else if (endSection.matcher(read).find() || StringUtil.indexOfIgnoreCase(read, greatestEndSection, 0) != -1) {
          data.append(HTML_CLOSE);
          return;
        }
        else if (!skip) {
          appendLine(data, read);
        }
      }
      while (((read = buf.readLine()) != null) && !StringUtil.toUpperCase(read).trim().equals(DL) &&
             !StringUtil.containsIgnoreCase(read, "<div class=\"description\""));

      data.append(DL);

      StringBuilder classDetails = new StringBuilder();
      while (((read = buf.readLine()) != null) && !StringUtil.toUpperCase(read).equals(HR) && !StringUtil.toUpperCase(read).equals(P)) {
        if (reachTheEnd(data, read, classDetails)) return;
        appendLine(classDetails, read);
      }

      while (((read = buf.readLine()) != null) && !StringUtil.toUpperCase(read).equals(P) && !StringUtil.toUpperCase(read).equals(HR)) {
        if (reachTheEnd(data, read, classDetails)) return;
        // Insert a line break after each definition term for readability.
        appendLine(data, read.replaceAll(DT, DT + BR));
      }

      data.append(classDetails);
      data.append(P);
    }
    else {
      appendLine(data, read);
    }

    // Copy the remainder of the section, dropping separator/list-chrome lines.
    while (((read = buf.readLine()) != null) &&
           !endSection.matcher(read).find() &&
           StringUtil.indexOfIgnoreCase(read, greatestEndSection, 0) == -1) {
      if (!StringUtil.toUpperCase(read).contains(HR)
          && !StringUtil.containsIgnoreCase(read, "<ul class=\"blockList\">")
          && !StringUtil.containsIgnoreCase(read, "<li class=\"blockList\">")) {
        appendLine(data, read);
      }
    }
    data.append(HTML_CLOSE);
  }

  /**
   * Returns patterns delimiting the interesting fragment: the member section when the
   * URL carries an anchor, otherwise the class data section.
   */
  @NotNull
  protected ParseSettings getParseSettings(@NotNull String url) {
    Pattern startSection = ourClassDataStartPattern;
    Pattern endSection = ourClassDataEndPattern;
    boolean anchorPresent = false;

    Matcher anchorMatcher = ourAnchorSuffix.matcher(url);
    if (anchorMatcher.find()) {
      anchorPresent = true;
      startSection = Pattern.compile(Pattern.quote("<a name=\"" + anchorMatcher.group(1) + "\""), Pattern.CASE_INSENSITIVE);
      endSection = ourNonClassDataEndPattern;
    }
    return new ParseSettings(startSection, endSection, !anchorPresent, anchorPresent);
  }

  /** Detects the summary markers; if found, flushes collected details and closes the doc. */
  private static boolean reachTheEnd(StringBuilder data, String read, StringBuilder classDetails) {
    if (StringUtil.indexOfIgnoreCase(read, FIELD_SUMMARY, 0) != -1 ||
        StringUtil.indexOfIgnoreCase(read, CLASS_SUMMARY, 0) != -1) {
      data.append(classDetails);
      data.append(HTML_CLOSE);
      return true;
    }
    return false;
  }

  /** Extracts the charset name from a {@code <meta ... charset=...>} line, or null. */
  @Nullable
  static String parseContentEncoding(@NotNull String htmlLine) {
    if (!htmlLine.contains("charset")) {
      return null;
    }
    Matcher matcher = CHARSET_META_PATTERN.matcher(htmlLine);
    return matcher.find() ? matcher.group(1) : null;
  }

  private static void appendLine(StringBuilder buffer, final String read) {
    buffer.append(read);
    buffer.append("\n");
  }

  private interface MyDocBuilder {
    void buildFromStream(String url, Reader input, StringBuilder result) throws IOException;
  }

  /** Fetches one javadoc page (jar: or http) and feeds it through the doc builder. */
  private static class MyJavadocFetcher implements Runnable {
    // Single-flight guard: false while a fetch is running. See getExternalDocInfo().
    private static boolean ourFree = true;
    private final StringBuilder data = new StringBuilder();
    private final String url;
    private final MyDocBuilder myBuilder;
    private Exception myException;

    public MyJavadocFetcher(String url, MyDocBuilder builder) {
      this.url = url;
      myBuilder = builder;
      //noinspection AssignmentToStaticFieldFromInstanceMethod
      ourFree = false;
    }

    @Override
    public void run() {
      try {
        if (url == null) {
          return;
        }

        if (url.startsWith(JAR_PROTOCOL)) {
          VirtualFile file = VirtualFileManager.getInstance().findFileByUrl(BrowserUtil.getDocURL(url));
          if (file != null) {
            myBuilder.buildFromStream(url, new StringReader(VfsUtilCore.loadText(file)), data);
          }
        }
        else {
          URL parsedUrl = BrowserUtil.getURL(url);
          if (parsedUrl != null) {
            HttpRequests.request(parsedUrl.toString()).gzip(false).connect(new HttpRequests.RequestProcessor<Void>() {
              @Override
              public Void process(@NotNull HttpRequests.Request request) throws IOException {
                // Buffer the page so it can be re-read once the charset is known.
                byte[] bytes = request.readBytes(null);
                String contentEncoding = null;
                ByteArrayInputStream stream = new ByteArrayInputStream(bytes);
                BufferedReader reader = new BufferedReader(new InputStreamReader(stream));
                try {
                  for (String htmlLine = reader.readLine(); htmlLine != null; htmlLine = reader.readLine()) {
                    contentEncoding = parseContentEncoding(htmlLine);
                    if (contentEncoding != null) {
                      break;
                    }
                  }
                }
                finally {
                  reader.close();
                  stream.reset();
                }

                if (contentEncoding == null) {
                  contentEncoding = request.getConnection().getContentEncoding();
                }

                //noinspection IOResourceOpenedButNotSafelyClosed
                myBuilder.buildFromStream(url, contentEncoding != null ? new MyReader(stream, contentEncoding) : new MyReader(stream), data);
                return null;
              }
            });
          }
        }
      }
      catch (ProcessCanceledException ignored) {
        // Cancellation is not an error; the popup was dismissed.
      }
      catch (IOException e) {
        myException = e;
      }
      finally {
        //noinspection AssignmentToStaticFieldFromInstanceMethod
        ourFree = true;
      }
    }
  }

  /** Reader over a resettable byte buffer, so parsing can restart with another charset. */
  private static class MyReader extends InputStreamReader {
    private final ByteArrayInputStream myInputStream;

    public MyReader(ByteArrayInputStream in) {
      super(in);
      in.reset();
      myInputStream = in;
    }

    public MyReader(ByteArrayInputStream in, String charsetName) throws UnsupportedEncodingException {
      super(in, charsetName);
      in.reset();
      myInputStream = in;
    }
  }

  /**
   * Settings used for parsing of external documentation
   */
  protected static class ParseSettings {
    /**
     * Pattern defining the start of target fragment.
     */
    @NotNull
    private final Pattern startPattern;
    /**
     * Pattern defining the end of target fragment.
     */
    @NotNull
    private final Pattern endPattern;
    /**
     * If <code>false</code>, and line matching start pattern is not found, whole document will be processed.
     */
    private final boolean forcePatternSearch;
    /**
     * Replace table data by <dt>.
     */
    private final boolean useDt;

    public ParseSettings(@NotNull Pattern startPattern, @NotNull Pattern endPattern, boolean useDt, boolean forcePatternSearch) {
      this.startPattern = startPattern;
      this.endPattern = endPattern;
      this.useDt = useDt;
      this.forcePatternSearch = forcePatternSearch;
    }
  }
}
| |
package org.apereo.cas.authentication.attribute;
import org.apereo.cas.services.RegisteredService;
import org.apereo.cas.util.CollectionUtils;
import org.apereo.cas.util.EncodingUtils;
import org.apereo.cas.util.LoggingUtils;
import org.apereo.cas.util.ResourceUtils;
import org.apereo.cas.util.scripting.ExecutableCompiledGroovyScript;
import org.apereo.cas.util.scripting.GroovyShellScript;
import org.apereo.cas.util.scripting.ScriptResourceCacheManager;
import org.apereo.cas.util.scripting.ScriptingUtils;
import org.apereo.cas.util.scripting.WatchableGroovyScriptResource;
import org.apereo.cas.util.spring.ApplicationContextProvider;
import org.apereo.cas.util.spring.SpringExpressionLanguageValueResolver;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;
import lombok.ToString;
import lombok.extern.slf4j.Slf4j;
import lombok.val;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.builder.CompareToBuilder;
import org.apache.commons.lang3.tuple.Pair;
import org.jooq.lambda.Unchecked;
import java.nio.charset.StandardCharsets;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
/**
* This is {@link DefaultAttributeDefinition}.
*
* @author Misagh Moayyed
* @author Travis Schmidt
* @since 6.2.0
*/
@JsonTypeInfo(use = JsonTypeInfo.Id.CLASS)
@ToString
@EqualsAndHashCode(of = "key")
@Getter
@Setter
@AllArgsConstructor
@Builder
@NoArgsConstructor
@Slf4j
public class DefaultAttributeDefinition implements AttributeDefinition {
private static final long serialVersionUID = 6898745248727445565L;
private String key;
private String name;
private boolean scoped;
private boolean encrypted;
private String attribute;
private String patternFormat;
private String script;
@Override
public int compareTo(final AttributeDefinition o) {
return new CompareToBuilder()
.append(getKey(), o.getKey())
.build();
}
@JsonIgnore
@Override
public List<Object> resolveAttributeValues(final List<Object> attributeValues,
final String scope,
final RegisteredService registeredService) {
List<Object> currentValues = new ArrayList<>(attributeValues);
if (StringUtils.isNotBlank(getScript())) {
currentValues = getScriptedAttributeValue(key, currentValues);
}
if (isScoped()) {
currentValues = formatValuesWithScope(scope, currentValues);
}
if (StringUtils.isNotBlank(getPatternFormat())) {
currentValues = formatValuesWithPattern(currentValues);
}
if (isEncrypted()) {
currentValues = encryptValues(currentValues, registeredService);
}
LOGGER.trace("Resolved values [{}] for attribute definition [{}]", currentValues, this);
return currentValues;
}
private static List<Object> formatValuesWithScope(final String scope, final List<Object> currentValues) {
return currentValues
.stream()
.map(v -> String.format("%s@%s", v, scope))
.collect(Collectors.toCollection(ArrayList::new));
}
private static List<Object> encryptValues(final List<Object> currentValues, final RegisteredService registeredService) {
val publicKey = registeredService.getPublicKey();
if (publicKey == null) {
LOGGER.error("No public key is defined for service [{}]. No attributes will be released", registeredService);
return new ArrayList<>(0);
}
val cipher = publicKey.toCipher();
if (cipher == null) {
LOGGER.error("Unable to initialize cipher given the public key algorithm [{}]", publicKey.getAlgorithm());
return new ArrayList<>(0);
}
return currentValues
.stream()
.map(Unchecked.function(value -> {
LOGGER.trace("Encrypting attribute value [{}]", value);
val result = EncodingUtils.encodeBase64(cipher.doFinal(value.toString().getBytes(StandardCharsets.UTF_8)));
LOGGER.trace("Encrypted attribute value [{}]", result);
return result;
}))
.collect(Collectors.toCollection(ArrayList::new));
}
private List<Object> formatValuesWithPattern(final List<Object> currentValues) {
return currentValues
.stream()
.map(v -> MessageFormat.format(getPatternFormat(), v))
.collect(Collectors.toCollection(ArrayList::new));
}
@JsonIgnore
private List<Object> getScriptedAttributeValue(final String attributeKey,
final List<Object> currentValues) {
LOGGER.trace("Locating attribute value via script for definition [{}]", this);
val matcherInline = ScriptingUtils.getMatcherForInlineGroovyScript(getScript());
if (matcherInline.find()) {
return fetchAttributeValueAsInlineGroovyScript(attributeKey, currentValues, matcherInline.group(1));
}
val matcherFile = ScriptingUtils.getMatcherForExternalGroovyScript(getScript());
if (matcherFile.find()) {
return fetchAttributeValueFromExternalGroovyScript(attributeKey, currentValues, matcherFile.group());
}
return new ArrayList<>(0);
}
private static List<Object> fetchAttributeValueFromExternalGroovyScript(final String attributeName,
final List<Object> currentValues,
final String file) {
val result = ApplicationContextProvider.getScriptResourceCacheManager();
if (result.isPresent()) {
val cacheMgr = result.get();
val cacheKey = ScriptResourceCacheManager.computeKey(Pair.of(attributeName, file));
LOGGER.trace("Constructed cache key [{}] for attribute [{}] mapped as groovy script", cacheKey, attributeName);
var script = (ExecutableCompiledGroovyScript) null;
if (cacheMgr.containsKey(cacheKey)) {
script = cacheMgr.get(cacheKey);
LOGGER.trace("Located cached groovy script [{}] for key [{}]", script, cacheKey);
} else {
try {
val scriptPath = SpringExpressionLanguageValueResolver.getInstance().resolve(file);
val resource = ResourceUtils.getResourceFrom(scriptPath);
LOGGER.trace("Groovy script [{}] for key [{}] is not cached", resource, cacheKey);
script = new WatchableGroovyScriptResource(resource);
cacheMgr.put(cacheKey, script);
LOGGER.trace("Cached groovy script [{}] for key [{}]", script, cacheKey);
} catch (final Exception e) {
LoggingUtils.error(LOGGER, e);
return new ArrayList<>(0);
}
}
if (script != null) {
return fetchAttributeValueFromScript(script, attributeName, currentValues);
}
}
LOGGER.warn("No groovy script cache manager is available to execute attribute mappings");
return new ArrayList<>(0);
}
private static List<Object> fetchAttributeValueAsInlineGroovyScript(final String attributeName,
final List<Object> currentValues,
final String inlineGroovy) {
val result = ApplicationContextProvider.getScriptResourceCacheManager();
if (result.isPresent()) {
val cacheMgr = result.get();
val cacheKey = ScriptResourceCacheManager.computeKey(Pair.of(attributeName, inlineGroovy));
LOGGER.trace("Constructed cache key [{}] for attribute [{}] mapped as inline groovy script", cacheKey, attributeName);
var executableScript = (ExecutableCompiledGroovyScript) null;
if (cacheMgr.containsKey(cacheKey)) {
LOGGER.trace("Inline groovy script for key [{}] is not cached", cacheKey);
executableScript = cacheMgr.get(cacheKey);
} else {
LOGGER.trace("Inline groovy script for key [{}] is not cached", cacheKey);
executableScript = new GroovyShellScript(inlineGroovy);
cacheMgr.put(cacheKey, executableScript);
LOGGER.trace("Cached inline groovy script for key [{}]", cacheKey);
}
return fetchAttributeValueFromScript(executableScript, attributeName, currentValues);
}
LOGGER.warn("No groovy script cache manager is available to execute attribute mappings");
return new ArrayList<>(0);
}
private static List<Object> fetchAttributeValueFromScript(final ExecutableCompiledGroovyScript scriptToExec,
final String attributeKey,
final List<Object> currentValues) {
val args = CollectionUtils.<String, Object>wrap("attributeName", Objects.requireNonNull(attributeKey),
"attributeValues", currentValues, "logger", LOGGER);
scriptToExec.setBinding(args);
return scriptToExec.execute(args.values().toArray(), List.class);
}
}
| |
/*
* Copyright 2007 ZXing authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.zxing.datamatrix.decoder;
import com.google.zxing.FormatException;
/**
* The Version object encapsulates attributes about a particular
* size Data Matrix Code.
*
* @author bbrown@google.com (Brian Brown)
*/
public final class Version {
private static final Version[] VERSIONS = buildVersions();
private final int versionNumber;
private final int symbolSizeRows;
private final int symbolSizeColumns;
private final int dataRegionSizeRows;
private final int dataRegionSizeColumns;
private final ECBlocks ecBlocks;
private final int totalCodewords;
private Version(int versionNumber,
                int symbolSizeRows,
                int symbolSizeColumns,
                int dataRegionSizeRows,
                int dataRegionSizeColumns,
                ECBlocks ecBlocks) {
  this.versionNumber = versionNumber;
  this.symbolSizeRows = symbolSizeRows;
  this.symbolSizeColumns = symbolSizeColumns;
  this.dataRegionSizeRows = dataRegionSizeRows;
  this.dataRegionSizeColumns = dataRegionSizeColumns;
  this.ecBlocks = ecBlocks;
  // Total codewords = sum over all EC blocks of count * (data + EC codewords per block).
  int ecCodewordsPerBlock = ecBlocks.getECCodewords();
  int codewordCount = 0;
  for (ECB block : ecBlocks.getECBlocks()) {
    codewordCount += block.getCount() * (block.getDataCodewords() + ecCodewordsPerBlock);
  }
  this.totalCodewords = codewordCount;
}
/** @return this symbol size's version number */
public int getVersionNumber() {
  return this.versionNumber;
}
/** @return overall symbol height in modules */
public int getSymbolSizeRows() {
  return this.symbolSizeRows;
}
/** @return overall symbol width in modules */
public int getSymbolSizeColumns() {
  return this.symbolSizeColumns;
}
public int getDataRegionSizeRows() {
return dataRegionSizeRows;
}
public int getDataRegionSizeColumns() {
return dataRegionSizeColumns;
}
public int getTotalCodewords() {
return totalCodewords;
}
ECBlocks getECBlocks() {
return ecBlocks;
}
/**
* <p>Deduces version information from Data Matrix dimensions.</p>
*
* @param numRows Number of rows in modules
* @param numColumns Number of columns in modules
* @return Version for a Data Matrix Code of those dimensions
* @throws FormatException if dimensions do correspond to a valid Data Matrix size
*/
public static Version getVersionForDimensions(int numRows, int numColumns) throws FormatException {
if ((numRows & 0x01) != 0 || (numColumns & 0x01) != 0) {
throw FormatException.getFormatInstance();
}
for (Version version : VERSIONS) {
if (version.symbolSizeRows == numRows && version.symbolSizeColumns == numColumns) {
return version;
}
}
throw FormatException.getFormatInstance();
}
/**
* <p>Encapsulates a set of error-correction blocks in one symbol version. Most versions will
* use blocks of differing sizes within one version, so, this encapsulates the parameters for
* each set of blocks. It also holds the number of error-correction codewords per block since it
* will be the same across all blocks within one version.</p>
*/
static final class ECBlocks {
private final int ecCodewords;
private final ECB[] ecBlocks;
private ECBlocks(int ecCodewords, ECB ecBlocks) {
this.ecCodewords = ecCodewords;
this.ecBlocks = new ECB[] { ecBlocks };
}
private ECBlocks(int ecCodewords, ECB ecBlocks1, ECB ecBlocks2) {
this.ecCodewords = ecCodewords;
this.ecBlocks = new ECB[] { ecBlocks1, ecBlocks2 };
}
int getECCodewords() {
return ecCodewords;
}
ECB[] getECBlocks() {
return ecBlocks;
}
}
/**
* <p>Encapsulates the parameters for one error-correction block in one symbol version.
* This includes the number of data codewords, and the number of times a block with these
* parameters is used consecutively in the Data Matrix code version's format.</p>
*/
static final class ECB {
private final int count;
private final int dataCodewords;
private ECB(int count, int dataCodewords) {
this.count = count;
this.dataCodewords = dataCodewords;
}
int getCount() {
return count;
}
int getDataCodewords() {
return dataCodewords;
}
}
@Override
public String toString() {
return String.valueOf(versionNumber);
}
/**
* See ISO 16022:2006 5.5.1 Table 7
*/
private static Version[] buildVersions() {
return new Version[]{
new Version(1, 10, 10, 8, 8,
new ECBlocks(5, new ECB(1, 3))),
new Version(2, 12, 12, 10, 10,
new ECBlocks(7, new ECB(1, 5))),
new Version(3, 14, 14, 12, 12,
new ECBlocks(10, new ECB(1, 8))),
new Version(4, 16, 16, 14, 14,
new ECBlocks(12, new ECB(1, 12))),
new Version(5, 18, 18, 16, 16,
new ECBlocks(14, new ECB(1, 18))),
new Version(6, 20, 20, 18, 18,
new ECBlocks(18, new ECB(1, 22))),
new Version(7, 22, 22, 20, 20,
new ECBlocks(20, new ECB(1, 30))),
new Version(8, 24, 24, 22, 22,
new ECBlocks(24, new ECB(1, 36))),
new Version(9, 26, 26, 24, 24,
new ECBlocks(28, new ECB(1, 44))),
new Version(10, 32, 32, 14, 14,
new ECBlocks(36, new ECB(1, 62))),
new Version(11, 36, 36, 16, 16,
new ECBlocks(42, new ECB(1, 86))),
new Version(12, 40, 40, 18, 18,
new ECBlocks(48, new ECB(1, 114))),
new Version(13, 44, 44, 20, 20,
new ECBlocks(56, new ECB(1, 144))),
new Version(14, 48, 48, 22, 22,
new ECBlocks(68, new ECB(1, 174))),
new Version(15, 52, 52, 24, 24,
new ECBlocks(42, new ECB(2, 102))),
new Version(16, 64, 64, 14, 14,
new ECBlocks(56, new ECB(2, 140))),
new Version(17, 72, 72, 16, 16,
new ECBlocks(36, new ECB(4, 92))),
new Version(18, 80, 80, 18, 18,
new ECBlocks(48, new ECB(4, 114))),
new Version(19, 88, 88, 20, 20,
new ECBlocks(56, new ECB(4, 144))),
new Version(20, 96, 96, 22, 22,
new ECBlocks(68, new ECB(4, 174))),
new Version(21, 104, 104, 24, 24,
new ECBlocks(56, new ECB(6, 136))),
new Version(22, 120, 120, 18, 18,
new ECBlocks(68, new ECB(6, 175))),
new Version(23, 132, 132, 20, 20,
new ECBlocks(62, new ECB(8, 163))),
new Version(24, 144, 144, 22, 22,
new ECBlocks(62, new ECB(8, 156), new ECB(2, 155))),
new Version(25, 8, 18, 6, 16,
new ECBlocks(7, new ECB(1, 5))),
new Version(26, 8, 32, 6, 14,
new ECBlocks(11, new ECB(1, 10))),
new Version(27, 12, 26, 10, 24,
new ECBlocks(14, new ECB(1, 16))),
new Version(28, 12, 36, 10, 16,
new ECBlocks(18, new ECB(1, 22))),
new Version(29, 16, 36, 14, 16,
new ECBlocks(24, new ECB(1, 32))),
new Version(30, 16, 48, 14, 22,
new ECBlocks(28, new ECB(1, 49))),
// extended forms as specified in
// ISO 21471:2020 (DMRE) 5.5.1 Table 7
new Version(31, 8, 48, 6, 22,
new ECBlocks(15, new ECB(1, 18))),
new Version(32, 8, 64, 6, 14,
new ECBlocks(18, new ECB(1, 24))),
new Version(33, 8, 80, 6, 18,
new ECBlocks(22, new ECB(1, 32))),
new Version(34, 8, 96, 6, 22,
new ECBlocks(28, new ECB(1, 38))),
new Version(35, 8, 120, 6, 18,
new ECBlocks(32, new ECB(1, 49))),
new Version(36, 8, 144, 6, 22,
new ECBlocks(36, new ECB(1, 63))),
new Version(37, 12, 64, 10, 14,
new ECBlocks(27, new ECB(1, 43))),
new Version(38, 12, 88, 10, 20,
new ECBlocks(36, new ECB(1, 64))),
new Version(39, 16, 64, 14, 14,
new ECBlocks(36, new ECB(1, 62))),
new Version(40, 20, 36, 18, 16,
new ECBlocks(28, new ECB(1, 44))),
new Version(41, 20, 44, 18, 20,
new ECBlocks(34, new ECB(1, 56))),
new Version(42, 20, 64, 18, 14,
new ECBlocks(42, new ECB(1, 84))),
new Version(43, 22, 48, 20, 22,
new ECBlocks(38, new ECB(1, 72))),
new Version(44, 24, 48, 22, 22,
new ECBlocks(41, new ECB(1, 80))),
new Version(45, 24, 64, 22, 14,
new ECBlocks(46, new ECB(1, 108))),
new Version(46, 26, 40, 24, 18,
new ECBlocks(38, new ECB(1, 70))),
new Version(47, 26, 48, 24, 22,
new ECBlocks(42, new ECB(1, 90))),
new Version(48, 26, 64, 24, 14,
new ECBlocks(50, new ECB(1, 118)))
};
}
}
| |
/*
* dk.brics.automaton - AutomatonMatcher
*
* Copyright (c) 2008-2011 John Gibson
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package dk.brics.automaton;
import java.util.regex.MatchResult;
/**
 * A tool that performs match operations on a given character sequence using
 * a compiled automaton.
 *
 * @author John Gibson <<a href="mailto:jgibson@mitre.org">jgibson@mitre.org</a>>
 * @see RunAutomaton#newMatcher(java.lang.CharSequence)
 * @see RunAutomaton#newMatcher(java.lang.CharSequence, int, int)
 */
public class AutomatonMatcher implements MatchResult {

    AutomatonMatcher(final CharSequence chars, final RunAutomaton automaton) {
        this.chars = chars;
        this.automaton = automaton;
    }

    private final RunAutomaton automaton;
    private final CharSequence chars;

    // Match-state sentinels: -1 means no find() attempt has been made yet,
    // -2 means the input is exhausted and no further match is possible.
    // Any non-negative pair is the [start, end) span of the last match.
    private int matchStart = -1;
    private int matchEnd = -1;

    /**
     * Find the next matching subsequence of the input.
     * <br />
     * This also updates the values for the {@code start}, {@code end}, and
     * {@code group} methods.
     *
     * @return {@code true} if there is a matching subsequence.
     */
    public boolean find() {
        int begin;
        switch(getMatchStart()) {
        case -2:
            // Previous attempt already exhausted the input; stay failed.
            return false;
        case -1:
            // First find() call: scan from the start of the input.
            begin = 0;
            break;
        default:
            begin = getMatchEnd();
            // This occurs when a previous find() call matched the empty string. This can happen when the pattern is a* for example.
            if(begin == getMatchStart()) {
                begin += 1;
                if(begin > getChars().length()) {
                    setMatch(-2, -2);
                    return false;
                }
            }
        }

        int match_start;
        int match_end;
        // If the initial state accepts, the empty string at `begin` is already a
        // candidate match; a longer match found below will overwrite it.
        if (automaton.isAccept(automaton.getInitialState())) {
            match_start = begin;
            match_end = begin;
        } else {
            match_start = -1;
            match_end = -1;
        }
        int l = getChars().length();
        // Try each start position in turn; for a fixed start, run the automaton
        // forward and remember the longest accepted prefix (leftmost-longest).
        while (begin < l) {
            int p = automaton.getInitialState();
            for (int i = begin; i < l; i++) {
                final int new_state = automaton.step(p, getChars().charAt(i));
                if (new_state == -1) {
                    // Dead state: no extension of this start position can match.
                    break;
                } else if (automaton.isAccept(new_state)) {
                    // found a match from begin to (i+1)
                    match_start = begin;
                    match_end=(i+1);
                }
                p = new_state;
            }
            if (match_start != -1) {
                setMatch(match_start, match_end);
                return true;
            }
            begin += 1;
        }
        // Loop never ran (begin == l): only the empty match at end-of-input,
        // recorded above, can still succeed here.
        if (match_start != -1) {
            setMatch(match_start, match_end);
            return true;
        } else {
            setMatch(-2, -2);
            return false;
        }
    }

    /**
     * Records the span of the current match (or a sentinel pair, see the field
     * comment above).
     *
     * @throws IllegalArgumentException if start is greater than end.
     */
    private void setMatch(final int matchStart, final int matchEnd) throws IllegalArgumentException {
        if (matchStart > matchEnd) {
            throw new IllegalArgumentException("Start must be less than or equal to end: " + matchStart + ", " + matchEnd);
        }
        this.matchStart = matchStart;
        this.matchEnd = matchEnd;
    }

    private int getMatchStart() {
        return matchStart;
    }

    private int getMatchEnd() {
        return matchEnd;
    }

    private CharSequence getChars() {
        return chars;
    }

    /**
     * Returns the offset after the last character matched.
     *
     * @return The offset after the last character matched.
     * @throws IllegalStateException if there has not been a match attempt or
     *  if the last attempt yielded no results.
     */
    public int end() throws IllegalStateException {
        matchGood();
        return matchEnd;
    }

    /**
     * Returns the offset after the last character matched of the specified
     * capturing group.
     * <br />
     * Note that because the automaton does not support capturing groups the
     * only valid group is 0 (the entire match).
     *
     * @param group the desired capturing group.
     * @return The offset after the last character matched of the specified
     *  capturing group.
     * @throws IllegalStateException if there has not been a match attempt or
     *  if the last attempt yielded no results.
     * @throws IndexOutOfBoundsException if the specified capturing group does
     *  not exist in the underlying automaton.
     */
    public int end(final int group) throws IndexOutOfBoundsException, IllegalStateException {
        onlyZero(group);
        return end();
    }

    /**
     * Returns the subsequence of the input found by the previous match.
     *
     * @return The subsequence of the input found by the previous match.
     * @throws IllegalStateException if there has not been a match attempt or
     *  if the last attempt yielded no results.
     */
    public String group() throws IllegalStateException {
        matchGood();
        return chars.subSequence(matchStart, matchEnd).toString();
    }

    /**
     * Returns the subsequence of the input found by the specified capturing
     * group during the previous match operation.
     * <br />
     * Note that because the automaton does not support capturing groups the
     * only valid group is 0 (the entire match).
     *
     * @param group the desired capturing group.
     * @return The subsequence of the input found by the specified capturing
     *  group during the previous match operation. Or {@code null} if the
     *  given group did not match.
     * @throws IllegalStateException if there has not been a match attempt or
     *  if the last attempt yielded no results.
     * @throws IndexOutOfBoundsException if the specified capturing group does
     *  not exist in the underlying automaton.
     */
    public String group(final int group) throws IndexOutOfBoundsException, IllegalStateException {
        onlyZero(group);
        return group();
    }

    /**
     * Returns the number of capturing groups in the underlying automaton.
     * <br />
     * Note that because the automaton does not support capturing groups this
     * method will always return 0.
     *
     * @return The number of capturing groups in the underlying automaton.
     */
    public int groupCount() {
        return 0;
    }

    /**
     * Returns the offset of the first character matched.
     *
     * @return The offset of the first character matched.
     * @throws IllegalStateException if there has not been a match attempt or
     *  if the last attempt yielded no results.
     */
    public int start() throws IllegalStateException {
        matchGood();
        return matchStart;
    }

    /**
     * Returns the offset of the first character matched of the specified
     * capturing group.
     * <br />
     * Note that because the automaton does not support capturing groups the
     * only valid group is 0 (the entire match).
     *
     * @param group the desired capturing group.
     * @return The offset of the first character matched of the specified
     *  capturing group.
     * @throws IllegalStateException if there has not been a match attempt or
     *  if the last attempt yielded no results.
     * @throws IndexOutOfBoundsException if the specified capturing group does
     *  not exist in the underlying automaton.
     */
    public int start(int group) throws IndexOutOfBoundsException, IllegalStateException {
        onlyZero(group);
        return start();
    }

    /**
     * Returns the current state of this {@code AutomatonMatcher} as a
     * {@code MatchResult}.
     * The result is unaffected by subsequent operations on this object.
     *
     * @return a {@code MatchResult} with the state of this
     *  {@code AutomatonMatcher}.
     */
    public MatchResult toMatchResult() {
        final AutomatonMatcher match = new AutomatonMatcher(chars, automaton);
        match.matchStart = this.matchStart;
        match.matchEnd = this.matchEnd;
        return match;
    }

    /** Helper method that requires the group argument to be 0. */
    private static void onlyZero(final int group) throws IndexOutOfBoundsException {
        if (group != 0) {
            throw new IndexOutOfBoundsException("The only group supported is 0.");
        }
    }

    /** Helper method to check that the last match attempt was valid. */
    private void matchGood() throws IllegalStateException {
        // Both sentinels (-1: no attempt, -2: exhausted) are negative.
        if ((matchStart < 0) || (matchEnd < 0)) {
            throw new IllegalStateException("There was no available match.");
        }
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.search;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.StatusToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.rest.action.support.RestActions;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.aggregations.Aggregations;
import org.elasticsearch.search.internal.InternalSearchResponse;
import org.elasticsearch.search.profile.ProfileShardResult;
import org.elasticsearch.search.suggest.Suggest;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import static org.elasticsearch.action.search.ShardSearchFailure.readShardSearchFailure;
import static org.elasticsearch.search.internal.InternalSearchResponse.readInternalSearchResponse;
/**
 * A response of a search request.
 */
public class SearchResponse extends ActionResponse implements StatusToXContent {

    private InternalSearchResponse internalResponse; // hits/aggs/suggest/profile payload
    private String scrollId;                         // null unless scrolling was requested
    private int totalShards;
    private int successfulShards;
    private ShardSearchFailure[] shardFailures;
    private long tookInMillis;

    // No-arg constructor used for deserialization via readFrom(StreamInput).
    public SearchResponse() {
    }

    public SearchResponse(InternalSearchResponse internalResponse, String scrollId, int totalShards, int successfulShards, long tookInMillis, ShardSearchFailure[] shardFailures) {
        this.internalResponse = internalResponse;
        this.scrollId = scrollId;
        this.totalShards = totalShards;
        this.successfulShards = successfulShards;
        this.tookInMillis = tookInMillis;
        this.shardFailures = shardFailures;
    }

    /**
     * The REST status derived from the per-shard success/failure counts.
     */
    @Override
    public RestStatus status() {
        return RestStatus.status(successfulShards, totalShards, shardFailures);
    }

    /**
     * The search hits.
     */
    public SearchHits getHits() {
        return internalResponse.hits();
    }

    public Aggregations getAggregations() {
        return internalResponse.aggregations();
    }

    public Suggest getSuggest() {
        return internalResponse.suggest();
    }

    /**
     * Has the search operation timed out.
     */
    public boolean isTimedOut() {
        return internalResponse.timedOut();
    }

    /**
     * Has the search operation terminated early due to reaching
     * <code>terminateAfter</code>
     */
    public Boolean isTerminatedEarly() {
        return internalResponse.terminatedEarly();
    }

    /**
     * How long the search took.
     */
    public TimeValue getTook() {
        return new TimeValue(tookInMillis);
    }

    /**
     * How long the search took in milliseconds.
     */
    public long getTookInMillis() {
        return tookInMillis;
    }

    /**
     * The total number of shards the search was executed on.
     */
    public int getTotalShards() {
        return totalShards;
    }

    /**
     * The successful number of shards the search was executed on.
     */
    public int getSuccessfulShards() {
        return successfulShards;
    }

    /**
     * The failed number of shards the search was executed on.
     */
    public int getFailedShards() {
        // we don't return totalShards - successfulShards, we don't count "no shards available" as a failed shard, just don't
        // count it in the successful counter
        return shardFailures.length;
    }

    /**
     * The failures that occurred during the search.
     */
    public ShardSearchFailure[] getShardFailures() {
        return this.shardFailures;
    }

    /**
     * If scrolling was enabled ({@link SearchRequest#scroll(org.elasticsearch.search.Scroll)}, the
     * scroll id that can be used to continue scrolling.
     */
    public String getScrollId() {
        return scrollId;
    }

    public void scrollId(String scrollId) {
        this.scrollId = scrollId;
    }

    /**
     * If profiling was enabled, this returns an object containing the profile results from
     * each shard. If profiling was not enabled, this will return null
     *
     * @return The profile results, or {@code null} if profiling was not enabled
     */
    public @Nullable Map<String, List<ProfileShardResult>> getProfileResults() {
        return internalResponse.profile();
    }

    // XContent field names emitted by toXContent below.
    static final class Fields {
        static final String _SCROLL_ID = "_scroll_id";
        static final String TOOK = "took";
        static final String TIMED_OUT = "timed_out";
        static final String TERMINATED_EARLY = "terminated_early";
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        if (scrollId != null) {
            builder.field(Fields._SCROLL_ID, scrollId);
        }
        builder.field(Fields.TOOK, tookInMillis);
        builder.field(Fields.TIMED_OUT, isTimedOut());
        // terminated_early is tri-state (null = not requested), so only emit it when set.
        if (isTerminatedEarly() != null) {
            builder.field(Fields.TERMINATED_EARLY, isTerminatedEarly());
        }
        RestActions.buildBroadcastShardsHeader(builder, params, getTotalShards(), getSuccessfulShards(), getFailedShards(), getShardFailures());
        internalResponse.toXContent(builder, params);
        return builder;
    }

    // NOTE: the read order here must mirror the write order in writeTo exactly;
    // both sides of the wire depend on it.
    @Override
    public void readFrom(StreamInput in) throws IOException {
        super.readFrom(in);
        internalResponse = readInternalSearchResponse(in);
        totalShards = in.readVInt();
        successfulShards = in.readVInt();
        int size = in.readVInt();
        if (size == 0) {
            // Shared empty array avoids an allocation for the common no-failure case.
            shardFailures = ShardSearchFailure.EMPTY_ARRAY;
        } else {
            shardFailures = new ShardSearchFailure[size];
            for (int i = 0; i < shardFailures.length; i++) {
                shardFailures[i] = readShardSearchFailure(in);
            }
        }
        scrollId = in.readOptionalString();
        tookInMillis = in.readVLong();
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        super.writeTo(out);
        internalResponse.writeTo(out);
        out.writeVInt(totalShards);
        out.writeVInt(successfulShards);
        out.writeVInt(shardFailures.length);
        for (ShardSearchFailure shardSearchFailure : shardFailures) {
            shardSearchFailure.writeTo(out);
        }
        out.writeOptionalString(scrollId);
        out.writeVLong(tookInMillis);
    }

    @Override
    public String toString() {
        return Strings.toString(this, true);
    }
}
| |
/*
* Copyright 2002-2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.security.test.web.support;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.context.annotation.Configuration;
import org.springframework.mock.web.MockHttpServletRequest;
import org.springframework.security.config.BeanIds;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity;
import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter;
import org.springframework.security.web.DefaultSecurityFilterChain;
import org.springframework.security.web.FilterChainProxy;
import org.springframework.security.web.context.HttpSessionSecurityContextRepository;
import org.springframework.security.web.context.SecurityContextPersistenceFilter;
import org.springframework.security.web.context.SecurityContextRepository;
import org.springframework.security.web.csrf.CsrfFilter;
import org.springframework.security.web.csrf.CsrfTokenRepository;
import org.springframework.security.web.csrf.HttpSessionCsrfTokenRepository;
import org.springframework.security.web.util.matcher.AnyRequestMatcher;
import org.springframework.web.context.WebApplicationContext;
import org.springframework.web.context.support.AnnotationConfigWebApplicationContext;
import static org.assertj.core.api.Assertions.assertThat;
@RunWith(MockitoJUnitRunner.class)
public class WebTestUtilsTests {

	@Mock
	private SecurityContextRepository contextRepo;

	@Mock
	private CsrfTokenRepository csrfRepo;

	private MockHttpServletRequest request;

	private ConfigurableApplicationContext context;

	@Before
	public void setup() {
		this.request = new MockHttpServletRequest();
	}

	@After
	public void cleanup() {
		ConfigurableApplicationContext toClose = this.context;
		if (toClose != null) {
			toClose.close();
		}
	}

	// getCsrfTokenRepository

	@Test
	public void getCsrfTokenRepositorytNoWac() {
		// Without any application context the default repository is used.
		CsrfTokenRepository repository = WebTestUtils.getCsrfTokenRepository(this.request);
		assertThat(repository).isInstanceOf(HttpSessionCsrfTokenRepository.class);
	}

	@Test
	public void getCsrfTokenRepositorytNoSecurity() {
		// A context with no Spring Security configuration also yields the default.
		loadConfig(Config.class);
		CsrfTokenRepository repository = WebTestUtils.getCsrfTokenRepository(this.request);
		assertThat(repository).isInstanceOf(HttpSessionCsrfTokenRepository.class);
	}

	@Test
	public void getCsrfTokenRepositorytSecurityNoCsrf() {
		// Security enabled but CSRF disabled still falls back to the default.
		loadConfig(SecurityNoCsrfConfig.class);
		CsrfTokenRepository repository = WebTestUtils.getCsrfTokenRepository(this.request);
		assertThat(repository).isInstanceOf(HttpSessionCsrfTokenRepository.class);
	}

	@Test
	public void getCsrfTokenRepositorytSecurityCustomRepo() {
		// The repository registered in the security configuration is discovered.
		CustomSecurityConfig.CONTEXT_REPO = this.contextRepo;
		CustomSecurityConfig.CSRF_REPO = this.csrfRepo;
		loadConfig(CustomSecurityConfig.class);
		assertThat(WebTestUtils.getCsrfTokenRepository(this.request)).isSameAs(this.csrfRepo);
	}

	// getSecurityContextRepository

	@Test
	public void getSecurityContextRepositoryNoWac() {
		SecurityContextRepository repository = WebTestUtils.getSecurityContextRepository(this.request);
		assertThat(repository).isInstanceOf(HttpSessionSecurityContextRepository.class);
	}

	@Test
	public void getSecurityContextRepositoryNoSecurity() {
		loadConfig(Config.class);
		SecurityContextRepository repository = WebTestUtils.getSecurityContextRepository(this.request);
		assertThat(repository).isInstanceOf(HttpSessionSecurityContextRepository.class);
	}

	@Test
	public void getSecurityContextRepositorySecurityNoCsrf() {
		loadConfig(SecurityNoCsrfConfig.class);
		SecurityContextRepository repository = WebTestUtils.getSecurityContextRepository(this.request);
		assertThat(repository).isInstanceOf(HttpSessionSecurityContextRepository.class);
	}

	@Test
	public void getSecurityContextRepositorySecurityCustomRepo() {
		CustomSecurityConfig.CONTEXT_REPO = this.contextRepo;
		CustomSecurityConfig.CSRF_REPO = this.csrfRepo;
		loadConfig(CustomSecurityConfig.class);
		assertThat(WebTestUtils.getSecurityContextRepository(this.request)).isSameAs(this.contextRepo);
	}

	// gh-3343
	@Test
	public void findFilterNoMatchingFilters() {
		loadConfig(PartialSecurityConfig.class);
		assertThat(WebTestUtils.findFilter(this.request, SecurityContextPersistenceFilter.class)).isNull();
	}

	@Test
	public void findFilterNoSpringSecurityFilterChainInContext() {
		loadConfig(NoSecurityConfig.class);
		CsrfFilter filter = new CsrfFilter(new HttpSessionCsrfTokenRepository());
		FilterChainProxy proxy = new FilterChainProxy(
				new DefaultSecurityFilterChain(AnyRequestMatcher.INSTANCE, filter));
		this.request.getServletContext().setAttribute(BeanIds.SPRING_SECURITY_FILTER_CHAIN, proxy);
		assertThat(WebTestUtils.findFilter(this.request, filter.getClass())).isEqualTo(filter);
	}

	@Test
	public void findFilterExplicitWithSecurityFilterInContext() {
		loadConfig(SecurityConfigWithDefaults.class);
		CsrfFilter filter = new CsrfFilter(new HttpSessionCsrfTokenRepository());
		FilterChainProxy proxy = new FilterChainProxy(
				new DefaultSecurityFilterChain(AnyRequestMatcher.INSTANCE, filter));
		this.request.getServletContext().setAttribute(BeanIds.SPRING_SECURITY_FILTER_CHAIN, proxy);
		assertThat(WebTestUtils.findFilter(this.request, filter.getClass())).isSameAs(filter);
	}

	/**
	 * Registers the given configuration class in a fresh web application context
	 * and exposes it to the mock request's servlet context.
	 */
	private void loadConfig(Class<?> config) {
		AnnotationConfigWebApplicationContext wac = new AnnotationConfigWebApplicationContext();
		wac.register(config);
		wac.refresh();
		this.context = wac;
		this.request.getServletContext()
				.setAttribute(WebApplicationContext.ROOT_WEB_APPLICATION_CONTEXT_ATTRIBUTE, wac);
	}

	/** Plain context with no security at all. */
	@Configuration
	static class Config {
	}

	/** Security enabled, CSRF explicitly disabled. */
	@EnableWebSecurity
	static class SecurityNoCsrfConfig extends WebSecurityConfigurerAdapter {
		@Override
		protected void configure(HttpSecurity http) throws Exception {
			http.csrf().disable();
		}
	}

	/** Security with test-injected CSRF and security-context repositories. */
	@EnableWebSecurity
	static class CustomSecurityConfig extends WebSecurityConfigurerAdapter {
		static CsrfTokenRepository CSRF_REPO;
		static SecurityContextRepository CONTEXT_REPO;

		@Override
		protected void configure(HttpSecurity http) throws Exception {
			// @formatter:off
			http
				.csrf()
					.csrfTokenRepository(CSRF_REPO)
					.and()
				.securityContext()
					.securityContextRepository(CONTEXT_REPO);
			// @formatter:on
		}
	}

	/** Security whose single chain matches no test request. */
	@EnableWebSecurity
	static class PartialSecurityConfig extends WebSecurityConfigurerAdapter {
		@Override
		public void configure(HttpSecurity http) {
			// @formatter:off
			http
				.antMatcher("/willnotmatchthis");
			// @formatter:on
		}
	}

	/** Context present but no springSecurityFilterChain bean. */
	@Configuration
	static class NoSecurityConfig {
	}

	/** Security with all defaults. */
	@EnableWebSecurity
	static class SecurityConfigWithDefaults extends WebSecurityConfigurerAdapter {
	}
}
| |
/*
* MediaWiki import/export processing tools
* Copyright 2005 by Brion Vibber
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*
* $Id: XmlDumpReader.java 59325 2009-11-22 01:21:03Z rainman $
*/
/**
* Description: XML processing tool for JIRA issues, Honeywell ACS
*
* Author : Tao LEE
* Date : 05.04.13
* */
package org.mediawiki.importer;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.GregorianCalendar;
import java.util.HashMap;
import java.util.Map;
import java.util.TimeZone;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;
import org.xml.sax.Attributes;
import org.xml.sax.SAXException;
import org.xml.sax.helpers.DefaultHandler;
public class XmlDumpReader2 extends DefaultHandler {
InputStream input;
DumpWriter writer;
private char[] buffer;
private int len;
private boolean hasContent = false;
private boolean deleted = false;
/* Siteinfo siteinfo;
Page page;
boolean pageSent;
Contributor contrib;
Revision rev;
int nskey;*/
JiraIssueItem item;
JiraCustomField customfield;
boolean abortFlag;
/**
 * Creates a processor for a MediaWiki XML dump stream.
 * Events are sent to a single DumpWriter output sink; chain several output
 * processors with a MultiWriter if more than one consumer is needed.
 *
 * @param inputStream Stream to read XML from.
 * @param writer Output sink to send processed events to.
 */
public XmlDumpReader2(InputStream inputStream, DumpWriter writer) {
	this.input = inputStream;
	this.writer = writer;
	// Character accumulation state starts out empty.
	this.buffer = new char[4096];
	this.len = 0;
	this.hasContent = false;
}
/**
 * Reads through the entire XML dump on the input stream, sending
 * events to the DumpWriter as it goes. May throw exceptions on
 * invalid input or due to problems with the output.
 *
 * @throws IOException if parsing fails (parser configuration or SAX errors
 *         are wrapped, preserving the original exception as the cause) or
 *         if the writer fails
 */
public void readDump() throws IOException {
	try {
		SAXParserFactory factory = SAXParserFactory.newInstance();
		// NOTE(review): the factory is used with default settings, so DTDs and
		// external entities are processed. If this reader is ever fed untrusted
		// XML, harden it against XXE, e.g.:
		//   factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
		SAXParser parser = factory.newSAXParser();
		parser.parse(input, this);
	} catch (ParserConfigurationException e) {
		// Wrap with the chaining constructor instead of the old initCause() idiom.
		throw new IOException(e.getMessage(), e);
	} catch (SAXException e) {
		throw new IOException(e.getMessage(), e);
	}
	writer.close();
}
/**
 * Request that the dump processing be aborted.
 * At the next element, an exception will be thrown to stop the XML parser.
 * <p>
 * NOTE(review): {@code abortFlag} is a plain (non-volatile) boolean field, so
 * a write from another thread is not guaranteed to become promptly visible to
 * the parsing thread; declaring the field {@code volatile} would address this.
 * @fixme Is setting a bool thread-safe? It should be atomic...
 */
public void abort() {
	abortFlag = true;
}
// --------------------------
// SAX handler interface methods:
private static final Map startElements = new HashMap(64);
private static final Map endElements = new HashMap(64);
static {
startElements.put("channel", "channel");
startElements.put("item", "item");
startElements.put("title", "title");
startElements.put("project", "project");
startElements.put("description", "description");
startElements.put("environment", "environment");
startElements.put("key", "key");
startElements.put("summary", "summary");
startElements.put("type", "type");
startElements.put("priority", "priority");
startElements.put("status", "status");
startElements.put("resolution", "resolution");
startElements.put("assignee", "assignee");
startElements.put("reporter", "reporter");
startElements.put("created", "created");
startElements.put("updated", "updated");
startElements.put("resolved", "resolved");
startElements.put("fixVersion", "fixVersion");
startElements.put("component", "component");
startElements.put("due", "due");
startElements.put("votes", "votes");
startElements.put("watches", "watches");
startElements.put("comments", "comments");
startElements.put("comment", "comment");
startElements.put("attachments", "attachments");
startElements.put("attachment", "attachment");
startElements.put("subtasks", "subtasks");
startElements.put("subtask", "subtask");
startElements.put("customfields", "customfields");
startElements.put("customfield", "customfield");
startElements.put("customfieldname", "customfieldname");
startElements.put("customfieldvalues", "customfieldvalues");
startElements.put("customfieldvalue", "customfieldvalue");
endElements.put("channel", "channel");
endElements.put("item", "item");
endElements.put("title", "title");
endElements.put("project", "project");
endElements.put("description", "description");
endElements.put("environment", "environment");
endElements.put("key", "key");
endElements.put("summary", "summary");
endElements.put("type", "type");
endElements.put("priority", "priority");
endElements.put("status", "status");
endElements.put("resolution", "resolution");
endElements.put("assignee", "assignee");
endElements.put("reporter", "reporter");
endElements.put("created", "created");
endElements.put("updated", "updated");
endElements.put("resolved", "resolved");
endElements.put("fixVersion", "fixVersion");
endElements.put("component", "component");
endElements.put("due", "due");
endElements.put("votes", "votes");
endElements.put("watches", "watches");
endElements.put("comments", "comments");
endElements.put("comment", "comment");
endElements.put("attachments", "attachments");
endElements.put("attachment", "attachment");
endElements.put("subtasks", "subtasks");
endElements.put("subtask", "subtask");
endElements.put("customfields", "customfields");
endElements.put("customfield", "customfield");
endElements.put("customfieldname", "customfieldname");
endElements.put("customfieldvalues", "customfieldvalues");
endElements.put("customfieldvalue", "customfieldvalue");
}
public void startElement(String uri, String localname, String qName, Attributes attributes) throws SAXException {
// Clear the buffer for character data; we'll initialize it
// if and when character data arrives -- at that point we
// have a length.
len = 0;
hasContent = false;
if (abortFlag)
throw new SAXException("XmlDumpReader2 set abort flag.");
// check for deleted="deleted", and set deleted flag for the current element.
String d = attributes.getValue("deleted");
deleted = (d!=null && d.equals("deleted"));
try {
qName = (String)startElements.get(qName);
if (qName == null)
return;
if (qName == "channel") openChannel();
else if (qName == "item") openItem();
else if (qName == "title") openTitle();
else if (qName == "project") openProject();
else if (qName == "description") openDescription();
else if (qName == "environment") openEnvironment();
else if (qName == "key") openKey();
else if (qName == "summary") openSummary();
else if (qName == "type") openType();
else if (qName == "priority") openPriority();
else if (qName == "status") openStatus();
else if (qName == "resolution") openResolution();
else if (qName == "assignee") openAssignee();
else if (qName == "reporter") openReporter();
else if (qName == "created") openCreated();
else if (qName == "updated") openUpdated();
else if (qName == "updated") openUpdated();
else if (qName == "resolved") openResolved();
else if (qName == "fixVersion") openFixVersion();
else if (qName == "component") openComponent();
else if (qName == "due") openDue();
else if (qName == "votes") openVotes();
else if (qName == "watches") openWatches();
else if (qName == "comments") openComments();
else if (qName == "comment") openComment();
else if (qName == "attachments") openAttachments();
else if (qName == "attachment") openAttachment();
else if (qName == "subtasks") openSubtasks();
else if (qName == "subtask") openSubtask();
else if (qName == "customfields") openCustomFields();
else if (qName == "customfield") openCustomField();
else if (qName == "customfieldname") openCustomFieldName();
else if (qName == "customfieldvalues") openCustomFieldValues();
else if (qName == "customfieldvalue") openCustomFieldValue();
} catch (IOException e) {
throw new SAXException(e);
}
}
public void characters(char[] ch, int start, int length) {
if (buffer.length < len + length) {
int maxlen = buffer.length * 2;
if (maxlen < len + length)
maxlen = len + length;
char[] tmp = new char[maxlen];
System.arraycopy(buffer, 0, tmp, 0, len);
buffer = tmp;
}
System.arraycopy(ch, start, buffer, len, length);
len += length;
hasContent = true;
}
public void endElement(String uri, String localname, String qName) throws SAXException {
try {
qName = (String)endElements.get(qName);
if (qName == null)
return;
if (qName == "channel") closeChannel();
else if (qName == "item") closeItem();
else if (qName == "title") closeTitle();
else if (qName == "project") closeProject();
else if (qName == "description") closeDescription();
else if (qName == "environment") closeEnvironment();
else if (qName == "key") closeKey();
else if (qName == "summary") closeSummary();
else if (qName == "type") closeType();
else if (qName == "priority") closePriority();
else if (qName == "status") closeStatus();
else if (qName == "resolution") closeResolution();
else if (qName == "assignee") closeAssignee();
else if (qName == "reporter") closeReporter();
else if (qName == "created") closeCreated();
else if (qName == "updated") closeUpdated();
else if (qName == "resolved") closeResolved();
else if (qName == "fixVersion") closeFixVersion();
else if (qName == "component") closeComponent();
else if (qName == "due") closeDue();
else if (qName == "votes") closeVotes();
else if (qName == "watches") closeWatches();
else if (qName == "comments") closeComments();
else if (qName == "comment") closeComment();
else if (qName == "attachments") closeAttachments();
else if (qName == "attachment") closeAttachment();
else if (qName == "subtasks") closeSubtasks();
else if (qName == "subtask") closeSubtask();
else if (qName == "customfields") closeCustomFields();
else if (qName == "customfield") closeCustomField();
else if (qName == "customfieldname") closeCustomFieldName();
else if (qName == "customfieldvalues") closeCustomFieldValues();
else if (qName == "customfieldvalue") closeCustomFieldValue();
// else throw(SAXException)new SAXException("Unrecognised "+qName+"(substring "+qName.length()+qName.substring(0,6)+")");
} catch (IOException e) {
throw (SAXException)new SAXException(e.getMessage()).initCause(e);
}
}
// ----------
void openChannel() throws IOException {
writer.writeStartWiki();
}
void openItem() {
item = new JiraIssueItem();
}
void openTitle() {
item.title = bufferContents();
}
void openProject() {
item.project = bufferContents();
}
void openDescription() {
item.description = bufferContents();
}
void openEnvironment() {
item.environment = bufferContents();
}
void openKey() {
item.key = bufferContents();
}
void openSummary() {
item.summary = bufferContents();
}
void openType() {
item.type = bufferContents();
}
void openPriority() {
item.priority = bufferContents();
}
void openStatus() {
item.status = bufferContents();
}
void openResolution() {
item.resolution = bufferContents();
}
void openAssignee() {
item.assignee = bufferContents();
}
void openReporter() {
item.reporter = bufferContents();
}
void openCreated() {
item.created = parseJiraTimestamp(bufferContents());
}
void openUpdated() {
item.updated = parseJiraTimestamp(bufferContents());
}
void openResolved() {
item.resolved = parseJiraTimestamp(bufferContents());
}
void openFixVersion() {
item.fixVersion = bufferContents();
}
void openComponent() {
item.component = bufferContents();
}
void openDue() {
item.due = bufferContents();
}
void openVotes() {
item.votes = Integer.parseInt(bufferContents());
}
void openWatches() {
item.watches = Integer.parseInt(bufferContents());
}
void openComments() {
item.comments = new ArrayList<String>();
}
void openComment() {
item.comments.add(bufferContents());
}
void openAttachments() {
item.attachments = new ArrayList<String>();
}
void openAttachment() {
item.attachments.add(bufferContents());
}
void openSubtasks() {
item.subtasks = new ArrayList<String>();
}
void openSubtask() {
item.subtasks.add(bufferContents());
}
void openCustomFields() {
item.customfields = new ArrayList<JiraCustomField>();
}
void openCustomField() {
customfield = new JiraCustomField();
}
void openCustomFieldName() {
customfield.customfieldname = bufferContents();
}
void openCustomFieldValues() {
customfield.customfieldvalues = new ArrayList<String>();
}
void openCustomFieldValue() {
customfield.customfieldvalues.add(bufferContents());
}
void closeChannel() throws IOException {
writer.writeEndWiki();
}
void closeItem() throws IOException {
writer.writeEndPage();
item = null;
}
void closeTitle() {
// do nothing
}
void closeProject() {
// do nothing
}
void closeDescription() {
// do nothing
}
void closeEnvironment() {
// do nothing
}
void closeKey() {
// do nothing
}
void closeSummary() {
// do nothing
}
void closeType() {
// do nothing
}
void closePriority() {
// do nothing
}
void closeStatus() {
// do nothing
}
void closeResolution() {
// do nothing
}
void closeAssignee() {
// do nothing
}
void closeReporter() {
// do nothing
}
void closeCreated() {
// do nothing
}
void closeUpdated() {
// do nothing
}
void closeResolved() {
// do nothing
}
void closeFixVersion() {
// do nothing
}
void closeComponent() {
// do nothing
}
void closeDue() {
// do nothing
}
void closeVotes() {
// do nothing
}
void closeWatches() {
// do nothing
}
void closeComments() {
// do nothing
}
void closeComment() {
// do nothing
}
void closeAttachments() {
// do nothing
}
void closeAttachment() {
// do nothing
}
void closeSubtasks() {
// do nothing
}
void closeSubtask() {
// do nothing
}
void closeCustomFields() {
// do nothing
}
void closeCustomField() {
item.customfields.add(customfield);
customfield = null;
}
void closeCustomFieldName() {
// do nothing
}
void closeCustomFieldValues() {
// do nothing
}
void closeCustomFieldValue() {
// do nothing
}
/*void threadAttribute(String attrib) throws IOException {
if(attrib.equals("ThreadPage")) // parse title
page.DiscussionThreadingInfo.put(attrib, new Title(bufferContents(), siteinfo.Namespaces));
else
page.DiscussionThreadingInfo.put(attrib, bufferContents());
}
void openMediaWiki() throws IOException {
siteinfo = null;
writer.writeStartWiki();
}
void closeMediaWiki() throws IOException {
writer.writeEndWiki();
siteinfo = null;
}
// ------------------
void openSiteinfo() {
siteinfo = new Siteinfo();
}
void closeSiteinfo() throws IOException {
writer.writeSiteinfo(siteinfo);
}
private String bufferContentsOrNull() {
if (!hasContent) return null;
else return bufferContents();
}
private String bufferContents() {
return len == 0 ? "" : new String(buffer, 0, len);
}
void readSitename() {
siteinfo.Sitename = bufferContents();
}
void readBase() {
siteinfo.Base = bufferContents();
}
void readGenerator() {
siteinfo.Generator = bufferContents();
}
void readCase() {
siteinfo.Case = bufferContents();
}
void openNamespaces() {
siteinfo.Namespaces = new NamespaceSet();
}
void openNamespace(Attributes attribs) {
nskey = Integer.parseInt(attribs.getValue("key"));
}
void closeNamespace() {
siteinfo.Namespaces.add(nskey, bufferContents());
}
void closeNamespaces() {
// NOP
}
// -----------
void openPage() {
page = new Page();
pageSent = false;
}
void closePage() throws IOException {
if (pageSent)
writer.writeEndPage();
page = null;
}
void readTitle() {
page.Title = new Title(bufferContents(), siteinfo.Namespaces);
}
void readId() {
int id = Integer.parseInt(bufferContents());
if (contrib != null)
contrib.Id = id;
else if (rev != null)
rev.Id = id;
else if (page != null)
page.Id = id;
else
throw new IllegalArgumentException("Unexpected <id> outside a <page>, <revision>, or <contributor>");
}
void readRestrictions() {
page.Restrictions = bufferContents();
}
// ------
void openRevision() throws IOException {
if (!pageSent) {
writer.writeStartPage(page);
pageSent = true;
}
rev = new Revision();
}
void closeRevision() throws IOException {
writer.writeRevision(rev);
rev = null;
}
void readTimestamp() {
rev.Timestamp = parseUTCTimestamp(bufferContents());
}
void readComment() {
rev.Comment = bufferContentsOrNull();
if (rev.Comment==null && !deleted) rev.Comment = ""; //NOTE: null means deleted/supressed
}
void readMinor() {
rev.Minor = true;
}
void readText() {
rev.Text = bufferContentsOrNull();
if (rev.Text==null && !deleted) rev.Text = ""; //NOTE: null means deleted/supressed
}
// -----------
void openContributor() {
//XXX: record deleted flag?! as it is, any empty <contributor> tag counts as "deleted"
contrib = new Contributor();
}
void closeContributor() {
//NOTE: if the contributor was supressed, nither username nor id have been set in the Contributor object
rev.Contributor = contrib;
contrib = null;
}
void readUsername() {
contrib.Username = bufferContentsOrNull();
}
void readIp() {
contrib.Username = bufferContents();
contrib.isIP = true;
}*/
private String bufferContents() {
return len == 0 ? "" : new String(buffer, 0, len);
}
private String bufferContentsOrNull() {
if (!hasContent) return null;
else return bufferContents();
}
private static final TimeZone utc = TimeZone.getTimeZone("UTC");
private static Calendar parseUTCTimestamp(String text) {
// 2003-10-26T04:50:47Z
// We're doing this manually for now, though DateFormatter might work...
String trimmed = text.trim();
GregorianCalendar ts = new GregorianCalendar(utc);
ts.set(
Integer.parseInt(trimmed.substring(0,0+4)), // year
Integer.parseInt(trimmed.substring(5,5+2)) - 1, // month is 0-based!
Integer.parseInt(trimmed.substring(8,8+2)), // day
Integer.parseInt(trimmed.substring(11,11+2)), // hour
Integer.parseInt(trimmed.substring(14,14+2)), // minute
Integer.parseInt(trimmed.substring(17,17+2))); // second
return ts;
}
private static Calendar parseJiraTimestamp(String text) {
GregorianCalendar ts = new GregorianCalendar(utc);
return ts;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.io.sstable;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Random;
import java.util.UUID;
import com.google.common.collect.Lists;
import com.google.common.collect.Iterables;
import org.junit.After;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.cassandra.SchemaLoader;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.cql3.QueryOptions;
import org.apache.cassandra.cql3.QueryProcessor;
import org.apache.cassandra.cql3.UntypedResultSet;
import org.apache.cassandra.cql3.statements.SelectStatement;
import org.apache.cassandra.db.ColumnFamilyStore;
import org.apache.cassandra.db.Keyspace;
import org.apache.cassandra.db.compaction.AbstractCompactionTask;
import org.apache.cassandra.db.compaction.CompactionManager;
import org.apache.cassandra.db.compaction.Verifier;
import org.apache.cassandra.db.repair.PendingAntiCompaction;
import org.apache.cassandra.db.streaming.CassandraOutgoingFile;
import org.apache.cassandra.db.ReadExecutionController;
import org.apache.cassandra.db.SinglePartitionReadCommand;
import org.apache.cassandra.db.SinglePartitionSliceCommandTest;
import org.apache.cassandra.db.compaction.Verifier;
import org.apache.cassandra.db.partitions.UnfilteredPartitionIterator;
import org.apache.cassandra.db.rows.RangeTombstoneMarker;
import org.apache.cassandra.db.rows.Unfiltered;
import org.apache.cassandra.db.rows.UnfilteredRowIterator;
import org.apache.cassandra.dht.IPartitioner;
import org.apache.cassandra.dht.Range;
import org.apache.cassandra.dht.Token;
import org.apache.cassandra.exceptions.ConfigurationException;
import org.apache.cassandra.io.sstable.format.SSTableFormat;
import org.apache.cassandra.io.sstable.format.SSTableReader;
import org.apache.cassandra.io.sstable.format.Version;
import org.apache.cassandra.io.sstable.format.big.BigFormat;
import org.apache.cassandra.service.ActiveRepairService;
import org.apache.cassandra.service.CacheService;
import org.apache.cassandra.service.ClientState;
import org.apache.cassandra.service.StorageService;
import org.apache.cassandra.streaming.OutgoingStream;
import org.apache.cassandra.streaming.StreamPlan;
import org.apache.cassandra.streaming.StreamSession;
import org.apache.cassandra.streaming.StreamOperation;
import org.apache.cassandra.utils.ByteBufferUtil;
import org.apache.cassandra.utils.FBUtilities;
import org.apache.cassandra.utils.UUIDGen;
import static org.apache.cassandra.service.ActiveRepairService.NO_PENDING_REPAIR;
import static org.apache.cassandra.service.ActiveRepairService.UNREPAIRED_SSTABLE;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
/**
* Tests backwards compatibility for SSTables
*/
public class LegacySSTableTest
{
private static final Logger logger = LoggerFactory.getLogger(LegacySSTableTest.class);
public static final String LEGACY_SSTABLE_PROP = "legacy-sstable-root";
public static File LEGACY_SSTABLE_ROOT;
/**
* When adding a new sstable version, add that one here.
* See {@link #testGenerateSstables()} to generate sstables.
* Take care on commit as you need to add the sstable files using {@code git add -f}
*/
public static final String[] legacyVersions = {"na", "mc", "mb", "ma"};
// 1200 chars
static final String longString = "0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789" +
"0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789" +
"0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789" +
"0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789" +
"0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789" +
"0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789" +
"0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789" +
"0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789" +
"0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789" +
"0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789" +
"0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789" +
"0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789";
/**
 * One-time setup: resolves the legacy sstable root directory from the
 * {@code legacy-sstable-root} system property, boots the embedded test
 * server, and creates the legacy_tables keyspace plus one set of tables for
 * every legacy sstable version under test.
 */
@BeforeClass
public static void defineSchema() throws ConfigurationException
{
    String legacyRoot = System.getProperty(LEGACY_SSTABLE_PROP);
    Assert.assertNotNull("System property " + LEGACY_SSTABLE_PROP + " not set", legacyRoot);
    LEGACY_SSTABLE_ROOT = new File(legacyRoot).getAbsoluteFile();
    Assert.assertTrue("System property " + LEGACY_SSTABLE_ROOT + " does not specify a directory", LEGACY_SSTABLE_ROOT.isDirectory());

    SchemaLoader.prepareServer();
    StorageService.instance.initServer();
    Keyspace.setInitialized();
    createKeyspace();
    for (String version : legacyVersions)
        createTables(version);
}
/** Truncates every per-version legacy table after each test so state cannot leak between tests. */
@After
public void tearDown()
{
    for (String version : legacyVersions)
        truncateTables(version);
}
/**
 * Get a descriptor for the legacy sstable at the given version.
 * Generation is fixed at 1 and the BIG sstable format is assumed.
 */
protected Descriptor getDescriptor(String legacyVersion, String table)
{
    Version formatVersion = SSTableFormat.Type.BIG.info.getVersion(legacyVersion);
    File tableDir = getTableDir(legacyVersion, table);
    return new Descriptor(formatVersion, tableDir, "legacy_tables", table, 1, SSTableFormat.Type.BIG);
}
/**
 * Runs the legacy-table read checks with a very large column index cache
 * size (contrast with {@link #testLoadLegacyCqlTablesShallow()}, which uses 0).
 */
@Test
public void testLoadLegacyCqlTables() throws Exception
{
    DatabaseDescriptor.setColumnIndexCacheSize(99999);
    // Start from a cold key cache so cache growth can be measured.
    CacheService.instance.invalidateKeyCache();
    doTestLegacyCqlTables();
}
/**
 * Runs the legacy-table read checks with the column index cache disabled
 * (size 0), exercising the opposite caching path from
 * {@link #testLoadLegacyCqlTables()}.
 */
@Test
public void testLoadLegacyCqlTablesShallow() throws Exception
{
    DatabaseDescriptor.setColumnIndexCacheSize(0);
    // Start from a cold key cache so cache growth can be measured.
    CacheService.instance.invalidateKeyCache();
    doTestLegacyCqlTables();
}
/**
 * Verifies that repair metadata (repairedAt, pending-repair session, and the
 * transient flag) can be rewritten directly via the metadata serializer in
 * each legacy version's own on-disk format and read back after reload.
 */
@Test
public void testMutateMetadata() throws Exception
{
    // we need to make sure we write old version metadata in the format for that version
    for (String legacyVersion : legacyVersions)
    {
        logger.info("Loading legacy version: {}", legacyVersion);
        truncateLegacyTables(legacyVersion);
        loadLegacyTables(legacyVersion);
        CacheService.instance.invalidateKeyCache();
        for (ColumnFamilyStore cfs : Keyspace.open("legacy_tables").getColumnFamilyStores())
        {
            // Pass 1: mark repaired at a fixed time with no pending session.
            for (SSTableReader sstable : cfs.getLiveSSTables())
            {
                sstable.descriptor.getMetadataSerializer().mutateRepairMetadata(sstable.descriptor, 1234, NO_PENDING_REPAIR, false);
                sstable.reloadSSTableMetadata();
                assertEquals(1234, sstable.getRepairedAt());
                // Only formats that can store a pending-repair id are checked for it.
                if (sstable.descriptor.version.hasPendingRepair())
                    assertEquals(NO_PENDING_REPAIR, sstable.getPendingRepair());
            }
            // Pass 2: unrepaired with a random pending session, alternating the
            // transient flag across sstables.
            boolean isTransient = false;
            for (SSTableReader sstable : cfs.getLiveSSTables())
            {
                UUID random = UUID.randomUUID();
                sstable.descriptor.getMetadataSerializer().mutateRepairMetadata(sstable.descriptor, UNREPAIRED_SSTABLE, random, isTransient);
                sstable.reloadSSTableMetadata();
                assertEquals(UNREPAIRED_SSTABLE, sstable.getRepairedAt());
                if (sstable.descriptor.version.hasPendingRepair())
                    assertEquals(random, sstable.getPendingRepair());
                if (sstable.descriptor.version.hasIsTransient())
                    assertEquals(isTransient, sstable.isTransient());
                isTransient = !isTransient;
            }
        }
    }
}
/**
 * Same repair-metadata mutations as {@link #testMutateMetadata()}, but driven
 * through the CompactionStrategyManager, which must reject mutations the
 * sstable's format version cannot represent (IllegalStateException).
 */
@Test
public void testMutateMetadataCSM() throws Exception
{
    // we need to make sure we write old version metadata in the format for that version
    for (String legacyVersion : legacyVersions)
    {
        // Skip 2.0.1 sstables as it doesn't have repaired information
        // NOTE(review): "jb" does not occur in legacyVersions ({"na","mc","mb","ma"}),
        // so this guard is currently inert — confirm whether it can be removed.
        if (legacyVersion.equals("jb"))
            continue;
        truncateTables(legacyVersion);
        loadLegacyTables(legacyVersion);
        for (ColumnFamilyStore cfs : Keyspace.open("legacy_tables").getColumnFamilyStores())
        {
            // set pending
            for (SSTableReader sstable : cfs.getLiveSSTables())
            {
                UUID random = UUID.randomUUID();
                try
                {
                    cfs.getCompactionStrategyManager().mutateRepaired(Collections.singleton(sstable), UNREPAIRED_SSTABLE, random, false);
                    // Formats without pending-repair support must have thrown above.
                    if (!sstable.descriptor.version.hasPendingRepair())
                        fail("We should fail setting pending repair on unsupported sstables "+sstable);
                }
                catch (IllegalStateException e)
                {
                    // Formats WITH pending-repair support must not throw.
                    if (sstable.descriptor.version.hasPendingRepair())
                        fail("We should succeed setting pending repair on "+legacyVersion + " sstables, failed on "+sstable);
                }
            }
            // set transient
            for (SSTableReader sstable : cfs.getLiveSSTables())
            {
                try
                {
                    cfs.getCompactionStrategyManager().mutateRepaired(Collections.singleton(sstable), UNREPAIRED_SSTABLE, UUID.randomUUID(), true);
                    // Formats without the transient flag must have thrown above.
                    if (!sstable.descriptor.version.hasIsTransient())
                        fail("We should fail setting pending repair on unsupported sstables "+sstable);
                }
                catch (IllegalStateException e)
                {
                    if (sstable.descriptor.version.hasIsTransient())
                        fail("We should succeed setting pending repair on "+legacyVersion + " sstables, failed on "+sstable);
                }
            }
        }
    }
}
/**
 * Verifies that the sstable compaction level can be rewritten in each legacy
 * version's own metadata format and read back after a metadata reload.
 */
@Test
public void testMutateLevel() throws Exception
{
    // we need to make sure we write old version metadata in the format for that version
    for (String version : legacyVersions)
    {
        logger.info("Loading legacy version: {}", version);
        truncateLegacyTables(version);
        loadLegacyTables(version);
        CacheService.instance.invalidateKeyCache();
        for (ColumnFamilyStore store : Keyspace.open("legacy_tables").getColumnFamilyStores())
        {
            for (SSTableReader reader : store.getLiveSSTables())
            {
                reader.descriptor.getMetadataSerializer().mutateLevel(reader.descriptor, 1234);
                reader.reloadSSTableMetadata();
                assertEquals(1234, reader.getSSTableLevel());
            }
        }
    }
}
/**
 * Shared body of the legacy-table read tests: for each legacy version,
 * reload the legacy sstables from disk, verify reads and key-cache
 * behaviour, then force a major compaction of the tables.
 */
private void doTestLegacyCqlTables() throws Exception
{
    for (String version : legacyVersions)
    {
        logger.info("Loading legacy version: {}", version);
        truncateLegacyTables(version);
        loadLegacyTables(version);
        CacheService.instance.invalidateKeyCache();
        long keysBefore = CacheService.instance.keyCache.size();
        verifyReads(version);
        verifyCache(version, keysBefore);
        compactLegacyTables(version);
    }
}
/** Streams each legacy version's tables to this node, then verifies the data reads back correctly. */
@Test
public void testStreamLegacyCqlTables() throws Exception
{
    for (String version : legacyVersions)
    {
        streamLegacyTables(version);
        verifyReads(version);
    }
}
/**
 * Regression test against an "mc"-era sstable whose min/max clustering
 * metadata is inaccurate: a slice that lies entirely inside a range deletion
 * must still surface both the open and the close range-tombstone markers.
 */
@Test
public void testInaccurateSSTableMinMax() throws Exception
{
    QueryProcessor.executeInternal("CREATE TABLE legacy_tables.legacy_mc_inaccurate_min_max (k int, c1 int, c2 int, c3 int, v int, primary key (k, c1, c2, c3))");
    loadLegacyTable("legacy_%s_inaccurate_min_max", "mc");
    /*
    sstable has the following mutations:
        INSERT INTO legacy_tables.legacy_mc_inaccurate_min_max (k, c1, c2, c3, v) VALUES (100, 4, 4, 4, 4)
        DELETE FROM legacy_tables.legacy_mc_inaccurate_min_max WHERE k=100 AND c1<3
     */
    // The slice (c1=1, c2=1) is fully covered by the c1<3 range deletion, so
    // the partition must yield exactly an open marker then a close marker.
    String query = "SELECT * FROM legacy_tables.legacy_mc_inaccurate_min_max WHERE k=100 AND c1=1 AND c2=1";
    List<Unfiltered> unfiltereds = SinglePartitionSliceCommandTest.getUnfilteredsFromSinglePartition(query);
    Assert.assertEquals(2, unfiltereds.size());
    Assert.assertTrue(unfiltereds.get(0).isRangeTombstoneMarker());
    Assert.assertTrue(((RangeTombstoneMarker) unfiltereds.get(0)).isOpen(false));
    Assert.assertTrue(unfiltereds.get(1).isRangeTombstoneMarker());
    Assert.assertTrue(((RangeTombstoneMarker) unfiltereds.get(1)).isClose(false));
}
/**
 * Verifier behaviour on legacy sstables: with checkVersion(true),
 * verification must fail for any sstable not on the latest format version;
 * with checkVersion(false), it must succeed regardless of version.
 */
@Test
public void testVerifyOldSSTables() throws IOException
{
    for (String legacyVersion : legacyVersions)
    {
        ColumnFamilyStore cfs = Keyspace.open("legacy_tables").getColumnFamilyStore(String.format("legacy_%s_simple", legacyVersion));
        loadLegacyTable("legacy_%s_simple", legacyVersion);
        for (SSTableReader sstable : cfs.getLiveSSTables())
        {
            try (Verifier verifier = new Verifier(cfs, sstable, false, Verifier.options().checkVersion(true).build()))
            {
                verifier.verify();
                if (!sstable.descriptor.version.isLatestVersion())
                    fail("Verify should throw RuntimeException for old sstables "+sstable);
            }
            catch (RuntimeException e)
            {} // expected for non-latest versions; intentionally swallowed
        }
        // make sure we don't throw any exception if not checking version:
        for (SSTableReader sstable : cfs.getLiveSSTables())
        {
            try (Verifier verifier = new Verifier(cfs, sstable, false, Verifier.options().checkVersion(false).build()))
            {
                verifier.verify();
            }
            catch (Throwable e)
            {
                // NOTE(review): this message looks copy-pasted from the positive
                // case above — here a throw means verification unexpectedly
                // FAILED with the version check disabled. Consider rewording.
                fail("Verify should throw RuntimeException for old sstables "+sstable);
            }
        }
    }
}
/**
 * Pending anti-compaction must refuse to acquire sstables whose format
 * version cannot record a pending-repair session, and succeed otherwise.
 */
@Test
public void testPendingAntiCompactionOldSSTables() throws Exception
{
    for (String legacyVersion : legacyVersions)
    {
        ColumnFamilyStore cfs = Keyspace.open("legacy_tables").getColumnFamilyStore(String.format("legacy_%s_simple", legacyVersion));
        loadLegacyTable("legacy_%s_simple", legacyVersion);
        // Acquisition should fail iff any live sstable lacks pending-repair support.
        boolean shouldFail = !cfs.getLiveSSTables().stream().allMatch(sstable -> sstable.descriptor.version.hasPendingRepair());
        IPartitioner p = Iterables.getFirst(cfs.getLiveSSTables(), null).getPartitioner();
        // Full ring expressed as a single wrapping range (min, min].
        Range<Token> r = new Range<>(p.getMinimumToken(), p.getMinimumToken());
        PendingAntiCompaction.AcquisitionCallable acquisitionCallable = new PendingAntiCompaction.AcquisitionCallable(cfs, Collections.singleton(r), UUIDGen.getTimeUUID(), 0, 0);
        PendingAntiCompaction.AcquireResult res = acquisitionCallable.call();
        // A null result signals the acquisition was refused.
        assertEquals(shouldFail, res == null);
        if (res != null)
            res.abort();
    }
}
/**
 * Checks that no upgrade compactions are scheduled while automatic sstable
 * upgrades are disabled, and that enabling them drives every legacy sstable
 * up to the latest format version.
 *
 * Fixes relative to the previous revision: the upgrade wait loop is now
 * bounded (the test fails instead of hanging the suite if upgrades stall),
 * and the global automatic-upgrade flag is restored in a finally block so a
 * failing assertion cannot leak the enabled state into later tests.
 */
@Test
public void testAutomaticUpgrade() throws Exception
{
    for (String legacyVersion : legacyVersions)
    {
        logger.info("Loading legacy version: {}", legacyVersion);
        truncateLegacyTables(legacyVersion);
        loadLegacyTables(legacyVersion);
        ColumnFamilyStore cfs = Keyspace.open("legacy_tables").getColumnFamilyStore(String.format("legacy_%s_simple", legacyVersion));
        AbstractCompactionTask act = cfs.getCompactionStrategyManager().getNextBackgroundTask(0);
        // there should be no compactions to run with auto upgrades disabled:
        assertEquals(null, act);
    }
    DatabaseDescriptor.setAutomaticSSTableUpgradeEnabled(true);
    try
    {
        for (String legacyVersion : legacyVersions)
        {
            logger.info("Loading legacy version: {}", legacyVersion);
            truncateLegacyTables(legacyVersion);
            loadLegacyTables(legacyVersion);
            ColumnFamilyStore cfs = Keyspace.open("legacy_tables").getColumnFamilyStore(String.format("legacy_%s_simple", legacyVersion));
            if (cfs.getLiveSSTables().stream().anyMatch(s -> !s.descriptor.version.isLatestVersion()))
                assertTrue(cfs.metric.oldVersionSSTableCount.getValue() > 0);
            // Bounded wait: fail loudly rather than hang forever if the
            // background upgrades never complete.
            long deadline = System.currentTimeMillis() + 60000L;
            while (cfs.getLiveSSTables().stream().anyMatch(s -> !s.descriptor.version.isLatestVersion()))
            {
                if (System.currentTimeMillis() > deadline)
                    fail("Timed out waiting for automatic sstable upgrade of version " + legacyVersion);
                CompactionManager.instance.submitBackground(cfs);
                Thread.sleep(100);
            }
            assertTrue(cfs.metric.oldVersionSSTableCount.getValue() == 0);
        }
    }
    finally
    {
        // Always restore the global flag so later tests are unaffected.
        DatabaseDescriptor.setAutomaticSSTableUpgradeEnabled(false);
    }
}
/** Streams the four standard per-version legacy tables to the local node. */
private void streamLegacyTables(String legacyVersion) throws Exception
{
    logger.info("Streaming legacy version {}", legacyVersion);
    String[] patterns = { "legacy_%s_simple", "legacy_%s_simple_counter", "legacy_%s_clust", "legacy_%s_clust_counter" };
    for (String pattern : patterns)
        streamLegacyTable(pattern, legacyVersion);
}
/**
 * Streams a single legacy sstable to the local node, covering the full ring
 * as two ranges split at token("100"), exercising the legacy-format
 * streaming path end to end.
 */
private void streamLegacyTable(String tablePattern, String legacyVersion) throws Exception
{
    String table = String.format(tablePattern, legacyVersion);
    SSTableReader sstable = SSTableReader.open(getDescriptor(legacyVersion, table));
    IPartitioner p = sstable.getPartitioner();
    // Two ranges that together span the whole ring, split at token("100").
    List<Range<Token>> ranges = new ArrayList<>();
    ranges.add(new Range<>(p.getMinimumToken(), p.getToken(ByteBufferUtil.bytes("100"))));
    ranges.add(new Range<>(p.getToken(ByteBufferUtil.bytes("100")), p.getMinimumToken()));
    // NOTE(review): ownership of the sstable.ref() reference appears to pass
    // to CassandraOutgoingFile — confirm it is released when the stream
    // completes, and whether the opened SSTableReader itself needs cleanup.
    List<OutgoingStream> streams = Lists.newArrayList(new CassandraOutgoingFile(StreamOperation.OTHER,
                                                                                sstable.ref(),
                                                                                sstable.getPositionsForRanges(ranges),
                                                                                ranges,
                                                                                sstable.estimatedKeysForRanges(ranges)));
    // Block until the local transfer finishes.
    new StreamPlan(StreamOperation.OTHER).transferStreams(FBUtilities.getBroadcastAddressAndPort(), streams).execute().get();
}
/** Truncates the four standard per-version legacy tables (simple, simple_counter, clust, clust_counter). */
public static void truncateLegacyTables(String legacyVersion) throws Exception
{
    logger.info("Truncating legacy version {}", legacyVersion);
    String[] patterns = { "legacy_%s_simple", "legacy_%s_simple_counter", "legacy_%s_clust", "legacy_%s_clust_counter" };
    for (String pattern : patterns)
        Keyspace.open("legacy_tables").getColumnFamilyStore(String.format(pattern, legacyVersion)).truncateBlocking();
}
private static void compactLegacyTables(String legacyVersion) throws Exception
{
logger.info("Compacting legacy version {}", legacyVersion);
Keyspace.open("legacy_tables").getColumnFamilyStore(String.format("legacy_%s_simple", legacyVersion)).forceMajorCompaction();
Keyspace.open("legacy_tables").getColumnFamilyStore(String.format("legacy_%s_simple_counter", legacyVersion)).forceMajorCompaction();
Keyspace.open("legacy_tables").getColumnFamilyStore(String.format("legacy_%s_clust", legacyVersion)).forceMajorCompaction();
Keyspace.open("legacy_tables").getColumnFamilyStore(String.format("legacy_%s_clust_counter", legacyVersion)).forceMajorCompaction();
}
public static void loadLegacyTables(String legacyVersion) throws Exception
{
logger.info("Preparing legacy version {}", legacyVersion);
loadLegacyTable("legacy_%s_simple", legacyVersion);
loadLegacyTable("legacy_%s_simple_counter", legacyVersion);
loadLegacyTable("legacy_%s_clust", legacyVersion);
loadLegacyTable("legacy_%s_clust_counter", legacyVersion);
}
    /**
     * Verifies the key cache round-trips (save to disk, invalidate, reload) while legacy
     * sstables are present. See https://issues.apache.org/jira/browse/CASSANDRA-10778.
     *
     * @param legacyVersion currently unused by this method; kept for symmetry with the
     *                      other per-version helpers
     * @param startCount    key cache size observed before the legacy tables were read
     */
    private static void verifyCache(String legacyVersion, long startCount) throws InterruptedException, java.util.concurrent.ExecutionException
    {
        //For https://issues.apache.org/jira/browse/CASSANDRA-10778
        //Validate whether the key cache successfully saves in the presence of old keys as
        //well as loads the correct number of keys
        long endCount = CacheService.instance.keyCache.size();
        // Reading the legacy tables must have populated the key cache.
        Assert.assertTrue(endCount > startCount);
        // Persist the current cache contents to disk ...
        CacheService.instance.keyCache.submitWrite(Integer.MAX_VALUE).get();
        // ... drop the in-memory cache (size returns to the pre-read count) ...
        CacheService.instance.invalidateKeyCache();
        Assert.assertEquals(startCount, CacheService.instance.keyCache.size());
        // ... and reload the saved cache; every key saved above must come back.
        CacheService.instance.keyCache.loadSaved();
        Assert.assertEquals(endCount, CacheService.instance.keyCache.size());
    }
private static void verifyReads(String legacyVersion)
{
for (int ck = 0; ck < 50; ck++)
{
String ckValue = Integer.toString(ck) + longString;
for (int pk = 0; pk < 5; pk++)
{
logger.debug("for pk={} ck={}", pk, ck);
String pkValue = Integer.toString(pk);
if (ck == 0)
{
readSimpleTable(legacyVersion, pkValue);
readSimpleCounterTable(legacyVersion, pkValue);
}
readClusteringTable(legacyVersion, ck, ckValue, pkValue);
readClusteringCounterTable(legacyVersion, ckValue, pkValue);
}
}
}
private static void readClusteringCounterTable(String legacyVersion, String ckValue, String pkValue)
{
logger.debug("Read legacy_{}_clust_counter", legacyVersion);
UntypedResultSet rs;
rs = QueryProcessor.executeInternal(String.format("SELECT val FROM legacy_tables.legacy_%s_clust_counter WHERE pk=? AND ck=?", legacyVersion), pkValue, ckValue);
Assert.assertNotNull(rs);
Assert.assertEquals(1, rs.size());
Assert.assertEquals(1L, rs.one().getLong("val"));
}
private static void readClusteringTable(String legacyVersion, int ck, String ckValue, String pkValue)
{
logger.debug("Read legacy_{}_clust", legacyVersion);
UntypedResultSet rs;
rs = QueryProcessor.executeInternal(String.format("SELECT val FROM legacy_tables.legacy_%s_clust WHERE pk=? AND ck=?", legacyVersion), pkValue, ckValue);
assertLegacyClustRows(1, rs);
String ckValue2 = Integer.toString(ck < 10 ? 40 : ck - 1) + longString;
String ckValue3 = Integer.toString(ck > 39 ? 10 : ck + 1) + longString;
rs = QueryProcessor.executeInternal(String.format("SELECT val FROM legacy_tables.legacy_%s_clust WHERE pk=? AND ck IN (?, ?, ?)", legacyVersion), pkValue, ckValue, ckValue2, ckValue3);
assertLegacyClustRows(3, rs);
}
private static void readSimpleCounterTable(String legacyVersion, String pkValue)
{
logger.debug("Read legacy_{}_simple_counter", legacyVersion);
UntypedResultSet rs;
rs = QueryProcessor.executeInternal(String.format("SELECT val FROM legacy_tables.legacy_%s_simple_counter WHERE pk=?", legacyVersion), pkValue);
Assert.assertNotNull(rs);
Assert.assertEquals(1, rs.size());
Assert.assertEquals(1L, rs.one().getLong("val"));
}
private static void readSimpleTable(String legacyVersion, String pkValue)
{
logger.debug("Read simple: legacy_{}_simple", legacyVersion);
UntypedResultSet rs;
rs = QueryProcessor.executeInternal(String.format("SELECT val FROM legacy_tables.legacy_%s_simple WHERE pk=?", legacyVersion), pkValue);
Assert.assertNotNull(rs);
Assert.assertEquals(1, rs.size());
Assert.assertEquals("foo bar baz", rs.one().getString("val"));
}
private static void createKeyspace()
{
QueryProcessor.executeInternal("CREATE KEYSPACE legacy_tables WITH replication = {'class': 'SimpleStrategy', 'replication_factor': '1'}");
}
private static void createTables(String legacyVersion)
{
QueryProcessor.executeInternal(String.format("CREATE TABLE legacy_tables.legacy_%s_simple (pk text PRIMARY KEY, val text)", legacyVersion));
QueryProcessor.executeInternal(String.format("CREATE TABLE legacy_tables.legacy_%s_simple_counter (pk text PRIMARY KEY, val counter)", legacyVersion));
QueryProcessor.executeInternal(String.format("CREATE TABLE legacy_tables.legacy_%s_clust (pk text, ck text, val text, PRIMARY KEY (pk, ck))", legacyVersion));
QueryProcessor.executeInternal(String.format("CREATE TABLE legacy_tables.legacy_%s_clust_counter (pk text, ck text, val counter, PRIMARY KEY (pk, ck))", legacyVersion));
}
private static void truncateTables(String legacyVersion)
{
QueryProcessor.executeInternal(String.format("TRUNCATE legacy_tables.legacy_%s_simple", legacyVersion));
QueryProcessor.executeInternal(String.format("TRUNCATE legacy_tables.legacy_%s_simple_counter", legacyVersion));
QueryProcessor.executeInternal(String.format("TRUNCATE legacy_tables.legacy_%s_clust", legacyVersion));
QueryProcessor.executeInternal(String.format("TRUNCATE legacy_tables.legacy_%s_clust_counter", legacyVersion));
CacheService.instance.invalidateCounterCache();
CacheService.instance.invalidateKeyCache();
}
private static void assertLegacyClustRows(int count, UntypedResultSet rs)
{
Assert.assertNotNull(rs);
Assert.assertEquals(count, rs.size());
for (int i = 0; i < count; i++)
{
for (UntypedResultSet.Row r : rs)
{
Assert.assertEquals(128, r.getString("val").length());
}
}
}
private static void loadLegacyTable(String tablePattern, String legacyVersion) throws IOException
{
String table = String.format(tablePattern, legacyVersion);
logger.info("Loading legacy table {}", table);
ColumnFamilyStore cfs = Keyspace.open("legacy_tables").getColumnFamilyStore(table);
for (File cfDir : cfs.getDirectories().getCFDirectories())
{
copySstablesToTestData(legacyVersion, table, cfDir);
}
cfs.loadNewSSTables();
}
/**
* Generates sstables for 8 CQL tables (see {@link #createTables(String)}) in <i>current</i>
* sstable format (version) into {@code test/data/legacy-sstables/VERSION}, where
* {@code VERSION} matches {@link Version#getVersion() BigFormat.latestVersion.getVersion()}.
* <p>
* Run this test alone (e.g. from your IDE) when a new version is introduced or format changed
* during development. I.e. remove the {@code @Ignore} annotation temporarily.
* </p>
*/
@Ignore
@Test
public void testGenerateSstables() throws Throwable
{
Random rand = new Random();
StringBuilder sb = new StringBuilder();
for (int i = 0; i < 128; i++)
{
sb.append((char)('a' + rand.nextInt(26)));
}
String randomString = sb.toString();
for (int pk = 0; pk < 5; pk++)
{
String valPk = Integer.toString(pk);
QueryProcessor.executeInternal(String.format("INSERT INTO legacy_tables.legacy_%s_simple (pk, val) VALUES ('%s', '%s')",
BigFormat.latestVersion, valPk, "foo bar baz"));
QueryProcessor.executeInternal(String.format("UPDATE legacy_tables.legacy_%s_simple_counter SET val = val + 1 WHERE pk = '%s'",
BigFormat.latestVersion, valPk));
for (int ck = 0; ck < 50; ck++)
{
String valCk = Integer.toString(ck);
QueryProcessor.executeInternal(String.format("INSERT INTO legacy_tables.legacy_%s_clust (pk, ck, val) VALUES ('%s', '%s', '%s')",
BigFormat.latestVersion, valPk, valCk + longString, randomString));
QueryProcessor.executeInternal(String.format("UPDATE legacy_tables.legacy_%s_clust_counter SET val = val + 1 WHERE pk = '%s' AND ck='%s'",
BigFormat.latestVersion, valPk, valCk + longString));
}
}
StorageService.instance.forceKeyspaceFlush("legacy_tables");
File ksDir = new File(LEGACY_SSTABLE_ROOT, String.format("%s/legacy_tables", BigFormat.latestVersion));
ksDir.mkdirs();
copySstablesFromTestData(String.format("legacy_%s_simple", BigFormat.latestVersion), ksDir);
copySstablesFromTestData(String.format("legacy_%s_simple_counter", BigFormat.latestVersion), ksDir);
copySstablesFromTestData(String.format("legacy_%s_clust", BigFormat.latestVersion), ksDir);
copySstablesFromTestData(String.format("legacy_%s_clust_counter", BigFormat.latestVersion), ksDir);
}
public static void copySstablesFromTestData(String table, File ksDir) throws IOException
{
File cfDir = new File(ksDir, table);
cfDir.mkdir();
for (File srcDir : Keyspace.open("legacy_tables").getColumnFamilyStore(table).getDirectories().getCFDirectories())
{
for (File file : srcDir.listFiles())
{
copyFile(cfDir, file);
}
}
}
private static void copySstablesToTestData(String legacyVersion, String table, File cfDir) throws IOException
{
File tableDir = getTableDir(legacyVersion, table);
Assert.assertTrue("The table directory " + tableDir + " was not found", tableDir.isDirectory());
for (File file : tableDir.listFiles())
{
copyFile(cfDir, file);
}
}
private static File getTableDir(String legacyVersion, String table)
{
return new File(LEGACY_SSTABLE_ROOT, String.format("%s/legacy_tables/%s", legacyVersion, table));
}
private static void copyFile(File cfDir, File file) throws IOException
{
byte[] buf = new byte[65536];
if (file.isFile())
{
File target = new File(cfDir, file.getName());
int rd;
try (FileInputStream is = new FileInputStream(file);
FileOutputStream os = new FileOutputStream(target);) {
while ((rd = is.read(buf)) >= 0)
os.write(buf, 0, rd);
}
}
}
}
| |
/*
* Copyright (c) 2005-2011 Grameen Foundation USA
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*
* See also http://www.apache.org/licenses/LICENSE-2.0.html for an
* explanation of the license and how it is applied.
*/
package org.mifos.customers.office.struts.tag;
import java.net.URLEncoder;
import java.util.List;
import java.util.Locale;
import java.util.Set;
import java.util.TreeSet;
import javax.servlet.jsp.JspException;
import javax.servlet.jsp.tagext.BodyTagSupport;
import org.apache.struts.taglib.TagUtils;
import org.mifos.application.master.MessageLookup;
import org.mifos.customers.office.business.OfficeBO;
import org.mifos.customers.office.persistence.OfficePersistence;
import org.mifos.customers.office.util.helpers.OfficeLevel;
import org.mifos.dto.domain.OfficeDetailsDto;
import org.mifos.dto.domain.OfficeHierarchyDto;
import org.mifos.dto.screen.OnlyBranchOfficeHierarchyDto;
import org.mifos.framework.struts.tags.XmlBuilder;
import org.mifos.framework.util.helpers.Constants;
import org.mifos.security.util.UserContext;
/**
 * Struts body tag that renders the visible office hierarchy as an HTML fragment:
 * offices above branch level (head / regional / sub-regional / area) followed by
 * branch offices, each linked to the configured Struts action/method. When
 * {@link #onlyBranchOffices} is non-null, only branch offices are rendered.
 */
public class OfficeListTag extends BodyTagSupport {

    private String actionName;

    private String methodName;

    private String flowKey;

    /* null for false, anything else for true */
    private String onlyBranchOffices;

    public OfficeListTag() {
    }

    public OfficeListTag(String action, String method, String flow) {
        actionName = action;
        methodName = method;
        flowKey = flow;
    }

    /**
     * Renders the office list into the page. The hierarchy DTO is taken from the page
     * context when present; otherwise it is loaded from persistence for the logged-in
     * user's branch (legacy path still used by personnel creation).
     */
    @Override
    public int doStartTag() throws JspException {
        try {
            String officeListString = "";
            OnlyBranchOfficeHierarchyDto officeHierarchyDto = (OnlyBranchOfficeHierarchyDto) pageContext
                    .getAttribute(OnlyBranchOfficeHierarchyDto.IDENTIFIER);
            if (officeHierarchyDto != null) {
                officeListString = getOfficeList(officeHierarchyDto);
            } else {
                // FIXME - #00006 - keithw - personnel creation use this still
                UserContext userContext = (UserContext) pageContext.getSession().getAttribute(Constants.USERCONTEXT);
                OfficePersistence officePersistence = new OfficePersistence();
                OfficeBO officeBO = officePersistence.getOffice(userContext.getBranchId());
                List<OfficeDetailsDto> levels = officePersistence.getActiveLevels();
                OfficeBO loggedInOffice = officePersistence.getOffice(userContext.getBranchId());
                List<OfficeBO> branchParents = officePersistence.getBranchParents(officeBO.getSearchId());
                List<OfficeHierarchyDto> officeHierarchy = OfficeBO
                        .convertToBranchOnlyHierarchyWithParentsOfficeHierarchy(branchParents);
                List<OfficeBO> officesTillBranchOffice = officePersistence.getOfficesTillBranchOffice(officeBO
                        .getSearchId());
                officeListString = getOfficeList(userContext.getPreferredLocale(), levels,
                        loggedInOffice.getSearchId(), officeHierarchy, officesTillBranchOffice);
            }
            TagUtils.getInstance().write(pageContext, officeListString);
        } catch (Exception e) {
            /**
             * This turns into a (rather ugly) error 500. TODO: make it more reasonable.
             */
            throw new JspException(e);
        }
        return EVAL_PAGE;
    }

    public String getActionName() {
        return actionName;
    }

    public void setActionName(String actionName) {
        this.actionName = actionName;
    }

    public String getMethodName() {
        return methodName;
    }

    public void setMethodName(String methodName) {
        this.methodName = methodName;
    }

    public String getOnlyBranchOffices() {
        return onlyBranchOffices;
    }

    public void setOnlyBranchOffices(String onlyBranchOffices) {
        this.onlyBranchOffices = onlyBranchOffices;
    }

    /** Unpacks the DTO and delegates to the five-argument overload (no above-branch list). */
    private String getOfficeList(OnlyBranchOfficeHierarchyDto officeHierarchy) {
        return getOfficeList(officeHierarchy.getPreferredLocaleOfUser(), officeHierarchy.getLevels(), officeHierarchy
                .getLoggedInOfficeSearchId(), officeHierarchy.getBranchOnlyOfficeHierarchy(), null);
    }

    /**
     * Builds the full HTML fragment: resolves the configured display names for each
     * office level, then renders either branches only ({@link #onlyBranchOffices} set)
     * or the above-branch levels followed by the branches.
     */
    String getOfficeList(Locale preferredUserLocale, List<OfficeDetailsDto> levels, String loggedInOfficeSearchId,
            List<OfficeHierarchyDto> officeHierarchy, List<OfficeBO> officesTillBranchOffice) {
        String termForBranch = "";
        String regional = "";
        String subregional = "";
        String area = "";
        for (OfficeDetailsDto level : levels) {
            if (level.getLevelId().equals(OfficeLevel.BRANCHOFFICE.getValue())) {
                termForBranch = level.getLevelName();
            } else if (level.getLevelId().equals(OfficeLevel.AREAOFFICE.getValue())) {
                area = level.getLevelName();
            } else if (level.getLevelId().equals(OfficeLevel.REGIONALOFFICE.getValue())) {
                regional = level.getLevelName();
            } else if (level.getLevelId().equals(OfficeLevel.SUBREGIONALOFFICE.getValue())) {
                subregional = level.getLevelName();
            }
        }
        XmlBuilder result = new XmlBuilder();
        if (onlyBranchOffices != null) {
            getBranchOffices(result, officeHierarchy, preferredUserLocale, loggedInOfficeSearchId, termForBranch);
        } else {
            getAboveBranches(result, officesTillBranchOffice, regional, subregional, area);
            getBranchOffices(result, officeHierarchy, preferredUserLocale, loggedInOfficeSearchId, termForBranch);
        }
        return result.getOutput();
    }

    /**
     * Renders the branch-office section: a heading with the branch level name, then for
     * each parent office a table of its active branches within the logged-in user's data
     * scope. Renders a "none present" message when no branch list is supplied.
     */
    void getBranchOffices(XmlBuilder html, List<OfficeHierarchyDto> officeList, Locale preferredUserLocale,
            String loggedInOfficeSearchId, String branchName) {
        html.singleTag("br");
        html.startTag("span", "class", "fontnormalBold");
        html.text(branchName);
        html.endTag("span");
        html.singleTag("br");
        if (officeList == null) {
            html.startTag("span", "class", "fontnormal");
            html.text(MessageLookup.getLocalizedMessage("Office.labelNo"));
            html.text(" ");
            html.text(branchName.toLowerCase());
            html.text(" ");
            html.text(MessageLookup.getLocalizedMessage("Office.labelPresent"));
            html.endTag("span");
        } else {
            for (int i = 0; i < officeList.size(); i++) {
                OfficeHierarchyDto officeParent = officeList.get(i);
                // Only active branches inside the logged-in user's search scope are shown.
                Set<OfficeHierarchyDto> branchList = new TreeSet<OfficeHierarchyDto>();
                for (OfficeHierarchyDto dataScopeBranch : officeParent.getChildren()) {
                    if (dataScopeBranch.getSearchId().startsWith(loggedInOfficeSearchId) && dataScopeBranch.isActive()) {
                        branchList.add(dataScopeBranch);
                    }
                }
                if (branchList.size() > 0) {
                    if (i > 0) {
                        html.singleTag("br");
                    }
                    html.startTag("span", "class", "fontnormal");
                    html.text(officeParent.getOfficeName());
                    html.endTag("span");
                    html.startTag("table", "width", "90%", "border", "0", "cellspacing", "0", "cellpadding", "0");
                    for (OfficeHierarchyDto office : branchList) {
                        html.startTag("tr", "class", "fontnormal");
                        bullet(html);
                        html.startTag("td", "width", "99%");
                        html.append(getLink(office.getOfficeId(), office.getOfficeName()));
                        html.endTag("td");
                        html.endTag("tr");
                    }
                    html.endTag("table");
                }
            }
        }
    }

    /**
     * Builds an anchor linking the given office to the configured action/method,
     * propagating the office identifiers and the current flow key.
     */
    XmlBuilder getLink(Short officeId, String officeName) {
        // NOTE(review): URLEncoder.encode(String) uses the platform default charset;
        // prefer the charset-aware overload once the supported JDK allows it.
        String urlencodedOfficeName = URLEncoder.encode(officeName);
        XmlBuilder builder = new XmlBuilder();
        // Fixed: the literal previously read "¤tFlowKey" — mojibake from the HTML
        // entity &curren; swallowing the start of "&currentFlowKey".
        String url = (actionName + "?method=" + methodName + "&office.officeId=" + officeId + "&office.officeName="
                + urlencodedOfficeName + "&officeId=" + officeId + "&officeName=" + urlencodedOfficeName
                + "&currentFlowKey=" + flowKey);
        builder.startTag("a", "href", url);
        builder.text(officeName);
        builder.endTag("a");
        return builder;
    }

    /** Trims the name and percent-encodes its internal spaces. */
    public String replaceSpaces(String officeName) {
        return officeName.trim().replaceAll(" ", "%20");
    }

    /**
     * Renders the above-branch section: the head office as a bold heading, and one table
     * per populated level (regional, sub-regional, area) in that order.
     */
    void getAboveBranches(XmlBuilder html, List<OfficeBO> officeList, String regional, String subregional, String area) {
        if (null != officeList) {
            XmlBuilder regionalHtml = null;
            XmlBuilder subregionalHtml = null;
            XmlBuilder areaHtml = null;
            for (int i = 0; i < officeList.size(); i++) {
                OfficeBO office = officeList.get(i);
                if (office.getOfficeLevel() == OfficeLevel.HEADOFFICE) {
                    html.singleTag("br");
                    html.startTag("span", "class", "fontnormalbold");
                    html.append(getLink(office.getOfficeId(), office.getOfficeName()));
                    html.singleTag("br");
                    html.endTag("span");
                } else if (office.getOfficeLevel() == OfficeLevel.REGIONALOFFICE) {
                    regionalHtml = processOffice(regionalHtml, office, regional);
                } else if (office.getOfficeLevel() == OfficeLevel.SUBREGIONALOFFICE) {
                    subregionalHtml = processOffice(subregionalHtml, office, subregional);
                } else if (office.getOfficeLevel() == OfficeLevel.AREAOFFICE) {
                    areaHtml = processOffice(areaHtml, office, area);
                }
            }
            outputLevel(html, regionalHtml);
            outputLevel(html, subregionalHtml);
            outputLevel(html, areaHtml);
        }
    }

    /** Closes a per-level table (if the level was populated) and appends it to the result. */
    private void outputLevel(XmlBuilder result, XmlBuilder levelHtml) {
        if (levelHtml != null) {
            levelHtml.endTag("table");
            result.append(levelHtml);
        }
    }

    /**
     * Appends one office row to the given level's builder, lazily creating the level's
     * heading and table on first use. The table opened here is closed by outputLevel().
     */
    private XmlBuilder processOffice(XmlBuilder levelHtml, OfficeBO office, String levelName) {
        if (levelHtml == null) {
            levelHtml = new XmlBuilder();
            levelHtml.singleTag("br");
            levelHtml.startTag("table", "width", "95%", "border", "0", "cellspacing", "0", "cellpadding", "0");
            levelHtml.startTag("tr");
            levelHtml.startTag("td");
            levelHtml.startTag("span", "class", "fontnormalBold");
            levelHtml.text(levelName);
            levelHtml.endTag("span");
            levelHtml.endTag("td");
            levelHtml.endTag("tr");
            levelHtml.endTag("table");
            levelHtml.startTag("table", "width", "90%", "border", "0", "cellspacing", "0", "cellpadding", "0");
        }
        levelHtml.startTag("tr", "class", "fontnormal");
        bullet(levelHtml);
        levelHtml.startTag("td", "width", "99%");
        levelHtml.append(getLink(office.getOfficeId(), office.getOfficeName()));
        levelHtml.endTag("td");
        levelHtml.endTag("tr");
        return levelHtml;
    }

    /** Emits the small bullet-image cell used in front of each office link. */
    private void bullet(XmlBuilder html) {
        html.startTag("td", "width", "1%");
        html.singleTag("img", "src", "pages/framework/images/bullet_circle.gif", "width", "9", "height", "11");
        html.endTag("td");
    }

    public String getFlowKey() {
        return flowKey;
    }

    public void setFlowKey(String flowKey) {
        this.flowKey = flowKey;
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.internal;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Sort;
import org.apache.lucene.util.Counter;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.cache.recycler.PageCacheRecycler;
import org.elasticsearch.common.DelegatingHasContextAndHeaders;
import org.elasticsearch.common.HasContextAndHeaders;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.iterable.Iterables;
import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.search.aggregations.SearchContextAggregations;
import org.elasticsearch.search.dfs.DfsSearchResult;
import org.elasticsearch.search.fetch.FetchSearchResult;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.FetchSubPhaseContext;
import org.elasticsearch.search.fetch.innerhits.InnerHitsContext;
import org.elasticsearch.search.fetch.script.ScriptFieldsContext;
import org.elasticsearch.search.fetch.source.FetchSourceContext;
import org.elasticsearch.search.highlight.SearchContextHighlight;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.profile.Profilers;
import org.elasticsearch.search.query.QuerySearchResult;
import org.elasticsearch.search.rescore.RescoreSearchContext;
import org.elasticsearch.search.suggest.SuggestionSearchContext;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;
/**
 * Per-shard, per-request search state. A concrete SearchContext carries everything needed
 * to execute one search request on one shard (query, filters, fetch/aggregation/highlight
 * sub-contexts, result holders and shard-level services), and tracks {@link Releasable}
 * resources whose release is tied to a {@link Lifetime} (collection, phase, or context).
 */
public abstract class SearchContext extends DelegatingHasContextAndHeaders implements Releasable {

    // Context bound to the executing thread; managed via setCurrent()/removeCurrent().
    private static ThreadLocal<SearchContext> current = new ThreadLocal<>();
    public final static int DEFAULT_TERMINATE_AFTER = 0;

    // Binds the given context to this thread and propagates its types to QueryShardContext.
    public static void setCurrent(SearchContext value) {
        current.set(value);
        QueryShardContext.setTypes(value.types());
    }

    // Clears the thread-local context and the types previously set by setCurrent().
    public static void removeCurrent() {
        current.remove();
        QueryShardContext.removeTypes();
    }

    // Returns the context bound to this thread, or null if none was set.
    public static SearchContext current() {
        return current.get();
    }

    // Releasables registered via addReleasable(), grouped by Lifetime; lazily initialised.
    private Map<Lifetime, List<Releasable>> clearables = null;
    // Guards against double release in close().
    private final AtomicBoolean closed = new AtomicBoolean(false);

    protected final ParseFieldMatcher parseFieldMatcher;

    protected SearchContext(ParseFieldMatcher parseFieldMatcher, HasContextAndHeaders contextHeaders) {
        super(contextHeaders);
        this.parseFieldMatcher = parseFieldMatcher;
    }

    public ParseFieldMatcher parseFieldMatcher() {
        return parseFieldMatcher;
    }

    @Override
    public final void close() {
        if (closed.compareAndSet(false, true)) { // prevent double release
            try {
                // CONTEXT is the widest lifetime, so this releases everything still registered.
                clearReleasables(Lifetime.CONTEXT);
            } finally {
                doClose();
            }
        }
    }

    // Set the first time nowInMillis() is called; see nowInMillisUsed().
    private boolean nowInMillisUsed;

    protected abstract void doClose();

    /**
     * Should be called before executing the main query and after all other parameters have been set.
     */
    public abstract void preProcess();

    public abstract Query searchFilter(String[] types);

    public abstract long id();

    public abstract String source();

    public abstract ShardSearchRequest request();

    public abstract SearchType searchType();

    public abstract SearchContext searchType(SearchType searchType);

    public abstract SearchShardTarget shardTarget();

    public abstract int numberOfShards();

    public abstract boolean hasTypes();

    public abstract String[] types();

    public abstract float queryBoost();

    public abstract SearchContext queryBoost(float queryBoost);

    public abstract long getOriginNanoTime();

    // Returns the request's notion of "now", recording that it was used so callers can
    // tell (via nowInMillisUsed()) whether the response depends on the current time.
    public final long nowInMillis() {
        nowInMillisUsed = true;
        return nowInMillisImpl();
    }

    public final boolean nowInMillisUsed() {
        return nowInMillisUsed;
    }

    protected abstract long nowInMillisImpl();

    public abstract ScrollContext scrollContext();

    public abstract SearchContext scrollContext(ScrollContext scroll);

    public abstract SearchContextAggregations aggregations();

    public abstract SearchContext aggregations(SearchContextAggregations aggregations);

    public abstract <SubPhaseContext extends FetchSubPhaseContext> SubPhaseContext getFetchSubPhaseContext(FetchSubPhase.ContextFactory<SubPhaseContext> contextFactory);

    public abstract SearchContextHighlight highlight();

    public abstract void highlight(SearchContextHighlight highlight);

    public abstract void innerHits(InnerHitsContext innerHitsContext);

    public abstract InnerHitsContext innerHits();

    public abstract SuggestionSearchContext suggest();

    public abstract void suggest(SuggestionSearchContext suggest);

    /**
     * @return list of all rescore contexts. empty if there aren't any.
     */
    public abstract List<RescoreSearchContext> rescore();

    public abstract void addRescore(RescoreSearchContext rescore);

    public abstract boolean hasScriptFields();

    public abstract ScriptFieldsContext scriptFields();

    /**
     * A shortcut function to see whether there is a fetchSourceContext and it says the source is requested.
     */
    public abstract boolean sourceRequested();

    public abstract boolean hasFetchSourceContext();

    public abstract FetchSourceContext fetchSourceContext();

    public abstract SearchContext fetchSourceContext(FetchSourceContext fetchSourceContext);

    public abstract ContextIndexSearcher searcher();

    public abstract IndexShard indexShard();

    public abstract MapperService mapperService();

    public abstract AnalysisService analysisService();

    public abstract SimilarityService similarityService();

    public abstract ScriptService scriptService();

    public abstract PageCacheRecycler pageCacheRecycler();

    public abstract BigArrays bigArrays();

    public abstract BitsetFilterCache bitsetFilterCache();

    public abstract IndexFieldDataService fieldData();

    public abstract long timeoutInMillis();

    public abstract void timeoutInMillis(long timeoutInMillis);

    public abstract int terminateAfter();

    public abstract void terminateAfter(int terminateAfter);

    public abstract SearchContext minimumScore(float minimumScore);

    public abstract Float minimumScore();

    public abstract SearchContext sort(Sort sort);

    public abstract Sort sort();

    public abstract SearchContext trackScores(boolean trackScores);

    public abstract boolean trackScores();

    public abstract SearchContext parsedPostFilter(ParsedQuery postFilter);

    public abstract ParsedQuery parsedPostFilter();

    public abstract Query aliasFilter();

    public abstract SearchContext parsedQuery(ParsedQuery query);

    public abstract ParsedQuery parsedQuery();

    /**
     * The query to execute, might be rewritten.
     */
    public abstract Query query();

    public abstract int from();

    public abstract SearchContext from(int from);

    public abstract int size();

    public abstract SearchContext size(int size);

    public abstract boolean hasFieldNames();

    public abstract List<String> fieldNames();

    public abstract void emptyFieldNames();

    public abstract boolean explain();

    public abstract void explain(boolean explain);

    @Nullable
    public abstract List<String> groupStats();

    public abstract void groupStats(List<String> groupStats);

    public abstract boolean version();

    public abstract void version(boolean version);

    public abstract int[] docIdsToLoad();

    public abstract int docIdsToLoadFrom();

    public abstract int docIdsToLoadSize();

    public abstract SearchContext docIdsToLoad(int[] docIdsToLoad, int docsIdsToLoadFrom, int docsIdsToLoadSize);

    public abstract void accessed(long accessTime);

    public abstract long lastAccessTime();

    public abstract long keepAlive();

    public abstract void keepAlive(long keepAlive);

    public abstract SearchLookup lookup();

    public abstract DfsSearchResult dfsResult();

    public abstract QuerySearchResult queryResult();

    public abstract FetchSearchResult fetchResult();

    /**
     * Return a handle over the profilers for the current search request, or {@code null} if profiling is not enabled.
     */
    public abstract Profilers getProfilers();

    /**
     * Schedule the release of a resource. The time when {@link Releasable#close()} will be called on this object
     * is function of the provided {@link Lifetime}.
     */
    public void addReleasable(Releasable releasable, Lifetime lifetime) {
        if (clearables == null) {
            clearables = new HashMap<>();
        }
        List<Releasable> releasables = clearables.get(lifetime);
        if (releasables == null) {
            releasables = new ArrayList<>();
            clearables.put(lifetime, releasables);
        }
        releasables.add(releasable);
    }

    // Releases (and forgets) all resources registered with a lifetime up to and including
    // the given one; Lifetime declaration order defines the nesting (COLLECTION < PHASE < CONTEXT).
    public void clearReleasables(Lifetime lifetime) {
        if (clearables != null) {
            List<List<Releasable>> releasables = new ArrayList<>();
            for (Lifetime lc : Lifetime.values()) {
                if (lc.compareTo(lifetime) > 0) {
                    break;
                }
                List<Releasable> remove = clearables.remove(lc);
                if (remove != null) {
                    releasables.add(remove);
                }
            }
            Releasables.close(Iterables.flatten(releasables));
        }
    }

    /**
     * Looks up the given field, but does not restrict to fields in the types set on this context.
     */
    public abstract MappedFieldType smartNameFieldType(String name);

    public abstract ObjectMapper getObjectMapper(String name);

    public abstract Counter timeEstimateCounter();

    /** Return a view of the additional query collectors that should be run for this context. */
    public abstract Map<Class<?>, Collector> queryCollectors();

    /**
     * The life time of an object that is used during search execution.
     */
    public enum Lifetime {
        /**
         * This life time is for objects that only live during collection time.
         */
        COLLECTION,
        /**
         * This life time is for objects that need to live until the end of the current search phase.
         */
        PHASE,
        /**
         * This life time is for objects that need to live until the search context they are attached to is destroyed.
         */
        CONTEXT
    }
}
| |
/*
* Copyright 2018 Crown Copyright
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package stroom.pipeline.refdata.store.offheapstore;
import stroom.bytebuffer.ByteBufferPool;
import stroom.bytebuffer.ByteBufferPoolFactory;
import stroom.bytebuffer.PooledByteBuffer;
import stroom.bytebuffer.PooledByteBufferOutputStream;
import stroom.bytebuffer.PooledByteBufferOutputStream.Factory;
import stroom.pipeline.refdata.store.BasicValueStoreHashAlgorithmImpl;
import stroom.pipeline.refdata.store.RefDataValue;
import stroom.pipeline.refdata.store.StringValue;
import stroom.pipeline.refdata.store.ValueStoreHashAlgorithm;
import stroom.pipeline.refdata.store.XxHashValueStoreHashAlgorithm;
import stroom.pipeline.refdata.store.offheapstore.databases.AbstractStoreDbTest;
import stroom.pipeline.refdata.store.offheapstore.databases.ValueStoreDb;
import stroom.pipeline.refdata.store.offheapstore.databases.ValueStoreMetaDb;
import stroom.pipeline.refdata.store.offheapstore.serdes.GenericRefDataValueSerde;
import stroom.pipeline.refdata.store.offheapstore.serdes.RefDataValueSerdeFactory;
import stroom.pipeline.refdata.store.offheapstore.serdes.ValueStoreKeySerde;
import stroom.pipeline.refdata.store.offheapstore.serdes.ValueStoreMetaSerde;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.lmdbjava.Txn;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.nio.ByteBuffer;
import java.util.Optional;
import java.util.concurrent.atomic.AtomicReference;
import static org.assertj.core.api.Assertions.assertThat;
class TestValueStore extends AbstractStoreDbTest {
    private static final Logger LOGGER = LoggerFactory.getLogger(TestValueStore.class);
    private final RefDataValueSerdeFactory refDataValueSerdeFactory = new RefDataValueSerdeFactory();
    // Production hash algorithm, used by the default store built in setup().
    private final ValueStoreHashAlgorithm xxHashAlgorithm = new XxHashValueStoreHashAlgorithm();
    // Weaker hash algorithm that lets the tests manufacture deterministic hash clashes.
    private final ValueStoreHashAlgorithm basicHashAlgorithm = new BasicValueStoreHashAlgorithmImpl();
    private final ByteBufferPool byteBufferPool = new ByteBufferPoolFactory().getByteBufferPool();
    // Factory that wires the shared buffer pool into each output stream it creates.
    private final PooledByteBufferOutputStream.Factory pooledByteBufferOutputStreamFactory = new Factory() {
        @Override
        public PooledByteBufferOutputStream create(final int initialCapacity) {
            return new PooledByteBufferOutputStream(byteBufferPool, initialCapacity);
        }
    };
    private ValueStore valueStore = null;
    private ValueStoreDb valueStoreDb = null;
    private ValueStoreMetaDb valueStoreMetaDb = null;
    /**
     * Builds the value store and its two backing LMDB tables (value + meta)
     * using the production xx hash algorithm.
     */
    @BeforeEach
    void setup() {
        valueStoreDb = new ValueStoreDb(
                lmdbEnv,
                byteBufferPool,
                new ValueStoreKeySerde(),
                new GenericRefDataValueSerde(refDataValueSerdeFactory),
                xxHashAlgorithm,
                pooledByteBufferOutputStreamFactory);
        valueStoreMetaDb = new ValueStoreMetaDb(
                lmdbEnv,
                byteBufferPool,
                new ValueStoreKeySerde(),
                new ValueStoreMetaSerde());
        valueStore = new ValueStore(lmdbEnv, valueStoreDb, valueStoreMetaDb);
    }
    /**
     * Rebuilds the value store DB with a caller-supplied hash algorithm.
     * NOTE(review): reuses the valueStoreMetaDb built in setup(), so it relies on
     * setup() having run first (guaranteed by @BeforeEach).
     */
    private void setupValueStoreDb(final ValueStoreHashAlgorithm valueStoreHashAlgorithm) {
        valueStoreDb = new ValueStoreDb(
                lmdbEnv,
                new ByteBufferPoolFactory().getByteBufferPool(),
                new ValueStoreKeySerde(),
                new GenericRefDataValueSerde(refDataValueSerdeFactory),
                valueStoreHashAlgorithm,
                pooledByteBufferOutputStreamFactory);
        valueStore = new ValueStore(lmdbEnv, valueStoreDb, valueStoreMetaDb);
    }
    /**
     * Gets-or-creates an entry for the supplied value and returns its
     * de-serialised {@link ValueStoreKey}. The pooled key buffer is released
     * by the try-with-resources before this method returns.
     */
    private ValueStoreKey getOrCreate(Txn<ByteBuffer> writeTxn, RefDataValue refDataValue) {
        try (PooledByteBuffer valueStoreKeyPooledBuffer = valueStore.getPooledKeyBuffer()) {
            ByteBuffer valueStoreKeyBuffer = valueStore.getOrCreateKey(
                    writeTxn,
                    valueStoreKeyPooledBuffer,
                    refDataValue,
                    false);
            return valueStoreDb.deserializeKey(valueStoreKeyBuffer);
        }
    }
    @Test
    void testGetOrCreate() {
        // We have to set up the DB with the basic hash func so we can be assured of hash clashes
        setupValueStoreDb(basicHashAlgorithm);
        ValueStoreHashAlgorithm hashAlgorithm = valueStoreDb.getValueStoreHashAlgorithm();
        // 1 & 2 have the same hashcode, 3 has a different hashcode
        // ("Aa"/"BB" is the classic clashing pair for String-style hashes)
        final String stringValueStr1 = "Aa";
        final String stringValueStr2 = "BB";
        final String stringValueStr3 = "SomethingDifferent";
        // Verify the clash assumptions before relying on them below.
        assertThat(hashAlgorithm.hash(stringValueStr1)).isEqualTo(hashAlgorithm.hash(stringValueStr2));
        assertThat(hashAlgorithm.hash(stringValueStr1)).isNotEqualTo(hashAlgorithm.hash(stringValueStr3));
        assertThat(valueStoreDb.getEntryCount()).isEqualTo(0);
        assertThat(valueStoreMetaDb.getEntryCount()).isEqualTo(0);
        // First insert: brand new value so unique id 0 and ref count 1.
        lmdbEnv.doWithWriteTxn(writeTxn -> {
            ValueStoreKey valueStoreKey = getOrCreate(writeTxn, StringValue.of(stringValueStr1));
            assertThat(valueStoreKey).isNotNull();
            assertThat(valueStoreKey.getUniqueId()).isEqualTo((short) 0);
            assertRefCount(writeTxn, valueStoreKey, 1);
        });
        logContents();
        assertThat(valueStoreDb.getEntryCount()).isEqualTo(1);
        assertThat(valueStoreMetaDb.getEntryCount()).isEqualTo(1);
        LOGGER.debug("----------------------------");
        // now put the same value again. Entry count should not change as we already have the value
        // returned valueStoreKey should also be the same.
        lmdbEnv.doWithWriteTxn(writeTxn -> {
            ValueStoreKey valueStoreKey = getOrCreate(writeTxn, StringValue.of(stringValueStr1));
            assertThat(valueStoreKey).isNotNull();
            assertThat(valueStoreKey.getUniqueId()).isEqualTo((short) 0);
            // ref count increases as two things have an interest in the value
            assertRefCount(writeTxn, valueStoreKey, 2);
        });
        logContents();
        assertThat(valueStoreDb.getEntryCount()).isEqualTo(1);
        assertThat(valueStoreMetaDb.getEntryCount()).isEqualTo(1);
        LOGGER.debug("----------------------------");
        // now put a different value with same hashcode. Entry count should increase and the
        // returned valueStoreKey should have an id of 1 as it has same hashcode as last one
        lmdbEnv.doWithWriteTxn(writeTxn -> {
            ValueStoreKey valueStoreKey = getOrCreate(writeTxn, StringValue.of(stringValueStr2));
            assertThat(valueStoreKey).isNotNull();
            assertThat(valueStoreKey.getUniqueId()).isEqualTo((short) 1);
            assertRefCount(writeTxn, valueStoreKey, 1);
        });
        logContents();
        assertThat(valueStoreDb.getEntryCount()).isEqualTo(2);
        assertThat(valueStoreMetaDb.getEntryCount()).isEqualTo(2);
        LOGGER.debug("----------------------------");
        // get the same value again, no change to DB or returned values
        lmdbEnv.doWithWriteTxn(writeTxn -> {
            ValueStoreKey valueStoreKey = getOrCreate(writeTxn, StringValue.of(stringValueStr2));
            assertThat(valueStoreKey).isNotNull();
            assertThat(valueStoreKey.getUniqueId()).isEqualTo((short) 1);
            // ref count increases as two things have an interest in the value
            assertRefCount(writeTxn, valueStoreKey, 2);
        });
        logContents();
        assertThat(valueStoreDb.getEntryCount()).isEqualTo(2);
        assertThat(valueStoreMetaDb.getEntryCount()).isEqualTo(2);
        LOGGER.debug("----------------------------");
        // now put a different value with a different hashcode. Entry count should increase and the
        // returned valueStoreKey should have an id of 0 as it has a different hashcode.
        lmdbEnv.doWithWriteTxn(writeTxn -> {
            ValueStoreKey valueStoreKey = getOrCreate(writeTxn, StringValue.of(stringValueStr3));
            assertThat(valueStoreKey).isNotNull();
            assertThat(valueStoreKey.getUniqueId()).isEqualTo((short) 0);
            assertRefCount(writeTxn, valueStoreKey, 1);
        });
        logContents();
        assertThat(valueStoreDb.getEntryCount()).isEqualTo(3);
        assertThat(valueStoreMetaDb.getEntryCount()).isEqualTo(3);
        LOGGER.debug("----------------------------");
        // get the same value again, no change to DB or returned values
        lmdbEnv.doWithWriteTxn(writeTxn -> {
            ValueStoreKey valueStoreKey = getOrCreate(writeTxn, StringValue.of(stringValueStr3));
            assertThat(valueStoreKey).isNotNull();
            assertThat(valueStoreKey.getUniqueId()).isEqualTo((short) 0);
            // ref count increases as two things have an interest in the value
            assertRefCount(writeTxn, valueStoreKey, 2);
        });
        logContents();
        assertThat(valueStoreDb.getEntryCount()).isEqualTo(3);
        assertThat(valueStoreMetaDb.getEntryCount()).isEqualTo(3);
    }
    /** Dumps both tables (raw and decoded forms) at debug level for diagnosis. */
    private void logContents() {
        valueStoreDb.logRawDatabaseContents(LOGGER::debug);
        valueStoreDb.logDatabaseContents(LOGGER::debug);
        valueStoreMetaDb.logRawDatabaseContents(LOGGER::debug);
        valueStoreMetaDb.logDatabaseContents(LOGGER::debug);
    }
    @Test
    void testDereference() {
        StringValue value1 = StringValue.of("1111");
        StringValue value2 = StringValue.of("2222");
        // ensure hashcodes don't clash
        assertThat(value1.getValue().hashCode())
                .isNotEqualTo(value2.getValue().hashCode());
        final int iterations = 10;
        // Keys are captured on first insert and re-checked on every later insert.
        final AtomicReference<ValueStoreKey> valueStoreKey1aRef = new AtomicReference<>();
        final AtomicReference<ValueStoreKey> valueStoreKey2aRef = new AtomicReference<>();
        // insert 10 of the same values, should have one entry with a ref count going up to 10
        for (int i = 1; i <= iterations; i++) {
            final int expectedRefCount = i;
            lmdbEnv.doWithWriteTxn(writeTxn -> {
                final ValueStoreKey valueStoreKey1a = getOrCreate(writeTxn, value1);
                // value should always be the same
                valueStoreKey1aRef.accumulateAndGet(valueStoreKey1a, (currVal, newVal) -> {
                    if (currVal != null) {
                        assertThat(newVal)
                                .isEqualTo(currVal);
                    }
                    return newVal;
                });
                assertThat(valueStoreDb.getEntryCount(writeTxn)).isEqualTo(1);
                final StringValue stringValue1 = (StringValue) valueStore.get(writeTxn, valueStoreKey1a).get();
                assertThat(getRefCount(writeTxn, valueStoreKey1a))
                        .isEqualTo(expectedRefCount);
                assertThat(stringValue1.getValue())
                        .isEqualTo(value1.getValue());
            });
        }
        valueStoreDb.logRawDatabaseContents();
        valueStoreDb.logDatabaseContents();
        // insert 10 of the same values, should now have one entry (plus the one from above)
        // with a ref count going up to 10
        for (int i = 1; i <= iterations; i++) {
            final int expectedRefCount = i;
            lmdbEnv.doWithWriteTxn(writeTxn -> {
                final ValueStoreKey valueStoreKey2a = getOrCreate(writeTxn, value2);
                valueStoreKey2aRef.accumulateAndGet(valueStoreKey2a, (currVal, newVal) -> {
                    if (currVal != null) {
                        assertThat(newVal)
                                .isEqualTo(currVal);
                    }
                    return newVal;
                });
                assertThat(valueStoreDb.getEntryCount(writeTxn)).isEqualTo(2);
                final StringValue stringValue2 = (StringValue) valueStore.get(writeTxn, valueStoreKey2a).get();
                assertThat(getRefCount(writeTxn, valueStoreKey2a))
                        .isEqualTo(expectedRefCount);
                assertThat(stringValue2.getValue())
                        .isEqualTo(value2.getValue());
            });
        }
        valueStoreDb.logRawDatabaseContents();
        valueStoreDb.logDatabaseContents();
        // Now keep trying to delete value 1, ref count should go down until the delete happens
        for (int i = iterations; i >= 1; i--) {
            final int expectedPreDeleteRefCount = i;
            lmdbEnv.doWithWriteTxn(writeTxn -> {
                assertThat(getRefCount(writeTxn, valueStoreKey1aRef.get()))
                        .isEqualTo(expectedPreDeleteRefCount);
                // now dereference value1
                deReferenceOrDeleteValue(writeTxn, valueStoreKey1aRef.get());
                final Optional<RefDataValue> optValue = valueStore.get(writeTxn, valueStoreKey1aRef.get());
                if (expectedPreDeleteRefCount == 1) {
                    // entry should actually be deleted here
                    assertThat(optValue)
                            .isEmpty();
                    assertThat(valueStoreDb.getEntryCount(writeTxn))
                            .isEqualTo(1);
                } else {
                    assertThat(optValue)
                            .isPresent();
                    assertThat(getRefCount(writeTxn, valueStoreKey1aRef.get()))
                            .isEqualTo(expectedPreDeleteRefCount - 1);
                    assertThat(valueStoreDb.getEntryCount(writeTxn))
                            .isEqualTo(2);
                }
            });
        }
        valueStoreDb.logRawDatabaseContents();
        valueStoreDb.logDatabaseContents();
        // Now keep trying to delete value 2, ref count should go down until the delete happens
        for (int i = iterations; i >= 1; i--) {
            final int expectedPreDeleteRefCount = i;
            lmdbEnv.doWithWriteTxn(writeTxn -> {
                assertThat(getRefCount(writeTxn, valueStoreKey2aRef.get()))
                        .isEqualTo(expectedPreDeleteRefCount);
                // now dereference value2
                deReferenceOrDeleteValue(writeTxn, valueStoreKey2aRef.get());
                final Optional<RefDataValue> optValue = valueStore.get(
                        writeTxn,
                        valueStoreKey2aRef.get());
                if (expectedPreDeleteRefCount == 1) {
                    // entry should actually be deleted here
                    assertThat(optValue)
                            .isEmpty();
                    assertThat(valueStoreDb.getEntryCount(writeTxn))
                            .isEqualTo(0);
                } else {
                    assertThat(optValue)
                            .isPresent();
                    assertThat(getRefCount(writeTxn, valueStoreKey2aRef.get()))
                            .isEqualTo(expectedPreDeleteRefCount - 1);
                    assertThat(valueStoreDb.getEntryCount(writeTxn))
                            .isEqualTo(1);
                }
            });
        }
    }
    /**
     * Decrements the ref count for the entry with the given key, deleting the
     * entry if the count hits zero. The pooled key buffer is released on exit.
     */
    private void deReferenceOrDeleteValue(final Txn<ByteBuffer> writeTxn, final ValueStoreKey valueStoreKey) {
        try (PooledByteBuffer pooledByteBuffer = valueStoreDb.getPooledKeyBuffer()) {
            ByteBuffer valueStoreKeyBuffer = pooledByteBuffer.getByteBuffer();
            valueStoreDb.serializeKey(valueStoreKeyBuffer, valueStoreKey);
            valueStore.deReferenceOrDeleteValue(writeTxn, valueStoreKeyBuffer);
        }
    }
    // Reads the current reference count from the meta table. Fails (NoSuchElementException
    // from Optional.get) if no meta entry exists for the key.
    private int getRefCount(Txn<ByteBuffer> txn, ValueStoreKey valueStoreKey) {
        ValueStoreMeta valueStoreMeta = valueStoreMetaDb.get(txn, valueStoreKey).get();
        final int referenceCount = valueStoreMeta.getReferenceCount();
        LOGGER.info("Ref count: {}", referenceCount);
        return referenceCount;
    }
    // Asserts that the meta table holds the expected reference count for the key.
    private void assertRefCount(Txn<ByteBuffer> txn, final ValueStoreKey valueStoreKey, final int expectedRefCount) {
        ValueStoreMeta valueStoreMeta = valueStoreMetaDb.get(txn, valueStoreKey).get();
        int foundRefCount = valueStoreMeta.getReferenceCount();
        assertThat(foundRefCount).isEqualTo(expectedRefCount);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sysml.test.integration.functions.codegen;
import org.junit.Test;
import org.apache.sysml.hops.DataOp;
import org.apache.sysml.hops.Hop;
import org.apache.sysml.hops.Hop.DataOpTypes;
import org.apache.sysml.hops.LiteralOp;
import org.apache.sysml.hops.codegen.cplan.CNode;
import org.apache.sysml.hops.codegen.cplan.CNodeBinary;
import org.apache.sysml.hops.codegen.cplan.CNodeData;
import org.apache.sysml.hops.codegen.cplan.CNodeTernary;
import org.apache.sysml.hops.codegen.cplan.CNodeTernary.TernaryType;
import org.apache.sysml.hops.codegen.cplan.CNodeUnary;
import org.apache.sysml.hops.codegen.cplan.CNodeBinary.BinType;
import org.apache.sysml.hops.codegen.cplan.CNodeUnary.UnaryType;
import org.apache.sysml.parser.Expression.DataType;
import org.apache.sysml.parser.Expression.ValueType;
import org.apache.sysml.runtime.controlprogram.parfor.util.IDSequence;
import org.apache.sysml.test.integration.AutomatedTestBase;
import org.apache.sysml.test.utils.TestUtils;
/**
 * A CPlan is the internal representation of code generation plans
 * and consists of a DAG of CNodes and a surrounding template. These
 * plans implement equals and hashCode in order to efficiently match
 * equivalent plans and subexpressions. Since this was a frequent source
 * of issues in the past, this test suite aims to explicitly check
 * various scenarios.
 */
public class CPlanComparisonTest extends AutomatedTestBase
{
	// Sequence for unique data-op names so distinct data nodes differ by default.
	private final IDSequence _seq = new IDSequence();
	@Override
	public void setUp() {
		TestUtils.clearAssertionInformation();
	}
	@Test
	public void testEqualLiteral() {
		if(shouldSkipTest())
			return;
		CNodeData c1 = new CNodeData(new LiteralOp(7), 0, 0, DataType.SCALAR);
		CNodeData c2 = new CNodeData(new LiteralOp(7), 0, 0, DataType.SCALAR);
		// equality must hold in plain, literal and strict-equals modes
		assertEquals(c1.hashCode(), c2.hashCode());
		assertEquals(c1, c2);
		c1.setLiteral(true);
		c2.setLiteral(true);
		assertEquals(c1.hashCode(), c2.hashCode());
		assertEquals(c1, c2);
		c1.setStrictEquals(true);
		c2.setStrictEquals(true);
		assertEquals(c1.hashCode(), c2.hashCode());
		assertEquals(c1, c2);
	}
	@Test
	public void testNotEqualLiteral() {
		if(shouldSkipTest())
			return;
		CNodeData c1 = new CNodeData(new LiteralOp(7), 0, 0, DataType.SCALAR);
		CNodeData c2 = new CNodeData(new LiteralOp(3), 0, 0, DataType.SCALAR);
		// inequality must hold in plain, literal and strict-equals modes
		assertNotEquals(c1.hashCode(), c2.hashCode());
		assertNotEquals(c1, c2);
		c1.setLiteral(true);
		c2.setLiteral(true);
		assertNotEquals(c1.hashCode(), c2.hashCode());
		assertNotEquals(c1, c2);
		c1.setStrictEquals(true);
		c2.setStrictEquals(true);
		assertNotEquals(c1.hashCode(), c2.hashCode());
		assertNotEquals(c1, c2);
	}
	@Test
	public void testEqualMatrixDataNode() {
		if(shouldSkipTest())
			return;
		// two data nodes over the same hop must compare equal
		Hop data = createDataOp(DataType.MATRIX);
		CNode c1 = new CNodeData(data);
		CNode c2 = new CNodeData(data);
		assertEquals(c1.hashCode(), c2.hashCode());
		assertEquals(c1, c2);
	}
	@Test
	public void testNotEqualDataTypeDataNode() {
		if(shouldSkipTest())
			return;
		assertNotEquals(
			createCNodeData(DataType.MATRIX),
			createCNodeData(DataType.SCALAR));
	}
	@Test
	public void testEqualUnaryNodes() {
		if(shouldSkipTest())
			return;
		CNode c0 = createCNodeData(DataType.MATRIX);
		CNode c1 = new CNodeUnary(c0, UnaryType.EXP);
		CNode c2 = new CNodeUnary(c0, UnaryType.EXP);
		assertEquals(c1.hashCode(), c2.hashCode());
		assertEquals(c1, c2);
	}
	@Test
	public void testNotEqualUnaryNodes() {
		if(shouldSkipTest())
			return;
		// same input, different unary op type
		CNode c0 = createCNodeData(DataType.MATRIX);
		CNode c1 = new CNodeUnary(c0, UnaryType.EXP);
		CNode c2 = new CNodeUnary(c0, UnaryType.LOG);
		assertNotEquals(c1, c2);
	}
	@Test
	public void testEqualBinaryNodes() {
		if(shouldSkipTest())
			return;
		CNode c1 = createCNodeData(DataType.MATRIX);
		CNode c2 = createCNodeData(DataType.SCALAR);
		CNode bin1 = new CNodeBinary(c1, c2, BinType.PLUS);
		CNode bin2 = new CNodeBinary(c1, c2, BinType.PLUS);
		assertEquals(bin1.hashCode(), bin2.hashCode());
		assertEquals(bin1, bin2);
	}
	@Test
	public void testNotEqualBinaryNodes() {
		if(shouldSkipTest())
			return;
		// same inputs, different binary op type
		CNode c1 = createCNodeData(DataType.MATRIX);
		CNode c2 = createCNodeData(DataType.SCALAR);
		assertNotEquals(
			new CNodeBinary(c1, c2, BinType.PLUS),
			new CNodeBinary(c1, c2, BinType.MULT));
	}
	@Test
	public void testEqualTernaryNodes() {
		if(shouldSkipTest())
			return;
		CNode c1 = createCNodeData(DataType.MATRIX);
		CNode c2 = createCNodeData(DataType.SCALAR);
		CNode c3 = createCNodeData(DataType.MATRIX);
		CNode ter1 = new CNodeTernary(c1, c2, c3, TernaryType.MINUS_MULT);
		CNode ter2 = new CNodeTernary(c1, c2, c3, TernaryType.MINUS_MULT);
		assertEquals(ter1.hashCode(), ter2.hashCode());
		assertEquals(ter1, ter2);
	}
	@Test
	public void testNotEqualTernaryNodes() {
		if(shouldSkipTest())
			return;
		// same inputs, different ternary op type
		CNode c1 = createCNodeData(DataType.MATRIX);
		CNode c2 = createCNodeData(DataType.SCALAR);
		CNode c3 = createCNodeData(DataType.MATRIX);
		CNode ter1 = new CNodeTernary(c1, c2, c3, TernaryType.MINUS_MULT);
		CNode ter2 = new CNodeTernary(c1, c2, c3, TernaryType.PLUS_MULT);
		assertNotEquals(ter1, ter2);
	}
	@Test
	public void testNotEqualUnaryBinaryNodes() {
		if(shouldSkipTest())
			return;
		// nodes of different arity must never compare equal
		CNode c1 = createCNodeData(DataType.MATRIX);
		CNode c2 = createCNodeData(DataType.SCALAR);
		CNode un1 = new CNodeUnary(c1, UnaryType.ABS);
		CNode bin2 = new CNodeBinary(c1, c2, BinType.DIV);
		assertNotEquals(un1, bin2);
	}
	@Test
	public void testNotEqualUnaryTernaryNodes() {
		if(shouldSkipTest())
			return;
		CNode c1 = createCNodeData(DataType.MATRIX);
		CNode c2 = createCNodeData(DataType.SCALAR);
		CNode c3 = createCNodeData(DataType.MATRIX);
		CNode un1 = new CNodeUnary(c1, UnaryType.ABS);
		CNode ter2 = new CNodeTernary(c1, c2, c3, TernaryType.PLUS_MULT);
		assertNotEquals(un1, ter2);
	}
	@Test
	public void testNotEqualBinaryTernaryNodes() {
		if(shouldSkipTest())
			return;
		CNode c1 = createCNodeData(DataType.MATRIX);
		CNode c2 = createCNodeData(DataType.SCALAR);
		CNode c3 = createCNodeData(DataType.MATRIX);
		CNode bin1 = new CNodeBinary(c1, c2, BinType.EQUAL);
		CNode ter2 = new CNodeTernary(c1, c2, c3, TernaryType.PLUS_MULT);
		assertNotEquals(bin1, ter2);
	}
	@Test
	public void testNotEqualBinaryDAG1() {
		if(shouldSkipTest())
			return;
		CNode c1 = createCNodeData(DataType.MATRIX);
		CNode c2 = createCNodeData(DataType.MATRIX);
		CNode c3 = createCNodeData(DataType.SCALAR);
		//DAG 1a: (c1*c2)*c3
		CNode b1a = new CNodeBinary(c1, c2, BinType.MULT);
		CNode b2a = new CNodeBinary(b1a, c3, BinType.MULT);
		//DAG 1b: (c1*c2)*c1
		CNode b1b = new CNodeBinary(c1, c2, BinType.MULT);
		CNode b2b = new CNodeBinary(b1b, c1, BinType.MULT);
		assertNotEquals(b2a, b2b);
	}
	@Test
	public void testNotEqualBinaryDAG2() {
		if(shouldSkipTest())
			return;
		// same shape as DAG1 but with a matrix (not scalar) third input
		CNode c1 = createCNodeData(DataType.MATRIX);
		CNode c2 = createCNodeData(DataType.MATRIX);
		CNode c3 = createCNodeData(DataType.MATRIX);
		//DAG 2a: (c1*c2)*c3
		CNode b1a = new CNodeBinary(c1, c2, BinType.MULT);
		CNode b2a = new CNodeBinary(b1a, c3, BinType.MULT);
		//DAG 2b: (c1*c2)*c1
		CNode b1b = new CNodeBinary(c1, c2, BinType.MULT);
		CNode b2b = new CNodeBinary(b1b, c1, BinType.MULT);
		assertNotEquals(b2a, b2b);
	}
	@Test
	public void testNotEqualBinaryDAG3() {
		if(shouldSkipTest())
			return;
		CNode c1 = createCNodeData(DataType.MATRIX);
		CNode c2 = createCNodeData(DataType.MATRIX);
		CNode c3 = createCNodeData(DataType.MATRIX);
		//DAG 3a: (c1+c3)*(c2+c3)
		CNode b1a = new CNodeBinary(c1, c3, BinType.PLUS);
		CNode b2a = new CNodeBinary(c2, c3, BinType.PLUS);
		CNode b3a = new CNodeBinary(b1a, b2a, BinType.MULT);
		//DAG 3b: (c1+c2)*(c3+c3)
		CNode b1b = new CNodeBinary(c1, c2, BinType.PLUS);
		CNode b2b = new CNodeBinary(c3, c3, BinType.PLUS);
		CNode b3b = new CNodeBinary(b1b, b2b, BinType.MULT);
		assertNotEquals(b3a, b3b);
	}
	/** Creates a data node over a fresh, uniquely-named transient read of the given data type. */
	private CNode createCNodeData(DataType dt) {
		return new CNodeData(createDataOp("tmp"+_seq.getNextID(), dt));
	}
	/** Creates a uniquely-named transient-read data op of the given data type. */
	private Hop createDataOp(DataType dt) {
		// delegate to the static factory so the DataOp parameters live in one place
		return createDataOp("tmp"+_seq.getNextID(), dt);
	}
	/** Creates a named transient-read data op (77x7 dims, 1000x1000 blocks) of the given data type. */
	private static Hop createDataOp(String name, DataType dt) {
		return new DataOp(name, dt, ValueType.DOUBLE,
			DataOpTypes.TRANSIENTREAD, "tmp", 77L, 7L, -1L, 1000, 1000);
	}
}
| |
/*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2017 by Hitachi Vantara : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.trans.steps.sql;
import java.util.ArrayList;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.Result;
import org.pentaho.di.core.RowMetaAndData;
import org.pentaho.di.core.database.Database;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleStepException;
import org.pentaho.di.core.row.RowDataUtil;
import org.pentaho.di.core.row.RowMeta;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.core.row.value.ValueMetaInteger;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.BaseStep;
import org.pentaho.di.trans.step.StepDataInterface;
import org.pentaho.di.trans.step.StepInterface;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.step.StepMetaInterface;
/**
* Execute one or more SQL statements in a script, one time or parameterised (for every row)
*
* @author Matt
* @since 10-sep-2005
*/
public class ExecSQL extends BaseStep implements StepInterface {
  private static final Class<?> PKG = ExecSQLMeta.class; // for i18n purposes, needed by Translator2!!

  private ExecSQLMeta meta;
  private ExecSQLData data;

  public ExecSQL( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta,
    Trans trans ) {
    super( stepMeta, stepDataInterface, copyNr, transMeta, trans );
  }

  /**
   * Builds the single result row containing the updated/inserted/deleted/read line counts
   * of the given execution result. A count is only added when its target field name
   * (upd/ins/del/read) is non-empty.
   *
   * @param result the execution result to read the line counts from
   * @param upd field name for the updated-lines count, or null/empty to omit
   * @param ins field name for the output-lines count, or null/empty to omit
   * @param del field name for the deleted-lines count, or null/empty to omit
   * @param read field name for the read-lines count, or null/empty to omit
   * @return the (possibly empty) result row
   */
  public static final RowMetaAndData getResultRow( Result result, String upd, String ins, String del, String read ) {
    RowMetaAndData resultRow = new RowMetaAndData();
    addResultValue( resultRow, upd, result.getNrLinesUpdated() );
    addResultValue( resultRow, ins, result.getNrLinesOutput() );
    addResultValue( resultRow, del, result.getNrLinesDeleted() );
    addResultValue( resultRow, read, result.getNrLinesRead() );
    return resultRow;
  }

  /** Appends an integer value to the row under the given field name, if the name is non-empty. */
  private static void addResultValue( RowMetaAndData resultRow, String fieldName, long value ) {
    if ( fieldName != null && fieldName.length() > 0 ) {
      ValueMetaInterface meta = new ValueMetaInteger( fieldName );
      meta.setLength( ValueMetaInterface.DEFAULT_INTEGER_LENGTH, 0 );
      // Long.valueOf instead of the deprecated new Long(...) constructor
      resultRow.addValue( meta, Long.valueOf( value ) );
    }
  }

  @Override
  public boolean processRow( StepMetaInterface smi, StepDataInterface sdi ) throws KettleException {
    meta = (ExecSQLMeta) smi;
    data = (ExecSQLData) sdi;

    if ( !meta.isExecutedEachInputRow() ) {
      // Execute-once mode: the statement already ran in init(); just emit its result row.
      RowMetaAndData resultRow =
        getResultRow( data.result, meta.getUpdateField(), meta.getInsertField(), meta.getDeleteField(), meta
          .getReadField() );
      putRow( resultRow.getRowMeta(), resultRow.getData() );
      setOutputDone(); // Stop processing, this is all we do!
      return false;
    }

    Object[] row = getRow();
    if ( row == null ) { // no more input to be expected...
      setOutputDone();
      return false;
    }

    if ( first ) { // we just got started
      first = false;
      data.outputRowMeta = getInputRowMeta().clone();
      meta.getFields( data.outputRowMeta, getStepname(), null, null, this, repository, metaStore );

      // Find the indexes of the arguments
      data.argumentIndexes = new int[meta.getArguments().length];
      for ( int i = 0; i < meta.getArguments().length; i++ ) {
        data.argumentIndexes[i] = this.getInputRowMeta().indexOfValue( meta.getArguments()[i] );
        if ( data.argumentIndexes[i] < 0 ) {
          logError( BaseMessages.getString( PKG, "ExecSQL.Log.ErrorFindingField" ) + meta.getArguments()[i] + "]" );
          throw new KettleStepException( BaseMessages.getString( PKG, "ExecSQL.Exception.CouldNotFindField", meta
            .getArguments()[i] ) );
        }
        if ( meta.isParams() ) {
          if ( i == 0 ) {
            // Define parameters meta
            data.paramsMeta = new RowMeta();
          }
          data.paramsMeta.addValueMeta( getInputRowMeta().getValueMeta( data.argumentIndexes[i] ) );
        }
      }

      if ( !meta.isParams() ) {
        // We need to replace question marks by string value
        // Find the locations of the question marks in the String...
        // We replace the question marks with the values...
        // We ignore quotes etc. to make inserts easier...
        data.markerPositions = new ArrayList<Integer>();
        int len = data.sql.length();
        int pos = len - 1;
        // Scan back-to-front so markerPositions holds positions in descending order,
        // which keeps earlier positions valid while we splice replacements in below.
        while ( pos >= 0 ) {
          if ( data.sql.charAt( pos ) == '?' ) {
            data.markerPositions.add( Integer.valueOf( pos ) ); // save the marker position
          }
          pos--;
        }
      }
    }

    String sql;
    Object[] paramsData = null;
    if ( meta.isParams() ) {
      // Get parameters data
      paramsData = new Object[data.argumentIndexes.length];
      sql = this.data.sql;
      for ( int i = 0; i < this.data.argumentIndexes.length; i++ ) {
        paramsData[i] = row[data.argumentIndexes[i]];
      }
    } else {
      int numMarkers = data.markerPositions.size();
      if ( numMarkers > 0 ) {
        StringBuilder buf = new StringBuilder( data.sql );

        // Replace the values in the SQL string...
        // markerPositions is in descending position order, so argument i (in SQL order)
        // pairs with marker (numMarkers - i - 1).
        for ( int i = 0; i < numMarkers; i++ ) {
          // Get the appropriate value from the input row...
          int index = data.argumentIndexes[data.markerPositions.size() - i - 1];
          ValueMetaInterface valueMeta = getInputRowMeta().getValueMeta( index );
          Object valueData = row[index];

          // replace the '?' with the String in the row.
          int pos = data.markerPositions.get( i );
          String replaceValue = valueMeta.getString( valueData );
          replaceValue = Const.NVL( replaceValue, "" );
          if ( meta.isQuoteString() && ( valueMeta.getType() == ValueMetaInterface.TYPE_STRING ) ) {
            // Have the database dialect do the quoting.
            // This also adds the quotes around the string
            replaceValue = meta.getDatabaseMeta().quoteSQLString( replaceValue );
          }
          buf.replace( pos, pos + 1, replaceValue );
        }
        sql = buf.toString();
      } else {
        sql = data.sql;
      }
    }

    if ( log.isRowLevel() ) {
      logRowlevel( BaseMessages.getString( PKG, "ExecSQL.Log.ExecutingSQLScript" ) + Const.CR + sql );
    }

    boolean sendToErrorRow = false;
    String errorMessage = null;
    try {
      if ( meta.isSingleStatement() ) {
        data.result = data.db.execStatement( sql, data.paramsMeta, paramsData );
      } else {
        data.result = data.db.execStatements( sql, data.paramsMeta, paramsData );
      }

      RowMetaAndData add =
        getResultRow( data.result, meta.getUpdateField(), meta.getInsertField(), meta.getDeleteField(), meta
          .getReadField() );
      row = RowDataUtil.addRowData( row, getInputRowMeta().size(), add.getData() );

      if ( !data.db.isAutoCommit() ) {
        data.db.commit();
      }

      putRow( data.outputRowMeta, row ); // send it out!

      if ( checkFeedback( getLinesWritten() ) ) {
        if ( log.isBasic() ) {
          logBasic( BaseMessages.getString( PKG, "ExecSQL.Log.LineNumber" ) + getLinesWritten() );
        }
      }
    } catch ( KettleException e ) {
      if ( getStepMeta().isDoingErrorHandling() ) {
        sendToErrorRow = true;
        errorMessage = e.toString();
      } else {
        throw new KettleStepException( BaseMessages.getString( PKG, "ExecSQL.Log.ErrorInStep" ), e );
      }

      if ( sendToErrorRow ) {
        // Simply add this row to the error row
        putError( getInputRowMeta(), row, 1, errorMessage, null, "ExecSQL001" );
      }
    }
    return true;
  }

  @Override
  public void dispose( StepMetaInterface smi, StepDataInterface sdi ) {
    meta = (ExecSQLMeta) smi;
    data = (ExecSQLData) sdi;

    if ( log.isBasic() ) {
      logBasic( BaseMessages.getString( PKG, "ExecSQL.Log.FinishingReadingQuery" ) );
    }

    if ( data.db != null ) {
      data.db.disconnect();
    }

    super.dispose( smi, sdi );
  }

  /** Stop the running query */
  @Override
  public void stopRunning( StepMetaInterface smi, StepDataInterface sdi ) throws KettleException {
    meta = (ExecSQLMeta) smi;
    data = (ExecSQLData) sdi;

    if ( data.db != null && !data.isCanceled ) {
      synchronized ( data.db ) {
        data.db.cancelQuery();
      }
      data.isCanceled = true;
    }
  }

  @Override
  public boolean init( StepMetaInterface smi, StepDataInterface sdi ) {
    meta = (ExecSQLMeta) smi;
    data = (ExecSQLData) sdi;

    if ( super.init( smi, sdi ) ) {
      if ( meta.getDatabaseMeta() == null ) {
        logError( BaseMessages.getString( PKG, "ExecSQL.Init.ConnectionMissing", getStepname() ) );
        return false;
      }
      data.db = new Database( this, meta.getDatabaseMeta() );
      data.db.shareVariablesWith( this );

      // Connect to the database
      try {
        if ( getTransMeta().isUsingUniqueConnections() ) {
          synchronized ( getTrans() ) {
            data.db.connect( getTrans().getTransactionId(), getPartitionID() );
          }
        } else {
          data.db.connect( getPartitionID() );
        }

        if ( log.isDetailed() ) {
          logDetailed( BaseMessages.getString( PKG, "ExecSQL.Log.ConnectedToDB" ) );
        }

        if ( meta.isReplaceVariables() ) {
          data.sql = environmentSubstitute( meta.getSql() );
        } else {
          data.sql = meta.getSql();
        }

        // If the SQL needs to be executed once, this is a starting step
        // somewhere.
        if ( !meta.isExecutedEachInputRow() ) {
          if ( meta.isSingleStatement() ) {
            data.result = data.db.execStatement( data.sql );
          } else {
            data.result = data.db.execStatements( data.sql );
          }
          if ( !data.db.isAutoCommit() ) {
            data.db.commit();
          }
        }
        return true;
      } catch ( KettleException e ) {
        logError( BaseMessages.getString( PKG, "ExecSQL.Log.ErrorOccurred" ) + e.getMessage() );
        setErrors( 1 );
        stopAll();
      }
    }
    return false;
  }
}
| |
package au.com.codeka.warworlds.server.ctrl;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Random;
import java.util.TreeMap;
import org.joda.time.DateTime;
import org.joda.time.Seconds;
import au.com.codeka.common.Log;
import au.com.codeka.common.Vector2;
import au.com.codeka.common.model.BaseBuildRequest;
import au.com.codeka.common.model.BaseColony;
import au.com.codeka.common.model.BaseEmpirePresence;
import au.com.codeka.common.model.BaseFleet;
import au.com.codeka.common.model.BaseFleetUpgrade;
import au.com.codeka.common.model.BasePlanet;
import au.com.codeka.common.model.BaseScoutReport;
import au.com.codeka.common.model.BuildingDesign;
import au.com.codeka.common.model.BuildingEffect;
import au.com.codeka.common.model.Design;
import au.com.codeka.common.model.Simulation;
import au.com.codeka.common.protobuf.Messages;
import au.com.codeka.warworlds.server.EventProcessor;
import au.com.codeka.warworlds.server.RequestException;
import au.com.codeka.warworlds.server.data.SqlResult;
import au.com.codeka.warworlds.server.data.SqlStmt;
import au.com.codeka.warworlds.server.data.Transaction;
import au.com.codeka.warworlds.server.designeffects.RadarBuildingEffect;
import au.com.codeka.warworlds.server.model.Alliance;
import au.com.codeka.warworlds.server.model.BuildRequest;
import au.com.codeka.warworlds.server.model.Building;
import au.com.codeka.warworlds.server.model.BuildingPosition;
import au.com.codeka.warworlds.server.model.Colony;
import au.com.codeka.warworlds.server.model.CombatReport;
import au.com.codeka.warworlds.server.model.EmpirePresence;
import au.com.codeka.warworlds.server.model.Fleet;
import au.com.codeka.warworlds.server.model.FleetUpgrade;
import au.com.codeka.warworlds.server.model.Planet;
import au.com.codeka.warworlds.server.model.ScoutReport;
import au.com.codeka.warworlds.server.model.Sector;
import au.com.codeka.warworlds.server.model.Star;
public class StarController {
private static final Log log = new Log("StarController");
private DataBase db;
/** Creates a controller that runs its queries outside of any existing transaction. */
public StarController() {
    db = new DataBase();
}
/** Creates a controller whose queries participate in the given transaction. */
public StarController(Transaction trans) {
    db = new DataBase(trans);
}
/**
 * Fetches a single star by ID.
 *
 * @throws RequestException with a 404 status if no star has the given ID.
 */
public Star getStar(int id) throws RequestException {
    int[] query = new int[] {id};
    List<Star> result = db.getStars(query);
    if (!result.isEmpty()) {
        return result.get(0);
    }
    throw new RequestException(404);
}
/** Fetches the stars with the given IDs. */
public List<Star> getStars(int[] ids) throws RequestException {
    return db.getStars(ids);
}
/** Fetches the stars with the given IDs, accepting any collection of boxed IDs. */
public List<Star> getStars(Collection<Integer> ids) throws RequestException {
    // copy the boxed IDs into the primitive array the DataBase layer expects
    int[] idArray = new int[ids.size()];
    int index = 0;
    for (int id : ids) {
        idArray[index] = id;
        index++;
    }
    return db.getStars(idArray);
}
/** Returns all wormhole stars owned by a member of the given alliance. */
public List<Star> getWormholesForAlliance(int allianceID) throws RequestException {
    Alliance alliance = new AllianceController().getAlliance(allianceID);
    try {
        return db.getWormholesForAlliance(alliance);
    } catch(Exception e) {
        throw new RequestException(e);
    }
}
/**
 * Adds a "marker" star (size 20, no planets) at the given position and returns the
 * newly-created star.
 */
public Star addMarkerStar(long sectorX, long sectorY, int offsetX, int offsetY) throws RequestException {
    // TODO: check that this isn't too close to an existing star...
    Sector sector = new SectorController().getSector(sectorX, sectorY);
    try {
        int starID = db.addStar(sector.getID(), offsetX, offsetY, 20, "Marker", Star.Type.Marker, null);
        return getStar(starID);
    } catch (Exception e) {
        throw new RequestException(e);
    }
}
/** Persists the star and pings the event processor (see the two-argument overload). */
public void update(Star star) throws RequestException {
    update(star, true);
}
/**
 * Persists the star to the database.
 *
 * @param pingEventProcessor if true, the event processor is pinged after the update so
 *        it can pick up e.g. changed build times.
 */
public void update(Star star, boolean pingEventProcessor) throws RequestException {
    try {
        updateNoRetry(star);
    } catch (Exception e) {
        throw new RequestException(e);
    }
    if (pingEventProcessor) {
        // we may need to ping the event processor if a build time change, or whatever.
        EventProcessor.i.ping();
    }
}
// Writes the star and then drops any empire presences left with no colonies on it.
private void updateNoRetry(Star star) throws Exception {
    db.updateStar(star);
    removeEmpirePresences(star.getID());
}
/**
 * Deletes empire-presence rows for this star belonging to empires that no longer have
 * any colonies here.
 */
public void removeEmpirePresences(int starID) throws RequestException {
    // delete an empire presences for empires that no longer have colonies on this star...
    String sql = "DELETE FROM empire_presences" +
                " WHERE star_id = ?" +
                " AND (SELECT COUNT(*)" +
                     " FROM colonies" +
                     " WHERE colonies.empire_id = empire_presences.empire_id" +
                       " AND colonies.star_id = empire_presences.star_id) = 0";
    try (SqlStmt stmt = db.prepare(sql)) {
        stmt.setInt(1, starID);
        stmt.update();
    } catch(Exception e) {
        throw new RequestException(e);
    }
}
/**
 * "Sanitizes" a star, removing (in place) information that the given empire should not
 * be able to see: other empires' fleets (unless visible to a radar), cloaked fleets,
 * build requests, scout reports, and colony secrets.
 *
 * @param star the star to sanitize; modified in place.
 * @param myEmpireID the empire the star is being sanitized for.
 * @param buildings our radar-capable buildings on nearby stars, or null.
 * @param otherStars stars that moving fleets may be heading to, or null.
 */
public void sanitizeStar(Star star, int myEmpireID,
                         ArrayList<BuildingPosition> buildings,
                         ArrayList<Star> otherStars) {
    // if the star is a wormhole, don't sanitize it -- a wormhole is basically fleets in
    // transit anyway
    if (star.getStarType().getType() == Star.Type.Wormhole) {
        return;
    }

    // if we don't have any fleets here, remove all the others
    boolean removeFleets = true;
    ArrayList<Fleet> fleetsToAddBack = null;
    for (BaseFleet baseFleet : star.getFleets()) {
        Fleet fleet = (Fleet) baseFleet;
        if (fleet.getEmpireID() != null && fleet.getEmpireID() == myEmpireID) {
            removeFleets = false;
        }
    }

    // ... unless we have a radar on a nearby star
    if (buildings != null) {
        for (BuildingPosition building : buildings) {
            BuildingDesign design = building.getDesign();

            // radar range is the largest range of any radar effect on the building
            float radarRange = 0.0f;
            for (RadarBuildingEffect effect : design.getEffects(building.getLevel(),
                    RadarBuildingEffect.class)) {
                if (effect.getRange() > radarRange) {
                    radarRange = effect.getRange();
                }
            }

            if (radarRange > 0.0f) {
                float distanceToBuilding = Sector.distanceInParsecs(star,
                        building.getSectorX(), building.getSectorY(),
                        building.getOffsetX(), building.getOffsetY());
                if (distanceToBuilding < radarRange) {
                    removeFleets = false;
                }

                if (removeFleets && otherStars != null) {
                    // check any moving fleets, we'll want to add those back if they are
                    // currently within radar range of this building
                    for (BaseFleet baseFleet : star.getFleets()) {
                        if (baseFleet.getState() != Fleet.State.MOVING) {
                            continue;
                        }
                        Fleet fleet = (Fleet) baseFleet;

                        Star destinationStar = null;
                        for (Star otherStar : otherStars) {
                            if (otherStar.getID() == fleet.getDestinationStarID()) {
                                destinationStar = otherStar;
                                break;
                            }
                        }
                        if (destinationStar != null) {
                            // estimate the fleet's current position by scaling the
                            // direction vector by its movement progress
                            Vector2 dir = Sector.directionBetween(star, destinationStar);
                            float progress = fleet.getMovementProgress();
                            dir.scale(progress);
                            float distanceToFleet = Sector.distanceInParsecs(
                                    star.getSectorX(), star.getSectorY(),
                                    star.getOffsetX() + (int) (dir.x * Sector.PIXELS_PER_PARSEC),
                                    star.getOffsetY() + (int) (dir.y * Sector.PIXELS_PER_PARSEC),
                                    building.getSectorX(), building.getSectorY(),
                                    building.getOffsetX(), building.getOffsetY());
                            if (distanceToFleet < radarRange) {
                                if (fleetsToAddBack == null) {
                                    fleetsToAddBack = new ArrayList<Fleet>();
                                }
                                fleetsToAddBack.add(fleet);
                            }
                        }
                    }
                }
            }
        }
    }

    if (removeFleets) {
        star.getFleets().clear();
        if (fleetsToAddBack != null) {
            star.getFleets().addAll(fleetsToAddBack);
        }
    }

    // remove all fleets that aren't ours and have a cloaking device (regardless of
    // radars). Note: getEmpireID() can be null (it is null-checked above); the previous
    // code unboxed it unconditionally here, which threw a NullPointerException for
    // cloaked fleets with no empire -- treat a null empire as "not ours" instead.
    ArrayList<Fleet> fleetsToRemove = null;
    for (BaseFleet baseFleet : star.getFleets()) {
        Fleet fleet = (Fleet) baseFleet;
        if (fleet.hasUpgrade("cloak")
                && (fleet.getEmpireID() == null || fleet.getEmpireID() != myEmpireID)) {
            if (fleetsToRemove == null) {
                fleetsToRemove = new ArrayList<Fleet>();
            }
            fleetsToRemove.add(fleet);
        }
    }
    if (fleetsToRemove != null) {
        star.getFleets().removeAll(fleetsToRemove);
    }

    // remove build requests that aren't ours
    if (star.getBuildRequests() != null) {
        ArrayList<BaseBuildRequest> toRemove = new ArrayList<BaseBuildRequest>();
        for (BaseBuildRequest baseBuildRequest : star.getBuildRequests()) {
            BuildRequest buildRequest = (BuildRequest) baseBuildRequest;
            if (buildRequest.getEmpireID() != myEmpireID) {
                toRemove.add(baseBuildRequest);
            }
        }
        star.getBuildRequests().removeAll(toRemove);
    }

    // remove all scout reports that aren't ours
    if (star.getScoutReports() != null) {
        ArrayList<BaseScoutReport> toRemove = new ArrayList<BaseScoutReport>();
        for (BaseScoutReport baseScoutReport : star.getScoutReports()) {
            ScoutReport scoutReport = (ScoutReport) baseScoutReport;
            if (!scoutReport.getEmpireKey().equals(Integer.toString(myEmpireID))) {
                toRemove.add(baseScoutReport);
            }
        }
        star.getScoutReports().removeAll(toRemove);
    }

    // for any colonies that are not ours, hide some "secret" information
    for (BaseColony baseColony : star.getColonies()) {
        Colony colony = (Colony) baseColony;
        if (colony.getEmpireID() != null && colony.getEmpireID() != myEmpireID) {
            colony.sanitize();
        }
    }
}
private static class DataBase extends BaseDataBase {
/** Creates a DataBase that runs outside of any existing transaction. */
public DataBase() {
    super();
}
/** Creates a DataBase whose statements participate in the given transaction. */
public DataBase(Transaction trans) {
    super(trans);
}
/**
 * Loads the stars with the given IDs and fully populates them (empires, colonies,
 * fleets, buildings, build requests, combat reports), adding native colonies to
 * long-empty stars as a side effect.
 */
public List<Star> getStars(int[] ids) throws RequestException {
    if (ids.length == 0) {
        return new ArrayList<Star>();
    }

    // load the basic star rows first
    ArrayList<Star> stars = new ArrayList<Star>();
    final String sql = "SELECT stars.id, sector_id, name, sectors.x AS sector_x," +
                      " sectors.y AS sector_y, stars.x, stars.y, size, star_type, planets," +
                      " extra, last_simulation, time_emptied" +
                 " FROM stars" +
                 " INNER JOIN sectors ON stars.sector_id = sectors.id" +
                 " WHERE stars.id IN "+buildInClause(ids);
    try (SqlStmt stmt = prepare(sql)) {
        SqlResult res = stmt.select();

        while (res.next()) {
            stars.add(new Star(res));
        }
        if (stars.isEmpty()) {
            return stars;
        }
    } catch(Exception e) {
        throw new RequestException(e);
    }

    // initialize empty child collections and collect the IDs that actually matched
    int[] starIds = new int[stars.size()];
    for (int i = 0; i < stars.size(); i++) {
        Star star = stars.get(i);
        star.setColonies(new ArrayList<BaseColony>());
        star.setFleets(new ArrayList<BaseFleet>());
        star.setEmpires(new ArrayList<BaseEmpirePresence>());
        star.setBuildRequests(new ArrayList<BaseBuildRequest>());
        starIds[i] = star.getID();
    }

    // populate child objects; colonies must come before buildings, since building
    // effects are applied to the colonies as they load
    String inClause = buildInClause(starIds);
    try {
        populateEmpires(stars, inClause);
        populateColonies(stars, inClause);
        populateFleets(stars, inClause);
        populateBuildings(stars, inClause);
        populateBuildRequests(stars, inClause);
        checkNativeColonies(stars);
        populateCombatReports(stars, inClause);
    } catch(Exception e) {
        throw new RequestException(e);
    }

    return stars;
}
/**
 * Returns all wormhole stars owned by an empire belonging to the given alliance.
 * Wormholes with no extra data (i.e. no recorded owner) are skipped.
 */
public List<Star> getWormholesForAlliance(Alliance alliance) throws Exception {
    String sql = "SELECT stars.id, sector_id, name, sectors.x AS sector_x," +
                " sectors.y AS sector_y, stars.x, stars.y, size, star_type, planets," +
                " extra, last_simulation, time_emptied" +
           " FROM stars" +
           " INNER JOIN sectors ON stars.sector_id = sectors.id" +
           " WHERE star_type = "+Star.Type.Wormhole.ordinal();
    try (SqlStmt stmt = prepare(sql)) {
        SqlResult res = stmt.select();
        ArrayList<Star> stars = new ArrayList<Star>();
        while (res.next()) {
            Star star = new Star(res);
            if (star.getWormholeExtra() == null) {
                continue;
            }
            int empireID = star.getWormholeExtra().getEmpireID();
            if (alliance.isEmpireMember(empireID)) {
                stars.add(star);
            }
        }
        return stars;
    }
}
/**
 * Inserts a new star row.
 *
 * @param planets the star's planets; may be null, in which case an empty planet list
 *        is stored.
 * @return the auto-generated ID of the new star.
 */
public int addStar(int sectorID, int x, int y, int size, String name, Star.Type starType, Planet[] planets) throws Exception {
    String sql = "INSERT INTO stars (sector_id, x, y, size, name, star_type, planets, last_simulation, time_emptied)" +
                " VALUES (?, ?, ?, ?, ?, ?, ?, NOW(), NOW())";
    try (SqlStmt stmt = prepare(sql, Statement.RETURN_GENERATED_KEYS)) {
        stmt.setInt(1, sectorID);
        stmt.setInt(2, x);
        stmt.setInt(3, y);
        stmt.setInt(4, size);
        stmt.setString(5, name);
        stmt.setInt(6, starType.ordinal());
        // serialize the planets (if any) into a protobuf; replaces the old empty
        // "if (planets == null) {} else {...}" branch with a positive condition
        Messages.Planets.Builder planets_pb = Messages.Planets.newBuilder();
        if (planets != null) {
            for (Planet planet : planets) {
                Messages.Planet.Builder planet_pb = Messages.Planet.newBuilder();
                planet.toProtocolBuffer(planet_pb);
                planets_pb.addPlanets(planet_pb);
            }
        }
        stmt.setBytes(7, planets_pb.build().toByteArray());
        stmt.update();
        return stmt.getAutoGeneratedID();
    }
}
/**
 * Persists the star row itself, then all of its child objects (empires, colonies,
 * fleets, fleet upgrades, build requests, and the current combat report, if any).
 */
public void updateStar(Star star) throws Exception {
    final String sql = "UPDATE stars SET" +
                      " last_simulation = ?," +
                      " name = ?," +
                      " star_type = ?," +
                      " empire_count = ?," +
                      " extra = ?" +
                    " WHERE id = ?";
    try (SqlStmt stmt = prepare(sql)) {
        DateTime lastSimulation = star.getLastSimulation();
        DateTime now = DateTime.now();
        // guard against a bogus last_simulation in the future: log it and clamp it back
        // to "now" if it's more than two minutes ahead
        if (lastSimulation.isAfter(now)) {
            int difference = Seconds.secondsBetween(now, lastSimulation).getSeconds();
            if (difference > 120) {
                log.error(String.format(Locale.ENGLISH, "last_simulation is after now!"
                        + " [star=%d %s] [last_simulation=%s] [now=%s] [difference=%d seconds]",
                        star.getID(), star.getName(), lastSimulation, now, difference),
                        new Throwable());
                lastSimulation = DateTime.now();
            }
        }
        stmt.setDateTime(1, lastSimulation);
        stmt.setString(2, star.getName());
        stmt.setInt(3, star.getStarType().getType().ordinal());

        // empire_count is the number of presences with a non-null empire key
        int empireCount = 0;
        for (BaseEmpirePresence empirePresence : star.getEmpirePresences()) {
            if (empirePresence.getEmpireKey() != null) {
                empireCount ++;
            }
        }
        stmt.setInt(4, empireCount);

        // "extra" holds the wormhole data (if this star is a wormhole), else NULL
        Messages.Star.StarExtra.Builder star_extra_pb = null;
        if (star.getWormholeExtra() != null) {
            star_extra_pb = Messages.Star.StarExtra.newBuilder();
            star.getWormholeExtra().toProtocolBuffer(star_extra_pb);
        }
        if (star_extra_pb == null) {
            stmt.setNull(5);
        } else {
            stmt.setBytes(5, star_extra_pb.build().toByteArray());
        }

        stmt.setInt(6, star.getID());
        stmt.update();
    }

    updateEmpires(star);
    updateColonies(star);
    updateFleets(star);
    updateFleetUpgrades(star);
    updateBuildRequests(star);
    CombatReport combatReport = (CombatReport) star.getCombatReport();
    if (combatReport != null) {
        updateCombatReport(star, combatReport);
    }
}
// Persists the per-empire presence rows (goods, minerals, taxes) for this star.
private void updateEmpires(Star star) throws Exception {
    final String sql = "UPDATE empire_presences SET" +
                      " total_goods = ?," +
                      " total_minerals = ?," +
                      " tax_per_hour = ?," +
                      " goods_zero_time = ?" +
                    " WHERE id = ?";
    try (SqlStmt stmt = prepare(sql)) {
        for (BaseEmpirePresence empire : star.getEmpires()) {
            stmt.setDouble(1, empire.getTotalGoods());
            stmt.setDouble(2, empire.getTotalMinerals());
            stmt.setDouble(3, empire.getTaxPerHour());
            stmt.setDateTime(4, empire.getGoodsZeroTime());
            stmt.setInt(5, ((EmpirePresence) empire).getID());
            stmt.update();
        }
    }
}
// Persists the star's colonies. Colonies whose population has dropped to (effectively)
// zero are deleted; uncollected taxes are aggregated per empire, credited to the
// empire's cash, and a "cash" notification is pushed to online empires.
private void updateColonies(Star star) throws Exception {
    boolean needDelete = false;
    // below this population a colony is considered dead and is removed
    final float MIN_POPULATION = 0.0001f;
    TreeMap<Integer, Float> empireTaxes = new TreeMap<Integer, Float>();
    String sql = "UPDATE colonies SET" +
                " focus_population = ?," +
                " focus_construction = ?," +
                " focus_farming = ?," +
                " focus_mining = ?," +
                " population = ?," +
                " uncollected_taxes = ?" +
              " WHERE id = ?";
    try (SqlStmt stmt = prepare(sql)) {
        for (BaseColony baseColony : star.getColonies()) {
            Colony colony = (Colony) baseColony;
            if (colony.getPopulation() <= MIN_POPULATION) {
                // dead colony: handled by the delete pass below
                needDelete = true;
                continue;
            }
            if (colony.getEmpireID() != null) {
                // accumulate this colony's taxes into the per-empire total
                Float uncollectedTaxes = empireTaxes.get(colony.getEmpireID());
                uncollectedTaxes = (uncollectedTaxes == null ? 0 : uncollectedTaxes) +
                        colony.getUncollectedTaxes();
                empireTaxes.put(colony.getEmpireID(), uncollectedTaxes);
            }
            stmt.setDouble(1, colony.getPopulationFocus());
            stmt.setDouble(2, colony.getConstructionFocus());
            stmt.setDouble(3, colony.getFarmingFocus());
            stmt.setDouble(4, colony.getMiningFocus());
            stmt.setDouble(5, colony.getPopulation());
            stmt.setDouble(6, 0); // TODO: remove this column from the database
            stmt.setInt(7, ((Colony) colony).getID());
            stmt.update();
            colony.setUncollectedTaxes(0.0f);
        }
    }

    if (!empireTaxes.isEmpty()) {
        // credit the collected taxes and read back the new balance in one statement
        sql = "UPDATE empires SET cash = cash + ? WHERE id = ? RETURNING cash";
        try (SqlStmt stmt = prepare(sql)) {
            for (Map.Entry<Integer, Float> entry : empireTaxes.entrySet()) {
                stmt.setDouble(1, entry.getValue());
                stmt.setInt(2, entry.getKey());
                SqlResult res = stmt.updateAndSelect();
                if (res.next()) {
                    double totalCash = res.getDouble(1);
                    // send a notification that cash has been updated
                    new NotificationController().sendNotificationToOnlineEmpire(entry.getKey(),
                            "cash", Double.toString(totalCash));
                }
            }
        }
    }

    if (needDelete) {
        // delete the dead colonies found above, and drop them from the in-memory star
        ArrayList<BaseColony> toRemove = new ArrayList<BaseColony>();
        sql = "DELETE FROM colonies WHERE id = ?";
        try (SqlStmt stmt = prepare(sql)) {
            for (BaseColony colony : star.getColonies()) {
                if (colony.getPopulation() > MIN_POPULATION) {
                    continue;
                }
                stmt.setInt(1, ((Colony) colony).getID());
                stmt.update();
                toRemove.add(colony);
            }
            star.getColonies().removeAll(toRemove);
        }
    }
}
// Persists the star's fleets in three passes: UPDATE existing fleets, INSERT fleets with
// no key yet, and DELETE fleets whose time_destroyed has passed (together with their
// fleet_upgrades rows).
private void updateFleets(Star star) throws Exception {
    boolean needInsert = false;
    boolean needDelete = false;
    DateTime now = DateTime.now();

    String sql = "UPDATE fleets SET" +
                " star_id = ?," +
                " sector_id = ?," +
                " num_ships = ?," +
                " stance = ?," +
                " state = ?," +
                " state_start_time = ?," +
                " eta = ?," +
                " target_star_id = ?," +
                " target_fleet_id = ?," +
                " time_destroyed = ?," +
                " notes = ?" +
              " WHERE id = ?";
    try (SqlStmt stmt = prepare(sql)) {
        for (BaseFleet baseFleet : star.getFleets()) {
            if (baseFleet.getNumShips() < 0) {
                // a negative ship count indicates corrupted state; refuse to persist it
                throw new RequestException(500,
                        "Cannot have < 0 ships in a fleet. StarID=" + star.getID());
            }
            if (baseFleet.getKey() == null) {
                // new fleet: handled by the insert pass below
                needInsert = true;
                continue;
            }
            if (baseFleet.getTimeDestroyed() != null && baseFleet.getTimeDestroyed().isBefore(now)) {
                // destroyed fleet: handled by the delete pass below
                needDelete = true;
                continue;
            }
            Fleet fleet = (Fleet) baseFleet;
            stmt.setInt(1, star.getID());
            stmt.setInt(2, star.getSectorID());
            stmt.setDouble(3, fleet.getNumShips());
            stmt.setInt(4, fleet.getStance().getValue());
            stmt.setInt(5, fleet.getState().getValue());
            stmt.setDateTime(6, fleet.getStateStartTime());
            stmt.setDateTime(7, fleet.getEta());
            stmt.setInt(8, fleet.getDestinationStarID());
            stmt.setInt(9, fleet.getTargetFleetID());
            stmt.setDateTime(10, fleet.getTimeDestroyed());
            stmt.setString(11, fleet.getNotes());
            stmt.setInt(12, fleet.getID());
            stmt.update();
        }
    }

    if (needInsert) {
        sql = "INSERT INTO fleets (star_id, sector_id, design_id, empire_id, num_ships," +
                                 " stance, state, state_start_time, eta, target_star_id," +
                                 " target_fleet_id, time_destroyed, notes)" +
             " VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)";
        try (SqlStmt stmt = prepare(sql, Statement.RETURN_GENERATED_KEYS)) {
            for (BaseFleet baseFleet : star.getFleets()) {
                if (baseFleet.getKey() != null) {
                    continue;
                }
                Fleet fleet = (Fleet) baseFleet;
                stmt.setInt(1, fleet.getStarID());
                stmt.setInt(2, fleet.getSectorID());
                stmt.setString(3, fleet.getDesignID());
                if (fleet.getEmpireKey() != null) {
                    stmt.setInt(4, fleet.getEmpireID());
                } else {
                    // native (empire-less) fleet
                    stmt.setNull(4);
                }
                stmt.setDouble(5, fleet.getNumShips());
                stmt.setInt(6, fleet.getStance().getValue());
                stmt.setInt(7, fleet.getState().getValue());
                stmt.setDateTime(8, fleet.getStateStartTime());
                stmt.setDateTime(9, fleet.getEta());
                stmt.setInt(10, fleet.getDestinationStarID());
                stmt.setInt(11, fleet.getTargetFleetID());
                stmt.setDateTime(12, fleet.getTimeDestroyed());
                stmt.setString(13, fleet.getNotes());
                stmt.update();
                // remember the new database ID on the in-memory fleet
                fleet.setID(stmt.getAutoGeneratedID());
            }
        }
    }

    if (needDelete) {
        // delete the upgrades first (they reference the fleet rows)
        sql = "DELETE FROM fleet_upgrades WHERE fleet_id = ?";
        try (SqlStmt stmt = prepare(sql)) {
            for (BaseFleet baseFleet : star.getFleets()) {
                if (baseFleet.getTimeDestroyed() != null && baseFleet.getTimeDestroyed().isBefore(now)) {
                    Fleet fleet = (Fleet) baseFleet;
                    stmt.setInt(1, fleet.getID());
                    stmt.update();
                }
            }
        } catch(Exception e) {
            throw new RequestException(e);
        }

        sql = "DELETE FROM fleets WHERE id = ?";
        ArrayList<BaseFleet> toRemove = new ArrayList<BaseFleet>();
        try (SqlStmt stmt = prepare(sql)) {
            for (BaseFleet baseFleet : star.getFleets()) {
                if (baseFleet.getTimeDestroyed() != null && baseFleet.getTimeDestroyed().isBefore(now)) {
                    Fleet fleet = (Fleet) baseFleet;
                    stmt.setInt(1, fleet.getID());
                    stmt.update();
                    toRemove.add(baseFleet);
                }
            }
            star.getFleets().removeAll(toRemove);
        } catch(Exception e) {
            throw new RequestException(e);
        }
    }
}
// Persists fleet upgrades with a delete-then-reinsert: all of this star's upgrade rows
// are removed, then re-created from the in-memory fleets (skipping destroyed fleets).
private void updateFleetUpgrades(Star star) throws Exception {
    String sql = "DELETE FROM fleet_upgrades WHERE star_id = ?";
    try (SqlStmt stmt = prepare(sql)) {
        stmt.setInt(1, star.getID());
        stmt.update();
    }

    DateTime now = DateTime.now();
    sql = "INSERT INTO fleet_upgrades (star_id, fleet_id, upgrade_id, extra) VALUES (?, ?, ?, ?)";
    try (SqlStmt stmt = prepare(sql)) {
        // parameter 1 (star_id) is the same for every row, so it's bound once up front
        stmt.setInt(1, star.getID());
        for (BaseFleet baseFleet : star.getFleets()) {
            if (baseFleet.getUpgrades() == null || baseFleet.getUpgrades().isEmpty()) {
                continue;
            }
            if (baseFleet.getTimeDestroyed() != null && baseFleet.getTimeDestroyed().isBefore(now)) {
                continue;
            }
            Fleet fleet = (Fleet) baseFleet;
            stmt.setInt(2, fleet.getID());
            for (BaseFleetUpgrade upgrade : fleet.getUpgrades()) {
                stmt.setString(3, upgrade.getUpgradeID());
                stmt.setString(4, upgrade.getExtra());
                stmt.update();
            }
        }
    }
}
// Persists the mutable fields of each build request on this star.
private void updateBuildRequests(Star star) throws Exception {
    String sql = "UPDATE build_requests SET progress = ?, end_time = ?, disable_notification = ? WHERE id = ?";
    try (SqlStmt stmt = prepare(sql)) {
        for (BaseBuildRequest baseBuildRequest : star.getBuildRequests()) {
            BuildRequest buildRequest = (BuildRequest) baseBuildRequest;
            stmt.setDouble(1, buildRequest.getProgress(false));
            stmt.setDateTime(2, buildRequest.getEndTime());
            stmt.setInt(3, buildRequest.getDisableNotification() ? 1 : 0);
            stmt.setInt(4, buildRequest.getID());
            stmt.update();
        }
    }
}
// Persists the star's combat report: INSERT if it has no key yet, otherwise UPDATE.
private void updateCombatReport(Star star, CombatReport combatReport) throws Exception {
    Messages.CombatReport.Builder pb = Messages.CombatReport.newBuilder();
    combatReport.toProtocolBuffer(pb);

    String sql;
    if (combatReport.getKey() == null) {
        sql = "INSERT INTO combat_reports (star_id, start_time, end_time, rounds) VALUES (?, ?, ?, ?)";
    } else {
        sql = "UPDATE combat_reports SET star_id = ?, start_time = ?, end_time = ?, rounds = ? WHERE id = ?";
    }
    try (SqlStmt stmt = prepare(sql, Statement.RETURN_GENERATED_KEYS)) {
        stmt.setInt(1, star.getID());
        stmt.setDateTime(2, combatReport.getStartTime());
        stmt.setDateTime(3, combatReport.getEndTime());
        stmt.setBytes(4, pb.build().toByteArray());
        if (combatReport.getKey() != null) {
            stmt.setInt(5, Integer.parseInt(combatReport.getKey()));
        }
        stmt.update();
        // NOTE(review): getAutoGeneratedID() is also called on the UPDATE path, where no
        // key is generated -- verify SqlStmt tolerates this (or guard with getKey() == null).
        combatReport.setID(stmt.getAutoGeneratedID());
    }
}
// Loads the colonies for the given stars and attaches each to its owning star.
private void populateColonies(List<Star> stars, String inClause) throws Exception {
    String sql = "SELECT * FROM colonies WHERE star_id IN "+inClause;
    try (SqlStmt stmt = prepare(sql)) {
        SqlResult res = stmt.select();

        while (res.next()) {
            Colony colony = new Colony(res);

            for (Star star : stars) {
                if (star.getID() == colony.getStarID()) {
                    // max population for the colony is initially just its congeniality;
                    // building effects may adjust it later (see populateBuildings).
                    // NOTE: planet_index is 1-based, hence the -1 when indexing planets.
                    BasePlanet planet = star.getPlanets()[colony.getPlanetIndex() - 1];
                    colony.setMaxPopulation(planet.getPopulationCongeniality());

                    star.getColonies().add(colony);
                }
            }
        }
    }
}
// Loads the fleets (plus the owning empire's alliance_id) for the given stars, attaches
// them to their stars, then loads and attaches each fleet's upgrades.
private void populateFleets(List<Star> stars, String inClause) throws Exception {
    String sql = "SELECT fleets.*, empires.alliance_id" +
                " FROM fleets" +
                " LEFT OUTER JOIN empires ON empires.id = fleets.empire_id" +
                " WHERE star_id IN "+inClause;

    ArrayList<Fleet> fleets = new ArrayList<Fleet>();
    try (SqlStmt stmt = prepare(sql)) {
        SqlResult res = stmt.select();

        while (res.next()) {
            Fleet fleet = new Fleet(res);
            fleets.add(fleet);

            for (Star star : stars) {
                if (star.getID() == fleet.getStarID()) {
                    star.getFleets().add(fleet);
                }
            }
        }
    }

    sql = "SELECT * FROM fleet_upgrades WHERE star_id IN "+inClause;
    try (SqlStmt stmt = prepare(sql)) {
        SqlResult res = stmt.select();

        while (res.next()) {
            FleetUpgrade fleetUpgrade = FleetUpgrade.create(res);

            for (Fleet fleet : fleets) {
                if (fleet.getID() == fleetUpgrade.getFleetID()) {
                    fleet.getUpgrades().add(fleetUpgrade);
                    break;
                }
            }
        }
    }
}
// Loads the empire presences for the given stars and attaches each to its star.
private void populateEmpires(List<Star> stars, String inClause) throws Exception {
    String sql = "SELECT * FROM empire_presences WHERE star_id IN "+inClause;
    try (SqlStmt stmt = prepare(sql)) {
        SqlResult res = stmt.select();

        while (res.next()) {
            EmpirePresence empirePresence = new EmpirePresence(res);

            for (Star star : stars) {
                if (star.getID() == empirePresence.getStarID()) {
                    // by default, you get 500 max goods/minerals
                    empirePresence.setMaxGoods(500);
                    empirePresence.setMaxMinerals(500);

                    star.getEmpirePresences().add(empirePresence);
                }
            }
        }
    }
}
// Loads the build requests for the given stars and attaches each to its star.
private void populateBuildRequests(List<Star> stars, String inClause) throws Exception {
    String sql = "SELECT * FROM build_requests WHERE star_id IN "+inClause;
    try (SqlStmt stmt = prepare(sql)) {
        SqlResult res = stmt.select();

        while (res.next()) {
            int starID = res.getInt("star_id");
            Star star = null;
            for (Star thisStar : stars) {
                if (thisStar.getID() == starID) {
                    star = thisStar;
                    break;
                }
            }
            // NOTE(review): BuildRequest is constructed even when star is null -- confirm
            // the constructor tolerates a null star.
            BuildRequest buildRequest = new BuildRequest(star, res);
            if (star != null) {
                star.getBuildRequests().add(buildRequest);
            }
        }
    }
}
// Loads the buildings for the given stars, applies each building's effects to its
// colony, and attaches the building to that colony. Requires colonies to be populated
// first (see getStars).
private void populateBuildings(List<Star> stars, String inClause) throws Exception {
    String sql = "SELECT * FROM buildings WHERE star_id IN "+inClause;
    try (SqlStmt stmt = prepare(sql)) {
        SqlResult res = stmt.select();

        while (res.next()) {
            Building building = new Building(res);
            for (Star star : stars) {
                for (BaseColony baseColony : star.getColonies()) {
                    Colony colony = (Colony) baseColony;
                    if (colony.getID() == building.getColonyID()) {
                        for (Design.Effect effect : building.getDesign().getEffects(building.getLevel())) {
                            BuildingEffect buildingEffect = (BuildingEffect) effect;
                            buildingEffect.apply(star, colony, building);
                        }
                        colony.getBuildings().add(building);
                    }
                }
            }
        }
    }
}
// Loads any still-active combat report (end_time in the future) for each star. The
// "rounds" column (read via getBytes(2)) holds the serialized CombatReport protobuf.
private void populateCombatReports(List<Star> stars, String inClause) throws Exception {
    String sql = "SELECT star_id, rounds FROM combat_reports WHERE star_id IN "+inClause;
    sql += " AND end_time > ?";
    try (SqlStmt stmt = prepare(sql)) {
        stmt.setDateTime(1, DateTime.now());
        SqlResult res = stmt.select();

        while (res.next()) {
            int starID = res.getInt(1);
            Messages.CombatReport pb = Messages.CombatReport.parseFrom(res.getBytes(2));
            CombatReport combatReport = new CombatReport();
            combatReport.fromProtocolBuffer(pb);
            for (Star star : stars) {
                if (star.getID() == starID) {
                    star.setCombatReport(combatReport);
                }
            }
        }
    }
}
/**
 * Checks if any of the stars in the given list need native colonies added, and adds them
 * if so. A star qualifies when it is a normal star (not a marker or wormhole), has no
 * colonies and no fleets, and was not emptied within the last three days.
 */
private void checkNativeColonies(List<Star> stars) throws Exception {
    for (Star star : stars) {
        // marker and wormhole don't get colonies anyway
        if (star.getStarType().getType() == Star.Type.Marker ||
                star.getStarType().getType() == Star.Type.Wormhole) {
            continue;
        }
        // first, make sure there's no colonies and no fleets
        if (!star.getColonies().isEmpty() || !star.getFleets().isEmpty()) {
            continue;
        }
        // next, if it was only emptied 3 days ago, don't add more just yet
        if (star.getTimeEmptied() != null && star.getTimeEmptied().isAfter(DateTime.now().minusDays(3))) {
            continue;
        }

        // OK, add those native colonies!
        addNativeColonies(star);
    }
}
/**
 * Populates an empty star with native (empire-less) colonies and fleets, simulates the
 * star for the past 24 hours so it looks lived-in, and persists it.
 */
private void addNativeColonies(Star star) throws Exception {
    ArrayList<Planet> planets = new ArrayList<Planet>();
    for (int i = 0; i < star.getPlanets().length; i++) {
        planets.add((Planet) star.getPlanets()[i]);
    }
    // nothing to colonize on a star with no planets
    if (planets.isEmpty()) {
        return;
    }

    // sort the planets in order of most desirable to least desirable
    Collections.sort(planets, new Comparator<Planet>() {
        @Override
        public int compare(Planet lhs, Planet rhs) {
            double lhsScore = lhs.getPopulationCongeniality() +
                              (lhs.getFarmingCongeniality() * 0.75) +
                              (lhs.getMiningCongeniality() * 0.5);
            double rhsScore = rhs.getPopulationCongeniality() +
                              (rhs.getFarmingCongeniality() * 0.75) +
                              (rhs.getMiningCongeniality() * 0.5);
            // descending order: higher score first
            return Double.compare(rhsScore, lhsScore);
        }
    });

    Random rand = new Random();
    // up to three colonies, normally leaving at least one planet uncolonized. The old
    // bound of Math.min(3, planets.size() - 1) made Random.nextInt() throw
    // IllegalArgumentException (bound must be positive) for single-planet stars, so
    // clamp to a minimum of one colony instead.
    int maxColonies = Math.min(3, planets.size() - 1);
    int numColonies = (maxColonies <= 0) ? 1 : rand.nextInt(maxColonies) + 1;
    for (int i = 0; i < numColonies; i++) {
        Planet planet = planets.get(i);
        Colony colony = new ColonyController(getTransaction()).colonize(null, star, planet.getIndex(), 100.0f);
        colony.setConstructionFocus(0.0f);
        colony.setPopulationFocus(0.5f);
        colony.setFarmingFocus(0.5f);
        colony.setMiningFocus(0.0f);
        colony.setMaxPopulation(planet.getPopulationCongeniality());
    }

    // add between one and four native fighter fleets of 5-25 ships each
    int numFleets = rand.nextInt(4) + 1;
    for (int i = 0; i < numFleets; i++) {
        float numShips = (rand.nextInt(5) + 1) * 5;
        new FleetController(getTransaction()).createFleet(null, star, "fighter", numShips);
    }

    // simulate for 24 hours to make it look like it's been doing stuff before you got here...
    star.setLastSimulation(DateTime.now().minusHours(24));
    Simulation sim = new Simulation();
    sim.simulate(star);

    updateStar(star);
}
}
}
| |
/**
* $RCSfile$
* $Revision$
* $Date$
*
* Copyright 2003-2007 Jive Software.
*
* All rights reserved. Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jivesoftware.smack;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadFactory;
import org.jivesoftware.smack.Connection.ListenerWrapper;
import org.jivesoftware.smack.packet.IQ;
import org.jivesoftware.smack.packet.Packet;
import org.jivesoftware.smack.packet.Presence;
import org.jivesoftware.smack.packet.XMPPError;
import org.jivesoftware.smack.parsing.ParsingExceptionCallback;
import org.jivesoftware.smack.parsing.UnparsedIQ;
import org.jivesoftware.smack.parsing.UnparsedMessage;
import org.jivesoftware.smack.parsing.UnparsedPresence;
import org.jivesoftware.smack.sasl.SASLMechanism.Challenge;
import org.jivesoftware.smack.sasl.SASLMechanism.Failure;
import org.jivesoftware.smack.sasl.SASLMechanism.Success;
import org.jivesoftware.smack.util.PacketParserUtils;
import org.xmlpull.v1.XmlPullParser;
import org.xmlpull.v1.XmlPullParserException;
import org.xmlpull.v1.XmlPullParserFactory;
/**
* Listens for XML traffic from the XMPP server and parses it into packet objects.
* The packet reader also invokes all packet listeners and collectors.<p>
*
* @see Connection#createPacketCollector
* @see Connection#addPacketListener
* @author Matt Tucker
*/
class PacketReader {
private Thread readerThread;
private ExecutorService listenerExecutor;
private final XMPPConnection connection;
private XmlPullParser parser;
volatile boolean done;
private String connectionID = null;
/** Creates a packet reader for the given connection and initializes it. */
protected PacketReader(final XMPPConnection connection) {
    this.connection = connection;
    this.init();
}
/**
 * Initializes the reader in order to be used. The reader is initialized during the
 * first connection and when reconnecting after an abrupt disconnection.
 */
protected void init() {
    done = false;
    connectionID = null;

    // the reader thread parses incoming XML off the connection's stream
    readerThread = new Thread() {
        @Override
        public void run() {
            parsePackets(this);
        }
    };
    readerThread.setName("Smack Packet Reader (" + connection.connectionCounterValue + ")");
    readerThread.setDaemon(true);

    // Create an executor to deliver incoming packets to listeners. We'll use a single
    // thread with an unbounded queue.
    listenerExecutor = Executors.newSingleThreadExecutor(new ThreadFactory() {

        @Override
        public Thread newThread(Runnable runnable) {
            Thread thread = new Thread(runnable,
                    "Smack Listener Processor (" + connection.connectionCounterValue + ")");
            thread.setDaemon(true);
            return thread;
        }
    });

    resetParser();
}
/**
 * Starts the packet reader thread and returns once a connection to the server
 * has been established. A connection will be attempted for up to three times the
 * configured packet reply timeout. An XMPPException is thrown if the server fails
 * to send an opening stream back within that time.
 *
 * @throws XMPPException if the server fails to send an opening stream back
 *      within three packet-reply-timeout periods.
 */
synchronized public void startup() throws XMPPException {
    readerThread.start();
    // Wait for stream tag before returning. We'll wait a couple of seconds before
    // giving up and throwing an error.
    try {
        // A waiting thread may be woken up before the wait time or a notify
        // (although this is a rare thing). Therefore, we continue waiting
        // until either a connectionID has been set (and hence a notify was
        // made) or the total wait time has elapsed.
        int waitTime = SmackConfiguration.getPacketReplyTimeout();
        wait(3 * waitTime);
    }
    catch (InterruptedException ie) {
        // Ignore.
    }
    if (connectionID == null) {
        throw new XMPPException("Connection failed. No response from server.");
    }
    else {
        connection.connectionID = connectionID;
    }
}
/**
 * Shuts the packet reader down: notifies connection listeners of the closure (unless
 * already shut down), marks the reader done, and stops the listener executor.
 */
public void shutdown() {
    // Notify connection listeners of the connection closing if done hasn't already been set.
    if (!done) {
        for (ConnectionListener listener : connection.getConnectionListeners()) {
            try {
                listener.connectionClosed();
            }
            catch (Exception e) {
                // Catch and print any exception so we can recover
                // from a faulty listener and finish the shutdown process
                e.printStackTrace();
            }
        }
    }
    done = true;

    // Shut down the listener executor.
    listenerExecutor.shutdown();
}
/**
 * Releases the per-connection state held for the reader: all registered packet
 * listeners and packet collectors are discarded.
 */
void cleanup() {
    // the two collections are independent, so the clearing order is immaterial
    connection.collectors.clear();
    connection.recvListeners.clear();
}
/**
 * Resets the parser using the latest connection's reader. Resetting the parser is
 * necessary when the plain connection has been secured or when a new opening stream
 * element is going to be sent by the server.
 */
private void resetParser() {
    try {
        XmlPullParserFactory factory = XmlPullParserFactory.newInstance();
        parser = factory.newPullParser();
        parser.setFeature(XmlPullParser.FEATURE_PROCESS_NAMESPACES, true);
        parser.setInput(connection.reader);
    }
    catch (XmlPullParserException xppe) {
        xppe.printStackTrace();
    }
}
/**
 * Parse top-level packets in order to process them further.
 *
 * <p>Runs the pull-parser event loop: each top-level START_TAG is dispatched by
 * element name (message/iq/presence stanzas, stream setup, TLS, SASL and
 * compression negotiation). The loop exits when the reader is shut down, the
 * document ends, or this thread is no longer the active reader thread.</p>
 *
 * @param thread the thread that is being used by the reader to parse incoming packets.
 */
private void parsePackets(Thread thread) {
    try {
        int eventType = parser.getEventType();
        do {
            if (eventType == XmlPullParser.START_TAG) {
                int parserDepth = parser.getDepth();
                ParsingExceptionCallback callback = connection.getParsingExceptionCallback();
                if (parser.getName().equals("message")) {
                    Packet packet;
                    try {
                        packet = PacketParserUtils.parseMessage(parser);
                    } catch (Exception e) {
                        // Parsing failed: capture the raw stanza content up to the
                        // element's closing tag and hand it to the optional callback.
                        String content = PacketParserUtils.parseContentDepth(parser, parserDepth);
                        UnparsedMessage message = new UnparsedMessage(content, e);
                        if (callback != null) {
                            callback.messageParsingException(e, message);
                        }
                        // NOTE(review): 'continue' in this do-while skips the
                        // 'eventType = parser.next()' at the loop bottom, so the next
                        // iteration runs with a stale eventType; this relies on
                        // parseContentDepth() having advanced the parser past the
                        // offending stanza — verify against PacketParserUtils.
                        continue;
                    }
                    processPacket(packet);
                }
                else if (parser.getName().equals("iq")) {
                    IQ iq;
                    try {
                        iq = PacketParserUtils.parseIQ(parser, connection);
                    } catch (Exception e) {
                        // Same recovery path as for messages: preserve raw content.
                        String content = PacketParserUtils.parseContentDepth(parser, parserDepth);
                        UnparsedIQ uniq = new UnparsedIQ(content, e);
                        if (callback != null) {
                            callback.iqParsingException(e, uniq);
                        }
                        continue;
                    }
                    processPacket(iq);
                }
                else if (parser.getName().equals("presence")) {
                    Presence presence;
                    try {
                        presence = PacketParserUtils.parsePresence(parser);
                    } catch (Exception e) {
                        // Same recovery path as for messages: preserve raw content.
                        String content = PacketParserUtils.parseContentDepth(parser, parserDepth);
                        UnparsedPresence unpresence = new UnparsedPresence(content, e);
                        if (callback != null) {
                            callback.presenceParsingException(e, unpresence);
                        }
                        continue;
                    }
                    processPacket(presence);
                }
                // We found an opening stream. Record information about it, then notify
                // the connectionID lock so that the packet reader startup can finish.
                else if (parser.getName().equals("stream")) {
                    // Ensure the correct jabber:client namespace is being used.
                    if ("jabber:client".equals(parser.getNamespace(null))) {
                        // Get the connection id.
                        for (int i=0; i<parser.getAttributeCount(); i++) {
                            if (parser.getAttributeName(i).equals("id")) {
                                // Save the connectionID
                                connectionID = parser.getAttributeValue(i);
                                if (!"1.0".equals(parser.getAttributeValue("", "version"))) {
                                    // Notify that a stream has been opened if the
                                    // server is not XMPP 1.0 compliant otherwise make the
                                    // notification after TLS has been negotiated or if TLS
                                    // is not supported
                                    releaseConnectionIDLock();
                                }
                            }
                            else if (parser.getAttributeName(i).equals("from")) {
                                // Use the server name that the server says that it is.
                                connection.config.setServiceName(parser.getAttributeValue(i));
                            }
                        }
                    }
                }
                else if (parser.getName().equals("error")) {
                    // Stream-level error: fatal for this connection.
                    throw new XMPPException(PacketParserUtils.parseStreamError(parser));
                }
                else if (parser.getName().equals("features")) {
                    parseFeatures(parser);
                }
                else if (parser.getName().equals("proceed")) {
                    // Secure the connection by negotiating TLS
                    connection.proceedTLSReceived();
                    // Reset the state of the parser since a new stream element is going
                    // to be sent by the server
                    resetParser();
                }
                else if (parser.getName().equals("failure")) {
                    String namespace = parser.getNamespace(null);
                    if ("urn:ietf:params:xml:ns:xmpp-tls".equals(namespace)) {
                        // TLS negotiation has failed. The server will close the connection
                        throw new Exception("TLS negotiation has failed");
                    }
                    else if ("http://jabber.org/protocol/compress".equals(namespace)) {
                        // Stream compression has been denied. This is a recoverable
                        // situation. It is still possible to authenticate and
                        // use the connection but using an uncompressed connection
                        connection.streamCompressionDenied();
                    }
                    else {
                        // SASL authentication has failed. The server may close the connection
                        // depending on the number of retries
                        final Failure failure = PacketParserUtils.parseSASLFailure(parser);
                        processPacket(failure);
                        connection.getSASLAuthentication().authenticationFailed();
                    }
                }
                else if (parser.getName().equals("challenge")) {
                    // The server is challenging the SASL authentication made by the client
                    String challengeData = parser.nextText();
                    processPacket(new Challenge(challengeData));
                    connection.getSASLAuthentication().challengeReceived(challengeData);
                }
                else if (parser.getName().equals("success")) {
                    processPacket(new Success(parser.nextText()));
                    // We now need to bind a resource for the connection
                    // Open a new stream and wait for the response
                    connection.packetWriter.openStream();
                    // Reset the state of the parser since a new stream element is going
                    // to be sent by the server
                    resetParser();
                    // The SASL authentication with the server was successful. The next step
                    // will be to bind the resource
                    connection.getSASLAuthentication().authenticated();
                }
                else if (parser.getName().equals("compressed")) {
                    // Server confirmed that it's possible to use stream compression. Start
                    // stream compression
                    connection.startStreamCompression();
                    // Reset the state of the parser since a new stream element is going
                    // to be sent by the server
                    resetParser();
                }
            }
            else if (eventType == XmlPullParser.END_TAG) {
                if (parser.getName().equals("stream")) {
                    // The server closed the stream: disconnect the connection.
                    connection.disconnect();
                }
            }
            eventType = parser.next();
        } while (!done && eventType != XmlPullParser.END_DOCUMENT && thread == readerThread);
    }
    catch (Exception e) {
        // The exception can be ignored if the connection is 'done'
        // or if it was caused because the socket got closed
        if (!(done || connection.isSocketClosed())) {
            // Close the connection and notify connection listeners of the
            // error.
            connection.notifyConnectionError(e);
        }
    }
}
/**
 * Releases the connection ID lock so that the thread that was waiting can resume. The
 * lock will be released when one of the following three conditions is met:<p>
 *
 * 1) An opening stream was sent from a non XMPP 1.0 compliant server
 * 2) Stream features were received from an XMPP 1.0 compliant server that does not support TLS
 * 3) TLS negotiation was successful
 */
private synchronized void releaseConnectionIDLock() {
    // Wake the startup thread blocked in wait() on this object's monitor.
    // NOTE(review): notify() assumes a single waiter on this monitor — the
    // visible startup path is the only one that waits; confirm before adding
    // further waiters (notifyAll() would be the defensive choice).
    notify();
}
/**
 * Processes a packet after it's been fully parsed by looping through the installed
 * packet collectors and listeners and letting them examine the packet to see if
 * they are a match with the filter.
 *
 * @param packet the packet to process.
 */
private void processPacket(Packet packet) {
    // Nothing to do for a null packet.
    if (packet == null) {
        return;
    }
    // First give every registered collector a chance to grab the packet.
    for (PacketCollector packetCollector : connection.getPacketCollectors()) {
        packetCollector.processPacket(packet);
    }
    // Then hand the packet off to the listener executor for asynchronous delivery.
    listenerExecutor.submit(new ListenerNotification(packet));
}
/**
 * Parses the stream features advertised by the server inside a {@code <features/>}
 * element: STARTTLS availability, SASL mechanisms, resource binding, roster
 * versioning, entity caps, sessions and stream compression. After parsing,
 * enforces the TLS security policy and — when TLS is not pending — releases the
 * connection ID lock so startup can continue.
 *
 * @param parser the pull parser positioned at the features element
 * @throws Exception if a parsing error occurs or the TLS policy cannot be satisfied
 */
private void parseFeatures(XmlPullParser parser) throws Exception {
    boolean startTLSReceived = false;
    boolean startTLSRequired = false;
    boolean done = false;
    while (!done) {
        int eventType = parser.next();
        if (eventType == XmlPullParser.START_TAG) {
            if (parser.getName().equals("starttls")) {
                // Server offers TLS; whether it is mandatory is decided by a
                // nested <required/> element handled in the END_TAG branch below.
                startTLSReceived = true;
            }
            else if (parser.getName().equals("mechanisms")) {
                // The server is reporting available SASL mechanisms. Store this information
                // which will be used later while logging (i.e. authenticating) into
                // the server
                connection.getSASLAuthentication()
                        .setAvailableSASLMethods(PacketParserUtils.parseMechanisms(parser));
            }
            else if (parser.getName().equals("bind")) {
                // The server requires the client to bind a resource to the stream
                connection.getSASLAuthentication().bindingRequired();
            }
            else if(parser.getName().equals("ver")){
                // Roster versioning is supported by the server.
                connection.getConfiguration().setRosterVersioningAvailable(true);
            }
            // Set the entity caps node for the server if one is sent
            // See http://xmpp.org/extensions/xep-0115.html#stream
            else if (parser.getName().equals("c")) {
                String node = parser.getAttributeValue(null, "node");
                String ver = parser.getAttributeValue(null, "ver");
                if (ver != null && node != null) {
                    String capsNode = node + "#" + ver;
                    // In order to avoid a dependency from smack to smackx
                    // we have to set the services caps node in the connection
                    // and not directly in the EntityCapsManager
                    connection.setServiceCapsNode(capsNode);
                }
            }
            else if (parser.getName().equals("session")) {
                // The server supports sessions
                connection.getSASLAuthentication().sessionsSupported();
            }
            else if (parser.getName().equals("compression")) {
                // The server supports stream compression
                connection.setAvailableCompressionMethods(PacketParserUtils.parseCompressionMethods(parser));
            }
            else if (parser.getName().equals("register")) {
                // NOTE(review): in-band registration feature is intentionally
                // ignored here — nothing in this class reacts to it; confirm.
            }
        }
        else if (eventType == XmlPullParser.END_TAG) {
            if (parser.getName().equals("starttls")) {
                // Confirm to the server that we want to use TLS
                connection.startTLSReceived(startTLSRequired);
            }
            else if (parser.getName().equals("required") && startTLSReceived) {
                // <required/> nested inside <starttls/> makes TLS mandatory.
                startTLSRequired = true;
            }
            else if (parser.getName().equals("features")) {
                // End of the features element — stop parsing.
                done = true;
            }
        }
    }
    // If TLS is required but the server doesn't offer it, disconnect
    // from the server and throw an error. First check if we've already negotiated TLS
    // and are secure, however (features get parsed a second time after TLS is established).
    if (!connection.isSecureConnection()) {
        if (!startTLSReceived && connection.getConfiguration().getSecurityMode() ==
                ConnectionConfiguration.SecurityMode.required)
        {
            throw new XMPPException("Server does not support security (TLS), " +
                    "but security required by connection configuration.",
                    new XMPPError(XMPPError.Condition.forbidden));
        }
    }
    // Release the lock after TLS has been negotiated or we are not interested in TLS
    if (!startTLSReceived || connection.getConfiguration().getSecurityMode() ==
            ConnectionConfiguration.SecurityMode.disabled)
    {
        releaseConnectionIDLock();
    }
}
/**
 * Task that delivers one fully parsed packet to every registered packet listener.
 */
private class ListenerNotification implements Runnable {

    /** The packet handed to each listener wrapper. */
    private final Packet toDeliver;

    public ListenerNotification(Packet packet) {
        this.toDeliver = packet;
    }

    @Override
    public void run() {
        // Fan the packet out to all listener wrappers registered on the connection.
        for (ListenerWrapper wrapper : connection.recvListeners.values()) {
            wrapper.notifyListener(toDeliver);
        }
    }
}
}
| |
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.externalSystem.service.project.wizard;
import com.intellij.ide.JavaUiBundle;
import com.intellij.ide.util.projectWizard.WizardContext;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.application.PathManager;
import com.intellij.openapi.components.PersistentStateComponent;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.externalSystem.importing.ImportSpecBuilder;
import com.intellij.openapi.externalSystem.model.DataNode;
import com.intellij.openapi.externalSystem.model.ExternalSystemDataKeys;
import com.intellij.openapi.externalSystem.model.ProjectSystemId;
import com.intellij.openapi.externalSystem.model.internal.InternalExternalProjectInfo;
import com.intellij.openapi.externalSystem.model.project.ProjectData;
import com.intellij.openapi.externalSystem.service.execution.ExternalSystemJdkUtil;
import com.intellij.openapi.externalSystem.service.execution.ProgressExecutionMode;
import com.intellij.openapi.externalSystem.service.project.*;
import com.intellij.openapi.externalSystem.service.settings.AbstractImportFromExternalSystemControl;
import com.intellij.openapi.externalSystem.service.ui.ExternalProjectDataSelectorDialog;
import com.intellij.openapi.externalSystem.settings.AbstractExternalSystemSettings;
import com.intellij.openapi.externalSystem.settings.ExternalProjectSettings;
import com.intellij.openapi.externalSystem.util.ExternalSystemApiUtil;
import com.intellij.openapi.module.ModifiableModuleModel;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleManager;
import com.intellij.openapi.options.ConfigurationException;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.project.ProjectManager;
import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.openapi.roots.ModifiableRootModel;
import com.intellij.openapi.roots.ui.configuration.ModulesConfigurator;
import com.intellij.openapi.roots.ui.configuration.ModulesProvider;
import com.intellij.openapi.startup.StartupManager;
import com.intellij.openapi.util.*;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.packaging.artifacts.ModifiableArtifactModel;
import com.intellij.projectImport.ProjectImportBuilder;
import com.intellij.util.SmartList;
import org.jetbrains.annotations.ApiStatus;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.File;
import java.util.*;
import static com.intellij.openapi.externalSystem.util.ExternalSystemUtil.invokeLater;
import static com.intellij.openapi.externalSystem.util.ExternalSystemUtil.refreshProject;
/**
 * GoF builder for external system backed projects.
 *
 * <p>Drives the "import project" wizard flow for a concrete external system: resolves the
 * external project in preview mode ({@link #ensureProjectIsDefined}), lets the user adjust the
 * settings via the {@code C} control, and finally commits the resolved structure to the IDE
 * ({@link #commit}).</p>
 *
 * @author Denis Zhdanov
 */
public abstract class AbstractExternalProjectImportBuilder<C extends AbstractImportFromExternalSystemControl>
    extends ProjectImportBuilder<DataNode<ProjectData>>
{
    private static final Logger LOG = Logger.getInstance(AbstractExternalProjectImportBuilder.class);

    @NotNull private final ProjectDataManager myProjectDataManager;
    // Lazily created settings control; obtained via getControl().
    @NotNull private final NotNullLazyValue<C> myControlValue;
    // Identifies the external system (e.g. Gradle) this builder imports from.
    @NotNull private final ProjectSystemId myExternalSystemId;

    // Result of the preview resolve; set by ensureProjectIsDefined(), consumed and
    // cleared during commit()/finishImport().
    private DataNode<ProjectData> myExternalProjectNode;

    /**
     * @deprecated use {@link AbstractExternalProjectImportBuilder#AbstractExternalProjectImportBuilder(ProjectDataManager, NotNullFactory, ProjectSystemId)}
     */
    @Deprecated
    @ApiStatus.ScheduledForRemoval(inVersion = "2021.3")
    public AbstractExternalProjectImportBuilder(@NotNull ProjectDataManager projectDataManager,
                                                @NotNull C control,
                                                @NotNull ProjectSystemId externalSystemId) {
        myProjectDataManager = projectDataManager;
        myControlValue = NotNullLazyValue.createValue(() -> control);
        myExternalSystemId = externalSystemId;
    }

    public AbstractExternalProjectImportBuilder(@NotNull ProjectDataManager projectDataManager,
                                                @NotNull NotNullFactory<? extends C> controlFactory,
                                                @NotNull ProjectSystemId externalSystemId) {
        myProjectDataManager = projectDataManager;
        myControlValue = NotNullLazyValue.createValue(controlFactory);
        myExternalSystemId = externalSystemId;
    }

    @Override
    public List<DataNode<ProjectData>> getList() {
        return Collections.singletonList(myExternalProjectNode);
    }

    @Override
    public boolean isMarked(DataNode<ProjectData> element) {
        // Every element is always considered selected for import.
        return true;
    }

    @Override
    public void setOpenProjectSettingsAfter(boolean on) {
        // Intentionally empty: this builder does not support the option.
    }

    /**
     * Returns the settings control bound to the given project (may be {@code null} for
     * the default/new-project case).
     */
    @NotNull
    public C getControl(@Nullable Project currentProject) {
        C control = getControl();
        control.setCurrentProject(currentProject);
        return control;
    }

    /**
     * Prepares the wizard step: ensures a project JDK is set, resets the settings
     * control and links it to the file chosen for import.
     */
    public void prepare(@NotNull WizardContext context) {
        if (context.getProjectJdk() == null) {
            context.setProjectJdk(resolveProjectJdk(context));
        }
        C control = getControl();
        control.setShowProjectFormatPanel(context.isCreatingNewProject());
        control.reset(context, null);
        String pathToUse = getFileToImport();
        control.setLinkedProjectPath(pathToUse);
        doPrepare(context);
    }

    /**
     * Picks a JDK for the (possibly default) project; returns {@code null} when only
     * the IDE-internal Java would be available.
     */
    @Nullable
    protected Sdk resolveProjectJdk(@NotNull WizardContext context) {
        Project project = context.getProject() != null ? context.getProject() : ProjectManager.getInstance().getDefaultProject();
        final Pair<String, Sdk> sdkPair = ExternalSystemJdkUtil.getAvailableJdk(project);
        if (!ExternalSystemJdkUtil.USE_INTERNAL_JAVA.equals(sdkPair.first)) {
            return sdkPair.second;
        }
        return null;
    }

    protected abstract void doPrepare(@NotNull WizardContext context);

    /**
     * Commits the resolved external project to the IDE: stores the linked project
     * settings, optionally shows the selective-import dialog and triggers the final
     * import (immediately or once the project is initialized).
     */
    @Override
    public List<Module> commit(final Project project,
                               final ModifiableModuleModel model,
                               final ModulesProvider modulesProvider,
                               final ModifiableArtifactModel artifactModel)
    {
        project.putUserData(ExternalSystemDataKeys.NEWLY_IMPORTED_PROJECT, Boolean.TRUE);
        final DataNode<ProjectData> externalProjectNode = getExternalProjectNode();
        if (externalProjectNode != null) {
            beforeCommit(externalProjectNode, project);
        }
        // A non-null modifiable model means we were invoked from the project structure UI.
        final boolean isFromUI = model != null;
        final List<Module> modules = new SmartList<>();
        // Tracks which ModifiableRootModel belongs to which module so that commit()
        // of the models provider can collect the affected modules.
        final Map<ModifiableRootModel, Module> moduleMap = new IdentityHashMap<>();
        final IdeModifiableModelsProvider modelsProvider = isFromUI ? new IdeUIModifiableModelsProvider(
            project, model, (ModulesConfigurator)modulesProvider, artifactModel) {
            @Override
            protected ModifiableRootModel doGetModifiableRootModel(Module module) {
                ModifiableRootModel modifiableRootModel = super.doGetModifiableRootModel(module);
                moduleMap.put(modifiableRootModel, module);
                return modifiableRootModel;
            }
            @Override
            public void commit() {
                super.commit();
                for (Map.Entry<ModifiableRootModel, Module> moduleEntry : moduleMap.entrySet()) {
                    modules.add(moduleEntry.getValue());
                }
            }
        } : new IdeModifiableModelsProviderImpl(project){
            @NotNull
            @Override
            protected ModifiableRootModel doGetModifiableRootModel(@NotNull Module module) {
                ModifiableRootModel modifiableRootModel = super.doGetModifiableRootModel(module);
                moduleMap.put(modifiableRootModel, module);
                return modifiableRootModel;
            }
            @Override
            public void commit() {
                super.commit();
                for (Map.Entry<ModifiableRootModel, Module> moduleEntry : moduleMap.entrySet()) {
                    // NOTE(review): only non-writable (already committed) root models'
                    // modules are collected here — confirm this mirrors platform behavior.
                    if (!moduleEntry.getKey().isWritable()) {
                        modules.add(moduleEntry.getValue());
                    }
                }
            }
        };
        AbstractExternalSystemSettings systemSettings = ExternalSystemApiUtil.getSettings(project, myExternalSystemId);
        final ExternalProjectSettings projectSettings = getCurrentExternalProjectSettings();
        //noinspection unchecked
        Set<ExternalProjectSettings> projects = new HashSet<>(systemSettings.getLinkedProjectsSettings());
        // add current importing project settings to linked projects settings or replace if similar already exist
        projects.remove(projectSettings);
        projects.add(projectSettings);
        //noinspection unchecked
        systemSettings.copyFrom(getControl().getSystemSettings());
        //noinspection unchecked
        systemSettings.setLinkedProjectsSettings(projects);
        if (externalProjectNode != null) {
            // Offer selective data import unless running headless.
            if (systemSettings.showSelectiveImportDialogOnInitialImport() && !ApplicationManager.getApplication().isHeadlessEnvironment()) {
                ExternalProjectDataSelectorDialog dialog = new ExternalProjectDataSelectorDialog(
                    project, new InternalExternalProjectInfo(myExternalSystemId, projectSettings.getExternalProjectPath(), externalProjectNode));
                if (dialog.hasMultipleDataToSelect()) {
                    dialog.showAndGet();
                } else {
                    // Nothing to choose: dispose the dialog without showing it.
                    Disposer.dispose(dialog.getDisposable());
                }
            }
            if (!project.isInitialized()) {
                StartupManager.getInstance(project).runWhenProjectIsInitialized(
                    () -> finishImport(project, externalProjectNode, isFromUI, modules, modelsProvider, projectSettings));
            }
            else finishImport(project, externalProjectNode, isFromUI, modules, modelsProvider, projectSettings);
        }
        return modules;
    }

    /**
     * Applies the resolved project data and schedules the full (non-preview) refresh.
     * When invoked from the UI, the refresh is deferred until the dialog closed and the
     * modules were actually committed.
     */
    protected void finishImport(final Project project,
                                DataNode<ProjectData> externalProjectNode,
                                boolean isFromUI,
                                final List<Module> modules,
                                IdeModifiableModelsProvider modelsProvider, final ExternalProjectSettings projectSettings) {
        myProjectDataManager.importData(externalProjectNode, project, modelsProvider, true);
        myExternalProjectNode = null;
        // resolve dependencies
        final Runnable resolveDependenciesTask = () -> refreshProject(projectSettings.getExternalProjectPath(),
                new ImportSpecBuilder(project, myExternalSystemId)
                        .callback(createFinalImportCallback(project, projectSettings)));
        if (!isFromUI) {
            resolveDependenciesTask.run();
        }
        else {
            // execute when current dialog is closed
            invokeLater(project, ModalityState.NON_MODAL, () -> {
                final Module[] committedModules = ModuleManager.getInstance(project).getModules();
                // Only refresh once every module produced by commit() is really present.
                if (Arrays.asList(committedModules).containsAll(modules)) {
                    resolveDependenciesTask.run();
                }
            });
        }
    }

    /**
     * Callback used by the final (full) refresh: imports the resolved data into the project.
     */
    protected ExternalProjectRefreshCallback createFinalImportCallback(@NotNull Project project,
                                                                       @NotNull ExternalProjectSettings projectSettings) {
        return new ExternalProjectRefreshCallback() {
            @Override
            public void onSuccess(@Nullable final DataNode<ProjectData> externalProject) {
                if (externalProject == null) {
                    return;
                }
                ApplicationManager.getApplication().getService(ProjectDataManager.class).importData(externalProject, project, false);
            }
        };
    }

    /**
     * Clones the control's project settings and canonicalizes the external project path
     * via {@link #getExternalProjectConfigToUse(File)}.
     */
    @NotNull
    private ExternalProjectSettings getCurrentExternalProjectSettings() {
        ExternalProjectSettings result = getControl().getProjectSettings().clone();
        File externalProjectConfigFile = getExternalProjectConfigToUse(new File(result.getExternalProjectPath()));
        final String linkedProjectPath = FileUtil.toCanonicalPath(externalProjectConfigFile.getPath());
        assert linkedProjectPath != null;
        result.setExternalProjectPath(linkedProjectPath);
        return result;
    }

    protected abstract void beforeCommit(@NotNull DataNode<ProjectData> dataNode, @NotNull Project project);

    /** Returns the external project config file chosen in the control, or {@code null} if unset. */
    @Nullable
    private File getProjectFile() {
        String path = getControl().getProjectSettings().getExternalProjectPath();
        return path == null ? null : new File(path);
    }

    /**
     * Asks current builder to ensure that target external project is defined.
     *
     * <p>Runs a modal preview-mode resolve of the external project and stores the result
     * in {@link #myExternalProjectNode}; on success the wizard context is updated with
     * the project's name and location.</p>
     *
     * @param wizardContext current wizard context
     * @throws ConfigurationException if external project is not defined and can't be constructed
     */
    public void ensureProjectIsDefined(@NotNull WizardContext wizardContext) throws ConfigurationException {
        final String externalSystemName = myExternalSystemId.getReadableName();
        File projectFile = getProjectFile();
        if (projectFile == null) {
            throw new ConfigurationException(JavaUiBundle.message("error.project.undefined"));
        }
        projectFile = getExternalProjectConfigToUse(projectFile);
        final Ref<ConfigurationException> error = new Ref<>();
        final ExternalProjectRefreshCallback callback = new ExternalProjectRefreshCallback() {
            @Override
            public void onSuccess(@Nullable DataNode<ProjectData> externalProject) {
                myExternalProjectNode = externalProject;
            }
            @Override
            public void onFailure(@NotNull String errorMessage, @Nullable String errorDetails) {
                if (!StringUtil.isEmpty(errorDetails)) {
                    LOG.warn(errorDetails);
                }
                error.set(new ConfigurationException(
                        JavaUiBundle.message("error.resolve.with.log_link", errorMessage, PathManager.getLogPath()),
                        JavaUiBundle.message("error.resolve.generic")));
            }
        };
        final Project project = getProject(wizardContext);
        final File finalProjectFile = projectFile;
        final String externalProjectPath = FileUtil.toCanonicalPath(finalProjectFile.getAbsolutePath());
        final Ref<ConfigurationException> exRef = new Ref<>();
        executeAndRestoreDefaultProjectSettings(project, () -> {
            try {
                // Preview mode: resolve just enough to show the project structure.
                refreshProject(externalProjectPath,
                        new ImportSpecBuilder(project, myExternalSystemId)
                                .use(ProgressExecutionMode.MODAL_SYNC)
                                .usePreviewMode()
                                .callback(callback));
            }
            catch (IllegalArgumentException e) {
                exRef.set(
                        new ConfigurationException(e.getMessage(), JavaUiBundle.message("error.cannot.parse.project", externalSystemName)));
            }
        });
        ConfigurationException ex = exRef.get();
        if (ex != null) {
            throw ex;
        }
        if (myExternalProjectNode == null) {
            ConfigurationException exception = error.get();
            if (exception != null) {
                throw exception;
            }
        }
        else {
            applyProjectSettings(wizardContext);
        }
    }

    /**
     * Temporarily installs the control's system/project settings on the target project,
     * runs {@code task}, then restores the previous settings state.
     */
    @SuppressWarnings("unchecked")
    private void executeAndRestoreDefaultProjectSettings(@NotNull Project project, @NotNull Runnable task) {
        AbstractExternalSystemSettings systemSettings = ExternalSystemApiUtil.getSettings(project, myExternalSystemId);
        Object systemStateToRestore = null;
        if (systemSettings instanceof PersistentStateComponent) {
            systemStateToRestore = ((PersistentStateComponent)systemSettings).getState();
        }
        systemSettings.copyFrom(getControl().getSystemSettings());
        Collection projectSettingsToRestore = systemSettings.getLinkedProjectsSettings();
        Set<ExternalProjectSettings> projects = new HashSet<>(systemSettings.getLinkedProjectsSettings());
        projects.add(getCurrentExternalProjectSettings());
        systemSettings.setLinkedProjectsSettings(projects);
        try {
            task.run();
        }
        finally {
            // Restore either the whole persisted state or just the linked projects list.
            if (systemStateToRestore != null) {
                ((PersistentStateComponent)systemSettings).loadState(systemStateToRestore);
            }
            else {
                systemSettings.setLinkedProjectsSettings(projectSettingsToRestore);
            }
        }
    }

    /**
     * Allows to adjust external project config file to use on the basis of the given value.
     * <p/>
     * Example: a user might choose a directory which contains target config file and particular implementation expands
     * that to a particular file under the directory.
     *
     * @param file base external project config file
     * @return external project config file to use
     */
    @NotNull
    protected abstract File getExternalProjectConfigToUse(@NotNull File file);

    /** Returns the result of the last preview resolve, or {@code null} if none succeeded yet. */
    @Nullable
    public DataNode<ProjectData> getExternalProjectNode() {
        return myExternalProjectNode;
    }

    /**
     * Applies external system-specific settings like project files location etc to the given context.
     *
     * @param context storage for the project/module settings.
     */
    public void applyProjectSettings(@NotNull WizardContext context) {
        if (myExternalProjectNode == null) {
            assert false;
            return;
        }
        context.setProjectName(myExternalProjectNode.getData().getInternalName());
        context.setProjectFileDirectory(myExternalProjectNode.getData().getIdeProjectFileDirectoryPath());
        applyExtraSettings(context);
    }

    protected abstract void applyExtraSettings(@NotNull WizardContext context);

    /**
     * Allows to get {@link Project} instance to use. Basically, there are two alternatives -
     * {@link WizardContext#getProject() project from the current wizard context} and
     * {@link ProjectManager#getDefaultProject() default project}.
     *
     * @param wizardContext current wizard context
     * @return {@link Project} instance to use
     */
    @NotNull
    public Project getProject(@NotNull WizardContext wizardContext) {
        Project result = wizardContext.getProject();
        if (result == null) {
            result = ProjectManager.getInstance().getDefaultProject();
        }
        return result;
    }

    @Nullable
    @Override
    public Project createProject(String name, String path) {
        Project project = super.createProject(name, path);
        if (project != null) {
            // Mark the project so external system startup logic knows it was just created.
            project.putUserData(ExternalSystemDataKeys.NEWLY_CREATED_PROJECT, Boolean.TRUE);
        }
        return project;
    }

    /** Returns the (lazily created) settings control. */
    @NotNull
    private C getControl() {
        return myControlValue.getValue();
    }
}
| |
package com.communote.server.web.api.service.blog;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang.StringUtils;
import org.hibernate.criterion.MatchMode;
import org.springframework.security.access.AccessDeniedException;
import com.communote.common.string.StringHelper;
import com.communote.common.util.ParameterHelper;
import com.communote.server.api.ServiceLocator;
import com.communote.server.api.core.blog.BlogAccessException;
import com.communote.server.api.core.blog.BlogData;
import com.communote.server.api.core.blog.BlogIdentifierValidationException;
import com.communote.server.api.core.blog.BlogManagement;
import com.communote.server.api.core.blog.BlogNotFoundException;
import com.communote.server.api.core.blog.CreationBlogTO;
import com.communote.server.api.core.blog.NonUniqueBlogIdentifierException;
import com.communote.server.core.converter.blog.BlogToUserDetailBlogListItemConverter;
import com.communote.server.core.filter.ResultSpecification;
import com.communote.server.core.filter.listitems.blog.UserDetailBlogListItem;
import com.communote.server.core.query.QueryManagement;
import com.communote.server.core.security.SecurityHelper;
import com.communote.server.core.tag.TagParserFactory;
import com.communote.server.core.vo.query.blog.BlogQuery;
import com.communote.server.core.vo.query.blog.BlogQueryParameters;
import com.communote.server.core.vo.query.blog.TopicAccessLevel;
import com.communote.server.core.vo.query.config.FilterApiParameterNameProvider;
import com.communote.server.core.vo.query.config.QueryParametersParameterNameProvider;
import com.communote.server.model.blog.Blog;
import com.communote.server.web.api.service.ApiException;
import com.communote.server.web.api.service.BaseRestApiController;
import com.communote.server.web.api.service.IllegalRequestParameterException;
import com.communote.server.web.api.service.RequestedResourceNotFoundException;
import com.communote.server.web.api.to.ApiResult;
import com.communote.server.web.commons.MessageHelper;
/**
* Controller for handling api request for blogs.
*
* @author Communote GmbH - <a href="http://www.communote.com/">http://www.communote.com/</a>
*
* @deprecated Use new generated REST-API instead.
*/
@Deprecated
public class BlogApiController extends BaseRestApiController {
/**
 * The kinds of blog lists that can be requested via the {@code blogListType}
 * request parameter; see {@code getBlogList(...)} for how each value is served.
 */
private enum BlogListType {
    MOST_USED, LAST_USED, LAST_MODIFIED, MANAGER, READ, WRITE
}

// Request parameter holding the last-modification timestamp (LAST_MODIFIED lists).
private static final String PARAM_LAST_MODIFICATION_DATE = "lastModificationDate";
// Legacy request parameter with the maximum number of results to return.
private static final String PARAM_MAX_RESULTS = "maxResults";
// Request parameter selecting one of the BlogListType values.
private static final String PARAM_BLOG_LIST_TYPE = "blogListType";
// Provides the API-specific names of paging/filter parameters.
private final QueryParametersParameterNameProvider nameProvider = new FilterApiParameterNameProvider();
/**
 * Handles a GET request on the blog resource.
 *
 * <p>Without a resource id a list of blogs is returned; with an id the detail
 * item of that single blog is returned.</p>
 *
 * @param apiResult
 *            The result to fill.
 * @param request
 *            the request
 * @param response
 *            the response
 * @return the blog list or the blog detail item
 * @throws RequestedResourceNotFoundException
 *             the resource has not been found
 * @throws IllegalRequestParameterException
 *             in case of an illegal parameter
 */
@Override
protected Object doGet(ApiResult apiResult, HttpServletRequest request,
        HttpServletResponse response) throws RequestedResourceNotFoundException,
        IllegalRequestParameterException {
    Long topicId = getResourceId(request, false);
    if (topicId == null) {
        // No id given: the caller asked for a list of blogs.
        return getBlogList(request, response);
    }
    // Id given: fetch the blog details via a converter that produces the
    // detail list item representation.
    BlogToUserDetailBlogListItemConverter detailConverter = new BlogToUserDetailBlogListItemConverter(
            UserDetailBlogListItem.class, false, true, true, false, false, null);
    try {
        return getBlogManagement().getBlogById(topicId, detailConverter);
    } catch (BlogAccessException e) {
        // Older clients expect an AccessDeniedException here.
        throw new AccessDeniedException(e.getMessage(), e);
    }
}
/**
 * Handles a POST request: creates a new blog or, when a resource id is present,
 * updates the existing one.
 *
 * @param apiResult
 *            the apiResult
 * @param request
 *            the request
 * @param response
 *            the response
 * @return the id of the blog created/updated
 * @throws ApiException
 *             in case of an error
 */
@Override
protected Object doPost(ApiResult apiResult, HttpServletRequest request,
        HttpServletResponse response) throws ApiException {
    Long topicId = getResourceId(request, false);
    String title = getNonEmptyParameter(request, "title");
    String alias = ParameterHelper.getParameterAsString(request.getParameterMap(),
            "nameIdentifier");
    String description = ParameterHelper.getParameterAsString(request.getParameterMap(),
            "description");
    String tagString = ParameterHelper.getParameterAsString(request.getParameterMap(),
            "tags");
    // Assemble the transfer object for the create/update operation.
    CreationBlogTO topicData = new CreationBlogTO();
    topicData.setCreatorUserId(SecurityHelper.assertCurrentUserId());
    topicData.setDescription(description);
    topicData.setTitle(title);
    topicData.setNameIdentifier(alias);
    topicData.setUnparsedTags(TagParserFactory.instance().getDefaultTagParser()
            .parseTags(tagString));
    try {
        // Without an id it is a create, with an id an update.
        Blog persisted = topicId == null
                ? getBlogManagement().createBlog(topicData)
                : getBlogManagement().updateBlog(topicId, topicData);
        return persisted.getId();
    } catch (NonUniqueBlogIdentifierException e) {
        throw new ApiException(MessageHelper.getText(request, "error.blog.identifier.noneunique"));
    } catch (BlogIdentifierValidationException e) {
        throw new ApiException(MessageHelper.getText(request, "error.blog.identifier.notvalid"));
    } catch (BlogAccessException e) {
        throw new ApiException(MessageHelper.getText(request,
                "error.blogpost.blog.no.access.no.manager"));
    } catch (BlogNotFoundException e) {
        throw new ApiException("unexpected exception");
    }
}
/**
 * Fetches the blog list selected by the blog list type parameter.
 *
 * @param request
 *            the request
 * @param response
 *            the response
 * @return the list of requested blogs
 * @throws IllegalRequestParameterException
 *             in case of an illegal parameter
 */
protected List<BlogData> getBlogList(HttpServletRequest request,
        HttpServletResponse response) throws IllegalRequestParameterException {
    BlogListType listType = getBlogListType(request);
    // Paging: offset defaults to 0, page size to 5.
    int offset = ParameterHelper.getParameterAsInteger(request.getParameterMap(), nameProvider
            .getNameForOffset(), 0);
    int maxCount = ParameterHelper.getParameterAsInteger(request.getParameterMap(),
            nameProvider.getNameForMaxCount(), 5);
    ResultSpecification resultSpec = new ResultSpecification(offset, maxCount);
    Collection<BlogData> matches;
    String searchString;
    switch (listType) {
    case LAST_MODIFIED:
        // Backwards compatibility: legacy clients send the limit via a
        // dedicated maxResults parameter.
        if (StringUtils.isEmpty(request.getParameter(nameProvider.getNameForMaxCount()))) {
            maxCount = getIntegerParameter(request, PARAM_MAX_RESULTS);
            resultSpec = new ResultSpecification(offset, maxCount);
        }
        Date lastModificationDate;
        try {
            lastModificationDate = new Date(getLongParameter(request,
                    PARAM_LAST_MODIFICATION_DATE));
        } catch (Exception e) {
            // Missing or unparsable timestamp: do not filter by date.
            lastModificationDate = null;
        }
        matches = getLastModifiedBlogs(SecurityHelper.assertCurrentUserId(),
                lastModificationDate, resultSpec);
        break;
    case LAST_USED:
        matches = getBlogManagement().getLastUsedBlogs(maxCount, false);
        break;
    case MANAGER:
        matches = getManageableBlogs(resultSpec);
        break;
    case MOST_USED:
        matches = getBlogManagement().getMostUsedBlogs(maxCount, false);
        break;
    case READ:
        searchString = ParameterHelper.getParameterAsString(request.getParameterMap(),
                "searchString");
        matches = getReadableBlogs(searchString, TopicAccessLevel.READ, resultSpec);
        break;
    case WRITE:
        searchString = ParameterHelper.getParameterAsString(request.getParameterMap(),
                "searchString");
        matches = getReadableBlogs(searchString, TopicAccessLevel.WRITE, resultSpec);
        break;
    default:
        throw new IllegalRequestParameterException(PARAM_BLOG_LIST_TYPE, listType.name(),
                "Invalid value. Allowed values are: "
                        + StringHelper.toString(BlogListType.values(), "|"));
    }
    return new ArrayList<BlogData>(matches);
}
/**
 * Reads the blog list type out of the request.
 *
 * @param request
 *            the request
 * @return the type of blogs to get out of the request
 * @throws IllegalRequestParameterException
 *             invalid blog list type
 */
private BlogListType getBlogListType(HttpServletRequest request)
        throws IllegalRequestParameterException {
    String parameterValue = getNonEmptyParameter(request, PARAM_BLOG_LIST_TYPE);
    try {
        // use a fixed locale for the case conversion so the enum lookup does not break
        // under locales with special casing rules (e.g. the Turkish dotless i)
        return BlogListType.valueOf(parameterValue.toUpperCase(java.util.Locale.ENGLISH));
    } catch (Exception e) {
        // broad catch kept deliberately: any conversion failure is an illegal parameter
        throw new IllegalRequestParameterException(PARAM_BLOG_LIST_TYPE, parameterValue, e
                .getMessage());
    }
}
/**
 * @return the blog management service resolved through the service locator
 */
private BlogManagement getBlogManagement() {
    BlogManagement blogManagement = ServiceLocator.findService(BlogManagement.class);
    return blogManagement;
}
/**
 * Retrieves the last modified blogs a user can access, sorted with the oldest modification
 * first.
 *
 * @param userId
 *            id of the user
 * @param minimumLastModificationDate
 *            the minimum last modification the blog must be modified AFTER
 * @param resultSpecification
 *            the result specification
 * @return the found blogs
 */
private List<BlogData> getLastModifiedBlogs(Long userId,
        Date minimumLastModificationDate, ResultSpecification resultSpecification) {
    BlogQueryParameters parameters = BlogQuery.DEFAULT_QUERY.createInstance();
    parameters.setUserId(userId);
    parameters.setAccessLevel(TopicAccessLevel.READ);
    parameters.setMinimumLastModificationDate(minimumLastModificationDate);
    parameters.setResultSpecification(resultSpecification);
    parameters.sortByLastModificationDateAsc();
    QueryManagement queryManagement = ServiceLocator.findService(QueryManagement.class);
    return queryManagement.query(BlogQuery.DEFAULT_QUERY, parameters);
}
/**
 * @param resultSpecification
 *            the {@link ResultSpecification} states the offset and the maximum number of
 *            elements to get
 * @return get the blogs the current user is manager of
 */
private List<BlogData> getManageableBlogs(ResultSpecification resultSpecification) {
    BlogQueryParameters parameters = BlogQuery.DEFAULT_QUERY.createInstance();
    parameters.setAccessLevel(TopicAccessLevel.MANAGER);
    parameters.setUserId(SecurityHelper.getCurrentUserId());
    parameters.setResultSpecification(resultSpecification);
    parameters.sortByNameAsc();
    QueryManagement queryManagement = ServiceLocator.findService(QueryManagement.class);
    return queryManagement.executeQuery(BlogQuery.DEFAULT_QUERY, parameters);
}
/**
 * @param searchString
 *            the search string
 * @param blogAccessLevel
 *            Level the user must have to access the blog.
 * @param resultSpecification
 *            the {@link ResultSpecification} states the offset and the maximum number of
 *            elements to get
 * @return get the blogs the current user can read
 */
private List<BlogData> getReadableBlogs(String searchString,
        TopicAccessLevel blogAccessLevel, ResultSpecification resultSpecification) {
    List<BlogData> blogs;
    BlogQueryParameters instance = BlogQuery.DEFAULT_QUERY.createInstance();
    instance.setAccessLevel(blogAccessLevel);
    instance.setUserId(SecurityHelper.assertCurrentUserId());
    if (StringUtils.isNotEmpty(searchString)) {
        // restrict the text search to the blog title, matching each whitespace-separated term
        instance.setSearchFieldMask(BlogQueryParameters.SEARCH_FIELD_TITLE);
        instance.setTextFilter(searchString.split(" "));
        instance.setMatchMode(MatchMode.ANYWHERE);
    }
    instance.sortByNameAsc();
    instance.setResultSpecification(resultSpecification);
    // use the same service-lookup style as the other query helpers in this class
    // (getLastModifiedBlogs / getManageableBlogs)
    QueryManagement queryManagement = ServiceLocator.findService(QueryManagement.class);
    blogs = queryManagement.executeQuery(BlogQuery.DEFAULT_QUERY, instance);
    return blogs;
}
}
| |
/*
* DBeaver - Universal Database Manager
* Copyright (C) 2010-2019 Serge Rider (serge@jkiss.org)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jkiss.dbeaver.registry.driver;
import com.google.gson.stream.JsonWriter;
import org.eclipse.core.runtime.IConfigurationElement;
import org.eclipse.core.runtime.Platform;
import org.jkiss.code.NotNull;
import org.jkiss.code.Nullable;
import org.jkiss.dbeaver.DBException;
import org.jkiss.dbeaver.Log;
import org.jkiss.dbeaver.ModelPreferences;
import org.jkiss.dbeaver.model.*;
import org.jkiss.dbeaver.model.connection.*;
import org.jkiss.dbeaver.model.impl.AbstractDescriptor;
import org.jkiss.dbeaver.model.impl.PropertyDescriptor;
import org.jkiss.dbeaver.model.meta.Property;
import org.jkiss.dbeaver.model.navigator.meta.DBXTreeNode;
import org.jkiss.dbeaver.model.preferences.DBPPreferenceStore;
import org.jkiss.dbeaver.model.preferences.DBPPropertyDescriptor;
import org.jkiss.dbeaver.model.runtime.DBRProgressMonitor;
import org.jkiss.dbeaver.model.runtime.OSDescriptor;
import org.jkiss.dbeaver.registry.DataSourceProviderDescriptor;
import org.jkiss.dbeaver.registry.NativeClientDescriptor;
import org.jkiss.dbeaver.registry.RegistryConstants;
import org.jkiss.dbeaver.registry.VersionUtils;
import org.jkiss.dbeaver.runtime.DBWorkbench;
import org.jkiss.dbeaver.utils.ContentUtils;
import org.jkiss.dbeaver.utils.GeneralUtils;
import org.jkiss.dbeaver.utils.SystemVariablesResolver;
import org.jkiss.utils.CommonUtils;
import org.jkiss.utils.StandardConstants;
import org.jkiss.utils.xml.XMLBuilder;
import java.io.File;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.*;
/**
* DriverDescriptor
*/
public class DriverDescriptor extends AbstractDescriptor implements DBPDriver {
private static final Log log = Log.getLog(DriverDescriptor.class);

// Folder name for driver files (NOTE(review): usage is outside this view - confirm)
private static final String DRIVERS_FOLDER = "drivers"; //$NON-NLS-1$
// System property exposing the custom drivers home; set in the static initializer below
private static final String PROP_DRIVERS_LOCATION = "DRIVERS_LOCATION";
// Preference-key prefix for accepted driver licenses (NOTE(review): usage not visible here)
private static final String LICENSE_ACCEPT_KEY = "driver.license.accept.";
/**
 * Descriptor of a single physical driver file (jar, license, etc.) resolved for a
 * driver library.
 */
public static class DriverFileInfo {
    private final String id;
    private final String version;
    private final File file;

    DriverFileInfo(String id, String version, File file) {
        this.id = id;
        this.version = version;
        this.file = file;
    }

    DriverFileInfo(DBPDriverLibrary library) {
        this.id = library.getId();
        this.version = library.getVersion();
        // may be null when the library has no local file yet (not downloaded)
        this.file = library.getLocalFile();
    }

    public File getFile() {
        return file;
    }

    public String getId() {
        return id;
    }

    public String getVersion() {
        return version;
    }

    @Override
    public String toString() {
        // FIX: file may be null (see DriverFileInfo(DBPDriverLibrary)); guard against NPE
        // by falling back to the library id
        return file == null ? id : file.getName();
    }
}
// Owning data source provider and stable driver id
private final DataSourceProviderDescriptor providerDescriptor;
private final String id;
// Primary category and the full list of categories the driver is listed under
private String category;
private List<String> categories;
// Plugin-declared defaults, kept to detect user modifications; all null for custom drivers
private final String origName;
private final String origDescription;
private final String origClassName;
private final String origDefaultPort;
private final String origSampleURL;
// Current (possibly user-edited) values
private String name;
private String description;
private String driverClassName;
private String driverDefaultPort;
private String sampleURL;
private String webURL;
private String propertiesWebURL;
// Icon variants: plain, with state overlays (normal/error), and the big variant
private DBPImage iconPlain;
private DBPImage iconNormal;
private DBPImage iconError;
private DBPImage iconBig;
// Capability/state flags; see the corresponding getters for semantics
private boolean embedded;
private boolean clientRequired;
private boolean supportsDriverProperties;
private boolean anonymousAccess;
private boolean allowsEmptyPassword;
private boolean licenseRequired;
private boolean customDriverLoader;
private boolean useURLTemplate;
private boolean custom;
private boolean modified;
private boolean disabled;
private boolean temporary;
private int promoted;
// Library / native client configuration
private final List<DBPNativeClientLocation> nativeClientHomes = new ArrayList<>();
private final List<DriverFileSource> fileSources = new ArrayList<>();
private final List<DBPDriverLibrary> libraries = new ArrayList<>();
private final List<DBPDriverLibrary> origFiles = new ArrayList<>();
private final List<DBPPropertyDescriptor> connectionPropertyDescriptors = new ArrayList<>();
private final List<OSDescriptor> supportedSystems = new ArrayList<>();
private final List<ReplaceInfo> driverReplacements = new ArrayList<>();
private DriverDescriptor replacedBy;
// Driver/connection parameters: plugin defaults vs. user-customized values
private final Map<Object, Object> defaultParameters = new HashMap<>();
private final Map<Object, Object> customParameters = new HashMap<>();
private final Map<Object, Object> defaultConnectionProperties = new HashMap<>();
private final Map<Object, Object> customConnectionProperties = new HashMap<>();
// Physical files resolved per library (filled by download/validation)
private Map<DBPDriverLibrary, List<DriverFileInfo>> resolvedFiles = new HashMap<>();
// Loading state; populated by loadDriver()/loadLibraries()
private Class driverClass;
private boolean isLoaded;
private Object driverInstance;
private DriverClassLoader classLoader;
// True when the last load attempt failed; drives the error icon in getIcon()
private transient boolean isFailed = false;

static {
    // Expose the custom drivers home location as a system property
    File driversHome = DriverDescriptor.getCustomDriversHome();
    System.setProperty(PROP_DRIVERS_LOCATION, driversHome.getAbsolutePath());
}

// New driver constructor
public DriverDescriptor(DataSourceProviderDescriptor providerDescriptor, String id) {
    this(providerDescriptor, id, null);
}
/**
 * Custom-driver constructor: creates a new custom driver with the given id, optionally
 * copying all editable properties from an existing driver.
 *
 * @param providerDescriptor owning data source provider
 * @param id                 id of the new driver
 * @param copyFrom           driver to copy properties from, or null for a blank custom driver
 */
public DriverDescriptor(DataSourceProviderDescriptor providerDescriptor, String id, DriverDescriptor copyFrom) {
    super(providerDescriptor.getPluginId());
    this.providerDescriptor = providerDescriptor;
    this.id = id;
    this.custom = true;
    this.useURLTemplate = true;
    this.promoted = 0;
    // orig* values track plugin-declared defaults; a custom driver has none
    this.origName = null;
    this.origDescription = null;
    this.origClassName = null;
    this.origDefaultPort = null;
    this.origSampleURL = null;
    this.iconPlain = providerDescriptor.getIcon();
    if (this.iconPlain == null) {
        this.iconPlain = DBIcon.DATABASE_DEFAULT;
    }
    this.iconBig = DBIcon.DATABASE_BIG_DEFAULT;
    makeIconExtensions();
    if (copyFrom != null) {
        // Copy props from source
        this.category = copyFrom.category;
        this.categories = new ArrayList<>(copyFrom.categories);
        this.name = copyFrom.name;
        this.description = copyFrom.description;
        this.driverClassName = copyFrom.driverClassName;
        this.driverDefaultPort = copyFrom.driverDefaultPort;
        this.sampleURL = copyFrom.sampleURL;
        this.webURL = copyFrom.webURL;
        // FIX: was copying webURL into propertiesWebURL
        this.propertiesWebURL = copyFrom.propertiesWebURL;
        this.embedded = copyFrom.embedded;
        this.clientRequired = copyFrom.clientRequired;
        this.supportsDriverProperties = copyFrom.supportsDriverProperties;
        this.anonymousAccess = copyFrom.anonymousAccess;
        this.allowsEmptyPassword = copyFrom.allowsEmptyPassword;
        this.licenseRequired = copyFrom.licenseRequired;
        this.customDriverLoader = copyFrom.customDriverLoader;
        // FIX: was copying customDriverLoader into useURLTemplate
        this.useURLTemplate = copyFrom.useURLTemplate;
        this.promoted = copyFrom.promoted;
        this.nativeClientHomes.addAll(copyFrom.nativeClientHomes);
        for (DriverFileSource fs : copyFrom.fileSources) {
            this.fileSources.add(new DriverFileSource(fs));
        }
        for (DBPDriverLibrary library : copyFrom.libraries) {
            if (library instanceof DriverLibraryAbstract) {
                // deep-copy so edits to the new driver do not affect the source driver
                this.libraries.add(((DriverLibraryAbstract) library).copyLibrary(this));
            } else {
                this.libraries.add(library);
            }
        }
        this.connectionPropertyDescriptors.addAll(copyFrom.connectionPropertyDescriptors);
        this.defaultParameters.putAll(copyFrom.defaultParameters);
        this.customParameters.putAll(copyFrom.customParameters);
        this.defaultConnectionProperties.putAll(copyFrom.defaultConnectionProperties);
        this.customConnectionProperties.putAll(copyFrom.customConnectionProperties);
    } else {
        this.categories = new ArrayList<>();
        this.name = "";
    }
}
/**
 * Predefined driver constructor: builds a driver from its plugin.xml configuration element.
 *
 * @param providerDescriptor owning data source provider
 * @param config             the driver extension configuration element
 */
public DriverDescriptor(DataSourceProviderDescriptor providerDescriptor, IConfigurationElement config) {
    super(providerDescriptor.getPluginId());
    this.providerDescriptor = providerDescriptor;
    this.id = CommonUtils.notEmpty(config.getAttribute(RegistryConstants.ATTR_ID));
    this.category = CommonUtils.notEmpty(config.getAttribute(RegistryConstants.ATTR_CATEGORY));
    this.categories = Arrays.asList(CommonUtils.split(config.getAttribute(RegistryConstants.ATTR_CATEGORIES), ","));
    // remember the plugin-declared values so user modifications can be detected later
    this.origName = this.name = CommonUtils.notEmpty(config.getAttribute(RegistryConstants.ATTR_LABEL));
    this.origDescription = this.description = config.getAttribute(RegistryConstants.ATTR_DESCRIPTION);
    this.origClassName = this.driverClassName = config.getAttribute(RegistryConstants.ATTR_CLASS);
    if (!CommonUtils.isEmpty(config.getAttribute(RegistryConstants.ATTR_DEFAULT_PORT))) {
        this.origDefaultPort = this.driverDefaultPort = config.getAttribute(RegistryConstants.ATTR_DEFAULT_PORT);
    } else {
        this.origDefaultPort = this.driverDefaultPort = null;
    }
    this.origSampleURL = this.sampleURL = config.getAttribute(RegistryConstants.ATTR_SAMPLE_URL);
    this.webURL = config.getAttribute(RegistryConstants.ATTR_WEB_URL);
    this.propertiesWebURL = config.getAttribute(RegistryConstants.ATTR_PROPERTIES_WEB_URL);
    this.clientRequired = CommonUtils.getBoolean(config.getAttribute(RegistryConstants.ATTR_CLIENT_REQUIRED), false);
    this.customDriverLoader = CommonUtils.getBoolean(config.getAttribute(RegistryConstants.ATTR_CUSTOM_DRIVER_LOADER), false);
    this.useURLTemplate = CommonUtils.getBoolean(config.getAttribute(RegistryConstants.ATTR_USE_URL_TEMPLATE), true);
    this.promoted = CommonUtils.toInt(config.getAttribute(RegistryConstants.ATTR_PROMOTED), 0);
    this.supportsDriverProperties = CommonUtils.getBoolean(config.getAttribute(RegistryConstants.ATTR_SUPPORTS_DRIVER_PROPERTIES), true);
    this.embedded = CommonUtils.getBoolean(config.getAttribute(RegistryConstants.ATTR_EMBEDDED));
    this.anonymousAccess = CommonUtils.getBoolean(config.getAttribute(RegistryConstants.ATTR_ANONYMOUS));
    // FIX: the value must be read from the config element; the old code passed the literal
    // attribute name to getBoolean(), so this flag was always false
    this.allowsEmptyPassword = CommonUtils.getBoolean(config.getAttribute("allowsEmptyPassword"));
    this.licenseRequired = CommonUtils.getBoolean(config.getAttribute(RegistryConstants.ATTR_LICENSE_REQUIRED));
    this.custom = false;
    this.isLoaded = false;
    for (IConfigurationElement lib : config.getChildren(RegistryConstants.TAG_FILE)) {
        DriverLibraryAbstract library = DriverLibraryAbstract.createFromConfig(this, lib);
        if (library != null) {
            this.libraries.add(library);
        }
    }
    this.origFiles.addAll(this.libraries);
    for (IConfigurationElement lib : config.getChildren(RegistryConstants.TAG_FILE_SOURCE)) {
        this.fileSources.add(new DriverFileSource(lib));
    }
    this.iconPlain = iconToImage(config.getAttribute(RegistryConstants.ATTR_ICON));
    if (this.iconPlain == null) {
        this.iconPlain = providerDescriptor.getIcon();
    }
    // the big icon falls back to the plain one when not declared explicitly
    this.iconBig = this.iconPlain;
    if (config.getAttribute(RegistryConstants.ATTR_ICON_BIG) != null) {
        this.iconBig = iconToImage(config.getAttribute(RegistryConstants.ATTR_ICON_BIG));
    }
    makeIconExtensions();
    {
        // OSes this driver is restricted to (empty = multi-platform)
        IConfigurationElement[] osElements = config.getChildren(RegistryConstants.TAG_OS);
        for (IConfigurationElement os : osElements) {
            supportedSystems.add(new OSDescriptor(
                os.getAttribute(RegistryConstants.ATTR_NAME),
                os.getAttribute(RegistryConstants.ATTR_ARCH)
            ));
        }
    }
    {
        // Connection property groups
        IConfigurationElement[] propElements = config.getChildren(PropertyDescriptor.TAG_PROPERTY_GROUP);
        for (IConfigurationElement prop : propElements) {
            connectionPropertyDescriptors.addAll(PropertyDescriptor.extractProperties(prop));
        }
    }
    {
        // Driver parameters; a value may be given as an attribute or as the element body
        IConfigurationElement[] paramElements = config.getChildren(RegistryConstants.TAG_PARAMETER);
        for (IConfigurationElement param : paramElements) {
            String paramName = param.getAttribute(RegistryConstants.ATTR_NAME);
            String paramValue = param.getAttribute(RegistryConstants.ATTR_VALUE);
            if (CommonUtils.isEmpty(paramValue)) {
                paramValue = param.getValue();
            }
            if (!CommonUtils.isEmpty(paramName) && !CommonUtils.isEmpty(paramValue)) {
                setDriverParameter(paramName, paramValue, true);
            }
        }
    }
    {
        // Connection properties; internal ones are not exposed as custom properties
        IConfigurationElement[] propElements = config.getChildren(RegistryConstants.TAG_PROPERTY);
        for (IConfigurationElement param : propElements) {
            String paramName = param.getAttribute(RegistryConstants.ATTR_NAME);
            String paramValue = param.getAttribute(RegistryConstants.ATTR_VALUE);
            if (CommonUtils.isEmpty(paramValue)) {
                paramValue = param.getValue();
            }
            if (!CommonUtils.isEmpty(paramName) && !CommonUtils.isEmpty(paramValue)) {
                defaultConnectionProperties.put(paramName, paramValue);
                if (!paramName.startsWith(DBConstants.INTERNAL_PROP_PREFIX)) {
                    customConnectionProperties.put(paramName, paramValue);
                }
            }
        }
    }
    {
        // Driver replacements: drivers (of possibly other providers) this driver supersedes
        IConfigurationElement[] replaceElements = config.getChildren(RegistryConstants.TAG_REPLACE);
        for (IConfigurationElement replace : replaceElements) {
            String providerId = replace.getAttribute(RegistryConstants.ATTR_PROVIDER);
            String driverId = replace.getAttribute(RegistryConstants.ATTR_DRIVER);
            if (!CommonUtils.isEmpty(providerId) && !CommonUtils.isEmpty(driverId)) {
                driverReplacements.add(new ReplaceInfo(providerId, driverId));
            }
        }
    }
}
// Package-private accessors (NOTE(review): presumably used by registry load/save code -
// callers are outside this view)

Map<Object, Object> getDefaultParameters() {
    return defaultParameters;
}

Map<Object, Object> getCustomParameters() {
    return customParameters;
}

Map<Object, Object> getCustomConnectionProperties() {
    return customConnectionProperties;
}

Map<DBPDriverLibrary, List<DriverFileInfo>> getResolvedFiles() {
    return resolvedFiles;
}

List<DBPNativeClientLocation> getNativeClientHomes() {
    return nativeClientHomes;
}

/** @return the driver that replaces this one, or null if it is not replaced */
public DriverDescriptor getReplacedBy() {
    return replacedBy;
}

public void setReplacedBy(DriverDescriptor replaceBy) {
    this.replacedBy = replaceBy;
}
/**
 * Checks whether this driver is declared as a replacement for the given driver.
 *
 * @param driver candidate driver
 * @return true if a replacement entry matches the driver's provider id and driver id
 */
public boolean replaces(DriverDescriptor driver) {
    final String providerId = driver.getProviderDescriptor().getId();
    final String driverId = driver.getId();
    for (ReplaceInfo replacement : driverReplacements) {
        if (providerId.equals(replacement.providerId) && driverId.equals(replacement.driverId)) {
            return true;
        }
    }
    return false;
}
/**
 * Builds the derived icon variants: custom drivers get a "lamp" overlay on the normal icon,
 * and every driver gets an error-overlay variant for failed loads.
 */
private void makeIconExtensions() {
    final boolean customDriver = isCustom();
    if (customDriver) {
        this.iconNormal = new DBIconComposite(this.iconPlain, false, null, null, DBIcon.OVER_LAMP, null);
    } else {
        this.iconNormal = this.iconPlain;
    }
    this.iconError = new DBIconComposite(
        this.iconPlain, false, null, null,
        customDriver ? DBIcon.OVER_LAMP : null,
        DBIcon.OVER_ERROR);
}
@Nullable
@Override
public DriverClassLoader getClassLoader() {
    // null until loadLibraries() has run
    return classLoader;
}

public DataSourceProviderDescriptor getProviderDescriptor() {
    return providerDescriptor;
}

@NotNull
@Override
public DBPDataSourceProvider getDataSourceProvider() {
    return providerDescriptor.getInstance(this);
}

@Nullable
@Override
public DBPNativeClientLocationManager getNativeClientManager() {
    // the provider serves as native client manager only if it implements the interface
    DBPDataSourceProvider provider = getDataSourceProvider();
    if (provider instanceof DBPNativeClientLocationManager) {
        return (DBPNativeClientLocationManager) provider;
    } else {
        return null;
    }
}

@NotNull
@Override
public String getId() {
    return id;
}

@Override
public String getProviderId() {
    return providerDescriptor.getId();
}

@Override
@Property(viewable = true, order = 2)
public String getCategory() {
    return category;
}

public void setCategory(String category) {
    this.category = category;
}

@Override
public List<String> getCategories() {
    // defensive copy so callers cannot mutate the internal list
    return new ArrayList<>(categories);
}

@NotNull
@Override
@Property(viewable = true, order = 1)
public String getName() {
    return name;
}

public void setName(String name) {
    this.name = name;
}

@Override
@Property(viewable = true, multiline = true, order = 100)
public String getDescription() {
    return description;
}

public void setDescription(String description) {
    this.description = description;
}
/**
 * Full display name of the driver: category plus name, unless the category is empty or
 * already contained in the name.
 */
@NotNull
public String getFullName() {
    final boolean categoryRedundant = CommonUtils.isEmpty(category) || name.contains(category);
    return categoryRedundant ? name : category + " / " + name;
}
/**
 * Plain icon (without any overlays).
 *
 * @return plain icon
 */
@NotNull
public DBPImage getPlainIcon() {
    return iconPlain;
}

/**
 * Driver icon, includes overlays for driver conditions (custom, invalid, etc)..
 *
 * @return icon
 */
@NotNull
@Override
public DBPImage getIcon() {
    // show the error overlay when the last load attempt failed
    if (!isLoaded && isFailed) {
        return iconError;
    } else {
        return iconNormal;
    }
}

@Override
public DBPImage getIconBig() {
    return iconBig;
}

@Override
public boolean isCustom() {
    return custom;
}

public void setCustom(boolean custom) {
    this.custom = custom;
}

public boolean isModified() {
    // temporary drivers are never considered modified
    return !isTemporary() && modified;
}

public void setModified(boolean modified) {
    this.modified = modified;
}

public boolean isDisabled() {
    return disabled;
}

public void setDisabled(boolean disabled) {
    this.disabled = disabled;
}
@Nullable
@Override
@Property(viewable = true, order = 2)
public String getDriverClassName() {
    return driverClassName;
}

public void setDriverClassName(String driverClassName) {
    // reset the cached driver class/instance when the class name changes
    // (a null current value always triggers a reset)
    if (this.driverClassName == null || !this.driverClassName.equals(driverClassName)) {
        this.driverClassName = driverClassName;
        resetDriverInstance();
    }
}

@NotNull
@Override
public Object getDriverInstance(@NotNull DBRProgressMonitor monitor)
    throws DBException {
    // lazily load the driver on first use
    if (driverInstance == null) {
        loadDriver(monitor);
    }
    // internal drivers may have no cached instance after load; create one on the fly
    if (isInternalDriver() && driverInstance == null) {
        return createDriverInstance();
    }
    return driverInstance;
}

private void resetDriverInstance() {
    // drop all cached state so the next getDriverInstance() reloads everything
    this.driverInstance = null;
    this.driverClass = null;
    this.isLoaded = false;
    this.resolvedFiles.clear();
}

private Object createDriverInstance()
    throws DBException {
    // instantiate via the no-arg constructor of the previously loaded driver class
    try {
        return driverClass.newInstance();
    } catch (InstantiationException ex) {
        throw new DBException("Can't instantiate driver class", ex);
    } catch (IllegalAccessException ex) {
        throw new DBException("Illegal access", ex);
    } catch (ClassCastException ex) {
        throw new DBException("Bad driver class name specified", ex);
    } catch (Throwable ex) {
        throw new DBException("Error during driver instantiation", ex);
    }
}
// Simple property accessors for the driver configuration

@Nullable
@Override
public String getDefaultPort() {
    return driverDefaultPort;
}

public void setDriverDefaultPort(String driverDefaultPort) {
    this.driverDefaultPort = driverDefaultPort;
}

@Nullable
@Override
@Property(viewable = true, order = 3)
public String getSampleURL() {
    return sampleURL;
}

public void setSampleURL(String sampleURL) {
    this.sampleURL = sampleURL;
}

@Nullable
@Override
public String getWebURL() {
    return webURL;
}

@Nullable
@Override
public String getPropertiesWebURL() {
    return propertiesWebURL;
}

@Override
public boolean isClientRequired() {
    return clientRequired;
}

@Override
public boolean supportsDriverProperties() {
    return this.supportsDriverProperties;
}

@Override
public boolean isEmbedded() {
    return embedded;
}

public void setEmbedded(boolean embedded) {
    this.embedded = embedded;
}

@Override
public boolean isAnonymousAccess() {
    return anonymousAccess;
}

public void setAnonymousAccess(boolean anonymousAccess) {
    this.anonymousAccess = anonymousAccess;
}

@Override
public boolean isAllowsEmptyPassword() {
    return allowsEmptyPassword;
}

public void setAllowsEmptyPassword(boolean allowsEmptyPassword) {
    this.allowsEmptyPassword = allowsEmptyPassword;
}

@Override
public boolean isLicenseRequired() {
    return licenseRequired;
}

@Override
public boolean isCustomDriverLoader() {
    return customDriverLoader;
}

void setCustomDriverLoader(boolean customDriverLoader) {
    this.customDriverLoader = customDriverLoader;
}

@Override
public boolean isUseURL() {
    return useURLTemplate;
}

public void setUseURL(boolean useURLTemplate) {
    this.useURLTemplate = useURLTemplate;
}

@Override
public int getPromotedScore() {
    // NOTE(review): exact meaning of the score is defined by callers outside this view
    return promoted;
}

@Override
public boolean isInstantiable() {
    // a driver without a configured class name cannot be instantiated
    return !CommonUtils.isEmpty(driverClassName);
}

@Override
public boolean isTemporary() {
    // a driver is temporary when flagged itself or when its provider is temporary
    return temporary || providerDescriptor.isTemporary();
}

public void setTemporary(boolean temporary) {
    this.temporary = temporary;
}

@Nullable
@Override
public DBXTreeNode getNavigatorRoot() {
    return providerDescriptor.getTreeDescriptor();
}

public boolean isManagable() {
    return getProviderDescriptor().isDriversManagable();
}

@Override
public boolean isInternalDriver() {
    // "internal" = a JDK-bundled driver (class name under sun.jdbc)
    return
        driverClassName != null &&
        driverClassName.contains("sun.jdbc"); //$NON-NLS-1$
}
/**
 * Collects all native client locations: remote ones declared by the provider (those with an
 * available distribution) followed by the user-defined local homes.
 */
@NotNull
@Override
public List<DBPNativeClientLocation> getNativeClientLocations() {
    final List<DBPNativeClientLocation> locations = new ArrayList<>();
    for (NativeClientDescriptor clientDescriptor : getProviderDescriptor().getNativeClients()) {
        if (clientDescriptor.findDistribution() == null) {
            continue;
        }
        locations.add(new RemoteNativeClientLocation(clientDescriptor));
    }
    locations.addAll(nativeClientHomes);
    return locations;
}
public void setNativeClientLocations(Collection<DBPNativeClientLocation> locations) {
    // replace all user-defined client locations
    nativeClientHomes.clear();
    nativeClientHomes.addAll(locations);
}

void addNativeClientLocation(DBPNativeClientLocation location) {
    // add only if not already registered
    if (!nativeClientHomes.contains(location)) {
        nativeClientHomes.add(location);
    }
}

@NotNull
@Override
public List<? extends DBPDriverLibrary> getDriverLibraries() {
    // NOTE(review): returns the internal list directly - callers could mutate driver state
    return libraries;
}
/**
 * @return all driver libraries that are not disabled
 */
public List<DBPDriverLibrary> getEnabledDriverLibraries() {
    final List<DBPDriverLibrary> enabled = new ArrayList<>(libraries.size());
    for (DBPDriverLibrary library : libraries) {
        if (library.isDisabled()) {
            continue;
        }
        enabled.add(library);
    }
    return enabled;
}
/**
 * Looks up a driver library by its path.
 *
 * @param path the library path to search for
 * @return the matching library, or null if this driver has no library with that path
 */
DBPDriverLibrary getDriverLibrary(String path) {
    for (DBPDriverLibrary library : libraries) {
        final boolean samePath = library.getPath().equals(path);
        if (samePath) {
            return library;
        }
    }
    return null;
}
/** Registers a resolved physical file for the given library. */
void addLibraryFile(DBPDriverLibrary library, DriverFileInfo fileInfo) {
    List<DriverFileInfo> files = resolvedFiles.computeIfAbsent(library, k -> new ArrayList<>());
    files.add(fileInfo);
}
/**
 * Returns the library registered under the given path, creating and registering a new one
 * when none exists yet.
 *
 * @param path     library path
 * @param fileType type of the library file
 * @return the existing or newly created library
 */
public DBPDriverLibrary addDriverLibrary(String path, DBPDriverLibrary.FileType fileType) {
    // reuse the existing lookup instead of duplicating the search loop
    DBPDriverLibrary existing = getDriverLibrary(path);
    if (existing != null) {
        return existing;
    }
    DriverLibraryAbstract lib = DriverLibraryAbstract.createFromPath(this, fileType, path, null);
    addDriverLibrary(lib, true);
    return lib;
}
/**
 * Adds a library to this driver if it is not present yet.
 *
 * @param descriptor library to add
 * @param resetCache whether to drop the cached driver class/instance so the new library is
 *                   picked up on the next load
 * @return true if the library was added, false if it was already present
 */
public boolean addDriverLibrary(DBPDriverLibrary descriptor, boolean resetCache) {
    if (!libraries.contains(descriptor)) {
        if (resetCache) {
            resetDriverInstance();
        }
        this.libraries.add(descriptor);
        return true;
    }
    return false;
}

/**
 * Removes a library from this driver. Non-custom (plugin-provided) libraries are only
 * disabled so they can be restored later; custom libraries are removed for good.
 *
 * @return true if the library was disabled or actually removed
 */
public boolean removeDriverLibrary(DBPDriverLibrary lib) {
    resetDriverInstance();
    if (!lib.isCustom()) {
        lib.setDisabled(true);
        return true;
    } else {
        return this.libraries.remove(lib);
    }
}
@NotNull
public List<DriverFileSource> getDriverFileSources() {
    return fileSources;
}

@NotNull
@Override
public List<DBPPropertyDescriptor> getConnectionPropertyDescriptors() {
    return connectionPropertyDescriptors;
}

@NotNull
@Override
public Map<Object, Object> getDefaultConnectionProperties() {
    return defaultConnectionProperties;
}

@NotNull
@Override
public Map<Object, Object> getConnectionProperties() {
    // user-customized connection properties
    return customConnectionProperties;
}

public void setConnectionProperty(String name, String value) {
    customConnectionProperties.put(name, value);
}

public void setConnectionProperties(Map<Object, Object> parameters) {
    // replace all custom connection properties
    customConnectionProperties.clear();
    customConnectionProperties.putAll(parameters);
}

public Map<Object, Object> getDefaultDriverParameters() {
    return defaultParameters;
}

@NotNull
@Override
public Map<Object, Object> getDriverParameters() {
    return customParameters;
}

@Nullable
@Override
public Object getDriverParameter(String name) {
    // custom value wins; otherwise fall back to the provider-declared default
    Object value = customParameters.get(name);
    if (value == null) {
        DBPPropertyDescriptor defProperty = providerDescriptor.getDriverProperty(name);
        if (defProperty != null) {
            return defProperty.getDefaultValue();
        }
    }
    return value;
}

public void setDriverParameter(String name, String value, boolean setDefault) {
    // convert the raw string to the type declared by the provider property, if any
    DBPPropertyDescriptor prop = getProviderDescriptor().getDriverProperty(name);
    Object valueObject = prop == null ? value : GeneralUtils.convertString(value, prop.getDataType());
    customParameters.put(name, valueObject);
    if (setDefault) {
        defaultParameters.put(name, valueObject);
    }
}

public void setDriverParameters(Map<Object, Object> parameters) {
    // replace all custom driver parameters
    customParameters.clear();
    customParameters.putAll(parameters);
}
/**
 * Checks whether the driver can run on the local OS/architecture. A driver with no declared
 * OS restrictions is considered multi-platform.
 */
@Override
public boolean isSupportedByLocalSystem() {
    if (supportedSystems.isEmpty()) {
        // Multi-platform
        return true;
    }
    final OSDescriptor localSystem = DBWorkbench.getPlatform().getLocalSystem();
    boolean matched = false;
    for (OSDescriptor supported : supportedSystems) {
        if (supported.matches(localSystem)) {
            matched = true;
            break;
        }
    }
    return matched;
}
/**
 * Returns the content of the first readable license file among the driver libraries, or
 * null when the driver has no (readable) license file.
 */
@Override
public String getLicense() {
    for (DBPDriverLibrary file : libraries) {
        if (file.getType() == DBPDriverLibrary.FileType.license) {
            final File licenseFile = file.getLocalFile();
            if (licenseFile != null && licenseFile.exists()) {
                try {
                    return ContentUtils.readFileToString(licenseFile);
                } catch (IOException e) {
                    // unreadable license file: log and keep looking
                    log.warn(e);
                }
            }
        }
    }
    return null;
}
@Override
public void loadDriver(DBRProgressMonitor monitor)
    throws DBException {
    this.loadDriver(monitor, false);
}

/**
 * Resolves the driver libraries, enforces license acceptance and loads/instantiates the
 * driver class.
 *
 * @param forceReload reload even if the driver was already loaded successfully
 * @throws DBException if the license is not accepted, the class cannot be loaded or the
 *                     driver cannot be instantiated
 */
private void loadDriver(DBRProgressMonitor monitor, boolean forceReload)
    throws DBException {
    if (isLoaded && !forceReload) {
        return;
    }
    isLoaded = false;
    loadLibraries();
    if (licenseRequired) {
        // the user must accept the driver license before the class is loaded
        String licenseText = getLicense();
        if (!CommonUtils.isEmpty(licenseText) && !acceptLicense(licenseText)) {
            throw new DBException("You have to accept driver '" + getFullName() + "' license to be able to connect");
        }
    }
    try {
        if (!isCustomDriverLoader()) {
            try {
                // Load driver classes into core module using plugin class loader
                driverClass = Class.forName(driverClassName, true, classLoader);
            } catch (Throwable ex) {
                throw new DBException("Error creating driver '" + getFullName() + "' instance.\nMost likely required jar files are missing.\nYou should configure jars in driver settings.\n\nReason: can't load driver class '" + driverClassName + "'", ex);
            }
            // Create driver instance
            /*if (!this.isInternalDriver())*/
            {
                driverInstance = createDriverInstance();
            }
            isLoaded = true;
            isFailed = false;
        }
    } catch (DBException e) {
        // remember the failure so getIcon() can show the error overlay
        isFailed = true;
        throw e;
    }
}
/**
 * Validates that all library files are present, converts them to URLs and builds the
 * driver class loader on top of the data source provider's class loader.
 */
private void loadLibraries()
    throws DBException {
    this.classLoader = null;
    final List<File> libraryFiles = validateFilesPresence(false);
    final List<URL> libraryUrls = new ArrayList<>(libraryFiles.size());
    for (File libraryFile : libraryFiles) {
        try {
            libraryUrls.add(libraryFile.toURI().toURL());
        } catch (MalformedURLException e) {
            // skip files whose path cannot be converted to a URL
            log.error(e);
        }
    }
    // Make class loader
    this.classLoader = new DriverClassLoader(
        this,
        libraryUrls.toArray(new URL[0]),
        getDataSourceProvider().getClass().getClassLoader());
}
/** Re-validates all driver files, resetting resolved versions so they are re-downloaded. */
public void updateFiles() {
    validateFilesPresence(true);
}
    /**
     * Ensures all enabled, platform-matching driver libraries are present on
     * disk, triggering a UI-driven download of missing downloadable libraries.
     *
     * @param resetVersions when true, all downloadable libraries are treated
     *                      as missing and re-downloaded; the driver instance
     *                      is also reset
     * @return local files for all resolved libraries (zip archives expanded
     *         to their contained jars); empty if the user declined download
     */
    @NotNull
    private List<File> validateFilesPresence(boolean resetVersions) {
        boolean localLibsExists = false;
        final List<DBPDriverLibrary> downloadCandidates = new ArrayList<>();
        // Pass 1: classify libraries into local vs. needing download
        for (DBPDriverLibrary library : libraries) {
            if (library.isDisabled()) {
                // Nothing we can do about it
                continue;
            }
            if (!library.matchesCurrentPlatform()) {
                // Wrong OS or architecture
                continue;
            }
            if (library.isDownloadable()) {
                boolean allExists = true;
                if (resetVersions) {
                    allExists = false;
                } else {
                    List<DriverFileInfo> files = resolvedFiles.get(library);
                    if (files == null) {
                        allExists = false;
                    } else {
                        for (DriverFileInfo file : files) {
                            if (file.file == null || !file.file.exists()) {
                                allExists = false;
                                break;
                            }
                        }
                    }
                }
                if (!allExists) {
                    downloadCandidates.add(library);
                }
            } else {
                localLibsExists = true;
            }
        }
//        if (!CommonUtils.isEmpty(fileSources)) {
//            for (DriverFileSource source : fileSources) {
//                for (DriverFileSource.FileInfo fileInfo : source.getFiles()) {
//                    DriverLibraryLocal libraryLocal = new DriverLibraryLocal(this, DBPDriverLibrary.FileType.jar, fileInfo.getName());
//                    final File localFile = libraryLocal.getLocalFile();
//                }
//            }
//        }
        boolean downloaded = false;
        // Pass 2: download missing libraries (requires UI confirmation)
        if (!downloadCandidates.isEmpty() || (!localLibsExists && !fileSources.isEmpty())) {
            final DriverDependencies dependencies = new DriverDependencies(downloadCandidates);
            boolean downloadOk = DBWorkbench.getPlatformUI().downloadDriverFiles(this, dependencies);
            if (!downloadOk) {
                // User cancelled or download failed - report no usable files
                return Collections.emptyList();
            }
            if (resetVersions) {
                resetDriverInstance();
                /*
                for (DBPDriverLibrary library : libraries) {
                    if (!library.isDisabled()) {
                        library.resetVersion();
                    }
                }
                */
            }
            downloaded = true;
            // Record the resolved file set for each downloaded dependency tree
            for (DBPDriverDependencies.DependencyNode node : dependencies.getLibraryMap()) {
                List<DriverFileInfo> info = new ArrayList<>();
                resolvedFiles.put(node.library, info);
                collectLibraryFiles(node, info);
            }
            providerDescriptor.getRegistry().saveDrivers();
        }
        // Pass 3: gather every present local file into the result
        List<File> result = new ArrayList<>();
        for (DBPDriverLibrary library : libraries) {
            if (library.isDisabled() || !library.matchesCurrentPlatform()) {
                // Wrong OS or architecture
                continue;
            }
            if (library.isDownloadable()) {
                List<DriverFileInfo> files = resolvedFiles.get(library);
                if (files != null) {
                    for (DriverFileInfo file : files) {
                        if (file.file != null) {
                            result.add(file.file);
                        }
                    }
                }
            } else {
                if (library.getLocalFile() != null) {
                    result.add(library.getLocalFile());
                }
            }
        }
        // Now check driver version
        if (DBWorkbench.getPlatform().getPreferenceStore().getBoolean(ModelPreferences.UI_DRIVERS_VERSION_UPDATE) && !downloaded) {
            // TODO: implement new version check
            /*
            {
                try {
                    UIUtils.runInProgressService(monitor -> {
                        try {
                            checkDriverVersion(monitor);
                        } catch (IOException e) {
                            throw new InvocationTargetException(e);
                        }
                    });
                } catch (InvocationTargetException e) {
                    log.error(e.getTargetException());
                } catch (InterruptedException e) {
                    // ignore
                }
            }
            */
        }
        // Check if local files are zip archives with jars inside
        return DriverUtils.extractZipArchives(result);
    }
private void checkDriverVersion(DBRProgressMonitor monitor) throws IOException {
for (DBPDriverLibrary library : libraries) {
final Collection<String> availableVersions = library.getAvailableVersions(monitor);
if (!CommonUtils.isEmpty(availableVersions)) {
final String curVersion = library.getVersion();
String latestVersion = VersionUtils.findLatestVersion(availableVersions);
if (latestVersion != null && !latestVersion.equals(curVersion)) {
log.debug("Update driver " + getName() + " " + curVersion + "->" + latestVersion);
}
}
}
}
public boolean isLibraryResolved(DBPDriverLibrary library) {
return !library.isDownloadable() || !CommonUtils.isEmpty(resolvedFiles.get(library));
}
    /**
     * Returns the resolved file descriptors for the given library, or
     * {@code null} if the library has not been resolved yet.
     */
    public Collection<DriverFileInfo> getLibraryFiles(DBPDriverLibrary library) {
        return resolvedFiles.get(library);
    }
private void collectLibraryFiles(DBPDriverDependencies.DependencyNode node, List<DriverFileInfo> files) {
if (node.duplicate) {
return;
}
files.add(new DriverFileInfo(node.library));
for (DBPDriverDependencies.DependencyNode sub : node.dependencies) {
collectLibraryFiles(sub, files);
}
}
private boolean acceptLicense(String licenseText) {
// Check registry
DBPPreferenceStore prefs = DBWorkbench.getPlatform().getPreferenceStore();
String acceptedStr = prefs.getString(LICENSE_ACCEPT_KEY + getId());
if (!CommonUtils.isEmpty(acceptedStr)) {
return true;
}
if (DBWorkbench.getPlatformUI().acceptLicense(
"You have to accept license of '" + getFullName() + " ' to continue",
licenseText))
{
// Save in registry
prefs.setValue(LICENSE_ACCEPT_KEY + getId(), true + ":" + System.currentTimeMillis() + ":" + System.getProperty(StandardConstants.ENV_USER_NAME));
return true;
}
return false;
}
    // Accessors for the driver's original (pre-customization) settings, used
    // to restore defaults when user overrides are discarded.
    public String getOrigName() {
        return origName;
    }
    public String getOrigDescription() {
        return origDescription;
    }
    public String getOrigClassName() {
        return origClassName;
    }
    public String getOrigDefaultPort() {
        return origDefaultPort;
    }
    public String getOrigSampleURL() {
        return origSampleURL;
    }
    public List<DBPDriverLibrary> getOrigFiles() {
        return origFiles;
    }
    /**
     * Returns the shared "drivers" folder located in the platform install
     * location's data area.
     *
     * @throws IOException if the data area location cannot be determined
     */
    public static File getDriversContribFolder() throws IOException {
        return new File(Platform.getInstallLocation().getDataArea(DRIVERS_FOLDER).toExternalForm());
    }
    /**
     * Serializes this driver in the modern JSON format.
     *
     * @param export true when serializing for export rather than local save
     */
    public void serialize(JsonWriter json, boolean export) throws IOException {
        new DriverDescriptorSerializerModern(this).serialize(json, export);
    }
    /**
     * Serializes this driver in the legacy XML format.
     *
     * @deprecated kept for backward compatibility; the JSON form is preferred
     */
    @Deprecated
    public void serialize(XMLBuilder xml, boolean export) throws IOException {
        new DriverDescriptorSerializerLegacy(this).serialize(xml, export);
    }
public DBPNativeClientLocation getDefaultClientLocation() {
DBPNativeClientLocationManager clientManager = getNativeClientManager();
if (clientManager != null) {
return clientManager.getDefaultLocalClientLocation();
}
return null;
}
public static File getCustomDriversHome() {
File homeFolder;
// Try to use custom drivers path from preferences
String driversHome = DBWorkbench.getPlatform().getPreferenceStore().getString(ModelPreferences.UI_DRIVERS_HOME);
if (!CommonUtils.isEmpty(driversHome)) {
homeFolder = new File(driversHome);
} else {
homeFolder = new File(
DBWorkbench.getPlatform().getWorkspace().getAbsolutePath().getParent(),
DBConstants.DEFAULT_DRIVERS_FOLDER);
}
if (!homeFolder.exists()) {
if (!homeFolder.mkdirs()) {
log.warn("Can't create drivers folder '" + homeFolder.getAbsolutePath() + "'");
}
}
return homeFolder;
}
public static String[] getDriversSources() {
String sourcesString = DBWorkbench.getPlatform().getPreferenceStore().getString(ModelPreferences.UI_DRIVERS_SOURCES);
List<String> pathList = CommonUtils.splitString(sourcesString, '|');
return pathList.toArray(new String[0]);
}
public static String getDriversPrimarySource() {
String sourcesString = DBWorkbench.getPlatform().getPreferenceStore().getString(ModelPreferences.UI_DRIVERS_SOURCES);
int divPos = sourcesString.indexOf('|');
return divPos == -1 ? sourcesString : sourcesString.substring(0, divPos);
}
    @Override
    public String toString() {
        // Drivers are identified by their display name
        return name;
    }
    /**
     * Provider/driver id pair naming a driver that replaces this one.
     */
    private static class ReplaceInfo {
        String providerId;
        String driverId;
        private ReplaceInfo(String providerId, String driverId) {
            this.providerId = providerId;
            this.driverId = driverId;
        }
    }
    /**
     * Expands driver-specific variables (e.g. ${drivers_home}) in a path.
     */
    private static String replacePathVariables(String path) {
        return GeneralUtils.replaceVariables(path, new DriverVariablesResolver());
    }
private static String substitutePathVariables(Map<String, String> pathSubstitutions, String path) {
for (Map.Entry<String, String> ps : pathSubstitutions.entrySet()) {
if (path.startsWith(ps.getKey())) {
path = GeneralUtils.variablePattern(ps.getValue()) + path.substring(ps.getKey().length());
break;
}
}
return path;
}
private static class DriverVariablesResolver extends SystemVariablesResolver {
private static final String VAR_DRIVERS_HOME = "drivers_home";
@Override
public String get(String name) {
if (name.equalsIgnoreCase(VAR_DRIVERS_HOME)) {
return getCustomDriversHome().getAbsolutePath();
} else {
return super.get(name);
}
}
}
}
| |
/*
* Copyright 2002-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.aposoft.util.spring;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.lang.reflect.UndeclaredThrowableException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
/**
* Simple utility class for working with the reflection API and handling
* reflection exceptions.
*
* <p>
* Only intended for internal use.
*
* @author Juergen Hoeller
* @author Rob Harrop
* @author Rod Johnson
* @author Costin Leau
* @author Sam Brannen
* @author Chris Beams
* @since 1.2.2
*/
public abstract class ReflectionUtils {
    /**
     * Naming prefix for CGLIB-renamed methods.
     *
     * @see #isCglibRenamedMethod
     */
    private static final String CGLIB_RENAMED_METHOD_PREFIX = "CGLIB$";
    // Shared empty arrays used as cache values, avoiding re-allocation for
    // classes with no declared methods/fields.
    private static final Method[] NO_METHODS = {};
    private static final Field[] NO_FIELDS = {};
    /**
     * Cache for {@link Class#getDeclaredMethods()} plus equivalent default
     * methods from Java 8 based interfaces, allowing for fast iteration.
     */
    private static final Map<Class<?>, Method[]> declaredMethodsCache = new ConcurrentReferenceHashMap<Class<?>, Method[]>(
            256);
    /**
     * Cache for {@link Class#getDeclaredFields()}, allowing for fast iteration.
     */
    private static final Map<Class<?>, Field[]> declaredFieldsCache = new ConcurrentReferenceHashMap<Class<?>, Field[]>(
            256);
    /**
     * Attempt to find a {@link Field field} on the supplied {@link Class} with
     * the supplied {@code name}. Searches all superclasses up to
     * {@link Object}. Delegates to {@link #findField(Class, String, Class)}
     * with no type restriction.
     *
     * @param clazz
     *            the class to introspect
     * @param name
     *            the name of the field
     * @return the corresponding Field object, or {@code null} if not found
     */
    public static Field findField(Class<?> clazz, String name) {
        return findField(clazz, name, null);
    }
/**
* Attempt to find a {@link Field field} on the supplied {@link Class} with
* the supplied {@code name} and/or {@link Class type}. Searches all
* superclasses up to {@link Object}.
*
* @param clazz
* the class to introspect
* @param name
* the name of the field (may be {@code null} if type is
* specified)
* @param type
* the type of the field (may be {@code null} if name is
* specified)
* @return the corresponding Field object, or {@code null} if not found
*/
public static Field findField(Class<?> clazz, String name, Class<?> type) {
Assert.notNull(clazz, "Class must not be null");
Assert.isTrue(name != null || type != null, "Either name or type of the field must be specified");
Class<?> searchType = clazz;
while (Object.class != searchType && searchType != null) {
Field[] fields = getDeclaredFields(searchType);
for (Field field : fields) {
if ((name == null || name.equals(field.getName())) && (type == null || type.equals(field.getType()))) {
return field;
}
}
searchType = searchType.getSuperclass();
}
return null;
}
/**
* Set the field represented by the supplied {@link Field field object} on
* the specified {@link Object target object} to the specified {@code value}
* . In accordance with {@link Field#set(Object, Object)} semantics, the new
* value is automatically unwrapped if the underlying field has a primitive
* type.
* <p>
* Thrown exceptions are handled via a call to
* {@link #handleReflectionException(Exception)}.
*
* @param field
* the field to set
* @param target
* the target object on which to set the field
* @param value
* the value to set (may be {@code null})
*/
public static void setField(Field field, Object target, Object value) {
try {
field.set(target, value);
} catch (IllegalAccessException ex) {
handleReflectionException(ex);
throw new IllegalStateException(
"Unexpected reflection exception - " + ex.getClass().getName() + ": " + ex.getMessage());
}
}
/**
* Get the field represented by the supplied {@link Field field object} on
* the specified {@link Object target object}. In accordance with
* {@link Field#get(Object)} semantics, the returned value is automatically
* wrapped if the underlying field has a primitive type.
* <p>
* Thrown exceptions are handled via a call to
* {@link #handleReflectionException(Exception)}.
*
* @param field
* the field to get
* @param target
* the target object from which to get the field
* @return the field's current value
*/
public static Object getField(Field field, Object target) {
try {
return field.get(target);
} catch (IllegalAccessException ex) {
handleReflectionException(ex);
throw new IllegalStateException(
"Unexpected reflection exception - " + ex.getClass().getName() + ": " + ex.getMessage());
}
}
    /**
     * Attempt to find a {@link Method} on the supplied class with the supplied
     * name and no parameters. Searches all superclasses up to {@code Object}.
     * Delegates to {@link #findMethod(Class, String, Class...)} with an empty
     * parameter list.
     * <p>
     * Returns {@code null} if no {@link Method} can be found.
     *
     * @param clazz
     *            the class to introspect
     * @param name
     *            the name of the method
     * @return the Method object, or {@code null} if none found
     */
    public static Method findMethod(Class<?> clazz, String name) {
        return findMethod(clazz, name, new Class<?>[0]);
    }
/**
* Attempt to find a {@link Method} on the supplied class with the supplied
* name and parameter types. Searches all superclasses up to {@code Object}.
* <p>
* Returns {@code null} if no {@link Method} can be found.
*
* @param clazz
* the class to introspect
* @param name
* the name of the method
* @param paramTypes
* the parameter types of the method (may be {@code null} to
* indicate any signature)
* @return the Method object, or {@code null} if none found
*/
public static Method findMethod(Class<?> clazz, String name, Class<?>... paramTypes) {
Assert.notNull(clazz, "Class must not be null");
Assert.notNull(name, "Method name must not be null");
Class<?> searchType = clazz;
while (searchType != null) {
Method[] methods = (searchType.isInterface() ? searchType.getMethods() : getDeclaredMethods(searchType));
for (Method method : methods) {
if (name.equals(method.getName())
&& (paramTypes == null || Arrays.equals(paramTypes, method.getParameterTypes()))) {
return method;
}
}
searchType = searchType.getSuperclass();
}
return null;
}
    /**
     * Invoke the specified {@link Method} against the supplied target object
     * with no arguments. The target object can be {@code null} when invoking a
     * static {@link Method}. Delegates to the array variant with an empty
     * argument list.
     * <p>
     * Thrown exceptions are handled via a call to
     * {@link #handleReflectionException}.
     *
     * @param method
     *            the method to invoke
     * @param target
     *            the target object to invoke the method on
     * @return the invocation result, if any
     * @see #invokeMethod(java.lang.reflect.Method, Object, Object[])
     */
    public static Object invokeMethod(Method method, Object target) {
        return invokeMethod(method, target, new Object[0]);
    }
    /**
     * Invoke the specified {@link Method} against the supplied target object
     * with the supplied arguments. The target object can be {@code null} when
     * invoking a static {@link Method}.
     * <p>
     * Thrown exceptions are handled via a call to
     * {@link #handleReflectionException}.
     *
     * @param method
     *            the method to invoke
     * @param target
     *            the target object to invoke the method on
     * @param args
     *            the invocation arguments (may be {@code null})
     * @return the invocation result, if any
     */
    public static Object invokeMethod(Method method, Object target, Object... args) {
        try {
            return method.invoke(target, args);
        } catch (Exception ex) {
            handleReflectionException(ex);
        }
        // handleReflectionException always throws, so this is unreachable
        throw new IllegalStateException("Should never get here");
    }
    /**
     * Invoke the specified JDBC API {@link Method} against the supplied target
     * object with no arguments. Delegates to the array variant with an empty
     * argument list.
     *
     * @param method
     *            the method to invoke
     * @param target
     *            the target object to invoke the method on
     * @return the invocation result, if any
     * @throws SQLException
     *             the JDBC API SQLException to rethrow (if any)
     * @see #invokeJdbcMethod(java.lang.reflect.Method, Object, Object[])
     */
    public static Object invokeJdbcMethod(Method method, Object target) throws SQLException {
        return invokeJdbcMethod(method, target, new Object[0]);
    }
    /**
     * Invoke the specified JDBC API {@link Method} against the supplied target
     * object with the supplied arguments.
     *
     * @param method
     *            the method to invoke
     * @param target
     *            the target object to invoke the method on
     * @param args
     *            the invocation arguments (may be {@code null})
     * @return the invocation result, if any
     * @throws SQLException
     *             the JDBC API SQLException to rethrow (if any)
     * @see #invokeMethod(java.lang.reflect.Method, Object, Object[])
     */
    public static Object invokeJdbcMethod(Method method, Object target, Object... args) throws SQLException {
        try {
            return method.invoke(target, args);
        } catch (IllegalAccessException ex) {
            handleReflectionException(ex);
        } catch (InvocationTargetException ex) {
            // Unwrap SQLExceptions so JDBC callers see the original exception
            if (ex.getTargetException() instanceof SQLException) {
                throw (SQLException) ex.getTargetException();
            }
            handleInvocationTargetException(ex);
        }
        // Both handlers above always throw, so this line is unreachable
        throw new IllegalStateException("Should never get here");
    }
    /**
     * Handle the given reflection exception. Should only be called if no
     * checked exception is expected to be thrown by the target method.
     * <p>
     * Throws the underlying RuntimeException or Error in case of an
     * InvocationTargetException with such a root cause. Throws an
     * IllegalStateException with an appropriate message else. This method
     * never returns normally.
     *
     * @param ex
     *            the reflection exception to handle
     */
    public static void handleReflectionException(Exception ex) {
        if (ex instanceof NoSuchMethodException) {
            throw new IllegalStateException("Method not found: " + ex.getMessage());
        }
        if (ex instanceof IllegalAccessException) {
            throw new IllegalStateException("Could not access method: " + ex.getMessage());
        }
        if (ex instanceof InvocationTargetException) {
            // Always throws the unwrapped target exception
            handleInvocationTargetException((InvocationTargetException) ex);
        }
        if (ex instanceof RuntimeException) {
            throw (RuntimeException) ex;
        }
        throw new UndeclaredThrowableException(ex);
    }
    /**
     * Handle the given invocation target exception. Should only be called if no
     * checked exception is expected to be thrown by the target method.
     * <p>
     * Throws the underlying RuntimeException or Error in case of such a root
     * cause. Throws an IllegalStateException else. This method never returns
     * normally.
     *
     * @param ex
     *            the invocation target exception to handle
     */
    public static void handleInvocationTargetException(InvocationTargetException ex) {
        rethrowRuntimeException(ex.getTargetException());
    }
/**
* Rethrow the given {@link Throwable exception}, which is presumably the
* <em>target exception</em> of an {@link InvocationTargetException}. Should
* only be called if no checked exception is expected to be thrown by the
* target method.
* <p>
* Rethrows the underlying exception cast to an {@link RuntimeException} or
* {@link Error} if appropriate; otherwise, throws an
* {@link IllegalStateException}.
*
* @param ex
* the exception to rethrow
* @throws RuntimeException
* the rethrown exception
*/
public static void rethrowRuntimeException(Throwable ex) {
if (ex instanceof RuntimeException) {
throw (RuntimeException) ex;
}
if (ex instanceof Error) {
throw (Error) ex;
}
throw new UndeclaredThrowableException(ex);
}
/**
* Rethrow the given {@link Throwable exception}, which is presumably the
* <em>target exception</em> of an {@link InvocationTargetException}. Should
* only be called if no checked exception is expected to be thrown by the
* target method.
* <p>
* Rethrows the underlying exception cast to an {@link Exception} or
* {@link Error} if appropriate; otherwise, throws an
* {@link IllegalStateException}.
*
* @param ex
* the exception to rethrow
* @throws Exception
* the rethrown exception (in case of a checked exception)
*/
public static void rethrowException(Throwable ex) throws Exception {
if (ex instanceof Exception) {
throw (Exception) ex;
}
if (ex instanceof Error) {
throw (Error) ex;
}
throw new UndeclaredThrowableException(ex);
}
/**
* Determine whether the given method explicitly declares the given
* exception or one of its superclasses, which means that an exception of
* that type can be propagated as-is within a reflective invocation.
*
* @param method
* the declaring method
* @param exceptionType
* the exception to throw
* @return {@code true} if the exception can be thrown as-is; {@code false}
* if it needs to be wrapped
*/
public static boolean declaresException(Method method, Class<?> exceptionType) {
Assert.notNull(method, "Method must not be null");
Class<?>[] declaredExceptions = method.getExceptionTypes();
for (Class<?> declaredException : declaredExceptions) {
if (declaredException.isAssignableFrom(exceptionType)) {
return true;
}
}
return false;
}
/**
* Determine whether the given field is a "public static final" constant.
*
* @param field
* the field to check
*/
public static boolean isPublicStaticFinal(Field field) {
int modifiers = field.getModifiers();
return (Modifier.isPublic(modifiers) && Modifier.isStatic(modifiers) && Modifier.isFinal(modifiers));
}
/**
* Determine whether the given method is an "equals" method.
*
* @see java.lang.Object#equals(Object)
*/
public static boolean isEqualsMethod(Method method) {
if (method == null || !method.getName().equals("equals")) {
return false;
}
Class<?>[] paramTypes = method.getParameterTypes();
return (paramTypes.length == 1 && paramTypes[0] == Object.class);
}
/**
* Determine whether the given method is a "hashCode" method.
*
* @see java.lang.Object#hashCode()
*/
public static boolean isHashCodeMethod(Method method) {
return (method != null && method.getName().equals("hashCode") && method.getParameterTypes().length == 0);
}
/**
* Determine whether the given method is a "toString" method.
*
* @see java.lang.Object#toString()
*/
public static boolean isToStringMethod(Method method) {
return (method != null && method.getName().equals("toString") && method.getParameterTypes().length == 0);
}
/**
* Determine whether the given method is originally declared by
* {@link java.lang.Object}.
*/
public static boolean isObjectMethod(Method method) {
if (method == null) {
return false;
}
try {
Object.class.getDeclaredMethod(method.getName(), method.getParameterTypes());
return true;
} catch (Exception ex) {
return false;
}
}
/**
* Determine whether the given method is a CGLIB 'renamed' method, following
* the pattern "CGLIB$methodName$0".
*
* @param renamedMethod
* the method to check
* @see org.springframework.cglib.proxy.Enhancer#rename
*/
public static boolean isCglibRenamedMethod(Method renamedMethod) {
String name = renamedMethod.getName();
if (name.startsWith(CGLIB_RENAMED_METHOD_PREFIX)) {
int i = name.length() - 1;
while (i >= 0 && Character.isDigit(name.charAt(i))) {
i--;
}
return ((i > CGLIB_RENAMED_METHOD_PREFIX.length()) && (i < name.length() - 1) && name.charAt(i) == '$');
}
return false;
}
/**
* Make the given field accessible, explicitly setting it accessible if
* necessary. The {@code setAccessible(true)} method is only called when
* actually necessary, to avoid unnecessary conflicts with a JVM
* SecurityManager (if active).
*
* @param field
* the field to make accessible
* @see java.lang.reflect.Field#setAccessible
*/
public static void makeAccessible(Field field) {
if ((!Modifier.isPublic(field.getModifiers()) || !Modifier.isPublic(field.getDeclaringClass().getModifiers())
|| Modifier.isFinal(field.getModifiers())) && !field.isAccessible()) {
field.setAccessible(true);
}
}
/**
* Make the given method accessible, explicitly setting it accessible if
* necessary. The {@code setAccessible(true)} method is only called when
* actually necessary, to avoid unnecessary conflicts with a JVM
* SecurityManager (if active).
*
* @param method
* the method to make accessible
* @see java.lang.reflect.Method#setAccessible
*/
public static void makeAccessible(Method method) {
if ((!Modifier.isPublic(method.getModifiers()) || !Modifier.isPublic(method.getDeclaringClass().getModifiers()))
&& !method.isAccessible()) {
method.setAccessible(true);
}
}
/**
* Make the given constructor accessible, explicitly setting it accessible
* if necessary. The {@code setAccessible(true)} method is only called when
* actually necessary, to avoid unnecessary conflicts with a JVM
* SecurityManager (if active).
*
* @param ctor
* the constructor to make accessible
* @see java.lang.reflect.Constructor#setAccessible
*/
public static void makeAccessible(Constructor<?> ctor) {
if ((!Modifier.isPublic(ctor.getModifiers()) || !Modifier.isPublic(ctor.getDeclaringClass().getModifiers()))
&& !ctor.isAccessible()) {
ctor.setAccessible(true);
}
}
/**
* Perform the given callback operation on all matching methods of the given
* class, as locally declared or equivalent thereof (such as default methods
* on Java 8 based interfaces that the given class implements).
*
* @param clazz
* the class to introspect
* @param mc
* the callback to invoke for each method
* @since 4.2
* @see #doWithMethods
*/
public static void doWithLocalMethods(Class<?> clazz, MethodCallback mc) {
Method[] methods = getDeclaredMethods(clazz);
for (Method method : methods) {
try {
mc.doWith(method);
} catch (IllegalAccessException ex) {
throw new IllegalStateException("Not allowed to access method '" + method.getName() + "': " + ex);
}
}
}
    /**
     * Perform the given callback operation on all matching methods of the given
     * class and superclasses. Delegates to the filtered variant with no filter.
     * <p>
     * The same named method occurring on subclass and superclass will appear
     * twice, unless excluded by a {@link MethodFilter}.
     *
     * @param clazz
     *            the class to introspect
     * @param mc
     *            the callback to invoke for each method
     * @see #doWithMethods(Class, MethodCallback, MethodFilter)
     */
    public static void doWithMethods(Class<?> clazz, MethodCallback mc) {
        doWithMethods(clazz, mc, null);
    }
/**
* Perform the given callback operation on all matching methods of the given
* class and superclasses (or given interface and super-interfaces).
* <p>
* The same named method occurring on subclass and superclass will appear
* twice, unless excluded by the specified {@link MethodFilter}.
*
* @param clazz
* the class to introspect
* @param mc
* the callback to invoke for each method
* @param mf
* the filter that determines the methods to apply the callback
* to
*/
public static void doWithMethods(Class<?> clazz, MethodCallback mc, MethodFilter mf) {
// Keep backing up the inheritance hierarchy.
Method[] methods = getDeclaredMethods(clazz);
for (Method method : methods) {
if (mf != null && !mf.matches(method)) {
continue;
}
try {
mc.doWith(method);
} catch (IllegalAccessException ex) {
throw new IllegalStateException("Not allowed to access method '" + method.getName() + "': " + ex);
}
}
if (clazz.getSuperclass() != null) {
doWithMethods(clazz.getSuperclass(), mc, mf);
} else if (clazz.isInterface()) {
for (Class<?> superIfc : clazz.getInterfaces()) {
doWithMethods(superIfc, mc, mf);
}
}
}
/**
* Get all declared methods on the leaf class and all superclasses. Leaf
* class methods are included first.
*
* @param leafClass
* the class to introspect
*/
public static Method[] getAllDeclaredMethods(Class<?> leafClass) {
final List<Method> methods = new ArrayList<Method>(32);
doWithMethods(leafClass, new MethodCallback() {
@Override
public void doWith(Method method) {
methods.add(method);
}
});
return methods.toArray(new Method[methods.size()]);
}
    /**
     * Get the unique set of declared methods on the leaf class and all
     * superclasses. Leaf class methods are included first and while traversing
     * the superclass hierarchy any methods found with signatures matching a
     * method already included are filtered out.
     *
     * @param leafClass
     *            the class to introspect
     */
    public static Method[] getUniqueDeclaredMethods(Class<?> leafClass) {
        final List<Method> methods = new ArrayList<Method>(32);
        doWithMethods(leafClass, new MethodCallback() {
            @Override
            public void doWith(Method method) {
                boolean knownSignature = false;
                Method methodBeingOverriddenWithCovariantReturnType = null;
                for (Method existingMethod : methods) {
                    if (method.getName().equals(existingMethod.getName())
                            && Arrays.equals(method.getParameterTypes(), existingMethod.getParameterTypes())) {
                        // Is this a covariant return type situation?
                        if (existingMethod.getReturnType() != method.getReturnType()
                                && existingMethod.getReturnType().isAssignableFrom(method.getReturnType())) {
                            methodBeingOverriddenWithCovariantReturnType = existingMethod;
                        } else {
                            knownSignature = true;
                        }
                        break;
                    }
                }
                // Prefer the most specific return type for a given signature
                if (methodBeingOverriddenWithCovariantReturnType != null) {
                    methods.remove(methodBeingOverriddenWithCovariantReturnType);
                }
                // Also drop CGLIB-renamed methods from the result
                if (!knownSignature && !isCglibRenamedMethod(method)) {
                    methods.add(method);
                }
            }
        });
        return methods.toArray(new Method[methods.size()]);
    }
    /**
     * This variant retrieves {@link Class#getDeclaredMethods()} from a local
     * cache in order to avoid the JVM's SecurityManager check and defensive
     * array copying. In addition, it also includes Java 8 default methods from
     * locally implemented interfaces, since those are effectively to be treated
     * just like declared methods.
     *
     * @param clazz
     *            the class to introspect
     * @return the cached array of methods
     * @see Class#getDeclaredMethods()
     */
    private static Method[] getDeclaredMethods(Class<?> clazz) {
        Method[] result = declaredMethodsCache.get(clazz);
        if (result == null) {
            Method[] declaredMethods = clazz.getDeclaredMethods();
            // Append default methods from directly implemented interfaces
            List<Method> defaultMethods = findConcreteMethodsOnInterfaces(clazz);
            if (defaultMethods != null) {
                result = new Method[declaredMethods.length + defaultMethods.size()];
                System.arraycopy(declaredMethods, 0, result, 0, declaredMethods.length);
                int index = declaredMethods.length;
                for (Method defaultMethod : defaultMethods) {
                    result[index] = defaultMethod;
                    index++;
                }
            } else {
                result = declaredMethods;
            }
            // NOTE(review): concurrent callers may compute this twice for the
            // same class; presumably acceptable since the value is identical
            declaredMethodsCache.put(clazz, (result.length == 0 ? NO_METHODS : result));
        }
        return result;
    }
private static List<Method> findConcreteMethodsOnInterfaces(Class<?> clazz) {
List<Method> result = null;
for (Class<?> ifc : clazz.getInterfaces()) {
for (Method ifcMethod : ifc.getMethods()) {
if (!Modifier.isAbstract(ifcMethod.getModifiers())) {
if (result == null) {
result = new LinkedList<Method>();
}
result.add(ifcMethod);
}
}
}
return result;
}
/**
* Invoke the given callback on all fields in the target class, going up the
* class hierarchy to get all declared fields.
*
* @param clazz
* the target class to analyze
* @param fc
* the callback to invoke for each field
* @since 4.2
* @see #doWithFields
*/
public static void doWithLocalFields(Class<?> clazz, FieldCallback fc) {
for (Field field : getDeclaredFields(clazz)) {
try {
fc.doWith(field);
} catch (IllegalAccessException ex) {
throw new IllegalStateException("Not allowed to access field '" + field.getName() + "': " + ex);
}
}
}
    /**
     * Invoke the given callback on all fields in the target class, going up the
     * class hierarchy to get all declared fields. Delegates to the filtered
     * variant with no filter.
     *
     * @param clazz
     *            the target class to analyze
     * @param fc
     *            the callback to invoke for each field
     */
    public static void doWithFields(Class<?> clazz, FieldCallback fc) {
        doWithFields(clazz, fc, null);
    }
/**
* Invoke the given callback on all fields in the target class, going up the
* class hierarchy to get all declared fields.
*
* @param clazz
* the target class to analyze
* @param fc
* the callback to invoke for each field
* @param ff
* the filter that determines the fields to apply the callback to
*/
public static void doWithFields(Class<?> clazz, FieldCallback fc, FieldFilter ff) {
// Keep backing up the inheritance hierarchy.
Class<?> targetClass = clazz;
do {
Field[] fields = getDeclaredFields(targetClass);
for (Field field : fields) {
if (ff != null && !ff.matches(field)) {
continue;
}
try {
fc.doWith(field);
} catch (IllegalAccessException ex) {
throw new IllegalStateException("Not allowed to access field '" + field.getName() + "': " + ex);
}
}
targetClass = targetClass.getSuperclass();
} while (targetClass != null && targetClass != Object.class);
}
    /**
     * This variant retrieves {@link Class#getDeclaredFields()} from a local
     * cache in order to avoid the JVM's SecurityManager check and defensive
     * array copying.
     *
     * @param clazz
     *            the class to introspect
     * @return the cached array of fields
     * @see Class#getDeclaredFields()
     */
    private static Field[] getDeclaredFields(Class<?> clazz) {
        Field[] result = declaredFieldsCache.get(clazz);
        if (result == null) {
            result = clazz.getDeclaredFields();
            // Cache a shared empty-array sentinel for fieldless classes
            declaredFieldsCache.put(clazz, (result.length == 0 ? NO_FIELDS : result));
        }
        return result;
    }
/**
 * Given the source object and the destination, which must be the same class
 * or a subclass, copy all fields, including inherited fields. Designed to
 * work on objects with public no-arg constructors.
 * <p>
 * Only fields matched by {@link #COPYABLE_FIELDS} (non-static, non-final)
 * are copied; the copy is shallow (references are shared, not cloned).
 *
 * @param src the source object (must not be {@code null})
 * @param dest the destination object (must not be {@code null}; its class
 *            must be the same as, or a subclass of, the source's class)
 * @throws IllegalArgumentException if either argument is {@code null} or
 *             the classes are incompatible
 */
public static void shallowCopyFieldState(final Object src, final Object dest) {
    if (src == null) {
        throw new IllegalArgumentException("Source for field copy cannot be null");
    }
    if (dest == null) {
        throw new IllegalArgumentException("Destination for field copy cannot be null");
    }
    if (!src.getClass().isAssignableFrom(dest.getClass())) {
        throw new IllegalArgumentException("Destination class [" + dest.getClass().getName()
                + "] must be same or subclass as source class [" + src.getClass().getName() + "]");
    }
    // Copy every copyable field, walking up the hierarchy of the source class.
    FieldCallback copier = new FieldCallback() {
        @Override
        public void doWith(Field field) throws IllegalArgumentException, IllegalAccessException {
            makeAccessible(field);
            field.set(dest, field.get(src));
        }
    };
    doWithFields(src.getClass(), copier, COPYABLE_FIELDS);
}
/**
 * Clear the internal method/field cache.
 * <p>
 * Subsequent lookups will re-reflect and repopulate the caches on demand.
 *
 * @since 4.2.4
 */
public static void clearCache() {
    declaredFieldsCache.clear();
    declaredMethodsCache.clear();
}
/**
 * Action to take on each method visited during a method traversal.
 */
public interface MethodCallback {
    /**
     * Perform an operation using the given method.
     *
     * @param method the method to operate on
     * @throws IllegalArgumentException if the operation receives invalid arguments
     * @throws IllegalAccessException if the method is not accessible
     */
    void doWith(Method method) throws IllegalArgumentException, IllegalAccessException;
}
/**
 * Callback optionally used to filter methods to be operated on by a method
 * callback.
 */
public interface MethodFilter {
    /**
     * Determine whether the given method matches.
     *
     * @param method the method to check
     * @return {@code true} if the callback should be applied to this method
     */
    boolean matches(Method method);
}
/**
 * Callback interface invoked on each field in the hierarchy during a field
 * traversal (see {@code doWithFields}).
 */
public interface FieldCallback {
    /**
     * Perform an operation using the given field.
     *
     * @param field the field to operate on
     * @throws IllegalArgumentException if the operation receives invalid arguments
     * @throws IllegalAccessException if the field is not accessible
     */
    void doWith(Field field) throws IllegalArgumentException, IllegalAccessException;
}
/**
 * Callback optionally used to filter fields to be operated on by a field
 * callback.
 */
public interface FieldFilter {
    /**
     * Determine whether the given field matches.
     *
     * @param field the field to check
     * @return {@code true} if the callback should be applied to this field
     */
    boolean matches(Field field);
}
/**
 * Pre-built FieldFilter that matches all non-static, non-final fields.
 * Declared {@code final} so the shared constant cannot be reassigned by
 * clients.
 */
public static final FieldFilter COPYABLE_FIELDS = new FieldFilter() {
    @Override
    public boolean matches(Field field) {
        int mods = field.getModifiers();
        return !Modifier.isStatic(mods) && !Modifier.isFinal(mods);
    }
};
/**
 * Pre-built MethodFilter that matches all non-bridge methods.
 * Declared {@code final} so the shared constant cannot be reassigned by
 * clients.
 */
public static final MethodFilter NON_BRIDGED_METHODS = new MethodFilter() {
    @Override
    public boolean matches(Method method) {
        return !method.isBridge();
    }
};
/**
 * Pre-built MethodFilter that matches all non-bridge methods which are not
 * declared on {@code java.lang.Object}.
 * Declared {@code final} so the shared constant cannot be reassigned by
 * clients.
 */
public static final MethodFilter USER_DECLARED_METHODS = new MethodFilter() {
    @Override
    public boolean matches(Method method) {
        return (!method.isBridge() && method.getDeclaringClass() != Object.class);
    }
};
}
| |
/*
* Copyright (C) 2007 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package android.preference;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import android.os.Handler;
import android.preference.Preference.OnPreferenceChangeInternalListener;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Adapter;
import android.widget.BaseAdapter;
import android.widget.ListView;
/**
 * An adapter that returns the {@link Preference} contained in this group.
 * In most cases, this adapter should be the base class for any custom
 * adapters from {@link Preference#getAdapter()}.
 * <p>
 * This adapter obeys the
 * {@link Preference}'s adapter rule (the
 * {@link Adapter#getView(int, View, ViewGroup)} should be used instead of
 * {@link Preference#getView(ViewGroup)} if a {@link Preference} has an
 * adapter via {@link Preference#getAdapter()}).
 * <p>
 * This adapter also propagates data change/invalidated notifications upward.
 * <p>
 * This adapter does not include this {@link PreferenceGroup} in the returned
 * adapter, use {@link PreferenceCategoryAdapter} instead.
 *
 * @see PreferenceCategoryAdapter
 */
class PreferenceGroupAdapter extends BaseAdapter implements OnPreferenceChangeInternalListener {

    private static final String TAG = "PreferenceGroupAdapter";

    /**
     * The group that we are providing data from.
     */
    private PreferenceGroup mPreferenceGroup;

    /**
     * Maps a position into this adapter -> {@link Preference}. These
     * {@link Preference}s don't have to be direct children of this
     * {@link PreferenceGroup}, they can be grand children or younger)
     */
    private List<Preference> mPreferenceList;

    /**
     * List of unique Preference and its subclasses' names. This is used to find
     * out how many types of views this adapter can return. Once the count is
     * returned, this cannot be modified (since the ListView only checks the
     * count once--when the adapter is being set). We will not recycle views for
     * Preference subclasses seen after the count has been returned.
     */
    private ArrayList<PreferenceLayout> mPreferenceLayouts;

    // Scratch object reused for lookups to avoid allocating on every call.
    private PreferenceLayout mTempPreferenceLayout = new PreferenceLayout();

    /**
     * Blocks the mPreferenceLayouts from being changed anymore.
     */
    private boolean mHasReturnedViewTypeCount = false;

    private volatile boolean mIsSyncing = false;

    private Handler mHandler = new Handler();

    private Runnable mSyncRunnable = new Runnable() {
        public void run() {
            syncMyPreferences();
        }
    };

    /**
     * Identity used to decide whether two preferences can share a recycled
     * view: class name plus layout resource id plus widget layout resource id.
     */
    private static class PreferenceLayout implements Comparable<PreferenceLayout> {
        private int resId;
        private int widgetResId;
        private String name;

        /**
         * Orders by name, then layout resource id, then widget layout
         * resource id. Uses sign-safe comparisons instead of subtraction,
         * which could overflow for extreme int values and violate the
         * {@link Comparable} contract.
         */
        public int compareTo(PreferenceLayout other) {
            int compareNames = name.compareTo(other.name);
            if (compareNames != 0) {
                return compareNames;
            }
            if (resId != other.resId) {
                return resId < other.resId ? -1 : 1;
            }
            if (widgetResId != other.widgetResId) {
                return widgetResId < other.widgetResId ? -1 : 1;
            }
            return 0;
        }
    }

    public PreferenceGroupAdapter(PreferenceGroup preferenceGroup) {
        mPreferenceGroup = preferenceGroup;
        // If this group gets or loses any children, let us know
        mPreferenceGroup.setOnPreferenceChangeInternalListener(this);
        mPreferenceList = new ArrayList<Preference>();
        mPreferenceLayouts = new ArrayList<PreferenceLayout>();
        syncMyPreferences();
    }

    /**
     * Rebuilds the flattened preference list from the group hierarchy and
     * notifies observers. Re-entrant calls (triggered while syncing) are
     * dropped via the mIsSyncing guard.
     */
    private void syncMyPreferences() {
        synchronized(this) {
            if (mIsSyncing) {
                return;
            }
            mIsSyncing = true;
        }
        List<Preference> newPreferenceList = new ArrayList<Preference>(mPreferenceList.size());
        flattenPreferenceGroup(newPreferenceList, mPreferenceGroup);
        mPreferenceList = newPreferenceList;
        notifyDataSetChanged();
        synchronized(this) {
            mIsSyncing = false;
            notifyAll();
        }
    }

    /**
     * Depth-first flattens {@code group} into {@code preferences}, recursing
     * into nested groups that share this screen, and registers this adapter
     * as the change listener on every visited preference.
     */
    private void flattenPreferenceGroup(List<Preference> preferences, PreferenceGroup group) {
        // TODO: shouldn't always?
        group.sortPreferences();
        final int groupSize = group.getPreferenceCount();
        for (int i = 0; i < groupSize; i++) {
            final Preference preference = group.getPreference(i);
            preferences.add(preference);
            // Track layout identity only until the view type count is fixed.
            if (!mHasReturnedViewTypeCount && !preference.hasSpecifiedLayout()) {
                addPreferenceClassName(preference);
            }
            if (preference instanceof PreferenceGroup) {
                final PreferenceGroup preferenceAsGroup = (PreferenceGroup) preference;
                if (preferenceAsGroup.isOnSameScreenAsChildren()) {
                    flattenPreferenceGroup(preferences, preferenceAsGroup);
                }
            }
            preference.setOnPreferenceChangeInternalListener(this);
        }
    }

    /**
     * Creates a PreferenceLayout that includes the preference name, layout id
     * and widget layout id. If a particular preference type uses 2 different
     * resources, they will be treated as different view types.
     *
     * @param preference the preference to describe
     * @param in an existing object to reuse, or {@code null} to allocate
     * @return the populated PreferenceLayout
     */
    private PreferenceLayout createPreferenceLayout(Preference preference, PreferenceLayout in) {
        PreferenceLayout pl = in != null? in : new PreferenceLayout();
        pl.name = preference.getClass().getName();
        pl.resId = preference.getLayoutResource();
        pl.widgetResId = preference.getWidgetLayoutResource();
        return pl;
    }

    /**
     * Records the preference's layout identity in the sorted layout list,
     * keeping entries unique.
     */
    private void addPreferenceClassName(Preference preference) {
        final PreferenceLayout pl = createPreferenceLayout(preference, null);
        int insertPos = Collections.binarySearch(mPreferenceLayouts, pl);
        // Only insert if it doesn't exist (when it is negative).
        if (insertPos < 0) {
            // Convert to insert index
            insertPos = insertPos * -1 - 1;
            mPreferenceLayouts.add(insertPos, pl);
        }
    }

    public int getCount() {
        return mPreferenceList.size();
    }

    /** Returns the preference at {@code position}, or null if out of range. */
    public Preference getItem(int position) {
        if (position < 0 || position >= getCount()) return null;
        return mPreferenceList.get(position);
    }

    public long getItemId(int position) {
        if (position < 0 || position >= getCount()) return ListView.INVALID_ROW_ID;
        return this.getItem(position).getId();
    }

    public View getView(int position, View convertView, ViewGroup parent) {
        final Preference preference = this.getItem(position);
        // Build a PreferenceLayout to compare with known ones that are cacheable.
        mTempPreferenceLayout = createPreferenceLayout(preference, mTempPreferenceLayout);
        // If it's not one of the cached ones, set the convertView to null so that
        // the layout gets re-created by the Preference.
        if (Collections.binarySearch(mPreferenceLayouts, mTempPreferenceLayout) < 0) {
            convertView = null;
        }
        return preference.getView(convertView, parent);
    }

    @Override
    public boolean isEnabled(int position) {
        if (position < 0 || position >= getCount()) return true;
        return this.getItem(position).isSelectable();
    }

    @Override
    public boolean areAllItemsEnabled() {
        // There should always be a preference group, and these groups are always
        // disabled
        return false;
    }

    public void onPreferenceChange(Preference preference) {
        notifyDataSetChanged();
    }

    public void onPreferenceHierarchyChange(Preference preference) {
        // Coalesce bursts of hierarchy changes into a single re-sync.
        mHandler.removeCallbacks(mSyncRunnable);
        mHandler.post(mSyncRunnable);
    }

    @Override
    public boolean hasStableIds() {
        return true;
    }

    @Override
    public int getItemViewType(int position) {
        if (!mHasReturnedViewTypeCount) {
            mHasReturnedViewTypeCount = true;
        }
        final Preference preference = this.getItem(position);
        if (preference.hasSpecifiedLayout()) {
            return IGNORE_ITEM_VIEW_TYPE;
        }
        mTempPreferenceLayout = createPreferenceLayout(preference, mTempPreferenceLayout);
        int viewType = Collections.binarySearch(mPreferenceLayouts, mTempPreferenceLayout);
        if (viewType < 0) {
            // This is a class that was seen after we returned the count, so
            // don't recycle it.
            return IGNORE_ITEM_VIEW_TYPE;
        } else {
            return viewType;
        }
    }

    @Override
    public int getViewTypeCount() {
        if (!mHasReturnedViewTypeCount) {
            mHasReturnedViewTypeCount = true;
        }
        return Math.max(1, mPreferenceLayouts.size());
    }
}
| |
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.android.sunshine.app.data;
import android.content.ContentValues;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.test.AndroidTestCase;
import java.util.HashSet;
public class TestDb extends AndroidTestCase {
public static final String LOG_TAG = TestDb.class.getSimpleName();
// Since we want each test to start with a clean slate
void deleteTheDatabase() {
mContext.deleteDatabase(RestaurantDataBaseHelper.DATABASE_NAME);
}
/*
This function gets called before each test is executed to delete the database. This makes
sure that we always have a clean test.
*/
public void setUp() {
deleteTheDatabase();
}
/*
Students: Uncomment this test once you've written the code to create the Location
table. Note that you will have to have chosen the same column names that I did in
my solution for this test to compile, so if you haven't yet done that, this is
a good time to change your column names to match mine.
Note that this only tests that the Location table has the correct columns, since we
give you the code for the weather table. This test does not look at the
*/
public void testCreateDb() throws Throwable {
// build a HashSet of all of the table names we wish to look for
// Note that there will be another table in the DB that stores the
// Android metadata (db version information)
final HashSet<String> tableNameHashSet = new HashSet<String>();
tableNameHashSet.add(WeatherContract.LocationEntry.TABLE_NAME);
tableNameHashSet.add(WeatherContract.WeatherEntry.TABLE_NAME);
mContext.deleteDatabase(RestaurantDataBaseHelper.DATABASE_NAME);
SQLiteDatabase db = new RestaurantDataBaseHelper(
this.mContext).getWritableDatabase();
assertEquals(true, db.isOpen());
// have we created the tables we want?
Cursor c = db.rawQuery("SELECT name FROM sqlite_master WHERE type='table'", null);
assertTrue("Error: This means that the database has not been created correctly",
c.moveToFirst());
// verify that the tables have been created
do {
tableNameHashSet.remove(c.getString(0));
} while( c.moveToNext() );
// if this fails, it means that your database doesn't contain both the location entry
// and weather entry tables
assertTrue("Error: Your database was created without both the location entry and weather entry tables",
tableNameHashSet.isEmpty());
// now, do our tables contain the correct columns?
c = db.rawQuery("PRAGMA table_info(" + WeatherContract.LocationEntry.TABLE_NAME + ")",
null);
assertTrue("Error: This means that we were unable to query the database for table information.",
c.moveToFirst());
// Build a HashSet of all of the column names we want to look for
final HashSet<String> locationColumnHashSet = new HashSet<String>();
locationColumnHashSet.add(WeatherContract.LocationEntry._ID);
locationColumnHashSet.add(WeatherContract.LocationEntry.COLUMN_CITY_NAME);
locationColumnHashSet.add(WeatherContract.LocationEntry.COLUMN_COORD_LAT);
locationColumnHashSet.add(WeatherContract.LocationEntry.COLUMN_COORD_LONG);
locationColumnHashSet.add(WeatherContract.LocationEntry.COLUMN_LOCATION_SETTING);
int columnNameIndex = c.getColumnIndex("name");
do {
String columnName = c.getString(columnNameIndex);
locationColumnHashSet.remove(columnName);
} while(c.moveToNext());
// if this fails, it means that your database doesn't contain all of the required location
// entry columns
assertTrue("Error: The database doesn't contain all of the required location entry columns",
locationColumnHashSet.isEmpty());
db.close();
}
/*
Students: Here is where you will build code to test that we can insert and query the
location database. We've done a lot of work for you. You'll want to look in TestUtilities
where you can uncomment out the "createNorthPoleLocationValues" function. You can
also make use of the ValidateCurrentRecord function from within TestUtilities.
*/
public void testLocationTable() {
insertLocation();
}
/*
Students: Here is where you will build code to test that we can insert and query the
database. We've done a lot of work for you. You'll want to look in TestUtilities
where you can use the "createWeatherValues" function. You can
also make use of the validateCurrentRecord function from within TestUtilities.
*/
public void testWeatherTable() {
// First insert the location, and then use the locationRowId to insert
// the weather. Make sure to cover as many failure cases as you can.
// Instead of rewriting all of the code we've already written in testLocationTable
// we can move this code to insertLocation and then call insertLocation from both
// tests. Why move it? We need the code to return the ID of the inserted location
// and our testLocationTable can only return void because it's a test.
long locationRowId = insertLocation();
// Make sure we have a valid row ID.
assertFalse("Error: Location Not Inserted Correctly", locationRowId == -1L);
// First step: Get reference to writable database
// If there's an error in those massive SQL table creation Strings,
// errors will be thrown here when you try to get a writable database.
RestaurantDataBaseHelper dbHelper = new RestaurantDataBaseHelper(mContext);
SQLiteDatabase db = dbHelper.getWritableDatabase();
// Second Step (Weather): Create weather values
ContentValues weatherValues = TestUtilities.createWeatherValues(locationRowId);
// Third Step (Weather): Insert ContentValues into database and get a row ID back
long weatherRowId = db.insert(WeatherContract.WeatherEntry.TABLE_NAME, null, weatherValues);
assertTrue(weatherRowId != -1);
// Fourth Step: Query the database and receive a Cursor back
// A cursor is your primary interface to the query results.
Cursor weatherCursor = db.query(
WeatherContract.WeatherEntry.TABLE_NAME, // Table to Query
null, // leaving "columns" null just returns all the columns.
null, // cols for "where" clause
null, // values for "where" clause
null, // columns to group by
null, // columns to filter by row groups
null // sort order
);
// Move the cursor to the first valid database row and check to see if we have any rows
assertTrue( "Error: No Records returned from location query", weatherCursor.moveToFirst() );
// Fifth Step: Validate the location Query
TestUtilities.validateCurrentRecord("testInsertReadDb weatherEntry failed to validate",
weatherCursor, weatherValues);
// Move the cursor to demonstrate that there is only one record in the database
assertFalse( "Error: More than one record returned from weather query",
weatherCursor.moveToNext() );
// Sixth Step: Close cursor and database
weatherCursor.close();
dbHelper.close();
}
/*
Students: This is a helper method for the testWeatherTable quiz. You can move your
code from testLocationTable to here so that you can call this code from both
testWeatherTable and testLocationTable.
*/
public long insertLocation() {
// First step: Get reference to writable database
// If there's an error in those massive SQL table creation Strings,
// errors will be thrown here when you try to get a writable database.
RestaurantDataBaseHelper dbHelper = new RestaurantDataBaseHelper(mContext);
SQLiteDatabase db = dbHelper.getWritableDatabase();
// Second Step: Create ContentValues of what you want to insert
// (you can use the createNorthPoleLocationValues if you wish)
ContentValues testValues = TestUtilities.createNorthPoleLocationValues();
// Third Step: Insert ContentValues into database and get a row ID back
long locationRowId;
locationRowId = db.insert(WeatherContract.LocationEntry.TABLE_NAME, null, testValues);
// Verify we got a row back.
assertTrue(locationRowId != -1);
// Data's inserted. IN THEORY. Now pull some out to stare at it and verify it made
// the round trip.
// Fourth Step: Query the database and receive a Cursor back
// A cursor is your primary interface to the query results.
Cursor cursor = db.query(
WeatherContract.LocationEntry.TABLE_NAME, // Table to Query
null, // all columns
null, // Columns for the "where" clause
null, // Values for the "where" clause
null, // columns to group by
null, // columns to filter by row groups
null // sort order
);
// Move the cursor to a valid database row and check to see if we got any records back
// from the query
assertTrue( "Error: No Records returned from location query", cursor.moveToFirst() );
// Fifth Step: Validate data in resulting Cursor with the original ContentValues
// (you can use the validateCurrentRecord function in TestUtilities to validate the
// query if you like)
TestUtilities.validateCurrentRecord("Error: Location Query Validation Failed",
cursor, testValues);
// Move the cursor to demonstrate that there is only one record in the database
assertFalse( "Error: More than one record returned from location query",
cursor.moveToNext() );
// Sixth Step: Close Cursor and Database
cursor.close();
db.close();
return locationRowId;
}
}
| |
package com.laytonsmith.tools.docgen;
import com.laytonsmith.PureUtilities.ClassLoading.ClassDiscovery;
import com.laytonsmith.PureUtilities.Common.StreamUtils;
import com.laytonsmith.abstraction.Implementation;
import com.laytonsmith.annotations.api;
import com.laytonsmith.annotations.typeof;
import com.laytonsmith.commandhelper.CommandHelperFileLocations;
import com.laytonsmith.core.MSLog;
import com.laytonsmith.core.Documentation;
import com.laytonsmith.core.Installer;
import com.laytonsmith.core.Prefs;
import com.laytonsmith.core.events.Event;
import com.laytonsmith.core.exceptions.CRE.CREThrowable;
import com.laytonsmith.core.exceptions.ConfigCompileException;
import com.laytonsmith.core.extensions.ExtensionManager;
import com.laytonsmith.core.functions.Function;
import com.laytonsmith.core.functions.FunctionBase;
import com.laytonsmith.core.functions.FunctionList;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.TreeSet;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
*
*
*/
public class DocGen {
/**
 * Command-line entry point. Performs boilerplate startup (implementation
 * type, class discovery, extension manager, installer, preferences, logging)
 * and then exits: status 0 on success, 1 if any throwable escapes startup.
 *
 * @param args unused command-line arguments
 * @throws Exception never in practice; retained for signature compatibility
 */
public static void main(String[] args) throws Exception {
    int exitCode = 0;
    try {
        //Boilerplate startup stuff
        Implementation.setServerType(Implementation.Type.BUKKIT);
        ClassDiscovery.getDefaultInstance().addDiscoveryLocation(ClassDiscovery.GetClassContainer(DocGen.class));
        ExtensionManager.Initialize(ClassDiscovery.getDefaultInstance());
        Installer.Install(CommandHelperFileLocations.getDefault().getConfigDirectory());
        Prefs.init(CommandHelperFileLocations.getDefault().getPreferencesFile());
        MSLog.initialize(CommandHelperFileLocations.getDefault().getConfigDirectory());
        //StreamUtils.GetSystemOut().println(functions("wiki", api.Platforms.INTERPRETER_JAVA, true));
        // StreamUtils.GetSystemOut().println(examples("if", true));
        //System.exit(0);
        //events("wiki");
        //StreamUtils.GetSystemOut().println(Template("persistence_network"));
    } catch (Throwable t) {
        t.printStackTrace();
        exitCode = 1;
    }
    // Single exit point. The original exited inside catch AND finally,
    // which only produced the right status because System.exit() never
    // returns; this form makes the intent explicit.
    System.exit(exitCode);
}
// private static String examples(String function, boolean staged) throws Exception {
// FunctionBase fb = FunctionList.getFunction(new CFunction(function, Target.UNKNOWN));
// if(fb instanceof Function) {
// Function f = (Function) fb;
// String restricted = (f instanceof Function && ((Function) f).isRestricted()) ? "<div style=\"background-color: red; font-weight: bold; text-align: center;\">Yes</div>"
// : "<div style=\"background-color: green; font-weight: bold; text-align: center;\">No</div>";
// String optimizationMessage = "None";
// if(f instanceof Optimizable) {
// Set<Optimizable.OptimizationOption> options = ((Optimizable) f).optimizationOptions();
// List<String> list = new ArrayList<String>();
// for(Optimizable.OptimizationOption option : options) {
// list.add("[[CommandHelper/" + (staged ? "Staged/" : "") + "Optimizer#" + option.name() + "|" + option.name() + "]]");
// }
// optimizationMessage = StringUtils.Join(list, "<br />");
// }
// DocInfo di = new DocInfo(f.docs());
// StringBuilder thrown = new StringBuilder();
// if(f instanceof Function && ((Function) f).thrown() != null) {
// List thrownList = Arrays.asList(((Function) f).thrown());
// for(int i = 0; i < thrownList.size(); i++) {
// String t = ((Class<? extends CREThrowable>) thrownList.get(i)).getAnnotation(typeof.class).value();
// if(i != 0) {
// thrown.append("<br />\n");
// }
// thrown.append("[[CommandHelper/Exceptions#").append(t).append("|").append(t).append("]]");
// }
// }
// String tableUsages = di.originalArgs.replace("|", "<hr />");
// String[] usages = di.originalArgs.split("\\|");
// StringBuilder usageBuilder = new StringBuilder();
// for(String usage : usages) {
// usageBuilder.append("<pre>\n").append(f.getName()).append("(").append(usage.trim()).append(")\n</pre>");
// }
// StringBuilder exampleBuilder = new StringBuilder();
// if(f.examples() != null && f.examples().length > 0) {
// int count = 1;
// //If the output was automatically generated, change the color of the pre
// for(ExampleScript es : f.examples()) {
// exampleBuilder.append("====Example ").append(count).append("====\n")
// .append(es.getDescription()).append("\n\n"
// + "Given the following code:\n");
// exampleBuilder.append(SimpleSyntaxHighlighter.Highlight(es.getScript(), true)).append("\n");
// String style = "";
// if(es.isAutomatic()) {
// style = " style=\"background-color: #BDC7E9\"";
// exampleBuilder.append("\n\nThe output would be:\n<pre");
// } else {
// exampleBuilder.append("\n\nThe output might be:\n<pre");
// }
// exampleBuilder.append(style).append(">").append(es.getOutput()).append("</pre>\n");
// count++;
// }
// } else {
// exampleBuilder.append("Sorry, there are no examples for this function! :(");
// }
//
// Class[] seeAlso = f.seeAlso();
// String seeAlsoText = "";
// if(seeAlso != null && seeAlso.length > 0) {
// seeAlsoText += "===See Also===\n";
// boolean first = true;
// for(Class c : seeAlso) {
// if(!first) {
// seeAlsoText += ", ";
// }
// first = false;
// if(Function.class.isAssignableFrom(c)) {
// Function f2 = (Function) c.newInstance();
// seeAlsoText += "<code>[[CommandHelper/" + (staged ? "Staged/" : "") + "API/" + f2.getName() + "|" + f2.getName() + "]]</code>";
// } else if(Template.class.isAssignableFrom(c)) {
// Template t = (Template) c.newInstance();
// seeAlsoText += "[[CommandHelper/" + (staged ? "Staged/" : "") + t.getName() + "|Learning Trail: " + t.getDisplayName() + "]]";
// } else {
// throw new Error("Unsupported class found in @seealso annotation: " + c.getName());
// }
// }
// }
//
// Map<String, String> templateFields = new HashMap<>();
// templateFields.put("function_name", f.getName());
// templateFields.put("returns", di.ret);
// templateFields.put("tableUsages", tableUsages);
// templateFields.put("throws", thrown.toString());
// templateFields.put("since", f.since().toString());
// templateFields.put("restricted", restricted);
// templateFields.put("optimizationMessage", optimizationMessage);
// templateFields.put("description", di.extendedDesc == null ? di.desc : di.topDesc + "\n\n" + di.extendedDesc);
// templateFields.put("usages", usageBuilder.toString());
// templateFields.put("examples", exampleBuilder.toString());
// templateFields.put("staged", staged ? "Staged/" : "");
// templateFields.put("seeAlso", seeAlsoText);
//
// String template = StreamUtils.GetString(DocGenTemplates.class.getResourceAsStream("/templates/example_templates"));
// //Find all the %%templates%% in the template
// Matcher m = Pattern.compile("%%(.*?)%%").matcher(template);
// try {
// while(m.find()) {
// String name = m.group(1);
// String templateValue = templateFields.get(name);
// template = template.replaceAll("%%" + Pattern.quote(name) + "%%", templateValue.replace("$", "\\$").replaceAll("\\'", "\\\\'"));
// }
// return template;
// } catch (RuntimeException e) {
// throw new RuntimeException("Caught a runtime exception while generating template for " + function, e);
// }
// } else {
// throw new RuntimeException(function + " does not implement Function");
// }
// }
/**
* Returns the documentation for a single function.
*
* @param type The type of output to use. May be one of: html, wiki, text
* @param platform The platform we're using
* @param staged Is this for the staged wiki?
* @return
* @throws ConfigCompileException
*/
@SuppressWarnings("StringConcatenationInsideStringBufferAppend")
public static String functions(MarkupType type, api.Platforms platform, boolean staged) throws ConfigCompileException {
Set<FunctionBase> functions = FunctionList.getFunctionList(platform, null);
HashMap<Class, ArrayList<FunctionBase>> functionlist = new HashMap<Class, ArrayList<FunctionBase>>();
StringBuilder out = new StringBuilder();
for(FunctionBase f : functions) {
//Sort the functions into classes
Class apiClass = (f.getClass().getEnclosingClass() != null
? f.getClass().getEnclosingClass()
: null);
ArrayList<FunctionBase> fl = functionlist.get(apiClass);
if(fl == null) {
fl = new ArrayList<FunctionBase>();
functionlist.put(apiClass, fl);
}
fl.add(f);
}
if(type == MarkupType.HTML) {
out.append("Command Helper uses a language called MethodScript, which greatly extend the capabilities of the plugin, "
+ "and make the plugin a fully "
+ "<a href=\"http://en.wikipedia.org/wiki/Turing_Complete\">Turing Complete</a> language. "
+ "There are several functions defined, and they are grouped into \"classes\". \n");
} else if(type == MarkupType.WIKI) {
out.append("Command Helper uses a language called MethodScript, which greatly extend the capabilities of the plugin, "
+ "and make the plugin a fully "
+ "[http://en.wikipedia.org/wiki/Turing_Complete Turing Complete] language. "
+ "There are several functions defined, and they are grouped into \"classes\". \n");
out.append("<p>Each function has its own page for documentation, where you can view examples for how to use a"
+ " particular function.\n");
} else if(type == MarkupType.TEXT) {
out.append("Command Helper uses a language called MethodScript, which greatly extend the capabilities of the plugin, "
+ "and make the plugin a fully "
+ "Turing Complete language [http://en.wikipedia.org/wiki/Turing_Complete].\n"
+ "There are several functions defined, and they are grouped into \"classes\".\n");
}
List<Map.Entry<Class, ArrayList<FunctionBase>>> entrySet = new ArrayList<Map.Entry<Class, ArrayList<FunctionBase>>>(functionlist.entrySet());
Collections.sort(entrySet, new Comparator<Map.Entry<Class, ArrayList<FunctionBase>>>() {
@Override
public int compare(Map.Entry<Class, ArrayList<FunctionBase>> o1, Map.Entry<Class, ArrayList<FunctionBase>> o2) {
return o1.getKey().getName().compareTo(o2.getKey().getName());
}
});
int total = 0;
int workingExamples = 0;
for(Map.Entry<Class, ArrayList<FunctionBase>> entry : entrySet) {
Class apiClass = entry.getKey();
String className = apiClass.getName().split("\\.")[apiClass.getName().split("\\.").length - 1];
if(className.equals("Sandbox")) {
continue; //Skip Sandbox functions
}
String classDocs = null;
try {
Method m = apiClass.getMethod("docs", (Class[]) null);
Object o = null;
if((m.getModifiers() & Modifier.STATIC) == 0) {
try {
o = apiClass.newInstance();
} catch (InstantiationException ex) {
}
}
classDocs = (String) m.invoke(o, (Object[]) null);
} catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException | NoSuchMethodException ex) {
} catch (Exception e) {
e.printStackTrace(StreamUtils.GetSystemErr());
StreamUtils.GetSystemErr().println("Continuing however.");
}
StringBuilder intro = new StringBuilder();
if(type == MarkupType.HTML) {
if(className != null) {
intro.append("<h1>").append(className).append("</h1>" + "\n");
intro.append(classDocs == null ? "" : classDocs).append("\n");
} else {
intro.append("<h1>Other Functions</h1>" + "\n");
}
intro.append("<table>" + "\n");
} else if(type == MarkupType.WIKI) {
if(className != null) {
intro.append("===").append(className).append("===" + "\n");
intro.append(classDocs == null ? "" : classDocs).append("\n");
} else {
intro.append("===Other Functions===" + "\n");
}
intro.append("{| width=\"100%\" cellspacing=\"1\" cellpadding=\"1\" border=\"1\" class=\"wikitable\"\n"
+ "|-\n"
+ "! scope=\"col\" width=\"6%\" | Function Name\n"
+ "! scope=\"col\" width=\"5%\" | Returns\n"
+ "! scope=\"col\" width=\"10%\" | Arguments\n"
+ "! scope=\"col\" width=\"10%\" | Throws\n"
+ "! scope=\"col\" width=\"61%\" | Description\n"
+ "! scope=\"col\" width=\"3%\" | Since\n"
+ "! scope=\"col\" width=\"5%\" | Restricted" + "\n");
} else if(type == MarkupType.TEXT) {
intro.append("\n").append(className).append("\n");
intro.append("**********************************************************************************************" + "\n");
if(className != null) {
intro.append(classDocs == null ? "" : classDocs).append("\n");
} else {
intro.append("Other Functions" + "\n");
}
intro.append("**********************************************************************************************" + "\n");
}
List<FunctionBase> documentableFunctions = new ArrayList<FunctionBase>();
for(FunctionBase f : entry.getValue()) {
if(f.appearInDocumentation()) {
documentableFunctions.add(f);
}
}
if(!documentableFunctions.isEmpty()) {
out.append(intro.toString() + "\n");
}
Collections.sort(documentableFunctions, new Comparator<FunctionBase>() {
@Override
public int compare(FunctionBase o1, FunctionBase o2) {
return o1.getName().compareTo(o2.getName());
}
});
for(FunctionBase f : documentableFunctions) {
total++;
String doc = f.docs();
String restricted = (f instanceof Function && ((Function) f).isRestricted()) ? "<div style=\"background-color: red; font-weight: bold; text-align: center;\">Yes</div>"
: "<div style=\"background-color: green; font-weight: bold; text-align: center;\">No</div>";
StringBuilder thrown = new StringBuilder();
if(f instanceof Function && ((Function) f).thrown() != null) {
List<Class<? extends CREThrowable>> thrownList = Arrays.asList(((Function) f).thrown());
for(int i = 0; i < thrownList.size(); i++) {
String t = ((Class<? extends CREThrowable>) thrownList.get(i)).getAnnotation(typeof.class).value();
if(type == MarkupType.HTML || type == MarkupType.TEXT) {
if(i != 0) {
thrown.append((type == MarkupType.HTML ? "<br />\n" : " | "));
}
thrown.append(t);
} else {
if(i != 0) {
thrown.append("<br />\n");
}
thrown.append("[[CommandHelper/Exceptions#").append(t).append("|").append(t).append("]]");
}
}
}
String since = (f instanceof Documentation ? ((Documentation) f).since().toString() : "0.0.0");
DocInfo di = new DocInfo(doc);
boolean hasExample = false;
if(f instanceof Function && ((Function) f).examples() != null && ((Function) f).examples().length > 0) {
hasExample = true;
workingExamples++;
}
if(di.ret == null || di.args == null || di.desc == null) {
out.append(f.getName() + "'s documentation is not correctly formatted. Please check it and try again.\n");
}
if(type == MarkupType.HTML) {
out.append("<tr><td>" + di.ret + "</td><td>" + di.args + "</td><td>" + thrown.toString() + "</td><td>" + di.desc + "</td><td>" + since + "</td><td>" + restricted + "</td></tr>\n");
} else if(type == MarkupType.WIKI) {
//Turn args into a prettified version
out.append("|- id=\"" + f.getName() + "\"\n"
+ "! scope=\"row\" | [[CommandHelper/" + (staged ? "Staged/" : "") + "API/" + f.getName() + "|" + f.getName() + "]]()\n"
+ "| " + di.ret + "\n"
+ "| " + di.args + "\n"
+ "| " + thrown.toString() + "\n"
+ "| " + (di.topDesc != null ? di.topDesc + " [[CommandHelper/" + (staged ? "Staged/" : "") + "API/" + f.getName() + "#Description|See More...]]" : di.desc)
+ (hasExample ? "<br />([[CommandHelper/" + (staged ? "Staged/" : "") + "API/" + f.getName() + "#Examples|Examples...]])" : "") + "\n"
+ "| " + since + "\n"
+ "| " + restricted + "\n");
} else if(type == MarkupType.TEXT) {
out.append(di.ret + " " + f.getName() + "(" + di.args + ")" + " {" + thrown.toString() + "}\n\t" + di.desc + "\n\t" + since + ((f instanceof Function ? ((Function) f).isRestricted() : false) ? "\n\tThis function is restricted"
: "\n\tThis function is not restricted\n"));
}
}
if(!documentableFunctions.isEmpty()) {
if(type == MarkupType.HTML) {
out.append("</table>\n");
} else if(type == MarkupType.WIKI) {
out.append("|}\n{{Back to top}}\n");
} else if(type == MarkupType.TEXT) {
out.append("\n");
}
}
}
if(type == MarkupType.HTML) {
out.append(""
+ "<h2>Errors in documentation</h2>\n"
+ "<em>Please note that this documentation is generated automatically,"
+ " if you notice an error in the documentation, please file a bug report for the"
+ " plugin itself!</em>"
+ "<div style='text-size:small; text-decoration:italics; color:grey'>There are " + total + " functions in this API page</div>\n");
} else if(type == MarkupType.WIKI) {
out.append(""
+ "===Errors in documentation===\n"
+ "''Please note that this documentation is generated automatically,"
+ " if you notice an error in the documentation, please file a bug report for the"
+ " plugin itself!'' For information on undocumented functions, see [[CommandHelper/Sandbox|this page]]"
+ "<div style='font-size:xx-small; font-style:italic; color:grey'>There are " + total + " functions in this API page, " + workingExamples + " of which"
+ " have examples.</div>\n\n{{Back to top}}\n{{LearningTrail}}\n");
}
return out.toString();
}
public static String Template(String template, boolean staged) {
Map<String, String> customTemplates = new HashMap<String, String>();
customTemplates.put("staged", staged ? "Staged/" : "");
return DocGenTemplates.Generate(template, customTemplates);
}
	/**
	 * Generates the combined documentation page for all discoverable events,
	 * rendered in the requested markup format (HTML, WIKI or TEXT).
	 *
	 * @param type The output markup format
	 * @return The rendered event documentation page
	 */
	public static String events(MarkupType type) {
		// Discover every @api-tagged class; only classes that implement BOTH
		// Event and Documentation are documentable here.
		Set<Class<?>> classes = ClassDiscovery.getDefaultInstance().loadClassesWithAnnotation(api.class);
		// TreeSet keeps events sorted via Documentation's natural ordering.
		Set<Documentation> list = new TreeSet<Documentation>();
		for(Class<?> c : classes) {
			if(Event.class.isAssignableFrom(c)
					&& Documentation.class.isAssignableFrom(c)) {
				try {
					//First, we have to instantiate the event.
					Constructor<Event> cons = (Constructor<Event>) c.getConstructor();
					Documentation docs = cons.newInstance();
					list.add(docs);
				} catch (Exception ex) {
					// Best effort: an event that cannot be constructed is skipped,
					// but the omission is logged so it remains visible.
					StreamUtils.GetSystemErr().println("Could not get documentation for " + c.getSimpleName());
				}
			}
		}
		StringBuilder doc = new StringBuilder();
		// Format-specific page introduction and table header.
		if(type == MarkupType.HTML) {
			doc.append("Events allow you to trigger scripts not just on commands, but also on other actions, such as"
					+ " a player logging in, or a player breaking a block. See the documentation on events for"
					+ " more information"
					+ "<table><thead><tr><th>Name</th><th>Description</th><th>Prefilters</th>"
					+ "<th>Event Data</th><th>Mutable Fields</th><th>Since</th></thead><tbody>");
		} else if(type == MarkupType.WIKI) {
			doc.append("Events allow you to trigger scripts not just on commands, but also on other actions, such as"
					+ " a player logging in, or a player breaking a block. See the [[CommandHelper/Events|documentation on events]] for"
					+ " more information<br />\n\n");
			doc.append("{| width=\"100%\" cellspacing=\"1\" cellpadding=\"1\" border=\"1\" class=\"wikitable\"\n"
					+ "|-\n"
					+ "! scope=\"col\" width=\"7%\" | Event Name\n"
					+ "! scope=\"col\" width=\"36%\" | Description\n"
					+ "! scope=\"col\" width=\"18%\" | Prefilters\n"
					+ "! scope=\"col\" width=\"18%\" | Event Data\n"
					+ "! scope=\"col\" width=\"18%\" | Mutable Fields\n"
					+ "! scope=\"col\" width=\"3%\" | Since\n");
		} else if(type == MarkupType.TEXT) {
			doc.append("Events allow you to trigger scripts not just on commands, but also on other actions, such as"
					+ " a player logging in, or a player breaking a block. See the documentation on events for"
					+ " more information\n\n\n");
		}
		// Raw event docs are formatted as:
		//   {prefilters} description {event data} {mutable fields}
		// where each braced section is a pipe-separated list of "name: description" entries.
		Pattern p = Pattern.compile("\\{(.*?)\\} *?(.*?) *?\\{(.*?)\\} *?\\{(.*?)\\}");
		for(Documentation d : list) {
			Matcher m = p.matcher(d.docs());
			if(m.find()) {
				// Events whose docs do not match the expected pattern are silently
				// omitted from the page (no else branch).
				String name = d.getName();
				String description = m.group(2).trim();
				String prefilter = PrefilterData.Get(m.group(1).split("\\|"), type);
				String eventData = EventData.Get(m.group(3).split("\\|"), type);
				String mutability = MutabilityData.Get(m.group(4).split("\\|"), type);
				//String manualTrigger = ManualTriggerData.Get(m.group(5).split("\\|"), type);
				String since = d.since().toString();
				// One table row (or text stanza) per event.
				if(type == MarkupType.HTML) {
					doc.append("<tr><td style=\"vertical-align:top\">").append(name).append("</td><td style=\"vertical-align:top\">").append(description).append("</td><td style=\"vertical-align:top\">").append(prefilter).append("</td><td style=\"vertical-align:top\">").append(eventData).append("</td><td style=\"vertical-align:top\">").append(mutability).append("</td><td style=\"vertical-align:top\">").append(since).append("</td></tr>\n");
				} else if(type == MarkupType.WIKI) {
					doc.append("|-\n" + "! scope=\"row\" | [[CommandHelper/Event API/").append(name).append("|").append(name).append("]]\n" + "| ").append(description).append("\n" + "| ").append(prefilter).append("\n" + "| ").append(eventData).append("\n" + "| ").append(mutability).append("\n" + "| ").append(since).append("\n");
				} else if(type == MarkupType.TEXT) {
					doc.append("Name: ").append(name).append("\nDescription: ").append(description).append("\nPrefilters:\n").append(prefilter).append("\nEvent Data:\n").append(eventData).append("\nMutable Fields:\n").append(mutability).append("\nSince: ").append(since).append("\n\n");
				}
			}
		}
		// Close the table, where the format has one.
		if(type == MarkupType.HTML) {
			doc.append("</tbody></table>\n");
		} else if(type == MarkupType.WIKI) {
			doc.append("|}\n");
		}
		// Format-specific page footer with the standard errata notice.
		if(type == MarkupType.HTML) {
			doc.append(""
					+ "<h2>Errors in documentation</h2>\n"
					+ "<em>Please note that this documentation is generated automatically,"
					+ " if you notice an error in the documentation, please file a bug report for the"
					+ " plugin itself!</em>\n");
		} else if(type == MarkupType.WIKI) {
			doc.append(""
					+ "===Errors in documentation===\n"
					+ "''Please note that this documentation is generated automatically,"
					+ " if you notice an error in the documentation, please file a bug report for the"
					+ " plugin itself!'' For information on undocumented functions, see [[CommandHelper/Sandbox|this page]]\n\n{{LearningTrail}}\n");
		}
		return doc.toString();
	}
public static class PrefilterData {
public static String Get(String[] data, MarkupType type) {
StringBuilder b = new StringBuilder();
boolean first = true;
if(data.length == 1 && "".equals(data[0].trim())) {
return "";
}
for(String d : data) {
int split = d.indexOf(':');
String name;
String description;
if(split == -1) {
name = d;
description = "";
} else {
name = d.substring(0, split).trim();
description = ExpandMacro(d.substring(split + 1).trim(), type);
}
if(type == MarkupType.HTML) {
b.append(first ? "" : "<br />").append("<strong>").append(name).append("</strong>: ").append(description);
} else if(type == MarkupType.WIKI) {
b.append(first ? "" : "<br />").append("'''").append(name).append("''': ").append(description);
} else if(type == MarkupType.TEXT) {
b.append(first ? "" : "\n").append("\t").append(name).append(": ").append(description);
} else if(type == MarkupType.MARKDOWN) {
b.append(first ? "" : " \n").append("**").append(name).append("**: ").append(description);
}
first = false;
}
return b.toString();
}
private static String ExpandMacro(String macro, MarkupType type) {
if(type == MarkupType.HTML) {
return "<em>" + macro
.replaceAll("<string match>", "<String Match>")
.replaceAll("<boolean match>", "<Boolean Match>")
.replaceAll("<regex>", "<Regex>")
.replaceAll("<location match>", "<Location Match>")
.replaceAll("<math match>", "<Math Match>")
.replaceAll("<macro>", "<Macro>")
.replaceAll("<expression>", "<Expression>") + "</em>";
} else if(type == MarkupType.WIKI) {
return macro
.replaceAll("<string match>", "[[CommandHelper/Events/Prefilters#String Match|String Match]]")
.replaceAll("<boolean match>", "[[CommandHelper/Events/Prefilters#Boolean Match|Boolean Match]]")
.replaceAll("<regex>", "[[CommandHelper/Events/Prefilters#Regex|Regex]]")
.replaceAll("<location match>", "[[CommandHelper/Events/Prefilters#Location Match|Location Match]]")
.replaceAll("<math match>", "[[CommandHelper/Events/Prefilters#Math Match|Math Match]]")
.replaceAll("<macro>", "[[CommandHelper/Events/Prefilters#Macro|Macro]]")
.replaceAll("<expression>", "[[CommandHelper/Events/Prefilters#Expression|Expression]]");
} else if(type == MarkupType.TEXT || type == MarkupType.MARKDOWN) {
return macro
.replaceAll("<string match>", "<String Match>")
.replaceAll("<boolean match>", "<Boolean Match>")
.replaceAll("<regex>", "<Regex>")
.replaceAll("<location match>", "<Location Match>")
.replaceAll("<math match>", "<Math Match>")
.replaceAll("<macro>", "<Macro>")
.replaceAll("<expression>", "<Expression>");
}
return macro;
}
}
public static class EventData {
public static String Get(String[] data, MarkupType type) {
StringBuilder b = new StringBuilder();
boolean first = true;
if(data.length == 1 && "".equals(data[0].trim())) {
return "";
}
for(String d : data) {
int split = d.indexOf(':');
String name;
String description;
if(split == -1) {
name = d;
description = "";
} else {
name = d.substring(0, split).trim();
description = d.substring(split + 1).trim();
}
if(type == MarkupType.HTML) {
b.append(first ? "" : "<br />").append("<strong>").append(name).append("</strong>: ").append(description);
} else if(type == MarkupType.WIKI) {
b.append(first ? "" : "<br />").append("'''").append(name).append("''': ").append(description);
} else if(type == MarkupType.TEXT) {
b.append(first ? "" : "\n").append("\t").append(name).append(": ").append(description);
} else if(type == MarkupType.MARKDOWN) {
b.append(first ? "" : " \n").append("**").append(name).append("**").append(": ").append(description);
}
first = false;
}
return b.toString();
}
}
public static class MutabilityData {
public static String Get(String[] data, MarkupType type) {
StringBuilder b = new StringBuilder();
boolean first = true;
if(data.length == 1 && "".equals(data[0].trim())) {
return "";
}
for(String d : data) {
int split = d.indexOf(':');
if(split == -1) {
if(type == MarkupType.HTML) {
b.append(first ? "" : "<br />").append("<strong>").append(d.trim()).append("</strong>");
} else if(type == MarkupType.WIKI) {
b.append(first ? "" : "<br />").append("'''").append(d.trim()).append("'''");
} else if(type == MarkupType.TEXT) {
b.append(first ? "" : "\n").append("\t").append(d.trim());
} else if(type == MarkupType.MARKDOWN) {
b.append(first ? "" : " \n").append("**").append(d.trim()).append("**");
}
} else {
String name = d.substring(0, split).trim();
String description = d.substring(split).trim();
if(type == MarkupType.HTML) {
b.append(first ? "" : "<br />").append("<strong>").append(name).append("</strong>: ").append(description);
} else if(type == MarkupType.WIKI) {
b.append(first ? "" : "<br />").append("'''").append(name).append("''': ").append(description);
} else if(type == MarkupType.TEXT) {
b.append(first ? "" : "\n").append("\t").append(name).append(": ").append(description);
} else if(type == MarkupType.MARKDOWN) {
b.append(first ? "" : " \n").append("**").append(name).append("**: ").append(description);
}
}
first = false;
}
return b.toString();
}
}
public static class EventDocInfo {
public static class PrefilterData {
/**
* The prefilter name
*/
public final String name;
/**
* The description. Possibly empty string, but never null
*/
public final String description;
public PrefilterData(String name, String description) {
Objects.requireNonNull(name, "name must not be null");
if(description == null) {
description = "";
}
this.name = name.trim();
this.description = description.trim();
}
/**
* Returns the prefilter description, formatted for the given type
*
* @param type
* @return
*/
public String formatDescription(MarkupType type) {
return DocGen.PrefilterData.ExpandMacro(description, type);
}
}
public static class EventData {
/**
* The event name
*/
public final String name;
/**
* The description. Possibly empty string, but never null
*/
public final String description;
public EventData(String name, String description) {
Objects.requireNonNull(name, "name must not be null");
if(description == null) {
description = "";
}
this.name = name.trim();
this.description = description.trim();
}
}
public static class MutabilityData {
/**
* The mutable field name
*/
public final String name;
/**
* The description. Possibly empty string, but never null.
*/
public final String description;
public MutabilityData(String name, String description) {
Objects.requireNonNull(name, "name must not be null");
if(description == null) {
description = "";
}
this.name = name.trim();
this.description = description.trim();
}
}
public final String description;
public final List<PrefilterData> prefilter;
public final List<EventData> eventData;
public final List<MutabilityData> mutability;
private static final Pattern EVENT_PATTERN = Pattern.compile("\\{(.*?)\\} *?(.*?) *?\\{(.*?)\\} *?\\{(.*?)\\}");
public EventDocInfo(String docs, String eventName) {
Matcher m = EVENT_PATTERN.matcher(docs);
if(m.find()) {
description = m.group(2).trim();
prefilter = new ArrayList<>();
for(String p : m.group(1).split("\\|")) {
if("".equals(p)) {
continue;
}
String[] d = p.split(":");
prefilter.add(new PrefilterData(d[0], d.length > 1 ? d[1] : ""));
}
eventData = new ArrayList<>();
for(String e : m.group(3).split("\\|")) {
if("".equals(e)) {
continue;
}
String[] d = e.split(":");
eventData.add(new EventData(d[0], d.length > 1 ? d[1] : ""));
}
mutability = new ArrayList<>();
for(String mu : m.group(4).split("\\|")) {
if("".equals(mu)) {
continue;
}
String[] d = mu.split(":");
mutability.add(new MutabilityData(d[0], d.length > 1 ? d[1] : ""));
}
} else {
throw new IllegalArgumentException("Invalid docs formatting for " + eventName + ": \"" + docs + "\"");
}
}
}
public static class DocInfo {
/**
* The return type
*/
public String ret;
/**
* The args, with html styling in place
*/
public String args;
/**
* The args, without html styling in place (but with [ brackets ] to denote optional arguments
*/
public String originalArgs;
/**
* The full description, if the ---- separator isn't present, or the top description if not present.
*/
public String desc;
/**
* The top description, or null if the ---- separator isn't present.
*/
public String topDesc = null;
/**
* The extended description, or null if the ---- separator isn't present.
*/
public String extendedDesc = null;
public DocInfo(String doc) {
Pattern p = Pattern.compile("(?s)\\s*(.*?)\\s*\\{(.*?)\\}\\s*(.*)\\s*");
Matcher m = p.matcher(doc);
if(m.find()) {
ret = m.group(1);
originalArgs = m.group(2);
desc = m.group(3);
if(desc.contains("----")) {
String[] parts = desc.split("----", 2);
desc = topDesc = parts[0].trim();
extendedDesc = parts[1].trim();
}
} else {
throw new IllegalArgumentException("Could not generate DocInfo from string: \"" + doc + "\"");
}
args = originalArgs.replaceAll("\\|", "<hr />").replaceAll("\\[(.*?)\\]", "<strong>[</strong>$1<strong>]</strong>");
}
}
public static enum MarkupType {
HTML, WIKI, TEXT, MARKDOWN;
}
}