gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.xwalk.core.internal;
import android.content.Context;
import android.content.pm.PackageManager;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
import android.os.Process;
import android.provider.Settings;
import android.webkit.WebSettings;
import org.chromium.base.annotations.CalledByNative;
import org.chromium.base.annotations.JNINamespace;
import org.chromium.base.ThreadUtils;
import org.chromium.content_public.browser.WebContents;
/**
* Settings for single XWalkView object
*/
@JNINamespace("xwalk")
@XWalkAPI(createInternally = true)
public class XWalkSettingsInternal {
private static final String TAG = "XWalkSettings";
// This class must be created on the UI thread. Afterwards, it can be
// used from any thread. Internally, the class uses a message queue
// to call native code on the UI thread only.
// Lock to protect all settings.
private final Object mXWalkSettingsLock = new Object();
private final Context mContext;
private boolean mAllowScriptsToCloseWindows = true;
private boolean mLoadsImagesAutomatically = true;
private boolean mImagesEnabled = true;
private boolean mJavaScriptEnabled = true;
private boolean mAllowUniversalAccessFromFileURLs = false;
private boolean mAllowFileAccessFromFileURLs = false;
private boolean mJavaScriptCanOpenWindowsAutomatically = true;
private int mCacheMode = WebSettings.LOAD_DEFAULT;
private boolean mSupportMultipleWindows = false;
private boolean mAppCacheEnabled = true;
private boolean mDomStorageEnabled = true;
private boolean mDatabaseEnabled = true;
private boolean mUseWideViewport = false;
private boolean mMediaPlaybackRequiresUserGesture = false;
private String mDefaultVideoPosterURL;
private final boolean mPasswordEchoEnabled;
// Not accessed by the native side.
private boolean mBlockNetworkLoads; // Default depends on permission of embedding APK.
private boolean mAllowContentUrlAccess = true;
private boolean mAllowFileUrlAccess = true;
private boolean mShouldFocusFirstNode = true;
private boolean mGeolocationEnabled = true;
private String mUserAgent;
private String mAcceptLanguages;
// Protects access to settings global fields.
private static final Object sGlobalContentSettingsLock = new Object();
// For compatibility with the legacy WebView, we can only enable AppCache when the path is
// provided. However, we don't use the path, so we just check if we have received it from the
// client.
private static boolean sAppCachePathIsSet = false;
// The native side of this object.
private long mNativeXWalkSettings = 0;
// A flag to avoid sending superfluous synchronization messages.
private boolean mIsUpdateWebkitPrefsMessagePending = false;
// Custom handler that queues messages to call native code on the UI thread.
private final EventHandler mEventHandler;
private static final int MINIMUM_FONT_SIZE = 1;
private static final int MAXIMUM_FONT_SIZE = 72;
private int mDefaultFontSize = 16;
private int mDefaultFixedFontSize = 13;
private boolean mAutoCompleteEnabled = true;
private float mInitialPageScalePercent = 0;
private double mDIPScale = 1.0;
private int mTextSizePercent = 100;
private ZoomSupportChangeListener mZoomChangeListener;
private boolean mSupportZoom = true;
private boolean mBuiltInZoomControls = false;
private boolean mDisplayZoomControls = true;
private boolean mSpatialNavigationEnabled = true;
// Initialization-on-demand holder: the nativeGetDefaultUserAgent() JNI call
// is deferred until the first time sInstance is referenced (class load),
// avoiding native work when the default UA is never needed.
static class LazyDefaultUserAgent{
// Default user-agent string, computed once via JNI on first access.
private static final String sInstance = nativeGetDefaultUserAgent();
}
// Class to handle messages to be processed on the UI thread.
// Bridges background-thread setting changes onto the UI thread, where all
// native synchronization must happen. Until bindUiThread() is called,
// maybe* methods are no-ops (mHandler is null).
private class EventHandler {
// Message id for updating Webkit preferences
private static final int UPDATE_WEBKIT_PREFERENCES = 0;
// Actual UI thread handler
private Handler mHandler;
EventHandler() {
}
// Lazily attaches the handler to the UI thread looper; idempotent.
void bindUiThread() {
if (mHandler != null) return;
mHandler = new Handler(ThreadUtils.getUiThreadLooper()) {
@Override
public void handleMessage(Message msg) {
switch (msg.what) {
case UPDATE_WEBKIT_PREFERENCES:
synchronized (mXWalkSettingsLock) {
updateWebkitPreferencesOnUiThread();
mIsUpdateWebkitPrefsMessagePending = false;
// Wake any background thread blocked in
// updateWebkitPreferencesLocked() below.
mXWalkSettingsLock.notifyAll();
}
break;
}
}
};
}
// Runs r synchronously on the UI thread, but only once bound.
void maybeRunOnUiThreadBlocking(Runnable r) {
if (mHandler != null) {
ThreadUtils.runOnUiThreadBlocking(r);
}
}
// Posts r asynchronously to the UI thread, but only once bound.
void maybePostOnUiThread(Runnable r) {
if (mHandler != null) {
mHandler.post(r);
}
}
// Syncs webkit preferences to native. On the UI thread this is immediate;
// from a background thread it posts a message and blocks until the UI
// thread has applied the settings. Caller must hold mXWalkSettingsLock.
private void updateWebkitPreferencesLocked() {
assert Thread.holdsLock(mXWalkSettingsLock);
if (mNativeXWalkSettings == 0) return;
if (mHandler == null) return;
if (ThreadUtils.runningOnUiThread()) {
updateWebkitPreferencesOnUiThread();
} else {
// We're being called on a background thread, so post a message.
if (mIsUpdateWebkitPrefsMessagePending) {
return;
}
mIsUpdateWebkitPrefsMessagePending = true;
mHandler.sendMessage(Message.obtain(null, UPDATE_WEBKIT_PREFERENCES));
// We must block until the settings have been sync'd to native to
// ensure that they have taken effect.
// NOTE(review): InterruptedException is swallowed without
// re-interrupting; consider Thread.currentThread().interrupt().
try {
while (mIsUpdateWebkitPrefsMessagePending) {
mXWalkSettingsLock.wait();
}
} catch (InterruptedException e) {}
}
}
}
interface ZoomSupportChangeListener {
public void onGestureZoomSupportChanged(
boolean supportsDoubleTapZoom, boolean supportsMultiTouchZoom);
}
// Never use this constructor.
// It is only used in XWalkSettingsBridge.
// Leaves the instance inert: no context, no event handler, so no setting
// change can be propagated to native from an object built this way.
XWalkSettingsInternal() {
mContext = null;
mEventHandler = null;
mPasswordEchoEnabled = false;
}
// Main constructor. Must be called on the UI thread; afterwards the object
// may be used from any thread (see class comment).
// Network loads are blocked by default iff the embedding APK lacks the
// INTERNET permission.
XWalkSettingsInternal(Context context, WebContents webContents,
boolean isAccessFromFileURLsGrantedByDefault) {
ThreadUtils.assertOnUiThread();
mContext = context;
mBlockNetworkLoads = mContext.checkPermission(
android.Manifest.permission.INTERNET,
Process.myPid(),
Process.myUid()) != PackageManager.PERMISSION_GRANTED;
if (isAccessFromFileURLsGrantedByDefault) {
mAllowUniversalAccessFromFileURLs = true;
mAllowFileAccessFromFileURLs = true;
}
mUserAgent = LazyDefaultUserAgent.sInstance;
// Respect the system setting for password echoing.
mPasswordEchoEnabled = Settings.System.getInt(context.getContentResolver(),
Settings.System.TEXT_SHOW_PASSWORD, 1) == 1;
// Event handler must exist before setWebContents(), which binds it to the
// UI thread and performs the initial native sync.
mEventHandler = new EventHandler();
setWebContents(webContents);
}
// Rebinds this settings object to a new WebContents: tears down the old
// native peer (if any), then creates a fresh one and pushes the entire
// current settings state down to it.
void setWebContents(WebContents webContents) {
synchronized (mXWalkSettingsLock) {
if (mNativeXWalkSettings != 0) {
nativeDestroy(mNativeXWalkSettings);
// nativeDestroy is expected to call back nativeXWalkSettingsGone(),
// which zeroes mNativeXWalkSettings — TODO confirm on native side.
assert mNativeXWalkSettings == 0;
}
if (webContents != null) {
mEventHandler.bindUiThread();
mNativeXWalkSettings = nativeInit(webContents);
nativeUpdateEverythingLocked(mNativeXWalkSettings);
}
}
}
// Invoked from native when the native XWalkSettings peer is destroyed;
// clears the cached pointer so no further calls are made on a dead object.
@CalledByNative
private void nativeXWalkSettingsGone(long nativeXWalkSettings) {
assert mNativeXWalkSettings != 0 && mNativeXWalkSettings == nativeXWalkSettings;
mNativeXWalkSettings = 0;
}
// Sets whether JavaScript may close windows. Note: unlike most setters in
// this class, this does NOT trigger a webkit-preferences sync; the value is
// read by native via updateEverything()/getters.
public void setAllowScriptsToCloseWindows(boolean allow) {
synchronized (mXWalkSettingsLock) {
if (mAllowScriptsToCloseWindows != allow) {
mAllowScriptsToCloseWindows = allow;
}
}
}
// Returns whether JavaScript may close windows.
public boolean getAllowScriptsToCloseWindows() {
synchronized (mXWalkSettingsLock) {
return mAllowScriptsToCloseWindows;
}
}
/**
* See {@link android.webkit.WebSettings#setCacheMode}.
*/
public void setCacheMode(int mode) {
synchronized (mXWalkSettingsLock) {
if (mCacheMode != mode) {
mCacheMode = mode;
}
}
}
/**
* See {@link android.webkit.WebSettings#getCacheMode}.
*/
public int getCacheMode() {
synchronized (mXWalkSettingsLock) {
return mCacheMode;
}
}
/**
* See {@link android.webkit.WebSettings#setBlockNetworkLoads}.
*
* @throws SecurityException if network loads are being enabled while the
*         embedding APK lacks the INTERNET permission.
*/
public void setBlockNetworkLoads(boolean flag) {
synchronized (mXWalkSettingsLock) {
if (!flag && mContext.checkPermission(
android.Manifest.permission.INTERNET,
Process.myPid(),
Process.myUid()) != PackageManager.PERMISSION_GRANTED) {
throw new SecurityException("Permission denied - " +
"application missing INTERNET permission");
}
mBlockNetworkLoads = flag;
}
}
/**
* See {@link android.webkit.WebSettings#getBlockNetworkLoads}.
*/
public boolean getBlockNetworkLoads() {
synchronized (mXWalkSettingsLock) {
return mBlockNetworkLoads;
}
}
/**
* See {@link android.webkit.WebSettings#setAllowFileAccess}.
*/
public void setAllowFileAccess(boolean allow) {
synchronized (mXWalkSettingsLock) {
if (mAllowFileUrlAccess != allow) {
mAllowFileUrlAccess = allow;
}
}
}
/**
* See {@link android.webkit.WebSettings#getAllowFileAccess}.
*/
public boolean getAllowFileAccess() {
synchronized (mXWalkSettingsLock) {
return mAllowFileUrlAccess;
}
}
/**
* See {@link android.webkit.WebSettings#setAllowContentAccess}.
*/
public void setAllowContentAccess(boolean allow) {
synchronized (mXWalkSettingsLock) {
if (mAllowContentUrlAccess != allow) {
mAllowContentUrlAccess = allow;
}
}
}
/**
* See {@link android.webkit.WebSettings#getAllowContentAccess}.
*/
public boolean getAllowContentAccess() {
synchronized (mXWalkSettingsLock) {
return mAllowContentUrlAccess;
}
}
/**
* See {@link android.webkit.WebSettings#setGeolocationEnabled}.
*/
public void setGeolocationEnabled(boolean flag) {
synchronized (mXWalkSettingsLock) {
if (mGeolocationEnabled != flag) {
mGeolocationEnabled = flag;
}
}
}
/**
* @return Returns if geolocation is currently enabled.
*/
boolean getGeolocationEnabled() {
synchronized (mXWalkSettingsLock) {
return mGeolocationEnabled;
}
}
/**
* See {@link android.webkit.WebSettings#setJavaScriptEnabled}.
*/
public void setJavaScriptEnabled(boolean flag) {
synchronized (mXWalkSettingsLock) {
if (mJavaScriptEnabled != flag) {
mJavaScriptEnabled = flag;
mEventHandler.updateWebkitPreferencesLocked();
}
}
}
/**
* See {@link android.webkit.WebSettings#setAllowUniversalAccessFromFileURLs}.
*/
public void setAllowUniversalAccessFromFileURLs(boolean flag) {
synchronized (mXWalkSettingsLock) {
if (mAllowUniversalAccessFromFileURLs != flag) {
mAllowUniversalAccessFromFileURLs = flag;
mEventHandler.updateWebkitPreferencesLocked();
}
}
}
/**
* See {@link android.webkit.WebSettings#setAllowFileAccessFromFileURLs}.
*/
public void setAllowFileAccessFromFileURLs(boolean flag) {
synchronized (mXWalkSettingsLock) {
if (mAllowFileAccessFromFileURLs != flag) {
mAllowFileAccessFromFileURLs = flag;
mEventHandler.updateWebkitPreferencesLocked();
}
}
}
/**
* See {@link android.webkit.WebSettings#setLoadsImagesAutomatically}.
*/
public void setLoadsImagesAutomatically(boolean flag) {
synchronized (mXWalkSettingsLock) {
if (mLoadsImagesAutomatically != flag) {
mLoadsImagesAutomatically = flag;
mEventHandler.updateWebkitPreferencesLocked();
}
}
}
/**
* See {@link android.webkit.WebSettings#getLoadsImagesAutomatically}.
*/
public boolean getLoadsImagesAutomatically() {
synchronized (mXWalkSettingsLock) {
return mLoadsImagesAutomatically;
}
}
/**
* See {@link android.webkit.WebSettings#setImagesEnabled}.
*/
public void setImagesEnabled(boolean flag) {
synchronized (mXWalkSettingsLock) {
if (mImagesEnabled != flag) {
mImagesEnabled = flag;
mEventHandler.updateWebkitPreferencesLocked();
}
}
}
/**
* See {@link android.webkit.WebSettings#getImagesEnabled}.
*/
public boolean getImagesEnabled() {
synchronized (mXWalkSettingsLock) {
return mImagesEnabled;
}
}
/**
* See {@link android.webkit.WebSettings#getJavaScriptEnabled}.
*/
public boolean getJavaScriptEnabled() {
synchronized (mXWalkSettingsLock) {
return mJavaScriptEnabled;
}
}
/**
* See {@link android.webkit.WebSettings#getAllowUniversalAccessFromFileURLs}.
*/
public boolean getAllowUniversalAccessFromFileURLs() {
synchronized (mXWalkSettingsLock) {
return mAllowUniversalAccessFromFileURLs;
}
}
/**
* See {@link android.webkit.WebSettings#getAllowFileAccessFromFileURLs}.
*/
public boolean getAllowFileAccessFromFileURLs() {
synchronized (mXWalkSettingsLock) {
return mAllowFileAccessFromFileURLs;
}
}
/**
* See {@link android.webkit.WebSettings#setJavaScriptCanOpenWindowsAutomatically}.
*/
public void setJavaScriptCanOpenWindowsAutomatically(boolean flag) {
synchronized (mXWalkSettingsLock) {
if (mJavaScriptCanOpenWindowsAutomatically != flag) {
mJavaScriptCanOpenWindowsAutomatically = flag;
mEventHandler.updateWebkitPreferencesLocked();
}
}
}
/**
* See {@link android.webkit.WebSettings#getJavaScriptCanOpenWindowsAutomatically}.
*/
public boolean getJavaScriptCanOpenWindowsAutomatically() {
synchronized (mXWalkSettingsLock) {
return mJavaScriptCanOpenWindowsAutomatically;
}
}
/**
* See {@link android.webkit.WebSettings#setSupportMultipleWindows}.
*/
public void setSupportMultipleWindows(boolean support) {
synchronized (mXWalkSettingsLock) {
if (mSupportMultipleWindows != support) {
mSupportMultipleWindows = support;
mEventHandler.updateWebkitPreferencesLocked();
}
}
}
/**
* See {@link android.webkit.WebSettings#supportMultipleWindows}.
*/
public boolean supportMultipleWindows() {
synchronized (mXWalkSettingsLock) {
return mSupportMultipleWindows;
}
}
/**
* Sets whether the XWalkView should enable support for the "viewport" HTML
* meta tag or should use a wide viewport. When the value of the setting is
* false, the layout width is always set to the width of the XWalkView control
* in device-independent (CSS) pixels. When the value is true and the page
* contains the viewport meta tag, the value of the width specified in the
* tag is used. If the page does not contain the tag or does not provide a
* width, then a wide viewport will be used.
* @param use whether to enable support for the viewport meta tag.
* @since 6.0
*/
@XWalkAPI
public void setUseWideViewPort(boolean use) {
synchronized (mXWalkSettingsLock) {
if (mUseWideViewport != use) {
mUseWideViewport = use;
mEventHandler.updateWebkitPreferencesLocked();
}
}
}
/**
* Gets whether the XWalkView supports the "viewport" HTML meta tag or will
* use a wide viewport.
* @return true if the XWalkView supports the viewport meta tag.
* @since 6.0
*/
@XWalkAPI
public boolean getUseWideViewPort() {
synchronized (mXWalkSettingsLock) {
return mUseWideViewport;
}
}
/**
* See {@link android.webkit.WebSettings#setAppCacheEnabled}.
*/
public void setAppCacheEnabled(boolean flag) {
synchronized (mXWalkSettingsLock) {
if (mAppCacheEnabled != flag) {
mAppCacheEnabled = flag;
mEventHandler.updateWebkitPreferencesLocked();
}
}
}
/**
* See {@link android.webkit.WebSettings#setAppCachePath}.
* The path itself is never used by this implementation; only the fact that a
* non-empty path was supplied matters (legacy WebView compatibility). The
* global flag is guarded by sGlobalContentSettingsLock, separate from the
* per-instance settings lock.
*/
public void setAppCachePath(String path) {
boolean needToSync = false;
synchronized (sGlobalContentSettingsLock) {
// AppCachePath can only be set once.
if (!sAppCachePathIsSet && path != null && !path.isEmpty()) {
sAppCachePathIsSet = true;
needToSync = true;
}
}
// The obvious problem here is that other WebViews will not be updated,
// until they execute synchronization from Java to the native side.
// But this is the same behaviour as it was in the legacy WebView.
if (needToSync) {
synchronized (mXWalkSettingsLock) {
mEventHandler.updateWebkitPreferencesLocked();
}
}
}
/**
* Gets whether Application Cache is enabled.
*
* @return true if Application Cache is enabled
* @hide
*/
@CalledByNative
private boolean getAppCacheEnabled() {
// When no app cache path is set, use chromium default cache path.
return mAppCacheEnabled;
}
/**
* See {@link android.webkit.WebSettings#setDomStorageEnabled}.
*/
public void setDomStorageEnabled(boolean flag) {
synchronized (mXWalkSettingsLock) {
if (mDomStorageEnabled != flag) {
mDomStorageEnabled = flag;
mEventHandler.updateWebkitPreferencesLocked();
}
}
}
/**
* See {@link android.webkit.WebSettings#getDomStorageEnabled}.
*/
public boolean getDomStorageEnabled() {
synchronized (mXWalkSettingsLock) {
return mDomStorageEnabled;
}
}
/**
* See {@link android.webkit.WebSettings#setDatabaseEnabled}.
*/
public void setDatabaseEnabled(boolean flag) {
synchronized (mXWalkSettingsLock) {
if (mDatabaseEnabled != flag) {
mDatabaseEnabled = flag;
mEventHandler.updateWebkitPreferencesLocked();
}
}
}
/**
* See {@link android.webkit.WebSettings#getDatabaseEnabled}.
*/
public boolean getDatabaseEnabled() {
synchronized (mXWalkSettingsLock) {
return mDatabaseEnabled;
}
}
/**
* See {@link android.webkit.WebSettings#setMediaPlaybackRequiresUserGesture}.
*/
public void setMediaPlaybackRequiresUserGesture(boolean require) {
synchronized (mXWalkSettingsLock) {
if (mMediaPlaybackRequiresUserGesture != require) {
mMediaPlaybackRequiresUserGesture = require;
mEventHandler.updateWebkitPreferencesLocked();
}
}
}
/**
* See {@link android.webkit.WebSettings#getMediaPlaybackRequiresUserGesture}.
*/
public boolean getMediaPlaybackRequiresUserGesture() {
synchronized (mXWalkSettingsLock) {
return mMediaPlaybackRequiresUserGesture;
}
}
/**
* See {@link android.webkit.WebSettings#setDefaultVideoPosterURL}.
*/
public void setDefaultVideoPosterURL(String url) {
synchronized (mXWalkSettingsLock) {
if (mDefaultVideoPosterURL != null && !mDefaultVideoPosterURL.equals(url) ||
mDefaultVideoPosterURL == null && url != null) {
mDefaultVideoPosterURL = url;
mEventHandler.updateWebkitPreferencesLocked();
}
}
}
/**
* @return returns the default User-Agent used by each ContentViewCore instance, i.e. unless
* overridden by {@link #setUserAgentString()}
*/
public static String getDefaultUserAgent() {
return LazyDefaultUserAgent.sInstance;
}
/**
* Set the user agent of web page/app.
* A null or empty string restores the default user agent. The native side is
* only updated (blocking, on the UI thread) when the value actually changes.
* @param userAgent the user agent string passed from client.
* @since 6.0
*/
@XWalkAPI
public void setUserAgentString(String userAgent) {
synchronized (mXWalkSettingsLock) {
final String oldUserAgent = mUserAgent;
if (userAgent == null || userAgent.length() == 0) {
mUserAgent = LazyDefaultUserAgent.sInstance;
} else {
mUserAgent = userAgent;
}
// NOTE(review): oldUserAgent can only be null if this instance was made
// via the no-arg bridge constructor, which never calls this — confirm.
if (!oldUserAgent.equals(mUserAgent)) {
mEventHandler.maybeRunOnUiThreadBlocking(new Runnable() {
@Override
public void run() {
if (mNativeXWalkSettings != 0) {
nativeUpdateUserAgent(mNativeXWalkSettings);
}
}
});
}
}
}
/**
* Get the user agent of web page/app.
* @return the XWalkView's user-agent string.
* @since 6.0
*/
@XWalkAPI
public String getUserAgentString() {
synchronized (mXWalkSettingsLock) {
return mUserAgent;
}
}
@CalledByNative
private String getUserAgentLocked() {
return mUserAgent;
}
/**
* See {@link android.webkit.WebSettings#getDefaultVideoPosterURL}.
*/
public String getDefaultVideoPosterURL() {
synchronized (mXWalkSettingsLock) {
return mDefaultVideoPosterURL;
}
}
@CalledByNative
private void updateEverything() {
synchronized (mXWalkSettingsLock) {
nativeUpdateEverythingLocked(mNativeXWalkSettings);
}
}
private void updateWebkitPreferencesOnUiThread() {
if (mNativeXWalkSettings != 0) {
ThreadUtils.assertOnUiThread();
nativeUpdateWebkitPreferences(mNativeXWalkSettings);
}
}
/**
 * Set the accept languages of XWalkView.
 * The native side is only updated (blocking, on the UI thread) when the
 * value actually changes.
 * @param acceptLanguages the accept languages string passed from client.
 * @since 6.0
 */
@XWalkAPI
public void setAcceptLanguages(final String acceptLanguages) {
    synchronized (mXWalkSettingsLock) {
        // Compare by value with null tolerance. The previous `==` check
        // compared String references, so an equal-content string from a
        // different instance triggered a spurious blocking native update.
        if (mAcceptLanguages == acceptLanguages
                || (mAcceptLanguages != null && mAcceptLanguages.equals(acceptLanguages))) {
            return;
        }
        mAcceptLanguages = acceptLanguages;
        mEventHandler.maybeRunOnUiThreadBlocking(new Runnable() {
            @Override
            public void run() {
                if (mNativeXWalkSettings != 0) {
                    nativeUpdateAcceptLanguages(mNativeXWalkSettings);
                }
            }
        });
    }
}
/**
* Get the accept languages of XWalkView.
* @return the accept languages
* @since 6.0
*/
@XWalkAPI
public String getAcceptLanguages() {
synchronized (mXWalkSettingsLock) {
return mAcceptLanguages;
}
}
/**
* See {@link android.webkit.WebSettings#setSaveFormData}.
*/
public void setSaveFormData(final boolean enable) {
synchronized (mXWalkSettingsLock) {
if (mAutoCompleteEnabled == enable) return;
mAutoCompleteEnabled = enable;
mEventHandler.maybeRunOnUiThreadBlocking(new Runnable() {
@Override
public void run() {
if (mNativeXWalkSettings != 0) {
nativeUpdateFormDataPreferences(mNativeXWalkSettings);
}
}
});
}
}
/**
* See {@link android.webkit.WebSettings#getSaveFormData}.
*/
public boolean getSaveFormData() {
synchronized (mXWalkSettingsLock) {
return getSaveFormDataLocked();
}
}
@CalledByNative
private String getAcceptLanguagesLocked() {
return mAcceptLanguages;
}
@CalledByNative
private boolean getSaveFormDataLocked() {
return mAutoCompleteEnabled;
}
// Records the device-independent-pixel scale used by native for page-scale
// computations (read back via getDIPScaleLocked()).
void setDIPScale(double dipScale) {
synchronized (mXWalkSettingsLock) {
mDIPScale = dipScale;
// TODO(hengzhi.wu): This should also be synced over to native side, but right now
// the setDIPScale call is always followed by a setWebContents() which covers this.
}
}
/**
* Sets the initial scale for this XWalkView.
* @param scaleInPercent the initial scale in percent.
* @since 6.0
*/
@XWalkAPI
public void setInitialPageScale(final float scaleInPercent) {
synchronized (mXWalkSettingsLock) {
if (mInitialPageScalePercent == scaleInPercent) return;
mInitialPageScalePercent = scaleInPercent;
mEventHandler.maybeRunOnUiThreadBlocking(new Runnable() {
@Override
public void run() {
if (mNativeXWalkSettings != 0) {
nativeUpdateInitialPageScale(mNativeXWalkSettings);
}
}
});
}
}
@CalledByNative
private float getInitialPageScalePercentLocked() {
assert Thread.holdsLock(mXWalkSettingsLock);
return mInitialPageScalePercent;
}
@CalledByNative
private double getDIPScaleLocked() {
assert Thread.holdsLock(mXWalkSettingsLock);
return mDIPScale;
}
@CalledByNative
private boolean getPasswordEchoEnabledLocked() {
assert Thread.holdsLock(mXWalkSettingsLock);
return mPasswordEchoEnabled;
}
/**
* Sets the text zoom of the page in percent. The default is 100.
* @param textZoom the text zoom in percent.
* @since 6.0
*/
@XWalkAPI
public void setTextZoom(final int textZoom) {
synchronized (mXWalkSettingsLock) {
if (mTextSizePercent == textZoom) return;
mTextSizePercent = textZoom;
mEventHandler.updateWebkitPreferencesLocked();
}
}
/**
* Gets the text zoom of the page in percent.
* @return the text zoom of the page in percent.
* @since 6.0
*/
@XWalkAPI
public int getTextZoom() {
synchronized (mXWalkSettingsLock) {
return mTextSizePercent;
}
}
// Clamps a requested font size into the supported
// [MINIMUM_FONT_SIZE, MAXIMUM_FONT_SIZE] range.
private int clipFontSize(int size) {
    return Math.max(MINIMUM_FONT_SIZE, Math.min(size, MAXIMUM_FONT_SIZE));
}
/**
* Sets the default font size. The default is 16.
* @param size non-negative integer between 1 and 72.
* Any number outside the specified range will be pinned.
* @since 6.0
*/
@XWalkAPI
public void setDefaultFontSize(int size) {
synchronized (mXWalkSettingsLock) {
size = clipFontSize(size);
if (mDefaultFontSize == size) return;
mDefaultFontSize = size;
mEventHandler.updateWebkitPreferencesLocked();
}
}
/**
* Gets the default font size.
* @return a non-negative integer between 1 and 72.
* @since 6.0
*/
@XWalkAPI
public int getDefaultFontSize() {
synchronized (mXWalkSettingsLock) {
return mDefaultFontSize;
}
}
/**
* Sets the default fixed font size. The default is 16.
* @param size a non-negative integer between 1 and 72.
* Any number outside the specified range will be pinned.
* @since 6.0
*/
@XWalkAPI
public void setDefaultFixedFontSize(int size) {
synchronized (mXWalkSettingsLock) {
size = clipFontSize(size);
if (mDefaultFixedFontSize == size) return;
mDefaultFixedFontSize = size;
mEventHandler.updateWebkitPreferencesLocked();
}
}
/**
* Gets the default fixed font size.
* @return a non-negative integer between 1 and 72.
* @since 6.0
*/
@XWalkAPI
public int getDefaultFixedFontSize() {
synchronized (mXWalkSettingsLock) {
return mDefaultFixedFontSize;
}
}
void setZoomListener(ZoomSupportChangeListener zoomChangeListener) {
synchronized (mXWalkSettingsLock) {
mZoomChangeListener = zoomChangeListener;
}
}
// Notifies the registered listener (if any) that the effective gesture-zoom
// capabilities changed. The listener is read under the settings lock inside
// the posted task, so a listener cleared before the task runs is not called.
private void onGestureZoomSupportChanged(
final boolean supportsDoubleTapZoom, final boolean supportsMultiTouchZoom) {
// Always post asynchronously here, to avoid doubling back onto the caller.
mEventHandler.maybePostOnUiThread(new Runnable() {
@Override
public void run() {
synchronized (mXWalkSettingsLock) {
if (mZoomChangeListener == null) return;
mZoomChangeListener.onGestureZoomSupportChanged(
supportsDoubleTapZoom, supportsMultiTouchZoom);
}
}
});
}
// Double-tap zoom requires zoom support, built-in controls, AND wide
// viewport mode. Caller must hold mXWalkSettingsLock.
@CalledByNative
private boolean supportsDoubleTapZoomLocked() {
assert Thread.holdsLock(mXWalkSettingsLock);
return mSupportZoom && mBuiltInZoomControls && mUseWideViewport;
}
// Multi-touch (pinch) zoom requires zoom support and built-in controls.
// NOTE(review): unlike the double-tap variant this lacks @CalledByNative —
// confirm whether native needs it.
private boolean supportsMultiTouchZoomLocked() {
assert Thread.holdsLock(mXWalkSettingsLock);
return mSupportZoom && mBuiltInZoomControls;
}
/**
* Sets whether the XWalkView should support zooming using its on-screen zoom controls
* and gestures. The particular zoom mechanisms that should be used can be set with
* setBuiltInZoomControls(boolean). This setting does not affect zooming performed
* using the zoomIn() and zoomOut() methods. The default is true.
* @param support whether the XWalkView should support zoom.
* @since 6.0
*/
@XWalkAPI
public void setSupportZoom(boolean support) {
synchronized (mXWalkSettingsLock) {
if (mSupportZoom == support) return;
mSupportZoom = support;
onGestureZoomSupportChanged(
supportsDoubleTapZoomLocked(), supportsMultiTouchZoomLocked());
}
}
/**
* Gets whether the XWalkView supports zoom.
* @return true if the XWalkView supports zoom.
* @since 6.0
*/
@XWalkAPI
public boolean supportZoom() {
synchronized (mXWalkSettingsLock) {
return mSupportZoom;
}
}
/**
* Sets whether the XWalkView should use its built-in zoom mechanisms.
* The built-in zoom mechanisms comprise on-screen zoom controls, which are
* displayed over the XWalkView's content, and the use of a pinch gesture to
* control zooming. Whether or not these on-screen controls are displayed
* can be set with setDisplayZoomControls(boolean). The default is false.
* @param enabled whether the XWalkView should use its built-in zoom mechanisms.
* @since 6.0
*/
@XWalkAPI
public void setBuiltInZoomControls(boolean enabled) {
synchronized (mXWalkSettingsLock) {
if (mBuiltInZoomControls == enabled) return;
mBuiltInZoomControls = enabled;
onGestureZoomSupportChanged(
supportsDoubleTapZoomLocked(), supportsMultiTouchZoomLocked());
}
}
/**
* Gets whether the zoom mechanisms built into XWalkView are being used.
* @return true if the zoom mechanisms built into XWalkView are being used.
* @since 6.0
*/
@XWalkAPI
public boolean getBuiltInZoomControls() {
synchronized (mXWalkSettingsLock) {
return mBuiltInZoomControls;
}
}
/**
* Note: Just for test case.
* Gets whether the XWalkView supports multi touch zoom.
* @return true if the XWalkView supports multi touch zoom.
*/
@XWalkAPI
public boolean supportsMultiTouchZoomForTest() {
synchronized (mXWalkSettingsLock) {
return supportsMultiTouchZoomLocked();
}
}
/**
* Sets whether the XWalkView should support the spatial navigation,
* like a TV remote control.
* @param enable whether the XWalkView should support the spatial navigation.
* @since 6.0
*/
@XWalkAPI
public void setSupportSpatialNavigation(boolean enable) {
synchronized (mXWalkSettingsLock) {
if (mSpatialNavigationEnabled == enable) return;
mSpatialNavigationEnabled = enable;
mEventHandler.updateWebkitPreferencesLocked();
}
}
/**
* Gets whether the XWalkView should support the spatial navigation.
* @return true if XWalkView support the spatial navigation.
* @since 6.0
*/
@XWalkAPI
public boolean getSupportSpatialNavigation() {
synchronized (mXWalkSettingsLock) {
return mSpatialNavigationEnabled;
}
}
private native long nativeInit(WebContents webContents);
private native void nativeDestroy(long nativeXWalkSettings);
private static native String nativeGetDefaultUserAgent();
private native void nativeUpdateEverythingLocked(long nativeXWalkSettings);
private native void nativeUpdateUserAgent(long nativeXWalkSettings);
private native void nativeUpdateWebkitPreferences(long nativeXWalkSettings);
private native void nativeUpdateAcceptLanguages(long nativeXWalkSettings);
private native void nativeUpdateFormDataPreferences(long nativeXWalkSettings);
private native void nativeUpdateInitialPageScale(long nativeXWalkSettings);
}
| |
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.cluster.routing.allocation;
import com.google.common.collect.Lists;
import org.elasticsearch.ElasticSearchException;
import org.elasticsearch.ElasticSearchIllegalStateException;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.routing.*;
import org.elasticsearch.cluster.routing.allocation.allocator.ShardsAllocators;
import org.elasticsearch.cluster.routing.allocation.command.AllocationCommands;
import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders;
import org.elasticsearch.cluster.routing.allocation.decider.Decision;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.node.settings.NodeSettingsService;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING;
import static org.elasticsearch.cluster.routing.ShardRoutingState.RELOCATING;
/**
*
*/
public class AllocationService extends AbstractComponent {
private final AllocationDeciders allocationDeciders;
private final ShardsAllocators shardsAllocators;
// Convenience constructor using empty settings; delegates to the
// Settings-only constructor.
public AllocationService() {
this(ImmutableSettings.Builder.EMPTY_SETTINGS);
}
// Builds default deciders/allocators from the given settings. The
// NodeSettingsService here uses empty settings, so deciders built this way
// will not react to dynamic node-setting updates.
public AllocationService(Settings settings) {
this(settings,
new AllocationDeciders(settings, new NodeSettingsService(ImmutableSettings.Builder.EMPTY_SETTINGS)),
new ShardsAllocators(settings)
);
}
// Primary (injected) constructor.
@Inject
public AllocationService(Settings settings, AllocationDeciders allocationDeciders, ShardsAllocators shardsAllocators) {
super(settings);
this.allocationDeciders = allocationDeciders;
this.shardsAllocators = shardsAllocators;
}
/**
* Applies the started shards. Note, shards can be called several times within this method.
* <p/>
* <p>If the same instance of the routing table is returned, then no change has been made.
*/
public RoutingAllocation.Result applyStartedShards(ClusterState clusterState, List<? extends ShardRouting> startedShards) {
RoutingNodes routingNodes = clusterState.routingNodes();
// shuffle the unassigned nodes, just so we won't have things like poison failed shards
// (mutates the unassigned list in place)
Collections.shuffle(routingNodes.unassigned());
StartedRerouteAllocation allocation = new StartedRerouteAllocation(allocationDeciders, routingNodes, clusterState.nodes(), startedShards);
// Mark the shards started first; if nothing changed, return the original
// routing table unmodified (changed == false) so callers can detect no-op.
boolean changed = applyStartedShards(routingNodes, startedShards);
if (!changed) {
return new RoutingAllocation.Result(false, clusterState.routingTable(), allocation.explanation());
}
shardsAllocators.applyStartedShards(allocation);
reroute(allocation);
return new RoutingAllocation.Result(true, new RoutingTable.Builder().updateNodes(routingNodes).build().validateRaiseException(clusterState.metaData()), allocation.explanation());
}
/**
* Applies the failed shards. Note, shards can be called several times within this method.
* <p/>
* <p>If the same instance of the routing table is returned, then no change has been made.
*/
public RoutingAllocation.Result applyFailedShard(ClusterState clusterState, ShardRouting failedShard) {
RoutingNodes routingNodes = clusterState.routingNodes();
// shuffle the unassigned nodes, just so we won't have things like poison failed shards
// (mutates the unassigned list in place)
Collections.shuffle(routingNodes.unassigned());
FailedRerouteAllocation allocation = new FailedRerouteAllocation(allocationDeciders, routingNodes, clusterState.nodes(), failedShard);
// If the failed shard was not actually present, report no change.
boolean changed = applyFailedShard(allocation, failedShard);
if (!changed) {
return new RoutingAllocation.Result(false, clusterState.routingTable(), allocation.explanation());
}
shardsAllocators.applyFailedShards(allocation);
reroute(allocation);
return new RoutingAllocation.Result(true, new RoutingTable.Builder().updateNodes(routingNodes).build().validateRaiseException(clusterState.metaData()), allocation.explanation());
}
/**
 * Executes explicit allocation commands against the cluster state and then
 * performs a full reroute. Allocation-disable settings are bypassed while the
 * commands run, since commands are explicit operator requests.
 *
 * @throws ElasticSearchException if a command fails to execute
 */
public RoutingAllocation.Result reroute(ClusterState clusterState, AllocationCommands commands) throws ElasticSearchException {
    RoutingNodes nodes = clusterState.routingNodes();
    // deliberately no shuffle of unassigned shards here: keeping the order stable makes
    // the effect of the commands reproducible, which allows dry-running them against
    // the resulting cluster state before acting on it
    RoutingAllocation allocation = new RoutingAllocation(allocationDeciders, nodes, clusterState.nodes());
    // commands are explicit, so ignore any "disable allocation" settings while they execute
    allocation.ignoreDisable(true);
    commands.execute(allocation);
    // restore the flag so the subsequent reroute honors the original settings
    allocation.ignoreDisable(false);
    // commands either move shards or fail with an exception, so there is always a
    // "movement" and no need to check the reroute result here
    reroute(allocation);
    RoutingTable updatedTable = new RoutingTable.Builder().updateNodes(nodes).build().validateRaiseException(clusterState.metaData());
    return new RoutingAllocation.Result(true, updatedTable, allocation.explanation());
}
/**
 * Reroutes the routing table based on the live nodes.
 * <p/>
 * <p>If the same instance of the routing table is returned, then no change has been made.
 */
public RoutingAllocation.Result reroute(ClusterState clusterState) {
    RoutingNodes nodes = clusterState.routingNodes();
    // randomize the unassigned list so repeated reroutes don't always favor the same shards
    Collections.shuffle(nodes.unassigned());
    RoutingAllocation allocation = new RoutingAllocation(allocationDeciders, nodes, clusterState.nodes());
    boolean changed = reroute(allocation);
    if (changed) {
        RoutingTable updatedTable = new RoutingTable.Builder().updateNodes(nodes).build().validateRaiseException(clusterState.metaData());
        return new RoutingAllocation.Result(true, updatedTable, allocation.explanation());
    }
    return new RoutingAllocation.Result(false, clusterState.routingTable(), allocation.explanation());
}
/**
 * Only handles reroute but *without* any reassignment of unassigned shards or rebalancing.
 * Dead nodes are de-associated (their shards are moved to UNASSIGNED) and primaries are
 * re-elected from active replicas, but no shards are (re)assigned to nodes.
 * <p/>
 * <p>If the same instance of the routing table is returned, then no change has been made.
 */
public RoutingAllocation.Result rerouteWithNoReassign(ClusterState clusterState) {
    RoutingNodes routingNodes = clusterState.routingNodes();
    // shuffle the unassigned nodes, just so we won't have things like poison failed shards
    Collections.shuffle(routingNodes.unassigned());
    RoutingAllocation allocation = new RoutingAllocation(allocationDeciders, routingNodes, clusterState.nodes());
    boolean changed = false;
    // first, clear from the shards any node id they used to belong to that is now dead
    changed |= deassociateDeadNodes(allocation);
    // register RoutingNode entries for any newly joined data nodes
    applyNewNodes(allocation);
    // elect primaries *before* allocating unassigned, so backups of primaries that failed
    // will be moved to primary state and not wait for primaries to be allocated and recovered (*from gateway*)
    changed |= electPrimaries(allocation.routingNodes());
    if (!changed) {
        return new RoutingAllocation.Result(false, clusterState.routingTable(), allocation.explanation());
    }
    return new RoutingAllocation.Result(true, new RoutingTable.Builder().updateNodes(routingNodes).build().validateRaiseException(clusterState.metaData()), allocation.explanation());
}
/**
 * Performs a full reroute pass over the given allocation: de-associates dead nodes,
 * registers new nodes, elects primaries, allocates unassigned shards, moves shards
 * that can no longer remain on their node, and finally rebalances.
 *
 * @return <tt>true</tt> if any routing change was made
 */
private boolean reroute(RoutingAllocation allocation) {
    boolean changed = false;
    // first, clear from the shards any node id they used to belong to that is now dead
    changed |= deassociateDeadNodes(allocation);
    // register RoutingNode entries for any newly joined data nodes
    applyNewNodes(allocation);
    // elect primaries *before* allocating unassigned, so backups of primaries that failed
    // will be moved to primary state and not wait for primaries to be allocated and recovered (*from gateway*)
    changed |= electPrimaries(allocation.routingNodes());
    // now allocate all the unassigned to available nodes
    if (allocation.routingNodes().hasUnassigned()) {
        changed |= shardsAllocators.allocateUnassigned(allocation);
        // elect primaries again, in case this is needed with unassigned allocation
        changed |= electPrimaries(allocation.routingNodes());
    }
    // move shards that no longer can be allocated
    changed |= moveShards(allocation);
    // rebalance
    changed |= shardsAllocators.rebalance(allocation);
    return changed;
}
/**
 * Moves started shards off nodes on which the deciders say they can no longer remain.
 *
 * @return <tt>true</tt> if at least one shard was moved
 */
private boolean moveShards(RoutingAllocation allocation) {
    // snapshot the shards in node-interleaved order, so the moves performed below
    // cannot interfere with iteration over the live routing nodes
    List<MutableShardRouting> interleaved = new ArrayList<MutableShardRouting>();
    for (int depth = 0; ; depth++) {
        boolean anyAtDepth = false;
        for (RoutingNode node : allocation.routingNodes()) {
            if (depth < node.shards().size()) {
                interleaved.add(node.shards().get(depth));
                anyAtDepth = true;
            }
        }
        if (!anyAtDepth) {
            break;
        }
    }
    boolean changed = false;
    for (MutableShardRouting shard : interleaved) {
        // only started shards are eligible to be moved
        if (!shard.started()) {
            continue;
        }
        RoutingNode currentNode = allocation.routingNodes().node(shard.currentNodeId());
        Decision decision = allocation.deciders().canRemain(shard, currentNode, allocation);
        if (decision.type() == Decision.Type.NO) {
            logger.debug("[{}][{}] allocated on [{}], but can no longer be allocated on it, moving...", shard.index(), shard.id(), currentNode.node());
            if (shardsAllocators.move(shard, currentNode, allocation)) {
                changed = true;
            } else {
                logger.debug("[{}][{}] can't move", shard.index(), shard.id());
            }
        }
    }
    return changed;
}
/**
 * For every unassigned primary, promotes an active assigned replica of the same shard
 * to primary (including the relocation counterpart, if the replica is relocating).
 *
 * @return <tt>true</tt> if any promotion took place
 */
private boolean electPrimaries(RoutingNodes routingNodes) {
    boolean changed = false;
    for (MutableShardRouting unassignedPrimary : routingNodes.unassigned()) {
        if (!unassignedPrimary.primary() || unassignedPrimary.assignedToNode()) {
            continue;
        }
        // primary and not assigned: scan all nodes for an active replica of this shard
        // (active rather than started, since it might be relocating) and promote it
        nodeScan:
        for (RoutingNode node : routingNodes.nodesToShards().values()) {
            for (MutableShardRouting candidate : node.shards()) {
                if (!unassignedPrimary.shardId().equals(candidate.shardId()) || !candidate.active()) {
                    continue;
                }
                assert candidate.assignedToNode();
                assert !candidate.primary();
                changed = true;
                unassignedPrimary.moveFromPrimary();
                candidate.moveToPrimary();
                if (candidate.relocatingNodeId() != null) {
                    // the replica is relocating: flip its counterpart on the target node too
                    RoutingNode targetNode = routingNodes.node(candidate.relocatingNodeId());
                    if (targetNode != null) {
                        for (MutableShardRouting relocatingCopy : targetNode) {
                            if (relocatingCopy.shardId().equals(candidate.shardId()) && !relocatingCopy.primary()) {
                                relocatingCopy.moveToPrimary();
                                break;
                            }
                        }
                    }
                }
                // a replica was elected for this shard, move on to the next unassigned primary
                break nodeScan;
            }
        }
    }
    return changed;
}
/**
 * Ensures every live data node has a corresponding {@link RoutingNode} entry,
 * creating empty entries for nodes that just joined the cluster.
 */
private void applyNewNodes(RoutingAllocation allocation) {
    for (DiscoveryNode dataNode : allocation.nodes().dataNodes().values()) {
        boolean alreadyKnown = allocation.routingNodes().nodesToShards().containsKey(dataNode.id());
        if (alreadyKnown) {
            continue;
        }
        allocation.routingNodes().nodesToShards().put(dataNode.id(), new RoutingNode(dataNode.id(), dataNode));
    }
}
/**
 * Removes routing entries for nodes that are no longer part of the cluster,
 * failing every shard that was still routed to them.
 *
 * @return <tt>true</tt> if any dead node was removed
 */
private boolean deassociateDeadNodes(RoutingAllocation allocation) {
    boolean changed = false;
    Iterator<RoutingNode> nodeIt = allocation.routingNodes().nodesToShards().values().iterator();
    while (nodeIt.hasNext()) {
        RoutingNode routingNode = nodeIt.next();
        if (allocation.nodes().dataNodes().containsKey(routingNode.nodeId())) {
            // the node is still alive, nothing to do
            continue;
        }
        changed = true;
        // fail every shard still routed to the dead node; iterate over a copy because
        // applyFailedShard mutates the node's shard list and assumes a fresh routing copy
        for (MutableShardRouting deadShard : new ArrayList<MutableShardRouting>(routingNode.shards())) {
            applyFailedShard(allocation, deadShard);
        }
        // remove the node only *after* failing its shards, since applyFailedShard
        // relies on the RoutingNode still being present in the node map
        nodeIt.remove();
    }
    return changed;
}
/**
 * Marks the given INITIALIZING shards as STARTED on their current nodes and, for shards
 * that just completed a relocation, removes the now-obsolete RELOCATING entry from the
 * source node.
 *
 * @return <tt>true</tt> ("dirty") if any routing entry was actually modified
 */
private boolean applyStartedShards(RoutingNodes routingNodes, Iterable<? extends ShardRouting> startedShardEntries) {
    boolean dirty = false;
    // apply shards might be called several times with the same shard, ignore it
    for (ShardRouting startedShard : startedShardEntries) {
        assert startedShard.state() == INITIALIZING;
        // retrieve the relocating node id before calling moveToStarted().
        String relocatingNodeId = null;
        RoutingNode currentRoutingNode = routingNodes.nodesToShards().get(startedShard.currentNodeId());
        if (currentRoutingNode != null) {
            for (MutableShardRouting shard : currentRoutingNode) {
                if (shard.shardId().equals(startedShard.shardId())) {
                    relocatingNodeId = shard.relocatingNodeId();
                    // only mark dirty if the shard was not already started (idempotency)
                    if (!shard.started()) {
                        dirty = true;
                        shard.moveToStarted();
                    }
                    break;
                }
            }
        }
        // startedShard is the current state of the shard (post relocation for example)
        // this means that after relocation, the state will be started and the currentNodeId will be
        // the node we relocated to
        if (relocatingNodeId == null)
            continue;
        // the shard arrived via relocation: drop the stale RELOCATING copy on the source node
        RoutingNode sourceRoutingNode = routingNodes.nodesToShards().get(relocatingNodeId);
        if (sourceRoutingNode != null) {
            Iterator<MutableShardRouting> shardsIter = sourceRoutingNode.iterator();
            while (shardsIter.hasNext()) {
                MutableShardRouting shard = shardsIter.next();
                if (shard.shardId().equals(startedShard.shardId())) {
                    if (shard.relocating()) {
                        dirty = true;
                        shardsIter.remove();
                        break;
                    }
                }
            }
        }
    }
    return dirty;
}
/**
 * Applies the relevant logic to handle a failed shard. Returns <tt>true</tt> if changes happened that
 * require relocation.
 * <p/>
 * Three cases are handled, depending on the failed shard's state:
 * <ul>
 * <li>INITIALIZING with a relocating node id: the shard was the *target* of a relocation;
 * cancel the initialization and cancel the relocation on the source node.</li>
 * <li>RELOCATING: the shard was the *source* of a relocation; cancel it, move it back to
 * the unassigned list, and remove the initializing copy on the target node.</li>
 * <li>No relocating node id (started or plain initializing): remove it from its node and
 * re-add it to the unassigned list with a bumped version.</li>
 * </ul>
 */
private boolean applyFailedShard(RoutingAllocation allocation, ShardRouting failedShard) {
    // create a copy of the failed shard, since we assume we can change possible refernces to it without
    // changing the state of failed shard
    failedShard = new ImmutableShardRouting(failedShard);
    IndexRoutingTable indexRoutingTable = allocation.routingTable().index(failedShard.index());
    if (indexRoutingTable == null) {
        // the index is gone (e.g. deleted); nothing to do
        return false;
    }
    if (failedShard.relocatingNodeId() != null) {
        // the shard is relocating, either in initializing (recovery from another node) or relocating (moving to another node)
        if (failedShard.state() == INITIALIZING) {
            // the shard is initializing and recovering from another node
            boolean dirty = false;
            // first, we need to cancel the current node that is being initialized
            RoutingNode initializingNode = allocation.routingNodes().node(failedShard.currentNodeId());
            if (initializingNode != null) {
                for (Iterator<MutableShardRouting> it = initializingNode.iterator(); it.hasNext(); ) {
                    MutableShardRouting shardRouting = it.next();
                    if (shardRouting.equals(failedShard)) {
                        dirty = true;
                        it.remove();
                        shardRouting.deassignNode();
                        // make sure we ignore this shard on the relevant node
                        allocation.addIgnoreShardForNode(failedShard.shardId(), failedShard.currentNodeId());
                        break;
                    }
                }
            }
            if (dirty) {
                // now, find the node that we are relocating *from*, and cancel its relocation
                RoutingNode relocatingFromNode = allocation.routingNodes().node(failedShard.relocatingNodeId());
                if (relocatingFromNode != null) {
                    for (Iterator<MutableShardRouting> it = relocatingFromNode.iterator(); it.hasNext(); ) {
                        MutableShardRouting shardRouting = it.next();
                        if (shardRouting.shardId().equals(failedShard.shardId()) && shardRouting.state() == RELOCATING) {
                            dirty = true;
                            shardRouting.cancelRelocation();
                            break;
                        }
                    }
                }
            }
            return dirty;
        } else if (failedShard.state() == RELOCATING) {
            boolean dirty = false;
            // the shard is relocating, meaning its the source the shard is relocating from
            // first, we need to cancel the current relocation from the current node
            // now, find the node that we are recovering from, cancel the relocation, remove it from the node
            // and add it to the unassigned shards list...
            RoutingNode relocatingFromNode = allocation.routingNodes().node(failedShard.currentNodeId());
            if (relocatingFromNode != null) {
                for (Iterator<MutableShardRouting> it = relocatingFromNode.iterator(); it.hasNext(); ) {
                    MutableShardRouting shardRouting = it.next();
                    if (shardRouting.equals(failedShard)) {
                        dirty = true;
                        shardRouting.cancelRelocation();
                        it.remove();
                        // make sure we ignore this shard on the relevant node
                        allocation.addIgnoreShardForNode(failedShard.shardId(), failedShard.currentNodeId());
                        // re-add as unassigned with a bumped version so stale copies lose out
                        allocation.routingNodes().unassigned().add(new MutableShardRouting(failedShard.index(), failedShard.id(),
                                null, failedShard.primary(), ShardRoutingState.UNASSIGNED, failedShard.version() + 1));
                        break;
                    }
                }
            }
            if (dirty) {
                // next, we need to find the target initializing shard that is recovering from, and remove it...
                RoutingNode initializingNode = allocation.routingNodes().node(failedShard.relocatingNodeId());
                if (initializingNode != null) {
                    for (Iterator<MutableShardRouting> it = initializingNode.iterator(); it.hasNext(); ) {
                        MutableShardRouting shardRouting = it.next();
                        if (shardRouting.shardId().equals(failedShard.shardId()) && shardRouting.state() == INITIALIZING) {
                            dirty = true;
                            shardRouting.deassignNode();
                            it.remove();
                        }
                    }
                }
            }
            return dirty;
        } else {
            // a relocating node id is only valid for INITIALIZING or RELOCATING shards
            throw new ElasticSearchIllegalStateException("illegal state for a failed shard, relocating node id is set, but state does not match: " + failedShard);
        }
    } else {
        // the shard is not relocating, its either started, or initializing, just cancel it and move on...
        boolean dirty = false;
        RoutingNode node = allocation.routingNodes().node(failedShard.currentNodeId());
        if (node != null) {
            for (Iterator<MutableShardRouting> it = node.iterator(); it.hasNext(); ) {
                MutableShardRouting shardRouting = it.next();
                if (shardRouting.equals(failedShard)) {
                    dirty = true;
                    // make sure we ignore this shard on the relevant node
                    allocation.addIgnoreShardForNode(failedShard.shardId(), failedShard.currentNodeId());
                    it.remove();
                    // move all the shards matching the failed shard to the end of the unassigned list
                    // so we give a chance for other allocations and won't create poison failed allocations
                    // that can keep other shards from being allocated (because of limits applied on how many
                    // shards we can start per node)
                    List<MutableShardRouting> shardsToMove = Lists.newArrayList();
                    for (Iterator<MutableShardRouting> unassignedIt = allocation.routingNodes().unassigned().iterator(); unassignedIt.hasNext(); ) {
                        MutableShardRouting unassignedShardRouting = unassignedIt.next();
                        if (unassignedShardRouting.shardId().equals(failedShard.shardId())) {
                            unassignedIt.remove();
                            shardsToMove.add(unassignedShardRouting);
                        }
                    }
                    if (!shardsToMove.isEmpty()) {
                        // re-append the siblings at the end of the unassigned list
                        allocation.routingNodes().unassigned().addAll(shardsToMove);
                    }
                    // finally add the failed shard itself as unassigned, with a bumped version
                    allocation.routingNodes().unassigned().add(new MutableShardRouting(failedShard.index(), failedShard.id(),
                            null, failedShard.primary(), ShardRoutingState.UNASSIGNED, failedShard.version() + 1));
                    break;
                }
            }
        }
        return dirty;
    }
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.rds.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
* <p/>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/rds-2014-10-31/DescribeDBInstances" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DescribeDBInstancesRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {
/**
 * <p>
 * The user-supplied instance identifier. If this parameter is specified, information from only the specific DB
 * instance is returned. This parameter isn't case-sensitive.
 * </p>
 * <p>
 * Constraints: if supplied, must match the identifier of an existing DBInstance.
 * </p>
 */
private String dBInstanceIdentifier;
/**
 * <p>
 * A filter that specifies one or more DB instances to describe.
 * </p>
 * <p>
 * Supported filters: <code>db-cluster-id</code> (DB cluster identifiers/ARNs), <code>db-instance-id</code> (DB
 * instance identifiers/ARNs), <code>dbi-resource-id</code> (DB instance resource identifiers),
 * <code>domain</code> (Active Directory directory IDs), and <code>engine</code> (engine names).
 * </p>
 */
private com.amazonaws.internal.SdkInternalList<Filter> filters;
/**
 * <p>
 * The maximum number of records to include in the response. If more records exist than the specified
 * <code>MaxRecords</code> value, a pagination token called a marker is included in the response so that you can
 * retrieve the remaining results.
 * </p>
 * <p>
 * Default: 100. Constraints: Minimum 20, maximum 100.
 * </p>
 */
private Integer maxRecords;
/**
 * <p>
 * An optional pagination token provided by a previous <code>DescribeDBInstances</code> request. If this parameter
 * is specified, the response includes only records beyond the marker, up to the value specified by
 * <code>MaxRecords</code>.
 * </p>
 */
private String marker;
/**
 * Sets the user-supplied instance identifier. If specified, information from only that DB instance is returned.
 * This parameter isn't case-sensitive.
 * <p>
 * Constraints: if supplied, must match the identifier of an existing DBInstance.
 *
 * @param dBInstanceIdentifier the DB instance identifier to filter by, or <code>null</code> for no filtering
 */
public void setDBInstanceIdentifier(String dBInstanceIdentifier) {
    this.dBInstanceIdentifier = dBInstanceIdentifier;
}
/**
 * Returns the user-supplied instance identifier, or <code>null</code> if none was set. If specified, information
 * from only that DB instance is returned. This parameter isn't case-sensitive.
 * <p>
 * Constraints: if supplied, must match the identifier of an existing DBInstance.
 *
 * @return the DB instance identifier to filter by, or <code>null</code>
 */
public String getDBInstanceIdentifier() {
    return this.dBInstanceIdentifier;
}
/**
 * Fluent variant of {@link #setDBInstanceIdentifier(String)}: sets the user-supplied instance identifier and
 * returns this request for chaining. This parameter isn't case-sensitive; if supplied, it must match the
 * identifier of an existing DBInstance.
 *
 * @param dBInstanceIdentifier the DB instance identifier to filter by
 * @return this request, so that method calls can be chained together
 */
public DescribeDBInstancesRequest withDBInstanceIdentifier(String dBInstanceIdentifier) {
    setDBInstanceIdentifier(dBInstanceIdentifier);
    return this;
}
/**
 * Returns the filters that specify which DB instances to describe, lazily initializing the backing list so this
 * never returns <code>null</code>.
 * <p>
 * Supported filters: <code>db-cluster-id</code>, <code>db-instance-id</code>, <code>dbi-resource-id</code>,
 * <code>domain</code>, and <code>engine</code>.
 *
 * @return the (possibly empty) live list of filters
 */
public java.util.List<Filter> getFilters() {
    if (filters != null) {
        return filters;
    }
    // lazily create the backing list on first access
    filters = new com.amazonaws.internal.SdkInternalList<Filter>();
    return filters;
}
/**
 * Replaces the filters that specify which DB instances to describe. A defensive copy of the given collection is
 * stored; passing <code>null</code> clears the filters.
 * <p>
 * Supported filters: <code>db-cluster-id</code>, <code>db-instance-id</code>, <code>dbi-resource-id</code>,
 * <code>domain</code>, and <code>engine</code>.
 *
 * @param filters the filters to apply, or <code>null</code> to clear them
 */
public void setFilters(java.util.Collection<Filter> filters) {
    // copy into an SdkInternalList so the stored state is independent of the caller's collection
    this.filters = (filters == null) ? null : new com.amazonaws.internal.SdkInternalList<Filter>(filters);
}
/**
 * Fluent variant that <b>appends</b> the given filters to the existing list (if any). Use
 * {@link #setFilters(java.util.Collection)} or {@link #withFilters(java.util.Collection)} to override the
 * existing values instead.
 * <p>
 * Supported filters: <code>db-cluster-id</code>, <code>db-instance-id</code>, <code>dbi-resource-id</code>,
 * <code>domain</code>, and <code>engine</code>.
 *
 * @param filters the filters to append
 * @return this request, so that method calls can be chained together
 */
public DescribeDBInstancesRequest withFilters(Filter... filters) {
    if (this.filters == null) {
        // presize the backing list to the number of filters being added
        setFilters(new com.amazonaws.internal.SdkInternalList<Filter>(filters.length));
    }
    for (Filter filter : filters) {
        this.filters.add(filter);
    }
    return this;
}
/**
* <p>
* A filter that specifies one or more DB instances to describe.
* </p>
* <p>
* Supported filters:
* </p>
* <ul>
* <li>
* <p>
* <code>db-cluster-id</code> - Accepts DB cluster identifiers and DB cluster Amazon Resource Names (ARNs). The
* results list only includes information about the DB instances associated with the DB clusters identified by these
* ARNs.
* </p>
* </li>
* <li>
* <p>
* <code>db-instance-id</code> - Accepts DB instance identifiers and DB instance Amazon Resource Names (ARNs). The
* results list only includes information about the DB instances identified by these ARNs.
* </p>
* </li>
* <li>
* <p>
* <code>dbi-resource-id</code> - Accepts DB instance resource identifiers. The results list will only include
* information about the DB instances identified by these DB instance resource identifiers.
* </p>
* </li>
* <li>
* <p>
* <code>domain</code> - Accepts Active Directory directory IDs. The results list only includes information about
* the DB instances associated with these domains.
* </p>
* </li>
* <li>
* <p>
* <code>engine</code> - Accepts engine names. The results list only includes information about the DB instances for
* these engines.
* </p>
* </li>
* </ul>
*
* @param filters
* A filter that specifies one or more DB instances to describe.</p>
* <p>
* Supported filters:
* </p>
* <ul>
* <li>
* <p>
* <code>db-cluster-id</code> - Accepts DB cluster identifiers and DB cluster Amazon Resource Names (ARNs).
* The results list only includes information about the DB instances associated with the DB clusters
* identified by these ARNs.
* </p>
* </li>
* <li>
* <p>
* <code>db-instance-id</code> - Accepts DB instance identifiers and DB instance Amazon Resource Names
* (ARNs). The results list only includes information about the DB instances identified by these ARNs.
* </p>
* </li>
* <li>
* <p>
* <code>dbi-resource-id</code> - Accepts DB instance resource identifiers. The results list will only
* include information about the DB instances identified by these DB instance resource identifiers.
* </p>
* </li>
* <li>
* <p>
* <code>domain</code> - Accepts Active Directory directory IDs. The results list only includes information
* about the DB instances associated with these domains.
* </p>
* </li>
* <li>
* <p>
* <code>engine</code> - Accepts engine names. The results list only includes information about the DB
* instances for these engines.
* </p>
* </li>
* @return Returns a reference to this object so that method calls can be chained together.
*/
    public DescribeDBInstancesRequest withFilters(java.util.Collection<Filter> filters) {
        // Fluent variant of setFilters(Collection): replaces any previously set filters.
        setFilters(filters);
        return this;
    }
/**
* <p>
* The maximum number of records to include in the response. If more records exist than the specified
* <code>MaxRecords</code> value, a pagination token called a marker is included in the response so that you can
* retrieve the remaining results.
* </p>
* <p>
* Default: 100
* </p>
* <p>
* Constraints: Minimum 20, maximum 100.
* </p>
*
* @param maxRecords
* The maximum number of records to include in the response. If more records exist than the specified
* <code>MaxRecords</code> value, a pagination token called a marker is included in the response so that you
* can retrieve the remaining results. </p>
* <p>
* Default: 100
* </p>
* <p>
* Constraints: Minimum 20, maximum 100.
*/
    public void setMaxRecords(Integer maxRecords) {
        // Documented constraint (min 20, max 100) is not validated locally.
        this.maxRecords = maxRecords;
    }
/**
* <p>
* The maximum number of records to include in the response. If more records exist than the specified
* <code>MaxRecords</code> value, a pagination token called a marker is included in the response so that you can
* retrieve the remaining results.
* </p>
* <p>
* Default: 100
* </p>
* <p>
* Constraints: Minimum 20, maximum 100.
* </p>
*
* @return The maximum number of records to include in the response. If more records exist than the specified
* <code>MaxRecords</code> value, a pagination token called a marker is included in the response so that you
* can retrieve the remaining results. </p>
* <p>
* Default: 100
* </p>
* <p>
* Constraints: Minimum 20, maximum 100.
*/
    public Integer getMaxRecords() {
        // May be null when the caller did not set a page size.
        return this.maxRecords;
    }
/**
* <p>
* The maximum number of records to include in the response. If more records exist than the specified
* <code>MaxRecords</code> value, a pagination token called a marker is included in the response so that you can
* retrieve the remaining results.
* </p>
* <p>
* Default: 100
* </p>
* <p>
* Constraints: Minimum 20, maximum 100.
* </p>
*
* @param maxRecords
* The maximum number of records to include in the response. If more records exist than the specified
* <code>MaxRecords</code> value, a pagination token called a marker is included in the response so that you
* can retrieve the remaining results. </p>
* <p>
* Default: 100
* </p>
* <p>
* Constraints: Minimum 20, maximum 100.
* @return Returns a reference to this object so that method calls can be chained together.
*/
    public DescribeDBInstancesRequest withMaxRecords(Integer maxRecords) {
        // Fluent variant of setMaxRecords(Integer).
        setMaxRecords(maxRecords);
        return this;
    }
/**
* <p>
* An optional pagination token provided by a previous <code>DescribeDBInstances</code> request. If this parameter
* is specified, the response includes only records beyond the marker, up to the value specified by
* <code>MaxRecords</code>.
* </p>
*
* @param marker
* An optional pagination token provided by a previous <code>DescribeDBInstances</code> request. If this
* parameter is specified, the response includes only records beyond the marker, up to the value specified by
* <code>MaxRecords</code>.
*/
    public void setMarker(String marker) {
        // Pagination token from a previous DescribeDBInstances response; null for the first page.
        this.marker = marker;
    }
/**
* <p>
* An optional pagination token provided by a previous <code>DescribeDBInstances</code> request. If this parameter
* is specified, the response includes only records beyond the marker, up to the value specified by
* <code>MaxRecords</code>.
* </p>
*
* @return An optional pagination token provided by a previous <code>DescribeDBInstances</code> request. If this
* parameter is specified, the response includes only records beyond the marker, up to the value specified
* by <code>MaxRecords</code>.
*/
    public String getMarker() {
        // Null unless a pagination token was supplied.
        return this.marker;
    }
/**
* <p>
* An optional pagination token provided by a previous <code>DescribeDBInstances</code> request. If this parameter
* is specified, the response includes only records beyond the marker, up to the value specified by
* <code>MaxRecords</code>.
* </p>
*
* @param marker
* An optional pagination token provided by a previous <code>DescribeDBInstances</code> request. If this
* parameter is specified, the response includes only records beyond the marker, up to the value specified by
* <code>MaxRecords</code>.
* @return Returns a reference to this object so that method calls can be chained together.
*/
    public DescribeDBInstancesRequest withMarker(String marker) {
        // Fluent variant of setMarker(String).
        setMarker(marker);
        return this;
    }
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
    @Override
    public String toString() {
        // Renders only non-null fields as "{Name: value,...}".
        // Note: a field other than Marker being last leaves a trailing comma
        // before "}" (e.g. when Marker is null) — existing behavior, kept as-is.
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getDBInstanceIdentifier() != null)
            sb.append("DBInstanceIdentifier: ").append(getDBInstanceIdentifier()).append(",");
        if (getFilters() != null)
            sb.append("Filters: ").append(getFilters()).append(",");
        if (getMaxRecords() != null)
            sb.append("MaxRecords: ").append(getMaxRecords()).append(",");
        if (getMarker() != null)
            sb.append("Marker: ").append(getMarker());
        sb.append("}");
        return sb.toString();
    }
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof DescribeDBInstancesRequest == false)
return false;
DescribeDBInstancesRequest other = (DescribeDBInstancesRequest) obj;
if (other.getDBInstanceIdentifier() == null ^ this.getDBInstanceIdentifier() == null)
return false;
if (other.getDBInstanceIdentifier() != null && other.getDBInstanceIdentifier().equals(this.getDBInstanceIdentifier()) == false)
return false;
if (other.getFilters() == null ^ this.getFilters() == null)
return false;
if (other.getFilters() != null && other.getFilters().equals(this.getFilters()) == false)
return false;
if (other.getMaxRecords() == null ^ this.getMaxRecords() == null)
return false;
if (other.getMaxRecords() != null && other.getMaxRecords().equals(this.getMaxRecords()) == false)
return false;
if (other.getMarker() == null ^ this.getMarker() == null)
return false;
if (other.getMarker() != null && other.getMarker().equals(this.getMarker()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getDBInstanceIdentifier() == null) ? 0 : getDBInstanceIdentifier().hashCode());
hashCode = prime * hashCode + ((getFilters() == null) ? 0 : getFilters().hashCode());
hashCode = prime * hashCode + ((getMaxRecords() == null) ? 0 : getMaxRecords().hashCode());
hashCode = prime * hashCode + ((getMarker() == null) ? 0 : getMarker().hashCode());
return hashCode;
}
    @Override
    public DescribeDBInstancesRequest clone() {
        // Shallow copy via the request base class; cast narrows to the fluent type.
        return (DescribeDBInstancesRequest) super.clone();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.runners.apex;
import static com.google.common.base.Preconditions.checkArgument;
import com.datatorrent.api.Attribute;
import com.datatorrent.api.Attribute.AttributeMap;
import com.datatorrent.api.DAG;
import com.datatorrent.api.StreamingApplication;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.Sets;
import java.io.BufferedReader;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.Serializable;
import java.lang.reflect.AccessibleObject;
import java.lang.reflect.Field;
import java.net.URI;
import java.net.URL;
import java.net.URLClassLoader;
import java.nio.file.FileSystem;
import java.nio.file.FileSystems;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.jar.JarFile;
import java.util.jar.Manifest;
import org.apache.apex.api.EmbeddedAppLauncher;
import org.apache.apex.api.Launcher;
import org.apache.apex.api.Launcher.AppHandle;
import org.apache.apex.api.Launcher.LaunchMode;
import org.apache.apex.api.Launcher.LauncherException;
import org.apache.apex.api.Launcher.ShutdownMode;
import org.apache.apex.api.YarnAppLauncher;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.SerializationUtils;
import org.apache.hadoop.conf.Configuration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Proxy to launch the YARN application through the hadoop script to run in the
* pre-configured environment (class path, configuration, native libraries etc.).
*
* <p>The proxy takes the DAG and communicates with the Hadoop services to launch
* it on the cluster.
*/
public class ApexYarnLauncher {
  private static final Logger LOG = LoggerFactory.getLogger(ApexYarnLauncher.class);

  /**
   * Builds the class path of jars to ship, materializes the DAG, and launches the
   * application through the {@code hadoop} command via {@link #launchApp(LaunchParams)}.
   *
   * @param app the streaming application to launch
   * @param configProperties configuration properties passed to the child process
   * @return a handle to the launched application
   * @throws IOException when dependencies cannot be resolved or the launch fails
   */
  public AppHandle launchApp(StreamingApplication app, Properties configProperties)
      throws IOException {
    List<File> jarsToShip = getYarnDeployDependencies();
    StringBuilder classpath = new StringBuilder();
    for (File path : jarsToShip) {
      if (path.isDirectory()) {
        // Directories (e.g. build reactor output) are packaged into temporary
        // jars so they can be shipped to the cluster.
        File tmpJar = File.createTempFile("beam-runners-apex-", ".jar");
        createJar(path, tmpJar);
        tmpJar.deleteOnExit();
        path = tmpJar;
      }
      if (classpath.length() != 0) {
        classpath.append(':');
      }
      classpath.append(path.getAbsolutePath());
    }

    // Populate the DAG in embedded mode so it can be serialized for the child process.
    EmbeddedAppLauncher<?> embeddedLauncher = Launcher.getLauncher(LaunchMode.EMBEDDED);
    DAG dag = embeddedLauncher.getDAG();
    app.populateDAG(dag, new Configuration(false));

    Attribute.AttributeMap launchAttributes = new Attribute.AttributeMap.DefaultAttributeMap();
    // YarnAppLauncher expects a comma-separated jar list rather than a ':'-separated path.
    launchAttributes.put(YarnAppLauncher.LIB_JARS, classpath.toString().replace(':', ','));
    LaunchParams lp = new LaunchParams(dag, launchAttributes, configProperties);
    lp.cmd = "hadoop " + ApexYarnLauncher.class.getName();
    HashMap<String, String> env = new HashMap<>();
    env.put("HADOOP_USER_CLASSPATH_FIRST", "1");
    env.put("HADOOP_CLASSPATH", classpath.toString());
    lp.env = env;
    return launchApp(lp);
  }

  /**
   * Serializes the launch parameters to a temporary file, then either launches
   * in-process (when no command is configured) or spawns the configured command
   * ({@code hadoop ...}) as a child process and waits for it to finish.
   *
   * @param params the launch parameters
   * @return a handle to the launched application (status check not implemented yet)
   * @throws IOException when the parameters cannot be written or the process fails
   */
  protected AppHandle launchApp(LaunchParams params) throws IOException {
    File tmpFile = File.createTempFile("beam-runner-apex", "params");
    tmpFile.deleteOnExit();
    try (FileOutputStream fos = new FileOutputStream(tmpFile)) {
      SerializationUtils.serialize(params, fos);
    }
    if (params.getCmd() == null) {
      // Embedded launch in the current JVM.
      ApexYarnLauncher.main(new String[] {tmpFile.getAbsolutePath()});
    } else {
      String cmd = params.getCmd() + " " + tmpFile.getAbsolutePath();
      ByteArrayOutputStream consoleOutput = new ByteArrayOutputStream();
      LOG.info("Executing: {} with {}", cmd, params.getEnv());
      ProcessBuilder pb = new ProcessBuilder("bash", "-c", cmd);
      Map<String, String> env = pb.environment();
      env.putAll(params.getEnv());
      Process p = pb.start();
      ProcessWatcher pw = new ProcessWatcher(p);
      InputStream output = p.getInputStream();
      InputStream error = p.getErrorStream();
      // IOUtils.copy drains a stream until EOF; loop again in case the process
      // keeps running after a stream closes.
      while (!pw.isFinished()) {
        IOUtils.copy(output, consoleOutput);
        IOUtils.copy(error, consoleOutput);
      }
      if (pw.rc != 0) {
        String msg = "The Beam Apex runner in non-embedded mode requires the Hadoop client"
            + " to be installed on the machine from which you launch the job"
            + " and the 'hadoop' script in $PATH";
        LOG.error(msg);
        throw new RuntimeException("Failed to run: " + cmd + " (exit code " + pw.rc + ")" + "\n"
            + consoleOutput.toString());
      }
    }
    return new AppHandle() {
      @Override
      public boolean isFinished() {
        // TODO (future PR): interaction with child process
        LOG.warn("YARN application runs asynchronously and status check not implemented.");
        return true;
      }
      @Override
      public void shutdown(ShutdownMode arg0) throws LauncherException {
        // TODO (future PR): interaction with child process
        throw new UnsupportedOperationException();
      }
    };
  }

  /**
   * From the current classpath, find the jar files that need to be deployed
   * with the application to run on YARN. Hadoop dependencies are provided
   * through the Hadoop installation and the application should not bundle them
   * to avoid conflicts. This is done by removing the Hadoop compile
   * dependencies (transitively) by parsing the Maven dependency tree.
   *
   * @return list of jar files to ship
   * @throws IOException when dependency information cannot be read
   */
  public static List<File> getYarnDeployDependencies() throws IOException {
    // NOTE(review): if the "dependency-tree" resource is missing this NPEs in
    // InputStreamReader — presumably the build always produces it; confirm.
    try (InputStream dependencyTree = ApexRunner.class.getResourceAsStream("dependency-tree")) {
      try (BufferedReader br = new BufferedReader(new InputStreamReader(dependencyTree))) {
        String line;
        List<String> excludes = new ArrayList<>();
        int excludeLevel = Integer.MAX_VALUE;
        while ((line = br.readLine()) != null) {
          // The first letter's column encodes the dependency tree depth; anything
          // nested under an org.apache.hadoop artifact is excluded transitively.
          for (int i = 0; i < line.length(); i++) {
            char c = line.charAt(i);
            if (Character.isLetter(c)) {
              if (i > excludeLevel) {
                excludes.add(line.substring(i));
              } else {
                if (line.substring(i).startsWith("org.apache.hadoop")) {
                  excludeLevel = i;
                  excludes.add(line.substring(i));
                } else {
                  excludeLevel = Integer.MAX_VALUE;
                }
              }
              break;
            }
          }
        }
        // Map Maven coordinates (group:artifact:type[:classifier]:version:scope)
        // to the jar file names they produce.
        Set<String> excludeJarFileNames = Sets.newHashSet();
        for (String exclude : excludes) {
          String[] mvnc = exclude.split(":");
          String fileName = mvnc[1] + "-";
          if (mvnc.length == 6) {
            fileName += mvnc[4] + "-" + mvnc[3]; // with classifier
          } else {
            fileName += mvnc[3];
          }
          fileName += ".jar";
          excludeJarFileNames.add(fileName);
        }

        ClassLoader classLoader = ApexYarnLauncher.class.getClassLoader();
        URL[] urls = ((URLClassLoader) classLoader).getURLs();
        List<File> dependencyJars = new ArrayList<>();
        for (int i = 0; i < urls.length; i++) {
          File f = new File(urls[i].getFile());
          // dependencies can also be directories in the build reactor,
          // the Apex client will automatically create jar files for those.
          if (f.exists() && !excludeJarFileNames.contains(f.getName())) {
            dependencyJars.add(f);
          }
        }
        return dependencyJars;
      }
    }
  }

  /**
   * Create a jar file from the given directory.
   * @param dir source directory
   * @param jarFile jar file name
   * @throws IOException when file cannot be created
   */
  public static void createJar(File dir, File jarFile) throws IOException {
    final Map<String, ?> env = Collections.singletonMap("create", "true");
    if (jarFile.exists() && !jarFile.delete()) {
      throw new RuntimeException("Failed to remove " + jarFile);
    }
    URI uri = URI.create("jar:" + jarFile.toURI());
    try (final FileSystem zipfs = FileSystems.newFileSystem(uri, env)) {
      // The manifest is written first; an existing manifest in the source
      // directory is copied, otherwise an empty one is generated.
      File manifestFile = new File(dir, JarFile.MANIFEST_NAME);
      Files.createDirectory(zipfs.getPath("META-INF"));
      try (final OutputStream out = Files.newOutputStream(zipfs.getPath(JarFile.MANIFEST_NAME))) {
        if (!manifestFile.exists()) {
          new Manifest().write(out);
        } else {
          FileUtils.copyFile(manifestFile, out);
        }
      }
      final java.nio.file.Path root = dir.toPath();
      Files.walkFileTree(root, new java.nio.file.SimpleFileVisitor<Path>() {
        String relativePath;
        @Override
        public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs)
            throws IOException {
          relativePath = root.relativize(dir).toString();
          if (!relativePath.isEmpty()) {
            if (!relativePath.endsWith("/")) {
              relativePath += "/";
            }
            // META-INF was already created above for the manifest.
            if (!relativePath.equals("META-INF/")) {
              final Path dstDir = zipfs.getPath(relativePath);
              Files.createDirectory(dstDir);
            }
          }
          return super.preVisitDirectory(dir, attrs);
        }
        @Override
        public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
          String name = relativePath + file.getFileName();
          if (!JarFile.MANIFEST_NAME.equals(name)) {
            try (final OutputStream out = Files.newOutputStream(zipfs.getPath(name))) {
              FileUtils.copyFile(file.toFile(), out);
            }
          }
          return super.visitFile(file, attrs);
        }
        @Override
        public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException {
          relativePath = root.relativize(dir.getParent()).toString();
          if (!relativePath.isEmpty() && !relativePath.endsWith("/")) {
            relativePath += "/";
          }
          return super.postVisitDirectory(dir, exc);
        }
      });
    }
  }

  /**
   * Transfer the properties to the configuration object.
   * @param conf the Hadoop configuration to populate
   * @param props the properties to copy into {@code conf}
   */
  public static void addProperties(Configuration conf, Properties props) {
    for (final String propertyName : props.stringPropertyNames()) {
      String propertyValue = props.getProperty(propertyName);
      conf.set(propertyName, propertyValue);
    }
  }

  /**
   * The main method expects the serialized DAG and will launch the YARN application.
   * @param args location of launch parameters
   * @throws IOException when parameters cannot be read
   */
  public static void main(String[] args) throws IOException {
    checkArgument(args.length == 1, "exactly one argument expected");
    File file = new File(args[0]);
    checkArgument(file.exists() && file.isFile(), "invalid file path %s", file);
    // try-with-resources: the original leaked this FileInputStream.
    final LaunchParams params;
    try (FileInputStream fis = new FileInputStream(file)) {
      params = (LaunchParams) SerializationUtils.deserialize(fis);
    }
    StreamingApplication apexApp = new StreamingApplication() {
      @Override
      public void populateDAG(DAG dag, Configuration conf) {
        copyShallow(params.dag, dag);
      }
    };
    Configuration conf = new Configuration(); // configuration from Hadoop client
    addProperties(conf, params.configProperties);
    AppHandle appHandle = params.getApexLauncher().launchApp(apexApp, conf,
        params.launchAttributes);
    if (appHandle == null) {
      throw new AssertionError("Launch returns null handle.");
    }
    // TODO (future PR)
    // At this point the application is running, but this process should remain active to
    // allow the parent to implement the runner result.
  }

  /**
   * Launch parameters that will be serialized and passed to the child process.
   */
  @VisibleForTesting
  protected static class LaunchParams implements Serializable {
    private static final long serialVersionUID = 1L;
    private final DAG dag;
    private final Attribute.AttributeMap launchAttributes;
    private final Properties configProperties;
    private HashMap<String, String> env;
    private String cmd;

    protected LaunchParams(DAG dag, AttributeMap launchAttributes, Properties configProperties) {
      this.dag = dag;
      this.launchAttributes = launchAttributes;
      this.configProperties = configProperties;
    }

    protected Launcher<?> getApexLauncher() {
      return Launcher.getLauncher(LaunchMode.YARN);
    }

    protected String getCmd() {
      return cmd;
    }

    protected Map<String, String> getEnv() {
      return env;
    }
  }

  /**
   * Copies all non-static fields from one DAG instance to another via reflection.
   * Both instances must be of the exact same class.
   */
  private static void copyShallow(DAG from, DAG to) {
    checkArgument(from.getClass() == to.getClass(), "must be same class %s %s",
        from.getClass(), to.getClass());
    Field[] fields = from.getClass().getDeclaredFields();
    AccessibleObject.setAccessible(fields, true);
    for (int i = 0; i < fields.length; i++) {
      Field field = fields[i];
      if (!java.lang.reflect.Modifier.isStatic(field.getModifiers())) {
        try {
          field.set(to, field.get(from));
        } catch (IllegalArgumentException | IllegalAccessException e) {
          throw new RuntimeException(e);
        }
      }
    }
  }

  /**
   * Starts a command and waits for it to complete.
   */
  public static class ProcessWatcher implements Runnable {
    private final Process p;
    private volatile boolean finished = false;
    private volatile int rc;

    public ProcessWatcher(Process p) {
      this.p = p;
      new Thread(this).start();
    }

    public boolean isFinished() {
      return finished;
    }

    @Override
    public void run() {
      try {
        rc = p.waitFor();
      } catch (InterruptedException e) {
        // Restore the interrupt status instead of silently swallowing it,
        // so callers on this thread can still observe the interruption.
        Thread.currentThread().interrupt();
      }
      finished = true;
    }
  }
}
| |
/**
* Copyright 2011-2019 Asakusa Framework Team.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.asakusafw.directio.hive.parquet.v2;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.HashMap;
import java.util.Map;
import org.apache.parquet.column.Dictionary;
import org.apache.parquet.io.api.Binary;
import org.apache.parquet.io.api.RecordConsumer;
import org.apache.parquet.schema.PrimitiveType;
import org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName;
import org.apache.parquet.schema.Type;
import org.apache.parquet.schema.Type.Repetition;
import com.asakusafw.directio.hive.util.TemporalUtil;
import com.asakusafw.runtime.value.DateOption;
import com.asakusafw.runtime.value.DateTimeOption;
import com.asakusafw.runtime.value.DateUtil;
import com.asakusafw.runtime.value.ValueOption;
/**
* Converts between {@link ValueOption} and {@code timestamp (binary)}.
* @since 0.7.2
*/
public enum TimestampValueDrivers implements ParquetValueDriver {

    /**
     * {@link DateOption}.
     */
    DATE(DateOption.class) {
        @Override
        public ValueConverter getConverter() {
            return new DateConverter();
        }
        @Override
        public ValueWriter getWriter() {
            return new DateWriter();
        }
    },

    /**
     * {@link DateTimeOption}.
     */
    DATETIME(DateTimeOption.class) {
        @Override
        public ValueConverter getConverter() {
            return new DateTimeConverter();
        }
        @Override
        public ValueWriter getWriter() {
            return new DateTimeWriter();
        }
    },
    ;

    // The ValueOption subtype this driver handles; used as the lookup key in find().
    final Class<? extends ValueOption<?>> valueOptionClass;

    TimestampValueDrivers(Class<? extends ValueOption<?>> valueOptionClass) {
        this.valueOptionClass = valueOptionClass;
    }

    @Override
    public Type getType(String name) {
        // INT96 is the legacy Parquet/Hive physical type for timestamps:
        // 8 bytes time-of-day nanos followed by 4 bytes Julian day number.
        return new PrimitiveType(Repetition.OPTIONAL, PrimitiveTypeName.INT96, name);
    }

    /**
     * Returns a {@link ParquetValueDriver} for the specified type.
     * @param valueClass the {@link ValueOption} type
     * @return the corresponded {@link ParquetValueDriver}, or {@code null} if it is not found
     */
    public static ParquetValueDriver find(Class<?> valueClass) {
        return Lazy.FROM_CLASS.get(valueClass);
    }

    // Initialization-on-demand holder: the lookup map is built lazily on first use.
    private static final class Lazy {
        static final Map<Class<?>, TimestampValueDrivers> FROM_CLASS;
        static {
            Map<Class<?>, TimestampValueDrivers> map = new HashMap<>();
            for (TimestampValueDrivers element : TimestampValueDrivers.values()) {
                map.put(element.valueOptionClass, element);
            }
            FROM_CLASS = map;
        }
        private Lazy() {
            return;
        }
    }

    // Shared encoder: packs (julianDay, timeOfDayNanos) into a 12-byte
    // little-endian INT96 binary (nanos first, then day) and emits it.
    abstract static class AbstractWriter implements ValueWriter {
        void write(int julianDay, long timeOfDayNanos, RecordConsumer consumer) {
            ByteBuffer buf = ByteBuffer.allocate(12).order(ByteOrder.LITTLE_ENDIAN);
            buf.clear();
            buf.putLong(timeOfDayNanos);
            buf.putInt(julianDay);
            buf.flip();
            // fromConstantByteBuffer: buf is freshly allocated and never reused.
            consumer.addBinary(Binary.fromConstantByteBuffer(buf));
        }
    }

    // Writes a DateOption as an INT96 timestamp at midnight of that day.
    static class DateWriter extends AbstractWriter {
        @Override
        public void write(Object value, RecordConsumer consumer) {
            DateOption option = (DateOption) value;
            int julianDayNumber = TemporalUtil.getJulianDayNumber(option.get());
            long nanoTime = TemporalUtil.getTimeOfDayNanos(option.get());
            write(julianDayNumber, nanoTime, consumer);
        }
    }

    // Writes a DateTimeOption as an INT96 timestamp.
    static class DateTimeWriter extends AbstractWriter {
        @Override
        public void write(Object value, RecordConsumer consumer) {
            DateTimeOption option = (DateTimeOption) value;
            int julianDayNumber = TemporalUtil.getJulianDayNumber(option.get());
            long nanoTime = TemporalUtil.getTimeOfDayNanos(option.get());
            write(julianDayNumber, nanoTime, consumer);
        }
    }

    // Shared decoder: unpacks INT96 binaries (and dictionary pages of them)
    // back into (julianDay, nanoTime) pairs. Reads must mirror AbstractWriter:
    // 8-byte nanos first, then 4-byte day, little-endian.
    abstract static class AbstractConverter extends ValueConverter {

        // Decoded dictionary entries, indexed by dictionary id; grown on demand.
        private int[] julianDays;

        private long[] nanoTimes;

        protected AbstractConverter() {
            return;
        }

        @Override
        public boolean hasDictionarySupport() {
            return true;
        }

        @Override
        public void setDictionary(Dictionary dictionary) {
            int size = dictionary.getMaxId() + 1;
            if (this.julianDays == null || this.julianDays.length < size) {
                // 20% headroom to avoid reallocating for slightly larger dictionaries.
                int capacity = (int) (size * 1.2) + 1;
                this.julianDays = new int[capacity];
                this.nanoTimes = new long[capacity];
            }
            for (int id = 0, max = dictionary.getMaxId(); id <= max; id++) {
                ByteBuffer bytes = dictionary.decodeToBinary(id).toByteBuffer().order(ByteOrder.LITTLE_ENDIAN);
                long time = bytes.getLong();
                int day = bytes.getInt();
                julianDays[id] = day;
                nanoTimes[id] = time;
            }
        }

        @Override
        public void addValueFromDictionary(int dictionaryId) {
            addNanoTime(julianDays[dictionaryId], nanoTimes[dictionaryId]);
        }

        @Override
        public void addBinary(Binary value) {
            ByteBuffer bytes = value.toByteBuffer().order(ByteOrder.LITTLE_ENDIAN);
            long time = bytes.getLong();
            int day = bytes.getInt();
            addNanoTime(day, time);
        }

        // Template method: subclasses convert the decoded pair into their target option.
        abstract void addNanoTime(int julianDay, long nanoTime);
    }

    // Decodes an INT96 timestamp into a DateOption (truncates to the day).
    static class DateConverter extends AbstractConverter {

        private DateOption target;

        @Override
        public void set(ValueOption<?> value) {
            this.target = (DateOption) value;
        }

        @SuppressWarnings("deprecation")
        @Override
        void addNanoTime(int julianDay, long nanoTime) {
            long seconds = TemporalUtil.toElapsedSeconds(julianDay, nanoTime);
            target.modify(DateUtil.getDayFromSeconds(seconds));
        }
    }

    // Decodes an INT96 timestamp into a DateTimeOption (second precision).
    static class DateTimeConverter extends AbstractConverter {

        private DateTimeOption target;

        @Override
        public void set(ValueOption<?> value) {
            this.target = (DateTimeOption) value;
        }

        @SuppressWarnings("deprecation")
        @Override
        void addNanoTime(int julianDay, long nanoTime) {
            target.modify(TemporalUtil.toElapsedSeconds(julianDay, nanoTime));
        }
    }
}
| |
package com.missionhub.application;
import android.content.Context;
import android.database.sqlite.SQLiteDatabase;
import android.util.Log;
import com.google.analytics.tracking.android.GoogleAnalytics;
import com.google.analytics.tracking.android.Tracker;
import com.missionhub.BuildConfig;
import com.missionhub.R;
import com.missionhub.event.ToastEvent;
import com.missionhub.model.DaoMaster;
import com.missionhub.model.DaoMaster.OpenHelper;
import com.missionhub.model.DaoSession;
import com.missionhub.model.MissionHubOpenHelper;
import com.missionhub.util.ErrbitReportSender;
import com.missionhub.util.LruBitmapCache;
import com.missionhub.util.NetworkUtils;
import com.newrelic.agent.android.NewRelic;
import org.acra.ACRA;
import org.acra.ACRAConfiguration;
import org.acra.ACRAConfigurationException;
import org.acra.ReportingInteractionMode;
import org.acra.annotation.ReportsCrashes;
import org.holoeverywhere.widget.Toast;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import de.greenrobot.event.EventBus;
/**
* The MissionHub application context
*/
@ReportsCrashes(formKey = "")
public class Application extends org.holoeverywhere.app.Application {
/**
* the logging tag
*/
public static final String TAG = Application.class.getSimpleName();
/**
* singleton application
*/
private static Application sApplication;
/**
* the executor service
*/
private final ExecutorService mExecutorService = Executors.newFixedThreadPool(25);
/**
* application context's sqlite database
*/
private SQLiteDatabase mDb;
/**
* application context's database cache mSession
*/
private DaoSession mDaoSession;
/**
* database name
*/
private static final String DB_NAME = "missionhub.db";
/**
* the missionhub session
*/
private final Session mSession = new Session();
/**
* Initialize the static context
*/
    public Application() {
        // The singleton reference is assigned in the constructor (and again in
        // onCreate) so static accessors work as early as possible.
        sApplication = this;
    }
/**
* called when the application is created.
*/
    @Override
    public void onCreate() {
        sApplication = this;
        // ACRA is configured before super.onCreate() so crash reporting covers
        // the remainder of application startup.
        if (Configuration.isACRAEnabled()) {
            try {
                ACRAConfiguration config = ACRA.getConfig();
                config.setFormKey(Configuration.getACRAFormKey());
                config.setFormUri(Configuration.getACRAFormUri());
                config.setResToastText(R.string.crash_dialog_title);
                config.setResDialogCommentPrompt(R.string.crash_dialog_comment_prompt);
                config.setResDialogText(R.string.crash_dialog_text);
                config.setResDialogTitle(R.string.crash_dialog_title);
                config.setResDialogIcon(R.drawable.ic_launcher);
                config.setResDialogOkToast(R.string.crash_dialog_ok_toast);
                config.setMode(ReportingInteractionMode.DIALOG);
                ACRA.init(this);
                ACRA.getErrorReporter().setReportSender(new ErrbitReportSender());
                // Attach every configuration entry as custom crash-report data.
                for (Map.Entry<String, String> property : Configuration.getInstance().asMap().entrySet()) {
                    ACRA.getErrorReporter().putCustomData("CONFIGURATION_" + property.getKey(), property.getValue());
                }
                ErrbitReportSender.putErrbitData(ErrbitReportSender.ErrbitReportField.ENVIRONMENT_NAME, BuildConfig.BUILD_TYPE);
            } catch (ACRAConfigurationException e) {
                Log.e("MissionHub", e.getMessage(), e);
            }
        }
        super.onCreate();
        UpgradeManager.doUpgrade();
        // Google Analytics setup (tracker becomes the default for getTracker()).
        if (Configuration.isAnalyticsEnabled()) {
            GoogleAnalytics ga = GoogleAnalytics.getInstance(this);
            ga.setDebug(Configuration.isAnalyticsDebug());
            Tracker tracker = ga.getTracker(Configuration.getAnalyticsKey());
            tracker.setAnonymizeIp(Configuration.isAnalyticsAnonymizeIp());
            tracker.setUseSecure(true);
            ga.setDefaultTracker(tracker);
        }
        // set up the networking
        NetworkUtils.disableConnectionReuseIfNecessary();
        NetworkUtils.enableHttpResponseCache(this);
        // Subscribe to ToastEvent so showToast() works from any thread.
        registerEventSubscriber(this, ToastEvent.class);
        if (Configuration.isNewRelicEnabled()) {
            try {
                NewRelic.withApplicationToken(Configuration.getNewRelicApiKey()).start(this);
            } catch(Exception e) {
                // NewRelic startup failure is non-fatal; log and continue.
                Log.e("MissionHub", e.getMessage(), e);
            }
        }
    }
/**
* @return the singleton instance of the application
*/
    public static Application getInstance() {
        // Set in the constructor, so non-null once the Application object exists.
        return sApplication;
    }
/**
* @return the application context
*/
    public static Context getContext() {
        // Application context: safe to hold statically, outlives activities.
        return getInstance().getApplicationContext();
    }
/**
* returns a global executor service for long running processes
*
* @return
*/
    public static ExecutorService getExecutor() {
        // Shared fixed-size (25 thread) pool created with the Application.
        return getInstance().mExecutorService;
    }
/**
* triggered when a log memory notification is received from the os. posts an OnLowMemoryEvent event to notify
* listeners that they should reduce their memory usage.
*/
    @Override
    public void onLowMemory() {
        super.onLowMemory();
        Log.e(TAG, "****** ====== ON LOW MEMORY ====== ******");
        // Drop the bitmap cache and the DAO session's identity-scope cache,
        // then tell subscribers to release whatever they can.
        LruBitmapCache.getInstance().evictAll();
        getDb().clear();
        postEvent(new OnLowMemoryEvent());
    }
/**
* event posted on low memory
*/
    public static class OnLowMemoryEvent {
        // Marker event: carries no data, signals subscribers to release memory.
    }
/**
* @return the raw sqlite database for the application context
*/
public static SQLiteDatabase getRawDb() {
if (getInstance().mDb == null) {
final OpenHelper helper = new MissionHubOpenHelper(getContext(), DB_NAME, null);
synchronized (Application.class) {
getInstance().mDb = helper.getWritableDatabase();
}
}
return getInstance().mDb;
}
/**
* Closes the database entirely
*/
    public static void closeDb() {
        synchronized (Application.class) {
            // Drop the session first so no stale session points at a closed db.
            getInstance().mDaoSession = null;
            if (getInstance().mDb != null) {
                getInstance().mDb.close();
                getInstance().mDb = null;
            }
        }
    }
/**
* @return the database database session for the application context
*/
public static DaoSession getDb() {
if (getInstance().mDaoSession == null) {
final DaoMaster daoMaster = new DaoMaster(getRawDb());
synchronized (Application.class) {
getInstance().mDaoSession = daoMaster.newSession();
}
}
return getInstance().mDaoSession;
}
/**
* Deletes the mh-mDb
*
* @return true if the database was successfully deleted; else false.
*/
    public boolean deleteDatabase() {
        // NOTE(review): getRawDb() lazily opens the db if it was closed, only to
        // close it again here — presumably intentional to guarantee a closed
        // handle before deletion; confirm.
        getRawDb().close();
        synchronized (Application.class) {
            mDaoSession = null;
            mDb = null;
        }
        return deleteDatabase(DB_NAME);
    }
/**
* Returns the package version code
*
* @return
*/
    public static int getVersionCode() {
        // Compile-time constant from the build configuration.
        return BuildConfig.VERSION_CODE;
    }
/**
* Returns the package version name
*
* @return
*/
    public static String getVersionName() {
        // Compile-time constant from the build configuration.
        return BuildConfig.VERSION_NAME;
    }
/**
* Returns the default event bus
*
* @return
*/
    public static EventBus getEventBus() {
        // All helpers below operate on this single default bus.
        return EventBus.getDefault();
    }
    /**
     * Posts an event to the default EventBus.
     *
     * @param event the event object delivered to all registered subscribers
     */
    public static void postEvent(final Object event) {
        getEventBus().post(event);
    }
    /**
     * Registers an EventBus event subscriber with the default bus for all of
     * its declared event handler methods.
     *
     * @param subscriber the object whose onEvent* methods should receive events
     */
    public static void registerEventSubscriber(final Object subscriber) {
        getEventBus().register(subscriber);
    }
    /**
     * Registers an EventBus event subscriber with the default bus, restricted
     * to the given event types.
     *
     * @param subscriber     the object whose handler methods should receive events
     * @param eventType      first event type to subscribe to
     * @param moreEventTypes any additional event types to subscribe to
     */
    public static void registerEventSubscriber(final Object subscriber, final Class<?> eventType, final Class<?>... moreEventTypes) {
        getEventBus().register(subscriber, eventType, moreEventTypes);
    }
    /**
     * Unregisters an EventBus event subscriber from the default bus.
     *
     * @param subscriber the previously registered subscriber
     */
    public static void unregisterEventSubscriber(final Object subscriber) {
        getEventBus().unregister(subscriber);
    }
    /**
     * Unregisters an EventBus event subscriber from the default bus for the
     * given event types only.
     *
     * @param subscriber the previously registered subscriber
     * @param eventTypes the event types to stop receiving
     */
    public static void unregisterEventSubscriber(final Object subscriber, final Class<?>... eventTypes) {
        getEventBus().unregister(subscriber, eventTypes);
    }
    /**
     * Shows a toast for a string resource.
     *
     * @param message  string resource id of the text to display
     * @param duration a {@link Toast} duration constant
     */
    public static void showToast(final int message, final int duration) {
        showToast(getContext().getString(message), duration);
    }
    /**
     * Shows a toast by posting a {@link ToastEvent}; the toast itself is built
     * on the main thread by {@link #onEventMainThread(ToastEvent)}, so this is
     * safe to call from any thread.
     *
     * @param message  the text to display
     * @param duration a {@link Toast} duration constant
     */
    public static void showToast(final String message, final int duration) {
        postEvent(new ToastEvent(message, duration));
    }
    /**
     * EventBus callback, invoked on the main thread, that actually displays a
     * toast requested via {@link #showToast(String, int)}.
     */
    @SuppressWarnings("unused")
    public void onEventMainThread(final ToastEvent event) {
        Toast.makeText(getContext(), event.message, event.duration).show();
    }
public static Tracker getTracker() {
if (Configuration.isAnalyticsEnabled()) {
return GoogleAnalytics.getInstance(getContext()).getDefaultTracker();
}
return null;
}
public static void trackView(String view) {
if (getTracker() != null) {
getTracker().sendView(view);
}
}
public static void trackException(String thread, Throwable e, boolean fatal) {
if (getTracker() != null) {
getTracker().sendException(thread, e, fatal);
}
try {
ACRA.getErrorReporter().handleSilentException(e);
} catch (Exception e2) {
/* ignore */
}
}
    /**
     * Sends an analytics event with a value of 0; no-op when tracking is
     * disabled.
     */
    public static void trackEvent(String category, String action, String label) {
        trackEvent(category, action, label, 0);
    }
public static void trackEvent(String category, String action, String label, long value) {
if (getTracker() != null) {
getTracker().sendEvent(category, action, label, value);
}
}
public static void trackNewSession() {
if (getTracker() != null) {
getTracker().setStartSession(true);
}
}
    /**
     * Returns the current user session held by the application singleton.
     */
    public static Session getSession() {
        return getInstance().mSession;
    }
    /**
     * Returns true when this build's product flavor is named "debug".
     * NOTE(review): this keys off {@code BuildConfig.FLAVOR}, not
     * {@code BuildConfig.DEBUG}; confirm the project really defines a "debug"
     * flavor rather than relying on the debug build type.
     */
    public static boolean isDebug() {
        return BuildConfig.FLAVOR.equals("debug");
    }
public static Market getMarket() {
try {
return Market.valueOf(BuildConfig.FLAVOR.toUpperCase());
} catch (Exception e) {
Log.e(TAG, e.getMessage(), e);
return Market.PLAY;
}
}
}
| |
/*L
* Copyright SAIC, SAIC-Frederick.
*
* Distributed under the OSI-approved BSD 3-Clause License.
* See http://ncip.github.com/caadapter/LICENSE.txt for details.
*/
package gov.nih.nci.cbiit.cmts.ui.actions;
import gov.nih.nci.cbiit.cmts.ui.common.ActionConstants;
import gov.nih.nci.cbiit.cmts.ui.common.ContextManagerClient;
import gov.nih.nci.cbiit.cmts.ui.common.DefaultSettings;
import gov.nih.nci.cbiit.cmts.ui.main.MainFrame;
import gov.nih.nci.cbiit.cmts.ui.main.MainFrameContainer;
import javax.swing.Icon;
import javax.swing.ImageIcon;
import javax.swing.JComponent;
import java.awt.event.ActionEvent;
import java.awt.Component;
import java.awt.Event;
import javax.swing.KeyStroke;
import java.awt.event.KeyEvent;
import java.io.File;
/**
* This class defines the default implementation of context sensitive open action.
*
* @author Chunqing Lin
* @author LAST UPDATE $Author: wangeug $
* @since CMTS v1.0
* @version $Revision: 1.2 $
* @date $Date: 2009-11-23 18:32:47 $
*/
public abstract class DefaultContextOpenAction extends AbstractContextAction
{
    protected static final String COMMAND_NAME = ActionConstants.OPEN;
    protected static final Character COMMAND_MNEMONIC = new Character('O');
    //hotkey//protected static final KeyStroke ACCELERATOR_KEY_STROKE = KeyStroke.getKeyStroke(KeyEvent.VK_O, Event.CTRL_MASK, false);
    protected static final ImageIcon IMAGE_ICON = new ImageIcon(DefaultSettings.getImage("ico_open.bmp"));
    protected static final String TOOL_TIP_DESCRIPTION = ActionConstants.OPEN;

    /** The main frame this open action operates against; not serialized. */
    protected transient MainFrameContainer mainFrame;
    /** The event that triggered the latest invocation; set by doAction(). */
    protected transient ActionEvent actionEvent;

    /**
     * Defines an <code>Action</code> object with a default
     * description string and default icon.
     *
     * @param mainFrame the owning main frame container
     */
    public DefaultContextOpenAction(MainFrameContainer mainFrame)
    {
        this(COMMAND_NAME, mainFrame);
    }

    /**
     * Defines an <code>Action</code> object with the specified
     * description string and a default icon.
     *
     * @param name display name of the action
     * @param mainFrame the owning main frame container
     */
    public DefaultContextOpenAction(String name, MainFrameContainer mainFrame)
    {
        this(name, IMAGE_ICON, mainFrame);
    }

    /**
     * Defines an <code>Action</code> object with the specified
     * description string and the specified icon.
     *
     * @param name display name of the action
     * @param icon icon shown next to the action
     * @param mainFrame the owning main frame container
     */
    public DefaultContextOpenAction(String name, Icon icon, MainFrameContainer mainFrame)
    {
        super(name, icon);
        this.mainFrame = mainFrame;
        setAdditionalAttributes();
    }

    /**
     * Overrides the superclass hook to plug in this action's own attributes:
     * mnemonic, action command type and tool tip.
     */
    protected void setAdditionalAttributes()
    {
        setMnemonic(COMMAND_MNEMONIC);
        //hotkey//setAcceleratorKey(ACCELERATOR_KEY_STROKE);
        setActionCommandType(DESKTOP_ACTION_TYPE);
        setShortDescription(TOOL_TIP_DESCRIPTION);
    }

    /**
     * Return the real implementation of ContextClient class.
     * @return the real implementation of ContextClient class.
     */
    protected abstract Class getContextClientClass();

    /**
     * Return the real user input of the file to be opened.
     * @return the real user input of the file to be opened.
     */
    protected abstract File getFileFromUserInput();

    /**
     * Launch the context manager client to UI.
     * @param panel the newly created context client panel
     * @param file the file chosen by the user, to be opened in the panel
     */
    protected abstract void launchPanel(final ContextManagerClient panel, final File file);

    /**
     * Called by doAction() before the real action is performed. If a panel of
     * the target type is already open, it must be closed (successfully) before
     * a new one may be opened.
     *
     * @param e the triggering event
     * @return true if the action shall proceed;
     *         false if the action cannot continue and shall return immediately.
     */
    protected boolean preActionPerformed(ActionEvent e)
    {
        try
        {
            JComponent currentActivePanel = mainFrame.hasComponentOfGivenClass(getContextClientClass(), true);
            if (currentActivePanel != null)
            {
                // Delegate to the open panel's own close action and only
                // proceed when that close succeeded.
                AbstractContextAction closeAction = (AbstractContextAction) ((ContextManagerClient) currentActivePanel).getDefaultCloseAction();
                closeAction.actionPerformed(e);
                return closeAction.isSuccessfullyPerformed();
            }
            // Nothing to close; the open action may proceed.
            return true;
        }
        catch (Throwable t)
        {
            reportThrowableToUI(t, this.mainFrame.getAssociatedUIComponent());
            return false;
        }
    }

    /**
     * Instantiates the context client panel and launches it with the file the
     * user selected. Descendant classes could override this method to provide
     * actions executed after the main action, such as updating menu status.
     *
     * @param e the triggering event
     * @return true if the action shall proceed;
     *         false if the action cannot continue and shall return immediately.
     */
    protected boolean postActionPerformed(ActionEvent e)
    {
        ContextManagerClient panel = null;
        boolean everythingGood = true;
        try
        {
            Class contextClientClass = getContextClientClass();
            if (contextClientClass != null)
            {
                // getDeclaredConstructor().newInstance() replaces the
                // deprecated Class.newInstance(), which rethrew checked
                // constructor exceptions without wrapping them.
                panel = (ContextManagerClient) contextClientClass.getDeclaredConstructor().newInstance();
                launchPanel(panel, getFileFromUserInput());
            }
        }
        catch (Throwable t)
        {
            reportThrowableToUI(t, this.mainFrame.getAssociatedUIComponent());
            if (panel != null)
            {
                // Close the panel if it was created before the failure.
                panel.getDefaultCloseAction().actionPerformed(e);
            }
            everythingGood = false;
        }
        return everythingGood;
    }

    /**
     * Invoked when an action occurs: runs the pre-step, then the main step,
     * recording overall success.
     *
     * @param e the triggering event
     * @return true when both steps completed successfully
     */
    protected boolean doAction(ActionEvent e)
    {
        try
        {
            this.actionEvent = e;
            if (!preActionPerformed(e))
            {
                // Return immediately if no further action is needed.
                setSuccessfullyPerformed(false);
                return false;
            }
            setSuccessfullyPerformed(postActionPerformed(e));
        }
        catch (Exception e1)
        {
            reportThrowableToUI(e1, this.mainFrame.getAssociatedUIComponent());
            setSuccessfullyPerformed(false);
        }
        return isSuccessfullyPerformed();
    }

    /**
     * Return the associated UI component.
     *
     * @return the associated UI component.
     */
    protected Component getAssociatedUIComponent()
    {
        return this.mainFrame.getAssociatedUIComponent();
    }
}
/**
* HISTORY : $Log: not supported by cvs2svn $
* HISTORY : Revision 1.1 2008/12/09 19:04:17 linc
* HISTORY : First GUI release
* HISTORY :
*/
| |
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
* Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.ec2.model;
import java.io.Serializable;
import java.util.Objects;

import com.amazonaws.AmazonWebServiceRequest;
import com.amazonaws.Request;
import com.amazonaws.services.ec2.model.transform.AttachVolumeRequestMarshaller;
/**
 * Request parameters for the EC2 <code>AttachVolume</code> operation: attaches
 * an EBS volume to an instance, exposing it under the given device name.
 */
public class AttachVolumeRequest extends AmazonWebServiceRequest implements
        Serializable, Cloneable, DryRunSupportedRequest<AttachVolumeRequest> {

    /**
     * The ID of the EBS volume. The volume and instance must be within the same
     * Availability Zone.
     */
    private String volumeId;

    /**
     * The ID of the instance.
     */
    private String instanceId;

    /**
     * The device name to expose to the instance (for example,
     * <code>/dev/sdh</code> or <code>xvdh</code>).
     */
    private String device;

    /**
     * Default constructor for AttachVolumeRequest object. Callers should use
     * the setter or fluent setter (with...) methods to initialize the object
     * after creating it.
     */
    public AttachVolumeRequest() {
    }

    /**
     * Constructs a new AttachVolumeRequest object. Callers should use the
     * setter or fluent setter (with...) methods to initialize any additional
     * object members.
     *
     * @param volumeId
     *        The ID of the EBS volume. The volume and instance must be within
     *        the same Availability Zone.
     * @param instanceId
     *        The ID of the instance.
     * @param device
     *        The device name to expose to the instance (for example,
     *        <code>/dev/sdh</code> or <code>xvdh</code>).
     */
    public AttachVolumeRequest(String volumeId, String instanceId, String device) {
        setVolumeId(volumeId);
        setInstanceId(instanceId);
        setDevice(device);
    }

    /**
     * Sets the ID of the EBS volume. The volume and instance must be within
     * the same Availability Zone.
     *
     * @param volumeId the EBS volume ID
     */
    public void setVolumeId(String volumeId) {
        this.volumeId = volumeId;
    }

    /**
     * @return the ID of the EBS volume; the volume and instance must be within
     *         the same Availability Zone
     */
    public String getVolumeId() {
        return this.volumeId;
    }

    /**
     * Fluent variant of {@link #setVolumeId(String)}.
     *
     * @param volumeId the EBS volume ID
     * @return this request, so that method calls can be chained together
     */
    public AttachVolumeRequest withVolumeId(String volumeId) {
        setVolumeId(volumeId);
        return this;
    }

    /**
     * Sets the ID of the instance.
     *
     * @param instanceId the instance ID
     */
    public void setInstanceId(String instanceId) {
        this.instanceId = instanceId;
    }

    /**
     * @return the ID of the instance
     */
    public String getInstanceId() {
        return this.instanceId;
    }

    /**
     * Fluent variant of {@link #setInstanceId(String)}.
     *
     * @param instanceId the instance ID
     * @return this request, so that method calls can be chained together
     */
    public AttachVolumeRequest withInstanceId(String instanceId) {
        setInstanceId(instanceId);
        return this;
    }

    /**
     * Sets the device name to expose to the instance (for example,
     * <code>/dev/sdh</code> or <code>xvdh</code>).
     *
     * @param device the device name
     */
    public void setDevice(String device) {
        this.device = device;
    }

    /**
     * @return the device name to expose to the instance (for example,
     *         <code>/dev/sdh</code> or <code>xvdh</code>)
     */
    public String getDevice() {
        return this.device;
    }

    /**
     * Fluent variant of {@link #setDevice(String)}.
     *
     * @param device the device name
     * @return this request, so that method calls can be chained together
     */
    public AttachVolumeRequest withDevice(String device) {
        setDevice(device);
        return this;
    }

    /**
     * This method is intended for internal use only. Returns the marshaled
     * request configured with additional parameters to enable operation
     * dry-run.
     */
    @Override
    public Request<AttachVolumeRequest> getDryRunRequest() {
        Request<AttachVolumeRequest> request = new AttachVolumeRequestMarshaller()
                .marshall(this);
        request.addParameter("DryRun", Boolean.toString(true));
        return request;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getVolumeId() != null)
            sb.append("VolumeId: ").append(getVolumeId()).append(",");
        if (getInstanceId() != null)
            sb.append("InstanceId: ").append(getInstanceId()).append(",");
        if (getDevice() != null)
            sb.append("Device: ").append(getDevice());
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        // instanceof is false for null, so this also covers the null check.
        if (!(obj instanceof AttachVolumeRequest))
            return false;
        AttachVolumeRequest other = (AttachVolumeRequest) obj;
        return Objects.equals(getVolumeId(), other.getVolumeId())
                && Objects.equals(getInstanceId(), other.getInstanceId())
                && Objects.equals(getDevice(), other.getDevice());
    }

    @Override
    public int hashCode() {
        // Objects.hash folds with the same prime-31 scheme as the hand-rolled
        // loop it replaces, so hash values are unchanged.
        return Objects.hash(getVolumeId(), getInstanceId(), getDevice());
    }

    @Override
    public AttachVolumeRequest clone() {
        return (AttachVolumeRequest) super.clone();
    }
}
| |
/*
* <p>Copyright: Copyright (c) 2007</p>
* <p>Company: Institut de recherches cliniques de Montréal (http://www.ircm.qc.ca)</p>
*/
package org.stripesstuff.plugin.session;
import java.io.Serializable;
import java.lang.reflect.Field;
import java.util.Collection;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpSession;
import org.stripesstuff.plugin.session.Session;
import net.sourceforge.stripes.action.ActionBean;
import net.sourceforge.stripes.action.ActionBeanContext;
import net.sourceforge.stripes.action.Resolution;
import net.sourceforge.stripes.controller.ExecutionContext;
import net.sourceforge.stripes.controller.Interceptor;
import net.sourceforge.stripes.controller.Intercepts;
import net.sourceforge.stripes.controller.LifecycleStage;
import net.sourceforge.stripes.util.ReflectUtil;
/**
* Interceptor that stores or restores session objects.
*
* @author Christian Poitras
*/
@Intercepts(value={LifecycleStage.ActionBeanResolution, LifecycleStage.EventHandling})
public class SessionStoreInterceptor implements Interceptor {
    /** Lazily filled in map of Class to fields annotated with Session. */
    private Map<Class<?>, Collection<Field>> fieldMap = new ConcurrentHashMap<Class<?>, Collection<Field>>();
    /**
     * Session attribute where map linking MaxTimeSaverThreads to keys is stored.
     * Note the key is built by concatenating the Class object itself, i.e. its
     * toString() form ("class org...SessionStoreInterceptor#mapper").
     */
    private static final String MAPPER_ATTRIBUTE = SessionStoreInterceptor.class + "#mapper";
    /* (non-Javadoc)
     * @see net.sourceforge.stripes.controller.Interceptor#intercept(net.sourceforge.stripes.controller.ExecutionContext)
     */
    public Resolution intercept(ExecutionContext context) throws Exception {
        // Continue on and execute other filters and the lifecycle code first;
        // this interceptor does its work after the stage has completed.
        Resolution resolution = context.proceed();
        // Get all fields annotated with @Session on the resolved action bean.
        Collection<Field> fields = getSessionFields(context.getActionBean().getClass());
        // After bean resolution: restore annotated fields from the session.
        if (LifecycleStage.ActionBeanResolution.equals(context.getLifecycleStage())) {
            this.restoreFields(fields, context.getActionBean(), context.getActionBeanContext());
        }
        // After event handling: store annotated fields back into the session.
        if (LifecycleStage.EventHandling.equals(context.getLifecycleStage())) {
            // Don't update values in session if a validation error occurred.
            if (context.getActionBeanContext().getValidationErrors().isEmpty()) {
                if (fields.size() > 0)
                    this.saveFields(fields, context.getActionBean(), context.getActionBeanContext().getRequest().getSession());
            }
        }
        return resolution;
    }
    /**
     * Saves all fields in session.
     * @param fields Fields to save in session.
     * @param actionBean ActionBean.
     * @param session HttpSession.
     * @throws IllegalAccessException Cannot get access to some fields.
     */
    protected void saveFields(Collection<Field> fields, ActionBean actionBean, HttpSession session) throws IllegalAccessException {
        for (Field field : fields) {
            // Annotated fields are typically private; make them readable.
            if (!field.isAccessible()) {
                field.setAccessible(true);
            }
            setAttribute(session, getFieldKey(field, actionBean.getClass()), field.get(actionBean), ((Session)field.getAnnotation(Session.class)).serializable(), ((Session)field.getAnnotation(Session.class)).maxTime());
        }
    }
    /**
     * Restores all fields from values stored in session, except fields that
     * are bound from the current request's parameters (those take precedence).
     * @param fields Fields to restore from session.
     * @param actionBean ActionBean.
     * @param context ActionBeanContext.
     * @throws IllegalAccessException Cannot get access to some fields.
     */
    protected void restoreFields(Collection<Field> fields, ActionBean actionBean, ActionBeanContext context) throws IllegalAccessException {
        // Don't create a session just to read from it.
        HttpSession session = context.getRequest().getSession(false);
        if (session != null) {
            Set<String> parameters = this.getParameters(context.getRequest());
            for (Field field : fields) {
                if (!field.isAccessible()) {
                    field.setAccessible(true);
                }
                // Skip fields that will be populated from request parameters.
                if (!parameters.contains(field.getName())) {
                    // Replace value.
                    Object value = session.getAttribute(getFieldKey(field, actionBean.getClass()));
                    // If value is null and field is primitive, don't set value
                    // (a primitive cannot hold null).
                    if (!(value == null && field.getType().isPrimitive())) {
                        field.set(actionBean, value);
                    }
                }
            }
        }
    }
    /**
     * Returns all top-level property names that Stripes will bind for this
     * request (nested "a.b" and indexed "a[0]" parameters are reduced to "a").
     * @param request Request.
     * @return All property names that Stripes will replace for request.
     */
    protected Set<String> getParameters(HttpServletRequest request) {
        Set<String> parameters = new HashSet<String>();
        Enumeration<?> paramNames = request.getParameterNames();
        while (paramNames.hasMoreElements()) {
            String parameter = (String) paramNames.nextElement();
            // Keep only the first property segment of the parameter name.
            while (parameter.contains(".") || parameter.contains("[")) {
                if (parameter.contains(".")) {
                    parameter = parameter.substring(0, parameter.indexOf("."));
                }
                if (parameter.contains("[")) {
                    parameter = parameter.substring(0, parameter.indexOf("["));
                }
            }
            parameters.add(parameter);
        }
        return parameters;
    }
    /**
     * Returns session key under which field should be saved or read.
     * @param field Field.
     * @param actionBeanClass Action bean class.
     * @return Session key under which field should be saved or read.
     */
    protected String getFieldKey(Field field, Class<? extends ActionBean> actionBeanClass) {
        // Use the key attribute of the @Session annotation if it is defined.
        String sessionKey = ((Session)field.getAnnotation(Session.class)).key();
        if (sessionKey != null && !"".equals(sessionKey)) {
            return sessionKey;
        }
        else {
            // Default key: the Class object's toString() plus the field name.
            return actionBeanClass + "#" + field.getName();
        }
    }
    /**
     * Returns all fields with Session annotation for a class.
     * @param clazz Class.
     * @return All fields with Session annotation for a class.
     */
    protected Collection<Field> getSessionFields(Class<?> clazz) {
        Collection<Field> fields = fieldMap.get(clazz);
        if (fields == null) {
            // Two threads may compute this concurrently; the race is benign
            // because both compute the same value and the last write wins.
            fields = ReflectUtil.getFields(clazz);
            Iterator<Field> iterator = fields.iterator();
            while (iterator.hasNext()) {
                Field field = iterator.next();
                if (!field.isAnnotationPresent(Session.class)) {
                    iterator.remove();
                }
            }
            fieldMap.put(clazz, fields);
        }
        return fields;
    }
    /**
     * Returns an object in session.
     * @param key Key under which object is saved.
     * @return Object.
     * @deprecated Use {@link HttpSession#getAttribute(String)} instead.
     */
    @Deprecated
    public static Object getAttribute(HttpSession session, String key) {
        return session.getAttribute(key);
    }
    /**
     * Saves an object in session for later use. A null object removes the
     * attribute instead of storing it.
     * @param session Session in which to store object.
     * @param key Key under which object is saved.
     * @param object Object to save.
     * @param serializable True if object is serializable.
     * @param maxTime Maximum time to keep object in session.
     * @return Object previously saved under key.
     */
    protected Object setAttribute(HttpSession session, String key, Object object, boolean serializable, int maxTime) {
        if (object == null) {
            // If object is null, remove the attribute; return the old value.
            Object ret;
            synchronized (session) {
                ret = session.getAttribute(key);
                session.removeAttribute(key);
            }
            return ret;
        }
        else {
            // Set object in session, capturing the previous value atomically
            // with respect to other users of this session lock.
            Object ret;
            synchronized (session) {
                ret = session.getAttribute(key);
                session.setAttribute(key, object);
            }
            SessionMapper mapper = (SessionMapper) session.getAttribute(MAPPER_ATTRIBUTE);
            if (mapper == null) {
                // Register mapper for session.
                mapper = new SessionMapper();
                session.setAttribute(MAPPER_ATTRIBUTE, mapper);
            }
            synchronized (mapper) {
                // Update field mapper. An attribute is only considered
                // serializable if both the annotation and the object agree.
                SessionFieldMapper fieldMapper = mapper.get(key);
                if (fieldMapper == null) {
                    fieldMapper = new SessionFieldMapper(serializable && object instanceof Serializable);
                    mapper.put(key, fieldMapper);
                }
                if (maxTime > 0) {
                    // Register runnable to remove attribute after maxTime.
                    if (fieldMapper.runnable != null) {
                        // Cancel old runnable because a new one will be created.
                        fieldMapper.runnable.cancel();
                    }
                    // Register runnable.
                    // NOTE(review): this spawns one raw Thread per timed
                    // attribute update; a shared scheduler would scale better -
                    // left as-is to preserve behavior.
                    RemoveFieldRunnable runnable = new RemoveFieldRunnable(key, maxTime, session);
                    fieldMapper.runnable = runnable;
                    (new Thread(runnable)).start();
                }
            }
            return ret;
        }
    }
}
| |
package org.keycloak.models;
import org.keycloak.models.utils.Pbkdf2PasswordEncoder;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.LinkedList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* @author <a href="mailto:sthorger@redhat.com">Stian Thorgersen</a>
*/
public class PasswordPolicy implements Serializable {
    private static final long serialVersionUID = 1L;
    // Message keys carried by Error objects; NOTE(review): presumably resolved
    // against the server's localized message bundles - confirm.
    public static final String INVALID_PASSWORD_MIN_LENGTH_MESSAGE = "invalidPasswordMinLengthMessage";
    public static final String INVALID_PASSWORD_MIN_DIGITS_MESSAGE = "invalidPasswordMinDigitsMessage";
    public static final String INVALID_PASSWORD_MIN_LOWER_CASE_CHARS_MESSAGE = "invalidPasswordMinLowerCaseCharsMessage";
    public static final String INVALID_PASSWORD_MIN_UPPER_CASE_CHARS_MESSAGE = "invalidPasswordMinUpperCaseCharsMessage";
    public static final String INVALID_PASSWORD_MIN_SPECIAL_CHARS_MESSAGE = "invalidPasswordMinSpecialCharsMessage";
    public static final String INVALID_PASSWORD_NOT_USERNAME = "invalidPasswordNotUsernameMessage";
    public static final String INVALID_PASSWORD_REGEX_PATTERN = "invalidPasswordRegexPatternMessage";
    public static final String INVALID_PASSWORD_HISTORY = "invalidPasswordHistoryMessage";
    // Parsed policy entries, in declaration order; empty when no policy is set.
    private List<Policy> policies;
    // The raw configured policy expression, or null when blank/unset.
    private String policyString;
    /**
     * Parses the given policy expression, e.g. "length(8) and digits(2)".
     * A null or empty string yields a policy that accepts every password.
     */
    public PasswordPolicy(String policyString) {
        if (policyString == null || policyString.length() == 0) {
            this.policyString = null;
            policies = Collections.emptyList();
        } else {
            this.policyString = policyString;
            policies = parse(policyString);
        }
    }
private static List<Policy> parse(String policyString) {
List<Policy> list = new LinkedList<Policy>();
String[] policies = policyString.split(" and ");
for (String policy : policies) {
policy = policy.trim();
String name;
String[] args = null;
int i = policy.indexOf('(');
if (i == -1) {
name = policy.trim();
} else {
name = policy.substring(0, i).trim();
args = policy.substring(i + 1, policy.length() - 1).split(",");
for (int j = 0; j < args.length; j++) {
args[j] = args[j].trim();
}
}
if (name.equals(Length.NAME)) {
list.add(new Length(args));
} else if (name.equals(Digits.NAME)) {
list.add(new Digits(args));
} else if (name.equals(LowerCase.NAME)) {
list.add(new LowerCase(args));
} else if (name.equals(UpperCase.NAME)) {
list.add(new UpperCase(args));
} else if (name.equals(SpecialChars.NAME)) {
list.add(new SpecialChars(args));
} else if (name.equals(NotUsername.NAME)) {
list.add(new NotUsername(args));
} else if (name.equals(HashIterations.NAME)) {
list.add(new HashIterations(args));
} else if (name.equals(RegexPatterns.NAME)) {
for (String regexPattern : args) {
Pattern.compile(regexPattern);
}
list.add(new RegexPatterns(args));
} else if (name.equals(PasswordHistory.NAME)) {
list.add(new PasswordHistory(args));
} else if (name.equals(ForceExpiredPasswordChange.NAME)) {
list.add(new ForceExpiredPasswordChange(args));
}
}
return list;
}
/**
*
* @return -1 if no hash iterations setting
*/
public int getHashIterations() {
if (policies == null)
return -1;
for (Policy p : policies) {
if (p instanceof HashIterations) {
return ((HashIterations) p).iterations;
}
}
return -1;
}
/**
*
* @return -1 if no expired passwords setting
*/
public int getExpiredPasswords() {
if (policies == null)
return -1;
for (Policy p : policies) {
if (p instanceof PasswordHistory) {
return ((PasswordHistory) p).passwordHistoryPolicyValue;
}
}
return -1;
}
/**
*
* @return -1 if no force expired password change setting
*/
public int getDaysToExpirePassword() {
if (policies == null)
return -1;
for (Policy p : policies) {
if (p instanceof ForceExpiredPasswordChange) {
return ((ForceExpiredPasswordChange) p).daysToExpirePassword;
}
}
return -1;
}
public Error validate(UserModel user, String password) {
for (Policy p : policies) {
Error error = p.validate(user, password);
if (error != null) {
return error;
}
}
return null;
}
public Error validate(String user, String password) {
for (Policy p : policies) {
Error error = p.validate(user, password);
if (error != null) {
return error;
}
}
return null;
}
    /**
     * A single parsed policy entry. Implementations return an {@link Error}
     * describing the violation, or null when the password is acceptable.
     */
    private static interface Policy extends Serializable {
        public Error validate(UserModel user, String password);
        public Error validate(String user, String password);
    }
public static class Error {
private String message;
private Object[] parameters;
private Error(String message, Object... parameters) {
this.message = message;
this.parameters = parameters;
}
public String getMessage() {
return message;
}
public Object[] getParameters() {
return parameters;
}
}
    /**
     * Policy carrying the password hashing iteration count. It never rejects a
     * password; it only supplies the value for {@link #getHashIterations()}.
     */
    private static class HashIterations implements Policy {
        private static final String NAME = "hashIterations";
        private int iterations;
        public HashIterations(String[] args) {
            // intArg (defined elsewhere in this file) presumably falls back to
            // 1 when no argument is given - confirm.
            iterations = intArg(NAME, 1, args);
        }
        @Override
        public Error validate(String user, String password) {
            return null;
        }
        @Override
        public Error validate(UserModel user, String password) {
            return null;
        }
    }
private static class NotUsername implements Policy {
private static final String NAME = "notUsername";
public NotUsername(String[] args) {
}
@Override
public Error validate(String username, String password) {
return username.equals(password) ? new Error(INVALID_PASSWORD_NOT_USERNAME) : null;
}
@Override
public Error validate(UserModel user, String password) {
return validate(user.getUsername(), password);
}
}
private static class Length implements Policy {
private static final String NAME = "length";
private int min;
public Length(String[] args) {
min = intArg(NAME, 8, args);
}
@Override
public Error validate(String username, String password) {
return password.length() < min ? new Error(INVALID_PASSWORD_MIN_LENGTH_MESSAGE, min) : null;
}
@Override
public Error validate(UserModel user, String password) {
return validate(user.getUsername(), password);
}
}
private static class Digits implements Policy {
private static final String NAME = "digits";
private int min;
public Digits(String[] args) {
min = intArg(NAME, 1, args);
}
@Override
public Error validate(String username, String password) {
int count = 0;
for (char c : password.toCharArray()) {
if (Character.isDigit(c)) {
count++;
}
}
return count < min ? new Error(INVALID_PASSWORD_MIN_DIGITS_MESSAGE, min) : null;
}
@Override
public Error validate(UserModel user, String password) {
return validate(user.getUsername(), password);
}
}
private static class LowerCase implements Policy {
private static final String NAME = "lowerCase";
private int min;
public LowerCase(String[] args) {
min = intArg(NAME, 1, args);
}
@Override
public Error validate(String username, String password) {
int count = 0;
for (char c : password.toCharArray()) {
if (Character.isLowerCase(c)) {
count++;
}
}
return count < min ? new Error(INVALID_PASSWORD_MIN_LOWER_CASE_CHARS_MESSAGE, min) : null;
}
@Override
public Error validate(UserModel user, String password) {
return validate(user.getUsername(), password);
}
}
private static class UpperCase implements Policy {
private static final String NAME = "upperCase";
private int min;
public UpperCase(String[] args) {
min = intArg(NAME, 1, args);
}
@Override
public Error validate(String username, String password) {
int count = 0;
for (char c : password.toCharArray()) {
if (Character.isUpperCase(c)) {
count++;
}
}
return count < min ? new Error(INVALID_PASSWORD_MIN_UPPER_CASE_CHARS_MESSAGE, min) : null;
}
@Override
public Error validate(UserModel user, String password) {
return validate(user.getUsername(), password);
}
}
private static class SpecialChars implements Policy {
private static final String NAME = "specialChars";
private int min;
public SpecialChars(String[] args) {
min = intArg(NAME, 1, args);
}
@Override
public Error validate(String username, String password) {
int count = 0;
for (char c : password.toCharArray()) {
if (!Character.isLetterOrDigit(c)) {
count++;
}
}
return count < min ? new Error(INVALID_PASSWORD_MIN_SPECIAL_CHARS_MESSAGE, min) : null;
}
@Override
public Error validate(UserModel user, String password) {
return validate(user.getUsername(), password);
}
}
private static class RegexPatterns implements Policy {
private static final String NAME = "regexPatterns";
private String regexPatterns[];
public RegexPatterns(String[] args) {
regexPatterns = args;
}
@Override
public Error validate(String username, String password) {
Pattern pattern = null;
Matcher matcher = null;
for (String regexPattern : regexPatterns) {
pattern = Pattern.compile(regexPattern);
matcher = pattern.matcher(password);
if (!matcher.matches()) {
return new Error(INVALID_PASSWORD_REGEX_PATTERN, (Object) regexPatterns);
}
}
return null;
}
@Override
public Error validate(UserModel user, String password) {
return validate(user.getUsername(), password);
}
}
/**
 * Password policy that rejects a new password when it matches the user's
 * current password or one of their most recently expired passwords.
 */
private static class PasswordHistory implements Policy {
    private static final String NAME = "passwordHistory";
    // Configured history depth; -1 disables the check entirely.
    private int passwordHistoryPolicyValue;

    public PasswordHistory(String[] args) {
        // Default depth is 3 when the policy is configured without a value.
        passwordHistoryPolicyValue = intArg(NAME, 3, args);
    }

    @Override
    public Error validate(String user, String password) {
        // History cannot be consulted without a user model, so the
        // username-only variant always passes.
        return null;
    }

    @Override
    public Error validate(UserModel user, String password) {
        if (passwordHistoryPolicyValue != -1) {
            // First compare against the user's current password credential.
            UserCredentialValueModel cred = getCredentialValueModel(user, UserCredentialModel.PASSWORD);
            if (cred != null) {
                if(new Pbkdf2PasswordEncoder(cred.getSalt()).verify(password, cred.getValue(), cred.getHashIterations())) {
                    return new Error(INVALID_PASSWORD_HISTORY, passwordHistoryPolicyValue);
                }
            }
            // Then compare against expired (historical) passwords. The depth
            // is policyValue - 1: the current password above already counts
            // as one entry of the configured history.
            List<UserCredentialValueModel> passwordExpiredCredentials = getCredentialValueModels(user, passwordHistoryPolicyValue - 1,
                    UserCredentialModel.PASSWORD_HISTORY);
            for (UserCredentialValueModel credential : passwordExpiredCredentials) {
                if (new Pbkdf2PasswordEncoder(credential.getSalt()).verify(password, credential.getValue(), credential.getHashIterations())) {
                    return new Error(INVALID_PASSWORD_HISTORY, passwordHistoryPolicyValue);
                }
            }
        }
        return null;
    }

    // Returns the first directly-stored credential of the given type, or null
    // when the user has none of that type.
    private UserCredentialValueModel getCredentialValueModel(UserModel user, String credType) {
        for (UserCredentialValueModel model : user.getCredentialsDirectly()) {
            if (model.getType().equals(credType)) {
                return model;
            }
        }
        return null;
    }

    // Returns up to expiredPasswordsPolicyValue credentials of the given
    // type, sorted newest-first by creation date.
    private List<UserCredentialValueModel> getCredentialValueModels(UserModel user, int expiredPasswordsPolicyValue,
            String credType) {
        List<UserCredentialValueModel> credentialModels = new ArrayList<UserCredentialValueModel>();
        for (UserCredentialValueModel model : user.getCredentialsDirectly()) {
            if (model.getType().equals(credType)) {
                credentialModels.add(model);
            }
        }
        // Descending creation date: most recent history entries first, so
        // truncation below keeps the newest passwords.
        Collections.sort(credentialModels, new Comparator<UserCredentialValueModel>() {
            public int compare(UserCredentialValueModel credFirst, UserCredentialValueModel credSecond) {
                if (credFirst.getCreatedDate() > credSecond.getCreatedDate()) {
                    return -1;
                } else if (credFirst.getCreatedDate() < credSecond.getCreatedDate()) {
                    return 1;
                } else {
                    return 0;
                }
            }
        });
        if (credentialModels.size() > expiredPasswordsPolicyValue) {
            return credentialModels.subList(0, expiredPasswordsPolicyValue);
        }
        return credentialModels;
    }
}
/**
 * Policy holder for the forced-password-expiry interval. Neither validate
 * overload rejects anything: new passwords are always acceptable here, and
 * expiry enforcement presumably happens elsewhere via the configured value
 * (not visible in this excerpt).
 */
private static class ForceExpiredPasswordChange implements Policy {
    private static final String NAME = "forceExpiredPasswordChange";
    // Days until a password expires; defaults to 365 when unconfigured.
    private int daysToExpirePassword;

    public ForceExpiredPasswordChange(String[] args) {
        daysToExpirePassword = intArg(NAME, 365, args);
    }

    @Override
    public Error validate(String username, String password) {
        // This policy never rejects a candidate password.
        return null;
    }

    @Override
    public Error validate(UserModel user, String password) {
        // This policy never rejects a candidate password.
        return null;
    }
}
/**
 * Parses the single optional integer argument of a policy definition.
 * Returns {@code defaultValue} when no argument was supplied; throws
 * {@link IllegalArgumentException} when more than one argument is given.
 */
private static int intArg(String policy, int defaultValue, String... args) {
    // No argument supplied: fall back to the policy's default.
    if (args == null || args.length == 0) {
        return defaultValue;
    }
    if (args.length > 1) {
        throw new IllegalArgumentException("Invalid arguments to " + policy + ", expect no argument or single integer");
    }
    return Integer.parseInt(args[0]);
}
@Override
public String toString() {
    // Render the policy as its original configuration string.
    // NOTE(review): policyString is declared outside this excerpt; it appears
    // to hold the raw policy definition — confirm against its declaration.
    return policyString;
}
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/genomics/v1/references.proto
package com.google.genomics.v1;
/**
* Protobuf type {@code google.genomics.v1.SearchReferenceSetsRequest}
*/
public final class SearchReferenceSetsRequest extends
com.google.protobuf.GeneratedMessage implements
// @@protoc_insertion_point(message_implements:google.genomics.v1.SearchReferenceSetsRequest)
SearchReferenceSetsRequestOrBuilder {
// Use SearchReferenceSetsRequest.newBuilder() to construct.
// Builder-based constructor; generated code constructs via newBuilder().
private SearchReferenceSetsRequest(com.google.protobuf.GeneratedMessage.Builder builder) {
  super(builder);
}
// Default-instance constructor: every field starts at its proto3 default
// (empty lists, empty strings, zero).
private SearchReferenceSetsRequest() {
  md5Checksums_ = com.google.protobuf.LazyStringArrayList.EMPTY;
  accessions_ = com.google.protobuf.LazyStringArrayList.EMPTY;
  assemblyId_ = "";
  pageToken_ = "";
  pageSize_ = 0;
}
// This generated message does not retain unknown fields; it always reports
// the shared empty set.
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
    getUnknownFields() {
  return com.google.protobuf.UnknownFieldSet.getDefaultInstance();
}
/**
 * Parsing constructor: reads the message from {@code input}, accumulating
 * repeated string fields into mutable lists and freezing them in the
 * {@code finally} block (even when parsing fails part-way).
 */
private SearchReferenceSetsRequest(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  this();
  // Bit 0x1 / 0x2: md5Checksums_ / accessions_ list has been made mutable.
  int mutable_bitField0_ = 0;
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      // NOTE: Java switch matches by case value, not source order, so the
      // generated layout of `default` before the numbered cases is legal.
      switch (tag) {
        case 0:
          // Tag 0 marks end of the input stream.
          done = true;
          break;
        default: {
          // Unknown field: skip it; stop if the skip fails.
          if (!input.skipField(tag)) {
            done = true;
          }
          break;
        }
        case 10: {
          // Field 1 (md5checksums), wire type 2: repeated string.
          com.google.protobuf.ByteString bs = input.readBytes();
          if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
            // Lazily swap the shared EMPTY list for a private mutable one.
            md5Checksums_ = new com.google.protobuf.LazyStringArrayList();
            mutable_bitField0_ |= 0x00000001;
          }
          md5Checksums_.add(bs);
          break;
        }
        case 18: {
          // Field 2 (accessions), wire type 2: repeated string.
          com.google.protobuf.ByteString bs = input.readBytes();
          if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
            accessions_ = new com.google.protobuf.LazyStringArrayList();
            mutable_bitField0_ |= 0x00000002;
          }
          accessions_.add(bs);
          break;
        }
        case 26: {
          // Field 3 (assembly_id): stored as ByteString, decoded lazily.
          com.google.protobuf.ByteString bs = input.readBytes();
          assemblyId_ = bs;
          break;
        }
        case 34: {
          // Field 4 (page_token): stored as ByteString, decoded lazily.
          com.google.protobuf.ByteString bs = input.readBytes();
          pageToken_ = bs;
          break;
        }
        case 40: {
          // Field 5 (page_size): varint int32.
          pageSize_ = input.readInt32();
          break;
        }
      }
    }
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    // Attach the partially-parsed message before propagating.
    throw e.setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    throw new com.google.protobuf.InvalidProtocolBufferException(
        e.getMessage()).setUnfinishedMessage(this);
  } finally {
    // Freeze any lists that were made mutable, success or failure.
    if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
      md5Checksums_ = md5Checksums_.getUnmodifiableView();
    }
    if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
      accessions_ = accessions_.getUnmodifiableView();
    }
    makeExtensionsImmutable();
  }
}
// Message-type descriptor, defined in ReferencesProto.
public static final com.google.protobuf.Descriptors.Descriptor
    getDescriptor() {
  return com.google.genomics.v1.ReferencesProto.internal_static_google_genomics_v1_SearchReferenceSetsRequest_descriptor;
}
// Maps descriptor fields to the generated accessors for reflective access.
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.genomics.v1.ReferencesProto.internal_static_google_genomics_v1_SearchReferenceSetsRequest_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.genomics.v1.SearchReferenceSetsRequest.class, com.google.genomics.v1.SearchReferenceSetsRequest.Builder.class);
}
// Shared stateless parser; delegates to the parsing constructor.
public static final com.google.protobuf.Parser<SearchReferenceSetsRequest> PARSER =
    new com.google.protobuf.AbstractParser<SearchReferenceSetsRequest>() {
  public SearchReferenceSetsRequest parsePartialFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return new SearchReferenceSetsRequest(input, extensionRegistry);
  }
};
@java.lang.Override
public com.google.protobuf.Parser<SearchReferenceSetsRequest> getParserForType() {
  return PARSER;
}
private int bitField0_;
public static final int MD5CHECKSUMS_FIELD_NUMBER = 1;
private com.google.protobuf.LazyStringList md5Checksums_;
/**
* <code>repeated string md5checksums = 1;</code>
*
* <pre>
* If present, return references for which the `md5checksum`
* matches. See `ReferenceSet.md5checksum` for details.
* </pre>
*/
public com.google.protobuf.ProtocolStringList
getMd5ChecksumsList() {
return md5Checksums_;
}
/**
* <code>repeated string md5checksums = 1;</code>
*
* <pre>
* If present, return references for which the `md5checksum`
* matches. See `ReferenceSet.md5checksum` for details.
* </pre>
*/
public int getMd5ChecksumsCount() {
return md5Checksums_.size();
}
/**
* <code>repeated string md5checksums = 1;</code>
*
* <pre>
* If present, return references for which the `md5checksum`
* matches. See `ReferenceSet.md5checksum` for details.
* </pre>
*/
public java.lang.String getMd5Checksums(int index) {
return md5Checksums_.get(index);
}
/**
* <code>repeated string md5checksums = 1;</code>
*
* <pre>
* If present, return references for which the `md5checksum`
* matches. See `ReferenceSet.md5checksum` for details.
* </pre>
*/
public com.google.protobuf.ByteString
getMd5ChecksumsBytes(int index) {
return md5Checksums_.getByteString(index);
}
public static final int ACCESSIONS_FIELD_NUMBER = 2;
private com.google.protobuf.LazyStringList accessions_;
/**
* <code>repeated string accessions = 2;</code>
*
* <pre>
* If present, return references for which the accession matches any of these
* strings. Best to give a version number, for example
* `GCF_000001405.26`. If only the main accession number is given
* then all records with that main accession will be returned, whichever
* version. Note that different versions will have different sequences.
* </pre>
*/
public com.google.protobuf.ProtocolStringList
getAccessionsList() {
return accessions_;
}
/**
* <code>repeated string accessions = 2;</code>
*
* <pre>
* If present, return references for which the accession matches any of these
* strings. Best to give a version number, for example
* `GCF_000001405.26`. If only the main accession number is given
* then all records with that main accession will be returned, whichever
* version. Note that different versions will have different sequences.
* </pre>
*/
public int getAccessionsCount() {
return accessions_.size();
}
/**
* <code>repeated string accessions = 2;</code>
*
* <pre>
* If present, return references for which the accession matches any of these
* strings. Best to give a version number, for example
* `GCF_000001405.26`. If only the main accession number is given
* then all records with that main accession will be returned, whichever
* version. Note that different versions will have different sequences.
* </pre>
*/
public java.lang.String getAccessions(int index) {
return accessions_.get(index);
}
/**
* <code>repeated string accessions = 2;</code>
*
* <pre>
* If present, return references for which the accession matches any of these
* strings. Best to give a version number, for example
* `GCF_000001405.26`. If only the main accession number is given
* then all records with that main accession will be returned, whichever
* version. Note that different versions will have different sequences.
* </pre>
*/
public com.google.protobuf.ByteString
getAccessionsBytes(int index) {
return accessions_.getByteString(index);
}
public static final int ASSEMBLY_ID_FIELD_NUMBER = 3;
private java.lang.Object assemblyId_;
/**
* <code>optional string assembly_id = 3;</code>
*
* <pre>
* If present, return reference sets for which a substring of their
* `assemblyId` matches this string (case insensitive).
* </pre>
*/
public java.lang.String getAssemblyId() {
  java.lang.Object ref = assemblyId_;
  if (ref instanceof java.lang.String) {
    // Already decoded (or set as a String); return the cached value.
    return (java.lang.String) ref;
  } else {
    // Field still holds the wire-format ByteString: decode it and, when the
    // bytes are valid UTF-8, cache the decoded String for subsequent calls.
    com.google.protobuf.ByteString bs =
        (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    if (bs.isValidUtf8()) {
      assemblyId_ = s;
    }
    return s;
  }
}
/**
* <code>optional string assembly_id = 3;</code>
*
* <pre>
* If present, return reference sets for which a substring of their
* `assemblyId` matches this string (case insensitive).
* </pre>
*/
public com.google.protobuf.ByteString
getAssemblyIdBytes() {
java.lang.Object ref = assemblyId_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
assemblyId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PAGE_TOKEN_FIELD_NUMBER = 4;
private java.lang.Object pageToken_;
/**
* <code>optional string page_token = 4;</code>
*
* <pre>
* The continuation token, which is used to page through large result sets.
* To get the next page of results, set this parameter to the value of
* `nextPageToken` from the previous response.
* </pre>
*/
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
pageToken_ = s;
}
return s;
}
}
/**
* <code>optional string page_token = 4;</code>
*
* <pre>
* The continuation token, which is used to page through large result sets.
* To get the next page of results, set this parameter to the value of
* `nextPageToken` from the previous response.
* </pre>
*/
public com.google.protobuf.ByteString
getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PAGE_SIZE_FIELD_NUMBER = 5;
private int pageSize_;
/**
* <code>optional int32 page_size = 5;</code>
*
* <pre>
* Specifies the maximum number of results to return in a single page.
* </pre>
*/
public int getPageSize() {
return pageSize_;
}
// Cached result: -1 = not yet computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
// This message declares no required fields, so it is always initialized.
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  memoizedIsInitialized = 1;
  return true;
}
// Serializes this message to output in field-number order.
public void writeTo(com.google.protobuf.CodedOutputStream output)
                    throws java.io.IOException {
  // Generated convention: compute (and memoize) the size before writing.
  getSerializedSize();
  for (int i = 0; i < md5Checksums_.size(); i++) {
    output.writeBytes(1, md5Checksums_.getByteString(i));
  }
  for (int i = 0; i < accessions_.size(); i++) {
    output.writeBytes(2, accessions_.getByteString(i));
  }
  // proto3 style: singular fields are emitted only when non-default.
  if (!getAssemblyIdBytes().isEmpty()) {
    output.writeBytes(3, getAssemblyIdBytes());
  }
  if (!getPageTokenBytes().isEmpty()) {
    output.writeBytes(4, getPageTokenBytes());
  }
  if (pageSize_ != 0) {
    output.writeInt32(5, pageSize_);
  }
}
// Cached serialized size; -1 means "not yet computed".
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
  int size = memoizedSerializedSize;
  if (size != -1) return size;
  size = 0;
  {
    int dataSize = 0;
    for (int i = 0; i < md5Checksums_.size(); i++) {
      dataSize += com.google.protobuf.CodedOutputStream
        .computeBytesSizeNoTag(md5Checksums_.getByteString(i));
    }
    size += dataSize;
    // One byte of tag overhead per element (field number 1 fits one byte).
    size += 1 * getMd5ChecksumsList().size();
  }
  {
    int dataSize = 0;
    for (int i = 0; i < accessions_.size(); i++) {
      dataSize += com.google.protobuf.CodedOutputStream
        .computeBytesSizeNoTag(accessions_.getByteString(i));
    }
    size += dataSize;
    size += 1 * getAccessionsList().size();
  }
  // Singular fields contribute only when non-default (proto3 semantics),
  // mirroring writeTo().
  if (!getAssemblyIdBytes().isEmpty()) {
    size += com.google.protobuf.CodedOutputStream
      .computeBytesSize(3, getAssemblyIdBytes());
  }
  if (!getPageTokenBytes().isEmpty()) {
    size += com.google.protobuf.CodedOutputStream
      .computeBytesSize(4, getPageTokenBytes());
  }
  if (pageSize_ != 0) {
    size += com.google.protobuf.CodedOutputStream
      .computeInt32Size(5, pageSize_);
  }
  memoizedSerializedSize = size;
  return size;
}
private static final long serialVersionUID = 0L;
// Static parse entry points for the common input kinds; all delegate to the
// shared PARSER instance.
public static com.google.genomics.v1.SearchReferenceSetsRequest parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.genomics.v1.SearchReferenceSetsRequest parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.genomics.v1.SearchReferenceSetsRequest parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.genomics.v1.SearchReferenceSetsRequest parseFrom(
    byte[] data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.genomics.v1.SearchReferenceSetsRequest parseFrom(java.io.InputStream input)
    throws java.io.IOException {
  return PARSER.parseFrom(input);
}
public static com.google.genomics.v1.SearchReferenceSetsRequest parseFrom(
    java.io.InputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return PARSER.parseFrom(input, extensionRegistry);
}
// Delimited variants read a leading varint length before the message body.
public static com.google.genomics.v1.SearchReferenceSetsRequest parseDelimitedFrom(java.io.InputStream input)
    throws java.io.IOException {
  return PARSER.parseDelimitedFrom(input);
}
public static com.google.genomics.v1.SearchReferenceSetsRequest parseDelimitedFrom(
    java.io.InputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static com.google.genomics.v1.SearchReferenceSetsRequest parseFrom(
    com.google.protobuf.CodedInputStream input)
    throws java.io.IOException {
  return PARSER.parseFrom(input);
}
public static com.google.genomics.v1.SearchReferenceSetsRequest parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return PARSER.parseFrom(input, extensionRegistry);
}
// Builder factory methods.
public static Builder newBuilder() { return new Builder(); }
public Builder newBuilderForType() { return newBuilder(); }
// Builder pre-populated from an existing message.
public static Builder newBuilder(com.google.genomics.v1.SearchReferenceSetsRequest prototype) {
  return newBuilder().mergeFrom(prototype);
}
// Builder pre-populated with this message's own field values.
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
    com.google.protobuf.GeneratedMessage.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
* Protobuf type {@code google.genomics.v1.SearchReferenceSetsRequest}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.genomics.v1.SearchReferenceSetsRequest)
com.google.genomics.v1.SearchReferenceSetsRequestOrBuilder {
// Same descriptor/accessor table as the message type itself.
public static final com.google.protobuf.Descriptors.Descriptor
    getDescriptor() {
  return com.google.genomics.v1.ReferencesProto.internal_static_google_genomics_v1_SearchReferenceSetsRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.genomics.v1.ReferencesProto.internal_static_google_genomics_v1_SearchReferenceSetsRequest_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.genomics.v1.SearchReferenceSetsRequest.class, com.google.genomics.v1.SearchReferenceSetsRequest.Builder.class);
}
// Construct using com.google.genomics.v1.SearchReferenceSetsRequest.newBuilder()
private Builder() {
  maybeForceBuilderInitialization();
}
private Builder(
    com.google.protobuf.GeneratedMessage.BuilderParent parent) {
  super(parent);
  maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
  // No sub-message field builders exist for this message type, so the
  // body is intentionally empty.
  if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
  }
}
// Resets every field to its proto3 default and clears the list-mutability
// bits, so subsequent mutation re-creates private mutable lists.
public Builder clear() {
  super.clear();
  md5Checksums_ = com.google.protobuf.LazyStringArrayList.EMPTY;
  bitField0_ = (bitField0_ & ~0x00000001);
  accessions_ = com.google.protobuf.LazyStringArrayList.EMPTY;
  bitField0_ = (bitField0_ & ~0x00000002);
  assemblyId_ = "";
  pageToken_ = "";
  pageSize_ = 0;
  return this;
}
public com.google.protobuf.Descriptors.Descriptor
    getDescriptorForType() {
  return com.google.genomics.v1.ReferencesProto.internal_static_google_genomics_v1_SearchReferenceSetsRequest_descriptor;
}
// NOTE(review): getDefaultInstance() is declared outside this excerpt.
public com.google.genomics.v1.SearchReferenceSetsRequest getDefaultInstanceForType() {
  return com.google.genomics.v1.SearchReferenceSetsRequest.getDefaultInstance();
}
// Builds the message; the uninitialized branch is unreachable in practice
// because this type has no required fields (isInitialized() always true).
public com.google.genomics.v1.SearchReferenceSetsRequest build() {
  com.google.genomics.v1.SearchReferenceSetsRequest result = buildPartial();
  if (!result.isInitialized()) {
    throw newUninitializedMessageException(result);
  }
  return result;
}
public com.google.genomics.v1.SearchReferenceSetsRequest buildPartial() {
  com.google.genomics.v1.SearchReferenceSetsRequest result = new com.google.genomics.v1.SearchReferenceSetsRequest(this);
  int from_bitField0_ = bitField0_;
  // to_bitField0_ stays 0: this message has no field-presence bits
  // (generated artifact of the common template).
  int to_bitField0_ = 0;
  if (((bitField0_ & 0x00000001) == 0x00000001)) {
    // Freeze the list so the built message is immutable; clearing the bit
    // makes further builder mutation copy-on-write.
    md5Checksums_ = md5Checksums_.getUnmodifiableView();
    bitField0_ = (bitField0_ & ~0x00000001);
  }
  result.md5Checksums_ = md5Checksums_;
  if (((bitField0_ & 0x00000002) == 0x00000002)) {
    accessions_ = accessions_.getUnmodifiableView();
    bitField0_ = (bitField0_ & ~0x00000002);
  }
  result.accessions_ = accessions_;
  result.assemblyId_ = assemblyId_;
  result.pageToken_ = pageToken_;
  result.pageSize_ = pageSize_;
  result.bitField0_ = to_bitField0_;
  onBuilt();
  return result;
}
// Typed dispatch: use the field-aware merge for our own type, otherwise
// fall back to reflection-based merging in the superclass.
public Builder mergeFrom(com.google.protobuf.Message other) {
  if (other instanceof com.google.genomics.v1.SearchReferenceSetsRequest) {
    return mergeFrom((com.google.genomics.v1.SearchReferenceSetsRequest)other);
  } else {
    super.mergeFrom(other);
    return this;
  }
}
// Merges other into this builder: repeated fields are concatenated,
// singular fields are taken from other only when non-default.
public Builder mergeFrom(com.google.genomics.v1.SearchReferenceSetsRequest other) {
  if (other == com.google.genomics.v1.SearchReferenceSetsRequest.getDefaultInstance()) return this;
  if (!other.md5Checksums_.isEmpty()) {
    if (md5Checksums_.isEmpty()) {
      // Our list is empty: adopt other's (immutable) list directly and
      // clear the mutability bit so any later mutation copies it first.
      md5Checksums_ = other.md5Checksums_;
      bitField0_ = (bitField0_ & ~0x00000001);
    } else {
      ensureMd5ChecksumsIsMutable();
      md5Checksums_.addAll(other.md5Checksums_);
    }
    onChanged();
  }
  if (!other.accessions_.isEmpty()) {
    if (accessions_.isEmpty()) {
      accessions_ = other.accessions_;
      bitField0_ = (bitField0_ & ~0x00000002);
    } else {
      ensureAccessionsIsMutable();
      accessions_.addAll(other.accessions_);
    }
    onChanged();
  }
  if (!other.getAssemblyId().isEmpty()) {
    assemblyId_ = other.assemblyId_;
    onChanged();
  }
  if (!other.getPageToken().isEmpty()) {
    pageToken_ = other.pageToken_;
    onChanged();
  }
  if (other.getPageSize() != 0) {
    setPageSize(other.getPageSize());
  }
  // Final unconditional notification (generated template behavior).
  onChanged();
  return this;
}
// No required fields, so a builder of this type is always initialized.
public final boolean isInitialized() {
  return true;
}
// Parses from input and merges the result into this builder. On parse
// failure the partially-parsed message is still merged (via finally)
// before the exception propagates.
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  com.google.genomics.v1.SearchReferenceSetsRequest parsedMessage = null;
  try {
    parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    // Recover whatever was parsed before the failure, then rethrow.
    parsedMessage = (com.google.genomics.v1.SearchReferenceSetsRequest) e.getUnfinishedMessage();
    throw e;
  } finally {
    if (parsedMessage != null) {
      mergeFrom(parsedMessage);
    }
  }
  return this;
}
// Bit 0x1 / 0x2: the md5Checksums_ / accessions_ list is currently mutable.
private int bitField0_;
private com.google.protobuf.LazyStringList md5Checksums_ = com.google.protobuf.LazyStringArrayList.EMPTY;
// Copy-on-write guard: replace the shared/frozen list with a private
// mutable copy before the first mutation.
private void ensureMd5ChecksumsIsMutable() {
  if (!((bitField0_ & 0x00000001) == 0x00000001)) {
    md5Checksums_ = new com.google.protobuf.LazyStringArrayList(md5Checksums_);
    bitField0_ |= 0x00000001;
  }
}
/**
* <code>repeated string md5checksums = 1;</code>
*
* <pre>
* If present, return references for which the `md5checksum`
* matches. See `ReferenceSet.md5checksum` for details.
* </pre>
*/
public com.google.protobuf.ProtocolStringList
getMd5ChecksumsList() {
return md5Checksums_.getUnmodifiableView();
}
/**
* <code>repeated string md5checksums = 1;</code>
*
* <pre>
* If present, return references for which the `md5checksum`
* matches. See `ReferenceSet.md5checksum` for details.
* </pre>
*/
public int getMd5ChecksumsCount() {
return md5Checksums_.size();
}
/**
* <code>repeated string md5checksums = 1;</code>
*
* <pre>
* If present, return references for which the `md5checksum`
* matches. See `ReferenceSet.md5checksum` for details.
* </pre>
*/
public java.lang.String getMd5Checksums(int index) {
return md5Checksums_.get(index);
}
/**
* <code>repeated string md5checksums = 1;</code>
*
* <pre>
* If present, return references for which the `md5checksum`
* matches. See `ReferenceSet.md5checksum` for details.
* </pre>
*/
public com.google.protobuf.ByteString
getMd5ChecksumsBytes(int index) {
return md5Checksums_.getByteString(index);
}
/**
* <code>repeated string md5checksums = 1;</code>
*
* <pre>
* If present, return references for which the `md5checksum`
* matches. See `ReferenceSet.md5checksum` for details.
* </pre>
*/
public Builder setMd5Checksums(
int index, java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureMd5ChecksumsIsMutable();
md5Checksums_.set(index, value);
onChanged();
return this;
}
/**
* <code>repeated string md5checksums = 1;</code>
*
* <pre>
* If present, return references for which the `md5checksum`
* matches. See `ReferenceSet.md5checksum` for details.
* </pre>
*/
public Builder addMd5Checksums(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureMd5ChecksumsIsMutable();
md5Checksums_.add(value);
onChanged();
return this;
}
/**
* <code>repeated string md5checksums = 1;</code>
*
* <pre>
* If present, return references for which the `md5checksum`
* matches. See `ReferenceSet.md5checksum` for details.
* </pre>
*/
public Builder addAllMd5Checksums(
java.lang.Iterable<java.lang.String> values) {
ensureMd5ChecksumsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(
values, md5Checksums_);
onChanged();
return this;
}
/**
* <code>repeated string md5checksums = 1;</code>
*
* <pre>
* If present, return references for which the `md5checksum`
* matches. See `ReferenceSet.md5checksum` for details.
* </pre>
*/
public Builder clearMd5Checksums() {
md5Checksums_ = com.google.protobuf.LazyStringArrayList.EMPTY;
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
* <code>repeated string md5checksums = 1;</code>
*
* <pre>
* If present, return references for which the `md5checksum`
* matches. See `ReferenceSet.md5checksum` for details.
* </pre>
*/
public Builder addMd5ChecksumsBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
ensureMd5ChecksumsIsMutable();
md5Checksums_.add(value);
onChanged();
return this;
}
private com.google.protobuf.LazyStringList accessions_ = com.google.protobuf.LazyStringArrayList.EMPTY;
private void ensureAccessionsIsMutable() {
if (!((bitField0_ & 0x00000002) == 0x00000002)) {
accessions_ = new com.google.protobuf.LazyStringArrayList(accessions_);
bitField0_ |= 0x00000002;
}
}
/**
* <code>repeated string accessions = 2;</code>
*
* <pre>
* If present, return references for which the accession matches any of these
* strings. Best to give a version number, for example
* `GCF_000001405.26`. If only the main accession number is given
* then all records with that main accession will be returned, whichever
* version. Note that different versions will have different sequences.
* </pre>
*/
public com.google.protobuf.ProtocolStringList
getAccessionsList() {
return accessions_.getUnmodifiableView();
}
/**
* <code>repeated string accessions = 2;</code>
*
* <pre>
* If present, return references for which the accession matches any of these
* strings. Best to give a version number, for example
* `GCF_000001405.26`. If only the main accession number is given
* then all records with that main accession will be returned, whichever
* version. Note that different versions will have different sequences.
* </pre>
*/
public int getAccessionsCount() {
return accessions_.size();
}
/**
* <code>repeated string accessions = 2;</code>
*
* <pre>
* If present, return references for which the accession matches any of these
* strings. Best to give a version number, for example
* `GCF_000001405.26`. If only the main accession number is given
* then all records with that main accession will be returned, whichever
* version. Note that different versions will have different sequences.
* </pre>
*/
public java.lang.String getAccessions(int index) {
return accessions_.get(index);
}
/**
* <code>repeated string accessions = 2;</code>
*
* <pre>
* If present, return references for which the accession matches any of these
* strings. Best to give a version number, for example
* `GCF_000001405.26`. If only the main accession number is given
* then all records with that main accession will be returned, whichever
* version. Note that different versions will have different sequences.
* </pre>
*/
public com.google.protobuf.ByteString
getAccessionsBytes(int index) {
return accessions_.getByteString(index);
}
/**
* <code>repeated string accessions = 2;</code>
*
* <pre>
* If present, return references for which the accession matches any of these
* strings. Best to give a version number, for example
* `GCF_000001405.26`. If only the main accession number is given
* then all records with that main accession will be returned, whichever
* version. Note that different versions will have different sequences.
* </pre>
*/
public Builder setAccessions(
int index, java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureAccessionsIsMutable();
accessions_.set(index, value);
onChanged();
return this;
}
/**
* <code>repeated string accessions = 2;</code>
*
* <pre>
* If present, return references for which the accession matches any of these
* strings. Best to give a version number, for example
* `GCF_000001405.26`. If only the main accession number is given
* then all records with that main accession will be returned, whichever
* version. Note that different versions will have different sequences.
* </pre>
*/
public Builder addAccessions(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureAccessionsIsMutable();
accessions_.add(value);
onChanged();
return this;
}
/**
* <code>repeated string accessions = 2;</code>
*
* <pre>
* If present, return references for which the accession matches any of these
* strings. Best to give a version number, for example
* `GCF_000001405.26`. If only the main accession number is given
* then all records with that main accession will be returned, whichever
* version. Note that different versions will have different sequences.
* </pre>
*/
public Builder addAllAccessions(
java.lang.Iterable<java.lang.String> values) {
ensureAccessionsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(
values, accessions_);
onChanged();
return this;
}
/**
* <code>repeated string accessions = 2;</code>
*
* <pre>
* If present, return references for which the accession matches any of these
* strings. Best to give a version number, for example
* `GCF_000001405.26`. If only the main accession number is given
* then all records with that main accession will be returned, whichever
* version. Note that different versions will have different sequences.
* </pre>
*/
public Builder clearAccessions() {
accessions_ = com.google.protobuf.LazyStringArrayList.EMPTY;
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
* <code>repeated string accessions = 2;</code>
*
* <pre>
* If present, return references for which the accession matches any of these
* strings. Best to give a version number, for example
* `GCF_000001405.26`. If only the main accession number is given
* then all records with that main accession will be returned, whichever
* version. Note that different versions will have different sequences.
* </pre>
*/
public Builder addAccessionsBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
ensureAccessionsIsMutable();
accessions_.add(value);
onChanged();
return this;
}
private java.lang.Object assemblyId_ = "";
/**
* <code>optional string assembly_id = 3;</code>
*
* <pre>
* If present, return reference sets for which a substring of their
* `assemblyId` matches this string (case insensitive).
* </pre>
*/
public java.lang.String getAssemblyId() {
java.lang.Object ref = assemblyId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
assemblyId_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 * <code>optional string assembly_id = 3;</code>
 *
 * <pre>
 * If present, return reference sets for which a substring of their
 * `assemblyId` matches this string (case insensitive).
 * </pre>
 */
public com.google.protobuf.ByteString
    getAssemblyIdBytes() {
  java.lang.Object current = assemblyId_;
  if (!(current instanceof String)) {
    // Already stored as raw bytes; hand it back directly.
    return (com.google.protobuf.ByteString) current;
  }
  com.google.protobuf.ByteString encoded =
      com.google.protobuf.ByteString.copyFromUtf8(
          (java.lang.String) current);
  // Cache the encoded form for subsequent byte-oriented reads.
  assemblyId_ = encoded;
  return encoded;
}
/**
 * <code>optional string assembly_id = 3;</code>
 *
 * <pre>
 * If present, return reference sets for which a substring of their
 * `assemblyId` matches this string (case insensitive).
 * </pre>
 */
public Builder setAssemblyId(
    java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  // Store the String directly; getAssemblyIdBytes() encodes lazily on demand.
  assemblyId_ = value;
  onChanged();
  return this;
}
/**
 * <code>optional string assembly_id = 3;</code>
 *
 * <pre>
 * If present, return reference sets for which a substring of their
 * `assemblyId` matches this string (case insensitive).
 * </pre>
 */
public Builder clearAssemblyId() {
  // Revert to whatever the shared default instance holds for this field.
  assemblyId_ = getDefaultInstance().getAssemblyId();
  onChanged();
  return this;
}
/**
 * <code>optional string assembly_id = 3;</code>
 *
 * <pre>
 * If present, return reference sets for which a substring of their
 * `assemblyId` matches this string (case insensitive).
 * </pre>
 */
public Builder setAssemblyIdBytes(
    com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  // Store raw bytes; getAssemblyId() decodes (and caches) UTF-8 lazily.
  assemblyId_ = value;
  onChanged();
  return this;
}
// Backing store: either a decoded java.lang.String or a raw ByteString.
private java.lang.Object pageToken_ = "";
/**
 * <code>optional string page_token = 4;</code>
 *
 * <pre>
 * The continuation token, which is used to page through large result sets.
 * To get the next page of results, set this parameter to the value of
 * `nextPageToken` from the previous response.
 * </pre>
 */
public java.lang.String getPageToken() {
  java.lang.Object current = pageToken_;
  if (current instanceof java.lang.String) {
    return (java.lang.String) current;
  }
  com.google.protobuf.ByteString byteString =
      (com.google.protobuf.ByteString) current;
  java.lang.String decoded = byteString.toStringUtf8();
  if (byteString.isValidUtf8()) {
    // Cache the decoded form only when it round-trips as valid UTF-8.
    pageToken_ = decoded;
  }
  return decoded;
}
/**
 * <code>optional string page_token = 4;</code>
 *
 * <pre>
 * The continuation token, which is used to page through large result sets.
 * To get the next page of results, set this parameter to the value of
 * `nextPageToken` from the previous response.
 * </pre>
 */
public com.google.protobuf.ByteString
    getPageTokenBytes() {
  java.lang.Object current = pageToken_;
  if (!(current instanceof String)) {
    // Already stored as raw bytes; hand it back directly.
    return (com.google.protobuf.ByteString) current;
  }
  com.google.protobuf.ByteString encoded =
      com.google.protobuf.ByteString.copyFromUtf8(
          (java.lang.String) current);
  // Cache the encoded form for subsequent byte-oriented reads.
  pageToken_ = encoded;
  return encoded;
}
/**
 * <code>optional string page_token = 4;</code>
 *
 * <pre>
 * The continuation token, which is used to page through large result sets.
 * To get the next page of results, set this parameter to the value of
 * `nextPageToken` from the previous response.
 * </pre>
 */
public Builder setPageToken(
    java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  // Store the String directly; getPageTokenBytes() encodes lazily on demand.
  pageToken_ = value;
  onChanged();
  return this;
}
/**
 * <code>optional string page_token = 4;</code>
 *
 * <pre>
 * The continuation token, which is used to page through large result sets.
 * To get the next page of results, set this parameter to the value of
 * `nextPageToken` from the previous response.
 * </pre>
 */
public Builder clearPageToken() {
  // Revert to whatever the shared default instance holds for this field.
  pageToken_ = getDefaultInstance().getPageToken();
  onChanged();
  return this;
}
/**
 * <code>optional string page_token = 4;</code>
 *
 * <pre>
 * The continuation token, which is used to page through large result sets.
 * To get the next page of results, set this parameter to the value of
 * `nextPageToken` from the previous response.
 * </pre>
 */
public Builder setPageTokenBytes(
    com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  // Store raw bytes; getPageToken() decodes (and caches) UTF-8 lazily.
  pageToken_ = value;
  onChanged();
  return this;
}
// Defaults to 0, proto3's default for int32 (meaning "unset" to the server).
private int pageSize_ ;
/**
 * <code>optional int32 page_size = 5;</code>
 *
 * <pre>
 * Specifies the maximum number of results to return in a single page.
 * </pre>
 */
public int getPageSize() {
  return pageSize_;
}
/**
 * <code>optional int32 page_size = 5;</code>
 *
 * <pre>
 * Specifies the maximum number of results to return in a single page.
 * </pre>
 */
public Builder setPageSize(int value) {
  pageSize_ = value;
  onChanged();
  return this;
}
/**
 * <code>optional int32 page_size = 5;</code>
 *
 * <pre>
 * Specifies the maximum number of results to return in a single page.
 * </pre>
 */
public Builder clearPageSize() {
  // Reset to the proto3 int32 default.
  pageSize_ = 0;
  onChanged();
  return this;
}
// Both overrides intentionally discard the supplied UnknownFieldSet and just
// return the builder unchanged: this generated builder does not retain
// unknown fields.
public final Builder setUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return this;
}
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return this;
}
// @@protoc_insertion_point(builder_scope:google.genomics.v1.SearchReferenceSetsRequest)
}
// @@protoc_insertion_point(class_scope:google.genomics.v1.SearchReferenceSetsRequest)
// Eagerly-created shared default instance; all clear*() builder methods and
// parsers fall back to this immutable singleton.
private static final com.google.genomics.v1.SearchReferenceSetsRequest defaultInstance;
static {
  defaultInstance = new com.google.genomics.v1.SearchReferenceSetsRequest();
}
public static com.google.genomics.v1.SearchReferenceSetsRequest getDefaultInstance() {
  return defaultInstance;
}
public com.google.genomics.v1.SearchReferenceSetsRequest getDefaultInstanceForType() {
  return defaultInstance;
}
}
| |
/*
* Copyright 2002-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.beans.factory.support;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.factory.BeanCreationException;
import org.springframework.beans.factory.BeanCreationNotAllowedException;
import org.springframework.beans.factory.BeanCurrentlyInCreationException;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.ObjectFactory;
import org.springframework.beans.factory.config.SingletonBeanRegistry;
import org.springframework.core.SimpleAliasRegistry;
import org.springframework.util.Assert;
import org.springframework.util.StringUtils;
/**
* Generic registry for shared bean instances, implementing the
* {@link org.springframework.beans.factory.config.SingletonBeanRegistry}.
* Allows for registering singleton instances that should be shared
* for all callers of the registry, to be obtained via bean name.
*
* <p>Also supports registration of
* {@link org.springframework.beans.factory.DisposableBean} instances,
* (which might or might not correspond to registered singletons),
* to be destroyed on shutdown of the registry. Dependencies between
* beans can be registered to enforce an appropriate shutdown order.
*
* <p>This class mainly serves as base class for
* {@link org.springframework.beans.factory.BeanFactory} implementations,
* factoring out the common management of singleton bean instances. Note that
* the {@link org.springframework.beans.factory.config.ConfigurableBeanFactory}
* interface extends the {@link SingletonBeanRegistry} interface.
*
* <p>Note that this class assumes neither a bean definition concept
* nor a specific creation process for bean instances, in contrast to
* {@link AbstractBeanFactory} and {@link DefaultListableBeanFactory}
* (which inherit from it). Can alternatively also be used as a nested
* helper to delegate to.
*
* @author Juergen Hoeller
* @since 2.0
* @see #registerSingleton
* @see #registerDisposableBean
* @see org.springframework.beans.factory.DisposableBean
* @see org.springframework.beans.factory.config.ConfigurableBeanFactory
*/
public class DefaultSingletonBeanRegistry extends SimpleAliasRegistry implements SingletonBeanRegistry {

	/**
	 * Internal marker for a null singleton object:
	 * used as marker value for concurrent Maps (which don't support null values).
	 */
	protected static final Object NULL_OBJECT = new Object();

	/** Logger available to subclasses */
	protected final Log logger = LogFactory.getLog(getClass());

	/**
	 * Cache of singleton objects: bean name --> bean instance.
	 * Also serves as the registry-wide mutex ({@link #getSingletonMutex()}).
	 */
	private final Map<String, Object> singletonObjects = new ConcurrentHashMap<String, Object>(64);

	/** Cache of singleton factories: bean name --> ObjectFactory (accessed only while holding {@code singletonObjects}) */
	private final Map<String, ObjectFactory<?>> singletonFactories = new HashMap<String, ObjectFactory<?>>(16);

	/** Cache of early singleton objects: bean name --> bean instance (accessed only while holding {@code singletonObjects}) */
	private final Map<String, Object> earlySingletonObjects = new HashMap<String, Object>(16);

	/** Set of registered singletons, containing the bean names in registration order */
	private final Set<String> registeredSingletons = new LinkedHashSet<String>(64);

	/** Names of beans that are currently in creation */
	private final Set<String> singletonsCurrentlyInCreation =
			Collections.newSetFromMap(new ConcurrentHashMap<String, Boolean>(16));

	/** Names of beans currently excluded from in creation checks */
	private final Set<String> inCreationCheckExclusions =
			Collections.newSetFromMap(new ConcurrentHashMap<String, Boolean>(16));

	/** List of suppressed Exceptions, available for associating related causes */
	private Set<Exception> suppressedExceptions;

	/** Flag that indicates whether we're currently within destroySingletons */
	private boolean singletonsCurrentlyInDestruction = false;

	/** Disposable bean instances: bean name --> disposable instance */
	private final Map<String, Object> disposableBeans = new LinkedHashMap<String, Object>();

	/** Map between containing bean names: bean name --> Set of bean names that the bean contains */
	private final Map<String, Set<String>> containedBeanMap = new ConcurrentHashMap<String, Set<String>>(16);

	/** Map between dependent bean names: bean name --> Set of dependent bean names */
	private final Map<String, Set<String>> dependentBeanMap = new ConcurrentHashMap<String, Set<String>>(64);

	/** Map between depending bean names: bean name --> Set of bean names for the bean's dependencies */
	private final Map<String, Set<String>> dependenciesForBeanMap = new ConcurrentHashMap<String, Set<String>>(64);

	/**
	 * Register the given existing object as singleton under the given name.
	 * @throws IllegalStateException if an object is already bound under that name
	 */
	@Override
	public void registerSingleton(String beanName, Object singletonObject) throws IllegalStateException {
		Assert.notNull(beanName, "'beanName' must not be null");
		synchronized (this.singletonObjects) {
			Object oldObject = this.singletonObjects.get(beanName);
			if (oldObject != null) {
				throw new IllegalStateException("Could not register object [" + singletonObject +
						"] under bean name '" + beanName + "': there is already object [" + oldObject + "] bound");
			}
			addSingleton(beanName, singletonObject);
		}
	}

	/**
	 * Add the given singleton object to the singleton cache of this factory.
	 * <p>To be called for eager registration of singletons.
	 * @param beanName the name of the bean
	 * @param singletonObject the singleton object (may be {@code null}, stored as NULL_OBJECT)
	 */
	protected void addSingleton(String beanName, Object singletonObject) {
		synchronized (this.singletonObjects) {
			// NULL_OBJECT stands in for null, since ConcurrentHashMap rejects null values.
			this.singletonObjects.put(beanName, (singletonObject != null ? singletonObject : NULL_OBJECT));
			// A fully created singleton supersedes any factory or early reference.
			this.singletonFactories.remove(beanName);
			this.earlySingletonObjects.remove(beanName);
			this.registeredSingletons.add(beanName);
		}
	}

	/**
	 * Add the given singleton factory for building the specified singleton
	 * if necessary.
	 * <p>To be called for eager registration of singletons, e.g. to be able to
	 * resolve circular references.
	 * @param beanName the name of the bean
	 * @param singletonFactory the factory for the singleton object
	 */
	protected void addSingletonFactory(String beanName, ObjectFactory<?> singletonFactory) {
		Assert.notNull(singletonFactory, "Singleton factory must not be null");
		synchronized (this.singletonObjects) {
			// Only register a factory if no complete instance exists yet.
			if (!this.singletonObjects.containsKey(beanName)) {
				this.singletonFactories.put(beanName, singletonFactory);
				this.earlySingletonObjects.remove(beanName);
				this.registeredSingletons.add(beanName);
			}
		}
	}

	@Override
	public Object getSingleton(String beanName) {
		return getSingleton(beanName, true);
	}

	/**
	 * Return the (raw) singleton object registered under the given name.
	 * <p>Checks already instantiated singletons and also allows for an early
	 * reference to a currently created singleton (resolving a circular reference).
	 * @param beanName the name of the bean to look for
	 * @param allowEarlyReference whether early references should be created or not
	 * @return the registered singleton object, or {@code null} if none found
	 */
	protected Object getSingleton(String beanName, boolean allowEarlyReference) {
		// Fast path: lock-free read of the fully initialized cache (ConcurrentHashMap).
		Object singletonObject = this.singletonObjects.get(beanName);
		if (singletonObject == null && isSingletonCurrentlyInCreation(beanName)) {
			synchronized (this.singletonObjects) {
				singletonObject = this.earlySingletonObjects.get(beanName);
				if (singletonObject == null && allowEarlyReference) {
					ObjectFactory<?> singletonFactory = this.singletonFactories.get(beanName);
					if (singletonFactory != null) {
						// Materialize the early reference exactly once; subsequent callers
						// hit earlySingletonObjects instead of the factory.
						singletonObject = singletonFactory.getObject();
						this.earlySingletonObjects.put(beanName, singletonObject);
						this.singletonFactories.remove(beanName);
					}
				}
			}
		}
		return (singletonObject != NULL_OBJECT ? singletonObject : null);
	}

	/**
	 * Return the (raw) singleton object registered under the given name,
	 * creating and registering a new one if none registered yet.
	 * @param beanName the name of the bean
	 * @param singletonFactory the ObjectFactory to lazily create the singleton
	 * with, if necessary
	 * @return the registered singleton object
	 */
	public Object getSingleton(String beanName, ObjectFactory<?> singletonFactory) {
		Assert.notNull(beanName, "'beanName' must not be null");
		synchronized (this.singletonObjects) {
			Object singletonObject = this.singletonObjects.get(beanName);
			if (singletonObject == null) {
				if (this.singletonsCurrentlyInDestruction) {
					throw new BeanCreationNotAllowedException(beanName,
							"Singleton bean creation not allowed while the singletons of this factory are in destruction " +
							"(Do not request a bean from a BeanFactory in a destroy method implementation!)");
				}
				if (logger.isDebugEnabled()) {
					logger.debug("Creating shared instance of singleton bean '" + beanName + "'");
				}
				// Marks the bean as in-creation; throws on re-entrant creation (circular ref).
				beforeSingletonCreation(beanName);
				boolean newSingleton = false;
				// Only the outermost creation records suppressed exceptions; nested
				// creations reuse the existing set (recordSuppressedExceptions == false).
				boolean recordSuppressedExceptions = (this.suppressedExceptions == null);
				if (recordSuppressedExceptions) {
					this.suppressedExceptions = new LinkedHashSet<Exception>();
				}
				try {
					singletonObject = singletonFactory.getObject();
					newSingleton = true;
				}
				catch (IllegalStateException ex) {
					// Has the singleton object implicitly appeared in the meantime ->
					// if yes, proceed with it since the exception indicates that state.
					singletonObject = this.singletonObjects.get(beanName);
					if (singletonObject == null) {
						throw ex;
					}
				}
				catch (BeanCreationException ex) {
					// Attach any exceptions suppressed during creation as related causes.
					if (recordSuppressedExceptions) {
						for (Exception suppressedException : this.suppressedExceptions) {
							ex.addRelatedCause(suppressedException);
						}
					}
					throw ex;
				}
				finally {
					if (recordSuppressedExceptions) {
						this.suppressedExceptions = null;
					}
					afterSingletonCreation(beanName);
				}
				if (newSingleton) {
					addSingleton(beanName, singletonObject);
				}
			}
			return (singletonObject != NULL_OBJECT ? singletonObject : null);
		}
	}

	/**
	 * Register an Exception that happened to get suppressed during the creation of a
	 * singleton bean instance, e.g. a temporary circular reference resolution problem.
	 * @param ex the Exception to register
	 */
	protected void onSuppressedException(Exception ex) {
		synchronized (this.singletonObjects) {
			// Only non-null while a singleton creation is in progress (see getSingleton above).
			if (this.suppressedExceptions != null) {
				this.suppressedExceptions.add(ex);
			}
		}
	}

	/**
	 * Remove the bean with the given name from the singleton cache of this factory,
	 * to be able to clean up eager registration of a singleton if creation failed.
	 * @param beanName the name of the bean
	 * @see #getSingletonMutex()
	 */
	protected void removeSingleton(String beanName) {
		synchronized (this.singletonObjects) {
			this.singletonObjects.remove(beanName);
			this.singletonFactories.remove(beanName);
			this.earlySingletonObjects.remove(beanName);
			this.registeredSingletons.remove(beanName);
		}
	}

	@Override
	public boolean containsSingleton(String beanName) {
		return this.singletonObjects.containsKey(beanName);
	}

	@Override
	public String[] getSingletonNames() {
		synchronized (this.singletonObjects) {
			return StringUtils.toStringArray(this.registeredSingletons);
		}
	}

	@Override
	public int getSingletonCount() {
		synchronized (this.singletonObjects) {
			return this.registeredSingletons.size();
		}
	}

	/**
	 * Explicitly include or exclude the given bean from in-creation checks.
	 * <p>Note the inverted logic: {@code inCreation == false} adds the bean to the
	 * exclusion set, i.e. it will no longer be reported as in creation.
	 */
	public void setCurrentlyInCreation(String beanName, boolean inCreation) {
		Assert.notNull(beanName, "Bean name must not be null");
		if (!inCreation) {
			this.inCreationCheckExclusions.add(beanName);
		}
		else {
			this.inCreationCheckExclusions.remove(beanName);
		}
	}

	public boolean isCurrentlyInCreation(String beanName) {
		Assert.notNull(beanName, "Bean name must not be null");
		return (!this.inCreationCheckExclusions.contains(beanName) && isActuallyInCreation(beanName));
	}

	protected boolean isActuallyInCreation(String beanName) {
		return isSingletonCurrentlyInCreation(beanName);
	}

	/**
	 * Return whether the specified singleton bean is currently in creation
	 * (within the entire factory).
	 * @param beanName the name of the bean
	 */
	public boolean isSingletonCurrentlyInCreation(String beanName) {
		return this.singletonsCurrentlyInCreation.contains(beanName);
	}

	/**
	 * Callback before singleton creation.
	 * <p>The default implementation registers the singleton as currently in creation.
	 * @param beanName the name of the singleton about to be created
	 * @see #isSingletonCurrentlyInCreation
	 */
	protected void beforeSingletonCreation(String beanName) {
		// Set.add returning false means the bean was already in creation -> circular reference.
		if (!this.inCreationCheckExclusions.contains(beanName) && !this.singletonsCurrentlyInCreation.add(beanName)) {
			throw new BeanCurrentlyInCreationException(beanName);
		}
	}

	/**
	 * Callback after singleton creation.
	 * <p>The default implementation marks the singleton as not in creation anymore.
	 * @param beanName the name of the singleton that has been created
	 * @see #isSingletonCurrentlyInCreation
	 */
	protected void afterSingletonCreation(String beanName) {
		if (!this.inCreationCheckExclusions.contains(beanName) && !this.singletonsCurrentlyInCreation.remove(beanName)) {
			throw new IllegalStateException("Singleton '" + beanName + "' isn't currently in creation");
		}
	}

	/**
	 * Add the given bean to the list of disposable beans in this registry.
	 * <p>Disposable beans usually correspond to registered singletons,
	 * matching the bean name but potentially being a different instance
	 * (for example, a DisposableBean adapter for a singleton that does not
	 * naturally implement Spring's DisposableBean interface).
	 * @param beanName the name of the bean
	 * @param bean the bean instance
	 */
	public void registerDisposableBean(String beanName, DisposableBean bean) {
		synchronized (this.disposableBeans) {
			this.disposableBeans.put(beanName, bean);
		}
	}

	/**
	 * Register a containment relationship between two beans,
	 * e.g. between an inner bean and its containing outer bean.
	 * <p>Also registers the containing bean as dependent on the contained bean
	 * in terms of destruction order.
	 * @param containedBeanName the name of the contained (inner) bean
	 * @param containingBeanName the name of the containing (outer) bean
	 * @see #registerDependentBean
	 */
	public void registerContainedBean(String containedBeanName, String containingBeanName) {
		// A quick check for an existing entry upfront, avoiding synchronization...
		Set<String> containedBeans = this.containedBeanMap.get(containingBeanName);
		if (containedBeans != null && containedBeans.contains(containedBeanName)) {
			return;
		}
		// No entry yet -> fully synchronized manipulation of the containedBeans Set
		synchronized (this.containedBeanMap) {
			containedBeans = this.containedBeanMap.get(containingBeanName);
			if (containedBeans == null) {
				containedBeans = new LinkedHashSet<String>(8);
				this.containedBeanMap.put(containingBeanName, containedBeans);
			}
			containedBeans.add(containedBeanName);
		}
		registerDependentBean(containedBeanName, containingBeanName);
	}

	/**
	 * Register a dependent bean for the given bean,
	 * to be destroyed before the given bean is destroyed.
	 * @param beanName the name of the bean
	 * @param dependentBeanName the name of the dependent bean
	 */
	public void registerDependentBean(String beanName, String dependentBeanName) {
		// A quick check for an existing entry upfront, avoiding synchronization...
		// Aliases are resolved so all dependency tracking keys off the canonical name.
		String canonicalName = canonicalName(beanName);
		Set<String> dependentBeans = this.dependentBeanMap.get(canonicalName);
		if (dependentBeans != null && dependentBeans.contains(dependentBeanName)) {
			return;
		}
		// No entry yet -> fully synchronized manipulation of the dependentBeans Set
		synchronized (this.dependentBeanMap) {
			dependentBeans = this.dependentBeanMap.get(canonicalName);
			if (dependentBeans == null) {
				dependentBeans = new LinkedHashSet<String>(8);
				this.dependentBeanMap.put(canonicalName, dependentBeans);
			}
			dependentBeans.add(dependentBeanName);
		}
		// Maintain the reverse mapping as well (bean -> its dependencies).
		synchronized (this.dependenciesForBeanMap) {
			Set<String> dependenciesForBean = this.dependenciesForBeanMap.get(dependentBeanName);
			if (dependenciesForBean == null) {
				dependenciesForBean = new LinkedHashSet<String>(8);
				this.dependenciesForBeanMap.put(dependentBeanName, dependenciesForBean);
			}
			dependenciesForBean.add(canonicalName);
		}
	}

	/**
	 * Determine whether the specified dependent bean has been registered as
	 * dependent on the given bean or on any of its transitive dependencies.
	 * @param beanName the name of the bean to check
	 * @param dependentBeanName the name of the dependent bean
	 * @since 4.0
	 */
	protected boolean isDependent(String beanName, String dependentBeanName) {
		return isDependent(beanName, dependentBeanName, null);
	}

	private boolean isDependent(String beanName, String dependentBeanName, Set<String> alreadySeen) {
		String canonicalName = canonicalName(beanName);
		// alreadySeen guards against cycles in the transitive dependency graph.
		if (alreadySeen != null && alreadySeen.contains(beanName)) {
			return false;
		}
		Set<String> dependentBeans = this.dependentBeanMap.get(canonicalName);
		if (dependentBeans == null) {
			return false;
		}
		if (dependentBeans.contains(dependentBeanName)) {
			return true;
		}
		for (String transitiveDependency : dependentBeans) {
			if (alreadySeen == null) {
				alreadySeen = new HashSet<String>();
			}
			alreadySeen.add(beanName);
			if (isDependent(transitiveDependency, dependentBeanName, alreadySeen)) {
				return true;
			}
		}
		return false;
	}

	/**
	 * Determine whether a dependent bean has been registered for the given name.
	 * @param beanName the name of the bean to check
	 */
	protected boolean hasDependentBean(String beanName) {
		return this.dependentBeanMap.containsKey(beanName);
	}

	/**
	 * Return the names of all beans which depend on the specified bean, if any.
	 * @param beanName the name of the bean
	 * @return the array of dependent bean names, or an empty array if none
	 */
	public String[] getDependentBeans(String beanName) {
		Set<String> dependentBeans = this.dependentBeanMap.get(beanName);
		if (dependentBeans == null) {
			return new String[0];
		}
		return StringUtils.toStringArray(dependentBeans);
	}

	/**
	 * Return the names of all beans that the specified bean depends on, if any.
	 * @param beanName the name of the bean
	 * @return the array of names of beans which the bean depends on,
	 * or an empty array if none
	 */
	public String[] getDependenciesForBean(String beanName) {
		Set<String> dependenciesForBean = this.dependenciesForBeanMap.get(beanName);
		if (dependenciesForBean == null) {
			return new String[0];
		}
		return dependenciesForBean.toArray(new String[dependenciesForBean.size()]);
	}

	public void destroySingletons() {
		if (logger.isDebugEnabled()) {
			logger.debug("Destroying singletons in " + this);
		}
		synchronized (this.singletonObjects) {
			// Blocks getSingleton(name, factory) from creating new singletons from here on.
			this.singletonsCurrentlyInDestruction = true;
		}
		String[] disposableBeanNames;
		synchronized (this.disposableBeans) {
			disposableBeanNames = StringUtils.toStringArray(this.disposableBeans.keySet());
		}
		// Destroy in reverse registration order (LinkedHashMap preserves insertion order).
		for (int i = disposableBeanNames.length - 1; i >= 0; i--) {
			destroySingleton(disposableBeanNames[i]);
		}
		this.containedBeanMap.clear();
		this.dependentBeanMap.clear();
		this.dependenciesForBeanMap.clear();
		synchronized (this.singletonObjects) {
			this.singletonObjects.clear();
			this.singletonFactories.clear();
			this.earlySingletonObjects.clear();
			this.registeredSingletons.clear();
			this.singletonsCurrentlyInDestruction = false;
		}
	}

	/**
	 * Destroy the given bean. Delegates to {@code destroyBean}
	 * if a corresponding disposable bean instance is found.
	 * @param beanName the name of the bean
	 * @see #destroyBean
	 */
	public void destroySingleton(String beanName) {
		// Remove a registered singleton of the given name, if any.
		removeSingleton(beanName);
		// Destroy the corresponding DisposableBean instance.
		DisposableBean disposableBean;
		synchronized (this.disposableBeans) {
			disposableBean = (DisposableBean) this.disposableBeans.remove(beanName);
		}
		destroyBean(beanName, disposableBean);
	}

	/**
	 * Destroy the given bean. Must destroy beans that depend on the given
	 * bean before the bean itself. Should not throw any exceptions.
	 * @param beanName the name of the bean
	 * @param bean the bean instance to destroy (may be {@code null})
	 */
	protected void destroyBean(String beanName, DisposableBean bean) {
		// Trigger destruction of dependent beans first...
		Set<String> dependencies = this.dependentBeanMap.remove(beanName);
		if (dependencies != null) {
			if (logger.isDebugEnabled()) {
				logger.debug("Retrieved dependent beans for bean '" + beanName + "': " + dependencies);
			}
			for (String dependentBeanName : dependencies) {
				// Recursive: each dependent is fully destroyed before this bean.
				destroySingleton(dependentBeanName);
			}
		}
		// Actually destroy the bean now...
		if (bean != null) {
			try {
				bean.destroy();
			}
			catch (Throwable ex) {
				// Destruction must not propagate exceptions; log and continue.
				logger.error("Destroy method on bean with name '" + beanName + "' threw an exception", ex);
			}
		}
		// Trigger destruction of contained beans...
		Set<String> containedBeans = this.containedBeanMap.remove(beanName);
		if (containedBeans != null) {
			for (String containedBeanName : containedBeans) {
				destroySingleton(containedBeanName);
			}
		}
		// Remove destroyed bean from other beans' dependencies.
		synchronized (this.dependentBeanMap) {
			for (Iterator<Map.Entry<String, Set<String>>> it = this.dependentBeanMap.entrySet().iterator(); it.hasNext();) {
				Map.Entry<String, Set<String>> entry = it.next();
				Set<String> dependenciesToClean = entry.getValue();
				dependenciesToClean.remove(beanName);
				if (dependenciesToClean.isEmpty()) {
					it.remove();
				}
			}
		}
		// Remove destroyed bean's prepared dependency information.
		this.dependenciesForBeanMap.remove(beanName);
	}

	/**
	 * Exposes the singleton mutex to subclasses and external collaborators.
	 * <p>Subclasses should synchronize on the given Object if they perform
	 * any sort of extended singleton creation phase. In particular, subclasses
	 * should <i>not</i> have their own mutexes involved in singleton creation,
	 * to avoid the potential for deadlocks in lazy-init situations.
	 */
	public final Object getSingletonMutex() {
		return this.singletonObjects;
	}

}
| |
// Copyright 2006 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.analysis.stringtemplate;
import static com.google.common.truth.Truth.assertThat;
import static org.junit.Assert.assertThrows;
import static org.junit.Assert.fail;
import java.util.HashMap;
import java.util.Map;
import java.util.function.Function;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/**
* Unit tests for the {@link TemplateExpander}.
*/
@RunWith(JUnit4.class)
public class TemplateExpanderTest {
/** Simple map-backed {@link TemplateContext} for tests: unknown names fail with "$(name) not defined". */
private static final class TemplateContextImpl implements TemplateContext {
  private final Map<String, String> vars = new HashMap<>();
  private final Map<String, Function<String, String>> functions = new HashMap<>();

  @Override
  public String lookupVariable(String name)
      throws ExpansionException {
    // Names beginning with "$" are shell syntax, not Make variables:
    // echo them back unchanged for the shell to expand.
    if (name.startsWith("$")) {
      return name;
    }
    if (vars.containsKey(name)) {
      return vars.get(name);
    }
    throw new ExpansionException(String.format("$(%s) not defined", name));
  }

  @Override
  public String lookupFunction(String name, String param) throws ExpansionException {
    if (functions.containsKey(name)) {
      return functions.get(name).apply(param);
    }
    throw new ExpansionException(String.format("$(%s) not defined", name));
  }
}
private TemplateContextImpl context;

// Fresh context per test so vars/functions registered in one test
// cannot leak into another.
@Before
public final void createContext() throws Exception {
  context = new TemplateContextImpl();
}
// Convenience wrapper: expand {@code value} against the shared test context.
private String expand(String value) throws ExpansionException {
  return TemplateExpander.expand(value, context);
}
/**
 * Expands {@code cmd}, asserting that expansion fails, and returns the
 * resulting {@link ExpansionException} for message assertions.
 * <p>Uses {@code assertThrows} (already statically imported in this file)
 * instead of the legacy try/fail/catch idiom, which needed an unreachable
 * {@code throw new AssertionError()} just to satisfy definite return.
 */
private ExpansionException expansionFailure(String cmd) {
  return assertThrows(
      "Expansion of " + cmd + " didn't fail as expected",
      ExpansionException.class,
      () -> expand(cmd));
}
// Verifies $(VAR) and single-character $X forms both expand from the
// context's variable map, and that surrounding text is preserved verbatim.
@Test
public void testVariableExpansion() throws Exception {
  context.vars.put("SRCS", "src1 src2");
  context.vars.put("<", "src1");
  context.vars.put("OUTS", "out1 out2");
  context.vars.put("@", "out1");
  context.vars.put("^", "src1 src2 dep1 dep2");
  context.vars.put("@D", "outdir");
  context.vars.put("BINDIR", "bindir");
  assertThat(expand("$(SRCS)")).isEqualTo("src1 src2");
  assertThat(expand("$<")).isEqualTo("src1");
  assertThat(expand("$(OUTS)")).isEqualTo("out1 out2");
  assertThat(expand("$(@)")).isEqualTo("out1");
  assertThat(expand("$@")).isEqualTo("out1");
  assertThat(expand("$@,")).isEqualTo("out1,");
  assertThat(expand("$(SRCS) $(OUTS)")).isEqualTo("src1 src2 out1 out2");
  assertThat(expand("cmd")).isEqualTo("cmd");
  assertThat(expand("cmd $(SRCS),")).isEqualTo("cmd src1 src2,");
  assertThat(expand("label1 $(SRCS),")).isEqualTo("label1 src1 src2,");
  assertThat(expand(":label1 $(SRCS),")).isEqualTo(":label1 src1 src2,");
}
// An unregistered variable must fail with the context's "not defined" message.
@Test
public void testUndefinedVariableExpansion() throws Exception {
  assertThat(expansionFailure("$(foo)"))
      .hasMessageThat().isEqualTo("$(foo) not defined");
}
// $(name arg) forms dispatch to registered functions; variables and functions
// may be mixed within one template, with surrounding text preserved.
@Test
public void testFunctionExpansion() throws Exception {
  context.functions.put("foo", (String p) -> "FOO(" + p + ")");
  context.vars.put("bar", "bar");
  assertThat(expand("$(foo baz)")).isEqualTo("FOO(baz)");
  assertThat(expand("$(bar) $(foo baz)")).isEqualTo("bar FOO(baz)");
  assertThat(expand("xyz$(foo baz)zyx")).isEqualTo("xyzFOO(baz)zyx");
}
// An ExpansionException thrown by lookupFunction must propagate out of
// TemplateExpander.expand unchanged (here carrying "name(param)" as message).
@Test
public void testFunctionExpansionThrows() throws Exception {
  ExpansionException e =
      assertThrows(
          ExpansionException.class,
          () ->
              TemplateExpander.expand(
                  "$(foo baz)",
                  new TemplateContext() {
                    @Override
                    public String lookupVariable(String name) throws ExpansionException {
                      throw new ExpansionException(name);
                    }
                    @Override
                    public String lookupFunction(String name, String param)
                        throws ExpansionException {
                      throw new ExpansionException(name + "(" + param + ")");
                    }
                  }));
  assertThat(e).hasMessageThat().isEqualTo("foo(baz)");
}
// The error message names only the function, not its argument.
@Test
public void testUndefinedFunctionExpansion() throws Exception {
  // Note: $(location x) is considered an undefined variable.
  assertThat(expansionFailure("$(location label1), $(SRCS),"))
      .hasMessageThat().isEqualTo("$(location) not defined");
  assertThat(expansionFailure("$(basename file)"))
      .hasMessageThat().isEqualTo("$(basename) not defined");
}
  // A variable whose value itself contains a reference is expanded again.
  @Test
  public void testRecursiveExpansion() throws Exception {
    // Expansion is recursive: $(recursive) -> $(SRCS) -> "src1 src2"
    context.vars.put("SRCS", "src1 src2");
    context.vars.put("recursive", "$(SRCS)");
    assertThat(expand("$(recursive)")).isEqualTo("src1 src2");
  }
  // Text produced by one expansion is not re-scanned together with text
  // produced by an adjacent expansion, so "$" + "(SRCS)" stays literal.
  @Test
  public void testRecursiveExpansionDoesNotSpanExpansionBoundaries() throws Exception {
    // Recursion does not span expansion boundaries:
    // $(recur2a)$(recur2b) --> "$" + "(SRCS)" --/--> "src1 src2"
    context.vars.put("SRCS", "src1 src2");
    context.vars.put("recur2a", "$$");
    context.vars.put("recur2b", "(SRCS)");
    assertThat(expand("$(recur2a)$(recur2b)")).isEqualTo("$(SRCS)");
  }
  // A variable that expands to itself must be detected and reported,
  // not loop forever.
  @Test
  public void testSelfInfiniteExpansionFailsGracefully() throws Exception {
    context.vars.put("infinite", "$(infinite)");
    assertThat(expansionFailure("$(infinite)")).hasMessageThat()
        .isEqualTo("potentially unbounded recursion during expansion of '$(infinite)'");
  }
  // Mutual recursion (black -> white -> black -> ...) must also be caught.
  @Test
  public void testMutuallyInfiniteExpansionFailsGracefully() throws Exception {
    context.vars.put("black", "$(white)");
    context.vars.put("white", "$(black)");
    assertThat(expansionFailure("$(white) is the new $(black)")).hasMessageThat()
        .isEqualTo("potentially unbounded recursion during expansion of '$(black)'");
  }
@Test
public void testErrors() throws Exception {
assertThat(expansionFailure("$(SRCS")).hasMessageThat()
.isEqualTo("unterminated variable reference");
assertThat(expansionFailure("$")).hasMessageThat().isEqualTo("unterminated $");
String suffix = "instead for \"Make\" variables, or escape the '$' as '$$' if you intended "
+ "this for the shell";
assertThat(expansionFailure("for file in a b c;do echo $file;done")).hasMessageThat()
.isEqualTo("'$file' syntax is not supported; use '$(file)' " + suffix);
assertThat(expansionFailure("${file%:.*8}")).hasMessageThat()
.isEqualTo("'${file%:.*8}' syntax is not supported; use '$(file%:.*8)' " + suffix);
}
  // "$$" escapes to a single literal "$", letting shell constructs such as
  // $file, ${...} and $(...) pass through unexpanded.
  @Test
  public void testDollarDollar() throws Exception {
    assertThat(expand("for file in a b c;do echo $$file;done"))
        .isEqualTo("for file in a b c;do echo $file;done");
    assertThat(expand("$${file%:.*8}")).isEqualTo("${file%:.*8}");
    assertThat(expand("$$(basename file)")).isEqualTo("$(basename file)");
  }
  // Regression test: check that the parameter is trimmed before expanding.
  @Test
  public void testFunctionExpansionIsTrimmed() throws Exception {
    context.functions.put("foo", (String p) -> "FOO(" + p + ")");
    // The trailing space inside "$(foo baz )" must not reach the function.
    assertThat(expand("$(foo baz )")).isEqualTo("FOO(baz)");
  }
}
| |
/*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInsight.editorActions;
import com.intellij.featureStatistics.FeatureUsageTracker;
import com.intellij.ide.DataManager;
import com.intellij.injected.editor.EditorWindow;
import com.intellij.lang.CompositeLanguage;
import com.intellij.lang.Language;
import com.intellij.lang.injection.InjectedLanguageManager;
import com.intellij.openapi.actionSystem.CommonDataKeys;
import com.intellij.openapi.actionSystem.DataContext;
import consulo.logging.Logger;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.actionSystem.EditorActionHandler;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.*;
import com.intellij.psi.templateLanguages.OuterLanguageElement;
import com.intellij.util.Processor;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.List;
/**
 * Handles the "extend word selection" editor action: each invocation grows the
 * current selection to the next-larger meaningful PSI range around the caret.
 * Falls back to the originally registered handler when no PSI-based range can
 * be computed (no project, or no candidate range strictly larger than the
 * current selection).
 */
public class SelectWordHandler extends EditorActionHandler {
  private static final Logger LOG = Logger.getInstance(SelectWordHandler.class);
  // Handler registered for this action before this one; used as a fallback.
  private final EditorActionHandler myOriginalHandler;
  public SelectWordHandler(EditorActionHandler originalHandler) {
    super(true);
    myOriginalHandler = originalHandler;
  }
  @Override
  public void execute(@Nonnull Editor editor, DataContext dataContext) {
    if (LOG.isDebugEnabled()) {
      LOG.debug("enter: execute(editor='" + editor + "')");
    }
    Project project = DataManager.getInstance().getDataContext(editor.getComponent()).getData(CommonDataKeys.PROJECT);
    if (project == null) {
      // Without a project there is no PSI to work with; delegate to the
      // previously registered handler, if any.
      if (myOriginalHandler != null) {
        myOriginalHandler.execute(editor, dataContext);
      }
      return;
    }
    // PSI must be in sync with the documents before computing ranges.
    PsiDocumentManager.getInstance(project).commitAllDocuments();
    TextRange range = selectWord(editor, project);
    if (editor instanceof EditorWindow) {
      // Injected-fragment editor: if no usable range was found inside the
      // injection, or the range is not fully editable there, or the whole
      // fragment is already selected, retry in the host (delegate) editor.
      if (range == null || !isInsideEditableInjection((EditorWindow)editor, range, project) || TextRange.from(0, editor.getDocument().getTextLength()).equals(
        new TextRange(editor.getSelectionModel().getSelectionStart(), editor.getSelectionModel().getSelectionEnd()))) {
        editor = ((EditorWindow)editor).getDelegate();
        range = selectWord(editor, project);
      }
    }
    if (range == null) {
      if (myOriginalHandler != null) {
        myOriginalHandler.execute(editor, dataContext);
      }
    }
    else {
      editor.getSelectionModel().setSelection(range.getStartOffset(), range.getEndOffset());
    }
  }
  // Returns true if the given range maps to exactly one editable fragment of
  // the injected file (i.e. selecting it would not cross fragment boundaries).
  private static boolean isInsideEditableInjection(EditorWindow editor, TextRange range, Project project) {
    PsiFile file = PsiDocumentManager.getInstance(project).getPsiFile(editor.getDocument());
    if (file == null) return true;
    List<TextRange> editables = InjectedLanguageManager.getInstance(project).intersectWithAllEditableFragments(file, range);
    return editables.size() == 1 && range.equals(editables.get(0));
  }
  /**
   * Computes the smallest PSI-derived range that strictly contains the current
   * selection around the (adjusted) caret offset.
   *
   * @param editor the editor whose caret and selection are examined
   * @param project the project used to look up PSI for the editor's document
   * @return null means unable to select
   */
  @Nullable
  private static TextRange selectWord(@Nonnull Editor editor, @Nonnull Project project) {
    Document document = editor.getDocument();
    PsiFile file = PsiDocumentManager.getInstance(project).getPsiFile(document);
    if (file instanceof PsiCompiledFile) {
      // Work on the decompiled mirror so there is actual text to select in.
      file = ((PsiCompiledFile)file).getDecompiledPsiFile();
    }
    if (file == null) return null;
    FeatureUsageTracker.getInstance().triggerFeatureUsed("editing.select.word");
    int caretOffset = adjustCaretOffset(editor);
    PsiElement element = findElementAt(file, caretOffset);
    if (element instanceof PsiWhiteSpace && caretOffset > 0) {
      // Prefer the non-whitespace element just before the caret, if any.
      PsiElement anotherElement = findElementAt(file, caretOffset - 1);
      if (!(anotherElement instanceof PsiWhiteSpace)) {
        element = anotherElement;
      }
    }
    // Skip forward over whitespace-only elements to the next meaningful one,
    // climbing to the parent whenever the current element has no next sibling.
    while (element instanceof PsiWhiteSpace || element != null && StringUtil.isEmptyOrSpaces(element.getText())) {
      while (element.getNextSibling() == null) {
        if (element instanceof PsiFile) return null;
        final PsiElement parent = element.getParent();
        final PsiElement[] children = parent.getChildren();
        if (children.length > 0 && children[children.length - 1] == element) {
          element = parent;
        }
        else {
          element = parent;
          break;
        }
      }
      element = element.getNextSibling();
      if (element == null) return null;
      TextRange range = element.getTextRange();
      if (range == null) return null; // Fix NPE (EA-29110)
      caretOffset = range.getStartOffset();
    }
    if (element instanceof OuterLanguageElement) {
      // Template-language file: try the element at the same offset in the
      // other language tree of the view provider, if it belongs to this file.
      PsiElement elementInOtherTree = file.getViewProvider().findElementAt(element.getTextOffset(), element.getLanguage());
      if (elementInOtherTree == null || elementInOtherTree.getContainingFile() != element.getContainingFile()) {
        while (elementInOtherTree != null && elementInOtherTree.getPrevSibling() == null) {
          elementInOtherTree = elementInOtherTree.getParent();
        }
        if (elementInOtherTree != null) {
          assert elementInOtherTree.getTextOffset() == caretOffset;
          element = elementInOtherTree;
        }
      }
    }
    final TextRange selectionRange = new TextRange(editor.getSelectionModel().getSelectionStart(), editor.getSelectionModel().getSelectionEnd());
    // Among all candidate ranges that strictly contain the current selection,
    // keep the smallest; start from the whole document as the upper bound.
    final Ref<TextRange> minimumRange = new Ref<TextRange>(new TextRange(0, editor.getDocument().getTextLength()));
    SelectWordUtil.processRanges(element, editor.getDocument().getCharsSequence(), caretOffset, editor, new Processor<TextRange>() {
      @Override
      public boolean process(@Nonnull TextRange range) {
        if (range.contains(selectionRange) && !range.equals(selectionRange)) {
          if (minimumRange.get().contains(range)) {
            minimumRange.set(range);
            return true;
          }
        }
        return false;
      }
    });
    return minimumRange.get();
  }
  // Nudges the caret one position left when it sits immediately after the end
  // of an identifier or after the last non-whitespace character, so the element
  // visually "under" the caret is the one selected.
  private static int adjustCaretOffset(@Nonnull Editor editor) {
    int caretOffset = editor.getCaretModel().getOffset();
    if (caretOffset == 0) {
      return caretOffset;
    }
    CharSequence text = editor.getDocument().getCharsSequence();
    char prev = text.charAt(caretOffset - 1);
    if (caretOffset < text.length() &&
        !Character.isJavaIdentifierPart(text.charAt(caretOffset)) && Character.isJavaIdentifierPart(prev)) {
      return caretOffset - 1;
    }
    if ((caretOffset == text.length() || Character.isWhitespace(text.charAt(caretOffset))) && !Character.isWhitespace(prev)) {
      return caretOffset - 1;
    }
    return caretOffset;
  }
  // Finds the leaf element at the offset; if it belongs to a language
  // extension of a composite file, looks it up in the base-language tree.
  @Nullable
  private static PsiElement findElementAt(@Nonnull final PsiFile file, final int caretOffset) {
    PsiElement elementAt = file.findElementAt(caretOffset);
    if (elementAt != null && isLanguageExtension(file, elementAt)) {
      return file.getViewProvider().findElementAt(caretOffset, file.getLanguage());
    }
    return elementAt;
  }
  // True when the element's language is one of the composite file's
  // registered language extensions (rather than the base language).
  private static boolean isLanguageExtension(@Nonnull final PsiFile file, @Nonnull final PsiElement elementAt) {
    final Language elementLanguage = elementAt.getLanguage();
    if (file.getLanguage() instanceof CompositeLanguage) {
      CompositeLanguage compositeLanguage = (CompositeLanguage) file.getLanguage();
      final Language[] extensions = compositeLanguage.getLanguageExtensionsForFile(file);
      for(Language extension: extensions) {
        if (extension == elementLanguage) {
          return true;
        }
      }
    }
    return false;
  }
}
| |
// Copyright 2011-2016 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.security.zynamics.binnavi.Database.PostgreSQL.Functions;
import com.google.common.base.Preconditions;
import com.google.security.zynamics.binnavi.Database.AbstractSQLProvider;
import com.google.security.zynamics.binnavi.Database.CConnection;
import com.google.security.zynamics.binnavi.Database.CModuleViewFinder;
import com.google.security.zynamics.binnavi.Database.CTableNames;
import com.google.security.zynamics.binnavi.Database.Exceptions.CouldntDeleteException;
import com.google.security.zynamics.binnavi.Database.Exceptions.CouldntLoadDataException;
import com.google.security.zynamics.binnavi.Database.Exceptions.CouldntSaveDataException;
import com.google.security.zynamics.binnavi.Database.Interfaces.SQLProvider;
import com.google.security.zynamics.binnavi.Database.PostgreSQL.PostgreSQLHelpers;
import com.google.security.zynamics.binnavi.Log.NaviLogger;
import com.google.security.zynamics.binnavi.debug.debugger.DebuggerTemplate;
import com.google.security.zynamics.binnavi.disassembly.UnrelocatedAddress;
import com.google.security.zynamics.binnavi.disassembly.INaviModule;
import com.google.security.zynamics.binnavi.disassembly.INaviRawModule;
import com.google.security.zynamics.binnavi.disassembly.Modules.CModule;
import com.google.security.zynamics.binnavi.disassembly.views.INaviView;
import com.google.security.zynamics.zylib.disassembly.IAddress;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
/**
 * Static helpers for reading and writing BinNavi module rows in a PostgreSQL
 * database. All methods validate their arguments and translate SQLException
 * into the domain-specific Couldnt*Exception types.
 */
public final class PostgreSQLModuleFunctions {
  /**
   * You are not supposed to instantiate this class.
   */
  private PostgreSQLModuleFunctions() {
    // You are not supposed to instantiate this class.
  }
  /**
   * Checks the validity of a given SQL provider and a given module. If there is a problem with the
   * arguments, an exception is thrown.
   *
   * @param provider The SQL provider to check.
   * @param module The module to check.
   */
  protected static void checkArguments(final AbstractSQLProvider provider,
      final INaviModule module) {
    Preconditions.checkNotNull(provider, "IE00488: Provider argument can not be null");
    Preconditions.checkNotNull(module, "IE00489: Module argument can not be null");
    Preconditions.checkArgument(module.inSameDatabase(provider),
        "IE00490: Module is not part of this database");
  }
  /**
   * Sets the debugger used to debug a module.
   *
   * The module and the debugger must be stored in the database connected to by the provider
   * argument.
   *
   * @param provider The SQL provider that provides the connection.
   * @param module The module whose debugger is set.
   * @param debugger The debugger that is assigned to the module or null if a former assignment
   *        should be cleared.
   *
   * @throws CouldntSaveDataException Thrown if the debugger could not be assigned to the module.
   */
  public static void assignDebugger(final AbstractSQLProvider provider, final INaviModule module,
      final DebuggerTemplate debugger) throws CouldntSaveDataException {
    checkArguments(provider, module);
    // ATTENTION: Argument "debugger" can be null
    if ((debugger != null) && !debugger.inSameDatabase(provider)) {
      throw new IllegalArgumentException("IE00491: Debugger is not part of this database");
    }
    final CConnection connection = provider.getConnection();
    try {
      // A null debugger clears the assignment by writing SQL NULL.
      final String query = String.format("update %s set debugger_id = %s where id = %d",
          CTableNames.MODULES_TABLE, debugger == null ? "NULL" : String.valueOf(debugger.getId()),
          module.getConfiguration().getId());
      connection.executeUpdate(query, true);
    } catch (final SQLException e) {
      throw new CouldntSaveDataException(e);
    }
    PostgreSQLHelpers.updateModificationDate(connection, CTableNames.MODULES_TABLE,
        module.getConfiguration().getId());
  }
  /**
   * Deletes a module from the database.
   *
   * The module must be stored in the database connected to by the provider argument.
   *
   * @param provider The SQL provider that provides the connection.
   * @param module The module to be deleted.
   *
   * @throws CouldntDeleteException Thrown if the module could not be deleted.
   */
  public static void deleteModule(final AbstractSQLProvider provider, final INaviModule module)
      throws CouldntDeleteException {
    PostgreSQLModuleFunctions.checkArguments(provider, module);
    NaviLogger.info("Deleting module %s", module.getConfiguration().getName());
    final CConnection connection = provider.getConnection();
    try {
      // Delete dependent rows first (views, nodes, instructions, expression
      // trees, code nodes), then the module row itself.
      final String moduleViewQuery = "DELETE FROM " + CTableNames.VIEWS_TABLE + " "
          + " WHERE id IN (SELECT view_id FROM " + CTableNames.MODULE_VIEWS_TABLE
          + " WHERE module_id = " + module.getConfiguration().getId() + ")";
      connection.executeUpdate(moduleViewQuery, true);
      final String nodeQuery = "DELETE FROM " + CTableNames.NODES_TABLE + " " + " WHERE id IN "
          + " (SELECT view_id FROM " + CTableNames.MODULE_VIEWS_TABLE + " WHERE module_id = "
          + module.getConfiguration().getId() + ")";
      connection.executeUpdate(nodeQuery, true);
      final String instructionsQuery = String.format(
          "DELETE FROM " + CTableNames.INSTRUCTIONS_TABLE + " WHERE module_id = %d",
          module.getConfiguration().getId());
      connection.executeUpdate(instructionsQuery, true);
      connection.executeUpdate(String.format(
          "delete FROM " + CTableNames.EXPRESSION_TREE_TABLE + "_mapping where module_id = %d",
          module.getConfiguration().getId()), true);
      connection.executeUpdate(String.format(
          "delete FROM " + CTableNames.EXPRESSION_TREE_TABLE + " where module_id = %d",
          module.getConfiguration().getId()), true);
      connection.executeUpdate(String.format(
          "delete FROM " + CTableNames.EXPRESSION_TREE_TABLE + "_ids where module_id = %d",
          module.getConfiguration().getId()), true);
      connection.executeUpdate(String.format(
          "delete FROM " + CTableNames.CODE_NODES_TABLE + " where module_id = %d",
          module.getConfiguration().getId()), true);
      connection.executeUpdate(String.format(
          "delete from " + CTableNames.MODULES_TABLE + " where id = %d",
          module.getConfiguration().getId()), true);
    } catch (final SQLException e) {
      throw new CouldntDeleteException(e);
    }
  }
  /**
   * Returns the modification date of the module.
   *
   * The module must be stored in the database connected to by the provider argument.
   *
   * @param provider The SQL provider that provides the connection.
   * @param module The module whose modification date is determined.
   *
   * @return The modification date of the module.
   *
   * @throws CouldntLoadDataException Thrown if the modification date of the module could not be
   *         determined.
   */
  public static Date getModificationDate(final AbstractSQLProvider provider,
      final INaviModule module) throws CouldntLoadDataException {
    checkArguments(provider, module);
    return PostgreSQLHelpers.getModificationDate(provider.getConnection(),
        CTableNames.MODULES_TABLE, module.getConfiguration().getId());
  }
  /**
   * Finds the views inside the module that contain instructions of a given address.
   *
   * The module must be stored in the database connected to by the provider argument.
   *
   * @param provider The SQL provider that provides the connection.
   * @param module The module to search through.
   * @param addresses The addresses to search for.
   * @param all True, to search for views that contain all addresses. False, for any addresses.
   *
   * @return A list of views where instructions with the given address can be found.
   *
   * @throws CouldntLoadDataException Thrown if searching through the module failed.
   */
  public static List<INaviView> getViewsWithAddresses(final AbstractSQLProvider provider,
      final INaviModule module, final List<UnrelocatedAddress> addresses, final boolean all)
      throws CouldntLoadDataException {
    checkArguments(provider, module);
    Preconditions.checkNotNull(addresses, "IE00492: Addresses argument can not be null");
    final StringBuilder queryBuilder = new StringBuilder();
    final int moduleID = module.getConfiguration().getId();
    if (addresses.isEmpty()) {
      return new ArrayList<INaviView>();
    } else if (addresses.size() == 1) {
      // Single address: one join chain from module views to instructions.
      queryBuilder.append("SELECT mvt.module_id, mvt.view_id FROM " + CTableNames.MODULE_VIEWS_TABLE
          + " AS mvt JOIN " + CTableNames.NODES_TABLE
          + " AS nt ON mvt.view_id = nt.view_id AND mvt.module_id = " + moduleID + " JOIN "
          + CTableNames.CODENODE_INSTRUCTIONS_TABLE
          + " AS cit ON nt.id = cit.node_id AND cit.module_id = " + moduleID + " JOIN "
          + CTableNames.INSTRUCTIONS_TABLE
          + " AS it ON it.address = cit.address AND it.module_id = " + moduleID
          + " WHERE it.address = " + addresses.get(0).getAddress().toLong());
    } else if (all) {
      // "All addresses": intersect the per-address result sets by joining the
      // sub-selects on view_id.
      boolean needsComma = false;
      int counter = 0;
      queryBuilder.append("select view_id from ");
      for (final UnrelocatedAddress address : addresses) {
        if (needsComma) {
          queryBuilder.append(" inner join ");
        }
        needsComma = true;
        queryBuilder.append("(SELECT mvt.module_id, mvt.view_id FROM "
            + CTableNames.MODULE_VIEWS_TABLE + " AS mvt JOIN " + CTableNames.NODES_TABLE
            + " AS nt ON mvt.view_id = nt.view_id AND mvt.module_id = " + moduleID + " JOIN "
            + CTableNames.CODENODE_INSTRUCTIONS_TABLE
            + " AS cit ON nt.id = cit.node_id AND cit.module_id = " + moduleID + " JOIN "
            + CTableNames.INSTRUCTIONS_TABLE
            + " AS it ON it.address = cit.address AND it.module_id = " + moduleID
            + " WHERE it.address = " + address.getAddress().toLong() + ") AS t" + counter);
        counter++;
      }
      queryBuilder.append(" USING (view_id)");
    } else {
      // "Any address": a single IN (...) list, de-duplicated via GROUP BY.
      queryBuilder.append("SELECT mvt.module_id, mvt.view_id FROM " + CTableNames.MODULE_VIEWS_TABLE
          + " AS mvt JOIN " + CTableNames.NODES_TABLE
          + " AS nt ON mvt.view_id = nt.view_id AND mvt.module_id = " + moduleID + " JOIN "
          + CTableNames.CODENODE_INSTRUCTIONS_TABLE
          + " AS cit ON nt.id = cit.node_id AND cit.module_id = " + moduleID + " JOIN "
          + CTableNames.INSTRUCTIONS_TABLE
          + " AS it ON it.address = cit.address AND it.module_id = " + moduleID
          + " WHERE it.address IN (");
      boolean needsComma = false;
      for (final UnrelocatedAddress address : addresses) {
        if (needsComma) {
          queryBuilder.append(", ");
        }
        needsComma = true;
        queryBuilder.append(address.getAddress().toLong());
      }
      queryBuilder.append(") GROUP BY mvt.view_id, mvt.module_id");
    }
    return PostgreSQLHelpers.getViewsWithAddress(provider.getConnection(), queryBuilder.toString(),
        "module_id", new CModuleViewFinder(provider));
  }
  /**
   * Reads a module from the database.
   *
   * @param connection The connection to the database.
   * @param moduleId The ID of the module to read.
   * @param rawModule The raw module that backs the module.
   * @param provider The SQL provider associated with the loaded module.
   *
   * @return The module read from the database.
   *
   * @throws CouldntLoadDataException Thrown if no module with the given ID exists or the module
   *         data could not be read.
   */
  public static CModule readModule(final CConnection connection, final int moduleId,
      final INaviRawModule rawModule, final SQLProvider provider) throws CouldntLoadDataException {
    Preconditions.checkNotNull(rawModule, "IE01797: Raw module argument can not be null");
    Preconditions.checkNotNull(provider, "IE01798: Provider argument can not be null");
    // Note: "stared" is the actual column name in the schema (sic).
    final String query = "SELECT id, " + CTableNames.MODULES_TABLE + ".name, md5, sha1, "
        + " description, import_time, modification_date, image_base, file_base, stared, "
        + " initialization_state " + " FROM " + CTableNames.MODULES_TABLE + " WHERE id = "
        + moduleId + " ORDER by id";
    try {
      final ResultSet resultSet = connection.executeQuery(query, true);
      try {
        while (resultSet.next()) {
          final String name = PostgreSQLHelpers.readString(resultSet, "name");
          final String md5 = PostgreSQLHelpers.readString(resultSet, "md5");
          final String sha1 = PostgreSQLHelpers.readString(resultSet, "sha1");
          final String comment = PostgreSQLHelpers.readString(resultSet, "description");
          final Timestamp importTime = resultSet.getTimestamp("import_time");
          final Timestamp modificationDate = resultSet.getTimestamp("modification_date");
          final int functionCount = rawModule.getFunctionCount();
          // NOTE(review): view count is initialized to 0 here; presumably it is
          // computed elsewhere after loading — confirm before relying on it.
          final int viewCount = 0;
          final IAddress imageBase = PostgreSQLHelpers.loadAddress(resultSet, "image_base");
          final IAddress fileBase = PostgreSQLHelpers.loadAddress(resultSet, "file_base");
          final boolean isStared = resultSet.getBoolean("stared");
          final int initializationState = resultSet.getInt("initialization_state");
          // The query is keyed on the primary key, so the first row is the only row.
          return new CModule(moduleId,
              name,
              comment,
              importTime,
              modificationDate,
              md5,
              sha1,
              functionCount,
              viewCount,
              fileBase,
              imageBase,
              null,
              rawModule,
              initializationState,
              isStared,
              provider);
        }
      } finally {
        resultSet.close();
      }
    } catch (final SQLException e) {
      throw new CouldntLoadDataException(e);
    }
    throw new CouldntLoadDataException("Error: No module with the given ID exists");
  }
  /**
   * Changes the description of a module.
   *
   * The module must be stored in the database connected to by the provider argument.
   *
   * @param provider The SQL provider that provides the connection.
   * @param module The module whose description is changed.
   * @param description The new description of the module.
   *
   * @throws CouldntSaveDataException Thrown if the description of the module could not be changed.
   */
  public static void setDescription(final AbstractSQLProvider provider, final INaviModule module,
      final String description) throws CouldntSaveDataException {
    checkArguments(provider, module);
    Preconditions.checkNotNull(description, "IE00493: Description argument can not be null");
    PostgreSQLHelpers.setDescription(provider.getConnection(), module.getConfiguration().getId(),
        description, CTableNames.MODULES_TABLE);
  }
  /**
   * Changes the file base of a module.
   *
   * The module must be stored in the database connected to by the provider argument.
   *
   * @param provider The SQL provider that provides the connection.
   * @param module The module whose file base is changed.
   * @param address The new file base of the module.
   *
   * @throws CouldntSaveDataException Thrown if the file base of the module could not be changed.
   */
  public static void setFileBase(final AbstractSQLProvider provider, final INaviModule module,
      final IAddress address) throws CouldntSaveDataException {
    checkArguments(provider, module);
    Preconditions.checkNotNull(address, "IE00494: Address argument can not be null");
    final CConnection connection = provider.getConnection();
    try {
      final String query = String.format("UPDATE %s SET file_base = %s " + " WHERE id = %d",
          CTableNames.MODULES_TABLE, address.toBigInteger().toString(),
          module.getConfiguration().getId());
      connection.executeUpdate(query, true);
    } catch (final SQLException e) {
      throw new CouldntSaveDataException(e);
    }
    PostgreSQLHelpers.updateModificationDate(connection, CTableNames.MODULES_TABLE,
        module.getConfiguration().getId());
  }
  /**
   * Changes the image base of a module.
   *
   * The module must be stored in the database connected to by the provider argument.
   *
   * @param provider The SQL provider that provides the connection.
   * @param module The module whose image base is changed.
   * @param address The new image base of the module.
   *
   * @throws CouldntSaveDataException Thrown if the image base of the module could not be changed.
   */
  public static void setImageBase(final AbstractSQLProvider provider, final INaviModule module,
      final IAddress address) throws CouldntSaveDataException {
    checkArguments(provider, module);
    Preconditions.checkNotNull(address, "IE00495: Address argument can not be null");
    final CConnection connection = provider.getConnection();
    try {
      final String query = String.format("UPDATE %s SET image_base = %s " + " WHERE id = %d",
          CTableNames.MODULES_TABLE, address.toBigInteger().toString(),
          module.getConfiguration().getId());
      connection.executeUpdate(query, true);
    } catch (final SQLException e) {
      throw new CouldntSaveDataException(e);
    }
    PostgreSQLHelpers.updateModificationDate(connection, CTableNames.MODULES_TABLE,
        module.getConfiguration().getId());
  }
  /**
   * Changes the name of a module.
   *
   * The module must be stored in the database connected to by the provider argument.
   *
   * @param provider The SQL provider that provides the connection.
   * @param module The module whose name is changed.
   * @param name The new name of the module.
   *
   * @throws CouldntSaveDataException Thrown if the name of the module could not be changed.
   */
  public static void setName(final AbstractSQLProvider provider, final INaviModule module,
      final String name) throws CouldntSaveDataException {
    checkArguments(provider, module);
    Preconditions.checkNotNull(name, "IE00496: Name argument can not be null");
    PostgreSQLHelpers.setName(provider.getConnection(), module.getConfiguration().getId(), name,
        CTableNames.MODULES_TABLE);
  }
  /**
   * Stars a module.
   *
   * @param provider Provides the connection to the database.
   * @param module The module to star.
   * @param isStared True, to star the module. False, to unstar it.
   *
   * @throws CouldntSaveDataException Thrown if the star state of the module could not be
   *         updated.
   */
  public static void starModule(final AbstractSQLProvider provider, final INaviModule module,
      final boolean isStared) throws CouldntSaveDataException {
    // Validate arguments like every other mutator in this class does.
    checkArguments(provider, module);
    final String starModuleQuery = "UPDATE " + CTableNames.MODULES_TABLE + " SET stared = "
        + isStared + " WHERE id = " + module.getConfiguration().getId();
    try {
      provider.getConnection().executeUpdate(starModuleQuery, true);
    } catch (final SQLException e) {
      throw new CouldntSaveDataException(e);
    }
  }
}
| |
/*
* AsyncRequestInterceptor.java created on 2011-11-26
*
* Created by Brushing Bits Labs
* http://www.brushingbits.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jnap.core.mvc.async;
import java.io.IOException;
import java.lang.reflect.Method;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.Map;
import javax.servlet.ServletConfig;
import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.collections.iterators.IteratorEnumeration;
import org.atmosphere.annotation.Broadcast;
import org.atmosphere.annotation.Cluster;
import org.atmosphere.annotation.Publish;
import org.atmosphere.annotation.Resume;
import org.atmosphere.annotation.Schedule;
import org.atmosphere.annotation.Subscribe;
import org.atmosphere.annotation.Suspend;
import org.atmosphere.annotation.Suspend.SCOPE;
import org.atmosphere.cpr.ApplicationConfig;
import org.atmosphere.cpr.AtmosphereHandler;
import org.atmosphere.cpr.AtmosphereResource;
import org.atmosphere.cpr.AtmosphereResourceEvent;
import org.atmosphere.cpr.AtmosphereResourceEventListener;
import org.atmosphere.cpr.AtmosphereServlet;
import org.atmosphere.cpr.AtmosphereServlet.Action;
import org.atmosphere.cpr.FrameworkConfig;
import org.atmosphere.cpr.HeaderConfig;
import org.jnap.core.mvc.support.RestfulHandlerAdapter;
import org.jnap.core.stereotype.RestController;
import org.jnap.core.util.TimeUnitConverter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.config.AutowireCapableBeanFactory;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.core.annotation.AnnotationUtils;
import org.springframework.util.Assert;
import org.springframework.web.servlet.LocaleResolver;
import org.springframework.web.servlet.ModelAndView;
import org.springframework.web.servlet.View;
import org.springframework.web.servlet.handler.HandlerInterceptorAdapter;
import org.springframework.web.servlet.view.ContentNegotiatingViewResolver;
/**
* @author Daniel Rochetti
* @since 0.9.3
*/
public class AsyncRequestInterceptor extends HandlerInterceptorAdapter
implements AtmosphereHandler<HttpServletRequest, HttpServletResponse>,
ApplicationContextAware, InitializingBean {
private static Logger logger = LoggerFactory.getLogger(AsyncRequestInterceptor.class);
@Autowired
private ServletContext servletContext;
@Autowired
private ContentNegotiatingViewResolver viewResolver;
@Autowired
private LocaleResolver localeResolver;
private ApplicationContext applicationContext;
private boolean useWebSocket = true;
private boolean useNative = true;
private boolean useBlocking = false;
private boolean useStream = true;
private AtmosphereServlet atmosphere = null;
	/**
	 * Builds and initializes the embedded {@link AtmosphereServlet} that this
	 * interceptor delegates asynchronous requests to. Atmosphere is configured
	 * through init parameters derived from the use* flags, and this instance is
	 * registered as the AtmosphereHandler for all paths ("/*").
	 */
	@Override
	public void afterPropertiesSet() throws Exception {
		Assert.notNull(servletContext);
		final ServletContext sc = servletContext;
		// Translate the configuration flags into Atmosphere init parameters.
		final Map<String, String> params = new HashMap<String, String>();
		params.put(ApplicationConfig.WEBSOCKET_SUPPORT, Boolean.toString(this.useWebSocket));
		params.put(ApplicationConfig.PROPERTY_NATIVE_COMETSUPPORT, Boolean.toString(this.useNative));
		params.put(ApplicationConfig.PROPERTY_BLOCKING_COMETSUPPORT, Boolean.toString(this.useBlocking));
		params.put(ApplicationConfig.PROPERTY_USE_STREAM, Boolean.toString(this.useStream));
		// atmosphere = new AtmosphereServlet(true);
		atmosphere = new AtmosphereServlet();
		atmosphere.addAtmosphereHandler("/*", this);
		// Adapt this interceptor's configuration to the ServletConfig contract
		// expected by AtmosphereServlet.init().
		atmosphere.init(new ServletConfig() {
			@Override
			public String getServletName() {
				return AsyncRequestInterceptor.class.getSimpleName();
			}
			@Override
			public ServletContext getServletContext() {
				return sc;
			}
			@Override
			public Enumeration getInitParameterNames() {
				return new IteratorEnumeration(params.keySet().iterator());
			}
			@Override
			public String getInitParameter(String name) {
				return params.get(name);
			}
		});
	}
@Override
public boolean preHandle(HttpServletRequest request,
HttpServletResponse response, Object handler) throws Exception {
Action action = null;
if (isAsync(request, handler)) {
action = atmosphere.doCometSupport(request, response);
}
return action == null || action.type != Action.TYPE.SUSPEND;
}
/**
*
* @param request
* @param handler
* @return
*/
protected boolean isAsync(HttpServletRequest request, Object handler) {
RestController controllerAnnotation = AnnotationUtils.findAnnotation(handler.getClass(), RestController.class);
return (controllerAnnotation != null && controllerAnnotation.async())
|| request.getHeader(HeaderConfig.X_ATMOSPHERE_TRANSPORT) != null
|| request.getHeader("Sec-WebSocket-Version") != null;
}
@Override
public void postHandle(HttpServletRequest request,
HttpServletResponse response, Object handler,
ModelAndView modelAndView) throws Exception {
if (isAsync(request, handler)) {
Method handlerMethod = (Method) request.getAttribute(RestfulHandlerAdapter.CURRENT_HANDLER_METHOD_ATTRIBUTE);
if (handlerMethod != null) {
LinkedList<AsyncResponseHandler> handlers = new LinkedList<AsyncResponseHandler>();
AsyncResponseHandler responseHandler = null;
if (AsyncResponseModel.class.isInstance(modelAndView)) {
// TODO AsyncState.SUSPEND_RESPONSE
}
if (handlerMethod.isAnnotationPresent(Broadcast.class)) {
Broadcast annotation = handlerMethod.getAnnotation(Broadcast.class);
int delay = annotation.delay();
Class[] suspendTimeout = annotation.value();
AsyncState state = annotation.resumeOnBroadcast()
? AsyncState.RESUME_ON_BROADCAST
: AsyncState.BROADCAST;
responseHandler = new AsyncResponseHandler(state, delay, 0,
SCOPE.APPLICATION, true, suspendTimeout, null);
handlers.addLast(responseHandler);
if (handlerMethod.isAnnotationPresent(Cluster.class)) {
// TODO add @Cluster support
}
}
if (handlerMethod.isAnnotationPresent(Suspend.class)) {
Suspend annotation = handlerMethod.getAnnotation(Suspend.class);
long suspendTimeout = annotation.period();
suspendTimeout = TimeUnitConverter.convert(suspendTimeout, annotation.timeUnit());
Suspend.SCOPE scope = annotation.scope();
boolean outputComments = annotation.outputComments();
boolean trackable = false;
// TODO add Trackable support
// if (TrackableResource.class.isAssignableFrom(am.getMethod().getReturnType())) {
// trackable = true;
// }
AsyncState state = annotation.resumeOnBroadcast()
? AsyncState.SUSPEND_RESUME
: AsyncState.SUSPEND;
if (trackable) {
state = AsyncState.SUSPEND_TRACKABLE;
}
responseHandler = new AsyncResponseHandler(state, suspendTimeout, 0, scope, outputComments);
responseHandler.setListeners(createListeners(annotation.listeners()));
handlers.addFirst(responseHandler);
}
if (handlerMethod.isAnnotationPresent(Subscribe.class)) {
boolean trackable = false;
// TODO add Trackable support
// if (TrackableResource.class.isAssignableFrom(am.getMethod().getReturnType())) {
// trackable = true;
// }
Subscribe annotation = handlerMethod.getAnnotation(Subscribe.class);
AsyncState state = trackable ? AsyncState.SUBSCRIBE_TRACKABLE : AsyncState.SUBSCRIBE;
String topic = annotation.value(); // TODO add SpEL support
responseHandler = new AsyncResponseHandler(state, 30000, -1,
Suspend.SCOPE.APPLICATION, false, null, topic);
responseHandler.setListeners(createListeners(annotation.listeners()));
handlers.addFirst(responseHandler);
}
if (handlerMethod.isAnnotationPresent(Publish.class)) {
String topic = handlerMethod.getAnnotation(Publish.class).value(); // TODO add SpEL support
responseHandler = new AsyncResponseHandler(AsyncState.PUBLISH, 30000, -1,
Suspend.SCOPE.APPLICATION, false, null, topic);
handlers.addFirst(responseHandler);
}
if (handlerMethod.isAnnotationPresent(Resume.class)) {
handlers.addFirst(new AsyncResponseHandler(AsyncState.RESUME,
handlerMethod.getAnnotation(Resume.class).value()));
}
if (handlerMethod.isAnnotationPresent(Schedule.class)) {
Schedule annotation = handlerMethod.getAnnotation(Schedule.class);
AsyncState state = annotation.resumeOnBroadcast()
? AsyncState.SCHEDULE_RESUME
: AsyncState.SCHEDULE;
handlers.addFirst(new AsyncResponseHandler(state, annotation.period(), annotation.waitFor()));
}
for (AsyncResponseHandler asyncHandler : handlers) {
asyncHandler.handle(request, response, modelAndView);
}
} else {
logger.warn("Atmosphere annotation support disabled on this request.");
}
}
}
/**
*
* @param listenerTypes
* @return
*/
protected AtmosphereResourceEventListener[] createListeners(
Class<? extends AtmosphereResourceEventListener>[] listenerTypes) {
AtmosphereResourceEventListener[] listeners = null;
if (listenerTypes != null) {
listeners = new AtmosphereResourceEventListener[listenerTypes.length];
AtmosphereResourceEventListener listener = null;
AutowireCapableBeanFactory autowireCapableBeanFactory = null;
try {
autowireCapableBeanFactory = applicationContext.getAutowireCapableBeanFactory();
} catch (IllegalStateException e) {
// ignore; listeners will not be inject with spring beans
}
for (int i = 0; i < listenerTypes.length; i++) {
Class<? extends AtmosphereResourceEventListener> listenerType = listenerTypes[i];
listener = BeanUtils.instantiate(listenerType);
if (autowireCapableBeanFactory != null) {
autowireCapableBeanFactory.autowireBean(listener);
}
listeners[i] = listener;
}
}
return listeners;
}
/**
* @param request
* @return
*/
protected AtmosphereResource<HttpServletRequest, HttpServletResponse> getAtmosphereResource(
HttpServletRequest request) {
return (AtmosphereResource<HttpServletRequest, HttpServletResponse>) request.getAttribute(FrameworkConfig.ATMOSPHERE_RESOURCE);
}
@Override
public void onRequest(AtmosphereResource<HttpServletRequest, HttpServletResponse> resource) throws IOException {
Assert.notNull(resource);
resource.getRequest().setAttribute(FrameworkConfig.ATMOSPHERE_RESOURCE, resource);
resource.getRequest().setAttribute(FrameworkConfig.ATMOSPHERE_HANDLER, this);
}
@Override
public void onStateChange(AtmosphereResourceEvent<HttpServletRequest, HttpServletResponse> event) throws IOException {
System.out.println("AsyncRequestInterceptor.onStateChange()");
System.out.println(event.getMessage());
HttpServletRequest req = event.getResource().getRequest();
HttpServletResponse res = event.getResource().getResponse();
try {
ModelAndView mv = (ModelAndView) event.getMessage();
View view = null;
if (mv.isReference()) {
view = this.viewResolver.resolveViewName(mv.getViewName(), this.localeResolver.resolveLocale(req));
} else {
view = mv.getView();
if (view == null) {
}
}
view.render(mv.getModelMap(), req, res);
} catch (Exception e) {
throw new IOException(e.getMessage(), e);
}
}
@Override
public void destroy() {
}
public void setServletContext(ServletContext servletContext) {
this.servletContext = servletContext;
}
public void setUseNative(boolean useNativeImplementation) {
this.useNative = useNativeImplementation;
}
public void setUseBlocking(boolean useBlockingImplementation) {
this.useBlocking = useBlockingImplementation;
}
public void setUseStream(boolean useStream) {
this.useStream = useStream;
}
public void setViewResolver(ContentNegotiatingViewResolver viewResolver) {
this.viewResolver = viewResolver;
}
public void setApplicationContext(ApplicationContext applicationContext) {
this.applicationContext = applicationContext;
}
public void setUseWebSocket(boolean useWebSocket) {
this.useWebSocket = useWebSocket;
}
}
| |
/*
* Copyright (C) 2014 Chris Renke
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.tripvi.drawerlayout;
import android.content.res.Resources;
import android.graphics.Canvas;
import android.graphics.ColorFilter;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.graphics.Path;
import android.graphics.PathMeasure;
import android.graphics.Rect;
import android.graphics.drawable.Drawable;
import android.view.View;
import static android.graphics.Color.BLACK;
import static android.graphics.Paint.ANTI_ALIAS_FLAG;
import static android.graphics.Paint.Cap;
import static android.graphics.Paint.Cap.BUTT;
import static android.graphics.Paint.Cap.ROUND;
import static android.graphics.Paint.SUBPIXEL_TEXT_FLAG;
import static android.graphics.Paint.Style.STROKE;
import static android.graphics.PixelFormat.TRANSLUCENT;
import static android.support.v4.widget.DrawerLayout.DrawerListener;
import static java.lang.Math.sqrt;
/** A drawable that rotates between a drawer icon and a back arrow based on parameter. */
/** A drawable that rotates between a drawer icon and a back arrow based on parameter. */
public class DrawerArrowDrawable extends Drawable {

  /**
   * Joins two {@link Path}s as if they were one where the first 50% of the path is {@code
   * pathFirst} and the second 50% of the path is {@code pathSecond}.
   */
  private static class JoinedPath {

    private final PathMeasure measureFirst;
    private final PathMeasure measureSecond;
    private final float lengthFirst;
    private final float lengthSecond;

    private JoinedPath(Path pathFirst, Path pathSecond) {
      measureFirst = new PathMeasure(pathFirst, false);
      measureSecond = new PathMeasure(pathSecond, false);
      lengthFirst = measureFirst.getLength();
      lengthSecond = measureSecond.getLength();
    }

    /**
     * Returns a point on this curve at the given {@code parameter}.
     * For {@code parameter} values less than .5f, the first path will drive the point.
     * For {@code parameter} values greater than .5f, the second path will drive the point.
     * For {@code parameter} equal to .5f, the point will be the point where the two
     * internal paths connect.
     */
    private void getPointOnLine(float parameter, float[] coords) {
      if (parameter <= .5f) {
        parameter *= 2;
        measureFirst.getPosTan(lengthFirst * parameter, coords, null);
      } else {
        parameter -= .5f;
        parameter *= 2;
        measureSecond.getPosTan(lengthSecond * parameter, coords, null);
      }
    }
  }

  /** Draws a line between two {@link JoinedPath}s at distance {@code parameter} along each path. */
  private class BridgingLine {

    private final JoinedPath pathA;
    private final JoinedPath pathB;

    private BridgingLine(JoinedPath pathA, JoinedPath pathB) {
      this.pathA = pathA;
      this.pathB = pathB;
    }

    /**
     * Draw a line between the points defined on the paths backing {@code measureA} and
     * {@code measureB} at the current parameter.
     */
    private void draw(Canvas canvas) {
      pathA.getPointOnLine(parameter, coordsA);
      pathB.getPointOnLine(parameter, coordsB);
      if (rounded) insetPointsForRoundCaps();
      canvas.drawLine(coordsA[0], coordsA[1], coordsB[0], coordsB[1], linePaint);
    }

    /**
     * Insets the end points of the current line to account for the protruding
     * ends drawn for {@link Cap#ROUND} style lines.
     */
    private void insetPointsForRoundCaps() {
      vX = coordsB[0] - coordsA[0];
      vY = coordsB[1] - coordsA[1];
      magnitude = (float) sqrt((vX * vX + vY * vY));
      paramA = (magnitude - halfStrokeWidthPixel) / magnitude;
      paramB = halfStrokeWidthPixel / magnitude;
      coordsA[0] = coordsB[0] - (vX * paramA);
      coordsA[1] = coordsB[1] - (vY * paramA);
      coordsB[0] = coordsB[0] - (vX * paramB);
      coordsB[1] = coordsB[1] - (vY * paramB);
    }
  }

  /** Paths were generated at a 3px/dp density; this is the scale factor for different densities. */
  private final static float PATH_GEN_DENSITY = 3;

  /** Paths were generated with at this size for {@link DrawerArrowDrawable#PATH_GEN_DENSITY}. */
  private final static float DIMEN_DP = 23.5f;

  /**
   * Paths were generated targeting this stroke width to form the arrowhead properly, modification
   * may cause the arrow to not form nicely.
   */
  private final static float STROKE_WIDTH_DP = 2;

  private final BridgingLine topLine;
  private final BridgingLine middleLine;
  private final BridgingLine bottomLine;

  private final Rect bounds;
  private final float halfStrokeWidthPixel;
  private final Paint linePaint;
  private final boolean rounded;

  private boolean flip;
  private float parameter;

  // Helper fields during drawing calculations.
  private float vX, vY, magnitude, paramA, paramB;
  private final float[] coordsA = { 0f, 0f };
  private final float[] coordsB = { 0f, 0f };

  public DrawerArrowDrawable(Resources resources) {
    this(resources, false);
  }

  /**
   * Creates the drawable scaled to the device density.
   *
   * @param resources used to read the display density
   * @param rounded   whether line ends are drawn with {@link Cap#ROUND} caps
   */
  public DrawerArrowDrawable(Resources resources, boolean rounded) {
    this.rounded = rounded;
    float density = resources.getDisplayMetrics().density;
    float strokeWidthPixel = STROKE_WIDTH_DP * density;
    halfStrokeWidthPixel = strokeWidthPixel / 2;

    linePaint = new Paint(SUBPIXEL_TEXT_FLAG | ANTI_ALIAS_FLAG);
    linePaint.setStrokeCap(rounded ? ROUND : BUTT);
    linePaint.setColor(BLACK);
    linePaint.setStyle(STROKE);
    linePaint.setStrokeWidth(strokeWidthPixel);

    int dimen = (int) (DIMEN_DP * density);
    int paddingLeft = (int) (6.12f * density);
    int paddingRight = (int) (6.12f * density);
    bounds = new Rect(0 - paddingLeft, 0, dimen + paddingRight, dimen);

    topLine = buildTopLine(density, paddingLeft);
    middleLine = buildMiddleLine(density, paddingLeft);
    bottomLine = buildBottomLine(density, paddingLeft);
  }

  /** Builds the joined curve pair for the top bar of the drawer icon / arrow. */
  private BridgingLine buildTopLine(float density, int paddingLeft) {
    Path first = new Path();
    first.moveTo(5.042f, 20f);
    first.rCubicTo(8.125f, -16.317f, 39.753f, -27.851f, 55.49f, -2.765f);
    Path second = new Path();
    second.moveTo(60.531f, 17.235f);
    second.rCubicTo(11.301f, 18.015f, -3.699f, 46.083f, -23.725f, 43.456f);
    scalePath(first, density, paddingLeft);
    scalePath(second, density, paddingLeft);
    JoinedPath joinedA = new JoinedPath(first, second);

    first = new Path();
    first.moveTo(64.959f, 20f);
    first.rCubicTo(4.457f, 16.75f, 1.512f, 37.982f, -22.557f, 42.699f);
    second = new Path();
    second.moveTo(42.402f, 62.699f);
    second.cubicTo(18.333f, 67.418f, 8.807f, 45.646f, 8.807f, 32.823f);
    scalePath(first, density, paddingLeft);
    scalePath(second, density, paddingLeft);
    JoinedPath joinedB = new JoinedPath(first, second);
    return new BridgingLine(joinedA, joinedB);
  }

  /** Builds the joined curve pair for the middle bar of the drawer icon / arrow. */
  private BridgingLine buildMiddleLine(float density, int paddingLeft) {
    Path first = new Path();
    first.moveTo(5.042f, 35f);
    first.cubicTo(5.042f, 20.333f, 18.625f, 6.791f, 35f, 6.791f);
    Path second = new Path();
    second.moveTo(35f, 6.791f);
    second.rCubicTo(16.083f, 0f, 26.853f, 16.702f, 26.853f, 28.209f);
    scalePath(first, density, paddingLeft);
    scalePath(second, density, paddingLeft);
    JoinedPath joinedA = new JoinedPath(first, second);

    first = new Path();
    first.moveTo(64.959f, 35f);
    first.rCubicTo(0f, 10.926f, -8.709f, 26.416f, -29.958f, 26.416f);
    second = new Path();
    second.moveTo(35f, 61.416f);
    second.rCubicTo(-7.5f, 0f, -23.946f, -8.211f, -23.946f, -26.416f);
    scalePath(first, density, paddingLeft);
    scalePath(second, density, paddingLeft);
    JoinedPath joinedB = new JoinedPath(first, second);
    return new BridgingLine(joinedA, joinedB);
  }

  /** Builds the joined curve pair for the bottom bar of the drawer icon / arrow. */
  private BridgingLine buildBottomLine(float density, int paddingLeft) {
    Path first = new Path();
    first.moveTo(5.042f, 50f);
    first.cubicTo(2.5f, 43.312f, 0.013f, 26.546f, 9.475f, 17.346f);
    Path second = new Path();
    second.moveTo(9.475f, 17.346f);
    second.rCubicTo(9.462f, -9.2f, 24.188f, -10.353f, 27.326f, -8.245f);
    scalePath(first, density, paddingLeft);
    scalePath(second, density, paddingLeft);
    JoinedPath joinedA = new JoinedPath(first, second);

    first = new Path();
    first.moveTo(64.959f, 50f);
    first.rCubicTo(-7.021f, 10.08f, -20.584f, 19.699f, -37.361f, 12.74f);
    second = new Path();
    second.moveTo(27.598f, 62.699f);
    second.rCubicTo(-15.723f, -6.521f, -18.8f, -23.543f, -18.8f, -25.642f);
    scalePath(first, density, paddingLeft);
    scalePath(second, density, paddingLeft);
    JoinedPath joinedB = new JoinedPath(first, second);
    return new BridgingLine(joinedA, joinedB);
  }

  @Override public int getIntrinsicHeight() {
    return bounds.height();
  }

  @Override public int getIntrinsicWidth() {
    return bounds.width();
  }

  @Override public void draw(Canvas canvas) {
    if (flip) {
      canvas.save();
      // Float division: the previous integer division shifted the mirror pivot by
      // up to half a pixel when the intrinsic size was odd.
      canvas.scale(1f, -1f, getIntrinsicWidth() / 2f, getIntrinsicHeight() / 2f);
    }
    topLine.draw(canvas);
    middleLine.draw(canvas);
    bottomLine.draw(canvas);
    if (flip) canvas.restore();
  }

  @Override public void setAlpha(int alpha) {
    linePaint.setAlpha(alpha);
    invalidateSelf();
  }

  @Override public void setColorFilter(ColorFilter cf) {
    linePaint.setColorFilter(cf);
    invalidateSelf();
  }

  @Override public int getOpacity() {
    return TRANSLUCENT;
  }

  /** Sets the stroke color used for all three bars. */
  public void setStrokeColor(int color) {
    linePaint.setColor(color);
    invalidateSelf();
  }

  /**
   * Sets the rotation of this drawable based on {@code parameter} between 0 and 1. Usually driven
   * via {@link DrawerListener#onDrawerSlide(View, float)}'s {@code slideOffset} parameter.
   *
   * @throws IllegalArgumentException if {@code parameter} is outside [0, 1]
   */
  public void setParameter(float parameter) {
    if (parameter > 1 || parameter < 0) {
      throw new IllegalArgumentException("Value must be between 0 and 1 inclusive!");
    }
    this.parameter = parameter;
    invalidateSelf();
  }

  /**
   * When false, rotates from 3 o'clock to 9 o'clock between a drawer icon and a back arrow.
   * When true, rotates from 9 o'clock to 3 o'clock between a back arrow and a drawer icon.
   */
  public void setFlip(boolean flip) {
    this.flip = flip;
    invalidateSelf();
  }

  /**
   * Scales the paths to the given screen density. If the density matches the
   * {@link DrawerArrowDrawable#PATH_GEN_DENSITY}, no scaling needs to be done.
   */
  private static void scalePath(Path path, float density, int paddingLeft) {
    path.offset(paddingLeft, 0);
    if (density == PATH_GEN_DENSITY) return;
    Matrix scaleMatrix = new Matrix();
    scaleMatrix.setScale(density / PATH_GEN_DENSITY, density / PATH_GEN_DENSITY, 0, 0);
    path.transform(scaleMatrix);
  }
}
| |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.execution.testframework.sm.runner.ui;
import com.intellij.execution.runners.ExecutionEnvironment;
import com.intellij.execution.testframework.TestConsoleProperties;
import com.intellij.execution.testframework.sm.Marker;
import com.intellij.execution.testframework.sm.runner.BaseSMTRunnerTestCase;
import com.intellij.execution.testframework.sm.runner.GeneralToSMTRunnerEventsConvertor;
import com.intellij.execution.testframework.sm.runner.SMTestProxy;
import com.intellij.execution.testframework.sm.runner.events.*;
import com.intellij.openapi.progress.util.ColorProgressBar;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.Ref;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.TreeModel;
import javax.swing.tree.TreePath;
import java.awt.*;
/**
* @author Roman Chernyatchik
*/
public class SMTestRunnerResultsFormTest extends BaseSMTRunnerTestCase {
  // Console view under test; created fresh for every test in setUp().
  private SMTRunnerConsoleView myConsole;
  // Converts raw test-framework events into SMTestProxy state changes.
  private GeneralToSMTRunnerEventsConvertor myEventsProcessor;
  // Swing tree model backing the results tree; inspected directly by tree-shape tests.
  private TreeModel myTreeModel;
  // The results form (viewer) whose counters/colors the tests assert on.
  private SMTestRunnerResultsForm myResultsViewer;
  private TestConsoleProperties myConsoleProperties;
  // Root proxy of the test tree; parent for all proxies created in the tests.
  private SMTestProxy.SMRootTestProxy myTestsRootNode;
  @Override
  protected void setUp() throws Exception {
    super.setUp();
    myConsoleProperties = createConsoleProperties();
    // Disable every UI auto-behavior so tests observe raw counter/tree state only.
    TestConsoleProperties.HIDE_PASSED_TESTS.set(myConsoleProperties, false);
    TestConsoleProperties.OPEN_FAILURE_LINE.set(myConsoleProperties, false);
    TestConsoleProperties.SCROLL_TO_SOURCE.set(myConsoleProperties, false);
    TestConsoleProperties.SELECT_FIRST_DEFECT.set(myConsoleProperties, false);
    TestConsoleProperties.TRACK_RUNNING_TEST.set(myConsoleProperties, false);
    final ExecutionEnvironment environment = new ExecutionEnvironment();
    myConsole = new SMTRunnerConsoleView(myConsoleProperties, environment);
    myConsole.initUI();
    myResultsViewer = myConsole.getResultsViewer();
    myTestsRootNode = myResultsViewer.getTestsRootNode();
    // Wire the events processor to the viewer so fired events update the tree/counters.
    myEventsProcessor = new GeneralToSMTRunnerEventsConvertor(myResultsViewer.getTestsRootNode(), "SMTestFramework");
    myEventsProcessor.addEventsListener(myResultsViewer);
    myTreeModel = myResultsViewer.getTreeView().getModel();
  }
  @Override
  protected void tearDown() throws Exception {
    // Dispose in reverse dependency order before the base class cleans up the project.
    Disposer.dispose(myEventsProcessor);
    Disposer.dispose(myConsole);
    super.tearDown();
  }
  // Root node must exist and survive the end-of-testing notification.
  public void testGetTestsRootNode() {
    assertNotNull(myTestsRootNode);
    myResultsViewer.onTestingFinished(myTestsRootNode);
    assertNotNull(myResultsViewer.getTestsRootNode());
  }
  // Starting a run records a start time but no finished/total counts yet.
  public void testTestingStarted() {
    myResultsViewer.onTestingStarted(myTestsRootNode);
    assertTrue(myResultsViewer.getStartTime() > 0);
    assertEquals(0, myResultsViewer.getFinishedTestCount());
    assertEquals(0, myResultsViewer.getTotalTestCount());
  }
  // Each onTestStarted increments the started-test counter.
  public void testOnTestStarted() {
    myResultsViewer.onTestStarted(createTestProxy("some_test", myTestsRootNode));
    assertEquals(1, myResultsViewer.getStartedTestCount());
    myResultsViewer.onTestStarted(createTestProxy("some_test2", myTestsRootNode));
    assertEquals(2, myResultsViewer.getStartedTestCount());
  }
  // Declared suite count is kept until more tests start than declared.
  public void testCount() {
    myResultsViewer.onTestsCountInSuite(1);
    assertEquals(1, myResultsViewer.getTotalTestCount());
    myResultsViewer.onTestStarted(createTestProxy("some_test", myTestsRootNode));
    assertEquals(1, myResultsViewer.getTotalTestCount());
    // if exceeds - will be incremented
    myResultsViewer.onTestStarted(createTestProxy("some_test2", myTestsRootNode));
    assertEquals(2, myResultsViewer.getTotalTestCount());
  }
  // With no declared count the total stays 0 until testing finishes.
  public void testCount_UnSet() {
    myResultsViewer.onTestStarted(createTestProxy("some_test", myTestsRootNode));
    assertEquals(0, myResultsViewer.getTotalTestCount());
    myResultsViewer.onTestStarted(createTestProxy("some_test2", myTestsRootNode));
    assertEquals(0, myResultsViewer.getTotalTestCount());
    // count will be updated only on tests finished if wasn't set
    myResultsViewer.onTestingFinished(myTestsRootNode);
    assertEquals(2, myResultsViewer.getTotalTestCount());
  }
  // A failed test bumps the failure counter exactly once.
  public void testOnTestFailure() {
    final SMTestProxy test = createTestProxy(myTestsRootNode);
    myResultsViewer.onTestStarted(test);
    myResultsViewer.onTestFailed(test);
    assertEquals(1, myResultsViewer.getFailedTestCount());
    // NOTE(review): duplicate of the previous assertion — possibly meant to check a
    // different counter; confirm against original intent.
    assertEquals(1, myResultsViewer.getFailedTestCount());
  }
  // Finishing a started test moves it from started to finished.
  public void testOnTestFinished() {
    final SMTestProxy test = createTestProxy("some_test", myTestsRootNode);
    myResultsViewer.onTestStarted(test);
    assertEquals(1, myResultsViewer.getStartedTestCount());
    myResultsViewer.onTestFinished(test);
    assertEquals(1, myResultsViewer.getFinishedTestCount());
  }
  // Suite counts are additive across multiple onTestsCountInSuite calls.
  public void testOnTestsCountInSuite() {
    myResultsViewer.onTestsCountInSuite(200);
    assertEquals(0, myResultsViewer.getFinishedTestCount());
    assertEquals(200, myResultsViewer.getTotalTestCount());
    myResultsViewer.onTestsCountInSuite(50);
    assertEquals(250, myResultsViewer.getTotalTestCount());
  }
  // Total grows past the declared count once more tests start than were announced.
  public void testOnTestStart_ChangeTotal() {
    myResultsViewer.onTestsCountInSuite(2);
    myResultsViewer.onTestStarted(createTestProxy("some_test1", myTestsRootNode));
    assertEquals(2, myResultsViewer.getTotalTestCount());
    myResultsViewer.onTestStarted(createTestProxy("some_test2", myTestsRootNode));
    assertEquals(2, myResultsViewer.getTotalTestCount());
    myResultsViewer.onTestStarted(createTestProxy("some_test3", myTestsRootNode));
    assertEquals(3, myResultsViewer.getTotalTestCount());
    myResultsViewer.onTestStarted(createTestProxy("some_test4", myTestsRootNode));
    assertEquals(4, myResultsViewer.getTotalTestCount());
    myResultsViewer.onTestsCountInSuite(2);
    myResultsViewer.onTestStarted(createTestProxy("another_test1", myTestsRootNode));
    assertEquals(6, myResultsViewer.getTotalTestCount());
    myResultsViewer.onTestStarted(createTestProxy("another_test2", myTestsRootNode));
    assertEquals(6, myResultsViewer.getTotalTestCount());
    myResultsViewer.onTestStarted(createTestProxy("another_test3", myTestsRootNode));
    assertEquals(7, myResultsViewer.getTotalTestCount());
  }
  // Finishing the run records an end time.
  public void testOnFinishTesting_EndTime() {
    myResultsViewer.onTestingFinished(myTestsRootNode);
    assertTrue(myResultsViewer.getEndTime() > 0);
  }
  // Suites do not contribute to the finished-test counter.
  public void testOnSuiteStarted() {
    assertEquals(0, myResultsViewer.getFinishedTestCount());
    myResultsViewer.onSuiteStarted(createSuiteProxy(myTestsRootNode));
    assertEquals(0, myResultsViewer.getFinishedTestCount());
  }
  // Selecting a proxy and requesting statistics must propagate the selection (with
  // focus) to the registered PropagateSelectionHandler, for both tests and suites.
  public void testChangeSelectionAction() {
    final Marker onSelectedHappend = new Marker();
    final Ref<SMTestProxy> proxyRef = new Ref<SMTestProxy>();
    final Ref<Boolean> focusRequestedRef = new Ref<Boolean>();
    // Capture the propagated proxy and focus flag for later assertions.
    myResultsViewer.setShowStatisticForProxyHandler(new PropagateSelectionHandler() {
      @Override
      public void handlePropagateSelectionRequest(@Nullable final SMTestProxy selectedTestProxy, @NotNull final Object sender,
                                                  final boolean requestFocus) {
        onSelectedHappend.set();
        proxyRef.set(selectedTestProxy);
        focusRequestedRef.set(requestFocus);
      }
    });
    final SMTestProxy suite = createSuiteProxy("suite", myTestsRootNode);
    final SMTestProxy test = createTestProxy("test", myTestsRootNode);
    myResultsViewer.onSuiteStarted(suite);
    myResultsViewer.onTestStarted(test);
    //On test
    myResultsViewer.selectAndNotify(test);
    myResultsViewer.showStatisticsForSelectedProxy();
    assertTrue(onSelectedHappend.isSet());
    assertEquals(test, proxyRef.get());
    assertTrue(focusRequestedRef.get());
    //on suite
    //reset markers
    onSelectedHappend.reset();
    proxyRef.set(null);
    focusRequestedRef.set(null);
    myResultsViewer.selectAndNotify(suite);
    myResultsViewer.showStatisticsForSelectedProxy();
    assertTrue(onSelectedHappend.isSet());
    assertEquals(suite, proxyRef.get());
    assertTrue(focusRequestedRef.get());
  }
  // Regression test (RUBY-1767): with HIDE_PASSED_TESTS the passed test must be
  // removed from the tree once testing finishes, leaving only the failed one.
  public void testRuby_1767() throws InterruptedException {
    TestConsoleProperties.HIDE_PASSED_TESTS.set(myConsoleProperties, true);
    myEventsProcessor.onStartTesting();
    myEventsProcessor.onSuiteStarted(new TestSuiteStartedEvent("suite", null));
    myResultsViewer.performUpdate();
    myEventsProcessor.onTestStarted(new TestStartedEvent("test_failed", null));
    myResultsViewer.performUpdate();
    myEventsProcessor.onTestFailure(new TestFailedEvent("test_failed", "", "", false, null, null));
    myResultsViewer.performUpdate();
    myEventsProcessor.onTestFinished(new TestFinishedEvent("test_failed", 10l));
    myResultsViewer.performUpdate();
    myEventsProcessor.onTestStarted(new TestStartedEvent("test", null));
    myResultsViewer.performUpdate();
    // Both tests are still shown while the run is in progress.
    assertEquals(2, myTreeModel.getChildCount(myTreeModel.getChild(myTreeModel.getRoot(), 0)));
    myEventsProcessor.onTestFinished(new TestFinishedEvent("test", 10l));
    assertEquals(2, myTreeModel.getChildCount(myTreeModel.getChild(myTreeModel.getRoot(), 0)));
    myEventsProcessor.onSuiteFinished(new TestSuiteFinishedEvent("suite"));
    myEventsProcessor.onFinishTesting();
    // After the run the passed "test" is hidden; only "test_failed" remains.
    assertEquals(1, myTreeModel.getChildCount(myTreeModel.getChild(myTreeModel.getRoot(), 0)));
  }
  // When the root has a single child suite, that suite is auto-expanded but its
  // nested suite is not.
  public void testExpandIfOnlyOneRootChild() throws InterruptedException {
    myEventsProcessor.onStartTesting();
    myEventsProcessor.onSuiteStarted(new TestSuiteStartedEvent("suite1", null));
    myResultsViewer.performUpdate();
    myEventsProcessor.onSuiteStarted(new TestSuiteStartedEvent("suite2", null));
    myResultsViewer.performUpdate();
    myEventsProcessor.onTestStarted(new TestStartedEvent("test_failed", null));
    myResultsViewer.performUpdate();
    myEventsProcessor.onTestFailure(new TestFailedEvent("test_failed", "", "", false, null, null));
    myResultsViewer.performUpdate();
    myEventsProcessor.onTestFinished(new TestFinishedEvent("test_failed", 10l));
    myResultsViewer.performUpdate();
    myEventsProcessor.onTestStarted(new TestStartedEvent("test", null));
    myResultsViewer.performUpdate();
    myEventsProcessor.onTestFinished(new TestFinishedEvent("test", 10l));
    myResultsViewer.performUpdate();
    myEventsProcessor.onSuiteFinished(new TestSuiteFinishedEvent("suite2"));
    myResultsViewer.performUpdate();
    myEventsProcessor.onSuiteFinished(new TestSuiteFinishedEvent("suite1"));
    myResultsViewer.performUpdate();
    myEventsProcessor.onFinishTesting();
    myResultsViewer.performUpdate();
    final DefaultMutableTreeNode suite1Node =
      (DefaultMutableTreeNode)myTreeModel.getChild(myTreeModel.getRoot(), 0);
    final DefaultMutableTreeNode suite2Node =
      (DefaultMutableTreeNode)myTreeModel.getChild(suite1Node, 0);
    assertTrue(myResultsViewer.getTreeView().isExpanded(new TreePath(suite1Node.getPath())));
    assertFalse(myResultsViewer.getTreeView().isExpanded(new TreePath(suite2Node.getPath())));
  }
  //with test tree build before start actual tests
  // Pre-built tree nodes must be reused when the same test starts/finishes twice
  // (e.g. TestNG invocationCount=2), producing two failure records.
  public void testPrependTreeAndSameTestsStartFinish() throws Exception {
    //send tree
    myEventsProcessor.onSuiteTreeStarted("suite1", null);
    myEventsProcessor.onSuiteTreeNodeAdded("test1", null);
    myEventsProcessor.onSuiteTreeEnded("suite1");
    //start testing
    myEventsProcessor.onStartTesting();
    //invocation count for method set to 2
    for(int i = 0; i < 2; i++) {
      myEventsProcessor.onSuiteStarted(new TestSuiteStartedEvent("suite1", null));
      myEventsProcessor.onTestStarted(new TestStartedEvent("test1", null));
      myResultsViewer.performUpdate();
      myEventsProcessor.onTestFailure(new TestFailedEvent("test1", "", "", false, "a", "b"));
      myResultsViewer.performUpdate();
      myEventsProcessor.onTestFinished(new TestFinishedEvent("test1", 10l));
      myResultsViewer.performUpdate();
      myEventsProcessor.onSuiteFinished(new TestSuiteFinishedEvent("suite1"));
      myResultsViewer.performUpdate();
    }
    myEventsProcessor.onFinishTesting();
    myResultsViewer.performUpdate();
    //ensure 2 nodes found
    assertEquals(2, myResultsViewer.getFailedTestCount());
  }
public void testCustomProgress_General() {
myResultsViewer.onCustomProgressTestsCategory("foo", 4);
myResultsViewer.onTestStarted(createTestProxy("some_test1", myTestsRootNode));
assertEquals(0, myResultsViewer.getFinishedTestCount());
myResultsViewer.onCustomProgressTestStarted();
assertEquals(1, myResultsViewer.getStartedTestCount());
myResultsViewer.onTestStarted(createTestProxy("some_test2", myTestsRootNode));
assertEquals(1, myResultsViewer.getStartedTestCount());
myResultsViewer.onCustomProgressTestStarted();
assertEquals(2, myResultsViewer.getStartedTestCount());
}
public void testCustomProgress_MixedMde() {
// enable custom mode
myResultsViewer.onCustomProgressTestsCategory("foo", 4);
myResultsViewer.onTestStarted(createTestProxy("some_test1", myTestsRootNode));
assertEquals(0, myResultsViewer.getFinishedTestCount());
myResultsViewer.onCustomProgressTestStarted();
assertEquals(1, myResultsViewer.getStartedTestCount());
myResultsViewer.onTestStarted(createTestProxy("some_test2", myTestsRootNode));
assertEquals(1, myResultsViewer.getStartedTestCount());
myResultsViewer.onCustomProgressTestStarted();
assertEquals(2, myResultsViewer.getStartedTestCount());
// disable custom mode
myResultsViewer.onCustomProgressTestsCategory(null, 0);
assertEquals(2, myResultsViewer.getStartedTestCount());
myResultsViewer.onCustomProgressTestStarted();
assertEquals(2, myResultsViewer.getStartedTestCount());
myResultsViewer.onTestStarted(createTestProxy("some_test1", myTestsRootNode));
assertEquals(3, myResultsViewer.getStartedTestCount());
assertEquals(3, myResultsViewer.getStartedTestCount());
myResultsViewer.onCustomProgressTestStarted();
assertEquals(3, myResultsViewer.getStartedTestCount());
myResultsViewer.onTestStarted(createTestProxy("some_test1", myTestsRootNode));
assertEquals(4, myResultsViewer.getStartedTestCount());
}
public void testCustomProgress_EmptySuite() {
  // Custom mode is on, but the suite completes without running any test:
  // the total stays 0 and the status color must remain the "no tests" gray.
  myResultsViewer.onCustomProgressTestsCategory("foo", 0);
  final SMTestProxy suite = createSuiteProxy("some_suite", myTestsRootNode);
  myTestsRootNode.setStarted();
  myResultsViewer.onSuiteStarted(suite);
  suite.setStarted();
  suite.setFinished();
  myResultsViewer.onSuiteFinished(suite);
  myTestsRootNode.setFinished();
  myResultsViewer.onSuiteFinished(myTestsRootNode);
  myResultsViewer.onTestingFinished(myTestsRootNode);
  assertEquals(0, myResultsViewer.getTotalTestCount());
  assertEquals(Color.LIGHT_GRAY, myResultsViewer.getTestsStatusColor());
}

public void testCustomProgress_Failure() {
  // In custom mode only onCustomProgressTestFailed() increments the failed
  // counter; the ordinary onTestFailed() notification must be ignored.
  myResultsViewer.onCustomProgressTestsCategory("foo", 4);
  final SMTestProxy test1 = createTestProxy("some_test1", myTestsRootNode);
  myResultsViewer.onTestStarted(test1);
  myResultsViewer.onCustomProgressTestStarted();
  myResultsViewer.onTestFailed(test1);
  assertEquals(0, myResultsViewer.getFailedTestCount());
  myResultsViewer.onCustomProgressTestFailed();
  assertEquals(1, myResultsViewer.getFailedTestCount());
  assertEquals(ColorProgressBar.RED, myResultsViewer.getTestsStatusColor());
}

public void testProgressBar_Ignored() {
  // An ignored test counts as ignored, not failed, and keeps the bar green.
  final SMTestProxy test1 = createTestProxy("some_test1", myTestsRootNode);
  myResultsViewer.onTestStarted(test1);
  myResultsViewer.performUpdate();
  myResultsViewer.onTestIgnored(test1);
  myResultsViewer.performUpdate();
  assertEquals(0, myResultsViewer.getFailedTestCount());
  assertEquals(1, myResultsViewer.getIgnoredTestCount());
  assertEquals(ColorProgressBar.GREEN, myResultsViewer.getTestsStatusColor());
}

public void testCustomProgress_Terminated() {
  // Terminating a run with a started-but-unfinished test must not turn the
  // progress bar red.
  myResultsViewer.onTestingStarted(myTestsRootNode);
  final SMTestProxy test1 = createTestProxy("some_test1", myTestsRootNode);
  myResultsViewer.onTestStarted(test1);
  myResultsViewer.onTestingFinished(myTestsRootNode);
  assertEquals(ColorProgressBar.GREEN, myResultsViewer.getTestsStatusColor());
}

public void testCustomProgress_NotRun() {
  // Finishing a run in which no test ever started leaves the neutral gray.
  myResultsViewer.onTestingStarted(myTestsRootNode);
  myResultsViewer.onTestingFinished(myTestsRootNode);
  assertEquals(Color.LIGHT_GRAY, myResultsViewer.getTestsStatusColor());
}
public void testCustomProgress_NotRun_ReporterAttached() {
  myResultsViewer.onTestingStarted(myTestsRootNode);
  myTestsRootNode.setTestsReporterAttached();
  myResultsViewer.onTestingFinished(myTestsRootNode);
  // e.g. a reporter was attached but tests were not actually launched;
  // seems it cannot happen in the current implementation but is the expected
  // behaviour for the future: an attached reporter with zero tests is an error.
  assertEquals(ColorProgressBar.RED, myResultsViewer.getTestsStatusColor());
}

public void testCustomProgress_Terminated_SmthFailed() {
  // If anything failed before termination, the bar must be red at the end.
  myResultsViewer.onTestingStarted(myTestsRootNode);
  final SMTestProxy test1 = createTestProxy("some_test1", myTestsRootNode);
  myResultsViewer.onTestStarted(test1);
  myResultsViewer.onTestFailed(test1);
  myResultsViewer.onTestStarted(createTestProxy("some_test2", myTestsRootNode));
  myResultsViewer.onTestingFinished(myTestsRootNode);
  assertEquals(ColorProgressBar.RED, myResultsViewer.getTestsStatusColor());
}

public void testCustomProgress_UnSetCount() {
  // A declared count of 0 means "unknown": the total stays 0 while running.
  myResultsViewer.onCustomProgressTestsCategory("foo", 0);
  assertEquals(0, myResultsViewer.getTotalTestCount());
  myResultsViewer.onCustomProgressTestStarted();
  assertEquals(0, myResultsViewer.getTotalTestCount());
  myResultsViewer.onCustomProgressTestStarted();
  assertEquals(0, myResultsViewer.getTotalTestCount());
  // count will be updated only on tests finished if it wasn't set explicitly
  myResultsViewer.onTestingFinished(myTestsRootNode);
  assertEquals(2, myResultsViewer.getTotalTestCount());
}

public void testCustomProgress_IncreaseCount() {
  // When more custom tests start than were announced, the total grows along.
  myResultsViewer.onCustomProgressTestsCategory("foo", 1);
  assertEquals(1, myResultsViewer.getTotalTestCount());
  myResultsViewer.onCustomProgressTestStarted();
  assertEquals(1, myResultsViewer.getTotalTestCount());
  myResultsViewer.onCustomProgressTestStarted();
  assertEquals(2, myResultsViewer.getTotalTestCount());
}

public void testCustomProgress_IncreaseCount_MixedMode() {
  // custom mode
  myResultsViewer.onCustomProgressTestsCategory("foo", 1);
  assertEquals(1, myResultsViewer.getTotalTestCount());
  myResultsViewer.onCustomProgressTestStarted();
  assertEquals(1, myResultsViewer.getTotalTestCount());
  myResultsViewer.onCustomProgressTestStarted();
  assertEquals(2, myResultsViewer.getTotalTestCount());
  // disable custom mode: the accumulated total is kept and ordinary
  // suite/test notifications keep growing it from there.
  myResultsViewer.onCustomProgressTestsCategory(null, 0);
  assertEquals(2, myResultsViewer.getTotalTestCount());
  myResultsViewer.onTestsCountInSuite(1);
  assertEquals(3, myResultsViewer.getTotalTestCount());
  myResultsViewer.onTestStarted(createTestProxy("some_test1", myTestsRootNode));
  assertEquals(3, myResultsViewer.getTotalTestCount());
  myResultsViewer.onTestStarted(createTestProxy("some_test2", myTestsRootNode));
  assertEquals(4, myResultsViewer.getTotalTestCount());
}
//TODO categories - mixed
public void testCustomProgress_MentionedCategories_CategoryWithoutName() {
  // enable custom mode; announcing a category alone mentions nothing.
  assertTrue(myResultsViewer.getMentionedCategories().isEmpty());
  myResultsViewer.onCustomProgressTestsCategory("foo", 4);
  assertTrue(myResultsViewer.getMentionedCategories().isEmpty());
}

public void testCustomProgress_MentionedCategories_DefaultCategory() {
  // enable custom mode; a custom test start without a declared category
  // records no category name either.
  assertTrue(myResultsViewer.getMentionedCategories().isEmpty());
  myResultsViewer.onCustomProgressTestStarted();
  assertTrue(myResultsViewer.getMentionedCategories().isEmpty());
}

public void testCustomProgress_MentionedCategories_OneCustomCategory() {
  // enable custom mode
  myResultsViewer.onCustomProgressTestsCategory("Foo", 4);
  assertTrue(myResultsViewer.getMentionedCategories().isEmpty());
  // an ordinary test start does not mention the custom category...
  myResultsViewer.onTestStarted(createTestProxy("some_test1", myTestsRootNode));
  assertTrue(myResultsViewer.getMentionedCategories().isEmpty());
  // ...only a custom-progress start does.
  myResultsViewer.onCustomProgressTestStarted();
  assertSameElements(myResultsViewer.getMentionedCategories(), "Foo");
  // disable custom mode: the mention is retained.
  myResultsViewer.onCustomProgressTestsCategory(null, 0);
  assertSameElements(myResultsViewer.getMentionedCategories(), "Foo");
}

public void testCustomProgress_MentionedCategories_SeveralCategories() {
  // enable custom mode
  myResultsViewer.onCustomProgressTestsCategory("Foo", 4);
  assertTrue(myResultsViewer.getMentionedCategories().isEmpty());
  myResultsViewer.onTestStarted(createTestProxy("some_test1", myTestsRootNode));
  assertTrue(myResultsViewer.getMentionedCategories().isEmpty());
  myResultsViewer.onCustomProgressTestStarted();
  assertSameElements(myResultsViewer.getMentionedCategories(), "Foo");
  // disable custom mode: an ordinary test start now adds the default category.
  myResultsViewer.onCustomProgressTestsCategory(null, 0);
  myResultsViewer.onCustomProgressTestStarted();
  assertSameElements(myResultsViewer.getMentionedCategories(), "Foo");
  myResultsViewer.onTestStarted(createTestProxy("some_test2", myTestsRootNode));
  assertSameElements(myResultsViewer.getMentionedCategories(), "Foo", TestsPresentationUtil.DEFAULT_TESTS_CATEGORY);
}
/**
 * In custom mode only onCustomProgressTestStarted() advances the started
 * counter; after custom mode is disabled, ordinary onTestStarted() calls
 * advance it again and custom notifications are ignored.
 * <p>
 * NOTE(review): despite the "MentionedCategories" name this body exercises
 * the started-test counter and mirrors an earlier started-count test —
 * possibly a copy/paste; confirm the intent before renaming or removing.
 */
public void testCustomProgress_MentionedCategories() {
  // enable custom mode
  assertTrue(myResultsViewer.getMentionedCategories().isEmpty());
  myResultsViewer.onCustomProgressTestsCategory("foo", 4);
  myResultsViewer.onTestStarted(createTestProxy("some_test1", myTestsRootNode));
  assertEquals(0, myResultsViewer.getStartedTestCount());
  myResultsViewer.onCustomProgressTestStarted();
  assertEquals(1, myResultsViewer.getStartedTestCount());
  myResultsViewer.onTestStarted(createTestProxy("some_test2", myTestsRootNode));
  assertEquals(1, myResultsViewer.getStartedTestCount());
  myResultsViewer.onCustomProgressTestStarted();
  assertEquals(2, myResultsViewer.getStartedTestCount());
  // disable custom mode
  myResultsViewer.onCustomProgressTestsCategory(null, 0);
  assertEquals(2, myResultsViewer.getStartedTestCount());
  myResultsViewer.onCustomProgressTestStarted();
  assertEquals(2, myResultsViewer.getStartedTestCount());
  myResultsViewer.onTestStarted(createTestProxy("some_test1", myTestsRootNode));
  // Removed a duplicated assertEquals(3, ...) that asserted the exact same
  // value twice in a row here.
  assertEquals(3, myResultsViewer.getStartedTestCount());
  myResultsViewer.onCustomProgressTestStarted();
  assertEquals(3, myResultsViewer.getStartedTestCount());
  myResultsViewer.onTestStarted(createTestProxy("some_test1", myTestsRootNode));
  assertEquals(4, myResultsViewer.getStartedTestCount());
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. The ASF
* licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.hadoop.ozone.container.common.statemachine;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.ipc.ProtobufRpcEngine;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.metrics2.util.MBeans;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.ozone.protocolPB
.StorageContainerDatanodeProtocolClientSideTranslatorPB;
import org.apache.hadoop.ozone.protocolPB.StorageContainerDatanodeProtocolPB;
import org.apache.hadoop.security.UserGroupInformation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.management.ObjectName;
import java.io.Closeable;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import static org.apache.hadoop.hdds.scm.HddsServerUtil
.getScmRpcTimeOutInMilliseconds;
/**
 * SCMConnectionManager - Acts as a class that manages the membership
 * information of the SCMs that we are working with.
 */
public class SCMConnectionManager
    implements Closeable, SCMConnectionManagerMXBean {
  private static final Logger LOG =
      LoggerFactory.getLogger(SCMConnectionManager.class);

  // Guards scmMachines. Readers take the read lock, mutators the write lock.
  private final ReadWriteLock mapLock;
  private final Map<InetSocketAddress, EndpointStateMachine> scmMachines;

  private final int rpcTimeout;
  private final Configuration conf;
  private final ObjectName jmxBean;

  /**
   * Creates a connection manager and registers it as a JMX MBean.
   *
   * @param conf - configuration used for RPC proxies and timeouts.
   */
  public SCMConnectionManager(Configuration conf) {
    this.mapLock = new ReentrantReadWriteLock();
    // Keep the timeout as a primitive; the previous code boxed it into a
    // Long only to immediately call intValue() on it.
    this.rpcTimeout = (int) getScmRpcTimeOutInMilliseconds(conf);
    this.scmMachines = new HashMap<>();
    this.conf = conf;
    jmxBean = MBeans.register("HddsDatanode",
        "SCMConnectionManager",
        this);
  }

  /**
   * Returns Config.
   *
   * @return ozoneConfig.
   */
  public Configuration getConf() {
    return conf;
  }

  /**
   * Get RpcTimeout.
   *
   * @return - Return RPC timeout in milliseconds.
   */
  public int getRpcTimeout() {
    return rpcTimeout;
  }

  /**
   * Takes a read lock.
   */
  public void readLock() {
    this.mapLock.readLock().lock();
  }

  /**
   * Releases the read lock.
   */
  public void readUnlock() {
    this.mapLock.readLock().unlock();
  }

  /**
   * Takes the write lock.
   */
  public void writeLock() {
    this.mapLock.writeLock().lock();
  }

  /**
   * Releases the write lock.
   */
  public void writeUnlock() {
    this.mapLock.writeLock().unlock();
  }

  /**
   * Adds a new SCM machine to the target set.
   *
   * @param address - Address of the SCM machine to send heartbeat to.
   * @throws IOException if the RPC proxy cannot be created.
   */
  public void addSCMServer(InetSocketAddress address) throws IOException {
    writeLock();
    try {
      if (scmMachines.containsKey(address)) {
        LOG.warn("Trying to add an existing SCM Machine to Machines group. " +
            "Ignoring the request.");
        return;
      }
      RPC.setProtocolEngine(conf, StorageContainerDatanodeProtocolPB.class,
          ProtobufRpcEngine.class);
      long version =
          RPC.getProtocolVersion(StorageContainerDatanodeProtocolPB.class);
      StorageContainerDatanodeProtocolPB rpcProxy = RPC.getProxy(
          StorageContainerDatanodeProtocolPB.class, version,
          address, UserGroupInformation.getCurrentUser(), conf,
          NetUtils.getDefaultSocketFactory(conf), getRpcTimeout());
      StorageContainerDatanodeProtocolClientSideTranslatorPB rpcClient =
          new StorageContainerDatanodeProtocolClientSideTranslatorPB(rpcProxy);
      EndpointStateMachine endPoint =
          new EndpointStateMachine(address, rpcClient, conf);
      scmMachines.put(address, endPoint);
    } finally {
      writeUnlock();
    }
  }

  /**
   * Removes a SCM machine from the target set.
   *
   * @param address - Address of the SCM machine to send heartbeat to.
   * @throws IOException if closing the endpoint fails.
   */
  public void removeSCMServer(InetSocketAddress address) throws IOException {
    writeLock();
    try {
      // Single lookup instead of containsKey() followed by get().
      EndpointStateMachine endPoint = scmMachines.get(address);
      if (endPoint == null) {
        LOG.warn("Trying to remove a non-existent SCM machine. " +
            "Ignoring the request.");
        return;
      }
      endPoint.close();
      scmMachines.remove(address);
    } finally {
      writeUnlock();
    }
  }

  /**
   * Returns all known RPCEndpoints.
   *
   * Returns an unmodifiable snapshot taken under the read lock. The previous
   * implementation leaked the live, mutable value view of the internal map
   * without any locking, so callers could observe (or cause) concurrent
   * modification while addSCMServer/removeSCMServer ran.
   *
   * @return - List of RPC Endpoints.
   */
  public Collection<EndpointStateMachine> getValues() {
    readLock();
    try {
      return Collections.unmodifiableCollection(
          new ArrayList<>(scmMachines.values()));
    } finally {
      readUnlock();
    }
  }

  @Override
  public void close() throws IOException {
    // getValues() now hands out a snapshot, so closing the endpoints cannot
    // race with concurrent add/remove calls mutating the map.
    getValues().forEach(endpointStateMachine
        -> IOUtils.cleanupWithLogger(LOG, endpointStateMachine));
    MBeans.unregister(jmxBean);
  }

  @Override
  public List<EndpointStateMachineMBean> getSCMServers() {
    readLock();
    try {
      return Collections
          .unmodifiableList(new ArrayList<>(scmMachines.values()));
    } finally {
      readUnlock();
    }
  }
}
| |
/*
* Copyright 2012-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.jvm.java.classes;
import com.facebook.buck.io.MorePaths;
import com.facebook.buck.io.ProjectFilesystem;
import com.facebook.buck.util.ZipFileTraversal;
import com.google.common.collect.ImmutableSet;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.FileVisitOption;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.Collection;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
import javax.annotation.Nullable;
/**
 * Traversal strategy for traversing a set of paths that themselves are traversed. The provided
 * paths can point to zip/jar files, directories of resource/class files, or individual files
 * themselves.
 * <p>
 * For example, given the input paths of { foo.zip, foo/, and foo.txt }, traverse would first
 * expand foo.zip and traverse its contents, then list the files recursively in foo/, and finally
 * visit the single file foo.txt.
 */
public abstract class ClasspathTraversal {
  /** Inputs to walk; each entry may be a zip/jar file, a directory, or a plain file. */
  private final Iterable<Path> paths;
  /** Filesystem used to resolve relative paths and to classify/walk entries. */
  private final ProjectFilesystem filesystem;

  public ClasspathTraversal(Collection<Path> paths, ProjectFilesystem filesystem) {
    this.paths = paths;
    this.filesystem = filesystem;
  }

  /**
   * Callback invoked once per file-like entry discovered during {@link #traverse()}.
   *
   * @param fileLike the entry being visited (zip entry, directory member, or plain file).
   * @throws IOException if reading the entry fails.
   */
  public abstract void visit(FileLike fileLike) throws IOException;

  /**
   * Subclasses can override this method to return a value of any type. This often represents some
   * sort of cumulative value that is computed as a result of the traversal.
   */
  // TODO(bolinfest): Change this from Object to a generic <T>.
  @Nullable
  public Object getResult() {
    return null;
  }

  /**
   * Walks every input path in order, dispatching to the adapter matching the path kind
   * (zip/jar, directory, or single file) and calling {@link #visit(FileLike)} per entry.
   */
  public final void traverse() throws IOException {
    for (Path path : paths) {
      ClasspathTraverser adapter = createTraversalAdapter(filesystem.getPathForRelativePath(path));
      adapter.traverse(this);
    }
  }

  /**
   * Picks the traversal strategy for a resolved path: directories are walked recursively,
   * files with a "jar"/"zip" extension (case-insensitive) are expanded, any other existing
   * file is visited as-is.
   */
  private ClasspathTraverser createTraversalAdapter(Path path) {
    String extension = MorePaths.getFileExtension(path);
    if (filesystem.isDirectory(path)) {
      return new DirectoryTraversalAdapter(filesystem, path);
    } else if (filesystem.isFile(path)) {
      if (extension.equalsIgnoreCase("jar") || extension.equalsIgnoreCase("zip")) {
        return new ZipFileTraversalAdapter(path);
      } else {
        return new FileTraversalAdapter(path);
      }
    } else {
      // Neither a directory nor a regular file (e.g. the path does not exist).
      throw new IllegalArgumentException("Unsupported classpath traversal input: " + path);
    }
  }

  /** Expands a zip/jar file and visits each of its entries. */
  private static class ZipFileTraversalAdapter implements ClasspathTraverser {
    private final Path file;

    public ZipFileTraversalAdapter(Path file) {
      this.file = file;
    }

    @Override
    public void traverse(final ClasspathTraversal traversal) throws IOException {
      ZipFileTraversal impl = new ZipFileTraversal(file) {
        @Override
        public void visit(ZipFile zipFile, ZipEntry zipEntry) throws IOException {
          traversal.visit(new FileLikeInZip(file, zipFile, zipEntry));
        }
      };
      impl.traverse();
    }

    /** A single entry inside a zip file exposed through the FileLike interface. */
    private static class FileLikeInZip extends AbstractFileLike {
      private final Path container;
      // NOTE(review): the ZipFile is assumed to remain open while visit() runs
      // (its lifetime is managed by ZipFileTraversal) — confirm before holding
      // on to FileLikeInZip instances beyond the callback.
      private final ZipFile zipFile;
      private final ZipEntry entry;

      public FileLikeInZip(Path container, ZipFile zipFile, ZipEntry entry) {
        this.container = container;
        this.zipFile = zipFile;
        this.entry = entry;
      }

      @Override
      public Path getContainer() {
        return container;
      }

      @Override
      public String getRelativePath() {
        return entry.getName();
      }

      @Override
      public long getSize() {
        return entry.getSize();
      }

      @Override
      public InputStream getInput() throws IOException {
        return zipFile.getInputStream(entry);
      }
    }
  }

  /** Recursively walks a directory (following symlinks) and visits every regular file. */
  private static class DirectoryTraversalAdapter implements ClasspathTraverser {
    private final ProjectFilesystem filesystem;
    private final Path directory;

    public DirectoryTraversalAdapter(ProjectFilesystem filesystem, Path directory) {
      this.filesystem = filesystem;
      this.directory = directory;
    }

    @Override
    public void traverse(final ClasspathTraversal traversal) throws IOException {
      filesystem.walkFileTree(
          directory,
          ImmutableSet.of(FileVisitOption.FOLLOW_LINKS),
          Integer.MAX_VALUE,
          new SimpleFileVisitor<Path>() {
            @Override
            public FileVisitResult visitFile(Path file, BasicFileAttributes attrs)
                throws IOException {
              // Normalize to '/' separators so entries look the same whether
              // they came from a directory or from a zip file.
              String relativePath =
                  MorePaths.pathWithUnixSeparators(MorePaths.relativize(directory, file));
              traversal.visit(new FileLikeInDirectory(file, relativePath));
              return FileVisitResult.CONTINUE;
            }
          });
    }
  }

  /** Visits exactly one stand-alone file, using its file name as the relative path. */
  private static class FileTraversalAdapter implements ClasspathTraverser {
    private final Path file;

    public FileTraversalAdapter(Path file) {
      this.file = file;
    }

    @Override
    public void traverse(ClasspathTraversal traversal) throws IOException {
      traversal.visit(new FileLikeInDirectory(file, file.getFileName().toString()));
    }
  }

  /** A file on disk exposed through the FileLike interface. */
  private static class FileLikeInDirectory extends AbstractFileLike {
    private final Path file;
    private final String relativePath;

    public FileLikeInDirectory(Path file, String relativePath) {
      // Currently, the only instances of FileLikeInDirectory appear to be the .class files
      // generated from an R.java in Android. The only exception is in unit tests.
      this.file = file;
      this.relativePath = relativePath;
    }

    @Override
    public Path getContainer() {
      return file;
    }

    @Override
    public String getRelativePath() {
      return relativePath;
    }

    @Override
    public long getSize() throws IOException {
      return Files.size(file);
    }

    @Override
    public InputStream getInput() throws IOException {
      return Files.newInputStream(file);
    }
  }
}
| |
/*
* #%L
* Wisdom-Framework
* %%
* Copyright (C) 2013 - 2014 Wisdom Framework
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package org.wisdom.engine.wrapper;
import com.google.common.base.Charsets;
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.primitives.Bytes;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.http.HttpContent;
import io.netty.handler.codec.http.HttpHeaders;
import io.netty.handler.codec.http.HttpRequest;
import io.netty.handler.codec.http.QueryStringDecoder;
import io.netty.handler.codec.http.multipart.Attribute;
import io.netty.handler.codec.http.multipart.FileUpload;
import io.netty.handler.codec.http.multipart.HttpPostRequestDecoder;
import io.netty.handler.codec.http.multipart.InterfaceHttpData;
import org.apache.commons.io.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.wisdom.api.content.BodyParser;
import org.wisdom.api.cookies.Cookie;
import org.wisdom.api.cookies.Cookies;
import org.wisdom.api.cookies.FlashCookie;
import org.wisdom.api.cookies.SessionCookie;
import org.wisdom.api.http.Context;
import org.wisdom.api.http.FileItem;
import org.wisdom.api.http.MimeTypes;
import org.wisdom.api.http.Request;
import org.wisdom.api.router.Route;
import org.wisdom.engine.server.ServiceAccessor;
import org.wisdom.engine.wrapper.cookies.CookieHelper;
import org.wisdom.engine.wrapper.cookies.FlashCookieImpl;
import org.wisdom.engine.wrapper.cookies.SessionCookieImpl;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URI;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicLong;
/**
* An implementation from the Wisdom HTTP context based on servlet objects.
* Not Thread Safe !
*/
public class ContextFromNetty implements Context {
// Source of unique context ids, shared by all ContextFromNetty instances.
private static AtomicLong ids = new AtomicLong();
// Unique id of this context, drawn from {@link #ids} at construction time.
private final long id;
// Accessor to the services (configuration, crypto...) this context relies on.
private final ServiceAccessor services;
private final FlashCookie flashCookie;
private final SessionCookie sessionCookie;
// Decoder for the query string of the request URI.
private final QueryStringDecoder queryStringDecoder;
// The matched route; not final because it is assigned after construction.
private /*not final*/ Route route;
/**
 * the request object, created in the constructor.
 */
private RequestFromNetty request;
/**
 * the data from the body sent by forms (field name to list of values).
 */
private Map<String, List<String>> form = Maps.newHashMap();
/**
 * List of uploaded files (completed uploads only).
 */
private List<FileItemFromNetty> files = Lists.newArrayList();
/**
 * The raw body, accumulated by {@code decodeContent} and capped by the
 * "request.body.max.size" configuration property.
 */
private byte[] raw;
/**
 * The logger.
 */
private static final Logger LOGGER = LoggerFactory.getLogger(ContextFromNetty.class);
/**
 * Creates a new context.
 *
 * @param accessor a structure containing the used services.
 * @param ctxt the channel handler context.
 * @param req the incoming HTTP Request.
 */
public ContextFromNetty(ServiceAccessor accessor, ChannelHandlerContext ctxt, HttpRequest req) {
    id = ids.getAndIncrement();
    services = accessor;
    queryStringDecoder = new QueryStringDecoder(req.getUri());
    request = new RequestFromNetty(this, ctxt, req);
    flashCookie = new FlashCookieImpl(accessor.getConfiguration());
    sessionCookie = new SessionCookieImpl(accessor.getCrypto(), accessor.getConfiguration());
    // Both cookies read their state from this context, so initialize them
    // only after the request wrapper has been created above.
    sessionCookie.init(this);
    flashCookie.init(this);
}
/**
 * A http content type may carry a character set suffix, like
 * "application/json; charset=utf-8".
 * <p>
 * This method strips everything from the first ';' on, returning only the
 * mime type itself, e.g. "application/json".
 * <p>
 * See also: http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.7.1
 *
 * @param rawContentType "application/json; charset=utf-8" or "application/json";
 *                       may be {@code null}
 * @return only the contentType without charset, e.g. "application/json", or
 *         {@code null} when {@code rawContentType} is {@code null}
 */
public static String getContentTypeFromContentTypeAndCharacterSetting(String rawContentType) {
    if (rawContentType == null) {
        // Be defensive: the previous implementation threw a NullPointerException here.
        return null;
    }
    // Single indexOf scan instead of contains() + split() (two scans plus a
    // regex-backed array allocation).
    int separator = rawContentType.indexOf(';');
    if (separator >= 0) {
        return rawContentType.substring(0, separator);
    }
    return rawContentType;
}
/**
 * Decodes the content of the request. Notice that the content can be split in several chunks.
 *
 * @param req the request
 * @param content the current HTTP content (a chunk, or the whole body)
 * @param decoder the Netty post-request decoder fed with the content.
 */
public void decodeContent(HttpRequest req, HttpContent content, HttpPostRequestDecoder decoder) {
    // Determine whether the content is chunked.
    boolean readingChunks = HttpHeaders.isTransferEncodingChunked(req);
    // Offer the content to the decoder.
    if (readingChunks) {
        // If needed, read content chunk by chunk.
        decoder.offer(content);
        readHttpDataChunkByChunk(decoder);
    } else {
        // Else, read content.
        // NOTE(review): only this non-chunked branch accumulates the raw body
        // into `raw`; chunked requests never populate it — confirm intended.
        if (content.content().isReadable()) {
            // We may have the content in different HTTP message, check if we already have a content.
            // Issue #257.
            // To avoid we run out of memory we cut the read body to 100Kb. This can be configured using the
            // "request.body.max.size" property.
            boolean exceeded = raw != null
                    && raw.length >=
                    services.getConfiguration().getIntegerWithDefault("request.body.max.size", 100 * 1024);
            if (!exceeded) {
                if (this.raw == null) {
                    // First (or only) body part: copy the readable bytes as-is,
                    // without moving the buffer's reader index.
                    this.raw = new byte[content.content().readableBytes()];
                    int readerIndex = content.content().readerIndex();
                    content.content().getBytes(readerIndex, this.raw);
                } else {
                    // Subsequent part: append to what was already captured.
                    byte[] bytes = new byte[content.content().readableBytes()];
                    int readerIndex = content.content().readerIndex();
                    content.content().getBytes(readerIndex, bytes);
                    this.raw = Bytes.concat(this.raw, bytes);
                }
            }
        }
        decoder.offer(content);
        try {
            // Collect the form attributes / file uploads decoded so far.
            for (InterfaceHttpData data : decoder.getBodyHttpDatas()) {
                readAttributeOrFile(data);
            }
        } catch (HttpPostRequestDecoder.NotEnoughDataDecoderException e) {
            LOGGER.debug("Error when decoding content, not enough data", e);
        }
    }
}
/**
 * Reads the request chunk by chunk, draining every value the decoder has
 * fully received so far and recording it as a form attribute or file upload.
 */
private void readHttpDataChunkByChunk(HttpPostRequestDecoder decoder) {
    try {
        while (decoder.hasNext()) {
            InterfaceHttpData data = decoder.next();
            if (data != null) {
                try {
                    // new value
                    readAttributeOrFile(data);
                } finally {
                    // Do not release the data if it's a file, we released it once everything is done.
                    if (data.getHttpDataType() != InterfaceHttpData.HttpDataType.FileUpload) {
                        data.release();
                    }
                }
            }
        }
    } catch (HttpPostRequestDecoder.EndOfDataDecoderException e) {
        // The decoder throws this once no more data is available; it is the
        // normal termination signal, hence the debug-level log only.
        LOGGER.debug("Error when decoding content, end of data reached", e);
    }
}
/**
 * Records one decoded body element: form attributes are appended to the
 * {@code form} multimap, completed file uploads are added to {@code files}.
 * Other data types are ignored.
 */
private void readAttributeOrFile(InterfaceHttpData data) {
    InterfaceHttpData.HttpDataType type = data.getHttpDataType();
    if (type == InterfaceHttpData.HttpDataType.Attribute) {
        Attribute attribute = (Attribute) data;
        try {
            String fieldName = attribute.getName();
            String fieldValue = attribute.getValue();
            // Append to the list of values collected so far for this field.
            List<String> collected = form.get(fieldName);
            if (collected == null) {
                collected = new ArrayList<>();
                form.put(fieldName, collected);
            }
            collected.add(fieldValue);
        } catch (IOException e) {
            LOGGER.warn("Error while reading attributes (form data)", e);
        }
    } else if (type == InterfaceHttpData.HttpDataType.FileUpload) {
        FileUpload upload = (FileUpload) data;
        if (upload.isCompleted()) {
            files.add(new FileItemFromNetty(upload));
        } else {
            // Partial uploads are dropped, not stored.
            LOGGER.warn("Un-complete file upload");
        }
    }
}
/**
 * The context id (unique).
 *
 * @return the unique identifier of this context.
 */
@Override
public Long id() {
    return id;
}

/**
 * Returns the current request.
 *
 * @return the request wrapper built around the incoming Netty request.
 */
@Override
public Request request() {
    return request;
}

/**
 * Returns the path that the controller should act upon.
 * <p>
 * For instance in servlets you could have something like a context prefix.
 * /myContext/app
 * <p>
 * If your route only defines /app it will work as the request path will
 * return only "/app". A context path is not returned.
 * <p>
 * It does NOT decode any parts of the url.
 * <p>
 * Interesting reads: -
 * http://www.lunatech-research.com/archives/2009/02/03/
 * what-every-web-developer-must-know-about-url-encoding -
 * http://stackoverflow
 * .com/questions/966077/java-reading-undecoded-url-from-servlet
 *
 * @return The path as seen by the server. Does exclude any container
 * set context prefixes. Not decoded.
 */
@Override
public String path() {
    return request().path();
}

/**
 * Returns the flash cookie. Flash cookies only live for one request. Good
 * uses are error messages to display. Almost everything else is bad use of
 * Flash Cookies.
 * <p>
 * A FlashCookie is usually not signed. Don't trust the content.
 *
 * @return the flash cookie of that request.
 */
@Override
public FlashCookie flash() {
    return flashCookie;
}

/**
 * Returns the client side session. It is a cookie. Therefore you cannot
 * store a lot of information inside the cookie. This is by intention.
 * <p>
 * If you have the feeling that the session cookie is too small for what you
 * want to achieve, think again. Most likely your design is wrong.
 *
 * @return the Session of that request / response cycle.
 */
@Override
public SessionCookie session() {
    return sessionCookie;
}

/**
 * Get cookie from context.
 *
 * @param cookieName Name of the cookie to retrieve
 * @return the cookie with that name or null.
 */
@Override
public Cookie cookie(String cookieName) {
    return request().cookie(cookieName);
}

/**
 * Checks whether the context contains a given cookie.
 *
 * @param cookieName Name of the cookie to check for
 * @return {@code true} if the context has a cookie with that name.
 */
@Override
public boolean hasCookie(String cookieName) {
    return request().cookie(cookieName) != null;
}

/**
 * Get all cookies from the context.
 *
 * @return all cookies attached to the current request.
 */
@Override
public Cookies cookies() {
    return request().cookies();
}

/**
 * Get the context path on which the application is running.
 * <p>
 * This implementation always returns the empty string.
 *
 * @return the context-path with a leading "/" or "" if running on root
 */
@Override
public String contextPath() {
    return "";
}
/**
 * Get the parameter with the given key from the request. The parameter may
 * either be a query parameter, or in the case of form submissions, may be a
 * form parameter.
 * <p>
 * When the parameter is multivalued, returns the first value. Query-string
 * parameters take precedence over form parameters with the same name.
 * <p>
 * The parameter is decoded by default.
 *
 * @param name The key of the parameter
 * @return The value, or null if no parameter was found.
 * @see #parameterMultipleValues
 */
@Override
public String parameter(String name) {
    Map<String, List<String>> parameters = queryStringDecoder.parameters();
    if (parameters != null && parameters.containsKey(name)) {
        // Return only the first one.
        return parameters.get(name).get(0);
    }
    // Also check form
    // NOTE(review): `form` is initialized to an empty map at declaration, so
    // the null check below looks redundant — confirm nothing nulls it out.
    if (form() != null && form.containsKey(name)) {
        return form().get(name).get(0);
    }
    return null;
}

/**
 * Returns the form data as the request attributes.
 *
 * @return the same map as {@link #form()}.
 */
@Override
public Map<String, List<String>> attributes() {
    return form();
}

/**
 * Returns the form data (field name to list of submitted values).
 */
@Override
public Map<String, List<String>> form() {
    return form;
}
/**
 * Gets every value of the query parameter with the given name.
 * <p>
 * The parameter is decoded by default.
 *
 * @param name The key of the parameter
 * @return The values, possibly an empty list.
 */
@Override
public List<String> parameterMultipleValues(String name) {
    Map<String, List<String>> queryParameters = queryStringDecoder.parameters();
    boolean present = queryParameters != null && queryParameters.containsKey(name);
    return present ? queryParameters.get(name) : new ArrayList<String>();
}
/**
 * Same like {@link #parameter(String)}, but returns the given default value
 * instead of {@code null} when the parameter cannot be found.
 * <p>
 * The parameter is decoded by default.
 *
 * @param name The name of the parameter
 * @param defaultValue A default value if parameter not found.
 * @return The value of the parameter or the defaultValue if not found.
 */
@Override
public String parameter(String name, String defaultValue) {
    String value = parameter(name);
    return value == null ? defaultValue : value;
}

/**
 * Same like {@link #parameter(String)}, but converts the parameter to
 * Integer if found.
 * <p>
 * The parameter is decoded by default.
 *
 * @param name The name of the parameter
 * @return The value of the parameter or null if not found or not parseable.
 */
@Override
public Integer parameterAsInteger(String name) {
    try {
        // Integer.parseInt(null) throws, which the catch maps to null.
        return Integer.parseInt(parameter(name));
    } catch (Exception e) { //NOSONAR
        return null;
    }
}

/**
 * Same like {@link #parameter(String, String)}, but converts the parameter
 * to Integer if found.
 * <p>
 * The parameter is decoded by default.
 *
 * @param name The name of the parameter
 * @param defaultValue A default value if parameter not found.
 * @return The value of the parameter or the defaultValue if not found.
 */
@Override
public Integer parameterAsInteger(String name, Integer defaultValue) {
    Integer value = parameterAsInteger(name);
    return value == null ? defaultValue : value;
}
/**
 * Same like {@link #parameter(String)}, but converts the
 * parameter to Boolean if found.
 * <p>
 * The parameter is decoded by default.
 * <p>
 * NOTE(review): {@code Boolean.parseBoolean(null)} returns {@code false} and
 * never throws, so a missing parameter yields {@code Boolean.FALSE} here
 * rather than {@code null} and the catch block is effectively dead code. The
 * two-argument overload below explicitly works around this by checking key
 * presence first — do not "fix" this without auditing callers.
 *
 * @param name The name parameter
 * @return {@code Boolean.TRUE} if the parameter equals "true" (case
 * insensitive), {@code Boolean.FALSE} otherwise (including when absent).
 */
@Override
public Boolean parameterAsBoolean(String name) {
    String parameter = parameter(name);
    try {
        return Boolean.parseBoolean(parameter);
    } catch (Exception e) { //NOSONAR
        return null;
    }
}

/**
 * Same like {@link #parameter(String, String)}, but converts the
 * parameter to Boolean if found.
 * <p>
 * The parameter is decoded by default.
 *
 * @param name The name of the parameter
 * @param defaultValue A default value if parameter not found.
 * @return The value of the parameter or the defaultValue if not found.
 */
@Override
public Boolean parameterAsBoolean(String name, boolean defaultValue) {
    // We have to check if the map contains the key, as the retrieval method returns false on missing key.
    if (!parameters().containsKey(name)) {
        return defaultValue;
    }
    Boolean parameter = parameterAsBoolean(name);
    if (parameter == null) {
        return defaultValue;
    }
    return parameter;
}
/**
* Get the path parameter for the given key.
* <p>
* The parameter will be decoded based on the RFCs.
* <p>
* Check out http://docs.oracle.com/javase/6/docs/api/java/net/URI.html for
* more information.
*
* @param name The name of the path parameter in a route. Eg
* /{myName}/rest/of/url
* @return The decoded path parameter, or null if no such path parameter was
* found.
*/
@Override
public String parameterFromPath(String name) {
String encodedParameter = route.getPathParametersEncoded(
path()).get(name);
if (encodedParameter == null) {
return null;
} else {
return URI.create(encodedParameter).getPath();
}
}
/**
* Get the path parameter for the given key.
* <p>
* Returns the raw path part. That means you can get stuff like:
* blue%2Fred%3Fand+green
*
* @param name The name of the path parameter in a route. Eg
* /{myName}/rest/of/url
* @return The encoded (!) path parameter, or null if no such path parameter
* was found.
*/
@Override
public String parameterFromPathEncoded(String name) {
return route.getPathParametersEncoded(path()).get(name);
}
/**
* Get the path parameter for the given key and convert it to Integer.
* <p>
* The parameter will be decoded based on the RFCs.
* <p>
* Check out http://docs.oracle.com/javase/6/docs/api/java/net/URI.html for
* more information.
*
* @param key the key of the path parameter
* @return the numeric path parameter, or null of no such path parameter is
* defined, or if it cannot be parsed to int
*/
@Override
public Integer parameterFromPathAsInteger(String key) {
String parameter = parameterFromPath(key);
if (parameter == null) {
return null;
} else {
return Integer.parseInt(parameter);
}
}
    /**
     * Get all the parameters from the request.
     * This method does not retrieve the form data, use {@link #form()} for this.
     *
     * @return The parameters
     */
    @Override
    public Map<String, List<String>> parameters() {
        // Query-string parameters only; form/body data is parsed elsewhere.
        return queryStringDecoder.parameters();
    }
/**
* Get the (first) request header with the given name.
*
* @return The header value
*/
@Override
public String header(String name) {
List<String> list = request.headers().get(name);
if (list != null && ! list.isEmpty()) {
return list.get(0);
}
return null;
}
    /**
     * Get all the request headers with the given name.
     *
     * @param name the header name
     * @return the header values, or {@code null} when the header is absent
     *         (see {@link #header(String)}, which null-checks this lookup)
     */
    @Override
    public List<String> headers(String name) {
        return request.headers().get(name);
    }
    /**
     * Get all the headers from the request, keyed by header name.
     *
     * @return The headers
     */
    @Override
    public Map<String, List<String>> headers() {
        return request.headers();
    }
    /**
     * Get the cookie value from the request, if defined.
     *
     * @param name The name of the cookie
     * @return The cookie value, or null if the cookie was not found
     */
    @Override
    public String cookieValue(String name) {
        // Delegates matching-by-name to the shared cookie helper.
        return CookieHelper.getCookieValue(name, request().cookies());
    }
/**
* This will give you the request body nicely parsed. You can register your
* own parsers depending on the request type.
* <p>
*
* @param classOfT The class of the result.
* @return The parsed request or null if something went wrong.
*/
@Override
public <T> T body(Class<T> classOfT) {
String rawContentType = request().contentType();
// If the Content-type: xxx header is not set we return null.
// we cannot parse that request.
if (rawContentType == null) {
return null;
}
// If Content-type is application/json; charset=utf-8 we split away the charset
// application/json
String contentTypeOnly = getContentTypeFromContentTypeAndCharacterSetting(
rawContentType);
BodyParser parser = services.getContentEngines().getBodyParserEngineForContentType(contentTypeOnly);
if (parser == null) {
return null;
}
return parser.invoke(this, classOfT);
}
/**
* Retrieves the request body as a String. If the request has no body, {@code null} is returned.
*
* @return the body as String
*/
public String body() {
return new String(raw, Charsets.UTF_8);
}
    /**
     * Retrieves the request body as a byte array. If the request has no body, {@code null} is returned.
     * <p>
     * NOTE(review): this returns the internal buffer without a defensive copy,
     * so callers could mutate the stored body — confirm this is intentional.
     *
     * @return the body as byte array, as sent in the request
     */
    @Override
    public byte[] raw() {
        return raw;
    }
/**
* Get the reader to read the request.
*
* @return The reader
*/
@Override
public BufferedReader reader() throws IOException {
if (raw != null) {
return IOUtils.toBufferedReader(new InputStreamReader(new ByteArrayInputStream(raw)));
}
return null;
}
    /**
     * Get the route for this context.
     *
     * @return The route, or null if {@link #route(Route)} has not been called yet
     */
    @Override
    public Route route() {
        return route;
    }
    /**
     * Sets the route associated with the current context.
     * <p>
     * May only be called once, with a non-null route; a second call fails the
     * checkState precondition.
     *
     * @param route the route (must not be null)
     */
    public void route(Route route) {
        // Can be called only once, with a non null route.
        // Order matters: reassignment is rejected before the null check.
        Preconditions.checkState(this.route == null);
        Preconditions.checkNotNull(route);
        this.route = route;
    }
    /**
     * Check if request is of type multipart. Important when you want to process
     * uploads for instance.
     * <p>
     * Also check out: http://commons.apache.org/fileupload/streaming.html
     * <p>
     * NOTE(review): this compares the Content-Type for exact equality; a value
     * carrying parameters (e.g. "; boundary=...") would not match — confirm
     * that contentType() strips parameters before this check.
     *
     * @return true if request is of type multipart.
     */
    @Override
    public boolean isMultipart() {
        return MimeTypes.MULTIPART.equals(request().contentType());
    }
    /**
     * Gets the collection of uploaded files.
     *
     * @return the collection of files, {@literal empty} if no files.
     */
    @Override
    public Collection<? extends FileItem> files() {
        // Populated during multipart decoding; released again by cleanup().
        return files;
    }
/**
* Gets the uploaded file having a form's field matching the given name.
*
* @param name the name of the field of the form that have uploaded the file
* @return the file object, {@literal null} if there are no file with this name
*/
@Override
public FileItem file(String name) {
for (FileItem item : files) {
// FileItem that do not have an real file attached, should not be returned
if (item.field().equals(name) && !Strings.isNullOrEmpty(item.name())) {
return item;
}
}
return null;
}
    /**
     * Releases uploaded files.
     * <p>
     * Decrements the reference count of each Netty upload buffer and clears
     * the decoder's accumulated request data so the memory can be reclaimed.
     */
    public void cleanup() {
        for (FileItemFromNetty file : files) {
            // Netty uploads are reference-counted; release frees the buffer.
            file.upload().release();
        }
        request().data().clear();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.catalog;
import java.io.FileInputStream;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.apache.camel.catalog.impl.CatalogHelper.loadText;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
public class CamelCatalogTest {
static CamelCatalog catalog;
private static final Logger LOG = LoggerFactory.getLogger(CamelCatalogTest.class);
    @BeforeAll
    public static void createCamelCatalog() {
        // One catalog shared by every test; it is read-only apart from the
        // addComponent(...) call in validateActiveMQProperties.
        catalog = new DefaultCamelCatalog();
    }
@Test
public void testGetVersion() throws Exception {
String version = catalog.getCatalogVersion();
assertNotNull(version);
String loaded = catalog.getLoadedVersion();
assertNotNull(loaded);
assertEquals(version, loaded);
}
@Test
public void testLoadVersion() throws Exception {
boolean result = catalog.loadVersion("1.0");
assertFalse(result);
String version = catalog.getCatalogVersion();
result = catalog.loadVersion(version);
assertTrue(result);
}
    @Test
    public void testFindComponentNames() throws Exception {
        // Spot-check a few well-known components shipped with the catalog.
        List<String> names = catalog.findComponentNames();
        assertNotNull(names);
        assertTrue(names.contains("file"));
        assertTrue(names.contains("log"));
        assertTrue(names.contains("docker"));
        assertTrue(names.contains("jms"));
        assertTrue(names.contains("activemq"));
        assertTrue(names.contains("zookeeper-master"));
    }
@Test
public void testFindOtherNames() throws Exception {
List<String> names = catalog.findOtherNames();
assertTrue(names.contains("hystrix"));
assertTrue(names.contains("leveldb"));
assertTrue(names.contains("swagger-java"));
assertTrue(names.contains("test-spring"));
assertFalse(names.contains("http-common"));
assertFalse(names.contains("kura"));
assertFalse(names.contains("core-osgi"));
assertFalse(names.contains("file"));
assertFalse(names.contains("ftp"));
assertFalse(names.contains("jetty"));
}
    @Test
    public void testFindDataFormatNames() throws Exception {
        // Spot-check a few well-known data formats shipped with the catalog.
        List<String> names = catalog.findDataFormatNames();
        assertNotNull(names);
        assertTrue(names.contains("bindy-csv"));
        assertTrue(names.contains("hl7"));
        assertTrue(names.contains("jaxb"));
        assertTrue(names.contains("syslog"));
        assertTrue(names.contains("asn1"));
        assertTrue(names.contains("zipfile"));
    }
@Test
public void testFindLanguageNames() throws Exception {
List<String> names = catalog.findLanguageNames();
assertTrue(names.contains("simple"));
assertTrue(names.contains("groovy"));
assertTrue(names.contains("mvel"));
assertTrue(names.contains("bean"));
assertTrue(names.contains("file"));
assertTrue(names.contains("xtokenize"));
assertTrue(names.contains("hl7terser"));
}
    @Test
    public void testFindModelNames() throws Exception {
        // Spot-check a few EIP/model definitions shipped with the catalog.
        List<String> names = catalog.findModelNames();
        assertNotNull(names);
        assertTrue(names.contains("from"));
        assertTrue(names.contains("to"));
        assertTrue(names.contains("recipientList"));
        assertTrue(names.contains("aggregate"));
        assertTrue(names.contains("split"));
        assertTrue(names.contains("loadBalance"));
        assertTrue(names.contains("circuitBreaker"));
        assertTrue(names.contains("saga"));
    }
    @Test
    public void testJsonSchema() throws Exception {
        // Every artifact kind (component, data format, language, model,
        // other) must expose a JSON schema.
        String schema = catalog.componentJSonSchema("docker");
        assertNotNull(schema);
        schema = catalog.dataFormatJSonSchema("hl7");
        assertNotNull(schema);
        schema = catalog.languageJSonSchema("groovy");
        assertNotNull(schema);
        schema = catalog.modelJSonSchema("aggregate");
        assertNotNull(schema);
        schema = catalog.otherJSonSchema("swagger-java");
        assertNotNull(schema);
        // lets make it possible to find bean/method using both names
        schema = catalog.modelJSonSchema("method");
        assertNotNull(schema);
        schema = catalog.modelJSonSchema("bean");
        assertNotNull(schema);
    }
@Test
public void testXmlSchema() throws Exception {
String schema = catalog.springSchemaAsXml();
assertNotNull(schema);
}
@Test
public void testArchetypeCatalog() throws Exception {
String schema = catalog.archetypeCatalogAsXml();
assertNotNull(schema);
}
@Test
public void testMain() throws Exception {
String schema = catalog.mainJsonSchema();
assertNotNull(schema);
}
    @Test
    public void testAsEndpointUriMapFile() throws Exception {
        Map<String, String> map = new HashMap<>();
        map.put("directoryName", "src/data/inbox");
        map.put("noop", "true");
        map.put("delay", "5000");
        // Path option becomes the URI path; the rest become sorted query params.
        String uri = catalog.asEndpointUri("file", map, true);
        assertEquals("file:src/data/inbox?delay=5000&noop=true", uri);
        // The XML variant must produce the same URI for these options.
        String uri2 = catalog.asEndpointUriXml("file", map, true);
        assertEquals("file:src/data/inbox?delay=5000&noop=true", uri2);
    }
    @Test
    public void testAsEndpointUriMapFtp() throws Exception {
        Map<String, String> map = new HashMap<>();
        map.put("host", "someserver");
        map.put("port", "21");
        map.put("directoryName", "foo");
        map.put("connectTimeout", "5000");
        // host:port/directory are assembled from the path options.
        String uri = catalog.asEndpointUri("ftp", map, true);
        assertEquals("ftp:someserver:21/foo?connectTimeout=5000", uri);
        String uri2 = catalog.asEndpointUriXml("ftp", map, true);
        assertEquals("ftp:someserver:21/foo?connectTimeout=5000", uri2);
    }
@Test
public void testAsEndpointUriMapJms() throws Exception {
Map<String, String> map = new HashMap<>();
map.put("destinationType", "queue");
map.put("destinationName", "foo");
String uri = catalog.asEndpointUri("jms", map, true);
assertEquals("jms:queue:foo", uri);
}
    @Test
    public void testAsEndpointUriNettyhttp() throws Exception {
        Map<String, String> map = new HashMap<>();
        // use http protocol
        map.put("protocol", "http");
        map.put("host", "localhost");
        map.put("port", "8080");
        map.put("path", "foo/bar");
        map.put("disconnect", "true");
        String uri = catalog.asEndpointUri("netty-http", map, true);
        assertEquals("netty-http:http:localhost:8080/foo/bar?disconnect=true", uri);
        // lets switch protocol
        map.put("protocol", "https");
        uri = catalog.asEndpointUri("netty-http", map, true);
        assertEquals("netty-http:https:localhost:8080/foo/bar?disconnect=true", uri);
        // lets set a query parameter in the path
        // (the embedded ?verbose=true must be preserved and merged with the
        // endpoint's own query options)
        map.put("path", "foo/bar?verbose=true");
        map.put("disconnect", "true");
        uri = catalog.asEndpointUri("netty-http", map, true);
        assertEquals("netty-http:https:localhost:8080/foo/bar?verbose=true&disconnect=true", uri);
    }
@Test
public void testAsEndpointUriTimer() throws Exception {
Map<String, String> map = new HashMap<>();
map.put("timerName", "foo");
map.put("period", "5000");
String uri = catalog.asEndpointUri("timer", map, true);
assertEquals("timer:foo?period=5000", uri);
}
    @Test
    public void testAsEndpointDefaultValue() throws Exception {
        Map<String, String> map = new HashMap<>();
        map.put("destinationName", "cheese");
        map.put("maxMessagesPerTask", "-1");
        // An explicitly supplied value is kept in the URI even if it happens
        // to be a non-default sentinel like -1.
        String uri = catalog.asEndpointUri("jms", map, true);
        assertEquals("jms:cheese?maxMessagesPerTask=-1", uri);
    }
    @Test
    public void testAsEndpointUriPropertiesPlaceholders() throws Exception {
        Map<String, String> map = new HashMap<>();
        map.put("timerName", "foo");
        map.put("period", "{{howoften}}");
        map.put("repeatCount", "5");
        // With encoding on, the {{ }} placeholder braces are percent-encoded...
        String uri = catalog.asEndpointUri("timer", map, true);
        assertEquals("timer:foo?period=%7B%7Bhowoften%7D%7D&repeatCount=5", uri);
        // ...with encoding off they are kept verbatim.
        uri = catalog.asEndpointUri("timer", map, false);
        assertEquals("timer:foo?period={{howoften}}&repeatCount=5", uri);
    }
    @Test
    public void testAsEndpointUriBeanLookup() throws Exception {
        Map<String, String> map = new HashMap<>();
        map.put("resourceUri", "foo.xslt");
        map.put("converter", "#myConverter");
        // The registry-lookup '#' prefix is percent-encoded when encoding is on...
        String uri = catalog.asEndpointUri("xslt", map, true);
        assertEquals("xslt:foo.xslt?converter=%23myConverter", uri);
        // ...and preserved as-is when encoding is off.
        uri = catalog.asEndpointUri("xslt", map, false);
        assertEquals("xslt:foo.xslt?converter=#myConverter", uri);
    }
    @Test
    public void testAsEndpointUriMapJmsRequiredOnly() throws Exception {
        Map<String, String> map = new HashMap<>();
        map.put("destinationName", "foo");
        // Only the required option yields a bare URI without query part.
        String uri = catalog.asEndpointUri("jms", map, true);
        assertEquals("jms:foo", uri);
        // Extra options are appended alphabetically as query parameters.
        map.put("deliveryPersistent", "false");
        map.put("allowNullBody", "true");
        uri = catalog.asEndpointUri("jms", map, true);
        assertEquals("jms:foo?allowNullBody=true&deliveryPersistent=false", uri);
        String uri2 = catalog.asEndpointUriXml("jms", map, true);
        assertEquals("jms:foo?allowNullBody=true&deliveryPersistent=false", uri2);
    }
@Test
public void testAsEndpointUriRestUriTemplate() throws Exception {
Map<String, String> map = new LinkedHashMap<>();
map.put("method", "get");
map.put("path", "api");
map.put("uriTemplate", "user/{id}");
String uri = catalog.asEndpointUri("rest", map, true);
assertEquals("rest:get:api:user/{id}", uri);
}
    @Test
    public void testAsEndpointUriNettyHttpHostnameWithDash() throws Exception {
        // Dashes inside hostnames must not be treated as option separators.
        Map<String, String> map = new LinkedHashMap<>();
        map.put("protocol", "http");
        map.put("host", "a-b-c.hostname.tld");
        map.put("port", "8080");
        map.put("path", "anything");
        String uri = catalog.asEndpointUri("netty-http", map, false);
        assertEquals("netty-http:http:a-b-c.hostname.tld:8080/anything", uri);
        // Same check with encoding enabled.
        map = new LinkedHashMap<>();
        map.put("protocol", "http");
        map.put("host", "a-b-c.server.net");
        map.put("port", "8888");
        map.put("path", "service/v3");
        uri = catalog.asEndpointUri("netty-http", map, true);
        assertEquals("netty-http:http:a-b-c.server.net:8888/service/v3", uri);
    }
    @Test
    public void testNettyHttpDynamicToIssueHost() throws Exception {
        // Round-trip: parse a URI into properties, then rebuild it.
        String uri = "netty-http:http://a-b-c.hostname.tld:8080/anything";
        Map<String, String> params = catalog.endpointProperties(uri);
        assertEquals("http", params.get("protocol"));
        assertEquals("a-b-c.hostname.tld", params.get("host"));
        assertEquals("8080", params.get("port"));
        assertEquals("anything", params.get("path"));
        // remove path
        params.remove("path");
        // Rebuilding without the path must drop the trailing /anything part.
        String resolved = catalog.asEndpointUri("netty-http", params, false);
        assertEquals("netty-http:http:a-b-c.hostname.tld:8080", resolved);
    }
@Test
public void testEndpointProperties() throws Exception {
Map<String, String> map = catalog.endpointProperties("ftp:someserver:21/foo?connectTimeout=5000");
assertNotNull(map);
assertEquals(4, map.size());
assertEquals("someserver", map.get("host"));
assertEquals("21", map.get("port"));
assertEquals("foo", map.get("directoryName"));
assertEquals("5000", map.get("connectTimeout"));
}
    @Test
    public void testEndpointLenientProperties() throws Exception {
        // Lenient properties are the query options the component does not
        // declare; known options like throwExceptionOnFailure are excluded.
        Map<String, String> map
                = catalog.endpointLenientProperties("http:myserver?throwExceptionOnFailure=false&foo=123&bar=456");
        assertNotNull(map);
        assertEquals(2, map.size());
        assertEquals("123", map.get("foo"));
        assertEquals("456", map.get("bar"));
        // httpClient.* options belong to the component's multi-value prefix
        // and are therefore not reported as lenient.
        map = catalog.endpointLenientProperties(
                "http:myserver?throwExceptionOnFailure=false&foo=123&bar=456&httpClient.timeout=5000&httpClient.soTimeout=10000");
        assertNotNull(map);
        assertEquals(2, map.size());
        assertEquals("123", map.get("foo"));
        assertEquals("456", map.get("bar"));
        // An unknown dotted prefix (myPrefix.baz) is still lenient.
        map = catalog.endpointLenientProperties(
                "http:myserver?throwExceptionOnFailure=false&foo=123&bar=456&httpClient.timeout=5000&httpClient.soTimeout=10000&myPrefix.baz=beer");
        assertNotNull(map);
        assertEquals(3, map.size());
        assertEquals("123", map.get("foo"));
        assertEquals("456", map.get("bar"));
        assertEquals("beer", map.get("myPrefix.baz"));
    }
    @Test
    public void testEndpointPropertiesPlaceholders() throws Exception {
        // {{ }} property placeholders must survive URI parsing unresolved.
        Map<String, String> map = catalog.endpointProperties("timer:foo?period={{howoften}}&repeatCount=5");
        assertNotNull(map);
        assertEquals(3, map.size());
        assertEquals("foo", map.get("timerName"));
        assertEquals("{{howoften}}", map.get("period"));
        assertEquals("5", map.get("repeatCount"));
    }
    @Test
    public void testEndpointPropertiesNettyHttp() throws Exception {
        // protocol:host:port/path plus query options are all parsed out.
        Map<String, String> map
                = catalog.endpointProperties("netty-http:http:localhost:8080/foo/bar?disconnect=true&keepAlive=false");
        assertNotNull(map);
        assertEquals(6, map.size());
        assertEquals("http", map.get("protocol"));
        assertEquals("localhost", map.get("host"));
        assertEquals("8080", map.get("port"));
        assertEquals("foo/bar", map.get("path"));
        assertEquals("true", map.get("disconnect"));
        assertEquals("false", map.get("keepAlive"));
    }
    @Test
    public void testEndpointPropertiesNettyHttpDefaultPort() throws Exception {
        // When the port is omitted from the URI, no "port" entry is produced.
        Map<String, String> map
                = catalog.endpointProperties("netty-http:http:localhost/foo/bar?disconnect=true&keepAlive=false");
        assertNotNull(map);
        assertEquals(5, map.size());
        assertEquals("http", map.get("protocol"));
        assertEquals("localhost", map.get("host"));
        assertEquals("foo/bar", map.get("path"));
        assertEquals("true", map.get("disconnect"));
        assertEquals("false", map.get("keepAlive"));
    }
    @Test
    public void testEndpointPropertiesNettyHttpPlaceholder() throws Exception {
        // Placeholders in host and port positions are kept unresolved.
        Map<String, String> map
                = catalog.endpointProperties("netty-http:http:{{myhost}}:{{myport}}/foo/bar?disconnect=true&keepAlive=false");
        assertNotNull(map);
        assertEquals(6, map.size());
        assertEquals("http", map.get("protocol"));
        assertEquals("{{myhost}}", map.get("host"));
        assertEquals("{{myport}}", map.get("port"));
        assertEquals("foo/bar", map.get("path"));
        assertEquals("true", map.get("disconnect"));
        assertEquals("false", map.get("keepAlive"));
    }
    @Test
    public void testEndpointPropertiesNettyHttpWithDoubleSlash() throws Exception {
        // The http:// double-slash form parses the same as http: form.
        Map<String, String> map
                = catalog.endpointProperties("netty-http:http://localhost:8080/foo/bar?disconnect=true&keepAlive=false");
        assertNotNull(map);
        assertEquals(6, map.size());
        assertEquals("http", map.get("protocol"));
        assertEquals("localhost", map.get("host"));
        assertEquals("8080", map.get("port"));
        assertEquals("foo/bar", map.get("path"));
        assertEquals("true", map.get("disconnect"));
        assertEquals("false", map.get("keepAlive"));
    }
    @Test
    public void testAsEndpointUriLog() throws Exception {
        Map<String, String> map = new HashMap<>();
        map.put("loggerName", "foo");
        map.put("loggerLevel", "WARN");
        map.put("multiline", "true");
        map.put("showAll", "true");
        map.put("showBody", "false");
        map.put("showBodyType", "false");
        map.put("showExchangePattern", "false");
        map.put("style", "Tab");
        // Options whose values equal the component defaults (the three
        // "false" entries) are omitted from the generated URI.
        assertEquals("log:foo?loggerLevel=WARN&multiline=true&showAll=true&style=Tab",
                catalog.asEndpointUri("log", map, false));
    }
@Test
public void testAsEndpointUriLogShort() throws Exception {
Map<String, String> map = new HashMap<>();
map.put("loggerName", "foo");
map.put("loggerLevel", "DEBUG");
assertEquals("log:foo?loggerLevel=DEBUG", catalog.asEndpointUri("log", map, false));
}
    @Test
    public void testAsEndpointUriWithplaceholder() throws Exception {
        // Placeholders are allowed both in the path option and query values.
        Map<String, String> map = new HashMap<>();
        map.put("query", "{{insert}}");
        assertEquals("sql:{{insert}}", catalog.asEndpointUri("sql", map, false));
        map.put("useMessageBodyForSql", "true");
        assertEquals("sql:{{insert}}?useMessageBodyForSql=true", catalog.asEndpointUri("sql", map, false));
        map.put("parametersCount", "{{count}}");
        assertEquals("sql:{{insert}}?parametersCount={{count}}&useMessageBodyForSql=true",
                catalog.asEndpointUri("sql", map, false));
    }
@Test
public void testAsEndpointUriStream() throws Exception {
Map<String, String> map = new LinkedHashMap<>();
map.put("kind", "url");
map.put("url", "http://camel.apache.org");
assertEquals("stream:url?url=http://camel.apache.org", catalog.asEndpointUri("stream", map, false));
}
    @Test
    public void testEndpointPropertiesJms() throws Exception {
        // Two path segments map to destinationType + destinationName...
        Map<String, String> map = catalog.endpointProperties("jms:queue:foo");
        assertNotNull(map);
        assertEquals(2, map.size());
        assertEquals("queue", map.get("destinationType"));
        assertEquals("foo", map.get("destinationName"));
        // ...a single segment maps to destinationName only.
        map = catalog.endpointProperties("jms:foo");
        assertNotNull(map);
        assertEquals(1, map.size());
        assertEquals("foo", map.get("destinationName"));
    }
    @Test
    public void testEndpointPropertiesJmsWithDotInName() throws Exception {
        // A dot inside the destination name must not be split into segments.
        Map<String, String> map = catalog.endpointProperties("jms:browse.me");
        assertNotNull(map);
        assertEquals(1, map.size());
        assertEquals("browse.me", map.get("destinationName"));
        // NOTE(review): this second half is a verbatim duplicate of the
        // first — it was probably meant to cover a different URI variant
        // (e.g. "jms:queue:browse.me"); confirm the intended input.
        map = catalog.endpointProperties("jms:browse.me");
        assertNotNull(map);
        assertEquals(1, map.size());
        assertEquals("browse.me", map.get("destinationName"));
    }
    @Test
    public void testEndpointPropertiesJmsRequired() throws Exception {
        // Required path option alone...
        Map<String, String> map = catalog.endpointProperties("jms:foo");
        assertNotNull(map);
        assertEquals(1, map.size());
        assertEquals("foo", map.get("destinationName"));
        // ...and combined with extra query options.
        map = catalog.endpointProperties("jms:foo?allowNullBody=true&deliveryPersistent=false");
        assertNotNull(map);
        assertEquals(3, map.size());
        assertEquals("foo", map.get("destinationName"));
        assertEquals("true", map.get("allowNullBody"));
        assertEquals("false", map.get("deliveryPersistent"));
    }
    @Test
    public void testEndpointPropertiesAtom() throws Exception {
        // The entire remainder (including the embedded "file:" scheme) is the
        // single feedUri path option.
        Map<String, String> map = catalog.endpointProperties("atom:file:src/test/data/feed.atom");
        assertNotNull(map);
        assertEquals(1, map.size());
        assertEquals("file:src/test/data/feed.atom", map.get("feedUri"));
        map = catalog.endpointProperties("atom:file:src/test/data/feed.atom?splitEntries=false&delay=5000");
        assertNotNull(map);
        assertEquals(3, map.size());
        assertEquals("file:src/test/data/feed.atom", map.get("feedUri"));
        assertEquals("false", map.get("splitEntries"));
        assertEquals("5000", map.get("delay"));
    }
    @Test
    public void testEndpointPropertiesMultiValued() throws Exception {
        // A multi-valued option keeps its nested &-separated pairs as one
        // combined value instead of being split into separate options.
        Map<String, String> map
                = catalog.endpointProperties("http:helloworld?httpClientOptions=httpClient.foo=123&httpClient.bar=456");
        assertNotNull(map);
        assertEquals(2, map.size());
        assertEquals("helloworld", map.get("httpUri"));
        assertEquals("httpClient.foo=123&httpClient.bar=456", map.get("httpClientOptions"));
    }
    @Test
    public void testEndpointPropertiesSshWithUserInfo() throws Exception {
        // Credentials as explicit query options...
        Map<String, String> map = catalog.endpointProperties("ssh:localhost:8101?username=scott&password=tiger");
        assertNotNull(map);
        assertEquals(4, map.size());
        assertEquals("8101", map.get("port"));
        assertEquals("localhost", map.get("host"));
        assertEquals("scott", map.get("username"));
        assertEquals("tiger", map.get("password"));
        // ...and as user:password@ authority userinfo parse identically.
        map = catalog.endpointProperties("ssh://scott:tiger@localhost:8101");
        assertNotNull(map);
        assertEquals(4, map.size());
        assertEquals("8101", map.get("port"));
        assertEquals("localhost", map.get("host"));
        assertEquals("scott", map.get("username"));
        assertEquals("tiger", map.get("password"));
    }
    @Test
    public void validateActiveMQProperties() throws Exception {
        // add activemq as known component
        // (registered against the JMS-compatible component class so its
        // options are validated via the jms schema)
        catalog.addComponent("activemq", "org.apache.camel.component.activemq.ActiveMQComponent");
        // activemq
        EndpointValidationResult result = catalog.validateEndpointProperties("activemq:temp-queue:cheese?jmsMessageType=Bytes");
        assertTrue(result.isSuccess());
        result = catalog.validateEndpointProperties("activemq:temp-queue:cheese?jmsMessageType=Bytes");
        assertTrue(result.isSuccess());
        // Validation must also succeed in consumer-only and producer-only modes.
        result = catalog.validateEndpointProperties("activemq:temp-queue:cheese?jmsMessageType=Bytes", false, true, false);
        assertTrue(result.isSuccess());
        result = catalog.validateEndpointProperties("activemq:temp-queue:cheese?jmsMessageType=Bytes", false, false, true);
        assertTrue(result.isSuccess());
        // connection factory
        result = catalog.validateEndpointProperties(
                "activemq:Consumer.Baz.VirtualTopic.FooRequest?connectionFactory=#pooledJmsConnectionFactory");
        assertTrue(result.isSuccess());
    }
    @Test
    public void validateJmsProperties() throws Exception {
        // jms
        EndpointValidationResult result = catalog.validateEndpointProperties("jms:temp-queue:cheese?jmsMessageType=Bytes");
        assertTrue(result.isSuccess());
        result = catalog.validateEndpointProperties("jms:temp-queue:cheese?jmsMessageType=Bytes");
        assertTrue(result.isSuccess());
        // Validation must also succeed in consumer-only and producer-only modes.
        result = catalog.validateEndpointProperties("jms:temp-queue:cheese?jmsMessageType=Bytes", false, true, false);
        assertTrue(result.isSuccess());
        result = catalog.validateEndpointProperties("jms:temp-queue:cheese?jmsMessageType=Bytes", false, false, true);
        assertTrue(result.isSuccess());
    }
    /**
     * Exhaustive walk of validateEndpointProperties: unknown options, enums,
     * bean references, required/boolean/integer checks, unknown components,
     * consumer/scheduler prefixes, lenient handling in consumer vs producer
     * mode, userinfo authorities, placeholders, and unparseable URIs.
     */
    @Test
    public void validateProperties() throws Exception {
        // valid
        EndpointValidationResult result = catalog.validateEndpointProperties("log:mylog");
        assertTrue(result.isSuccess());
        // unknown
        result = catalog.validateEndpointProperties("log:mylog?level=WARN&foo=bar");
        assertFalse(result.isSuccess());
        assertTrue(result.getUnknown().contains("foo"));
        assertEquals(1, result.getNumberOfErrors());
        // enum
        result = catalog.validateEndpointProperties("jms:unknown:myqueue");
        assertFalse(result.isSuccess());
        assertEquals("unknown", result.getInvalidEnum().get("destinationType"));
        assertEquals("queue", result.getDefaultValues().get("destinationType"));
        assertEquals(1, result.getNumberOfErrors());
        // reference okay
        result = catalog.validateEndpointProperties("jms:queue:myqueue?jmsKeyFormatStrategy=#key");
        assertTrue(result.isSuccess());
        assertEquals(0, result.getNumberOfErrors());
        // reference
        result = catalog.validateEndpointProperties("jms:queue:myqueue?jmsKeyFormatStrategy=foo");
        assertFalse(result.isSuccess());
        assertEquals("foo", result.getInvalidEnum().get("jmsKeyFormatStrategy"));
        assertEquals(1, result.getNumberOfErrors());
        // okay
        result = catalog.validateEndpointProperties(
                "yammer:MESSAGES?accessToken=aaa&consumerKey=bbb&consumerSecret=ccc&useJson=true&initialDelay=500");
        assertTrue(result.isSuccess());
        // required / boolean / integer
        result = catalog
                .validateEndpointProperties("yammer:MESSAGES?accessToken=aaa&consumerKey=&useJson=no&initialDelay=five");
        assertFalse(result.isSuccess());
        assertEquals(4, result.getNumberOfErrors());
        assertTrue(result.getRequired().contains("consumerKey"));
        assertTrue(result.getRequired().contains("consumerSecret"));
        assertEquals("no", result.getInvalidBoolean().get("useJson"));
        assertEquals("five", result.getInvalidInteger().get("initialDelay"));
        // unknown component
        result = catalog.validateEndpointProperties("foo:bar?me=you");
        assertTrue(result.isSuccess());
        assertTrue(result.hasWarnings());
        assertEquals("foo", result.getUnknownComponent());
        assertEquals(0, result.getNumberOfErrors());
        assertEquals(1, result.getNumberOfWarnings());
        // invalid boolean but default value
        result = catalog.validateEndpointProperties("log:output?showAll=ggg");
        assertFalse(result.isSuccess());
        assertEquals("ggg", result.getInvalidBoolean().get("showAll"));
        assertEquals(1, result.getNumberOfErrors());
        // dataset
        result = catalog.validateEndpointProperties("dataset:foo?minRate=50");
        assertTrue(result.isSuccess());
        // time pattern
        result = catalog.validateEndpointProperties("timer://foo?fixedRate=true&delay=0&period=2000");
        assertTrue(result.isSuccess());
        // reference lookup
        result = catalog.validateEndpointProperties("timer://foo?fixedRate=#fixed&delay=#myDelay");
        assertTrue(result.isSuccess());
        // optional consumer. prefix
        result = catalog.validateEndpointProperties("file:inbox?consumer.delay=5000&consumer.greedy=true");
        assertTrue(result.isSuccess());
        // optional without consumer. prefix
        result = catalog.validateEndpointProperties("file:inbox?delay=5000&greedy=true");
        assertTrue(result.isSuccess());
        // mixed optional without consumer. prefix
        result = catalog.validateEndpointProperties("file:inbox?delay=5000&consumer.greedy=true");
        assertTrue(result.isSuccess());
        // prefix
        result = catalog.validateEndpointProperties("file:inbox?delay=5000&scheduler.foo=123&scheduler.bar=456");
        assertTrue(result.isSuccess());
        // stub
        result = catalog.validateEndpointProperties("stub:foo?me=123&you=456");
        assertTrue(result.isSuccess());
        // lenient on
        result = catalog.validateEndpointProperties("dataformat:string:marshal?foo=bar");
        assertTrue(result.isSuccess());
        // lenient off
        result = catalog.validateEndpointProperties("dataformat:string:marshal?foo=bar", true);
        assertFalse(result.isSuccess());
        assertTrue(result.getUnknown().contains("foo"));
        // lenient off consumer only
        result = catalog.validateEndpointProperties("netty-http:http://myserver?foo=bar", false, true, false);
        assertFalse(result.isSuccess());
        // consumer should still fail because we cannot use lenient option in consumer mode
        assertEquals("foo", result.getUnknown().iterator().next());
        assertNull(result.getLenient());
        // lenient off producer only
        result = catalog.validateEndpointProperties("netty-http:http://myserver?foo=bar", false, false, true);
        assertTrue(result.isSuccess());
        // foo is the lenient option
        assertEquals(1, result.getLenient().size());
        assertEquals("foo", result.getLenient().iterator().next());
        // lenient on consumer only
        result = catalog.validateEndpointProperties("netty-http:http://myserver?foo=bar", true, true, false);
        assertFalse(result.isSuccess());
        // consumer should still fail because we cannot use lenient option in consumer mode
        assertEquals("foo", result.getUnknown().iterator().next());
        assertNull(result.getLenient());
        // lenient on producer only
        result = catalog.validateEndpointProperties("netty-http:http://myserver?foo=bar", true, false, true);
        assertFalse(result.isSuccess());
        assertEquals("foo", result.getUnknown().iterator().next());
        assertNull(result.getLenient());
        // lenient on rss consumer only
        result = catalog.validateEndpointProperties(
                "rss:file:src/test/data/rss20.xml?splitEntries=true&sortEntries=true&consumer.delay=50&foo=bar", false, true,
                false);
        assertTrue(result.isSuccess());
        assertEquals("foo", result.getLenient().iterator().next());
        // data format
        result = catalog.validateEndpointProperties("dataformat:zipdeflater:marshal?compressionLevel=2", true);
        assertTrue(result.isSuccess());
        // 2 slash after component name
        result = catalog.validateEndpointProperties("atmos://put?remotePath=/dummy.txt");
        assertTrue(result.isSuccess());
        // userinfo in authority with username and password
        result = catalog.validateEndpointProperties("ssh://karaf:karaf@localhost:8101");
        assertTrue(result.isSuccess());
        // userinfo in authority without password
        result = catalog.validateEndpointProperties(
                "ssh://scott@localhost:8101?certResource=classpath:test_rsa&useFixedDelay=true&delay=5000&pollCommand=features:list%0A");
        assertTrue(result.isSuccess());
        // userinfo with both user and password and placeholder
        result = catalog.validateEndpointProperties("ssh://smx:smx@localhost:8181?timeout=3000");
        assertTrue(result.isSuccess());
        // and should also work when port is using a placeholder
        result = catalog.validateEndpointProperties("ssh://smx:smx@localhost:{{port}}?timeout=3000");
        assertTrue(result.isSuccess());
        // placeholder for a bunch of optional options
        result = catalog.validateEndpointProperties("aws-swf://activity?{{options}}");
        assertTrue(result.isSuccess());
        // incapable to parse
        result = catalog.validateEndpointProperties("{{getFtpUrl}}?recursive=true");
        assertTrue(result.isSuccess());
        assertTrue(result.hasWarnings());
        assertNotNull(result.getIncapable());
    }
@Test
public void validatePropertiesSummary() throws Exception {
EndpointValidationResult result = catalog.validateEndpointProperties(
"yammer:MESSAGES?blah=yada&accessToken=aaa&consumerKey=&useJson=no&initialDelay=five&pollStrategy=myStrategy");
assertFalse(result.isSuccess());
String reason = result.summaryErrorMessage(true);
LOG.info(reason);
result = catalog.validateEndpointProperties("jms:unknown:myqueue");
assertFalse(result.isSuccess());
reason = result.summaryErrorMessage(false);
LOG.info(reason);
}
@Test
public void validateTimePattern() throws Exception {
assertTrue(catalog.validateTimePattern("0"));
assertTrue(catalog.validateTimePattern("500"));
assertTrue(catalog.validateTimePattern("10000"));
assertTrue(catalog.validateTimePattern("5s"));
assertTrue(catalog.validateTimePattern("5sec"));
assertTrue(catalog.validateTimePattern("5secs"));
assertTrue(catalog.validateTimePattern("3m"));
assertTrue(catalog.validateTimePattern("3min"));
assertTrue(catalog.validateTimePattern("3minutes"));
assertTrue(catalog.validateTimePattern("5m15s"));
assertTrue(catalog.validateTimePattern("1h"));
assertTrue(catalog.validateTimePattern("1hour"));
assertTrue(catalog.validateTimePattern("2hours"));
assertFalse(catalog.validateTimePattern("bla"));
assertFalse(catalog.validateTimePattern("2year"));
assertFalse(catalog.validateTimePattern("60darn"));
}
@Test
public void testEndpointComponentName() throws Exception {
String name = catalog.endpointComponentName("jms:queue:foo");
assertEquals("jms", name);
}
@Test
public void testListComponentsAsJson() throws Exception {
String json = catalog.listComponentsAsJson();
assertNotNull(json);
// validate we can parse the json
ObjectMapper mapper = new ObjectMapper();
JsonNode tree = mapper.readTree(json);
assertNotNull(tree);
}
@Test
public void testListDataFormatsAsJson() throws Exception {
String json = catalog.listDataFormatsAsJson();
assertNotNull(json);
// validate we can parse the json
ObjectMapper mapper = new ObjectMapper();
JsonNode tree = mapper.readTree(json);
assertNotNull(tree);
}
@Test
public void testListLanguagesAsJson() throws Exception {
String json = catalog.listLanguagesAsJson();
assertNotNull(json);
// validate we can parse the json
ObjectMapper mapper = new ObjectMapper();
JsonNode tree = mapper.readTree(json);
assertNotNull(tree);
}
@Test
public void testListModelsAsJson() throws Exception {
String json = catalog.listModelsAsJson();
assertNotNull(json);
// validate we can parse the json
ObjectMapper mapper = new ObjectMapper();
JsonNode tree = mapper.readTree(json);
assertNotNull(tree);
}
@Test
public void testListOthersAsJson() throws Exception {
String json = catalog.listOthersAsJson();
assertNotNull(json);
// validate we can parse the json
ObjectMapper mapper = new ObjectMapper();
JsonNode tree = mapper.readTree(json);
assertNotNull(tree);
}
@Test
public void testSummaryAsJson() throws Exception {
String json = catalog.summaryAsJson();
assertNotNull(json);
// validate we can parse the json
ObjectMapper mapper = new ObjectMapper();
JsonNode tree = mapper.readTree(json);
assertNotNull(tree);
}
@Test
public void testAddComponent() throws Exception {
catalog.addComponent("dummy", "org.foo.camel.DummyComponent");
assertTrue(catalog.findComponentNames().contains("dummy"));
String json = catalog.componentJSonSchema("dummy");
assertNotNull(json);
// validate we can parse the json
ObjectMapper mapper = new ObjectMapper();
JsonNode tree = mapper.readTree(json);
assertNotNull(tree);
}
@Test
public void testAddComponentWithJson() throws Exception {
String json = loadText(new FileInputStream("src/test/resources/org/foo/camel/dummy.json"));
assertNotNull(json);
catalog.addComponent("dummy", "org.foo.camel.DummyComponent", json);
assertTrue(catalog.findComponentNames().contains("dummy"));
json = catalog.componentJSonSchema("dummy");
assertNotNull(json);
// validate we can parse the json
ObjectMapper mapper = new ObjectMapper();
JsonNode tree = mapper.readTree(json);
assertNotNull(tree);
}
@Test
public void testAddComponentWithPrettyJson() throws Exception {
String json = loadText(new FileInputStream("src/test/resources/org/foo/camel/dummy-pretty.json"));
assertNotNull(json);
catalog.addComponent("dummy", "org.foo.camel.DummyComponent", json);
assertTrue(catalog.findComponentNames().contains("dummy"));
json = catalog.componentJSonSchema("dummy");
assertNotNull(json);
// validate we can parse the json
ObjectMapper mapper = new ObjectMapper();
JsonNode tree = mapper.readTree(json);
assertNotNull(tree);
}
@Test
public void testAddDataFormat() throws Exception {
catalog.addDataFormat("dummyformat", "org.foo.camel.DummyDataFormat");
assertTrue(catalog.findDataFormatNames().contains("dummyformat"));
String json = catalog.dataFormatJSonSchema("dummyformat");
assertNotNull(json);
// validate we can parse the json
ObjectMapper mapper = new ObjectMapper();
JsonNode tree = mapper.readTree(json);
assertNotNull(tree);
}
@Test
public void testAddDataFormatWithJSon() throws Exception {
String json = loadText(new FileInputStream("src/test/resources/org/foo/camel/dummyformat.json"));
assertNotNull(json);
catalog.addDataFormat("dummyformat", "org.foo.camel.DummyDataFormat", json);
assertTrue(catalog.findDataFormatNames().contains("dummyformat"));
json = catalog.dataFormatJSonSchema("dummyformat");
assertNotNull(json);
// validate we can parse the json
ObjectMapper mapper = new ObjectMapper();
JsonNode tree = mapper.readTree(json);
assertNotNull(tree);
}
@Test
public void testAddDataFormatWithPrettyJSon() throws Exception {
String json = loadText(new FileInputStream("src/test/resources/org/foo/camel/dummyformat-pretty.json"));
assertNotNull(json);
catalog.addDataFormat("dummyformat", "org.foo.camel.DummyDataFormat", json);
assertTrue(catalog.findDataFormatNames().contains("dummyformat"));
json = catalog.dataFormatJSonSchema("dummyformat");
assertNotNull(json);
// validate we can parse the json
ObjectMapper mapper = new ObjectMapper();
JsonNode tree = mapper.readTree(json);
assertNotNull(tree);
}
    @Test
    public void testSimpleExpression() throws Exception {
        // Validates simple-language expressions; the error text, short error and
        // index assertions pin the exact output of the simple parser.
        LanguageValidationResult result = catalog.validateLanguageExpression(null, "simple", "${body}");
        assertTrue(result.isSuccess());
        assertEquals("${body}", result.getText());
        // Missing closing brace: the parser reports the failing position (index 5).
        result = catalog.validateLanguageExpression(null, "simple", "${body");
        assertFalse(result.isSuccess());
        assertEquals("${body", result.getText());
        LOG.info(result.getError());
        assertTrue(result.getError().startsWith("expected symbol functionEnd but was eol at location 5"));
        assertEquals("expected symbol functionEnd but was eol", result.getShortError());
        assertEquals(5, result.getIndex());
        // Unknown function name: rejected with a hint at the valid syntax, index 0.
        result = catalog.validateLanguageExpression(null, "simple", "${bodyxxx}");
        assertFalse(result.isSuccess());
        assertEquals("${bodyxxx}", result.getText());
        LOG.info(result.getError());
        assertEquals("Valid syntax: ${body.OGNL} was: bodyxxx", result.getShortError());
        assertEquals(0, result.getIndex());
    }
    @Test
    public void testSimplePredicate() throws Exception {
        // Validates simple-language predicates; error assertions pin the exact
        // parser output including the reported index.
        LanguageValidationResult result = catalog.validateLanguagePredicate(null, "simple", "${body} == 'abc'");
        assertTrue(result.isSuccess());
        assertEquals("${body} == 'abc'", result.getText());
        // Missing closing brace on the right-hand side: failure at index 22.
        result = catalog.validateLanguagePredicate(null, "simple", "${body} > ${header.size");
        assertFalse(result.isSuccess());
        assertEquals("${body} > ${header.size", result.getText());
        LOG.info(result.getError());
        assertTrue(result.getError().startsWith("expected symbol functionEnd but was eol at location 22"));
        assertEquals("expected symbol functionEnd but was eol", result.getShortError());
        assertEquals(22, result.getIndex());
    }
    @Test
    public void testPredicatePlaceholder() throws Exception {
        // {{...}} property placeholders inside a predicate literal must be accepted
        // and preserved verbatim in the reported text and error messages.
        LanguageValidationResult result = catalog.validateLanguagePredicate(null, "simple", "${body} contains '{{danger}}'");
        assertTrue(result.isSuccess());
        assertEquals("${body} contains '{{danger}}'", result.getText());
        // Typo in the function name still fails, and the placeholder is kept in the error.
        result = catalog.validateLanguagePredicate(null, "simple", "${bdy} contains '{{danger}}'");
        assertFalse(result.isSuccess());
        assertEquals("${bdy} contains '{{danger}}'", result.getText());
        LOG.info(result.getError());
        assertTrue(result.getError().startsWith("Unknown function: bdy at location 0"));
        assertTrue(result.getError().contains("'{{danger}}'"));
        assertEquals("Unknown function: bdy", result.getShortError());
        assertEquals(0, result.getIndex());
    }
    @Test
    public void testValidateLanguage() throws Exception {
        // End-to-end coverage of expression and predicate validation across
        // known languages (simple, header), an unknown language, and a syntax error.
        LanguageValidationResult result = catalog.validateLanguageExpression(null, "simple", "${body}");
        assertTrue(result.isSuccess());
        assertEquals("${body}", result.getText());
        result = catalog.validateLanguageExpression(null, "header", "foo");
        assertTrue(result.isSuccess());
        assertEquals("foo", result.getText());
        result = catalog.validateLanguagePredicate(null, "simple", "${body} > 10");
        assertTrue(result.isSuccess());
        assertEquals("${body} > 10", result.getText());
        result = catalog.validateLanguagePredicate(null, "header", "bar");
        assertTrue(result.isSuccess());
        assertEquals("bar", result.getText());
        // Unknown language name: reported as an error, not an exception.
        result = catalog.validateLanguagePredicate(null, "foobar", "bar");
        assertFalse(result.isSuccess());
        assertEquals("Unknown language foobar", result.getError());
        // Invalid operator =!= in an otherwise known language.
        result = catalog.validateLanguagePredicate(null, "simple", "${body.length} =!= 12");
        assertFalse(result.isSuccess());
        assertEquals("Unexpected token =", result.getShortError());
    }
@Test
public void testValidateJSonPathLanguage() throws Exception {
LanguageValidationResult result = catalog.validateLanguageExpression(null, "jsonpath", "$.store.book[?(@.price < 10)]");
assertTrue(result.isSuccess());
assertEquals("$.store.book[?(@.price < 10)]", result.getText());
result = catalog.validateLanguageExpression(null, "jsonpath", "$.store.book[?(@.price ^^^ 10)]");
assertFalse(result.isSuccess());
assertEquals("$.store.book[?(@.price ^^^ 10)]", result.getText());
assertEquals("Illegal syntax: $.store.book[?(@.price ^^^ 10)]", result.getError());
}
@Test
public void testSpringCamelContext() throws Exception {
String json = catalog.modelJSonSchema("camelContext");
assertNotNull(json);
// validate we can parse the json
ObjectMapper mapper = new ObjectMapper();
JsonNode tree = mapper.readTree(json);
assertNotNull(tree);
assertTrue(json.contains("CamelContext using XML configuration"));
}
@Test
public void testComponentAsciiDoc() throws Exception {
String doc = catalog.componentAsciiDoc("mock");
assertNotNull(doc);
assertTrue(doc.contains("mock:someName"));
doc = catalog.componentAsciiDoc("geocoder");
assertNotNull(doc);
assertTrue(doc.contains("looking up geocodes"));
doc = catalog.componentAsciiDoc("smtp");
assertNotNull(doc);
assertTrue(doc.contains("The mail component"));
doc = catalog.componentAsciiDoc("unknown");
assertNull(doc);
}
@Test
public void testTransactedAndPolicyNoOutputs() throws Exception {
String json = catalog.modelJSonSchema("transacted");
assertNotNull(json);
assertTrue(json.contains("\"output\": false"));
assertFalse(json.contains("\"outputs\":"));
json = catalog.modelJSonSchema("policy");
assertNotNull(json);
assertTrue(json.contains("\"output\": false"));
assertFalse(json.contains("\"outputs\":"));
}
@Test
public void testDataFormatAsciiDoc() throws Exception {
String doc = catalog.dataFormatAsciiDoc("json-jackson");
assertNotNull(doc);
assertTrue(doc.contains("Jackson dataformat"));
doc = catalog.dataFormatAsciiDoc("bindy-csv");
assertNotNull(doc);
assertTrue(doc.contains("CsvRecord"));
}
@Test
public void testLanguageAsciiDoc() throws Exception {
String doc = catalog.languageAsciiDoc("jsonpath");
assertNotNull(doc);
assertTrue(doc.contains("JSonPath language"));
}
@Test
public void testOtherAsciiDoc() throws Exception {
String doc = catalog.otherAsciiDoc("swagger-java");
assertNotNull(doc);
assertTrue(doc.contains("Swagger"));
}
@Test
public void testValidateEndpointTwitterSpecial() throws Exception {
String uri = "twitter-search://java?{{%s}}";
EndpointValidationResult result = catalog.validateEndpointProperties(uri);
assertTrue(result.isSuccess());
}
    @Test
    public void testValidateApiEndpoint() throws Exception {
        // Validation of API-based components (twilio, zendesk): per-method option
        // sets, type converters, and method-name enum choices.
        // there is a type converter that converts from and to to phone number
        String uri = "twilio:call/create?applicationSid=123&from=#555&to=#999";
        EndpointValidationResult result = catalog.validateEndpointProperties(uri);
        assertTrue(result.isSuccess());
        // there is a type converter that converts from and to to phone number
        uri = "twilio:call/create?applicationSid=123&from=#555&to=#999&unknown=true";
        result = catalog.validateEndpointProperties(uri);
        assertFalse(result.isSuccess());
        assertTrue(result.getUnknown().contains("unknown"));
        // call/fetcher does not have from and to parameters
        uri = "twilio:Call/Fetch?applicationSid=123&from=#555&to=#999";
        result = catalog.validateEndpointProperties(uri);
        assertFalse(result.isSuccess());
        assertTrue(result.getUnknown().contains("from"));
        assertTrue(result.getUnknown().contains("to"));
        // zendesk method names are accepted in camelCase, UPPER_SNAKE and dash-case
        uri = "zendesk:getTopicsByUser?user_id=123";
        result = catalog.validateEndpointProperties(uri);
        assertTrue(result.isSuccess());
        uri = "zendesk:GET_TOPICS_BY_USER?user_id=123";
        result = catalog.validateEndpointProperties(uri);
        assertTrue(result.isSuccess());
        uri = "zendesk:get-topics-by-user?user_id=123&unknown=true";
        result = catalog.validateEndpointProperties(uri);
        assertFalse(result.isSuccess());
        assertTrue(result.getUnknown().contains("unknown"));
        // twilio account methods: fetch/update are valid with or without pathSid
        uri = "twilio:account/fetch";
        result = catalog.validateEndpointProperties(uri);
        assertTrue(result.isSuccess());
        uri = "twilio:account/fetch?pathSid=123";
        result = catalog.validateEndpointProperties(uri);
        assertTrue(result.isSuccess());
        uri = "twilio:account/update";
        result = catalog.validateEndpointProperties(uri);
        assertTrue(result.isSuccess());
        uri = "twilio:account/update?pathSid=123";
        result = catalog.validateEndpointProperties(uri);
        assertTrue(result.isSuccess());
        // "read" is not a valid method; the enum choices list the valid ones
        uri = "twilio:account/read";
        result = catalog.validateEndpointProperties(uri);
        assertFalse(result.isSuccess());
        assertEquals(2, result.getEnumChoices("methodName").size());
        assertTrue(result.getEnumChoices("methodName").contains("fetch"));
        assertTrue(result.getEnumChoices("methodName").contains("update"));
        uri = "twilio:account/read?pathSid=123";
        result = catalog.validateEndpointProperties(uri);
        assertFalse(result.isSuccess());
    }
@Test
public void testValidateEndpointTimerDuration() throws Exception {
String uri = "timer:foo?period=5s";
EndpointValidationResult result = catalog.validateEndpointProperties(uri);
assertTrue(result.isSuccess());
uri = "timer:foo?period=5p";
result = catalog.validateEndpointProperties(uri);
assertFalse(result.isSuccess());
assertEquals("5p", result.getInvalidDuration().get("period"));
}
    @Test
    public void testValidateEndpointHttpPropertyPlaceholder() throws Exception {
        // {{property...}} placeholders in the query string are accepted in both
        // lenient and strict mode; the ${...} style is only accepted leniently.
        String uri = "http://api.openweathermap.org/data/2.5/weather?{{property.weatherUri}}";
        EndpointValidationResult result = catalog.validateEndpointProperties(uri);
        assertTrue(result.isSuccess());
        result = catalog.validateEndpointProperties(uri, true);
        assertTrue(result.isSuccess());
        // use incorrect style using ${ } as placeholder
        uri = "http://api.openweathermap.org/data/2.5/weather?${property.weatherUri}";
        result = catalog.validateEndpointProperties(uri);
        assertTrue(result.isSuccess());
        // strict mode flags the whole ${...} token as an unknown option
        result = catalog.validateEndpointProperties(uri, true);
        assertFalse(result.isSuccess());
        assertEquals("${property.weatherUri}", result.getUnknown().iterator().next());
    }
@Test
public void testValidateEndpointJmsDefault() throws Exception {
String uri = "jms:cheese?maxMessagesPerTask=-1";
EndpointValidationResult result = catalog.validateEndpointProperties(uri);
assertTrue(result.isSuccess());
assertEquals(1, result.getDefaultValues().size());
assertEquals("-1", result.getDefaultValues().get("maxMessagesPerTask"));
}
@Test
public void testValidateEndpointConsumerOnly() throws Exception {
String uri = "file:inbox?bufferSize=4096&readLock=changed&delete=true";
EndpointValidationResult result = catalog.validateEndpointProperties(uri, false, true, false);
assertTrue(result.isSuccess());
uri = "file:inbox?bufferSize=4096&readLock=changed&delete=true&fileExist=Append";
result = catalog.validateEndpointProperties(uri, false, true, false);
assertFalse(result.isSuccess());
assertEquals("fileExist", result.getNotConsumerOnly().iterator().next());
}
@Test
public void testValidateEndpointProducerOnly() throws Exception {
String uri = "file:outbox?bufferSize=4096&fileExist=Append";
EndpointValidationResult result = catalog.validateEndpointProperties(uri, false, false, true);
assertTrue(result.isSuccess());
uri = "file:outbox?bufferSize=4096&fileExist=Append&delete=true";
result = catalog.validateEndpointProperties(uri, false, false, true);
assertFalse(result.isSuccess());
assertEquals("delete", result.getNotProducerOnly().iterator().next());
}
    @Test
    public void testNettyHttpDynamicToIssue() throws Exception {
        // Round-trip: decompose a netty-http URI into its properties and rebuild
        // it with asEndpointUri; the rebuilt form uses the flattened http: syntax.
        String uri = "netty-http:http://10.192.1.10:8080/client/alerts/summary?throwExceptionOnFailure=false";
        Map<String, String> params = catalog.endpointProperties(uri);
        // Dropping path and the query option collapses the URI to host:port only.
        params.remove("path");
        params.remove("throwExceptionOnFailure");
        String resolved = catalog.asEndpointUri("netty-http", params, false);
        assertEquals("netty-http:http:10.192.1.10:8080", resolved);
        // another example with dash in hostname
        uri = "netty-http:http://a-b-c.hostname.tld:8080/anything";
        params = catalog.endpointProperties(uri);
        resolved = catalog.asEndpointUri("netty-http", params, false);
        assertEquals("netty-http:http:a-b-c.hostname.tld:8080/anything", resolved);
    }
    @Test
    public void testValidateConfigurationPropertyComponent() throws Exception {
        // camel.component.* configuration keys: camelCase, dash-case and all-lowercase
        // spellings of an option are all accepted.
        String text = "camel.component.seda.queueSize=1234";
        ConfigurationPropertiesValidationResult result = catalog.validateConfigurationProperty(text);
        assertTrue(result.isSuccess());
        text = "camel.component.seda.queue-size=1234";
        result = catalog.validateConfigurationProperty(text);
        assertTrue(result.isSuccess());
        text = "camel.component.seda.queuesize=1234";
        result = catalog.validateConfigurationProperty(text);
        assertTrue(result.isSuccess());
        // Non-numeric value for an integer option is flagged.
        text = "camel.component.seda.queueSize=abc";
        result = catalog.validateConfigurationProperty(text);
        assertFalse(result.isSuccess());
        assertEquals("abc", result.getInvalidInteger().get("camel.component.seda.queueSize"));
        // Unknown option name is flagged with the full key.
        text = "camel.component.seda.foo=abc";
        result = catalog.validateConfigurationProperty(text);
        assertFalse(result.isSuccess());
        assertTrue(result.getUnknown().contains("camel.component.seda.foo"));
        // Invalid enum value: the result carries the full list of valid choices.
        text = "camel.component.jms.acknowledgementModeName=abc";
        result = catalog.validateConfigurationProperty(text);
        assertFalse(result.isSuccess());
        assertEquals("abc", result.getInvalidEnum().get("camel.component.jms.acknowledgementModeName"));
        List<String> list = result.getEnumChoices("camel.component.jms.acknowledgementModeName");
        assertEquals(4, list.size());
        assertEquals("SESSION_TRANSACTED", list.get(0));
        assertEquals("CLIENT_ACKNOWLEDGE", list.get(1));
        assertEquals("AUTO_ACKNOWLEDGE", list.get(2));
        assertEquals("DUPS_OK_ACKNOWLEDGE", list.get(3));
    }
    @Test
    public void testValidateConfigurationPropertyLanguage() throws Exception {
        // camel.language.* configuration keys: valid values, invalid boolean and
        // unknown option names.
        String text = "camel.language.tokenize.token=;";
        ConfigurationPropertiesValidationResult result = catalog.validateConfigurationProperty(text);
        assertTrue(result.isSuccess());
        text = "camel.language.tokenize.regex=true";
        result = catalog.validateConfigurationProperty(text);
        assertTrue(result.isSuccess());
        // Non-boolean value for a boolean option is flagged.
        text = "camel.language.tokenize.regex=abc";
        result = catalog.validateConfigurationProperty(text);
        assertFalse(result.isSuccess());
        assertEquals("abc", result.getInvalidBoolean().get("camel.language.tokenize.regex"));
        // Unknown option name is flagged with the full key.
        text = "camel.language.tokenize.foo=abc";
        result = catalog.validateConfigurationProperty(text);
        assertFalse(result.isSuccess());
        assertTrue(result.getUnknown().contains("camel.language.tokenize.foo"));
    }
    @Test
    public void testValidateConfigurationPropertyDataformat() throws Exception {
        // camel.dataformat.* configuration keys: valid values, invalid boolean,
        // unknown option, and enum choices for the bindy type option.
        String text = "camel.dataformat.bindy-csv.type=csv";
        ConfigurationPropertiesValidationResult result = catalog.validateConfigurationProperty(text);
        assertTrue(result.isSuccess());
        text = "camel.dataformat.bindy-csv.locale=us";
        result = catalog.validateConfigurationProperty(text);
        assertTrue(result.isSuccess());
        // Non-boolean value for a boolean option is flagged.
        text = "camel.dataformat.bindy-csv.allowEmptyStream=abc";
        result = catalog.validateConfigurationProperty(text);
        assertFalse(result.isSuccess());
        assertEquals("abc", result.getInvalidBoolean().get("camel.dataformat.bindy-csv.allowEmptyStream"));
        // Unknown option name is flagged with the full key.
        text = "camel.dataformat.bindy-csv.foo=abc";
        result = catalog.validateConfigurationProperty(text);
        assertFalse(result.isSuccess());
        assertTrue(result.getUnknown().contains("camel.dataformat.bindy-csv.foo"));
        // Invalid enum value: the result carries the list of valid choices.
        text = "camel.dataformat.bindy-csv.type=abc";
        result = catalog.validateConfigurationProperty(text);
        assertFalse(result.isSuccess());
        assertEquals("abc", result.getInvalidEnum().get("camel.dataformat.bindy-csv.type"));
        List<String> list = result.getEnumChoices("camel.dataformat.bindy-csv.type");
        assertEquals(3, list.size());
        assertEquals("Csv", list.get(0));
        assertEquals("Fixed", list.get(1));
        assertEquals("KeyValue", list.get(2));
    }
@Test
public void testValidateConfigurationPropertyComponentQuartz() throws Exception {
String text = "camel.component.quartz.auto-start-scheduler=true";
ConfigurationPropertiesValidationResult result = catalog.validateConfigurationProperty(text);
assertTrue(result.isSuccess());
text = "camel.component.quartz.properties=#myProp";
result = catalog.validateConfigurationProperty(text);
assertTrue(result.isSuccess());
text = "camel.component.quartz.properties=123";
result = catalog.validateConfigurationProperty(text);
assertFalse(result.isSuccess());
text = "camel.component.quartz.properties.foo=123";
result = catalog.validateConfigurationProperty(text);
assertTrue(result.isSuccess());
text = "camel.component.quartz.properties.bar=true";
result = catalog.validateConfigurationProperty(text);
assertTrue(result.isSuccess());
text = "camel.component.quartz.properties[0]=yes";
result = catalog.validateConfigurationProperty(text);
assertTrue(result.isSuccess());
text = "camel.component.quartz.properties[1]=no";
result = catalog.validateConfigurationProperty(text);
assertTrue(result.isSuccess());
text = "camel.component.quartz.properties[foo]=abc";
result = catalog.validateConfigurationProperty(text);
assertTrue(result.isSuccess());
text = "camel.component.quartz.properties[foo].beer=yes";
result = catalog.validateConfigurationProperty(text);
assertTrue(result.isSuccess());
text = "camel.component.quartz.properties[foo].drink=no";
result = catalog.validateConfigurationProperty(text);
assertTrue(result.isSuccess());
}
    @Test
    public void testValidateConfigurationPropertyComponentJClouds() throws Exception {
        // camel.component.jclouds.* configuration keys: array-typed option rules —
        // #bean reference or numeric index are valid, anything else is not.
        String text = "camel.component.jclouds.autowiredEnabled=true";
        ConfigurationPropertiesValidationResult result = catalog.validateConfigurationProperty(text);
        assertTrue(result.isSuccess());
        text = "camel.component.jclouds.blobStores=#myStores";
        result = catalog.validateConfigurationProperty(text);
        assertTrue(result.isSuccess());
        // A bare scalar is not a valid value for an array option.
        text = "camel.component.jclouds.blobStores=foo";
        result = catalog.validateConfigurationProperty(text);
        assertFalse(result.isSuccess());
        assertTrue(result.getInvalidArray().containsKey("camel.component.jclouds.blobStores"));
        // Numeric indexes into the array are accepted.
        text = "camel.component.jclouds.blobStores[0]=foo";
        result = catalog.validateConfigurationProperty(text);
        assertTrue(result.isSuccess());
        text = "camel.component.jclouds.blobStores[1]=bar";
        result = catalog.validateConfigurationProperty(text);
        assertTrue(result.isSuccess());
        // A non-numeric index is reported as an invalid integer.
        text = "camel.component.jclouds.blobStores[foo]=123";
        result = catalog.validateConfigurationProperty(text);
        assertFalse(result.isSuccess());
        assertEquals("foo", result.getInvalidInteger().get("camel.component.jclouds.blobStores[foo]"));
        // Nested keys under a numeric index are accepted.
        text = "camel.component.jclouds.blobStores[0].beer=yes";
        result = catalog.validateConfigurationProperty(text);
        assertTrue(result.isSuccess());
        text = "camel.component.jclouds.blobStores[1].drink=no";
        result = catalog.validateConfigurationProperty(text);
        assertTrue(result.isSuccess());
        // Nested keys under a non-numeric index still fail on the index itself.
        text = "camel.component.jclouds.blobStores[foo].beer=yes";
        result = catalog.validateConfigurationProperty(text);
        assertFalse(result.isSuccess());
        assertEquals("foo", result.getInvalidInteger().get("camel.component.jclouds.blobStores[foo].beer"));
    }
    @Test
    public void testValidateConfigurationPropertyMain() throws Exception {
        // camel.main.* (plus resilience4j/faulttolerance/lra/threadpool/health)
        // configuration keys: whitespace tolerance around '=', and per-type
        // invalid-value reporting (boolean, integer, number).
        String text = "camel.main.allow-use-original-message=true";
        ConfigurationPropertiesValidationResult result = catalog.validateConfigurationProperty(text);
        assertTrue(result.isSuccess());
        // spaces around
        text = "camel.main.allow-use-original-message = true";
        result = catalog.validateConfigurationProperty(text);
        assertTrue(result.isSuccess());
        text = "camel.main.allow-use-original-message= true";
        result = catalog.validateConfigurationProperty(text);
        assertTrue(result.isSuccess());
        text = "camel.main.allow-use-original-message =true";
        result = catalog.validateConfigurationProperty(text);
        assertTrue(result.isSuccess());
        // NOTE(review): this looks identical to the earlier " = " case — possibly
        // the original used extra/multiple spaces here; confirm against history.
        text = "camel.main.allow-use-original-message = true";
        result = catalog.validateConfigurationProperty(text);
        assertTrue(result.isSuccess());
        // Non-boolean value for a boolean option is flagged.
        text = "camel.main.allow-use-original-message=abc";
        result = catalog.validateConfigurationProperty(text);
        assertFalse(result.isSuccess());
        assertEquals("abc", result.getInvalidBoolean().get("camel.main.allow-use-original-message"));
        // Unknown option name is flagged with the full key.
        text = "camel.main.foo=abc";
        result = catalog.validateConfigurationProperty(text);
        assertFalse(result.isSuccess());
        assertTrue(result.getUnknown().contains("camel.main.foo"));
        text = "camel.resilience4j.minimum-number-of-calls=123";
        result = catalog.validateConfigurationProperty(text);
        assertTrue(result.isSuccess());
        text = "camel.resilience4j.minimum-number-of-calls=abc";
        result = catalog.validateConfigurationProperty(text);
        assertFalse(result.isSuccess());
        assertEquals("abc", result.getInvalidInteger().get("camel.resilience4j.minimum-number-of-calls"));
        text = "camel.resilience4j.slow-call-rate-threshold=12.5";
        result = catalog.validateConfigurationProperty(text);
        assertTrue(result.isSuccess());
        // A malformed decimal is reported as an invalid number (not integer).
        text = "camel.resilience4j.slow-call-rate-threshold=12x5";
        result = catalog.validateConfigurationProperty(text);
        assertFalse(result.isSuccess());
        assertEquals("12x5", result.getInvalidNumber().get("camel.resilience4j.slow-call-rate-threshold"));
        text = "camel.faulttolerance.timeoutPoolSize=5";
        result = catalog.validateConfigurationProperty(text);
        assertTrue(result.isSuccess());
        text = "camel.lra.coordinatorUrl=foobar";
        result = catalog.validateConfigurationProperty(text);
        assertTrue(result.isSuccess());
        text = "camel.threadpool.maxQueueSize=123";
        result = catalog.validateConfigurationProperty(text);
        assertTrue(result.isSuccess());
        text = "camel.threadpool.maxQueueSize=12x5";
        result = catalog.validateConfigurationProperty(text);
        assertFalse(result.isSuccess());
        assertEquals("12x5", result.getInvalidInteger().get("camel.threadpool.maxQueueSize"));
        text = "camel.health.routesEnabled=abc";
        result = catalog.validateConfigurationProperty(text);
        assertFalse(result.isSuccess());
        assertEquals("abc", result.getInvalidBoolean().get("camel.health.routesEnabled"));
    }
@Test
public void testValidateConfigurationPropertyMainMap() throws Exception {
String text = "camel.rest.api-properties=#foo";
ConfigurationPropertiesValidationResult result = catalog.validateConfigurationProperty(text);
assertTrue(result.isSuccess());
text = "camel.rest.api-properties=bar";
result = catalog.validateConfigurationProperty(text);
assertFalse(result.isSuccess());
assertEquals("bar", result.getInvalidMap().get("camel.rest.api-properties"));
text = "camel.rest.api-properties.foo=abc";
result = catalog.validateConfigurationProperty(text);
assertTrue(result.isSuccess());
text = "camel.rest.api-properties.bar=123";
result = catalog.validateConfigurationProperty(text);
assertTrue(result.isSuccess());
text = "camel.rest.api-properties.beer=yes";
result = catalog.validateConfigurationProperty(text);
assertTrue(result.isSuccess());
text = "camel.rest.api-properties[drink]=no";
result = catalog.validateConfigurationProperty(text);
assertTrue(result.isSuccess());
}
@Test
public void validateEnvVariableInSyntax() throws Exception {
EndpointValidationResult result
= catalog.validateEndpointProperties("netty-http:http://foo-bar.{{env:NAMESPACE}}.svc.cluster.local/samples");
assertTrue(result.isSuccess());
result = catalog.validateEndpointProperties("netty-http:http://foo-bar/?requestTimeout={{env:TIMEOUT}}");
assertTrue(result.isSuccess());
}
}
| |
/*******************************************************************************
* Copyright FUJITSU LIMITED 2017
*******************************************************************************/
package org.oscm.integrationtests.mockproduct;
import java.util.List;
import javax.annotation.Resource;
import javax.jws.WebService;
import javax.servlet.ServletContext;
import javax.servlet.ServletRequest;
import javax.xml.ws.WebServiceContext;
import javax.xml.ws.handler.MessageContext;
import org.oscm.integrationtests.mockproduct.RequestLogEntry.RequestDirection;
import org.oscm.integrationtests.mockproduct.i18n.Messages;
import org.oscm.provisioning.data.BaseResult;
import org.oscm.provisioning.data.InstanceInfo;
import org.oscm.provisioning.data.InstanceRequest;
import org.oscm.provisioning.data.InstanceResult;
import org.oscm.provisioning.data.ServiceAttribute;
import org.oscm.provisioning.data.ServiceParameter;
import org.oscm.provisioning.data.User;
import org.oscm.provisioning.data.UserResult;
import org.oscm.provisioning.intf.ProvisioningService;
/**
* This is a stub implementation of the {@link ProvisioningService}
*
* @author pock
*/
@WebService(serviceName = "ProvisioningService", targetNamespace = "http://oscm.org/xsd", portName = "ProvisioningServicePort", endpointInterface = "org.oscm.provisioning.intf.ProvisioningService", wsdlLocation = "ProvisioningService.wsdl")
public class ProvisioningServiceBean implements ProvisioningService {
    // JAX-WS context injected by the container; used to reach the servlet
    // request/context for request logging.
    @Resource
    private WebServiceContext context;
    // Provisioning API return code signalling success.
    private static final int RETURN_CODE_OK = 0;
private <T extends BaseResult> T setOk(T result) {
result.setRc(RETURN_CODE_OK);
result.setDesc("Ok");
return result;
}
private <T extends BaseResult> T setOk(T result, String message) {
result.setRc(RETURN_CODE_OK);
result.setDesc(message);
return result;
}
private BaseResult getBaseResultOk() {
return setOk(new BaseResult());
}
private BaseResult getBaseResultOk(String message) {
return setOk(new BaseResult(), message);
}
private RequestLogEntry createLogEntry(String title) {
final ServletContext servletContext = (ServletContext) context
.getMessageContext().get(MessageContext.SERVLET_CONTEXT);
final RequestLog log = (RequestLog) servletContext
.getAttribute(InitServlet.REQUESTLOG);
final RequestLogEntry entry = log.createEntry(
ProvisioningService.class.getSimpleName() + "." + title,
RequestDirection.INBOUND);
ServletRequest request = (ServletRequest) context.getMessageContext()
.get(MessageContext.SERVLET_REQUEST);
entry.setHost(request.getRemoteHost());
return entry;
}
@Override
public BaseResult asyncCreateInstance(InstanceRequest request,
User requestingUser) {
final RequestLogEntry entry = createLogEntry("asyncCreateInstance");
entry.addParameter("request", request);
entry.addParameter("requestingUser", requestingUser);
final QuickLink link1 = entry.addQuickLink("abort",
"SubscriptionService.abortAsyncSubscription");
link1.addParameter("subscriptionId", request.getSubscriptionId());
link1.addParameter("organizationId", request.getOrganizationId());
final QuickLink link2 = entry.addQuickLink("progress",
"SubscriptionService.updateAsyncSubscriptionProgress");
link2.addParameter("subscriptionId", request.getSubscriptionId());
link2.addParameter("organizationId", request.getOrganizationId());
final QuickLink link3 = entry.addQuickLink("complete",
"SubscriptionService.completeAsyncSubscription");
link3.addParameter("subscriptionId", request.getSubscriptionId());
link3.addParameter("organizationId", request.getOrganizationId());
String message = Messages.get(requestingUser.getLocale(),
"info.subscription.async.created");
return getBaseResultOk(message);
}
@Override
public InstanceResult createInstance(InstanceRequest request,
User requestingUser) {
final RequestLogEntry entry = createLogEntry("createInstance");
entry.addParameter("request", request);
entry.addParameter("requestingUser", requestingUser);
InstanceInfo instance = new InstanceInfo();
instance.setInstanceId(request.getSubscriptionId());
instance.setAccessInfo(null);
InstanceResult result = new InstanceResult();
result.setInstance(instance);
String message = Messages.get(requestingUser.getLocale(),
"info.subscription.created");
setOk(result, message);
return result;
}
@Override
public UserResult createUsers(String instanceId, List<User> users,
User requestingUser) {
final RequestLogEntry entry = createLogEntry("createUsers");
entry.addParameter("instanceId", instanceId);
entry.addParameter("users", users);
entry.addParameter("requestingUser", requestingUser);
UserResult result = new UserResult();
for (User user : users) {
user.setApplicationUserId(user.getUserId());
}
result.setUsers(users);
setOk(result);
return result;
}
@Override
public BaseResult deleteInstance(String instanceId, String organizationId,
String subscriptionId, User requestingUser) {
final RequestLogEntry entry = createLogEntry("deleteInstance");
entry.addParameter("instanceId", instanceId);
entry.addParameter("organizationId", organizationId);
entry.addParameter("subscriptionId", subscriptionId);
entry.addParameter("requestingUser", requestingUser);
return getBaseResultOk();
}
@Override
public BaseResult deleteUsers(String instanceId, List<User> users,
User requestingUser) {
final RequestLogEntry entry = createLogEntry("deleteUsers");
entry.addParameter("instanceId", instanceId);
entry.addParameter("users", users);
entry.addParameter("requestingUser", requestingUser);
return getBaseResultOk();
}
@Override
public String sendPing(String arg) {
final RequestLogEntry entry = createLogEntry("sendPing");
entry.addParameter("arg", arg);
return arg;
}
@Override
public BaseResult modifySubscription(String instanceId,
String subscriptionId, String referenceId,
List<ServiceParameter> parameterValues,
List<ServiceAttribute> attributeValues, User requestingUser) {
final RequestLogEntry entry = createLogEntry("modifySubscription");
entry.addParameter("instanceId", instanceId);
entry.addParameter("subscriptionId", subscriptionId);
entry.addParameter("referenceId", referenceId);
entry.addParameter("parameterValues", parameterValues);
entry.addParameter("attributeValues", attributeValues);
entry.addParameter("requestingUser", requestingUser);
return getBaseResultOk();
}
@Override
public BaseResult updateUsers(String instanceId, List<User> users,
User requestingUser) {
final RequestLogEntry entry = createLogEntry("updateUsers");
entry.addParameter("instanceId", instanceId);
entry.addParameter("users", users);
entry.addParameter("requestingUser", requestingUser);
return getBaseResultOk();
}
@Override
public BaseResult activateInstance(String instanceId, User requestingUser) {
final RequestLogEntry entry = createLogEntry("activateInstance");
entry.addParameter("instanceId", instanceId);
entry.addParameter("requestingUser", requestingUser);
return getBaseResultOk();
}
@Override
public BaseResult deactivateInstance(String instanceId,
User requestingUser) {
final RequestLogEntry entry = createLogEntry("deactivateInstance");
entry.addParameter("instanceId", instanceId);
entry.addParameter("requestingUser", requestingUser);
return getBaseResultOk();
}
@Override
public BaseResult asyncModifySubscription(String instanceId,
String subscriptionId, String referenceId,
List<ServiceParameter> parameterValues,
List<ServiceAttribute> attributeValues, User requestingUser) {
final RequestLogEntry entry = createLogEntry("asyncModifySubscription");
entry.addParameter("instanceId", instanceId);
entry.addParameter("subscriptionId", subscriptionId);
entry.addParameter("referenceId", referenceId);
entry.addParameter("parameterValues", parameterValues);
entry.addParameter("attributeValues", attributeValues);
entry.addParameter("requestingUser", requestingUser);
final QuickLink link1 = entry.addQuickLink("abort",
"SubscriptionService.abortAsyncModifySubscription");
link1.addParameter("subscriptionId", subscriptionId);
link1.addParameter("instanceId", instanceId);
final QuickLink link2 = entry.addQuickLink("complete",
"SubscriptionService.completeAsyncModifySubscription");
link2.addParameter("subscriptionId", subscriptionId);
link2.addParameter("instanceId", instanceId);
return getBaseResultOk();
}
@Override
public BaseResult asyncUpgradeSubscription(String instanceId,
String subscriptionId, String referenceId,
List<ServiceParameter> parameterValues,
List<ServiceAttribute> attributeValues, User requestingUser) {
final RequestLogEntry entry = createLogEntry(
"asyncUpgradeSubscription");
entry.addParameter("instanceId", instanceId);
entry.addParameter("subscriptionId", subscriptionId);
entry.addParameter("referenceId", referenceId);
entry.addParameter("parameterValues", parameterValues);
entry.addParameter("attributeValues", attributeValues);
entry.addParameter("requestingUser", requestingUser);
final QuickLink link1 = entry.addQuickLink("abort",
"SubscriptionService.abortAsyncUpgradeSubscription");
link1.addParameter("subscriptionId", subscriptionId);
link1.addParameter("instanceId", instanceId);
final QuickLink link2 = entry.addQuickLink("complete",
"SubscriptionService.completeAsyncUpgradeSubscription");
link2.addParameter("subscriptionId", subscriptionId);
link2.addParameter("instanceId", instanceId);
return getBaseResultOk();
}
@Override
public BaseResult upgradeSubscription(String instanceId,
String subscriptionId, String referenceId,
List<ServiceParameter> parameterValues,
List<ServiceAttribute> attributeValues, User requestingUser) {
final RequestLogEntry entry = createLogEntry("upgradeSubscription");
entry.addParameter("instanceId", instanceId);
entry.addParameter("subscriptionId", subscriptionId);
entry.addParameter("referenceId", referenceId);
entry.addParameter("parameterValues", parameterValues);
entry.addParameter("attributeValues", attributeValues);
entry.addParameter("requestingUser", requestingUser);
return getBaseResultOk();
}
@Override
public BaseResult saveAttributes(String organizationId,
List<ServiceAttribute> attributeValues, User requestingUser) {
final RequestLogEntry entry = createLogEntry("saveAttributes");
entry.addParameter("organizationId", organizationId);
entry.addParameter("attributeValues", attributeValues);
entry.addParameter("requestingUser", requestingUser);
return getBaseResultOk();
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.metrics;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.DoublePoint;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FloatPoint;
import org.apache.lucene.document.IntPoint;
import org.apache.lucene.document.LongPoint;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.document.SortedNumericDocValuesField;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.NoMergePolicy;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.DocValuesFieldExistsQuery;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.Directory;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.NumberFieldMapper;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorTestCase;
import org.elasticsearch.search.aggregations.support.FieldContext;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.function.DoubleConsumer;
import java.util.function.Function;
import java.util.function.Supplier;
import static org.hamcrest.Matchers.equalTo;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
 * Unit tests for the {@code min} metric aggregator: single- and multi-valued
 * doc values, missing fields, empty indices, and the BKD-points "shortcut"
 * ({@code MinAggregator.getPointReaderOrNull}/{@code findLeafMinValue}) that
 * reads the leaf minimum straight from the points index.
 */
public class MinAggregatorTests extends AggregatorTestCase {

    /** Single-valued numeric doc values over {9,7,5,3,1,-1}: min must be -1. */
    public void testMinAggregator_numericDv() throws Exception {
        Directory directory = newDirectory();
        RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);
        Document document = new Document();
        document.add(new NumericDocValuesField("number", 9));
        document.add(new LongPoint("number", 9));
        indexWriter.addDocument(document);
        document = new Document();
        document.add(new NumericDocValuesField("number", 7));
        document.add(new LongPoint("number", 7));
        indexWriter.addDocument(document);
        document = new Document();
        document.add(new NumericDocValuesField("number", 5));
        document.add(new LongPoint("number", 5));
        indexWriter.addDocument(document);
        document = new Document();
        document.add(new NumericDocValuesField("number", 3));
        document.add(new LongPoint("number", 3));
        indexWriter.addDocument(document);
        document = new Document();
        document.add(new NumericDocValuesField("number", 1));
        document.add(new LongPoint("number", 1));
        indexWriter.addDocument(document);
        document = new Document();
        document.add(new NumericDocValuesField("number", -1));
        document.add(new LongPoint("number", -1));
        indexWriter.addDocument(document);
        indexWriter.close();
        IndexReader indexReader = DirectoryReader.open(directory);
        IndexSearcher indexSearcher = newSearcher(indexReader, true, true);
        MinAggregationBuilder aggregationBuilder = new MinAggregationBuilder("_name").field("number");
        MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG);
        fieldType.setName("number");
        // Exercises both the MatchAllDocsQuery and DocValuesFieldExistsQuery paths.
        testMinCase(indexSearcher, aggregationBuilder, fieldType, min -> assertEquals(-1.0d, min, 0));
        // Also drive the aggregator lifecycle by hand for the match-all case.
        MinAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType);
        aggregator.preCollection();
        indexSearcher.search(new MatchAllDocsQuery(), aggregator);
        aggregator.postCollection();
        InternalMin result = (InternalMin) aggregator.buildAggregation(0L);
        assertEquals(-1.0, result.getValue(), 0);
        indexReader.close();
        directory.close();
    }

    /** Multi-valued (sorted numeric) doc values: min across every value is -1. */
    public void testMinAggregator_sortedNumericDv() throws Exception {
        Directory directory = newDirectory();
        RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);
        Document document = new Document();
        document.add(new SortedNumericDocValuesField("number", 9));
        document.add(new SortedNumericDocValuesField("number", 7));
        document.add(new LongPoint("number", 9));
        document.add(new LongPoint("number", 7));
        indexWriter.addDocument(document);
        document = new Document();
        document.add(new SortedNumericDocValuesField("number", 5));
        document.add(new SortedNumericDocValuesField("number", 3));
        document.add(new LongPoint("number", 5));
        document.add(new LongPoint("number", 3));
        indexWriter.addDocument(document);
        document = new Document();
        document.add(new SortedNumericDocValuesField("number", 1));
        document.add(new SortedNumericDocValuesField("number", -1));
        document.add(new LongPoint("number", 1));
        document.add(new LongPoint("number", -1));
        indexWriter.addDocument(document);
        indexWriter.close();
        IndexReader indexReader = DirectoryReader.open(directory);
        IndexSearcher indexSearcher = newSearcher(indexReader, true, true);
        MinAggregationBuilder aggregationBuilder = new MinAggregationBuilder("_name").field("number");
        MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG);
        fieldType.setName("number");
        MinAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType);
        aggregator.preCollection();
        indexSearcher.search(new MatchAllDocsQuery(), aggregator);
        aggregator.postCollection();
        InternalMin result = (InternalMin) aggregator.buildAggregation(0L);
        assertEquals(-1.0, result.getValue(), 0);
        indexReader.close();
        directory.close();
    }

    /**
     * Aggregating on "number2" while only "number1" is indexed: no value is
     * ever collected, so the result stays at +Infinity (the identity for min).
     */
    public void testMinAggregator_noValue() throws Exception {
        Directory directory = newDirectory();
        RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);
        Document document = new Document();
        document.add(new SortedNumericDocValuesField("number1", 7));
        indexWriter.addDocument(document);
        document = new Document();
        document.add(new SortedNumericDocValuesField("number1", 3));
        indexWriter.addDocument(document);
        document = new Document();
        document.add(new SortedNumericDocValuesField("number1", 1));
        indexWriter.addDocument(document);
        indexWriter.close();
        IndexReader indexReader = DirectoryReader.open(directory);
        IndexSearcher indexSearcher = newSearcher(indexReader, true, true);
        MinAggregationBuilder aggregationBuilder = new MinAggregationBuilder("_name").field("number2");
        MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG);
        fieldType.setName("number2");
        MinAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType);
        aggregator.preCollection();
        indexSearcher.search(new MatchAllDocsQuery(), aggregator);
        aggregator.postCollection();
        InternalMin result = (InternalMin) aggregator.buildAggregation(0L);
        assertEquals(Double.POSITIVE_INFINITY, result.getValue(), 0);
        indexReader.close();
        directory.close();
    }

    /** Completely empty index: the min likewise stays at +Infinity. */
    public void testMinAggregator_noDocs() throws Exception {
        Directory directory = newDirectory();
        RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);
        indexWriter.close();
        IndexReader indexReader = DirectoryReader.open(directory);
        IndexSearcher indexSearcher = newSearcher(indexReader, true, true);
        MinAggregationBuilder aggregationBuilder = new MinAggregationBuilder("_name").field("number");
        MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG);
        fieldType.setName("number");
        MinAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType);
        aggregator.preCollection();
        indexSearcher.search(new MatchAllDocsQuery(), aggregator);
        aggregator.postCollection();
        InternalMin result = (InternalMin) aggregator.buildAggregation(0L);
        assertEquals(Double.POSITIVE_INFINITY, result.getValue(), 0);
        indexReader.close();
        directory.close();
    }

    /**
     * The points shortcut is only applicable when the query matches all
     * documents (match-all or null), there is no parent aggregator, and the
     * field is indexed with points — for every numeric type and for dates.
     */
    public void testShortcutIsApplicable() {
        for (NumberFieldMapper.NumberType type : NumberFieldMapper.NumberType.values()) {
            assertNotNull(
                MinAggregator.getPointReaderOrNull(
                    mockSearchContext(new MatchAllDocsQuery()),
                    null,
                    mockNumericValuesSourceConfig("number", type, true)
                )
            );
            assertNotNull(
                MinAggregator.getPointReaderOrNull(
                    mockSearchContext(null),
                    null,
                    mockNumericValuesSourceConfig("number", type, true)
                )
            );
            // A parent aggregator disables the shortcut.
            assertNull(
                MinAggregator.getPointReaderOrNull(
                    mockSearchContext(null),
                    mockAggregator(),
                    mockNumericValuesSourceConfig("number", type, true)
                )
            );
            // A restrictive query disables the shortcut.
            assertNull(
                MinAggregator.getPointReaderOrNull(
                    mockSearchContext(new TermQuery(new Term("foo", "bar"))),
                    null,
                    mockNumericValuesSourceConfig("number", type, true)
                )
            );
            // NOTE(review): this assertion duplicates the (null query + parent
            // aggregator) case already checked above — candidate for removal.
            assertNull(
                MinAggregator.getPointReaderOrNull(
                    mockSearchContext(null),
                    mockAggregator(),
                    mockNumericValuesSourceConfig("number", type, true)
                )
            );
            // An unindexed field (no points) disables the shortcut.
            assertNull(
                MinAggregator.getPointReaderOrNull(
                    mockSearchContext(null),
                    null,
                    mockNumericValuesSourceConfig("number", type, false)
                )
            );
        }
        // Same applicability rules for date fields.
        assertNotNull(
            MinAggregator.getPointReaderOrNull(
                mockSearchContext(new MatchAllDocsQuery()),
                null,
                mockDateValuesSourceConfig("number", true)
            )
        );
        assertNull(
            MinAggregator.getPointReaderOrNull(
                mockSearchContext(new MatchAllDocsQuery()),
                mockAggregator(),
                mockDateValuesSourceConfig("number", true)
            )
        );
        assertNull(
            MinAggregator.getPointReaderOrNull(
                mockSearchContext(new TermQuery(new Term("foo", "bar"))),
                null,
                mockDateValuesSourceConfig("number", true)
            )
        );
        assertNull(
            MinAggregator.getPointReaderOrNull(
                mockSearchContext(null),
                mockAggregator(),
                mockDateValuesSourceConfig("number", true)
            )
        );
        assertNull(
            MinAggregator.getPointReaderOrNull(
                mockSearchContext(null),
                null,
                mockDateValuesSourceConfig("number", false)
            )
        );
    }

    /** Randomized shortcut check for long, int, float and double point types. */
    public void testMinShortcutRandom() throws Exception {
        testMinShortcutCase(
            () -> randomLongBetween(Integer.MIN_VALUE, Integer.MAX_VALUE),
            (n) -> new LongPoint("number", n.longValue()),
            (v) -> LongPoint.decodeDimension(v, 0));
        testMinShortcutCase(
            () -> randomInt(),
            (n) -> new IntPoint("number", n.intValue()),
            (v) -> IntPoint.decodeDimension(v, 0));
        testMinShortcutCase(
            () -> randomFloat(),
            (n) -> new FloatPoint("number", n.floatValue()),
            (v) -> FloatPoint.decodeDimension(v, 0));
        testMinShortcutCase(
            () -> randomDouble(),
            (n) -> new DoublePoint("number", n.doubleValue()),
            (v) -> DoublePoint.decodeDimension(v, 0));
    }

    /**
     * Runs the aggregation under both a match-all and a field-exists query and
     * hands each resulting min to {@code testResult} for verification.
     */
    private void testMinCase(IndexSearcher searcher,
                            AggregationBuilder aggregationBuilder,
                            MappedFieldType ft,
                            DoubleConsumer testResult) throws IOException {
        Collection<Query> queries = Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery(ft.name()));
        for (Query query : queries) {
            MinAggregator aggregator = createAggregator(query, aggregationBuilder, searcher, createIndexSettings(), ft);
            aggregator.preCollection();
            searcher.search(new MatchAllDocsQuery(), aggregator);
            aggregator.postCollection();
            InternalMin result = (InternalMin) aggregator.buildAggregation(0L);
            testResult.accept(result.getValue());
        }
    }

    /**
     * Indexes random point values (single segment, no merges), then repeatedly
     * deletes the current minimum and checks that {@code findLeafMinValue}
     * tracks the next-smallest live value, ending with null when none remain.
     */
    private void testMinShortcutCase(Supplier<Number> randomNumber,
                                     Function<Number, Field> pointFieldFunc,
                                     Function<byte[], Number> pointConvertFunc) throws IOException {
        Directory directory = newDirectory();
        // NoMergePolicy keeps deletes visible in a single leaf for the check below.
        IndexWriterConfig config = newIndexWriterConfig().setMergePolicy(NoMergePolicy.INSTANCE);
        IndexWriter indexWriter = new IndexWriter(directory, config);
        List<Document> documents = new ArrayList<>();
        List<Tuple<Integer, Number>> values = new ArrayList<>();
        int numValues = atLeast(50);
        int docID = 0;
        for (int i = 0; i < numValues; i++) {
            int numDup = randomIntBetween(1, 3);
            for (int j = 0; j < numDup; j++) {
                Document document = new Document();
                Number nextValue = randomNumber.get();
                values.add(new Tuple<>(docID, nextValue));
                document.add(new StringField("id", Integer.toString(docID), Field.Store.NO));
                // Added twice: the shortcut must cope with multi-valued points.
                document.add(pointFieldFunc.apply(nextValue));
                document.add(pointFieldFunc.apply(nextValue));
                documents.add(document);
                docID ++;
            }
        }
        // insert some documents without a value for the metric field.
        for (int i = 0; i < 3; i++) {
            Document document = new Document();
            documents.add(document);
        }
        indexWriter.addDocuments(documents);
        Collections.sort(values, Comparator.comparingDouble(t -> t.v2().doubleValue()));
        try (IndexReader reader = DirectoryReader.open(indexWriter)) {
            LeafReaderContext ctx = reader.leaves().get(0);
            Number res = MinAggregator.findLeafMinValue(ctx.reader(), "number", pointConvertFunc);
            assertThat(res, equalTo(values.get(0).v2()));
        }
        // Delete the current minimum one at a time; the leaf min must advance.
        for (int i = 1; i < values.size(); i++) {
            indexWriter.deleteDocuments(new Term("id", values.get(i-1).v1().toString()));
            try (IndexReader reader = DirectoryReader.open(indexWriter)) {
                LeafReaderContext ctx = reader.leaves().get(0);
                Number res = MinAggregator.findLeafMinValue(ctx.reader(), "number", pointConvertFunc);
                assertThat(res, equalTo(values.get(i).v2()));
            }
        }
        // With every valued document deleted, no leaf minimum exists.
        indexWriter.deleteDocuments(new Term("id", values.get(values.size()-1).v1().toString()));
        try (IndexReader reader = DirectoryReader.open(indexWriter)) {
            LeafReaderContext ctx = reader.leaves().get(0);
            Number res = MinAggregator.findLeafMinValue(ctx.reader(), "number", pointConvertFunc);
            assertThat(res, equalTo(null));
        }
        indexWriter.close();
        directory.close();
    }

    /** Mocks a SearchContext whose top-level query is {@code query}. */
    private SearchContext mockSearchContext(Query query) {
        SearchContext searchContext = mock(SearchContext.class);
        when(searchContext.query()).thenReturn(query);
        return searchContext;
    }

    /** Mocks a parent aggregator (presence alone disables the shortcut). */
    private Aggregator mockAggregator() {
        return mock(Aggregator.class);
    }

    /** Mocks a numeric values-source config, indexed with points or not. */
    private ValuesSourceConfig<ValuesSource.Numeric> mockNumericValuesSourceConfig(String fieldName,
                                                                                  NumberFieldMapper.NumberType numType,
                                                                                  boolean indexed) {
        ValuesSourceConfig<ValuesSource.Numeric> config = mock(ValuesSourceConfig.class);
        MappedFieldType ft = new NumberFieldMapper.NumberFieldType(numType);
        ft.setName(fieldName);
        ft.setIndexOptions(indexed ? IndexOptions.DOCS : IndexOptions.NONE);
        ft.freeze();
        when(config.fieldContext()).thenReturn(new FieldContext(fieldName, null, ft));
        return config;
    }

    /** Mocks a date values-source config, indexed with points or not. */
    private ValuesSourceConfig<ValuesSource.Numeric> mockDateValuesSourceConfig(String fieldName, boolean indexed) {
        ValuesSourceConfig<ValuesSource.Numeric> config = mock(ValuesSourceConfig.class);
        MappedFieldType ft = new DateFieldMapper.Builder(fieldName).fieldType();
        ft.setName(fieldName);
        ft.setIndexOptions(indexed ? IndexOptions.DOCS : IndexOptions.NONE);
        ft.freeze();
        when(config.fieldContext()).thenReturn(new FieldContext(fieldName, null, ft));
        return config;
    }
}
| |
package org.jaudiotagger.audio.mp3;
/**
* @author : Paul Taylor
* @author : Eric Farng
*
* Version @version:$Id$
*
* MusicTag Copyright (C)2003,2004
*
* This library is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser
* General Public License as published by the Free Software Foundation; either version 2.1 of the License,
* or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even
* the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License along with this library; if not,
* you can get a copy from http://www.opensource.org/licenses/lgpl-license.php or write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*
*/
import org.jaudiotagger.audio.AudioFile;
import org.jaudiotagger.audio.exceptions.CannotReadException;
import org.jaudiotagger.audio.exceptions.CannotWriteException;
import org.jaudiotagger.audio.exceptions.InvalidAudioFrameException;
import org.jaudiotagger.audio.exceptions.NoWritePermissionsException;
import org.jaudiotagger.audio.exceptions.ReadOnlyFileException;
import org.jaudiotagger.audio.exceptions.UnableToModifyFileException;
import org.jaudiotagger.audio.generic.Permissions;
import org.jaudiotagger.logging.*;
import org.jaudiotagger.tag.Tag;
import org.jaudiotagger.tag.TagException;
import org.jaudiotagger.tag.TagNotFoundException;
import org.jaudiotagger.tag.TagOptionSingleton;
import org.jaudiotagger.tag.id3.*;
import org.jaudiotagger.tag.lyrics3.AbstractLyrics3;
import org.jaudiotagger.tag.reference.ID3V2Version;
import java.io.*;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.logging.Level;
/**
* This class represents a physical MP3 File
*/
public class MP3File extends AudioFile
{
// Files smaller than this (in bytes) cannot contain a valid MP3 frame and
// are rejected early.
private static final int MINIMUM_FILESIZE = 150;

// Shared formatter used when rendering tag contents for display/debugging.
protected static AbstractTagDisplayFormatter tagFormatter;

/**
 * the ID3v2 tag that this file contains.
 */
private AbstractID3v2Tag id3v2tag = null;

/**
 * Representation of the idv2 tag as a idv24 tag
 */
private ID3v24Tag id3v2Asv24tag = null;

/**
 * The Lyrics3 tag that this file contains.
 */
private AbstractLyrics3 lyrics3tag = null;

/**
 * The ID3v1 tag that this file contains.
 */
private ID3v1Tag id3v1tag = null;
/**
* Creates a new empty MP3File datatype that is not associated with a
* specific file.
*/
public MP3File()
{
    // Intentionally empty: no file is associated; tags are attached later.
}
/**
* Creates a new MP3File datatype and parse the tag from the given filename.
*
* @param filename MP3 file
* @throws IOException on any I/O error
* @throws TagException on any exception generated by this library.
* @throws org.jaudiotagger.audio.exceptions.ReadOnlyFileException
* @throws org.jaudiotagger.audio.exceptions.InvalidAudioFrameException
*/
public MP3File(String filename) throws IOException, TagException, ReadOnlyFileException, CannotReadException, InvalidAudioFrameException
{
    // Delegates to the File-based constructor with default load options.
    this(new File(filename));
}
/* Load ID3V1tag if exists */
public static final int LOAD_IDV1TAG = 2;
/* Load ID3V2tag if exists */
public static final int LOAD_IDV2TAG = 4;
/**
* This option is currently ignored
*/
public static final int LOAD_LYRICS3 = 8;
public static final int LOAD_ALL = LOAD_IDV1TAG | LOAD_IDV2TAG | LOAD_LYRICS3;
/**
* Creates a new MP3File dataType and parse the tag from the given file
* Object, files must be writable to use this constructor.
*
* @param file MP3 file
* @param loadOptions decide what tags to load
* @throws IOException on any I/O error
* @throws TagException on any exception generated by this library.
* @throws org.jaudiotagger.audio.exceptions.ReadOnlyFileException
* @throws org.jaudiotagger.audio.exceptions.InvalidAudioFrameException
*/
public MP3File(File file, int loadOptions) throws IOException, TagException, ReadOnlyFileException, CannotReadException, InvalidAudioFrameException
{
    // Delegates with readOnly=false, i.e. the file must be writable.
    this(file, loadOptions, false);
}
/**
* Read v1 tag
*
* @param file
* @param newFile
* @param loadOptions
* @throws IOException
*/
/**
 * Reads the ID3v1 tag from the end of the file, if requested by the load
 * options. Tries the extended ID3v1.1 format first and falls back to plain
 * ID3v1; leaves {@code id3v1tag} null when neither is found.
 *
 * @param file        source file (used only for its name in tag construction)
 * @param newFile     open random-access handle positioned by the tag readers
 * @param loadOptions bitmask of LOAD_* flags
 * @throws IOException on any I/O error while scanning for the tag
 */
private void readV1Tag(File file, RandomAccessFile newFile, int loadOptions) throws IOException
{
    if ((loadOptions & LOAD_IDV1TAG) == 0)
    {
        return;
    }
    logger.finer("Attempting to read id3v1tags");
    try
    {
        // v1.1 is a superset of v1, so probe it first.
        id3v1tag = new ID3v11Tag(newFile, file.getName());
    }
    catch (TagNotFoundException ignored)
    {
        logger.config("No ids3v11 tag found");
    }
    if (id3v1tag == null)
    {
        try
        {
            id3v1tag = new ID3v1Tag(newFile, file.getName());
        }
        catch (TagNotFoundException ignored)
        {
            logger.config("No id3v1 tag found");
        }
    }
}
/**
* Read V2tag if exists
*
* TODO:shouldn't we be handing TagExceptions:when will they be thrown
*
* @param file
* @param loadOptions
* @throws IOException
* @throws TagException
*/
/**
 * Reads the ID3v2 tag from the head of the file, if requested by the load
 * options. The region before the audio start (startByte bytes) is buffered
 * once so tag parsing needs no further file I/O; v2.4, v2.3 and v2.2 formats
 * are probed in that order.
 *
 * TODO:shouldn't we be handing TagExceptions:when will they be thrown
 *
 * @param file        the MP3 file
 * @param loadOptions bitmask of LOAD_* flags
 * @param startByte   offset where the audio starts, i.e. the candidate tag size
 * @throws IOException  on any I/O error
 * @throws TagException on any tag parsing error
 */
private void readV2Tag(File file, int loadOptions, int startByte) throws IOException, TagException
{
    //We know where the actual Audio starts so load all the file from start to that point into
    //a buffer then we can read the IDv2 information without needing any more File I/O
    if (startByte >= AbstractID3v2Tag.TAG_HEADER_LENGTH)
    {
        logger.finer("Attempting to read id3v2tags");
        FileInputStream fis = null;
        FileChannel fc = null;
        ByteBuffer bb;
        try
        {
            fis = new FileInputStream(file);
            fc = fis.getChannel();
            bb = ByteBuffer.allocate(startByte);
            // XXX: don't change it to map
            // https://stackoverflow.com/questions/28378713/bytebuffer-getbyte-int-int-failed-on-android-ics-and-jb
            // NOTE(review): a single positional read may return fewer than
            // startByte bytes — presumably acceptable here since a truncated
            // buffer just yields TagNotFoundException; confirm.
            fc.read(bb, 0);
        }
        finally
        {
            // Close the channel/stream as soon as the region is buffered.
            if (fc != null)
            {
                fc.close();
            }
            if (fis != null)
            {
                fis.close();
            }
        }
        try
        {
            // Reset position so each tag parser sees the buffer from the start.
            bb.rewind();
            if ((loadOptions & LOAD_IDV2TAG) != 0)
            {
                logger.config("Attempting to read id3v2tags");
                // Probe newest format first: v2.4, then v2.3, then v2.2.
                try
                {
                    this.setID3v2Tag(new ID3v24Tag(bb, file.getName()));
                }
                catch (TagNotFoundException ex)
                {
                    logger.config("No id3v24 tag found");
                }
                try
                {
                    if (id3v2tag == null)
                    {
                        this.setID3v2Tag(new ID3v23Tag(bb, file.getName()));
                    }
                }
                catch (TagNotFoundException ex)
                {
                    logger.config("No id3v23 tag found");
                }
                try
                {
                    if (id3v2tag == null)
                    {
                        this.setID3v2Tag(new ID3v22Tag(bb, file.getName()));
                    }
                }
                catch (TagNotFoundException ex)
                {
                    logger.config("No id3v22 tag found");
                }
            }
        }
        finally
        {
            //Workaround for 4724038 on Windows
            bb.clear();
            // Direct buffers are not reclaimed promptly by GC; on desktop JVMs
            // release the native memory eagerly via the (internal) cleaner.
            if (bb.isDirect() && !TagOptionSingleton.getInstance().isAndroid())
            {
                // Reflection substitute for following code:
                // ((sun.nio.ch.DirectBuffer) bb).cleaner().clean();
                // which causes exception on Android - Sun NIO classes are not available
                try {
                    Class<?> clazz = Class.forName("sun.nio.ch.DirectBuffer");
                    Method cleanerMethod = clazz.getMethod("cleaner");
                    Object cleaner = cleanerMethod.invoke(bb); // cleaner = bb.cleaner()
                    if (cleaner != null) {
                        Method cleanMethod = cleaner.getClass().getMethod("clean");
                        cleanMethod.invoke(cleaner); // cleaner.clean()
                    }
                } catch (ClassNotFoundException e) {
                    logger.severe("Could not load sun.nio.ch.DirectBuffer.");
                } catch (NoSuchMethodException e) {
                    logger.severe("Could not invoke DirectBuffer method - " + e.getMessage());
                } catch (InvocationTargetException e) {
                    logger.severe("Could not invoke DirectBuffer method - target exception");
                } catch (IllegalAccessException e) {
                    logger.severe("Could not invoke DirectBuffer method - illegal access");
                }
            }
        }
    }
    else
    {
        logger.config("Not enough room for valid id3v2 tag:" + startByte);
    }
}
/**
* Read lyrics3 Tag
*
* TODO:not working
*
* @param file
* @param newFile
* @param loadOptions
* @throws IOException
*/
/**
 * Reads the Lyrics3 tag. Currently a no-op: the implementation is disabled
 * (see the commented body below) and the LOAD_LYRICS3 option is ignored.
 *
 * TODO:not working
 *
 * @param file        the MP3 file
 * @param newFile     open random-access handle
 * @param loadOptions bitmask of LOAD_* flags (LOAD_LYRICS3 currently ignored)
 * @throws IOException declared for interface symmetry; never thrown today
 */
private void readLyrics3Tag(File file, RandomAccessFile newFile, int loadOptions) throws IOException
{
    /*if ((loadOptions & LOAD_LYRICS3) != 0)
    {
        try
        {
            lyrics3tag = new Lyrics3v2(newFile);
        }
        catch (TagNotFoundException ex)
        {
        }
        try
        {
            if (lyrics3tag == null)
            {
                lyrics3tag = new Lyrics3v1(newFile);
            }
        }
        catch (TagNotFoundException ex)
        {
        }
    }
    */
}
/**
 * Checks whether every byte of the file between startByte (inclusive) and endByte
 * (exclusive) is null (zero).
 *
 * <p>Fixes a defect in the original implementation: the buffer was scanned without
 * calling {@code flip()} after the read, so position already equalled limit and the
 * scan loop never executed, making the method return {@code true} unconditionally
 * whenever the read filled the buffer.
 *
 * @param startByte offset of first byte to check
 * @param endByte   offset one past the last byte to check
 * @return true if all the bytes in the file between startByte and endByte are null, false
 *         otherwise
 * @throws IOException if the file cannot be read
 */
private boolean isFilePortionNull(int startByte, int endByte) throws IOException
{
    logger.config("Checking file portion:" + Hex.asHex(startByte) + ":" + Hex.asHex(endByte));
    FileInputStream fis = null;
    FileChannel fc = null;
    try
    {
        fis = new FileInputStream(file);
        fc = fis.getChannel();
        fc.position(startByte);
        ByteBuffer bb = ByteBuffer.allocateDirect(endByte - startByte);
        // A single read() may deliver fewer bytes than requested; keep reading until the
        // buffer is full or EOF is hit.
        while (bb.hasRemaining())
        {
            if (fc.read(bb) == -1)
            {
                break;
            }
        }
        // Switch the buffer from writing to reading before scanning it.
        bb.flip();
        while (bb.hasRemaining())
        {
            if (bb.get() != 0)
            {
                return false;
            }
        }
    }
    finally
    {
        //Workaround for 4724038 on Windows: close channel and stream explicitly
        if (fc != null)
        {
            fc.close();
        }
        if (fis != null)
        {
            fis.close();
        }
    }
    return true;
}
/**
 * Regets the audio header starting from start of file, and write appropriate logging to indicate
 * potential problem to user.
 *
 * <p>Called when the tag header's reported size disagrees with where the first audio frame
 * was actually found. Uses several heuristics (matching frame counts, null padding between
 * the candidates, position of the second frame) to decide which candidate start-of-audio
 * position to trust.
 *
 * @param startByte           start of audio according to the ID3v2 tag header
 * @param firstHeaderAfterTag audio header found by searching after the alleged end of tag
 * @return the audio header judged most likely to be correct
 * @throws IOException on any I/O error
 * @throws InvalidAudioFrameException if no valid audio frame can be found
 */
private MP3AudioHeader checkAudioStart(long startByte, MP3AudioHeader firstHeaderAfterTag) throws IOException, InvalidAudioFrameException
{
    MP3AudioHeader headerOne;
    MP3AudioHeader headerTwo;
    logger.warning(ErrorMessage.MP3_ID3TAG_LENGTH_INCORRECT.getMsg(file.getPath(), Hex.asHex(startByte), Hex.asHex(firstHeaderAfterTag.getMp3StartByte())));
    //because we cant agree on start location we reread the audioheader from the start of the file, at least
    //this way we cant overwrite the audio although we might overwrite part of the tag if we write this file
    //back later
    headerOne = new MP3AudioHeader(file, 0);
    logger.config("Checking from start:" + headerOne);
    //Although the id3 tag size appears to be incorrect at least we have found the same location for the start
    //of audio whether we start searching from start of file or at the end of the alleged of file so no real
    //problem
    if (firstHeaderAfterTag.getMp3StartByte() == headerOne.getMp3StartByte())
    {
        logger.config(ErrorMessage.MP3_START_OF_AUDIO_CONFIRMED.getMsg(file.getPath(),
                Hex.asHex(headerOne.getMp3StartByte())));
        return firstHeaderAfterTag;
    }
    else
    {
        //We get a different value if read from start, can't guarantee 100% correct lets do some more checks
        logger.config((ErrorMessage.MP3_RECALCULATED_POSSIBLE_START_OF_MP3_AUDIO.getMsg(file.getPath(),
                Hex.asHex(headerOne.getMp3StartByte()))));
        //Same frame count so probably both audio headers with newAudioHeader being the first one
        if (firstHeaderAfterTag.getNumberOfFrames() == headerOne.getNumberOfFrames())
        {
            logger.warning((ErrorMessage.MP3_RECALCULATED_START_OF_MP3_AUDIO.getMsg(file.getPath(),
                    Hex.asHex(headerOne.getMp3StartByte()))));
            return headerOne;
        }
        //If the size reported by the tag header is a little short and there is only nulls between the recorded value
        //and the start of the first audio found then we stick with the original header as more likely that currentHeader
        //DataInputStream not really a header
        if(isFilePortionNull((int) startByte,(int) firstHeaderAfterTag.getMp3StartByte()))
        {
            return firstHeaderAfterTag;
        }
        //Skip to the next header (header 2, counting from start of file)
        headerTwo = new MP3AudioHeader(file, headerOne.getMp3StartByte()
                + headerOne.mp3FrameHeader.getFrameLength());
        //It matches the header we found when doing the original search from after the ID3Tag therefore it
        //seems that newAudioHeader was a false match and the original header was correct
        if (headerTwo.getMp3StartByte() == firstHeaderAfterTag.getMp3StartByte())
        {
            logger.warning((ErrorMessage.MP3_START_OF_AUDIO_CONFIRMED.getMsg(file.getPath(),
                    Hex.asHex(firstHeaderAfterTag.getMp3StartByte()))));
            return firstHeaderAfterTag;
        }
        //It matches the frameCount the header we just found so lends weight to the fact that the audio does indeed start at new header
        //however it maybe that neither are really headers and just contain the same data being misrepresented as headers.
        if (headerTwo.getNumberOfFrames() == headerOne.getNumberOfFrames())
        {
            logger.warning((ErrorMessage.MP3_RECALCULATED_START_OF_MP3_AUDIO.getMsg(file.getPath(),
                    Hex.asHex(headerOne.getMp3StartByte()))));
            return headerOne;
        }
        ///Doesnt match the frameCount lets go back to the original header
        else
        {
            logger.warning((ErrorMessage.MP3_RECALCULATED_START_OF_MP3_AUDIO.getMsg(file.getPath(),
                    Hex.asHex(firstHeaderAfterTag.getMp3StartByte()))));
            return firstHeaderAfterTag;
        }
    }
}
/**
 * Creates a new MP3File dataType and parse the tag from the given file
 * Object, files can be opened read only if required.
 *
 * @param file MP3 file
 * @param loadOptions decide what tags to load
 * @param readOnly causes the files to be opened readonly
 * @throws IOException on any I/O error
 * @throws TagException on any exception generated by this library.
 * @throws org.jaudiotagger.audio.exceptions.ReadOnlyFileException
 * @throws org.jaudiotagger.audio.exceptions.InvalidAudioFrameException
 */
public MP3File(File file, int loadOptions, boolean readOnly) throws IOException, TagException, ReadOnlyFileException, CannotReadException, InvalidAudioFrameException
{
    RandomAccessFile raf = null;
    try
    {
        this.file = file;
        // Verify we may open the file in the requested mode before doing any work.
        raf = checkFilePermissions(file, readOnly);
        // Size of the ID3v2 tag (zero if absent) lets the audio-header scan skip over it.
        long v2TagSize = AbstractID3v2Tag.getV2TagSizeIfExists(file);
        logger.config("TagHeaderSize:" + Hex.asHex(v2TagSize));
        audioHeader = new MP3AudioHeader(file, v2TagSize);
        // When the first audio frame is not immediately after the tag, the reported tag
        // size is suspect, so re-verify the audio start from the beginning of the file.
        if (v2TagSize != ((MP3AudioHeader) audioHeader).getMp3StartByte())
        {
            logger.config("First header found after tag:" + audioHeader);
            audioHeader = checkAudioStart(v2TagSize, (MP3AudioHeader) audioHeader);
        }
        // Read v1 then v2 tags (if any).
        readV1Tag(file, raf, loadOptions);
        readV2Tag(file, loadOptions, (int) ((MP3AudioHeader) audioHeader).getMp3StartByte());
        // Prefer the v2 tag as the active tag, fall back to v1, otherwise leave unset.
        // TODO: if both exist should they be merged?
        // Rather than just returning specific ID3v22 tag, would it be better to return v24 version?
        if (this.getID3v2Tag() != null)
        {
            tag = this.getID3v2Tag();
        }
        else if (id3v1tag != null)
        {
            tag = id3v1tag;
        }
    }
    finally
    {
        if (raf != null)
        {
            raf.close();
        }
    }
}
/**
 * Used by tags when writing to calculate the location of the music file
 *
 * @param file the MP3 file to examine
 * @return the location within the file that the audio starts
 * @throws java.io.IOException on any I/O error
 * @throws org.jaudiotagger.audio.exceptions.InvalidAudioFrameException if no audio frame found
 */
public long getMP3StartByte(File file) throws InvalidAudioFrameException, IOException
{
    // The original wrapped this body in catch blocks that immediately rethrew the same
    // exception types unchanged; they added nothing and have been removed.
    //Read ID3v2 tag size (if tag exists) to allow audio header parsing to skip over tag
    long startByte = AbstractID3v2Tag.getV2TagSizeIfExists(file);
    MP3AudioHeader audioHeader = new MP3AudioHeader(file, startByte);
    // If the first frame is not straight after the tag, the tag size is suspect, so
    // re-check the audio start from the beginning of the file.
    if (startByte != audioHeader.getMp3StartByte())
    {
        logger.config("First header found after tag:" + audioHeader);
        audioHeader = checkAudioStart(startByte, audioHeader);
    }
    return audioHeader.getMp3StartByte();
}
/**
 * Extracts the raw ID3v2 tag data into a file.
 *
 * This provides access to the raw data before manipulation, the data is written from the start of the file
 * to the start of the Audio Data. This is primarily useful for manipulating corrupted tags that are not
 * (fully) loaded using the standard methods.
 *
 * @param outputFile to write the data to
 * @return the outputFile that was written
 * @throws TagNotFoundException if this file holds no ID3v2 tag data
 * @throws IOException on any I/O error
 */
public File extractID3v2TagDataIntoFile(File outputFile) throws TagNotFoundException, IOException
{
    int startByte = (int) ((MP3AudioHeader) audioHeader).getMp3StartByte();
    if (startByte >= 0)
    {
        FileInputStream fis = null;
        FileChannel fc = null;
        FileOutputStream out = null;
        try
        {
            //Read tag bytes into buffer
            fis = new FileInputStream(file);
            fc = fis.getChannel();
            ByteBuffer bb = ByteBuffer.allocate(startByte);
            // A single read() may deliver fewer bytes than requested; loop until the
            // buffer is full or EOF.
            while (bb.hasRemaining())
            {
                if (fc.read(bb) == -1)
                {
                    break;
                }
            }
            //Write only the bytes actually read to outputFile
            out = new FileOutputStream(outputFile);
            out.write(bb.array(), 0, bb.position());
        }
        finally
        {
            // The original leaked all three handles if any read/write threw.
            if (out != null)
            {
                out.close();
            }
            if (fc != null)
            {
                fc.close();
            }
            if (fis != null)
            {
                fis.close();
            }
        }
        return outputFile;
    }
    throw new TagNotFoundException("There is no ID3v2Tag data in this file");
}
/**
 * Return audio header, narrowed to the MP3-specific type.
 *
 * @return the MP3AudioHeader for this file
 */
public MP3AudioHeader getMP3AudioHeader()
{
    return (MP3AudioHeader) getAudioHeader();
}
/**
 * Whether an <code>Id3v1</code> tag has been loaded for this file.
 *
 * @return true if this datatype contains an <code>Id3v1</code> tag
 */
public boolean hasID3v1Tag()
{
    return id3v1tag != null;
}
/**
 * Whether an <code>Id3v2</code> tag has been loaded for this file.
 *
 * @return true if this datatype contains an <code>Id3v2</code> tag
 */
public boolean hasID3v2Tag()
{
    return id3v2tag != null;
}
/**
* Returns true if this datatype contains a <code>Lyrics3</code> tag
* TODO disabled until Lyrics3 fixed
* @return true if this datatype contains a <code>Lyrics3</code> tag
*/
/*
public boolean hasLyrics3Tag()
{
return (lyrics3tag != null);
}
*/
/**
 * Creates a new MP3File datatype and parse the tag from the given file
 * Object.
 *
 * <p>Convenience constructor: delegates with LOAD_ALL so every supported tag is read.
 *
 * @param file MP3 file
 * @throws IOException on any I/O error
 * @throws TagException on any exception generated by this library.
 * @throws org.jaudiotagger.audio.exceptions.ReadOnlyFileException
 * @throws org.jaudiotagger.audio.exceptions.InvalidAudioFrameException
 */
public MP3File(File file) throws IOException, TagException, ReadOnlyFileException, CannotReadException, InvalidAudioFrameException
{
    this(file, LOAD_ALL);
}
/**
 * Sets the ID3v1(_1)tag to the tag provided as an argument.
 *
 * @param id3v1tag tag to install as this file's v1 tag
 */
public void setID3v1Tag(ID3v1Tag id3v1tag)
{
    logger.config("setting tagv1:v1 tag");
    this.id3v1tag = id3v1tag;
}
/**
 * Sets the ID3v1 tag from a generic {@link Tag} reference.
 *
 * <p>NOTE(review): performs an unchecked cast — callers must pass an actual ID3v1Tag
 * instance or a ClassCastException is thrown here.
 *
 * @param id3v1tag tag to install; must be an ID3v1Tag
 */
public void setID3v1Tag(Tag id3v1tag)
{
    logger.config("setting tagv1:v1 tag");
    this.id3v1tag = (ID3v1Tag) id3v1tag;
}
/**
 * Sets the <code>ID3v1</code> tag for this dataType. A new
 * <code>ID3v1_1</code> dataType is created from the argument and then used
 * here.
 *
 * @param mp3tag Any MP3Tag dataType can be used and will be converted into a
 *               new ID3v1_1 dataType.
 */
public void setID3v1Tag(AbstractTag mp3tag)
{
    logger.config("setting tagv1:abstract");
    // Convert whatever tag we were given into an ID3v1.1 tag.
    id3v1tag = new ID3v11Tag(mp3tag);
}
/**
 * Returns the <code>ID3v1</code> tag for this dataType.
 *
 * @return the <code>ID3v1</code> tag for this dataType, or null if none loaded
 */
public ID3v1Tag getID3v1Tag()
{
    return id3v1tag;
}
/**
 * Calculates hash with given algorithm. Buffer size is 32768 byte.
 * Hash is calculated EXCLUDING meta-data, like id3v1 or id3v2
 *
 * @param algorithm digest algorithm name: MD5, SHA-1 or SHA-256
 * @return hash value in bytes
 * @throws IOException on any I/O error
 * @throws InvalidAudioFrameException if the audio start cannot be determined
 * @throws NoSuchAlgorithmException if the named algorithm is not available
 */
public byte[] getHash(String algorithm) throws NoSuchAlgorithmException, InvalidAudioFrameException, IOException{
    return getHash(algorithm, 32768);
}
/**
 * Calculates hash with given buffer size, using the default MD5 algorithm.
 * Hash is calculated EXCLUDING meta-data, like id3v1 or id3v2
 *
 * @param buffer read buffer size in bytes
 * @return byte[] hash value in byte
 * @throws IOException on any I/O error
 * @throws InvalidAudioFrameException if the audio start cannot be determined
 * @throws NoSuchAlgorithmException if MD5 is not available
 */
public byte[] getHash(int buffer) throws NoSuchAlgorithmException, InvalidAudioFrameException, IOException{
    return getHash("MD5", buffer);
}
/**
 * Calculates hash with algorithm "MD5". Buffer size is 32768 byte.
 * Hash is calculated EXCLUDING meta-data, like id3v1 or id3v2
 *
 * @return byte[] hash value.
 * @throws IOException on any I/O error
 * @throws InvalidAudioFrameException if the audio start cannot be determined
 * @throws NoSuchAlgorithmException if MD5 is not available
 */
public byte[] getHash() throws NoSuchAlgorithmException, InvalidAudioFrameException, IOException{
    return getHash("MD5", 32768);
}
/**
 * Calculates hash with algorithm "MD5", "SHA-1" or "SHA-256".
 * Hash is calculated EXCLUDING meta-data, like id3v1 or id3v2
 *
 * @param algorithm  digest algorithm name: MD5, SHA-1 or SHA-256
 * @param bufferSize read buffer size in bytes
 * @return byte[] hash value in byte
 * @throws IOException on any I/O error
 * @throws InvalidAudioFrameException if the audio start cannot be determined
 * @throws NoSuchAlgorithmException if the named algorithm is not available
 */
public byte[] getHash(String algorithm, int bufferSize) throws InvalidAudioFrameException, IOException, NoSuchAlgorithmException
{
    File mp3File = getFile();
    long startByte = getMP3StartByte(mp3File);
    int id3v1TagSize = 0;
    if (hasID3v1Tag())
    {
        id3v1TagSize = getID3v1Tag().getSize();
    }
    // Hash only the audio: skip the leading ID3v2 tag and stop before a trailing ID3v1 tag.
    long totalSize = mp3File.length() - startByte - id3v1TagSize;
    MessageDigest digest = MessageDigest.getInstance(algorithm);
    InputStream inStream = new FileInputStream(mp3File);
    try
    {
        // skip() may skip fewer bytes than requested, so loop until done
        // (the original ignored the return value).
        long toSkip = startByte;
        while (toSkip > 0)
        {
            long skipped = inStream.skip(toSkip);
            if (skipped <= 0)
            {
                break;
            }
            toSkip -= skipped;
        }
        byte[] buffer = new byte[bufferSize];
        long remaining = totalSize;
        while (remaining > 0)
        {
            // read() may return fewer bytes than asked for; never feed a negative
            // count into digest.update (the original could on short reads/EOF).
            int read = inStream.read(buffer, 0, (int) Math.min(buffer.length, remaining));
            if (read == -1)
            {
                break;
            }
            digest.update(buffer, 0, read);
            remaining -= read;
        }
    }
    finally
    {
        // The original never closed the stream, leaking a file handle on every call.
        inStream.close();
    }
    return digest.digest();
}
/**
 * Sets the <code>ID3v2</code> tag for this dataType. A new
 * <code>ID3v2_4</code> dataType is created from the argument and then used
 * here.
 *
 * @param mp3tag Any MP3Tag dataType can be used and will be converted into a
 *               new ID3v2_4 dataType.
 */
public void setID3v2Tag(AbstractTag mp3tag)
{
    // Always store as the v24 representation.
    id3v2tag = new ID3v24Tag(mp3tag);
}
/**
 * Sets the v2 tag to the v2 tag provided as an argument.
 * Also store a v24 version of tag as v24 is the interface to be used
 * when talking with client applications.
 *
 * @param id3v2tag tag to install as this file's v2 tag
 */
public void setID3v2Tag(AbstractID3v2Tag id3v2tag)
{
    this.id3v2tag = id3v2tag;
    // Reuse the tag itself when it is already v24, otherwise convert a copy.
    this.id3v2Asv24tag = (id3v2tag instanceof ID3v24Tag)
            ? (ID3v24Tag) id3v2tag
            : new ID3v24Tag(id3v2tag);
}
/**
 * Set v2 tag ,don't need to set v24 tag because saving
 *
 * <p>Unlike {@link #setID3v2Tag(AbstractID3v2Tag)} this clears the cached v24 view.
 *
 * @param id3v2tag tag to install as this file's v2 tag
 */
//TODO temp its rather messy
public void setID3v2TagOnly(AbstractID3v2Tag id3v2tag)
{
    this.id3v2tag = id3v2tag;
    this.id3v2Asv24tag = null;
}
/**
 * Returns the <code>ID3v2</code> tag for this datatype.
 *
 * @return the <code>ID3v2</code> tag for this datatype, or null if none loaded
 */
public AbstractID3v2Tag getID3v2Tag()
{
    return id3v2tag;
}
/**
 * @return a representation of tag as v24; may be null if the tag was installed
 *         via {@link #setID3v2TagOnly(AbstractID3v2Tag)}
 */
public ID3v24Tag getID3v2TagAsv24()
{
    return id3v2Asv24tag;
}
/**
* Sets the <code>Lyrics3</code> tag for this dataType. A new
* <code>Lyrics3v2</code> dataType is created from the argument and then
*
* used here.
*
* @param mp3tag Any MP3Tag dataType can be used and will be converted into a
* new Lyrics3v2 dataType.
*/
/*
public void setLyrics3Tag(AbstractTag mp3tag)
{
lyrics3tag = new Lyrics3v2(mp3tag);
}
*/
/**
*
*
* @param lyrics3tag
*/
/*
public void setLyrics3Tag(AbstractLyrics3 lyrics3tag)
{
this.lyrics3tag = lyrics3tag;
}
*/
/**
* Returns the <code>ID3v1</code> tag for this datatype.
*
* @return the <code>ID3v1</code> tag for this datatype
*/
/*
public AbstractLyrics3 getLyrics3Tag()
{
return lyrics3tag;
}
*/
/**
 * Remove tag from file
 *
 * @param mp3tag the tag (v1 or v2) to remove from the underlying file
 * @throws FileNotFoundException if the file cannot be opened
 * @throws IOException on any other I/O error
 */
public void delete(AbstractTag mp3tag) throws FileNotFoundException, IOException
{
    RandomAccessFile raf = new RandomAccessFile(this.file, "rw");
    try
    {
        mp3tag.delete(raf);
    }
    finally
    {
        // The original leaked the handle if delete() threw.
        raf.close();
    }
    // Clear the in-memory reference so this object matches the file on disk.
    if(mp3tag instanceof ID3v1Tag)
    {
        id3v1tag=null;
    }
    if(mp3tag instanceof AbstractID3v2Tag)
    {
        id3v2tag=null;
    }
}
/**
 * Saves the tags in this dataType to the file referred to by this dataType.
 *
 * <p>Convenience overload delegating to {@link #save(File)} with this object's file.
 *
 * @throws IOException  on any I/O error
 * @throws TagException on any exception generated by this library.
 */
public void save() throws IOException, TagException
{
    save(this.file);
}
/**
 * Overridden for compatibility with merged code
 *
 * <p>Delegates to {@link #save()}, translating the checked exceptions into the
 * CannotWriteException hierarchy expected by the generic audio API.
 *
 * @throws NoWritePermissionsException if the file could not be written to due to file permissions
 * @throws CannotWriteException on any other I/O or tag failure
 */
public void commit() throws CannotWriteException
{
    try
    {
        save();
    }
    catch (UnableToModifyFileException umfe)
    {
        // Permission problems get their own, more specific exception type;
        // this catch must stay before the broader ones below.
        throw new NoWritePermissionsException(umfe);
    }
    catch (IOException ioe)
    {
        throw new CannotWriteException(ioe);
    }
    catch (TagException te)
    {
        throw new CannotWriteException(te);
    }
}
/**
 * Check can write to file: it must exist, be writable (when the writability check
 * option is enabled) and be larger than the minimum plausible MP3 size.
 *
 * @param file file about to be written
 * @throws IOException if any precondition fails
 */
public void precheck(File file) throws IOException
{
    if (!file.exists())
    {
        String msg = ErrorMessage.GENERAL_WRITE_FAILED_BECAUSE_FILE_NOT_FOUND.getMsg(file.getName());
        logger.severe(msg);
        throw new IOException(msg);
    }
    if (TagOptionSingleton.getInstance().isCheckIsWritable() && !file.canWrite())
    {
        String msg = ErrorMessage.GENERAL_WRITE_FAILED.getMsg(file.getName());
        logger.severe(msg);
        throw new IOException(msg);
    }
    if (file.length() <= MINIMUM_FILESIZE)
    {
        String msg = ErrorMessage.GENERAL_WRITE_FAILED_BECAUSE_FILE_IS_TOO_SMALL.getMsg(file.getName());
        logger.severe(msg);
        throw new IOException(msg);
    }
}
/**
 * Saves the tags in this dataType to the file argument. It will be saved as
 * TagConstants.MP3_FILE_SAVE_WRITE
 *
 * <p>Writes (or deletes) the ID3v2, Lyrics3 and ID3v1 tags according to the
 * TagOptionSingleton save flags; a null in-memory tag means "remove from file".
 *
 * @param fileToSave file to save the this dataTypes tags to
 * @throws FileNotFoundException if unable to find file
 * @throws IOException on any I/O error
 */
public void save(File fileToSave) throws IOException
{
    //Ensure we are dealing with absolute filepaths not relative ones
    File file = fileToSave.getAbsoluteFile();
    logger.config("Saving : " + file.getPath());
    //Checks before starting write
    precheck(file);
    RandomAccessFile rfile = null;
    try
    {
        //ID3v2 Tag
        if (TagOptionSingleton.getInstance().isId3v2Save())
        {
            if (id3v2tag == null)
            {
                // No v2 tag in memory: strip any v2 tag (of any version) from the file.
                rfile = new RandomAccessFile(file, "rw");
                (new ID3v24Tag()).delete(rfile);
                (new ID3v23Tag()).delete(rfile);
                (new ID3v22Tag()).delete(rfile);
                logger.config("Deleting ID3v2 tag:"+file.getName());
                rfile.close();
            }
            else
            {
                logger.config("Writing ID3v2 tag:"+file.getName());
                final MP3AudioHeader mp3AudioHeader = (MP3AudioHeader) this.getAudioHeader();
                final long mp3StartByte = mp3AudioHeader.getMp3StartByte();
                final long newMp3StartByte = id3v2tag.write(file, mp3StartByte);
                // Writing the tag may have grown or shrunk it, shifting where the audio starts.
                if (mp3StartByte != newMp3StartByte) {
                    logger.config("New mp3 start byte: " + newMp3StartByte);
                    mp3AudioHeader.setMp3StartByte(newMp3StartByte);
                }
            }
        }
        // Reopen for the trailing tags (Lyrics3 and ID3v1 live at the end of the file).
        rfile = new RandomAccessFile(file, "rw");
        //Lyrics 3 Tag
        if (TagOptionSingleton.getInstance().isLyrics3Save())
        {
            if (lyrics3tag != null)
            {
                lyrics3tag.write(rfile);
            }
        }
        //ID3v1 tag
        if (TagOptionSingleton.getInstance().isId3v1Save())
        {
            logger.config("Processing ID3v1");
            if (id3v1tag == null)
            {
                logger.config("Deleting ID3v1");
                (new ID3v1Tag()).delete(rfile);
            }
            else
            {
                logger.config("Saving ID3v1");
                id3v1tag.write(rfile);
            }
        }
    }
    catch (FileNotFoundException ex)
    {
        logger.log(Level.SEVERE, ErrorMessage.GENERAL_WRITE_FAILED_BECAUSE_FILE_NOT_FOUND.getMsg(file.getName()), ex);
        throw ex;
    }
    catch (IOException iex)
    {
        logger.log(Level.SEVERE, ErrorMessage.GENERAL_WRITE_FAILED_BECAUSE.getMsg(file.getName(), iex.getMessage()), iex);
        throw iex;
    }
    catch (RuntimeException re)
    {
        logger.log(Level.SEVERE, ErrorMessage.GENERAL_WRITE_FAILED_BECAUSE.getMsg(file.getName(), re.getMessage()), re);
        throw re;
    }
    finally
    {
        if (rfile != null)
        {
            rfile.close();
        }
    }
}
/**
 * Displays MP3File Structure as XML: the file path plus the structure of any
 * loaded v1/v2 tags.
 *
 * @return XML description of this file's structure
 */
public String displayStructureAsXML()
{
    createXMLStructureFormatter();
    tagFormatter.openHeadingElement("file", getFile().getAbsolutePath());
    ID3v1Tag v1 = getID3v1Tag();
    if (v1 != null)
    {
        v1.createStructure();
    }
    AbstractID3v2Tag v2 = getID3v2Tag();
    if (v2 != null)
    {
        v2.createStructure();
    }
    tagFormatter.closeHeadingElement("file");
    return tagFormatter.toString();
}
/**
 * Displays MP3File Structure as plain text: the file path plus the structure of
 * any loaded v1/v2 tags.
 *
 * @return plain-text description of this file's structure
 */
public String displayStructureAsPlainText()
{
    createPlainTextStructureFormatter();
    tagFormatter.openHeadingElement("file", getFile().getAbsolutePath());
    ID3v1Tag v1 = getID3v1Tag();
    if (v1 != null)
    {
        v1.createStructure();
    }
    AbstractID3v2Tag v2 = getID3v2Tag();
    if (v2 != null)
    {
        v2.createStructure();
    }
    tagFormatter.closeHeadingElement("file");
    return tagFormatter.toString();
}
/** Installs an XML formatter into the shared static {@code tagFormatter}. */
private static void createXMLStructureFormatter()
{
    tagFormatter = new XMLTagDisplayFormatter();
}
/** Installs a plain-text formatter into the shared static {@code tagFormatter}. */
private static void createPlainTextStructureFormatter()
{
    tagFormatter = new PlainTextTagDisplayFormatter();
}
/**
 * Returns the formatter last installed by displayStructureAsXML/displayStructureAsPlainText.
 *
 * <p>NOTE(review): this is shared static state — concurrent callers on different
 * threads may observe another thread's formatter.
 */
public static AbstractTagDisplayFormatter getStructureFormatter()
{
    return tagFormatter;
}
/**
 * Set the Tag
 *
 * If the parameter tag is a v1tag then the v1 tag is set if v2tag then the v2tag.
 *
 * @param tag tag to install; must be an ID3v1Tag or an AbstractID3v2Tag
 */
public void setTag(Tag tag)
{
    this.tag = tag;
    if (tag instanceof ID3v1Tag)
    {
        setID3v1Tag((ID3v1Tag) tag);
    }
    else
    {
        // NOTE(review): unchecked cast — a Tag that is neither an ID3v1Tag nor an
        // AbstractID3v2Tag subclass causes a ClassCastException here.
        setID3v2Tag((AbstractID3v2Tag) tag);
    }
}
/**
 * Create Default Tag
 *
 * @return a fresh, empty tag of the ID3v2 version configured in TagOptionSingleton
 *         (v24 when no recognised version is set)
 */
@Override
public Tag createDefaultTag()
{
    switch (TagOptionSingleton.getInstance().getID3V2Version())
    {
        case ID3_V24:
            return new ID3v24Tag();
        case ID3_V23:
            return new ID3v23Tag();
        case ID3_V22:
            return new ID3v22Tag();
        default:
            //Default in case not set somehow
            return new ID3v24Tag();
    }
}
/**
 * Overridden to only consider ID3v2 Tag
 *
 * @return the existing ID3v2 tag, or a newly created default tag if there is none
 */
@Override
public Tag getTagOrCreateDefault()
{
    Tag v2 = getID3v2Tag();
    return (v2 == null) ? createDefaultTag() : v2;
}
/**
 * Get the ID3v2 tag and convert to preferred version or if the file doesn't have one at all
 * create a default tag of preferred version and set it. The file may already contain a ID3v1 tag but because
 * this is not terribly useful the v1tag is not considered for this problem.
 *
 * @return the tag now installed on this file
 */
@Override
public Tag getTagAndConvertOrCreateAndSetDefault()
{
    Tag existing = getTagOrCreateDefault();
    // Convert to the preferred ID3v2 version; a null result means no conversion was done.
    Tag converted = convertID3Tag((AbstractID3v2Tag) existing,
            TagOptionSingleton.getInstance().getID3V2Version());
    setTag(converted != null ? converted : existing);
    return getTag();
}
}
| |
// Copyright 2016 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.server;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.base.Verify;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.net.InetAddresses;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import com.google.common.util.concurrent.Uninterruptibles;
import com.google.devtools.build.lib.clock.BlazeClock;
import com.google.devtools.build.lib.clock.Clock;
import com.google.devtools.build.lib.concurrent.ThreadSafety.Immutable;
import com.google.devtools.build.lib.runtime.BlazeCommandDispatcher.LockingMode;
import com.google.devtools.build.lib.runtime.CommandExecutor;
import com.google.devtools.build.lib.runtime.proto.InvocationPolicyOuterClass.InvocationPolicy;
import com.google.devtools.build.lib.server.CommandProtos.CancelRequest;
import com.google.devtools.build.lib.server.CommandProtos.CancelResponse;
import com.google.devtools.build.lib.server.CommandProtos.PingRequest;
import com.google.devtools.build.lib.server.CommandProtos.PingResponse;
import com.google.devtools.build.lib.server.CommandProtos.RunRequest;
import com.google.devtools.build.lib.server.CommandProtos.RunResponse;
import com.google.devtools.build.lib.server.CommandProtos.StartupOption;
import com.google.devtools.build.lib.util.ExitCode;
import com.google.devtools.build.lib.util.Pair;
import com.google.devtools.build.lib.util.ThreadUtils;
import com.google.devtools.build.lib.util.io.OutErr;
import com.google.devtools.build.lib.vfs.FileSystemUtils;
import com.google.devtools.build.lib.vfs.Path;
import com.google.devtools.common.options.InvocationPolicyParser;
import com.google.devtools.common.options.OptionsParsingException;
import com.google.protobuf.ByteString;
import io.grpc.Server;
import io.grpc.StatusRuntimeException;
import io.grpc.netty.NettyServerBuilder;
import io.grpc.stub.ServerCallStreamObserver;
import io.grpc.stub.StreamObserver;
import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.net.InetSocketAddress;
import java.nio.charset.Charset;
import java.security.SecureRandom;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.UUID;
import java.util.concurrent.Exchanger;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import java.util.logging.Logger;
import javax.annotation.concurrent.GuardedBy;
/**
* gRPC server class.
*
* <p>Only this class should depend on gRPC so that we only need to exclude this during
* bootstrapping.
*
* <p>This class is a little complicated and rich in multithreading, so an explanation of its
* innards follows.
*
* <p>We use the direct executor for gRPC so that it calls our methods directly on its event handler
* threads (which it creates itself). This is acceptable for {@code ping()} and {@code cancel()}
* because they run very quickly. For {@code run()}, we transfer the call to our own threads in
* {@code commandExecutorPool}. We do this instead of setting an executor on the server object
* because gRPC insists on serializing calls within a single RPC call, which means that the Runnable
* passed to {@code setOnReadyHandler} doesn't get called while the main RPC method is running,
* which means we can't use flow control, which we need so that gRPC doesn't buffer an unbounded
* amount of outgoing data.
*
* <p>Two threads are spawned for each command: one that handles the command in {@code
* commandExecutorPool} and one that streams the result back to the client in {@code
* streamExecutorPool}.
*
* <p>In addition to these threads, we maintain one extra thread for handling the server timeout and
* an interrupt watcher thread is started for each interrupt request that logs if it takes too long
* to take effect.
*
* <p>Each running RPC has a UUID associated with it that is used to identify it when a client wants
* to cancel it. Cancellation is done by the client sending the server a {@code cancel()} RPC call
* which results in the main thread of the command being interrupted.
*/
public class GrpcServerImpl implements RPCServer {
private static final Logger logger = Logger.getLogger(GrpcServerImpl.class.getName());
// UTF-8 won't do because we want to be able to pass arbitrary binary strings.
// Not that the internals of Bazel handle that correctly, but why not make at least this little
// part correct?
private static final Charset CHARSET = Charset.forName("ISO-8859-1");
private static final long NANOSECONDS_IN_MS = TimeUnit.MILLISECONDS.toNanos(1);
private static final long NANOS_PER_IDLE_CHECK =
TimeUnit.NANOSECONDS.convert(5, TimeUnit.SECONDS);
/**
 * Registration token for a command executing on this server.
 *
 * <p>Construction registers the command in {@code runningCommands} (calling {@code busy()}
 * when it is the first one); {@link #close()} deregisters it (calling {@code idle()} when
 * it was the last). Both paths notify waiters on the {@code runningCommands} monitor.
 */
private class RunningCommand implements AutoCloseable {
  // Thread executing the command; interrupted when the client cancels.
  private final Thread thread;
  // UUID identifying this command, used by cancel() requests.
  private final String id;
  private RunningCommand() throws InterruptedException {
    thread = Thread.currentThread();
    id = UUID.randomUUID().toString();
    synchronized (runningCommands) {
      if (runningCommands.isEmpty()) {
        busy();
      }
      // Refuse to start new commands once shutdown has begun.
      if (shuttingDown) {
        throw new InterruptedException();
      }
      runningCommands.put(id, this);
      runningCommands.notify();
    }
    logger.info(String.format("Starting command %s on thread %s", id, thread.getName()));
  }
  @Override
  public void close() {
    synchronized (runningCommands) {
      runningCommands.remove(id);
      if (runningCommands.isEmpty()) {
        idle();
      }
      runningCommands.notify();
    }
    logger.info(String.format("Finished command %s on thread %s", id, thread.getName()));
  }
}
/**
 * Factory class. Instantiated by reflection.
 *
 * <p>Used so that method calls using reflection are as simple as possible.
 */
public static class Factory implements RPCServer.Factory {
  @Override
  public RPCServer create(CommandExecutor commandExecutor, Clock clock, int port,
      Path workspace, Path serverDirectory, int maxIdleSeconds) throws IOException {
    // Simply forwards to the real constructor; no extra configuration happens here.
    return new GrpcServerImpl(
        commandExecutor, clock, port, workspace, serverDirectory, maxIdleSeconds);
  }
}
/** The output streams a command can write to. */
@VisibleForTesting
enum StreamType {
  STDOUT,
  STDERR,
}
/** Actions {@link GrpcSink} can do. */
private enum SinkThreadAction {
  DISCONNECT, // the client disconnected / the RPC was cancelled
  FINISH,     // terminate the streamer thread
  READY,      // gRPC signalled the channel is ready for another message
  SEND,       // a thread wants to send a message to the client
}
/**
 * Sent back and forth between threads wanting to write to the client stream and the stream
 * handler thread.
 */
@Immutable
private static final class SinkThreadItem {
  // Reported back to the sender by offer(); true means the message is being sent.
  private final boolean success;
  // Payload to stream to the client; null in handshake/termination exchanges.
  private final RunResponse message;
  private SinkThreadItem(boolean success, RunResponse message) {
    this.success = success;
    this.message = message;
  }
}
/**
* A class that handles communicating through a gRPC interface for a streaming rpc call.
*
* <p>It can do four things:
* <li>Send a response message over the wire. If the channel is ready, it's sent immediately, if
* it's not, blocks until it is. Note that there can always be only one thread in {@link
* #offer(RunResponse)} because it's synchronized. This results in the associated streams
* blocking if gRPC is not ready, which is how we implement pushback.
* <li>Be notified that gRPC is ready. If there is a pending message, it is then sent.
* <li>Be notified that the client disconnected. In this case, an {@link IOException} is reported
* and the thread from which the stream was written to is interrupted so that the server
* becomes free as soon as possible.
* <li>Processing can be terminated. It is reported whether the client disconnected before.
*/
@VisibleForTesting
static class GrpcSink {
private final LinkedBlockingQueue<SinkThreadAction> actionQueue;
private final Exchanger<SinkThreadItem> exchanger;
private final ServerCallStreamObserver<RunResponse> observer;
private final Future<?> future;
private final AtomicReference<Thread> commandThread = new AtomicReference<>();
private final AtomicBoolean disconnected = new AtomicBoolean(false);
private final AtomicLong receivedEventCount = new AtomicLong(0);
/**
 * Creates the sink and starts its streamer thread on {@code executor}.
 *
 * @param rpcCommandName name of the streaming RPC, used only in log messages
 * @param observer gRPC stream observer that responses are written to
 * @param executor executor that runs the {@link #call} loop
 */
@VisibleForTesting
GrpcSink(
    final String rpcCommandName,
    ServerCallStreamObserver<RunResponse> observer,
    ExecutorService executor) {
  // This queue is intentionally unbounded: we always act on it fairly quickly so filling up
  // RAM is not a concern but we don't want to block in the gRPC cancel/onready handlers.
  this.actionQueue = new LinkedBlockingQueue<>();
  this.exchanger = new Exchanger<>();
  this.observer = observer;
  this.observer.setOnCancelHandler(
      () -> {
        Thread commandThread = GrpcSink.this.commandThread.get();
        if (commandThread != null) {
          logger.info(
              String.format(
                  "Interrupting thread %s due to the streaming %s call being cancelled "
                      + "(likely client hang up or explicit gRPC-level cancellation)",
                  commandThread.getName(), rpcCommandName));
          // Free the command thread as soon as possible once the client is gone.
          commandThread.interrupt();
        }
        actionQueue.offer(SinkThreadAction.DISCONNECT);
      });
  this.observer.setOnReadyHandler(() -> actionQueue.offer(SinkThreadAction.READY));
  // Start the streamer thread that drains actionQueue.
  this.future = executor.submit(GrpcSink.this::call);
}
/** Number of actions the streamer thread has taken off its queue (for tests). */
@VisibleForTesting
long getReceivedEventCount() {
  return receivedEventCount.get();
}
/**
 * Records the thread executing the command so it can be interrupted on cancellation.
 *
 * @param thread the command thread to register
 * @throws IllegalStateException if a command thread was already registered
 */
@VisibleForTesting
void setCommandThread(Thread thread) {
  Thread old = commandThread.getAndSet(thread);
  if (old != null) {
    // Report the previously registered thread and the one now being registered.
    // The original formatted Thread.currentThread() here, i.e. the caller, which is
    // not necessarily the thread actually being recorded.
    throw new IllegalStateException(String.format("Command state set twice (thread %s ->%s)",
        old.getName(), thread.getName()));
  }
}
/**
 * Sends an item to the client.
 *
 * <p>{@code synchronized} so only one thread at a time enters the exchange; when gRPC is
 * not ready the calling thread blocks here, which is how pushback is implemented.
 *
 * @return true if the item was sent successfully, false if the connection to the client was
 *     lost
 */
@VisibleForTesting
synchronized boolean offer(RunResponse item) {
  SinkThreadItem queueItem = new SinkThreadItem(false, item);
  actionQueue.offer(SinkThreadAction.SEND);
  // Hand the item to the streamer thread and wait for the send result.
  return exchange(queueItem, false).success;
}
/** Whether the client disconnected (set by the streamer thread on DISCONNECT). */
private boolean disconnected() {
  return disconnected.get();
}
    /**
     * Signals the streamer thread to finish and waits for it to terminate.
     *
     * @return whether the client disconnected before the stream completed
     */
    @VisibleForTesting
    boolean finish() {
      actionQueue.offer(SinkThreadAction.FINISH);
      try {
        // Wait for call() to return, surviving interrupts so shutdown stays orderly.
        Uninterruptibles.getUninterruptibly(future);
      } catch (ExecutionException e) {
        throw new IllegalStateException(e);
      }
      // Reset the interrupted bit so that it doesn't stay set for the next command that is handled
      // by this thread
      Thread.interrupted();
      return disconnected();
    }
private SinkThreadItem exchange(SinkThreadItem item, boolean swallowInterrupts) {
boolean interrupted = false;
SinkThreadItem result;
while (true) {
try {
result = exchanger.exchange(item);
break;
} catch (InterruptedException e) {
interrupted = true;
}
}
if (interrupted && !swallowInterrupts) {
Thread.currentThread().interrupt();
}
return result;
}
    /**
     * Picks up the item the command thread is offering via the exchanger and pushes it to the
     * gRPC observer. Runs on the streamer thread; the {@code true} in the exchanged item tells
     * the command thread the send was accepted.
     */
    private void sendPendingItem() {
      SinkThreadItem item = exchange(new SinkThreadItem(true, null), true);
      try {
        observer.onNext(item.message);
      } catch (StatusRuntimeException e) {
        // The RPC was cancelled e.g. by the client terminating unexpectedly. We'll eventually get
        // notified about this and interrupt the command thread, but in the meantime, we can just
        // ignore the error; the client is dead, so there isn't anyone to talk to so swallowing the
        // output is fine.
        logger.info(
            String.format(
                "Client cancelled command for streamer thread %s",
                Thread.currentThread().getName()));
      }
    }
    /**
     * Main function of the streamer thread.
     *
     * <p>Processes actions from {@link #actionQueue} until FINISH arrives: SEND either transmits
     * immediately (observer ready) or marks an item pending until the next READY; DISCONNECT
     * marks the sink dead and releases any blocked sender; FINISH drains and returns.
     */
    private void call() {
      boolean itemPending = false;
      while (true) {
        SinkThreadAction action;
        action = Uninterruptibles.takeUninterruptibly(actionQueue);
        receivedEventCount.incrementAndGet();
        switch (action) {
          case FINISH:
            if (itemPending) {
              // Release the command thread blocked in offer(); "false" = item was not sent.
              exchange(new SinkThreadItem(false, null), true);
              itemPending = false;
            }
            // Reset the interrupted bit so that it doesn't stay set for the next command that is
            // handled by this thread
            Thread.interrupted();
            return;
          case READY:
            if (itemPending) {
              sendPendingItem();
              itemPending = false;
            }
            break;
          case DISCONNECT:
            logger.info(
                "Client disconnected for stream thread " + Thread.currentThread().getName());
            disconnected.set(true);
            if (itemPending) {
              // Release the blocked sender; the item is dropped since the client is gone.
              exchange(new SinkThreadItem(false, null), true);
              itemPending = false;
            }
            break;
          case SEND:
            if (disconnected()) {
              // Tell the command thread the send failed.
              exchange(new SinkThreadItem(false, null), true);
            } else if (observer.isReady()) {
              sendPendingItem();
            } else {
              // Flow control: hold the item until the READY callback fires.
              itemPending = true;
            }
        }
      }
    }
}
/**
* An output stream that forwards the data written to it over the gRPC command stream.
*
* <p>Note that wraping this class with a {@code Channel} can cause a deadlock if there is an
* {@link OutputStream} in between that synchronizes both on {@code #close()} and {@code #write()}
* because then if an interrupt happens in {@link GrpcSink#exchange(SinkThreadItem, boolean)},
* the thread on which {@code interrupt()} was called will wait until the {@code Channel} closes
* itself while holding a lock for interrupting the thread on which {@code #exchange()} is
* being executed and that thread will hold a lock that is needed for the {@code Channel} to be
* closed and call {@code interrupt()} in {@code #exchange()}, which will in turn try to acquire
* the interrupt lock.
*/
@VisibleForTesting
static class RpcOutputStream extends OutputStream {
private static final int CHUNK_SIZE = 8192;
// Store commandId and responseCookie as ByteStrings to avoid String -> UTF8 bytes conversion
// for each serialized chunk of output.
private final ByteString commandIdBytes;
private final ByteString responseCookieBytes;
private final StreamType type;
private final GrpcSink sink;
RpcOutputStream(String commandId, String responseCookie, StreamType type, GrpcSink sink) {
this.commandIdBytes = ByteString.copyFromUtf8(commandId);
this.responseCookieBytes = ByteString.copyFromUtf8(responseCookie);
this.type = type;
this.sink = sink;
}
@Override
public synchronized void write(byte[] b, int off, int inlen) throws IOException {
for (int i = 0; i < inlen; i += CHUNK_SIZE) {
ByteString input = ByteString.copyFrom(b, off + i, Math.min(CHUNK_SIZE, inlen - i));
RunResponse.Builder response = RunResponse
.newBuilder()
.setCookieBytes(responseCookieBytes)
.setCommandIdBytes(commandIdBytes);
switch (type) {
case STDOUT: response.setStandardOutput(input); break;
case STDERR: response.setStandardError(input); break;
default: throw new IllegalStateException();
}
// Send the chunk to the streamer thread. May block.
if (!sink.offer(response.build())) {
// Client disconnected. Terminate the current command as soon as possible. Note that
// throwing IOException is not enough because we are in the habit of swallowing it. Note
// that when gRPC notifies us about the disconnection (see the call to setOnCancelHandler)
// we interrupt the command thread, which should be enough to make the server come around
// as soon as possible.
logger.info(
String.format(
"Client disconnected received for command %s on thread %s",
commandIdBytes.toStringUtf8(), Thread.currentThread().getName()));
throw new IOException("Client disconnected");
}
}
}
@Override
public void write(int byteAsInt) throws IOException {
byte b = (byte) byteAsInt; // make sure we work with bytes in comparisons
write(new byte[] {b}, 0, 1);
}
}
  // The synchronized block is here so that if the "PID file deleted" timer or the idle shutdown
  // mechanism kicks in during a regular shutdown, they don't race.
  /** Marks the server as shutting down and tells the gRPC server to stop accepting new calls. */
  @VisibleForTesting // productionVisibility = Visibility.PRIVATE
  void signalShutdown() {
    synchronized (runningCommands) {
      shuttingDown = true;
      server.shutdown();
    }
  }
/**
* A thread that shuts the server down under the following conditions:
*
* <ul>
* <li>The PID file changes (in this case, *very* quickly)
* <li>The workspace directory is deleted
* <li>There is too much memory pressure on the host
* </ul>
*/
private class ShutdownWatcherThread extends Thread {
private long lastIdleCheckNanos;
private ShutdownWatcherThread() {
super("grpc-server-shutdown-watcher");
setDaemon(true);
}
private void doIdleChecksMaybe() {
synchronized (runningCommands) {
if (!runningCommands.isEmpty()) {
lastIdleCheckNanos = -1;
return;
}
long currentNanos = BlazeClock.nanoTime();
if (lastIdleCheckNanos == -1) {
lastIdleCheckNanos = currentNanos;
return;
}
if (currentNanos - lastIdleCheckNanos < NANOS_PER_IDLE_CHECK) {
return;
}
if (!idleServerTasks.continueProcessing()) {
signalShutdown();
server.shutdown();
}
lastIdleCheckNanos = currentNanos;
}
}
@Override
public void run() {
while (true) {
Uninterruptibles.sleepUninterruptibly(5, TimeUnit.SECONDS);
boolean ok = false;
try {
String pidFileContents = new String(FileSystemUtils.readContentAsLatin1(pidFile));
ok = pidFileContents.equals(pidInFile);
} catch (IOException e) {
logger.info("Cannot read PID file: " + e.getMessage());
// Handled by virtue of ok not being set to true
}
if (ok) {
doIdleChecksMaybe();
}
if (!ok) {
synchronized (ShutdownWatcherThread.this) {
if (shuttingDown) {
logger.warning("PID file deleted or overwritten but shutdown is already in progress");
break;
}
shuttingDown = true;
// Someone overwrote the PID file. Maybe it's another server, so shut down as quickly
// as possible without even running the shutdown hooks (that would delete it)
logger.severe("PID file deleted or overwritten, exiting as quickly as possible");
Runtime.getRuntime().halt(ExitCode.BLAZE_INTERNAL_ERROR.getNumericExitCode());
}
}
}
}
}
  // These paths are all relative to the server directory
  private static final String PORT_FILE = "command_port";
  private static final String REQUEST_COOKIE_FILE = "request_cookie";
  private static final String RESPONSE_COOKIE_FILE = "response_cookie";

  // Cleared by disableShutdownHooks() (e.g. for "clean --expunge") so the exit hook does not
  // delete files that a freshly started server may have re-created.
  private static final AtomicBoolean runShutdownHooks = new AtomicBoolean(true);

  // Commands currently in flight, keyed by command id. Also serves as the monitor guarding
  // shuttingDown and the idle-timeout bookkeeping (see signalShutdown()/timeoutThread()).
  @GuardedBy("runningCommands")
  private final Map<String, RunningCommand> runningCommands = new HashMap<>();
  private final CommandExecutor commandExecutor;
  // Pool for GrpcSink streamer threads ("grpc-stream-%d").
  private final ExecutorService streamExecutorPool;
  // Pool on which commands and cancellations run, off the gRPC dispatcher threads.
  private final ExecutorService commandExecutorPool;
  private final Clock clock;
  private final Path serverDirectory;
  private final Path workspace;
  // Random cookies used to authenticate client requests and server responses; written to files
  // in the server directory by serve().
  private final String requestCookie;
  private final String responseCookie;
  private final AtomicLong interruptCounter = new AtomicLong(0);
  private final int maxIdleSeconds;
  private final ShutdownWatcherThread shutdownWatcherThread;
  private final Path pidFile;
  private final String pidInFile;
  // Guarded by synchronizing on the list itself (see deleteAtExit()/shutdownHook()).
  private final List<Path> filesToDeleteAtExit = new ArrayList<>();
  private final int port;

  private Server server;
  // Non-null exactly while the server is idle (no running commands).
  private IdleServerTasks idleServerTasks;
  private InetSocketAddress address;
  private boolean serving;
  private boolean shuttingDown = false;
  /**
   * Creates the server object. The gRPC server itself is not started until {@link #serve()}.
   *
   * @throws IOException if the PID file written by the launcher cannot be read
   */
  public GrpcServerImpl(CommandExecutor commandExecutor, Clock clock, int port,
      Path workspace, Path serverDirectory, int maxIdleSeconds) throws IOException {
    Runtime.getRuntime().addShutdownHook(new Thread(() -> shutdownHook()));

    // server.pid was written in the C++ launcher after fork() but before exec() .
    // The client only accesses the pid file after connecting to the socket
    // which ensures that it gets the correct pid value.
    pidFile = serverDirectory.getRelative("server.pid.txt");
    pidInFile = new String(FileSystemUtils.readContentAsLatin1(pidFile));
    deleteAtExit(pidFile);

    this.commandExecutor = commandExecutor;
    this.clock = clock;
    this.serverDirectory = serverDirectory;
    this.workspace = workspace;
    this.port = port;
    this.maxIdleSeconds = maxIdleSeconds;
    this.serving = false;

    this.streamExecutorPool =
        Executors.newCachedThreadPool(
            new ThreadFactoryBuilder().setNameFormat("grpc-stream-%d").setDaemon(true).build());
    this.commandExecutorPool =
        Executors.newCachedThreadPool(
            new ThreadFactoryBuilder().setNameFormat("grpc-command-%d").setDaemon(true).build());

    // Fresh cookies per server instance; serve() writes them into the server directory for the
    // client to pick up.
    SecureRandom random = new SecureRandom();
    requestCookie = generateCookie(random, 16);
    responseCookie = generateCookie(random, 16);

    shutdownWatcherThread = new ShutdownWatcherThread();
    shutdownWatcherThread.start();
    // The server starts out idle until the first command arrives.
    idleServerTasks = new IdleServerTasks(workspace);
    idleServerTasks.idle();
  }
@VisibleForTesting // productionVisibility = Visibility.PRIVATE
String getRequestCookie() {
return requestCookie;
}
@VisibleForTesting // productionVisibility = Visibility.PRIVATE
InetSocketAddress getAddress() {
return address;
}
  /** Transitions the server into the idle state; must not already be idle. */
  private void idle() {
    Preconditions.checkState(idleServerTasks == null);
    idleServerTasks = new IdleServerTasks(workspace);
    idleServerTasks.idle();
  }

  /** Transitions the server into the busy state; must currently be idle. */
  private void busy() {
    Preconditions.checkState(idleServerTasks != null);
    idleServerTasks.busy();
    // A null idleServerTasks is the marker for "busy".
    idleServerTasks = null;
  }
private static String generateCookie(SecureRandom random, int byteCount) {
byte[] bytes = new byte[byteCount];
random.nextBytes(bytes);
StringBuilder result = new StringBuilder();
for (byte b : bytes) {
result.append(Integer.toHexString(b + 128));
}
return result.toString();
}
private void startSlowInterruptWatcher(final ImmutableSet<String> commandIds) {
if (commandIds.isEmpty()) {
return;
}
Runnable interruptWatcher = () -> {
try {
Thread.sleep(10 * 1000);
boolean ok;
synchronized (runningCommands) {
ok = Collections.disjoint(commandIds, runningCommands.keySet());
}
if (!ok) {
// At least one command was not interrupted. Interrupt took too long.
ThreadUtils.warnAboutSlowInterrupt();
}
} catch (InterruptedException e) {
// Ignore.
}
};
Thread interruptWatcherThread =
new Thread(interruptWatcher, "interrupt-watcher-" + interruptCounter.incrementAndGet());
interruptWatcherThread.setDaemon(true);
interruptWatcherThread.start();
}
  /**
   * Body of the "grpc-timeout" thread: waits until the server has had no running commands for at
   * least {@code maxIdleSeconds}, then initiates shutdown.
   */
  private void timeoutThread() {
    synchronized (runningCommands) {
      boolean idle = runningCommands.isEmpty();
      boolean wasIdle = false;
      long shutdownTime = -1;

      while (true) {
        if (!wasIdle && idle) {
          // Just became idle: start the countdown.
          shutdownTime = BlazeClock.nanoTime() + maxIdleSeconds * 1000L * NANOSECONDS_IN_MS;
        }

        try {
          if (idle) {
            Verify.verify(shutdownTime > 0);
            long waitTime = shutdownTime - BlazeClock.nanoTime();
            if (waitTime > 0) {
              // Round upwards so that we don't busy-wait in the last millisecond
              runningCommands.wait((waitTime + NANOSECONDS_IN_MS - 1) / NANOSECONDS_IN_MS);
            }
          } else {
            // Wait to be notified that the set of running commands changed.
            runningCommands.wait();
          }
        } catch (InterruptedException e) {
          // Dealt with by checking the current time below.
        }

        wasIdle = idle;
        idle = runningCommands.isEmpty();
        if (wasIdle && idle && BlazeClock.nanoTime() >= shutdownTime) {
          break;
        }
      }
    }

    logger.info("About to shutdown due to idleness");
    signalShutdown();
  }
  /**
   * This is called when the server is shut down as a result of a "clean --expunge".
   *
   * <p>In this case, no files should be deleted on shutdown hooks, since clean also deletes the
   * lock file, and there is a small possibility of the following sequence of events:
   *
   * <ol>
   *   <li> Client 1 runs "blaze clean --expunge"
   *   <li> Client 2 runs a command and waits for client 1 to finish
   *   <li> The clean command deletes everything including the lock file
   *   <li> Client 2 starts running and since the output base is empty, starts up a new server,
   *        which creates its own socket and PID files
   *   <li> The server used by client 1 runs its shutdown hooks, deleting the PID files created by
   *        the new server
   * </ol>
   *
   * It also disables the "die when the PID file changes" handler so that it doesn't kill the
   * server while the "clean --expunge" command is running.
   */
  @Override
  public void prepareForAbruptShutdown() {
    disableShutdownHooks();
    signalShutdown();
  }
@Override
public void interrupt() {
synchronized (runningCommands) {
for (RunningCommand command : runningCommands.values()) {
command.thread.interrupt();
}
startSlowInterruptWatcher(ImmutableSet.copyOf(runningCommands.keySet()));
}
}
  /**
   * Binds the gRPC server (IPv6 loopback first, IPv4 as fallback), publishes the port and cookie
   * files for the client, and blocks until the server terminates.
   */
  @Override
  public void serve() throws IOException {
    Preconditions.checkState(!serving);

    // For reasons only Apple knows, you cannot bind to IPv4-localhost when you run in a sandbox
    // that only allows loopback traffic, but binding to IPv6-localhost works fine. This would
    // however break on systems that don't support IPv6. So what we'll do is to try to bind to IPv6
    // and if that fails, try again with IPv4.
    InetSocketAddress address = new InetSocketAddress("[::1]", port);
    try {
      server =
          NettyServerBuilder.forAddress(address)
              .addService(commandServer)
              .directExecutor()
              .build()
              .start();
    } catch (IOException e) {
      // IPv6 bind failed; fall back to the IPv4 loopback.
      address = new InetSocketAddress("127.0.0.1", port);
      server =
          NettyServerBuilder.forAddress(address)
              .addService(commandServer)
              .directExecutor()
              .build()
              .start();
    }

    if (maxIdleSeconds > 0) {
      Thread timeoutThread = new Thread(this::timeoutThread);
      timeoutThread.setName("grpc-timeout");
      timeoutThread.setDaemon(true);
      timeoutThread.start();
    }

    serving = true;

    // Use server.getPort() since the bound port can differ from the requested one (e.g. port 0).
    this.address = new InetSocketAddress(address.getAddress(), server.getPort());
    writeServerFile(
        PORT_FILE, InetAddresses.toUriString(address.getAddress()) + ":" + server.getPort());
    writeServerFile(REQUEST_COOKIE_FILE, requestCookie);
    writeServerFile(RESPONSE_COOKIE_FILE, responseCookie);

    try {
      // Block until the server terminates.
      server.awaitTermination();
    } catch (InterruptedException e) {
      // TODO(lberki): Handle SIGINT in a reasonable way
      throw new IllegalStateException(e);
    }
  }
private void writeServerFile(String name, String contents) throws IOException {
Path file = serverDirectory.getChild(name);
FileSystemUtils.writeContentAsLatin1(file, contents);
deleteAtExit(file);
}
  /** Disables file deletion at JVM exit; see {@code prepareForAbruptShutdown()} for why. */
  protected void disableShutdownHooks() {
    runShutdownHooks.set(false);
  }
private void shutdownHook() {
if (!runShutdownHooks.get()) {
return;
}
List<Path> files;
synchronized (filesToDeleteAtExit) {
files = new ArrayList<>(filesToDeleteAtExit);
}
for (Path path : files) {
try {
path.delete();
} catch (IOException e) {
printStack(e);
}
}
}
  /** Schedule the specified file for (attempted) deletion at JVM exit. */
  protected void deleteAtExit(final Path path) {
    // Actual deletion happens in shutdownHook(), and is skipped if hooks were disabled.
    synchronized (filesToDeleteAtExit) {
      filesToDeleteAtExit.add(path);
    }
  }
static void printStack(IOException e) {
/*
* Hopefully this never happens. It's not very nice to just write this
* to the user's console, but I'm not sure what better choice we have.
*/
StringWriter err = new StringWriter();
PrintWriter printErr = new PrintWriter(err);
printErr.println("=======[BLAZE SERVER: ENCOUNTERED IO EXCEPTION]=======");
e.printStackTrace(printErr);
printErr.println("=====================================================");
logger.severe(err.toString());
}
  /**
   * Runs a single command for the streaming "run" RPC: validates the request, streams
   * stdout/stderr back through {@code sink}, and finally reports the exit code.
   *
   * <p>Runs on a thread from {@code commandExecutorPool} (see the run() handler below).
   */
  @VisibleForTesting // productionVisibility = Visibility.PRIVATE
  void executeCommand(RunRequest request, StreamObserver<RunResponse> observer, GrpcSink sink) {
    sink.setCommandThread(Thread.currentThread());

    // Reject requests with a bad cookie or no client description outright.
    if (!request.getCookie().equals(requestCookie) || request.getClientDescription().isEmpty()) {
      try {
        observer.onNext(
            RunResponse.newBuilder()
                .setExitCode(ExitCode.LOCAL_ENVIRONMENTAL_ERROR.getNumericExitCode())
                .build());
        observer.onCompleted();
      } catch (StatusRuntimeException e) {
        logger.info("Client cancelled command while rejecting it: " + e.getMessage());
      }
      return;
    }

    // There is a small period of time between calling setOnCancelHandler() and setCommandThread()
    // during which the command thread is not interrupted when a cancel is signaled. Cover that
    // case by explicitly checking for disconnection here.
    if (sink.disconnected()) {
      return;
    }

    ImmutableList.Builder<String> args = ImmutableList.builder();
    for (ByteString requestArg : request.getArgList()) {
      args.add(requestArg.toString(CHARSET));
    }

    String commandId;
    int exitCode;

    // TODO(b/63925394): This information needs to be passed to the GotOptionsEvent, which does not
    // currently have the explicit startup options. See Improved Command Line Reporting design doc
    // for details.
    // Convert the startup options record to Java strings, source first.
    ImmutableList.Builder<Pair<String, String>> startupOptions = ImmutableList.builder();
    for (StartupOption option : request.getStartupOptionsList()) {
      startupOptions.add(
          new Pair<>(option.getSource().toString(CHARSET), option.getOption().toString(CHARSET)));
    }

    try (RunningCommand command = new RunningCommand()) {
      commandId = command.id;

      try {
        // Send the client the command id as soon as we know it.
        observer.onNext(
            RunResponse.newBuilder()
                .setCookie(responseCookie)
                .setCommandId(commandId)
                .build());
      } catch (StatusRuntimeException e) {
        logger.info(
            "The client cancelled the command before receiving the command id: " + e.getMessage());
      }

      // Wire the command's stdout/stderr through the gRPC stream via the sink.
      OutErr rpcOutErr = OutErr.create(
          new RpcOutputStream(command.id, responseCookie, StreamType.STDOUT, sink),
          new RpcOutputStream(command.id, responseCookie, StreamType.STDERR, sink));

      try {
        InvocationPolicy policy = InvocationPolicyParser.parsePolicy(request.getInvocationPolicy());
        exitCode =
            commandExecutor.exec(
                policy,
                args.build(),
                rpcOutErr,
                request.getBlockForLock() ? LockingMode.WAIT : LockingMode.ERROR_OUT,
                request.getClientDescription(),
                clock.currentTimeMillis(),
                Optional.of(startupOptions.build()));
      } catch (OptionsParsingException e) {
        rpcOutErr.printErrLn(e.getMessage());
        exitCode = ExitCode.COMMAND_LINE_ERROR.getNumericExitCode();
      }
    } catch (InterruptedException e) {
      exitCode = ExitCode.INTERRUPTED.getNumericExitCode();
      commandId = ""; // The default value, the client will ignore it
    }

    if (sink.finish()) {
      // Client disconnected. Then we are not allowed to call any methods on the observer.
      logger.info(
          String.format(
              "Client disconnected before we could send exit code for command %s", commandId));
      return;
    }

    // There is a chance that an Uninterruptibles#getUninterruptibly() leaves us with the
    // interrupt bit set. So we just reset the interruption state here to make these cancel
    // requests not have any effect outside of command execution (after the try block above,
    // the cancel request won't find the thread to interrupt)
    Thread.interrupted();

    boolean shutdown = commandExecutor.shutdown();
    if (shutdown) {
      signalShutdown();
    }
    RunResponse response =
        RunResponse.newBuilder()
            .setCookie(responseCookie)
            .setCommandId(commandId)
            .setFinished(true)
            .setExitCode(exitCode)
            .setTerminationExpected(shutdown)
            .build();

    try {
      observer.onNext(response);
      observer.onCompleted();
    } catch (StatusRuntimeException e) {
      // The client cancelled the call. Log an error and go on.
      logger.info(
          String.format(
              "Client cancelled command %s just right before its end: %s",
              commandId, e.getMessage()));
    }
  }
  // gRPC service implementation. These handlers run on gRPC dispatcher threads (the server is
  // built with directExecutor() in serve()), so anything potentially blocking is bounced to
  // commandExecutorPool.
  private final CommandServerGrpc.CommandServerImplBase commandServer =
      new CommandServerGrpc.CommandServerImplBase() {
        @Override
        public void run(final RunRequest request, final StreamObserver<RunResponse> observer) {
          final GrpcSink sink =
              new GrpcSink(
                  "Run", (ServerCallStreamObserver<RunResponse>) observer, streamExecutorPool);
          // Switch to our own threads so that onReadyStateHandler can be called (see class-level
          // comment)
          commandExecutorPool.execute(() -> executeCommand(request, observer, sink));
        }

        @Override
        public void ping(PingRequest pingRequest, StreamObserver<PingResponse> streamObserver) {
          Preconditions.checkState(serving);

          try (RunningCommand command = new RunningCommand()) {
            PingResponse.Builder response = PingResponse.newBuilder();
            // Only reveal the response cookie to clients that know the request cookie.
            if (pingRequest.getCookie().equals(requestCookie)) {
              response.setCookie(responseCookie);
            }

            streamObserver.onNext(response.build());
            streamObserver.onCompleted();
          } catch (InterruptedException e) {
            // Ignore, we are shutting down anyway
          }
        }

        @Override
        public void cancel(
            final CancelRequest request, final StreamObserver<CancelResponse> streamObserver) {
          logger.info(String.format("Got CancelRequest for command id %s", request.getCommandId()));
          if (!request.getCookie().equals(requestCookie)) {
            streamObserver.onCompleted();
            return;
          }

          // Actually performing the cancellation can result in some blocking which we don't want
          // to do on the dispatcher thread, instead offload to command pool.
          commandExecutorPool.execute(() -> doCancel(request, streamObserver));
        }

        private void doCancel(
            CancelRequest request, StreamObserver<CancelResponse> streamObserver) {
          try (RunningCommand cancelCommand = new RunningCommand()) {
            synchronized (runningCommands) {
              RunningCommand pendingCommand = runningCommands.get(request.getCommandId());
              if (pendingCommand != null) {
                logger.info(
                    String.format(
                        "Interrupting command %s on thread %s",
                        request.getCommandId(), pendingCommand.thread.getName()));
                pendingCommand.thread.interrupt();
                startSlowInterruptWatcher(ImmutableSet.of(request.getCommandId()));
              } else {
                logger.info("Cannot find command " + request.getCommandId() + " to interrupt");
              }
            }

            try {
              streamObserver.onNext(CancelResponse.newBuilder().setCookie(responseCookie).build());
              streamObserver.onCompleted();
            } catch (StatusRuntimeException e) {
              // There is no one to report the failure to
              logger.info(
                  "Client cancelled RPC of cancellation request for " + request.getCommandId());
            }
          } catch (InterruptedException e) {
            // Ignore, we are shutting down anyway
          }
        }
      };
}
| |
/*
* Copyright 2017 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.workbench.common.stunner.lienzo.toolbox.items.impl;
import java.util.function.BiConsumer;
import com.ait.lienzo.client.core.event.NodeDragEndHandler;
import com.ait.lienzo.client.core.event.NodeDragMoveHandler;
import com.ait.lienzo.client.core.event.NodeDragStartHandler;
import com.ait.lienzo.client.core.event.NodeMouseClickHandler;
import com.ait.lienzo.client.core.shape.Group;
import com.ait.lienzo.client.core.shape.IPrimitive;
import com.ait.lienzo.client.core.types.BoundingBox;
import com.ait.lienzo.client.core.types.BoundingPoints;
import com.ait.lienzo.test.LienzoMockitoTestRunner;
import com.ait.tooling.nativetools.client.event.HandlerRegistrationManager;
import com.google.gwt.event.shared.HandlerRegistration;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.uberfire.mvp.Command;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.anyBoolean;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
@RunWith(LienzoMockitoTestRunner.class)
public class ButtonItemImplTest {

    private final BoundingBox boundingBox = new BoundingBox(0d,
                                                            0d,
                                                            100d,
                                                            200d);

    @Mock
    private AbstractFocusableGroupItem<?> groupItem;

    @Mock
    private IPrimitive groupItemPrim;

    @Mock
    private Group groupItemGroup;

    @Mock
    private BiConsumer<Group, Command> showExecutor;

    @Mock
    private BiConsumer<Group, Command> hideExecutor;

    @Mock
    private BoundingPoints boundingPoints;

    @Mock
    private HandlerRegistration clickReg;

    @Mock
    private HandlerRegistration dragStartReg;

    @Mock
    private HandlerRegistration dragMoveReg;

    @Mock
    private HandlerRegistration dragEndReg;

    @Mock
    private HandlerRegistrationManager registrations;

    private ButtonItemImpl tested;

    @Before
    @SuppressWarnings("unchecked")
    public void setUp() {
        when(groupItem.registrations()).thenReturn(registrations);
        when(groupItem.getPrimitive()).thenReturn(groupItemPrim);
        when(groupItemPrim.setListening(anyBoolean())).thenReturn(groupItemPrim);
        when(groupItemPrim.setDraggable(anyBoolean())).thenReturn(groupItemPrim);
        when(groupItemPrim.addNodeMouseClickHandler(any(NodeMouseClickHandler.class))).thenReturn(clickReg);
        when(groupItemPrim.addNodeDragStartHandler(any(NodeDragStartHandler.class))).thenReturn(dragStartReg);
        when(groupItemPrim.addNodeDragMoveHandler(any(NodeDragMoveHandler.class))).thenReturn(dragMoveReg);
        when(groupItemPrim.addNodeDragEndHandler(any(NodeDragEndHandler.class))).thenReturn(dragEndReg);
        when(groupItem.asPrimitive()).thenReturn(groupItemGroup);
        when(boundingPoints.getBoundingBox()).thenReturn(boundingBox);
        when(groupItem.getBoundingBox()).thenReturn(() -> boundingBox);
        // Run the before/after callbacks synchronously so the tests can verify their execution.
        doAnswer(invocationOnMock -> {
            ((Command) invocationOnMock.getArguments()[0]).execute();
            ((Command) invocationOnMock.getArguments()[1]).execute();
            return groupItem;
        }).when(groupItem).show(any(Command.class),
                                any(Command.class));
        doAnswer(invocationOnMock -> {
            ((Command) invocationOnMock.getArguments()[0]).execute();
            ((Command) invocationOnMock.getArguments()[1]).execute();
            return groupItem;
        }).when(groupItem).hide(any(Command.class),
                                any(Command.class));
        tested =
                new ButtonItemImpl(groupItem)
                        .useHideExecutor(hideExecutor)
                        .useShowExecutor(showExecutor);
    }

    @Test
    public void testInit() {
        assertEquals(groupItemGroup,
                     tested.asPrimitive());
        assertEquals(groupItemPrim,
                     tested.getPrimitive());
        assertEquals(groupItem,
                     tested.getWrapped());
        assertEquals(boundingBox,
                     tested.getBoundingBox().get());
        assertFalse(tested.isVisible());
    }

    @Test
    public void testShow() {
        final Command before = mock(Command.class);
        final Command after = mock(Command.class);
        tested.show(before,
                    after);
        verify(groupItem,
               times(1)).show(eq(before),
                              eq(after));
        verify(groupItem,
               never()).hide(any(Command.class),
                             any(Command.class));
        verify(before,
               times(1)).execute();
        verify(after,
               times(1)).execute();
    }

    @Test
    public void testHide() {
        final Command before = mock(Command.class);
        final Command after = mock(Command.class);
        tested.hide(before,
                    after);
        verify(groupItem,
               times(1)).hide(any(Command.class),
                              eq(after));
        verify(groupItem,
               never()).show(any(Command.class),
                             any(Command.class));
        verify(before,
               times(1)).execute();
        verify(after,
               times(1)).execute();
    }

    @Test
    public void testClick() {
        NodeMouseClickHandler handler = mock(NodeMouseClickHandler.class);
        final ButtonItemImpl cascade = tested.onClick(handler);
        assertEquals(tested,
                     cascade);
        verify(groupItemPrim,
               times(1)).setListening(eq(true));
        verify(groupItemPrim,
               times(1)).addNodeMouseClickHandler(eq(handler));
        verify(registrations,
               times(1)).register(eq(clickReg));
        // Destroying the item must release the click handler registration.
        tested.destroy();
        verify(clickReg,
               times(1)).removeHandler();
    }

    @Test
    public void testDragStart() {
        NodeDragStartHandler handler = mock(NodeDragStartHandler.class);
        final ButtonItemImpl cascade = tested.onDragStart(handler);
        assertEquals(tested,
                     cascade);
        verify(groupItemPrim,
               times(1)).setDraggable(eq(true));
        verify(groupItemPrim,
               times(1)).addNodeDragStartHandler(eq(handler));
        verify(registrations,
               times(1)).register(eq(dragStartReg));
        // Destroying the item must release the drag-start handler registration.
        tested.destroy();
        verify(dragStartReg,
               times(1)).removeHandler();
    }

    @Test
    public void testDragMove() {
        NodeDragMoveHandler handler = mock(NodeDragMoveHandler.class);
        final ButtonItemImpl cascade = tested.onDragMove(handler);
        assertEquals(tested,
                     cascade);
        verify(groupItemPrim,
               times(1)).setDraggable(eq(true));
        verify(groupItemPrim,
               times(1)).addNodeDragMoveHandler(eq(handler));
        verify(registrations,
               times(1)).register(eq(dragMoveReg));
        // Destroying the item must release the drag-move handler registration.
        tested.destroy();
        verify(dragMoveReg,
               times(1)).removeHandler();
    }

    @Test
    public void testDragEnd() {
        NodeDragEndHandler handler = mock(NodeDragEndHandler.class);
        final ButtonItemImpl cascade = tested.onDragEnd(handler);
        assertEquals(tested,
                     cascade);
        verify(groupItemPrim,
               times(1)).setDraggable(eq(true));
        verify(groupItemPrim,
               times(1)).addNodeDragEndHandler(eq(handler));
        verify(registrations,
               times(1)).register(eq(dragEndReg));
        // Destroying the item must release the drag-end handler registration.
        tested.destroy();
        verify(dragEndReg,
               times(1)).removeHandler();
    }

    @Test
    public void testDestroy() {
        tested.destroy();
        verify(groupItem,
               times(1)).destroy();
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.namenode;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_NAME_DIR_KEY;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintStream;
import java.net.URI;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Properties;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.StartupOption;
import org.apache.hadoop.hdfs.server.common.Storage;
import org.apache.hadoop.hdfs.server.common.Storage.StorageDirectory;
import org.apache.hadoop.test.PathUtils;
import org.apache.hadoop.util.ExitUtil;
import org.apache.hadoop.util.ExitUtil.ExitException;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
public class TestClusterId {
  private static final Log LOG = LogFactory.getLog(TestClusterId.class);
  // Name directory used by each test; recreated in setUp() and removed in tearDown().
  File hdfsDir;
  // Per-test configuration pointing DFS_NAMENODE_NAME_DIR_KEY at hdfsDir.
  Configuration config;
private String getClusterId(Configuration config) throws IOException {
// see if cluster id not empty.
Collection<URI> dirsToFormat = FSNamesystem.getNamespaceDirs(config);
List<URI> editsToFormat = FSNamesystem.getNamespaceEditsDirs(config);
FSImage fsImage = new FSImage(config, dirsToFormat, editsToFormat);
Iterator<StorageDirectory> sdit =
fsImage.getStorage().dirIterator(NNStorage.NameNodeDirType.IMAGE);
StorageDirectory sd = sdit.next();
Properties props = Storage.readPropertiesFile(sd.getVersionFile());
String cid = props.getProperty("clusterID");
LOG.info("successfully formated : sd="+sd.getCurrentDir() + ";cid="+cid);
return cid;
}
@Before
public void setUp() throws IOException {
ExitUtil.disableSystemExit();
String baseDir = PathUtils.getTestDirName(getClass());
hdfsDir = new File(baseDir, "dfs/name");
if (hdfsDir.exists() && !FileUtil.fullyDelete(hdfsDir)) {
throw new IOException("Could not delete test directory '" + hdfsDir + "'");
}
LOG.info("hdfsdir is " + hdfsDir.getAbsolutePath());
// as some tests might change these values we reset them to defaults before
// every test
StartupOption.FORMAT.setForceFormat(false);
StartupOption.FORMAT.setInteractiveFormat(true);
config = new Configuration();
config.set(DFS_NAMENODE_NAME_DIR_KEY, hdfsDir.getPath());
}
@After
public void tearDown() throws IOException {
if (hdfsDir.exists() && !FileUtil.fullyDelete(hdfsDir)) {
throw new IOException("Could not tearDown test directory '" + hdfsDir
+ "'");
}
}
@Test
public void testFormatClusterIdOption() throws IOException {
// 1. should format without cluster id
//StartupOption.FORMAT.setClusterId("");
NameNode.format(config);
// see if cluster id not empty.
String cid = getClusterId(config);
assertTrue("Didn't get new ClusterId", (cid != null && !cid.equals("")) );
// 2. successful format with given clusterid
StartupOption.FORMAT.setClusterId("mycluster");
NameNode.format(config);
// see if cluster id matches with given clusterid.
cid = getClusterId(config);
assertTrue("ClusterId didn't match", cid.equals("mycluster"));
// 3. format without any clusterid again. It should generate new
//clusterid.
StartupOption.FORMAT.setClusterId("");
NameNode.format(config);
String newCid = getClusterId(config);
assertFalse("ClusterId should not be the same", newCid.equals(cid));
}
/**
* Test namenode format with -format option. Format should succeed.
*
* @throws IOException
*/
@Test
public void testFormat() throws IOException {
String[] argv = { "-format" };
try {
NameNode.createNameNode(argv, config);
fail("createNameNode() did not call System.exit()");
} catch (ExitException e) {
assertEquals("Format should have succeeded", 0, e.status);
}
String cid = getClusterId(config);
assertTrue("Didn't get new ClusterId", (cid != null && !cid.equals("")));
}
/**
* Test namenode format with -format option when an empty name directory
* exists. Format should succeed.
*
* @throws IOException
*/
@Test
public void testFormatWithEmptyDir() throws IOException {
if (!hdfsDir.mkdirs()) {
fail("Failed to create dir " + hdfsDir.getPath());
}
String[] argv = { "-format" };
try {
NameNode.createNameNode(argv, config);
fail("createNameNode() did not call System.exit()");
} catch (ExitException e) {
assertEquals("Format should have succeeded", 0, e.status);
}
String cid = getClusterId(config);
assertTrue("Didn't get new ClusterId", (cid != null && !cid.equals("")));
}
/**
* Test namenode format with -format -force options when name directory
* exists. Format should succeed.
*
* @throws IOException
*/
@Test
public void testFormatWithForce() throws IOException {
if (!hdfsDir.mkdirs()) {
fail("Failed to create dir " + hdfsDir.getPath());
}
String[] argv = { "-format", "-force" };
try {
NameNode.createNameNode(argv, config);
fail("createNameNode() did not call System.exit()");
} catch (ExitException e) {
assertEquals("Format should have succeeded", 0, e.status);
}
String cid = getClusterId(config);
assertTrue("Didn't get new ClusterId", (cid != null && !cid.equals("")));
}
/**
* Test namenode format with -format -force -clusterid option when name
* directory exists. Format should succeed.
*
* @throws IOException
*/
@Test
public void testFormatWithForceAndClusterId() throws IOException {
if (!hdfsDir.mkdirs()) {
fail("Failed to create dir " + hdfsDir.getPath());
}
String myId = "testFormatWithForceAndClusterId";
String[] argv = { "-format", "-force", "-clusterid", myId };
try {
NameNode.createNameNode(argv, config);
fail("createNameNode() did not call System.exit()");
} catch (ExitException e) {
assertEquals("Format should have succeeded", 0, e.status);
}
String cId = getClusterId(config);
assertEquals("ClusterIds do not match", myId, cId);
}
/**
* Test namenode format with -clusterid -force option. Format command should
* fail as no cluster id was provided.
*
* @throws IOException
*/
@Test
public void testFormatWithInvalidClusterIdOption() throws IOException {
String[] argv = { "-format", "-clusterid", "-force" };
PrintStream origErr = System.err;
ByteArrayOutputStream baos = new ByteArrayOutputStream();
PrintStream stdErr = new PrintStream(baos);
System.setErr(stdErr);
try {
NameNode.createNameNode(argv, config);
// Check if usage is printed
assertTrue(baos.toString("UTF-8").contains("Usage: hdfs namenode"));
} finally {
System.setErr(origErr);
}
// check if the version file does not exists.
File version = new File(hdfsDir, "current/VERSION");
assertFalse("Check version should not exist", version.exists());
}
/**
* Test namenode format with -format -clusterid options. Format should fail
* was no clusterid was sent.
*
* @throws IOException
*/
@Test
public void testFormatWithNoClusterIdOption() throws IOException {
String[] argv = { "-format", "-clusterid" };
PrintStream origErr = System.err;
ByteArrayOutputStream baos = new ByteArrayOutputStream();
PrintStream stdErr = new PrintStream(baos);
System.setErr(stdErr);
try {
NameNode.createNameNode(argv, config);
// Check if usage is printed
assertTrue(baos.toString("UTF-8").contains("Usage: hdfs namenode"));
} finally {
System.setErr(origErr);
}
// check if the version file does not exists.
File version = new File(hdfsDir, "current/VERSION");
assertFalse("Check version should not exist", version.exists());
}
/**
* Test namenode format with -format -clusterid and empty clusterid. Format
* should fail as no valid if was provided.
*
* @throws IOException
*/
@Test
public void testFormatWithEmptyClusterIdOption() throws IOException {
String[] argv = { "-format", "-clusterid", "" };
PrintStream origErr = System.err;
ByteArrayOutputStream baos = new ByteArrayOutputStream();
PrintStream stdErr = new PrintStream(baos);
System.setErr(stdErr);
try {
NameNode.createNameNode(argv, config);
// Check if usage is printed
assertTrue(baos.toString("UTF-8").contains("Usage: hdfs namenode"));
} finally {
System.setErr(origErr);
}
// check if the version file does not exists.
File version = new File(hdfsDir, "current/VERSION");
assertFalse("Check version should not exist", version.exists());
}
/**
* Test namenode format with -format -nonInteractive options when a non empty
* name directory exists. Format should not succeed.
*
* @throws IOException
*/
@Test
public void testFormatWithNonInteractive() throws IOException {
// we check for a non empty dir, so create a child path
File data = new File(hdfsDir, "file");
if (!data.mkdirs()) {
fail("Failed to create dir " + data.getPath());
}
String[] argv = { "-format", "-nonInteractive" };
try {
NameNode.createNameNode(argv, config);
fail("createNameNode() did not call System.exit()");
} catch (ExitException e) {
assertEquals("Format should have been aborted with exit code 1", 1,
e.status);
}
// check if the version file does not exists.
File version = new File(hdfsDir, "current/VERSION");
assertFalse("Check version should not exist", version.exists());
}
/**
* Test namenode format with -format -nonInteractive options when name
* directory does not exist. Format should succeed.
*
* @throws IOException
*/
@Test
public void testFormatWithNonInteractiveNameDirDoesNotExit()
throws IOException {
String[] argv = { "-format", "-nonInteractive" };
try {
NameNode.createNameNode(argv, config);
fail("createNameNode() did not call System.exit()");
} catch (ExitException e) {
assertEquals("Format should have succeeded", 0, e.status);
}
String cid = getClusterId(config);
assertTrue("Didn't get new ClusterId", (cid != null && !cid.equals("")));
}
/**
* Test namenode format with -force -nonInteractive -force option. Format
* should succeed.
*
* @throws IOException
*/
@Test
public void testFormatWithNonInteractiveAndForce() throws IOException {
if (!hdfsDir.mkdirs()) {
fail("Failed to create dir " + hdfsDir.getPath());
}
String[] argv = { "-format", "-nonInteractive", "-force" };
try {
NameNode.createNameNode(argv, config);
fail("createNameNode() did not call System.exit()");
} catch (ExitException e) {
assertEquals("Format should have succeeded", 0, e.status);
}
String cid = getClusterId(config);
assertTrue("Didn't get new ClusterId", (cid != null && !cid.equals("")));
}
/**
* Test namenode format with -format option when a non empty name directory
* exists. Enter Y when prompted and the format should succeed.
*
* @throws IOException
* @throws InterruptedException
*/
@Test
public void testFormatWithoutForceEnterYes() throws IOException,
InterruptedException {
// we check for a non empty dir, so create a child path
File data = new File(hdfsDir, "file");
if (!data.mkdirs()) {
fail("Failed to create dir " + data.getPath());
}
// capture the input stream
InputStream origIn = System.in;
ByteArrayInputStream bins = new ByteArrayInputStream("Y\n".getBytes());
System.setIn(bins);
String[] argv = { "-format" };
try {
NameNode.createNameNode(argv, config);
fail("createNameNode() did not call System.exit()");
} catch (ExitException e) {
assertEquals("Format should have succeeded", 0, e.status);
}
System.setIn(origIn);
String cid = getClusterId(config);
assertTrue("Didn't get new ClusterId", (cid != null && !cid.equals("")));
}
/**
* Test namenode format with -format option when a non empty name directory
* exists. Enter N when prompted and format should be aborted.
*
* @throws IOException
* @throws InterruptedException
*/
@Test
public void testFormatWithoutForceEnterNo() throws IOException,
InterruptedException {
// we check for a non empty dir, so create a child path
File data = new File(hdfsDir, "file");
if (!data.mkdirs()) {
fail("Failed to create dir " + data.getPath());
}
// capture the input stream
InputStream origIn = System.in;
ByteArrayInputStream bins = new ByteArrayInputStream("N\n".getBytes());
System.setIn(bins);
String[] argv = { "-format" };
try {
NameNode.createNameNode(argv, config);
fail("createNameNode() did not call System.exit()");
} catch (ExitException e) {
assertEquals("Format should not have succeeded", 1, e.status);
}
System.setIn(origIn);
// check if the version file does not exists.
File version = new File(hdfsDir, "current/VERSION");
assertFalse("Check version should not exist", version.exists());
}
}
| |
package org.osmdroid.tileprovider;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Rect;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.Drawable;
import android.os.Build;
import android.os.Handler;
import android.util.Log;
import org.osmdroid.api.IMapView;
import org.osmdroid.config.Configuration;
import org.osmdroid.tileprovider.modules.IFilesystemCache;
import org.osmdroid.tileprovider.modules.MapTileApproximater;
import org.osmdroid.tileprovider.tilesource.ITileSource;
import org.osmdroid.util.MapTileIndex;
import org.osmdroid.util.PointL;
import org.osmdroid.util.RectL;
import org.osmdroid.util.TileLooper;
import org.osmdroid.util.TileSystem;
import org.osmdroid.views.Projection;
import java.util.Collection;
import java.util.ConcurrentModificationException;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashSet;
/**
* This is an abstract class. The tile provider is responsible for:
* <ul>
* <li>determining if a map tile is available,</li>
* <li>notifying the client, via a callback handler</li>
* </ul>
* see {@link MapTileIndex} for an overview of how tiles are served by this provider.
*
* @author Marc Kurtz
* @author Nicolas Gramlich
* @author plusminus on 21:46:22 - 25.09.2008
* @author and many other contributors
*/
public abstract class MapTileProviderBase implements IMapTileProviderCallback {
    // Message ids delivered to the registered Handlers on tile completion/failure.
    public static final int MAPTILE_SUCCESS_ID = 0;
    public static final int MAPTILE_FAIL_ID = MAPTILE_SUCCESS_ID + 1;
    // Fill color painted behind tiles approximated while zooming out.
    private static int sApproximationBackgroundColor = Color.LTGRAY;
    // In-memory tile cache shared by all request paths.
    protected final MapTileCache mTileCache;
    // Handlers notified when a tile request completes; iterated fail-fast in
    // sendMessageFailFast(), may contain a null entry (skipped when messaging).
    private final Collection<Handler> mTileRequestCompleteHandlers = new LinkedHashSet<>();
    protected boolean mUseDataConnection = true;
    // Optional placeholder cached in place of tiles that could not be found.
    protected Drawable mTileNotFoundImage = null;
    private ITileSource mTileSource;
    /**
     * Attempts to get a Drawable that represents a {@link MapTileIndex}. If the tile is not immediately
     * available this will return null and attempt to get the tile from known tile sources for
     * subsequent future requests. Note that this may return a {@link ReusableBitmapDrawable} in
     * which case you should follow proper handling procedures for using that Drawable or it may
     * be reused while you are working with it.
     *
     * @see ReusableBitmapDrawable
     */
    public abstract Drawable getMapTile(final long pMapTileIndex);
    /**
     * classes that extend MapTileProviderBase must call this method to prevent memory leaks.
     * Updated 5.2+
     */
    public void detach() {
        // NOTE(review): clearTileCache() is invoked both here and at the end of
        // this method; the second call appears redundant — confirm before removing.
        clearTileCache();
        if (mTileNotFoundImage != null) {
            // Only recycle if we are running on a project less than 2.3.3 Gingerbread.
            if (Build.VERSION.SDK_INT < Build.VERSION_CODES.GINGERBREAD) {
                if (mTileNotFoundImage instanceof BitmapDrawable) {
                    final Bitmap bitmap = ((BitmapDrawable) mTileNotFoundImage).getBitmap();
                    if (bitmap != null) {
                        bitmap.recycle();
                    }
                }
            }
            if (mTileNotFoundImage instanceof ReusableBitmapDrawable)
                BitmapPool.getInstance().returnDrawableToPool((ReusableBitmapDrawable) mTileNotFoundImage);
        }
        mTileNotFoundImage = null;
        clearTileCache();
    }
    /**
     * Sets the default color for approximated tiles.
     *
     * @param pColor the default color that will be shown for approximated tiles
     */
    public static void setApproximationBackgroundColor(final int pColor) {
        sApproximationBackgroundColor = pColor;
    }
    /**
     * Gets the minimum zoom level this tile provider can provide
     *
     * @return the minimum zoom level
     */
    public abstract int getMinimumZoomLevel();
    /**
     * Gets the maximum zoom level this tile provider can provide
     *
     * @return the maximum zoom level
     */
    public abstract int getMaximumZoomLevel();
    /**
     * Sets the tile source for this tile provider. Clears the in-memory cache,
     * since cached tiles belong to the previous source.
     *
     * @param pTileSource the tile source
     */
    public void setTileSource(final ITileSource pTileSource) {
        mTileSource = pTileSource;
        clearTileCache();
    }
    /**
     * Gets the tile source for this tile provider.
     *
     * @return the tile source
     */
    public ITileSource getTileSource() {
        return mTileSource;
    }
    /**
     * Creates a {@link MapTileCache} to be used to cache tiles in memory.
     * Subclasses may override to supply a customised cache.
     */
    public MapTileCache createTileCache() {
        return new MapTileCache();
    }
    public MapTileProviderBase(final ITileSource pTileSource) {
        this(pTileSource, null);
    }
    public MapTileProviderBase(final ITileSource pTileSource,
                               final Handler pDownloadFinishedListener) {
        mTileCache = this.createTileCache();
        // A null listener is tolerated here; sendMessageFailFast() skips nulls.
        mTileRequestCompleteHandlers.add(pDownloadFinishedListener);
        mTileSource = pTileSource;
    }
    /**
     * Sets the "sorry we can't load a tile for this location" image. If it's null, the default view
     * is shown, which is the standard grey grid controlled by the tiles overlay
     * {@link org.osmdroid.views.overlay.TilesOverlay#setLoadingLineColor(int)} and
     * {@link org.osmdroid.views.overlay.TilesOverlay#setLoadingBackgroundColor(int)}
     *
     * @param drawable the placeholder image, or null for the default grid
     * @since 5.2+
     */
    public void setTileLoadFailureImage(final Drawable drawable) {
        this.mTileNotFoundImage = drawable;
    }
    /**
     * Called by implementation class methods indicating that they have completed the request as
     * best it can. The tile is added to the cache, and a MAPTILE_SUCCESS_ID message is sent.
     *
     * @param pState the map tile request state object
     * @param pDrawable the Drawable of the map tile
     */
    @Override
    public void mapTileRequestCompleted(final MapTileRequestState pState, final Drawable pDrawable) {
        // put the tile in the cache
        putTileIntoCache(pState.getMapTile(), pDrawable, ExpirableBitmapDrawable.UP_TO_DATE);
        // tell our caller we've finished and it should update its view
        sendMessage(MAPTILE_SUCCESS_ID);
        if (Configuration.getInstance().isDebugTileProviders()) {
            Log.d(IMapView.LOGTAG, "MapTileProviderBase.mapTileRequestCompleted(): " + MapTileIndex.toString(pState.getMapTile()));
        }
    }
    /**
     * Called by implementation class methods indicating that they have failed to retrieve the
     * requested map tile. a MAPTILE_FAIL_ID message is sent, unless a "not found"
     * placeholder image is configured, in which case the placeholder is cached
     * and a MAPTILE_SUCCESS_ID message is sent instead.
     *
     * @param pState the map tile request state object
     */
    @Override
    public void mapTileRequestFailed(final MapTileRequestState pState) {
        if (mTileNotFoundImage != null) {
            putTileIntoCache(pState.getMapTile(), mTileNotFoundImage, ExpirableBitmapDrawable.NOT_FOUND);
            sendMessage(MAPTILE_SUCCESS_ID);
        } else {
            sendMessage(MAPTILE_FAIL_ID);
        }
        if (Configuration.getInstance().isDebugTileProviders()) {
            Log.d(IMapView.LOGTAG, "MapTileProviderBase.mapTileRequestFailed(): " + MapTileIndex.toString(pState.getMapTile()));
        }
    }
    /**
     * Called by implementation class methods indicating that they have failed to retrieve the
     * requested map tile, because the max queue size has been reached
     *
     * @param pState the map tile request state object
     */
    @Override
    public void mapTileRequestFailedExceedsMaxQueueSize(final MapTileRequestState pState) {
        mapTileRequestFailed(pState);
    }
    /**
     * Called by implementation class methods indicating that they have produced an expired result
     * that can be used but better results may be delivered later. The tile is added to the cache,
     * and a MAPTILE_SUCCESS_ID message is sent.
     *
     * @param pState the map tile request state object
     * @param pDrawable the Drawable of the map tile
     */
    @Override
    public void mapTileRequestExpiredTile(MapTileRequestState pState, Drawable pDrawable) {
        // preserve the drawable's own expiry state rather than forcing UP_TO_DATE
        putTileIntoCache(pState.getMapTile(), pDrawable, ExpirableBitmapDrawable.getState(pDrawable));
        // tell our caller we've finished and it should update its view
        sendMessage(MAPTILE_SUCCESS_ID);
        if (Configuration.getInstance().isDebugTileProviders()) {
            Log.d(IMapView.LOGTAG, "MapTileProviderBase.mapTileRequestExpiredTile(): " + MapTileIndex.toString(pState.getMapTile()));
        }
    }
    /**
     * Caches a tile drawable, but never downgrades: if an entry with a higher
     * (better) state is already cached for the same index, the new one is dropped.
     *
     * @since 5.6.5
     */
    protected void putTileIntoCache(final long pMapTileIndex, final Drawable pDrawable, final int pState) {
        if (pDrawable == null) {
            return;
        }
        final Drawable before = mTileCache.getMapTile(pMapTileIndex);
        if (before != null) {
            final int stateBefore = ExpirableBitmapDrawable.getState(before);
            if (stateBefore > pState) {
                // existing entry is fresher; keep it
                return;
            }
        }
        ExpirableBitmapDrawable.setState(pDrawable, pState);
        mTileCache.putTile(pMapTileIndex, pDrawable);
    }
    /**
     * @deprecated Use {@link #putTileIntoCache(long, Drawable, int)}} instead
     */
    @Deprecated
    protected void putExpiredTileIntoCache(MapTileRequestState pState, Drawable pDrawable) {
        putTileIntoCache(pState.getMapTile(), pDrawable, ExpirableBitmapDrawable.EXPIRED);
    }
    /**
     * @deprecated Use {@link #getTileRequestCompleteHandlers()} instead
     */
    @Deprecated
    public void setTileRequestCompleteHandler(final Handler handler) {
        mTileRequestCompleteHandlers.clear();
        mTileRequestCompleteHandlers.add(handler);
    }
    /**
     * Returns the live (mutable) collection of completion handlers.
     *
     * @since 6.1.0
     */
    public Collection<Handler> getTileRequestCompleteHandlers() {
        return mTileRequestCompleteHandlers;
    }
    /** Ensures the in-memory cache can hold at least {@code pCapacity} tiles. */
    public void ensureCapacity(final int pCapacity) {
        mTileCache.ensureCapacity(pCapacity);
    }
    /**
     * @since 6.0.0
     */
    public MapTileCache getTileCache() {
        return mTileCache;
    }
    /**
     * purges the cache of all tiles (default is the in memory cache)
     */
    public void clearTileCache() {
        mTileCache.clear();
    }
    /**
     * Whether to use the network connection if it's available.
     */
    @Override
    public boolean useDataConnection() {
        return mUseDataConnection;
    }
    /**
     * Set whether to use the network connection if it's available.
     *
     * @param pMode if true use the network connection if it's available. if false don't use the
     *              network connection even if it's available.
     */
    public void setUseDataConnection(final boolean pMode) {
        mUseDataConnection = pMode;
    }
    /**
     * Recreate the cache using scaled versions of the tiles currently in it
     *
     * @param pProjection  projection used to convert the view port to mercator pixels
     * @param pNewZoomLevel the zoom level that we need now
     * @param pOldZoomLevel the previous zoom level that we should get the tiles to rescale
     * @param pViewPort the view port we need tiles for
     */
    public void rescaleCache(final Projection pProjection, final double pNewZoomLevel,
                             final double pOldZoomLevel, final Rect pViewPort) {
        if (TileSystem.getInputTileZoomLevel(pNewZoomLevel) == TileSystem.getInputTileZoomLevel(pOldZoomLevel)) {
            // same integer tile zoom: nothing to rescale
            return;
        }
        final long startMs = System.currentTimeMillis();
        if (Configuration.getInstance().isDebugTileProviders())
            Log.i(IMapView.LOGTAG, "rescale tile cache from " + pOldZoomLevel + " to " + pNewZoomLevel);
        final PointL topLeftMercator = pProjection.toMercatorPixels(pViewPort.left, pViewPort.top, null);
        final PointL bottomRightMercator = pProjection.toMercatorPixels(pViewPort.right, pViewPort.bottom,
                null);
        final RectL viewPortMercator = new RectL(
                topLeftMercator.x, topLeftMercator.y, bottomRightMercator.x, bottomRightMercator.y);
        // zooming in scales one old tile up; zooming out assembles many old tiles
        final ScaleTileLooper tileLooper = pNewZoomLevel > pOldZoomLevel
                ? new ZoomInTileLooper()
                : new ZoomOutTileLooper();
        tileLooper.loop(pNewZoomLevel, viewPortMercator, pOldZoomLevel, getTileSource().getTileSizePixels());
        final long endMs = System.currentTimeMillis();
        if (Configuration.getInstance().isDebugTileProviders())
            Log.i(IMapView.LOGTAG, "Finished rescale in " + (endMs - startMs) + "ms");
    }
    /**
     * Base looper that walks the view port at the new zoom level and, for each
     * tile not already available, synthesises a scaled stand-in from cached
     * tiles of the old zoom level.
     */
    private abstract class ScaleTileLooper extends TileLooper {
        /**
         * new (scaled) tiles to add to cache
         * NB first generate all and then put all in cache,
         * otherwise the ones we need will be pushed out
         */
        protected final HashMap<Long, Bitmap> mNewTiles = new HashMap<>();
        // integer tile zoom of the tiles we are scaling FROM
        protected int mOldTileZoomLevel;
        // tile edge size in pixels at the source
        protected int mTileSize;
        // absolute zoom-level difference between old and new
        protected int mDiff;
        // mTileSize >> mDiff: sub-tile edge size used when assembling/splitting
        protected int mTileSize_2;
        protected Rect mSrcRect;
        protected Rect mDestRect;
        protected Paint mDebugPaint;
        // false when old and new zoom are the same tile zoom (nothing to do)
        private boolean isWorth;
        public void loop(final double pZoomLevel, final RectL pViewPortMercator, final double pOldZoomLevel, final int pTileSize) {
            mSrcRect = new Rect();
            mDestRect = new Rect();
            mDebugPaint = new Paint();
            mOldTileZoomLevel = TileSystem.getInputTileZoomLevel(pOldZoomLevel);
            mTileSize = pTileSize;
            loop(pZoomLevel, pViewPortMercator);
        }
        @Override
        public void initialiseLoop() {
            super.initialiseLoop();
            mDiff = Math.abs(mTileZoomLevel - mOldTileZoomLevel);
            mTileSize_2 = mTileSize >> mDiff;
            isWorth = mDiff != 0;
        }
        @Override
        public void handleTile(final long pMapTileIndex, final int pX, final int pY) {
            if (!isWorth) {
                return;
            }
            // Get tile from cache.
            // If it's found then no need to created scaled version.
            // If not found (null) them we've initiated a new request for it,
            // and now we'll create a scaled version until the request completes.
            final Drawable requestedTile = getMapTile(pMapTileIndex);
            if (requestedTile == null) {
                try {
                    computeTile(pMapTileIndex, pX, pY);
                } catch (final OutOfMemoryError e) {
                    Log.e(IMapView.LOGTAG, "OutOfMemoryError rescaling cache");
                }
            }
        }
        @Override
        public void finaliseLoop() {
            // now add the new ones, pushing out the old ones
            while (!mNewTiles.isEmpty()) {
                final long index = mNewTiles.keySet().iterator().next();
                final Bitmap bitmap = mNewTiles.remove(index);
                putScaledTileIntoCache(index, bitmap);
            }
        }
        // Build a scaled bitmap for the given tile index into mNewTiles.
        protected abstract void computeTile(final long pMapTileIndex, final int pX, final int pY);
        /**
         * Wraps the bitmap and caches it with SCALED state; in debug mode the
         * tile is watermarked with the text "scaled".
         *
         * @since 5.6.5
         */
        protected void putScaledTileIntoCache(final long pMapTileIndex, final Bitmap pBitmap) {
            final ReusableBitmapDrawable drawable = new ReusableBitmapDrawable(pBitmap);
            putTileIntoCache(pMapTileIndex, drawable, ExpirableBitmapDrawable.SCALED);
            if (Configuration.getInstance().isDebugMode()) {
                Log.d(IMapView.LOGTAG, "Created scaled tile: " + MapTileIndex.toString(pMapTileIndex));
                mDebugPaint.setTextSize(40);
                final Canvas canvas = new Canvas(pBitmap);
                canvas.drawText("scaled", 50, 50, mDebugPaint);
            }
        }
    }
    /** Zooming in: enlarge the matching fraction of one lower-zoom cached tile. */
    private class ZoomInTileLooper extends ScaleTileLooper {
        @Override
        public void computeTile(final long pMapTileIndex, final int pX, final int pY) {
            // get the correct fraction of the tile from cache and scale up
            final long oldTile = MapTileIndex.getTileIndex(mOldTileZoomLevel,
                    MapTileIndex.getX(pMapTileIndex) >> mDiff, MapTileIndex.getY(pMapTileIndex) >> mDiff);
            final Drawable oldDrawable = mTileCache.getMapTile(oldTile);
            if (oldDrawable instanceof BitmapDrawable) {
                final Bitmap bitmap = MapTileApproximater.approximateTileFromLowerZoom(
                        (BitmapDrawable) oldDrawable, pMapTileIndex, mDiff);
                if (bitmap != null) {
                    mNewTiles.put(pMapTileIndex, bitmap);
                }
            }
        }
    }
    /** Zooming out: tile a grid of higher-zoom cached tiles into one bitmap. */
    private class ZoomOutTileLooper extends ScaleTileLooper {
        // beyond this zoom difference the 2^diff x 2^diff assembly is too costly
        private static final int MAX_ZOOM_OUT_DIFF = 4;
        @Override
        protected void computeTile(final long pMapTileIndex, final int pX, final int pY) {
            if (mDiff >= MAX_ZOOM_OUT_DIFF) {
                return;
            }
            // get many tiles from cache and make one tile from them
            final int xx = MapTileIndex.getX(pMapTileIndex) << mDiff;
            final int yy = MapTileIndex.getY(pMapTileIndex) << mDiff;
            final int numTiles = 1 << mDiff;
            Bitmap bitmap = null;
            Canvas canvas = null;
            for (int x = 0; x < numTiles; x++) {
                for (int y = 0; y < numTiles; y++) {
                    final long oldTile = MapTileIndex.getTileIndex(mOldTileZoomLevel, xx + x, yy + y);
                    final Drawable oldDrawable = mTileCache.getMapTile(oldTile);
                    if (oldDrawable instanceof BitmapDrawable) {
                        final Bitmap oldBitmap = ((BitmapDrawable) oldDrawable).getBitmap();
                        if (oldBitmap != null) {
                            // lazily allocate the target only when a source tile exists
                            if (bitmap == null) {
                                bitmap = MapTileApproximater.getTileBitmap(mTileSize);
                                canvas = new Canvas(bitmap);
                                canvas.drawColor(sApproximationBackgroundColor);
                            }
                            mDestRect.set(
                                    x * mTileSize_2, y * mTileSize_2,
                                    (x + 1) * mTileSize_2, (y + 1) * mTileSize_2);
                            canvas.drawBitmap(oldBitmap, null, mDestRect, null);
                        }
                    }
                }
            }
            if (bitmap != null) {
                mNewTiles.put(pMapTileIndex, bitmap);
            }
        }
    }
    public abstract IFilesystemCache getTileWriter();
    /**
     * @return the number of tile requests currently in the queue
     * @since 5.6
     */
    public abstract long getQueueSize();
    /**
     * Expire a tile that is in the memory cache
     * Typical use is for mapsforge, where the contents of the tile can evolve,
     * depending on the neighboring tiles that have been displayed so far.
     *
     * @since 6.0.3
     */
    public void expireInMemoryCache(final long pMapTileIndex) {
        final Drawable drawable = mTileCache.getMapTile(pMapTileIndex);
        if (drawable != null) {
            ExpirableBitmapDrawable.setState(drawable, ExpirableBitmapDrawable.EXPIRED);
        }
    }
    /**
     * Concurrency exception management (cf. https://github.com/osmdroid/osmdroid/issues/1446)
     * Given the likelihood of consecutive ConcurrentModificationException's,
     * we just try again and 3 attempts are supposedly enough.
     *
     * @since 6.2.0
     */
    private void sendMessage(final int pMessageId) {
        for (int attempt = 0; attempt < 3; attempt++) {
            if (sendMessageFailFast(pMessageId)) {
                return;
            }
        }
    }
    /**
     * Concurrency exception management (cf. https://github.com/osmdroid/osmdroid/issues/1446)
     * Of course a for-each loop would make sense, but it's prone to concurrency issues.
     *
     * @return false if a ConcurrentModificationException was thrown
     * @since 6.2.0
     */
    @SuppressWarnings("ForLoopReplaceableByForEach")
    private boolean sendMessageFailFast(final int pMessageId) {
        for (final Iterator<Handler> iterator = mTileRequestCompleteHandlers.iterator(); iterator.hasNext(); ) {
            final Handler handler;
            try {
                handler = iterator.next();
            } catch (final ConcurrentModificationException cme) {
                return false;
            }
            if (handler != null) {
                handler.sendEmptyMessage(pMessageId);
            }
        }
        return true;
    }
}
| |
package com.track.trackxtreme.data;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import com.google.android.gms.maps.model.LatLngBounds;
import com.j256.ormlite.android.apptools.OrmLiteSqliteOpenHelper;
import com.j256.ormlite.dao.Dao;
import com.j256.ormlite.dao.ForeignCollection;
import com.j256.ormlite.dao.GenericRawResults;
import com.j256.ormlite.stmt.ColumnArg;
import com.j256.ormlite.stmt.QueryBuilder;
import com.j256.ormlite.stmt.Where;
import com.j256.ormlite.support.ConnectionSource;
import com.j256.ormlite.table.TableUtils;
import com.track.trackxtreme.RaceListener;
import com.track.trackxtreme.TrackUpdateListener;
import com.track.trackxtreme.TrackUpdater;
import com.track.trackxtreme.data.track.Track;
import com.track.trackxtreme.data.track.TrackPoint;
import com.track.trackxtreme.data.track.TrackRecord;
import android.content.ContentValues;
import android.content.Context;
import android.database.sqlite.SQLiteDatabase;
import android.location.Location;
import android.util.JsonReader;
import android.util.Log;
import android.view.MenuItem;
public class TrackXtremeOpenHelper extends OrmLiteSqliteOpenHelper {
private static final int DB_VERSION = 3;
private static final String TRACKXTREME_DB = "trackxtreme";
private Context context;
private Dao<Track, Integer> trackDao;
private Dao<TrackRecord, Integer> trackRecordDao;
private Dao<TrackPoint, Integer> trackPointDao;
/**
 * Creates the ORMLite SQLite helper for the "trackxtreme" database.
 *
 * @param context Android context; retained to open assets and delete the
 *                database file in {@link #dropall()}
 */
public TrackXtremeOpenHelper(Context context) {
    super(context, TRACKXTREME_DB, null, DB_VERSION);
    this.context = context;
}
/**
 * Creates the Track, TrackRecord and TrackPoint tables. Invoked by
 * OrmLiteSqliteOpenHelper the first time the database file is created.
 *
 * @param db               the newly created database
 * @param connectionSource ORMLite connection source used by TableUtils
 * @throws RuntimeException if table creation fails — continuing with a
 *                          missing schema would make every later query fail,
 *                          so fail fast instead of swallowing the error
 */
@Override
public void onCreate(SQLiteDatabase db, ConnectionSource connectionSource) {
    try {
        TableUtils.createTable(connectionSource, Track.class);
        TableUtils.createTable(connectionSource, TrackRecord.class);
        TableUtils.createTable(connectionSource, TrackPoint.class);
    } catch (SQLException e) {
        // Previously printStackTrace() + continue; propagate with context so
        // the failure is visible at the call site rather than much later.
        throw new RuntimeException("Failed to create trackxtreme tables", e);
    }
}
/**
 * Called each time the database is opened; delegates to the superclass after
 * logging. No schema work happens here.
 */
@Override
public void onOpen(SQLiteDatabase db) {
    System.out.println("TrackXtremeOpenHelper.onOpen()");
    super.onOpen(db);
}
/**
 * Applies schema migrations. The v3 migration adds the {@code round},
 * {@code start_id} and {@code end_id} columns to the {@code track} table.
 *
 * @param db               database being upgraded
 * @param connectionSource ORMLite connection source
 * @param oldVer           schema version currently on disk
 * @param newVer           target schema version (DB_VERSION)
 */
@Override
public void onUpgrade(SQLiteDatabase db, ConnectionSource connectionSource, int oldVer, int newVer) {
    Log.d("Database upgrade", "old:" + oldVer + " new :" + newVer);
    // Bug fix: the old check `newVer == 3` ran the v3 migration only when the
    // target version was exactly 3, silently skipping it on any future upgrade
    // path (e.g. 2 -> 4). Gate on the on-disk version instead.
    if (oldVer < 3) {
        try {
            Dao<Track, Integer> dao = getTrackDao();
            dao.executeRaw("ALTER TABLE `track` ADD COLUMN round BOOLEAN;");
            dao.executeRaw("ALTER TABLE `track` ADD COLUMN start_id INTEGER;");
            dao.executeRaw("ALTER TABLE `track` ADD COLUMN end_id INTEGER;");
        } catch (SQLException e) {
            // Best-effort: keep the original lenient behavior on ALTER failure.
            e.printStackTrace();
        }
    }
}
/**
 * Drops every table whose name appears as a top-level key in the
 * assets/database/db.json object, then deletes the whole database file.
 * The per-table nested objects (column descriptions, presumably — TODO
 * confirm against the asset) are parsed only to advance the reader.
 */
public void dropall() {
    SQLiteDatabase db = getWritableDatabase();
    // Bug fix: the reader (and underlying asset stream) was leaked when
    // parsing or a DROP statement threw; try-with-resources closes it always.
    try (JsonReader json = new JsonReader(new InputStreamReader(
            context.getResources().getAssets().open("database/db.json")))) {
        json.beginObject();
        while (json.hasNext()) {
            String dbName = json.nextName();
            System.out.println(dbName);
            String sql = "DROP TABLE IF EXISTS " + dbName + ";";
            db.execSQL(sql);
            // Skip the table's nested entries; only the table name matters here.
            json.beginObject();
            while (json.hasNext()) {
                json.nextName();
                json.nextString();
            }
            json.endObject();
        }
        json.endObject();
    } catch (IOException e) {
        // Best-effort: the database file is deleted below regardless.
        e.printStackTrace();
    }
    context.deleteDatabase(TRACKXTREME_DB);
}
public long createNewTrackPoint(long trackid, long trackrecordid, Location location, boolean round) throws SQLException {
//Track track = getTrackDao().queryForId((int) trackid);
TrackRecord trackRecord = getTrackRecordDao().queryForId((int) trackrecordid);
TrackPoint tp = new TrackPoint(trackRecord, location);
return getTrackPointDao().create(tp);
}
/**
 * Returns every track whose stored bounding box (minLat/maxLat/minLon/maxLon)
 * lies strictly inside the given map bounds. On a database error an empty
 * list is returned instead of propagating the exception.
 *
 * @param latLngBounds visible map region to search within
 * @return matching tracks, or an empty list on SQL failure
 */
public List<Track> searchTracks(LatLngBounds latLngBounds) {
    System.out.println("searching----------------------------------" + latLngBounds);
    try {
        // Build the four bound conditions step by step; each call ANDs onto
        // the same Where clause, equivalent to a single chained expression.
        Where<Track, Integer> bounds = getTrackDao().queryBuilder().where();
        bounds.gt("minLat", latLngBounds.southwest.latitude);
        bounds.and().lt("maxLat", latLngBounds.northeast.latitude);
        bounds.and().gt("minLon", latLngBounds.southwest.longitude);
        bounds.and().lt("maxLon", latLngBounds.northeast.longitude);
        return bounds.query();
    } catch (SQLException e) {
        return Collections.emptyList();
    }
}
/**
 * Returns every track whose start point OR end point falls inside the given
 * bounds. A sub-query selects TrackPoints within the bounds; the track table
 * is then joined against it twice (once on start_id, once on end_id) and the
 * two result lists are merged through a Set to drop duplicates.
 *
 * @param latLngBounds visible map region to search within
 * @return distinct matching tracks, or an empty collection on SQL failure
 */
public Collection<Track> searchTracksStartEnd(LatLngBounds latLngBounds) {
    try {
        // List<Track> list =getTrackDao().queryForAll();
        Set<Track> set = new HashSet<>();
        // Sub-query: track points lying inside the bounds.
        QueryBuilder<TrackPoint, Integer> trackPointQueryBuilder = getTrackPointDao().queryBuilder();
        trackPointQueryBuilder.where()
                .gt("latitude", latLngBounds.southwest.latitude).and()
                .lt("latitude", latLngBounds.northeast.latitude).and()
                .gt("longitude", latLngBounds.southwest.longitude).and()
                .lt("longitude", latLngBounds.northeast.longitude);
        // Tracks whose start point is in bounds.
        set.addAll(getTrackDao().queryBuilder()
                .join("start_id", "id", trackPointQueryBuilder)
                .query());
        // Tracks whose end point is in bounds.
        set.addAll(getTrackDao().queryBuilder()
                .join("end_id", "id", trackPointQueryBuilder)
                .query());
        return set;
    } catch (SQLException e) {
        e.printStackTrace();
        return Collections.emptyList();
    }
}
/**
 * Returns tracks that overlap the given bounds at least partially: either one
 * edge of the track's bounding box falls inside the bounds (first OR branch),
 * or the track's bounding box completely contains the bounds (second branch).
 *
 * Fix: the SQLException was previously swallowed silently; it is now logged
 * (consistent with {@code searchTracksStartEnd}) while callers still receive
 * an empty list on failure.
 *
 * @param latLngBounds the visible map region to search within
 * @return overlapping tracks, or an empty list if the query fails
 */
public List<Track> searchTracksPartially(LatLngBounds latLngBounds) {
    try {
        Where<Track, Integer> where = getTrackDao().queryBuilder().where();
        List<Track> list = where
                .or(
                        // Some bounding-box edge of the track lies inside the bounds.
                        where.and(
                                where.or(
                                        where.and(where.gt("minLat", latLngBounds.southwest.latitude),
                                                where.lt("minLat", latLngBounds.northeast.latitude)),
                                        where.and(where.gt("maxLat", latLngBounds.southwest.latitude),
                                                where.lt("maxLat", latLngBounds.northeast.latitude))),
                                where.or(
                                        where.and(where.gt("minLon", latLngBounds.southwest.longitude),
                                                where.lt("minLon", latLngBounds.northeast.longitude)),
                                        where.and(where.gt("maxLon", latLngBounds.southwest.longitude),
                                                where.lt("maxLon", latLngBounds.northeast.longitude))
                                )
                        ),
                        // The track's bounding box fully contains the bounds.
                        where.and(
                                where.lt("minLat", latLngBounds.southwest.latitude),
                                where.gt("maxLat", latLngBounds.northeast.latitude),
                                where.lt("minLon", latLngBounds.southwest.longitude),
                                where.gt("maxLon", latLngBounds.northeast.longitude)
                        )
                ).query();
        return list;
    } catch (SQLException e) {
        e.printStackTrace();
        return Collections.emptyList();
    }
}
/**
 * Lazily creates and caches the DAO for {@link Track} rows.
 *
 * @throws SQLException if the DAO cannot be created
 */
public Dao<Track, Integer> getTrackDao() throws SQLException {
    if (trackDao != null) {
        return trackDao;
    }
    trackDao = getDao(Track.class);
    return trackDao;
}
/**
 * Lazily creates and caches the DAO for {@link TrackPoint} rows.
 *
 * @throws SQLException if the DAO cannot be created
 */
public Dao<TrackPoint, Integer> getTrackPointDao() throws SQLException {
    if (trackPointDao != null) {
        return trackPointDao;
    }
    trackPointDao = getDao(TrackPoint.class);
    return trackPointDao;
}
/**
 * Lazily creates and caches the DAO for {@link TrackRecord} rows.
 *
 * @throws SQLException if the DAO cannot be created
 */
public Dao<TrackRecord, Integer> getTrackRecordDao() throws SQLException {
    if (trackRecordDao != null) {
        return trackRecordDao;
    }
    trackRecordDao = getDao(TrackRecord.class);
    return trackRecordDao;
}
/**
 * Persists a brand-new track together with its first record and all of its
 * points. When points exist, the first and last one become the track's
 * start/end markers; the record's aggregate data (distance etc.) is refreshed
 * before anything is written.
 *
 * @param maplistener supplies the track, the record and the recorded points
 * @throws SQLException if any of the inserts fail
 */
public void saveNewTrack(TrackUpdateListener maplistener) throws SQLException {
    Track newTrack = maplistener.getTrack();
    TrackRecord record = maplistener.getTrackRecord();
    ArrayList<TrackPoint> points = maplistener.getTrackpoints();
    if (!points.isEmpty()) {
        newTrack.setStart(points.get(0));
        newTrack.setEnd(points.get(points.size() - 1));
    }
    record.updateData(points, true);
    newTrack.setDistance((int) record.getDistance());
    getTrackDao().create(newTrack);
    getTrackRecordDao().create(record);
    getTrackPointDao().create(points);
}
/**
 * Persists a new record (and its points) for an already existing track: the
 * track row is updated in place, while the record and points are inserted.
 *
 * @param racelistener supplies the existing track, the new record and its points
 * @throws SQLException if the update or inserts fail
 */
public void saveNewTrackRecord(TrackUpdater racelistener) throws SQLException {
    Track existingTrack = racelistener.getTrack();
    TrackRecord record = racelistener.getTrackRecord();
    ArrayList<TrackPoint> points = racelistener.getTrackpoints();
    record.updateData(points, false);
    getTrackDao().update(existingTrack);
    getTrackRecordDao().create(record);
    getTrackPointDao().create(points);
}
/**
 * Deletes the given record; when it was the track's last record, the track
 * itself is deleted as well.
 *
 * @param item1 the record to remove
 * @return the number of records still attached to the track afterwards
 * @throws SQLException if a delete fails
 */
public int deleteTrack(TrackRecord item1) throws SQLException {
    Track owner = item1.getTrack();
    ForeignCollection<TrackRecord> siblings = owner.getRecords();
    siblings.getDao().delete(item1);
    if (siblings.size() == 0) {
        getTrackDao().delete(owner);
    }
    // Re-query the collection size so callers see the post-delete state.
    return siblings.size();
}
/**
 * Writes the record's and its parent track's current state back to the
 * database. Failures are logged and otherwise ignored (best effort).
 *
 * @param trackRecord the record to persist
 */
public void update(TrackRecord trackRecord) {
    try {
        Track parent = trackRecord.getTrack();
        getTrackRecordDao().update(trackRecord);
        getTrackDao().update(parent);
    } catch (SQLException e) {
        e.printStackTrace();
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.ddl.table.info.desc;
import java.io.DataOutputStream;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.common.StatsSetupConst;
import org.apache.hadoop.hive.common.TableName;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HMSHandler;
import org.apache.hadoop.hive.metastore.StatObjectConverter;
import org.apache.hadoop.hive.metastore.TableType;
import org.apache.hadoop.hive.metastore.api.AggrStats;
import org.apache.hadoop.hive.metastore.api.ColumnStatisticsData;
import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
import org.apache.hadoop.hive.ql.ddl.ShowUtils;
import org.apache.hadoop.hive.ql.ddl.table.info.desc.formatter.DescTableFormatter;
import org.apache.hadoop.hive.ql.exec.ColumnInfo;
import org.apache.hadoop.hive.ql.lockmgr.LockException;
import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.ddl.DDLOperation;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.PartitionIterable;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.metadata.TableConstraintsInfo;
import org.apache.hadoop.hive.ql.parse.HiveTableName;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.plan.ColStatistics;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.ql.stats.StatsUtils;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.Deserializer;
import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import com.google.common.collect.Lists;
/**
 * Operation process of describing a table.
 *
 * Executes the DESCRIBE [EXTENDED|FORMATTED] TABLE DDL: resolves the target
 * table (and, when a partition spec is present, a single partition), gathers
 * the column schema and - when requested - column statistics, then renders
 * everything into the query's result file through a {@link DescTableFormatter}.
 */
public class DescTableOperation extends DDLOperation<DescTableDesc> {
  public DescTableOperation(DDLOperationContext context, DescTableDesc desc) {
    super(context, desc);
  }

  @Override
  public int execute() throws Exception {
    Table table = getTable();
    Partition part = getPartition(table);
    final String dbTableName = desc.getDbTableName();

    try (DataOutputStream outStream = ShowUtils.getOutputStream(new Path(desc.getResFile()), context)) {
      LOG.debug("DDLTask: got data for {}", dbTableName);

      List<FieldSchema> cols = new ArrayList<>();
      List<ColumnStatisticsObj> colStats = new ArrayList<>();

      Deserializer deserializer = getDeserializer(table);

      // Without a column path the whole table/partition schema is described;
      // with a column path, statistics are only gathered for FORMATTED output.
      if (desc.getColumnPath() == null) {
        getColumnsNoColumnPath(table, part, cols);
      } else {
        if (desc.isFormatted()) {
          getColumnDataColPathSpecified(table, part, cols, colStats, deserializer);
        } else {
          cols.addAll(Hive.getFieldsFromDeserializer(desc.getColumnPath(), deserializer));
        }
      }
      fixDecimalColumnTypeName(cols);

      setConstraintsAndStorageHandlerInfo(table);
      handleMaterializedView(table);

      // In case the query is served by HiveServer2, don't pad it with spaces,
      // as HiveServer2 output is consumed by JDBC/ODBC clients.
      boolean isOutputPadded = !SessionState.get().isHiveServerQuery();

      DescTableFormatter formatter = DescTableFormatter.getFormatter(context.getConf());
      formatter.describeTable(context.getConf(), outStream, desc.getColumnPath(), dbTableName, table, part, cols,
          desc.isFormatted(), desc.isExtended(), isOutputPadded, colStats);

      LOG.debug("DDLTask: written data for {}", dbTableName);
    } catch (SQLException e) {
      throw new HiveException(e, ErrorMsg.GENERIC_ERROR, dbTableName);
    }

    return 0;
  }

  /**
   * Resolves the target table from the metastore.
   *
   * @throws HiveException with INVALID_TABLE if the table does not exist
   */
  private Table getTable() throws HiveException {
    Table table = context.getDb().getTable(desc.getTableName().getDb(), desc.getTableName().getTable(), false);
    if (table == null) {
      throw new HiveException(ErrorMsg.INVALID_TABLE, desc.getDbTableName());
    }
    return table;
  }

  /**
   * Resolves the partition named by the operation's partition spec, or returns
   * null when no partition spec was given.
   *
   * @throws HiveException with INVALID_PARTITION if the spec matches nothing
   */
  private Partition getPartition(Table table) throws HiveException {
    Partition part = null;
    if (desc.getPartitionSpec() != null) {
      part = context.getDb().getPartition(table, desc.getPartitionSpec(), false);
      if (part == null) {
        throw new HiveException(ErrorMsg.INVALID_PARTITION,
            StringUtils.join(desc.getPartitionSpec().keySet(), ','), desc.getDbTableName());
      }
    }
    return part;
  }

  /** Returns the table's deserializer (with class initialization forced by the 'true' flag). */
  private Deserializer getDeserializer(Table table) throws SQLException {
    Deserializer deserializer = table.getDeserializer(true);
    return deserializer;
  }

  /**
   * Collects the column schema for the table (or the given partition) when no
   * column path was specified. For DESCRIBE EXTENDED/FORMATTED on a partitioned
   * table without a partition spec, additionally aggregates basic statistics
   * across all partitions into the table's parameters.
   */
  private void getColumnsNoColumnPath(Table table, Partition partition, List<FieldSchema> cols) throws HiveException {
    // Virtual views have no partition-level storage, so always use the table columns.
    cols.addAll(partition == null || table.getTableType() == TableType.VIRTUAL_VIEW ?
        table.getCols() : partition.getCols());
    if (!desc.isFormatted()) {
      cols.addAll(table.getPartCols());
    }

    // Fetch partition statistics only for describe extended or formatted.
    if (desc.isExtended() || desc.isFormatted()) {
      boolean disablePartitionStats = HiveConf.getBoolVar(context.getConf(), HiveConf.ConfVars.HIVE_DESCRIBE_PARTITIONED_TABLE_IGNORE_STATS);
      if (table.isPartitioned() && partition == null && !disablePartitionStats) {
        // No partition specified for partitioned table, lets fetch all.
        Map<String, String> tblProps = table.getParameters() == null ?
            new HashMap<String, String>() : table.getParameters();

        // Accumulators per supported basic stat: summed value and an
        // "up to date in every partition so far" flag.
        Map<String, Long> valueMap = new HashMap<>();
        Map<String, Boolean> stateMap = new HashMap<>();
        for (String stat : StatsSetupConst.SUPPORTED_STATS) {
          valueMap.put(stat, 0L);
          stateMap.put(stat, true);
        }

        PartitionIterable partitions = new PartitionIterable(context.getDb(), table, null,
            MetastoreConf.getIntVar(context.getConf(), MetastoreConf.ConfVars.BATCH_RETRIEVE_MAX));
        int numParts = 0;
        for (Partition p : partitions) {
          Map<String, String> partitionProps = p.getParameters();
          Boolean state = StatsSetupConst.areBasicStatsUptoDate(partitionProps);
          for (String stat : StatsSetupConst.SUPPORTED_STATS) {
            stateMap.put(stat, stateMap.get(stat) && state);
            if (partitionProps != null && partitionProps.get(stat) != null) {
              valueMap.put(stat, valueMap.get(stat) + Long.parseLong(partitionProps.get(stat)));
            }
          }
          numParts++;
        }
        tblProps.put(StatsSetupConst.NUM_PARTITIONS, Integer.toString(numParts));

        for (String stat : StatsSetupConst.SUPPORTED_STATS) {
          // NOTE(review): setBasicStatsState writes one shared flag, so the last
          // stat in SUPPORTED_STATS effectively wins here - confirm intended.
          StatsSetupConst.setBasicStatsState(tblProps, Boolean.toString(stateMap.get(stat)));
          tblProps.put(stat, valueMap.get(stat).toString());
        }
        table.setParameters(tblProps);
      }
    }
  }

  /**
   * Gathers schema and statistics for a single named column. Handles three
   * cases: the column is a partition key; the column belongs to an otherwise
   * partitioned table; or the table is unpartitioned / a single partition was
   * specified.
   */
  private void getColumnDataColPathSpecified(Table table, Partition part, List<FieldSchema> cols,
      List<ColumnStatisticsObj> colStats, Deserializer deserializer)
      throws SemanticException, HiveException, MetaException {
    // when column name is specified in describe table DDL, colPath will be db_name.table_name.column_name
    String colName = desc.getColumnPath().split("\\.")[2];
    List<String> colNames = Lists.newArrayList(colName.toLowerCase());

    TableName tableName = HiveTableName.of(desc.getDbTableName());
    if (null == part) {
      if (table.isPartitioned()) {
        Map<String, String> tableProps = table.getParameters() == null ?
            new HashMap<String, String>() : table.getParameters();
        if (table.isPartitionKey(colNames.get(0))) {
          getColumnDataForPartitionKeyColumn(table, cols, colStats, colNames, tableProps);
        } else {
          getColumnsForNotPartitionKeyColumn(cols, colStats, deserializer, colNames, tableName, tableProps);
        }
        table.setParameters(tableProps);
      } else {
        cols.addAll(Hive.getFieldsFromDeserializer(desc.getColumnPath(), deserializer));
        colStats.addAll(
            context.getDb().getTableColumnStatistics(tableName.getDb().toLowerCase(),
                tableName.getTable().toLowerCase(), colNames, false));
      }
    } else {
      List<String> partitions = new ArrayList<String>();
      // The partition name is converted to lowercase before generating the stats. So we should use the same
      // lower case name to get the stats.
      String partName = HMSHandler.lowerCaseConvertPartName(part.getName());
      partitions.add(partName);
      cols.addAll(Hive.getFieldsFromDeserializer(desc.getColumnPath(), deserializer));
      Map<String, List<ColumnStatisticsObj>> partitionColumnStatistics = context.getDb().getPartitionColumnStatistics(
          tableName.getDb().toLowerCase(), tableName.getTable().toLowerCase(), partitions, colNames, false);
      List<ColumnStatisticsObj> partitionColStat = partitionColumnStatistics.get(partName);
      if (partitionColStat != null) {
        colStats.addAll(partitionColStat);
      }
    }
  }

  /**
   * Computes statistics for a partition-key column by scanning the partition
   * values themselves (partition keys are not stored as data columns), then
   * records the column-stats state on the table properties.
   */
  private void getColumnDataForPartitionKeyColumn(Table table, List<FieldSchema> cols,
      List<ColumnStatisticsObj> colStats, List<String> colNames, Map<String, String> tableProps)
      throws HiveException, MetaException {
    FieldSchema partCol = table.getPartColByName(colNames.get(0));
    cols.add(partCol);
    PartitionIterable parts = new PartitionIterable(context.getDb(), table, null,
        MetastoreConf.getIntVar(context.getConf(), MetastoreConf.ConfVars.BATCH_RETRIEVE_MAX));
    ColumnInfo ci = new ColumnInfo(partCol.getName(),
        TypeInfoUtils.getTypeInfoFromTypeString(partCol.getType()), null, false);
    ColStatistics cs = StatsUtils.getColStatsForPartCol(ci, parts, context.getConf());
    ColumnStatisticsData data = new ColumnStatisticsData();
    ColStatistics.Range r = cs.getRange();
    // NOTE(review): when r is non-null but r.minValue/maxValue is null, the
    // toString() calls below would NPE - confirm Range always has both ends.
    StatObjectConverter.fillColumnStatisticsData(partCol.getType(), data, r == null ? null : r.minValue,
        r == null ? null : r.maxValue, r == null ? null : r.minValue, r == null ? null : r.maxValue,
        r == null ? null : r.minValue.toString(), r == null ? null : r.maxValue.toString(),
        cs.getNumNulls(), cs.getCountDistint(), null, cs.getAvgColLen(), cs.getAvgColLen(),
        cs.getNumTrues(), cs.getNumFalses());
    ColumnStatisticsObj cso = new ColumnStatisticsObj(partCol.getName(), partCol.getType(), data);
    colStats.add(cso);
    StatsSetupConst.setColumnStatsState(tableProps, colNames);
  }

  /**
   * Fetches aggregated statistics for a regular (non-partition-key) column of
   * a partitioned table. The stats state is only marked accurate when every
   * partition contributed to the aggregate.
   */
  private void getColumnsForNotPartitionKeyColumn(List<FieldSchema> cols, List<ColumnStatisticsObj> colStats,
      Deserializer deserializer, List<String> colNames, TableName tableName, Map<String, String> tableProps)
      throws HiveException {
    cols.addAll(Hive.getFieldsFromDeserializer(desc.getColumnPath(), deserializer));
    List<String> parts = context.getDb().getPartitionNames(tableName.getDb().toLowerCase(),
        tableName.getTable().toLowerCase(), (short) -1);
    AggrStats aggrStats = context.getDb().getAggrColStatsFor(
        tableName.getDb().toLowerCase(), tableName.getTable().toLowerCase(), colNames, parts, false);
    colStats.addAll(aggrStats.getColStats());
    if (parts.size() == aggrStats.getPartsFound()) {
      StatsSetupConst.setColumnStatsState(tableProps, colNames);
    } else {
      StatsSetupConst.removeColumnStatsState(tableProps, colNames);
    }
  }

  /**
   * Fix the type name of a column of type decimal w/o precision/scale specified. This makes
   * the describe table show "decimal(10,0)" instead of "decimal" even if the type stored
   * in metastore is "decimal", which is possible with previous hive.
   *
   * @param cols columns that to be fixed as such
   */
  private static void fixDecimalColumnTypeName(List<FieldSchema> cols) {
    for (FieldSchema col : cols) {
      if (serdeConstants.DECIMAL_TYPE_NAME.equals(col.getType())) {
        col.setType(DecimalTypeInfo.getQualifiedName(HiveDecimal.USER_DEFAULT_PRECISION,
            HiveDecimal.USER_DEFAULT_SCALE));
      }
    }
  }

  /**
   * For EXTENDED/FORMATTED output, loads the table's constraints and storage
   * handler info so the formatter can print them.
   */
  private void setConstraintsAndStorageHandlerInfo(Table table) throws HiveException {
    if (desc.isExtended() || desc.isFormatted()) {
      TableConstraintsInfo tableConstraintsInfo = context.getDb().getTableConstraints(table.getDbName(),
          table.getTableName(), false, false);
      table.setTableConstraintsInfo(tableConstraintsInfo);
      table.setStorageHandlerInfo(context.getDb().getStorageHandlerInfo(table));
    }
  }

  /**
   * For materialized views, marks whether the view is outdated for automatic
   * query rewriting with respect to its source tables.
   */
  private void handleMaterializedView(Table table) throws LockException {
    if (table.isMaterializedView()) {
      table.setOutdatedForRewriting(context.getDb().isOutdatedMaterializedView(
          table,
          new ArrayList<>(table.getCreationMetadata().getTablesUsed()),
          false,
          SessionState.get().getTxnMgr()));
    }
  }
}
| |
/*
* $Id:$
*/
package org.lockss.plugin.dove;
import java.io.*;
import org.lockss.util.*;
import org.lockss.test.*;
/**
 * Tests for the Dove Press HTML filter factories. Each fixture pairs a raw
 * HTML snippet (TOC page, article landing page, manifest page) with the
 * expected output of the crawl filter (link pruning) and the hash filter
 * (content extraction for polling hashes).
 */
public class TestDoveHtmlFilterFactory extends LockssTestCase {
  // Encoding passed to every filter invocation.
  static String ENC = Constants.DEFAULT_ENCODING;

  private DoveHtmlCrawlFilterFactory cfact;
  private DoveHtmlHashFilterFactory hfact;
  private MockArchivalUnit mau;

  // Creates fresh filter factories and a mock AU before each test.
  public void setUp() throws Exception {
    super.setUp();
    cfact = new DoveHtmlCrawlFilterFactory();
    hfact = new DoveHtmlHashFilterFactory();
    mau = new MockArchivalUnit();
  }

  // Raw table-of-contents page: navigation, archive volume list, and one
  // article entry inside the tab content.
  private static final String toc_bit =
      "<html class=\"no-js\"><!--<![endif]--><head></head>" +
      "<body>" +
      "<div role=\"main\" class=\"main\" id=\"content\">" +
      "<p class=\"back\">Back to " +
      "<a href=\"https://www.dovepress.com/browse_journals.php\">Browse Journals</a>" +
      "<a href=\"https://www.dovepress.com/the-journal\">Therapeutics and Clinical Risk Management</a>" +
      "<a href=\"https://www.dovepress.com/the-journal-archive1-v879\">Volume 11</a> default</p>" +
      "<div class=\"tabs-bg group journal-articles group\">" +
      "<div class=\"tabs-padding group\"> " +
      "<div class=\"article-labels group\">" +
      "<img src=\"tcrm_toc_files/logo_pubmed.png\" alt=\"Pub Med\" title=\"Pub Med\"> " +
      "<div class=\"highly-accessed\"></div> " +
      "</div>" +
      "<br clear=\"all\">" +
      "<div class=\"archive\">" +
      "<ul data-journal_id=\"1\">" +
      "<li><a href=\"https://www.dovepress.com/the-journal-archive1\" class=\"\" data-volume_id=\"0\">View all</a> (1106)</li>" +
      "<li>" +
      "<a href=\"https://www.dovepress.com/the-journal-i1123-j1\" class=\"\">Volume 12, 2016</a> (91)" +
      "</li>" +
      "<li>" +
      "<a href=\"https://www.dovepress.com/the-journal-i1010-j1\" class=\"here\">Volume 11, 2015</a> (210)" +
      "</li>" +
      "</ul>" +
      "</div>" +
      "<!-- /end categories -->" +
      "</div>" +
      "</div>" +
      "<div class=\"categories-bg group\"><div class=\"tabs-padding group\"><div class=\"tabs\">" +
      "<h4>Archive: Volume 11, 2015</h4>" +
      "</div>" +
      "<div class=\"tab-content\">" +
      "<div class=\"volume-issues issue-1010 \"> <div class=\"tab-item\">" +
      "<div class=\"article-labels group\">" +
      "<div class=\"tag\">Original Research</div>" +
      "</div>" +
      "<h3>" +
      "<a href=\"https://www.dovepress.com/foo-article-TCRM\">Foo</a>" +
      "</h3>" +
      "<p>authors</p>" +
      "<p class=\"journal\">" +
      "<a href=\"https://www.dovepress.com/journal\">This Journal</a> " +
      "<a href=\"https://www.dovepress.com/this-journal-archive1-v879\">2015</a>, 11:1853-186</p>" +
      "<p class=\"journal\">Published Date: <strong>17 December 2015</strong></p>" +
      "</div>" +
      "<!-- /end tab-item -->" +
      "</div></div>" +
      "</div></div>" +
      "</div>" +
      "<!-- /end main -->" +
      "</body></html>";

  // Expected hash-filter output for the TOC page: only the article listing
  // content survives (navigation, archive list and labels removed).
  private static final String toc_bit_hashfiltered =
      "<div class=\"volume-issues issue-1010 \"> <div class=\"tab-item\">" +
      "<h3>" +
      "<a href=\"https://www.dovepress.com/foo-article-TCRM\">Foo</a>" +
      "</h3>" +
      "<p>authors</p>" +
      "<p class=\"journal\">" +
      "<a href=\"https://www.dovepress.com/journal\">This Journal</a> " +
      "<a href=\"https://www.dovepress.com/this-journal-archive1-v879\">2015</a>, 11:1853-186</p>" +
      "<p class=\"journal\">Published Date: <strong>17 December 2015</strong></p>" +
      "</div>" +
      "<!-- /end tab-item -->" +
      "</div>";

  // Expected crawl-filter output for the TOC page: the archive volume list
  // (cross-AU links) is pruned, article content is preserved.
  private static final String toc_bit_crawlfiltered =
      "<html class=\"no-js\"><!--<![endif]--><head></head>" +
      "<body>" +
      "<div role=\"main\" class=\"main\" id=\"content\">" +
      "<p class=\"back\">Back to " +
      "<a href=\"https://www.dovepress.com/browse_journals.php\">Browse Journals</a>" +
      "<a href=\"https://www.dovepress.com/the-journal\">Therapeutics and Clinical Risk Management</a>" +
      "<a href=\"https://www.dovepress.com/the-journal-archive1-v879\">Volume 11</a> default</p>" +
      "<div class=\"categories-bg group\"><div class=\"tabs-padding group\">" +
      "<div class=\"tab-content\">" +
      "<div class=\"volume-issues issue-1010 \"> <div class=\"tab-item\">" +
      "<div class=\"article-labels group\">" +
      "<div class=\"tag\">Original Research</div>" +
      "</div>" +
      "<h3>" +
      "<a href=\"https://www.dovepress.com/foo-article-TCRM\">Foo</a>" +
      "</h3>" +
      "<p>authors</p>" +
      "<p class=\"journal\">" +
      "<a href=\"https://www.dovepress.com/journal\">This Journal</a> " +
      "<a href=\"https://www.dovepress.com/this-journal-archive1-v879\">2015</a>, 11:1853-186</p>" +
      "<p class=\"journal\">Published Date: <strong>17 December 2015</strong></p>" +
      "</div>" +
      "<!-- /end tab-item -->" +
      "</div></div>" +
      "</div></div>" +
      "</div>" +
      "<!-- /end main -->" +
      "</body></html>";

  // Raw article landing page: abstract, download links, full text, plus a
  // "Readers of this article also read" recommendation section.
  private static final String article_bit =
      "<html>" +
      "<head></head>" +
      "<body>" +
      "<div role=\"main\" class=\"main\" id=\"content\">" +
      "<div class=\"tab-content\">" +
      "<div class=\"articles\">" +
      "<div class=\"intro\"> " +
      "</div>" +
      "<!-- /end intro -->" +
      "<div class=\"copy\">" +
      "The abstract goes here..." +
      "<strong> Keywords:</strong> word" +
      "<a href=\"https://www.dovepress.com/the-link-article-TCRM\" target=\"_blank\">A Letter to the Editor has been received and published for this article.</a>" +
      "<p class=\"article-cc-license\"></p>" +
      "<a class=\"download-btn print-hide\" href=\"https://www.dovepress.com/getfile.php?fileID=1\" id=\"download-pdf\">" +
      "Download Article <span>[PDF]</span></a> " +
      "<a class=\"download-btn print-hide\" href=\"https://www.dovepress.com/fulltext-article-TCRM\" id=\"view-full-text\">" +
      "View Full Text <span>[HTML]</span></a> " +
      "<div id=\"article-fulltext\">" +
      "<p class=\"h1\">Introduction</p>" +
      "</div>" +
      "</div>" +
      "<!-- /end copy -->" +
      "</div>" +
      "<!-- /end articles -->" +
      "</div>" +
      "<!-- /end tab-content -->" +
      "<div class=\"categories-bg group\">" +
      "<div class=\"tabs-padding group\"><h2>Readers of this article also read:</h2>" +
      "<div class=\"tab-content\">" +
      "<div class=\"tab-item\">" +
      "<div class=\"article-labels group\">" +
      "<div class=\"tag\">Review</div>" +
      "</div>" +
      "<h3>" +
      "<a href=\"https://www.dovepress.com/CIA-recommendation1\">Causative</a>" +
      "</h3>" +
      "<p>an author</p>" +
      "<p class=\"journal\">" +
      "<a href=\"https://www.dovepress.com/foo\"</a> " +
      "<a href=\"https://www.dovepress.com/otherjournal-archive4-v852\">2015</a>, 10:1873-187</p>" +
      "<p class=\"journal\">Published Date: <strong>19 November 2015</strong></p>" +
      "</div>" +
      "<!-- /end tab-item -->" +
      "</div>" +
      "<!-- /end tabs-content -->" +
      "</div></div>" +
      "</div>" +
      "<!-- /end main -->" +
      "</body></html>";

  // Expected hash-filter output for the article page: only the article body
  // block survives (recommendations and page chrome removed).
  private static final String article_bit_hashfiltered =
      "<div class=\"articles\">" +
      "<div class=\"intro\"> " +
      "</div>" +
      "<!-- /end intro -->" +
      "<div class=\"copy\">" +
      "The abstract goes here..." +
      "<strong> Keywords:</strong> word" +
      "<a href=\"https://www.dovepress.com/the-link-article-TCRM\" target=\"_blank\">A Letter to the Editor has been received and published for this article.</a>" +
      "<p class=\"article-cc-license\"></p>" +
      "<a class=\"download-btn print-hide\" href=\"https://www.dovepress.com/getfile.php?fileID=1\" id=\"download-pdf\">" +
      "Download Article <span>[PDF]</span></a> " +
      "<a class=\"download-btn print-hide\" href=\"https://www.dovepress.com/fulltext-article-TCRM\" id=\"view-full-text\">" +
      "View Full Text <span>[HTML]</span></a> " +
      "<div id=\"article-fulltext\">" +
      "<p class=\"h1\">Introduction</p>" +
      "</div>" +
      "</div>" +
      "<!-- /end copy -->" +
      "</div>";

  // Expected crawl-filter output for the article page: the recommendation
  // section and in-copy cross links are pruned; download/full-text links kept.
  private static final String article_bit_crawlfiltered =
      "<html>" +
      "<head></head>" +
      "<body>" +
      "<div role=\"main\" class=\"main\" id=\"content\">" +
      "<div class=\"tab-content\">" +
      "<div class=\"articles\">" +
      "<div class=\"intro\"> " +
      "</div>" +
      "<!-- /end intro -->" +
      "<div class=\"copy\">" +
      "<a class=\"download-btn print-hide\" href=\"https://www.dovepress.com/getfile.php?fileID=1\" id=\"download-pdf\">" +
      "Download Article <span>[PDF]</span></a>" +
      "<a class=\"download-btn print-hide\" href=\"https://www.dovepress.com/fulltext-article-TCRM\" id=\"view-full-text\">" +
      "View Full Text <span>[HTML]</span></a>" +
      "<div id=\"article-fulltext\">" +
      "<p class=\"h1\">Introduction</p>" +
      "</div>" +
      "</div>" +
      "<!-- /end copy -->" +
      "</div>" +
      "<!-- /end articles -->" +
      "</div>" +
      "<!-- /end tab-content -->" +
      "</div>" +
      "<!-- /end main -->" +
      "</body></html>";

  // Raw CLOCKSS manifest page listing the published issues for one year.
  private static final String manifest_bit=
      "<html>" +
      "<head></head>" +
      "<body>" +
      "<div role=\"main\" class=\"main\" id=\"content\">" +
      "<div class=\"tabs-bg group\">" +
      "<div class=\"tabs-padding group\">" +
      "<h1>CLOCKSS - Published Issues: Therapeutics and Clinical Risk Management 2015</h1>" +
      "<div class=\"copy sitemap\">" +
      "<ul>" +
      "<li>" +
      "<a href=\"https://www.dovepress.com/the-journal-i1010-j1\">The Journal 2015:default</a>" +
      "</li>" +
      "</ul>" +
      "</div>" +
      "</div></div> " +
      "</div>" +
      "</body></html>";

  // Expected hash-filter output for the manifest: only the sitemap list.
  private static final String manifest_bit_hashfiltered =
      "<div class=\"copy sitemap\">" +
      "<ul>" +
      "<li>" +
      "<a href=\"https://www.dovepress.com/the-journal-i1010-j1\">The Journal 2015:default</a>" +
      "</li>" +
      "</ul>" +
      "</div>";

  // Verifies both filters against the table-of-contents fixture.
  public void testTOCFiltering() throws Exception {
    InputStream inStream;
    //crawl-filter
    inStream = cfact.createFilteredInputStream(mau,
        new StringInputStream(toc_bit),
        Constants.DEFAULT_ENCODING);
    assertEquals(toc_bit_crawlfiltered, StringUtil.fromInputStream(inStream));
    //hash-filter
    inStream = hfact.createFilteredInputStream(mau,
        new StringInputStream(toc_bit),
        Constants.DEFAULT_ENCODING);
    assertEquals(toc_bit_hashfiltered, StringUtil.fromInputStream(inStream));
  }

  // Verifies both filters against the article landing page fixture.
  public void testArticleLandingFiltering() throws Exception {
    InputStream inStream;
    //crawl-filter
    inStream = cfact.createFilteredInputStream(mau,
        new StringInputStream(article_bit),
        Constants.DEFAULT_ENCODING);
    assertEquals(article_bit_crawlfiltered, StringUtil.fromInputStream(inStream));
    //hash-filter
    inStream = hfact.createFilteredInputStream(mau,
        new StringInputStream(article_bit),
        Constants.DEFAULT_ENCODING);
    assertEquals(article_bit_hashfiltered, StringUtil.fromInputStream(inStream));
  }

  // Verifies that the crawl filter leaves the manifest untouched while the
  // hash filter reduces it to the sitemap content.
  public void testManifestFiltering() throws Exception {
    InputStream inStream;
    //crawl-filter
    inStream = cfact.createFilteredInputStream(mau,
        new StringInputStream(manifest_bit),
        Constants.DEFAULT_ENCODING);
    assertEquals(manifest_bit, StringUtil.fromInputStream(inStream)); // unaltered
    //hash-filter
    inStream = hfact.createFilteredInputStream(mau,
        new StringInputStream(manifest_bit),
        Constants.DEFAULT_ENCODING);
    assertEquals(manifest_bit_hashfiltered, StringUtil.fromInputStream(inStream));
  }
}
| |
/*
* Copyright 2000-2012 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.jps.incremental.fs;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.util.io.IOUtil;
import gnu.trove.THashSet;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.jps.builders.BuildRootDescriptor;
import org.jetbrains.jps.builders.BuildRootIndex;
import org.jetbrains.jps.builders.BuildTarget;
import org.jetbrains.jps.incremental.Utils;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.File;
import java.io.IOException;
import java.util.*;
import java.util.concurrent.locks.ReentrantLock;
/** */
public final class FilesDelta {
private static final Logger LOG = Logger.getInstance("#org.jetbrains.jps.incremental.fs.FilesDelta");
private final ReentrantLock myDataLock = new ReentrantLock();
private final Set<String> myDeletedPaths = new THashSet<>(FileUtil.PATH_HASHING_STRATEGY);
private final Map<BuildRootDescriptor, Set<File>> myFilesToRecompile = new HashMap<>();
  /** Acquires the lock guarding this delta's mutable state; pair with {@link #unlockData()}. */
  public void lockData(){
    myDataLock.lock();
  }
  /** Releases the lock acquired by {@link #lockData()}. */
  public void unlockData(){
    myDataLock.unlock();
  }
  /** Creates an empty delta with no deleted paths and no files to recompile. */
  public FilesDelta() {
  }
  /** Creates a delta that is the union of the given deltas (deleted paths and recompile sets merged). */
  FilesDelta(Collection<FilesDelta> deltas) {
    for (FilesDelta delta : deltas) {
      addAll(delta);
    }
  }
  /**
   * Merges all state from {@code other} into this delta.
   * Locks only {@code other}; callers mutating {@code this} concurrently must
   * hold this delta's own lock.
   */
  private void addAll(FilesDelta other) {
    other.lockData();
    try {
      myDeletedPaths.addAll(other.myDeletedPaths);
      for (Map.Entry<BuildRootDescriptor, Set<File>> entry : other.myFilesToRecompile.entrySet()) {
        _addToRecompiled(entry.getKey(), entry.getValue());
      }
    }
    finally {
      other.unlockData();
    }
  }
  /**
   * Serializes this delta: first the deleted paths (count + strings), then the
   * recompile map (count, then per root: root id, file count, system-independent
   * file paths). Must stay symmetric with {@link #load} and {@link #skip}.
   */
  public void save(DataOutput out) throws IOException {
    lockData();
    try {
      out.writeInt(myDeletedPaths.size());
      for (String path : myDeletedPaths) {
        IOUtil.writeString(path, out);
      }
      out.writeInt(myFilesToRecompile.size());
      for (Map.Entry<BuildRootDescriptor, Set<File>> entry : myFilesToRecompile.entrySet()) {
        IOUtil.writeString(entry.getKey().getRootId(), out);
        final Set<File> files = entry.getValue();
        out.writeInt(files.size());
        for (File file : files) {
          // Paths are stored system-independent so the cache survives OS changes.
          IOUtil.writeString(FileUtil.toSystemIndependentName(file.getPath()), out);
        }
      }
    }
    finally {
      unlockData();
    }
  }
  /**
   * Replaces this delta's state with data previously written by {@link #save}.
   * Roots that can no longer be resolved via {@code buildRootIndex} are
   * skipped: their file entries are still consumed from the stream (to keep it
   * positioned correctly) but collected into a throwaway set.
   */
  public void load(DataInput in, @NotNull BuildTarget<?> target, BuildRootIndex buildRootIndex) throws IOException {
    lockData();
    try {
      myDeletedPaths.clear();
      int deletedCount = in.readInt();
      while (deletedCount-- > 0) {
        myDeletedPaths.add(IOUtil.readString(in));
      }
      myFilesToRecompile.clear();
      int recompileCount = in.readInt();
      while (recompileCount-- > 0) {
        String rootId = IOUtil.readString(in);
        BuildRootDescriptor descriptor = target.findRootDescriptor(rootId, buildRootIndex);
        Set<File> files;
        if (descriptor != null) {
          files = myFilesToRecompile.get(descriptor);
          if (files == null) {
            files = new THashSet<>(FileUtil.FILE_HASHING_STRATEGY);
            myFilesToRecompile.put(descriptor, files);
          }
        }
        else {
          LOG.debug("Cannot find root by " + rootId + ", delta will be skipped");
          // Throwaway set: the stream entries must still be read and discarded.
          files = new THashSet<>(FileUtil.FILE_HASHING_STRATEGY);
        }
        int filesCount = in.readInt();
        while (filesCount-- > 0) {
          final File file = new File(IOUtil.readString(in));
          if (Utils.IS_TEST_MODE) {
            LOG.info("Loaded " + file.getPath());
          }
          files.add(file);
        }
      }
    }
    finally {
      unlockData();
    }
  }
public static void skip(DataInput in) throws IOException {
int deletedCount = in.readInt();
while (deletedCount-- > 0) {
IOUtil.readString(in);
}
int recompiledCount = in.readInt();
while (recompiledCount-- > 0) {
IOUtil.readString(in);
int filesCount = in.readInt();
while (filesCount-- > 0) {
IOUtil.readString(in);
}
}
}
public boolean hasChanges() {
lockData();
try {
if (!myDeletedPaths.isEmpty()) {
return true;
}
if(!myFilesToRecompile.isEmpty()) {
for (Set<File> files : myFilesToRecompile.values()) {
if (!files.isEmpty()) {
return true;
}
}
}
return false;
}
finally {
unlockData();
}
}
public boolean markRecompile(BuildRootDescriptor root, File file) {
lockData();
try {
final boolean added = _addToRecompiled(root, file);
if (added) {
if (!myDeletedPaths.isEmpty()) { // optimization
myDeletedPaths.remove(FileUtil.toCanonicalPath(file.getPath()));
}
}
return added;
}
finally {
unlockData();
}
}
  /**
   * Marks the file for recompilation unless it is already recorded as deleted.
   * If the file no longer exists on disk, it is recorded as deleted instead so
   * the next make does not carry a non-existing source in its recompile list.
   *
   * @return true if the file was marked for recompilation, false otherwise
   */
  public boolean markRecompileIfNotDeleted(BuildRootDescriptor root, File file) {
    lockData();
    try {
      String path = null;
      // Canonicalize lazily: skip the conversion entirely when nothing is deleted.
      final boolean isMarkedDeleted = !myDeletedPaths.isEmpty() && myDeletedPaths.contains(path = FileUtil.toCanonicalPath(file.getPath()));
      if (!isMarkedDeleted) {
        if (!file.exists()) {
          // incorrect paths data recovery, so that the next make should not contain non-existing sources in 'recompile' list
          if (path == null) {
            path = FileUtil.toCanonicalPath(file.getPath());
          }
          if (Utils.IS_TEST_MODE) {
            LOG.info("Marking deleted: " + path);
          }
          myDeletedPaths.add(path);
          return false;
        }
        _addToRecompiled(root, file);
        return true;
      }
      return false;
    }
    finally {
      unlockData();
    }
  }
  /**
   * Adds a single file to the recompile set of the given root.
   *
   * @return true if the file was not already present
   */
  private boolean _addToRecompiled(BuildRootDescriptor root, File file) {
    if (Utils.IS_TEST_MODE) {
      LOG.info("Marking dirty: " + file.getPath());
    }
    return _addToRecompiled(root, Collections.singleton(file));
  }
private boolean _addToRecompiled(BuildRootDescriptor root, Collection<File> filesToAdd) {
Set<File> files = myFilesToRecompile.get(root);
if (files == null) {
files = new THashSet<>(FileUtil.FILE_HASHING_STRATEGY);
myFilesToRecompile.put(root, files);
}
return files.addAll(filesToAdd);
}
  /**
   * Registers the file as deleted and removes it from every root's recompile set,
   * since "deleted" and "to be recompiled" are mutually exclusive states.
   */
  public void addDeleted(File file) {
    // canonicalize outside the lock; only the set mutations need protection
    final String path = FileUtil.toCanonicalPath(file.getPath());
    lockData();
    try {
      // ensure the file is not marked to recompilation anymore
      for (Set<File> files : myFilesToRecompile.values()) {
        files.remove(file);
      }
      myDeletedPaths.add(path);
      if (Utils.IS_TEST_MODE) {
        LOG.info("Marking deleted: " + path);
      }
    }
    finally {
      unlockData();
    }
  }
  /** Discards all registered deleted paths. */
  public void clearDeletedPaths() {
    lockData();
    try {
      myDeletedPaths.clear();
    }
    finally {
      unlockData();
    }
  }
public Set<String> getAndClearDeletedPaths() {
lockData();
try {
try {
final THashSet<String> _paths = new THashSet<>(FileUtil.PATH_HASHING_STRATEGY);
_paths.addAll(myDeletedPaths);
return _paths;
}
finally {
myDeletedPaths.clear();
}
}
finally {
unlockData();
}
}
  /**
   * Returns the live (not copied) map of files to recompile, keyed by build root.
   * The caller must hold the data lock for as long as it works with the returned map.
   */
  @NotNull
  public Map<BuildRootDescriptor, Set<File>> getSourcesToRecompile() {
    LOG.assertTrue(myDataLock.isHeldByCurrentThread(), "FilesDelta data must be locked by querying thread");
    return myFilesToRecompile;
  }
  /** @return true if the file is currently registered for recompilation under the given root */
  public boolean isMarkedRecompile(BuildRootDescriptor rd, File file) {
    lockData();
    try {
      final Set<File> files = myFilesToRecompile.get(rd);
      return files != null && files.contains(file);
    }
    finally {
      unlockData();
    }
  }
  /**
   * Removes and returns the set of files registered for recompilation under the given root.
   *
   * @return the removed set, or null if nothing was registered for the root
   */
  @Nullable
  public Set<File> clearRecompile(BuildRootDescriptor root) {
    lockData();
    try {
      return myFilesToRecompile.remove(root);
    }
    finally {
      unlockData();
    }
  }
}
| |
/*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.source.hls;
import android.net.Uri;
import androidx.annotation.Nullable;
import android.text.TextUtils;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.SeekParameters;
import com.google.android.exoplayer2.drm.DrmInitData;
import com.google.android.exoplayer2.extractor.Extractor;
import com.google.android.exoplayer2.offline.StreamKey;
import com.google.android.exoplayer2.source.CompositeSequenceableLoaderFactory;
import com.google.android.exoplayer2.source.MediaPeriod;
import com.google.android.exoplayer2.source.MediaSourceEventListener.EventDispatcher;
import com.google.android.exoplayer2.source.SampleStream;
import com.google.android.exoplayer2.source.SequenceableLoader;
import com.google.android.exoplayer2.source.TrackGroup;
import com.google.android.exoplayer2.source.TrackGroupArray;
import com.google.android.exoplayer2.source.hls.playlist.HlsMasterPlaylist;
import com.google.android.exoplayer2.source.hls.playlist.HlsMasterPlaylist.Rendition;
import com.google.android.exoplayer2.source.hls.playlist.HlsMasterPlaylist.Variant;
import com.google.android.exoplayer2.source.hls.playlist.HlsPlaylistTracker;
import com.google.android.exoplayer2.trackselection.TrackSelection;
import com.google.android.exoplayer2.upstream.Allocator;
import com.google.android.exoplayer2.upstream.DataSource;
import com.google.android.exoplayer2.upstream.LoadErrorHandlingPolicy;
import com.google.android.exoplayer2.upstream.TransferListener;
import com.google.android.exoplayer2.util.Assertions;
import com.google.android.exoplayer2.util.MimeTypes;
import com.google.android.exoplayer2.util.Util;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.IdentityHashMap;
import java.util.List;
import java.util.Map;
/**
* A {@link MediaPeriod} that loads an HLS stream.
*/
public final class HlsMediaPeriod implements MediaPeriod, HlsSampleStreamWrapper.Callback,
HlsPlaylistTracker.PlaylistEventListener {
private final HlsExtractorFactory extractorFactory;
private final HlsPlaylistTracker playlistTracker;
private final HlsDataSourceFactory dataSourceFactory;
private final @Nullable TransferListener mediaTransferListener;
private final LoadErrorHandlingPolicy loadErrorHandlingPolicy;
private final EventDispatcher eventDispatcher;
private final Allocator allocator;
private final IdentityHashMap<SampleStream, Integer> streamWrapperIndices;
private final TimestampAdjusterProvider timestampAdjusterProvider;
private final CompositeSequenceableLoaderFactory compositeSequenceableLoaderFactory;
private final boolean allowChunklessPreparation;
private final boolean useSessionKeys;
private @Nullable Callback callback;
private int pendingPrepareCount;
private TrackGroupArray trackGroups;
private HlsSampleStreamWrapper[] sampleStreamWrappers;
private HlsSampleStreamWrapper[] enabledSampleStreamWrappers;
// Maps sample stream wrappers to variant/rendition index by matching array positions.
private int[][] manifestUrlIndicesPerWrapper;
private SequenceableLoader compositeSequenceableLoader;
private boolean notifiedReadingStarted;
  /**
   * Creates an HLS media period.
   *
   * @param extractorFactory An {@link HlsExtractorFactory} for {@link Extractor}s for the segments.
   * @param playlistTracker A tracker for HLS playlists.
   * @param dataSourceFactory An {@link HlsDataSourceFactory} for {@link DataSource}s for segments
   *     and keys.
   * @param mediaTransferListener The transfer listener to inform of any media data transfers. May
   *     be null if no listener is available.
   * @param loadErrorHandlingPolicy A {@link LoadErrorHandlingPolicy}.
   * @param eventDispatcher A dispatcher to notify of events.
   * @param allocator An {@link Allocator} from which to obtain media buffer allocations.
   * @param compositeSequenceableLoaderFactory A factory to create composite {@link
   *     SequenceableLoader}s for when this media source loads data from multiple streams.
   * @param allowChunklessPreparation Whether chunkless preparation is allowed.
   * @param useSessionKeys Whether to use #EXT-X-SESSION-KEY tags.
   */
  public HlsMediaPeriod(
      HlsExtractorFactory extractorFactory,
      HlsPlaylistTracker playlistTracker,
      HlsDataSourceFactory dataSourceFactory,
      @Nullable TransferListener mediaTransferListener,
      LoadErrorHandlingPolicy loadErrorHandlingPolicy,
      EventDispatcher eventDispatcher,
      Allocator allocator,
      CompositeSequenceableLoaderFactory compositeSequenceableLoaderFactory,
      boolean allowChunklessPreparation,
      boolean useSessionKeys) {
    this.extractorFactory = extractorFactory;
    this.playlistTracker = playlistTracker;
    this.dataSourceFactory = dataSourceFactory;
    this.mediaTransferListener = mediaTransferListener;
    this.loadErrorHandlingPolicy = loadErrorHandlingPolicy;
    this.eventDispatcher = eventDispatcher;
    this.allocator = allocator;
    this.compositeSequenceableLoaderFactory = compositeSequenceableLoaderFactory;
    this.allowChunklessPreparation = allowChunklessPreparation;
    this.useSessionKeys = useSessionKeys;
    // Start with an empty composite loader; it is rebuilt on each selectTracks call.
    compositeSequenceableLoader =
        compositeSequenceableLoaderFactory.createCompositeSequenceableLoader();
    streamWrapperIndices = new IdentityHashMap<>();
    timestampAdjusterProvider = new TimestampAdjusterProvider();
    // Wrapper arrays stay empty until prepare() builds them.
    sampleStreamWrappers = new HlsSampleStreamWrapper[0];
    enabledSampleStreamWrappers = new HlsSampleStreamWrapper[0];
    manifestUrlIndicesPerWrapper = new int[0][];
    eventDispatcher.mediaPeriodCreated();
  }
public void release() {
playlistTracker.removeListener(this);
for (HlsSampleStreamWrapper sampleStreamWrapper : sampleStreamWrappers) {
sampleStreamWrapper.release();
}
callback = null;
eventDispatcher.mediaPeriodReleased();
}
  @Override
  public void prepare(Callback callback, long positionUs) {
    this.callback = callback;
    // Subscribe to playlist updates before building wrappers, so no refresh is missed.
    playlistTracker.addListener(this);
    buildAndPrepareSampleStreamWrappers(positionUs);
  }
@Override
public void maybeThrowPrepareError() throws IOException {
for (HlsSampleStreamWrapper sampleStreamWrapper : sampleStreamWrappers) {
sampleStreamWrapper.maybeThrowPrepareError();
}
}
  @Override
  public TrackGroupArray getTrackGroups() {
    // Populated in onPrepared(); null until preparation of all wrappers completes.
    return trackGroups;
  }
  // TODO: When the master playlist does not de-duplicate variants by URL and allows Renditions with
  // null URLs, this method must be updated to calculate stream keys that are compatible with those
  // that may already be persisted for offline.
  /**
   * Maps the given track selections onto {@link StreamKey}s identifying the master playlist
   * variants/renditions needed to play them. If a selection only covers tracks embedded in the
   * variants, the lowest-bitrate selected variant is added to carry them.
   */
  @Override
  public List<StreamKey> getStreamKeys(List<TrackSelection> trackSelections) {
    // See HlsMasterPlaylist.copy for interpretation of StreamKeys.
    HlsMasterPlaylist masterPlaylist = Assertions.checkNotNull(playlistTracker.getMasterPlaylist());
    boolean hasVariants = !masterPlaylist.variants.isEmpty();
    // Wrapper layout: [main wrapper (if variants exist)] [audio wrappers] [subtitle wrappers].
    int audioWrapperOffset = hasVariants ? 1 : 0;
    // Subtitle sample stream wrappers are held last.
    int subtitleWrapperOffset = sampleStreamWrappers.length - masterPlaylist.subtitles.size();
    TrackGroupArray mainWrapperTrackGroups;
    int mainWrapperPrimaryGroupIndex;
    int[] mainWrapperVariantIndices;
    if (hasVariants) {
      HlsSampleStreamWrapper mainWrapper = sampleStreamWrappers[0];
      mainWrapperVariantIndices = manifestUrlIndicesPerWrapper[0];
      mainWrapperTrackGroups = mainWrapper.getTrackGroups();
      mainWrapperPrimaryGroupIndex = mainWrapper.getPrimaryTrackGroupIndex();
    } else {
      mainWrapperVariantIndices = new int[0];
      mainWrapperTrackGroups = TrackGroupArray.EMPTY;
      mainWrapperPrimaryGroupIndex = 0;
    }
    List<StreamKey> streamKeys = new ArrayList<>();
    boolean needsPrimaryTrackGroupSelection = false;
    boolean hasPrimaryTrackGroupSelection = false;
    for (TrackSelection trackSelection : trackSelections) {
      TrackGroup trackSelectionGroup = trackSelection.getTrackGroup();
      int mainWrapperTrackGroupIndex = mainWrapperTrackGroups.indexOf(trackSelectionGroup);
      if (mainWrapperTrackGroupIndex != C.INDEX_UNSET) {
        if (mainWrapperTrackGroupIndex == mainWrapperPrimaryGroupIndex) {
          // Primary group in main wrapper.
          hasPrimaryTrackGroupSelection = true;
          for (int i = 0; i < trackSelection.length(); i++) {
            // Track index within the group maps back to the variant index in the master playlist.
            int variantIndex = mainWrapperVariantIndices[trackSelection.getIndexInTrackGroup(i)];
            streamKeys.add(new StreamKey(HlsMasterPlaylist.GROUP_INDEX_VARIANT, variantIndex));
          }
        } else {
          // Embedded group in main wrapper.
          needsPrimaryTrackGroupSelection = true;
        }
      } else {
        // Audio or subtitle group.
        for (int i = audioWrapperOffset; i < sampleStreamWrappers.length; i++) {
          TrackGroupArray wrapperTrackGroups = sampleStreamWrappers[i].getTrackGroups();
          int selectedTrackGroupIndex = wrapperTrackGroups.indexOf(trackSelectionGroup);
          if (selectedTrackGroupIndex != C.INDEX_UNSET) {
            // Wrapper position decides whether this is an audio or a subtitle rendition key.
            int groupIndexType =
                i < subtitleWrapperOffset
                    ? HlsMasterPlaylist.GROUP_INDEX_AUDIO
                    : HlsMasterPlaylist.GROUP_INDEX_SUBTITLE;
            int[] selectedWrapperUrlIndices = manifestUrlIndicesPerWrapper[i];
            for (int trackIndex = 0; trackIndex < trackSelection.length(); trackIndex++) {
              int renditionIndex =
                  selectedWrapperUrlIndices[trackSelection.getIndexInTrackGroup(trackIndex)];
              streamKeys.add(new StreamKey(groupIndexType, renditionIndex));
            }
            break;
          }
        }
      }
    }
    if (needsPrimaryTrackGroupSelection && !hasPrimaryTrackGroupSelection) {
      // A track selection includes a variant-embedded track, but no variant is added yet. We use
      // the valid variant with the lowest bitrate to reduce overhead.
      int lowestBitrateIndex = mainWrapperVariantIndices[0];
      int lowestBitrate = masterPlaylist.variants.get(mainWrapperVariantIndices[0]).format.bitrate;
      for (int i = 1; i < mainWrapperVariantIndices.length; i++) {
        int variantBitrate =
            masterPlaylist.variants.get(mainWrapperVariantIndices[i]).format.bitrate;
        if (variantBitrate < lowestBitrate) {
          lowestBitrate = variantBitrate;
          lowestBitrateIndex = mainWrapperVariantIndices[i];
        }
      }
      streamKeys.add(new StreamKey(HlsMasterPlaylist.GROUP_INDEX_VARIANT, lowestBitrateIndex));
    }
    return streamKeys;
  }
  /**
   * Delegates the selection to each child wrapper and rebuilds the enabled-wrapper set.
   * The first enabled wrapper becomes the timestamp master; if that wrapper was reset or
   * changed, the timestamp adjuster provider is reset and later wrappers are forced to
   * reset as well.
   */
  @Override
  public long selectTracks(TrackSelection[] selections, boolean[] mayRetainStreamFlags,
      SampleStream[] streams, boolean[] streamResetFlags, long positionUs) {
    // Map each selection and stream onto a child period index.
    int[] streamChildIndices = new int[selections.length];
    int[] selectionChildIndices = new int[selections.length];
    for (int i = 0; i < selections.length; i++) {
      streamChildIndices[i] = streams[i] == null ? C.INDEX_UNSET
          : streamWrapperIndices.get(streams[i]);
      selectionChildIndices[i] = C.INDEX_UNSET;
      if (selections[i] != null) {
        // A selection belongs to the first wrapper exposing its track group.
        TrackGroup trackGroup = selections[i].getTrackGroup();
        for (int j = 0; j < sampleStreamWrappers.length; j++) {
          if (sampleStreamWrappers[j].getTrackGroups().indexOf(trackGroup) != C.INDEX_UNSET) {
            selectionChildIndices[i] = j;
            break;
          }
        }
      }
    }
    boolean forceReset = false;
    streamWrapperIndices.clear();
    // Select tracks for each child, copying the resulting streams back into a new streams array.
    SampleStream[] newStreams = new SampleStream[selections.length];
    SampleStream[] childStreams = new SampleStream[selections.length];
    TrackSelection[] childSelections = new TrackSelection[selections.length];
    int newEnabledSampleStreamWrapperCount = 0;
    HlsSampleStreamWrapper[] newEnabledSampleStreamWrappers =
        new HlsSampleStreamWrapper[sampleStreamWrappers.length];
    for (int i = 0; i < sampleStreamWrappers.length; i++) {
      // Restrict the selections/streams arrays to the entries owned by wrapper i.
      for (int j = 0; j < selections.length; j++) {
        childStreams[j] = streamChildIndices[j] == i ? streams[j] : null;
        childSelections[j] = selectionChildIndices[j] == i ? selections[j] : null;
      }
      HlsSampleStreamWrapper sampleStreamWrapper = sampleStreamWrappers[i];
      boolean wasReset = sampleStreamWrapper.selectTracks(childSelections, mayRetainStreamFlags,
          childStreams, streamResetFlags, positionUs, forceReset);
      boolean wrapperEnabled = false;
      for (int j = 0; j < selections.length; j++) {
        if (selectionChildIndices[j] == i) {
          // Assert that the child provided a stream for the selection.
          Assertions.checkState(childStreams[j] != null);
          newStreams[j] = childStreams[j];
          wrapperEnabled = true;
          streamWrapperIndices.put(childStreams[j], i);
        } else if (streamChildIndices[j] == i) {
          // Assert that the child cleared any previous stream.
          Assertions.checkState(childStreams[j] == null);
        }
      }
      if (wrapperEnabled) {
        newEnabledSampleStreamWrappers[newEnabledSampleStreamWrapperCount] = sampleStreamWrapper;
        if (newEnabledSampleStreamWrapperCount++ == 0) {
          // The first enabled wrapper is responsible for initializing timestamp adjusters. This
          // way, if enabled, variants are responsible. Else audio renditions. Else text renditions.
          sampleStreamWrapper.setIsTimestampMaster(true);
          if (wasReset || enabledSampleStreamWrappers.length == 0
              || sampleStreamWrapper != enabledSampleStreamWrappers[0]) {
            // The wrapper responsible for initializing the timestamp adjusters was reset or
            // changed. We need to reset the timestamp adjuster provider and all other wrappers.
            timestampAdjusterProvider.reset();
            forceReset = true;
          }
        } else {
          sampleStreamWrapper.setIsTimestampMaster(false);
        }
      }
    }
    // Copy the new streams back into the streams array.
    System.arraycopy(newStreams, 0, streams, 0, newStreams.length);
    // Update the local state.
    enabledSampleStreamWrappers = Arrays.copyOf(newEnabledSampleStreamWrappers,
        newEnabledSampleStreamWrapperCount);
    compositeSequenceableLoader =
        compositeSequenceableLoaderFactory.createCompositeSequenceableLoader(
            enabledSampleStreamWrappers);
    return positionUs;
  }
@Override
public void discardBuffer(long positionUs, boolean toKeyframe) {
for (HlsSampleStreamWrapper sampleStreamWrapper : enabledSampleStreamWrappers) {
sampleStreamWrapper.discardBuffer(positionUs, toKeyframe);
}
}
  @Override
  public void reevaluateBuffer(long positionUs) {
    // Delegates buffer re-evaluation to the composite loader over the enabled wrappers.
    compositeSequenceableLoader.reevaluateBuffer(positionUs);
  }
@Override
public boolean continueLoading(long positionUs) {
if (trackGroups == null) {
// Preparation is still going on.
for (HlsSampleStreamWrapper wrapper : sampleStreamWrappers) {
wrapper.continuePreparing();
}
return false;
} else {
return compositeSequenceableLoader.continueLoading(positionUs);
}
}
  @Override
  public long getNextLoadPositionUs() {
    // Aggregated next load position across the enabled wrappers.
    return compositeSequenceableLoader.getNextLoadPositionUs();
  }
  @Override
  public long readDiscontinuity() {
    // Reading-started is reported to the event dispatcher at most once per period.
    if (!notifiedReadingStarted) {
      eventDispatcher.readingStarted();
      notifiedReadingStarted = true;
    }
    // This period never reports a read discontinuity.
    return C.TIME_UNSET;
  }
  @Override
  public long getBufferedPositionUs() {
    // Aggregated buffered position across the enabled wrappers.
    return compositeSequenceableLoader.getBufferedPositionUs();
  }
  @Override
  public long seekToUs(long positionUs) {
    if (enabledSampleStreamWrappers.length > 0) {
      // We need to reset all wrappers if the one responsible for initializing timestamp adjusters
      // is reset. Else each wrapper can decide whether to reset independently.
      boolean forceReset = enabledSampleStreamWrappers[0].seekToUs(positionUs, false);
      for (int i = 1; i < enabledSampleStreamWrappers.length; i++) {
        enabledSampleStreamWrappers[i].seekToUs(positionUs, forceReset);
      }
      if (forceReset) {
        // The timestamp master was reset, so discard all cached timestamp adjusters.
        timestampAdjusterProvider.reset();
      }
    }
    return positionUs;
  }
  @Override
  public long getAdjustedSeekPositionUs(long positionUs, SeekParameters seekParameters) {
    // Seek positions are not adjusted for HLS; the requested position is used as-is.
    return positionUs;
  }
// HlsSampleStreamWrapper.Callback implementation.
@Override
public void onPrepared() {
if (--pendingPrepareCount > 0) {
return;
}
int totalTrackGroupCount = 0;
for (HlsSampleStreamWrapper sampleStreamWrapper : sampleStreamWrappers) {
totalTrackGroupCount += sampleStreamWrapper.getTrackGroups().length;
}
TrackGroup[] trackGroupArray = new TrackGroup[totalTrackGroupCount];
int trackGroupIndex = 0;
for (HlsSampleStreamWrapper sampleStreamWrapper : sampleStreamWrappers) {
int wrapperTrackGroupCount = sampleStreamWrapper.getTrackGroups().length;
for (int j = 0; j < wrapperTrackGroupCount; j++) {
trackGroupArray[trackGroupIndex++] = sampleStreamWrapper.getTrackGroups().get(j);
}
}
trackGroups = new TrackGroupArray(trackGroupArray);
callback.onPrepared(this);
}
  @Override
  public void onPlaylistRefreshRequired(Uri url) {
    // A wrapper needs fresher media playlist data for this url.
    playlistTracker.refreshPlaylist(url);
  }
  @Override
  public void onContinueLoadingRequested(HlsSampleStreamWrapper sampleStreamWrapper) {
    // Bubble the request up to the player; which wrapper asked is irrelevant here.
    callback.onContinueLoadingRequested(this);
  }
  // PlaylistListener implementation.

  @Override
  public void onPlaylistChanged() {
    // New playlist data may unblock loading; prompt the player to continue.
    callback.onContinueLoadingRequested(this);
  }
@Override
public boolean onPlaylistError(Uri url, long blacklistDurationMs) {
boolean noBlacklistingFailure = true;
for (HlsSampleStreamWrapper streamWrapper : sampleStreamWrappers) {
noBlacklistingFailure &= streamWrapper.onPlaylistError(url, blacklistDurationMs);
}
callback.onContinueLoadingRequested(this);
return noBlacklistingFailure;
}
  // Internal methods.

  /**
   * Builds the sample stream wrappers for this period: one main wrapper for the variants (if
   * any), one wrapper per audio rendition name group, and one wrapper per subtitle rendition.
   * Starts preparation of every wrapper.
   *
   * @param positionUs The position in microseconds at which any required chunk downloads start.
   */
  private void buildAndPrepareSampleStreamWrappers(long positionUs) {
    HlsMasterPlaylist masterPlaylist = Assertions.checkNotNull(playlistTracker.getMasterPlaylist());
    // Session-key DRM data overrides media DRM data only when explicitly enabled.
    Map<String, DrmInitData> overridingDrmInitData =
        useSessionKeys
            ? deriveOverridingDrmInitData(masterPlaylist.sessionKeyDrmInitData)
            : Collections.emptyMap();
    boolean hasVariants = !masterPlaylist.variants.isEmpty();
    List<Rendition> audioRenditions = masterPlaylist.audios;
    List<Rendition> subtitleRenditions = masterPlaylist.subtitles;
    pendingPrepareCount = 0;
    ArrayList<HlsSampleStreamWrapper> sampleStreamWrappers = new ArrayList<>();
    ArrayList<int[]> manifestUrlIndicesPerWrapper = new ArrayList<>();
    if (hasVariants) {
      buildAndPrepareMainSampleStreamWrapper(
          masterPlaylist,
          positionUs,
          sampleStreamWrappers,
          manifestUrlIndicesPerWrapper,
          overridingDrmInitData);
    }
    // TODO: Build video stream wrappers here.
    buildAndPrepareAudioSampleStreamWrappers(
        positionUs,
        audioRenditions,
        sampleStreamWrappers,
        manifestUrlIndicesPerWrapper,
        overridingDrmInitData);
    // Subtitle stream wrappers. We can always use master playlist information to prepare these.
    for (int i = 0; i < subtitleRenditions.size(); i++) {
      Rendition subtitleRendition = subtitleRenditions.get(i);
      HlsSampleStreamWrapper sampleStreamWrapper =
          buildSampleStreamWrapper(
              C.TRACK_TYPE_TEXT,
              new Uri[] {subtitleRendition.url},
              new Format[] {subtitleRendition.format},
              null,
              Collections.emptyList(),
              overridingDrmInitData,
              positionUs);
      manifestUrlIndicesPerWrapper.add(new int[] {i});
      sampleStreamWrappers.add(sampleStreamWrapper);
      // Subtitles can be prepared directly from master playlist info (no chunk download needed).
      sampleStreamWrapper.prepareWithMasterPlaylistInfo(
          new TrackGroupArray(new TrackGroup(subtitleRendition.format)), 0, TrackGroupArray.EMPTY);
    }
    this.sampleStreamWrappers = sampleStreamWrappers.toArray(new HlsSampleStreamWrapper[0]);
    this.manifestUrlIndicesPerWrapper = manifestUrlIndicesPerWrapper.toArray(new int[0][]);
    pendingPrepareCount = this.sampleStreamWrappers.length;
    // Set timestamp master and trigger preparation (if not already prepared)
    // NOTE(review): assumes at least one wrapper was built; a master playlist with no variants,
    // audio or subtitles would fail here with an out-of-bounds access — confirm upstream
    // guarantees non-emptiness.
    this.sampleStreamWrappers[0].setIsTimestampMaster(true);
    for (HlsSampleStreamWrapper sampleStreamWrapper : this.sampleStreamWrappers) {
      sampleStreamWrapper.continuePreparing();
    }
    // All wrappers are enabled during preparation.
    enabledSampleStreamWrappers = this.sampleStreamWrappers;
  }
  /**
   * This method creates and starts preparation of the main {@link HlsSampleStreamWrapper}.
   *
   * <p>The main sample stream wrapper is the first element of {@link #sampleStreamWrappers}. It
   * provides {@link SampleStream}s for the variant urls in the master playlist. It may be adaptive
   * and may contain multiple muxed tracks.
   *
   * <p>If chunkless preparation is allowed, the media period will try preparation without segment
   * downloads. This is only possible if variants contain the CODECS attribute. If not, traditional
   * preparation with segment downloads will take place. The following points apply to chunkless
   * preparation:
   *
   * <ul>
   *   <li>A muxed audio track will be exposed if the codecs list contain an audio entry and the
   *       master playlist either contains an EXT-X-MEDIA tag without the URI attribute or does not
   *       contain any EXT-X-MEDIA tag.
   *   <li>Closed captions will only be exposed if they are declared by the master playlist.
   *   <li>An ID3 track is exposed preemptively, in case the segments contain an ID3 track.
   * </ul>
   *
   * @param masterPlaylist The HLS master playlist.
   * @param positionUs If preparation requires any chunk downloads, the position in microseconds at
   *     which downloading should start. Ignored otherwise.
   * @param sampleStreamWrappers List to which the built main sample stream wrapper should be added.
   * @param manifestUrlIndicesPerWrapper List to which the selected variant indices should be added.
   * @param overridingDrmInitData Overriding {@link DrmInitData}, keyed by protection scheme type
   *     (i.e. {@link DrmInitData#schemeType}).
   */
  private void buildAndPrepareMainSampleStreamWrapper(
      HlsMasterPlaylist masterPlaylist,
      long positionUs,
      List<HlsSampleStreamWrapper> sampleStreamWrappers,
      List<int[]> manifestUrlIndicesPerWrapper,
      Map<String, DrmInitData> overridingDrmInitData) {
    // Classify each variant as video, audio or unknown based on resolution/codec attributes.
    int[] variantTypes = new int[masterPlaylist.variants.size()];
    int videoVariantCount = 0;
    int audioVariantCount = 0;
    for (int i = 0; i < masterPlaylist.variants.size(); i++) {
      Variant variant = masterPlaylist.variants.get(i);
      Format format = variant.format;
      if (format.height > 0 || Util.getCodecsOfType(format.codecs, C.TRACK_TYPE_VIDEO) != null) {
        variantTypes[i] = C.TRACK_TYPE_VIDEO;
        videoVariantCount++;
      } else if (Util.getCodecsOfType(format.codecs, C.TRACK_TYPE_AUDIO) != null) {
        variantTypes[i] = C.TRACK_TYPE_AUDIO;
        audioVariantCount++;
      } else {
        variantTypes[i] = C.TRACK_TYPE_UNKNOWN;
      }
    }
    boolean useVideoVariantsOnly = false;
    boolean useNonAudioVariantsOnly = false;
    int selectedVariantsCount = variantTypes.length;
    if (videoVariantCount > 0) {
      // We've identified some variants as definitely containing video. Assume variants within the
      // master playlist are marked consistently, and hence that we have the full set. Filter out
      // any other variants, which are likely to be audio only.
      useVideoVariantsOnly = true;
      selectedVariantsCount = videoVariantCount;
    } else if (audioVariantCount < variantTypes.length) {
      // We've identified some variants, but not all, as being audio only. Filter them out to leave
      // the remaining variants, which are likely to contain video.
      useNonAudioVariantsOnly = true;
      selectedVariantsCount = variantTypes.length - audioVariantCount;
    }
    Uri[] selectedPlaylistUrls = new Uri[selectedVariantsCount];
    Format[] selectedPlaylistFormats = new Format[selectedVariantsCount];
    int[] selectedVariantIndices = new int[selectedVariantsCount];
    int outIndex = 0;
    // Copy the surviving variants, remembering their original master playlist indices.
    for (int i = 0; i < masterPlaylist.variants.size(); i++) {
      if ((!useVideoVariantsOnly || variantTypes[i] == C.TRACK_TYPE_VIDEO)
          && (!useNonAudioVariantsOnly || variantTypes[i] != C.TRACK_TYPE_AUDIO)) {
        Variant variant = masterPlaylist.variants.get(i);
        selectedPlaylistUrls[outIndex] = variant.url;
        selectedPlaylistFormats[outIndex] = variant.format;
        selectedVariantIndices[outIndex++] = i;
      }
    }
    // NOTE(review): assumes at least one variant survives the filtering (caller only invokes this
    // when variants is non-empty, and the filters never remove everything) — index 0 is safe then.
    String codecs = selectedPlaylistFormats[0].codecs;
    HlsSampleStreamWrapper sampleStreamWrapper =
        buildSampleStreamWrapper(
            C.TRACK_TYPE_DEFAULT,
            selectedPlaylistUrls,
            selectedPlaylistFormats,
            masterPlaylist.muxedAudioFormat,
            masterPlaylist.muxedCaptionFormats,
            overridingDrmInitData,
            positionUs);
    sampleStreamWrappers.add(sampleStreamWrapper);
    manifestUrlIndicesPerWrapper.add(selectedVariantIndices);
    if (allowChunklessPreparation && codecs != null) {
      boolean variantsContainVideoCodecs = Util.getCodecsOfType(codecs, C.TRACK_TYPE_VIDEO) != null;
      boolean variantsContainAudioCodecs = Util.getCodecsOfType(codecs, C.TRACK_TYPE_AUDIO) != null;
      List<TrackGroup> muxedTrackGroups = new ArrayList<>();
      if (variantsContainVideoCodecs) {
        Format[] videoFormats = new Format[selectedVariantsCount];
        for (int i = 0; i < videoFormats.length; i++) {
          videoFormats[i] = deriveVideoFormat(selectedPlaylistFormats[i]);
        }
        muxedTrackGroups.add(new TrackGroup(videoFormats));
        // Expose a muxed audio track only when nothing contradicts its existence (see javadoc).
        if (variantsContainAudioCodecs
            && (masterPlaylist.muxedAudioFormat != null || masterPlaylist.audios.isEmpty())) {
          muxedTrackGroups.add(
              new TrackGroup(
                  deriveAudioFormat(
                      selectedPlaylistFormats[0],
                      masterPlaylist.muxedAudioFormat,
                      /* isPrimaryTrackInVariant= */ false)));
        }
        List<Format> ccFormats = masterPlaylist.muxedCaptionFormats;
        if (ccFormats != null) {
          // Closed captions are only exposed when declared by the master playlist.
          for (int i = 0; i < ccFormats.size(); i++) {
            muxedTrackGroups.add(new TrackGroup(ccFormats.get(i)));
          }
        }
      } else if (variantsContainAudioCodecs) {
        // Variants only contain audio.
        Format[] audioFormats = new Format[selectedVariantsCount];
        for (int i = 0; i < audioFormats.length; i++) {
          audioFormats[i] =
              deriveAudioFormat(
                  /* variantFormat= */ selectedPlaylistFormats[i],
                  masterPlaylist.muxedAudioFormat,
                  /* isPrimaryTrackInVariant= */ true);
        }
        muxedTrackGroups.add(new TrackGroup(audioFormats));
      } else {
        // Variants contain codecs but no video or audio entries could be identified.
        throw new IllegalArgumentException("Unexpected codecs attribute: " + codecs);
      }
      // An ID3 track is exposed preemptively in case the segments carry ID3 metadata.
      TrackGroup id3TrackGroup =
          new TrackGroup(
              Format.createSampleFormat(
                  /* id= */ "ID3",
                  MimeTypes.APPLICATION_ID3,
                  /* codecs= */ null,
                  /* bitrate= */ Format.NO_VALUE,
                  /* drmInitData= */ null));
      muxedTrackGroups.add(id3TrackGroup);
      sampleStreamWrapper.prepareWithMasterPlaylistInfo(
          new TrackGroupArray(muxedTrackGroups.toArray(new TrackGroup[0])),
          0,
          new TrackGroupArray(id3TrackGroup));
    }
  }
  /**
   * Builds one audio sample stream wrapper per distinct rendition name, grouping all renditions
   * that share the same name into a single (possibly adaptive) wrapper. If chunkless preparation
   * is allowed and every rendition in a group declares codecs, the wrapper is prepared directly
   * from master playlist information.
   *
   * @param positionUs The position in microseconds at which any required chunk downloads start.
   * @param audioRenditions The audio renditions declared by the master playlist.
   * @param sampleStreamWrappers List to which the built wrappers are added.
   * @param manifestUrlsIndicesPerWrapper List to which each wrapper's rendition indices are added.
   * @param overridingDrmInitData Overriding {@link DrmInitData}, keyed by protection scheme type.
   */
  private void buildAndPrepareAudioSampleStreamWrappers(
      long positionUs,
      List<Rendition> audioRenditions,
      List<HlsSampleStreamWrapper> sampleStreamWrappers,
      List<int[]> manifestUrlsIndicesPerWrapper,
      Map<String, DrmInitData> overridingDrmInitData) {
    // Scratch collections are reused across groups to avoid re-allocation.
    ArrayList<Uri> scratchPlaylistUrls =
        new ArrayList<>(/* initialCapacity= */ audioRenditions.size());
    ArrayList<Format> scratchPlaylistFormats =
        new ArrayList<>(/* initialCapacity= */ audioRenditions.size());
    ArrayList<Integer> scratchIndicesList =
        new ArrayList<>(/* initialCapacity= */ audioRenditions.size());
    HashSet<String> alreadyGroupedNames = new HashSet<>();
    for (int renditionByNameIndex = 0;
        renditionByNameIndex < audioRenditions.size();
        renditionByNameIndex++) {
      String name = audioRenditions.get(renditionByNameIndex).name;
      if (!alreadyGroupedNames.add(name)) {
        // This name already has a corresponding group.
        continue;
      }
      boolean renditionsHaveCodecs = true;
      scratchPlaylistUrls.clear();
      scratchPlaylistFormats.clear();
      scratchIndicesList.clear();
      // Group all renditions with matching name.
      for (int renditionIndex = 0; renditionIndex < audioRenditions.size(); renditionIndex++) {
        if (Util.areEqual(name, audioRenditions.get(renditionIndex).name)) {
          Rendition rendition = audioRenditions.get(renditionIndex);
          scratchIndicesList.add(renditionIndex);
          scratchPlaylistUrls.add(rendition.url);
          scratchPlaylistFormats.add(rendition.format);
          // Chunkless preparation below needs every rendition in the group to declare codecs.
          renditionsHaveCodecs &= rendition.format.codecs != null;
        }
      }
      HlsSampleStreamWrapper sampleStreamWrapper =
          buildSampleStreamWrapper(
              C.TRACK_TYPE_AUDIO,
              scratchPlaylistUrls.toArray(new Uri[0]),
              scratchPlaylistFormats.toArray(new Format[0]),
              /* muxedAudioFormat= */ null,
              /* muxedCaptionFormats= */ Collections.emptyList(),
              overridingDrmInitData,
              positionUs);
      manifestUrlsIndicesPerWrapper.add(Util.toArray(scratchIndicesList));
      sampleStreamWrappers.add(sampleStreamWrapper);
      if (allowChunklessPreparation && renditionsHaveCodecs) {
        Format[] renditionFormats = scratchPlaylistFormats.toArray(new Format[0]);
        sampleStreamWrapper.prepareWithMasterPlaylistInfo(
            new TrackGroupArray(new TrackGroup(renditionFormats)), 0, TrackGroupArray.EMPTY);
      }
    }
  }
  /**
   * Creates a sample stream wrapper backed by a new {@link HlsChunkSource} for the given
   * playlist urls/formats.
   *
   * @param trackType The primary {@code C.TRACK_TYPE_*} of the wrapper.
   * @param playlistUrls The urls of the media playlists the wrapper plays.
   * @param playlistFormats The formats corresponding to {@code playlistUrls}.
   * @param muxedAudioFormat The format of audio muxed into the streams, or null.
   * @param muxedCaptionFormats The formats of captions muxed into the streams.
   * @param overridingDrmInitData Overriding {@link DrmInitData}, keyed by protection scheme type.
   * @param positionUs The position in microseconds at which loading starts.
   */
  private HlsSampleStreamWrapper buildSampleStreamWrapper(
      int trackType,
      Uri[] playlistUrls,
      Format[] playlistFormats,
      Format muxedAudioFormat,
      List<Format> muxedCaptionFormats,
      Map<String, DrmInitData> overridingDrmInitData,
      long positionUs) {
    HlsChunkSource defaultChunkSource =
        new HlsChunkSource(
            extractorFactory,
            playlistTracker,
            playlistUrls,
            playlistFormats,
            dataSourceFactory,
            mediaTransferListener,
            timestampAdjusterProvider,
            muxedCaptionFormats);
    return new HlsSampleStreamWrapper(
        trackType,
        /* callback= */ this,
        defaultChunkSource,
        overridingDrmInitData,
        allocator,
        positionUs,
        muxedAudioFormat,
        loadErrorHandlingPolicy,
        eventDispatcher);
  }
private static Map<String, DrmInitData> deriveOverridingDrmInitData(
List<DrmInitData> sessionKeyDrmInitData) {
ArrayList<DrmInitData> mutableSessionKeyDrmInitData = new ArrayList<>(sessionKeyDrmInitData);
HashMap<String, DrmInitData> drmInitDataBySchemeType = new HashMap<>();
for (int i = 0; i < mutableSessionKeyDrmInitData.size(); i++) {
DrmInitData drmInitData = sessionKeyDrmInitData.get(i);
String scheme = drmInitData.schemeType;
// Merge any subsequent drmInitData instances that have the same scheme type. This is valid
// due to the assumptions documented on HlsMediaSource.Builder.setUseSessionKeys, and is
// necessary to get data for different CDNs (e.g. Widevine and PlayReady) into a single
// drmInitData.
int j = i + 1;
while (j < mutableSessionKeyDrmInitData.size()) {
DrmInitData nextDrmInitData = mutableSessionKeyDrmInitData.get(j);
if (TextUtils.equals(nextDrmInitData.schemeType, scheme)) {
drmInitData = drmInitData.merge(nextDrmInitData);
mutableSessionKeyDrmInitData.remove(j);
} else {
j++;
}
}
drmInitDataBySchemeType.put(scheme, drmInitData);
}
return drmInitDataBySchemeType;
}
/**
 * Derives a video container format from a variant's declared format, narrowing the codec string
 * to its video component and inferring the corresponding sample MIME type.
 *
 * @param variantFormat The format declared by the variant in the master playlist.
 * @return The derived video {@link Format}.
 */
private static Format deriveVideoFormat(Format variantFormat) {
  // Keep only the video codecs from the variant's (possibly muxed) codec string.
  String videoCodecs = Util.getCodecsOfType(variantFormat.codecs, C.TRACK_TYPE_VIDEO);
  String mimeType = MimeTypes.getMediaMimeType(videoCodecs);
  return Format.createVideoContainerFormat(
      variantFormat.id,
      variantFormat.label,
      variantFormat.containerMimeType,
      mimeType,
      videoCodecs,
      variantFormat.bitrate,
      variantFormat.width,
      variantFormat.height,
      variantFormat.frameRate,
      /* initializationData= */ null,
      variantFormat.selectionFlags,
      variantFormat.roleFlags);
}
/**
 * Derives an audio container format for a rendition, preferring attributes from the rendition's
 * EXT-X-MEDIA tag format when present and falling back to the variant format otherwise.
 *
 * @param variantFormat The format declared by the variant.
 * @param mediaTagFormat The format declared by the corresponding media tag, or null if the
 *     rendition has no media tag.
 * @param isPrimaryTrackInVariant Whether this audio track is the variant's primary track, in
 *     which case variant-level attributes (bitrate, channel count, flags) apply to it.
 * @return The derived audio {@link Format}.
 */
private static Format deriveAudioFormat(
    Format variantFormat, Format mediaTagFormat, boolean isPrimaryTrackInVariant) {
  String codecs;
  int channelCount = Format.NO_VALUE;
  int selectionFlags = 0;
  int roleFlags = 0;
  String language = null;
  String label = null;
  if (mediaTagFormat != null) {
    codecs = mediaTagFormat.codecs;
    channelCount = mediaTagFormat.channelCount;
    selectionFlags = mediaTagFormat.selectionFlags;
    roleFlags = mediaTagFormat.roleFlags;
    language = mediaTagFormat.language;
    label = mediaTagFormat.label;
  } else {
    codecs = Util.getCodecsOfType(variantFormat.codecs, C.TRACK_TYPE_AUDIO);
    if (isPrimaryTrackInVariant) {
      channelCount = variantFormat.channelCount;
      selectionFlags = variantFormat.selectionFlags;
      // Fix: mediaTagFormat is null in this branch, so the role flags must come from the
      // variant format. Dereferencing mediaTagFormat here previously threw a
      // NullPointerException whenever the primary track had no media tag.
      roleFlags = variantFormat.roleFlags;
      language = variantFormat.language;
      label = variantFormat.label;
    }
  }
  String sampleMimeType = MimeTypes.getMediaMimeType(codecs);
  // Only the variant's primary track inherits the variant's bandwidth attribute.
  int bitrate = isPrimaryTrackInVariant ? variantFormat.bitrate : Format.NO_VALUE;
  return Format.createAudioContainerFormat(
      variantFormat.id,
      label,
      variantFormat.containerMimeType,
      sampleMimeType,
      codecs,
      bitrate,
      channelCount,
      /* sampleRate= */ Format.NO_VALUE,
      /* initializationData= */ null,
      selectionFlags,
      roleFlags,
      language);
}
}
| |
/*
* Copyright (C) 2011 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.lattengineer.LattEngineerAPI.gson.internal.bind;
import java.io.IOException;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import io.lattengineer.LattEngineerAPI.gson.Gson;
import io.lattengineer.LattEngineerAPI.gson.JsonElement;
import io.lattengineer.LattEngineerAPI.gson.JsonPrimitive;
import io.lattengineer.LattEngineerAPI.gson.JsonSyntaxException;
import io.lattengineer.LattEngineerAPI.gson.TypeAdapter;
import io.lattengineer.LattEngineerAPI.gson.TypeAdapterFactory;
import io.lattengineer.LattEngineerAPI.gson.internal.$Gson$Types;
import io.lattengineer.LattEngineerAPI.gson.internal.ConstructorConstructor;
import io.lattengineer.LattEngineerAPI.gson.internal.JsonReaderInternalAccess;
import io.lattengineer.LattEngineerAPI.gson.internal.ObjectConstructor;
import io.lattengineer.LattEngineerAPI.gson.internal.Streams;
import io.lattengineer.LattEngineerAPI.gson.internal.reflect.TypeToken;
import io.lattengineer.LattEngineerAPI.gson.stream.JsonReader;
import io.lattengineer.LattEngineerAPI.gson.stream.JsonToken;
import io.lattengineer.LattEngineerAPI.gson.stream.JsonWriter;
/**
* Adapts maps to either JSON objects or JSON arrays.
*
* <h3>Maps as JSON objects</h3>
* For primitive keys or when complex map key serialization is not enabled, this
* converts Java {@link Map Maps} to JSON Objects. This requires that map keys
* can be serialized as strings; this is insufficient for some key types. For
* example, consider a map whose keys are points on a grid. The default JSON
* form encodes reasonably: <pre> {@code
* Map<Point, String> original = new LinkedHashMap<Point, String>();
* original.put(new Point(5, 6), "a");
* original.put(new Point(8, 8), "b");
* System.out.println(gson.toJson(original, type));
* }</pre>
* The above code prints this JSON object:<pre> {@code
* {
* "(5,6)": "a",
* "(8,8)": "b"
* }
* }</pre>
* But GSON is unable to deserialize this value because the JSON string name is
* just the {@link Object#toString() toString()} of the map key. Attempting to
* convert the above JSON to an object fails with a parse exception:
* <pre>com.google.gson.JsonParseException: Expecting object found: "(5,6)"
* at com.google.gson.JsonObjectDeserializationVisitor.visitFieldUsingCustomHandler
* at com.google.gson.ObjectNavigator.navigateClassFields
* ...</pre>
*
* <h3>Maps as JSON arrays</h3>
* An alternative approach taken by this type adapter when it is required and
* complex map key serialization is enabled is to encode maps as arrays of map
* entries. Each map entry is a two element array containing a key and a value.
* This approach is more flexible because any type can be used as the map's key;
* not just strings. But it's also less portable because the receiver of such
* JSON must be aware of the map entry convention.
*
* <p>Register this adapter when you are creating your GSON instance.
* <pre> {@code
* Gson gson = new GsonBuilder()
* .registerTypeAdapter(Map.class, new MapAsArrayTypeAdapter())
* .create();
* }</pre>
* This will change the structure of the JSON emitted by the code above. Now we
* get an array. In this case the arrays elements are map entries:
* <pre> {@code
* [
* [
* {
* "x": 5,
* "y": 6
* },
* "a",
* ],
* [
* {
* "x": 8,
* "y": 8
* },
* "b"
* ]
* ]
* }</pre>
* This format will serialize and deserialize just fine as long as this adapter
* is registered.
*/
public final class MapTypeAdapterFactory implements TypeAdapterFactory {
// Creates Map instances of the requested concrete type during deserialization.
private final ConstructorConstructor constructorConstructor;
// When true, maps with non-primitive keys may be serialized as arrays of [key, value] pairs.
private final boolean complexMapKeySerialization;
public MapTypeAdapterFactory(ConstructorConstructor constructorConstructor,
boolean complexMapKeySerialization) {
this.constructorConstructor = constructorConstructor;
this.complexMapKeySerialization = complexMapKeySerialization;
}
// Returns an adapter for Map subtypes, or null so Gson falls through to other factories.
public <T> TypeAdapter<T> create(Gson gson, TypeToken<T> typeToken) {
Type type = typeToken.getType();
Class<? super T> rawType = typeToken.getRawType();
if (!Map.class.isAssignableFrom(rawType)) {
return null;
}
// Resolve the declared key and value types of the Map (Object for raw maps).
Class<?> rawTypeOfSrc = $Gson$Types.getRawType(type);
Type[] keyAndValueTypes = $Gson$Types.getMapKeyAndValueTypes(type, rawTypeOfSrc);
TypeAdapter<?> keyAdapter = getKeyAdapter(gson, keyAndValueTypes[0]);
TypeAdapter<?> valueAdapter = gson.getAdapter(TypeToken.get(keyAndValueTypes[1]));
ObjectConstructor<T> constructor = constructorConstructor.get(typeToken);
@SuppressWarnings({"unchecked", "rawtypes"})
// we don't define a type parameter for the key or value types
TypeAdapter<T> result = new Adapter(gson, keyAndValueTypes[0], keyAdapter,
keyAndValueTypes[1], valueAdapter, constructor);
return result;
}
/**
 * Returns a type adapter for the map's key type. Boolean keys get a special adapter that
 * writes them as JSON strings (JSON object names must be strings); all other key types use
 * the context's regular adapter.
 */
private TypeAdapter<?> getKeyAdapter(Gson context, Type keyType) {
return (keyType == boolean.class || keyType == Boolean.class)
? TypeAdapters.BOOLEAN_AS_STRING
: context.getAdapter(TypeToken.get(keyType));
}
// The adapter produced by this factory; handles both the JSON-object and the
// array-of-entries encodings described in the class javadoc.
private final class Adapter<K, V> extends TypeAdapter<Map<K, V>> {
private final TypeAdapter<K> keyTypeAdapter;
private final TypeAdapter<V> valueTypeAdapter;
private final ObjectConstructor<? extends Map<K, V>> constructor;
public Adapter(Gson context, Type keyType, TypeAdapter<K> keyTypeAdapter,
Type valueType, TypeAdapter<V> valueTypeAdapter,
ObjectConstructor<? extends Map<K, V>> constructor) {
// Wrap both adapters so the runtime type of each key/value is honored at write time.
this.keyTypeAdapter =
new TypeAdapterRuntimeTypeWrapper<K>(context, keyTypeAdapter, keyType);
this.valueTypeAdapter =
new TypeAdapterRuntimeTypeWrapper<V>(context, valueTypeAdapter, valueType);
this.constructor = constructor;
}
// Reads a map from either encoding: a JSON array of [key, value] entries, or a JSON
// object whose names are promoted to key values. Rejects duplicate keys.
public Map<K, V> read(JsonReader in) throws IOException {
JsonToken peek = in.peek();
if (peek == JsonToken.NULL) {
in.nextNull();
return null;
}
Map<K, V> map = constructor.construct();
if (peek == JsonToken.BEGIN_ARRAY) {
in.beginArray();
while (in.hasNext()) {
in.beginArray(); // entry array
K key = keyTypeAdapter.read(in);
V value = valueTypeAdapter.read(in);
V replaced = map.put(key, value);
if (replaced != null) {
throw new JsonSyntaxException("duplicate key: " + key);
}
in.endArray();
}
in.endArray();
} else {
in.beginObject();
while (in.hasNext()) {
// Re-interpret the JSON object name as a value so keyTypeAdapter can parse it.
JsonReaderInternalAccess.INSTANCE.promoteNameToValue(in);
K key = keyTypeAdapter.read(in);
V value = valueTypeAdapter.read(in);
V replaced = map.put(key, value);
if (replaced != null) {
throw new JsonSyntaxException("duplicate key: " + key);
}
}
in.endObject();
}
return map;
}
// Writes a map. Without complex-key serialization, keys are stringified via toString().
// With it, keys are first serialized to trees; if any key is a JSON array/object the map
// is emitted as an array of entries, otherwise as a plain JSON object.
public void write(JsonWriter out, Map<K, V> map) throws IOException {
if (map == null) {
out.nullValue();
return;
}
if (!complexMapKeySerialization) {
out.beginObject();
for (Map.Entry<K, V> entry : map.entrySet()) {
out.name(String.valueOf(entry.getKey()));
valueTypeAdapter.write(out, entry.getValue());
}
out.endObject();
return;
}
// Serialize all keys up front to decide which encoding is needed.
boolean hasComplexKeys = false;
List<JsonElement> keys = new ArrayList<JsonElement>(map.size());
List<V> values = new ArrayList<V>(map.size());
for (Map.Entry<K, V> entry : map.entrySet()) {
JsonElement keyElement = keyTypeAdapter.toJsonTree(entry.getKey());
keys.add(keyElement);
values.add(entry.getValue());
hasComplexKeys |= keyElement.isJsonArray() || keyElement.isJsonObject();
}
if (hasComplexKeys) {
out.beginArray();
for (int i = 0; i < keys.size(); i++) {
out.beginArray(); // entry array
Streams.write(keys.get(i), out);
valueTypeAdapter.write(out, values.get(i));
out.endArray();
}
out.endArray();
} else {
out.beginObject();
for (int i = 0; i < keys.size(); i++) {
JsonElement keyElement = keys.get(i);
out.name(keyToString(keyElement));
valueTypeAdapter.write(out, values.get(i));
}
out.endObject();
}
}
// Converts an already-serialized simple key (primitive or JSON null) to a JSON object name.
// Complex keys never reach here: the caller takes the array encoding for those.
private String keyToString(JsonElement keyElement) {
if (keyElement.isJsonPrimitive()) {
JsonPrimitive primitive = keyElement.getAsJsonPrimitive();
if (primitive.isNumber()) {
return String.valueOf(primitive.getAsNumber());
} else if (primitive.isBoolean()) {
return Boolean.toString(primitive.getAsBoolean());
} else if (primitive.isString()) {
return primitive.getAsString();
} else {
throw new AssertionError();
}
} else if (keyElement.isJsonNull()) {
return "null";
} else {
throw new AssertionError();
}
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.util.collection.unsafe.sort;
import org.apache.spark.memory.MemoryConsumer;
import org.apache.spark.memory.TaskMemoryManager;
import org.apache.spark.unsafe.Platform;
import org.apache.spark.unsafe.array.LongArray;
import org.apache.spark.util.collection.TimSort;
import java.util.Comparator;
/**
* Sorts records using an AlphaSort-style key-prefix sort. This sort stores pointers to records
* alongside a user-defined prefix of the record's sorting key. When the underlying sort algorithm
* compares records, it will first compare the stored key prefixes; if the prefixes are not equal,
* then we do not need to traverse the record pointers to compare the actual records. Avoiding these
* random memory accesses improves cache hit rates.
*/
public final class UnsafeInMemorySorter {
// Compares (pointer, prefix) pairs: cheap prefix comparison first, and only on a tie
// dereferences the record pointers through the TaskMemoryManager to compare full records.
private static final class SortComparator implements Comparator<RecordPointerAndKeyPrefix> {
private final RecordComparator recordComparator;
private final PrefixComparator prefixComparator;
private final TaskMemoryManager memoryManager;
SortComparator(
RecordComparator recordComparator,
PrefixComparator prefixComparator,
TaskMemoryManager memoryManager) {
this.recordComparator = recordComparator;
this.prefixComparator = prefixComparator;
this.memoryManager = memoryManager;
}
@Override
public int compare(RecordPointerAndKeyPrefix r1, RecordPointerAndKeyPrefix r2) {
final int prefixComparisonResult = prefixComparator.compare(r1.keyPrefix, r2.keyPrefix);
if (prefixComparisonResult == 0) {
// Prefixes tied: resolve each pointer to its page and offset, skipping the 4-byte
// record length header, and compare the full records.
final Object baseObject1 = memoryManager.getPage(r1.recordPointer);
final long baseOffset1 = memoryManager.getOffsetInPage(r1.recordPointer) + 4; // skip length
final Object baseObject2 = memoryManager.getPage(r2.recordPointer);
final long baseOffset2 = memoryManager.getOffsetInPage(r2.recordPointer) + 4; // skip length
return recordComparator.compare(baseObject1, baseOffset1, baseObject2, baseOffset2);
} else {
return prefixComparisonResult;
}
}
}
// Owner of the pointer array's memory; used to allocate and free it.
private final MemoryConsumer consumer;
private final TaskMemoryManager memoryManager;
// Both sorter and sortComparator are null when no record comparator was supplied,
// in which case getSortedIterator() returns records in insertion order.
private final TimSort<RecordPointerAndKeyPrefix, LongArray> sorter;
private final Comparator<RecordPointerAndKeyPrefix> sortComparator;
/**
 * Within this buffer, position {@code 2 * i} holds a record pointer to the record at
 * index {@code i}, while position {@code 2 * i + 1} in the array holds an 8-byte key prefix.
 */
private LongArray array;
/**
 * The position in the sort buffer where new records can be inserted.
 */
private int pos = 0;
public UnsafeInMemorySorter(
final MemoryConsumer consumer,
final TaskMemoryManager memoryManager,
final RecordComparator recordComparator,
final PrefixComparator prefixComparator,
int initialSize) {
// Two longs per record: one pointer slot and one prefix slot.
// NOTE(review): initialSize * 2 is int arithmetic; a very large initialSize would
// overflow before widening — confirm callers bound initialSize.
this(consumer, memoryManager, recordComparator, prefixComparator,
consumer.allocateArray(initialSize * 2));
}
public UnsafeInMemorySorter(
final MemoryConsumer consumer,
final TaskMemoryManager memoryManager,
final RecordComparator recordComparator,
final PrefixComparator prefixComparator,
LongArray array) {
this.consumer = consumer;
this.memoryManager = memoryManager;
if (recordComparator != null) {
this.sorter = new TimSort<>(UnsafeSortDataFormat.INSTANCE);
this.sortComparator = new SortComparator(recordComparator, prefixComparator, memoryManager);
} else {
// No comparator: sorting is skipped entirely in getSortedIterator().
this.sorter = null;
this.sortComparator = null;
}
this.array = array;
}
/**
 * Free the memory used by pointer array.
 */
public void free() {
if (consumer != null) {
consumer.freeArray(array);
array = null;
}
}
// Logically clears the sorter; the pointer array itself is kept for reuse.
public void reset() {
pos = 0;
}
/**
 * @return the number of records that have been inserted into this sorter.
 */
public int numRecords() {
return pos / 2;
}
// Size of the backing array in bytes (each slot is an 8-byte long).
public long getMemoryUsage() {
return array.size() * 8L;
}
// A record needs two free slots: one for the pointer, one for the prefix.
public boolean hasSpaceForAnotherRecord() {
return pos + 2 <= array.size();
}
// Copies the existing pointer/prefix pairs into a larger array and frees the old one.
public void expandPointerArray(LongArray newArray) {
if (newArray.size() < array.size()) {
throw new OutOfMemoryError("Not enough memory to grow pointer array");
}
Platform.copyMemory(
array.getBaseObject(),
array.getBaseOffset(),
newArray.getBaseObject(),
newArray.getBaseOffset(),
array.size() * 8L);
consumer.freeArray(array);
array = newArray;
}
/**
 * Inserts a record to be sorted. Assumes that the record pointer points to a record length
 * stored as a 4-byte integer, followed by the record's bytes.
 *
 * @param recordPointer pointer to a record in a data page, encoded by {@link TaskMemoryManager}.
 * @param keyPrefix a user-defined key prefix
 */
public void insertRecord(long recordPointer, long keyPrefix) {
if (!hasSpaceForAnotherRecord()) {
// Grow geometrically (doubling) to amortize copy costs.
expandPointerArray(consumer.allocateArray(array.size() * 2));
}
array.set(pos, recordPointer);
pos++;
array.set(pos, keyPrefix);
pos++;
}
// Iterates the (already sorted) pointer array, resolving each pointer lazily in loadNext().
public final class SortedIterator extends UnsafeSorterIterator {
private final int numRecords;
// Index into the long array (advances by 2 per record: pointer slot + prefix slot).
private int position;
// State of the record most recently loaded by loadNext().
private Object baseObject;
private long baseOffset;
private long keyPrefix;
private int recordLength;
private SortedIterator(int numRecords) {
this.numRecords = numRecords;
this.position = 0;
}
// Shallow copy preserving the current cursor and loaded-record state.
public SortedIterator clone() {
SortedIterator iter = new SortedIterator(numRecords);
iter.position = position;
iter.baseObject = baseObject;
iter.baseOffset = baseOffset;
iter.keyPrefix = keyPrefix;
iter.recordLength = recordLength;
return iter;
}
@Override
public int getNumRecords() {
return numRecords;
}
@Override
public boolean hasNext() {
return position / 2 < numRecords;
}
@Override
public void loadNext() {
// This pointer points to a 4-byte record length, followed by the record's bytes
final long recordPointer = array.get(position);
baseObject = memoryManager.getPage(recordPointer);
baseOffset = memoryManager.getOffsetInPage(recordPointer) + 4; // Skip over record length
recordLength = Platform.getInt(baseObject, baseOffset - 4);
keyPrefix = array.get(position + 1);
position += 2;
}
@Override
public Object getBaseObject() {
return baseObject;
}
@Override
public long getBaseOffset() {
return baseOffset;
}
@Override
public int getRecordLength() {
return recordLength;
}
@Override
public long getKeyPrefix() {
return keyPrefix;
}
}
/**
 * Return an iterator over record pointers in sorted order. For efficiency, all calls to
 * {@code next()} will return the same mutable object.
 */
public SortedIterator getSortedIterator() {
if (sorter != null) {
sorter.sort(array, 0, pos / 2, sortComparator);
}
return new SortedIterator(pos / 2);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.streaming.runtime.io;
import org.apache.flink.api.common.typeutils.base.LongSerializer;
import org.apache.flink.runtime.io.disk.iomanager.IOManager;
import org.apache.flink.runtime.io.disk.iomanager.IOManagerAsync;
import org.apache.flink.runtime.io.network.api.EndOfPartitionEvent;
import org.apache.flink.runtime.io.network.api.serialization.RecordSerializer;
import org.apache.flink.runtime.io.network.api.serialization.SpanningRecordSerializer;
import org.apache.flink.runtime.io.network.api.serialization.SpillingAdaptiveSpanningRecordDeserializer;
import org.apache.flink.runtime.io.network.buffer.BufferBuilder;
import org.apache.flink.runtime.io.network.buffer.BufferBuilderTestUtils;
import org.apache.flink.runtime.io.network.buffer.BufferConsumer;
import org.apache.flink.runtime.io.network.partition.consumer.BufferOrEvent;
import org.apache.flink.runtime.io.network.partition.consumer.StreamTestSingleInputGate;
import org.apache.flink.runtime.plugable.DeserializationDelegate;
import org.apache.flink.runtime.plugable.SerializationDelegate;
import org.apache.flink.streaming.api.watermark.Watermark;
import org.apache.flink.streaming.runtime.io.PushingAsyncDataInput.DataOutput;
import org.apache.flink.streaming.runtime.streamrecord.LatencyMarker;
import org.apache.flink.streaming.runtime.streamrecord.StreamElement;
import org.apache.flink.streaming.runtime.streamrecord.StreamElementSerializer;
import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;
import org.apache.flink.streaming.runtime.streamstatus.StatusWatermarkValve;
import org.apache.flink.streaming.runtime.streamstatus.StreamStatus;
import org.junit.After;
import org.junit.Test;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
/**
* Tests for {@link StreamTaskNetworkInput}.
*/
public class StreamTaskNetworkInputTest {
// Size of the buffer used to hold serialized records in each test.
private static final int PAGE_SIZE = 1000;
private final IOManager ioManager = new IOManagerAsync();
@After
public void tearDown() throws Exception {
ioManager.close();
}
// Serializes two records into one buffer and verifies that the input stays available and
// emits both, i.e. data buffered inside the deserializer is not lost between emitNext calls.
@Test
public void testIsAvailableWithBufferedDataInDeserializer() throws Exception {
BufferBuilder bufferBuilder = BufferBuilderTestUtils.createEmptyBufferBuilder(PAGE_SIZE);
BufferConsumer bufferConsumer = bufferBuilder.createBufferConsumer();
serializeRecord(42L, bufferBuilder);
serializeRecord(44L, bufferBuilder);
List<BufferOrEvent> buffers = Collections.singletonList(new BufferOrEvent(bufferConsumer.build(), 0, false));
VerifyRecordsDataOutput output = new VerifyRecordsDataOutput<>();
StreamTaskNetworkInput input = new StreamTaskNetworkInput<>(
new CheckpointedInputGate(
new MockInputGate(1, buffers, false),
new EmptyBufferStorage(),
new CheckpointBarrierTracker(1)),
LongSerializer.INSTANCE,
ioManager,
new StatusWatermarkValve(1, output),
0);
assertHasNextElement(input, output);
assertHasNextElement(input, output);
assertEquals(2, output.getNumberOfEmittedRecords());
}
// Verifies that per-channel deserializers are released (nulled and cleared) as soon as the
// corresponding channel receives EndOfPartitionEvent, not only when the whole input finishes.
@Test
public void testReleasingDeserializerTimely()
throws Exception {
int numInputChannels = 2;
LongSerializer inSerializer = LongSerializer.INSTANCE;
StreamTestSingleInputGate inputGate = new StreamTestSingleInputGate<>(numInputChannels, 1024, inSerializer);
TestRecordDeserializer[] deserializers = new TestRecordDeserializer[numInputChannels];
for (int i = 0; i < deserializers.length; i++) {
deserializers[i] = new TestRecordDeserializer(ioManager.getSpillingDirectoriesPaths());
}
// Keep references so clearing can still be observed after the input nulls its own slots.
TestRecordDeserializer[] copiedDeserializers = Arrays.copyOf(deserializers, deserializers.length);
DataOutput output = new NoOpDataOutput<>();
StreamTaskNetworkInput input = new StreamTaskNetworkInput<>(
new CheckpointedInputGate(
inputGate.getInputGate(),
new EmptyBufferStorage(),
new CheckpointBarrierTracker(1)),
inSerializer,
new StatusWatermarkValve(1, output),
0,
deserializers);
for (int i = 0; i < numInputChannels; i++) {
assertNotNull(deserializers[i]);
inputGate.sendEvent(EndOfPartitionEvent.INSTANCE, i);
input.emitNext(output);
assertNull(deserializers[i]);
assertTrue(copiedDeserializers[i].isCleared());
}
}
// Serializes one long-valued StreamRecord into the given buffer; asserts the buffer did not
// overflow so the record is fully contained in a single buffer.
private void serializeRecord(long value, BufferBuilder bufferBuilder) throws IOException {
RecordSerializer<SerializationDelegate<StreamElement>> serializer = new SpanningRecordSerializer<>();
SerializationDelegate<StreamElement> serializationDelegate =
new SerializationDelegate<>(
new StreamElementSerializer<>(LongSerializer.INSTANCE));
serializationDelegate.setInstance(new StreamRecord<>(value));
serializer.serializeRecord(serializationDelegate);
assertFalse(serializer.copyToBufferBuilder(bufferBuilder).isFullBuffer());
}
// Asserts the input is immediately available and that emitting produces MORE_AVAILABLE.
private static void assertHasNextElement(StreamTaskNetworkInput input, DataOutput output) throws Exception {
assertTrue(input.isAvailable().isDone());
InputStatus status = input.emitNext(output);
assertThat(status, is(InputStatus.MORE_AVAILABLE));
}
// Deserializer stub that records clear() calls instead of releasing real resources.
private static class TestRecordDeserializer
extends SpillingAdaptiveSpanningRecordDeserializer<DeserializationDelegate<StreamElement>> {
private boolean cleared = false;
public TestRecordDeserializer(String[] tmpDirectories) {
super(tmpDirectories);
}
@Override
public void clear() {
cleared = true;
}
public boolean isCleared() {
return cleared;
}
}
// DataOutput that discards everything; base class for outputs used in these tests.
private static class NoOpDataOutput<T> implements DataOutput<T> {
@Override
public void emitRecord(StreamRecord<T> record) {
}
@Override
public void emitWatermark(Watermark watermark) {
}
@Override
public void emitStreamStatus(StreamStatus streamStatus) {
}
@Override
public void emitLatencyMarker(LatencyMarker latencyMarker) {
}
}
// DataOutput that counts emitted records so tests can assert how many were produced.
private static class VerifyRecordsDataOutput<T> extends NoOpDataOutput<T> {
private int numberOfEmittedRecords;
@Override
public void emitRecord(StreamRecord<T> record) {
numberOfEmittedRecords++;
}
int getNumberOfEmittedRecords() {
return numberOfEmittedRecords;
}
}
}
| |
/*
Derby - Class org.apache.derby.iapi.security.Securable
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.derby.iapi.security;
import org.apache.derby.catalog.AliasInfo;
import org.apache.derby.iapi.sql.dictionary.SchemaDescriptor;
/**
* Operations which can be secured. SQL authorization is one way to control
* who can access these operations.
*/
public enum Securable
{
// --- Database property access ---
SET_DATABASE_PROPERTY
(
SchemaDescriptor.SYSCS_UTIL_SCHEMA_UUID,
"SYSCS_SET_DATABASE_PROPERTY",
AliasInfo.ALIAS_TYPE_PROCEDURE_AS_CHAR
),
GET_DATABASE_PROPERTY
(
SchemaDescriptor.SYSCS_UTIL_SCHEMA_UUID,
"SYSCS_GET_DATABASE_PROPERTY",
AliasInfo.ALIAS_TYPE_FUNCTION_AS_CHAR
),
// --- Freeze / checkpoint / backup operations ---
FREEZE_DATABASE
(
SchemaDescriptor.SYSCS_UTIL_SCHEMA_UUID,
"SYSCS_FREEZE_DATABASE",
AliasInfo.ALIAS_TYPE_PROCEDURE_AS_CHAR
),
UNFREEZE_DATABASE
(
SchemaDescriptor.SYSCS_UTIL_SCHEMA_UUID,
"SYSCS_UNFREEZE_DATABASE",
AliasInfo.ALIAS_TYPE_PROCEDURE_AS_CHAR
),
CHECKPOINT_DATABASE
(
SchemaDescriptor.SYSCS_UTIL_SCHEMA_UUID,
"SYSCS_CHECKPOINT_DATABASE",
AliasInfo.ALIAS_TYPE_PROCEDURE_AS_CHAR
),
BACKUP_DATABASE
(
SchemaDescriptor.SYSCS_UTIL_SCHEMA_UUID,
"SYSCS_BACKUP_DATABASE",
AliasInfo.ALIAS_TYPE_PROCEDURE_AS_CHAR
),
BACKUP_DATABASE_NOWAIT
(
SchemaDescriptor.SYSCS_UTIL_SCHEMA_UUID,
"SYSCS_BACKUP_DATABASE_NOWAIT",
AliasInfo.ALIAS_TYPE_PROCEDURE_AS_CHAR
),
BACKUP_DATABASE_AND_ENABLE_LOG_ARCHIVE_MODE
(
SchemaDescriptor.SYSCS_UTIL_SCHEMA_UUID,
"SYSCS_BACKUP_DATABASE_AND_ENABLE_LOG_ARCHIVE_MODE",
AliasInfo.ALIAS_TYPE_PROCEDURE_AS_CHAR
),
BACKUP_DATABASE_AND_ENABLE_LOG_ARCHIVE_MODE_NOWAIT
(
SchemaDescriptor.SYSCS_UTIL_SCHEMA_UUID,
"SYSCS_BACKUP_DATABASE_AND_ENABLE_LOG_ARCHIVE_MODE_NOWAIT",
AliasInfo.ALIAS_TYPE_PROCEDURE_AS_CHAR
),
DISABLE_LOG_ARCHIVE_MODE
(
SchemaDescriptor.SYSCS_UTIL_SCHEMA_UUID,
"SYSCS_DISABLE_LOG_ARCHIVE_MODE",
AliasInfo.ALIAS_TYPE_PROCEDURE_AS_CHAR
),
// --- Consistency checking ---
CHECK_TABLE
(
SchemaDescriptor.SYSCS_UTIL_SCHEMA_UUID,
"SYSCS_CHECK_TABLE",
AliasInfo.ALIAS_TYPE_FUNCTION_AS_CHAR
),
// --- Jar file management (SQLJ schema) ---
INSTALL_JAR
(
SchemaDescriptor.SQLJ_SCHEMA_UUID,
"INSTALL_JAR",
AliasInfo.ALIAS_TYPE_PROCEDURE_AS_CHAR
),
REPLACE_JAR
(
SchemaDescriptor.SQLJ_SCHEMA_UUID,
"REPLACE_JAR",
AliasInfo.ALIAS_TYPE_PROCEDURE_AS_CHAR
),
REMOVE_JAR
(
SchemaDescriptor.SQLJ_SCHEMA_UUID,
"REMOVE_JAR",
AliasInfo.ALIAS_TYPE_PROCEDURE_AS_CHAR
),
// --- Import / export ---
EXPORT_TABLE
(
SchemaDescriptor.SYSCS_UTIL_SCHEMA_UUID,
"SYSCS_EXPORT_TABLE",
AliasInfo.ALIAS_TYPE_PROCEDURE_AS_CHAR
),
IMPORT_TABLE
(
SchemaDescriptor.SYSCS_UTIL_SCHEMA_UUID,
"SYSCS_IMPORT_TABLE",
AliasInfo.ALIAS_TYPE_PROCEDURE_AS_CHAR
),
IMPORT_TABLE_LOBS_FROM_EXTFILE
(
SchemaDescriptor.SYSCS_UTIL_SCHEMA_UUID,
"SYSCS_IMPORT_TABLE_LOBS_FROM_EXTFILE",
AliasInfo.ALIAS_TYPE_PROCEDURE_AS_CHAR
),
IMPORT_DATA
(
SchemaDescriptor.SYSCS_UTIL_SCHEMA_UUID,
"SYSCS_IMPORT_DATA",
AliasInfo.ALIAS_TYPE_PROCEDURE_AS_CHAR
),
IMPORT_DATA_LOBS_FROM_EXTFILE
(
SchemaDescriptor.SYSCS_UTIL_SCHEMA_UUID,
"SYSCS_IMPORT_DATA_LOBS_FROM_EXTFILE",
AliasInfo.ALIAS_TYPE_PROCEDURE_AS_CHAR
),
BULK_INSERT
(
SchemaDescriptor.SYSCS_UTIL_SCHEMA_UUID,
"SYSCS_BULK_INSERT",
AliasInfo.ALIAS_TYPE_PROCEDURE_AS_CHAR
),
// --- Security policy and user administration ---
RELOAD_SECURITY_POLICY
(
SchemaDescriptor.SYSCS_UTIL_SCHEMA_UUID,
"SYSCS_RELOAD_SECURITY_POLICY",
AliasInfo.ALIAS_TYPE_PROCEDURE_AS_CHAR
),
SET_USER_ACCESS
(
SchemaDescriptor.SYSCS_UTIL_SCHEMA_UUID,
"SYSCS_SET_USER_ACCESS",
AliasInfo.ALIAS_TYPE_PROCEDURE_AS_CHAR
),
GET_USER_ACCESS
(
SchemaDescriptor.SYSCS_UTIL_SCHEMA_UUID,
"SYSCS_GET_USER_ACCESS",
AliasInfo.ALIAS_TYPE_FUNCTION_AS_CHAR
),
// --- Statement cache maintenance ---
INVALIDATE_STORED_STATEMENTS
(
SchemaDescriptor.SYSCS_UTIL_SCHEMA_UUID,
"SYSCS_INVALIDATE_STORED_STATEMENTS",
AliasInfo.ALIAS_TYPE_PROCEDURE_AS_CHAR
),
EMPTY_STATEMENT_CACHE
(
SchemaDescriptor.SYSCS_UTIL_SCHEMA_UUID,
"SYSCS_EMPTY_STATEMENT_CACHE",
AliasInfo.ALIAS_TYPE_PROCEDURE_AS_CHAR
),
// --- XPLAIN (statement explain) configuration ---
SET_XPLAIN_MODE
(
SchemaDescriptor.SYSCS_UTIL_SCHEMA_UUID,
"SYSCS_SET_XPLAIN_MODE",
AliasInfo.ALIAS_TYPE_PROCEDURE_AS_CHAR
),
GET_XPLAIN_MODE
(
SchemaDescriptor.SYSCS_UTIL_SCHEMA_UUID,
"SYSCS_GET_XPLAIN_MODE",
AliasInfo.ALIAS_TYPE_FUNCTION_AS_CHAR
),
SET_XPLAIN_SCHEMA
(
SchemaDescriptor.SYSCS_UTIL_SCHEMA_UUID,
"SYSCS_SET_XPLAIN_SCHEMA",
AliasInfo.ALIAS_TYPE_PROCEDURE_AS_CHAR
),
GET_XPLAIN_SCHEMA
(
SchemaDescriptor.SYSCS_UTIL_SCHEMA_UUID,
"SYSCS_GET_XPLAIN_SCHEMA",
AliasInfo.ALIAS_TYPE_FUNCTION_AS_CHAR
),
// --- NATIVE credential management ---
CREATE_USER
(
SchemaDescriptor.SYSCS_UTIL_SCHEMA_UUID,
"SYSCS_CREATE_USER",
AliasInfo.ALIAS_TYPE_PROCEDURE_AS_CHAR
),
RESET_PASSWORD
(
SchemaDescriptor.SYSCS_UTIL_SCHEMA_UUID,
"SYSCS_RESET_PASSWORD",
AliasInfo.ALIAS_TYPE_PROCEDURE_AS_CHAR
),
DROP_USER
(
SchemaDescriptor.SYSCS_UTIL_SCHEMA_UUID,
"SYSCS_DROP_USER",
AliasInfo.ALIAS_TYPE_PROCEDURE_AS_CHAR
),
;
/** UUID string of schema holding the system routine associated with the operation */
public final String routineSchemaID;
/** Name of the associated system routine */
public final String routineName;
/** Type of routine (function or procedure) */
public final char routineType;
/** Construct a Securable from its attributes */
private Securable
(
String routineSchemaID,
String routineName,
char routineType
)
{
this.routineSchemaID = routineSchemaID;
this.routineName = routineName;
this.routineType = routineType;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.state.api.runtime;
import org.apache.flink.annotation.Internal;
import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.accumulators.Accumulator;
import org.apache.flink.api.common.accumulators.DoubleCounter;
import org.apache.flink.api.common.accumulators.Histogram;
import org.apache.flink.api.common.accumulators.IntCounter;
import org.apache.flink.api.common.accumulators.LongCounter;
import org.apache.flink.api.common.cache.DistributedCache;
import org.apache.flink.api.common.externalresource.ExternalResourceInfo;
import org.apache.flink.api.common.functions.BroadcastVariableInitializer;
import org.apache.flink.api.common.functions.RuntimeContext;
import org.apache.flink.api.common.state.AggregatingState;
import org.apache.flink.api.common.state.AggregatingStateDescriptor;
import org.apache.flink.api.common.state.FoldingState;
import org.apache.flink.api.common.state.FoldingStateDescriptor;
import org.apache.flink.api.common.state.KeyedStateStore;
import org.apache.flink.api.common.state.ListState;
import org.apache.flink.api.common.state.ListStateDescriptor;
import org.apache.flink.api.common.state.MapState;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.state.ReducingState;
import org.apache.flink.api.common.state.ReducingStateDescriptor;
import org.apache.flink.api.common.state.StateDescriptor;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.metrics.MetricGroup;
import org.apache.flink.util.Preconditions;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* A streaming {@link RuntimeContext} which delegates to the underlying batch {@code RuntimeContext}
* along with a specified {@link KeyedStateStore}.
*
* <p>This {@code RuntimeContext} has the ability to force eager state registration by
* throwing an exception if state is registered outside of open.
*/
@Internal
public final class SavepointRuntimeContext implements RuntimeContext {

	private static final String REGISTRATION_EXCEPTION_MSG =
			"State Descriptors may only be registered inside of open";

	/** Underlying batch runtime context; all non-state calls are delegated to it. */
	private final RuntimeContext ctx;

	/** Store actually serving the keyed state handed out by the state getters. */
	private final KeyedStateStore keyedStateStore;

	/** Descriptors registered through this context, in registration order. */
	private final List<StateDescriptor<?, ?>> registeredDescriptors;

	/** While true (i.e. during open()), state registration is permitted. */
	private boolean stateRegistrationAllowed;

	public SavepointRuntimeContext(RuntimeContext ctx, KeyedStateStore keyedStateStore) {
		this.ctx = Preconditions.checkNotNull(ctx);
		this.keyedStateStore = Preconditions.checkNotNull(keyedStateStore);
		this.stateRegistrationAllowed = true;
		this.registeredDescriptors = new ArrayList<>();
	}

	// ---------------------------------------------------------------------
	// Plain delegation to the wrapped batch context.
	// ---------------------------------------------------------------------

	@Override
	public String getTaskName() {
		return ctx.getTaskName();
	}

	@Override
	public MetricGroup getMetricGroup() {
		return ctx.getMetricGroup();
	}

	@Override
	public int getNumberOfParallelSubtasks() {
		return ctx.getNumberOfParallelSubtasks();
	}

	@Override
	public int getMaxNumberOfParallelSubtasks() {
		return ctx.getMaxNumberOfParallelSubtasks();
	}

	@Override
	public int getIndexOfThisSubtask() {
		return ctx.getIndexOfThisSubtask();
	}

	@Override
	public int getAttemptNumber() {
		return ctx.getAttemptNumber();
	}

	@Override
	public String getTaskNameWithSubtasks() {
		return ctx.getTaskNameWithSubtasks();
	}

	@Override
	public ExecutionConfig getExecutionConfig() {
		return ctx.getExecutionConfig();
	}

	@Override
	public ClassLoader getUserCodeClassLoader() {
		return ctx.getUserCodeClassLoader();
	}

	@Override
	public <V, A extends Serializable> void addAccumulator(
			String name, Accumulator<V, A> accumulator) {
		ctx.addAccumulator(name, accumulator);
	}

	@Override
	public <V, A extends Serializable> Accumulator<V, A> getAccumulator(String name) {
		return ctx.getAccumulator(name);
	}

	@Override
	@Deprecated
	public Map<String, Accumulator<?, ?>> getAllAccumulators() {
		return ctx.getAllAccumulators();
	}

	@Override
	public IntCounter getIntCounter(String name) {
		return ctx.getIntCounter(name);
	}

	@Override
	public LongCounter getLongCounter(String name) {
		return ctx.getLongCounter(name);
	}

	@Override
	public DoubleCounter getDoubleCounter(String name) {
		return ctx.getDoubleCounter(name);
	}

	@Override
	public Histogram getHistogram(String name) {
		return ctx.getHistogram(name);
	}

	@Override
	public Set<ExternalResourceInfo> getExternalResourceInfos(String resourceName) {
		// External resources are not available in the savepoint environment.
		throw new UnsupportedOperationException("Do not support external resource in current environment");
	}

	@Override
	public boolean hasBroadcastVariable(String name) {
		return ctx.hasBroadcastVariable(name);
	}

	@Override
	public <RT> List<RT> getBroadcastVariable(String name) {
		return ctx.getBroadcastVariable(name);
	}

	@Override
	public <T, C> C getBroadcastVariableWithInitializer(
			String name, BroadcastVariableInitializer<T, C> initializer) {
		return ctx.getBroadcastVariableWithInitializer(name, initializer);
	}

	@Override
	public DistributedCache getDistributedCache() {
		return ctx.getDistributedCache();
	}

	// ---------------------------------------------------------------------
	// State registration: tracked, and only legal while inside open().
	// ---------------------------------------------------------------------

	@Override
	public <T> ValueState<T> getState(ValueStateDescriptor<T> stateProperties) {
		registerDescriptor(stateProperties);
		return keyedStateStore.getState(stateProperties);
	}

	@Override
	public <T> ListState<T> getListState(ListStateDescriptor<T> stateProperties) {
		registerDescriptor(stateProperties);
		return keyedStateStore.getListState(stateProperties);
	}

	@Override
	public <T> ReducingState<T> getReducingState(ReducingStateDescriptor<T> stateProperties) {
		registerDescriptor(stateProperties);
		return keyedStateStore.getReducingState(stateProperties);
	}

	@Override
	public <IN, ACC, OUT> AggregatingState<IN, OUT> getAggregatingState(AggregatingStateDescriptor<IN, ACC, OUT> stateProperties) {
		registerDescriptor(stateProperties);
		return keyedStateStore.getAggregatingState(stateProperties);
	}

	@Override
	@Deprecated
	public <T, ACC> FoldingState<T, ACC> getFoldingState(FoldingStateDescriptor<T, ACC> stateProperties) {
		registerDescriptor(stateProperties);
		return keyedStateStore.getFoldingState(stateProperties);
	}

	@Override
	public <UK, UV> MapState<UK, UV> getMapState(MapStateDescriptor<UK, UV> stateProperties) {
		registerDescriptor(stateProperties);
		return keyedStateStore.getMapState(stateProperties);
	}

	/**
	 * Records the descriptor and fails fast if registration has already been
	 * disabled. This guard was previously duplicated in every state getter.
	 *
	 * @param stateProperties the descriptor being registered
	 * @throws RuntimeException if called after {@link #disableStateRegistration()}
	 */
	private void registerDescriptor(StateDescriptor<?, ?> stateProperties) {
		if (!stateRegistrationAllowed) {
			throw new RuntimeException(REGISTRATION_EXCEPTION_MSG);
		}
		registeredDescriptors.add(stateProperties);
	}

	/**
	 * Returns a snapshot of all state descriptors registered so far.
	 *
	 * @return a new list (or an immutable empty list) of the registered descriptors
	 */
	public List<StateDescriptor<?, ?>> getStateDescriptors() {
		if (registeredDescriptors.isEmpty()) {
			return Collections.emptyList();
		}
		return new ArrayList<>(registeredDescriptors);
	}

	/**
	 * Disallows any further state registration; subsequent state getters throw.
	 * (The spurious {@code throws Exception} clause was removed: the method can
	 * never throw a checked exception, and removing the clause is both source-
	 * and binary-compatible for callers.)
	 */
	public void disableStateRegistration() {
		stateRegistrationAllowed = false;
	}
}
| |
/*
* Copyright 2013 Robert von Burg <eitch@eitchnet.ch>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package li.strolch.model.parameter;
import static li.strolch.model.StrolchModelConstants.INTERPRETATION_NONE;
import static li.strolch.model.StrolchModelConstants.UOM_NONE;
import java.text.MessageFormat;
import li.strolch.exception.StrolchException;
import li.strolch.model.AbstractStrolchElement;
import li.strolch.model.Locator;
import li.strolch.model.Locator.LocatorBuilder;
import li.strolch.model.ParameterizedElement;
import li.strolch.model.StrolchRootElement;
import li.strolch.utils.helper.StringHelper;
/**
 * @param <T>
 * 		the type of the value held by this parameter
 *
 * @author Robert von Burg <eitch@eitchnet.ch>
 */
public abstract class AbstractParameter<T> extends AbstractStrolchElement implements Parameter<T> {

	/** whether UIs should hide this parameter */
	protected boolean hidden = false;

	/** ordering index of this parameter within its parent element */
	protected int index;

	/** interpretation hint for the value; never null, defaults to the NONE marker */
	protected String interpretation = INTERPRETATION_NONE;

	/** unit of measure of the value; never null, defaults to the NONE marker */
	protected String uom = UOM_NONE;

	/** the element which contains this parameter */
	protected ParameterizedElement parent;

	/**
	 * Empty constructor
	 */
	protected AbstractParameter() {
		//
	}

	/**
	 * Constructs this parameter with the given id and name
	 *
	 * @param id
	 * 		the id
	 * @param name
	 * 		the name
	 */
	public AbstractParameter(String id, String name) {
		super(id, name);
	}

	@Override
	public boolean isHidden() {
		return this.hidden;
	}

	@Override
	public void setHidden(boolean hidden) {
		assertNotReadonly();
		this.hidden = hidden;
	}

	@Override
	public String getInterpretation() {
		return this.interpretation;
	}

	@Override
	public void setInterpretation(String interpretation) {
		assertNotReadonly();
		// empty values are normalized to the NONE marker
		this.interpretation = StringHelper.isEmpty(interpretation) ? INTERPRETATION_NONE : interpretation;
	}

	@Override
	public boolean isInterpretationDefined() {
		return !INTERPRETATION_NONE.equals(this.interpretation);
	}

	@Override
	public String getUom() {
		return this.uom;
	}

	@Override
	public void setUom(String uom) {
		assertNotReadonly();
		// empty values are normalized to the NONE marker
		this.uom = StringHelper.isEmpty(uom) ? UOM_NONE : uom;
	}

	@Override
	public boolean isUomDefined() {
		return !UOM_NONE.equals(this.uom);
	}

	@Override
	public void setIndex(int index) {
		assertNotReadonly();
		this.index = index;
	}

	@Override
	public int getIndex() {
		return this.index;
	}

	@Override
	public ParameterizedElement getParent() {
		return this.parent;
	}

	@Override
	public void setParent(ParameterizedElement parent) {
		assertNotReadonly();
		this.parent = parent;
	}

	@Override
	public StrolchRootElement getRootElement() {
		return this.parent.getRootElement();
	}

	@Override
	public boolean isRootElement() {
		return false;
	}

	@Override
	protected void fillLocator(LocatorBuilder lb) {
		lb.append(this.id);
	}

	@Override
	public Locator getLocator() {
		LocatorBuilder locatorBuilder = new LocatorBuilder();
		// parameters are located underneath their parent, when attached
		if (this.parent != null)
			this.parent.fillLocator(locatorBuilder);
		fillLocator(locatorBuilder);
		return locatorBuilder.build();
	}

	/**
	 * Validates that the value is legal, which in this implementation simply means not null
	 *
	 * @param value
	 * 		the value to check for this parameter instance
	 *
	 * @throws StrolchException
	 * 		if the value is null
	 */
	protected void validateValue(T value) throws StrolchException {
		if (value != null)
			return;
		String msg = MessageFormat.format("Can not set null value on Parameter {0}", getLocator()); //$NON-NLS-1$
		throw new StrolchException(msg);
	}

	/**
	 * Fills the {@link Parameter} clone with the id, name, hidden, interpretation, uom and index
	 *
	 * @param clone
	 * 		the clone to fill
	 */
	protected void fillClone(Parameter<?> clone) {
		super.fillClone(clone);
		clone.setHidden(this.hidden);
		clone.setInterpretation(this.interpretation);
		clone.setUom(this.uom);
		clone.setIndex(this.index);
	}

	/**
	 * Returns a string of the form {@code SimpleName [id=..., name=..., value=...]}
	 */
	@SuppressWarnings("nls")
	@Override
	public String toString() {
		return getClass().getSimpleName() + " [id=" + this.id + ", name=" + this.name + ", value=" + getValueAsString()
				+ "]";
	}

	/**
	 * Compares the value of the given parameter to this parameter
	 *
	 * @param otherParam
	 * 		the parameter for which the value is to be compared to
	 *
	 * @return the {@link Comparable#compareTo(Object)} result
	 */
	@Override
	public abstract int compareTo(Parameter<?> otherParam);
}
| |
package com.commafeed.frontend.resource;
import java.util.Arrays;
import java.util.Date;
import java.util.Optional;
import java.util.UUID;
import javax.inject.Inject;
import javax.inject.Singleton;
import javax.validation.Valid;
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.lang3.RandomStringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.apache.http.client.utils.URIBuilder;
import com.codahale.metrics.annotation.Timed;
import com.commafeed.CommaFeedApplication;
import com.commafeed.CommaFeedConfiguration;
import com.commafeed.backend.dao.UserDAO;
import com.commafeed.backend.dao.UserRoleDAO;
import com.commafeed.backend.dao.UserSettingsDAO;
import com.commafeed.backend.feed.FeedUtils;
import com.commafeed.backend.model.User;
import com.commafeed.backend.model.UserRole;
import com.commafeed.backend.model.UserRole.Role;
import com.commafeed.backend.model.UserSettings;
import com.commafeed.backend.model.UserSettings.ReadingMode;
import com.commafeed.backend.model.UserSettings.ReadingOrder;
import com.commafeed.backend.model.UserSettings.ViewMode;
import com.commafeed.backend.service.MailService;
import com.commafeed.backend.service.PasswordEncryptionService;
import com.commafeed.backend.service.UserService;
import com.commafeed.frontend.auth.SecurityCheck;
import com.commafeed.frontend.model.Settings;
import com.commafeed.frontend.model.UserModel;
import com.commafeed.frontend.model.request.LoginRequest;
import com.commafeed.frontend.model.request.PasswordResetRequest;
import com.commafeed.frontend.model.request.ProfileModificationRequest;
import com.commafeed.frontend.model.request.RegistrationRequest;
import com.commafeed.frontend.session.SessionHelper;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import io.dropwizard.hibernate.UnitOfWork;
import io.dropwizard.jersey.validation.ValidationErrorMessage;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
/**
 * REST resource managing user accounts: settings, profile, registration,
 * login and password recovery.
 */
@Path("/user")
@Api(value = "/user")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
@Slf4j
@RequiredArgsConstructor(onConstructor = @__({ @Inject }) )
@Singleton
public class UserREST {

	private final UserDAO userDAO;
	private final UserRoleDAO userRoleDAO;
	private final UserSettingsDAO userSettingsDAO;
	private final UserService userService;
	private final PasswordEncryptionService encryptionService;
	private final MailService mailService;
	private final CommaFeedConfiguration config;

	/**
	 * Returns the stored settings of the authenticated user, or hard-coded
	 * defaults when the user has never saved any settings.
	 */
	@Path("/settings")
	@GET
	@UnitOfWork
	@ApiOperation(value = "Retrieve user settings", notes = "Retrieve user settings", response = Settings.class)
	@Timed
	public Response getSettings(@SecurityCheck User user) {
		Settings s = new Settings();
		UserSettings settings = userSettingsDAO.findByUser(user);
		if (settings != null) {
			s.setReadingMode(settings.getReadingMode().name());
			s.setReadingOrder(settings.getReadingOrder().name());
			s.setViewMode(settings.getViewMode().name());
			s.setShowRead(settings.isShowRead());
			s.setEmail(settings.isEmail());
			s.setGmail(settings.isGmail());
			s.setFacebook(settings.isFacebook());
			s.setTwitter(settings.isTwitter());
			s.setGoogleplus(settings.isGoogleplus());
			s.setTumblr(settings.isTumblr());
			s.setPocket(settings.isPocket());
			s.setInstapaper(settings.isInstapaper());
			s.setBuffer(settings.isBuffer());
			s.setReadability(settings.isReadability());
			s.setScrollMarks(settings.isScrollMarks());
			s.setTheme(settings.getTheme());
			s.setCustomCss(settings.getCustomCss());
			s.setLanguage(settings.getLanguage());
			s.setScrollSpeed(settings.getScrollSpeed());
		} else {
			// no settings persisted yet: respond with the application defaults
			s.setReadingMode(ReadingMode.unread.name());
			s.setReadingOrder(ReadingOrder.desc.name());
			s.setViewMode(ViewMode.title.name());
			s.setShowRead(true);
			s.setTheme("default");
			s.setEmail(true);
			s.setGmail(true);
			s.setFacebook(true);
			s.setTwitter(true);
			s.setGoogleplus(true);
			s.setTumblr(true);
			s.setPocket(true);
			s.setInstapaper(true);
			s.setBuffer(true);
			s.setReadability(true);
			s.setScrollMarks(true);
			s.setLanguage("en");
			s.setScrollSpeed(400);
		}
		return Response.ok(s).build();
	}

	/**
	 * Persists the given settings for the authenticated user, creating the
	 * settings row lazily on first save.
	 */
	@Path("/settings")
	@POST
	@UnitOfWork
	@ApiOperation(value = "Save user settings", notes = "Save user settings")
	@Timed
	public Response saveSettings(@SecurityCheck User user, @ApiParam(required = true) Settings settings) {
		Preconditions.checkNotNull(settings);
		UserSettings s = userSettingsDAO.findByUser(user);
		if (s == null) {
			// first save for this user: create the settings entity
			s = new UserSettings();
			s.setUser(user);
		}
		s.setReadingMode(ReadingMode.valueOf(settings.getReadingMode()));
		s.setReadingOrder(ReadingOrder.valueOf(settings.getReadingOrder()));
		s.setShowRead(settings.isShowRead());
		s.setViewMode(ViewMode.valueOf(settings.getViewMode()));
		s.setScrollMarks(settings.isScrollMarks());
		s.setTheme(settings.getTheme());
		s.setCustomCss(settings.getCustomCss());
		s.setLanguage(settings.getLanguage());
		s.setScrollSpeed(settings.getScrollSpeed());
		s.setEmail(settings.isEmail());
		s.setGmail(settings.isGmail());
		s.setFacebook(settings.isFacebook());
		s.setTwitter(settings.isTwitter());
		s.setGoogleplus(settings.isGoogleplus());
		s.setTumblr(settings.isTumblr());
		s.setPocket(settings.isPocket());
		s.setInstapaper(settings.isInstapaper());
		s.setBuffer(settings.isBuffer());
		s.setReadability(settings.isReadability());
		userSettingsDAO.saveOrUpdate(s);
		return Response.ok().build();
	}

	/**
	 * Returns the authenticated user's profile, flagging the user as admin
	 * when any of their roles is {@code Role.ADMIN}.
	 */
	@Path("/profile")
	@GET
	@UnitOfWork
	@ApiOperation(value = "Retrieve user's profile", response = UserModel.class)
	@Timed
	public Response get(@SecurityCheck User user) {
		UserModel userModel = new UserModel();
		userModel.setId(user.getId());
		userModel.setName(user.getName());
		userModel.setEmail(user.getEmail());
		userModel.setEnabled(!user.isDisabled());
		userModel.setApiKey(user.getApiKey());
		for (UserRole role : userRoleDAO.findAll(user)) {
			if (role.getRole() == Role.ADMIN) {
				userModel.setAdmin(true);
			}
		}
		return Response.ok(userModel).build();
	}

	/**
	 * Updates the authenticated user's email/password. The demo account cannot
	 * be modified. A new password (min length 6) also triggers a fresh api key;
	 * the api key can additionally be rotated on demand via the request flag.
	 */
	@Path("/profile")
	@POST
	@UnitOfWork
	@ApiOperation(value = "Save user's profile")
	@Timed
	public Response save(@SecurityCheck User user, @ApiParam(required = true) ProfileModificationRequest request) {
		// empty password means "keep current"; otherwise enforce minimum length
		Preconditions.checkArgument(StringUtils.isBlank(request.getPassword()) || request.getPassword().length() >= 6);
		if (StringUtils.isNotBlank(request.getEmail())) {
			// the new email must not belong to a different account
			User u = userDAO.findByEmail(request.getEmail());
			Preconditions.checkArgument(u == null || user.getId().equals(u.getId()));
		}
		if (CommaFeedApplication.USERNAME_DEMO.equals(user.getName())) {
			return Response.status(Status.FORBIDDEN).build();
		}
		user.setEmail(StringUtils.trimToNull(request.getEmail()));
		if (StringUtils.isNotBlank(request.getPassword())) {
			byte[] password = encryptionService.getEncryptedPassword(request.getPassword(), user.getSalt());
			user.setPassword(password);
			// NOTE(review): the api key is regenerated on password change,
			// presumably because it is derived from the credentials — confirm
			user.setApiKey(userService.generateApiKey(user));
		}
		if (request.isNewApiKey()) {
			user.setApiKey(userService.generateApiKey(user));
		}
		userDAO.saveOrUpdate(user);
		return Response.ok().build();
	}

	/**
	 * Registers a new USER-role account, logs it in and binds it to the
	 * session. Registration failures surface as HTTP 422 with the message.
	 */
	@Path("/register")
	@POST
	@UnitOfWork
	@ApiOperation(value = "Register a new account")
	@Timed
	public Response register(@Valid @ApiParam(required = true) RegistrationRequest req, @Context SessionHelper sessionHelper) {
		try {
			User registeredUser = userService.register(req.getName(), req.getPassword(), req.getEmail(), Arrays.asList(Role.USER));
			userService.login(req.getName(), req.getPassword());
			sessionHelper.setLoggedInUser(registeredUser);
			return Response.ok().build();
		} catch (final IllegalArgumentException e) {
			return Response.status(422).entity(new ValidationErrorMessage(ImmutableList.of(e.getMessage()))).type(MediaType.TEXT_PLAIN)
					.build();
		}
	}

	/**
	 * Authenticates the user and stores them in the session, or returns 401.
	 */
	@Path("/login")
	@POST
	@UnitOfWork
	@ApiOperation(value = "Login and create a session")
	@Timed
	public Response login(@ApiParam(required = true) LoginRequest req, @Context SessionHelper sessionHelper) {
		Optional<User> user = userService.login(req.getName(), req.getPassword());
		if (user.isPresent()) {
			sessionHelper.setLoggedInUser(user.get());
			return Response.ok().build();
		} else {
			return Response.status(Response.Status.UNAUTHORIZED).entity("wrong username or password").type(MediaType.TEXT_PLAIN).build();
		}
	}

	/**
	 * Generates a recovery token for the account matching the given email and
	 * mails a reset link pointing at {@code /passwordResetCallback}.
	 */
	@Path("/passwordReset")
	@POST
	@UnitOfWork
	@ApiOperation(value = "send a password reset email")
	@Timed
	public Response sendPasswordReset(@Valid PasswordResetRequest req) {
		User user = userDAO.findByEmail(req.getEmail());
		if (user == null) {
			return Response.status(Status.PRECONDITION_FAILED).entity("Email not found.").type(MediaType.TEXT_PLAIN).build();
		}
		try {
			// random token; its creation date bounds the validity window
			user.setRecoverPasswordToken(DigestUtils.sha1Hex(UUID.randomUUID().toString()));
			user.setRecoverPasswordTokenDate(new Date());
			userDAO.saveOrUpdate(user);
			mailService.sendMail(user, "Password recovery", buildEmailContent(user));
			return Response.ok().build();
		} catch (Exception e) {
			log.error(e.getMessage(), e);
			return Response.status(Status.INTERNAL_SERVER_ERROR).entity("could not send email: " + e.getMessage())
					.type(MediaType.TEXT_PLAIN).build();
		}
	}

	/**
	 * Builds the HTML body of the recovery mail, linking to the public
	 * password-reset callback URL for this user.
	 */
	private String buildEmailContent(User user) throws Exception {
		String publicUrl = FeedUtils.removeTrailingSlash(config.getApplicationSettings().getPublicUrl());
		publicUrl += "/rest/user/passwordResetCallback";
		return String.format(
				"You asked for password recovery for account '%s', <a href='%s'>follow this link</a> to change your password. Ignore this if you didn't request a password recovery.",
				user.getName(), callbackUrl(user, publicUrl));
	}

	/**
	 * Builds the callback URL carrying the user's email and recovery token as
	 * query parameters.
	 */
	private String callbackUrl(User user, String publicUrl) throws Exception {
		return new URIBuilder(publicUrl).addParameter("email", user.getEmail()).addParameter("token", user.getRecoverPasswordToken())
				.build().toURL().toString();
	}

	/**
	 * Target of the recovery mail link: validates email + token (tokens older
	 * than 2 days are rejected), assigns a random 10-character password, clears
	 * the token, and shows the new password as an HTML page.
	 */
	@Path("/passwordResetCallback")
	@GET
	@UnitOfWork
	@Produces(MediaType.TEXT_HTML)
	@Timed
	public Response passwordRecoveryCallback(@QueryParam("email") String email, @QueryParam("token") String token) {
		Preconditions.checkNotNull(email);
		Preconditions.checkNotNull(token);
		User user = userDAO.findByEmail(email);
		if (user == null) {
			return Response.status(Status.UNAUTHORIZED).entity("Email not found.").build();
		}
		if (user.getRecoverPasswordToken() == null || !user.getRecoverPasswordToken().equals(token)) {
			return Response.status(Status.UNAUTHORIZED).entity("Invalid token.").build();
		}
		if (user.getRecoverPasswordTokenDate().before(DateUtils.addDays(new Date(), -2))) {
			return Response.status(Status.UNAUTHORIZED).entity("token expired.").build();
		}
		String passwd = RandomStringUtils.randomAlphanumeric(10);
		byte[] encryptedPassword = encryptionService.getEncryptedPassword(passwd, user.getSalt());
		user.setPassword(encryptedPassword);
		// rotate the api key if one exists, since the password changed
		if (StringUtils.isNotBlank(user.getApiKey())) {
			user.setApiKey(userService.generateApiKey(user));
		}
		// token is single-use: clear it after a successful reset
		user.setRecoverPasswordToken(null);
		user.setRecoverPasswordTokenDate(null);
		userDAO.saveOrUpdate(user);
		String message = "Your new password is: " + passwd;
		message += "<br />";
		message += String.format("<a href=\"%s\">Back to Homepage</a>", config.getApplicationSettings().getPublicUrl());
		return Response.ok(message).build();
	}

	/**
	 * Deletes the authenticated user's account; the built-in admin and demo
	 * accounts cannot be removed.
	 */
	@Path("/profile/deleteAccount")
	@POST
	@UnitOfWork
	@ApiOperation(value = "Delete the user account")
	@Timed
	public Response delete(@SecurityCheck User user) {
		if (CommaFeedApplication.USERNAME_ADMIN.equals(user.getName()) || CommaFeedApplication.USERNAME_DEMO.equals(user.getName())) {
			return Response.status(Status.FORBIDDEN).build();
		}
		userService.unregister(userDAO.findById(user.getId()));
		return Response.ok().build();
	}
}
| |
/*
* Copyright 2018 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.config;
import com.thoughtworks.go.domain.CommentRenderer;
import com.thoughtworks.go.domain.ConfigErrors;
import com.thoughtworks.go.domain.DefaultCommentRenderer;
import com.thoughtworks.go.util.StringUtil;
import com.thoughtworks.go.util.XmlUtils;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.http.client.utils.URIBuilder;
import java.net.URISyntaxException;
import java.util.Map;
import java.util.regex.Pattern;
/**
* @understands mingle project for pipeline
*/
@ConfigTag("mingle")
public class MingleConfig implements ParamsAttributeAware, Validatable, CommentRenderer {
	@ConfigAttribute(value = "baseUrl", optional = false)
	private String baseUrl;

	@ConfigAttribute(value = "projectIdentifier", optional = false)
	private String projectIdentifier;

	@ConfigSubtag
	private MqlCriteria mqlCriteria = new MqlCriteria();

	private final ConfigErrors configErrors = new ConfigErrors();

	private static final String DELIMITER = "/";

	public static final String BASE_URL = "baseUrl";
	public static final String PROJECT_IDENTIFIER = "projectIdentifier";
	public static final String MQL_GROUPING_CONDITIONS = "mqlCriteria";

	private static final String MINGLE_URL_PATTERN = "https://.+";
	private static final Pattern MINGLE_URL_PATTERN_REGEX = Pattern.compile(String.format("^(%s)$", MINGLE_URL_PATTERN));
	private static final String PROJECT_IDENTIFIER_PATTERN = "[^\\s]+";
	private static final Pattern PROJECT_IDENTIFIER_PATTERN_REGEX = Pattern.compile(String.format("^(%s)$", PROJECT_IDENTIFIER_PATTERN));
	private static final String MINGLE_CARDS_PATH = "/projects/%s/cards/";

	/** Link template suffix and card-number regex shared by render() and asTrackingTool(). */
	private static final String CARD_LINK_TEMPLATE_SUFFIX = "${ID}";
	private static final String CARD_NUMBER_REGEX = "#(\\d+)";

	public MingleConfig() {
	}

	public MingleConfig(String baseUrl, String projectIdentifier, String mql) {
		this(baseUrl, projectIdentifier);
		this.mqlCriteria = new MqlCriteria(mql);
	}

	public MingleConfig(String baseUrl, String projectIdentifier) {
		this.baseUrl = baseUrl;
		this.projectIdentifier = projectIdentifier;
	}

	public boolean validateTree(ValidationContext validationContext) {
		validate(validationContext);
		return errors().isEmpty();
	}

	/**
	 * Validates that baseUrl is an https:// URL and that projectIdentifier
	 * contains no whitespace. A completely undefined config (null baseUrl)
	 * is intentionally not flagged.
	 */
	public void validate(ValidationContext validationContext) {
		if (isDefined() && XmlUtils.doesNotMatchUsingXsdRegex(MINGLE_URL_PATTERN_REGEX, baseUrl)) {
			configErrors.add(BASE_URL, "Should be a URL starting with https://");
		}
		if (projectIdentifier != null && XmlUtils.doesNotMatchUsingXsdRegex(PROJECT_IDENTIFIER_PATTERN_REGEX, projectIdentifier)) {
			configErrors.add(PROJECT_IDENTIFIER, "Should be a valid mingle identifier.");
		}
	}

	/** A mingle config is considered defined as soon as a base URL is set. */
	public boolean isDefined() {
		return baseUrl != null;
	}

	public ConfigErrors errors() {
		return configErrors;
	}

	public void addError(String fieldName, String message) {
		configErrors.add(fieldName, message);
	}

	/**
	 * Joins the given path onto the base URL, collapsing a duplicated "/" at
	 * the join point.
	 *
	 * @param path the path to append (may or may not start with "/")
	 * @return the absolute URL as a string
	 * @throws URISyntaxException if the base URL cannot be parsed
	 */
	public String urlFor(String path) throws URISyntaxException {
		URIBuilder baseUri = new URIBuilder(baseUrl);
		String originalPath = baseUri.getPath();
		if (originalPath == null) {
			originalPath = "";
		}
		if (originalPath.endsWith(DELIMITER) && path.startsWith(DELIMITER)) {
			path = path.replaceFirst(DELIMITER, "");
		}
		return baseUri.setPath(originalPath + path).toString();
	}

	public String getProjectIdentifier() {
		return projectIdentifier;
	}

	public void setProjectIdentifier(String projectIdentifier) {
		this.projectIdentifier = projectIdentifier;
	}

	/** Returns the MQL (empty when unset) quoted for safe embedding in javascript. */
	public String getQuotedMql() {
		String mqlString = mqlCriteria.equals(new MqlCriteria()) ? "" : mqlCriteria.getMql();
		return StringUtil.quoteJavascriptString(mqlString);
	}

	public String getQuotedProjectIdentifier() {
		return StringUtil.quoteJavascriptString(projectIdentifier);
	}

	public String getBaseUrl() {
		return baseUrl;
	}

	public void setBaseUrl(String baseUrl) {
		this.baseUrl = baseUrl;
	}

	public MqlCriteria getMqlCriteria() {
		return mqlCriteria;
	}

	public void setMqlCriteria(String mql) {
		this.mqlCriteria = new MqlCriteria(mql);
	}

	@Override
	public boolean equals(Object o) {
		if (this == o) {
			return true;
		}
		if (o == null || getClass() != o.getClass()) {
			return false;
		}
		MingleConfig that = (MingleConfig) o;
		if (baseUrl != null ? !baseUrl.equals(that.baseUrl) : that.baseUrl != null) {
			return false;
		}
		if (mqlCriteria != null ? !mqlCriteria.equals(that.mqlCriteria) : that.mqlCriteria != null) {
			return false;
		}
		if (projectIdentifier != null ? !projectIdentifier.equals(that.projectIdentifier) : that.projectIdentifier != null) {
			return false;
		}
		return true;
	}

	@Override
	public int hashCode() {
		int result = baseUrl != null ? baseUrl.hashCode() : 0;
		result = 31 * result + (projectIdentifier != null ? projectIdentifier.hashCode() : 0);
		result = 31 * result + (mqlCriteria != null ? mqlCriteria.hashCode() : 0);
		return result;
	}

	@Override
	public String toString() {
		return new ToStringBuilder(this).
				append("baseUrl", baseUrl).
				append("projectName", projectIdentifier).
				append("mqlCriteria", mqlCriteria).
				toString();
	}

	/**
	 * Applies UI-submitted attributes; only keys present in the map are
	 * touched, so partial updates are supported.
	 */
	public void setConfigAttributes(Object attributes) {
		if (attributes == null) {
			return;
		}
		Map attributeMap = (Map) attributes;
		if (attributeMap.containsKey(BASE_URL)) {
			baseUrl = (String) attributeMap.get(BASE_URL);
		}
		if (attributeMap.containsKey(PROJECT_IDENTIFIER)) {
			projectIdentifier = (String) attributeMap.get(PROJECT_IDENTIFIER);
		}
		if (attributeMap.containsKey(MQL_GROUPING_CONDITIONS)) {
			mqlCriteria = (mqlCriteria == null) ? new MqlCriteria() : mqlCriteria;
			mqlCriteria.setConfigAttributes(attributeMap.get(MQL_GROUPING_CONDITIONS));
		}
	}

	public static MingleConfig create(Object attributes) {
		MingleConfig mingleConfig = new MingleConfig();
		mingleConfig.setConfigAttributes(attributes);
		return mingleConfig;
	}

	/**
	 * NOTE(review): despite its name, this returns {@code true} when baseUrl
	 * and projectIdentifier are EQUAL (mql criteria ignored) and {@code false}
	 * when either differs. Behaviour is deliberately left unchanged here since
	 * callers may rely on it — confirm intent before renaming or inverting.
	 */
	public boolean isDifferentFrom(MingleConfig other) {
		if (baseUrl != null ? !baseUrl.equals(other.baseUrl) : other.baseUrl != null) {
			return false;
		}
		if (projectIdentifier != null ? !projectIdentifier.equals(other.projectIdentifier) : other.projectIdentifier != null) {
			return false;
		}
		return true;
	}

	/**
	 * Builds the absolute URL of the project's cards listing, e.g.
	 * {@code https://host/projects/<identifier>/cards/}. Shared by
	 * {@link #render(String)} and {@link #asTrackingTool()}.
	 */
	private String cardsUrl() throws URISyntaxException {
		return urlFor(String.format(MINGLE_CARDS_PATH, projectIdentifier));
	}

	/**
	 * Renders card references like "#123" in the given text as links to the
	 * corresponding mingle card.
	 */
	public String render(String text) {
		try {
			return new DefaultCommentRenderer(cardsUrl() + CARD_LINK_TEMPLATE_SUFFIX, CARD_NUMBER_REGEX).render(text);
		} catch (URISyntaxException e) {
			throw new RuntimeException("Could not construct the URL to generate the link.", e);
		}
	}

	/**
	 * Exposes this mingle config as a generic {@link TrackingTool} using the
	 * same card link template and regex as {@link #render(String)}.
	 */
	public TrackingTool asTrackingTool() {
		try {
			return new TrackingTool(cardsUrl() + CARD_LINK_TEMPLATE_SUFFIX, CARD_NUMBER_REGEX);
		} catch (URISyntaxException e) {
			throw new RuntimeException("Could not construct the URL to generate the link.", e);
		}
	}
}
| |
package a;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlType;
import org.jvnet.jaxb2_commons.lang.*;
import org.jvnet.jaxb2_commons.locator.ObjectLocator;
import org.jvnet.jaxb2_commons.locator.util.LocatorUtils;
/**
* <p>
* Java class for Primitives complex type.
* <p>
* <p>
* The following schema fragment specifies the expected content contained within this class.
* <p>
*
* <pre>
* <complexType name="Primitives">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="int" type="{http://www.w3.org/2001/XMLSchema}int"/>
* <element name="long" type="{http://www.w3.org/2001/XMLSchema}long"/>
* <element name="boolean" type="{http://www.w3.org/2001/XMLSchema}boolean"/>
* <element name="double" type="{http://www.w3.org/2001/XMLSchema}double"/>
* <element name="float" type="{http://www.w3.org/2001/XMLSchema}float"/>
* <element name="byte" type="{http://www.w3.org/2001/XMLSchema}byte"/>
* <element name="short" type="{http://www.w3.org/2001/XMLSchema}short"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "Primitives", propOrder = { "_int", "_long", "_boolean", "_double", "_float", "_byte", "_short" })
public class Primitives implements Equals, HashCode {
@XmlElement(name = "int")
@javax.validation.constraints.NotNull
protected Integer _int;
@XmlElement(name = "long")
@javax.validation.constraints.NotNull
protected Long _long;
@XmlElement(name = "boolean")
@javax.validation.constraints.NotNull
protected Boolean _boolean;
@XmlElement(name = "double")
@javax.validation.constraints.NotNull
protected Double _double;
@XmlElement(name = "float")
@javax.validation.constraints.NotNull
protected Float _float;
@XmlElement(name = "byte")
@javax.validation.constraints.NotNull
@javax.validation.constraints.DecimalMax("127")
@javax.validation.constraints.DecimalMin("-128")
protected Byte _byte;
@XmlElement(name = "short")
@javax.validation.constraints.NotNull
@javax.validation.constraints.DecimalMax("32767")
@javax.validation.constraints.DecimalMin("-32768")
protected Short _short;
	/**
	 * Gets the value of the int property.
	 *
	 * @return the current value of the {@code int} element
	 */
	public Integer getInt() {
		return _int;
	}
	/**
	 * Sets the value of the int property.
	 *
	 * @param value the new value of the {@code int} element
	 */
	public void setInt(Integer value) {
		this._int = value;
	}
/**
* Gets the value of the long property.
*/
public Long getLong() {
return _long;
}
/**
* Sets the value of the long property.
*/
public void setLong(Long value) {
this._long = value;
}
/**
* Gets the value of the boolean property.
*/
public Boolean isBoolean() {
return _boolean;
}
/**
* Sets the value of the boolean property.
*/
public void setBoolean(Boolean value) {
this._boolean = value;
}
/**
* Gets the value of the double property.
*/
public Double getDouble() {
return _double;
}
/**
* Sets the value of the double property.
*/
public void setDouble(Double value) {
this._double = value;
}
/**
* Gets the value of the float property.
*/
public Float getFloat() {
return _float;
}
/**
* Sets the value of the float property.
*/
public void setFloat(Float value) {
this._float = value;
}
/**
* Gets the value of the byte property.
*/
public Byte getByte() {
return _byte;
}
/**
* Sets the value of the byte property.
*/
public void setByte(Byte value) {
this._byte = value;
}
/**
* Gets the value of the short property.
*/
public Short getShort() {
return _short;
}
/**
* Sets the value of the short property.
*/
public void setShort(Short value) {
this._short = value;
}
public int hashCode(ObjectLocator locator, HashCodeStrategy strategy) {
int currentHashCode = 1;
{
Integer theInt;
theInt = this.getInt();
currentHashCode = strategy.hashCode(LocatorUtils.property(locator, "_int", theInt), currentHashCode, theInt);
}
{
Long theLong;
theLong = this.getLong();
currentHashCode = strategy.hashCode(LocatorUtils.property(locator, "_long", theLong), currentHashCode, theLong);
}
{
Boolean theBoolean;
theBoolean = this.isBoolean();
currentHashCode = strategy.hashCode(LocatorUtils.property(locator, "_boolean", theBoolean), currentHashCode, theBoolean);
}
{
Double theDouble;
theDouble = this.getDouble();
currentHashCode = strategy.hashCode(LocatorUtils.property(locator, "_double", theDouble), currentHashCode, theDouble);
}
{
Float theFloat;
theFloat = this.getFloat();
currentHashCode = strategy.hashCode(LocatorUtils.property(locator, "_float", theFloat), currentHashCode, theFloat);
}
{
Byte theByte;
theByte = this.getByte();
currentHashCode = strategy.hashCode(LocatorUtils.property(locator, "_byte", theByte), currentHashCode, theByte);
}
{
Short theShort;
theShort = this.getShort();
currentHashCode = strategy.hashCode(LocatorUtils.property(locator, "_short", theShort), currentHashCode, theShort);
}
return currentHashCode;
}
public int hashCode() {
final HashCodeStrategy strategy = JAXBHashCodeStrategy.INSTANCE;
return this.hashCode(null, strategy);
}
public boolean equals(ObjectLocator thisLocator, ObjectLocator thatLocator, Object object, EqualsStrategy strategy) {
if (!(object instanceof Primitives)) {
return false;
}
if (this == object) {
return true;
}
final Primitives that = ((Primitives) object);
{
Integer lhsInt;
lhsInt = this.getInt();
Integer rhsInt;
rhsInt = that.getInt();
if (!strategy.equals(LocatorUtils.property(thisLocator, "_int", lhsInt), LocatorUtils.property(thatLocator, "_int", rhsInt), lhsInt, rhsInt)) {
return false;
}
}
{
Long lhsLong;
lhsLong = this.getLong();
Long rhsLong;
rhsLong = that.getLong();
if (!strategy.equals(LocatorUtils.property(thisLocator, "_long", lhsLong), LocatorUtils.property(thatLocator, "_long", rhsLong), lhsLong,
rhsLong)) {
return false;
}
}
{
Boolean lhsBoolean;
lhsBoolean = this.isBoolean();
Boolean rhsBoolean;
rhsBoolean = that.isBoolean();
if (!strategy.equals(LocatorUtils.property(thisLocator, "_boolean", lhsBoolean), LocatorUtils.property(thatLocator, "_boolean", rhsBoolean),
lhsBoolean, rhsBoolean)) {
return false;
}
}
{
Double lhsDouble;
lhsDouble = this.getDouble();
Double rhsDouble;
rhsDouble = that.getDouble();
if (!strategy.equals(LocatorUtils.property(thisLocator, "_double", lhsDouble), LocatorUtils.property(thatLocator, "_double", rhsDouble), lhsDouble,
rhsDouble)) {
return false;
}
}
{
Float lhsFloat;
lhsFloat = this.getFloat();
Float rhsFloat;
rhsFloat = that.getFloat();
if (!strategy.equals(LocatorUtils.property(thisLocator, "_float", lhsFloat), LocatorUtils.property(thatLocator, "_float", rhsFloat), lhsFloat,
rhsFloat)) {
return false;
}
}
{
Byte lhsByte;
lhsByte = this.getByte();
Byte rhsByte;
rhsByte = that.getByte();
if (!strategy.equals(LocatorUtils.property(thisLocator, "_byte", lhsByte), LocatorUtils.property(thatLocator, "_byte", rhsByte), lhsByte,
rhsByte)) {
return false;
}
}
{
Short lhsShort;
lhsShort = this.getShort();
Short rhsShort;
rhsShort = that.getShort();
if (!strategy.equals(LocatorUtils.property(thisLocator, "_short", lhsShort), LocatorUtils.property(thatLocator, "_short", rhsShort), lhsShort,
rhsShort)) {
return false;
}
}
return true;
}
public boolean equals(Object object) {
final EqualsStrategy strategy = JAXBEqualsStrategy.INSTANCE;
return equals(null, null, object, strategy);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cocoon.portal.pluto.om;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.URL;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Vector;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
import javax.servlet.ServletContext;
import org.apache.avalon.framework.parameters.ParameterException;
import org.apache.avalon.framework.parameters.Parameterizable;
import org.apache.avalon.framework.parameters.Parameters;
import org.apache.avalon.framework.service.ServiceException;
import org.apache.avalon.framework.service.ServiceManager;
import org.apache.cocoon.components.source.SourceUtil;
import org.apache.cocoon.portal.PortalService;
import org.apache.cocoon.portal.avalon.AbstractComponent;
import org.apache.cocoon.portal.deployment.DeploymentEvent;
import org.apache.cocoon.portal.deployment.DeploymentException;
import org.apache.cocoon.portal.deployment.DeploymentStatus;
import org.apache.cocoon.portal.event.Receiver;
import org.apache.cocoon.portal.om.CopletDefinition;
import org.apache.cocoon.portal.om.CopletType;
import org.apache.cocoon.portal.pluto.deployment.Deployer;
import org.apache.cocoon.portal.pluto.deployment.WebApplicationRewriter;
import org.apache.cocoon.thread.RunnableManager;
import org.apache.commons.lang.StringUtils;
import org.apache.excalibur.source.Source;
import org.apache.excalibur.source.SourceResolver;
import org.apache.excalibur.xml.EntityResolver;
import org.apache.pluto.om.common.ObjectID;
import org.apache.pluto.om.entity.PortletApplicationEntityList;
import org.apache.pluto.om.entity.PortletApplicationEntityListCtrl;
import org.apache.pluto.om.portlet.PortletApplicationDefinitionList;
import org.apache.pluto.om.portlet.PortletDefinition;
import org.exolab.castor.mapping.Mapping;
import org.exolab.castor.xml.Unmarshaller;
import org.xml.sax.InputSource;
/**
*
* @version $Id$
*/
public class PortletDefinitionRegistryImpl
    extends AbstractComponent
    implements PortletDefinitionRegistry, Receiver, Parameterizable, Runnable {

    /** Wait ten seconds before scanning. */
    protected static final int STARTUP_DELAY = 10 * 1000;

    private static final String WEB_XML = "WEB-INF/web.xml";
    private static final String PORTLET_XML = "WEB-INF/portlet.xml";
    private static final String COPLET_XML = "WEB-INF/coplet.xml";

    /** The castor mapping location for portlet.xml. */
    public static final String PORTLET_MAPPING = "resource://org/apache/cocoon/portal/pluto/om/portletdefinitionmapping.xml";

    /** The castor mapping location for web.xml. */
    public static final String WEBXML_MAPPING = "resource://org/apache/cocoon/portal/pluto/om/servletdefinitionmapping.xml";

    /** The portlet application entity list. */
    protected PortletApplicationEntityListImpl portletApplicationEntities = new PortletApplicationEntityListImpl(this);

    // Helper lists and hashtables to access the data as fast as possible
    // List containing all portlet applications available in the system
    protected PortletApplicationDefinitionListImpl registry = new PortletApplicationDefinitionListImpl();

    /** All portlet definitions, hashed by ObjectId */
    protected Map portletsKeyObjectId = new HashMap();

    /** Our context name. */
    protected String contextName;

    /** The entity resolver */
    protected EntityResolver entityResolver;

    /** Path to the webapp directory containing all web apps. This is used to find already
     * deployed portlets and to deploy new portlets. */
    protected String webAppDir;

    /** Directory holding "local" portlet applications deployed inside this web app. */
    protected String localAppDir = "conf/portlets";

    /** Should logger libraries be stripped from deployed wars? */
    protected boolean stripLoggers = false;

    /** The castor mapping for the portlet.xml. */
    protected Mapping mappingPortletXml = new Mapping();

    /** The castor mapping for the web.xml. */
    protected Mapping mappingWebXml = new Mapping();

    /** Should we scan the webapps directory on startup? */
    protected boolean scanOnStartup = true;

    /** Create coplets. */
    protected boolean createCoplets = true;

    /** The name of the coplet base data for portlets. */
    protected String copletBaseDataName = "Portlet";

    /** The threadpool name to be used for daemon thread. */
    protected String threadPoolName = "daemon";

    /** The servlet context. */
    protected ServletContext servletContext;

    /**
     * Default constructor.
     */
    public PortletDefinitionRegistryImpl() {
        // nothing to do
    }

    /**
     * Looks up the entity resolver used while unmarshalling the deployment descriptors.
     * @see org.apache.avalon.framework.service.Serviceable#service(org.apache.avalon.framework.service.ServiceManager)
     */
    public void service(ServiceManager serviceManager)
    throws ServiceException {
        super.service(serviceManager);
        this.entityResolver = (EntityResolver) this.manager.lookup(EntityResolver.ROLE);
    }

    /**
     * Reads the configuration; every parameter falls back to the current field value
     * (i.e. the built-in default) when absent.
     * @see org.apache.avalon.framework.parameters.Parameterizable#parameterize(org.apache.avalon.framework.parameters.Parameters)
     */
    public void parameterize(Parameters params) throws ParameterException {
        this.webAppDir = params.getParameter("webapp-directory", null);
        this.localAppDir = params.getParameter("localapp-directory", this.localAppDir);
        this.stripLoggers = params.getParameterAsBoolean("strip-loggers", this.stripLoggers);
        this.scanOnStartup = params.getParameterAsBoolean("scan-on-startup", this.scanOnStartup);
        this.threadPoolName = params.getParameter("thread-pool-name", this.threadPoolName);
        this.createCoplets = params.getParameterAsBoolean("create-coplets", this.createCoplets);
        this.copletBaseDataName = params.getParameter("coplet-base-data", this.copletBaseDataName);
    }

    /**
     * Releases the entity resolver.
     * @see org.apache.avalon.framework.activity.Disposable#dispose()
     */
    public void dispose() {
        if ( this.manager != null ) {
            this.manager.release(this.entityResolver);
            this.entityResolver = null;
        }
        super.dispose();
    }

    /**
     * Determines the context name and deployment directories, loads the castor
     * mappings for portlet.xml/web.xml and - if configured - schedules the
     * delayed startup scan of the webapps directory on a daemon thread.
     * @see org.apache.avalon.framework.activity.Initializable#initialize()
     */
    public void initialize() throws Exception {
        if ( this.getLogger().isInfoEnabled() ) {
            this.getLogger().info("Initializing Portlet Definition Registry.");
            this.getLogger().info("Local application directory: " + this.localAppDir);
            this.getLogger().info("Strip loggers on deployment: " + this.stripLoggers);
            if ( this.webAppDir != null ) {
                this.getLogger().info("Web application directory: " + this.webAppDir);
            }
            this.getLogger().info("Scan on startup: " + this.scanOnStartup);
        }
        super.initialize();
        this.servletContext = this.portalService.getRequestContext().getServletContext();
        // get our context path; getRealPath("") is null for unexpanded wars
        String baseWMDir = this.servletContext.getRealPath("");
        if (baseWMDir != null) {
            // BEGIN PATCH for IBM WebSphere
            if (baseWMDir.endsWith(File.separator)) {
                baseWMDir = baseWMDir.substring(0, baseWMDir.length() - 1);
            }
            // END PATCH for IBM WebSphere
            // the last path segment is our context name, its parent the webapps dir
            int lastIndex = baseWMDir.lastIndexOf(File.separatorChar);
            this.contextName = baseWMDir.substring(lastIndex + 1);
            baseWMDir = baseWMDir.substring(0, lastIndex);
            if (this.getLogger().isDebugEnabled()) {
                this.getLogger().debug("servletContext.getRealPath('') =" + this.servletContext.getRealPath(""));
                this.getLogger().debug("baseWMDir = " + baseWMDir);
            }
        }
        if ( this.webAppDir == null ) {
            this.webAppDir = baseWMDir;
        }
        // now check directories
        File webAppDirFile = new File(this.webAppDir);
        if (webAppDirFile.exists() && webAppDirFile.isDirectory()) {
            try {
                this.webAppDir = webAppDirFile.getCanonicalPath();
            } catch (IOException e) {
                // ignore - keep the non-canonical path
            }
        } else {
            throw new FileNotFoundException("The deployment directory for portlet applications \""
                + webAppDirFile.getAbsolutePath() + "\" does not exist.");
        }
        File localAppDirFile = new File(this.localAppDir);
        if (!localAppDirFile.exists()) {
            localAppDirFile.mkdirs();
        } else if (!localAppDirFile.isDirectory()) {
            throw new FileNotFoundException("Invalid deployment directory for local portlet applications: \""
                + localAppDirFile.getAbsolutePath() + "\"");
        }
        try {
            this.localAppDir = localAppDirFile.getCanonicalPath();
        } catch (IOException e) {
            // ignore - keep the non-canonical path
        }
        // load the castor mappings
        SourceResolver resolver = null;
        try {
            resolver = (SourceResolver)this.manager.lookup(SourceResolver.ROLE);
            Source source = null;
            try {
                source = resolver.resolveURI(PORTLET_MAPPING);
                this.mappingPortletXml.loadMapping(SourceUtil.getInputSource(source));
            } finally {
                resolver.release(source);
            }
            try {
                source = resolver.resolveURI(WEBXML_MAPPING);
                this.mappingWebXml.loadMapping(SourceUtil.getInputSource(source));
            } finally {
                resolver.release(source);
            }
        } finally {
            this.manager.release(resolver);
        }
        // now load existing webapps/portlets, delayed so the container can settle
        if ( this.scanOnStartup ) {
            RunnableManager runnableManager = null;
            try {
                runnableManager = (RunnableManager)this.manager.lookup(RunnableManager.ROLE);
                runnableManager.execute(this.threadPoolName, this, STARTUP_DELAY);
            } finally {
                this.manager.release(runnableManager);
            }
        }
        ((PortletApplicationEntityListCtrl)this.portletApplicationEntities).add("cocoon");
    }

    /**
     * Startup scan, run on a daemon thread: either scans the webapps directory
     * or - when deployed as an unexpanded war without a configured directory -
     * falls back to loading the local portlets only.
     * @see java.lang.Runnable#run()
     */
    public void run() {
        try {
            if ( this.webAppDir == null ) {
                if (this.getLogger().isWarnEnabled()) {
                    this.getLogger().warn("Only local portlets are supported when deployed as a war "
                        + "and 'webapp-directory' is not configured.");
                }
                this.contextName = "local";
                this.loadLocal();
            } else {
                this.scanWebapps();
            }
        } catch (Exception ignore) {
            // best effort: a failed scan must not kill the daemon thread
            this.getLogger().error("Exception during scanning of portlet applications.", ignore);
        }
    }

    /**
     * @see org.apache.cocoon.portal.pluto.om.PortletDefinitionRegistry#getPortletApplicationDefinitionList()
     */
    public PortletApplicationDefinitionList getPortletApplicationDefinitionList() {
        return registry;
    }

    /**
     * @see org.apache.cocoon.portal.pluto.om.PortletDefinitionRegistry#getPortletDefinition(org.apache.pluto.om.common.ObjectID)
     */
    public PortletDefinition getPortletDefinition(ObjectID id) {
        return (PortletDefinition)portletsKeyObjectId.get(id);
    }

    /**
     * Scans the webapps directory for portlet applications: expanded web apps
     * are loaded directly, wars only if no expanded directory of the same name
     * exists. Deployment errors are logged and do not stop the scan.
     */
    protected void scanWebapps()
    throws Exception {
        File f = new File(this.webAppDir);
        String[] entries = f.list();
        if (entries == null) {
            // directory vanished or is unreadable - nothing to scan
            this.getLogger().warn("Unable to list web application directory: " + this.webAppDir);
            return;
        }
        List entryList = Arrays.asList(entries);
        for (int i=0; i<entries.length; i++) {
            File entry = new File(f, entries[i]);
            if ( this.getLogger().isDebugEnabled() ) {
                this.getLogger().debug("Searching file: " + entry);
            }
            try {
                if (entry.isDirectory()) {
                    this.loadWebApp(f.getAbsolutePath(), entries[i]);
                } else if (entry.isFile()) {
                    String name = entry.getName();
                    int index = name.lastIndexOf(".war");
                    if (index > 0 && name.endsWith(".war")) {
                        String webModule = name.substring(0, index);
                        // skip the war when the container already expanded it
                        if (!entryList.contains(webModule)) {
                            this.loadWar(entry, webModule);
                        }
                    }
                }
            } catch (DeploymentException de) {
                this.getLogger().error("Error during deployment of portlet application.", de);
            }
        }
    }

    /**
     * Loads the portlet application contained in this web application itself
     * (used when no external webapps directory is available). Does nothing if
     * no portlet.xml is present.
     */
    protected void loadLocal()
    throws Exception {
        URL url = this.servletContext.getResource("/" + PORTLET_XML);
        if (url != null) {
            InputSource portletSource = new InputSource(url.openStream());
            portletSource.setSystemId(url.toExternalForm());
            // NOTE(review): assumes /WEB-INF/web.xml is always resolvable in a
            // servlet context - a missing resource would NPE here.
            url = this.servletContext.getResource("/" + WEB_XML);
            final InputSource webSource = new InputSource(url.openStream());
            webSource.setSystemId(url.toExternalForm());
            url = this.servletContext.getResource("/" + COPLET_XML);
            InputSource copletSource = null;
            if ( url != null ) {
                copletSource = new InputSource(url.openStream());
                copletSource.setSystemId(url.toExternalForm());
            }
            this.load(portletSource, webSource, copletSource, this.contextName);
        }
    }

    /**
     * Inspects a war file and, if it contains both a portlet.xml and a web.xml,
     * loads the portlet application described by it. The zip file is always
     * closed; inspection failures are only debug-logged (best effort).
     *
     * @param warFile the war file to inspect
     * @param webModule the module name (war file name without ".war")
     */
    protected void loadWar(File warFile, String webModule)
    throws Exception {
        if (this.getLogger().isDebugEnabled()) {
            this.getLogger().debug("Searching war " + warFile.getName());
        }
        ZipFile war = null;
        try {
            war = new ZipFile(warFile);
            ZipEntry entry = war.getEntry(PORTLET_XML);
            // no portlet.xml -> not a portlet web application
            if (entry != null) {
                final InputSource portletSource = new InputSource(war.getInputStream(entry));
                portletSource.setSystemId("/" + PORTLET_XML);
                entry = war.getEntry(WEB_XML);
                // no web.xml -> not a web application
                if (entry == null) {
                    return;
                }
                final InputSource webSource = new InputSource(war.getInputStream(entry));
                webSource.setSystemId("/" + WEB_XML);
                // coplet.xml is optional
                InputSource copletSource = null;
                entry = war.getEntry(COPLET_XML);
                if ( entry != null ) {
                    copletSource = new InputSource(war.getInputStream(entry));
                    copletSource.setSystemId("/" + COPLET_XML);
                }
                this.load(portletSource, webSource, copletSource, webModule);
            }
        } catch (Exception e) {
            // deliberately best effort: an unreadable war must not abort the scan
            if (this.getLogger().isDebugEnabled()) {
                this.getLogger().debug("Unable to inspect war " + warFile.getName() + ". " +
                    e.getMessage());
            }
        } finally {
            // always release the file handle held by the ZipFile
            if (war != null) {
                try {
                    war.close();
                } catch (IOException ignored) {
                    // nothing we can do about a failed close
                }
            }
        }
    }

    /**
     * Loads the portlet application from an expanded web application directory.
     * Both WEB-INF/portlet.xml and WEB-INF/web.xml must exist; WEB-INF/coplet.xml
     * is optional.
     *
     * @param baseDir the webapps base directory
     * @param webModule the name of the web application directory
     */
    protected void loadWebApp(String baseDir, String webModule)
    throws Exception {
        final String directory = baseDir + File.separatorChar + webModule + File.separatorChar + "WEB-INF";
        if (this.getLogger().isInfoEnabled()) {
            this.getLogger().info("Searching for portlet application in directory: " + directory);
        }
        // check for the portlet.xml and web.xml. If there is no portlet.xml this is not a
        // portlet application web module. If there is no web.xml this is not a web app.
        final File portletXml = new File(directory + File.separatorChar + "portlet.xml");
        final File webXml = new File(directory + File.separatorChar + "web.xml");
        if (portletXml.exists() && webXml.exists()) {
            if (this.getLogger().isDebugEnabled()) {
                this.getLogger().debug("Loading the following Portlet Applications XML files..." +
                    portletXml +
                    ", " +
                    webXml);
            }
            final InputSource portletSource = new InputSource(new FileInputStream(portletXml));
            portletSource.setSystemId(portletXml.toURL().toExternalForm());
            // web.xml existence is already guaranteed by the guard above
            final InputSource webSource = new InputSource(new FileInputStream(webXml));
            webSource.setSystemId(webXml.toURL().toExternalForm());
            // coplet.xml is optional
            final File copletXml = new File(directory + File.separatorChar + "coplet.xml");
            InputSource copletSource = null;
            if ( copletXml.exists() ) {
                copletSource = new InputSource(new FileInputStream(copletXml));
                copletSource.setSystemId(copletXml.toURL().toExternalForm());
            }
            this.load(portletSource, webSource, copletSource, webModule);
        }
    }

    /**
     * Unmarshals the deployment descriptors via castor, registers the portlet
     * application and - when configured - creates a coplet definition for every
     * portlet found.
     *
     * @param portletXml source of the portlet.xml (required)
     * @param webXml source of the web.xml (byte stream may be null)
     * @param copletXml optional source of the coplet.xml (currently unparsed, see TODO)
     * @param webModule the module name used as the context path segment
     */
    protected void load(InputSource portletXml,
                        InputSource webXml,
                        InputSource copletXml,
                        String webModule)
    throws Exception {
        if (this.getLogger().isDebugEnabled()) {
            this.getLogger().debug("Loading the following Portlet Applications XML files..." +
                portletXml.getSystemId() +
                ", " +
                webXml.getSystemId());
        }
        Unmarshaller unmarshaller = new Unmarshaller(this.mappingPortletXml);
        unmarshaller.setIgnoreExtraElements(true);
        unmarshaller.setEntityResolver(this.entityResolver);
        unmarshaller.setValidation(false);
        PortletApplicationDefinitionImpl portletApp =
            (PortletApplicationDefinitionImpl) unmarshaller.unmarshal(portletXml);
        WebApplicationDefinitionImpl webApp = null;
        if (webXml.getByteStream() != null) {
            this.getLogger().info("Loading web.xml...");
            unmarshaller = new Unmarshaller(this.mappingWebXml);
            unmarshaller.setIgnoreExtraElements(true);
            unmarshaller.setEntityResolver(this.entityResolver);
            unmarshaller.setValidation(false);
            webApp = (WebApplicationDefinitionImpl) unmarshaller.unmarshal(webXml);
            Vector structure = new Vector();
            structure.add(portletApp);
            structure.add("/" + webModule);
            webApp.postLoad(structure);
            // refill structure with necessary information
            webApp.preBuild(structure);
            webApp.postBuild(structure);
        } else {
            this.getLogger().info("No web.xml...");
            Vector structure = new Vector();
            structure.add("/" + webModule);
            structure.add(null);
            structure.add(null);
            portletApp.postLoad(structure);
            portletApp.preBuild(structure);
            portletApp.postBuild(structure);
        }
        this.getLogger().debug("portlet.xml loaded");
        this.registry.add(portletApp);
        if ( this.getLogger().isInfoEnabled() ) {
            this.getLogger().info("Portlet application '" + portletApp.getGUID() + "' added to registry.");
        }
        // fill portletsKeyObjectId and
        // register new coplet data for each portlet
        final Iterator portlets = portletApp.getPortletDefinitionList().iterator();
        while (portlets.hasNext()) {
            final PortletDefinition portlet = (PortletDefinition) portlets.next();
            portletsKeyObjectId.put(portlet.getId(), portlet);
            if (this.contextName.equals(webModule)) {
                ((PortletDefinitionImpl) portlet).setLocalPortlet(true);
            } else if ( portlet.getServletDefinition() == null ) {
                throw new DeploymentException("Unable to deploy portlet '" + portlet.getId() +
                    "'. Servlet definition for '"+WebApplicationRewriter.CONTAINER+"' not found in web.xml.");
            }
            ((PortletDefinitionImpl) portlet).setPortletClassLoader(Thread.currentThread()
                .getContextClassLoader());
            if ( this.getLogger().isInfoEnabled() ) {
                this.getLogger().info("Adding portlet '" + portlet.getId() + "'.");
            }
            if ( this.createCoplets ) {
                // TODO - parse coplet.xml if available
                final CopletType cbd = this.portalService.getProfileManager().getCopletType(this.copletBaseDataName);
                // TODO - check portletId for invalid characters!
                final String defId = StringUtils.replaceChars(portlet.getId().toString(), '.', '_');
                final CopletDefinition cd = this.portalService.getCopletFactory().newInstance(cbd, defId);
                cd.setAttribute("portlet", portlet.getId().toString());
                cd.setAttribute("buffer", Boolean.TRUE);
                if ( this.getLogger().isInfoEnabled() ) {
                    this.getLogger().info("Created coplet data: " + cd.getId());
                }
            }
        }
    }

    /**
     * @see org.apache.cocoon.portal.pluto.om.PortletDefinitionRegistry#getPortletApplicationEntityList()
     */
    public PortletApplicationEntityList getPortletApplicationEntityList() {
        return this.portletApplicationEntities;
    }

    /**
     * @see org.apache.cocoon.portal.pluto.om.PortletDefinitionRegistry#getPortalService()
     */
    public PortalService getPortalService() {
        return this.portalService;
    }

    /**
     * Handles a deployment event for a war file: copies it into the webapps
     * directory, waits for the container to expand it and then loads the new
     * portlet application.
     * @see Receiver
     */
    public void inform(DeploymentEvent event) {
        String fileName = event.getDeploymentObject().getName();
        if (fileName.endsWith(".war")) {
            try {
                File toFile = new File(this.webAppDir, fileName);
                if ( Deployer.deploy(new URL(event.getDeploymentObject().getUri()).openStream(),
                                     toFile.getAbsolutePath(),
                                     this.stripLoggers,
                                     this.getLogger(), this.manager) ) {
                    // let's wait some seconds to give the web container time to
                    // deploy the new web app
                    Thread.sleep(10 * 1000);
                    final String webModule = fileName.substring(0, fileName.length()-4);
                    this.loadWebApp(this.webAppDir, webModule);
                }
                // NOTE(review): status is reported OKAY even when deploy() returned
                // false - confirm whether a "false" result should be STATUS_FAILED.
                event.setStatus(DeploymentStatus.STATUS_OKAY);
            } catch (Exception e) {
                this.getLogger().error("Error during deployment of " + event.getDeploymentObject().getName(), e);
                event.setStatus(DeploymentStatus.STATUS_FAILED);
            }
        }
    }
}
| |
/*
* Copyright (c) 2013 Ramon Servadei
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.fimtra.util;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import com.fimtra.util.FastDateFormat;
/**
* Tests the {@link FastDateFormat}
*
* @author Ramon Servadei
*/
public class FastDateFormatTest
{
    FastDateFormat candidate;

    @Before
    public void setUp() throws Exception
    {
        this.candidate = new FastDateFormat();
    }

    @After
    public void tearDown() throws Exception
    {
    }

    /**
     * Sets the calendar to the given date/time (month passed human-style, Jan=1)
     * and checks the candidate's output against the reference produced by
     * {@link FastDateFormat#formatDateTime}.
     */
    void test(Calendar c, int year, int month, int day, int hours, int mins, int sec, int millis)
    {
        // -1 to the month as we test with Jan=1
        c.set(year, month - 1, day, hours, mins, sec);
        c.set(Calendar.MILLISECOND, millis);
        String result = this.candidate.yyyyMMddHHmmssSSS(c.getTimeInMillis());
        String expected = FastDateFormat.formatDateTime(year * 10000 + month * 100 + day, hours, mins, sec, millis);
        assertEquals(expected, result);
        System.err.println(result);
    }

    @Test
    public void testFormats()
    {
        Calendar c = new GregorianCalendar();
        // set to 2012 Dec 15 @ 05:02:01
        test(c, 2012, 12, 15, 5, 2, 1, 1);
        // test millisecond changes
        test(c, 2012, 12, 15, 5, 2, 1, 297);
        test(c, 2012, 12, 15, 5, 2, 1, 999);
        // test a seconds change but this is only 2 millis later
        test(c, 2012, 12, 15, 5, 2, 2, 1);
        test(c, 2012, 12, 15, 5, 2, 2, 999);
        test(c, 2012, 12, 15, 5, 2, 4, 998);
        // test a minute change
        test(c, 2012, 12, 15, 5, 4, 59, 998);
        test(c, 2012, 12, 15, 5, 5, 0, 998);
        test(c, 2012, 12, 15, 5, 6, 0, 998);
        test(c, 2012, 12, 15, 5, 6, 1, 998);
        test(c, 2012, 12, 15, 5, 8, 0, 998);
        test(c, 2012, 12, 15, 5, 20, 4, 998);
        // test over an hour change
        test(c, 2012, 12, 15, 7, 20, 59, 998);
        test(c, 2012, 12, 15, 8, 20, 1, 998);
        test(c, 2012, 12, 15, 9, 20, 4, 998);
        test(c, 2012, 12, 15, 10, 25, 4, 998);
        // test differences for 1 millisecond that push into the next day
        test(c, 2012, 12, 15, 23, 59, 59, 998);
        test(c, 2012, 12, 15, 23, 59, 59, 999);
        test(c, 2012, 12, 16, 0, 0, 0, 0);
    }

    /**
     * Sets the calendar to 2012 Dec 15 05:02:01.000.
     * <p>
     * Calendar months are 0-based, so {@code Calendar.DECEMBER} must be used here;
     * passing the literal 12 (as previously done) silently rolled the date over to
     * 15 Jan 2013. The millisecond field is also cleared explicitly - otherwise it
     * keeps whatever the current wall-clock leftover was, making the loop start
     * nondeterministic between runs.
     */
    private static void setStart(Calendar c)
    {
        c.set(2012, Calendar.DECEMBER, 15, 5, 2, 1);
        c.set(Calendar.MILLISECOND, 0);
    }

    @Test
    public void testFor24HrsIncrementingBy500ms()
    {
        Calendar c = new GregorianCalendar();
        setStart(c);
        SimpleDateFormat format = new SimpleDateFormat("yyyyMMdd-HH:mm:ss:SSS");
        // test for a full 24hrs ticking at 1/2 second
        int testCount = ((24 * 60 * 60 * 2) + 1);
        final long calTime = c.getTimeInMillis();
        Date date = null;
        for (int i = 0; i < testCount; i++)
        {
            // +i skews the millisecond field on every iteration
            date = new Date(calTime + (i * 500) + i);
            String sdf = format.format(date);
            String fdf = this.candidate.yyyyMMddHHmmssSSS(date.getTime());
            assertEquals("at " + i + ", for " + c.getTime() + ", millis=" + c.getTimeInMillis(), sdf, fdf);
        }
    }

    @Test
    public void testFor24HrsIncrementingBy30secs()
    {
        Calendar c = new GregorianCalendar();
        setStart(c);
        SimpleDateFormat format = new SimpleDateFormat("yyyyMMdd-HH:mm:ss:SSS");
        // test for 24hrs+, increments in 30 secs 800ms and add a millisecond for each loop
        int testCount = ((24 * 60 * 60) + 1);
        final long calTime = c.getTimeInMillis();
        Date date = null;
        for (int i = 0; i < testCount; i++)
        {
            date = new Date(calTime + (i * 30000) + 800 + i);
            String sdf = format.format(date);
            String fdf = this.candidate.yyyyMMddHHmmssSSS(date.getTime());
            assertEquals("at " + i + ", for " + c.getTime() + ", millis=" + c.getTimeInMillis(), sdf, fdf);
        }
    }

    @Test
    public void testFor24HrsIncrementingBy30mins()
    {
        Calendar c = new GregorianCalendar();
        setStart(c);
        SimpleDateFormat format = new SimpleDateFormat("yyyyMMdd-HH:mm:ss:SSS");
        // test for 24hrs+, increments in 30 mins
        int testCount = ((24 * 60 * 60) + 1);
        final long calTime = c.getTimeInMillis();
        Date date = null;
        for (int i = 0; i < testCount; i++)
        {
            date = new Date(calTime + (i * 60000 * 30) + 997 + i);
            String sdf = format.format(date);
            String fdf = this.candidate.yyyyMMddHHmmssSSS(date.getTime());
            assertEquals("at " + i + ", for " + c.getTime() + ", millis=" + c.getTimeInMillis(), sdf, fdf);
        }
    }

    /**
     * Micro-benchmark comparison against SimpleDateFormat.
     * NOTE(review): wall-clock comparisons like this are inherently flaky on a
     * loaded CI machine; kept as-is because the speed claim is the class's
     * raison d'etre.
     */
    @Test
    public void testPerformanceAgainstSimpleDateFormat()
    {
        Calendar c = new GregorianCalendar();
        setStart(c);
        SimpleDateFormat format = new SimpleDateFormat("yyyyMMdd-HH:mm:ss:SSS");
        // perf test, ticking per millis
        int testCount = 100000;
        final long calTime = c.getTimeInMillis();
        Date date = null;
        long now = System.currentTimeMillis();
        for (int i = 0; i < testCount; i++)
        {
            date = new Date(calTime + i);
            format.format(date);
        }
        long sdfTime = System.currentTimeMillis() - now;
        now = System.currentTimeMillis();
        for (int i = 0; i < testCount; i++)
        {
            date = new Date(calTime + i);
            this.candidate.yyyyMMddHHmmssSSS(date.getTime());
        }
        long fdfTime = System.currentTimeMillis() - now;
        System.err.println("SimpleDateFormat took " + sdfTime + ", FastDateFormat took " + fdfTime);
        assertTrue(fdfTime < sdfTime);
    }

    static Calendar cal = new GregorianCalendar();

    /** Reference formatter built from Calendar field extraction (synchronized on the shared calendar). */
    private static String getDateTime(long millis)
    {
        synchronized (cal)
        {
            cal.setTimeInMillis(millis);
            int yyyyMMdd =
                cal.get(Calendar.YEAR) * 10000 + (1 + cal.get(Calendar.MONTH)) * 100 + cal.get(Calendar.DAY_OF_MONTH);
            return FastDateFormat.formatDateTime(yyyyMMdd, cal.get(Calendar.HOUR_OF_DAY), cal.get(Calendar.MINUTE),
                cal.get(Calendar.SECOND), cal.get(Calendar.MILLISECOND));
        }
    }

    /**
     * Micro-benchmark comparison against Calendar-based formatting.
     * NOTE(review): flaky by nature, see testPerformanceAgainstSimpleDateFormat.
     */
    @Test
    public void testPerformanceAgainstCalendarWithFormatting()
    {
        Calendar c = new GregorianCalendar();
        setStart(c);
        // perf test, ticking per millis
        int testCount = 100000;
        final long calTime = c.getTimeInMillis();
        Date date = null;
        long now = System.currentTimeMillis();
        for (int i = 0; i < testCount; i++)
        {
            getDateTime(calTime + i);
        }
        long sdfTime = System.currentTimeMillis() - now;
        now = System.currentTimeMillis();
        for (int i = 0; i < testCount; i++)
        {
            date = new Date(calTime + i);
            this.candidate.yyyyMMddHHmmssSSS(date.getTime());
        }
        long fdfTime = System.currentTimeMillis() - now;
        System.err.println("Calendar with formatting took " + sdfTime + ", FastDateFormat took " + fdfTime);
        assertTrue(fdfTime < sdfTime);
    }
}
| |
package puzzles.hard.skynet2;
import java.util.*;
import java.util.stream.Collectors;
class Solution {
    /** Entry point: wires stdin into the solver and starts the game loop. */
    public static void main(String[] args) {
        final Solver solver = new Solver(new Scanner(System.in));
        solver.solve();
    }
}
class Solver {
final Scanner scanner;
final List<Node> nodes;
/**
 * Reads the static graph description: node count, then link and exit counts,
 * followed by the undirected links and the exit node ids.
 */
Solver(Scanner scanner) {
    this.scanner = scanner;
    final int nodeCount = scanner.nextInt();
    nodes = new ArrayList<>(nodeCount);
    for (int id = 0; id < nodeCount; id++) {
        nodes.add(new Node(id));
    }
    final int linkCount = scanner.nextInt();
    final int exitCount = scanner.nextInt();
    for (int i = 0; i < linkCount; i++) {
        final Node first = nodes.get(scanner.nextInt());
        final Node second = nodes.get(scanner.nextInt());
        // links are undirected: register the neighbour on both ends
        first.adj.add(second);
        second.adj.add(first);
    }
    for (int i = 0; i < exitCount; i++) {
        nodes.get(scanner.nextInt()).exit = true;
    }
}
/**
 * Game loop: each turn reads the agent's position, picks a link to sever
 * (immediate threat first, then dangerous multi-exit nodes, then the closest
 * exit link), prints it and removes it from the graph.
 */
void solve() {
    while (true) {
        final int agentPosition = scanner.nextInt();
        recalculateStepsToSkynet(agentPosition);
        Link cut = killLink();
        if (cut == null) {
            cut = dangerousMultilink(nodes.get(agentPosition));
        }
        if (cut == null) {
            cut = closestLink();
        }
        System.out.println(cut);
        // sever the chosen link on both endpoints
        final Node from = nodes.get(cut.from);
        final Node to = nodes.get(cut.to);
        from.adj.remove(to);
        to.adj.remove(from);
    }
}
/**
 * Breadth-first search from the agent's node, stamping each node with its
 * distance in steps (unreached nodes keep Node.UNREACHEABLE).
 */
void recalculateStepsToSkynet(int skynetNode) {
    for (Node node : nodes) {
        node.stepsToSkynet = Node.UNREACHEABLE;
    }
    final Set<Node> visited = new HashSet<>();
    Queue<Node> frontier = new ArrayDeque<>();
    frontier.add(nodes.get(skynetNode));
    int depth = 0;
    // expand level by level so every node in the current frontier shares a depth
    while (!frontier.isEmpty()) {
        final Set<Node> nextFrontier = new HashSet<>();
        while (!frontier.isEmpty()) {
            final Node current = frontier.poll();
            if (!visited.add(current)) {
                continue; // already stamped at a shallower depth
            }
            current.stepsToSkynet = depth;
            nextFrontier.addAll(current.adj);
        }
        frontier.addAll(nextFrontier);
        depth++;
    }
}
Link killLink() {
List<Node> exitNodes = nodes.stream().filter(node -> node.exit).collect(Collectors.toList());
for (Node exitNode : exitNodes) {
for (Node node : exitNode.adj) {
if (node.stepsToSkynet == 0) {
return new Link(node.id, exitNode.id);
}
}
}
return null;
}
Link dangerousMultilink(Node skynetNode) {
Set<Node> adjs = new HashSet<>();
List<Node> exitNodes = nodes.stream().filter(node -> node.exit).collect(Collectors.toList());
exitNodes.forEach(exitNode -> adjs.addAll(exitNode.adj));
adjs.removeIf(node -> node.adj.stream().filter(nd -> nd.exit).count() < 2);
if (adjs.isEmpty()) return null;
Set<Node> startNodes = skynetNode.adj;
for (Node adj : adjs) {
for (Node startNode : startNodes) {
if (oneTurnPath(startNode, adj)) {
int from = adj.id;
int to = -1;
int minSteps = 10000;
for (Node node : adj.adj) {
if (!node.exit) continue;
if (node.stepsToSkynet < minSteps) {
minSteps = node.stepsToSkynet;
to = node.id;
}
}
return new Link(from, to);
}
}
}
return null;
}
Link closestLink() {
int minSteps = 1000;
int cutFrom = -1;
int cutAdj = -1;
Set<Node> adjs = new HashSet<>();
List<Node> exitNodes = nodes.stream().filter(node -> node.exit).collect(Collectors.toList());
exitNodes.forEach(exitNode -> adjs.addAll(exitNode.adj));
for (Node adj : adjs) {
if (adj.stepsToSkynet < minSteps) {
minSteps = adj.stepsToSkynet;
cutAdj = adj.id;
} else if (adj.stepsToSkynet == minSteps) {
long cutExits = nodes.get(cutAdj).adj.stream().filter(node -> node.exit).count();
long currExits = nodes.get(adj.id).adj.stream().filter(node -> node.exit).count();
if (currExits > cutExits) {
minSteps = adj.stepsToSkynet;
cutAdj = adj.id;
}
}
}
for (Node node : nodes.get(cutAdj).adj) {
if (node.exit) {
cutFrom = node.id;
break;
}
}
return new Link(cutFrom, cutAdj);
}
private boolean oneTurnPath(Node from, Node to) {
Set<Node> visited = new HashSet<>();
Queue<Node> unvisited = new ArrayDeque<>();
unvisited.add(nodes.get(from.id));
while(!unvisited.isEmpty()) {
Node curr = unvisited.poll();
if (curr.equals(to)) return true;
if (visited.contains(curr)) continue;
if (curr.adj.stream().filter(nd -> nd.exit).count() == 0) continue;
visited.add(curr);
unvisited.addAll(curr.adj);
}
return false;
}
}
class Node {
static final int UNREACHEABLE = -1;
final int id;
final Set<Node> adj = new HashSet<>();
boolean exit;
int stepsToSkynet = UNREACHEABLE;
Node(int id) {
this.id = id;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Node node = (Node) o;
return id == node.id;
}
@Override
public int hashCode() {
return id;
}
}
/**
 * An undirected link between two node ids. Rendered as {@code "from to"},
 * which is exactly the output line format the puzzle expects.
 */
class Link {
    final int from;
    final int to;

    Link(int from, int to) {
        this.from = from;
        this.to = to;
    }

    @Override
    public String toString() {
        return String.format("%d %d", from, to);
    }
}
| |
/*
* Copyright 2014 Google Inc. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.cardboard.photosphere;
import android.content.Context;
import android.opengl.GLES20;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.util.ArrayList;
import java.util.List;
/**
* @author Sree Kumar
* <p/>
* Create a Sphere from set of TRIANGLE_STRIPS and apply the 2D texture inside that.
*/
public class Sphere {
    /**
     * Maximum allowed subdivision depth.
     */
    private static final int MAXIMUM_ALLOWED_DEPTH = 5;
    /**
     * Used in vertex strip calculations, related to properties of an icosahedron.
     */
    private static final int VERTEX_MAGIC_NUMBER = 5;
    /**
     * Each vertex is a 3D coordinate (x, y, z).
     */
    private static final int NUM_FLOATS_PER_VERTEX = 3;
    /**
     * Each texture coordinate is a 2D coordinate (u, v).
     */
    private static final int NUM_FLOATS_PER_TEXTURE = 2;
    /**
     * Each vertex is made up of 3 points, x, y, z.
     */
    private static final int AMOUNT_OF_NUMBERS_PER_VERTEX_POINT = 3;
    /**
     * Each texture point is made up of 2 points, x, y (in reference to the texture being a 2D image).
     */
    private static final int AMOUNT_OF_NUMBERS_PER_TEXTURE_POINT = 2;
    /**
     * Buffers holding the vertices, one per strip.
     */
    private final List<FloatBuffer> mVertexBuffer = new ArrayList<FloatBuffer>();
    /**
     * The vertices for the sphere, one array per strip.
     */
    private final List<float[]> mVertices = new ArrayList<float[]>();
    /**
     * Buffers holding the texture coordinates, one per strip.
     */
    private final List<FloatBuffer> mTextureBuffer = new ArrayList<FloatBuffer>();
    /**
     * Mapping texture coordinates for the vertices, one array per strip.
     */
    private final List<float[]> mTexture = new ArrayList<float[]>();
    /**
     * Total number of strips for the given depth.
     */
    private final int mTotalNumStrips;
    // number of coordinates per vertex in this array
    static final int CORDS_PER_VERTEX = 3;
    // Use to access and set the view transformation
    private int mMVPMatrixHandle;
    private int mPositionHandle;
    private int mProgramHandle;
    private int mTextureCoordinateHandle;
    private int mTextureDataHandle0[] = new int[1];
    private final int vertexStride = CORDS_PER_VERTEX * 4; // 4 bytes per float component

    /**
     * Builds the sphere geometry as a set of triangle strips and compiles/links
     * the texture-mapping shader program.
     *
     * @param context Android context used to load the shader sources from raw resources.
     * @param depth   Subdivision depth; clamped to [1, MAXIMUM_ALLOWED_DEPTH].
     * @param radius  Sphere radius in world units.
     */
    public Sphere(final Context context, final int depth, final float radius) {
        // Loading the shader from assets
        final String vertexShader = getVertexShader(context);
        final String fragmentShader = getFragmentShader(context);
        // Compiling the shader
        final int vertexShaderHandle = ShaderHelper.compileShader(
                GLES20.GL_VERTEX_SHADER, vertexShader);
        final int fragmentShaderHandle = ShaderHelper.compileShader(
                GLES20.GL_FRAGMENT_SHADER, fragmentShader);
        // Link the program, binding the attributes used by the shaders.
        mProgramHandle = ShaderHelper.createAndLinkProgram(vertexShaderHandle,
                fragmentShaderHandle, new String[]{"a_Position",
                        "a_TexCoordinate"});
        // Set our per-vertex lighting program.
        GLES20.glUseProgram(mProgramHandle);
        // Clamp depth to the range 1 to MAXIMUM_ALLOWED_DEPTH.
        final int d = Math.max(1, Math.min(MAXIMUM_ALLOWED_DEPTH, depth));
        // Calculate basic values for the sphere.
        this.mTotalNumStrips = Maths.power(2, d - 1) * VERTEX_MAGIC_NUMBER;
        final int numVerticesPerStrip = Maths.power(2, d) * 3;
        final double altitudeStepAngle = Maths.ONE_TWENTY_DEGREES / Maths.power(2, d);
        final double azimuthStepAngle = Maths.THREE_SIXTY_DEGREES / this.mTotalNumStrips;
        double x, y, z, h, altitude, azimuth;
        for (int stripNum = 0; stripNum < this.mTotalNumStrips; stripNum++) {
            // Setup arrays to hold the points for this strip.
            final float[] vertices = new float[numVerticesPerStrip * NUM_FLOATS_PER_VERTEX]; // NOPMD
            final float[] texturePoints = new float[numVerticesPerStrip * NUM_FLOATS_PER_TEXTURE]; // NOPMD
            int vertexPos = 0;
            int texturePos = 0;
            // Calculate position of the first vertex in this strip.
            altitude = Maths.NINETY_DEGREES;
            azimuth = stripNum * azimuthStepAngle;
            // Draw the rest of this strip.
            for (int vertexNum = 0; vertexNum < numVerticesPerStrip; vertexNum += 2) {
                // First point - Vertex (spherical -> Cartesian conversion).
                y = radius * Math.sin(altitude);
                h = radius * Math.cos(altitude);
                z = h * Math.sin(azimuth);
                x = h * Math.cos(azimuth);
                vertices[vertexPos++] = (float) x;
                vertices[vertexPos++] = (float) y;
                vertices[vertexPos++] = (float) z;
                // First point - Texture (u from azimuth, v from altitude).
                texturePoints[texturePos++] = (float) (1 - azimuth / Maths.THREE_SIXTY_DEGREES);
                texturePoints[texturePos++] = (float) (1 - (altitude + Maths.NINETY_DEGREES) / Maths.ONE_EIGHTY_DEGREES);
                // Second point - Vertex.
                altitude -= altitudeStepAngle;
                azimuth -= azimuthStepAngle / 2.0;
                y = radius * Math.sin(altitude);
                h = radius * Math.cos(altitude);
                z = h * Math.sin(azimuth);
                x = h * Math.cos(azimuth);
                vertices[vertexPos++] = (float) x;
                vertices[vertexPos++] = (float) y;
                vertices[vertexPos++] = (float) z;
                // Second point - Texture.
                texturePoints[texturePos++] = (float) (1 - azimuth / Maths.THREE_SIXTY_DEGREES);
                texturePoints[texturePos++] = (float) (1 - (altitude + Maths.NINETY_DEGREES) / Maths.ONE_EIGHTY_DEGREES);
                azimuth += azimuthStepAngle;
            }
            this.mVertices.add(vertices);
            this.mTexture.add(texturePoints);
            // Wrap both arrays into direct native-order buffers for GL upload.
            this.mVertexBuffer.add(toDirectFloatBuffer(vertices));
            this.mTextureBuffer.add(toDirectFloatBuffer(texturePoints));
        }
    }

    /**
     * Copies a float array into a direct, native-byte-order FloatBuffer
     * positioned at 0, as required by glVertexAttribPointer.
     *
     * BUGFIX: the previous code allocated {@code count * Float.SIZE} bytes,
     * but Float.SIZE is the size in BITS (32); allocateDirect takes a byte
     * count, so every buffer was 8x larger than needed.
     */
    private static FloatBuffer toDirectFloatBuffer(final float[] data) {
        final ByteBuffer byteBuffer =
                ByteBuffer.allocateDirect(data.length * (Float.SIZE / Byte.SIZE));
        byteBuffer.order(ByteOrder.nativeOrder());
        final FloatBuffer floatBuffer = byteBuffer.asFloatBuffer();
        floatBuffer.put(data);
        floatBuffer.position(0);
        return floatBuffer;
    }

    /**
     * Loads the static 2D texture for the sphere.
     *
     * @param context    Android context for resource access.
     * @param resourceId Drawable/raw resource id of the texture image.
     */
    public void loadTexture(Context context, int resourceId) {
        // Load the static 2D texture
        mTextureDataHandle0 = TextureHelper.loadTexture(context,
                resourceId);
    }

    /**
     * Releases the GL texture object(s) currently held by this sphere.
     */
    public void deleteCurrentTexture() {
        GLES20.glDeleteTextures(mTextureDataHandle0.length, mTextureDataHandle0, 0);
    }

    /**
     * Draws the sphere, one triangle strip at a time.
     *
     * @param mvpMatrix Combined model-view-projection matrix (column-major, 16 floats).
     */
    public void draw(float[] mvpMatrix) {
        // Look up program handles for this draw.
        mMVPMatrixHandle = GLES20.glGetUniformLocation(mProgramHandle,
                "u_MVPMatrix");
        mPositionHandle = GLES20.glGetAttribLocation(mProgramHandle,
                "a_Position");
        mTextureCoordinateHandle = GLES20.glGetAttribLocation(mProgramHandle,
                "a_TexCoordinate");
        // Cull the outside faces: the texture is viewed from inside the sphere.
        GLES20.glEnable(GLES20.GL_CULL_FACE);
        GLES20.glCullFace(GLES20.GL_FRONT);
        GLES20.glFrontFace(GLES20.GL_CW);
        GLES20.glActiveTexture(GLES20.GL_TEXTURE1);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureDataHandle0[0]);
        // NOTE(review): this passes the a_TexCoordinate ATTRIBUTE location to
        // glUniform1i, and sets unit 0 while GL_TEXTURE1 is active above. The
        // sampler uniform's location (and matching unit index) should be used
        // instead — confirm against the fragment shader's sampler name.
        GLES20.glUniform1i(mTextureCoordinateHandle, 0);
        for (int i = 0; i < this.mTotalNumStrips; i++) {
            // Prepare the triangle coordinate data
            GLES20.glVertexAttribPointer(mPositionHandle, CORDS_PER_VERTEX,
                    GLES20.GL_FLOAT, false,
                    vertexStride, mVertexBuffer.get(i));
            // Enable a handle to the triangle vertices
            GLES20.glEnableVertexAttribArray(mPositionHandle);
            GLES20.glVertexAttribPointer(mTextureCoordinateHandle,
                    AMOUNT_OF_NUMBERS_PER_TEXTURE_POINT, GLES20.GL_FLOAT, false, 0,
                    mTextureBuffer.get(i));
            GLES20.glEnableVertexAttribArray(mTextureCoordinateHandle);
            // Pass the projection and view transformation to the shader
            GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mvpMatrix, 0);
            // Draw the strip
            GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, this.mVertices.get(i).length / AMOUNT_OF_NUMBERS_PER_VERTEX_POINT);
        }
        // Disable vertex array
        GLES20.glDisableVertexAttribArray(mPositionHandle);
        GLES20.glDisable(GLES20.GL_CULL_FACE);
    }

    /**
     * Reads the vertex shader source from raw resources.
     */
    protected String getVertexShader(Context context) {
        return RawResourceReader.readTextFileFromRawResource(context,
                R.raw._vertex_shader);
    }

    /**
     * Reads the fragment shader source from raw resources.
     */
    protected String getFragmentShader(Context context) {
        return RawResourceReader.readTextFileFromRawResource(context,
                R.raw._fragment_shader);
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.geronimo.osgi.locator;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Enumeration;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Properties;
import java.util.Set;
// Via great attention to detail, all of the imports below
// are optional and the related classes are not loaded unless
// the specified conditions are met.
// COND: running in an OSGi environment and the Activator has been activated
import org.osgi.framework.BundleContext;
import org.osgi.util.tracker.ServiceTracker;
// COND: the above + the geronimo-osgi-registry is installed and visible
import org.apache.geronimo.osgi.registry.api.ProviderRegistry;
// NB in comments is Nota Bene (note well)
// http://en.wikipedia.org/wiki/Nota_bene
public class ProviderLocator {
    // our bundle context
    static private BundleContext context;
    // a service tracker for the registry service
    // NB: This is declared as just Object to avoid classloading issues if we're running
    // outside of an OSGi environment.
    static private Object registryTracker;
    private ProviderLocator() {
        // private constructor to prevent an instance from getting created.
    }
    /**
     * initialize the tracker statics for this bundle
     *
     * @param c The startup BundleContext.
     */
    public static void init(BundleContext c) {
        try {
            // just create a tracker for our lookup service
            // NB: We use the hard coded name in case the registry service has not
            // been started first. The ServiceTracker itself only uses the string name.
            // We need to avoid trying to load the ProviderRegistry interface until the
            // registry tracker returns a non-null service instance.
            registryTracker = new ServiceTracker(c, "org.apache.geronimo.osgi.registry.api.ProviderRegistry", null);
            ((ServiceTracker)registryTracker).open();
            // do this last...it helps indicate if we have an initialized registry.
            context = c;
        } catch (Throwable e) {
            // It is expected that the ServiceTracker constructor will fail if the
            // ProviderRegistry class cannot be loaded.
            // if there were any errors, then the registry is not available.
            registryTracker = null;
        }
    }
    /**
     * Cleanup resources on bundle shutdown.
     */
    public static void destroy() {
        if (registryTracker != null) {
            // shutdown our tracking of the provider registry.
            ((ServiceTracker)registryTracker).close();
            registryTracker = null;
        }
    }
    /**
     * Locate a class by its provider id indicator.
     *
     * @param providerId The provider id (generally, a fully qualified class name).
     *
     * @return The Class corresponding to this provider id.  Returns null
     *         if this is not registered or the indicated class can't be
     *         loaded.
     */
    static public Class<?> locate(String providerId) {
        Object registry = getRegistry();
        // if no registry service available, this is a failure
        if (registry == null) {
            return null;
        }
        // get the service, if it exists.  NB, if there is a service object,
        // then the extender and the interface class are available, so this cast should be
        // safe now.
        // the rest of the work is done by the registry
        return ((ProviderRegistry)registry).locate(providerId);
    }
    /**
     * Locate all class files that match a given factory id.
     *
     * @param providerId The target provider identifier.
     *
     * @return A List containing the class objects corresponding to the
     *         provider identifier.  Returns an empty list if no
     *         matching classes can be located.
     */
    static public List<Class<?>> locateAll(String providerId) {
        Object registry = getRegistry();
        // if no registry service available, this is a failure
        if (registry == null) {
            return new ArrayList<Class<?>>();
        }
        // get the service, if it exists.  NB, if there is a service object,
        // then the extender and the interface class are available, so this cast should be
        // safe now.
        // the rest of the work is done by the registry
        return ((ProviderRegistry)registry).locateAll(providerId);
    }
    /**
     * Utility class for locating a class with OSGi registry
     * support.  Uses the thread context classloader as part of
     * the search order.
     *
     * @param className The name of the target class.
     *
     * @return The loaded class.
     * @exception ClassNotFoundException
     *                Thrown if the class cannot be located.
     */
    static public Class<?> loadClass(String className) throws ClassNotFoundException {
        return loadClass(className, null, Thread.currentThread().getContextClassLoader());
    }
    /**
     * Utility class for locating a class with OSGi registry
     * support.  Uses the thread context classloader as part of
     * the search order.
     *
     * @param className The name of the target class.
     * @param contextClass An optional class whose classloader is added to the
     *                search order (may be null).
     *
     * @return The loaded class.
     * @exception ClassNotFoundException
     *                Thrown if the class cannot be located.
     */
    static public Class<?> loadClass(String className, Class<?> contextClass) throws ClassNotFoundException {
        return loadClass(className, contextClass, Thread.currentThread().getContextClassLoader());
    }
    /**
     * Standardized utility method for performing class lookups
     * with support for OSGi registry lookups.
     *
     * Search order: the OSGi provider registry, then the supplied loader,
     * then (via Class.forName) the contextClass's loader when given.
     *
     * @param className The name of the target class.
     * @param contextClass An optional class whose classloader is used as the
     *                final fallback (may be null).
     * @param loader An optional class loader.
     *
     * @return The loaded class
     * @exception ClassNotFoundException
     *                Thrown if the class cannot be loaded.
     */
    static public Class<?> loadClass(String className, Class<?>contextClass, ClassLoader loader) throws ClassNotFoundException {
        // ideally, this should be last.  However, some of the bundles duplicate classes
        // found on the boot delegation, so we need to check this first to keep
        // from picking up one of the default implementations.
        Class<?> cls = locate(className);
        if (cls != null) {
            return cls;
        }
        if (loader != null) {
            try {
                return loader.loadClass(className);
            } catch (ClassNotFoundException x) {
                // fall through and try the remaining loaders.
            }
        }
        if (contextClass != null) {
            loader = contextClass.getClassLoader();
        }
        // try again using the class context loader
        return Class.forName(className, true, loader);
    }
    /**
     * Get a single service instance that matches an interface
     * definition.
     *
     * @param iface  The name of the required interface.
     * @param contextClass
     *               The class requesting the lookup (used for class resolution).
     * @param loader A class loader to use for searching for service definitions
     *               and loading classes.
     *
     * @return The service instance, or null if no matching services
     *         can be found.
     * @exception Exception Thrown for any classloading or exceptions thrown
     *                      trying to instantiate a service instance.
     */
    static public Object getService(String iface, Class<?> contextClass, ClassLoader loader) throws Exception {
        // if we are working in an OSGi environment, then process the service
        // registry first.  Ideally, we would do this last, but because of boot delegation
        // issues with some API implementations, we must try the OSGi version first
        Object registry = getRegistry();
        if (registry != null) {
            // get the service, if it exists.  NB, if there is a service object,
            // then the extender and the interface class are available, so this cast should be
            // safe now.
            // the rest of the work is done by the registry
            Object service = ((ProviderRegistry)registry).getService(iface);
            if (service != null) {
                return service;
            }
        }
        // try for a classpath locatable instance next.  If we find an appropriate class mapping,
        // create an instance and return it.
        Class<?> cls = locateServiceClass(iface, contextClass, loader);
        if (cls != null) {
            return cls.newInstance();
        }
        // a provider was not found
        return null;
    }
    /**
     * Locate a service class that matches an interface
     * definition.
     *
     * @param iface  The name of the required interface.
     * @param contextClass
     *               The class requesting the lookup (used for class resolution).
     * @param loader A class loader to use for searching for service definitions
     *               and loading classes.
     *
     * @return The located class, or null if no matching services
     *         can be found.
     * @exception ClassNotFoundException Thrown for any classloading exceptions thrown
     *                      trying to load the class.
     */
    static public Class<?> getServiceClass(String iface, Class<?> contextClass, ClassLoader loader) throws ClassNotFoundException {
        // if we are working in an OSGi environment, then process the service
        // registry first.  Ideally, we would do this last, but because of boot delegation
        // issues with some API implementations, we must try the OSGi version first
        Object registry = getRegistry();
        if (registry != null) {
            // get the service, if it exists.  NB, if there is a service object,
            // then the extender and the interface class are available, so this cast should be
            // safe now.
            // If we've located stuff in the registry, then return it
            Class<?> cls = ((ProviderRegistry)registry).getServiceClass(iface);
            if (cls != null) {
                return cls;
            }
        }
        // try for a classpath locatable instance first.  If we find an appropriate class mapping,
        // create an instance and return it.
        return locateServiceClass(iface, contextClass, loader);
    }
    /**
     * Get a list of services that match a given interface
     * name.  This searches both the current class path and
     * the global repository for matches.
     *
     * @param iface  The name of the required interface.
     * @param contextClass
     *               The class requesting the lookup (used for class resolution).
     * @param loader A class loader to use for searching for service definitions
     *               and loading classes.
     *
     * @return A list of matching services.  Returns an empty list if there
     *         are no matches.
     * @exception Exception Thrown for any classloading or exceptions thrown
     *                      trying to instantiate a service instance.
     */
    static public List<Object> getServices(String iface, Class<?> contextClass, ClassLoader loader) throws Exception {
        List<Object> services = new ArrayList<Object>();
        // because of boot delegation issues with some of the API implementations, it is necessary
        // to process the OSGi registered versions first to allow override of JRE provided APIs.
        Object registry = getRegistry();
        if (registry != null) {
            // get the service, if it exists.  NB, if there is a service object,
            // then the extender and the interface class are available, so this cast should be
            // safe now.
            // get any registered service instances now
            List<Object> globalServices = ((ProviderRegistry)registry).getServices(iface);
            // add to our list also
            if (globalServices != null) {
                services.addAll(globalServices);
            }
        }
        // try for a classpath locatable instance second.  If we find an appropriate class mapping,
        // create an instance and return it.
        Collection<Class<?>> classes = locateServiceClasses(iface, contextClass, loader);
        if (classes != null) {
            // create an instance of each of these classes
            for (Class<?> cls : classes) {
                services.add(cls.newInstance());
            }
        }
        // now return the merged set
        return services;
    }
    /**
     * Get a list of service class implementations that match
     * an interface name.  This searches both the current class path and
     * the global repository for matches.
     *
     * @param iface  The name of the required interface.
     * @param contextClass
     *               The class requesting the lookup (used for class resolution).
     * @param loader A class loader to use for searching for service definitions
     *               and loading classes.
     *
     * @return A list of matching provider classes.  Returns an empty list if there
     *         are no matches.
     * @exception Exception Thrown for any classloading exceptions thrown
     *                      trying to load a provider class.
     */
    static public List<Class<?>> getServiceClasses(String iface, Class<?> contextClass, ClassLoader loader) throws Exception {
        Set<Class<?>> serviceClasses = new LinkedHashSet<Class<?>>();
        // because of boot delegation issues with some of the API implementations, it is necessary
        // to process the OSGi registered versions first to allow override of JRE provided APIs.
        Object registry = getRegistry();
        if (registry != null) {
            // get the service, if it exists.  NB, if there is a service object,
            // then the extender and the interface class are available, so this cast should be
            // safe now.
            // get any registered service provider classes now
            List<Class<?>> globalServices = ((ProviderRegistry)registry).getServiceClasses(iface);
            // add to our list also
            if (globalServices != null) {
                serviceClasses.addAll(globalServices);
            }
        }
        // try for a classpath locatable classes second.  If we find an appropriate class mapping,
        // add this to our return collection.
        Collection<Class<?>> classes = locateServiceClasses(iface, contextClass, loader);
        if (classes != null) {
            serviceClasses.addAll(classes);
        }
        // now return the merged set (typed list rather than a raw ArrayList)
        return new ArrayList<Class<?>>(serviceClasses);
    }
    /**
     * Locate the first class name for a META-INF/services definition
     * of a given class.  The first matching provider is
     * returned.
     *
     * @param iface  The interface class name used for the match.
     * @param contextClass An optional class whose loader is also searched (may be null).
     * @param loader The classloader for locating resources.
     *
     * @return The mapped provider name, if found.  Returns null if
     *         no mapping is located.
     */
    static private String locateServiceClassName(String iface, Class<?> contextClass, ClassLoader loader) {
        // search first with the loader class path
        String name = locateServiceClassName(iface, loader);
        if (name != null) {
            return name;
        }
        // then with the context class, if there is one
        if (contextClass != null) {
            name = locateServiceClassName(iface, contextClass.getClassLoader());
            if (name != null) {
                return name;
            }
        }
        // not found
        return null;
    }
    /**
     * Locate a classpath-defined service mapping.
     *
     * @param iface  The required interface name.
     * @param loader The ClassLoader instance to use to locate the service.
     *
     * @return The mapped class name, if one is found.  Returns null if the
     *         mapping is not located.
     */
    static private String locateServiceClassName(String iface, ClassLoader loader) {
        if (loader != null) {
            try {
                // we only look at resources that match the file name, using the specified loader
                String service = "META-INF/services/" + iface;
                Enumeration<URL> providers = loader.getResources(service);
                while (providers.hasMoreElements()) {
                    List<String>providerNames = parseServiceDefinition(providers.nextElement());
                    // if there is something defined here, return the first entry
                    if (!providerNames.isEmpty()) {
                        return providerNames.get(0);
                    }
                }
            } catch (IOException e) {
                // deliberately ignored: a lookup failure just means "not found"
            }
        }
        // not found
        return null;
    }
    /**
     * Locate the first class for a META-INF/services definition
     * of a given interface class.  The first matching provider is
     * returned.
     *
     * @param iface  The interface class name used for the match.
     * @param contextClass An optional class used for class resolution (may be null).
     * @param loader The classloader for locating resources.
     *
     * @return The mapped provider class, if found.  Returns null if
     *         no mapping is located.
     * @exception ClassNotFoundException Thrown if a mapped name cannot be loaded.
     */
    static private Class<?> locateServiceClass(String iface, Class<?> contextClass, ClassLoader loader) throws ClassNotFoundException {
        String className = locateServiceClassName(iface, contextClass, loader);
        if (className == null) {
            return null;
        }
        // we found a name, try loading the class.  This will throw an exception if there is an error
        return loadClass(className, contextClass, loader);
    }
    /**
     * Locate all class names for a META-INF/services definition
     * of a given class.
     *
     * @param iface  The interface class name used for the match.
     * @param contextClass An optional class whose loader is also searched (may be null).
     * @param loader The classloader for locating resources.
     *
     * @return The set of mapped provider names (may be empty, never null).
     */
    static private Collection<String> locateServiceClassNames(String iface, Class<?> contextClass, ClassLoader loader) {
        Set<String> names = new LinkedHashSet<String>();
        locateServiceClassNames(iface, loader, names);
        if (contextClass != null) {
            locateServiceClassNames(iface, contextClass.getClassLoader(), names);
        }
        return names;
    }
    /**
     * Accumulate all class names from a META-INF/services definition
     * of a given class into the supplied set.
     *
     * @param iface  The interface class name used for the match.
     * @param loader The classloader for locating resources.
     * @param names  The (typed) accumulator the provider names are added to.
     */
    static void locateServiceClassNames(String iface, ClassLoader loader, Set<String> names) {
        if (loader != null) {
            try {
                // we only look at resources that match the file name, using the specified loader
                String service = "META-INF/services/" + iface;
                Enumeration<URL> providers = loader.getResources(service);
                while (providers.hasMoreElements()) {
                    List<String>providerNames = parseServiceDefinition(providers.nextElement());
                    // just add all of these to the list
                    names.addAll(providerNames);
                }
            } catch (IOException e) {
                // deliberately ignored: a lookup failure just means "not found"
            }
        }
    }
    /**
     * Locate all classes that map to a given provider class definition.  This will
     * search both the services directories, as well as the provider classes from the
     * OSGi provider registry.
     *
     * @param iface  The interface class name used for the match.
     * @param contextClass An optional class used for class resolution (may be null).
     * @param loader The classloader for locating resources.
     *
     * @return A list of all mapped classes, if found.  Returns an empty list if
     *         no mappings are found.
     * @exception ClassNotFoundException Thrown if a mapped name cannot be loaded.
     */
    static private Collection<Class<?>> locateServiceClasses(String iface, Class<?> contextClass, ClassLoader loader) throws ClassNotFoundException {
        // get the set of names from services definitions on the classpath
        Collection<String> classNames = locateServiceClassNames(iface, contextClass, loader);
        Set<Class<?>> classes = new LinkedHashSet<Class<?>>();
        // load each class and add to our return set
        for (String name : classNames) {
            classes.add(loadClass(name, contextClass, loader));
        }
        return classes;
    }
    /**
     * Parse a definition file and return the names of all included implementation classes
     * contained within the file.
     *
     * @param u The URL of the file
     *
     * @return A list of all matching classes.  Returns an empty list
     *         if no matches are found.
     */
    static private List<String> parseServiceDefinition(URL u) {
        final String url = u.toString();
        List<String> classes = new ArrayList<String>();
        // ignore directories
        if (url.endsWith("/")) {
            return classes;
        }
        BufferedReader br = null;
        try {
            br = new BufferedReader(new InputStreamReader(u.openStream(), "UTF-8"));
            // the file can be multiple lines long, with comments.  A single file can define multiple providers
            // for a single key, so we might need to create multiple entries.
            String line = br.readLine();
            while (line != null) {
                // we allow comments on these lines, and a line can be all comment
                int comment = line.indexOf('#');
                if (comment != -1) {
                    line = line.substring(0, comment);
                }
                line = line.trim();
                // if there is nothing left on the line after stripping white space and comments, skip this
                if (line.length() > 0) {
                    // add this to our list
                    classes.add(line);
                }
                // keep reading until the end.
                line = br.readLine();
            }
        } catch (IOException e) {
            // ignore errors and handle as default (empty/partial list)
        } finally {
            // BUGFIX: close the reader on all paths, not just success,
            // so the stream is not leaked when an IOException occurs mid-read.
            if (br != null) {
                try {
                    br.close();
                } catch (IOException e) {
                    // best-effort close
                }
            }
        }
        return classes;
    }
    /**
     * Perform a service class discovery by looking for a
     * property in a target properties file located in the
     * java.home directory.
     *
     * @param path     The relative path to the desired properties file.
     * @param property The name of the required property.
     *
     * @return The value of the named property within the properties file.  Returns
     *         null if the property doesn't exist or the properties file doesn't exist.
     */
    public static String lookupByJREPropertyFile(String path, String property) throws IOException {
        String jreDirectory = System.getProperty("java.home");
        File configurationFile = new File(jreDirectory + File.separator + path);
        if (configurationFile.exists() && configurationFile.canRead()) {
            Properties properties = new Properties();
            InputStream in = null;
            try {
                in = new FileInputStream(configurationFile);
                properties.load(in);
                return properties.getProperty(property);
            } finally {
                if (in != null) {
                    try {
                        in.close();
                    } catch (Exception e) {
                        // best-effort close
                    }
                }
            }
        }
        return null;
    }
    /**
     * Retrieve the registry from the tracker if it is available,
     * all without causing the interface class to load.
     *
     * @return The registry service instance, or null if it is not
     *         available for any reason.
     */
    private static Object getRegistry() {
        // if not initialized in an OSGi environment, this is a failure
        if (registryTracker == null) {
            return null;
        }
        // get the service, if it exists.  NB:  it is only safe to reference the
        // interface class if the tracker returns a non-null service object.  The
        // interface class will not be loaded in our bundle context until the
        // service class can be satisfied.  Therefore, we always return this as
        // just an object and the call needs to perform the cast, which will
        // force the classload at that time.
        return ((ServiceTracker)registryTracker).getService();
    }
}
| |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Created by IntelliJ IDEA.
* User: mike
* Date: Jun 7, 2002
* Time: 8:30:35 PM
* To change template for new class use
* Code Style | Class Templates options (Tools | IDE Options).
*/
package com.intellij;
import com.intellij.idea.Bombed;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.testFramework.PlatformTestUtil;
import com.intellij.testFramework.TestRunnerUtil;
import com.intellij.util.containers.MultiMap;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
import java.io.IOException;
import java.io.InputStreamReader;
import java.lang.reflect.AnnotatedElement;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.net.URL;
import java.util.*;
@SuppressWarnings({"HardCodedStringLiteral", "UseOfSystemOutOrSystemErr", "CallToPrintStackTrace", "TestOnlyProblems"})
public class TestCaseLoader {
    public static final String TARGET_TEST_GROUP = "idea.test.group";
    public static final String TARGET_TEST_PATTERNS = "idea.test.patterns";
    public static final String PERFORMANCE_TESTS_ONLY_FLAG = "idea.performance.tests";
    public static final String INCLUDE_PERFORMANCE_TESTS_FLAG = "idea.include.performance.tests";
    public static final String INCLUDE_UNCONVENTIONALLY_NAMED_TESTS_FLAG = "idea.include.unconventionally.named.tests";
    public static final String SKIP_COMMUNITY_TESTS = "idea.skip.community.tests";

    /** Accepted test classes in discovery order (pinned first/last classes are held separately). */
    private final List<Class> myClassList = new ArrayList<Class>();
    /** Failures from {@link #loadTestCases}, surfaced through {@link #getClassLoadingErrors()}. */
    private final List<Throwable> myClassLoadingErrors = new ArrayList<Throwable>();
    private Class myFirstTestClass;
    private Class myLastTestClass;
    private final TestClassesFilter myTestClassesFilter;
    private final boolean myForceLoadPerformanceTests;

    public TestCaseLoader(String classFilterName) {
        this(classFilterName, false);
    }

    /**
     * @param classFilterName classpath resource name of the test-group definition file; may be empty.
     * @param forceLoadPerformanceTests when true, performance tests are loaded regardless of system flags.
     */
    public TestCaseLoader(String classFilterName, boolean forceLoadPerformanceTests) {
        myForceLoadPerformanceTests = forceLoadPerformanceTests;
        // Explicit class-name patterns take precedence over group-based filtering.
        String patterns = System.getProperty(TARGET_TEST_PATTERNS);
        if (patterns != null) {
            myTestClassesFilter = new PatternListTestClassFilter(StringUtil.split(patterns, ";"));
            System.out.println("Using patterns: [" + patterns + "]");
        }
        else {
            List<URL> groupingFileUrls = Collections.emptyList();
            if (!StringUtil.isEmpty(classFilterName)) {
                try {
                    groupingFileUrls = Collections.list(getClass().getClassLoader().getResources(classFilterName));
                }
                catch (IOException e) {
                    e.printStackTrace();
                }
            }
            List<String> testGroupNames = StringUtil.split(System.getProperty(TARGET_TEST_GROUP, "").trim(), ";");
            MultiMap<String, String> groups = MultiMap.createLinked();
            // Merge group definitions from every matching resource on the classpath.
            for (URL fileUrl : groupingFileUrls) {
                try {
                    InputStreamReader reader = new InputStreamReader(fileUrl.openStream());
                    try {
                        groups.putAllValues(GroupBasedTestClassFilter.readGroups(reader));
                    }
                    finally {
                        reader.close();
                    }
                }
                catch (IOException e) {
                    e.printStackTrace();
                    System.err.println("Failed to load test groups from " + fileUrl);
                }
            }
            if (groups.isEmpty()) {
                System.out.println("Using all classes");
                myTestClassesFilter = TestClassesFilter.ALL_CLASSES;
            }
            else {
                System.out.println("Using test groups: " + testGroupNames);
                myTestClassesFilter = new GroupBasedTestClassFilter(groups, testGroupNames);
            }
        }
    }

    /** Adds the class to the suite if it is a runnable test accepted by all filters. */
    void addClassIfTestCase(Class testCaseClass, String moduleName) {
        if (shouldAddTestCase(testCaseClass, moduleName, true) &&
            testCaseClass != myFirstTestClass && testCaseClass != myLastTestClass &&
            PlatformTestUtil.canRunTest(testCaseClass)) {
            myClassList.add(testCaseClass);
        }
    }

    /** Pins a test class to run before all others. May be called at most once. */
    void addFirstTest(Class aClass) {
        assert myFirstTestClass == null : "already added: " + aClass;
        assert shouldAddTestCase(aClass, null, false) : "not a test: " + aClass;
        myFirstTestClass = aClass;
    }

    /** Pins a test class to run after all others. May be called at most once. */
    void addLastTest(Class aClass) {
        assert myLastTestClass == null : "already added: " + aClass;
        assert shouldAddTestCase(aClass, null, false) : "not a test: " + aClass;
        myLastTestClass = aClass;
    }

    /**
     * Returns true if the class is a runnable test: a JUnit 3 TestCase/TestSuite,
     * a class with a static {@code suite()} method returning a {@link Test},
     * or a JUnit 4 test class.
     */
    private boolean shouldAddTestCase(final Class<?> testCaseClass, String moduleName, boolean testForExcluded) {
        // Modifier.isAbstract is clearer than masking the modifier bits by hand.
        if (Modifier.isAbstract(testCaseClass.getModifiers())) return false;
        if (testForExcluded && shouldExcludeTestClass(moduleName, testCaseClass)) return false;
        if (TestCase.class.isAssignableFrom(testCaseClass) || TestSuite.class.isAssignableFrom(testCaseClass)) {
            return true;
        }
        try {
            final Method suiteMethod = testCaseClass.getMethod("suite");
            if (Test.class.isAssignableFrom(suiteMethod.getReturnType()) && Modifier.isStatic(suiteMethod.getModifiers())) {
                return true;
            }
        }
        catch (NoSuchMethodException ignored) { }
        return TestRunnerUtil.isJUnit4TestClass(testCaseClass);
    }

    /** Returns true if the class must not be run: filtered out, bombed, or a skipped performance test. */
    private boolean shouldExcludeTestClass(String moduleName, Class testCaseClass) {
        if (!myForceLoadPerformanceTests && !TestAll.shouldIncludePerformanceTestCase(testCaseClass)) return true;
        String className = testCaseClass.getName();
        return !myTestClassesFilter.matches(className, moduleName) || isBombed(testCaseClass);
    }

    /** Returns true if the element carries a {@link Bombed} annotation whose bomb has not yet exploded. */
    public static boolean isBombed(final AnnotatedElement element) {
        final Bombed bombedAnnotation = element.getAnnotation(Bombed.class);
        if (bombedAnnotation == null) return false;
        return !PlatformTestUtil.bombExplodes(bombedAnnotation);
    }

    /**
     * Loads every named class (without initializing it) and registers those that are tests.
     * Loading failures are recorded and can be retrieved via {@link #getClassLoadingErrors()}.
     */
    public void loadTestCases(final String moduleName, final Collection<String> classNamesIterator) {
        for (String className : classNamesIterator) {
            try {
                // 'false': do not run static initializers just to inspect the class.
                Class candidateClass = Class.forName(className, false, getClass().getClassLoader());
                addClassIfTestCase(candidateClass, moduleName);
            }
            catch (Throwable e) {
                String message = "Cannot load class " + className + ": " + e.getMessage();
                System.err.println(message);
                myClassLoadingErrors.add(new Throwable(message, e));
            }
        }
    }

    public List<Throwable> getClassLoadingErrors() {
        return myClassLoadingErrors;
    }

    /** Recently-failed tests reported by TeamCity; used to order them first. */
    private static final List<String> ourRankList = getTeamCityRankList();

    private static List<String> getTeamCityRankList() {
        String filePath = System.getProperty("teamcity.tests.recentlyFailedTests.file", null);
        if (filePath != null) {
            try {
                return FileUtil.loadLines(filePath);
            }
            catch (IOException ignored) { }
        }
        return Collections.emptyList();
    }

    /**
     * Rank used to order classes: the pinned first class sorts before everything,
     * the pinned last class after everything, recently failed tests (by their
     * position in the TeamCity list) before the remaining classes.
     */
    private int getRank(Class aClass) {
        final String name = aClass.getName();
        if (aClass == myFirstTestClass) return -1;
        if (aClass == myLastTestClass) return myClassList.size() + ourRankList.size();
        int i = ourRankList.indexOf(name);
        if (i != -1) {
            return i;
        }
        return ourRankList.size();
    }

    /** Returns the classes to run, ordered by {@link #getRank} when a TeamCity rank list exists. */
    public List<Class> getClasses() {
        List<Class> result = new ArrayList<Class>(myClassList.size());
        if (myFirstTestClass != null) {
            result.add(myFirstTestClass);
        }
        result.addAll(myClassList);
        if (myLastTestClass != null) {
            result.add(myLastTestClass);
        }
        if (!ourRankList.isEmpty()) {
            Collections.sort(result, new Comparator<Class>() {
                @Override
                public int compare(final Class o1, final Class o2) {
                    // Integer.compare avoids the overflow risk of subtracting ranks.
                    return Integer.compare(getRank(o1), getRank(o2));
                }
            });
        }
        return result;
    }

    public void clearClasses() {
        myClassList.clear();
    }
}
| |
package com.owera.common.db;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.EmptyStackException;
import java.util.HashMap;
import java.util.Map;
import com.owera.common.log.Logger;
import com.owera.common.util.PropertyReader;
/**
* This connection provider can offer
* - connection pooling
* - specify max connections
* - specify max age
* - throw away all connection involved in SQLExceptions
* - connection meta data
* - count successful accesses
* - count rejected accesses
* - count simultaneous accesses
* - count free connections/used connections
* - multiple database connections (connect to several different db in the same runtime/JVM)
* - logging of all events and debug-logging
* - possible to decide to run without autocommit=true
* - tested and works fine on MySQL. Should work on any Database supporting JDBC.
*
* @author morten
*/
public class ConnectionProvider {

    private static final Logger log = new Logger();

    /** One pool per distinct connection configuration, keyed by the properties' string form. */
    private static Map<String, ConnectionPoolData> poolMap = new HashMap<String, ConnectionPoolData>();
    /** Reverse lookup from a handed-out connection to the pool it belongs to. */
    private static Map<Connection, ConnectionPoolData> connMap = new HashMap<Connection, ConnectionPoolData>();

    public static ConnectionPoolData getConnectionPoolData(ConnectionProperties props) {
        return poolMap.get(props.toString());
    }

    /**
     * Use this method if you want to control the autoCommit feature of the
     * connection. IMPORTANT! If you use this method, you should make sure that
     * NO ONE ELSE (e.g. another app/user/whatever) is reusing the same
     * connection without going through this method.
     */
    public static Connection getConnection(ConnectionProperties props, boolean autoCommit) throws SQLException, NoAvailableConnectionException {
        Connection c = getConnection(props);
        // A connection without transaction support cannot change autocommit mode.
        if (c.getTransactionIsolation() == Connection.TRANSACTION_NONE)
            return c;
        // Only touch the driver when the current mode differs from the requested one.
        if (c.getAutoCommit() != autoCommit)
            c.setAutoCommit(autoCommit);
        return c;
    }

    /**
     * Retrieves a pooled connection, retrying every 500 ms for up to 10 seconds
     * if the pool is exhausted.
     *
     * @throws NoAvailableConnectionException if the pool is still exhausted after the timeout.
     * @throws SQLException if creating a brand new connection fails.
     */
    public static Connection getConnection(ConnectionProperties props) throws SQLException, NoAvailableConnectionException {
        long start = System.currentTimeMillis();
        NoAvailableConnectionException throwNace = null;
        while (System.currentTimeMillis() - start < 10000) {
            try {
                Connection c = getConnectionImpl(props);
                if (log.isDebugEnabled())
                    log.debug("Connection returned in " + (System.currentTimeMillis() - start) + " ms");
                return c;
            } catch (NoAvailableConnectionException nace) {
                throwNace = nace;
                try {
                    log.warn("Reached connection limit (" + props.getMaxConn() + ") for connection towards " + props.getUrl() + ", will wait 500 ms and retry");
                    Thread.sleep(500);
                } catch (InterruptedException e) {
                    // Restore the interrupt flag so callers further up can observe
                    // it; we keep retrying until the 10 second budget runs out.
                    Thread.currentThread().interrupt();
                }
            }
        }
        ConnectionPoolData cpd = poolMap.get(props.toString());
        cpd.getMetaData().incDenied();
        log.error("No more available connections - retried " + cpd.getMetaData().getRetries() + " times, total " + (System.currentTimeMillis() - start) + " ms");
        throw throwNace;
    }

    /**
     * The main workhorse of this class. Retrieves a connection. Checks whether
     * the maximum number of connections is reached (maxConn), and in that case
     * throws an exception. Also resets a timestamp each time a connection is
     * handed out, and puts that information in the usedConn-map.
     */
    private static synchronized Connection getConnectionImpl(ConnectionProperties props) throws SQLException, NoAvailableConnectionException {
        if (poolMap.get(props.toString()) == null)
            poolMap.put(props.toString(), new ConnectionPoolData(props));
        ConnectionPoolData cpd = poolMap.get(props.toString());
        // Restart the cleanup task if it has run once and since died.
        if (cpd.getCleanup().isBorn() && cpd.getCleanup().isDead()) {
            cpd.startCleanup();
        }
        if (cpd.getFreeConn().size() == 0) {
            if (cpd.getUsedConn().size() >= props.getMaxConn()) {
                cpd.getMetaData().incRetries();
                throw new NoAvailableConnectionException(props.getMaxConn());
            }
            Connection c = getNewConnection(cpd);
            cpd.getMetaData().incAccessed(cpd.getUsedConn().size());
            return c;
        } else {
            try {
                Connection c = cpd.getFreeConn().pop();
                if (cpd.getAllConn().get(c) < 0) { // negative timestamp: marked as old
                    cpd.getAllConn().put(c, -2L); // -2 = discarded
                    connMap.remove(c);
                    // Try again - the free stack may hold younger connections.
                    return getConnectionImpl(props);
                }
                if (c != null) {
                    cpd.getUsedConn().put(c, Long.valueOf(System.currentTimeMillis()));
                    log.debug("Reusing connection " + c);
                    cpd.getMetaData().incAccessed(cpd.getUsedConn().size());
                    return c;
                } else {
                    cpd.getMetaData().incAccessed(cpd.getUsedConn().size());
                    return getNewConnection(cpd);
                }
            } catch (EmptyStackException ese) {
                cpd.getMetaData().incAccessed(cpd.getUsedConn().size());
                return getNewConnection(cpd);
            }
        }
    }

    /**
     * This method allows you to specify database credentials in a property file
     * if you specify file name and database-property-key.
     *
     * Given a property file with this content:
     *
     * mydb.url = morten/morten@jdbc:mysql://xaps-a.owera.com:3306/xaps
     * mydb.maxconn = 10 mydb.maxage = 600
     *
     * This method will need to know the name of this file and the key "mydb" to
     * read and populate the ConnectionProperties object.
     *
     * url syntax: &lt;user&gt;/&lt;password&gt;@&lt;jdbc-url-including-dbname&gt; maxage:
     * (Connection will be taken out of the pool at this point - will not abort
     * running queries/executions) Specified in seconds. Default is 600.
     * maxconn: Specified in number of connections. Default is 10.
     *
     * Another feature is "symlinks", where one property points to an already
     * defined db-property
     *
     * anotherdb = mydb
     *
     * With this setup, the property file can contain the option of having
     * several database connections (to various databases), but also to be set
     * up to point to the same database.
     *
     * @param propertyfile
     *            - the name of the property file containing database
     *            credentials/url and possibly maxage/maxconn
     * @param dbkey
     *            - the key used in one or more properties to identify the
     *            credentials/url and possibly maxage/maxconn
     * @return the populated properties, or null if no url is configured for the key
     */
    public static ConnectionProperties getConnectionProperties(String propertyfile, String dbkey) {
        // TODO: Change signature of method: propertyfile, dbname
        // TODO: Read user/password/url/maxage/maxconn from propertyfile
        PropertyReader pr = new PropertyReader(propertyfile);
        // Symlink check: a value without '@' is a pointer to another db key.
        String symlink = pr.getProperty(dbkey);
        if (symlink != null && !symlink.contains("@")) {
            if (symlink.equals(dbkey)) {
                throw new IllegalArgumentException(dbkey + " references itself in the " + propertyfile + ", must point to another database configuration.");
            }
            dbkey = symlink;
        }
        // Find database credentials and url:
        String url = pr.getProperty(dbkey + ".url");
        if (url == null)
            url = pr.getProperty(dbkey); // Backward compatibility - a little ugly and non-explicit
        if (url == null)
            return null;
        // url is now hopefully a proper configuration on the form
        // <user>/<password>@<jdbc-url-including-dbname>
        ConnectionProperties props = new ConnectionProperties();
        try {
            props.setUrl(url.substring(url.indexOf("@") + 1));
            props.setUser(url.substring(0, url.indexOf("/")));
            props.setPassword(url.substring(url.indexOf("/") + 1, url.indexOf("@")));
        } catch (StringIndexOutOfBoundsException seoobe) {
            throw new IllegalArgumentException(url + " is not on a correct database-config-format (<user>/<password>@<jdbc-url>");
        }
        String maxAge = pr.getProperty(dbkey + ".maxage");
        if (maxAge != null) {
            try {
                // The property is specified in seconds; internally milliseconds are used.
                props.setMaxAge(Long.parseLong(maxAge) * 1000);
            } catch (NumberFormatException nfe) {
                log.warn(maxAge + " is not a number, default value (600 sec) will be used");
            }
        }
        String maxConn = pr.getProperty(dbkey + ".maxconn");
        if (maxConn != null) {
            try {
                props.setMaxConn(Integer.parseInt(maxConn));
            } catch (NumberFormatException nfe) {
                log.warn(maxConn + " is not a number, default value (10) will be used");
            }
        }
        if (props.getUrl().indexOf("mysql") > -1)
            props.setDriver("com.mysql.jdbc.Driver"); // This class must be specified in the classpath (dynamically loaded)
        else
            throw new IllegalArgumentException("The url is not pointing to a MySQL database");
        return props;
    }

    /**
     * Actually makes a new connection. Problems can be divided in two
     * categories: 1. problems with wrong password, url, user and so on -
     * rethrown as SQLException. 2. problems with the driver, classpath and so
     * on - more severe, because it indicates a problem with the installation
     * of the system; escalated as an unchecked RuntimeException so the
     * operation is aborted all the way to the top.
     */
    private static Connection getNewConnection(ConnectionPoolData cpd) throws SQLException, NoAvailableConnectionException {
        Statement s = null;
        try {
            ConnectionProperties props = cpd.getProps();
            // Load and instantiate the JDBC driver (it registers itself with DriverManager).
            Class.forName(props.getDriver()).newInstance();
            Connection c = DriverManager.getConnection(props.getUrl(), props.getUser(), props.getPassword());
            s = c.createStatement();
            s.execute("SET SESSION TRANSACTION ISOLATION LEVEL READ COMMITTED");
            cpd.getUsedConn().put(c, Long.valueOf(System.currentTimeMillis()));
            cpd.getAllConn().put(c, Long.valueOf(System.currentTimeMillis()));
            connMap.put(c, cpd);
            log.debug("Created a new connection " + c);
            return c;
        } catch (SQLException sqle) {
            log.error("Tried to create a new connection", sqle);
            throw sqle;
        } catch (Exception e) {
            log.fatal("Tried to create a new connection, but something is seriously wrong", e);
            // This should only happen if the driver is not present in the
            // classpath, or if the driver is of the wrong version. Instead of
            // throwing a checked exception, we throw an unchecked one.
            throw new RuntimeException(e);
        } finally {
            try {
                if (s != null)
                    s.close();
            } catch (Throwable t) {
                // do nothing - closing the helper statement is best-effort
            }
        }
    }

    /**
     * Returns a connection to the pool.
     *
     * If the connection is associated with an SQL-exception or if it's too old,
     * then the connection is not put back into the pool. If not, we check to
     * see if the connection is closed or not. If it's closed ('accidentally' by
     * an application) it is not returned to the pool. But those that pass
     * these tests are of course returned to the pool.
     */
    public static synchronized void returnConnection(Connection c, SQLException sqle) {
        ConnectionPoolData cpd = null;
        try {
            if (c != null) {
                cpd = connMap.get(c);
                if (cpd == null) {
                    log.error("Returned a connection which isn't created by this connectionprovider!!");
                } else {
                    Long tms = cpd.getUsedConn().get(c);
                    cpd.getUsedConn().remove(c);
                    if (tms != null)
                        cpd.getMetaData().addUsedTime(System.currentTimeMillis() - tms);
                    else
                        log.error("This method is run twice for the same connection. Wrong usage.");
                    if (sqle != null) {
                        // The connection took part in an SQLException - discard it.
                        cpd.getMetaData().incSqlEx();
                        cpd.getAllConn().put(c, -2L);
                        connMap.remove(c);
                        log.warn("SQLException caused the connection to be invalidated", sqle);
                    } else if (cpd.getAllConn().get(c) == -1) {
                        // -1 marks a connection that exceeded max age (presumably set
                        // by the cleanup task - confirm) - discard instead of reusing.
                        cpd.getAllConn().put(c, -2L);
                        connMap.remove(c);
                        log.debug("Connection " + c + " is too old and will be removed");
                    } else if (c.isClosed()) {
                        cpd.getAllConn().put(c, -2L);
                        connMap.remove(c);
                        log.error("Connection is already closed by the application. Wrong usage.");
                    } else {
                        cpd.getFreeConn().push(c);
                        log.debug("Connection " + c + " is returned to the pool");
                    }
                }
            } else {
                log.error("Returning Connection=null. Wrong usage.");
            }
        } catch (Throwable t) {
            // Log the cause so unexpected failures are diagnosable.
            log.error("An error occurred. The connection is invalidated.", t);
            if (c != null && cpd != null && cpd.getAllConn() != null) {
                cpd.getAllConn().put(c, -2L);
            }
        }
    }

    /** Returns a snapshot of the connections currently in use for the given configuration, or null if no pool exists. */
    public static synchronized Map<Connection, Long> getUsedConnCopy(ConnectionProperties cp) {
        ConnectionPoolData cpd = getConnectionPoolData(cp);
        if (cpd != null)
            return new HashMap<Connection, Long>(cpd.getUsedConn());
        return null;
    }
}
| |
/*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.offline;
import android.net.Uri;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.upstream.DataSource;
import com.google.android.exoplayer2.upstream.DataSpec;
import com.google.android.exoplayer2.upstream.cache.Cache;
import com.google.android.exoplayer2.upstream.cache.CacheDataSource;
import com.google.android.exoplayer2.upstream.cache.CacheUtil;
import com.google.android.exoplayer2.upstream.cache.CacheUtil.CachingCounters;
import com.google.android.exoplayer2.util.PriorityTaskManager;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
/**
* Base class for multi segment stream downloaders.
*
* <p>All of the methods are blocking. Also they are not thread safe, except {@link
* #getTotalSegments()}, {@link #getDownloadedSegments()} and {@link #getDownloadedBytes()}.
*
* @param <M> The type of the manifest object.
* @param <K> The type of the representation key object.
*/
public abstract class SegmentDownloader<M, K> implements Downloader {

    /** Smallest unit of content to be downloaded. */
    protected static class Segment implements Comparable<Segment> {

        /** The start time of the segment in microseconds. */
        public final long startTimeUs;

        /** The {@link DataSpec} of the segment. */
        public final DataSpec dataSpec;

        /** Constructs a Segment. */
        public Segment(long startTimeUs, DataSpec dataSpec) {
            this.startTimeUs = startTimeUs;
            this.dataSpec = dataSpec;
        }

        /** Orders segments by start time, earliest first. */
        @Override
        public int compareTo(@NonNull Segment other) {
            long startOffsetDiff = startTimeUs - other.startTimeUs;
            return startOffsetDiff == 0 ? 0 : ((startOffsetDiff < 0) ? -1 : 1);
        }
    }

    // Copy buffer size used while caching segment data.
    private static final int BUFFER_SIZE_BYTES = 128 * 1024;

    private final Uri manifestUri;
    private final PriorityTaskManager priorityTaskManager;
    private final Cache cache;
    // Data source used when online (built with offline=false).
    private final CacheDataSource dataSource;
    // Data source used for offline operations (built with offline=true);
    // presumably reads from the cache only - confirm in DownloaderConstructorHelper.
    private final CacheDataSource offlineDataSource;

    // Parsed manifest; lazily loaded by getManifestIfNeeded().
    private M manifest;
    // Selected representation keys; null means "all representations".
    private K[] keys;

    // Volatile: written by init()/download() and read by the thread-safe
    // status getters, possibly from other threads.
    private volatile int totalSegments;
    private volatile int downloadedSegments;
    private volatile long downloadedBytes;

    /**
     * @param manifestUri The {@link Uri} of the manifest to be downloaded.
     * @param constructorHelper a {@link DownloaderConstructorHelper} instance.
     */
    public SegmentDownloader(Uri manifestUri, DownloaderConstructorHelper constructorHelper) {
        this.manifestUri = manifestUri;
        this.cache = constructorHelper.getCache();
        this.dataSource = constructorHelper.buildCacheDataSource(false);
        this.offlineDataSource = constructorHelper.buildCacheDataSource(true);
        this.priorityTaskManager = constructorHelper.getPriorityTaskManager();
        // Counters stay at C.LENGTH_UNSET until init() computes them.
        resetCounters();
    }

    /**
     * Returns the manifest. Downloads and parses it if necessary.
     *
     * @return The manifest.
     * @throws IOException If an error occurs reading data.
     */
    public final M getManifest() throws IOException {
        return getManifestIfNeeded(false);
    }

    /**
     * Selects multiple representations pointed to by the keys for downloading, checking status. Any
     * previous selection is cleared. If keys are null or empty, all representations are downloaded.
     */
    public final void selectRepresentations(K[] keys) {
        // Defensive copy of the caller's array; null/empty means "everything".
        this.keys = (keys != null && keys.length > 0) ? keys.clone() : null;
        resetCounters();
    }

    /**
     * Returns keys for all representations.
     *
     * @see #selectRepresentations(Object[])
     */
    public abstract K[] getAllRepresentationKeys() throws IOException;

    /**
     * Initializes the total segments, downloaded segments and downloaded bytes counters for the
     * selected representations.
     *
     * @throws IOException Thrown when there is an io error while reading from cache.
     * @throws DownloadException Thrown if the media cannot be downloaded.
     * @throws InterruptedException If the thread has been interrupted.
     * @see #getTotalSegments()
     * @see #getDownloadedSegments()
     * @see #getDownloadedBytes()
     */
    @Override
    public final void init() throws InterruptedException, IOException {
        try {
            // Work offline here: only consult what is already cached.
            getManifestIfNeeded(true);
        } catch (IOException e) {
            // Either the manifest file isn't available offline or not parsable.
            return;
        }
        try {
            initStatus(true);
        } catch (IOException | InterruptedException e) {
            // Leave the counters unset rather than half-initialized.
            resetCounters();
            throw e;
        }
    }

    /**
     * Downloads the content for the selected representations in sync or resumes a previously stopped
     * download.
     *
     * @param listener If not null, called during download.
     * @throws IOException Thrown when there is an io error while downloading.
     * @throws DownloadException Thrown if the media cannot be downloaded.
     * @throws InterruptedException If the thread has been interrupted.
     */
    @Override
    public final synchronized void download(@Nullable ProgressListener listener)
        throws IOException, InterruptedException {
        // Register with the priority manager for the duration of the download.
        priorityTaskManager.add(C.PRIORITY_DOWNLOAD);
        try {
            getManifestIfNeeded(false);
            // initStatus(false) returns only the segments not yet fully cached.
            List<Segment> segments = initStatus(false);
            notifyListener(listener); // Initial notification.
            // Download in start-time order.
            Collections.sort(segments);
            byte[] buffer = new byte[BUFFER_SIZE_BYTES];
            CachingCounters cachingCounters = new CachingCounters();
            for (int i = 0; i < segments.size(); i++) {
                CacheUtil.cache(segments.get(i).dataSpec, cache, dataSource, buffer,
                    priorityTaskManager, C.PRIORITY_DOWNLOAD, cachingCounters, true);
                downloadedBytes += cachingCounters.newlyCachedBytes;
                downloadedSegments++;
                notifyListener(listener);
            }
        } finally {
            priorityTaskManager.remove(C.PRIORITY_DOWNLOAD);
        }
    }

    /**
     * Returns the total number of segments in the representations which are selected, or {@link
     * C#LENGTH_UNSET} if it hasn't been calculated yet.
     *
     * @see #init()
     */
    public final int getTotalSegments() {
        return totalSegments;
    }

    /**
     * Returns the total number of downloaded segments in the representations which are selected, or
     * {@link C#LENGTH_UNSET} if it hasn't been calculated yet.
     *
     * @see #init()
     */
    public final int getDownloadedSegments() {
        return downloadedSegments;
    }

    /**
     * Returns the total number of downloaded bytes in the representations which are selected, or
     * {@link C#LENGTH_UNSET} if it hasn't been calculated yet.
     *
     * @see #init()
     */
    @Override
    public final long getDownloadedBytes() {
        return downloadedBytes;
    }

    @Override
    public float getDownloadPercentage() {
        // Take local snapshot of the volatile fields
        int totalSegments = this.totalSegments;
        int downloadedSegments = this.downloadedSegments;
        if (totalSegments == C.LENGTH_UNSET || downloadedSegments == C.LENGTH_UNSET) {
            return Float.NaN;
        }
        return totalSegments == 0 ? 100f : (downloadedSegments * 100f) / totalSegments;
    }

    /** Removes all downloaded data for the selected content, including the manifest. */
    @Override
    public final void remove() throws InterruptedException {
        try {
            getManifestIfNeeded(true);
        } catch (IOException e) {
            // Either the manifest file isn't available offline, or it's not parsable. Continue anyway to
            // reset the counters and attempt to remove the manifest file.
        }
        resetCounters();
        if (manifest != null) {
            List<Segment> segments = null;
            try {
                segments = getSegments(offlineDataSource, manifest, getAllRepresentationKeys(), true);
            } catch (IOException e) {
                // Ignore exceptions. We do our best with what's available offline.
            }
            if (segments != null) {
                for (int i = 0; i < segments.size(); i++) {
                    remove(segments.get(i).dataSpec.uri);
                }
            }
            manifest = null;
        }
        // Finally drop the manifest itself from the cache.
        remove(manifestUri);
    }

    /**
     * Loads and parses the manifest.
     *
     * @param dataSource The {@link DataSource} through which to load.
     * @param uri The manifest uri.
     * @return The manifest.
     * @throws IOException If an error occurs reading data.
     */
    protected abstract M getManifest(DataSource dataSource, Uri uri) throws IOException;

    /**
     * Returns a list of {@link Segment}s for given keys.
     *
     * @param dataSource The {@link DataSource} through which to load any required data.
     * @param manifest The manifest containing the segments.
     * @param keys The selected representation keys.
     * @param allowIncompleteIndex Whether to continue in the case that a load error prevents all
     *     segments from being listed. If true then a partial segment list will be returned. If false
     *     an {@link IOException} will be thrown.
     * @throws InterruptedException Thrown if the thread was interrupted.
     * @throws IOException Thrown if {@code allowPartialIndex} is false and a load error occurs, or if
     *     the media is not in a form that allows for its segments to be listed.
     * @return A list of {@link Segment}s for given keys.
     */
    protected abstract List<Segment> getSegments(DataSource dataSource, M manifest, K[] keys,
        boolean allowIncompleteIndex) throws InterruptedException, IOException;

    /** Marks all three counters as "not yet calculated". */
    private void resetCounters() {
        totalSegments = C.LENGTH_UNSET;
        downloadedSegments = C.LENGTH_UNSET;
        downloadedBytes = C.LENGTH_UNSET;
    }

    /** Removes all cached data stored under the key derived from the uri. */
    private void remove(Uri uri) {
        CacheUtil.remove(cache, CacheUtil.generateKey(uri));
    }

    /** Reports current progress to the listener, if one was supplied. */
    private void notifyListener(ProgressListener listener) {
        if (listener != null) {
            listener.onDownloadProgress(this, getDownloadPercentage(), downloadedBytes);
        }
    }

    /**
     * Initializes totalSegments, downloadedSegments and downloadedBytes for selected representations.
     * If not offline then downloads missing metadata.
     *
     * @return A list of not fully downloaded segments.
     */
    private synchronized List<Segment> initStatus(boolean offline)
        throws IOException, InterruptedException {
        DataSource dataSource = getDataSource(offline);
        if (keys == null) {
            // No explicit selection: operate on every representation.
            keys = getAllRepresentationKeys();
        }
        List<Segment> segments = getSegments(dataSource, manifest, keys, offline);
        CachingCounters cachingCounters = new CachingCounters();
        totalSegments = segments.size();
        downloadedSegments = 0;
        downloadedBytes = 0;
        // Iterate backwards so fully cached segments can be removed in place.
        for (int i = segments.size() - 1; i >= 0; i--) {
            Segment segment = segments.get(i);
            CacheUtil.getCached(segment.dataSpec, cache, cachingCounters);
            downloadedBytes += cachingCounters.alreadyCachedBytes;
            if (cachingCounters.alreadyCachedBytes == cachingCounters.contentLength) {
                // The segment is fully downloaded.
                downloadedSegments++;
                segments.remove(i);
            }
        }
        return segments;
    }

    /** Returns the cached manifest, loading and parsing it on first use. */
    private M getManifestIfNeeded(boolean offline) throws IOException {
        if (manifest == null) {
            manifest = getManifest(getDataSource(offline), manifestUri);
        }
        return manifest;
    }

    /** Picks the offline or online data source depending on the mode. */
    private DataSource getDataSource(boolean offline) {
        return offline ? offlineDataSource : dataSource;
    }
}
| |
/**
* Copyright 2007-2016, Kaazing Corporation. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kaazing.gateway.resource.address.uri;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.kaazing.gateway.resource.address.URLUtils;
/**
* Utils class over URI methods
*
*/
public final class URIUtils {
public static final String NETWORK_INTERFACE_AUTHORITY_PORT = "^(\\[@[a-zA-Z0-9 :]*\\]|@[a-zA-Z0-9:]*):([0-9]*)$";
public static final String NETWORK_INTERFACE_AUTHORITY = "(\\[{0,1}@[a-zA-Z0-9 :]*\\]{0,1})";
private static final String MOCK_HOST = "127.0.0.1";
/**
 * Converts a {@link URI} to its string form.
 * @param uri the URI to render
 * @return the textual representation of {@code uri}
 */
public static String uriToString(URI uri) {
    String text = uri.toString();
    return text;
}
/**
 * Converts a {@link NetworkInterfaceURI} to its string form.
 * @param uri the network-interface URI to render
 * @return the textual representation of {@code uri}
 */
public static String uriToString(NetworkInterfaceURI uri) {
    // Delegates to NetworkInterfaceURI's own textual form.
    String text = uri.toString();
    return text;
}
/**
 * Helper method for retrieving host
 * @param uriString
 * @return
 */
public static String getHost(String uriString) {
    try {
        URI parsed = new URI(uriString);
        String host = parsed.getHost();
        if (host == null) {
            throw new IllegalArgumentException("Invalid URI syntax. Scheme and host must be provided (port number is optional): " + uriString);
        }
        // When the authority carries a leading '@' (network-interface style)
        // java.net.URI strips it from the host; restore it so callers see it.
        boolean markedAuthority = parsed.getAuthority().startsWith("@");
        if (markedAuthority && !host.startsWith("@")) {
            return "@" + host;
        }
        return host;
    }
    catch (URISyntaxException e) {
        // Not a standard URI - fall back to the network-interface parser.
        try {
            NetworkInterfaceURI niUri = new NetworkInterfaceURI(uriString);
            return niUri.getHost();
        }
        catch (IllegalArgumentException ne) {
            throw new IllegalArgumentException(ne.getMessage(), ne);
        }
    }
}
/**
 * Extracts the scheme component of a URI string, falling back to
 * network-interface syntax when standard parsing rejects the string.
 *
 * @param uriString the URI in string form
 * @return the scheme, or null when absent
 * @throws IllegalArgumentException if the string is invalid in both syntaxes
 */
public static String getScheme(String uriString) {
    try {
        return new URI(uriString).getScheme();
    } catch (URISyntaxException syntaxError) {
        try {
            return new NetworkInterfaceURI(uriString).getScheme();
        } catch (IllegalArgumentException invalid) {
            throw new IllegalArgumentException(invalid.getMessage(), invalid);
        }
    }
}
/**
 * Extracts the authority component of a URI string, falling back to
 * network-interface syntax when standard parsing rejects the string.
 *
 * @param uriString the URI in string form
 * @return the authority, or null when absent
 * @throws IllegalArgumentException if the string is invalid in both syntaxes
 */
public static String getAuthority(String uriString) {
    try {
        return new URI(uriString).getAuthority();
    } catch (URISyntaxException syntaxError) {
        try {
            return new NetworkInterfaceURI(uriString).getAuthority();
        } catch (IllegalArgumentException invalid) {
            throw new IllegalArgumentException(invalid.getMessage(), invalid);
        }
    }
}
/**
 * Extracts the fragment component of a URI string, falling back to
 * network-interface syntax when standard parsing rejects the string.
 *
 * @param uriString the URI in string form
 * @return the fragment, or null when absent
 * @throws IllegalArgumentException if the string is invalid in both syntaxes
 */
public static String getFragment(String uriString) {
    try {
        return new URI(uriString).getFragment();
    } catch (URISyntaxException syntaxError) {
        try {
            return new NetworkInterfaceURI(uriString).getFragment();
        } catch (IllegalArgumentException invalid) {
            throw new IllegalArgumentException(invalid.getMessage(), invalid);
        }
    }
}
/**
 * Extracts the path component of a URI string, falling back to
 * network-interface syntax when standard parsing rejects the string.
 *
 * @param uriString the URI in string form
 * @return the path, or null when absent
 * @throws IllegalArgumentException if the string is invalid in both syntaxes
 */
public static String getPath(String uriString) {
    try {
        return new URI(uriString).getPath();
    } catch (URISyntaxException syntaxError) {
        try {
            return new NetworkInterfaceURI(uriString).getPath();
        } catch (IllegalArgumentException invalid) {
            throw new IllegalArgumentException(invalid.getMessage(), invalid);
        }
    }
}
/**
 * Extracts the query component of a URI string, falling back to
 * network-interface syntax when standard parsing rejects the string.
 *
 * @param uriString the URI in string form
 * @return the query, or null when absent
 * @throws IllegalArgumentException if the string is invalid in both syntaxes
 */
public static String getQuery(String uriString) {
    try {
        return new URI(uriString).getQuery();
    } catch (URISyntaxException syntaxError) {
        try {
            return new NetworkInterfaceURI(uriString).getQuery();
        } catch (IllegalArgumentException invalid) {
            throw new IllegalArgumentException(invalid.getMessage(), invalid);
        }
    }
}
/**
 * Extracts the port number of a URI string, falling back to
 * network-interface syntax when standard parsing rejects the string.
 *
 * @param uriString the URI in string form
 * @return the port, or -1 when undefined
 * @throws IllegalArgumentException if the string is invalid in both syntaxes
 */
public static int getPort(String uriString) {
    try {
        return new URI(uriString).getPort();
    } catch (URISyntaxException syntaxError) {
        try {
            return new NetworkInterfaceURI(uriString).getPort();
        } catch (IllegalArgumentException invalid) {
            throw new IllegalArgumentException(invalid.getMessage(), invalid);
        }
    }
}
/**
 * Extracts the user-info component of a URI string, falling back to
 * network-interface syntax when standard parsing rejects the string.
 *
 * @param uriString the URI in string form
 * @return the user-info, or null when absent
 * @throws IllegalArgumentException if the string is invalid in both syntaxes
 */
public static String getUserInfo(String uriString) {
    try {
        return new URI(uriString).getUserInfo();
    } catch (URISyntaxException syntaxError) {
        try {
            return new NetworkInterfaceURI(uriString).getUserInfo();
        } catch (IllegalArgumentException invalid) {
            throw new IllegalArgumentException(invalid.getMessage(), invalid);
        }
    }
}
/**
 * Builds a URI string from its components, delegating to
 * {@link NetworkInterfaceURI} when the authority uses network-interface
 * syntax that {@link URI} cannot represent.
 *
 * @param scheme URI scheme
 * @param authority URI authority (may use network-interface syntax)
 * @param path URI path
 * @param query URI query
 * @param fragment URI fragment
 * @return the assembled URI string
 * @throws URISyntaxException declared for API compatibility
 */
public static String buildURIAsString(String scheme, String authority, String path,
        String query, String fragment) throws URISyntaxException {
    try {
        // toString() cannot throw, so returning inside the try is equivalent.
        return new URI(scheme, authority, path, query, fragment).toString();
    } catch (URISyntaxException syntaxError) {
        return NetworkInterfaceURI.buildURIToString(scheme, authority, path, query, fragment);
    }
}
/**
 * Builds a URI string from its components, delegating to
 * {@link NetworkInterfaceURI} when the host uses network-interface
 * syntax that {@link URI} cannot represent.
 *
 * @param scheme URI scheme
 * @param userInfo URI user-info
 * @param host URI host (may use network-interface syntax)
 * @param port URI port, -1 for none
 * @param path URI path
 * @param query URI query
 * @param fragment URI fragment
 * @return the assembled URI string
 * @throws URISyntaxException declared for API compatibility
 */
public static String buildURIAsString(String scheme, String userInfo,
        String host, int port, String path, String query, String fragment) throws URISyntaxException {
    try {
        // toString() cannot throw, so returning inside the try is equivalent.
        return new URI(scheme, userInfo, host, port, path, query, fragment).toString();
    } catch (URISyntaxException syntaxError) {
        return NetworkInterfaceURI.buildURIToString(scheme, userInfo, host, port, path, query, fragment);
    }
}
/**
 * Resolves {@code uriString} against {@code uriInitial}, falling back to
 * network-interface syntax when standard parsing rejects the base URI.
 *
 * @param uriInitial the base URI in string form
 * @param uriString the reference to resolve against the base
 * @return the resolved URI as a string
 * @throws IllegalArgumentException if the base is invalid in both syntaxes
 */
public static String resolve(String uriInitial, String uriString) {
    try {
        URI base = new URI(uriInitial);
        return uriToString(base.resolve(uriString));
    } catch (URISyntaxException syntaxError) {
        try {
            return new NetworkInterfaceURI(uriInitial).resolve(uriString);
        } catch (IllegalArgumentException invalid) {
            throw new IllegalArgumentException(invalid.getMessage(), invalid);
        }
    }
}
/**
 * Returns a copy of {@code uri} with its scheme replaced, falling back to
 * network-interface syntax when standard parsing rejects the string.
 *
 * @param uri the URI in string form
 * @param newScheme the replacement scheme
 * @return the modified URI as a string
 * @throws IllegalArgumentException if the URI is invalid in both syntaxes
 */
public static String modifyURIScheme(String uri, String newScheme) {
    try {
        return uriToString(URLUtils.modifyURIScheme(new URI(uri), newScheme));
    } catch (URISyntaxException syntaxError) {
        try {
            return new NetworkInterfaceURI(uri).modifyURIScheme(newScheme);
        } catch (IllegalArgumentException invalid) {
            throw new IllegalArgumentException(invalid.getMessage(), invalid);
        }
    }
}
/**
 * Returns a copy of {@code uri} with its authority replaced. When the new
 * authority uses network-interface syntax, it is temporarily substituted
 * with a placeholder host so {@link URI}-based rewriting can proceed, then
 * swapped back into the result.
 * NOTE(review): {@code String.replace} substitutes every occurrence of
 * "127.0.0.1" in the rendered URI — a URI whose path/query legitimately
 * contains that literal would be corrupted; confirm inputs cannot.
 *
 * @param uri the URI in string form
 * @param newAuthority the replacement authority (may use network-interface syntax)
 * @return the modified URI as a string
 * @throws IllegalArgumentException if the URI is invalid in both syntaxes
 */
public static String modifyURIAuthority(String uri, String newAuthority) {
    try {
        URI uriObj = new URI(uri);
        // code below modifies new authority considering also network interface syntax
        Pattern pattern = Pattern.compile(NETWORK_INTERFACE_AUTHORITY);
        Matcher matcher = pattern.matcher(newAuthority);
        String matchedToken = MOCK_HOST;
        // if newAuthority corresponds to NetworkInterfaceURI syntax
        if (matcher.find()) {
            matchedToken = matcher.group(0);
            newAuthority = newAuthority.replace(matchedToken, MOCK_HOST);
        }
        URI modifiedURIAuthority = URLUtils.modifyURIAuthority(uriObj, newAuthority);
        // Swap the placeholder back for the original network-interface token.
        String uriWithModifiedAuthority = URIUtils.uriToString(modifiedURIAuthority).replace(MOCK_HOST, matchedToken);
        return uriWithModifiedAuthority;
    }
    catch (URISyntaxException e) {
        try {
            return (new NetworkInterfaceURI(uri)).modifyURIAuthority(newAuthority);
        }
        catch (IllegalArgumentException ne) {
            throw new IllegalArgumentException(ne.getMessage(), ne);
        }
    }
}
/**
 * Returns a copy of {@code uri} with its port replaced, falling back to
 * network-interface syntax when standard parsing rejects the string.
 *
 * @param uri the URI in string form
 * @param newPort the replacement port
 * @return the modified URI as a string
 * @throws IllegalArgumentException if the URI is invalid in both syntaxes
 */
public static String modifyURIPort(String uri, int newPort) {
    try {
        return uriToString(URLUtils.modifyURIPort(new URI(uri), newPort));
    } catch (URISyntaxException syntaxError) {
        try {
            return new NetworkInterfaceURI(uri).modifyURIPort(newPort);
        } catch (IllegalArgumentException invalid) {
            throw new IllegalArgumentException(invalid.getMessage(), invalid);
        }
    }
}
/**
 * Returns a copy of {@code uri} with its path replaced, falling back to
 * network-interface syntax when standard parsing rejects the string.
 *
 * @param uri the URI in string form
 * @param newPath the replacement path
 * @return the modified URI as a string
 * @throws IllegalArgumentException if the URI is invalid in both syntaxes
 */
public static String modifyURIPath(String uri, String newPath) {
    try {
        return uriToString(URLUtils.modifyURIPath(new URI(uri), newPath));
    } catch (URISyntaxException syntaxError) {
        try {
            return new NetworkInterfaceURI(uri).modifyURIPath(newPath);
        } catch (IllegalArgumentException invalid) {
            throw new IllegalArgumentException(invalid.getMessage(), invalid);
        }
    }
}
/**
 * Reports whether the given URI string is absolute, falling back to
 * network-interface syntax when standard parsing rejects the string.
 *
 * @param uri the URI in string form
 * @return true if the URI is absolute
 * @throws IllegalArgumentException if the URI is invalid in both syntaxes
 */
public static boolean isAbsolute(String uri) {
    try {
        return new URI(uri).isAbsolute();
    } catch (URISyntaxException syntaxError) {
        try {
            return new NetworkInterfaceURI(uri).isAbsolute();
        } catch (IllegalArgumentException invalid) {
            throw new IllegalArgumentException(invalid.getMessage(), invalid);
        }
    }
}
/**
 * Class performing logic similar to {@link java.net.URI} which additionally
 * supports network interface syntax (e.g. {@code tcp://@eth0:8080}).
 * <p>
 * Parsing substitutes a placeholder host ({@code 127.0.0.1}) for the
 * network-interface token, delegates to {@link java.net.URI}, and swaps the
 * token back into each extracted component.
 */
private static class NetworkInterfaceURI {
    // Placeholder host used so java.net.URI can parse the string.
    private static final String HOST_TEMPLATE = "127.0.0.1";
    private URI mockNetworkInterfaceURI;
    private Parser parser;
    // -- Properties and components of this instance -- similar to java.net.URI
    // Components of all URIs: [<scheme>:]<scheme-specific-part>[#<fragment>]
    private String scheme; // null ==> relative URI
    private String fragment;
    // Hierarchical URI components: [//<authority>]<path>[?<query>]
    private String authority; // Registry or server
    // Server-based authority: [<userInfo>@]<host>[:<port>]
    private String userInfo;
    private String host; // null ==> registry-based
    private int port = -1; // -1 ==> undefined
    // Remaining components of hierarchical URIs
    private String path; // null ==> opaque
    private String query;
    private boolean absolute;

    /**
     * Builds a URI string from scheme/authority/path/query/fragment where the
     * authority may use network-interface syntax.
     * NOTE(review): replace() substitutes every occurrence of "127.0.0.1" in
     * the rendered URI; a path or query containing that literal would be
     * corrupted — confirm inputs cannot contain it.
     */
    public static String buildURIToString(String scheme, String authority, String path, String query, String fragment) {
        URI helperURI;
        try {
            helperURI = new URI(scheme, HOST_TEMPLATE, path, query, fragment);
        } catch (URISyntaxException e) {
            throw new IllegalArgumentException(e.getMessage(), e);
        }
        return helperURI.toString().replace(HOST_TEMPLATE, authority);
    }

    /** Seven-argument variant of {@link #buildURIToString}; same placeholder caveat applies. */
    public static String buildURIToString(String scheme, String userInfo, String host, int port, String path, String query,
            String fragment) {
        URI helperURI;
        try {
            helperURI = new URI(scheme, userInfo, HOST_TEMPLATE, port, path, query, fragment);
        } catch (URISyntaxException e) {
            throw new IllegalArgumentException(e.getMessage(), e);
        }
        return helperURI.toString().replace(HOST_TEMPLATE, host);
    }

    /**
     * Parses the given URI, which must use tcp/udp network-interface syntax.
     *
     * @throws IllegalArgumentException if the syntax is invalid
     */
    public NetworkInterfaceURI(String uri) throws IllegalArgumentException {
        parser = new Parser(uri);
        parser.parse();
    }

    /** @return host component (may contain the network-interface token) */
    public String getHost() {
        return host;
    }

    /** @return scheme component */
    public String getScheme() {
        return scheme;
    }

    /** @return authority component (may contain the network-interface token) */
    public String getAuthority() {
        return authority;
    }

    /** @return fragment component */
    public String getFragment() {
        return fragment;
    }

    /** @return path component */
    public String getPath() {
        return path;
    }

    /** @return query component */
    public String getQuery() {
        return query;
    }

    /** @return port, or -1 when undefined */
    public int getPort() {
        return port;
    }

    /** @return user-info component */
    public String getUserInfo() {
        return userInfo;
    }

    /** @return whether this URI is absolute */
    public boolean isAbsolute() {
        return absolute;
    }

    /**
     * Resolves {@code uriString} against this URI.
     *
     * @param uriString the reference to resolve
     * @return the resolved URI as a string
     */
    public String resolve(String uriString) {
        return parser.resolve(uriString);
    }

    /**
     * Returns a copy of this URI with the scheme replaced.
     * NOTE(review): rebuilds from host/port only, so any userInfo component
     * is dropped — confirm callers never pass URIs carrying user-info.
     */
    public String modifyURIScheme(String newScheme) {
        return buildURIFromTokens(newScheme, host, port, path, query, fragment);
    }

    /** Returns a copy of this URI with the authority replaced. */
    public String modifyURIAuthority(String newAuthority) {
        return buildURIFromTokens(scheme, newAuthority, path, query, fragment);
    }

    /** Returns a copy of this URI with the port replaced (drops userInfo, see modifyURIScheme). */
    public String modifyURIPort(int newPort) {
        return buildURIFromTokens(scheme, host, newPort, path, query, fragment);
    }

    /** Returns a copy of this URI with the path replaced (drops userInfo, see modifyURIScheme). */
    public String modifyURIPath(String newPath) {
        return buildURIFromTokens(scheme, host, port, newPath, query, fragment);
    }

    /**
     * Parser performing NetworkInterfaceSyntax validation and String tokens extraction.
     */
    private class Parser {
        private String uri;
        private String matchedToken;

        public Parser(String uri) {
            this.uri = uri;
        }

        /**
         * Validates the network-interface syntax and extracts the URI components.
         */
        private void parse() throws IllegalArgumentException {
            if (!uri.startsWith("tcp://") && !uri.startsWith("udp://")) {
                throw new IllegalArgumentException("Network interface URI syntax should only "
                        + "be applicable for tcp and udp schemes");
            }
            Pattern pattern = Pattern.compile(NETWORK_INTERFACE_AUTHORITY);
            Matcher matcher = pattern.matcher(uri);
            if (!matcher.find()) {
                throw new IllegalArgumentException("Invalid network interface URI syntax");
            }
            matchedToken = matcher.group(0);
            if (matchedToken.matches(".*:.*:.*")) {
                throw new IllegalArgumentException("Multiple ':' characters within network interface syntax not allowed");
            }
            if (matchedToken.contains(" ") && (!matchedToken.startsWith("[") || !matchedToken.endsWith("]"))) {
                throw new IllegalArgumentException("Network interface syntax host contains spaces but misses bracket(s)");
            }
            // Swap the matched token for a parseable placeholder and let java.net.URI parse it.
            mockNetworkInterfaceURI = URI.create(uri.replace(matchedToken, HOST_TEMPLATE));
            populateUriDataFromMockInterfaceURI();
        }

        private String resolve(String uriString) {
            return uriToString(mockNetworkInterfaceURI.resolve(uriString)).replace(HOST_TEMPLATE, matchedToken);
        }

        // Copies each component out of the placeholder URI, restoring the token
        // wherever the placeholder appears.
        private void populateUriDataFromMockInterfaceURI() {
            scheme = mockNetworkInterfaceURI.getScheme();
            fragment = mockNetworkInterfaceURI.getFragment();
            authority = mockNetworkInterfaceURI.getAuthority().replace(HOST_TEMPLATE, matchedToken);
            userInfo = mockNetworkInterfaceURI.getUserInfo();
            host = mockNetworkInterfaceURI.getHost().replace(HOST_TEMPLATE, matchedToken);
            port = mockNetworkInterfaceURI.getPort();
            path = mockNetworkInterfaceURI.getPath();
            query = mockNetworkInterfaceURI.getQuery();
            absolute = mockNetworkInterfaceURI.isAbsolute();
        }
    }

    /**
     * Renders {@code scheme://host:port[path][?query][#fragment]}.
     * FIX: the original inserted "/" before every non-empty path, but paths
     * extracted by java.net.URI already begin with "/", which produced a
     * doubled slash (e.g. "tcp://@eth0:80//echo"); it also NPE'd on a null
     * path. The separator is now added only when actually missing.
     */
    private String buildURIFromTokens(String scheme, String host, int port, String path,
            String query, String fragment) {
        return scheme + "://" + host + ":" + port + pathWithSeparator(path) +
                (query != null ? "?" + query : "") +
                (fragment != null ? "#" + fragment : "");
    }

    /** Renders {@code scheme://authority[path][?query][#fragment]}; same doubled-slash fix as above. */
    private String buildURIFromTokens(String scheme, String authority, String path,
            String query, String fragment) {
        return scheme + "://" + authority + pathWithSeparator(path) +
                (query != null ? "?" + query : "") +
                (fragment != null ? "#" + fragment : "");
    }

    /** Returns the path prefixed with exactly one "/", or "" for a null/empty path. */
    private static String pathWithSeparator(String path) {
        if (path == null || path.isEmpty()) {
            return "";
        }
        return path.startsWith("/") ? path : "/" + path;
    }
}
/**
 * Create a canonical URI from a given URI. A canonical URI is a URI with:<ul> <li>the host part of the authority
 * lower-case since URI semantics dictate that hostnames are case insensitive <li>(optionally, NOT appropriate for Origin
 * headers) the path part set to "/" if there was no path in the input URI (this conforms to the WebSocket and HTTP protocol
 * specifications and avoids us having to do special handling for path throughout the server code). </ul>
 *
 * @param uriString the URI to canonicalize, in string form
 * @param canonicalizePath if true, append trailing '/' when missing
 * @return a URI with the host part of the authority lower-case and (optionally) trailing / added, or null if the uri is null
 * @throws IllegalArgumentException if the uriString is not valid syntax
 */
public static String getCanonicalURI(String uriString, boolean canonicalizePath) {
    // Null and empty inputs short-circuit to null; everything else is canonicalized.
    if (uriString == null || uriString.isEmpty()) {
        return null;
    }
    return getCanonicalizedURI(uriString, canonicalizePath);
}
/**
 * Create a canonical URI from a given URI. A canonical URI is a URI with:<ul> <li>the host part of the authority
 * lower-case since URI semantics dictate that hostnames are case insensitive <li>(optionally, NOT appropriate for Origin
 * headers) the path part set to "/" except for tcp uris if there was no path in the input URI (this conforms to the
 * WebSocket and HTTP protocol specifications and avoids us having to do special handling for path throughout the server
 * code). </ul>
 *
 * @param uri the URI to canonicalize
 * @param canonicalizePath if true, append trailing '/' when missing
 * @return a URI with the host part of the authority lower-case and (optionally if not tcp) trailing / added, or null if the
 *         uri is null
 * @throws IllegalArgumentException if the uri is not valid syntax
 */
public static String getCanonicalizedURI(String uri, boolean canonicalizePath) {
    String canonicalURI = uri;
    if (uri != null) {
        String host = getHost(uri);
        String path = getPath(uri);
        final boolean emptyPath = "".equals(path);
        final boolean noPathToCanonicalize = canonicalizePath && (path == null || emptyPath);
        final boolean trailingSlashPath = "/".equals(path);
        final String scheme = getScheme(uri);
        // Transport-level schemes carry no meaningful path, so they never get "/" appended.
        final boolean pathlessScheme = "ssl".equals(scheme) || "tcp".equals(scheme) || "pipe".equals(scheme)
                || "udp".equals(scheme) || "mux".equals(scheme);
        final boolean trailingSlashWithPathlessScheme = trailingSlashPath && pathlessScheme;
        // newPath semantics: "" strips a bare "/" from a pathless scheme; "/" is added when
        // canonicalization was requested and the scheme expects a path; null means keep as-is.
        String newPath = trailingSlashWithPathlessScheme ? "" :
                noPathToCanonicalize ? (pathlessScheme ? null : "/") : null;
        // Rebuild only when the host needs lower-casing or the path actually changed
        // (newPath == "" is a change and correctly passes this non-null check).
        if (((host != null) && !host.equals(host.toLowerCase())) || newPath != null) {
            path = newPath == null ? path : newPath;
            try {
                canonicalURI = buildURIAsString(scheme, getUserInfo(uri), host == null ?
                        null : host.toLowerCase(), getPort(uri), path, getQuery(uri), getFragment(uri));
            } catch (URISyntaxException ex) {
                throw new IllegalArgumentException("Invalid URI: " + uri + " in Gateway configuration file", ex);
            }
        }
    }
    return canonicalURI;
}
}
| |
/*
* Copyright 2014-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.apple;
import static com.facebook.buck.cxx.toolchain.CxxFlavorSanitizer.sanitize;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assume.assumeTrue;
import com.facebook.buck.apple.toolchain.ApplePlatform;
import com.facebook.buck.cxx.CxxDescriptionEnhancer;
import com.facebook.buck.cxx.CxxStrip;
import com.facebook.buck.cxx.toolchain.HeaderMode;
import com.facebook.buck.cxx.toolchain.StripStyle;
import com.facebook.buck.io.filesystem.ProjectFilesystem;
import com.facebook.buck.io.filesystem.TestProjectFilesystems;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.BuildTargetFactory;
import com.facebook.buck.model.BuildTargets;
import com.facebook.buck.model.InternalFlavor;
import com.facebook.buck.testutil.MoreAsserts;
import com.facebook.buck.testutil.ProcessResult;
import com.facebook.buck.testutil.TemporaryPaths;
import com.facebook.buck.testutil.integration.ProjectWorkspace;
import com.facebook.buck.testutil.integration.TestDataHelper;
import com.facebook.buck.util.ProcessExecutor;
import com.facebook.buck.util.environment.Platform;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import org.junit.Rule;
import org.junit.Test;
public class AppleLibraryIntegrationTest {
@Rule public TemporaryPaths tmp = new TemporaryPaths();
@Test
public void testAppleLibraryBuildsSomething() throws IOException {
    // Integration test: only meaningful on a macOS host with the macosx SDK present.
    assumeTrue(Platform.detect() == Platform.MACOS);
    assumeTrue(AppleNativeIntegrationTestUtils.isApplePlatformAvailable(ApplePlatform.MACOSX));
    ProjectWorkspace workspace =
            TestDataHelper.createProjectWorkspaceForScenario(
                    this, "apple_library_builds_something", tmp);
    workspace.setUp();
    ProjectFilesystem filesystem =
            TestProjectFilesystems.createProjectFilesystem(workspace.getDestPath());
    BuildTarget target =
            BuildTargetFactory.newInstance("//Libraries/TestLibrary:TestLibrary#static,default");
    ProcessResult result = workspace.runBuckCommand("build", target.getFullyQualifiedName());
    result.assertSuccess();
    // Success is defined as an output appearing at the target's gen path.
    assertTrue(Files.exists(workspace.getPath(BuildTargets.getGenPath(filesystem, target, "%s"))));
}

@Test
public void appleLibraryUsesPlatformDepOfSpecifiedPlatform() throws Exception {
    assumeTrue(Platform.detect() == Platform.MACOS);
    assumeTrue(AppleNativeIntegrationTestUtils.isApplePlatformAvailable(ApplePlatform.MACOSX));
    ProjectWorkspace workspace =
            TestDataHelper.createProjectWorkspaceForScenario(
                    this, "apple_library_with_platform_deps", tmp);
    workspace.setUp();
    // arm64 platform dependency works, so the build should succeed
    workspace
        .runBuckCommand(
            "build",
            BuildTargetFactory.newInstance("//Apps/TestApp:TestApp#iphoneos-arm64")
                .getFullyQualifiedName())
        .assertSuccess();
    // armv7 platform dependency is broken, so the build should fail
    workspace
        .runBuckCommand(
            "build",
            BuildTargetFactory.newInstance("//Apps/TestApp:TestApp#iphoneos-armv7")
                .getFullyQualifiedName())
        .assertFailure();
}
@Test
public void testAppleLibraryWithHeaderPathPrefix() throws IOException {
    assumeTrue(Platform.detect() == Platform.MACOS);
    assumeTrue(AppleNativeIntegrationTestUtils.isApplePlatformAvailable(ApplePlatform.MACOSX));
    // Scenario's library declares a header_path_prefix; build must still succeed.
    ProjectWorkspace workspace =
            TestDataHelper.createProjectWorkspaceForScenario(
                    this, "apple_library_with_header_path_prefix", tmp);
    workspace.setUp();
    ProjectFilesystem filesystem =
            TestProjectFilesystems.createProjectFilesystem(workspace.getDestPath());
    BuildTarget target =
            BuildTargetFactory.newInstance("//Libraries/TestLibrary:TestLibrary#static,default");
    ProcessResult result = workspace.runBuckCommand("build", target.getFullyQualifiedName());
    result.assertSuccess();
    assertTrue(Files.exists(workspace.getPath(BuildTargets.getGenPath(filesystem, target, "%s"))));
}

@Test
public void testCanUseAHeaderWithoutPrefix() throws IOException {
    assumeTrue(Platform.detect() == Platform.MACOS);
    assumeTrue(AppleNativeIntegrationTestUtils.isApplePlatformAvailable(ApplePlatform.MACOSX));
    // Same scenario as above but builds the second library, which includes
    // headers without the declared prefix.
    ProjectWorkspace workspace =
            TestDataHelper.createProjectWorkspaceForScenario(
                    this, "apple_library_with_header_path_prefix", tmp);
    workspace.setUp();
    ProjectFilesystem filesystem =
            TestProjectFilesystems.createProjectFilesystem(workspace.getDestPath());
    BuildTarget target =
            BuildTargetFactory.newInstance("//Libraries/TestLibrary2:TestLibrary2#static,default");
    ProcessResult result = workspace.runBuckCommand("build", target.getFullyQualifiedName());
    result.assertSuccess();
    assertTrue(Files.exists(workspace.getPath(BuildTargets.getGenPath(filesystem, target, "%s"))));
}
@Test
public void testAppleLibraryWithDefaultsInConfigBuildsSomething() throws IOException {
    assumeTrue(Platform.detect() == Platform.MACOS);
    assumeTrue(AppleNativeIntegrationTestUtils.isApplePlatformAvailable(ApplePlatform.MACOSX));
    ProjectWorkspace workspace =
            TestDataHelper.createProjectWorkspaceForScenario(
                    this, "apple_library_builds_something", tmp);
    workspace.setUp();
    ProjectFilesystem filesystem =
            TestProjectFilesystems.createProjectFilesystem(workspace.getDestPath());
    // Flavors come from .buckconfig defaults rather than the target string here.
    workspace.addBuckConfigLocalOption(
            "defaults.apple_library", "platform", "iphonesimulator-x86_64");
    workspace.addBuckConfigLocalOption("defaults.apple_library", "type", "shared");
    BuildTarget target = BuildTargetFactory.newInstance("//Libraries/TestLibrary:TestLibrary");
    ProcessResult result = workspace.runBuckCommand("build", target.getFullyQualifiedName());
    result.assertSuccess();
    // The config defaults should be applied as implicit flavors on the output path.
    BuildTarget implicitTarget =
            target.withAppendedFlavors(
                    InternalFlavor.of("shared"), InternalFlavor.of("iphonesimulator-x86_64"));
    assertTrue(
            Files.exists(workspace.getPath(BuildTargets.getGenPath(filesystem, implicitTarget, "%s"))));
}

@Test
public void testAppleLibraryWithDefaultsInRuleBuildsSomething() throws IOException {
    assumeTrue(Platform.detect() == Platform.MACOS);
    assumeTrue(AppleNativeIntegrationTestUtils.isApplePlatformAvailable(ApplePlatform.MACOSX));
    // Here defaults come from the rule definition itself (scenario BUCK file).
    ProjectWorkspace workspace =
            TestDataHelper.createProjectWorkspaceForScenario(
                    this, "apple_library_with_platform_and_type", tmp);
    workspace.setUp();
    ProjectFilesystem filesystem =
            TestProjectFilesystems.createProjectFilesystem(workspace.getDestPath());
    BuildTarget target = BuildTargetFactory.newInstance("//Libraries/TestLibrary:TestLibrary");
    ProcessResult result = workspace.runBuckCommand("build", target.getFullyQualifiedName());
    result.assertSuccess();
    BuildTarget implicitTarget =
            target.withAppendedFlavors(
                    InternalFlavor.of("shared"), InternalFlavor.of("iphoneos-arm64"));
    Path outputPath = workspace.getPath(BuildTargets.getGenPath(filesystem, implicitTarget, "%s"));
    assertTrue(Files.exists(outputPath));
}
// The four tests below exercise the same scenario across the watch/TV SDK
// variants; each is skipped unless the corresponding SDK is installed.
@Test
public void testAppleLibraryBuildsForWatchOS() throws IOException {
    assumeTrue(AppleNativeIntegrationTestUtils.isApplePlatformAvailable(ApplePlatform.WATCHOS));
    ProjectWorkspace workspace =
            TestDataHelper.createProjectWorkspaceForScenario(
                    this, "apple_library_builds_something", tmp);
    workspace.setUp();
    ProjectFilesystem filesystem =
            TestProjectFilesystems.createProjectFilesystem(workspace.getDestPath());
    BuildTarget target =
            BuildTargetFactory.newInstance("//Libraries/TestLibrary:TestLibrary#watchos-armv7k,static");
    ProcessResult result = workspace.runBuckCommand("build", target.getFullyQualifiedName());
    result.assertSuccess();
    assertTrue(Files.exists(workspace.getPath(BuildTargets.getGenPath(filesystem, target, "%s"))));
}

@Test
public void testAppleLibraryBuildsForWatchSimulator() throws IOException {
    assumeTrue(
            AppleNativeIntegrationTestUtils.isApplePlatformAvailable(ApplePlatform.WATCHSIMULATOR));
    ProjectWorkspace workspace =
            TestDataHelper.createProjectWorkspaceForScenario(
                    this, "apple_library_builds_something", tmp);
    workspace.setUp();
    ProjectFilesystem filesystem =
            TestProjectFilesystems.createProjectFilesystem(workspace.getDestPath());
    BuildTarget target =
            BuildTargetFactory.newInstance(
                    "//Libraries/TestLibrary:TestLibrary#watchsimulator-i386,static");
    ProcessResult result = workspace.runBuckCommand("build", target.getFullyQualifiedName());
    result.assertSuccess();
    assertTrue(Files.exists(workspace.getPath(BuildTargets.getGenPath(filesystem, target, "%s"))));
}

@Test
public void testAppleLibraryBuildsForAppleTVOS() throws IOException {
    assumeTrue(AppleNativeIntegrationTestUtils.isApplePlatformAvailable(ApplePlatform.APPLETVOS));
    ProjectWorkspace workspace =
            TestDataHelper.createProjectWorkspaceForScenario(
                    this, "apple_library_builds_something", tmp);
    workspace.setUp();
    ProjectFilesystem filesystem =
            TestProjectFilesystems.createProjectFilesystem(workspace.getDestPath());
    BuildTarget target =
            BuildTargetFactory.newInstance(
                    "//Libraries/TestLibrary:TestLibrary#appletvos-arm64,static");
    ProcessResult result = workspace.runBuckCommand("build", target.getFullyQualifiedName());
    result.assertSuccess();
    assertTrue(Files.exists(workspace.getPath(BuildTargets.getGenPath(filesystem, target, "%s"))));
}

@Test
public void testAppleLibraryBuildsForAppleTVSimulator() throws IOException {
    assumeTrue(
            AppleNativeIntegrationTestUtils.isApplePlatformAvailable(ApplePlatform.APPLETVSIMULATOR));
    ProjectWorkspace workspace =
            TestDataHelper.createProjectWorkspaceForScenario(
                    this, "apple_library_builds_something", tmp);
    workspace.setUp();
    ProjectFilesystem filesystem =
            TestProjectFilesystems.createProjectFilesystem(workspace.getDestPath());
    BuildTarget target =
            BuildTargetFactory.newInstance(
                    "//Libraries/TestLibrary:TestLibrary#appletvsimulator-x86_64,static");
    ProcessResult result = workspace.runBuckCommand("build", target.getFullyQualifiedName());
    result.assertSuccess();
    assertTrue(Files.exists(workspace.getPath(BuildTargets.getGenPath(filesystem, target, "%s"))));
}
@Test
public void testAppleLibraryBuildsSomethingUsingAppleCxxPlatform() throws IOException {
    assumeTrue(Platform.detect() == Platform.MACOS);
    assumeTrue(AppleNativeIntegrationTestUtils.isApplePlatformAvailable(ApplePlatform.MACOSX));
    // Same scenario as testAppleLibraryBuildsSomething but with an explicit
    // macosx-x86_64 platform flavor instead of "default".
    ProjectWorkspace workspace =
            TestDataHelper.createProjectWorkspaceForScenario(
                    this, "apple_library_builds_something", tmp);
    workspace.setUp();
    ProjectFilesystem filesystem =
            TestProjectFilesystems.createProjectFilesystem(workspace.getDestPath());
    BuildTarget target =
            BuildTargetFactory.newInstance("//Libraries/TestLibrary:TestLibrary#static,macosx-x86_64");
    ProcessResult result = workspace.runBuckCommand("build", target.getFullyQualifiedName());
    result.assertSuccess();
    assertTrue(Files.exists(workspace.getPath(BuildTargets.getGenPath(filesystem, target, "%s"))));
}

@Test
public void testAppleLibraryHeaderSymlinkTree() throws IOException {
    assumeTrue(Platform.detect() == Platform.MACOS);
    assumeTrue(AppleNativeIntegrationTestUtils.isApplePlatformAvailable(ApplePlatform.MACOSX));
    ProjectWorkspace workspace =
            TestDataHelper.createProjectWorkspaceForScenario(
                    this, "apple_library_header_symlink_tree", tmp);
    workspace.setUp();
    ProjectFilesystem filesystem =
            TestProjectFilesystems.createProjectFilesystem(workspace.getDestPath());
    BuildTarget buildTarget =
            BuildTargetFactory.newInstance(
                    "//Libraries/TestLibrary:TestLibrary#"
                            + "default,"
                            + CxxDescriptionEnhancer.HEADER_SYMLINK_TREE_FLAVOR);
    ProcessResult result = workspace.runBuckCommand("build", buildTarget.getFullyQualifiedName());
    result.assertSuccess();
    // Headers must be exposed both bare and under the library-name prefix,
    // each as a symlink back to the source header.
    Path inputPath = workspace.getPath(buildTarget.getBasePath()).toRealPath();
    Path outputPath =
            workspace.getPath(BuildTargets.getGenPath(filesystem, buildTarget, "%s")).toRealPath();
    assertIsSymbolicLink(
            outputPath.resolve("PrivateHeader.h"), inputPath.resolve("PrivateHeader.h"));
    assertIsSymbolicLink(
            outputPath.resolve("TestLibrary/PrivateHeader.h"), inputPath.resolve("PrivateHeader.h"));
    assertIsSymbolicLink(outputPath.resolve("PublicHeader.h"), inputPath.resolve("PublicHeader.h"));
}
@Test
public void testAppleLibraryBuildsFramework() throws Exception {
    assumeTrue(Platform.detect() == Platform.MACOS);
    assumeTrue(AppleNativeIntegrationTestUtils.isApplePlatformAvailable(ApplePlatform.MACOSX));
    ProjectWorkspace workspace =
            TestDataHelper.createProjectWorkspaceForScenario(
                    this, "apple_library_builds_something", tmp);
    workspace.setUp();
    ProjectFilesystem filesystem =
            TestProjectFilesystems.createProjectFilesystem(workspace.getDestPath());
    BuildTarget target =
            BuildTargetFactory.newInstance(
                    "//Libraries/TestLibrary:TestLibrary#framework,macosx-x86_64,no-debug");
    ProcessResult result = workspace.runBuckCommand("build", target.getFullyQualifiedName());
    result.assertSuccess();
    Path frameworkPath =
            workspace.getPath(
                    BuildTargets.getGenPath(
                            filesystem,
                            target.withAppendedFlavors(AppleDescriptions.INCLUDE_FRAMEWORKS_FLAVOR),
                            "%s")
                        .resolve("TestLibrary.framework"));
    assertThat(Files.exists(frameworkPath), is(true));
    // macOS frameworks keep Info.plist under Resources/ (unlike iOS below).
    assertThat(Files.exists(frameworkPath.resolve("Resources/Info.plist")), is(true));
    Path libraryPath = frameworkPath.resolve("TestLibrary");
    assertThat(Files.exists(libraryPath), is(true));
    // file(1) output confirms the binary is a dylib rather than a static archive.
    assertThat(
            workspace.runCommand("file", libraryPath.toString()).getStdout().get(),
            containsString("dynamically linked shared library"));
}

@Test
public void testAppleLibraryBuildsFrameworkIOS() throws Exception {
    assumeTrue(Platform.detect() == Platform.MACOS);
    assumeTrue(
            AppleNativeIntegrationTestUtils.isApplePlatformAvailable(ApplePlatform.IPHONESIMULATOR));
    ProjectWorkspace workspace =
            TestDataHelper.createProjectWorkspaceForScenario(
                    this, "apple_library_builds_something", tmp);
    workspace.setUp();
    ProjectFilesystem filesystem =
            TestProjectFilesystems.createProjectFilesystem(workspace.getDestPath());
    BuildTarget target =
            BuildTargetFactory.newInstance(
                    "//Libraries/TestLibrary:TestLibrary#framework,iphonesimulator-x86_64,no-debug");
    ProcessResult result = workspace.runBuckCommand("build", target.getFullyQualifiedName());
    result.assertSuccess();
    Path frameworkPath =
            workspace.getPath(
                    BuildTargets.getGenPath(
                            filesystem,
                            target.withAppendedFlavors(AppleDescriptions.INCLUDE_FRAMEWORKS_FLAVOR),
                            "%s")
                        .resolve("TestLibrary.framework"));
    assertThat(Files.exists(frameworkPath), is(true));
    // iOS frameworks are shallow: Info.plist sits at the framework root.
    assertThat(Files.exists(frameworkPath.resolve("Info.plist")), is(true));
    Path libraryPath = frameworkPath.resolve("TestLibrary");
    assertThat(Files.exists(libraryPath), is(true));
    assertThat(
            workspace.runCommand("file", libraryPath.toString()).getStdout().get(),
            containsString("dynamically linked shared library"));
}
@Test
public void appleLibraryBuildsMultiarchFramework() throws Exception {
    assumeTrue(Platform.detect() == Platform.MACOS);
    assumeTrue(AppleNativeIntegrationTestUtils.isApplePlatformAvailable(ApplePlatform.MACOSX));
    ProjectWorkspace workspace =
            TestDataHelper.createProjectWorkspaceForScenario(
                    this, "apple_library_builds_something", tmp);
    workspace.setUp();
    ProjectFilesystem filesystem =
            TestProjectFilesystems.createProjectFilesystem(workspace.getDestPath());
    // Two platform flavors on one target should produce a fat (universal) binary.
    BuildTarget target =
            BuildTargetFactory.newInstance(
                    "//Libraries/TestLibrary:TestLibrary#macosx-x86_64,macosx-i386")
                .withAppendedFlavors(
                    AppleDescriptions.FRAMEWORK_FLAVOR, AppleDebugFormat.NONE.getFlavor());
    ProcessResult result = workspace.runBuckCommand("build", target.getFullyQualifiedName());
    result.assertSuccess();
    Path frameworkPath =
            workspace.getPath(
                    BuildTargets.getGenPath(
                            filesystem,
                            target.withAppendedFlavors(AppleDescriptions.INCLUDE_FRAMEWORKS_FLAVOR),
                            "%s")
                        .resolve("TestLibrary.framework"));
    Path libraryPath = frameworkPath.resolve("TestLibrary");
    assertThat(Files.exists(libraryPath), is(true));
    // lipo -verify_arch exits 0 only if the binary contains both architectures.
    ProcessExecutor.Result lipoVerifyResult =
            workspace.runCommand("lipo", libraryPath.toString(), "-verify_arch", "i386", "x86_64");
    assertEquals(lipoVerifyResult.getStderr().orElse(""), 0, lipoVerifyResult.getExitCode());
    assertThat(
            workspace.runCommand("file", libraryPath.toString()).getStdout().get(),
            containsString("dynamically linked shared library"));
}
@Test
public void testAppleFrameworkWithDsym() throws Exception {
assumeTrue(Platform.detect() == Platform.MACOS);
assumeTrue(AppleNativeIntegrationTestUtils.isApplePlatformAvailable(ApplePlatform.MACOSX));
ProjectWorkspace workspace =
TestDataHelper.createProjectWorkspaceForScenario(
this, "apple_library_builds_something", tmp);
workspace.setUp();
ProjectFilesystem filesystem =
TestProjectFilesystems.createProjectFilesystem(workspace.getDestPath());
ProcessResult result =
workspace.runBuckCommand(
"build",
"//Libraries/TestLibrary:TestLibrary#dwarf-and-dsym,framework,macosx-x86_64",
"--config",
"cxx.cflags=-g");
result.assertSuccess();
Path dsymPath =
tmp.getRoot()
.resolve(filesystem.getBuckPaths().getGenDir())
.resolve(
"Libraries/TestLibrary/"
+ "TestLibrary#dwarf-and-dsym,framework,include-frameworks,macosx-x86_64/"
+ "TestLibrary.framework.dSYM");
assertThat(Files.exists(dsymPath), is(true));
AppleDsymTestUtil.checkDsymFileHasDebugSymbol("+[TestClass answer]", workspace, dsymPath);
}
@Test
public void testAppleDynamicLibraryProducesDylib() throws Exception {
assumeTrue(Platform.detect() == Platform.MACOS);
assumeTrue(AppleNativeIntegrationTestUtils.isApplePlatformAvailable(ApplePlatform.MACOSX));
ProjectWorkspace workspace =
TestDataHelper.createProjectWorkspaceForScenario(this, "apple_library_shared", tmp);
workspace.setUp();
ProjectFilesystem filesystem =
TestProjectFilesystems.createProjectFilesystem(workspace.getDestPath());
BuildTarget target =
BuildTargetFactory.newInstance("//Libraries/TestLibrary:TestLibrary")
.withAppendedFlavors(
InternalFlavor.of("macosx-x86_64"), CxxDescriptionEnhancer.SHARED_FLAVOR);
ProcessResult result =
workspace.runBuckCommand(
"build", target.getFullyQualifiedName(), "--config", "cxx.cflags=-g");
result.assertSuccess();
Path outputPath = workspace.getPath(BuildTargets.getGenPath(filesystem, target, "%s"));
assertThat(Files.exists(outputPath), is(true));
}
@Test
public void testAppleDynamicLibraryWithDsym() throws Exception {
assumeTrue(Platform.detect() == Platform.MACOS);
assumeTrue(AppleNativeIntegrationTestUtils.isApplePlatformAvailable(ApplePlatform.MACOSX));
ProjectWorkspace workspace =
TestDataHelper.createProjectWorkspaceForScenario(this, "apple_library_shared", tmp);
workspace.setUp();
ProjectFilesystem filesystem =
TestProjectFilesystems.createProjectFilesystem(workspace.getDestPath());
BuildTarget target =
BuildTargetFactory.newInstance("//Libraries/TestLibrary:TestLibrary")
.withAppendedFlavors(
CxxDescriptionEnhancer.SHARED_FLAVOR,
AppleDebugFormat.DWARF_AND_DSYM.getFlavor(),
InternalFlavor.of("macosx-x86_64"));
ProcessResult result =
workspace.runBuckCommand(
"build", target.getFullyQualifiedName(), "--config", "cxx.cflags=-g");
result.assertSuccess();
BuildTarget implicitTarget =
target.withAppendedFlavors(CxxStrip.RULE_FLAVOR, StripStyle.NON_GLOBAL_SYMBOLS.getFlavor());
Path outputPath = workspace.getPath(BuildTargets.getGenPath(filesystem, implicitTarget, "%s"));
assertThat(Files.exists(outputPath), is(true));
Path dsymPath =
tmp.getRoot()
.resolve(filesystem.getBuckPaths().getGenDir())
.resolve("Libraries/TestLibrary")
.resolve("TestLibrary#apple-dsym,macosx-x86_64,shared.dSYM");
assertThat(Files.exists(dsymPath), is(true));
AppleDsymTestUtil.checkDsymFileHasDebugSymbol("+[TestClass answer]", workspace, dsymPath);
}
@Test
public void frameworkContainsFrameworkDependencies() throws Exception {
assumeTrue(Platform.detect() == Platform.MACOS);
assumeTrue(AppleNativeIntegrationTestUtils.isApplePlatformAvailable(ApplePlatform.MACOSX));
ProjectWorkspace workspace =
TestDataHelper.createProjectWorkspaceForScenario(
this, "apple_library_with_library_dependencies", tmp);
workspace.setUp();
ProjectFilesystem filesystem =
TestProjectFilesystems.createProjectFilesystem(workspace.getDestPath());
BuildTarget target =
BuildTargetFactory.newInstance(
"//Libraries/TestLibrary:TestLibrary#framework,macosx-x86_64");
ProcessResult result = workspace.runBuckCommand("build", target.getFullyQualifiedName());
result.assertSuccess();
Path frameworkPath =
workspace.getPath(
BuildTargets.getGenPath(
filesystem,
target.withAppendedFlavors(
AppleDebugFormat.DWARF.getFlavor(),
AppleDescriptions.INCLUDE_FRAMEWORKS_FLAVOR),
"%s")
.resolve("TestLibrary.framework"));
assertThat(Files.exists(frameworkPath), is(true));
Path frameworksPath = frameworkPath.resolve("Frameworks");
assertThat(Files.exists(frameworksPath), is(true));
Path depPath = frameworksPath.resolve("TestLibraryDep.framework/TestLibraryDep");
assertThat(Files.exists(depPath), is(true));
assertThat(
workspace.runCommand("file", depPath.toString()).getStdout().get(),
containsString("dynamically linked shared library"));
Path transitiveDepPath =
frameworksPath.resolve("TestLibraryTransitiveDep.framework/TestLibraryTransitiveDep");
assertThat(Files.exists(transitiveDepPath), is(true));
assertThat(
workspace.runCommand("file", transitiveDepPath.toString()).getStdout().get(),
containsString("dynamically linked shared library"));
}
@Test
public void frameworkDependenciesDoNotContainTransitiveDependencies() throws Exception {
assumeTrue(Platform.detect() == Platform.MACOS);
assumeTrue(AppleNativeIntegrationTestUtils.isApplePlatformAvailable(ApplePlatform.MACOSX));
ProjectWorkspace workspace =
TestDataHelper.createProjectWorkspaceForScenario(
this, "apple_library_with_library_dependencies", tmp);
workspace.setUp();
ProjectFilesystem filesystem =
TestProjectFilesystems.createProjectFilesystem(workspace.getDestPath());
BuildTarget target =
BuildTargetFactory.newInstance(
"//Libraries/TestLibrary:TestLibrary#framework,macosx-x86_64");
ProcessResult result = workspace.runBuckCommand("build", target.getFullyQualifiedName());
result.assertSuccess();
Path frameworkPath =
workspace.getPath(
BuildTargets.getGenPath(
filesystem,
target.withAppendedFlavors(
AppleDebugFormat.DWARF.getFlavor(),
AppleDescriptions.INCLUDE_FRAMEWORKS_FLAVOR),
"%s")
.resolve("TestLibrary.framework"));
assertThat(Files.exists(frameworkPath), is(true));
Path frameworksPath = frameworkPath.resolve("Frameworks");
assertThat(Files.exists(frameworksPath), is(true));
Path depFrameworksPath = frameworksPath.resolve("TestLibraryDep.framework/Frameworks");
assertThat(Files.exists(depFrameworksPath), is(false));
}
@Test
public void noIncludeFrameworksDoesntContainFrameworkDependencies() throws Exception {
assumeTrue(Platform.detect() == Platform.MACOS);
assumeTrue(AppleNativeIntegrationTestUtils.isApplePlatformAvailable(ApplePlatform.MACOSX));
ProjectWorkspace workspace =
TestDataHelper.createProjectWorkspaceForScenario(
this, "apple_library_with_library_dependencies", tmp);
workspace.setUp();
ProjectFilesystem filesystem =
TestProjectFilesystems.createProjectFilesystem(workspace.getDestPath());
BuildTarget target =
BuildTargetFactory.newInstance(
"//Libraries/TestLibrary:TestLibrary#"
+ "dwarf-and-dsym,framework,macosx-x86_64,no-include-frameworks");
ProcessResult result = workspace.runBuckCommand("build", target.getFullyQualifiedName());
result.assertSuccess();
Path frameworkPath =
workspace.getPath(
BuildTargets.getGenPath(filesystem, target, "%s").resolve("TestLibrary.framework"));
assertThat(Files.exists(frameworkPath), is(true));
assertThat(Files.exists(frameworkPath.resolve("Resources/Info.plist")), is(true));
Path libraryPath = frameworkPath.resolve("TestLibrary");
assertThat(Files.exists(libraryPath), is(true));
assertThat(
workspace.runCommand("file", libraryPath.toString()).getStdout().get(),
containsString("dynamically linked shared library"));
Path frameworksPath = frameworkPath.resolve("Contents/Frameworks");
assertThat(Files.exists(frameworksPath), is(false));
}
@Test
public void testAppleLibraryExportedHeaderSymlinkTree() throws IOException {
assumeTrue(Platform.detect() == Platform.MACOS);
assumeTrue(AppleNativeIntegrationTestUtils.isApplePlatformAvailable(ApplePlatform.MACOSX));
ProjectWorkspace workspace =
TestDataHelper.createProjectWorkspaceForScenario(
this, "apple_library_header_symlink_tree", tmp);
workspace.setUp();
ProjectFilesystem filesystem =
TestProjectFilesystems.createProjectFilesystem(workspace.getDestPath());
BuildTarget buildTarget =
BuildTargetFactory.newInstance("//Libraries/TestLibrary:TestLibrary")
.withAppendedFlavors(
CxxDescriptionEnhancer.EXPORTED_HEADER_SYMLINK_TREE_FLAVOR,
HeaderMode.SYMLINK_TREE_ONLY.getFlavor());
ProcessResult result = workspace.runBuckCommand("build", buildTarget.getFullyQualifiedName());
result.assertSuccess();
Path inputPath = workspace.getPath(buildTarget.getBasePath()).toRealPath();
Path outputPath =
workspace.getPath(BuildTargets.getGenPath(filesystem, buildTarget, "%s")).toRealPath();
assertIsSymbolicLink(
outputPath.resolve("TestLibrary/PublicHeader.h"), inputPath.resolve("PublicHeader.h"));
}
@Test
public void testAppleLibraryIsHermetic() throws IOException {
assumeTrue(Platform.detect() == Platform.MACOS);
assumeTrue(AppleNativeIntegrationTestUtils.isApplePlatformAvailable(ApplePlatform.MACOSX));
ProjectWorkspace workspace =
TestDataHelper.createProjectWorkspaceForScenario(this, "apple_library_is_hermetic", tmp);
workspace.setUp();
ProjectFilesystem filesystem =
TestProjectFilesystems.createProjectFilesystem(workspace.getDestPath());
BuildTarget target =
BuildTargetFactory.newInstance(
"//Libraries/TestLibrary:TestLibrary#static,iphonesimulator-x86_64");
ProcessResult first =
workspace.runBuckCommand(
workspace.getPath("first"), "build", target.getFullyQualifiedName());
first.assertSuccess();
ProcessResult second =
workspace.runBuckCommand(
workspace.getPath("second"), "build", target.getFullyQualifiedName());
second.assertSuccess();
Path objectPath =
BuildTargets.getGenPath(
filesystem,
target.withFlavors(
InternalFlavor.of("compile-" + sanitize("TestClass.m.o")),
InternalFlavor.of("iphonesimulator-x86_64")),
"%s")
.resolve("TestClass.m.o");
MoreAsserts.assertContentsEqual(
workspace.getPath(Paths.get("first").resolve(objectPath)),
workspace.getPath(Paths.get("second").resolve(objectPath)));
Path libraryPath =
BuildTargets.getGenPath(filesystem, target, "%s").resolve("libTestLibrary.a");
MoreAsserts.assertContentsEqual(
workspace.getPath(Paths.get("first").resolve(libraryPath)),
workspace.getPath(Paths.get("second").resolve(libraryPath)));
}
@Test
public void testBuildEmptySourceAppleLibrary() throws Exception {
assumeTrue(Platform.detect() == Platform.MACOS);
assumeTrue(AppleNativeIntegrationTestUtils.isApplePlatformAvailable(ApplePlatform.MACOSX));
ProjectWorkspace workspace =
TestDataHelper.createProjectWorkspaceForScenario(this, "empty_source_targets", tmp);
workspace.setUp();
BuildTarget target =
workspace
.newBuildTarget("//:real-none#iphonesimulator-x86_64")
.withAppendedFlavors(CxxDescriptionEnhancer.SHARED_FLAVOR);
ProcessResult result = workspace.runBuckCommand("build", target.getFullyQualifiedName());
result.assertSuccess();
ProjectFilesystem filesystem =
TestProjectFilesystems.createProjectFilesystem(workspace.getDestPath());
Path binaryOutput =
workspace.getPath(BuildTargets.getGenPath(filesystem, target, "%s/libreal-none.dylib"));
assertThat(Files.exists(binaryOutput), is(true));
}
  @Test
  public void testBuildUsingPrefixHeaderFromCxxPrecompiledHeader() throws Exception {
    // Smoke test: a static iphonesimulator build whose prefix header comes from a
    // cxx_precompiled_header rule must build successfully.
    assumeTrue(Platform.detect() == Platform.MACOS);
    assumeTrue(AppleNativeIntegrationTestUtils.isApplePlatformAvailable(ApplePlatform.MACOSX));
    ProjectWorkspace workspace =
        TestDataHelper.createProjectWorkspaceForScenario(this, "precompiled_header", tmp);
    workspace.setUp();
    BuildTarget target = workspace.newBuildTarget("//:library#iphonesimulator-x86_64,static");
    ProcessResult result = workspace.runBuckCommand("build", target.getFullyQualifiedName());
    result.assertSuccess();
  }
  @Test
  public void testBuildUsingPrecompiledHeaderInOtherCell() throws Exception {
    // Smoke test: same as the prefix-header test, but the precompiled header rule
    // lives in a different Buck cell (multicell scenario); the build must still succeed.
    assumeTrue(Platform.detect() == Platform.MACOS);
    assumeTrue(AppleNativeIntegrationTestUtils.isApplePlatformAvailable(ApplePlatform.MACOSX));
    ProjectWorkspace workspace =
        TestDataHelper.createProjectWorkspaceForScenario(this, "multicell_precompiled_header", tmp);
    workspace.setUp();
    BuildTarget target = workspace.newBuildTarget("//:library#iphonesimulator-x86_64,static");
    ProcessResult result = workspace.runBuckCommand("build", target.getFullyQualifiedName());
    result.assertSuccess();
  }
@Test
public void testBuildAppleLibraryThatHasSwift() throws Exception {
assumeTrue(Platform.detect() == Platform.MACOS);
assumeTrue(AppleNativeIntegrationTestUtils.isApplePlatformAvailable(ApplePlatform.MACOSX));
ProjectWorkspace workspace =
TestDataHelper.createProjectWorkspaceForScenario(this, "empty_source_targets", tmp);
workspace.setUp();
BuildTarget target =
workspace
.newBuildTarget("//:none-swift#iphonesimulator-x86_64")
.withAppendedFlavors(CxxDescriptionEnhancer.SHARED_FLAVOR);
ProcessResult result = workspace.runBuckCommand("build", target.getFullyQualifiedName());
result.assertSuccess();
ProjectFilesystem filesystem =
TestProjectFilesystems.createProjectFilesystem(workspace.getDestPath());
Path binaryOutput =
workspace.getPath(BuildTargets.getGenPath(filesystem, target, "%s/libnone-swift.dylib"));
assertThat(Files.exists(binaryOutput), is(true));
assertThat(
workspace.runCommand("otool", "-L", binaryOutput.toString()).getStdout().get(),
containsString("libswiftCore.dylib"));
}
  // --- Swift/Objective-C interop matrix: each test delegates to testDylibSwiftScenario. ---

  @Test
  public void testBuildAppleLibraryWhereObjcUsesObjcDefinedInSwiftViaBridgingHeader()
      throws Exception {
    // ObjC in library Bar uses ObjC declared inside Swift (library Foo) via a bridging header.
    testDylibSwiftScenario(
        "apple_library_objc_uses_objc_from_swift_via_bridging_diff_lib", "Bar", "Foo");
  }
  @Test
  public void testBuildAppleLibraryWhereObjcUsesSwiftAcrossDifferentLibraries() throws Exception {
    testDylibSwiftScenario("apple_library_objc_uses_swift_diff_lib", "Bar", "Foo");
  }
  @Test
  public void testBuildAppleLibraryWhereSwiftUsesObjCAcrossDifferentLibraries() throws Exception {
    testDylibSwiftScenario("apple_library_swift_uses_objc_diff_lib", "Bar");
  }
  @Test
  public void testBuildAppleLibraryWhereSwiftUsesSwiftAcrossDifferentLibraries() throws Exception {
    testDylibSwiftScenario("apple_library_swift_uses_swift_diff_lib", "Bar");
  }
  @Test
  public void testBuildAppleLibraryWhereObjCUsesSwiftWithinSameLib() throws Exception {
    testDylibSwiftScenario("apple_library_objc_uses_swift_same_lib", "Mixed");
  }
  @Test
  public void testBuildAppleLibraryWhereSwiftUsesObjCWithinSameLib() throws Exception {
    testDylibSwiftScenario("apple_library_swift_uses_objc_same_lib", "Mixed");
  }
  @Test
  public void testBuildAppleLibraryWhereSwiftDefinedUsingExportFile() throws Exception {
    testDylibSwiftScenario("apple_library_swift_using_export_file", "Mixed");
  }

  // Convenience overload: the dylib under test is also the one expected to link
  // against the Swift runtime.
  public void testDylibSwiftScenario(String scenario, String targetName) throws Exception {
    testDylibSwiftScenario(scenario, targetName, targetName);
  }
public void testDylibSwiftScenario(
String scenario, String dylibTargetName, String swiftRuntimeDylibTargetName)
throws Exception {
assumeTrue(Platform.detect() == Platform.MACOS);
assumeTrue(AppleNativeIntegrationTestUtils.isApplePlatformAvailable(ApplePlatform.MACOSX));
ProjectWorkspace workspace =
TestDataHelper.createProjectWorkspaceForScenario(this, scenario, tmp);
workspace.setUp();
workspace.addBuckConfigLocalOption("apple", "use_swift_delegate", "false");
BuildTarget dylibTarget =
workspace
.newBuildTarget(String.format("//:%s#macosx-x86_64", dylibTargetName))
.withAppendedFlavors(CxxDescriptionEnhancer.SHARED_FLAVOR);
ProcessResult result = workspace.runBuckCommand("build", dylibTarget.getFullyQualifiedName());
result.assertSuccess();
ProjectFilesystem filesystem =
TestProjectFilesystems.createProjectFilesystem(workspace.getDestPath());
String dylibPathFormat = "%s/" + String.format("lib%s.dylib", dylibTargetName);
Path binaryOutput =
workspace.getPath(BuildTargets.getGenPath(filesystem, dylibTarget, dylibPathFormat));
assertThat(Files.exists(binaryOutput), is(true));
BuildTarget swiftRuntimeTarget =
workspace
.newBuildTarget(String.format("//:%s#macosx-x86_64", swiftRuntimeDylibTargetName))
.withAppendedFlavors(CxxDescriptionEnhancer.SHARED_FLAVOR);
String swiftRuntimePathFormat =
"%s/" + String.format("lib%s.dylib", swiftRuntimeDylibTargetName);
Path swiftRuntimeBinaryOutput =
workspace.getPath(
BuildTargets.getGenPath(filesystem, swiftRuntimeTarget, swiftRuntimePathFormat));
assertThat(
workspace.runCommand("otool", "-L", swiftRuntimeBinaryOutput.toString()).getStdout().get(),
containsString("libswiftCore.dylib"));
}
@Test
public void testModulewrap() throws Exception {
assumeTrue(Platform.detect() == Platform.MACOS);
assumeTrue(AppleNativeIntegrationTestUtils.isApplePlatformAvailable(ApplePlatform.MACOSX));
ProjectWorkspace workspace =
TestDataHelper.createProjectWorkspaceForScenario(
this, "apple_library_swift_uses_objc_same_lib", tmp);
workspace.setUp();
workspace.addBuckConfigLocalOption("apple", "use_swift_delegate", "false");
workspace.addBuckConfigLocalOption("swift", "use_modulewrap", "true");
BuildTarget dylibTarget =
workspace
.newBuildTarget("//:Mixed#dwarf-and-dsym,macosx-x86_64")
.withAppendedFlavors(CxxDescriptionEnhancer.SHARED_FLAVOR);
ProcessResult result = workspace.runBuckCommand("build", dylibTarget.getFullyQualifiedName());
result.assertSuccess();
ProjectFilesystem filesystem =
TestProjectFilesystems.createProjectFilesystem(workspace.getDestPath());
Path dwarfPath =
tmp.getRoot()
.resolve(filesystem.getBuckPaths().getGenDir())
.resolve("Mixed#apple-dsym,macosx-x86_64,shared.dSYM")
.resolve("Contents/Resources/DWARF/Mixed");
assertThat(Files.exists(dwarfPath), is(true));
AppleDsymTestUtil.checkDsymFileHasSection("__SWIFT", "__ast", workspace, dwarfPath);
}
  @Test
  // NOTE(review): "Briding" in the method name looks like a typo for "Bridging";
  // renaming would change the reported test id, so it is only flagged here.
  public void testBuildAppleLibraryUsingBridingHeaderAndSwiftDotH() throws Exception {
    // Smoke test: a module that imports its own generated Swift header ("<Module>-Swift.h")
    // through the bridging header must build successfully.
    assumeTrue(Platform.detect() == Platform.MACOS);
    assumeTrue(AppleNativeIntegrationTestUtils.isApplePlatformAvailable(ApplePlatform.MACOSX));
    ProjectWorkspace workspace =
        TestDataHelper.createProjectWorkspaceForScenario(
            this, "import_current_module_via_bridging_header", tmp);
    workspace.setUp();
    BuildTarget target = workspace.newBuildTarget("//:Greeter");
    ProcessResult result = workspace.runBuckCommand("build", target.getFullyQualifiedName());
    result.assertSuccess();
  }
private static void assertIsSymbolicLink(Path link, Path target) throws IOException {
assertTrue(Files.isSymbolicLink(link));
assertEquals(target, Files.readSymbolicLink(link));
}
}
| |
package org.orienteer.core.module;
import com.google.common.collect.Comparators;
import com.google.inject.ProvidedBy;
import com.orientechnologies.orient.core.db.ODatabaseSession;
import com.orientechnologies.orient.core.db.document.ODatabaseDocument;
import com.orientechnologies.orient.core.hook.ODocumentHookAbstract;
import com.orientechnologies.orient.core.hook.ORecordHook;
import com.orientechnologies.orient.core.metadata.schema.OClass.INDEX_TYPE;
import com.orientechnologies.orient.core.metadata.schema.OClass;
import com.orientechnologies.orient.core.metadata.schema.OSchema;
import com.orientechnologies.orient.core.metadata.schema.OType;
import com.orientechnologies.orient.core.metadata.security.OUser;
import com.orientechnologies.orient.core.record.impl.ODocument;
import com.orientechnologies.orient.core.sql.executor.OResultSet;
import com.orientechnologies.orient.core.type.ODocumentWrapper;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.wicket.Component;
import org.apache.wicket.resource.loader.IStringResourceLoader;
import org.apache.wicket.util.string.Strings;
import org.orienteer.core.OClassDomain;
import org.orienteer.core.OrienteerWebApplication;
import org.orienteer.core.component.visualizer.UIVisualizersRegistry;
import org.orienteer.core.dao.DAO;
import org.orienteer.core.dao.ODocumentWrapperProvider;
import org.orienteer.core.dao.OrienteerOClass;
import org.orienteer.core.dao.OrienteerOProperty;
import org.orienteer.core.util.OSchemaHelper;
import org.orienteer.transponder.annotation.DefaultValue;
import org.orienteer.transponder.annotation.EntityPropertyIndex;
import org.orienteer.transponder.annotation.EntityType;
import org.orienteer.transponder.annotation.Query;
import org.orienteer.transponder.annotation.common.Sudo;
import org.orienteer.transponder.orientdb.IODocumentWrapper;
import org.orienteer.transponder.orientdb.ODriver;
import org.orienteer.transponder.orientdb.OrientDBProperty;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import ru.ydn.wicket.wicketorientdb.utils.DBClosure;
import javax.inject.Singleton;
import java.util.LinkedList;
import java.util.List;
import java.util.Locale;
/**
 * {@link IOrienteerModule} to simplify localization of an application.
 * Stores string resources as {@code OLocalization} documents in the database and
 * serves them to Wicket through {@link OrienteerStringResourceLoader}.
 */
@Singleton
public class OrienteerLocalizationModule extends AbstractOrienteerModule {

	public static final String NAME = "localization";
	// Name of the extra property added to OUser to hold a user's preferred locale.
	public static final String PROP_OUSER_LOCALE = "locale";
	public static final Logger LOG = LoggerFactory.getLogger(OrienteerLocalizationModule.class);

	public OrienteerLocalizationModule()
	{
		// Module name "localization", schema version 2.
		super(NAME, 2);
	}

	/**
	 * Creates the OLocalization schema class from the {@link IOLocalization} DAO
	 * description and adds the per-user locale property to OUser.
	 */
	@Override
	public ODocument onInstall(OrienteerWebApplication app, ODatabaseSession db) {
		OSchemaHelper helper = OSchemaHelper.bind(db);
		DAO.define(IOLocalization.class);
		helper.oClass(OUser.CLASS_NAME).oProperty(PROP_OUSER_LOCALE, OType.STRING);
		// No module document is needed; null keeps the default.
		return null;
	}

	/** Drops the OLocalization class if present. The OUser locale property is left in place. */
	@Override
	public void onUninstall(OrienteerWebApplication app, ODatabaseSession db) {
		// NOTE(review): reads the schema from app.getDatabaseSession() rather than the
		// passed-in db session -- confirm this asymmetry with onInstall is intentional.
		OSchema schema = app.getDatabaseSession().getMetadata().getSchema();
		if (schema.existsClass(IOLocalization.CLASS_NAME)) {
			schema.dropClass(IOLocalization.CLASS_NAME);
		}
	}

	/** Registers the DB-backed resource loader and the cache-invalidation record hook. */
	@Override
	public void onInitialize(OrienteerWebApplication app, ODatabaseSession db) {
		app.getResourceSettings().getStringResourceLoaders().add(new OrienteerStringResourceLoader());
		app.getOrientDbSettings().addORecordHooks(LocalizationInvalidationHook.class);
	}

	/** Removes everything registered by {@link #onInitialize}. */
	@Override
	public void onDestroy(OrienteerWebApplication app, ODatabaseSession db) {
		app.getResourceSettings().getStringResourceLoaders()
			.removeIf(iStringResourceLoader -> iStringResourceLoader instanceof OrienteerStringResourceLoader);
		app.getOrientDbSettings().removeORecordHooks(LocalizationInvalidationHook.class);
	}

	/**
	 * {@link ORecordHook} to invalidate localization cache if something changed
	 */
	public static class LocalizationInvalidationHook extends ODocumentHookAbstract {

		public LocalizationInvalidationHook(ODatabaseDocument database) {
			super(database);
			// Only react to OLocalization documents.
			setIncludeClasses(IOLocalization.CLASS_NAME);
		}

		// Clears Wicket's localizer cache so changed values are re-read from the DB.
		private void invalidateCache()
		{
			OrienteerWebApplication app = OrienteerWebApplication.lookupApplication();
			if(app!=null)
			{
				app.getResourceSettings().getLocalizer().clearCache();
			}
		}

		@Override
		public void onRecordAfterCreate(ODocument iDocument) {
			invalidateCache();
		}

		@Override
		public void onRecordAfterUpdate(ODocument iDocument) {
			invalidateCache();
		}

		@Override
		public void onRecordAfterDelete(ODocument iDocument) {
			invalidateCache();
		}

		@Override
		public DISTRIBUTED_EXECUTION_MODE getDistributedExecutionMode() {
			// Run on both originating and replicated nodes so every node clears its cache.
			return DISTRIBUTED_EXECUTION_MODE.BOTH;
		}
	};

	/**
	 * Orienteer implementation of {@link IStringResourceLoader} which tries to load string resources from database
	 */
	private static class OrienteerStringResourceLoader implements IStringResourceLoader {

		@Override
		public String loadStringResource(Class<?> clazz, String key,
				Locale locale, String style, String variation) {
			return loadStringResource(key, locale, style, variation);
		}

		@Override
		public String loadStringResource(Component component, String key,
				Locale locale, String style, String variation) {
			return loadStringResource(key, locale, style, variation);
		}

		/**
		 * Finds the best-matching localization for the given (key, language, style,
		 * variation) coordinates. When no exact match exists, a new entry is stored
		 * so missing keys become visible for later translation; returns null when
		 * there is no active exact match.
		 */
		public String loadStringResource(String key, Locale locale, String style, String variation) {
			if (Strings.isEmpty(key)) {
				LOG.warn("Try to load string resource with empty key!");
			}
			String language = locale != null ? locale.getLanguage() : null;
			// Prototype entry describing what we are looking for.
			IOLocalization localization = DAO.create(IOLocalization.class)
					.setKey(key)
					.setLanguage(language)
					.setStyle(style)
					.setVariation(variation);
			List<IOLocalization> others = localization.queryOthersWithTheSameKey();
			IOLocalization bestMatch = null;
			if(others!=null && !others.isEmpty())
			{
				//Minus is needed to have maximum first
				others.sort((a, b) -> -Integer.compare(a.computeScore(localization), b.computeScore(localization)));
				bestMatch = others.get(0);
			}
			if(bestMatch!=null && bestMatch.isTheBestMatch(localization)) {
				// Inactive entries exist in the DB but are deliberately not served.
				return bestMatch.isActive()?bestMatch.getValue():null;
			} else {
				// Record the miss (under elevated rights) so translators can fill it in later.
				localization.sudoSave();
				return null;
			}
		}
	}

	/**
	 * DAO for OLocalization
	 */
	@ProvidedBy(ODocumentWrapperProvider.class)
	@EntityType(value = IOLocalization.CLASS_NAME)
	@OrienteerOClass(nameProperty = "key")
	public static interface IOLocalization extends IODocumentWrapper {

		public static final String CLASS_NAME = "OLocalization";

		// Non-unique index: many language/style/variation rows may share one key.
		@EntityPropertyIndex(name = "key_index", type = ODriver.OINDEX_NOTUNIQUE)
		public String getKey();
		public IOLocalization setKey(String value);

		public String getLanguage();
		public IOLocalization setLanguage(String value);

		public String getStyle();
		public IOLocalization setStyle(String value);

		public String getVariation();
		public IOLocalization setVariation(String value);

		// Inactive entries are placeholders awaiting translation.
		@OrientDBProperty(defaultValue = "false")
		@DefaultValue("false")
		public boolean isActive();
		public IOLocalization setActive(boolean value);

		@OrienteerOProperty(visualization = UIVisualizersRegistry.VISUALIZER_TEXTAREA)
		public String getValue();
		public IOLocalization setValue(String value);

		// Marks the entry active iff it has both a language and a value; returns the new state.
		public default boolean checkActive() {
			setActive(!Strings.isEmpty(getLanguage()) && !Strings.isEmpty(getValue()));
			return isActive();
		}

		/**
		 * Bit-weighted similarity to {@code target}: language match contributes 4,
		 * style 2, variation 1 -- so a full (exact) match scores 7.
		 */
		public default int computeScore(IOLocalization target) {
			int score = 0;
			if (Strings.isEqual(target.getLanguage(), getLanguage())) {
				score |= 1<<2;
			}
			if (Strings.isEqual(target.getStyle(), getStyle())) {
				score |= 1<<1;
			}
			if (Strings.isEqual(target.getVariation(), getVariation())) {
				score |= 1;
			}
			return score;
		}

		// True only when language, style and variation all match (score == 7).
		public default boolean isTheBestMatch(IOLocalization target) {
			return computeScore(target) == 7;
		}

		// Executed under elevated (sudo) database rights.
		@Sudo
		@Query("select from "+CLASS_NAME+" where key = :key")
		public List<IOLocalization> queryByKey(String key);

		public default List<IOLocalization> queryOthersWithTheSameKey() {
			return queryByKey(getKey());
		}

		// Saves regardless of the current user's database permissions.
		@Sudo
		public default IOLocalization sudoSave() {
			save();
			return this;
		}
	}
}
| |
package org.drip.analytics.holset;
/*
* -*- mode: java; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
*/
/*
* GENERATED on Fri Jan 11 19:54:07 EST 2013 ---- DO NOT DELETE
*/
/*!
* Copyright (C) 2016 Lakshmi Krishnamurthy
* Copyright (C) 2015 Lakshmi Krishnamurthy
* Copyright (C) 2014 Lakshmi Krishnamurthy
* Copyright (C) 2013 Lakshmi Krishnamurthy
* Copyright (C) 2012 Lakshmi Krishnamurthy
* Copyright (C) 2011 Lakshmi Krishnamurthy
*
* This file is part of CreditAnalytics, a free-software/open-source library for
* fixed income analysts and developers - http://www.credit-trader.org
*
* CreditAnalytics is a free, full featured, fixed income credit analytics library, developed with a special focus
* towards the needs of the bonds and credit products community.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
public class ZARHoliday implements org.drip.analytics.holset.LocationHoliday {
	public ZARHoliday()
	{
		// No state to initialize: the holiday set is built on demand in getHolidaySet().
	}
public java.lang.String getHolidayLoc()
{
return "ZAR";
}
public org.drip.analytics.eventday.Locale getHolidaySet()
{
org.drip.analytics.eventday.Locale lh = new
org.drip.analytics.eventday.Locale();
lh.addStaticHoliday ("01-JAN-1998", "New Years Day");
lh.addStaticHoliday ("10-APR-1998", "Good Friday");
lh.addStaticHoliday ("13-APR-1998", "Family Day");
lh.addStaticHoliday ("27-APR-1998", "Freedom Day");
lh.addStaticHoliday ("01-MAY-1998", "Workers Day");
lh.addStaticHoliday ("16-JUN-1998", "Youths Day");
lh.addStaticHoliday ("10-AUG-1998", "National Womens Day Observed");
lh.addStaticHoliday ("24-SEP-1998", "Heritage Day");
lh.addStaticHoliday ("16-DEC-1998", "Day of Reconciliation");
lh.addStaticHoliday ("25-DEC-1998", "Christmas Day");
lh.addStaticHoliday ("01-JAN-1999", "New Years Day");
lh.addStaticHoliday ("22-MAR-1999", "Human Rights Day Observed");
lh.addStaticHoliday ("02-APR-1999", "Good Friday");
lh.addStaticHoliday ("05-APR-1999", "Family Day");
lh.addStaticHoliday ("27-APR-1999", "Freedom Day");
lh.addStaticHoliday ("16-JUN-1999", "Youths Day");
lh.addStaticHoliday ("09-AUG-1999", "National Womens Day");
lh.addStaticHoliday ("24-SEP-1999", "Heritage Day");
lh.addStaticHoliday ("16-DEC-1999", "Day of Reconciliation");
lh.addStaticHoliday ("27-DEC-1999", "Day of Goodwill Observed");
lh.addStaticHoliday ("21-MAR-2000", "Human Rights Day");
lh.addStaticHoliday ("21-APR-2000", "Good Friday");
lh.addStaticHoliday ("24-APR-2000", "Family Day");
lh.addStaticHoliday ("27-APR-2000", "Freedom Day");
lh.addStaticHoliday ("01-MAY-2000", "Workers Day");
lh.addStaticHoliday ("16-JUN-2000", "Youths Day");
lh.addStaticHoliday ("09-AUG-2000", "National Womens Day");
lh.addStaticHoliday ("25-SEP-2000", "Heritage Day Observed");
lh.addStaticHoliday ("25-DEC-2000", "Christmas Day");
lh.addStaticHoliday ("26-DEC-2000", "Day of Goodwill");
lh.addStaticHoliday ("01-JAN-2001", "New Years Day");
lh.addStaticHoliday ("21-MAR-2001", "Human Rights Day");
lh.addStaticHoliday ("13-APR-2001", "Good Friday");
lh.addStaticHoliday ("16-APR-2001", "Family Day");
lh.addStaticHoliday ("27-APR-2001", "Freedom Day");
lh.addStaticHoliday ("01-MAY-2001", "Workers Day");
lh.addStaticHoliday ("09-AUG-2001", "National Womens Day");
lh.addStaticHoliday ("24-SEP-2001", "Heritage Day");
lh.addStaticHoliday ("17-DEC-2001", "Day of Reconciliation Observed");
lh.addStaticHoliday ("25-DEC-2001", "Christmas Day");
lh.addStaticHoliday ("26-DEC-2001", "Day of Goodwill");
lh.addStaticHoliday ("01-JAN-2002", "New Years Day");
lh.addStaticHoliday ("21-MAR-2002", "Human Rights Day");
lh.addStaticHoliday ("29-MAR-2002", "Good Friday");
lh.addStaticHoliday ("01-APR-2002", "Family Day");
lh.addStaticHoliday ("01-MAY-2002", "Workers Day");
lh.addStaticHoliday ("17-JUN-2002", "Youths Day Observed");
lh.addStaticHoliday ("09-AUG-2002", "National Womens Day");
lh.addStaticHoliday ("24-SEP-2002", "Heritage Day");
lh.addStaticHoliday ("16-DEC-2002", "Day of Reconciliation");
lh.addStaticHoliday ("25-DEC-2002", "Christmas Day");
lh.addStaticHoliday ("26-DEC-2002", "Day of Goodwill");
lh.addStaticHoliday ("01-JAN-2003", "New Years Day");
lh.addStaticHoliday ("21-MAR-2003", "Human Rights Day");
lh.addStaticHoliday ("18-APR-2003", "Good Friday");
lh.addStaticHoliday ("21-APR-2003", "Family Day");
lh.addStaticHoliday ("28-APR-2003", "Freedom Day Observed");
lh.addStaticHoliday ("01-MAY-2003", "Workers Day");
lh.addStaticHoliday ("16-JUN-2003", "Youths Day");
lh.addStaticHoliday ("24-SEP-2003", "Heritage Day");
lh.addStaticHoliday ("16-DEC-2003", "Day of Reconciliation");
lh.addStaticHoliday ("25-DEC-2003", "Christmas Day");
lh.addStaticHoliday ("26-DEC-2003", "Day of Goodwill");
lh.addStaticHoliday ("01-JAN-2004", "New Years Day");
lh.addStaticHoliday ("22-MAR-2004", "Human Rights Day Observed");
lh.addStaticHoliday ("09-APR-2004", "Good Friday");
lh.addStaticHoliday ("12-APR-2004", "Family Day");
lh.addStaticHoliday ("27-APR-2004", "Freedom Day");
lh.addStaticHoliday ("16-JUN-2004", "Youths Day");
lh.addStaticHoliday ("09-AUG-2004", "National Womens Day");
lh.addStaticHoliday ("24-SEP-2004", "Heritage Day");
lh.addStaticHoliday ("16-DEC-2004", "Day of Reconciliation");
lh.addStaticHoliday ("27-DEC-2004", "Day of Goodwill Observed");
lh.addStaticHoliday ("21-MAR-2005", "Human Rights Day");
lh.addStaticHoliday ("25-MAR-2005", "Good Friday");
lh.addStaticHoliday ("28-MAR-2005", "Family Day");
lh.addStaticHoliday ("27-APR-2005", "Freedom Day");
lh.addStaticHoliday ("02-MAY-2005", "Workers Day Observed");
lh.addStaticHoliday ("16-JUN-2005", "Youths Day");
lh.addStaticHoliday ("09-AUG-2005", "National Womens Day");
lh.addStaticHoliday ("16-DEC-2005", "Day of Reconciliation");
lh.addStaticHoliday ("26-DEC-2005", "Day of Goodwill");
lh.addStaticHoliday ("02-JAN-2006", "New Years Day Observed");
lh.addStaticHoliday ("21-MAR-2006", "Human Rights Day");
lh.addStaticHoliday ("14-APR-2006", "Good Friday");
lh.addStaticHoliday ("17-APR-2006", "Family Day");
lh.addStaticHoliday ("27-APR-2006", "Freedom Day");
lh.addStaticHoliday ("01-MAY-2006", "Workers Day");
lh.addStaticHoliday ("16-JUN-2006", "Youths Day");
lh.addStaticHoliday ("09-AUG-2006", "National Womens Day");
lh.addStaticHoliday ("25-SEP-2006", "Heritage Day Observed");
lh.addStaticHoliday ("25-DEC-2006", "Christmas Day");
lh.addStaticHoliday ("26-DEC-2006", "Day of Goodwill");
lh.addStaticHoliday ("01-JAN-2007", "New Years Day");
lh.addStaticHoliday ("21-MAR-2007", "Human Rights Day");
lh.addStaticHoliday ("06-APR-2007", "Good Friday");
lh.addStaticHoliday ("09-APR-2007", "Family Day");
lh.addStaticHoliday ("27-APR-2007", "Freedom Day");
lh.addStaticHoliday ("01-MAY-2007", "Workers Day");
lh.addStaticHoliday ("09-AUG-2007", "National Womens Day");
lh.addStaticHoliday ("24-SEP-2007", "Heritage Day");
lh.addStaticHoliday ("17-DEC-2007", "Day of Reconciliation Observed");
lh.addStaticHoliday ("25-DEC-2007", "Christmas Day");
lh.addStaticHoliday ("26-DEC-2007", "Day of Goodwill");
lh.addStaticHoliday ("01-JAN-2008", "New Years Day");
lh.addStaticHoliday ("21-MAR-2008", "Human Rights Day");
lh.addStaticHoliday ("24-MAR-2008", "Family Day");
lh.addStaticHoliday ("28-APR-2008", "Freedom Day Observed");
lh.addStaticHoliday ("01-MAY-2008", "Workers Day");
lh.addStaticHoliday ("16-JUN-2008", "Youths Day");
lh.addStaticHoliday ("24-SEP-2008", "Heritage Day");
lh.addStaticHoliday ("16-DEC-2008", "Day of Reconciliation");
lh.addStaticHoliday ("25-DEC-2008", "Christmas Day");
lh.addStaticHoliday ("26-DEC-2008", "Day of Goodwill");
lh.addStaticHoliday ("01-JAN-2009", "New Years Day");
lh.addStaticHoliday ("10-APR-2009", "Good Friday");
lh.addStaticHoliday ("13-APR-2009", "Family Day");
lh.addStaticHoliday ("27-APR-2009", "Freedom Day");
lh.addStaticHoliday ("01-MAY-2009", "Workers Day");
lh.addStaticHoliday ("16-JUN-2009", "Youths Day");
lh.addStaticHoliday ("10-AUG-2009", "National Womens Day Observed");
lh.addStaticHoliday ("24-SEP-2009", "Heritage Day");
lh.addStaticHoliday ("16-DEC-2009", "Day of Reconciliation");
lh.addStaticHoliday ("25-DEC-2009", "Christmas Day");
lh.addStaticHoliday ("01-JAN-2010", "New Years Day");
lh.addStaticHoliday ("22-MAR-2010", "Human Rights Day Observed");
lh.addStaticHoliday ("02-APR-2010", "Good Friday");
lh.addStaticHoliday ("05-APR-2010", "Family Day");
lh.addStaticHoliday ("27-APR-2010", "Freedom Day");
lh.addStaticHoliday ("16-JUN-2010", "Youths Day");
lh.addStaticHoliday ("09-AUG-2010", "National Womens Day");
lh.addStaticHoliday ("24-SEP-2010", "Heritage Day");
lh.addStaticHoliday ("16-DEC-2010", "Day of Reconciliation");
lh.addStaticHoliday ("27-DEC-2010", "Day of Goodwill Observed");
lh.addStaticHoliday ("21-MAR-2011", "Human Rights Day");
lh.addStaticHoliday ("22-APR-2011", "Good Friday");
lh.addStaticHoliday ("25-APR-2011", "Family Day");
lh.addStaticHoliday ("27-APR-2011", "Freedom Day");
lh.addStaticHoliday ("02-MAY-2011", "Workers Day Observed");
lh.addStaticHoliday ("16-JUN-2011", "Youths Day");
lh.addStaticHoliday ("09-AUG-2011", "National Womens Day");
lh.addStaticHoliday ("16-DEC-2011", "Day of Reconciliation");
lh.addStaticHoliday ("26-DEC-2011", "Day of Goodwill");
lh.addStaticHoliday ("02-JAN-2012", "New Years Day Observed");
lh.addStaticHoliday ("21-MAR-2012", "Human Rights Day");
lh.addStaticHoliday ("06-APR-2012", "Good Friday");
lh.addStaticHoliday ("09-APR-2012", "Family Day");
lh.addStaticHoliday ("27-APR-2012", "Freedom Day");
lh.addStaticHoliday ("01-MAY-2012", "Workers Day");
lh.addStaticHoliday ("09-AUG-2012", "National Womens Day");
lh.addStaticHoliday ("24-SEP-2012", "Heritage Day");
lh.addStaticHoliday ("17-DEC-2012", "Day of Reconciliation Observed");
lh.addStaticHoliday ("25-DEC-2012", "Christmas Day");
lh.addStaticHoliday ("26-DEC-2012", "Day of Goodwill");
lh.addStaticHoliday ("01-JAN-2013", "New Years Day");
lh.addStaticHoliday ("21-MAR-2013", "Human Rights Day");
lh.addStaticHoliday ("29-MAR-2013", "Good Friday");
lh.addStaticHoliday ("01-APR-2013", "Family Day");
lh.addStaticHoliday ("01-MAY-2013", "Workers Day");
lh.addStaticHoliday ("17-JUN-2013", "Youths Day Observed");
lh.addStaticHoliday ("09-AUG-2013", "National Womens Day");
lh.addStaticHoliday ("24-SEP-2013", "Heritage Day");
lh.addStaticHoliday ("16-DEC-2013", "Day of Reconciliation");
lh.addStaticHoliday ("25-DEC-2013", "Christmas Day");
lh.addStaticHoliday ("26-DEC-2013", "Day of Goodwill");
lh.addStaticHoliday ("01-JAN-2014", "New Years Day");
lh.addStaticHoliday ("21-MAR-2014", "Human Rights Day");
lh.addStaticHoliday ("18-APR-2014", "Good Friday");
lh.addStaticHoliday ("21-APR-2014", "Family Day");
lh.addStaticHoliday ("28-APR-2014", "Freedom Day Observed");
lh.addStaticHoliday ("01-MAY-2014", "Workers Day");
lh.addStaticHoliday ("16-JUN-2014", "Youths Day");
lh.addStaticHoliday ("24-SEP-2014", "Heritage Day");
lh.addStaticHoliday ("16-DEC-2014", "Day of Reconciliation");
lh.addStaticHoliday ("25-DEC-2014", "Christmas Day");
lh.addStaticHoliday ("26-DEC-2014", "Day of Goodwill");
lh.addStaticHoliday ("01-JAN-2015", "New Years Day");
lh.addStaticHoliday ("03-APR-2015", "Good Friday");
lh.addStaticHoliday ("06-APR-2015", "Family Day");
lh.addStaticHoliday ("27-APR-2015", "Freedom Day");
lh.addStaticHoliday ("01-MAY-2015", "Workers Day");
lh.addStaticHoliday ("16-JUN-2015", "Youths Day");
lh.addStaticHoliday ("10-AUG-2015", "National Womens Day Observed");
lh.addStaticHoliday ("24-SEP-2015", "Heritage Day");
lh.addStaticHoliday ("16-DEC-2015", "Day of Reconciliation");
lh.addStaticHoliday ("25-DEC-2015", "Christmas Day");
lh.addStaticHoliday ("01-JAN-2016", "New Years Day");
lh.addStaticHoliday ("21-MAR-2016", "Human Rights Day");
lh.addStaticHoliday ("25-MAR-2016", "Good Friday");
lh.addStaticHoliday ("28-MAR-2016", "Family Day");
lh.addStaticHoliday ("27-APR-2016", "Freedom Day");
lh.addStaticHoliday ("02-MAY-2016", "Workers Day Observed");
lh.addStaticHoliday ("16-JUN-2016", "Youths Day");
lh.addStaticHoliday ("09-AUG-2016", "National Womens Day");
lh.addStaticHoliday ("16-DEC-2016", "Day of Reconciliation");
lh.addStaticHoliday ("26-DEC-2016", "Day of Goodwill");
lh.addStaticHoliday ("02-JAN-2017", "New Years Day Observed");
lh.addStaticHoliday ("21-MAR-2017", "Human Rights Day");
lh.addStaticHoliday ("14-APR-2017", "Good Friday");
lh.addStaticHoliday ("17-APR-2017", "Family Day");
lh.addStaticHoliday ("27-APR-2017", "Freedom Day");
lh.addStaticHoliday ("01-MAY-2017", "Workers Day");
lh.addStaticHoliday ("16-JUN-2017", "Youths Day");
lh.addStaticHoliday ("09-AUG-2017", "National Womens Day");
lh.addStaticHoliday ("25-SEP-2017", "Heritage Day Observed");
lh.addStaticHoliday ("25-DEC-2017", "Christmas Day");
lh.addStaticHoliday ("26-DEC-2017", "Day of Goodwill");
lh.addStaticHoliday ("01-JAN-2018", "New Years Day");
lh.addStaticHoliday ("21-MAR-2018", "Human Rights Day");
lh.addStaticHoliday ("30-MAR-2018", "Good Friday");
lh.addStaticHoliday ("02-APR-2018", "Family Day");
lh.addStaticHoliday ("27-APR-2018", "Freedom Day");
lh.addStaticHoliday ("01-MAY-2018", "Workers Day");
lh.addStaticHoliday ("09-AUG-2018", "National Womens Day");
lh.addStaticHoliday ("24-SEP-2018", "Heritage Day");
lh.addStaticHoliday ("17-DEC-2018", "Day of Reconciliation Observed");
lh.addStaticHoliday ("25-DEC-2018", "Christmas Day");
lh.addStaticHoliday ("26-DEC-2018", "Day of Goodwill");
lh.addStaticHoliday ("01-JAN-2019", "New Years Day");
lh.addStaticHoliday ("21-MAR-2019", "Human Rights Day");
lh.addStaticHoliday ("19-APR-2019", "Good Friday");
lh.addStaticHoliday ("22-APR-2019", "Family Day");
lh.addStaticHoliday ("01-MAY-2019", "Workers Day");
lh.addStaticHoliday ("17-JUN-2019", "Youths Day Observed");
lh.addStaticHoliday ("09-AUG-2019", "National Womens Day");
lh.addStaticHoliday ("24-SEP-2019", "Heritage Day");
lh.addStaticHoliday ("16-DEC-2019", "Day of Reconciliation");
lh.addStaticHoliday ("25-DEC-2019", "Christmas Day");
lh.addStaticHoliday ("26-DEC-2019", "Day of Goodwill");
lh.addStaticHoliday ("01-JAN-2020", "New Years Day");
lh.addStaticHoliday ("10-APR-2020", "Good Friday");
lh.addStaticHoliday ("13-APR-2020", "Family Day");
lh.addStaticHoliday ("27-APR-2020", "Freedom Day");
lh.addStaticHoliday ("01-MAY-2020", "Workers Day");
lh.addStaticHoliday ("16-JUN-2020", "Youths Day");
lh.addStaticHoliday ("10-AUG-2020", "National Womens Day Observed");
lh.addStaticHoliday ("24-SEP-2020", "Heritage Day");
lh.addStaticHoliday ("16-DEC-2020", "Day of Reconciliation");
lh.addStaticHoliday ("25-DEC-2020", "Christmas Day");
lh.addStaticHoliday ("01-JAN-2021", "New Years Day");
lh.addStaticHoliday ("22-MAR-2021", "Human Rights Day Observed");
lh.addStaticHoliday ("02-APR-2021", "Good Friday");
lh.addStaticHoliday ("05-APR-2021", "Family Day");
lh.addStaticHoliday ("27-APR-2021", "Freedom Day");
lh.addStaticHoliday ("16-JUN-2021", "Youths Day");
lh.addStaticHoliday ("09-AUG-2021", "National Womens Day");
lh.addStaticHoliday ("24-SEP-2021", "Heritage Day");
lh.addStaticHoliday ("16-DEC-2021", "Day of Reconciliation");
lh.addStaticHoliday ("27-DEC-2021", "Day of Goodwill Observed");
lh.addStaticHoliday ("21-MAR-2022", "Human Rights Day");
lh.addStaticHoliday ("15-APR-2022", "Good Friday");
lh.addStaticHoliday ("18-APR-2022", "Family Day");
lh.addStaticHoliday ("27-APR-2022", "Freedom Day");
lh.addStaticHoliday ("02-MAY-2022", "Workers Day Observed");
lh.addStaticHoliday ("16-JUN-2022", "Youths Day");
lh.addStaticHoliday ("09-AUG-2022", "National Womens Day");
lh.addStaticHoliday ("16-DEC-2022", "Day of Reconciliation");
lh.addStaticHoliday ("26-DEC-2022", "Day of Goodwill");
lh.addStaticHoliday ("02-JAN-2023", "New Years Day Observed");
lh.addStaticHoliday ("21-MAR-2023", "Human Rights Day");
lh.addStaticHoliday ("07-APR-2023", "Good Friday");
lh.addStaticHoliday ("10-APR-2023", "Family Day");
lh.addStaticHoliday ("27-APR-2023", "Freedom Day");
lh.addStaticHoliday ("01-MAY-2023", "Workers Day");
lh.addStaticHoliday ("16-JUN-2023", "Youths Day");
lh.addStaticHoliday ("09-AUG-2023", "National Womens Day");
lh.addStaticHoliday ("25-SEP-2023", "Heritage Day Observed");
lh.addStaticHoliday ("25-DEC-2023", "Christmas Day");
lh.addStaticHoliday ("26-DEC-2023", "Day of Goodwill");
lh.addStaticHoliday ("01-JAN-2024", "New Years Day");
lh.addStaticHoliday ("21-MAR-2024", "Human Rights Day");
lh.addStaticHoliday ("29-MAR-2024", "Good Friday");
lh.addStaticHoliday ("01-APR-2024", "Family Day");
lh.addStaticHoliday ("01-MAY-2024", "Workers Day");
lh.addStaticHoliday ("17-JUN-2024", "Youths Day Observed");
lh.addStaticHoliday ("09-AUG-2024", "National Womens Day");
lh.addStaticHoliday ("24-SEP-2024", "Heritage Day");
lh.addStaticHoliday ("16-DEC-2024", "Day of Reconciliation");
lh.addStaticHoliday ("25-DEC-2024", "Christmas Day");
lh.addStaticHoliday ("26-DEC-2024", "Day of Goodwill");
lh.addStaticHoliday ("01-JAN-2025", "New Years Day");
lh.addStaticHoliday ("21-MAR-2025", "Human Rights Day");
lh.addStaticHoliday ("18-APR-2025", "Good Friday");
lh.addStaticHoliday ("21-APR-2025", "Family Day");
lh.addStaticHoliday ("28-APR-2025", "Freedom Day Observed");
lh.addStaticHoliday ("01-MAY-2025", "Workers Day");
lh.addStaticHoliday ("16-JUN-2025", "Youths Day");
lh.addStaticHoliday ("24-SEP-2025", "Heritage Day");
lh.addStaticHoliday ("16-DEC-2025", "Day of Reconciliation");
lh.addStaticHoliday ("25-DEC-2025", "Christmas Day");
lh.addStaticHoliday ("26-DEC-2025", "Day of Goodwill");
lh.addStaticHoliday ("01-JAN-2026", "New Years Day");
lh.addStaticHoliday ("03-APR-2026", "Good Friday");
lh.addStaticHoliday ("06-APR-2026", "Family Day");
lh.addStaticHoliday ("27-APR-2026", "Freedom Day");
lh.addStaticHoliday ("01-MAY-2026", "Workers Day");
lh.addStaticHoliday ("16-JUN-2026", "Youths Day");
lh.addStaticHoliday ("10-AUG-2026", "National Womens Day Observed");
lh.addStaticHoliday ("24-SEP-2026", "Heritage Day");
lh.addStaticHoliday ("16-DEC-2026", "Day of Reconciliation");
lh.addStaticHoliday ("25-DEC-2026", "Christmas Day");
lh.addStaticHoliday ("01-JAN-2027", "New Years Day");
lh.addStaticHoliday ("22-MAR-2027", "Human Rights Day Observed");
lh.addStaticHoliday ("26-MAR-2027", "Good Friday");
lh.addStaticHoliday ("29-MAR-2027", "Family Day");
lh.addStaticHoliday ("27-APR-2027", "Freedom Day");
lh.addStaticHoliday ("16-JUN-2027", "Youths Day");
lh.addStaticHoliday ("09-AUG-2027", "National Womens Day");
lh.addStaticHoliday ("24-SEP-2027", "Heritage Day");
lh.addStaticHoliday ("16-DEC-2027", "Day of Reconciliation");
lh.addStaticHoliday ("27-DEC-2027", "Day of Goodwill Observed");
lh.addStaticHoliday ("21-MAR-2028", "Human Rights Day");
lh.addStaticHoliday ("14-APR-2028", "Good Friday");
lh.addStaticHoliday ("17-APR-2028", "Family Day");
lh.addStaticHoliday ("27-APR-2028", "Freedom Day");
lh.addStaticHoliday ("01-MAY-2028", "Workers Day");
lh.addStaticHoliday ("16-JUN-2028", "Youths Day");
lh.addStaticHoliday ("09-AUG-2028", "National Womens Day");
lh.addStaticHoliday ("25-SEP-2028", "Heritage Day Observed");
lh.addStaticHoliday ("25-DEC-2028", "Christmas Day");
lh.addStaticHoliday ("26-DEC-2028", "Day of Goodwill");
lh.addStandardWeekend();
return lh;
}
}
| |
/*
* Copyright (C) 2008 The Android Open Source Project
* Copyright (c) 2012-2013, The Linux Foundation. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package android.telephony;
import android.os.Parcel;
import android.telephony.Rlog;
import com.android.internal.telephony.GsmAlphabet;
import com.android.internal.telephony.GsmAlphabet.TextEncodingDetails;
import com.android.internal.telephony.SmsConstants;
import com.android.internal.telephony.SmsMessageBase;
import com.android.internal.telephony.SmsMessageBase.SubmitPduBase;
import java.lang.Math;
import java.util.ArrayList;
import java.util.Arrays;
import static android.telephony.TelephonyManager.PHONE_TYPE_CDMA;
/**
* A Short Message Service message.
*/
public class SmsMessage {
private static final String LOG_TAG = "SmsMessage";
/**
* SMS Class enumeration.
* See TS 23.038.
*
*/
public enum MessageClass{
    // UNKNOWN = no class indicated; CLASS_0..CLASS_3 correspond to the
    // TS 23.038 message classes 0-3 (see the Javadoc above this enum).
    UNKNOWN, CLASS_0, CLASS_1, CLASS_2, CLASS_3;
}
/** User data text encoding code unit size */
public static final int ENCODING_UNKNOWN = 0;
public static final int ENCODING_7BIT = 1;
public static final int ENCODING_8BIT = 2;
public static final int ENCODING_16BIT = 3;
/**
* @hide This value is not defined in global standard. Only in Korea, this is used.
*/
public static final int ENCODING_KSC5601 = 4;
/** The maximum number of payload bytes per message */
public static final int MAX_USER_DATA_BYTES = 140;
/**
* The maximum number of payload bytes per message if a user data header
* is present. This assumes the header only contains the
* CONCATENATED_8_BIT_REFERENCE element.
*/
public static final int MAX_USER_DATA_BYTES_WITH_HEADER = 134;
/** The maximum number of payload septets per message */
public static final int MAX_USER_DATA_SEPTETS = 160;
/**
* The maximum number of payload septets per message if a user data header
* is present. This assumes the header only contains the
* CONCATENATED_8_BIT_REFERENCE element.
*/
public static final int MAX_USER_DATA_SEPTETS_WITH_HEADER = 153;
/**
* Indicates a 3GPP format SMS message.
* @hide pending API council approval
*/
public static final String FORMAT_3GPP = "3gpp";
/**
* Indicates a 3GPP2 format SMS message.
* @hide pending API council approval
*/
public static final String FORMAT_3GPP2 = "3gpp2";
/** Contains actual SmsMessage. Only public for debugging and for framework layer.
*
* @hide
*/
public SmsMessageBase mWrappedSmsMessage;
public static class SubmitPdu {
    public byte[] encodedScAddress; // Null if not applicable.
    public byte[] encodedMessage;

    /** Debug-friendly rendering of both encoded buffers. */
    @Override
    public String toString() {
        StringBuilder out = new StringBuilder("SubmitPdu: encodedScAddress = ");
        out.append(Arrays.toString(encodedScAddress));
        out.append(", encodedMessage = ");
        out.append(Arrays.toString(encodedMessage));
        return out.toString();
    }

    /**
     * Copies the encoded buffers out of the internal {@code SubmitPduBase}.
     * @hide
     */
    protected SubmitPdu(SubmitPduBase spb) {
        this.encodedMessage = spb.encodedMessage;
        this.encodedScAddress = spb.encodedScAddress;
    }
}
// Private: instances are only built by the static factories below, which
// choose the format-specific (GSM or CDMA) implementation to wrap.
private SmsMessage(SmsMessageBase smb) {
    mWrappedSmsMessage = smb;
}
/**
* Create an SmsMessage from a raw PDU.
*
* <p><b>This method will soon be deprecated</b> and all applications which handle
* incoming SMS messages by processing the {@code SMS_RECEIVED_ACTION} broadcast
* intent <b>must</b> now pass the new {@code format} String extra from the intent
* into the new method {@code createFromPdu(byte[], String)} which takes an
* extra format parameter. This is required in order to correctly decode the PDU on
* devices that require support for both 3GPP and 3GPP2 formats at the same time,
* such as dual-mode GSM/CDMA and CDMA/LTE phones. Guess format based on Voice
* technology first, if it fails use other format.
*/
public static SmsMessage createFromPdu(byte[] pdu) {
    // cdma(3gpp2) vs gsm(3gpp) format info was not given,
    // guess from active voice phone type.
    final int activePhone = TelephonyManager.getDefault().getCurrentPhoneType();
    final boolean voiceIsCdma = (PHONE_TYPE_CDMA == activePhone);
    SmsMessage message = createFromPdu(pdu,
            voiceIsCdma ? SmsConstants.FORMAT_3GPP2 : SmsConstants.FORMAT_3GPP);
    if (message == null || message.mWrappedSmsMessage == null) {
        // Decoding with the voice-technology format failed; retry with the
        // opposite format.
        message = createFromPdu(pdu,
                voiceIsCdma ? SmsConstants.FORMAT_3GPP : SmsConstants.FORMAT_3GPP2);
    }
    return message;
}
/**
* Create an SmsMessage from a raw PDU with the specified message format. The
* message format is passed in the {@code SMS_RECEIVED_ACTION} as the {@code format}
* String extra, and will be either "3gpp" for GSM/UMTS/LTE messages in 3GPP format
* or "3gpp2" for CDMA/LTE messages in 3GPP2 format.
*
* @param pdu the message PDU from the SMS_RECEIVED_ACTION intent
* @param format the format extra from the SMS_RECEIVED_ACTION intent
* @hide pending API council approval
*/
public static SmsMessage createFromPdu(byte[] pdu, String format) {
    // Note: equals() is called on the constants so a null format falls
    // through to the error branch instead of throwing.
    if (SmsConstants.FORMAT_3GPP.equals(format)) {
        return new SmsMessage(
                com.android.internal.telephony.gsm.SmsMessage.createFromPdu(pdu));
    }
    if (SmsConstants.FORMAT_3GPP2.equals(format)) {
        return new SmsMessage(
                com.android.internal.telephony.cdma.SmsMessage.createFromPdu(pdu));
    }
    Rlog.e(LOG_TAG, "createFromPdu(): unsupported message format " + format);
    return null;
}
/**
* TS 27.005 3.4.1 lines[0] and lines[1] are the two lines read from the
* +CMT unsolicited response (PDU mode, of course)
* +CMT: [<alpha>],<length><CR><LF><pdu>
*
* Only public for debugging and for RIL
*
* {@hide}
*/
public static SmsMessage newFromCMT(String[] lines) {
    // received SMS in 3GPP format
    return new SmsMessage(
            com.android.internal.telephony.gsm.SmsMessage.newFromCMT(lines));
}
/** @hide */
/** @hide */
public static SmsMessage newFromParcel(Parcel p) {
    // received SMS in 3GPP2 format
    return new SmsMessage(
            com.android.internal.telephony.cdma.SmsMessage.newFromParcel(p));
}
/**
* Create an SmsMessage from an SMS EF record.
*
* @param index Index of SMS record. This should be index in ArrayList
* returned by SmsManager.getAllMessagesFromSim + 1.
* @param data Record data.
* @return An SmsMessage representing the record.
*
* @hide
*/
public static SmsMessage createFromEfRecord(int index, byte[] data) {
    // Dispatch on the current voice technology, like getTPLayerLengthForPDU.
    final SmsMessageBase wrapped = isCdmaVoice()
            ? com.android.internal.telephony.cdma.SmsMessage.createFromEfRecord(index, data)
            : com.android.internal.telephony.gsm.SmsMessage.createFromEfRecord(index, data);
    // The record may be unparseable/empty, in which case null is returned.
    return (wrapped == null) ? null : new SmsMessage(wrapped);
}
/**
* Get the TP-Layer-Length for the given SMS-SUBMIT PDU Basically, the
* length in bytes (not hex chars) less the SMSC header
*
* FIXME: This method is only used by a CTS test case that isn't run on CDMA devices.
* We should probably deprecate it and remove the obsolete test case.
*/
public static int getTPLayerLengthForPDU(String pdu) {
    // Dispatch on the current voice technology, like createFromEfRecord.
    return isCdmaVoice()
            ? com.android.internal.telephony.cdma.SmsMessage.getTPLayerLengthForPDU(pdu)
            : com.android.internal.telephony.gsm.SmsMessage.getTPLayerLengthForPDU(pdu);
}
/*
* TODO(cleanup): It would make some sense if the result of
* preprocessing a message to determine the proper encoding (i.e.
* the resulting data structure from calculateLength) could be
* passed as an argument to the actual final encoding function.
* This would better ensure that the logic behind size calculation
* actually matched the encoding.
*/
/**
* Calculates the number of SMS's required to encode the message body and
* the number of characters remaining until the next message.
*
* @param msgBody the message to encode
* @param use7bitOnly if true, characters that are not part of the
* radio-specific 7-bit encoding are counted as single
* space chars. If false, and if the messageBody contains
* non-7-bit encodable characters, length is calculated
* using a 16-bit encoding.
* @return an int[4] with int[0] being the number of SMS's
* required, int[1] the number of code units used, and
* int[2] is the number of code units remaining until the
* next message. int[3] is an indicator of the encoding
* code unit size (see the ENCODING_* definitions in SmsConstants)
*/
/**
 * Calculates the number of SMS's required to encode the message body and
 * the number of characters remaining until the next message.
 *
 * @param msgBody the message to encode
 * @param use7bitOnly if true, characters that are not part of the
 *         radio-specific 7-bit encoding are counted as single space chars;
 *         if false, and if the body contains non-7-bit-encodable
 *         characters, length is calculated using a 16-bit encoding
 * @return an int[4]: [0] number of SMS's required, [1] code units used,
 *         [2] code units remaining until the next message, [3] encoding
 *         code unit size (see the ENCODING_* definitions in SmsConstants)
 */
public static int[] calculateLength(CharSequence msgBody, boolean use7bitOnly) {
    // This function is for MO SMS: use the encoder matching the format
    // the message would actually be sent in.
    TextEncodingDetails ted = (useCdmaFormatForMoSms()) ?
        com.android.internal.telephony.cdma.SmsMessage.calculateLength(msgBody, use7bitOnly) :
        com.android.internal.telephony.gsm.SmsMessage.calculateLength(msgBody, use7bitOnly);
    // Idiomatic Java array declaration (was the C-style "int ret[]").
    int[] ret = new int[4];
    ret[0] = ted.msgCount;
    ret[1] = ted.codeUnitCount;
    ret[2] = ted.codeUnitsRemaining;
    ret[3] = ted.codeUnitSize;
    return ret;
}
/**
* Divide a message text into several fragments, none bigger than
* the maximum SMS message text size.
*
* @param text text, must not be null.
* @return an <code>ArrayList</code> of strings that, in order,
* comprise the original msg text
*
* @hide
*/
public static ArrayList<String> fragmentText(String text) {
    // This function is for MO SMS
    // First pass: measure the whole text once to learn the code unit size
    // (7-bit vs 16-bit), segment count, and (GSM) language tables in play.
    TextEncodingDetails ted = (useCdmaFormatForMoSms()) ?
        com.android.internal.telephony.cdma.SmsMessage.calculateLength(text, false) :
        com.android.internal.telephony.gsm.SmsMessage.calculateLength(text, false);
    // TODO(cleanup): The code here could be rolled into the logic
    // below cleanly if these MAX_* constants were defined more
    // flexibly...
    // Per-fragment payload limit: septets for 7-bit, bytes otherwise,
    // after subtracting user-data-header overhead.
    int limit;
    if (ted.codeUnitSize == SmsConstants.ENCODING_7BIT) {
        int udhLength;
        if (ted.languageTable != 0 && ted.languageShiftTable != 0) {
            udhLength = GsmAlphabet.UDH_SEPTET_COST_TWO_SHIFT_TABLES;
        } else if (ted.languageTable != 0 || ted.languageShiftTable != 0) {
            udhLength = GsmAlphabet.UDH_SEPTET_COST_ONE_SHIFT_TABLE;
        } else {
            udhLength = 0;
        }
        if (ted.msgCount > 1) {
            // Multi-part: the concatenation element also rides in the header.
            udhLength += GsmAlphabet.UDH_SEPTET_COST_CONCATENATED_MESSAGE;
        }
        if (udhLength != 0) {
            // Any non-empty header additionally costs its length field.
            udhLength += GsmAlphabet.UDH_SEPTET_COST_LENGTH;
        }
        limit = SmsConstants.MAX_USER_DATA_SEPTETS - udhLength;
    } else {
        if (ted.msgCount > 1) {
            limit = SmsConstants.MAX_USER_DATA_BYTES_WITH_HEADER;
        } else {
            limit = SmsConstants.MAX_USER_DATA_BYTES;
        }
    }
    int pos = 0;  // Index in code units.
    int textLen = text.length();
    ArrayList<String> result = new ArrayList<String>(ted.msgCount);
    while (pos < textLen) {
        int nextPos = 0;  // Counts code units.
        if (ted.codeUnitSize == SmsConstants.ENCODING_7BIT) {
            if (useCdmaFormatForMoSms() && ted.msgCount == 1) {
                // For a singleton CDMA message, the encoding must be ASCII...
                nextPos = pos + Math.min(limit, textLen - pos);
            } else {
                // For multi-segment messages, CDMA 7bit equals GSM 7bit encoding (EMS mode).
                nextPos = GsmAlphabet.findGsmSeptetLimitIndex(text, pos, limit,
                        ted.languageTable, ted.languageShiftTable);
            }
        } else {  // Assume unicode.
            // 16-bit code units: two bytes each, hence limit / 2.
            nextPos = pos + Math.min(limit / 2, textLen - pos);
        }
        if ((nextPos <= pos) || (nextPos > textLen)) {
            // Defensive bail-out: a fragment boundary that fails to advance
            // (or overshoots the text) would otherwise loop forever.
            Rlog.e(LOG_TAG, "fragmentText failed (" + pos + " >= " + nextPos + " or " +
                    nextPos + " >= " + textLen + ")");
            break;
        }
        result.add(text.substring(pos, nextPos));
        pos = nextPos;
    }
    return result;
}
/**
* Calculates the number of SMS's required to encode the message body and
* the number of characters remaining until the next message, given the
* current encoding.
*
* @param messageBody the message to encode
* @param use7bitOnly if true, characters that are not part of the radio
* specific (GSM / CDMA) alphabet encoding are converted to as a
* single space characters. If false, a messageBody containing
* non-GSM or non-CDMA alphabet characters are encoded using
* 16-bit encoding.
* @return an int[4] with int[0] being the number of SMS's required, int[1]
* the number of code units used, and int[2] is the number of code
* units remaining until the next message. int[3] is the encoding
* type that should be used for the message.
*/
public static int[] calculateLength(String messageBody, boolean use7bitOnly) {
    // Convenience overload; delegates to calculateLength(CharSequence, boolean).
    return calculateLength((CharSequence)messageBody, use7bitOnly);
}
/*
* TODO(cleanup): It looks like there is now no useful reason why
* apps should generate pdus themselves using these routines,
* instead of handing the raw data to SMSDispatcher (and thereby
* have the phone process do the encoding). Moreover, CDMA now
* has shared state (in the form of the msgId system property)
* which can only be modified by the phone process, and hence
* makes the output of these routines incorrect. Since they now
* serve no purpose, they should probably just return null
* directly, and be deprecated. Going further in that direction,
* the above parsers of serialized pdu data should probably also
* be gotten rid of, hiding all but the necessarily visible
* structured data from client apps. A possible concern with
* doing this is that apps may be using these routines to generate
* pdus that are then sent elsewhere, some network server, for
* example, and that always returning null would thereby break
* otherwise useful apps.
*/
/**
* Get an SMS-SUBMIT PDU for a destination address and a message.
* This method will not attempt to use any GSM national language 7 bit encodings.
*
* @param scAddress Service Centre address. Null means use default.
* @return a <code>SubmitPdu</code> containing the encoded SC
* address, if applicable, and the encoded message.
* Returns null on encode error.
*/
public static SubmitPdu getSubmitPdu(String scAddress,
String destinationAddress, String message, boolean statusReportRequested) {
SubmitPduBase spb;
if (useCdmaFormatForMoSms()) {
spb = com.android.internal.telephony.cdma.SmsMessage.getSubmitPdu(scAddress,
destinationAddress, message, statusReportRequested, null);
} else {
spb = com.android.internal.telephony.gsm.SmsMessage.getSubmitPdu(scAddress,
destinationAddress, message, statusReportRequested);
}
return new SubmitPdu(spb);
}
/**
* Get an SMS-SUBMIT PDU for a data message to a destination address & port.
* This method will not attempt to use any GSM national language 7 bit encodings.
*
* @param scAddress Service Centre address. null == use default
* @param destinationAddress the address of the destination for the message
* @param destinationPort the port to deliver the message to at the
* destination
* @param data the data for the message
* @return a <code>SubmitPdu</code> containing the encoded SC
* address, if applicable, and the encoded message.
* Returns null on encode error.
*/
public static SubmitPdu getSubmitPdu(String scAddress,
String destinationAddress, short destinationPort, byte[] data,
boolean statusReportRequested) {
SubmitPduBase spb;
if (useCdmaFormatForMoSms()) {
spb = com.android.internal.telephony.cdma.SmsMessage.getSubmitPdu(scAddress,
destinationAddress, destinationPort, data, statusReportRequested);
} else {
spb = com.android.internal.telephony.gsm.SmsMessage.getSubmitPdu(scAddress,
destinationAddress, destinationPort, data, statusReportRequested);
}
return new SubmitPdu(spb);
}
    /**
     * Returns the address of the SMS service center that relayed this message
     * or null if there is none.
     *
     * @return the service center address, or null
     */
    public String getServiceCenterAddress() {
        return mWrappedSmsMessage.getServiceCenterAddress();
    }
    /**
     * Returns the originating address (sender) of this SMS message in String
     * form or null if unavailable.
     *
     * @return the sender's address, or null
     */
    public String getOriginatingAddress() {
        return mWrappedSmsMessage.getOriginatingAddress();
    }
    /**
     * Returns the originating address, or email from address if this message
     * was from an email gateway. Returns null if originating address
     * unavailable.
     *
     * @return the display-friendly originating address, or null
     */
    public String getDisplayOriginatingAddress() {
        return mWrappedSmsMessage.getDisplayOriginatingAddress();
    }
    /**
     * Returns the message body as a String, if it exists and is text based.
     *
     * @return message body if there is one, otherwise null
     */
    public String getMessageBody() {
        return mWrappedSmsMessage.getMessageBody();
    }
    /**
     * Returns the class of this message.
     *
     * @return the {@link MessageClass} of this message; any class the wrapped
     *         radio-specific message does not map to is reported as UNKNOWN
     */
    public MessageClass getMessageClass() {
        // Translate the wrapped (radio-specific) message class enum into the
        // public MessageClass enum exposed by this class.
        switch(mWrappedSmsMessage.getMessageClass()) {
            case CLASS_0: return MessageClass.CLASS_0;
            case CLASS_1: return MessageClass.CLASS_1;
            case CLASS_2: return MessageClass.CLASS_2;
            case CLASS_3: return MessageClass.CLASS_3;
            default: return MessageClass.UNKNOWN;
        }
    }
    /**
     * Returns the message body, or email message body if this message was from
     * an email gateway. Returns null if message body unavailable.
     *
     * @return the display-friendly message body, or null
     */
    public String getDisplayMessageBody() {
        return mWrappedSmsMessage.getDisplayMessageBody();
    }
    /**
     * Unofficial convention of a subject line enclosed in parens; returns the
     * empty string if not present.
     *
     * @return the pseudo subject, or "" when none is present
     */
    public String getPseudoSubject() {
        return mWrappedSmsMessage.getPseudoSubject();
    }
    /**
     * Returns the service centre timestamp in currentTimeMillis() format.
     *
     * @return the service centre timestamp, in milliseconds since the epoch
     */
    public long getTimestampMillis() {
        return mWrappedSmsMessage.getTimestampMillis();
    }
    /**
     * Returns true if message is an email.
     * See {@link #getEmailFrom()} and {@link #getEmailBody()}.
     *
     * @return true if this message came through an email gateway and email
     *         sender / subject / parsed body are available
     */
    public boolean isEmail() {
        return mWrappedSmsMessage.isEmail();
    }
    /**
     * Returns the body of the email sent through the gateway.
     *
     * @return if isEmail() is true, body of the email sent through the gateway.
     *         null otherwise
     */
    public String getEmailBody() {
        return mWrappedSmsMessage.getEmailBody();
    }
    /**
     * Returns the from address of the email sent through the gateway.
     *
     * @return if isEmail() is true, email from address of email sent through
     *         the gateway. null otherwise
     */
    public String getEmailFrom() {
        return mWrappedSmsMessage.getEmailFrom();
    }
    /**
     * Get protocol identifier.
     *
     * @return the protocol identifier of this message
     */
    public int getProtocolIdentifier() {
        return mWrappedSmsMessage.getProtocolIdentifier();
    }
    /**
     * See TS 23.040 9.2.3.9; returns true if this is a "replace short message"
     * SMS.
     *
     * @return true if this is a "replace short message" SMS
     */
    public boolean isReplace() {
        return mWrappedSmsMessage.isReplace();
    }
    /**
     * Returns true for CPHS MWI toggle message.
     *
     * @return true if this is a CPHS MWI toggle message. See CPHS 4.2 section
     *         B.4.2 for details.
     */
    public boolean isCphsMwiMessage() {
        return mWrappedSmsMessage.isCphsMwiMessage();
    }
    /**
     * Returns true if this message is a CPHS voicemail / message waiting
     * indicator (MWI) clear message.
     *
     * @return true for an MWI clear message
     */
    public boolean isMWIClearMessage() {
        return mWrappedSmsMessage.isMWIClearMessage();
    }
    /**
     * Returns true if this message is a CPHS voicemail / message waiting
     * indicator (MWI) set message.
     *
     * @return true for an MWI set message
     */
    public boolean isMWISetMessage() {
        return mWrappedSmsMessage.isMWISetMessage();
    }
    /**
     * Returns true if this message is a "Message Waiting Indication Group:
     * Discard Message" notification and should not be stored.
     *
     * @return true if this MWI message should not be stored
     */
    public boolean isMwiDontStore() {
        return mWrappedSmsMessage.isMwiDontStore();
    }
    /**
     * Returns the user data section minus the user data header, if one was
     * present.
     *
     * @return the user data payload bytes
     */
    public byte[] getUserData() {
        return mWrappedSmsMessage.getUserData();
    }
    /**
     * Returns the raw PDU for the message.
     *
     * @return the raw PDU for the message; its encoding comes from the wrapped
     *         (GSM- or CDMA-specific) message implementation
     */
    public byte[] getPdu() {
        return mWrappedSmsMessage.getPdu();
    }
    /**
     * Returns the status of the message on the SIM (read, unread, sent, unsent).
     *
     * @return the status of the message on the SIM. These are:
     *         SmsManager.STATUS_ON_SIM_FREE
     *         SmsManager.STATUS_ON_SIM_READ
     *         SmsManager.STATUS_ON_SIM_UNREAD
     *         SmsManager.STATUS_ON_SIM_SEND
     *         SmsManager.STATUS_ON_SIM_UNSENT
     * @deprecated Use getStatusOnIcc instead.
     */
    @Deprecated public int getStatusOnSim() {
        // "SIM" is the legacy name; delegates to the ICC-based implementation.
        return mWrappedSmsMessage.getStatusOnIcc();
    }
    /**
     * Returns the status of the message on the ICC (read, unread, sent, unsent).
     *
     * @return the status of the message on the ICC. These are:
     *         SmsManager.STATUS_ON_ICC_FREE
     *         SmsManager.STATUS_ON_ICC_READ
     *         SmsManager.STATUS_ON_ICC_UNREAD
     *         SmsManager.STATUS_ON_ICC_SEND
     *         SmsManager.STATUS_ON_ICC_UNSENT
     * @see #getStatusOnSim()
     */
    public int getStatusOnIcc() {
        return mWrappedSmsMessage.getStatusOnIcc();
    }
    /**
     * Returns the record index of the message on the SIM (1-based index).
     * @return the record index of the message on the SIM, or -1 if this
     *         SmsMessage was not created from a SIM SMS EF record.
     * @deprecated Use getIndexOnIcc instead.
     */
    @Deprecated public int getIndexOnSim() {
        // "SIM" is the legacy name; delegates to the ICC-based implementation.
        return mWrappedSmsMessage.getIndexOnIcc();
    }
    /**
     * Returns the record index of the message on the ICC (1-based index).
     * @return the record index of the message on the ICC, or -1 if this
     *         SmsMessage was not created from an ICC SMS EF record.
     */
    public int getIndexOnIcc() {
        return mWrappedSmsMessage.getIndexOnIcc();
    }
    /**
     * GSM:
     * For an SMS-STATUS-REPORT message, this returns the status field from
     * the status report. This field indicates the status of a previously
     * submitted SMS, if requested. See TS 23.040, 9.2.3.15 TP-Status for a
     * description of values.
     * CDMA:
     * For not interfering with status codes from GSM, the value is
     * shifted to the bits 31-16.
     * The value is composed of an error class (bits 25-24) and a status code (bits 23-16).
     * Possible codes are described in C.S0015-B, v2.0, 4.5.21.
     *
     * @return 0 indicates the previously sent message was received.
     *         See TS 23.040, 9.2.3.15 and C.S0015-B, v2.0, 4.5.21
     *         for a description of other possible values.
     */
    public int getStatus() {
        return mWrappedSmsMessage.getStatus();
    }
    /**
     * Return true iff the message is a SMS-STATUS-REPORT message.
     *
     * @return true iff this is an SMS-STATUS-REPORT message
     */
    public boolean isStatusReportMessage() {
        return mWrappedSmsMessage.isStatusReportMessage();
    }
    /**
     * Returns true iff the <code>TP-Reply-Path</code> bit is set in
     * this message.
     *
     * @return true iff TP-Reply-Path is set
     */
    public boolean isReplyPathPresent() {
        return mWrappedSmsMessage.isReplyPathPresent();
    }
/**
* Determines whether or not to use CDMA format for MO SMS.
* If SMS over IMS is supported, then format is based on IMS SMS format,
* otherwise format is based on current phone type.
*
* @return true if Cdma format should be used for MO SMS, false otherwise.
*/
private static boolean useCdmaFormatForMoSms() {
if (!SmsManager.getDefault().isImsSmsSupported()) {
// use Voice technology to determine SMS format.
return isCdmaVoice();
}
// IMS is registered with SMS support, check the SMS format supported
return (SmsConstants.FORMAT_3GPP2.equals(SmsManager.getDefault().getImsSmsFormat()));
}
/**
* Determines whether or not to current phone type is cdma.
*
* @return true if current phone type is cdma, false otherwise.
*/
private static boolean isCdmaVoice() {
int activePhone = TelephonyManager.getDefault().getCurrentPhoneType();
return (PHONE_TYPE_CDMA == activePhone);
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
package com.azure.resourcemanager.appservice.implementation;
import com.azure.core.http.rest.PagedFlux;
import com.azure.core.http.rest.PagedIterable;
import com.azure.core.http.rest.Response;
import com.azure.core.http.rest.SimpleResponse;
import com.azure.core.util.FluxUtil;
import com.azure.core.util.logging.ClientLogger;
import com.azure.resourcemanager.appservice.AppServiceManager;
import com.azure.resourcemanager.appservice.models.AppServicePlan;
import com.azure.resourcemanager.appservice.models.CsmPublishingProfileOptions;
import com.azure.resourcemanager.appservice.models.CsmSlotEntity;
import com.azure.resourcemanager.appservice.models.HostnameBinding;
import com.azure.resourcemanager.appservice.models.MSDeploy;
import com.azure.resourcemanager.appservice.models.OperatingSystem;
import com.azure.resourcemanager.appservice.models.PricingTier;
import com.azure.resourcemanager.appservice.models.PrivateLinkConnectionApprovalRequestResource;
import com.azure.resourcemanager.appservice.models.PrivateLinkConnectionState;
import com.azure.resourcemanager.appservice.models.PublishingProfile;
import com.azure.resourcemanager.appservice.models.WebAppBase;
import com.azure.resourcemanager.appservice.models.WebAppSourceControl;
import com.azure.resourcemanager.appservice.fluent.models.ConnectionStringDictionaryInner;
import com.azure.resourcemanager.appservice.fluent.models.IdentifierInner;
import com.azure.resourcemanager.appservice.fluent.models.MSDeployStatusInner;
import com.azure.resourcemanager.appservice.fluent.models.SiteAuthSettingsInner;
import com.azure.resourcemanager.appservice.fluent.models.SiteConfigResourceInner;
import com.azure.resourcemanager.appservice.fluent.models.SiteInner;
import com.azure.resourcemanager.appservice.fluent.models.SiteLogsConfigInner;
import com.azure.resourcemanager.appservice.fluent.models.SitePatchResourceInner;
import com.azure.resourcemanager.appservice.fluent.models.SiteSourceControlInner;
import com.azure.resourcemanager.appservice.fluent.models.SlotConfigNamesResourceInner;
import com.azure.resourcemanager.appservice.fluent.models.StringDictionaryInner;
import com.azure.resourcemanager.resources.fluentcore.arm.ResourceUtils;
import com.azure.resourcemanager.resources.fluentcore.arm.models.PrivateEndpointConnection;
import com.azure.resourcemanager.resources.fluentcore.arm.models.PrivateEndpointServiceConnectionStatus;
import com.azure.resourcemanager.resources.fluentcore.arm.models.PrivateLinkResource;
import com.azure.resourcemanager.resources.fluentcore.collection.SupportsListingPrivateEndpointConnection;
import com.azure.resourcemanager.resources.fluentcore.collection.SupportsListingPrivateLinkResource;
import com.azure.resourcemanager.resources.fluentcore.collection.SupportsUpdatingPrivateEndpointConnection;
import com.azure.resourcemanager.resources.fluentcore.model.Creatable;
import java.nio.charset.StandardCharsets;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;
import reactor.core.publisher.Mono;
import com.azure.resourcemanager.resources.fluentcore.utils.PagedConverter;
/**
* The base implementation for web apps and function apps.
*
* @param <FluentT> the fluent interface, WebApp or FunctionApp
* @param <FluentImplT> the implementation class for FluentT
* @param <FluentWithCreateT> the definition stage that derives from Creatable
* @param <FluentUpdateT> The definition stage that derives from Appliable
*/
abstract class AppServiceBaseImpl<
    FluentT extends WebAppBase,
    FluentImplT extends AppServiceBaseImpl<FluentT, FluentImplT, FluentWithCreateT, FluentUpdateT>,
    FluentWithCreateT,
    FluentUpdateT>
    extends WebAppBaseImpl<FluentT, FluentImplT>
    implements
        SupportsListingPrivateLinkResource,
        SupportsListingPrivateEndpointConnection,
        SupportsUpdatingPrivateEndpointConnection {
    // Logger keyed to the concrete subclass for clearer diagnostics.
    private final ClientLogger logger = new ClientLogger(getClass());
    AppServiceBaseImpl(
        String name,
        SiteInner innerObject,
        SiteConfigResourceInner siteConfig,
        SiteLogsConfigInner logConfig,
        AppServiceManager manager) {
        super(name, innerObject, siteConfig, logConfig, manager);
    }
    // ---- REST plumbing: thin async delegations to the WebApps service client. ----
    @Override
    Mono<SiteInner> createOrUpdateInner(SiteInner site) {
        return this.manager().serviceClient().getWebApps().createOrUpdateAsync(resourceGroupName(), name(), site);
    }
    @Override
    Mono<SiteInner> updateInner(SitePatchResourceInner siteUpdate) {
        return this.manager().serviceClient().getWebApps().updateAsync(resourceGroupName(), name(), siteUpdate);
    }
    @Override
    Mono<SiteInner> getInner() {
        return this.manager().serviceClient().getWebApps().getByResourceGroupAsync(resourceGroupName(), name());
    }
    @Override
    Mono<SiteConfigResourceInner> getConfigInner() {
        return this.manager().serviceClient().getWebApps().getConfigurationAsync(resourceGroupName(), name());
    }
    @Override
    Mono<SiteConfigResourceInner> createOrUpdateSiteConfig(SiteConfigResourceInner siteConfig) {
        return this
            .manager()
            .serviceClient()
            .getWebApps()
            .createOrUpdateConfigurationAsync(resourceGroupName(), name(), siteConfig);
    }
    @Override
    Mono<Void> deleteHostnameBinding(String hostname) {
        return this.manager().serviceClient().getWebApps()
            .deleteHostnameBindingAsync(resourceGroupName(), name(), hostname);
    }
    @Override
    Mono<StringDictionaryInner> listAppSettings() {
        return this.manager().serviceClient().getWebApps().listApplicationSettingsAsync(resourceGroupName(), name());
    }
    @Override
    Mono<StringDictionaryInner> updateAppSettings(StringDictionaryInner inner) {
        return this.manager().serviceClient().getWebApps()
            .updateApplicationSettingsAsync(resourceGroupName(), name(), inner);
    }
    @Override
    Mono<ConnectionStringDictionaryInner> listConnectionStrings() {
        return this.manager().serviceClient().getWebApps().listConnectionStringsAsync(resourceGroupName(), name());
    }
    @Override
    Mono<ConnectionStringDictionaryInner> updateConnectionStrings(ConnectionStringDictionaryInner inner) {
        return this.manager().serviceClient().getWebApps()
            .updateConnectionStringsAsync(resourceGroupName(), name(), inner);
    }
    @Override
    Mono<SlotConfigNamesResourceInner> listSlotConfigurations() {
        return this.manager().serviceClient().getWebApps().listSlotConfigurationNamesAsync(resourceGroupName(), name());
    }
    @Override
    Mono<SlotConfigNamesResourceInner> updateSlotConfigurations(SlotConfigNamesResourceInner inner) {
        return this.manager().serviceClient().getWebApps()
            .updateSlotConfigurationNamesAsync(resourceGroupName(), name(), inner);
    }
    @Override
    Mono<SiteSourceControlInner> createOrUpdateSourceControl(SiteSourceControlInner inner) {
        return this.manager().serviceClient().getWebApps()
            .createOrUpdateSourceControlAsync(resourceGroupName(), name(), inner);
    }
    @Override
    Mono<Void> deleteSourceControl() {
        return this.manager().serviceClient().getWebApps().deleteSourceControlAsync(resourceGroupName(), name());
    }
    @Override
    Mono<SiteAuthSettingsInner> updateAuthentication(SiteAuthSettingsInner inner) {
        return manager().serviceClient().getWebApps().updateAuthSettingsAsync(resourceGroupName(), name(), inner);
    }
    @Override
    Mono<SiteAuthSettingsInner> getAuthentication() {
        return manager().serviceClient().getWebApps().getAuthSettingsAsync(resourceGroupName(), name());
    }
    @Override
    public Map<String, HostnameBinding> getHostnameBindings() {
        // Blocking convenience wrapper over the async variant.
        return getHostnameBindingsAsync().block();
    }
    @Override
    @SuppressWarnings("unchecked")
    public Mono<Map<String, HostnameBinding>> getHostnameBindingsAsync() {
        // Wrap each inner binding, then key the resulting map by the binding
        // name relative to this site (names arrive as "<siteName>/<host>").
        return PagedConverter.mapPage(this
            .manager()
            .serviceClient()
            .getWebApps()
            .listHostnameBindingsAsync(resourceGroupName(), name()),
            hostNameBindingInner ->
                new HostnameBindingImpl<>(hostNameBindingInner, (FluentImplT) AppServiceBaseImpl.this))
            .collectList()
            .map(
                hostNameBindings ->
                    Collections
                        .<String, HostnameBinding>unmodifiableMap(
                            hostNameBindings
                                .stream()
                                .collect(
                                    Collectors
                                        .toMap(
                                            binding -> binding.name().replace(name() + "/", ""),
                                            Function.identity()))));
    }
    @Override
    public PublishingProfile getPublishingProfile() {
        return getPublishingProfileAsync().block();
    }
    // NOTE(review): unlike the sibling accessors this method has no @Override -
    // confirm whether WebAppBase declares getPublishingProfileAsync().
    public Mono<PublishingProfile> getPublishingProfileAsync() {
        // The service streams the publishing profile as XML; collect the bytes
        // and decode them as UTF-8 before parsing.
        return FluxUtil
            .collectBytesInByteBufferStream(
                manager()
                    .serviceClient()
                    .getWebApps()
                    .listPublishingProfileXmlWithSecretsAsync(
                        resourceGroupName(), name(), new CsmPublishingProfileOptions()))
            .map(
                bytes -> new PublishingProfileImpl(new String(bytes, StandardCharsets.UTF_8), AppServiceBaseImpl.this));
    }
    @Override
    public WebAppSourceControl getSourceControl() {
        return getSourceControlAsync().block();
    }
    @Override
    public Mono<WebAppSourceControl> getSourceControlAsync() {
        return manager()
            .serviceClient()
            .getWebApps()
            .getSourceControlAsync(resourceGroupName(), name())
            .map(
                siteSourceControlInner ->
                    new WebAppSourceControlImpl<>(siteSourceControlInner, AppServiceBaseImpl.this));
    }
    @Override
    Mono<MSDeployStatusInner> createMSDeploy(MSDeploy msDeployInner) {
        return manager().serviceClient().getWebApps()
            .createMSDeployOperationAsync(resourceGroupName(), name(), msDeployInner);
    }
    @Override
    public void verifyDomainOwnership(String certificateOrderName, String domainVerificationToken) {
        verifyDomainOwnershipAsync(certificateOrderName, domainVerificationToken).block();
    }
    @Override
    public Mono<Void> verifyDomainOwnershipAsync(String certificateOrderName, String domainVerificationToken) {
        // Ownership is asserted by writing the verification token as a domain
        // ownership identifier on the site.
        IdentifierInner identifierInner = new IdentifierInner().withValue(domainVerificationToken);
        return this
            .manager()
            .serviceClient()
            .getWebApps()
            .createOrUpdateDomainOwnershipIdentifierAsync(
                resourceGroupName(), name(), certificateOrderName, identifierInner)
            .then(Mono.empty());
    }
    // ---- Lifecycle operations; each refreshes the cached state after the call. ----
    @Override
    public void start() {
        startAsync().block();
    }
    @Override
    public Mono<Void> startAsync() {
        return manager()
            .serviceClient()
            .getWebApps()
            .startAsync(resourceGroupName(), name())
            .then(refreshAsync())
            .then(Mono.empty());
    }
    @Override
    public void stop() {
        stopAsync().block();
    }
    @Override
    public Mono<Void> stopAsync() {
        return manager()
            .serviceClient()
            .getWebApps()
            .stopAsync(resourceGroupName(), name())
            .then(refreshAsync())
            .then(Mono.empty());
    }
    @Override
    public void restart() {
        restartAsync().block();
    }
    @Override
    public Mono<Void> restartAsync() {
        return manager()
            .serviceClient()
            .getWebApps()
            .restartAsync(resourceGroupName(), name())
            .then(refreshAsync())
            .then(Mono.empty());
    }
    @Override
    public void swap(String slotName) {
        swapAsync(slotName).block();
    }
    @Override
    public Mono<Void> swapAsync(String slotName) {
        return manager()
            .serviceClient()
            .getWebApps()
            .swapSlotWithProductionAsync(resourceGroupName(), name(), new CsmSlotEntity().withTargetSlot(slotName))
            .then(refreshAsync())
            .then(Mono.empty());
    }
    @Override
    public void applySlotConfigurations(String slotName) {
        applySlotConfigurationsAsync(slotName).block();
    }
    @Override
    public Mono<Void> applySlotConfigurationsAsync(String slotName) {
        return manager()
            .serviceClient()
            .getWebApps()
            .applySlotConfigToProductionAsync(resourceGroupName(), name(), new CsmSlotEntity().withTargetSlot(slotName))
            .then(refreshAsync())
            .then(Mono.empty());
    }
    @Override
    public void resetSlotConfigurations() {
        resetSlotConfigurationsAsync().block();
    }
    @Override
    public Mono<Void> resetSlotConfigurationsAsync() {
        return manager()
            .serviceClient()
            .getWebApps()
            .resetProductionSlotConfigAsync(resourceGroupName(), name())
            .then(refreshAsync())
            .then(Mono.empty());
    }
    @Override
    public byte[] getContainerLogs() {
        return getContainerLogsAsync().block();
    }
    @Override
    public Mono<byte[]> getContainerLogsAsync() {
        return FluxUtil
            .collectBytesInByteBufferStream(
                manager().serviceClient().getWebApps().getWebSiteContainerLogsAsync(resourceGroupName(), name()));
    }
    @Override
    public byte[] getContainerLogsZip() {
        return getContainerLogsZipAsync().block();
    }
    @Override
    public Mono<byte[]> getContainerLogsZipAsync() {
        return FluxUtil
            .collectBytesInByteBufferStream(
                manager().serviceClient().getWebApps().getContainerLogsZipAsync(resourceGroupName(), name()));
    }
    @Override
    Mono<SiteLogsConfigInner> updateDiagnosticLogsConfig(SiteLogsConfigInner siteLogsConfigInner) {
        return manager()
            .serviceClient()
            .getWebApps()
            .updateDiagnosticLogsConfigAsync(resourceGroupName(), name(), siteLogsConfigInner);
    }
    // ---- App service plan helpers. ----
    private AppServicePlanImpl newDefaultAppServicePlan() {
        // Derive a random, unique plan name from the app name.
        String planName = this.manager().resourceManager().internalContext().randomResourceName(name() + "plan", 32);
        return newDefaultAppServicePlan(planName);
    }
    private AppServicePlanImpl newDefaultAppServicePlan(String appServicePlanName) {
        AppServicePlanImpl appServicePlan =
            (AppServicePlanImpl) (this.manager().appServicePlans().define(appServicePlanName)).withRegion(regionName());
        // Reuse the resource group being created along with this app, if any.
        if (super.creatableGroup != null && isInCreateMode()) {
            appServicePlan = appServicePlan.withNewResourceGroup(super.creatableGroup);
        } else {
            appServicePlan = appServicePlan.withExistingResourceGroup(resourceGroupName());
        }
        return appServicePlan;
    }
    public FluentImplT withNewFreeAppServicePlan() {
        return withNewAppServicePlan(OperatingSystem.WINDOWS, PricingTier.FREE_F1);
    }
    public FluentImplT withNewSharedAppServicePlan() {
        return withNewAppServicePlan(OperatingSystem.WINDOWS, PricingTier.SHARED_D1);
    }
    FluentImplT withNewAppServicePlan(OperatingSystem operatingSystem, PricingTier pricingTier) {
        return withNewAppServicePlan(
            newDefaultAppServicePlan().withOperatingSystem(operatingSystem).withPricingTier(pricingTier));
    }
    FluentImplT withNewAppServicePlan(
        String appServicePlanName, OperatingSystem operatingSystem, PricingTier pricingTier) {
        return withNewAppServicePlan(
            newDefaultAppServicePlan(appServicePlanName)
                .withOperatingSystem(operatingSystem)
                .withPricingTier(pricingTier));
    }
    public FluentImplT withNewAppServicePlan(PricingTier pricingTier) {
        return withNewAppServicePlan(operatingSystem(), pricingTier);
    }
    public FluentImplT withNewAppServicePlan(String appServicePlanName, PricingTier pricingTier) {
        return withNewAppServicePlan(appServicePlanName, operatingSystem(), pricingTier);
    }
    public FluentImplT withNewAppServicePlan(Creatable<AppServicePlan> appServicePlanCreatable) {
        this.addDependency(appServicePlanCreatable);
        // The plan does not exist yet, so construct its future resource id by hand.
        String id =
            ResourceUtils
                .constructResourceId(
                    this.manager().subscriptionId(),
                    resourceGroupName(),
                    "Microsoft.Web",
                    "serverFarms",
                    appServicePlanCreatable.name(),
                    "");
        innerModel().withServerFarmId(id);
        if (appServicePlanCreatable instanceof AppServicePlanImpl) {
            return withOperatingSystem(((AppServicePlanImpl) appServicePlanCreatable).operatingSystem());
        } else {
            throw logger.logExceptionAsError(
                new IllegalStateException("Internal error, appServicePlanCreatable must be class AppServicePlanImpl"));
        }
    }
    @SuppressWarnings("unchecked")
    private FluentImplT withOperatingSystem(OperatingSystem os) {
        // Linux plans set reserved=true and append ",linux" to the site kind.
        if (os == OperatingSystem.LINUX) {
            innerModel().withReserved(true);
            innerModel().withKind(innerModel().kind() + ",linux");
        }
        return (FluentImplT) this;
    }
    public FluentImplT withExistingAppServicePlan(AppServicePlan appServicePlan) {
        innerModel().withServerFarmId(appServicePlan.id());
        // The app is placed in the same region as its plan.
        this.withRegion(appServicePlan.regionName());
        return withOperatingSystem(appServicePlanOperatingSystem(appServicePlan));
    }
    // ---- Container (Docker) configuration. ----
    @SuppressWarnings("unchecked")
    public FluentImplT withPublicDockerHubImage(String imageAndTag) {
        cleanUpContainerSettings();
        if (siteConfig == null) {
            siteConfig = new SiteConfigResourceInner();
        }
        setAppFrameworkVersion(String.format("DOCKER|%s", imageAndTag));
        withAppSetting(SETTING_DOCKER_IMAGE, imageAndTag);
        return (FluentImplT) this;
    }
    public FluentImplT withPrivateDockerHubImage(String imageAndTag) {
        // Same site configuration as a public image; credentials are supplied
        // separately via withCredentials().
        return withPublicDockerHubImage(imageAndTag);
    }
    @SuppressWarnings("unchecked")
    public FluentImplT withPrivateRegistryImage(String imageAndTag, String serverUrl) {
        // Qualify the image with the registry server when it is not already qualified.
        imageAndTag = Utils.smartCompletionPrivateRegistryImage(imageAndTag, serverUrl);
        cleanUpContainerSettings();
        if (siteConfig == null) {
            siteConfig = new SiteConfigResourceInner();
        }
        setAppFrameworkVersion(String.format("DOCKER|%s", imageAndTag));
        withAppSetting(SETTING_DOCKER_IMAGE, imageAndTag);
        withAppSetting(SETTING_REGISTRY_SERVER, serverUrl);
        return (FluentImplT) this;
    }
    @SuppressWarnings("unchecked")
    public FluentImplT withCredentials(String username, String password) {
        withAppSetting(SETTING_REGISTRY_USERNAME, username);
        withAppSetting(SETTING_REGISTRY_PASSWORD, password);
        return (FluentImplT) this;
    }
    // Subclasses clear any container-specific settings before new ones are applied.
    protected abstract void cleanUpContainerSettings();
    protected void ensureLinuxPlan() {
        if (OperatingSystem.WINDOWS.equals(operatingSystem())) {
            throw logger.logExceptionAsError(
                new IllegalArgumentException("Docker container settings only apply to Linux app service plans."));
        }
    }
    protected OperatingSystem appServicePlanOperatingSystem(AppServicePlan appServicePlan) {
        return appServicePlan.operatingSystem();
    }
    // ---- Private link / private endpoint support. ----
    @Override
    public PagedIterable<PrivateLinkResource> listPrivateLinkResources() {
        return new PagedIterable<>(listPrivateLinkResourcesAsync());
    }
    @Override
    public PagedFlux<PrivateLinkResource> listPrivateLinkResourcesAsync() {
        // The service returns a single list response; adapt it into a PagedFlux.
        Mono<Response<List<PrivateLinkResource>>> retList = this.manager().serviceClient().getWebApps()
            .getPrivateLinkResourcesWithResponseAsync(this.resourceGroupName(), this.name())
            .map(response -> new SimpleResponse<>(response, response.getValue().value().stream()
                .map(PrivateLinkResourceImpl::new)
                .collect(Collectors.toList())));
        return PagedConverter.convertListToPagedFlux(retList);
    }
    @Override
    public PagedIterable<PrivateEndpointConnection> listPrivateEndpointConnections() {
        return new PagedIterable<>(listPrivateEndpointConnectionsAsync());
    }
    @Override
    public PagedFlux<PrivateEndpointConnection> listPrivateEndpointConnectionsAsync() {
        return PagedConverter.mapPage(this.manager().serviceClient().getWebApps()
            .getPrivateEndpointConnectionListAsync(this.resourceGroupName(), this.name()),
            PrivateEndpointConnectionImpl::new);
    }
    @Override
    public void approvePrivateEndpointConnection(String privateEndpointConnectionName) {
        approvePrivateEndpointConnectionAsync(privateEndpointConnectionName).block();
    }
    @Override
    public Mono<Void> approvePrivateEndpointConnectionAsync(String privateEndpointConnectionName) {
        // Approval and rejection share one endpoint; only the status differs.
        return this.manager().serviceClient().getWebApps()
            .approveOrRejectPrivateEndpointConnectionAsync(this.resourceGroupName(), this.name(),
                privateEndpointConnectionName,
                new PrivateLinkConnectionApprovalRequestResource().withPrivateLinkServiceConnectionState(
                    new PrivateLinkConnectionState()
                        .withStatus(PrivateEndpointServiceConnectionStatus.APPROVED.toString())
                ))
            .then();
    }
    @Override
    public void rejectPrivateEndpointConnection(String privateEndpointConnectionName) {
        rejectPrivateEndpointConnectionAsync(privateEndpointConnectionName).block();
    }
    @Override
    public Mono<Void> rejectPrivateEndpointConnectionAsync(String privateEndpointConnectionName) {
        return this.manager().serviceClient().getWebApps()
            .approveOrRejectPrivateEndpointConnectionAsync(this.resourceGroupName(), this.name(),
                privateEndpointConnectionName,
                new PrivateLinkConnectionApprovalRequestResource().withPrivateLinkServiceConnectionState(
                    new PrivateLinkConnectionState()
                        .withStatus(PrivateEndpointServiceConnectionStatus.REJECTED.toString())
                ))
            .then();
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.regionserver;
import static org.apache.hadoop.hbase.HBaseTestingUtility.START_KEY;
import static org.apache.hadoop.hbase.HBaseTestingUtility.START_KEY_BYTES;
import static org.apache.hadoop.hbase.HBaseTestingUtility.fam1;
import static org.apache.hadoop.hbase.regionserver.Store.PRIORITY_USER;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestCase;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeepDeletedCells;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoder;
import org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoderImpl;
import org.apache.hadoop.hbase.io.hfile.HFileScanner;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionProgress;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequestImpl;
import org.apache.hadoop.hbase.regionserver.compactions.RatioBasedCompactionPolicy;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.wal.WAL;
import org.junit.After;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Test major compactions
*/
@Category({RegionServerTests.class, MediumTests.class})
@RunWith(Parameterized.class)
public class TestMajorCompaction {
@ClassRule
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestMajorCompaction.class);
  @Parameterized.Parameters
  public static Object[] data() {
    // In-memory compaction policies the whole suite is run under.
    return new Object[] { "NONE", "BASIC", "EAGER" };
  }
  // Assigned in the constructor so the rule exists before JUnit applies it.
  @Rule public TestName name;
  private static final Logger LOG = LoggerFactory.getLogger(TestMajorCompaction.class.getName());
  private static final HBaseTestingUtility UTIL = HBaseTestingUtility.createLocalHTU();
  protected Configuration conf = UTIL.getConfiguration();
  // Region under test; created in setUp() and closed in tearDown().
  private HRegion r = null;
  private HTableDescriptor htd = null;
  private static final byte [] COLUMN_FAMILY = fam1;
  private final byte [] STARTROW = Bytes.toBytes(START_KEY);
  private static final byte [] COLUMN_FAMILY_TEXT = COLUMN_FAMILY;
  private int compactionThreshold;
  // Row keys derived from START_KEY in the constructor.
  private byte[] secondRowBytes, thirdRowBytes;
  private static final long MAX_FILES_TO_COMPACT = 10;
/** constructor */
public TestMajorCompaction(String compType) {
super();
name = new TestName();
// Set cache flush size to 1MB
conf.setInt(HConstants.HREGION_MEMSTORE_FLUSH_SIZE, 1024*1024);
conf.setInt(HConstants.HREGION_MEMSTORE_BLOCK_MULTIPLIER, 100);
compactionThreshold = conf.getInt("hbase.hstore.compactionThreshold", 3);
conf.set(CompactingMemStore.COMPACTING_MEMSTORE_TYPE_KEY, String.valueOf(compType));
secondRowBytes = START_KEY_BYTES.clone();
// Increment the least significant character so we get to next row.
secondRowBytes[START_KEY_BYTES.length - 1]++;
thirdRowBytes = START_KEY_BYTES.clone();
thirdRowBytes[START_KEY_BYTES.length - 1] =
(byte) (thirdRowBytes[START_KEY_BYTES.length - 1] + 2);
}
@Before
public void setUp() throws Exception {
this.htd = UTIL.createTableDescriptor(name.getMethodName().replace('[','i').replace(']','i'));
this.r = UTIL.createLocalHRegion(htd, null, null);
}
@After
public void tearDown() throws Exception {
WAL wal = ((HRegion)r).getWAL();
((HRegion)r).close();
wal.close();
}
/**
* Test that on a major compaction, if all cells are expired or deleted, then
* we'll end up with no product. Make sure scanner over region returns
* right answer in this case - and that it just basically works.
* @throws IOException exception encountered
*/
@Test
public void testMajorCompactingToNoOutput() throws IOException {
testMajorCompactingWithDeletes(KeepDeletedCells.FALSE);
}
/**
* Test that on a major compaction,Deleted cells are retained if keep deleted cells is set to true
* @throws IOException exception encountered
*/
@Test
public void testMajorCompactingWithKeepDeletedCells() throws IOException {
testMajorCompactingWithDeletes(KeepDeletedCells.TRUE);
}
/**
* Run compaction and flushing memstore
* Assert deletes get cleaned up.
* @throws Exception
*/
@Test
public void testMajorCompaction() throws Exception {
majorCompaction();
}
@Test
public void testDataBlockEncodingInCacheOnly() throws Exception {
majorCompactionWithDataBlockEncoding(true);
}
@Test
public void testDataBlockEncodingEverywhere() throws Exception {
majorCompactionWithDataBlockEncoding(false);
}
public void majorCompactionWithDataBlockEncoding(boolean inCacheOnly)
throws Exception {
Map<HStore, HFileDataBlockEncoder> replaceBlockCache = new HashMap<>();
for (HStore store : r.getStores()) {
HFileDataBlockEncoder blockEncoder = store.getDataBlockEncoder();
replaceBlockCache.put(store, blockEncoder);
final DataBlockEncoding inCache = DataBlockEncoding.PREFIX;
final DataBlockEncoding onDisk = inCacheOnly ? DataBlockEncoding.NONE :
inCache;
((HStore)store).setDataBlockEncoderInTest(new HFileDataBlockEncoderImpl(onDisk));
}
majorCompaction();
// restore settings
for (Entry<HStore, HFileDataBlockEncoder> entry : replaceBlockCache.entrySet()) {
((HStore)entry.getKey()).setDataBlockEncoderInTest(entry.getValue());
}
}
private void majorCompaction() throws Exception {
createStoreFile(r);
for (int i = 0; i < compactionThreshold; i++) {
createStoreFile(r);
}
// Add more content.
HBaseTestCase.addContent(new RegionAsTable(r), Bytes.toString(COLUMN_FAMILY));
// Now there are about 5 versions of each column.
// Default is that there only 3 (MAXVERSIONS) versions allowed per column.
//
// Assert == 3 when we ask for versions.
Result result = r.get(new Get(STARTROW).addFamily(COLUMN_FAMILY_TEXT).readVersions(100));
assertEquals(compactionThreshold, result.size());
// see if CompactionProgress is in place but null
for (HStore store : r.getStores()) {
assertNull(store.getCompactionProgress());
}
r.flush(true);
r.compact(true);
// see if CompactionProgress has done its thing on at least one store
int storeCount = 0;
for (HStore store : r.getStores()) {
CompactionProgress progress = store.getCompactionProgress();
if( progress != null ) {
++storeCount;
assertTrue(progress.currentCompactedKVs > 0);
assertTrue(progress.getTotalCompactingKVs() > 0);
}
assertTrue(storeCount > 0);
}
// look at the second row
// Increment the least significant character so we get to next row.
byte [] secondRowBytes = START_KEY_BYTES.clone();
secondRowBytes[START_KEY_BYTES.length - 1]++;
// Always 3 versions if that is what max versions is.
result = r.get(new Get(secondRowBytes).addFamily(COLUMN_FAMILY_TEXT).readVersions(100));
LOG.debug("Row " + Bytes.toStringBinary(secondRowBytes) + " after " +
"initial compaction: " + result);
assertEquals("Invalid number of versions of row "
+ Bytes.toStringBinary(secondRowBytes) + ".", compactionThreshold,
result.size());
// Now add deletes to memstore and then flush it.
// That will put us over
// the compaction threshold of 3 store files. Compacting these store files
// should result in a compacted store file that has no references to the
// deleted row.
LOG.debug("Adding deletes to memstore and flushing");
Delete delete = new Delete(secondRowBytes, System.currentTimeMillis());
byte [][] famAndQf = {COLUMN_FAMILY, null};
delete.addFamily(famAndQf[0]);
r.delete(delete);
// Assert deleted.
result = r.get(new Get(secondRowBytes).addFamily(COLUMN_FAMILY_TEXT).readVersions(100));
assertTrue("Second row should have been deleted", result.isEmpty());
r.flush(true);
result = r.get(new Get(secondRowBytes).addFamily(COLUMN_FAMILY_TEXT).readVersions(100));
assertTrue("Second row should have been deleted", result.isEmpty());
// Add a bit of data and flush. Start adding at 'bbb'.
createSmallerStoreFile(this.r);
r.flush(true);
// Assert that the second row is still deleted.
result = r.get(new Get(secondRowBytes).addFamily(COLUMN_FAMILY_TEXT).readVersions(100));
assertTrue("Second row should still be deleted", result.isEmpty());
// Force major compaction.
r.compact(true);
assertEquals(1, r.getStore(COLUMN_FAMILY_TEXT).getStorefiles().size());
result = r.get(new Get(secondRowBytes).addFamily(COLUMN_FAMILY_TEXT).readVersions(100));
assertTrue("Second row should still be deleted", result.isEmpty());
// Make sure the store files do have some 'aaa' keys in them -- exactly 3.
// Also, that compacted store files do not have any secondRowBytes because
// they were deleted.
verifyCounts(3,0);
// Multiple versions allowed for an entry, so the delete isn't enough
// Lower TTL and expire to ensure that all our entries have been wiped
final int ttl = 1000;
for (HStore store : r.getStores()) {
ScanInfo old = store.getScanInfo();
ScanInfo si = old.customize(old.getMaxVersions(), ttl, old.getKeepDeletedCells());
store.setScanInfo(si);
}
Thread.sleep(1000);
r.compact(true);
int count = count();
assertEquals("Should not see anything after TTL has expired", 0, count);
}
@Test
public void testTimeBasedMajorCompaction() throws Exception {
// create 2 storefiles and force a major compaction to reset the time
int delay = 10 * 1000; // 10 sec
float jitterPct = 0.20f; // 20%
conf.setLong(HConstants.MAJOR_COMPACTION_PERIOD, delay);
conf.setFloat("hbase.hregion.majorcompaction.jitter", jitterPct);
HStore s = ((HStore) r.getStore(COLUMN_FAMILY));
s.storeEngine.getCompactionPolicy().setConf(conf);
try {
createStoreFile(r);
createStoreFile(r);
r.compact(true);
// add one more file & verify that a regular compaction won't work
createStoreFile(r);
r.compact(false);
assertEquals(2, s.getStorefilesCount());
// ensure that major compaction time is deterministic
RatioBasedCompactionPolicy
c = (RatioBasedCompactionPolicy)s.storeEngine.getCompactionPolicy();
Collection<HStoreFile> storeFiles = s.getStorefiles();
long mcTime = c.getNextMajorCompactTime(storeFiles);
for (int i = 0; i < 10; ++i) {
assertEquals(mcTime, c.getNextMajorCompactTime(storeFiles));
}
// ensure that the major compaction time is within the variance
long jitter = Math.round(delay * jitterPct);
assertTrue(delay - jitter <= mcTime && mcTime <= delay + jitter);
// wait until the time-based compaction interval
Thread.sleep(mcTime);
// trigger a compaction request and ensure that it's upgraded to major
r.compact(false);
assertEquals(1, s.getStorefilesCount());
} finally {
// reset the timed compaction settings
conf.setLong(HConstants.MAJOR_COMPACTION_PERIOD, 1000*60*60*24);
conf.setFloat("hbase.hregion.majorcompaction.jitter", 0.20F);
// run a major to reset the cache
createStoreFile(r);
r.compact(true);
assertEquals(1, s.getStorefilesCount());
}
}
private void verifyCounts(int countRow1, int countRow2) throws Exception {
int count1 = 0;
int count2 = 0;
for (HStoreFile f: r.getStore(COLUMN_FAMILY_TEXT).getStorefiles()) {
HFileScanner scanner = f.getReader().getScanner(false, false);
scanner.seekTo();
do {
byte [] row = CellUtil.cloneRow(scanner.getCell());
if (Bytes.equals(row, STARTROW)) {
count1++;
} else if(Bytes.equals(row, secondRowBytes)) {
count2++;
}
} while(scanner.next());
}
assertEquals(countRow1,count1);
assertEquals(countRow2,count2);
}
private int count() throws IOException {
int count = 0;
for (HStoreFile f: r.getStore(COLUMN_FAMILY_TEXT).getStorefiles()) {
HFileScanner scanner = f.getReader().getScanner(false, false);
if (!scanner.seekTo()) {
continue;
}
do {
count++;
} while(scanner.next());
}
return count;
}
private void createStoreFile(final HRegion region) throws IOException {
createStoreFile(region, Bytes.toString(COLUMN_FAMILY));
}
private void createStoreFile(final HRegion region, String family) throws IOException {
Table loader = new RegionAsTable(region);
HBaseTestCase.addContent(loader, family);
region.flush(true);
}
private void createSmallerStoreFile(final HRegion region) throws IOException {
Table loader = new RegionAsTable(region);
HBaseTestCase.addContent(loader, Bytes.toString(COLUMN_FAMILY), Bytes.toBytes("" +
"bbb"), null);
region.flush(true);
}
/**
* Test for HBASE-5920 - Test user requested major compactions always occurring
*/
@Test
public void testNonUserMajorCompactionRequest() throws Exception {
HStore store = r.getStore(COLUMN_FAMILY);
createStoreFile(r);
for (int i = 0; i < MAX_FILES_TO_COMPACT + 1; i++) {
createStoreFile(r);
}
store.triggerMajorCompaction();
CompactionRequestImpl request = store.requestCompaction().get().getRequest();
assertNotNull("Expected to receive a compaction request", request);
assertEquals(
"System-requested major compaction should not occur if there are too many store files",
false,
request.isMajor());
}
/**
* Test for HBASE-5920
*/
@Test
public void testUserMajorCompactionRequest() throws IOException{
HStore store = r.getStore(COLUMN_FAMILY);
createStoreFile(r);
for (int i = 0; i < MAX_FILES_TO_COMPACT + 1; i++) {
createStoreFile(r);
}
store.triggerMajorCompaction();
CompactionRequestImpl request =
store.requestCompaction(PRIORITY_USER, CompactionLifeCycleTracker.DUMMY, null).get()
.getRequest();
assertNotNull("Expected to receive a compaction request", request);
assertEquals(
"User-requested major compaction should always occur, even if there are too many store files",
true,
request.isMajor());
}
/**
* Test that on a major compaction, if all cells are expired or deleted, then we'll end up with no
* product. Make sure scanner over region returns right answer in this case - and that it just
* basically works.
* @throws IOException
*/
@Test
public void testMajorCompactingToNoOutputWithReverseScan() throws IOException {
createStoreFile(r);
for (int i = 0; i < compactionThreshold; i++) {
createStoreFile(r);
}
// Now delete everything.
Scan scan = new Scan();
scan.setReversed(true);
InternalScanner s = r.getScanner(scan);
do {
List<Cell> results = new ArrayList<>();
boolean result = s.next(results);
assertTrue(!results.isEmpty());
r.delete(new Delete(CellUtil.cloneRow(results.get(0))));
if (!result) {
break;
}
} while (true);
s.close();
// Flush
r.flush(true);
// Major compact.
r.compact(true);
scan = new Scan();
scan.setReversed(true);
s = r.getScanner(scan);
int counter = 0;
do {
List<Cell> results = new ArrayList<>();
boolean result = s.next(results);
if (!result) {
break;
}
counter++;
} while (true);
s.close();
assertEquals(0, counter);
}
private void testMajorCompactingWithDeletes(KeepDeletedCells keepDeletedCells)
throws IOException {
createStoreFile(r);
for (int i = 0; i < compactionThreshold; i++) {
createStoreFile(r);
}
// Now delete everything.
InternalScanner s = r.getScanner(new Scan());
int originalCount = 0;
do {
List<Cell> results = new ArrayList<>();
boolean result = s.next(results);
r.delete(new Delete(CellUtil.cloneRow(results.get(0))));
if (!result) break;
originalCount++;
} while (true);
s.close();
// Flush
r.flush(true);
for (HStore store : this.r.stores.values()) {
ScanInfo old = store.getScanInfo();
ScanInfo si = old.customize(old.getMaxVersions(), old.getTtl(), keepDeletedCells);
store.setScanInfo(si);
}
// Major compact.
r.compact(true);
s = r.getScanner(new Scan().setRaw(true));
int counter = 0;
do {
List<Cell> results = new ArrayList<>();
boolean result = s.next(results);
if (!result) break;
counter++;
} while (true);
assertEquals(keepDeletedCells == KeepDeletedCells.TRUE ? originalCount : 0, counter);
}
}
| |
/*
* Copyright 2018 technosf [https://github.com/technosf]
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.github.technosf.posterer.models.impl.base;
import static java.lang.System.getProperty;
import static org.apache.commons.io.FileUtils.sizeOf;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;
import org.apache.commons.io.FilenameUtils;
import org.eclipse.jdt.annotation.Nullable;
import com.github.technosf.posterer.models.Properties;
import com.github.technosf.posterer.models.Proxy;
import com.github.technosf.posterer.models.Request;
import com.github.technosf.posterer.models.impl.ProxyBean;
import com.github.technosf.posterer.models.impl.RequestBean;
/**
 * Abstract implementation of basic {@code PreferencesModel} methods based
 * on using local files
 *
 * @author technosf
 * @since 0.0.1
 * @version 0.0.1
 */
public abstract class AbstractPropertiesModel
        implements Properties
{
    /**
     * Property keys
     */
    private static final String PROP_USER_HOME = "user.home";

    /**
     * Generated properties directory path
     */
    private final String PROPERTIES_DIR;

    /**
     * Generated properties file path
     */
    private final String PROPERTIES_FILE;

    /**
     * The File representing the properties directory
     */
    protected final File propsDir;

    /**
     * The File representing the properties file
     */
    protected final File propsFile;

    /**
     * RequestBean map, keyed by request hash code
     */
    private final Map<Integer, RequestBean> requestProperties =
            new HashMap<>();

    /**
     * KeyStore file paths
     */
    private final Set<String> keystoreProperties = new TreeSet<>();

    /**
     * ProxiesBean map, keyed by proxy hash code
     */
    private final Map<Integer, ProxyBean> proxyProperties =
            new HashMap<>();

    /**
     * End point map - endpoint and ref count
     */
    private final Map<String, Integer> endpoints =
            new TreeMap<>();

    /*
     * Is the properties config dirty and need saving to disk?
     */
    boolean dirty = false;

    /**
     * Constructor - create the properties directory
     *
     * @param prefix optional prefix prepended to the properties file name
     * @param directory the properties directory, or null for the default
     *            under the user's home directory
     * @param filename the properties file name, or null for the default
     */
    @SuppressWarnings("null")
    protected AbstractPropertiesModel(final String prefix,
            @Nullable final File directory,
            @Nullable final String filename)
    {
        propsDir = Optional.ofNullable(directory).orElse(
                new File(FilenameUtils.concat(getProperty(PROP_USER_HOME),
                        PROPERTIES_SUBDIR)));
        PROPERTIES_DIR = this.propsDir.getAbsolutePath();
        if (!propsDir.exists())
        {
            // mkdirs (not mkdir) so missing intermediate directories are
            // created too; failure is surfaced later by getPropertiesDir().
            propsDir.mkdirs();
        }
        String file =
                Optional.ofNullable(filename).orElse(PROPERTIES_FILENAME);
        if (!prefix.trim().isEmpty())
        {
            file = prefix + file;
        }
        PROPERTIES_FILE = FilenameUtils.concat(PROPERTIES_DIR, file);
        propsFile = new File(PROPERTIES_FILE);
    }

    /**
     * {@inheritDoc}
     *
     * @see com.github.technosf.posterer.models.Properties#getRequests()
     */
    @Override
    public final List<Request> getRequests()
    {
        return new ArrayList<>(requestProperties.values());
    }

    /**
     * {@inheritDoc}
     *
     * @see com.github.technosf.posterer.models.Properties#getProxies()
     */
    @Override
    public final List<Proxy> getProxies()
    {
        return new ArrayList<>(proxyProperties.values());
    }

    /**
     * {@inheritDoc}
     *
     * @see com.github.technosf.posterer.models.Properties#getKeyStores()
     */
    @Override
    public final List<String> getKeyStores()
    {
        return new ArrayList<>(keystoreProperties);
    }

    /**
     * {@inheritDoc}
     *
     * @see com.github.technosf.posterer.models.Properties#getPropertiesDir()
     */
    @Override
    public final String getPropertiesDir() throws IOException
    {
        if (!propsDir.exists())
        /*
         * Properties directory was not created
         */
        {
            throw new IOException(String.format(
                    "Directory [%1$s] has not been created.",
                    PROPERTIES_DIR));
        }
        else if (!propsDir.isDirectory())
        /*
         * Properties directory is not a directory
         */
        {
            throw new IOException(String.format(
                    "Location [%1$s] is not a directory.",
                    PROPERTIES_DIR));
        }
        else if (!propsDir.canWrite())
        /*
         * Properties directory is not writable
         */
        {
            throw new IOException(String.format(
                    "Cannot write to directory: [%1$s]",
                    PROPERTIES_DIR));
        }
        return PROPERTIES_DIR;
    }

    /**
     * {@inheritDoc}
     *
     * @see com.github.technosf.posterer.models.Properties#removeData(com.github.technosf.posterer.models.Request)
     */
    @SuppressWarnings("null")
    @Override
    public final boolean removeData(final @Nullable Request request)
    {
        if (request != null)
        {
            RequestBean pdi = new RequestBean(request);
            if (pdi.isActionable()
                    && (requestProperties.remove(pdi.hashCode()) != null)) // Check and remove the properties
            {
                return erase(pdi);
            }
        }
        return false;
    }

    /**
     * {@inheritDoc}
     *
     * @see com.github.technosf.posterer.models.Properties#save()
     */
    @Override
    public final synchronized boolean save()
    {
        if (dirty && write())
        {
            dirty = false;
            return true;
        }
        return false;
    }

    /**
     * {@inheritDoc}
     *
     * @see com.github.technosf.posterer.models.Properties#addData(java.io.File)
     */
    @SuppressWarnings("null")
    @Override
    public final boolean addData(final @Nullable File keyStoreFile)
    {
        boolean result = false;
        try
        {
            String filepath;
            if (keyStoreFile != null
                    && keyStoreFile.canRead()
                    && (result = !keystoreProperties
                            .contains(filepath =
                                    keyStoreFile.getCanonicalPath())))
            {
                addKeystore(filepath);
                save();
            }
        }
        catch (IOException ignored)
        {
            // Deliberate best-effort: an unreadable/unresolvable keystore
            // path is reported via the false return value, not an exception.
            // LOG.error("File issue for keystore file: {}", ignored);
        }
        return result;
    }

    /* ---------------------------------------------------------------- */

    /**
     * Erase the given requests from the configuration
     *
     * @param requestBean
     *            the request to erase
     * @return true if the request was erased
     */
    protected abstract boolean erase(RequestBean requestBean);

    /**
     * Write out the current configuration
     *
     * @return true if the config was written
     */
    protected abstract boolean write();

    /**
     * Adds a keystore path to the configuration
     *
     * @param filepath
     *            the keystore file path
     */
    protected abstract void addKeystore(String filepath);

    /* ---------------------------------------------------------------- */

    /**
     * Is there a properties file?
     *
     * @return true if there is
     */
    public final boolean isPropsFileExtant()
    {
        return propsFile.exists();
    }

    /**
     * Returns the size of the properties file in bytes.
     *
     * @return the file size, or 0 if the file does not exist
     */
    public final long sizePropsFile()
    {
        if (isPropsFileExtant())
        {
            return sizeOf(propsFile);
        }
        return 0;
    }

    /**
     * Returns the absolute file system path to the properties file
     *
     * @return the properties file path
     * @throws IOException
     *             exception accessing the properties file
     */
    @SuppressWarnings("null")
    public final String pathPropsFile() throws IOException
    {
        return propsFile.getAbsolutePath();
    }

    /* ---------------------------------------------------------------- */

    /**
     * @param requestBean the request to add
     * @return true if the RequestBean was added
     */
    @SuppressWarnings("null")
    protected final boolean putIfAbsent(RequestBean requestBean)
    {
        return null == requestProperties.putIfAbsent(requestBean.hashCode(),
                requestBean);
    }

    /**
     * @param proxyBean the proxy to add
     * @return true if the ProxyBean was added
     */
    @SuppressWarnings("null")
    protected final boolean putIfAbsent(ProxyBean proxyBean)
    {
        return null == proxyProperties.putIfAbsent(proxyBean.hashCode(),
                proxyBean);
    }

    /**
     * @param keystorefilepath the keystore file path to add
     * @return true if the keystore path was added
     */
    protected final boolean putIfAbsent(String keystorefilepath)
    {
        return keystoreProperties.add(keystorefilepath);
    }

    /**
     * Sets the dirty flag
     */
    protected final void dirty()
    {
        dirty = true;
    }

    /**
     * @return true if the configuration needs saving to disk
     */
    protected final boolean isDirty()
    {
        return dirty;
    }

    /**
     * Add an endpoint to the current endpoint map, incrementing its ref count
     *
     * @param endpoint the endpoint to add
     */
    protected final synchronized void addEndpoint(final String endpoint)
    {
        endpoints.merge(endpoint, 1, Integer::sum);
        dirty();
    }

    /**
     * Remove an endpoint from the current endpoint map, decrementing its ref
     * count and dropping the entry once the count reaches zero
     *
     * @param endpoint the endpoint to remove
     */
    protected final synchronized void removeEndpoint(final String endpoint)
    {
        int endpointCount = endpoints.getOrDefault(endpoint, 0);
        if (endpointCount > 1)
        {
            endpoints.put(endpoint, endpointCount - 1);
        }
        else
        {
            endpoints.remove(endpoint);
        }
        dirty();
    }
}
| |
package org.tsers.junitquest;
import org.tsers.junitquest.expr.*;
import java.lang.reflect.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.function.BiFunction;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
 * Static utility helpers for expression-tree manipulation and JVM method
 * descriptor parsing.
 */
public class Jutil {
    public static int LABEL_OPCODE = -1;
    public static int BEGIN_METHOD_OPCODE = -2;

    /**
     * Instantiates an ExprNode subclass via its {@code List} constructor.
     * Preserves the underlying cause when construction fails.
     */
    public static BiFunction<Class, List<ExprNode>, ExprNode> createNode = (nodeClass, children) -> {
        try {
            Constructor<? extends ExprNode> constructor = nodeClass.getConstructor(List.class);
            return constructor.newInstance(children);
        } catch (Exception e) {
            // Chain the cause: the failure may be in the constructor body,
            // not just a missing List constructor.
            throw new RuntimeException("Cannot find List constructor: " + nodeClass, e);
        }
    };

    //if children cannot be mapped returns the parent itself
    public static BiFunction<ExprNode, Function<ExprNode, ExprNode>, ExprNode> mapChildren = (parent, f) -> {
        try {
            List<ExprNode> mappedChildren =
                    parent.getChildren().stream()
                            .map(f)
                            .collect(Collectors.toList());
            return Jutil.createNode.apply(parent.getClass(), mappedChildren);
        } catch (Exception e) {
            // Deliberate best-effort: fall back to the unmapped parent.
            return parent;
        }
    };

    /** Applies {@code function} to the first child only; the rest are kept as-is. */
    public static BiFunction<ExprNode, Function<ExprNode, ExprNode>, ExprNode> mapFirstChild = (node, function) -> {
        int numOfItemsToMap = 1;
        List<ExprNode> mapped =
                node.getChildren().stream()
                        .limit(numOfItemsToMap)
                        .map(function)
                        .collect(Collectors.toList());
        List<ExprNode> rest =
                node.getChildren().stream()
                        .skip(numOfItemsToMap)
                        .collect(Collectors.toList());
        return Jutil.createNode.apply(node.getClass(), Jutil.combineLists(mapped, rest));
    };

    public static boolean isStatic(Method method) {
        return Modifier.isStatic(method.getModifiers());
    }

    public static Object[] listToArray(List list) {
        return list.stream().toArray(Object[]::new);
    }

    public static <T> List<T> arrayToList(T[] objects) {
        return Arrays.asList(objects);
    }

    /** @return a copy of {@code object} with the first element dropped */
    public static Object[] shiftLeft(Object[] object) {
        if (object.length == 0) {
            return new Object[0];
        }
        return Arrays.copyOfRange(object, 1, object.length);
    }

    public static <T> List<T> combineLists(List<T> a, List<T> b) {
        return Stream.concat(a.stream(), b.stream())
                .collect(Collectors.toList());
    }

    public static <T> List<T> combineLists(List<T> a, List<T> b, List<T> c) {
        return combineLists(combineLists(a, b), c);
    }

    /** @return true if {@code clazz} (or a subtype) appears anywhere in the tree */
    public static boolean containsClazz(ExprNode node, Class clazz) {
        return calcClazzHeight(node, clazz) > 0;
    }

    private static int calcClazzHeight(ExprNode node, Class clazz) {
        return calcClazzHeightRec(node, 0, clazz);
    }

    // Counts the deepest chain of clazz-assignable nodes from this node down.
    private static int calcClazzHeightRec(ExprNode node, int i, Class clazz) {
        int largest = 0;
        for (ExprNode c : node.getChildren()) {
            int a = calcClazzHeightRec(c, 0, clazz);
            if (a > largest) {
                largest = a;
            }
        }
        if (clazz.isAssignableFrom(node.getClass())) {
            i++;
        }
        return i + largest;
    }

    //does not apply children if the node itself is transformed
    public static Function<ExprNode, ExprNode> applyRecursively(Function<ExprNode, ExprNode> function) {
        return node -> {
            ExprNode appliedParent = function.apply(node);
            if (!appliedParent.equals(node)) {
                return appliedParent;
            }
            List<ExprNode> appliedChildren = node.getChildren().stream()
                    .map(applyRecursively(function))
                    // BUGFIX: filter the mapped element 'n', not the captured
                    // outer 'node' (which is always non-null here) — the old
                    // predicate made this null-filter a no-op.
                    .filter(n -> n != null)
                    .collect(Collectors.toList());
            return appliedParent.copy(appliedChildren);
        };
    }

    /** @return all nodes in the tree assignable to {@code clazz}, children first */
    public static Function<ExprNode, List<ExprNode>> findAllNodeTypes(Class clazz) {
        return node -> {
            List<ExprNode> foundChildren = node.getChildren().stream()
                    .map(findAllNodeTypes(clazz))
                    .flatMap(n -> n.stream())
                    .collect(Collectors.toList());
            if (clazz.isAssignableFrom(node.getClass())) {
                return Jutil.combineLists(foundChildren, Arrays.asList(node));
            } else {
                return foundChildren;
            }
        };
    }

    public static boolean isPrimitive(Class clazz) {
        return (clazz == Integer.TYPE || clazz == Double.TYPE
                || clazz == Boolean.TYPE || clazz == Byte.TYPE ||
                clazz == Short.TYPE || clazz == Long.TYPE ||
                clazz == Float.TYPE || clazz == Character.TYPE);
    }

    /**
     * Parses a JVM method descriptor such as {@code (ILjava/lang/String;)V}
     * into the parameter classes between the parentheses.
     */
    public static Class[] descToParameterTypes(String desc) {
        Pattern pattern = Pattern.compile("\\((.*)\\)");
        Matcher m = pattern.matcher(desc);
        m.find();
        String paramsString = m.group(1);
        List<String> params = splitDescParams(paramsString);
        List<Class> classParams = params.stream()
                .map(p -> descParamToClass(p))
                .collect(Collectors.toList());
        return classParams.toArray(new Class[classParams.size()]);
    }

    // Resolves a single descriptor token; only 1-dimensional arrays supported.
    private static Class descParamToClass(String descParam) {
        int numberOfArrays = 0;
        if (descParam.substring(0, 1).equals("[")) {
            descParam = descParam.substring(1);
            numberOfArrays++;
        }
        if (descParam.substring(0, 1).equals("[")) {
            throw new RuntimeException("2 dimension arrays not supported");
        }
        Class clazz = plainDescparamsToClass(descParam);
        while (numberOfArrays > 0) {
            // Array.newInstance is the portable way to obtain an array class.
            clazz = Array.newInstance(clazz, 1).getClass();
            numberOfArrays--;
        }
        return clazz;
    }

    // "Ljava/util/List;" -> java.util.List; single char -> primitive.
    private static Class plainDescparamsToClass(String descParam) {
        if (descParam.length() == 1) {
            return primitiveDescToClass(descParam);
        }
        String s = descParam.substring(1);
        String s2 = s.substring(0, s.length() - 1);
        String s3 = s2.replaceAll("/", ".");
        return Instrumenter.getClass(s3);
    }

    /** Maps a one-letter JVM type code to its primitive class. */
    public static Class primitiveDescToClass(String descParam) {
        switch (descParam) {
            case "I": return Integer.TYPE;
            case "Z": return Boolean.TYPE;
            case "J": return Long.TYPE;
            case "C": return Character.TYPE;
            case "D": return Double.TYPE;
            case "F": return Float.TYPE;
            case "S": return Short.TYPE;
            case "B": return Byte.TYPE;
            default:
                throw new RuntimeException("Not a primitive description: " + descParam);
        }
    }

    // Tokenizes the parameter section of a descriptor, one match at a time.
    private static List<String> splitDescParams(String descParams) {
        List<String> matchedParams = new ArrayList<>();
        Pattern paramPattern = Pattern.compile("(\\[*?L.*?;)|(\\[*?I|\\[?Z|\\[?B|\\[*?D|\\[*?F|\\[*?J|\\[*?S|\\[*?C)");
        while (true) {
            Matcher m2 = paramPattern.matcher(descParams);
            if (!m2.find()) {
                break;
            }
            String param = m2.group();
            matchedParams.add(param);
            descParams = descParams.substring(param.length());
        }
        return matchedParams;
    }

    public static Function<ExprNode, ExprNode> curry(BiFunction<ExprNode, String, ExprNode> f, String o) {
        return a -> f.apply(a, o);
    }

    public static Function<ExprNode, ExprNode> curry(BiFunction<ExprNode, ExprNode, ExprNode> f, ExprNode o) {
        return a -> f.apply(a, o);
    }

    //equation with stacknode(0) on the other side and constant node on the other side
    public static boolean isSimpleEquation(ExprNode node) {
        if (!(node instanceof EqualNode || node instanceof NotEqualNode ||
                node instanceof GreaterThanNode || node instanceof GreaterThanEqNode
                || node instanceof LessThanNode || node instanceof LessThanEqNode)) {
            return false;
        }
        if (node.getChildren().size() != 2) {
            return false;
        }
        if (!node.getChildren().stream().anyMatch(n -> n instanceof StackNode && ((StackNode) n).getValue() == 0)) {
            return false;
        }
        if (!node.getChildren().stream().anyMatch(n -> n instanceof ConstantNode)) {
            return false;
        }
        return true;
    }

    public static boolean methodReturnsInt(String methodDesc) {
        return methodDesc.endsWith(")I");
    }

    public static boolean fieldReturnsInt(String desc) {
        return desc.equals("I");
    }

    public static boolean isClassArray(Class obj) {
        return obj != null && obj.isArray();
    }

    public static Class[] getParameterTypes(AccessibleObject ao) {
        if (ao instanceof Method) {
            return ((Method) ao).getParameterTypes();
        } else if (ao instanceof Constructor) {
            return ((Constructor) ao).getParameterTypes();
        }
        throw new RuntimeException("Cannot get parameter types for: " + ao);
    }

    public static boolean isStatic(AccessibleObject ao) {
        if (ao instanceof Method) {
            return Jutil.isStatic((Method) ao);
        }
        return false;
    }

    public static Class getDeclaringClass(AccessibleObject ao) {
        if (ao instanceof Method) {
            return ((Method) ao).getDeclaringClass();
        } else if (ao instanceof Constructor) {
            return ((Constructor) ao).getDeclaringClass();
        }
        throw new RuntimeException("Cannot get declaring class for: " + ao);
    }

    /** Resolves a method or constructor on {@code clazz} from its name and descriptor. */
    public static AccessibleObject getAccessibleObject(Class clazz, String methodName, String methodDesc) {
        Class paramTypes[] = Jutil.descToParameterTypes(methodDesc);
        try {
            if (methodName.equals("<init>")) {
                return clazz.getConstructor(paramTypes);
            } else {
                return clazz.getMethod(methodName, paramTypes);
            }
        } catch (Exception e) {
            // Chain the cause so reflection failures remain diagnosable.
            throw new RuntimeException("Cannot find accessible object :" + methodName + " " + methodDesc, e);
        }
    }

    /** "int java.lang.String.length" -> "java.lang.String.length" (last token) */
    public static String getJavaName(String fullName) {
        String[] splitted = fullName.split(" ");
        return splitted[splitted.length - 1];
    }

    /** "a.b.C" -> "C" */
    public static String getClassNameFromPackage(String fullName) {
        String[] splitted = fullName.split("\\.");
        return splitted[splitted.length - 1];
    }
}
| |
/**
* Copyright (C) 2008 Stratio (http://stratio.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.stratio.mojo.unix.util;
/*
* The MIT License
*
* Copyright 2009 The Codehaus.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
* of the Software, and to permit persons to whom the Software is furnished to do
* so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
import fj.*;
import fj.data.*;
import fj.data.List;
import java.io.File;
import java.util.*;
import static fj.data.List.*;
import static fj.data.Option.*;
/**
* @author <a href="mailto:trygvis@inamo.no">Trygve Laugstøl</a>
*/
/**
 * An immutable, normalized relative path that always uses '/' as its separator,
 * independent of the underlying platform. Leading "./" and "/" prefixes,
 * trailing "/." and "/" suffixes, and duplicate slashes are stripped on
 * construction; backslashes are rejected outright.
 * <p/>
 * Instances are obtained through {@link #relativePath(String)},
 * {@link #relativePathFromFiles(File, File)}, or by adding segments to
 * {@link #BASE}, which represents the base directory ".".
 *
 * @author <a href="mailto:trygvis@inamo.no">Trygve Laugstøl</a>
 */
public class RelativePath
    implements Comparable<RelativePath>
{
    /** The normalized path. Never starts or ends with '/'. */
    public final String string;

    /** The platform path separator, used only when converting File paths. */
    private static final char PATH_SEPARATOR;

    static
    {
        String s = System.getProperty( "file.separator" );
        if ( s.length() != 1 )
        {
            throw new RuntimeException( "Unsupported platform, file.separator has to be exactly one character long" );
        }
        PATH_SEPARATOR = s.charAt( 0 );
    }

    /**
     * The base path ("."). Adding a segment to BASE yields a fresh path rather
     * than appending; BASE is below-or-same only of itself.
     */
    public final static RelativePath BASE = new RelativePath( "." )
    {
        public RelativePath add( String string )
        {
            String cleaned = clean( string );
            if ( cleaned == null )
            {
                return this;
            }
            return new RelativePath( cleaned );
        }

        public RelativePath add( RelativePath relativePath )
        {
            return relativePath;
        }

        public RelativePath parent()
        {
            throw new IllegalStateException( "parent() on BASE" );
        }

        public boolean isBelowOrSame( RelativePath other )
        {
            return other.isBase();
        }

        public Option<RelativePath> subtract( RelativePath parent )
        {
            if ( parent.isBase() )
            {
                return some( BASE );
            }
            return none();
        }

        public List<String> toList()
        {
            return nil();
        }

        public String asAbsolutePath( String basePath )
        {
            return basePath;
        }

        public boolean isBase()
        {
            return true;
        }
    };

    // -----------------------------------------------------------------------
    //
    // -----------------------------------------------------------------------

    /** Functional-java ordering by the underlying string. */
    public static final Ord<RelativePath> ord = Ord.ord( new F<RelativePath, F<RelativePath, Ordering>>()
    {
        public F<RelativePath, Ordering> f( final RelativePath a )
        {
            return new F<RelativePath, Ordering>()
            {
                public Ordering f( RelativePath b )
                {
                    return Ord.stringOrd.compare( a.string, b.string );
                }
            };
        }
    } );

    /** java.util ordering by the underlying string, consistent with equals(). */
    public static final Comparator<RelativePath> comparator = new Comparator<RelativePath>()
    {
        public int compare( RelativePath a, RelativePath b )
        {
            return a.string.compareTo( b.string );
        }
    };

    // -----------------------------------------------------------------------
    //
    // -----------------------------------------------------------------------

    private RelativePath( String string )
    {
        if ( string.contains( "\\" ) )
        {
            throw new IllegalStateException( "A relative path can't contain '\\'." );
        }
        this.string = string;
    }

    /**
     * Returns this path with the given segment(s) appended, after cleaning.
     * Adding a root-like string ("", "/", ".") returns this path unchanged.
     */
    public RelativePath add( String string )
    {
        string = clean( string );
        if ( string == null )
        {
            return this;
        }
        return new RelativePath( this.string + "/" + string );
    }

    /** Returns this path with another relative path appended. */
    public RelativePath add( RelativePath relativePath )
    {
        return new RelativePath( this.string + "/" + relativePath.string );
    }

    /** Returns the parent path, or BASE for a single-segment path. */
    public RelativePath parent()
    {
        int i = this.string.lastIndexOf( '/' );
        if( i >= 0 )
        {
            return new RelativePath( string.substring( 0, i ) );
        }
        return BASE;
    }

    /** Resolves this path against an absolute base path. */
    public String asAbsolutePath( String basePath )
    {
        return basePath + (basePath.endsWith( "/" ) ? "" : "/") + string;
    }

    public boolean isBase()
    {
        return false;
    }

    /** Returns the last segment of the path. */
    public String name()
    {
        int i = string.lastIndexOf( '/' );
        if ( i == -1 )
        {
            return string;
        }
        return string.substring( i + 1 );
    }

    /**
     * Returns true if this path equals <code>parent</code> or lies below it.
     * <p/>
     * <ul>
     * <li>"..".isBelowOrSame(".") -> true. Everything is below the base path</li>
     * <li>"foo".isBelowOrSame("foo") -> true</li>
     * <li>"foo/bar".isBelowOrSame("foo") -> true</li>
     * <li>"foo".isBelowOrSame("foo/bar") -> false</li>
     * <li>"foobar".isBelowOrSame("foo") -> false</li>
     * </ul>
     */
    public boolean isBelowOrSame( RelativePath parent )
    {
        if ( parent.isBase() )
        {
            // Everything is below or equal to the base path
            return true;
        }
        // Fix: match only on a whole-segment boundary. A plain startsWith()
        // would wrongly report "foobar" as being below "foo".
        return string.equals( parent.string ) || string.startsWith( parent.string + "/" );
    }

    /**
     * Returns this path relative to <code>parent</code>, or none() when this
     * path is not below it. Subtracting the base path or an equal path returns
     * this path itself (preserving historical behavior).
     */
    public Option<RelativePath> subtract( RelativePath parent )
    {
        if ( isBelowOrSame( parent ) )
        {
            if ( parent.isBase() || this.string.equals( parent.string ) )
            {
                return some( this );
            }
            // Skip the parent prefix plus the separating '/'.
            return some( new RelativePath( this.string.substring( parent.string.length() + 1 ) ) );
        }
        return none();
    }

    /** Returns the path split into its segments, in order. */
    public List<String> toList()
    {
        int i = string.lastIndexOf( '/' );
        if ( i == -1 )
        {
            return List.single( string );
        }
        // Build the list back-to-front by repeatedly consing the last segment.
        List<String> list = List.single( string.substring( i + 1 ) );
        String s = string.substring( 0, i );
        do
        {
            i = s.lastIndexOf( '/' );
            if ( i == -1 )
            {
                return list.cons( s );
            }
            list = list.cons( s.substring( i + 1 ) );
            s = s.substring( 0, i );
        }
        while ( true );
    }

    // -----------------------------------------------------------------------
    // Static
    // -----------------------------------------------------------------------

    /**
     * Creates a relative path from a string, normalizing it first. A null or
     * root-like string ("", "/", ".") yields {@link #BASE}.
     */
    public static RelativePath relativePath( String string )
    {
        string = string == null ? "/" : string.trim();
        String s = clean( string );
        if ( s == null )
        {
            return BASE;
        }
        return new RelativePath( s );
    }

    /**
     * Normalizes a path string: collapses duplicate slashes, strips a leading
     * "./" or "/", strips a trailing "/." or "/". Returns null when the result
     * is root-like ("", "/", ".").
     */
    static String clean( final String string )
    {
        String s = removeDuplicateSlashes( string );
        if ( isRoot( s ) )
        {
            return null;
        }
        if ( s.startsWith( "./" ) )
        {
            s = s.substring( 2 );
        }
        else if ( s.startsWith( "/" ) )
        {
            s = s.substring( 1 );
        }
        if ( s.endsWith( "/." ) )
        {
            s = s.substring( 0, s.length() - 2 );
        }
        else if ( s.endsWith( "/" ) )
        {
            s = s.substring( 0, s.length() - 1 );
        }
        if ( isRoot( s ) )
        {
            return null;
        }
        return s;
    }

    /** Collapses every run of consecutive slashes into a single '/'. */
    private static String removeDuplicateSlashes( String string )
    {
        // StringBuilder instead of StringBuffer: no synchronization needed for
        // a method-local buffer.
        StringBuilder buffer = new StringBuilder();
        boolean lastWasSlash = false;
        for ( int i = 0; i < string.length(); i++ )
        {
            char c = string.charAt( i );
            if ( c == '/' )
            {
                if ( !lastWasSlash )
                {
                    buffer.append( c );
                    lastWasSlash = true;
                }
            }
            else
            {
                buffer.append( c );
                lastWasSlash = false;
            }
        }
        return buffer.toString();
    }

    private static boolean isRoot( String s )
    {
        return s.length() == 0 || s.equals( "/" ) || s.equals( "." );
    }

    /**
     * Returns the path of <code>child</code> relative to <code>parent</code>.
     *
     * @throws RuntimeException when child is not located under parent
     */
    public static RelativePath relativePathFromFiles( File parent, File child )
    {
        String c = child.getAbsolutePath();
        String p = parent.getAbsolutePath();
        // Fix: the match has to end on a separator boundary; "/a/bc" must not
        // be accepted as a child of "/a/b". A parent that itself ends with the
        // separator (e.g. the filesystem root) is matched by plain prefix.
        boolean startsWithParent = c.startsWith( p );
        boolean onSegmentBoundary = c.length() == p.length()
            || p.charAt( p.length() - 1 ) == PATH_SEPARATOR
            || ( c.length() > p.length() && c.charAt( p.length() ) == PATH_SEPARATOR );
        if ( !startsWithParent || !onSegmentBoundary )
        {
            throw new RuntimeException( "Not a child path." );
        }
        String s = c.substring( p.length() );
        if ( PATH_SEPARATOR != '/' )
        {
            s = s.replace( PATH_SEPARATOR, '/' );
        }
        return relativePath( s );
    }

    // -----------------------------------------------------------------------
    // Object Overrides
    // -----------------------------------------------------------------------

    public boolean equals( Object o )
    {
        if ( this == o )
        {
            return true;
        }
        if ( !(o instanceof RelativePath) )
        {
            return false;
        }
        RelativePath path = (RelativePath) o;
        return string.equals( path.string );
    }

    public int hashCode()
    {
        return string.hashCode();
    }

    public String toString()
    {
        return string;
    }

    // -----------------------------------------------------------------------
    // Comparable
    // -----------------------------------------------------------------------

    public int compareTo( RelativePath other )
    {
        return string.compareTo( other.string );
    }
}
| |
/*
* Copyright 2014 Mike Penz
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mikepenz.iconics;
import android.content.Context;
import android.os.Build;
import android.text.Spannable;
import android.text.SpannableString;
import android.text.style.CharacterStyle;
import android.text.style.StyleSpan;
import android.util.Log;
import android.widget.Button;
import android.widget.TextView;
import com.mikepenz.iconics.typeface.IIcon;
import com.mikepenz.iconics.typeface.ITypeface;
import com.mikepenz.iconics.utils.GenericsUtil;
import com.mikepenz.iconics.utils.IconicsTypefaceSpan;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
/**
 * Core helper of the Android-Iconics library. Replaces icon placeholders of the
 * form <code>{xyz-icon_name}</code> (three-letter font prefix, a dash, then an
 * icon identifier, wrapped in braces) inside text with the matching icon-font
 * character, and applies the requested character styles to the result.
 */
public final class Iconics {
    public static final String TAG = Iconics.class.getSimpleName();

    // Registered icon fonts, keyed by their three-character mapping prefix.
    // Lazily populated by init(Context); extendable through registerFont().
    private static HashMap<String, ITypeface> FONTS;

    /**
     * Discovers the ITypeface implementations listed by GenericsUtil and
     * registers each under its mapping prefix. Fonts that fail to instantiate
     * are logged and skipped.
     */
    public static void init(Context ctx) {
        String[] fonts = GenericsUtil.getFields(ctx);
        FONTS = new HashMap<>();
        for (String fontsClassPath : fonts) {
            try {
                ITypeface typeface = (ITypeface) Class.forName(fontsClassPath).newInstance();
                FONTS.put(typeface.getMappingPrefix(), typeface);
            } catch (Exception e) {
                Log.e("Android-Iconics", "Can't init: " + fontsClassPath);
            }
        }
    }

    /**
     * Registers an additional font at runtime.
     *
     * @return always true
     */
    public static boolean registerFont(ITypeface font) {
        // Fix: previously this threw a NullPointerException when called before
        // init(); create the registry on demand instead.
        if (FONTS == null) {
            FONTS = new HashMap<>();
        }
        FONTS.put(font.getMappingPrefix(), font);
        return true;
    }

    /**
     * Returns some registered font (iteration order of the HashMap), running
     * init() first if needed.
     *
     * @throws RuntimeException when no font is registered at all
     */
    public static ITypeface getDefault(Context ctx) {
        if (FONTS == null) {
            init(ctx);
        }
        if (FONTS != null && FONTS.size() > 0) {
            return FONTS.entrySet().iterator().next().getValue();
        } else {
            throw new RuntimeException("You have to provide at least one Typeface to use this functionality");
        }
    }

    /** Returns all registered fonts, running init() first if needed. */
    public static Collection<ITypeface> getRegisteredFonts(Context ctx) {
        if (FONTS == null) {
            init(ctx);
        }
        return FONTS.values();
    }

    /** Looks up a font by mapping prefix; null when none matches. */
    public static ITypeface findFont(Context ctx, String key) {
        if (FONTS == null) {
            init(ctx);
        }
        return FONTS.get(key);
    }

    /** Returns the font a given icon belongs to. */
    public static ITypeface findFont(IIcon icon) {
        return icon.getTypeface();
    }

    private Iconics() {
        // Prevent instantiation
    }

    /**
     * Replaces every known {pre-icon_name} placeholder in textSpanned with the
     * corresponding icon character, re-anchors the original StyleSpans at their
     * shifted positions, and applies the given styles to the inserted icons.
     */
    private static SpannableString style(Context ctx, HashMap<String, ITypeface> fonts, SpannableString textSpanned, List<CharacterStyle> styles, HashMap<String, List<CharacterStyle>> stylesFor) {
        if (FONTS == null) {
            init(ctx);
        }
        if (fonts == null || fonts.size() == 0) {
            fonts = FONTS;
        }
        int startIndex = -1;
        String fontKey = "";
        //remember the position of removed chars
        ArrayList<RemoveInfo> removed = new ArrayList<RemoveInfo>();
        StringBuilder text = new StringBuilder(textSpanned);
        //find the first "{" that starts a registered icon placeholder
        while ((startIndex = text.indexOf("{", startIndex + 1)) != -1) {
            //make sure we are still within the bounds of the text
            if (text.length() < startIndex + 5) {
                startIndex = -1;
                break;
            }
            //make sure the found text is a real fontKey
            //NOTE(review): this break abandons the whole scan as soon as one
            //"{" is not followed by "xxx-", while the second scan below keeps
            //searching instead. Kept as-is to preserve behavior; confirm intent
            //before changing.
            if (!text.substring(startIndex + 4, startIndex + 5).equals("-")) {
                break;
            }
            //get the fontKey
            fontKey = text.substring(startIndex + 1, startIndex + 4).toLowerCase();
            //check if the fontKey is a registeredFont
            if (fonts.containsKey(fontKey)) {
                break;
            }
        }
        if (startIndex == -1) {
            return new SpannableString(text);
        }
        //remember total removed chars
        int removedChars = 0;
        LinkedList<StyleContainer> styleContainers = new LinkedList<StyleContainer>();
        do {
            //get the information from the iconString
            //NOTE(review): assumes a "}" follows; if none exists, endIndex
            //equals startIndex and the substring below throws.
            int endIndex = text.substring(startIndex).indexOf("}") + startIndex + 1;
            String iconString = text.substring(startIndex + 1, endIndex - 1);
            iconString = iconString.replaceAll("-", "_").toLowerCase();
            try {
                //get the correct character for this Font and Icon
                IIcon icon = fonts.get(fontKey).getIcon(iconString);
                //we can only add an icon which is a font
                if (icon != null) {
                    char fontChar = icon.getCharacter();
                    String iconValue = String.valueOf(fontChar);
                    //replace the whole placeholder with the single icon char
                    text = text.replace(startIndex, endIndex, iconValue);
                    //store some info about the removed chars
                    removedChars = removedChars + (endIndex - startIndex);
                    removed.add(new RemoveInfo(startIndex, (endIndex - startIndex - 1), removedChars));
                    //add the current icon to the container
                    styleContainers.add(new StyleContainer(startIndex, startIndex + 1, iconString, fonts.get(fontKey)));
                }
            } catch (IllegalArgumentException e) {
                Log.w(Iconics.TAG, "Wrong icon name: " + iconString);
            }
            //reset fontKey so we can react if we are at the end but haven't found any more matches
            fontKey = null;
            //check the rest of the text for matches
            while ((startIndex = text.indexOf("{", startIndex + 1)) != -1) {
                //make sure we are still within the bounds
                if (text.length() < startIndex + 5) {
                    startIndex = -1;
                    break;
                }
                //check if the 5. char is a "-"
                if (text.substring(startIndex + 4, startIndex + 5).equals("-")) {
                    //get the fontKey
                    //Fix: lower-case the key like the first scan does, so
                    //upper-case prefixes are matched consistently.
                    fontKey = text.substring(startIndex + 1, startIndex + 4).toLowerCase();
                    //check if the fontKey is registered
                    if (fonts.containsKey(fontKey)) {
                        break;
                    }
                }
            }
        } while (startIndex != -1 && fontKey != null);
        SpannableString sb = new SpannableString(text);
        //reapply all previous styles at their shifted positions
        for (StyleSpan span : textSpanned.getSpans(0, textSpanned.length(), StyleSpan.class)) {
            int spanStart = newSpanPoint(textSpanned.getSpanStart(span), removed);
            int spanEnd = newSpanPoint(textSpanned.getSpanEnd(span), removed);
            if (spanStart >= 0 && spanEnd > 0) {
                sb.setSpan(span, spanStart, spanEnd, textSpanned.getSpanFlags(span));
            }
        }
        //set all the icons and styles
        for (StyleContainer styleContainer : styleContainers) {
            sb.setSpan(new IconicsTypefaceSpan("sans-serif", styleContainer.getFont().getTypeface(ctx)), styleContainer.getStartIndex(), styleContainer.getEndIndex(), Spannable.SPAN_EXCLUSIVE_EXCLUSIVE);
            if (stylesFor.containsKey(styleContainer.getIcon())) {
                for (CharacterStyle style : stylesFor.get(styleContainer.getIcon())) {
                    sb.setSpan(CharacterStyle.wrap(style), styleContainer.getStartIndex(), styleContainer.getEndIndex(), Spannable.SPAN_EXCLUSIVE_EXCLUSIVE);
                }
            } else if (styles != null) {
                for (CharacterStyle style : styles) {
                    sb.setSpan(CharacterStyle.wrap(style), styleContainer.getStartIndex(), styleContainer.getEndIndex(), Spannable.SPAN_EXCLUSIVE_EXCLUSIVE);
                }
            }
        }
        //sb = applyKerning(sb, 1);
        return sb;
    }

    /**
     * Maps a span position in the original text to its position after the
     * placeholder removals recorded in {@code removed}.
     */
    private static int newSpanPoint(int pos, ArrayList<RemoveInfo> removed) {
        for (RemoveInfo removeInfo : removed) {
            if (pos < removeInfo.getStart()) {
                return pos;
            }
            pos = pos - removeInfo.getCount();
        }
        return pos;
    }

    //NOTE(review): apparently unused; the first condition's `continue` makes
    //the `pos > getStart() && ...` branch below unreachable, so this method can
    //only ever return -1 or an unadjusted/total-adjusted position. Kept
    //byte-identical pending clarification of the intended semantics.
    private static int determineNewSpanPoint(int pos, ArrayList<RemoveInfo> removed) {
        for (RemoveInfo removeInfo : removed) {
            if (pos > removeInfo.getStart()) {
                continue;
            }
            if (pos > removeInfo.getStart() && pos < removeInfo.getStart() + removeInfo.getCount()) {
                return -1;
            }
            if (pos < removeInfo.getStart()) {
                return pos;
            } else {
                return pos - removeInfo.getTotal();
            }
        }
        return -1;
    }

    /*
    KEEP THIS HERE perhaps we are able to implement proper spacing for the icons
    public static SpannableString applyKerning(CharSequence src, float kerning) {
        if (src == null) return null;
        final int srcLength = src.length();
        if (srcLength < 2) return src instanceof SpannableString
                ? (SpannableString) src
                : new SpannableString(src);
        final String nonBreakingSpace = "\u00A0";
        final SpannableStringBuilder builder = src instanceof SpannableStringBuilder
                ? (SpannableStringBuilder) src
                : new SpannableStringBuilder(src);
        for (int i = src.length() - 1; i >= 1; i--) {
            builder.insert(i, nonBreakingSpace);
            builder.setSpan(new ScaleXSpan(kerning), i, i + 1,
                    Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
        }
        return new SpannableString(builder);
    }
    */

    /** Builder terminal that styles a SpannableString and returns the result. */
    public static class IconicsBuilderString {
        private Context ctx;
        private SpannableString text;
        private List<CharacterStyle> withStyles;
        private HashMap<String, List<CharacterStyle>> withStylesFor;
        private List<ITypeface> fonts;

        public IconicsBuilderString(Context ctx, List<ITypeface> fonts, SpannableString text, List<CharacterStyle> styles, HashMap<String, List<CharacterStyle>> stylesFor) {
            this.ctx = ctx;
            this.fonts = fonts;
            this.text = text;
            this.withStyles = styles;
            this.withStylesFor = stylesFor;
        }

        /** Runs the placeholder substitution and returns the styled string. */
        public SpannableString build() {
            HashMap<String, ITypeface> mappedFonts = new HashMap<String, ITypeface>();
            for (ITypeface font : fonts) {
                mappedFonts.put(font.getMappingPrefix(), font);
            }
            return Iconics.style(ctx, mappedFonts, text, withStyles, withStylesFor);
        }
    }

    /** Builder terminal that styles the text of a TextView in place. */
    public static class IconicsBuilderView {
        private Context ctx;
        private TextView view;
        private List<CharacterStyle> withStyles;
        private HashMap<String, List<CharacterStyle>> withStylesFor;
        private List<ITypeface> fonts;

        public IconicsBuilderView(Context ctx, List<ITypeface> fonts, TextView view, List<CharacterStyle> styles, HashMap<String, List<CharacterStyle>> stylesFor) {
            this.ctx = ctx;
            this.fonts = fonts;
            this.view = view;
            this.withStyles = styles;
            this.withStylesFor = stylesFor;
        }

        /** Substitutes placeholders in the view's current text and sets it back. */
        public void build() {
            HashMap<String, ITypeface> mappedFonts = new HashMap<String, ITypeface>();
            for (ITypeface font : fonts) {
                mappedFonts.put(font.getMappingPrefix(), font);
            }
            if (view.getText() instanceof SpannableString) {
                view.setText(Iconics.style(ctx, mappedFonts, (SpannableString) view.getText(), withStyles, withStylesFor));
            } else {
                view.setText(Iconics.style(ctx, mappedFonts, new SpannableString(view.getText()), withStyles, withStylesFor));
            }
            //buttons would upper-case the icon character otherwise
            if (Build.VERSION.SDK_INT >= 14) {
                if (view instanceof Button) {
                    view.setAllCaps(false);
                }
            }
        }
    }

    /** Fluent entry point: collect context, fonts and styles, then pick a target. */
    public static class IconicsBuilder {
        private List<CharacterStyle> styles = new LinkedList<CharacterStyle>();
        private HashMap<String, List<CharacterStyle>> stylesFor = new HashMap<String, List<CharacterStyle>>();
        private List<ITypeface> fonts = new LinkedList<ITypeface>();
        private Context ctx;

        public IconicsBuilder() {
        }

        public IconicsBuilder ctx(Context ctx) {
            this.ctx = ctx;
            return this;
        }

        /** Styles applied to every substituted icon without a per-icon style. */
        public IconicsBuilder style(CharacterStyle... styles) {
            if (styles != null && styles.length > 0) {
                Collections.addAll(this.styles, styles);
            }
            return this;
        }

        public IconicsBuilder styleFor(IIcon styleFor, CharacterStyle... styles) {
            return styleFor(styleFor.getName(), styles);
        }

        /** Styles applied only to the named icon (dashes normalized to underscores). */
        public IconicsBuilder styleFor(String styleFor, CharacterStyle... styles) {
            styleFor = styleFor.replace("-", "_");
            if (!stylesFor.containsKey(styleFor)) {
                this.stylesFor.put(styleFor, new LinkedList<CharacterStyle>());
            }
            if (styles != null && styles.length > 0) {
                for (CharacterStyle style : styles) {
                    this.stylesFor.get(styleFor).add(style);
                }
            }
            return this;
        }

        public IconicsBuilder font(ITypeface font) {
            this.fonts.add(font);
            return this;
        }

        public IconicsBuilderString on(SpannableString on) {
            return new IconicsBuilderString(ctx, fonts, on, styles, stylesFor);
        }

        public IconicsBuilderString on(String on) {
            return on(new SpannableString(on));
        }

        public IconicsBuilderString on(CharSequence on) {
            return on(on.toString());
        }

        public IconicsBuilderString on(StringBuilder on) {
            return on(on.toString());
        }

        public IconicsBuilderView on(TextView on) {
            return new IconicsBuilderView(ctx, fonts, on, styles, stylesFor);
        }

        public IconicsBuilderView on(Button on) {
            return new IconicsBuilderView(ctx, fonts, on, styles, stylesFor);
        }
    }

    /** Position and font of one substituted icon, for later span application. */
    private static class StyleContainer {
        private int startIndex;
        private int endIndex;
        private String icon;
        private ITypeface font;

        private StyleContainer(int startIndex, int endIndex, String icon, ITypeface font) {
            this.startIndex = startIndex;
            this.endIndex = endIndex;
            this.icon = icon;
            this.font = font;
        }

        public int getStartIndex() {
            return startIndex;
        }

        public int getEndIndex() {
            return endIndex;
        }

        public String getIcon() {
            return icon;
        }

        public ITypeface getFont() {
            return font;
        }
    }

    /** Records one placeholder removal: where, how many chars, running total. */
    private static class RemoveInfo {
        private int start;
        private int count;
        private int total;

        public RemoveInfo(int start, int count) {
            this.start = start;
            this.count = count;
        }

        public RemoveInfo(int start, int count, int total) {
            this.start = start;
            this.count = count;
            this.total = total;
        }

        public int getStart() {
            return start;
        }

        public void setStart(int start) {
            this.start = start;
        }

        public int getCount() {
            return count;
        }

        public void setCount(int count) {
            this.count = count;
        }

        public int getTotal() {
            return total;
        }

        public void setTotal(int total) {
            this.total = total;
        }
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.jdbc.test;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Properties;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
import org.apache.calcite.linq4j.Ord;
import org.apache.drill.common.config.DrillConfig;
import org.apache.drill.common.logical.LogicalPlan;
import org.apache.drill.common.logical.data.LogicalOperator;
import org.apache.drill.exec.ExecConstants;
import org.apache.drill.exec.planner.PhysicalPlanReaderTestFactory;
import org.apache.drill.jdbc.ConnectionFactory;
import org.apache.drill.jdbc.ConnectionInfo;
import org.junit.Assert;
import com.google.common.base.Function;
import com.google.common.base.Predicate;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSet.Builder;
import com.google.common.collect.Iterables;
/**
* Fluent interface for writing JDBC and query-planning tests.
*/
public class JdbcAssert {
  // Connection factory shared by all assertions; must be injected through
  // setFactory() before any ModelAndSchema is created.
  private static ConnectionFactory factory = null;

  /** Injects the factory used to open every test connection. */
  public static void setFactory(ConnectionFactory factory) {
    JdbcAssert.factory = factory;
  }

  /**
   * Returns default bag of properties that is passed to JDBC connection.
   * By default, includes options to:
   * - turn off the web server
   * - indicate DrillConnectionImpl to set up dfs_test.tmp schema location to an exclusive dir just for this test jvm
   */
  public static Properties getDefaultProperties() {
    final Properties properties = new Properties();
    properties.setProperty("drillJDBCUnitTests", "true");
    properties.setProperty(ExecConstants.HTTP_ENABLE, "false");
    return properties;
  }

  /** Starts a test against an inline model string with the given default schema. */
  public static ModelAndSchema withModel(final String model, final String schema) {
    final Properties info = getDefaultProperties();
    info.setProperty("schema", schema);
    info.setProperty("model", "inline:" + model);
    return new ModelAndSchema(info, factory);
  }

  /** Starts a test with the given default schema (no inline model). */
  public static ModelAndSchema withFull(final String schema) {
    final Properties info = getDefaultProperties();
    info.setProperty("schema", schema);
    return new ModelAndSchema(info, factory);
  }

  /** Starts a test without any default schema set. */
  public static ModelAndSchema withNoDefaultSchema() {
    return new ModelAndSchema(getDefaultProperties(), factory);
  }

  // Renders every row as a "label=value; label=value" line.
  // NOTE(review): expectedRecordCount is never checked here, although
  // displayResults() claims "record count check is done in toString method" —
  // confirm the intended contract before relying on it.
  static String toString(ResultSet resultSet, int expectedRecordCount) throws SQLException {
    final StringBuilder buf = new StringBuilder();
    while (resultSet.next()) {
      final ResultSetMetaData metaData = resultSet.getMetaData();
      final int n = metaData.getColumnCount();
      String sep = "";
      for (int i = 1; i <= n; i++) {
        buf.append(sep)
            .append(metaData.getColumnLabel(i))
            .append("=")
            .append(resultSet.getObject(i));
        sep = "; ";
      }
      buf.append("\n");
    }
    return buf.toString();
  }

  /** Renders every row as a "label=value; ..." line, one line per row. */
  static String toString(ResultSet resultSet) throws SQLException {
    StringBuilder buf = new StringBuilder();
    final List<Ord<String>> columns = columnLabels(resultSet);
    while (resultSet.next()) {
      for (Ord<String> column : columns) {
        buf.append(column.i == 1 ? "" : "; ").append(column.e).append("=").append(resultSet.getObject(column.i));
      }
      buf.append("\n");
    }
    return buf.toString();
  }

  /** Renders rows into a set of "label=value; ..." strings (order discarded, duplicates collapse). */
  static Set<String> toStringSet(ResultSet resultSet) throws SQLException {
    Builder<String> builder = ImmutableSet.builder();
    final List<Ord<String>> columns = columnLabels(resultSet);
    while (resultSet.next()) {
      StringBuilder buf = new StringBuilder();
      for (Ord<String> column : columns) {
        buf.append(column.i == 1 ? "" : "; ").append(column.e).append("=").append(resultSet.getObject(column.i));
      }
      builder.add(buf.toString());
      // Redundant: buf is re-created on each iteration anyway.
      buf.setLength(0);
    }
    return builder.build();
  }

  /** Renders rows into a list of "label=value; ..." strings, preserving order. */
  static List<String> toStrings(ResultSet resultSet) throws SQLException {
    final List<String> list = new ArrayList<>();
    StringBuilder buf = new StringBuilder();
    final List<Ord<String>> columns = columnLabels(resultSet);
    while (resultSet.next()) {
      buf.setLength(0);
      for (Ord<String> column : columns) {
        buf.append(column.i == 1 ? "" : "; ").append(column.e).append("=").append(resultSet.getObject(column.i));
      }
      list.add(buf.toString());
    }
    return list;
  }

  /** Returns the column labels paired with their 1-based column indexes. */
  private static List<Ord<String>> columnLabels(ResultSet resultSet) throws SQLException {
    int n = resultSet.getMetaData().getColumnCount();
    List<Ord<String>> columns = new ArrayList<>();
    for (int i = 1; i <= n; i++) {
      columns.add(Ord.of(i, resultSet.getMetaData().getColumnLabel(i)));
    }
    return columns;
  }

  /** Holds connection properties and opens connections for one test scenario. */
  public static class ModelAndSchema {
    private final Properties info;
    private final ConnectionFactoryAdapter adapter;

    public ModelAndSchema(final Properties info, final ConnectionFactory factory) {
      this.info = info;
      // Binds the captured factory and properties into a zero-argument opener.
      this.adapter = new ConnectionFactoryAdapter() {
        @Override
        public Connection createConnection() throws Exception {
          return factory.getConnection(new ConnectionInfo("jdbc:drill:zk=local", ModelAndSchema.this.info));
        }
      };
    }

    /** Binds a SQL statement to this scenario for later assertions. */
    public TestDataConnection sql(String sql) {
      return new TestDataConnection(adapter, sql);
    }

    /** Runs the given function with a fresh connection, closing it afterwards. */
    public <T> T withConnection(Function<Connection, T> function) throws Exception {
      Connection connection = null;
      try {
        connection = adapter.createConnection();
        return function.apply(connection);
      } finally {
        if (connection != null) {
          connection.close();
        }
      }
    }
  }

  /** A SQL statement bound to a connection source, with assertion helpers. */
  public static class TestDataConnection {
    private final ConnectionFactoryAdapter adapter;
    private final String sql;

    TestDataConnection(ConnectionFactoryAdapter adapter, String sql) {
      this.adapter = adapter;
      this.sql = sql;
    }

    /**
     * Checks that the current SQL statement returns the expected result.
     */
    public TestDataConnection returns(String expected) throws Exception {
      Connection connection = null;
      Statement statement = null;
      try {
        connection = adapter.createConnection();
        statement = connection.createStatement();
        ResultSet resultSet = statement.executeQuery(sql);
        expected = expected.trim();
        String result = JdbcAssert.toString(resultSet).trim();
        resultSet.close();
        if (!expected.equals(result)) {
          Assert.fail(String.format("Generated string:\n%s\ndoes not match:\n%s", result, expected));
        }
        return this;
      } finally {
        if (statement != null) {
          statement.close();
        }
        if (connection != null) {
          connection.close();
        }
      }
    }

    /** Checks that the query result, viewed as a set of lines, equals expected. */
    public TestDataConnection returnsSet(Set<String> expected) throws Exception {
      Connection connection = null;
      Statement statement = null;
      try {
        connection = adapter.createConnection();
        statement = connection.createStatement();
        ResultSet resultSet = statement.executeQuery(sql);
        Set<String> result = JdbcAssert.toStringSet(resultSet);
        resultSet.close();
        if (!expected.equals(result)) {
          Assert.fail(String.format("Generated set:\n%s\ndoes not match:\n%s", result, expected));
        }
        return this;
      } finally {
        if (statement != null) {
          statement.close();
        }
        if (connection != null) {
          connection.close();
        }
      }
    }

    /**
     * Checks that the current SQL statement returns the expected result lines. Lines are compared unordered; the test
     * succeeds if the query returns these lines in any order.
     */
    public TestDataConnection returnsUnordered(String... expecteds) throws Exception {
      Connection connection = null;
      Statement statement = null;
      try {
        connection = adapter.createConnection();
        statement = connection.createStatement();
        ResultSet resultSet = statement.executeQuery(sql);
        Assert.assertEquals(unsortedList(Arrays.asList(expecteds)), unsortedList(JdbcAssert.toStrings(resultSet)));
        resultSet.close();
        return this;
      } finally {
        if (statement != null) {
          statement.close();
        }
        if (connection != null) {
          connection.close();
        }
      }
    }

    /** Prints the query result to stdout; does not assert anything itself. */
    public TestDataConnection displayResults(int recordCount) throws Exception {
      // record count check is done in toString method
      Connection connection = null;
      Statement statement = null;
      try {
        connection = adapter.createConnection();
        statement = connection.createStatement();
        ResultSet resultSet = statement.executeQuery(sql);
        System.out.println(JdbcAssert.toString(resultSet, recordCount));
        resultSet.close();
        return this;
      } finally {
        if (statement != null) {
          statement.close();
        }
        if (connection != null) {
          connection.close();
        }
      }
    }

    // Despite the name, returns a SORTED set (TreeSet) so that two line
    // collections compare equal regardless of their original order.
    private SortedSet<String> unsortedList(List<String> strings) {
      final SortedSet<String> set = new TreeSet<>();
      for (String string : strings) {
        set.add(string + "\n");
      }
      return set;
    }

    /**
     * Returns the logical plan for this statement. The plan text is captured
     * via the LOGICAL_PLAN hook, which fires while the statement is being
     * prepared — the statement itself is never executed.
     */
    public LogicalPlan logicalPlan() {
      final String[] plan0 = {null};
      Connection connection = null;
      Statement statement = null;
      final Hook.Closeable x = Hook.LOGICAL_PLAN.add(new Function<String, Void>() {
        @Override
        public Void apply(String o) {
          // Captures the plan produced during prepareStatement() below.
          plan0[0] = o;
          return null;
        }
      });
      try {
        connection = adapter.createConnection();
        statement = connection.prepareStatement(sql);
        // Closed here and again in the finally block; a second close() is a
        // no-op per the JDBC Statement contract.
        statement.close();
        final String plan = plan0[0].trim();
        return LogicalPlan.parse(PhysicalPlanReaderTestFactory.defaultLogicalPlanPersistence(DrillConfig.create()), plan);
      } catch (Exception e) {
        throw new RuntimeException(e);
      } finally {
        if (statement != null) {
          try {
            statement.close();
          } catch (SQLException e) {
            // ignore
          }
        }
        if (connection != null) {
          try {
            connection.close();
          } catch (SQLException e) {
            // ignore
          }
        }
        x.close();
      }
    }

    /**
     * Asserts the logical plan contains an operator of the given class and
     * returns it. Throws NoSuchElementException when absent.
     */
    public <T extends LogicalOperator> T planContains(final Class<T> operatorClazz) {
      // Unchecked cast is safe: the predicate only matches operatorClazz.
      return (T) Iterables.find(logicalPlan().getSortedOperators(), new Predicate<LogicalOperator>() {
        @Override
        public boolean apply(LogicalOperator input) {
          return input.getClass().equals(operatorClazz);
        }
      });
    }
  }

  /** Zero-argument connection opener bound to one scenario's properties. */
  private static interface ConnectionFactoryAdapter {
    Connection createConnection() throws Exception;
  }
}
// End JdbcAssert.java
| |
/*
* Copyright 2015-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.crosscell;
import static com.facebook.buck.util.environment.Platform.WINDOWS;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.hamcrest.CoreMatchers.not;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
import static org.junit.Assume.assumeThat;
import com.facebook.buck.event.BuckEventBus;
import com.facebook.buck.event.BuckEventBusFactory;
import com.facebook.buck.event.listener.BroadcastEventListener;
import com.facebook.buck.io.MorePaths;
import com.facebook.buck.io.ProjectFilesystem;
import com.facebook.buck.json.BuildFileParseException;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.BuildTargetException;
import com.facebook.buck.model.BuildTargetFactory;
import com.facebook.buck.model.Pair;
import com.facebook.buck.parser.Parser;
import com.facebook.buck.parser.ParserConfig;
import com.facebook.buck.rules.Cell;
import com.facebook.buck.rules.ConstructorArgMarshaller;
import com.facebook.buck.rules.coercer.DefaultTypeCoercerFactory;
import com.facebook.buck.rules.coercer.TypeCoercerFactory;
import com.facebook.buck.testutil.MoreAsserts;
import com.facebook.buck.testutil.integration.ProjectWorkspace;
import com.facebook.buck.testutil.integration.TemporaryPaths;
import com.facebook.buck.testutil.integration.TestDataHelper;
import com.facebook.buck.util.HumanReadableException;
import com.facebook.buck.util.ObjectMappers;
import com.facebook.buck.util.environment.Platform;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.hash.HashCode;
import com.google.common.hash.Hashing;
import com.google.common.util.concurrent.MoreExecutors;
import com.martiansoftware.nailgun.NGContext;
import org.ini4j.Ini;
import org.junit.Ignore;
import org.junit.Rule;
import org.junit.Test;
import java.io.IOException;
import java.io.StringReader;
import java.io.StringWriter;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.concurrent.Executors;
/**
 * Integration tests for Buck invocations that span multiple cells ("x-cell"):
 * cross-cell export_file and cxx targets, the targets/query/project commands,
 * visibility patterns across cells, command-line config overrides for a
 * secondary cell, and hermeticity of repeated cross-cell builds.
 */
public class InterCellIntegrationTest {

  @Rule
  public TemporaryPaths tmp = new TemporaryPaths();

  @Test
  public void ensureThatNormalBuildsWorkAsExpected() throws IOException {
    // Sanity check: the secondary cell builds on its own before any
    // cross-cell wiring is involved.
    ProjectWorkspace secondary = TestDataHelper.createProjectWorkspaceForScenario(
        this,
        "inter-cell/export-file/secondary",
        tmp);
    secondary.setUp();
    ProjectWorkspace.ProcessResult result = secondary.runBuckBuild("//:hello");
    result.assertSuccess();
  }

  @Test
  public void shouldBeAbleToUseAnExportFileXRepoTarget() throws IOException {
    Pair<ProjectWorkspace, ProjectWorkspace> cells = prepare(
        "inter-cell/export-file/primary",
        "inter-cell/export-file/secondary");
    ProjectWorkspace primary = cells.getFirst();
    ProjectWorkspace secondary = cells.getSecond();

    // The exported file must be byte-identical to the source file that lives
    // in the secondary cell.
    String expected = secondary.getFileContents("hello.txt");
    Path path = primary.buildAndReturnOutput("//:exported-file");
    String actual = new String(Files.readAllBytes(path), UTF_8);

    assertEquals(expected, actual);
  }

  @Test
  public void shouldBeAbleToUseTargetsCommandXCell() throws IOException {
    assumeThat(Platform.detect(), is(not(WINDOWS)));

    Pair<ProjectWorkspace, ProjectWorkspace> cells = prepare(
        "inter-cell/export-file/primary",
        "inter-cell/export-file/secondary");
    ProjectWorkspace primary = cells.getFirst();

    ProjectWorkspace.ProcessResult result = primary.runBuckCommand(
        "targets",
        "--show-target-hash",
        "//:cxxbinary");
    result.assertSuccess();

    // A fully-qualified cross-cell target must also be resolvable.
    ProjectWorkspace.ProcessResult result2 = primary.runBuckCommand(
        "targets",
        "secondary//:cxxlib");
    result2.assertSuccess();
  }

  @Test
  public void shouldBeAbleToUseQueryCommandXCell() throws IOException {
    assumeThat(Platform.detect(), is(not(WINDOWS)));

    // createWorkspace() already calls setUp(); the previous revision called it
    // a second time on each workspace, which only re-did the copy for no gain.
    ProjectWorkspace primary = createWorkspace("inter-cell/multi-cell/primary");
    ProjectWorkspace secondary = createWorkspace("inter-cell/multi-cell/secondary");
    ProjectWorkspace ternary = createWorkspace("inter-cell/multi-cell/ternary");
    registerCell(secondary, "ternary", ternary);
    registerCell(primary, "secondary", secondary);
    registerCell(primary, "ternary", ternary);

    // Warm each cell's parser state before issuing the query.
    primary.runBuckCommand("targets", "--show-target-hash", "//:cxxbinary");
    secondary.runBuckCommand("targets", "--show-target-hash", "//:cxxlib");
    ternary.runBuckCommand("targets", "--show-target-hash", "//:cxxlib2");

    ProjectWorkspace.ProcessResult result = primary.runBuckCommand(
        "query",
        "deps(%s)",
        "//:cxxbinary");
    result.assertSuccess();
    assertThat(
        result.getStdout(),
        is(primary.getFileContents("stdout-cross-cell-dep")));
  }

  @Test
  public void shouldBeAbleToUseProjectCommandXCell() throws IOException {
    assumeThat(Platform.detect(), is(not(WINDOWS)));

    Pair<ProjectWorkspace, ProjectWorkspace> cells = prepare(
        "inter-cell/export-file/primary",
        "inter-cell/export-file/secondary");
    ProjectWorkspace primary = cells.getFirst();

    ProjectWorkspace.ProcessResult result = primary.runBuckCommand("project", "//:cxxbinary");
    result.assertSuccess();
  }

  @Test
  public void shouldBeAbleToUseACxxLibraryXCell() throws IOException {
    assumeThat(Platform.detect(), is(not(WINDOWS)));

    Pair<ProjectWorkspace, ProjectWorkspace> cells = prepare(
        "inter-cell/export-file/primary",
        "inter-cell/export-file/secondary");
    ProjectWorkspace primary = cells.getFirst();

    ProjectWorkspace.ProcessResult result = primary.runBuckBuild("//:cxxbinary");
    result.assertSuccess();
  }

  @Test
  public void shouldBeAbleToUseMultipleXCell() throws IOException {
    assumeThat(Platform.detect(), is(not(WINDOWS)));

    // Note: ternary is registered in the secondary cell only, so the primary
    // build exercises transitive cross-cell resolution.
    ProjectWorkspace primary = createWorkspace("inter-cell/multi-cell/primary");
    ProjectWorkspace secondary = createWorkspace("inter-cell/multi-cell/secondary");
    ProjectWorkspace ternary = createWorkspace("inter-cell/multi-cell/ternary");
    registerCell(secondary, "ternary", ternary);
    registerCell(primary, "secondary", secondary);

    primary.runBuckCommand("targets", "--show-target-hash", "//:cxxbinary");
    secondary.runBuckCommand("targets", "--show-target-hash", "//:cxxlib");
    ternary.runBuckCommand("targets", "--show-target-hash", "//:cxxlib2");

    ProjectWorkspace.ProcessResult result = primary.runBuckBuild("//:cxxbinary");
    result.assertSuccess();
  }

  @Test
  public void xCellCxxLibraryBuildsShouldBeHermetic() throws IOException {
    assumeThat(Platform.detect(), is(not(WINDOWS)));

    Pair<ProjectWorkspace, ProjectWorkspace> cells = prepare(
        "inter-cell/export-file/primary",
        "inter-cell/export-file/secondary");
    ProjectWorkspace primary = cells.getFirst();
    ProjectWorkspace secondary = cells.getSecond();

    Path firstBinary = primary.buildAndReturnOutput("//:cxxbinary");
    ImmutableMap<String, HashCode> firstPrimaryObjectFiles = findObjectFiles(primary);
    ImmutableMap<String, HashCode> firstObjectFiles = findObjectFiles(secondary);

    // Now recreate an identical checkout and verify all intermediate object
    // files and the final binary hash to the same values.
    cells = prepare(
        "inter-cell/export-file/primary",
        "inter-cell/export-file/secondary");
    primary = cells.getFirst();
    secondary = cells.getSecond();

    Path secondBinary = primary.buildAndReturnOutput("//:cxxbinary");
    ImmutableMap<String, HashCode> secondPrimaryObjectFiles = findObjectFiles(primary);
    ImmutableMap<String, HashCode> secondObjectFiles = findObjectFiles(secondary);

    assertEquals(firstPrimaryObjectFiles, secondPrimaryObjectFiles);
    assertEquals(firstObjectFiles, secondObjectFiles);
    MoreAsserts.assertContentsEqual(firstBinary, secondBinary);
  }

  /**
   * Walks buck-out of {@code workspace} and returns a map from each {@code .o}
   * file's buck-out-relative path to the SHA-1 of its contents.
   *
   * @throws IllegalStateException if no object files were found (a build must
   *     have produced some before calling this)
   */
  private ImmutableMap<String, HashCode> findObjectFiles(final ProjectWorkspace workspace)
      throws IOException {
    ProjectFilesystem filesystem = new ProjectFilesystem(workspace.getDestPath());
    final Path buckOut = workspace.getPath(filesystem.getBuckPaths().getBuckOut());

    final ImmutableMap.Builder<String, HashCode> objectHashCodes = ImmutableMap.builder();
    Files.walkFileTree(buckOut, new SimpleFileVisitor<Path>() {
      @Override
      public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
        if (MorePaths.getFileExtension(file).equals("o")) {
          HashCode hash = MorePaths.asByteSource(file).hash(Hashing.sha1());
          objectHashCodes.put(buckOut.relativize(file).toString(), hash);
        }
        return FileVisitResult.CONTINUE;
      }
    });
    ImmutableMap<String, HashCode> toReturn = objectHashCodes.build();
    Preconditions.checkState(!toReturn.isEmpty());
    return toReturn;
  }

  @Test
  @Ignore
  public void shouldBeAbleToUseAJavaLibraryTargetXCell() throws IOException {
    Pair<ProjectWorkspace, ProjectWorkspace> cells = prepare(
        "inter-cell/java/primary",
        "inter-cell/java/secondary");
    ProjectWorkspace primary = cells.getFirst();
    registerCell(cells.getSecond(), "primary", primary);

    ProjectWorkspace.ProcessResult result = primary.runBuckBuild("//:java-binary", "-v", "5");
    result.assertSuccess();
  }

  @Test
  public void buildFileNamesCanBeDifferentCrossCell() throws IOException {
    Pair<ProjectWorkspace, ProjectWorkspace> cells = prepare(
        "inter-cell/build-file-names/primary",
        "inter-cell/build-file-names/secondary");
    ProjectWorkspace primary = cells.getFirst();
    ProjectWorkspace secondary = cells.getSecond();

    Path output = primary.buildAndReturnOutput("//:export");
    String expected = secondary.getFileContents("hello-world.txt");

    assertEquals(expected, new String(Files.readAllBytes(output), UTF_8));
  }

  @SuppressWarnings("PMD.EmptyCatchBlock")
  @Test
  public void xCellVisibilityShouldWorkAsExpected()
      throws IOException, InterruptedException, BuildFileParseException, BuildTargetException {
    try {
      parseTargetForXCellVisibility("//:not-visible-target");
      fail("Did not expect parsing to succeed");
    } catch (HumanReadableException expected) {
      // Everything is as it should be.
    }
  }

  @Test
  public void xCellVisibilityPatternsBasedOnPublicBuildTargetsWork()
      throws InterruptedException, BuildFileParseException, IOException, BuildTargetException {
    parseTargetForXCellVisibility("//:public-target");
  }

  @Test
  public void xCellVisibilityPatternsBasedOnExplicitBuildTargetsWork()
      throws InterruptedException, BuildFileParseException, IOException, BuildTargetException {
    parseTargetForXCellVisibility("//:visible-target");
  }

  @Test
  public void xCellSingleDirectoryVisibilityPatternsWork()
      throws InterruptedException, BuildFileParseException, IOException, BuildTargetException {
    parseTargetForXCellVisibility("//sub2:directory");
  }

  @Test
  public void xCellSubDirectoryVisibilityPatternsWork()
      throws InterruptedException, BuildFileParseException, IOException, BuildTargetException {
    parseTargetForXCellVisibility("//sub:wild-card");
  }

  /**
   * Parses {@code targetName} in the visibility scenario's primary cell; a
   * clean parse is the success criterion, a {@link HumanReadableException}
   * signals a visibility violation.
   */
  private void parseTargetForXCellVisibility(String targetName)
      throws IOException, InterruptedException, BuildFileParseException, BuildTargetException {
    Pair<ProjectWorkspace, ProjectWorkspace> cells = prepare(
        "inter-cell/visibility/primary",
        "inter-cell/visibility/secondary");

    ProjectWorkspace primary = cells.getFirst();
    ProjectWorkspace secondary = cells.getSecond();
    registerCell(secondary, "primary", primary);

    // We could just do a build, but that's a little extreme since all we need is the target graph
    TypeCoercerFactory coercerFactory = new DefaultTypeCoercerFactory(
        ObjectMappers.newDefaultInstance());
    Parser parser = new Parser(
        new BroadcastEventListener(),
        new ParserConfig(primary.asCell().getBuckConfig()),
        coercerFactory,
        new ConstructorArgMarshaller(coercerFactory));

    BuckEventBus eventBus = BuckEventBusFactory.newInstance();

    Cell primaryCell = primary.asCell();
    BuildTarget namedTarget = BuildTargetFactory.newInstance(
        primaryCell.getFilesystem(),
        targetName);

    // It's enough that this parses cleanly.
    parser.buildTargetGraph(
        eventBus,
        primaryCell,
        false,
        MoreExecutors.listeningDecorator(Executors.newSingleThreadExecutor()),
        ImmutableSet.of(namedTarget));
  }

  @Test
  @Ignore
  public void allOutputsShouldBePlacedInTheSameRootOutputFolder() {
  }

  @Test
  public void circularCellReferencesAreAllowed() throws IOException {
    ProjectWorkspace mainRepo = TestDataHelper.createProjectWorkspaceForScenario(
        this,
        "inter-cell/circular",
        tmp);
    mainRepo.setUp();
    Path primary = mainRepo.getPath("primary");

    ProjectWorkspace.ProcessResult result =
        mainRepo.runBuckCommandWithEnvironmentOverridesAndContext(
            primary,
            Optional.<NGContext>absent(),
            ImmutableMap.<String, String>of(),
            "build",
            "//:bin");

    result.assertSuccess();
  }

  @SuppressWarnings("PMD.EmptyCatchBlock")
  @Test
  public void shouldBeAbleToUseCommandLineConfigOverrides() throws IOException {
    assumeThat(Platform.detect(), is(not(WINDOWS)));

    Pair<ProjectWorkspace, ProjectWorkspace> cells = prepare(
        "inter-cell/export-file/primary",
        "inter-cell/export-file/secondary");
    ProjectWorkspace primary = cells.getFirst();
    ProjectWorkspace secondary = cells.getSecond();
    // Break the secondary cell's compiler so the first build must fail.
    putConfigValue(
        secondary,
        "cxx",
        "cc",
        "/does/not/exist");

    try {
      primary.runBuckBuild("//:cxxbinary");
      fail("Did not expect to finish building");
    } catch (HumanReadableException expected) {
      // Fixed: assertEquals takes (expected, actual) — previously the
      // arguments were swapped, which made failure output misleading.
      assertEquals(
          "Couldn't get dependency 'secondary//:cxxlib' of target '//:cxxbinary':\n" +
              "Overridden cxx:cc path not found: /does/not/exist",
          expected.getMessage());
    }

    // Clearing the override on the command line must restore the build.
    ProjectWorkspace.ProcessResult result = primary.runBuckBuild(
        "--config",
        "secondary//cxx.cc=",
        "//:cxxbinary");
    result.assertSuccess();
  }

  @Test
  public void buildFilesCanIncludeDefsFromOtherCells() throws IOException {
    assumeThat(Platform.detect(), is(not(WINDOWS)));

    ProjectWorkspace root = createWorkspace("inter-cell/include-defs/root");
    ProjectWorkspace other = createWorkspace("inter-cell/include-defs/other");
    registerCell(root, "other", other);
    registerCell(other, "root", root);

    root.runBuckBuild("//:rule", "other//:rule").assertSuccess();
  }

  /**
   * Creates the two scenario workspaces and registers the second in the first
   * under the cell name "secondary".
   */
  private Pair<ProjectWorkspace, ProjectWorkspace> prepare(
      String primaryPath,
      String secondaryPath) throws IOException {
    ProjectWorkspace primary = createWorkspace(primaryPath);
    ProjectWorkspace secondary = createWorkspace(secondaryPath);
    registerCell(primary, "secondary", secondary);
    return new Pair<>(primary, secondary);
  }

  /** Copies the named scenario into a fresh temp folder and sets it up. */
  private ProjectWorkspace createWorkspace(String scenarioName) throws IOException {
    final Path tmpSubfolder = tmp.newFolder();
    ProjectWorkspace projectWorkspace = TestDataHelper.createProjectWorkspaceForScenario(
        this,
        scenarioName,
        tmpSubfolder);
    projectWorkspace.setUp();
    return projectWorkspace;
  }

  /**
   * Adds {@code cellToRegisterAsCellName} to the [repositories] section of
   * {@code cellToModifyConfigOf}'s .buckconfig under {@code cellName}.
   */
  private void registerCell(
      ProjectWorkspace cellToModifyConfigOf,
      String cellName,
      ProjectWorkspace cellToRegisterAsCellName) throws IOException {
    putConfigValue(
        cellToModifyConfigOf,
        "repositories",
        cellName,
        cellToRegisterAsCellName.getPath(".").normalize().toString());
  }

  /** Rewrites one key in one section of the workspace's .buckconfig ini file. */
  private void putConfigValue(
      ProjectWorkspace cellToModifyConfigOf,
      String section,
      String key,
      String value) throws IOException {
    String config = cellToModifyConfigOf.getFileContents(".buckconfig");
    Ini ini = new Ini(new StringReader(config));
    ini.put(section, key, value);
    StringWriter writer = new StringWriter();
    ini.store(writer);
    Files.write(cellToModifyConfigOf.getPath(".buckconfig"), writer.toString().getBytes(UTF_8));
  }
}
| |
/* (c) 2014 LinkedIn Corp. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use
* this file except in compliance with the License. You may obtain a copy of the
* License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed
* under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied.
*/
package gobblin.runtime;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.io.StringWriter;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import org.apache.hadoop.io.Text;
import com.codahale.metrics.Counter;
import com.codahale.metrics.Gauge;
import com.codahale.metrics.Meter;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Maps;
import com.google.gson.stream.JsonWriter;
import com.linkedin.data.template.StringMap;
import gobblin.rest.JobExecutionInfo;
import gobblin.rest.JobStateEnum;
import gobblin.rest.LauncherTypeEnum;
import gobblin.rest.Metric;
import gobblin.rest.MetricArray;
import gobblin.rest.MetricTypeEnum;
import gobblin.rest.TaskExecutionInfoArray;
import gobblin.configuration.ConfigurationKeys;
import gobblin.configuration.SourceState;
import gobblin.metrics.JobMetrics;
/**
 * A class for tracking job state information.
 *
 * <p>Instances are Hadoop-writable: {@link #readFields(DataInput)} and
 * {@link #write(DataOutput)} must serialize fields in the same order.
 *
 * @author ynli
 */
public class JobState extends SourceState {

  /**
   * An enumeration of possible job states, which are identical to
   * {@link gobblin.configuration.WorkUnitState.WorkingState}
   * in terms of naming.
   */
  public enum RunningState {
    PENDING, RUNNING, SUCCESSFUL, COMMITTED, FAILED, CANCELLED
  }

  private String jobName;
  private String jobId;
  private long startTime;
  private long endTime;
  private long duration;
  private RunningState state = RunningState.PENDING;
  private int tasks;
  // Keyed by task ID; insertion order is not significant.
  private Map<String, TaskState> taskStates = Maps.newHashMap();

  // Necessary for serialization/deserialization
  public JobState() {
  }

  public JobState(String jobName, String jobId) {
    this.jobName = jobName;
    this.jobId = jobId;
    this.setId(jobId);
  }

  /**
   * Get job name.
   *
   * @return job name
   */
  public String getJobName() {
    return this.jobName;
  }

  /**
   * Set job name.
   *
   * @param jobName job name
   */
  public void setJobName(String jobName) {
    this.jobName = jobName;
  }

  /**
   * Get job ID.
   *
   * @return job ID
   */
  public String getJobId() {
    return this.jobId;
  }

  /**
   * Set job ID.
   *
   * @param jobId job ID
   */
  public void setJobId(String jobId) {
    this.jobId = jobId;
  }

  /**
   * Get job start time.
   *
   * @return job start time
   */
  public long getStartTime() {
    return this.startTime;
  }

  /**
   * Set job start time.
   *
   * @param startTime job start time
   */
  public void setStartTime(long startTime) {
    this.startTime = startTime;
  }

  /**
   * Get job end time.
   *
   * @return job end time
   */
  public long getEndTime() {
    return this.endTime;
  }

  /**
   * Set job end time.
   *
   * @param endTime job end time
   */
  public void setEndTime(long endTime) {
    this.endTime = endTime;
  }

  /**
   * Get job duration in milliseconds.
   *
   * @return job duration in milliseconds
   */
  public long getDuration() {
    return this.duration;
  }

  /**
   * Set job duration in milliseconds.
   *
   * @param duration job duration in milliseconds
   */
  public void setDuration(long duration) {
    this.duration = duration;
  }

  /**
   * Get job running state of type {@link RunningState}.
   *
   * @return job running state of type {@link RunningState}
   */
  public RunningState getState() {
    return this.state;
  }

  /**
   * Set job running state of type {@link RunningState}.
   *
   * @param state job running state of type {@link RunningState}
   */
  public void setState(RunningState state) {
    this.state = state;
  }

  /**
   * Get the number of tasks this job consists of.
   *
   * @return number of tasks this job consists of
   */
  public int getTasks() {
    return this.tasks;
  }

  /**
   * Set the number of tasks this job consists of.
   *
   * @param tasks number of tasks this job consists of
   */
  public void setTasks(int tasks) {
    this.tasks = tasks;
  }

  /**
   * Increment the number of tasks by 1.
   */
  public void addTask() {
    this.tasks++;
  }

  /**
   * Add a single {@link TaskState}.
   *
   * @param taskState {@link TaskState} to add
   */
  public void addTaskState(TaskState taskState) {
    this.taskStates.put(taskState.getTaskId(), taskState);
  }

  /**
   * Add a collection of {@link TaskState}s.
   *
   * @param taskStates collection of {@link TaskState}s to add
   */
  public void addTaskStates(Collection<TaskState> taskStates) {
    for (TaskState taskState : taskStates) {
      this.taskStates.put(taskState.getTaskId(), taskState);
    }
  }

  /**
   * Get the number of completed tasks.
   *
   * @return number of completed tasks
   */
  public int getCompletedTasks() {
    return this.taskStates.size();
  }

  /**
   * Get {@link TaskState}s of {@link Task}s of this job.
   *
   * @return an immutable snapshot of the {@link TaskState}s of this job
   */
  public List<TaskState> getTaskStates() {
    return ImmutableList.<TaskState>builder().addAll(this.taskStates.values()).build();
  }

  /**
   * Remove all job-level metrics objects associated with this job.
   */
  public void removeMetrics() {
    JobMetrics metrics = JobMetrics.get(this.jobName, this.jobId);
    // Only metrics whose names embed this job's ID belong to this job run.
    for (String name : metrics.getMetricsOfGroup(JobMetrics.MetricGroup.JOB).keySet()) {
      if (name.contains(this.jobId)) {
        metrics.removeMetric(name);
      }
    }
  }

  @Override
  public void readFields(DataInput in)
      throws IOException {
    // Field order must mirror write() exactly.
    Text text = new Text();
    text.readFields(in);
    this.jobName = text.toString();
    text.readFields(in);
    this.jobId = text.toString();
    this.setId(jobId);
    this.startTime = in.readLong();
    this.endTime = in.readLong();
    this.duration = in.readLong();
    text.readFields(in);
    this.state = RunningState.valueOf(text.toString());
    this.tasks = in.readInt();
    int numTaskStates = in.readInt();
    for (int i = 0; i < numTaskStates; i++) {
      TaskState taskState = new TaskState();
      taskState.readFields(in);
      this.taskStates.put(taskState.getTaskId(), taskState);
    }
    super.readFields(in);
  }

  @Override
  public void write(DataOutput out)
      throws IOException {
    // Field order must mirror readFields() exactly.
    Text text = new Text();
    text.set(this.jobName);
    text.write(out);
    text.set(this.jobId);
    text.write(out);
    out.writeLong(this.startTime);
    out.writeLong(this.endTime);
    out.writeLong(this.duration);
    text.set(this.state.name());
    text.write(out);
    out.writeInt(this.tasks);
    out.writeInt(this.taskStates.size());
    for (TaskState taskState : this.taskStates.values()) {
      taskState.write(out);
    }
    super.write(out);
  }

  /**
   * Convert this {@link JobState} to a json document.
   *
   * @param jsonWriter a {@link com.google.gson.stream.JsonWriter}
   *                   used to write the json document
   * @param keepConfig whether to keep all configuration properties
   * @throws IOException if writing to the underlying writer fails
   */
  public void toJson(JsonWriter jsonWriter, boolean keepConfig)
      throws IOException {
    jsonWriter.beginObject();
    jsonWriter.name("job name").value(this.getJobName()).name("job id").value(this.getJobId()).name("job state")
        .value(this.getState().name()).name("start time").value(this.getStartTime()).name("end time")
        .value(this.getEndTime()).name("duration").value(this.getDuration()).name("tasks").value(this.getTasks())
        .name("completed tasks").value(this.getCompletedTasks());
    jsonWriter.name("task states");
    jsonWriter.beginArray();
    for (TaskState taskState : taskStates.values()) {
      taskState.toJson(jsonWriter, keepConfig);
    }
    jsonWriter.endArray();
    if (keepConfig) {
      jsonWriter.name("properties");
      jsonWriter.beginObject();
      for (String key : this.getPropertyNames()) {
        jsonWriter.name(key).value(this.getProp(key));
      }
      jsonWriter.endObject();
    }
    jsonWriter.endObject();
  }

  @Override
  public String toString() {
    StringWriter stringWriter = new StringWriter();
    JsonWriter jsonWriter = new JsonWriter(stringWriter);
    jsonWriter.setIndent("\t");
    try {
      this.toJson(jsonWriter, false);
    } catch (IOException ioe) {
      // Ignored: a StringWriter cannot fail on write, and toString() must not throw.
    }
    return stringWriter.toString();
  }

  /**
   * Convert this {@link JobState} instance to a {@link JobExecutionInfo} instance.
   *
   * @return a {@link JobExecutionInfo} instance
   */
  public JobExecutionInfo toJobExecutionInfo() {
    JobExecutionInfo jobExecutionInfo = new JobExecutionInfo();

    jobExecutionInfo.setJobName(this.jobName);
    jobExecutionInfo.setJobId(this.jobId);
    jobExecutionInfo.setStartTime(this.startTime);
    jobExecutionInfo.setEndTime(this.endTime);
    jobExecutionInfo.setDuration(this.duration);
    jobExecutionInfo.setState(JobStateEnum.valueOf(this.state.name()));
    jobExecutionInfo.setLaunchedTasks(this.tasks);
    jobExecutionInfo.setCompletedTasks(this.getCompletedTasks());
    // Falls back to LOCAL when the launcher type property is absent.
    jobExecutionInfo.setLauncherType(LauncherTypeEnum.valueOf(this.getProp(ConfigurationKeys.JOB_LAUNCHER_TYPE_KEY,
        JobLauncherFactory.JobLauncherType.LOCAL.name())));
    if (this.contains(ConfigurationKeys.JOB_TRACKING_URL_KEY)) {
      jobExecutionInfo.setTrackingUrl(this.getProp(ConfigurationKeys.JOB_TRACKING_URL_KEY));
    }

    // Add task execution information
    TaskExecutionInfoArray taskExecutionInfos = new TaskExecutionInfoArray();
    for (TaskState taskState : this.getTaskStates()) {
      taskExecutionInfos.add(taskState.toTaskExecutionInfo());
    }
    jobExecutionInfo.setTaskExecutions(taskExecutionInfos);

    // Add job metrics (counters, meters, gauges registered for this job ID)
    JobMetrics jobMetrics = JobMetrics.get(this.jobName, this.jobId);
    MetricArray metricArray = new MetricArray();
    for (Map.Entry<String, ? extends com.codahale.metrics.Metric> entry : jobMetrics
        .getMetricsOfType(JobMetrics.MetricType.COUNTER, JobMetrics.MetricGroup.JOB, this.jobId).entrySet()) {
      Metric counter = new Metric();
      counter.setGroup(JobMetrics.MetricGroup.JOB.name());
      counter.setName(entry.getKey());
      counter.setType(MetricTypeEnum.valueOf(JobMetrics.MetricType.COUNTER.name()));
      counter.setValue(Long.toString(((Counter) entry.getValue()).getCount()));
      metricArray.add(counter);
    }
    for (Map.Entry<String, ? extends com.codahale.metrics.Metric> entry : jobMetrics
        .getMetricsOfType(JobMetrics.MetricType.METER, JobMetrics.MetricGroup.JOB, this.jobId).entrySet()) {
      Metric meter = new Metric();
      meter.setGroup(JobMetrics.MetricGroup.JOB.name());
      meter.setName(entry.getKey());
      meter.setType(MetricTypeEnum.valueOf(JobMetrics.MetricType.METER.name()));
      meter.setValue(Double.toString(((Meter) entry.getValue()).getMeanRate()));
      metricArray.add(meter);
    }
    for (Map.Entry<String, ? extends com.codahale.metrics.Metric> entry : jobMetrics
        .getMetricsOfType(JobMetrics.MetricType.GAUGE, JobMetrics.MetricGroup.JOB, this.jobId).entrySet()) {
      Metric gauge = new Metric();
      gauge.setGroup(JobMetrics.MetricGroup.JOB.name());
      gauge.setName(entry.getKey());
      gauge.setType(MetricTypeEnum.valueOf(JobMetrics.MetricType.GAUGE.name()));
      gauge.setValue(((Gauge) entry.getValue()).getValue().toString());
      metricArray.add(gauge);
    }
    jobExecutionInfo.setMetrics(metricArray);

    // Add job properties, skipping null/empty values.
    // Fixed: the if-statement previously had no braces around its body.
    Map<String, String> jobProperties = Maps.newHashMap();
    for (String name : this.getPropertyNames()) {
      String value = this.getProp(name);
      if (!Strings.isNullOrEmpty(value)) {
        jobProperties.put(name, value);
      }
    }
    jobExecutionInfo.setJobProperties(new StringMap(jobProperties));

    return jobExecutionInfo;
  }
}
| |
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.testsuite.admin.client;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.FixMethodOrder;
import org.junit.Test;
import org.junit.runners.MethodSorters;
import org.keycloak.admin.client.resource.ClientTemplatesResource;
import org.keycloak.admin.client.resource.ProtocolMappersResource;
import org.keycloak.events.admin.OperationType;
import org.keycloak.events.admin.ResourceType;
import org.keycloak.protocol.oidc.OIDCLoginProtocol;
import org.keycloak.protocol.saml.SamlProtocol;
import org.keycloak.representations.idm.ClientTemplateRepresentation;
import org.keycloak.representations.idm.ProtocolMapperRepresentation;
import org.keycloak.testsuite.admin.ApiUtil;
import org.keycloak.testsuite.util.AdminEventPaths;
import javax.ws.rs.NotFoundException;
import javax.ws.rs.core.Response;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
/**
* @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a>
*/
@FixMethodOrder(MethodSorters.NAME_ASCENDING)
public class ClientTemplateProtocolMapperTest extends AbstractProtocolMapperTest {
private String oidcClientTemplateId;
private ProtocolMappersResource oidcMappersRsc;
private String samlClientTemplateId;
private ProtocolMappersResource samlMappersRsc;
@Before
public void init() {
oidcClientTemplateId = createTemplate("oidcMapperClient-template", OIDCLoginProtocol.LOGIN_PROTOCOL);
oidcMappersRsc = clientTemplates().get(oidcClientTemplateId).getProtocolMappers();
samlClientTemplateId = createTemplate("samlMapperClient-template", SamlProtocol.LOGIN_PROTOCOL);
samlMappersRsc = clientTemplates().get(samlClientTemplateId).getProtocolMappers();
super.initBuiltinMappers();
}
@After
public void tearDown() {
removeTemplate(oidcClientTemplateId);
removeTemplate(samlClientTemplateId);
}
@Test
public void test01GetMappersList() {
assertTrue(oidcMappersRsc.getMappers().isEmpty());
assertTrue(samlMappersRsc.getMappers().isEmpty());
}
@Test
public void test02CreateOidcMappersFromList() {
testAddAllBuiltinMappers(oidcMappersRsc, "openid-connect", AdminEventPaths.clientTemplateProtocolMappersPath(oidcClientTemplateId));
}
@Test
public void test03CreateSamlMappersFromList() {
testAddAllBuiltinMappers(samlMappersRsc, "saml", AdminEventPaths.clientTemplateProtocolMappersPath(samlClientTemplateId));
}
@Test
public void test04CreateSamlProtocolMapper() {
//{"protocol":"saml",
// "config":{"role":"account.view-profile","new.role.name":"new-role-name"},
// "consentRequired":true,
// "consentText":"My consent text",
// "name":"saml-role-name-maper",
// "protocolMapper":"saml-role-name-mapper"}
ProtocolMapperRepresentation rep = makeSamlMapper("saml-role-name-mapper");
int totalMappers = samlMappersRsc.getMappers().size();
int totalSamlMappers = samlMappersRsc.getMappersPerProtocol("saml").size();
Response resp = samlMappersRsc.createMapper(rep);
resp.close();
String createdId = ApiUtil.getCreatedId(resp);
assertAdminEvents.assertEvent(getRealmId(), OperationType.CREATE, AdminEventPaths.clientTemplateProtocolMapperPath(samlClientTemplateId, createdId), rep, ResourceType.PROTOCOL_MAPPER);
assertEquals(totalMappers + 1, samlMappersRsc.getMappers().size());
assertEquals(totalSamlMappers + 1, samlMappersRsc.getMappersPerProtocol("saml").size());
ProtocolMapperRepresentation created = samlMappersRsc.getMapperById(createdId);
assertEqualMappers(rep, created);
}
@Test
public void test05CreateOidcProtocolMapper() {
//{"protocol":"openid-connect",
// "config":{"role":"myrole"},
// "consentRequired":true,
// "consentText":"My consent text",
// "name":"oidc-hardcoded-role-mapper",
// "protocolMapper":"oidc-hardcoded-role-mapper"}
ProtocolMapperRepresentation rep = makeOidcMapper("oidc-hardcoded-role-mapper");
int totalMappers = oidcMappersRsc.getMappers().size();
int totalOidcMappers = oidcMappersRsc.getMappersPerProtocol("openid-connect").size();
Response resp = oidcMappersRsc.createMapper(rep);
resp.close();
String createdId = ApiUtil.getCreatedId(resp);
assertAdminEvents.assertEvent(getRealmId(), OperationType.CREATE, AdminEventPaths.clientTemplateProtocolMapperPath(oidcClientTemplateId, createdId), rep, ResourceType.PROTOCOL_MAPPER);
assertEquals(totalMappers + 1, oidcMappersRsc.getMappers().size());
assertEquals(totalOidcMappers + 1, oidcMappersRsc.getMappersPerProtocol("openid-connect").size());
ProtocolMapperRepresentation created = oidcMappersRsc.getMapperById(createdId);//findByName(samlMappersRsc, "saml-role-name-mapper");
assertEqualMappers(rep, created);
}
@Test
public void test06UpdateSamlMapper() {
ProtocolMapperRepresentation rep = makeSamlMapper("saml-role-name-mapper2");
Response resp = samlMappersRsc.createMapper(rep);
resp.close();
String createdId = ApiUtil.getCreatedId(resp);
assertAdminEvents.assertEvent(getRealmId(), OperationType.CREATE, AdminEventPaths.clientTemplateProtocolMapperPath(samlClientTemplateId, createdId), rep, ResourceType.PROTOCOL_MAPPER);
rep.getConfig().put("role", "account.manage-account");
rep.setId(createdId);
rep.setConsentRequired(false);
samlMappersRsc.update(createdId, rep);
assertAdminEvents.assertEvent(getRealmId(), OperationType.UPDATE, AdminEventPaths.clientTemplateProtocolMapperPath(samlClientTemplateId, createdId), rep, ResourceType.PROTOCOL_MAPPER);
ProtocolMapperRepresentation updated = samlMappersRsc.getMapperById(createdId);
assertEqualMappers(rep, updated);
}
@Test
public void test07UpdateOidcMapper() {
ProtocolMapperRepresentation rep = makeOidcMapper("oidc-hardcoded-role-mapper2");
Response resp = oidcMappersRsc.createMapper(rep);
resp.close();
String createdId = ApiUtil.getCreatedId(resp);
assertAdminEvents.assertEvent(getRealmId(), OperationType.CREATE, AdminEventPaths.clientTemplateProtocolMapperPath(oidcClientTemplateId, createdId), rep, ResourceType.PROTOCOL_MAPPER);
rep.getConfig().put("role", "myotherrole");
rep.setId(createdId);
rep.setConsentRequired(false);
oidcMappersRsc.update(createdId, rep);
assertAdminEvents.assertEvent(getRealmId(), OperationType.UPDATE, AdminEventPaths.clientTemplateProtocolMapperPath(oidcClientTemplateId, createdId), rep, ResourceType.PROTOCOL_MAPPER);
ProtocolMapperRepresentation updated = oidcMappersRsc.getMapperById(createdId);
assertEqualMappers(rep, updated);
}
@Test
public void testDeleteSamlMapper() {
ProtocolMapperRepresentation rep = makeSamlMapper("saml-role-name-mapper3");
Response resp = samlMappersRsc.createMapper(rep);
resp.close();
String createdId = ApiUtil.getCreatedId(resp);
assertAdminEvents.assertEvent(getRealmId(), OperationType.CREATE, AdminEventPaths.clientTemplateProtocolMapperPath(samlClientTemplateId, createdId), rep, ResourceType.PROTOCOL_MAPPER);
samlMappersRsc.delete(createdId);
assertAdminEvents.assertEvent(getRealmId(), OperationType.DELETE, AdminEventPaths.clientTemplateProtocolMapperPath(samlClientTemplateId, createdId), ResourceType.PROTOCOL_MAPPER);
try {
samlMappersRsc.getMapperById(createdId);
Assert.fail("Not expected to find mapper");
} catch (NotFoundException nfe) {
// Expected
}
}
@Test
public void testDeleteOidcMapper() {
ProtocolMapperRepresentation rep = makeOidcMapper("oidc-hardcoded-role-mapper3");
Response resp = oidcMappersRsc.createMapper(rep);
// Extract the created id from the Location header BEFORE closing the response;
// reading response metadata after close() relies on implementation-specific
// behavior of the JAX-RS client.
String createdId = ApiUtil.getCreatedId(resp);
resp.close();
assertAdminEvents.assertEvent(getRealmId(), OperationType.CREATE, AdminEventPaths.clientTemplateProtocolMapperPath(oidcClientTemplateId, createdId), rep, ResourceType.PROTOCOL_MAPPER);
oidcMappersRsc.delete(createdId);
assertAdminEvents.assertEvent(getRealmId(), OperationType.DELETE, AdminEventPaths.clientTemplateProtocolMapperPath(oidcClientTemplateId, createdId), ResourceType.PROTOCOL_MAPPER);
// The deleted mapper must no longer be resolvable.
try {
oidcMappersRsc.getMapperById(createdId);
Assert.fail("Not expected to find mapper");
} catch (NotFoundException nfe) {
// Expected
}
}
// Shorthand for the client-templates resource of the realm under test.
private ClientTemplatesResource clientTemplates() {
return testRealmResource().clientTemplates();
}
/**
 * Creates a client template with the given name and protocol, asserts the HTTP 201
 * response and the corresponding CREATE admin event.
 *
 * @param templateName name for the new template
 * @param protocol     protocol to assign to the template
 * @return id of the newly created template
 */
private String createTemplate(String templateName, String protocol) {
ClientTemplateRepresentation rep = new ClientTemplateRepresentation();
rep.setName(templateName);
rep.setFullScopeAllowed(false);
rep.setProtocol(protocol);
Response resp = clientTemplates().create(rep);
Assert.assertEquals(201, resp.getStatus());
// Extract the created id from the Location header BEFORE closing the response;
// reading response metadata after close() relies on implementation-specific
// behavior of the JAX-RS client.
String templateId = ApiUtil.getCreatedId(resp);
resp.close();
assertAdminEvents.assertEvent(getRealmId(), OperationType.CREATE, AdminEventPaths.clientTemplateResourcePath(templateId), rep, ResourceType.CLIENT_TEMPLATE);
return templateId;
}
// Deletes the given client template and asserts the corresponding DELETE admin event.
private void removeTemplate(String templateId) {
clientTemplates().get(templateId).remove();
assertAdminEvents.assertEvent(getRealmId(), OperationType.DELETE, AdminEventPaths.clientTemplateResourcePath(templateId), ResourceType.CLIENT_TEMPLATE);
}
}
| |
/*
* Copyright 2000-2013 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInsight.completion;
import com.intellij.codeInsight.CodeInsightSettings;
import com.intellij.codeInsight.completion.impl.CompletionServiceImpl;
import com.intellij.codeInsight.completion.impl.CompletionSorterImpl;
import com.intellij.codeInsight.editorActions.CompletionAutoPopupHandler;
import com.intellij.codeInsight.hint.EditorHintListener;
import com.intellij.codeInsight.hint.HintManager;
import com.intellij.codeInsight.lookup.*;
import com.intellij.codeInsight.lookup.impl.LookupImpl;
import com.intellij.diagnostic.PerformanceWatcher;
import com.intellij.featureStatistics.FeatureUsageTracker;
import com.intellij.injected.editor.DocumentWindow;
import com.intellij.injected.editor.EditorWindow;
import com.intellij.lang.Language;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.actionSystem.IdeActions;
import com.intellij.openapi.application.AccessToken;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.Result;
import com.intellij.openapi.application.WriteAction;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.command.WriteCommandAction;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.progress.util.ProgressIndicatorBase;
import com.intellij.openapi.progress.util.ProgressWrapper;
import com.intellij.openapi.project.DumbService;
import com.intellij.openapi.project.IndexNotReadyException;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.MessageType;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.patterns.ElementPattern;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiReference;
import com.intellij.psi.ReferenceRange;
import com.intellij.psi.util.PsiUtilCore;
import com.intellij.ui.LightweightHint;
import com.intellij.util.Alarm;
import com.intellij.util.ObjectUtils;
import com.intellij.util.ThreeState;
import com.intellij.util.concurrency.Semaphore;
import com.intellij.util.containers.ConcurrentHashMap;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.messages.MessageBusConnection;
import com.intellij.util.ui.update.MergingUpdateQueue;
import com.intellij.util.ui.update.Update;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.TestOnly;
import javax.swing.*;
import java.awt.event.KeyAdapter;
import java.awt.event.KeyEvent;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.util.List;
import java.util.concurrent.atomic.AtomicReference;
/**
 * Progress indicator that drives a single code-completion session: it runs the completion
 * contributors (synchronously in unit-test mode, otherwise on background threads — see
 * {@link #startCompletion}), feeds computed items into the {@link LookupImpl}, batches lookup
 * UI refreshes through a {@link MergingUpdateQueue}, and manages the global
 * {@link CompletionPhase} transitions as the session starts, restarts and finishes.
 * <p/>
 * Threading: construction, {@link #updateLookup()}, {@link #finishCompletionProcess} and
 * {@link #scheduleRestart()} assert the dispatch thread; {@link #addItem} is called from the
 * completion (non-dispatch) thread.
 *
 * @author peter
 */
public class CompletionProgressIndicator extends ProgressIndicatorBase implements CompletionProcess, Disposable {
private static final Logger LOG = Logger.getInstance("#com.intellij.codeInsight.completion.CompletionProgressIndicator");
private final Editor myEditor;
private final CompletionParameters myParameters;
private final CodeCompletionHandlerBase myHandler;
private final LookupImpl myLookup;
// Batches updateLookup() calls so the lookup UI is refreshed at most every 300 ms (see ctor).
private final MergingUpdateQueue myQueue;
private final Update myUpdate = new Update("update") {
@Override
public void run() {
updateLookup();
}
};
// Released 300 ms after the first item arrives, and unconditionally in stop();
// presumably the handler blocks on it to wait for initial results — confirm against caller.
private final Semaphore myFreezeSemaphore;
private final OffsetMap myOffsetMap;
// (startOffset, pattern) pairs; when the text typed between startOffset and the caret
// matches the pattern, completion is restarted (see prefixUpdated()).
private final List<Pair<Integer, ElementPattern<String>>> myRestartingPrefixConditions = ContainerUtil.createLockFreeCopyOnWriteList();
// Finishes this completion process when the lookup is closed, either by selecting
// an item (commit path) or by cancellation.
private final LookupAdapter myLookupListener = new LookupAdapter() {
@Override
public void itemSelected(LookupEvent event) {
finishCompletionProcess(false);
LookupElement item = event.getItem();
if (item == null) return;
setMergeCommand();
myHandler.lookupItemSelected(CompletionProgressIndicator.this, item, event.getCompletionChar(), myLookup.getItems());
}
@Override
public void lookupCanceled(final LookupEvent event) {
finishCompletionProcess(true);
}
};
// Number of items added so far; 0 after background calculation means "no suggestions".
private volatile int myCount;
// Maps each added lookup element (by identity) to the sorter it was produced with.
private final ConcurrentHashMap<LookupElement, CompletionSorterImpl> myItemSorters =
new ConcurrentHashMap<LookupElement, CompletionSorterImpl>(
ContainerUtil.<LookupElement>identityStrategy());
private final PropertyChangeListener myLookupManagerListener;
// Caret offset at the moment completion started; moving the caret before it restarts completion.
private final int myStartCaret;
public CompletionProgressIndicator(final Editor editor,
CompletionParameters parameters,
CodeCompletionHandlerBase handler,
Semaphore freezeSemaphore,
final OffsetMap offsetMap,
boolean hasModifiers) {
myEditor = editor;
myParameters = parameters;
myHandler = handler;
myFreezeSemaphore = freezeSemaphore;
myOffsetMap = offsetMap;
myLookup = (LookupImpl)parameters.getLookup();
myStartCaret = myEditor.getCaretModel().getOffset();
myLookup.setArranger(new CompletionLookupArranger(parameters, this));
myLookup.addLookupListener(myLookupListener);
myLookup.setCalculating(true);
// While this completion is in progress no other lookup may replace ours.
myLookupManagerListener = new PropertyChangeListener() {
@Override
public void propertyChange(PropertyChangeEvent evt) {
if (evt.getNewValue() != null) {
LOG.error("An attempt to change the lookup during completion, phase = " + CompletionServiceImpl.getCompletionPhase());
}
}
};
LookupManager.getInstance(getProject()).addPropertyChangeListener(myLookupManagerListener);
myQueue = new MergingUpdateQueue("completion lookup progress", 300, true, myEditor.getContentComponent());
myQueue.setPassThrough(false);
ApplicationManager.getApplication().assertIsDispatchThread();
// The offset map is disposed together with this indicator (see disposeIndicator()).
Disposer.register(this, offsetMap);
if (hasModifiers && !ApplicationManager.getApplication().isUnitTestMode()) {
trackModifiers();
}
}
public OffsetMap getOffsetMap() {
return myOffsetMap;
}
public int getSelectionEndOffset() {
return getOffsetMap().getOffset(CompletionInitializationContext.SELECTION_END_OFFSET);
}
// Pre-contributor hook: adds advertisement texts, adjusts the replacement offset based on
// the reference at the caret, then gives each (dumb-aware, if indexing) contributor a chance
// to tweak the context.
void duringCompletion(CompletionInitializationContext initContext) {
if (isAutopopupCompletion()) {
if (shouldPreselectFirstSuggestion(myParameters)) {
if (!CodeInsightSettings.getInstance().SELECT_AUTOPOPUP_SUGGESTIONS_BY_CHARS) {
myLookup.setFocusDegree(LookupImpl.FocusDegree.SEMI_FOCUSED);
if (FeatureUsageTracker.getInstance().isToBeAdvertisedInLookup(CodeCompletionFeatures.EDITING_COMPLETION_FINISH_BY_CONTROL_DOT, getProject())) {
myLookup.addAdvertisement("Press " +
CompletionContributor.getActionShortcut(IdeActions.ACTION_CHOOSE_LOOKUP_ITEM_DOT) +
" to choose the selected (or first) suggestion and insert a dot afterwards", null);
}
} else {
myLookup.setFocusDegree(LookupImpl.FocusDegree.FOCUSED);
}
}
else if (FeatureUsageTracker.getInstance()
.isToBeAdvertisedInLookup(CodeCompletionFeatures.EDITING_COMPLETION_CONTROL_ENTER, getProject())) {
myLookup.addAdvertisement("Press " +
CompletionContributor.getActionShortcut(IdeActions.ACTION_CHOOSE_LOOKUP_ITEM_ALWAYS) +
" to choose the selected (or first) suggestion", null);
}
if (!myEditor.isOneLineMode() &&
FeatureUsageTracker.getInstance()
.isToBeAdvertisedInLookup(CodeCompletionFeatures.EDITING_COMPLETION_CONTROL_ARROWS, getProject())) {
myLookup.addAdvertisement(CompletionContributor.getActionShortcut(IdeActions.ACTION_LOOKUP_DOWN) + " and " +
CompletionContributor.getActionShortcut(IdeActions.ACTION_LOOKUP_UP) +
" will move caret down and up in the editor", null);
}
} else if (DumbService.isDumb(getProject())) {
myLookup.addAdvertisement("Completion results might be incomplete until indexing is complete", MessageType.WARNING.getPopupBackground());
}
ProgressManager.checkCanceled();
// If the identifier end was not explicitly set, derive the replacement offset from the
// reference at the caret (may hit un-indexed data, hence IndexNotReadyException is tolerated).
if (!initContext.getOffsetMap().wasModified(CompletionInitializationContext.IDENTIFIER_END_OFFSET)) {
try {
final int selectionEndOffset = initContext.getSelectionEndOffset();
final PsiReference reference = initContext.getFile().findReferenceAt(selectionEndOffset);
if (reference != null) {
int referenceStart = reference.getElement().getTextRange().getStartOffset();
if (referenceStart + reference.getRangeInElement().getStartOffset() != selectionEndOffset ||
referenceStart == initContext.getStartOffset()) {
initContext.setReplacementOffset(findReplacementOffset(selectionEndOffset, reference));
}
}
}
catch (IndexNotReadyException ignored) {
}
}
for (CompletionContributor contributor : CompletionContributor.forLanguage(initContext.getPositionLanguage())) {
ProgressManager.checkCanceled();
if (DumbService.getInstance(initContext.getProject()).isDumb() && !DumbService.isDumbAware(contributor)) {
continue;
}
contributor.duringCompletion(initContext);
}
}
// Returns the sorter the given element was registered with in addItem().
@NotNull
CompletionSorterImpl getSorter(LookupElement element) {
return myItemSorters.get(element);
}
@Override
public void dispose() {
}
// Picks the end of the reference range containing the caret; falls back to the
// reference element's own end offset.
private static int findReplacementOffset(int selectionEndOffset, PsiReference reference) {
final List<TextRange> ranges = ReferenceRange.getAbsoluteRanges(reference);
for (TextRange range : ranges) {
if (range.contains(selectionEndOffset)) {
return range.getEndOffset();
}
}
return reference.getElement().getTextRange().getStartOffset() + reference.getRangeInElement().getEndOffset();
}
// Asks contributors (in order) for an advertisement text; the first non-null wins and
// triggers a deferred lookup refresh on the EDT. Runs on a background thread (see startCompletion).
void scheduleAdvertising() {
if (myLookup.isAvailableToUser()) {
return;
}
final List<CompletionContributor> list = CompletionContributor.forParameters(myParameters);
for (final CompletionContributor contributor : list) {
if (myLookup.getAdvertisementText() != null) return;
if (!myLookup.isCalculating() && !myLookup.isVisible()) return;
@SuppressWarnings("deprecation") String s = contributor.advertise(myParameters);
if (myLookup.getAdvertisementText() != null) return;
if (s != null) {
myLookup.setAdvertisementText(s);
ApplicationManager.getApplication().invokeLater(new Runnable() {
@Override
public void run() {
// Only refresh if this completion is still the active one and in a live phase.
if (isAutopopupCompletion() && !myLookup.isAvailableToUser()) {
return;
}
if (!CompletionServiceImpl.isPhase(CompletionPhase.BgCalculation.class, CompletionPhase.ItemsCalculated.class)) {
return;
}
if (CompletionServiceImpl.getCompletionPhase().indicator != CompletionProgressIndicator.this) {
return;
}
updateLookup();
}
}, myQueue.getModalityState());
return;
}
}
}
@Override
public void cancel() {
super.cancel();
}
// True when another completion session has replaced this one in the current phase.
private boolean isOutdated() {
return CompletionServiceImpl.getCompletionPhase().indicator != this;
}
// Installs a key listener that notices modifier-key presses (see ModifierTracker);
// only used for explicitly invoked (non-autopopup) completion.
private void trackModifiers() {
assert !isAutopopupCompletion();
final JComponent contentComponent = myEditor.getContentComponent();
contentComponent.addKeyListener(new ModifierTracker(contentComponent));
}
// Groups subsequent document commands under one undo group per completion session.
public void setMergeCommand() {
CommandProcessor.getInstance().setCurrentCommandGroupId(getCompletionCommandName());
}
private String getCompletionCommandName() {
return "Completion" + hashCode();
}
public boolean showLookup() {
return updateLookup();
}
public CompletionParameters getParameters() {
return myParameters;
}
public CodeCompletionHandlerBase getHandler() {
return myHandler;
}
public LookupImpl getLookup() {
return myLookup;
}
// EDT-only. Shows the lookup on first call (unless a meaningless autopopup was hidden)
// and refreshes its UI; returns false when the session is outdated or the lookup
// could not / should not be shown.
private boolean updateLookup() {
ApplicationManager.getApplication().assertIsDispatchThread();
if (isOutdated() || !shouldShowLookup()) return false;
boolean justShown = false;
if (!myLookup.isShown()) {
if (hideAutopopupIfMeaningless()) {
return false;
}
// Diagnostic aid: dump threads when the lookup would appear empty while still calculating.
if (Registry.is("dump.threads.on.empty.lookup") && myLookup.isCalculating() && myLookup.getItems().isEmpty()) {
PerformanceWatcher.getInstance().dumpThreads(true);
}
if (StringUtil.isEmpty(myLookup.getAdvertisementText()) && !isAutopopupCompletion() && !DumbService.isDumb(getProject())) {
final String text = DefaultCompletionContributor.getDefaultAdvertisementText(myParameters);
if (text != null) {
myLookup.setAdvertisementText(text);
}
}
if (!myLookup.showLookup()) {
return false;
}
justShown = true;
}
myLookup.refreshUi(true, justShown);
hideAutopopupIfMeaningless();
if (justShown) {
myLookup.ensureSelectionVisible(true);
}
return true;
}
// Autopopup may be configured (registry) to stay hidden until calculation completes.
private boolean shouldShowLookup() {
if (isAutopopupCompletion() && myLookup.isCalculating() && Registry.is("ide.completion.delay.autopopup.until.completed")) {
return false;
}
return true;
}
final boolean isInsideIdentifier() {
return getIdentifierEndOffset() != getSelectionEndOffset();
}
public int getIdentifierEndOffset() {
return myOffsetMap.getOffset(CompletionInitializationContext.IDENTIFIER_END_OFFSET);
}
// Called from the completion thread for each produced item: records its sorter,
// adds it to the lookup and schedules a batched UI update. The freeze semaphore is
// released 300 ms after the first item so the UI can unfreeze with early results.
public synchronized void addItem(final CompletionResult item) {
if (!isRunning()) return;
ProgressManager.checkCanceled();
final boolean unitTestMode = ApplicationManager.getApplication().isUnitTestMode();
if (!unitTestMode) {
LOG.assertTrue(!ApplicationManager.getApplication().isDispatchThread());
}
LOG.assertTrue(myParameters.getPosition().isValid());
myItemSorters.put(item.getLookupElement(), (CompletionSorterImpl)item.getSorter());
myLookup.addItem(item.getLookupElement(), item.getPrefixMatcher());
myCount++;
if (myCount == 1) {
new Alarm(Alarm.ThreadToUse.SHARED_THREAD, this).addRequest(new Runnable() {
@Override
public void run() {
myFreezeSemaphore.up();
}
}, 300);
}
myQueue.queue(myUpdate);
}
// Terminates the session, optionally hiding the active lookup as well.
public void closeAndFinish(boolean hideLookup) {
if (!myLookup.isLookupDisposed()) {
Lookup lookup = LookupManager.getActiveLookup(myEditor);
LOG.assertTrue(lookup == myLookup, "lookup changed: " + lookup + "; " + this);
}
myLookup.removeLookupListener(myLookupListener);
finishCompletionProcess(true);
CompletionServiceImpl.assertPhase(CompletionPhase.NoCompletion.getClass());
if (hideLookup) {
LookupManager.getInstance(getProject()).hideActiveLookup();
}
}
// EDT-only teardown: cancels calculation, disposes the update queue, detaches the
// lookup-manager listener and moves the global phase to NoCompletion.
private void finishCompletionProcess(boolean disposeOffsetMap) {
cancel();
ApplicationManager.getApplication().assertIsDispatchThread();
Disposer.dispose(myQueue);
LookupManager.getInstance(getProject()).removePropertyChangeListener(myLookupManagerListener);
CompletionProgressIndicator currentCompletion = CompletionServiceImpl.getCompletionService().getCurrentCompletion();
LOG.assertTrue(currentCompletion == this, currentCompletion + "!=" + this);
CompletionServiceImpl
.assertPhase(CompletionPhase.BgCalculation.class, CompletionPhase.ItemsCalculated.class, CompletionPhase.Synchronous.class,
CompletionPhase.CommittingDocuments.class);
CompletionPhase oldPhase = CompletionServiceImpl.getCompletionPhase();
if (oldPhase instanceof CompletionPhase.CommittingDocuments) {
LOG.assertTrue(((CompletionPhase.CommittingDocuments)oldPhase).isRestartingCompletion(), oldPhase);
((CompletionPhase.CommittingDocuments)oldPhase).replaced = true;
}
CompletionServiceImpl.setCompletionPhase(CompletionPhase.NoCompletion);
if (disposeOffsetMap) {
disposeIndicator();
}
}
void disposeIndicator() {
// our offset map should be disposed under write action, so that duringCompletion (read action) won't access it after disposing
AccessToken token = WriteAction.start();
try {
Disposer.dispose(this);
}
finally {
token.finish();
}
}
// Resets global completion state between tests.
@TestOnly
public static void cleanupForNextTest() {
CompletionProgressIndicator currentCompletion = CompletionServiceImpl.getCompletionService().getCurrentCompletion();
if (currentCompletion != null) {
currentCompletion.finishCompletionProcess(true);
CompletionServiceImpl.assertPhase(CompletionPhase.NoCompletion.getClass());
}
else {
CompletionServiceImpl.setCompletionPhase(CompletionPhase.NoCompletion);
}
CompletionLookupArranger.cancelLastCompletionStatisticsUpdate();
}
// Called when calculation finishes (or the indicator is stopped). On the EDT it decides
// the outcome: no items -> hide lookup / show "no suggestions"; otherwise -> ItemsCalculated
// phase and a final lookup refresh.
@Override
public void stop() {
super.stop();
myQueue.cancelAllUpdates();
myFreezeSemaphore.up();
ApplicationManager.getApplication().invokeLater(new Runnable() {
@Override
public void run() {
final CompletionPhase phase = CompletionServiceImpl.getCompletionPhase();
if (!(phase instanceof CompletionPhase.BgCalculation) || phase.indicator != CompletionProgressIndicator.this) return;
LOG.assertTrue(!getProject().isDisposed(), "project disposed");
if (myEditor.isDisposed()) {
LookupManager.getInstance(getProject()).hideActiveLookup();
CompletionServiceImpl.setCompletionPhase(CompletionPhase.NoCompletion);
return;
}
if (myEditor instanceof EditorWindow) {
LOG.assertTrue(((EditorWindow)myEditor).getInjectedFile().isValid(), "injected file !valid");
LOG.assertTrue(((DocumentWindow)myEditor.getDocument()).isValid(), "docWindow !valid");
}
PsiFile file = myLookup.getPsiFile();
LOG.assertTrue(file == null || file.isValid(), "file !valid");
myLookup.setCalculating(false);
if (myCount == 0) {
LookupManager.getInstance(getProject()).hideActiveLookup();
if (!isAutopopupCompletion()) {
final CompletionProgressIndicator current = CompletionServiceImpl.getCompletionService().getCurrentCompletion();
LOG.assertTrue(current == null, current + "!=" + CompletionProgressIndicator.this);
handleEmptyLookup(!((CompletionPhase.BgCalculation)phase).modifiersChanged);
}
}
else {
CompletionServiceImpl.setCompletionPhase(new CompletionPhase.ItemsCalculated(CompletionProgressIndicator.this));
updateLookup();
}
}
}, myQueue.getModalityState());
}
// Hides an autopopup lookup whose every item exactly equals the typed prefix or is not
// worth showing; switches the phase to EmptyAutoPopup. Returns true if it hid the lookup.
private boolean hideAutopopupIfMeaningless() {
if (!myLookup.isLookupDisposed() && isAutopopupCompletion() && !myLookup.isSelectionTouched() && !myLookup.isCalculating()) {
myLookup.refreshUi(true, false);
final List<LookupElement> items = myLookup.getItems();
for (LookupElement item : items) {
if (!myLookup.itemPattern(item).equals(item.getLookupString())) {
return false;
}
if (item.isValid() && item.isWorthShowingInAutoPopup()) {
return false;
}
}
myLookup.hideLookup(false);
LOG.assertTrue(CompletionServiceImpl.getCompletionService().getCurrentCompletion() == null);
CompletionServiceImpl.setCompletionPhase(new CompletionPhase.EmptyAutoPopup(this));
return true;
}
return false;
}
// Inserts the common prefix of all items (as a write command); returns whether anything changed.
public boolean fillInCommonPrefix(final boolean explicit) {
if (isInsideIdentifier()) {
return false;
}
final Boolean aBoolean = new WriteCommandAction<Boolean>(getProject()) {
@Override
protected void run(Result<Boolean> result) throws Throwable {
if (!explicit) {
setMergeCommand();
}
try {
result.setResult(myLookup.fillInCommonPrefix(explicit));
}
catch (Exception e) {
LOG.error(e);
}
}
}.execute().getResultObject();
return aBoolean.booleanValue();
}
// Runs the caller-supplied prefix restoration inside this session's merged write command.
public void restorePrefix(@NotNull final Runnable customRestore) {
new WriteCommandAction(getProject()) {
@Override
protected void run(Result result) throws Throwable {
setMergeCommand();
customRestore.run();
}
}.execute();
}
// A reused session counts as at least the second invocation.
public int nextInvocationCount(int invocation, boolean reused) {
return reused ? Math.max(getParameters().getInvocationCount() + 1, 2) : invocation;
}
public Editor getEditor() {
return myEditor;
}
// True when a new request of the same type in the same editor should reuse this session.
public boolean isRepeatedInvocation(CompletionType completionType, Editor editor) {
if (completionType != myParameters.getCompletionType() || editor != myEditor) {
return false;
}
if (isAutopopupCompletion() && !myLookup.mayBeNoticed()) {
return false;
}
return true;
}
@Override
public boolean isAutopopupCompletion() {
return myHandler.autopopup;
}
@NotNull
public Project getProject() {
return ObjectUtils.assertNotNull(myEditor.getProject());
}
// Registers a prefix pattern that, once matched by newly typed text, restarts completion.
public void addWatchedPrefix(int startOffset, ElementPattern<String> restartCondition) {
myRestartingPrefixConditions.add(Pair.create(startOffset, restartCondition));
}
// Called when the caret/prefix changes: restarts completion if the caret moved before the
// session start or a watched prefix condition matches; otherwise may hide a meaningless autopopup.
public void prefixUpdated() {
final int caretOffset = myEditor.getCaretModel().getOffset();
if (caretOffset < myStartCaret) {
scheduleRestart();
myRestartingPrefixConditions.clear();
return;
}
final CharSequence text = myEditor.getDocument().getCharsSequence();
for (Pair<Integer, ElementPattern<String>> pair : myRestartingPrefixConditions) {
int start = pair.first;
if (caretOffset >= start) {
final String newPrefix = text.subSequence(start, caretOffset).toString();
if (pair.second.accepts(newPrefix)) {
scheduleRestart();
myRestartingPrefixConditions.clear();
return;
}
}
}
hideAutopopupIfMeaningless();
}
// EDT-only. Cancels this session and schedules a fresh completion of the same type once
// the document is committed (phase: CommittingDocuments).
public void scheduleRestart() {
ApplicationManager.getApplication().assertIsDispatchThread();
cancel();
final CompletionProgressIndicator current = CompletionServiceImpl.getCompletionService().getCurrentCompletion();
if (this != current) {
LOG.error(current + "!=" + this);
}
hideAutopopupIfMeaningless();
CompletionPhase oldPhase = CompletionServiceImpl.getCompletionPhase();
if (oldPhase instanceof CompletionPhase.CommittingDocuments) {
((CompletionPhase.CommittingDocuments)oldPhase).replaced = true;
}
final CompletionPhase.CommittingDocuments phase = new CompletionPhase.CommittingDocuments(this, myEditor);
CompletionServiceImpl.setCompletionPhase(phase);
phase.ignoreCurrentDocumentChange();
final Project project = getProject();
ApplicationManager.getApplication().invokeLater(new Runnable() {
@Override
public void run() {
CompletionAutoPopupHandler.runLaterWithCommitted(project, myEditor.getDocument(), new Runnable() {
@Override
public void run() {
if (phase.checkExpired()) return;
CompletionAutoPopupHandler.invokeCompletion(myParameters.getCompletionType(),
isAutopopupCompletion(), project, myEditor, myParameters.getInvocationCount(),
true);
}
});
}
}, project.getDisposed());
}
@Override
public String toString() {
return "CompletionProgressIndicator[count=" +
myCount +
",phase=" +
CompletionServiceImpl.getCompletionPhase() +
"]@" +
System.identityHashCode(this);
}
// Invoked when calculation produced no items for an explicit (non-autopopup) completion;
// lets contributors provide a "no suggestions" hint text.
protected void handleEmptyLookup(final boolean awaitSecondInvocation) {
LOG.assertTrue(!isAutopopupCompletion());
if (ApplicationManager.getApplication().isUnitTestMode() || !myHandler.invokedExplicitly) {
CompletionServiceImpl.setCompletionPhase(CompletionPhase.NoCompletion);
return;
}
for (final CompletionContributor contributor : CompletionContributor.forParameters(getParameters())) {
final String text = contributor.handleEmptyLookup(getParameters(), getEditor());
if (StringUtil.isNotEmpty(text)) {
LightweightHint hint = showErrorHint(getProject(), getEditor(), text);
CompletionServiceImpl.setCompletionPhase(
awaitSecondInvocation ? new CompletionPhase.NoSuggestionsHint(hint, this) : CompletionPhase.NoCompletion);
return;
}
}
CompletionServiceImpl.setCompletionPhase(CompletionPhase.NoCompletion);
}
// Shows an error hint under the caret and captures the created LightweightHint via a
// temporary message-bus subscription.
private static LightweightHint showErrorHint(Project project, Editor editor, String text) {
final LightweightHint[] result = {null};
final EditorHintListener listener = new EditorHintListener() {
@Override
public void hintShown(final Project project, final LightweightHint hint, final int flags) {
result[0] = hint;
}
};
final MessageBusConnection connection = project.getMessageBus().connect();
connection.subscribe(EditorHintListener.TOPIC, listener);
assert text != null;
HintManager.getInstance().showErrorHint(editor, text, HintManager.UNDER);
connection.disconnect();
return result[0];
}
// Outside unit tests the first suggestion is always preselected; in tests the
// AUTOPOPUP_FOCUS_POLICY setting and per-language CompletionConfidence decide.
private static boolean shouldPreselectFirstSuggestion(CompletionParameters parameters) {
if (!ApplicationManager.getApplication().isUnitTestMode()) {
return true;
}
switch (CodeInsightSettings.getInstance().AUTOPOPUP_FOCUS_POLICY) {
case CodeInsightSettings.ALWAYS:
return true;
case CodeInsightSettings.NEVER:
return false;
}
final Language language = PsiUtilCore.getLanguageAtOffset(parameters.getPosition().getContainingFile(), parameters.getOffset());
for (CompletionConfidence confidence : CompletionConfidenceEP.forLanguage(language)) {
final ThreeState result = confidence.shouldFocusLookup(parameters);
if (result != ThreeState.UNSURE) {
LOG.debug(confidence + " has returned shouldFocusLookup=" + result);
return result == ThreeState.YES;
}
}
return false;
}
// Kicks off advertising and item calculation, synchronously in unit tests and on background
// threads otherwise; the returned reference is filled with the final items when done.
AtomicReference<LookupElement[]> startCompletion(final CompletionInitializationContext initContext) {
boolean sync = ApplicationManager.getApplication().isUnitTestMode() && !CompletionAutoPopupHandler.ourTestingAutopopup;
final CompletionThreading strategy = sync ? new SyncCompletion() : new AsyncCompletion();
strategy.startThread(ProgressWrapper.wrap(this), new Runnable() {
@Override
public void run() {
scheduleAdvertising();
}
});
final WeighingDelegate weigher = strategy.delegateWeighing(this);
final AtomicReference<LookupElement[]> data = new AtomicReference<LookupElement[]>(null);
class CalculateItems implements Runnable {
@Override
public void run() {
try {
data.set(calculateItems(initContext, weigher));
}
catch (ProcessCanceledException ignore) {
}
catch (Throwable t) {
LOG.error(t);
cancel();
}
}
}
strategy.startThread(this, new CalculateItems());
return data;
}
// Runs the pre-completion hook, performs the actual completion and waits for weighing.
private LookupElement[] calculateItems(CompletionInitializationContext initContext, WeighingDelegate weigher) {
duringCompletion(initContext);
ProgressManager.checkCanceled();
LookupElement[] result = CompletionService.getCompletionService().performCompletion(myParameters, weigher);
ProgressManager.checkCanceled();
weigher.waitFor();
ProgressManager.checkCanceled();
return result;
}
// Watches for a modifier key press after explicit completion: it flags the current
// BgCalculation phase (so the empty-lookup hint is suppressed) or clears an
// InsertedSingleItem phase, then uninstalls itself.
private static class ModifierTracker extends KeyAdapter {
private final JComponent myContentComponent;
public ModifierTracker(JComponent contentComponent) {
myContentComponent = contentComponent;
}
@Override
public void keyPressed(KeyEvent e) {
processModifier(e);
}
@Override
public void keyReleased(KeyEvent e) {
processModifier(e);
}
private void processModifier(KeyEvent e) {
final int code = e.getKeyCode();
if (code == KeyEvent.VK_CONTROL || code == KeyEvent.VK_META || code == KeyEvent.VK_ALT || code == KeyEvent.VK_SHIFT) {
myContentComponent.removeKeyListener(this);
final CompletionPhase phase = CompletionServiceImpl.getCompletionPhase();
if (phase instanceof CompletionPhase.BgCalculation) {
((CompletionPhase.BgCalculation)phase).modifiersChanged = true;
}
else if (phase instanceof CompletionPhase.InsertedSingleItem) {
CompletionServiceImpl.setCompletionPhase(CompletionPhase.NoCompletion);
}
}
}
}
}
| |
/*******************************************************************************
* Copyright (c) 2015-2018 Skymind, Inc.
*
* This program and the accompanying materials are made available under the
* terms of the Apache License, Version 2.0 which is available at
* https://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
* SPDX-License-Identifier: Apache-2.0
******************************************************************************/
package org.datavec.api.transform.sequence.window;
import lombok.Data;
import org.datavec.api.transform.ColumnType;
import org.datavec.api.transform.metadata.ColumnMetaData;
import org.datavec.api.transform.metadata.TimeMetaData;
import org.datavec.api.transform.schema.Schema;
import org.datavec.api.transform.schema.SequenceSchema;
import org.datavec.api.writable.LongWritable;
import org.datavec.api.writable.Writable;
import org.joda.time.DateTimeZone;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeUnit;
/**
* A windowing function based on time, with non-overlapping windows. Time for each entry in the sequence is provided by a Time column<br>
* Functionality here: Calculate windows of data based on a fixed window size (1 minute, 1 hour, etc), with an optional offset.<br>
* Specifically, window start times T are calculated such that (T + timeZoneOffset + offset) % windowSize == 0
* timeZoneOffset comes from the Time column metadata; offset allows for the window to be shifted one way or another,
* for example to allow for windowing like (10:15 to 11:15) instead of (10:00 to 11:00), using an offset of 15 minutes<br>
* <p/>
* Note that the windows generated by this window function need not contain any data - i.e., it can generate empty an empty
* window if no data occurs in the specified time period.
*
* @author Alex Black
*/
@Data
public class TimeWindowFunction implements WindowFunction {
private final String timeColumn;
private final long windowSize;
private final TimeUnit windowSizeUnit;
private final long offsetAmount;
private final TimeUnit offsetUnit;
private final boolean addWindowStartTimeColumn;
private final boolean addWindowEndTimeColumn;
private final boolean excludeEmptyWindows;
private Schema inputSchema;
private final long offsetAmountMilliseconds;
private final long windowSizeMilliseconds;
private DateTimeZone timeZone;
/**
 * Constructor with zero offset
 *
 * @param timeColumn Name of the column that contains the time values (must be a time column)
 * @param windowSize Numerical quantity for the size of the time window (used in conjunction with windowSizeUnit)
 * @param windowSizeUnit Unit of the time window
 */
public TimeWindowFunction(String timeColumn, long windowSize, TimeUnit windowSizeUnit) {
// Delegate to the full constructor: no offset, no extra columns, keep empty windows.
this(timeColumn, windowSize, windowSizeUnit, 0, null);
}
/**
 * Constructor with zero offset, and supports adding columns containing the start and/or end times of the window
 *
 * @param timeColumn Name of the column that contains the time values (must be a time column)
 * @param windowSize Numerical quantity for the size of the time window (used in conjunction with windowSizeUnit)
 * @param windowSizeUnit Unit of the time window
 * @param addWindowStartTimeColumn If true: add a time column (name: "windowStartTime") that contains the start time
 *                                 of the window
 * @param addWindowEndTimeColumn If true: add a time column (name: "windowEndTime") that contains the end time
 *                               of the window
 */
public TimeWindowFunction(String timeColumn, long windowSize, TimeUnit windowSizeUnit,
boolean addWindowStartTimeColumn, boolean addWindowEndTimeColumn) {
// Delegate to the full constructor: no offset, keep empty windows.
this(timeColumn, windowSize, windowSizeUnit, 0, null, addWindowStartTimeColumn, addWindowEndTimeColumn, false);
}
/**
 * Constructor with optional offset
 *
 * @param timeColumn Name of the column that contains the time values (must be a time column)
 * @param windowSize Numerical quantity for the size of the time window (used in conjunction with windowSizeUnit)
 * @param windowSizeUnit Unit of the time window
 * @param offset Optional offset amount, to shift start/end of the time window forward or back
 * @param offsetUnit Optional offset unit for the offset amount.
 */
public TimeWindowFunction(String timeColumn, long windowSize, TimeUnit windowSizeUnit, long offset,
TimeUnit offsetUnit) {
// Delegate to the full constructor: no extra start/end columns, keep empty windows.
this(timeColumn, windowSize, windowSizeUnit, offset, offsetUnit, false, false, false);
}
/**
 * Full constructor: window size plus an optional offset, optional window start/end time
 * columns, and optional exclusion of windows that contain no values.
 *
 * @param timeColumn Name of the column that contains the time values (must be a time column)
 * @param windowSize Numerical quantity for the size of the time window (used in conjunction with windowSizeUnit)
 * @param windowSizeUnit Unit of the time window
 * @param offset Optional offset amount, to shift start/end of the time window forward or back
 * @param offsetUnit Optional offset unit for the offset amount.
 * @param addWindowStartTimeColumn If true: add a column (at the end) with the window start time
 * @param addWindowEndTimeColumn If true: add a column (at the end) with the window end time
 * @param excludeEmptyWindows If true: exclude any windows that don't have any values in them
 */
public TimeWindowFunction(String timeColumn, long windowSize, TimeUnit windowSizeUnit, long offset,
TimeUnit offsetUnit, boolean addWindowStartTimeColumn, boolean addWindowEndTimeColumn,
boolean excludeEmptyWindows) {
this.timeColumn = timeColumn;
this.windowSize = windowSize;
this.windowSizeUnit = windowSizeUnit;
this.offsetAmount = offset;
this.offsetUnit = offsetUnit;
this.addWindowStartTimeColumn = addWindowStartTimeColumn;
this.addWindowEndTimeColumn = addWindowEndTimeColumn;
this.excludeEmptyWindows = excludeEmptyWindows;
// Precompute millisecond equivalents once. A zero amount or a missing unit both
// mean "no offset".
this.offsetAmountMilliseconds =
(offset == 0 || offsetUnit == null) ? 0 : TimeUnit.MILLISECONDS.convert(offset, offsetUnit);
this.windowSizeMilliseconds = TimeUnit.MILLISECONDS.convert(windowSize, windowSizeUnit);
}
    /** Constructor used by {@code Builder.build()}: unpacks the builder into the full constructor. */
    private TimeWindowFunction(Builder builder) {
        this(builder.timeColumn, builder.windowSize, builder.windowSizeUnit, builder.offsetAmount, builder.offsetUnit,
                        builder.addWindowStartTimeColumn, builder.addWindowEndTimeColumn, builder.excludeEmptyWindows);
    }
@Override
public void setInputSchema(Schema schema) {
if (!(schema instanceof SequenceSchema))
throw new IllegalArgumentException(
"Invalid schema: TimeWindowFunction can " + "only operate on SequenceSchema");
if (!schema.hasColumn(timeColumn))
throw new IllegalStateException("Input schema does not have a column with name \"" + timeColumn + "\"");
if (schema.getMetaData(timeColumn).getColumnType() != ColumnType.Time)
throw new IllegalStateException("Invalid column: column \"" + timeColumn + "\" is not of type "
+ ColumnType.Time + "; is " + schema.getMetaData(timeColumn).getColumnType());
this.inputSchema = schema;
timeZone = ((TimeMetaData) schema.getMetaData(timeColumn)).getTimeZone();
}
    /** @return the input schema most recently set via {@link #setInputSchema(Schema)} (null if never set) */
    @Override
    public Schema getInputSchema() {
        return inputSchema;
    }
@Override
public Schema transform(Schema inputSchema) {
if (!addWindowStartTimeColumn && !addWindowEndTimeColumn)
return inputSchema;
List<ColumnMetaData> newMeta = new ArrayList<>();
newMeta.addAll(inputSchema.getColumnMetaData());
if (addWindowStartTimeColumn) {
newMeta.add(new TimeMetaData("windowStartTime"));
}
if (addWindowEndTimeColumn) {
newMeta.add(new TimeMetaData("windowEndTime"));
}
return inputSchema.newSchema(newMeta);
}
@Override
public String toString() {
return "TimeWindowFunction(column=\"" + timeColumn + "\",windowSize=" + windowSize + windowSizeUnit + ",offset="
+ offsetAmount + (offsetAmount != 0 && offsetUnit != null ? offsetUnit : "")
+ (addWindowStartTimeColumn ? ",addWindowStartTimeColumn=true" : "")
+ (addWindowEndTimeColumn ? ",addWindowEndTimeColumn=true" : "")
+ (excludeEmptyWindows ? ",excludeEmptyWindows=true" : "") + ")";
}
@Override
public List<List<List<Writable>>> applyToSequence(List<List<Writable>> sequence) {
int timeColumnIdx = inputSchema.getIndexOfColumn(this.timeColumn);
List<List<List<Writable>>> out = new ArrayList<>();
//We are assuming here that the sequence is already ordered (as is usually the case)
long currentWindowStartTime = Long.MIN_VALUE;
List<List<Writable>> currentWindow = null;
for (List<Writable> timeStep : sequence) {
long currentTime = timeStep.get(timeColumnIdx).toLong();
long windowStartTimeOfThisTimeStep = getWindowStartTimeForTime(currentTime);
//First time step...
if (currentWindowStartTime == Long.MIN_VALUE) {
currentWindowStartTime = windowStartTimeOfThisTimeStep;
currentWindow = new ArrayList<>();
}
//Two possibilities here: (a) we add it to the last time step, or (b) we need to make a new window...
if (currentWindowStartTime < windowStartTimeOfThisTimeStep) {
//New window. But: complication. We might have a bunch of empty windows...
while (currentWindowStartTime < windowStartTimeOfThisTimeStep) {
if (currentWindow != null) {
if (!(excludeEmptyWindows && currentWindow.size() == 0))
out.add(currentWindow);
}
currentWindow = new ArrayList<>();
currentWindowStartTime += windowSizeMilliseconds;
}
}
if (addWindowStartTimeColumn || addWindowEndTimeColumn) {
List<Writable> timeStep2 = new ArrayList<>(timeStep);
if (addWindowStartTimeColumn)
timeStep2.add(new LongWritable(currentWindowStartTime));
if (addWindowEndTimeColumn)
timeStep2.add(new LongWritable(currentWindowStartTime + windowSizeMilliseconds));
currentWindow.add(timeStep2);
} else {
currentWindow.add(timeStep);
}
}
//Add the final window to the output data...
if (!(excludeEmptyWindows && currentWindow.size() == 0) && currentWindow != null)
out.add(currentWindow);
return out;
}
/**
* Calculates the start time of the window for which the specified time belongs, in unix epoch (millisecond) format<br>
* For example, if the window size is 1 hour with offset 0, then a time 10:17 would return 10:00, as the 1 hour window
* is for 10:00:00.000 to 10:59:59.999 inclusive, or 10:00:00.000 (inclusive) to 11:00:00.000 (exclusive)
*
* @param time Time at which to determine the window start time (milliseconds epoch format)
*/
public long getWindowStartTimeForTime(long time) {
//Calculate aggregate offset: aggregate offset is due to both timezone and manual offset
long aggregateOffset = (timeZone.getOffset(time) + this.offsetAmountMilliseconds) % this.windowSizeMilliseconds;
return (time + aggregateOffset) - (time + aggregateOffset) % this.windowSizeMilliseconds;
}
    /**
     * Calculates the end time of the window for which the specified time belongs, in unix epoch (millisecond) format.
     * <b>Note</b>: this value is not included in the interval. Put another way, it is the start time of the <i>next</i>
     * interval: i.e., is equivalent to {@link #getWindowStartTimeForTime(long)} + interval (in milliseconds).<br>
     * To get the last <i>inclusive</i> time for the interval, subtract 1L (1 millisecond) from the value returned by
     * this method.<br>
     * For example, if the window size is 1 hour with offset 0, then a time 10:17 would return 11:00, as the 1 hour window
     * is for 10:00:00.000 (inclusive) to 11:00:00.000 (exclusive)
     *
     * @param time Time at which to determine the window start time
     * @return window end time (exclusive), in milliseconds epoch format
     */
    public long getWindowEndTimeForTime(long time) {
        return getWindowStartTimeForTime(time) + this.windowSizeMilliseconds;
    }
    /**
     * Builder for {@code TimeWindowFunction}. The time column and the window size/unit are
     * mandatory; offset, the extra window-time columns and empty-window exclusion are optional.
     */
    public static class Builder {
        private String timeColumn;
        private long windowSize = -1; // -1 is the "not set" sentinel, checked in build()
        private TimeUnit windowSizeUnit;
        private long offsetAmount;
        private TimeUnit offsetUnit;
        private boolean addWindowStartTimeColumn = false;
        private boolean addWindowEndTimeColumn = false;
        private boolean excludeEmptyWindows = false;
        /** Name of the column containing the time values (must be a time column). Required. */
        public Builder timeColumn(String timeColumn) {
            this.timeColumn = timeColumn;
            return this;
        }
        /** Size of the time window (quantity plus unit). Required. */
        public Builder windowSize(long windowSize, TimeUnit windowSizeUnit) {
            this.windowSize = windowSize;
            this.windowSizeUnit = windowSizeUnit;
            return this;
        }
        /** Optional offset (quantity plus unit) to shift the window boundaries forward or back. */
        public Builder offset(long offsetAmount, TimeUnit offsetUnit) {
            this.offsetAmount = offsetAmount;
            this.offsetUnit = offsetUnit;
            return this;
        }
        /** If true: append a "windowStartTime" column to every time step. Default: false. */
        public Builder addWindowStartTimeColumn(boolean addWindowStartTimeColumn) {
            this.addWindowStartTimeColumn = addWindowStartTimeColumn;
            return this;
        }
        /** If true: append a "windowEndTime" column to every time step. Default: false. */
        public Builder addWindowEndTimeColumn(boolean addWindowEndTimeColumn) {
            this.addWindowEndTimeColumn = addWindowEndTimeColumn;
            return this;
        }
        /** If true: windows containing no time steps are dropped from the output. Default: false. */
        public Builder excludeEmptyWindows(boolean excludeEmptyWindows) {
            this.excludeEmptyWindows = excludeEmptyWindows;
            return this;
        }
        /**
         * Builds the {@code TimeWindowFunction}.
         *
         * @throws IllegalStateException if the time column or window size/unit were not set
         */
        public TimeWindowFunction build() {
            if (timeColumn == null)
                throw new IllegalStateException("Time column is null (not specified)");
            if (windowSize == -1 || windowSizeUnit == null)
                throw new IllegalStateException("Window size/unit not set");
            return new TimeWindowFunction(this);
        }
    }
}
| |
package org.apache.jsp.include_005fjsp.myTerrier;
import javax.servlet.*;
import javax.servlet.http.*;
import javax.servlet.jsp.*;
import net.violet.platform.util.StaticTools;
import net.violet.platform.util.SessionTools;
import net.violet.platform.datamodel.User;
import net.violet.platform.datamodel.Lang;
import net.violet.platform.util.DicoTools;
import net.violet.platform.util.MyConstantes;
public final class inc_005fmyTerrierMyself_jsp extends org.apache.jasper.runtime.HttpJspBase
implements org.apache.jasper.runtime.JspSourceDependent {
  // Files this generated page depends on (statically included JSP fragments); Jasper uses this
  // list (via getDependants()) to decide when the page must be recompiled.
  private static java.util.List _jspx_dependants;
  static {
    _jspx_dependants = new java.util.ArrayList(1);
    _jspx_dependants.add("/include_jsp/utils/inc_dico.jsp");
  }
  // Tag-handler pools (one per distinct tag + attribute-set combination used in the page).
  // Jasper pools tag handler instances to avoid re-instantiating them on every request;
  // all pools are created in _jspInit() and released in _jspDestroy().
  private org.apache.jasper.runtime.TagHandlerPool _jspx_tagPool_bean_define_value_id_nobody;
  private org.apache.jasper.runtime.TagHandlerPool _jspx_tagPool_html_form_styleId_onsubmit_action_acceptCharset;
  private org.apache.jasper.runtime.TagHandlerPool _jspx_tagPool_html_hidden_value_property_nobody;
  private org.apache.jasper.runtime.TagHandlerPool _jspx_tagPool_bean_write_property_name_nobody;
  private org.apache.jasper.runtime.TagHandlerPool _jspx_tagPool_html_textarea_rows_property;
  private org.apache.jasper.runtime.TagHandlerPool _jspx_tagPool_bean_define_type_property_name_id_nobody;
  private org.apache.jasper.runtime.TagHandlerPool _jspx_tagPool_logic_equal_value_property_name;
  private org.apache.jasper.runtime.TagHandlerPool _jspx_tagPool_logic_notEqual_value_property_name;
  private org.apache.jasper.runtime.TagHandlerPool _jspx_tagPool_html_text_styleClass_property_nobody;
  private org.apache.jasper.runtime.TagHandlerPool _jspx_tagPool_html_select_property;
  private org.apache.jasper.runtime.TagHandlerPool _jspx_tagPool_html_optionsCollection_value_styleClass_property_name_label_nobody;
  private org.apache.jasper.runtime.TagHandlerPool _jspx_tagPool_html_radio_value_property;
  private org.apache.jasper.runtime.TagHandlerPool _jspx_tagPool_html_text_size_property_nobody;
  private org.apache.jasper.runtime.TagHandlerPool _jspx_tagPool_html_text_styleClass_size_property_nobody;
  private org.apache.jasper.runtime.TagHandlerPool _jspx_tagPool_html_select_styleClass_property;
  private org.apache.jasper.runtime.TagHandlerPool _jspx_tagPool_html_optionsCollection_value_property_label_nobody;
  private org.apache.jasper.runtime.TagHandlerPool _jspx_tagPool_logic_greaterThan_value_name;
  private org.apache.jasper.runtime.TagHandlerPool _jspx_tagPool_html_checkbox_value_property_nobody;
  /** @return the list of statically-included files this page depends on (JspSourceDependent). */
  public Object getDependants() {
    return _jspx_dependants;
  }
  /**
   * Generated JSP lifecycle init: acquires one shared tag-handler pool per distinct
   * tag/attribute-set combination used in the page. Counterpart of {@link #_jspDestroy()}.
   */
  public void _jspInit() {
    _jspx_tagPool_bean_define_value_id_nobody = org.apache.jasper.runtime.TagHandlerPool.getTagHandlerPool(getServletConfig());
    _jspx_tagPool_html_form_styleId_onsubmit_action_acceptCharset = org.apache.jasper.runtime.TagHandlerPool.getTagHandlerPool(getServletConfig());
    _jspx_tagPool_html_hidden_value_property_nobody = org.apache.jasper.runtime.TagHandlerPool.getTagHandlerPool(getServletConfig());
    _jspx_tagPool_bean_write_property_name_nobody = org.apache.jasper.runtime.TagHandlerPool.getTagHandlerPool(getServletConfig());
    _jspx_tagPool_html_textarea_rows_property = org.apache.jasper.runtime.TagHandlerPool.getTagHandlerPool(getServletConfig());
    _jspx_tagPool_bean_define_type_property_name_id_nobody = org.apache.jasper.runtime.TagHandlerPool.getTagHandlerPool(getServletConfig());
    _jspx_tagPool_logic_equal_value_property_name = org.apache.jasper.runtime.TagHandlerPool.getTagHandlerPool(getServletConfig());
    _jspx_tagPool_logic_notEqual_value_property_name = org.apache.jasper.runtime.TagHandlerPool.getTagHandlerPool(getServletConfig());
    _jspx_tagPool_html_text_styleClass_property_nobody = org.apache.jasper.runtime.TagHandlerPool.getTagHandlerPool(getServletConfig());
    _jspx_tagPool_html_select_property = org.apache.jasper.runtime.TagHandlerPool.getTagHandlerPool(getServletConfig());
    _jspx_tagPool_html_optionsCollection_value_styleClass_property_name_label_nobody = org.apache.jasper.runtime.TagHandlerPool.getTagHandlerPool(getServletConfig());
    _jspx_tagPool_html_radio_value_property = org.apache.jasper.runtime.TagHandlerPool.getTagHandlerPool(getServletConfig());
    _jspx_tagPool_html_text_size_property_nobody = org.apache.jasper.runtime.TagHandlerPool.getTagHandlerPool(getServletConfig());
    _jspx_tagPool_html_text_styleClass_size_property_nobody = org.apache.jasper.runtime.TagHandlerPool.getTagHandlerPool(getServletConfig());
    _jspx_tagPool_html_select_styleClass_property = org.apache.jasper.runtime.TagHandlerPool.getTagHandlerPool(getServletConfig());
    _jspx_tagPool_html_optionsCollection_value_property_label_nobody = org.apache.jasper.runtime.TagHandlerPool.getTagHandlerPool(getServletConfig());
    _jspx_tagPool_logic_greaterThan_value_name = org.apache.jasper.runtime.TagHandlerPool.getTagHandlerPool(getServletConfig());
    _jspx_tagPool_html_checkbox_value_property_nobody = org.apache.jasper.runtime.TagHandlerPool.getTagHandlerPool(getServletConfig());
  }
  /**
   * Generated JSP lifecycle teardown: releases every tag-handler pool acquired in
   * {@link #_jspInit()}.
   */
  public void _jspDestroy() {
    _jspx_tagPool_bean_define_value_id_nobody.release();
    _jspx_tagPool_html_form_styleId_onsubmit_action_acceptCharset.release();
    _jspx_tagPool_html_hidden_value_property_nobody.release();
    _jspx_tagPool_bean_write_property_name_nobody.release();
    _jspx_tagPool_html_textarea_rows_property.release();
    _jspx_tagPool_bean_define_type_property_name_id_nobody.release();
    _jspx_tagPool_logic_equal_value_property_name.release();
    _jspx_tagPool_logic_notEqual_value_property_name.release();
    _jspx_tagPool_html_text_styleClass_property_nobody.release();
    _jspx_tagPool_html_select_property.release();
    _jspx_tagPool_html_optionsCollection_value_styleClass_property_name_label_nobody.release();
    _jspx_tagPool_html_radio_value_property.release();
    _jspx_tagPool_html_text_size_property_nobody.release();
    _jspx_tagPool_html_text_styleClass_size_property_nobody.release();
    _jspx_tagPool_html_select_styleClass_property.release();
    _jspx_tagPool_html_optionsCollection_value_property_label_nobody.release();
    _jspx_tagPool_logic_greaterThan_value_name.release();
    _jspx_tagPool_html_checkbox_value_property_nobody.release();
  }
  /**
   * Jasper-generated service method: renders the "my profile" (myTerrierMyself) form fragment.
   * It resolves the session language and user, then emits a Struts html:form with the profile
   * fields (announcement textarea, picture, name, birth date, sex, address, email, directory
   * opt-out) interleaved with static HTML template text.
   * NOTE: generated code — do not hand-edit the logic; regenerate from the .jsp instead.
   */
  public void _jspService(HttpServletRequest request, HttpServletResponse response)
        throws java.io.IOException, ServletException {
    // Standard JSP implicit objects
    JspFactory _jspxFactory = null;
    PageContext pageContext = null;
    HttpSession session = null;
    ServletContext application = null;
    ServletConfig config = null;
    JspWriter out = null;
    Object page = this;
    JspWriter _jspx_out = null;
    PageContext _jspx_page_context = null;
    try {
      // Acquire the page context (8KB buffer, session-aware) and the implicit objects
      _jspxFactory = JspFactory.getDefaultFactory();
      response.setContentType("text/html;charset=UTF-8");
      pageContext = _jspxFactory.getPageContext(this, request, response,
      			null, true, 8192, true);
      _jspx_page_context = pageContext;
      application = pageContext.getServletContext();
      config = pageContext.getServletConfig();
      session = pageContext.getSession();
      out = pageContext.getOut();
      _jspx_out = out;
      out.write("\r\n\r\n\r\n\r\n\r\n\r\n");
      response.setContentType("text/html;charset=UTF-8");
      out.write("\r\n\r\n\r\n\r\n\r\n");
      out.write('\n');
      out.write('\r');
      out.write('\n');
      out.write("\r\n\r\n\r\n");
      // Dictionary language used for all localised labels (from inc_dico.jsp)
      Lang dico_lang = SessionTools.getLangFromSession(session, request);
      out.write('\r');
      out.write('\n');
      // Current user; null when not logged in, in which case the picture id falls back to "0"
      final User theUser = SessionTools.getUserFromSession(request);
      final String userId;
      if (theUser == null) {
        userId = "0";
      } else {
        userId = theUser.getId().toString();
      }
      out.write('\r');
      out.write('\n');
      String user_main = Long.toString(SessionTools.getRabbitIdFromSession(session));
      out.write("\r\n\r\n");
      // bean:define -- expose the rabbit id as page attribute "userMain"
      org.apache.struts.taglib.bean.DefineTag _jspx_th_bean_define_0 = (org.apache.struts.taglib.bean.DefineTag) _jspx_tagPool_bean_define_value_id_nobody.get(org.apache.struts.taglib.bean.DefineTag.class);
      _jspx_th_bean_define_0.setPageContext(_jspx_page_context);
      _jspx_th_bean_define_0.setParent(null);
      _jspx_th_bean_define_0.setId("userMain");
      _jspx_th_bean_define_0.setValue(user_main );
      int _jspx_eval_bean_define_0 = _jspx_th_bean_define_0.doStartTag();
      if (_jspx_th_bean_define_0.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
        _jspx_tagPool_bean_define_value_id_nobody.reuse(_jspx_th_bean_define_0);
        return;
      }
      _jspx_tagPool_bean_define_value_id_nobody.reuse(_jspx_th_bean_define_0);
      java.lang.String userMain = null;
      userMain = (java.lang.String) _jspx_page_context.findAttribute("userMain");
      out.write("\r\n\r\n\r\n");
      // html:form -- profile form posting to /action/myTerrierMyself, validated client-side
      org.apache.struts.taglib.html.FormTag _jspx_th_html_form_0 = (org.apache.struts.taglib.html.FormTag) _jspx_tagPool_html_form_styleId_onsubmit_action_acceptCharset.get(org.apache.struts.taglib.html.FormTag.class);
      _jspx_th_html_form_0.setPageContext(_jspx_page_context);
      _jspx_th_html_form_0.setParent(null);
      _jspx_th_html_form_0.setAction("/action/myTerrierMyself");
      _jspx_th_html_form_0.setStyleId("idForm");
      _jspx_th_html_form_0.setAcceptCharset("UTF-8");
      _jspx_th_html_form_0.setOnsubmit("return validateMyProfil();");
      int _jspx_eval_html_form_0 = _jspx_th_html_form_0.doStartTag();
      if (_jspx_eval_html_form_0 != javax.servlet.jsp.tagext.Tag.SKIP_BODY) {
        do {
          out.write('\r');
          out.write('\n');
          // Hidden "dispatch=change" field consumed by the Struts dispatch action
          if (_jspx_meth_html_hidden_0(_jspx_th_html_form_0, _jspx_page_context))
            return;
          out.write('\r');
          out.write('\n');
          out.write('\r');
          out.write('\n');
          out.write("\r\n\r\n<div class=\"flat-block\"> \r\n\t<div class=\"flat-block-top\">\r\n\t\t<h3 class=\"no-icone\">\r\n\t\t\r\n\t\t\t");
          out.write("\r\n\t\t\t");
          out.print(DicoTools.dico(dico_lang, "myTerrier/myrabbit_title"));
          out.write(" :\r\n\t\t\t");
          // Title: rabbit name from the form bean
          if (_jspx_meth_bean_write_0(_jspx_th_html_form_0, _jspx_page_context))
            return;
          out.write("\r\n\t\t\t\t\t\r\n\t\t</h3>\r\n\t</div>\r\n\r\n\t<div class=\"flat-block-content\">\r\n\t\t<div class=\"flat-block-content-inner\">\r\n\t\t");
          out.write("\r\n\r\n\t\t");
          out.write("\r\n\t\t\r\n\t\t<div class=\"twoCol-left\" >\r\n\t\t\t<label>");
          out.print(DicoTools.dico(dico_lang, "myTerrier/myrabbit_annonce"));
          out.write("</label>\t<br />\r\n\t\t\t");
          // html:textarea -- "rabbitAnnounce" announcement, pre-filled from the form bean
          org.apache.struts.taglib.html.TextareaTag _jspx_th_html_textarea_0 = (org.apache.struts.taglib.html.TextareaTag) _jspx_tagPool_html_textarea_rows_property.get(org.apache.struts.taglib.html.TextareaTag.class);
          _jspx_th_html_textarea_0.setPageContext(_jspx_page_context);
          _jspx_th_html_textarea_0.setParent((javax.servlet.jsp.tagext.Tag) _jspx_th_html_form_0);
          _jspx_th_html_textarea_0.setRows("5");
          _jspx_th_html_textarea_0.setProperty("rabbitAnnounce");
          int _jspx_eval_html_textarea_0 = _jspx_th_html_textarea_0.doStartTag();
          if (_jspx_eval_html_textarea_0 != javax.servlet.jsp.tagext.Tag.SKIP_BODY) {
            if (_jspx_eval_html_textarea_0 != javax.servlet.jsp.tagext.Tag.EVAL_BODY_INCLUDE) {
              out = _jspx_page_context.pushBody();
              _jspx_th_html_textarea_0.setBodyContent((javax.servlet.jsp.tagext.BodyContent) out);
              _jspx_th_html_textarea_0.doInitBody();
            }
            do {
              out.write("\r\n\t\t\t\t");
              // bean:define -- copy the form bean's rabbitAnnounce into a String for the body
              org.apache.struts.taglib.bean.DefineTag _jspx_th_bean_define_1 = (org.apache.struts.taglib.bean.DefineTag) _jspx_tagPool_bean_define_type_property_name_id_nobody.get(org.apache.struts.taglib.bean.DefineTag.class);
              _jspx_th_bean_define_1.setPageContext(_jspx_page_context);
              _jspx_th_bean_define_1.setParent((javax.servlet.jsp.tagext.Tag) _jspx_th_html_textarea_0);
              _jspx_th_bean_define_1.setName("myTerrierMyselfForm");
              _jspx_th_bean_define_1.setProperty("rabbitAnnounce");
              _jspx_th_bean_define_1.setId("rabbitAnnounce");
              _jspx_th_bean_define_1.setType("String");
              int _jspx_eval_bean_define_1 = _jspx_th_bean_define_1.doStartTag();
              if (_jspx_th_bean_define_1.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
                _jspx_tagPool_bean_define_type_property_name_id_nobody.reuse(_jspx_th_bean_define_1);
                return;
              }
              _jspx_tagPool_bean_define_type_property_name_id_nobody.reuse(_jspx_th_bean_define_1);
              String rabbitAnnounce = null;
              rabbitAnnounce = (String) _jspx_page_context.findAttribute("rabbitAnnounce");
              out.write("\r\n\t\t\t\t");
              out.print(rabbitAnnounce);
              out.write("\r\n\t\t\t");
              int evalDoAfterBody = _jspx_th_html_textarea_0.doAfterBody();
              if (evalDoAfterBody != javax.servlet.jsp.tagext.BodyTag.EVAL_BODY_AGAIN)
                break;
            } while (true);
            if (_jspx_eval_html_textarea_0 != javax.servlet.jsp.tagext.Tag.EVAL_BODY_INCLUDE) {
              out = _jspx_page_context.popBody();
            }
          }
          if (_jspx_th_html_textarea_0.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
            _jspx_tagPool_html_textarea_rows_property.reuse(_jspx_th_html_textarea_0);
            return;
          }
          _jspx_tagPool_html_textarea_rows_property.reuse(_jspx_th_html_textarea_0);
          out.write("\t\t\r\n\t\t</div>\r\n\r\n\t\t<div class=\"twoCol-right\" >\r\n\t\t\t\t\r\n\t\t\t\t<div align=\"center\">\r\n\t\t\t\t\t");
          out.write("\r\n\t\t\t\t\t<table border=\"0\" cellpadding=\"0\" cellspacing=\"0\" ><tr><td align=\"center\" valign=\"middle\" id=\"imageHolder\" >");
          // logic:equal -- user has a picture (rabbitPicture == 1): show the uploaded photo
          org.apache.struts.taglib.logic.EqualTag _jspx_th_logic_equal_0 = (org.apache.struts.taglib.logic.EqualTag) _jspx_tagPool_logic_equal_value_property_name.get(org.apache.struts.taglib.logic.EqualTag.class);
          _jspx_th_logic_equal_0.setPageContext(_jspx_page_context);
          _jspx_th_logic_equal_0.setParent((javax.servlet.jsp.tagext.Tag) _jspx_th_html_form_0);
          _jspx_th_logic_equal_0.setName("myTerrierMyselfForm");
          _jspx_th_logic_equal_0.setProperty("rabbitPicture");
          _jspx_th_logic_equal_0.setValue("1");
          int _jspx_eval_logic_equal_0 = _jspx_th_logic_equal_0.doStartTag();
          if (_jspx_eval_logic_equal_0 != javax.servlet.jsp.tagext.Tag.SKIP_BODY) {
            do {
              out.write("<img class=\"user_picture\" src=\"../photo/");
              out.print(userId);
              out.write("_S.jpg\" />");
              int evalDoAfterBody = _jspx_th_logic_equal_0.doAfterBody();
              if (evalDoAfterBody != javax.servlet.jsp.tagext.BodyTag.EVAL_BODY_AGAIN)
                break;
            } while (true);
          }
          if (_jspx_th_logic_equal_0.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
            _jspx_tagPool_logic_equal_value_property_name.reuse(_jspx_th_logic_equal_0);
            return;
          }
          _jspx_tagPool_logic_equal_value_property_name.reuse(_jspx_th_logic_equal_0);
          // No picture: fall back to the default photo (logic:notEqual helper)
          if (_jspx_meth_logic_notEqual_0(_jspx_th_html_form_0, _jspx_page_context))
            return;
          out.write("</td></tr></table>\t\r\n\t\t\t\t</div>\r\n\r\n\r\n\t\t\t\t");
          out.write("\t\t\t\t\t\r\n\t\t\t\t <div align=\"center\" class=\"uploader-link\">\r\n\t\t\t\t \t\t<div id=\"pictureLoader\"></div>\r\n\t\t\t\t\t <a href=\"javascript:;\" onclick=\"$('#pictureLoader').toggle();\" >");
          out.print(DicoTools.dico(dico_lang, "myTerrier/myrabbit_picture_modify"));
          out.write("</a>\r\n\t\t\t\t</div>\t\r\n\t\t\t\t\t\r\n\t\t</div>\r\n\t\t\r\n\t\t<hr class=\"spacer\" />\r\n\t\t\r\n\t\t");
          out.write("\r\n\t\t <div class=\"form-line\">\r\n\t\t\t\t<label class=\"center\">\r\n\t\t\t\t\t");
          out.print(DicoTools.dico(dico_lang , "register/infos_user_first_name"));
          out.write("\r\n\t\t\t\t</label>\r\n\t\t\t\t<span>\r\n\t\t\t\t\t");
          // First name text field
          if (_jspx_meth_html_text_0(_jspx_th_html_form_0, _jspx_page_context))
            return;
          out.write("\t\r\n\t\t\t\t</span>\r\n\t\t\t</div>\r\n\r\n\t\t\t<div class=\"form-line\">\r\n\t\t\t\t<label class=\"center\">\r\n\t\t\t\t\t");
          out.print(DicoTools.dico(dico_lang , "register/infos_user_last_name"));
          out.write("\r\n\t\t\t\t</label>\r\n\t\t\t\t<span>\r\n\t\t\t\t\t");
          // Last name text field
          if (_jspx_meth_html_text_1(_jspx_th_html_form_0, _jspx_page_context))
            return;
          out.write("\t\r\n\t\t\t\t</span>\r\n\t\t\t</div>\r\n\t\t\t\r\n\t\t\r\n\t\t");
          out.write("\t\t\t\r\n\t\t <div class=\"form-line\">\r\n\t\t\t<label class=\"center\">\r\n\t\t\t\t");
          out.print(DicoTools.dico(dico_lang, "myTerrier/myrabbit_birth_date"));
          out.write("\r\n\t\t\t</label>\r\n\t\t\t<span>\r\n\t\t\t\t\t");
          // Birth date: three selects (day / month / year)
          if (_jspx_meth_html_select_0(_jspx_th_html_form_0, _jspx_page_context))
            return;
          out.write("\r\n\t\t\t\t\t");
          if (_jspx_meth_html_select_1(_jspx_th_html_form_0, _jspx_page_context))
            return;
          out.write("\r\n\t\t\t\t\t");
          if (_jspx_meth_html_select_2(_jspx_th_html_form_0, _jspx_page_context))
            return;
          out.write("\r\n\t\r\n\t\t\t</span>\r\n\t\t</div>\t\t\t\t\t\r\n\t\t\t\t\t\t\t\r\n\t\t");
          out.write("\t\t\t\r\n\t\t <div class=\"form-line\">\r\n\t\t\t<label class=\"center\">\r\n\t\t\t\t");
          out.print(DicoTools.dico(dico_lang, "myTerrier/myrabbit_i_am"));
          out.write("\r\n\t\t\t</label>\r\n\t\t\t<span>\r\n\t\t\t\t");
          // html:radio -- sex "F", labelled with the localised "girl" text
          org.apache.struts.taglib.html.RadioTag _jspx_th_html_radio_0 = (org.apache.struts.taglib.html.RadioTag) _jspx_tagPool_html_radio_value_property.get(org.apache.struts.taglib.html.RadioTag.class);
          _jspx_th_html_radio_0.setPageContext(_jspx_page_context);
          _jspx_th_html_radio_0.setParent((javax.servlet.jsp.tagext.Tag) _jspx_th_html_form_0);
          _jspx_th_html_radio_0.setProperty("annuSexe");
          _jspx_th_html_radio_0.setValue("F");
          int _jspx_eval_html_radio_0 = _jspx_th_html_radio_0.doStartTag();
          if (_jspx_eval_html_radio_0 != javax.servlet.jsp.tagext.Tag.SKIP_BODY) {
            if (_jspx_eval_html_radio_0 != javax.servlet.jsp.tagext.Tag.EVAL_BODY_INCLUDE) {
              out = _jspx_page_context.pushBody();
              _jspx_th_html_radio_0.setBodyContent((javax.servlet.jsp.tagext.BodyContent) out);
              _jspx_th_html_radio_0.doInitBody();
            }
            do {
              out.print(DicoTools.dico(dico_lang, "myTerrier/myrabbit_i_am_girl"));
              int evalDoAfterBody = _jspx_th_html_radio_0.doAfterBody();
              if (evalDoAfterBody != javax.servlet.jsp.tagext.BodyTag.EVAL_BODY_AGAIN)
                break;
            } while (true);
            if (_jspx_eval_html_radio_0 != javax.servlet.jsp.tagext.Tag.EVAL_BODY_INCLUDE) {
              out = _jspx_page_context.popBody();
            }
          }
          if (_jspx_th_html_radio_0.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
            _jspx_tagPool_html_radio_value_property.reuse(_jspx_th_html_radio_0);
            return;
          }
          _jspx_tagPool_html_radio_value_property.reuse(_jspx_th_html_radio_0);
          out.write("\r\n\t\t\t\t\t \r\n\t\t\t\t");
          // html:radio -- sex "H", labelled with the localised "boy" text
          org.apache.struts.taglib.html.RadioTag _jspx_th_html_radio_1 = (org.apache.struts.taglib.html.RadioTag) _jspx_tagPool_html_radio_value_property.get(org.apache.struts.taglib.html.RadioTag.class);
          _jspx_th_html_radio_1.setPageContext(_jspx_page_context);
          _jspx_th_html_radio_1.setParent((javax.servlet.jsp.tagext.Tag) _jspx_th_html_form_0);
          _jspx_th_html_radio_1.setProperty("annuSexe");
          _jspx_th_html_radio_1.setValue("H");
          int _jspx_eval_html_radio_1 = _jspx_th_html_radio_1.doStartTag();
          if (_jspx_eval_html_radio_1 != javax.servlet.jsp.tagext.Tag.SKIP_BODY) {
            if (_jspx_eval_html_radio_1 != javax.servlet.jsp.tagext.Tag.EVAL_BODY_INCLUDE) {
              out = _jspx_page_context.pushBody();
              _jspx_th_html_radio_1.setBodyContent((javax.servlet.jsp.tagext.BodyContent) out);
              _jspx_th_html_radio_1.doInitBody();
            }
            do {
              out.print(DicoTools.dico(dico_lang, "myTerrier/myrabbit_i_am_boy"));
              int evalDoAfterBody = _jspx_th_html_radio_1.doAfterBody();
              if (evalDoAfterBody != javax.servlet.jsp.tagext.BodyTag.EVAL_BODY_AGAIN)
                break;
            } while (true);
            if (_jspx_eval_html_radio_1 != javax.servlet.jsp.tagext.Tag.EVAL_BODY_INCLUDE) {
              out = _jspx_page_context.popBody();
            }
          }
          if (_jspx_th_html_radio_1.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
            _jspx_tagPool_html_radio_value_property.reuse(_jspx_th_html_radio_1);
            return;
          }
          _jspx_tagPool_html_radio_value_property.reuse(_jspx_th_html_radio_1);
          out.write("\r\n\t\t\t</span>\r\n\t\t</div>\t\t\t\t\t\t\t\t\r\n\t\t\t\t\t\t\t\r\n\t\t");
          out.write("\t\t\t\r\n\t\t <div class=\"form-line\">\r\n\t\t\t<label class=\"center\">\r\n\t\t\t\t");
          out.print(DicoTools.dico(dico_lang, "myTerrier/myrabbit_zip_code"));
          out.write("\r\n\t\t\t</label>\r\n\t\t\t<span>\r\n\t\t\t\t");
          // Zip code text field
          if (_jspx_meth_html_text_2(_jspx_th_html_form_0, _jspx_page_context))
            return;
          out.write("\r\n\t\t\t</span>\r\n\t\t</div>\t\r\n\r\n\t\t");
          out.write("\t\t\t\r\n\t\t <div class=\"form-line\">\r\n\t\t\t<label class=\"center\">\r\n\t\t\t\t");
          out.print(DicoTools.dico(dico_lang, "myTerrier/myrabbit_city"));
          out.write("\r\n\t\t\t</label>\r\n\t\t\t<span>\r\n\t\t\t\t");
          // City text field
          if (_jspx_meth_html_text_3(_jspx_th_html_form_0, _jspx_page_context))
            return;
          out.write("\r\n\t\t\t</span>\r\n\t\t</div>\t\r\n\r\n\t\t");
          out.write("\t\t\t\r\n\t\t <div class=\"form-line\">\r\n\t\t\t<label class=\"center\">\r\n\t\t\t\t");
          out.print(DicoTools.dico(dico_lang, "myTerrier/myrabbit_country"));
          out.write("\r\n\t\t\t</label>\r\n\t\t\t<span>\r\n\t\t\t\t");
          // Country select
          if (_jspx_meth_html_select_3(_jspx_th_html_form_0, _jspx_page_context))
            return;
          out.write("\r\n\t\t\t</span>\r\n\t\t</div>\t\r\n\t\t\r\n\t\t");
          // logic:greaterThan -- only show the email line for a real rabbit id (userMain > 0)
          org.apache.struts.taglib.logic.GreaterThanTag _jspx_th_logic_greaterThan_0 = (org.apache.struts.taglib.logic.GreaterThanTag) _jspx_tagPool_logic_greaterThan_value_name.get(org.apache.struts.taglib.logic.GreaterThanTag.class);
          _jspx_th_logic_greaterThan_0.setPageContext(_jspx_page_context);
          _jspx_th_logic_greaterThan_0.setParent((javax.servlet.jsp.tagext.Tag) _jspx_th_html_form_0);
          _jspx_th_logic_greaterThan_0.setName("userMain");
          _jspx_th_logic_greaterThan_0.setValue("0");
          int _jspx_eval_logic_greaterThan_0 = _jspx_th_logic_greaterThan_0.doStartTag();
          if (_jspx_eval_logic_greaterThan_0 != javax.servlet.jsp.tagext.Tag.SKIP_BODY) {
            do {
              out.write("\t\t\t\t\t\t\t\r\n\t\t\t");
              out.write("\t\t\t\r\n\t\t\t <div class=\"form-line email\" >\r\n\t\t\t\t<label class=\"center\">\r\n\t\t\t\t\t");
              out.print(DicoTools.dico(dico_lang, "myTerrier/myrabbit_email"));
              out.write("\r\n\t\t\t\t</label>\r\n\t\t\t\t<span>\r\n\t\t\t\t\t");
              if (_jspx_meth_bean_write_1(_jspx_th_logic_greaterThan_0, _jspx_page_context))
                return;
              out.write("\r\n\t\t\t\t</span>\r\n\t\t\t</div>\t\t\t\t\t\t\t\t\t\t\r\n\t\t");
              int evalDoAfterBody = _jspx_th_logic_greaterThan_0.doAfterBody();
              if (evalDoAfterBody != javax.servlet.jsp.tagext.BodyTag.EVAL_BODY_AGAIN)
                break;
            } while (true);
          }
          if (_jspx_th_logic_greaterThan_0.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
            _jspx_tagPool_logic_greaterThan_value_name.reuse(_jspx_th_logic_greaterThan_0);
            return;
          }
          _jspx_tagPool_logic_greaterThan_value_name.reuse(_jspx_th_logic_greaterThan_0);
          out.write("\r\n\t\t\r\n\t\t");
          out.write("\t\t\t\r\n\t\t <div class=\"form-line\">\r\n\t\t\t<label class=\"center\">\r\n\t\t\t\t \r\n\t\t\t</label>\r\n\t\t\t<span>\r\n\t\t\t\t");
          // Directory opt-out checkbox
          if (_jspx_meth_html_checkbox_0(_jspx_th_html_form_0, _jspx_page_context))
            return;
          out.write("\r\n\t\t\t\t");
          out.print(DicoTools.dico(dico_lang, "myTerrier/myrabbit_no_directory"));
          out.write("\r\n\t\t\t</span>\r\n\t\t</div>\t\r\n\t\t\t\t\t\t\t\t\t\r\n\t\t<hr class=\"clearer\" />\r\n\r\n\t\t<div class=\"buttons\">\t\t\t\t\t\r\n\t\t\t<input type=\"submit\" class=\"genericBt\" name=\"modifProfil\" value=\"");
          out.print(DicoTools.dico(dico_lang, "myTerrier/myrabbit_modify_button"));
          out.write("\" />\r\n\t\t</div>\r\n\r\n\r\n\t\t");
          out.write("\t\t\r\n\t\t</div>\r\n\t</div>\r\n</div>\r\n");
          int evalDoAfterBody = _jspx_th_html_form_0.doAfterBody();
          if (evalDoAfterBody != javax.servlet.jsp.tagext.BodyTag.EVAL_BODY_AGAIN)
            break;
        } while (true);
      }
      if (_jspx_th_html_form_0.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
        _jspx_tagPool_html_form_styleId_onsubmit_action_acceptCharset.reuse(_jspx_th_html_form_0);
        return;
      }
      _jspx_tagPool_html_form_styleId_onsubmit_action_acceptCharset.reuse(_jspx_th_html_form_0);
      // Deferred load of the picture-upload widget into #pictureLoader
      out.write("\r\n\r\n<script type=\"text/javascript\">\n\tsetTimeout(function(){ // load the picture loader\n\t\tdivChangeUrl(\"pictureLoader\", \"myTerrierEditRabbitImage.do\");\t\n\t}, 200);\n</script>");
    } catch (Throwable t) {
      // Standard Jasper error handling: clear the buffer and delegate to the page error handler
      if (!(t instanceof SkipPageException)){
        out = _jspx_out;
        if (out != null && out.getBufferSize() != 0)
          out.clearBuffer();
        if (_jspx_page_context != null) _jspx_page_context.handlePageException(t);
      }
    } finally {
      if (_jspxFactory != null) _jspxFactory.releasePageContext(_jspx_page_context);
    }
  }
  /**
   * Renders the hidden "dispatch=change" form field (html:hidden).
   *
   * @return true if the tag requested SKIP_PAGE (caller must return immediately), false otherwise
   */
  private boolean _jspx_meth_html_hidden_0(javax.servlet.jsp.tagext.JspTag _jspx_th_html_form_0, PageContext _jspx_page_context)
          throws Throwable {
    PageContext pageContext = _jspx_page_context;
    JspWriter out = _jspx_page_context.getOut();
    // html:hidden
    org.apache.struts.taglib.html.HiddenTag _jspx_th_html_hidden_0 = (org.apache.struts.taglib.html.HiddenTag) _jspx_tagPool_html_hidden_value_property_nobody.get(org.apache.struts.taglib.html.HiddenTag.class);
    _jspx_th_html_hidden_0.setPageContext(_jspx_page_context);
    _jspx_th_html_hidden_0.setParent((javax.servlet.jsp.tagext.Tag) _jspx_th_html_form_0);
    _jspx_th_html_hidden_0.setProperty("dispatch");
    _jspx_th_html_hidden_0.setValue("change");
    int _jspx_eval_html_hidden_0 = _jspx_th_html_hidden_0.doStartTag();
    if (_jspx_th_html_hidden_0.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
      _jspx_tagPool_html_hidden_value_property_nobody.reuse(_jspx_th_html_hidden_0);
      return true;
    }
    _jspx_tagPool_html_hidden_value_property_nobody.reuse(_jspx_th_html_hidden_0);
    return false;
  }
// Renders <bean:write name="myTerrierMyselfForm" property="rabbitName"/>.
// Returns true when the tag requests SKIP_PAGE (caller must stop rendering).
private boolean _jspx_meth_bean_write_0(javax.servlet.jsp.tagext.JspTag _jspx_th_html_form_0, PageContext _jspx_page_context)
        throws Throwable {
    // bean:write — take a pooled handler, configure it, run the tag lifecycle.
    org.apache.struts.taglib.bean.WriteTag _jspx_th_bean_write_0 = (org.apache.struts.taglib.bean.WriteTag) _jspx_tagPool_bean_write_property_name_nobody.get(org.apache.struts.taglib.bean.WriteTag.class);
    _jspx_th_bean_write_0.setPageContext(_jspx_page_context);
    _jspx_th_bean_write_0.setParent((javax.servlet.jsp.tagext.Tag) _jspx_th_html_form_0);
    _jspx_th_bean_write_0.setName("myTerrierMyselfForm");
    _jspx_th_bean_write_0.setProperty("rabbitName");
    _jspx_th_bean_write_0.doStartTag(); // start result unused: tag has no body
    if (_jspx_th_bean_write_0.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
        _jspx_tagPool_bean_write_property_name_nobody.reuse(_jspx_th_bean_write_0);
        return true;
    }
    _jspx_tagPool_bean_write_property_name_nobody.reuse(_jspx_th_bean_write_0);
    return false;
}
// Renders <logic:notEqual name="myTerrierMyselfForm" property="rabbitPicture" value="1">:
// when the bean property is not "1", writes the default profile picture <img>.
// Returns true when the tag requests SKIP_PAGE (caller must stop rendering).
private boolean _jspx_meth_logic_notEqual_0(javax.servlet.jsp.tagext.JspTag _jspx_th_html_form_0, PageContext _jspx_page_context)
        throws Throwable {
    JspWriter out = _jspx_page_context.getOut();
    // logic:notEqual — take a pooled handler, configure it, run the tag lifecycle.
    org.apache.struts.taglib.logic.NotEqualTag _jspx_th_logic_notEqual_0 = (org.apache.struts.taglib.logic.NotEqualTag) _jspx_tagPool_logic_notEqual_value_property_name.get(org.apache.struts.taglib.logic.NotEqualTag.class);
    _jspx_th_logic_notEqual_0.setPageContext(_jspx_page_context);
    _jspx_th_logic_notEqual_0.setParent((javax.servlet.jsp.tagext.Tag) _jspx_th_html_form_0);
    _jspx_th_logic_notEqual_0.setName("myTerrierMyselfForm");
    _jspx_th_logic_notEqual_0.setProperty("rabbitPicture");
    _jspx_th_logic_notEqual_0.setValue("1");
    int _jspx_eval_logic_notEqual_0 = _jspx_th_logic_notEqual_0.doStartTag();
    if (_jspx_eval_logic_notEqual_0 != javax.servlet.jsp.tagext.Tag.SKIP_BODY) {
        // Evaluate the tag body until doAfterBody stops requesting re-evaluation.
        do {
            out.write("<img class=\"user_picture\" src=\"../photo/default_S.jpg\" />");
            int evalDoAfterBody = _jspx_th_logic_notEqual_0.doAfterBody();
            if (evalDoAfterBody != javax.servlet.jsp.tagext.BodyTag.EVAL_BODY_AGAIN)
                break;
        } while (true);
    }
    if (_jspx_th_logic_notEqual_0.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
        _jspx_tagPool_logic_notEqual_value_property_name.reuse(_jspx_th_logic_notEqual_0);
        return true;
    }
    _jspx_tagPool_logic_notEqual_value_property_name.reuse(_jspx_th_logic_notEqual_0);
    return false;
}
// Renders <html:text property="firstName" styleClass="formToolTip"/>.
// Returns true when the tag requests SKIP_PAGE (caller must stop rendering).
private boolean _jspx_meth_html_text_0(javax.servlet.jsp.tagext.JspTag _jspx_th_html_form_0, PageContext _jspx_page_context)
        throws Throwable {
    // html:text — take a pooled handler, configure it, run the tag lifecycle.
    org.apache.struts.taglib.html.TextTag _jspx_th_html_text_0 = (org.apache.struts.taglib.html.TextTag) _jspx_tagPool_html_text_styleClass_property_nobody.get(org.apache.struts.taglib.html.TextTag.class);
    _jspx_th_html_text_0.setPageContext(_jspx_page_context);
    _jspx_th_html_text_0.setParent((javax.servlet.jsp.tagext.Tag) _jspx_th_html_form_0);
    _jspx_th_html_text_0.setProperty("firstName");
    _jspx_th_html_text_0.setStyleClass("formToolTip");
    _jspx_th_html_text_0.doStartTag(); // start result unused: tag has no body
    if (_jspx_th_html_text_0.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
        _jspx_tagPool_html_text_styleClass_property_nobody.reuse(_jspx_th_html_text_0);
        return true;
    }
    _jspx_tagPool_html_text_styleClass_property_nobody.reuse(_jspx_th_html_text_0);
    return false;
}
// Renders <html:text property="lastName" styleClass="formToolTip"/>.
// Returns true when the tag requests SKIP_PAGE (caller must stop rendering).
private boolean _jspx_meth_html_text_1(javax.servlet.jsp.tagext.JspTag _jspx_th_html_form_0, PageContext _jspx_page_context)
        throws Throwable {
    // html:text — take a pooled handler, configure it, run the tag lifecycle.
    org.apache.struts.taglib.html.TextTag _jspx_th_html_text_1 = (org.apache.struts.taglib.html.TextTag) _jspx_tagPool_html_text_styleClass_property_nobody.get(org.apache.struts.taglib.html.TextTag.class);
    _jspx_th_html_text_1.setPageContext(_jspx_page_context);
    _jspx_th_html_text_1.setParent((javax.servlet.jsp.tagext.Tag) _jspx_th_html_form_0);
    _jspx_th_html_text_1.setProperty("lastName");
    _jspx_th_html_text_1.setStyleClass("formToolTip");
    _jspx_th_html_text_1.doStartTag(); // start result unused: tag has no body
    if (_jspx_th_html_text_1.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
        _jspx_tagPool_html_text_styleClass_property_nobody.reuse(_jspx_th_html_text_1);
        return true;
    }
    _jspx_tagPool_html_text_styleClass_property_nobody.reuse(_jspx_th_html_text_1);
    return false;
}
// Renders <html:select property="jour"> with its nested optionsCollection body.
// Returns true when a nested tag or this tag requests SKIP_PAGE.
private boolean _jspx_meth_html_select_0(javax.servlet.jsp.tagext.JspTag _jspx_th_html_form_0, PageContext _jspx_page_context)
        throws Throwable {
    JspWriter out = _jspx_page_context.getOut();
    // html:select — take a pooled handler, configure it, run the tag lifecycle.
    org.apache.struts.taglib.html.SelectTag _jspx_th_html_select_0 = (org.apache.struts.taglib.html.SelectTag) _jspx_tagPool_html_select_property.get(org.apache.struts.taglib.html.SelectTag.class);
    _jspx_th_html_select_0.setPageContext(_jspx_page_context);
    _jspx_th_html_select_0.setParent((javax.servlet.jsp.tagext.Tag) _jspx_th_html_form_0);
    _jspx_th_html_select_0.setProperty("jour");
    int _jspx_eval_html_select_0 = _jspx_th_html_select_0.doStartTag();
    if (_jspx_eval_html_select_0 != javax.servlet.jsp.tagext.Tag.SKIP_BODY) {
        if (_jspx_eval_html_select_0 != javax.servlet.jsp.tagext.Tag.EVAL_BODY_INCLUDE) {
            // Buffered body: divert output into a BodyContent the tag can inspect.
            out = _jspx_page_context.pushBody();
            _jspx_th_html_select_0.setBodyContent((javax.servlet.jsp.tagext.BodyContent) out);
            _jspx_th_html_select_0.doInitBody();
        }
        do {
            out.write("\r\n\t\t\t\t\t\t");
            if (_jspx_meth_html_optionsCollection_0(_jspx_th_html_select_0, _jspx_page_context))
                return true;
            out.write("\r\n\t\t\t\t\t");
            int evalDoAfterBody = _jspx_th_html_select_0.doAfterBody();
            if (evalDoAfterBody != javax.servlet.jsp.tagext.BodyTag.EVAL_BODY_AGAIN)
                break;
        } while (true);
        if (_jspx_eval_html_select_0 != javax.servlet.jsp.tagext.Tag.EVAL_BODY_INCLUDE) {
            out = _jspx_page_context.popBody();
        }
    }
    if (_jspx_th_html_select_0.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
        _jspx_tagPool_html_select_property.reuse(_jspx_th_html_select_0);
        return true;
    }
    _jspx_tagPool_html_select_property.reuse(_jspx_th_html_select_0);
    return false;
}
// Renders <html:optionsCollection name="myTerrierMyselfForm" property="listeJour"
// label="label" value="id" styleClass="date-jour"/> inside the "jour" select.
// Returns true when the tag requests SKIP_PAGE (caller must stop rendering).
private boolean _jspx_meth_html_optionsCollection_0(javax.servlet.jsp.tagext.JspTag _jspx_th_html_select_0, PageContext _jspx_page_context)
        throws Throwable {
    // html:optionsCollection — take a pooled handler, configure it, run the lifecycle.
    org.apache.struts.taglib.html.OptionsCollectionTag _jspx_th_html_optionsCollection_0 = (org.apache.struts.taglib.html.OptionsCollectionTag) _jspx_tagPool_html_optionsCollection_value_styleClass_property_name_label_nobody.get(org.apache.struts.taglib.html.OptionsCollectionTag.class);
    _jspx_th_html_optionsCollection_0.setPageContext(_jspx_page_context);
    _jspx_th_html_optionsCollection_0.setParent((javax.servlet.jsp.tagext.Tag) _jspx_th_html_select_0);
    _jspx_th_html_optionsCollection_0.setName("myTerrierMyselfForm");
    _jspx_th_html_optionsCollection_0.setProperty("listeJour");
    _jspx_th_html_optionsCollection_0.setLabel("label");
    _jspx_th_html_optionsCollection_0.setValue("id");
    _jspx_th_html_optionsCollection_0.setStyleClass("date-jour");
    _jspx_th_html_optionsCollection_0.doStartTag(); // start result unused: tag has no body
    if (_jspx_th_html_optionsCollection_0.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
        _jspx_tagPool_html_optionsCollection_value_styleClass_property_name_label_nobody.reuse(_jspx_th_html_optionsCollection_0);
        return true;
    }
    _jspx_tagPool_html_optionsCollection_value_styleClass_property_name_label_nobody.reuse(_jspx_th_html_optionsCollection_0);
    return false;
}
// Renders <html:select property="mois"> with its nested optionsCollection body.
// Returns true when a nested tag or this tag requests SKIP_PAGE.
private boolean _jspx_meth_html_select_1(javax.servlet.jsp.tagext.JspTag _jspx_th_html_form_0, PageContext _jspx_page_context)
        throws Throwable {
    JspWriter out = _jspx_page_context.getOut();
    // html:select — take a pooled handler, configure it, run the tag lifecycle.
    org.apache.struts.taglib.html.SelectTag _jspx_th_html_select_1 = (org.apache.struts.taglib.html.SelectTag) _jspx_tagPool_html_select_property.get(org.apache.struts.taglib.html.SelectTag.class);
    _jspx_th_html_select_1.setPageContext(_jspx_page_context);
    _jspx_th_html_select_1.setParent((javax.servlet.jsp.tagext.Tag) _jspx_th_html_form_0);
    _jspx_th_html_select_1.setProperty("mois");
    int _jspx_eval_html_select_1 = _jspx_th_html_select_1.doStartTag();
    if (_jspx_eval_html_select_1 != javax.servlet.jsp.tagext.Tag.SKIP_BODY) {
        if (_jspx_eval_html_select_1 != javax.servlet.jsp.tagext.Tag.EVAL_BODY_INCLUDE) {
            // Buffered body: divert output into a BodyContent the tag can inspect.
            out = _jspx_page_context.pushBody();
            _jspx_th_html_select_1.setBodyContent((javax.servlet.jsp.tagext.BodyContent) out);
            _jspx_th_html_select_1.doInitBody();
        }
        do {
            out.write("\r\n\t\t\t\t\t\t");
            if (_jspx_meth_html_optionsCollection_1(_jspx_th_html_select_1, _jspx_page_context))
                return true;
            out.write("\r\n\t\t\t\t\t");
            int evalDoAfterBody = _jspx_th_html_select_1.doAfterBody();
            if (evalDoAfterBody != javax.servlet.jsp.tagext.BodyTag.EVAL_BODY_AGAIN)
                break;
        } while (true);
        if (_jspx_eval_html_select_1 != javax.servlet.jsp.tagext.Tag.EVAL_BODY_INCLUDE) {
            out = _jspx_page_context.popBody();
        }
    }
    if (_jspx_th_html_select_1.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
        _jspx_tagPool_html_select_property.reuse(_jspx_th_html_select_1);
        return true;
    }
    _jspx_tagPool_html_select_property.reuse(_jspx_th_html_select_1);
    return false;
}
// Renders <html:optionsCollection name="myTerrierMyselfForm" property="listeMois"
// label="label" value="id" styleClass="date-mois"/> inside the "mois" select.
// Returns true when the tag requests SKIP_PAGE (caller must stop rendering).
private boolean _jspx_meth_html_optionsCollection_1(javax.servlet.jsp.tagext.JspTag _jspx_th_html_select_1, PageContext _jspx_page_context)
        throws Throwable {
    // html:optionsCollection — take a pooled handler, configure it, run the lifecycle.
    org.apache.struts.taglib.html.OptionsCollectionTag _jspx_th_html_optionsCollection_1 = (org.apache.struts.taglib.html.OptionsCollectionTag) _jspx_tagPool_html_optionsCollection_value_styleClass_property_name_label_nobody.get(org.apache.struts.taglib.html.OptionsCollectionTag.class);
    _jspx_th_html_optionsCollection_1.setPageContext(_jspx_page_context);
    _jspx_th_html_optionsCollection_1.setParent((javax.servlet.jsp.tagext.Tag) _jspx_th_html_select_1);
    _jspx_th_html_optionsCollection_1.setName("myTerrierMyselfForm");
    _jspx_th_html_optionsCollection_1.setProperty("listeMois");
    _jspx_th_html_optionsCollection_1.setLabel("label");
    _jspx_th_html_optionsCollection_1.setValue("id");
    _jspx_th_html_optionsCollection_1.setStyleClass("date-mois");
    _jspx_th_html_optionsCollection_1.doStartTag(); // start result unused: tag has no body
    if (_jspx_th_html_optionsCollection_1.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
        _jspx_tagPool_html_optionsCollection_value_styleClass_property_name_label_nobody.reuse(_jspx_th_html_optionsCollection_1);
        return true;
    }
    _jspx_tagPool_html_optionsCollection_value_styleClass_property_name_label_nobody.reuse(_jspx_th_html_optionsCollection_1);
    return false;
}
// Renders <html:select property="annee"> with its nested optionsCollection body.
// Returns true when a nested tag or this tag requests SKIP_PAGE.
private boolean _jspx_meth_html_select_2(javax.servlet.jsp.tagext.JspTag _jspx_th_html_form_0, PageContext _jspx_page_context)
        throws Throwable {
    JspWriter out = _jspx_page_context.getOut();
    // html:select — take a pooled handler, configure it, run the tag lifecycle.
    org.apache.struts.taglib.html.SelectTag _jspx_th_html_select_2 = (org.apache.struts.taglib.html.SelectTag) _jspx_tagPool_html_select_property.get(org.apache.struts.taglib.html.SelectTag.class);
    _jspx_th_html_select_2.setPageContext(_jspx_page_context);
    _jspx_th_html_select_2.setParent((javax.servlet.jsp.tagext.Tag) _jspx_th_html_form_0);
    _jspx_th_html_select_2.setProperty("annee");
    int _jspx_eval_html_select_2 = _jspx_th_html_select_2.doStartTag();
    if (_jspx_eval_html_select_2 != javax.servlet.jsp.tagext.Tag.SKIP_BODY) {
        if (_jspx_eval_html_select_2 != javax.servlet.jsp.tagext.Tag.EVAL_BODY_INCLUDE) {
            // Buffered body: divert output into a BodyContent the tag can inspect.
            out = _jspx_page_context.pushBody();
            _jspx_th_html_select_2.setBodyContent((javax.servlet.jsp.tagext.BodyContent) out);
            _jspx_th_html_select_2.doInitBody();
        }
        do {
            out.write("\r\n\t\t\t\t\t\t");
            if (_jspx_meth_html_optionsCollection_2(_jspx_th_html_select_2, _jspx_page_context))
                return true;
            out.write("\r\n\t\t\t\t\t");
            int evalDoAfterBody = _jspx_th_html_select_2.doAfterBody();
            if (evalDoAfterBody != javax.servlet.jsp.tagext.BodyTag.EVAL_BODY_AGAIN)
                break;
        } while (true);
        if (_jspx_eval_html_select_2 != javax.servlet.jsp.tagext.Tag.EVAL_BODY_INCLUDE) {
            out = _jspx_page_context.popBody();
        }
    }
    if (_jspx_th_html_select_2.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
        _jspx_tagPool_html_select_property.reuse(_jspx_th_html_select_2);
        return true;
    }
    _jspx_tagPool_html_select_property.reuse(_jspx_th_html_select_2);
    return false;
}
// Renders <html:optionsCollection name="myTerrierMyselfForm" property="listeAnnee"
// label="label" value="id" styleClass="date-annee"/> inside the "annee" select.
// Returns true when the tag requests SKIP_PAGE (caller must stop rendering).
private boolean _jspx_meth_html_optionsCollection_2(javax.servlet.jsp.tagext.JspTag _jspx_th_html_select_2, PageContext _jspx_page_context)
        throws Throwable {
    // html:optionsCollection — take a pooled handler, configure it, run the lifecycle.
    org.apache.struts.taglib.html.OptionsCollectionTag _jspx_th_html_optionsCollection_2 = (org.apache.struts.taglib.html.OptionsCollectionTag) _jspx_tagPool_html_optionsCollection_value_styleClass_property_name_label_nobody.get(org.apache.struts.taglib.html.OptionsCollectionTag.class);
    _jspx_th_html_optionsCollection_2.setPageContext(_jspx_page_context);
    _jspx_th_html_optionsCollection_2.setParent((javax.servlet.jsp.tagext.Tag) _jspx_th_html_select_2);
    _jspx_th_html_optionsCollection_2.setName("myTerrierMyselfForm");
    _jspx_th_html_optionsCollection_2.setProperty("listeAnnee");
    _jspx_th_html_optionsCollection_2.setLabel("label");
    _jspx_th_html_optionsCollection_2.setValue("id");
    _jspx_th_html_optionsCollection_2.setStyleClass("date-annee");
    _jspx_th_html_optionsCollection_2.doStartTag(); // start result unused: tag has no body
    if (_jspx_th_html_optionsCollection_2.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
        _jspx_tagPool_html_optionsCollection_value_styleClass_property_name_label_nobody.reuse(_jspx_th_html_optionsCollection_2);
        return true;
    }
    _jspx_tagPool_html_optionsCollection_value_styleClass_property_name_label_nobody.reuse(_jspx_th_html_optionsCollection_2);
    return false;
}
// Renders <html:text property="annuCp" size="30"/>.
// Returns true when the tag requests SKIP_PAGE (caller must stop rendering).
private boolean _jspx_meth_html_text_2(javax.servlet.jsp.tagext.JspTag _jspx_th_html_form_0, PageContext _jspx_page_context)
        throws Throwable {
    // html:text — take a pooled handler, configure it, run the tag lifecycle.
    org.apache.struts.taglib.html.TextTag _jspx_th_html_text_2 = (org.apache.struts.taglib.html.TextTag) _jspx_tagPool_html_text_size_property_nobody.get(org.apache.struts.taglib.html.TextTag.class);
    _jspx_th_html_text_2.setPageContext(_jspx_page_context);
    _jspx_th_html_text_2.setParent((javax.servlet.jsp.tagext.Tag) _jspx_th_html_form_0);
    _jspx_th_html_text_2.setProperty("annuCp");
    _jspx_th_html_text_2.setSize("30");
    _jspx_th_html_text_2.doStartTag(); // start result unused: tag has no body
    if (_jspx_th_html_text_2.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
        _jspx_tagPool_html_text_size_property_nobody.reuse(_jspx_th_html_text_2);
        return true;
    }
    _jspx_tagPool_html_text_size_property_nobody.reuse(_jspx_th_html_text_2);
    return false;
}
// Renders <html:text property="annuCity" size="30" styleClass="custom"/>.
// Returns true when the tag requests SKIP_PAGE (caller must stop rendering).
private boolean _jspx_meth_html_text_3(javax.servlet.jsp.tagext.JspTag _jspx_th_html_form_0, PageContext _jspx_page_context)
        throws Throwable {
    // html:text — take a pooled handler, configure it, run the tag lifecycle.
    org.apache.struts.taglib.html.TextTag _jspx_th_html_text_3 = (org.apache.struts.taglib.html.TextTag) _jspx_tagPool_html_text_styleClass_size_property_nobody.get(org.apache.struts.taglib.html.TextTag.class);
    _jspx_th_html_text_3.setPageContext(_jspx_page_context);
    _jspx_th_html_text_3.setParent((javax.servlet.jsp.tagext.Tag) _jspx_th_html_form_0);
    _jspx_th_html_text_3.setProperty("annuCity");
    _jspx_th_html_text_3.setSize("30");
    _jspx_th_html_text_3.setStyleClass("custom");
    _jspx_th_html_text_3.doStartTag(); // start result unused: tag has no body
    if (_jspx_th_html_text_3.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
        _jspx_tagPool_html_text_styleClass_size_property_nobody.reuse(_jspx_th_html_text_3);
        return true;
    }
    _jspx_tagPool_html_text_styleClass_size_property_nobody.reuse(_jspx_th_html_text_3);
    return false;
}
// Renders <html:select property="annuCountry" styleClass="select-pays"> with its
// nested country optionsCollection body.
// Returns true when a nested tag or this tag requests SKIP_PAGE.
private boolean _jspx_meth_html_select_3(javax.servlet.jsp.tagext.JspTag _jspx_th_html_form_0, PageContext _jspx_page_context)
        throws Throwable {
    JspWriter out = _jspx_page_context.getOut();
    // html:select — take a pooled handler, configure it, run the tag lifecycle.
    org.apache.struts.taglib.html.SelectTag _jspx_th_html_select_3 = (org.apache.struts.taglib.html.SelectTag) _jspx_tagPool_html_select_styleClass_property.get(org.apache.struts.taglib.html.SelectTag.class);
    _jspx_th_html_select_3.setPageContext(_jspx_page_context);
    _jspx_th_html_select_3.setParent((javax.servlet.jsp.tagext.Tag) _jspx_th_html_form_0);
    _jspx_th_html_select_3.setProperty("annuCountry");
    _jspx_th_html_select_3.setStyleClass("select-pays");
    int _jspx_eval_html_select_3 = _jspx_th_html_select_3.doStartTag();
    if (_jspx_eval_html_select_3 != javax.servlet.jsp.tagext.Tag.SKIP_BODY) {
        if (_jspx_eval_html_select_3 != javax.servlet.jsp.tagext.Tag.EVAL_BODY_INCLUDE) {
            // Buffered body: divert output into a BodyContent the tag can inspect.
            out = _jspx_page_context.pushBody();
            _jspx_th_html_select_3.setBodyContent((javax.servlet.jsp.tagext.BodyContent) out);
            _jspx_th_html_select_3.doInitBody();
        }
        do {
            out.write("\n\t\t\t\t<!-- PaysData -->\r\n\t\t\t\t\t");
            if (_jspx_meth_html_optionsCollection_3(_jspx_th_html_select_3, _jspx_page_context))
                return true;
            out.write("\r\n\t\t\t\t");
            int evalDoAfterBody = _jspx_th_html_select_3.doAfterBody();
            if (evalDoAfterBody != javax.servlet.jsp.tagext.BodyTag.EVAL_BODY_AGAIN)
                break;
        } while (true);
        if (_jspx_eval_html_select_3 != javax.servlet.jsp.tagext.Tag.EVAL_BODY_INCLUDE) {
            out = _jspx_page_context.popBody();
        }
    }
    if (_jspx_th_html_select_3.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
        _jspx_tagPool_html_select_styleClass_property.reuse(_jspx_th_html_select_3);
        return true;
    }
    _jspx_tagPool_html_select_styleClass_property.reuse(_jspx_th_html_select_3);
    return false;
}
// Renders <html:optionsCollection property="listePays" label="label" value="paysCode"/>
// inside the "annuCountry" select.
// Returns true when the tag requests SKIP_PAGE (caller must stop rendering).
private boolean _jspx_meth_html_optionsCollection_3(javax.servlet.jsp.tagext.JspTag _jspx_th_html_select_3, PageContext _jspx_page_context)
        throws Throwable {
    // html:optionsCollection — take a pooled handler, configure it, run the lifecycle.
    org.apache.struts.taglib.html.OptionsCollectionTag _jspx_th_html_optionsCollection_3 = (org.apache.struts.taglib.html.OptionsCollectionTag) _jspx_tagPool_html_optionsCollection_value_property_label_nobody.get(org.apache.struts.taglib.html.OptionsCollectionTag.class);
    _jspx_th_html_optionsCollection_3.setPageContext(_jspx_page_context);
    _jspx_th_html_optionsCollection_3.setParent((javax.servlet.jsp.tagext.Tag) _jspx_th_html_select_3);
    _jspx_th_html_optionsCollection_3.setProperty("listePays");
    _jspx_th_html_optionsCollection_3.setLabel("label");
    _jspx_th_html_optionsCollection_3.setValue("paysCode");
    _jspx_th_html_optionsCollection_3.doStartTag(); // start result unused: tag has no body
    if (_jspx_th_html_optionsCollection_3.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
        _jspx_tagPool_html_optionsCollection_value_property_label_nobody.reuse(_jspx_th_html_optionsCollection_3);
        return true;
    }
    _jspx_tagPool_html_optionsCollection_value_property_label_nobody.reuse(_jspx_th_html_optionsCollection_3);
    return false;
}
// Renders <bean:write name="myTerrierMyselfForm" property="rabbitMail"/> inside a
// logic:greaterThan parent tag.
// Returns true when the tag requests SKIP_PAGE (caller must stop rendering).
private boolean _jspx_meth_bean_write_1(javax.servlet.jsp.tagext.JspTag _jspx_th_logic_greaterThan_0, PageContext _jspx_page_context)
        throws Throwable {
    // bean:write — take a pooled handler, configure it, run the tag lifecycle.
    org.apache.struts.taglib.bean.WriteTag _jspx_th_bean_write_1 = (org.apache.struts.taglib.bean.WriteTag) _jspx_tagPool_bean_write_property_name_nobody.get(org.apache.struts.taglib.bean.WriteTag.class);
    _jspx_th_bean_write_1.setPageContext(_jspx_page_context);
    _jspx_th_bean_write_1.setParent((javax.servlet.jsp.tagext.Tag) _jspx_th_logic_greaterThan_0);
    _jspx_th_bean_write_1.setName("myTerrierMyselfForm");
    _jspx_th_bean_write_1.setProperty("rabbitMail");
    _jspx_th_bean_write_1.doStartTag(); // start result unused: tag has no body
    if (_jspx_th_bean_write_1.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
        _jspx_tagPool_bean_write_property_name_nobody.reuse(_jspx_th_bean_write_1);
        return true;
    }
    _jspx_tagPool_bean_write_property_name_nobody.reuse(_jspx_th_bean_write_1);
    return false;
}
// Renders <html:checkbox property="annuConfirm" value="1"/>.
// Returns true when the tag requests SKIP_PAGE (caller must stop rendering).
private boolean _jspx_meth_html_checkbox_0(javax.servlet.jsp.tagext.JspTag _jspx_th_html_form_0, PageContext _jspx_page_context)
        throws Throwable {
    // html:checkbox — take a pooled handler, configure it, run the tag lifecycle.
    org.apache.struts.taglib.html.CheckboxTag _jspx_th_html_checkbox_0 = (org.apache.struts.taglib.html.CheckboxTag) _jspx_tagPool_html_checkbox_value_property_nobody.get(org.apache.struts.taglib.html.CheckboxTag.class);
    _jspx_th_html_checkbox_0.setPageContext(_jspx_page_context);
    _jspx_th_html_checkbox_0.setParent((javax.servlet.jsp.tagext.Tag) _jspx_th_html_form_0);
    _jspx_th_html_checkbox_0.setProperty("annuConfirm");
    _jspx_th_html_checkbox_0.setValue("1");
    _jspx_th_html_checkbox_0.doStartTag(); // start result unused: tag has no body
    if (_jspx_th_html_checkbox_0.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
        _jspx_tagPool_html_checkbox_value_property_nobody.reuse(_jspx_th_html_checkbox_0);
        return true;
    }
    _jspx_tagPool_html_checkbox_value_property_nobody.reuse(_jspx_th_html_checkbox_0);
    return false;
}
}
| |
package edu.emory.cci.aiw.i2b2etl.dsb;
/*
* #%L
* AIW i2b2 ETL
* %%
* Copyright (C) 2012 - 2014 Emory University
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import java.io.IOException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.logging.Logger;
import org.arp.javautil.sql.InvalidConnectionSpecArguments;
import org.protempa.DataSourceReadException;
import org.protempa.KeySetSpec;
import org.protempa.backend.BackendInitializationException;
import org.protempa.backend.BackendInstanceSpec;
import org.protempa.backend.annotations.BackendInfo;
import org.protempa.backend.annotations.BackendProperty;
import org.protempa.backend.dsb.relationaldb.ColumnSpec;
import org.protempa.backend.dsb.relationaldb.Operator;
import org.protempa.backend.dsb.relationaldb.EntitySpec;
import org.protempa.backend.dsb.relationaldb.JDBCDateTimeTimestampDateValueFormat;
import org.protempa.backend.dsb.relationaldb.JDBCDateTimeTimestampPositionParser;
import org.protempa.backend.dsb.relationaldb.JDBCPositionFormat;
import org.protempa.backend.dsb.relationaldb.JoinSpec;
import org.protempa.backend.dsb.relationaldb.PropertySpec;
import org.protempa.backend.dsb.relationaldb.ReferenceSpec;
import org.protempa.backend.dsb.relationaldb.RelationalDbDataSourceBackend;
import org.protempa.backend.dsb.relationaldb.mappings.DefaultMappings;
import org.protempa.backend.dsb.relationaldb.mappings.Mappings;
import org.protempa.backend.dsb.relationaldb.mappings.ResourceMappingsFactory;
import org.protempa.proposition.value.AbsoluteTimeGranularity;
import org.protempa.proposition.value.AbsoluteTimeGranularityFactory;
import org.protempa.proposition.value.AbsoluteTimeUnit;
import org.protempa.proposition.value.AbsoluteTimeUnitFactory;
import org.protempa.proposition.value.GranularityFactory;
import org.protempa.proposition.value.UnitFactory;
import org.protempa.proposition.value.ValueType;
/**
*
* @author Andrew Post, Nita Deshpande
*/
@BackendInfo(displayName = "I2B2 Data Source Backend for Phenotype Search")
public final class I2B2DataSourceBackendForPhenotype extends RelationalDbDataSourceBackend {
// Factories for time units/granularities shared by all instances.
private final static AbsoluteTimeUnitFactory ABS_TIME_UNIT_FACTORY
= new AbsoluteTimeUnitFactory();
private final static AbsoluteTimeGranularityFactory ABS_TIME_GRANULARITY_FACTORY
= new AbsoluteTimeGranularityFactory();
// Parses JDBC date/time/timestamp columns into Protempa positions.
private static final JDBCPositionFormat POSITION_PARSER
= new JDBCDateTimeTimestampPositionParser();
// Default ontology root used for every terminology unless overridden.
private static final String DEFAULT_ROOT_FULL_NAME = "Eureka";
// i2b2 star-schema table names.
private final static String PATIENT_DIMENSION = "patient_dimension";
private final static String VISIT_DIMENSION = "visit_dimension";
private final static String OBSERVATION_FACT = "observation_fact";
private final static String PROVIDER_DIMENSION = "provider_dimension";
// Per-terminology ontology root full names (configurable via backend properties).
private String labsRootFullName;
private String vitalsRootFullName;
private String diagnosisCodesRootFullName;
private String medicationOrdersRootFullName;
private String icd9ProcedureCodesRootFullName;
private String icd10DiagnosisCodesRootFullName;
private String icd10ProcedureCodesRootFullName;
// private String cptProcedureCodesRootFullName;
private final static Logger LOGGER
= Logger.getLogger(I2B2DataSourceBackendForPhenotype.class.getPackage().getName());
// i2b2 CRC query result instance id used to restrict queries to a key set.
private Long resultInstanceId;
/**
 * Creates the backend with defaults: patient_dimension as the key id table,
 * patient_num as the key id/join column, "Eureka" as every terminology root,
 * and mappings resources loaded from /etc/i2b2dsb/ on the classpath.
 */
public I2B2DataSourceBackendForPhenotype() {
setDefaultKeyIdTable(PATIENT_DIMENSION);
/*
* Per the i2b2 1.7 CRC design docs and stored procedure
* implementation,
* to_char(patient_num) = patient_ide when patient_ide_source = 'HIVE'.
*/
setDefaultKeyIdColumn("patient_num");
setDefaultKeyIdJoinKey("patient_num");
this.labsRootFullName = DEFAULT_ROOT_FULL_NAME;
this.vitalsRootFullName = DEFAULT_ROOT_FULL_NAME;
this.diagnosisCodesRootFullName = DEFAULT_ROOT_FULL_NAME;
this.medicationOrdersRootFullName = DEFAULT_ROOT_FULL_NAME;
this.icd9ProcedureCodesRootFullName = DEFAULT_ROOT_FULL_NAME;
this.icd10DiagnosisCodesRootFullName = DEFAULT_ROOT_FULL_NAME;
this.icd10ProcedureCodesRootFullName = DEFAULT_ROOT_FULL_NAME;
// this.cptProcedureCodesRootFullName = DEFAULT_ROOT_FULL_NAME;
setMappingsFactory(new ResourceMappingsFactory("/etc/i2b2dsb/", getClass()));
}
/**
 * Initializes the backend from the given configuration; currently delegates
 * entirely to the superclass (kept as an explicit extension point).
 */
@Override
public void initialize(BackendInstanceSpec config) throws BackendInitializationException {
super.initialize(config);
}
/**
 * Builds the constant (non-temporal) entity specs — Patients, Patient Aliases,
 * Patient Details, and Providers — mapped onto the i2b2 star schema. In key-set
 * mode, each spec is additionally constrained to the rows whose
 * RESULT_INSTANCE_ID matches this backend's {@code resultInstanceId}.
 *
 * @param keyIdSchema schema containing the key id table.
 * @param keyIdTable table containing the key ids (patient_dimension by default).
 * @param keyIdColumn column containing the key id values.
 * @param keyIdJoinKey column used to join the key id table to other tables.
 * @return the constant {@link EntitySpec}s.
 * @throws IOException if a mappings resource cannot be read.
 */
@Override
protected EntitySpec[] constantSpecs(String keyIdSchema, String keyIdTable, String keyIdColumn, String keyIdJoinKey) throws IOException {
// Plain maps instead of double-brace initialization: the anonymous HashMap
// subclass idiom pins a reference to this backend instance and adds a
// needless extra class per call site.
HashMap<Object, String> riIdMap = new HashMap<Object, String>();
riIdMap.put(resultInstanceId, "" + resultInstanceId);
Mappings riId = new DefaultMappings(riIdMap);
HashMap<Object, String> hiveMap = new HashMap<Object, String>();
hiveMap.put("HIVE", "HIVE");
Mappings hive = new DefaultMappings(hiveMap);
String schemaName = getSchemaName();
return new EntitySpec[]{
new EntitySpec("Patients",
null,
new String[]{"Patient"},
true,
new ColumnSpec(keyIdSchema, keyIdTable, keyIdColumn),
new ColumnSpec[]{
new ColumnSpec(keyIdSchema, keyIdTable, keyIdColumn)
},
null,
null,
new PropertySpec[]{
/*
* This should be patient_ide where patient_ide_source = 'HIVE'.
*/
new PropertySpec("patientId", null, new ColumnSpec(keyIdSchema, keyIdTable, keyIdColumn), ValueType.NOMINALVALUE)
},
new ReferenceSpec[]{
new ReferenceSpec("encounters", "Encounters", new ColumnSpec[]{new ColumnSpec(keyIdSchema, keyIdTable, new JoinSpec(keyIdJoinKey, "patient_num", new ColumnSpec(schemaName, VISIT_DIMENSION, "encounter_num")))}, ReferenceSpec.Type.MANY),
new ReferenceSpec("patientDetails", "Patient Details", new ColumnSpec[]{new ColumnSpec(keyIdSchema, keyIdTable, keyIdColumn)}, ReferenceSpec.Type.MANY),
new ReferenceSpec("patientAliases", "Patient Aliases", new ColumnSpec[]{new ColumnSpec(keyIdSchema, keyIdTable, new JoinSpec(keyIdJoinKey, "patient_num", new ColumnSpec(schemaName, "PATIENT_MAPPING", "PATIENT_IDE")))}, ReferenceSpec.Type.MANY)
},
null, null,
// Key-set mode: restrict to patients in the selected CRC result instance.
isInKeySetMode()
? new ColumnSpec[]{
new ColumnSpec(keyIdSchema, keyIdTable, "RESULT_INSTANCE_ID", Operator.EQUAL_TO, riId)
}
: null,
null, null, null, null, null),
new EntitySpec("Patient Aliases",
null,
new String[]{"PatientAlias"},
true,
new ColumnSpec(keyIdSchema, keyIdTable, keyIdColumn, new JoinSpec(keyIdJoinKey, "patient_num", new ColumnSpec(schemaName, "PATIENT_MAPPING"))),
new ColumnSpec[]{
new ColumnSpec(schemaName, "PATIENT_MAPPING", "PATIENT_IDE"),
new ColumnSpec(schemaName, "PATIENT_MAPPING", "PATIENT_IDE_SOURCE")
},
null,
null,
new PropertySpec[]{
new PropertySpec("patientId", null, new ColumnSpec(schemaName, "PATIENT_MAPPING", "PATIENT_IDE"), ValueType.NOMINALVALUE),
new PropertySpec("fieldName", null, new ColumnSpec(schemaName, "PATIENT_MAPPING", "PATIENT_IDE_SOURCE"), ValueType.NOMINALVALUE)
},
null,
null, null,
// Aliases exclude the internal 'HIVE' source; in key-set mode, also
// restrict to the selected result instance.
isInKeySetMode()
? new ColumnSpec[]{
new ColumnSpec(schemaName, "PATIENT_MAPPING", new JoinSpec("patient_num", keyIdJoinKey, new ColumnSpec(keyIdSchema, keyIdTable, "RESULT_INSTANCE_ID", Operator.EQUAL_TO, riId))),
new ColumnSpec(schemaName, "PATIENT_MAPPING", "PATIENT_IDE_SOURCE", Operator.NOT_EQUAL_TO, hive)
}
: new ColumnSpec[]{new ColumnSpec(schemaName, "PATIENT_MAPPING", "PATIENT_IDE_SOURCE", Operator.NOT_EQUAL_TO, hive)},
null, null, null, null, null),
new EntitySpec("Patient Details",
null,
new String[]{"PatientDetails"},
true,
new ColumnSpec(keyIdSchema, keyIdTable, keyIdColumn), //new JoinSpec(keyIdJoinKey, "patient_num", new ColumnSpec(schemaName, PATIENT_DIMENSION))),
new ColumnSpec[]{
new ColumnSpec(schemaName, PATIENT_DIMENSION, "patient_num")
},
null,
null,
new PropertySpec[]{
new PropertySpec("dateOfBirth", null, new ColumnSpec(schemaName, PATIENT_DIMENSION, "BIRTH_DATE"), ValueType.DATEVALUE, new JDBCDateTimeTimestampDateValueFormat()),
new PropertySpec("patientId", null, new ColumnSpec(schemaName, PATIENT_DIMENSION, "patient_num"), ValueType.NOMINALVALUE),
new PropertySpec("gender", null, new ColumnSpec(schemaName, PATIENT_DIMENSION, "SEX_CD", Operator.EQUAL_TO, getMappingsFactory().getInstance("gender.txt"), true), ValueType.NOMINALVALUE),
new PropertySpec("race", null, new ColumnSpec(schemaName, PATIENT_DIMENSION, "RACE_CD", Operator.EQUAL_TO, getMappingsFactory().getInstance("race.txt"), true), ValueType.NOMINALVALUE),
new PropertySpec("ethnicity", null, new ColumnSpec(schemaName, PATIENT_DIMENSION, "RACE_CD", Operator.EQUAL_TO, getMappingsFactory().getInstance("ethnicity.txt"), true), ValueType.NOMINALVALUE),
new PropertySpec("language", null, new ColumnSpec(schemaName, PATIENT_DIMENSION, "LANGUAGE_CD", Operator.EQUAL_TO, getMappingsFactory().getInstance("language.txt"), true), ValueType.NOMINALVALUE),
new PropertySpec("maritalStatus", null, new ColumnSpec(schemaName, PATIENT_DIMENSION, "MARITAL_STATUS_CD", Operator.EQUAL_TO, getMappingsFactory().getInstance("marital_status.txt"), true), ValueType.NOMINALVALUE),},
new ReferenceSpec[]{
new ReferenceSpec("encounters", "Encounters",
new ColumnSpec[]{
new ColumnSpec(schemaName, PATIENT_DIMENSION, new JoinSpec("patient_num", "patient_num",
new ColumnSpec(schemaName, VISIT_DIMENSION, "encounter_num")))
}, ReferenceSpec.Type.MANY),
new ReferenceSpec("patient", "Patients",
new ColumnSpec[]{
new ColumnSpec(schemaName, PATIENT_DIMENSION, "patient_num")
}, ReferenceSpec.Type.ONE)
},
null, null,
isInKeySetMode() ? new ColumnSpec[]{new ColumnSpec(schemaName, PATIENT_DIMENSION, new JoinSpec("patient_num", keyIdJoinKey, new ColumnSpec(keyIdSchema, keyIdTable, "RESULT_INSTANCE_ID", Operator.EQUAL_TO, riId)))}: null,
null, null, null, null, null),
new EntitySpec("Providers", null,
new String[]{"AttendingPhysician"},
false,
// Reach provider_dimension via patient -> visit -> observation_fact -> provider.
new ColumnSpec(keyIdSchema, keyIdTable, keyIdColumn, new JoinSpec(keyIdJoinKey, "patient_num", new ColumnSpec(schemaName, PATIENT_DIMENSION, new JoinSpec("patient_num", "patient_num", new ColumnSpec(schemaName, VISIT_DIMENSION, new JoinSpec("encounter_num", "encounter_num", new ColumnSpec(schemaName, OBSERVATION_FACT, new JoinSpec("provider_id", "provider_id", new ColumnSpec(schemaName, PROVIDER_DIMENSION))))))))),
new ColumnSpec[]{
new ColumnSpec(schemaName, PROVIDER_DIMENSION, "provider_id")
},
null, null,
new PropertySpec[]{
new PropertySpec("fullName", null, new ColumnSpec(schemaName, PROVIDER_DIMENSION, "NAME_CHAR"), ValueType.NOMINALVALUE)
},
null, null, null, null, null, null, null, null, null),};
}
@Override
protected EntitySpec[] eventSpecs(String keyIdSchema, String keyIdTable, String keyIdColumn, String keyIdJoinKey) throws IOException {
    /*
     * Builds the event entity specs for this i2b2 data source: one
     * "Encounters" spec over VISIT_DIMENSION plus five concept-code-driven
     * specs over OBSERVATION_FACT (diagnoses, procedures, medication
     * orders). The five fact-based specs are structurally identical and are
     * produced by observationFactEventSpec(); only the display name, the
     * concept-code mapping file and the time granularity differ.
     */
    // Single-entry mapping of the configured result instance id onto itself;
    // used by the keyset-mode filters below to restrict rows to the selected
    // patient set (RESULT_INSTANCE_ID = <resultInstanceId>).
    HashMap<Object, String> riMap = new HashMap<>();
    riMap.put(resultInstanceId, "" + resultInstanceId);
    Mappings riId = new DefaultMappings(riMap);
    String schemaName = getSchemaName();
    Mappings icd9DxMappings = getMappingsFactory().getInstance("icd9_diagnosis.txt");
    Mappings icd9PxMappings = getMappingsFactory().getInstance("icd9_procedure.txt");
    Mappings medsMappings = getMappingsFactory().getInstance("meds.txt");
    Mappings icd10DxMappings = getMappingsFactory().getInstance("icd10cm_diagnosis.txt");
    Mappings icd10PcsMappings = getMappingsFactory().getInstance("icd10pcs_procedure.txt");
    EntitySpec[] eventSpecs = {
        new EntitySpec("Encounters",
            null,
            new String[]{"Encounter"},
            true,
            new ColumnSpec(keyIdSchema, keyIdTable, keyIdColumn, new JoinSpec(keyIdJoinKey, "patient_num", new ColumnSpec(schemaName, PATIENT_DIMENSION, new JoinSpec("patient_num", "patient_num", new ColumnSpec(schemaName, VISIT_DIMENSION))))),
            new ColumnSpec[]{
                /*
                 * This should be encounter_ide where encounter_ide_source = 'HIVE'.
                 * Like with patient_num, encounter_num = encounter_ide
                 * when encounter_ide_source = 'HIVE'.
                 */
                new ColumnSpec(schemaName, VISIT_DIMENSION, "encounter_num")
            },
            new ColumnSpec(schemaName, VISIT_DIMENSION, "START_DATE"),
            new ColumnSpec(schemaName, VISIT_DIMENSION, "END_DATE"),
            new PropertySpec[]{
                new PropertySpec("encounterId", null, new ColumnSpec(schemaName, VISIT_DIMENSION, "encounter_num"), ValueType.NOMINALVALUE),
            },
            new ReferenceSpec[]{
                new ReferenceSpec("patient", "Patients",
                    new ColumnSpec[]{
                        new ColumnSpec(schemaName, VISIT_DIMENSION, "patient_num")},
                    ReferenceSpec.Type.ONE),
                new ReferenceSpec("patientDetails", "Patient Details",
                    new ColumnSpec[]{
                        new ColumnSpec(schemaName, VISIT_DIMENSION, "patient_num")},
                    ReferenceSpec.Type.ONE),
                new ReferenceSpec("provider", "Providers",
                    new ColumnSpec[]{
                        new ColumnSpec(schemaName, VISIT_DIMENSION, new JoinSpec("encounter_num", "encounter_num", new ColumnSpec(schemaName, OBSERVATION_FACT, "provider_id")))
                    }, ReferenceSpec.Type.ONE)
            },
            null, null,
            // In keyset mode, only visits belonging to the selected patient
            // set are loaded.
            isInKeySetMode()
                    ? new ColumnSpec[]{new ColumnSpec(schemaName, VISIT_DIMENSION, new JoinSpec("patient_num", keyIdJoinKey, new ColumnSpec(keyIdSchema, keyIdTable, "RESULT_INSTANCE_ID", Operator.EQUAL_TO, riId)))}
                    : null,
            null, null,
            AbsoluteTimeGranularity.DAY, POSITION_PARSER, null),
        observationFactEventSpec("Diagnosis Codes", icd9DxMappings, AbsoluteTimeGranularity.DAY, schemaName, riId, keyIdSchema, keyIdTable, keyIdColumn, keyIdJoinKey),
        observationFactEventSpec("ICD9 Procedure Codes", icd9PxMappings, AbsoluteTimeGranularity.DAY, schemaName, riId, keyIdSchema, keyIdTable, keyIdColumn, keyIdJoinKey),
        observationFactEventSpec("ICD10 Diagnosis Codes", icd10DxMappings, AbsoluteTimeGranularity.DAY, schemaName, riId, keyIdSchema, keyIdTable, keyIdColumn, keyIdJoinKey),
        observationFactEventSpec("ICD10 Procedure Codes", icd10PcsMappings, AbsoluteTimeGranularity.DAY, schemaName, riId, keyIdSchema, keyIdTable, keyIdColumn, keyIdJoinKey),
        observationFactEventSpec("Medication Orders", medsMappings, AbsoluteTimeGranularity.MINUTE, schemaName, riId, keyIdSchema, keyIdTable, keyIdColumn, keyIdJoinKey),
        // NOTE(review): a "CPT Procedure Codes" spec (cpt_procedure.txt,
        // MINUTE granularity, no units) used to sit here as commented-out
        // code with the same shape; re-enable by adding another
        // observationFactEventSpec(...) call if needed.
    };
    return eventSpecs;
}

/**
 * Builds an event EntitySpec over OBSERVATION_FACT whose instances are
 * selected by concept code. The path from the key-id table, the unique-id
 * columns, the standard properties (code/encounterId/patientId/startDate/
 * endDate), the "encounter" reference and the keyset-mode filter are shared
 * by all callers.
 *
 * @param name display name of the entity spec
 * @param codeMappings concept-code mapping, used both for readTargets() and
 *        for the concept_cd constraint
 * @param granularity granularity of the fact's START_DATE
 * @param schemaName the i2b2 star-schema name
 * @param riId single-entry mapping of the selected result instance id, used
 *        by the keyset-mode row filter
 */
private EntitySpec observationFactEventSpec(String name, Mappings codeMappings, AbsoluteTimeGranularity granularity, String schemaName, Mappings riId, String keyIdSchema, String keyIdTable, String keyIdColumn, String keyIdJoinKey) {
    return new EntitySpec(name,
        null,
        codeMappings.readTargets(),
        true,
        new ColumnSpec(keyIdSchema, keyIdTable, keyIdColumn, new JoinSpec(keyIdJoinKey, "patient_num", new ColumnSpec(schemaName, PATIENT_DIMENSION, new JoinSpec("patient_num", "patient_num", new ColumnSpec(schemaName, VISIT_DIMENSION, new JoinSpec("encounter_num", "encounter_num", new ColumnSpec(schemaName, OBSERVATION_FACT))))))),
        new ColumnSpec[]{
            new ColumnSpec(schemaName, OBSERVATION_FACT, "encounter_num"),
            new ColumnSpec(schemaName, OBSERVATION_FACT, "concept_cd"),
            new ColumnSpec(schemaName, OBSERVATION_FACT, "provider_id"),
            new ColumnSpec(schemaName, OBSERVATION_FACT, "start_date")
        },
        new ColumnSpec(schemaName, OBSERVATION_FACT, "START_DATE"),
        null,
        new PropertySpec[]{
            new PropertySpec("code", null, new ColumnSpec(schemaName, OBSERVATION_FACT, "concept_cd"), ValueType.NOMINALVALUE),
            new PropertySpec("encounterId", null, new ColumnSpec(schemaName, OBSERVATION_FACT, "encounter_num"), ValueType.NOMINALVALUE),
            new PropertySpec("patientId", null, new ColumnSpec(schemaName, PATIENT_DIMENSION, "patient_num"), ValueType.NOMINALVALUE),
            new PropertySpec("startDate", null, new ColumnSpec(schemaName, OBSERVATION_FACT, "start_date"), ValueType.DATEVALUE),
            new PropertySpec("endDate", null, new ColumnSpec(schemaName, OBSERVATION_FACT, "end_date"), ValueType.DATEVALUE),
        },
        new ReferenceSpec[]{
            new ReferenceSpec("encounter", "Encounters", new ColumnSpec[]{new ColumnSpec(schemaName, OBSERVATION_FACT, "encounter_num")}, ReferenceSpec.Type.ONE)
        },
        null,
        // Row filter: keep only facts whose concept code is in the mapping.
        // (Last boolean is true in the spreadsheet class.)
        new ColumnSpec(schemaName, OBSERVATION_FACT, "concept_cd", Operator.EQUAL_TO, codeMappings, false),
        isInKeySetMode()
                ? new ColumnSpec[]{new ColumnSpec(schemaName, OBSERVATION_FACT, new JoinSpec("encounter_num", "encounter_num", new ColumnSpec(schemaName, VISIT_DIMENSION, new JoinSpec("patient_num", keyIdJoinKey, new ColumnSpec(keyIdSchema, keyIdTable, "RESULT_INSTANCE_ID", Operator.EQUAL_TO, riId)))))}
                : null,
        null, null,
        granularity, POSITION_PARSER, AbsoluteTimeUnit.YEAR);
}
// Builds the primitive-parameter (time-stamped value) entity specs: Labs and
// Vitals, both over OBSERVATION_FACT with nval_num as the observed value.
@Override
protected EntitySpec[] primitiveParameterSpecs(String keyIdSchema, String keyIdTable, String keyIdColumn, String keyIdJoinKey) throws IOException {
    // Single-entry mapping of the configured result instance id onto itself;
    // used by the keyset-mode row filter below.
    Mappings riId = new DefaultMappings(new HashMap<Object, String>() {
        {
            put(resultInstanceId, "" + resultInstanceId);
        }
    });
    String schemaName = getSchemaName();
    Mappings labsMappings = getMappingsFactory().getInstance("labs.txt");
    Mappings vitalsMappings = getMappingsFactory().getInstance("vitals_result_types.txt");
    EntitySpec[] primitiveParameterSpecs = new EntitySpec[]{
        new EntitySpec("Labs", null,
            labsMappings.readTargets(),
            true,
            // Path from the key-id table down to OBSERVATION_FACT via
            // PATIENT_DIMENSION and VISIT_DIMENSION.
            new ColumnSpec(keyIdSchema, keyIdTable, keyIdColumn, new JoinSpec(keyIdJoinKey, "patient_num", new ColumnSpec(schemaName, PATIENT_DIMENSION, new JoinSpec("patient_num", "patient_num", new ColumnSpec(schemaName, VISIT_DIMENSION, new JoinSpec("encounter_num","encounter_num", new ColumnSpec(schemaName, OBSERVATION_FACT))))))),
            new ColumnSpec[]{
                new ColumnSpec(schemaName, OBSERVATION_FACT, "encounter_num"),
                new ColumnSpec(schemaName, OBSERVATION_FACT, "concept_cd"),
                new ColumnSpec(schemaName, OBSERVATION_FACT, "provider_id"),
                new ColumnSpec(schemaName, OBSERVATION_FACT, "start_date")},
            new ColumnSpec(schemaName, OBSERVATION_FACT, "START_DATE"),
            null,
            new PropertySpec[]{
                new PropertySpec("code", null, new ColumnSpec(schemaName, OBSERVATION_FACT, "concept_cd"), ValueType.NOMINALVALUE),
                new PropertySpec("encounterId", null, new ColumnSpec(schemaName, OBSERVATION_FACT, "encounter_num"), ValueType.NOMINALVALUE),
                new PropertySpec("patientId", null, new ColumnSpec(schemaName, PATIENT_DIMENSION, "patient_num"), ValueType.NOMINALVALUE),
                new PropertySpec("startDate", null, new ColumnSpec(schemaName, OBSERVATION_FACT, "start_date"), ValueType.DATEVALUE),
                new PropertySpec("endDate", null, new ColumnSpec(schemaName, OBSERVATION_FACT, "end_date"), ValueType.DATEVALUE),
                // NOTE(review): numberValue/unitOfMeasure are declared
                // ValueType.DATEVALUE here (and in Vitals below); NUMBERVALUE
                // and NOMINALVALUE look intended — confirm before changing.
                new PropertySpec("numberValue", null, new ColumnSpec(schemaName, OBSERVATION_FACT, "nval_num"), ValueType.DATEVALUE),
                new PropertySpec("unitOfMeasure", null, new ColumnSpec(schemaName, OBSERVATION_FACT, "units_cd"), ValueType.DATEVALUE),
            },
            new ReferenceSpec[]{
                // NOTE(review): this reference resolves encounter_num via
                // VISIT_DIMENSION, while the Vitals spec below uses
                // OBSERVATION_FACT — confirm which is intended.
                new ReferenceSpec("encounter", "Encounters", new ColumnSpec[]{new ColumnSpec(schemaName, VISIT_DIMENSION, "encounter_num")}, ReferenceSpec.Type.ONE)
            },
            null,
            // NOTE(review): unlike Vitals, Labs passes null where Vitals has
            // the isInKeySetMode() row filter, and its constraint's trailing
            // boolean is true rather than false — confirm this asymmetry.
            new ColumnSpec(schemaName, OBSERVATION_FACT, "concept_cd", Operator.EQUAL_TO, labsMappings, true), null,
            new ColumnSpec(schemaName, OBSERVATION_FACT, "nval_num"),
            ValueType.VALUE, AbsoluteTimeGranularity.MINUTE, POSITION_PARSER, AbsoluteTimeUnit.YEAR),
        new EntitySpec("Vitals", null,
            vitalsMappings.readTargets(),
            true,
            new ColumnSpec(keyIdSchema, keyIdTable, keyIdColumn, new JoinSpec(keyIdJoinKey, "patient_num", new ColumnSpec(schemaName, PATIENT_DIMENSION, new JoinSpec("patient_num", "patient_num", new ColumnSpec(schemaName, VISIT_DIMENSION, new JoinSpec("encounter_num","encounter_num", new ColumnSpec(schemaName, OBSERVATION_FACT))))))),
            new ColumnSpec[]{
                new ColumnSpec(schemaName, OBSERVATION_FACT, "encounter_num"),
                new ColumnSpec(schemaName, OBSERVATION_FACT, "concept_cd"),
                new ColumnSpec(schemaName, OBSERVATION_FACT, "provider_id"),
                new ColumnSpec(schemaName, OBSERVATION_FACT, "start_date")},
            new ColumnSpec(schemaName, OBSERVATION_FACT, "START_DATE"),
            null,
            new PropertySpec[]{
                new PropertySpec("code", null, new ColumnSpec(schemaName, OBSERVATION_FACT, "concept_cd"), ValueType.NOMINALVALUE),
                new PropertySpec("encounterId", null, new ColumnSpec(schemaName, OBSERVATION_FACT, "encounter_num"), ValueType.NOMINALVALUE),
                new PropertySpec("patientId", null, new ColumnSpec(schemaName, PATIENT_DIMENSION, "patient_num"), ValueType.NOMINALVALUE),
                new PropertySpec("startDate", null, new ColumnSpec(schemaName, OBSERVATION_FACT, "start_date"), ValueType.DATEVALUE),
                new PropertySpec("endDate", null, new ColumnSpec(schemaName, OBSERVATION_FACT, "end_date"), ValueType.DATEVALUE),
                new PropertySpec("numberValue", null, new ColumnSpec(schemaName, OBSERVATION_FACT, "nval_num"), ValueType.DATEVALUE),
                new PropertySpec("unitOfMeasure", null, new ColumnSpec(schemaName, OBSERVATION_FACT, "units_cd"), ValueType.DATEVALUE),
            },
            new ReferenceSpec[]{
                new ReferenceSpec("encounter", "Encounters", new ColumnSpec[]{new ColumnSpec(schemaName, OBSERVATION_FACT, "encounter_num")}, ReferenceSpec.Type.ONE)
            },
            null,
            new ColumnSpec(schemaName, OBSERVATION_FACT, "concept_cd", Operator.EQUAL_TO, vitalsMappings, false),
            // Keyset mode: keep only facts for visits in the selected patient set.
            isInKeySetMode()? new ColumnSpec[]{new ColumnSpec(schemaName, OBSERVATION_FACT, new JoinSpec("encounter_num", "encounter_num", new ColumnSpec(schemaName, VISIT_DIMENSION, new JoinSpec("patient_num", keyIdJoinKey, new ColumnSpec(keyIdSchema, keyIdTable, "RESULT_INSTANCE_ID", Operator.EQUAL_TO, riId)))))}: null,
            new ColumnSpec(schemaName, OBSERVATION_FACT, "nval_num"), ValueType.VALUE,
            AbsoluteTimeGranularity.DAY, POSITION_PARSER, AbsoluteTimeUnit.YEAR)};
    return primitiveParameterSpecs;
}
// Returns the shared absolute-time granularity factory used to interpret
// all START_DATE/END_DATE columns in this backend.
@Override
public GranularityFactory getGranularityFactory() {
    return ABS_TIME_GRANULARITY_FACTORY;
}
// Returns the shared absolute-time unit factory (e.g. YEAR units used by the
// entity specs above).
@Override
public UnitFactory getUnitFactory() {
    return ABS_TIME_UNIT_FACTORY;
}
// Configures the i2b2 patient-set result instance to load key ids from and
// points the key loader at QT_PATIENT_SET_COLLECTION, joining on PATIENT_NUM.
// NOTE(review): displayName says "Query Master ID", but elsewhere in this
// class the value is compared against RESULT_INSTANCE_ID — confirm the label.
@BackendProperty(displayName = "Query Master ID")
public void setResultInstanceId(Long resultInstanceId) {
    this.resultInstanceId = resultInstanceId;
    setKeyLoaderKeyIdSchema(getSchemaName());
    setKeyLoaderKeyIdTable("QT_PATIENT_SET_COLLECTION");
    setKeyLoaderKeyIdColumn("PATIENT_NUM");
    setKeyLoaderKeyIdJoinKey("PATIENT_NUM");
}
// The configured result instance id, or null if none has been set.
public Long getResultInstanceId() {
    return this.resultInstanceId;
}
/**
 * Looks up the query-master name, owner and description for the configured
 * patient-set result instance and exposes it as a selectable key set.
 * RESULT_TYPE_ID = 1 is the i2b2 "patient set" result type; deleted queries
 * and instances (DELETE_FLAG = 'N') are excluded. Returns an empty array
 * when no result instance id is configured or no matching row exists.
 *
 * @throws DataSourceReadException on SQL errors or bad connection config.
 */
@Override
public KeySetSpec[] getSelectedKeySetSpecs() throws DataSourceReadException {
    List<KeySetSpec> result = new ArrayList<>();
    if (this.resultInstanceId != null) {
        try (Connection con = this.getConnectionSpecInstance().getOrCreate();
                PreparedStatement stmt = con.prepareStatement("SELECT A1.NAME, A1.USER_ID, A3.DESCRIPTION FROM QT_QUERY_MASTER A1 JOIN QT_QUERY_INSTANCE A2 ON (A1.QUERY_MASTER_ID=A2.QUERY_MASTER_ID) JOIN QT_QUERY_RESULT_INSTANCE A3 ON (A2.QUERY_INSTANCE_ID=A3.QUERY_INSTANCE_ID) WHERE A3.RESULT_INSTANCE_ID = ? AND A1.DELETE_FLAG = 'N' AND A2.DELETE_FLAG = 'N' AND A3.RESULT_TYPE_ID = 1")) {
            stmt.setLong(1, this.resultInstanceId);
            try (ResultSet rs = stmt.executeQuery()) {
                if (rs.next()) {
                    // Display name is "<query name> (<user id>)".
                    result.add(new KeySetSpec(getSourceSystem(), "" + this.resultInstanceId, rs.getString(1) + " (" + rs.getString(2) + ")", rs.getString(3)));
                }
            }
        } catch (SQLException | InvalidConnectionSpecArguments ex) {
            throw new DataSourceReadException(ex);
        }
    }
    // Zero-length array form of toArray: the JVM allocates the correctly
    // sized array; preferred over pre-sizing with result.size().
    return result.toArray(new KeySetSpec[0]);
}
// The key type for this backend: every key id identifies a patient.
@Override
public String getKeyType() {
    return "Patient";
}
/** Sets the root full name of the labs hierarchy; null selects the default. */
@BackendProperty
public void setLabsRootFullName(String labsRootFullName) {
    this.labsRootFullName = labsRootFullName == null ? "EK_LABS" : labsRootFullName;
}
// Root full name of the labs hierarchy (defaults to "EK_LABS").
public String getLabsRootFullName() {
    return labsRootFullName;
}
// Root full name of the vitals hierarchy (defaults to "EK_VITALS").
public String getVitalsRootFullName() {
    return vitalsRootFullName;
}
/** Sets the root full name of the vitals hierarchy; null selects the default. */
@BackendProperty
public void setVitalsRootFullName(String vitalsRootFullName) {
    this.vitalsRootFullName = vitalsRootFullName == null ? "EK_VITALS" : vitalsRootFullName;
}
// Root full name of the ICD-9 diagnosis hierarchy (defaults to "EK_ICD9D").
public String getDiagnosisCodesRootFullName() {
    return diagnosisCodesRootFullName;
}
/** Sets the ICD-9 diagnosis hierarchy root; null selects the default. */
@BackendProperty
public void setDiagnosisCodesRootFullName(String diagnosisCodesRootFullName) {
    this.diagnosisCodesRootFullName = diagnosisCodesRootFullName == null ? "EK_ICD9D" : diagnosisCodesRootFullName;
}
// Root full name of the medication-orders hierarchy (defaults to "EK_MED_ORDERS").
public String getMedicationOrdersRootFullName() {
    return medicationOrdersRootFullName;
}
/** Sets the medication-orders hierarchy root; null selects the default. */
@BackendProperty
public void setMedicationOrdersRootFullName(String medicationOrdersRootFullName) {
    this.medicationOrdersRootFullName = medicationOrdersRootFullName == null ? "EK_MED_ORDERS" : medicationOrdersRootFullName;
}
// Root full name of the ICD-9 procedure hierarchy (defaults to "EK_ICD9P").
public String getIcd9ProcedureCodesRootFullName() {
    return icd9ProcedureCodesRootFullName;
}
/** Sets the ICD-9 procedure hierarchy root; null selects the default. */
@BackendProperty
public void setIcd9ProcedureCodesRootFullName(String icd9ProcedureCodesRootFullName) {
    this.icd9ProcedureCodesRootFullName = icd9ProcedureCodesRootFullName == null ? "EK_ICD9P" : icd9ProcedureCodesRootFullName;
}
// Root full name of the ICD-10-CM diagnosis hierarchy (defaults to "EK_ICD10CM").
public String getIcd10DiagnosisCodesRootFullName() {
    return icd10DiagnosisCodesRootFullName;
}
/** Sets the ICD-10-CM diagnosis hierarchy root; null selects the default. */
@BackendProperty
public void setIcd10DiagnosisCodesRootFullName(String icd10DiagnosisCodesRootFullName) {
    this.icd10DiagnosisCodesRootFullName = icd10DiagnosisCodesRootFullName == null ? "EK_ICD10CM" : icd10DiagnosisCodesRootFullName;
}
// Root full name of the ICD-10-PCS procedure hierarchy (defaults to "EK_ICD10PCS").
public String getIcd10ProcedureCodesRootFullName() {
    return icd10ProcedureCodesRootFullName;
}
/**
 * Sets the ICD-10-PCS procedure hierarchy root; null selects the default
 * "EK_ICD10PCS".
 *
 * Bug fix: this method previously null-checked the ICD-9 field
 * (icd9ProcedureCodesRootFullName) instead of its own parameter — a
 * copy-paste defect from the ICD-9 setter — so whether the ICD-10 default
 * was applied depended on the unrelated ICD-9 setting.
 */
@BackendProperty
public void setIcd10ProcedureCodesRootFullName(String icd10ProcedureCodesRootFullName) {
    if (icd10ProcedureCodesRootFullName == null) {
        this.icd10ProcedureCodesRootFullName = "EK_ICD10PCS";
    } else {
        this.icd10ProcedureCodesRootFullName = icd10ProcedureCodesRootFullName;
    }
}
// public String getCptProcedureCodesRootFullName() {
// return cptProcedureCodesRootFullName;
// }
//
// @BackendProperty
// public void setCptProcedureCodesRootFullName(String cptProcedureCodesRootFullName) {
// if (cptProcedureCodesRootFullName == null) {
// this.cptProcedureCodesRootFullName = DEFAULT_ROOT_FULL_NAME;
// } else {
// this.cptProcedureCodesRootFullName = cptProcedureCodesRootFullName;
// }
// }
}
| |
package android.support.v4.app;
import android.app.Activity;
import android.content.ComponentCallbacks;
import android.content.Context;
import android.content.Intent;
import android.content.res.Configuration;
import android.content.res.Resources;
import android.os.Bundle;
import android.os.Parcelable;
import android.support.v4.util.DebugUtils;
import android.support.v4.util.SimpleArrayMap;
import android.support.v4.view.LayoutInflaterCompat;
import android.util.SparseArray;
import android.view.ContextMenu;
import android.view.ContextMenu.ContextMenuInfo;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.View.OnCreateContextMenuListener;
import android.view.ViewGroup;
import android.view.animation.Animation;
import java.io.FileDescriptor;
import java.io.PrintWriter;
// Decompiled android.support.v4.app.Fragment (support-library backport).
// Field names follow the AOSP "m"-prefix convention; decompilation stripped
// most access modifiers, so visibility here may differ from the original.
public class Fragment
  implements ComponentCallbacks, View.OnCreateContextMenuListener
{
  // Sentinel meaning "no explicit transition set; use the default".
  static final Object USE_DEFAULT_TRANSITION = new Object();
  // Cache of fragment class name -> loaded Class, shared by instantiate()
  // and isSupportFragmentClass().
  private static final SimpleArrayMap<String, Class<?>> sClassMap = new SimpleArrayMap();
  // Lifecycle / attachment state flags.
  boolean mAdded;
  Boolean mAllowEnterTransitionOverlap;
  Boolean mAllowReturnTransitionOverlap;
  // View currently running an exit animation; kept so it can be dumped and
  // the fragment advanced to mStateAfterAnimating when the animation ends.
  View mAnimatingAway;
  public Bundle mArguments;
  int mBackStackNesting;
  // Set by lifecycle callbacks to verify subclasses called through to super.
  boolean mCalled;
  boolean mCheckedForLoaderManager;
  FragmentManagerImpl mChildFragmentManager;
  ViewGroup mContainer;
  int mContainerId;
  boolean mDeferStart;
  boolean mDetached;
  // Transition objects/callbacks for the fragment enter/exit animations.
  public Object mEnterTransition = null;
  public SharedElementCallback mEnterTransitionCallback = null;
  public Object mExitTransition = null;
  public SharedElementCallback mExitTransitionCallback = null;
  public int mFragmentId;
  public FragmentManagerImpl mFragmentManager;
  boolean mFromLayout;
  public boolean mHasMenu;
  public boolean mHidden;
  // Host (activity or other container) this fragment is attached to.
  public FragmentHostCallback mHost;
  boolean mInLayout;
  // Index in the fragment manager's active list; -1 = not yet added.
  int mIndex = -1;
  View mInnerView;
  LoaderManagerImpl mLoaderManager;
  boolean mLoadersStarted;
  boolean mMenuVisible = true;
  int mNextAnim;
  public Fragment mParentFragment;
  Object mReenterTransition = USE_DEFAULT_TRANSITION;
  public boolean mRemoving;
  boolean mRestored;
  public boolean mResumed;
  boolean mRetainInstance;
  boolean mRetainLoader;
  boolean mRetaining;
  Object mReturnTransition = USE_DEFAULT_TRANSITION;
  // Saved instance state captured across configuration changes / restores.
  Bundle mSavedFragmentState;
  SparseArray<Parcelable> mSavedViewState;
  public Object mSharedElementEnterTransition = null;
  Object mSharedElementReturnTransition = USE_DEFAULT_TRANSITION;
  // Current lifecycle state as an int (0 = initializing; compared against
  // literals 2/4/5 in getChildFragmentManager()).
  int mState = 0;
  int mStateAfterAnimating;
  public String mTag;
  // Target fragment for startActivityForResult-style fragment results.
  public Fragment mTarget;
  int mTargetIndex = -1;
  int mTargetRequestCode;
  boolean mUserVisibleHint = true;
  public View mView;
  // Unique string identity of this fragment within its manager.
  String mWho;
  // Creates a Fragment of the given class name with no arguments Bundle;
  // delegates to the three-argument overload.
  public static Fragment instantiate(Context paramContext, String paramString)
  {
    return instantiate(paramContext, paramString, null);
  }
  // Reflectively creates a Fragment of the given class name (caching the
  // resolved Class in sClassMap) and, if paramBundle is non-null, installs it
  // as the fragment's arguments.
  public static Fragment instantiate(Context paramContext, String paramString, Bundle paramBundle)
  {
    try
    {
      Class localClass = (Class)sClassMap.get(paramString);
      if (localClass == null)
      {
        // First request for this class name: load via the app class loader
        // and cache for subsequent instantiations.
        localClass = paramContext.getClassLoader().loadClass(paramString);
        sClassMap.put(paramString, localClass);
      }
      Fragment localFragment = (Fragment)localClass.newInstance();
      if (paramBundle != null)
      {
        // Ensure the arguments Bundle can resolve application classes.
        paramBundle.setClassLoader(localFragment.getClass().getClassLoader());
        localFragment.mArguments = paramBundle;
      }
      return localFragment;
    }
    // NOTE(review): the InstantiationException thrown below presumably
    // resolves to a nested Fragment.InstantiationException(String, Exception)
    // declared elsewhere in this file — java.lang.InstantiationException has
    // no such constructor. Confirm against the rest of the decompiled source.
    catch (ClassNotFoundException localClassNotFoundException)
    {
      throw new InstantiationException("Unable to instantiate fragment " + paramString + ": make sure class name exists, is public, and has an empty constructor that is public", localClassNotFoundException);
    }
    catch (InstantiationException localInstantiationException)
    {
      throw new InstantiationException("Unable to instantiate fragment " + paramString + ": make sure class name exists, is public, and has an empty constructor that is public", localInstantiationException);
    }
    catch (IllegalAccessException localIllegalAccessException)
    {
      throw new InstantiationException("Unable to instantiate fragment " + paramString + ": make sure class name exists, is public, and has an empty constructor that is public", localIllegalAccessException);
    }
  }
static boolean isSupportFragmentClass(Context paramContext, String paramString)
{
try
{
Class localClass = (Class)sClassMap.get(paramString);
if (localClass == null)
{
localClass = paramContext.getClassLoader().loadClass(paramString);
sClassMap.put(paramString, localClass);
}
boolean bool = Fragment.class.isAssignableFrom(localClass);
return bool;
}
catch (ClassNotFoundException localClassNotFoundException) {}
return false;
}
  // Decompiler-mangled callbacks: the "$xxxx" suffixes are synthetic names
  // generated during decompilation; originally these were instance lifecycle
  // callbacks (onCreateAnimation, onHiddenChanged, onRequestPermissionsResult)
  // whose parameters were optimized away. All are intentional no-ops here.
  public static Animation onCreateAnimation$24236ca7()
  {
    return null;
  }
  public static void onDestroyOptionsMenu() {}
  public static void onHiddenChanged$1385ff() {}
  public static void onRequestPermissionsResult$6f5af501() {}
public void dump(String paramString, FileDescriptor paramFileDescriptor, PrintWriter paramPrintWriter, String[] paramArrayOfString)
{
paramPrintWriter.print(paramString);
paramPrintWriter.print("mFragmentId=#");
paramPrintWriter.print(Integer.toHexString(this.mFragmentId));
paramPrintWriter.print(" mContainerId=#");
paramPrintWriter.print(Integer.toHexString(this.mContainerId));
paramPrintWriter.print(" mTag=");
paramPrintWriter.println(this.mTag);
paramPrintWriter.print(paramString);
paramPrintWriter.print("mState=");
paramPrintWriter.print(this.mState);
paramPrintWriter.print(" mIndex=");
paramPrintWriter.print(this.mIndex);
paramPrintWriter.print(" mWho=");
paramPrintWriter.print(this.mWho);
paramPrintWriter.print(" mBackStackNesting=");
paramPrintWriter.println(this.mBackStackNesting);
paramPrintWriter.print(paramString);
paramPrintWriter.print("mAdded=");
paramPrintWriter.print(this.mAdded);
paramPrintWriter.print(" mRemoving=");
paramPrintWriter.print(this.mRemoving);
paramPrintWriter.print(" mResumed=");
paramPrintWriter.print(this.mResumed);
paramPrintWriter.print(" mFromLayout=");
paramPrintWriter.print(this.mFromLayout);
paramPrintWriter.print(" mInLayout=");
paramPrintWriter.println(this.mInLayout);
paramPrintWriter.print(paramString);
paramPrintWriter.print("mHidden=");
paramPrintWriter.print(this.mHidden);
paramPrintWriter.print(" mDetached=");
paramPrintWriter.print(this.mDetached);
paramPrintWriter.print(" mMenuVisible=");
paramPrintWriter.print(this.mMenuVisible);
paramPrintWriter.print(" mHasMenu=");
paramPrintWriter.println(this.mHasMenu);
paramPrintWriter.print(paramString);
paramPrintWriter.print("mRetainInstance=");
paramPrintWriter.print(this.mRetainInstance);
paramPrintWriter.print(" mRetaining=");
paramPrintWriter.print(this.mRetaining);
paramPrintWriter.print(" mUserVisibleHint=");
paramPrintWriter.println(this.mUserVisibleHint);
if (this.mFragmentManager != null)
{
paramPrintWriter.print(paramString);
paramPrintWriter.print("mFragmentManager=");
paramPrintWriter.println(this.mFragmentManager);
}
if (this.mHost != null)
{
paramPrintWriter.print(paramString);
paramPrintWriter.print("mHost=");
paramPrintWriter.println(this.mHost);
}
if (this.mParentFragment != null)
{
paramPrintWriter.print(paramString);
paramPrintWriter.print("mParentFragment=");
paramPrintWriter.println(this.mParentFragment);
}
if (this.mArguments != null)
{
paramPrintWriter.print(paramString);
paramPrintWriter.print("mArguments=");
paramPrintWriter.println(this.mArguments);
}
if (this.mSavedFragmentState != null)
{
paramPrintWriter.print(paramString);
paramPrintWriter.print("mSavedFragmentState=");
paramPrintWriter.println(this.mSavedFragmentState);
}
if (this.mSavedViewState != null)
{
paramPrintWriter.print(paramString);
paramPrintWriter.print("mSavedViewState=");
paramPrintWriter.println(this.mSavedViewState);
}
if (this.mTarget != null)
{
paramPrintWriter.print(paramString);
paramPrintWriter.print("mTarget=");
paramPrintWriter.print(this.mTarget);
paramPrintWriter.print(" mTargetRequestCode=");
paramPrintWriter.println(this.mTargetRequestCode);
}
if (this.mNextAnim != 0)
{
paramPrintWriter.print(paramString);
paramPrintWriter.print("mNextAnim=");
paramPrintWriter.println(this.mNextAnim);
}
if (this.mContainer != null)
{
paramPrintWriter.print(paramString);
paramPrintWriter.print("mContainer=");
paramPrintWriter.println(this.mContainer);
}
if (this.mView != null)
{
paramPrintWriter.print(paramString);
paramPrintWriter.print("mView=");
paramPrintWriter.println(this.mView);
}
if (this.mInnerView != null)
{
paramPrintWriter.print(paramString);
paramPrintWriter.print("mInnerView=");
paramPrintWriter.println(this.mView);
}
if (this.mAnimatingAway != null)
{
paramPrintWriter.print(paramString);
paramPrintWriter.print("mAnimatingAway=");
paramPrintWriter.println(this.mAnimatingAway);
paramPrintWriter.print(paramString);
paramPrintWriter.print("mStateAfterAnimating=");
paramPrintWriter.println(this.mStateAfterAnimating);
}
if (this.mLoaderManager != null)
{
paramPrintWriter.print(paramString);
paramPrintWriter.println("Loader Manager:");
this.mLoaderManager.dump(paramString + " ", paramFileDescriptor, paramPrintWriter, paramArrayOfString);
}
if (this.mChildFragmentManager != null)
{
paramPrintWriter.print(paramString);
paramPrintWriter.println("Child " + this.mChildFragmentManager + ":");
this.mChildFragmentManager.dump(paramString + " ", paramFileDescriptor, paramPrintWriter, paramArrayOfString);
}
}
  /**
   * Final so subclasses cannot redefine fragment equality; delegates to the
   * superclass implementation (reference identity for fragments).
   */
  public final boolean equals(Object paramObject)
  {
    return super.equals(paramObject);
  }
public final FragmentActivity getActivity()
{
if (this.mHost == null) {
return null;
}
return (FragmentActivity)this.mHost.mActivity;
}
public final FragmentManager getChildFragmentManager()
{
if (this.mChildFragmentManager == null)
{
instantiateChildFragmentManager();
if (this.mState < 5) {
break label31;
}
this.mChildFragmentManager.dispatchResume();
}
for (;;)
{
return this.mChildFragmentManager;
label31:
if (this.mState >= 4) {
this.mChildFragmentManager.dispatchStart();
} else if (this.mState >= 2) {
this.mChildFragmentManager.dispatchActivityCreated();
} else if (this.mState > 0) {
this.mChildFragmentManager.dispatchCreate();
}
}
}
public final Context getContext()
{
if (this.mHost == null) {
return null;
}
return this.mHost.mContext;
}
  /**
   * Returns the host's LayoutInflater with this fragment's child
   * FragmentManager installed as the inflation factory.
   * Note: {@code paramBundle} is not used by this implementation.
   */
  public LayoutInflater getLayoutInflater(Bundle paramBundle)
  {
    LayoutInflater localLayoutInflater = this.mHost.onGetLayoutInflater();
    // Ensure mChildFragmentManager is initialized before it is used as factory.
    getChildFragmentManager();
    LayoutInflaterCompat.setFactory(localLayoutInflater, this.mChildFragmentManager);
    return localLayoutInflater;
  }
  /**
   * Returns this fragment's LoaderManager, creating one on first access
   * (the {@code true} argument asks the host to create it if absent).
   *
   * @throws IllegalStateException if the fragment is not attached to a host
   */
  public final LoaderManager getLoaderManager()
  {
    if (this.mLoaderManager != null) {
      return this.mLoaderManager;
    }
    if (this.mHost == null) {
      throw new IllegalStateException("Fragment " + this + " not attached to Activity");
    }
    this.mCheckedForLoaderManager = true;
    this.mLoaderManager = this.mHost.getLoaderManager(this.mWho, this.mLoadersStarted, true);
    return this.mLoaderManager;
  }
  /**
   * Returns the Resources of the host's Context.
   *
   * @throws IllegalStateException if the fragment is not attached to a host
   */
  public final Resources getResources()
  {
    if (this.mHost == null) {
      throw new IllegalStateException("Fragment " + this + " not attached to Activity");
    }
    return this.mHost.mContext.getResources();
  }
  /**
   * Convenience for {@code getResources().getString(id)}; throws
   * IllegalStateException (via getResources) when detached.
   */
  public final String getString(int paramInt)
  {
    return getResources().getString(paramInt);
  }
  /**
   * Convenience for {@code getResources().getString(id, formatArgs)}; throws
   * IllegalStateException (via getResources) when detached.
   */
  public final String getString(int paramInt, Object... paramVarArgs)
  {
    return getResources().getString(paramInt, paramVarArgs);
  }
  /**
   * Final to keep the hash consistent with the final equals(); delegates to
   * the superclass implementation.
   */
  public final int hashCode()
  {
    return super.hashCode();
  }
  /**
   * Creates this fragment's child FragmentManager and attaches it to the
   * current host, exposing this fragment's view hierarchy for view lookups.
   */
  final void instantiateChildFragmentManager()
  {
    this.mChildFragmentManager = new FragmentManagerImpl();
    this.mChildFragmentManager.attachController(this.mHost, new FragmentContainer()
    {
      // Resolve child-fragment view lookups against this fragment's view.
      public final View onFindViewById(int paramAnonymousInt)
      {
        if (Fragment.this.mView == null) {
          throw new IllegalStateException("Fragment does not have a view");
        }
        return Fragment.this.mView.findViewById(paramAnonymousInt);
      }
      public final boolean onHasView()
      {
        return Fragment.this.mView != null;
      }
    }, this);
  }
public final boolean isAdded()
{
return (this.mHost != null) && (this.mAdded);
}
  /**
   * Lifecycle callback; base implementation only records that the
   * superclass was called (mCalled), which the framework verifies.
   */
  public void onActivityCreated(Bundle paramBundle)
  {
    this.mCalled = true;
  }
  // Callback for results delivered to this fragment; no-op by default.
  public void onActivityResult(int paramInt1, int paramInt2, Intent paramIntent) {}
  /**
   * Deprecated Activity-based attach callback, still invoked by
   * {@link #onAttach(Context)} for backward compatibility; records mCalled.
   */
  @Deprecated
  public void onAttach(Activity paramActivity)
  {
    this.mCalled = true;
  }
public void onAttach(Context paramContext)
{
this.mCalled = true;
if (this.mHost == null) {}
for (Activity localActivity = null;; localActivity = this.mHost.mActivity)
{
if (localActivity != null)
{
this.mCalled = false;
onAttach(localActivity);
}
return;
}
}
  /** Configuration-change callback; base implementation records mCalled. */
  public void onConfigurationChanged(Configuration paramConfiguration)
  {
    this.mCalled = true;
  }
  /** Creation callback; base implementation records mCalled. */
  public void onCreate(Bundle paramBundle)
  {
    this.mCalled = true;
  }
  /**
   * Forwards context-menu creation to the hosting activity.
   * NOTE(review): getActivity() can return null when detached, which would
   * NPE here — callers appear expected to be attached at this point; confirm.
   */
  public void onCreateContextMenu(ContextMenu paramContextMenu, View paramView, ContextMenu.ContextMenuInfo paramContextMenuInfo)
  {
    getActivity().onCreateContextMenu(paramContextMenu, paramView, paramContextMenuInfo);
  }
  // Options-menu creation hook; no-op unless overridden by a subclass.
  public void onCreateOptionsMenu(Menu paramMenu, MenuInflater paramMenuInflater) {}
  /**
   * View-creation hook; base implementation returns null (no view).
   * Subclasses override to inflate and return the fragment's view.
   */
  public View onCreateView(LayoutInflater paramLayoutInflater, ViewGroup paramViewGroup, Bundle paramBundle)
  {
    return null;
  }
  /**
   * Destruction callback; records mCalled and tears down any loaders.
   */
  public void onDestroy()
  {
    this.mCalled = true;
    // Lazily look up (without creating, hence the final 'false') the loader
    // manager so existing loaders can be destroyed with the fragment.
    if (!this.mCheckedForLoaderManager)
    {
      this.mCheckedForLoaderManager = true;
      this.mLoaderManager = this.mHost.getLoaderManager(this.mWho, this.mLoadersStarted, false);
    }
    if (this.mLoaderManager != null) {
      this.mLoaderManager.doDestroy();
    }
  }
  /** View-destruction callback; base implementation records mCalled. */
  public void onDestroyView()
  {
    this.mCalled = true;
  }
  /** Detach callback; base implementation records mCalled. */
  public void onDetach()
  {
    this.mCalled = true;
  }
public final void onInflate$2c1ed547$6a2adb45()
{
this.mCalled = true;
if (this.mHost == null) {}
for (Activity localActivity = null;; localActivity = this.mHost.mActivity)
{
if (localActivity != null)
{
this.mCalled = false;
this.mCalled = true;
}
return;
}
}
  /** Low-memory callback; base implementation records mCalled. */
  public void onLowMemory()
  {
    this.mCalled = true;
  }
  /**
   * Options-menu selection hook; base implementation does not consume the
   * event (returns false).
   */
  public boolean onOptionsItemSelected(MenuItem paramMenuItem)
  {
    return false;
  }
  /** Pause callback; base implementation records mCalled. */
  public void onPause()
  {
    this.mCalled = true;
  }
  /** Resume callback; base implementation records mCalled. */
  public void onResume()
  {
    this.mCalled = true;
  }
  // State-saving hook; no-op by default (note: does not set mCalled).
  public void onSaveInstanceState(Bundle paramBundle) {}
  /**
   * Start callback; records mCalled and starts this fragment's loaders
   * exactly once per started period.
   */
  public void onStart()
  {
    this.mCalled = true;
    if (!this.mLoadersStarted)
    {
      this.mLoadersStarted = true;
      // Look up (without creating, hence 'false') an existing loader manager.
      if (!this.mCheckedForLoaderManager)
      {
        this.mCheckedForLoaderManager = true;
        this.mLoaderManager = this.mHost.getLoaderManager(this.mWho, this.mLoadersStarted, false);
      }
      if (this.mLoaderManager != null) {
        this.mLoaderManager.doStart();
      }
    }
  }
  /** Stop callback; base implementation records mCalled. */
  public void onStop()
  {
    this.mCalled = true;
  }
  // View-created hook; no-op unless overridden by a subclass.
  public void onViewCreated(View paramView, Bundle paramBundle) {}
  /** View-state-restored callback; base implementation records mCalled. */
  public void onViewStateRestored(Bundle paramBundle)
  {
    this.mCalled = true;
  }
  /**
   * Framework-internal wrapper around {@link #onCreateView}; clears the
   * child manager's state-saved flag before creating the view.
   */
  final View performCreateView(LayoutInflater paramLayoutInflater, ViewGroup paramViewGroup, Bundle paramBundle)
  {
    if (this.mChildFragmentManager != null) {
      this.mChildFragmentManager.mStateSaved = false;
    }
    return onCreateView(paramLayoutInflater, paramViewGroup, paramBundle);
  }
final void performReallyStop()
{
if (this.mChildFragmentManager != null) {
this.mChildFragmentManager.moveToState$2563266(2);
}
if (this.mLoadersStarted)
{
this.mLoadersStarted = false;
if (!this.mCheckedForLoaderManager)
{
this.mCheckedForLoaderManager = true;
this.mLoaderManager = this.mHost.getLoaderManager(this.mWho, this.mLoadersStarted, false);
}
if (this.mLoaderManager != null)
{
if (!this.mRetainLoader) {
break label81;
}
this.mLoaderManager.doRetain();
}
}
return;
label81:
this.mLoaderManager.doStop();
}
  /**
   * Framework-internal state save: invokes {@link #onSaveInstanceState} and
   * then persists the child fragment manager's state under the
   * "android:support:fragments" key.
   */
  final void performSaveInstanceState(Bundle paramBundle)
  {
    onSaveInstanceState(paramBundle);
    if (this.mChildFragmentManager != null)
    {
      Parcelable localParcelable = this.mChildFragmentManager.saveAllState();
      if (localParcelable != null) {
        paramBundle.putParcelable("android:support:fragments", localParcelable);
      }
    }
  }
  /**
   * Supplies construction arguments; only legal before the fragment is
   * activated (mIndex assigned).
   *
   * @throws IllegalStateException if the fragment is already active
   */
  public final void setArguments(Bundle paramBundle)
  {
    if (this.mIndex >= 0) {
      throw new IllegalStateException("Fragment already active");
    }
    this.mArguments = paramBundle;
  }
final void setIndex(int paramInt, Fragment paramFragment)
{
this.mIndex = paramInt;
if (paramFragment != null)
{
this.mWho = (paramFragment.mWho + ":" + this.mIndex);
return;
}
this.mWho = ("android:fragment:" + this.mIndex);
}
  /**
   * Toggles whether this fragment's menu items are shown; invalidates the
   * host's options menu only when the fragment actually contributes one
   * and is added and visible.
   */
  public final void setMenuVisibility(boolean paramBoolean)
  {
    if (this.mMenuVisible != paramBoolean)
    {
      this.mMenuVisible = paramBoolean;
      if ((this.mHasMenu) && (isAdded()) && (!this.mHidden)) {
        this.mHost.onSupportInvalidateOptionsMenu();
      }
    }
  }
public final void setRetainInstance$1385ff()
{
if (this.mParentFragment != null) {
throw new IllegalStateException("Can't retain fragements that are nested in other fragments");
}
this.mRetainInstance = true;
}
  /**
   * Records an optional target fragment and request code, later reported in
   * the debug dump as mTarget/mTargetRequestCode.
   */
  public final void setTargetFragment(Fragment paramFragment, int paramInt)
  {
    this.mTarget = paramFragment;
    this.mTargetRequestCode = paramInt;
  }
public final void startActivity(Intent paramIntent)
{
if (this.mHost == null) {
throw new IllegalStateException("Fragment " + this + " not attached to Activity");
}
this.mHost.onStartActivityFromFragment(this, paramIntent, -1);
}
  /**
   * Starts an activity whose result will be delivered to this fragment's
   * onActivityResult with the given request code.
   *
   * @throws IllegalStateException if the fragment is not attached to a host
   */
  public final void startActivityForResult(Intent paramIntent, int paramInt)
  {
    if (this.mHost == null) {
      throw new IllegalStateException("Fragment " + this + " not attached to Activity");
    }
    this.mHost.onStartActivityFromFragment(this, paramIntent, paramInt);
  }
  /**
   * Debug string: short class tag plus, when set, the fragment index
   * (" #n"), the layout id in hex (" id=0x..."), and the tag.
   */
  public String toString()
  {
    StringBuilder localStringBuilder = new StringBuilder(128);
    DebugUtils.buildShortClassTag(this, localStringBuilder);
    if (this.mIndex >= 0)
    {
      localStringBuilder.append(" #");
      localStringBuilder.append(this.mIndex);
    }
    if (this.mFragmentId != 0)
    {
      localStringBuilder.append(" id=0x");
      localStringBuilder.append(Integer.toHexString(this.mFragmentId));
    }
    if (this.mTag != null)
    {
      localStringBuilder.append(" ");
      localStringBuilder.append(this.mTag);
    }
    localStringBuilder.append('}');
    return localStringBuilder.toString();
  }
public static final class InstantiationException
extends RuntimeException
{
public InstantiationException(String paramString, Exception paramException)
{
super(paramException);
}
}
}
/* Location: F:\apktool\apktool\Google_Play_Store6.0.5\classes-dex2jar.jar
* Qualified Name: android.support.v4.app.Fragment
* JD-Core Version: 0.7.0.1
*/
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.wicket.core.request.mapper;
import static org.hamcrest.CoreMatchers.instanceOf;
import java.nio.charset.Charset;
import java.util.Locale;
import org.apache.wicket.MockPage;
import org.apache.wicket.core.request.handler.IPageProvider;
import org.apache.wicket.core.request.handler.IPageRequestHandler;
import org.apache.wicket.core.request.handler.ListenerRequestHandler;
import org.apache.wicket.core.request.handler.PageAndComponentProvider;
import org.apache.wicket.core.request.handler.PageProvider;
import org.apache.wicket.core.request.handler.RenderPageRequestHandler;
import org.apache.wicket.request.IRequestHandler;
import org.apache.wicket.request.Request;
import org.apache.wicket.request.Url;
import org.apache.wicket.request.component.IRequestableComponent;
import org.apache.wicket.request.component.IRequestablePage;
import org.junit.Test;
/**
*
* @author Matej Knopp
*/
public class PageInstanceMapperTest extends AbstractMapperTest
{
    // Mapper under test, wired to the shared mock IMapperContext from the base class.
    private final PageInstanceMapper encoder = new PageInstanceMapper()
    {
        @Override
        protected IMapperContext getContext()
        {
            return context;
        }
    };
    /**
     * A bare "wicket/page?4" URL decodes to a render handler for page 4.
     */
    @Test
    public void decode1()
    {
        Url url = Url.parse("wicket/page?4");
        IRequestHandler handler = encoder.mapRequest(getRequest(url));
        assertThat(handler, instanceOf(RenderPageRequestHandler.class));
        RenderPageRequestHandler h = (RenderPageRequestHandler)handler;
        checkPage(h.getPage(), 4);
    }
    /**
     * Extra query parameters after the page id are ignored for decoding.
     */
    @Test
    public void decode2()
    {
        Url url = Url.parse("wicket/page?4&a=3&b=3");
        IRequestHandler handler = encoder.mapRequest(getRequest(url));
        assertThat(handler, instanceOf(RenderPageRequestHandler.class));
        RenderPageRequestHandler h = (RenderPageRequestHandler)handler;
        checkPage(h.getPage(), 4);
    }
    /**
     * URLs with path segments after the "wicket/page" mount are not mapped.
     * (Typo "ingore" fixed to "ignore" in the fixture path; any extra
     * segments trigger the same ignore behavior.)
     */
    @Test
    public void ignoreIfPageIdentifierHasSegmentsAfterIt()
    {
        Url url = Url.parse("wicket/page/ignore/me?4&a=3&b=3");
        IRequestHandler handler = encoder.mapRequest(getRequest(url));
        assertNull(handler);
    }
    /**
     * "?4--a-b-c" decodes to a listener handler for component "a:b:c" on
     * page 4 with no behavior index.
     */
    @Test
    public void decode3()
    {
        Url url = Url.parse("wicket/page?4--a-b-c");
        IRequestHandler handler = encoder.mapRequest(getRequest(url));
        assertThat(handler, instanceOf(ListenerRequestHandler.class));
        ListenerRequestHandler h = (ListenerRequestHandler)handler;
        checkPage(h.getPage(), 4);
        assertEquals(h.getComponent().getPageRelativePath(), "a:b:c");
        assertNull(h.getBehaviorIndex());
    }
    /**
     * A URL not under the "wicket/page" mount is not mapped.
     */
    @Test
    public void decode4()
    {
        Url url = Url.parse("wickett/pagee?4--a:b-c");
        IRequestHandler handler = encoder.mapRequest(getRequest(url));
        assertNull(handler);
    }
    /**
     * A non-numeric page identifier is not mapped.
     */
    @Test
    public void decode5()
    {
        Url url = Url.parse("wicket/page?abc");
        IRequestHandler handler = encoder.mapRequest(getRequest(url));
        assertNull(handler);
    }
    /**
     * A listener URL with an explicit behavior index ("...5-a-b-c") decodes
     * to a listener handler carrying that index.
     */
    @Test
    public void decode6()
    {
        Url url = Url.parse("wicket/page?4-ILinkListener.5-a-b-c");
        IRequestHandler handler = encoder.mapRequest(getRequest(url));
        assertThat(handler, instanceOf(ListenerRequestHandler.class));
        ListenerRequestHandler h = (ListenerRequestHandler)handler;
        checkPage(h.getPage(), 4);
        assertEquals(h.getComponent().getPageRelativePath(), "a:b:c");
        assertEquals((Object)5, h.getBehaviorIndex());
    }
    /**
     * The render count encoded in the URL ("4-6...") is carried to the page
     * when it matches the context's next render count.
     */
    @Test
    public void decode7()
    {
        Url url = Url.parse("wicket/page?4-6.5-a-b-c");
        context.setNextPageRenderCount(6);
        IRequestHandler handler = encoder.mapRequest(getRequest(url));
        assertThat(handler, instanceOf(ListenerRequestHandler.class));
        ListenerRequestHandler h = (ListenerRequestHandler)handler;
        assertEquals(6, h.getPage().getRenderCount());
    }
    /**
     * A mismatching render count makes page resolution throw
     * StalePageException.
     */
    @Test(expected = StalePageException.class)
    public void decode8()
    {
        Url url = Url.parse("wicket/page?4-6.5-a-b-c");
        context.setNextPageRenderCount(8);
        IRequestHandler handler = encoder.mapRequest(getRequest(url));
        ((IPageRequestHandler)handler).getPage();
    }
    /**
     * A relative "page?4" URL is resolved against a client URL that lives
     * under the "wicket" filter path.
     */
    @Test
    public void decode9()
    {
        final Url url = Url.parse("page?4");
        Request request = new Request()
        {
            @Override
            public Url getUrl()
            {
                return url;
            }
            @Override
            public Locale getLocale()
            {
                return null;
            }
            @Override
            public Charset getCharset()
            {
                return Charset.forName("UTF-8");
            }
            @Override
            public Url getClientUrl()
            {
                return Url.parse("wicket/page");
            }
            @Override
            public Object getContainerRequest()
            {
                return null;
            }
        };
        IRequestHandler handler = encoder.mapRequest(request);
        IRequestablePage page = ((IPageRequestHandler)handler).getPage();
        checkPage(page, 4);
    }
    /**
     * Same as decode9 but with a client URL directly at "page".
     */
    @Test
    public void decode10()
    {
        final Url url = Url.parse("page?4");
        Request request = new Request()
        {
            @Override
            public Url getUrl()
            {
                return url;
            }
            @Override
            public Locale getLocale()
            {
                return null;
            }
            @Override
            public Charset getCharset()
            {
                return Charset.forName("UTF-8");
            }
            @Override
            public Url getClientUrl()
            {
                return Url.parse("page");
            }
            @Override
            public Object getContainerRequest()
            {
                return null;
            }
        };
        IRequestHandler handler = encoder.mapRequest(request);
        IRequestablePage page = ((IPageRequestHandler)handler).getPage();
        checkPage(page, 4);
    }
    /**
     * A render handler for page 15 encodes to "wicket/page?15".
     */
    @Test
    public void encode1()
    {
        MockPage page = new MockPage(15);
        IPageProvider provider = new PageProvider(page);
        IRequestHandler handler = new RenderPageRequestHandler(provider);
        Url url = encoder.mapHandler(handler);
        assertEquals("wicket/page?15", url.toString());
    }
    /**
     * A listener handler for component "a:b:c" on page 15 with render count
     * 5 encodes to "wicket/page?15-5.-a-b-c".
     */
    @Test
    public void encode2()
    {
        MockPage page = new MockPage(15);
        page.setRenderCount(5);
        IRequestableComponent c = page.get("a:b:c");
        PageAndComponentProvider provider = new PageAndComponentProvider(page, c);
        IRequestHandler handler = new ListenerRequestHandler(provider);
        Url url = encoder.mapHandler(handler);
        assertEquals("wicket/page?15-5.-a-b-c", url.toString());
    }
}
| |
/*
* Copyright 2015 William Oemler, Blueprint Medicines
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.oncoblocks.centromere.sql.test;
import org.junit.Before;
import org.junit.FixMethodOrder;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.oncoblocks.centromere.core.repository.Evaluation;
import org.oncoblocks.centromere.core.repository.QueryCriteria;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Sort;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.util.Assert;
import java.util.ArrayList;
import java.util.List;
/**
* @author woemler
*/
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(classes = { TestJdbcDataSourceConfig.class, JdbcRepositoryConfig.class })
@FixMethodOrder
public class GenericJdbcRepositoryTests {
@Autowired private SubjectRepository subjectRepository;
private static boolean isConfigured = false;
@Before
public void setup(){
if (isConfigured) return;
subjectRepository.deleteAll();
for (Subject subject: Subject.createDummyData()){
subjectRepository.insert(subject);
}
isConfigured = true;
}
@Test
public void findOneTest(){
Subject subject = subjectRepository.findOne(1L);
Assert.notNull(subject);
Assert.isTrue(subject.getId().equals(1L));
Assert.isTrue(subject.getName().equals("PersonA"));
}
@Test
public void findAllTest(){
List<Subject> subjects = subjectRepository.findAll();
Assert.notNull(subjects);
Assert.notEmpty(subjects);
Assert.isTrue(subjects.size() == 5);
Subject subject = subjects.get(0);
Assert.notNull(subject);
Assert.isTrue(subject.getId().equals(1L));
Assert.isTrue(subject.getName().equals("PersonA"));
}
@Test
public void findAllSorted(){
List<Subject> subjects = subjectRepository.findAll(
new Sort(new Sort.Order(Sort.Direction.DESC, "subjects.subject_id")));
Assert.notNull(subjects);
Assert.notEmpty(subjects);
Subject subject = subjects.get(0);
Assert.notNull(subject);
Assert.isTrue(subject.getId().equals(5L));
Assert.isTrue(subject.getName().equals("A375"));
}
@Test
public void findAllPaged(){
PageRequest pageRequest = new PageRequest(1, 3);
Page<Subject> page = subjectRepository.findAll(pageRequest);
Assert.notNull(page);
Assert.isTrue(page.getTotalPages() == 2);
Assert.isTrue(page.getTotalElements() == 5);
List<Subject> subjects = page.getContent();
Assert.isTrue(subjects.size() == 2);
Subject subject = subjects.get(0);
Assert.isTrue(subject.getId().equals(4L));
}
@Test
public void countTest(){
Long count = subjectRepository.count();
Assert.notNull(count);
Assert.isTrue(count.equals(5L));
}
@Test
public void queryCriteriaTest(){
List<QueryCriteria> queryCriterias = new ArrayList<>();
queryCriterias.add(new QueryCriteria("subjects.name", "PersonB", Evaluation.EQUALS));
List<Subject> subjects = subjectRepository.find(queryCriterias);
Assert.notNull(subjects);
Assert.notEmpty(subjects);
Assert.isTrue(subjects.size() == 1);
Subject subject = subjects.get(0);
Assert.notNull(subject);
Assert.isTrue(subject.getId().equals(2L));
Assert.isTrue(subject.getName().equals("PersonB"));
}
@Test
public void sortedCriteriaTest(){
List<QueryCriteria> queryCriterias = new ArrayList<>();
queryCriterias.add(new QueryCriteria("subjects.gender", "F", Evaluation.EQUALS));
Sort sort = new Sort(new Sort.Order(Sort.Direction.DESC, "subjects.subject_id"));
List<Subject> subjects = subjectRepository.find(queryCriterias, sort);
Assert.notNull(subjects);
Assert.notEmpty(subjects);
Assert.isTrue(subjects.size() == 2);
Subject subject = subjects.get(0);
Assert.notNull(subject);
Assert.isTrue(subject.getId().equals(4L));
}
@Test
public void pagedCriteriaTest(){
List<QueryCriteria> queryCriterias = new ArrayList<>();
queryCriterias.add(new QueryCriteria("subjects.species", "Homo sapiens", Evaluation.EQUALS));
Sort sort = new Sort(new Sort.Order(Sort.Direction.DESC, "subjects.subject_id"));
PageRequest pageRequest = new PageRequest(1, 3, sort);
Page<Subject> page = subjectRepository.find(queryCriterias, pageRequest);
Assert.notNull(page);
Assert.isTrue(page.getTotalPages() == 2);
Assert.isTrue(page.getTotalElements() == 5);
List<Subject> subjects = page.getContent();
Assert.isTrue(subjects.size() == 2);
Subject subject = subjects.get(0);
Assert.notNull(subject);
Assert.isTrue(subject.getId().equals(2L));
}
@Test
public void findAllPagedCriteriaTest(){
List<QueryCriteria> queryCriterias = new ArrayList<>();
PageRequest pageRequest = new PageRequest(1, 3);
Page<Subject> page = subjectRepository.find(queryCriterias, pageRequest);
Assert.notNull(page);
Assert.isTrue(page.getTotalElements() == 5);
Assert.isTrue(page.getTotalPages() == 2);
List<Subject> subjects = page.getContent();
Assert.notNull(subjects);
Assert.notEmpty(subjects);
Assert.isTrue(subjects.size() == 2);
Subject subject = subjects.get(0);
Assert.isTrue(subject.getId().equals(4L));
}
@Test
public void insertTest(){
Subject subject = subjectRepository.insert(
new Subject(6L, "PersonD", "Homo sapiens", "F", "patient", null, null, null));
Assert.notNull(subject);
Assert.isTrue(subject.getId().equals(6L));
subject = subjectRepository.findOne(6L);
Assert.notNull(subject);
Assert.isTrue(subject.getId().equals(6L));
subjectRepository.delete(6L);
}
@Test
public void updateTest(){
Subject subject = subjectRepository.insert(
new Subject(6L, "PersonD", "Homo sapiens", "F", "patient", null, null, null));
subject.setName("TEST_NAME");
Subject updated = subjectRepository.update(subject);
Assert.notNull(updated);
Assert.isTrue(updated.getName().equals("TEST_NAME"));
subject = subjectRepository.findOne(6L);
Assert.notNull(subject);
Assert.isTrue(subject.getName().equals("TEST_NAME"));
subjectRepository.delete(6L);
}
@Test
public void deleteTest(){
Subject subject = subjectRepository.insert(
new Subject(6L, "PersonD", "Homo sapiens", "F", "patient",null, null, null));
Assert.isTrue(subject.getId().equals(6L));
Assert.notNull(subject);
subjectRepository.delete(6L);
subject = subjectRepository.findOne(6L);
Assert.isNull(subject);
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.webapp;
import com.google.inject.AbstractModule;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.servlet.GuiceFilter;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.http.HttpConfig.Policy;
import org.apache.hadoop.http.HttpServer2;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.yarn.security.AdminACLsManager;
import org.apache.hadoop.yarn.webapp.util.WebAppUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.servlet.http.HttpServlet;
import java.io.IOException;
import java.net.ConnectException;
import java.net.URI;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import static com.google.common.base.Preconditions.checkNotNull;
/**
* Helpers to create an embedded webapp.
* <p/>
* <h4>Quick start:</h4>
* <pre>
* WebApp wa = WebApps.$for(myApp).start();</pre>
* Starts a webapp with default routes binds to 0.0.0.0 (all network
* interfaces)
* on an ephemeral port, which can be obtained with:<pre>
* int port = wa.port();</pre>
* <h4>With more options:</h4>
* <pre>
* WebApp wa = WebApps.$for(myApp).at(address, port).
* with(configuration).
* start(new WebApp() {
* @Override public void setup() {
* route("/foo/action", FooController.class);
* route("/foo/:id", FooController.class, "show");
* }
* });</pre>
*/
@InterfaceAudience.LimitedPrivate({"YARN", "MapReduce"})
public class WebApps {
  static final Logger LOG = LoggerFactory.getLogger(WebApps.class);

  /**
   * Fluent builder that configures and starts an embedded {@link WebApp}
   * on top of {@link HttpServer2}, binding the application instance into a
   * Guice injector.
   *
   * @param <T> the application type (holder of the embedded webapp)
   */
  public static class Builder<T> {
    // Holder describing one additional servlet to register on the server.
    static class ServletStruct {
      public Class<? extends HttpServlet> clazz;
      public String name;
      public String spec;
    }

    final String name;
    final String wsName;
    final Class<T> api;
    final T application;
    String bindAddress = "0.0.0.0";
    int port = 0;
    boolean findPort = false;
    Configuration conf;
    Policy httpPolicy = null;
    boolean devMode = false;
    private String spnegoPrincipalKey;
    private String spnegoKeytabKey;
    private final HashSet<ServletStruct> servlets =
        new HashSet<ServletStruct>();
    private final HashMap<String, Object> attributes =
        new HashMap<String, Object>();

    Builder(String name, Class<T> api, T application, String wsName) {
      this.name = name;
      this.api = api;
      this.application = application;
      this.wsName = wsName;
    }

    Builder(String name, Class<T> api, T application) {
      this(name, api, application, null);
    }

    /**
     * Sets the bind address, optionally with a ":port" suffix; a missing or
     * zero port means "pick an ephemeral port" (findPort).
     */
    public Builder<T> at(String bindAddress) {
      String[] parts = StringUtils.split(bindAddress, ':');
      if (parts.length == 2) {
        int port = Integer.parseInt(parts[1]);
        return at(parts[0], port, port == 0);
      }
      return at(bindAddress, 0, true);
    }

    /** Binds to all interfaces on the given port (0 = ephemeral). */
    public Builder<T> at(int port) {
      return at("0.0.0.0", port, port == 0);
    }

    public Builder<T> at(String address, int port, boolean findPort) {
      this.bindAddress = checkNotNull(address, "bind address");
      this.port = port;
      this.findPort = findPort;
      return this;
    }

    /** Stashes a servlet-context attribute to set on the started server. */
    public Builder<T> withAttribute(String key, Object value) {
      attributes.put(key, value);
      return this;
    }

    /** Registers an extra servlet to add to the server before start. */
    public Builder<T> withServlet(String name, String pathSpec,
        Class<? extends HttpServlet> servlet) {
      ServletStruct struct = new ServletStruct();
      struct.clazz = servlet;
      struct.name = name;
      struct.spec = pathSpec;
      servlets.add(struct);
      return this;
    }

    public Builder<T> with(Configuration conf) {
      this.conf = conf;
      return this;
    }

    public Builder<T> withHttpPolicy(Configuration conf, Policy httpPolicy) {
      this.conf = conf;
      this.httpPolicy = httpPolicy;
      return this;
    }

    public Builder<T> withHttpSpnegoPrincipalKey(String spnegoPrincipalKey) {
      this.spnegoPrincipalKey = spnegoPrincipalKey;
      return this;
    }

    public Builder<T> withHttpSpnegoKeytabKey(String spnegoKeytabKey) {
      this.spnegoKeytabKey = spnegoKeytabKey;
      return this;
    }

    public Builder<T> inDevMode() {
      devMode = true;
      return this;
    }

    /**
     * Builds, configures and starts the webapp on an {@link HttpServer2},
     * then creates the Guice injector binding {@code api} to the
     * application instance.
     *
     * @param webapp the webapp to start, or null for a default no-route app
     * @return the started webapp
     * @throws WebAppException wrapping any startup failure
     */
    public WebApp start(WebApp webapp) {
      if (webapp == null) {
        webapp = new WebApp() {
          @Override
          public void setup() {
            // Defaults should be fine in usual cases
          }
        };
      }
      webapp.setName(name);
      webapp.setWebServices(wsName);
      String basePath = "/" + name;
      webapp.setRedirectPath(basePath);
      // Collect the path specs served by this app (and its webservice).
      List<String> pathList = new ArrayList<String>();
      if (basePath.equals("/")) {
        webapp.addServePathSpec("/*");
        pathList.add("/*");
      } else {
        webapp.addServePathSpec(basePath);
        webapp.addServePathSpec(basePath + "/*");
        pathList.add(basePath + "/*");
      }
      if (wsName != null && !wsName.equals(basePath)) {
        if (wsName.equals("/")) {
          webapp.addServePathSpec("/*");
          pathList.add("/*");
        } else {
          webapp.addServePathSpec("/" + wsName);
          webapp.addServePathSpec("/" + wsName + "/*");
          pathList.add("/" + wsName + "/*");
        }
      }
      if (conf == null) {
        conf = new Configuration();
      }
      try {
        if (application != null) {
          webapp.setHostClass(application.getClass());
        } else {
          String cls = inferHostClass();
          LOG.debug("setting webapp host class to {}", cls);
          webapp.setHostClass(Class.forName(cls));
        }
        if (devMode) {
          if (port > 0) {
            // Best-effort ping to stop a previously running dev instance.
            try {
              new URL("http://localhost:" + port + "/__stop").getContent();
              LOG.info("stopping existing webapp instance");
              Thread.sleep(100);
            } catch (ConnectException e) {
              LOG.info("no existing webapp instance found: {}", e.toString());
            } catch (InterruptedException e) {
              // Preserve the thread's interrupt status instead of swallowing it.
              Thread.currentThread().interrupt();
              LOG.warn("error stopping existing instance: {}", e.toString());
            } catch (Exception e) {
              // should not be fatal
              LOG.warn("error stopping existing instance: {}", e.toString());
            }
          } else {
            LOG.error("dev mode does NOT work with ephemeral port!");
            System.exit(1);
          }
        }
        // Choose http vs https from the explicit policy, falling back to conf.
        String httpScheme;
        if (this.httpPolicy == null) {
          httpScheme = WebAppUtils.getHttpSchemePrefix(conf);
        } else {
          httpScheme =
              (httpPolicy == Policy.HTTPS_ONLY) ? WebAppUtils.HTTPS_PREFIX :
                  WebAppUtils.HTTP_PREFIX;
        }
        HttpServer2.Builder builder = new HttpServer2.Builder().setName(name)
            .addEndpoint(URI.create(httpScheme + bindAddress + ":" + port))
            .setConf(conf).setFindPort(findPort)
            .setACL(new AdminACLsManager(conf).getAdminAcl())
            .setPathSpec(pathList.toArray(new String[0]));
        // SPNEGO is enabled only when both principal and keytab are configured.
        boolean hasSpnegoConf = spnegoPrincipalKey != null &&
            conf.get(spnegoPrincipalKey) != null && spnegoKeytabKey != null &&
            conf.get(spnegoKeytabKey) != null;
        if (hasSpnegoConf) {
          builder.setUsernameConfKey(spnegoPrincipalKey)
              .setKeytabConfKey(spnegoKeytabKey)
              .setSecurityEnabled(UserGroupInformation.isSecurityEnabled());
        }
        if (httpScheme.equals(WebAppUtils.HTTPS_PREFIX)) {
          WebAppUtils.loadSslConfiguration(builder);
        }
        HttpServer2 server = builder.build();
        for (ServletStruct struct : servlets) {
          server.addServlet(struct.name, struct.spec, struct.clazz);
        }
        for (Map.Entry<String, Object> entry : attributes.entrySet()) {
          server.setAttribute(entry.getKey(), entry.getValue());
        }
        HttpServer2.defineFilter(server.getWebAppContext(), "guice",
            GuiceFilter.class.getName(), null, new String[]{"/*"});
        webapp.setConf(conf);
        webapp.setHttpServer(server);
        server.start();
        LOG.info("Web app /" + name + " started at " +
            server.getConnectorAddress(0).getPort());
      } catch (ClassNotFoundException | IOException e) {
        // Merged the two identical catch blocks of the original (same message).
        throw new WebAppException("Error starting http server", e);
      }
      Injector injector = Guice.createInjector(webapp, new AbstractModule() {
        @Override
        protected void configure() {
          if (api != null) {
            bind(api).toInstance(application);
          }
        }
      });
      LOG.info("Registered webapp guice modules");
      // save a guice filter instance for webapp stop (mostly for unit tests)
      webapp.setGuiceFilter(injector.getInstance(GuiceFilter.class));
      if (devMode) {
        injector.getInstance(Dispatcher.class).setDevMode(devMode);
        LOG.info("in dev mode!");
      }
      return webapp;
    }

    public WebApp start() {
      return start(null);
    }

    /**
     * Walks the current stack to find the first frame outside this builder
     * class, which is assumed to be the hosting application class.
     */
    private String inferHostClass() {
      String thisClass = this.getClass().getName();
      Throwable t = new Throwable();
      for (StackTraceElement e : t.getStackTrace()) {
        if (e.getClassName().equals(thisClass)) {
          continue;
        }
        return e.getClassName();
      }
      LOG.warn("could not infer host class from", t);
      return thisClass;
    }
  }

  /**
   * Create a new webapp builder.
   *
   * @param <T>
   *          application (holding the embedded webapp) type
   * @param prefix
   *          of the webapp
   * @param api
   *          the api class for the application
   * @param app
   *          the application instance
   * @param wsPrefix
   *          the prefix for the webservice api for this app
   * @return a webapp builder
   * @see WebApps for a complete example
   */
  public static <T> Builder<T> $for(String prefix, Class<T> api, T app,
      String wsPrefix) {
    return new Builder<T>(prefix, api, app, wsPrefix);
  }

  /**
   * Create a new webapp builder.
   *
   * @param <T>
   *          application (holding the embedded webapp) type
   * @param prefix
   *          of the webapp
   * @param api
   *          the api class for the application
   * @param app
   *          the application instance
   * @return a webapp builder
   * @see WebApps for a complete example
   */
  public static <T> Builder<T> $for(String prefix, Class<T> api, T app) {
    return new Builder<T>(prefix, api, app);
  }

  // Short cut mostly for tests/demos
  @SuppressWarnings("unchecked")
  public static <T> Builder<T> $for(String prefix, T app) {
    return $for(prefix, (Class<T>) app.getClass(), app);
  }

  // Ditto
  public static <T> Builder<T> $for(T app) {
    return $for("", app);
  }

  public static <T> Builder<T> $for(String prefix) {
    return $for(prefix, null, null);
  }
}
| |
/*
* Copyright 2012-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.loader.tools;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.lang.reflect.Method;
import java.util.Arrays;
import java.util.Collection;
import java.util.Locale;
import org.springframework.util.ReflectionUtils;
/**
 * Utility used to run a process.
 * <p>
 * Output of the child process is either inherited (when
 * {@code ProcessBuilder.inheritIO()} is available and not broken on the
 * current platform) or pumped to {@code System.out} by a background thread.
 *
 * @author Phillip Webb
 * @author Dave Syer
 * @author Andy Wilkinson
 * @author Stephane Nicoll
 * @since 1.1.0
 */
public class RunProcess {

    // inheritIO() only exists on Java 7+, so it is looked up reflectively;
    // may be null on older VMs, in which case invoke() fails and we fall
    // back to manual output redirection.
    private static final Method INHERIT_IO_METHOD = ReflectionUtils
            .findMethod(ProcessBuilder.class, "inheritIO");

    // A SIGINT arriving within this many milliseconds of the child's end is
    // assumed to be the cause of its death and therefore already "handled".
    private static final long JUST_ENDED_LIMIT = 500;

    private final File workingDirectory;

    private final String[] command;

    private volatile Process process;

    private volatile long endTime;

    /**
     * Creates new {@link RunProcess} instance for the specified command.
     * @param command the program to execute and its arguments
     */
    public RunProcess(String... command) {
        this(null, command);
    }

    /**
     * Creates new {@link RunProcess} instance for the specified working directory and
     * command.
     * @param workingDirectory the working directory of the child process or {@code null}
     * to run in the working directory of the current Java process
     * @param command the program to execute and its arguments
     */
    public RunProcess(File workingDirectory, String... command) {
        this.workingDirectory = workingDirectory;
        this.command = command;
    }

    /**
     * Run the process with the given extra arguments.
     * @param waitForProcess whether to block until the child exits
     * @param args additional arguments appended to the command
     * @return the child's exit code when waiting, {@code 1} if interrupted
     * while waiting, otherwise {@code 5}
     * @throws IOException if the process cannot be started
     */
    public int run(boolean waitForProcess, String... args) throws IOException {
        return run(waitForProcess, Arrays.asList(args));
    }

    /**
     * Run the process with the given extra arguments.
     * @param waitForProcess whether to block until the child exits
     * @param args additional arguments appended to the command
     * @return the child's exit code when waiting, {@code 1} if interrupted
     * while waiting, otherwise {@code 5}
     * @throws IOException if the process cannot be started
     */
    protected int run(boolean waitForProcess, Collection<String> args)
            throws IOException {
        ProcessBuilder builder = new ProcessBuilder(this.command);
        builder.directory(this.workingDirectory);
        builder.command().addAll(args);
        builder.redirectErrorStream(true);
        boolean inheritedIO = inheritIO(builder);
        try {
            Process process = builder.start();
            this.process = process;
            if (!inheritedIO) {
                redirectOutput(process);
            }
            SignalUtils.attachSignalHandler(new Runnable() {
                @Override
                public void run() {
                    handleSigInt();
                }
            });
            if (waitForProcess) {
                try {
                    return process.waitFor();
                }
                catch (InterruptedException ex) {
                    // Restore the interrupt flag for callers.
                    Thread.currentThread().interrupt();
                    return 1;
                }
            }
            // Not waiting: the child keeps running, report a conventional
            // non-zero "launched but not awaited" value.
            return 5;
        }
        finally {
            if (waitForProcess) {
                this.endTime = System.currentTimeMillis();
                this.process = null;
            }
        }
    }

    // Try to inherit this JVM's stdio; returns false if not supported or
    // known-broken so the caller pumps the output manually instead.
    private boolean inheritIO(ProcessBuilder builder) {
        if (isInheritIOBroken()) {
            return false;
        }
        try {
            INHERIT_IO_METHOD.invoke(builder);
            return true;
        }
        catch (Exception ex) {
            // Method missing or invocation failed: fall back to redirection.
            return false;
        }
    }

    // There's a bug in the Windows VM (https://bugs.openjdk.java.net/browse/JDK-8023130)
    // that means we need to avoid inheritIO
    private static boolean isInheritIOBroken() {
        // Use a fixed locale: the default toLowerCase() would fail to match
        // "windows" under e.g. the Turkish locale, where 'I' lower-cases to
        // a dotless 'ı'.
        if (!System.getProperty("os.name", "none").toLowerCase(Locale.ENGLISH)
                .contains("windows")) {
            return false;
        }
        // Default to "" so a missing property cannot cause an NPE below.
        String runtime = System.getProperty("java.runtime.version", "");
        if (!runtime.startsWith("1.7")) {
            return false;
        }
        String[] tokens = runtime.split("_");
        if (tokens.length < 2) {
            return true; // No idea actually, shouldn't happen
        }
        try {
            Integer build = Integer.valueOf(tokens[1].split("[^0-9]")[0]);
            if (build < 60) {
                return true;
            }
        }
        catch (Exception ex) {
            // Unparsable build number: assume the broken variant.
            return true;
        }
        return false;
    }

    // Pump the child's combined stdout/stderr to System.out on a daemon-like
    // background thread until the stream ends.
    private void redirectOutput(Process process) {
        final BufferedReader reader = new BufferedReader(
                new InputStreamReader(process.getInputStream()));
        new Thread() {
            @Override
            public void run() {
                try {
                    String line = reader.readLine();
                    while (line != null) {
                        System.out.println(line);
                        // Flush immediately so each line is visible before
                        // the next (potentially blocking) read.
                        System.out.flush();
                        line = reader.readLine();
                    }
                    reader.close();
                }
                catch (Exception ex) {
                    // Ignore: the stream ends when the process dies.
                }
            }
        }.start();
    }

    /**
     * Return the running process.
     * @return the process or {@code null}
     */
    public Process getRunningProcess() {
        return this.process;
    }

    /**
     * Return if the process was stopped.
     * @return {@code true} if stopped
     */
    public boolean handleSigInt() {
        // if the process has just ended, probably due to this SIGINT, consider handled.
        if (hasJustEnded()) {
            return true;
        }
        return doKill();
    }

    /**
     * Kill this process.
     */
    public void kill() {
        doKill();
    }

    // Destroy the running process (if any) and wait for it to terminate.
    private boolean doKill() {
        Process process = this.process;
        if (process != null) {
            try {
                process.destroy();
                process.waitFor();
                this.process = null;
                return true;
            }
            catch (InterruptedException ex) {
                // Restore the interrupt flag for callers.
                Thread.currentThread().interrupt();
            }
        }
        return false;
    }

    /**
     * Return whether the process finished within the last
     * {@link #JUST_ENDED_LIMIT} milliseconds.
     * @return {@code true} if the process has just ended
     */
    public boolean hasJustEnded() {
        return System.currentTimeMillis() < (this.endTime + JUST_ENDED_LIMIT);
    }

}
| |
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.refactoring.util;
import com.intellij.codeInsight.hint.HintManager;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.fileTypes.FileTypeManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.ReadonlyStatusHandler;
import com.intellij.openapi.vfs.VfsUtilCore;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.VirtualFileVisitor;
import com.intellij.psi.PsiDirectory;
import com.intellij.psi.PsiDirectoryContainer;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.refactoring.RefactoringBundle;
import com.intellij.usageView.UsageInfo;
import com.intellij.util.containers.ContainerUtil;
import gnu.trove.THashSet;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
/**
 * Static helpers shared by refactoring implementations: error reporting
 * (modal dialog or in-editor hint, or an exception in unit-test mode) and
 * read-only/writable status checks for the PSI elements a refactoring is
 * about to modify.
 *
 * @author ven
 */
public class CommonRefactoringUtil {
// Utility class: not instantiable.
private CommonRefactoringUtil() { }
// Shows a modal refactoring error dialog; in unit-test mode it throws
// instead so tests fail loudly rather than hang on a dialog.
public static void showErrorMessage(String title, String message, @Nullable String helpId, @NotNull Project project) {
if (ApplicationManager.getApplication().isUnitTestMode()) throw new RuntimeException(message);
RefactoringMessageDialog dialog = new RefactoringMessageDialog(title, message, helpId, "OptionPane.errorIcon", false, project);
dialog.show();
}
// order of usages across different files is irrelevant
// Sorts usages by descending start offset (right-to-left), nulls last,
// so edits applied in order do not invalidate later offsets.
public static void sortDepthFirstRightLeftOrder(final UsageInfo[] usages) {
Arrays.sort(usages, (usage1, usage2) -> {
PsiElement element1 = usage1.getElement(), element2 = usage2.getElement();
if (element1 == element2) return 0;
if (element1 == null) return 1;
if (element2 == null) return -1;
return element2.getTextRange().getStartOffset() - element1.getTextRange().getStartOffset();
});
}
/**
 * Fatal refactoring problem during unit test run. Corresponds to message of modal dialog shown during user driven refactoring.
 */
public static class RefactoringErrorHintException extends RuntimeException {
public RefactoringErrorHintException(String message) {
super(message);
}
}
// Shows the error as an editor hint when an editor is available, otherwise
// falls back to the modal dialog; throws in unit-test mode.
public static void showErrorHint(@NotNull Project project,
@Nullable Editor editor,
@NotNull @Nls String message,
@NotNull @Nls String title,
@Nullable String helpId) {
if (ApplicationManager.getApplication().isUnitTestMode()) throw new RefactoringErrorHintException(message);
ApplicationManager.getApplication().invokeLater(() -> {
// No attached editor component: can't anchor a hint, use the dialog.
if (editor == null || editor.getComponent().getRootPane() == null) {
showErrorMessage(title, message, helpId, project);
}
else {
HintManager.getInstance().showErrorHint(editor, message);
}
});
}
// Thin delegation to StringUtil for HTML-emphasized text.
public static String htmlEmphasize(@NotNull String text) {
return StringUtil.htmlEmphasize(text);
}
// Ensures the element's containing file is writable (may trigger VCS
// checkout); returns false when it stays read-only.
// NOTE(review): assumes getContainingFile() is non-null here — confirm callers.
public static boolean checkReadOnlyStatus(@NotNull PsiElement element) {
final VirtualFile file = element.getContainingFile().getVirtualFile();
return file != null && !ReadonlyStatusHandler.getInstance(element.getProject()).ensureFilesWritable(file).hasReadonlyFiles();
}
public static boolean checkReadOnlyStatus(@NotNull Project project, @NotNull PsiElement element) {
return checkReadOnlyStatus(element, project, RefactoringBundle.message("refactoring.cannot.be.performed"));
}
public static boolean checkReadOnlyStatus(@NotNull Project project, @NotNull PsiElement... elements) {
return checkReadOnlyStatus(project, Collections.<PsiElement>emptySet(), Arrays.asList(elements), RefactoringBundle.message("refactoring.cannot.be.performed"), true);
}
public static boolean checkReadOnlyStatus(@NotNull Project project, @NotNull Collection<? extends PsiElement> elements, boolean notifyOnFail) {
return checkReadOnlyStatus(project, Collections.<PsiElement>emptySet(), elements, RefactoringBundle.message("refactoring.cannot.be.performed"), notifyOnFail);
}
// Fast path: writable elements skip the full check entirely.
public static boolean checkReadOnlyStatus(@NotNull PsiElement element, @NotNull Project project, @NotNull String messagePrefix) {
return element.isWritable() || checkReadOnlyStatus(project, Collections.<PsiElement>emptySet(), Collections.singleton(element), messagePrefix, true);
}
// "Recursively" variants treat the given elements as roots whose whole
// directory subtrees must be writable.
public static boolean checkReadOnlyStatusRecursively(@NotNull Project project, @NotNull Collection<? extends PsiElement> elements) {
return checkReadOnlyStatus(project, elements, Collections.<PsiElement>emptySet(), RefactoringBundle.message("refactoring.cannot.be.performed"), false);
}
public static boolean checkReadOnlyStatusRecursively(@NotNull Project project, @NotNull Collection<? extends PsiElement> elements, boolean notifyOnFail) {
return checkReadOnlyStatus(project, elements, Collections.<PsiElement>emptySet(), RefactoringBundle.message("refactoring.cannot.be.performed"), notifyOnFail);
}
public static boolean checkReadOnlyStatus(@NotNull Project project,
@NotNull Collection<? extends PsiElement> recursive,
@NotNull Collection<? extends PsiElement> flat,
boolean notifyOnFail) {
return checkReadOnlyStatus(project, recursive, flat, RefactoringBundle.message("refactoring.cannot.be.performed"), notifyOnFail);
}
// Core implementation: collects read-only files from the flat and
// recursive element sets, asks the ReadonlyStatusHandler to make them
// writable (e.g. VCS checkout), and optionally reports what failed.
private static boolean checkReadOnlyStatus(@NotNull Project project,
@NotNull Collection<? extends PsiElement> recursive,
@NotNull Collection<? extends PsiElement> flat,
@NotNull String messagePrefix,
boolean notifyOnFail) {
Collection<VirtualFile> readonly = new THashSet<>(); // not writable, but could be checked out
Collection<VirtualFile> failed = new THashSet<>(); // those located in read-only filesystem
boolean seenNonWritablePsiFilesWithoutVirtualFile =
checkReadOnlyStatus(flat, false, readonly, failed) || checkReadOnlyStatus(recursive, true, readonly, failed);
ReadonlyStatusHandler.OperationStatus status = ReadonlyStatusHandler.getInstance(project).ensureFilesWritable(readonly);
ContainerUtil.addAll(failed, status.getReadonlyFiles());
if (notifyOnFail && (!failed.isEmpty() || seenNonWritablePsiFilesWithoutVirtualFile && readonly.isEmpty())) {
StringBuilder message = new StringBuilder(messagePrefix).append('\n');
int i = 0;
for (VirtualFile virtualFile : failed) {
String subj = RefactoringBundle.message(virtualFile.isDirectory() ? "directory.description" : "file.description", virtualFile.getPresentableUrl());
if (virtualFile.getFileSystem().isReadOnly()) {
message.append(RefactoringBundle.message("0.is.located.in.a.jar.file", subj)).append('\n');
}
else {
message.append(RefactoringBundle.message("0.is.read.only", subj)).append('\n');
}
// Cap the listing to keep the dialog readable.
if (i++ > 20) {
message.append("...\n");
break;
}
}
showErrorMessage(RefactoringBundle.message("error.title"), message.toString(), null, project);
return false;
}
return failed.isEmpty();
}
// Classifies each element's file(s) into "readonly" (candidates for
// checkout) or "failed" (on a read-only filesystem, e.g. inside a jar).
// Returns true when a non-writable PSI file with no virtual file was seen.
private static boolean checkReadOnlyStatus(Collection<? extends PsiElement> elements,
boolean recursively,
Collection<VirtualFile> readonly,
Collection<VirtualFile> failed) {
boolean seenNonWritablePsiFilesWithoutVirtualFile = false;
for (PsiElement element : elements) {
if (element instanceof PsiDirectory) {
final PsiDirectory dir = (PsiDirectory)element;
final VirtualFile vFile = dir.getVirtualFile();
if (vFile.getFileSystem().isReadOnly()) {
failed.add(vFile);
}
else if (recursively) {
collectReadOnlyFiles(vFile, readonly);
}
else {
readonly.add(vFile);
}
}
else if (element instanceof PsiDirectoryContainer) {
// E.g. a package: check every directory it maps to.
final PsiDirectory[] directories = ((PsiDirectoryContainer)element).getDirectories();
for (PsiDirectory directory : directories) {
VirtualFile virtualFile = directory.getVirtualFile();
if (recursively) {
if (virtualFile.getFileSystem().isReadOnly()) {
failed.add(virtualFile);
}
else {
collectReadOnlyFiles(virtualFile, readonly);
}
}
else if (virtualFile.getFileSystem().isReadOnly()) {
failed.add(virtualFile);
}
else {
readonly.add(virtualFile);
}
}
}
else {
PsiFile file = element.getContainingFile();
if (file == null) {
if (!element.isWritable()) {
seenNonWritablePsiFilesWithoutVirtualFile = true;
}
}
else {
final VirtualFile vFile = file.getVirtualFile();
if (vFile != null) {
readonly.add(vFile);
}
else if (!element.isWritable()) {
seenNonWritablePsiFilesWithoutVirtualFile = true;
}
}
}
}
return seenNonWritablePsiFilesWithoutVirtualFile;
}
// Recursively collects non-writable, non-ignored files under vFile;
// ignored directories are not descended into.
public static void collectReadOnlyFiles(@NotNull VirtualFile vFile, @NotNull final Collection<VirtualFile> list) {
final FileTypeManager fileTypeManager = FileTypeManager.getInstance();
VfsUtilCore.visitChildrenRecursively(vFile, new VirtualFileVisitor(VirtualFileVisitor.NO_FOLLOW_SYMLINKS) {
@Override
public boolean visitFile(@NotNull VirtualFile file) {
final boolean ignored = fileTypeManager.isFileIgnored(file);
if (!file.isWritable() && !ignored) {
list.add(file);
}
return !ignored;
}
});
}
// Thin delegation to StringUtil.
public static String capitalize(@NotNull String text) {
return StringUtil.capitalize(text);
}
// Returns true if `resolved` is inside (or equal to) any of the scopes.
public static boolean isAncestor(@NotNull PsiElement resolved, @NotNull Collection<? extends PsiElement> scopes) {
for (final PsiElement scope : scopes) {
if (PsiTreeUtil.isAncestor(scope, resolved, false)) return true;
}
return false;
}
}
| |
/*
* Copyright 2016-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.cxx;
import static org.junit.Assert.assertThat;
import com.facebook.buck.cxx.toolchain.CxxPlatform;
import com.facebook.buck.cxx.toolchain.CxxPlatformUtils;
import com.facebook.buck.cxx.toolchain.CxxPlatforms;
import com.facebook.buck.cxx.toolchain.linker.Linker;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.BuildTargetFactory;
import com.facebook.buck.model.FlavorDomain;
import com.facebook.buck.parser.NoSuchBuildTargetException;
import com.facebook.buck.rules.BuildRuleResolver;
import com.facebook.buck.rules.DefaultSourcePathResolver;
import com.facebook.buck.rules.DefaultTargetNodeToBuildRuleTransformer;
import com.facebook.buck.rules.SingleThreadedBuildRuleResolver;
import com.facebook.buck.rules.SourcePathResolver;
import com.facebook.buck.rules.SourcePathRuleFinder;
import com.facebook.buck.rules.TargetGraph;
import com.facebook.buck.rules.TargetGraphAndBuildTargets;
import com.facebook.buck.rules.TargetNode;
import com.facebook.buck.rules.args.Arg;
import com.facebook.buck.rules.coercer.DefaultTypeCoercerFactory;
import com.facebook.buck.rules.macros.StringWithMacrosUtils;
import com.facebook.buck.shell.Genrule;
import com.facebook.buck.testutil.OptionalMatchers;
import com.facebook.buck.testutil.TargetGraphFactory;
import com.facebook.buck.util.Optionals;
import com.facebook.buck.versions.FixedTargetNodeTranslator;
import com.facebook.buck.versions.NaiveVersionSelector;
import com.facebook.buck.versions.TargetNodeTranslator;
import com.facebook.buck.versions.VersionPropagatorBuilder;
import com.facebook.buck.versions.VersionedAliasBuilder;
import com.facebook.buck.versions.VersionedTargetGraphBuilder;
import com.google.common.base.CaseFormat;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import java.util.Optional;
import java.util.concurrent.ForkJoinPool;
import java.util.regex.Pattern;
import org.hamcrest.Matchers;
import org.junit.Test;
/**
 * Tests for cxx_genrule: macro expansion ($(cc), $(ldflags-*), $(cppflags),
 * $(location ...)), parse-time dep inference, and build-target translation
 * in versioned target graphs.
 */
public class CxxGenruleDescriptionTest {
// Single-threaded pool for deterministic versioned-graph transformation.
private static final ForkJoinPool POOL = new ForkJoinPool(1);
// Each tool macro should pull in the default platform's parse-time deps.
@Test
public void toolPlatformParseTimeDeps() {
for (String macro : ImmutableSet.of("ld", "cc", "cxx")) {
CxxGenruleBuilder builder =
new CxxGenruleBuilder(BuildTargetFactory.newInstance("//:rule#default"))
.setCmd(String.format("$(%s)", macro))
.setOut("foo");
assertThat(
ImmutableSet.copyOf(builder.findImplicitDeps()),
Matchers.equalTo(
ImmutableSet.copyOf(
CxxPlatforms.getParseTimeDeps(CxxPlatformUtils.DEFAULT_PLATFORM))));
}
}
// $(ldflags-<style>-filter <pattern> <targets>) should keep only linker
// flags from targets matching the pattern (//:a), dropping //:b's flags.
@Test
public void ldFlagsFilter() throws Exception {
for (Linker.LinkableDepType style : Linker.LinkableDepType.values()) {
CxxLibraryBuilder bBuilder =
new CxxLibraryBuilder(BuildTargetFactory.newInstance("//:b"))
.setExportedLinkerFlags(ImmutableList.of(StringWithMacrosUtils.format("-b")));
CxxLibraryBuilder aBuilder =
new CxxLibraryBuilder(BuildTargetFactory.newInstance("//:a"))
.setExportedDeps(ImmutableSortedSet.of(bBuilder.getTarget()))
.setExportedLinkerFlags(ImmutableList.of(StringWithMacrosUtils.format("-a")));
CxxGenruleBuilder builder =
new CxxGenruleBuilder(
BuildTargetFactory.newInstance(
"//:rule#" + CxxPlatformUtils.DEFAULT_PLATFORM.getFlavor()))
.setOut("out")
.setCmd(
String.format(
"$(ldflags-%s-filter //:a //:a)",
CaseFormat.UPPER_UNDERSCORE.to(CaseFormat.LOWER_HYPHEN, style.toString())));
TargetGraph targetGraph =
TargetGraphFactory.newInstance(bBuilder.build(), aBuilder.build(), builder.build());
BuildRuleResolver resolver =
new SingleThreadedBuildRuleResolver(
targetGraph, new DefaultTargetNodeToBuildRuleTransformer());
SourcePathResolver pathResolver =
DefaultSourcePathResolver.from(new SourcePathRuleFinder(resolver));
bBuilder.build(resolver);
aBuilder.build(resolver);
Genrule genrule = (Genrule) builder.build(resolver);
assertThat(
Joiner.on(' ')
.join(Arg.stringify(ImmutableList.of(genrule.getCmd().get()), pathResolver)),
Matchers.containsString("-a"));
assertThat(
Joiner.on(' ')
.join(Arg.stringify(ImmutableList.of(genrule.getCmd().get()), pathResolver)),
Matchers.not(Matchers.containsString("-b")));
}
}
// $(cppflags)/$(cxxppflags) without arguments expand to the platform's
// preprocessor flags.
@Test
public void cppflagsNoArgs() throws Exception {
CxxPlatform cxxPlatform =
CxxPlatformUtils.DEFAULT_PLATFORM.withCppflags("-cppflag").withCxxppflags("-cxxppflag");
CxxGenruleBuilder builder =
new CxxGenruleBuilder(
BuildTargetFactory.newInstance("//:rule#" + cxxPlatform.getFlavor()),
new FlavorDomain<>(
"C/C++ Platform", ImmutableMap.of(cxxPlatform.getFlavor(), cxxPlatform)))
.setOut("out")
.setCmd("$(cppflags) $(cxxppflags)");
TargetGraph targetGraph = TargetGraphFactory.newInstance(builder.build());
BuildRuleResolver resolver =
new SingleThreadedBuildRuleResolver(
targetGraph, new DefaultTargetNodeToBuildRuleTransformer());
SourcePathResolver pathResolver =
DefaultSourcePathResolver.from(new SourcePathRuleFinder(resolver));
Genrule genrule = (Genrule) builder.build(resolver);
assertThat(
Joiner.on(' ').join(Arg.stringify(ImmutableList.of(genrule.getCmd().get()), pathResolver)),
Matchers.containsString("-cppflag -cxxppflag"));
}
// $(cflags)/$(cxxflags) without arguments expand to the platform's
// compiler (and assembler) flags.
@Test
public void cflagsNoArgs() throws Exception {
CxxPlatform cxxPlatform =
CxxPlatformUtils.DEFAULT_PLATFORM
.withAsflags("-asflag")
.withCflags("-cflag")
.withCxxflags("-cxxflag");
CxxGenruleBuilder builder =
new CxxGenruleBuilder(
BuildTargetFactory.newInstance("//:rule#" + cxxPlatform.getFlavor()),
new FlavorDomain<>(
"C/C++ Platform", ImmutableMap.of(cxxPlatform.getFlavor(), cxxPlatform)))
.setOut("out")
.setCmd("$(cflags) $(cxxflags)");
TargetGraph targetGraph = TargetGraphFactory.newInstance(builder.build());
BuildRuleResolver resolver =
new SingleThreadedBuildRuleResolver(
targetGraph, new DefaultTargetNodeToBuildRuleTransformer());
SourcePathResolver pathResolver =
DefaultSourcePathResolver.from(new SourcePathRuleFinder(resolver));
Genrule genrule = (Genrule) builder.build(resolver);
for (String expected : ImmutableList.of("-asflag", "-cflag", "-cxxflag")) {
assertThat(
Joiner.on(' ')
.join(Arg.stringify(ImmutableList.of(genrule.getCmd().get()), pathResolver)),
Matchers.containsString(expected));
}
}
// Build targets embedded in macro arguments should be rewritten when the
// constructor arg is translated (e.g. //hello:world -> //something:else).
@Test
public void targetTranslateConstructorArg() throws NoSuchBuildTargetException {
BuildTarget target = BuildTargetFactory.newInstance("//foo:lib");
BuildTarget original = BuildTargetFactory.newInstance("//hello:world");
BuildTarget translated = BuildTargetFactory.newInstance("//something:else");
CxxGenruleBuilder builder =
new CxxGenruleBuilder(target)
.setCmd(String.format("$(cppflags %s) $(cxxppflags)", original))
.setOut("foo");
TargetNode<CxxGenruleDescriptionArg, CxxGenruleDescription> node = builder.build();
TargetNodeTranslator translator =
new FixedTargetNodeTranslator(
new DefaultTypeCoercerFactory(), ImmutableMap.of(original, translated));
Optional<CxxGenruleDescriptionArg> translatedArg =
node.getDescription()
.translateConstructorArg(
target, node.getCellNames(), translator, node.getConstructorArg());
assertThat(
translatedArg.get().getCmd().get(),
Matchers.equalTo("$(cppflags //something:else) $(cxxppflags)"));
}
// A reference to a versioned_alias target should resolve to the selected
// version's concrete target in the versioned graph.
@Test
public void versionedTargetReferenceIsTranslatedInVersionedGraph() throws Exception {
VersionPropagatorBuilder dep = new VersionPropagatorBuilder("//:dep");
VersionedAliasBuilder versionedDep =
new VersionedAliasBuilder("//:versioned").setVersions("1.0", "//:dep");
CxxGenruleBuilder genruleBuilder =
new CxxGenruleBuilder(BuildTargetFactory.newInstance("//:genrule"))
.setCmd("$(ldflags-shared //:versioned)")
.setOut("foo");
TargetGraph graph =
TargetGraphFactory.newInstance(dep.build(), versionedDep.build(), genruleBuilder.build());
TargetGraphAndBuildTargets transformed =
VersionedTargetGraphBuilder.transform(
new NaiveVersionSelector(),
TargetGraphAndBuildTargets.of(graph, ImmutableSet.of(genruleBuilder.getTarget())),
POOL,
new DefaultTypeCoercerFactory());
CxxGenruleDescriptionArg arg =
extractArg(
transformed.getTargetGraph().get(genruleBuilder.getTarget()),
CxxGenruleDescriptionArg.class);
assertThat(
arg.getCmd(), OptionalMatchers.present(Matchers.equalTo("$(ldflags-shared //:dep)")));
}
// A reference to a version-propagating target should be rewritten to its
// version-flavored form (//:dep#v<hash>) in the versioned graph.
@Test
public void versionPropagatorTargetReferenceIsTranslatedInVersionedGraph() throws Exception {
VersionPropagatorBuilder transitiveDep = new VersionPropagatorBuilder("//:transitive_dep");
VersionedAliasBuilder versionedDep =
new VersionedAliasBuilder("//:versioned").setVersions("1.0", "//:transitive_dep");
VersionPropagatorBuilder dep = new VersionPropagatorBuilder("//:dep").setDeps("//:versioned");
CxxGenruleBuilder genruleBuilder =
new CxxGenruleBuilder(BuildTargetFactory.newInstance("//:genrule"))
.setCmd("$(ldflags-shared //:dep)")
.setOut("foo");
TargetGraph graph =
TargetGraphFactory.newInstance(
transitiveDep.build(), versionedDep.build(), dep.build(), genruleBuilder.build());
TargetGraphAndBuildTargets transformed =
VersionedTargetGraphBuilder.transform(
new NaiveVersionSelector(),
TargetGraphAndBuildTargets.of(graph, ImmutableSet.of(genruleBuilder.getTarget())),
POOL,
new DefaultTypeCoercerFactory());
CxxGenruleDescriptionArg arg =
extractArg(
transformed.getTargetGraph().get(genruleBuilder.getTarget()),
CxxGenruleDescriptionArg.class);
assertThat(
arg.getCmd(),
OptionalMatchers.present(
Matchers.matchesPattern(
Pattern.quote("$(ldflags-shared //:dep#v") + "[a-zA-Z0-9]*" + Pattern.quote(")"))));
}
// $(location //:dep) on a cxx_genrule dep should expand to the absolute
// path of the dep's platform-specific underlying genrule output.
@Test
public void cxxGenruleInLocationMacro() throws Exception {
CxxGenruleBuilder depBuilder =
new CxxGenruleBuilder(BuildTargetFactory.newInstance("//:dep")).setOut("out");
CxxGenruleBuilder builder =
new CxxGenruleBuilder(BuildTargetFactory.newInstance("//:rule"))
.setCmd("$(location //:dep)")
.setOut("out");
TargetGraph targetGraph = TargetGraphFactory.newInstance(depBuilder.build(), builder.build());
BuildRuleResolver resolver =
new SingleThreadedBuildRuleResolver(
targetGraph, new DefaultTargetNodeToBuildRuleTransformer());
SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(resolver);
SourcePathResolver pathResolver =
DefaultSourcePathResolver.from(new SourcePathRuleFinder(resolver));
CxxGenrule dep = (CxxGenrule) resolver.requireRule(depBuilder.getTarget());
CxxGenrule rule = (CxxGenrule) resolver.requireRule(builder.getTarget());
Genrule genrule =
(Genrule)
ruleFinder
.getRule(rule.getGenrule(CxxPlatformUtils.DEFAULT_PLATFORM))
.orElseThrow(AssertionError::new);
assertThat(
Arg.stringify(Optionals.toStream(genrule.getCmd()).toOnceIterable(), pathResolver),
Matchers.contains(
pathResolver
.getAbsolutePath(dep.getGenrule(CxxPlatformUtils.DEFAULT_PLATFORM))
.toString()));
}
// Casts a node's constructor arg to the given type, failing the test with
// a descriptive message if the node holds a different arg type.
private static <U> U extractArg(TargetNode<?, ?> node, Class<U> clazz) {
return node.castArg(clazz)
.orElseThrow(
() ->
new AssertionError(
String.format(
"%s: expected constructor arg to be of type %s (was %s)",
node, clazz, node.getConstructorArg().getClass())))
.getConstructorArg();
}
}
| |
/*
* Copyright 2011-2017 ETH Zurich. All Rights Reserved.
*
* This software is the proprietary information of ETH Zurich.
* Use is subject to license terms.
*/
package org.tinspin.index.array;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import org.tinspin.index.QueryIterator;
import org.tinspin.index.QueryIteratorKNN;
import org.tinspin.index.RectangleEntry;
import org.tinspin.index.RectangleEntryDist;
import org.tinspin.index.RectangleIndex;
/**
 * Plain-array implementation of {@link RectangleIndex}: no actual indexing,
 * every operation is a linear scan over the backing arrays. Intended as a
 * simple baseline/reference structure.
 *
 * <p>Removal leaves {@code null} holes in the coordinate array, so every scan
 * must skip {@code null} slots (previously only {@code remove} did, causing
 * NPEs in {@code queryExact}, {@code update}, intersection queries and kNN
 * queries after a removal).
 */
public class RectArray<T> implements RectangleIndex<T> {

    // phc[2*i] holds the lower corner of entry i, phc[2*i+1] its upper corner.
    private final double[][] phc;
    private final int dims;
    // Declared capacity / reported size; not decremented by remove().
    private int N;
    private RectangleEntry<T>[] values;
    // Next free insertion slot.
    private int insPos = 0;

    /**
     * Setup of an simple array data structure (no indexing).
     *
     * @param dims dimensions
     * @param size size
     */
    @SuppressWarnings("unchecked")
    public RectArray(int dims, int size) {
        this.N = size;
        this.dims = dims;
        phc = new double[2 * N][dims];
        values = new RectangleEntry[N];
    }

    @Override
    public void insert(double[] lower, double[] upper, T value) {
        // Coordinates are copied into phc; the KnnEntry keeps the caller's
        // arrays as its lower()/upper() views.
        System.arraycopy(lower, 0, phc[insPos * 2], 0, dims);
        System.arraycopy(upper, 0, phc[insPos * 2 + 1], 0, dims);
        values[insPos] = new KnnEntry<>(lower, upper, value, -1);
        insPos++;
    }

    @Override
    public T queryExact(double[] lower, double[] upper) {
        for (int j = 0; j < N; j++) {
            // Skip slots emptied by remove() to avoid an NPE in eq().
            if (phc[j * 2] != null && eq(phc[j * 2], lower) && eq(phc[j * 2 + 1], upper)) {
                return values[j].value();
            }
        }
        return null;
    }

    // Element-wise equality of two coordinate vectors.
    private boolean eq(double[] a, double[] b) {
        for (int i = 0; i < a.length; i++) {
            if (a[i] != b[i]) {
                return false;
            }
        }
        return true;
    }

    // true iff a[i] >= b[i] for all dimensions.
    private boolean geq(double[] a, double[] b) {
        for (int i = 0; i < a.length; i++) {
            if (a[i] < b[i]) {
                return false;
            }
        }
        return true;
    }

    // true iff a[i] <= b[i] for all dimensions.
    private boolean leq(double[] a, double[] b) {
        for (int i = 0; i < a.length; i++) {
            if (a[i] > b[i]) {
                return false;
            }
        }
        return true;
    }

    @Override
    public AQueryIterator queryIntersect(double[] min, double[] max) {
        return new AQueryIterator(min, max);
    }

    /**
     * Window-query iterator: materializes all intersecting entries eagerly
     * on reset() and then iterates the snapshot.
     */
    private class AQueryIterator implements QueryIterator<RectangleEntry<T>> {

        private Iterator<RectangleEntry<T>> it;

        public AQueryIterator(double[] min, double[] max) {
            reset(min, max);
        }

        @Override
        public boolean hasNext() {
            return it.hasNext();
        }

        @Override
        public RectangleEntry<T> next() {
            return it.next();
        }

        @Override
        public void reset(double[] min, double[] max) {
            ArrayList<RectangleEntry<T>> results = new ArrayList<>();
            for (int i = 0; i < N; i++) {
                // Intersection test (lower <= max && upper >= min),
                // skipping removed slots.
                if (phc[i * 2] != null && leq(phc[i * 2], max) && geq(phc[i * 2 + 1], min)) {
                    results.add(values[i]);
                }
            }
            it = results.iterator();
        }
    }

    @Override
    public QueryIterator<RectangleEntry<T>> iterator() {
        // Full iteration is not supported by this baseline structure.
        throw new UnsupportedOperationException();
    }

    @Override
    public AQueryIteratorKNN queryKNN(double[] center, int k) {
        return new AQueryIteratorKNN(center, k);
    }

    /**
     * kNN iterator: materializes the k nearest entries eagerly on reset().
     */
    private class AQueryIteratorKNN implements QueryIteratorKNN<RectangleEntryDist<T>> {

        private Iterator<RectangleEntryDist<T>> it;

        public AQueryIteratorKNN(double[] center, int k) {
            reset(center, k);
        }

        @Override
        public boolean hasNext() {
            return it.hasNext();
        }

        @Override
        public RectangleEntryDist<T> next() {
            return it.next();
        }

        @SuppressWarnings({ "unchecked", "rawtypes" })
        @Override
        public void reset(double[] center, int k) {
            it = ((List) knnQuery(center, k)).iterator();
        }
    }

    // Linear-scan kNN: keeps a sorted list of the k closest entries seen so
    // far (distance measured to the rectangle's edge, 0 inside).
    private ArrayList<KnnEntry<T>> knnQuery(double[] center, int k) {
        ArrayList<KnnEntry<T>> ret = new ArrayList<>(k);
        for (int i = 0; i < phc.length / 2; i++) {
            double[] min = phc[i * 2];
            double[] max = phc[i * 2 + 1];
            if (min == null) {
                // Slot emptied by remove().
                continue;
            }
            double dist = distREdge(center, min, max);
            if (ret.size() < k) {
                ret.add(new KnnEntry<>(min, max, values[i].value(), dist));
                ret.sort(COMP);
            } else if (ret.get(k - 1).dist > dist) {
                // Closer than the current k-th: replace it and re-sort.
                ret.remove(k - 1);
                ret.add(new KnnEntry<>(min, max, values[i].value(), dist));
                ret.sort(COMP);
            }
        }
        return ret;
    }

    // Euclidean distance from a point to the closest edge of a rectangle;
    // 0 if the point lies inside.
    private static double distREdge(double[] center, double[] rLower, double[] rUpper) {
        double dist = 0;
        for (int i = 0; i < center.length; i++) {
            double d = 0;
            if (center[i] > rUpper[i]) {
                d = center[i] - rUpper[i];
            } else if (center[i] < rLower[i]) {
                d = rLower[i] - center[i];
            }
            dist += d * d;
        }
        return Math.sqrt(dist);
    }

    // Orders kNN candidates by ascending distance.
    private final Comparator<KnnEntry<T>> COMP = new Comparator<KnnEntry<T>>() {
        @Override
        public int compare(KnnEntry<T> o1, KnnEntry<T> o2) {
            return o1.compareTo(o2);
        }
    };

    /**
     * Entry holder: rectangle, value and (for kNN results) the distance to
     * the query point; -1 for plain stored entries.
     */
    private static class KnnEntry<T> implements Comparable<KnnEntry<T>>, RectangleEntryDist<T> {
        private final double[] min;
        private final double[] max;
        private final T val;
        private final double dist;

        KnnEntry(double[] min, double[] max, T val, double dist) {
            this.min = min;
            this.max = max;
            this.val = val;
            this.dist = dist;
        }

        @Override
        public int compareTo(KnnEntry<T> o) {
            // Double.compare avoids the manual sign logic and handles all
            // finite values identically to the original comparison.
            return Double.compare(dist, o.dist);
        }

        @Override
        public String toString() {
            return "d=" + dist + ":" + Arrays.toString(min) + "/" + Arrays.toString(max);
        }

        @Override
        public double[] lower() {
            return min;
        }

        @Override
        public double[] upper() {
            return max;
        }

        @Override
        public T value() {
            return val;
        }

        @Override
        public double dist() {
            return dist;
        }
    }

    @Override
    public T update(double[] lo1, double[] up1, double[] lo2, double[] up2) {
        for (int i = 0; i < N; i++) {
            // Skip removed slots to avoid an NPE in eq().
            if (phc[i * 2] != null && eq(phc[i * 2], lo1) && eq(phc[(i * 2) + 1], up1)) {
                System.arraycopy(lo2, 0, phc[i * 2], 0, dims);
                System.arraycopy(up2, 0, phc[(i * 2) + 1], 0, dims);
                // NOTE(review): the stored entry's lower()/upper() views still
                // reference the original insert arrays — confirm whether
                // callers rely on them being updated here.
                return values[i].value();
            }
        }
        return null;
    }

    @Override
    public T remove(double[] lower, double[] upper) {
        for (int i = 0; i < N; i++) {
            if (phc[i * 2] != null && eq(phc[i * 2], lower)
                    && eq(phc[(i * 2) + 1], upper)) {
                phc[i * 2] = null;
                phc[(i * 2) + 1] = null;
                T val = values[i].value();
                // Release the entry so the value is not retained forever.
                values[i] = null;
                return val;
            }
        }
        return null;
    }

    @Override
    public String toString() {
        return "NaiveArray";
    }

    @Override
    public int getDims() {
        return dims;
    }

    @Override
    public int size() {
        return N;
    }

    @Override
    public void clear() {
        // Drops all entries; note that insert() is not supported after
        // clear() because the coordinate rows are nulled and N stays 0.
        for (int i = 0; i < N; i++) {
            values[i] = null;
        }
        for (int i = 0; i < 2 * N; i++) {
            phc[i] = null;
        }
        N = 0;
    }

    @Override
    public Object getStats() {
        // No statistics are collected by this baseline structure.
        throw new UnsupportedOperationException();
    }

    @Override
    public int getNodeCount() {
        return 1;
    }

    @Override
    public int getDepth() {
        return 0;
    }

    @Override
    public String toStringTree() {
        StringBuilder s = new StringBuilder();
        for (int i = 0; i < N; i++) {
            s.append(Arrays.toString(phc[i * 2])).append("/")
                    .append(Arrays.toString(phc[i * 2 + 1]))
                    .append(" v=").append(values[i]);
        }
        return s.toString();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.executiongraph;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.runtime.akka.AkkaUtils;
import org.apache.flink.runtime.clusterframework.types.AllocationID;
import org.apache.flink.runtime.clusterframework.types.ResourceID;
import org.apache.flink.runtime.deployment.ResultPartitionDeploymentDescriptor;
import org.apache.flink.runtime.deployment.TaskDeploymentDescriptor;
import org.apache.flink.runtime.deployment.TaskDeploymentDescriptorFactory;
import org.apache.flink.runtime.execution.ExecutionState;
import org.apache.flink.runtime.executiongraph.utils.SimpleAckingTaskManagerGateway;
import org.apache.flink.runtime.io.network.partition.ResultPartitionType;
import org.apache.flink.runtime.jobgraph.IntermediateDataSetID;
import org.apache.flink.runtime.jobgraph.JobVertexID;
import org.apache.flink.runtime.jobgraph.ScheduleMode;
import org.apache.flink.runtime.jobmaster.LogicalSlot;
import org.apache.flink.runtime.jobmaster.TestingLogicalSlot;
import org.apache.flink.runtime.jobmaster.TestingLogicalSlotBuilder;
import org.apache.flink.runtime.messages.Acknowledge;
import org.apache.flink.runtime.taskmanager.TaskManagerLocation;
import org.apache.flink.runtime.testutils.DirectScheduledExecutorService;
import org.apache.flink.util.TestLogger;
import org.junit.Test;
import java.net.InetAddress;
import java.util.Collection;
import java.util.concurrent.CompletableFuture;
import static org.apache.flink.runtime.executiongraph.ExecutionGraphTestUtils.getExecutionVertex;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
 * Tests deployment of an {@link ExecutionVertex} into a {@link LogicalSlot}:
 * the CREATED -&gt; DEPLOYING transition, rejection of repeated deployment,
 * synchronous and asynchronous submission failures, external failure while
 * deploying, and the lazy-scheduling flag on produced partition descriptors.
 */
public class ExecutionVertexDeploymentTest extends TestLogger {
/** Message carried by the failing gateway's exception; asserted on the failure cause. */
private static final String ERROR_MESSAGE = "test_failure_error_message";
/** Deploying moves the vertex to DEPLOYING; a second deploy must be rejected. */
@Test
public void testDeployCall() {
try {
final JobVertexID jid = new JobVertexID();
final ExecutionJobVertex ejv = getExecutionVertex(jid);
final LogicalSlot slot = new TestingLogicalSlotBuilder().createTestingLogicalSlot();
final ExecutionVertex vertex = new ExecutionVertex(ejv, 0, new IntermediateResult[0],
AkkaUtils.getDefaultTimeout());
assertEquals(ExecutionState.CREATED, vertex.getExecutionState());
vertex.deployToSlot(slot);
assertEquals(ExecutionState.DEPLOYING, vertex.getExecutionState());
// no repeated scheduling
try {
vertex.deployToSlot(slot);
fail("Scheduled from wrong state");
} catch (IllegalStateException e) {
// as expected
}
assertNull(vertex.getFailureCause());
assertTrue(vertex.getStateTimestamp(ExecutionState.CREATED) > 0);
assertTrue(vertex.getStateTimestamp(ExecutionState.DEPLOYING) > 0);
} catch (Exception e) {
e.printStackTrace();
fail(e.getMessage());
}
}
/**
 * Same as {@link #testDeployCall()} but with a direct (synchronous) executor,
 * so the submit call completes before the assertions run; RUNNING must not
 * have been reached.
 */
@Test
public void testDeployWithSynchronousAnswer() {
try {
final JobVertexID jid = new JobVertexID();
final ExecutionJobVertex ejv = getExecutionVertex(jid, new DirectScheduledExecutorService());
final LogicalSlot slot = new TestingLogicalSlotBuilder().createTestingLogicalSlot();
final ExecutionVertex vertex = new ExecutionVertex(ejv, 0, new IntermediateResult[0],
AkkaUtils.getDefaultTimeout());
assertEquals(ExecutionState.CREATED, vertex.getExecutionState());
vertex.deployToSlot(slot);
assertEquals(ExecutionState.DEPLOYING, vertex.getExecutionState());
// no repeated scheduling
try {
vertex.deployToSlot(slot);
fail("Scheduled from wrong state");
} catch (IllegalStateException e) {
// as expected
}
assertNull(vertex.getFailureCause());
assertTrue(vertex.getStateTimestamp(ExecutionState.CREATED) > 0);
assertTrue(vertex.getStateTimestamp(ExecutionState.DEPLOYING) > 0);
assertTrue(vertex.getStateTimestamp(ExecutionState.RUNNING) == 0);
} catch (Exception e) {
e.printStackTrace();
fail(e.getMessage());
}
}
/** Deployment with the default (asynchronous) answer path; repeated deploys rejected. */
@Test
public void testDeployWithAsynchronousAnswer() {
try {
final JobVertexID jid = new JobVertexID();
final ExecutionJobVertex ejv = getExecutionVertex(jid);
final ExecutionVertex vertex = new ExecutionVertex(ejv, 0, new IntermediateResult[0],
AkkaUtils.getDefaultTimeout());
final LogicalSlot slot = new TestingLogicalSlotBuilder().createTestingLogicalSlot();
assertEquals(ExecutionState.CREATED, vertex.getExecutionState());
vertex.deployToSlot(slot);
// no repeated scheduling
try {
vertex.deployToSlot(slot);
fail("Scheduled from wrong state");
} catch (IllegalStateException e) {
// as expected
}
assertEquals(ExecutionState.DEPLOYING, vertex.getExecutionState());
// no repeated scheduling
try {
vertex.deployToSlot(slot);
fail("Scheduled from wrong state");
} catch (IllegalStateException e) {
// as expected
}
assertTrue(vertex.getStateTimestamp(ExecutionState.CREATED) > 0);
assertTrue(vertex.getStateTimestamp(ExecutionState.DEPLOYING) > 0);
assertTrue(vertex.getStateTimestamp(ExecutionState.RUNNING) == 0);
} catch (Exception e) {
e.printStackTrace();
fail(e.getMessage());
}
}
/**
 * A gateway whose submitTask fails immediately (direct executor) must drive
 * the vertex to FAILED with the gateway's error as the failure cause.
 */
@Test
public void testDeployFailedSynchronous() {
try {
final JobVertexID jid = new JobVertexID();
final ExecutionJobVertex ejv = getExecutionVertex(jid, new DirectScheduledExecutorService());
final ExecutionVertex vertex = new ExecutionVertex(ejv, 0, new IntermediateResult[0],
AkkaUtils.getDefaultTimeout());
final LogicalSlot slot = new TestingLogicalSlotBuilder().setTaskManagerGateway(new SubmitFailingSimpleAckingTaskManagerGateway()).createTestingLogicalSlot();
assertEquals(ExecutionState.CREATED, vertex.getExecutionState());
vertex.deployToSlot(slot);
assertEquals(ExecutionState.FAILED, vertex.getExecutionState());
assertNotNull(vertex.getFailureCause());
assertTrue(vertex.getFailureCause().getMessage().contains(ERROR_MESSAGE));
assertTrue(vertex.getStateTimestamp(ExecutionState.CREATED) > 0);
assertTrue(vertex.getStateTimestamp(ExecutionState.DEPLOYING) > 0);
assertTrue(vertex.getStateTimestamp(ExecutionState.FAILED) > 0);
} catch (Exception e) {
e.printStackTrace();
fail(e.getMessage());
}
}
/** Like the synchronous variant, but polls for the asynchronous FAILED transition. */
@Test
public void testDeployFailedAsynchronously() {
try {
final JobVertexID jid = new JobVertexID();
final ExecutionJobVertex ejv = getExecutionVertex(jid);
final ExecutionVertex vertex = new ExecutionVertex(ejv, 0, new IntermediateResult[0],
AkkaUtils.getDefaultTimeout());
final LogicalSlot slot = new TestingLogicalSlotBuilder().setTaskManagerGateway(new SubmitFailingSimpleAckingTaskManagerGateway()).createTestingLogicalSlot();
assertEquals(ExecutionState.CREATED, vertex.getExecutionState());
vertex.deployToSlot(slot);
// wait until the state transition must be done
// (polls up to ~1s: 100 iterations x 10ms sleep)
for (int i = 0; i < 100; i++) {
if (vertex.getExecutionState() == ExecutionState.FAILED && vertex.getFailureCause() != null) {
break;
} else {
Thread.sleep(10);
}
}
assertEquals(ExecutionState.FAILED, vertex.getExecutionState());
assertNotNull(vertex.getFailureCause());
assertTrue(vertex.getFailureCause().getMessage().contains(ERROR_MESSAGE));
assertTrue(vertex.getStateTimestamp(ExecutionState.CREATED) > 0);
assertTrue(vertex.getStateTimestamp(ExecutionState.DEPLOYING) > 0);
assertTrue(vertex.getStateTimestamp(ExecutionState.FAILED) > 0);
} catch (Exception e) {
e.printStackTrace();
fail(e.getMessage());
}
}
/**
 * Failing the vertex externally while submitTask never completes (blocking
 * gateway) must transition it to FAILED with the injected error as cause.
 */
@Test
public void testFailExternallyDuringDeploy() {
try {
final JobVertexID jid = new JobVertexID();
final ExecutionJobVertex ejv = getExecutionVertex(jid, new DirectScheduledExecutorService());
final ExecutionVertex vertex = new ExecutionVertex(ejv, 0, new IntermediateResult[0],
AkkaUtils.getDefaultTimeout());
TestingLogicalSlot testingLogicalSlot = new TestingLogicalSlotBuilder().setTaskManagerGateway(new SubmitBlockingSimpleAckingTaskManagerGateway()).createTestingLogicalSlot();
assertEquals(ExecutionState.CREATED, vertex.getExecutionState());
vertex.deployToSlot(testingLogicalSlot);
assertEquals(ExecutionState.DEPLOYING, vertex.getExecutionState());
Exception testError = new Exception("test error");
vertex.fail(testError);
assertEquals(ExecutionState.FAILED, vertex.getExecutionState());
assertEquals(testError, vertex.getFailureCause());
assertTrue(vertex.getStateTimestamp(ExecutionState.CREATED) > 0);
assertTrue(vertex.getStateTimestamp(ExecutionState.DEPLOYING) > 0);
assertTrue(vertex.getStateTimestamp(ExecutionState.FAILED) > 0);
} catch (Exception e) {
e.printStackTrace();
fail(e.getMessage());
}
}
/** Gateway whose submitTask completes exceptionally with {@link #ERROR_MESSAGE}. */
private static class SubmitFailingSimpleAckingTaskManagerGateway extends SimpleAckingTaskManagerGateway {
@Override
public CompletableFuture<Acknowledge> submitTask(TaskDeploymentDescriptor tdd, Time timeout) {
CompletableFuture<Acknowledge> future = new CompletableFuture<>();
future.completeExceptionally(new Exception(ERROR_MESSAGE));
return future;
}
}
/** Gateway whose submitTask never completes, keeping the vertex in DEPLOYING. */
private static class SubmitBlockingSimpleAckingTaskManagerGateway extends SimpleAckingTaskManagerGateway {
@Override
public CompletableFuture<Acknowledge> submitTask(TaskDeploymentDescriptor tdd, Time timeout) {
return new CompletableFuture<>();
}
}
/**
 * Tests that the lazy scheduling flag is correctly forwarded to the produced partition descriptors.
 */
@Test
public void testTddProducedPartitionsLazyScheduling() throws Exception {
for (ScheduleMode scheduleMode: ScheduleMode.values()) {
ExecutionJobVertex jobVertex = getExecutionVertex(
new JobVertexID(),
new DirectScheduledExecutorService(),
scheduleMode);
IntermediateResult result =
new IntermediateResult(new IntermediateDataSetID(), jobVertex, 1, ResultPartitionType.PIPELINED);
ExecutionAttemptID attemptID = new ExecutionAttemptID();
ExecutionVertex vertex =
new ExecutionVertex(jobVertex, 0, new IntermediateResult[]{result}, Time.minutes(1));
TaskDeploymentDescriptorFactory tddFactory =
TaskDeploymentDescriptorFactory.fromExecutionVertex(vertex, 1);
ExecutionEdge mockEdge = createMockExecutionEdge(1);
result.getPartitions()[0].addConsumerGroup();
result.getPartitions()[0].addConsumer(mockEdge, 0);
TaskManagerLocation location =
new TaskManagerLocation(ResourceID.generate(), InetAddress.getLoopbackAddress(), 1);
TaskDeploymentDescriptor tdd = tddFactory.createDeploymentDescriptor(
new AllocationID(),
0,
null,
Execution.registerProducedPartitions(vertex, location, attemptID, scheduleMode.allowLazyDeployment()).get().values());
Collection<ResultPartitionDeploymentDescriptor> producedPartitions = tdd.getProducedPartitions();
assertEquals(1, producedPartitions.size());
ResultPartitionDeploymentDescriptor desc = producedPartitions.iterator().next();
assertEquals(scheduleMode.allowLazyDeployment(), desc.sendScheduleOrUpdateConsumersMessage());
}
}
/** Builds a mocked consumer edge whose target vertex reports the given max parallelism. */
private ExecutionEdge createMockExecutionEdge(int maxParallelism) {
ExecutionVertex targetVertex = mock(ExecutionVertex.class);
ExecutionJobVertex targetJobVertex = mock(ExecutionJobVertex.class);
when(targetVertex.getJobVertex()).thenReturn(targetJobVertex);
when(targetJobVertex.getMaxParallelism()).thenReturn(maxParallelism);
ExecutionEdge edge = mock(ExecutionEdge.class);
when(edge.getTarget()).thenReturn(targetVertex);
return edge;
}
}
| |
/*
* (c) 2011-2012 Rdio Inc
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.rdio.simple;
import java.io.IOException;
/**
 * The base class for all Rdio clients. It implements the method calling
 * and auth but does not actually make HTTP requests.
 */
public abstract class RdioClient {
/** The OAuth consumer credentials used to sign every request. */
protected final Consumer consumer;
/** The OAuth access token, or {@code null} for unauthenticated clients. */
protected final Token accessToken;
/**
 * Create a new Rdio client object without a token.
 * @param consumer the OAuth consumer
 */
public RdioClient(Consumer consumer) {
this.consumer = consumer;
this.accessToken = null;
}
/**
 * Create a new Rdio client object with a token.
 * @param consumer the OAuth consumer
 * @param accessToken the OAuth token
 */
public RdioClient(Consumer consumer, Token accessToken) {
this.consumer = consumer;
this.accessToken = accessToken;
}
/**
 * Get the consumer
 * @return the consumer.
 */
public Consumer getConsumer() {
return consumer;
}
/**
 * Get the access token
 * @return the access token.
 */
public Token getAccessToken() {
return accessToken;
}
/**
 * Make an OAuth signed POST.
 * @param url the URL to POST to
 * @param params the parameters to post
 * @param token the token to sign the call with
 * @return the response body
 * @throws IOException in the event of any network errors
 * @throws RdioException in the event of an Rdio protocol error
 */
protected abstract String signedPost(String url, Parameters params, Token token) throws IOException, RdioException;
/**
 * Begin the authentication process. Fetch an OAuth request token associated with the supplied callback.
 * @param callback the callback URL or "oob" for the PIN flow
 * @return the request token and the authorization URL to direct a user to
 * @throws IOException in the event of any network errors
 * @throws RdioException in the event of an Rdio protocol error
 */
public AuthState beginAuthentication(String callback) throws IOException, RdioException {
// NOTE(review): endpoint is plain http — confirm whether the API requires https.
String response = signedPost("http://api.rdio.com/oauth/request_token",
Parameters.build("oauth_callback", callback), null);
Parameters parsed = Parameters.fromPercentEncoded(response);
Token requestToken = new Token(parsed.get("oauth_token"), parsed.get("oauth_token_secret"));
String url = parsed.get("login_url") + "?oauth_token=" + requestToken.token;
return new AuthState(requestToken, url);
}
/**
 * Complete the authentication process using the verifier returned from the
 * Rdio servers and the request token returned from {@link #beginAuthentication}.
 * @param verifier the oauth_verifier from the callback or the PIN displayed to the user
 * @param requestToken the request token returned from the beginAuthentication call
 * @throws IOException in the event of any network errors
 * @throws RdioException in the event of an Rdio protocol error
 * @return the access token. Pass it to an RdioClient constructor to make authenticated calls
 */
public Token completeAuthentication(String verifier, Token requestToken) throws IOException, RdioException {
String response = signedPost("http://api.rdio.com/oauth/access_token",
Parameters.build("oauth_verifier", verifier), requestToken);
Parameters parsed = Parameters.fromPercentEncoded(response);
return new Token(parsed.get("oauth_token"), parsed.get("oauth_token_secret"));
}
/**
 * Make an Rdio API call.
 * @param method the name of the method
 * @param parameters the parameters of the method
 * @return the response JSON text
 * @throws IOException in the event of any network errors
 * @throws RdioException in the event of an Rdio protocol error
 */
public String call(String method, Parameters parameters) throws IOException, RdioException {
// Clone before mutating so the caller's Parameters object is not modified.
parameters = (Parameters)parameters.clone();
parameters.put("method", method);
return signedPost("http://api.rdio.com/1/", parameters, accessToken);
}
/**
 * Make an Rdio API call with no parameters.
 * @param method the name of the method
 * @return the response JSON text
 * @throws IOException in the event of any network errors
 * @throws RdioException in the event of an Rdio protocol error
 */
public String call(String method) throws IOException, RdioException {
return call(method, new Parameters());
}
/**
 * An OAuth Consumer key and secret pair.
 */
public static class Consumer {
/** The OAuth consumer key */
public final String key;
/** The OAuth consumer secret */
public final String secret;
/**
 * @param key the OAuth consumer key
 * @param secret the OAuth consumer secret
 */
public Consumer(String key, String secret) {
this.key = key;
this.secret = secret;
}
}
/**
 * An OAuth token and token secret pair.
 */
public static final class Token {
/** The OAuth token */
public final String token;
/** The OAuth token secret */
public final String secret;
/**
 * @param token the OAuth token
 * @param secret the OAuth token secret
 */
public Token(String token, String secret) {
this.token = token;
this.secret = secret;
}
}
/**
 * Intermediate state for OAuth authorization.
 */
public static final class AuthState {
/** The OAuth request token.
 * This will be passed to completeAuthentication.
 */
public final Token requestToken;
/** The OAuth authorization URL.
 * Redirect the user to this URL to approve the app.
 */
public final String url;
/**
 * @param requestToken the OAuth request token
 * @param url the authorization URL
 */
public AuthState(Token requestToken, String url) {
this.requestToken = requestToken;
this.url = url;
}
}
/**
 * An Rdio protocol error.
 */
public static class RdioException extends Exception {
public RdioException(String error) {
super(error);
}
private static final long serialVersionUID = -6660585967984993916L;
}
/**
 * Authentication was denied.
 */
public static class AuthorizationException extends RdioException {
public AuthorizationException(String error) {
super(error);
}
private static final long serialVersionUID = 8748775513522136935L;
}
}
| |
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.idea.maven.indices;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.project.Project;
import com.intellij.ui.DocumentAdapter;
import com.intellij.ui.SimpleColoredComponent;
import com.intellij.ui.SimpleTextAttributes;
import com.intellij.ui.treeStructure.Tree;
import com.intellij.util.Alarm;
import com.intellij.util.ui.UIUtil;
import com.intellij.util.ui.tree.TreeUtil;
import org.jetbrains.idea.maven.project.MavenId;
import org.jetbrains.idea.maven.utils.MavenLog;
import org.sonatype.nexus.index.ArtifactInfo;
import javax.swing.*;
import javax.swing.event.DocumentEvent;
import javax.swing.event.TreeModelListener;
import javax.swing.event.TreeSelectionEvent;
import javax.swing.event.TreeSelectionListener;
import javax.swing.tree.TreeCellRenderer;
import javax.swing.tree.TreeModel;
import javax.swing.tree.TreePath;
import java.awt.*;
import java.awt.event.KeyAdapter;
import java.awt.event.KeyEvent;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.util.List;
/**
 * Swing panel combining a search text field with a tree of Maven search
 * results. Typing schedules a debounced background search (via an Alarm on
 * its own thread); results are pushed back to the tree on the EDT.
 */
public class MavenArtifactSearchPanel extends JPanel {
private final Project myProject;
// true = search for classes (MavenClassSearcher), false = artifacts.
private final boolean myClassMode;
private final Listener myListener;
private JTextField mySearchField;
private Tree myResultList;
// Debounce timer for search requests; runs doSearch on its own thread.
private final Alarm myAlarm;
public MavenArtifactSearchPanel(Project project, String initialText, boolean classMode, Listener listener, Disposable parent) {
myProject = project;
myClassMode = classMode;
myListener = listener;
initComponents(initialText);
myAlarm = new Alarm(Alarm.ThreadToUse.OWN_THREAD, parent);
}
public JTextField getSearchField() {
return mySearchField;
}
/** Builds the UI: search field on top, scrollable result tree below, plus all listeners. */
private void initComponents(String initialText) {
mySearchField = new JTextField(initialText);
myResultList = new Tree();
setLayout(new BorderLayout());
add(mySearchField, BorderLayout.NORTH);
add(new JScrollPane(myResultList), BorderLayout.CENTER);
mySearchField.getDocument().addDocumentListener(new DocumentAdapter() {
protected void textChanged(DocumentEvent e) {
scheduleSearch();
}
});
myResultList.addTreeSelectionListener(new TreeSelectionListener() {
public void valueChanged(TreeSelectionEvent e) {
// Ignore selection events while a search is still pending.
if (myAlarm.getActiveRequestCount() > 0) return;
boolean hasSelection = !myResultList.isSelectionEmpty();
myListener.canSelectStateChanged(MavenArtifactSearchPanel.this, hasSelection);
}
});
// Forward navigation keys typed in the search field to the result tree,
// so the user can move the tree selection without leaving the field.
mySearchField.addKeyListener(new KeyAdapter() {
@Override
public void keyPressed(KeyEvent e) {
final Object action = getAction(e, myResultList);
if ("selectNext".equals(action)) {
TreeUtil.moveDown(myResultList);
}
else if ("selectPrevious".equals(action)) {
TreeUtil.moveUp(myResultList);
}
else if ("scrollUpChangeSelection".equals(action)) {
TreeUtil.movePageUp(myResultList);
}
else if ("scrollDownChangeSelection".equals(action)) {
TreeUtil.movePageDown(myResultList);
}
}
// Resolves the key stroke against the tree's input map to its action key.
private Object getAction(final KeyEvent e, final JComponent comp) {
final KeyStroke stroke = KeyStroke.getKeyStroke(e.getKeyCode(), e.getModifiers());
return comp.getInputMap().get(stroke);
}
});
myResultList.setRootVisible(false);
myResultList.setShowsRootHandles(true);
myResultList.setModel(null);
myResultList.setFocusable(false);
myResultList.setCellRenderer(myClassMode ? new MyClassCellRenderer() : new MyArtifactCellRenderer());
myResultList.addMouseListener(new MouseAdapter() {
@Override
public void mouseClicked(MouseEvent e) {
// Double-click on a leaf (a concrete version) confirms the choice.
if (e.getClickCount() == 2) {
Object sel = myResultList.getLastSelectedPathComponent();
if (sel != null && myResultList.getModel().isLeaf(sel)) {
myListener.doubleClicked();
}
}
// Keep typing focus in the search field after tree clicks.
if (!mySearchField.hasFocus()) {
mySearchField.requestFocus();
}
}
});
}
/** Debounces a background search for the current field text (500ms delay). */
public void scheduleSearch() {
myListener.canSelectStateChanged(this, false);
// evaluate text value in the swing thread
final String text = mySearchField.getText();
myAlarm.cancelAllRequests();
myAlarm.addRequest(new Runnable() {
public void run() {
try {
doSearch(text);
}
catch (Throwable e) {
MavenLog.LOG.warn(e);
}
}
}, 500);
}
// Runs on the Alarm's own thread; publishes the model to the EDT.
private void doSearch(String searchText) {
MavenSearcher searcher = myClassMode ? new MavenClassSearcher() : new MavenArtifactSearcher();
// 200 = maximum number of results requested from the searcher.
List<MavenArtifactSearchResult> result = searcher.search(myProject, searchText, 200);
final TreeModel model = new MyTreeModel(result);
SwingUtilities.invokeLater(new Runnable() {
public void run() {
if (myProject.isDisposed()) return;
myResultList.setModel(model);
myResultList.setSelectionRow(0);
}
});
}
/**
 * Returns the MavenId for the current tree selection: either the selected
 * version (ArtifactInfo leaf) or the first version of the selected group node.
 * NOTE(review): {@code sel} is not null-checked — this would NPE if called
 * with no selection; confirm callers only invoke it when a selection exists.
 */
public MavenId getResult() {
Object sel = myResultList.getLastSelectedPathComponent();
ArtifactInfo info;
if (sel instanceof ArtifactInfo) {
info = (ArtifactInfo)sel;
}
else {
info = ((MavenArtifactSearchResult)sel).versions.get(0);
}
return new MavenId(info.groupId, info.artifactId, info.version);
}
/**
 * Two-level tree model: root = the result list, level 1 = search results
 * (grouped artifacts/classes), level 2 = their individual versions (leaves).
 */
private static class MyTreeModel implements TreeModel {
List<? extends MavenArtifactSearchResult> myItems;
private MyTreeModel(List<? extends MavenArtifactSearchResult> items) {
myItems = items;
}
public Object getRoot() {
return myItems;
}
public Object getChild(Object parent, int index) {
return getList(parent).get(index);
}
public int getChildCount(Object parent) {
return getList(parent).size();
}
// Maps a node to its children list; null marks a leaf (an ArtifactInfo).
public List getList(Object parent) {
if (parent == myItems) return myItems;
if (parent instanceof MavenArtifactSearchResult) return ((MavenArtifactSearchResult)parent).versions;
return null;
}
public boolean isLeaf(Object node) {
return getList(node) == null;
}
public int getIndexOfChild(Object parent, Object child) {
return getList(parent).indexOf(child);
}
// Model is immutable once built; mutation/listener callbacks are no-ops.
public void valueForPathChanged(TreePath path, Object newValue) {
}
public void addTreeModelListener(TreeModelListener l) {
}
public void removeTreeModelListener(TreeModelListener l) {
}
}
/** Renders artifact nodes: left component for coordinates, right one for extra info. */
private static class MyArtifactCellRenderer extends JPanel implements TreeCellRenderer {
protected SimpleColoredComponent myLeftComponent = new SimpleColoredComponent();
protected SimpleColoredComponent myRightComponent = new SimpleColoredComponent();
private MyArtifactCellRenderer() {
setLayout(new BorderLayout());
}
public Component getTreeCellRendererComponent(JTree tree, Object value, boolean selected, boolean expanded, boolean leaf, int row,
boolean hasFocus) {
myLeftComponent.clear();
myRightComponent.clear();
// Quaqua (Mac) look-and-feel handles selection foreground itself.
if (UIUtil.isUnderQuaquaLookAndFeel()) {
setBackground(selected ? UIUtil.getTreeSelectionBackground() : null);
}
else {
if (selected) {
setBackground(UIUtil.getTreeSelectionBackground());
setForeground(UIUtil.getTreeSelectionForeground());
}
else {
setBackground(null);
setForeground(tree.getForeground());
}
}
if (getFont() == null) setFont(tree.getFont());
if (value == tree.getModel().getRoot()) {
myLeftComponent.append("Results", SimpleTextAttributes.REGULAR_ATTRIBUTES);
}
else if (value instanceof MavenArtifactSearchResult) {
formatSearchResult(tree, (MavenArtifactSearchResult)value);
}
else if (value instanceof ArtifactInfo) {
ArtifactInfo info = (ArtifactInfo)value;
myLeftComponent.append(info.groupId + ":" + info.artifactId + ":" + info.version,
SimpleTextAttributes.GRAY_ATTRIBUTES);
}
// Rebuild the panel each call: left text, stretching spacer, right text.
removeAll();
add(myLeftComponent, BorderLayout.WEST);
JPanel spacer = new JPanel();
spacer.setBorder(BorderFactory.createEmptyBorder(0, 2, 0, 2));
spacer.setBackground(selected ? UIUtil.getTreeSelectionBackground() : tree.getBackground());
add(spacer, BorderLayout.CENTER);
add(myRightComponent, BorderLayout.EAST);
return this;
}
// Shows "group:artifact" plus the version range of the result's versions.
protected void formatSearchResult(JTree tree, MavenArtifactSearchResult searchResult) {
ArtifactInfo first = searchResult.versions.get(0);
ArtifactInfo last = searchResult.versions.get(searchResult.versions.size() - 1);
myLeftComponent.append(first.groupId + ":" + first.artifactId, SimpleTextAttributes.REGULAR_ATTRIBUTES);
myLeftComponent.append(":" + last.version + "-" + first.version, SimpleTextAttributes.GRAY_ATTRIBUTES);
}
}
/** Renders class-search results: class name + package left, coordinates right. */
private static class MyClassCellRenderer extends MyArtifactCellRenderer {
@Override
protected void formatSearchResult(JTree tree, MavenArtifactSearchResult searchResult) {
MavenClassSearchResult classResult = (MavenClassSearchResult)searchResult;
ArtifactInfo info = searchResult.versions.get(0);
myLeftComponent.append(classResult.className, SimpleTextAttributes.REGULAR_ATTRIBUTES);
myLeftComponent.append(" (" + classResult.packageName + ")", SimpleTextAttributes.GRAY_ATTRIBUTES);
myRightComponent.append(" " + info.groupId + ":" + info.artifactId,
SimpleTextAttributes.GRAY_ATTRIBUTES);
}
}
/** Callbacks for the dialog hosting this panel. */
public interface Listener {
void doubleClicked();
void canSelectStateChanged(MavenArtifactSearchPanel from, boolean canSelect);
}
}
| |
/*L
* Copyright Oracle Inc
*
* Distributed under the OSI-approved BSD 3-Clause License.
* See http://ncip.github.com/cadsr-cgmdr-nci-uk/LICENSE.txt for details.
*/
/*
* eXist Open Source Native XML Database
* Copyright (C) 2001-2007 The eXist Project
* http://exist-db.org
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software Foundation
* Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*
* $Id: QNameValue.java 6537 2007-09-12 07:36:52Z brihaye $
*/
package org.exist.xquery.value;
import java.text.Collator;
import org.exist.dom.QName;
import org.exist.xquery.Constants;
import org.exist.xquery.XPathException;
import org.exist.xquery.XQueryContext;
/**
* Wrapper class around a {@link org.exist.dom.QName} value which extends
* {@link org.exist.xquery.value.AtomicValue}.
*
* @author wolf
*/
public class QNameValue extends AtomicValue {
private XQueryContext context;
private QName qname;
private String stringValue;
/**
* Constructs a new QNameValue by parsing the given name using
* the namespace declarations in context.
*
* @param context
* @param name
* @throws XPathException
*/
public QNameValue(XQueryContext context, String name) throws XPathException {
if (name.length() == 0)
throw new XPathException("err:FORG0001: An empty string is not a valid lexical representation of xs:QName.");
this.context = context;
try {
this.qname = QName.parse(context, name, context.getURIForPrefix(""));
} catch (Exception e) {
throw new XPathException(e);
}
stringValue = computeStringValue();
}
public QNameValue(XQueryContext context, QName name) {
this.context = context;
this.qname = name;
stringValue = computeStringValue();
}
/**
* @see org.exist.xquery.value.AtomicValue#getType()
*/
public int getType() {
return Type.QNAME;
}
/**
* Returns the wrapped QName object.
*/
public QName getQName() {
return qname;
}
/**
* @see org.exist.xquery.value.Sequence#getStringValue()
*/
public String getStringValue() throws XPathException {
//TODO : previous approach was to resolve the qname when needed. We now try to keep the original qname
return stringValue;
}
private String computeStringValue() {
//TODO : previous approach was to resolve the qname when needed. We now try to keep the original qname
String prefix = qname.getPrefix();
//Not clear what to work with here...
if((prefix == null || "".equals(prefix)) && qname.needsNamespaceDecl()) {
prefix = context.getPrefixForURI(qname.getNamespaceURI());
if (prefix != null)
qname.setPrefix(prefix);
//throw new XPathException(
// "namespace " + qname.getNamespaceURI() + " is not defined");
}
//TODO : check that the prefix matches the URI in the current context ?
if (prefix != null && prefix.length() > 0)
return prefix + ':' + qname.getLocalName();
else
return qname.getLocalName();
}
/**
* @see org.exist.xquery.value.Sequence#convertTo(int)
*/
public AtomicValue convertTo(int requiredType) throws XPathException {
switch (requiredType) {
case Type.ATOMIC :
case Type.ITEM :
case Type.QNAME :
return this;
case Type.STRING :
return new StringValue( getStringValue() );
case Type.UNTYPED_ATOMIC :
return new UntypedAtomicValue(getStringValue());
default :
throw new XPathException(
"A QName cannot be converted to " + Type.getTypeName(requiredType));
}
}
/**
* @see org.exist.xquery.value.AtomicValue#compareTo(Collator, int, AtomicValue)
*/
public boolean compareTo(Collator collator, int operator, AtomicValue other) throws XPathException {
if (other.getType() == Type.QNAME) {
int cmp = qname.compareTo(((QNameValue) other).qname);
switch (operator) {
case Constants.EQ :
return cmp == 0;
case Constants.NEQ :
return cmp != 0;
/*
* QNames are unordered
case Constants.GT :
return cmp > 0;
case Constants.GTEQ :
return cmp >= 0;
case Constants.LT :
return cmp < 0;
case Constants.LTEQ :
return cmp >= 0;
*/
default :
throw new XPathException("XPTY0004 : cannot apply operator to QName");
}
} else
throw new XPathException(
"Type error: cannot compare QName to "
+ Type.getTypeName(other.getType()));
}
/**
* @see org.exist.xquery.value.AtomicValue#compareTo(Collator, AtomicValue)
*/
public int compareTo(Collator collator, AtomicValue other) throws XPathException {
if (other.getType() == Type.QNAME) {
return qname.compareTo(((QNameValue) other).qname);
} else
throw new XPathException(
"Type error: cannot compare QName to "
+ Type.getTypeName(other.getType()));
}
/**
* @see org.exist.xquery.value.AtomicValue#max(Collator, AtomicValue)
*/
public AtomicValue max(Collator collator, AtomicValue other) throws XPathException {
throw new XPathException("Invalid argument to aggregate function: QName");
}
/**
 * QNames are unordered, so the min() aggregate is undefined for them.
 *
 * @throws XPathException always
 */
public AtomicValue min(Collator collator, AtomicValue other) throws XPathException {
    throw new XPathException("Invalid argument to aggregate function: QName");
}
/**
 * Ranks how well this value maps onto the given Java class for Java-binding
 * purposes; lower numbers are preferred matches.
 *
 * @see org.exist.xquery.value.Item#conversionPreference(java.lang.Class)
 */
public int conversionPreference(Class javaClass) {
    // Checked first: also covers Object.class, which is assignable from QNameValue.
    if (javaClass.isAssignableFrom(QNameValue.class)) {
        return 0;
    } else if (javaClass == String.class) {
        return 1;
    } else if (javaClass == Object.class) {
        return 20;
    } else {
        // No sensible conversion exists.
        return Integer.MAX_VALUE;
    }
}
/**
 * Converts this value into a Java object of the requested target class.
 * Supported targets mirror {@code conversionPreference}: QNameValue (or a
 * supertype), String, and Object (which yields the raw QName).
 *
 * @see org.exist.xquery.value.Item#toJavaObject(java.lang.Class)
 * @throws XPathException if no conversion to the target class exists
 */
public Object toJavaObject(Class target) throws XPathException {
    if (target.isAssignableFrom(QNameValue.class)) {
        return this;
    }
    if (target == String.class) {
        return getStringValue();
    }
    if (target == Object.class) {
        return qname;
    }
    throw new XPathException(
        "cannot convert value of type "
        + Type.getTypeName(getType())
        + " to Java object of type "
        + target.getName());
}
/**
 * Returns the lexical form of the QName, falling back to the default
 * Object representation if the string value cannot be computed.
 */
public String toString() {
    try {
        return this.getStringValue();
    } catch (XPathException e) {
        // getStringValue() failed; degrade gracefully rather than propagate.
        return super.toString();
    }
}
/**
 * QNames have no effective boolean value per the XQuery spec (err:FORG0006).
 *
 * @throws XPathException always
 */
public boolean effectiveBooleanValue() throws XPathException {
    throw new XPathException("err:FORG0006: value of type " + Type.getTypeName(getType()) +
        " has no boolean value.");
}
}
| |
package org.hl7.fhir.r4.hapi.rest.server;
import static org.apache.commons.lang3.StringUtils.isBlank;
/*
* #%L
* HAPI FHIR Structures - DSTU2 (FHIR v1.0.0)
* %%
* Copyright (C) 2014 - 2015 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import static org.apache.commons.lang3.StringUtils.isNotBlank;
import java.util.*;
import java.util.Map.Entry;
import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletRequest;
import org.apache.commons.lang3.StringUtils;
import org.hl7.fhir.exceptions.FHIRException;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.r4.model.*;
import org.hl7.fhir.r4.model.CapabilityStatement.*;
import org.hl7.fhir.r4.model.Enumerations.PublicationStatus;
import org.hl7.fhir.r4.model.OperationDefinition.*;
import ca.uhn.fhir.context.*;
import ca.uhn.fhir.parser.DataFormatException;
import ca.uhn.fhir.rest.annotation.*;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.server.*;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.rest.server.method.*;
import ca.uhn.fhir.rest.server.method.OperationMethodBinding.ReturnType;
import ca.uhn.fhir.rest.server.method.SearchParameter;
/**
* Server FHIR Provider which serves the conformance statement for a RESTful server implementation
*
* <p>
* Note: This class is safe to extend, but it is important to note that the same instance of {@link CapabilityStatement} is always returned unless {@link #setCache(boolean)} is called with a value of
* <code>false</code>. This means that if you are adding anything to the returned conformance instance on each call you should call <code>setCache(false)</code> in your provider constructor.
* </p>
*/
public class ServerCapabilityStatementProvider implements IServerConformanceProvider<CapabilityStatement> {

  private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(ServerCapabilityStatementProvider.class);

  // When true (the default) the statement built on the first request is reused forever.
  private boolean myCache = true;
  // Lazily built; volatile so a fully constructed statement is safely published across threads.
  private volatile CapabilityStatement myCapabilityStatement;
  // Identity map: each OperationMethodBinding instance -> its generated unique operation name.
  private IdentityHashMap<OperationMethodBinding, String> myOperationBindingToName;
  // Reverse view: generated operation name -> all bindings that share that name.
  private HashMap<String, List<OperationMethodBinding>> myOperationNameToBindings;
  // Mandatory "publisher" element value; may be set to null to omit the element.
  private String myPublisher = "Not provided";
  private RestulfulServerConfiguration myServerConfiguration;

  /*
   * Add a no-arg constructor and setter so that the ServerConformanceProvider can be Spring-wired with the RestfulService avoiding the potential reference cycle that would happen.
   */
  public ServerCapabilityStatementProvider() {
    super();
  }

  public ServerCapabilityStatementProvider(RestfulServer theRestfulServer) {
    this.myServerConfiguration = theRestfulServer.createConfiguration();
  }

  public ServerCapabilityStatementProvider(RestulfulServerConfiguration theServerConfiguration) {
    this.myServerConfiguration = theServerConfiguration;
  }

  /**
   * Maps the binding's REST operation code onto a system-level interaction and records it on the
   * given rest component, at most once per distinct interaction. Codes that do not correspond to a
   * {@link SystemRestfulInteraction} are silently skipped.
   */
  private void checkBindingForSystemOps(CapabilityStatementRestComponent rest, Set<SystemRestfulInteraction> systemOps, BaseMethodBinding<?> nextMethodBinding) {
    if (nextMethodBinding.getRestOperationType() != null) {
      String sysOpCode = nextMethodBinding.getRestOperationType().getCode();
      if (sysOpCode != null) {
        SystemRestfulInteraction sysOp;
        try {
          sysOp = SystemRestfulInteraction.fromCode(sysOpCode);
        } catch (FHIRException e) {
          // Not a system-level interaction code; nothing to advertise.
          return;
        }
        if (sysOp == null) {
          return;
        }
        if (systemOps.contains(sysOp) == false) {
          systemOps.add(sysOp);
          rest.addInteraction().setCode(sysOp);
        }
      }
    }
  }

  /**
   * Groups every registered method binding by resource name (sorted by name via TreeMap).
   * Server-level bindings, which have no resource, are collected under the empty-string key.
   */
  private Map<String, List<BaseMethodBinding<?>>> collectMethodBindings() {
    Map<String, List<BaseMethodBinding<?>>> resourceToMethods = new TreeMap<String, List<BaseMethodBinding<?>>>();
    for (ResourceBinding next : myServerConfiguration.getResourceBindings()) {
      String resourceName = next.getResourceName();
      for (BaseMethodBinding<?> nextMethodBinding : next.getMethodBindings()) {
        if (resourceToMethods.containsKey(resourceName) == false) {
          resourceToMethods.put(resourceName, new ArrayList<BaseMethodBinding<?>>());
        }
        resourceToMethods.get(resourceName).add(nextMethodBinding);
      }
    }
    for (BaseMethodBinding<?> nextMethodBinding : myServerConfiguration.getServerBindings()) {
      String resourceName = "";
      if (resourceToMethods.containsKey(resourceName) == false) {
        resourceToMethods.put(resourceName, new ArrayList<BaseMethodBinding<?>>());
      }
      resourceToMethods.get(resourceName).add(nextMethodBinding);
    }
    return resourceToMethods;
  }

  /**
   * Parses the configured conformance/build date, falling back to "now" when it is absent or
   * not parseable as a FHIR date-time.
   */
  private DateTimeType conformanceDate() {
    String buildDate = myServerConfiguration.getConformanceDate();
    if (buildDate != null) {
      try {
        return new DateTimeType(buildDate);
      } catch (DataFormatException e) {
        // fall through
      }
    }
    return DateTimeType.now();
  }

  /**
   * Generates a name of the form {@code [resource]-[i][s]-[opname]} for the given operation
   * binding (the leading '$' of the operation name is stripped). The i/s markers encode
   * instance/server-level applicability so different scopes get distinct names.
   */
  private String createOperationName(OperationMethodBinding theMethodBinding) {
    StringBuilder retVal = new StringBuilder();
    if (theMethodBinding.getResourceName() != null) {
      retVal.append(theMethodBinding.getResourceName());
    }
    retVal.append('-');
    if (theMethodBinding.isCanOperateAtInstanceLevel()) {
      retVal.append('i');
    }
    if (theMethodBinding.isCanOperateAtServerLevel()) {
      retVal.append('s');
    }
    retVal.append('-');
    // Exclude the leading $
    retVal.append(theMethodBinding.getName(), 1, theMethodBinding.getName().length());
    return retVal.toString();
  }

  /**
   * Gets the value of the "publisher" that will be placed in the generated conformance statement. As this is a mandatory element, the value should not be null (although this is not enforced). The
   * value defaults to "Not provided" but may be set to null, which will cause this element to be omitted.
   */
  public String getPublisher() {
    return myPublisher;
  }

  /**
   * Builds (or returns the cached) CapabilityStatement describing every resource and
   * server-level binding known to the server configuration.
   */
  @Override
  @Metadata
  public CapabilityStatement getServerConformance(HttpServletRequest theRequest) {
    // Return the cached statement unless caching was disabled via setCache(false).
    if (myCapabilityStatement != null && myCache) {
      return myCapabilityStatement;
    }
    CapabilityStatement retVal = new CapabilityStatement();
    retVal.setPublisher(myPublisher);
    retVal.setDateElement(conformanceDate());
    retVal.setFhirVersion(FhirVersionEnum.R4.getFhirVersionString());
    retVal.setAcceptUnknown(UnknownContentCode.EXTENSIONS); // TODO: make this configurable - this is a fairly big
    // effort since the parser
    // needs to be modified to actually allow it
    retVal.getImplementation().setDescription(myServerConfiguration.getImplementationDescription());
    retVal.setKind(CapabilityStatementKind.INSTANCE);
    retVal.getSoftware().setName(myServerConfiguration.getServerName());
    retVal.getSoftware().setVersion(myServerConfiguration.getServerVersion());
    retVal.addFormat(Constants.CT_FHIR_XML_NEW);
    retVal.addFormat(Constants.CT_FHIR_JSON_NEW);
    retVal.setStatus(PublicationStatus.ACTIVE);
    CapabilityStatementRestComponent rest = retVal.addRest();
    rest.setMode(RestfulCapabilityMode.SERVER);
    Set<SystemRestfulInteraction> systemOps = new HashSet<SystemRestfulInteraction>();
    Set<String> operationNames = new HashSet<String>();
    Map<String, List<BaseMethodBinding<?>>> resourceToMethods = collectMethodBindings();
    for (Entry<String, List<BaseMethodBinding<?>>> nextEntry : resourceToMethods.entrySet()) {
      // Non-empty key: a concrete resource type. Empty key: server-level bindings (else branch).
      if (nextEntry.getKey().isEmpty() == false) {
        Set<TypeRestfulInteraction> resourceOps = new HashSet<TypeRestfulInteraction>();
        CapabilityStatementRestResourceComponent resource = rest.addResource();
        String resourceName = nextEntry.getKey();
        RuntimeResourceDefinition def = myServerConfiguration.getFhirContext().getResourceDefinition(resourceName);
        resource.getTypeElement().setValue(def.getName());
        ServletContext servletContext = (ServletContext) (theRequest == null ? null : theRequest.getAttribute(RestfulServer.SERVLET_CONTEXT_ATTRIBUTE));
        String serverBase = myServerConfiguration.getServerAddressStrategy().determineServerBase(servletContext, theRequest);
        resource.getProfile().setReference((def.getResourceProfile(serverBase)));
        TreeSet<String> includes = new TreeSet<String>();
        // Map<String, CapabilityStatement.RestResourceSearchParam> nameToSearchParam = new HashMap<String,
        // CapabilityStatement.RestResourceSearchParam>();
        for (BaseMethodBinding<?> nextMethodBinding : nextEntry.getValue()) {
          if (nextMethodBinding.getRestOperationType() != null) {
            String resOpCode = nextMethodBinding.getRestOperationType().getCode();
            if (resOpCode != null) {
              TypeRestfulInteraction resOp;
              try {
                resOp = TypeRestfulInteraction.fromCode(resOpCode);
              } catch (Exception e) {
                // Not a type-level interaction code; skip interaction handling below.
                resOp = null;
              }
              if (resOp != null) {
                if (resourceOps.contains(resOp) == false) {
                  resourceOps.add(resOp);
                  resource.addInteraction().setCode(resOp);
                }
                if ("vread".equals(resOpCode)) {
                  // vread implies read
                  resOp = TypeRestfulInteraction.READ;
                  if (resourceOps.contains(resOp) == false) {
                    resourceOps.add(resOp);
                    resource.addInteraction().setCode(resOp);
                  }
                }
                if (nextMethodBinding.isSupportsConditional()) {
                  switch (resOp) {
                  case CREATE:
                    resource.setConditionalCreate(true);
                    break;
                  case DELETE:
                    if (nextMethodBinding.isSupportsConditionalMultiple()) {
                      resource.setConditionalDelete(ConditionalDeleteStatus.MULTIPLE);
                    } else {
                      resource.setConditionalDelete(ConditionalDeleteStatus.SINGLE);
                    }
                    break;
                  case UPDATE:
                    resource.setConditionalUpdate(true);
                    break;
                  default:
                    break;
                  }
                }
              }
            }
          }
          checkBindingForSystemOps(rest, systemOps, nextMethodBinding);
          if (nextMethodBinding instanceof SearchMethodBinding) {
            handleSearchMethodBinding(rest, resource, resourceName, def, includes, (SearchMethodBinding) nextMethodBinding);
          } else if (nextMethodBinding instanceof DynamicSearchMethodBinding) {
            handleDynamicSearchMethodBinding(resource, def, includes, (DynamicSearchMethodBinding) nextMethodBinding);
          } else if (nextMethodBinding instanceof OperationMethodBinding) {
            OperationMethodBinding methodBinding = (OperationMethodBinding) nextMethodBinding;
            String opName = myOperationBindingToName.get(methodBinding);
            if (operationNames.add(opName)) {
              // Only add each operation (by name) once
              rest.addOperation().setName(methodBinding.getName().substring(1)).setDefinition(new Reference("OperationDefinition/" + opName));
            }
          }
          // Keep the advertised interactions in a stable (enum-ordinal) order; nulls sort last.
          Collections.sort(resource.getInteraction(), new Comparator<ResourceInteractionComponent>() {
            @Override
            public int compare(ResourceInteractionComponent theO1, ResourceInteractionComponent theO2) {
              TypeRestfulInteraction o1 = theO1.getCode();
              TypeRestfulInteraction o2 = theO2.getCode();
              if (o1 == null && o2 == null) {
                return 0;
              }
              if (o1 == null) {
                return 1;
              }
              if (o2 == null) {
                return -1;
              }
              return o1.ordinal() - o2.ordinal();
            }
          });
        }
        for (String nextInclude : includes) {
          resource.addSearchInclude(nextInclude);
        }
      } else {
        // Server-level (non-resource) bindings: only system ops and named operations apply.
        for (BaseMethodBinding<?> nextMethodBinding : nextEntry.getValue()) {
          checkBindingForSystemOps(rest, systemOps, nextMethodBinding);
          if (nextMethodBinding instanceof OperationMethodBinding) {
            OperationMethodBinding methodBinding = (OperationMethodBinding) nextMethodBinding;
            String opName = myOperationBindingToName.get(methodBinding);
            if (operationNames.add(opName)) {
              ourLog.debug("Found bound operation: {}", opName);
              rest.addOperation().setName(methodBinding.getName().substring(1)).setDefinition(new Reference("OperationDefinition/" + opName));
            }
          }
        }
      }
    }
    myCapabilityStatement = retVal;
    return retVal;
  }

  /**
   * Adds search parameters discovered from a dynamic-search binding to the resource entry,
   * accumulating the binding's _include values into the shared {@code includes} set.
   */
  private void handleDynamicSearchMethodBinding(CapabilityStatementRestResourceComponent resource, RuntimeResourceDefinition def, TreeSet<String> includes, DynamicSearchMethodBinding searchMethodBinding) {
    includes.addAll(searchMethodBinding.getIncludes());
    List<RuntimeSearchParam> searchParameters = new ArrayList<RuntimeSearchParam>();
    searchParameters.addAll(searchMethodBinding.getSearchParams());
    sortRuntimeSearchParameters(searchParameters);
    if (!searchParameters.isEmpty()) {
      for (RuntimeSearchParam nextParameter : searchParameters) {
        String nextParamName = nextParameter.getName();
        // String chain = null;
        String nextParamUnchainedName = nextParamName;
        if (nextParamName.contains(".")) {
          // chain = nextParamName.substring(nextParamName.indexOf('.') + 1);
          nextParamUnchainedName = nextParamName.substring(0, nextParamName.indexOf('.'));
        }
        String nextParamDescription = nextParameter.getDescription();
        /*
         * If the parameter has no description, default to the one from the resource
         */
        if (StringUtils.isBlank(nextParamDescription)) {
          RuntimeSearchParam paramDef = def.getSearchParam(nextParamUnchainedName);
          if (paramDef != null) {
            nextParamDescription = paramDef.getDescription();
          }
        }
        CapabilityStatementRestResourceSearchParamComponent param = resource.addSearchParam();
        // NOTE(review): this path uses the full (possibly chained) name, whereas
        // handleSearchMethodBinding uses the unchained name - confirm which is intended.
        param.setName(nextParamName);
        // if (StringUtils.isNotBlank(chain)) {
        // param.addChain(chain);
        // }
        param.setDocumentation(nextParamDescription);
        // param.setType(nextParameter.getParamType());
      }
    }
  }

  /**
   * Adds search parameters declared by a standard search binding to the resource entry,
   * accumulating the binding's _include values into the shared {@code includes} set. Parameter
   * descriptions fall back to the resource definition's own parameter description when blank.
   */
  private void handleSearchMethodBinding(CapabilityStatementRestComponent rest, CapabilityStatementRestResourceComponent resource, String resourceName, RuntimeResourceDefinition def, TreeSet<String> includes,
      SearchMethodBinding searchMethodBinding) {
    includes.addAll(searchMethodBinding.getIncludes());
    List<IParameter> params = searchMethodBinding.getParameters();
    List<SearchParameter> searchParameters = new ArrayList<SearchParameter>();
    for (IParameter nextParameter : params) {
      if ((nextParameter instanceof SearchParameter)) {
        searchParameters.add((SearchParameter) nextParameter);
      }
    }
    sortSearchParameters(searchParameters);
    if (!searchParameters.isEmpty()) {
      // boolean allOptional = searchParameters.get(0).isRequired() == false;
      //
      // OperationDefinition query = null;
      // if (!allOptional) {
      // RestOperation operation = rest.addOperation();
      // query = new OperationDefinition();
      // operation.setDefinition(new ResourceReferenceDt(query));
      // query.getDescriptionElement().setValue(searchMethodBinding.getDescription());
      // query.addUndeclaredExtension(false, ExtensionConstants.QUERY_RETURN_TYPE, new CodeDt(resourceName));
      // for (String nextInclude : searchMethodBinding.getIncludes()) {
      // query.addUndeclaredExtension(false, ExtensionConstants.QUERY_ALLOWED_INCLUDE, new StringDt(nextInclude));
      // }
      // }
      for (SearchParameter nextParameter : searchParameters) {
        String nextParamName = nextParameter.getName();
        // NOTE(review): 'chain' is computed but unused; chain support is commented out below.
        String chain = null;
        String nextParamUnchainedName = nextParamName;
        if (nextParamName.contains(".")) {
          chain = nextParamName.substring(nextParamName.indexOf('.') + 1);
          nextParamUnchainedName = nextParamName.substring(0, nextParamName.indexOf('.'));
        }
        String nextParamDescription = nextParameter.getDescription();
        /*
         * If the parameter has no description, default to the one from the resource
         */
        if (StringUtils.isBlank(nextParamDescription)) {
          RuntimeSearchParam paramDef = def.getSearchParam(nextParamUnchainedName);
          if (paramDef != null) {
            nextParamDescription = paramDef.getDescription();
          }
        }
        CapabilityStatementRestResourceSearchParamComponent param = resource.addSearchParam();
        param.setName(nextParamUnchainedName);
        // if (StringUtils.isNotBlank(chain)) {
        // param.addChain(chain);
        // }
        //
        // if (nextParameter.getParamType() == RestSearchParameterTypeEnum.REFERENCE) {
        // for (String nextWhitelist : new TreeSet<String>(nextParameter.getQualifierWhitelist())) {
        // if (nextWhitelist.startsWith(".")) {
        // param.addChain(nextWhitelist.substring(1));
        // }
        // }
        // }
        param.setDocumentation(nextParamDescription);
        if (nextParameter.getParamType() != null) {
          param.getTypeElement().setValueAsString(nextParameter.getParamType().getCode());
        }
        for (Class<? extends IBaseResource> nextTarget : nextParameter.getDeclaredTypes()) {
          RuntimeResourceDefinition targetDef = myServerConfiguration.getFhirContext().getResourceDefinition(nextTarget);
          if (targetDef != null) {
            ResourceType code;
            try {
              code = ResourceType.fromCode(targetDef.getName());
            } catch (FHIRException e) {
              code = null;
            }
            // if (code != null) {
            // param.addTarget(targetDef.getName());
            // }
          }
        }
      }
    }
  }

  /**
   * Builds the operation-name lookup maps from all registered bindings. Invoked by the server
   * framework (via {@code @Initialize}) before conformance/OperationDefinition requests are served.
   */
  @Initialize
  public void initializeOperations() {
    myOperationBindingToName = new IdentityHashMap<OperationMethodBinding, String>();
    myOperationNameToBindings = new HashMap<String, List<OperationMethodBinding>>();
    Map<String, List<BaseMethodBinding<?>>> resourceToMethods = collectMethodBindings();
    for (Entry<String, List<BaseMethodBinding<?>>> nextEntry : resourceToMethods.entrySet()) {
      List<BaseMethodBinding<?>> nextMethodBindings = nextEntry.getValue();
      for (BaseMethodBinding<?> nextMethodBinding : nextMethodBindings) {
        if (nextMethodBinding instanceof OperationMethodBinding) {
          OperationMethodBinding methodBinding = (OperationMethodBinding) nextMethodBinding;
          if (myOperationBindingToName.containsKey(methodBinding)) {
            continue;
          }
          String name = createOperationName(methodBinding);
          ourLog.debug("Detected operation: {}", name);
          myOperationBindingToName.put(methodBinding, name);
          if (myOperationNameToBindings.containsKey(name) == false) {
            myOperationNameToBindings.put(name, new ArrayList<OperationMethodBinding>());
          }
          myOperationNameToBindings.get(name).add(methodBinding);
        }
      }
    }
  }

  /**
   * Serves the OperationDefinition resource for a generated operation name (as referenced from
   * the conformance statement), merging the attributes of every binding that shares the name.
   *
   * @throws ResourceNotFoundException if the id is missing or unknown
   */
  @Read(type = OperationDefinition.class)
  public OperationDefinition readOperationDefinition(@IdParam IdType theId) {
    if (theId == null || theId.hasIdPart() == false) {
      throw new ResourceNotFoundException(theId);
    }
    List<OperationMethodBinding> sharedDescriptions = myOperationNameToBindings.get(theId.getIdPart());
    if (sharedDescriptions == null || sharedDescriptions.isEmpty()) {
      throw new ResourceNotFoundException(theId);
    }
    OperationDefinition op = new OperationDefinition();
    op.setStatus(PublicationStatus.ACTIVE);
    op.setKind(OperationKind.OPERATION);
    op.setIdempotent(true);
    // We reset these to true below if we find a binding that can handle the level
    op.setSystem(false);
    op.setType(false);
    op.setInstance(false);
    Set<String> inParams = new HashSet<String>();
    Set<String> outParams = new HashSet<String>();
    for (OperationMethodBinding sharedDescription : sharedDescriptions) {
      if (isNotBlank(sharedDescription.getDescription())) {
        op.setDescription(sharedDescription.getDescription());
      }
      if (sharedDescription.isCanOperateAtInstanceLevel()) {
        op.setInstance(true);
      }
      if (sharedDescription.isCanOperateAtServerLevel()) {
        op.setSystem(true);
      }
      if (sharedDescription.isCanOperateAtTypeLevel()) {
        op.setType(true);
      }
      if (!sharedDescription.isIdempotent()) {
        op.setIdempotent(sharedDescription.isIdempotent());
      }
      op.setCode(sharedDescription.getName().substring(1));
      // NOTE(review): the two blocks below repeat the instance/system flags already set
      // above - presumably redundant, but harmless; confirm before removing.
      if (sharedDescription.isCanOperateAtInstanceLevel()) {
        op.setInstance(sharedDescription.isCanOperateAtInstanceLevel());
      }
      if (sharedDescription.isCanOperateAtServerLevel()) {
        op.setSystem(sharedDescription.isCanOperateAtServerLevel());
      }
      if (isNotBlank(sharedDescription.getResourceName())) {
        op.addResourceElement().setValue(sharedDescription.getResourceName());
      }
      for (IParameter nextParamUntyped : sharedDescription.getParameters()) {
        if (nextParamUntyped instanceof OperationParameter) {
          OperationParameter nextParam = (OperationParameter) nextParamUntyped;
          OperationDefinitionParameterComponent param = op.addParameter();
          // De-duplicate input parameters across bindings by name.
          if (!inParams.add(nextParam.getName())) {
            continue;
          }
          param.setUse(OperationParameterUse.IN);
          if (nextParam.getParamType() != null) {
            param.setType(nextParam.getParamType());
          }
          if (nextParam.getSearchParamType() != null) {
            param.getSearchTypeElement().setValueAsString(nextParam.getSearchParamType());
          }
          param.setMin(nextParam.getMin());
          param.setMax(nextParam.getMax() == -1 ? "*" : Integer.toString(nextParam.getMax()));
          param.setName(nextParam.getName());
        }
      }
      for (ReturnType nextParam : sharedDescription.getReturnParams()) {
        // De-duplicate output parameters across bindings by name.
        if (!outParams.add(nextParam.getName())) {
          continue;
        }
        OperationDefinitionParameterComponent param = op.addParameter();
        param.setUse(OperationParameterUse.OUT);
        if (nextParam.getType() != null) {
          param.setType(nextParam.getType());
        }
        param.setMin(nextParam.getMin());
        param.setMax(nextParam.getMax() == -1 ? "*" : Integer.toString(nextParam.getMax()));
        param.setName(nextParam.getName());
      }
    }
    if (isBlank(op.getName())) {
      if (isNotBlank(op.getDescription())) {
        op.setName(op.getDescription());
      } else {
        op.setName(op.getCode());
      }
    }
    // Force an explicit false so these mandatory elements are serialized even when never set.
    if (op.hasSystem() == false) {
      op.setSystem(false);
    }
    if (op.hasInstance() == false) {
      op.setInstance(false);
    }
    return op;
  }

  /**
   * Sets the cache property (default is true). If set to true, the same response will be returned for each invocation.
   * <p>
   * See the class documentation for an important note if you are extending this class
   * </p>
   */
  public void setCache(boolean theCache) {
    myCache = theCache;
  }

  /**
   * Sets the value of the "publisher" that will be placed in the generated conformance statement. As this is a mandatory element, the value should not be null (although this is not enforced). The
   * value defaults to "Not provided" but may be set to null, which will cause this element to be omitted.
   */
  public void setPublisher(String thePublisher) {
    myPublisher = thePublisher;
  }

  @Override
  public void setRestfulServer(RestfulServer theRestfulServer) {
    myServerConfiguration = theRestfulServer.createConfiguration();
  }

  // Package-private accessor for the active server configuration (used by tests/framework).
  RestulfulServerConfiguration getServerConfiguration() {
    return myServerConfiguration;
  }

  /** Sorts runtime search parameters alphabetically by name. */
  private void sortRuntimeSearchParameters(List<RuntimeSearchParam> searchParameters) {
    Collections.sort(searchParameters, new Comparator<RuntimeSearchParam>() {
      @Override
      public int compare(RuntimeSearchParam theO1, RuntimeSearchParam theO2) {
        return theO1.getName().compareTo(theO2.getName());
      }
    });
  }

  /** Sorts search parameters required-first, then alphabetically by name. */
  private void sortSearchParameters(List<SearchParameter> searchParameters) {
    Collections.sort(searchParameters, new Comparator<SearchParameter>() {
      @Override
      public int compare(SearchParameter theO1, SearchParameter theO2) {
        if (theO1.isRequired() == theO2.isRequired()) {
          return theO1.getName().compareTo(theO2.getName());
        }
        if (theO1.isRequired()) {
          return -1;
        }
        return 1;
      }
    });
  }
}
| |
/**
* Copyright 2016 LinkedIn Corp. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*/
package com.github.ambry.config;
import java.math.BigDecimal;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.Properties;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Verifiable properties for configs
*/
public class VerifiableProperties {

  // Names of every property that has been read; verify() warns about keys never referenced.
  private final HashSet<String> referenceSet = new HashSet<String>();
  private final Properties props;
  protected Logger logger = LoggerFactory.getLogger(getClass());

  /**
   * Wraps the given properties for verified, type-checked access.
   *
   * @param props the backing properties (not copied; later mutations are visible)
   */
  public VerifiableProperties(Properties props) {
    this.props = props;
  }

  /** Returns true if the given property name is present. */
  public boolean containsKey(String name) {
    return props.containsKey(name);
  }

  /**
   * Reads a raw property value and records the name as referenced for {@link #verify()}.
   *
   * @return the value, or null if the property is absent
   */
  public String getProperty(String name) {
    String value = props.getProperty(name);
    referenceSet.add(name);
    return value;
  }

  /**
   * Read a required integer property value or throw an exception if no such property is found
   *
   * @throws IllegalArgumentException if the property is missing
   * @throws NumberFormatException if the value is not a valid integer
   */
  public int getInt(String name) {
    return Integer.parseInt(getString(name));
  }

  /**
   * Reads a required integer property and validates it against the given range (inclusive).
   *
   * @throws IllegalArgumentException if the property is missing or out of range
   */
  public int getIntInRange(String name, int start, int end) {
    if (!containsKey(name)) {
      throw new IllegalArgumentException("Missing required property '" + name + "'");
    }
    // The default (-1) is never used because the key is known to be present.
    return getIntInRange(name, -1, start, end);
  }

  /**
   * Read an integer from the properties instance
   * @param name The property name
   * @param defaultVal The default value to use if the property is not found
   * @return the integer value
   */
  public int getInt(String name, int defaultVal) {
    return getIntInRange(name, defaultVal, Integer.MIN_VALUE, Integer.MAX_VALUE);
  }

  /**
   * Reads a short from the properties instance, falling back to the default if absent.
   */
  public Short getShort(String name, Short defaultVal) {
    return getShortInRange(name, defaultVal, Short.MIN_VALUE, Short.MAX_VALUE);
  }

  /**
   * Read an integer from the properties instance. Throw an exception
   * if the value is not in the given range (inclusive)
   * @param name The property name
   * @param defaultVal The default value to use if the property is not found
   * @param start The start of the range in which the value must fall (inclusive)
   * @param end The end of the range in which the value must fall
   * @throws IllegalArgumentException If the value is not in the given range
   * @return the integer value
   */
  public int getIntInRange(String name, int defaultVal, int start, int end) {
    int v = 0;
    if (containsKey(name)) {
      v = Integer.parseInt(getProperty(name));
    } else {
      v = defaultVal;
    }
    if (v >= start && v <= end) {
      return v;
    } else {
      throw new IllegalArgumentException(
          name + " has value " + v + " which is not in the range " + start + "-" + end + ".");
    }
  }

  /**
   * Reads a short from the properties instance. Throws an exception
   * if the value is not in the given range (inclusive).
   *
   * @param name The property name
   * @param defaultVal The default value to use if the property is not found
   * @param start The start of the range in which the value must fall (inclusive)
   * @param end The end of the range in which the value must fall
   * @throws IllegalArgumentException If the value is not in the given range
   * @return the short value
   */
  public Short getShortInRange(String name, Short defaultVal, Short start, Short end) {
    Short v = 0;
    if (containsKey(name)) {
      v = Short.parseShort(getProperty(name));
    } else {
      v = defaultVal;
    }
    if (v >= start && v <= end) {
      return v;
    } else {
      throw new IllegalArgumentException(
          name + " has value " + v + " which is not in the range " + start + "-" + end + ".");
    }
  }

  /**
   * Reads a double from the properties instance. Throws an exception
   * if the value is not in the given range (inclusive).
   *
   * @throws IllegalArgumentException If the value is not in the given range
   */
  public Double getDoubleInRange(String name, Double defaultVal, Double start, Double end) {
    Double v = 0.0;
    if (containsKey(name)) {
      v = Double.parseDouble(getProperty(name));
    } else {
      v = defaultVal;
    }
    // use big decimal for double comparison; valueOf uses the canonical String form of the
    // double rather than its exact binary expansion (conversion is monotonic either way,
    // so the range-check result is unchanged)
    BigDecimal startDecimal = BigDecimal.valueOf(start);
    BigDecimal endDecimal = BigDecimal.valueOf(end);
    BigDecimal value = BigDecimal.valueOf(v);
    if (value.compareTo(startDecimal) >= 0 && value.compareTo(endDecimal) <= 0) {
      return v;
    } else {
      throw new IllegalArgumentException(
          name + " has value " + v + " which is not in range " + start + "-" + end + ".");
    }
  }

  /**
   * Read a required long property value or throw an exception if no such property is found
   *
   * @throws IllegalArgumentException if the property is missing
   * @throws NumberFormatException if the value is not a valid long
   */
  public long getLong(String name) {
    return Long.parseLong(getString(name));
  }

  /**
   * Read an long from the properties instance
   * @param name The property name
   * @param defaultVal The default value to use if the property is not found
   * @return the long value
   */
  public long getLong(String name, long defaultVal) {
    return getLongInRange(name, defaultVal, Long.MIN_VALUE, Long.MAX_VALUE);
  }

  /**
   * Read an long from the properties instance. Throw an exception
   * if the value is not in the given range (inclusive)
   * @param name The property name
   * @param defaultVal The default value to use if the property is not found
   * @param start The start of the range in which the value must fall (inclusive)
   * @param end The end of the range in which the value must fall
   * @throws IllegalArgumentException If the value is not in the given range
   * @return the long value
   */
  public long getLongInRange(String name, long defaultVal, long start, long end) {
    long v = 0;
    if (containsKey(name)) {
      v = Long.parseLong(getProperty(name));
    } else {
      // NOTE: unlike getIntInRange, the default bypasses the range check (existing behavior).
      return defaultVal;
    }
    if (v >= start && v <= end) {
      return v;
    } else {
      throw new IllegalArgumentException(
          name + " has value " + v + " which is not in the range " + start + "-" + end + ".");
    }
  }

  /**
   * Get a required argument as a double
   * @param name The property name
   * @return the value
   * @throws IllegalArgumentException If the given property is not present
   */
  public double getDouble(String name) {
    return Double.parseDouble(getString(name));
  }

  /**
   * Get an optional argument as a double
   * @param name The property name
   * @param defaultVal The default value for the property if not present
   */
  public double getDouble(String name, double defaultVal) {
    if (containsKey(name)) {
      return getDouble(name);
    } else {
      return defaultVal;
    }
  }

  /**
   * Read a boolean value from the properties instance
   * @param name The property name
   * @param defaultVal The default value to use if the property is not found
   * @throws IllegalArgumentException if the value is present but is neither "true" nor "false"
   * @return the boolean value
   */
  public boolean getBoolean(String name, boolean defaultVal) {
    String v = "";
    if (!containsKey(name)) {
      return defaultVal;
    } else {
      v = getProperty(name);
      // Only the exact (case-sensitive) literals "true"/"false" are accepted.
      if (v.compareTo("true") == 0 || v.compareTo("false") == 0) {
        return Boolean.parseBoolean(v);
      } else {
        throw new IllegalArgumentException(name + " has value " + v + " which is not true or false.");
      }
    }
  }

  /**
   * Reads a required boolean property; any value other than "true" (ignoring case) yields false.
   *
   * @throws IllegalArgumentException if the property is missing
   */
  public boolean getBoolean(String name) {
    return Boolean.parseBoolean(getString(name));
  }

  /**
   * Get a string property, or, if no such property is defined, return the given default value
   */
  public String getString(String name, String defaultVal) {
    if (containsKey(name)) {
      return getProperty(name);
    } else {
      return defaultVal;
    }
  }

  /**
   * Get a string property or throw and exception if no such property is defined.
   *
   * @throws IllegalArgumentException if the property is missing
   */
  public String getString(String name) {
    if (!containsKey(name)) {
      throw new IllegalArgumentException("Missing required property '" + name + "'");
    } else {
      return getProperty(name);
    }
  }

  /**
   * Logs every configured property, warning about those that were never read through this
   * instance (likely typos or stale configuration).
   */
  public void verify() {
    logger.info("Verifying properties");
    Enumeration<?> keys = props.propertyNames();
    while (keys.hasMoreElements()) {
      Object key = keys.nextElement();
      if (!referenceSet.contains(key)) {
        logger.warn("Property {} is not valid", key);
      } else {
        logger.info("Property {} is overridden to {}", key, props.getProperty(key.toString()));
      }
    }
  }

  public String toString() {
    return props.toString();
  }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.sort;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.QueryWrapperFilter;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.join.BitDocIdSetFilter;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.core.LongFieldMapper;
import org.elasticsearch.index.mapper.core.NumberFieldMapper;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.query.support.NestedInnerQueryParseSupport;
import org.elasticsearch.search.MultiValueMode;
import org.elasticsearch.search.SearchParseElement;
import org.elasticsearch.search.SearchParseException;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.internal.SubSearchContext;
import java.io.IOException;
import java.util.List;
/**
*
*/
/**
 * Parses the {@code sort} section of a search request into Lucene {@link SortField}s
 * and installs the resulting {@link Sort} on the {@link SearchContext}.
 *
 * <p>Accepted forms: a single field name string, an object keyed by field name
 * (with options such as {@code order}, {@code missing}, {@code mode},
 * {@code unmapped_type}, {@code nested_path}/{@code nested_filter}), or an array
 * mixing both. The special names {@code _score} and {@code _doc} sort by
 * relevance and index order respectively.
 */
public class SortParseElement implements SearchParseElement {

    /** Sort by relevance score, in Lucene's natural (descending) score order. */
    public static final SortField SORT_SCORE = new SortField(null, SortField.Type.SCORE);
    private static final SortField SORT_SCORE_REVERSE = new SortField(null, SortField.Type.SCORE, true);
    /** Sort by internal Lucene document id (index order). */
    private static final SortField SORT_DOC = new SortField(null, SortField.Type.DOC);
    private static final SortField SORT_DOC_REVERSE = new SortField(null, SortField.Type.DOC, true);

    // "ignore_unmapped" is the deprecated boolean predecessor of "unmapped_type".
    public static final ParseField IGNORE_UNMAPPED = new ParseField("ignore_unmapped");
    public static final ParseField UNMAPPED_TYPE = new ParseField("unmapped_type");

    public static final String SCORE_FIELD_NAME = "_score";
    public static final String DOC_FIELD_NAME = "_doc";

    // Special-purpose sort parsers (script sort, geo-distance sort), registered
    // under every name each parser answers to.
    private final ImmutableMap<String, SortParser> parsers;

    public SortParseElement() {
        ImmutableMap.Builder<String, SortParser> builder = ImmutableMap.builder();
        addParser(builder, new ScriptSortParser());
        addParser(builder, new GeoDistanceSortParser());
        this.parsers = builder.build();
    }

    /** Registers {@code parser} under each of the names it reports. */
    private void addParser(ImmutableMap.Builder<String, SortParser> parsers, SortParser parser) {
        for (String name : parser.names()) {
            parsers.put(name, parser);
        }
    }

    /**
     * Parses the sort definition currently positioned under {@code parser} and,
     * unless the only sort is a plain non-reversed {@code _score} (the engine's
     * default ordering), applies it to {@code context}.
     *
     * @throws IllegalArgumentException on a structurally malformed sort element
     */
    @Override
    public void parse(XContentParser parser, SearchContext context) throws Exception {
        XContentParser.Token token = parser.currentToken();
        List<SortField> sortFields = Lists.newArrayListWithCapacity(2);
        if (token == XContentParser.Token.START_ARRAY) {
            while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                if (token == XContentParser.Token.START_OBJECT) {
                    addCompoundSortField(parser, context, sortFields);
                } else if (token == XContentParser.Token.VALUE_STRING) {
                    addSortField(context, sortFields, parser.text(), false, null, null, null, null);
                } else {
                    throw new IllegalArgumentException("malformed sort format, within the sort array, an object, or an actual string are allowed");
                }
            }
        } else if (token == XContentParser.Token.VALUE_STRING) {
            addSortField(context, sortFields, parser.text(), false, null, null, null, null);
        } else if (token == XContentParser.Token.START_OBJECT) {
            addCompoundSortField(parser, context, sortFields);
        } else {
            throw new IllegalArgumentException("malformed sort format, either start with array, object, or an actual string");
        }
        if (!sortFields.isEmpty()) {
            // optimize if we just sort on score non reversed, we don't really need sorting
            boolean sort;
            if (sortFields.size() > 1) {
                sort = true;
            } else {
                SortField sortField = sortFields.get(0);
                if (sortField.getType() == SortField.Type.SCORE && !sortField.getReverse()) {
                    sort = false;
                } else {
                    sort = true;
                }
            }
            if (sort) {
                context.sort(new Sort(sortFields.toArray(new SortField[sortFields.size()])));
            }
        }
    }

    /**
     * Parses one object-form sort entry ({@code {"field": ...}}). The value may be
     * a bare direction string ("asc"/"desc"), a registered special parser's body
     * (script / geo-distance), or an options object.
     */
    private void addCompoundSortField(XContentParser parser, SearchContext context, List<SortField> sortFields) throws Exception {
        XContentParser.Token token;
        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.FIELD_NAME) {
                String fieldName = parser.currentName();
                boolean reverse = false;
                String missing = null;
                String innerJsonName = null;
                String unmappedType = null;
                MultiValueMode sortMode = null;
                NestedInnerQueryParseSupport nestedFilterParseHelper = null;
                token = parser.nextToken();
                if (token == XContentParser.Token.VALUE_STRING) {
                    String direction = parser.text();
                    // "_score" naturally sorts descending, so "asc" means reversed for it
                    // (and vice versa for regular fields).
                    if (direction.equals("asc")) {
                        reverse = SCORE_FIELD_NAME.equals(fieldName);
                    } else if (direction.equals("desc")) {
                        reverse = !SCORE_FIELD_NAME.equals(fieldName);
                    } else {
                        // Report the invalid direction value itself, not the field name.
                        throw new IllegalArgumentException("sort direction [" + direction + "] not supported");
                    }
                    addSortField(context, sortFields, fieldName, reverse, unmappedType, missing, sortMode, nestedFilterParseHelper);
                } else {
                    if (parsers.containsKey(fieldName)) {
                        // Delegate to a special sort parser (e.g. _script, _geo_distance).
                        sortFields.add(parsers.get(fieldName).parse(parser, context));
                    } else {
                        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                            if (token == XContentParser.Token.FIELD_NAME) {
                                innerJsonName = parser.currentName();
                            } else if (token.isValue()) {
                                if ("reverse".equals(innerJsonName)) {
                                    reverse = parser.booleanValue();
                                } else if ("order".equals(innerJsonName)) {
                                    if ("asc".equals(parser.text())) {
                                        reverse = SCORE_FIELD_NAME.equals(fieldName);
                                    } else if ("desc".equals(parser.text())) {
                                        reverse = !SCORE_FIELD_NAME.equals(fieldName);
                                    }
                                } else if ("missing".equals(innerJsonName)) {
                                    missing = parser.textOrNull();
                                } else if (IGNORE_UNMAPPED.match(innerJsonName)) {
                                    // backward compatibility: ignore_unmapped has been replaced with unmapped_type
                                    if (unmappedType == null // don't override if unmapped_type has been provided too
                                            && parser.booleanValue()) {
                                        unmappedType = LongFieldMapper.CONTENT_TYPE;
                                    }
                                } else if (UNMAPPED_TYPE.match(innerJsonName)) {
                                    unmappedType = parser.textOrNull();
                                } else if ("mode".equals(innerJsonName)) {
                                    sortMode = MultiValueMode.fromString(parser.text());
                                } else if ("nested_path".equals(innerJsonName) || "nestedPath".equals(innerJsonName)) {
                                    if (nestedFilterParseHelper == null) {
                                        nestedFilterParseHelper = new NestedInnerQueryParseSupport(parser, context);
                                    }
                                    nestedFilterParseHelper.setPath(parser.text());
                                } else {
                                    throw new IllegalArgumentException("sort option [" + innerJsonName + "] not supported");
                                }
                            } else if (token == XContentParser.Token.START_OBJECT) {
                                if ("nested_filter".equals(innerJsonName) || "nestedFilter".equals(innerJsonName)) {
                                    if (nestedFilterParseHelper == null) {
                                        nestedFilterParseHelper = new NestedInnerQueryParseSupport(parser, context);
                                    }
                                    nestedFilterParseHelper.filter();
                                } else {
                                    throw new IllegalArgumentException("sort option [" + innerJsonName + "] not supported");
                                }
                            }
                        }
                        addSortField(context, sortFields, fieldName, reverse, unmappedType, missing, sortMode, nestedFilterParseHelper);
                    }
                }
            }
        }
    }

    /**
     * Resolves {@code fieldName} (including the {@code _score} / {@code _doc}
     * pseudo-fields) to a {@link SortField} and appends it to {@code sortFields}.
     *
     * @param unmappedType fallback mapper type to sort unmapped fields with; when
     *                     {@code null}, an unmapped field is an error
     * @param missing      how documents missing the field sort ("_first"/"_last"/value)
     * @param sortMode     multi-value reduction; SUM/AVG are only honored on numeric fields
     * @param nestedHelper optional nested-scope (path + filter) support
     * @throws SearchParseException if the field is unmapped (without {@code unmappedType})
     *                              or not sortable
     */
    private void addSortField(SearchContext context, List<SortField> sortFields, String fieldName, boolean reverse, String unmappedType, @Nullable final String missing, MultiValueMode sortMode, NestedInnerQueryParseSupport nestedHelper) throws IOException {
        if (SCORE_FIELD_NAME.equals(fieldName)) {
            if (reverse) {
                sortFields.add(SORT_SCORE_REVERSE);
            } else {
                sortFields.add(SORT_SCORE);
            }
        } else if (DOC_FIELD_NAME.equals(fieldName)) {
            if (reverse) {
                sortFields.add(SORT_DOC_REVERSE);
            } else {
                sortFields.add(SORT_DOC);
            }
        } else {
            FieldMapper fieldMapper = context.smartNameFieldMapper(fieldName);
            if (fieldMapper == null) {
                if (unmappedType != null) {
                    fieldMapper = context.mapperService().unmappedFieldMapper(unmappedType);
                } else {
                    throw new SearchParseException(context, "No mapping found for [" + fieldName + "] in order to sort on", null);
                }
            }
            if (!fieldMapper.isSortable()) {
                throw new SearchParseException(context, "Sorting not supported for field[" + fieldName + "]", null);
            }
            // Enable when we also know how to detect fields that do tokenize, but only emit one token
            /*if (fieldMapper instanceof StringFieldMapper) {
                StringFieldMapper stringFieldMapper = (StringFieldMapper) fieldMapper;
                if (stringFieldMapper.fieldType().tokenized()) {
                    // Fail early
                    throw new SearchParseException(context, "Can't sort on tokenized string field[" + fieldName + "]");
                }
            }*/
            // We only support AVG and SUM on number based fields
            if (!(fieldMapper instanceof NumberFieldMapper) && (sortMode == MultiValueMode.SUM || sortMode == MultiValueMode.AVG)) {
                sortMode = null;
            }
            if (sortMode == null) {
                sortMode = resolveDefaultSortMode(reverse);
            }
            // TODO: remove this in master, we should be explicit when we want to sort on nested fields and don't do anything automatically
            if (!(context instanceof SubSearchContext)) {
                // Only automatically resolve nested path when sort isn't defined for top_hits
                if (nestedHelper == null || nestedHelper.getNestedObjectMapper() == null) {
                    ObjectMapper objectMapper = context.mapperService().resolveClosestNestedObjectMapper(fieldName);
                    if (objectMapper != null && objectMapper.nested().isNested()) {
                        if (nestedHelper == null) {
                            nestedHelper = new NestedInnerQueryParseSupport(context.queryParserService().getParseContext());
                        }
                        nestedHelper.setPath(objectMapper.fullPath());
                    }
                }
            }
            final Nested nested;
            if (nestedHelper != null && nestedHelper.getPath() != null) {
                BitDocIdSetFilter rootDocumentsFilter = context.bitsetFilterCache().getBitDocIdSetFilter(Queries.newNonNestedFilter());
                Filter innerDocumentsFilter;
                if (nestedHelper.filterFound()) {
                    // TODO: use queries instead
                    innerDocumentsFilter = new QueryWrapperFilter(nestedHelper.getInnerFilter());
                } else {
                    innerDocumentsFilter = nestedHelper.getNestedObjectMapper().nestedTypeFilter();
                }
                nested = new Nested(rootDocumentsFilter, innerDocumentsFilter);
            } else {
                nested = null;
            }
            IndexFieldData.XFieldComparatorSource fieldComparatorSource = context.fieldData().getForField(fieldMapper)
                    .comparatorSource(missing, sortMode, nested);
            sortFields.add(new SortField(fieldMapper.names().indexName(), fieldComparatorSource, reverse));
        }
    }

    /** Ascending sorts compare by MIN of multi-values, descending by MAX. */
    private static MultiValueMode resolveDefaultSortMode(boolean reverse) {
        return reverse ? MultiValueMode.MAX : MultiValueMode.MIN;
    }
}
| |
package assembler;
import java.io.File;
import java.io.FileNotFoundException;
import java.util.*;
/*
MIT License
Copyright (c) 2017 Paul Olsen
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
/**
 * A two-pass assembler for ".ftnt" sources. The first pass expands instructions,
 * records labels/variables in the symbol table and handles ".declare"/".include"
 * directives; the second pass resolves branch labels, array references and
 * relative ("+n") targets to absolute addresses. Errors are reported to
 * System.out with a "file:line" location; assembly continues best-effort.
 */
public class Assembler {
    /** 1-based number of the source line currently being processed. */
    private int lineNo = 1;
    /** Labels, variables, constants, arrays and strings mapped to addresses/values. */
    private HashMap<String, Integer> symbolTable = new HashMap<String, Integer>();
    /** The assembled program: one machine word (as text) per entry. */
    private ArrayList<String> program = new ArrayList<String>();
    /** Program counter: address of the next word to be emitted. */
    private int pc = 0;
    /** Maps (negative, pre-offset) data-segment indices to string literals placed there. */
    private HashMap<Integer, String> stringLocationMap = new HashMap<Integer, String>();
    /** Maps program addresses back to "file:line" for error reporting. */
    private TreeMap<Integer, String> pc2line = new TreeMap<Integer, String>();
    private Scanner in;
    /** Number of data words (variables, arrays, strings) reserved so far. */
    private int variableCount = 0;
    /** Next (negative, pre-offset) data-segment index to hand out. */
    private int index = 0;
    private ArrayList<String> includedFiles = new ArrayList<String>();
    private Assembler root = null; // if == this, then this is a top level assembler.
    private HashSet<String> branchLabels = new HashSet<String>(); //need to keep track of branch labels
    //for includes, because branch labels are always incorrect when loaded. They need to be fixed.
    //by an offset.
    private String filename;

    /** Creates a top-level assembler for {@code source}. */
    public Assembler(File source) throws FileNotFoundException {
        in = new Scanner(source);
        root = this;
        filename = source.getName();
        includedFiles.add(filename.substring(0, filename.lastIndexOf('.') + 1));
    }

    /** Creates a nested assembler whose include bookkeeping is shared with {@code root}. */
    public Assembler(File source, Assembler root) throws FileNotFoundException {
        this(source);
        this.root = root;
    }

    /** Runs only the first pass (used for included files, which are fixed up by the includer). */
    public void assembleFirstPassOnly() {
        firstPass(false);
        finish(); // finish() closes the scanner.
    }

    /** Runs the full two-pass assembly. */
    public void assemble() {
        firstPass(true);  // the first pass.
        secondPass();     // the second pass.
        finish();         // closing the scanner.
    }

    private void finish() {
        in.close();
    }

    /**
     * Second pass: converts relative "+n" branch targets to absolute addresses,
     * reserves the variable area, places string literals, and resolves
     * ".begin", ":label" and "!array" placeholders via the symbol table.
     */
    private void secondPass() {
        for (int i = 0; i < program.size(); i++) { // some macros require relative targets for branching,
            // we need to make these absolute now.
            String x = program.get(i);
            if (x.startsWith("+")) {
                int target = i + Integer.parseInt(x.substring(1));
                program.set(i, "" + target);
            }
        }
        // Reserve one zeroed word per declared variable/array/string slot.
        for (int i = 0; i < variableCount; i++) {
            program.add("0");
        }
        placeStrings();
        for (int i = 0; i < program.size(); i++) {
            if (program.get(i).equals(".begin")) {
                program.set(i, "" + symbolTable.get(program.get(i)));
            }
            if (program.get(i).startsWith(":")) {
                Integer address = symbolTable.get(program.get(i));
                if (address == null) {
                    System.out.println("Undefined branch label @" + pc2line.get(pc2line.floorKey(i)));
                }
                program.set(i, "" + address);
            } else if (program.get(i).startsWith("!")) {
                program.set(i, program.get(i).substring(1));
                Integer address = symbolTable.get(program.get(i));
                if (address == null) {
                    System.out.println("Undefined array name: @" + pc2line.get(pc2line.floorKey(i)));
                } else {
                    // Data-segment addresses are negative offsets from the end of the program.
                    program.set(i, "" + (program.size() + address));
                }
            }
        }
    }

    /** Copies each declared string (NUL-terminated) into its reserved data-segment slot. */
    private void placeStrings() {
        for (int i : stringLocationMap.keySet()) {
            String s = stringLocationMap.get(i);
            for (char c : s.toCharArray()) {
                // i is negative, so program.size() + i indexes into the data area.
                program.set(program.size() + i, "" + (int) c);
                i++;
            }
            program.set(program.size() + i, "" + 0); // trailing NUL
        }
    }

    /**
     * First pass: reads every line, dispatching to directive handlers
     * (".declare", ".include", ".begin"), label recording, or instruction assembly.
     */
    private void firstPass(boolean assembleBranchLabels) {
        boolean declare = false;
        boolean include = false;
        //the following: always skip to the beginning of the program.
        //because includes are always at the top.
        //but these lines only need to be done IF this is the top level program (i.e., if root == this).
        if (root == this) {
            program.add("" + 0xD);
            program.add(".begin");
            program.add("" + 0x0);
            pc += 3;
        }
        lineNo = 0;
        while (in.hasNextLine()) {
            lineNo++;
            String line = in.nextLine();
            line = killComments(line).trim();
            if (line.equals("")) continue;
            if (line.equals(".declare")) {
                declare = true;
                include = false;
            } else if (line.equals(".include")) {
                include = true;
                declare = false;
            } else if (line.equals(".begin")) {
                if (root == this) symbolTable.put(".begin", pc);
                include = false;
                declare = false;
            } else if (declare) {
                handleDeclare(line);
            } else if (include) {
                handleInclude(line);
            } else if (line.startsWith(":")) {
                symbolTable.put(line, pc);
                branchLabels.add(line);
            } else {
                handleInstruction(line, assembleBranchLabels);
            }
        }
    }

    /**
     * Assembles an included file with a nested Assembler (first pass only) and
     * merges its program, symbols and bookkeeping into this one, namespacing its
     * keys and re-basing its branch labels by the current pc.
     */
    private void handleInclude(String line) {
        String filename = line + ".ftnt";
        if (!root.includedFiles.contains(filename)) {
            root.includedFiles.add(filename);
            try {
                Assembler assm = new Assembler(new File(filename), root);
                assm.assembleFirstPassOnly();
                ArrayList<String> result = assm.program();
                //we don't want to include the variable stuff... but we __do__ want to include the
                //strings and arrays.
                for (String origKey : assm.symbolTable.keySet()) {
                    String key = redirectKey(origKey, line);
                    if (assm.branchLabels.contains(origKey)) branchLabels.add(key);
                    symbolTable.put(key, assm.symbolTable.get(origKey));
                }
                // Branch labels from the include are relative to that file; re-base them.
                for (String s : branchLabels) {
                    symbolTable.put(s, symbolTable.get(s) + pc);
                }
                index += assm.index;
                stringLocationMap.putAll(assm.stringLocationMap);
                for (int i = 0; i < assm.program.size(); i++) {
                    if (assm.program.get(i).startsWith(":") || assm.program.get(i).startsWith("!")) {
                        assm.program.set(i, redirectKey(assm.program.get(i), line));
                    }
                }
                program.addAll(result);
                pc += result.size();
                pc2line.putAll(assm.pc2line);
                variableCount += assm.variableCount;
            } catch (FileNotFoundException e) {
                System.out.println("Could not link " + filename + " @" + this.filename + ":" + lineNo);
            }
        } //including a file twice is not an error and has no effect.
    }

    /**
     * Namespaces an included file's key as ":file.key" (prefixed with "!" for
     * array references). Keys already containing '.' are assumed namespaced.
     */
    private String redirectKey(String key, String name) {
        if (key.contains(".")) return key;
        boolean exclaim = false;
        if (name.contains("/")) { //get rid of paths in name.
            name = name.substring(name.lastIndexOf('/') + 1);
        }
        if (key.startsWith(":")) {
            key = key.substring(1);
        } else { // key starts with !
            // NOTE(review): substring(2) strips two characters, which assumes keys of
            // the form "!:name"; a plain variable key would lose its first letter.
            // TODO confirm against Instruction.assemble's output format.
            key = key.substring(2);
            exclaim = true;
        }
        key = ":" + name + "." + key;
        if (exclaim) key = "!" + key;
        return key;
    }

    /** Syntax-checks and assembles a single instruction line, recording its source location. */
    private void handleInstruction(String line, boolean assembleBranchLabels) {
        String[] parts = line.split("\\s+");
        syntaxCheck(parts);
        pc2line.put(pc, filename + ":" + lineNo);
        assemble(parts, assembleBranchLabels);
    }

    private void syntaxCheck(String[] parts) {
        try { //try to syntax check the instruction.
            Instruction instruction = Instruction.valueOf(parts[0]);
            instruction.checkSyntax(parts, System.out, lineNo, filename);
        } catch (IllegalArgumentException iae) { //invalid instruction name. Report this.
            System.out.println("Undefined Instruction " + parts[0] + " @" + filename + ":" + lineNo);
        }
    }

    private void assemble(String[] parts, boolean assembleBranchLabels) {
        try {
            Instruction instruction = Instruction.valueOf(parts[0]);
            String[] assembled = instruction.assemble(parts, symbolTable, pc, System.out, lineNo, filename);
            pc += assembled.length;
            for (String i : assembled) {
                program.add(i);
            }
        } catch (IllegalArgumentException iae) {
            // Unknown mnemonic: already reported by syntaxCheck, so stay silent here.
        }
    }

    /** Strips a ";" comment from a line; returns "" for whole-line comments. */
    private static String killComments(String line) {
        if (line.startsWith(";")) return "";
        if (!line.contains(";")) {
            return line;
        }
        // Keep everything strictly before the ';'. (The previous code used
        // indexOf(';') - 1, which also dropped the character immediately before
        // the semicolon and corrupted lines like "ADD;comment".)
        return line.substring(0, line.indexOf(';'));
    }

    /** Parses an integer operand, reporting errors and returning MIN_VALUE on failure. */
    public int parse(String number) {
        try {
            return Integer.parseInt(number);
        } catch (Exception e) {
            System.out.println("Invalid integer value @" + filename + ":" + lineNo);
            return Integer.MIN_VALUE;
        }
    }

    /**
     * Handles one ".declare" line. Supported forms:
     * "name" (variable), "name value" (constant),
     * "name length n" (array), "name is 'text'" (string).
     */
    public void handleDeclare(String line) {
        String[] parts = line.split("\\s+");
        if (parts.length == 1) {
            if (parts[0].contains(".")) {
                System.out.println("Illegal variable name: " + parts[0] + " @ " + filename + ":" + lineNo);
                return;
            }
            symbolTable.put(line, --index);
            variableCount++;
        } else if (parts.length == 2) {
            if (parts[0].contains(".")) {
                System.out.println("Illegal constant name: " + parts[0] + " @ " + filename + ":" + lineNo);
                return;
            }
            symbolTable.put(parts[0], parse(parts[1]));
        } else if (parts.length == 3 && parts[1].equals("length")) {
            try {
                if (parts[0].contains(".")) {
                    System.out.println("Illegal array name: " + parts[0] + " @ " + filename + ":" + lineNo);
                    return;
                }
                int length = Integer.parseInt(parts[2]);
                variableCount += length;
                index -= length;
                symbolTable.put(parts[0], index);
            } catch (NumberFormatException nfe) {
                System.out.println("Illegal array length constant @" + filename + ":" + lineNo);
            }
        } else if (parts.length >= 3 && parts[1].equals("is")) {
            if (parts[0].contains(".")) {
                System.out.println("Illegal string name: " + parts[0] + " @ " + filename + ":" + lineNo);
                return;
            }
            String[] pieces = line.split("\\s+", 3);
            pieces[2] = pieces[2].substring(1, pieces[2].length() - 1); //get rid of single quotes.
            int length = pieces[2].length() + 1; // +1 for the trailing NUL word.
            variableCount += length;
            index -= length;
            symbolTable.put(pieces[0], index);
            stringLocationMap.put(index, pieces[2]);
        } else {
            System.out.println("Illegal constant or variable declaration @" + filename + ":" + lineNo);
        }
    }

    /** @return the assembled program (mutable; words as decimal strings). */
    public ArrayList<String> program() {
        return program;
    }

    /** @return the symbol table built during assembly. */
    public Map<String, Integer> symbolTable() {
        return symbolTable;
    }

    /** @return the address-to-"file:line" map for diagnostics. */
    public Map<Integer, String> lineMap() {
        return pc2line;
    }
}
| |
package org.jolokia.docker.maven.config;
import java.util.List;
import java.util.Locale;
import java.util.Map;
/**
* @author roland
* @since 02.09.14
*/
/**
 * Run-time configuration for starting a Docker container from an image.
 * Fields are injected by Maven from the plugin configuration via their
 * {@code @parameter} javadoc tags; {@link Builder} offers programmatic
 * construction. All getters simply expose the configured value (possibly
 * {@code null} when not set), except where a default is documented.
 *
 * @author roland
 * @since 02.09.14
 */
public class RunImageConfiguration {
    static final RunImageConfiguration DEFAULT = new RunImageConfiguration();
    // Environment variables to set when starting the container. key: variable name, value: env value
    /**
     * @parameter
     */
    private Map<String, String> env;
    /**
     * @parameter
     */
    private Map<String, String> labels;
    /**
     * Path to a property file holding environment variables
     *
     * @parameter
     */
    private String envPropertyFile;
    // Command to execute in container
    /**
     * @parameter
     */
    private String cmd;
    // container domain name
    /**
     * @parameter
     */
    private String domainname;
    // container entry point
    /**
     * @parameter
     */
    private String entrypoint;
    // container hostname
    /**
     * @parameter
     */
    private String hostname;
    // container user
    /**
     * @parameter
     */
    private String user;
    // working directory
    /**
     * @parameter
     */
    private String workingDir;
    // memory in bytes
    /**
     * @parameter
     */
    private Long memory;
    // total memory (swap + ram) in bytes, -1 to disable
    /**
     * @parameter
     */
    private Long memorySwap;
    // Path to a file where the dynamically mapped properties are written to
    /**
     * @parameter
     */
    private String portPropertyFile;
    /**
     * @parameter
     */
    private List<String> dns;
    /**
     * @parameter
     */
    private List<String> dnsSearch;
    /**
     * @parameter
     */
    private List<String> capAdd;
    /**
     * @parameter
     */
    private List<String> capDrop;
    /**
     * @parameter
     */
    private Boolean privileged;
    /**
     * @parameter
     */
    private List<String> extraHosts;
    // Port mapping. Can contain symbolic names in which case dynamic
    // ports are used
    /**
     * @parameter
     */
    private List<String> ports;
    /** @parameter */
    private NamingStrategy namingStrategy;

    // Naming scheme for how to name container
    public enum NamingStrategy {
        none,  // No extra naming
        alias; // Use the alias as defined in the configuration
    }

    // Mount volumes from the given image's started containers
    /**
     * @parameter
     */
    private VolumeConfiguration volumes;
    // Links to other container started
    /**
     * @parameter
     */
    private List<String> links;
    // Configuration for how to wait during startup of the container
    /**
     * @parameter
     */
    private WaitConfiguration wait;
    /**
     * @parameter
     */
    private LogConfiguration log;
    /**
     * @parameter
     */
    private RestartPolicy restartPolicy;

    public RunImageConfiguration() { }

    public Map<String, String> getEnv() {
        return env;
    }

    public Map<String, String> getLabels() {
        return labels;
    }

    public String getEnvPropertyFile() {
        return envPropertyFile;
    }

    public String getEntrypoint() {
        return entrypoint;
    }

    public String getHostname() {
        return hostname;
    }

    public String getDomainname() {
        return domainname;
    }

    public String getUser() {
        return user;
    }

    public Long getMemory() {
        return memory;
    }

    public Long getMemorySwap() {
        return memorySwap;
    }

    public List<String> getPorts() {
        return ports;
    }

    public String getCmd() {
        return cmd;
    }

    public String getPortPropertyFile() {
        return portPropertyFile;
    }

    public String getWorkingDir() {
        return workingDir;
    }

    public WaitConfiguration getWaitConfiguration() {
        return wait;
    }

    public LogConfiguration getLog() {
        return log;
    }

    public List<String> getCapAdd() {
        return capAdd;
    }

    public List<String> getCapDrop() {
        return capDrop;
    }

    public List<String> getDns() {
        return dns;
    }

    public List<String> getDnsSearch() {
        return dnsSearch;
    }

    public List<String> getExtraHosts() {
        return extraHosts;
    }

    public VolumeConfiguration getVolumeConfiguration() {
        return volumes;
    }

    public List<String> getLinks() {
        return links;
    }

    /** @return the configured naming strategy, defaulting to {@link NamingStrategy#none}. */
    public NamingStrategy getNamingStrategy() {
        return namingStrategy == null ? NamingStrategy.none : namingStrategy;
    }

    public Boolean getPrivileged() {
        return privileged;
    }

    /** @return the configured restart policy, defaulting to {@link RestartPolicy#DEFAULT}. */
    public RestartPolicy getRestartPolicy() {
        return (restartPolicy == null) ? RestartPolicy.DEFAULT : restartPolicy;
    }

    // ======================================================================================

    /** Fluent builder for {@link RunImageConfiguration}. */
    public static class Builder {
        private RunImageConfiguration config = new RunImageConfiguration();

        public Builder env(Map<String, String> env) {
            config.env = env;
            return this;
        }

        public Builder labels(Map<String, String> labels) {
            config.labels = labels;
            return this;
        }

        public Builder envPropertyFile(String envPropertyFile) {
            config.envPropertyFile = envPropertyFile;
            return this;
        }

        public Builder cmd(String cmd) {
            config.cmd = cmd;
            return this;
        }

        public Builder domainname(String domainname) {
            config.domainname = domainname;
            return this;
        }

        public Builder entrypoint(String entrypoint) {
            config.entrypoint = entrypoint;
            return this;
        }

        public Builder hostname(String hostname) {
            config.hostname = hostname;
            return this;
        }

        public Builder portPropertyFile(String portPropertyFile) {
            config.portPropertyFile = portPropertyFile;
            return this;
        }

        public Builder workingDir(String workingDir) {
            config.workingDir = workingDir;
            return this;
        }

        public Builder user(String user) {
            config.user = user;
            return this;
        }

        public Builder memory(Long memory) {
            config.memory = memory;
            return this;
        }

        public Builder memorySwap(Long memorySwap) {
            config.memorySwap = memorySwap;
            return this;
        }

        public Builder capAdd(List<String> capAdd) {
            config.capAdd = capAdd;
            return this;
        }

        public Builder capDrop(List<String> capDrop) {
            config.capDrop = capDrop;
            return this;
        }

        public Builder dns(List<String> dns) {
            config.dns = dns;
            return this;
        }

        public Builder dnsSearch(List<String> dnsSearch) {
            config.dnsSearch = dnsSearch;
            return this;
        }

        public Builder extraHosts(List<String> extraHosts) {
            config.extraHosts = extraHosts;
            return this;
        }

        public Builder ports(List<String> ports) {
            config.ports = ports;
            return this;
        }

        public Builder volumes(VolumeConfiguration volumes) {
            config.volumes = volumes;
            return this;
        }

        public Builder links(List<String> links) {
            config.links = links;
            return this;
        }

        public Builder wait(WaitConfiguration wait) {
            config.wait = wait;
            return this;
        }

        public Builder log(LogConfiguration log) {
            config.log = log;
            return this;
        }

        /**
         * Sets the naming strategy from its (case-insensitive) name; {@code null}
         * maps to {@link NamingStrategy#none}. Uses {@link Locale#ROOT} so parsing
         * is not affected by the JVM's default locale (e.g. the Turkish dotless-i).
         */
        public Builder namingStrategy(String namingStrategy) {
            config.namingStrategy = namingStrategy == null
                    ? NamingStrategy.none
                    : NamingStrategy.valueOf(namingStrategy.toLowerCase(Locale.ROOT));
            return this;
        }

        public Builder privileged(Boolean privileged) {
            config.privileged = privileged;
            return this;
        }

        public Builder restartPolicy(RestartPolicy restartPolicy) {
            config.restartPolicy = restartPolicy;
            return this;
        }

        public RunImageConfiguration build() {
            return config;
        }
    }
}
| |
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.engine.test.api.event;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertNotEquals;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.flowable.bpmn.model.FlowNode;
import org.flowable.engine.common.api.delegate.event.FlowableEngineEntityEvent;
import org.flowable.engine.common.api.delegate.event.FlowableEngineEventType;
import org.flowable.engine.common.api.delegate.event.FlowableEntityEvent;
import org.flowable.engine.common.api.delegate.event.FlowableEvent;
import org.flowable.engine.delegate.event.AbstractFlowableEngineEventListener;
import org.flowable.engine.delegate.event.FlowableActivityCancelledEvent;
import org.flowable.engine.delegate.event.FlowableCancelledEvent;
import org.flowable.engine.delegate.event.FlowableProcessStartedEvent;
import org.flowable.engine.delegate.event.impl.FlowableActivityCancelledEventImpl;
import org.flowable.engine.impl.persistence.entity.ExecutionEntity;
import org.flowable.engine.impl.test.PluggableFlowableTestCase;
import org.flowable.engine.repository.ProcessDefinition;
import org.flowable.engine.runtime.Execution;
import org.flowable.engine.runtime.ProcessInstance;
import org.flowable.engine.test.Deployment;
/**
* Test case for all {@link FlowableEvent}s related to process instances.
*
* @author Tijs Rademakers
*/
public class ProcessInstanceEventsTest extends PluggableFlowableTestCase {
private TestInitializedEntityEventListener listener;
/**
* Test create, update and delete events of process instances.
*/
@Deployment
public void testProcessInstanceEvents() throws Exception {
ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("oneTaskProcess");
assertNotNull(processInstance);
// Check create-event
assertEquals(6, listener.getEventsReceived().size());
assertEquals(6, FilteredStaticTestFlowableEventListener.getEventsReceived().size());
assertTrue(listener.getEventsReceived().get(0) instanceof FlowableEngineEntityEvent);
// process instance create event
FlowableEngineEntityEvent event = (FlowableEngineEntityEvent) listener.getEventsReceived().get(0);
assertEquals(FlowableEngineEventType.ENTITY_CREATED, event.getType());
assertEquals(processInstance.getId(), ((ProcessInstance) event.getEntity()).getId());
assertEquals(processInstance.getId(), event.getProcessInstanceId());
assertEquals(processInstance.getId(), event.getExecutionId());
assertEquals(processInstance.getProcessDefinitionId(), event.getProcessDefinitionId());
assertEventsEqual(event, FilteredStaticTestFlowableEventListener.getEventsReceived().get(0));
event = (FlowableEngineEntityEvent) listener.getEventsReceived().get(1);
assertEquals(FlowableEngineEventType.PROCESS_CREATED, event.getType());
assertEquals(processInstance.getId(), event.getProcessInstanceId());
assertEquals(processInstance.getId(), event.getExecutionId());
assertEquals(processInstance.getProcessDefinitionId(), event.getProcessDefinitionId());
assertEventsEqual(event, FilteredStaticTestFlowableEventListener.getEventsReceived().get(1));
event = (FlowableEngineEntityEvent) listener.getEventsReceived().get(2);
assertEquals(FlowableEngineEventType.ENTITY_INITIALIZED, event.getType());
assertEquals(processInstance.getId(), event.getProcessInstanceId());
assertEquals(processInstance.getId(), event.getExecutionId());
assertEquals(processInstance.getProcessDefinitionId(), event.getProcessDefinitionId());
assertEventsEqual(event, FilteredStaticTestFlowableEventListener.getEventsReceived().get(2));
// start event create event
event = (FlowableEngineEntityEvent) listener.getEventsReceived().get(3);
assertEquals(FlowableEngineEventType.ENTITY_CREATED, event.getType());
assertEquals(processInstance.getId(), event.getProcessInstanceId());
assertNotEquals(processInstance.getId(), event.getExecutionId());
assertEquals(processInstance.getProcessDefinitionId(), event.getProcessDefinitionId());
assertEventsEqual(event, FilteredStaticTestFlowableEventListener.getEventsReceived().get(3));
// start event create initialized
event = (FlowableEngineEntityEvent) listener.getEventsReceived().get(4);
assertEquals(FlowableEngineEventType.ENTITY_INITIALIZED, event.getType());
assertEquals(processInstance.getId(), event.getProcessInstanceId());
assertNotEquals(processInstance.getId(), event.getExecutionId());
assertEquals(processInstance.getProcessDefinitionId(), event.getProcessDefinitionId());
assertEventsEqual(event, FilteredStaticTestFlowableEventListener.getEventsReceived().get(4));
event = (FlowableEngineEntityEvent) listener.getEventsReceived().get(5);
assertEquals(FlowableEngineEventType.PROCESS_STARTED, event.getType());
assertEquals(processInstance.getId(), event.getProcessInstanceId());
assertEquals(processInstance.getProcessDefinitionId(), event.getProcessDefinitionId());
assertTrue(event instanceof FlowableProcessStartedEvent);
assertNull(((FlowableProcessStartedEvent) event).getNestedProcessDefinitionId());
assertNull(((FlowableProcessStartedEvent) event).getNestedProcessInstanceId());
assertEventsEqual(event, FilteredStaticTestFlowableEventListener.getEventsReceived().get(5));
listener.clearEventsReceived();
FilteredStaticTestFlowableEventListener.clearEventsReceived();
// Check update event when suspended/activated
runtimeService.suspendProcessInstanceById(processInstance.getId());
runtimeService.activateProcessInstanceById(processInstance.getId());
assertEquals(4, listener.getEventsReceived().size());
assertEquals(4, FilteredStaticTestFlowableEventListener.getEventsReceived().size());
event = (FlowableEngineEntityEvent) listener.getEventsReceived().get(0);
assertEquals(processInstance.getId(), ((ProcessInstance) event.getEntity()).getId());
assertEquals(FlowableEngineEventType.ENTITY_SUSPENDED, event.getType());
assertEquals(processInstance.getId(), event.getProcessInstanceId());
assertEquals(processInstance.getId(), event.getExecutionId());
assertEquals(processInstance.getProcessDefinitionId(), event.getProcessDefinitionId());
assertEventsEqual(event, FilteredStaticTestFlowableEventListener.getEventsReceived().get(0));
event = (FlowableEngineEntityEvent) listener.getEventsReceived().get(1);
assertEquals(FlowableEngineEventType.ENTITY_SUSPENDED, event.getType());
assertEquals(processInstance.getId(), event.getProcessInstanceId());
assertNotEquals(processInstance.getId(), event.getExecutionId());
assertEquals(processInstance.getProcessDefinitionId(), event.getProcessDefinitionId());
assertEventsEqual(event, FilteredStaticTestFlowableEventListener.getEventsReceived().get(1));
event = (FlowableEngineEntityEvent) listener.getEventsReceived().get(2);
assertEquals(FlowableEngineEventType.ENTITY_ACTIVATED, event.getType());
assertEquals(processInstance.getId(), ((ProcessInstance) event.getEntity()).getId());
assertEquals(processInstance.getId(), event.getProcessInstanceId());
assertEquals(processInstance.getId(), event.getExecutionId());
assertEquals(processInstance.getProcessDefinitionId(), event.getProcessDefinitionId());
assertEventsEqual(event, FilteredStaticTestFlowableEventListener.getEventsReceived().get(2));
event = (FlowableEngineEntityEvent) listener.getEventsReceived().get(3);
assertEquals(FlowableEngineEventType.ENTITY_ACTIVATED, event.getType());
assertEquals(processInstance.getId(), event.getProcessInstanceId());
assertNotEquals(processInstance.getId(), event.getExecutionId());
assertEquals(processInstance.getProcessDefinitionId(), event.getProcessDefinitionId());
assertEventsEqual(event, FilteredStaticTestFlowableEventListener.getEventsReceived().get(3));
listener.clearEventsReceived();
FilteredStaticTestFlowableEventListener.clearEventsReceived();
// Check update event when process-definition is suspended (should
// cascade suspend/activate all process instances)
repositoryService.suspendProcessDefinitionById(processInstance.getProcessDefinitionId(), true, null);
repositoryService.activateProcessDefinitionById(processInstance.getProcessDefinitionId(), true, null);
assertEquals(4, listener.getEventsReceived().size());
assertEquals(4, FilteredStaticTestFlowableEventListener.getEventsReceived().size());
event = (FlowableEngineEntityEvent) listener.getEventsReceived().get(0);
assertEquals(processInstance.getId(), ((ProcessInstance) event.getEntity()).getId());
assertEquals(FlowableEngineEventType.ENTITY_SUSPENDED, event.getType());
assertEquals(processInstance.getId(), event.getProcessInstanceId());
assertEquals(processInstance.getId(), event.getExecutionId());
assertEquals(processInstance.getProcessDefinitionId(), event.getProcessDefinitionId());
assertEventsEqual(event, FilteredStaticTestFlowableEventListener.getEventsReceived().get(0));
event = (FlowableEngineEntityEvent) listener.getEventsReceived().get(1);
assertEquals(FlowableEngineEventType.ENTITY_SUSPENDED, event.getType());
assertEquals(processInstance.getId(), event.getProcessInstanceId());
assertNotEquals(processInstance.getId(), event.getExecutionId());
assertEquals(processInstance.getProcessDefinitionId(), event.getProcessDefinitionId());
assertEventsEqual(event, FilteredStaticTestFlowableEventListener.getEventsReceived().get(1));
event = (FlowableEngineEntityEvent) listener.getEventsReceived().get(2);
assertEquals(FlowableEngineEventType.ENTITY_ACTIVATED, event.getType());
assertEquals(processInstance.getId(), ((ProcessInstance) event.getEntity()).getId());
assertEquals(processInstance.getId(), event.getProcessInstanceId());
assertEquals(processInstance.getId(), event.getExecutionId());
assertEquals(processInstance.getProcessDefinitionId(), event.getProcessDefinitionId());
assertEventsEqual(event, FilteredStaticTestFlowableEventListener.getEventsReceived().get(2));
event = (FlowableEngineEntityEvent) listener.getEventsReceived().get(3);
assertEquals(FlowableEngineEventType.ENTITY_ACTIVATED, event.getType());
assertEquals(processInstance.getId(), event.getProcessInstanceId());
assertNotEquals(processInstance.getId(), event.getExecutionId());
assertEquals(processInstance.getProcessDefinitionId(), event.getProcessDefinitionId());
assertEventsEqual(event, FilteredStaticTestFlowableEventListener.getEventsReceived().get(3));
listener.clearEventsReceived();
FilteredStaticTestFlowableEventListener.clearEventsReceived();
// Check update-event when business-key is updated
runtimeService.updateBusinessKey(processInstance.getId(), "thekey");
assertEquals(1, listener.getEventsReceived().size());
assertEquals(1, FilteredStaticTestFlowableEventListener.getEventsReceived().size());
event = (FlowableEngineEntityEvent) listener.getEventsReceived().get(0);
assertEquals(processInstance.getId(), ((ProcessInstance) event.getEntity()).getId());
assertEquals(FlowableEngineEventType.ENTITY_UPDATED, event.getType());
assertEquals(processInstance.getId(), event.getProcessInstanceId());
assertEquals(processInstance.getId(), event.getExecutionId());
assertEquals(processInstance.getProcessDefinitionId(), event.getProcessDefinitionId());
assertEventsEqual(event, FilteredStaticTestFlowableEventListener.getEventsReceived().get(0));
listener.clearEventsReceived();
FilteredStaticTestFlowableEventListener.clearEventsReceived();
runtimeService.deleteProcessInstance(processInstance.getId(), "Testing events");
List<FlowableEvent> processCancelledEvents = listener.filterEvents(FlowableEngineEventType.PROCESS_CANCELLED);
assertEquals(1, processCancelledEvents.size());
FlowableCancelledEvent cancelledEvent = (FlowableCancelledEvent) processCancelledEvents.get(0);
assertEquals(FlowableEngineEventType.PROCESS_CANCELLED, cancelledEvent.getType());
assertEquals(processInstance.getId(), cancelledEvent.getProcessInstanceId());
assertEquals(processInstance.getId(), cancelledEvent.getExecutionId());
assertEventsEqual(cancelledEvent, FilteredStaticTestFlowableEventListener.filterEvents(FlowableEngineEventType.PROCESS_CANCELLED).get(0));
listener.clearEventsReceived();
FilteredStaticTestFlowableEventListener.clearEventsReceived();
}
/**
 * Test the create/initialize/start event sequence for a process instance that spawns a
 * nested sub process via a call activity. Verifies the exact order and content of the
 * twelve engine events fired on start, for both the parent and the called sub process.
 */
@Deployment(resources = {"org/flowable/engine/test/api/runtime/nestedSubProcess.bpmn20.xml", "org/flowable/engine/test/api/runtime/subProcess.bpmn20.xml"})
public void testSubProcessInstanceEvents() throws Exception {
ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("nestedSimpleSubProcess");
assertNotNull(processInstance);
String processDefinitionId = processInstance.getProcessDefinitionId();
// Twelve events are expected in total: six for the parent process start
// and six for the called sub process start (checked one by one below).
assertEquals(12, listener.getEventsReceived().size());
assertTrue(listener.getEventsReceived().get(0) instanceof FlowableEngineEntityEvent);
// process instance created event
FlowableEngineEntityEvent event = (FlowableEngineEntityEvent) listener.getEventsReceived().get(0);
assertEquals(FlowableEngineEventType.ENTITY_CREATED, event.getType());
assertEquals(processInstance.getId(), ((ProcessInstance) event.getEntity()).getId());
assertEquals(processInstance.getId(), event.getProcessInstanceId());
assertEquals(processInstance.getId(), event.getExecutionId());
assertEquals(processDefinitionId, event.getProcessDefinitionId());
// process created event: its execution id equals the process instance id
event = (FlowableEngineEntityEvent) listener.getEventsReceived().get(1);
String processExecutionId = event.getExecutionId();
assertEquals(FlowableEngineEventType.PROCESS_CREATED, event.getType());
assertEquals(processInstance.getId(), event.getProcessInstanceId());
assertEquals(processInstance.getId(), processExecutionId);
assertEquals(processDefinitionId, event.getProcessDefinitionId());
// process instance initialized event
event = (FlowableEngineEntityEvent) listener.getEventsReceived().get(2);
processExecutionId = event.getExecutionId();
assertEquals(FlowableEngineEventType.ENTITY_INITIALIZED, event.getType());
assertEquals(processInstance.getId(), event.getProcessInstanceId());
assertEquals(processInstance.getId(), processExecutionId);
assertEquals(processDefinitionId, event.getProcessDefinitionId());
// start event created event — executes on a child execution, not the instance itself
event = (FlowableEngineEntityEvent) listener.getEventsReceived().get(3);
processExecutionId = event.getExecutionId();
assertEquals(FlowableEngineEventType.ENTITY_CREATED, event.getType());
assertEquals(processInstance.getId(), event.getProcessInstanceId());
assertNotEquals(processInstance.getId(), processExecutionId);
assertEquals(processDefinitionId, event.getProcessDefinitionId());
// start event initialized event
event = (FlowableEngineEntityEvent) listener.getEventsReceived().get(4);
assertEquals(FlowableEngineEventType.ENTITY_INITIALIZED, event.getType());
assertEquals(processInstance.getId(), event.getProcessInstanceId());
assertNotEquals(processInstance.getId(), ((ExecutionEntity) event.getEntity()).getId());
// Process start: the top-level instance has no nested (super) process, so both nested ids are null
event = (FlowableEngineEntityEvent) listener.getEventsReceived().get(5);
assertEquals(FlowableEngineEventType.PROCESS_STARTED, event.getType());
assertEquals(processInstance.getId(), event.getProcessInstanceId());
assertEquals(processInstance.getProcessDefinitionId(), event.getProcessDefinitionId());
assertTrue(event instanceof FlowableProcessStartedEvent);
assertNull(((FlowableProcessStartedEvent) event).getNestedProcessDefinitionId());
assertNull(((FlowableProcessStartedEvent) event).getNestedProcessInstanceId());
// sub process instance created event.
// NOTE(review): processExecutionId still holds the child execution id captured at event 3;
// the assert below establishes that this execution is the sub process's super execution
// (presumably the call activity's execution — confirm against the process model).
event = (FlowableEngineEntityEvent) listener.getEventsReceived().get(6);
assertEquals(FlowableEngineEventType.ENTITY_CREATED, event.getType());
ExecutionEntity subProcessEntity = (ExecutionEntity) event.getEntity();
assertEquals(processExecutionId, subProcessEntity.getSuperExecutionId());
String subProcessInstanceId = subProcessEntity.getProcessInstanceId();
// sub process created event — same super execution as above
event = (FlowableEngineEntityEvent) listener.getEventsReceived().get(7);
assertEquals(FlowableEngineEventType.PROCESS_CREATED, event.getType());
subProcessEntity = (ExecutionEntity) event.getEntity();
assertEquals(processExecutionId, subProcessEntity.getSuperExecutionId());
// sub process instance initialized event
event = (FlowableEngineEntityEvent) listener.getEventsReceived().get(8);
assertEquals(FlowableEngineEventType.ENTITY_INITIALIZED, event.getType());
assertEquals(subProcessInstanceId, event.getExecutionId());
String subProcessDefinitionId = ((ExecutionEntity) event.getEntity()).getProcessDefinitionId();
assertNotNull(subProcessDefinitionId);
// sub process instance child execution created event
event = (FlowableEngineEntityEvent) listener.getEventsReceived().get(9);
assertEquals(FlowableEngineEventType.ENTITY_CREATED, event.getType());
assertEquals(subProcessInstanceId, event.getProcessInstanceId());
assertNotEquals(subProcessInstanceId, event.getExecutionId());
subProcessDefinitionId = ((ExecutionEntity) event.getEntity()).getProcessDefinitionId();
assertNotNull(subProcessDefinitionId);
ProcessDefinition subProcessDefinition = repositoryService.getProcessDefinition(subProcessDefinitionId);
assertEquals("simpleSubProcess", subProcessDefinition.getKey());
// sub process instance child execution initialized event
event = (FlowableEngineEntityEvent) listener.getEventsReceived().get(10);
assertEquals(FlowableEngineEventType.ENTITY_INITIALIZED, event.getType());
assertEquals(subProcessInstanceId, event.getProcessInstanceId());
assertNotEquals(subProcessInstanceId, event.getExecutionId());
subProcessDefinitionId = ((ExecutionEntity) event.getEntity()).getProcessDefinitionId();
assertNotNull(subProcessDefinitionId);
// sub process started event: its nested ids point back at the parent process
event = (FlowableEngineEntityEvent) listener.getEventsReceived().get(11);
assertEquals(FlowableEngineEventType.PROCESS_STARTED, event.getType());
assertEquals(subProcessInstanceId, event.getProcessInstanceId());
assertEquals(subProcessDefinitionId, event.getProcessDefinitionId());
assertTrue(event instanceof FlowableProcessStartedEvent);
assertEquals(processDefinitionId, ((FlowableProcessStartedEvent) event).getNestedProcessDefinitionId());
assertEquals(processInstance.getId(), ((FlowableProcessStartedEvent) event).getNestedProcessInstanceId());
listener.clearEventsReceived();
}
/**
 * Starts a signal-catching process and then a signal-throwing process.
 * Smoke test: both starts must run through event dispatching without errors;
 * received events are discarded after each start.
 */
@Deployment(resources = {"org/flowable/engine/test/bpmn/event/signal/SignalEventTest.testSignalWithGlobalScope.bpmn20.xml"})
public void testSignalProcessInstanceStart() throws Exception {
    runtimeService.startProcessInstanceByKey("processWithSignalCatch");
    listener.clearEventsReceived();

    this.runtimeService.startProcessInstanceByKey("processWithSignalThrow");
    this.listener.clearEventsReceived();
}
/**
 * Test a straight Start -> End process: starting it completes it immediately,
 * and exactly one PROCESS_COMPLETED event must be dispatched.
 */
@Deployment(resources = {"org/flowable/engine/test/api/event/ProcessInstanceEventsTest.noneTaskProcess.bpmn20.xml"})
public void testProcessCompleted_StartEnd() throws Exception {
    this.runtimeService.startProcessInstanceByKey("noneTaskProcess");

    // Message fixed to name the actual enum (FlowableEngineEventType, not the legacy ActivitiEventType).
    assertEquals("FlowableEngineEventType.PROCESS_COMPLETED was expected 1 time.", 1, listener.filterEvents(FlowableEngineEventType.PROCESS_COMPLETED).size());
}
/**
 * Test a Start -> user task process without an explicit end event:
 * completing the single task must still dispatch exactly one PROCESS_COMPLETED event.
 */
@Deployment(resources = {"org/flowable/engine/test/api/event/ProcessInstanceEventsTest.noEndProcess.bpmn20.xml"})
public void testProcessCompleted_NoEnd() throws Exception {
    ProcessInstance noEndProcess = this.runtimeService.startProcessInstanceByKey("noEndProcess");

    // Completing the only task ends the instance even though the model has no end event.
    org.flowable.task.api.Task task = taskService.createTaskQuery().processInstanceId(noEndProcess.getId()).singleResult();
    taskService.complete(task.getId());

    // Message fixed to name the actual enum (FlowableEngineEventType, not the legacy ActivitiEventType).
    assertEquals("FlowableEngineEventType.PROCESS_COMPLETED was expected 1 time.", 1, listener.filterEvents(FlowableEngineEventType.PROCESS_COMPLETED).size());
}
/**
 * Test a process where a parallel gateway forks into two branches that both
 * run out without end events; exactly one PROCESS_COMPLETED event is expected.
 */
@Deployment(resources = {"org/flowable/engine/test/api/event/ProcessInstanceEventsTest.parallelGatewayNoEndProcess.bpmn20.xml"})
public void testProcessCompleted_ParallelGatewayNoEnd() throws Exception {
    this.runtimeService.startProcessInstanceByKey("noEndProcess");

    // Message fixed to name the actual enum (FlowableEngineEventType, not the legacy ActivitiEventType).
    assertEquals("FlowableEngineEventType.PROCESS_COMPLETED was expected 1 time.", 1, listener.filterEvents(FlowableEngineEventType.PROCESS_COMPLETED).size());
}
/**
 * Test a process where a parallel gateway forks into two branches, each with its own
 * end event; the instance must still dispatch exactly one PROCESS_COMPLETED event.
 */
@Deployment(resources = {"org/flowable/engine/test/api/event/ProcessInstanceEventsTest.parallelGatewayTwoEndsProcess.bpmn20.xml"})
public void testProcessCompleted_ParallelGatewayTwoEnds() throws Exception {
    this.runtimeService.startProcessInstanceByKey("noEndProcess");

    List<FlowableEvent> events = listener.filterEvents(FlowableEngineEventType.PROCESS_COMPLETED);
    // Message fixed to name the actual enum (FlowableEngineEventType, not the legacy ActivitiEventType).
    assertEquals("FlowableEngineEventType.PROCESS_COMPLETED was expected 1 time.", 1, events.size());
}
/**
 * Terminate-all end event inside a multi-instance call activity: every process
 * instance in the hierarchy must report PROCESS_COMPLETED_WITH_TERMINATE_END_EVENT,
 * six events in total.
 */
@Deployment(resources = {
        "org/flowable/engine/test/bpmn/event/end/TerminateEndEventTest.testTerminateInCallActivityMulitInstance.bpmn",
        "org/flowable/engine/test/bpmn/event/end/TerminateEndEventTest.subProcessTerminateTerminateAll.bpmn20.xml"})
public void testProcessCompleted_TerminateInCallActivityMultiInstanceTerminateAll() throws Exception {
    runtimeService.startProcessInstanceByKey("terminateEndEventExample");

    List<FlowableEvent> terminateEvents = listener.filterEvents(FlowableEngineEventType.PROCESS_COMPLETED_WITH_TERMINATE_END_EVENT);
    assertEquals("FlowableEventType.PROCESS_COMPLETED_WITH_TERMINATE_END_EVENT was expected 6 times.", 6, terminateEvents.size());
}
/**
 * Deleting a running process instance must fire one PROCESS_CANCELLED event for the
 * instance and one ACTIVITY_CANCELLED event for its active user task, both carrying
 * the delete reason as cause.
 */
@Deployment(resources = {"org/flowable/engine/test/api/runtime/oneTaskProcess.bpmn20.xml"})
public void testProcessInstanceCancelledEvents_cancel() throws Exception {
    ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("oneTaskProcess");
    assertNotNull(processInstance);
    listener.clearEventsReceived();

    runtimeService.deleteProcessInstance(processInstance.getId(), "delete_test");

    List<FlowableEvent> processCancelledEvents = listener.filterEvents(FlowableEngineEventType.PROCESS_CANCELLED);
    // Message fixed to name the actual enum (FlowableEngineEventType, not the legacy ActivitiEventType).
    assertEquals("FlowableEngineEventType.PROCESS_CANCELLED was expected 1 time.", 1, processCancelledEvents.size());
    FlowableCancelledEvent processCancelledEvent = (FlowableCancelledEvent) processCancelledEvents.get(0);
    assertTrue("The cause has to be the same as deleteProcessInstance method call", FlowableCancelledEvent.class.isAssignableFrom(processCancelledEvent.getClass()));
    assertEquals("The process instance has to be the same as in deleteProcessInstance method call", processInstance.getId(), processCancelledEvent.getProcessInstanceId());
    assertEquals("The execution instance has to be the same as in deleteProcessInstance method call", processInstance.getId(), processCancelledEvent.getExecutionId());
    assertEquals("The cause has to be the same as in deleteProcessInstance method call", "delete_test", processCancelledEvent.getCause());

    List<FlowableEvent> taskCancelledEvents = listener.filterEvents(FlowableEngineEventType.ACTIVITY_CANCELLED);
    assertEquals("FlowableEngineEventType.ACTIVITY_CANCELLED was expected 1 time.", 1, taskCancelledEvents.size());
    FlowableActivityCancelledEvent activityCancelledEvent = (FlowableActivityCancelledEvent) taskCancelledEvents.get(0);
    assertTrue("The cause has to be the same as deleteProcessInstance method call", FlowableActivityCancelledEvent.class.isAssignableFrom(activityCancelledEvent.getClass()));
    assertEquals("The process instance has to be the same as in deleteProcessInstance method call", processInstance.getId(), activityCancelledEvent.getProcessInstanceId());
    assertEquals("The cause has to be the same as in deleteProcessInstance method call", "delete_test", activityCancelledEvent.getCause());

    listener.clearEventsReceived();
}
/**
 * Deleting a process instance with a running called sub process must cancel the whole
 * hierarchy: the sub process instance is cancelled first, then the parent, and an
 * ACTIVITY_CANCELLED event fires both in the sub process and for the call activity.
 */
@Deployment(resources = {"org/flowable/engine/test/api/runtime/nestedSubProcess.bpmn20.xml", "org/flowable/engine/test/api/runtime/subProcess.bpmn20.xml"})
public void testProcessInstanceCancelledEvents_cancelProcessHierarchy() throws Exception {
    ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("nestedSimpleSubProcess");
    ProcessInstance subProcess = runtimeService.createProcessInstanceQuery().superProcessInstanceId(processInstance.getId()).singleResult();
    assertNotNull(processInstance);
    listener.clearEventsReceived();

    runtimeService.deleteProcessInstance(processInstance.getId(), "delete_test");

    List<FlowableEvent> processCancelledEvents = listener.filterEvents(FlowableEngineEventType.PROCESS_CANCELLED);
    // Message fixed to name the actual enum (FlowableEngineEventType, not the legacy ActivitiEventType).
    assertEquals("FlowableEngineEventType.PROCESS_CANCELLED was expected 2 times.", 2, processCancelledEvents.size());

    // The called sub process instance is cancelled before its parent.
    FlowableCancelledEvent processCancelledEvent = (FlowableCancelledEvent) processCancelledEvents.get(0);
    assertTrue("The cause has to be the same as deleteProcessInstance method call", FlowableCancelledEvent.class.isAssignableFrom(processCancelledEvent.getClass()));
    assertEquals("The process instance has to be the same as in deleteProcessInstance method call", subProcess.getId(), processCancelledEvent.getProcessInstanceId());
    assertEquals("The execution instance has to be the same as in deleteProcessInstance method call", subProcess.getId(), processCancelledEvent.getExecutionId());
    assertEquals("The cause has to be the same as in deleteProcessInstance method call", "delete_test", processCancelledEvent.getCause());

    processCancelledEvent = (FlowableCancelledEvent) processCancelledEvents.get(1);
    assertTrue("The cause has to be the same as deleteProcessInstance method call", FlowableCancelledEvent.class.isAssignableFrom(processCancelledEvent.getClass()));
    assertEquals("The process instance has to be the same as in deleteProcessInstance method call", processInstance.getId(), processCancelledEvent.getProcessInstanceId());
    assertEquals("The execution instance has to be the same as in deleteProcessInstance method call", processInstance.getId(), processCancelledEvent.getExecutionId());
    assertEquals("The cause has to be the same as in deleteProcessInstance method call", "delete_test", processCancelledEvent.getCause());

    assertEquals("No task can be active for deleted process.", 0, this.taskService.createTaskQuery().processInstanceId(processInstance.getId()).count());

    List<FlowableEvent> taskCancelledEvents = listener.filterEvents(FlowableEngineEventType.ACTIVITY_CANCELLED);
    assertEquals("FlowableEngineEventType.ACTIVITY_CANCELLED was expected 2 times.", 2, taskCancelledEvents.size());

    FlowableActivityCancelledEvent activityCancelledEvent = (FlowableActivityCancelledEvent) taskCancelledEvents.get(0);
    assertTrue("The cause has to be the same as deleteProcessInstance method call", FlowableActivityCancelledEvent.class.isAssignableFrom(activityCancelledEvent.getClass()));
    assertEquals("The process instance has to point to the subprocess", subProcess.getId(), activityCancelledEvent.getProcessInstanceId());
    assertEquals("The cause has to be the same as in deleteProcessInstance method call", "delete_test", activityCancelledEvent.getCause());

    activityCancelledEvent = (FlowableActivityCancelledEvent) taskCancelledEvents.get(1);
    assertTrue("The cause has to be the same as deleteProcessInstance method call", FlowableActivityCancelledEvent.class.isAssignableFrom(activityCancelledEvent.getClass()));
    assertEquals("The process instance has to point to the main process", processInstance.getId(), activityCancelledEvent.getProcessInstanceId());
    assertEquals("expect callActivity type", "callActivity", activityCancelledEvent.getActivityType());
    assertEquals("The cause has to be the same as in deleteProcessInstance method call", "delete_test", activityCancelledEvent.getCause());

    listener.clearEventsReceived();
}
/**
 * Completing a process normally must not fire any cancellation events,
 * neither at the process level nor at the activity level.
 */
@Deployment(resources = {"org/flowable/engine/test/api/runtime/oneTaskProcess.bpmn20.xml"})
public void testProcessInstanceCancelledEvents_complete() throws Exception {
    ProcessInstance instance = runtimeService.startProcessInstanceByKey("oneTaskProcess");
    assertNotNull(instance);

    // Drive the single user task to completion, ending the instance normally.
    org.flowable.task.api.Task userTask = taskService.createTaskQuery().processInstanceId(instance.getId()).singleResult();
    taskService.complete(userTask.getId());

    assertEquals("There should be no FlowableEventType.PROCESS_CANCELLED event after process complete.", 0,
            listener.filterEvents(FlowableEngineEventType.PROCESS_CANCELLED).size());
    assertEquals("There should be no FlowableEventType.ACTIVITY_CANCELLED event.", 0,
            listener.filterEvents(FlowableEngineEventType.ACTIVITY_CANCELLED).size());
}
/**
 * Normal completion must not be reported as a cancellation: after completing the
 * single task, no PROCESS_CANCELLED event may have been dispatched.
 */
@Deployment(resources = {"org/flowable/engine/test/api/runtime/oneTaskProcess.bpmn20.xml"})
public void testProcessInstanceTerminatedEvents_complete() throws Exception {
    ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("oneTaskProcess");
    assertNotNull(processInstance);

    org.flowable.task.api.Task task = taskService.createTaskQuery().processInstanceId(processInstance.getId()).singleResult();
    taskService.complete(task.getId());

    // Message fixed: the filter queries PROCESS_CANCELLED; the old message claimed a
    // (nonexistent) PROCESS_TERMINATED event type. Local renamed to match the filter.
    List<FlowableEvent> processCancelledEvents = listener.filterEvents(FlowableEngineEventType.PROCESS_CANCELLED);
    assertEquals("There should be no FlowableEventType.PROCESS_CANCELLED event after process complete.", 0, processCancelledEvents.size());
}
/**
 * A terminate end event reached inside the process must dispatch exactly one
 * PROCESS_COMPLETED_WITH_TERMINATE_END_EVENT for the instance and cancel the
 * remaining active activity.
 */
@Deployment(resources = "org/flowable/engine/test/bpmn/event/end/TerminateEndEventTest.testProcessTerminate.bpmn")
public void testProcessInstanceTerminatedEvents() throws Exception {
    ProcessInstance pi = runtimeService.startProcessInstanceByKey("terminateEndEventExample");

    long executionEntities = runtimeService.createExecutionQuery().processInstanceId(pi.getId()).count();
    assertEquals(3, executionEntities);

    // Completing this task routes the instance into the terminate end event.
    org.flowable.task.api.Task task = taskService.createTaskQuery().processInstanceId(pi.getId()).taskDefinitionKey("preTerminateTask").singleResult();
    taskService.complete(task.getId());

    List<FlowableEvent> processTerminatedEvents = listener.filterEvents(FlowableEngineEventType.PROCESS_COMPLETED_WITH_TERMINATE_END_EVENT);
    assertEquals("There should be exactly one FlowableEventType.PROCESS_COMPLETED_WITH_TERMINATE_END_EVENT event after the task complete.", 1,
            processTerminatedEvents.size());
    FlowableEngineEntityEvent processCompletedEvent = (FlowableEngineEntityEvent) processTerminatedEvents.get(0);
    assertThat(processCompletedEvent.getProcessInstanceId(), is(pi.getProcessInstanceId()));

    List<FlowableEvent> activityTerminatedEvents = listener.filterEvents(FlowableEngineEventType.ACTIVITY_CANCELLED);
    // Message fixed: the assertion expects one event (is(1)); the old message claimed "exactly two".
    assertThat("There should be exactly one FlowableEventType.ACTIVITY_CANCELLED event after the task complete.", activityTerminatedEvents.size(), is(1));
    for (FlowableEvent event : activityTerminatedEvents) {
        FlowableActivityCancelledEventImpl activityEvent = (FlowableActivityCancelledEventImpl) event;
        if (activityEvent.getActivityId().equals("preNormalTerminateTask")) {
            assertThat("The user task must be terminated", activityEvent.getActivityId(), is("preNormalTerminateTask"));
            assertThat("The cause must be terminate end event", ((FlowNode) activityEvent.getCause()).getId(), is("EndEvent_2"));
        }
    }
}
/**
 * Terminate end event inside a called sub process: the
 * PROCESS_COMPLETED_WITH_TERMINATE_END_EVENT belongs to the called instance,
 * never to the parent process instance.
 */
@Deployment(resources = {"org/flowable/engine/test/bpmn/event/end/TerminateEndEventTest.testTerminateInCallActivity.bpmn",
        "org/flowable/engine/test/bpmn/event/end/TerminateEndEventTest.subProcessTerminate.bpmn"})
public void testProcessInstanceTerminatedEvents_callActivity() throws Exception {
    ProcessInstance parentInstance = runtimeService.startProcessInstanceByKey("terminateEndEventExample");

    org.flowable.task.api.Task preNormalEndTask = taskService.createTaskQuery().processInstanceId(parentInstance.getId()).taskDefinitionKey("preNormalEnd").singleResult();
    taskService.complete(preNormalEndTask.getId());
    assertProcessEnded(parentInstance.getId());

    List<FlowableEvent> terminateEvents = listener.filterEvents(FlowableEngineEventType.PROCESS_COMPLETED_WITH_TERMINATE_END_EVENT);
    assertEquals("There should be exactly one FlowableEventType.PROCESS_COMPLETED_WITH_TERMINATE_END_EVENT event after the task complete.", 1,
            terminateEvents.size());

    // The terminate event must reference the called sub process, not the parent.
    FlowableEngineEntityEvent completedEvent = (FlowableEngineEntityEvent) terminateEvents.get(0);
    assertNotEquals(parentInstance.getProcessInstanceId(), completedEvent.getProcessInstanceId());
    assertThat(completedEvent.getProcessDefinitionId(), containsString("terminateEndEventSubprocessExample"));
}
/**
 * A terminate-all end event reached inside an embedded sub process ends the entire
 * process instance and fires exactly one terminate-completion event for it.
 */
@Deployment(resources = {"org/flowable/engine/test/bpmn/event/end/TerminateEndEventTest.testTerminateInSubProcessWithBoundaryTerminateAll.bpmn20.xml"})
public void testTerminateAllInSubProcess() throws Exception {
    ProcessInstance instance = runtimeService.startProcessInstanceByKey("terminateEndEventWithBoundary");

    org.flowable.task.api.Task innerTask = taskService.createTaskQuery().processInstanceId(instance.getId()).taskDefinitionKey("preTermInnerTask").singleResult();
    taskService.complete(innerTask.getId());
    assertProcessEnded(instance.getId());

    List<FlowableEvent> terminateEvents = listener.filterEvents(FlowableEngineEventType.PROCESS_COMPLETED_WITH_TERMINATE_END_EVENT);
    assertEquals("There should be exactly one FlowableEventType.PROCESS_COMPLETED_WITH_TERMINATE_END_EVENT event after the task complete.", 1,
            terminateEvents.size());
    FlowableEngineEntityEvent completedEvent = (FlowableEngineEntityEvent) terminateEvents.get(0);
    assertEquals(instance.getProcessInstanceId(), completedEvent.getProcessInstanceId());
}
/**
 * A terminate end event in the parent process, reached while a called (one-task)
 * process is active: the parent instance ends with exactly one terminate-completion
 * event, and two activities are cancelled — the task inside the called process and
 * the call activity itself.
 */
@Deployment(resources = {"org/flowable/engine/test/bpmn/event/end/TerminateEndEventTest.testTerminateInParentProcess.bpmn",
"org/flowable/engine/test/api/runtime/oneTaskProcess.bpmn20.xml"})
public void testProcessInstanceTerminatedEvents_terminateInParentProcess() throws Exception {
ProcessInstance pi = runtimeService.startProcessInstanceByKey("terminateParentProcess");
// should terminate the called process and continue the parent
org.flowable.task.api.Task task = taskService.createTaskQuery().processInstanceId(pi.getId()).taskDefinitionKey("preTerminateEnd").singleResult();
taskService.complete(task.getId());
assertProcessEnded(pi.getId());
// Exactly one terminate-completion event, attributed to the parent instance.
List<FlowableEvent> processTerminatedEvents = listener.filterEvents(FlowableEngineEventType.PROCESS_COMPLETED_WITH_TERMINATE_END_EVENT);
assertEquals("There should be exactly one FlowableEventType.PROCESS_COMPLETED_WITH_TERMINATE_END_EVENT events after the task complete.", 1,
processTerminatedEvents.size());
FlowableEngineEntityEvent processCompletedEvent = (FlowableEngineEntityEvent) processTerminatedEvents.get(0);
assertThat(processCompletedEvent.getProcessInstanceId(), is(pi.getProcessInstanceId()));
assertThat(processCompletedEvent.getProcessDefinitionId(), containsString("terminateParentProcess"));
// Both the called process's user task and the call activity must be cancelled.
// The loop tolerates either ordering of the two cancellation events.
List<FlowableEvent> activityTerminatedEvents = listener.filterEvents(FlowableEngineEventType.ACTIVITY_CANCELLED);
assertThat("Two activities must be cancelled.", activityTerminatedEvents.size(), is(2));
for (FlowableEvent event : activityTerminatedEvents) {
FlowableActivityCancelledEventImpl activityEvent = (FlowableActivityCancelledEventImpl) event;
if (activityEvent.getActivityId().equals("theTask")) {
assertThat("The user task must be terminated in the called sub process.", activityEvent.getActivityId(), is("theTask"));
assertThat("The cause must be terminate end event", ((FlowNode) activityEvent.getCause()).getId(), is("EndEvent_3"));
} else if (activityEvent.getActivityId().equals("CallActivity_1")) {
assertThat("The call activity must be terminated", activityEvent.getActivityId(), is("CallActivity_1"));
assertThat("The cause must be terminate end event", ((FlowNode) activityEvent.getCause()).getId(), is("EndEvent_3"));
}
}
}
/**
 * A called process ending in an error end event that is caught by a boundary event on
 * the call activity: the called instance reports PROCESS_COMPLETED_WITH_ERROR_END_EVENT,
 * and the parent continues along the boundary path ("Escalated Task") to normal completion.
 */
@Deployment(resources = {
"org/flowable/engine/test/bpmn/event/error/BoundaryErrorEventTest.testCatchErrorOnCallActivity-parent.bpmn20.xml",
"org/flowable/engine/test/bpmn/event/error/BoundaryErrorEventTest.subprocess.bpmn20.xml"
})
public void testProcessCompletedEvents_callActivityErrorEndEvent() throws Exception {
ProcessInstance pi = runtimeService.startProcessInstanceByKey("catchErrorOnCallActivity");
// The only active task at this point lives in the called process.
org.flowable.task.api.Task task = taskService.createTaskQuery().singleResult();
assertEquals("Task in subprocess", task.getName());
List<ProcessInstance> subProcesses = runtimeService.createProcessInstanceQuery().superProcessInstanceId(pi.getId()).list();
assertEquals(1, subProcesses.size());
// Completing the task will reach the end error event,
// which is caught on the call activity boundary
taskService.complete(task.getId());
// The error-end completion event must reference the called instance's execution.
List<FlowableEvent> processCompletedEvents = listener.filterEvents(FlowableEngineEventType.PROCESS_COMPLETED_WITH_ERROR_END_EVENT);
assertEquals("There should be exactly an FlowableEventType.PROCESS_COMPLETED_WITH_ERROR_END_EVENT event after the task complete.", 1,
processCompletedEvents.size());
FlowableEngineEntityEvent processCompletedEvent = (FlowableEngineEntityEvent) processCompletedEvents.get(0);
assertEquals(subProcesses.get(0).getId(), processCompletedEvent.getExecutionId());
// The boundary catch routed the parent to its escalation task.
task = taskService.createTaskQuery().singleResult();
assertEquals("Escalated Task", task.getName());
// Completing the task will end the process instance
taskService.complete(task.getId());
assertProcessEnded(pi.getId());
}
/**
 * Deleting a process instance that fanned out into multiple parallel call activity
 * instances removes the whole hierarchy (the fan-out yields 7 process instances and
 * 12 open tasks in total) and dispatches the expected cancellation events.
 */
@Deployment(resources = {
        "org/flowable/engine/test/bpmn/multiinstance/MultiInstanceTest.testParallelCallActivity.bpmn20.xml",
        "org/flowable/engine/test/bpmn/multiinstance/MultiInstanceTest.externalSubProcess.bpmn20.xml"})
public void testDeleteMultiInstanceCallActivityProcessInstance() {
    assertEquals(0, taskService.createTaskQuery().count());
    ProcessInstance miInstance = runtimeService.startProcessInstanceByKey("miParallelCallActivity");

    assertEquals(7, runtimeService.createProcessInstanceQuery().count());
    assertEquals(12, taskService.createTaskQuery().count());
    listener.clearEventsReceived();

    runtimeService.deleteProcessInstance(miInstance.getId(), "testing instance deletion");

    assertEquals("Task cancelled event has to be fired.", FlowableEngineEventType.ACTIVITY_CANCELLED, listener.getEventsReceived().get(0).getType());
    assertEquals("SubProcess cancelled event has to be fired.", FlowableEngineEventType.PROCESS_CANCELLED, listener.getEventsReceived().get(2).getType());
    assertEquals(0, runtimeService.createProcessInstanceQuery().count());
    assertEquals(0, taskService.createTaskQuery().count());
}
/**
 * Cancels an embedded subprocess via its "cancel" message and checks that an
 * ACTIVITY_CANCELLED event is raised both for the user task inside the subprocess
 * and for the subprocess itself.
 */
@Deployment(resources = "org/flowable/engine/test/api/runtime/subProcessWithTerminateEnd.bpmn20.xml")
public void testProcessInstanceTerminatedEventInSubProcess() throws Exception {
    ProcessInstance instance = runtimeService.startProcessInstanceByKey("subProcessWithTerminateEndTest");

    long executionCount = runtimeService.createExecutionQuery().processInstanceId(instance.getId()).count();
    assertEquals(4, executionCount);

    List<org.flowable.task.api.Task> openTasks = taskService.createTaskQuery().processInstanceId(instance.getId()).list();
    assertEquals(1, openTasks.size());

    Execution cancelSubscription = runtimeService.createExecutionQuery().messageEventSubscriptionName("cancel").singleResult();
    assertNotNull(cancelSubscription);

    // Delivering the message cancels the subprocess. The engine must then emit a
    // cancellation event for the task within the subprocess and for the subprocess itself.
    runtimeService.messageEventReceived("cancel", cancelSubscription.getId());

    List<FlowableEvent> cancelledEvents = listener.filterEvents(FlowableEngineEventType.ACTIVITY_CANCELLED);
    assertEquals(2, cancelledEvents.size());

    boolean sawUserTask = false;
    boolean sawSubProcess = false;
    for (FlowableEvent cancelled : cancelledEvents) {
        FlowableActivityCancelledEvent activityEvent = (FlowableActivityCancelledEvent) cancelled;
        if ("userTask".equals(activityEvent.getActivityType())) {
            assertEquals("task", activityEvent.getActivityId());
            sawUserTask = true;
        } else if ("subProcess".equals(activityEvent.getActivityType())) {
            assertEquals("embeddedSubprocess", activityEvent.getActivityId());
            sawSubProcess = true;
        }
    }
    assertTrue(sawUserTask);
    assertTrue(sawSubProcess);
}
/**
 * Completes the task in subprocess2, which flows to a terminate end event and must
 * cancel subprocess1 plus everything attached to it (its user task, its timer
 * intermediate catch event, and the boundary event). Verifies the four expected
 * ACTIVITY_CANCELLED events, a single PROCESS_COMPLETED_WITH_TERMINATE_END_EVENT,
 * and the absence of PROCESS_COMPLETED / PROCESS_CANCELLED events.
 */
@Deployment(resources = "org/flowable/engine/test/api/runtime/multipleSubprocessTerminateEnd.bpmn20.xml")
public void testProcessInstanceWithMultipleSubprocessAndTerminateEnd2() throws Exception {
    ProcessInstance pi = runtimeService.startProcessInstanceByKey("multiplesubProcessWithTerminateEndTest");
    List<Execution> subprocesses = runtimeService.createExecutionQuery().processInstanceId(pi.getId())
            .onlySubProcessExecutions().list();
    assertEquals(2, subprocesses.size());
    List<org.flowable.task.api.Task> tasks = taskService.createTaskQuery().processInstanceId(pi.getId()).list();
    assertEquals(2, tasks.size());
    // Locate the task that lives in subprocess2 (the one whose completion terminates).
    org.flowable.task.api.Task task2 = null;
    for (org.flowable.task.api.Task task : tasks) {
        if ("Task in subprocess2".equals(task.getName())) {
            task2 = task;
            break;
        }
    }
    // Complete user task in subprocess2. This flows out of subprocess2 to
    // the terminate end event. This will cause subprocess1 to be cancelled along
    // with the user task, boundary event and intermediate catch event defined in or
    // on subprocess1.
    assertNotNull(task2);
    taskService.complete(task2.getId());
    // Subprocess2 completed and transitioned to terminate end. We expect
    // ACTIVITY_CANCELLED for Subprocess1, task1 defined in subprocess1, boundary event defined on
    // and the timer intermediate catch event defined in subprocess1
    boolean userTaskFound = false;
    boolean subprocessFound = false;
    boolean timerCatchEventFound = false;
    boolean boundaryEventFound = false;
    List<FlowableEvent> activityTerminatedEvents = listener
            .filterEvents(FlowableEngineEventType.ACTIVITY_CANCELLED);
    assertEquals(4, activityTerminatedEvents.size());
    // Classify each cancellation by activity type; each of the four kinds must occur once.
    for (FlowableEvent flowableEvent : activityTerminatedEvents) {
        FlowableActivityCancelledEvent activityCancelledEvent = (FlowableActivityCancelledEvent) flowableEvent;
        if ("intermediateCatchEvent".equals(activityCancelledEvent.getActivityType())) {
            assertEquals("timer", activityCancelledEvent.getActivityId());
            timerCatchEventFound = true;
        } else if ("boundaryEvent".equals(activityCancelledEvent.getActivityType())) {
            boundaryEventFound = true;
        } else if ("userTask".equals(activityCancelledEvent.getActivityType())) {
            assertEquals("Task in subprocess1", activityCancelledEvent.getActivityName());
            userTaskFound = true;
        } else if ("subProcess".equals(activityCancelledEvent.getActivityType())) {
            assertEquals("subprocess1", activityCancelledEvent.getActivityId());
            subprocessFound = true;
        }
    }
    assertTrue(timerCatchEventFound);
    assertTrue(boundaryEventFound);
    assertTrue(userTaskFound);
    assertTrue(subprocessFound);
    // Termination via terminate end event must NOT be reported as a normal completion...
    List<FlowableEvent> processCompletedEvents = listener.filterEvents(FlowableEngineEventType.PROCESS_COMPLETED);
    assertEquals(0, processCompletedEvents.size());
    // ...but as exactly one terminate-end completion.
    List<FlowableEvent> processCompletedTerminateEndEvents = listener
            .filterEvents(FlowableEngineEventType.PROCESS_COMPLETED_WITH_TERMINATE_END_EVENT);
    assertEquals(1, processCompletedTerminateEndEvents.size());
    // Only expect PROCESS_COMPLETED_WITH_TERMINATE_END_EVENT, not
    // PROCESS_CANCELLED.
    List<FlowableEvent> processCanceledEvents = listener.filterEvents(FlowableEngineEventType.PROCESS_CANCELLED);
    assertEquals(0, processCanceledEvents.size());
}
/** Asserts field-by-field (reflective) equality of the two events. */
private void assertEventsEqual(FlowableEvent event1, FlowableEvent event2) {
    final boolean fieldsMatch = EqualsBuilder.reflectionEquals(event1, event2);
    assertTrue(fieldsMatch);
}
/**
 * Registers a fresh event listener with the engine so each test observes only
 * its own events, and resets the static listener's recorded events.
 */
@Override
protected void initializeServices() {
    super.initializeServices();
    TestInitializedEntityEventListener freshListener = new TestInitializedEntityEventListener();
    this.listener = freshListener;
    processEngineConfiguration.getEventDispatcher().addEventListener(freshListener);
    FilteredStaticTestFlowableEventListener.clearEventsReceived();
}
/**
 * Detaches and clears the test listener, then delegates to the superclass.
 *
 * The listener is removed before {@code super.tearDown()} and inside a
 * try/finally: previously the listener stayed registered on the dispatcher
 * whenever {@code super.tearDown()} threw, leaking it into subsequent tests.
 */
@Override
protected void tearDown() throws Exception {
    try {
        if (listener != null) {
            listener.clearEventsReceived();
            processEngineConfiguration.getEventDispatcher().removeEventListener(listener);
        }
    } finally {
        super.tearDown();
    }
}
/**
 * Listener that records process-instance entity events (verifying the entity is
 * initialized before recording) plus PROCESS_CANCELLED / ACTIVITY_CANCELLED events.
 */
private class TestInitializedEntityEventListener extends AbstractFlowableEngineEventListener {

    // Events captured during the current test; accessed from the test thread only.
    private final List<FlowableEvent> eventsReceived;

    public TestInitializedEntityEventListener() {
        eventsReceived = new ArrayList<>();
    }

    public List<FlowableEvent> getEventsReceived() {
        return eventsReceived;
    }

    public void clearEventsReceived() {
        eventsReceived.clear();
    }

    @Override
    public void onEvent(FlowableEvent event) {
        if (event instanceof FlowableEntityEvent && ProcessInstance.class.isAssignableFrom(((FlowableEntityEvent) event).getEntity().getClass())) {
            // check whether entity in the event is initialized before
            // adding to the list.
            assertNotNull(((ExecutionEntity) ((FlowableEntityEvent) event).getEntity()).getId());
            eventsReceived.add(event);
        } else if (FlowableEngineEventType.PROCESS_CANCELLED == event.getType() || FlowableEngineEventType.ACTIVITY_CANCELLED == event.getType()) {
            eventsReceived.add(event);
        }
    }

    @Override
    public boolean isFailOnException() {
        return true;
    }

    /** Returns the recorded events of the given type (e.g. timer-cancelled events). */
    public List<FlowableEvent> filterEvents(FlowableEngineEventType eventType) {
        List<FlowableEvent> filteredEvents = new ArrayList<>();
        // Fix: filter this instance's own recorded events instead of reaching out to
        // the enclosing test's 'listener' field, which may reference another instance.
        for (FlowableEvent eventReceived : eventsReceived) {
            if (eventType == eventReceived.getType()) {
                filteredEvents.add(eventReceived);
            }
        }
        return filteredEvents;
    }
}
/**
 * Static-state listener that forwards only initialized process-instance entity
 * events and cancellation events to the shared {@link StaticTestFlowableEventListener}.
 */
public static class FilteredStaticTestFlowableEventListener extends StaticTestFlowableEventListener {

    @Override
    public void onEvent(FlowableEvent event) {
        if (isInitializedProcessInstanceEvent(event)) {
            super.onEvent(event);
        } else if (FlowableEngineEventType.PROCESS_CANCELLED == event.getType()
                || FlowableEngineEventType.ACTIVITY_CANCELLED == event.getType()) {
            super.onEvent(event);
        }
    }

    // True when the event carries a ProcessInstance entity; asserts it is initialized.
    private static boolean isInitializedProcessInstanceEvent(FlowableEvent event) {
        if (!(event instanceof FlowableEntityEvent)) {
            return false;
        }
        Object entity = ((FlowableEntityEvent) event).getEntity();
        if (!ProcessInstance.class.isAssignableFrom(entity.getClass())) {
            return false;
        }
        // check whether entity in the event is initialized before forwarding
        assertNotNull(((ExecutionEntity) entity).getId());
        return true;
    }

    /** Returns all statically recorded events of the given type. */
    static List<FlowableEvent> filterEvents(FlowableEngineEventType eventType) {
        List<FlowableEvent> matches = new ArrayList<>();
        for (FlowableEvent candidate : FilteredStaticTestFlowableEventListener.getEventsReceived()) {
            if (candidate.getType() == eventType) {
                matches.add(candidate);
            }
        }
        return matches;
    }
}
}
| |
/*
* Copyright (c) 2010-2015 Evolveum and contributors
*
* This work is dual-licensed under the Apache License 2.0
* and European Union Public License. See LICENSE file for details.
*/
package com.evolveum.midpoint.repo.sql.query2.hqm;
import com.evolveum.midpoint.prism.query.OrderDirection;
import com.evolveum.midpoint.repo.sql.data.common.RObject;
import com.evolveum.midpoint.repo.sql.query.QueryException;
import com.evolveum.midpoint.repo.sql.query2.definition.JpaEntityDefinition;
import com.evolveum.midpoint.repo.sql.query2.definition.JpaLinkDefinition;
import com.evolveum.midpoint.repo.sql.query2.hqm.condition.Condition;
import com.evolveum.midpoint.repo.sql.util.ClassMapper;
import org.apache.commons.lang.Validate;
import org.jetbrains.annotations.NotNull;
import java.util.ArrayList;
import java.util.List;
/**
* Query in HQL that is being created.
*
* @author mederly
*/
/**
 * Query in HQL that is being created: projection elements, the primary entity
 * (with joins), where-conditions, order-by and group-by items. {@link #getAsHqlText}
 * renders the accumulated state as HQL text.
 *
 * Not thread-safe; intended to be built and rendered by a single thread.
 */
public abstract class HibernateQuery {

    private static final String INDENT_STRING = " ";

    /** Upper bound for generated alias suffixes; exceeding it indicates a bug. */
    private static final int LIMIT = 100;

    // projection elements - i.e. select projectionElement1, projectionElement2, ..., projectionElementN from ...
    private final List<ProjectionElement> projectionElements = new ArrayList<>();

    /**
     * Primary entity for this query, along with joined entities.
     * For example,
     *   RUser u
     * Or,
     *   RUser u
     *     left join u.assignments a with ...
     *
     * (originally, we thought about cross-joins with other entities, hence "primary entity")
     */
    private EntityReference primaryEntity; // not null

    /**
     * List of conditions in the "where" clause. They are to be interpreted as a conjunction.
     */
    private final List<Condition> conditions = new ArrayList<>();

    /** Single "order by" item: a property path plus an optional direction. */
    public static class Ordering {
        @NotNull private final String byProperty;
        private final OrderDirection direction;

        Ordering(@NotNull String byProperty, OrderDirection direction) {
            this.byProperty = byProperty;
            this.direction = direction;
        }

        @NotNull
        public String getByProperty() {
            return byProperty;
        }

        public OrderDirection getDirection() {
            return direction;
        }
    }

    private final List<Ordering> orderingList = new ArrayList<>();

    /** Single "group by" item: the property path to group on. */
    public static class Grouping {
        @NotNull private final String byProperty;

        Grouping(@NotNull String byProperty) {
            this.byProperty = byProperty;
        }

        @NotNull
        public String getByProperty() {
            return byProperty;
        }
    }

    private final List<Grouping> groupingList = new ArrayList<>();

    public HibernateQuery(@NotNull JpaEntityDefinition primaryEntityDef) {
        primaryEntity = createItemSpecification(primaryEntityDef);
    }

    protected HibernateQuery(EntityReference primaryEntity) {
        this.primaryEntity = primaryEntity;
    }

    public List<ProjectionElement> getProjectionElements() {
        return projectionElements;
    }

    public void addProjectionElement(ProjectionElement element) {
        projectionElements.add(element);
    }

    /** Adds one generic projection element per item path given. */
    public void addProjectionElementsFor(List<String> items) {
        for (String item : items) {
            addProjectionElement(new GenericProjectionElement(item));
        }
    }

    public EntityReference getPrimaryEntity() {
        return primaryEntity;
    }

    public void setPrimaryEntity(EntityReference primaryEntity) {
        this.primaryEntity = primaryEntity;
    }

    public List<Condition> getConditions() {
        return conditions;
    }

    public void addCondition(Condition condition) {
        conditions.add(condition);
    }

    /**
     * Renders the query as HQL text, indented by {@code indent} levels.
     *
     * Seems to have some side effects. Do not call twice!
     */
    public String getAsHqlText(int indent, boolean distinct) {
        StringBuilder sb = new StringBuilder();

        indent(sb, indent);
        sb.append("select");
        if (distinct) {
            sb.append(" distinct");
        }
        sb.append("\n");

        ProjectionElement.dumpToHql(sb, projectionElements, indent + 1); // we finish at the end of the last line (not at the new line)
        sb.append("\n");

        indent(sb, indent);
        sb.append("from\n");
        primaryEntity.dumpToHql(sb, indent + 1);

        if (!conditions.isEmpty()) {
            sb.append("\n");
            indent(sb, indent);
            sb.append("where\n");
            Condition.dumpToHql(sb, conditions, indent + 1);
        }
        if (!orderingList.isEmpty()) {
            sb.append("\n");
            indent(sb, indent);
            sb.append("order by ");
            boolean first = true;
            for (Ordering ordering : orderingList) {
                if (first) {
                    first = false;
                } else {
                    sb.append(", ");
                }
                sb.append(ordering.byProperty);
                if (ordering.direction != null) {
                    switch (ordering.direction) {
                        case DESCENDING: sb.append(" desc"); break;
                        case ASCENDING: sb.append(" asc"); break;
                        default: throw new IllegalStateException("Unknown ordering: " + ordering.direction);
                    }
                }
            }
        }
        if (!groupingList.isEmpty()) {
            sb.append("\n");
            indent(sb, indent);
            sb.append("group by ");
            boolean first = true;
            for (Grouping grouping : groupingList) {
                if (first) {
                    first = false;
                } else {
                    sb.append(", ");
                }
                sb.append(grouping.byProperty);
            }
        }
        return sb.toString();
    }

    /** Appends {@code indent} copies of the indent string to {@code sb}. */
    public static void indent(StringBuilder sb, int indent) {
        while (indent-- > 0) {
            sb.append(INDENT_STRING);
        }
    }

    /** Creates an entity reference (with a fresh alias) for the given entity definition. */
    public EntityReference createItemSpecification(JpaEntityDefinition entityDef) {
        String alias = createAlias(entityDef);
        return new EntityReference(alias, entityDef.getJpaClassName());
    }

    public String createAlias(JpaEntityDefinition def) {
        return createAlias(def.getJpaClassName(), true);
    }

    public String createAlias(JpaLinkDefinition linkDefinition) {
        Validate.notNull(linkDefinition.getJpaName(), "Got unnamed transition");
        return createAlias(linkDefinition.getJpaName(), false);
    }

    /**
     * Derives a short, query-unique alias from the given name: its first letter
     * (skipping the leading 'R' for entity class names), suffixed with a number
     * on collision (e.g. "u", "u2", "u3", ...).
     */
    public String createAlias(String name, boolean entity) {
        // we want to skip 'R' prefix for entity definition names (a bit of hack)
        int prefixIndex = entity ? 1 : 0;
        // Character.toLowerCase is locale-independent, unlike String.toLowerCase(),
        // which could mangle e.g. 'I' under a Turkish default locale.
        String prefix = Character.toString(Character.toLowerCase(name.charAt(prefixIndex)));
        String alias = prefix;
        int index = 2;
        while (hasAlias(alias)) {
            alias = prefix + index;
            index++;
            if (index > LIMIT) {
                throw new IllegalStateException("Alias index for '" + name
                        + "' is more than " + LIMIT + "? This probably should not happen.");
            }
        }
        return alias;
    }

    // True if the alias is already used anywhere in the primary entity (incl. joins).
    private boolean hasAlias(String alias) {
        return primaryEntity != null && primaryEntity.containsAlias(alias);
    }

    public String getPrimaryEntityAlias() {
        return getPrimaryEntity().getAlias();
    }

    // use with care!
    public void setPrimaryEntityAlias(String alias) {
        getPrimaryEntity().setAlias(alias);
    }

    public void addOrdering(String propertyPath, OrderDirection direction) {
        orderingList.add(new Ordering(propertyPath, direction));
    }

    public List<Ordering> getOrderingList() {
        return orderingList;
    }

    public void addGrouping(String propertyPath) {
        groupingList.add(new Grouping(propertyPath));
    }

    public List<Grouping> getGroupingList() {
        return groupingList;
    }

    public abstract RootHibernateQuery getRootQuery();

    /**
     * Narrows the primary entity, e.g. from RObject to RUser (used during
     * ItemValueRestriction processing). The alias stays the same.
     *
     * @throws QueryException if the new class is not a subtype of the current one
     */
    public void narrowPrimaryEntity(JpaEntityDefinition newDefinition) throws QueryException {
        String oldEntityName = getPrimaryEntity().getName();
        Class<? extends RObject> oldEntityClass = ClassMapper.getHqlClassForHqlName(oldEntityName);
        Class<? extends RObject> newEntityClass = newDefinition.getJpaClass();
        if (!(oldEntityClass.isAssignableFrom(newEntityClass))) {
            throw new QueryException("Cannot narrow primary entity definition from " + oldEntityClass + " to " + newEntityClass);
        }
        getPrimaryEntity().setName(newDefinition.getJpaClassName()); // alias stays the same
    }
}
| |
/*
* The MIT License
*
* Copyright (c) 2015 The Broad Institute
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package picard.vcf;
import htsjdk.samtools.metrics.MetricsFile;
import htsjdk.samtools.metrics.StringHeader;
import htsjdk.samtools.util.BufferedLineReader;
import htsjdk.samtools.util.IOUtil;
import htsjdk.variant.variantcontext.Allele;
import htsjdk.variant.variantcontext.Genotype;
import htsjdk.variant.variantcontext.GenotypeBuilder;
import htsjdk.variant.variantcontext.VariantContext;
import htsjdk.variant.variantcontext.VariantContextBuilder;
import htsjdk.variant.vcf.VCFFileReader;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import picard.vcf.GenotypeConcordanceStates.CallState;
import picard.vcf.GenotypeConcordanceStates.TruthAndCallStates;
import picard.vcf.GenotypeConcordanceStates.TruthState;
import java.io.*;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.*;
import java.util.zip.GZIPInputStream;
public class GenotypeConcordanceTest {
    // Scratch directory for tool output; removed in tearDown().
    private static final File OUTPUT_DATA_PATH = IOUtil.createTempDir("GenotypeConcordanceTest", null);
    // Checked-in inputs and expected metrics files.
    private static final File TEST_DATA_PATH = new File("testdata/picard/vcf/");

    // Test VCFs
    private static final File CEU_TRIOS_SNPS_VCF = new File(TEST_DATA_PATH, "CEUTrio-snps.vcf");
    private static final File CEU_TRIOS_INDELS_VCF = new File(TEST_DATA_PATH, "CEUTrio-indels.vcf");
    // Test that missing sites flag for new scheme works for NIST data sets
    private static final File NIST_MISSING_SITES_TRUTH_VCF = new File(TEST_DATA_PATH, "NIST.selected.vcf");
    // Test that we notice a difference on the first line
    private static final File CEU_TRIOS_SNPS_FIRST_LINE_DIFF_VCF = new File(TEST_DATA_PATH, "CEUTrio-snps_first_line_diff.vcf");
    // Test that we notice a difference on the last line
    private static final File CEU_TRIOS_SNPS_LAST_LINE_DIFF_VCF = new File(TEST_DATA_PATH, "CEUTrio-snps_last_line_diff.vcf");
    // Test that we notice a deleted line
    private static final File CEU_TRIOS_SNPS_DEL_LINE_VCF = new File(TEST_DATA_PATH, "CEUTrio-snps_del_line.vcf");

    // Existing/expected base metrics file names
    private static final String CEU_TRIOS_SNPS_VS_CEU_TRIOS_SNPS_GC = "CEUTrio-snps_vs_CEUTrio-snps_GtConcordanceDiff";
    private static final String CEU_TRIOS_INDELS_VS_CEU_TRIOS_INDELS_GC = "CEUTrio-indels_vs_CEUTrio-indels_GtConcordanceDiff";
    private static final String CEU_TRIOS_SNPS_VS_CEU_TRIOS_SNPS_FIRST_LINE_DIFF_GC = "CEUTrio-snps_CEUTrio-snps_first_line_GtConcordanceDiff";
    private static final String CEU_TRIOS_SNPS_VS_CEU_TRIOS_SNPS_LAST_LINE_DIFF_GC = "CEUTrio-snps_CEUTrio-snps_last_line_GtConcordanceDiff";
    private static final String CEU_TRIOS_SNPS_VS_CEU_TRIOS_SNPS_DEL_LINE_GC = "CEUTrio-snps_CEUTrio-snps_del_line_GtConcordanceDiff";
    private static final String CEU_TRIOS_SNPS_VS_CEU_TRIOS_SNPS_GC_ALL_ROWS = "CEUTrio-snps_vs_CEUTrio-snps_GtConcordanceDiff_AllRows";
    private static final String CEU_TRIOS_SNPS_VS_CEU_TRIOS_SNPS_GC_MIN_GQ = "CEUTrio-snps_vs_CEUTrio-snps_GtConcordanceDiff_MinGq";
    private static final String CEU_TRIOS_SNPS_VS_CEU_TRIOS_SNPS_GC_MIN_DP = "CEUTrio-snps_vs_CEUTrio-snps_GtConcordanceDiff_MinDp";
    private static final String NIST_TRUTH_SNPS_VS_CEU_TRIOS_SNPS_GC = "NIST-truth-snps_vs_CEUTrio-snps_GtConcordanceDiff";

    // Sample names used by tests below; presumably intentionally identical — verify against usages.
    private static final String TRUTH_SAMPLE_NAME = "Foo";
    private static final String CALL_SAMPLE_NAME = "Foo";

    // A [ref] / T at 10
    private static final String snpLoc = "chr1";
    private static final int snpLocStart = 10;
    private static final int snpLocStop = 10;

    // Alleles reused by the determine-state data providers.
    private static final Allele Aref = Allele.create("A", true);
    private static final Allele C = Allele.create("C");
    private static final Allele G = Allele.create("G");
    private static final Allele T = Allele.create("T");
    private static final Allele AA = Allele.create("AA");
    private static final Allele AAA = Allele.create("AAA");
    private static final Allele AAAA = Allele.create("AAAA");
    private static final Allele AAAAA = Allele.create("AAAAA");

    // Fixtures for allele/no-call normalization tests.
    private static final String NORMALIZE_ALLELES_TRUTH = "normalize_alleles_truth.vcf";
    private static final String NORMALIZE_ALLELES_CALL = "normalize_alleles_call.vcf";
    private static final String NORMALIZE_NO_CALLS_TRUTH = "normalize_no_calls_truth.vcf";
    private static final String NORMALIZE_NO_CALLS_CALL = "normalize_no_calls_call.vcf";
/** Deletes the temporary output directory (and everything in it) after all tests run. */
@AfterClass
public void tearDown() {
    IOUtil.deleteDirectoryTree(OUTPUT_DATA_PATH);
}
/**
 * Rows: truth VCF, truth sample, call VCF, call sample, MIN_GQ (nullable),
 * MIN_DP (nullable), OUTPUT_ALL_ROWS, MISSING_SITES_HOM_REF, and the base name
 * of the expected metrics files under TEST_DATA_PATH.
 */
@DataProvider(name = "genotypeConcordanceTestFileData")
public Object[][] getGenotypeConcordanceTestFileData() {
    return new Object[][]{
            {CEU_TRIOS_SNPS_VCF, "NA12878", CEU_TRIOS_SNPS_VCF, "NA12878", null, null, false, false, CEU_TRIOS_SNPS_VS_CEU_TRIOS_SNPS_GC},
            {CEU_TRIOS_INDELS_VCF, "NA12878", CEU_TRIOS_INDELS_VCF, "NA12878", null, null, false, false, CEU_TRIOS_INDELS_VS_CEU_TRIOS_INDELS_GC},
            {CEU_TRIOS_SNPS_VCF, "NA12878", CEU_TRIOS_SNPS_FIRST_LINE_DIFF_VCF, "NA12878", null, null, false, false, CEU_TRIOS_SNPS_VS_CEU_TRIOS_SNPS_FIRST_LINE_DIFF_GC},
            {CEU_TRIOS_SNPS_VCF, "NA12878", CEU_TRIOS_SNPS_LAST_LINE_DIFF_VCF, "NA12878", null, null, false, false, CEU_TRIOS_SNPS_VS_CEU_TRIOS_SNPS_LAST_LINE_DIFF_GC},
            {CEU_TRIOS_SNPS_VCF, "NA12878", CEU_TRIOS_SNPS_DEL_LINE_VCF, "NA12878", null, null, false, false, CEU_TRIOS_SNPS_VS_CEU_TRIOS_SNPS_DEL_LINE_GC},
            {CEU_TRIOS_SNPS_VCF, "NA12878", CEU_TRIOS_SNPS_VCF, "NA12878", null, null, true, false, CEU_TRIOS_SNPS_VS_CEU_TRIOS_SNPS_GC_ALL_ROWS},
            {CEU_TRIOS_SNPS_VCF, "NA12878", CEU_TRIOS_SNPS_VCF, "NA12891", 40, null, false, false, CEU_TRIOS_SNPS_VS_CEU_TRIOS_SNPS_GC_MIN_GQ},
            {CEU_TRIOS_SNPS_VCF, "NA12878", CEU_TRIOS_SNPS_VCF, "NA12891", null, 40, false, false, CEU_TRIOS_SNPS_VS_CEU_TRIOS_SNPS_GC_MIN_DP},
            {NIST_MISSING_SITES_TRUTH_VCF, "NA12878", CEU_TRIOS_SNPS_VCF, "NA12878", null, null, false, true, NIST_TRUTH_SNPS_VS_CEU_TRIOS_SNPS_GC}
    };
}
/**
 * End-to-end check of GenotypeConcordance: runs the tool (once with and once
 * without VCF output) and compares the summary/detailed/contingency metrics —
 * and, when emitted, the output VCF — against checked-in expected files.
 *
 * @param vcf1 truth VCF
 * @param sample1 truth sample name
 * @param vcf2 call VCF
 * @param sample2 call sample name
 * @param minGq minimum genotype quality, or null for the tool default
 * @param minDp minimum depth, or null for the tool default
 * @param outputAllRows whether all contingency rows are written
 * @param missingSitesFlag whether missing truth sites are treated as hom-ref
 * @param expectedOutputFileBaseName base name of the expected files under TEST_DATA_PATH
 */
@Test(dataProvider = "genotypeConcordanceTestFileData")
public void testGenotypeConcordance(final File vcf1, final String sample1, final File vcf2, final String sample2,
                                    final Integer minGq, final Integer minDp, final boolean outputAllRows, final boolean missingSitesFlag,
                                    final String expectedOutputFileBaseName) throws Exception {
    final List<Boolean> withVcfs = Arrays.asList(true, false);
    for (final boolean withVcf : withVcfs) {
        final File outputBaseFileName = new File(OUTPUT_DATA_PATH, "actualGtConc");
        final File outputSummaryFile = new File(outputBaseFileName.getAbsolutePath() + GenotypeConcordance.SUMMARY_METRICS_FILE_EXTENSION);
        final File outputDetailsFile = new File(outputBaseFileName.getAbsolutePath() + GenotypeConcordance.DETAILED_METRICS_FILE_EXTENSION);
        final File outputContingencyFile = new File(outputBaseFileName.getAbsolutePath() + GenotypeConcordance.CONTINGENCY_METRICS_FILE_EXTENSION);
        final Path outputVcfFile = Paths.get(outputBaseFileName.getAbsolutePath() + GenotypeConcordance.OUTPUT_VCF_FILE_EXTENSION);
        outputSummaryFile.deleteOnExit();
        outputDetailsFile.deleteOnExit();
        outputContingencyFile.deleteOnExit();
        outputVcfFile.toFile().deleteOnExit();

        final GenotypeConcordance genotypeConcordance = new GenotypeConcordance();
        genotypeConcordance.TRUTH_VCF = vcf1;
        genotypeConcordance.TRUTH_SAMPLE = sample1;
        genotypeConcordance.CALL_VCF = vcf2;
        genotypeConcordance.CALL_SAMPLE = sample2;
        if (minGq != null) genotypeConcordance.MIN_GQ = minGq;
        if (minDp != null) genotypeConcordance.MIN_DP = minDp;
        genotypeConcordance.OUTPUT_ALL_ROWS = outputAllRows;
        genotypeConcordance.OUTPUT = outputBaseFileName;
        genotypeConcordance.MISSING_SITES_HOM_REF = missingSitesFlag;
        if (missingSitesFlag) {
            // Missing-sites mode needs intervals over which "missing" is defined.
            genotypeConcordance.INTERVALS = Collections.singletonList(new File(TEST_DATA_PATH, "IntervalList1PerChrom.interval_list"));
        }
        genotypeConcordance.OUTPUT_VCF = withVcf;
        Assert.assertEquals(genotypeConcordance.instanceMain(new String[0]), 0);

        assertMetricsFileEqual(outputSummaryFile, new File(TEST_DATA_PATH, expectedOutputFileBaseName + GenotypeConcordance.SUMMARY_METRICS_FILE_EXTENSION));
        assertMetricsFileEqual(outputDetailsFile, new File(TEST_DATA_PATH, expectedOutputFileBaseName + GenotypeConcordance.DETAILED_METRICS_FILE_EXTENSION));
        assertMetricsFileEqual(outputContingencyFile, new File(TEST_DATA_PATH, expectedOutputFileBaseName + GenotypeConcordance.CONTINGENCY_METRICS_FILE_EXTENSION));

        if (withVcf) {
            // An ugly way to compare VCFs: line by line, as text.
            final Path expectedVcf = Paths.get(TEST_DATA_PATH.getAbsolutePath(), expectedOutputFileBaseName + ".vcf");

            // try-with-resources closes the gzip reader even when a read error aborts
            // the loop (the original only closed it on the success path).
            final List<String> actualLines = new ArrayList<>();
            try (final BufferedLineReader reader = new BufferedLineReader(new GZIPInputStream(new FileInputStream(outputVcfFile.toFile())))) {
                while (-1 != reader.peek()) {
                    actualLines.add(reader.readLine());
                }
            }

            // Files.readAllLines closes the file itself — the original used
            // Files.lines(...) and never closed the returned stream (resource leak).
            final List<String> expectedLines = Files.readAllLines(expectedVcf);

            // Compare pairwise first (so a content mismatch is reported before a
            // length mismatch, as before), then require equal line counts.
            final int common = Math.min(actualLines.size(), expectedLines.size());
            for (int i = 0; i < common; i++) {
                Assert.assertEquals(actualLines.get(i), expectedLines.get(i));
            }
            Assert.assertEquals(actualLines.size(), expectedLines.size());
        }
    }
}
/**
 * Asserts that two metrics files carry equal metrics and histograms.
 *
 * Readers are opened in try-with-resources — the original never closed either
 * FileReader. The throws clause widens FileNotFoundException to its superclass
 * IOException (required by close()); all callers declare {@code throws Exception}.
 */
private void assertMetricsFileEqual(final File actualMetricsFile, final File expectedMetricsFile) throws IOException {
    // Actual metrics file
    final MetricsFile<GenotypeConcordanceSummaryMetrics, Comparable<?>> actual = new MetricsFile<>();
    try (final FileReader actualReader = new FileReader(actualMetricsFile)) {
        actual.read(actualReader);
    }

    // Expected metrics file
    final MetricsFile<GenotypeConcordanceSummaryMetrics, Comparable<?>> expected = new MetricsFile<>();
    try (final FileReader expectedReader = new FileReader(expectedMetricsFile)) {
        expected.read(expectedReader);
    }

    // Note - cannot use .equals as it calls .areHeadersEqual and they are not since the timestamp (at a minimum is different)
    Assert.assertTrue(expected.areMetricsEqual(actual));
    Assert.assertTrue(expected.areHistogramsEqual(actual));
}
/**
 * Runs GenotypeConcordance on the given truth/call VCFs (truth sample NA12878)
 * and returns the resulting SNP counter; used by the per-scheme detail tests.
 */
public static GenotypeConcordanceCounts getGenotypeConcordanceCounts(final File truthVCF, final File callVCF, final String callSample, final boolean missingSitesFlag, List<File> intervalFiles){
    final File outputPrefix = new File(OUTPUT_DATA_PATH, "actualGtConc");
    final File summaryMetricsFile = new File(outputPrefix.getAbsolutePath() + GenotypeConcordance.SUMMARY_METRICS_FILE_EXTENSION);
    final File detailedMetricsFile = new File(outputPrefix.getAbsolutePath() + GenotypeConcordance.DETAILED_METRICS_FILE_EXTENSION);
    summaryMetricsFile.deleteOnExit();
    detailedMetricsFile.deleteOnExit();

    final GenotypeConcordance tool = new GenotypeConcordance();
    tool.TRUTH_VCF = truthVCF;
    tool.TRUTH_SAMPLE = "NA12878";
    tool.CALL_VCF = callVCF;
    tool.CALL_SAMPLE = callSample;
    tool.MISSING_SITES_HOM_REF = missingSitesFlag;
    tool.INTERVALS = intervalFiles;
    tool.OUTPUT = outputPrefix;

    // The tool must exit cleanly before its counter is meaningful.
    Assert.assertEquals(tool.instanceMain(new String[0]), 0);
    return tool.getSnpCounter();
}
/**
 * Asserts that the counter matches the expected counts for every (truth, call)
 * state pair; pairs absent from the map are expected to have a count of zero.
 */
public static void assertNonZeroCountsAgree(final GenotypeConcordanceCounts counter, final Map<TruthAndCallStates, Integer> expectedCountMap) {
    for (final TruthState truthState : TruthState.values()) {
        for (final CallState callState : CallState.values()) {
            // getOrDefault replaces the null-check-then-zero dance of the original.
            final int expectedCount = expectedCountMap.getOrDefault(new TruthAndCallStates(truthState, callState), 0);
            Assert.assertEquals(counter.getCount(truthState, callState), expectedCount);
        }
    }
}
@DataProvider(name = "genotypeConcordanceDetermineStateDataProvider")
public Object[][] genotypeConcordanceDetermineStateDataProvider() {
final Object[][] originalUnitTestData = new Object[][]{
{Aref, Aref, TruthState.HOM_REF, Aref, Aref, CallState.HOM_REF},
{Aref, Aref, TruthState.HOM_REF, Aref, C, CallState.HET_REF_VAR1},
{Aref, Aref, TruthState.HOM_REF, Aref, G, CallState.HET_REF_VAR1},
{Aref, Aref, TruthState.HOM_REF, Aref, T, CallState.HET_REF_VAR1},
{Aref, Aref, TruthState.HOM_REF, C, G, CallState.HET_VAR1_VAR2},
{Aref, Aref, TruthState.HOM_REF, C, T, CallState.HET_VAR1_VAR2},
{Aref, Aref, TruthState.HOM_REF, G, T, CallState.HET_VAR1_VAR2},
{Aref, Aref, TruthState.HOM_REF, C, C, CallState.HOM_VAR1},
{Aref, Aref, TruthState.HOM_REF, G, G, CallState.HOM_VAR1},
{Aref, Aref, TruthState.HOM_REF, T, T, CallState.HOM_VAR1},
//---
{Aref, C, TruthState.HET_REF_VAR1, Aref, Aref, CallState.HOM_REF},
{Aref, G, TruthState.HET_REF_VAR1, Aref, Aref, CallState.HOM_REF},
{Aref, T, TruthState.HET_REF_VAR1, Aref, Aref, CallState.HOM_REF},
{Aref, C, TruthState.HET_REF_VAR1, Aref, C, CallState.HET_REF_VAR1},
{Aref, C, TruthState.HET_REF_VAR1, Aref, G, CallState.HET_REF_VAR2},
{Aref, C, TruthState.HET_REF_VAR1, Aref, T, CallState.HET_REF_VAR2},
{Aref, G, TruthState.HET_REF_VAR1, Aref, C, CallState.HET_REF_VAR2},
{Aref, G, TruthState.HET_REF_VAR1, Aref, G, CallState.HET_REF_VAR1},
{Aref, G, TruthState.HET_REF_VAR1, Aref, T, CallState.HET_REF_VAR2},
{Aref, T, TruthState.HET_REF_VAR1, Aref, C, CallState.HET_REF_VAR2},
{Aref, T, TruthState.HET_REF_VAR1, Aref, G, CallState.HET_REF_VAR2},
{Aref, T, TruthState.HET_REF_VAR1, Aref, T, CallState.HET_REF_VAR1},
{Aref, C, TruthState.HET_REF_VAR1, C, G, CallState.HET_VAR1_VAR2},
{Aref, C, TruthState.HET_REF_VAR1, C, T, CallState.HET_VAR1_VAR2},
{Aref, C, TruthState.HET_REF_VAR1, G, T, CallState.HET_VAR3_VAR4}, // Why isn't this called HET_VAR2_VAR3???
{Aref, G, TruthState.HET_REF_VAR1, C, G, CallState.HET_VAR1_VAR2},
{Aref, G, TruthState.HET_REF_VAR1, C, T, CallState.HET_VAR3_VAR4},
{Aref, G, TruthState.HET_REF_VAR1, G, T, CallState.HET_VAR1_VAR2},
{Aref, T, TruthState.HET_REF_VAR1, C, G, CallState.HET_VAR3_VAR4},
{Aref, T, TruthState.HET_REF_VAR1, C, T, CallState.HET_VAR1_VAR2},
{Aref, T, TruthState.HET_REF_VAR1, G, T, CallState.HET_VAR1_VAR2},
{Aref, C, TruthState.HET_REF_VAR1, C, C, CallState.HOM_VAR1},
{Aref, C, TruthState.HET_REF_VAR1, G, G, CallState.HOM_VAR2},
{Aref, C, TruthState.HET_REF_VAR1, T, T, CallState.HOM_VAR2},
{Aref, G, TruthState.HET_REF_VAR1, C, C, CallState.HOM_VAR2},
{Aref, G, TruthState.HET_REF_VAR1, G, G, CallState.HOM_VAR1},
{Aref, G, TruthState.HET_REF_VAR1, T, T, CallState.HOM_VAR2},
{Aref, T, TruthState.HET_REF_VAR1, C, C, CallState.HOM_VAR2},
{Aref, T, TruthState.HET_REF_VAR1, G, G, CallState.HOM_VAR2},
{Aref, T, TruthState.HET_REF_VAR1, T, T, CallState.HOM_VAR1},
//---
{C, G, TruthState.HET_VAR1_VAR2, Aref, Aref, CallState.HOM_REF},
{C, T, TruthState.HET_VAR1_VAR2, Aref, Aref, CallState.HOM_REF},
{G, T, TruthState.HET_VAR1_VAR2, Aref, Aref, CallState.HOM_REF},
{C, G, TruthState.HET_VAR1_VAR2, Aref, C, CallState.HET_REF_VAR1},
{C, G, TruthState.HET_VAR1_VAR2, Aref, G, CallState.HET_REF_VAR1},
{C, G, TruthState.HET_VAR1_VAR2, Aref, T, CallState.HET_REF_VAR3},
{C, T, TruthState.HET_VAR1_VAR2, Aref, C, CallState.HET_REF_VAR1},
{C, T, TruthState.HET_VAR1_VAR2, Aref, G, CallState.HET_REF_VAR3},
{C, T, TruthState.HET_VAR1_VAR2, Aref, T, CallState.HET_REF_VAR1},
{G, T, TruthState.HET_VAR1_VAR2, Aref, C, CallState.HET_REF_VAR3},
{G, T, TruthState.HET_VAR1_VAR2, Aref, G, CallState.HET_REF_VAR1},
{G, T, TruthState.HET_VAR1_VAR2, Aref, T, CallState.HET_REF_VAR1},
{C, G, TruthState.HET_VAR1_VAR2, C, C, CallState.HOM_VAR1},
{C, G, TruthState.HET_VAR1_VAR2, G, G, CallState.HOM_VAR1},
{C, G, TruthState.HET_VAR1_VAR2, T, T, CallState.HOM_VAR3},
{C, T, TruthState.HET_VAR1_VAR2, C, C, CallState.HOM_VAR1},
{C, T, TruthState.HET_VAR1_VAR2, G, G, CallState.HOM_VAR3},
{C, T, TruthState.HET_VAR1_VAR2, T, T, CallState.HOM_VAR1},
{G, T, TruthState.HET_VAR1_VAR2, C, C, CallState.HOM_VAR3},
{G, T, TruthState.HET_VAR1_VAR2, G, G, CallState.HOM_VAR1},
{G, T, TruthState.HET_VAR1_VAR2, T, T, CallState.HOM_VAR1},
{C, G, TruthState.HET_VAR1_VAR2, C, G, CallState.HET_VAR1_VAR2},
{C, G, TruthState.HET_VAR1_VAR2, C, T, CallState.HET_VAR1_VAR3},
{C, G, TruthState.HET_VAR1_VAR2, G, T, CallState.HET_VAR1_VAR3},
{C, T, TruthState.HET_VAR1_VAR2, C, G, CallState.HET_VAR1_VAR3},
{C, T, TruthState.HET_VAR1_VAR2, C, T, CallState.HET_VAR1_VAR2},
{C, T, TruthState.HET_VAR1_VAR2, G, T, CallState.HET_VAR1_VAR3},
{G, T, TruthState.HET_VAR1_VAR2, C, G, CallState.HET_VAR1_VAR3},
{G, T, TruthState.HET_VAR1_VAR2, C, T, CallState.HET_VAR1_VAR3},
{G, T, TruthState.HET_VAR1_VAR2, G, T, CallState.HET_VAR1_VAR2},
//---
{C, C, TruthState.HOM_VAR1, Aref, Aref, CallState.HOM_REF},
{G, G, TruthState.HOM_VAR1, Aref, Aref, CallState.HOM_REF},
{T, T, TruthState.HOM_VAR1, Aref, Aref, CallState.HOM_REF},
{C, C, TruthState.HOM_VAR1, Aref, C, CallState.HET_REF_VAR1},
{C, C, TruthState.HOM_VAR1, Aref, G, CallState.HET_REF_VAR2},
{C, C, TruthState.HOM_VAR1, Aref, T, CallState.HET_REF_VAR2},
{G, G, TruthState.HOM_VAR1, Aref, C, CallState.HET_REF_VAR2},
{G, G, TruthState.HOM_VAR1, Aref, G, CallState.HET_REF_VAR1},
{G, G, TruthState.HOM_VAR1, Aref, T, CallState.HET_REF_VAR2},
{T, T, TruthState.HOM_VAR1, Aref, C, CallState.HET_REF_VAR2},
{T, T, TruthState.HOM_VAR1, Aref, G, CallState.HET_REF_VAR2},
{T, T, TruthState.HOM_VAR1, Aref, T, CallState.HET_REF_VAR1},
{C, C, TruthState.HOM_VAR1, C, C, CallState.HOM_VAR1},
{C, C, TruthState.HOM_VAR1, G, G, CallState.HOM_VAR2},
{C, C, TruthState.HOM_VAR1, T, T, CallState.HOM_VAR2},
{G, G, TruthState.HOM_VAR1, C, C, CallState.HOM_VAR2},
{G, G, TruthState.HOM_VAR1, G, G, CallState.HOM_VAR1},
{G, G, TruthState.HOM_VAR1, T, T, CallState.HOM_VAR2},
{T, T, TruthState.HOM_VAR1, C, C, CallState.HOM_VAR2},
{T, T, TruthState.HOM_VAR1, G, G, CallState.HOM_VAR2},
{T, T, TruthState.HOM_VAR1, T, T, CallState.HOM_VAR1},
{C, C, TruthState.HOM_VAR1, C, G, CallState.HET_VAR1_VAR2},
{C, C, TruthState.HOM_VAR1, C, T, CallState.HET_VAR1_VAR2},
{C, C, TruthState.HOM_VAR1, G, T, CallState.HET_VAR3_VAR4},
{G, G, TruthState.HOM_VAR1, C, G, CallState.HET_VAR1_VAR2},
{G, G, TruthState.HOM_VAR1, C, T, CallState.HET_VAR3_VAR4},
{G, G, TruthState.HOM_VAR1, G, T, CallState.HET_VAR1_VAR2},
{T, T, TruthState.HOM_VAR1, C, G, CallState.HET_VAR3_VAR4},
{T, T, TruthState.HOM_VAR1, C, T, CallState.HET_VAR1_VAR2},
{T, T, TruthState.HOM_VAR1, G, T, CallState.HET_VAR1_VAR2},
// Some Indel Cases
{AA, AA, TruthState.HOM_VAR1, AAAA, AAAAA, CallState.HET_VAR3_VAR4},
{AA, AAA, TruthState.HET_VAR1_VAR2, AAAA, AAAAA, CallState.HET_VAR3_VAR4},
// Mixed Cases
{C, AA, TruthState.IS_MIXED, AAAA, AAAAA, CallState.HET_VAR1_VAR2},
{AA, C, TruthState.IS_MIXED, AAAA, AAAAA, CallState.HET_VAR1_VAR2},
{AA, AA, TruthState.HOM_VAR1, C, AAAAA, CallState.IS_MIXED},
{AA, AAA, TruthState.HET_VAR1_VAR2, AAAA, C, CallState.IS_MIXED},
// No Call cases
{Allele.NO_CALL, Aref, TruthState.NO_CALL, Aref, Aref, CallState.HOM_REF},
{Aref, Allele.NO_CALL, TruthState.NO_CALL, Aref, Aref, CallState.HOM_REF},
{Allele.NO_CALL, Allele.NO_CALL, TruthState.NO_CALL, Aref, Aref, CallState.HOM_REF},
{Aref, Aref, TruthState.HOM_REF, Allele.NO_CALL, Aref, CallState.NO_CALL},
{Aref, Aref, TruthState.HOM_REF, Aref, Allele.NO_CALL, CallState.NO_CALL},
{Aref, Aref, TruthState.HOM_REF, Allele.NO_CALL, Allele.NO_CALL, CallState.NO_CALL}
};
// Rebuild a new set of unit test data with all permutations of alleles.
final List<Object[]> allPermutationUnitTestDataList = new ArrayList<Object[]>();
for (final Object[] unitTestData : originalUnitTestData) {
allPermutationUnitTestDataList.add(unitTestData);
final Allele truthAllele1 = (Allele) unitTestData[0];
final Allele truthAllele2 = (Allele) unitTestData[1];
final TruthState expectedTruthState = (TruthState) unitTestData[2];
final Allele callAllele1 = (Allele) unitTestData[3];
final Allele callAllele2 = (Allele) unitTestData[4];
final CallState expectedCallState = (CallState) unitTestData[5];
if (!callAllele1.equals(callAllele2)) {
allPermutationUnitTestDataList.add(new Object[]{truthAllele1, truthAllele2, expectedTruthState, callAllele2, callAllele1, expectedCallState});
}
if (!truthAllele1.equals(truthAllele2)) {
allPermutationUnitTestDataList.add(new Object[]{truthAllele2, truthAllele1, expectedTruthState, callAllele1, callAllele2, expectedCallState});
if (!callAllele1.equals(callAllele2)) {
allPermutationUnitTestDataList.add(new Object[]{truthAllele2, truthAllele1, expectedTruthState, callAllele2, callAllele1, expectedCallState});
}
}
}
Object[][] allPermutationUnitTestData = new Object[allPermutationUnitTestDataList.size()][];
allPermutationUnitTestData = allPermutationUnitTestDataList.toArray(allPermutationUnitTestData);
return allPermutationUnitTestData;
}
@Test(dataProvider = "genotypeConcordanceDetermineStateDataProvider")
public void testGenotypeConcordanceDetermineState(final Allele truthAllele1, final Allele truthAllele2, final TruthState expectedTruthState,
                                                  final Allele callAllele1, final Allele callAllele2, final CallState expectedCallState) throws Exception {
    // Build a single-genotype variant context for the truth sample.
    final Genotype truthGenotype = GenotypeBuilder.create(TRUTH_SAMPLE_NAME, Arrays.asList(truthAllele1, truthAllele2));
    final VariantContext truthContext = new VariantContextBuilder("test", snpLoc, snpLocStart, snpLocStop,
            makeUniqueListOfAlleles(truthAllele1, truthAllele2)).genotypes(truthGenotype).make();
    // Build the matching single-genotype context for the call sample.
    final Genotype callGenotype = GenotypeBuilder.create(CALL_SAMPLE_NAME, Arrays.asList(callAllele1, callAllele2));
    final VariantContext callContext = new VariantContextBuilder("test", snpLoc, snpLocStart, snpLocStop,
            makeUniqueListOfAlleles(callAllele1, callAllele2)).genotypes(callGenotype).make();
    // Zero GQ/DP thresholds: the states are determined purely by the alleles.
    testGenotypeConcordanceDetermineState(truthContext, expectedTruthState, callContext, expectedCallState, 0, 0);
}
@Test
public void testGenotypeConcordanceDetermineStateNull() throws Exception {
    // A single HET A/C context, used on whichever side is non-null below.
    final Genotype genotype = GenotypeBuilder.create(TRUTH_SAMPLE_NAME, Arrays.asList(Aref, C));
    final VariantContext hetContext = new VariantContextBuilder("test", snpLoc, snpLocStart, snpLocStop,
            makeUniqueListOfAlleles(Aref, C)).genotypes(genotype).make();
    // A null context on either side must map to the MISSING state for that side only.
    testGenotypeConcordanceDetermineState(null, TruthState.MISSING, null, CallState.MISSING, 0, 0);
    testGenotypeConcordanceDetermineState(hetContext, TruthState.HET_REF_VAR1, null, CallState.MISSING, 0, 0);
    testGenotypeConcordanceDetermineState(null, TruthState.MISSING, hetContext, CallState.HET_REF_VAR1, 0, 0);
}
@Test
public void testGenotypeConcordanceDetermineStateFilter() throws Exception {
    final Set<String> siteFilters = new HashSet<String>(Arrays.asList("BAD!"));
    // Site-level (VariantContext) filtering: a filtered site maps to VC_FILTERED
    // on whichever side carries it.
    final Genotype hetAC = GenotypeBuilder.create(TRUTH_SAMPLE_NAME, Arrays.asList(Aref, C));
    final VariantContext vcFiltered = new VariantContextBuilder("test", snpLoc, snpLocStart, snpLocStop,
            makeUniqueListOfAlleles(Aref, C)).genotypes(hetAC).filters(siteFilters).make();
    final Genotype hetAT = GenotypeBuilder.create(TRUTH_SAMPLE_NAME, Arrays.asList(Aref, T));
    final VariantContext vcNotFiltered = new VariantContextBuilder("test", snpLoc, snpLocStart, snpLocStop,
            makeUniqueListOfAlleles(Aref, T)).genotypes(hetAT).make();
    testGenotypeConcordanceDetermineState(vcFiltered, TruthState.VC_FILTERED, vcNotFiltered, CallState.HET_REF_VAR1, 0, 0);
    testGenotypeConcordanceDetermineState(vcNotFiltered, TruthState.HET_REF_VAR1, vcFiltered, CallState.VC_FILTERED, 0, 0);
    testGenotypeConcordanceDetermineState(vcFiltered, TruthState.VC_FILTERED, vcFiltered, CallState.VC_FILTERED, 0, 0);
    // Genotype-level filtering: a filtered genotype maps to GT_FILTERED instead.
    final List<String> genotypeFilters = new ArrayList<String>(Arrays.asList("WICKED"));
    final Genotype filteredGenotype = new GenotypeBuilder(TRUTH_SAMPLE_NAME, Arrays.asList(Aref, C)).filters(genotypeFilters).make();
    final VariantContext vcGtFiltered = new VariantContextBuilder("test", snpLoc, snpLocStart, snpLocStop,
            makeUniqueListOfAlleles(Aref, C)).genotypes(filteredGenotype).make();
    testGenotypeConcordanceDetermineState(vcGtFiltered, TruthState.GT_FILTERED, vcNotFiltered, CallState.HET_REF_VAR1, 0, 0);
    testGenotypeConcordanceDetermineState(vcNotFiltered, TruthState.HET_REF_VAR1, vcGtFiltered, CallState.GT_FILTERED, 0, 0);
    testGenotypeConcordanceDetermineState(vcGtFiltered, TruthState.GT_FILTERED, vcGtFiltered, CallState.GT_FILTERED, 0, 0);
}
@Test
public void testGenotypeConcordanceDetermineStateDp() throws Exception {
    // Genotype with no DP annotation (the expected states below imply it passes the DP threshold).
    final Genotype gtNoDp = GenotypeBuilder.create(TRUTH_SAMPLE_NAME, Arrays.asList(Aref, C));
    final VariantContext vcNoDp = new VariantContextBuilder("test", snpLoc, snpLocStart, snpLocStop,
            makeUniqueListOfAlleles(Aref, C)).genotypes(gtNoDp).make();
    // Genotype annotated with DP=4.
    final Genotype gtDp4 = new GenotypeBuilder(TRUTH_SAMPLE_NAME, Arrays.asList(Aref, C)).DP(4).make();
    final VariantContext vcDp4 = new VariantContextBuilder("test", snpLoc, snpLocStart, snpLocStop,
            makeUniqueListOfAlleles(Aref, C)).genotypes(gtDp4).make();
    // DP=4 below minDp=20 -> LOW_DP on the low-depth side; DP=4 at/above minDp=2 -> normal HET state.
    testGenotypeConcordanceDetermineState(vcDp4, TruthState.LOW_DP, vcNoDp, CallState.HET_REF_VAR1, 0, 20);
    testGenotypeConcordanceDetermineState(vcDp4, TruthState.HET_REF_VAR1, vcDp4, CallState.HET_REF_VAR1, 0, 2);
    testGenotypeConcordanceDetermineState(vcNoDp, TruthState.HET_REF_VAR1, vcDp4, CallState.LOW_DP, 0, 20);
    testGenotypeConcordanceDetermineState(vcNoDp, TruthState.HET_REF_VAR1, vcDp4, CallState.HET_REF_VAR1, 0, 2);
    testGenotypeConcordanceDetermineState(vcDp4, TruthState.LOW_DP, vcDp4, CallState.LOW_DP, 0, 20);
    testGenotypeConcordanceDetermineState(vcDp4, TruthState.HET_REF_VAR1, vcDp4, CallState.HET_REF_VAR1, 0, 2);
}
@Test
public void testGenotypeConcordanceDetermineStateGq() throws Exception {
    // Genotype with no GQ annotation (the expected states below imply it passes the GQ threshold).
    final Genotype gtNoGq = GenotypeBuilder.create(TRUTH_SAMPLE_NAME, Arrays.asList(Aref, C));
    final VariantContext vcNoGq = new VariantContextBuilder("test", snpLoc, snpLocStart, snpLocStop,
            makeUniqueListOfAlleles(Aref, C)).genotypes(gtNoGq).make();
    // Genotype annotated with GQ=4.
    final Genotype gtGq4 = new GenotypeBuilder(TRUTH_SAMPLE_NAME, Arrays.asList(Aref, C)).GQ(4).make();
    final VariantContext vcGq4 = new VariantContextBuilder("test", snpLoc, snpLocStart, snpLocStop,
            makeUniqueListOfAlleles(Aref, C)).genotypes(gtGq4).make();
    // GQ=4 below minGq=20 -> LOW_GQ on the low-quality side; GQ=4 at/above minGq=2 -> normal HET state.
    testGenotypeConcordanceDetermineState(vcGq4, TruthState.LOW_GQ, vcNoGq, CallState.HET_REF_VAR1, 20, 0);
    testGenotypeConcordanceDetermineState(vcGq4, TruthState.HET_REF_VAR1, vcGq4, CallState.HET_REF_VAR1, 2, 0);
    testGenotypeConcordanceDetermineState(vcNoGq, TruthState.HET_REF_VAR1, vcGq4, CallState.LOW_GQ, 20, 0);
    testGenotypeConcordanceDetermineState(vcNoGq, TruthState.HET_REF_VAR1, vcGq4, CallState.HET_REF_VAR1, 2, 0);
    testGenotypeConcordanceDetermineState(vcGq4, TruthState.LOW_GQ, vcGq4, CallState.LOW_GQ, 20, 0);
    testGenotypeConcordanceDetermineState(vcGq4, TruthState.HET_REF_VAR1, vcGq4, CallState.HET_REF_VAR1, 2, 0);
}
/**
 * Asserts that {@code GenotypeConcordance.determineState} classifies the given
 * truth/call variant-context pair into the expected truth and call states.
 *
 * @param truthVariantContext the truth-side context (may be null)
 * @param expectedTruthState  the truth state determineState() is expected to report
 * @param callVariantContext  the call-side context (may be null)
 * @param expectedCallState   the call state determineState() is expected to report
 * @param minGq               minimum genotype-quality threshold passed through
 * @param minDp               minimum depth threshold passed through
 */
private void testGenotypeConcordanceDetermineState(final VariantContext truthVariantContext, final TruthState expectedTruthState,
                                                   final VariantContext callVariantContext, final CallState expectedCallState,
                                                   final int minGq, final int minDp) {
    final TruthAndCallStates states = GenotypeConcordance.determineState(truthVariantContext, TRUTH_SAMPLE_NAME,
            callVariantContext, CALL_SAMPLE_NAME, minGq, minDp);
    Assert.assertEquals(states.truthState, expectedTruthState);
    Assert.assertEquals(states.callState, expectedCallState);
}
/**
 * Returns the distinct alleles among the arguments, dropping NO_CALL and always
 * including the A reference allele.
 */
private List<Allele> makeUniqueListOfAlleles(final Allele... alleles) {
    final Set<Allele> unique = new HashSet<Allele>();
    for (final Allele allele : alleles) {
        if (!allele.equals(Allele.NO_CALL)) {
            unique.add(allele);
        }
    }
    unique.add(Aref); // Set.add is a no-op when Aref is already present.
    return new ArrayList<Allele>(unique);
}
/**
 * Tests that indel alleles are normalized symmetrically: for each paired truth/call
 * record, normalizeAlleles() must produce matching allele pairs regardless of which
 * context is passed as "truth" and which as "call".
 */
@Test
public void testNormalizeAllelesForIndels() {
    final Path truthVcfPath = Paths.get(TEST_DATA_PATH.getAbsolutePath(), NORMALIZE_ALLELES_TRUTH);
    final Path callVcfPath = Paths.get(TEST_DATA_PATH.getAbsolutePath(), NORMALIZE_ALLELES_CALL);
    // try-with-resources: the original closed the readers only when every assertion passed,
    // leaking both file handles on the first failure.
    try (final VCFFileReader truthReader = new VCFFileReader(truthVcfPath.toFile(), false);
         final VCFFileReader callReader = new VCFFileReader(callVcfPath.toFile(), false)) {
        final Iterator<VariantContext> truthIterator = truthReader.iterator();
        final Iterator<VariantContext> callIterator = callReader.iterator();
        final String truthSample = truthReader.getFileHeader().getSampleNamesInOrder().get(0);
        final String callSample = callReader.getFileHeader().getSampleNamesInOrder().get(0);
        while (truthIterator.hasNext()) {
            // Guard the parallel iteration: the original called callIterator.next()
            // blindly and would have thrown NoSuchElementException on a short call VCF.
            Assert.assertTrue(callIterator.hasNext(), "call VCF has fewer records than truth VCF");
            final VariantContext truthCtx = truthIterator.next();
            final VariantContext callCtx = callIterator.next();
            // Normalization must agree in both argument orders.
            {
                final GenotypeConcordance.Alleles alleles = GenotypeConcordance.normalizeAlleles(truthCtx, truthSample, callCtx, callSample);
                Assert.assertEquals(alleles.truthAllele1, alleles.callAllele1);
                Assert.assertEquals(alleles.truthAllele2, alleles.callAllele2);
            }
            {
                final GenotypeConcordance.Alleles alleles = GenotypeConcordance.normalizeAlleles(callCtx, callSample, truthCtx, truthSample);
                Assert.assertEquals(alleles.truthAllele1, alleles.callAllele1);
                Assert.assertEquals(alleles.truthAllele2, alleles.callAllele2);
            }
        }
    }
}
@Test
public void testNoCallVariants() {
    // Run the tool end-to-end comparing two different samples from the same VCF;
    // a zero exit status means no-call genotypes were handled without error.
    final GenotypeConcordance concordance = new GenotypeConcordance();
    concordance.TRUTH_VCF = new File(TEST_DATA_PATH, "mini.vcf");
    concordance.TRUTH_SAMPLE = "NA20801";
    concordance.CALL_VCF = new File(TEST_DATA_PATH, "mini.vcf");
    concordance.CALL_SAMPLE = "NA19920";
    concordance.OUTPUT = new File(OUTPUT_DATA_PATH, "TwoNoCalls");
    concordance.OUTPUT_VCF = true;
    Assert.assertEquals(concordance.instanceMain(new String[0]), 0);
}
/**
 * Runs the tool on VCFs containing no-calls needing normalization and checks the
 * INDEL row of the resulting contingency metrics.
 *
 * @throws FileNotFoundException if the contingency metrics file was not written
 */
@Test
public void testNormalizeAllelesForWritingVCF() throws FileNotFoundException {
    final File truthVcfPath = new File(TEST_DATA_PATH.getAbsolutePath(), NORMALIZE_NO_CALLS_TRUTH);
    final File callVcfPath = new File(TEST_DATA_PATH.getAbsolutePath(), NORMALIZE_NO_CALLS_CALL);
    final File outputBaseFileName = new File(OUTPUT_DATA_PATH, "MultipleRefAlleles");
    final File outputContingencyMetrics = new File(outputBaseFileName.getAbsolutePath() + GenotypeConcordance.CONTINGENCY_METRICS_FILE_EXTENSION);
    outputContingencyMetrics.deleteOnExit();
    final GenotypeConcordance genotypeConcordance = new GenotypeConcordance();
    genotypeConcordance.TRUTH_VCF = truthVcfPath;
    genotypeConcordance.TRUTH_SAMPLE = "truth";
    genotypeConcordance.CALL_VCF = callVcfPath;
    genotypeConcordance.CALL_SAMPLE = "truth";
    // Reuse the base file instead of re-deriving the identical path a second time.
    genotypeConcordance.OUTPUT = outputBaseFileName;
    genotypeConcordance.OUTPUT_VCF = true;
    Assert.assertEquals(genotypeConcordance.instanceMain(new String[0]), 0);
    final MetricsFile<GenotypeConcordanceContingencyMetrics, Comparable<?>> output =
            new MetricsFile<GenotypeConcordanceContingencyMetrics, Comparable<?>>();
    // Close the reader deterministically; the original leaked the FileReader.
    final FileReader metricsReader = new FileReader(outputContingencyMetrics);
    try {
        output.read(metricsReader);
    } finally {
        try {
            metricsReader.close();
        } catch (final java.io.IOException ignored) {
            // best-effort close of a fully-read file; nothing useful to do here
        }
    }
    // Track whether an INDEL row exists: the original loop passed vacuously when none did.
    boolean sawIndelRow = false;
    for (final GenotypeConcordanceContingencyMetrics metrics : output.getMetrics()) {
        if (metrics.VARIANT_TYPE == VariantContext.Type.INDEL) {
            sawIndelRow = true;
            Assert.assertEquals(metrics.TP_COUNT, 3);
            Assert.assertEquals(metrics.TN_COUNT, 3);
            Assert.assertEquals(metrics.FP_COUNT, 0);
            Assert.assertEquals(metrics.FN_COUNT, 0);
            Assert.assertEquals(metrics.EMPTY_COUNT, 2);
        }
    }
    Assert.assertTrue(sawIndelRow, "no INDEL row found in the contingency metrics");
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.aries.transaction.jms.internal;
import java.util.concurrent.CopyOnWriteArrayList;
import javax.jms.Connection;
import javax.jms.ConnectionConsumer;
import javax.jms.ConnectionMetaData;
import javax.jms.Destination;
import javax.jms.ExceptionListener;
import javax.jms.JMSException;
import javax.jms.Queue;
import javax.jms.QueueConnection;
import javax.jms.QueueSession;
import javax.jms.ServerSessionPool;
import javax.jms.Session;
import javax.jms.TemporaryQueue;
import javax.jms.TemporaryTopic;
import javax.jms.Topic;
import javax.jms.TopicConnection;
import javax.jms.TopicSession;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Represents a proxy {@link Connection} which is-a {@link TopicConnection} and
 * {@link QueueConnection} which is pooled and on {@link #close()} will return
 * itself to the sessionPool.
 * <p>
 * <b>NOTE</b> this implementation is only intended for use when sending
 * messages. It does not deal with pooling of consumers; for that look at a
 * library like <a href="http://jencks.org/">Jencks</a> such as in <a
 * href="http://jencks.org/Message+Driven+POJOs">this example</a>
 */
public class PooledConnection implements TopicConnection, QueueConnection {
    // Note: the logger is static, so 'transient' (present in the original) was meaningless.
    private static final Logger LOG = LoggerFactory.getLogger(PooledConnection.class);

    /** The shared pool this proxy borrows from; set to null once close() has been called. */
    private ConnectionPool pool;
    /** Set by stop(); once true, assertNotClosed() treats this proxy as closed. */
    private boolean stopped;
    /** Temporary queues created through this proxy's sessions; deleted on close(). */
    private final CopyOnWriteArrayList<TemporaryQueue> connTempQueues = new CopyOnWriteArrayList<TemporaryQueue>();
    /** Temporary topics created through this proxy's sessions; deleted on close(). */
    private final CopyOnWriteArrayList<TemporaryTopic> connTempTopics = new CopyOnWriteArrayList<TemporaryTopic>();

    /**
     * Creates a pooled proxy around the given pool and increments the pool's
     * reference count so the underlying physical connection stays open while
     * this proxy is in use.
     */
    public PooledConnection(ConnectionPool pool) {
        this.pool = pool;
        this.pool.incrementReferenceCount();
    }

    /**
     * Factory method to create a new instance sharing the same pool.
     */
    public PooledConnection newInstance() {
        return new PooledConnection(pool);
    }

    /**
     * Deletes the temporary destinations created through this proxy, then returns
     * the underlying connection to the pool rather than physically closing it.
     * Safe to call more than once: repeated calls are no-ops.
     */
    @Override
    public void close() throws JMSException {
        this.cleanupConnectionTemporaryDestinations();
        if (this.pool != null) {
            this.pool.decrementReferenceCount();
            this.pool = null;
        }
    }

    @Override
    public void start() throws JMSException {
        assertNotClosed();
        pool.start();
    }

    /**
     * Marks this proxy as stopped. The shared physical connection is NOT stopped,
     * since other borrowers may still be using it.
     * NOTE(review): after stop(), assertNotClosed() reports "Already closed", so
     * this proxy can no longer be started or used — confirm this is intended.
     */
    @Override
    public void stop() throws JMSException {
        stopped = true;
    }

    @Override
    public ConnectionConsumer createConnectionConsumer(Destination destination, String selector, ServerSessionPool serverSessionPool, int maxMessages)
            throws JMSException {
        return getConnection().createConnectionConsumer(destination, selector, serverSessionPool, maxMessages);
    }

    @Override
    public ConnectionConsumer createConnectionConsumer(Topic topic, String selector, ServerSessionPool serverSessionPool, int maxMessages) throws JMSException {
        return getConnection().createConnectionConsumer(topic, selector, serverSessionPool, maxMessages);
    }

    // Parameter names corrected to match the JMS signature: the first String is the
    // durable subscription name, the second is the message selector. The positional
    // delegation below is unchanged from the original.
    @Override
    public ConnectionConsumer createDurableConnectionConsumer(Topic topic, String subscriptionName, String messageSelector,
                                                              ServerSessionPool serverSessionPool, int maxMessages)
            throws JMSException {
        return getConnection().createDurableConnectionConsumer(topic, subscriptionName, messageSelector, serverSessionPool, maxMessages);
    }

    @Override
    public String getClientID() throws JMSException {
        return getConnection().getClientID();
    }

    @Override
    public ExceptionListener getExceptionListener() throws JMSException {
        return getConnection().getExceptionListener();
    }

    @Override
    public ConnectionMetaData getMetaData() throws JMSException {
        return getConnection().getMetaData();
    }

    @Override
    public void setExceptionListener(ExceptionListener exceptionListener) throws JMSException {
        getConnection().setExceptionListener(exceptionListener);
    }

    @Override
    public void setClientID(String clientID) throws JMSException {
        // ignore repeated calls to setClientID() with the same client id
        // this could happen when a JMS component such as Spring that uses a
        // PooledConnectionFactory shuts down and reinitializes.
        //
        // Look the current id up once; the original performed two separate
        // getConnection()/getClientID() round trips to read the same value.
        final String currentClientID = getConnection().getClientID();
        if (currentClientID == null || !currentClientID.equals(clientID)) {
            getConnection().setClientID(clientID);
        }
    }

    @Override
    public ConnectionConsumer createConnectionConsumer(Queue queue, String selector, ServerSessionPool serverSessionPool, int maxMessages) throws JMSException {
        return getConnection().createConnectionConsumer(queue, selector, serverSessionPool, maxMessages);
    }

    // Session factory methods
    // -------------------------------------------------------------------------

    @Override
    public QueueSession createQueueSession(boolean transacted, int ackMode) throws JMSException {
        return (QueueSession) createSession(transacted, ackMode);
    }

    @Override
    public TopicSession createTopicSession(boolean transacted, int ackMode) throws JMSException {
        return (TopicSession) createSession(transacted, ackMode);
    }

    @Override
    public Session createSession(boolean transacted, int ackMode) throws JMSException {
        PooledSession result;
        result = (PooledSession) pool.createSession(transacted, ackMode);
        // Add a temporary destination event listener to the session that notifies us when
        // the session creates temporary destinations, so close() can delete them.
        result.addTempDestEventListener(new PooledSessionEventListener() {
            public void onTemporaryQueueCreate(TemporaryQueue tempQueue) {
                connTempQueues.add(tempQueue);
            }

            public void onTemporaryTopicCreate(TemporaryTopic tempTopic) {
                connTempTopics.add(tempTopic);
            }
        });
        return (Session) result;
    }

    // Implementation methods
    // -------------------------------------------------------------------------

    /**
     * Returns the underlying physical connection from the pool.
     *
     * @throws JMSException if this proxy has been closed or stopped
     */
    public Connection getConnection() throws JMSException {
        assertNotClosed();
        return pool.getConnection();
    }

    /** Fails fast when this proxy has been stopped or returned to the pool. */
    protected void assertNotClosed() throws JMSException {
        if (stopped || pool == null) {
            throw new JMSException("Already closed");
        }
    }

    protected Session createSession(SessionKey key) throws JMSException {
        return getConnection().createSession(key.isTransacted(), key.getAckMode());
    }

    @Override
    public String toString() {
        return "PooledConnection { " + pool + " }";
    }

    /**
     * Remove all of the temporary destinations created for this connection.
     * This is important since the underlying connection may be reused over a
     * long period of time, accumulating all of the temporary destinations from
     * each use. However, from the perspective of the lifecycle from the
     * client's view, close() closes the connection and, therefore, deletes all
     * of the temporary destinations created.
     */
    protected void cleanupConnectionTemporaryDestinations() {
        for (TemporaryQueue tempQueue : connTempQueues) {
            try {
                tempQueue.delete();
            } catch (JMSException ex) {
                LOG.info("failed to delete Temporary Queue \"" + tempQueue.toString() + "\" on closing pooled connection: " + ex.getMessage());
            }
        }
        connTempQueues.clear();

        for (TemporaryTopic tempTopic : connTempTopics) {
            try {
                tempTopic.delete();
            } catch (JMSException ex) {
                LOG.info("failed to delete Temporary Topic \"" + tempTopic.toString() + "\" on closing pooled connection: " + ex.getMessage());
            }
        }
        connTempTopics.clear();
    }
}
| |
/*
* Copyright (c) 2003, 2020, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package com.sun.imageio.plugins.wbmp;
import java.awt.Rectangle;
import java.awt.image.BufferedImage;
import java.awt.image.DataBufferByte;
import java.awt.image.MultiPixelPackedSampleModel;
import java.awt.image.Raster;
import java.awt.image.WritableRaster;
import javax.imageio.IIOException;
import javax.imageio.ImageReader;
import javax.imageio.ImageReadParam;
import javax.imageio.ImageTypeSpecifier;
import javax.imageio.metadata.IIOMetadata;
import javax.imageio.spi.ImageReaderSpi;
import javax.imageio.stream.ImageInputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import com.sun.imageio.plugins.common.I18N;
import com.sun.imageio.plugins.common.ReaderUtil;
/** This class is the Java Image IO plugin reader for WBMP images.
* It may subsample the image, clip the image,
* and shift the decoded image origin if the proper decoding parameter
* are set in the provided {@code WBMPImageReadParam}.
*/
public class WBMPImageReader extends ImageReader {
/** The input stream where reads from */
private ImageInputStream iis = null;
/** Indicates whether the header is read. */
private boolean gotHeader = false;
/** The original image width. */
private int width;
/** The original image height. */
private int height;
private int wbmpType;
private WBMPMetadata metadata;
/** Constructs {@code WBMPImageReader} from the provided
* {@code ImageReaderSpi}.
*/
public WBMPImageReader(ImageReaderSpi originator) {
super(originator);
}
@Override
public void setInput(Object input,
boolean seekForwardOnly,
boolean ignoreMetadata) {
super.setInput(input, seekForwardOnly, ignoreMetadata);
iis = (ImageInputStream) input; // Always works
gotHeader = false;
}
@Override
public int getNumImages(boolean allowSearch) throws IOException {
if (iis == null) {
throw new IllegalStateException(I18N.getString("GetNumImages0"));
}
if (seekForwardOnly && allowSearch) {
throw new IllegalStateException(I18N.getString("GetNumImages1"));
}
return 1;
}
@Override
public int getWidth(int imageIndex) throws IOException {
checkIndex(imageIndex);
readHeader();
return width;
}
@Override
public int getHeight(int imageIndex) throws IOException {
checkIndex(imageIndex);
readHeader();
return height;
}
@Override
public boolean isRandomAccessEasy(int imageIndex) throws IOException {
checkIndex(imageIndex);
return true;
}
private void checkIndex(int imageIndex) {
if (imageIndex != 0) {
throw new IndexOutOfBoundsException(I18N.getString("WBMPImageReader0"));
}
}
public void readHeader() throws IOException {
if (gotHeader)
return;
if (iis == null) {
throw new IllegalStateException("Input source not set!");
}
metadata = new WBMPMetadata();
wbmpType = iis.readByte(); // TypeField
byte fixHeaderField = iis.readByte();
// check for valid wbmp image
if (fixHeaderField != 0
|| !isValidWbmpType(wbmpType))
{
throw new IIOException(I18N.getString("WBMPImageReader2"));
}
metadata.wbmpType = wbmpType;
// Read image width
width = ReaderUtil.readMultiByteInteger(iis);
metadata.width = width;
// Read image height
height = ReaderUtil.readMultiByteInteger(iis);
metadata.height = height;
gotHeader = true;
}
@Override
public Iterator<ImageTypeSpecifier> getImageTypes(int imageIndex)
throws IOException {
checkIndex(imageIndex);
readHeader();
BufferedImage bi =
new BufferedImage(1, 1, BufferedImage.TYPE_BYTE_BINARY);
ArrayList<ImageTypeSpecifier> list = new ArrayList<>(1);
list.add(new ImageTypeSpecifier(bi));
return list.iterator();
}
@Override
public ImageReadParam getDefaultReadParam() {
return new ImageReadParam();
}
@Override
public IIOMetadata getImageMetadata(int imageIndex)
throws IOException {
checkIndex(imageIndex);
if (metadata == null) {
readHeader();
}
return metadata;
}
@Override
public IIOMetadata getStreamMetadata() throws IOException {
return null;
}
@Override
public BufferedImage read(int imageIndex, ImageReadParam param)
throws IOException {
if (iis == null) {
throw new IllegalStateException(I18N.getString("WBMPImageReader1"));
}
checkIndex(imageIndex);
clearAbortRequest();
processImageStarted(imageIndex);
if (param == null)
param = getDefaultReadParam();
//read header
readHeader();
Rectangle sourceRegion = new Rectangle(0, 0, 0, 0);
Rectangle destinationRegion = new Rectangle(0, 0, 0, 0);
computeRegions(param, this.width, this.height,
param.getDestination(),
sourceRegion,
destinationRegion);
int scaleX = param.getSourceXSubsampling();
int scaleY = param.getSourceYSubsampling();
int xOffset = param.getSubsamplingXOffset();
int yOffset = param.getSubsamplingYOffset();
// If the destination is provided, then use it. Otherwise, create new one
BufferedImage bi = param.getDestination();
if (bi == null)
bi = new BufferedImage(destinationRegion.x + destinationRegion.width,
destinationRegion.y + destinationRegion.height,
BufferedImage.TYPE_BYTE_BINARY);
boolean noTransform =
destinationRegion.equals(new Rectangle(0, 0, width, height)) &&
destinationRegion.equals(new Rectangle(0, 0, bi.getWidth(), bi.getHeight()));
// Get the image data.
WritableRaster tile = bi.getWritableTile(0, 0);
// Get the SampleModel.
MultiPixelPackedSampleModel sm =
(MultiPixelPackedSampleModel)bi.getSampleModel();
if (noTransform) {
if (abortRequested()) {
processReadAborted();
return bi;
}
// If noTransform is necessary, read the data.
iis.read(((DataBufferByte)tile.getDataBuffer()).getData(),
0, height*sm.getScanlineStride());
processImageUpdate(bi,
0, 0,
width, height, 1, 1,
new int[]{0});
processImageProgress(100.0F);
} else {
int len = (this.width + 7) / 8;
byte[] buf = new byte[len];
byte[] data = ((DataBufferByte)tile.getDataBuffer()).getData();
int lineStride = sm.getScanlineStride();
iis.skipBytes(len * sourceRegion.y);
int skipLength = len * (scaleY - 1);
// cache the values to avoid duplicated computation
int[] srcOff = new int[destinationRegion.width];
int[] destOff = new int[destinationRegion.width];
int[] srcPos = new int[destinationRegion.width];
int[] destPos = new int[destinationRegion.width];
for (int i = destinationRegion.x, x = sourceRegion.x, j = 0;
i < destinationRegion.x + destinationRegion.width;
i++, j++, x += scaleX) {
srcPos[j] = x >> 3;
srcOff[j] = 7 - (x & 7);
destPos[j] = i >> 3;
destOff[j] = 7 - (i & 7);
}
for (int j = 0, y = sourceRegion.y,
k = destinationRegion.y * lineStride;
j < destinationRegion.height; j++, y+=scaleY) {
if (abortRequested())
break;
iis.read(buf, 0, len);
for (int i = 0; i < destinationRegion.width; i++) {
//get the bit and assign to the data buffer of the raster
int v = (buf[srcPos[i]] >> srcOff[i]) & 1;
data[k + destPos[i]] |= v << destOff[i];
}
k += lineStride;
iis.skipBytes(skipLength);
processImageUpdate(bi,
0, j,
destinationRegion.width, 1, 1, 1,
new int[]{0});
processImageProgress(100.0F*j/destinationRegion.height);
}
}
if (abortRequested())
processReadAborted();
else
processImageComplete();
return bi;
}
    /**
     * Indicates that this reader supports returning pixel data as a raster.
     *
     * @return {@code true} always; WBMP pixel data can be obtained via
     *         {@link #readRaster}.
     */
    @Override
    public boolean canReadRaster() {
        return true;
    }
@Override
public Raster readRaster(int imageIndex,
ImageReadParam param) throws IOException {
BufferedImage bi = read(imageIndex, param);
return bi.getData();
}
    /**
     * Resets this reader to its initial state: clears the superclass state
     * (input, listeners, locale) and drops this reader's cached stream and
     * header flag so the next input is re-parsed from scratch.
     */
    @Override
    public void reset() {
        super.reset();
        iis = null;
        // Force the WBMP header to be re-read for the next input.
        gotHeader = false;
    }
/*
* This method verifies that given byte is valid wbmp type marker.
* At the moment only 0x0 marker is described by wbmp spec.
*/
boolean isValidWbmpType(int type) {
return type == 0;
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.ignite.compute;
import java.net.URI;
import java.util.Map;
import org.apache.camel.Consumer;
import org.apache.camel.Processor;
import org.apache.camel.Producer;
import org.apache.camel.component.ignite.AbstractIgniteEndpoint;
import org.apache.camel.component.ignite.ClusterGroupExpression;
import org.apache.camel.component.ignite.IgniteComponent;
import org.apache.camel.spi.Metadata;
import org.apache.camel.spi.UriEndpoint;
import org.apache.camel.spi.UriParam;
import org.apache.camel.spi.UriPath;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteCompute;
/**
 * The Ignite Compute endpoint is one of camel-ignite endpoints which allows you to run
 * <a href="https://apacheignite.readme.io/docs/compute-grid">compute operations</a>
 * on the cluster by passing in an IgniteCallable, an IgniteRunnable, an IgniteClosure,
 * or collections of them, along with their parameters if necessary.
 * This endpoint only supports producers.
 */
@UriEndpoint(firstVersion = "2.17.0", scheme = "ignite-compute", title = "Ignite Compute", syntax = "ignite-compute:endpointId", label = "nosql,cache,compute", producerOnly = true)
public class IgniteComputeEndpoint extends AbstractIgniteEndpoint {
    // Mandatory URI path segment; required by the URI syntax but otherwise unused.
    @UriPath @Metadata(required = "true")
    private String endpointId;
    // Optional expression that resolves the cluster group to run jobs on.
    @UriParam(label = "producer")
    private ClusterGroupExpression clusterGroupExpression;
    // The compute operation to perform (CALL, BROADCAST, APPLY, ...).
    @UriParam(label = "producer") @Metadata(required = "true")
    private IgniteComputeExecutionType executionType;
    // Task name; only used with the EXECUTE execution type.
    @UriParam(label = "producer")
    private String taskName;
    // Name applied to the compute job via IgniteCompute#withName(String).
    @UriParam(label = "producer")
    private String computeName;
    // Job timeout applied via IgniteCompute#withTimeout(long).
    @UriParam(label = "producer")
    private Long timeoutMillis;
    /**
     * @deprecated use {@link #IgniteComputeEndpoint(String, String, Map, IgniteComputeComponent)} instead.
     */
    @Deprecated
    public IgniteComputeEndpoint(String uri, URI remainingUri, Map<String, Object> parameters, IgniteComponent igniteComponent) throws ClassNotFoundException {
        super(uri, igniteComponent);
    }
    public IgniteComputeEndpoint(String uri, String remaining, Map<String, Object> parameters, IgniteComputeComponent igniteComponent) throws ClassNotFoundException {
        super(uri, igniteComponent);
    }
    @Override
    public Producer createProducer() throws Exception {
        return new IgniteComputeProducer(this);
    }
    /**
     * @throws UnsupportedOperationException always; this endpoint is producer-only.
     */
    @Override
    public Consumer createConsumer(Processor processor) throws Exception {
        throw new UnsupportedOperationException("The Ignite Compute endpoint does not support consumers.");
    }
    /**
     * Builds the {@link IgniteCompute} instance used to run jobs: scoped to
     * the configured cluster group (if any), and carrying the configured
     * compute name and timeout (if set).
     *
     * @return a configured IgniteCompute instance.
     */
    public IgniteCompute createIgniteCompute() {
        Ignite ignite = ignite();
        IgniteCompute compute = clusterGroupExpression == null ? ignite.compute() : ignite.compute(clusterGroupExpression.getClusterGroup(ignite));
        if (computeName != null) {
            compute = compute.withName(computeName);
        }
        if (timeoutMillis != null) {
            compute = compute.withTimeout(timeoutMillis);
        }
        return compute;
    }
    /**
     * Gets the endpoint ID.
     *
     * @return the endpoint ID (not used).
     */
    public String getEndpointId() {
        return endpointId;
    }
    /**
     * The endpoint ID (not used).
     *
     * @param endpointId endpoint ID (not used)
     */
    public void setEndpointId(String endpointId) {
        this.endpointId = endpointId;
    }
    /**
     * Gets the cluster group expression.
     *
     * @return the cluster group expression, or null if none configured.
     */
    public ClusterGroupExpression getClusterGroupExpression() {
        return clusterGroupExpression;
    }
    /**
     * An expression that returns the Cluster Group for the IgniteCompute instance.
     *
     * @param clusterGroupExpression cluster group expression
     */
    public void setClusterGroupExpression(ClusterGroupExpression clusterGroupExpression) {
        this.clusterGroupExpression = clusterGroupExpression;
    }
    /**
     * Gets the execution type of this producer.
     *
     * @return the configured execution type.
     */
    public IgniteComputeExecutionType getExecutionType() {
        return executionType;
    }
    /**
     * The compute operation to perform. Possible values: CALL, BROADCAST, APPLY,
     * EXECUTE, RUN, AFFINITY_CALL, AFFINITY_RUN.
     * The component expects different payload types depending on the operation.
     *
     * @param executionType the compute operation to perform.
     */
    public void setExecutionType(IgniteComputeExecutionType executionType) {
        this.executionType = executionType;
    }
    /**
     * Gets the task name, only applicable if using the {@link IgniteComputeExecutionType#EXECUTE} execution type.
     *
     * @return the task name, or null if none configured.
     */
    public String getTaskName() {
        return taskName;
    }
    /**
     * The task name, only applicable if using the {@link IgniteComputeExecutionType#EXECUTE} execution type.
     *
     * @param taskName the task name.
     */
    public void setTaskName(String taskName) {
        this.taskName = taskName;
    }
    /**
     * Gets the name of the compute job, which will be set via {@link IgniteCompute#withName(String)}.
     *
     * @return the compute job name, or null if none configured.
     */
    public String getComputeName() {
        return computeName;
    }
    /**
     * The name of the compute job, which will be set via {@link IgniteCompute#withName(String)}.
     *
     * @param computeName the compute job name.
     */
    public void setComputeName(String computeName) {
        this.computeName = computeName;
    }
    /**
     * Gets the timeout interval for triggered jobs, in milliseconds, which will be set via {@link IgniteCompute#withTimeout(long)}.
     *
     * @return the job timeout in milliseconds, or null if none configured.
     */
    public Long getTimeoutMillis() {
        return timeoutMillis;
    }
    /**
     * The timeout interval for triggered jobs, in milliseconds, which will be set via {@link IgniteCompute#withTimeout(long)}.
     *
     * @param timeoutMillis the job timeout in milliseconds.
     */
    public void setTimeoutMillis(Long timeoutMillis) {
        this.timeoutMillis = timeoutMillis;
    }
}
| |
package uk.ac.ed.learn9.bb.timetabling.util;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Types;
import uk.ac.ed.learn9.bb.timetabling.RdbIdSource;
import uk.ac.ed.learn9.bb.timetabling.data.AcademicYearCode;
import uk.ac.ed.learn9.bb.timetabling.data.Activity;
import uk.ac.ed.learn9.bb.timetabling.data.ActivityTemplate;
import uk.ac.ed.learn9.bb.timetabling.data.ActivityType;
import uk.ac.ed.learn9.bb.timetabling.data.Module;
import uk.ac.ed.learn9.bb.timetabling.data.TimetablingCourseCode;
/**
 * Class for setting up/tearing down test data in the RDB.
 */
public class RdbUtil {
    /**
     * How to mark a test activity as scheduled, in the RDB. Although an actual
     * RDB contains more detail than this, for our purposes we only need to know
     * if it's scheduled (and therefore included in the synchronisation) or not
     * (and therefore not).
     */
    public enum SchedulingMethod {
        /**
         * The activity has not been scheduled (and therefore is not to be
         * synchronised).
         */
        NOT_SCHEDULED,
        /**
         * The activity has been scheduled (and therefore should be synchronised).
         */
        SCHEDULED;
    }
    /**
     * How to flag an activity template in relation to synchronisation to the VLE.
     * {@link TemplateForVle#NOT_SPECIFIED} is currently considered equivalent
     * to {@link TemplateForVle#FOR_VLE}.
     */
    public enum TemplateForVle {
        NOT_SPECIFIED,
        NOT_FOR_VLE,
        FOR_VLE;
    }
    /**
     * A random EUCLID course code for generating test activities.
     */
    public static final String TEST_COURSE_CODE = "PGHC11335";
    /**
     * A random EUCLID semester (period) code for generating test activities.
     */
    public static final String TEST_SEMESTER = "SEM1";
    /**
     * A random EUCLID occurrence code for generating test activities.
     */
    public static final String TEST_OCCURRENCE = "SV1";
    /**
     * A random timetabling department ID for use when generating test RDB
     * data. As of the time of writing the test RDB does not contain departments,
     * so this merely needs to be present, however if departments are later
     * used in scheduling this would need to be replaced with a real value.
     */
    public static final String DEPARTMENT_ID = "BF20A0ADF91117B06331C6ED3F9FC187";
    /**
     * An example week pattern for use when creating test RDB data.
     */
    public static final String WEEK_PATTERN = "11111111111111111111111111111111111111111111111111111111111111111";
    /**
     * Creates an activity in the test reporting database. Most of the
     * activity's fields are copied from its template via an
     * INSERT&hellip;SELECT statement.
     *
     * @param rdb a connection to the TEST reporting database.
     * @param activityTemplate the activity template to base this activity on.
     * @param module the module the activity will belong to; may be null.
     * @param schedulingMethod whether the activity has been scheduled.
     * @param activityOrdinality a number for this activity within the activities
     * under the same template.
     * @param idSource the RDB ID generator to use for ID values.
     * @return the newly generated activity, for reference.
     * @throws SQLException if there was a problem communicating with the reporting
     * database.
     */
    public static Activity createTestActivity(final Connection rdb,
            final ActivityTemplate activityTemplate, final Module module,
            final SchedulingMethod schedulingMethod, final int activityOrdinality,
            final RdbIdSource idSource)
        throws SQLException {
        final Activity activity = new Activity();
        activity.setActivityId(idSource.getId());
        activity.setActivityName(activityTemplate.getTemplateName() + "/"
            + activityOrdinality);
        activity.setModule(module);
        final PreparedStatement statement = rdb.prepareStatement(
            "INSERT INTO ACTIVITY (ID, NAME, HOST_KEY, DESCRIPTION, MODUL, "
                + "SCHEDULING_METHOD, DEPARTMENT, ACTIVITY_TYPE, ACTIVITY_TMPL, "
                + "ZONE, FACTOR, DURATION, LINK_SIZE, PLANNED_SIZE, SUGGESTED_DAYS, "
                + "SUGGESTED_PERIOD, POOLED_RESOURCES, "
                + "DAYS_FOR_MINIMUM, MINIMUM_TIME, MINIMUM_DAYS, DAYS_FOR_MAXIMUM, "
                + "MAXIMUM_TIME, MAXIMUM_DAYS, NAMED_AVAILABILITY, NAMED_USAGE_PREF, NAMED_STARTS_PREF, DICT, "
                + "WEEK_PATTERN, STARTS_PREFS, USAGE_PREFS, BASE_AVAILABILITY) "
                + "(SELECT ? ID, ? NAME, ? HOST_KEY, ? DESCRIPTION, ? MODUL, "
                + "? SCHEDULING_METHOD, DEPARTMENT, ACTIVITY_TYPE, ID ACTIVITY_TMPL, "
                + "ZONE, FACTOR, DURATION, LINK_SIZE, PLANNED_SIZE, SUGGESTED_DAYS, "
                + "SUGGESTED_PERIOD, POOLED_RESOURCES, "
                + "DAYS_FOR_MINIMUM, MINIMUM_TIME, MINIMUM_DAYS, DAYS_FOR_MAXIMUM, "
                + "MAXIMUM_TIME, MAXIMUM_DAYS, NAMED_AVAILABILITY, NAMED_USAGE_PREF, NAMED_STARTS_PREF, DICT, "
                + "WEEK_PATTERN, STARTS_PREFS, USAGE_PREFS, BASE_AVAILABILITY "
                + "FROM TEMPLATE "
                + "WHERE ID=?)"
        );
        try {
            int paramIdx = 1;
            statement.setString(paramIdx++, activity.getActivityId());
            statement.setString(paramIdx++, activity.getActivityName());
            statement.setString(paramIdx++, RdbUtil.generateHostKey(activity.getActivityId()));
            // The description simply mirrors the activity name.
            statement.setString(paramIdx++, activity.getActivityName());
            if (null != module) {
                statement.setString(paramIdx++, module.getModuleId());
            } else {
                statement.setNull(paramIdx++, Types.VARCHAR);
            }
            switch (schedulingMethod) {
                case SCHEDULED:
                    statement.setInt(paramIdx++, 1);
                    break;
                default:
                    statement.setInt(paramIdx++, 0);
                    break;
            }
            statement.setString(paramIdx++, activityTemplate.getTemplateId());
            statement.executeUpdate();
        } finally {
            statement.close();
        }
        return activity;
    }
    /**
     * Creates an activity in the test reporting database, where the activity
     * doesn't have a template.
     *
     * @param rdb a connection to the TEST reporting database.
     * @param activityName the name of the new activity.
     * @param module the module the activity will belong to; may be null.
     * @param schedulingMethod whether the activity has been scheduled.
     * @param activityType the type of the activity; may be null.
     * @param idSource the RDB ID generator to use for ID values.
     * @return the newly generated activity, for reference.
     * @throws SQLException if there was a problem communicating with the reporting
     * database.
     */
    public static Activity createTestActivity(final Connection rdb,
            final String activityName, final Module module,
            final SchedulingMethod schedulingMethod, final ActivityType activityType,
            final RdbIdSource idSource)
        throws SQLException {
        final Activity activity = new Activity();
        activity.setActivityId(idSource.getId());
        activity.setActivityName(activityName);
        activity.setModule(module);
        final PreparedStatement statement = rdb.prepareStatement(
            "INSERT INTO ACTIVITY (ID, NAME, HOST_KEY, DESCRIPTION, MODUL, "
                + "SCHEDULING_METHOD, DEPARTMENT, ACTIVITY_TYPE, ACTIVITY_TMPL, "
                + "WEEK_PATTERN) "
                + "VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"
        );
        try {
            int paramIdx = 1;
            statement.setString(paramIdx++, activity.getActivityId());
            statement.setString(paramIdx++, activity.getActivityName());
            statement.setString(paramIdx++, RdbUtil.generateHostKey(activity.getActivityId()));
            // The description simply mirrors the activity name.
            statement.setString(paramIdx++, activity.getActivityName());
            if (null != module) {
                statement.setString(paramIdx++, module.getModuleId());
            } else {
                statement.setNull(paramIdx++, Types.VARCHAR);
            }
            switch (schedulingMethod) {
                case SCHEDULED:
                    statement.setInt(paramIdx++, 1);
                    break;
                default:
                    statement.setInt(paramIdx++, 0);
                    break;
            }
            statement.setString(paramIdx++, DEPARTMENT_ID);
            if (null != activityType) {
                statement.setString(paramIdx++, activityType.getTypeId());
            } else {
                statement.setNull(paramIdx++, Types.VARCHAR);
            }
            // No template for this activity (ACTIVITY_TMPL is null).
            statement.setNull(paramIdx++, Types.VARCHAR);
            statement.setString(paramIdx++, WEEK_PATTERN);
            statement.executeUpdate();
        } finally {
            statement.close();
        }
        return activity;
    }
    /**
     * Creates an activity type in the test reporting database.
     *
     * @param rdb a connection to the TEST reporting database.
     * @param typeName the name of the activity type.
     * @param idSource the RDB ID generator to use for ID values.
     * @return the newly generated activity type, for reference.
     * @throws SQLException if there was a problem communicating with the reporting
     * database.
     */
    public static ActivityType createTestActivityType(final Connection rdb, final String typeName,
            final RdbIdSource idSource)
        throws SQLException {
        final ActivityType activityType = new ActivityType();
        activityType.setTypeId(idSource.getId());
        activityType.setTypeName(typeName);
        final PreparedStatement statement = rdb.prepareStatement(
            "Insert into ACTIVITYTYPES (ID,NAME,HOST_KEY,DEPARTMENT) "
                + "VALUES (?,?,?,?)");
        try {
            int paramIdx = 1;
            statement.setString(paramIdx++, activityType.getTypeId());
            statement.setString(paramIdx++, activityType.getTypeName());
            statement.setString(paramIdx++, RdbUtil.generateHostKey(activityType.getTypeId()));
            statement.setString(paramIdx++, DEPARTMENT_ID);
            statement.executeUpdate();
        } finally {
            statement.close();
        }
        return activityType;
    }
    /**
     * Creates an activity template in the test reporting database.
     *
     * @param rdb a connection to the TEST reporting database.
     * @param module the module this activity template belongs to.
     * @param activityType the default type for activities generated from this
     * template.
     * @param templateName the name of the activity template.
     * @param forVle how to mark whether this activity template is intended for
     * synchronisation to the VLE.
     * @param idSource the RDB ID generator to use for ID values.
     * @return the newly generated activity template, for reference.
     * @throws SQLException if there was a problem communicating with the reporting
     * database.
     */
    public static ActivityTemplate createTestActivityTemplate(final Connection rdb, final Module module,
            final ActivityType activityType, final String templateName,
            final TemplateForVle forVle, final RdbIdSource idSource)
        throws SQLException {
        final ActivityTemplate template = new ActivityTemplate();
        template.setTemplateId(idSource.getId());
        template.setTemplateName(templateName);
        // USER_TEXT_5 carries the "not for VLE" marker; null (the default)
        // means the template is synchronised to the VLE.
        switch (forVle) {
            case NOT_FOR_VLE:
                template.setUserText5("Not for VLE");
                break;
            case FOR_VLE:
            default:
                template.setUserText5(null);
                break;
        }
        final PreparedStatement statement = rdb.prepareStatement(
            "Insert into TEMPLATE (ID,NAME,HOST_KEY,DEPARTMENT,ACTIVITY_TYPE,"
                + "MODUL,USER_TEXT_5,WEEK_PATTERN) "
                + "VALUES (?,?,?,?,?,?,?,?)");
        try {
            int paramIdx = 1;
            statement.setString(paramIdx++, template.getTemplateId());
            statement.setString(paramIdx++, template.getTemplateName());
            statement.setString(paramIdx++, RdbUtil.generateHostKey(template.getTemplateId()));
            statement.setString(paramIdx++, DEPARTMENT_ID);
            statement.setString(paramIdx++, activityType.getTypeId());
            statement.setString(paramIdx++, module.getModuleId());
            statement.setString(paramIdx++, template.getUserText5());
            statement.setString(paramIdx++, WEEK_PATTERN);
            statement.executeUpdate();
        } finally {
            statement.close();
        }
        return template;
    }
    /**
     * Constructs a test module in the reporting database, using the default
     * test course/occurrence/semester codes, and returns minimal data about it.
     *
     * @param rdb a connection to the reporting database.
     * @param academicYear the academic year the module will belong to.
     * @param idSource an ID generator.
     * @return the new module.
     * @throws SQLException if there was a problem communicating with the database.
     */
    public static Module createTestModule(final Connection rdb, final AcademicYearCode academicYear,
            final RdbIdSource idSource)
        throws SQLException {
        return RdbUtil.createTestModule(rdb, academicYear, TEST_COURSE_CODE,
            TEST_OCCURRENCE, TEST_SEMESTER, idSource);
    }
    /**
     * Constructs a test module in the reporting database, and returns minimal
     * data about it.
     *
     * @param rdb a connection to the reporting database.
     * @param academicYear the academic year the module will belong to.
     * @param courseCode the EUCLID course code for the module.
     * @param occurrence the EUCLID occurrence code for the module.
     * @param semester the EUCLID semester (period) code for the module.
     * @param idSource an ID generator.
     * @return the new module.
     * @throws SQLException if there was a problem communicating with the database.
     */
    public static Module createTestModule(final Connection rdb, final AcademicYearCode academicYear,
            final String courseCode, final String occurrence, final String semester, final RdbIdSource idSource)
        throws SQLException {
        final TimetablingCourseCode timetablingCourseCode
            = TimetablingCourseCode.buildCode(courseCode, occurrence, semester);
        final String learnAcademicYear = academicYear.toString().replace("/", "-");
        final Module module = new Module();
        module.setModuleId(idSource.getId());
        module.setTimetablingCourseCode(timetablingCourseCode.toString());
        module.setTimetablingModuleName("Test module "
            + module.getModuleId());
        module.setCacheCourseCode(courseCode);
        // Fix: the semester and occurrence values were previously transposed
        // into each other's cache fields.
        module.setCacheSemesterCode(semester);
        module.setCacheOccurrenceCode(occurrence);
        module.setLearnAcademicYear(learnAcademicYear);
        module.setLearnCourseCode(courseCode + learnAcademicYear + occurrence + semester);
        final PreparedStatement statement = rdb.prepareStatement(
            "Insert into MODULE (ID,NAME,HOST_KEY,DESCRIPTION,DEPARTMENT,LINK_SIZE,"
                + "PLANNED_SIZE,CREDIT_PROVIDED,RESERVED_SIZE,WEEK_PATTERN,STARTS_PREFS,"
                + "USAGE_PREFS,BASE_AVAILABILITY,USER_TEXT_1,USER_TEXT_2,USER_TEXT_3,"
                + "USER_TEXT_4,USER_TEXT_5,OBSOLETEFROM,LATESTTRANSACTION,WEEKPATTERNLABEL) "
                + "VALUES (?,?,?,?,?,0,0,20,0, "
                + "?,'81A890B186AA85CF1C90FD83AA4D3360',"
                + "'81A890B186AA85CF1C90FD83AA4D3360','81A890B186AA85CF1C90FD83AA4D5D02',"
                + "null,?,null,null,null,-2147483646,240,'N/A - Unbookable 13/14-N/A - Unbookable 13/14')");
        try {
            int paramIdx = 1;
            statement.setString(paramIdx++, module.getModuleId());
            statement.setString(paramIdx++, module.getTimetablingModuleName());
            // Both the host key and the description are the timetabling
            // course code for modules.
            statement.setString(paramIdx++, timetablingCourseCode.toString());
            statement.setString(paramIdx++, timetablingCourseCode.toString());
            statement.setString(paramIdx++, DEPARTMENT_ID);
            statement.setString(paramIdx++, WEEK_PATTERN);
            // USER_TEXT_2 holds the timetabling academic year (see
            // updateModuleAyr()).
            statement.setString(paramIdx++, academicYear.toString());
            statement.executeUpdate();
        } finally {
            statement.close();
        }
        return module;
    }
    /**
     * Really ugly kludge to generate something a bit like a host key.
     *
     * @param entityId an S+ 32 character ID.
     * @return a host key built from the ID's last six characters.
     */
    protected static String generateHostKey(final String entityId) {
        final String subpart = entityId.substring(entityId.length() - 6);
        return "#SPLUS" + subpart;
    }
    /**
     * Changes the academic year listed against a module in the reporting database.
     *
     * @param rdb a connection to the reporting database.
     * @param academicYearCode the new academic year code.
     * @param testModule the module to change.
     * @return the number of records affected.
     * @throws SQLException if there was a problem communicating with the database.
     */
    public static int updateModuleAyr(final Connection rdb, final AcademicYearCode academicYearCode,
            final Module testModule)
        throws SQLException {
        testModule.setTimetablingAcademicYear(academicYearCode.toString());
        final PreparedStatement statement = rdb.prepareStatement(
            "UPDATE MODULE SET USER_TEXT_2=? "
                + "WHERE ID=?");
        try {
            statement.setString(1, academicYearCode.toString());
            statement.setString(2, testModule.getModuleId());
            return statement.executeUpdate();
        } finally {
            statement.close();
        }
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.plugin.jdbc;
import com.facebook.presto.spi.ColumnMetadata;
import com.facebook.presto.spi.ColumnHandle;
import com.facebook.presto.spi.ConnectorInsertTableHandle;
import com.facebook.presto.spi.ConnectorMetadata;
import com.facebook.presto.spi.ConnectorOutputTableHandle;
import com.facebook.presto.spi.ConnectorSession;
import com.facebook.presto.spi.ConnectorTableHandle;
import com.facebook.presto.spi.ConnectorTableMetadata;
import com.facebook.presto.spi.PrestoException;
import com.facebook.presto.spi.SchemaTableName;
import com.facebook.presto.spi.SchemaTablePrefix;
import com.facebook.presto.spi.TableNotFoundException;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import io.airlift.slice.Slice;
import javax.inject.Inject;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import static com.facebook.presto.plugin.jdbc.Types.checkType;
import static com.facebook.presto.spi.StandardErrorCode.NOT_SUPPORTED;
import static com.facebook.presto.spi.StandardErrorCode.PERMISSION_DENIED;
import static com.google.common.base.Preconditions.checkNotNull;
/**
 * {@link ConnectorMetadata} implementation that delegates all schema, table
 * and column information to a {@link JdbcClient}. DDL support is limited to
 * CREATE TABLE AS (via begin/commitCreateTable) and, when enabled in the
 * config, DROP TABLE; everything else is rejected.
 */
public class JdbcMetadata
        implements ConnectorMetadata
{
    private final JdbcClient jdbcClient;
    // Whether DROP TABLE is permitted for this catalog (from config).
    private final boolean allowDropTable;
    @Inject
    public JdbcMetadata(JdbcConnectorId connectorId, JdbcClient jdbcClient, JdbcMetadataConfig config)
    {
        checkNotNull(config, "config is null");
        this.allowDropTable = config.isAllowDropTable();
        this.jdbcClient = checkNotNull(jdbcClient, "client is null");
    }
    @Override
    public List<String> listSchemaNames(ConnectorSession session)
    {
        // Snapshot the client's schema names into an immutable list.
        return ImmutableList.copyOf(jdbcClient.getSchemaNames());
    }
    @Override
    public JdbcTableHandle getTableHandle(ConnectorSession session, SchemaTableName tableName)
    {
        // May be null when the table does not exist.
        return jdbcClient.getTableHandle(tableName);
    }
    @Override
    public ConnectorTableMetadata getTableMetadata(ConnectorSession session, ConnectorTableHandle table)
    {
        JdbcTableHandle jdbcTable = checkType(table, JdbcTableHandle.class, "tableHandle");
        ImmutableList.Builder<ColumnMetadata> metadataBuilder = ImmutableList.builder();
        for (JdbcColumnHandle jdbcColumn : jdbcClient.getColumns(jdbcTable)) {
            metadataBuilder.add(jdbcColumn.getColumnMetadata());
        }
        return new ConnectorTableMetadata(jdbcTable.getSchemaTableName(), metadataBuilder.build());
    }
    @Override
    public List<SchemaTableName> listTables(ConnectorSession session, String schemaNameOrNull)
    {
        // A null schema name means "all schemas".
        return jdbcClient.getTableNames(schemaNameOrNull);
    }
    @Override
    public ColumnHandle getSampleWeightColumnHandle(ConnectorSession session, ConnectorTableHandle tableHandle)
    {
        // Sampled tables are not supported by this connector.
        return null;
    }
    @Override
    public Map<String, ColumnHandle> getColumnHandles(ConnectorSession session, ConnectorTableHandle tableHandle)
    {
        JdbcTableHandle jdbcTable = checkType(tableHandle, JdbcTableHandle.class, "tableHandle");
        ImmutableMap.Builder<String, ColumnHandle> handlesBuilder = ImmutableMap.builder();
        for (JdbcColumnHandle jdbcColumn : jdbcClient.getColumns(jdbcTable)) {
            handlesBuilder.put(jdbcColumn.getColumnMetadata().getName(), jdbcColumn);
        }
        return handlesBuilder.build();
    }
    @Override
    public Map<SchemaTableName, List<ColumnMetadata>> listTableColumns(ConnectorSession session, SchemaTablePrefix prefix)
    {
        ImmutableMap.Builder<SchemaTableName, List<ColumnMetadata>> result = ImmutableMap.builder();
        for (SchemaTableName tableName : listTables(session, prefix.getSchemaName())) {
            try {
                JdbcTableHandle tableHandle = jdbcClient.getTableHandle(tableName);
                if (tableHandle != null) {
                    result.put(tableName, getTableMetadata(session, tableHandle).getColumns());
                }
            }
            catch (TableNotFoundException ignored) {
                // table disappeared during listing operation
            }
        }
        return result.build();
    }
    @Override
    public ColumnMetadata getColumnMetadata(ConnectorSession session, ConnectorTableHandle tableHandle, ColumnHandle columnHandle)
    {
        // Validate the table handle type even though only the column is used.
        checkType(tableHandle, JdbcTableHandle.class, "tableHandle");
        return checkType(columnHandle, JdbcColumnHandle.class, "columnHandle").getColumnMetadata();
    }
    @Override
    public boolean canCreateSampledTables(ConnectorSession session)
    {
        return false;
    }
    @Override
    public void createTable(ConnectorSession session, ConnectorTableMetadata tableMetadata)
    {
        throw new PrestoException(NOT_SUPPORTED, "This connector does not support creating tables");
    }
    @Override
    public void dropTable(ConnectorSession session, ConnectorTableHandle tableHandle)
    {
        // DROP TABLE is gated behind a catalog-level config flag.
        if (!allowDropTable) {
            throw new PrestoException(PERMISSION_DENIED, "DROP TABLE is disabled in this catalog");
        }
        jdbcClient.dropTable(checkType(tableHandle, JdbcTableHandle.class, "tableHandle"));
    }
    @Override
    public ConnectorOutputTableHandle beginCreateTable(ConnectorSession session, ConnectorTableMetadata tableMetadata)
    {
        return jdbcClient.beginCreateTable(tableMetadata);
    }
    @Override
    public void commitCreateTable(ConnectorSession session, ConnectorOutputTableHandle tableHandle, Collection<Slice> fragments)
    {
        JdbcOutputTableHandle outputHandle = checkType(tableHandle, JdbcOutputTableHandle.class, "tableHandle");
        jdbcClient.commitCreateTable(outputHandle, fragments);
    }
    @Override
    public void renameTable(ConnectorSession session, ConnectorTableHandle tableHandle, SchemaTableName newTableName)
    {
        throw new PrestoException(NOT_SUPPORTED, "This connector does not support renaming tables");
    }
    @Override
    public ConnectorInsertTableHandle beginInsert(ConnectorSession session, ConnectorTableHandle tableHandle)
    {
        throw new PrestoException(NOT_SUPPORTED, "This connector does not support inserts");
    }
    @Override
    public void commitInsert(ConnectorSession session, ConnectorInsertTableHandle insertHandle, Collection<Slice> fragments)
    {
        // Unreachable in practice since beginInsert always throws.
        throw new UnsupportedOperationException();
    }
    @Override
    public void createView(ConnectorSession session, SchemaTableName viewName, String viewData, boolean replace)
    {
        throw new PrestoException(NOT_SUPPORTED, "This connector does not support creating views");
    }
    @Override
    public void dropView(ConnectorSession session, SchemaTableName viewName)
    {
        throw new PrestoException(NOT_SUPPORTED, "This connector does not support dropping views");
    }
    @Override
    public List<SchemaTableName> listViews(ConnectorSession session, String schemaNameOrNull)
    {
        // Views are never exposed by this connector.
        return ImmutableList.of();
    }
    @Override
    public Map<SchemaTableName, String> getViews(ConnectorSession session, SchemaTablePrefix prefix)
    {
        return ImmutableMap.of();
    }
}
| |
/*=========================================================================
* Copyright (c) 2010-2014 Pivotal Software, Inc. All Rights Reserved.
* This product is protected by U.S. and international copyright
* and intellectual property laws. Pivotal products are covered by
* one or more patents listed at http://www.pivotal.io/patents.
*=========================================================================
*/
/*=========================================================================
* Copyright (c) 2008-2014 Pivotal Software, Inc. All Rights Reserved.
 * This product is protected by U.S. and international copyright
* and intellectual property laws. Pivotal products are covered by
* one or more patents listed at http://www.pivotal.io/patents.
*=========================================================================
*/
//
// IndexTrackingQueryObserver.java
// gemfire
//
package com.gemstone.gemfire.cache.query.internal;
import it.unimi.dsi.fastutil.objects.Object2ObjectOpenHashMap;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import com.gemstone.gemfire.cache.query.Index;
import com.gemstone.gemfire.cache.query.internal.index.CompactMapRangeIndex;
import com.gemstone.gemfire.cache.query.internal.index.MapRangeIndex;
import com.gemstone.gemfire.internal.cache.PartitionedRegionQueryEvaluator.TestHook;
/**
* Verbose Index information
*
* @see DefaultQuery
* @author Eric Zoerner, Shobhit Agarwal
*/
public class IndexTrackingQueryObserver extends QueryObserverAdapter {
  // Per-thread map of index name (or "name-mapKey" for map-range indexes)
  // to IndexInfo, used to aggregate verbose index statistics across buckets.
  private static final ThreadLocal indexInfo = new ThreadLocal();
  // The index consulted by the most recent beforeIndexLookup() call on this
  // thread; afterIndexLookup() uses it to record the result size.
  private static final ThreadLocal lastIndexUsed = new ThreadLocal();
  // Optional test instrumentation hook; hook(n) is invoked at numbered
  // observer points when set.
  private volatile TestHook th;
public void beforeIndexLookup(Index index, int oper, Object key) {
Map<String, IndexInfo> indexMap = (Map)this.indexInfo.get();
if (indexMap == null) {
indexMap = new HashMap<String, IndexInfo>();
this.indexInfo.set(indexMap);
}
IndexInfo iInfo;
String indexName;
//Dont create new IndexInfo if one is already there in map for aggregation
//of results later for whole partition region on this node.
if(index instanceof MapRangeIndex || index instanceof CompactMapRangeIndex){
indexName = index.getName()+ "-"+((Object[])key)[1];
} else {
indexName = index.getName();
}
if(indexMap.containsKey(indexName)){
iInfo = indexMap.get(indexName);
} else {
iInfo = new IndexInfo();
}
iInfo.addRegionId(index.getRegion().getFullPath());
indexMap.put(indexName, iInfo);
this.lastIndexUsed.set(index);
if(th != null){
th.hook(1);
}
}
public void beforeIndexLookup(Index index, int lowerBoundOperator,
Object lowerBoundKey, int upperBoundOperator, Object upperBoundKey,
Set NotEqualKeys) {
Map<String, IndexInfo> indexMap = (Map)this.indexInfo.get();
if (indexMap == null) {
indexMap = new HashMap<String, IndexInfo>();
this.indexInfo.set(indexMap);
}
IndexInfo iInfo;
//Dont create new IndexInfo if one is already there in map for aggregation
//of results later for whole partition region on this node.
if(indexMap.containsKey(index.getName())){
iInfo = indexMap.get(index.getName());
} else {
iInfo = new IndexInfo();
}
iInfo.addRegionId(index.getRegion().getFullPath());
indexMap.put(index.getName(), iInfo);
this.lastIndexUsed.set(index);
if(th != null){
th.hook(2);
}
}
/**
 * Appends the size of the lookup result to the IndexInfo recorded by the
 * matching beforeIndexLookup() call, keyed by the full path of the region
 * (or bucket) the index covers, then clears the last-used-index marker.
 *
 * @param results the index lookup result; may be null if the lookup raised
 *                an exception (per the QueryObserver javadocs), in which
 *                case nothing is recorded
 */
public void afterIndexLookup(Collection results) {
    if (results == null) {
        // According to javadocs in QueryObserver, can be null if there
        // is an exception.
        return;
    }
    Map indexMap = (Map) indexInfo.get();
    Index index = (Index) lastIndexUsed.get();
    // Guard against a null map: reset() may have cleared the usage map while
    // lastIndexUsed was still set, which previously caused an NPE here.
    // (The local was also renamed; it used to shadow the static indexInfo.)
    if (index != null && indexMap != null) {
        // NOTE(review): for map indexes beforeIndexLookup records the entry
        // under "name-key", so this plain-name lookup may find nothing and
        // the result size is silently dropped — confirm that is intended.
        IndexInfo iInfo = (IndexInfo) indexMap.get(index.getName());
        if (iInfo != null) {
            iInfo.getResults().put(index.getRegion().getFullPath(),
                Integer.valueOf(results.size()));
        }
    }
    lastIndexUsed.set(null);
    if (th != null) {
        th.hook(3);
    }
}
/**
 * Clears the per-thread index usage state. This should be called only when
 * one query execution on one gemfire node is done, NOT for each bucket.
 */
public void reset() {
    if (th != null) {
        th.hook(4);
    }
    // remove() instead of set(null): subsequent get() still yields null, but
    // the ThreadLocal entry itself is released, avoiding stale entries
    // accumulating on pooled threads.
    indexInfo.remove();
}
/**
 * Installs the given {index name -> IndexInfo} map as this thread's index
 * usage state, replacing whatever was recorded before.
 *
 * @param indexInfoMap map of index name to IndexInfo to install
 */
public void setIndexInfo(Map indexInfoMap) {
    indexInfo.set(indexInfoMap);
}
/**
 * Returns the {index name -> IndexInfo} map recorded for the current
 * thread's query execution.
 *
 * @return the per-thread usage map, or an immutable empty map if no index
 *         has been used on this thread
 */
public Map getUsedIndexes() {
    Map map = (Map) indexInfo.get();
    if (map == null) {
        // Typed, idiomatic replacement for the raw Collections.EMPTY_MAP.
        return Collections.emptyMap();
    }
    return map;
}
/**
 * Installs a test instrumentation hook that is invoked at the observer's
 * checkpoints (hook ids 1-4).
 *
 * @param testHook the hook to install, or null to disable instrumentation
 */
public void setTestHook(TestHook testHook) {
    th = testHook;
}
/**
* This class contains information related to buckets and results found in
* the index on those buckets.
* @author Shobhit Agarwal
*
*/
public class IndexInfo{
// A {RegionFullPath, results} map for an Index lookup on a Region.
private Map<String, Integer> results = new Object2ObjectOpenHashMap();
public Map getResults() {
return results;
}
/**
* Adds a results map (mostly a bucket index lookup results)
* to the "this" IndexInfo.
* @param rslts
*/
public void addResults(Map rslts) {
for(Object obj : rslts.entrySet()){
Entry<String, Integer> ent = (Entry)obj;
this.results.put(ent.getKey(), ent.getValue());
}
}
public Set getRegionIds() {
return results.keySet();
}
public void addRegionId(String regionId) {
this.results.put(regionId, 0);
}
@Override
public String toString() {
int total =0;
for (Integer i: results.values()){
total+=i.intValue();
}
return "(Results: "+ total +")";
}
public void merge(IndexInfo src) {
this.addResults(src.getResults());
}
}
/**
 * Returns an {index name -> result count} map restricted to the indexes
 * that were consulted on the region identified by fullPath.
 *
 * @param fullPath full path of the region (or bucket region) of interest
 * @return map of index name to the result count recorded for that region;
 *         empty if no index usage has been recorded on this thread
 */
public Map getUsedIndexes(String fullPath) {
    @SuppressWarnings("unchecked")
    Map<String, IndexInfo> map = (Map<String, IndexInfo>) indexInfo.get();
    if (map == null) {
        return Collections.emptyMap();
    }
    Map newMap = new HashMap();
    // Typed iteration over the entry set; entry sets never contain null
    // entries, so the previous per-entry null check was dead code.
    for (Map.Entry<String, IndexInfo> entry : map.entrySet()) {
        if (entry.getValue().getRegionIds().contains(fullPath)) {
            newMap.put(entry.getKey(), entry.getValue().getResults().get(fullPath));
        }
    }
    return newMap;
}
/**
 * Returns the currently installed test hook, or null if instrumentation is
 * disabled.
 */
public TestHook getTestHook() {
    return th;
}
}
| |
package org.transpeg;
/**
 * Hand-written recursive-descent parser for PEG grammar definitions.
 *
 * A rule has the form {@code name <- expr}, terminated by ';'. The
 * expression syntax handled by parseSingleExpr supports: nonterminal
 * references, '.' (any char), '$'/'$$' (setter), '&' and '!' (predicates),
 * '#'/'@' (node labels), '(...)' grouping, '[...]' character classes,
 * quoted string literals, '{...}' object construction, ordered choice '/'
 * and the postfix repetitions '*', '+' and '?'.
 *
 * Position handling (match/consume/rollback) comes from SourceContext,
 * which is defined elsewhere; comments about it below are inferred from
 * usage in this class.
 */
public class PegParserParser extends SourceContext {
    /** Parses the given sub-range [startIndex, endIndex) of the source. */
    public PegParserParser(BunSource source, int startIndex, int endIndex) {
        super(source, startIndex, endIndex);
    }

    /** Parses the entire source text. */
    public PegParserParser(BunSource source) {
        super(source, 0, source.sourceText.length());
    }

    /** Creates a child parser over a sub-range of this parser's source. */
    private PegParserParser subParser(int startIndex, int endIndex) {
        return new PegParserParser(this.source, startIndex, endIndex);
    }

    /**
     * Skips rule separators (';') and reports whether another rule follows.
     */
    public boolean hasRule() {
        this.matchZeroMore(UniCharset.SemiColon);
        return this.hasChar();
    }

    /**
     * Parses one rule of the form {@code name <- expr}.
     *
     * @return the parsed rule, or null on a syntax error (an error message
     *         has already been reported via showErrorMessage)
     */
    public PegRule parseRule() {
        int startIndex = this.getPosition();
        // Rule names start with a letter followed by name symbols.
        if(!this.match(UniCharset.Letter)) {
            this.showErrorMessage("expected name");
            return null;
        }
        this.matchZeroMore(UniCharset.NameSymbol);
        String label = this.substring(startIndex, this.getPosition());
        this.matchZeroMore(UniCharset.WhiteSpaceNewLine);
        if(!this.match('<', '-')) {
            this.showErrorMessage("expected <-");
            return null;
        }
        Peg p = this.parsePegExpr(label);
        if(p != null) {
            return new PegRule(label, p);
        }
        return null;
    }

    /**
     * Scans forward to the unescaped occurrence of endChar.
     *
     * @return the position of the closing char (the parser is left just past
     *         it), or -1 if the string is unterminated
     */
    private int skipQuotedString(char endChar) {
        for(; this.hasChar(); this.consume(1)) {
            char ch = this.getChar();
            if(ch == endChar) {
                int index = this.getPosition();
                this.consume(1);
                return index;
            }
            if(ch == '\\') {
                this.consume(1); // skip the escaped character as well
            }
        }
        return -1;
    }

    /**
     * Scans forward to the close bracket matching an already-consumed open
     * bracket, honoring nesting and skipping over quoted strings and
     * character classes.
     *
     * @return the position of the matching closeChar, or -1 on error (an
     *         error message may already have been reported)
     */
    private int skipGroup(int openChar, int closeChar) {
        int order = 1; // nesting depth; the opening bracket was consumed by the caller
        while(this.hasChar()) {
            char ch = this.nextChar();
            if(ch == closeChar) {
                order = order - 1;
                if(order == 0) {
                    // nextChar() already advanced past it, so back up by one.
                    return this.getPosition() - 1;
                }
            }
            if(ch == openChar) {
                order = order + 1;
            }
            // Brackets inside string literals must not affect the depth.
            if(ch == '"' || ch == '\'') {
                if(this.skipQuotedString(ch) == -1) {
                    return -1;
                }
            }
            // Likewise for brackets inside character classes.
            if(ch == '[') {
                int pos = this.getPosition() - 1;
                if(this.skipQuotedString(']') == -1) {
                    this.rollback(pos);
                    this.showErrorMessage("unclosed [");
                    return -1;
                }
            }
        }
        return -1;
    }

    /**
     * Wraps left in a repetition node if a postfix '*', '+' or '?' follows;
     * otherwise returns left unchanged (also when left is null).
     */
    private Peg parsePostfix(String leftName, Peg left) {
        if(left != null) {
            if(this.match('*')) {
                return new PegZeroMoreExpr(leftName, left);
            }
            if(this.match('+')) {
                return new PegOneMoreExpr(leftName, left);
            }
            if(this.match('?')) {
                return new PegOptionalExpr(leftName, left);
            }
        }
        return left;
    }

    /** Returns the source text in [startIndex, endIndex). */
    private String substring(int startIndex, int endIndex) {
        return this.source.substring(startIndex, endIndex);
    }

    /**
     * Parses a single (atomic, possibly repeated) PEG term.
     *
     * @param leftLabel the name of the rule being parsed, threaded through
     *                  to every Peg node
     * @return the parsed term, or null at the end of an expression or on a
     *         syntax error
     */
    private Peg parseSingleExpr(String leftLabel) {
        Peg right = null;
        this.matchZeroMore(UniCharset.WhiteSpaceNewLine);
        if(this.match(';')) {
            // End of rule: push the ';' back so parsePegExpr can consume it.
            this.consume(-1);
            return null;
        }
        if(this.match(UniCharset.Letter)) {
            // Nonterminal reference: letter already consumed, so the name
            // starts one position back.
            int startIndex = this.getPosition() - 1;
            int endIndex = this.matchZeroMore(UniCharset.NameSymbol);
            right = new PegLabel(leftLabel, this.substring(startIndex, endIndex));
            right.setSource(this.source, startIndex);
            return this.parsePostfix(leftLabel, right);
        }
        if(this.match('.')) {
            // '.' matches any single character.
            right = new PegAny(leftLabel);
            return this.parsePostfix(leftLabel, right);
        }
        if(this.match('$')) {
            // Setter; a doubled '$$' sets allowError.
            boolean allowError = false;
            if(this.match('$')) {
                allowError = true;
            }
            right = this.parseSingleExpr(leftLabel);
            if(right != null) {
                right = new PegSetter(leftLabel, right, allowError);
            }
            return right;
        }
        if(this.match('&')) {
            // And-predicate: lookahead that must succeed.
            right = this.parseSingleExpr(leftLabel);
            if(right != null) {
                right = new PegAndPredicate(leftLabel, right);
            }
            return right;
        }
        if(this.match('!')) {
            // Not-predicate: lookahead that must fail.
            right = this.parseSingleExpr(leftLabel);
            if(right != null) {
                right = new PegNotPredicate(leftLabel, right);
            }
            return right;
        }
        if(this.match('#') || this.match('@')) {
            // Node label; '#' and '@' are treated identically, and the label
            // text excludes the marker character. No postfix is applied here.
            int startIndex = this.getPosition();
            this.matchZeroMore(UniCharset.NodeLabel);
            int endIndex = this.getPosition();
            right = new PegObjectName(leftLabel, this.substring(startIndex, endIndex));
            return right;
        }
        if(this.match('(')) {
            // Group: parse the bracketed span with a sub-parser.
            int startIndex = this.getPosition();
            int endIndex = this.skipGroup('(', ')');
            if(endIndex == -1) {
                this.showErrorMessage("unclosed ')'");
                return null;
            }
            PegParserParser sub = this.subParser(startIndex, endIndex);
            right = sub.parsePegExpr(leftLabel);
            if(right != null) {
                right = this.parsePostfix(leftLabel, right);
            }
            return right;
        }
        if(this.match('[')) {
            // Character class.
            int startIndex = this.getPosition();
            int endIndex = this.skipQuotedString(']');
            if(endIndex == -1) {
                this.showErrorMessage("unclosed ']'");
                return null;
            }
            right = new PegCharacter(leftLabel, this.source.substring(startIndex, endIndex));
            return this.parsePostfix(leftLabel, right);
        }
        if(this.match('"')) {
            // Double-quoted string literal; escapes are resolved by
            // _UnquoteString.
            int startIndex = this.getPosition();
            int endIndex = this.skipQuotedString('"');
            if(endIndex == -1) {
                this.showErrorMessage("unclosed \"");
                return null;
            }
            String s = this.source.substring(startIndex, endIndex);
            right = new PegString(leftLabel, UniCharset._UnquoteString(s));
            return this.parsePostfix(leftLabel, right);
        }
        if(this.match('\'')) {
            // Single-quoted string literal, handled like the double-quoted case.
            int startIndex = this.getPosition();
            int endIndex = this.skipQuotedString('\'');
            if(endIndex == -1) {
                this.showErrorMessage("unclosed '");
                return null;
            }
            String s = this.source.substring(startIndex, endIndex);
            right = new PegString(leftLabel, UniCharset._UnquoteString(s));
            return this.parsePostfix(leftLabel, right);
        }
        if(this.match('{')) {
            // Object construction; a leading "$ " or "$\n" marks a left-join.
            boolean leftJoin = false;
            if(this.match('$', ' ') || this.match('$', '\n')) {
                leftJoin = true;
            }
            int startIndex = this.getPosition();
            int endIndex = this.skipGroup('{', '}');
            if(endIndex == -1) {
                this.rollback(startIndex);
                this.showErrorMessage("unclosed '}'");
                return null;
            }
            PegParserParser sub = this.subParser(startIndex, endIndex);
            right = sub.parsePegExpr(leftLabel);
            right = new PegNewObject(leftLabel, leftJoin, right);
            right = this.parsePostfix(leftLabel, right);
            return right;
        }
        this.showErrorMessage("unexpected character '" + this.getChar() + "'");
        return right;
    }

    /**
     * Parses a sequence of terms up to a choice separator '/' or the end of
     * input. On '/', the separator is consumed and the sequence parsed so
     * far is returned; the caller (parsePegExpr) builds the choice.
     */
    private final Peg parseSequenceExpr(String leftLabel) {
        Peg left = this.parseSingleExpr(leftLabel);
        if(left == null) {
            return left;
        }
        this.matchZeroMore(UniCharset.WhiteSpaceNewLine);
        if(this.hasChar()) {
            this.matchZeroMore(UniCharset.WhiteSpaceNewLine);
            char ch = this.getChar();
            if(ch == '/') {
                this.consume(1);
                this.matchZeroMore(UniCharset.WhiteSpaceNewLine);
                return left;
            }
            // No separator: the next term extends the sequence.
            Peg right = this.parseSequenceExpr(leftLabel);
            if(right != null) {
                left = left.append(right);
            }
        }
        return left;
    }

    /**
     * Parses a full PEG expression: one or more sequences combined with the
     * ordered-choice operator '/', terminated by ';' or end of input.
     */
    public final Peg parsePegExpr(String leftLabel) {
        Peg left = this.parseSequenceExpr(leftLabel);
        this.matchZeroMore(UniCharset.WhiteSpaceNewLine);
        if(this.match(';')) {
            return left;
        }
        if(this.hasChar()) {
            // parseSequenceExpr consumed a '/', so what follows is the
            // right-hand alternative of a choice.
            Peg right = this.parsePegExpr(leftLabel);
            if(right != null) {
                return new PegChoice(leftLabel, left, right);
            }
        }
        return left;
    }

    /**
     * Consumes the next two characters if they equal ch and ch2; otherwise
     * consumes nothing.
     */
    private final boolean match(char ch, char ch2) {
        if(this.getChar(0) == ch && this.getChar(1) == ch2) {
            this.consume(2);
            return true;
        }
        return false;
    }
}
/**
 * A named grammar rule: binds a rule label to its parsed PEG expression.
 */
class PegRule {
    String label; // rule name (left-hand side of '<-')
    Peg peg;      // parsed right-hand-side expression

    public PegRule(String label, Peg p) {
        this.label = label;
        this.peg = p;
    }
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.dns.fluent;
import com.azure.core.annotation.ReturnType;
import com.azure.core.annotation.ServiceMethod;
import com.azure.core.http.rest.PagedFlux;
import com.azure.core.http.rest.PagedIterable;
import com.azure.core.http.rest.Response;
import com.azure.core.management.polling.PollResult;
import com.azure.core.util.Context;
import com.azure.core.util.polling.PollerFlux;
import com.azure.core.util.polling.SyncPoller;
import com.azure.resourcemanager.dns.fluent.models.ZoneInner;
import com.azure.resourcemanager.resources.fluentcore.collection.InnerSupportsDelete;
import com.azure.resourcemanager.resources.fluentcore.collection.InnerSupportsGet;
import com.azure.resourcemanager.resources.fluentcore.collection.InnerSupportsListing;
import java.nio.ByteBuffer;
import java.util.Map;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
/** An instance of this class provides access to all the operations defined in ZonesClient. */
public interface ZonesClient
extends InnerSupportsGet<ZoneInner>, InnerSupportsListing<ZoneInner>, InnerSupportsDelete<Void> {
/**
* Creates or updates a DNS zone. Does not modify DNS records within the zone.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param zoneName The name of the DNS zone (without a terminating dot).
* @param parameters Describes a DNS zone.
* @param ifMatch The etag of the DNS zone. Omit this value to always overwrite the current zone. Specify the
* last-seen etag value to prevent accidentally overwriting any concurrent changes.
* @param ifNoneMatch Set to '*' to allow a new DNS zone to be created, but to prevent updating an existing zone.
* Other values will be ignored.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return describes a DNS zone.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
Mono<Response<ZoneInner>> createOrUpdateWithResponseAsync(
String resourceGroupName, String zoneName, ZoneInner parameters, String ifMatch, String ifNoneMatch);
/**
* Creates or updates a DNS zone. Does not modify DNS records within the zone.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param zoneName The name of the DNS zone (without a terminating dot).
* @param parameters Describes a DNS zone.
* @param ifMatch The etag of the DNS zone. Omit this value to always overwrite the current zone. Specify the
* last-seen etag value to prevent accidentally overwriting any concurrent changes.
* @param ifNoneMatch Set to '*' to allow a new DNS zone to be created, but to prevent updating an existing zone.
* Other values will be ignored.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return describes a DNS zone.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
Mono<ZoneInner> createOrUpdateAsync(
String resourceGroupName, String zoneName, ZoneInner parameters, String ifMatch, String ifNoneMatch);
/**
* Creates or updates a DNS zone. Does not modify DNS records within the zone.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param zoneName The name of the DNS zone (without a terminating dot).
* @param parameters Describes a DNS zone.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return describes a DNS zone.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
Mono<ZoneInner> createOrUpdateAsync(String resourceGroupName, String zoneName, ZoneInner parameters);
/**
* Creates or updates a DNS zone. Does not modify DNS records within the zone.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param zoneName The name of the DNS zone (without a terminating dot).
* @param parameters Describes a DNS zone.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return describes a DNS zone.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
ZoneInner createOrUpdate(String resourceGroupName, String zoneName, ZoneInner parameters);
/**
* Creates or updates a DNS zone. Does not modify DNS records within the zone.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param zoneName The name of the DNS zone (without a terminating dot).
* @param parameters Describes a DNS zone.
* @param ifMatch The etag of the DNS zone. Omit this value to always overwrite the current zone. Specify the
* last-seen etag value to prevent accidentally overwriting any concurrent changes.
* @param ifNoneMatch Set to '*' to allow a new DNS zone to be created, but to prevent updating an existing zone.
* Other values will be ignored.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return describes a DNS zone.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
Response<ZoneInner> createOrUpdateWithResponse(
String resourceGroupName,
String zoneName,
ZoneInner parameters,
String ifMatch,
String ifNoneMatch,
Context context);
/**
* Deletes a DNS zone. WARNING: All DNS records in the zone will also be deleted. This operation cannot be undone.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param zoneName The name of the DNS zone (without a terminating dot).
* @param ifMatch The etag of the DNS zone. Omit this value to always delete the current zone. Specify the last-seen
* etag value to prevent accidentally deleting any concurrent changes.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the completion.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
Mono<Response<Flux<ByteBuffer>>> deleteWithResponseAsync(String resourceGroupName, String zoneName, String ifMatch);
/**
* Deletes a DNS zone. WARNING: All DNS records in the zone will also be deleted. This operation cannot be undone.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param zoneName The name of the DNS zone (without a terminating dot).
* @param ifMatch The etag of the DNS zone. Omit this value to always delete the current zone. Specify the last-seen
* etag value to prevent accidentally deleting any concurrent changes.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the completion.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
PollerFlux<PollResult<Void>, Void> beginDeleteAsync(String resourceGroupName, String zoneName, String ifMatch);
/**
* Deletes a DNS zone. WARNING: All DNS records in the zone will also be deleted. This operation cannot be undone.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param zoneName The name of the DNS zone (without a terminating dot).
* @param ifMatch The etag of the DNS zone. Omit this value to always delete the current zone. Specify the last-seen
* etag value to prevent accidentally deleting any concurrent changes.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the completion.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
SyncPoller<PollResult<Void>, Void> beginDelete(String resourceGroupName, String zoneName, String ifMatch);
/**
* Deletes a DNS zone. WARNING: All DNS records in the zone will also be deleted. This operation cannot be undone.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param zoneName The name of the DNS zone (without a terminating dot).
* @param ifMatch The etag of the DNS zone. Omit this value to always delete the current zone. Specify the last-seen
* etag value to prevent accidentally deleting any concurrent changes.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the completion.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
SyncPoller<PollResult<Void>, Void> beginDelete(
String resourceGroupName, String zoneName, String ifMatch, Context context);
/**
* Deletes a DNS zone. WARNING: All DNS records in the zone will also be deleted. This operation cannot be undone.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param zoneName The name of the DNS zone (without a terminating dot).
* @param ifMatch The etag of the DNS zone. Omit this value to always delete the current zone. Specify the last-seen
* etag value to prevent accidentally deleting any concurrent changes.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the completion.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
Mono<Void> deleteAsync(String resourceGroupName, String zoneName, String ifMatch);
/**
* Deletes a DNS zone. WARNING: All DNS records in the zone will also be deleted. This operation cannot be undone.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param zoneName The name of the DNS zone (without a terminating dot).
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the completion.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
Mono<Void> deleteAsync(String resourceGroupName, String zoneName);
/**
* Deletes a DNS zone. WARNING: All DNS records in the zone will also be deleted. This operation cannot be undone.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param zoneName The name of the DNS zone (without a terminating dot).
* @param ifMatch The etag of the DNS zone. Omit this value to always delete the current zone. Specify the last-seen
* etag value to prevent accidentally deleting any concurrent changes.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
void delete(String resourceGroupName, String zoneName, String ifMatch);
/**
* Deletes a DNS zone. WARNING: All DNS records in the zone will also be deleted. This operation cannot be undone.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param zoneName The name of the DNS zone (without a terminating dot).
* @param ifMatch The etag of the DNS zone. Omit this value to always delete the current zone. Specify the last-seen
* etag value to prevent accidentally deleting any concurrent changes.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
void delete(String resourceGroupName, String zoneName, String ifMatch, Context context);
/**
* Deletes a DNS zone. WARNING: All DNS records in the zone will also be deleted. This operation cannot be undone.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param zoneName The name of the DNS zone (without a terminating dot).
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
void delete(String resourceGroupName, String zoneName);
/**
* Gets a DNS zone. Retrieves the zone properties, but not the record sets within the zone.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param zoneName The name of the DNS zone (without a terminating dot).
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return a DNS zone.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
Mono<Response<ZoneInner>> getByResourceGroupWithResponseAsync(String resourceGroupName, String zoneName);
/**
* Gets a DNS zone. Retrieves the zone properties, but not the record sets within the zone.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param zoneName The name of the DNS zone (without a terminating dot).
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return a DNS zone.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
Mono<ZoneInner> getByResourceGroupAsync(String resourceGroupName, String zoneName);
/**
* Gets a DNS zone. Retrieves the zone properties, but not the record sets within the zone.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param zoneName The name of the DNS zone (without a terminating dot).
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return a DNS zone.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
ZoneInner getByResourceGroup(String resourceGroupName, String zoneName);
/**
* Gets a DNS zone. Retrieves the zone properties, but not the record sets within the zone.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param zoneName The name of the DNS zone (without a terminating dot).
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return a DNS zone.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
Response<ZoneInner> getByResourceGroupWithResponse(String resourceGroupName, String zoneName, Context context);
/**
* Updates a DNS zone. Does not modify DNS records within the zone.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param zoneName The name of the DNS zone (without a terminating dot).
* @param ifMatch The etag of the DNS zone. Omit this value to always overwrite the current zone. Specify the
* last-seen etag value to prevent accidentally overwriting any concurrent changes.
* @param tags Resource tags.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return describes a DNS zone.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
Mono<Response<ZoneInner>> updateWithResponseAsync(
String resourceGroupName, String zoneName, String ifMatch, Map<String, String> tags);
/**
* Updates a DNS zone. Does not modify DNS records within the zone.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param zoneName The name of the DNS zone (without a terminating dot).
* @param ifMatch The etag of the DNS zone. Omit this value to always overwrite the current zone. Specify the
* last-seen etag value to prevent accidentally overwriting any concurrent changes.
* @param tags Resource tags.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return describes a DNS zone.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
Mono<ZoneInner> updateAsync(String resourceGroupName, String zoneName, String ifMatch, Map<String, String> tags);
/**
* Updates a DNS zone. Does not modify DNS records within the zone.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param zoneName The name of the DNS zone (without a terminating dot).
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return describes a DNS zone.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
Mono<ZoneInner> updateAsync(String resourceGroupName, String zoneName);
/**
* Updates a DNS zone. Does not modify DNS records within the zone.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param zoneName The name of the DNS zone (without a terminating dot).
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return describes a DNS zone.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
ZoneInner update(String resourceGroupName, String zoneName);
/**
* Updates a DNS zone. Does not modify DNS records within the zone.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param zoneName The name of the DNS zone (without a terminating dot).
* @param ifMatch The etag of the DNS zone. Omit this value to always overwrite the current zone. Specify the
* last-seen etag value to prevent accidentally overwriting any concurrent changes.
* @param tags Resource tags.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return describes a DNS zone.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
Response<ZoneInner> updateWithResponse(
String resourceGroupName, String zoneName, String ifMatch, Map<String, String> tags, Context context);
/**
* Lists the DNS zones within a resource group.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param top The maximum number of record sets to return. If not specified, returns up to 100 record sets.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the response to a Zone List or ListAll operation.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
PagedFlux<ZoneInner> listByResourceGroupAsync(String resourceGroupName, Integer top);
/**
* Lists the DNS zones within a resource group.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the response to a Zone List or ListAll operation.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
PagedFlux<ZoneInner> listByResourceGroupAsync(String resourceGroupName);
/**
 * Lists the DNS zones within a resource group.
 *
 * @param resourceGroupName The name of the resource group. The name is case insensitive.
 * @param top The maximum number of DNS zones to return. If not specified, returns up to 100 zones.
 * @param context The context to associate with this operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return the response to a Zone List or ListAll operation as paginated response with {@link PagedIterable}.
 */
@ServiceMethod(returns = ReturnType.COLLECTION)
PagedIterable<ZoneInner> listByResourceGroup(String resourceGroupName, Integer top, Context context);
/**
 * Lists the DNS zones within a resource group.
 *
 * @param resourceGroupName The name of the resource group. The name is case insensitive.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return the response to a Zone List or ListAll operation as paginated response with {@link PagedIterable}.
 */
@ServiceMethod(returns = ReturnType.COLLECTION)
PagedIterable<ZoneInner> listByResourceGroup(String resourceGroupName);
/**
 * Lists the DNS zones in all resource groups in a subscription.
 *
 * @param top The maximum number of DNS zones to return. If not specified, returns up to 100 zones.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return the response to a Zone List or ListAll operation as paginated response with {@link PagedFlux}.
 */
@ServiceMethod(returns = ReturnType.COLLECTION)
PagedFlux<ZoneInner> listAsync(Integer top);
/**
 * Lists the DNS zones in all resource groups in a subscription.
 *
 * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return the response to a Zone List or ListAll operation as paginated response with {@link PagedFlux}.
 */
@ServiceMethod(returns = ReturnType.COLLECTION)
PagedFlux<ZoneInner> listAsync();
/**
 * Lists the DNS zones in all resource groups in a subscription.
 *
 * @param top The maximum number of DNS zones to return. If not specified, returns up to 100 zones.
 * @param context The context to associate with this operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return the response to a Zone List or ListAll operation as paginated response with {@link PagedIterable}.
 */
@ServiceMethod(returns = ReturnType.COLLECTION)
PagedIterable<ZoneInner> list(Integer top, Context context);
/**
 * Lists the DNS zones in all resource groups in a subscription.
 *
 * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return the response to a Zone List or ListAll operation as paginated response with {@link PagedIterable}.
 */
@ServiceMethod(returns = ReturnType.COLLECTION)
PagedIterable<ZoneInner> list();
}
| |
/*
* Copyright (C) 2015 Giuseppe Cardone <ippatsuman@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.ac.ebi.spot.goci.service.junidecode;
/**
 * Character map for Unicode characters with codepoint U+30xx
 * (CJK symbols and punctuation, hiragana and katakana), used to
 * transliterate those characters to ASCII.
 *
 * @author Giuseppe Cardone
 * @version 0.1
 */
class X30 {

    /**
     * Transliteration table indexed by the low byte of the codepoint:
     * {@code map[cp & 0xff]} is the ASCII replacement for codepoint
     * {@code U+30xx}. Entries marked {@code "[?]"} have no mapping.
     */
    public static final String[] map = new String[]{
        " ", // 0x00
        ", ", // 0x01
        ". ", // 0x02
        "\"", // 0x03
        "[JIS]", // 0x04
        "\"", // 0x05
        "/", // 0x06
        "0", // 0x07
        "<", // 0x08
        "> ", // 0x09
        "<<", // 0x0a
        ">> ", // 0x0b
        "[", // 0x0c
        "] ", // 0x0d
        "{", // 0x0e
        "} ", // 0x0f
        "[(", // 0x10
        ")] ", // 0x11
        "@", // 0x12
        "X ", // 0x13
        "[", // 0x14
        "] ", // 0x15
        "[[", // 0x16
        "]] ", // 0x17
        "((", // 0x18
        ")) ", // 0x19
        "[[", // 0x1a
        "]] ", // 0x1b
        "~ ", // 0x1c
        "``", // 0x1d
        "\'\'", // 0x1e
        ",,", // 0x1f
        "@", // 0x20
        "1", // 0x21
        "2", // 0x22
        "3", // 0x23
        "4", // 0x24
        "5", // 0x25
        "6", // 0x26
        "7", // 0x27
        "8", // 0x28
        "9", // 0x29
        "", // 0x2a
        "", // 0x2b
        "", // 0x2c
        "", // 0x2d
        "", // 0x2e
        "", // 0x2f
        "~", // 0x30
        "+", // 0x31
        "+", // 0x32
        "+", // 0x33
        "+", // 0x34
        "", // 0x35
        "@", // 0x36
        " // ", // 0x37
        "+10+", // 0x38
        "+20+", // 0x39
        "+30+", // 0x3a
        "[?]", // 0x3b
        "[?]", // 0x3c
        "[?]", // 0x3d
        "", // 0x3e
        "", // 0x3f
        "[?]", // 0x40
        // 0x41-0x94: hiragana block (U+3041-U+3094), romanized
        "a", // 0x41
        "a", // 0x42
        "i", // 0x43
        "i", // 0x44
        "u", // 0x45
        "u", // 0x46
        "e", // 0x47
        "e", // 0x48
        "o", // 0x49
        "o", // 0x4a
        "ka", // 0x4b
        "ga", // 0x4c
        "ki", // 0x4d
        "gi", // 0x4e
        "ku", // 0x4f
        "gu", // 0x50
        "ke", // 0x51
        "ge", // 0x52
        "ko", // 0x53
        "go", // 0x54
        "sa", // 0x55
        "za", // 0x56
        "si", // 0x57
        "zi", // 0x58
        "su", // 0x59
        "zu", // 0x5a
        "se", // 0x5b
        "ze", // 0x5c
        "so", // 0x5d
        "zo", // 0x5e
        "ta", // 0x5f
        "da", // 0x60
        "ti", // 0x61
        "di", // 0x62
        "tu", // 0x63
        "tu", // 0x64
        "du", // 0x65
        "te", // 0x66
        "de", // 0x67
        "to", // 0x68
        "do", // 0x69
        "na", // 0x6a
        "ni", // 0x6b
        "nu", // 0x6c
        "ne", // 0x6d
        "no", // 0x6e
        "ha", // 0x6f
        "ba", // 0x70
        "pa", // 0x71
        "hi", // 0x72
        "bi", // 0x73
        "pi", // 0x74
        "hu", // 0x75
        "bu", // 0x76
        "pu", // 0x77
        "he", // 0x78
        "be", // 0x79
        "pe", // 0x7a
        "ho", // 0x7b
        "bo", // 0x7c
        "po", // 0x7d
        "ma", // 0x7e
        "mi", // 0x7f
        "mu", // 0x80
        "me", // 0x81
        "mo", // 0x82
        "ya", // 0x83
        "ya", // 0x84
        "yu", // 0x85
        "yu", // 0x86
        "yo", // 0x87
        "yo", // 0x88
        "ra", // 0x89
        "ri", // 0x8a
        "ru", // 0x8b
        "re", // 0x8c
        "ro", // 0x8d
        "wa", // 0x8e
        "wa", // 0x8f
        "wi", // 0x90
        "we", // 0x91
        "wo", // 0x92
        "n", // 0x93
        "vu", // 0x94
        "[?]", // 0x95
        "[?]", // 0x96
        "[?]", // 0x97
        "[?]", // 0x98
        "", // 0x99
        "", // 0x9a
        "", // 0x9b
        "", // 0x9c
        "\"", // 0x9d
        "\"", // 0x9e
        "[?]", // 0x9f
        "[?]", // 0xa0
        // 0xa1-0xfa: katakana block (U+30A1-U+30FA), romanized
        "a", // 0xa1
        "a", // 0xa2
        "i", // 0xa3
        "i", // 0xa4
        "u", // 0xa5
        "u", // 0xa6
        "e", // 0xa7
        "e", // 0xa8
        "o", // 0xa9
        "o", // 0xaa
        "ka", // 0xab
        "ga", // 0xac
        "ki", // 0xad
        "gi", // 0xae
        "ku", // 0xaf
        "gu", // 0xb0
        "ke", // 0xb1
        "ge", // 0xb2
        "ko", // 0xb3
        "go", // 0xb4
        "sa", // 0xb5
        "za", // 0xb6
        "si", // 0xb7
        "zi", // 0xb8
        "su", // 0xb9
        "zu", // 0xba
        "se", // 0xbb
        "ze", // 0xbc
        "so", // 0xbd
        "zo", // 0xbe
        "ta", // 0xbf
        "da", // 0xc0
        "ti", // 0xc1
        "di", // 0xc2
        "tu", // 0xc3
        "tu", // 0xc4
        "du", // 0xc5
        "te", // 0xc6
        "de", // 0xc7
        "to", // 0xc8
        "do", // 0xc9
        "na", // 0xca
        "ni", // 0xcb
        "nu", // 0xcc
        "ne", // 0xcd
        "no", // 0xce
        "ha", // 0xcf
        "ba", // 0xd0
        "pa", // 0xd1
        "hi", // 0xd2
        "bi", // 0xd3
        "pi", // 0xd4
        "hu", // 0xd5
        "bu", // 0xd6
        "pu", // 0xd7
        "he", // 0xd8
        "be", // 0xd9
        "pe", // 0xda
        "ho", // 0xdb
        "bo", // 0xdc
        "po", // 0xdd
        "ma", // 0xde
        "mi", // 0xdf
        "mu", // 0xe0
        "me", // 0xe1
        "mo", // 0xe2
        "ya", // 0xe3
        "ya", // 0xe4
        "yu", // 0xe5
        "yu", // 0xe6
        "yo", // 0xe7
        "yo", // 0xe8
        "ra", // 0xe9
        "ri", // 0xea
        "ru", // 0xeb
        "re", // 0xec
        "ro", // 0xed
        "wa", // 0xee
        "wa", // 0xef
        "wi", // 0xf0
        "we", // 0xf1
        "wo", // 0xf2
        "n", // 0xf3
        "vu", // 0xf4
        "ka", // 0xf5
        "ke", // 0xf6
        "va", // 0xf7
        "vi", // 0xf8
        "ve", // 0xf9
        "vo", // 0xfa
        "", // 0xfb
        "", // 0xfc
        "\"", // 0xfd
        "\"" // 0xfe
    };

    /** Static lookup table only; never instantiated. */
    private X30() {
    }
}
| |
/*
* Copyright 2014 Avanza Bank AB
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.avanza.astrix.context;
import java.lang.annotation.Annotation;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.ServiceLoader;
import java.util.Set;
import java.util.stream.Stream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.avanza.astrix.beans.api.ApiProviderBeanPublisherModule;
import com.avanza.astrix.beans.config.AstrixConfig;
import com.avanza.astrix.beans.config.AstrixConfigModule;
import com.avanza.astrix.beans.configdiscovery.ConfigDiscoveryModule;
import com.avanza.astrix.beans.core.AstrixBeanKey;
import com.avanza.astrix.beans.core.AstrixBeanSettings.BeanSetting;
import com.avanza.astrix.beans.core.AstrixBeanSettings.BooleanBeanSetting;
import com.avanza.astrix.beans.core.AstrixBeanSettings.IntBeanSetting;
import com.avanza.astrix.beans.core.AstrixBeanSettings.LongBeanSetting;
import com.avanza.astrix.beans.core.AstrixBeansCoreModule;
import com.avanza.astrix.beans.core.AstrixConfigAware;
import com.avanza.astrix.beans.core.AstrixSettings;
import com.avanza.astrix.beans.factory.BeanFactoryModule;
import com.avanza.astrix.beans.factory.StandardFactoryBean;
import com.avanza.astrix.beans.ft.BeanFaultToleranceFactorySpi;
import com.avanza.astrix.beans.ft.DefaultHystrixCommandNamingStrategy;
import com.avanza.astrix.beans.ft.FaultToleranceModule;
import com.avanza.astrix.beans.ft.HystrixCommandNamingStrategy;
import com.avanza.astrix.beans.ft.NoFaultTolerance;
import com.avanza.astrix.beans.publish.ApiProviderClass;
import com.avanza.astrix.beans.publish.ApiProviderPlugins;
import com.avanza.astrix.beans.publish.ApiProviders;
import com.avanza.astrix.beans.publish.BeanPublisherPlugin;
import com.avanza.astrix.beans.publish.BeansPublishModule;
import com.avanza.astrix.beans.registry.AstrixServiceRegistryLibraryProvider;
import com.avanza.astrix.beans.registry.AstrixServiceRegistryServiceProvider;
import com.avanza.astrix.beans.registry.ServiceRegistryDiscoveryModule;
import com.avanza.astrix.beans.service.DirectComponentModule;
import com.avanza.astrix.beans.service.ServiceModule;
import com.avanza.astrix.config.DynamicConfig;
import com.avanza.astrix.config.LongSetting;
import com.avanza.astrix.config.MapConfigSource;
import com.avanza.astrix.config.PropertiesConfigSource;
import com.avanza.astrix.config.Setting;
import com.avanza.astrix.config.SystemPropertiesConfigSource;
import com.avanza.astrix.context.mbeans.AstrixMBeanModule;
import com.avanza.astrix.context.mbeans.MBeanServerFacade;
import com.avanza.astrix.context.mbeans.PlatformMBeanServer;
import com.avanza.astrix.context.metrics.DefaultMetricSpi;
import com.avanza.astrix.context.metrics.MetricsModule;
import com.avanza.astrix.context.metrics.MetricsSpi;
import com.avanza.astrix.modules.Module;
import com.avanza.astrix.modules.ModuleContext;
import com.avanza.astrix.modules.ModuleInstancePostProcessor;
import com.avanza.astrix.modules.Modules;
import com.avanza.astrix.modules.ModulesConfigurer;
import com.avanza.astrix.modules.StrategyContextPreparer;
import com.avanza.astrix.modules.StrategyProvider;
import com.avanza.astrix.provider.core.AstrixApiProvider;
import com.avanza.astrix.provider.core.AstrixExcludedByProfile;
import com.avanza.astrix.provider.core.AstrixIncludedByProfile;
import com.avanza.astrix.serviceunit.AstrixApplicationDescriptor;
import com.avanza.astrix.serviceunit.ServiceUnitModule;
import com.avanza.astrix.serviceunit.SystemServiceApiProvider;
import com.avanza.astrix.versioning.core.ObjectSerializerModule;
import com.avanza.astrix.versioning.jackson1.Jackson1SerializerModule;
/**
 * Used to configure and create an {@link AstrixContext}. <p>
 *
 * @author Elias Lindholm (elilin)
 *
 */
public class AstrixConfigurer {

    private static final String CLASSPATH_OVERRIDE_SETTINGS = "META-INF/astrix/settings.properties";
    private static final Logger log = LoggerFactory.getLogger(AstrixConfigurer.class);

    private ApiProviders astrixApiProviders;
    private final Collection<StandardFactoryBean<?>> standaloneFactories = new LinkedList<>();
    private final List<Module> customModules = new ArrayList<>();
    private final Map<Class<?>, StrategyProvider<?>> strategyProviderByType = new HashMap<>();
    // Programmatic settings applied through the set(..) methods.
    private final MapConfigSource settings = new MapConfigSource();
    // Custom configuration explicitly supplied via setConfig(..); takes
    // precedence over all well-known sources when non-null.
    private DynamicConfig customConfig = null;
    // Well-known configuration sources in precedence order: system properties,
    // programmatic settings, then the optional classpath override file.
    private final DynamicConfig wellKnownConfigSources = DynamicConfig.create(new SystemPropertiesConfigSource(), settings, PropertiesConfigSource.optionalClasspathPropertiesFile(CLASSPATH_OVERRIDE_SETTINGS));
    private final Set<String> activeProfiles = new HashSet<>();
    private AstrixApplicationDescriptor applicationDescriptor;

    public AstrixConfigurer() {
    }

    public void setApplicationDescriptor(AstrixApplicationDescriptor applicationDescriptor) {
        this.applicationDescriptor = applicationDescriptor;
    }

    /**
     * Creates an AstrixContext instance using the current configuration. <p>
     *
     * @return a new, fully configured {@link AstrixContext}
     */
    public AstrixContext configure() {
        DynamicConfig config = createDynamicConfig();
        ModulesConfigurer modulesConfigurer = new ModulesConfigurer();
        // Defaults below may be overridden by strategies registered by plugins
        // or through registerStrategy(..).
        modulesConfigurer.registerDefault(StrategyProvider.create(HystrixCommandNamingStrategy.class, DefaultHystrixCommandNamingStrategy.class));
        modulesConfigurer.registerDefault(StrategyProvider.create(BeanFaultToleranceFactorySpi.class, NoFaultTolerance.class));
        modulesConfigurer.registerDefault(StrategyProvider.create(MetricsSpi.class, DefaultMetricSpi.class));
        modulesConfigurer.registerDefault(StrategyProvider.create(MBeanServerFacade.class, PlatformMBeanServer.class, context -> context.importType(AstrixConfig.class)));
        for (Module plugin : customModules) {
            modulesConfigurer.register(plugin);
        }
        loadAstrixContextPlugins(modulesConfigurer);
        for (StrategyProvider<?> strategyProvider : this.strategyProviderByType.values()) {
            modulesConfigurer.register(strategyProvider);
        }
        modulesConfigurer.register(new AstrixConfigModule(config, this.settings));
        modulesConfigurer.register(new DirectComponentModule());
        modulesConfigurer.register(new AstrixBeansCoreModule());
        modulesConfigurer.register(new MetricsModule());
        modulesConfigurer.register(new AstrixMBeanModule());
        modulesConfigurer.register(new ServiceRegistryDiscoveryModule());
        modulesConfigurer.register(new ConfigDiscoveryModule());
        modulesConfigurer.register(new BeansPublishModule());
        modulesConfigurer.register(new ServiceModule());
        modulesConfigurer.register(new ObjectSerializerModule());
        modulesConfigurer.register(new Jackson1SerializerModule());
        modulesConfigurer.register(new ApiProviderBeanPublisherModule());
        modulesConfigurer.register(new FaultToleranceModule());
        modulesConfigurer.register(new BeanFactoryModule());
        if (this.applicationDescriptor != null) {
            // Init server parts
            setupApplicationInstanceId(config);
            modulesConfigurer.register(new ServiceUnitModule(this.applicationDescriptor));
        }
        modulesConfigurer.registerBeanPostProcessor(new AstrixAwareInjector(config));
        Modules modules = modulesConfigurer.configure();
        final AstrixContextImpl context = new AstrixContextImpl(modules, this.applicationDescriptor);
        Stream<ApiProviderClass> systemApis =
                Stream.of(AstrixServiceRegistryServiceProvider.class, AstrixServiceRegistryLibraryProvider.class, SystemServiceApiProvider.class)
                      .map(ApiProviderClass::create);
        Stream.concat(systemApis, getApiProviders(modules, config))
              .filter(this::isActive)
              .distinct()
              .forEach(context::register);
        // TODO: Merge with FilteredApiProviders and create module
        for (StandardFactoryBean<?> beanFactory : standaloneFactories) {
            log.debug("Registering standalone factory: bean={}", beanFactory.getBeanKey());
            context.registerBeanFactory(beanFactory);
        }
        return context;
    }

    /**
     * Ensures APPLICATION_INSTANCE_ID is set, defaulting it to the
     * ApplicationDescriptor's name when absent.
     */
    private void setupApplicationInstanceId(DynamicConfig config) {
        String applicationInstanceId = AstrixSettings.APPLICATION_INSTANCE_ID.getFrom(config).get();
        if (applicationInstanceId == null) {
            applicationInstanceId = this.applicationDescriptor.toString();
            set(AstrixSettings.APPLICATION_INSTANCE_ID, this.applicationDescriptor.toString());
            log.info("No applicationInstanceId set, using name of ApplicationDescriptor as applicationInstanceId: {}", applicationInstanceId);
            Objects.requireNonNull(AstrixSettings.APPLICATION_INSTANCE_ID.getFrom(config).get());
        } else {
            log.info("Current applicationInstanceId={}", applicationInstanceId);
        }
    }

    /**
     * Discovers {@link AstrixContextPlugin} implementations via {@link ServiceLoader}
     * and registers their strategies and modules.
     */
    private void loadAstrixContextPlugins(final ModulesConfigurer modulesConfigurer) {
        Iterator<AstrixContextPlugin> contextPlugins = ServiceLoader.load(AstrixContextPlugin.class).iterator();
        while (contextPlugins.hasNext()) {
            AstrixContextPlugin contextPlugin = contextPlugins.next();
            log.debug("Registering AstrixContextPlugin: astrixContextPlugin={}", contextPlugin.getClass().getName());
            contextPlugin.registerStrategies(new AstrixStrategiesConfig() {
                @Override
                public <T> void registerDefaultStrategy(Class<T> strategyType, Class<? extends T> strategyProvider) {
                    modulesConfigurer.registerDefault(StrategyProvider.create(strategyType, strategyProvider));
                }
                @Override
                public <T> void registerStrategy(Class<T> strategyType,
                        Class<? extends T> strategyImpl) {
                    modulesConfigurer.register(StrategyProvider.create(strategyType, strategyImpl));
                }
                @Override
                public <T> void registerStrategy(Class<T> strategyType,
                        Class<? extends T> strategyImpl,
                        StrategyContextPreparer contextPreparer) {
                    modulesConfigurer.register(StrategyProvider.create(strategyType, strategyImpl, contextPreparer));
                }
            });
            modulesConfigurer.register(contextPlugin);
        }
    }

    /**
     * Builds the effective DynamicConfig: explicit custom config wins, then a
     * configured AstrixDynamicConfigFactory, then the well-known sources alone.
     */
    private DynamicConfig createDynamicConfig() {
        if (customConfig != null) {
            return DynamicConfig.merged(customConfig, wellKnownConfigSources);
        }
        String dynamicConfigFactoryClass = AstrixSettings.DYNAMIC_CONFIG_FACTORY.getFrom(wellKnownConfigSources).get();
        if (dynamicConfigFactoryClass != null) {
            AstrixDynamicConfigFactory dynamicConfigFactory = initFactory(dynamicConfigFactoryClass);
            DynamicConfig config = dynamicConfigFactory.create();
            return DynamicConfig.merged(config, wellKnownConfigSources);
        }
        return wellKnownConfigSources;
    }

    /**
     * Instantiates the configured {@link AstrixDynamicConfigFactory} reflectively.
     *
     * @throws RuntimeException if the class cannot be loaded or instantiated
     */
    private AstrixDynamicConfigFactory initFactory(String dynamicConfigFactoryClass) {
        try {
            // getDeclaredConstructor().newInstance() replaces the deprecated
            // Class.newInstance(), which rethrows checked constructor
            // exceptions without wrapping them.
            return (AstrixDynamicConfigFactory) Class.forName(dynamicConfigFactoryClass)
                    .getDeclaredConstructor()
                    .newInstance();
        } catch (ReflectiveOperationException e) {
            throw new RuntimeException("Failed to init AstrixDynamicConfigFactoryClass: " + dynamicConfigFactoryClass, e);
        }
    }

    /*
     * Allows api's published using astrix (using @AstrixApiProvider) to have the
     * DynamicConfig instance associated with the current AstrixContext injected
     */
    private final class AstrixAwareInjector implements ModuleInstancePostProcessor {

        private final DynamicConfig config;

        public AstrixAwareInjector(DynamicConfig config) {
            this.config = config;
        }

        @Override
        public void postProcess(Object bean) {
            if (bean instanceof AstrixConfigAware) {
                ((AstrixConfigAware) bean).setConfig(config); // TODO: config
            }
        }
    }

    /**
     * Decides whether a provider is active under the currently activated
     * profiles, honoring @AstrixIncludedByProfile / @AstrixExcludedByProfile.
     */
    private boolean isActive(ApiProviderClass providerClass) {
        if (providerClass.isAnnotationPresent(AstrixIncludedByProfile.class)) {
            AstrixIncludedByProfile activatedBy = providerClass.getAnnotation(AstrixIncludedByProfile.class);
            if (!this.activeProfiles.contains(activatedBy.value())) {
                log.debug("Rejecting provider, required profile not active. profile={} provider={}", activatedBy.value(), providerClass.getProviderClassName());
                return false;
            }
        }
        if (providerClass.isAnnotationPresent(AstrixExcludedByProfile.class)) {
            AstrixExcludedByProfile deactivatedBy = providerClass.getAnnotation(AstrixExcludedByProfile.class);
            if (this.activeProfiles.contains(deactivatedBy.value())) {
                log.debug("Rejecting provider, excluded by active profile. profile={} provider={}", deactivatedBy.value(), providerClass.getProviderClassName());
                return false;
            }
        }
        log.debug("Found provider: provider={}", providerClass.getProviderClassName());
        return true;
    }

    /**
     * Resolves the ApiProviders to publish: an explicitly injected set (testing),
     * otherwise a classpath scan of com.avanza.astrix plus any configured base packages.
     */
    private Stream<ApiProviderClass> getApiProviders(Modules modules, DynamicConfig config) {
        if (this.astrixApiProviders != null) {
            return astrixApiProviders.getAll();
        }
        String basePackage = AstrixSettings.API_PROVIDER_SCANNER_BASE_PACKAGE.getFrom(config).get();
        // Guard against a null setting value (previously NPE'd on trim()):
        // fall through to the default scan when no base package is configured.
        if (basePackage != null && !basePackage.trim().isEmpty()) {
            return new AstrixApiProviderClassScanner(getAllApiProviderAnnotationsTypes(modules), "com.avanza.astrix", basePackage.split(",")).getAll(); // Always scan com.avanza.astrix package
        }
        return new AstrixApiProviderClassScanner(getAllApiProviderAnnotationsTypes(modules), "com.avanza.astrix").getAll();
    }

    // Collects the provider annotation types recognized by all registered publisher plugins.
    private List<Class<? extends Annotation>> getAllApiProviderAnnotationsTypes(Modules modules) {
        List<Class<? extends Annotation>> result = new ArrayList<>();
        for (BeanPublisherPlugin plugin : modules.getInstance(ApiProviderPlugins.class).getAll()) {
            result.add(plugin.getProviderAnnotationType());
        }
        return result;
    }

    /**
     * Sets the base-package used when scanning for {@link AstrixApiProvider}'s.<p>
     *
     * @param basePackage comma-separated list of base packages to scan
     * @return this configurer, for chaining
     */
    public AstrixConfigurer setBasePackage(String basePackage) {
        this.settings.set(AstrixSettings.API_PROVIDER_SCANNER_BASE_PACKAGE, basePackage);
        return this;
    }

    public AstrixConfigurer enableFaultTolerance(boolean enableFaultTolerance) {
        this.settings.set(AstrixSettings.ENABLE_FAULT_TOLERANCE, enableFaultTolerance);
        return this;
    }

    // package private. Used for internal testing only
    void setAstrixApiProviders(ApiProviders astrixApiProviders) {
        this.astrixApiProviders = astrixApiProviders;
    }

    // package private. Used for internal testing only
    <T> AstrixConfigurer registerPlugin(final Class<T> type, final T provider) {
        customModules.add(new Module() {
            @Override
            public void prepare(ModuleContext pluginContext) {
                pluginContext.bind(type, provider);
                pluginContext.export(type);
            }
            @Override
            public String name() {
                return "plugin-" + type.getName() + "[" + provider.getClass().getName() + "]";
            }
        });
        return this;
    }

    // package private. Used for internal testing only
    <T> void registerStrategy(final Class<T> strategyInterface, final T strategyInstance) {
        this.strategyProviderByType.put(strategyInterface, StrategyProvider.create(strategyInterface, strategyInstance));
    }

    public AstrixConfigurer set(String settingName, long value) {
        this.settings.set(settingName, Long.toString(value));
        return this;
    }

    public AstrixConfigurer set(String settingName, boolean value) {
        this.settings.set(settingName, Boolean.toString(value));
        return this;
    }

    public AstrixConfigurer set(String settingName, String value) {
        this.settings.set(settingName, value);
        return this;
    }

    public final <T> AstrixConfigurer set(Setting<T> setting, T value) {
        this.settings.set(setting, value);
        return this;
    }

    // Unused type parameter <T> removed; erasure-identical, so binary compatible.
    public final AstrixConfigurer set(LongSetting setting, long value) {
        this.settings.set(setting, value);
        return this;
    }

    public AstrixConfigurer setSettings(Map<String, String> settings) {
        for (Map.Entry<String, String> setting : settings.entrySet()) {
            this.settings.set(setting.getKey(), setting.getValue());
        }
        return this;
    }

    /**
     * Sets the custom configuration sources that should be used by the AstrixContext.
     *
     * When set, then the given DynamicConfig instance will take precedence over all well-known configuration
     * sources, see list below. When this property is set, Astrix will NOT look for a {@link AstrixDynamicConfigFactory}
     * to create the custom configuration. If NOT set, then astrix will query all well-known configuration sources
     * for a AstrixDynamicConfigFactory. If one is found, then that factory will be used to create a DynamicConfig instance
     * for the custom configuration sources, otherwise no custom configuration sources will be used by the
     * created AstrixContext (that is, only well-known configuration sources will be used).
     *
     * <h6>List of well-known configuration sources</h6>
     * <ol>
     * <li>System Properties</li>
     * <li>Programmatic configuration set on this instance</li>
     * <li>META-INF/astrix/settings.properties</li>
     * <li>default values</li>
     * </ol>
     *
     * @param config custom DynamicConfig to use
     * @return this configurer, for chaining
     */
    public AstrixConfigurer setConfig(DynamicConfig config) {
        this.customConfig = config;
        return this;
    }

    /**
     * Optional property that identifies what subsystem the current context belongs to. Its only
     * allowed to invoke non-versioned services within the same subsystem. Attempting
     * to invoke a non-versioned service in another subsystem will throw an IllegalSubsystemException. <p>
     *
     * @param subsystem name of the subsystem this context belongs to
     * @return this configurer, for chaining
     */
    public AstrixConfigurer setSubsystem(String subsystem) {
        this.settings.set(AstrixSettings.SUBSYSTEM_NAME, subsystem);
        return this;
    }

    void addFactoryBean(StandardFactoryBean<?> factoryBean) {
        this.standaloneFactories.add(factoryBean);
    }

    void removeSetting(String name) {
        this.settings.set(name, null);
    }

    /**
     * Activates a given Astrix profile.
     *
     * Astrix profiles are used to include/exclude {@link AstrixApiProvider}'s at runtime by annotating them
     * with {@link AstrixIncludedByProfile} and/or {@link AstrixExcludedByProfile}, typically to
     * replace a given {@link AstrixApiProvider} in testing scenarios.<p>
     *
     *
     * @param profile name of the profile to activate
     * @return this configurer, for chaining
     */
    public AstrixConfigurer activateProfile(String profile) {
        this.activeProfiles.add(profile);
        return this;
    }

    public void set(BooleanBeanSetting beanSetting, AstrixBeanKey<?> beanKey, boolean value) {
        set(beanSetting.nameFor(beanKey), value);
    }

    public void set(IntBeanSetting beanSetting, AstrixBeanKey<?> beanKey, int value) {
        set(beanSetting.nameFor(beanKey), value);
    }

    public void set(LongBeanSetting beanSetting, AstrixBeanKey<?> beanKey, long value) {
        set(beanSetting.nameFor(beanKey), value);
    }

    void registerModule(Module module) {
        this.customModules.add(module);
    }

    public <T> void set(BeanSetting<T> setting, AstrixBeanKey<?> beanKey, T value) {
        set(setting.nameFor(beanKey), asString(value));
    }

    // Null-safe toString used when persisting typed bean settings as strings.
    private String asString(Object value) {
        if (value == null) {
            return null;
        }
        return value.toString();
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.