gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.thrift;
import java.util.Collections;
import java.util.Iterator;
import java.util.NoSuchElementException;
import com.google.common.collect.AbstractIterator;
import com.google.common.collect.Iterators;
import com.google.common.collect.PeekingIterator;
import org.apache.cassandra.config.CFMetaData;
import org.apache.cassandra.config.ColumnDefinition;
import org.apache.cassandra.db.*;
import org.apache.cassandra.db.rows.*;
import org.apache.cassandra.db.marshal.AbstractType;
import org.apache.cassandra.db.marshal.MapType;
import org.apache.cassandra.db.partitions.*;
/**
* Given an iterator on a partition of a compact table, this return an iterator that merges the
* static row columns with the other results.
*
* Compact tables stores thrift column_metadata as static columns (see CompactTables for
* details). When reading for thrift however, we want to merge those static values with other
* results because:
* 1) on thrift, all "columns" are sorted together, whether or not they are declared
* column_metadata.
* 2) it's possible that a table add a value for a "dynamic" column, and later that column
* is statically defined. Merging "static" and "dynamic" columns make sure we don't miss
* a value prior to the column declaration.
*
* For example, if a thrift table declare 2 columns "c1" and "c5" and the results from a query
* is:
* Partition: static: { c1: 3, c5: 4 }
* "a" : { value : 2 }
* "c3": { value : 8 }
* "c7": { value : 1 }
* then this class transform it into:
* Partition: "a" : { value : 2 }
* "c1": { value : 3 }
* "c3": { value : 8 }
* "c5": { value : 4 }
* "c7": { value : 1 }
*/
public class ThriftResultsMerger extends WrappingUnfilteredPartitionIterator
{
    // Query timestamp (in seconds) used when reconciling a static and a dynamic
    // version of the same cell.
    private final int nowInSec;

    private ThriftResultsMerger(UnfilteredPartitionIterator wrapped, int nowInSec)
    {
        super(wrapped);
        this.nowInSec = nowInSec;
    }

    /**
     * Wraps {@code iterator} so that declared-column (static) values are merged into
     * the dynamic results, but only for the table kinds that need it (static compact
     * tables and super column tables); any other table is returned unwrapped.
     */
    public static UnfilteredPartitionIterator maybeWrap(UnfilteredPartitionIterator iterator, CFMetaData metadata, int nowInSec)
    {
        if (!metadata.isStaticCompactTable() && !metadata.isSuper())
            return iterator;
        return new ThriftResultsMerger(iterator, nowInSec);
    }

    /**
     * Per-partition variant of {@link #maybeWrap(UnfilteredPartitionIterator, CFMetaData, int)}:
     * picks the merger implementation matching the table kind, or returns the
     * iterator unwrapped when no merging is required.
     */
    public static UnfilteredRowIterator maybeWrap(UnfilteredRowIterator iterator, int nowInSec)
    {
        if (!iterator.metadata().isStaticCompactTable() && !iterator.metadata().isSuper())
            return iterator;
        return iterator.metadata().isSuper()
             ? new SuperColumnsPartitionMerger(iterator, nowInSec)
             : new PartitionMerger(iterator, nowInSec);
    }

    // Called by the wrapping partition iterator for each partition: wrap every
    // partition with the merger matching the table kind.
    protected UnfilteredRowIterator computeNext(UnfilteredRowIterator iter)
    {
        return iter.metadata().isSuper()
             ? new SuperColumnsPartitionMerger(iter, nowInSec)
             : new PartitionMerger(iter, nowInSec);
    }

    /**
     * Merger for static compact tables: turns each simple cell of the partition's
     * static row into a synthetic one-cell Row (clustered on the column name) and
     * merges those rows, in comparator order, with the wrapped iterator's rows.
     * The static row itself is hidden from consumers (see {@link #staticRow()}).
     */
    private static class PartitionMerger extends WrappingUnfilteredRowIterator
    {
        private final int nowInSec;

        // We initialize lazily to avoid having this iterator fetch the wrapped iterator before it's actually asked for it.
        private boolean isInit;

        // The wrapped iterator's static row, captured at init time; its simple cells
        // are converted one at a time into rows to merge.
        private Row staticRow;
        private int i; // the index of the next column of static row to return

        // The next synthetic row built from a static cell, or null once all static
        // cells have been consumed.
        private ReusableRow nextToMerge;
        // Look-ahead element pulled from the wrapped iterator but not yet returned.
        private Unfiltered nextFromWrapped;

        private PartitionMerger(UnfilteredRowIterator results, int nowInSec)
        {
            super(results);
            assert results.metadata().isStaticCompactTable();
            this.nowInSec = nowInSec;
        }

        private void init()
        {
            assert !isInit;
            this.staticRow = super.staticRow();
            // Static compact tables only declare simple (non-complex) static columns.
            assert staticRow.columns().complexColumnCount() == 0;

            this.nextToMerge = createReusableRow();
            updateNextToMerge();
            isInit = true;
        }

        @Override
        public Row staticRow()
        {
            // The static columns are surfaced as regular rows instead, so consumers
            // must not also see them as a static row.
            return Rows.EMPTY_STATIC_ROW;
        }

        private ReusableRow createReusableRow()
        {
            return new ReusableRow(metadata().clusteringColumns().size(), metadata().partitionColumns().regulars, true, metadata().isCounter());
        }

        @Override
        public boolean hasNext()
        {
            if (!isInit)
                init();

            return nextFromWrapped != null || nextToMerge != null || super.hasNext();
        }

        @Override
        public Unfiltered next()
        {
            if (!isInit)
                init();

            // Refill the look-ahead from the wrapped iterator if needed.
            if (nextFromWrapped == null && super.hasNext())
                nextFromWrapped = super.next();

            if (nextFromWrapped == null)
            {
                if (nextToMerge == null)
                    throw new NoSuchElementException();

                return consumeNextToMerge();
            }

            if (nextToMerge == null)
                return consumeNextWrapped();

            // Both sides have a candidate: return them in comparator order.
            int cmp = metadata().comparator.compare(nextToMerge, nextFromWrapped);
            if (cmp < 0)
                return consumeNextToMerge();
            if (cmp > 0)
                return consumeNextWrapped();

            // Same row, but we know the row has only a single column so just pick the more recent
            assert nextFromWrapped instanceof Row;
            ReusableRow row = createReusableRow();
            Rows.merge((Row)consumeNextWrapped(), consumeNextToMerge(), columns().regulars, row.writer(), nowInSec);
            return row;
        }

        private Unfiltered consumeNextWrapped()
        {
            Unfiltered toReturn = nextFromWrapped;
            nextFromWrapped = null;
            return toReturn;
        }

        private Row consumeNextToMerge()
        {
            Row toReturn = nextToMerge;
            updateNextToMerge();
            return toReturn;
        }

        // Advances over the static row's simple columns to build the next synthetic
        // row into nextToMerge, or sets nextToMerge to null when none remain.
        private void updateNextToMerge()
        {
            while (i < staticRow.columns().simpleColumnCount())
            {
                Cell cell = staticRow.getCell(staticRow.columns().getSimple(i++));
                if (cell != null)
                {
                    // Given a static cell, the equivalent row uses the column name as clustering and the
                    // value as unique cell value.
                    Row.Writer writer = nextToMerge.writer();
                    writer.writeClusteringValue(cell.column().name.bytes);
                    writer.writeCell(metadata().compactValueColumn(), cell.isCounterCell(), cell.value(), cell.livenessInfo(), cell.path());
                    writer.endOfRow();
                    return;
                }
            }
            // Nothing more to merge.
            nextToMerge = null;
        }
    }

    /**
     * Merger for super column tables: for each row, merges the row's simple
     * ("static", i.e. declared) cells into the compact-value map column alongside
     * the dynamic map entries, reconciling when both define the same sub-column.
     * Range tombstones and other non-row unfiltereds pass through untouched.
     */
    private static class SuperColumnsPartitionMerger extends WrappingUnfilteredRowIterator
    {
        private final int nowInSec;
        // Buffer reused for every merged row returned by next(); callers must not
        // hold on to a returned row across iterations.
        private final ReusableRow reusableRow;
        // The map column holding a super column's dynamic sub-columns.
        private final ColumnDefinition superColumnMapColumn;
        // Comparator for the map keys, used to merge static and dynamic cells in order.
        private final AbstractType<?> columnComparator;

        private SuperColumnsPartitionMerger(UnfilteredRowIterator results, int nowInSec)
        {
            super(results);
            assert results.metadata().isSuper();
            this.nowInSec = nowInSec;

            this.superColumnMapColumn = results.metadata().compactValueColumn();
            assert superColumnMapColumn != null && superColumnMapColumn.type instanceof MapType;

            this.reusableRow = new ReusableRow(results.metadata().clusteringColumns().size(),
                                              Columns.of(superColumnMapColumn),
                                              true,
                                              results.metadata().isCounter());
            this.columnComparator = ((MapType)superColumnMapColumn.type).nameComparator();
        }

        @Override
        public Unfiltered next()
        {
            Unfiltered next = super.next();
            if (next.kind() != Unfiltered.Kind.ROW)
                return next;

            Row row = (Row)next;
            Row.Writer writer = reusableRow.writer();
            row.clustering().writeTo(writer);

            PeekingIterator<Cell> staticCells = Iterators.peekingIterator(makeStaticCellIterator(row));
            // No declared-column cells: the row can be returned as-is.
            if (!staticCells.hasNext())
                return row;

            Iterator<Cell> cells = row.getCells(superColumnMapColumn);
            PeekingIterator<Cell> dynamicCells = Iterators.peekingIterator(cells.hasNext() ? cells : Collections.<Cell>emptyIterator());

            // Classic ordered merge: advance whichever side has the smaller sub-column
            // name, reconciling when both sides have the same name.
            while (staticCells.hasNext() && dynamicCells.hasNext())
            {
                Cell staticCell = staticCells.peek();
                Cell dynamicCell = dynamicCells.peek();
                int cmp = columnComparator.compare(staticCell.column().name.bytes, dynamicCell.path().get(0));
                if (cmp < 0)
                {
                    // Rewrite the static cell as a map entry keyed by its column name.
                    staticCell = staticCells.next();
                    writer.writeCell(superColumnMapColumn, staticCell.isCounterCell(), staticCell.value(), staticCell.livenessInfo(), CellPath.create(staticCell.column().name.bytes));
                }
                else if (cmp > 0)
                {
                    dynamicCells.next().writeTo(writer);
                }
                else
                {
                    // Same sub-column on both sides: convert the static cell to a map
                    // entry, then keep the reconciled (most recent) of the two.
                    staticCell = staticCells.next();
                    Cell toMerge = Cells.create(superColumnMapColumn,
                                                staticCell.isCounterCell(),
                                                staticCell.value(),
                                                staticCell.livenessInfo(),
                                                CellPath.create(staticCell.column().name.bytes));
                    Cells.reconcile(toMerge, dynamicCells.next(), nowInSec).writeTo(writer);
                }
            }

            // Drain whichever side still has cells.
            while (staticCells.hasNext())
            {
                Cell staticCell = staticCells.next();
                writer.writeCell(superColumnMapColumn, staticCell.isCounterCell(), staticCell.value(), staticCell.livenessInfo(), CellPath.create(staticCell.column().name.bytes));
            }
            while (dynamicCells.hasNext())
            {
                dynamicCells.next().writeTo(writer);
            }

            writer.endOfRow();
            return reusableRow;
        }

        // Iterates the row's non-null simple cells, i.e. the values of the
        // statically declared columns.
        private static Iterator<Cell> makeStaticCellIterator(final Row row)
        {
            return new AbstractIterator<Cell>()
            {
                private int i;

                protected Cell computeNext()
                {
                    while (i < row.columns().simpleColumnCount())
                    {
                        Cell cell = row.getCell(row.columns().getSimple(i++));
                        if (cell != null)
                            return cell;
                    }
                    return endOfData();
                }
            };
        }
    }
}
| |
/****************************************************************************
Copyright (c) 2010-2013 cocos2d-x.org
http://www.cocos2d-x.org
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
****************************************************************************/
package org.cocos2dx.lib;
import javax.microedition.khronos.egl.EGL10;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.egl.EGLDisplay;
import org.cocos2dx.lib.Cocos2dxHelper.Cocos2dxHelperListener;
import com.chukong.cocosplay.client.CocosPlayClient;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.content.pm.ApplicationInfo;
import android.content.pm.PackageManager;
import android.graphics.PixelFormat;
import android.opengl.GLSurfaceView;
import android.os.Build;
import android.os.Bundle;
import android.os.Message;
import android.preference.PreferenceManager.OnActivityResultListener;
import android.util.Log;
import android.view.ViewGroup;
import android.widget.FrameLayout;
import android.opengl.GLSurfaceView;
public abstract class Cocos2dxActivity extends Activity implements Cocos2dxHelperListener {
// ===========================================================
// Constants
// ===========================================================
private final static String TAG = Cocos2dxActivity.class.getSimpleName();
// ===========================================================
// Fields
// ===========================================================
private Cocos2dxGLSurfaceView mGLSurfaceView = null;
private int[] mGLContextAttrs = null;
private Cocos2dxHandler mHandler = null;
private static Cocos2dxActivity sContext = null;
private Cocos2dxVideoHelper mVideoHelper = null;
private Cocos2dxWebViewHelper mWebViewHelper = null;
public class Cocos2dxEGLConfigChooser implements GLSurfaceView.EGLConfigChooser
{
protected int[] configAttribs;
public Cocos2dxEGLConfigChooser(int redSize, int greenSize, int blueSize, int alphaSize, int depthSize, int stencilSize)
{
configAttribs = new int[] {redSize, greenSize, blueSize, alphaSize, depthSize, stencilSize};
}
public Cocos2dxEGLConfigChooser(int[] attribs)
{
configAttribs = attribs;
}
public EGLConfig selectConfig(EGL10 egl, EGLDisplay display, EGLConfig[] configs, int[] attribs)
{
for (EGLConfig config : configs) {
int d = findConfigAttrib(egl, display, config,
EGL10.EGL_DEPTH_SIZE, 0);
int s = findConfigAttrib(egl, display, config,
EGL10.EGL_STENCIL_SIZE, 0);
if ((d >= attribs[4]) && (s >= attribs[5])) {
int r = findConfigAttrib(egl, display, config,
EGL10.EGL_RED_SIZE, 0);
int g = findConfigAttrib(egl, display, config,
EGL10.EGL_GREEN_SIZE, 0);
int b = findConfigAttrib(egl, display, config,
EGL10.EGL_BLUE_SIZE, 0);
int a = findConfigAttrib(egl, display, config,
EGL10.EGL_ALPHA_SIZE, 0);
if ((r >= attribs[0]) && (g >= attribs[1])
&& (b >= attribs[2]) && (a >= attribs[3])) {
return config;
}
}
}
return null;
}
private int findConfigAttrib(EGL10 egl, EGLDisplay display,
EGLConfig config, int attribute, int defaultValue) {
int[] value = new int[1];
if (egl.eglGetConfigAttrib(display, config, attribute, value)) {
return value[0];
}
return defaultValue;
}
@Override
public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display)
{
int[] numConfigs = new int[1];
if(egl.eglGetConfigs(display, null, 0, numConfigs))
{
EGLConfig[] configs = new EGLConfig[numConfigs[0]];
int[] EGLattribs = {
EGL10.EGL_RED_SIZE, configAttribs[0],
EGL10.EGL_GREEN_SIZE, configAttribs[1],
EGL10.EGL_BLUE_SIZE, configAttribs[2],
EGL10.EGL_ALPHA_SIZE, configAttribs[3],
EGL10.EGL_DEPTH_SIZE, configAttribs[4],
EGL10.EGL_STENCIL_SIZE,configAttribs[5],
EGL10.EGL_RENDERABLE_TYPE, 4, //EGL_OPENGL_ES2_BIT
EGL10.EGL_NONE
};
int[] choosedConfigNum = new int[1];
egl.eglChooseConfig(display, EGLattribs, configs, numConfigs[0], choosedConfigNum);
if(choosedConfigNum[0]>0)
{
return selectConfig(egl, display, configs, configAttribs);
}
else
{
int[] defaultEGLattribs = {
EGL10.EGL_RED_SIZE, 5,
EGL10.EGL_GREEN_SIZE, 6,
EGL10.EGL_BLUE_SIZE, 5,
EGL10.EGL_ALPHA_SIZE, 0,
EGL10.EGL_DEPTH_SIZE, 0,
EGL10.EGL_STENCIL_SIZE,0,
EGL10.EGL_RENDERABLE_TYPE, 4, //EGL_OPENGL_ES2_BIT
EGL10.EGL_NONE
};
int[] defaultEGLattribsAlpha = {
EGL10.EGL_RED_SIZE, 4,
EGL10.EGL_GREEN_SIZE, 4,
EGL10.EGL_BLUE_SIZE, 4,
EGL10.EGL_ALPHA_SIZE, 4,
EGL10.EGL_DEPTH_SIZE, 0,
EGL10.EGL_STENCIL_SIZE,0,
EGL10.EGL_RENDERABLE_TYPE, 4, //EGL_OPENGL_ES2_BIT
EGL10.EGL_NONE
};
int[] attribs = null;
//choose one can use
if(this.configAttribs[3] == 0)
{
egl.eglChooseConfig(display, defaultEGLattribs, configs, numConfigs[0], choosedConfigNum);
attribs = new int[]{5,6,5,0,0,0};
}
else
{
egl.eglChooseConfig(display, defaultEGLattribsAlpha, configs, numConfigs[0], choosedConfigNum);
attribs = new int[]{4,4,4,4,0,0};
}
if(choosedConfigNum[0] > 0)
{
return selectConfig(egl, display, configs, attribs);
}
else
{
Log.e(DEVICE_POLICY_SERVICE, "Can not select an EGLConfig for rendering.");
return null;
}
}
}
Log.e(DEVICE_POLICY_SERVICE, "Can not select an EGLConfig for rendering.");
return null;
}
}
public static Context getContext() {
return sContext;
}
public void setKeepScreenOn(boolean value) {
final boolean newValue = value;
runOnUiThread(new Runnable() {
@Override
public void run() {
mGLSurfaceView.setKeepScreenOn(newValue);
}
});
}
protected void onLoadNativeLibraries() {
try {
ApplicationInfo ai = getPackageManager().getApplicationInfo(getPackageName(), PackageManager.GET_META_DATA);
Bundle bundle = ai.metaData;
String libName = bundle.getString("android.app.lib_name");
System.loadLibrary(libName);
} catch (Exception e) {
e.printStackTrace();
}
}
// ===========================================================
// Constructors
// ===========================================================
@Override
protected void onCreate(final Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
CocosPlayClient.init(this, false);
onLoadNativeLibraries();
sContext = this;
this.mHandler = new Cocos2dxHandler(this);
Cocos2dxHelper.init(this);
this.mGLContextAttrs = getGLContextAttrs();
this.init();
if (mVideoHelper == null) {
mVideoHelper = new Cocos2dxVideoHelper(this, mFrameLayout);
}
if(mWebViewHelper == null){
mWebViewHelper = new Cocos2dxWebViewHelper(mFrameLayout);
}
}
//native method,call GLViewImpl::getGLContextAttrs() to get the OpenGL ES context attributions
private static native int[] getGLContextAttrs();
// ===========================================================
// Getter & Setter
// ===========================================================
// ===========================================================
// Methods for/from SuperClass/Interfaces
// ===========================================================
@Override
protected void onResume() {
super.onResume();
Cocos2dxHelper.onResume();
this.mGLSurfaceView.onResume();
}
@Override
protected void onPause() {
super.onPause();
Cocos2dxHelper.onPause();
this.mGLSurfaceView.onPause();
}
@Override
protected void onDestroy() {
super.onDestroy();
}
@Override
public void showDialog(final String pTitle, final String pMessage) {
Message msg = new Message();
msg.what = Cocos2dxHandler.HANDLER_SHOW_DIALOG;
msg.obj = new Cocos2dxHandler.DialogMessage(pTitle, pMessage);
this.mHandler.sendMessage(msg);
}
@Override
public void showEditTextDialog(final String pTitle, final String pContent, final int pInputMode, final int pInputFlag, final int pReturnType, final int pMaxLength) {
Message msg = new Message();
msg.what = Cocos2dxHandler.HANDLER_SHOW_EDITBOX_DIALOG;
msg.obj = new Cocos2dxHandler.EditBoxMessage(pTitle, pContent, pInputMode, pInputFlag, pReturnType, pMaxLength);
this.mHandler.sendMessage(msg);
}
@Override
public void runOnGLThread(final Runnable pRunnable) {
this.mGLSurfaceView.queueEvent(pRunnable);
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data)
{
for (OnActivityResultListener listener : Cocos2dxHelper.getOnActivityResultListeners()) {
listener.onActivityResult(requestCode, resultCode, data);
}
super.onActivityResult(requestCode, resultCode, data);
}
protected FrameLayout mFrameLayout = null;
// ===========================================================
// Methods
// ===========================================================
public void init() {
// FrameLayout
ViewGroup.LayoutParams framelayout_params =
new ViewGroup.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT,
ViewGroup.LayoutParams.MATCH_PARENT);
mFrameLayout = new FrameLayout(this);
mFrameLayout.setLayoutParams(framelayout_params);
// Cocos2dxEditText layout
ViewGroup.LayoutParams edittext_layout_params =
new ViewGroup.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT,
ViewGroup.LayoutParams.WRAP_CONTENT);
Cocos2dxEditText edittext = new Cocos2dxEditText(this);
edittext.setLayoutParams(edittext_layout_params);
// ...add to FrameLayout
mFrameLayout.addView(edittext);
// Cocos2dxGLSurfaceView
this.mGLSurfaceView = this.onCreateView();
// ...add to FrameLayout
mFrameLayout.addView(this.mGLSurfaceView);
// Switch to supported OpenGL (ARGB888) mode on emulator
if (isAndroidEmulator())
this.mGLSurfaceView.setEGLConfigChooser(8, 8, 8, 8, 16, 0);
else
this.mGLSurfaceView.setEGLConfigChooser(5, 6, 5, 0, 16, 8);
this.mGLSurfaceView.setCocos2dxRenderer(new Cocos2dxRenderer());
this.mGLSurfaceView.setCocos2dxEditText(edittext);
// Set framelayout as the content view
setContentView(mFrameLayout);
}
public Cocos2dxGLSurfaceView onCreateView() {
Cocos2dxGLSurfaceView glSurfaceView = new Cocos2dxGLSurfaceView(this);
//this line is need on some device if we specify an alpha bits
if(this.mGLContextAttrs[3] > 0) glSurfaceView.getHolder().setFormat(PixelFormat.TRANSLUCENT);
Cocos2dxEGLConfigChooser chooser = new Cocos2dxEGLConfigChooser(this.mGLContextAttrs);
glSurfaceView.setEGLConfigChooser(chooser);
return glSurfaceView;
}
private final static boolean isAndroidEmulator() {
String model = Build.MODEL;
Log.d(TAG, "model=" + model);
String product = Build.PRODUCT;
Log.d(TAG, "product=" + product);
boolean isEmulator = false;
if (product != null) {
isEmulator = product.equals("sdk") || product.contains("_sdk") || product.contains("sdk_");
}
Log.d(TAG, "isEmulator=" + isEmulator);
return isEmulator;
}
// ===========================================================
// Inner and Anonymous Classes
// ===========================================================
}
| |
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.connect;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.ServiceLoader;
import java.util.Set;
import org.camunda.connect.impl.ConnectCoreLogger;
import org.camunda.connect.impl.ConnectLogger;
import org.camunda.connect.spi.Connector;
import org.camunda.connect.spi.ConnectorConfigurator;
import org.camunda.connect.spi.ConnectorProvider;
import org.camunda.connect.spi.ConnectorRequest;
/**
* Provides access to all available connectors.
*/
public class Connectors {

  protected static ConnectCoreLogger LOG = ConnectLogger.CORE_LOGGER;

  public static String HTTP_CONNECTOR_ID = "http-connector";
  public static String SOAP_HTTP_CONNECTOR_ID = "soap-http-connector";

  /** The global instance of the manager */
  static Connectors INSTANCE = new Connectors();

  /**
   * Provides the global instance of the Connectors manager.
   * @return the global instance
   */
  public static Connectors getInstance() {
    return INSTANCE;
  }

  /**
   * @return the connector for the default http connector id or null if
   * no connector is registered for this id
   */
  @SuppressWarnings("unchecked")
  public static <C extends Connector<? extends ConnectorRequest<?>>> C http() {
    return (C) INSTANCE.getConnectorById(HTTP_CONNECTOR_ID);
  }

  /**
   * @return the connector for the default soap http connector id or null
   * if no connector is registered for this id
   */
  @SuppressWarnings("unchecked")
  public static <C extends Connector<? extends ConnectorRequest<?>>> C soap() {
    return (C) INSTANCE.getConnectorById(SOAP_HTTP_CONNECTOR_ID);
  }

  /**
   * @return the connector for the given id or null if no connector is
   * registered for this id
   */
  @SuppressWarnings("unchecked")
  public static <C extends Connector<? extends ConnectorRequest<?>>> C getConnector(String connectorId) {
    return (C) INSTANCE.getConnectorById(connectorId);
  }

  /**
   * @return all registered connectors
   */
  public static Set<Connector<? extends ConnectorRequest<?>>> getAvailableConnectors() {
    return INSTANCE.getAllAvailableConnectors();
  }

  /**
   * Load all available connectors.
   */
  public static void loadConnectors() {
    loadConnectors(null);
  }

  /**
   * Load all available connectors with the given classloader.
   */
  public static void loadConnectors(ClassLoader classloader) {
    INSTANCE.initializeConnectors(classloader);
  }

  /**
   * Register a new connector under its own id.
   */
  protected static void registerConnector(Connector<?> connector) {
    registerConnector(connector.getId(), connector);
  }

  /**
   * Register a new connector under the given connector id.
   */
  protected static void registerConnector(String connectorId, Connector<?> connector) {
    INSTANCE.registerConnectorInstance(connectorId, connector);
  }

  /**
   * Remove the connector registered under the given id, if any.
   */
  protected static void unregisterConnector(String connectorId) {
    INSTANCE.unregisterConnectorInstance(connectorId);
  }

  // instance //////////////////////////////////////////////////////////

  // Lazily initialized map of connector id -> connector instance.
  // volatile is required for the double-checked locking in
  // ensureConnectorProvidersInitialized(): without it, a thread could observe a
  // non-null reference to a not-yet-fully-published map.
  protected volatile Map<String, Connector<?>> availableConnectors;

  /**
   * @return a snapshot of all registered connectors
   */
  public Set<Connector<? extends ConnectorRequest<?>>> getAllAvailableConnectors() {
    ensureConnectorProvidersInitialized();
    return new HashSet<Connector<?>>(availableConnectors.values());
  }

  /**
   * @return the connector for the given id or null if no connector is
   * registered for this id
   */
  @SuppressWarnings("unchecked")
  public <C extends Connector<? extends ConnectorRequest<?>>> C getConnectorById(String connectorId) {
    ensureConnectorProvidersInitialized();
    return (C) availableConnectors.get(connectorId);
  }

  /**
   * Detect all available connectors in the classpath using a {@link ServiceLoader}.
   */
  protected void ensureConnectorProvidersInitialized() {
    // Double-checked locking; safe because availableConnectors is volatile.
    if (availableConnectors == null) {
      synchronized (Connectors.class) {
        if (availableConnectors == null) {
          initializeConnectors(null);
        }
      }
    }
  }

  /**
   * Discovers providers and configurators on the classpath and atomically
   * publishes the resulting connector map.
   */
  protected void initializeConnectors(ClassLoader classLoader) {
    Map<String, Connector<?>> connectors = new HashMap<String, Connector<?>>();

    if(classLoader == null) {
      classLoader = Connectors.class.getClassLoader();
    }

    // discover available custom connector providers on the classpath
    registerConnectors(connectors, classLoader);

    // discover and apply connector configurators on the classpath
    applyConfigurators(connectors, classLoader);

    // publish the fully populated map in a single volatile write
    this.availableConnectors = connectors;
  }

  protected void registerConnectors(Map<String, Connector<?>> connectors, ClassLoader classLoader) {
    ServiceLoader<ConnectorProvider> providers = ServiceLoader.load(ConnectorProvider.class, classLoader);
    for (ConnectorProvider provider : providers) {
      registerProvider(connectors, provider);
    }
  }

  protected void registerProvider(Map<String, Connector<?>> connectors, ConnectorProvider provider) {
    String connectorId = provider.getConnectorId();
    // Two providers claiming the same id is a configuration error.
    if (connectors.containsKey(connectorId)) {
      throw LOG.multipleConnectorProvidersFound(connectorId);
    }
    else {
      Connector<?> connectorInstance = provider.createConnectorInstance();
      LOG.connectorProviderDiscovered(provider, connectorId, connectorInstance);
      connectors.put(connectorId, connectorInstance);
    }
  }

  protected void registerConnectorInstance(String connectorId, Connector<?> connector) {
    ensureConnectorProvidersInitialized();
    synchronized (Connectors.class) {
      availableConnectors.put(connectorId, connector);
    }
  }

  protected void unregisterConnectorInstance(String connectorId) {
    ensureConnectorProvidersInitialized();
    synchronized (Connectors.class) {
      availableConnectors.remove(connectorId);
    }
  }

  @SuppressWarnings("rawtypes")
  protected void applyConfigurators(Map<String, Connector<?>> connectors, ClassLoader classLoader) {
    ServiceLoader<ConnectorConfigurator> configurators = ServiceLoader.load(ConnectorConfigurator.class, classLoader);
    for (ConnectorConfigurator configurator : configurators) {
      LOG.connectorConfiguratorDiscovered(configurator);
      applyConfigurator(connectors, configurator);
    }
  }

  @SuppressWarnings({ "rawtypes", "unchecked" })
  protected void applyConfigurator(Map<String, Connector<?>> connectors, ConnectorConfigurator configurator) {
    // A configurator applies to every connector assignable to its declared class.
    for (Connector<?> connector : connectors.values()) {
      if (configurator.getConnectorClass().isAssignableFrom(connector.getClass())) {
        configurator.configure(connector);
      }
    }
  }

}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.fs.shell;
import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY;
import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SHELL_MISSING_DEFAULT_FS_WARNING_KEY;
import static org.junit.Assert.*;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.*;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.net.URI;
import java.net.URISyntaxException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.LinkedList;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FilterFileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.AclEntry;
import org.apache.hadoop.fs.permission.AclStatus;
import org.apache.hadoop.fs.permission.FsPermission;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.mockito.InOrder;
/**
* JUnit test class for {@link org.apache.hadoop.fs.shell.Ls}
*
*/
public class TestLs {
// Shared configuration pointing the default FS at the "mockfs" scheme; built once in setup().
private static Configuration conf;
// Mockito mock backing the mock file system; re-stubbed before every test in resetMock().
private static FileSystem mockFs;
// Fixed timestamp captured at class load, so time-based expectations are stable within a run.
private static final Date NOW = new Date();
@BeforeClass
public static void setup() throws IOException {
  // One-time wiring: route the "mockfs" scheme to MockFileSystem and make it the default FS.
  mockFs = mock(FileSystem.class);
  conf = new Configuration();
  conf.setClass("fs.mockfs.impl", MockFileSystem.class, FileSystem.class);
  conf.set(FS_DEFAULT_NAME_KEY, "mockfs:///");
}
@Before
public void resetMock() throws IOException, URISyntaxException {
  // Start every test from a clean mock with empty ACLs and the configured default URI.
  reset(mockFs);
  AclStatus emptyAcls = mock(AclStatus.class);
  when(emptyAcls.getEntries()).thenReturn(new ArrayList<AclEntry>());
  when(mockFs.getAclStatus(any(Path.class))).thenReturn(emptyAcls);
  when(mockFs.getUri()).thenReturn(new URI(conf.get(FS_DEFAULT_NAME_KEY)));
}
// check that default options are correct
@Test
public void processOptionsNone() throws IOException {
  // With no flags, only directory recursion into args is on by default.
  LinkedList<String> args = new LinkedList<String>();
  Ls shell = new Ls();
  shell.processOptions(args);
  assertFalse(shell.isPathOnly());
  assertTrue(shell.isDirRecurse());
  assertFalse(shell.isHumanReadable());
  assertFalse(shell.isRecursive());
  assertFalse(shell.isOrderReverse());
  assertFalse(shell.isOrderSize());
  assertFalse(shell.isOrderTime());
  assertFalse(shell.isUseAtime());
  assertFalse(shell.isDisplayECPolicy());
}
// check the -C option is recognised
@Test
public void processOptionsPathOnly() throws IOException {
  // -C must enable path-only output and leave every other option at its default.
  LinkedList<String> args = new LinkedList<String>();
  args.add("-C");
  Ls shell = new Ls();
  shell.processOptions(args);
  assertTrue(shell.isPathOnly());
  assertTrue(shell.isDirRecurse());
  assertFalse(shell.isHumanReadable());
  assertFalse(shell.isRecursive());
  assertFalse(shell.isOrderReverse());
  assertFalse(shell.isOrderSize());
  assertFalse(shell.isOrderTime());
  assertFalse(shell.isUseAtime());
  assertFalse(shell.isDisplayECPolicy());
}
// check the -d option is recognised
@Test
public void processOptionsDirectory() throws IOException {
  // -d must turn off directory recursion and leave every other option at its default.
  LinkedList<String> args = new LinkedList<String>();
  args.add("-d");
  Ls shell = new Ls();
  shell.processOptions(args);
  assertFalse(shell.isPathOnly());
  assertFalse(shell.isDirRecurse());
  assertFalse(shell.isHumanReadable());
  assertFalse(shell.isRecursive());
  assertFalse(shell.isOrderReverse());
  assertFalse(shell.isOrderSize());
  assertFalse(shell.isOrderTime());
  assertFalse(shell.isUseAtime());
  assertFalse(shell.isDisplayECPolicy());
}
// check the -h option is recognised
@Test
public void processOptionsHuman() throws IOException {
  // -h must enable human-readable sizes and leave every other option at its default.
  LinkedList<String> args = new LinkedList<String>();
  args.add("-h");
  Ls shell = new Ls();
  shell.processOptions(args);
  assertFalse(shell.isPathOnly());
  assertTrue(shell.isDirRecurse());
  assertTrue(shell.isHumanReadable());
  assertFalse(shell.isRecursive());
  assertFalse(shell.isOrderReverse());
  assertFalse(shell.isOrderSize());
  assertFalse(shell.isOrderTime());
  assertFalse(shell.isUseAtime());
  assertFalse(shell.isDisplayECPolicy());
}
// check the -R option is recognised
@Test
public void processOptionsRecursive() throws IOException {
  // -R must enable recursive listing and leave every other option at its default.
  LinkedList<String> args = new LinkedList<String>();
  args.add("-R");
  Ls shell = new Ls();
  shell.processOptions(args);
  assertFalse(shell.isPathOnly());
  assertTrue(shell.isDirRecurse());
  assertFalse(shell.isHumanReadable());
  assertTrue(shell.isRecursive());
  assertFalse(shell.isOrderReverse());
  assertFalse(shell.isOrderSize());
  assertFalse(shell.isOrderTime());
  assertFalse(shell.isUseAtime());
  assertFalse(shell.isDisplayECPolicy());
}
// check the -r option is recognised
@Test
public void processOptionsReverse() throws IOException {
  LinkedList<String> args = new LinkedList<>();
  args.add("-r");
  Ls shell = new Ls();
  shell.processOptions(args);
  // only the reverse-order flag flips
  assertFalse(shell.isPathOnly());
  assertTrue(shell.isDirRecurse());
  assertFalse(shell.isHumanReadable());
  assertFalse(shell.isRecursive());
  assertTrue(shell.isOrderReverse());
  assertFalse(shell.isOrderSize());
  assertFalse(shell.isOrderTime());
  assertFalse(shell.isUseAtime());
  assertFalse(shell.isDisplayECPolicy());
}
// check the -S option is recognised
@Test
public void processOptionsSize() throws IOException {
  LinkedList<String> args = new LinkedList<>();
  args.add("-S");
  Ls shell = new Ls();
  shell.processOptions(args);
  // only the size-order flag flips
  assertFalse(shell.isPathOnly());
  assertTrue(shell.isDirRecurse());
  assertFalse(shell.isHumanReadable());
  assertFalse(shell.isRecursive());
  assertFalse(shell.isOrderReverse());
  assertTrue(shell.isOrderSize());
  assertFalse(shell.isOrderTime());
  assertFalse(shell.isUseAtime());
  assertFalse(shell.isDisplayECPolicy());
}
// check the -t option is recognised
@Test
public void processOptionsMtime() throws IOException {
  LinkedList<String> args = new LinkedList<>();
  args.add("-t");
  Ls shell = new Ls();
  shell.processOptions(args);
  // only the time-order flag flips
  assertFalse(shell.isPathOnly());
  assertTrue(shell.isDirRecurse());
  assertFalse(shell.isHumanReadable());
  assertFalse(shell.isRecursive());
  assertFalse(shell.isOrderReverse());
  assertFalse(shell.isOrderSize());
  assertTrue(shell.isOrderTime());
  assertFalse(shell.isUseAtime());
  assertFalse(shell.isDisplayECPolicy());
}
// check the precedence of the -t and -S options
@Test
public void processOptionsMtimeSize() throws IOException {
  LinkedList<String> args = new LinkedList<>();
  args.add("-t");
  args.add("-S");
  Ls shell = new Ls();
  shell.processOptions(args);
  assertFalse(shell.isPathOnly());
  assertTrue(shell.isDirRecurse());
  assertFalse(shell.isHumanReadable());
  assertFalse(shell.isRecursive());
  assertFalse(shell.isOrderReverse());
  // -t takes precedence: size ordering is suppressed, time ordering wins
  assertFalse(shell.isOrderSize());
  assertTrue(shell.isOrderTime());
  assertFalse(shell.isUseAtime());
  assertFalse(shell.isDisplayECPolicy());
}
// check the precedence of the -t, -S and -r options
@Test
public void processOptionsMtimeSizeReverse() throws IOException {
  LinkedList<String> args = new LinkedList<>();
  args.add("-t");
  args.add("-S");
  args.add("-r");
  Ls shell = new Ls();
  shell.processOptions(args);
  assertFalse(shell.isPathOnly());
  assertTrue(shell.isDirRecurse());
  assertFalse(shell.isHumanReadable());
  assertFalse(shell.isRecursive());
  // -r combines with -t; -S is still suppressed by -t
  assertTrue(shell.isOrderReverse());
  assertFalse(shell.isOrderSize());
  assertTrue(shell.isOrderTime());
  assertFalse(shell.isUseAtime());
  assertFalse(shell.isDisplayECPolicy());
}
// check the -u option is recognised
@Test
public void processOptionsAtime() throws IOException {
  LinkedList<String> args = new LinkedList<>();
  args.add("-u");
  Ls shell = new Ls();
  shell.processOptions(args);
  // only the use-access-time flag flips
  assertFalse(shell.isPathOnly());
  assertTrue(shell.isDirRecurse());
  assertFalse(shell.isHumanReadable());
  assertFalse(shell.isRecursive());
  assertFalse(shell.isOrderReverse());
  assertFalse(shell.isOrderSize());
  assertFalse(shell.isOrderTime());
  assertTrue(shell.isUseAtime());
  assertFalse(shell.isDisplayECPolicy());
}
// check the -e option is recognised
@Test
public void processOptionsDisplayECPolicy() throws IOException {
  LinkedList<String> args = new LinkedList<>();
  args.add("-e");
  Ls shell = new Ls();
  shell.processOptions(args);
  // only the erasure-coding-policy display flag flips
  assertFalse(shell.isPathOnly());
  assertTrue(shell.isDirRecurse());
  assertFalse(shell.isHumanReadable());
  assertFalse(shell.isRecursive());
  assertFalse(shell.isOrderReverse());
  assertFalse(shell.isOrderSize());
  assertFalse(shell.isOrderTime());
  assertFalse(shell.isUseAtime());
  assertTrue(shell.isDisplayECPolicy());
}
// check all options are handled correctly when combined
@Test
public void processOptionsAll() throws IOException {
  LinkedList<String> args = new LinkedList<>();
  args.add("-C"); // show file path only
  args.add("-d"); // directory
  args.add("-h"); // human readable
  args.add("-R"); // recursive
  args.add("-r"); // reverse order
  args.add("-t"); // time order
  args.add("-S"); // size order
  args.add("-u"); // show atime
  args.add("-e"); // show EC policies
  Ls shell = new Ls();
  shell.processOptions(args);
  assertTrue(shell.isPathOnly());
  assertFalse(shell.isDirRecurse());
  assertTrue(shell.isHumanReadable());
  assertFalse(shell.isRecursive()); // -d overrules -R
  assertTrue(shell.isOrderReverse());
  assertFalse(shell.isOrderSize()); // -t overrules -S
  assertTrue(shell.isOrderTime());
  assertTrue(shell.isUseAtime());
  assertTrue(shell.isDisplayECPolicy());
}
// check listing of a single file
@Test
public void processPathFile() throws IOException {
  TestFile file = new TestFile("testDir", "testFile");
  LinkedList<PathData> items = new LinkedList<>();
  items.add(file.getPathData());
  PrintStream mockOut = mock(PrintStream.class);
  Ls shell = new Ls();
  shell.out = mockOut;
  shell.processOptions(new LinkedList<String>());
  String format = TestFile.computeLineFormat(items);
  shell.processArguments(items);
  // a single file prints exactly one formatted line, no "Found N items" header
  InOrder ordered = inOrder(mockOut);
  ordered.verify(mockOut).println(file.formatLineMtime(format));
  verifyNoMoreInteractions(mockOut);
}
// check listing of multiple files
@Test
public void processPathFiles() throws IOException {
  TestFile file1 = new TestFile("testDir01", "testFile01");
  TestFile file2 = new TestFile("testDir02", "testFile02");
  TestFile file3 = new TestFile("testDir03", "testFile03");
  TestFile file4 = new TestFile("testDir04", "testFile04");
  TestFile file5 = new TestFile("testDir05", "testFile05");
  TestFile file6 = new TestFile("testDir06", "testFile06");
  LinkedList<PathData> items = new LinkedList<>();
  items.add(file1.getPathData());
  items.add(file2.getPathData());
  items.add(file3.getPathData());
  items.add(file4.getPathData());
  items.add(file5.getPathData());
  items.add(file6.getPathData());
  PrintStream mockOut = mock(PrintStream.class);
  Ls shell = new Ls();
  shell.out = mockOut;
  shell.processOptions(new LinkedList<String>());
  String format = TestFile.computeLineFormat(items);
  shell.processArguments(items);
  // files given directly on the command line print in argument order
  InOrder ordered = inOrder(mockOut);
  ordered.verify(mockOut).println(file1.formatLineMtime(format));
  ordered.verify(mockOut).println(file2.formatLineMtime(format));
  ordered.verify(mockOut).println(file3.formatLineMtime(format));
  ordered.verify(mockOut).println(file4.formatLineMtime(format));
  ordered.verify(mockOut).println(file5.formatLineMtime(format));
  ordered.verify(mockOut).println(file6.formatLineMtime(format));
  verifyNoMoreInteractions(mockOut);
}
// check listing of a single directory
@Test
public void processPathDirectory() throws IOException {
  TestFile file1 = new TestFile("testDirectory", "testFile01");
  TestFile file2 = new TestFile("testDirectory", "testFile02");
  TestFile file3 = new TestFile("testDirectory", "testFile03");
  TestFile file4 = new TestFile("testDirectory", "testFile04");
  TestFile file5 = new TestFile("testDirectory", "testFile05");
  TestFile file6 = new TestFile("testDirectory", "testFile06");
  TestFile dir = new TestFile("", "testDirectory");
  dir.setIsDir(true);
  dir.addContents(file1, file2, file3, file4, file5, file6);
  LinkedList<PathData> items = new LinkedList<>();
  items.add(dir.getPathData());
  PrintStream mockOut = mock(PrintStream.class);
  Ls shell = new Ls();
  shell.out = mockOut;
  shell.processOptions(new LinkedList<String>());
  String format = TestFile.computeLineFormat(items);
  shell.processArguments(items);
  // a directory prints an item-count header followed by its contents
  InOrder ordered = inOrder(mockOut);
  ordered.verify(mockOut).println("Found 6 items");
  ordered.verify(mockOut).println(file1.formatLineMtime(format));
  ordered.verify(mockOut).println(file2.formatLineMtime(format));
  ordered.verify(mockOut).println(file3.formatLineMtime(format));
  ordered.verify(mockOut).println(file4.formatLineMtime(format));
  ordered.verify(mockOut).println(file5.formatLineMtime(format));
  ordered.verify(mockOut).println(file6.formatLineMtime(format));
  verifyNoMoreInteractions(mockOut);
}
// check listing of multiple directories
@Test
public void processPathDirectories() throws IOException {
  TestFile file1 = new TestFile("testDirectory01", "testFile01");
  TestFile file2 = new TestFile("testDirectory01", "testFile02");
  TestFile file3 = new TestFile("testDirectory01", "testFile03");
  TestFile dir1 = new TestFile("", "testDirectory01");
  dir1.setIsDir(true);
  dir1.addContents(file1, file2, file3);
  TestFile file4 = new TestFile("testDirectory02", "testFile04");
  TestFile file5 = new TestFile("testDirectory02", "testFile05");
  TestFile file6 = new TestFile("testDirectory02", "testFile06");
  TestFile dir2 = new TestFile("", "testDirectory02");
  dir2.setIsDir(true);
  dir2.addContents(file4, file5, file6);
  LinkedList<PathData> items = new LinkedList<>();
  items.add(dir1.getPathData());
  items.add(dir2.getPathData());
  PrintStream mockOut = mock(PrintStream.class);
  Ls shell = new Ls();
  shell.out = mockOut;
  shell.processOptions(new LinkedList<String>());
  String format = TestFile.computeLineFormat(items);
  shell.processArguments(items);
  // each directory gets its own item-count header and listing
  InOrder ordered = inOrder(mockOut);
  ordered.verify(mockOut).println("Found 3 items");
  ordered.verify(mockOut).println(file1.formatLineMtime(format));
  ordered.verify(mockOut).println(file2.formatLineMtime(format));
  ordered.verify(mockOut).println(file3.formatLineMtime(format));
  ordered.verify(mockOut).println("Found 3 items");
  ordered.verify(mockOut).println(file4.formatLineMtime(format));
  ordered.verify(mockOut).println(file5.formatLineMtime(format));
  ordered.verify(mockOut).println(file6.formatLineMtime(format));
  verifyNoMoreInteractions(mockOut);
}
// check the default ordering
@Test
public void processPathDirOrderDefault() throws IOException {
  TestFile file1 = new TestFile("testDirectory", "testFile01");
  TestFile file2 = new TestFile("testDirectory", "testFile02");
  TestFile file3 = new TestFile("testDirectory", "testFile03");
  TestFile file4 = new TestFile("testDirectory", "testFile04");
  TestFile file5 = new TestFile("testDirectory", "testFile05");
  TestFile file6 = new TestFile("testDirectory", "testFile06");
  TestFile dir = new TestFile("", "testDirectory");
  dir.setIsDir(true);
  // add contents in non-lexicographic order to show they get sorted
  dir.addContents(file1, file3, file5, file2, file4, file6);
  LinkedList<PathData> items = new LinkedList<>();
  items.add(dir.getPathData());
  PrintStream mockOut = mock(PrintStream.class);
  Ls shell = new Ls();
  shell.out = mockOut;
  shell.processOptions(new LinkedList<String>());
  String format = TestFile.computeLineFormat(items);
  shell.processArguments(items);
  // default ordering is lexicographic by name
  InOrder ordered = inOrder(mockOut);
  ordered.verify(mockOut).println("Found 6 items");
  ordered.verify(mockOut).println(file1.formatLineMtime(format));
  ordered.verify(mockOut).println(file2.formatLineMtime(format));
  ordered.verify(mockOut).println(file3.formatLineMtime(format));
  ordered.verify(mockOut).println(file4.formatLineMtime(format));
  ordered.verify(mockOut).println(file5.formatLineMtime(format));
  ordered.verify(mockOut).println(file6.formatLineMtime(format));
  verifyNoMoreInteractions(mockOut);
}
// check reverse default ordering
@Test
public void processPathDirOrderDefaultReverse() throws IOException {
  TestFile file1 = new TestFile("testDirectory", "testFile01");
  TestFile file2 = new TestFile("testDirectory", "testFile02");
  TestFile file3 = new TestFile("testDirectory", "testFile03");
  TestFile file4 = new TestFile("testDirectory", "testFile04");
  TestFile file5 = new TestFile("testDirectory", "testFile05");
  TestFile file6 = new TestFile("testDirectory", "testFile06");
  TestFile dir = new TestFile("", "testDirectory");
  dir.setIsDir(true);
  // add contents in non-lexicographic order to show they get sorted
  dir.addContents(file1, file3, file5, file2, file4, file6);
  LinkedList<PathData> items = new LinkedList<>();
  items.add(dir.getPathData());
  PrintStream mockOut = mock(PrintStream.class);
  Ls shell = new Ls();
  shell.out = mockOut;
  LinkedList<String> args = new LinkedList<>();
  args.add("-r");
  shell.processOptions(args);
  String format = TestFile.computeLineFormat(items);
  shell.processArguments(items);
  // -r reverses the lexicographic ordering
  InOrder ordered = inOrder(mockOut);
  ordered.verify(mockOut).println("Found 6 items");
  ordered.verify(mockOut).println(file6.formatLineMtime(format));
  ordered.verify(mockOut).println(file5.formatLineMtime(format));
  ordered.verify(mockOut).println(file4.formatLineMtime(format));
  ordered.verify(mockOut).println(file3.formatLineMtime(format));
  ordered.verify(mockOut).println(file2.formatLineMtime(format));
  ordered.verify(mockOut).println(file1.formatLineMtime(format));
  verifyNoMoreInteractions(mockOut);
}
// check mtime ordering (-t option); most recent first in line with unix
// convention
@Test
public void processPathDirOrderMtime() throws IOException {
  TestFile file1 = new TestFile("testDirectory", "testFile01");
  TestFile file2 = new TestFile("testDirectory", "testFile02");
  TestFile file3 = new TestFile("testDirectory", "testFile03");
  TestFile file4 = new TestFile("testDirectory", "testFile04");
  TestFile file5 = new TestFile("testDirectory", "testFile05");
  TestFile file6 = new TestFile("testDirectory", "testFile06");
  // set file mtime in a different order to the file names
  file1.setMtime(NOW.getTime() + 10);
  file2.setMtime(NOW.getTime() + 30);
  file3.setMtime(NOW.getTime() + 20);
  file4.setMtime(NOW.getTime() + 60);
  file5.setMtime(NOW.getTime() + 50);
  file6.setMtime(NOW.getTime() + 40);
  TestFile dir = new TestFile("", "testDirectory");
  dir.setIsDir(true);
  dir.addContents(file1, file2, file3, file4, file5, file6);
  LinkedList<PathData> items = new LinkedList<>();
  items.add(dir.getPathData());
  PrintStream mockOut = mock(PrintStream.class);
  Ls shell = new Ls();
  shell.out = mockOut;
  LinkedList<String> args = new LinkedList<>();
  args.add("-t");
  shell.processOptions(args);
  String format = TestFile.computeLineFormat(items);
  shell.processArguments(items);
  // expect newest mtime first: 04 (+60), 05 (+50), 06 (+40), 02, 03, 01
  InOrder ordered = inOrder(mockOut);
  ordered.verify(mockOut).println("Found 6 items");
  ordered.verify(mockOut).println(file4.formatLineMtime(format));
  ordered.verify(mockOut).println(file5.formatLineMtime(format));
  ordered.verify(mockOut).println(file6.formatLineMtime(format));
  ordered.verify(mockOut).println(file2.formatLineMtime(format));
  ordered.verify(mockOut).println(file3.formatLineMtime(format));
  ordered.verify(mockOut).println(file1.formatLineMtime(format));
  verifyNoMoreInteractions(mockOut);
}
// check reverse mtime ordering (-t -r options)
@Test
public void processPathDirOrderMtimeReverse() throws IOException {
  TestFile file1 = new TestFile("testDirectory", "testFile01");
  TestFile file2 = new TestFile("testDirectory", "testFile02");
  TestFile file3 = new TestFile("testDirectory", "testFile03");
  TestFile file4 = new TestFile("testDirectory", "testFile04");
  TestFile file5 = new TestFile("testDirectory", "testFile05");
  TestFile file6 = new TestFile("testDirectory", "testFile06");
  // set file mtime in a different order to the file names
  file1.setMtime(NOW.getTime() + 10);
  file2.setMtime(NOW.getTime() + 30);
  file3.setMtime(NOW.getTime() + 20);
  file4.setMtime(NOW.getTime() + 60);
  file5.setMtime(NOW.getTime() + 50);
  file6.setMtime(NOW.getTime() + 40);
  TestFile dir = new TestFile("", "testDirectory");
  dir.setIsDir(true);
  dir.addContents(file1, file2, file3, file4, file5, file6);
  LinkedList<PathData> items = new LinkedList<>();
  items.add(dir.getPathData());
  PrintStream mockOut = mock(PrintStream.class);
  Ls shell = new Ls();
  shell.out = mockOut;
  LinkedList<String> args = new LinkedList<>();
  args.add("-t");
  args.add("-r");
  shell.processOptions(args);
  String format = TestFile.computeLineFormat(items);
  shell.processArguments(items);
  // expect oldest mtime first: 01 (+10), 03 (+20), 02 (+30), 06, 05, 04
  InOrder ordered = inOrder(mockOut);
  ordered.verify(mockOut).println("Found 6 items");
  ordered.verify(mockOut).println(file1.formatLineMtime(format));
  ordered.verify(mockOut).println(file3.formatLineMtime(format));
  ordered.verify(mockOut).println(file2.formatLineMtime(format));
  ordered.verify(mockOut).println(file6.formatLineMtime(format));
  ordered.verify(mockOut).println(file5.formatLineMtime(format));
  ordered.verify(mockOut).println(file4.formatLineMtime(format));
  verifyNoMoreInteractions(mockOut);
}
// check multiple directories are ordered independently of each other
@Test
public void processPathDirsOrderMtime() throws IOException {
  TestFile file1 = new TestFile("testDirectory01", "testFile01");
  TestFile file2 = new TestFile("testDirectory01", "testFile02");
  TestFile file3 = new TestFile("testDirectory01", "testFile03");
  TestFile file4 = new TestFile("testDirectory02", "testFile04");
  TestFile file5 = new TestFile("testDirectory02", "testFile05");
  TestFile file6 = new TestFile("testDirectory02", "testFile06");
  // set file mtime in a different order to the file names
  file1.setMtime(NOW.getTime() + 10);
  file2.setMtime(NOW.getTime() + 30);
  file3.setMtime(NOW.getTime() + 20);
  file4.setMtime(NOW.getTime() + 60);
  file5.setMtime(NOW.getTime() + 40);
  file6.setMtime(NOW.getTime() + 50);
  TestFile dir1 = new TestFile("", "testDirectory01");
  dir1.setIsDir(true);
  dir1.addContents(file1, file2, file3);
  TestFile dir2 = new TestFile("", "testDirectory02");
  dir2.setIsDir(true);
  dir2.addContents(file4, file5, file6);
  LinkedList<PathData> items = new LinkedList<>();
  items.add(dir1.getPathData());
  items.add(dir2.getPathData());
  PrintStream mockOut = mock(PrintStream.class);
  Ls shell = new Ls();
  shell.out = mockOut;
  LinkedList<String> args = new LinkedList<>();
  args.add("-t");
  shell.processOptions(args);
  String format = TestFile.computeLineFormat(items);
  shell.processArguments(items);
  // each directory is sorted on its own, newest mtime first
  InOrder ordered = inOrder(mockOut);
  ordered.verify(mockOut).println("Found 3 items");
  ordered.verify(mockOut).println(file2.formatLineMtime(format));
  ordered.verify(mockOut).println(file3.formatLineMtime(format));
  ordered.verify(mockOut).println(file1.formatLineMtime(format));
  ordered.verify(mockOut).println("Found 3 items");
  ordered.verify(mockOut).println(file4.formatLineMtime(format));
  ordered.verify(mockOut).println(file6.formatLineMtime(format));
  ordered.verify(mockOut).println(file5.formatLineMtime(format));
  verifyNoMoreInteractions(mockOut);
}
// check mtime ordering with large time gaps between files (checks integer
// overflow issues)
@Test
public void processPathDirOrderMtimeYears() throws IOException {
  TestFile file1 = new TestFile("testDirectory", "testFile01");
  TestFile file2 = new TestFile("testDirectory", "testFile02");
  TestFile file3 = new TestFile("testDirectory", "testFile03");
  TestFile file4 = new TestFile("testDirectory", "testFile04");
  TestFile file5 = new TestFile("testDirectory", "testFile05");
  TestFile file6 = new TestFile("testDirectory", "testFile06");
  // spread mtimes across the whole long range so an int-based comparator
  // would overflow and mis-order the listing
  file1.setMtime(NOW.getTime() + Integer.MAX_VALUE);
  file2.setMtime(NOW.getTime() + Integer.MIN_VALUE);
  file3.setMtime(NOW.getTime() + 0);
  file4.setMtime(NOW.getTime() + Integer.MAX_VALUE + Integer.MAX_VALUE);
  file5.setMtime(NOW.getTime() + 0);
  file6.setMtime(NOW.getTime() + Integer.MIN_VALUE + Integer.MIN_VALUE);
  TestFile dir = new TestFile("", "testDirectory");
  dir.setIsDir(true);
  dir.addContents(file1, file2, file3, file4, file5, file6);
  LinkedList<PathData> items = new LinkedList<>();
  items.add(dir.getPathData());
  PrintStream mockOut = mock(PrintStream.class);
  Ls shell = new Ls();
  shell.out = mockOut;
  LinkedList<String> args = new LinkedList<>();
  args.add("-t");
  shell.processOptions(args);
  String format = TestFile.computeLineFormat(items);
  shell.processArguments(items);
  // newest first: 04, 01, then the two NOW files (03 before 05 by name), 02, 06
  InOrder ordered = inOrder(mockOut);
  ordered.verify(mockOut).println("Found 6 items");
  ordered.verify(mockOut).println(file4.formatLineMtime(format));
  ordered.verify(mockOut).println(file1.formatLineMtime(format));
  ordered.verify(mockOut).println(file3.formatLineMtime(format));
  ordered.verify(mockOut).println(file5.formatLineMtime(format));
  ordered.verify(mockOut).println(file2.formatLineMtime(format));
  ordered.verify(mockOut).println(file6.formatLineMtime(format));
  verifyNoMoreInteractions(mockOut);
}
// check length order (-S option)
@Test
public void processPathDirOrderLength() throws IOException {
  TestFile file1 = new TestFile("testDirectory", "testFile01");
  TestFile file2 = new TestFile("testDirectory", "testFile02");
  TestFile file3 = new TestFile("testDirectory", "testFile03");
  TestFile file4 = new TestFile("testDirectory", "testFile04");
  TestFile file5 = new TestFile("testDirectory", "testFile05");
  TestFile file6 = new TestFile("testDirectory", "testFile06");
  // set file length in a different order to the file names
  long base = 1234567890;
  file1.setLength(base + 10);
  file2.setLength(base + 30);
  file3.setLength(base + 20);
  file4.setLength(base + 60);
  file5.setLength(base + 50);
  file6.setLength(base + 40);
  TestFile dir = new TestFile("", "testDirectory");
  dir.setIsDir(true);
  dir.addContents(file1, file2, file3, file4, file5, file6);
  LinkedList<PathData> items = new LinkedList<>();
  items.add(dir.getPathData());
  PrintStream mockOut = mock(PrintStream.class);
  Ls shell = new Ls();
  shell.out = mockOut;
  LinkedList<String> args = new LinkedList<>();
  args.add("-S");
  shell.processOptions(args);
  String format = TestFile.computeLineFormat(items);
  shell.processArguments(items);
  // expect largest first: 04 (+60), 05 (+50), 06 (+40), 02, 03, 01
  InOrder ordered = inOrder(mockOut);
  ordered.verify(mockOut).println("Found 6 items");
  ordered.verify(mockOut).println(file4.formatLineMtime(format));
  ordered.verify(mockOut).println(file5.formatLineMtime(format));
  ordered.verify(mockOut).println(file6.formatLineMtime(format));
  ordered.verify(mockOut).println(file2.formatLineMtime(format));
  ordered.verify(mockOut).println(file3.formatLineMtime(format));
  ordered.verify(mockOut).println(file1.formatLineMtime(format));
  verifyNoMoreInteractions(mockOut);
}
// check reverse length order (-S -r options)
@Test
public void processPathDirOrderLengthReverse() throws IOException {
  TestFile file1 = new TestFile("testDirectory", "testFile01");
  TestFile file2 = new TestFile("testDirectory", "testFile02");
  TestFile file3 = new TestFile("testDirectory", "testFile03");
  TestFile file4 = new TestFile("testDirectory", "testFile04");
  TestFile file5 = new TestFile("testDirectory", "testFile05");
  TestFile file6 = new TestFile("testDirectory", "testFile06");
  // set file length in a different order to the file names
  long base = 1234567890;
  file1.setLength(base + 10);
  file2.setLength(base + 30);
  file3.setLength(base + 20);
  file4.setLength(base + 60);
  file5.setLength(base + 50);
  file6.setLength(base + 40);
  TestFile dir = new TestFile("", "testDirectory");
  dir.setIsDir(true);
  dir.addContents(file1, file2, file3, file4, file5, file6);
  LinkedList<PathData> items = new LinkedList<>();
  items.add(dir.getPathData());
  PrintStream mockOut = mock(PrintStream.class);
  Ls shell = new Ls();
  shell.out = mockOut;
  LinkedList<String> args = new LinkedList<>();
  args.add("-S");
  args.add("-r");
  shell.processOptions(args);
  String format = TestFile.computeLineFormat(items);
  shell.processArguments(items);
  // expect smallest first: 01 (+10), 03 (+20), 02 (+30), 06, 05, 04
  InOrder ordered = inOrder(mockOut);
  ordered.verify(mockOut).println("Found 6 items");
  ordered.verify(mockOut).println(file1.formatLineMtime(format));
  ordered.verify(mockOut).println(file3.formatLineMtime(format));
  ordered.verify(mockOut).println(file2.formatLineMtime(format));
  ordered.verify(mockOut).println(file6.formatLineMtime(format));
  ordered.verify(mockOut).println(file5.formatLineMtime(format));
  ordered.verify(mockOut).println(file4.formatLineMtime(format));
  verifyNoMoreInteractions(mockOut);
}
// check length ordering with large size gaps between files (checks integer
// overflow issues)
@Test
public void processPathDirOrderLengthLarge() throws IOException {
  TestFile testfile01 = new TestFile("testDirectory", "testFile01");
  TestFile testfile02 = new TestFile("testDirectory", "testFile02");
  TestFile testfile03 = new TestFile("testDirectory", "testFile03");
  TestFile testfile04 = new TestFile("testDirectory", "testFile04");
  TestFile testfile05 = new TestFile("testDirectory", "testFile05");
  TestFile testfile06 = new TestFile("testDirectory", "testFile06");
  // set file length in different order to file names; multiples of
  // Integer.MAX_VALUE push lengths past the int range so an int-based
  // comparator would overflow (use an uppercase 'L' suffix — lowercase 'l'
  // is easily misread as the digit '1')
  long length = 1234567890;
  testfile01.setLength(length + 3L * Integer.MAX_VALUE);
  testfile02.setLength(length + Integer.MAX_VALUE);
  testfile03.setLength(length + 2L * Integer.MAX_VALUE);
  testfile04.setLength(length + 4L * Integer.MAX_VALUE);
  testfile05.setLength(length + 2L * Integer.MAX_VALUE);
  testfile06.setLength(length);
  TestFile testDir = new TestFile("", "testDirectory");
  testDir.setIsDir(true);
  testDir.addContents(testfile01, testfile02, testfile03, testfile04,
      testfile05, testfile06);
  LinkedList<PathData> pathData = new LinkedList<PathData>();
  pathData.add(testDir.getPathData());
  PrintStream out = mock(PrintStream.class);
  Ls ls = new Ls();
  ls.out = out;
  LinkedList<String> options = new LinkedList<String>();
  options.add("-S");
  ls.processOptions(options);
  String lineFormat = TestFile.computeLineFormat(pathData);
  ls.processArguments(pathData);
  // largest first: 04, 01, then the tied pair 03/05 (by name), 02, 06
  InOrder inOrder = inOrder(out);
  inOrder.verify(out).println("Found 6 items");
  inOrder.verify(out).println(testfile04.formatLineMtime(lineFormat));
  inOrder.verify(out).println(testfile01.formatLineMtime(lineFormat));
  inOrder.verify(out).println(testfile03.formatLineMtime(lineFormat));
  inOrder.verify(out).println(testfile05.formatLineMtime(lineFormat));
  inOrder.verify(out).println(testfile02.formatLineMtime(lineFormat));
  inOrder.verify(out).println(testfile06.formatLineMtime(lineFormat));
  verifyNoMoreInteractions(out);
}
// check access time display (-u option)
@Test
public void processPathDirectoryAtime() throws IOException {
  TestFile file1 = new TestFile("testDirectory", "testFile01");
  TestFile file2 = new TestFile("testDirectory", "testFile02");
  TestFile file3 = new TestFile("testDirectory", "testFile03");
  TestFile file4 = new TestFile("testDirectory", "testFile04");
  TestFile file5 = new TestFile("testDirectory", "testFile05");
  TestFile file6 = new TestFile("testDirectory", "testFile06");
  TestFile dir = new TestFile("", "testDirectory");
  dir.setIsDir(true);
  dir.addContents(file1, file2, file3, file4, file5, file6);
  LinkedList<PathData> items = new LinkedList<>();
  items.add(dir.getPathData());
  PrintStream mockOut = mock(PrintStream.class);
  Ls shell = new Ls();
  shell.out = mockOut;
  LinkedList<String> args = new LinkedList<>();
  args.add("-u");
  shell.processOptions(args);
  String format = TestFile.computeLineFormat(items);
  shell.processArguments(items);
  // with -u each line shows access time instead of modification time
  InOrder ordered = inOrder(mockOut);
  ordered.verify(mockOut).println("Found 6 items");
  ordered.verify(mockOut).println(file1.formatLineAtime(format));
  ordered.verify(mockOut).println(file2.formatLineAtime(format));
  ordered.verify(mockOut).println(file3.formatLineAtime(format));
  ordered.verify(mockOut).println(file4.formatLineAtime(format));
  ordered.verify(mockOut).println(file5.formatLineAtime(format));
  ordered.verify(mockOut).println(file6.formatLineAtime(format));
  verifyNoMoreInteractions(mockOut);
}
// check access time order (-u -t options)
@Test
public void processPathDirOrderAtime() throws IOException {
  TestFile file1 = new TestFile("testDirectory", "testFile01");
  TestFile file2 = new TestFile("testDirectory", "testFile02");
  TestFile file3 = new TestFile("testDirectory", "testFile03");
  TestFile file4 = new TestFile("testDirectory", "testFile04");
  TestFile file5 = new TestFile("testDirectory", "testFile05");
  TestFile file6 = new TestFile("testDirectory", "testFile06");
  // set file atime in a different order to the file names
  file1.setAtime(NOW.getTime() + 10);
  file2.setAtime(NOW.getTime() + 30);
  file3.setAtime(NOW.getTime() + 20);
  file4.setAtime(NOW.getTime() + 60);
  file5.setAtime(NOW.getTime() + 50);
  file6.setAtime(NOW.getTime() + 40);
  // set file mtime differently to atime, to prove atime drives the sort
  file1.setMtime(NOW.getTime() + 60);
  file2.setMtime(NOW.getTime() + 50);
  file3.setMtime(NOW.getTime() + 20);
  file4.setMtime(NOW.getTime() + 30);
  file5.setMtime(NOW.getTime() + 10);
  file6.setMtime(NOW.getTime() + 40);
  TestFile dir = new TestFile("", "testDirectory");
  dir.setIsDir(true);
  dir.addContents(file1, file2, file3, file4, file5, file6);
  LinkedList<PathData> items = new LinkedList<>();
  items.add(dir.getPathData());
  PrintStream mockOut = mock(PrintStream.class);
  Ls shell = new Ls();
  shell.out = mockOut;
  LinkedList<String> args = new LinkedList<>();
  args.add("-t");
  args.add("-u");
  shell.processOptions(args);
  String format = TestFile.computeLineFormat(items);
  shell.processArguments(items);
  // expect newest atime first: 04 (+60), 05 (+50), 06 (+40), 02, 03, 01
  InOrder ordered = inOrder(mockOut);
  ordered.verify(mockOut).println("Found 6 items");
  ordered.verify(mockOut).println(file4.formatLineAtime(format));
  ordered.verify(mockOut).println(file5.formatLineAtime(format));
  ordered.verify(mockOut).println(file6.formatLineAtime(format));
  ordered.verify(mockOut).println(file2.formatLineAtime(format));
  ordered.verify(mockOut).println(file3.formatLineAtime(format));
  ordered.verify(mockOut).println(file1.formatLineAtime(format));
  verifyNoMoreInteractions(mockOut);
}
// check reverse access time order (-u -t -r options)
@Test
public void processPathDirOrderAtimeReverse() throws IOException {
  TestFile file1 = new TestFile("testDirectory", "testFile01");
  TestFile file2 = new TestFile("testDirectory", "testFile02");
  TestFile file3 = new TestFile("testDirectory", "testFile03");
  TestFile file4 = new TestFile("testDirectory", "testFile04");
  TestFile file5 = new TestFile("testDirectory", "testFile05");
  TestFile file6 = new TestFile("testDirectory", "testFile06");
  // set file atime in a different order to the file names
  file1.setAtime(NOW.getTime() + 10);
  file2.setAtime(NOW.getTime() + 30);
  file3.setAtime(NOW.getTime() + 20);
  file4.setAtime(NOW.getTime() + 60);
  file5.setAtime(NOW.getTime() + 50);
  file6.setAtime(NOW.getTime() + 40);
  // set file mtime differently to atime, to prove atime drives the sort
  file1.setMtime(NOW.getTime() + 60);
  file2.setMtime(NOW.getTime() + 50);
  file3.setMtime(NOW.getTime() + 20);
  file4.setMtime(NOW.getTime() + 30);
  file5.setMtime(NOW.getTime() + 10);
  file6.setMtime(NOW.getTime() + 40);
  TestFile dir = new TestFile("", "testDirectory");
  dir.setIsDir(true);
  dir.addContents(file1, file2, file3, file4, file5, file6);
  LinkedList<PathData> items = new LinkedList<>();
  items.add(dir.getPathData());
  PrintStream mockOut = mock(PrintStream.class);
  Ls shell = new Ls();
  shell.out = mockOut;
  LinkedList<String> args = new LinkedList<>();
  args.add("-t");
  args.add("-u");
  args.add("-r");
  shell.processOptions(args);
  String format = TestFile.computeLineFormat(items);
  shell.processArguments(items);
  // expect oldest atime first: 01 (+10), 03 (+20), 02 (+30), 06, 05, 04
  InOrder ordered = inOrder(mockOut);
  ordered.verify(mockOut).println("Found 6 items");
  ordered.verify(mockOut).println(file1.formatLineAtime(format));
  ordered.verify(mockOut).println(file3.formatLineAtime(format));
  ordered.verify(mockOut).println(file2.formatLineAtime(format));
  ordered.verify(mockOut).println(file6.formatLineAtime(format));
  ordered.verify(mockOut).println(file5.formatLineAtime(format));
  ordered.verify(mockOut).println(file4.formatLineAtime(format));
  verifyNoMoreInteractions(mockOut);
}
// check path only display (-C option)
// Verifies that "-C" prints only the path of each directory entry, in
// default (name) order, with no "Found N items" header and no attribute
// columns.
@Test
public void processPathDirectoryPathOnly() throws IOException {
TestFile testfile01 = new TestFile("testDirectory", "testFile01");
TestFile testfile02 = new TestFile("testDirectory", "testFile02");
TestFile testfile03 = new TestFile("testDirectory", "testFile03");
TestFile testfile04 = new TestFile("testDirectory", "testFile04");
TestFile testfile05 = new TestFile("testDirectory", "testFile05");
TestFile testfile06 = new TestFile("testDirectory", "testFile06");
TestFile testDir = new TestFile("", "testDirectory");
testDir.setIsDir(true);
testDir.addContents(testfile01, testfile02, testfile03, testfile04,
testfile05, testfile06);
LinkedList<PathData> pathData = new LinkedList<PathData>();
pathData.add(testDir.getPathData());
PrintStream out = mock(PrintStream.class);
Ls ls = new Ls();
ls.out = out;
LinkedList<String> options = new LinkedList<String>();
options.add("-C");
ls.processOptions(options);
ls.processArguments(pathData);
InOrder inOrder = inOrder(out);
// only the bare paths are printed, one per line
inOrder.verify(out).println(testfile01.getPath().toString());
inOrder.verify(out).println(testfile02.getPath().toString());
inOrder.verify(out).println(testfile03.getPath().toString());
inOrder.verify(out).println(testfile04.getPath().toString());
inOrder.verify(out).println(testfile05.getPath().toString());
inOrder.verify(out).println(testfile06.getPath().toString());
verifyNoMoreInteractions(out);
}
/**
 * Runs "ls" against the local file system with the missing-default-FS
 * warning flag set as requested, and asserts whether the warning text
 * shows up on stderr.
 *
 * @param shouldDisplay whether the warning is expected to be printed
 */
private static void displayWarningOnLocalFileSystem(boolean shouldDisplay)
throws IOException {
Configuration lsConf = new Configuration();
lsConf.setBoolean(
HADOOP_SHELL_MISSING_DEFAULT_FS_WARNING_KEY, shouldDisplay);
ByteArrayOutputStream capturedErr = new ByteArrayOutputStream();
Ls ls = new Ls(lsConf);
ls.err = new PrintStream(capturedErr, true);
ls.run("file:///.");
boolean warned = capturedErr.toString().contains(
"Warning: fs.defaultFS is not set when running \"ls\" command.");
assertEquals(shouldDisplay, warned);
}
// Exercise both settings of the missing-default-FS warning flag:
// first expecting the warning, then expecting it to be suppressed.
@Test
public void displayWarningsOnLocalFileSystem() throws IOException {
for (boolean expectWarning : new boolean[] {true, false}) {
displayWarningOnLocalFileSystem(expectWarning);
}
}
// check the deprecated flag isn't set
// The ls command must not report itself as deprecated.
@Test
public void isDeprecated() {
Ls command = new Ls();
assertEquals("Ls.isDeprecated", false, command.isDeprecated());
}
// check there's no replacement command
// A non-deprecated command has no replacement, so this must be null.
@Test
public void getReplacementCommand() {
Ls command = new Ls();
assertEquals("Ls.getReplacementCommand", null,
command.getReplacementCommand());
}
// check the correct name is returned
// The shell name of the command must be exactly "ls".
@Test
public void getName() {
Ls command = new Ls();
assertEquals("Ls.getName", "ls", command.getName());
}
// Requesting the erasure-coding policy column (-e) on a file must fail
// with UnsupportedOperationException when the file system does not
// support EC policies.
@Test(expected = UnsupportedOperationException.class)
public void processPathFileDisplayECPolicyWhenUnsupported()
throws IOException {
TestFile plainFile = new TestFile("testDirectory", "testFile");
LinkedList<PathData> items = new LinkedList<PathData>();
items.add(plainFile.getPathData());
LinkedList<String> options = new LinkedList<String>();
options.add("-e");
Ls ls = new Ls();
ls.processOptions(options);
ls.processArguments(items);
}
// Same as the file case above, but listing a directory: -e must raise
// UnsupportedOperationException when EC policies are unsupported.
@Test(expected = UnsupportedOperationException.class)
public void processPathDirDisplayECPolicyWhenUnsupported()
throws IOException {
TestFile childFile = new TestFile("testDirectory", "testFile");
TestFile parentDir = new TestFile("", "testDirectory");
parentDir.setIsDir(true);
parentDir.addContents(childFile);
LinkedList<PathData> items = new LinkedList<PathData>();
items.add(parentDir.getPathData());
LinkedList<String> options = new LinkedList<String>();
options.add("-e");
Ls ls = new Ls();
ls.processOptions(options);
ls.processArguments(items);
}
// test class representing a file to be listed
// Builds a synthetic FileStatus/PathData backed by the shared mockFs, and
// can render the expected "ls" output line for itself so tests can verify
// formatting without touching a real file system.
static class TestFile {
private static final SimpleDateFormat DATE_FORMAT = new SimpleDateFormat(
"yyyy-MM-dd HH:mm");
private static final boolean DEFAULT_ISDIR = false;
private static final String DEFAULT_MODE = "750";
private static final int DEFAULT_REPLICATION = 3;
private static final String DEFAULT_OWNER = "test_owner";
private static final String DEFAULT_GROUP = "test_group";
private static final long DEFAULT_LENGTH = 1234567890L;
// default mtime one day in the past, default atime one day in the future
private static final long DEFAULT_MTIME = NOW.getTime() - 86400000;
private static final long DEFAULT_ATIME = NOW.getTime() + 86400000;
private static final long DEFAULT_BLOCKSIZE = 64L * 1024 * 1024;
private String dirname;
private String filename;
private boolean isDir;
private FsPermission permission;
private int replication;
private String owner;
private String group;
private long length;
private long mtime;
private long atime;
private long blocksize;
// statuses of child entries, returned when this TestFile is a directory
private ArrayList<FileStatus> contents = new ArrayList<FileStatus>();
// lazily-built derived objects; see getPath/getFileStatus/getPathData
private Path path = null;
private FileStatus fileStatus = null;
private PathData pathData = null;
public TestFile(String dirname, String filename) {
setDirname(dirname);
setFilename(filename);
setIsDir(DEFAULT_ISDIR);
setPermission(DEFAULT_MODE);
setReplication(DEFAULT_REPLICATION);
setOwner(DEFAULT_OWNER);
setGroup(DEFAULT_GROUP);
setLength(DEFAULT_LENGTH);
setMtime(DEFAULT_MTIME);
setAtime(DEFAULT_ATIME);
setBlocksize(DEFAULT_BLOCKSIZE);
}
public void setDirname(String dirname) {
this.dirname = dirname;
}
public void setFilename(String filename) {
this.filename = filename;
}
public void setIsDir(boolean isDir) {
this.isDir = isDir;
}
public void setPermission(String mode) {
setPermission(new FsPermission(mode));
}
public void setPermission(FsPermission permission) {
this.permission = permission;
}
public void setReplication(int replication) {
this.replication = replication;
}
public void setOwner(String owner) {
this.owner = owner;
}
public void setGroup(String group) {
this.group = group;
}
public void setLength(long length) {
this.length = length;
}
public void setMtime(long mtime) {
this.mtime = mtime;
}
public void setAtime(long atime) {
this.atime = atime;
}
public void setBlocksize(long blocksize) {
this.blocksize = blocksize;
}
// register the given files as children of this (directory) entry
public void addContents(TestFile... contents) {
for (TestFile testFile : contents) {
this.contents.add(testFile.getFileStatus());
}
}
private String getDirname() {
return this.dirname;
}
private String getFilename() {
return this.filename;
}
private String getPathname() {
return getDirname() + "/" + getFilename();
}
private boolean isDir() {
return this.isDir;
}
private boolean isFile() {
return !this.isDir();
}
private FsPermission getPermission() {
return this.permission;
}
private int getReplication() {
return this.replication;
}
private String getOwner() {
return this.owner;
}
private String getGroup() {
return this.group;
}
private long getLength() {
return this.length;
}
private long getMtime() {
return this.mtime;
}
private long getAtime() {
return this.atime;
}
private long getBlocksize() {
return this.blocksize;
}
private FileStatus[] getContents() {
return this.contents.toArray(new FileStatus[0]);
}
/**
 * Returns the expected "ls" output line for this file, formatting the
 * modification time in the timestamp column.
 *
 * @param lineFormat
 *          format mask produced by computeLineFormat
 * @return formatted line
 */
private String formatLineMtime(String lineFormat) {
return String.format(lineFormat, (isDir() ? "d" : "-"), getPermission(),
(isFile() ? getReplication() : "-"), getOwner(), getGroup(),
String.valueOf(getLength()),
DATE_FORMAT.format(new Date(getMtime())), getPathname());
}
/**
 * Returns the expected "ls" output line for this file, formatting the
 * access time in the timestamp column (as printed for the -u option).
 *
 * @param lineFormat
 *          format mask produced by computeLineFormat
 * @return formatted line
 */
private String formatLineAtime(String lineFormat) {
return String.format(lineFormat, (isDir() ? "d" : "-"), getPermission(),
(isFile() ? getReplication() : "-"), getOwner(), getGroup(),
String.valueOf(getLength()),
DATE_FORMAT.format(new Date(getAtime())), getPathname());
}
public FileStatus getFileStatus() {
if (fileStatus == null) {
Path path = getPath();
fileStatus = new FileStatus(getLength(), isDir(), getReplication(),
getBlocksize(), getMtime(), getAtime(), getPermission(),
getOwner(), getGroup(), path);
}
return fileStatus;
}
public Path getPath() {
if (path == null) {
// top-level entries (empty dirname) become single-component paths
if ((getDirname() != null) && (!getDirname().equals(""))) {
path = new Path(getDirname(), getFilename());
} else {
path = new Path(getFilename());
}
}
return path;
}
// builds the PathData lazily, stubbing mockFs so stat/list calls made by
// the Ls command resolve to this TestFile's status and contents
public PathData getPathData() throws IOException {
if (pathData == null) {
FileStatus fileStatus = getFileStatus();
Path path = getPath();
when(mockFs.getFileStatus(eq(path))).thenReturn(fileStatus);
pathData = new PathData(path.toString(), conf);
if (getContents().length != 0) {
when(mockFs.listStatus(eq(path))).thenReturn(getContents());
}
}
return pathData;
}
/**
 * Compute format string based on maximum column widths. Copied from
 * Ls.adjustColumnWidths as these tests are more interested in proving
 * regression rather than absolute format.
 *
 * @param items
 *          to find the max field width for each column
 * @return format mask suitable for formatLineMtime/formatLineAtime
 */
public static String computeLineFormat(LinkedList<PathData> items) {
int maxRepl = 3, maxLen = 10, maxOwner = 0, maxGroup = 0;
for (PathData item : items) {
FileStatus stat = item.stat;
maxRepl = maxLength(maxRepl, stat.getReplication());
maxLen = maxLength(maxLen, stat.getLen());
maxOwner = maxLength(maxOwner, stat.getOwner());
maxGroup = maxLength(maxGroup, stat.getGroup());
}
StringBuilder fmt = new StringBuilder();
fmt.append("%s%s "); // permission string
fmt.append("%" + maxRepl + "s ");
// Do not use '%-0s' as a formatting conversion, since it will throw a
// a MissingFormatWidthException if it is used in String.format().
// http://docs.oracle.com/javase/1.5.0/docs/api/java/util/Formatter.html#intFlags
fmt.append((maxOwner > 0) ? "%-" + maxOwner + "s " : "%s");
fmt.append((maxGroup > 0) ? "%-" + maxGroup + "s " : "%s");
fmt.append("%" + maxLen + "s ");
fmt.append("%s %s"); // mod time & path
return fmt.toString();
}
/**
 * Return the maximum of two values, treating null as 0
 *
 * @param n
 *          integer to be compared
 * @param value
 *          value whose string length is compared; null counts as 0
 * @return maximum of the two inputs
 */
private static int maxLength(int n, Object value) {
return Math.max(n, (value != null) ? String.valueOf(value).length() : 0);
}
}
// FilterFileSystem that delegates all operations to the shared mockFs,
// so tests control every file system response through Mockito stubs.
static class MockFileSystem extends FilterFileSystem {
Configuration conf;
MockFileSystem() {
super(mockFs);
}
@Override
public void initialize(URI uri, Configuration conf) {
// no real initialization; just remember the configuration
this.conf = conf;
}
@Override
public Path makeQualified(Path path) {
// return paths unchanged so expected test output stays predictable
return path;
}
@Override
public Configuration getConf() {
return conf;
}
}
}
| |
package com.backtype.hadoop.pail;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.backtype.hadoop.BalancedDistcp;
import com.backtype.hadoop.Coercer;
import com.backtype.hadoop.Consolidator;
import com.backtype.hadoop.PathLister;
import com.backtype.hadoop.RenameMode;
import com.backtype.hadoop.formats.RecordInputStream;
import com.backtype.hadoop.formats.RecordOutputStream;
import com.backtype.support.Utils;
/**
 * A Pail is a directory of records on a Hadoop {@link FileSystem} described
 * by a "pail.meta" spec file at its root. The {@link PailSpec} fixes the
 * on-disk format and an optional {@link PailStructure} that maps records to
 * subdirectories and (de)serializes them. This class provides typed reads
 * and writes, combining pails (copy/move/absorb), snapshots, and
 * consolidation of small files.
 */
public class Pail<T> extends AbstractPail implements Iterable<T>{
public static Logger LOG = LoggerFactory.getLogger(Pail.class);
// name of the metadata file stored at the pail root
public static final String META = "pail.meta";
// Output stream that routes each object to the subdirectory chosen by the
// pail structure, lazily opening one underlying stream per target dir.
public class TypedRecordOutputStream implements RecordOutputStream {
// one open stream per target directory, keyed by relative dir path
private HashMap<String, RecordOutputStream> _workers = new HashMap<String, RecordOutputStream>();
private String _userfilename;
private boolean _overwrite;
public TypedRecordOutputStream(String userfilename, boolean overwrite) {
_userfilename = userfilename;
_overwrite = overwrite;
}
// Serializes obj and writes it to the directory the structure assigns it,
// validating that the resulting path is a legal structure target.
public <T> void writeObject(T obj) throws IOException {
PailStructure<T> structure = ((PailStructure<T>) _structure);
List<String> rootAttrs = structure.getTarget(obj);
List<String> attrs = makeRelative(rootAttrs);
String targetDir = Utils.join(attrs, "/");
if(!_workers.containsKey(targetDir)) {
Path p;
if(targetDir.length()==0) p = new Path(_userfilename);
else p = new Path(targetDir, _userfilename);
List<String> totalAttrs = componentsFromRoot(p.toString());
if(!_structure.isValidTarget(totalAttrs.toArray(new String[totalAttrs.size()]))) {
throw new IllegalArgumentException("Cannot write object " + obj.toString() + " to " + p.toString() +
". Conflicts with the structure of the datastore.");
}
_workers.put(targetDir, Pail.super.openWrite(p.toString(), _overwrite));
}
RecordOutputStream os = _workers.get(targetDir);
os.writeRaw(structure.serialize(obj));
}
public void writeObjects(T... objs) throws IOException {
for(T obj: objs) {
writeObject(obj);
}
}
// closes every underlying per-directory stream
public void close() throws IOException {
for(RecordOutputStream os: _workers.values()) {
os.close();
}
}
// strips this pail's own attribute prefix from root-relative attrs
protected List<String> makeRelative(List<String> attrs) {
return Utils.stripRoot(getAttrs(), attrs);
}
public void writeRaw(byte[] record) throws IOException {
writeRaw(record, 0, record.length);
}
// raw writes bypass the structure's routing and go to the user file name
public void writeRaw(byte[] record, int start, int length) throws IOException {
if(!_workers.containsKey(_userfilename)) {
checkValidStructure(_userfilename);
_workers.put(_userfilename, Pail.super.openWrite(_userfilename, _overwrite));
}
_workers.get(_userfilename).writeRaw(record, start, length);
}
}
// Input stream that deserializes raw records through the pail structure.
public class TypedRecordInputStream implements RecordInputStream {
private RecordInputStream is;
public TypedRecordInputStream(String userFileName) throws IOException {
is = Pail.super.openRead(userFileName);
}
// returns the next deserialized record, or null at end of file
public T readObject() throws IOException {
byte[] record = readRawRecord();
if(record==null) return null;
else return _structure.deserialize(record);
}
public void close() throws IOException {
is.close();
}
public byte[] readRawRecord() throws IOException {
return is.readRawRecord();
}
}
// --- create() overloads: all funnel into create(fs, path, spec, failOnExists)
public static Pail create(String path, PailSpec spec) throws IOException {
return create(Utils.getFS(path), path, spec);
}
public static Pail create(FileSystem fs, String path, PailSpec spec) throws IOException {
return create(fs, path, spec, true);
}
public static Pail create(String path) throws IOException {
return create(Utils.getFS(path), path);
}
public static Pail create(FileSystem fs, String path) throws IOException {
return create(fs, path, (PailSpec) null);
}
public static Pail create(String path, PailStructure structure) throws IOException {
return create(Utils.getFS(path), path, structure);
}
public static Pail create(FileSystem fs, String path, PailStructure structure) throws IOException {
return create(fs, path, new PailSpec(structure));
}
public static Pail create(String path, PailStructure structure, boolean failOnExists) throws IOException {
return create(Utils.getFS(path), path, structure, failOnExists);
}
public static Pail create(FileSystem fs, String path, PailStructure structure, boolean failOnExists) throws IOException {
return create(fs, path, new PailSpec(structure), failOnExists);
}
public static Pail create(String path, boolean failOnExists) throws IOException {
return create(Utils.getFS(path), path, failOnExists);
}
public static Pail create(FileSystem fs, String path, boolean failOnExists) throws IOException {
return create(fs, path, (PailSpec) null, failOnExists);
}
public static Pail create(String path, PailSpec spec, boolean failOnExists) throws IOException {
return create(Utils.getFS(path), path, spec, failOnExists);
}
/**
 * Creates (or opens) a pail at the given path. If a pail already exists
 * up the directory tree, the requested spec must be compatible with the
 * existing one; otherwise a fresh pail.meta is written. When failOnExists
 * is true, any pre-existing pail or path is an error.
 */
public static Pail create(FileSystem fs, String path, PailSpec spec, boolean failOnExists) throws IOException {
Path pathp = new Path(path);
// validates the spec eagerly; the created format is discarded here
PailFormatFactory.create(spec);
PailSpec existing = getSpec(fs, pathp);
if(failOnExists) {
if(existing!=null) {
throw new IllegalArgumentException("Pail already exists at path " + path + " with spec " + existing.toString());
}
if(fs.exists(pathp))
throw new IllegalArgumentException("Path " + path + " already exists");
}
if(spec!=null && existing!=null) {
if(spec.getName()!=null) {
if(!spec.equals(existing))
throw new IllegalArgumentException("Specs do not match " + spec.toString() + ", " + existing.toString());
} else if(spec.getStructure()!=null) {
// no format name given: only the structure class must match
if(existing.getStructure()==null || !spec.getStructure().getClass().equals(existing.getStructure().getClass())) {
throw new IllegalArgumentException("Specs do not match " + spec.toString() + ", " + existing.toString());
}
}
}
fs.mkdirs(pathp);
if(existing==null) {
// fill in defaults before persisting the spec as pail.meta
if(spec==null) spec = PailFormatFactory.getDefaultCopy();
if(spec.getName()==null) spec = PailFormatFactory.getDefaultCopy().setStructure(spec.getStructure());
spec.writeToFileSystem(fs, new Path(pathp, META));
}
return new Pail(fs, path);
}
private static PailSpec getSpec(FileSystem fs, Path path) throws IOException {
return (PailSpec) getSpecAndRoot(fs, path)[1];
}
private static String getRoot(FileSystem fs, Path path) throws IOException {
return (String) getSpecAndRoot(fs, path)[0];
}
// Walks from path up to the file system root looking for the single
// pail.meta file; returns {rootPath, spec}, or {null, null} if none found.
// More than one meta file on the ancestor chain is an error.
private static Object[] getSpecAndRoot(FileSystem fs, Path path) throws IOException {
Path curr = path;
Object[] ret = null;
while( curr != null ) { // guard against getParent() returning null at the root
Path meta = new Path(curr, META);
if(fs.exists(meta)) {
if(ret!=null) throw new RuntimeException("At least two meta files up directory tree");
PailSpec spec = PailSpec.readFromFileSystem(fs, meta);
ret = new Object[] {curr.toString(), spec};
}
if(curr.depth()==0) break;
curr = curr.getParent();
}
if(ret==null) ret = new Object[] {null, null};
return ret;
}
private PailFormat _format;
private PailSpec _spec;
private PailStructure<T> _structure;
// root of the pail (where pail.meta lives); may be an ancestor of the
// instance path this object was opened at
private String _root;
private FileSystem _fs;
public Pail(String path) throws IOException {
this(Utils.getFS(path), path);
}
public Pail(String path, Configuration conf) throws IOException {
this(Utils.getFS(path, conf), path);
}
// Opens an existing pail; fails if no pail.meta is found at or above path.
public Pail(FileSystem fs, String path) throws IOException {
super(path);
_fs = fs;
_root = getRoot(fs, new Path(path));
if(_root==null || !fs.exists(new Path(path)))
throw new IllegalArgumentException("Pail does not exist at path " + path);
_spec = getSpec(fs, new Path(path));
_structure = _spec.getStructure();
_format = PailFormatFactory.create(_spec);
}
public FileSystem getFileSystem() {
return _fs;
}
// opens a write stream with a random (UUID) user file name
public TypedRecordOutputStream openWrite() throws IOException {
return openWrite(UUID.randomUUID().toString(), false);
}
@Override
public TypedRecordOutputStream openWrite(String subFileName, boolean overwrite) throws IOException {
if(subFileName.contains(META)) throw new IllegalArgumentException("Illegal user file name " + subFileName);
checkPathValidity(subFileName);
return new TypedRecordOutputStream(subFileName, overwrite);
}
@Override
public TypedRecordInputStream openRead(String userfilename) throws IOException {
checkPathValidity(userfilename);
checkValidStructure(userfilename);
return new TypedRecordInputStream(userfilename);
}
// underscore-prefixed path components are reserved (Hadoop hidden files)
protected void checkPathValidity(String subFileName) {
List<String> components = Utils.componentize(subFileName);
for(String s: components) {
if(s.startsWith("_")) {
throw new IllegalArgumentException("Cannot have underscores in path names " + subFileName);
}
}
}
// opens the sub-pail at the directory named by the given numeric attrs
public Pail<T> getSubPail(int... attrs) throws IOException {
List<String> elems = new ArrayList<String>();
for(int i: attrs) {
elems.add("" + i);
}
String relPath = Utils.join(elems, "/");
return getSubPail(relPath);
}
public Pail<T> getSubPail(String relpath) throws IOException {
mkdirs(new Path(getInstanceRoot(), relpath));
return new Pail(_fs, new Path(getInstanceRoot(), relpath).toString());
}
public PailSpec getSpec() {
return _spec;
}
public PailFormat getFormat() {
return _format;
}
public String getRoot() {
return _root;
}
// true when this instance was opened at the pail root itself
public boolean atRoot() {
Path instanceRoot = new Path(getInstanceRoot()).makeQualified(_fs);
Path root = new Path(getRoot()).makeQualified(_fs);
return root.equals(instanceRoot);
}
// attribute components of this instance relative to the pail root
public List<String> getAttrs() {
return Utils.stripRoot(Utils.componentize(getRoot()), Utils.componentize(getInstanceRoot()));
}
//returns if formats are same
// Validates that pail p can be combined into this pail (types compatible
// and p's files fit this pail's structure). Skipped entirely when
// args.force is set.
// NOTE(review): assumes both specs have non-null name/args here; a spec
// with a null name would NPE on the last line — verify upstream invariant.
private boolean checkCombineValidity(Pail p, CopyArgs args) throws IOException {
if(args.force) return true;
PailSpec mine = getSpec();
PailSpec other = p.getSpec();
PailStructure structure = mine.getStructure();
boolean typesSame = structure.getType().equals(other.getStructure().getType());
//can always append into a "raw" pail
if(!structure.getType().equals(new byte[0].getClass()) && !typesSame)
throw new IllegalArgumentException("Cannot combine two pails of different types unless target pail is raw");
//check that structure will be maintained
for(String name: p.getUserFileNames()) {
checkValidStructure(name);
}
return mine.getName().equals(other.getName()) && mine.getArgs().equals(other.getArgs());
}
// --- snapshot: copy this pail's contents into a fresh, empty mimic pail
public Pail snapshot(Configuration configuration, FileSystem fileSystem, String path) throws IOException {
Pail ret = createEmptyMimic(fileSystem, path);
ret.copyAppend(this, RenameMode.NO_RENAME, configuration);
return ret;
}
public Pail snapshot(FileSystem fileSystem, String path) throws IOException {
Pail ret = createEmptyMimic(fileSystem, path);
ret.copyAppend(this, RenameMode.NO_RENAME);
return ret;
}
public Pail snapshot(String path) throws IOException {
Pail ret = createEmptyMimic(path);
ret.copyAppend(this, RenameMode.NO_RENAME);
return ret;
}
// deletes all stored data files (non-recursively, metadata untouched)
public void clear() throws IOException {
for(Path p: getStoredFiles()) {
delete(p, false);
}
}
// deletes from this pail every user file present in the given snapshot
public void deleteSnapshot(Pail snapshot) throws IOException {
for(String username: snapshot.getUserFileNames()) {
delete(username);
}
}
// Creates an empty pail at path with the same spec as this one; the path
// must not exist and must not lie inside another pail.
public Pail createEmptyMimic(FileSystem fileSystem, String path) throws IOException {
if(getSpec(fileSystem, new Path(path))!=null) {
throw new IllegalArgumentException("Cannot make empty mimic at " + path + " because it is a subdir of a pail");
}
if(fileSystem.exists(new Path(path))) {
throw new IllegalArgumentException(path + " already exists");
}
return Pail.create(fileSystem, path, getSpec(), true);
}
public Pail createEmptyMimic(String path) throws IOException {
return createEmptyMimic(Utils.getFS(path), path);
}
// rewrites this pail's records into a new pail using a different format
public void coerce(String path, String name, Map<String, Object> args) throws IOException {
Pail.create(path, new PailSpec(name, args).setStructure(getSpec().getStructure())).copyAppend(this);
}
public void coerce(FileSystem fs, String path, String name, Map<String, Object> args) throws IOException {
Pail.create(fs, path, new PailSpec(name, args).setStructure(getSpec().getStructure())).copyAppend(this);
}
public void copyAppend(Pail p) throws IOException {
copyAppend(p, new CopyArgs());
}
public void copyAppend(Pail p, int renameMode) throws IOException {
CopyArgs args = new CopyArgs();
args.renameMode = renameMode;
copyAppend(p, args);
}
public void copyAppend(Pail p, int renameMode, Configuration configuration) throws IOException {
CopyArgs args = new CopyArgs();
args.renameMode = renameMode;
args.configuration = configuration;
copyAppend(p, args);
}
protected String getQualifiedRoot(Pail p) {
Path path = new Path(p.getInstanceRoot());
return path.makeQualified(p._fs).toString();
}
/**
 * Copy append will copy all the files from p into this pail. Appending maintains the
 * structure that was present in p.
 *
 * Uses a straight distcp when the formats match, otherwise coerces each
 * record through p's format into this pail's format.
 */
public void copyAppend(Pail p, CopyArgs args) throws IOException {
args = new CopyArgs(args);
if(args.renameMode==null) args.renameMode = RenameMode.ALWAYS_RENAME;
boolean formatsSame = checkCombineValidity(p, args);
String sourceQual = getQualifiedRoot(p);
String destQual = getQualifiedRoot(this);
if(formatsSame) {
BalancedDistcp.distcp(sourceQual, destQual, args.renameMode, new PailPathLister(args.copyMetadata), EXTENSION, args.configuration);
} else {
Coercer.coerce(sourceQual, destQual, args.renameMode, new PailPathLister(args.copyMetadata), p.getFormat(), getFormat(), EXTENSION, args.configuration);
}
}
public void moveAppend(Pail p) throws IOException {
moveAppend(p, new CopyArgs());
}
public void moveAppend(Pail p, int renameMode) throws IOException {
CopyArgs args = new CopyArgs();
args.renameMode = renameMode;
moveAppend(p, args);
}
// Moves p's files into this pail via rename; requires the same file system
// and the same format. Colliding names are renamed to "ma_<uuid>" unless
// NO_RENAME is requested, in which case a collision is an error.
public void moveAppend(Pail p, CopyArgs args) throws IOException {
args = new CopyArgs(args);
if(args.renameMode==null) args.renameMode = RenameMode.ALWAYS_RENAME;
boolean formatsSame = checkCombineValidity(p, args);
if(!p._fs.getUri().equals(_fs.getUri())) throw new IllegalArgumentException("Cannot move append between different filesystems");
if(!formatsSame) throw new IllegalArgumentException("Cannot move append different format pails together");
for(String name: p.getUserFileNames()) {
String parent = new Path(name).getParent().toString();
_fs.mkdirs(new Path(getInstanceRoot() + "/" + parent));
Path storedPath = p.toStoredPath(name);
Path targetPath = toStoredPath(name);
if(_fs.exists(targetPath) || args.renameMode == RenameMode.ALWAYS_RENAME) {
if(args.renameMode == RenameMode.NO_RENAME)
throw new IllegalArgumentException("Collision of filenames " + targetPath.toString());
if(parent.equals("")) targetPath = toStoredPath("ma_" + UUID.randomUUID().toString());
else targetPath = toStoredPath(parent + "/ma_" + UUID.randomUUID().toString());
}
_fs.rename(storedPath, targetPath);
}
if(args.copyMetadata) {
// metadata files are never auto-renamed; collisions are fatal
for(String metaName: p.getMetadataFileNames()) {
Path source = p.toStoredMetadataPath(metaName);
Path dest = toStoredMetadataPath(metaName);
if(_fs.exists(dest)) {
throw new IllegalArgumentException("Metadata collision: " + source.toString() + " -> " + dest.toString());
}
_fs.rename(source, dest);
}
}
}
public void absorb(Pail p) throws IOException {
absorb(p, new CopyArgs());
}
public void absorb(Pail p, int renameMode) throws IOException {
CopyArgs args = new CopyArgs();
args.renameMode = renameMode;
absorb(p, args);
}
public void absorb(Pail p, Configuration configuration) throws IOException {
CopyArgs args = new CopyArgs();
args.configuration = configuration;
absorb(p, args);
}
// Absorbs p into this pail: cheap move when same fs and format,
// otherwise a (possibly coercing) copy.
public void absorb(Pail p, CopyArgs args) throws IOException {
args = new CopyArgs(args);
if(args.renameMode==null) args.renameMode = RenameMode.ALWAYS_RENAME;
boolean formatsSame = checkCombineValidity(p, args);
if(formatsSame && p._fs.getUri().equals(_fs.getUri())) {
moveAppend(p, args);
} else {
copyAppend(p, args);
//TODO: should we go ahead and clear out the input pail for consistency?
}
}
// Recreates any listed-but-missing files as empty files, working around
// eventually-consistent listings (e.g. S3).
public void s3ConsistencyFix() throws IOException {
for(Path p: getStoredFiles()) {
try {
_fs.getFileStatus(p);
} catch(FileNotFoundException e) {
LOG.info("Fixing file: " + p);
_fs.create(p, true).close();
}
}
}
public void consolidate() throws IOException {
consolidate(Consolidator.DEFAULT_CONSOLIDATION_SIZE);
}
/**
 * Merges small record files up to maxSize bytes each. Does a breadth-first
 * walk from the instance root collecting every directory that is a valid
 * structure target, then hands those directories to the Consolidator.
 */
public void consolidate(long maxSize) throws IOException {
List<String> toCheck = new ArrayList<String>();
toCheck.add("");
PailStructure structure = getSpec().getStructure();
List<String> consolidatedirs = new ArrayList<String>();
while(toCheck.size()>0) {
String dir = toCheck.remove(0);
List<String> dirComponents = componentsFromRoot(dir);
if(structure.isValidTarget(dirComponents.toArray(new String[dirComponents.size()]))) {
consolidatedirs.add(toFullPath(dir));
} else {
FileStatus[] contents = listStatus(new Path(toFullPath(dir)));
for(FileStatus f: contents) {
if(!f.isDir()) {
// a data file outside a valid target dir breaks the structure
if(f.getPath().toString().endsWith(EXTENSION))
throw new IllegalStateException(f.getPath().toString() + " is not a dir and breaks the structure of " + getInstanceRoot());
} else {
String newDir;
if(dir.length()==0) newDir = f.getPath().getName();
else newDir = dir + "/" + f.getPath().getName();
toCheck.add(newDir);
}
}
}
}
Consolidator.consolidate(_fs, _format, new PailPathLister(false), consolidatedirs, maxSize, EXTENSION);
}
@Override
protected RecordInputStream createInputStream(Path path) throws IOException {
return _format.getInputStream(_fs, path);
}
@Override
protected RecordOutputStream createOutputStream(Path path) throws IOException {
return _format.getOutputStream(_fs, path);
}
@Override
protected boolean delete(Path path, boolean recursive) throws IOException {
return _fs.delete(path, recursive);
}
@Override
protected boolean exists(Path path) throws IOException {
return _fs.exists(path);
}
@Override
protected boolean rename(Path source, Path dest) throws IOException {
return _fs.rename(source, dest);
}
@Override
protected boolean mkdirs(Path path) throws IOException {
return _fs.mkdirs(path);
}
// lists path, filtering out underscore-prefixed directories (hidden dirs)
@Override
protected FileStatus[] listStatus(Path path) throws IOException {
FileStatus[] arr = _fs.listStatus(path);
List<FileStatus> ret = new ArrayList<FileStatus>();
for(FileStatus fs: arr) {
if(!fs.isDir() || !fs.getPath().getName().startsWith("_")) {
ret.add(fs);
}
}
return ret.toArray(new FileStatus[ret.size()]);
}
protected String toFullPath(String relpath) {
Path p;
if(relpath.length()==0) p = new Path(getInstanceRoot());
else p = new Path(getInstanceRoot(), relpath);
return p.toString();
}
// components of relpath relative to the pail root (not the instance root)
protected List<String> componentsFromRoot(String relpath) {
String fullpath = toFullPath(relpath);
List<String> full = Utils.componentize(fullpath);
List<String> root = Utils.componentize(getRoot());
return Utils.stripRoot(root, full);
}
// Validates that the directory part of userfilename (the file name itself
// is dropped) is a legal target under this pail's structure.
protected void checkValidStructure(String userfilename) {
List<String> full = componentsFromRoot(userfilename);
full.remove(full.size() - 1);
full = Utils.cleanHadoopPath(full);
if(!getSpec().getStructure().isValidTarget(full.toArray(new String[full.size()]))) {
throw new IllegalArgumentException(
userfilename + " is not valid with the pail structure " + getSpec().toString() +
" --> " + full.toString());
}
}
// PathLister over a pail's stored files, optionally including metadata.
protected static class PailPathLister implements PathLister {
boolean _includeMeta;
public PailPathLister() {
this(true);
}
public PailPathLister(boolean includeMeta) {
_includeMeta = includeMeta;
}
public List<Path> getFiles(FileSystem fs, String path) {
try {
Pail p = new Pail(fs, path);
List<Path> ret;
if(_includeMeta) {
ret = p.getStoredFilesAndMetadata();
} else {
ret = p.getStoredFiles();
}
return ret;
} catch (IOException e) {
throw new RuntimeException(e);
}
}
}
public boolean isEmpty() throws IOException {
PailIterator it = iterator();
boolean ret = !it.hasNext();
it.close();
return ret;
}
public PailIterator iterator() {
return new PailIterator();
}
// Iterator over every record in the pail, reading the user files one at a
// time. Always call close() when done to release the current stream.
public class PailIterator implements Iterator<T> {
private List<String> filesleft;
private TypedRecordInputStream curr = null;
// next record to return, or null when the pail is exhausted
private T nextRecord;
public PailIterator() {
try {
filesleft = getUserFileNames();
} catch(IOException e) {
throw new RuntimeException(e);
}
getNextRecord();
}
// Advances to the next record, closing exhausted files and opening the
// next one as needed; leaves nextRecord null when nothing remains.
private void getNextRecord() {
try {
while(curr==null || (nextRecord = curr.readObject()) == null) {
if(curr!=null) curr.close();
if(filesleft.size()==0) break;
curr = openRead(filesleft.remove(0));
}
} catch(IOException e) {
throw new RuntimeException(e);
}
}
public boolean hasNext() {
return nextRecord != null;
}
// NOTE(review): after exhaustion this returns null instead of throwing
// NoSuchElementException as the Iterator contract specifies — callers
// must gate every next() with hasNext().
public T next() {
T ret = nextRecord;
getNextRecord();
return ret;
}
public void close() throws IOException {
if(curr!=null) {
curr.close();
}
}
public void remove() {
throw new UnsupportedOperationException("Cannot remove records from a pail");
}
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.api;
import java.io.IOException;
import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Stable;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.yarn.api.protocolrecords.CommitResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesResponse;
import org.apache.hadoop.yarn.api.protocolrecords.IncreaseContainersResourceRequest;
import org.apache.hadoop.yarn.api.protocolrecords.IncreaseContainersResourceResponse;
import org.apache.hadoop.yarn.api.protocolrecords.ReInitializeContainerRequest;
import org.apache.hadoop.yarn.api.protocolrecords.ReInitializeContainerResponse;
import org.apache.hadoop.yarn.api.protocolrecords.ResourceLocalizationRequest;
import org.apache.hadoop.yarn.api.protocolrecords.ResourceLocalizationResponse;
import org.apache.hadoop.yarn.api.protocolrecords.RestartContainerResponse;
import org.apache.hadoop.yarn.api.protocolrecords.RollbackResponse;
import org.apache.hadoop.yarn.api.protocolrecords.SignalContainerRequest;
import org.apache.hadoop.yarn.api.protocolrecords.SignalContainerResponse;
import org.apache.hadoop.yarn.api.protocolrecords.StartContainerRequest;
import org.apache.hadoop.yarn.api.protocolrecords.StartContainersRequest;
import org.apache.hadoop.yarn.api.protocolrecords.StartContainersResponse;
import org.apache.hadoop.yarn.api.protocolrecords.StopContainersRequest;
import org.apache.hadoop.yarn.api.protocolrecords.StopContainersResponse;
import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
import org.apache.hadoop.yarn.api.records.ContainerStatus;
import org.apache.hadoop.yarn.exceptions.YarnException;
/**
* <p>The protocol between an <code>ApplicationMaster</code> and a
* <code>NodeManager</code> to start/stop and increase resource of containers
* and to get status of running containers.</p>
*
* <p>If security is enabled the <code>NodeManager</code> verifies that the
* <code>ApplicationMaster</code> has truly been allocated the container
* by the <code>ResourceManager</code> and also verifies all interactions such
* as stopping the container or obtaining status information for the container.
* </p>
*/
@Public
@Stable
public interface ContainerManagementProtocol {

  /**
   * <p>
   * The <code>ApplicationMaster</code> provides a list of
   * {@link StartContainerRequest}s to a <code>NodeManager</code> to
   * <em>start</em> {@link Container}s allocated to it using this interface.
   * </p>
   *
   * <p>
   * The <code>ApplicationMaster</code> has to provide details such as allocated
   * resource capability, security tokens (if enabled), command to be executed
   * to start the container, environment for the process, necessary
   * binaries/jar/shared-objects etc. via the {@link ContainerLaunchContext} in
   * the {@link StartContainerRequest}.
   * </p>
   *
   * <p>
   * The <code>NodeManager</code> sends a response via
   * {@link StartContainersResponse} which includes a list of
   * {@link Container}s of successfully launched {@link Container}s, a
   * containerId-to-exception map for each failed {@link StartContainerRequest} in
   * which the exception indicates per-container errors, and an
   * allServicesMetaData map between the names of auxiliary services and their
   * corresponding meta-data. Note: Non-container-specific exceptions will
   * still be thrown by the API method itself.
   * </p>
   * <p>
   * The <code>ApplicationMaster</code> can use
   * {@link #getContainerStatuses(GetContainerStatusesRequest)} to get updated
   * statuses of the to-be-launched or launched containers.
   * </p>
   *
   * @param request
   *          request to start a list of containers
   * @return response including containerIds of all successfully launched
   *         containers, a containerId-to-exception map for failed requests and
   *         an allServicesMetaData map.
   * @throws YarnException
   * @throws IOException
   */
  @Public
  @Stable
  StartContainersResponse startContainers(StartContainersRequest request)
      throws YarnException, IOException;

  /**
   * <p>
   * The <code>ApplicationMaster</code> requests a <code>NodeManager</code> to
   * <em>stop</em> a list of {@link Container}s allocated to it using this
   * interface.
   * </p>
   *
   * <p>
   * The <code>ApplicationMaster</code> sends a {@link StopContainersRequest}
   * which includes the {@link ContainerId}s of the containers to be stopped.
   * </p>
   *
   * <p>
   * The <code>NodeManager</code> sends a response via
   * {@link StopContainersResponse} which includes a list of {@link ContainerId}
   * s of successfully stopped containers, and a containerId-to-exception map
   * for each failed request in which the exception indicates per-container
   * errors. Note: Non-container-specific exceptions will still be thrown by
   * the API method itself. <code>ApplicationMaster</code> can use
   * {@link #getContainerStatuses(GetContainerStatusesRequest)} to get updated
   * statuses of the containers.
   * </p>
   *
   * @param request
   *          request to stop a list of containers
   * @return response which includes a list of containerIds of successfully
   *         stopped containers, a containerId-to-exception map for failed
   *         requests.
   * @throws YarnException
   * @throws IOException
   */
  @Public
  @Stable
  StopContainersResponse stopContainers(StopContainersRequest request)
      throws YarnException, IOException;

  /**
   * <p>
   * The API used by the <code>ApplicationMaster</code> to request for current
   * statuses of <code>Container</code>s from the <code>NodeManager</code>.
   * </p>
   *
   * <p>
   * The <code>ApplicationMaster</code> sends a
   * {@link GetContainerStatusesRequest} which includes the {@link ContainerId}s
   * of all containers whose statuses are needed.
   * </p>
   *
   * <p>
   * The <code>NodeManager</code> responds with
   * {@link GetContainerStatusesResponse} which includes a list of
   * {@link ContainerStatus} of the successfully queried containers and a
   * containerId-to-exception map for each failed request in which the exception
   * indicates per-container errors. Note: Non-container-specific
   * exceptions will still be thrown by the API method itself.
   * </p>
   *
   * @param request
   *          request to get <code>ContainerStatus</code>es of containers with
   *          the specified <code>ContainerId</code>s
   * @return response containing the list of <code>ContainerStatus</code> of the
   *         successfully queried containers and a containerId-to-exception map
   *         for failed requests.
   *
   * @throws YarnException
   * @throws IOException
   */
  @Public
  @Stable
  GetContainerStatusesResponse getContainerStatuses(
      GetContainerStatusesRequest request) throws YarnException,
      IOException;

  /**
   * <p>
   * The API used by the <code>ApplicationMaster</code> to request for
   * resource increase of running containers on the <code>NodeManager</code>.
   * </p>
   *
   * @param request
   *          request to increase resource of a list of containers
   * @return response which includes a list of containerIds of containers
   *         whose resource has been successfully increased and a
   *         containerId-to-exception map for failed requests.
   *
   * @throws YarnException
   * @throws IOException
   */
  @Public
  @Unstable
  IncreaseContainersResourceResponse increaseContainersResource(
      IncreaseContainersResourceRequest request) throws YarnException,
      IOException;

  /**
   * The API used by the <code>ApplicationMaster</code> to ask the
   * <code>NodeManager</code> to deliver a signal to a container.
   *
   * @param request request carrying the target container id and the signal
   *          command to deliver
   * @return response indicating that the signal request has been accepted
   * @throws YarnException Exception specific to YARN.
   * @throws IOException IOException thrown from the RPC layer.
   */
  @Public
  @Unstable
  SignalContainerResponse signalToContainer(SignalContainerRequest request)
      throws YarnException, IOException;

  /**
   * Localize resources required by the container.
   * Currently, this API only works for running containers.
   *
   * @param request Specify the resources to be localized.
   * @return Response that the localize request is accepted.
   * @throws YarnException Exception specific to YARN
   * @throws IOException IOException thrown from the RPC layer.
   */
  @Public
  @Unstable
  ResourceLocalizationResponse localize(ResourceLocalizationRequest request)
      throws YarnException, IOException;

  /**
   * ReInitialize the Container with a new Launch Context.
   * @param request Specify the new ContainerLaunchContext.
   * @return Response that the ReInitialize request is accepted.
   * @throws YarnException Exception specific to YARN.
   * @throws IOException IOException thrown from the RPC layer.
   */
  @Public
  @Unstable
  ReInitializeContainerResponse reInitializeContainer(
      ReInitializeContainerRequest request) throws YarnException, IOException;

  /**
   * Restart the container.
   * @param containerId Container Id.
   * @return Response that the restart request is accepted.
   * @throws YarnException Exception specific to YARN.
   * @throws IOException IOException thrown from the RPC layer.
   */
  @Public
  @Unstable
  RestartContainerResponse restartContainer(ContainerId containerId)
      throws YarnException, IOException;

  /**
   * Rollback the Last ReInitialization if possible.
   * @param containerId Container Id.
   * @return Response that the rollback request is accepted.
   * @throws YarnException Exception specific to YARN.
   * @throws IOException IOException thrown from the RPC layer.
   */
  @Public
  @Unstable
  RollbackResponse rollbackLastReInitialization(ContainerId containerId)
      throws YarnException, IOException;

  /**
   * Commit the Last ReInitialization if possible. Once the reinitialization
   * has been committed, It cannot be rolled back.
   * @param containerId Container Id.
   * @return Response that the commit request is accepted.
   * @throws YarnException Exception specific to YARN.
   * @throws IOException IOException thrown from the RPC layer.
   */
  @Public
  @Unstable
  CommitResponse commitLastReInitialization(ContainerId containerId)
      throws YarnException, IOException;
}
| |
/*
* Created on Dec 21, 2004
*/
package com.dytech.devlib;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import junit.framework.TestCase;
/**
 * Unit tests for {@code PropBagEx} XPath-like node access, iteration,
 * mutation, and XML round-tripping. Fixture documents doc1.xml / doc2.xml
 * are loaded fresh for every test.
 *
 * <p>Fixes over the previous revision: assertEquals arguments are in the
 * JUnit-conventional (expected, actual) order so failure messages read
 * correctly; {@code assertTrue(msg, false)} is replaced by {@code fail(msg)};
 * the raw {@code Map} use is wildcarded.
 *
 * @author Nicholas Read
 */
@SuppressWarnings("nls")
public class PropBagExTest extends TestCase {
  private static final String DOC1 = "doc1.xml";
  private static final String DOC2 = "doc2.xml";

  private PropBagEx doc1;
  private PropBagEx doc2;

  @Override
  protected void setUp() throws Exception {
    super.setUp();
    doc1 = new PropBagEx(getClass().getResourceAsStream(DOC1));
    doc2 = new PropBagEx(getClass().getResourceAsStream(DOC2));
  }

  @Override
  protected void tearDown() throws Exception {
    doc1 = null;
    doc2 = null;
    super.tearDown();
  }

  public void testIteratorInForEachLoop() {
    for (final PropBagEx xml : doc1.iterator()) {
      // Do nothing — this only verifies the iterator works in a for-each loop.
      xml.hashCode();
    }
  }

  public void testGetNode() {
    assertEquals("1", doc1.getNode("result/xml/a"));
    assertEquals("4", doc1.getNode("result[1]/xml/a"));
    assertEquals("5", doc1.getNode("@count"));
    assertEquals("first", doc1.getNode("result/xml/@id"));
    assertEquals("second", doc1.getNode("result[1]/xml/@id"));
    // Missing nodes resolve to the empty string.
    assertEquals("", doc1.getNode("non/existant/node"));
    // Repeated and trailing slashes are tolerated.
    assertEquals("1", doc1.getNode("result////xml////a"));
    assertEquals("1", doc1.getNode("result/xml/a//////"));
  }

  public void testGetNodeList() {
    final List<String> results1 = doc1.getNodeList("result/xml/a");
    assertEquals(3, results1.size());
    assertEquals("1", results1.get(0));
    assertEquals("2", results1.get(1));
    assertEquals("3", results1.get(2));

    final List<String> results2 = doc1.getNodeList("result/xml/@id");
    assertEquals(1, results2.size());
    assertEquals("first", results2.get(0));

    final List<String> results3 = doc1.getNodeList("non/existant/node");
    assertEquals(0, results3.size());

    final List<String> results4 = doc1.getNodeList("result/xml/doesntexist");
    assertEquals(0, results4.size());
  }

  public void testGetIntNode() {
    assertEquals(1, doc1.getIntNode("result/xml/a"));
    assertEquals(4, doc1.getIntNode("result[1]/xml/a"));
    assertEquals(5, doc1.getIntNode("@count"));

    // Check handling of non-number values: default is returned when given...
    assertEquals(12345, doc1.getIntNode("result/xml/b", 12345));
    // ...and NumberFormatException is thrown when no default is supplied.
    try {
      doc1.getIntNode("result/xml/c");
      fail("NumberFormatException should have been thrown");
    } catch (final NumberFormatException ex) {
      // This is expected.
    }
  }

  public void testGetAttributesForNode() {
    final Map<?, ?> attributes = doc1.getAttributesForNode("result/xml");
    assertEquals(4, attributes.size());
    assertEquals("first", attributes.get("id"));
    assertEquals("1", attributes.get("attr1"));
    assertEquals("2", attributes.get("attr2"));
    assertEquals("3", attributes.get("attr3"));
    assertNull(attributes.get("non-existant"));
  }

  public void testSetNode() {
    doc1.setNode("result/xml/a", "newvalue1");
    assertEquals("newvalue1", doc1.getNode("result/xml/a"));

    doc1.setNode("result/xml/a[2]", "newvalue2");
    assertEquals("newvalue2", doc1.getNode("result/xml/a[2]"));

    doc1.setNode("result/xml/@id", "newvalue3");
    assertEquals("newvalue3", doc1.getNode("result/xml/@id"));

    doc1.setNode("@newnode", "newvalue4");
    assertEquals("newvalue4", doc1.getNode("@newnode"));

    doc1.setNode("@count", 12345);
    assertEquals(12345, doc1.getIntNode("@count"));
  }

  public void testSetIfNotNull() {
    // Empty string is a value, so it is set...
    doc1.setIfNotNull("result/xml/a[2]", "");
    assertEquals("", doc1.getNode("result/xml/a[2]", null));
    // ...but null is not.
    doc1.setIfNotNull("result/xml/a[3]", null);
    assertFalse(doc1.nodeExists("result/xml/a[3]"));
  }

  public void testSetIfNotEmpty() {
    doc1.setIfNotEmpty("result/xml/a[2]", "blah");
    assertEquals("blah", doc1.getNode("result/xml/a[2]"));
    // Neither empty string nor null creates a node.
    doc1.setIfNotEmpty("result/xml/a[3]", "");
    assertFalse(doc1.nodeExists("result/xml/a[3]"));
    doc1.setIfNotEmpty("result/xml/a[4]", null);
    assertFalse(doc1.nodeExists("result/xml/a[4]"));
  }

  public void testIterator() {
    final Iterator<String> values = valuesForResultXmlIdAttribute();
    final Iterator<PropBagEx> docIter = doc1.iterator();
    while (docIter.hasNext() && values.hasNext()) {
      final PropBagEx subdoc = docIter.next();
      final String expect = values.next();
      assertEquals(expect, subdoc.getNode("xml/@id"));
    }
    checkIterators(docIter, values);
  }

  public void testIteratorWithPath() {
    final Iterator<String> values = valuesForResultXmlIdAttribute();
    final Iterator<PropBagEx> docIter = doc1.iterator("result");
    while (docIter.hasNext() && values.hasNext()) {
      final PropBagEx subdoc = docIter.next();
      final String expect = values.next();
      assertEquals(expect, subdoc.getNode("xml/@id"));
    }
    checkIterators(docIter, values);
  }

  public void testIteratorWithStar() {
    final Iterator<String> values = valuesForFirstResultXmlChildren();
    final Iterator<PropBagEx> docIter = doc1.iterator("result/xml/*");
    while (docIter.hasNext() && values.hasNext()) {
      final PropBagEx subdoc = docIter.next();
      final String expect = values.next();
      assertEquals(expect, subdoc.getNode());
    }
    checkIterators(docIter, values);
  }

  public void testIterateAll() {
    final Iterator<String> values = valuesForAllResultXmlA();
    final Iterator<PropBagEx> docIter = doc1.iterateAll("result/xml/a");
    while (docIter.hasNext() && values.hasNext()) {
      final PropBagEx subdoc = docIter.next();
      final String expect = values.next();
      assertEquals(expect, subdoc.getNode());
    }
    checkIterators(docIter, values);
  }

  public void testIterateAllWithManySlash() {
    final Iterator<String> values = valuesForAllResultXmlA();
    final Iterator<PropBagEx> docIter = doc1.iterateAll("//result///xml/a///");
    while (docIter.hasNext() && values.hasNext()) {
      final PropBagEx subdoc = docIter.next();
      final String expect = values.next();
      assertEquals(expect, subdoc.getNode());
    }
    checkIterators(docIter, values);
  }

  public void testIterateAllWithStar() {
    final Iterator<String> values = valuesForAllResultXmlChildren();
    final Iterator<PropBagEx> docIter = doc1.iterateAll("result/xml/*");
    while (docIter.hasNext() && values.hasNext()) {
      final PropBagEx subdoc = docIter.next();
      final String expect = values.next();
      assertEquals(expect, subdoc.getNode());
    }
    checkIterators(docIter, values);
  }

  public void testIterateAllWithMoreStars() {
    final Iterator<String> values = valuesForAllResultXmlA();
    final Iterator<PropBagEx> docIter = doc1.iterateAll("*/*/a");
    while (docIter.hasNext() && values.hasNext()) {
      final PropBagEx subdoc = docIter.next();
      final String expect = values.next();
      assertEquals(expect, subdoc.getNode());
    }
    checkIterators(docIter, values);
  }

  public void testIterateValues() {
    final Iterator<String> values = valuesForFirstResultXmlA();
    final Iterator<String> docIter = doc1.iterateValues("result/xml/a");
    while (docIter.hasNext() && values.hasNext()) {
      final String value = docIter.next();
      final String expect = values.next();
      assertEquals(expect, value);
    }
    checkIterators(docIter, values);
  }

  public void testIterateAllValues() {
    final Iterator<String> values = valuesForAllResultXmlA();
    final Iterator<String> docIter = doc1.iterateAllValues("result/xml/a");
    while (docIter.hasNext() && values.hasNext()) {
      final String value = docIter.next();
      final String expect = values.next();
      assertEquals(expect, value);
    }
    checkIterators(docIter, values);
  }

  public void testIterateAllValuesForAttributes() {
    final Iterator<String> values = valuesForResultXmlIdAttribute();
    final Iterator<String> docIter = doc1.iterateAllValues("result/xml/@id");
    while (docIter.hasNext() && values.hasNext()) {
      final String value = docIter.next();
      final String expect = values.next();
      assertEquals(expect, value);
    }
    checkIterators(docIter, values);
  }

  public void testNodeCount() {
    assertEquals(5, doc1.nodeCount("result"));
    assertEquals(3, doc1.nodeCount("result/xml/a"));
    assertEquals(1, doc1.nodeCount("result/xml/a/@test"));
    assertEquals(5, doc1.nodeCount("result/xml/*"));
    assertEquals(1, doc1.nodeCount("result/xml/@id"));
    assertEquals(0, doc1.nodeCount("does/not/exist"));
    assertEquals(0, doc1.nodeCount("result/@none"));
    // The empty path, "/" and "*" address the root / its children.
    assertEquals(1, doc1.nodeCount(""));
    assertEquals(1, doc1.nodeCount("/"));
    assertEquals(5, doc1.nodeCount("*"));
  }

  public void testNodeExists() {
    assertTrue(doc1.nodeExists("result"));
    assertTrue(doc1.nodeExists("result/xml/a"));
    assertTrue(doc1.nodeExists("result/xml/@id"));
    assertFalse(doc1.nodeExists("result/xml/nope"));
    assertFalse(doc1.nodeExists("result/xml/@extinct"));
    assertFalse(doc1.nodeExists("does/not/exist"));
  }

  public void testDeleteNode() {
    // Deleting "result" removes only the first match.
    assertTrue(doc1.deleteNode("result"));
    assertEquals("second", doc1.getNode("result/xml/@id"));
    assertEquals(4, doc1.nodeCount("result"));

    assertTrue(doc1.deleteNode("result[2]/xml/a"));
    assertTrue(doc1.nodeExists("result[2]/xml"));
    assertTrue(doc1.nodeExists("result[2]/xml/a"));
    assertEquals(1, doc1.nodeCount("result[2]/xml/a"));

    assertTrue(doc1.deleteNode("result[2]/xml/a"));
    assertFalse(doc1.nodeExists("result[2]/xml/a"));
    assertTrue(doc1.nodeExists("result[2]/xml"));
    assertEquals(0, doc1.nodeCount("result[2]/xml/a"));

    assertTrue(doc1.deleteNode("@count"));
    assertFalse(doc1.nodeExists("@count"));
  }

  public void testDeleteAll() {
    assertTrue(doc1.deleteAll("result"));
    assertFalse(doc1.nodeExists("result"));
    // A second deleteAll on the same path finds nothing to delete.
    assertFalse(doc1.deleteAll("result"));
  }

  public void testGetNodeName() {
    assertEquals("results", doc1.getNodeName());
    final PropBagEx subdoc1 = doc1.getSubtree("result/xml");
    assertEquals("xml", subdoc1.getNodeName());
  }

  public void testEqualsDOM() {
    assertTrue(doc1.equalsDOM(doc1));
    final PropBagEx newdoc = new PropBagEx();
    assertFalse(doc1.equalsDOM(newdoc));
    assertFalse(doc1.equalsDOM(null));
  }

  public void testGetSubtree() {
    final PropBagEx subdoc1 = doc1.getSubtree("result/xml/a");
    assertNotNull(subdoc1);
    assertEquals("1", subdoc1.getNode());

    final PropBagEx subdoc2 = doc1.getSubtree("result[2]/xml/a");
    assertNotNull(subdoc2);
    assertEquals("5", subdoc2.getNode());

    // getSubtree returns null for missing paths.
    final PropBagEx subdoc3 = doc1.getSubtree("result/xml/some/non/existant/tree");
    assertNull(subdoc3);
  }

  public void testNewSubtree() {
    // Check it does not already exist.
    final PropBagEx subdoc1 = doc1.getSubtree("newtree/here");
    assertNull(subdoc1);

    final PropBagEx subdoc2 = doc1.newSubtree("newtree/here");
    assertNotNull(subdoc2);
    subdoc2.setNode("@check", "yes");

    final PropBagEx subdoc3 = doc1.getSubtree("newtree/here");
    assertNotNull(subdoc3);
    assertEquals("yes", subdoc3.getNode("@check"));

    // newSubtree always creates a fresh sibling node.
    final PropBagEx subdoc4 = doc1.newSubtree("newtree/here");
    assertNotNull(subdoc4);
    assertEquals("", subdoc4.getNode("@check"));
    assertEquals(1, doc1.nodeCount("newtree"));
    assertEquals(2, doc1.nodeCount("newtree/here"));
  }

  public void testAquireSubtree() {
    // Check it does not already exist.
    final PropBagEx subdoc1 = doc1.getSubtree("newtree/here");
    assertNull(subdoc1);

    final PropBagEx subdoc2 = doc1.aquireSubtree("newtree/here");
    assertNotNull(subdoc2);
    assertEquals(1, doc1.nodeCount("newtree/here"));

    // aquireSubtree reuses the existing node rather than adding a sibling.
    final PropBagEx subdoc3 = doc1.aquireSubtree("newtree/here");
    assertNotNull(subdoc3);
    assertEquals(1, doc1.nodeCount("newtree"));
    assertEquals(1, doc1.nodeCount("newtree/here"));
  }

  public void testAppend() {
    final PropBagEx subdoc1 = doc2.newSubtree("append1");
    subdoc1.append("", doc1);
    assertEquals(5, doc2.getIntNode("append1/results/@count"));

    doc2.append("append2", doc1);
    assertEquals(5, doc2.getIntNode("append2/results/@count"));
  }

  public void testAppendChildren() {
    final PropBagEx subdoc1 = doc2.newSubtree("append1");
    subdoc1.appendChildren("", doc1);
    assertEquals("first", doc2.getNode("append1/result/xml/@id"));

    doc2.appendChildren("append2", doc1);
    assertEquals("first", doc2.getNode("append2/result/xml/@id"));
  }

  public void testAttributeNamespaces() {
    final PropBagEx namespacedoc = new PropBagEx("<namespacetest xml:base=\"basevalue\"/>");
    assertEquals("basevalue", namespacedoc.getNode("@xml:base"));
  }

  public void testRootNode() {
    final PropBagEx subtree = doc1.getSubtree("result[4]/xml/node[1]");
    final List<String> values = subtree.getNodeList("");
    assertEquals(1, values.size());
    assertEquals("value2", values.get(0));
    assertEquals(1, subtree.nodeCount(""));
  }

  public void testSetNodeName() {
    // Test renaming a subtree
    assertTrue(doc2.nodeExists("child"));
    assertFalse(doc2.nodeExists("renamed.child"));
    doc2.getSubtree("child").setNodeName("renamed.child");
    assertFalse(doc2.nodeExists("child"));
    assertTrue(doc2.nodeExists("renamed.child"));

    // Test renaming the document root
    assertEquals("xml", doc2.getNodeName());
    doc2.setNodeName("new.root.name");
    assertEquals("new.root.name", doc2.getNodeName());
  }

  // Redmine #2459
  public void testControlCharsReRead() throws Exception {
    final PropBagEx bag = new PropBagEx("<xml/>");
    bag.setNode("/test", "\u0003\u0008\u0009");
    final String xml = bag.toString();
    final PropBagEx newBag = new PropBagEx(xml);
    // Control characters are lost on re-read. This is expected.
    final PropBagEx expected = new PropBagEx("<xml><test>\t</test></xml>");
    assertEquals(expected.toString(), newBag.toString());
  }

  public void testControlCharsBulkRead() throws Exception {
    final PropBagEx bag = new PropBagEx("<xml><node1>\u0003&\u0009</node1></xml>");
    assertEquals("&\t", bag.getNode("node1"));
    final String xml = bag.toString();
    assertEquals("<xml><node1>&\t</node1></xml>", xml);
  }

  public void testEscapedChars() throws Exception {
    final PropBagEx escp = new PropBagEx(getClass().getResourceAsStream("escaped.xml"));
    assertEquals("Escape char tab: \t", escp.getNode("/node1"));
    assertEquals("Some more text with an &", escp.getNode("/node2"));
    assertEquals("ball&shank", escp.getNode("/node3/@test"));
  }

  public void testIterateAllNodesWithName() throws Exception {
    final Iterator<String> values = valuesForAllResultXmlA();
    final Iterator<PropBagEx> docIter = doc1.iterateAllNodesWithName("a");
    while (docIter.hasNext() && values.hasNext()) {
      final PropBagEx subdoc = docIter.next();
      final String expect = values.next();
      assertEquals(expect, subdoc.getNode());
    }
    checkIterators(docIter, values);
  }

  public void testDeleteSubtree() {
    final PropBagEx sub = doc1.getSubtree("result[4]");
    assertNotNull(sub);
    doc1.deleteSubtree(sub);
    assertNull(doc1.getSubtree("result[4]"));

    final PropBagEx sub2 = doc1.getSubtree("result/xml");
    doc1.deleteSubtree(sub2);
    // Now verify result[0] is empty.
    final PropBagEx result0 = doc1.getSubtree("result[0]");
    assertTrue(result0.getNodeList("*").isEmpty());
  }

  // ////// HELPERS //////////////////////////////////////////////////////////

  /** Expected values of result/xml/@id across all result nodes in doc1. */
  private Iterator<String> valuesForResultXmlIdAttribute() {
    final Collection<String> values = new ArrayList<String>();
    values.add("first");
    values.add("second");
    values.add("third");
    values.add("fourth");
    values.add("fifth");
    return values.iterator();
  }

  /** Expected values of the "a" children of the first result/xml in doc1. */
  private Iterator<String> valuesForFirstResultXmlA() {
    final Collection<String> values = new ArrayList<String>();
    values.add("1");
    values.add("2");
    values.add("3");
    return values.iterator();
  }

  /** Expected values of every result/xml/a node in doc1. */
  private Iterator<String> valuesForAllResultXmlA() {
    final Collection<String> values = new ArrayList<String>();
    values.add("1");
    values.add("2");
    values.add("3");
    values.add("4");
    values.add("5");
    values.add("6");
    values.add("7");
    return values.iterator();
  }

  /** Expected values of every child of every result/xml node in doc1. */
  private Iterator<String> valuesForAllResultXmlChildren() {
    final Collection<String> values = new ArrayList<String>();
    values.add("1");
    values.add("2");
    values.add("xx");
    values.add("yy");
    values.add("3");
    values.add("4");
    values.add("5");
    values.add("6");
    values.add("7");
    values.add("value1");
    values.add("value2");
    values.add("value3");
    return values.iterator();
  }

  /** Expected values of the children of the first result/xml node in doc1. */
  private Iterator<String> valuesForFirstResultXmlChildren() {
    final Collection<String> values = new ArrayList<String>();
    values.add("1");
    values.add("2");
    values.add("xx");
    values.add("yy");
    values.add("3");
    return values.iterator();
  }

  /** Asserts that both iterators were exhausted at the same time. */
  private void checkIterators(final Iterator<?> source, final Iterator<?> values) {
    if (source.hasNext()) {
      fail("Document has more elements to iterate");
    }
    if (values.hasNext()) {
      fail("Document should have more elements to iterate");
    }
  }
}
| |
/*
* Copyright (c) 2004-2022, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.hisp.dhis.dataelement;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections4.SetValuedMap;
import org.hisp.dhis.category.Category;
import org.hisp.dhis.category.CategoryCombo;
import org.hisp.dhis.category.CategoryComboStore;
import org.hisp.dhis.category.CategoryOption;
import org.hisp.dhis.category.CategoryOptionCombo;
import org.hisp.dhis.category.CategoryOptionComboStore;
import org.hisp.dhis.category.CategoryOptionGroup;
import org.hisp.dhis.category.CategoryOptionGroupSet;
import org.hisp.dhis.category.CategoryOptionGroupSetStore;
import org.hisp.dhis.category.CategoryOptionGroupStore;
import org.hisp.dhis.category.CategoryOptionStore;
import org.hisp.dhis.category.CategoryService;
import org.hisp.dhis.category.CategoryStore;
import org.hisp.dhis.common.DataDimensionType;
import org.hisp.dhis.common.DeleteNotAllowedException;
import org.hisp.dhis.common.IdScheme;
import org.hisp.dhis.common.IdentifiableObjectManager;
import org.hisp.dhis.dataset.DataSet;
import org.hisp.dhis.dataset.DataSetElement;
import org.hisp.dhis.program.jdbc.JdbcOrgUnitAssociationsStore;
import org.hisp.dhis.security.acl.AccessStringHelper;
import org.hisp.dhis.security.acl.AclService;
import org.hisp.dhis.user.CurrentUserService;
import org.hisp.dhis.user.User;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
/**
* @author Abyot Asalefew
*/
@Slf4j
@Service( "org.hisp.dhis.category.CategoryService" )
@RequiredArgsConstructor
public class DefaultCategoryService
implements CategoryService
{
// -------------------------------------------------------------------------
// Dependencies
// -------------------------------------------------------------------------
// All collaborators below are injected via the Lombok-generated constructor
// (@RequiredArgsConstructor on the class).
private final CategoryStore categoryStore;
private final CategoryOptionStore categoryOptionStore;
private final CategoryComboStore categoryComboStore;
private final CategoryOptionComboStore categoryOptionComboStore;
private final CategoryOptionGroupStore categoryOptionGroupStore;
private final CategoryOptionGroupSetStore categoryOptionGroupSetStore;
private final IdentifiableObjectManager idObjectManager;
private final CurrentUserService currentUserService;
private final AclService aclService;
// NOTE(review): with @RequiredArgsConstructor, a field-level @Qualifier is
// only copied to the generated constructor parameter when Lombok's
// copyableAnnotations configuration includes it — confirm the intended
// bean is actually the one injected here.
@Qualifier( "jdbcCategoryOptionOrgUnitAssociationsStore" )
private final JdbcOrgUnitAssociationsStore jdbcOrgUnitAssociationsStore;
// -------------------------------------------------------------------------
// Category
// -------------------------------------------------------------------------
/**
 * Persists the given category and returns its generated database id.
 */
@Override
@Transactional
public long addCategory( Category dataElementCategory )
{
    categoryStore.save( dataElementCategory );
    return dataElementCategory.getId();
}

/**
 * Updates the persistent state of the given category.
 */
@Override
@Transactional
public void updateCategory( Category dataElementCategory )
{
    categoryStore.update( dataElementCategory );
}

/**
 * Deletes the given category from the store.
 */
@Override
@Transactional
public void deleteCategory( Category dataElementCategory )
{
    categoryStore.delete( dataElementCategory );
}
/**
 * Returns all categories visible to the current user.
 */
@Override
@Transactional( readOnly = true )
public List<Category> getAllDataElementCategories()
{
    return categoryStore.getAll();
}

/**
 * Returns the category with the given database id, or null if none exists.
 */
@Override
@Transactional( readOnly = true )
public Category getCategory( long id )
{
    return categoryStore.get( id );
}

/**
 * Returns the category with the given UID, or null if none exists.
 */
@Override
@Transactional( readOnly = true )
public Category getCategory( String uid )
{
    return categoryStore.getByUid( uid );
}
@Override
@Transactional( readOnly = true )
public Category getCategoryByName( String name )
{
List<Category> dataElementCategories = new ArrayList<>(
categoryStore.getAllEqName( name ) );
if ( dataElementCategories.isEmpty() )
{
return null;
}
return dataElementCategories.get( 0 );
}
@Override
@Transactional( readOnly = true )
public Category getDefaultCategory()
{
return getCategoryByName( Category.DEFAULT_NAME );
}
@Override
@Transactional( readOnly = true )
public List<Category> getDisaggregationCategories()
{
return categoryStore.getCategoriesByDimensionType( DataDimensionType.DISAGGREGATION );
}
@Override
@Transactional( readOnly = true )
public List<Category> getDisaggregationDataDimensionCategoriesNoAcl()
{
return categoryStore.getCategoriesNoAcl( DataDimensionType.DISAGGREGATION, true );
}
@Override
@Transactional( readOnly = true )
public List<Category> getAttributeCategories()
{
return categoryStore.getCategoriesByDimensionType( DataDimensionType.ATTRIBUTE );
}
@Override
@Transactional( readOnly = true )
public List<Category> getAttributeDataDimensionCategoriesNoAcl()
{
return categoryStore.getCategoriesNoAcl( DataDimensionType.ATTRIBUTE, true );
}
// -------------------------------------------------------------------------
// CategoryOption
// -------------------------------------------------------------------------
@Override
@Transactional
public long addCategoryOption( CategoryOption dataElementCategoryOption )
{
categoryOptionStore.save( dataElementCategoryOption );
return dataElementCategoryOption.getId();
}
@Override
@Transactional
public void updateCategoryOption( CategoryOption dataElementCategoryOption )
{
categoryOptionStore.update( dataElementCategoryOption );
}
@Override
@Transactional
public void deleteCategoryOption( CategoryOption dataElementCategoryOption )
{
categoryOptionStore.delete( dataElementCategoryOption );
}
@Override
@Transactional( readOnly = true )
public CategoryOption getCategoryOption( long id )
{
return categoryOptionStore.get( id );
}
@Override
@Transactional( readOnly = true )
public CategoryOption getCategoryOption( String uid )
{
return categoryOptionStore.getByUid( uid );
}
@Override
@Transactional( readOnly = true )
public CategoryOption getCategoryOptionByName( String name )
{
return categoryOptionStore.getByName( name );
}
@Override
@Transactional( readOnly = true )
public CategoryOption getDefaultCategoryOption()
{
return getCategoryOptionByName( CategoryOption.DEFAULT_NAME );
}
@Override
@Transactional( readOnly = true )
public List<CategoryOption> getAllCategoryOptions()
{
return categoryOptionStore.getAll();
}
@Override
@Transactional( readOnly = true )
public List<CategoryOption> getCategoryOptions( Category category )
{
return categoryOptionStore.getCategoryOptions( category );
}
@Override
@Transactional( readOnly = true )
public List<CategoryOption> getDataWriteCategoryOptions( Category category, User user )
{
if ( user == null )
{
return Lists.newArrayList();
}
return user.isSuper() ? getCategoryOptions( category )
: categoryOptionStore.getDataWriteCategoryOptions( category, user );
}
@Override
@Transactional( readOnly = true )
public Set<CategoryOption> getCoDimensionConstraints( User user )
{
Set<CategoryOption> options = null;
Set<Category> catConstraints = user.getCatDimensionConstraints();
if ( catConstraints != null && !catConstraints.isEmpty() )
{
options = new HashSet<>();
for ( Category category : catConstraints )
{
options.addAll( getCategoryOptions( category ) );
}
}
return options;
}
// -------------------------------------------------------------------------
// CategoryCombo
// -------------------------------------------------------------------------
@Override
@Transactional
public long addCategoryCombo( CategoryCombo dataElementCategoryCombo )
{
categoryComboStore.save( dataElementCategoryCombo );
return dataElementCategoryCombo.getId();
}
@Override
@Transactional
public void updateCategoryCombo( CategoryCombo dataElementCategoryCombo )
{
categoryComboStore.update( dataElementCategoryCombo );
}
@Override
@Transactional
public void deleteCategoryCombo( CategoryCombo dataElementCategoryCombo )
{
categoryComboStore.delete( dataElementCategoryCombo );
}
@Override
@Transactional( readOnly = true )
public List<CategoryCombo> getAllCategoryCombos()
{
return categoryComboStore.getAll();
}
@Override
@Transactional( readOnly = true )
public CategoryCombo getCategoryCombo( long id )
{
return categoryComboStore.get( id );
}
@Override
@Transactional( readOnly = true )
public CategoryCombo getCategoryCombo( String uid )
{
return categoryComboStore.getByUid( uid );
}
@Override
@Transactional( readOnly = true )
public CategoryCombo getCategoryComboByName( String name )
{
return categoryComboStore.getByName( name );
}
@Override
@Transactional( readOnly = true )
public CategoryCombo getDefaultCategoryCombo()
{
return getCategoryComboByName( CategoryCombo.DEFAULT_CATEGORY_COMBO_NAME );
}
@Override
@Transactional( readOnly = true )
public List<CategoryCombo> getDisaggregationCategoryCombos()
{
return categoryComboStore.getCategoryCombosByDimensionType( DataDimensionType.DISAGGREGATION );
}
@Override
@Transactional( readOnly = true )
public List<CategoryCombo> getAttributeCategoryCombos()
{
return categoryComboStore.getCategoryCombosByDimensionType( DataDimensionType.ATTRIBUTE );
}
@Override
@Transactional( readOnly = true )
public String validateCategoryCombo( CategoryCombo categoryCombo )
{
if ( categoryCombo == null )
{
return "category_combo_is_null";
}
if ( categoryCombo.getCategories() == null || categoryCombo.getCategories().isEmpty() )
{
return "category_combo_must_have_at_least_one_category";
}
if ( Sets.newHashSet( categoryCombo.getCategories() ).size() < categoryCombo.getCategories().size() )
{
return "category_combo_cannot_have_duplicate_categories";
}
Set<CategoryOption> categoryOptions = new HashSet<>();
for ( Category category : categoryCombo.getCategories() )
{
if ( category == null || category.getCategoryOptions().isEmpty() )
{
return "categories_must_have_at_least_one_category_option";
}
if ( !Sets.intersection( categoryOptions, Sets.newHashSet( category.getCategoryOptions() ) ).isEmpty() )
{
return "categories_cannot_share_category_options";
}
}
return null;
}
// -------------------------------------------------------------------------
// CategoryOptionCombo
// -------------------------------------------------------------------------
@Override
@Transactional
public long addCategoryOptionCombo( CategoryOptionCombo dataElementCategoryOptionCombo )
{
categoryOptionComboStore.save( dataElementCategoryOptionCombo );
return dataElementCategoryOptionCombo.getId();
}
@Override
@Transactional
public void updateCategoryOptionCombo( CategoryOptionCombo dataElementCategoryOptionCombo )
{
categoryOptionComboStore.update( dataElementCategoryOptionCombo );
}
@Override
@Transactional
public void deleteCategoryOptionCombo( CategoryOptionCombo dataElementCategoryOptionCombo )
{
categoryOptionComboStore.delete( dataElementCategoryOptionCombo );
}
@Override
@Transactional( noRollbackFor = DeleteNotAllowedException.class )
public void deleteCategoryOptionComboNoRollback( CategoryOptionCombo categoryOptionCombo )
{
categoryOptionComboStore.deleteNoRollBack( categoryOptionCombo );
}
@Override
@Transactional( readOnly = true )
public CategoryOptionCombo getCategoryOptionCombo( long id )
{
return categoryOptionComboStore.get( id );
}
@Override
@Transactional( readOnly = true )
public CategoryOptionCombo getCategoryOptionCombo( String uid )
{
return categoryOptionComboStore.getByUid( uid );
}
@Override
@Transactional( readOnly = true )
public CategoryOptionCombo getCategoryOptionComboByCode( String code )
{
return categoryOptionComboStore.getByCode( code );
}
@Override
@Transactional( readOnly = true )
public CategoryOptionCombo getCategoryOptionCombo( CategoryCombo categoryCombo,
Set<CategoryOption> categoryOptions )
{
return categoryOptionComboStore.getCategoryOptionCombo( categoryCombo, categoryOptions );
}
@Override
@Transactional( readOnly = true )
public List<CategoryOptionCombo> getAllCategoryOptionCombos()
{
return categoryOptionComboStore.getAll();
}
@Override
@Transactional
public void generateDefaultDimension()
{
// ---------------------------------------------------------------------
// CategoryOption
// ---------------------------------------------------------------------
CategoryOption categoryOption = new CategoryOption( CategoryOption.DEFAULT_NAME );
categoryOption.setUid( "xYerKDKCefk" );
categoryOption.setCode( "default" );
addCategoryOption( categoryOption );
categoryOption.setPublicAccess( AccessStringHelper.CATEGORY_OPTION_DEFAULT );
updateCategoryOption( categoryOption );
// ---------------------------------------------------------------------
// Category
// ---------------------------------------------------------------------
Category category = new Category( Category.DEFAULT_NAME, DataDimensionType.DISAGGREGATION );
category.setUid( "GLevLNI9wkl" );
category.setCode( "default" );
category.setShortName( "default" );
category.setDataDimension( false );
category.addCategoryOption( categoryOption );
addCategory( category );
category.setPublicAccess( AccessStringHelper.CATEGORY_NO_DATA_SHARING_DEFAULT );
updateCategory( category );
// ---------------------------------------------------------------------
// CategoryCombo
// ---------------------------------------------------------------------
CategoryCombo categoryCombo = new CategoryCombo( CategoryCombo.DEFAULT_CATEGORY_COMBO_NAME,
DataDimensionType.DISAGGREGATION );
categoryCombo.setUid( "bjDvmb4bfuf" );
categoryCombo.setCode( "default" );
categoryCombo.setDataDimensionType( DataDimensionType.DISAGGREGATION );
categoryCombo.addCategory( category );
addCategoryCombo( categoryCombo );
categoryCombo.setPublicAccess( AccessStringHelper.CATEGORY_NO_DATA_SHARING_DEFAULT );
updateCategoryCombo( categoryCombo );
// ---------------------------------------------------------------------
// CategoryOptionCombo
// ---------------------------------------------------------------------
CategoryOptionCombo categoryOptionCombo = new CategoryOptionCombo();
categoryOptionCombo.setUid( "HllvX50cXC0" );
categoryOptionCombo.setCode( "default" );
categoryOptionCombo.setCategoryCombo( categoryCombo );
categoryOptionCombo.addCategoryOption( categoryOption );
addCategoryOptionCombo( categoryOptionCombo );
categoryOptionCombo.setPublicAccess( AccessStringHelper.CATEGORY_NO_DATA_SHARING_DEFAULT );
updateCategoryOptionCombo( categoryOptionCombo );
Set<CategoryOptionCombo> categoryOptionCombos = new HashSet<>();
categoryOptionCombos.add( categoryOptionCombo );
categoryCombo.setOptionCombos( categoryOptionCombos );
updateCategoryCombo( categoryCombo );
categoryOption.setCategoryOptionCombos( categoryOptionCombos );
updateCategoryOption( categoryOption );
}
@Override
@Transactional( readOnly = true )
public CategoryOptionCombo getDefaultCategoryOptionCombo()
{
return categoryOptionComboStore.getByName( CategoryCombo.DEFAULT_CATEGORY_COMBO_NAME );
}
@Override
@Transactional
public void generateOptionCombos( CategoryCombo categoryCombo )
{
categoryCombo.generateOptionCombos();
for ( CategoryOptionCombo optionCombo : categoryCombo.getOptionCombos() )
{
categoryCombo.getOptionCombos().add( optionCombo );
addCategoryOptionCombo( optionCombo );
}
updateCategoryCombo( categoryCombo );
}
@Override
@Transactional
public void updateOptionCombos( Category category )
{
for ( CategoryCombo categoryCombo : getAllCategoryCombos() )
{
if ( categoryCombo.getCategories().contains( category ) )
{
updateOptionCombos( categoryCombo );
}
}
}
@Override
@Transactional
public void updateOptionCombos( CategoryCombo categoryCombo )
{
if ( categoryCombo == null || !categoryCombo.isValid() )
{
log.warn( "Category combo is null or invalid, could not update option combos: " + categoryCombo );
return;
}
List<CategoryOptionCombo> generatedOptionCombos = categoryCombo.generateOptionCombosList();
Set<CategoryOptionCombo> persistedOptionCombos = categoryCombo.getOptionCombos();
boolean modified = false;
for ( CategoryOptionCombo optionCombo : generatedOptionCombos )
{
if ( !persistedOptionCombos.contains( optionCombo ) )
{
categoryCombo.getOptionCombos().add( optionCombo );
addCategoryOptionCombo( optionCombo );
log.info( "Added missing category option combo: " + optionCombo + " for category combo: "
+ categoryCombo.getName() );
modified = true;
}
}
if ( modified )
{
updateCategoryCombo( categoryCombo );
}
}
@Override
@Transactional( readOnly = true )
public CategoryOptionCombo getCategoryOptionComboAcl( IdScheme idScheme, String id )
{
CategoryOptionCombo coc = idObjectManager.getObject( CategoryOptionCombo.class, idScheme, id );
if ( coc != null )
{
User user = currentUserService.getCurrentUser();
for ( CategoryOption categoryOption : coc.getCategoryOptions() )
{
if ( !aclService.canDataWrite( user, categoryOption ) )
{
return null;
}
}
}
return coc;
}
@Override
@Transactional
public void updateCategoryOptionComboNames()
{
categoryOptionComboStore.updateNames();
}
// -------------------------------------------------------------------------
// DataElementOperand
// -------------------------------------------------------------------------
@Override
@Transactional( readOnly = true )
public List<DataElementOperand> getOperands( Collection<DataElement> dataElements )
{
return getOperands( dataElements, false );
}
@Override
@Transactional( readOnly = true )
public List<DataElementOperand> getOperands( Collection<DataElement> dataElements, boolean includeTotals )
{
List<DataElementOperand> operands = Lists.newArrayList();
for ( DataElement dataElement : dataElements )
{
Set<CategoryCombo> categoryCombos = dataElement.getCategoryCombos();
boolean anyIsDefault = categoryCombos.stream().anyMatch( cc -> cc.isDefault() );
if ( includeTotals && !anyIsDefault )
{
operands.add( new DataElementOperand( dataElement ) );
}
for ( CategoryCombo categoryCombo : categoryCombos )
{
operands.addAll( getOperands( dataElement, categoryCombo ) );
}
}
return operands;
}
@Override
@Transactional( readOnly = true )
public List<DataElementOperand> getOperands( DataSet dataSet, boolean includeTotals )
{
List<DataElementOperand> operands = Lists.newArrayList();
for ( DataSetElement element : dataSet.getDataSetElements() )
{
CategoryCombo categoryCombo = element.getResolvedCategoryCombo();
if ( includeTotals && !categoryCombo.isDefault() )
{
operands.add( new DataElementOperand( element.getDataElement() ) );
}
operands.addAll( getOperands( element.getDataElement(), element.getResolvedCategoryCombo() ) );
}
return operands;
}
private List<DataElementOperand> getOperands( DataElement dataElement, CategoryCombo categoryCombo )
{
List<DataElementOperand> operands = Lists.newArrayList();
for ( CategoryOptionCombo categoryOptionCombo : categoryCombo.getSortedOptionCombos() )
{
operands.add( new DataElementOperand( dataElement, categoryOptionCombo ) );
}
return operands;
}
// -------------------------------------------------------------------------
// CategoryOptionGroup
// -------------------------------------------------------------------------
@Override
@Transactional
public long saveCategoryOptionGroup( CategoryOptionGroup group )
{
categoryOptionGroupStore.save( group );
return group.getId();
}
@Override
@Transactional
public void updateCategoryOptionGroup( CategoryOptionGroup group )
{
categoryOptionGroupStore.update( group );
}
@Override
@Transactional( readOnly = true )
public CategoryOptionGroup getCategoryOptionGroup( long id )
{
return categoryOptionGroupStore.get( id );
}
@Override
@Transactional( readOnly = true )
public CategoryOptionGroup getCategoryOptionGroup( String uid )
{
return categoryOptionGroupStore.getByUid( uid );
}
@Override
@Transactional
public void deleteCategoryOptionGroup( CategoryOptionGroup group )
{
categoryOptionGroupStore.delete( group );
}
@Override
@Transactional( readOnly = true )
public List<CategoryOptionGroup> getAllCategoryOptionGroups()
{
return categoryOptionGroupStore.getAll();
}
@Override
@Transactional( readOnly = true )
public List<CategoryOptionGroup> getCategoryOptionGroups( CategoryOptionGroupSet groupSet )
{
return categoryOptionGroupStore.getCategoryOptionGroups( groupSet );
}
@Override
@Transactional( readOnly = true )
public Set<CategoryOptionGroup> getCogDimensionConstraints( User user )
{
Set<CategoryOptionGroup> groups = null;
Set<CategoryOptionGroupSet> cogsConstraints = user.getCogsDimensionConstraints();
if ( cogsConstraints != null && !cogsConstraints.isEmpty() )
{
groups = new HashSet<>();
for ( CategoryOptionGroupSet cogs : cogsConstraints )
{
groups.addAll( getCategoryOptionGroups( cogs ) );
}
}
return groups;
}
// -------------------------------------------------------------------------
// CategoryOptionGroupSet
// -------------------------------------------------------------------------
@Override
@Transactional
public long saveCategoryOptionGroupSet( CategoryOptionGroupSet group )
{
categoryOptionGroupSetStore.save( group );
return group.getId();
}
@Override
@Transactional
public void updateCategoryOptionGroupSet( CategoryOptionGroupSet group )
{
categoryOptionGroupSetStore.update( group );
}
@Override
@Transactional( readOnly = true )
public CategoryOptionGroupSet getCategoryOptionGroupSet( long id )
{
return categoryOptionGroupSetStore.get( id );
}
@Override
@Transactional( readOnly = true )
public CategoryOptionGroupSet getCategoryOptionGroupSet( String uid )
{
return categoryOptionGroupSetStore.getByUid( uid );
}
@Override
@Transactional
public void deleteCategoryOptionGroupSet( CategoryOptionGroupSet group )
{
categoryOptionGroupSetStore.delete( group );
}
@Override
@Transactional( readOnly = true )
public List<CategoryOptionGroupSet> getAllCategoryOptionGroupSets()
{
return categoryOptionGroupSetStore.getAll();
}
@Override
@Transactional( readOnly = true )
public List<CategoryOptionGroupSet> getDisaggregationCategoryOptionGroupSetsNoAcl()
{
return categoryOptionGroupSetStore.getCategoryOptionGroupSetsNoAcl( DataDimensionType.DISAGGREGATION, true );
}
@Override
@Transactional( readOnly = true )
public List<CategoryOptionGroupSet> getAttributeCategoryOptionGroupSetsNoAcl()
{
return categoryOptionGroupSetStore.getCategoryOptionGroupSetsNoAcl( DataDimensionType.ATTRIBUTE, true );
}
@Override
public SetValuedMap<String, String> getCategoryOptionOrganisationUnitsAssociations( Set<String> uids )
{
return jdbcOrgUnitAssociationsStore.getOrganisationUnitsAssociationsForCurrentUser( uids );
}
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
package org.elasticsearch.xpack.ml.dataframe.extractor;
import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.action.fieldcaps.FieldCapabilities;
import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse;
import org.elasticsearch.core.Tuple;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsConfig;
import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsDest;
import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsSource;
import org.elasticsearch.xpack.core.ml.dataframe.analyses.BoostedTreeParams;
import org.elasticsearch.xpack.core.ml.dataframe.analyses.Classification;
import org.elasticsearch.xpack.core.ml.dataframe.analyses.OutlierDetection;
import org.elasticsearch.xpack.core.ml.dataframe.analyses.Regression;
import org.elasticsearch.xpack.core.ml.dataframe.explain.FieldSelection;
import org.elasticsearch.xpack.core.ml.inference.preprocessing.OneHotEncoding;
import org.elasticsearch.xpack.core.ml.inference.preprocessing.PreProcessor;
import org.elasticsearch.xpack.ml.extractor.ExtractedField;
import org.elasticsearch.xpack.ml.extractor.ExtractedFields;
import org.elasticsearch.xpack.ml.test.SearchHitBuilder;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;
import static org.hamcrest.Matchers.arrayContaining;
import static org.hamcrest.Matchers.arrayContainingInAnyOrder;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
public class ExtractedFieldsDetectorTests extends ESTestCase {
// Fixed index/field names used throughout the tests.
private static final String[] SOURCE_INDEX = new String[] { "source_index" };
private static final String DEST_INDEX = "dest_index";
private static final String RESULTS_FIELD = "ml";
// Per-test fixture state; presumably consumed by the build*Config() helpers
// defined elsewhere in this class — TODO confirm against the full file.
private FetchSourceContext sourceFiltering;
private FetchSourceContext analyzedFields;
public void testDetect_GivenFloatField() {
    // A single aggregatable float field should be extracted via doc_values.
    FieldCapabilitiesResponse fieldCaps = new MockFieldCapsResponseBuilder().addAggregatableField("some_float", "float").build();
    ExtractedFieldsDetector detector = new ExtractedFieldsDetector(buildOutlierDetectionConfig(), 100, fieldCaps, Collections.emptyMap());

    Tuple<ExtractedFields, List<FieldSelection>> extraction = detector.detect();

    List<ExtractedField> fields = extraction.v1().getAllFields();
    assertThat(fields, hasSize(1));
    ExtractedField field = fields.get(0);
    assertThat(field.getName(), equalTo("some_float"));
    assertThat(field.getMethod(), equalTo(ExtractedField.Method.DOC_VALUE));
    assertFieldSelectionContains(
        extraction.v2(),
        FieldSelection.included("some_float", Collections.singleton("float"), false, FieldSelection.FeatureType.NUMERICAL)
    );
}
public void testDetect_GivenNumericFieldWithMultipleTypes() {
    // A field mapped to several numeric types is still a single numeric feature.
    String[] numericTypes = { "long", "integer", "short", "byte", "double", "float", "half_float", "scaled_float" };
    FieldCapabilitiesResponse fieldCaps = new MockFieldCapsResponseBuilder().addAggregatableField("some_number", numericTypes).build();
    ExtractedFieldsDetector detector = new ExtractedFieldsDetector(buildOutlierDetectionConfig(), 100, fieldCaps, Collections.emptyMap());

    Tuple<ExtractedFields, List<FieldSelection>> extraction = detector.detect();

    List<ExtractedField> fields = extraction.v1().getAllFields();
    assertThat(fields, hasSize(1));
    ExtractedField field = fields.get(0);
    assertThat(field.getName(), equalTo("some_number"));
    assertThat(field.getMethod(), equalTo(ExtractedField.Method.DOC_VALUE));
    assertFieldSelectionContains(
        extraction.v2(),
        FieldSelection.included("some_number", new HashSet<>(Arrays.asList(numericTypes)), false, FieldSelection.FeatureType.NUMERICAL)
    );
}
public void testDetect_GivenOutlierDetectionAndNonNumericField() {
    // Outlier detection only supports numeric/boolean fields; a keyword is excluded.
    FieldCapabilitiesResponse fieldCaps = new MockFieldCapsResponseBuilder().addAggregatableField("some_keyword", "keyword").build();
    ExtractedFieldsDetector detector = new ExtractedFieldsDetector(buildOutlierDetectionConfig(), 100, fieldCaps, Collections.emptyMap());

    Tuple<ExtractedFields, List<FieldSelection>> extraction = detector.detect();

    assertThat(extraction.v1().getAllFields().isEmpty(), is(true));
    List<FieldSelection> selections = extraction.v2();
    assertThat(selections.size(), equalTo(1));
    FieldSelection selection = selections.get(0);
    assertThat(selection.getName(), equalTo("some_keyword"));
    assertThat(selection.isIncluded(), is(false));
    assertThat(
        selection.getReason(),
        equalTo(
            "unsupported type; supported types are "
                + "[boolean, byte, double, float, half_float, integer, long, scaled_float, short, unsigned_long]"
        )
    );
}
public void testDetect_GivenOutlierDetectionAndFieldWithNumericAndNonNumericTypes() {
    // A field whose mappings mix numeric and non-numeric types is excluded entirely.
    FieldCapabilitiesResponse fieldCaps = new MockFieldCapsResponseBuilder().addAggregatableField("indecisive_field", "float", "keyword")
        .build();
    ExtractedFieldsDetector detector = new ExtractedFieldsDetector(buildOutlierDetectionConfig(), 100, fieldCaps, Collections.emptyMap());

    Tuple<ExtractedFields, List<FieldSelection>> extraction = detector.detect();

    assertThat(extraction.v1().getAllFields().isEmpty(), is(true));
    List<FieldSelection> selections = extraction.v2();
    assertThat(selections.size(), equalTo(1));
    FieldSelection selection = selections.get(0);
    assertThat(selection.getName(), equalTo("indecisive_field"));
    assertThat(selection.isIncluded(), is(false));
    assertThat(
        selection.getReason(),
        equalTo(
            "unsupported type; supported types are "
                + "[boolean, byte, double, float, half_float, integer, long, scaled_float, short, unsigned_long]"
        )
    );
}
public void testDetect_GivenOutlierDetectionAndMultipleFields() {
    // Numeric and boolean fields are included; the keyword field is excluded.
    FieldCapabilitiesResponse fieldCaps = new MockFieldCapsResponseBuilder().addAggregatableField("some_float", "float")
        .addAggregatableField("some_long", "long")
        .addAggregatableField("some_keyword", "keyword")
        .addAggregatableField("some_boolean", "boolean")
        .build();
    ExtractedFieldsDetector detector = new ExtractedFieldsDetector(buildOutlierDetectionConfig(), 100, fieldCaps, Collections.emptyMap());

    Tuple<ExtractedFields, List<FieldSelection>> extraction = detector.detect();

    List<ExtractedField> fields = extraction.v1().getAllFields();
    assertThat(fields, hasSize(3));
    Set<String> names = fields.stream().map(ExtractedField::getName).collect(Collectors.toSet());
    assertThat(names, containsInAnyOrder("some_float", "some_long", "some_boolean"));
    Set<ExtractedField.Method> methods = fields.stream().map(ExtractedField::getMethod).collect(Collectors.toSet());
    assertThat(methods, contains(equalTo(ExtractedField.Method.DOC_VALUE)));
    assertFieldSelectionContains(
        extraction.v2(),
        FieldSelection.included("some_boolean", Collections.singleton("boolean"), false, FieldSelection.FeatureType.NUMERICAL),
        FieldSelection.included("some_float", Collections.singleton("float"), false, FieldSelection.FeatureType.NUMERICAL),
        FieldSelection.excluded(
            "some_keyword",
            Collections.singleton("keyword"),
            "unsupported type; "
                + "supported types are [boolean, byte, double, float, half_float, integer, long, scaled_float, short, unsigned_long]"
        ),
        FieldSelection.included("some_long", Collections.singleton("long"), false, FieldSelection.FeatureType.NUMERICAL)
    );
}
public void testDetect_GivenRegressionAndMultipleFields() {
    // Regression supports categorical features too, so the keyword field is included.
    FieldCapabilitiesResponse fieldCaps = new MockFieldCapsResponseBuilder().addAggregatableField("some_float", "float")
        .addAggregatableField("some_long", "long")
        .addAggregatableField("some_keyword", "keyword")
        .addAggregatableField("some_boolean", "boolean")
        .addAggregatableField("foo", "double")
        .build();
    ExtractedFieldsDetector detector = new ExtractedFieldsDetector(buildRegressionConfig("foo"), 100, fieldCaps, Collections.emptyMap());

    Tuple<ExtractedFields, List<FieldSelection>> extraction = detector.detect();

    List<ExtractedField> fields = extraction.v1().getAllFields();
    assertThat(fields, hasSize(5));
    List<String> names = fields.stream().map(ExtractedField::getName).collect(Collectors.toList());
    assertThat(names, containsInAnyOrder("foo", "some_float", "some_keyword", "some_long", "some_boolean"));
    Set<ExtractedField.Method> methods = fields.stream().map(ExtractedField::getMethod).collect(Collectors.toSet());
    assertThat(methods, contains(equalTo(ExtractedField.Method.DOC_VALUE)));
    assertFieldSelectionContains(
        extraction.v2(),
        FieldSelection.included("foo", Collections.singleton("double"), true, FieldSelection.FeatureType.NUMERICAL),
        FieldSelection.included("some_boolean", Collections.singleton("boolean"), false, FieldSelection.FeatureType.NUMERICAL),
        FieldSelection.included("some_float", Collections.singleton("float"), false, FieldSelection.FeatureType.NUMERICAL),
        FieldSelection.included("some_keyword", Collections.singleton("keyword"), false, FieldSelection.FeatureType.CATEGORICAL),
        FieldSelection.included("some_long", Collections.singleton("long"), false, FieldSelection.FeatureType.NUMERICAL)
    );
}
public void testDetect_GivenRegressionAndRequiredFieldMissing() {
    // The dependent variable "foo" is absent from the mappings, so detection must fail.
    FieldCapabilitiesResponse fieldCaps = new MockFieldCapsResponseBuilder().addAggregatableField("some_float", "float")
        .addAggregatableField("some_long", "long")
        .addAggregatableField("some_keyword", "keyword")
        .build();
    ExtractedFieldsDetector detector = new ExtractedFieldsDetector(buildRegressionConfig("foo"), 100, fieldCaps, Collections.emptyMap());

    ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, detector::detect);

    assertThat(e.getMessage(), equalTo("required field [foo] is missing; analysis requires fields [foo]"));
}
public void testDetect_GivenRegressionAndRequiredFieldExcluded() {
    // Excluding the dependent variable via analyzed_fields must fail detection.
    FieldCapabilitiesResponse fieldCaps = new MockFieldCapsResponseBuilder().addAggregatableField("some_float", "float")
        .addAggregatableField("some_long", "long")
        .addAggregatableField("some_keyword", "keyword")
        .addAggregatableField("foo", "float")
        .build();
    analyzedFields = new FetchSourceContext(true, new String[0], new String[] { "foo" });
    ExtractedFieldsDetector detector = new ExtractedFieldsDetector(buildRegressionConfig("foo"), 100, fieldCaps, Collections.emptyMap());

    ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, detector::detect);

    assertThat(e.getMessage(), equalTo("required field [foo] is missing; analysis requires fields [foo]"));
}
public void testDetect_GivenRegressionAndRequiredFieldNotIncluded() {
    // An includes list that omits the dependent variable must fail detection.
    FieldCapabilitiesResponse fieldCaps = new MockFieldCapsResponseBuilder().addAggregatableField("some_float", "float")
        .addAggregatableField("some_long", "long")
        .addAggregatableField("some_keyword", "keyword")
        .addAggregatableField("foo", "float")
        .build();
    analyzedFields = new FetchSourceContext(true, new String[] { "some_float", "some_keyword" }, new String[0]);
    ExtractedFieldsDetector detector = new ExtractedFieldsDetector(buildRegressionConfig("foo"), 100, fieldCaps, Collections.emptyMap());

    ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, detector::detect);

    assertThat(e.getMessage(), equalTo("required field [foo] is missing; analysis requires fields [foo]"));
}
public void testDetect_GivenFieldIsBothIncludedAndExcluded() {
    // When a field is listed in both includes and excludes, the exclude wins.
    FieldCapabilitiesResponse fieldCaps = new MockFieldCapsResponseBuilder().addAggregatableField("foo", "float")
        .addAggregatableField("bar", "float")
        .build();
    analyzedFields = new FetchSourceContext(true, new String[] { "foo", "bar" }, new String[] { "foo" });
    ExtractedFieldsDetector detector = new ExtractedFieldsDetector(buildOutlierDetectionConfig(), 100, fieldCaps, Collections.emptyMap());

    Tuple<ExtractedFields, List<FieldSelection>> extraction = detector.detect();

    List<ExtractedField> fields = extraction.v1().getAllFields();
    assertThat(fields, hasSize(1));
    List<String> names = fields.stream().map(ExtractedField::getName).collect(Collectors.toList());
    assertThat(names, contains("bar"));
    assertFieldSelectionContains(
        extraction.v2(),
        FieldSelection.included("bar", Collections.singleton("float"), false, FieldSelection.FeatureType.NUMERICAL),
        FieldSelection.excluded("foo", Collections.singleton("float"), "field in excludes list")
    );
}
public void testDetect_GivenFieldIsNotIncludedAndIsExcluded() {
    // "bar" is both absent from includes and present in excludes; only "foo" survives.
    FieldCapabilitiesResponse fieldCaps = new MockFieldCapsResponseBuilder().addAggregatableField("foo", "float")
        .addAggregatableField("bar", "float")
        .build();
    analyzedFields = new FetchSourceContext(true, new String[] { "foo" }, new String[] { "bar" });
    ExtractedFieldsDetector detector = new ExtractedFieldsDetector(buildOutlierDetectionConfig(), 100, fieldCaps, Collections.emptyMap());
    Tuple<ExtractedFields, List<FieldSelection>> detection = detector.detect();
    List<ExtractedField> fields = detection.v1().getAllFields();
    assertThat(fields, hasSize(1));
    assertThat(fields.stream().map(ExtractedField::getName).collect(Collectors.toList()), contains("foo"));
    assertFieldSelectionContains(
        detection.v2(),
        FieldSelection.excluded("bar", Collections.singleton("float"), "field not in includes list"),
        FieldSelection.included("foo", Collections.singleton("float"), false, FieldSelection.FeatureType.NUMERICAL)
    );
}
public void testDetect_GivenRegressionAndRequiredFieldHasInvalidType() {
    // Regression requires a numeric dependent variable; a keyword "foo" must be rejected.
    FieldCapabilitiesResponse fieldCaps = new MockFieldCapsResponseBuilder().addAggregatableField("some_float", "float")
        .addAggregatableField("some_long", "long")
        .addAggregatableField("some_keyword", "keyword")
        .addAggregatableField("foo", "keyword")
        .build();
    ExtractedFieldsDetector detector = new ExtractedFieldsDetector(buildRegressionConfig("foo"), 100, fieldCaps, Collections.emptyMap());
    ElasticsearchStatusException ex = expectThrows(ElasticsearchStatusException.class, detector::detect);
    assertThat(
        ex.getMessage(),
        equalTo(
            "invalid types [keyword] for required field [foo]; "
                + "expected types are [byte, double, float, half_float, integer, long, scaled_float, short, unsigned_long]"
        )
    );
}
public void testDetect_GivenClassificationAndRequiredFieldHasInvalidType() {
    // Classification requires a categorical-compatible dependent variable; a float is invalid.
    FieldCapabilitiesResponse fieldCaps = new MockFieldCapsResponseBuilder().addAggregatableField("some_float", "float")
        .addAggregatableField("some_long", "long")
        .addAggregatableField("some_keyword", "keyword")
        .addAggregatableField("foo", "keyword")
        .build();
    ExtractedFieldsDetector detector = new ExtractedFieldsDetector(buildClassificationConfig("some_float"), 100, fieldCaps, Collections.emptyMap());
    ElasticsearchStatusException ex = expectThrows(ElasticsearchStatusException.class, detector::detect);
    assertThat(
        ex.getMessage(),
        equalTo(
            "invalid types [float] for required field [some_float]; "
                + "expected types are [boolean, byte, integer, ip, keyword, long, short, text, unsigned_long]"
        )
    );
}
public void testDetect_GivenClassificationAndDependentVariableHasInvalidCardinality() {
    // Cardinality 31 exceeds the classification limit of 30 distinct values.
    FieldCapabilitiesResponse fieldCaps = new MockFieldCapsResponseBuilder().addAggregatableField("some_long", "long")
        .addAggregatableField("some_keyword", "keyword")
        .addAggregatableField("foo", "keyword")
        .build();
    ExtractedFieldsDetector detector = new ExtractedFieldsDetector(
        buildClassificationConfig("some_keyword"),
        100,
        fieldCaps,
        Collections.singletonMap("some_keyword", 31L)
    );
    ElasticsearchStatusException ex = expectThrows(ElasticsearchStatusException.class, detector::detect);
    assertThat(ex.getMessage(), equalTo("Field [some_keyword] must have at most [30] distinct values but there were at least [31]"));
}
public void testDetect_GivenIgnoredField() {
    // Internal fields such as _id are silently ignored when no explicit includes are set.
    FieldCapabilitiesResponse fieldCaps = new MockFieldCapsResponseBuilder().addField("_id", true, true, "float").build();
    ExtractedFieldsDetector detector = new ExtractedFieldsDetector(buildOutlierDetectionConfig(), 100, fieldCaps, Collections.emptyMap());
    Tuple<ExtractedFields, List<FieldSelection>> detection = detector.detect();
    assertThat(detection.v1().getAllFields().isEmpty(), is(true));
    assertThat(detection.v2().isEmpty(), is(true));
}
public void testDetect_GivenIncludedIgnoredField() {
    // Explicitly including an ignored field like _id is an error rather than a no-op.
    FieldCapabilitiesResponse fieldCaps = new MockFieldCapsResponseBuilder().addField("_id", true, false, "float").build();
    analyzedFields = new FetchSourceContext(true, new String[] { "_id" }, new String[0]);
    ExtractedFieldsDetector detector = new ExtractedFieldsDetector(buildOutlierDetectionConfig(), 100, fieldCaps, Collections.emptyMap());
    ElasticsearchStatusException ex = expectThrows(ElasticsearchStatusException.class, detector::detect);
    assertThat(ex.getMessage(), equalTo("No field [_id] could be detected"));
}
public void testDetect_GivenExcludedFieldIsMissing() {
    // Excluding a field that does not exist in the index is reported as an error.
    FieldCapabilitiesResponse fieldCaps = new MockFieldCapsResponseBuilder().addAggregatableField("foo", "float").build();
    analyzedFields = new FetchSourceContext(true, new String[] { "*" }, new String[] { "bar" });
    ExtractedFieldsDetector detector = new ExtractedFieldsDetector(buildOutlierDetectionConfig(), 100, fieldCaps, Collections.emptyMap());
    ElasticsearchStatusException ex = expectThrows(ElasticsearchStatusException.class, detector::detect);
    assertThat(ex.getMessage(), equalTo("No field [bar] could be detected"));
}
public void testDetect_GivenExcludedFieldIsUnsupported() {
    // Excluding an unsupported-type field is fine; it is reported as excluded, not an error.
    FieldCapabilitiesResponse fieldCaps = new MockFieldCapsResponseBuilder().addAggregatableField("numeric", "float")
        .addAggregatableField("categorical", "keyword")
        .build();
    analyzedFields = new FetchSourceContext(true, null, new String[] { "categorical" });
    ExtractedFieldsDetector detector = new ExtractedFieldsDetector(buildOutlierDetectionConfig(), 100, fieldCaps, Collections.emptyMap());
    Tuple<ExtractedFields, List<FieldSelection>> detection = detector.detect();
    List<ExtractedField> fields = detection.v1().getAllFields();
    assertThat(fields, hasSize(1));
    assertThat(fields.get(0).getName(), equalTo("numeric"));
    assertFieldSelectionContains(
        detection.v2(),
        FieldSelection.excluded(
            "categorical",
            Collections.singleton("keyword"),
            "unsupported type; supported types are "
                + "[boolean, byte, double, float, half_float, integer, long, scaled_float, short, unsigned_long]"
        ),
        FieldSelection.included("numeric", Collections.singleton("float"), false, FieldSelection.FeatureType.NUMERICAL)
    );
}
public void testDetect_ShouldSortFieldsAlphabetically() {
    // Build a random set of float fields and verify extraction order is alphabetical.
    int fieldCount = randomIntBetween(10, 20);
    List<String> fields = new ArrayList<>();
    for (int i = 0; i < fieldCount; i++) {
        fields.add(randomAlphaOfLength(20));
    }
    List<String> sortedFields = fields.stream().sorted().collect(Collectors.toList());
    MockFieldCapsResponseBuilder capsBuilder = new MockFieldCapsResponseBuilder();
    fields.forEach(field -> capsBuilder.addAggregatableField(field, "float"));
    ExtractedFieldsDetector detector = new ExtractedFieldsDetector(buildOutlierDetectionConfig(), 100, capsBuilder.build(), Collections.emptyMap());
    Tuple<ExtractedFields, List<FieldSelection>> detection = detector.detect();
    List<String> names = detection.v1().getAllFields().stream().map(ExtractedField::getName).collect(Collectors.toList());
    assertThat(names, equalTo(sortedFields));
}
public void testDetect_GivenIncludeWithMissingField() {
    // A non-wildcard include that matches nothing in the index is an error.
    FieldCapabilitiesResponse fieldCaps = new MockFieldCapsResponseBuilder().addAggregatableField("my_field1", "float")
        .addAggregatableField("my_field2", "float")
        .build();
    analyzedFields = new FetchSourceContext(true, new String[] { "your_field1", "my*" }, new String[0]);
    ExtractedFieldsDetector detector = new ExtractedFieldsDetector(buildOutlierDetectionConfig(), 100, fieldCaps, Collections.emptyMap());
    ElasticsearchStatusException ex = expectThrows(ElasticsearchStatusException.class, detector::detect);
    assertThat(ex.getMessage(), equalTo("No field [your_field1] could be detected"));
}
public void testDetect_GivenExcludeAllValidFields() {
    // Excluding every valid field yields no extracted fields and only excluded selections.
    FieldCapabilitiesResponse fieldCaps = new MockFieldCapsResponseBuilder().addAggregatableField("my_field1", "float")
        .addAggregatableField("my_field2", "float")
        .build();
    analyzedFields = new FetchSourceContext(true, new String[0], new String[] { "my_*" });
    ExtractedFieldsDetector detector = new ExtractedFieldsDetector(buildOutlierDetectionConfig(), 100, fieldCaps, Collections.emptyMap());
    Tuple<ExtractedFields, List<FieldSelection>> detection = detector.detect();
    assertThat(detection.v1().getAllFields().isEmpty(), is(true));
    assertThat(detection.v2().size(), equalTo(2));
    assertThat(detection.v2().stream().filter(FieldSelection::isIncluded).findAny().isPresent(), is(false));
}
public void testDetect_GivenInclusionsAndExclusions() {
    // Wildcard includes select both fields; the "*nope" exclusion then removes one of them.
    FieldCapabilitiesResponse fieldCaps = new MockFieldCapsResponseBuilder().addAggregatableField("my_field1_nope", "float")
        .addAggregatableField("my_field1", "float")
        .addAggregatableField("your_field2", "float")
        .build();
    analyzedFields = new FetchSourceContext(true, new String[] { "your*", "my_*" }, new String[] { "*nope" });
    ExtractedFieldsDetector detector = new ExtractedFieldsDetector(buildOutlierDetectionConfig(), 100, fieldCaps, Collections.emptyMap());
    Tuple<ExtractedFields, List<FieldSelection>> detection = detector.detect();
    List<String> names = detection.v1().getAllFields().stream().map(ExtractedField::getName).collect(Collectors.toList());
    assertThat(names, equalTo(Arrays.asList("my_field1", "your_field2")));
    assertFieldSelectionContains(
        detection.v2(),
        FieldSelection.included("my_field1", Collections.singleton("float"), false, FieldSelection.FeatureType.NUMERICAL),
        FieldSelection.excluded("my_field1_nope", Collections.singleton("float"), "field in excludes list"),
        FieldSelection.included("your_field2", Collections.singleton("float"), false, FieldSelection.FeatureType.NUMERICAL)
    );
}
public void testDetect_GivenIncludedFieldHasUnsupportedType() {
    // Explicitly including a keyword field in outlier detection must fail with a type error.
    FieldCapabilitiesResponse fieldCaps = new MockFieldCapsResponseBuilder().addAggregatableField("my_field1_nope", "float")
        .addAggregatableField("my_field1", "float")
        .addAggregatableField("your_field2", "float")
        .addAggregatableField("your_keyword", "keyword")
        .build();
    analyzedFields = new FetchSourceContext(true, new String[] { "your*", "my_*" }, new String[] { "*nope" });
    ExtractedFieldsDetector detector = new ExtractedFieldsDetector(buildOutlierDetectionConfig(), 100, fieldCaps, Collections.emptyMap());
    ElasticsearchStatusException ex = expectThrows(ElasticsearchStatusException.class, detector::detect);
    assertThat(
        ex.getMessage(),
        equalTo(
            "field [your_keyword] has unsupported type [keyword]. "
                + "Supported types are [boolean, byte, double, float, half_float, integer, long, scaled_float, short, unsigned_long]."
        )
    );
}
public void testDetect_GivenNotIncludedFieldHasUnsupportedType() {
    // An unsupported field that is simply not included is excluded with a reason, not an error.
    FieldCapabilitiesResponse fieldCaps = new MockFieldCapsResponseBuilder().addAggregatableField("numeric", "float")
        .addAggregatableField("categorical", "keyword")
        .build();
    analyzedFields = new FetchSourceContext(true, new String[] { "numeric" }, null);
    ExtractedFieldsDetector detector = new ExtractedFieldsDetector(buildOutlierDetectionConfig(), 100, fieldCaps, Collections.emptyMap());
    Tuple<ExtractedFields, List<FieldSelection>> detection = detector.detect();
    List<ExtractedField> fields = detection.v1().getAllFields();
    assertThat(fields, hasSize(1));
    assertThat(fields.get(0).getName(), equalTo("numeric"));
    assertFieldSelectionContains(
        detection.v2(),
        FieldSelection.excluded(
            "categorical",
            Collections.singleton("keyword"),
            "unsupported type; supported types are "
                + "[boolean, byte, double, float, half_float, integer, long, scaled_float, short, unsigned_long]"
        ),
        FieldSelection.included("numeric", Collections.singleton("float"), false, FieldSelection.FeatureType.NUMERICAL)
    );
}
public void testDetect_GivenIndexContainsResultsField() {
    // Pre-existing results fields (under RESULTS_FIELD) are transparently skipped.
    FieldCapabilitiesResponse fieldCaps = new MockFieldCapsResponseBuilder().addAggregatableField(
        RESULTS_FIELD + ".outlier_score",
        "float"
    )
        .addAggregatableField("my_field1", "float")
        .addAggregatableField("your_field2", "float")
        .addAggregatableField("your_keyword", "keyword")
        .build();
    ExtractedFieldsDetector detector = new ExtractedFieldsDetector(buildOutlierDetectionConfig(), 100, fieldCaps, Collections.emptyMap());
    Tuple<ExtractedFields, List<FieldSelection>> detection = detector.detect();
    List<String> names = detection.v1().getAllFields().stream().map(ExtractedField::getName).collect(Collectors.toList());
    assertThat(names, equalTo(Arrays.asList("my_field1", "your_field2")));
    assertFieldSelectionContains(
        detection.v2(),
        FieldSelection.included("my_field1", Collections.singleton("float"), false, FieldSelection.FeatureType.NUMERICAL),
        FieldSelection.included("your_field2", Collections.singleton("float"), false, FieldSelection.FeatureType.NUMERICAL),
        FieldSelection.excluded(
            "your_keyword",
            Collections.singleton("keyword"),
            "unsupported type; supported types "
                + "are [boolean, byte, double, float, half_float, integer, long, scaled_float, short, unsigned_long]"
        )
    );
}
public void testDetect_GivenIncludedResultsField() {
    // Explicitly including the results field is an error since it is reserved.
    FieldCapabilitiesResponse fieldCaps = new MockFieldCapsResponseBuilder().addAggregatableField(
        RESULTS_FIELD + ".outlier_score",
        "float"
    )
        .addAggregatableField("my_field1", "float")
        .addAggregatableField("your_field2", "float")
        .addAggregatableField("your_keyword", "keyword")
        .build();
    analyzedFields = new FetchSourceContext(true, new String[] { RESULTS_FIELD }, new String[0]);
    ExtractedFieldsDetector detector = new ExtractedFieldsDetector(buildOutlierDetectionConfig(), 100, fieldCaps, Collections.emptyMap());
    ElasticsearchStatusException ex = expectThrows(ElasticsearchStatusException.class, detector::detect);
    assertThat(ex.getMessage(), equalTo("No field [ml] could be detected"));
}
public void testDetect_GivenLessFieldsThanDocValuesLimit() {
    // Three usable fields against a limit of four: extraction should use doc values.
    FieldCapabilitiesResponse fieldCaps = new MockFieldCapsResponseBuilder().addAggregatableField("field_1", "float")
        .addAggregatableField("field_2", "float")
        .addAggregatableField("field_3", "float")
        .addAggregatableField("a_keyword", "keyword")
        .build();
    ExtractedFieldsDetector detector = new ExtractedFieldsDetector(buildOutlierDetectionConfig(), 4, fieldCaps, Collections.emptyMap());
    Tuple<ExtractedFields, List<FieldSelection>> detection = detector.detect();
    List<String> names = detection.v1().getAllFields().stream().map(ExtractedField::getName).collect(Collectors.toList());
    assertThat(names, equalTo(Arrays.asList("field_1", "field_2", "field_3")));
    assertThat(
        detection.v1().getAllFields().stream().map(ExtractedField::getMethod).collect(Collectors.toSet()),
        contains(equalTo(ExtractedField.Method.DOC_VALUE))
    );
}
public void testDetect_GivenEqualFieldsToDocValuesLimit() {
    // Exactly at the doc-values limit (3 fields, limit 3): doc values are still used.
    FieldCapabilitiesResponse fieldCaps = new MockFieldCapsResponseBuilder().addAggregatableField("field_1", "float")
        .addAggregatableField("field_2", "float")
        .addAggregatableField("field_3", "float")
        .addAggregatableField("a_keyword", "keyword")
        .build();
    ExtractedFieldsDetector detector = new ExtractedFieldsDetector(buildOutlierDetectionConfig(), 3, fieldCaps, Collections.emptyMap());
    Tuple<ExtractedFields, List<FieldSelection>> detection = detector.detect();
    List<String> names = detection.v1().getAllFields().stream().map(ExtractedField::getName).collect(Collectors.toList());
    assertThat(names, equalTo(Arrays.asList("field_1", "field_2", "field_3")));
    assertThat(
        detection.v1().getAllFields().stream().map(ExtractedField::getMethod).collect(Collectors.toSet()),
        contains(equalTo(ExtractedField.Method.DOC_VALUE))
    );
}
public void testDetect_GivenMoreFieldsThanDocValuesLimit() {
    // Over the doc-values limit (3 fields, limit 2): extraction falls back to _source.
    FieldCapabilitiesResponse fieldCaps = new MockFieldCapsResponseBuilder().addAggregatableField("field_1", "float")
        .addAggregatableField("field_2", "float")
        .addAggregatableField("field_3", "float")
        .addAggregatableField("a_keyword", "keyword")
        .build();
    ExtractedFieldsDetector detector = new ExtractedFieldsDetector(buildOutlierDetectionConfig(), 2, fieldCaps, Collections.emptyMap());
    Tuple<ExtractedFields, List<FieldSelection>> detection = detector.detect();
    List<String> names = detection.v1().getAllFields().stream().map(ExtractedField::getName).collect(Collectors.toList());
    assertThat(names, equalTo(Arrays.asList("field_1", "field_2", "field_3")));
    assertThat(
        detection.v1().getAllFields().stream().map(ExtractedField::getMethod).collect(Collectors.toSet()),
        contains(equalTo(ExtractedField.Method.SOURCE))
    );
}
private void testDetect_GivenBooleanField(DataFrameAnalyticsConfig config, boolean isRequired, FieldSelection.FeatureType featureType) {
    // Shared scenario: boolean values are mapped to {0, 1} and extracted via doc values.
    FieldCapabilitiesResponse fieldCaps = new MockFieldCapsResponseBuilder().addAggregatableField("some_boolean", "boolean")
        .addAggregatableField("some_integer", "integer")
        .build();
    Map<String, Long> cardinalities = new HashMap<>(2);
    cardinalities.put("some_boolean", 2L);
    cardinalities.put("some_integer", 2L);
    ExtractedFieldsDetector detector = new ExtractedFieldsDetector(config, 100, fieldCaps, cardinalities);
    Tuple<ExtractedFields, List<FieldSelection>> detection = detector.detect();
    List<ExtractedField> fields = detection.v1().getAllFields();
    assertThat(fields, hasSize(2));
    // Fields are reported alphabetically, so the boolean field comes first.
    ExtractedField booleanField = fields.get(0);
    assertThat(booleanField.getTypes(), contains("boolean"));
    assertThat(booleanField.getMethod(), equalTo(ExtractedField.Method.DOC_VALUE));
    assertFieldSelectionContains(
        detection.v2().subList(0, 1),
        FieldSelection.included("some_boolean", Collections.singleton("boolean"), isRequired, featureType)
    );
    // true -> 1, false -> 0, and multi-valued booleans are mapped element-wise.
    SearchHit hit = new SearchHitBuilder(42).addField("some_boolean", true).build();
    assertThat(booleanField.value(hit), arrayContaining(1));
    hit = new SearchHitBuilder(42).addField("some_boolean", false).build();
    assertThat(booleanField.value(hit), arrayContaining(0));
    hit = new SearchHitBuilder(42).addField("some_boolean", Arrays.asList(false, true, false)).build();
    assertThat(booleanField.value(hit), arrayContaining(0, 1, 0));
}
public void testDetect_GivenBooleanField_OutlierDetection() {
    // In outlier detection, some_boolean is treated as a non-required numerical feature.
    testDetect_GivenBooleanField(buildOutlierDetectionConfig(), false, FieldSelection.FeatureType.NUMERICAL);
}
public void testDetect_GivenBooleanField_Regression() {
    // In regression (dependent variable some_integer), some_boolean is a non-required numerical feature.
    testDetect_GivenBooleanField(buildRegressionConfig("some_integer"), false, FieldSelection.FeatureType.NUMERICAL);
}
public void testDetect_GivenBooleanField_Classification_BooleanIsFeature() {
    // In classification where some_boolean is a feature, it is non-required and numerical.
    testDetect_GivenBooleanField(buildClassificationConfig("some_integer"), false, FieldSelection.FeatureType.NUMERICAL);
}
public void testDetect_GivenBooleanField_Classification_BooleanIsDependentVariable() {
    // When some_boolean is the classification dependent variable, it is required and categorical.
    testDetect_GivenBooleanField(buildClassificationConfig("some_boolean"), true, FieldSelection.FeatureType.CATEGORICAL);
}
public void testDetect_GivenMultiFields() {
    // For each parent/multi-field pair, the aggregatable variant is preferred; a text field
    // without a keyword multi-field is taken as-is.
    FieldCapabilitiesResponse fieldCaps = new MockFieldCapsResponseBuilder().addAggregatableField("a_float", "float")
        .addNonAggregatableField("text_without_keyword", "text")
        .addNonAggregatableField("text_1", "text")
        .addAggregatableField("text_1.keyword", "keyword")
        .addNonAggregatableField("text_2", "text")
        .addAggregatableField("text_2.keyword", "keyword")
        .addAggregatableField("keyword_1", "keyword")
        .addNonAggregatableField("keyword_1.text", "text")
        .build();
    ExtractedFieldsDetector detector = new ExtractedFieldsDetector(buildRegressionConfig("a_float"), 100, fieldCaps, Collections.emptyMap());
    Tuple<ExtractedFields, List<FieldSelection>> detection = detector.detect();
    assertThat(detection.v1().getAllFields(), hasSize(5));
    List<String> names = detection.v1().getAllFields().stream().map(ExtractedField::getName).collect(Collectors.toList());
    assertThat(names, contains("a_float", "keyword_1", "text_1.keyword", "text_2.keyword", "text_without_keyword"));
    assertFieldSelectionContains(
        detection.v2(),
        FieldSelection.included("a_float", Collections.singleton("float"), true, FieldSelection.FeatureType.NUMERICAL),
        FieldSelection.included("keyword_1", Collections.singleton("keyword"), false, FieldSelection.FeatureType.CATEGORICAL),
        FieldSelection.excluded("keyword_1.text", Collections.singleton("text"), "[keyword_1] is preferred because it is aggregatable"),
        FieldSelection.excluded("text_1", Collections.singleton("text"), "[text_1.keyword] is preferred because it is aggregatable"),
        FieldSelection.included("text_1.keyword", Collections.singleton("keyword"), false, FieldSelection.FeatureType.CATEGORICAL),
        FieldSelection.excluded("text_2", Collections.singleton("text"), "[text_2.keyword] is preferred because it is aggregatable"),
        FieldSelection.included("text_2.keyword", Collections.singleton("keyword"), false, FieldSelection.FeatureType.CATEGORICAL),
        FieldSelection.included("text_without_keyword", Collections.singleton("text"), false, FieldSelection.FeatureType.CATEGORICAL)
    );
}
public void testDetect_GivenMultiFieldAndParentIsRequired() {
    // When the parent field is the dependent variable, its multi-field is excluded in its favor.
    FieldCapabilitiesResponse fieldCaps = new MockFieldCapsResponseBuilder().addAggregatableField("field_1", "keyword")
        .addAggregatableField("field_1.keyword", "keyword")
        .addAggregatableField("field_2", "float")
        .build();
    ExtractedFieldsDetector detector = new ExtractedFieldsDetector(
        buildClassificationConfig("field_1"),
        100,
        fieldCaps,
        Collections.singletonMap("field_1", 2L)
    );
    Tuple<ExtractedFields, List<FieldSelection>> detection = detector.detect();
    assertThat(detection.v1().getAllFields(), hasSize(2));
    List<String> names = detection.v1().getAllFields().stream().map(ExtractedField::getName).collect(Collectors.toList());
    assertThat(names, contains("field_1", "field_2"));
    assertFieldSelectionContains(
        detection.v2(),
        FieldSelection.included("field_1", Collections.singleton("keyword"), true, FieldSelection.FeatureType.CATEGORICAL),
        FieldSelection.excluded("field_1.keyword", Collections.singleton("keyword"), "[field_1] is required instead"),
        FieldSelection.included("field_2", Collections.singleton("float"), false, FieldSelection.FeatureType.NUMERICAL)
    );
}
public void testDetect_GivenMultiFieldAndMultiFieldIsRequired() {
    // When the multi-field is the dependent variable, the parent is excluded in its favor.
    FieldCapabilitiesResponse fieldCaps = new MockFieldCapsResponseBuilder().addAggregatableField("field_1", "keyword")
        .addAggregatableField("field_1.keyword", "keyword")
        .addAggregatableField("field_2", "float")
        .build();
    ExtractedFieldsDetector detector = new ExtractedFieldsDetector(
        buildClassificationConfig("field_1.keyword"),
        100,
        fieldCaps,
        Collections.singletonMap("field_1.keyword", 2L)
    );
    Tuple<ExtractedFields, List<FieldSelection>> detection = detector.detect();
    assertThat(detection.v1().getAllFields(), hasSize(2));
    List<String> names = detection.v1().getAllFields().stream().map(ExtractedField::getName).collect(Collectors.toList());
    assertThat(names, contains("field_1.keyword", "field_2"));
    assertFieldSelectionContains(
        detection.v2(),
        FieldSelection.excluded("field_1", Collections.singleton("keyword"), "[field_1.keyword] is required instead"),
        FieldSelection.included("field_1.keyword", Collections.singleton("keyword"), true, FieldSelection.FeatureType.CATEGORICAL),
        FieldSelection.included("field_2", Collections.singleton("float"), false, FieldSelection.FeatureType.NUMERICAL)
    );
}
public void testDetect_GivenSeveralMultiFields_ShouldPickFirstSorted() {
    // With several aggregatable multi-fields, the alphabetically first one wins; the
    // remaining siblings are excluded with a "came first" reason.
    FieldCapabilitiesResponse fieldCaps = new MockFieldCapsResponseBuilder().addNonAggregatableField("field_1", "text")
        .addAggregatableField("field_1.keyword_3", "keyword")
        .addAggregatableField("field_1.keyword_2", "keyword")
        .addAggregatableField("field_1.keyword_1", "keyword")
        .addAggregatableField("field_2", "float")
        .build();
    ExtractedFieldsDetector detector = new ExtractedFieldsDetector(buildRegressionConfig("field_2"), 100, fieldCaps, Collections.emptyMap());
    Tuple<ExtractedFields, List<FieldSelection>> detection = detector.detect();
    assertThat(detection.v1().getAllFields(), hasSize(2));
    List<String> names = detection.v1().getAllFields().stream().map(ExtractedField::getName).collect(Collectors.toList());
    assertThat(names, contains("field_1.keyword_1", "field_2"));
    assertFieldSelectionContains(
        detection.v2(),
        FieldSelection.excluded(
            "field_1",
            Collections.singleton("text"),
            "[field_1.keyword_1] is preferred because it is aggregatable"
        ),
        FieldSelection.included("field_1.keyword_1", Collections.singleton("keyword"), false, FieldSelection.FeatureType.CATEGORICAL),
        FieldSelection.excluded("field_1.keyword_2", Collections.singleton("keyword"), "[field_1.keyword_1] came first"),
        FieldSelection.excluded("field_1.keyword_3", Collections.singleton("keyword"), "[field_1.keyword_1] came first"),
        FieldSelection.included("field_2", Collections.singleton("float"), true, FieldSelection.FeatureType.NUMERICAL)
    );
}
public void testDetect_GivenMultiFields_OverDocValueLimit() {
    // With a zero doc-values limit, source fetching is used, so the parent text field is
    // preferred over its keyword multi-field.
    FieldCapabilitiesResponse fieldCaps = new MockFieldCapsResponseBuilder().addNonAggregatableField("field_1", "text")
        .addAggregatableField("field_1.keyword_1", "keyword")
        .addAggregatableField("field_2", "float")
        .build();
    ExtractedFieldsDetector detector = new ExtractedFieldsDetector(buildRegressionConfig("field_2"), 0, fieldCaps, Collections.emptyMap());
    Tuple<ExtractedFields, List<FieldSelection>> detection = detector.detect();
    assertThat(detection.v1().getAllFields(), hasSize(2));
    List<String> names = detection.v1().getAllFields().stream().map(ExtractedField::getName).collect(Collectors.toList());
    assertThat(names, contains("field_1", "field_2"));
    assertFieldSelectionContains(
        detection.v2(),
        FieldSelection.included("field_1", Collections.singleton("text"), false, FieldSelection.FeatureType.CATEGORICAL),
        FieldSelection.excluded(
            "field_1.keyword_1",
            Collections.singleton("keyword"),
            "[field_1] is preferred because it supports fetching from source"
        ),
        FieldSelection.included("field_2", Collections.singleton("float"), true, FieldSelection.FeatureType.NUMERICAL)
    );
}
public void testDetect_GivenParentAndMultiFieldBothAggregatable() {
    // When both parent and multi-field are aggregatable, the parent is preferred;
    // unrelated multi-fields with distinct parents are kept independently.
    FieldCapabilitiesResponse fieldCaps = new MockFieldCapsResponseBuilder().addAggregatableField("field_1", "keyword")
        .addAggregatableField("field_1.keyword", "keyword")
        .addAggregatableField("field_2.keyword", "float")
        .addAggregatableField("field_2.double", "double")
        .build();
    ExtractedFieldsDetector detector = new ExtractedFieldsDetector(buildRegressionConfig("field_2.double"), 100, fieldCaps, Collections.emptyMap());
    Tuple<ExtractedFields, List<FieldSelection>> detection = detector.detect();
    assertThat(detection.v1().getAllFields(), hasSize(3));
    List<String> names = detection.v1().getAllFields().stream().map(ExtractedField::getName).collect(Collectors.toList());
    assertThat(names, contains("field_1", "field_2.double", "field_2.keyword"));
    assertFieldSelectionContains(
        detection.v2(),
        FieldSelection.included("field_1", Collections.singleton("keyword"), false, FieldSelection.FeatureType.CATEGORICAL),
        FieldSelection.excluded(
            "field_1.keyword",
            Collections.singleton("keyword"),
            "[field_1] is preferred because it is aggregatable"
        ),
        FieldSelection.included("field_2.double", Collections.singleton("double"), true, FieldSelection.FeatureType.NUMERICAL),
        FieldSelection.included("field_2.keyword", Collections.singleton("float"), false, FieldSelection.FeatureType.NUMERICAL)
    );
}
public void testDetect_GivenParentAndMultiFieldNoneAggregatable() {
    // When neither parent nor multi-field is aggregatable, the parent is preferred.
    FieldCapabilitiesResponse fieldCaps = new MockFieldCapsResponseBuilder().addNonAggregatableField("field_1", "text")
        .addNonAggregatableField("field_1.text", "text")
        .addAggregatableField("field_2", "float")
        .build();
    ExtractedFieldsDetector detector = new ExtractedFieldsDetector(buildRegressionConfig("field_2"), 100, fieldCaps, Collections.emptyMap());
    Tuple<ExtractedFields, List<FieldSelection>> detection = detector.detect();
    assertThat(detection.v1().getAllFields(), hasSize(2));
    List<String> names = detection.v1().getAllFields().stream().map(ExtractedField::getName).collect(Collectors.toList());
    assertThat(names, contains("field_1", "field_2"));
    assertFieldSelectionContains(
        detection.v2(),
        FieldSelection.included("field_1", Collections.singleton("text"), false, FieldSelection.FeatureType.CATEGORICAL),
        FieldSelection.excluded(
            "field_1.text",
            Collections.singleton("text"),
            "[field_1] is preferred because none of the multi-fields are aggregatable"
        ),
        FieldSelection.included("field_2", Collections.singleton("float"), true, FieldSelection.FeatureType.NUMERICAL)
    );
}
public void testDetect_GivenMultiFields_AndExplicitlyIncludedFields() {
    // Explicit includes override multi-field preference: only the listed fields are kept.
    FieldCapabilitiesResponse fieldCaps = new MockFieldCapsResponseBuilder().addNonAggregatableField("field_1", "text")
        .addAggregatableField("field_1.keyword", "keyword")
        .addAggregatableField("field_2", "float")
        .build();
    analyzedFields = new FetchSourceContext(true, new String[] { "field_1", "field_2" }, new String[0]);
    ExtractedFieldsDetector detector = new ExtractedFieldsDetector(buildRegressionConfig("field_2"), 100, fieldCaps, Collections.emptyMap());
    Tuple<ExtractedFields, List<FieldSelection>> detection = detector.detect();
    assertThat(detection.v1().getAllFields(), hasSize(2));
    List<String> names = detection.v1().getAllFields().stream().map(ExtractedField::getName).collect(Collectors.toList());
    assertThat(names, contains("field_1", "field_2"));
    assertFieldSelectionContains(
        detection.v2(),
        FieldSelection.included("field_1", Collections.singleton("text"), false, FieldSelection.FeatureType.CATEGORICAL),
        FieldSelection.excluded("field_1.keyword", Collections.singleton("keyword"), "field not in includes list"),
        FieldSelection.included("field_2", Collections.singleton("float"), true, FieldSelection.FeatureType.NUMERICAL)
    );
}
public void testDetect_GivenSourceFilteringWithIncludes() {
    // _source includes filtering should limit detection to fields matching "field_1*".
    FieldCapabilitiesResponse caps = new MockFieldCapsResponseBuilder().addAggregatableField("field_11", "float")
        .addAggregatableField("field_12", "float")
        .addAggregatableField("field_21", "float")
        .addAggregatableField("field_22", "float")
        .build();
    sourceFiltering = new FetchSourceContext(true, new String[] { "field_1*" }, null);
    ExtractedFieldsDetector detector = new ExtractedFieldsDetector(buildOutlierDetectionConfig(), 100, caps, Collections.emptyMap());
    Tuple<ExtractedFields, List<FieldSelection>> result = detector.detect();
    List<ExtractedField> fields = result.v1().getAllFields();
    assertThat(fields, hasSize(2));
    assertThat(fields.get(0).getName(), equalTo("field_11"));
    assertThat(fields.get(1).getName(), equalTo("field_12"));
    assertFieldSelectionContains(
        result.v2(),
        FieldSelection.included("field_11", Collections.singleton("float"), false, FieldSelection.FeatureType.NUMERICAL),
        FieldSelection.included("field_12", Collections.singleton("float"), false, FieldSelection.FeatureType.NUMERICAL)
    );
}
public void testDetect_GivenSourceFilteringWithExcludes() {
    // _source excludes filtering should drop every field matching "field_1*".
    FieldCapabilitiesResponse caps = new MockFieldCapsResponseBuilder().addAggregatableField("field_11", "float")
        .addAggregatableField("field_12", "float")
        .addAggregatableField("field_21", "float")
        .addAggregatableField("field_22", "float")
        .build();
    sourceFiltering = new FetchSourceContext(true, null, new String[] { "field_1*" });
    ExtractedFieldsDetector detector = new ExtractedFieldsDetector(buildOutlierDetectionConfig(), 100, caps, Collections.emptyMap());
    Tuple<ExtractedFields, List<FieldSelection>> result = detector.detect();
    List<ExtractedField> fields = result.v1().getAllFields();
    assertThat(fields, hasSize(2));
    assertThat(fields.get(0).getName(), equalTo("field_21"));
    assertThat(fields.get(1).getName(), equalTo("field_22"));
    assertFieldSelectionContains(
        result.v2(),
        FieldSelection.included("field_21", Collections.singleton("float"), false, FieldSelection.FeatureType.NUMERICAL),
        FieldSelection.included("field_22", Collections.singleton("float"), false, FieldSelection.FeatureType.NUMERICAL)
    );
}
public void testDetect_GivenObjectFields() {
    // Object-typed fields are not analyzable and must be dropped from detection.
    FieldCapabilitiesResponse caps = new MockFieldCapsResponseBuilder().addAggregatableField("float_field", "float")
        .addNonAggregatableField("object_field_1", "object")
        .addNonAggregatableField("object_field_2", "object")
        .build();
    ExtractedFieldsDetector detector = new ExtractedFieldsDetector(buildOutlierDetectionConfig(), 100, caps, Collections.emptyMap());
    List<ExtractedField> fields = detector.detect().v1().getAllFields();
    assertThat(fields, hasSize(1));
    assertThat(fields.get(0).getName(), equalTo("float_field"));
}
public void testDetect_GivenNestedFields() {
    // Nested fields (including inner nested mappings and their leaves) are
    // excluded wholesale, reported via a single "nested_field.*" selection each.
    FieldCapabilitiesResponse caps = new MockFieldCapsResponseBuilder().addAggregatableField("float_field", "float")
        .addNonAggregatableField("nested_field_1", "nested")
        .addAggregatableField("nested_field_1.a", "float")
        .addAggregatableField("nested_field_1.b", "float")
        .addNonAggregatableField("nested_field_1.inner_nested", "nested")
        .addAggregatableField("nested_field_1.inner_nested.z", "float")
        .addNonAggregatableField("nested_field_2", "nested")
        .addAggregatableField("nested_field_2.c", "float")
        .build();
    ExtractedFieldsDetector detector = new ExtractedFieldsDetector(buildOutlierDetectionConfig(), 100, caps, Collections.emptyMap());
    Tuple<ExtractedFields, List<FieldSelection>> result = detector.detect();
    List<ExtractedField> fields = result.v1().getAllFields();
    assertThat(fields, hasSize(1));
    assertThat(fields.get(0).getName(), equalTo("float_field"));
    assertFieldSelectionContains(
        result.v2(),
        FieldSelection.included("float_field", Collections.singleton("float"), false, FieldSelection.FeatureType.NUMERICAL),
        FieldSelection.excluded("nested_field_1.*", Collections.singleton("nested"), "nested fields are not supported"),
        FieldSelection.excluded("nested_field_2.*", Collections.singleton("nested"), "nested fields are not supported")
    );
}
public void testDetect_GivenNestedFieldThatAlsoHasIncompatibleType() {
    // The nested-field exclusion wins: the unsupported leaf type is folded into
    // the single "nested fields are not supported" reason.
    FieldCapabilitiesResponse caps = new MockFieldCapsResponseBuilder().addAggregatableField("float_field", "float")
        .addNonAggregatableField("nested_field_1", "nested")
        .addAggregatableField("nested_field_1.a", "definitely_not_supported")
        .build();
    ExtractedFieldsDetector detector = new ExtractedFieldsDetector(buildOutlierDetectionConfig(), 100, caps, Collections.emptyMap());
    Tuple<ExtractedFields, List<FieldSelection>> result = detector.detect();
    List<ExtractedField> fields = result.v1().getAllFields();
    assertThat(fields, hasSize(1));
    assertThat(fields.get(0).getName(), equalTo("float_field"));
    assertFieldSelectionContains(
        result.v2(),
        FieldSelection.included("float_field", Collections.singleton("float"), false, FieldSelection.FeatureType.NUMERICAL),
        FieldSelection.excluded("nested_field_1.*", Collections.singleton("nested"), "nested fields are not supported")
    );
}
public void testDetect_GivenAnalyzedFieldIncludesObjectField() {
    // Explicitly including an object field in analyzed_fields must be rejected.
    FieldCapabilitiesResponse caps = new MockFieldCapsResponseBuilder().addAggregatableField("float_field", "float")
        .addNonAggregatableField("object_field", "object")
        .build();
    analyzedFields = new FetchSourceContext(true, new String[] { "float_field", "object_field" }, null);
    ExtractedFieldsDetector detector = new ExtractedFieldsDetector(buildOutlierDetectionConfig(), 100, caps, Collections.emptyMap());
    ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, detector::detect);
    assertThat(e.getMessage(), equalTo("analyzed_fields must not include or exclude object or nested fields: [object_field]"));
}
public void testDetect_GivenAnalyzedFieldIncludesNestedField() {
    // Explicitly including a nested field in analyzed_fields must be rejected.
    FieldCapabilitiesResponse caps = new MockFieldCapsResponseBuilder().addAggregatableField("float_field", "float")
        .addNonAggregatableField("nested_field", "nested")
        .build();
    analyzedFields = new FetchSourceContext(true, new String[] { "float_field", "nested_field" }, null);
    ExtractedFieldsDetector detector = new ExtractedFieldsDetector(buildOutlierDetectionConfig(), 100, caps, Collections.emptyMap());
    ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, detector::detect);
    assertThat(e.getMessage(), equalTo("analyzed_fields must not include or exclude object or nested fields: [nested_field]"));
}
/**
 * Shared fixture: the {@code _id} metadata field, one plain aggregatable numeric,
 * two non-aggregatable numerics each with an aggregatable {@code .child} multi-field,
 * and one object field.
 */
private static FieldCapabilitiesResponse simpleFieldResponse() {
return new MockFieldCapsResponseBuilder().addField("_id", true, false, "_id")
.addAggregatableField("field_11", "float")
.addNonAggregatableField("field_21", "float")
.addAggregatableField("field_21.child", "float")
.addNonAggregatableField("field_31", "float")
.addAggregatableField("field_31.child", "float")
.addNonAggregatableField("object_field", "object")
.build();
}
public void testDetect_GivenAnalyzedFieldExcludesObjectField() {
    // Even excluding an object field via analyzed_fields must be rejected.
    FieldCapabilitiesResponse caps = new MockFieldCapsResponseBuilder().addAggregatableField("float_field", "float")
        .addNonAggregatableField("object_field", "object")
        .build();
    analyzedFields = new FetchSourceContext(true, null, new String[] { "object_field" });
    ExtractedFieldsDetector detector = new ExtractedFieldsDetector(buildOutlierDetectionConfig(), 100, caps, Collections.emptyMap());
    ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, detector::detect);
    assertThat(e.getMessage(), equalTo("analyzed_fields must not include or exclude object or nested fields: [object_field]"));
}
public void testDetect_GivenAnalyzedFieldExcludesNestedField() {
    // Even excluding a nested field via analyzed_fields must be rejected.
    FieldCapabilitiesResponse caps = new MockFieldCapsResponseBuilder().addAggregatableField("float_field", "float")
        .addNonAggregatableField("nested_field", "nested")
        .build();
    analyzedFields = new FetchSourceContext(true, null, new String[] { "nested_field" });
    ExtractedFieldsDetector detector = new ExtractedFieldsDetector(buildOutlierDetectionConfig(), 100, caps, Collections.emptyMap());
    ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, detector::detect);
    assertThat(e.getMessage(), equalTo("analyzed_fields must not include or exclude object or nested fields: [nested_field]"));
}
public void testDetect_givenFeatureProcessorsFailures_ResultsField() {
    // A processor input living under the results field (ml.*) must be rejected.
    ExtractedFieldsDetector detector = new ExtractedFieldsDetector(
        buildRegressionConfig("field_31", Arrays.asList(buildPreProcessor("ml.result", "foo"))),
        100,
        simpleFieldResponse(),
        Collections.emptyMap()
    );
    ElasticsearchStatusException ex = expectThrows(ElasticsearchStatusException.class, detector::detect);
    assertThat(ex.getMessage(), equalTo("fields contained in results field [ml] cannot be used in a feature_processor"));
}
public void testDetect_givenFeatureProcessorsFailures_Objects() {
    // Object fields cannot be a feature_processor input.
    ExtractedFieldsDetector detector = new ExtractedFieldsDetector(
        buildRegressionConfig("field_31", Arrays.asList(buildPreProcessor("object_field", "foo"))),
        100,
        simpleFieldResponse(),
        Collections.emptyMap()
    );
    ElasticsearchStatusException ex = expectThrows(ElasticsearchStatusException.class, detector::detect);
    assertThat(ex.getMessage(), equalTo("fields for feature_processors must not be objects or nested"));
}
public void testDetect_givenFeatureProcessorsFailures_Nested() {
    // Nested fields cannot be a feature_processor input.
    FieldCapabilitiesResponse caps = new MockFieldCapsResponseBuilder().addAggregatableField("some_float", "float")
        .addNonAggregatableField("nested_field", "nested")
        .build();
    ExtractedFieldsDetector detector = new ExtractedFieldsDetector(
        buildRegressionConfig("some_float", Arrays.asList(buildPreProcessor("nested_field", "foo"))),
        100,
        caps,
        Collections.emptyMap()
    );
    ElasticsearchStatusException ex = expectThrows(ElasticsearchStatusException.class, detector::detect);
    assertThat(ex.getMessage(), equalTo("fields for feature_processors must not be objects or nested"));
}
public void testDetect_givenFeatureProcessorsFailures_ChildOfNested() {
    // Leaves inside a nested mapping are also rejected as processor inputs.
    FieldCapabilitiesResponse caps = new MockFieldCapsResponseBuilder().addAggregatableField("some_float", "float")
        .addNonAggregatableField("nested_field", "nested")
        .addAggregatableField("nested_field.inner_float", "float")
        .build();
    ExtractedFieldsDetector detector = new ExtractedFieldsDetector(
        buildRegressionConfig("some_float", Arrays.asList(buildPreProcessor("nested_field.inner_float", "foo"))),
        100,
        caps,
        Collections.emptyMap()
    );
    ElasticsearchStatusException ex = expectThrows(ElasticsearchStatusException.class, detector::detect);
    assertThat(ex.getMessage(), equalTo("nested fields [nested_field.*] cannot be used in a feature_processor"));
}
public void testDetect_givenFeatureProcessorsFailures_ReservedFields() {
    // Reserved/metadata fields such as _id cannot feed a feature_processor.
    ExtractedFieldsDetector detector = new ExtractedFieldsDetector(
        buildRegressionConfig("field_31", Arrays.asList(buildPreProcessor("_id", "foo"))),
        100,
        simpleFieldResponse(),
        Collections.emptyMap()
    );
    ElasticsearchStatusException ex = expectThrows(ElasticsearchStatusException.class, detector::detect);
    assertThat(ex.getMessage(), containsString("the following fields cannot be used in feature_processors"));
}
public void testDetect_givenFeatureProcessorsFailures_MissingFieldFromIndex() {
    // A processor input absent from the index field caps must be rejected.
    ExtractedFieldsDetector detector = new ExtractedFieldsDetector(
        buildRegressionConfig("field_31", Arrays.asList(buildPreProcessor("bar", "foo"))),
        100,
        simpleFieldResponse(),
        Collections.emptyMap()
    );
    ElasticsearchStatusException ex = expectThrows(ElasticsearchStatusException.class, detector::detect);
    assertThat(ex.getMessage(), containsString("the fields [bar] were not found in the field capabilities of the source indices"));
}
public void testDetect_givenFeatureProcessorsFailures_UsingRequiredField() {
    // The dependent variable itself cannot be consumed by a feature_processor.
    ExtractedFieldsDetector detector = new ExtractedFieldsDetector(
        buildRegressionConfig("field_31", Arrays.asList(buildPreProcessor("field_31", "foo"))),
        100,
        simpleFieldResponse(),
        Collections.emptyMap()
    );
    ElasticsearchStatusException ex = expectThrows(ElasticsearchStatusException.class, detector::detect);
    assertThat(ex.getMessage(), equalTo("required analysis fields [field_31] cannot be used in a feature_processor"));
}
public void testDetect_givenFeatureProcessorsFailures_BadSourceFiltering() {
    // A processor input excluded by _source filtering must be rejected.
    sourceFiltering = new FetchSourceContext(true, null, new String[] { "field_1*" });
    ExtractedFieldsDetector detector = new ExtractedFieldsDetector(
        buildRegressionConfig("field_31", Arrays.asList(buildPreProcessor("field_11", "foo"))),
        100,
        simpleFieldResponse(),
        Collections.emptyMap()
    );
    ElasticsearchStatusException ex = expectThrows(ElasticsearchStatusException.class, detector::detect);
    assertThat(ex.getMessage(), equalTo("fields [field_11] required by field_processors are not included in source filtering."));
}
public void testDetect_givenFeatureProcessorsFailures_MissingAnalyzedField() {
    // A processor input excluded from analyzed_fields must be rejected.
    analyzedFields = new FetchSourceContext(true, null, new String[] { "field_1*" });
    ExtractedFieldsDetector detector = new ExtractedFieldsDetector(
        buildRegressionConfig("field_31", Arrays.asList(buildPreProcessor("field_11", "foo"))),
        100,
        simpleFieldResponse(),
        Collections.emptyMap()
    );
    ElasticsearchStatusException ex = expectThrows(ElasticsearchStatusException.class, detector::detect);
    assertThat(ex.getMessage(), equalTo("fields [field_11] required by field_processors are not included in the analyzed_fields."));
}
public void testDetect_givenFeatureProcessorsFailures_RequiredMultiFields() {
    // Both directions fail: processing the multi-field of a required field,
    // and processing the parent of a required multi-field.
    FieldCapabilitiesResponse caps = simpleFieldResponse();
    ExtractedFieldsDetector detector = new ExtractedFieldsDetector(
        buildRegressionConfig("field_31", Arrays.asList(buildPreProcessor("field_31.child", "foo"))),
        100,
        caps,
        Collections.emptyMap()
    );
    ElasticsearchStatusException ex = expectThrows(ElasticsearchStatusException.class, detector::detect);
    assertThat(ex.getMessage(), containsString("feature_processors cannot be applied to required fields for analysis; "));
    detector = new ExtractedFieldsDetector(
        buildRegressionConfig("field_31.child", Arrays.asList(buildPreProcessor("field_31", "foo"))),
        100,
        caps,
        Collections.emptyMap()
    );
    ex = expectThrows(ElasticsearchStatusException.class, detector::detect);
    assertThat(ex.getMessage(), containsString("feature_processors cannot be applied to required fields for analysis; "));
}
public void testDetect_givenFeatureProcessorsFailures_BothMultiFields() {
    // Referencing a parent and its multi-field from two processors is ambiguous.
    ExtractedFieldsDetector detector = new ExtractedFieldsDetector(
        buildRegressionConfig(
            "field_31",
            Arrays.asList(buildPreProcessor("field_21", "foo"), buildPreProcessor("field_21.child", "bar"))
        ),
        100,
        simpleFieldResponse(),
        Collections.emptyMap()
    );
    ElasticsearchStatusException ex = expectThrows(ElasticsearchStatusException.class, detector::detect);
    assertThat(ex.getMessage(), containsString("feature_processors refer to both multi-field "));
}
public void testDetect_givenFeatureProcessorsFailures_DuplicateOutputFields() {
    // Two processors emitting the same output field name must be rejected.
    ExtractedFieldsDetector detector = new ExtractedFieldsDetector(
        buildRegressionConfig("field_31", Arrays.asList(buildPreProcessor("field_11", "foo"), buildPreProcessor("field_21", "foo"))),
        100,
        simpleFieldResponse(),
        Collections.emptyMap()
    );
    ElasticsearchStatusException ex = expectThrows(ElasticsearchStatusException.class, detector::detect);
    assertThat(ex.getMessage(), equalTo("feature_processors must define unique output field names; duplicate fields [foo]"));
}
public void testDetect_withFeatureProcessors() {
    // Happy path: the processed field is fetched from _source, the remaining
    // analysis fields from doc_values, and exactly one processed field results.
    FieldCapabilitiesResponse caps = new MockFieldCapsResponseBuilder().addAggregatableField("field_11", "float")
        .addAggregatableField("field_21", "float")
        .addNonAggregatableField("field_31", "float")
        .addAggregatableField("field_31.child", "float")
        .addNonAggregatableField("object_field", "object")
        .build();
    ExtractedFieldsDetector detector = new ExtractedFieldsDetector(
        buildRegressionConfig("field_11", Arrays.asList(buildPreProcessor("field_31", "foo", "bar"))),
        100,
        caps,
        Collections.emptyMap()
    );
    ExtractedFields extracted = detector.detect().v1();
    assertThat(extracted.getProcessedFieldInputs(), containsInAnyOrder("field_31"));
    assertThat(
        extracted.getAllFields().stream().map(ExtractedField::getName).collect(Collectors.toSet()),
        containsInAnyOrder("field_11", "field_21", "field_31")
    );
    assertThat(extracted.getSourceFields(), arrayContainingInAnyOrder("field_31"));
    assertThat(
        extracted.getDocValueFields().stream().map(ExtractedField::getName).collect(Collectors.toSet()),
        containsInAnyOrder("field_21", "field_11")
    );
    assertThat(extracted.getProcessedFields(), hasSize(1));
}
/**
 * Builds an outlier-detection config using the test's current
 * {@code sourceFiltering} and {@code analyzedFields} state.
 */
private DataFrameAnalyticsConfig buildOutlierDetectionConfig() {
return new DataFrameAnalyticsConfig.Builder().setId("foo")
.setSource(new DataFrameAnalyticsSource(SOURCE_INDEX, null, sourceFiltering, null))
.setDest(new DataFrameAnalyticsDest(DEST_INDEX, RESULTS_FIELD))
.setAnalyzedFields(analyzedFields)
.setAnalysis(new OutlierDetection.Builder().build())
.build();
}
/** Convenience overload: regression config with no feature processors. */
private DataFrameAnalyticsConfig buildRegressionConfig(String dependentVariable) {
return buildRegressionConfig(dependentVariable, Collections.emptyList());
}
/**
 * Builds a classification config for the given dependent variable.
 * NOTE(review): unlike the regression builder this does not call
 * {@code setAnalyzedFields(analyzedFields)} — confirm that is intentional
 * for the tests using it.
 */
private DataFrameAnalyticsConfig buildClassificationConfig(String dependentVariable) {
return new DataFrameAnalyticsConfig.Builder().setId("foo")
.setSource(new DataFrameAnalyticsSource(SOURCE_INDEX, null, sourceFiltering, null))
.setDest(new DataFrameAnalyticsDest(DEST_INDEX, RESULTS_FIELD))
.setAnalysis(new Classification(dependentVariable))
.build();
}
/**
 * Builds a regression config with default boosted-tree parameters; only the
 * dependent variable and the feature processors vary between tests.
 */
private DataFrameAnalyticsConfig buildRegressionConfig(String dependentVariable, List<PreProcessor> featureProcessors) {
    Regression regression = new Regression(
        dependentVariable,
        BoostedTreeParams.builder().build(),
        null,
        null,
        null,
        null,
        null,
        featureProcessors,
        null
    );
    return new DataFrameAnalyticsConfig.Builder().setId("foo")
        .setSource(new DataFrameAnalyticsSource(SOURCE_INDEX, null, sourceFiltering, null))
        .setDest(new DataFrameAnalyticsDest(DEST_INDEX, RESULTS_FIELD))
        .setAnalyzedFields(analyzedFields)
        .setAnalysis(regression)
        .build();
}
/**
 * Builds a custom one-hot encoding whose value map points random keys at the
 * requested output field names.
 */
private static PreProcessor buildPreProcessor(String inputField, String... outputFields) {
    Map<String, String> hotMap = Arrays.stream(outputFields)
        .collect(Collectors.toMap(unused -> randomAlphaOfLength(10), Function.identity()));
    return new OneHotEncoding(inputField, hotMap, true);
}
/**
 * Asserts the actual field selections match the expected ones, property by
 * property — individual assertions give useful error messages on failure.
 */
private static void assertFieldSelectionContains(List<FieldSelection> actual, FieldSelection... expected) {
    assertThat(actual, hasSize(expected.length));
    for (int i = 0; i < expected.length; i++) {
        String ctx = "i = " + i;
        FieldSelection act = actual.get(i);
        FieldSelection exp = expected[i];
        assertThat(ctx, act.getName(), equalTo(exp.getName()));
        assertThat(ctx, act.getMappingTypes(), equalTo(exp.getMappingTypes()));
        assertThat(ctx, act.isIncluded(), equalTo(exp.isIncluded()));
        assertThat(ctx, act.isRequired(), equalTo(exp.isRequired()));
        assertThat(ctx, act.getFeatureType(), equalTo(exp.getFeatureType()));
        assertThat(ctx, act.getReason(), equalTo(exp.getReason()));
    }
}
public void testDetect_givenFeatureProcessorsFailures_DuplicateOutputFieldsWithUnProcessedField() {
    // A processor output colliding with a regular (non-processed) analysis field fails.
    ExtractedFieldsDetector detector = new ExtractedFieldsDetector(
        buildRegressionConfig("field_31", Arrays.asList(buildPreProcessor("field_11", "field_21"))),
        100,
        simpleFieldResponse(),
        Collections.emptyMap()
    );
    ElasticsearchStatusException ex = expectThrows(ElasticsearchStatusException.class, detector::detect);
    assertThat(
        ex.getMessage(),
        equalTo("feature_processors output fields must not include non-processed analysis fields; duplicate fields [field_21]")
    );
}
/** Fluent builder for a fake field-caps response covering a single index. */
private static class MockFieldCapsResponseBuilder {
    // field name -> (mapping type -> capabilities), filled by the add* helpers.
    private final Map<String, Map<String, FieldCapabilities>> fieldCaps = new HashMap<>();

    private MockFieldCapsResponseBuilder addAggregatableField(String field, String... types) {
        return addField(field, true, types);
    }

    private MockFieldCapsResponseBuilder addNonAggregatableField(String field, String... types) {
        return addField(field, false, types);
    }

    private MockFieldCapsResponseBuilder addField(String field, boolean isAggregatable, String... types) {
        return addField(field, false, isAggregatable, types);
    }

    private MockFieldCapsResponseBuilder addField(String field, boolean isMetadataField, boolean isAggregatable, String... types) {
        Map<String, FieldCapabilities> capsPerType = new HashMap<>();
        for (String type : types) {
            FieldCapabilities caps =
                new FieldCapabilities(field, type, isMetadataField, true, isAggregatable, null, null, null, Collections.emptyMap());
            capsPerType.put(type, caps);
        }
        fieldCaps.put(field, capsPerType);
        return this;
    }

    private FieldCapabilitiesResponse build() {
        // Every mock field is reported as belonging to a single index named "test".
        return new FieldCapabilitiesResponse(new String[] { "test" }, fieldCaps);
    }
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.disruptor;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.Set;
import java.util.WeakHashMap;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicMarkableReference;
import java.util.concurrent.locks.LockSupport;
import com.lmax.disruptor.InsufficientCapacityException;
import com.lmax.disruptor.RingBuffer;
import com.lmax.disruptor.dsl.Disruptor;
import org.apache.camel.Exchange;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Holder for Disruptor references.
* <p/>
* This is used to keep track of the usages of the Disruptors, so we know when a Disruptor is no longer in use, and
* can safely be discarded.
*/
public class DisruptorReference {
private static final Logger LOGGER = LoggerFactory.getLogger(DisruptorReference.class);
// Endpoints currently sharing this Disruptor; weak references so abandoned
// endpoints do not keep it alive.
private final Set<DisruptorEndpoint> endpoints = Collections
.newSetFromMap(new WeakHashMap<DisruptorEndpoint, Boolean>(4));
private final DisruptorComponent component;
// Endpoint URI this reference was created for; also used to name the thread pool.
private final String uri;
private final String name;
//The mark on the reference indicates if we are in the process of reconfiguring the Disruptor:
//(ref, mark) : Description
//(null, false) : not started or completely shut down
//(null, true) : in process of reconfiguring
//( x , false) : normally functioning Disruptor
//( x , true) : never set
private final AtomicMarkableReference<Disruptor<ExchangeEvent>> disruptor = new AtomicMarkableReference<Disruptor<ExchangeEvent>>(null, false);
// Executor facade handed to the Disruptor; defers launching event-handler
// threads until a real ExecutorService is provided in start().
private final DelayedExecutor delayedExecutor = new DelayedExecutor();
private final DisruptorProducerType producerType;
// Ring buffer size (number of slots).
private final int size;
private final DisruptorWaitStrategy waitStrategy;
// Exchanges parked here while the Disruptor is being reconfigured; replayed
// onto the new ring buffer by publishBufferedExchanges().
private final Queue<Exchange> temporaryExchangeBuffer;
//access guarded by this
private ExecutorService executor;
private LifecycleAwareExchangeEventHandler[] handlers = new LifecycleAwareExchangeEventHandler[0];
// Count of distinct consumers; passed to ExchangeEvent.setExchange when
// publishing — presumably for per-consumer completion tracking (confirm in ExchangeEvent).
private int uniqueConsumerCount;
/**
 * Captures the configuration and immediately brings up the initial Disruptor
 * via {@link #reconfigure()}.
 */
DisruptorReference(final DisruptorComponent component, final String uri, final String name, final int size,
                   final DisruptorProducerType producerType, final DisruptorWaitStrategy waitStrategy) throws Exception {
    this.component = component;
    this.uri = uri;
    this.name = name;
    this.size = size;
    this.producerType = producerType;
    this.waitStrategy = waitStrategy;
    // Bounded by the ring buffer size: at most 'size' exchanges can be in flight.
    this.temporaryExchangeBuffer = new ArrayBlockingQueue<>(size);
    reconfigure();
}
/** Returns true when no Disruptor is active (not started, shut down, or mid-reconfiguration). */
public boolean hasNullReference() {
    return null == disruptor.getReference();
}
/**
 * Returns the active Disruptor, spin-waiting while a reconfiguration is in
 * progress (reference null, mark set).
 *
 * @throws DisruptorNotStartedException when the reference is null and no
 *         reconfiguration is pending, i.e. not started or already shut down
 */
private Disruptor<ExchangeEvent> getCurrentDisruptor() throws DisruptorNotStartedException {
Disruptor<ExchangeEvent> currentDisruptor = disruptor.getReference();
if (currentDisruptor == null) {
// no current Disruptor reference, we may be reconfiguring or it was not started
// check which by looking at the reference mark...
boolean[] changeIsPending = new boolean[1];
while (currentDisruptor == null) {
// atomically read both the reference and the mark into changeIsPending[0]
currentDisruptor = disruptor.get(changeIsPending);
//Check if we are reconfiguring
if (currentDisruptor == null && !changeIsPending[0]) {
throw new DisruptorNotStartedException(
"Disruptor is not yet started or already shut down.");
} else if (currentDisruptor == null && changeIsPending[0]) {
//We should be back shortly...keep trying but spare CPU resources
LockSupport.parkNanos(1L);
}
}
}
return currentDisruptor;
}
/**
 * Non-blocking publish: throws {@link InsufficientCapacityException} when the
 * ring buffer has no free slot instead of waiting for one.
 */
public void tryPublish(final Exchange exchange) throws DisruptorNotStartedException, InsufficientCapacityException {
    final RingBuffer<ExchangeEvent> ringBuffer = getCurrentDisruptor().getRingBuffer();
    tryPublishExchangeOnRingBuffer(exchange, ringBuffer);
}
/** Blocking publish: waits until a ring buffer slot is available. */
public void publish(final Exchange exchange) throws DisruptorNotStartedException {
    final RingBuffer<ExchangeEvent> ringBuffer = getCurrentDisruptor().getRingBuffer();
    publishExchangeOnRingBuffer(exchange, ringBuffer);
}
/** Claims the next slot (blocking while the buffer is full), fills it, then publishes it. */
private void publishExchangeOnRingBuffer(final Exchange exchange,
                                         final RingBuffer<ExchangeEvent> ringBuffer) {
    final long slot = ringBuffer.next();
    ringBuffer.get(slot).setExchange(exchange, uniqueConsumerCount);
    ringBuffer.publish(slot);
}
/**
 * Claims the next slot without blocking, fills it, then publishes it.
 *
 * @throws InsufficientCapacityException when the ring buffer is full
 */
private void tryPublishExchangeOnRingBuffer(final Exchange exchange, final RingBuffer<ExchangeEvent> ringBuffer) throws InsufficientCapacityException {
    final long slot = ringBuffer.tryNext();
    ringBuffer.get(slot).setExchange(exchange, uniqueConsumerCount);
    ringBuffer.publish(slot);
}
public synchronized void reconfigure() throws Exception {
LOGGER.debug("Reconfiguring disruptor {}", this);
shutdownDisruptor(true);
start();
}
/**
 * Creates and starts a new Disruptor, waits until every event handler has
 * signalled startup, replays buffered exchanges, and only then makes the new
 * Disruptor visible to publishers (clearing the reconfiguration mark).
 */
private void start() throws Exception {
LOGGER.debug("Starting disruptor {}", this);
Disruptor<ExchangeEvent> newDisruptor = createDisruptor();
newDisruptor.start();
if (executor != null) {
//and use our delayed executor to really really execute the event handlers now
delayedExecutor.executeDelayedCommands(executor);
}
//make sure all event handlers are correctly started before we continue
for (final LifecycleAwareExchangeEventHandler handler : handlers) {
boolean eventHandlerStarted = false;
while (!eventHandlerStarted) {
try {
//The disruptor start command executed above should have triggered a start signal to all
//event processors which, in their death, should notify our event handlers. They respond by
//switching a latch and we want to await that latch here to make sure they are started.
if (!handler.awaitStarted(10, TimeUnit.SECONDS)) {
//we wait for a relatively long, but limited amount of time to prevent an application using
//this component from hanging indefinitely
//Please report a bug if you can reproduce this
LOGGER.error("Disruptor/event handler failed to start properly, PLEASE REPORT");
}
eventHandlerStarted = true;
} catch (InterruptedException e) {
//just retry
//NOTE(review): the interrupt is swallowed and the thread's interrupt status is
//not restored (Thread.currentThread().interrupt()) — confirm this is intentional.
}
}
}
//replay exchanges buffered during shutdown before accepting new publishers
publishBufferedExchanges(newDisruptor);
//publish the new reference and clear the 'reconfiguring' mark
disruptor.set(newDisruptor, false);
}
/**
 * Builds a new Disruptor wired with the event handlers of every endpoint
 * currently sharing this reference; also recomputes {@link #uniqueConsumerCount}.
 */
private Disruptor<ExchangeEvent> createDisruptor() throws Exception {
    // The delayedExecutor postpones launching handler threads until start().
    final Disruptor<ExchangeEvent> newDisruptor = new Disruptor<ExchangeEvent>(
        ExchangeEventFactory.INSTANCE, size, delayedExecutor, producerType.getProducerType(),
        waitStrategy.createWaitStrategyInstance());
    // Gather the event handlers of all endpoints, counting distinct consumers.
    final List<LifecycleAwareExchangeEventHandler> eventHandlers = new ArrayList<LifecycleAwareExchangeEventHandler>();
    uniqueConsumerCount = 0;
    for (final DisruptorEndpoint endpoint : endpoints) {
        final Map<DisruptorConsumer, Collection<LifecycleAwareExchangeEventHandler>> handlersByConsumer =
                endpoint.createConsumerEventHandlers();
        if (handlersByConsumer != null) {
            uniqueConsumerCount += handlersByConsumer.keySet().size();
            for (final Collection<LifecycleAwareExchangeEventHandler> handlerGroup : handlersByConsumer.values()) {
                eventHandlers.addAll(handlerGroup);
            }
        }
    }
    LOGGER.debug("Disruptor created with {} event handlers", eventHandlers.size());
    handleEventsWith(newDisruptor, eventHandlers.toArray(new LifecycleAwareExchangeEventHandler[0]));
    return newDisruptor;
}
/**
 * Installs the given handlers on the Disruptor, resizing the thread pool to
 * match. With no real consumers a single blocking handler is installed instead,
 * so published exchanges are retained until a consumer appears.
 */
private void handleEventsWith(Disruptor<ExchangeEvent> newDisruptor,
                              final LifecycleAwareExchangeEventHandler[] newHandlers) {
    if (newHandlers != null && newHandlers.length > 0) {
        handlers = newHandlers;
    } else {
        handlers = new LifecycleAwareExchangeEventHandler[] { new BlockingExchangeEventHandler() };
    }
    resizeThreadPoolExecutor(handlers.length);
    newDisruptor.handleEventsWith(handlers);
}
/**
 * Replays any exchanges parked in the temporary buffer during shutdown onto
 * the new Disruptor's ring buffer, preserving their order.
 */
private void publishBufferedExchanges(Disruptor<ExchangeEvent> newDisruptor) {
    // Drain first, then publish, mirroring the original two-phase hand-over.
    final List<Exchange> pending = new ArrayList<Exchange>(temporaryExchangeBuffer.size());
    Exchange buffered;
    while ((buffered = temporaryExchangeBuffer.poll()) != null) {
        pending.add(buffered);
    }
    final RingBuffer<ExchangeEvent> ringBuffer = newDisruptor.getRingBuffer();
    for (final Exchange exchange : pending) {
        publishExchangeOnRingBuffer(exchange, ringBuffer);
    }
}
/**
 * Adjusts the executor backing the event-handler threads to {@code newSize}:
 * creates one when absent, shuts it down at size 0, resizes an existing
 * ThreadPoolExecutor in place, and otherwise replaces the unknown executor.
 */
private void resizeThreadPoolExecutor(final int newSize) {
if (executor == null && newSize > 0) {
LOGGER.debug("Creating new executor with {} threads", newSize);
//no thread pool executor yet, create a new one
executor = component.getCamelContext().getExecutorServiceManager().newFixedThreadPool(this, uri,
newSize);
} else if (executor != null && newSize <= 0) {
LOGGER.debug("Shutting down executor");
//we need to shut down our executor
component.getCamelContext().getExecutorServiceManager().shutdown(executor);
executor = null;
} else if (executor instanceof ThreadPoolExecutor) {
LOGGER.debug("Resizing existing executor to {} threads", newSize);
//our thread pool executor is of type ThreadPoolExecutor, we know how to resize it
final ThreadPoolExecutor threadPoolExecutor = (ThreadPoolExecutor)executor;
//Java 9 support, checkout http://download.java.net/java/jdk9/docs/api/java/util/concurrent/ThreadPoolExecutor.html#setCorePoolSize-int-
// and http://download.java.net/java/jdk9/docs/api/java/util/concurrent/ThreadPoolExecutor.html#setMaximumPoolSize-int-
//for more information
//the order matters: core size must never exceed maximum size, so shrink
//core-first when going down and grow maximum-first when going up
if (newSize <= threadPoolExecutor.getCorePoolSize()) {
threadPoolExecutor.setCorePoolSize(newSize);
threadPoolExecutor.setMaximumPoolSize(newSize);
} else {
threadPoolExecutor.setMaximumPoolSize(newSize);
threadPoolExecutor.setCorePoolSize(newSize);
}
} else if (newSize > 0) {
LOGGER.debug("Shutting down old and creating new executor with {} threads", newSize);
//hmmm...no idea what kind of executor this is...just kill it and start fresh
component.getCamelContext().getExecutorServiceManager().shutdown(executor);
executor = component.getCamelContext().getExecutorServiceManager().newFixedThreadPool(this, uri,
newSize);
}
}
/**
 * Atomically detaches and shuts down the current disruptor, then awaits termination of all its
 * event handlers before clearing them. Safe to call when no disruptor is active (no-op then).
 *
 * @param isReconfiguring whether the shutdown is part of a reconfiguration (recorded on the
 *                        atomic disruptor reference so producers can buffer instead of fail)
 */
private synchronized void shutdownDisruptor(boolean isReconfiguring) {
    LOGGER.debug("Shutting down disruptor {}, reconfiguring: {}", this, isReconfiguring);
    Disruptor<ExchangeEvent> currentDisruptor = disruptor.getReference();
    // Swap the reference to null first so producers stop publishing to the old ring buffer.
    disruptor.set(null, isReconfiguring);
    if (currentDisruptor != null) {
        //check if we had a blocking event handler to keep an empty disruptor 'busy'
        if (handlers != null && handlers.length == 1
                && handlers[0] instanceof BlockingExchangeEventHandler) {
            // yes we did, unblock it so we can get rid of our backlog,
            // The eventhandler will empty its pending exchanges in our temporary buffer
            final BlockingExchangeEventHandler blockingExchangeEventHandler = (BlockingExchangeEventHandler)handlers[0];
            blockingExchangeEventHandler.unblock();
        }
        currentDisruptor.shutdown();
        //they have already been given a trigger to halt when they are done by shutting down the disruptor
        //we do however want to await their completion before they are scheduled to process events from the new
        for (final LifecycleAwareExchangeEventHandler eventHandler : handlers) {
            boolean eventHandlerFinished = false;
            //the disruptor is now empty and all consumers are either done or busy processing their last exchange
            while (!eventHandlerFinished) {
                try {
                    //The disruptor shutdown command executed above should have triggered a halt signal to all
                    //event processors which, in their death, should notify our event handlers. They respond by
                    //switching a latch and we want to await that latch here to make sure they are done.
                    if (!eventHandler.awaitStopped(10, TimeUnit.SECONDS)) {
                        //we wait for a relatively long, but limited amount of time to prevent an application using
                        //this component from hanging indefinitely
                        //Please report a bug if you can reproduce this
                        LOGGER.error("Disruptor/event handler failed to shut down properly, PLEASE REPORT");
                    }
                    eventHandlerFinished = true;
                } catch (InterruptedException e) {
                    //just retry
                    //NOTE(review): the interrupt status is deliberately swallowed so the await
                    //is retried; consider re-asserting Thread.currentThread().interrupt() after
                    //the loop completes — confirm no caller relies on the cleared status.
                }
            }
        }
        handlers = new LifecycleAwareExchangeEventHandler[0];
    }
}
/** Shuts down the shared thread pool executor by resizing it to zero threads. */
private synchronized void shutdownExecutor() {
    resizeThreadPoolExecutor(0);
}
/** @return the name of this disruptor reference */
public String getName() {
    return name;
}
/**
 * @return the number of free slots on the current disruptor's ring buffer
 * @throws DisruptorNotStartedException if no disruptor is currently active
 */
public long getRemainingCapacity() throws DisruptorNotStartedException {
    return getCurrentDisruptor().getRingBuffer().remainingCapacity();
}
/** @return the wait strategy configured for this disruptor */
public DisruptorWaitStrategy getWaitStrategy() {
    return waitStrategy;
}
/** @return the producer type (single/multi) configured for this disruptor */
DisruptorProducerType getProducerType() {
    return producerType;
}
/** @return the configured ring buffer size */
public int getBufferSize() {
    return size;
}
/**
 * Returns the number of exchanges waiting to be processed: the occupied slots of the active
 * ring buffer (if any) plus everything parked in the temporary buffer. Falls back to the
 * temporary buffer size alone when no disruptor is running.
 *
 * @return the best-effort count of pending exchanges
 */
public int getPendingExchangeCount() {
    try {
        if (!hasNullReference()) {
            // capacity minus free slots = exchanges currently queued on the ring buffer
            final long inRingBuffer = getBufferSize() - getRemainingCapacity();
            return (int) (inRingBuffer + temporaryExchangeBuffer.size());
        }
    } catch (DisruptorNotStartedException e) {
        // no active disruptor; fall through and report only the buffered exchanges
    }
    return temporaryExchangeBuffer.size();
}
/**
 * Registers an endpoint as a user of this shared disruptor reference.
 *
 * @param disruptorEndpoint the endpoint to register
 */
public synchronized void addEndpoint(final DisruptorEndpoint disruptorEndpoint) {
    // Parameterized logging (instead of string concatenation) so the argument is only
    // stringified when DEBUG is actually enabled; also matches the style of the other
    // log statements in this class.
    LOGGER.debug("Adding Endpoint: {}", disruptorEndpoint);
    endpoints.add(disruptorEndpoint);
    LOGGER.debug("Endpoint added: {}, new total endpoints {}", disruptorEndpoint, endpoints.size());
}
/**
 * Deregisters an endpoint. When the last endpoint is being removed, the disruptor and its
 * executor are shut down first, since nothing depends on them anymore.
 *
 * @param disruptorEndpoint the endpoint to remove
 */
public synchronized void removeEndpoint(final DisruptorEndpoint disruptorEndpoint) {
    // Parameterized logging (instead of string concatenation) so the argument is only
    // stringified when DEBUG is actually enabled.
    LOGGER.debug("Removing Endpoint: {}", disruptorEndpoint);
    if (getEndpointCount() == 1) {
        LOGGER.debug("Last Endpoint removed, shutdown disruptor");
        //Shutdown our disruptor
        shutdownDisruptor(false);
        //As there are no endpoints dependent on this Disruptor, we may also shutdown our executor
        shutdownExecutor();
    }
    endpoints.remove(disruptorEndpoint);
    LOGGER.debug("Endpoint removed: {}, new total endpoints {}", disruptorEndpoint, getEndpointCount());
}
/** @return the number of endpoints currently registered with this disruptor reference */
public synchronized int getEndpointCount() {
    return endpoints.size();
}
/** Renders the reference's uri, endpoint count and handler count for diagnostics/logging. */
@Override
public String toString() {
    // Assembled with an explicit StringBuilder; the rendered text is unchanged.
    final StringBuilder sb = new StringBuilder("DisruptorReference{");
    sb.append("uri='").append(uri).append('\'');
    sb.append(", endpoint count=").append(endpoints.size());
    sb.append(", handler count=").append(handlers.length);
    sb.append('}');
    return sb.toString();
}
/**
 * {@link LifecycleAwareExchangeEventHandler} that blocks every {@code onEvent} call on a latch
 * until {@link #unblock()} is invoked. Once released, it does not process exchanges itself:
 * non-ignored exchanges are diverted into the temporary buffer for later republication.
 */
private class BlockingExchangeEventHandler extends AbstractLifecycleAwareExchangeEventHandler {
    // Opened exactly once by unblock(); all onEvent calls wait on it.
    private final CountDownLatch releaseLatch = new CountDownLatch(1);

    @Override
    public void onEvent(final ExchangeEvent event, final long sequence, final boolean endOfBatch) throws Exception {
        releaseLatch.await();
        final Exchange exchange = event.getSynchronizedExchange().cancelAndGetOriginalExchange();
        final boolean ignore = exchange.getProperty(DisruptorEndpoint.DISRUPTOR_IGNORE_EXCHANGE, false, boolean.class);
        if (ignore) {
            // Explicitly flagged to be skipped; don't buffer it.
            LOGGER.trace("Ignoring exchange {}", exchange);
        } else {
            // Park the exchange so it can be republished on the next disruptor.
            temporaryExchangeBuffer.offer(exchange);
        }
    }

    /** Releases all blocked {@code onEvent} calls, allowing the backlog to drain. */
    public void unblock() {
        releaseLatch.countDown();
    }
}
/**
* When a consumer is added or removed, we need to create a new Disruptor due to its static configuration. However, we
would like to reuse our thread pool executor and only add or remove the threads we need. On a reconfiguration of the
* Disruptor, we need to atomically swap the current RingBuffer with a new and fully configured one in order to keep
* the producers operational without the risk of losing messages. Configuration of a RingBuffer by the Disruptor's
* start method has a side effect that immediately starts execution of the event processors (consumers) on the
* Executor passed as a constructor argument which is stored in a final field. In order to be able to delay actual
* execution of the event processors until the event processors of the previous RingBuffer are done processing and the
* thread pool executor has been resized to match the new consumer count, we delay their execution using this class.
*/
private static class DelayedExecutor implements Executor {
private final Queue<Runnable> delayedCommands = new LinkedList<Runnable>();
@Override
public void execute(final Runnable command) {
delayedCommands.offer(command);
}
public void executeDelayedCommands(final Executor actualExecutor) {
Runnable command;
while ((command = delayedCommands.poll()) != null) {
actualExecutor.execute(command);
}
}
}
}
| |
/*
* Copyright 2013 eBuddy B.V.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ebuddy.cassandra.dao;
import static com.ebuddy.cassandra.dao.AbstractColumnFamilyTemplate.ALL;
import static org.mockito.Mockito.any;
import static org.mockito.Mockito.anyString;
import static org.mockito.Mockito.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertTrue;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang3.ObjectUtils;
import org.mockito.ArgumentCaptor;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import com.ebuddy.cassandra.dao.visitor.ColumnVisitor;
import me.prettyprint.cassandra.model.ExecutingKeyspace;
import me.prettyprint.cassandra.model.ExecutionResult;
import me.prettyprint.cassandra.model.KeyspaceOperationCallback;
import me.prettyprint.cassandra.serializers.StringSerializer;
import me.prettyprint.hector.api.Serializer;
import me.prettyprint.hector.api.beans.ColumnSlice;
import me.prettyprint.hector.api.beans.HColumn;
import me.prettyprint.hector.api.beans.HSuperColumn;
import me.prettyprint.hector.api.beans.Row;
import me.prettyprint.hector.api.beans.Rows;
import me.prettyprint.hector.api.beans.SuperSlice;
import me.prettyprint.hector.api.exceptions.HectorTransportException;
import me.prettyprint.hector.api.factory.HFactory;
import me.prettyprint.hector.api.mutation.Mutator;
/**
 * Unit tests for {@code SuperColumnFamilyTemplate}.
 *
 * <p>All Hector collaborators (keyspace, execution result, mutator) are Mockito mocks, so these
 * tests verify only the template's read/write translation logic, not any Cassandra interaction.
 *
 * @author Eric Zoerner <a href="mailto:ezoerner@ebuddy.com">ezoerner@ebuddy.com</a>
 */
@SuppressWarnings({"unchecked", "rawtypes"})
public class SuperColumnFamilyTemplateTest {
    // Immutable fixture data shared by all test methods.
    private final String columnFamily = "TestColumnFamily";
    private final String rowKey = "testKey";
    private final String superColumnName = "testSuperColumnName";
    private final String columnName = "testColumnName";
    private final List<String> columnNames = Arrays.asList("columnName1", "columnName2");
    private final String columnValue = "testColumnValue";
    private final List<String> columnValues = Arrays.asList("columnValue1", "columnValue2");
    private final List<String> superColumnNames = Arrays.asList("superColumnName1", "superColumnName2");
    private final List<String> rowKeys = Arrays.asList("rowKey1", "rowKey2");

    @Mock
    private ExecutionResult executionResult;
    @Mock
    private KeyspaceTemplate.HectorBatchContext txnContext;
    @Mock
    private Mutator<String> mutator;
    private SuperColumnFamilyOperations<String,String,String,String> superColumnFamilyTestDao;
    @Mock
    ColumnVisitor<String, String> columnVisitor;

    /** Wires the mocks into a fresh template instance before every test method. */
    @BeforeMethod(alwaysRun = true)
    private void setUp() {
        MockitoAnnotations.initMocks(this);
        ExecutingKeyspace keyspace = mock(ExecutingKeyspace.class);
        when(keyspace.doExecute(any(KeyspaceOperationCallback.class))).thenReturn(executionResult);
        superColumnFamilyTestDao = new SuperColumnFamilyTemplate<String,String,String,String>(keyspace,
                                                                                              columnFamily,
                                                                                              StringSerializer.get(),
                                                                                              StringSerializer.get(),
                                                                                              StringSerializer.get(),
                                                                                              StringSerializer.get());
        when(txnContext.getMutator()).thenReturn(mutator);
    }

    /** readColumnValue returns the value of the single column in the mocked slice. */
    @Test(groups = {"unit"})
    public void testReadColumnValue() {
        ColumnSlice columnSlice = mock(ColumnSlice.class);
        HColumn column = mock(HColumn.class);
        String columnValue = "testColumnValue";
        when(column.getValue()).thenReturn(columnValue);
        when(columnSlice.getColumns()).thenReturn(Collections.singletonList(column));
        when(executionResult.get()).thenReturn(columnSlice);

        //=========================
        String value = superColumnFamilyTestDao.readColumnValue(rowKey, superColumnName, columnName);
        //=========================

        assertEquals(value, columnValue);
    }

    /** Hector exceptions thrown during readColumnValue surface to the caller. */
    @Test(groups={"unit"}, expectedExceptions = HectorTransportException.class)
    public void testReadColumnValueAndTranslateHectorException() {
        when(executionResult.get()).thenThrow(new HectorTransportException("test hector exception"));
        superColumnFamilyTestDao.readColumnValue(rowKey, superColumnName, columnName);
    }

    /** readColumnsAsMap with no column names returns all columns of the super column as a map. */
    @Test(groups = {"unit"})
    public void testReadColumnsAsMapSpecifyingNoColumnName() {
        Map<String, String> testResultMap = new HashMap<String,String>();
        testResultMap.put("testPropKey1", "testPropValue1");
        testResultMap.put("testPropKey2", "testPropValue2");
        ColumnSlice columnSlice = mock(ColumnSlice.class);
        HColumn column1 = mock(HColumn.class);
        HColumn column2 = mock(HColumn.class);
        setupHColumn(column1, "testPropKey1", "testPropValue1");
        setupHColumn(column2, "testPropKey2", "testPropValue2");
        when(columnSlice.getColumns()).thenReturn(Arrays.asList(column1, column2));
        when(executionResult.get()).thenReturn(columnSlice);

        //=========================
        Map actualResult = superColumnFamilyTestDao.readColumnsAsMap(rowKey, superColumnName);
        //=========================

        assertEquals(actualResult, testResultMap);
    }

    /** Hector exceptions thrown during readColumnsAsMap surface to the caller. */
    @Test(groups={"unit"}, expectedExceptions = HectorTransportException.class)
    public void testReadColumnsAsMapAndTranslateHectorException() {
        when(executionResult.get()).thenThrow(new HectorTransportException("test hector exception"));
        superColumnFamilyTestDao.readColumnsAsMap(rowKey, superColumnName);
    }

    /** multiGetColumnsAsMap returns one column map per row key. */
    @Test(groups={"unit"})
    public void testMultiGetColumnsAsMapSpecifyingNoColumnName() {
        Map<String,Map<String,String>> expectedResult = new HashMap<String,Map<String,String>>();
        Map<String,String> properties = new HashMap<String, String>();
        Iterator<String> itr = columnValues.iterator();
        for (String columnName : columnNames) {
            properties.put(columnName, itr.next());
        }
        for (String key : rowKeys) {
            expectedResult.put(key, properties);
        }
        Rows<String,String,String> rows = mock(Rows.class);
        when(executionResult.get()).thenReturn(rows);
        ColumnSlice<String,String> columnSlice = mock(ColumnSlice.class);
        Row<String,String,String> mockRow = mock(Row.class);
        // hard codes number of row keys to be 2
        when(mockRow.getKey()).thenReturn(rowKeys.get(0)).thenReturn(rowKeys.get(1));
        when(mockRow.getColumnSlice()).thenReturn(columnSlice);
        HColumn<String,String> mockColumn = mock(HColumn.class);
        when(mockColumn.getName()).thenReturn(columnNames.get(0)).thenReturn(columnNames.get(1)).
                thenReturn(columnNames.get(0)).thenReturn(columnNames.get(1));
        when(mockColumn.getValue()).
                thenReturn(columnValues.get(0)).
                thenReturn(columnValues.get(1)).
                thenReturn(columnValues.get(0)).
                thenReturn(columnValues.get(1));
        List<HColumn<String,String>> columnList = Arrays.asList(mockColumn, mockColumn);
        when(columnSlice.getColumns()).thenReturn(columnList);
        Iterator<Row<String,String,String>> mockRowIterator = mock(Iterator.class);
        when(rows.iterator()).thenReturn(mockRowIterator);
        when(mockRowIterator.hasNext()).thenReturn(true).thenReturn(true).thenReturn(false);
        when(mockRowIterator.next()).thenReturn(mockRow).thenReturn(mockRow);

        //=========================
        Map<String,Map<String,String>> result =
                superColumnFamilyTestDao.multiGetColumnsAsMap(rowKeys, superColumnName);
        //=========================

        assertEquals(result, expectedResult);
    }

    /** Hector exceptions thrown during multiGetColumnsAsMap surface to the caller. */
    @Test(groups={"unit"}, expectedExceptions = HectorTransportException.class)
    public void testMultiGetColumnsAsMapTranslateHectorException() {
        when(executionResult.get()).thenThrow(new HectorTransportException("test hector exception"));
        superColumnFamilyTestDao.multiGetColumnsAsMap(rowKeys, superColumnName);
    }

    /** readRowAsMap returns a map of super column name to its column map. */
    @Test(groups={"unit"})
    public void testReadRowAsMap() {
        Map<String,Map<String,String>> expectedResult = new HashMap<String,Map<String,String>>();
        for (String superColumnName : superColumnNames) {
            Map<String,String> properties = new HashMap<String,String>();
            Iterator<String> itr = columnValues.iterator();
            for (String columnName : columnNames) {
                properties.put(columnName, itr.next());
            }
            expectedResult.put(superColumnName, properties);
        }
        SuperSlice superSlice = mock(SuperSlice.class);
        when(executionResult.get()).thenReturn(superSlice);
        HSuperColumn superColumn = mock(HSuperColumn.class);
        when(superSlice.getSuperColumns()).thenReturn(Arrays.asList(superColumn, superColumn));
        when(superColumn.getName()).thenReturn(superColumnNames.get(0)).thenReturn(superColumnNames.get(1));
        HColumn column = mock(HColumn.class);
        when(superColumn.getColumns()).thenReturn(Arrays.asList(column, column));
        when(column.getName()).thenReturn(columnNames.get(0)).
                thenReturn(columnNames.get(1)).
                thenReturn(columnNames.get(0)).
                thenReturn(columnNames.get(1));
        when(column.getValue()).
                thenReturn(columnValues.get(0)).
                thenReturn(columnValues.get(1)).
                thenReturn(columnValues.get(0)).
                thenReturn(columnValues.get(1));

        //=========================
        Map<String,Map<String,String>> result = superColumnFamilyTestDao.readRowAsMap(rowKey);
        //=========================

        assertEquals(result, expectedResult);
    }

    /** Hector exceptions thrown during readRowAsMap surface to the caller. */
    @Test(groups={"unit"}, expectedExceptions = HectorTransportException.class)
    public void testReadRowAsMapTranslateHectorException() {
        when(executionResult.get()).thenThrow(new HectorTransportException("test hector exception"));
        superColumnFamilyTestDao.readRowAsMap(rowKey);
    }

    /** writeColumns inserts a super column containing all supplied columns via the mutator. */
    @Test(groups={"unit"})
    public void testWriteColumns() {
        Map<String,String> properties = new HashMap<String,String>();
        Iterator<String> itr = columnValues.iterator();
        for (String columnName : columnNames) {
            properties.put(columnName, itr.next());
        }

        //=========================
        superColumnFamilyTestDao.writeColumns(rowKey, superColumnName, properties, txnContext);
        //=========================

        List<HColumn<String,String>> columns = new ArrayList<HColumn<String,String>>();
        Iterator<String> itr2 = columnValues.iterator();
        for (String columnName : columnNames) {
            columns.add(HFactory.createColumn(columnName,
                                              itr2.next(),
                                              StringSerializer.get(),
                                              StringSerializer.get()));
        }
        HSuperColumn<String,String,String> superColumn = HFactory.createSuperColumn(superColumnName,
                                                                                    columns,
                                                                                    StringSerializer.get(),
                                                                                    StringSerializer.get(),
                                                                                    StringSerializer.get());
        ArgumentCaptor<HSuperColumn> superColumnCaptor = ArgumentCaptor.forClass(HSuperColumn.class);
        verify(mutator).addInsertion(eq(rowKey), eq(columnFamily), superColumnCaptor.capture());
        HSuperColumn actualSuperColumn = superColumnCaptor.getValue();
        assertTrue(areSuperColumnsEqual(actualSuperColumn, superColumn));
    }

    /** Hector exceptions thrown by the mutator during writeColumns surface to the caller. */
    @Test(groups={"unit"}, expectedExceptions = HectorTransportException.class)
    public void testWriteColumnsTranslateHectorException() {
        when(mutator.addInsertion(eq(rowKey),
                                  eq(columnFamily),
                                  any(HSuperColumn.class))).thenThrow(new HectorTransportException(
                "test hector exception"));
        Map<String,String> properties = new HashMap<String,String>();
        Iterator<String> itr = columnValues.iterator();
        for (String columnName : columnNames) {
            properties.put(columnName, itr.next());
        }

        //=========================
        superColumnFamilyTestDao.writeColumns(rowKey, superColumnName, properties, txnContext);
        //=========================
    }

    /** writeColumn inserts a super column containing a single column via the mutator. */
    @Test(groups={"unit"})
    public void testWriteColumn() {
        String propertyValue = columnValue;

        //=========================
        superColumnFamilyTestDao.writeColumn(rowKey, superColumnName, columnName, propertyValue, txnContext);
        //=========================

        HColumn<String,String> column = HFactory.createColumn(columnName,
                                                              columnValue,
                                                              StringSerializer.get(),
                                                              StringSerializer.get());
        HSuperColumn<String,String,String> superColumn = HFactory.createSuperColumn(superColumnName,
                                                                                    Arrays.asList(column),
                                                                                    StringSerializer.get(),
                                                                                    StringSerializer.get(),
                                                                                    StringSerializer.get());
        ArgumentCaptor<HSuperColumn> superColumnCaptor = ArgumentCaptor.forClass(HSuperColumn.class);
        verify(mutator).addInsertion(eq(rowKey), eq(columnFamily), superColumnCaptor.capture());
        HSuperColumn actualSuperColumn = superColumnCaptor.getValue();
        assertTrue(areSuperColumnsEqual(actualSuperColumn, superColumn));
    }

    /** Hector exceptions thrown by the mutator during writeColumn surface to the caller. */
    @Test(groups={"unit"}, expectedExceptions = HectorTransportException.class)
    public void testWriteColumnTranslateHectorException() {
        when(mutator.addInsertion(eq(rowKey),
                                  eq(columnFamily),
                                  any(HSuperColumn.class))).
                thenThrow(new HectorTransportException("test hector exception"));
        String propertyValue = columnValue;

        //=========================
        superColumnFamilyTestDao.writeColumn(rowKey, superColumnName, columnName, propertyValue, txnContext);
        //=========================
    }

    /** deleteColumns issues one sub-delete per column name via the mutator. */
    @Test(groups={"unit"})
    public void testDeleteColumns() {
        //=========================
        superColumnFamilyTestDao.deleteColumns(rowKey, superColumnName, columnNames, txnContext);
        //=========================

        for (String colName : columnNames) {
            verify(mutator).addSubDelete(rowKey,
                                         columnFamily,
                                         superColumnName,
                                         colName,
                                         StringSerializer.get(),
                                         StringSerializer.get());
        }
    }

    /** Hector exceptions thrown by the mutator during deleteColumns surface to the caller. */
    @Test(groups={"unit"}, expectedExceptions = HectorTransportException.class)
    public void testDeleteColumnsTranslateHectorException() {
        when(mutator.addSubDelete(anyString(),
                                  anyString(),
                                  anyString(),
                                  anyString(),
                                  any(Serializer.class),
                                  any(Serializer.class))).
                thenThrow(new HectorTransportException("test hector exception"));

        //=========================
        superColumnFamilyTestDao.deleteColumns(rowKey, superColumnName, columnNames, txnContext);
        //=========================
    }

    /** visitColumns invokes the visitor once per column in the slice. */
    @Test(groups = {"unit"})
    public void testVisitColumn() {
        ColumnSlice columnSlice = mock(ColumnSlice.class);
        HColumn column1 = mock(HColumn.class);
        HColumn column2 = mock(HColumn.class);
        String propertyValue1 = setupHColumn(column1, "testPropKey1", "testPropValue1");
        // Fixed a copy-paste slip: column2 previously reused "testPropValue1" as its value,
        // which made the two expectations below indistinguishable. The verifications use the
        // returned variables, so the assertions remain consistent.
        String propertyValue2 = setupHColumn(column2, "testPropKey2", "testPropValue2");
        when(columnSlice.getColumns()).thenReturn(Arrays.asList(column1, column2));
        when(executionResult.get()).thenReturn(columnSlice);

        //=========================
        superColumnFamilyTestDao.visitColumns(rowKey, superColumnName, null, null, ALL, false, columnVisitor);
        //=========================

        verify(columnVisitor).visit(eq("testPropKey1"), eq(propertyValue1), any(Long.class), any(Integer.class));
        verify(columnVisitor).visit(eq("testPropKey2"), eq(propertyValue2), any(Long.class), any(Integer.class));
    }

    /**
     * Stubs an HColumn mock with the given name/value and returns the value for later assertions.
     */
    private String setupHColumn(HColumn column1, String columnKey, String columnValue) {
        when(column1.getName()).thenReturn(columnKey);
        when(column1.getValue()).thenReturn(columnValue);
        return columnValue;
    }

    /** Deep equality for HSuperColumns: same class, name and pairwise-equal columns. */
    @SuppressWarnings({"ControlFlowStatementWithoutBraces"})
    private boolean areSuperColumnsEqual(HSuperColumn superColumn1, HSuperColumn superColumn2) {
        if (superColumn1 == superColumn2) return true;
        if (superColumn2 == null) return false;
        if (superColumn1 == null) return false;
        if (superColumn1.getClass() != superColumn2.getClass()) return false;
        if (!ObjectUtils.equals(superColumn1.getName(), superColumn2.getName())) return false;
        if (superColumn1.getColumns().size() != superColumn2.getColumns().size()) return false;
        Iterator<HColumn> itr1 = superColumn1.getColumns().iterator();
        Iterator<HColumn> itr2 = superColumn2.getColumns().iterator();
        while (itr1.hasNext()) {
            if (!areColumnsEqual(itr1.next(), itr2.next())) return false;
        }
        return true;
    }

    /** Equality for HColumns: same class, name and value. */
    @SuppressWarnings({"ControlFlowStatementWithoutBraces", "SimplifiableIfStatement"})
    private boolean areColumnsEqual(HColumn column1, HColumn column2) {
        if (column1 == column2) return true;
        if (column2 == null) return false;
        if (column1 == null) return false;
        if (column1.getClass() != column2.getClass()) return false;
        if (!ObjectUtils.equals(column1.getName(), column2.getName())) return false;
        return ObjectUtils.equals(column1.getValue(), column2.getValue());
    }
}
| |
/*
* Copyright 2016-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.store.primitives.resources.impl;
import static com.google.common.base.Preconditions.checkArgument;
import java.util.Iterator;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.atomic.AtomicLong;
import org.onosproject.store.service.DocumentPath;
import org.onosproject.store.service.DocumentTree;
import org.onosproject.store.service.DocumentTreeListener;
import org.onosproject.store.service.DocumentTreeNode;
import org.onosproject.store.service.IllegalDocumentModificationException;
import org.onosproject.store.service.NoSuchDocumentPathException;
import org.onosproject.store.service.Ordering;
import org.onosproject.store.service.Versioned;
import com.google.common.base.Supplier;
import com.google.common.collect.Maps;
/**
 * Simple in-memory implementation of a {@link DocumentTree}.
 *
 * Nodes hang off a fixed root at path "root"; version numbers for created/updated values come
 * from the injected {@code versionSupplier}. Listener registration is not implemented.
 *
 * @param <V> tree node value type
 */
public class DefaultDocumentTree<V> implements DocumentTree<V> {

    // Every tree is rooted at this fixed path; getNode() requires paths to start with it.
    private static final DocumentPath ROOT_PATH = DocumentPath.from("root");
    // Package-private so other classes in this package can wrap an existing root (see the
    // package-private constructor below).
    final DefaultDocumentTreeNode<V> root;
    private final Supplier<Long> versionSupplier;

    /** Creates a tree with NATURAL child ordering and a private monotonically increasing version counter. */
    public DefaultDocumentTree() {
        AtomicLong versionCounter = new AtomicLong(0);
        versionSupplier = versionCounter::incrementAndGet;
        root = new DefaultDocumentTreeNode<>(ROOT_PATH, null, versionSupplier.get(), Ordering.NATURAL, null);
    }

    /**
     * Creates a tree with the given version source and child ordering.
     *
     * @param versionSupplier source of version numbers for new and updated values
     * @param ordering ordering of children within each node
     */
    public DefaultDocumentTree(Supplier<Long> versionSupplier, Ordering ordering) {
        root = new DefaultDocumentTreeNode<>(ROOT_PATH, null, versionSupplier.get(), ordering, null);
        this.versionSupplier = versionSupplier;
    }

    /** Wraps an already-built root node with the given version source (package-private). */
    DefaultDocumentTree(Supplier<Long> versionSupplier, DefaultDocumentTreeNode<V> root) {
        this.root = root;
        this.versionSupplier = versionSupplier;
    }

    @Override
    public String name() {
        // This in-memory implementation carries no name; callers always get null.
        return null;
    }

    @Override
    public DocumentPath root() {
        return ROOT_PATH;
    }

    /**
     * Returns the immediate children of the node at {@code path} as a name-to-value map,
     * preserving the node's child iteration order.
     *
     * @throws NoSuchDocumentPathException if no node exists at {@code path}
     */
    @Override
    public Map<String, Versioned<V>> getChildren(DocumentPath path) {
        DocumentTreeNode<V> node = getNode(path);
        if (node != null) {
            Map<String, Versioned<V>> childrenValues = Maps.newLinkedHashMap();
            node.children().forEachRemaining(n -> childrenValues.put(simpleName(n.path()), n.value()));
            return childrenValues;
        }
        throw new NoSuchDocumentPathException();
    }

    /** Returns the versioned value at {@code path}, or null if the node does not exist. */
    @Override
    public Versioned<V> get(DocumentPath path) {
        DocumentTreeNode<V> currentNode = getNode(path);
        return currentNode != null ? currentNode.value() : null;
    }

    /**
     * Sets the value at {@code path}, creating the node if it does not exist.
     *
     * @return the previous versioned value if the node existed, otherwise null
     */
    @Override
    public Versioned<V> set(DocumentPath path, V value) {
        checkRootModification(path);
        DefaultDocumentTreeNode<V> node = getNode(path);
        if (node != null) {
            return node.update(value, versionSupplier.get());
        } else {
            create(path, value);
            return null;
        }
    }

    /**
     * Creates a node at {@code path} with the given value.
     *
     * @return false if a node already exists at {@code path}, true on creation
     * @throws IllegalDocumentModificationException if the parent node does not exist
     */
    @Override
    public boolean create(DocumentPath path, V value) {
        checkRootModification(path);
        DocumentTreeNode<V> node = getNode(path);
        if (node != null) {
            return false;
        }
        DocumentPath parentPath = path.parent();
        DefaultDocumentTreeNode<V> parentNode = getNode(parentPath);
        if (parentNode == null) {
            throw new IllegalDocumentModificationException();
        }
        parentNode.addChild(simpleName(path), value, versionSupplier.get());
        return true;
    }

    /**
     * Creates a node at {@code path}, recursively creating missing ancestors with null values.
     *
     * @return false if a node already exists at {@code path}, true on creation
     */
    @Override
    public boolean createRecursive(DocumentPath path, V value) {
        checkRootModification(path);
        DocumentTreeNode<V> node = getNode(path);
        if (node != null) {
            return false;
        }
        DocumentPath parentPath = path.parent();
        if (getNode(parentPath) == null) {
            // Missing ancestors are created with a null value.
            createRecursive(parentPath, null);
        }
        DefaultDocumentTreeNode<V> parentNode = getNode(parentPath);
        if (parentNode == null) {
            throw new IllegalDocumentModificationException();
        }
        parentNode.addChild(simpleName(path), value, versionSupplier.get());
        return true;
    }

    /**
     * Compare-and-set by version: updates the value only if the node exists, has a non-null
     * value, and that value's version matches {@code version}.
     *
     * @return true if the value was replaced
     */
    @Override
    public boolean replace(DocumentPath path, V newValue, long version) {
        checkRootModification(path);
        DocumentTreeNode<V> node = getNode(path);
        if (node != null && node.value() != null && node.value().version() == version) {
            set(path, newValue);
            return true;
        }
        return false;
    }

    /**
     * Compare-and-set by value: updates only if the current value equals {@code currentValue}.
     * A no-op (returns false) when the new and current values are already equal.
     *
     * @return true if the value was replaced
     */
    @Override
    public boolean replace(DocumentPath path, V newValue, V currentValue) {
        checkRootModification(path);
        if (Objects.equals(newValue, currentValue)) {
            return false;
        }
        DocumentTreeNode<V> node = getNode(path);
        if (node != null && Objects.equals(Versioned.valueOrNull(node.value()), currentValue)) {
            set(path, newValue);
            return true;
        }
        return false;
    }

    /**
     * Removes the (childless) node at {@code path} and returns its last value.
     *
     * @throws NoSuchDocumentPathException if no node exists at {@code path}
     * @throws IllegalDocumentModificationException if the node still has children, or if
     *         {@code path} is the root
     */
    @Override
    public Versioned<V> removeNode(DocumentPath path) {
        checkRootModification(path);
        DefaultDocumentTreeNode<V> nodeToRemove = getNode(path);
        if (nodeToRemove == null) {
            throw new NoSuchDocumentPathException();
        }
        if (nodeToRemove.hasChildren()) {
            throw new IllegalDocumentModificationException();
        }
        DefaultDocumentTreeNode<V> parent = (DefaultDocumentTreeNode<V>) nodeToRemove.parent();
        parent.removeChild(simpleName(path));
        return nodeToRemove.value();
    }

    @Override
    public void addListener(DocumentPath path, DocumentTreeListener<V> listener) {
        // TODO Auto-generated method stub
        // NOTE(review): listeners are silently ignored by this implementation.
    }

    @Override
    public void removeListener(DocumentTreeListener<V> listener) {
        // TODO Auto-generated method stub
        // NOTE(review): listeners are silently ignored by this implementation.
    }

    /**
     * Walks the tree from the root along {@code path}'s elements.
     *
     * @return the node at {@code path}, or null if any element along the way is missing
     * @throws IllegalArgumentException if the path does not start with "root"
     */
    private DefaultDocumentTreeNode<V> getNode(DocumentPath path) {
        Iterator<String> pathElements = path.pathElements().iterator();
        DefaultDocumentTreeNode<V> currentNode = root;
        checkArgument("root".equals(pathElements.next()), "Path should start with root: %s", path);
        while (pathElements.hasNext() && currentNode != null) {
            currentNode = (DefaultDocumentTreeNode<V>) currentNode.child(pathElements.next());
        }
        return currentNode;
    }

    /** Returns the last element of the path, i.e. the node's own name. */
    private String simpleName(DocumentPath path) {
        return path.pathElements().get(path.pathElements().size() - 1);
    }

    /** Rejects any mutation aimed at the root node itself. */
    private void checkRootModification(DocumentPath path) {
        if (ROOT_PATH.equals(path)) {
            throw new IllegalDocumentModificationException();
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.java.util.emitter.service;
import com.google.common.collect.ImmutableMap;
import org.apache.druid.java.util.common.DateTimes;
import org.junit.Assert;
import org.junit.Test;
import java.util.Arrays;
import java.util.Collections;
/**
*/
public class ServiceMetricEventTest
{
@Test
public void testStupidTest()
{
ServiceMetricEvent builderEvent = new ServiceMetricEvent.Builder()
.setDimension("user1", "a")
.setDimension("user2", "b")
.setDimension("user3", "c")
.setDimension("user4", "d")
.setDimension("user5", "e")
.setDimension("user6", "f")
.setDimension("user7", "g")
.setDimension("user8", "h")
.setDimension("user9", "i")
.setDimension("user10", "j")
.build("test-metric", 1234)
.build("test", "localhost");
Assert.assertEquals(
ImmutableMap.<String, Object>builder()
.put("feed", "metrics")
.put("timestamp", builderEvent.getCreatedTime().toString())
.put("service", "test")
.put("host", "localhost")
.put("metric", "test-metric")
.put("user1", "a")
.put("user2", "b")
.put("user3", "c")
.put("user4", "d")
.put("user5", "e")
.put("user6", "f")
.put("user7", "g")
.put("user8", "h")
.put("user9", "i")
.put("user10", "j")
.put("value", 1234)
.build(),
builderEvent.toMap()
);
ServiceMetricEvent constructorEvent = ServiceMetricEvent
.builder()
.setDimension("user1", "a")
.setDimension("user2", "b")
.setDimension("user3", "c")
.setDimension("user4", "d")
.setDimension("user5", "e")
.setDimension("user6", "f")
.setDimension("user7", "g")
.setDimension("user8", "h")
.setDimension("user9", "i")
.setDimension("user10", "j")
.build("test-metric", 1234)
.build("test", "localhost");
Assert.assertEquals(
ImmutableMap.<String, Object>builder()
.put("feed", "metrics")
.put("timestamp", constructorEvent.getCreatedTime().toString())
.put("service", "test")
.put("host", "localhost")
.put("metric", "test-metric")
.put("user1", "a")
.put("user2", "b")
.put("user3", "c")
.put("user4", "d")
.put("user5", "e")
.put("user6", "f")
.put("user7", "g")
.put("user8", "h")
.put("user9", "i")
.put("user10", "j")
.put("value", 1234)
.build(), constructorEvent.toMap()
);
ServiceMetricEvent arrayConstructorEvent = ServiceMetricEvent
.builder()
.setDimension("user1", new String[]{"a"})
.setDimension("user2", new String[]{"b"})
.setDimension("user3", new String[]{"c"})
.setDimension("user4", new String[]{"d"})
.setDimension("user5", new String[]{"e"})
.setDimension("user6", new String[]{"f"})
.setDimension("user7", new String[]{"g"})
.setDimension("user8", new String[]{"h"})
.setDimension("user9", new String[]{"i"})
.setDimension("user10", new String[]{"j"})
.build("test-metric", 1234)
.build("test", "localhost");
Assert.assertEquals(
ImmutableMap.<String, Object>builder()
.put("feed", "metrics")
.put("timestamp", arrayConstructorEvent.getCreatedTime().toString())
.put("service", "test")
.put("host", "localhost")
.put("metric", "test-metric")
.put("user1", Collections.singletonList("a"))
.put("user2", Collections.singletonList("b"))
.put("user3", Collections.singletonList("c"))
.put("user4", Collections.singletonList("d"))
.put("user5", Collections.singletonList("e"))
.put("user6", Collections.singletonList("f"))
.put("user7", Collections.singletonList("g"))
.put("user8", Collections.singletonList("h"))
.put("user9", Collections.singletonList("i"))
.put("user10", Collections.singletonList("j"))
.put("value", 1234)
.build(), arrayConstructorEvent.toMap()
);
Assert.assertNotNull(
new ServiceMetricEvent.Builder()
.setDimension("user1", "a")
.setDimension("user2", "b")
.setDimension("user3", "c")
.setDimension("user4", "d")
.setDimension("user5", "e")
.setDimension("user6", "f")
.setDimension("user7", "g")
.setDimension("user8", "h")
.setDimension("user9", "i")
.setDimension("user10", "j")
.build(null, "test-metric", 1234)
.build("test", "localhost")
.getCreatedTime()
);
Assert.assertNotNull(
ServiceMetricEvent.builder()
.setDimension("user1", new String[]{"a"})
.setDimension("user2", new String[]{"b"})
.setDimension("user3", new String[]{"c"})
.setDimension("user4", new String[]{"d"})
.setDimension("user5", new String[]{"e"})
.setDimension("user6", new String[]{"f"})
.setDimension("user7", new String[]{"g"})
.setDimension("user8", new String[]{"h"})
.setDimension("user9", new String[]{"i"})
.setDimension("user10", new String[]{"j"})
.build("test-metric", 1234)
.build("test", "localhost")
.getCreatedTime()
);
Assert.assertEquals(
ImmutableMap.<String, Object>builder()
.put("feed", "metrics")
.put("timestamp", DateTimes.utc(42).toString())
.put("service", "test")
.put("host", "localhost")
.put("metric", "test-metric")
.put("user1", "a")
.put("user2", "b")
.put("user3", "c")
.put("user4", "d")
.put("user5", "e")
.put("user6", "f")
.put("user7", "g")
.put("user8", "h")
.put("user9", "i")
.put("user10", "j")
.put("value", 1234)
.build(),
new ServiceMetricEvent.Builder()
.setDimension("user1", "a")
.setDimension("user2", "b")
.setDimension("user3", "c")
.setDimension("user4", "d")
.setDimension("user5", "e")
.setDimension("user6", "f")
.setDimension("user7", "g")
.setDimension("user8", "h")
.setDimension("user9", "i")
.setDimension("user10", "j")
.build(DateTimes.utc(42), "test-metric", 1234)
.build("test", "localhost")
.toMap()
);
Assert.assertEquals(
ImmutableMap.<String, Object>builder()
.put("feed", "metrics")
.put("timestamp", DateTimes.utc(42).toString())
.put("service", "test")
.put("host", "localhost")
.put("metric", "test-metric")
.put("user1", Collections.singletonList("a"))
.put("user2", Collections.singletonList("b"))
.put("user3", Collections.singletonList("c"))
.put("user4", Collections.singletonList("d"))
.put("user5", Collections.singletonList("e"))
.put("user6", Collections.singletonList("f"))
.put("user7", Collections.singletonList("g"))
.put("user8", Collections.singletonList("h"))
.put("user9", Collections.singletonList("i"))
.put("user10", Collections.singletonList("j"))
.put("value", 1234)
.build(),
ServiceMetricEvent.builder()
.setDimension("user1", new String[]{"a"})
.setDimension("user2", new String[]{"b"})
.setDimension("user3", new String[]{"c"})
.setDimension("user4", new String[]{"d"})
.setDimension("user5", new String[]{"e"})
.setDimension("user6", new String[]{"f"})
.setDimension("user7", new String[]{"g"})
.setDimension("user8", new String[]{"h"})
.setDimension("user9", new String[]{"i"})
.setDimension("user10", new String[]{"j"})
.build(DateTimes.utc(42), "test-metric", 1234)
.build("test", "localhost")
.toMap()
);
Assert.assertEquals(
ImmutableMap.<String, Object>builder()
.put("feed", "metrics")
.put("timestamp", DateTimes.utc(42).toString())
.put("service", "test")
.put("host", "localhost")
.put("metric", "test-metric")
.put("foo", "bar")
.put("baz", Arrays.asList("foo", "qux"))
.put("value", 1234)
.build(),
ServiceMetricEvent.builder()
.setDimension("foo", "bar")
.setDimension("baz", new String[]{"foo", "qux"})
.build(DateTimes.utc(42), "test-metric", 1234)
.build("test", "localhost")
.toMap()
);
}
@Test(expected = IllegalStateException.class)
public void testInfinite()
{
ServiceMetricEvent.builder().build("foo", 1 / 0d);
}
@Test(expected = IllegalStateException.class)
public void testInfinite2()
{
ServiceMetricEvent.builder().build("foo", 1 / 0f);
}
@Test(expected = IllegalStateException.class)
public void testNaN()
{
ServiceMetricEvent.builder().build("foo", 0 / 0d);
}
@Test(expected = IllegalStateException.class)
public void testNaN2()
{
ServiceMetricEvent.builder().build("foo", 0 / 0f);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.vault.packaging;
import org.apache.jackrabbit.util.XMLChar;
import org.jetbrains.annotations.NotNull;
import org.apache.jackrabbit.util.Text;
/**
* {@code PackageId} provides the basic metrics for identifying a package.
* A package id consists of a group, a name and an optional version.
* The group is a relative path, eg: "company/project/subgroup", the name and the version
* can be of any format.
* <p>
* The string representation is {@code <group>:<name>[:<version>]}.
*/
public class PackageId implements Comparable<PackageId> {
/**
* The root path of the packages storage location.
* @deprecated As of 3.1.42, the storage location is implementation details.
*/
@Deprecated
public static final String ETC_PACKAGES = "/etc/packages";
/**
* The root path prefix of the packages storage location.
* @deprecated As of 3.1.42, the storage location is implementation details.
*/
@Deprecated
public static final String ETC_PACKAGES_PREFIX = "/etc/packages/";
public static final PackageId[] EMPTY = new PackageId[0];
private final String group;
private final String name;
private final Version version;
private final String str;
private final boolean fromPath;
/**
 * Creates a new package id by parsing a storage path. A trailing ".zip" or
 * ".jar" extension is stripped, the "/etc/packages" prefix is removed from
 * the group, and a version suffix is heuristically split off the file name.
 * @param path path of the package
 *
 * @deprecated As of 3.1.42, the storage location is implementation details.
 */
@Deprecated
public PackageId(String path) {
    fromPath = true;
    path = path.trim();
    // strip a .zip/.jar extension, if present
    int idx = path.lastIndexOf('.');
    if (idx > 0) {
        String ext = path.substring(idx);
        if (".zip".equalsIgnoreCase(ext) || ".jar".equalsIgnoreCase(ext)) {
            path = path.substring(0, idx);
        }
    }
    // split into group (everything before the last '/') and file name
    idx = path.lastIndexOf('/');
    String name;
    if (idx < 0) {
        name = path;
        this.group = "";
    } else {
        name = path.substring(idx + 1);
        String grp = path.substring(0, idx);
        // normalize away the legacy /etc/packages storage prefix
        if (grp.equals(ETC_PACKAGES)) {
            grp = "";
        } else if (grp.startsWith(ETC_PACKAGES_PREFIX)) {
            grp = grp.substring(ETC_PACKAGES_PREFIX.length());
        } else if (grp.startsWith("/")) {
            grp = grp.substring(1);
        }
        this.group = grp;
    }
    // check if name contains a version: scan the '-'-separated segments from
    // the right; segment i belongs to the version part unless it looks like a
    // name segment, in which case the scan stops with i as the last name index.
    String[] segs = Text.explode(name, '-');
    int i=segs.length-1;
    while (i>0) {
        try {
            // accept numbers < 1000 (hotfix case)
            if (Integer.parseInt(segs[i]) >= 1000) {
                break;
            }
        } catch (NumberFormatException e) {
            // ignore
        }
        // check if starts with a letter'
        if (Character.isJavaIdentifierStart(segs[i].charAt(0))) {
            // then need a digit (e.g. "rc1") or the literal "SNAPSHOT" to count as version
            if (segs[i].length() == 1 || !Character.isDigit(segs[i].charAt(1)) && !"SNAPSHOT".equals(segs[i])) {
                break;
            }
        }
        i--;
    }
    if (i == segs.length-1) {
        // no version suffix detected; the whole file name is the package name
        this.name = name;
        version = Version.EMPTY;
    } else {
        // rejoin segments [0..i] as the name and [i+1..] as the version
        StringBuilder str = new StringBuilder();
        for (int j = 0; j<= i; j++) {
            if (j > 0) {
                str.append('-');
            }
            str.append(segs[j]);
        }
        this.name = str.toString();
        str.setLength(0);
        for (int j = i+1; j<segs.length; j++) {
            if (j > i+1) {
                str.append('-');
            }
            str.append(segs[j]);
        }
        this.version = Version.create(str.toString());
    }
    // cache the canonical "group:name[:version]" representation
    this.str = getString(group, this.name, version);
}
/**
 * Creates a new package id from a path and a string version. Delegates to
 * {@link #PackageId(String, Version)} after parsing the version.
 * @param path path of the package
 * @param version version of the package
 *
 * @deprecated As of 3.1.42, the storage location is implementation details.
 */
@Deprecated
public PackageId(String path, String version) {
    this(path, Version.create(version));
}
/**
 * Creates a new package id from a path and an explicit version. A trailing
 * ".zip"/".jar" extension and a trailing "-&lt;version&gt;" suffix are stripped
 * from the path before it is split into group and name.
 * @param path path of the package
 * @param version version of the package; {@code null} or empty is normalized
 *                to {@code Version.EMPTY}
 *
 * @deprecated As of 3.1.42, the storage location is implementation details.
 */
@Deprecated
public PackageId(String path, Version version) {
    fromPath = true;
    path = path.trim();
    // strip a .zip/.jar extension, if present
    int idx = path.lastIndexOf('.');
    if (idx > 0) {
        String ext = path.substring(idx);
        if (".zip".equalsIgnoreCase(ext) || ".jar".equalsIgnoreCase(ext)) {
            path = path.substring(0, idx);
        }
    }
    // drop a trailing "-<version>" so the version is not duplicated in the name
    if (version != null && path.endsWith('-'+version.toString())) {
        path = path.substring(0, path.length() - version.toString().length() - 1);
    }
    idx = path.lastIndexOf('/');
    if (idx < 0) {
        this.name = path;
        this.group = "";
    } else {
        this.name = path.substring(idx + 1);
        String grp = path.substring(0, idx);
        // normalize away the legacy /etc/packages storage prefix
        if (grp.equals(ETC_PACKAGES)) {
            grp = "";
        } else if (grp.startsWith(ETC_PACKAGES_PREFIX)) {
            grp = grp.substring(ETC_PACKAGES_PREFIX.length());
        } else if (grp.startsWith("/")) {
            grp = grp.substring(1);
        }
        this.group = grp;
    }
    // sanitize version
    if (version == null || version.toString().length() == 0) {
        version = Version.EMPTY;
    }
    this.version = version;
    this.str = getString(group, name, version);
}
/**
 * Creates a new package id from its three components. Delegates to
 * {@link #PackageId(String, String, Version)} after parsing the version.
 * @param group group id
 * @param name name
 * @param version version
 */
public PackageId(String group, String name, String version) {
    this(group, name, Version.create(version));
}
/**
* Creates a new package id
* @param group group id
* @param name name
* @param version version
*/
public PackageId(String group, String name, Version version) {
fromPath = false;
// validate group
if (group.equals(ETC_PACKAGES)) {
group = "";
} else if (group.startsWith(ETC_PACKAGES_PREFIX)) {
group = group.substring(ETC_PACKAGES_PREFIX.length());
} else if (group.startsWith("/")) {
group = group.substring(1);
}
this.group = group;
this.name = name;
this.version = version == null ? Version.EMPTY : version;
this.str = getString(this.group, name, this.version);
}
/**
 * Returns a package id from an id string in the format {@code <group>:<name>[:<version>]}.
 * If the given id is null or an empty string, {@code null} is returned.
 * @param str the string
 * @return the package id
 */
public static PackageId fromString(String str) {
    if (str == null || str.isEmpty()) {
        return null;
    }
    String[] parts = str.split(":");
    switch (parts.length) {
        case 1:
            // name only
            return new PackageId("", parts[0], "");
        case 2:
            // group and name
            return new PackageId(parts[0], parts[1], "");
        default:
            // group, name and version (extra segments are ignored)
            return new PackageId(parts[0], parts[1], parts[2]);
    }
}
/**
 * Returns an array of package ids parsed from the given strings.
 * @param str the strings
 * @return the array of package ids
 * @see #fromString(String)
 */
public static PackageId[] fromString(String ... str) {
    PackageId[] ids = new PackageId[str.length];
    int idx = 0;
    for (String s : str) {
        ids[idx++] = fromString(s);
    }
    return ids;
}
/**
 * Creates a comma separated list of id strings in the format
 * {@code <group>:<name>[:<version>]}.
 * @param packs the ids
 * @return the string
 */
public static String toString(PackageId ... packs) {
    StringBuilder result = new StringBuilder();
    for (int i = 0; i < packs.length; i++) {
        if (i > 0) {
            result.append(',');
        }
        result.append(packs[i]);
    }
    return result.toString();
}
/**
 * Checks if this definition was constructed from a path, rather than from a
 * group and name.
 * @return {@code true} if constructed from path.
 *
 * @since 2.2.26
 * @deprecated As of 3.1.42, the storage location is implementation details.
 */
@Deprecated
public boolean isFromPath() {
    return fromPath;
}
/**
 * Returns the path of this package. Please note that since 2.3 this also
 * includes the version, but never the extension (.zip).
 *
 * @return the path of this package
 * @since 2.2
 * @deprecated As of 3.1.42, the storage location is implementation details.
 */
@Deprecated
public String getInstallationPath() {
    StringBuilder path = new StringBuilder(ETC_PACKAGES_PREFIX);
    if (!group.isEmpty()) {
        path.append(group).append('/');
    }
    path.append(name);
    String v = version.toString();
    if (!v.isEmpty()) {
        path.append('-').append(v);
    }
    return path.toString();
}
/**
 * Returns the group of this package (the empty string for the root group).
 * @return the group.
 * @since 2.2
 */
public String getGroup() {
    return group;
}
/**
 * Returns the name of this package (usually this is the last segment of the path).
 * @return the name of this package.
 */
public @NotNull String getName() {
    return name;
}
/**
 * Returns the version of this package as a string, or an empty string if n/a.
 * @return the version of this package
 * @since 2.0
 */
public @NotNull String getVersionString() {
    return version.toString();
}
/**
 * Returns a download name in the form
 * {@code name [ "-" version ] ".zip"}
 * @return the download name
 * @since 2.0
 */
public String getDownloadName() {
    String v = version.toString();
    StringBuilder download = new StringBuilder(name);
    if (!v.isEmpty()) {
        download.append('-').append(v);
    }
    return download.append(".zip").toString();
}
/**
 * Returns the version of this package or {@code Version.EMPTY} if not set.
 * @return the version of this package
 */
public @NotNull Version getVersion() {
    return version;
}
/**
 * Returns a string representation of this id in the format
 * {@code <group>:<name>[:<version>]} (cached at construction time).
 */
@Override
public String toString() {
    return str;
}
/**
 * Two package ids are equal iff their canonical string representations match.
 */
@Override
public boolean equals(Object o) {
    if (this == o) {
        return true;
    }
    return o instanceof PackageId && str.equals(o.toString());
}
@Override
public int hashCode() {
    // delegates to the cached canonical string, consistent with equals()
    return str.hashCode();
}
/**
 * {@inheritDoc}
 *
 * Compares this id with the given one, ordering by group, then name,
 * then version.
 */
public int compareTo(PackageId o) {
    int result = group.compareTo(o.getGroup());
    if (result == 0) {
        result = name.compareTo(o.getName());
    }
    if (result == 0) {
        result = version.compareTo(o.getVersion());
    }
    return result;
}
/**
 * Internally get the string representation, colon separated.
 * @param group group name
 * @param name name
 * @param version version; {@code null} is treated as the empty version
 * @return string version
 */
private static String getString(String group, String name, Version version) {
    return getString(group, name, version == null ? "" : version.toString());
}
/**
 * Internally get the string representation, colon separated; the version
 * part is omitted when empty.
 * @param group group name
 * @param name name
 * @param version version
 * @return string version
 */
private static String getString(String group, String name, String version) {
    String base = group + ':' + name;
    return version.isEmpty() ? base : base + ':' + version;
}
/**
 * Checks if this package id is valid in respect to JCR names.
 * @return {@code true} if the names are valid
 */
public boolean isValid() {
    return PackageId.isValid(group, name, version == null ? null : version.toString());
}
/**
 * Checks if the package id is valid in respect to JCR names: the name, the
 * (optional) version, and every '/'-separated segment of the group must each
 * be a valid JCR name.
 * @param group the package group name
 * @param name the package name
 * @param version the (optional) version
 * @return {@code true} if the names are valid
 */
public static boolean isValid(String group, String name, String version) {
    try {
        assertValidJcrName(name);
        if (version != null && !version.isEmpty()) {
            assertValidJcrName(version);
        }
        // validate each group segment individually
        for (String groupSegment: Text.explode(group, '/')) {
            assertValidJcrName(groupSegment);
        }
        return true;
    } catch (IllegalArgumentException e) {
        // any invalid segment makes the whole id invalid
        return false;
    }
}
// the code below is copied from org.apache.jackrabbit.spi.commons.conversion.NameParser
// constants for the parser's state machine
private static final int STATE_PREFIX_START = 0;
private static final int STATE_PREFIX = 1;
private static final int STATE_NAME_START = 2;
private static final int STATE_NAME = 3;
private static final int STATE_URI_START = 4;
private static final int STATE_URI = 5;
/**
 * Parses the {@code jcrName} (either qualified {@code prefix:local} or
 * expanded {@code {uri}local}) and validates it using a character-by-character
 * state machine over the states above.
 * @param jcrName the name to validate
 * @throws java.lang.IllegalArgumentException if the name is not valid
 */
private static void assertValidJcrName(String jcrName) throws IllegalArgumentException {
    // trivial check
    int len = jcrName == null ? 0 : jcrName.length();
    if (len == 0) {
        throw new IllegalArgumentException("empty name");
    }
    if (".".equals(jcrName) || "..".equals(jcrName)) {
        throw new IllegalArgumentException(jcrName);
    }
    // parse the name
    String prefix;
    int nameStart = 0;
    int state = STATE_PREFIX_START;
    boolean trailingSpaces = false;
    for (int i = 0; i < len; i++) {
        char c = jcrName.charAt(i);
        if (c == ':') {
            if (state == STATE_PREFIX_START) {
                throw new IllegalArgumentException("Prefix must not be empty");
            } else if (state == STATE_PREFIX) {
                if (trailingSpaces) {
                    throw new IllegalArgumentException("Trailing spaces not allowed");
                }
                prefix = jcrName.substring(0, i);
                if (!XMLChar.isValidNCName(prefix)) {
                    throw new IllegalArgumentException("Invalid name prefix: "+ prefix);
                }
                state = STATE_NAME_START;
            } else if (state == STATE_URI) {
                // ignore -> validation of uri later on.
            } else {
                throw new IllegalArgumentException("'" + c + "' not allowed in name");
            }
            trailingSpaces = false;
        } else if (c == ' ') {
            // spaces are allowed inside, but not at the start of, prefix/name
            if (state == STATE_PREFIX_START || state == STATE_NAME_START) {
                throw new IllegalArgumentException("'" + c + "' not valid name start");
            }
            trailingSpaces = true;
        } else if (Character.isWhitespace(c) || c == '[' || c == ']' || c == '*' || c == '|') {
            throw new IllegalArgumentException("'" + c + "' not allowed in name");
        } else if (c == '/') {
            // '/' is only legal inside the {uri} part of an expanded name
            if (state == STATE_URI_START) {
                state = STATE_URI;
            } else if (state != STATE_URI) {
                throw new IllegalArgumentException("'" + c + "' not allowed in name");
            }
            trailingSpaces = false;
        } else if (c == '{') {
            if (state == STATE_PREFIX_START) {
                state = STATE_URI_START;
            } else if (state == STATE_URI_START || state == STATE_URI) {
                // second '{' in the uri-part -> no valid expanded jcr-name.
                // therefore reset the nameStart and change state.
                state = STATE_NAME;
                nameStart = 0;
            } else if (state == STATE_NAME_START) {
                state = STATE_NAME;
                nameStart = i;
            }
            trailingSpaces = false;
        } else if (c == '}') {
            if (state == STATE_URI_START || state == STATE_URI) {
                String tmp = jcrName.substring(1, i);
                if (tmp.length() == 0 || tmp.indexOf(':') != -1) {
                    // The leading "{...}" part is empty or contains
                    // a colon, so we treat it as a valid namespace URI.
                    // More detailed validity checks (is it well formed,
                    // registered, etc.) are not needed here.
                    state = STATE_NAME_START;
                } else if ("internal".equals(tmp)) {
                    // As a special Jackrabbit backwards compatibility
                    // feature, support {internal} as a valid URI prefix
                    state = STATE_NAME_START;
                } else if (tmp.indexOf('/') == -1) {
                    // The leading "{...}" contains neither a colon nor
                    // a slash, so we can interpret it as a a part of a
                    // normal local name.
                    state = STATE_NAME;
                    nameStart = 0;
                } else {
                    throw new IllegalArgumentException(
                            "The URI prefix of the name " + jcrName
                            + " is neither a valid URI nor a valid part"
                            + " of a local name.");
                }
            } else if (state == STATE_PREFIX_START) {
                state = STATE_PREFIX; // prefix start -> validation later on will fail.
            } else if (state == STATE_NAME_START) {
                state = STATE_NAME;
                nameStart = i;
            }
            trailingSpaces = false;
        } else {
            // any other character simply advances the current part
            if (state == STATE_PREFIX_START) {
                state = STATE_PREFIX; // prefix start
            } else if (state == STATE_NAME_START) {
                state = STATE_NAME;
                nameStart = i;
            } else if (state == STATE_URI_START) {
                state = STATE_URI;
            }
            trailingSpaces = false;
        }
    }
    // take care of qualified jcrNames starting with '{' that are not having
    // a terminating '}' -> make sure there are no illegal characters present.
    if (state == STATE_URI && (jcrName.indexOf(':') > -1 || jcrName.indexOf('/') > -1)) {
        throw new IllegalArgumentException("Local name may not contain ':' nor '/'");
    }
    if (nameStart == len || state == STATE_NAME_START) {
        throw new IllegalArgumentException("Local name must not be empty");
    }
    if (trailingSpaces) {
        throw new IllegalArgumentException("Trailing spaces not allowed");
    }
}
}
| |
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// This is an on-disk cache which maps a 64-bits key to a byte array.
//
// It consists of three files: one index file and two data files. One of the
// data files is "active", and the other is "inactive". New entries are
// appended into the active region until it reaches the size limit. At that
// point the active file and the inactive file are swapped, and the new active
// file is truncated to empty (and the index for that file is also cleared).
// The index is a hash table with linear probing. When the load factor reaches
// 0.5, it does the same thing like when the size limit is reached.
//
// The index file format: (all numbers are stored in little-endian)
// [0] Magic number: 0xB3273030
// [4] MaxEntries: Max number of hash entries per region.
// [8] MaxBytes: Max number of data bytes per region (including header).
// [12] ActiveRegion: The active growing region: 0 or 1.
// [16] ActiveEntries: The number of hash entries used in the active region.
// [20] ActiveBytes: The number of data bytes used in the active region.
// [24] Version number.
// [28] Checksum of [0..28).
// [32] Hash entries for region 0. The size is X = (12 * MaxEntries bytes).
// [32 + X] Hash entries for region 1. The size is also X.
//
// Each hash entry is 12 bytes: 8 bytes key and 4 bytes offset into the data
// file. The offset is 0 when the slot is free. Note that 0 is a valid value
// for key. The keys are used directly as index into a hash table, so they
// should be suitably distributed.
//
// Each data file stores data for one region. The data file is concatenated
// blobs followed by the magic number 0xBD248510.
//
// The blob format:
// [0] Key of this blob
// [8] Checksum of this blob
// [12] Offset of this blob
// [16] Length of this blob (not including header)
// [20] Blob
//
// Below are the interface for BlobCache. The instance of this class does not
// support concurrent use by multiple threads.
//
// public BlobCache(String path, int maxEntries, int maxBytes, boolean reset) throws IOException;
// public void insert(long key, byte[] data) throws IOException;
// public byte[] lookup(long key) throws IOException;
// public void lookup(LookupRequest req) throws IOException;
// public void close();
// public void syncIndex();
// public void syncAll();
// public static void deleteFiles(String path);
//
package com.gotye.sdk.utils;
import java.io.Closeable;
import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.ByteOrder;
import java.nio.MappedByteBuffer;
import java.nio.channels.FileChannel;
import java.util.zip.Adler32;
import android.util.Log;
public class BlobCache implements Closeable {
private static final String TAG = "BlobCache";
private static final int MAGIC_INDEX_FILE = 0xB3273030;
private static final int MAGIC_DATA_FILE = 0xBD248510;
// index header offset
private static final int IH_MAGIC = 0;
private static final int IH_MAX_ENTRIES = 4;
private static final int IH_MAX_BYTES = 8;
private static final int IH_ACTIVE_REGION = 12;
private static final int IH_ACTIVE_ENTRIES = 16;
private static final int IH_ACTIVE_BYTES = 20;
private static final int IH_VERSION = 24;
private static final int IH_CHECKSUM = 28;
private static final int INDEX_HEADER_SIZE = 32;
private static final int DATA_HEADER_SIZE = 4;
// blob header offset
private static final int BH_KEY = 0;
private static final int BH_CHECKSUM = 8;
private static final int BH_OFFSET = 12;
private static final int BH_LENGTH = 16;
private static final int BLOB_HEADER_SIZE = 20;
private RandomAccessFile mIndexFile;
private RandomAccessFile mDataFile0;
private RandomAccessFile mDataFile1;
private FileChannel mIndexChannel;
private MappedByteBuffer mIndexBuffer;
private int mMaxEntries;
private int mMaxBytes;
private int mActiveRegion;
private int mActiveEntries;
private int mActiveBytes;
private int mVersion;
private RandomAccessFile mActiveDataFile;
private RandomAccessFile mInactiveDataFile;
private int mActiveHashStart;
private int mInactiveHashStart;
private byte[] mIndexHeader = new byte[INDEX_HEADER_SIZE];
private byte[] mBlobHeader = new byte[BLOB_HEADER_SIZE];
private Adler32 mAdler32 = new Adler32();
// Creates the cache. Three files will be created:
// path + ".idx", path + ".0", and path + ".1"
// The ".0" file and the ".1" file each stores data for a region. Each of
// them can grow to the size specified by maxBytes. The maxEntries parameter
// specifies the maximum number of entries each region can have. If the
// "reset" parameter is true, the cache will be cleared before use.
public BlobCache(String path, int maxEntries, int maxBytes, boolean reset)
        throws IOException {
    // Delegates to the full constructor with cache version 0.
    this(path, maxEntries, maxBytes, reset, 0);
}
// Same as the 4-argument constructor, but additionally takes a version
// number that must match the one stored in the index header; a mismatch
// causes the cache to be reset.
//
// Fix: the original leaked the three RandomAccessFiles whenever construction
// failed (it only called closeAll() on the final loadIndex() failure, not
// when resetCache() or one of the opens threw). All handles are now released
// on every failure path; closeAll()/closeSilently tolerate partially
// initialized state, as the original final-failure path already relied on.
public BlobCache(String path, int maxEntries, int maxBytes, boolean reset,
        int version) throws IOException {
    boolean success = false;
    try {
        mIndexFile = new RandomAccessFile(path + ".idx", "rw");
        mDataFile0 = new RandomAccessFile(path + ".0", "rw");
        mDataFile1 = new RandomAccessFile(path + ".1", "rw");
        mVersion = version;
        if (!reset && loadIndex()) {
            success = true;
            return;
        }
        resetCache(maxEntries, maxBytes);
        if (!loadIndex()) {
            throw new IOException("unable to load index");
        }
        success = true;
    } finally {
        if (!success) {
            closeAll();
        }
    }
}
// Delete the files associated with the given path previously created
// by the BlobCache constructor (the index file and both region files).
public static void deleteFiles(String path) {
    for (String suffix : new String[] {".idx", ".0", ".1"}) {
        deleteFileSilently(path + suffix);
    }
}
// Best-effort delete: any failure (including SecurityException) is
// deliberately swallowed because cache files are disposable.
private static void deleteFileSilently(String path) {
    try {
        new File(path).delete();
    } catch (Throwable t) {
        // ignore;
    }
}
// Close the cache. All resources are released. No other method should be
// called after this is called. Pending index/data changes are flushed to
// disk before the underlying files are closed.
@Override
public void close() {
    syncAll();
    closeAll();
}
// Closes the index channel and all three files, ignoring individual
// failures so every handle gets a close attempt.
private void closeAll() {
    closeSilently(mIndexChannel);
    closeSilently(mIndexFile);
    closeSilently(mDataFile0);
    closeSilently(mDataFile1);
}
// Returns true if loading index is successful. After this method is called,
// mIndexHeader and index header in file should be kept sync.
// On any validation failure it logs a warning and returns false so the
// caller can reset the cache.
private boolean loadIndex() {
    try {
        mIndexFile.seek(0);
        mDataFile0.seek(0);
        mDataFile1.seek(0);
        byte[] buf = mIndexHeader;
        // read and validate the 32-byte index header
        if (mIndexFile.read(buf) != INDEX_HEADER_SIZE) {
            Log.w(TAG, "cannot read header");
            return false;
        }
        if (readInt(buf, IH_MAGIC) != MAGIC_INDEX_FILE) {
            Log.w(TAG, "cannot read header magic");
            return false;
        }
        if (readInt(buf, IH_VERSION) != mVersion) {
            Log.w(TAG, "version mismatch");
            return false;
        }
        mMaxEntries = readInt(buf, IH_MAX_ENTRIES);
        mMaxBytes = readInt(buf, IH_MAX_BYTES);
        mActiveRegion = readInt(buf, IH_ACTIVE_REGION);
        mActiveEntries = readInt(buf, IH_ACTIVE_ENTRIES);
        mActiveBytes = readInt(buf, IH_ACTIVE_BYTES);
        int sum = readInt(buf, IH_CHECKSUM);
        if (checkSum(buf, 0, IH_CHECKSUM) != sum) {
            Log.w(TAG, "header checksum does not match");
            return false;
        }
        // Sanity check
        if (mMaxEntries <= 0) {
            Log.w(TAG, "invalid max entries");
            return false;
        }
        if (mMaxBytes <= 0) {
            Log.w(TAG, "invalid max bytes");
            return false;
        }
        if (mActiveRegion != 0 && mActiveRegion != 1) {
            Log.w(TAG, "invalid active region");
            return false;
        }
        if (mActiveEntries < 0 || mActiveEntries > mMaxEntries) {
            Log.w(TAG, "invalid active entries");
            return false;
        }
        if (mActiveBytes < DATA_HEADER_SIZE || mActiveBytes > mMaxBytes) {
            Log.w(TAG, "invalid active bytes");
            return false;
        }
        // header + two hash tables of 12-byte entries
        if (mIndexFile.length() !=
                INDEX_HEADER_SIZE + mMaxEntries * 12 * 2) {
            Log.w(TAG, "invalid index file length");
            return false;
        }
        // Make sure data file has magic
        byte[] magic = new byte[4];
        if (mDataFile0.read(magic) != 4) {
            Log.w(TAG, "cannot read data file magic");
            return false;
        }
        if (readInt(magic, 0) != MAGIC_DATA_FILE) {
            Log.w(TAG, "invalid data file magic");
            return false;
        }
        if (mDataFile1.read(magic) != 4) {
            Log.w(TAG, "cannot read data file magic");
            return false;
        }
        if (readInt(magic, 0) != MAGIC_DATA_FILE) {
            Log.w(TAG, "invalid data file magic");
            return false;
        }
        // Map index file to memory; all values are little-endian on disk
        mIndexChannel = mIndexFile.getChannel();
        mIndexBuffer = mIndexChannel.map(FileChannel.MapMode.READ_WRITE,
                0, mIndexFile.length());
        mIndexBuffer.order(ByteOrder.LITTLE_ENDIAN);
        setActiveVariables();
        return true;
    } catch (IOException ex) {
        Log.e(TAG, "loadIndex failed.", ex);
        return false;
    }
}
// Derives the active/inactive file handles and hash-table offsets from
// mActiveRegion, and positions the active data file at its append point.
private void setActiveVariables() throws IOException {
    mActiveDataFile = (mActiveRegion == 0) ? mDataFile0 : mDataFile1;
    mInactiveDataFile = (mActiveRegion == 1) ? mDataFile0 : mDataFile1;
    // truncate stale bytes beyond the recorded size and seek to the end
    mActiveDataFile.setLength(mActiveBytes);
    mActiveDataFile.seek(mActiveBytes);
    mActiveHashStart = INDEX_HEADER_SIZE;
    mInactiveHashStart = INDEX_HEADER_SIZE;
    // region 0's hash table comes first in the index file (12 bytes/entry)
    if (mActiveRegion == 0) {
        mInactiveHashStart += mMaxEntries * 12;
    } else {
        mActiveHashStart += mMaxEntries * 12;
    }
}
// Re-initializes the cache to an empty state with the given capacity:
// truncates and re-extends the index file, writes a fresh index header
// (region 0 active, zero entries, active bytes = DATA_HEADER_SIZE), and
// resets both data files to contain only the 4-byte data-file magic.
private void resetCache(int maxEntries, int maxBytes) throws IOException {
    mIndexFile.setLength(0); // truncate to zero the index
    // Index layout: header followed by two hash regions of
    // maxEntries * 12 bytes each (12 = 8-byte key + 4-byte offset).
    mIndexFile.setLength(INDEX_HEADER_SIZE + maxEntries * 12 * 2);
    mIndexFile.seek(0);
    byte[] buf = mIndexHeader;
    writeInt(buf, IH_MAGIC, MAGIC_INDEX_FILE);
    writeInt(buf, IH_MAX_ENTRIES, maxEntries);
    writeInt(buf, IH_MAX_BYTES, maxBytes);
    writeInt(buf, IH_ACTIVE_REGION, 0);
    writeInt(buf, IH_ACTIVE_ENTRIES, 0);
    writeInt(buf, IH_ACTIVE_BYTES, DATA_HEADER_SIZE);
    writeInt(buf, IH_VERSION, mVersion);
    // The checksum covers every header byte before the checksum field.
    writeInt(buf, IH_CHECKSUM, checkSum(buf, 0, IH_CHECKSUM));
    mIndexFile.write(buf);
    // This is only needed if setLength does not zero the extended part.
    // writeZero(mIndexFile, maxEntries * 12 * 2);
    mDataFile0.setLength(0);
    mDataFile1.setLength(0);
    mDataFile0.seek(0);
    mDataFile1.seek(0);
    // Reuse buf as scratch space for the 4-byte data-file magic.
    writeInt(buf, 0, MAGIC_DATA_FILE);
    mDataFile0.write(buf, 0, 4);
    mDataFile1.write(buf, 0, 4);
}
// Flip the active region and the inactive region.
//
// The current active region becomes inactive (its blobs stay readable
// through lookup()); the previously inactive region is reused as the new,
// empty active region: its hash table is cleared and its byte count is
// reset to just the data-file header. The updated header and hash table
// are then forced out to disk.
private void flipRegion() throws IOException {
    mActiveRegion = 1 - mActiveRegion;
    mActiveEntries = 0;
    mActiveBytes = DATA_HEADER_SIZE;
    writeInt(mIndexHeader, IH_ACTIVE_REGION, mActiveRegion);
    writeInt(mIndexHeader, IH_ACTIVE_ENTRIES, mActiveEntries);
    writeInt(mIndexHeader, IH_ACTIVE_BYTES, mActiveBytes);
    updateIndexHeader();
    // Re-derive the active/inactive files and hash offsets, then wipe the
    // hash region that just became active.
    setActiveVariables();
    clearHash(mActiveHashStart);
    syncIndex();
}
// Sync mIndexHeader to the index file.
// Recomputes the header checksum, then copies the header bytes into the
// memory-mapped index buffer; the OS-level flush happens later via
// syncIndex().
private void updateIndexHeader() {
    writeInt(mIndexHeader, IH_CHECKSUM,
            checkSum(mIndexHeader, 0, IH_CHECKSUM));
    mIndexBuffer.position(0);
    mIndexBuffer.put(mIndexHeader);
}
// Clear the hash table starting from the specified offset.
// Zeroes out mMaxEntries * 12 bytes of the index buffer, written in
// 1 KB chunks.
private void clearHash(int hashStart) {
    byte[] zeroChunk = new byte[1024];
    mIndexBuffer.position(hashStart);
    int remaining = mMaxEntries * 12;
    while (remaining > 0) {
        int step = (remaining < zeroChunk.length) ? remaining : zeroChunk.length;
        mIndexBuffer.put(zeroChunk, 0, step);
        remaining -= step;
    }
}
// Inserts a (key, data) pair into the cache.
//
// Throws RuntimeException if the blob (plus headers) could never fit in
// a region. If the active region lacks space for this blob, or its hash
// table has reached 50% occupancy, the regions are flipped first.
public void insert(long key, byte[] data) throws IOException {
    if (DATA_HEADER_SIZE + BLOB_HEADER_SIZE + data.length > mMaxBytes) {
        throw new RuntimeException("blob is too large!");
    }
    // Flip when the blob won't fit in the remaining active space, or when
    // the hash table would exceed half full (keeps linear probing cheap).
    if (mActiveBytes + BLOB_HEADER_SIZE + data.length > mMaxBytes
            || mActiveEntries * 2 >= mMaxEntries) {
        flipRegion();
    }
    if (!lookupInternal(key, mActiveHashStart)) {
        // If we don't have an existing entry with the same key, increase
        // the entry count.
        mActiveEntries++;
        writeInt(mIndexHeader, IH_ACTIVE_ENTRIES, mActiveEntries);
    }
    // lookupInternal left mSlotOffset pointing at the slot to (re)use.
    insertInternal(key, data, data.length);
    updateIndexHeader();
}
// Appends the data to the active file. It also updates the hash entry.
// The proper hash entry (suitable for insertion or replacement) must be
// pointed by mSlotOffset (set up by a preceding lookupInternal call).
//
// Only the first `length` bytes of `data` belong to the blob; the array
// itself may be larger (e.g. a reused LookupRequest buffer).
private void insertInternal(long key, byte[] data, int length)
        throws IOException {
    byte[] header = mBlobHeader;
    // Checksum only the `length` bytes actually written. The previous
    // code checksummed the entire array, producing a value that
    // getBlob() -- which verifies over `length` bytes -- could never
    // match whenever data.length > length (the lookup() copy-over path
    // with a reused, oversized buffer).
    int sum = checkSum(data, 0, length);
    writeLong(header, BH_KEY, key);
    writeInt(header, BH_CHECKSUM, sum);
    writeInt(header, BH_OFFSET, mActiveBytes);
    writeInt(header, BH_LENGTH, length);
    mActiveDataFile.write(header);
    mActiveDataFile.write(data, 0, length);
    // Point the hash slot at the blob we just appended.
    mIndexBuffer.putLong(mSlotOffset, key);
    mIndexBuffer.putInt(mSlotOffset + 8, mActiveBytes);
    mActiveBytes += BLOB_HEADER_SIZE + length;
    writeInt(mIndexHeader, IH_ACTIVE_BYTES, mActiveBytes);
}
// Parameter/result holder for lookup(LookupRequest). Reusable across
// calls so repeated lookups avoid per-call allocation.
public static class LookupRequest {
    public long key;        // input: the key to find
    public byte[] buffer;   // input/output: the buffer to store the blob
    public int length;      // output: the length of the blob
}
// This method is for one-off lookup. For repeated lookup, use the version
// accepting LookupRequest to avoid repeated memory allocation.
//
// NOTE(review): mLookupRequest is shared scratch state, which makes this
// method non-thread-safe -- confirm that callers serialize access.
private LookupRequest mLookupRequest = new LookupRequest();
public byte[] lookup(long key) throws IOException {
    // LookupRequest mLookupRequest = new LookupRequest();
    mLookupRequest.key = key;
    // Null buffer lets lookup() allocate one of exactly the right size.
    mLookupRequest.buffer = null;
    if (lookup(mLookupRequest)) {
        return mLookupRequest.buffer;
    } else {
        // Not found, or the stored blob failed validation.
        return null;
    }
}
// Returns true if the associated blob for the given key is available.
// The blob is stored in the buffer pointed by req.buffer, and the length
// is stored in the req.length variable.
//
// The user can input a non-null value in req.buffer, and this method will
// try to use that buffer. If that buffer is not large enough, this method
// will allocate a new buffer and assign it to req.buffer.
//
// This method tries not to throw IOException even if the data file is
// corrupted, but it can still throw IOException if things get strange.
public boolean lookup(LookupRequest req) throws IOException {
    // Look up in the active region first.
    if (lookupInternal(req.key, mActiveHashStart)) {
        if (getBlob(mActiveDataFile, mFileOffset, req)) {
            return true;
        }
    }
    // We want to copy the data from the inactive file to the active file
    // if it's available. So we keep the offset of the hash entry so we can
    // avoid looking it up again.
    // (The failed active lookup above left mSlotOffset pointing at a free
    // active slot suitable for insertion.)
    int insertOffset = mSlotOffset;
    // Look up in the inactive region.
    if (lookupInternal(req.key, mInactiveHashStart)) {
        if (getBlob(mInactiveDataFile, mFileOffset, req)) {
            // If we don't have enough space to insert this blob into
            // the active file, just return it.
            if (mActiveBytes + BLOB_HEADER_SIZE + req.length > mMaxBytes
                    || mActiveEntries * 2 >= mMaxEntries) {
                return true;
            }
            // Otherwise copy it over.
            mSlotOffset = insertOffset;
            try {
                insertInternal(req.key, req.buffer, req.length);
                mActiveEntries++;
                writeInt(mIndexHeader, IH_ACTIVE_ENTRIES, mActiveEntries);
                updateIndexHeader();
            } catch (Throwable t) {
                // The copy is only an optimization; the blob is already in
                // req.buffer, so a failure here is non-fatal.
                Log.e(TAG, "cannot copy over");
            }
            return true;
        }
    }
    return false;
}
// Copies the blob for the specified offset in the specified file to
// req.buffer. If req.buffer is null or too small, allocate a buffer and
// assign it to req.buffer.
// Returns false if the blob is not available (either the index file is
// not sync with the data file, or one of them is corrupted). The length
// of the blob is stored in the req.length variable.
private boolean getBlob(RandomAccessFile file, int offset,
        LookupRequest req) throws IOException {
    byte[] header = mBlobHeader;
    // Remember the append position so it can be restored afterwards.
    long oldPosition = file.getFilePointer();
    try {
        file.seek(offset);
        if (file.read(header) != BLOB_HEADER_SIZE) {
            Log.w(TAG, "cannot read blob header");
            return false;
        }
        // Validate key, self-referential offset, and length before
        // trusting the payload.
        long blobKey = readLong(header, BH_KEY);
        if (blobKey != req.key) {
            Log.w(TAG, "blob key does not match: " + blobKey);
            return false;
        }
        int sum = readInt(header, BH_CHECKSUM);
        int blobOffset = readInt(header, BH_OFFSET);
        if (blobOffset != offset) {
            Log.w(TAG, "blob offset does not match: " + blobOffset);
            return false;
        }
        int length = readInt(header, BH_LENGTH);
        // Length must be non-negative and fit within the region bounds.
        if (length < 0 || length > mMaxBytes - offset - BLOB_HEADER_SIZE) {
            Log.w(TAG, "invalid blob length: " + length);
            return false;
        }
        if (req.buffer == null || req.buffer.length < length) {
            req.buffer = new byte[length];
        }
        byte[] blob = req.buffer;
        req.length = length;
        if (file.read(blob, 0, length) != length) {
            Log.w(TAG, "cannot read blob data");
            return false;
        }
        // Verify payload integrity against the stored checksum.
        if (checkSum(blob, 0, length) != sum) {
            Log.w(TAG, "blob checksum does not match: " + sum);
            return false;
        }
        return true;
    } catch (Throwable t) {
        Log.e(TAG, "getBlob failed.", t);
        return false;
    } finally {
        // Restore the file pointer so appends continue where they left off.
        file.seek(oldPosition);
    }
}
// Tries to look up a key in the specified hash region.
// Returns true if the lookup is successful.
// The slot offset in the index file is saved in mSlotOffset. If the lookup
// is successful, it's the slot found. Otherwise it's the slot suitable for
// insertion.
// If the lookup is successful, the file offset is also saved in
// mFileOffset.
//
// Each hash slot is 12 bytes: an 8-byte key followed by a 4-byte file
// offset. A zero offset marks an empty slot. Collisions are resolved by
// linear probing.
private int mSlotOffset;
private int mFileOffset;
private boolean lookupInternal(long key, int hashStart) {
    int slot = (int) (key % mMaxEntries);
    if (slot < 0) slot += mMaxEntries; // key may be negative
    int slotBegin = slot;
    while (true) {
        int offset = hashStart + slot * 12;
        long candidateKey = mIndexBuffer.getLong(offset);
        int candidateOffset = mIndexBuffer.getInt(offset + 8);
        if (candidateOffset == 0) {
            // Empty slot: key is absent; this slot can take an insert.
            mSlotOffset = offset;
            return false;
        } else if (candidateKey == key) {
            mSlotOffset = offset;
            mFileOffset = candidateOffset;
            return true;
        } else {
            if (++slot >= mMaxEntries) {
                slot = 0;
            }
            if (slot == slotBegin) {
                // Probed every slot without finding an empty one. The
                // table should never be full (insert flips at 50% load),
                // so treat this as corruption and clear this slot so the
                // next iteration terminates with "not found".
                Log.w(TAG, "corrupted index: clear the slot.");
                mIndexBuffer.putInt(hashStart + slot * 12 + 8, 0);
            }
        }
    }
}
// Flushes the memory-mapped index buffer out to stable storage.
// Failures are logged and swallowed; this is strictly best-effort.
public void syncIndex() {
    try {
        mIndexBuffer.force();
    } catch (Throwable error) {
        Log.w(TAG, "sync index failed", error);
    }
}
// Flushes the index buffer and both data files to stable storage.
// Each sync is independent and best-effort: a failure on one target is
// logged and does not prevent syncing the others.
public void syncAll() {
    syncIndex();
    syncDataFile(mDataFile0, "0");
    syncDataFile(mDataFile1, "1");
}
// Best-effort fsync of one data file; label identifies it in the log.
private void syncDataFile(RandomAccessFile file, String label) {
    try {
        file.getFD().sync();
    } catch (Throwable t) {
        Log.w(TAG, "sync data file " + label + " failed", t);
    }
}
// This is for testing only.
//
// Returns the active count (mActiveEntries). This also verifies that
// the active count matches what's inside the hash region.
// Returns -1 if the header count disagrees with the number of occupied
// hash slots (the index is inconsistent).
int getActiveCount() {
    int count = 0;
    for (int i = 0; i < mMaxEntries; i++) {
        // A slot is occupied iff its 4-byte file offset (byte 8 of the
        // 12-byte entry) is non-zero; the stored key is not needed here,
        // so the previous unused getLong() read has been dropped.
        int candidateOffset = mIndexBuffer.getInt(mActiveHashStart + i * 12 + 8);
        if (candidateOffset != 0) ++count;
    }
    if (count == mActiveEntries) {
        return count;
    } else {
        Log.e(TAG, "wrong active count: " + mActiveEntries + " vs " + count);
        return -1; // signal failure.
    }
}
// Returns the Adler-32 checksum of the entire array.
// NOTE(review): reuses the shared mAdler32 instance, so this is not
// thread-safe -- confirm callers serialize access to the cache.
int checkSum(byte[] data) {
    mAdler32.reset();
    mAdler32.update(data);
    return (int) mAdler32.getValue();
}
// Returns the Adler-32 checksum of nbytes of data starting at offset.
// NOTE(review): reuses the shared mAdler32 instance, so this is not
// thread-safe -- confirm callers serialize access to the cache.
int checkSum(byte[] data, int offset, int nbytes) {
    mAdler32.reset();
    mAdler32.update(data, offset, nbytes);
    return (int) mAdler32.getValue();
}
// Closes the given resource if non-null, discarding any error raised by
// close(). Intended for cleanup paths where failures are unrecoverable.
static void closeSilently(Closeable c) {
    if (c != null) {
        try {
            c.close();
        } catch (Throwable ignored) {
            // Best-effort close: errors are intentionally discarded.
        }
    }
}
// Decodes a 4-byte little-endian int starting at buf[offset].
static int readInt(byte[] buf, int offset) {
    int value = 0;
    for (int i = 3; i >= 0; i--) {
        value = (value << 8) | (buf[offset + i] & 0xff);
    }
    return value;
}
// Decodes an 8-byte little-endian long starting at buf[offset].
static long readLong(byte[] buf, int offset) {
    long value = 0;
    for (int i = 7; i >= 0; i--) {
        value = (value << 8) | (buf[offset + i] & 0xff);
    }
    return value;
}
// Encodes value as a 4-byte little-endian int at buf[offset].
static void writeInt(byte[] buf, int offset, int value) {
    buf[offset]     = (byte) value;
    buf[offset + 1] = (byte) (value >> 8);
    buf[offset + 2] = (byte) (value >> 16);
    buf[offset + 3] = (byte) (value >> 24);
}
// Encodes value as an 8-byte little-endian long at buf[offset].
static void writeLong(byte[] buf, int offset, long value) {
    for (int i = 0; i < 8; i++) {
        buf[offset + i] = (byte) (value >>> (8 * i));
    }
}
}
| |
package com.fasterxml.jackson.core.write;
import java.io.ByteArrayOutputStream;
import java.io.StringWriter;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.Random;

import com.fasterxml.jackson.core.*;
/**
 * Basic testing for scalar-array write methods added in 2.8.
 *
 * Each scalar type is exercised over both backends (byte-based and
 * char-based generators), with and without leading/trailing padding in
 * the source array, across a range of sizes.
 */
public class ArrayGenerationTest extends BaseTest
{
    private final JsonFactory FACTORY = new JsonFactory();

    public void testIntArray() throws Exception
    {
        _testIntArray(false);
        _testIntArray(true);
    }

    public void testLongArray() throws Exception
    {
        _testLongArray(false);
        _testLongArray(true);
    }

    public void testDoubleArray() throws Exception
    {
        _testDoubleArray(false);
        _testDoubleArray(true);
    }

    public void testStringArray() throws Exception
    {
        _testStringArray(false);
        _testStringArray(true);
    }

    private void _testIntArray(boolean useBytes) throws Exception {
        // first special cases of 0, 1 values
        _testIntArray(useBytes, 0, 0, 0);
        _testIntArray(useBytes, 0, 1, 1);
        _testIntArray(useBytes, 1, 0, 0);
        _testIntArray(useBytes, 1, 1, 1);
        // and then some bigger data
        _testIntArray(useBytes, 15, 0, 0);
        _testIntArray(useBytes, 15, 2, 3);
        _testIntArray(useBytes, 39, 0, 0);
        _testIntArray(useBytes, 39, 4, 0);
        _testIntArray(useBytes, 271, 0, 0);
        _testIntArray(useBytes, 271, 0, 4);
        _testIntArray(useBytes, 5009, 0, 0);
        _testIntArray(useBytes, 5009, 0, 1);
    }

    private void _testLongArray(boolean useBytes) throws Exception {
        // first special cases of 0, 1 values
        _testLongArray(useBytes, 0, 0, 0);
        _testLongArray(useBytes, 0, 1, 1);
        _testLongArray(useBytes, 1, 0, 0);
        _testLongArray(useBytes, 1, 1, 1);
        // and then some bigger data
        _testLongArray(useBytes, 15, 0, 0);
        _testLongArray(useBytes, 15, 2, 3);
        _testLongArray(useBytes, 39, 0, 0);
        _testLongArray(useBytes, 39, 4, 0);
        _testLongArray(useBytes, 271, 0, 0);
        _testLongArray(useBytes, 271, 0, 4);
        _testLongArray(useBytes, 5009, 0, 0);
        _testLongArray(useBytes, 5009, 0, 1);
    }

    private void _testDoubleArray(boolean useBytes) throws Exception {
        // first special cases of 0, 1 values
        _testDoubleArray(useBytes, 0, 0, 0);
        _testDoubleArray(useBytes, 0, 1, 1);
        _testDoubleArray(useBytes, 1, 0, 0);
        _testDoubleArray(useBytes, 1, 1, 1);
        // and then some bigger data
        _testDoubleArray(useBytes, 15, 0, 0);
        _testDoubleArray(useBytes, 15, 2, 3);
        _testDoubleArray(useBytes, 39, 0, 0);
        _testDoubleArray(useBytes, 39, 4, 0);
        _testDoubleArray(useBytes, 271, 0, 0);
        _testDoubleArray(useBytes, 271, 0, 4);
        _testDoubleArray(useBytes, 5009, 0, 0);
        _testDoubleArray(useBytes, 5009, 0, 1);
    }

    private void _testStringArray(boolean useBytes) throws Exception {
        // first special cases of 0, 1 values
        _testStringArray(useBytes, 0, 0, 0);
        _testStringArray(useBytes, 0, 1, 1);
        _testStringArray(useBytes, 1, 0, 0);
        _testStringArray(useBytes, 1, 1, 1);
        // and then some bigger data
        _testStringArray(useBytes, 15, 0, 0);
        _testStringArray(useBytes, 15, 2, 3);
        _testStringArray(useBytes, 39, 0, 0);
        _testStringArray(useBytes, 39, 4, 0);
        _testStringArray(useBytes, 271, 0, 0);
        _testStringArray(useBytes, 271, 0, 4);
        _testStringArray(useBytes, 5009, 0, 0);
        _testStringArray(useBytes, 5009, 0, 1);
    }

    // Writes `elements` ints (0..elements-1) via writeArray(), with `pre`
    // leading and `post` trailing padding slots in the source array, then
    // parses the output back and verifies every element. Read access
    // alternates between nextToken()+getIntValue() and nextIntValue().
    private void _testIntArray(boolean useBytes, int elements, int pre, int post) throws Exception
    {
        int[] values = new int[elements+pre+post];
        for (int i = pre, end = pre+elements; i < end; ++i) {
            values[i] = i-pre;
        }
        StringWriter sw = new StringWriter();
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        JsonGenerator gen = useBytes ? FACTORY.createGenerator(bytes)
                : FACTORY.createGenerator(sw);
        gen.writeArray(values, pre, elements);
        gen.close();
        JsonParser p = _parserFor(useBytes, sw, bytes);
        assertToken(JsonToken.START_ARRAY, p.nextToken());
        for (int i = 0; i < elements; ++i) {
            if ((i & 1) == 0) { // alternate
                JsonToken t = p.nextToken();
                if (t != JsonToken.VALUE_NUMBER_INT) {
                    fail("Expected number, got "+t+", element #"+i);
                }
                assertEquals(i, p.getIntValue());
            } else {
                assertEquals(i, p.nextIntValue(-1));
            }
        }
        assertToken(JsonToken.END_ARRAY, p.nextToken());
        p.close();
    }

    // Same as the int variant, for long[] / getLongValue()/nextLongValue().
    private void _testLongArray(boolean useBytes, int elements, int pre, int post) throws Exception
    {
        long[] values = new long[elements+pre+post];
        for (int i = pre, end = pre+elements; i < end; ++i) {
            values[i] = i-pre;
        }
        StringWriter sw = new StringWriter();
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        JsonGenerator gen = useBytes ? FACTORY.createGenerator(bytes)
                : FACTORY.createGenerator(sw);
        gen.writeArray(values, pre, elements);
        gen.close();
        JsonParser p = _parserFor(useBytes, sw, bytes);
        assertToken(JsonToken.START_ARRAY, p.nextToken());
        for (int i = 0; i < elements; ++i) {
            if ((i & 1) == 0) { // alternate
                JsonToken t = p.nextToken();
                if (t != JsonToken.VALUE_NUMBER_INT) {
                    fail("Expected number, got "+t+", element #"+i);
                }
                assertEquals(i, p.getLongValue());
            } else {
                assertEquals(i, p.nextLongValue(-1));
            }
        }
        assertToken(JsonToken.END_ARRAY, p.nextToken());
        p.close();
    }

    // Same as the int variant, for double[]; every element is read via
    // nextToken()+getDoubleValue().
    private void _testDoubleArray(boolean useBytes, int elements, int pre, int post) throws Exception
    {
        double[] values = new double[elements+pre+post];
        for (int i = pre, end = pre+elements; i < end; ++i) {
            values[i] = i-pre;
        }
        StringWriter sw = new StringWriter();
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        JsonGenerator gen = useBytes ? FACTORY.createGenerator(bytes)
                : FACTORY.createGenerator(sw);
        gen.writeArray(values, pre, elements);
        gen.close();
        JsonParser p = _parserFor(useBytes, sw, bytes);
        assertToken(JsonToken.START_ARRAY, p.nextToken());
        for (int i = 0; i < elements; ++i) {
            JsonToken t = p.nextToken();
            if (t != JsonToken.VALUE_NUMBER_FLOAT) {
                fail("Expected floating-point number, got "+t+", element #"+i);
            }
            assertEquals((double) i, p.getDoubleValue());
        }
        assertToken(JsonToken.END_ARRAY, p.nextToken());
        p.close();
    }

    // Same as the int variant, for String[]; element content is random
    // bytes decoded as UTF-8 (malformed sequences become replacement
    // characters, which is fine: we only compare the decoded String
    // against what the parser reads back).
    private void _testStringArray(boolean useBytes, int elements, int pre, int post) throws Exception
    {
        final int byteLength = 16;
        Random random = new Random();
        Charset utf8 = StandardCharsets.UTF_8;
        String[] values = new String[elements+pre+post];
        for (int i = pre, end = pre+elements; i < end; ++i) {
            byte[] content = new byte[byteLength];
            random.nextBytes(content);
            values[i] = new String(content, utf8);
        }
        StringWriter sw = new StringWriter();
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        JsonGenerator gen = useBytes ? FACTORY.createGenerator(bytes)
                : FACTORY.createGenerator(sw);
        gen.writeArray(values, pre, elements);
        gen.close();
        JsonParser p = _parserFor(useBytes, sw, bytes);
        assertToken(JsonToken.START_ARRAY, p.nextToken());
        for (int i = 0; i < elements; ++i) {
            JsonToken t = p.nextToken();
            if (t != JsonToken.VALUE_STRING) {
                fail("Expected string, got "+t+", element #"+i);
            }
            assertEquals(values[pre+i], p.getValueAsString());
        }
        assertToken(JsonToken.END_ARRAY, p.nextToken());
        p.close();
    }

    // Builds a parser over whichever backend the generator wrote to.
    // (Replaces four copies of the same boilerplate; the byte path no
    // longer decodes the output to an unused String.)
    private JsonParser _parserFor(boolean useBytes, StringWriter sw, ByteArrayOutputStream bytes)
        throws Exception
    {
        return useBytes
                ? FACTORY.createParser(bytes.toByteArray())
                : FACTORY.createParser(sw.toString());
    }
}
| |
/*
* Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package com.gemstone.gemfire.management.internal.cli.domain;
import java.io.Serializable;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import com.gemstone.gemfire.cache.DataPolicy;
import com.gemstone.gemfire.cache.Region;
import com.gemstone.gemfire.cache.RegionShortcut;
import com.gemstone.gemfire.cache.Scope;
/***
 * Data class which contains the description of a region and provides the
 * aggregated view of the region. Used by the describe region command.
 *
 * @author Sourabh Bansod
 *
 */
public class RegionDescription implements Serializable {
  private static final long serialVersionUID = 1L;

  private String name;
  private boolean isPartition;
  private boolean isPersistent;
  private boolean isReplicate;
  private boolean haslocalDataStorage;
  private boolean isLocal = false;
  private boolean isReplicatedProxy = false;
  private boolean isAccessor = false;

  // Common Non Default Attributes: non-default attribute values shared by
  // every member added so far (attributes whose values differ between
  // members are removed by findCommon()).
  private Map<String, String> cndRegionAttributes;
  private Map<String, String> cndPartitionAttributes;
  private Map<String, String> cndEvictionAttributes;

  // Per-member descriptions keyed by hosting member; null until the first
  // successful add().
  private Map<String, RegionDescriptionPerMember> regionDescPerMemberMap = null;
  private Scope scope;
  private DataPolicy dataPolicy;

  public RegionDescription() {
  }

  public DataPolicy getDataPolicy() {
    return this.dataPolicy;
  }

  public Scope getScope() {
    return this.scope;
  }

  /**
   * Adds the RegionDescription per member to the aggregated view.
   *
   * The first added member fixes the region's identity (name, scope, data
   * policy, accessor-ness) and seeds the common non-default attribute maps;
   * subsequent members are accepted only if they match that identity, and
   * their attributes are intersected into the common maps.
   *
   * @param regionDescPerMember per-member description to merge
   * @return true if the member was added, false if it did not match
   */
  public boolean add(RegionDescriptionPerMember regionDescPerMember) {
    boolean isAdded = false;

    if (regionDescPerMemberMap == null) {
      regionDescPerMemberMap = new HashMap<String, RegionDescriptionPerMember>();
      regionDescPerMemberMap.put(regionDescPerMember.getHostingMember(), regionDescPerMember);
      this.scope = regionDescPerMember.getScope();
      this.dataPolicy = regionDescPerMember.getDataPolicy();
      this.name = regionDescPerMember.getName();
      isPartition = this.dataPolicy.withPartitioning();
      isPersistent = this.dataPolicy.withPersistence();
      isReplicate = this.dataPolicy.withReplication();
      haslocalDataStorage = this.dataPolicy.withStorage();
      isLocal = this.scope.isLocal();
      isAccessor = regionDescPerMember.isAccessor();

      // COPY the attribute maps so later intersection does not mutate the
      // member's own maps.
      this.cndRegionAttributes = new HashMap<String, String>();
      this.cndRegionAttributes.putAll(regionDescPerMember.getNonDefaultRegionAttributes());

      this.cndPartitionAttributes = new HashMap<String, String>();
      this.cndPartitionAttributes.putAll(regionDescPerMember.getNonDefaultPartitionAttributes());

      this.cndEvictionAttributes = new HashMap<String, String>();
      this.cndEvictionAttributes.putAll(regionDescPerMember.getNonDefaultEvictionAttributes());

      // EMPTY data policy with distributed-ack scope means the member holds
      // no data but participates in distribution: a replicated proxy.
      if (this.dataPolicy.equals(DataPolicy.EMPTY) && this.scope.equals(Scope.DISTRIBUTED_ACK)) {
        isReplicatedProxy = true;
      }
      // Don't have to show the scope for PR's
      isAdded = true;
    } else {
      if (this.scope.equals(regionDescPerMember.getScope())
          && this.name.equals(regionDescPerMember.getName())
          && this.dataPolicy.equals(regionDescPerMember.getDataPolicy())
          && this.isAccessor == regionDescPerMember.isAccessor()) {
        regionDescPerMemberMap.put(regionDescPerMember.getHostingMember(), regionDescPerMember);
        findCommon(cndRegionAttributes, regionDescPerMember.getNonDefaultRegionAttributes());
        findCommon(cndEvictionAttributes, regionDescPerMember.getNonDefaultEvictionAttributes());
        findCommon(cndPartitionAttributes, regionDescPerMember.getNonDefaultPartitionAttributes());
        isAdded = true;
      }
    }
    return isAdded;
  }

  /**
   * Intersects commonNdMap with incomingNdMap in place: keeps only the
   * entries present in both maps with equal (or both-null) values.
   */
  private void findCommon(Map<String, String> commonNdMap, Map<String, String> incomingNdMap) {
    // First get the intersection of both key sets (keySet() is a live view,
    // so retainAll() also removes entries from commonNdMap).
    Set<String> commonNdKeySet = commonNdMap.keySet();
    Set<String> incomingNdKeySet = incomingNdMap.keySet();
    commonNdKeySet.retainAll(incomingNdKeySet);

    // Now compare the values.
    // Take a copy of the set to avoid a ConcurrentModificationException
    // while removing during iteration.
    Iterator<String> commonKeysIter = (new HashSet<String>(commonNdKeySet)).iterator();
    while (commonKeysIter.hasNext()) {
      String attribute = commonKeysIter.next();
      String commonNdValue = commonNdMap.get(attribute);
      String incomingNdValue = incomingNdMap.get(attribute);
      if (commonNdValue != null) {
        if (!commonNdValue.equals(incomingNdValue)) {
          // Values differ: drop the attribute from the common map.
          commonNdMap.remove(attribute);
        }
      } else {
        if (incomingNdValue != null) {
          commonNdMap.remove(attribute);
        }
      }
    }
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj) {
      return true;
    }
    if (obj instanceof RegionDescription) {
      RegionDescription regionDesc = (RegionDescription) obj;
      return this.getName().equals(regionDesc.getName())
          && this.scope.equals(regionDesc.getScope())
          && this.dataPolicy.equals(regionDesc.getDataPolicy());
    }
    // BUG FIX: this previously returned true, making ANY non-RegionDescription
    // object compare equal -- a violation of the equals() contract.
    return false;
  }

  // hashCode consistent with equals (name, scope, dataPolicy), added because
  // equals() was overridden without it.
  @Override
  public int hashCode() {
    int result = (name != null) ? name.hashCode() : 0;
    result = 31 * result + ((scope != null) ? scope.hashCode() : 0);
    result = 31 * result + ((dataPolicy != null) ? dataPolicy.hashCode() : 0);
    return result;
  }

  // NOTE(review): throws NullPointerException if called before the first
  // successful add() -- confirm callers always add a member first.
  public Set<String> getHostingMembers() {
    return regionDescPerMemberMap.keySet();
  }

  public String getName() {
    return this.name;
  }

  public boolean isPersistent() {
    return this.isPersistent;
  }

  public boolean isPartition() {
    return this.isPartition;
  }

  public boolean isReplicate() {
    return this.isReplicate;
  }

  public boolean hasLocalStorage() {
    return this.haslocalDataStorage;
  }

  public boolean isLocal() {
    return this.isLocal;
  }

  public boolean isReplicatedProxy() {
    return this.isReplicatedProxy;
  }

  public boolean isAccessor() {
    return this.isAccessor;
  }

  /***
   * Gets the common non-default region attributes
   * @return Map containing attribute name and its associated value
   */
  public Map<String, String> getCndRegionAttributes() {
    return this.cndRegionAttributes;
  }

  /***
   * Gets the common non-default Eviction Attributes
   * @return Map containing attribute name and its associated value
   */
  public Map<String, String> getCndEvictionAttributes() {
    return this.cndEvictionAttributes;
  }

  /***
   * Gets the common non-default PartitionAttributes
   * @return Map containing attribute name and its associated value
   */
  public Map<String, String> getCndPartitionAttributes() {
    return this.cndPartitionAttributes;
  }

  public Map<String, RegionDescriptionPerMember> getRegionDescriptionPerMemberMap() {
    return this.regionDescPerMemberMap;
  }

  // NOTE(review): intentionally returns the empty string (pre-existing
  // behavior, preserved); presumably a placeholder -- confirm before
  // relying on toString() output.
  public String toString() {
    StringBuilder sb = new StringBuilder();
    return sb.toString();
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.security.visibility;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.security.visibility.expression.ExpressionNode;
import org.apache.hadoop.hbase.security.visibility.expression.LeafExpressionNode;
import org.apache.hadoop.hbase.security.visibility.expression.NonLeafExpressionNode;
import org.apache.hadoop.hbase.security.visibility.expression.Operator;
import org.apache.hadoop.hbase.testclassification.SecurityTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@Category({SecurityTests.class, SmallTests.class})
public class TestExpressionExpander {
// JUnit class rule registered for this test class via
// HBaseClassTestRule.forClass (standard HBase test-harness boilerplate).
@ClassRule
public static final HBaseClassTestRule CLASS_RULE =
    HBaseClassTestRule.forClass(TestExpressionExpander.class);
@Test
public void testPositiveCases() throws Exception {
ExpressionExpander expander = new ExpressionExpander();
// (!a) -> (!a)
NonLeafExpressionNode exp1 = new NonLeafExpressionNode(Operator.NOT,
new LeafExpressionNode("a"));
ExpressionNode result = expander.expand(exp1);
assertTrue(result instanceof NonLeafExpressionNode);
NonLeafExpressionNode nlResult = (NonLeafExpressionNode) result;
assertEquals(Operator.NOT, nlResult.getOperator());
assertEquals("a", ((LeafExpressionNode) nlResult.getChildExps().get(0)).getIdentifier());
// (a | b) -> (a | b)
NonLeafExpressionNode exp2 = new NonLeafExpressionNode(Operator.OR,
new LeafExpressionNode("a"), new LeafExpressionNode("b"));
result = expander.expand(exp2);
assertTrue(result instanceof NonLeafExpressionNode);
nlResult = (NonLeafExpressionNode) result;
assertEquals(Operator.OR, nlResult.getOperator());
assertEquals(2, nlResult.getChildExps().size());
assertEquals("a", ((LeafExpressionNode) nlResult.getChildExps().get(0)).getIdentifier());
assertEquals("b", ((LeafExpressionNode) nlResult.getChildExps().get(1)).getIdentifier());
// (a & b) -> (a & b)
NonLeafExpressionNode exp3 = new NonLeafExpressionNode(Operator.AND,
new LeafExpressionNode("a"), new LeafExpressionNode("b"));
result = expander.expand(exp3);
assertTrue(result instanceof NonLeafExpressionNode);
nlResult = (NonLeafExpressionNode) result;
assertEquals(Operator.AND, nlResult.getOperator());
assertEquals(2, nlResult.getChildExps().size());
assertEquals("a", ((LeafExpressionNode) nlResult.getChildExps().get(0)).getIdentifier());
assertEquals("b", ((LeafExpressionNode) nlResult.getChildExps().get(1)).getIdentifier());
// ((a | b) | c) -> (a | b | c)
NonLeafExpressionNode exp4 = new NonLeafExpressionNode(Operator.OR, new NonLeafExpressionNode(
Operator.OR, new LeafExpressionNode("a"), new LeafExpressionNode("b")),
new LeafExpressionNode("c"));
result = expander.expand(exp4);
assertTrue(result instanceof NonLeafExpressionNode);
nlResult = (NonLeafExpressionNode) result;
assertEquals(Operator.OR, nlResult.getOperator());
assertEquals(3, nlResult.getChildExps().size());
assertEquals("a", ((LeafExpressionNode) nlResult.getChildExps().get(0)).getIdentifier());
assertEquals("b", ((LeafExpressionNode) nlResult.getChildExps().get(1)).getIdentifier());
assertEquals("c", ((LeafExpressionNode) nlResult.getChildExps().get(2)).getIdentifier());
// ((a & b) & c) -> (a & b & c)
NonLeafExpressionNode exp5 = new NonLeafExpressionNode(Operator.AND, new NonLeafExpressionNode(
Operator.AND, new LeafExpressionNode("a"), new LeafExpressionNode("b")),
new LeafExpressionNode("c"));
result = expander.expand(exp5);
assertTrue(result instanceof NonLeafExpressionNode);
nlResult = (NonLeafExpressionNode) result;
assertEquals(Operator.AND, nlResult.getOperator());
assertEquals(3, nlResult.getChildExps().size());
assertEquals("a", ((LeafExpressionNode) nlResult.getChildExps().get(0)).getIdentifier());
assertEquals("b", ((LeafExpressionNode) nlResult.getChildExps().get(1)).getIdentifier());
assertEquals("c", ((LeafExpressionNode) nlResult.getChildExps().get(2)).getIdentifier());
// (a | b) & c -> ((a & c) | (b & c))
NonLeafExpressionNode exp6 = new NonLeafExpressionNode(Operator.AND, new NonLeafExpressionNode(
Operator.OR, new LeafExpressionNode("a"), new LeafExpressionNode("b")),
new LeafExpressionNode("c"));
result = expander.expand(exp6);
assertTrue(result instanceof NonLeafExpressionNode);
nlResult = (NonLeafExpressionNode) result;
assertEquals(Operator.OR, nlResult.getOperator());
assertEquals(2, nlResult.getChildExps().size());
NonLeafExpressionNode temp = (NonLeafExpressionNode) nlResult.getChildExps().get(0);
assertEquals(Operator.AND, temp.getOperator());
assertEquals(2, temp.getChildExps().size());
assertEquals("a", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier());
assertEquals("c", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier());
temp = (NonLeafExpressionNode) nlResult.getChildExps().get(1);
assertEquals(Operator.AND, temp.getOperator());
assertEquals(2, temp.getChildExps().size());
assertEquals("b", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier());
assertEquals("c", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier());
// (a & b) | c -> ((a & b) | c)
NonLeafExpressionNode exp7 = new NonLeafExpressionNode(Operator.OR, new NonLeafExpressionNode(
Operator.AND, new LeafExpressionNode("a"), new LeafExpressionNode("b")),
new LeafExpressionNode("c"));
result = expander.expand(exp7);
assertTrue(result instanceof NonLeafExpressionNode);
nlResult = (NonLeafExpressionNode) result;
assertEquals(Operator.OR, nlResult.getOperator());
assertEquals(2, nlResult.getChildExps().size());
assertEquals("c", ((LeafExpressionNode) nlResult.getChildExps().get(1)).getIdentifier());
nlResult = (NonLeafExpressionNode) nlResult.getChildExps().get(0);
assertEquals(Operator.AND, nlResult.getOperator());
assertEquals(2, nlResult.getChildExps().size());
assertEquals("a", ((LeafExpressionNode) nlResult.getChildExps().get(0)).getIdentifier());
assertEquals("b", ((LeafExpressionNode) nlResult.getChildExps().get(1)).getIdentifier());
// ((a & b) | c) & d -> (((a & b) & d) | (c & d))
NonLeafExpressionNode exp8 = new NonLeafExpressionNode(Operator.AND);
exp8.addChildExp(new NonLeafExpressionNode(Operator.OR, new NonLeafExpressionNode(Operator.AND,
new LeafExpressionNode("a"), new LeafExpressionNode("b")), new LeafExpressionNode("c")));
exp8.addChildExp(new LeafExpressionNode("d"));
result = expander.expand(exp8);
assertTrue(result instanceof NonLeafExpressionNode);
nlResult = (NonLeafExpressionNode) result;
assertEquals(Operator.OR, nlResult.getOperator());
assertEquals(2, nlResult.getChildExps().size());
temp = (NonLeafExpressionNode) nlResult.getChildExps().get(1);
assertEquals(Operator.AND, temp.getOperator());
assertEquals(2, temp.getChildExps().size());
assertEquals("c", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier());
assertEquals("d", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier());
temp = (NonLeafExpressionNode) nlResult.getChildExps().get(0);
assertEquals(Operator.AND, temp.getOperator());
assertEquals(2, temp.getChildExps().size());
assertEquals("d", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier());
temp = (NonLeafExpressionNode) temp.getChildExps().get(0);
assertEquals(Operator.AND, temp.getOperator());
assertEquals(2, temp.getChildExps().size());
assertEquals("a", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier());
assertEquals("b", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier());
// (a | b) | (c | d) -> (a | b | c | d)
NonLeafExpressionNode exp9 = new NonLeafExpressionNode(Operator.OR);
exp9.addChildExp(new NonLeafExpressionNode(Operator.OR, new LeafExpressionNode("a"),
new LeafExpressionNode("b")));
exp9.addChildExp(new NonLeafExpressionNode(Operator.OR, new LeafExpressionNode("c"),
new LeafExpressionNode("d")));
result = expander.expand(exp9);
assertTrue(result instanceof NonLeafExpressionNode);
nlResult = (NonLeafExpressionNode) result;
assertEquals(Operator.OR, nlResult.getOperator());
assertEquals(4, nlResult.getChildExps().size());
assertEquals("a", ((LeafExpressionNode) nlResult.getChildExps().get(0)).getIdentifier());
assertEquals("b", ((LeafExpressionNode) nlResult.getChildExps().get(1)).getIdentifier());
assertEquals("c", ((LeafExpressionNode) nlResult.getChildExps().get(2)).getIdentifier());
assertEquals("d", ((LeafExpressionNode) nlResult.getChildExps().get(3)).getIdentifier());
// (a & b) & (c & d) -> (a & b & c & d)
NonLeafExpressionNode exp10 = new NonLeafExpressionNode(Operator.AND);
exp10.addChildExp(new NonLeafExpressionNode(Operator.AND, new LeafExpressionNode("a"),
new LeafExpressionNode("b")));
exp10.addChildExp(new NonLeafExpressionNode(Operator.AND, new LeafExpressionNode("c"),
new LeafExpressionNode("d")));
result = expander.expand(exp10);
assertTrue(result instanceof NonLeafExpressionNode);
nlResult = (NonLeafExpressionNode) result;
assertEquals(Operator.AND, nlResult.getOperator());
assertEquals(4, nlResult.getChildExps().size());
assertEquals("a", ((LeafExpressionNode) nlResult.getChildExps().get(0)).getIdentifier());
assertEquals("b", ((LeafExpressionNode) nlResult.getChildExps().get(1)).getIdentifier());
assertEquals("c", ((LeafExpressionNode) nlResult.getChildExps().get(2)).getIdentifier());
assertEquals("d", ((LeafExpressionNode) nlResult.getChildExps().get(3)).getIdentifier());
// (a | b) & (c | d) -> ((a & c) | (a & d) | (b & c) | (b & d))
NonLeafExpressionNode exp11 = new NonLeafExpressionNode(Operator.AND);
exp11.addChildExp(new NonLeafExpressionNode(Operator.OR, new LeafExpressionNode("a"),
new LeafExpressionNode("b")));
exp11.addChildExp(new NonLeafExpressionNode(Operator.OR, new LeafExpressionNode("c"),
new LeafExpressionNode("d")));
result = expander.expand(exp11);
assertTrue(result instanceof NonLeafExpressionNode);
nlResult = (NonLeafExpressionNode) result;
assertEquals(Operator.OR, nlResult.getOperator());
assertEquals(4, nlResult.getChildExps().size());
temp = (NonLeafExpressionNode) nlResult.getChildExps().get(0);
assertEquals(Operator.AND, temp.getOperator());
assertEquals(2, temp.getChildExps().size());
assertEquals("a", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier());
assertEquals("c", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier());
temp = (NonLeafExpressionNode) nlResult.getChildExps().get(1);
assertEquals(Operator.AND, temp.getOperator());
assertEquals(2, temp.getChildExps().size());
assertEquals("a", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier());
assertEquals("d", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier());
temp = (NonLeafExpressionNode) nlResult.getChildExps().get(2);
assertEquals(Operator.AND, temp.getOperator());
assertEquals(2, temp.getChildExps().size());
assertEquals("b", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier());
assertEquals("c", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier());
temp = (NonLeafExpressionNode) nlResult.getChildExps().get(3);
assertEquals(Operator.AND, temp.getOperator());
assertEquals(2, temp.getChildExps().size());
assertEquals("b", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier());
assertEquals("d", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier());
// (((a | b) | c) | d) & e -> ((a & e) | (b & e) | (c & e) | (d & e))
NonLeafExpressionNode exp12 = new NonLeafExpressionNode(Operator.AND);
NonLeafExpressionNode tempExp1 = new NonLeafExpressionNode(Operator.OR, new LeafExpressionNode(
"a"), new LeafExpressionNode("b"));
NonLeafExpressionNode tempExp2 = new NonLeafExpressionNode(Operator.OR, tempExp1,
new LeafExpressionNode("c"));
NonLeafExpressionNode tempExp3 = new NonLeafExpressionNode(Operator.OR, tempExp2,
new LeafExpressionNode("d"));
exp12.addChildExp(tempExp3);
exp12.addChildExp(new LeafExpressionNode("e"));
result = expander.expand(exp12);
assertTrue(result instanceof NonLeafExpressionNode);
nlResult = (NonLeafExpressionNode) result;
assertEquals(Operator.OR, nlResult.getOperator());
assertEquals(4, nlResult.getChildExps().size());
temp = (NonLeafExpressionNode) nlResult.getChildExps().get(0);
assertEquals(Operator.AND, temp.getOperator());
assertEquals(2, temp.getChildExps().size());
assertEquals("a", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier());
assertEquals("e", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier());
temp = (NonLeafExpressionNode) nlResult.getChildExps().get(1);
assertEquals(Operator.AND, temp.getOperator());
assertEquals(2, temp.getChildExps().size());
assertEquals("b", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier());
assertEquals("e", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier());
temp = (NonLeafExpressionNode) nlResult.getChildExps().get(2);
assertEquals(Operator.AND, temp.getOperator());
assertEquals(2, temp.getChildExps().size());
assertEquals("c", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier());
assertEquals("e", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier());
temp = (NonLeafExpressionNode) nlResult.getChildExps().get(3);
assertEquals(Operator.AND, temp.getOperator());
assertEquals(2, temp.getChildExps().size());
assertEquals("d", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier());
assertEquals("e", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier());
// (a | b | c) & d -> ((a & d) | (b & d) | (c & d))
NonLeafExpressionNode exp13 = new NonLeafExpressionNode(Operator.AND,
new NonLeafExpressionNode(Operator.OR, new LeafExpressionNode("a"), new LeafExpressionNode(
"b"), new LeafExpressionNode("c")), new LeafExpressionNode("d"));
result = expander.expand(exp13);
assertTrue(result instanceof NonLeafExpressionNode);
nlResult = (NonLeafExpressionNode) result;
assertEquals(Operator.OR, nlResult.getOperator());
assertEquals(3, nlResult.getChildExps().size());
temp = (NonLeafExpressionNode) nlResult.getChildExps().get(0);
assertEquals(Operator.AND, temp.getOperator());
assertEquals(2, temp.getChildExps().size());
assertEquals("a", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier());
assertEquals("d", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier());
temp = (NonLeafExpressionNode) nlResult.getChildExps().get(1);
assertEquals(Operator.AND, temp.getOperator());
assertEquals(2, temp.getChildExps().size());
assertEquals("b", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier());
assertEquals("d", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier());
temp = (NonLeafExpressionNode) nlResult.getChildExps().get(2);
assertEquals(Operator.AND, temp.getOperator());
assertEquals(2, temp.getChildExps().size());
assertEquals("c", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier());
assertEquals("d", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier());
// ((a | b) & (c | d)) & (e | f) -> (((a & c) & e) | ((a & c) & f) | ((a & d) & e) | ((a & d) &
// f) | ((b & c) & e) | ((b & c) & f) | ((b & d) & e) | ((b & d) & f))
NonLeafExpressionNode exp15 = new NonLeafExpressionNode(Operator.AND);
NonLeafExpressionNode temp1 = new NonLeafExpressionNode(Operator.AND);
temp1.addChildExp(new NonLeafExpressionNode(Operator.OR, new LeafExpressionNode("a"),
new LeafExpressionNode("b")));
temp1.addChildExp(new NonLeafExpressionNode(Operator.OR, new LeafExpressionNode("c"),
new LeafExpressionNode("d")));
exp15.addChildExp(temp1);
exp15.addChildExp(new NonLeafExpressionNode(Operator.OR, new LeafExpressionNode("e"),
new LeafExpressionNode("f")));
result = expander.expand(exp15);
assertTrue(result instanceof NonLeafExpressionNode);
nlResult = (NonLeafExpressionNode) result;
assertEquals(Operator.OR, nlResult.getOperator());
assertEquals(8, nlResult.getChildExps().size());
temp = (NonLeafExpressionNode) nlResult.getChildExps().get(0);
assertEquals(Operator.AND, temp.getOperator());
assertEquals(2, temp.getChildExps().size());
assertEquals("e", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier());
temp = (NonLeafExpressionNode) temp.getChildExps().get(0);
assertEquals(Operator.AND, temp.getOperator());
assertEquals(2, temp.getChildExps().size());
assertEquals("a", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier());
assertEquals("c", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier());
temp = (NonLeafExpressionNode) nlResult.getChildExps().get(1);
assertEquals(Operator.AND, temp.getOperator());
assertEquals(2, temp.getChildExps().size());
assertEquals("f", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier());
temp = (NonLeafExpressionNode) temp.getChildExps().get(0);
assertEquals(Operator.AND, temp.getOperator());
assertEquals(2, temp.getChildExps().size());
assertEquals("a", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier());
assertEquals("c", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier());
temp = (NonLeafExpressionNode) nlResult.getChildExps().get(2);
assertEquals(Operator.AND, temp.getOperator());
assertEquals(2, temp.getChildExps().size());
assertEquals("e", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier());
temp = (NonLeafExpressionNode) temp.getChildExps().get(0);
assertEquals(Operator.AND, temp.getOperator());
assertEquals(2, temp.getChildExps().size());
assertEquals("a", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier());
assertEquals("d", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier());
temp = (NonLeafExpressionNode) nlResult.getChildExps().get(3);
assertEquals(Operator.AND, temp.getOperator());
assertEquals(2, temp.getChildExps().size());
assertEquals("f", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier());
temp = (NonLeafExpressionNode) temp.getChildExps().get(0);
assertEquals(Operator.AND, temp.getOperator());
assertEquals(2, temp.getChildExps().size());
assertEquals("a", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier());
assertEquals("d", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier());
temp = (NonLeafExpressionNode) nlResult.getChildExps().get(4);
assertEquals(Operator.AND, temp.getOperator());
assertEquals(2, temp.getChildExps().size());
assertEquals("e", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier());
temp = (NonLeafExpressionNode) temp.getChildExps().get(0);
assertEquals(Operator.AND, temp.getOperator());
assertEquals(2, temp.getChildExps().size());
assertEquals("b", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier());
assertEquals("c", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier());
temp = (NonLeafExpressionNode) nlResult.getChildExps().get(5);
assertEquals(Operator.AND, temp.getOperator());
assertEquals(2, temp.getChildExps().size());
assertEquals("f", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier());
temp = (NonLeafExpressionNode) temp.getChildExps().get(0);
assertEquals(Operator.AND, temp.getOperator());
assertEquals(2, temp.getChildExps().size());
assertEquals("b", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier());
assertEquals("c", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier());
temp = (NonLeafExpressionNode) nlResult.getChildExps().get(6);
assertEquals(Operator.AND, temp.getOperator());
assertEquals(2, temp.getChildExps().size());
assertEquals("e", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier());
temp = (NonLeafExpressionNode) temp.getChildExps().get(0);
assertEquals(Operator.AND, temp.getOperator());
assertEquals(2, temp.getChildExps().size());
assertEquals("b", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier());
assertEquals("d", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier());
temp = (NonLeafExpressionNode) nlResult.getChildExps().get(7);
assertEquals(Operator.AND, temp.getOperator());
assertEquals(2, temp.getChildExps().size());
assertEquals("f", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier());
temp = (NonLeafExpressionNode) temp.getChildExps().get(0);
assertEquals(Operator.AND, temp.getOperator());
assertEquals(2, temp.getChildExps().size());
assertEquals("b", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier());
assertEquals("d", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier());
// !(a | b) -> ((!a) & (!b))
NonLeafExpressionNode exp16 = new NonLeafExpressionNode(Operator.NOT,
new NonLeafExpressionNode(Operator.OR, new LeafExpressionNode("a"), new LeafExpressionNode(
"b")));
result = expander.expand(exp16);
assertTrue(result instanceof NonLeafExpressionNode);
nlResult = (NonLeafExpressionNode) result;
assertEquals(Operator.AND, nlResult.getOperator());
assertEquals(2, nlResult.getChildExps().size());
temp = (NonLeafExpressionNode) nlResult.getChildExps().get(0);
assertEquals(Operator.NOT, temp.getOperator());
assertEquals("a", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier());
temp = (NonLeafExpressionNode) nlResult.getChildExps().get(1);
assertEquals(Operator.NOT, temp.getOperator());
assertEquals("b", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier());
}
}
| |
/*
* Copyright 2014 YarcData LLC
* All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* For more about this software visit:
* http://www.01.org/GraphBuilder
*/
package com.intel.pig.udf.eval.mappings;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Map.Entry;
import org.apache.hadoop.io.Writable;
import org.apache.jena.iri.IRI;
import org.apache.jena.iri.IRIFactory;
import org.apache.jena.riot.RiotException;
import org.apache.jena.riot.system.IRIResolver;
import org.apache.jena.riot.system.PrefixMap;
import org.apache.jena.riot.system.PrefixMapFactory;
import org.apache.pig.backend.executionengine.ExecException;
import org.apache.pig.data.DataBag;
import org.apache.pig.data.Tuple;
import org.apache.pig.data.TupleFactory;
import com.hp.hpl.jena.graph.Node;
import com.hp.hpl.jena.graph.NodeFactory;
import com.hp.hpl.jena.graph.Triple;
import com.hp.hpl.jena.shared.PrefixMapping;
import com.hp.hpl.jena.sparql.util.FmtUtils;
import com.hp.hpl.jena.sparql.util.NodeFactoryExtra;
import com.hp.hpl.jena.vocabulary.RDF;
import com.intel.hadoop.graphbuilder.graphelements.Edge;
import com.intel.hadoop.graphbuilder.graphelements.GraphElement;
import com.intel.hadoop.graphbuilder.graphelements.SerializedGraphElementStringTypeVids;
import com.intel.hadoop.graphbuilder.graphelements.Vertex;
import com.intel.hadoop.graphbuilder.types.DoubleType;
import com.intel.hadoop.graphbuilder.types.FloatType;
import com.intel.hadoop.graphbuilder.types.IntType;
import com.intel.hadoop.graphbuilder.types.LongType;
import com.intel.hadoop.graphbuilder.types.PropertyMap;
import com.intel.hadoop.graphbuilder.types.StringType;
/**
* <p>
* Represents a RDF mapping used in converting property graphs into RDF graphs
* </p>
* <h3>Mapping Format</h3>
* <p>
* Mappings are expressed as Pig maps, the mapping contains any number of the
* supported keys all of which are optional e.g.
* </p>
*
* <pre>
* [ 'base' # 'http://example.org/ontology#',
* 'idBase' # 'http://example.org/instances/',
* 'namespaces' # [ 'ex' # 'http://example.org/base#' ],
* 'useStdNamespaces' # 'true',
* 'includedProperties' # ( 'name', 'age' ),
* 'excludedProperties' # ( 'dob' ),
* 'propertyMap' # [ 'type' # 'rdf:type',
* 'name' # 'http://xmlns.com/foaf/0.1/name' ],
* 'uriProperties' # ( 'parent' ),
* 'idProperty' # 'ssn' ]
* </pre>
*
* <h4>base</h4>
* <p>
* The {@code base} key provides a Base URI which is used to resolve property
* URIs against where properties aren't explicitly mapped to prefixed names or
* absolute URIs. For example with the above mapping a property named
* {@code name} yields the property URI {@code http://example.org/ontology#name}
* . When this is not specified property URIs will be left as relative URIs.
* </p>
* <h4>idBase</h4>
* <p>
* The {@code idBase} key provides a Base URI which is used to resolve vertex
* URIs against. For example with the above mapping a vertex with ID of
* {@code 1} would yield the vertex URI {@code http://example.org/instances/1}.
* When this is not specified the value of the {@code base} key is used if
* possible and if not relative URIs are used.
* </p>
* <h4>namespaces</h4>
* <p>
* The {@code namespaces} key provides a map of namespace definitions which can
* then be used with the {@code propertyMap} key to provide simple mappings of
* property names into specific URIs.
* </p>
* <h4>useStdNamespaces</h4>
* <p>
* The {@code useStdNamespaces} key provides a {@code true} or {@code false}
* value indicating whether standard namespaces are made available. Currently
* these are {@code rdf}, {@code rdfs}, {@code owl} and {@code xsd}. Even when
* enabled users can override the standard mappings by using the
* {@code namespaces} key.
* </p>
* <h4>includedProperties</h4>
* <p>
* The {@code includedProperties} key provides a tuple of property names that
* are included in the mapping to RDF. When specified a property must not also
* be excluded to be included in the mapping. When not specified all properties
* are assumed to be included unless excluded using the
* {@code excludedProperties} key.
* </p>
* <h4>excludedProperties</h4>
* <p>
* The {@code excludedProperties} key provides a tuple of property names that
* are excluded from the mapping to RDF. When specified exclusions take
* precedence over any inclusions that may have been specified. When not
* specified no properties are explicitly excluded though the
* {@code includedProperties} key may still be used to limit properties included
* in the mapping.
* </p>
* <h4>propertyMap</h4>
* <p>
* The {@code propertyMap} key provides a map that is used to customize how
* property names and edge labels are translated into property URIs in the RDF.
* The key is the property or edge label whose translation is to be customized
* and the value is either a Prefixed Name or URI. Prefixed Names may refer to
* namespaces declared either via the {@code namespaces} key or from the
* standard namespaces (where included) as shown in the example mapping.
* </p>
* <p>
* In the example mapping the {@code type} property is mapped to
* {@code rdf:type} which is a prefixed name while the {@code name} property is
* mapped to the URI {@code http://xmlns.com/foaf/0.1/name}.
* </p>
* <h4>uriProperties</h4>
* <p>
* The values of properties on vertices and edges are by default mapped as
* literal objects in the resulting RDF triples. In some cases you may have
* properties whose values should be treated as URIs by listing them in this
* tuple. Properties listed here will have their values mapped to object URIs
* using the same Base URI (specified by the {@code base}) used for creating the
* property URIs.
* </p>
* <h4>idProperty</h4>
* <p>
* The {@code idProperty} key provides a property that will be used to associate
* the vertex ID as a literal value to the generated vertex URI as well as
* embedding the vertex ID in that generated URI.
* </p>
*/
public class RdfMapping extends AbstractMapping {
// Keys recognised in the Pig map that configures a mapping; see the class Javadoc for semantics
protected static final String BASE_URI = "base";
protected static final String ID_BASE_URI = "idBase";
protected static final String NAMESPACES = "namespaces";
protected static final String USE_STD_NAMESPACES = "useStdNamespaces";
protected static final String INCLUDED_PROPERTIES = "includedProperties";
protected static final String EXCLUDED_PROPERTIES = "excludedProperties";
protected static final String PROPERTY_MAP = "propertyMap";
protected static final String ID_PROPERTY = "idProperty";
protected static final String URI_PROPERTIES = "uriProperties";
protected static final String EDGE_PROPERTIES_MODE = "edgePropertiesMode";

/**
 * Enumeration of modes which control how edge properties are treated
 */
public enum EdgePropertiesMode {
    /**
     * Ignore edge properties when mapping property graphs to RDF
     */
    IGNORE,
    /**
     * Use reification to add edge properties
     */
    REIFIED,
    /**
     * Use n-ary predicate to add edge properties
     */
    NARY
}

// Base URI for property URIs, Base URI for vertex URIs, and optional ID property name; any may be null
private String baseUri, idBaseUri, idProperty;
// Explicit include list; empty means "include everything not excluded"
private Set<String> includedProperties = new HashSet<String>();
// Explicit exclude list; takes precedence over includes
private Set<String> excludedProperties = new HashSet<String>();
// Properties whose values are emitted as URI objects rather than literals
private Set<String> uriProperties = new HashSet<String>();
// Maps property names / edge labels to prefixed names or URIs
private Map<String, String> propertyMap = new HashMap<String, String>();
// Raw user-supplied namespace declarations (prefix -> namespace URI)
private Map<String, String> namespaces = new HashMap<String, String>();
private boolean useStdNamespaces = false;
// Effective prefix map: user namespaces plus (optionally) Jena's standard prefixes
private PrefixMap prefixes;
private EdgePropertiesMode edgePropertiesMode = EdgePropertiesMode.IGNORE;
/**
 * Creates a new RDF Mapping
 * 
 * @param baseUri
 *            Base URI used to resolve property URIs, may be null
 * @param idBaseUri
 *            ID Base URI used to resolve vertex URIs, may be null
 * @param namespaces
 *            Namespace declarations (prefix to namespace URI), may be null
 * @param useStdNamespaces
 *            Whether to also make Jena's standard namespaces available
 * @param includedProperties
 *            Included properties, may be null for "include all"
 * @param excludedProperties
 *            Excluded properties, may be null for "exclude none"
 * @param propertyMap
 *            Property Mapping from property names to prefixed names/URIs
 * @param uriProperties
 *            Properties whose values should be treated as URIs
 * @param idProperty
 *            ID Property, may be null
 * @param edgePropertiesMode
 *            Mode controlling how edge properties are mapped
 */
public RdfMapping(String baseUri, String idBaseUri, Map<String, String> namespaces, boolean useStdNamespaces,
        Collection<String> includedProperties, Collection<String> excludedProperties,
        Map<String, String> propertyMap, Collection<String> uriProperties, String idProperty,
        EdgePropertiesMode edgePropertiesMode) {
    this.baseUri = baseUri;
    this.idBaseUri = idBaseUri;
    // BUG FIX: this assignment was missing, so the useStdNamespaces parameter was
    // silently ignored and the prefix map below was always built without the
    // standard namespaces (the field default is false)
    this.useStdNamespaces = useStdNamespaces;
    if (includedProperties != null)
        this.includedProperties.addAll(includedProperties);
    if (excludedProperties != null)
        this.excludedProperties.addAll(excludedProperties);
    if (propertyMap != null)
        this.propertyMap.putAll(propertyMap);
    if (uriProperties != null)
        this.uriProperties.addAll(uriProperties);
    this.idProperty = idProperty;
    this.edgePropertiesMode = edgePropertiesMode;
    // Build the prefix map, seeding with Jena's standard prefixes when requested
    this.prefixes = this.useStdNamespaces ? PrefixMapFactory.create(PrefixMapping.Standard) : PrefixMapFactory
            .create();
    if (namespaces != null) {
        // Keep both the raw namespace map and the effective prefix map; user
        // declarations override any standard prefix of the same name
        this.namespaces.putAll(namespaces);
        this.prefixes.putAll(namespaces);
    }
}
/**
 * Creates a new RDF mapping directly from an Object
 * <p>
 * This constructor assumes that the passed object comes from the processing
 * of a Pig script and thus will be a Map generated from Pig. See the
 * documentation for {@link RdfMapping} for details of the map format
 * expected.
 * </p>
 * 
 * @param object
 *            Object, expected to be a {@code Map} produced by Pig
 * @throws ExecException
 *             Thrown if a value in the map is not of the expected type
 * @throws NullPointerException
 *             Thrown if the given object is null
 * @throws IllegalArgumentException
 *             Thrown if the given object is not a Map or the edge properties
 *             mode is not a recognised {@link EdgePropertiesMode} value
 */
@SuppressWarnings("unchecked")
public RdfMapping(Object object) throws ExecException {
    // BUG FIX: error messages previously said "edge mapping" (copy-paste from
    // another mapping class) which made failures very confusing to diagnose
    if (object == null)
        throw new NullPointerException("Cannot create a RDF mapping from a null object");
    if (!(object instanceof Map<?, ?>))
        throw new IllegalArgumentException("Cannot create a RDF mapping from a non-map object");
    Map<String, Object> rdfMapping = (Map<String, Object>) object;
    this.baseUri = this.getStringValue(rdfMapping, BASE_URI, false);
    this.idBaseUri = this.getStringValue(rdfMapping, ID_BASE_URI, false);
    List<String> includes = this.getListValue(rdfMapping, INCLUDED_PROPERTIES, false);
    if (includes != null)
        this.includedProperties.addAll(includes);
    List<String> excludes = this.getListValue(rdfMapping, EXCLUDED_PROPERTIES, false);
    if (excludes != null)
        this.excludedProperties.addAll(excludes);
    Map<String, String> pmap = this.getTypedMapValue(rdfMapping, PROPERTY_MAP, false);
    if (pmap != null)
        this.propertyMap.putAll(pmap);
    List<String> uriProperties = this.getListValue(rdfMapping, URI_PROPERTIES, false);
    if (uriProperties != null)
        this.uriProperties.addAll(uriProperties);
    this.idProperty = this.getStringValue(rdfMapping, ID_PROPERTY, false);
    String edgePropertiesMode = this.getStringValue(rdfMapping, EDGE_PROPERTIES_MODE, false);
    if (edgePropertiesMode != null)
        // NOTE(review): toUpperCase() uses the default locale which can
        // misbehave for e.g. Turkish locales — consider Locale.ROOT
        this.edgePropertiesMode = EdgePropertiesMode.valueOf(edgePropertiesMode.toUpperCase().trim());
    this.useStdNamespaces = this.getBooleanValue(rdfMapping, USE_STD_NAMESPACES, false);
    this.prefixes = this.useStdNamespaces ? PrefixMapFactory.create(PrefixMapping.Standard) : PrefixMapFactory
            .create();
    Map<String, String> namespaces = this.getTypedMapValue(rdfMapping, NAMESPACES, false);
    if (namespaces != null) {
        // CONSISTENCY FIX: also record the raw namespace declarations, as the
        // other constructor does, so both construction paths leave the object
        // in the same state
        this.namespaces.putAll(namespaces);
        this.prefixes.putAll(namespaces);
    }
}
/**
 * Gets the Base URI
 * <p>
 * The Base URI is the URI against which property names are resolved (as if
 * they were fragment URIs) when they have not been explicitly mapped to an
 * absolute URI or prefixed name via the property map.
 * </p>
 * <p>
 * When no Base URI was configured, property URIs remain relative fragment
 * URIs.
 * </p>
 * 
 * @return Base URI, may be null if none was configured
 */
public String getBaseUri() {
    return baseUri;
}
/**
 * Gets the ID Base URI
 * <p>
 * Unlike {@link #getBaseUri()}, this Base URI is used solely for prepending
 * to vertex IDs when producing vertex URIs.
 * </p>
 * <p>
 * When not configured, the normal Base URI is used as a fallback; if that is
 * also absent, vertex URIs remain relative.
 * </p>
 * 
 * @return ID Base URI, may be null if none was configured
 */
public String getIdBaseUri() {
    return idBaseUri;
}
/**
 * Gets a prefix map containing the available namespace declarations
 * <p>
 * When {@link #usingStandardNamespaces()} is true this combines the user
 * supplied namespaces with Jena's standard namespaces.
 * </p>
 * 
 * @return Prefix map
 */
public PrefixMap getNamespaces() {
    return prefixes;
}
/**
 * Gets whether the standard namespaces are in use alongside any user
 * defined ones
 * <p>
 * The standard namespaces come from {@link PrefixMapping#Standard} which
 * declares RDF, RDFS, XSD, OWL and DC.
 * </p>
 * 
 * @return True if standard namespaces are in use, false otherwise
 */
public boolean usingStandardNamespaces() {
    return useStdNamespaces;
}
/**
 * Gets whether a given property is included in the mapping.
 * <p>
 * Exclusions always win: a property listed in the exclude list is never
 * included. Otherwise, if an explicit include list was given the property
 * must appear in it; an empty include list implicitly includes everything.
 * </p>
 * 
 * @param property
 *            Property
 * @return True if the property is included in the mapping, false otherwise
 */
public boolean includesProperty(String property) {
    // Explicit exclusions take precedence over everything else
    if (this.excludedProperties.contains(property)) {
        return false;
    }
    // An empty include list means all (non-excluded) properties are included
    return this.includedProperties.isEmpty() || this.includedProperties.contains(property);
}
/**
 * Gets the URI for a given vertex
 * <p>
 * The vertex ID is resolved against the ID Base URI when one is configured,
 * falling back to the ordinary Base URI, and finally to no base at all (in
 * which case the result may remain relative). See {@link #getIdBaseUri()}.
 * </p>
 * 
 * @param vertex
 *            Vertex ID
 * @return Vertex URI
 */
public String getVertexUri(String vertex) {
    // Prefer the dedicated ID base, then the general base, else no base (null)
    String base = (idBaseUri != null) ? idBaseUri : baseUri;
    return resolveUri(vertex, base);
}
/**
 * Gets the URI for a given property
 * <p>
 * Returns {@code null} for properties that are not included in the mapping.
 * Included properties are first looked up in the property map: a mapped
 * value (a prefixed name or URI reference) is resolved via
 * {@link #resolveUriReference(String)}. Unmapped properties have their name
 * resolved directly against the Base URI; with no Base URI the resulting
 * URI stays relative.
 * </p>
 * 
 * @param property
 *            Property
 * @return Property URI or null if the property should not be included in
 *         the RDF output
 */
public String getPropertyUri(String property) {
    // Excluded/not-included properties yield no URI at all
    if (!includesProperty(property)) {
        return null;
    }
    // Explicit mapping takes precedence when present
    if (propertyMap.containsKey(property)) {
        return resolveUriReference(propertyMap.get(property));
    }
    // Fall back to resolving the bare property name against the Base URI
    return resolveUri(property, baseUri);
}
/**
 * Gets the ID property (if any)
 * <p>
 * When set, the vertex ID is additionally attached as a literal value of
 * this property on the generated vertex URI, on top of being embedded in
 * the URI itself.
 * </p>
 * 
 * @return ID property, may be null if none was configured
 */
public String getIdProperty() {
    return idProperty;
}
/**
 * Resolves a URI reference
 * <p>
 * The special value {@code a} is a shortcut for the {@code rdf:type}
 * predicate. A reference containing a colon is first tried as a prefixed
 * name against the declared namespaces; anything else (or an unknown
 * prefix) is resolved as a URI against the Base URI.
 * </p>
 * 
 * @param uriref
 *            URI Reference, may be a prefixed name or a relative/absolute
 *            URI
 * @return URI, or null if {@code uriref} is null
 */
private String resolveUriReference(String uriref) {
    if (uriref == null)
        return null;
    // Allow the special a shortcut to refer to rdf:type predicate
    if (uriref.equals("a"))
        return RDF.type.getURI();
    // Then try to resolve as prefixed name
    // BUG FIX: previously used uriref.split(":") and read parts[0]; Java's
    // split discards trailing empty strings, so a reference such as ":" or
    // "::" produced a zero-length array and an ArrayIndexOutOfBoundsException.
    // indexOf avoids the regex split entirely and handles those inputs.
    int colon = uriref.indexOf(':');
    if (colon != -1) {
        String nsPrefix = uriref.substring(0, colon);
        String localName = uriref.substring(colon + 1);
        if (this.prefixes.contains(nsPrefix)) {
            return this.prefixes.expand(nsPrefix, localName);
        }
    }
    // Otherwise try to resolve as URI
    return this.resolveUri(uriref, this.baseUri);
}
/**
 * Resolves a (possibly relative) URI against an optional Base URI.
 * <p>
 * Absolute URIs are returned untouched. With a Base URI present, the method
 * picks a resolution strategy based on the shape of the input: fragment or
 * query-string references and path-bearing references are resolved
 * directly; when the Base URI ends in {@code #} other inputs are first
 * encoded as a fragment before resolution; anything else is resolved
 * normally. With no Base URI, a relative input is returned as-is unless
 * Jena reports it as an illegal IRI.
 * </p>
 * 
 * @param uri
 *            URI to resolve, may be relative or absolute
 * @param baseUri
 *            Base URI to resolve against, may be null
 * @return Resolved URI (possibly still relative when no Base URI applies)
 * @throws IllegalArgumentException
 *             If the inputs cannot be combined into a valid IRI, or the
 *             input is an illegal IRI with no Base URI to resolve against
 */
private String resolveUri(String uri, String baseUri) {
    IRI iri = IRIFactory.iriImplementation().create(uri);
    if (iri.isAbsolute()) {
        // Already an absolute URI so can leave as-is
        return uri;
    } else if (baseUri != null) {
        // Attempt to resolve against Base URI
        if (uri.startsWith("#") || uri.startsWith("?")) {
            // Already a fragment/query string URI so can resolve as-is
            return IRIResolver.resolveString(uri, baseUri);
        } else if (uri.contains("/") && (uri.indexOf('?') == -1 || uri.indexOf('/') < uri.indexOf('?'))
                && (uri.indexOf('#') == -1 || uri.indexOf('/') < uri.indexOf('#'))) {
            // Has path segments (denoted by a /) which aren't after the
            // fragment or query string start characters so resolve properly
            return IRIResolver.resolveString(uri, baseUri);
        } else if (baseUri.endsWith("#")) {
            // Base URI ends with a # which implies we want URIs to be the
            // fragment portion of the URI
            // Therefore resolve as if it were a fragment URI
            // (the java.net.URI constructor percent-encodes characters that
            // are not legal in a fragment before resolution)
            try {
                return IRIResolver.resolveString(new URI(null, null, null, uri).toString(), baseUri);
            } catch (RiotException e) {
                throw new IllegalArgumentException("Unable to form a valid IRI using " + uri + " with base "
                        + baseUri);
            } catch (URISyntaxException e) {
                throw new IllegalArgumentException("Invalid URI " + uri);
            }
        }
        // Otherwise resolve normally
        return IRIResolver.resolveString(uri, baseUri);
    } else if (iri.hasViolation(false)) {
        // Check for illegal URI after trying to relativize because
        // that may succeed and relativization will error if it fails
        // anyway
        throw new IllegalArgumentException("URI " + uri + " is an illegal IRI");
    } else {
        // Leave as a relative URI
        return uri;
    }
}
/**
* Applies the mapping to the given input
*
* @param input
* Input
* @param output
* Output
* @throws ExecException
*/
@SuppressWarnings("unchecked")
public void apply(Tuple input, DataBag output) throws ExecException {
if (input == null || input.size() != 2)
return;
SerializedGraphElementStringTypeVids element = (SerializedGraphElementStringTypeVids) input.get(0);
GraphElement<StringType> graphElement = element.graphElement();
if (graphElement == null)
return;
if (graphElement.isEdge()) {
// Edge Mapping
// Get the predicate URI
Edge<StringType> edge = (Edge<StringType>) graphElement.get();
String predicateUri = this.getPropertyUri(edge.getLabel().get());
if (predicateUri == null)
return;
String sourceId = edge.getSrc().getName().get();
String targetId = edge.getDst().getName().get();
if (sourceId == null || targetId == null)
return;
// Create nodes we need
Node sourceNode = NodeFactory.createURI(this.getVertexUri(sourceId));
Node targetNode = NodeFactory.createURI(this.getVertexUri(targetId));
Node predicate = NodeFactory.createURI(predicateUri);
// Decide if and how we need to map edge properties to triples
switch (this.edgePropertiesMode) {
case REIFIED:
// Check whether we need to reify
if (edge.getProperties().size() > 0) {
// Need to reify the edge triple in order to add edge
// properties
Node edgeNode = NodeFactory.createAnon();
this.outputTriple(new Triple(edgeNode, RDF.type.asNode(), RDF.Statement.asNode()), output);
this.outputTriple(new Triple(edgeNode, RDF.subject.asNode(), sourceNode), output);
this.outputTriple(new Triple(edgeNode, RDF.predicate.asNode(), predicate), output);
this.outputTriple(new Triple(edgeNode, RDF.object.asNode(), targetNode), output);
// Output all relevant properties
this.outputProperties(output, edge.getProperties(), edgeNode);
}
break;
case NARY:
// Check whether we need to express in n-ary form
Node edgeNode = NodeFactory.createAnon();
this.outputTriple(new Triple(sourceNode, predicate, edgeNode), output);
this.outputTriple(new Triple(edgeNode, RDF.value.asNode(), targetNode), output);
// Output all relevant properties
this.outputProperties(output, edge.getProperties(), edgeNode);
break;
case IGNORE:
default:
// No additional work needed since edge properties are ignored
}
// Regardless of edge properties mode we always generate a triple
// that expresses the edge directly
Triple edgeTriple = new Triple(sourceNode, predicate, targetNode);
this.outputTriple(edgeTriple, output);
} else if (element.graphElement().isVertex()) {
// Vertex Mapping
// Get the vertex URI
Vertex<StringType> vertex = (Vertex<StringType>) graphElement.get();
String subjectUri = this.getVertexUri(vertex.getId().getName().get());
if (subjectUri == null)
return;
Node subject = NodeFactory.createURI(subjectUri);
// Add ID Property if this is mapped
if (this.idProperty != null) {
String idUri = this.getPropertyUri(this.idProperty);
if (idUri != null) {
this.outputTriple(
new Triple(subject, NodeFactory.createURI(idUri), NodeFactory.createLiteral(vertex.getId()
.getName().get())), output);
}
}
// Add all relevant properties
outputProperties(output, vertex.getProperties(), subject);
}
}
protected void outputProperties(DataBag output, PropertyMap properties, Node subject) {
for (Writable property : properties.getPropertyKeys()) {
// Create predicate
String propertyName = ((StringType) property).get();
String propertyUri = this.getPropertyUri(propertyName);
if (propertyUri == null)
continue;
// Create object
Node object = this.uriProperties.contains(propertyName) ? this.toUriObject(properties
.getProperty(propertyName)) : this.toLiteralObject(properties.getProperty(propertyName));
if (object == null)
continue;
// Output triple expressing the property
this.outputTriple(new Triple(subject, NodeFactory.createURI(propertyUri), object), output);
}
}
private Node toUriObject(Writable value) {
String strValue = value.toString();
if (strValue == null)
return null;
return NodeFactory.createURI(this.resolveUri(strValue, this.baseUri));
}
private Node toLiteralObject(Writable value) {
// Since the RDF Mapping must apply to something generated by the
// Property Graph Mapping we know that there should in principal only be
// a limited range of types we need to convert
if (value instanceof StringType) {
return NodeFactory.createLiteral(((StringType) value).get());
} else if (value instanceof IntType) {
return NodeFactoryExtra.intToNode(((IntType) value).get());
} else if (value instanceof LongType) {
return NodeFactoryExtra.intToNode(((LongType) value).get());
} else if (value instanceof FloatType) {
return NodeFactoryExtra.floatToNode(((FloatType) value).get());
} else if (value instanceof DoubleType) {
return NodeFactoryExtra.doubleToNode(((DoubleType) value).get());
} else {
// Can't convert other types
return null;
}
}
/**
* Outputs a triple
*
* @param t
* Triple
* @param output
* Output
*/
private void outputTriple(Triple t, DataBag output) {
StringBuilder tripleString = new StringBuilder();
tripleString.append(FmtUtils.stringForTriple(t, null));
tripleString.append(" .");
output.add(TupleFactory.getInstance().newTuple(tripleString.toString()));
}
@Override
public Map<String, Object> toMap() throws ExecException {
Map<String, Object> mapping = new HashMap<String, Object>();
if (this.baseUri != null)
mapping.put(BASE_URI, this.baseUri);
if (this.idBaseUri != null)
mapping.put(ID_BASE_URI, this.idBaseUri);
if (this.idProperty != null)
mapping.put(ID_PROPERTY, this.idProperty);
mapping.put(USE_STD_NAMESPACES, Boolean.toString(this.useStdNamespaces).toLowerCase());
if (this.namespaces.size() > 0)
mapping.put(NAMESPACES, this.namespaces);
if (this.includedProperties.size() > 0)
mapping.put(INCLUDED_PROPERTIES, this.setToTuple(this.includedProperties));
if (this.excludedProperties.size() > 0)
mapping.put(EXCLUDED_PROPERTIES, this.setToTuple(this.excludedProperties));
if (this.propertyMap.size() > 0)
mapping.put(PROPERTY_MAP, this.propertyMap);
if (this.uriProperties.size() > 0)
mapping.put(URI_PROPERTIES, this.setToTuple(this.uriProperties));
if (this.edgePropertiesMode != EdgePropertiesMode.IGNORE)
mapping.put(EDGE_PROPERTIES_MODE, this.edgePropertiesMode.toString());
return mapping;
}
public String toString() {
StringBuilder builder = new StringBuilder();
builder.append('[');
Map<String, String> properties = new HashMap<>();
if (this.baseUri != null)
properties.put(BASE_URI, this.baseUri);
if (this.idBaseUri != null)
properties.put(ID_BASE_URI, this.idBaseUri);
if (this.idProperty != null)
properties.put(ID_PROPERTY, this.idProperty);
if (this.edgePropertiesMode != null)
properties.put(EDGE_PROPERTIES_MODE, this.edgePropertiesMode.toString().toLowerCase());
properties.put(USE_STD_NAMESPACES, Boolean.toString(this.useStdNamespaces).toLowerCase());
Iterator<Entry<String, String>> es = properties.entrySet().iterator();
while (es.hasNext()) {
Entry<String, String> e = es.next();
builder.append('\'');
builder.append(e.getKey());
builder.append("' # '");
builder.append(e.getValue());
builder.append('\'');
if (es.hasNext())
builder.append(',');
builder.append(' ');
}
if (this.includedProperties.size() > 0) {
builder.append(", ");
builder.append(this.tupleToMapKeyValueString(this.includedProperties, INCLUDED_PROPERTIES));
}
if (this.excludedProperties.size() > 0) {
builder.append(", ");
builder.append(this.tupleToMapKeyValueString(this.excludedProperties, EXCLUDED_PROPERTIES));
}
if (this.namespaces.size() > 0) {
builder.append(", ");
builder.append(this.mapToMapKeyValueString(this.namespaces, NAMESPACES));
}
if (this.propertyMap.size() > 0) {
builder.append(", ");
builder.append(this.mapToMapKeyValueString(this.propertyMap, PROPERTY_MAP));
}
if (this.uriProperties.size() > 0) {
builder.append(", ");
builder.append(this.tupleToMapKeyValueString(this.uriProperties, URI_PROPERTIES));
}
builder.append(']');
return builder.toString();
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.logaggregation.filecontroller;
import static org.junit.Assert.*;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.OutputStream;
import java.io.Writer;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.yarn.api.records.ApplicationAccessType;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogKey;
import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogValue;
import org.apache.hadoop.yarn.logaggregation.ContainerLogMeta;
import org.apache.hadoop.yarn.logaggregation.ContainerLogsRequest;
import org.apache.hadoop.yarn.logaggregation.filecontroller.LogAggregationFileController;
import org.apache.hadoop.yarn.logaggregation.filecontroller.LogAggregationFileControllerContext;
import org.apache.hadoop.yarn.logaggregation.filecontroller.LogAggregationFileControllerFactory;
import org.apache.hadoop.yarn.logaggregation.filecontroller.tfile.LogAggregationTFileController;
import org.apache.hadoop.yarn.webapp.View.ViewContext;
import org.apache.hadoop.yarn.webapp.view.HtmlBlock.Block;
import org.junit.Test;
/**
 * Test LogAggregationFileControllerFactory.
 */
public class TestLogAggregationFileControllerFactory {

  @Test(timeout = 10000)
  public void testLogAggregationFileControllerFactory() throws Exception {
    ApplicationId appId = ApplicationId.newInstance(
        System.currentTimeMillis(), 1);
    String appOwner = "test";
    String remoteLogRootDir = "target/app-logs/";
    Configuration conf = new YarnConfiguration();
    conf.setBoolean(YarnConfiguration.LOG_AGGREGATION_ENABLED, true);
    conf.set(YarnConfiguration.NM_REMOTE_APP_LOG_DIR, remoteLogRootDir);
    conf.set(YarnConfiguration.NM_REMOTE_APP_LOG_DIR_SUFFIX, "log");
    FileSystem fs = FileSystem.get(conf);

    // With no explicit configuration only the default TFile controller
    // should be configured, and it should be used for both write and read
    LogAggregationFileControllerFactory factory =
        new LogAggregationFileControllerFactory(conf);
    LinkedList<LogAggregationFileController> list = factory
        .getConfiguredLogAggregationFileControllerList();
    assertEquals(1, list.size());
    assertTrue(list.getFirst() instanceof LogAggregationTFileController);
    assertTrue(factory.getFileControllerForWrite()
        instanceof LogAggregationTFileController);
    Path logPath = list.getFirst().getRemoteAppLogDir(appId, appOwner);
    try {
      if (fs.exists(logPath)) {
        fs.delete(logPath, true);
      }
      assertTrue(fs.mkdirs(logPath));
      // try-with-resources: the previous explicit close() leaked the
      // writer when write() threw
      try (Writer writer =
          new FileWriter(new File(logPath.toString(), "testLog"))) {
        writer.write("test");
      }
      assertTrue(factory.getFileControllerForRead(appId, appOwner)
          instanceof LogAggregationTFileController);
    } finally {
      fs.delete(logPath, true);
    }

    conf.set(YarnConfiguration.LOG_AGGREGATION_FILE_FORMATS,
        "TestLogAggregationFileController");
    // Did not set class for TestLogAggregationFileController,
    // should get the exception.
    try {
      factory = new LogAggregationFileControllerFactory(conf);
      fail("Factory construction should fail when no class is configured"
          + " for TestLogAggregationFileController");
    } catch (Exception ex) {
      // expected: no class configured for the declared controller
    }

    // With the class configured the custom controller should take
    // precedence over TFile for writing
    conf.set(YarnConfiguration.LOG_AGGREGATION_FILE_FORMATS,
        "TestLogAggregationFileController,TFile");
    conf.setClass(
        "yarn.log-aggregation.file-controller.TestLogAggregationFileController"
        + ".class", TestLogAggregationFileController.class,
        LogAggregationFileController.class);
    conf.set(
        "yarn.log-aggregation.TestLogAggregationFileController"
        + ".remote-app-log-dir", remoteLogRootDir);
    conf.set(
        "yarn.log-aggregation.TestLogAggregationFileController"
        + ".remote-app-log-dir-suffix", "testLog");
    factory = new LogAggregationFileControllerFactory(conf);
    list = factory.getConfiguredLogAggregationFileControllerList();
    assertEquals(2, list.size());
    assertTrue(list.getFirst() instanceof TestLogAggregationFileController);
    assertTrue(list.getLast() instanceof LogAggregationTFileController);
    assertTrue(factory.getFileControllerForWrite()
        instanceof TestLogAggregationFileController);
    logPath = list.getFirst().getRemoteAppLogDir(appId, appOwner);
    try {
      if (fs.exists(logPath)) {
        fs.delete(logPath, true);
      }
      assertTrue(fs.mkdirs(logPath));
      try (Writer writer =
          new FileWriter(new File(logPath.toString(), "testLog"))) {
        writer.write("test");
      }
      assertTrue(factory.getFileControllerForRead(appId, appOwner)
          instanceof TestLogAggregationFileController);
    } finally {
      fs.delete(logPath, true);
    }
  }

  /**
   * Minimal no-op controller used only to exercise the factory's
   * configuration handling.
   */
  private static class TestLogAggregationFileController
      extends LogAggregationFileController {

    @Override
    public void initInternal(Configuration conf) {
      // Read this controller's remote dir and suffix from the configuration
      String remoteDirStr = String.format(
          YarnConfiguration.LOG_AGGREGATION_REMOTE_APP_LOG_DIR_FMT,
          this.fileControllerName);
      this.remoteRootLogDir = new Path(conf.get(remoteDirStr));
      String suffix = String.format(
          YarnConfiguration.LOG_AGGREGATION_REMOTE_APP_LOG_DIR_SUFFIX_FMT,
          this.fileControllerName);
      this.remoteRootLogDirSuffix = conf.get(suffix);
    }

    @Override
    public void closeWriter() {
      // Do Nothing
    }

    @Override
    public void write(LogKey logKey, LogValue logValue) throws IOException {
      // Do Nothing
    }

    @Override
    public void postWrite(LogAggregationFileControllerContext record)
        throws Exception {
      // Do Nothing
    }

    @Override
    public void initializeWriter(LogAggregationFileControllerContext context)
        throws IOException {
      // Do Nothing
    }

    @Override
    public boolean readAggregatedLogs(ContainerLogsRequest logRequest,
        OutputStream os) throws IOException {
      return false;
    }

    @Override
    public List<ContainerLogMeta> readAggregatedLogsMeta(
        ContainerLogsRequest logRequest) throws IOException {
      return null;
    }

    @Override
    public void renderAggregatedLogsBlock(Block html, ViewContext context) {
      // DO NOTHING
    }

    @Override
    public String getApplicationOwner(Path aggregatedLogPath,
        ApplicationId appId)
        throws IOException {
      return null;
    }

    @Override
    public Map<ApplicationAccessType, String> getApplicationAcls(
        Path aggregatedLogPath, ApplicationId appId) throws IOException {
      return null;
    }
  }
}
| |
package org.multibit.viewsystem.swing;
/**
* L2FProd.com Common Components 7.3 License.
*
* Copyright 2005-2007 L2FProd.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.awt.*;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Hashtable;
import java.util.Locale;
import java.util.Map;

import javax.swing.*;
import javax.swing.border.Border;
import javax.swing.border.CompoundBorder;
import javax.swing.border.EmptyBorder;
import javax.swing.plaf.UIResource;
import javax.swing.text.JTextComponent;
import javax.swing.text.View;
import javax.swing.text.html.HTMLDocument;

import org.multibit.controller.MultiBitController;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * StatusBar. <BR>
 * A status bar is made of multiple zones. A zone can be any JComponent.
 */
public class StatusBar extends JComponent {

    private static final long serialVersionUID = 7824115980324911080L;

    /**
     * The key used to identified the default zone
     */
    public final static String DEFAULT_ZONE = "default";

    // HashMap rather than Hashtable (resolves the old TODO): the status bar
    // is a Swing component so access is assumed to be confined to the EDT
    // and Hashtable's synchronization is unnecessary
    private Map<String, Component> idToZones;

    private Border zoneBorder;

    /**
     * Construct a new StatusBar
     */
    public StatusBar(MultiBitController controller) {
        setLayout(LookAndFeelTweaks.createHorizontalPercentLayout(controller.getLocaliser().getLocale()));
        idToZones = new HashMap<String, Component>();
        setZoneBorder(BorderFactory.createEmptyBorder());
        setBackground(MultiBitFrame.BACKGROUND_COLOR);
        setBorder(BorderFactory.createEmptyBorder(2, 0, 2, 0));
        applyComponentOrientation(ComponentOrientation.getOrientation(controller.getLocaliser().getLocale()));
    }

    public void setZoneBorder(Border border) {
        zoneBorder = border;
    }

    /**
     * Adds a new zone in the StatusBar, replacing any existing zone with the
     * same id.
     *
     * @param id
     *            zone identifier
     * @param zone
     *            component making up the zone
     * @param constraints
     *            one of the constraint support by the
     *            com.l2fprod.common.swing.PercentLayout
     * @param tweak
     *            "left"/"right" border tweak applied to JLabel zones
     */
    public void addZone(String id, Component zone, String constraints, String tweak) {
        // is there already a zone with this id?
        Component previousZone = getZone(id);
        if (previousZone != null) {
            remove(previousZone);
            idToZones.remove(id);
        }
        if (zone instanceof JComponent) {
            JComponent jc = (JComponent) zone;
            // Only replace borders the look-and-feel installed (UIResource)
            // or missing borders; user-set borders are left alone
            if (jc.getBorder() == null || jc.getBorder() instanceof UIResource) {
                if (jc instanceof JLabel) {
                    if ("left".equals(tweak)) {
                        jc.setBorder(new CompoundBorder(new EmptyBorder(0, 3, 0, 2), BorderFactory
                                .createLineBorder(Color.lightGray)));
                    } else {
                        if ("right".equals(tweak)) {
                            jc.setBorder(new CompoundBorder(zoneBorder, new EmptyBorder(0, 2, 0, 1)));
                        } else {
                            jc.setBorder(new CompoundBorder(zoneBorder, new EmptyBorder(0, 2, 0, 2)));
                        }
                    }
                    ((JLabel) jc).setText(" ");
                } else {
                    if (jc instanceof JPanel) {
                        // no border
                    } else {
                        jc.setBorder(zoneBorder);
                    }
                }
            }
        }
        add(zone, constraints);
        idToZones.put(id, zone);
    }

    public Component getZone(String id) {
        return idToZones.get(id);
    }

    /**
     * For example:
     *
     * <code>
     * setZones(new String[]{"A","B"},
     *          new JComponent[]{new JLabel(), new JLabel()},
     *          new String[]{"33%","*"});
     * </code>
     *
     * would construct a new status bar with two zones (two JLabels) named A and
     * B, the first zone A will occupy 33 percents of the overall size of the
     * status bar and B the left space.
     *
     * @param ids
     *            a value of type 'String[]'
     * @param zones
     *            a value of type 'JComponent[]'
     * @param constraints
     *            a value of type 'String[]'
     */
    public void setZones(String[] ids, Component[] zones, String[] constraints) {
        removeAll();
        idToZones.clear();
        for (int i = 0, c = zones.length; i < c; i++) {
            addZone(ids[i], zones[i], constraints[i], "");
        }
    }
}
/**
 * PercentLayout. <BR>
 * Constraint based layout which allow the space to be splitted using
 * percentages. The following are allowed when adding components to container:
 * <ul>
 * <li>container.add(component); <br>
 * in this case, the component will be sized to its preferred size
 * <li>container.add(component, "100"); <br>
 * in this case, the component will have a width (or height) of 100
 * <li>container.add(component, "25%"); <br>
 * in this case, the component will have a width (or height) of 25 % of the
 * container width (or height) <br>
 * <li>container.add(component, "*"); <br>
 * in this case, the component will take the remaining space. if several
 * components use the "*" constraint the space will be divided among the
 * components.
 * </ul>
 *
 * @javabean.class name="PercentLayout"
 *                 shortDescription="A layout supports constraints expressed in percent."
 */
class PercentLayout implements LayoutManager2 {

    /**
     * Useful constant to layout the components horizontally (from top to
     * bottom).
     */
    public final static int HORIZONTAL = 0;

    /**
     * Useful constant to layout the components vertically (from left to right).
     */
    public final static int VERTICAL = 1;

    /** Base class of the constraints understood by this layout. */
    static class Constraint {
        protected Object value;

        private Constraint(Object value) {
            this.value = value;
        }
    }

    /** Constraint giving a component a fixed pixel size. */
    static class NumberConstraint extends Constraint {
        public NumberConstraint(int d) {
            this(Integer.valueOf(d));
        }

        public NumberConstraint(Integer d) {
            super(d);
        }

        public int intValue() {
            return (Integer) value;
        }
    }

    /** Constraint giving a component a fraction (0..1) of the remaining space. */
    static class PercentConstraint extends Constraint {
        public PercentConstraint(float d) {
            super(d);
        }

        public float floatValue() {
            return (Float) value;
        }
    }

    private final static Constraint REMAINING_SPACE = new Constraint("*");

    private final static Constraint PREFERRED_SIZE = new Constraint("");

    private int orientation;
    private int gap;
    private Locale locale;

    // HashMap rather than Hashtable (resolves the old "Consider using
    // HashMap" note): layout managers are assumed to be used on the EDT only,
    // so Hashtable's synchronization is unnecessary
    private Map<Component, Constraint> m_ComponentToConstraint;

    /**
     * Creates a new HORIZONTAL PercentLayout with a gap of 0.
     */
    public PercentLayout(Locale locale) {
        this(HORIZONTAL, 0, locale);
    }

    public PercentLayout(int orientation, int gap, Locale locale) {
        setOrientation(orientation);
        this.gap = gap;
        this.locale = locale;
        m_ComponentToConstraint = new HashMap<Component, Constraint>();
    }

    public void setGap(int gap) {
        this.gap = gap;
    }

    /**
     * @javabean.property bound="true" preferred="true"
     */
    public int getGap() {
        return gap;
    }

    public void setOrientation(int orientation) {
        if (orientation != HORIZONTAL && orientation != VERTICAL) {
            throw new IllegalArgumentException("Orientation must be one of HORIZONTAL or VERTICAL");
        }
        this.orientation = orientation;
    }

    /**
     * @javabean.property bound="true" preferred="true"
     */
    public int getOrientation() {
        return orientation;
    }

    public Constraint getConstraint(Component component) {
        return m_ComponentToConstraint.get(component);
    }

    /**
     * Records the constraint for a component, accepting a Constraint object,
     * a Number (fixed size), "*" (remaining space), "" or null (preferred
     * size), "NN%" (percentage) or "NN" (fixed size) strings.
     */
    public void setConstraint(Component component, Object constraints) {
        if (constraints instanceof Constraint) {
            m_ComponentToConstraint.put(component, (Constraint) constraints);
        } else if (constraints instanceof Number) {
            setConstraint(component, new NumberConstraint(((Number) constraints).intValue()));
        } else if ("*".equals(constraints)) {
            setConstraint(component, REMAINING_SPACE);
        } else if ("".equals(constraints)) {
            setConstraint(component, PREFERRED_SIZE);
        } else if (constraints instanceof String) {
            String s = (String) constraints;
            if (s.endsWith("%")) {
                // parseFloat avoids the boxing Float.valueOf caused here
                float value = Float.parseFloat(s.substring(0, s.length() - 1)) / 100;
                if (value > 1 || value < 0) {
                    throw new IllegalArgumentException("percent value must be >= 0 and <= 100");
                }
                setConstraint(component, new PercentConstraint(value));
            } else {
                setConstraint(component, new NumberConstraint(Integer.parseInt(s)));
            }
        } else if (constraints == null) {
            // null constraint means preferred size
            setConstraint(component, PREFERRED_SIZE);
        } else {
            throw new IllegalArgumentException("Invalid Constraint");
        }
    }

    public void addLayoutComponent(Component component, Object constraints) {
        setConstraint(component, constraints);
    }

    /**
     * Returns the alignment along the x axis. This specifies how the component
     * would like to be aligned relative to other components. The value should
     * be a number between 0 and 1 where 0 represents alignment along the
     * origin, 1 is aligned the furthest away from the origin, 0.5 is centered,
     * etc.
     */
    public float getLayoutAlignmentX(Container target) {
        return 1.0f / 2.0f;
    }

    /**
     * Returns the alignment along the y axis. This specifies how the component
     * would like to be aligned relative to other components. The value should
     * be a number between 0 and 1 where 0 represents alignment along the
     * origin, 1 is aligned the furthest away from the origin, 0.5 is centered,
     * etc.
     */
    public float getLayoutAlignmentY(Container target) {
        return 1.0f / 2.0f;
    }

    /**
     * Invalidates the layout, indicating that if the layout manager has cached
     * information it should be discarded.
     */
    public void invalidateLayout(Container target) {
    }

    /**
     * Adds the specified component with the specified name to the layout.
     *
     * @param name
     *            the component name
     * @param comp
     *            the component to be added
     */
    public void addLayoutComponent(String name, Component comp) {
    }

    /**
     * Removes the specified component from the layout.
     *
     * @param comp
     *            the component ot be removed
     */
    public void removeLayoutComponent(Component comp) {
        m_ComponentToConstraint.remove(comp);
    }

    /**
     * Calculates the minimum size dimensions for the specified panel given the
     * components in the specified parent container.
     *
     * @param parent
     *            the component to be laid out
     * @see #preferredLayoutSize
     */
    public Dimension minimumLayoutSize(Container parent) {
        return preferredLayoutSize(parent);
    }

    /**
     * Returns the maximum size of this component.
     *
     * @see java.awt.Component#getMinimumSize()
     * @see java.awt.Component#getPreferredSize()
     * @see java.awt.LayoutManager
     */
    public Dimension maximumLayoutSize(Container parent) {
        return new Dimension(Integer.MAX_VALUE, Integer.MAX_VALUE);
    }

    public Dimension preferredLayoutSize(Container parent) {
        Component[] components = parent.getComponents();
        Insets insets = parent.getInsets();
        int width = 0;
        int height = 0;
        Dimension componentPreferredSize;
        boolean firstVisibleComponent = true;
        for (int i = 0, c = components.length; i < c; i++) {
            if (components[i].isVisible()) {
                componentPreferredSize = components[i].getPreferredSize();
                if (orientation == HORIZONTAL) {
                    height = Math.max(height, componentPreferredSize.height);
                    width += componentPreferredSize.width;
                    if (firstVisibleComponent) {
                        firstVisibleComponent = false;
                    } else {
                        // gaps only go between visible components
                        width += gap;
                    }
                } else {
                    height += componentPreferredSize.height;
                    width = Math.max(width, componentPreferredSize.width);
                    if (firstVisibleComponent) {
                        firstVisibleComponent = false;
                    } else {
                        height += gap;
                    }
                }
            }
        }
        return new Dimension(width + insets.right + insets.left, height + insets.top + insets.bottom);
    }

    public void layoutContainer(Container parent) {
        Insets insets = parent.getInsets();
        Dimension d = parent.getSize();
        // calculate the available sizes
        d.width = d.width - insets.left - insets.right;
        d.height = d.height - insets.top - insets.bottom;
        // pre-calculate the size of each components
        Component[] components = parent.getComponents();
        int[] sizes = new int[components.length];
        // calculate the available size
        int availableSize = (HORIZONTAL == orientation ? d.width : d.height) - (components.length - 1) * gap;
        // PENDING(fred): the following code iterates 4 times on the component
        // array, need to find something more efficient!
        // give priority to components who want to use their preferred size or
        // who
        // have a predefined size
        for (int i = 0, c = components.length; i < c; i++) {
            if (components[i].isVisible()) {
                Constraint constraint = m_ComponentToConstraint.get(components[i]);
                if (constraint == null || constraint == PREFERRED_SIZE) {
                    sizes[i] = (HORIZONTAL == orientation ? components[i].getPreferredSize().width : components[i]
                            .getPreferredSize().height);
                    availableSize -= sizes[i];
                } else if (constraint instanceof NumberConstraint) {
                    sizes[i] = ((NumberConstraint) constraint).intValue();
                    availableSize -= sizes[i];
                }
            }
        }
        // then work with the components who want a percentage of the remaining
        // space
        int remainingSize = availableSize;
        for (int i = 0, c = components.length; i < c; i++) {
            if (components[i].isVisible()) {
                Constraint constraint = m_ComponentToConstraint.get(components[i]);
                if (constraint instanceof PercentConstraint) {
                    sizes[i] = (int) (remainingSize * ((PercentConstraint) constraint).floatValue());
                    availableSize -= sizes[i];
                }
            }
        }
        // finally share the remaining space between the other components
        ArrayList<Integer> remaining = new ArrayList<Integer>();
        for (int i = 0, c = components.length; i < c; i++) {
            if (components[i].isVisible()) {
                Constraint constraint = m_ComponentToConstraint.get(components[i]);
                if (constraint == REMAINING_SPACE) {
                    remaining.add(i);
                    sizes[i] = 0;
                }
            }
        }
        if (remaining.size() > 0) {
            int rest = availableSize / remaining.size();
            for (Integer aRemaining : remaining) {
                sizes[aRemaining] = rest;
            }
        }
        // all calculations are done, apply the sizes
        int currentOffset = (HORIZONTAL == orientation ? insets.left : insets.top);
        if (!ComponentOrientation.getOrientation(locale).isLeftToRight()) {
            // right-to-left locales lay out from the right edge backwards
            currentOffset = (HORIZONTAL == orientation ? d.width - insets.right : insets.top);
        }
        for (int i = 0, c = components.length; i < c; i++) {
            if (components[i].isVisible()) {
                if (HORIZONTAL == orientation) {
                    if (ComponentOrientation.getOrientation(locale).isLeftToRight()) {
                        components[i].setBounds(currentOffset, insets.top, sizes[i], d.height);
                    } else {
                        components[i].setBounds(currentOffset - sizes[i], insets.top, sizes[i], d.height);
                    }
                } else {
                    components[i].setBounds(insets.left, currentOffset, d.width, sizes[i]);
                }
                if (ComponentOrientation.getOrientation(locale).isLeftToRight()) {
                    currentOffset += gap + sizes[i];
                } else {
                    currentOffset = currentOffset - gap - sizes[i];
                }
            }
        }
    }
}
/**
* LookAndFeelTweaks. <br>
*
*/
class LookAndFeelTweaks {
private static final Logger log = LoggerFactory.getLogger(LookAndFeelTweaks.class);
public final static Border PANEL_BORDER = BorderFactory.createEmptyBorder(3, 3, 3, 3);
// TODO These are never used
public final static Border WINDOW_BORDER = BorderFactory.createEmptyBorder(4, 10, 10, 10);
public final static Border EMPTY_BORDER = BorderFactory.createEmptyBorder();
public static void tweak() {
Object listFont = UIManager.get("List.font");
UIManager.put("Table.font", listFont);
UIManager.put("ToolTip.font", listFont);
UIManager.put("TextField.font", listFont);
UIManager.put("FormattedTextField.font", listFont);
UIManager.put("Viewport.background", "Table.background");
}
public static PercentLayout createVerticalPercentLayout(Locale locale) {
return new PercentLayout(PercentLayout.VERTICAL, 8, locale);
}
public static PercentLayout createHorizontalPercentLayout(Locale locale) {
return new PercentLayout(PercentLayout.HORIZONTAL, 4, locale);
}
public static BorderLayout createBorderLayout() {
return new BorderLayout(8, 8);
}
public static void setBorder(JComponent component) {
if (component instanceof JPanel) {
component.setBorder(PANEL_BORDER);
}
}
public static void setBorderLayout(Container container) {
container.setLayout(new BorderLayout(3, 3));
}
public static void makeBold(JComponent component) {
component.setFont(component.getFont().deriveFont(Font.BOLD));
}
public static void makeMultilineLabel(JTextComponent area) {
area.setFont(UIManager.getFont("Label.font"));
area.setEditable(false);
area.setOpaque(false);
if (area instanceof JTextArea) {
((JTextArea) area).setWrapStyleWord(true);
((JTextArea) area).setLineWrap(true);
}
}
public static void htmlize(JComponent component) {
htmlize(component, UIManager.getFont("Button.font"));
}
public static void htmlize(JComponent component, Font font) {
String stylesheet = "body { margin-top: 0; margin-bottom: 0; margin-left: 0; margin-right: 0; font-family: "
+ font.getName() + "; font-size: " + font.getSize() + "pt; }"
+ "a, p, li { margin-top: 0; margin-bottom: 0; margin-left: 0; margin-right: 0; font-family: " + font.getName()
+ "; font-size: " + font.getSize() + "pt; }";
try {
HTMLDocument doc = null;
if (component instanceof JEditorPane) {
if (((JEditorPane) component).getDocument() instanceof HTMLDocument) {
doc = (HTMLDocument) ((JEditorPane) component).getDocument();
}
} else {
View v = (View) component.getClientProperty(javax.swing.plaf.basic.BasicHTML.propertyKey);
if (v != null && v.getDocument() instanceof HTMLDocument) {
doc = (HTMLDocument) v.getDocument();
}
}
if (doc != null) {
doc.getStyleSheet().loadRules(new java.io.StringReader(stylesheet), null);
} // end of if (doc != null)
} catch (Exception e) {
log.error(e.getMessage(), e);
}
}
/**
 * Wraps the given border so the shared panel padding appears inside it.
 *
 * @param border outer border to keep
 * @return a compound border: {@code border} outside, {@code PANEL_BORDER} inside
 */
public static Border addMargin(Border border) {
    Border margin = PANEL_BORDER;
    return new CompoundBorder(border, margin);
}
}
| |
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.search.child;
import com.carrotsearch.hppc.ObjectFloatOpenHashMap;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.*;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.ToStringUtils;
import org.elasticsearch.ElasticSearchException;
import org.elasticsearch.common.bytes.HashedBytesArray;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.lucene.docset.DocIdSets;
import org.elasticsearch.common.lucene.search.ApplyAcceptedDocsFilter;
import org.elasticsearch.common.lucene.search.NoopCollector;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.recycler.Recycler;
import org.elasticsearch.common.recycler.RecyclerUtils;
import org.elasticsearch.index.cache.id.IdReaderTypeCache;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
import java.util.Set;
/**
* A query implementation that executes the wrapped parent query and
* connects the matching parent docs to the related child documents
* using the {@link IdReaderTypeCache}.
*/
// TODO We use a score of 0 to indicate a doc was not scored in uidToScore, this means score of 0 can be problematic, if we move to HPCC, we can use lset/...
public class ParentQuery extends Query {

    private final Query originalParentQuery;
    private final String parentType;
    private final Filter childrenFilter;

    // Set lazily on first rewrite; rewriteIndexReader is kept only to assert that
    // the weight is later created against the same reader the query was rewritten with.
    private Query rewrittenParentQuery;
    private IndexReader rewriteIndexReader;

    public ParentQuery(Query parentQuery, String parentType, Filter childrenFilter) {
        this.originalParentQuery = parentQuery;
        this.parentType = parentType;
        this.childrenFilter = new ApplyAcceptedDocsFilter(childrenFilter);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || obj.getClass() != this.getClass()) {
            return false;
        }
        ParentQuery that = (ParentQuery) obj;
        if (!originalParentQuery.equals(that.originalParentQuery)) {
            return false;
        }
        if (!parentType.equals(that.parentType)) {
            return false;
        }
        if (getBoost() != that.getBoost()) {
            return false;
        }
        return true;
    }

    @Override
    public int hashCode() {
        int result = originalParentQuery.hashCode();
        result = 31 * result + parentType.hashCode();
        result = 31 * result + Float.floatToIntBits(getBoost());
        return result;
    }

    @Override
    public String toString(String field) {
        StringBuilder sb = new StringBuilder();
        sb.append("ParentQuery[").append(parentType).append("](")
                .append(originalParentQuery.toString(field)).append(')')
                .append(ToStringUtils.boost(getBoost()));
        return sb.toString();
    }

    @Override
    // See TopChildrenQuery#rewrite
    public Query rewrite(IndexReader reader) throws IOException {
        if (rewrittenParentQuery == null) {
            rewriteIndexReader = reader;
            rewrittenParentQuery = originalParentQuery.rewrite(reader);
        }
        return this;
    }

    @Override
    public void extractTerms(Set<Term> terms) {
        rewrittenParentQuery.extractTerms(terms);
    }

    @Override
    public Weight createWeight(IndexSearcher searcher) throws IOException {
        SearchContext searchContext = SearchContext.current();
        searchContext.idCache().refresh(searchContext.searcher().getTopReaderContext().leaves());
        // First pass: run the parent query and record every matching parent uid with its score.
        Recycler.V<ObjectFloatOpenHashMap<HashedBytesArray>> uidToScore = searchContext.cacheRecycler().objectFloatMap(-1);
        ParentUidCollector collector = new ParentUidCollector(uidToScore.v(), searchContext, parentType);
        final Query parentQuery;
        if (rewrittenParentQuery == null) {
            parentQuery = rewrittenParentQuery = searcher.rewrite(originalParentQuery);
        } else {
            assert rewriteIndexReader == searcher.getIndexReader();
            parentQuery = rewrittenParentQuery;
        }
        IndexSearcher indexSearcher = new IndexSearcher(searcher.getIndexReader());
        indexSearcher.search(parentQuery, collector);
        if (uidToScore.v().isEmpty()) {
            // No parents matched, so no child can match either: release eagerly.
            uidToScore.release();
            return Queries.newMatchNoDocsQuery().createWeight(searcher);
        }
        ChildWeight childWeight = new ChildWeight(parentQuery.createWeight(searcher), searchContext, uidToScore);
        searchContext.addReleasable(childWeight);
        return childWeight;
    }

    /**
     * Collects the uid and score of every parent document matching the parent query.
     */
    private static class ParentUidCollector extends NoopCollector {

        private final ObjectFloatOpenHashMap<HashedBytesArray> uidToScore;
        private final SearchContext searchContext;
        private final String parentType;

        private Scorer scorer;
        private IdReaderTypeCache typeCache;

        ParentUidCollector(ObjectFloatOpenHashMap<HashedBytesArray> uidToScore, SearchContext searchContext, String parentType) {
            this.uidToScore = uidToScore;
            this.searchContext = searchContext;
            this.parentType = parentType;
        }

        @Override
        public void collect(int doc) throws IOException {
            if (typeCache == null) {
                // Segment has no id cache for this type: nothing to record.
                return;
            }
            HashedBytesArray parentUid = typeCache.idByDoc(doc);
            uidToScore.put(parentUid, scorer.score());
        }

        @Override
        public void setScorer(Scorer scorer) throws IOException {
            this.scorer = scorer;
        }

        @Override
        public void setNextReader(AtomicReaderContext context) throws IOException {
            typeCache = searchContext.idCache().reader(context.reader()).type(parentType);
        }
    }

    /**
     * Weight that scores child documents with the score of their matching parent,
     * as recorded in {@code uidToScore}. Releases the recycled map when done.
     */
    private class ChildWeight extends Weight implements Releasable {

        private final Weight parentWeight;
        private final SearchContext searchContext;
        private final Recycler.V<ObjectFloatOpenHashMap<HashedBytesArray>> uidToScore;

        private ChildWeight(Weight parentWeight, SearchContext searchContext, Recycler.V<ObjectFloatOpenHashMap<HashedBytesArray>> uidToScore) {
            this.parentWeight = parentWeight;
            this.searchContext = searchContext;
            this.uidToScore = uidToScore;
        }

        @Override
        public Explanation explain(AtomicReaderContext context, int doc) throws IOException {
            return new Explanation(getBoost(), "not implemented yet...");
        }

        @Override
        public Query getQuery() {
            return ParentQuery.this;
        }

        @Override
        public float getValueForNormalization() throws IOException {
            float sum = parentWeight.getValueForNormalization();
            sum *= getBoost() * getBoost();
            return sum;
        }

        @Override
        public void normalize(float norm, float topLevelBoost) {
        }

        @Override
        public Scorer scorer(AtomicReaderContext context, boolean scoreDocsInOrder, boolean topScorer, Bits acceptDocs) throws IOException {
            DocIdSet childrenDocSet = childrenFilter.getDocIdSet(context, acceptDocs);
            if (DocIdSets.isEmpty(childrenDocSet)) {
                return null;
            }
            IdReaderTypeCache idTypeCache = searchContext.idCache().reader(context.reader()).type(parentType);
            if (idTypeCache == null) {
                return null;
            }
            return new ChildScorer(this, uidToScore.v(), childrenDocSet.iterator(), idTypeCache);
        }

        @Override
        public boolean release() throws ElasticSearchException {
            RecyclerUtils.release(uidToScore);
            return true;
        }
    }

    /**
     * Iterates child docs and assigns each the score of its parent; children whose
     * parent did not match (score 0 sentinel — see class-level TODO) are skipped.
     */
    private static class ChildScorer extends Scorer {

        private final ObjectFloatOpenHashMap<HashedBytesArray> uidToScore;
        private final DocIdSetIterator childrenIterator;
        private final IdReaderTypeCache typeCache;

        private int currentChildDoc = -1;
        private float currentScore;

        ChildScorer(Weight weight, ObjectFloatOpenHashMap<HashedBytesArray> uidToScore, DocIdSetIterator childrenIterator, IdReaderTypeCache typeCache) {
            super(weight);
            this.uidToScore = uidToScore;
            this.childrenIterator = childrenIterator;
            this.typeCache = typeCache;
        }

        @Override
        public float score() throws IOException {
            return currentScore;
        }

        @Override
        public int freq() throws IOException {
            // We don't have the original child query hit info here...
            // But the freq of the children could be collector and returned here, but makes this Scorer more expensive.
            return 1;
        }

        @Override
        public int docID() {
            return currentChildDoc;
        }

        @Override
        public int nextDoc() throws IOException {
            while (true) {
                currentChildDoc = childrenIterator.nextDoc();
                if (currentChildDoc == DocIdSetIterator.NO_MORE_DOCS) {
                    return currentChildDoc;
                }
                HashedBytesArray uid = typeCache.parentIdByDoc(currentChildDoc);
                if (uid == null) {
                    continue;
                }
                currentScore = uidToScore.get(uid);
                if (currentScore != 0) {
                    return currentChildDoc;
                }
            }
        }

        @Override
        public int advance(int target) throws IOException {
            currentChildDoc = childrenIterator.advance(target);
            if (currentChildDoc == DocIdSetIterator.NO_MORE_DOCS) {
                return currentChildDoc;
            }
            // BUGFIX: resolve the *parent* uid of the child doc, exactly as nextDoc() does.
            // The previous idByDoc(...) call looked up the child's own uid, which can never
            // match an entry in uidToScore, so advance() skipped valid hits.
            HashedBytesArray uid = typeCache.parentIdByDoc(currentChildDoc);
            if (uid == null) {
                return nextDoc();
            }
            currentScore = uidToScore.get(uid);
            if (currentScore == 0) {
                return nextDoc();
            }
            return currentChildDoc;
        }

        @Override
        public long cost() {
            return childrenIterator.cost();
        }
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.metron.pcapservice;
import java.util.HashMap;
import java.util.Map;
import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpMethod;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.web.client.RestTemplate;
/**
* The Class RestTestingUtil.
*/
/**
 * Manual smoke-test driver for the pcap REST endpoints. Each method exercises one
 * endpoint with a few parameter variations and prints the raw responses to stdout.
 */
public class RestTestingUtil {

  /** Host (and optional port) of the REST service under test, e.g. "mon.hw.com:8090". */
  public static String hostName = null;

  /** Separator line printed around each logged request/response pair. */
  private static final String SEPARATOR =
      "----------------------------------------------------------------------------------------------------";

  /**
   * Issues an HTTP GET for {@code url}, expanding its URI template placeholders from
   * {@code params} and requesting an octet-stream response body.
   *
   * @param url URI template with {name} placeholders
   * @param params template variables; entries without a matching placeholder are ignored
   * @return the raw response entity
   */
  private static ResponseEntity<byte[]> doGet(String url, Map<String, String> params) {
    RestTemplate template = new RestTemplate();
    HttpHeaders headers = new HttpHeaders();
    headers.set("Accept", MediaType.APPLICATION_OCTET_STREAM_VALUE);
    HttpEntity<Object> requestEntity = new HttpEntity<Object>(headers);
    return template.exchange(url, HttpMethod.GET, requestEntity, byte[].class, params);
  }

  /** Prints one formatted request/response line framed by separator lines. */
  private static void logExchange(String format, Object... args) {
    System.out.println(SEPARATOR);
    System.out.format(format, args);
    System.out.println(SEPARATOR);
    System.out.println();
  }

  /**
   * Exercises getPcapsByKeys four ways: defaults, with reverse traffic, with a
   * time range, and with a custom maximum response size.
   *
   * @param keys comma-separated pcap keys
   */
  private static void getPcapsByKeys(String keys) {
    System.out
        .println("**********************getPcapsByKeys ******************************************************************************************");
    String url = "http://" + hostName
        + "/cisco-rest/pcapGetter/getPcapsByKeys?keys={keys}"
        + "&includeReverseTraffic={includeReverseTraffic}"
        + "&startTime={startTime}" + "&endTime={endTime}"
        + "&maxResponseSize={maxResponseSize}";
    String format = "getPcapsByKeys : request= <keys=%s; includeReverseTraffic=%s; startTime=%s; endTime=%s; maxResponseSize=%s> \n response= %s \n";
    // default values
    String startTime = "-1";
    String endTime = "-1";
    String maxResponseSize = "6";
    String includeReverseTraffic = "false";
    Map<String, String> map = new HashMap<String, String>();
    map.put("keys", keys);
    map.put("includeReverseTraffic", includeReverseTraffic);
    map.put("startTime", startTime);
    map.put("endTime", endTime);
    map.put("maxResponseSize", maxResponseSize);
    // 1. default parameters
    ResponseEntity<byte[]> response1 = doGet(url, map);
    logExchange(format, keys, includeReverseTraffic, startTime, endTime, maxResponseSize, response1);
    // 2. with reverse traffic
    includeReverseTraffic = "true";
    map.put("includeReverseTraffic", includeReverseTraffic);
    ResponseEntity<byte[]> response2 = doGet(url, map);
    logExchange(format, keys, includeReverseTraffic, startTime, endTime, maxResponseSize, response2);
    // 3. with time range
    startTime = System.getProperty("startTime", "-1");
    endTime = System.getProperty("endTime", "-1");
    map.put("startTime", startTime);
    map.put("endTime", endTime);
    ResponseEntity<byte[]> response3 = doGet(url, map);
    logExchange(format, keys, includeReverseTraffic, startTime, endTime, maxResponseSize, response3);
    // 4. with maxResponseSize
    maxResponseSize = System.getProperty("maxResponseSize", "6");
    map.put("maxResponseSize", maxResponseSize);
    ResponseEntity<byte[]> response4 = doGet(url, map);
    logExchange(format, keys, includeReverseTraffic, startTime, endTime, maxResponseSize, response4);
  }

  /**
   * Exercises getPcapsByKeyRange three ways: defaults, with a time range, and with
   * a custom maximum response size.
   *
   * @param startKey first key of the range (inclusive)
   * @param endKey last key of the range
   */
  private static void getPcapsByKeysRange(String startKey, String endKey) {
    System.out
        .println("**********************getPcapsByKeysRange ******************************************************************************************");
    String url = "http://" + hostName
        + "/cisco-rest/pcapGetter/getPcapsByKeyRange?startKey={startKey}"
        + "&endKey={endKey}" + "&startTime={startTime}" + "&endTime={endTime}"
        + "&maxResponseSize={maxResponseSize}";
    String format = "getPcapsByKeysRange : request= <startKey=%s; endKey=%s; startTime=%s; endTime=%s; maxResponseSize=%s> \n response= %s \n";
    // default values
    String startTime = "-1";
    String endTime = "-1";
    String maxResponseSize = "6";
    Map<String, String> map = new HashMap<String, String>();
    map.put("startKey", startKey);
    // BUGFIX: previously put the literal string "endKey" instead of the endKey argument,
    // so the requested range never reflected the caller's end key.
    map.put("endKey", endKey);
    map.put("startTime", startTime);
    map.put("endTime", endTime);
    map.put("maxResponseSize", maxResponseSize);
    // 1. default parameters
    ResponseEntity<byte[]> response1 = doGet(url, map);
    logExchange(format, startKey, endKey, startTime, endTime, maxResponseSize, response1);
    // 2. with time range
    startTime = System.getProperty("startTime", "-1");
    endTime = System.getProperty("endTime", "-1");
    map.put("startTime", startTime);
    map.put("endTime", endTime);
    ResponseEntity<byte[]> response2 = doGet(url, map);
    logExchange(format, startKey, endKey, startTime, endTime, maxResponseSize, response2);
    // 3. with maxResponseSize
    maxResponseSize = System.getProperty("maxResponseSize", "6");
    map.put("maxResponseSize", maxResponseSize);
    ResponseEntity<byte[]> response3 = doGet(url, map);
    logExchange(format, startKey, endKey, startTime, endTime, maxResponseSize, response3);
  }

  /**
   * Exercises getPcapsByIdentifiers three ways: defaults, with a time range, and
   * with a custom maximum response size.
   *
   * @param srcIp source IP address
   * @param dstIp destination IP address
   * @param protocol IP protocol
   * @param srcPort source port
   * @param dstPort destination port
   */
  private static void getPcapsByIdentifiers(String srcIp, String dstIp, String protocol, String srcPort, String dstPort) {
    // BUGFIX: banner previously said "getPcapsByKeysRange" (copy-paste error).
    System.out
        .println("**********************getPcapsByIdentifiers ******************************************************************************************");
    String url = "http://" + hostName
        + "/pcapGetter/getPcapsByIdentifiers?srcIp={srcIp}"
        + "&dstIp={dstIp}" + "&protocol={protocol}" + "&srcPort={srcPort}"
        + "&dstPort={dstPort}";
    // BUGFIX: the format has six placeholders; the old code passed a stray endTime
    // argument in the middle, shifting srcPort/dstPort/response into the wrong slots.
    String format = "getPcapsByIdentifiers : request= <srcIp=%s; dstIp=%s; protocol=%s; srcPort=%s; dstPort=%s> \n response= %s \n";
    // default values
    String startTime = "-1";
    String endTime = "-1";
    String maxResponseSize = "6";
    Map<String, String> map = new HashMap<String, String>();
    map.put("srcIp", srcIp);
    map.put("dstIp", dstIp);
    map.put("protocol", protocol);
    map.put("srcPort", srcPort);
    map.put("dstPort", dstPort);
    // 1. default parameters
    ResponseEntity<byte[]> response1 = doGet(url, map);
    logExchange(format, srcIp, dstIp, protocol, srcPort, dstPort, response1);
    // 2. with time range (note: the URL template has no time placeholders; extra
    // map entries are ignored during expansion — TODO confirm endpoint honors them)
    startTime = System.getProperty("startTime", "-1");
    endTime = System.getProperty("endTime", "-1");
    map.put("startTime", startTime);
    map.put("endTime", endTime);
    ResponseEntity<byte[]> response2 = doGet(url, map);
    logExchange(format, srcIp, dstIp, protocol, srcPort, dstPort, response2);
    // 3. with maxResponseSize
    maxResponseSize = System.getProperty("maxResponseSize", "6");
    map.put("maxResponseSize", maxResponseSize);
    ResponseEntity<byte[]> response3 = doGet(url, map);
    logExchange(format, srcIp, dstIp, protocol, srcPort, dstPort, response3);
  }

  /**
   * The main method.
   *
   * @param args the arguments
   */
  public static void main(String[] args) {
    /*
     * Run this program with system properties
     *
     * -DhostName=mon.hw.com:8090
     * -Dkeys=18800006-1800000b-06-0019-b39d,18800006-
     * 1800000b-06-0050-5af6-64840-40785
     * -DstartKey=18000002-18800002-06-0436-0019-2440-34545
     * -DendKey=18000002-18800002-06-b773-0019-2840-34585
     */
    hostName = System.getProperty("hostName");
    String keys = System.getProperty("keys");
    String startKey = System.getProperty("startKey");
    String endKey = System.getProperty("endKey");
    getPcapsByKeys(keys);
    getPcapsByKeysRange(startKey, endKey);
  }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.operator.aggregation.state;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Ordering;
import io.airlift.bytecode.BytecodeBlock;
import io.airlift.bytecode.ClassDefinition;
import io.airlift.bytecode.DynamicClassLoader;
import io.airlift.bytecode.FieldDefinition;
import io.airlift.bytecode.MethodDefinition;
import io.airlift.bytecode.Parameter;
import io.airlift.bytecode.Scope;
import io.airlift.bytecode.Variable;
import io.airlift.bytecode.control.IfStatement;
import io.airlift.bytecode.expression.BytecodeExpression;
import io.airlift.slice.Slice;
import io.trino.array.BlockBigArray;
import io.trino.array.BooleanBigArray;
import io.trino.array.ByteBigArray;
import io.trino.array.DoubleBigArray;
import io.trino.array.IntBigArray;
import io.trino.array.LongBigArray;
import io.trino.array.ObjectBigArray;
import io.trino.array.SliceBigArray;
import io.trino.operator.aggregation.GroupedAccumulator;
import io.trino.spi.block.Block;
import io.trino.spi.block.BlockBuilder;
import io.trino.spi.function.AccumulatorStateFactory;
import io.trino.spi.function.AccumulatorStateMetadata;
import io.trino.spi.function.AccumulatorStateSerializer;
import io.trino.spi.type.RowType;
import io.trino.spi.type.Type;
import io.trino.sql.gen.CallSiteBinder;
import io.trino.sql.gen.SqlTypeBytecodeExpression;
import org.openjdk.jol.info.ClassLayout;
import java.lang.annotation.Annotation;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import static com.google.common.base.CaseFormat.LOWER_CAMEL;
import static com.google.common.base.CaseFormat.UPPER_CAMEL;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.common.collect.Iterables.getOnlyElement;
import static io.airlift.bytecode.Access.FINAL;
import static io.airlift.bytecode.Access.PRIVATE;
import static io.airlift.bytecode.Access.PUBLIC;
import static io.airlift.bytecode.Access.STATIC;
import static io.airlift.bytecode.Access.a;
import static io.airlift.bytecode.Parameter.arg;
import static io.airlift.bytecode.ParameterizedType.type;
import static io.airlift.bytecode.expression.BytecodeExpressions.add;
import static io.airlift.bytecode.expression.BytecodeExpressions.constantBoolean;
import static io.airlift.bytecode.expression.BytecodeExpressions.constantClass;
import static io.airlift.bytecode.expression.BytecodeExpressions.constantInt;
import static io.airlift.bytecode.expression.BytecodeExpressions.constantNull;
import static io.airlift.bytecode.expression.BytecodeExpressions.constantNumber;
import static io.airlift.bytecode.expression.BytecodeExpressions.defaultValue;
import static io.airlift.bytecode.expression.BytecodeExpressions.equal;
import static io.airlift.bytecode.expression.BytecodeExpressions.getStatic;
import static io.airlift.bytecode.expression.BytecodeExpressions.newInstance;
import static io.trino.spi.type.BigintType.BIGINT;
import static io.trino.spi.type.BooleanType.BOOLEAN;
import static io.trino.spi.type.DoubleType.DOUBLE;
import static io.trino.spi.type.IntegerType.INTEGER;
import static io.trino.spi.type.TinyintType.TINYINT;
import static io.trino.spi.type.VarbinaryType.VARBINARY;
import static io.trino.sql.gen.SqlTypeBytecodeExpression.constantType;
import static io.trino.type.UnknownType.UNKNOWN;
import static io.trino.util.CompilerUtils.defineClass;
import static io.trino.util.CompilerUtils.makeClassName;
import static java.util.Objects.requireNonNull;
public final class StateCompiler
{
private StateCompiler() {}
/**
 * Maps a state field's Java type to the BigArray class used to store per-group values.
 * Any type without a dedicated primitive/Slice/Block array falls back to ObjectBigArray.
 */
private static Class<?> getBigArrayType(Class<?> type)
{
    Map<Class<?>, Class<?>> bigArrayByType = ImmutableMap.<Class<?>, Class<?>>builder()
            .put(long.class, LongBigArray.class)
            .put(byte.class, ByteBigArray.class)
            .put(double.class, DoubleBigArray.class)
            .put(boolean.class, BooleanBigArray.class)
            .put(int.class, IntBigArray.class)
            .put(Slice.class, SliceBigArray.class)
            .put(Block.class, BlockBigArray.class)
            .build();
    return bigArrayByType.getOrDefault(type, ObjectBigArray.class);
}
/**
 * Returns the SQL type this state class serializes to, with no explicit
 * per-field type overrides.
 */
public static Type getSerializedType(Class<?> clazz)
{
    Map<String, Type> noOverrides = ImmutableMap.of();
    return getSerializedType(clazz, noOverrides);
}
/**
 * Returns the SQL type this state class serializes to.
 *
 * If the state declares a custom serializer via {@code @AccumulatorStateMetadata},
 * that serializer is instantiated and asked directly; otherwise the type is derived
 * from the state's fields (possibly overridden by {@code fieldTypes}).
 *
 * @param fieldTypes explicit SQL type overrides by field name
 * @throws RuntimeException if a declared custom serializer cannot be instantiated reflectively
 */
public static Type getSerializedType(Class<?> clazz, Map<String, Type> fieldTypes)
{
    AccumulatorStateMetadata metadata = getMetadataAnnotation(clazz);
    if (metadata != null && metadata.stateSerializerClass() != void.class) {
        try {
            AccumulatorStateSerializer<?> stateSerializer = (AccumulatorStateSerializer<?>) metadata.stateSerializerClass().getConstructor().newInstance();
            return stateSerializer.getSerializedType();
        }
        // ReflectiveOperationException covers the previous four-way multi-catch and
        // matches the idiom used elsewhere in this class.
        catch (ReflectiveOperationException e) {
            throw new RuntimeException(e);
        }
    }
    List<StateField> fields = enumerateFields(clazz, fieldTypes);
    return getSerializedType(fields);
}
/**
 * Generates a serializer for the state class, loading generated bytecode
 * through a fresh DynamicClassLoader parented to the state's own loader.
 */
public static <T> AccumulatorStateSerializer<T> generateStateSerializer(Class<T> clazz)
{
    DynamicClassLoader classLoader = new DynamicClassLoader(clazz.getClassLoader());
    return generateStateSerializer(clazz, classLoader);
}
/**
 * Generates a serializer for the state class with no explicit per-field
 * SQL type overrides.
 */
public static <T> AccumulatorStateSerializer<T> generateStateSerializer(Class<T> clazz, DynamicClassLoader classLoader)
{
    Map<String, Type> noOverrides = ImmutableMap.of();
    return generateStateSerializer(clazz, noOverrides, classLoader);
}
/**
 * Returns an {@link AccumulatorStateSerializer} for the state class: either the
 * custom serializer declared via {@code @AccumulatorStateMetadata}, or a
 * bytecode-generated one implementing getSerializedType/serialize/deserialize
 * over the state's fields.
 *
 * @param fieldTypes explicit SQL type overrides by field name
 * @param classLoader loader that receives the generated class
 * @throws RuntimeException if reflective instantiation of the serializer fails
 */
@SuppressWarnings("unchecked") // casts are safe: both classes implement AccumulatorStateSerializer
public static <T> AccumulatorStateSerializer<T> generateStateSerializer(Class<T> clazz, Map<String, Type> fieldTypes, DynamicClassLoader classLoader)
{
    AccumulatorStateMetadata metadata = getMetadataAnnotation(clazz);
    if (metadata != null && metadata.stateSerializerClass() != void.class) {
        try {
            return (AccumulatorStateSerializer<T>) metadata.stateSerializerClass().getConstructor().newInstance();
        }
        // ReflectiveOperationException covers the previous four-way multi-catch and
        // matches the catch used below for the generated class.
        catch (ReflectiveOperationException e) {
            throw new RuntimeException(e);
        }
    }
    ClassDefinition definition = new ClassDefinition(
            a(PUBLIC, FINAL),
            makeClassName(clazz.getSimpleName() + "Serializer"),
            type(Object.class),
            type(AccumulatorStateSerializer.class));
    CallSiteBinder callSiteBinder = new CallSiteBinder();
    // Generate constructor
    definition.declareDefaultConstructor(a(PUBLIC));
    List<StateField> fields = enumerateFields(clazz, fieldTypes);
    generateGetSerializedType(definition, fields, callSiteBinder);
    generateSerialize(definition, callSiteBinder, clazz, fields);
    generateDeserialize(definition, callSiteBinder, clazz, fields);
    Class<?> serializerClass = defineClass(definition, AccumulatorStateSerializer.class, callSiteBinder.getBindings(), classLoader);
    try {
        return (AccumulatorStateSerializer<T>) serializerClass.getConstructor().newInstance();
    }
    catch (ReflectiveOperationException e) {
        throw new RuntimeException(e);
    }
}
/**
 * Emits the generated serializer's getSerializedType() method, which simply
 * returns the constant Type bound through the call-site binder.
 */
private static void generateGetSerializedType(ClassDefinition definition, List<StateField> fields, CallSiteBinder callSiteBinder)
{
    MethodDefinition method = definition.declareMethod(a(PUBLIC), "getSerializedType", type(Type.class));
    Type serializedType = getSerializedType(fields);
    method.getBody()
            .comment("return %s", serializedType.getTypeSignature())
            .append(constantType(callSiteBinder, serializedType))
            .retObject();
}
/**
 * Derives the serialized SQL type from the state's fields:
 * no fields → UNKNOWN, one field → that field's type, several → an anonymous row.
 */
private static Type getSerializedType(List<StateField> fields)
{
    switch (fields.size()) {
        case 0:
            return UNKNOWN;
        case 1:
            return getOnlyElement(fields).getSqlType();
        default:
            List<Type> fieldSqlTypes = fields.stream()
                    .map(StateField::getSqlType)
                    .collect(toImmutableList());
            return RowType.anonymous(fieldSqlTypes);
    }
}
/**
 * Finds the {@code @AccumulatorStateMetadata} annotation on the state class, or —
 * failing that — on one of its directly implemented interfaces. Returns null when
 * neither carries the annotation.
 */
private static <T> AccumulatorStateMetadata getMetadataAnnotation(Class<T> clazz)
{
    // Prefer an annotation placed directly on the class.
    AccumulatorStateMetadata direct = clazz.getAnnotation(AccumulatorStateMetadata.class);
    if (direct != null) {
        return direct;
    }
    // Otherwise take the first annotated interface (direct interfaces only).
    for (Class<?> iface : clazz.getInterfaces()) {
        AccumulatorStateMetadata inherited = iface.getAnnotation(AccumulatorStateMetadata.class);
        if (inherited != null) {
            return inherited;
        }
    }
    return null;
}
/**
 * Emits the generated serializer's {@code deserialize(Block block, int index, Object state)}
 * method. Mirrors generateSerialize: a single field is read straight from the block;
 * multiple fields are read positionally from a nested row block. Zero-field states emit
 * an empty method body (only the return).
 */
private static <T> void generateDeserialize(ClassDefinition definition, CallSiteBinder binder, Class<T> clazz, List<StateField> fields)
{
    Parameter block = arg("block", Block.class);
    Parameter index = arg("index", int.class);
    Parameter state = arg("state", Object.class);
    MethodDefinition method = definition.declareMethod(a(PUBLIC), "deserialize", type(void.class), block, index, state);
    BytecodeBlock deserializerBody = method.getBody();
    Scope scope = method.getScope();
    if (fields.size() == 1) {
        // Single field: the value sits directly at (block, index).
        StateField field = getOnlyElement(fields);
        Method setter = getSetter(clazz, field);
        if (!field.isPrimitiveType()) {
            // Reference types may be null; map a null slot to a null setter call.
            deserializerBody.append(new IfStatement()
                    .condition(block.invoke("isNull", boolean.class, index))
                    .ifTrue(state.cast(setter.getDeclaringClass()).invoke(setter, constantNull(field.getType())))
                    .ifFalse(state.cast(setter.getDeclaringClass()).invoke(setter, constantType(binder, field.getSqlType()).getValue(block, index))));
        }
        else {
            // For primitive type, we need to cast here because we serialize byte fields with TINYINT/INTEGER (whose java type is long).
            deserializerBody.append(
                    state.cast(setter.getDeclaringClass()).invoke(
                            setter,
                            constantType(binder, field.getSqlType()).getValue(block, index).cast(field.getType())));
        }
    }
    else if (fields.size() > 1) {
        // Multiple fields: unpack the nested row block, then read each field by position.
        Variable row = scope.declareVariable(Block.class, "row");
        deserializerBody.append(row.set(block.invoke("getObject", Object.class, index, constantClass(Block.class)).cast(Block.class)));
        int position = 0;
        for (StateField field : fields) {
            Method setter = getSetter(clazz, field);
            if (!field.isPrimitiveType()) {
                // Reference types may be null; map a null slot to a null setter call.
                deserializerBody.append(new IfStatement()
                        .condition(row.invoke("isNull", boolean.class, constantInt(position)))
                        .ifTrue(state.cast(setter.getDeclaringClass()).invoke(setter, constantNull(field.getType())))
                        .ifFalse(state.cast(setter.getDeclaringClass()).invoke(setter, constantType(binder, field.getSqlType()).getValue(row, constantInt(position)))));
            }
            else {
                // For primitive type, we need to cast here because we serialize byte fields with TINYINT/INTEGER (whose java type is long).
                deserializerBody.append(
                        state.cast(setter.getDeclaringClass()).invoke(
                                setter,
                                constantType(binder, field.getSqlType()).getValue(row, constantInt(position)).cast(field.getType())));
            }
            position++;
        }
    }
    deserializerBody.ret();
}
/**
 * Generates the {@code serialize(Object state, BlockBuilder out)} method of the state
 * serializer: reads the state's fields through the generated getters and appends the
 * intermediate value to {@code out}.
 *
 * Layout mirrors generateDeserialize: zero fields ⇒ append a null; one field ⇒ write the
 * value directly; multiple fields ⇒ write a row block entry containing the fields in order.
 */
private static <T> void generateSerialize(ClassDefinition definition, CallSiteBinder binder, Class<T> clazz, List<StateField> fields)
{
    Parameter state = arg("state", Object.class);
    Parameter out = arg("out", BlockBuilder.class);
    MethodDefinition method = definition.declareMethod(a(PUBLIC), "serialize", type(void.class), state, out);
    Scope scope = method.getScope();
    BytecodeBlock serializerBody = method.getBody();
    // Idiom: isEmpty() instead of size() == 0.
    if (fields.isEmpty()) {
        serializerBody.append(out.invoke("appendNull", BlockBuilder.class).pop());
    }
    else if (fields.size() == 1) {
        Method getter = getGetter(clazz, getOnlyElement(fields));
        SqlTypeBytecodeExpression sqlType = constantType(binder, getOnlyElement(fields).getSqlType());
        Variable fieldValue = scope.declareVariable(getter.getReturnType(), "value");
        serializerBody.append(fieldValue.set(state.cast(getter.getDeclaringClass()).invoke(getter)));
        if (!getOnlyElement(fields).isPrimitiveType()) {
            // Reference types may be null, so null-check before writing.
            serializerBody.append(new IfStatement()
                    .condition(equal(fieldValue, constantNull(getter.getReturnType())))
                    .ifTrue(out.invoke("appendNull", BlockBuilder.class).pop())
                    .ifFalse(sqlType.writeValue(out, fieldValue)));
        }
        else {
            // For primitive type, we need to cast here because we serialize byte fields with TINYINT/INTEGER (whose java type is long).
            serializerBody.append(sqlType.writeValue(out, fieldValue.cast(getOnlyElement(fields).getSqlType().getJavaType())));
        }
    }
    else if (fields.size() > 1) {
        // Multiple fields are packed into a single row entry of the output block.
        Variable rowBuilder = scope.declareVariable(BlockBuilder.class, "rowBuilder");
        serializerBody.append(rowBuilder.set(out.invoke("beginBlockEntry", BlockBuilder.class)));
        for (StateField field : fields) {
            Method getter = getGetter(clazz, field);
            SqlTypeBytecodeExpression sqlType = constantType(binder, field.getSqlType());
            Variable fieldValue = scope.createTempVariable(getter.getReturnType());
            serializerBody.append(fieldValue.set(state.cast(getter.getDeclaringClass()).invoke(getter)));
            if (!field.isPrimitiveType()) {
                serializerBody.append(new IfStatement().condition(equal(fieldValue, constantNull(getter.getReturnType())))
                        .ifTrue(rowBuilder.invoke("appendNull", BlockBuilder.class).pop())
                        .ifFalse(sqlType.writeValue(rowBuilder, fieldValue)));
            }
            else {
                // For primitive type, we need to cast here because we serialize byte fields with TINYINT/INTEGER (whose java type is long).
                serializerBody.append(sqlType.writeValue(rowBuilder, fieldValue.cast(field.getSqlType().getJavaType())));
            }
        }
        serializerBody.append(out.invoke("closeEntry", BlockBuilder.class).pop());
    }
    serializerBody.ret();
}
/**
 * Resolves the setter {@link Method} on {@code clazz} for the given state field.
 * The state interface is required to declare a matching setter, so a lookup
 * failure is treated as a programming error and rethrown unchecked.
 */
private static Method getSetter(Class<?> clazz, StateField field)
{
    String setterName = field.getSetterName();
    Class<?> parameterType = field.getType();
    try {
        return clazz.getMethod(setterName, parameterType);
    }
    catch (NoSuchMethodException e) {
        throw new RuntimeException(e);
    }
}
/**
 * Resolves the zero-argument getter {@link Method} on {@code clazz} for the given
 * state field. A missing getter indicates a malformed state interface and is
 * rethrown as an unchecked exception.
 */
private static Method getGetter(Class<?> clazz, StateField field)
{
    String getterName = field.getGetterName();
    try {
        return clazz.getMethod(getterName);
    }
    catch (NoSuchMethodException e) {
        throw new RuntimeException(e);
    }
}
/**
 * Generates an {@link AccumulatorStateFactory} for {@code clazz} using a fresh
 * {@link DynamicClassLoader} parented to the state interface's own class loader.
 */
public static <T> AccumulatorStateFactory<T> generateStateFactory(Class<T> clazz)
{
    return generateStateFactory(clazz, new DynamicClassLoader(clazz.getClassLoader()));
}
/**
 * Generates an {@link AccumulatorStateFactory} for {@code clazz} with no explicit
 * field-type overrides (SQL types are inferred from the Java stack types).
 */
public static <T> AccumulatorStateFactory<T> generateStateFactory(Class<T> clazz, DynamicClassLoader classLoader)
{
    return generateStateFactory(clazz, ImmutableMap.of(), classLoader);
}
/**
 * Generates an {@link AccumulatorStateFactory} for the state interface {@code clazz}.
 *
 * If the interface's {@code AccumulatorStateMetadata} annotation names an explicit
 * factory class, that class is instantiated and returned. Otherwise single and grouped
 * state implementations are generated, and a factory class producing them is generated
 * and instantiated.
 *
 * @param clazz the state interface
 * @param fieldTypes explicit SQL types for specific fields, by field name
 * @param classLoader loader the generated classes are defined in
 */
public static <T> AccumulatorStateFactory<T> generateStateFactory(Class<T> clazz, Map<String, Type> fieldTypes, DynamicClassLoader classLoader)
{
    AccumulatorStateMetadata metadata = getMetadataAnnotation(clazz);
    if (metadata != null && metadata.stateFactoryClass() != void.class) {
        try {
            return (AccumulatorStateFactory<T>) metadata.stateFactoryClass().getConstructor().newInstance();
        }
        catch (ReflectiveOperationException e) {
            // Consistent with the catch below; covers the original four-way multi-catch
            // (Instantiation/IllegalAccess/NoSuchMethod/InvocationTarget) via their common supertype.
            throw new RuntimeException(e);
        }
    }
    Class<? extends T> singleStateClass = generateSingleStateClass(clazz, fieldTypes, classLoader);
    Class<? extends T> groupedStateClass = generateGroupedStateClass(clazz, fieldTypes, classLoader);
    ClassDefinition definition = new ClassDefinition(
            a(PUBLIC, FINAL),
            makeClassName(clazz.getSimpleName() + "Factory"),
            type(Object.class),
            type(AccumulatorStateFactory.class));
    // Generate constructor
    definition.declareDefaultConstructor(a(PUBLIC));
    // Generate single state creation method
    definition.declareMethod(a(PUBLIC), "createSingleState", type(Object.class))
            .getBody()
            .newObject(singleStateClass)
            .dup()
            .invokeConstructor(singleStateClass)
            .retObject();
    // Generate grouped state creation method
    definition.declareMethod(a(PUBLIC), "createGroupedState", type(Object.class))
            .getBody()
            .newObject(groupedStateClass)
            .dup()
            .invokeConstructor(groupedStateClass)
            .retObject();
    // Generate getters for state class
    definition.declareMethod(a(PUBLIC), "getSingleStateClass", type(Class.class, singleStateClass))
            .getBody()
            .push(singleStateClass)
            .retObject();
    definition.declareMethod(a(PUBLIC), "getGroupedStateClass", type(Class.class, groupedStateClass))
            .getBody()
            .push(groupedStateClass)
            .retObject();
    Class<?> factoryClass = defineClass(definition, AccumulatorStateFactory.class, classLoader);
    try {
        return (AccumulatorStateFactory<T>) factoryClass.getConstructor().newInstance();
    }
    catch (ReflectiveOperationException e) {
        throw new RuntimeException(e);
    }
}
/**
 * Generates the per-query ("single") state implementation of {@code clazz}:
 * a final class with one plain Java field per state property, plus
 * {@code getEstimatedSize()} backed by a static INSTANCE_SIZE constant.
 */
private static <T> Class<? extends T> generateSingleStateClass(Class<T> clazz, Map<String, Type> fieldTypes, DynamicClassLoader classLoader)
{
    ClassDefinition definition = new ClassDefinition(
            a(PUBLIC, FINAL),
            makeClassName("Single" + clazz.getSimpleName()),
            type(Object.class),
            type(clazz));
    FieldDefinition instanceSize = generateInstanceSize(definition);
    // Add getter for class size
    definition.declareMethod(a(PUBLIC), "getEstimatedSize", type(long.class))
            .getBody()
            .getStaticField(instanceSize)
            .retLong();
    // Generate constructor
    MethodDefinition constructor = definition.declareConstructor(a(PUBLIC));
    constructor.getBody()
            .append(constructor.getThis())
            .invokeConstructor(Object.class);
    // Generate fields (each field also appends its initial-value assignment to the constructor)
    List<StateField> fields = enumerateFields(clazz, fieldTypes);
    for (StateField field : fields) {
        generateField(definition, constructor, field);
    }
    constructor.getBody()
            .ret();
    return defineClass(definition, clazz, classLoader);
}
/**
 * Declares a {@code private static final long INSTANCE_SIZE} field on the given class
 * and emits a class initializer that computes it via JOL's
 * {@code ClassLayout.parseClass(...).instanceSize()}.
 *
 * @return the field definition, so callers can read it from getEstimatedSize()
 */
private static FieldDefinition generateInstanceSize(ClassDefinition definition)
{
    // Store instance size in static field
    FieldDefinition instanceSize = definition.declareField(a(PRIVATE, STATIC, FINAL), "INSTANCE_SIZE", long.class);
    definition.getClassInitializer()
            .getBody()
            .comment("INSTANCE_SIZE = ClassLayout.parseClass(%s.class).instanceSize()", definition.getName())
            .push(definition.getType())
            .invokeStatic(ClassLayout.class, "parseClass", ClassLayout.class, Class.class)
            .invokeVirtual(ClassLayout.class, "instanceSize", int.class)
            .intToLong()
            .putStaticField(instanceSize);
    return instanceSize;
}
/**
 * Generates the grouped state implementation of {@code clazz}: a final subclass of
 * {@link AbstractGroupedAccumulatorState} with one big-array field per state property,
 * indexed by group id. Also generates ensureCapacity(long) and getEstimatedSize(),
 * the latter summing the instance size with the sizeOf() of every big array.
 */
private static <T> Class<? extends T> generateGroupedStateClass(Class<T> clazz, Map<String, Type> fieldTypes, DynamicClassLoader classLoader)
{
    ClassDefinition definition = new ClassDefinition(
            a(PUBLIC, FINAL),
            makeClassName("Grouped" + clazz.getSimpleName()),
            type(AbstractGroupedAccumulatorState.class),
            type(clazz),
            type(GroupedAccumulator.class));
    FieldDefinition instanceSize = generateInstanceSize(definition);
    List<StateField> fields = enumerateFields(clazz, fieldTypes);
    // Create constructor
    MethodDefinition constructor = definition.declareConstructor(a(PUBLIC));
    constructor.getBody()
            .append(constructor.getThis())
            .invokeConstructor(AbstractGroupedAccumulatorState.class);
    // Create ensureCapacity
    MethodDefinition ensureCapacity = definition.declareMethod(a(PUBLIC), "ensureCapacity", type(void.class), arg("size", long.class));
    // Generate fields, constructor, and ensureCapacity
    List<FieldDefinition> fieldDefinitions = new ArrayList<>();
    for (StateField field : fields) {
        // Each field contributes initialization to the constructor and a grow call to ensureCapacity.
        fieldDefinitions.add(generateGroupedField(definition, constructor, ensureCapacity, field));
    }
    constructor.getBody().ret();
    ensureCapacity.getBody().ret();
    // Generate getEstimatedSize
    MethodDefinition getEstimatedSize = definition.declareMethod(a(PUBLIC), "getEstimatedSize", type(long.class));
    BytecodeBlock body = getEstimatedSize.getBody();
    Variable size = getEstimatedSize.getScope().declareVariable(long.class, "size");
    // initialize size to the size of the instance
    body.append(size.set(getStatic(instanceSize)));
    // add field to size
    for (FieldDefinition field : fieldDefinitions) {
        body.append(size.set(add(size, getEstimatedSize.getThis().getField(field).invoke("sizeOf", long.class))));
    }
    // return size
    body.append(size.ret());
    return defineClass(definition, clazz, classLoader);
}
/**
 * Emits a single-state property: a private field named "<lowerCamelName>Value",
 * its getter and setter implementing the state interface, and an initial-value
 * assignment appended to the class constructor.
 */
private static void generateField(ClassDefinition definition, MethodDefinition constructor, StateField stateField)
{
    FieldDefinition field = definition.declareField(a(PRIVATE), UPPER_CAMEL.to(LOWER_CAMEL, stateField.getName()) + "Value", stateField.getType());
    // Generate getter
    MethodDefinition getter = definition.declareMethod(a(PUBLIC), stateField.getGetterName(), type(stateField.getType()));
    getter.getBody()
            .append(getter.getThis().getField(field).ret());
    // Generate setter
    Parameter value = arg("value", stateField.getType());
    MethodDefinition setter = definition.declareMethod(a(PUBLIC), stateField.getSetterName(), type(void.class), value);
    setter.getBody()
            .append(setter.getThis().setField(field, value))
            .ret();
    // Initialize the field in the constructor (default or @Initial*Value annotation value).
    constructor.getBody()
            .append(constructor.getThis().setField(field, stateField.initialValueExpression()));
}
/**
 * Emits a grouped-state property: a big-array field named "<lowerCamelName>Values",
 * a getter/setter pair that indexes the array by the current group id, a grow call
 * appended to ensureCapacity, and array construction appended to the constructor.
 *
 * @return the generated field definition, used later to sum estimated sizes
 */
private static FieldDefinition generateGroupedField(ClassDefinition definition, MethodDefinition constructor, MethodDefinition ensureCapacity, StateField stateField)
{
    Class<?> bigArrayType = getBigArrayType(stateField.getType());
    FieldDefinition field = definition.declareField(a(PRIVATE), UPPER_CAMEL.to(LOWER_CAMEL, stateField.getName()) + "Values", bigArrayType);
    // Generate getter: values.get(getGroupId())
    MethodDefinition getter = definition.declareMethod(a(PUBLIC), stateField.getGetterName(), type(stateField.getType()));
    getter.getBody()
            .append(getter.getThis().getField(field).invoke(
                    "get",
                    stateField.getType(),
                    getter.getThis().invoke("getGroupId", long.class))
                    .ret());
    // Generate setter: values.set(getGroupId(), value)
    Parameter value = arg("value", stateField.getType());
    MethodDefinition setter = definition.declareMethod(a(PUBLIC), stateField.getSetterName(), type(void.class), value);
    setter.getBody()
            .append(setter.getThis().getField(field).invoke(
                    "set",
                    void.class,
                    setter.getThis().invoke("getGroupId", long.class),
                    value))
            .ret();
    Scope ensureCapacityScope = ensureCapacity.getScope();
    ensureCapacity.getBody()
            .append(ensureCapacity.getThis().getField(field).invoke("ensureCapacity", void.class, ensureCapacityScope.getVariable("size")));
    // Initialize field in constructor
    constructor.getBody()
            .append(constructor.getThis().setField(field, newInstance(field.getType(), stateField.initialValueExpression())));
    return field;
}
/**
* Enumerates all the fields in this state interface.
*
* @param clazz a subclass of AccumulatorState
* @param fieldTypes a map of field name and Type
* @return list of state fields. Ordering is guaranteed to be stable, and have all primitive fields at the beginning.
*/
private static List<StateField> enumerateFields(Class<?> clazz, Map<String, Type> fieldTypes)
{
    ImmutableList.Builder<StateField> builder = ImmutableList.builder();
    for (Method method : clazz.getMethods()) {
        // getEstimatedSize is part of the AccumulatorState contract, not a state property.
        if (method.getName().equals("getEstimatedSize")) {
            continue;
        }
        if (method.getName().startsWith("get")) {
            Class<?> type = method.getReturnType();
            String name = method.getName().substring(3);
            // Explicit SQL type from fieldTypes wins; otherwise StateField infers it from the stack type.
            builder.add(new StateField(name, type, getInitialValue(method), method.getName(), Optional.ofNullable(fieldTypes.get(name))));
        }
        if (method.getName().startsWith("is")) {
            Class<?> type = method.getReturnType();
            // Fix message typo: "is support" -> "is supported".
            checkArgument(type == boolean.class, "Only boolean is supported for 'is' methods");
            String name = method.getName().substring(2);
            builder.add(new StateField(name, type, getInitialValue(method), method.getName(), Optional.of(BOOLEAN)));
        }
    }
    // We need this ordering because the serializer and deserializer are on different machines, and so the ordering of fields must be stable
    // NOTE min_max_by depends on this exact ordering, so any changes here will break it
    // TODO remove this when transition from state classes to multiple intermediates is complete
    Ordering<StateField> ordering = new Ordering<>()
    {
        @Override
        public int compare(StateField left, StateField right)
        {
            return left.getName().compareTo(right.getName());
        }
    };
    List<StateField> fields = ordering.sortedCopy(builder.build());
    checkInterface(clazz, fields);
    return fields;
}
/**
 * Extracts the initial value declared on a getter via one of the
 * {@code @InitialLongValue}, {@code @InitialDoubleValue} or {@code @InitialBooleanValue}
 * annotations, validating that the annotation matches the getter's return type and that
 * at most one such annotation is present.
 *
 * @return the boxed initial value, or {@code null} if no annotation is present
 */
private static Object getInitialValue(Method method)
{
    Object initialValue = null;
    for (Annotation annotation : method.getAnnotations()) {
        if (annotation instanceof InitialLongValue) {
            checkArgument(initialValue == null, "%s has multiple initialValue annotations", method.getName());
            checkArgument(method.getReturnType() == long.class, "%s does not return a long, but is annotated with @InitialLongValue", method.getName());
            InitialLongValue longValue = (InitialLongValue) annotation;
            initialValue = longValue.value();
        }
        else if (annotation instanceof InitialDoubleValue) {
            checkArgument(initialValue == null, "%s has multiple initialValue annotations", method.getName());
            checkArgument(method.getReturnType() == double.class, "%s does not return a double, but is annotated with @InitialDoubleValue", method.getName());
            InitialDoubleValue doubleValue = (InitialDoubleValue) annotation;
            initialValue = doubleValue.value();
        }
        else if (annotation instanceof InitialBooleanValue) {
            checkArgument(initialValue == null, "%s has multiple initialValue annotations", method.getName());
            checkArgument(method.getReturnType() == boolean.class, "%s does not return a boolean, but is annotated with @InitialBooleanValue", method.getName());
            InitialBooleanValue booleanValue = (InitialBooleanValue) annotation;
            initialValue = booleanValue.value();
        }
    }
    return initialValue;
}
/**
 * Validates that {@code clazz} is a well-formed state interface: every non-static method
 * is getEstimatedSize, a getter ("get"/"is" prefix), or a setter ("set" prefix); getter
 * and setter types agree with the enumerated fields; and getters and setters pair up
 * one-to-one.
 *
 * @throws IllegalArgumentException if any rule is violated
 */
private static void checkInterface(Class<?> clazz, List<StateField> fields)
{
    // Use the lazily-formatted template form, consistent with the other checkArgument calls
    // (the original concatenated the message eagerly).
    checkArgument(clazz.isInterface(), "%s is not an interface", clazz.getName());
    Set<String> setters = new HashSet<>();
    Set<String> getters = new HashSet<>();
    Set<String> isGetters = new HashSet<>();
    Map<String, Class<?>> fieldTypes = new HashMap<>();
    for (StateField field : fields) {
        fieldTypes.put(field.getName(), field.getType());
    }
    for (Method method : clazz.getMethods()) {
        if (Modifier.isStatic(method.getModifiers())) {
            continue;
        }
        if (method.getName().equals("getEstimatedSize")) {
            checkArgument(method.getReturnType().equals(long.class), "getEstimatedSize must return long");
            checkArgument(method.getParameterCount() == 0, "getEstimatedSize may not have parameters");
            continue;
        }
        if (method.getName().startsWith("get")) {
            String name = method.getName().substring(3);
            checkArgument(fieldTypes.get(name).equals(method.getReturnType()),
                    "Expected %s to return type %s, but found %s", method.getName(), fieldTypes.get(name), method.getReturnType());
            checkArgument(method.getParameterCount() == 0, "Expected %s to have zero parameters", method.getName());
            getters.add(name);
        }
        else if (method.getName().startsWith("is")) {
            String name = method.getName().substring(2);
            checkArgument(fieldTypes.get(name) == boolean.class,
                    "Expected %s to have type boolean, but found %s", name, fieldTypes.get(name));
            checkArgument(method.getParameterCount() == 0, "Expected %s to have zero parameters", method.getName());
            checkArgument(method.getReturnType() == boolean.class, "Expected %s to return boolean", method.getName());
            isGetters.add(name);
        }
        else if (method.getName().startsWith("set")) {
            String name = method.getName().substring(3);
            checkArgument(method.getParameterCount() == 1, "Expected setter to have one parameter");
            checkArgument(fieldTypes.get(name).equals(method.getParameterTypes()[0]),
                    "Expected %s to accept type %s, but found %s", method.getName(), fieldTypes.get(name), method.getParameterTypes()[0]);
            checkArgument(getInitialValue(method) == null, "initial value annotation not allowed on setter");
            checkArgument(method.getReturnType().equals(void.class), "%s may not return a value", method.getName());
            setters.add(name);
        }
        else {
            throw new IllegalArgumentException("Cannot generate implementation for method: " + method.getName());
        }
    }
    // Every field needs exactly one getter ("get" or "is") and one setter.
    checkArgument(getters.size() + isGetters.size() == setters.size() && setters.size() == fields.size(), "Wrong number of getters/setters");
}
/**
 * Immutable description of one property of a state interface: its name, Java stack type,
 * optional declared initial value, the getter it came from, and the SQL type used for
 * serialization (either supplied explicitly or inferred from the stack type).
 */
private static final class StateField
{
    private final String name;
    private final String getterName;
    private final Class<?> type;
    private final Object initialValue;
    private final Optional<Type> sqlType;

    private StateField(String name, Class<?> type, Object initialValue, String getterName, Optional<Type> sqlType)
    {
        this.name = requireNonNull(name, "name is null");
        checkArgument(!name.isEmpty(), "name is empty");
        this.type = requireNonNull(type, "type is null");
        this.getterName = requireNonNull(getterName, "getterName is null");
        this.initialValue = initialValue;
        // requireNonNull for consistency with the other null checks above
        // (was checkArgument(sqlType != null, ...)).
        requireNonNull(sqlType, "sqlType is null");
        if (sqlType.isPresent()) {
            // byte and int are serialized as TINYINT/INTEGER even though their stack type is long.
            checkArgument(
                    type.isAssignableFrom(sqlType.get().getJavaType()) ||
                            ((type == byte.class) && TINYINT.equals(sqlType.get())) ||
                            ((type == int.class) && INTEGER.equals(sqlType.get())),
                    "Stack type (%s) and provided sql type (%s) are incompatible", type.getName(), sqlType.get().getDisplayName());
        }
        else {
            sqlType = sqlTypeFromStackType(type);
        }
        this.sqlType = sqlType;
    }

    /** Maps a Java stack type to its default SQL type; empty when there is no mapping. */
    private static Optional<Type> sqlTypeFromStackType(Class<?> stackType)
    {
        if (stackType == long.class) {
            return Optional.of(BIGINT);
        }
        if (stackType == double.class) {
            return Optional.of(DOUBLE);
        }
        if (stackType == boolean.class) {
            return Optional.of(BOOLEAN);
        }
        if (stackType == byte.class) {
            return Optional.of(TINYINT);
        }
        if (stackType == int.class) {
            return Optional.of(INTEGER);
        }
        if (stackType == Slice.class) {
            return Optional.of(VARBINARY);
        }
        return Optional.empty();
    }

    String getGetterName()
    {
        return getterName;
    }

    /** Setter name is always derived as "set" + field name, regardless of a "get"/"is" getter prefix. */
    String getSetterName()
    {
        return "set" + getName();
    }

    public String getName()
    {
        return name;
    }

    public Class<?> getType()
    {
        return type;
    }

    /**
     * @throws IllegalArgumentException if the stack type has no SQL mapping and none was provided
     */
    Type getSqlType()
    {
        if (sqlType.isEmpty()) {
            throw new IllegalArgumentException("Unsupported type: " + type);
        }
        return sqlType.get();
    }

    boolean isPrimitiveType()
    {
        Class<?> type = getType();
        return (type == long.class || type == double.class || type == boolean.class || type == byte.class || type == int.class);
    }

    /** Bytecode expression producing this field's initial value (the type's default when unannotated). */
    public BytecodeExpression initialValueExpression()
    {
        if (initialValue == null) {
            return defaultValue(type);
        }
        if (initialValue instanceof Number) {
            return constantNumber((Number) initialValue);
        }
        if (initialValue instanceof Boolean) {
            return constantBoolean((boolean) initialValue);
        }
        throw new IllegalArgumentException("Unsupported initial value type: " + initialValue.getClass());
    }
}
}
| |
/* BufferedReader.java
Copyright (C) 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005
Free Software Foundation, Inc.
This file is part of GNU Classpath.
GNU Classpath is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.
GNU Classpath is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with GNU Classpath; see the file COPYING. If not, write to the
Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301 USA.
Linking this library statically or dynamically with other modules is
making a combined work based on this library. Thus, the terms and
conditions of the GNU General Public License cover the whole
combination.
As a special exception, the copyright holders of this library give you
permission to link this library with independent modules to produce an
executable, regardless of the license terms of these independent
modules, and to copy and distribute the resulting executable under
terms of your choice, provided that you also meet, for each linked
independent module, the terms and conditions of the license of that
module. An independent module is a module which is not derived from
or based on this library. If you modify this library, you may extend
this exception to your version of the library, but you are not
obligated to do so. If you do not wish to do so, delete this
exception statement from your version. */
package java.io;
/* Written using "Java Class Libraries", 2nd edition, plus online
* API docs for JDK 1.2 beta from http://www.javasoft.com.
* Status: Believed complete and correct.
*/
/**
* This subclass of <code>FilterReader</code> buffers input from an
* underlying implementation to provide a possibly more efficient read
* mechanism. It maintains the buffer and buffer state in instance
* variables that are available to subclasses. The default buffer size
* of 8192 chars can be overridden by the creator of the stream.
* <p>
* This class also implements mark/reset functionality. It is capable
* of remembering any number of input chars, to the limits of
* system memory or the size of <code>Integer.MAX_VALUE</code>
*
* @author Per Bothner (bothner@cygnus.com)
* @author Aaron M. Renn (arenn@urbanophile.com)
*/
public class BufferedReader extends Reader
{
  /* The wrapped reader; set to null by close() to mark this reader closed. */
  Reader in;

  /* The read-ahead character buffer. */
  char[] buffer;

  /* Index of current read position.  Must be >= 0 and <= limit. */
  /* There is a special case where pos may be equal to limit+1; this
   * is used as an indicator that a readLine was done with a '\r' was
   * the very last char in the buffer.  Since we don't want to read-ahead
   * and potentially block, we set pos this way to indicate the situation
   * and deal with it later.  Doing it this way rather than having a
   * separate boolean field to indicate the condition has the advantage
   * that it is self-clearing on things like mark/reset.
   */
  int pos;

  /* Limit of valid data in buffer.  Must be >= pos and <= buffer.length. */
  /* This can be < pos in the one special case described above. */
  int limit;

  /* The value -1 means there is no mark, or the mark has been invalidated.
     Otherwise, markPos is the index in the buffer of the marked position.
     Must be >= 0 and <= pos.
     Note we do not explicitly store the read-limit.
     The implicit read-limit is (buffer.length - markPos), which is
     guaranteed to be >= the read-limit requested in the call to mark. */
  int markPos = -1;

  // The JCL book specifies the default buffer size as 8K characters.
  // This is package-private because it is used by LineNumberReader.
  static final int DEFAULT_BUFFER_SIZE = 8192;

  /**
   * The line buffer for <code>readLine</code>; allocated lazily on the
   * first readLine call that spans a buffer refill.
   */
  private StringBuffer sbuf = null;
/**
* Create a new <code>BufferedReader</code> that will read from the
* specified subordinate stream with a default buffer size of 8192 chars.
*
* @param in The subordinate stream to read from
*/
  public BufferedReader(Reader in)
  {
    // Delegate to the two-argument constructor with the JCL-specified default of 8192 chars.
    this(in, DEFAULT_BUFFER_SIZE);
  }
/**
* Create a new <code>BufferedReader</code> that will read from the
* specified subordinate stream with a buffer size that is specified by the
* caller.
*
* @param in The subordinate stream to read from
* @param size The buffer size to use
*
* @exception IllegalArgumentException if size <= 0
*/
  public BufferedReader(Reader in, int size)
  {
    // Share the subordinate reader's lock object so that synchronization on
    // this reader and on the wrapped reader is over the same monitor.
    super(in.lock);
    if (size <= 0)
      throw new IllegalArgumentException("Illegal buffer size: " + size);
    this.in = in;
    buffer = new char[size];
  }
/**
* This method closes the underlying stream and frees any associated
* resources.
*
* @exception IOException If an error occurs
*/
public void close() throws IOException
{
synchronized (lock)
{
if (in != null)
in.close();
in = null;
buffer = null;
}
}
/**
* Returns <code>true</code> to indicate that this class supports mark/reset
* functionality.
*
* @return <code>true</code>
*/
  public boolean markSupported()
  {
    // BufferedReader always supports mark/reset via its internal buffer,
    // independent of the subordinate stream's capabilities.
    return true;
  }
/**
* Mark a position in the input to which the stream can be
* "reset" by calling the <code>reset()</code> method. The parameter
* <code>readLimit</code> is the number of chars that can be read from the
* stream after setting the mark before the mark becomes invalid. For
* example, if <code>mark()</code> is called with a read limit of 10, then
* when 11 chars of data are read from the stream before the
* <code>reset()</code> method is called, then the mark is invalid and the
* stream object instance is not required to remember the mark.
* <p>
* Note that the number of chars that can be remembered by this method
* can be greater than the size of the internal read buffer. It is also
* not dependent on the subordinate stream supporting mark/reset
* functionality.
*
* @param readLimit The number of chars that can be read before the mark
* becomes invalid
*
* @exception IOException If an error occurs
* @exception IllegalArgumentException if readLimit is negative.
*/
  public void mark(int readLimit) throws IOException
  {
    if (readLimit < 0)
      throw new IllegalArgumentException("Read-ahead limit is negative");
    synchronized (lock)
      {
        checkStatus();
        // In this method we need to be aware of the special case where
        // pos + 1 == limit.  This indicates that a '\r' was the last char
        // in the buffer during a readLine.  We'll want to maintain that
        // condition after we shift things around and if a larger buffer is
        // needed to track readLimit, we'll have to make it one element
        // larger to ensure we don't invalidate the mark too early, if the
        // char following the '\r' is NOT a '\n'.  This is ok because, per
        // the spec, we are not required to invalidate when passing readLimit.
        //
        // Note that if 'pos > limit', then doing 'limit -= pos' will cause
        // limit to be negative.  This is the only way limit will be < 0.
        if (pos + readLimit > limit)
          {
            // Not enough room between pos and limit to honor readLimit:
            // shift the unread data to the front (and possibly grow the buffer).
            char[] old_buffer = buffer;
            int extraBuffSpace = 0;
            if (pos > limit)
              extraBuffSpace = 1;
            // NOTE(review): this compares readLimit against the amount of
            // valid data (limit) rather than buffer.length — presumably
            // intentional since limit is rebased right below, but worth
            // confirming against upstream Classpath.
            if (readLimit + extraBuffSpace > limit)
              buffer = new char[readLimit + extraBuffSpace];
            limit -= pos;
            if (limit >= 0)
              {
                System.arraycopy(old_buffer, pos, buffer, 0, limit);
                pos = 0;
              }
          }
        if (limit < 0)
          {
            // Maintain the relationship of 'pos > limit'.
            pos = 1;
            limit = markPos = 0;
          }
        else
          markPos = pos;
        // Now pos + readLimit <= buffer.length. thus if we need to read
        // beyond buffer.length, then we are allowed to invalidate markPos.
      }
  }
/**
* Reset the stream to the point where the <code>mark()</code> method
* was called. Any chars that were read after the mark point was set will
* be re-read during subsequent reads.
* <p>
* This method will throw an IOException if the number of chars read from
* the stream since the call to <code>mark()</code> exceeds the mark limit
* passed when establishing the mark.
*
* @exception IOException If an error occurs;
*/
  public void reset() throws IOException
  {
    synchronized (lock)
      {
        checkStatus();
        if (markPos < 0)
          throw new IOException("mark never set or invalidated");
        // Need to handle the extremely unlikely case where a readLine was
        // done with a '\r' as the last char in the buffer; which was then
        // immediately followed by a mark and a reset with NO intervening
        // read of any sort.  In that case, setting pos to markPos would
        // lose that info and a subsequent read would thus not skip a '\n'
        // (if one exists).  The value of limit in this rare case is zero.
        // We can assume that if limit is zero for other reasons, then
        // pos is already set to zero and doesn't need to be readjusted.
        if (limit > 0)
          pos = markPos;
      }
  }
/**
* This method determines whether or not a stream is ready to be read. If
* this method returns <code>false</code> then this stream could (but is
* not guaranteed to) block on the next read attempt.
*
* @return <code>true</code> if this stream is ready to be read,
* <code>false</code> otherwise
*
* @exception IOException If an error occurs
*/
public boolean ready() throws IOException
{
synchronized (lock)
{
checkStatus();
return pos < limit || in.ready();
}
}
/**
* This method read chars from a stream and stores them into a caller
* supplied buffer. It starts storing the data at index
* <code>offset</code> into
* the buffer and attempts to read <code>len</code> chars. This method can
* return before reading the number of chars requested. The actual number
* of chars read is returned as an int. A -1 is returned to indicate the
* end of the stream.
* <p>
* This method will block until some data can be read.
*
* @param buf The array into which the chars read should be stored
* @param offset The offset into the array to start storing chars
* @param count The requested number of chars to read
*
* @return The actual number of chars read, or -1 if end of stream.
*
* @exception IOException If an error occurs.
* @exception IndexOutOfBoundsException If offset and count are not
* valid regarding buf.
*/
public int read(char[] buf, int offset, int count) throws IOException
{
if (offset < 0 || offset + count > buf.length || count < 0)
throw new IndexOutOfBoundsException();
synchronized (lock)
{
checkStatus();
// Once again, we need to handle the special case of a readLine
// that has a '\r' at the end of the buffer. In this case, we'll
// need to skip a '\n' if it is the next char to be read.
// This special case is indicated by 'pos > limit'.
boolean retAtEndOfBuffer = false;
int avail = limit - pos;
if (count > avail)
{
if (avail > 0)
count = avail;
else // pos >= limit
{
if (limit == buffer.length)
markPos = -1; // read too far - invalidate the mark.
if (pos > limit)
{
// Set a boolean and make pos == limit to simplify things.
retAtEndOfBuffer = true;
--pos;
}
if (markPos < 0)
{
// Optimization: can read directly into buf.
if (count >= buffer.length && !retAtEndOfBuffer)
return in.read(buf, offset, count);
pos = limit = 0;
}
avail = in.read(buffer, limit, buffer.length - limit);
if (retAtEndOfBuffer && avail > 0 && buffer[limit] == '\n')
{
--avail;
limit++;
}
if (avail < count)
{
if (avail <= 0)
return avail;
count = avail;
}
limit += avail;
}
}
System.arraycopy(buffer, pos, buf, offset, count);
pos += count;
return count;
}
}
/* Read more data into the buffer. Update pos and limit appropriately.
Assumes pos==limit initially. May invalidate the mark if read too much.
Return number of chars read (never 0), or -1 on eof. */
  /* Read more data into the buffer.  Update pos and limit appropriately.
     Assumes pos==limit initially.  May invalidate the mark if read too much.
     Return number of chars read (never 0), or -1 on eof. */
  private int fill() throws IOException
  {
    checkStatus();
    // Handle the special case of a readLine that has a '\r' at the end of
    // the buffer.  In this case, we'll need to skip a '\n' if it is the
    // next char to be read.  This special case is indicated by 'pos > limit'.
    boolean retAtEndOfBuffer = false;
    if (pos > limit)
      {
        retAtEndOfBuffer = true;
        --pos;
      }
    // Buffer exhausted past the mark's implicit read-limit: drop the mark.
    if (markPos >= 0 && limit == buffer.length)
      markPos = -1;
    // With no mark to preserve, restart the buffer from the beginning.
    if (markPos < 0)
      pos = limit = 0;
    int count = in.read(buffer, limit, buffer.length - limit);
    if (count > 0)
      limit += count;
    // NOTE(review): when count <= 0 (EOF) nothing new was written at pos,
    // so this tests a stale buffer char in the retAtEndOfBuffer case —
    // presumably harmless in practice, but worth confirming upstream.
    if (retAtEndOfBuffer && buffer[pos] == '\n')
      {
        --count;
        // If the mark was set to the location of the \n, then we
        // must change it to fully pretend that the \n does not
        // exist.
        if (markPos == pos)
          ++markPos;
        ++pos;
      }
    return count;
  }
public int read() throws IOException
{
synchronized (lock)
{
checkStatus();
if (pos >= limit && fill () <= 0)
return -1;
return buffer[pos++];
}
}
/* Return the end of the line starting at this.pos and ending at limit.
 * The index returned is *before* any line terminators, or limit
* if no line terminators were found.
*/
private int lineEnd(int limit)
{
int i = pos;
for (; i < limit; i++)
{
char ch = buffer[i];
if (ch == '\n' || ch == '\r')
break;
}
return i;
}
/**
 * This method reads a single line of text from the input stream, returning
 * it as a <code>String</code>. A line is terminated by "\n", a "\r", or
 * an "\r\n" sequence. The system dependent line separator is not used.
 * The line termination characters are not returned in the resulting
 * <code>String</code>.
 * <p>
 * NOTE(review): unlike read() and skip(), this method does not synchronize
 * on 'lock' -- confirm whether that is intentional.
 *
 * @return The line of text read, or <code>null</code> if end of stream.
 *
 * @exception IOException If an error occurs
 */
public String readLine() throws IOException
{
    checkStatus();
    // Handle the special case where a previous readLine (with no intervening
    // reads/skips) had a '\r' at the end of the buffer.
    // In this case, we'll need to skip a '\n' if it's the next char to be read.
    // This special case is indicated by 'pos > limit'.
    if (pos > limit)
    {
        int ch = read();
        if (ch < 0)
            return null;
        if (ch != '\n')
            --pos; // not the deferred '\n'; push the char back
    }
    // Fast path: the whole line (and its terminator) is already buffered.
    int i = lineEnd(limit);
    if (i < limit)
    {
        String str = String.valueOf(buffer, pos, i - pos);
        pos = i + 1;
        // If the last char in the buffer is a '\r', we must remember
        // to check if the next char to be read after the buffer is refilled
        // is a '\n'. If so, skip it. To indicate this condition, we set pos
        // to be limit + 1, which normally is never possible.
        if (buffer[i] == '\r')
            if (pos == limit || buffer[pos] == '\n')
                pos++;
        return str;
    }
    // Slow path: accumulate chars across one or more buffer refills.
    if (sbuf == null)
        sbuf = new StringBuffer(200);
    else
        sbuf.setLength(0);
    sbuf.append(buffer, pos, i - pos);
    pos = i;
    // We only want to return null when no characters were read before
    // EOF. So we must keep track of this separately. Otherwise we
    // would treat an empty `sbuf' as an EOF condition, which is wrong
    // when there is just a newline.
    boolean eof = false;
    for (;;)
    {
        // readLine should block. So we must not return until a -1 is reached.
        if (pos >= limit)
        {
            // here count == 0 isn't sufficient to give a failure.
            int count = fill();
            if (count < 0)
            {
                eof = true;
                break;
            }
            continue;
        }
        int ch = buffer[pos++];
        if (ch == '\n' || ch == '\r')
        {
            // Check here if a '\r' was the last char in the buffer; if so,
            // mark it as in the comment above to indicate future reads
            // should skip a newline that is the next char read after
            // refilling the buffer.
            if (ch == '\r')
                if (pos == limit || buffer[pos] == '\n')
                    pos++;
            break;
        }
        // Append the rest of the freshly filled buffer up to (but not
        // including) the next line terminator, then loop to refill.
        i = lineEnd(limit);
        sbuf.append(buffer, pos - 1, i - (pos - 1));
        pos = i;
    }
    return (sbuf.length() == 0 && eof) ? null : sbuf.toString();
}
/**
 * This method skips the specified number of chars in the stream. It
 * returns the actual number of chars skipped, which may be less than the
 * requested amount.
 * <p>
 * This method first discards chars in the buffer, then calls the
 * <code>skip</code> method on the underlying stream to skip the
 * remaining chars.
 *
 * @param count The requested number of chars to skip
 *
 * @return The actual number of chars skipped.
 *
 * @exception IOException If an error occurs.
 * @exception IllegalArgumentException If count is negative.
 */
public long skip(long count) throws IOException
{
    synchronized (lock)
    {
        checkStatus();
        if (count < 0)
            throw new IllegalArgumentException("skip value is negative");
        if (count == 0)
            return 0;
        // Yet again, we need to handle the special case of a readLine
        // that has a '\r' at the end of the buffer. In this case, we need
        // to ignore a '\n' if it is the next char to be read.
        // This special case is indicated by 'pos > limit' (i.e. avail < 0).
        // To simplify things, if we're dealing with the special case for
        // readLine, just read the next char (since the fill method will
        // skip the '\n' for us). By doing this, we'll have to back up pos.
        // That's easier than trying to keep track of whether we've skipped
        // one element or not.
        if (pos > limit)
        {
            if (read() < 0)
                return 0;
            else
                --pos;
        }
        // First consume whatever is already buffered.
        int avail = limit - pos;
        if (count < avail)
        {
            pos += count;
            return count;
        }
        pos = limit;
        long todo = count - avail;
        if (todo > buffer.length)
        {
            // Skipping more than a buffer's worth: delegate to the
            // underlying stream. This necessarily invalidates the mark.
            // NOTE(review): a single in.skip() call may skip fewer chars
            // than requested; the shortfall is simply reported to the
            // caller via the return value -- confirm that is intended.
            markPos = -1;
            todo -= in.skip(todo);
        }
        else
        {
            // Small skips go through fill() so an active mark can survive.
            while (todo > 0)
            {
                avail = fill();
                if (avail <= 0)
                    break;
                if (avail > todo)
                    avail = (int) todo;
                pos += avail;
                todo -= avail;
            }
        }
        return count - todo;
    }
}
/** Throws an IOException if this reader has been closed ('in' is null). */
private void checkStatus() throws IOException
{
    if (in == null)
        throw new IOException("Stream closed");
}
}
| |
/*
* Copyright (c) 2017. tangzx(love.tangzx@qq.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.tang.intellij.lua.debugger.emmy;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.EditorFactory;
import com.intellij.openapi.editor.ex.EditorEx;
import com.intellij.openapi.options.SettingsEditor;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.SystemInfoRt;
import com.tang.intellij.lua.lang.LuaFileType;
import com.tang.intellij.lua.psi.LuaFileUtil;
import org.jetbrains.annotations.NotNull;
import javax.swing.*;
import javax.swing.event.DocumentEvent;
import javax.swing.event.DocumentListener;
import javax.swing.text.AttributeSet;
import javax.swing.text.BadLocationException;
import javax.swing.text.PlainDocument;
import java.awt.*;
/**
 * Settings UI for an Emmy debugger run configuration. Lets the user pick the
 * transport (TCP client/server or pipe), edit the connection parameters, and
 * shows a live Lua-snippet preview to paste into the debuggee.
 * Implements {@link DocumentListener} so edits in the text fields refresh the
 * preview and notify the editor framework.
 */
public class EmmyDebugSettingsPanel extends SettingsEditor<EmmyDebugConfiguration> implements DocumentListener {
    // UI components are bound from the associated GUI-designer .form file.
    private JComboBox<EmmyDebugTransportType> typeCombox;
    private JLabel type;
    private JTextField tcpHostInput;
    private JTextField tcpPortInput;
    private JLabel tcpHostLabel;
    private JLabel tcpPortLabel;
    private JTextField pipelineInput;
    private JLabel pipeNameLabel;
    private JPanel panel;
    private JPanel codePanel;
    private JCheckBox waitIDECheckBox;
    private JCheckBox breakWhenIDEConnectedCheckBox;
    private JRadioButton x64RadioButton;
    private JRadioButton x86RadioButton;
    private JPanel winArchPanel;
    private ButtonGroup winArchGroup;
    private EditorEx editorEx;

    public EmmyDebugSettingsPanel(Project project) {
        // Transport type combo: only the TCP modes are currently exposed.
        DefaultComboBoxModel<EmmyDebugTransportType> model = new DefaultComboBoxModel<>();
        model.addElement(EmmyDebugTransportType.TCP_CLIENT);
        model.addElement(EmmyDebugTransportType.TCP_SERVER);
        typeCombox.addActionListener(e -> {
            setType((EmmyDebugTransportType) typeCombox.getSelectedItem());
            onChanged();
        });
        typeCombox.setModel(model);
        // TCP defaults.
        // NOTE(review): setDocument() below replaces the field's document and
        // thereby clears the "9966" default just set; the value is restored by
        // resetEditorFrom() in the normal editor lifecycle -- confirm intended.
        tcpHostInput.setText("localhost");
        tcpHostInput.getDocument().addDocumentListener(this);
        tcpPortInput.setText("9966");
        tcpPortInput.setDocument(new IntegerDocument()); // digits-only input
        tcpPortInput.getDocument().addDocumentListener(this);
        // Pipe default.
        pipelineInput.setText("emmylua");
        pipelineInput.getDocument().addDocumentListener(this);
        waitIDECheckBox.addActionListener(e -> onChanged());
        breakWhenIDEConnectedCheckBox.addActionListener(e -> onChanged());
        // Windows architecture selector (hidden on other platforms).
        winArchGroup = new ButtonGroup();
        winArchPanel.setVisible(SystemInfoRt.isWindows);
        winArchGroup.add(x64RadioButton);
        winArchGroup.add(x86RadioButton);
        x64RadioButton.addChangeListener(e -> onChanged());
        x86RadioButton.addChangeListener(e -> onChanged());
        // Read-only Lua preview editor.
        editorEx = createEditorEx(project);
        codePanel.add(editorEx.getComponent(), BorderLayout.CENTER);
        updateCode();
    }

    /** Re-syncs dependent controls, notifies the framework, refreshes the preview. */
    private void onChanged() {
        if (isClient()) {
            // "break when IDE connected" only applies after waiting for the IDE.
            breakWhenIDEConnectedCheckBox.setEnabled(waitIDECheckBox.isSelected());
        } else {
            breakWhenIDEConnectedCheckBox.setEnabled(true);
        }
        fireEditorStateChanged();
        updateCode();
    }

    @Override
    protected void resetEditorFrom(@NotNull EmmyDebugConfiguration configuration) {
        typeCombox.setSelectedItem(configuration.getType());
        setType(configuration.getType());
        tcpHostInput.setText(configuration.getHost());
        tcpPortInput.setText(String.valueOf(configuration.getPort()));
        pipelineInput.setText(configuration.getPipeName());
        if (SystemInfoRt.isWindows) {
            if (configuration.getWinArch() == EmmyWinArch.X64) {
                x64RadioButton.setSelected(true);
            } else {
                x86RadioButton.setSelected(true);
            }
        }
    }

    @Override
    protected void applyEditorTo(@NotNull EmmyDebugConfiguration configuration) {
        EmmyDebugTransportType type = (EmmyDebugTransportType) typeCombox.getSelectedItem();
        assert type != null;
        configuration.setType(type);
        configuration.setHost(tcpHostInput.getText());
        // Fix: use getPort() rather than a bare Integer.parseInt -- an empty
        // or invalid port field previously threw NumberFormatException here.
        configuration.setPort(getPort());
        configuration.setPipeName(pipelineInput.getText());
        if (SystemInfoRt.isWindows) {
            configuration.setWinArch(x64RadioButton.isSelected() ? EmmyWinArch.X64 : EmmyWinArch.X86);
        }
    }

    /** Shows/hides the TCP vs. pipe controls for the selected transport. */
    protected void setType(EmmyDebugTransportType type) {
        boolean isTCP = type == EmmyDebugTransportType.TCP_CLIENT || type == EmmyDebugTransportType.TCP_SERVER;
        tcpHostLabel.setVisible(isTCP);
        tcpPortLabel.setVisible(isTCP);
        tcpHostInput.setVisible(isTCP);
        tcpPortInput.setVisible(isTCP);
        pipeNameLabel.setVisible(!isTCP);
        pipelineInput.setVisible(!isTCP);
        waitIDECheckBox.setVisible(isClient());
    }

    /** True when the IDE acts as the connecting side (TCP or pipe client). */
    private boolean isClient() {
        EmmyDebugTransportType type = getType();
        return type == EmmyDebugTransportType.TCP_CLIENT || type == EmmyDebugTransportType.PIPE_CLIENT;
    }

    private EmmyDebugTransportType getType() {
        return (EmmyDebugTransportType) typeCombox.getSelectedItem();
    }

    private String getHost() {
        return tcpHostInput.getText();
    }

    /** Parses the port field, falling back to 0 on empty/invalid input. */
    private int getPort() {
        int port = 0;
        try {
            port = Integer.parseInt(tcpPortInput.getText());
        } catch (Exception ignored) {
            // Keep the 0 default; the field is digits-only but may be empty.
        }
        return port;
    }

    private String getPipeName() {
        return pipelineInput.getText();
    }

    @NotNull
    @Override
    protected JComponent createEditor() {
        return panel;
    }

    /** Creates the read-only Lua editor used for the snippet preview. */
    private EditorEx createEditorEx(Project project) {
        EditorFactory editorFactory = EditorFactory.getInstance();
        Document editorDocument = editorFactory.createDocument("");
        return (EditorEx) editorFactory.createEditor(editorDocument, project, LuaFileType.INSTANCE, false);
    }

    private void updateCode() {
        // Document mutation must happen inside a write action.
        ApplicationManager.getApplication().runWriteAction(this::updateCodeImpl);
    }

    /** Returns the bundled per-platform debugger binary folder. */
    private String getDebuggerFolder() {
        if (SystemInfoRt.isWindows)
            return LuaFileUtil.INSTANCE.getPluginVirtualFile("debugger/emmy/windows");
        if (SystemInfoRt.isMac)
            return LuaFileUtil.INSTANCE.getPluginVirtualFile("debugger/emmy/mac");
        return LuaFileUtil.INSTANCE.getPluginVirtualFile("debugger/emmy/linux");
    }

    /** Regenerates the Lua snippet the user pastes into the debuggee. */
    private void updateCodeImpl() {
        StringBuilder sb = new StringBuilder();
        // Extend package.cpath so the debuggee can require the native module.
        if (SystemInfoRt.isWindows) {
            EmmyWinArch arch = x64RadioButton.isSelected() ? EmmyWinArch.X64 : EmmyWinArch.X86;
            sb.append("package.cpath = package.cpath .. ';")
                    .append(getDebuggerFolder())
                    .append("/")
                    .append(arch.getDesc())
                    .append("/?.dll'\n");
        } else if (SystemInfoRt.isMac) {
            sb.append("package.cpath = package.cpath .. ';")
                    .append(getDebuggerFolder())
                    .append("/?.dylib'\n");
        } else {
            sb.append("package.cpath = package.cpath .. ';")
                    .append(getDebuggerFolder())
                    .append("/?.so'\n");
        }
        sb.append("local dbg = require('emmy_core')\n");
        // The *_CLIENT/SERVER names describe the IDE's role; the snippet runs
        // in the debuggee, so the calls are mirrored on purpose (the debuggee
        // listens when the IDE is the client) -- presumably intentional.
        EmmyDebugTransportType type = getType();
        if (type == EmmyDebugTransportType.PIPE_CLIENT) {
            sb.append("dbg.pipeListen('").append(getPipeName()).append("')\n");
        }
        else if (type == EmmyDebugTransportType.PIPE_SERVER) {
            sb.append("dbg.pipeConnect('").append(getPipeName()).append("')\n");
        }
        else if (type == EmmyDebugTransportType.TCP_CLIENT) {
            sb.append("dbg.tcpListen('").append(getHost()).append("', ").append(getPort()).append(")\n");
        }
        else if (type == EmmyDebugTransportType.TCP_SERVER) {
            sb.append("dbg.tcpConnect('").append(getHost()).append("', ").append(getPort()).append(")\n");
        }
        if (isClient()) {
            if (waitIDECheckBox.isSelected()) {
                sb.append("dbg.waitIDE()\n");
                if (breakWhenIDEConnectedCheckBox.isSelected()) {
                    sb.append("dbg.breakHere()\n");
                }
            }
        } else {
            if (breakWhenIDEConnectedCheckBox.isSelected()) {
                sb.append("dbg.breakHere()\n");
            }
        }
        editorEx.getDocument().setText(sb.toString());
    }

    @Override
    public void insertUpdate(DocumentEvent e) {
        onChanged();
    }

    @Override
    public void removeUpdate(DocumentEvent e) {
        onChanged();
    }

    @Override
    public void changedUpdate(DocumentEvent e) {
        onChanged();
    }

    /** A PlainDocument that silently rejects any insertion that is not an integer. */
    static class IntegerDocument extends PlainDocument {
        public void insertString(int offset, String s, AttributeSet attributeSet) throws BadLocationException {
            try {
                Integer.parseInt(s);
            } catch (Exception ex) {
                return; // drop non-numeric input instead of throwing
            }
            super.insertString(offset, s, attributeSet);
        }
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.transport.failover;
import javax.jms.Connection;
import javax.jms.JMSException;
import javax.jms.MessageConsumer;
import javax.jms.Queue;
import javax.jms.Session;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import org.apache.activemq.ActiveMQConnection;
import org.apache.activemq.ActiveMQConnectionFactory;
import org.apache.activemq.artemis.core.config.Configuration;
import org.apache.activemq.artemis.jms.server.config.impl.JMSConfigurationImpl;
import org.apache.activemq.artemis.jms.server.embedded.EmbeddedJMS;
import org.apache.activemq.broker.artemiswrapper.OpenwireArtemisBaseTest;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
/**
* Complex cluster test that will exercise the dynamic failover capabilities of
* a network of brokers. Using a networking of 3 brokers where the 3rd broker is
* removed and then added back in it is expected in each test that the number of
* connections on the client should start with 3, then have two after the 3rd
* broker is removed and then show 3 after the 3rd broker is reintroduced.
*/
@Ignore
public class FailoverComplexClusterTest extends OpenwireArtemisBaseTest {

    private static final String BROKER_A_CLIENT_TC_ADDRESS = "tcp://127.0.0.1:61616";
    private static final String BROKER_B_CLIENT_TC_ADDRESS = "tcp://127.0.0.1:61617";
    private String clientUrl;

    private EmbeddedJMS[] servers = new EmbeddedJMS[3];

    private static final int NUMBER_OF_CLIENTS = 30;
    private final List<ActiveMQConnection> connections = new ArrayList<>();

    @Before
    public void setUp() throws Exception {
    }

    /**
     * Default setup for most tests: a fully-meshed 3-broker cluster whose
     * members are configured to push topology updates (and rebalance
     * requests) to connected clients.
     */
    private void commonSetup() throws Exception {
        Map<String, String> params = new HashMap<>();
        params.put("rebalanceClusterClients", "true");
        params.put("updateClusterClients", "true");
        params.put("updateClusterClientsOnRemove", "true");
        Configuration config0 = createConfig("localhost", 0, params);
        Configuration config1 = createConfig("localhost", 1, params);
        Configuration config2 = createConfig("localhost", 2, params);
        // Mesh: each broker is clustered with the other two.
        deployClusterConfiguration(config0, 1, 2);
        deployClusterConfiguration(config1, 0, 2);
        deployClusterConfiguration(config2, 0, 1);
        servers[0] = new EmbeddedJMS().setConfiguration(config0).setJmsConfiguration(new JMSConfigurationImpl());
        servers[1] = new EmbeddedJMS().setConfiguration(config1).setJmsConfiguration(new JMSConfigurationImpl());
        servers[2] = new EmbeddedJMS().setConfiguration(config2).setJmsConfiguration(new JMSConfigurationImpl());
        servers[0].start();
        servers[1].start();
        servers[2].start();
        Assert.assertTrue(servers[0].waitClusterForming(100, TimeUnit.MILLISECONDS, 20, 3));
        Assert.assertTrue(servers[1].waitClusterForming(100, TimeUnit.MILLISECONDS, 20, 3));
        Assert.assertTrue(servers[2].waitClusterForming(100, TimeUnit.MILLISECONDS, 20, 3));
    }

    @After
    public void tearDown() throws Exception {
        shutdownClients();
        for (EmbeddedJMS server : servers) {
            if (server != null) {
                server.stop();
            }
        }
    }

    /**
     * Basic dynamic failover 3 broker test
     *
     * @throws Exception
     */
    @Test
    public void testThreeBrokerClusterSingleConnectorBasic() throws Exception {
        commonSetup();
        setClientUrl("failover://(" + BROKER_A_CLIENT_TC_ADDRESS + "," + BROKER_B_CLIENT_TC_ADDRESS + ")");
        createClients();
        Thread.sleep(3000);
        runTests(false, null, null, null);
    }

    /**
     * Tests a 3 broker configuration to ensure that the backup is random and
     * supported in a cluster. useExponentialBackOff is set to false and
     * maxReconnectAttempts is set to 1 to move through the list quickly for
     * this test.
     *
     * @throws Exception
     */
    @Test
    public void testThreeBrokerClusterSingleConnectorBackupFailoverConfig() throws Exception {
        commonSetup();
        Thread.sleep(2000);
        setClientUrl("failover://(" + BROKER_A_CLIENT_TC_ADDRESS + "," + BROKER_B_CLIENT_TC_ADDRESS + ")?backup=true&backupPoolSize=2&useExponentialBackOff=false&initialReconnectDelay=500");
        createClients();
        Thread.sleep(2000);
        runTests(false, null, null, null);
    }

    /**
     * Tests a 3 broker cluster that passes in connection params on the
     * transport connector. Prior versions of AMQ passed the TC connection
     * params to the client and this should not happen. The chosen param is not
     * compatible with the client and will throw an error if used.
     *
     * @throws Exception
     */
    @Test
    public void testThreeBrokerClusterSingleConnectorWithParams() throws Exception {
        commonSetup();
        Thread.sleep(2000);
        setClientUrl("failover://(" + BROKER_A_CLIENT_TC_ADDRESS + "," + BROKER_B_CLIENT_TC_ADDRESS + ")");
        createClients();
        Thread.sleep(2000);
        runTests(false, null, null, null);
    }

    /**
     * Tests a 3 broker cluster using a cluster filter of *
     *
     * @throws Exception
     */
    @Test
    public void testThreeBrokerClusterWithClusterFilter() throws Exception {
        commonSetup();
        Thread.sleep(2000);
        setClientUrl("failover://(" + BROKER_A_CLIENT_TC_ADDRESS + "," + BROKER_B_CLIENT_TC_ADDRESS + ")");
        createClients();
        runTests(false, null, "*", null);
    }

    /**
     * Test to verify that a broker with multiple transport connections only the
     * one marked to update clients is propagate
     *
     * @throws Exception
     */
    @Test
    public void testThreeBrokerClusterMultipleConnectorBasic() throws Exception {
        commonSetup();
        Thread.sleep(2000);
        setClientUrl("failover://(" + BROKER_A_CLIENT_TC_ADDRESS + "," + BROKER_B_CLIENT_TC_ADDRESS + ")");
        createClients();
        Thread.sleep(2000);
        runTests(true, null, null, null);
    }

    /**
     * Test to verify the reintroduction of the A Broker
     *
     * @throws Exception
     */
    @Test
    public void testOriginalBrokerRestart() throws Exception {
        commonSetup();
        Thread.sleep(2000);
        setClientUrl("failover://(" + BROKER_A_CLIENT_TC_ADDRESS + "," + BROKER_B_CLIENT_TC_ADDRESS + ")");
        createClients();
        Thread.sleep(2000);
        assertClientsConnectedToThreeBrokers();
        stopServer(0);
        Thread.sleep(5000);
        assertClientsConnectedToTwoBrokers();
        restartServer(0);
        Thread.sleep(5000);
        assertClientsConnectedToThreeBrokers();
    }

    /**
     * Test to ensure clients are evenly distributed to all available brokers
     * in the network.
     *
     * @throws Exception
     */
    @Test
    public void testThreeBrokerClusterClientDistributions() throws Exception {
        commonSetup();
        Thread.sleep(2000);
        setClientUrl("failover://(" + BROKER_A_CLIENT_TC_ADDRESS + "," + BROKER_B_CLIENT_TC_ADDRESS + ")?useExponentialBackOff=false&initialReconnectDelay=500");
        createClients(100);
        Thread.sleep(5000);
        runClientDistributionTests(false, null, null, null);
    }

    /**
     * Test to verify that clients are distributed with no less than 20% of the
     * clients on any one broker.
     *
     * @throws Exception
     */
    @Test
    public void testThreeBrokerClusterDestinationFilter() throws Exception {
        commonSetup();
        Thread.sleep(2000);
        setClientUrl("failover://(" + BROKER_A_CLIENT_TC_ADDRESS + "," + BROKER_B_CLIENT_TC_ADDRESS + ")");
        createClients();
        runTests(false, null, null, "Queue.TEST.FOO.>");
    }

    @Test
    public void testFailOverWithUpdateClientsOnRemove() throws Exception {
        // Broker A
        Configuration config0 = createConfig(0, "?rebalanceClusterClients=true&updateClusterClients=true&updateClusterClientsOnRemove=true");
        // Broker B
        Configuration config1 = createConfig(1, "?rebalanceClusterClients=true&updateClusterClients=true&updateClusterClientsOnRemove=true");
        deployClusterConfiguration(config0, 1);
        deployClusterConfiguration(config1, 0);
        servers[0] = new EmbeddedJMS().setConfiguration(config0).setJmsConfiguration(new JMSConfigurationImpl());
        servers[0].start();
        servers[1] = new EmbeddedJMS().setConfiguration(config1).setJmsConfiguration(new JMSConfigurationImpl());
        servers[1].start();
        servers[0].waitClusterForming(100, TimeUnit.MILLISECONDS, 20, 2);
        servers[1].waitClusterForming(100, TimeUnit.MILLISECONDS, 20, 2);
        Thread.sleep(1000);
        // create client connecting only to A. It should receive broker B address when it connects to A.
        setClientUrl("failover:(" + BROKER_A_CLIENT_TC_ADDRESS + ")?useExponentialBackOff=true");
        createClients(1);
        Thread.sleep(5000);
        // We stop broker A.
        servers[0].stop();
        servers[1].waitClusterForming(100, TimeUnit.MILLISECONDS, 20, 1);
        Thread.sleep(5000);
        // Client should failover to B.
        assertAllConnectedTo(BROKER_B_CLIENT_TC_ADDRESS);
    }

    /**
     * Runs a 3 Broker dynamic failover test: <br/>
     * <ul>
     * <li>asserts clients are distributed across all 3 brokers</li>
     * <li>asserts clients are distributed across 2 brokers after removing the 3rd</li>
     * <li>asserts clients are distributed across all 3 brokers after
     * reintroducing the 3rd broker</li>
     * </ul>
     *
     * Note: the parameters are currently unused but kept so all callers and
     * sibling overloads share the same signature.
     *
     * @param multi
     * @param tcParams
     * @param clusterFilter
     * @param destinationFilter
     * @throws Exception
     */
    private void runTests(boolean multi,
                          String tcParams,
                          String clusterFilter,
                          String destinationFilter) throws Exception {
        assertClientsConnectedToThreeBrokers();
        stopServer(2);
        Thread.sleep(5000);
        assertClientsConnectedToTwoBrokers();
        restartServer(2);
        Thread.sleep(5000);
        assertClientsConnectedToThreeBrokers();
    }

    public void setClientUrl(String clientUrl) {
        this.clientUrl = clientUrl;
    }

    protected void createClients() throws Exception {
        createClients(NUMBER_OF_CLIENTS);
    }

    /** Opens {@code numOfClients} connections (each with a consumer) against {@link #clientUrl}. */
    protected void createClients(int numOfClients) throws Exception {
        ActiveMQConnectionFactory factory = new ActiveMQConnectionFactory(clientUrl);
        for (int i = 0; i < numOfClients; i++) {
            ActiveMQConnection c = (ActiveMQConnection) factory.createConnection();
            c.start();
            Session s = c.createSession(false, Session.AUTO_ACKNOWLEDGE);
            Queue queue = s.createQueue(getClass().getName());
            // The consumer keeps the subscription alive; the reference itself is not needed.
            s.createConsumer(queue);
            connections.add(c);
        }
    }

    /**
     * Closes every client connection. Previously a failing close() aborted the
     * loop, leaving the remaining connections open during tearDown; now all
     * connections are closed and the first failure is rethrown afterwards.
     */
    protected void shutdownClients() throws JMSException {
        JMSException firstFailure = null;
        for (Connection c : connections) {
            try {
                c.close();
            } catch (JMSException e) {
                if (firstFailure == null) {
                    firstFailure = e;
                }
            }
        }
        if (firstFailure != null) {
            throw firstFailure;
        }
    }

    protected void assertClientsConnectedToThreeBrokers() {
        Set<String> set = new HashSet<>();
        for (ActiveMQConnection c : connections) {
            if (c.getTransportChannel().getRemoteAddress() != null) {
                set.add(c.getTransportChannel().getRemoteAddress());
            }
        }
        Assert.assertTrue("Only 3 connections should be found: " + set, set.size() == 3);
    }

    protected void assertClientsConnectedToTwoBrokers() {
        Set<String> set = new HashSet<>();
        for (ActiveMQConnection c : connections) {
            if (c.getTransportChannel().getRemoteAddress() != null) {
                set.add(c.getTransportChannel().getRemoteAddress());
            }
        }
        Assert.assertTrue("Only 2 connections should be found: " + set, set.size() == 2);
    }

    /** Stops one broker and waits for the survivors to re-form the smaller cluster. */
    private void stopServer(int serverID) throws Exception {
        servers[serverID].stop();
        for (int i = 0; i < servers.length; i++) {
            if (i != serverID) {
                Assert.assertTrue(servers[i].waitClusterForming(100, TimeUnit.MILLISECONDS, 20, servers.length - 1));
            }
        }
    }

    /** Restarts a stopped broker and waits for the full cluster to re-form. */
    private void restartServer(int serverID) throws Exception {
        servers[serverID].start();
        for (int i = 0; i < servers.length; i++) {
            Assert.assertTrue(servers[i].waitClusterForming(100, TimeUnit.MILLISECONDS, 20, servers.length));
        }
    }

    /** Distribution variant of {@link #runTests}; parameters kept for signature parity. */
    private void runClientDistributionTests(boolean multi,
                                            String tcParams,
                                            String clusterFilter,
                                            String destinationFilter) throws Exception {
        assertClientsConnectedToThreeBrokers();
        //if 2/3 or more of total connections connect to one node, we consider it wrong
        //if 1/4 or less of total connects to one node, we consider it wrong
        assertClientsConnectionsEvenlyDistributed(.25, .67);
        stopServer(2);
        Thread.sleep(5000);
        assertClientsConnectedToTwoBrokers();
        //now there are only 2 nodes
        //if 2/3 or more of total connections go to either node, we consider it wrong
        //if 1/3 or less of total connections go to either node, we consider it wrong
        assertClientsConnectionsEvenlyDistributed(.34, .67);
        restartServer(2);
        Thread.sleep(5000);
        assertClientsConnectedToThreeBrokers();
        //now back to 3 nodes. We assume at least the new node will
        //have 1/10 of the total connections, and any node's connections
        //won't exceed 50%
        assertClientsConnectionsEvenlyDistributed(.10, .50);
    }

    /**
     * Asserts every broker's share of the client connections lies within
     * [minimumPercentage, maximumPercentage] of the total.
     */
    protected void assertClientsConnectionsEvenlyDistributed(double minimumPercentage, double maximumPercentage) {
        Map<String, Double> clientConnectionCounts = new HashMap<>();
        int total = 0;
        // Count connections per remote broker address.
        for (ActiveMQConnection c : connections) {
            String key = c.getTransportChannel().getRemoteAddress();
            if (key != null) {
                total++;
                clientConnectionCounts.merge(key, 1.0, Double::sum);
            }
        }
        List<String> errorMsgs = new ArrayList<>();
        for (Map.Entry<String, Double> entry : clientConnectionCounts.entrySet()) {
            double percentage = entry.getValue() / total;
            if (percentage < minimumPercentage || percentage > maximumPercentage) {
                errorMsgs.add("Connections distribution expected to be within range [ " + minimumPercentage + ", " + maximumPercentage + "]. Actual distribution was " + percentage + " for connection " + entry.getKey());
            }
        }
        // Fix: this check used to sit inside the loop above, so the assertion
        // fired on the first violating broker before the remaining violations
        // were collected into errorMsgs.
        if (!errorMsgs.isEmpty()) {
            for (String err : errorMsgs) {
                System.err.println(err);
            }
            Assert.fail("Test failed. Please see the log message for details");
        }
    }

    protected void assertAllConnectedTo(String url) throws Exception {
        for (ActiveMQConnection c : connections) {
            Assert.assertEquals(url, c.getTransportChannel().getRemoteAddress());
        }
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.servicecontrol.v2.model;
/**
* This message defines core attributes for a resource. A resource is an addressable (named) entity
* provided by the destination service. For example, a file stored on a network storage service.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Service Control API. For a detailed explanation see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class Resource extends com.google.api.client.json.GenericJson {
/**
* Annotations is an unstructured key-value map stored with a resource that may be set by external
* tools to store and retrieve arbitrary metadata. They are not queryable and should be preserved
* when modifying objects. More info: https://kubernetes.io/docs/user-guide/annotations
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.Map<String, java.lang.String> annotations;
/**
* Output only. The timestamp when the resource was created. This may be either the time creation
* was initiated or when it was completed.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private String createTime;
/**
* Output only. The timestamp when the resource was deleted. If the resource is not deleted, this
* must be empty.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private String deleteTime;
/**
* Mutable. The display name set by clients. Must be <= 63 characters.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String displayName;
/**
* Output only. An opaque value that uniquely identifies a version or generation of a resource. It
* can be used to confirm that the client and server agree on the ordering of a resource being
* written.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String etag;
/**
* The labels or tags on the resource, such as AWS resource tags and Kubernetes resource labels.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.Map<String, java.lang.String> labels;
/**
* Immutable. The location of the resource. The location encoding is specific to the service
* provider, and new encoding may be introduced as the service evolves. For Google Cloud products,
* the encoding is what is used by Google Cloud APIs, such as `us-east1`, `aws-us-east-1`, and
* `azure-eastus2`. The semantics of `location` is identical to the
* `cloud.googleapis.com/location` label used by some Google Cloud APIs.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String location;
/**
* The stable identifier (name) of a resource on the `service`. A resource can be logically
* identified as "//{resource.service}/{resource.name}". The differences between a resource name
* and a URI are: * Resource name is a logical identifier, independent of network protocol and API
* version. For example, `//pubsub.googleapis.com/projects/123/topics/news-feed`. * URI often
* includes protocol and version information, so it can be used directly by applications. For
* example, `https://pubsub.googleapis.com/v1/projects/123/topics/news-feed`. See
* https://cloud.google.com/apis/design/resource_names for details.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String name;
/**
* The name of the service that this resource belongs to, such as `pubsub.googleapis.com`. The
* service may be different from the DNS hostname that actually serves the request.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String service;
/**
* The type of the resource. The syntax is platform-specific because different platforms define
* their resources differently. For Google APIs, the type format must be "{service}/{kind}", such
* as "pubsub.googleapis.com/Topic".
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String type;
/**
* The unique identifier of the resource. UID is unique in the time and space for this resource
* within the scope of the service. It is typically generated by the server on successful creation
* of a resource and must not be changed. UID is used to uniquely identify resources with resource
* name reuses. This should be a UUID4.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String uid;
/**
* Output only. The timestamp when the resource was last updated. Any change to the resource made
* by users must refresh this value. Changes to a resource made by the service should refresh this
* value.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private String updateTime;
/**
* Annotations is an unstructured key-value map stored with a resource that may be set by external
* tools to store and retrieve arbitrary metadata. They are not queryable and should be preserved
* when modifying objects. More info: https://kubernetes.io/docs/user-guide/annotations
* @return value or {@code null} for none
*/
public java.util.Map<String, java.lang.String> getAnnotations() {
  return annotations;  // Returns the stored map reference as-is (no defensive copy); null when unset.
}
/**
 * Annotations is an unstructured key-value map stored with a resource that may be set by external
 * tools to store and retrieve arbitrary metadata. They are not queryable and should be preserved
 * when modifying objects. More info: https://kubernetes.io/docs/user-guide/annotations
 * @param annotations annotations or {@code null} for none
 * @return this {@code Resource}, to allow call chaining
 */
public Resource setAnnotations(java.util.Map<String, java.lang.String> annotations) {
  this.annotations = annotations;
  return this;
}
/**
 * Output only. The timestamp when the resource was created. This may be either the time creation
 * was initiated or when it was completed.
 * @return value or {@code null} for none
 */
public java.lang.String getCreateTime() {
  // Fully-qualified java.lang.String return type, for consistency with the
  // other generated accessors in this class (e.g. getName, getEtag).
  return createTime;
}
/**
 * Output only. The timestamp when the resource was created. This may be either the time creation
 * was initiated or when it was completed.
 * @param createTime createTime or {@code null} for none
 * @return this {@code Resource}, to allow call chaining
 */
public Resource setCreateTime(java.lang.String createTime) {
  // Parameter type spelled java.lang.String for consistency with sibling setters;
  // identical type, no behavioral change.
  this.createTime = createTime;
  return this;
}
/**
 * Output only. The timestamp when the resource was deleted. If the resource is not deleted, this
 * must be empty.
 * @return value or {@code null} for none
 */
public java.lang.String getDeleteTime() {
  // Fully-qualified java.lang.String return type, matching the other generated accessors.
  return deleteTime;
}
/**
 * Output only. The timestamp when the resource was deleted. If the resource is not deleted, this
 * must be empty.
 * @param deleteTime deleteTime or {@code null} for none
 * @return this {@code Resource}, to allow call chaining
 */
public Resource setDeleteTime(java.lang.String deleteTime) {
  // Parameter type spelled java.lang.String for consistency with sibling setters.
  this.deleteTime = deleteTime;
  return this;
}
/**
* Mutable. The display name set by clients. Must be <= 63 characters.
* @return value or {@code null} for none
*/
public java.lang.String getDisplayName() {
  return displayName;  // Plain generated accessor; null when the display name was never set.
}
/**
 * Mutable. The display name set by clients. Must be <= 63 characters.
 * @param displayName displayName or {@code null} for none
 * @return this {@code Resource}, to allow call chaining
 */
public Resource setDisplayName(java.lang.String displayName) {
  this.displayName = displayName;
  return this;
}
/**
* Output only. An opaque value that uniquely identifies a version or generation of a resource. It
* can be used to confirm that the client and server agree on the ordering of a resource being
* written.
* @return value or {@code null} for none
*/
public java.lang.String getEtag() {
  return etag;  // Plain generated accessor; null when no etag has been set.
}
/**
 * Output only. An opaque value that uniquely identifies a version or generation of a resource. It
 * can be used to confirm that the client and server agree on the ordering of a resource being
 * written.
 * @param etag etag or {@code null} for none
 * @return this {@code Resource}, to allow call chaining
 */
public Resource setEtag(java.lang.String etag) {
  this.etag = etag;
  return this;
}
/**
* The labels or tags on the resource, such as AWS resource tags and Kubernetes resource labels.
* @return value or {@code null} for none
*/
public java.util.Map<String, java.lang.String> getLabels() {
  return labels;  // Returns the stored map reference as-is (no defensive copy); null when unset.
}
/**
 * The labels or tags on the resource, such as AWS resource tags and Kubernetes resource labels.
 * @param labels labels or {@code null} for none
 * @return this {@code Resource}, to allow call chaining
 */
public Resource setLabels(java.util.Map<String, java.lang.String> labels) {
  this.labels = labels;
  return this;
}
/**
* Immutable. The location of the resource. The location encoding is specific to the service
* provider, and new encoding may be introduced as the service evolves. For Google Cloud products,
* the encoding is what is used by Google Cloud APIs, such as `us-east1`, `aws-us-east-1`, and
* `azure-eastus2`. The semantics of `location` is identical to the
* `cloud.googleapis.com/location` label used by some Google Cloud APIs.
* @return value or {@code null} for none
*/
public java.lang.String getLocation() {
  return location;  // Plain generated accessor; null when no location has been set.
}
/**
 * Immutable. The location of the resource. The location encoding is specific to the service
 * provider, and new encoding may be introduced as the service evolves. For Google Cloud products,
 * the encoding is what is used by Google Cloud APIs, such as `us-east1`, `aws-us-east-1`, and
 * `azure-eastus2`. The semantics of `location` is identical to the
 * `cloud.googleapis.com/location` label used by some Google Cloud APIs.
 * @param location location or {@code null} for none
 * @return this {@code Resource}, to allow call chaining
 */
public Resource setLocation(java.lang.String location) {
  this.location = location;
  return this;
}
/**
* The stable identifier (name) of a resource on the `service`. A resource can be logically
* identified as "//{resource.service}/{resource.name}". The differences between a resource name
* and a URI are: * Resource name is a logical identifier, independent of network protocol and API
* version. For example, `//pubsub.googleapis.com/projects/123/topics/news-feed`. * URI often
* includes protocol and version information, so it can be used directly by applications. For
* example, `https://pubsub.googleapis.com/v1/projects/123/topics/news-feed`. See
* https://cloud.google.com/apis/design/resource_names for details.
* @return value or {@code null} for none
*/
public java.lang.String getName() {
  return name;  // Plain generated accessor; null when no resource name has been set.
}
/**
 * The stable identifier (name) of a resource on the `service`. A resource can be logically
 * identified as "//{resource.service}/{resource.name}". The differences between a resource name
 * and a URI are: * Resource name is a logical identifier, independent of network protocol and API
 * version. For example, `//pubsub.googleapis.com/projects/123/topics/news-feed`. * URI often
 * includes protocol and version information, so it can be used directly by applications. For
 * example, `https://pubsub.googleapis.com/v1/projects/123/topics/news-feed`. See
 * https://cloud.google.com/apis/design/resource_names for details.
 * @param name name or {@code null} for none
 * @return this {@code Resource}, to allow call chaining
 */
public Resource setName(java.lang.String name) {
  this.name = name;
  return this;
}
/**
* The name of the service that this resource belongs to, such as `pubsub.googleapis.com`. The
* service may be different from the DNS hostname that actually serves the request.
* @return value or {@code null} for none
*/
public java.lang.String getService() {
  return service;  // Plain generated accessor; null when no service has been set.
}
/**
 * The name of the service that this resource belongs to, such as `pubsub.googleapis.com`. The
 * service may be different from the DNS hostname that actually serves the request.
 * @param service service or {@code null} for none
 * @return this {@code Resource}, to allow call chaining
 */
public Resource setService(java.lang.String service) {
  this.service = service;
  return this;
}
/**
* The type of the resource. The syntax is platform-specific because different platforms define
* their resources differently. For Google APIs, the type format must be "{service}/{kind}", such
* as "pubsub.googleapis.com/Topic".
* @return value or {@code null} for none
*/
public java.lang.String getType() {
  return type;  // Plain generated accessor; null when no type has been set.
}
/**
 * The type of the resource. The syntax is platform-specific because different platforms define
 * their resources differently. For Google APIs, the type format must be "{service}/{kind}", such
 * as "pubsub.googleapis.com/Topic".
 * @param type type or {@code null} for none
 * @return this {@code Resource}, to allow call chaining
 */
public Resource setType(java.lang.String type) {
  this.type = type;
  return this;
}
/**
* The unique identifier of the resource. UID is unique in the time and space for this resource
* within the scope of the service. It is typically generated by the server on successful creation
* of a resource and must not be changed. UID is used to uniquely identify resources with resource
* name reuses. This should be a UUID4.
* @return value or {@code null} for none
*/
public java.lang.String getUid() {
  return uid;  // Plain generated accessor; null when no uid has been set.
}
/**
 * The unique identifier of the resource. UID is unique in the time and space for this resource
 * within the scope of the service. It is typically generated by the server on successful creation
 * of a resource and must not be changed. UID is used to uniquely identify resources with resource
 * name reuses. This should be a UUID4.
 * @param uid uid or {@code null} for none
 * @return this {@code Resource}, to allow call chaining
 */
public Resource setUid(java.lang.String uid) {
  this.uid = uid;
  return this;
}
/**
 * Output only. The timestamp when the resource was last updated. Any change to the resource made
 * by users must refresh this value. Changes to a resource made by the service should refresh this
 * value.
 * @return value or {@code null} for none
 */
public java.lang.String getUpdateTime() {
  // Fully-qualified java.lang.String return type, matching the other generated accessors.
  return updateTime;
}
/**
 * Output only. The timestamp when the resource was last updated. Any change to the resource made
 * by users must refresh this value. Changes to a resource made by the service should refresh this
 * value.
 * @param updateTime updateTime or {@code null} for none
 * @return this {@code Resource}, to allow call chaining
 */
public Resource setUpdateTime(java.lang.String updateTime) {
  // Parameter type spelled java.lang.String for consistency with sibling setters.
  this.updateTime = updateTime;
  return this;
}
@Override
public Resource set(String fieldName, Object value) {
  // Covariant override narrowing the superclass return type so chained calls keep the Resource type.
  return (Resource) super.set(fieldName, value);
}
@Override
public Resource clone() {
  // Covariant override of clone(); delegates entirely to the superclass implementation.
  return (Resource) super.clone();
}
}
| |
package org.robolectric;
import static android.os.Build.VERSION_CODES.LOLLIPOP;
import android.app.Application;
import android.content.Context;
import android.content.res.Configuration;
import android.content.res.Resources;
import android.util.DisplayMetrics;
import java.nio.file.Path;
import org.robolectric.android.Bootstrap;
import org.robolectric.android.ConfigurationV25;
import org.robolectric.res.ResourceTable;
import org.robolectric.util.Scheduler;
import org.robolectric.util.TempDirectory;
public class RuntimeEnvironment {
  public static Context systemContext;

  /**
   * @deprecated Please migrate to {@link
   *     androidx.test.core.app.ApplicationProvider#getApplicationContext}
   */
  @Deprecated public static Application application;

  // volatile: may be read by test code while being replaced via setMainThread().
  private static volatile Thread mainThread = Thread.currentThread();
  private static Object activityThread;
  private static int apiLevel;
  private static Scheduler masterScheduler;
  private static ResourceTable systemResourceTable;
  private static ResourceTable appResourceTable;
  private static ResourceTable compileTimeResourceTable;
  private static TempDirectory tempDirectory = new TempDirectory("no-test-yet");
  private static Path androidFrameworkJar;
  public static Path compileTimeSystemResourcesFile;
  private static boolean useLegacyResources;

  /**
   * Tests if the given thread is currently set as the main thread.
   *
   * @param thread the thread to test.
   * @return {@code true} if the specified thread is the main thread, {@code false} otherwise.
   * @see #isMainThread()
   */
  public static boolean isMainThread(Thread thread) {
    return thread == mainThread;
  }

  /**
   * Tests if the current thread is currently set as the main thread.
   *
   * @return {@code true} if the current thread is the main thread, {@code false} otherwise.
   */
  public static boolean isMainThread() {
    return isMainThread(Thread.currentThread());
  }

  /**
   * Retrieves the main thread. The main thread is the thread to which the main looper is attached.
   * Defaults to the thread that initialises the {@code RuntimeEnvironment} class.
   *
   * @return The main thread.
   * @see #setMainThread(Thread)
   * @see #isMainThread()
   */
  public static Thread getMainThread() {
    return mainThread;
  }

  /**
   * Sets the main thread. The main thread is the thread to which the main looper is attached.
   * Defaults to the thread that initialises the {@code RuntimeEnvironment} class.
   *
   * @param newMainThread the new main thread.
   * @see #getMainThread()
   * @see #isMainThread()
   */
  public static void setMainThread(Thread newMainThread) {
    mainThread = newMainThread;
  }

  /** Returns the internal Android {@code ActivityThread} instance, held as an opaque {@link Object}. */
  public static Object getActivityThread() {
    return activityThread;
  }

  /** Sets the internal Android {@code ActivityThread} instance. */
  public static void setActivityThread(Object newActivityThread) {
    activityThread = newActivityThread;
  }

  /**
   * Returns a qualifier string describing the current {@link Configuration} of the system
   * resources.
   *
   * @return a qualifier string as described
   *     [here](https://developer.android.com/guide/topics/resources/providing-resources.html#QualifierRules).
   */
  public static String getQualifiers() {
    Resources systemResources = Resources.getSystem();
    return getQualifiers(systemResources.getConfiguration(), systemResources.getDisplayMetrics());
  }

  /**
   * Returns a qualifier string describing the given configuration and display metrics.
   *
   * @param configuration the configuration.
   * @param displayMetrics the display metrics.
   * @return a qualifier string as described
   *     [here](https://developer.android.com/guide/topics/resources/providing-resources.html#QualifierRules).
   */
  public static String getQualifiers(Configuration configuration, DisplayMetrics displayMetrics) {
    return ConfigurationV25.resourceQualifierString(configuration, displayMetrics);
  }

  /**
   * Overrides the current device configuration.
   *
   * If `newQualifiers` starts with a plus (`+`), the prior configuration is used as the base
   * configuration, with the given changes applied additively. Otherwise, default values are used
   * for unspecified properties, as described [here](http://robolectric.org/device-configuration/).
   *
   * @param newQualifiers the qualifiers to apply
   */
  public static void setQualifiers(String newQualifiers) {
    // Fetch the system Resources once instead of performing three separate lookups.
    Resources systemResources = Resources.getSystem();
    Configuration configuration;
    DisplayMetrics displayMetrics = new DisplayMetrics();
    if (newQualifiers.startsWith("+")) {
      // Additive mode: start from the current system configuration and metrics.
      configuration = new Configuration(systemResources.getConfiguration());
      displayMetrics.setTo(systemResources.getDisplayMetrics());
    } else {
      configuration = new Configuration();
    }
    Bootstrap.applyQualifiers(newQualifiers, getApiLevel(), configuration, displayMetrics);
    systemResources.updateConfiguration(configuration, displayMetrics);
    if (application != null) {
      application.getResources().updateConfiguration(configuration, displayMetrics);
    }
  }

  /** Returns the Android API level this environment is currently configured with. */
  public static int getApiLevel() {
    return apiLevel;
  }

  /**
   * Boxes a native pointer as the numeric type appropriate for the current API level:
   * {@code Long} on LOLLIPOP and above, {@code Integer} below.
   */
  public static Number castNativePtr(long ptr) {
    // Weird, using a ternary here doesn't work, there's some auto promotion of boxed types happening.
    if (getApiLevel() >= LOLLIPOP) {
      return ptr;
    } else {
      return (int) ptr;
    }
  }

  /**
   * Retrieves the current master scheduler. This scheduler is always used by the main
   * {@link android.os.Looper Looper}, and if the global scheduler option is set it is also used for
   * the background scheduler and for all other {@link android.os.Looper Looper}s
   * @return The current master scheduler.
   * @see #setMasterScheduler(Scheduler)
   * see org.robolectric.Robolectric#getForegroundThreadScheduler()
   * see org.robolectric.Robolectric#getBackgroundThreadScheduler()
   */
  public static Scheduler getMasterScheduler() {
    return masterScheduler;
  }

  /**
   * Sets the current master scheduler. See {@link #getMasterScheduler()} for details.
   * Note that this method is primarily intended to be called by the Robolectric core setup code.
   * Changing the master scheduler during a test will have unpredictable results.
   * @param masterScheduler the new master scheduler.
   * @see #getMasterScheduler()
   * see org.robolectric.Robolectric#getForegroundThreadScheduler()
   * see org.robolectric.Robolectric#getBackgroundThreadScheduler()
   */
  public static void setMasterScheduler(Scheduler masterScheduler) {
    RuntimeEnvironment.masterScheduler = masterScheduler;
  }

  public static void setSystemResourceTable(ResourceTable systemResourceTable) {
    RuntimeEnvironment.systemResourceTable = systemResourceTable;
  }

  public static void setAppResourceTable(ResourceTable appResourceTable) {
    RuntimeEnvironment.appResourceTable = appResourceTable;
  }

  public static ResourceTable getSystemResourceTable() {
    return systemResourceTable;
  }

  public static ResourceTable getAppResourceTable() {
    return appResourceTable;
  }

  public static void setCompileTimeResourceTable(ResourceTable compileTimeResourceTable) {
    RuntimeEnvironment.compileTimeResourceTable = compileTimeResourceTable;
  }

  public static ResourceTable getCompileTimeResourceTable() {
    return compileTimeResourceTable;
  }

  public static void setTempDirectory(TempDirectory tempDirectory) {
    RuntimeEnvironment.tempDirectory = tempDirectory;
  }

  public static TempDirectory getTempDirectory() {
    return tempDirectory;
  }

  public static void setAndroidFrameworkJarPath(Path localArtifactPath) {
    RuntimeEnvironment.androidFrameworkJar = localArtifactPath;
  }

  public static Path getAndroidFrameworkJarPath() {
    return RuntimeEnvironment.androidFrameworkJar;
  }

  /**
   * Internal only.
   *
   * @deprecated Do not use.
   */
  @Deprecated
  public static boolean useLegacyResources() {
    return useLegacyResources;
  }

  /**
   * Internal only.
   *
   * @deprecated Do not use.
   */
  @Deprecated
  public static void setUseLegacyResources(boolean useLegacyResources) {
    RuntimeEnvironment.useLegacyResources = useLegacyResources;
  }
}
| |
/*
* JBoss, Home of Professional Open Source.
* Copyright 2014 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.undertow.servlet.handlers.security;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import io.undertow.servlet.UndertowServletLogger;
import io.undertow.servlet.api.DeploymentInfo;
import io.undertow.servlet.api.SecurityConstraint;
import io.undertow.servlet.api.SecurityInfo;
import io.undertow.servlet.api.SingleConstraintMatch;
import io.undertow.servlet.api.TransportGuaranteeType;
import io.undertow.servlet.api.WebResourceCollection;
import io.undertow.util.Methods;
/**
* @author Stuart Douglas
*/
public class SecurityPathMatches {

    /** The HTTP methods considered when warning about paths with uncovered methods. */
    private static final Set<String> KNOWN_METHODS;

    static {
        Set<String> methods = new HashSet<>();
        methods.add(Methods.GET_STRING);
        methods.add(Methods.POST_STRING);
        methods.add(Methods.PUT_STRING);
        methods.add(Methods.DELETE_STRING);
        methods.add(Methods.OPTIONS_STRING);
        methods.add(Methods.HEAD_STRING);
        methods.add(Methods.TRACE_STRING);
        KNOWN_METHODS = Collections.unmodifiableSet(methods);
    }

    private final boolean denyUncoveredHttpMethods;
    private final PathSecurityInformation defaultPathSecurityInformation;
    private final Map<String, PathSecurityInformation> exactPathRoleInformation;
    private final Map<String, PathSecurityInformation> prefixPathRoleInformation;
    private final Map<String, PathSecurityInformation> extensionRoleInformation;

    private SecurityPathMatches(final boolean denyUncoveredHttpMethods, final PathSecurityInformation defaultPathSecurityInformation, final Map<String, PathSecurityInformation> exactPathRoleInformation, final Map<String, PathSecurityInformation> prefixPathRoleInformation, final Map<String, PathSecurityInformation> extensionRoleInformation) {
        this.denyUncoveredHttpMethods = denyUncoveredHttpMethods;
        this.defaultPathSecurityInformation = defaultPathSecurityInformation;
        this.exactPathRoleInformation = exactPathRoleInformation;
        this.prefixPathRoleInformation = prefixPathRoleInformation;
        this.extensionRoleInformation = extensionRoleInformation;
    }

    /**
     * @return <code>true</code> If no security path information has been defined
     */
    public boolean isEmpty() {
        return defaultPathSecurityInformation.excludedMethodRoles.isEmpty() &&
                defaultPathSecurityInformation.perMethodRequiredRoles.isEmpty() &&
                defaultPathSecurityInformation.defaultRequiredRoles.isEmpty() &&
                exactPathRoleInformation.isEmpty() &&
                prefixPathRoleInformation.isEmpty() &&
                extensionRoleInformation.isEmpty();
    }

    /**
     * Resolves the security constraints applicable to the given request path and HTTP method.
     * The default (match-all) constraints always contribute. An exact-path match or a prefix
     * ("/foo/*") match returns immediately; otherwise an extension ("*.ext") match, if one was
     * found while scanning the path, is applied last.
     *
     * @param path   the request path, possibly including a query string
     * @param method the HTTP method of the request
     * @return the merged security match for this request
     */
    public SecurityPathMatch getSecurityInfo(final String path, final String method) {
        RuntimeMatch currentMatch = new RuntimeMatch();
        handleMatch(method, defaultPathSecurityInformation, currentMatch);
        PathSecurityInformation match = exactPathRoleInformation.get(path);
        PathSecurityInformation extensionMatch = null;
        if (match != null) {
            handleMatch(method, match, currentMatch);
            return new SecurityPathMatch(currentMatch.type, mergeConstraints(currentMatch));
        }
        match = prefixPathRoleInformation.get(path);
        if (match != null) {
            handleMatch(method, match, currentMatch);
            return new SecurityPathMatch(currentMatch.type, mergeConstraints(currentMatch));
        }
        int qsPos = -1;
        boolean extension = false;
        // Scan the path right-to-left, stripping a query string if present, trying each
        // prefix boundary ('/') and remembering the right-most extension ('.') candidate.
        for (int i = path.length() - 1; i >= 0; --i) {
            final char c = path.charAt(i);
            if (c == '?') {
                //there was a query string, check the exact matches again
                final String part = path.substring(0, i);
                match = exactPathRoleInformation.get(part);
                if (match != null) {
                    handleMatch(method, match, currentMatch);
                    return new SecurityPathMatch(currentMatch.type, mergeConstraints(currentMatch));
                }
                qsPos = i;
                extension = false;
            } else if (c == '/') {
                // A '.' to the left of a '/' is part of a directory name, not an extension.
                extension = true;
                final String part = path.substring(0, i);
                match = prefixPathRoleInformation.get(part);
                if (match != null) {
                    handleMatch(method, match, currentMatch);
                    return new SecurityPathMatch(currentMatch.type, mergeConstraints(currentMatch));
                }
            } else if (c == '.') {
                if (!extension) {
                    extension = true;
                    final String ext;
                    if (qsPos == -1) {
                        ext = path.substring(i + 1, path.length());
                    } else {
                        ext = path.substring(i + 1, qsPos);
                    }
                    extensionMatch = extensionRoleInformation.get(ext);
                }
            }
        }
        if (extensionMatch != null) {
            handleMatch(method, extensionMatch, currentMatch);
            return new SecurityPathMatch(currentMatch.type, mergeConstraints(currentMatch));
        }
        return new SecurityPathMatch(currentMatch.type, mergeConstraints(currentMatch));
    }

    /**
     * merge all constraints, as per 13.8.1 Combining Constraints
     */
    private SingleConstraintMatch mergeConstraints(final RuntimeMatch currentMatch) {
        if (currentMatch.uncovered && denyUncoveredHttpMethods) {
            return new SingleConstraintMatch(SecurityInfo.EmptyRoleSemantic.DENY, Collections.<String>emptySet());
        }
        final Set<String> allowedRoles = new HashSet<>();
        for (SingleConstraintMatch match : currentMatch.constraints) {
            if (match.getRequiredRoles().isEmpty()) {
                // A constraint with no roles (PERMIT or DENY) dominates; roles are irrelevant.
                return new SingleConstraintMatch(match.getEmptyRoleSemantic(), Collections.<String>emptySet());
            } else {
                allowedRoles.addAll(match.getRequiredRoles());
            }
        }
        return new SingleConstraintMatch(SecurityInfo.EmptyRoleSemantic.PERMIT, allowedRoles);
    }

    /**
     * Accumulates the constraints from one matched path entry into {@code currentMatch},
     * combining the entry's default roles, per-method roles and method-omission roles.
     */
    private void handleMatch(final String method, final PathSecurityInformation exact, RuntimeMatch currentMatch) {
        List<SecurityInformation> roles = exact.defaultRequiredRoles;
        for (SecurityInformation role : roles) {
            transport(currentMatch, role.transportGuaranteeType);
            currentMatch.constraints.add(new SingleConstraintMatch(role.emptyRoleSemantic, role.roles));
            if (role.emptyRoleSemantic == SecurityInfo.EmptyRoleSemantic.DENY || !role.roles.isEmpty()) {
                currentMatch.uncovered = false;
            }
        }
        List<SecurityInformation> methodInfo = exact.perMethodRequiredRoles.get(method);
        if (methodInfo != null) {
            currentMatch.uncovered = false;
            for (SecurityInformation role : methodInfo) {
                transport(currentMatch, role.transportGuaranteeType);
                currentMatch.constraints.add(new SingleConstraintMatch(role.emptyRoleSemantic, role.roles));
            }
        }
        for (ExcludedMethodRoles excluded : exact.excludedMethodRoles) {
            if (!excluded.methods.contains(method)) {
                currentMatch.uncovered = false;
                transport(currentMatch, excluded.securityInformation.transportGuaranteeType);
                currentMatch.constraints.add(new SingleConstraintMatch(excluded.securityInformation.emptyRoleSemantic, excluded.securityInformation.roles));
            }
        }
    }

    /** Upgrades the match's transport guarantee to {@code other} if it is stricter. */
    private void transport(RuntimeMatch match, TransportGuaranteeType other) {
        if (other.ordinal() > match.type.ordinal()) {
            match.type = other;
        }
    }

    public void logWarningsAboutUncoveredMethods() {
        logWarningsAboutUncoveredMethods(exactPathRoleInformation, "", "");
        logWarningsAboutUncoveredMethods(prefixPathRoleInformation, "", "/*");
        // Bug fix: this previously passed exactPathRoleInformation (already logged above),
        // so uncovered methods on extension ("*.ext") mappings were never reported.
        logWarningsAboutUncoveredMethods(extensionRoleInformation, "*.", "");
    }

    private void logWarningsAboutUncoveredMethods(Map<String, PathSecurityInformation> matches, String prefix, String suffix) {
        //according to the spec we should be logging warnings about paths with uncovered HTTP methods
        for (Map.Entry<String, PathSecurityInformation> entry : matches.entrySet()) {
            if (entry.getValue().perMethodRequiredRoles.isEmpty() && entry.getValue().excludedMethodRoles.isEmpty()) {
                // Only default constraints apply here, so every method is covered.
                continue;
            }
            Set<String> missing = new HashSet<>(KNOWN_METHODS);
            for (String m : entry.getValue().perMethodRequiredRoles.keySet()) {
                missing.remove(m);
            }
            Iterator<String> it = missing.iterator();
            while (it.hasNext()) {
                String val = it.next();
                for (ExcludedMethodRoles excluded : entry.getValue().excludedMethodRoles) {
                    if (!excluded.methods.contains(val)) {
                        it.remove();
                        break;
                    }
                }
            }
            if (!missing.isEmpty()) {
                UndertowServletLogger.ROOT_LOGGER.unsecuredMethodsOnPath(prefix + entry.getKey() + suffix, missing);
            }
        }
    }

    public static Builder builder(final DeploymentInfo deploymentInfo) {
        return new Builder(deploymentInfo);
    }

    public static class Builder {
        private final DeploymentInfo deploymentInfo;
        private final PathSecurityInformation defaultPathSecurityInformation = new PathSecurityInformation();
        private final Map<String, PathSecurityInformation> exactPathRoleInformation = new HashMap<>();
        private final Map<String, PathSecurityInformation> prefixPathRoleInformation = new HashMap<>();
        private final Map<String, PathSecurityInformation> extensionRoleInformation = new HashMap<>();

        private Builder(final DeploymentInfo deploymentInfo) {
            this.deploymentInfo = deploymentInfo;
        }

        /**
         * Registers a security constraint, routing each of its URL patterns into the
         * exact, prefix ("/foo/*") or extension ("*.ext") maps as appropriate.
         */
        public void addSecurityConstraint(final SecurityConstraint securityConstraint) {
            final Set<String> roles = expandRolesAllowed(securityConstraint.getRolesAllowed());
            final SecurityInformation securityInformation = new SecurityInformation(roles, securityConstraint.getTransportGuaranteeType(), securityConstraint.getEmptyRoleSemantic());
            for (final WebResourceCollection webResources : securityConstraint.getWebResourceCollections()) {
                if (webResources.getUrlPatterns().isEmpty()) {
                    //default that is applied to everything
                    setupPathSecurityInformation(defaultPathSecurityInformation, securityInformation, webResources);
                }
                for (String pattern : webResources.getUrlPatterns()) {
                    if (pattern.endsWith("/*")) {
                        String part = pattern.substring(0, pattern.length() - 2);
                        PathSecurityInformation info = prefixPathRoleInformation.get(part);
                        if (info == null) {
                            prefixPathRoleInformation.put(part, info = new PathSecurityInformation());
                        }
                        setupPathSecurityInformation(info, securityInformation, webResources);
                    } else if (pattern.startsWith("*.")) {
                        String part = pattern.substring(2, pattern.length());
                        PathSecurityInformation info = extensionRoleInformation.get(part);
                        if (info == null) {
                            extensionRoleInformation.put(part, info = new PathSecurityInformation());
                        }
                        setupPathSecurityInformation(info, securityInformation, webResources);
                    } else {
                        PathSecurityInformation info = exactPathRoleInformation.get(pattern);
                        if (info == null) {
                            exactPathRoleInformation.put(pattern, info = new PathSecurityInformation());
                        }
                        setupPathSecurityInformation(info, securityInformation, webResources);
                    }
                }
            }
        }

        /** Expands the special "*" role to the full set of declared deployment roles. */
        private Set<String> expandRolesAllowed(final Set<String> rolesAllowed) {
            final Set<String> roles = new HashSet<>(rolesAllowed);
            if (roles.contains("*")) {
                roles.remove("*");
                roles.addAll(deploymentInfo.getSecurityRoles());
            }
            return roles;
        }

        /** Files the constraint under default, per-method or method-omission roles. */
        private void setupPathSecurityInformation(final PathSecurityInformation info, final SecurityInformation securityConstraint, final WebResourceCollection webResources) {
            if (webResources.getHttpMethods().isEmpty() &&
                    webResources.getHttpMethodOmissions().isEmpty()) {
                info.defaultRequiredRoles.add(securityConstraint);
            } else if (!webResources.getHttpMethods().isEmpty()) {
                for (String method : webResources.getHttpMethods()) {
                    List<SecurityInformation> securityInformations = info.perMethodRequiredRoles.get(method);
                    if (securityInformations == null) {
                        info.perMethodRequiredRoles.put(method, securityInformations = new ArrayList<>());
                    }
                    securityInformations.add(securityConstraint);
                }
            } else if (!webResources.getHttpMethodOmissions().isEmpty()) {
                info.excludedMethodRoles.add(new ExcludedMethodRoles(webResources.getHttpMethodOmissions(), securityConstraint));
            }
        }

        public SecurityPathMatches build() {
            return new SecurityPathMatches(deploymentInfo.isDenyUncoveredHttpMethods(), defaultPathSecurityInformation, exactPathRoleInformation, prefixPathRoleInformation, extensionRoleInformation);
        }
    }

    /** Per-pattern bucket of default, per-method and method-omission constraints. */
    private static class PathSecurityInformation {
        final List<SecurityInformation> defaultRequiredRoles = new ArrayList<>();
        final Map<String, List<SecurityInformation>> perMethodRequiredRoles = new HashMap<>();
        final List<ExcludedMethodRoles> excludedMethodRoles = new ArrayList<>();
    }

    /** A constraint declared via http-method-omission: applies to all methods NOT listed. */
    private static final class ExcludedMethodRoles {
        final Set<String> methods;
        final SecurityInformation securityInformation;

        ExcludedMethodRoles(final Set<String> methods, final SecurityInformation securityInformation) {
            this.methods = methods;
            this.securityInformation = securityInformation;
        }
    }

    /** The roles, transport guarantee and empty-role semantic of a single constraint. */
    private static final class SecurityInformation {
        final Set<String> roles;
        final TransportGuaranteeType transportGuaranteeType;
        final SecurityInfo.EmptyRoleSemantic emptyRoleSemantic;

        private SecurityInformation(final Set<String> roles, final TransportGuaranteeType transportGuaranteeType, final SecurityInfo.EmptyRoleSemantic emptyRoleSemantic) {
            this.emptyRoleSemantic = emptyRoleSemantic;
            this.roles = new HashSet<>(roles);
            this.transportGuaranteeType = transportGuaranteeType;
        }
    }

    /** Mutable accumulator used while resolving a single request. */
    private static final class RuntimeMatch {
        TransportGuaranteeType type = TransportGuaranteeType.NONE;
        final List<SingleConstraintMatch> constraints = new ArrayList<>();
        // true until some constraint is found to cover the requested method.
        boolean uncovered = true;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.hadoop.impl.igfs;
import java.lang.reflect.Field;
import java.net.URI;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.ignite.cache.CacheWriteSynchronizationMode;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.FileSystemConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.igfs.IgfsGroupDataBlocksKeyMapper;
import org.apache.ignite.igfs.IgfsIpcEndpointConfiguration;
import org.apache.ignite.igfs.IgfsIpcEndpointType;
import org.apache.ignite.internal.processors.igfs.IgfsCommonAbstractTest;
import org.apache.ignite.internal.util.ipc.shmem.IpcSharedMemoryServerEndpoint;
import org.apache.ignite.internal.util.typedef.G;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.spi.communication.tcp.TcpCommunicationSpi;
import org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi;
import org.apache.ignite.spi.discovery.tcp.ipfinder.TcpDiscoveryIpFinder;
import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.TcpDiscoveryVmIpFinder;
import static org.apache.ignite.cache.CacheAtomicityMode.TRANSACTIONAL;
import static org.apache.ignite.cache.CacheMode.PARTITIONED;
import static org.apache.ignite.cache.CacheMode.REPLICATED;
import static org.apache.ignite.events.EventType.EVT_JOB_MAPPED;
import static org.apache.ignite.events.EventType.EVT_TASK_FAILED;
import static org.apache.ignite.events.EventType.EVT_TASK_FINISHED;
/**
 * Tests the shared-memory IPC I/O cache of the IGFS Hadoop file system client:
 * multiple {@code FileSystem} instances pointing at the same endpoint must share
 * one {@code HadoopIgfsIpcIo}, and the I/O must be stopped only when the last
 * user closes. Internal state is inspected via reflection on private fields.
 */
public class IgniteHadoopFileSystemIpcCacheSelfTest extends IgfsCommonAbstractTest {
    /** IP finder shared by all grids so they discover each other in-process. */
    private static final TcpDiscoveryIpFinder IP_FINDER = new TcpDiscoveryVmIpFinder(true);
    /** Path to test hadoop configuration. */
    private static final String HADOOP_FS_CFG = "modules/core/src/test/config/hadoop/core-site.xml";
    /** Affinity group size used by {@link IgfsGroupDataBlocksKeyMapper}. */
    public static final int GRP_SIZE = 128;
    /** Started grid counter; offsets management and IPC ports so grids do not clash. */
    private static int cnt;
    /** {@inheritDoc} */
    @Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception {
        IgniteConfiguration cfg = super.getConfiguration(igniteInstanceName);
        TcpDiscoverySpi discoSpi = new TcpDiscoverySpi();
        discoSpi.setIpFinder(IP_FINDER);
        cfg.setDiscoverySpi(discoSpi);
        FileSystemConfiguration igfsCfg = new FileSystemConfiguration();
        igfsCfg.setName("igfs");
        // Unique per-grid ports derived from the started-grid counter.
        igfsCfg.setManagementPort(FileSystemConfiguration.DFLT_MGMT_PORT + cnt);
        igfsCfg.setDataCacheConfiguration(dataCacheConfiguration());
        igfsCfg.setMetaCacheConfiguration(metaCacheConfiguration());
        IgfsIpcEndpointConfiguration endpointCfg = new IgfsIpcEndpointConfiguration();
        // Shared-memory (not TCP) endpoint is the subject of this test.
        endpointCfg.setType(IgfsIpcEndpointType.SHMEM);
        endpointCfg.setPort(IpcSharedMemoryServerEndpoint.DFLT_IPC_PORT + cnt);
        igfsCfg.setIpcEndpointConfiguration(endpointCfg);
        igfsCfg.setBlockSize(512 * 1024); // Together with group blocks mapper will yield 64M per node groups.
        cfg.setFileSystemConfiguration(igfsCfg);
        cfg.setCacheConfiguration(dataCacheConfiguration());
        cfg.setIncludeEventTypes(EVT_TASK_FAILED, EVT_TASK_FINISHED, EVT_JOB_MAPPED);
        TcpCommunicationSpi commSpi = new TcpCommunicationSpi();
        // Disable shared-memory communication so only the IGFS IPC endpoint uses shmem.
        commSpi.setSharedMemoryPort(-1);
        cfg.setCommunicationSpi(commSpi);
        cnt++;
        return cfg;
    }
    /**
     * Gets IGFS data cache configuration (partitioned, 0 backups, transactional,
     * with group-block affinity mapping).
     *
     * @return Cache configuration.
     */
    private CacheConfiguration dataCacheConfiguration() {
        CacheConfiguration ccfg = defaultCacheConfiguration();
        ccfg.setName("partitioned");
        ccfg.setCacheMode(PARTITIONED);
        ccfg.setNearConfiguration(null);
        ccfg.setWriteSynchronizationMode(CacheWriteSynchronizationMode.FULL_SYNC);
        // Co-locate GRP_SIZE consecutive file blocks on the same node.
        ccfg.setAffinityMapper(new IgfsGroupDataBlocksKeyMapper(GRP_SIZE));
        ccfg.setBackups(0);
        ccfg.setAtomicityMode(TRANSACTIONAL);
        return ccfg;
    }
    /**
     * Gets IGFS metadata cache configuration (replicated, transactional).
     *
     * @return Cache configuration.
     */
    private CacheConfiguration metaCacheConfiguration() {
        CacheConfiguration ccfg = defaultCacheConfiguration();
        ccfg.setName("replicated");
        ccfg.setCacheMode(REPLICATED);
        ccfg.setWriteSynchronizationMode(CacheWriteSynchronizationMode.FULL_SYNC);
        ccfg.setAtomicityMode(TRANSACTIONAL);
        return ccfg;
    }
    /** {@inheritDoc} */
    @Override protected void beforeTestsStarted() throws Exception {
        startGrids(4);
    }
    /** {@inheritDoc} */
    @Override protected void afterTestsStopped() throws Exception {
        G.stopAll(true);
    }
    /**
     * Test how IPC cache map works: a single {@code HadoopIgfsIpcIo} instance must be
     * shared between file systems targeting the same endpoint, its active-user count
     * must track open file systems, and it must be stopped only when unused.
     *
     * @throws Exception If failed.
     */
    @SuppressWarnings("unchecked")
    public void testIpcCache() throws Exception {
        // Access the private static endpoint -> IO map via reflection.
        Field cacheField = HadoopIgfsIpcIo.class.getDeclaredField("ipcCache");
        cacheField.setAccessible(true);
        // Per-IO active reference counter.
        Field activeCntField = HadoopIgfsIpcIo.class.getDeclaredField("activeCnt");
        activeCntField.setAccessible(true);
        Map<String, HadoopIgfsIpcIo> cache = (Map<String, HadoopIgfsIpcIo>)cacheField.get(null);
        cache.clear(); // avoid influence of previous tests in the same process.
        String name = "igfs:" + getTestIgniteInstanceName(0) + "@";
        Configuration cfg = new Configuration();
        cfg.addResource(U.resolveIgniteUrl(HADOOP_FS_CFG));
        // Disable Hadoop-level FileSystem caching so each get() creates a new instance.
        cfg.setBoolean("fs.igfs.impl.disable.cache", true);
        cfg.setBoolean(String.format(HadoopIgfsUtils.PARAM_IGFS_ENDPOINT_NO_EMBED, name), true);
        // Ensure that existing IO is reused.
        FileSystem fs1 = FileSystem.get(new URI("igfs://" + name + "/"), cfg);
        assertEquals(1, cache.size());
        HadoopIgfsIpcIo io = null;
        System.out.println("CACHE: " + cache);
        // Find the IO for grid 0 by its IPC port (presumably DFLT_IPC_PORT + 0 = 10500).
        for (String key : cache.keySet()) {
            if (key.contains("10500")) {
                io = cache.get(key);
                break;
            }
        }
        assert io != null;
        assertEquals(1, ((AtomicInteger)activeCntField.get(io)).get());
        // Ensure that when IO is used by multiple file systems and one of them is closed, IO is not stopped.
        FileSystem fs2 = FileSystem.get(new URI("igfs://" + name + "/abc"), cfg);
        assertEquals(1, cache.size());
        assertEquals(2, ((AtomicInteger)activeCntField.get(io)).get());
        fs2.close();
        assertEquals(1, cache.size());
        assertEquals(1, ((AtomicInteger)activeCntField.get(io)).get());
        Field stopField = HadoopIgfsIpcIo.class.getDeclaredField("stopping");
        stopField.setAccessible(true);
        assert !(Boolean)stopField.get(io);
        // Ensure that IO is stopped when nobody else is need it.
        fs1.close();
        assert cache.isEmpty();
        assert (Boolean)stopField.get(io);
    }
}
| |
/**
* Copyright (C) [2013] [The FURTHeR Project]
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.utah.further.mdr.data.common.domain.asset;
import static edu.utah.further.core.api.text.ToStringCustomStyles.SHORT_WITH_SPACES_STYLE;
import java.sql.Blob;
import java.sql.Timestamp;
import java.util.List;
import javax.persistence.AttributeOverride;
import javax.persistence.AttributeOverrides;
import javax.persistence.Basic;
import javax.persistence.Column;
import javax.persistence.Embedded;
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
import javax.persistence.FetchType;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.Lob;
import javax.persistence.ManyToOne;
import javax.persistence.MappedSuperclass;
import javax.persistence.Transient;
import org.apache.commons.lang.builder.ToStringBuilder;
import edu.utah.further.core.api.collections.CollectionUtil;
import edu.utah.further.core.api.lang.Final;
import edu.utah.further.mdr.api.domain.asset.ActivationInfo;
import edu.utah.further.mdr.api.domain.asset.Resource;
import edu.utah.further.mdr.api.domain.asset.ResourceType;
import edu.utah.further.mdr.api.domain.asset.StorageCode;
/**
 * An MDR resource persistent entity.
 * <p>
 * -----------------------------------------------------------------------------------<br>
 * (c) 2008-2013 FURTHeR Project, AVP Health Sciences IT Office, University of Utah<br>
 * Contact: {@code <further@utah.edu>}<br>
 * Biomedical Informatics, 26 South 2000 East<br>
 * Room 5775 HSEB, Salt Lake City, UT 84112<br>
 * Day Phone: 1-801-581-4080<br>
 * -----------------------------------------------------------------------------------
 *
 * @author Oren E. Livne {@code <oren.livne@utah.edu>}
 * @resource Mar 19, 2009
 */
@MappedSuperclass
public abstract class AbstractResourceEntity implements Resource
{
    // ========================= CONSTANTS =================================
    /**
     * @serial Serializable resource identifier.
     */
    private static final long serialVersionUID = 1L;
    // ========================= FIELDS ====================================
    /**
     * The unique identifier of this entity.
     */
    @Id
    @GeneratedValue
    @Column(name = "ASSET_RESOURCE_ID")
    @Final
    private Long id;
    /**
     * The MDR virtual path to this resource. Uniquely identifies the resource.
     */
    @Column(name = "RELATIVE_RESOURCE_URL", length = 255, nullable = true)
    @Basic(fetch = FetchType.EAGER)
    private String path;
    /**
     * Version number of this resource's owning version entity. For optimization of
     * resource searches. Must match <code>version.getId()</code>.
     */
    @Column(name = "ASSET_VERSION")
    @Basic(fetch = FetchType.EAGER)
    private Long versionNumber;
    /**
     * Short description of the resource.
     */
    @Column(name = "RESOURCE_NAME", length = 255, nullable = true)
    @Basic(fetch = FetchType.EAGER)
    private String name;
    /**
     * Long description of the resource.
     */
    @Column(name = "RESOURCE_DSC", length = 2000, nullable = true)
    @Basic(fetch = FetchType.EAGER)
    private String description;
    /**
     * Name of the file associated with this resource.
     */
    @Column(name = "RESOURCE_FILE_NAME", length = 255, nullable = true)
    @Basic(fetch = FetchType.EAGER)
    private String fileName;
    /**
     * Resource storage type code (persisted as the enum constant's name).
     */
    @Enumerated(EnumType.STRING)
    @Column(name = "STORAGE_CD", length = 20, nullable = true)
    @Basic(fetch = FetchType.EAGER)
    private StorageCode storageCode;
    /**
     * Mime type of the resource's stored representation.
     */
    @Column(name = "MIME_TYPE", length = 100, nullable = true)
    @Basic(fetch = FetchType.EAGER)
    private String mimeType;
    /**
     * Text storage (small payloads; lazily fetched).
     */
    @Column(name = "RESOURCE_TEXT", length = 4000, nullable = true)
    @Basic(fetch = FetchType.LAZY)
    private String text;
    /**
     * Character Large Object (CLOB) storage.
     * <p>
     * Trying to set column size to support both Oracle and MySQL.
     *
     * @see http://www.elver.org/hibernate/ejb3_features.html
     */
    @Column(name = "RESOURCE_CLOB", nullable = true, length = 10485760)
    @Lob
    @Basic(fetch = FetchType.LAZY)
    private String clob;
    /**
     * Binary Large Object (BLOB) storage.
     * <p>
     * Trying to set column size to support both Oracle and MySQL.
     *
     * @see http://www.elver.org/hibernate/ejb3_features.html
     */
    @Column(name = "RESOURCE_BLOB", nullable = true, length = 10485760)
    @Lob
    @Basic(fetch = FetchType.LAZY)
    private Blob blob;
    /**
     * URL storage.
     */
    @Column(name = "RESOURCE_URL", length = 1000, nullable = true)
    @Basic(fetch = FetchType.LAZY)
    private String url;
    /**
     * Holds resource activation dates.
     */
    @Embedded
    @AttributeOverrides(
    {
            @AttributeOverride(name = "activationDate", column = @Column(name = "RESOURCE_ACTIVATE_DT", nullable = true)),
            @AttributeOverride(name = "deactivationDate", column = @Column(name = "RESOURCE_DEACTIVATE_DT", nullable = true)) })
    @Final
    private ActivationInfoEntity activationInfo = new ActivationInfoEntity();
    // ========================= FIELDS: ASSOCIATIONS ======================
    /**
     * Link to this resource's asset type entity.
     */
    @ManyToOne
    @JoinColumn(name = "ASSET_RESOURCE_TYPE_ID")
    private ResourceTypeEntity type;
    /**
     * List of MDR paths of resources linked to this resource. The resource therefore
     * implements the Composite Pattern. Each path references the currently-active
     * resource with that path.
     */
    // @OneToMany(mappedBy = "???", fetch = FetchType.EAGER)
    @Transient
    // implementation to be added
    private List<String> linkedResourcePaths = CollectionUtil.newList();
    // ========================= IMPLEMENTATION: Object ====================
    /**
     * Print a short representation of this entity (id and storage code only).
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString()
    {
        final ToStringBuilder builder = new ToStringBuilder(this, SHORT_WITH_SPACES_STYLE)
                .append("id", id)
                .append("storageCode", storageCode);
        return builder.toString();
    }
    // ========================= IMPLEMENTATION: CopyableFrom ==============
    /**
     * Copy all non-identifier fields from another resource into this entity.
     *
     * @param other
     *            resource to copy from; if <code>null</code>, this entity is returned
     *            unchanged
     * @return this object, for method chaining
     * @see edu.utah.further.mdr.api.domain.asset.Resource#copyFrom(edu.utah.further.mdr.api.domain.asset.Resource)
     */
    @Override
    public Resource copyFrom(final Resource other)
    {
        if (other == null)
        {
            return this;
        }
        // Identifier is not copied
        // Deep-copy fields. NOTE(review): assumes other's type is a
        // ResourceTypeEntity; a foreign Resource implementation would fail this cast.
        this.type = (ResourceTypeEntity) other.getType();
        setAsset(other.getAsset());
        setVersion(other.getVersion());
        // setPath() stores the path field; a separate direct assignment is redundant.
        setPath(other.getPath());
        this.versionNumber = other.getVersionNumber();
        this.name = other.getName();
        this.description = other.getDescription();
        this.fileName = other.getFileName();
        this.storageCode = other.getStorageCode();
        this.mimeType = other.getMimeType();
        this.text = other.getText();
        setXml(other.getXml());
        this.clob = other.getClob();
        this.blob = other.getBlob();
        this.url = other.getUrl();
        this.activationInfo = new ActivationInfoEntity().copyFrom(other
                .getActivationInfo());
        // Deep-copy collection references but soft-copy their elements
        // TODO: copy resource links here
        return this;
    }
    // ========================= IMPLEMENTATION: PersistentEntity ==========
    /**
     * @return the unique database identifier of this entity
     * @see edu.utah.further.mdr.api.domain.asset.Resource#getId()
     */
    @Override
    public Long getId()
    {
        return id;
    }
    // ========================= IMPLEMENTATION: Resource ==================
    // ========================= PRIVATE METHODS ===========================
    /**
     * @return the storage type code of this resource
     * @see edu.utah.further.mdr.api.domain.asset.Resource#getStorageCode()
     */
    @Override
    public StorageCode getStorageCode()
    {
        return storageCode;
    }
    /**
     * @param storageCode
     *            the new storage type code
     * @see edu.utah.further.mdr.api.domain.asset.Resource#setStorageCode(edu.utah.further.mdr.api.domain.asset.StorageCode)
     */
    @Override
    public void setStorageCode(final StorageCode storageCode)
    {
        this.storageCode = storageCode;
    }
    /**
     * @return the mime type of the resource's stored representation
     * @see edu.utah.further.mdr.api.domain.asset.Resource#getMimeType()
     */
    @Override
    public String getMimeType()
    {
        return mimeType;
    }
    /**
     * @param mimeType
     *            the new mime type
     * @see edu.utah.further.mdr.api.domain.asset.Resource#setMimeType(java.lang.String)
     */
    @Override
    public void setMimeType(final String mimeType)
    {
        this.mimeType = mimeType;
    }
    /**
     * @return the text storage of this resource
     * @see edu.utah.further.mdr.api.domain.asset.Resource#getText()
     */
    @Override
    public String getText()
    {
        return text;
    }
    /**
     * @param text
     *            the new text storage value
     * @see edu.utah.further.mdr.api.domain.asset.Resource#setText(java.lang.String)
     */
    @Override
    public void setText(final String text)
    {
        this.text = text;
    }
    /**
     * Return the activationInfo property.
     *
     * @return the activationInfo
     */
    @Override
    public ActivationInfo getActivationInfo()
    {
        return activationInfo;
    }
    /**
     * @return the resource activation date
     * @see edu.utah.further.mdr.api.domain.asset.Resource#getActivationDate()
     */
    @Override
    public Timestamp getActivationDate()
    {
        return activationInfo.getActivationDate();
    }
    /**
     * @param activationDate
     *            the new activation date
     * @see edu.utah.further.mdr.api.domain.asset.Resource#setActivationDate(java.sql.Timestamp)
     */
    @Override
    public void setActivationDate(final Timestamp activationDate)
    {
        activationInfo.setActivationDate(activationDate);
    }
    /**
     * @return the resource deactivation date
     * @see edu.utah.further.mdr.api.domain.asset.Resource#getDeactivationDate()
     */
    @Override
    public Timestamp getDeactivationDate()
    {
        return activationInfo.getDeactivationDate();
    }
    /**
     * @param deactivationDate
     *            the new deactivation date
     * @see edu.utah.further.mdr.api.domain.asset.Resource#setDeactivationDate(java.sql.Timestamp)
     */
    @Override
    public void setDeactivationDate(final Timestamp deactivationDate)
    {
        activationInfo.setDeactivationDate(deactivationDate);
    }
    /**
     * @return the URL storage of this resource
     * @see edu.utah.further.mdr.api.domain.asset.Resource#getUrl()
     */
    @Override
    public String getUrl()
    {
        return url;
    }
    /**
     * @param url
     *            the new URL storage value
     * @see edu.utah.further.mdr.api.domain.asset.Resource#setUrl(java.lang.String)
     */
    @Override
    public void setUrl(final String url)
    {
        this.url = url;
    }
    /**
     * @return the resource type entity
     * @see edu.utah.further.mdr.api.domain.asset.Resource#getType()
     */
    @Override
    public ResourceType getType()
    {
        return type;
    }
    /**
     * @param type
     *            the new resource type; must be a {@link ResourceTypeEntity}
     * @see edu.utah.further.mdr.api.domain.asset.Resource#setType(edu.utah.further.mdr.data.impl.domain.asset.AbstractAssetEntity)
     */
    @Override
    public void setType(final ResourceType type)
    {
        this.type = (ResourceTypeEntity) type;
    }
    /**
     * @return the long description of the resource
     * @see edu.utah.further.mdr.api.domain.asset.Resource#getDescription()
     */
    @Override
    public String getDescription()
    {
        return description;
    }
    /**
     * @param description
     *            the new long description
     * @see edu.utah.further.mdr.api.domain.asset.Resource#setDescription(java.lang.String)
     */
    @Override
    public void setDescription(final String description)
    {
        this.description = description;
    }
    /**
     * Return the name property.
     *
     * @return the name
     */
    @Override
    public String getName()
    {
        return name;
    }
    /**
     * Set a new value for the name property.
     *
     * @param name
     *            the name to set
     */
    @Override
    public void setName(final String name)
    {
        this.name = name;
    }
    /**
     * @return the name of the file associated with this resource
     * @see edu.utah.further.mdr.api.domain.asset.Resource#getFileName()
     */
    @Override
    public String getFileName()
    {
        return fileName;
    }
    /**
     * @param fileName
     *            the new associated file name
     * @see edu.utah.further.mdr.api.domain.asset.Resource#setFileName(java.lang.String)
     */
    @Override
    public void setFileName(final String fileName)
    {
        this.fileName = fileName;
    }
    /**
     * @return the CLOB storage of this resource
     * @see edu.utah.further.mdr.api.domain.asset.Resource#getClob()
     */
    @Override
    public String getClob()
    {
        return clob;
    }
    /**
     * @param clob
     *            the new CLOB storage value
     * @see edu.utah.further.mdr.api.domain.asset.Resource#setClob(java.lang.String)
     */
    @Override
    public void setClob(final String clob)
    {
        this.clob = clob;
    }
    /**
     * @return the BLOB storage of this resource
     * @see edu.utah.further.mdr.api.domain.asset.Resource#getBlob()
     */
    @Override
    public Blob getBlob()
    {
        return blob;
    }
    /**
     * @param blob
     *            the new BLOB storage value
     * @see edu.utah.further.mdr.api.domain.asset.Resource#setBlob(java.sql.Blob)
     */
    @Override
    public void setBlob(final Blob blob)
    {
        this.blob = blob;
    }
    /**
     * Return the versionNumber property.
     *
     * @return the versionNumber
     */
    @Override
    public Long getVersionNumber()
    {
        return versionNumber;
    }
    /**
     * Set a new value for the versionNumber property.
     *
     * @param versionNumber
     *            the versionNumber to set
     */
    @Override
    public void setVersionNumber(final Long versionNumber)
    {
        this.versionNumber = versionNumber;
    }
    /**
     * Return the path property.
     *
     * @return the path
     */
    @Override
    public String getPath()
    {
        return path;
    }
    /**
     * Set a new value for the path property.
     *
     * @param path
     *            the path to set
     */
    @Override
    public void setPath(final String path)
    {
        this.path = path;
    }
    /**
     * Return the linkedResourcePaths property.
     *
     * @return the linkedResourcePaths
     */
    @Override
    public List<String> getLinkedResourcePaths()
    {
        return linkedResourcePaths;
    }
    /**
     * Set a new value for the linkedResourcePaths property.
     *
     * @param linkedResourcePaths
     *            the linkedResourcePaths to set
     */
    @Override
    public void setLinkedResourcePaths(final List<String> linkedResourcePaths)
    {
        this.linkedResourcePaths = linkedResourcePaths;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache.distributed;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;
import javax.cache.Cache;
import javax.cache.configuration.Factory;
import javax.cache.integration.CacheLoaderException;
import javax.cache.integration.CacheWriterException;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.IgniteTransactions;
import org.apache.ignite.cache.CacheWriteSynchronizationMode;
import org.apache.ignite.cache.affinity.Affinity;
import org.apache.ignite.cache.store.CacheStore;
import org.apache.ignite.cache.store.CacheStoreAdapter;
import org.apache.ignite.cluster.ClusterNode;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.configuration.NearCacheConfiguration;
import org.apache.ignite.internal.IgniteInternalFuture;
import org.apache.ignite.internal.TestRecordingCommunicationSpi;
import org.apache.ignite.internal.managers.communication.GridIoMessage;
import org.apache.ignite.internal.processors.cache.IgniteCacheProxy;
import org.apache.ignite.internal.processors.cache.distributed.dht.GridDhtTxFinishRequest;
import org.apache.ignite.internal.processors.cache.distributed.near.GridNearTxFinishRequest;
import org.apache.ignite.internal.processors.cache.distributed.near.GridNearTxFinishResponse;
import org.apache.ignite.internal.processors.cache.distributed.near.GridNearTxPrepareRequest;
import org.apache.ignite.internal.processors.cache.distributed.near.GridNearTxPrepareResponse;
import org.apache.ignite.internal.processors.cache.transactions.TransactionProxyImpl;
import org.apache.ignite.internal.util.lang.GridAbsPredicate;
import org.apache.ignite.internal.util.lang.IgnitePredicateX;
import org.apache.ignite.internal.util.typedef.G;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.lang.IgniteBiInClosure;
import org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi;
import org.apache.ignite.spi.discovery.tcp.ipfinder.TcpDiscoveryIpFinder;
import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.TcpDiscoveryVmIpFinder;
import org.apache.ignite.testframework.GridTestUtils;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
import org.apache.ignite.transactions.Transaction;
import org.apache.ignite.transactions.TransactionConcurrency;
import org.apache.ignite.transactions.TransactionIsolation;
import static org.apache.ignite.cache.CacheAtomicityMode.TRANSACTIONAL;
import static org.apache.ignite.cache.CacheWriteSynchronizationMode.FULL_ASYNC;
import static org.apache.ignite.cache.CacheWriteSynchronizationMode.FULL_SYNC;
import static org.apache.ignite.cache.CacheWriteSynchronizationMode.PRIMARY_SYNC;
/**
*
*/
public class IgniteTxCachePrimarySyncTest extends GridCommonAbstractTest {
/** */
private static final TcpDiscoveryIpFinder ipFinder = new TcpDiscoveryVmIpFinder(true);
/** */
private static final int SRVS = 4;
/** */
private static final int CLIENTS = 2;
/** */
private static final int NODES = SRVS + CLIENTS;
/** */
private boolean clientMode;
/** {@inheritDoc} */
@Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception {
IgniteConfiguration cfg = super.getConfiguration(igniteInstanceName);
((TcpDiscoverySpi)cfg.getDiscoverySpi()).setIpFinder(ipFinder);
cfg.setClientMode(clientMode);
TestRecordingCommunicationSpi commSpi = new TestRecordingCommunicationSpi();
commSpi.setSharedMemoryPort(-1);
cfg.setCommunicationSpi(commSpi);
return cfg;
}
/** {@inheritDoc} */
@Override protected long getTestTimeout() {
return 15 * 60_000;
}
/** {@inheritDoc} */
@Override protected void beforeTestsStarted() throws Exception {
super.beforeTestsStarted();
startGrids(SRVS);
try {
for (int i = 0; i < CLIENTS; i++) {
clientMode = true;
Ignite client = startGrid(SRVS + i);
assertTrue(client.configuration().isClientMode());
}
}
finally {
clientMode = false;
}
}
/** {@inheritDoc} */
@Override protected void afterTestsStopped() throws Exception {
stopAllGrids();
super.afterTestsStopped();
}
/**
* @throws Exception If failed.
*/
public void testSingleKeyCommitFromPrimary() throws Exception {
singleKeyCommitFromPrimary(cacheConfiguration(null, PRIMARY_SYNC, 1, true, false));
singleKeyCommitFromPrimary(cacheConfiguration(null, PRIMARY_SYNC, 2, false, false));
singleKeyCommitFromPrimary(cacheConfiguration(null, PRIMARY_SYNC, 2, false, true));
singleKeyCommitFromPrimary(cacheConfiguration(null, PRIMARY_SYNC, 3, false, false));
}
    /**
     * Runs the primary-originated commit scenario on every server node: first with a
     * plain {@code put} (implicit transaction), then with an explicit transaction for
     * every concurrency/isolation combination. The cache is created before and
     * destroyed after, regardless of outcome.
     *
     * @param ccfg Cache configuration.
     * @throws Exception If failed.
     */
    private void singleKeyCommitFromPrimary(CacheConfiguration<Object, Object> ccfg) throws Exception {
        Ignite ignite = ignite(0);
        IgniteCache<Object, Object> cache = ignite.createCache(ccfg);
        try {
            for (int i = 0; i < SRVS; i++) {
                Ignite node = ignite(i);
                // Implicit transaction: a bare cache put.
                singleKeyCommitFromPrimary(node, ccfg, new IgniteBiInClosure<Integer, IgniteCache<Object, Object>>() {
                    @Override public void apply(Integer key, IgniteCache<Object, Object> cache) {
                        cache.put(key, key);
                    }
                });
                // Explicit transactions: all concurrency/isolation modes.
                for (final TransactionConcurrency concurrency : TransactionConcurrency.values()) {
                    for (final TransactionIsolation isolation : TransactionIsolation.values()) {
                        singleKeyCommitFromPrimary(node, ccfg, new IgniteBiInClosure<Integer, IgniteCache<Object, Object>>() {
                            @Override public void apply(Integer key, IgniteCache<Object, Object> cache) {
                                Ignite ignite = cache.unwrap(Ignite.class);
                                try (Transaction tx = ignite.transactions().txStart(concurrency, isolation)) {
                                    cache.put(key, key);
                                    tx.commit();
                                }
                            }
                        });
                    }
                }
            }
        }
        finally {
            ignite.destroyCache(cache.getName());
        }
    }
    /**
     * Checks PRIMARY_SYNC semantics when the update is initiated on the primary node
     * itself: the operation must complete once the primary is updated, while DHT
     * finish requests to backups are still held back, and exactly one such request
     * per backup must eventually be sent.
     *
     * @param ignite Node executing cache operation (used as the primary for the test key).
     * @param ccfg Cache configuration.
     * @param c Cache update closure.
     * @throws Exception If failed.
     */
    private void singleKeyCommitFromPrimary(
        Ignite ignite,
        final CacheConfiguration<Object, Object> ccfg,
        IgniteBiInClosure<Integer, IgniteCache<Object, Object>> c) throws Exception {
        TestRecordingCommunicationSpi commSpi0 =
            (TestRecordingCommunicationSpi)ignite.configuration().getCommunicationSpi();
        IgniteCache<Object, Object> cache = ignite.cache(ccfg.getName());
        // Test key — presumably primary on 'ignite' (primaryKey is a framework helper).
        final Integer key = primaryKey(cache);
        cache.remove(key);
        waitKeyRemoved(ccfg.getName(), key);
        commSpi0.record(GridDhtTxFinishRequest.class);
        // Hold back finish requests to backups so we can observe that the
        // primary-sync operation returns before backups are updated.
        commSpi0.blockMessages(new IgnitePredicateX<GridIoMessage>() {
            @Override public boolean applyx(GridIoMessage e) throws IgniteCheckedException {
                return e.message() instanceof GridDhtTxFinishRequest;
            }
        });
        c.apply(key, cache);
        // Primary must already see the value...
        assertEquals(key, cache.localPeek(key));
        U.sleep(50);
        // ...while no other server does (their finish requests are still blocked).
        for (int i = 0; i < SRVS; i++) {
            Ignite node = ignite(i);
            if (node != ignite)
                assertNull(node.cache(null).localPeek(key));
        }
        commSpi0.stopBlock(true);
        waitKeyUpdated(ignite, ccfg.getBackups() + 1, ccfg.getName(), key);
        // One DHT finish request per backup is expected.
        List<Object> msgs = commSpi0.recordedMessages(true);
        assertEquals(ccfg.getBackups(), msgs.size());
        // Sanity: the cache keeps working normally afterwards.
        cache.remove(key);
        waitKeyRemoved(ccfg.getName(), key);
        c.apply(key, cache);
        waitKeyUpdated(ignite, ccfg.getBackups() + 1, ccfg.getName(), key);
    }
/**
* @throws Exception If failed.
*/
public void testSingleKeyPrimaryNodeFail1() throws Exception {
singleKeyPrimaryNodeLeft(cacheConfiguration(null, PRIMARY_SYNC, 1, true, false));
singleKeyPrimaryNodeLeft(cacheConfiguration(null, PRIMARY_SYNC, 2, false, false));
}
/**
* @throws Exception If failed.
*/
public void testSingleKeyPrimaryNodeFail2() throws Exception {
singleKeyPrimaryNodeLeft(cacheConfiguration(null, PRIMARY_SYNC, 2, true, false));
singleKeyPrimaryNodeLeft(cacheConfiguration(null, PRIMARY_SYNC, 3, false, false));
}
    /**
     * Runs the primary-node-failure scenario from every node (servers and clients),
     * with a plain put and with explicit transactions for every concurrency/isolation
     * combination. A near cache is created on the last client so that path is covered
     * too. The cache is destroyed afterwards regardless of outcome.
     *
     * @param ccfg Cache configuration.
     * @throws Exception If failed.
     */
    private void singleKeyPrimaryNodeLeft(CacheConfiguration<Object, Object> ccfg) throws Exception {
        Ignite ignite = ignite(0);
        IgniteCache<Object, Object> cache = ignite.createCache(ccfg);
        try {
            // Last node (a client) additionally gets a near cache.
            ignite(NODES - 1).createNearCache(ccfg.getName(), new NearCacheConfiguration<>());
            for (int i = 0; i < NODES; i++) {
                Ignite node = ignite(i);
                // Implicit transaction: a bare cache put.
                singleKeyPrimaryNodeLeft(node, ccfg, new IgniteBiInClosure<Integer, IgniteCache<Object, Object>>() {
                    @Override public void apply(Integer key, IgniteCache<Object, Object> cache) {
                        cache.put(key, key);
                    }
                });
                // Explicit transactions: all concurrency/isolation modes.
                for (final TransactionConcurrency concurrency : TransactionConcurrency.values()) {
                    for (final TransactionIsolation isolation : TransactionIsolation.values()) {
                        singleKeyPrimaryNodeLeft(node, ccfg, new IgniteBiInClosure<Integer, IgniteCache<Object, Object>>() {
                            @Override public void apply(Integer key, IgniteCache<Object, Object> cache) {
                                Ignite ignite = cache.unwrap(Ignite.class);
                                try (Transaction tx = ignite.transactions().txStart(concurrency, isolation)) {
                                    cache.put(key, key);
                                    tx.commit();
                                }
                            }
                        });
                    }
                }
            }
        }
        finally {
            ignite.destroyCache(cache.getName());
        }
    }
    /**
     * Starts one extra grid (which holds the primary copy of the test key), blocks
     * the near finish request from the originating node to that grid, stops the
     * grid while the commit is in flight, then unblocks and verifies the operation
     * still completes and the key ends up updated on all remaining owners.
     *
     * @param client Node executing cache operation.
     * @param ccfg Cache configuration.
     * @param c Cache update closure.
     * @throws Exception If failed.
     */
    private void singleKeyPrimaryNodeLeft(
        Ignite client,
        final CacheConfiguration<Object, Object> ccfg,
        final IgniteBiInClosure<Integer, IgniteCache<Object, Object>> c) throws Exception {
        // Extra node; the test key below is chosen relative to this node's cache.
        Ignite ignite = startGrid(NODES);
        final TestRecordingCommunicationSpi commSpiClient =
            (TestRecordingCommunicationSpi)client.configuration().getCommunicationSpi();
        IgniteCache<Object, Object> cache = ignite.cache(ccfg.getName());
        // Presumably a key whose primary copy lives on the newly started node.
        final Integer key = primaryKey(cache);
        cache.remove(key);
        waitKeyRemoved(ccfg.getName(), key);
        // Hold back the finish request from the originator to the doomed primary.
        commSpiClient.blockMessages(GridNearTxFinishRequest.class, ignite.name());
        final IgniteCache<Object, Object> clientCache = client.cache(ccfg.getName());
        // Run the update asynchronously: it will hang on the blocked message.
        IgniteInternalFuture<?> fut = GridTestUtils.runAsync(new Callable<Void>() {
            @Override public Void call() throws Exception {
                c.apply(key, clientCache);
                return null;
            }
        });
        // Wait (up to 5s) until the finish request is actually blocked.
        boolean waitMsgSnd = GridTestUtils.waitForCondition(new GridAbsPredicate() {
            @Override public boolean apply() {
                return commSpiClient.hasBlockedMessages();
            }
        }, 5000);
        assertTrue(waitMsgSnd);
        // Kill the primary while the commit is in flight.
        ignite.close();
        // Discard the blocked messages (their target node is gone).
        commSpiClient.stopBlock(false);
        // The operation must still complete despite the primary failure.
        fut.get();
        awaitPartitionMapExchange();
        waitKeyUpdated(client, ccfg.getBackups() + 1, ccfg.getName(), key);
        // Sanity: the cache keeps working normally afterwards.
        clientCache.remove(key);
        waitKeyRemoved(ccfg.getName(), key);
        c.apply(key, clientCache);
        waitKeyUpdated(client, ccfg.getBackups() + 1, ccfg.getName(), key);
    }
/**
* @throws Exception If failed.
*/
public void testSingleKeyCommit() throws Exception {
singleKeyCommit(cacheConfiguration(null, PRIMARY_SYNC, 1, true, false));
singleKeyCommit(cacheConfiguration(null, PRIMARY_SYNC, 2, false, false));
singleKeyCommit(cacheConfiguration(null, PRIMARY_SYNC, 2, false, true));
singleKeyCommit(cacheConfiguration(null, PRIMARY_SYNC, 3, false, false));
}
    /**
     * Runs the client-originated commit scenario from every node except grid 0 (which
     * always hosts the primary copy of the test key), with a plain put and with
     * explicit transactions for every concurrency/isolation combination. A near
     * cache is created on the last client node.
     *
     * @param ccfg Cache configuration.
     * @throws Exception If failed.
     */
    private void singleKeyCommit(CacheConfiguration<Object, Object> ccfg) throws Exception {
        Ignite ignite = ignite(0);
        IgniteCache<Object, Object> cache = ignite.createCache(ccfg);
        try {
            // Last node (a client) additionally gets a near cache.
            ignite(NODES - 1).createNearCache(ccfg.getName(), new NearCacheConfiguration<>());
            // Start from 1: grid 0 is reserved as the primary in singleKeyCommit(node, ...).
            for (int i = 1; i < NODES; i++) {
                Ignite node = ignite(i);
                log.info("Test node: " + node.name());
                // Implicit transaction: a bare cache put.
                singleKeyCommit(node, ccfg, new IgniteBiInClosure<Integer, IgniteCache<Object, Object>>() {
                    @Override public void apply(Integer key, IgniteCache<Object, Object> cache) {
                        cache.put(key, key);
                    }
                });
                // Explicit transactions: all concurrency/isolation modes.
                for (final TransactionConcurrency concurrency : TransactionConcurrency.values()) {
                    for (final TransactionIsolation isolation : TransactionIsolation.values()) {
                        singleKeyCommit(node, ccfg, new IgniteBiInClosure<Integer, IgniteCache<Object, Object>>() {
                            @Override public void apply(Integer key, IgniteCache<Object, Object> cache) {
                                Ignite ignite = cache.unwrap(Ignite.class);
                                try (Transaction tx = ignite.transactions().txStart(concurrency, isolation)) {
                                    cache.put(key, key);
                                    tx.commit();
                                }
                            }
                        });
                    }
                }
            }
        }
        finally {
            ignite.destroyCache(cache.getName());
        }
    }
    /**
     * Checks PRIMARY_SYNC semantics for a commit originated on a non-primary node:
     * the operation must complete once the primary (grid 0) is updated while backup
     * updates are still blocked, the near finish request must carry PRIMARY_SYNC,
     * and one DHT finish request per backup must eventually be sent.
     *
     * @param client Node executing cache operation (must not be grid 0).
     * @param ccfg Cache configuration.
     * @param c Cache update closure.
     * @throws Exception If failed.
     */
    private void singleKeyCommit(
        Ignite client,
        final CacheConfiguration<Object, Object> ccfg,
        IgniteBiInClosure<Integer, IgniteCache<Object, Object>> c) throws Exception {
        // Grid 0 plays the primary for the test key.
        Ignite ignite = ignite(0);
        assertNotSame(ignite, client);
        TestRecordingCommunicationSpi commSpiClient =
            (TestRecordingCommunicationSpi)client.configuration().getCommunicationSpi();
        TestRecordingCommunicationSpi commSpi0 =
            (TestRecordingCommunicationSpi)ignite.configuration().getCommunicationSpi();
        IgniteCache<Object, Object> cache = ignite.cache(ccfg.getName());
        // Presumably a key whose primary copy lives on grid 0.
        final Integer key = primaryKey(cache);
        cache.remove(key);
        waitKeyRemoved(ccfg.getName(), key);
        IgniteCache<Object, Object> clientCache = client.cache(ccfg.getName());
        // Record the originator's near finish request and the primary's DHT finish
        // requests; block the latter so backups stay stale for a while.
        commSpiClient.record(GridNearTxFinishRequest.class);
        commSpi0.record(GridDhtTxFinishRequest.class);
        commSpi0.blockMessages(new IgnitePredicateX<GridIoMessage>() {
            @Override public boolean applyx(GridIoMessage e) throws IgniteCheckedException {
                return e.message() instanceof GridDhtTxFinishRequest;
            }
        });
        c.apply(key, clientCache);
        // Primary must already see the value...
        assertEquals(key, cache.localPeek(key));
        U.sleep(50);
        boolean nearCache = ((IgniteCacheProxy)clientCache).context().isNear();
        // ...other nodes must not, except a near-cache copy on the originating
        // client when it is neither primary nor backup for the key.
        for (int i = 1; i < NODES; i++) {
            Ignite node = ignite(i);
            if (nearCache
                && node == client &&
                !node.affinity(ccfg.getName()).isPrimaryOrBackup(node.cluster().localNode(), key))
                assertEquals("Invalid value for node: " + i, key, ignite(i).cache(null).localPeek(key));
            else
                assertNull("Invalid value for node: " + i, ignite(i).cache(null).localPeek(key));
        }
        commSpi0.stopBlock(true);
        waitKeyUpdated(ignite, ccfg.getBackups() + 1, ccfg.getName(), key);
        // Exactly one near finish request, carrying the PRIMARY_SYNC mode.
        List<Object> msgs = commSpiClient.recordedMessages(true);
        assertEquals(1, msgs.size());
        GridNearTxFinishRequest req = (GridNearTxFinishRequest)msgs.get(0);
        assertEquals(PRIMARY_SYNC, req.syncMode());
        // One DHT finish request per backup.
        msgs = commSpi0.recordedMessages(true);
        assertEquals(ccfg.getBackups(), msgs.size());
        // Sanity: the cache keeps working normally afterwards.
        clientCache.remove(key);
        waitKeyRemoved(ccfg.getName(), key);
        c.apply(key, clientCache);
        waitKeyUpdated(ignite, ccfg.getBackups() + 1, ccfg.getName(), key);
    }
/**
 * Checks that the originating node waits for the primary response for several
 * backup counts, with and without store and near cache.
 *
 * @throws Exception If failed.
 */
public void testWaitPrimaryResponse() throws Exception {
    CacheConfiguration<Object, Object> oneBackupWithStore = cacheConfiguration(null, PRIMARY_SYNC, 1, true, false);
    checkWaitPrimaryResponse(oneBackupWithStore);
    CacheConfiguration<Object, Object> twoBackups = cacheConfiguration(null, PRIMARY_SYNC, 2, false, false);
    checkWaitPrimaryResponse(twoBackups);
    CacheConfiguration<Object, Object> twoBackupsNear = cacheConfiguration(null, PRIMARY_SYNC, 2, false, true);
    checkWaitPrimaryResponse(twoBackupsNear);
    CacheConfiguration<Object, Object> threeBackups = cacheConfiguration(null, PRIMARY_SYNC, 3, false, false);
    checkWaitPrimaryResponse(threeBackups);
}
/**
 * Runs {@link #checkWaitPrimaryResponse(Ignite, CacheConfiguration, IgniteBiInClosure)} from every
 * node except the primary, for implicit and explicit transactions (all concurrency/isolation
 * combinations), with both single-key and multi-key updates.
 *
 * @param ccfg Cache configuration.
 * @throws Exception If failed.
 */
private void checkWaitPrimaryResponse(CacheConfiguration<Object, Object> ccfg) throws Exception {
    Ignite ignite = ignite(0);
    IgniteCache<Object, Object> cache = ignite.createCache(ccfg);
    try {
        // Last node additionally gets a near cache so the near-cache path is covered.
        ignite(NODES - 1).createNearCache(ccfg.getName(), new NearCacheConfiguration<>());
        for (int i = 1; i < NODES; i++) {
            Ignite node = ignite(i);
            log.info("Test node: " + node.name());
            // Implicit transaction, single key.
            checkWaitPrimaryResponse(node, ccfg, new IgniteBiInClosure<Integer, IgniteCache<Object, Object>>() {
                @Override public void apply(Integer key, IgniteCache<Object, Object> cache) {
                    cache.put(key, key);
                }
            });
            // Implicit transaction, multiple keys (batch putAll including the tracked key).
            checkWaitPrimaryResponse(node, ccfg, new IgniteBiInClosure<Integer, IgniteCache<Object, Object>>() {
                @Override public void apply(Integer key, IgniteCache<Object, Object> cache) {
                    Map<Integer, Integer> map = new HashMap<>();
                    for (int i = 0; i < 50; i++)
                        map.put(i, i);
                    map.put(key, key);
                    cache.putAll(map);
                }
            });
            for (final TransactionConcurrency concurrency : TransactionConcurrency.values()) {
                for (final TransactionIsolation isolation : TransactionIsolation.values()) {
                    // Explicit transaction, single key.
                    checkWaitPrimaryResponse(node, ccfg, new IgniteBiInClosure<Integer, IgniteCache<Object, Object>>() {
                        @Override public void apply(Integer key, IgniteCache<Object, Object> cache) {
                            Ignite ignite = cache.unwrap(Ignite.class);
                            try (Transaction tx = ignite.transactions().txStart(concurrency, isolation)) {
                                cache.put(key, key);
                                tx.commit();
                            }
                        }
                    });
                    // Explicit transaction, multiple keys.
                    checkWaitPrimaryResponse(node, ccfg, new IgniteBiInClosure<Integer, IgniteCache<Object, Object>>() {
                        @Override public void apply(Integer key, IgniteCache<Object, Object> cache) {
                            Map<Integer, Integer> map = new HashMap<>();
                            for (int i = 0; i < 50; i++)
                                map.put(i, i);
                            map.put(key, key);
                            Ignite ignite = cache.unwrap(Ignite.class);
                            try (Transaction tx = ignite.transactions().txStart(concurrency, isolation)) {
                                cache.putAll(map);
                                tx.commit();
                            }
                        }
                    });
                }
            }
        }
    }
    finally {
        ignite.destroyCache(cache.getName());
    }
}
/**
 * Verifies that the originating node does not complete its update until it receives
 * {@link GridNearTxFinishResponse} from the primary: while that response is blocked
 * the asynchronous update must stay incomplete.
 *
 * @param client Node executing cache operation.
 * @param ccfg Cache configuration.
 * @param c Cache update closure.
 * @throws Exception If failed.
 */
private void checkWaitPrimaryResponse(
    Ignite client,
    final CacheConfiguration<Object, Object> ccfg,
    final IgniteBiInClosure<Integer, IgniteCache<Object, Object>> c) throws Exception {
    Ignite ignite = ignite(0);
    assertNotSame(ignite, client);
    TestRecordingCommunicationSpi commSpi0 =
        (TestRecordingCommunicationSpi)ignite.configuration().getCommunicationSpi();
    IgniteCache<Object, Object> cache = ignite.cache(ccfg.getName());
    // Key for which ignite(0) is primary, so blocking its SPI blocks the primary's response.
    final Integer key = primaryKey(cache);
    cache.remove(key);
    waitKeyRemoved(ccfg.getName(), key);
    final IgniteCache<Object, Object> clientCache = client.cache(ccfg.getName());
    // Block only the finish response addressed to this client.
    commSpi0.blockMessages(GridNearTxFinishResponse.class, client.name());
    IgniteInternalFuture<?> fut = GridTestUtils.runAsync(new Callable<Void>() {
        @Override public Void call() throws Exception {
            c.apply(key, clientCache);
            return null;
        }
    }, "tx-thread");
    U.sleep(100);
    // The update must not complete while the primary's finish response is blocked.
    assertFalse(fut.isDone());
    commSpi0.stopBlock(true);
    fut.get();
    waitKeyUpdated(ignite, ccfg.getBackups() + 1, ccfg.getName(), key);
}
/**
 * Checks message exchange for one-phase commit (cache with a single backup).
 *
 * @throws Exception If failed.
 */
public void testOnePhaseMessages() throws Exception {
    CacheConfiguration<Object, Object> oneBackup = cacheConfiguration(null, PRIMARY_SYNC, 1, false, false);
    checkOnePhaseMessages(oneBackup);
}
/**
 * Runs {@link #checkOnePhaseMessages(Ignite, CacheConfiguration, IgniteBiInClosure)} from every
 * node except the primary, for implicit and explicit transactions.
 *
 * @param ccfg Cache configuration.
 * @throws Exception If failed.
 */
private void checkOnePhaseMessages(CacheConfiguration<Object, Object> ccfg) throws Exception {
    Ignite ignite = ignite(0);
    IgniteCache<Object, Object> cache = ignite.createCache(ccfg);
    try {
        for (int i = 1; i < NODES; i++) {
            Ignite node = ignite(i);
            log.info("Test node: " + node.name());
            // Implicit transaction.
            checkOnePhaseMessages(node, ccfg, new IgniteBiInClosure<Integer, IgniteCache<Object, Object>>() {
                @Override public void apply(Integer key, IgniteCache<Object, Object> cache) {
                    cache.put(key, key);
                }
            });
            // Explicit transactions, all concurrency/isolation combinations.
            for (final TransactionConcurrency concurrency : TransactionConcurrency.values()) {
                for (final TransactionIsolation isolation : TransactionIsolation.values()) {
                    checkOnePhaseMessages(node, ccfg, new IgniteBiInClosure<Integer, IgniteCache<Object, Object>>() {
                        @Override public void apply(Integer key, IgniteCache<Object, Object> cache) {
                            Ignite ignite = cache.unwrap(Ignite.class);
                            try (Transaction tx = ignite.transactions().txStart(concurrency, isolation)) {
                                cache.put(key, key);
                                tx.commit();
                            }
                        }
                    });
                }
            }
        }
    }
    finally {
        ignite.destroyCache(cache.getName());
    }
}
/**
 * Verifies the one-phase commit message exchange: the update must use exactly one
 * prepare round-trip ({@link GridNearTxPrepareRequest} with the {@code onePhaseCommit}
 * flag set, answered by {@link GridNearTxPrepareResponse}) and no separate near
 * finish request/response.
 *
 * @param client Node executing cache operation.
 * @param ccfg Cache configuration.
 * @param c Cache update closure.
 * @throws Exception If failed.
 */
private void checkOnePhaseMessages(
    Ignite client,
    final CacheConfiguration<Object, Object> ccfg,
    final IgniteBiInClosure<Integer, IgniteCache<Object, Object>> c) throws Exception {
    Ignite ignite = ignite(0);
    assertNotSame(ignite, client);
    TestRecordingCommunicationSpi commSpiClient =
        (TestRecordingCommunicationSpi)client.configuration().getCommunicationSpi();
    TestRecordingCommunicationSpi commSpi0 =
        (TestRecordingCommunicationSpi)ignite.configuration().getCommunicationSpi();
    IgniteCache<Object, Object> cache = ignite.cache(ccfg.getName());
    // Key for which ignite(0) is primary.
    final Integer key = primaryKey(cache);
    cache.remove(key);
    waitKeyRemoved(ccfg.getName(), key);
    final IgniteCache<Object, Object> clientCache = client.cache(ccfg.getName());
    commSpi0.record(GridNearTxFinishResponse.class, GridNearTxPrepareResponse.class);
    commSpiClient.record(GridNearTxPrepareRequest.class, GridNearTxFinishRequest.class);
    c.apply(key, clientCache);
    // Primary sent only the prepare response - no separate finish response.
    List<Object> srvMsgs = commSpi0.recordedMessages(true);
    assertEquals("Unexpected messages: " + srvMsgs, 1, srvMsgs.size());
    assertTrue("Unexpected message: " + srvMsgs.get(0), srvMsgs.get(0) instanceof GridNearTxPrepareResponse);
    // Client sent only the prepare request - no separate finish request.
    List<Object> clientMsgs = commSpiClient.recordedMessages(true);
    assertEquals("Unexpected messages: " + clientMsgs, 1, clientMsgs.size());
    assertTrue("Unexpected message: " + clientMsgs.get(0), clientMsgs.get(0) instanceof GridNearTxPrepareRequest);
    GridNearTxPrepareRequest req = (GridNearTxPrepareRequest)clientMsgs.get(0);
    assertTrue(req.onePhaseCommit());
    for (Ignite ignite0 : G.allGrids())
        assertEquals(key, ignite0.cache(cache.getName()).get(key));
}
/**
 * Creates two caches per write synchronization mode and verifies the effective
 * transaction sync mode from every node, both with commit and rollback.
 *
 * @throws Exception If failed.
 */
public void testTxSyncMode() throws Exception {
    Ignite ignite = ignite(0);
    List<IgniteCache<Object, Object>> caches = new ArrayList<>();
    try {
        // All caches get a near cache on the last node (see createCache).
        caches.add(createCache(ignite, cacheConfiguration("fullSync1", FULL_SYNC, 1, false, false), true));
        caches.add(createCache(ignite, cacheConfiguration("fullSync2", FULL_SYNC, 1, false, false), true));
        caches.add(createCache(ignite, cacheConfiguration("fullAsync1", FULL_ASYNC, 1, false, false), true));
        caches.add(createCache(ignite, cacheConfiguration("fullAsync2", FULL_ASYNC, 1, false, false), true));
        caches.add(createCache(ignite, cacheConfiguration("primarySync1", PRIMARY_SYNC, 1, false, false), true));
        caches.add(createCache(ignite, cacheConfiguration("primarySync2", PRIMARY_SYNC, 1, false, false), true));
        for (int i = 0; i < NODES; i++) {
            // Check both committed (true) and rolled-back (false) transactions.
            checkTxSyncMode(ignite(i), true);
            checkTxSyncMode(ignite(i), false);
        }
    }
    finally {
        for (IgniteCache<Object, Object> cache : caches)
            ignite.destroyCache(cache.getName());
    }
}
/**
 * Waits (up to 5 seconds) until the given key is absent on every node, then asserts it.
 *
 * @param cacheName Cache name.
 * @param key Cache key.
 * @throws Exception If failed.
 */
private void waitKeyRemoved(final String cacheName, final Object key) throws Exception {
    GridAbsPredicate keyGoneEverywhere = new GridAbsPredicate() {
        @Override public boolean apply() {
            for (Ignite node : G.allGrids()) {
                if (node.cache(cacheName).get(key) != null)
                    return false;
            }
            return true;
        }
    };
    boolean removed = GridTestUtils.waitForCondition(keyGoneEverywhere, 5000);
    assertTrue(removed);
}
/**
 * Waits (up to 5 seconds) until the key has the expected value on all of its primary and
 * backup nodes, then checks the value is readable from every node in the grid.
 *
 * @param ignite Node.
 * @param expNodes Expected number of cache server nodes.
 * @param cacheName Cache name.
 * @param key Cache key.
 * @throws Exception If failed.
 */
private void waitKeyUpdated(Ignite ignite, int expNodes, final String cacheName, final Object key) throws Exception {
    Affinity<Object> aff = ignite.affinity(cacheName);
    final Collection<ClusterNode> nodes = aff.mapKeyToPrimaryAndBackups(key);
    // Sanity check: affinity matches the configured number of backups + primary.
    assertEquals(expNodes, nodes.size());
    boolean wait = GridTestUtils.waitForCondition(new GridAbsPredicate() {
        @Override public boolean apply() {
            for (ClusterNode node : nodes) {
                Ignite ignite = grid(node);
                if (!key.equals(ignite.cache(cacheName).get(key)))
                    return false;
            }
            return true;
        }
    }, 5000);
    assertTrue(wait);
    // Value must also be visible from every node.
    for (Ignite ignite0 : G.allGrids())
        assertEquals(key, ignite0.cache(cacheName).get(key));
}
/**
 * Creates a cache from the given configuration and, if requested, a near cache
 * for it on the last node.
 *
 * @param ignite Node.
 * @param ccfg Cache configuration.
 * @param nearCache If {@code true} creates near cache on one of client nodes.
 * @return Created cache.
 */
private <K, V> IgniteCache<K, V> createCache(Ignite ignite, CacheConfiguration<K, V> ccfg,
    boolean nearCache) {
    IgniteCache<K, V> created = ignite.createCache(ccfg);
    if (!nearCache)
        return created;
    ignite(NODES - 1).createNearCache(ccfg.getName(), new NearCacheConfiguration<>());
    return created;
}
/**
 * Verifies the effective transaction write synchronization mode for many combinations of
 * caches enlisted in a single transaction: FULL_SYNC dominates every other mode, and
 * PRIMARY_SYNC dominates FULL_ASYNC, regardless of enlist order.
 *
 * @param ignite Node.
 * @param commit If {@code true} commits transaction.
 */
private void checkTxSyncMode(Ignite ignite, boolean commit) {
    IgniteTransactions txs = ignite.transactions();
    IgniteCache<Object, Object> fullSync1 = ignite.cache("fullSync1");
    IgniteCache<Object, Object> fullSync2 = ignite.cache("fullSync2");
    IgniteCache<Object, Object> fullAsync1 = ignite.cache("fullAsync1");
    IgniteCache<Object, Object> fullAsync2 = ignite.cache("fullAsync2");
    IgniteCache<Object, Object> primarySync1 = ignite.cache("primarySync1");
    IgniteCache<Object, Object> primarySync2 = ignite.cache("primarySync2");
    for (int i = 0; i < 3; i++) {
        int key = 0;
        for (TransactionConcurrency concurrency : TransactionConcurrency.values()) {
            for (TransactionIsolation isolation : TransactionIsolation.values()) {
                // Single cache, single key.
                key = txPuts(txs, concurrency, isolation, commit, key, FULL_SYNC, 1, fullSync1);
                key = txPuts(txs, concurrency, isolation, commit, key, FULL_ASYNC, 1, fullAsync1);
                key = txPuts(txs, concurrency, isolation, commit, key, PRIMARY_SYNC, 1, primarySync1);
                // Single cache, many keys.
                key = txPuts(txs, concurrency, isolation, commit, key, FULL_SYNC, 100, fullSync1);
                key = txPuts(txs, concurrency, isolation, commit, key, FULL_ASYNC, 100, fullAsync1);
                key = txPuts(txs, concurrency, isolation, commit, key, PRIMARY_SYNC, 100, primarySync1);
                // Two caches with the same sync mode.
                key = txPuts(txs, concurrency, isolation, commit, key, FULL_SYNC, 1, fullSync1, fullSync2);
                key = txPuts(txs, concurrency, isolation, commit, key, FULL_ASYNC, 1, fullAsync1, fullAsync2);
                key = txPuts(txs, concurrency, isolation, commit, key, PRIMARY_SYNC, 1, primarySync1, primarySync2);
                // Mixed modes: FULL_SYNC dominates, in either enlist order.
                key = txPuts(txs, concurrency, isolation, commit, key, FULL_SYNC, 1, fullSync1, primarySync1);
                key = txPuts(txs, concurrency, isolation, commit, key, FULL_SYNC, 1, primarySync1, fullSync1);
                key = txPuts(txs, concurrency, isolation, commit, key, FULL_SYNC, 1, fullSync1, fullAsync1);
                key = txPuts(txs, concurrency, isolation, commit, key, FULL_SYNC, 1, fullAsync1, fullSync1);
                // Mixed modes: PRIMARY_SYNC dominates FULL_ASYNC, in either enlist order.
                key = txPuts(txs, concurrency, isolation, commit, key, PRIMARY_SYNC, 1, fullAsync1, primarySync1);
                key = txPuts(txs, concurrency, isolation, commit, key, PRIMARY_SYNC, 1, fullAsync1, primarySync1, fullAsync2);
                key = txPuts(txs, concurrency, isolation, commit, key, PRIMARY_SYNC, 1, primarySync1, fullAsync1);
                // All three modes: FULL_SYNC still dominates.
                key = txPuts(txs, concurrency, isolation, commit, key, FULL_SYNC, 1, fullSync1, fullAsync1, primarySync1);
                key = txPuts(txs, concurrency, isolation, commit, key, FULL_SYNC, 1, fullAsync1, primarySync1, fullSync1);
            }
        }
    }
}

/**
 * Starts a transaction, puts {@code putsPerCache} sequential keys into each of the given
 * caches (in enlist order), verifies the transaction's write synchronization mode and
 * optionally commits.
 *
 * @param txs Transactions facade.
 * @param concurrency Transaction concurrency.
 * @param isolation Transaction isolation.
 * @param commit If {@code true} commits transaction.
 * @param key First key to use.
 * @param expSyncMode Expected write synchronization mode after all puts.
 * @param putsPerCache Number of keys to put into each cache.
 * @param caches Caches to update, enlisted in the given order.
 * @return Next unused key.
 */
@SafeVarargs
private final int txPuts(IgniteTransactions txs,
    TransactionConcurrency concurrency,
    TransactionIsolation isolation,
    boolean commit,
    int key,
    CacheWriteSynchronizationMode expSyncMode,
    int putsPerCache,
    IgniteCache<Object, Object>... caches) {
    try (Transaction tx = txs.txStart(concurrency, isolation)) {
        for (IgniteCache<Object, Object> cache : caches) {
            for (int j = 0; j < putsPerCache; j++)
                cache.put(key++, 1);
        }
        checkSyncMode(tx, expSyncMode);
        if (commit)
            tx.commit();
    }
    return key;
}
/**
 * Asserts that the internal transaction behind the given proxy has the expected
 * write synchronization mode.
 *
 * @param tx Transaction.
 * @param syncMode Expected write synchronization mode.
 */
private void checkSyncMode(Transaction tx, CacheWriteSynchronizationMode syncMode) {
    assertEquals(syncMode, ((TransactionProxyImpl)tx).tx().syncMode());
}
/**
 * Builds a transactional cache configuration for these tests.
 *
 * @param name Cache name.
 * @param syncMode Write synchronization mode.
 * @param backups Number of backups.
 * @param store If {@code true} configures cache store.
 * @param nearCache If {@code true} configures near cache.
 * @return Cache configuration.
 */
private CacheConfiguration<Object, Object> cacheConfiguration(String name,
    CacheWriteSynchronizationMode syncMode,
    int backups,
    boolean store,
    boolean nearCache) {
    CacheConfiguration<Object, Object> cfg = new CacheConfiguration<>();
    cfg.setName(name);
    cfg.setAtomicityMode(TRANSACTIONAL);
    cfg.setWriteSynchronizationMode(syncMode);
    cfg.setBackups(backups);
    if (store) {
        // Enable read/write-through against the no-op test store.
        cfg.setCacheStoreFactory(new TestStoreFactory());
        cfg.setReadThrough(true);
        cfg.setWriteThrough(true);
    }
    if (nearCache)
        cfg.setNearConfiguration(new NearCacheConfiguration<>());
    return cfg;
}
/**
 * Factory producing a no-op cache store: loads return {@code null}, writes and
 * deletes are ignored. Used only to enable the read/write-through code paths.
 */
private static class TestStoreFactory implements Factory<CacheStore<Object, Object>> {
    /** {@inheritDoc} */
    @SuppressWarnings("unchecked")
    @Override public CacheStore<Object, Object> create() {
        return new CacheStoreAdapter() {
            /** Nothing is ever persisted, so there is nothing to load. */
            @Override public Object load(Object key) throws CacheLoaderException {
                return null;
            }
            @Override public void write(Cache.Entry entry) throws CacheWriterException {
                // No-op.
            }
            @Override public void delete(Object key) throws CacheWriterException {
                // No-op.
            }
        };
    }
}
}
| |
/**
* OLAT - Online Learning and Training<br>
* http://www.olat.org
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Copyright (c) since 2004 at Multimedia- & E-Learning Services (MELS),<br>
* University of Zurich, Switzerland.
* <hr>
* <a href="http://www.openolat.org">
* OpenOLAT - Online Learning and Training</a><br>
* This file has been modified by the OpenOLAT community. Changes are licensed
* under the Apache 2.0 license as the original file.
*/
package org.olat.course.nodes.projectbroker;
import org.olat.admin.quota.QuotaConstants;
import org.olat.admin.securitygroup.gui.GroupController;
import org.olat.admin.securitygroup.gui.IdentitiesAddEvent;
import org.olat.admin.securitygroup.gui.IdentitiesRemoveEvent;
import org.olat.core.CoreSpringFactory;
import org.olat.core.commons.modules.bc.FolderRunController;
import org.olat.core.commons.modules.bc.vfs.OlatNamedContainerImpl;
import org.olat.core.commons.modules.bc.vfs.OlatRootFolderImpl;
import org.olat.core.commons.services.notifications.SubscriptionContext;
import org.olat.core.gui.UserRequest;
import org.olat.core.gui.components.Component;
import org.olat.core.gui.components.link.Link;
import org.olat.core.gui.components.link.LinkFactory;
import org.olat.core.gui.components.tabbedpane.TabbedPane;
import org.olat.core.gui.components.velocity.VelocityContainer;
import org.olat.core.gui.control.Controller;
import org.olat.core.gui.control.ControllerEventListener;
import org.olat.core.gui.control.Event;
import org.olat.core.gui.control.WindowControl;
import org.olat.core.gui.control.generic.closablewrapper.CloseableModalController;
import org.olat.core.gui.control.generic.modal.DialogBoxController;
import org.olat.core.gui.control.generic.modal.DialogBoxUIFactory;
import org.olat.core.gui.control.generic.tabbable.ActivateableTabbableDefaultController;
import org.olat.core.gui.translator.PackageTranslator;
import org.olat.core.gui.translator.Translator;
import org.olat.core.util.Util;
import org.olat.core.util.mail.MailTemplate;
import org.olat.core.util.vfs.Quota;
import org.olat.core.util.vfs.QuotaManager;
import org.olat.core.util.vfs.callbacks.VFSSecurityCallback;
import org.olat.course.CourseFactory;
import org.olat.course.ICourse;
import org.olat.course.assessment.AssessmentHelper;
import org.olat.course.auditing.UserNodeAuditManager;
import org.olat.course.condition.ConditionEditController;
import org.olat.course.editor.NodeEditController;
import org.olat.course.groupsandrights.CourseGroupManager;
import org.olat.course.nodes.ProjectBrokerCourseNode;
import org.olat.course.nodes.TACourseNode;
import org.olat.course.nodes.ms.MSCourseNodeEditController;
import org.olat.course.nodes.ms.MSEditFormController;
import org.olat.course.nodes.projectbroker.datamodel.ProjectBroker;
import org.olat.course.nodes.projectbroker.service.ProjectBrokerManagerFactory;
import org.olat.course.nodes.projectbroker.service.ProjectBrokerModuleConfiguration;
import org.olat.course.nodes.ta.DropboxForm;
import org.olat.course.properties.CoursePropertyManager;
import org.olat.course.run.userview.UserCourseEnvironment;
import org.olat.group.BusinessGroup;
import org.olat.group.BusinessGroupAddResponse;
import org.olat.group.BusinessGroupService;
import org.olat.group.ui.BGMailHelper;
import org.olat.modules.ModuleConfiguration;
/**
 * Course editor controller for the project broker course node. Wires up the editor tabs:
 * accessibility condition, options (dates and custom fields), account management,
 * module configuration (drop box / return box) and scoring.
 *
 * @author guretzki
 */
public class ProjectBrokerCourseEditorController extends ActivateableTabbableDefaultController implements ControllerEventListener {
//TODO:cg 28.01.2010 no assessment-tool in V1.0
//	public static final String PANE_TAB_CONF_SCORING = "pane.tab.conf.scoring";
// Translation keys for the editor tabs.
public static final String PANE_TAB_CONF_DROPBOX = "pane.tab.conf.dropbox";
public static final String PANE_TAB_CONF_MODULES = "pane.tab.conf.modules";
public static final String PANE_TAB_ACCESSIBILITY = "pane.tab.accessibility";
private static final String PANE_TAB_OPTIONS = "pane.tab.options";
private static final String PANE_TAB_ACCOUNT_MANAGEMENT = "pane.tab.accountmanagement";
// Keys of the directly activateable tabs (scoring tab currently disabled, see TODO above).
private static final String[] paneKeys = { /*PANE_TAB_CONF_SCORING,*/ PANE_TAB_CONF_DROPBOX, PANE_TAB_CONF_MODULES,
PANE_TAB_ACCESSIBILITY };
// Edited course/node state.
private Long courseId;
private ProjectBrokerCourseNode node;
private ModuleConfiguration config;
private ProjectBrokerModuleConfiguration projectBrokerModuleConfiguration;
private BusinessGroup accountManagerGroup;
// Velocity containers backing the individual tabs.
private VelocityContainer accessabilityVC, optionsFormVC, accountManagementFormVC;
private VelocityContainer editModules, editDropbox, editScoring;
private TabbedPane myTabbedPane;
private int dropboxTabPosition, scoringTabPosition;
// Child form controllers shown inside the tabs.
private ModulesFormController modulesForm;
private DropboxForm dropboxForm;
private MSEditFormController scoringController;
private FolderRunController frc;
//	private ConditionEditController dropConditionC, scoringConditionC, returnboxConditionC;
private ConditionEditController projectBrokerConditionController;
// True when user data already exists for this node; scoring config is then read-only.
private boolean hasLogEntries;
private DialogBoxController dialogBoxController;
private OptionsFormController optionsForm;
private GroupController accountManagerGroupController;
private Link editScoringConfigButton;
private CustomfieldsFormController customfieldsForm;
private ProjectEventFormController projectEventForm;
private CloseableModalController cmc;
private Long projectBrokerId;
private final BusinessGroupService businessGroupService;
/**
 * Creates the editor controller: lazily creates the project broker and the account-manager
 * group on first use, then builds all tab containers and their child form controllers.
 *
 * @param ureq User request.
 * @param wControl Window control.
 * @param course The course containing the node.
 * @param node The project broker course node being edited.
 * @param groupMgr Course group manager (for the accessibility condition editor).
 * @param euce User course environment of the editor.
 */
protected ProjectBrokerCourseEditorController(UserRequest ureq, WindowControl wControl, ICourse course, ProjectBrokerCourseNode node,
    CourseGroupManager groupMgr, UserCourseEnvironment euce) {
    super(ureq, wControl);
    businessGroupService = CoreSpringFactory.getImpl(BusinessGroupService.class);
    this.node = node;
    //o_clusterOk by guido: save to hold reference to course inside editor
    this.courseId = course.getResourceableId();
    this.config = node.getModuleConfiguration();
    projectBrokerModuleConfiguration = new ProjectBrokerModuleConfiguration(node.getModuleConfiguration());
    // Chain fallback translators for the re-used drop box and scoring forms.
    Translator fallbackTranslator = new PackageTranslator(Util.getPackageName(DropboxForm.class), ureq.getLocale(), new PackageTranslator(Util.getPackageName(MSCourseNodeEditController.class), ureq.getLocale()));
    Translator myTranslator = new PackageTranslator( Util.getPackageName(ProjectBrokerCourseEditorController.class), ureq.getLocale(), fallbackTranslator);
    setTranslator(myTranslator);
    // check if a project-broker exists
    CoursePropertyManager cpm = course.getCourseEnvironment().getCoursePropertyManager();
    projectBrokerId = ProjectBrokerManagerFactory.getProjectBrokerManager().getProjectBrokerId(cpm, node);
    if (projectBrokerId == null) {
        // no project-broker exist => create a new one, happens only once
        ProjectBroker projectBroker = ProjectBrokerManagerFactory.getProjectBrokerManager().createAndSaveProjectBroker();
        projectBrokerId = projectBroker.getKey();
        ProjectBrokerManagerFactory.getProjectBrokerManager().saveProjectBrokerId(projectBrokerId, cpm, node);
    }
    // Access
    accessabilityVC = this.createVelocityContainer("edit_condition");
    // ProjectBroker precondition
    projectBrokerConditionController = new ConditionEditController(ureq, getWindowControl(), groupMgr, node.getConditionProjectBroker(), "projectBrokerConditionForm",
        AssessmentHelper.getAssessableNodes(course.getEditorTreeModel(), node), euce);
    this.listenTo(projectBrokerConditionController);
    accessabilityVC.put("projectBrokerCondition", projectBrokerConditionController.getInitialComponent());
    // Options with dates and custom-fields
    optionsFormVC = this.createVelocityContainer("optionsForm");
    optionsForm = new OptionsFormController(ureq, wControl, projectBrokerModuleConfiguration, projectBrokerId);
    listenTo(optionsForm);
    optionsFormVC.put("optionsForm", optionsForm.getInitialComponent());
    customfieldsForm = new CustomfieldsFormController(ureq, wControl, projectBrokerModuleConfiguration);
    customfieldsForm.addControllerListener(this);
    optionsFormVC.put("customfieldsForm", customfieldsForm.getInitialComponent());
    projectEventForm = new ProjectEventFormController(ureq, wControl, projectBrokerModuleConfiguration);
    projectEventForm.addControllerListener(this);
    optionsFormVC.put("projectEventForm", projectEventForm.getInitialComponent());
    // Account-Managment
    accountManagementFormVC = this.createVelocityContainer("account_management");
    String groupName = translate("account.manager.groupname", node.getShortTitle());
    String groupDescription = translate("account.manager.groupdescription", node.getShortTitle());
    accountManagerGroup = ProjectBrokerManagerFactory.getProjectGroupManager().getAccountManagerGroupFor(cpm, node, course, groupName, groupDescription, ureq.getIdentity());
    if (accountManagerGroup != null) {
        accountManagerGroupController = new GroupController(ureq, getWindowControl(), true, false, true, false, true, false, accountManagerGroup.getPartipiciantGroup());
        listenTo(accountManagerGroupController);
        // add mail templates used when adding and removing users
        MailTemplate ownerAddUserMailTempl = BGMailHelper.createAddParticipantMailTemplate(accountManagerGroup, ureq.getIdentity());
        accountManagerGroupController.setAddUserMailTempl(ownerAddUserMailTempl,false);
        MailTemplate ownerAremoveUserMailTempl = BGMailHelper.createRemoveParticipantMailTemplate(accountManagerGroup, ureq.getIdentity());
        accountManagerGroupController.setRemoveUserMailTempl(ownerAremoveUserMailTempl,false);
        accountManagementFormVC.put("accountManagementController", accountManagerGroupController.getInitialComponent());
    }
    // Modules config
    editModules = this.createVelocityContainer("editModules");
    modulesForm = new ModulesFormController(ureq, wControl, config);
    listenTo(modulesForm);
    editModules.put("editModules", modulesForm.getInitialComponent());
    // DropBox config (re-used from task-node)
    editDropbox = this.createVelocityContainer("editDropbox");
    editDropbox.setTranslator(myTranslator);
    dropboxForm = new DropboxForm(ureq, wControl, config);
    listenTo(dropboxForm);
    editDropbox.put("dropboxform", dropboxForm.getInitialComponent());
    // Scoring config
    editScoring = this.createVelocityContainer("editScoring");
    editScoringConfigButton = LinkFactory.createButtonSmall("scoring.config.enable.button", editScoring, this);
    scoringController = new MSEditFormController(ureq, wControl, config);
    listenTo(scoringController);
    editScoring.put("scoringController", scoringController.getInitialComponent());
    // if there is already user data available, make for read only
    UserNodeAuditManager am = course.getCourseEnvironment().getAuditManager();
    hasLogEntries = am.hasUserNodeLogs(node);
    editScoring.contextPut("hasLogEntries", new Boolean(hasLogEntries));
    if (hasLogEntries) {
        scoringController.setDisplayOnly(true);
    }
    //Initialstate
    editScoring.contextPut("isOverwriting", new Boolean(false));
}
/**
 * Handles GUI component events: clicking the scoring-override button makes the
 * otherwise read-only scoring form editable again.
 *
 * @see org.olat.core.gui.control.DefaultController#event(org.olat.core.gui.UserRequest,
 *      org.olat.core.gui.components.Component, org.olat.core.gui.control.Event)
 */
@Override
public void event(UserRequest ureq, Component source, Event event) {
    if (getLogger().isDebug()) getLogger().debug("event source=" + source + " " + event.toString());
    if (source == editScoringConfigButton){
        scoringController.setDisplayOnly(false);
        // Use Boolean.TRUE instead of the wasteful (and deprecated) 'new Boolean(true)'.
        editScoring.contextPut("isOverwriting", Boolean.TRUE);
    }
}
/**
 * Dispatches events from all child controllers: condition editor, dialog box (task folder),
 * scoring form, modules form, account-manager group controller, options/custom-fields/event
 * forms and drop box form. Persists configuration changes and notifies the node editor.
 *
 * NOTE(review): dispatch is order-dependent - the generic
 * {@code event == NODECONFIG_CHANGED_EVENT} branch is evaluated before the
 * {@code source == dropboxForm} branch, so it catches that event from any source
 * not matched earlier; verify this ordering is intentional before reordering.
 *
 * @see org.olat.core.gui.control.DefaultController#event(org.olat.core.gui.UserRequest,
 *      org.olat.core.gui.control.Controller, org.olat.core.gui.control.Event)
 */
@Override
public void event(UserRequest urequest, Controller source, Event event) {
    if (source == projectBrokerConditionController) {
        // Accessibility precondition changed.
        if (event == Event.CHANGED_EVENT) {
            node.setConditionProjectBroker(projectBrokerConditionController.getCondition());
            fireEvent(urequest, NodeEditController.NODECONFIG_CHANGED_EVENT);
        }
    } else if (source == dialogBoxController) {
        if (DialogBoxUIFactory.isOkEvent(event)) {
            // ok: open task folder
            String relPath = TACourseNode.getTaskFolderPathRelToFolderRoot(CourseFactory.loadCourse(courseId), node);
            OlatRootFolderImpl rootFolder = new OlatRootFolderImpl(relPath, null);
            OlatNamedContainerImpl namedFolder = new OlatNamedContainerImpl(translate("taskfolder"), rootFolder);
            namedFolder.setLocalSecurityCallback(new FolderCallback(relPath, false));
            removeAsListenerAndDispose(frc);
            frc = new FolderRunController(namedFolder, false, urequest, getWindowControl());
            listenTo (frc);
            removeAsListenerAndDispose(cmc);
            cmc = new CloseableModalController(
                getWindowControl(), translate("folder.close"), frc.getInitialComponent()
            );
            listenTo (cmc);
            cmc.activate();
            fireEvent(urequest, Event.CHANGED_EVENT);
        }
    } else if (source == scoringController) {
        if (event == Event.CANCELLED_EVENT) {
            // Re-lock the form if user data already exists.
            if (hasLogEntries) {
                scoringController.setDisplayOnly(true);}
            editScoring.contextPut("isOverwriting", new Boolean(false));
            return;
        } else if (event == Event.DONE_EVENT){
            scoringController.updateModuleConfiguration(config);
            fireEvent(urequest, NodeEditController.NODECONFIG_CHANGED_EVENT);
        }
    } else if (source == modulesForm) {
        // Command format is "<module><onoff>", e.g. "dropboxtrue" - see ModulesFormController.
        boolean onoff = event.getCommand().endsWith("true");
        if (event.getCommand().startsWith("dropbox")) {
            config.set(ProjectBrokerCourseNode.CONF_DROPBOX_ENABLED, onoff);
        } else if (event.getCommand().startsWith("returnbox")) {
            config.set(ProjectBrokerCourseNode.CONF_RETURNBOX_ENABLED, onoff);
        }
        fireEvent(urequest, NodeEditController.NODECONFIG_CHANGED_EVENT);
        return;
    } else if (source == accountManagerGroupController) {
        if (event instanceof IdentitiesAddEvent) {
            // Add identities to the account-manager group and report back the outcome.
            IdentitiesAddEvent identitiesAddedEvent = (IdentitiesAddEvent)event;
            BusinessGroupAddResponse response = businessGroupService.addParticipants(urequest.getIdentity(), urequest.getUserSession().getRoles(),
                identitiesAddedEvent.getAddIdentities(), accountManagerGroup, null);
            identitiesAddedEvent.setIdentitiesAddedEvent(response.getAddedIdentities());
            identitiesAddedEvent.setIdentitiesWithoutPermission(response.getIdentitiesWithoutPermission());
            identitiesAddedEvent.setIdentitiesAlreadyInGroup(response.getIdentitiesAlreadyInGroup());
            getLogger().info("Add users as account-managers");
            fireEvent(urequest, Event.CHANGED_EVENT );
        } else if (event instanceof IdentitiesRemoveEvent) {
            businessGroupService.removeParticipants(urequest.getIdentity(), ((IdentitiesRemoveEvent) event).getRemovedIdentities(), accountManagerGroup, null);
            getLogger().info("Remove users as account-managers");
            fireEvent(urequest, Event.CHANGED_EVENT );
        }
    } else if (source == optionsForm) {
        if (event == Event.CANCELLED_EVENT) {
            return;
        } else if (event == Event.DONE_EVENT) {
            projectBrokerModuleConfiguration.setNbrParticipantsPerTopic(optionsForm.getNnbrOfAttendees());
            if (projectBrokerModuleConfiguration.isAcceptSelectionManually() && !optionsForm.getSelectionAccept()) {
                // change 'Accept manually' to 'Accept automatically' => enroll all candidates
                ProjectBrokerManagerFactory.getProjectGroupManager().acceptAllCandidates(projectBrokerId, urequest.getIdentity(), projectBrokerModuleConfiguration.isAutoSignOut(), optionsForm.getSelectionAccept());
            }
            projectBrokerModuleConfiguration.setAcceptSelectionManaually(optionsForm.getSelectionAccept());
            projectBrokerModuleConfiguration.setSelectionAutoSignOut(optionsForm.getSelectionAutoSignOut());
            fireEvent(urequest, NodeEditController.NODECONFIG_CHANGED_EVENT);
        }
    } else if (source == customfieldsForm || source == projectEventForm) {
        if (event == Event.CANCELLED_EVENT) {
            return;
        } else if (event == Event.DONE_EVENT) {
            fireEvent(urequest, NodeEditController.NODECONFIG_CHANGED_EVENT);
        }
    } else if (event == NodeEditController.NODECONFIG_CHANGED_EVENT){
        // Node title may have changed: rename the account-manager group accordingly.
        getLogger().debug("NODECONFIG_CHANGED_node.shortTitle=" + node.getShortTitle());
        String groupName = translate("account.manager.groupname", node.getShortTitle());
        String groupDescription = translate("account.manager.groupdescription", node.getShortTitle());
        accountManagerGroup = ProjectBrokerManagerFactory.getProjectGroupManager().updateAccountManagerGroupName(getIdentity(), groupName, groupDescription, accountManagerGroup);
    } else if (source == dropboxForm) {
        if (event == Event.CANCELLED_EVENT) {
            return;
        } else if (event == Event.DONE_EVENT) {
            config.set(ProjectBrokerCourseNode.CONF_DROPBOX_ENABLEMAIL, dropboxForm.mailEnabled());
            config.set(ProjectBrokerCourseNode.CONF_DROPBOX_CONFIRMATION, dropboxForm.getConfirmation());
            fireEvent(urequest, NodeEditController.NODECONFIG_CHANGED_EVENT);
            return;
        }
    } else {
        getLogger().warn("Can not handle event in ProjectBrokerCourseEditorController source=" + source + " " + event.toString());
    }
}
/**
 * Registers all editor tabs on the given pane and applies the initial
 * enabled state of the dropbox tab from the module configuration.
 *
 * @see org.olat.core.gui.control.generic.tabbable.TabbableController#addTabs(org.olat.core.gui.components.TabbedPane)
 */
public void addTabs(TabbedPane theTabbedPane) {
    this.myTabbedPane = theTabbedPane;
    theTabbedPane.addTab(translate(PANE_TAB_ACCESSIBILITY), accessabilityVC);
    theTabbedPane.addTab(translate(PANE_TAB_OPTIONS), optionsFormVC);
    theTabbedPane.addTab(translate(PANE_TAB_ACCOUNT_MANAGEMENT), accountManagementFormVC);
    theTabbedPane.addTab(translate(PANE_TAB_CONF_MODULES), editModules);
    dropboxTabPosition = theTabbedPane.addTab(translate(PANE_TAB_CONF_DROPBOX), editDropbox);
    // TODO:cg 28.01.2010 no assessment-tool in V1.0
    // scoringTabPosition = myTabbedPane.addTab(translate(PANE_TAB_CONF_SCORING), editScoring);
    // The dropbox tab is active unless the module is explicitly disabled (default: enabled).
    Boolean bool = (Boolean) config.get(ProjectBrokerCourseNode.CONF_DROPBOX_ENABLED);
    theTabbedPane.setEnabled(dropboxTabPosition, bool == null || bool.booleanValue());
    bool = (Boolean) config.get(ProjectBrokerCourseNode.CONF_SCORING_ENABLED);
    // myTabbedPane.setEnabled(scoringTabPosition, (bool != null) ? bool.booleanValue() : true);
}
/**
 * @see org.olat.core.gui.control.DefaultController#doDispose(boolean)
 */
@Override
protected void doDispose() {
    // Intentionally empty: this controller holds no manually managed resources.
}
/** Returns the keys identifying the configuration panes of this editor. */
@Override
public String[] getPaneKeys() {
    return paneKeys;
}
/** Returns the tabbed pane previously registered via {@code addTabs}. */
@Override
public TabbedPane getTabbedPane() {
    return myTabbedPane;
}
}
/**
 * VFS security callback for project broker folders. Read, list and copy are
 * always permitted; write-type operations (write, create folder, delete,
 * delete revisions) are denied while the folder is locked. A per-folder
 * quota is resolved once at construction time.
 */
class FolderCallback implements VFSSecurityCallback {

    private final boolean locked;
    private Quota quota = null;

    /**
     * @param relPath path used to look up the folder quota
     * @param folderLocked whether write-type operations are denied
     */
    public FolderCallback(String relPath, boolean folderLocked) {
        this.locked = folderLocked;
        initFolderQuota(relPath);
    }

    private void initFolderQuota(String relPath) {
        QuotaManager qm = QuotaManager.getInstance();
        quota = qm.getCustomQuota(relPath);
        if (quota == null) {
            // No custom quota configured for this path: derive one from the
            // default "power user" quota values.
            Quota defQuota = qm.getDefaultQuota(QuotaConstants.IDENTIFIER_DEFAULT_POWER);
            quota = QuotaManager.getInstance().createQuota(relPath, defQuota.getQuotaKB(), defQuota.getUlLimitKB());
        }
    }

    /** Reading is always allowed. */
    public boolean canRead() {
        return true;
    }

    /** Writing is allowed only while the folder is unlocked. */
    public boolean canWrite() {
        return !locked;
    }

    @Override
    public boolean canCreateFolder() {
        return !locked;
    }

    /** Deleting is allowed only while the folder is unlocked. */
    public boolean canDelete() {
        return !locked;
    }

    /** Listing is always allowed. */
    public boolean canList() {
        return true;
    }

    /** Copying is always allowed. */
    public boolean canCopy() {
        return true;
    }

    /** Returns the quota resolved at construction (or later injected). */
    public Quota getQuota() {
        return quota;
    }

    /** Replaces the folder quota. */
    public void setQuota(Quota quota) {
        this.quota = quota;
    }

    /** No notification subscription context for this folder. */
    public SubscriptionContext getSubscriptionContext() {
        return null;
    }

    @Override
    public boolean canDeleteRevisionsPermanently() {
        return !locked;
    }
}
| |
/*
* Copyright 2014 Groupon.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.arpnetworking.metrics.common.tailer;
import com.arpnetworking.commons.builder.OvalBuilder;
import com.arpnetworking.logback.annotations.LogValue;
import com.arpnetworking.steno.LogValueMapFactory;
import com.arpnetworking.steno.Logger;
import com.arpnetworking.steno.LoggerFactory;
import com.arpnetworking.utility.TimerTrigger;
import com.arpnetworking.utility.Trigger;
import com.google.common.base.MoreObjects;
import net.sf.oval.constraint.NotNull;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.SeekableByteChannel;
import java.nio.file.Files;
import java.nio.file.NoSuchFileException;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.nio.file.attribute.BasicFileAttributes;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.time.Duration;
import java.util.Objects;
import java.util.Optional;
import javax.xml.bind.DatatypeConverter;
/**
* A reimplementation of the Apache Commons IO tailer based on the 2.5 snapshot
* version. This version attempts to address several shortcomings of the Apache
* Commons implementation. In particular, more robust support for rename-
* recreate file rotations and some progress for copy-truncate cases. The major
* new feature is the {@link PositionStore} which is used to checkpoint
* the offset in the tailed file as identified by a hash of the file prefix.
*
* @author Brandon Arp (brandon dot arp at inscopemetrics dot io)
* @author Ville Koskela (ville dot koskela at inscopemetrics dot io)
*/
public final class StatefulTailer implements Tailer {
/** Requests that the tailer stop; the run loop exits after its current pass. */
@Override
public void stop() {
    // Volatile write; observed by the loop conditions in run() and fileLoop().
    _isRunning = false;
}
/**
 * Main tailer loop: allocates a fresh line buffer per pass and delegates to
 * {@code fileLoop()} until {@code stop()} is called.
 */
@Override
public void run() {
    // Make sure any failure that escapes this thread is at least logged.
    final Thread.UncaughtExceptionHandler logUnhandled =
            (thread, throwable) -> LOGGER.error()
                    .setMessage("Unhandled exception")
                    .setThrowable(throwable)
                    .log();
    Thread.currentThread().setUncaughtExceptionHandler(logUnhandled);
    while (_isRunning) {
        try (ByteArrayOutputStream buffer = new ByteArrayOutputStream(INITIAL_BUFFER_SIZE)) {
            _lineBuffer = buffer;
            fileLoop();
        } catch (final IOException e) {
            // Closing a ByteArrayOutputStream has nothing to flush; an
            // exception here is safely ignorable.
        }
    }
}
/**
 * Generate a Steno log compatible representation.
 *
 * Includes live mutable state (e.g. the running flag) for diagnostics.
 *
 * @return Steno log compatible representation.
 */
@LogValue
public Object toLogValue() {
    return LogValueMapFactory.builder(this)
            .put("file", _file)
            .put("positionStore", _positionStore)
            .put("listener", _listener)
            .put("isRunning", _isRunning)
            .put("trigger", _trigger)
            .build();
}
/** Renders the Steno log representation as a plain string. */
@Override
public String toString() {
    return toLogValue().toString();
}
/**
 * Determine if the {@link Tailer} is running.
 *
 * @return {@code True} if and only if the {@link Tailer} is running.
 */
protected boolean isRunning() {
    // Volatile read; safe to call from any thread.
    return _isRunning;
}
/**
 * Outer loop: repeatedly opens the tailed file, positions the reader via
 * {@code resume(...)} and delegates to {@code readLoop(...)}. Re-entered
 * whenever {@code readLoop} returns after detecting a rotation. Exits when
 * the tailer is stopped or an exception escapes (which is forwarded to the
 * listener).
 */
private void fileLoop() {
    // Only the very first open honors the configured initial position;
    // re-opens after rotation always start at the beginning of the file.
    InitialPosition nextInitialPosition = _initialPosition;
    int openFileAttempt = 0;
    try {
        while (isRunning()) {
            // Attempt to open the file
            openFileAttempt++;
            try (SeekableByteChannel reader = Files.newByteChannel(_file, StandardOpenOption.READ)) {
                LOGGER.trace()
                        .setMessage("Opened file")
                        .addData("file", _file)
                        .log();
                // Position the reader
                resume(reader, nextInitialPosition);
                _listener.fileOpened();
                // Any subsequent file opens we should start at the beginning
                nextInitialPosition = InitialPosition.START;
                // Read the file
                readLoop(reader);
                // Reset per file state
                _hash = Optional.empty();
                openFileAttempt = 0;
            } catch (final NoSuchFileException e) {
                // File missing (not yet created, or mid-rotation): notify and
                // back off using the attempt-aware trigger.
                _listener.fileNotFound();
                _trigger.waitOnFileNotFoundTrigger(openFileAttempt);
            }
        }
        // Clients may elect to kill the stateful tailer on an exception by calling stop, or they
        // may log the exception and continue. In the latter case it is strongly recommended that
        // clients pause before continuing; otherwise, if the error persists the stateful tailer
        // may create non-trivial load on the io subsystem.
        // NOTE: Any non-exception throwable will kill the stateful tailer.
    } catch (final InterruptedException e) {
        // Restore the interrupt status so callers can observe the interruption.
        Thread.currentThread().interrupt();
        handleThrowable(e);
        // CHECKSTYLE.OFF: IllegalCatch - Allow clients to decide how to handle exceptions
    } catch (final Exception e) {
        // CHECKSTYLE.ON: IllegalCatch
        handleThrowable(e);
    } finally {
        _hash = Optional.empty();
    }
}
/**
 * Positions the reader for a newly opened file. Starts from the supplied
 * initial position; if the file's prefix hash is known to the
 * {@link PositionStore}, resumes from the stored offset instead, optionally
 * capped by the maximum-offset-on-resume backlog limit.
 *
 * @param reader channel for the file being tailed
 * @param initialPosition fallback position when no checkpoint exists
 * @throws IOException on any channel failure
 */
private void resume(final SeekableByteChannel reader, final InitialPosition initialPosition) throws IOException {
    // Attempt to resume from checkpoint
    long position = initialPosition.get(reader);
    // Override position with last known position from store
    _hash = computeHash(reader, REQUIRED_BYTES_FOR_HASH);
    if (_hash.isPresent()) {
        final Optional<Long> storedPosition = _positionStore.getPosition(_hash.get());
        if (storedPosition.isPresent()) {
            // Optionally limit the size of the backlog to process
            final long fileSize = reader.size();
            if (_maximumOffsetOnResume.isPresent() && fileSize - storedPosition.get() > _maximumOffsetOnResume.get()) {
                position = fileSize - _maximumOffsetOnResume.get();
                // TODO(vkoskela): Discard the current potentially partial line [AINT-584]
            } else {
                position = storedPosition.get();
            }
        }
    }
    LOGGER.info()
            .setMessage("Starting tailer")
            .addData("file", _file)
            .addData("position", position)
            .log();
    reader.position(position);
}
/**
 * Inner loop: reads available data and detects file rotation. Rotation is
 * inferred from (in order) attribute-read failure, file shrinking below the
 * reader position, a read that yields no data despite a larger length, a
 * newer timestamp at equal length, or a differing prefix hash. Returns (to
 * {@code fileLoop}) once a rotation is detected or the tailer stops.
 */
// CHECKSTYLE.OFF: MethodLength - Nothing to refactor here.
private void readLoop(final SeekableByteChannel reader) throws IOException, InterruptedException {
    Optional<Long> lastChecked = Optional.empty();
    Optional<String> currentReaderPrefixHash = Optional.empty();
    int currentReaderPrefixHashLength = 0;
    while (isRunning()) {
        // Obtain properties of file we expect we are reading
        final Attributes attributes;
        try {
            attributes = getAttributes(_file, lastChecked);
        } catch (final NoSuchFileException t) {
            rotate(
                    Optional.of(reader),
                    String.format(
                            "File rotation detected based attributes access failure; file=%s",
                            _file));
            // Return to the file loop
            return;
        }
        if (attributes.getLength() < reader.position()) {
            // File was rotated; either:
            // 1) Position is past the length of the file
            // 2) The expected file is smaller than the current file
            rotate(
                    Optional.of(reader),
                    String.format(
                            "File rotation detected based on length, position and size; file=%s, length=%d, position=%d, size=%d",
                            _file,
                            attributes.getLength(),
                            reader.position(),
                            reader.size()));
            // Return to the file loop
            return;
        } else {
            // File was _likely_ not rotated
            if (reader.size() > reader.position()) {
                // There is more data in the file
                if (!readLines(reader)) {
                    // There actually isn't any more data in the file; this
                    // means the file was rotated and the new file has more
                    // data than the old file (e.g. rotation from empty).
                    // TODO(vkoskela): Account for missing final newline. [MAI-322]
                    // There is a degenerate case where the last line in a
                    // file does not have a newline. Then readLines will
                    // always find new data, but the file has been rotated
                    // away. We should buffer the contents of partial lines
                    // thereby detecting when the length grows whether we
                    // actually got more data in the current file.
                    rotate(
                            Optional.<SeekableByteChannel>empty(),
                            String.format(
                                    "File rotation detected based on length and no new data; file=%s, length=%d, position=%d",
                                    _file,
                                    attributes.getLength(),
                                    reader.position()));
                    // Return to the file loop
                    return;
                }
                lastChecked = Optional.of(attributes.getLastModifiedTime());
                // This control path, specifically, successfully reading
                // data from the file does not trigger a wait. This permits
                // continuous reading without pausing.
            } else if (attributes.isNewer()) {
                // The file does not contain any additional data, but its
                // last modified date is after the last read date. The file
                // must have rotated and contains the same length of
                // content. This can happen on periodic systems which log
                // the same data at the beginning of each period.
                rotate(
                        Optional.<SeekableByteChannel>empty(),
                        String.format(
                                "File rotation detected based equal length and position but newer"
                                        + "; file=%s, length=%d, position=%d, lastChecked=%s, attributes=%s",
                                _file,
                                attributes.getLength(),
                                reader.position(),
                                lastChecked.get(),
                                attributes));
                // Return to the file loop
                return;
            } else {
                // The files are the same size and the timestamps are the
                // same. This is more common than it sounds since file
                // modification timestamps are not very precise on many
                // file systems.
                //
                // Since we're not doing anything at this point let's hash
                // the first N bytes of the current file and the expected
                // file to see if we're still working on the same file.
                final Optional<Boolean> hashesSame = compareByHash(currentReaderPrefixHash, currentReaderPrefixHashLength);
                if (hashesSame.isPresent() && !hashesSame.get()) {
                    // The file rotated with the same length!
                    rotate(
                            Optional.<SeekableByteChannel>empty(),
                            String.format(
                                    "File rotation detected based on hash; file=%s",
                                    _file));
                    // Return to the file loop
                    return;
                }
                // else: the files are empty or the hashes are the same. In
                // either case we don't have enough data to determine if
                // the files are different; we'll need to wait and see when
                // more data is written if the size and age diverge.
                // TODO(vkoskela): Configurable maximum rotation hash size. [MAI-323]
                // TODO(vkoskela): Configurable minimum rotation hash size. [MAI-324]
                // TODO(vkoskela): Configurable identity hash size. [MAI-325]
                // TODO(vkoskela): We should add a rehash interval. [MAI-326]
                // This interval would be separate from the read interval,
                // and generally longer, preventing us from rehashing the
                // file every interval; but short enough that we don't wait
                // too long before realizing a slowly growing file was
                // rotated.
                // Read interval
                _trigger.waitOnReadTrigger();
            }
        }
        // Compute the prefix hash unless we have an identity
        final int newPrefixHashLength = (int) Math.min(reader.size(), REQUIRED_BYTES_FOR_HASH);
        if (!_hash.isPresent() && (currentReaderPrefixHashLength != newPrefixHashLength || !currentReaderPrefixHash.isPresent())) {
            currentReaderPrefixHashLength = newPrefixHashLength;
            currentReaderPrefixHash = computeHash(reader, currentReaderPrefixHashLength);
        }
        // Update the reader position
        updateCheckpoint(reader.position());
    }
}
private Attributes getAttributes(final Path file, final Optional<Long> lastChecked) throws IOException {
final BasicFileAttributes attributes = Files.readAttributes(file, BasicFileAttributes.class);
LOGGER.trace()
.setMessage("File attributes")
.addData("file", file)
.addData("lastModifiedTime", attributes.lastModifiedTime().toMillis())
.addData("size", attributes.size())
.log();
return new Attributes(
attributes.size(),
attributes.lastModifiedTime().toMillis(),
lastChecked.isPresent() && attributes.lastModifiedTime().toMillis() > lastChecked.get());
}
/**
 * Handles a detected rotation: optionally drains the old file for one more
 * read interval, then notifies the listener and logs the reason.
 *
 * @param reader the old file's channel, if still readable
 * @param reason human-readable description of why rotation was inferred
 */
private void rotate(final Optional<SeekableByteChannel> reader, final String reason) throws InterruptedException, IOException {
    // Allow a full read interval before calling it quits on the old file
    if (reader.isPresent()) {
        _trigger.waitOnReadTrigger();
        readLines(reader.get());
    }
    // Inform the listener
    _listener.fileRotated();
    LOGGER.info(reason);
}
/**
 * Reads available bytes from the channel and dispatches complete lines to
 * the listener. Handles '\n', '\r' and "\r\n" terminators; a trailing
 * partial line stays in {@code _lineBuffer} and the channel is rewound to
 * the start of it so the next call re-reads from a line boundary.
 *
 * @return {@code true} if any data (even a partial line) was read
 */
private boolean readLines(final SeekableByteChannel reader) throws IOException {
    // Compute the hash if not already set
    if (!_hash.isPresent() && reader.size() >= REQUIRED_BYTES_FOR_HASH) {
        _hash = computeHash(reader, REQUIRED_BYTES_FOR_HASH);
    }
    // Track current position in file and next read position
    // NOTE: The next read position is always the beginning of a line
    long position = reader.position();
    long nextReadPosition = position;
    // Reset buffers
    _buffer.clear();
    _lineBuffer.reset();
    // Process available data
    int bufferSize = reader.read(_buffer);
    boolean hasData = false;
    boolean hasCR = false;
    while (isRunning() && bufferSize != -1) {
        hasData = true;
        for (int i = 0; i < bufferSize; i++) {
            final byte ch = _buffer.get(i);
            switch (ch) {
                case '\n':
                    // '\n' always terminates a line; a preceding '\r' is
                    // folded into the same terminator ("\r\n").
                    hasCR = false;
                    handleLine();
                    nextReadPosition = position + i + 1;
                    updateCheckpoint(nextReadPosition);
                    break;
                case '\r':
                    if (hasCR) {
                        // Two '\r' in a row: the first was literal content.
                        _lineBuffer.write('\r');
                    }
                    hasCR = true;
                    break;
                default:
                    if (hasCR) {
                        // A lone '\r' followed by other data terminates a line.
                        hasCR = false;
                        handleLine();
                        nextReadPosition = position + i + 1;
                        updateCheckpoint(nextReadPosition);
                    }
                    _lineBuffer.write(ch);
            }
        }
        position = reader.position();
        _buffer.clear();
        bufferSize = reader.read(_buffer);
    }
    reader.position(nextReadPosition);
    return hasData;
}
/**
 * Re-opens the tailed path and compares its prefix hash against the
 * established identity hash (if any) or the supplied prefix hash.
 *
 * @param prefixHash prefix hash captured earlier from the open reader
 * @param prefixLength length the supplied prefix hash covers
 * @return present true/false for same/different; empty if the file could
 *         not be read for comparison
 */
private Optional<Boolean> compareByHash(final Optional<String> prefixHash, final int prefixLength) {
    final int appliedLength;
    if (_hash.isPresent()) {
        appliedLength = REQUIRED_BYTES_FOR_HASH;
    } else {
        appliedLength = prefixLength;
    }
    try (SeekableByteChannel reader = Files.newByteChannel(_file, StandardOpenOption.READ)) {
        final Optional<String> filePrefixHash = computeHash(
                reader,
                appliedLength);
        LOGGER.trace()
                .setMessage("Comparing hashes")
                .addData("hash1", prefixHash)
                .addData("filePrefixHash", filePrefixHash)
                .addData("size", appliedLength)
                .log();
        return Optional.of(Objects.equals(_hash.orElse(prefixHash.orElse(null)), filePrefixHash.orElse(null)));
    } catch (final IOException e) {
        return Optional.empty();
    }
}
/**
 * Computes an MD5 hex digest of the first {@code hashSize} bytes of the
 * channel. The channel's position is saved and restored, so this is
 * side-effect free with respect to the caller's read position.
 *
 * @param reader channel to hash
 * @param hashSize number of leading bytes to hash
 * @return the hex digest, or empty if {@code hashSize <= 0}, the file is
 *         too short, or EOF is hit unexpectedly
 */
private Optional<String> computeHash(final SeekableByteChannel reader, final int hashSize) throws IOException {
    // Don't hash empty data sets
    if (hashSize <= 0) {
        return Optional.empty();
    }
    // Validate sufficient data to compute the hash
    final long oldPosition = reader.position();
    reader.position(0);
    if (reader.size() < hashSize) {
        reader.position(oldPosition);
        LOGGER.trace()
                .setMessage("Reader size insufficient to compute hash")
                .addData("hashSize", hashSize)
                .addData("readerSize", reader.size())
                .log();
        return Optional.empty();
    }
    // Read the data to hash
    final ByteBuffer buffer = ByteBuffer.allocate(hashSize);
    int totalBytesRead = 0;
    while (totalBytesRead < hashSize) {
        final int bytesRead = reader.read(buffer);
        if (bytesRead < 0) {
            // NOTE(review): the early return here skips the position restore
            // below — presumably acceptable because callers treat empty as a
            // failed hash; confirm before relying on the position afterwards.
            LOGGER.warn()
                    .setMessage("Unexpected end of file reached")
                    .addData("totalBytesRead", totalBytesRead)
                    .log();
            return Optional.empty();
        }
        totalBytesRead += bytesRead;
    }
    // Compute the hash
    _md5.reset();
    final byte[] digest = _md5.digest(buffer.array());
    final String hash = DatatypeConverter.printHexBinary(digest);
    LOGGER.trace()
            .setMessage("Computed hash")
            .addData("hash", hash)
            .log();
    // Return the reader to its original state
    reader.position(oldPosition);
    return Optional.of(hash);
}
/**
 * Persists the read position keyed by the file's identity hash; a no-op
 * until enough data exists to have established the hash.
 */
private void updateCheckpoint(final long position) {
    if (_hash.isPresent()) {
        _positionStore.setPosition(_hash.get(), position);
    }
}
/** Delivers the buffered line bytes to the listener and clears the buffer. */
private void handleLine() {
    _listener.handle(_lineBuffer.toByteArray());
    _lineBuffer.reset();
}
/** Forwards a failure to the listener, which decides whether to stop the tailer. */
private void handleThrowable(final Throwable t) {
    _listener.handle(t);
}
// NOTE: Package private for testing
/* package private */ StatefulTailer(final Builder builder, final Trigger trigger) {
    _file = builder._file;
    _positionStore = builder._positionStore;
    _listener = builder._listener;
    _trigger = trigger;
    _buffer = ByteBuffer.allocate(INITIAL_BUFFER_SIZE);
    try {
        _md5 = MessageDigest.getInstance("MD5");
    } catch (final NoSuchAlgorithmException e) {
        // MD5 is a required MessageDigest algorithm on every JRE, so this
        // should be unreachable; escalate if it somehow happens.
        throw new RuntimeException(e);
    }
    _initialPosition = builder._initialPosition;
    _maximumOffsetOnResume = Optional.ofNullable(builder._maximumOffsetOnResume);
    // Give the listener a back-reference (e.g. so it can call stop()).
    _listener.initialize(this);
}
/** Constructs a tailer driven by a timer trigger at the builder's read interval. */
private StatefulTailer(final Builder builder) {
    // TODO(vkoskela): Configurable grace period separate from interval. [MAI-327]
    this(builder, new TimerTrigger(builder._readInterval));
}
private final Path _file;
private final PositionStore _positionStore;
private final TailerListener _listener;
// Reusable read buffer for readLines().
private final ByteBuffer _buffer;
// MD5 used only for file-identity hashing, not for security.
private final MessageDigest _md5;
private final InitialPosition _initialPosition;
private final Optional<Long> _maximumOffsetOnResume;
private final Trigger _trigger;
// Volatile: written by stop() from another thread, read by the run loop.
private volatile boolean _isRunning = true;
// Identity hash of the file's first REQUIRED_BYTES_FOR_HASH bytes, once known.
private Optional<String> _hash = Optional.empty();
// Accumulates the current (possibly partial) line; re-created each run() pass.
private ByteArrayOutputStream _lineBuffer;

private static final int REQUIRED_BYTES_FOR_HASH = 512;
private static final int INITIAL_BUFFER_SIZE = 65536;
private static final Logger LOGGER = LoggerFactory.getLogger(StatefulTailer.class);
/**
 * Immutable snapshot of a file's length and last-modified time, plus a flag
 * indicating whether the file changed since the previous observation.
 */
private static final class Attributes {

    private final long _length;
    private final long _lastModifiedTime;
    private final boolean _newer;

    private Attributes(final long length, final long lastModifiedTime, final boolean newer) {
        _length = length;
        _lastModifiedTime = lastModifiedTime;
        _newer = newer;
    }

    public long getLength() {
        return _length;
    }

    public long getLastModifiedTime() {
        return _lastModifiedTime;
    }

    public boolean isNewer() {
        return _newer;
    }

    @Override
    public String toString() {
        return MoreObjects.toStringHelper(this)
                .add("id", Integer.toHexString(System.identityHashCode(this)))
                .add("Length", _length)
                .add("LastModifiedTime", _lastModifiedTime)
                .add("Newer", _newer)
                .toString();
    }
}
/**
 * {@link com.arpnetworking.commons.builder.Builder} implementation for
 * {@link StatefulTailer}.
 *
 * @author Brandon Arp (brandon dot arp at inscopemetrics dot io)
 */
public static class Builder extends OvalBuilder<StatefulTailer> {
    /**
     * Public constructor.
     */
    public Builder() {
        super((java.util.function.Function<Builder, StatefulTailer>) StatefulTailer::new);
    }
    /**
     * Sets the file to read. Cannot be null or empty.
     *
     * @param value The file to read.
     * @return This instance of {@link Builder}
     */
    public Builder setFile(final Path value) {
        _file = value;
        return this;
    }
    /**
     * Sets the {@link PositionStore} to be used to checkpoint the
     * file read position. Cannot be null.
     *
     * @param value The {@link PositionStore} instance.
     * @return This instance of {@link Builder}
     */
    public Builder setPositionStore(final PositionStore value) {
        _positionStore = value;
        return this;
    }
    /**
     * Sets the {@link TailerListener} instance. Cannot be null.
     *
     * @param value The {@link TailerListener} instance.
     * @return This instance of {@link Builder}
     */
    public Builder setListener(final TailerListener value) {
        _listener = value;
        return this;
    }
    /**
     * Sets the interval between file reads. Optional. Default is 250
     * milliseconds.
     *
     * @param value The file read interval.
     * @return This instance of {@link Builder}
     */
    public Builder setReadInterval(final Duration value) {
        _readInterval = value;
        return this;
    }
    /**
     * Sets the tailer to start at the current end of the file.
     * Optional. Default is {@link InitialPosition#START}.
     *
     * @param initialPosition The initial position of the tailer
     * @return This instance of {@link Builder}
     */
    public Builder setInitialPosition(final InitialPosition initialPosition) {
        _initialPosition = initialPosition;
        return this;
    }
    /**
     * Sets the maximum offset on resume. Optional. Default is no maximum.
     *
     * @param maximumOffsetOnResume The maximum offset on resume.
     * @return This instance of {@link Builder}
     */
    public Builder setMaximumOffsetOnResume(final Long maximumOffsetOnResume) {
        _maximumOffsetOnResume = maximumOffsetOnResume;
        return this;
    }
    @NotNull
    private Path _file;
    @NotNull
    private PositionStore _positionStore;
    @NotNull
    private TailerListener _listener;
    @NotNull
    private Duration _readInterval = Duration.ofMillis(250);
    @NotNull
    private InitialPosition _initialPosition = InitialPosition.START;
    private Long _maximumOffsetOnResume = null;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.builder.component.dsl;
import javax.annotation.Generated;
import org.apache.camel.Component;
import org.apache.camel.builder.component.AbstractComponentBuilder;
import org.apache.camel.builder.component.ComponentBuilder;
import org.apache.camel.component.vertx.VertxComponent;
/**
* Send and receive messages to/from Vert.x Event Bus.
*
* Generated by camel-package-maven-plugin - do not edit this file!
*/
@Generated("org.apache.camel.maven.packaging.ComponentDslMojo")
// NOTE: Generated source (camel-package-maven-plugin) — regenerate rather
// than hand-editing; comments here are for reader orientation only.
public interface VertxComponentBuilderFactory {
    /**
     * Vert.x (camel-vertx)
     * Send and receive messages to/from Vert.x Event Bus.
     *
     * Category: eventbus,reactive
     * Since: 2.12
     * Maven coordinates: org.apache.camel:camel-vertx
     *
     * @return the dsl builder
     */
    static VertxComponentBuilder vertx() {
        return new VertxComponentBuilderImpl();
    }
    /**
     * Builder for the Vert.x component.
     */
    interface VertxComponentBuilder extends ComponentBuilder<VertxComponent> {
        /**
         * Hostname for creating an embedded clustered EventBus.
         *
         * The option is a: <code>java.lang.String</code> type.
         *
         * Group: common
         *
         * @param host the value to set
         * @return the dsl builder
         */
        default VertxComponentBuilder host(java.lang.String host) {
            doSetProperty("host", host);
            return this;
        }
        /**
         * Port for creating an embedded clustered EventBus.
         *
         * The option is a: <code>int</code> type.
         *
         * Group: common
         *
         * @param port the value to set
         * @return the dsl builder
         */
        default VertxComponentBuilder port(int port) {
            doSetProperty("port", port);
            return this;
        }
        /**
         * Timeout in seconds to wait for clustered Vertx EventBus to be ready.
         * The default value is 60.
         *
         * The option is a: <code>int</code> type.
         *
         * Default: 60
         * Group: common
         *
         * @param timeout the value to set
         * @return the dsl builder
         */
        default VertxComponentBuilder timeout(int timeout) {
            doSetProperty("timeout", timeout);
            return this;
        }
        /**
         * To use the given vertx EventBus instead of creating a new embedded
         * EventBus.
         *
         * The option is a: <code>io.vertx.core.Vertx</code> type.
         *
         * Group: common
         *
         * @param vertx the value to set
         * @return the dsl builder
         */
        default VertxComponentBuilder vertx(io.vertx.core.Vertx vertx) {
            doSetProperty("vertx", vertx);
            return this;
        }
        /**
         * Options to use for creating vertx.
         *
         * The option is a: <code>io.vertx.core.VertxOptions</code>
         * type.
         *
         * Group: common
         *
         * @param vertxOptions the value to set
         * @return the dsl builder
         */
        default VertxComponentBuilder vertxOptions(
                io.vertx.core.VertxOptions vertxOptions) {
            doSetProperty("vertxOptions", vertxOptions);
            return this;
        }
        /**
         * Allows for bridging the consumer to the Camel routing Error Handler,
         * which mean any exceptions occurred while the consumer is trying to
         * pickup incoming messages, or the likes, will now be processed as a
         * message and handled by the routing Error Handler. By default the
         * consumer will use the org.apache.camel.spi.ExceptionHandler to deal
         * with exceptions, that will be logged at WARN or ERROR level and
         * ignored.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Default: false
         * Group: consumer
         *
         * @param bridgeErrorHandler the value to set
         * @return the dsl builder
         */
        default VertxComponentBuilder bridgeErrorHandler(
                boolean bridgeErrorHandler) {
            doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
            return this;
        }
        /**
         * Whether the producer should be started lazy (on the first message).
         * By starting lazy you can use this to allow CamelContext and routes to
         * startup in situations where a producer may otherwise fail during
         * starting and cause the route to fail being started. By deferring this
         * startup to be lazy then the startup failure can be handled during
         * routing messages via Camel's routing error handlers. Beware that when
         * the first message is processed then creating and starting the
         * producer may take a little time and prolong the total processing time
         * of the processing.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Default: false
         * Group: producer
         *
         * @param lazyStartProducer the value to set
         * @return the dsl builder
         */
        default VertxComponentBuilder lazyStartProducer(
                boolean lazyStartProducer) {
            doSetProperty("lazyStartProducer", lazyStartProducer);
            return this;
        }
        /**
         * Whether autowiring is enabled. This is used for automatic autowiring
         * options (the option must be marked as autowired) by looking up in the
         * registry to find if there is a single instance of matching type,
         * which then gets configured on the component. This can be used for
         * automatic configuring JDBC data sources, JMS connection factories,
         * AWS Clients, etc.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Default: true
         * Group: advanced
         *
         * @param autowiredEnabled the value to set
         * @return the dsl builder
         */
        default VertxComponentBuilder autowiredEnabled(boolean autowiredEnabled) {
            doSetProperty("autowiredEnabled", autowiredEnabled);
            return this;
        }
        /**
         * To use a custom VertxFactory implementation.
         *
         * The option is a:
         * <code>io.vertx.core.spi.VertxFactory</code> type.
         *
         * Group: advanced
         *
         * @param vertxFactory the value to set
         * @return the dsl builder
         */
        default VertxComponentBuilder vertxFactory(
                io.vertx.core.spi.VertxFactory vertxFactory) {
            doSetProperty("vertxFactory", vertxFactory);
            return this;
        }
    }
    // Internal implementation: maps DSL option names to the concrete
    // VertxComponent setters via the switch in setPropertyOnComponent.
    class VertxComponentBuilderImpl
            extends
                AbstractComponentBuilder<VertxComponent>
            implements
                VertxComponentBuilder {
        @Override
        protected VertxComponent buildConcreteComponent() {
            return new VertxComponent();
        }
        @Override
        protected boolean setPropertyOnComponent(
                Component component,
                String name,
                Object value) {
            switch (name) {
            case "host": ((VertxComponent) component).setHost((java.lang.String) value); return true;
            case "port": ((VertxComponent) component).setPort((int) value); return true;
            case "timeout": ((VertxComponent) component).setTimeout((int) value); return true;
            case "vertx": ((VertxComponent) component).setVertx((io.vertx.core.Vertx) value); return true;
            case "vertxOptions": ((VertxComponent) component).setVertxOptions((io.vertx.core.VertxOptions) value); return true;
            case "bridgeErrorHandler": ((VertxComponent) component).setBridgeErrorHandler((boolean) value); return true;
            case "lazyStartProducer": ((VertxComponent) component).setLazyStartProducer((boolean) value); return true;
            case "autowiredEnabled": ((VertxComponent) component).setAutowiredEnabled((boolean) value); return true;
            case "vertxFactory": ((VertxComponent) component).setVertxFactory((io.vertx.core.spi.VertxFactory) value); return true;
            default: return false;
            }
        }
    }
}
| |
package org.jabref.gui.externalfiletype;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.Optional;
import java.util.Set;
import java.util.TreeSet;
import org.jabref.Globals;
import org.jabref.gui.IconTheme;
import org.jabref.logic.l10n.Localization;
import org.jabref.model.entry.FileFieldWriter;
import org.jabref.model.entry.LinkedFile;
import org.jabref.model.strings.StringUtil;
import org.jabref.model.util.FileHelper;
import org.jabref.preferences.JabRefPreferences;
public final class ExternalFileTypes {
// This String is used in the encoded list in prefs of external file type
// modifications, in order to indicate a removed default file type:
private static final String FILE_TYPE_REMOVED_FLAG = "REMOVED";
// The only instance of this class:
private static ExternalFileTypes singleton;
// Map containing all registered external file types:
private final Set<ExternalFileType> externalFileTypes = new TreeSet<>();
private final ExternalFileType HTML_FALLBACK_TYPE = new ExternalFileType("URL", "html", "text/html", "", "www",
IconTheme.JabRefIcon.WWW.getSmallIcon());
private ExternalFileTypes() {
updateExternalFileTypes();
}
public static ExternalFileTypes getInstance() {
if (ExternalFileTypes.singleton == null) {
ExternalFileTypes.singleton = new ExternalFileTypes();
}
return ExternalFileTypes.singleton;
}
public static List<ExternalFileType> getDefaultExternalFileTypes() {
List<ExternalFileType> list = new ArrayList<>();
list.add(new ExternalFileType("PDF", "pdf", "application/pdf", "evince", "pdfSmall",
IconTheme.JabRefIcon.PDF_FILE.getSmallIcon()));
list.add(new ExternalFileType("PostScript", "ps", "application/postscript", "evince", "psSmall",
IconTheme.JabRefIcon.FILE.getSmallIcon()));
list.add(new ExternalFileType("Word", "doc", "application/msword", "oowriter", "openoffice",
IconTheme.JabRefIcon.FILE_WORD.getSmallIcon()));
list.add(new ExternalFileType("Word 2007+", "docx",
"application/vnd.openxmlformats-officedocument.wordprocessingml.document", "oowriter", "openoffice",
IconTheme.JabRefIcon.FILE_WORD.getSmallIcon()));
list.add(new ExternalFileType(Localization.lang("OpenDocument text"), "odt",
"application/vnd.oasis.opendocument.text", "oowriter", "openoffice", IconTheme.getImage("openoffice")));
list.add(new ExternalFileType("Excel", "xls", "application/excel", "oocalc", "openoffice",
IconTheme.JabRefIcon.FILE_EXCEL.getSmallIcon()));
list.add(new ExternalFileType("Excel 2007+", "xlsx",
"application/vnd.openxmlformats-officedocument.spreadsheetml.sheet", "oocalc", "openoffice",
IconTheme.JabRefIcon.FILE_EXCEL.getSmallIcon()));
list.add(new ExternalFileType(Localization.lang("OpenDocument spreadsheet"), "ods",
"application/vnd.oasis.opendocument.spreadsheet", "oocalc", "openoffice",
IconTheme.getImage("openoffice")));
list.add(new ExternalFileType("PowerPoint", "ppt", "application/vnd.ms-powerpoint", "ooimpress", "openoffice",
IconTheme.JabRefIcon.FILE_POWERPOINT.getSmallIcon()));
list.add(new ExternalFileType("PowerPoint 2007+", "pptx",
"application/vnd.openxmlformats-officedocument.presentationml.presentation", "ooimpress", "openoffice",
IconTheme.JabRefIcon.FILE_POWERPOINT.getSmallIcon()));
list.add(new ExternalFileType(Localization.lang("OpenDocument presentation"), "odp",
"application/vnd.oasis.opendocument.presentation", "ooimpress", "openoffice",
IconTheme.getImage("openoffice")));
list.add(new ExternalFileType("Rich Text Format", "rtf", "application/rtf", "oowriter", "openoffice",
IconTheme.JabRefIcon.FILE_TEXT.getSmallIcon()));
list.add(new ExternalFileType(Localization.lang("%0 image", "PNG"), "png", "image/png", "gimp", "picture",
IconTheme.JabRefIcon.PICTURE.getSmallIcon()));
list.add(new ExternalFileType(Localization.lang("%0 image", "GIF"), "gif", "image/gif", "gimp", "picture",
IconTheme.JabRefIcon.PICTURE.getSmallIcon()));
list.add(new ExternalFileType(Localization.lang("%0 image", "JPG"), "jpg", "image/jpeg", "gimp", "picture",
IconTheme.JabRefIcon.PICTURE.getSmallIcon()));
list.add(new ExternalFileType("Djvu", "djvu", "image/vnd.djvu", "evince", "psSmall",
IconTheme.JabRefIcon.FILE.getSmallIcon()));
list.add(new ExternalFileType("Text", "txt", "text/plain", "emacs", "emacs",
IconTheme.JabRefIcon.FILE_TEXT.getSmallIcon()));
list.add(new ExternalFileType("LaTeX", "tex", "application/x-latex", "emacs", "emacs",
IconTheme.JabRefIcon.FILE_TEXT.getSmallIcon()));
list.add(new ExternalFileType("CHM", "chm", "application/mshelp", "gnochm", "www",
IconTheme.JabRefIcon.WWW.getSmallIcon()));
list.add(new ExternalFileType(Localization.lang("%0 image", "TIFF"), "tiff", "image/tiff", "gimp", "picture",
IconTheme.JabRefIcon.PICTURE.getSmallIcon()));
list.add(new ExternalFileType("URL", "html", "text/html", "firefox", "www",
IconTheme.JabRefIcon.WWW.getSmallIcon()));
list.add(new ExternalFileType("MHT", "mht", "multipart/related", "firefox", "www",
IconTheme.JabRefIcon.WWW.getSmallIcon()));
list.add(new ExternalFileType("ePUB", "epub", "application/epub+zip", "firefox", "www",
IconTheme.JabRefIcon.WWW.getSmallIcon()));
// On all OSes there is a generic application available to handle file opening,
// so we don't need the default application settings anymore:
for (ExternalFileType type : list) {
type.setOpenWith("");
}
return list;
}
public Set<ExternalFileType> getExternalFileTypeSelection() {
return externalFileTypes;
}
/**
* Look up the external file type registered with this name, if any.
*
* @param name The file type name.
* @return The ExternalFileType registered, or null if none.
*/
public Optional<ExternalFileType> getExternalFileTypeByName(String name) {
for (ExternalFileType type : externalFileTypes) {
if (type.getName().equals(name)) {
return Optional.of(type);
}
}
// Return an instance that signifies an unknown file type:
return Optional.of(new UnknownExternalFileType(name));
}
/**
* Look up the external file type registered for this extension, if any.
*
* @param extension The file extension.
* @return The ExternalFileType registered, or null if none.
*/
public Optional<ExternalFileType> getExternalFileTypeByExt(String extension) {
for (ExternalFileType type : externalFileTypes) {
if (type.getExtension().equalsIgnoreCase(extension)) {
return Optional.of(type);
}
}
return Optional.empty();
}
/**
* Returns true if there is an external file type registered for this extension.
*
* @param extension The file extension.
* @return true if an ExternalFileType with the extension exists, false otherwise
*/
public boolean isExternalFileTypeByExt(String extension) {
for (ExternalFileType type : externalFileTypes) {
if (type.getExtension().equalsIgnoreCase(extension)) {
return true;
}
}
return false;
}
/**
* Look up the external file type name registered for this extension, if any.
*
* @param extension The file extension.
* @return The name of the ExternalFileType registered, or null if none.
*/
public String getExternalFileTypeNameByExt(String extension) {
for (ExternalFileType type : externalFileTypes) {
if (type.getExtension().equalsIgnoreCase(extension)) {
return type.getName();
}
}
return "";
}
/**
* Look up the external file type registered for this filename, if any.
*
* @param filename The name of the file whose type to look up.
* @return The ExternalFileType registered, or null if none.
*/
public Optional<ExternalFileType> getExternalFileTypeForName(String filename) {
int longestFound = -1;
ExternalFileType foundType = null;
for (ExternalFileType type : externalFileTypes) {
if (!type.getExtension().isEmpty() && filename.toLowerCase(Locale.ROOT).endsWith(type.getExtension().toLowerCase(Locale.ROOT))
&& (type.getExtension().length() > longestFound)) {
longestFound = type.getExtension().length();
foundType = type;
}
}
return Optional.ofNullable(foundType);
}
/**
* Look up the external file type registered for this MIME type, if any.
*
* @param mimeType The MIME type.
* @return The ExternalFileType registered, or null if none. For the mime type "text/html", a valid file type is
* guaranteed to be returned.
*/
public Optional<ExternalFileType> getExternalFileTypeByMimeType(String mimeType) {
for (ExternalFileType type : externalFileTypes) {
if (type.getMimeType().equalsIgnoreCase(mimeType)) {
return Optional.of(type);
}
}
if ("text/html".equalsIgnoreCase(mimeType)) {
return Optional.of(HTML_FALLBACK_TYPE);
} else {
return Optional.empty();
}
}
/**
* Reset the List of external file types after user customization.
*
* @param types The new List of external file types. This is the complete list, not just new entries.
*/
public void setExternalFileTypes(List<ExternalFileType> types) {
// First find a list of the default types:
List<ExternalFileType> defTypes = getDefaultExternalFileTypes();
// Make a list of types that are unchanged:
List<ExternalFileType> unchanged = new ArrayList<>();
externalFileTypes.clear();
for (ExternalFileType type : types) {
externalFileTypes.add(type);
// See if we can find a type with matching name in the default type list:
ExternalFileType found = null;
for (ExternalFileType defType : defTypes) {
if (defType.getName().equals(type.getName())) {
found = defType;
break;
}
}
if (found != null) {
// Found it! Check if it is an exact match, or if it has been customized:
if (found.equals(type)) {
unchanged.add(type);
} else {
// It was modified. Remove its entry from the defaults list, since
// the type hasn't been removed:
defTypes.remove(found);
}
}
}
// Go through unchanged types. Remove them from the ones that should be stored,
// and from the list of defaults, since we don't need to mention these in prefs:
for (ExternalFileType type : unchanged) {
defTypes.remove(type);
types.remove(type);
}
// Now set up the array to write to prefs, containing all new types, all modified
// types, and a flag denoting each default type that has been removed:
String[][] array = new String[types.size() + defTypes.size()][];
int i = 0;
for (ExternalFileType type : types) {
array[i] = type.getStringArrayRepresentation();
i++;
}
for (ExternalFileType type : defTypes) {
array[i] = new String[] {type.getName(), FILE_TYPE_REMOVED_FLAG};
i++;
}
Globals.prefs.put(JabRefPreferences.EXTERNAL_FILE_TYPES, FileFieldWriter.encodeStringArray(array));
}
/**
* Set up the list of external file types, either from default values, or from values recorded in Preferences.
*/
private void updateExternalFileTypes() {
// First get a list of the default file types as a starting point:
List<ExternalFileType> types = getDefaultExternalFileTypes();
// If no changes have been stored, simply use the defaults:
if (Globals.prefs.get(JabRefPreferences.EXTERNAL_FILE_TYPES, null) == null) {
externalFileTypes.clear();
externalFileTypes.addAll(types);
return;
}
// Read the prefs information for file types:
String[][] vals = StringUtil
.decodeStringDoubleArray(Globals.prefs.get(JabRefPreferences.EXTERNAL_FILE_TYPES, ""));
for (String[] val : vals) {
if ((val.length == 2) && val[1].equals(FILE_TYPE_REMOVED_FLAG)) {
// This entry indicates that a default entry type should be removed:
ExternalFileType toRemove = null;
for (ExternalFileType type : types) {
if (type.getName().equals(val[0])) {
toRemove = type;
break;
}
}
// If we found it, remove it from the type list:
if (toRemove != null) {
types.remove(toRemove);
}
} else {
// A new or modified entry type. Construct it from the string array:
ExternalFileType type = ExternalFileType.buildFromArgs(val);
// Check if there is a default type with the same name. If so, this is a
// modification of that type, so remove the default one:
ExternalFileType toRemove = null;
for (ExternalFileType defType : types) {
if (type.getName().equals(defType.getName())) {
toRemove = defType;
break;
}
}
// If we found it, remove it from the type list:
if (toRemove != null) {
types.remove(toRemove);
}
// Then add the new one:
types.add(type);
}
}
// Finally, build the list of types based on the modified defaults list:
externalFileTypes.addAll(types);
}
public Optional<ExternalFileType> getExternalFileTypeByFile(Path file) {
final String filePath = file.toString();
final Optional<String> extension = FileHelper.getFileExtension(filePath);
return extension.flatMap(this::getExternalFileTypeByExt);
}
public Optional<ExternalFileType> fromLinkedFile(LinkedFile linkedFile, boolean deduceUnknownType) {
Optional<ExternalFileType> type = getExternalFileTypeByName(linkedFile.getFileType());
boolean isUnknownType = !type.isPresent() || (type.get() instanceof UnknownExternalFileType);
if (isUnknownType && deduceUnknownType) {
// No file type was recognized. Try to find a usable file type based on mime type:
Optional<ExternalFileType> mimeType = getExternalFileTypeByMimeType(linkedFile.getFileType());
if (mimeType.isPresent()) {
return mimeType;
}
// No type could be found from mime type. Try based on the extension:
return FileHelper.getFileExtension(linkedFile.getLink())
.flatMap(this::getExternalFileTypeByExt);
} else {
return type;
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.jmeter.protocol.ldap.sampler;
import java.util.Hashtable;
import javax.naming.Context;
import javax.naming.NamingException;
import javax.naming.NamingEnumeration;
import javax.naming.directory.Attributes;
import javax.naming.directory.DirContext;
import javax.naming.directory.InitialDirContext;
import javax.naming.directory.ModificationItem;
import javax.naming.directory.SearchControls;
import javax.naming.directory.SearchResult;
import org.apache.jmeter.util.JMeterUtils;
import org.apache.jorphan.logging.LoggingManager;
import org.apache.log.Logger;
/*******************************************************************************
*
* author Dolf Smits(Dolf.Smits@Siemens.com) created Aug 09 2003 11:00 AM
 * company Siemens Netherlands N.V.
*
* Based on the work of: author T.Elanjchezhiyan(chezhiyan@siptech.co.in)
* created Apr 29 2003 11:00 AM company Sip Technologies and Exports Ltd.
*
******************************************************************************/
/*******************************************************************************
* Ldap Client class is main class to create ,modify, search and delete all the
* LDAP functionality available
******************************************************************************/
public class LdapExtClient {
    private static final Logger log = LoggingManager.getLoggerForClass();

    private static final String CONTEXT_IS_NULL = "Context is null";

    /**
     * Constructor for the LdapClient object.
     * <p>
     * All operations are exposed as static methods; the constructor is kept
     * public for backward compatibility only.
     */
    public LdapExtClient() {
    }

    /**
     * connect to server
     *
     * @param host
     *            name of the server to connect
     * @param port
     *            port of the server to connect (may be empty to use the default)
     * @param rootdn
     *            base of the tree to operate on
     * @param username
     *            name of the user to use for binding
     * @param password
     *            password to use for binding
     * @param connTimeOut
     *            connection timeout for connecting the server see
     *            "com.sun.jndi.ldap.connect.timeout" (may be empty for no timeout)
     * @param secure
     *            flag whether ssl should be used
     * @return newly created {@link DirContext}
     * @exception NamingException
     *                when creating the {@link DirContext} fails
     */
    public static DirContext connect(String host, String port, String rootdn, String username, String password, String connTimeOut, boolean secure)
            throws NamingException {
        DirContext dirContext;
        Hashtable<String, String> env = new Hashtable<>();
        env.put(Context.INITIAL_CONTEXT_FACTORY, "com.sun.jndi.ldap.LdapCtxFactory"); // $NON-NLS-1$
        // Build the provider URL: ldap[s]://host[:port]/rootdn
        StringBuilder sb = new StringBuilder(80);
        if (secure) {
            sb.append("ldaps://"); // $NON-NLS-1$
        } else {
            sb.append("ldap://"); // $NON-NLS-1$
        }
        sb.append(host);
        if (!port.isEmpty()) {
            sb.append(":"); // $NON-NLS-1$
            sb.append(port);
        }
        sb.append("/"); // $NON-NLS-1$
        sb.append(rootdn);
        env.put(Context.PROVIDER_URL, sb.toString());
        log.info("prov_url= " + env.get(Context.PROVIDER_URL)); // $NON-NLS-1$
        if (!connTimeOut.isEmpty()) {
            env.put("com.sun.jndi.ldap.connect.timeout", connTimeOut); // $NON-NLS-1$
        }
        env.put(Context.REFERRAL, "throw"); // $NON-NLS-1$
        // batchsize 0: return results as one block rather than in batches
        env.put("java.naming.batchsize", "0"); // $NON-NLS-1$ // $NON-NLS-2$
        env.put(Context.SECURITY_CREDENTIALS, password);
        env.put(Context.SECURITY_PRINCIPAL, username);
        dirContext = new InitialDirContext(env);
        return dirContext;
    }

    /**
     * disconnect from the server
     *
     * @param dirContext
     *            context do disconnect (may be <code>null</code>, in which case
     *            the call is a no-op)
     */
    public static void disconnect(DirContext dirContext) {
        if (dirContext == null) {
            log.info("Cannot disconnect null context");
            return;
        }

        try {
            dirContext.close();
        } catch (NamingException e) {
            // Best-effort close: log and continue, as the caller cannot recover anyway
            log.warn("Ldap client disconnect - ", e);
        }
    }

    /***************************************************************************
     * Filter the data in the ldap directory for the given search base
     *
     * @param dirContext
     *            context to perform the search on
     *
     * @param searchBase
     *            base where the search should start
     * @param searchFilter
     *            filter this value from the base
     * @param scope
     *            scope for search. May be one of
     *            {@link SearchControls#OBJECT_SCOPE},
     *            {@link SearchControls#ONELEVEL_SCOPE} or
     *            {@link SearchControls#SUBTREE_SCOPE}
     * @param countlim
     *            max number of results to get, <code>0</code> for all entries
     * @param timelim
     *            max time to wait for entries (in milliseconds), <code>0</code>
     *            for unlimited time
     * @param attrs
     *            list of attributes to return. If <code>null</code> all
     *            attributes will be returned. If empty, none will be returned
     * @param retobj
     *            flag whether the objects should be returned
     * @param deref
     *            flag whether objects should be dereferenced
     * @return result of the search
     * @throws NamingException
     *             when searching fails
     **************************************************************************/
    public static NamingEnumeration<SearchResult> searchTest(DirContext dirContext, String searchBase, String searchFilter, int scope, long countlim,
            int timelim, String[] attrs, boolean retobj, boolean deref) throws NamingException {
        if (dirContext == null) {
            throw new NamingException(CONTEXT_IS_NULL);
        }
        if (log.isDebugEnabled()) {
            log.debug(
                    "searchBase=" + searchBase +
                    " scope=" + scope +
                    " countlim=" + countlim +
                    " timelim=" + timelim +
                    " attrs=" + JMeterUtils.unsplit(attrs, ",") +
                    " retobj=" + retobj +
                    " deref=" + deref +
                    " filter=" + searchFilter
                    );
        }
        // Initialize directly instead of the previous redundant null-then-assign
        SearchControls searchcontrols = new SearchControls(scope, countlim, timelim, attrs, retobj, deref);
        return dirContext.search(searchBase, searchFilter, searchcontrols);
    }

    /***************************************************************************
     * Filter the data in the ldap directory
     *
     * @param dirContext
     *            the context to operate on
     * @param filter
     *            filter this value from the base
     * @param entrydn
     *            distinguished name of entry to compare
     * @return result of the search
     * @throws NamingException
     *             when searching fails
     **************************************************************************/
    public static NamingEnumeration<SearchResult> compare(DirContext dirContext, String filter, String entrydn) throws NamingException {
        if (dirContext == null) {
            throw new NamingException(CONTEXT_IS_NULL);
        }
        // OBJECT_SCOPE (0), at most one entry, no time limit, no attributes returned
        SearchControls searchcontrols = new SearchControls(0, 1, 0, new String[0], false, false);
        return dirContext.search(entrydn, filter, searchcontrols);
    }

    /***************************************************************************
     * ModDN the data in the ldap directory for the given search base
     *
     * @param dirContext
     *            context to operate on
     * @param ddn
     *            distinguished name name of object to rename
     * @param newdn
     *            new distinguished name of object
     * @throws NamingException
     *             when renaming fails
     *
     **************************************************************************/
    public static void moddnOp(DirContext dirContext, String ddn, String newdn) throws NamingException {
        // Guard the debug message so the concatenation is skipped when debug is off
        if (log.isDebugEnabled()) {
            log.debug("ddn and newDn= " + ddn + "@@@@" + newdn);
        }
        if (dirContext == null) {
            throw new NamingException(CONTEXT_IS_NULL);
        }
        dirContext.rename(ddn, newdn);
    }

    /***************************************************************************
     * Modify the attribute in the ldap directory for the given string
     *
     * @param dirContext
     *            context to operate on
     * @param mods
     *            list of all the {@link ModificationItem}s to apply on
     *            <code>string</code>
     * @param string
     *            distinguished name of the object to modify
     * @throws NamingException
     *             when modification fails
     **************************************************************************/
    public static void modifyTest(DirContext dirContext, ModificationItem[] mods, String string) throws NamingException {
        if (dirContext == null) {
            throw new NamingException(CONTEXT_IS_NULL);
        }
        dirContext.modifyAttributes(string, mods);
    }

    /***************************************************************************
     * Create the entry in the ldap directory for the given string
     *
     * @param dirContext
     *            context to operate on
     * @param attributes
     *            add all the attributes and values from the attributes object
     * @param string
     *            distinguished name of the subcontext to create
     * @return newly created subcontext
     * @throws NamingException
     *             when creating subcontext fails
     **************************************************************************/
    public static DirContext createTest(DirContext dirContext, Attributes attributes, String string)
            throws NamingException {
        if (dirContext == null) {
            throw new NamingException(CONTEXT_IS_NULL);
        }
        return dirContext.createSubcontext(string, attributes);
    }

    /***************************************************************************
     * Delete the attribute from the ldap directory
     *
     * @param dirContext
     *            context to operate on
     * @param string
     *            distinguished name of the subcontext to destroy
     * @throws NamingException
     *             when destroying the subcontext fails
     **************************************************************************/
    public static void deleteTest(DirContext dirContext, String string) throws NamingException {
        if (dirContext == null) {
            throw new NamingException(CONTEXT_IS_NULL);
        }
        dirContext.destroySubcontext(string);
    }
}
| |
/*
* Entwined STM
*
* (c) Copyright 2013 CERN. This software is distributed under the terms of the Apache License Version 2.0, copied
* verbatim in the file "COPYING". In applying this licence, CERN does not waive the privileges and immunities granted
* to it by virtue of its status as an Intergovernmental Organization or submit itself to any jurisdiction.
*/
package cern.entwined;
import com.google.common.base.Function;
/**
* This class is aimed at simplification of client collection and snapshot implementations. Simple adapter mapping array
* index to transactional collection would suffice.
* <p>
* <b>DISCLAIMER</b> This collection must never be used by clients directly! Use it only to implement custom collections
* and snapshots.
*
* @author Ivan Koblik
*/
@SuppressWarnings({ "unchecked", "rawtypes" })
public final class CompositeCollection extends SemiPersistent<CompositeCollection> {

    /**
     * Array of transactional references or collections. The element order is the
     * index space exposed by {@link #get(int)}.
     */
    private final SemiPersistent<SemiPersistent>[] references;

    /**
     * Array of references that have been accessed by the client.
     * <p>
     * Access flags may only be modified in the constructor's body or in {@link CompositeCollection#unsafeGet(int)}
     * other methods are not allowed to modify them.
     */
    private final boolean[] accessed;

    /**
     * Function must be used to perform first time access to a reference.
     * Strategy differs by how this instance was created: plain ({@link SimpleGet}),
     * clean copy ({@link CleanCopy}) or dirty copy ({@link DirtyCopy}).
     */
    private final Function<Integer, SemiPersistent<SemiPersistent>> initialAccessor;

    /**
     * Creates a new {@link CompositeCollection} with given references.
     *
     * @param references Transactional references or collections. Elements cannot be null.
     * @throws IllegalArgumentException if the array is empty or any element is null
     */
    public CompositeCollection(SemiPersistent... references) {
        Utils.checkNull("References", references);
        if (0 == references.length) {
            throw new IllegalArgumentException("At least one reference must be specified");
        }

        // Verify that all items are non null.
        for (int i = 0; i < references.length; i++) {
            SemiPersistent value = references[i];
            if (null == value) {
                throw new IllegalArgumentException("Value at " + i + " is null");
            }
        }
        // Defensive copy: later mutation of the caller's array must not affect us.
        this.references = this.copyReferences(references);
        // By default all values in the array are false, see java spec 4.5.5 "Initial Values of Variables"
        this.accessed = new boolean[references.length];
        this.initialAccessor = new SimpleGet();
    }

    /**
     * Creates a new {@link CompositeCollection} from either a clean or dirty copy of the original collection.
     *
     * @param originalCollection The original collection.
     * @param cleanCopy True if clean copy, false if dirty copy.
     */
    private CompositeCollection(CompositeCollection originalCollection, boolean cleanCopy) {
        this.references = this.copyReferences(originalCollection.references);
        if (cleanCopy) {
            // Clean copy: drop all access flags; elements are clean-copied lazily
            // on first access via CleanCopy.
            this.initialAccessor = new CleanCopy();
            this.accessed = new boolean[originalCollection.accessed.length];
        } else {
            // Dirty copy: keep the access flags and eagerly dirty-copy every
            // element that was already accessed; the rest are dirty-copied
            // lazily via DirtyCopy.
            this.initialAccessor = new DirtyCopy();
            this.accessed = this.copyAccesses(originalCollection.accessed);
            for (int i = 0; i < this.references.length; i++) {
                if (this.accessed[i]) {
                    this.references[i] = this.references[i].dirtyCopy();
                }
            }
        }
    }

    /**
     * Creates a new {@link CompositeCollection} committing localCollection into globalState.
     *
     * @param localCollection The local collection.
     * @param globalState The global state.
     */
    private CompositeCollection(CompositeCollection localCollection, CompositeCollection globalState) {
        // Copy global state's references, and update accessed references later.
        this.initialAccessor = new SimpleGet();
        this.references = this.copyReferences(globalState.references);
        this.accessed = new boolean[this.references.length];
        for (int i = 0; i < this.references.length; i++) {
            if (localCollection.accessed[i]) {
                // Commit only references that have been accessed.
                references[i] = localCollection.references[i].commit(globalState.unsafeGet(i));
            }
        }
    }

    /**
     * {@inheritDoc} <br>
     * <i>Visibility is changed to public to let client collections use STM library collections through this composite,
     * but it also means that client collection must not expose this class.</i>
     */
    @Override
    public final CompositeCollection cleanCopy() {
        return new CompositeCollection(this, true);
    }

    /**
     * {@inheritDoc} <br>
     * <i>Visibility is changed to public to let client collections use STM library collections through this composite,
     * but it also means that client collection must not expose this class.</i>
     */
    @Override
    public final CompositeCollection dirtyCopy() {
        return new CompositeCollection(this, false);
    }

    /**
     * {@inheritDoc} <br>
     * <i>Visibility is changed to public to let client collections use STM library collections through this composite,
     * but it also means that client collection must not expose this class.</i>
     */
    public void update(CompositeCollection changes, boolean onlyReadLogs) {
        Utils.checkNull("Updated collections", changes);
        // Only elements touched in `changes` are propagated; untouched ones keep
        // their current state here.
        for (int i = 0; i < this.references.length; i++) {
            if (changes.accessed[i]) {
                this.unsafeGet(i).update(changes.references[i], onlyReadLogs);
            }
        }
    }

    /*
     * (non-Javadoc)
     *
     * @see cern.oasis.server.stm.Transactional#commit(java.lang.Object)
     */
    @Override
    public final CompositeCollection commit(CompositeCollection globalState) {
        if (!this.isAccessed()) {
            // No references have been accessed, return the global state as it is.
            return globalState;
        } else {
            return new CompositeCollection(this, globalState);
        }
    }

    /**
     * Returns a transactional reference or collection with the give index.
     * <p>
     * <b>Note:</b> Be very careful with this method! Due to the way generics are implemented in Java, no real type
     * checking is performed. This method silently casts element to the desired type.
     * <p>
     * See the example below, both lines of code are getting element number zero from the same collection, none of them
     * will be statically checked by the compiler , but <b>it is certain that in runtime at least one of them is due to
     * fail</b>.
     *
     * <pre>
     * TransactionalRef&lt;Integer&gt; ref = compositeCollection.get(0);
     * TransactionalMap&lt;Integer, Double&gt; map = compositeCollection.get(0);
     * </pre>
     *
     * @param <T> The desired type of the resulting value.
     * @param idx The index.
     * @return Requested transactional reference or collection.
     * @throws IllegalArgumentException if the index is out of bounds
     */
    public final <T extends SemiPersistent> T get(int idx) {
        if (idx < 0 || idx >= this.references.length) {
            throw new IllegalArgumentException("Index " + idx + " is out of bounds");
        }
        return this.<T> unsafeGet(idx);
    }

    /**
     * This method should be used internally instead of {@link CompositeCollection#get(int)}.
     * On first access it materializes the element through {@link #initialAccessor}
     * and records the access flag; subsequent calls return the cached element.
     *
     * @param <T> The desired type of the resulting value.
     * @param idx The index.
     * @return Requested transactional reference or collection.
     */
    private <T extends SemiPersistent> T unsafeGet(int idx) {
        if (this.accessed[idx]) {
            return (T) this.references[idx];
        } else {
            T value = (T) this.initialAccessor.apply(idx);
            this.accessed[idx] = true;
            this.references[idx] = value;
            return value;
        }
    }

    /**
     * Copies array of transactional references and collections (shallow copy:
     * the elements themselves are shared).
     *
     * @param source The source array.
     * @return Copy of the source array.
     */
    private final SemiPersistent<SemiPersistent>[] copyReferences(SemiPersistent<SemiPersistent>[] source) {
        SemiPersistent<SemiPersistent>[] copy = new SemiPersistent[source.length];
        System.arraycopy(source, 0, copy, 0, source.length);
        return copy;
    }

    /**
     * Copies the references access log.
     *
     * @param access The source array.
     * @return Copy of the source array.
     */
    private final boolean[] copyAccesses(boolean[] access) {
        boolean[] copy = new boolean[access.length];
        System.arraycopy(access, 0, copy, 0, access.length);
        return copy;
    }

    /**
     * Returns true if any of the collection's items have been accessed.
     *
     * @return <code>true</code> if at least one of the collection's items has been accessed.
     */
    private final boolean isAccessed() {
        for (int i = 0; i < this.accessed.length; i++) {
            if (this.accessed[i]) {
                return true;
            }
        }
        return false;
    }

    /**
     * Reference access strategy that simply returns the reference.
     *
     * @author Ivan Koblik
     */
    private class SimpleGet implements Function<Integer, SemiPersistent<SemiPersistent>> {
        @Override
        public SemiPersistent<SemiPersistent> apply(Integer idx) {
            return CompositeCollection.this.references[idx];
        }
    }

    /**
     * Reference access strategy that returns a cleanCopy of the reference.
     *
     * @author Ivan Koblik
     */
    private class CleanCopy implements Function<Integer, SemiPersistent<SemiPersistent>> {
        @Override
        public SemiPersistent<SemiPersistent> apply(Integer idx) {
            return CompositeCollection.this.references[idx].cleanCopy();
        }
    }

    /**
     * Reference access strategy that returns a dirtyCopy of the reference.
     *
     * @author Ivan Koblik
     */
    private class DirtyCopy implements Function<Integer, SemiPersistent<SemiPersistent>> {
        @Override
        public SemiPersistent<SemiPersistent> apply(Integer idx) {
            return CompositeCollection.this.references[idx].dirtyCopy();
        }
    }
}
| |
/*
* Copyright 2002-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.mail.javamail;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.List;
import java.util.Properties;
import javax.activation.FileTypeMap;
import javax.mail.Address;
import javax.mail.Message;
import javax.mail.MessagingException;
import javax.mail.NoSuchProviderException;
import javax.mail.Session;
import javax.mail.Transport;
import javax.mail.URLName;
import javax.mail.internet.AddressException;
import javax.mail.internet.InternetAddress;
import javax.mail.internet.MimeMessage;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.springframework.mail.MailParseException;
import org.springframework.mail.MailSendException;
import org.springframework.mail.SimpleMailMessage;
import org.springframework.util.ObjectUtils;
import static org.junit.Assert.*;
/**
* @author Juergen Hoeller
* @author Stephane Nicoll
* @since 09.10.2004
*/
public class JavaMailSenderTests {
@Rule
public final ExpectedException thrown = ExpectedException.none();
@Test
public void javaMailSenderWithSimpleMessage() throws MessagingException, IOException {
MockJavaMailSender sender = new MockJavaMailSender();
sender.setHost("host");
sender.setPort(30);
sender.setUsername("username");
sender.setPassword("password");
SimpleMailMessage simpleMessage = new SimpleMailMessage();
simpleMessage.setFrom("me@mail.org");
simpleMessage.setReplyTo("reply@mail.org");
simpleMessage.setTo("you@mail.org");
simpleMessage.setCc(new String[] {"he@mail.org", "she@mail.org"});
simpleMessage.setBcc(new String[] {"us@mail.org", "them@mail.org"});
Date sentDate = new GregorianCalendar(2004, 1, 1).getTime();
simpleMessage.setSentDate(sentDate);
simpleMessage.setSubject("my subject");
simpleMessage.setText("my text");
sender.send(simpleMessage);
assertEquals("host", sender.transport.getConnectedHost());
assertEquals(30, sender.transport.getConnectedPort());
assertEquals("username", sender.transport.getConnectedUsername());
assertEquals("password", sender.transport.getConnectedPassword());
assertTrue(sender.transport.isCloseCalled());
assertEquals(1, sender.transport.getSentMessages().size());
MimeMessage sentMessage = sender.transport.getSentMessage(0);
List<Address> froms = Arrays.asList(sentMessage.getFrom());
assertEquals(1, froms.size());
assertEquals("me@mail.org", ((InternetAddress) froms.get(0)).getAddress());
List<Address> replyTos = Arrays.asList(sentMessage.getReplyTo());
assertEquals("reply@mail.org", ((InternetAddress) replyTos.get(0)).getAddress());
List<Address> tos = Arrays.asList(sentMessage.getRecipients(Message.RecipientType.TO));
assertEquals(1, tos.size());
assertEquals("you@mail.org", ((InternetAddress) tos.get(0)).getAddress());
List<Address> ccs = Arrays.asList(sentMessage.getRecipients(Message.RecipientType.CC));
assertEquals(2, ccs.size());
assertEquals("he@mail.org", ((InternetAddress) ccs.get(0)).getAddress());
assertEquals("she@mail.org", ((InternetAddress) ccs.get(1)).getAddress());
List<Address> bccs = Arrays.asList(sentMessage.getRecipients(Message.RecipientType.BCC));
assertEquals(2, bccs.size());
assertEquals("us@mail.org", ((InternetAddress) bccs.get(0)).getAddress());
assertEquals("them@mail.org", ((InternetAddress) bccs.get(1)).getAddress());
assertEquals(sentDate.getTime(), sentMessage.getSentDate().getTime());
assertEquals("my subject", sentMessage.getSubject());
assertEquals("my text", sentMessage.getContent());
}
public void testJavaMailSenderWithSimpleMessages() throws MessagingException, IOException {
MockJavaMailSender sender = new MockJavaMailSender();
sender.setHost("host");
sender.setUsername("username");
sender.setPassword("password");
SimpleMailMessage simpleMessage1 = new SimpleMailMessage();
simpleMessage1.setTo("he@mail.org");
SimpleMailMessage simpleMessage2 = new SimpleMailMessage();
simpleMessage2.setTo("she@mail.org");
sender.send(simpleMessage1, simpleMessage2);
assertEquals("host", sender.transport.getConnectedHost());
assertEquals("username", sender.transport.getConnectedUsername());
assertEquals("password", sender.transport.getConnectedPassword());
assertTrue(sender.transport.isCloseCalled());
assertEquals(2, sender.transport.getSentMessages().size());
MimeMessage sentMessage1 = sender.transport.getSentMessage(0);
List<Address> tos1 = Arrays.asList(sentMessage1.getRecipients(Message.RecipientType.TO));
assertEquals(1, tos1.size());
assertEquals("he@mail.org", ((InternetAddress) tos1.get(0)).getAddress());
MimeMessage sentMessage2 = sender.transport.getSentMessage(1);
List<Address> tos2 = Arrays.asList(sentMessage2.getRecipients(Message.RecipientType.TO));
assertEquals(1, tos2.size());
assertEquals("she@mail.org", ((InternetAddress) tos2.get(0)).getAddress());
}
public void testJavaMailSenderWithMimeMessage() throws MessagingException {
MockJavaMailSender sender = new MockJavaMailSender();
sender.setHost("host");
sender.setUsername("username");
sender.setPassword("password");
MimeMessage mimeMessage = sender.createMimeMessage();
mimeMessage.setRecipient(Message.RecipientType.TO, new InternetAddress("you@mail.org"));
sender.send(mimeMessage);
assertEquals("host", sender.transport.getConnectedHost());
assertEquals("username", sender.transport.getConnectedUsername());
assertEquals("password", sender.transport.getConnectedPassword());
assertTrue(sender.transport.isCloseCalled());
assertEquals(1, sender.transport.getSentMessages().size());
assertEquals(mimeMessage, sender.transport.getSentMessage(0));
}
	@Test
	public void javaMailSenderWithMimeMessages() throws MessagingException {
		MockJavaMailSender sender = new MockJavaMailSender();
		sender.setHost("host");
		sender.setUsername("username");
		sender.setPassword("password");
		MimeMessage mimeMessage1 = sender.createMimeMessage();
		mimeMessage1.setRecipient(Message.RecipientType.TO, new InternetAddress("he@mail.org"));
		MimeMessage mimeMessage2 = sender.createMimeMessage();
		mimeMessage2.setRecipient(Message.RecipientType.TO, new InternetAddress("she@mail.org"));
		sender.send(mimeMessage1, mimeMessage2);
		assertEquals("host", sender.transport.getConnectedHost());
		assertEquals("username", sender.transport.getConnectedUsername());
		assertEquals("password", sender.transport.getConnectedPassword());
		assertTrue(sender.transport.isCloseCalled());
		// Both MimeMessage instances go through the same mock transport, in send order.
		assertEquals(2, sender.transport.getSentMessages().size());
		assertEquals(mimeMessage1, sender.transport.getSentMessage(0));
		assertEquals(mimeMessage2, sender.transport.getSentMessage(1));
	}
	@Test
	public void javaMailSenderWithMimeMessagePreparator() {
		MockJavaMailSender sender = new MockJavaMailSender();
		sender.setHost("host");
		sender.setUsername("username");
		sender.setPassword("password");
		// Capture the MimeMessage handed to the preparator so it can be compared
		// with what the transport actually sent.
		final List<Message> messages = new ArrayList<Message>();
		MimeMessagePreparator preparator = new MimeMessagePreparator() {
			@Override
			public void prepare(MimeMessage mimeMessage) throws MessagingException {
				mimeMessage.setRecipient(Message.RecipientType.TO, new InternetAddress("you@mail.org"));
				messages.add(mimeMessage);
			}
		};
		sender.send(preparator);
		assertEquals("host", sender.transport.getConnectedHost());
		assertEquals("username", sender.transport.getConnectedUsername());
		assertEquals("password", sender.transport.getConnectedPassword());
		assertTrue(sender.transport.isCloseCalled());
		assertEquals(1, sender.transport.getSentMessages().size());
		assertEquals(messages.get(0), sender.transport.getSentMessage(0));
	}
	@Test
	public void javaMailSenderWithMimeMessagePreparators() {
		MockJavaMailSender sender = new MockJavaMailSender();
		sender.setHost("host");
		sender.setUsername("username");
		sender.setPassword("password");
		// Capture the MimeMessages handed to the preparators so they can be compared
		// with what the transport actually sent, in order.
		final List<Message> messages = new ArrayList<Message>();
		MimeMessagePreparator preparator1 = new MimeMessagePreparator() {
			@Override
			public void prepare(MimeMessage mimeMessage) throws MessagingException {
				mimeMessage.setRecipient(Message.RecipientType.TO, new InternetAddress("he@mail.org"));
				messages.add(mimeMessage);
			}
		};
		MimeMessagePreparator preparator2 = new MimeMessagePreparator() {
			@Override
			public void prepare(MimeMessage mimeMessage) throws MessagingException {
				mimeMessage.setRecipient(Message.RecipientType.TO, new InternetAddress("she@mail.org"));
				messages.add(mimeMessage);
			}
		};
		sender.send(preparator1, preparator2);
		assertEquals("host", sender.transport.getConnectedHost());
		assertEquals("username", sender.transport.getConnectedUsername());
		assertEquals("password", sender.transport.getConnectedPassword());
		assertTrue(sender.transport.isCloseCalled());
		assertEquals(2, sender.transport.getSentMessages().size());
		assertEquals(messages.get(0), sender.transport.getSentMessage(0));
		assertEquals(messages.get(1), sender.transport.getSentMessage(1));
	}
	@Test
	public void javaMailSenderWithMimeMessageHelper() throws MessagingException {
		MockJavaMailSender sender = new MockJavaMailSender();
		sender.setHost("host");
		sender.setUsername("username");
		sender.setPassword("password");
		MimeMessageHelper message = new MimeMessageHelper(sender.createMimeMessage());
		// With no encoding/file-type map configured on the sender, the helper's defaults apply.
		assertNull(message.getEncoding());
		assertTrue(message.getFileTypeMap() instanceof ConfigurableMimeFileTypeMap);
		message.setTo("you@mail.org");
		sender.send(message.getMimeMessage());
		assertEquals("host", sender.transport.getConnectedHost());
		assertEquals("username", sender.transport.getConnectedUsername());
		assertEquals("password", sender.transport.getConnectedPassword());
		assertTrue(sender.transport.isCloseCalled());
		assertEquals(1, sender.transport.getSentMessages().size());
		assertEquals(message.getMimeMessage(), sender.transport.getSentMessage(0));
	}
	@Test
	public void javaMailSenderWithMimeMessageHelperAndSpecificEncoding() throws MessagingException {
		MockJavaMailSender sender = new MockJavaMailSender();
		sender.setHost("host");
		sender.setUsername("username");
		sender.setPassword("password");
		// Encoding and file-type map supplied explicitly on the helper must win.
		MimeMessageHelper message = new MimeMessageHelper(sender.createMimeMessage(), "UTF-8");
		assertEquals("UTF-8", message.getEncoding());
		FileTypeMap fileTypeMap = new ConfigurableMimeFileTypeMap();
		message.setFileTypeMap(fileTypeMap);
		assertEquals(fileTypeMap, message.getFileTypeMap());
		message.setTo("you@mail.org");
		sender.send(message.getMimeMessage());
		assertEquals("host", sender.transport.getConnectedHost());
		assertEquals("username", sender.transport.getConnectedUsername());
		assertEquals("password", sender.transport.getConnectedPassword());
		assertTrue(sender.transport.isCloseCalled());
		assertEquals(1, sender.transport.getSentMessages().size());
		assertEquals(message.getMimeMessage(), sender.transport.getSentMessage(0));
	}
	@Test
	public void javaMailSenderWithMimeMessageHelperAndDefaultEncoding() throws MessagingException {
		MockJavaMailSender sender = new MockJavaMailSender();
		sender.setHost("host");
		sender.setUsername("username");
		sender.setPassword("password");
		// Defaults configured on the sender must be picked up by helpers created
		// from its MimeMessages.
		sender.setDefaultEncoding("UTF-8");
		FileTypeMap fileTypeMap = new ConfigurableMimeFileTypeMap();
		sender.setDefaultFileTypeMap(fileTypeMap);
		MimeMessageHelper message = new MimeMessageHelper(sender.createMimeMessage());
		assertEquals("UTF-8", message.getEncoding());
		assertEquals(fileTypeMap, message.getFileTypeMap());
		message.setTo("you@mail.org");
		sender.send(message.getMimeMessage());
		assertEquals("host", sender.transport.getConnectedHost());
		assertEquals("username", sender.transport.getConnectedUsername());
		assertEquals("password", sender.transport.getConnectedPassword());
		assertTrue(sender.transport.isCloseCalled());
		assertEquals(1, sender.transport.getSentMessages().size());
		assertEquals(message.getMimeMessage(), sender.transport.getSentMessage(0));
	}
@Test
public void javaMailSenderWithParseExceptionOnSimpleMessage() {
MockJavaMailSender sender = new MockJavaMailSender();
SimpleMailMessage simpleMessage = new SimpleMailMessage();
simpleMessage.setFrom("");
try {
sender.send(simpleMessage);
}
catch (MailParseException ex) {
// expected
assertTrue(ex.getCause() instanceof AddressException);
}
}
@Test
public void javaMailSenderWithParseExceptionOnMimeMessagePreparator() {
MockJavaMailSender sender = new MockJavaMailSender();
MimeMessagePreparator preparator = new MimeMessagePreparator() {
@Override
public void prepare(MimeMessage mimeMessage) throws MessagingException {
mimeMessage.setFrom(new InternetAddress(""));
}
};
try {
sender.send(preparator);
}
catch (MailParseException ex) {
// expected
assertTrue(ex.getCause() instanceof AddressException);
}
}
	@Test
	public void javaMailSenderWithCustomSession() throws MessagingException {
		final Session session = Session.getInstance(new Properties());
		// Override getTransport to verify the sender passes through the injected Session.
		MockJavaMailSender sender = new MockJavaMailSender() {
			@Override
			protected Transport getTransport(Session sess) throws NoSuchProviderException {
				assertEquals(session, sess);
				return super.getTransport(sess);
			}
		};
		sender.setSession(session);
		sender.setHost("host");
		sender.setUsername("username");
		sender.setPassword("password");
		MimeMessage mimeMessage = sender.createMimeMessage();
		// Subject "custom" triggers the sent-date assertion in MockTransport.sendMessage.
		mimeMessage.setSubject("custom");
		mimeMessage.setRecipient(Message.RecipientType.TO, new InternetAddress("you@mail.org"));
		mimeMessage.setSentDate(new GregorianCalendar(2005, 3, 1).getTime());
		sender.send(mimeMessage);
		assertEquals("host", sender.transport.getConnectedHost());
		assertEquals("username", sender.transport.getConnectedUsername());
		assertEquals("password", sender.transport.getConnectedPassword());
		assertTrue(sender.transport.isCloseCalled());
		assertEquals(1, sender.transport.getSentMessages().size());
		assertEquals(mimeMessage, sender.transport.getSentMessage(0));
	}
	@Test
	public void javaMailProperties() throws MessagingException {
		Properties props = new Properties();
		props.setProperty("bogusKey", "bogusValue");
		// Override getTransport to verify the configured JavaMail properties reach the Session.
		MockJavaMailSender sender = new MockJavaMailSender() {
			@Override
			protected Transport getTransport(Session sess) throws NoSuchProviderException {
				assertEquals("bogusValue", sess.getProperty("bogusKey"));
				return super.getTransport(sess);
			}
		};
		sender.setJavaMailProperties(props);
		sender.setHost("host");
		sender.setUsername("username");
		sender.setPassword("password");
		MimeMessage mimeMessage = sender.createMimeMessage();
		mimeMessage.setRecipient(Message.RecipientType.TO, new InternetAddress("you@mail.org"));
		sender.send(mimeMessage);
		assertEquals("host", sender.transport.getConnectedHost());
		assertEquals("username", sender.transport.getConnectedUsername());
		assertEquals("password", sender.transport.getConnectedPassword());
		assertTrue(sender.transport.isCloseCalled());
		assertEquals(1, sender.transport.getSentMessages().size());
		assertEquals(mimeMessage, sender.transport.getSentMessage(0));
	}
	@Test
	public void failedMailServerConnect() throws Exception {
		MockJavaMailSender sender = new MockJavaMailSender();
		// A null host makes MockTransport.connect throw, simulating a connect failure.
		sender.setHost(null);
		sender.setUsername("username");
		sender.setPassword("password");
		SimpleMailMessage simpleMessage1 = new SimpleMailMessage();
		try {
			sender.send(simpleMessage1);
			fail("Should have thrown MailSendException");
		}
		catch (MailSendException ex) {
			// expected: on connect failure every message is reported as failed,
			// keyed by the original message with the connect exception as value
			ex.printStackTrace();
			assertTrue(ex.getFailedMessages() != null);
			assertEquals(1, ex.getFailedMessages().size());
			assertSame(simpleMessage1, ex.getFailedMessages().keySet().iterator().next());
			assertSame(ex.getCause(), ex.getFailedMessages().values().iterator().next());
		}
	}
	@Test
	public void failedMailServerClose() throws Exception {
		MockJavaMailSender sender = new MockJavaMailSender();
		// An empty host makes MockTransport.close throw, simulating a close failure.
		sender.setHost("");
		sender.setUsername("username");
		sender.setPassword("password");
		SimpleMailMessage simpleMessage1 = new SimpleMailMessage();
		try {
			sender.send(simpleMessage1);
			fail("Should have thrown MailSendException");
		}
		catch (MailSendException ex) {
			// expected: a close failure is reported without any per-message failures
			ex.printStackTrace();
			assertTrue(ex.getFailedMessages() != null);
			assertEquals(0, ex.getFailedMessages().size());
		}
	}
@Test
public void failedSimpleMessage() throws Exception {
MockJavaMailSender sender = new MockJavaMailSender();
sender.setHost("host");
sender.setUsername("username");
sender.setPassword("password");
SimpleMailMessage simpleMessage1 = new SimpleMailMessage();
simpleMessage1.setTo("he@mail.org");
simpleMessage1.setSubject("fail");
SimpleMailMessage simpleMessage2 = new SimpleMailMessage();
simpleMessage2.setTo("she@mail.org");
try {
sender.send(simpleMessage1, simpleMessage2);
}
catch (MailSendException ex) {
ex.printStackTrace();
assertEquals("host", sender.transport.getConnectedHost());
assertEquals("username", sender.transport.getConnectedUsername());
assertEquals("password", sender.transport.getConnectedPassword());
assertTrue(sender.transport.isCloseCalled());
assertEquals(1, sender.transport.getSentMessages().size());
assertEquals(new InternetAddress("she@mail.org"), sender.transport.getSentMessage(0).getAllRecipients()[0]);
assertEquals(1, ex.getFailedMessages().size());
assertEquals(simpleMessage1, ex.getFailedMessages().keySet().iterator().next());
Object subEx = ex.getFailedMessages().values().iterator().next();
assertTrue(subEx instanceof MessagingException);
assertEquals("failed", ((MessagingException) subEx).getMessage());
}
}
@Test
public void fFailedMimeMessage() throws Exception {
MockJavaMailSender sender = new MockJavaMailSender();
sender.setHost("host");
sender.setUsername("username");
sender.setPassword("password");
MimeMessage mimeMessage1 = sender.createMimeMessage();
mimeMessage1.setRecipient(Message.RecipientType.TO, new InternetAddress("he@mail.org"));
mimeMessage1.setSubject("fail");
MimeMessage mimeMessage2 = sender.createMimeMessage();
mimeMessage2.setRecipient(Message.RecipientType.TO, new InternetAddress("she@mail.org"));
try {
sender.send(mimeMessage1, mimeMessage2);
}
catch (MailSendException ex) {
ex.printStackTrace();
assertEquals("host", sender.transport.getConnectedHost());
assertEquals("username", sender.transport.getConnectedUsername());
assertEquals("password", sender.transport.getConnectedPassword());
assertTrue(sender.transport.isCloseCalled());
assertEquals(1, sender.transport.getSentMessages().size());
assertEquals(mimeMessage2, sender.transport.getSentMessage(0));
assertEquals(1, ex.getFailedMessages().size());
assertEquals(mimeMessage1, ex.getFailedMessages().keySet().iterator().next());
Object subEx = ex.getFailedMessages().values().iterator().next();
assertTrue(subEx instanceof MessagingException);
assertEquals("failed", ((MessagingException) subEx).getMessage());
}
}
	@Test
	public void testConnection() throws Exception {
		// With a valid host the connection test completes without throwing.
		MockJavaMailSender sender = new MockJavaMailSender();
		sender.setHost("host");
		sender.testConnection();
	}
	@Test
	public void testConnectionWithFailure() throws Exception {
		MockJavaMailSender sender = new MockJavaMailSender();
		// A null host makes MockTransport.connect throw; the rule verifies the exception type.
		sender.setHost(null);
		thrown.expect(MessagingException.class);
		sender.testConnection();
	}
	/**
	 * Test double for {@code JavaMailSenderImpl} that substitutes a {@link MockTransport}
	 * and exposes it so tests can inspect connection parameters and sent messages.
	 */
	private static class MockJavaMailSender extends JavaMailSenderImpl {
		// Last transport handed out; populated on first send/testConnection.
		private MockTransport transport;
		@Override
		protected Transport getTransport(Session session) throws NoSuchProviderException {
			this.transport = new MockTransport(session, null);
			return transport;
		}
	}
	/**
	 * Recording {@link Transport} stub. Captures connect parameters and sent messages,
	 * and simulates failures the tests rely on:
	 * connect(null host) throws; close() throws when connected host is "";
	 * sendMessage throws for any message whose subject is "fail".
	 */
	private static class MockTransport extends Transport {
		private String connectedHost = null;
		// -2 marks "never connected"; presumably chosen to be distinguishable from
		// a real port argument such as -1 — TODO confirm.
		private int connectedPort = -2;
		private String connectedUsername = null;
		private String connectedPassword = null;
		private boolean closeCalled = false;
		private List<Message> sentMessages = new ArrayList<Message>();
		private MockTransport(Session session, URLName urlName) {
			super(session, urlName);
		}
		public String getConnectedHost() {
			return connectedHost;
		}
		public int getConnectedPort() {
			return connectedPort;
		}
		public String getConnectedUsername() {
			return connectedUsername;
		}
		public String getConnectedPassword() {
			return connectedPassword;
		}
		public boolean isCloseCalled() {
			return closeCalled;
		}
		public List<Message> getSentMessages() {
			return sentMessages;
		}
		public MimeMessage getSentMessage(int index) {
			return (MimeMessage) this.sentMessages.get(index);
		}
		@Override
		public void connect(String host, int port, String username, String password) throws MessagingException {
			// Simulated connect failure for failedMailServerConnect/testConnectionWithFailure.
			if (host == null) {
				throw new MessagingException("no host");
			}
			this.connectedHost = host;
			this.connectedPort = port;
			this.connectedUsername = username;
			this.connectedPassword = password;
			setConnected(true);
		}
		@Override
		public synchronized void close() throws MessagingException {
			// Simulated close failure for failedMailServerClose.
			if ("".equals(connectedHost)) {
				throw new MessagingException("close failure");
			}
			this.closeCalled = true;
		}
		@Override
		public void sendMessage(Message message, Address[] addresses) throws MessagingException {
			// Simulated per-message failure for the failed*Message tests.
			if ("fail".equals(message.getSubject())) {
				throw new MessagingException("failed");
			}
			if (!ObjectUtils.nullSafeEquals(addresses, message.getAllRecipients())) {
				throw new MessagingException("addresses not correct");
			}
			if (message.getSentDate() == null) {
				throw new MessagingException("No sentDate specified");
			}
			// "custom" subject is used by javaMailSenderWithCustomSession to pin the sent date.
			if (message.getSubject() != null && message.getSubject().contains("custom")) {
				assertEquals(new GregorianCalendar(2005, 3, 1).getTime(), message.getSentDate());
			}
			this.sentMessages.add(message);
		}
	}
}
| |
package org.innovateuk.ifs.project.notes.controller;
import com.fasterxml.jackson.core.type.TypeReference;
import org.innovateuk.ifs.commons.error.ValidationMessages;
import org.innovateuk.ifs.commons.exception.ObjectNotFoundException;
import org.innovateuk.ifs.commons.service.ServiceResult;
import org.innovateuk.ifs.controller.ValidationHandler;
import org.innovateuk.ifs.finance.resource.ProjectFinanceResource;
import org.innovateuk.ifs.financecheck.FinanceCheckService;
import org.innovateuk.ifs.organisation.resource.OrganisationResource;
import org.innovateuk.ifs.project.ProjectService;
import org.innovateuk.ifs.project.finance.service.ProjectFinanceRestService;
import org.innovateuk.ifs.project.notes.form.FinanceChecksNotesAddCommentForm;
import org.innovateuk.ifs.project.notes.form.FinanceChecksNotesFormConstraints;
import org.innovateuk.ifs.project.notes.viewmodel.FinanceChecksNotesViewModel;
import org.innovateuk.ifs.project.resource.ProjectResource;
import org.innovateuk.ifs.project.service.PartnerOrganisationRestService;
import org.innovateuk.ifs.thread.viewmodel.ThreadViewModel;
import org.innovateuk.ifs.thread.viewmodel.ThreadViewModelPopulator;
import org.innovateuk.ifs.threads.attachment.resource.AttachmentResource;
import org.innovateuk.ifs.threads.resource.NoteResource;
import org.innovateuk.ifs.threads.resource.PostResource;
import org.innovateuk.ifs.user.resource.Authority;
import org.innovateuk.ifs.user.resource.UserResource;
import org.innovateuk.ifs.user.service.OrganisationRestService;
import org.innovateuk.ifs.util.EncryptedCookieService;
import org.innovateuk.ifs.util.JsonUtil;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.io.ByteArrayResource;
import org.springframework.http.ResponseEntity;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.security.core.parameters.P;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.validation.BindingResult;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.multipart.MultipartFile;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.validation.Valid;
import java.time.ZonedDateTime;
import java.util.*;
import java.util.function.Supplier;
import static java.util.Collections.emptyList;
import static org.innovateuk.ifs.controller.ErrorToObjectErrorConverterFactory.asGlobalErrors;
import static org.innovateuk.ifs.controller.ErrorToObjectErrorConverterFactory.fieldErrorsToFieldErrors;
import static org.innovateuk.ifs.controller.FileUploadControllerUtils.getMultipartFileBytes;
import static org.innovateuk.ifs.file.controller.FileDownloadControllerUtils.getFileResponseEntity;
/**
* This Controller handles finance check notes activity for the finance team members
*/
@Controller
@RequestMapping(FinanceChecksNotesController.FINANCE_CHECKS_NOTES_BASE_URL)
public class FinanceChecksNotesController {
static final String FINANCE_CHECKS_NOTES_BASE_URL = "/project/{projectId}/finance-check/organisation/{organisationId}/note";
private static final String FINANCE_CHECKS_NOTES_COMMENT_BASE_URL = FINANCE_CHECKS_NOTES_BASE_URL + "/{noteId}/new-comment";
private static final String ATTACHMENT_COOKIE = "finance_checks_notes_new_comment_attachments";
private static final String FORM_COOKIE = "finance_checks_notes_new_comment_form";
private static final String ORIGIN_GET_COOKIE = "finance_checks_notes_new_comment_origin";
private static final String FORM_ATTR = "form";
private static final String NOTES_VIEW = "project/financecheck/notes";
@Autowired
private OrganisationRestService organisationRestService;
@Autowired
private ProjectService projectService;
@Autowired
private PartnerOrganisationRestService partnerOrganisationRestService;
@Autowired
private EncryptedCookieService cookieUtil;
@Autowired
private ProjectFinanceRestService projectFinanceRestService;
@Autowired
private FinanceCheckService financeCheckService;
@Autowired
private ThreadViewModelPopulator threadViewModelPopulator;
@PreAuthorize("hasPermission(#projectId, 'org.innovateuk.ifs.project.resource.ProjectCompositeId', 'ACCESS_FINANCE_CHECKS_NOTES_SECTION')")
@GetMapping
public String showPage(@P("projectId")@PathVariable Long projectId,
@PathVariable Long organisationId,
UserResource loggedInUser,
Model model) {
partnerOrganisationRestService.getPartnerOrganisation(projectId, organisationId);
FinanceChecksNotesViewModel viewModel = populateNoteViewModel(projectId, organisationId, null, null, loggedInUser);
model.addAttribute("model", viewModel);
return NOTES_VIEW;
}
    /**
     * Streams a note attachment back to the browser.
     * NOTE(review): loggedInUser and request parameters are unused here.
     */
    @PreAuthorize("hasPermission(#projectId, 'org.innovateuk.ifs.project.resource.ProjectCompositeId', 'ACCESS_FINANCE_CHECKS_NOTES_SECTION')")
    @GetMapping(value = "/attachment/{attachmentId}")
    public
    @ResponseBody
    ResponseEntity<ByteArrayResource> downloadAttachment(@P("projectId")@PathVariable Long projectId,
                                                         @PathVariable Long organisationId,
                                                         @PathVariable Long attachmentId,
                                                         UserResource loggedInUser,
                                                         HttpServletRequest request) {
        // Result ignored — presumably validates the organisation belongs to the project; TODO confirm.
        partnerOrganisationRestService.getPartnerOrganisation(projectId, organisationId);
        return getFileResponseEntity(financeCheckService.downloadFile(attachmentId), financeCheckService.getAttachmentInfo(attachmentId));
    }
    /**
     * Shows the "add comment" form for a note, restoring any previously uploaded
     * attachments and draft form content from cookies.
     */
    @PreAuthorize("hasPermission(#projectId, 'org.innovateuk.ifs.project.resource.ProjectCompositeId', 'ACCESS_FINANCE_CHECKS_NOTES_SECTION')")
    @GetMapping("/{noteId}/new-comment")
    public String viewNewComment(@P("projectId")@PathVariable Long projectId,
                                 @PathVariable Long organisationId,
                                 @PathVariable Long noteId,
                                 Model model,
                                 UserResource loggedInUser,
                                 HttpServletRequest request,
                                 HttpServletResponse response) {
        partnerOrganisationRestService.getPartnerOrganisation(projectId, organisationId);
        // Record where this form was opened from so the later POST can be validated against it.
        saveOriginCookie(response, projectId, organisationId, noteId, loggedInUser.getId());
        List<Long> attachments = loadAttachmentsFromCookie(request, projectId, organisationId, noteId);
        populateNoteViewModel(projectId, organisationId, noteId, model, attachments, loggedInUser);
        // Re-populate the form from the cookie if a draft exists, otherwise start blank.
        model.addAttribute(FORM_ATTR, loadForm(request, projectId, organisationId, noteId).orElse(new FinanceChecksNotesAddCommentForm()));
        return NOTES_VIEW;
    }
private void populateNoteViewModel(Long projectId, Long organisationId, Long noteId, Model model, List<Long> attachments, UserResource loggedInUser) {
FinanceChecksNotesViewModel financeChecksNotesViewModel = populateNoteViewModel(projectId, organisationId, noteId, attachments, loggedInUser);
validateNoteId(financeChecksNotesViewModel, noteId);
model.addAttribute("model", financeChecksNotesViewModel);
}
private void validateNoteId(FinanceChecksNotesViewModel financeChecksNotesViewModel, Long noteId) {
if (financeChecksNotesViewModel.getNotes().stream().noneMatch(threadViewModel -> threadViewModel.getId().equals(noteId))) {
throw new ObjectNotFoundException();
}
}
    /**
     * Persists a new comment (with any cookie-tracked attachments) against a note.
     * The POST is only accepted when its parameters match the origin cookie written
     * when the form was opened; otherwise the request is treated as not found.
     */
    @PreAuthorize("hasPermission(#projectId, 'org.innovateuk.ifs.project.resource.ProjectCompositeId', 'ACCESS_FINANCE_CHECKS_NOTES_SECTION')")
    @PostMapping(value = "/{noteId}/new-comment")
    public String saveComment(Model model,
                              @P("projectId")@PathVariable("projectId") final Long projectId,
                              @PathVariable final Long organisationId,
                              @PathVariable final Long noteId,
                              @Valid @ModelAttribute(FORM_ATTR) final FinanceChecksNotesAddCommentForm form,
                              @SuppressWarnings("unused") BindingResult bindingResult,
                              ValidationHandler validationHandler,
                              UserResource loggedInUser,
                              HttpServletRequest request,
                              HttpServletResponse response) {
        if (postParametersMatchOrigin(request, projectId, organisationId, noteId, loggedInUser.getId())) {
            // Validation failure: redisplay the form with the draft and its attachments.
            Supplier<String> failureView = () -> {
                List<Long> attachments = loadAttachmentsFromCookie(request, projectId, organisationId, noteId);
                FinanceChecksNotesViewModel viewModel = populateNoteViewModel(projectId, organisationId, noteId, attachments, loggedInUser);
                model.addAttribute("model", viewModel);
                model.addAttribute(FORM_ATTR, form);
                return NOTES_VIEW;
            };
            // Save failure: show the notes page with global errors and no form.
            Supplier<String> saveFailureView = () -> {
                FinanceChecksNotesViewModel viewModel = populateNoteViewModel(projectId, organisationId, null, null, loggedInUser);
                model.addAttribute("model", viewModel);
                model.addAttribute("nonFormErrors", validationHandler.getAllErrors());
                model.addAttribute(FORM_ATTR, null);
                return NOTES_VIEW;
            };
            return validationHandler.failNowOrSucceedWith(failureView, () -> {
                ValidationMessages validationMessages = new ValidationMessages(bindingResult);
                return validationHandler.addAnyErrors(validationMessages, fieldErrorsToFieldErrors(), asGlobalErrors()).
                        failNowOrSucceedWith(failureView, () -> {
                            // Resolve the attachment ids from the cookie into attachment resources.
                            List<AttachmentResource> attachmentResources = new ArrayList<>();
                            List<Long> attachments = loadAttachmentsFromCookie(request, projectId, organisationId, noteId);
                            attachments.forEach(attachment -> financeCheckService.getAttachment(attachment).ifSuccessful(fileEntry -> attachmentResources.add(fileEntry)));
                            PostResource post = new PostResource(null, loggedInUser, form.getComment(), attachmentResources, ZonedDateTime.now());
                            ServiceResult<Void> saveResult = financeCheckService.saveNotePost(post, noteId);
                            validationHandler.addAnyErrors(saveResult);
                            return validationHandler.failNowOrSucceedWith(saveFailureView, () -> {
                                // Success: clear the draft cookies and return to the notes page.
                                deleteCookies(response, projectId, organisationId, noteId);
                                return redirectTo(rootView(projectId, organisationId));
                            });
                        });
            });
        } else {
            throw new ObjectNotFoundException();
        }
    }
    /**
     * Uploads an attachment for a draft comment. The attachment id and the current
     * form content are stored in cookies so they survive the redirect back to the form.
     * Only accepted when the POST matches the origin cookie; otherwise not found.
     */
    @PreAuthorize("hasPermission(#projectId, 'org.innovateuk.ifs.project.resource.ProjectCompositeId', 'ACCESS_FINANCE_CHECKS_NOTES_SECTION')")
    @PostMapping(value = "/{noteId}/new-comment", params = "uploadAttachment")
    public String saveNewCommentAttachment(Model model,
                                           @P("projectId")@PathVariable("projectId") final Long projectId,
                                           @PathVariable Long organisationId,
                                           @PathVariable Long noteId,
                                           @ModelAttribute(FORM_ATTR) FinanceChecksNotesAddCommentForm form,
                                           @SuppressWarnings("unused") BindingResult bindingResult,
                                           ValidationHandler validationHandler,
                                           UserResource loggedInUser,
                                           HttpServletRequest request,
                                           HttpServletResponse response) {
        if (postParametersMatchOrigin(request, projectId, organisationId, noteId, loggedInUser.getId())) {
            List<Long> attachments = loadAttachmentsFromCookie(request, projectId, organisationId, noteId);
            Supplier<String> onSuccess = () -> redirectTo(formView(projectId, organisationId, noteId));
            Supplier<String> onError = () -> {
                model.addAttribute("model", populateNoteViewModel(projectId, organisationId, noteId, attachments, loggedInUser));
                model.addAttribute("nonFormErrors", validationHandler.getAllErrors());
                model.addAttribute("form", form);
                return NOTES_VIEW;
            };
            // Bind any upload errors to the "attachment" field; on success redirect to the form.
            return validationHandler.performActionOrBindErrorsToField("attachment", onError, onSuccess, () -> {
                MultipartFile file = form.getAttachment();
                ServiceResult<AttachmentResource> result = financeCheckService.uploadFile(projectId, file.getContentType(), file.getSize(), file.getOriginalFilename(), getMultipartFileBytes(file));
                result.ifSuccessful(uploadedAttachment -> {
                    // Persist the new attachment id and the draft form across the redirect.
                    attachments.add(uploadedAttachment.id);
                    saveAttachmentsToCookie(response, attachments, projectId, organisationId, noteId);
                    saveFormToCookie(response, projectId, organisationId, noteId, form);
                });
                FinanceChecksNotesViewModel viewModel = populateNoteViewModel(projectId, organisationId, noteId, attachments, loggedInUser);
                model.addAttribute("model", viewModel);
                return result;
            });
        } else {
            throw new ObjectNotFoundException();
        }
    }
/**
 * Streams back an attachment that was uploaded (but not yet submitted) for a new
 * comment. Only ids present in this session's attachment cookie may be downloaded,
 * which prevents fetching arbitrary attachment ids.
 */
@PreAuthorize("hasPermission(#projectId, 'org.innovateuk.ifs.project.resource.ProjectCompositeId', 'ACCESS_FINANCE_CHECKS_NOTES_SECTION')")
@GetMapping("/{noteId}/new-comment/attachment/{attachmentId}")
public @ResponseBody
ResponseEntity<ByteArrayResource> downloadResponseAttachment(@P("projectId")@PathVariable Long projectId,
                                                             @PathVariable Long organisationId,
                                                             @PathVariable Long noteId,
                                                             @PathVariable Long attachmentId,
                                                             UserResource loggedInUser,
                                                             HttpServletRequest request) {
    // Result is discarded — presumably called to validate the project/organisation
    // combination exists before serving the file. TODO confirm intent.
    partnerOrganisationRestService.getPartnerOrganisation(projectId, organisationId);
    List<Long> attachments = loadAttachmentsFromCookie(request, projectId, organisationId, noteId);
    if (attachments.contains(attachmentId)) {
        return getFileResponseEntity(financeCheckService.downloadFile(attachmentId), financeCheckService.getAttachmentInfo(attachmentId));
    } else {
        throw new ObjectNotFoundException("Cannot find comment attachment " + attachmentId + " for organisation " + organisationId + " and project " + projectId, emptyList());
    }
}
/**
 * Handles the "removeAttachment" submit button of the new-comment form: deletes the
 * chosen uploaded file and drops its id from the session's attachment cookie, then
 * redirects back to the form. The POST is rejected (404) unless its path parameters
 * match the origin cookie written when the form page was rendered.
 */
@PreAuthorize("hasPermission(#projectId, 'org.innovateuk.ifs.project.resource.ProjectCompositeId', 'ACCESS_FINANCE_CHECKS_NOTES_SECTION')")
@PostMapping(value = "/{noteId}/new-comment", params = "removeAttachment")
public String removeAttachment(@P("projectId")@PathVariable Long projectId,
                               @PathVariable Long organisationId,
                               @PathVariable Long noteId,
                               @RequestParam(value = "removeAttachment") final Long attachmentId,
                               @ModelAttribute(FORM_ATTR) FinanceChecksNotesAddCommentForm form,
                               @SuppressWarnings("unused") BindingResult bindingResult,
                               ValidationHandler validationHandler,
                               UserResource loggedInUser,
                               HttpServletRequest request,
                               HttpServletResponse response,
                               Model model) {
    if (postParametersMatchOrigin(request, projectId, organisationId, noteId, loggedInUser.getId())) {
        List<Long> attachments = loadAttachmentsFromCookie(request, projectId, organisationId, noteId);
        if (attachments.contains(attachmentId)) {
            // Use the remove(Object) overload directly instead of
            // remove(indexOf(...)): equivalent under the contains() guard,
            // avoids a second linear scan, and sidesteps the classic
            // List.remove(int) vs remove(Object) overload pitfall.
            financeCheckService.deleteFile(attachmentId)
                    .andOnSuccess(() -> attachments.remove(attachmentId));
        }
        // Re-save session state (remaining attachments and in-progress form text).
        saveAttachmentsToCookie(response, attachments, projectId, organisationId, noteId);
        saveFormToCookie(response, projectId, organisationId, noteId, form);
        return redirectTo(formView(projectId, organisationId, noteId));
    } else {
        throw new ObjectNotFoundException();
    }
}
/**
 * Cancels an in-progress new comment: deletes every attachment uploaded during the
 * session, clears the session cookies, and redirects to the notes overview page.
 */
@PreAuthorize("hasPermission(#projectId, 'org.innovateuk.ifs.project.resource.ProjectCompositeId', 'ACCESS_FINANCE_CHECKS_NOTES_SECTION')")
@GetMapping("/{noteId}/new-comment/cancel")
public String cancelNewForm(@P("projectId")@PathVariable Long projectId,
                            @PathVariable Long organisationId,
                            @PathVariable Long noteId,
                            Model model,
                            UserResource loggedInUser,
                            HttpServletRequest request,
                            HttpServletResponse response) {
    // Result is discarded — presumably called to validate the project/organisation
    // combination exists. TODO confirm intent.
    partnerOrganisationRestService.getPartnerOrganisation(projectId, organisationId);
    List<Long> attachments = loadAttachmentsFromCookie(request, projectId, organisationId, noteId);
    // Best effort: individual delete failures are ignored.
    attachments.forEach((id -> financeCheckService.deleteFile(id)));
    deleteCookies(response, projectId, organisationId, noteId);
    return redirectTo(rootView(projectId, organisationId));
}
/**
 * Loads the notes for the given project/organisation pair as thread view models.
 * Falls back to an empty list when the notes cannot be loaded.
 */
private List<ThreadViewModel> loadNoteModel(Long projectId, Long organisationId) {
    ProjectFinanceResource projectFinance = projectFinanceRestService.getProjectFinance(projectId, organisationId).getSuccess();
    ServiceResult<List<NoteResource>> notesResult = financeCheckService.loadNotes(projectFinance.getId());
    if (!notesResult.isSuccess()) {
        return emptyList();
    }
    return threadViewModelPopulator.threadViewModelListFromNotes(projectId, organisationId, notesResult.getSuccess());
}
/**
 * Builds the view model for the notes page, resolving the organisation name,
 * whether it is the lead partner, the note threads, and display links for any
 * attachments uploaded in the current session.
 */
private FinanceChecksNotesViewModel populateNoteViewModel(Long projectId, Long organisationId, Long noteId, List<Long> attachments, UserResource loggedInUser) {
    ProjectResource project = projectService.getById(projectId);
    OrganisationResource organisation = organisationRestService.getOrganisationById(organisationId).getSuccess();
    OrganisationResource leadOrganisation = projectService.getLeadOrganisation(projectId);
    boolean leadPartnerOrganisation = leadOrganisation.getId().equals(organisation.getId());
    // Map of attachment id -> display name; ids whose lookup fails are silently omitted.
    Map<Long, String> attachmentLinks = new HashMap<>();
    if (attachments != null) {
        attachments.forEach(id -> financeCheckService.getAttachment(id).ifSuccessful(foundAttachment -> attachmentLinks.put(id, foundAttachment.name)));
    }
    return new FinanceChecksNotesViewModel(
            organisation.getName(),
            leadPartnerOrganisation,
            project.getId(),
            project.getName(),
            loadNoteModel(projectId, organisationId),
            organisationId,
            FINANCE_CHECKS_NOTES_BASE_URL,
            attachmentLinks,
            FinanceChecksNotesFormConstraints.MAX_NOTE_WORDS,
            FinanceChecksNotesFormConstraints.MAX_NOTE_CHARACTERS,
            noteId,
            project.getApplication(),
            project.getProjectState().isActive(),
            loggedInUser.hasAuthority(Authority.AUDITOR)
    );
}
/**
 * Builds the URL of the notes overview page for a project/organisation pair.
 */
private String rootView(Long projectId, Long organisationId) {
    return String.format(FINANCE_CHECKS_NOTES_BASE_URL, projectId, organisationId);
}
/**
 * Name of the cookie tracking uploaded attachment ids, scoped to one
 * project/organisation/note combination.
 */
private String getCookieName(Long projectId, Long organisationId, Long noteId) {
    StringBuilder name = new StringBuilder(ATTACHMENT_COOKIE);
    name.append('_').append(projectId);
    name.append('_').append(organisationId);
    name.append('_').append(noteId);
    return name.toString();
}
/**
 * Name of the cookie holding the in-progress comment form, scoped to one
 * project/organisation/note combination.
 */
private String getFormCookieName(Long projectId, Long organisationId, Long noteId) {
    StringBuilder name = new StringBuilder(FORM_COOKIE);
    name.append('_').append(projectId);
    name.append('_').append(organisationId);
    name.append('_').append(noteId);
    return name.toString();
}
/**
 * Serialises the uploaded attachment ids to JSON and stores them in the
 * session-scoped attachment cookie.
 */
private void saveAttachmentsToCookie(HttpServletResponse response, List<Long> attachmentFileIds, Long projectId, Long organisationId, Long noteId) {
    String jsonState = JsonUtil.getSerializedObject(attachmentFileIds);
    cookieUtil.saveToCookie(response, getCookieName(projectId, organisationId, noteId), jsonState);
}
/**
 * Serialises the in-progress comment form to JSON and stores it in the
 * session-scoped form cookie, so typed text survives upload/remove round-trips.
 */
private void saveFormToCookie(HttpServletResponse response, Long projectId, Long organisationId, Long noteId,
                              FinanceChecksNotesAddCommentForm form) {
    cookieUtil.saveToCookie(response, getFormCookieName(projectId, organisationId, noteId), JsonUtil.getSerializedObject(form));
}
/**
 * Reads the uploaded attachment ids back out of the session-scoped cookie;
 * returns an empty list when the cookie is absent or unreadable (per
 * {@code cookieUtil.getCookieAsList} behaviour — TODO confirm).
 */
private List<Long> loadAttachmentsFromCookie(HttpServletRequest request, Long projectId, Long organisationId, Long noteId) {
    return cookieUtil.getCookieAsList(request, getCookieName(projectId, organisationId, noteId), new TypeReference<List<Long>>() {
    });
}
/**
 * Reads the previously saved comment form back out of the session-scoped cookie,
 * if one was stored.
 */
private Optional<FinanceChecksNotesAddCommentForm> loadForm(HttpServletRequest request, Long projectId, Long organisationId, Long noteId) {
    return cookieUtil.getCookieAs(request, getFormCookieName(projectId, organisationId, noteId),
            new TypeReference<FinanceChecksNotesAddCommentForm>() {});
}
/**
 * Records the identifiers of the page that rendered the form, so subsequent POSTs
 * can be checked against them (see {@code postParametersMatchOrigin}).
 * Stored as the JSON list [projectId, organisationId, noteId, userId].
 */
private void saveOriginCookie(HttpServletResponse response, Long projectId, Long organisationId, Long noteId, Long userId) {
    String jsonState = JsonUtil.getSerializedObject(Arrays.asList(projectId, organisationId, noteId, userId));
    cookieUtil.saveToCookie(response, ORIGIN_GET_COOKIE, jsonState);
}
/**
 * Verifies that a POST's path parameters and user match the origin cookie written
 * when the form page was rendered; mismatches are treated as invalid requests.
 */
private boolean postParametersMatchOrigin(HttpServletRequest request, Long projectId, Long organisationId, Long noteId, Long userId){
    List<Long> originParams = cookieUtil.getCookieAsList(request, ORIGIN_GET_COOKIE, new TypeReference<List<Long>>() {
    });
    if (originParams.size() != 4) {
        return false;
    }
    return originParams.get(0).equals(projectId)
            && originParams.get(1).equals(organisationId)
            && originParams.get(2).equals(noteId)
            && originParams.get(3).equals(userId);
}
/**
 * Clears all session cookies for a new-comment editing session: attachments,
 * saved form text, and the origin marker.
 */
private void deleteCookies(HttpServletResponse response, Long projectId, Long organisationId, Long noteId) {
    cookieUtil.removeCookie(response, getCookieName(projectId, organisationId, noteId));
    cookieUtil.removeCookie(response, getFormCookieName(projectId, organisationId, noteId));
    cookieUtil.removeCookie(response, ORIGIN_GET_COOKIE);
}
/**
 * Prefixes a path with Spring MVC's {@code redirect:} view-name scheme.
 */
private String redirectTo(final String path) {
    return "redirect:" + path;
}
/**
 * Builds the URL of the new-comment form page for a specific note.
 */
private String formView(final Long projectId, final Long organisationId, Long noteId) {
    return String.format(FINANCE_CHECKS_NOTES_COMMENT_BASE_URL, projectId, organisationId, noteId);
}
/**
 * Setter exposed for tests/subclasses to inject the thread view model populator.
 */
protected void setThreadViewModelPopulator(ThreadViewModelPopulator threadViewModelPopulator) {
    this.threadViewModelPopulator = threadViewModelPopulator;
}
}
| |
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.4-2
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2014.05.16 at 11:58:34 AM IST
//
package org.akomantoso.schema.v3.csd09;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAnyAttribute;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlType;
import javax.xml.bind.annotation.adapters.CollapsedStringAdapter;
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
import javax.xml.namespace.QName;
/**
* <p>Java class for anonymous complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence maxOccurs="unbounded" minOccurs="0">
* <element ref="{http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD09}componentData"/>
* </sequence>
* <attGroup ref="{http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD09}idreq"/>
* <attGroup ref="{http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD09}core"/>
* <attGroup ref="{http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD09}name"/>
* <attGroup ref="{http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD09}link"/>
* <attGroup ref="{http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD09}show"/>
* <anyAttribute processContents='lax' namespace='##other'/>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
    "componentData"
})
@XmlRootElement(name = "componentData")
public class ComponentData {

    // Nested componentData children (the element is recursive per the schema);
    // lazily initialised by getComponentData().
    protected List<ComponentData> componentData;
    // Required expression-level id (NMTOKEN).
    @XmlAttribute(name = "eId", required = true)
    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
    @XmlSchemaType(name = "NMTOKEN")
    protected String eId;
    // Optional work-level id (NMTOKEN).
    @XmlAttribute(name = "wId")
    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
    @XmlSchemaType(name = "NMTOKEN")
    protected String wId;
    // Optional globally unique id (NMTOKEN).
    @XmlAttribute(name = "GUID")
    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
    @XmlSchemaType(name = "NMTOKEN")
    protected String guid;
    @XmlAttribute(name = "name", required = true)
    protected String name;
    // Required link target (anyURI).
    @XmlAttribute(name = "href", required = true)
    @XmlSchemaType(name = "anyURI")
    protected String href;
    @XmlAttribute(name = "showAs", required = true)
    protected String showAs;
    @XmlAttribute(name = "shortForm")
    protected String shortForm;
    // Catch-all for lax, namespace="##other" attributes (see anyAttribute in schema).
    @XmlAnyAttribute
    private Map<QName, String> otherAttributes = new HashMap<QName, String>();

    /**
     * Gets the value of the componentData property.
     *
     * <p>
     * This accessor method returns a reference to the live list,
     * not a snapshot. Therefore any modification you make to the
     * returned list will be present inside the JAXB object.
     * This is why there is not a <CODE>set</CODE> method for the componentData property.
     *
     * <p>
     * For example, to add a new item, do as follows:
     * <pre>
     *    getComponentData().add(newItem);
     * </pre>
     *
     *
     * <p>
     * Objects of the following type(s) are allowed in the list
     * {@link ComponentData }
     *
     *
     */
    public List<ComponentData> getComponentData() {
        if (componentData == null) {
            componentData = new ArrayList<ComponentData>();
        }
        return this.componentData;
    }

    /**
     * Gets the value of the eId property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getEId() {
        return eId;
    }

    /**
     * Sets the value of the eId property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setEId(String value) {
        this.eId = value;
    }

    /**
     * Gets the value of the wId property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getWId() {
        return wId;
    }

    /**
     * Sets the value of the wId property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setWId(String value) {
        this.wId = value;
    }

    /**
     * Gets the value of the guid property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getGUID() {
        return guid;
    }

    /**
     * Sets the value of the guid property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setGUID(String value) {
        this.guid = value;
    }

    /**
     * Gets the value of the name property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getName() {
        return name;
    }

    /**
     * Sets the value of the name property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setName(String value) {
        this.name = value;
    }

    /**
     * Gets the value of the href property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getHref() {
        return href;
    }

    /**
     * Sets the value of the href property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setHref(String value) {
        this.href = value;
    }

    /**
     * Gets the value of the showAs property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getShowAs() {
        return showAs;
    }

    /**
     * Sets the value of the showAs property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setShowAs(String value) {
        this.showAs = value;
    }

    /**
     * Gets the value of the shortForm property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getShortForm() {
        return shortForm;
    }

    /**
     * Sets the value of the shortForm property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setShortForm(String value) {
        this.shortForm = value;
    }

    /**
     * Gets a map that contains attributes that aren't bound to any typed property on this class.
     *
     * <p>
     * the map is keyed by the name of the attribute and
     * the value is the string value of the attribute.
     *
     * the map returned by this method is live, and you can add new attribute
     * by updating the map directly. Because of this design, there's no setter.
     *
     *
     * @return
     *     always non-null
     */
    public Map<QName, String> getOtherAttributes() {
        return otherAttributes;
    }

}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.datanode.fsdataset.impl;
import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
import static org.apache.hadoop.fs.CreateFlag.CREATE;
import static org.apache.hadoop.fs.CreateFlag.LAZY_PERSIST;
import static org.apache.hadoop.fs.StorageType.DEFAULT;
import static org.apache.hadoop.fs.StorageType.RAM_DISK;
import static org.apache.hadoop.hdfs.DFSConfigKeys.*;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.EnumSet;
import java.util.HashSet;
import java.util.Set;
import java.util.UUID;
import com.google.common.base.Preconditions;
import org.apache.commons.io.IOUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CreateFlag;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.StorageType;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.DFSTestUtil;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.protocol.LocatedBlock;
import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
import org.apache.hadoop.hdfs.server.datanode.DataNodeTestUtils;
import org.apache.hadoop.hdfs.server.datanode.DatanodeUtil;
import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsDatasetSpi;
import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsVolumeSpi;
import org.apache.hadoop.hdfs.tools.JMXGet;
import org.apache.hadoop.io.nativeio.NativeIO;
import org.apache.hadoop.net.unix.TemporarySocketDirectory;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.log4j.Level;
import org.junit.After;
import org.junit.Rule;
import org.junit.rules.Timeout;
public abstract class LazyPersistTestCase {
static final byte LAZY_PERSIST_POLICY_ID = (byte) 15;
static {
DFSTestUtil.setNameNodeLogLevel(Level.DEBUG);
GenericTestUtils.setLogLevel(FsDatasetImpl.LOG, Level.DEBUG);
}
protected static final int BLOCK_SIZE = 5 * 1024 * 1024;
protected static final int BUFFER_LENGTH = 4096;
protected static final int EVICTION_LOW_WATERMARK = 1;
private static final long HEARTBEAT_INTERVAL_SEC = 1;
private static final int HEARTBEAT_RECHECK_INTERVAL_MSEC = 500;
private static final String JMX_RAM_DISK_METRICS_PATTERN = "^RamDisk";
private static final String JMX_SERVICE_NAME = "DataNode";
protected static final int LAZY_WRITE_FILE_SCRUBBER_INTERVAL_SEC = 3;
protected static final int LAZY_WRITER_INTERVAL_SEC = 1;
protected static final Log LOG = LogFactory.getLog(LazyPersistTestCase.class);
protected static final short REPL_FACTOR = 1;
protected final long osPageSize =
NativeIO.POSIX.getCacheManipulator().getOperatingSystemPageSize();
protected MiniDFSCluster cluster;
protected DistributedFileSystem fs;
protected DFSClient client;
protected JMXGet jmx;
protected TemporarySocketDirectory sockDir;
@After
public void shutDownCluster() throws Exception {
// Dump all RamDisk JMX metrics before shutdown the cluster
printRamDiskJMXMetrics();
if (fs != null) {
fs.close();
fs = null;
client = null;
}
if (cluster != null) {
cluster.shutdownDataNodes();
cluster.shutdown();
cluster = null;
}
if (jmx != null) {
jmx = null;
}
IOUtils.closeQuietly(sockDir);
sockDir = null;
}
@Rule
public Timeout timeout = new Timeout(300000);
protected final LocatedBlocks ensureFileReplicasOnStorageType(
Path path, StorageType storageType) throws IOException {
// Ensure that returned block locations returned are correct!
LOG.info("Ensure path: " + path + " is on StorageType: " + storageType);
assertThat(fs.exists(path), is(true));
long fileLength = client.getFileInfo(path.toString()).getLen();
LocatedBlocks locatedBlocks =
client.getLocatedBlocks(path.toString(), 0, fileLength);
for (LocatedBlock locatedBlock : locatedBlocks.getLocatedBlocks()) {
assertThat(locatedBlock.getStorageTypes()[0], is(storageType));
}
return locatedBlocks;
}
/**
* Make sure at least one non-transient volume has a saved copy of the replica.
* An infinite loop is used to ensure the async lazy persist tasks are completely
* done before verification. Caller of ensureLazyPersistBlocksAreSaved expects
* either a successful pass or timeout failure.
*/
protected final void ensureLazyPersistBlocksAreSaved(
LocatedBlocks locatedBlocks) throws IOException, InterruptedException {
final String bpid = cluster.getNamesystem().getBlockPoolId();
final Set<Long> persistedBlockIds = new HashSet<Long>();
try (FsDatasetSpi.FsVolumeReferences volumes =
cluster.getDataNodes().get(0).getFSDataset().getFsVolumeReferences()) {
while (persistedBlockIds.size() < locatedBlocks.getLocatedBlocks()
.size()) {
// Take 1 second sleep before each verification iteration
Thread.sleep(1000);
for (LocatedBlock lb : locatedBlocks.getLocatedBlocks()) {
for (FsVolumeSpi v : volumes) {
if (v.isTransientStorage()) {
continue;
}
FsVolumeImpl volume = (FsVolumeImpl) v;
File lazyPersistDir =
volume.getBlockPoolSlice(bpid).getLazypersistDir();
long blockId = lb.getBlock().getBlockId();
File targetDir =
DatanodeUtil.idToBlockDir(lazyPersistDir, blockId);
File blockFile = new File(targetDir, lb.getBlock().getBlockName());
if (blockFile.exists()) {
// Found a persisted copy for this block and added to the Set
persistedBlockIds.add(blockId);
}
}
}
}
}
// We should have found a persisted copy for each located block.
assertThat(persistedBlockIds.size(), is(locatedBlocks.getLocatedBlocks().size()));
}
protected final void makeRandomTestFile(Path path, long length,
boolean isLazyPersist, long seed) throws IOException {
DFSTestUtil.createFile(fs, path, isLazyPersist, BUFFER_LENGTH, length,
BLOCK_SIZE, REPL_FACTOR, seed, true);
}
protected final void makeTestFile(Path path, long length,
boolean isLazyPersist) throws IOException {
EnumSet<CreateFlag> createFlags = EnumSet.of(CREATE);
if (isLazyPersist) {
createFlags.add(LAZY_PERSIST);
}
FSDataOutputStream fos = null;
try {
fos =
fs.create(path,
FsPermission.getFileDefault(),
createFlags,
BUFFER_LENGTH,
REPL_FACTOR,
BLOCK_SIZE,
null);
// Allocate a block.
byte[] buffer = new byte[BUFFER_LENGTH];
for (int bytesWritten = 0; bytesWritten < length; ) {
fos.write(buffer, 0, buffer.length);
bytesWritten += buffer.length;
}
if (length > 0) {
fos.hsync();
}
} finally {
IOUtils.closeQuietly(fos);
}
}
/**
* If ramDiskStorageLimit is >=0, then RAM_DISK capacity is artificially
* capped. If ramDiskStorageLimit < 0 then it is ignored.
*/
protected final void startUpCluster(
int numDatanodes,
boolean hasTransientStorage,
StorageType[] storageTypes,
int ramDiskReplicaCapacity,
long ramDiskStorageLimit,
long evictionLowWatermarkReplicas,
long maxLockedMemory,
boolean useSCR,
boolean useLegacyBlockReaderLocal,
boolean disableScrubber) throws IOException {
initCacheManipulator();
Configuration conf = new Configuration();
conf.setLong(DFS_BLOCK_SIZE_KEY, BLOCK_SIZE);
if (disableScrubber) {
conf.setInt(DFS_NAMENODE_LAZY_PERSIST_FILE_SCRUB_INTERVAL_SEC, 0);
} else {
conf.setInt(DFS_NAMENODE_LAZY_PERSIST_FILE_SCRUB_INTERVAL_SEC,
LAZY_WRITE_FILE_SCRUBBER_INTERVAL_SEC);
}
conf.setLong(DFS_HEARTBEAT_INTERVAL_KEY, HEARTBEAT_INTERVAL_SEC);
conf.setInt(DFS_NAMENODE_HEARTBEAT_RECHECK_INTERVAL_KEY,
HEARTBEAT_RECHECK_INTERVAL_MSEC);
conf.setInt(DFS_DATANODE_LAZY_WRITER_INTERVAL_SEC,
LAZY_WRITER_INTERVAL_SEC);
conf.setLong(DFS_DATANODE_RAM_DISK_LOW_WATERMARK_BYTES,
evictionLowWatermarkReplicas * BLOCK_SIZE);
conf.setInt(DFSConfigKeys.DFS_NAMENODE_SAFEMODE_MIN_DATANODES_KEY, 1);
conf.setLong(DFS_DATANODE_MAX_LOCKED_MEMORY_KEY, maxLockedMemory);
if (useSCR) {
conf.setBoolean(HdfsClientConfigKeys.Read.ShortCircuit.KEY, true);
// Do not share a client context across tests.
conf.set(DFS_CLIENT_CONTEXT, UUID.randomUUID().toString());
conf.set(DFS_BLOCK_LOCAL_PATH_ACCESS_USER_KEY,
UserGroupInformation.getCurrentUser().getShortUserName());
if (useLegacyBlockReaderLocal) {
conf.setBoolean(DFS_CLIENT_USE_LEGACY_BLOCKREADERLOCAL, true);
} else {
sockDir = new TemporarySocketDirectory();
conf.set(DFS_DOMAIN_SOCKET_PATH_KEY, new File(sockDir.getDir(),
this.getClass().getSimpleName() + "._PORT.sock").getAbsolutePath());
}
}
Preconditions.checkState(
ramDiskReplicaCapacity < 0 || ramDiskStorageLimit < 0,
"Cannot specify non-default values for both ramDiskReplicaCapacity "
+ "and ramDiskStorageLimit");
long[] capacities;
if (hasTransientStorage && ramDiskReplicaCapacity >= 0) {
// Convert replica count to byte count, add some delta for .meta and
// VERSION files.
ramDiskStorageLimit = ((long) ramDiskReplicaCapacity * BLOCK_SIZE) +
(BLOCK_SIZE - 1);
}
capacities = new long[] { ramDiskStorageLimit, -1 };
cluster = new MiniDFSCluster
.Builder(conf)
.numDataNodes(numDatanodes)
.storageCapacities(capacities)
.storageTypes(storageTypes != null ? storageTypes :
(hasTransientStorage ? new StorageType[]{RAM_DISK, DEFAULT} : null))
.build();
cluster.waitActive();
fs = cluster.getFileSystem();
client = fs.getClient();
try {
jmx = initJMX();
} catch (Exception e) {
fail("Failed initialize JMX for testing: " + e);
}
LOG.info("Cluster startup complete");
}
/**
* Use a dummy cache manipulator for testing.
*/
public static void initCacheManipulator() {
NativeIO.POSIX.setCacheManipulator(new NativeIO.POSIX.CacheManipulator() {
@Override
public void mlock(String identifier,
ByteBuffer mmap, long length) throws IOException {
LOG.info("LazyPersistTestCase: faking mlock of " + identifier + " bytes.");
}
@Override
public long getMemlockLimit() {
LOG.info("LazyPersistTestCase: fake return " + Long.MAX_VALUE);
return Long.MAX_VALUE;
}
@Override
public boolean verifyCanMlock() {
LOG.info("LazyPersistTestCase: fake return " + true);
return true;
}
});
}
ClusterWithRamDiskBuilder getClusterBuilder() {
return new ClusterWithRamDiskBuilder();
}
/**
* Builder class that allows controlling RAM disk-specific properties for a
* MiniDFSCluster.
*/
class ClusterWithRamDiskBuilder {
public ClusterWithRamDiskBuilder setNumDatanodes(
int numDatanodes) {
this.numDatanodes = numDatanodes;
return this;
}
public ClusterWithRamDiskBuilder setStorageTypes(
StorageType[] storageTypes) {
this.storageTypes = storageTypes;
return this;
}
public ClusterWithRamDiskBuilder setRamDiskReplicaCapacity(
int ramDiskReplicaCapacity) {
this.ramDiskReplicaCapacity = ramDiskReplicaCapacity;
return this;
}
public ClusterWithRamDiskBuilder setRamDiskStorageLimit(
long ramDiskStorageLimit) {
this.ramDiskStorageLimit = ramDiskStorageLimit;
return this;
}
public ClusterWithRamDiskBuilder setMaxLockedMemory(long maxLockedMemory) {
this.maxLockedMemory = maxLockedMemory;
return this;
}
public ClusterWithRamDiskBuilder setUseScr(boolean useScr) {
this.useScr = useScr;
return this;
}
public ClusterWithRamDiskBuilder setHasTransientStorage(
boolean hasTransientStorage) {
this.hasTransientStorage = hasTransientStorage;
return this;
}
public ClusterWithRamDiskBuilder setUseLegacyBlockReaderLocal(
boolean useLegacyBlockReaderLocal) {
this.useLegacyBlockReaderLocal = useLegacyBlockReaderLocal;
return this;
}
public ClusterWithRamDiskBuilder setEvictionLowWatermarkReplicas(
long evictionLowWatermarkReplicas) {
this.evictionLowWatermarkReplicas = evictionLowWatermarkReplicas;
return this;
}
public ClusterWithRamDiskBuilder disableScrubber() {
this.disableScrubber = true;
return this;
}
public void build() throws IOException {
LazyPersistTestCase.this.startUpCluster(
numDatanodes, hasTransientStorage, storageTypes, ramDiskReplicaCapacity,
ramDiskStorageLimit, evictionLowWatermarkReplicas,
maxLockedMemory, useScr, useLegacyBlockReaderLocal, disableScrubber);
}
private int numDatanodes = REPL_FACTOR;
private StorageType[] storageTypes = null;
private int ramDiskReplicaCapacity = -1;
private long ramDiskStorageLimit = -1;
private long maxLockedMemory = Long.MAX_VALUE;
private boolean hasTransientStorage = true;
private boolean useScr = false;
private boolean useLegacyBlockReaderLocal = false;
private long evictionLowWatermarkReplicas = EVICTION_LOW_WATERMARK;
private boolean disableScrubber=false;
}
protected final void triggerBlockReport()
throws IOException, InterruptedException {
// Trigger block report to NN
DataNodeTestUtils.triggerBlockReport(cluster.getDataNodes().get(0));
Thread.sleep(10 * 1000);
}
protected final boolean verifyBlockDeletedFromDir(File dir,
LocatedBlocks locatedBlocks) {
for (LocatedBlock lb : locatedBlocks.getLocatedBlocks()) {
File targetDir =
DatanodeUtil.idToBlockDir(dir, lb.getBlock().getBlockId());
File blockFile = new File(targetDir, lb.getBlock().getBlockName());
if (blockFile.exists()) {
LOG.warn("blockFile: " + blockFile.getAbsolutePath() +
" exists after deletion.");
return false;
}
File metaFile = new File(targetDir,
DatanodeUtil.getMetaName(lb.getBlock().getBlockName(),
lb.getBlock().getGenerationStamp()));
if (metaFile.exists()) {
LOG.warn("metaFile: " + metaFile.getAbsolutePath() +
" exists after deletion.");
return false;
}
}
return true;
}
protected final boolean verifyDeletedBlocks(LocatedBlocks locatedBlocks)
throws IOException, InterruptedException {
LOG.info("Verifying replica has no saved copy after deletion.");
triggerBlockReport();
while(
DataNodeTestUtils.getPendingAsyncDeletions(cluster.getDataNodes().get(0))
> 0L){
Thread.sleep(1000);
}
final String bpid = cluster.getNamesystem().getBlockPoolId();
final FsDatasetSpi<?> dataset =
cluster.getDataNodes().get(0).getFSDataset();
// Make sure deleted replica does not have a copy on either finalized dir of
// transient volume or finalized dir of non-transient volume
try (FsDatasetSpi.FsVolumeReferences volumes =
dataset.getFsVolumeReferences()) {
for (FsVolumeSpi vol : volumes) {
FsVolumeImpl volume = (FsVolumeImpl) vol;
File targetDir = (volume.isTransientStorage()) ?
volume.getBlockPoolSlice(bpid).getFinalizedDir() :
volume.getBlockPoolSlice(bpid).getLazypersistDir();
if (verifyBlockDeletedFromDir(targetDir, locatedBlocks) == false) {
return false;
}
}
}
return true;
}
protected final void verifyRamDiskJMXMetric(String metricName,
long expectedValue) throws Exception {
assertEquals(expectedValue, Integer.parseInt(jmx.getValue(metricName)));
}
protected final boolean verifyReadRandomFile(
Path path, int fileLength, int seed) throws IOException {
byte contents[] = DFSTestUtil.readFileBuffer(fs, path);
byte expected[] = DFSTestUtil.
calculateFileContentsFromSeed(seed, fileLength);
return Arrays.equals(contents, expected);
}
private JMXGet initJMX() throws Exception {
JMXGet jmx = new JMXGet();
jmx.setService(JMX_SERVICE_NAME);
jmx.init();
return jmx;
}
private void printRamDiskJMXMetrics() {
try {
if (jmx != null) {
jmx.printAllMatchedAttributes(JMX_RAM_DISK_METRICS_PATTERN);
}
} catch (Exception e) {
e.printStackTrace();
}
}
}
| |
/*
* RED5 Open Source Flash Server - http://code.google.com/p/red5/
*
* Copyright 2006-2012 by respective authors (see below). All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.red5.io.amf;
import java.beans.PropertyDescriptor;
import java.io.IOException;
import java.lang.reflect.Field;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Vector;
import org.apache.commons.beanutils.BeanUtils;
import org.apache.commons.beanutils.BeanUtilsBean;
import org.apache.commons.beanutils.PropertyUtilsBean;
import org.apache.mina.core.buffer.IoBuffer;
import org.red5.io.amf3.ByteArray;
import org.red5.io.object.BaseInput;
import org.red5.io.object.DataTypes;
import org.red5.io.object.Deserializer;
import org.red5.io.object.RecordSet;
import org.red5.io.object.RecordSetPage;
import org.red5.io.utils.ArrayUtils;
import org.red5.io.utils.ObjectMap;
import org.red5.io.utils.XMLUtils;
import org.red5.server.util.ConversionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Document;
/**
 * Input for Red5 data types — decodes AMF0-encoded values from an IoBuffer.
 *
 * @author The Red5 Project (red5@osflash.org)
 * @author Luke Hubbard, Codegent Ltd (luke@codegent.com)
 */
@SuppressWarnings("serial")
public class Input extends BaseInput implements org.red5.io.object.Input {

    protected static Logger log = LoggerFactory.getLogger(Input.class);

    // Maps the short "DS*" Flex messaging aliases to their Red5 compatibility
    // class names; consulted by newInstance() for 3-character class names.
    protected static Map<String, String> classAliases = new HashMap<String, String>(3) {
        {
            put("DSA", "org.red5.compatibility.flex.messaging.messages.AsyncMessageExt");
            put("DSC", "org.red5.compatibility.flex.messaging.messages.CommandMessageExt");
            put("DSK", "org.red5.compatibility.flex.messaging.messages.AcknowledgeMessageExt");
        }
    };

    // Buffer this Input reads AMF0 data from
    protected IoBuffer buf;

    // AMF0 marker byte of the value currently being decoded
    protected byte currentDataType;

    /**
     * Creates Input object from byte buffer
     *
     * @param buf Byte buffer to decode from
     */
    public Input(IoBuffer buf) {
        super();
        this.buf = buf;
    }
    /**
     * Reads the next AMF0 type marker from the buffer and maps it to a core
     * data-type constant.
     *
     * @return byte one of the DataTypes core type constants
     */
    public byte readDataType() {
        if (buf != null) {
            // XXX Paul: prevent an NPE here by returning the current data type
            // when there is a null buffer
            currentDataType = buf.get();
        } else {
            log.error("Why is buf null?");
        }
        // map the raw AMF marker (or the previous one, if buf was null) to a core type
        return readDataType(currentDataType);
    }
/**
* Reads the data type.
*
* @param dataType Data type as byte
* @return One of AMF class constants with type
* @see org.red5.io.amf.AMF
*/
protected byte readDataType(byte dataType) {
byte coreType;
switch (currentDataType) {
case AMF.TYPE_NULL:
case AMF.TYPE_UNDEFINED:
coreType = DataTypes.CORE_NULL;
break;
case AMF.TYPE_NUMBER:
coreType = DataTypes.CORE_NUMBER;
break;
case AMF.TYPE_BOOLEAN:
coreType = DataTypes.CORE_BOOLEAN;
break;
case AMF.TYPE_STRING:
case AMF.TYPE_LONG_STRING:
coreType = DataTypes.CORE_STRING;
break;
case AMF.TYPE_CLASS_OBJECT:
case AMF.TYPE_OBJECT:
coreType = DataTypes.CORE_OBJECT;
break;
case AMF.TYPE_MIXED_ARRAY:
coreType = DataTypes.CORE_MAP;
break;
case AMF.TYPE_ARRAY:
coreType = DataTypes.CORE_ARRAY;
break;
case AMF.TYPE_DATE:
coreType = DataTypes.CORE_DATE;
break;
case AMF.TYPE_XML:
coreType = DataTypes.CORE_XML;
break;
case AMF.TYPE_REFERENCE:
coreType = DataTypes.OPT_REFERENCE;
break;
case AMF.TYPE_UNSUPPORTED:
case AMF.TYPE_MOVIECLIP:
case AMF.TYPE_RECORDSET:
// These types are not handled by core datatypes
// So add the amf mask to them, this way the deserializer
// will call back to readCustom, we can then handle or return null
coreType = (byte) (currentDataType + DataTypes.CUSTOM_AMF_MASK);
break;
case AMF.TYPE_END_OF_OBJECT:
default:
// End of object, and anything else lets just skip
coreType = DataTypes.CORE_SKIP;
break;
}
return coreType;
}
    // Basic
    /**
     * Reads a null.
     *
     * @param target ignored; a null value carries no payload
     * @return Object always null
     */
    public Object readNull(Type target) {
        return null;
    }
/**
* Reads a boolean.
*
* @return boolean
*/
public Boolean readBoolean(Type target) {
// TODO: check values
return (buf.get() == AMF.VALUE_TRUE) ? Boolean.TRUE : Boolean.FALSE;
}
/**
* Reads a Number. In ActionScript 1 and 2 Number type represents all numbers,
* both floats and integers.
*
* @return Number
*/
public Number readNumber(Type target) {
double num = buf.getDouble();
if (num == Math.round(num)) {
if (num < Integer.MAX_VALUE) {
return (int) num;
} else {
return Math.round(num);
}
} else {
return num;
}
}
    /**
     * Reads a length-prefixed string from this Input's buffer.
     *
     * @return String decoded string
     */
    public String getString() {
        // delegate to the static IoBuffer variant using the instance buffer
        return getString(buf);
    }
    /**
     * Reads a string.
     *
     * The length prefix depends on the AMF0 marker already consumed: a long
     * string carries a 32-bit length, a regular string a 16-bit length.
     *
     * @param target ignored; strings need no target type
     * @return String decoded string
     */
    public String readString(Type target) {
        int len = 0;
        switch (currentDataType) {
            case AMF.TYPE_LONG_STRING:
                len = buf.getInt();
                break;
            case AMF.TYPE_STRING:
                // mask to treat the 16-bit length as unsigned
                len = buf.getShort() & 0xffff; //buf.getUnsignedShort();
                break;
            default:
                log.debug("Unknown AMF type: {}", currentDataType);
        }
        int limit = buf.limit();
        log.debug("Limit: {}", limit);
        // bufferToString temporarily narrows the limit to len bytes
        String string = bufferToString(buf.buf(), len);
        buf.limit(limit); // Reset the limit
        return string;
    }
    /**
     * Decodes a 16-bit length-prefixed string from the given buffer.
     *
     * @param buf Byte buffer positioned at the length prefix
     * @return String Decoded string
     */
    public static String getString(IoBuffer buf) {
        int len = buf.getShort() & 0xffff; //buf.getUnsignedShort(); XXX is appears to be broken in mina at 2.0.4
        log.debug("Length: {}", len);
        int limit = buf.limit();
        log.debug("Limit: {}", limit);
        // bufferToString temporarily narrows the limit to len bytes
        String string = bufferToString(buf.buf(), len);
        buf.limit(limit); // Reset the limit
        return string;
    }
/**
* Returns a string based on the buffer
*
* @param buf Byte buffer with data
* @return String Decoded string
*/
public static String getString(java.nio.ByteBuffer buf) {
int len = buf.getShort() & 0xffff;
log.debug("Length: {}", len);
int limit = buf.limit();
log.debug("Limit: {}", limit);
String string = bufferToString(buf, len);
buf.limit(limit); // Reset the limit
return string;
}
/**
* Converts the bytes into a string.
*
* @param strBuf
* @return contents of the ByteBuffer as a String
*/
private final static String bufferToString(final java.nio.ByteBuffer strBuf, int len) {
String string = null;
if (strBuf != null) {
int pos = strBuf.position();
log.debug("String buf - position: {} limit: {}", pos, (pos + len));
strBuf.limit(pos + len);
string = AMF.CHARSET.decode(strBuf).toString();
log.debug("String: {}", string);
} else {
log.warn("ByteBuffer was null attempting to read String");
}
return string;
}
    /**
     * Returns a date.
     *
     * @param target ignored; the date wire format is fixed
     * @return Date Decoded date object
     */
    public Date readDate(Type target) {
        /*
         * Date: 0x0B T7 T6 .. T0 Z1 Z2 T7 to T0 form a 64 bit Big Endian number
         * that specifies the number of nanoseconds that have passed since
         * 1/1/1970 0:00 to the specified time. This format is UTC 1970. Z1 an
         * Z0 for a 16 bit Big Endian number indicating the indicated time's
         * timezone in minutes.
         */
        long ms = (long) buf.getDouble();
        // The timezone can be ignored as the date always is encoded in UTC
        @SuppressWarnings("unused")
        short timeZoneMins = buf.getShort();
        Date date = new Date(ms);
        // register the date so later AMF references can point back at it
        storeReference(date);
        return date;
    }
// Array
public Object readArray(Deserializer deserializer, Type target) {
log.debug("readArray - deserializer: {} target: {}", deserializer, target);
Object result = null;
int count = buf.getInt();
log.debug("Count: {}", count);
List<Object> resultCollection = new ArrayList<Object>(count);
storeReference(result);
for (int i = 0; i < count; i++) {
resultCollection.add(deserializer.deserialize(this, Object.class));
}
// To maintain conformance to the Input API, we should convert the output
// into an Array if the Type asks us to.
Class<?> collection = Collection.class;
if (target instanceof Class<?>) {
collection = (Class<?>) target;
}
if (collection.isArray()) {
result = ArrayUtils.toArray(collection.getComponentType(), resultCollection);
} else {
result = resultCollection;
}
return result;
}
    // Map
    /**
     * Read key - value pairs. This is required for the RecordSet
     * deserializer.
     *
     * @param deserializer Deserializer used for the property values
     * @return Map of property name to deserialized value
     */
    public Map<String, Object> readKeyValues(Deserializer deserializer) {
        Map<String, Object> result = new HashMap<String, Object>();
        readKeyValues(result, deserializer);
        return result;
    }
/**
* Read key - value pairs into Map object
* @param result Map to put resulting pair to
* @param deserializer Deserializer used
*/
protected void readKeyValues(Map<String, Object> result, Deserializer deserializer) {
while (hasMoreProperties()) {
String name = readPropertyName();
log.debug("property: {}", name);
Object property = deserializer.deserialize(this, Object.class);
log.debug("val: {}", property);
result.put(name, property);
if (hasMoreProperties()) {
skipPropertySeparator();
}
}
skipEndObject();
}
    /**
     * Reads an AMF0 mixed (ECMA) array. When every key parses as an integer and
     * the entry count matches the declared size, the result collapses to a
     * regular List; otherwise a Map keyed by Integer for the numeric prefix and
     * String for the rest is returned.
     *
     * @param deserializer Deserializer used for the values
     * @param target ignored; the result shape is decided by the keys
     * @return List when the mixed array is a plain array, Map otherwise
     */
    public Object readMap(Deserializer deserializer, Type target) {
        // The maximum number used in this mixed array.
        int maxNumber = buf.getInt();
        log.debug("Read start mixed array: {}", maxNumber);
        Object result;
        final Map<Object, Object> mixedResult = new LinkedHashMap<Object, Object>(maxNumber);
        // we must store the reference before we deserialize any items in it to ensure
        // that reference IDs are correct
        int reference = storeReference(mixedResult);
        Boolean normalArray = true;
        while (hasMoreProperties()) {
            String key = getString(buf);
            log.debug("key: {}", key);
            try {
                // any non-numeric key demotes this from a plain array to a map
                Integer.parseInt(key);
            } catch (NumberFormatException e) {
                log.debug("key {} is causing non normal array", key);
                normalArray = false;
            }
            Object item = deserializer.deserialize(this, Object.class);
            log.debug("item: {}", item);
            mixedResult.put(key, item);
        }
        // +1 allows for the implicit "length" entry
        if (mixedResult.size() <= maxNumber + 1 && normalArray) {
            // MixedArray actually is a regular array
            log.debug("mixed array is a regular array");
            final List<Object> listResult = new ArrayList<Object>(maxNumber);
            for (int i = 0; i < maxNumber; i++) {
                listResult.add(i, mixedResult.get(String.valueOf(i)));
            }
            result = listResult;
        } else {
            // Convert initial indexes: numeric string keys become Integer keys
            mixedResult.remove("length");
            for (int i = 0; i < maxNumber; i++) {
                final Object value = mixedResult.remove(String.valueOf(i));
                mixedResult.put(i, value);
            }
            result = mixedResult;
        }
        // Replace the original reference with the final result
        storeReference(reference, result);
        skipEndObject();
        return result;
    }
// Object
/**
* Creates a new instance of the className parameter and
* returns as an Object
*
* @param className Class name as String
* @return Object New object instance (for given class)
*/
@SuppressWarnings("all")
protected Object newInstance(String className) {
log.debug("Loading class: {}", className);
Object instance = null;
Class<?> clazz = null;
//fix for Trac #604
if ("".equals(className) || className == null)
return instance;
try {
//check for special DS class aliases
if (className.length() == 3) {
className = classAliases.get(className);
}
if (className.startsWith("flex.")) {
// Use Red5 compatibility class instead
className = "org.red5.compatibility." + className;
log.debug("Modified classname: {}", className);
}
clazz = Thread.currentThread().getContextClassLoader().loadClass(className);
instance = clazz.newInstance();
} catch (InstantiationException iex) {
try {
//check for default ctor
clazz.getConstructor(null);
log.error("Error loading class: {}", className);
} catch (NoSuchMethodException nse) {
log.error("Error loading class: {}; this can be resolved by adding a default constructor to your class", className);
}
log.debug("Exception was: {}", iex);
} catch (Exception ex) {
log.error("Error loading class: {}", className);
log.debug("Exception was: {}", ex);
}
return instance;
}
    /**
     * Reads the input as a bean and returns an object.
     *
     * Each decoded property is first converted to the declared property type
     * (when known), then assigned via a public field if one exists, falling
     * back to the JavaBean setter.
     *
     * @param deserializer Deserializer used for the property values
     * @param bean target instance to populate
     * @return Decoded object (the same bean instance, populated)
     */
    @SuppressWarnings({ "unchecked", "rawtypes" })
    protected Object readBean(Deserializer deserializer, Object bean) {
        log.debug("read bean");
        // register the bean before reading properties so nested back-references resolve
        storeReference(bean);
        Class theClass = bean.getClass();
        while (hasMoreProperties()) {
            String name = readPropertyName();
            // resolve the declared property type so the value can be converted to it
            Type type = getPropertyType(bean, name);
            log.debug("property: {}", name);
            Object property = deserializer.deserialize(this, type);
            log.debug("val: {}", property);
            //log.debug("val: "+property.getClass().getName());
            if (property != null) {
                try {
                    if (type instanceof Class) {
                        Class t = (Class) type;
                        if (!t.isAssignableFrom(property.getClass())) {
                            property = ConversionUtils.convert(property, t);
                        }
                    }
                    // try direct public-field assignment first...
                    final Field field = theClass.getField(name);
                    field.set(bean, property);
                } catch (Exception ex2) {
                    try {
                        // ...then fall back to the JavaBean setter
                        BeanUtils.setProperty(bean, name, property);
                    } catch (Exception ex) {
                        log.error("Error mapping property: {} ({})", name, property);
                    }
                }
            } else {
                // null values are dropped rather than assigned
                log.debug("Skipping null property: {}", name);
            }
            if (hasMoreProperties()) {
                skipPropertySeparator();
            }
        }
        skipEndObject();
        return bean;
    }
/**
* Reads the input as a map and returns a Map
*
* @param deserializer Deserializer to use
* @return Read map
*/
protected Map<String, Object> readSimpleObject(Deserializer deserializer) {
log.debug("read map");
Map<String, Object> result = new ObjectMap<String, Object>();
readKeyValues(result, deserializer);
storeReference(result);
return result;
}
/**
* Reads start object
*
* @param deserializer Deserializer to use
* @return Read object
*/
public Object readObject(Deserializer deserializer, Type target) {
String className;
if (currentDataType == AMF.TYPE_CLASS_OBJECT) {
className = getString(buf);
} else {
className = null;
}
log.debug("readObject: {}", className);
Object result = null;
if (className != null) {
log.debug("read class object");
Object instance;
if (className.equals("RecordSet")) {
result = new RecordSet(this);
storeReference(result);
} else if (className.equals("RecordSetPage")) {
result = new RecordSetPage(this);
storeReference(result);
} else {
instance = newInstance(className);
if (instance != null) {
result = readBean(deserializer, instance);
} else {
log.debug("Forced to use simple object for class {}", className);
result = readSimpleObject(deserializer);
}
}
} else {
result = readSimpleObject(deserializer);
}
return result;
}
/**
* Returns a boolean stating whether there are more properties
*
* @return boolean <code>true</code> if there are more properties to read, <code>false</code> otherwise
*/
public boolean hasMoreProperties() {
byte pad = 0x00;
byte pad0 = buf.get();
byte pad1 = buf.get();
byte type = buf.get();
boolean isEndOfObject = (pad0 == pad && pad1 == pad && type == AMF.TYPE_END_OF_OBJECT);
log.debug("End of object: ? {}", isEndOfObject);
buf.position(buf.position() - 3);
return !isEndOfObject;
}
    /**
     * Reads a property name (a 16-bit length-prefixed string).
     *
     * @return String Object property name
     */
    public String readPropertyName() {
        return getString(buf);
    }
    /**
     * Skips the property separator. Nothing is consumed here — AMF0 properties
     * are adjacent on the wire, so this exists only to satisfy the Input contract.
     */
    public void skipPropertySeparator() {
        // SKIP
    }
    /**
     * Skips the end-of-object terminator: two padding bytes plus the
     * end-of-object marker byte.
     */
    public void skipEndObject() {
        // skip two marker bytes
        // then end of object byte
        buf.skip(3);
    }
    // Others
    /**
     * Reads XML.
     *
     * @param target forwarded to readString for the raw payload
     * @return Document parsed from the XML string, or null when parsing fails
     */
    public Document readXML(Type target) {
        final String xmlString = readString(target);
        Document doc = null;
        try {
            doc = XMLUtils.stringToDoc(xmlString);
        } catch (IOException ioex) {
            log.error("IOException converting xml to dom", ioex);
        }
        // NOTE(review): on parse failure this stores null as the reference —
        // confirm that is the intended behavior
        storeReference(doc);
        return doc;
    }
    /**
     * Reads a custom type. Markers routed here via CUSTOM_AMF_MASK (see
     * readDataType) are not decoded by this implementation.
     *
     * @return Object Custom type object — always null
     */
    public Object readCustom(Type target) {
        // Return null for now
        return null;
    }
    /**
     * Read ByteArray object. This is not supported by the AMF0 deserializer.
     *
     * @return ByteArray object — never returns
     * @throws RuntimeException always; ByteArray is not supported with AMF0
     */
    public ByteArray readByteArray(Type target) {
        throw new RuntimeException("ByteArray objects not supported with AMF0");
    }

    /**
     * Read {@code Vector<int>} object. This is not supported by the AMF0 deserializer.
     *
     * @return {@code Vector<Integer>} object — never returns
     * @throws RuntimeException always; Vector is not supported with AMF0
     */
    public Vector<Integer> readVectorInt() {
        throw new RuntimeException("Vector objects not supported with AMF0");
    }

    /**
     * Read {@code Vector<Long>} object. This is not supported by the AMF0 deserializer.
     *
     * @return {@code Vector<Long>} object — never returns
     * @throws RuntimeException always; Vector is not supported with AMF0
     */
    public Vector<Long> readVectorUInt() {
        throw new RuntimeException("Vector objects not supported with AMF0");
    }

    /**
     * Read {@code Vector<Number>} object. This is not supported by the AMF0 deserializer.
     *
     * @return {@code Vector<Double>} object — never returns
     * @throws RuntimeException always; Vector is not supported with AMF0
     */
    public Vector<Double> readVectorNumber() {
        throw new RuntimeException("Vector objects not supported with AMF0");
    }

    /**
     * Read {@code Vector<Object>} object. This is not supported by the AMF0 deserializer.
     *
     * @return {@code Vector<Object>} object — never returns
     * @throws RuntimeException always; Vector is not supported with AMF0
     */
    public Vector<Object> readVectorObject() {
        throw new RuntimeException("Vector objects not supported with AMF0");
    }
/**
* Reads Reference
*
* @return Object Read reference to object
*/
public Object readReference(Type target) {
//return getReference(buf.getUnsignedShort());
return getReference(buf.getShort() & 0xffff);
}
    /**
     * Resets the reference map accumulated while decoding.
     */
    public void reset() {
        this.clearReferences();
    }
    /**
     * Determines the declared generic type of a property: first via a public
     * field of the same name, then via the JavaBean getter's return type.
     * Falls back to Object.class when the type cannot be determined.
     *
     * @param instance bean instance (null for anonymous objects)
     * @param propertyName name of the property being resolved
     * @return the property's generic Type, or Object.class by default
     */
    protected Type getPropertyType(Object instance, String propertyName) {
        try {
            if (instance != null) {
                Field field = instance.getClass().getField(propertyName);
                return field.getGenericType();
            } else {
                // instance is null for anonymous class, use default type
            }
        } catch (NoSuchFieldException e1) {
            try {
                // no public field; fall back to the bean property's read method
                BeanUtilsBean beanUtilsBean = BeanUtilsBean.getInstance();
                PropertyUtilsBean propertyUtils = beanUtilsBean.getPropertyUtils();
                PropertyDescriptor propertyDescriptor = propertyUtils.getPropertyDescriptor(instance, propertyName);
                return propertyDescriptor.getReadMethod().getGenericReturnType();
            } catch (Exception e2) {
                // nothing
            }
        } catch (Exception e) {
            // ignore other exceptions
        }
        // return Object class type by default
        return Object.class;
    }
}
| |
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package com.coderbd.pos.design;
import com.coderbd.pos.constraints.Enum;
import com.coderbd.pos.entity.SupplierOrder;
import com.coderbd.pos.entity.SupplierOrderPayment;
import com.coderbd.pos.entity.SupplierOrderProduct;
import com.coderbd.pos.service.SupplierOrderPaymentService;
import com.coderbd.pos.service.SupplierOrderService;
import com.coderbd.pos.service.SupplierProductService;
import com.coderbd.pos.utils.DateUtil;
import com.coderbd.pos.utils.Reset;
import java.sql.Timestamp;
import java.util.Date;
import java.util.List;
import javax.swing.JOptionPane;
import javax.swing.JTable;
import javax.swing.JTextField;
import javax.swing.table.DefaultTableModel;
/**
 * Panel for viewing and editing a single supplier order: its product lines,
 * its payments, and the resulting bill/paid/due summary.
 *
 * @author Biswajit Debnath
 */
public class SupplierOrderFormPanel extends javax.swing.JPanel {

    // Running billing figures for the order shown in this panel
    Double totalBill = 0.0;
    Double totalPaid = 0.0;
    Double totalDue = 0.0;

    // Order being displayed and the services used to persist changes to it
    private SupplierOrder supplierOrder;
    private SupplierOrderService orderService;
    private SupplierProductService supplierProductService;
    private SupplierOrderPaymentService paymentService;

    /**
     * Creates new form SupplierOrderFormPanel.
     *
     * @param supplierOrder order displayed and edited by this panel
     * @param orderService service used for order persistence
     * @param supplierProductService service used for order-product persistence
     * @param paymentService service used for order-payment persistence
     */
    public SupplierOrderFormPanel(SupplierOrder supplierOrder, SupplierOrderService orderService, SupplierProductService supplierProductService, SupplierOrderPaymentService paymentService) {
        initComponents();
        this.supplierOrder = supplierOrder;
        this.orderService = orderService;
        this.supplierProductService = supplierProductService;
        this.paymentService = paymentService;
        // clear the placeholder rows the form editor puts into both tables
        Reset.resetTable(psoTable);
        Reset.resetTable(paymentTable);
        panelTitle.setText(supplierOrder.getSupplier().getSupplierName() + ", " + supplierOrder.getSupplier().getSupplierMobile());
        addAllOrderProductItemInTable();
        addAllPaymentItemInTable();
        updateBillingInfo();
    }
    /**
     * Recomputes bill/paid/due from the order and refreshes the summary labels.
     */
    private void updateBillingInfo() {
        // NOTE(review): monetary amounts are held as Double — binary floating
        // point is lossy for currency; consider BigDecimal if precision matters
        totalBill = supplierOrder.getSupplierProductTotalBill();
        totalPaid = supplierOrder.getSupplierOrderTotalPayment();
        totalDue = totalBill - totalPaid;
        totalBillLabel.setText(totalBill.toString());
        totalPaidLabel.setText(totalPaid.toString());
        totalDueLabel.setText(totalDue.toString());
    }
private void addAllOrderProductItemInTable() {
DefaultTableModel tableModel = (DefaultTableModel) psoTable.getModel();
Reset.resetTable(psoTable);
List<SupplierOrderProduct> supplierOrderProducts = supplierOrder.getSupplierProducts();
if (supplierOrderProducts != null) {
for (SupplierOrderProduct sop : supplierOrderProducts) {
Double amount = sop.getSupplierRate() * sop.getSupplierProductQuantity();
Object object[] = {
sop.getSupplierProductName(),
sop.getSupplierProductQuantity(),
sop.getSupplierRate(),
amount
};
tableModel.addRow(object);
}
}
}
private void addOrderProductItemInTable(SupplierOrderProduct sop) {
DefaultTableModel tableModel = (DefaultTableModel) psoTable.getModel();
Double amount = sop.getSupplierRate() * sop.getSupplierProductQuantity();
Object object[] = {
sop.getSupplierProductName(),
sop.getSupplierProductQuantity(),
sop.getSupplierRate(),
amount
};
tableModel.addRow(object);
}
/**
* This is for payment table modification
*/
private void addAllPaymentItemInTable() {
DefaultTableModel tableModel = (DefaultTableModel) paymentTable.getModel();
List<SupplierOrderPayment> supplierOrderPayments = supplierOrder.getSupplierOrderPayments();
if (supplierOrderPayments != null) {
for (SupplierOrderPayment sop : supplierOrderPayments) {
Object object[] = {
sop.getPaymentDate(),
sop.getAmount(),
sop.getDescription()
};
tableModel.addRow(object);
}
}
}
private void addPaymentItemInTable(SupplierOrderPayment sop) {
DefaultTableModel tableModel = (DefaultTableModel) paymentTable.getModel();
Object object[] = {
sop.getPaymentDate(),
sop.getAmount(),
sop.getDescription()
};
tableModel.addRow(object);
}
    /**
     * Removes the row at the given index from the given table's model.
     *
     * @param table table to remove the row from
     * @param index zero-based model row index
     */
    private void removeItemFromTable(JTable table, int index) {
        DefaultTableModel tableModel = (DefaultTableModel) table.getModel();
        tableModel.removeRow(index);
    }
/**
* This method is called from within the constructor to initialize the form.
* WARNING: Do NOT modify this code. The content of this method is always
* regenerated by the Form Editor.
*/
@SuppressWarnings("unchecked")
// <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
private void initComponents() {
supplierOrderItemInputPanel = new javax.swing.JPanel();
productInputPanel = new javax.swing.JPanel();
pNameField = new javax.swing.JTextField();
pQuantityField = new javax.swing.JTextField();
pRateField = new javax.swing.JTextField();
addPItemButton = new javax.swing.JButton();
removePItemButton = new javax.swing.JButton();
jLabel1 = new javax.swing.JLabel();
jLabel2 = new javax.swing.JLabel();
jLabel3 = new javax.swing.JLabel();
panelTitle = new javax.swing.JLabel();
jButton1 = new javax.swing.JButton();
paymentInputPanel = new javax.swing.JPanel();
paymentDateC = new com.toedter.calendar.JDateChooser();
paymentField = new javax.swing.JTextField();
descField = new javax.swing.JTextField();
addPaymentB = new javax.swing.JButton();
removePaymentB = new javax.swing.JButton();
jLabel4 = new javax.swing.JLabel();
jLabel7 = new javax.swing.JLabel();
jLabel9 = new javax.swing.JLabel();
productPanel = new javax.swing.JPanel();
totalBillLabel = new javax.swing.JLabel();
jLabel5 = new javax.swing.JLabel();
jLabel6 = new javax.swing.JLabel();
jLabel8 = new javax.swing.JLabel();
totalDueLabel = new javax.swing.JLabel();
totalPaidLabel = new javax.swing.JLabel();
paymentPanel = new javax.swing.JPanel();
jPanel3 = new javax.swing.JPanel();
jScrollPane2 = new javax.swing.JScrollPane();
psoTable = new javax.swing.JTable();
jPanel4 = new javax.swing.JPanel();
jScrollPane1 = new javax.swing.JScrollPane();
paymentTable = new javax.swing.JTable();
setPreferredSize(new java.awt.Dimension(850, 520));
setLayout(new java.awt.BorderLayout());
supplierOrderItemInputPanel.setLayout(new java.awt.GridLayout(1, 0));
productInputPanel.setBorder(new javax.swing.border.SoftBevelBorder(javax.swing.border.BevelBorder.RAISED));
addPItemButton.setText("Add Item");
addPItemButton.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
addPItemButtonActionPerformed(evt);
}
});
removePItemButton.setText("Remove Item");
removePItemButton.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
removePItemButtonActionPerformed(evt);
}
});
jLabel1.setText("Product Name");
jLabel2.setText("Quantity");
jLabel3.setText("Buy Rate");
panelTitle.setFont(new java.awt.Font("Tahoma", 1, 14)); // NOI18N
panelTitle.setText("Fashion Park");
jButton1.setText("Update Item");
jButton1.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButton1ActionPerformed(evt);
}
});
javax.swing.GroupLayout productInputPanelLayout = new javax.swing.GroupLayout(productInputPanel);
productInputPanel.setLayout(productInputPanelLayout);
productInputPanelLayout.setHorizontalGroup(
productInputPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(javax.swing.GroupLayout.Alignment.TRAILING, productInputPanelLayout.createSequentialGroup()
.addContainerGap()
.addGroup(productInputPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING, false)
.addComponent(removePItemButton, javax.swing.GroupLayout.Alignment.LEADING, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addComponent(jLabel1, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addComponent(pNameField, javax.swing.GroupLayout.Alignment.LEADING, javax.swing.GroupLayout.PREFERRED_SIZE, 184, javax.swing.GroupLayout.PREFERRED_SIZE))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addGroup(productInputPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(productInputPanelLayout.createSequentialGroup()
.addComponent(jLabel2, javax.swing.GroupLayout.PREFERRED_SIZE, 94, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(jLabel3, javax.swing.GroupLayout.PREFERRED_SIZE, 90, javax.swing.GroupLayout.PREFERRED_SIZE))
.addGroup(productInputPanelLayout.createSequentialGroup()
.addGroup(productInputPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING, false)
.addComponent(pQuantityField, javax.swing.GroupLayout.DEFAULT_SIZE, 94, Short.MAX_VALUE)
.addComponent(jButton1, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addGroup(productInputPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(addPItemButton, javax.swing.GroupLayout.PREFERRED_SIZE, 89, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(pRateField, javax.swing.GroupLayout.PREFERRED_SIZE, 90, javax.swing.GroupLayout.PREFERRED_SIZE))))
.addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
.addGroup(productInputPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(productInputPanelLayout.createSequentialGroup()
.addContainerGap()
.addComponent(panelTitle, javax.swing.GroupLayout.DEFAULT_SIZE, 380, Short.MAX_VALUE)
.addContainerGap()))
);
productInputPanelLayout.setVerticalGroup(
productInputPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(productInputPanelLayout.createSequentialGroup()
.addContainerGap(41, Short.MAX_VALUE)
.addGroup(productInputPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(jLabel1)
.addComponent(jLabel2)
.addComponent(jLabel3))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addGroup(productInputPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(pNameField, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(pQuantityField, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(pRateField, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addGroup(productInputPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(removePItemButton)
.addComponent(addPItemButton)
.addComponent(jButton1))
.addContainerGap())
.addGroup(productInputPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(productInputPanelLayout.createSequentialGroup()
.addGap(2, 2, 2)
.addComponent(panelTitle, javax.swing.GroupLayout.PREFERRED_SIZE, 19, javax.swing.GroupLayout.PREFERRED_SIZE)
.addContainerGap(100, Short.MAX_VALUE)))
);
supplierOrderItemInputPanel.add(productInputPanel);
addPaymentB.setText("Add Payment");
addPaymentB.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
addPaymentBActionPerformed(evt);
}
});
removePaymentB.setText("Remove Payment");
removePaymentB.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
removePaymentBActionPerformed(evt);
}
});
jLabel4.setText("Payment Date");
jLabel7.setText("Amont");
jLabel9.setText("Description");
javax.swing.GroupLayout paymentInputPanelLayout = new javax.swing.GroupLayout(paymentInputPanel);
paymentInputPanel.setLayout(paymentInputPanelLayout);
paymentInputPanelLayout.setHorizontalGroup(
paymentInputPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(paymentInputPanelLayout.createSequentialGroup()
.addContainerGap()
.addGroup(paymentInputPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING, false)
.addGroup(paymentInputPanelLayout.createSequentialGroup()
.addComponent(paymentDateC, javax.swing.GroupLayout.PREFERRED_SIZE, 160, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addGroup(paymentInputPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING, false)
.addComponent(paymentField)
.addComponent(removePaymentB, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)))
.addGroup(paymentInputPanelLayout.createSequentialGroup()
.addComponent(jLabel4, javax.swing.GroupLayout.PREFERRED_SIZE, 160, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(jLabel7, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addGroup(paymentInputPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(addPaymentB, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addComponent(descField)
.addComponent(jLabel9, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
.addContainerGap())
);
paymentInputPanelLayout.setVerticalGroup(
paymentInputPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(javax.swing.GroupLayout.Alignment.TRAILING, paymentInputPanelLayout.createSequentialGroup()
.addContainerGap(46, Short.MAX_VALUE)
.addGroup(paymentInputPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(jLabel4)
.addComponent(jLabel7)
.addComponent(jLabel9))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addGroup(paymentInputPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(paymentInputPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(paymentField, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(descField, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
.addComponent(paymentDateC, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addGroup(paymentInputPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(addPaymentB)
.addComponent(removePaymentB))
.addGap(12, 12, 12))
);
supplierOrderItemInputPanel.add(paymentInputPanel);
add(supplierOrderItemInputPanel, java.awt.BorderLayout.PAGE_START);
productPanel.setPreferredSize(new java.awt.Dimension(492, 70));
totalBillLabel.setFont(new java.awt.Font("Tahoma", 1, 12)); // NOI18N
totalBillLabel.setText("0.0");
jLabel5.setFont(new java.awt.Font("Tahoma", 1, 14)); // NOI18N
jLabel5.setText("Bill");
jLabel6.setFont(new java.awt.Font("Tahoma", 1, 14)); // NOI18N
jLabel6.setText("Paid");
jLabel8.setFont(new java.awt.Font("Tahoma", 1, 14)); // NOI18N
jLabel8.setText("Due");
totalDueLabel.setFont(new java.awt.Font("Tahoma", 1, 12)); // NOI18N
totalDueLabel.setText("0.0");
totalPaidLabel.setFont(new java.awt.Font("Tahoma", 1, 12)); // NOI18N
totalPaidLabel.setText("0.0");
javax.swing.GroupLayout productPanelLayout = new javax.swing.GroupLayout(productPanel);
productPanel.setLayout(productPanelLayout);
productPanelLayout.setHorizontalGroup(
productPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(productPanelLayout.createSequentialGroup()
.addContainerGap()
.addGroup(productPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING, false)
.addComponent(jLabel6, javax.swing.GroupLayout.DEFAULT_SIZE, 42, Short.MAX_VALUE)
.addComponent(jLabel5, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addGroup(productPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(totalPaidLabel, javax.swing.GroupLayout.DEFAULT_SIZE, 285, Short.MAX_VALUE)
.addComponent(totalBillLabel, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
.addGap(18, 18, 18)
.addComponent(jLabel8, javax.swing.GroupLayout.PREFERRED_SIZE, 35, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(totalDueLabel, javax.swing.GroupLayout.PREFERRED_SIZE, 153, javax.swing.GroupLayout.PREFERRED_SIZE)
.addGap(257, 257, 257))
);
productPanelLayout.setVerticalGroup(
productPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(javax.swing.GroupLayout.Alignment.TRAILING, productPanelLayout.createSequentialGroup()
.addContainerGap()
.addGroup(productPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(productPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(totalBillLabel, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addComponent(jLabel8)
.addComponent(totalDueLabel))
.addComponent(jLabel5, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addGroup(productPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(totalPaidLabel)
.addComponent(jLabel6))
.addContainerGap(19, Short.MAX_VALUE))
);
add(productPanel, java.awt.BorderLayout.PAGE_END);
paymentPanel.setLayout(new java.awt.GridLayout(1, 0));
jPanel3.setBorder(new javax.swing.border.SoftBevelBorder(javax.swing.border.BevelBorder.RAISED));
jPanel3.setPreferredSize(new java.awt.Dimension(350, 355));
jPanel3.setLayout(new java.awt.CardLayout());
psoTable.setModel(new javax.swing.table.DefaultTableModel(
new Object [][] {
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null}
},
new String [] {
"Product Name", "Quantity", "Rate", "Amount"
}
) {
Class[] types = new Class [] {
java.lang.Object.class, java.lang.Integer.class, java.lang.Double.class, java.lang.Double.class
};
boolean[] canEdit = new boolean [] {
false, false, false, false
};
public Class getColumnClass(int columnIndex) {
return types [columnIndex];
}
public boolean isCellEditable(int rowIndex, int columnIndex) {
return canEdit [columnIndex];
}
});
psoTable.getTableHeader().setReorderingAllowed(false);
psoTable.addMouseListener(new java.awt.event.MouseAdapter() {
public void mouseClicked(java.awt.event.MouseEvent evt) {
psoTableMouseClicked(evt);
}
});
jScrollPane2.setViewportView(psoTable);
jPanel3.add(jScrollPane2, "card2");
paymentPanel.add(jPanel3);
jPanel4.setLayout(new java.awt.CardLayout());
paymentTable.setModel(new javax.swing.table.DefaultTableModel(
new Object [][] {
{null, null, null},
{null, null, null},
{null, null, null},
{null, null, null}
},
new String [] {
"Date", "Amount", "Desc"
}
) {
Class[] types = new Class [] {
java.lang.Object.class, java.lang.Double.class, java.lang.String.class
};
boolean[] canEdit = new boolean [] {
false, false, false
};
public Class getColumnClass(int columnIndex) {
return types [columnIndex];
}
public boolean isCellEditable(int rowIndex, int columnIndex) {
return canEdit [columnIndex];
}
});
jScrollPane1.setViewportView(paymentTable);
jPanel4.add(jScrollPane1, "card2");
paymentPanel.add(jPanel4);
add(paymentPanel, java.awt.BorderLayout.CENTER);
}// </editor-fold>//GEN-END:initComponents
/**
 * Handles the "add product" button: builds a generated product name, persists a
 * new SupplierOrderProduct, mirrors it into the table/model and refreshes the
 * order totals. Input fields are cleared afterwards in all cases.
 */
private void addPItemButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_addPItemButtonActionPerformed
    SupplierOrderProduct sop = new SupplierOrderProduct();
    // Auto-generated name: <supplier>_O<orderId>_P<nextIndex>
    String productName = supplierOrder.getSupplier().getSupplierName()
            + "_O" + supplierOrder.getSupplierOrderId()
            + "_P" + (supplierOrder.getSupplierProducts().size() + 1);
    // Append a user-supplied suffix only when the field is not blank.
    // (The old extra `!getText().equals("")` check was redundant: an empty
    // string already fails the length test.)
    if (pNameField.getText().replace(" ", "").length() != 0) {
        String userDefinedName = pNameField.getText();
        productName = productName + "_" + userDefinedName;
    }
    if (!pQuantityField.getText().equals("") && !pRateField.getText().equals("")) {
        try {
            sop.setSupplierOrderId(supplierOrder.getSupplierOrderId());
            sop.setSupplierProductName(productName);
            sop.setSupplierProductQuantity(Integer.parseInt(pQuantityField.getText()));
            sop.setSupplierRate(Double.parseDouble(pRateField.getText()));
            int supplierProductId = supplierProductService.saveSupplierProduct(sop);
            if (supplierProductId != Enum.invalidIndex) {
                // Persisted OK: reflect the new product in UI and in-memory model.
                sop.setSupplierProductId(supplierProductId);
                addOrderProductItemInTable(sop);
                supplierOrder.getSupplierProducts().add(sop);
                totalBill = supplierOrder.getSupplierProductTotalBill();
                totalDue = totalBill - totalPaid;
                updateBillingInfo();
                supplierOrder.setTotalBill(totalBill);
                supplierOrder.setTotalPaid(totalPaid);
                boolean status = orderService.updateSupplierOrder(supplierOrder);
                System.out.println("Order Service Update: " + status);
            } else {
                System.out.println("Product Couldn't Saved");
            }
        } catch (NumberFormatException nfe) {
            // Non-numeric quantity/rate: log and skip the save.
            System.out.println(nfe.getMessage());
        }
    }
    // Reset the input row whether or not the save succeeded.
    pNameField.setText("");
    pQuantityField.setText("");
    pRateField.setText("");
}//GEN-LAST:event_addPItemButtonActionPerformed
/**
 * Handles the "remove product" button: deletes the selected product from
 * persistence, the table and the in-memory order, then refreshes the totals.
 */
private void removePItemButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_removePItemButtonActionPerformed
    int rowSelected = psoTable.getSelectedRow();
    if (rowSelected == -1) {
        // JTable.getSelectedRow() returns -1 when nothing is selected;
        // previously this fell through to get(-1) and threw.
        System.out.println("Invalid Row");
        return;
    }
    SupplierOrderProduct sop = supplierOrder.getSupplierProducts().get(rowSelected);
    boolean status = supplierProductService.deleteSupplierProduct(sop);
    if (status) {
        supplierOrder.getSupplierProducts().remove(rowSelected);
        removeItemFromTable(psoTable, rowSelected);
        // Recompute and redisplay the billing summary.
        totalBill = supplierOrder.getSupplierProductTotalBill();
        totalBillLabel.setText(totalBill.toString());
        totalDue = totalBill - totalPaid;
        totalDueLabel.setText(totalDue.toString());
        supplierOrder.setTotalBill(totalBill);
        supplierOrder.setTotalPaid(totalPaid);
        status = orderService.updateSupplierOrder(supplierOrder);
        System.out.println("Order Service Update: " + status);
    } else {
        System.out.println("Product couldn't remove!");
    }
}//GEN-LAST:event_removePItemButtonActionPerformed
// Handles the "add payment" button: reads date/amount/description from the
// form, persists a new SupplierOrderPayment, then mirrors it into the payment
// table and refreshes the order totals.
// NOTE(review): the sequence below (save -> table update -> model update ->
// order update) is order-dependent; kept byte-identical.
private void addPaymentBActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_addPaymentBActionPerformed
    // TODO add your handling code here:
    try {
        Date date = paymentDateC.getDate();
        if (date == null) {
            // No date picked in the chooser: default to "now".
            date = new Date();
        }
        Timestamp sqlDate = DateUtil.convertDateToTimestamp(date);
        Double payment = Double.parseDouble(paymentField.getText());
        String desc = descField.getText();
        SupplierOrderPayment sop = new SupplierOrderPayment(sqlDate, payment, desc);
        System.out.println("Payment: " + sop);
        sop.setSupplierOrderId(supplierOrder.getSupplierOrderId());
        System.out.println("ADD PAYMNENT:" + sop);
        int supplierOrderPaidId = paymentService.saveSupplierOrderPayment(sop);
        System.out.println("pass save!");
        if (supplierOrderPaidId != Enum.invalidIndex) {
            // Persisted OK: reflect the payment in the UI and in-memory model.
            sop.setSupplierOrderPaymentId(supplierOrderPaidId);
            System.out.println("adding started");
            addPaymentItemInTable(sop);
            System.out.println("adding finished!");
            supplierOrder.getSupplierOrderPayments().add(sop);
            System.out.println("sopay add");
            totalPaid = supplierOrder.getSupplierOrderTotalPayment();
            totalDue = totalBill - totalPaid;
            System.out.println("sop pay getting");
            updateBillingInfo();
            // Clear inputs only on success (unlike the add-product handler).
            paymentField.setText("");
            descField.setText("");
            supplierOrder.setTotalBill(totalBill);
            supplierOrder.setTotalPaid(totalPaid);
            boolean status = orderService.updateSupplierOrder(supplierOrder);
            System.out.println("Order Service Update: " + status);
        } else {
            System.out.println("Payment Couldn't Saved!");
        }
    } catch (NumberFormatException nfe) {
        // Non-numeric payment amount: log and abort.
        System.out.println(nfe.getMessage());
    }
}//GEN-LAST:event_addPaymentBActionPerformed
/**
 * Handles the "remove payment" button: deletes the selected payment from
 * persistence, the table and the in-memory order, then refreshes the totals.
 */
private void removePaymentBActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_removePaymentBActionPerformed
    int rowSelected = paymentTable.getSelectedRow();
    if (rowSelected == -1) {
        // JTable.getSelectedRow() returns -1 when nothing is selected;
        // previously this fell through to get(-1) and threw.
        System.out.println("Invalid Row");
        return;
    }
    SupplierOrderPayment sop = supplierOrder.getSupplierOrderPayments().get(rowSelected);
    boolean status = paymentService.deleteSupplierOrderPayment(sop);
    if (status) {
        supplierOrder.getSupplierOrderPayments().remove(rowSelected);
        removeItemFromTable(paymentTable, rowSelected);
        // Recompute and redisplay the billing summary.
        totalPaid = supplierOrder.getSupplierOrderTotalPayment();
        totalDue = totalBill - totalPaid;
        updateBillingInfo();
        supplierOrder.setTotalBill(totalBill);
        supplierOrder.setTotalPaid(totalPaid);
        status = orderService.updateSupplierOrder(supplierOrder);
        System.out.println("Order Service Update: " + status);
    } else {
        System.out.println("Payment Cound't Deleted!");
    }
}//GEN-LAST:event_removePaymentBActionPerformed
/**
 * Handles the "edit product" button: opens a dialog pre-filled with the
 * selected product's user suffix, quantity and rate, and persists the edit.
 */
private void jButton1ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton1ActionPerformed
    int rowIndx = psoTable.getSelectedRow();
    if (rowIndx != Enum.invalidIndex) {
        SupplierOrderProduct sop = supplierOrder.getSupplierProducts().get(rowIndx);
        String name = sop.getSupplierProductName();
        String[] words = name.split("_");
        String lastword = words[words.length - 1];
        // Auto-generated names end in "P<digit>..."; those have no user suffix.
        // The length guard fixes a StringIndexOutOfBoundsException that the old
        // code threw for one-character suffixes.
        if (lastword.length() >= 2 && lastword.charAt(0) == 'P'
                && (lastword.charAt(1) >= '0' && lastword.charAt(1) <= '9')) {
            lastword = "";
        }
        // was: String namePrefix = namePrefix = ... (redundant double assignment)
        String namePrefix = name.substring(0, name.length() - lastword.length());
        Integer quantity = sop.getSupplierProductQuantity();
        Double rate = sop.getSupplierRate();
        JTextField nameField = new JTextField();
        JTextField quantityField = new JTextField();
        JTextField rateField = new JTextField();
        nameField.setText(lastword);
        quantityField.setText(quantity.toString());
        rateField.setText(rate.toString());
        Object[] fields = {
            "Product Name \n(" + namePrefix + ")", nameField,
            "Quantity ", quantityField,
            "Buy Rate ", rateField
        };
        int option = JOptionPane.showConfirmDialog(null, fields, "Update Order Product", JOptionPane.OK_CANCEL_OPTION);
        if (option == JOptionPane.OK_OPTION) {
            try {
                String newName = nameField.getText();
                if (newName.equals("")) {
                    // Only strip a trailing separator; the old code always chopped
                    // the last character, corrupting auto-generated names that
                    // carry no "_" suffix.
                    newName = namePrefix.endsWith("_")
                            ? namePrefix.substring(0, namePrefix.length() - 1)
                            : namePrefix;
                } else {
                    if (namePrefix.charAt(namePrefix.length() - 1) == '_') {
                        newName = namePrefix + newName;
                    } else {
                        newName = namePrefix + "_" + newName;
                    }
                }
                Integer newQuantity = Integer.parseInt(quantityField.getText());
                Double newRate = Double.parseDouble(rateField.getText());
                SupplierOrderProduct nSop = new SupplierOrderProduct();
                nSop.setSupplierOrderId(sop.getSupplierOrderId());
                nSop.setSupplierProductId(sop.getSupplierProductId());
                nSop.setSupplierProductName(newName);
                nSop.setSupplierProductQuantity(newQuantity);
                nSop.setSupplierRate(newRate);
                boolean status = supplierProductService.updateSupplierProduct(nSop);
                if (status) {
                    // Persisted OK: apply the edit to the in-memory model and UI.
                    sop.setSupplierProductName(newName);
                    sop.setSupplierProductQuantity(newQuantity);
                    sop.setSupplierRate(newRate);
                    addAllOrderProductItemInTable();
                    updateBillingInfo();
                    supplierOrder.setTotalBill(totalBill);
                    supplierOrder.setTotalPaid(totalPaid);
                    status = orderService.updateSupplierOrder(supplierOrder);
                    System.out.println("Order Service Update: " + status);
                }
            } catch (NumberFormatException nfe) {
                // Non-numeric quantity/rate: log and abort, matching the
                // add-product handler (previously this escaped and crashed
                // the event dispatch thread).
                System.out.println(nfe.getMessage());
            }
        } else {
            System.out.println("Cancel!");
        }
    } else {
        System.out.println("Invalid Row");
    }
}//GEN-LAST:event_jButton1ActionPerformed
// Debug hook for mouse clicks on the product table; currently only logs.
private void psoTableMouseClicked(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_psoTableMouseClicked
    // TODO add your handling code here:
    System.out.println("PSO TABLE SELECTED!");
}//GEN-LAST:event_psoTableMouseClicked
// Swing components managed by the NetBeans GUI builder; regenerated from the
// .form file -- do not edit the region between the GEN markers by hand.
// Variables declaration - do not modify//GEN-BEGIN:variables
private javax.swing.JButton addPItemButton;
private javax.swing.JButton addPaymentB;
private javax.swing.JTextField descField;
private javax.swing.JButton jButton1;
private javax.swing.JLabel jLabel1;
private javax.swing.JLabel jLabel2;
private javax.swing.JLabel jLabel3;
private javax.swing.JLabel jLabel4;
private javax.swing.JLabel jLabel5;
private javax.swing.JLabel jLabel6;
private javax.swing.JLabel jLabel7;
private javax.swing.JLabel jLabel8;
private javax.swing.JLabel jLabel9;
private javax.swing.JPanel jPanel3;
private javax.swing.JPanel jPanel4;
private javax.swing.JScrollPane jScrollPane1;
private javax.swing.JScrollPane jScrollPane2;
private javax.swing.JTextField pNameField;
private javax.swing.JTextField pQuantityField;
private javax.swing.JTextField pRateField;
private javax.swing.JLabel panelTitle;
private com.toedter.calendar.JDateChooser paymentDateC;
private javax.swing.JTextField paymentField;
private javax.swing.JPanel paymentInputPanel;
private javax.swing.JPanel paymentPanel;
private javax.swing.JTable paymentTable;
private javax.swing.JPanel productInputPanel;
private javax.swing.JPanel productPanel;
private javax.swing.JTable psoTable;
private javax.swing.JButton removePItemButton;
private javax.swing.JButton removePaymentB;
private javax.swing.JPanel supplierOrderItemInputPanel;
private javax.swing.JLabel totalBillLabel;
private javax.swing.JLabel totalDueLabel;
private javax.swing.JLabel totalPaidLabel;
// End of variables declaration//GEN-END:variables
}
| |
/* -*- mode: java; c-basic-offset: 2; indent-tabs-mode: nil -*- */
/*
Part of the Processing project - http://processing.org
Copyright (c) 2010-11 Ben Fry and Casey Reas
Copyright (c) 2012-15 The Processing Foundation
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License version 2
as published by the Free Software Foundation.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software Foundation,
Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package processing.mode.java;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.logging.FileHandler;
import java.util.logging.Handler;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.swing.SwingUtilities;
import processing.app.*;
import processing.app.ui.Editor;
import processing.app.ui.EditorException;
import processing.app.ui.EditorState;
import processing.mode.java.runner.Runner;
import processing.mode.java.tweak.SketchParser;
/**
 * The standard Java mode of the PDE: builds, runs and exports sketches,
 * and carries the PDEX (error-checking / code-completion / auto-save)
 * preferences merged in from the old ExperimentalMode.
 */
public class JavaMode extends Mode {

  /** Creates a Java-mode editor window for the given sketch path. */
  public Editor createEditor(Base base, String path,
                             EditorState state) throws EditorException {
    return new JavaEditor(base, path, state, this);
  }


  public JavaMode(Base base, File folder) {
    super(base, folder);
    initLogger();
    loadPreferences();
  }


  public String getTitle() {
    return "Java";
  }


  // . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . .


  public File[] getExampleCategoryFolders() {
    return new File[] {
      new File(examplesFolder, "Basics"),
      new File(examplesFolder, "Topics"),
      new File(examplesFolder, "Demos"),
      new File(examplesFolder, "Books")
    };
  }


  public String getDefaultExtension() {
    return "pde";
  }


  public String[] getExtensions() {
    return new String[] { "pde", "java" };
  }


  /** Folder names skipped when scanning a sketch folder. */
  public String[] getIgnorable() {
    return new String[] {
      "applet",
      "application.macosx",
      "application.windows",
      "application.linux"
    };
  }


  /** Lazily locates the processing.core library bundled with the PDE. */
  public Library getCoreLibrary() {
    if (coreLibrary == null) {
      File coreFolder = Platform.getContentFile("core");
      coreLibrary = new Library(coreFolder);
    }
    return coreLibrary;
  }


  // . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . .


  /** Handles the standard Java "Run" or "Present". */
  public Runner handleLaunch(Sketch sketch, RunnerListener listener,
                             final boolean present) throws SketchException {
    JavaBuild build = new JavaBuild(sketch);
    String appletClassName = build.build(true);
    if (appletClassName != null) {
      final Runner runtime = new Runner(build, listener);
      new Thread(new Runnable() {
        public void run() {
          // these block until finished
          if (present) {
            runtime.present(null);
          } else {
            runtime.launch(null);
          }
        }
      }).start();
      return runtime;
    }
    return null;
  }


  /** Start a sketch in tweak mode. */
  public Runner handleTweak(Sketch sketch,
                            RunnerListener listener) throws SketchException {
    final JavaEditor editor = (JavaEditor) listener;

    // Tweak mode rewrites the sketch sources, so refuse to start on a
    // sketch with unsaved changes.
    if (isSketchModified(sketch)) {
      editor.deactivateRun();
      Messages.showMessage(Language.text("menu.file.save"),
                           Language.text("tweak_mode.save_before_tweak"));
      return null;
    }

    // first try to build the unmodified code
    JavaBuild build = new JavaBuild(sketch);
    String appletClassName = build.build(true);
    if (appletClassName == null) {
      // unmodified build failed, so fail
      return null;
    }

    // if compilation passed, modify the code and build again
    // save the original sketch code of the user
    editor.initBaseCode();
    // check for "// tweak" comment in the sketch
    boolean requiresTweak = SketchParser.containsTweakComment(editor.baseCode);
    // parse the saved sketch to get all (or only with "//tweak" comment) numbers
    final SketchParser parser = new SketchParser(editor.baseCode, requiresTweak);

    // add our code to the sketch
    final boolean launchInteractive = editor.automateSketch(sketch, parser);

    build = new JavaBuild(sketch);
    appletClassName = build.build(false);

    if (appletClassName != null) {
      final Runner runtime = new Runner(build, listener);
      new Thread(new Runnable() {
        public void run() {
          // this blocks until finished
          runtime.launch(null);
          // next lines are executed when the sketch quits
          if (launchInteractive) {
            // fix swing deadlock issue: https://github.com/processing/processing/issues/3928
            SwingUtilities.invokeLater(new Runnable() {
              public void run() {
                editor.initEditorCode(parser.allHandles, false);
                editor.stopTweakMode(parser.allHandles);
              }
            });
          }
        }
      }).start();

      if (launchInteractive) {
        // fix swing deadlock issue: https://github.com/processing/processing/issues/3928
        SwingUtilities.invokeLater(new Runnable() {
          public void run() {
            // replace editor code with baseCode
            editor.initEditorCode(parser.allHandles, false);
            editor.updateInterface(parser.allHandles, parser.colorBoxes);
            editor.startTweakMode();
          }
        });
      }
      return runtime;
    }
    return null;
  }


  // TODO Why is this necessary? Why isn't Sketch.isModified() used?
  static private boolean isSketchModified(Sketch sketch) {
    for (SketchCode sc : sketch.getCode()) {
      if (sc.isModified()) {
        return true;
      }
    }
    return false;
  }


  public boolean handleExportApplication(Sketch sketch) throws SketchException, IOException {
    JavaBuild build = new JavaBuild(sketch);
    return build.exportApplication();
  }


  /**
   * Any modes that extend JavaMode can override this method to add additional
   * JARs to be included in the classpath for code completion and error checking
   * @return searchPath: file-paths separated by File.pathSeparatorChar
   */
  public String getSearchPath() {
    return getCoreLibrary().getJarPath();
  }


  // . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . .

  // Merged from ExperimentalMode

  /** Configures the JUL root logger and attaches a rotating file handler. */
  void initLogger() {
    final boolean VERBOSE_LOGGING = true;
    final int LOG_SIZE = 512 * 1024; // max log file size (in bytes)

    Logger globalLogger = Logger.getLogger("");
    //Logger logger = Logger.getLogger(Logger.GLOBAL_LOGGER_NAME); // doesn't work on os x
    if (VERBOSE_LOGGING) {
      globalLogger.setLevel(Level.INFO);
    } else {
      globalLogger.setLevel(Level.WARNING);
    }

    // enable logging to file
    try {
      // settings is writable for built-in modes, mode folder is not writable
      File logFolder = Base.getSettingsFile("debug");
      if (!logFolder.exists()) {
        logFolder.mkdir();
      }
      File logFile = new File(logFolder, "DebugMode.%g.log");
      Handler handler = new FileHandler(logFile.getAbsolutePath(), LOG_SIZE, 10, false);
      globalLogger.addHandler(handler);
    } catch (IOException ex) {
      Logger.getLogger(JavaMode.class.getName()).log(Level.SEVERE, null, ex);
    } catch (SecurityException ex) {
      Logger.getLogger(JavaMode.class.getName()).log(Level.SEVERE, null, ex);
    }
  }


  // PDEX feature toggles; volatile because they are read from background
  // error-checking / completion threads while being set from the EDT.
  static public volatile boolean errorCheckEnabled = true;
  static public volatile boolean warningsEnabled = true;
  static public volatile boolean codeCompletionsEnabled = true;
  static public volatile boolean debugOutputEnabled = false;
  static public volatile boolean errorLogsEnabled = false;
  static public volatile boolean autoSaveEnabled = true;
  static public volatile boolean autoSavePromptEnabled = true;
  static public volatile boolean defaultAutoSaveEnabled = true;
  static public volatile boolean ccTriggerEnabled = false;

  static public int autoSaveInterval = 3; //in minutes

  /**
   * After how many typed characters, code completion is triggered
   */
  volatile public static int codeCompletionTriggerLength = 1;

  static public final String prefErrorCheck = "pdex.errorCheckEnabled";
  static public final String prefWarnings = "pdex.warningsEnabled";
  static public final String prefDebugOP = "pdex.dbgOutput";
  static public final String prefErrorLogs = "pdex.writeErrorLogs";
  static public final String prefAutoSaveInterval = "pdex.autoSaveInterval";
  static public final String prefAutoSave = "pdex.autoSave.autoSaveEnabled";
  static public final String prefAutoSavePrompt = "pdex.autoSave.promptDisplay";
  static public final String prefDefaultAutoSave = "pdex.autoSave.autoSaveByDefault";
  static public final String suggestionsFileName = "suggestions.txt";

  static public final String COMPLETION_PREF = "pdex.completion";
  static public final String COMPLETION_TRIGGER_PREF = "pdex.completion.trigger";
  static public final String SUGGEST_IMPORTS_PREF = "pdex.suggest.imports";

  /**
   * Stores the white list/black list of allowed/blacklisted imports. These are defined in
   * suggestions.txt in java mode folder.
   */
  static public final Map<String, Set<String>> suggestionsMap = new HashMap<>();


  /** Loads the PDEX settings from Preferences into the static fields above. */
  public void loadPreferences() {
    Messages.log("Load PDEX prefs");
    ensurePrefsExist();
    errorCheckEnabled = Preferences.getBoolean(prefErrorCheck);
    warningsEnabled = Preferences.getBoolean(prefWarnings);
    codeCompletionsEnabled = Preferences.getBoolean(COMPLETION_PREF);
    errorLogsEnabled = Preferences.getBoolean(prefErrorLogs);
    autoSaveInterval = Preferences.getInteger(prefAutoSaveInterval);
    autoSaveEnabled = Preferences.getBoolean(prefAutoSave);
    autoSavePromptEnabled = Preferences.getBoolean(prefAutoSavePrompt);
    defaultAutoSaveEnabled = Preferences.getBoolean(prefDefaultAutoSave);
    ccTriggerEnabled = Preferences.getBoolean(COMPLETION_TRIGGER_PREF);
    loadSuggestionsMap();
  }


  /** Writes the current PDEX settings back to Preferences. */
  public void savePreferences() {
    Messages.log("Saving PDEX prefs");
    Preferences.setBoolean(prefErrorCheck, errorCheckEnabled);
    Preferences.setBoolean(prefWarnings, warningsEnabled);
    Preferences.setBoolean(COMPLETION_PREF, codeCompletionsEnabled);
    Preferences.setBoolean(prefErrorLogs, errorLogsEnabled);
    Preferences.setInteger(prefAutoSaveInterval, autoSaveInterval);
    Preferences.setBoolean(prefAutoSave, autoSaveEnabled);
    Preferences.setBoolean(prefAutoSavePrompt, autoSavePromptEnabled);
    Preferences.setBoolean(prefDefaultAutoSave, defaultAutoSaveEnabled);
    Preferences.setBoolean(COMPLETION_TRIGGER_PREF, ccTriggerEnabled);
  }


  /**
   * Populates {@link #suggestionsMap} from suggestions.txt in the mode folder.
   * Lines starting with '#' are comments; data lines are "key=value".
   */
  public void loadSuggestionsMap() {
    File suggestionsListFile = new File(getFolder() + File.separator
        + suggestionsFileName);
    if (!suggestionsListFile.exists()) {
      Messages.loge("Suggestions file not found! "
          + suggestionsListFile.getAbsolutePath());
      return;
    }
    // try-with-resources closes the reader even when readLine() throws
    // (the old code leaked it on any IOException)
    try (BufferedReader br = new BufferedReader(
        new FileReader(suggestionsListFile))) {
      String line;
      while ((line = br.readLine()) != null) {
        line = line.trim();
        if (line.startsWith("#") || !line.contains("=")) {
          continue;  // comment or not a key=value line
        }
        String[] parts = line.split("=");
        if (parts.length < 2) {
          continue;  // malformed "key=" line; previously threw AIOOBE
        }
        String key = parts[0];
        String val = parts[1];
        if (suggestionsMap.containsKey(key)) {
          suggestionsMap.get(key).add(val);
        } else {
          HashSet<String> set = new HashSet<>();
          set.add(val);
          suggestionsMap.put(key, set);
        }
      }
    } catch (IOException e) {
      Messages.loge("IOException while reading suggestions file:"
          + suggestionsListFile.getAbsolutePath());
    }
  }


  /** Seeds any missing PDEX preference entries with the current defaults. */
  public void ensurePrefsExist() {
    //TODO: Need to do a better job of managing prefs. Think lists.
    if (Preferences.get(prefErrorCheck) == null)
      Preferences.setBoolean(prefErrorCheck, errorCheckEnabled);
    if (Preferences.get(prefWarnings) == null)
      Preferences.setBoolean(prefWarnings, warningsEnabled);
    if (Preferences.get(COMPLETION_PREF) == null)
      Preferences.setBoolean(COMPLETION_PREF, codeCompletionsEnabled);
    // BUG FIX: this check used to be a live `if` whose only statement was
    // commented out, which silently made the prefErrorLogs check below its
    // body (so prefErrorLogs was only seeded when prefDebugOP was unset).
    // Both lines stay commented out until the debug-output pref returns.
    // if (Preferences.get(prefDebugOP) == null)
    //   Preferences.setBoolean(prefDebugOP, debugOutputEnabled);
    if (Preferences.get(prefErrorLogs) == null)
      Preferences.setBoolean(prefErrorLogs, errorLogsEnabled);
    if (Preferences.get(prefAutoSaveInterval) == null)
      Preferences.setInteger(prefAutoSaveInterval, autoSaveInterval);
    if (Preferences.get(prefAutoSave) == null)
      Preferences.setBoolean(prefAutoSave, autoSaveEnabled);
    if (Preferences.get(prefAutoSavePrompt) == null)
      Preferences.setBoolean(prefAutoSavePrompt, autoSavePromptEnabled);
    if (Preferences.get(prefDefaultAutoSave) == null)
      Preferences.setBoolean(prefDefaultAutoSave, defaultAutoSaveEnabled);
    if (Preferences.get(COMPLETION_TRIGGER_PREF) == null)
      Preferences.setBoolean(COMPLETION_TRIGGER_PREF, ccTriggerEnabled);
  }


  static public void main(String[] args) {
    processing.app.Base.main(args);
  }
}
| |
package org.activiti.engine.impl.util.json;
import java.io.IOException;
import java.io.Writer;
/*
Copyright (c) 2006 JSON.org
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
The Software shall be used for Good, not Evil.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
/**
* JSONWriter provides a quick and convenient way of producing JSON text.
* The texts produced strictly conform to JSON syntax rules. No whitespace is
* added, so the results are ready for transmission or storage. Each instance of
* JSONWriter can produce one JSON text.
* <p>
* A JSONWriter instance provides a <code>value</code> method for appending
* values to the
* text, and a <code>key</code>
* method for adding keys before values in objects. There are <code>array</code>
* and <code>endArray</code> methods that make and bound array values, and
* <code>object</code> and <code>endObject</code> methods which make and bound
* object values. All of these methods return the JSONWriter instance,
* permitting a cascade style. For example, <pre>
* new JSONWriter(myWriter)
* .object()
* .key("JSON")
* .value("Hello, World!")
* .endObject();</pre> which writes <pre>
* {"JSON":"Hello, World!"}</pre>
* <p>
* The first method called must be <code>array</code> or <code>object</code>.
* There are no methods for adding commas or colons. JSONWriter adds them for
* you. Objects and arrays can be nested up to 20 levels deep.
* <p>
* This can sometimes be easier than using a JSONObject to build a string.
* @author JSON.org
* @version 2010-03-11
*/
public class JSONWriter {
private static final int maxdepth = 20;
/**
* The comma flag determines if a comma should be output before the next
* value.
*/
private boolean comma;
/**
* The current mode. Values:
* 'a' (array),
* 'd' (done),
* 'i' (initial),
* 'k' (key),
* 'o' (object).
*/
protected char mode;
/**
* The object/array stack.
*/
private JSONObject stack[];
/**
* The stack top index. A value of 0 indicates that the stack is empty.
*/
private int top;
/**
* The writer that will receive the output.
*/
protected Writer writer;
/**
 * Creates a fresh JSONWriter that emits one JSON text to the given writer.
 */
public JSONWriter(Writer w) {
    writer = w;
    mode = 'i';                          // initial: expecting array() or object()
    comma = false;
    stack = new JSONObject[maxdepth];
    top = 0;                             // empty scope stack
}
/**
 * Append a pre-rendered value string, inserting a separating comma when
 * needed and advancing the writer's state machine.
 *
 * @param s the already-encoded value text (must not be null)
 * @return this
 * @throws JSONException if s is null, the value is out of sequence,
 *         or the underlying writer fails
 */
private JSONWriter append(String s) throws JSONException {
    if (s == null) {
        throw new JSONException("Null pointer");
    }
    // A value is only legal inside an object (after a key) or an array.
    if (this.mode != 'o' && this.mode != 'a') {
        throw new JSONException("Value out of sequence.");
    }
    try {
        if (this.comma && this.mode == 'a') {
            this.writer.write(',');
        }
        this.writer.write(s);
    } catch (IOException e) {
        throw new JSONException(e);
    }
    if (this.mode == 'o') {
        this.mode = 'k';  // object value written; a key must come next
    }
    this.comma = true;
    return this;
}
/**
 * Begin a new array scope. Every value appended until the balancing
 * <code>endArray</code> belongs to this array.
 *
 * @return this
 * @throws JSONException if nesting is too deep or an array cannot start here
 */
public JSONWriter array() throws JSONException {
    if (this.mode != 'i' && this.mode != 'o' && this.mode != 'a') {
        throw new JSONException("Misplaced array.");
    }
    this.push(null);      // null stack entry marks an array scope
    this.append("[");
    this.comma = false;
    return this;
}
/**
 * Close the current scope by emitting its terminator character.
 *
 * @param m the mode the writer must currently be in ('a' or 'k')
 * @param c the closing character to emit (']' or '}')
 * @return this
 * @throws JSONException if the call is unbalanced or the writer fails
 */
private JSONWriter end(char m, char c) throws JSONException {
    if (this.mode != m) {
        String msg = (m == 'a') ? "Misplaced endArray." : "Misplaced endObject.";
        throw new JSONException(msg);
    }
    this.pop(m);
    try {
        this.writer.write(c);
    } catch (IOException e) {
        throw new JSONException(e);
    }
    this.comma = true;
    return this;
}
/**
 * End an array. This method must be called to balance calls to
 * <code>array</code>.
 *
 * @return this
 * @throws JSONException if incorrectly nested
 */
public JSONWriter endArray() throws JSONException {
    return end('a', ']');
}
/**
 * End an object. This method must be called to balance calls to
 * <code>object</code>.
 *
 * @return this
 * @throws JSONException if incorrectly nested
 */
public JSONWriter endObject() throws JSONException {
    return end('k', '}');
}
/**
 * Append a key for the next value. Inside an object, every value must be
 * preceded by a key; duplicate keys in the same object are rejected.
 *
 * @param s the key string (must not be null)
 * @return this
 * @throws JSONException if the key is null, out of place, a duplicate,
 *         or the writer fails
 */
public JSONWriter key(String s) throws JSONException {
    if (s == null) {
        throw new JSONException("Null key.");
    }
    if (this.mode != 'k') {
        throw new JSONException("Misplaced key.");
    }
    try {
        // Record the key in the current object scope; putOnce throws on duplicates.
        stack[top - 1].putOnce(s, Boolean.TRUE);
        if (this.comma) {
            this.writer.write(',');
        }
        this.writer.write(JSONObject.quote(s));
        this.writer.write(':');
        this.comma = false;
        this.mode = 'o';  // a value must follow
        return this;
    } catch (IOException e) {
        throw new JSONException(e);
    }
}
/**
 * Begin a new object scope. Every key/value appended until the balancing
 * <code>endObject</code> belongs to this object.
 *
 * @return this
 * @throws JSONException if nesting is too deep or an object cannot start here
 */
public JSONWriter object() throws JSONException {
    if (this.mode == 'i') {
        this.mode = 'o';  // first call on a fresh writer
    }
    if (this.mode != 'o' && this.mode != 'a') {
        throw new JSONException("Misplaced object.");
    }
    this.append("{");
    this.push(new JSONObject());   // non-null stack entry marks an object scope
    this.comma = false;
    return this;
}
/**
 * Pop an array or object scope.
 * @param c The scope to close.
 * @throws JSONException If nesting is wrong.
 */
private void pop(char c) throws JSONException {
    if (this.top <= 0) {
        throw new JSONException("Nesting error.");
    }
    // A null stack slot marks an array scope; a JSONObject marks an object scope.
    char openScope = (this.stack[this.top - 1] == null) ? 'a' : 'k';
    if (openScope != c) {
        throw new JSONException("Nesting error.");
    }
    this.top -= 1;
    // Restore the mode of the enclosing scope ('d' = done, at the top level).
    if (this.top == 0) {
        this.mode = 'd';
    } else if (this.stack[this.top - 1] == null) {
        this.mode = 'a';
    } else {
        this.mode = 'k';
    }
}
/**
 * Push an array or object scope.
 * @param jo The bookkeeping object for an object scope, or null for an array scope.
 * @throws JSONException If nesting is too deep.
 */
private void push(JSONObject jo) throws JSONException {
    if (this.top >= maxdepth) {
        throw new JSONException("Nesting too deep.");
    }
    this.stack[this.top] = jo;
    // Arrays (null slot) expect values; objects expect a key next.
    if (jo == null) {
        this.mode = 'a';
    } else {
        this.mode = 'k';
    }
    this.top += 1;
}
/**
 * Append either the value <code>true</code> or the value
 * <code>false</code>.
 * @param b A boolean.
 * @return this
 * @throws JSONException If the value is out of sequence.
 */
public JSONWriter value(boolean b) throws JSONException {
    return this.append(Boolean.toString(b));
}
/**
 * Append a double value.
 * @param d A double.
 * @return this
 * @throws JSONException If the number is not finite.
 */
public JSONWriter value(double d) throws JSONException {
    // Double.valueOf instead of the deprecated boxing constructor new Double(d);
    // identical boxed value, no behavior change.
    return this.value(Double.valueOf(d));
}
/**
 * Append a long value.
 * @param l A long.
 * @return this
 * @throws JSONException If the value is out of sequence.
 */
public JSONWriter value(long l) throws JSONException {
    // String.valueOf(long) delegates to Long.toString, producing the same text.
    return this.append(String.valueOf(l));
}
/**
 * Append an object value.
 * @param o The object to append. It can be null, or a Boolean, Number,
 * String, JSONObject, or JSONArray, or an object with a toJSONString()
 * method.
 * @return this
 * @throws JSONException If the value is out of sequence.
 */
public JSONWriter value(Object o) throws JSONException {
    String serialized = JSONObject.valueToString(o);
    return this.append(serialized);
}
}
| |
/*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package vgrechka.phizdetsidea.phizdets.refactoring.move;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.Lists;
import com.intellij.openapi.actionSystem.DataContext;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.SelectionModel;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.*;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.refactoring.BaseRefactoringProcessor;
import com.intellij.refactoring.RefactoringBundle;
import com.intellij.refactoring.move.MoveHandlerDelegate;
import com.intellij.refactoring.util.CommonRefactoringUtil;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.containers.ContainerUtil;
import vgrechka.phizdetsidea.phizdets.PyBundle;
import vgrechka.phizdetsidea.phizdets.codeInsight.controlflow.ScopeOwner;
import vgrechka.phizdetsidea.phizdets.psi.*;
import vgrechka.phizdetsidea.phizdets.psi.impl.PyPsiUtils;
import vgrechka.phizdetsidea.phizdets.psi.search.PyOverridingMethodsSearch;
import vgrechka.phizdetsidea.phizdets.psi.search.PySuperMethodsSearch;
import vgrechka.phizdetsidea.phizdets.psi.types.TypeEvalContext;
import vgrechka.phizdetsidea.phizdets.refactoring.move.makeFunctionTopLevel.PyMakeFunctionTopLevelDialog;
import vgrechka.phizdetsidea.phizdets.refactoring.move.makeFunctionTopLevel.PyMakeLocalFunctionTopLevelProcessor;
import vgrechka.phizdetsidea.phizdets.refactoring.move.makeFunctionTopLevel.PyMakeMethodTopLevelProcessor;
import vgrechka.phizdetsidea.phizdets.refactoring.move.moduleMembers.PyMoveModuleMembersDialog;
import vgrechka.phizdetsidea.phizdets.refactoring.move.moduleMembers.PyMoveModuleMembersHelper;
import vgrechka.phizdetsidea.phizdets.refactoring.move.moduleMembers.PyMoveModuleMembersProcessor;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.Collections;
import java.util.List;
import static vgrechka.phizdetsidea.phizdets.psi.PyUtil.as;
/**
 * {@link MoveHandlerDelegate} for Python symbols. Handles two kinds of moves:
 * top-level module members (moved to another module) and local functions or
 * suitable instance methods (made top-level functions).
 *
 * @author vlan
 */
public class PyMoveSymbolDelegate extends MoveHandlerDelegate {
  /**
   * The selection is movable when the first element is a movable local
   * function/method, or when every selected element is a movable top-level
   * module member.
   */
  @Override
  public boolean canMove(PsiElement[] elements, @Nullable PsiElement targetContainer) {
    if (!super.canMove(elements, targetContainer)) {
      return false;
    }
    // Local function or method
    if (isMovableLocalFunctionOrMethod(elements[0])) {
      return true;
    }
    // Top-level module member
    for (PsiElement element : elements) {
      if (!PyMoveModuleMembersHelper.isMovableModuleMember(element)) {
        return false;
      }
    }
    return true;
  }

  /**
   * Shows the dialog matching the first element and runs the corresponding
   * refactoring processor: "make function top-level" for a movable local
   * function/method, otherwise "move module members".
   */
  public void doMove(@NotNull Project project, @NotNull List<PyElement> elements) {
    final PsiElement firstElement = elements.get(0);
    final String initialPath = StringUtil.notNullize(PyPsiUtils.getContainingFilePath(firstElement));
    final BaseRefactoringProcessor processor;
    if (isMovableLocalFunctionOrMethod(firstElement)) {
      final PyFunction function = (PyFunction)firstElement;
      final PyMakeFunctionTopLevelDialog dialog = new PyMakeFunctionTopLevelDialog(project, function, initialPath, initialPath);
      if (!dialog.showAndGet()) {
        return;
      }
      // Methods and local functions need different usage rewriting.
      if (function.getContainingClass() != null) {
        processor = new PyMakeMethodTopLevelProcessor(function, dialog.getTargetPath());
      }
      else {
        processor = new PyMakeLocalFunctionTopLevelProcessor(function, dialog.getTargetPath());
      }
      processor.setPreviewUsages(dialog.isPreviewUsages());
    }
    else {
      final List<PsiNamedElement> initialElements = Lists.newArrayList();
      for (PsiElement element : elements) {
        final PsiNamedElement e = PyMoveModuleMembersHelper.extractNamedElement(element);
        // Abort silently if any element cannot be mapped to a named member.
        if (e == null) {
          return;
        }
        initialElements.add(e);
      }
      final PyMoveModuleMembersDialog dialog = new PyMoveModuleMembersDialog(project, initialElements, initialPath, initialPath);
      if (!dialog.showAndGet()) {
        return;
      }
      final PsiNamedElement[] selectedElements = ContainerUtil.findAllAsArray(dialog.getSelectedTopLevelSymbols(), PsiNamedElement.class);
      processor = new PyMoveModuleMembersProcessor(selectedElements, dialog.getTargetPath());
      processor.setPreviewUsages(dialog.isPreviewUsages());
    }
    try {
      processor.run();
    }
    catch (IncorrectOperationException e) {
      // In tests let the failure surface; in production show an error dialog.
      if (ApplicationManager.getApplication().isUnitTestMode()) {
        throw e;
      }
      CommonRefactoringUtil.showErrorMessage(RefactoringBundle.message("error.title"), e.getMessage(), null, project);
    }
  }

  /**
   * Entry point for the platform's Move action.
   *
   * <p>With a multi-line selection in a Python file, collects all movable
   * top-level members fully covered by the selection and moves them (an empty
   * result shows an error hint). Otherwise falls back to moving the single
   * element at the caret.</p>
   *
   * @return true if this delegate handled the move (including showing an
   *         error hint), false to let other delegates try
   */
  @Override
  public boolean tryToMove(@NotNull PsiElement element,
                           @NotNull Project project,
                           @Nullable DataContext dataContext,
                           @Nullable PsiReference reference,
                           @Nullable Editor editor) {
    final PsiFile currentFile = element.getContainingFile();
    if (editor != null && currentFile instanceof PyFile && selectionSpansMultipleLines(editor)) {
      final List<PyElement> moduleMembers = collectAllMovableElementsInSelection(editor, (PyFile)currentFile);
      if (moduleMembers.isEmpty()) {
        showBadSelectionErrorHint(project, editor);
      }
      else {
        doMove(project, moduleMembers);
      }
      return true;
    }
    // Fallback to the old way to select single element to move
    final PsiNamedElement e = PyMoveModuleMembersHelper.extractNamedElement(element);
    if (e != null && PyMoveModuleMembersHelper.hasMovableElementType(e)) {
      if (PyMoveModuleMembersHelper.isMovableModuleMember(e) || isMovableLocalFunctionOrMethod(e)) {
        doMove(project, Collections.singletonList((PyElement)e));
      }
      else {
        showBadSelectionErrorHint(project, editor);
      }
      return true;
    }
    return false;
  }

  /** Shows a "bad selection" error hint; editor may be null for keyboard-less invocations. */
  private static void showBadSelectionErrorHint(@NotNull Project project, @Nullable Editor editor) {
    CommonRefactoringUtil.showErrorHint(project, editor,
                                        PyBundle.message("refactoring.move.module.members.error.selection"),
                                        RefactoringBundle.message("error.title"), null);
  }

  /** True if the selection's start and end fall on different document lines. */
  private static boolean selectionSpansMultipleLines(@NotNull Editor editor) {
    final SelectionModel selectionModel = editor.getSelectionModel();
    final Document document = editor.getDocument();
    return document.getLineNumber(selectionModel.getSelectionStart()) != document.getLineNumber(selectionModel.getSelectionEnd());
  }

  /**
   * Collects top-level module members whose expanded body lies entirely
   * inside the editor selection.
   */
  @NotNull
  private static List<PyElement> collectAllMovableElementsInSelection(@NotNull Editor editor, @NotNull PyFile pyFile) {
    final SelectionModel selectionModel = editor.getSelectionModel();
    final TextRange selectionRange = new TextRange(selectionModel.getSelectionStart(), selectionModel.getSelectionEnd());
    final List<PyElement> members = PyMoveModuleMembersHelper.getTopLevelModuleMembers(pyFile);
    return ContainerUtil.filter(members, member -> {
      final PsiElement body = PyMoveModuleMembersHelper.expandNamedElementBody((PsiNamedElement)member);
      return body != null && selectionRange.contains(body.getTextRange());
    });
  }

  /** True for elements that can be made top-level: local functions and suitable instance methods. */
  @VisibleForTesting
  public static boolean isMovableLocalFunctionOrMethod(@NotNull PsiElement element) {
    return isLocalFunction(element) || isSuitableInstanceMethod(element);
  }

  /**
   * A method is suitable for extraction to top level when it is a plain
   * instance method: not a special (dunder) method, neither overriding nor
   * overridden, has no decorators or classmethod/staticmethod modifier, and
   * is not part of a property.
   */
  private static boolean isSuitableInstanceMethod(@Nullable PsiElement element) {
    final PyFunction function = as(element, PyFunction.class);
    if (function == null || function.getContainingClass() == null) {
      return false;
    }
    final String funcName = function.getName();
    if (funcName == null || PyUtil.isSpecialName(funcName)) {
      return false;
    }
    final TypeEvalContext typeEvalContext = TypeEvalContext.userInitiated(function.getProject(), function.getContainingFile());
    if (PySuperMethodsSearch.search(function, typeEvalContext).findFirst() != null) return false;
    if (PyOverridingMethodsSearch.search(function, true).findFirst() != null) return false;
    if (function.getDecoratorList() != null || function.getModifier() != null) return false;
    if (function.getContainingClass().findPropertyByCallable(function) != null) return false;
    return true;
  }

  /** True if the element is a function nested directly inside another function's scope. */
  private static boolean isLocalFunction(@Nullable PsiElement resolved) {
    return resolved instanceof PyFunction && PsiTreeUtil.getParentOfType(resolved, ScopeOwner.class, true) instanceof PyFunction;
  }
}
| |
/*******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2012 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.job;
import java.util.Comparator;
import java.util.Date;
import org.pentaho.di.core.Result;
import org.pentaho.di.job.entry.JobEntryCopy;
/**
 * This class holds the result of a job entry after it was executed.
 * Things we want to keep track of are:<p>
 * --> result of the execution (Result)<p>
 * --> ...<p>
 *
 * @author Matt
 * @since 16-mrt-2005
 */
public class JobEntryResult implements Cloneable, Comparator<JobEntryResult>, Comparable<JobEntryResult> {
    /** Execution result; its log text and rows are stripped on construction to save memory. */
    private Result result;

    /** Name of the job entry this result belongs to. */
    private String jobEntryName;

    /** Copy number (nr) of the job entry. */
    private int jobEntryNr;

    /** Optional free-form comment. */
    private String comment;

    /** Optional reason for this result. */
    private String reason;

    /** Moment this result was logged; defaults to construction time. */
    private Date logDate;

    /** Filename of the job entry, if any. */
    private String jobEntryFilename;

    /** Id of the log channel the entry logged to. */
    private String logChannelId;

    /**
     * Creates a new empty job entry result; the log date is set to "now".
     */
    public JobEntryResult() {
        logDate = new Date();
    }

    /**
     * Creates a new job entry result.
     *
     * @param result the result of the job entry; it is cloned, and the clone's
     *               log text and rows are dropped to prevent excessive memory
     *               consumption (PDI-4721)
     * @param logChannelId the id of the log channel the entry logged to
     * @param comment an optional comment
     * @param reason an optional reason
     * @param jobEntryName the name of the job entry
     * @param jobEntryNr the copy number of the job entry
     * @param jobEntryFilename the filename of the job entry, or null
     */
    public JobEntryResult(Result result, String logChannelId, String comment, String reason,
                          String jobEntryName, int jobEntryNr, String jobEntryFilename) {
        this();
        if (result != null) {
            this.result = (Result) result.clone();
            // prevent excessive memory consumption!
            // PDI-4721
            //
            this.result.setLogText(null);
            this.result.setRows(null);
        } else {
            this.result = null;
        }
        this.logChannelId = logChannelId;
        this.comment = comment;
        this.reason = reason;
        this.jobEntryName = jobEntryName;
        this.jobEntryNr = jobEntryNr;
        this.jobEntryFilename = jobEntryFilename;
    }

    /**
     * @deprecated use
     * {@link #JobEntryResult(Result, String, String, String, String, int, String)}
     */
    @Deprecated
    public JobEntryResult(Result result, String comment, String reason, JobEntryCopy copy) {
        // BUGFIX: the log-channel-id argument previously dereferenced
        // copy.getEntry().getLogChannel() unconditionally, which threw an NPE for a
        // null copy (or null entry) and made the copy!=null checks on the remaining
        // arguments unreachable. Guard the whole chain instead.
        this(result,
             copy == null || copy.getEntry() == null ? null : copy.getEntry().getLogChannel().getLogChannelId(),
             comment,
             reason,
             copy != null ? copy.getName() : null,
             copy != null ? copy.getNr() : 0,
             copy == null ? null : (copy.getEntry() != null ? copy.getEntry().getFilename() : null));
    }

    /**
     * Deep-clones this result (the wrapped {@link Result} is cloned as well).
     *
     * @return the clone, or null if cloning is not supported (kept for
     *         backward compatibility; cannot normally happen since this class
     *         implements {@link Cloneable})
     */
    public Object clone() {
        try {
            JobEntryResult jobEntryResult = (JobEntryResult) super.clone();
            if (getResult() != null) {
                jobEntryResult.setResult((Result) getResult().clone());
            }
            return jobEntryResult;
        } catch (CloneNotSupportedException e) {
            // Historical contract: return null rather than propagate.
            return null;
        }
    }

    /**
     * @param result The result to set.
     */
    public void setResult(Result result) {
        this.result = result;
    }

    /**
     * @return Returns the result.
     */
    public Result getResult() {
        return result;
    }

    /**
     * @return Returns the comment.
     */
    public String getComment() {
        return comment;
    }

    /**
     * @param comment The comment to set.
     */
    public void setComment(String comment) {
        this.comment = comment;
    }

    /**
     * @return Returns the reason.
     */
    public String getReason() {
        return reason;
    }

    /**
     * @param reason The reason to set.
     */
    public void setReason(String reason) {
        this.reason = reason;
    }

    /**
     * @return Returns the logDate.
     */
    public Date getLogDate() {
        return logDate;
    }

    /**
     * @param logDate The logDate to set.
     */
    public void setLogDate(Date logDate) {
        this.logDate = logDate;
    }

    /**
     * @return the jobEntryName
     */
    public String getJobEntryName() {
        return jobEntryName;
    }

    /**
     * @param jobEntryName the jobEntryName to set
     */
    public void setJobEntryName(String jobEntryName) {
        this.jobEntryName = jobEntryName;
    }

    /**
     * @return the jobEntryFilename
     */
    public String getJobEntryFilename() {
        return jobEntryFilename;
    }

    /**
     * @param jobEntryFilename the jobEntryFilename to set
     */
    public void setJobEntryFilename(String jobEntryFilename) {
        this.jobEntryFilename = jobEntryFilename;
    }

    /**
     * @return the jobEntryNr
     */
    public int getJobEntryNr() {
        return jobEntryNr;
    }

    /**
     * @param jobEntryNr the jobEntryNr to set
     */
    public void setJobEntryNr(int jobEntryNr) {
        this.jobEntryNr = jobEntryNr;
    }

    /**
     * Null-safe ordering by job entry name, then by entry number.
     * Null objects and null names sort first.
     */
    public int compare(JobEntryResult one, JobEntryResult two) {
        if (one == null && two == null) return 0;
        if (one == null) return -1;
        if (two == null) return 1;
        if (one.getJobEntryName() == null && two.getJobEntryName() == null) return 0;
        if (one.getJobEntryName() == null) return -1;
        if (two.getJobEntryName() == null) return 1;
        int cmp = one.getJobEntryName().compareTo(two.getJobEntryName());
        if (cmp != 0) return cmp;
        return Integer.valueOf(one.getJobEntryNr()).compareTo(Integer.valueOf(two.getJobEntryNr()));
    }

    /**
     * Compares this result to another using {@link #compare(JobEntryResult, JobEntryResult)}.
     */
    public int compareTo(JobEntryResult two) {
        return compare(this, two);
    }

    /**
     * @return the id of the log channel the entry logged to
     */
    public String getLogChannelId() {
        return logChannelId;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.api.java.io.jdbc.catalog;
import org.apache.flink.api.java.io.jdbc.JDBCTableSourceSinkFactory;
import org.apache.flink.table.api.ValidationException;
import org.apache.flink.table.catalog.AbstractCatalog;
import org.apache.flink.table.catalog.CatalogBaseTable;
import org.apache.flink.table.catalog.CatalogDatabase;
import org.apache.flink.table.catalog.CatalogFunction;
import org.apache.flink.table.catalog.CatalogPartition;
import org.apache.flink.table.catalog.CatalogPartitionSpec;
import org.apache.flink.table.catalog.ObjectPath;
import org.apache.flink.table.catalog.exceptions.CatalogException;
import org.apache.flink.table.catalog.exceptions.DatabaseAlreadyExistException;
import org.apache.flink.table.catalog.exceptions.DatabaseNotEmptyException;
import org.apache.flink.table.catalog.exceptions.DatabaseNotExistException;
import org.apache.flink.table.catalog.exceptions.FunctionAlreadyExistException;
import org.apache.flink.table.catalog.exceptions.FunctionNotExistException;
import org.apache.flink.table.catalog.exceptions.PartitionAlreadyExistsException;
import org.apache.flink.table.catalog.exceptions.PartitionNotExistException;
import org.apache.flink.table.catalog.exceptions.PartitionSpecInvalidException;
import org.apache.flink.table.catalog.exceptions.TableAlreadyExistException;
import org.apache.flink.table.catalog.exceptions.TableNotExistException;
import org.apache.flink.table.catalog.exceptions.TableNotPartitionedException;
import org.apache.flink.table.catalog.exceptions.TablePartitionedException;
import org.apache.flink.table.catalog.stats.CatalogColumnStatistics;
import org.apache.flink.table.catalog.stats.CatalogTableStatistics;
import org.apache.flink.table.expressions.Expression;
import org.apache.flink.table.factories.TableFactory;
import org.apache.flink.util.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import static org.apache.flink.util.Preconditions.checkArgument;
/**
 * Abstract catalog for any JDBC catalogs.
 *
 * <p>Validates connectivity eagerly in {@link #open()}. Most DDL-style
 * operations are unsupported because the catalog is read-only over JDBC
 * metadata.</p>
 */
public abstract class AbstractJDBCCatalog extends AbstractCatalog {

	private static final Logger LOG = LoggerFactory.getLogger(AbstractJDBCCatalog.class);

	protected final String username;
	protected final String pwd;
	/** Base JDBC url, normalized to always end with '/'. */
	protected final String baseUrl;
	/** Url of the default database, derived from the normalized base url. */
	protected final String defaultUrl;

	public AbstractJDBCCatalog(String catalogName, String defaultDatabase, String username, String pwd, String baseUrl) {
		super(catalogName, defaultDatabase);

		checkArgument(!StringUtils.isNullOrWhitespaceOnly(username));
		checkArgument(!StringUtils.isNullOrWhitespaceOnly(pwd));
		checkArgument(!StringUtils.isNullOrWhitespaceOnly(baseUrl));

		JDBCCatalogUtils.validateJDBCUrl(baseUrl);

		this.username = username;
		this.pwd = pwd;
		this.baseUrl = baseUrl.endsWith("/") ? baseUrl : baseUrl + "/";
		// BUGFIX: must build on the normalized base url (this.baseUrl); using the raw
		// parameter produced "jdbc:...hostdbname" when the caller omitted the trailing '/'.
		this.defaultUrl = this.baseUrl + defaultDatabase;
	}

	@Override
	public void open() throws CatalogException {
		// test connection, fail early if we cannot connect to database
		try (Connection conn = DriverManager.getConnection(defaultUrl, username, pwd)) {
		} catch (SQLException e) {
			throw new ValidationException(
				String.format("Failed connecting to %s via JDBC.", defaultUrl), e);
		}

		LOG.info("Catalog {} established connection to {}", getName(), defaultUrl);
	}

	@Override
	public void close() throws CatalogException {
		LOG.info("Catalog {} closing", getName());
	}

	// ----- getters ------

	public String getUsername() {
		return username;
	}

	public String getPassword() {
		return pwd;
	}

	public String getBaseUrl() {
		return baseUrl;
	}

	// ------ table factory ------

	public Optional<TableFactory> getTableFactory() {
		return Optional.of(new JDBCTableSourceSinkFactory());
	}

	// ------ databases ------

	@Override
	public boolean databaseExists(String databaseName) throws CatalogException {
		checkArgument(!StringUtils.isNullOrWhitespaceOnly(databaseName));

		return listDatabases().contains(databaseName);
	}

	@Override
	public void createDatabase(String name, CatalogDatabase database, boolean ignoreIfExists) throws DatabaseAlreadyExistException, CatalogException {
		throw new UnsupportedOperationException();
	}

	@Override
	public void dropDatabase(String name, boolean ignoreIfNotExists, boolean cascade) throws DatabaseNotExistException, DatabaseNotEmptyException, CatalogException {
		throw new UnsupportedOperationException();
	}

	@Override
	public void alterDatabase(String name, CatalogDatabase newDatabase, boolean ignoreIfNotExists) throws DatabaseNotExistException, CatalogException {
		throw new UnsupportedOperationException();
	}

	// ------ tables and views ------

	@Override
	public void dropTable(ObjectPath tablePath, boolean ignoreIfNotExists) throws TableNotExistException, CatalogException {
		throw new UnsupportedOperationException();
	}

	@Override
	public void renameTable(ObjectPath tablePath, String newTableName, boolean ignoreIfNotExists) throws TableNotExistException, TableAlreadyExistException, CatalogException {
		throw new UnsupportedOperationException();
	}

	@Override
	public void createTable(ObjectPath tablePath, CatalogBaseTable table, boolean ignoreIfExists) throws TableAlreadyExistException, DatabaseNotExistException, CatalogException {
		throw new UnsupportedOperationException();
	}

	@Override
	public void alterTable(ObjectPath tablePath, CatalogBaseTable newTable, boolean ignoreIfNotExists) throws TableNotExistException, CatalogException {
		throw new UnsupportedOperationException();
	}

	@Override
	public List<String> listViews(String databaseName) throws DatabaseNotExistException, CatalogException {
		return Collections.emptyList();
	}

	// ------ partitions ------

	@Override
	public List<CatalogPartitionSpec> listPartitions(ObjectPath tablePath) throws TableNotExistException, TableNotPartitionedException, CatalogException {
		return Collections.emptyList();
	}

	@Override
	public List<CatalogPartitionSpec> listPartitions(ObjectPath tablePath, CatalogPartitionSpec partitionSpec) throws TableNotExistException, TableNotPartitionedException, CatalogException {
		return Collections.emptyList();
	}

	@Override
	public List<CatalogPartitionSpec> listPartitionsByFilter(ObjectPath tablePath, List<Expression> filters) throws TableNotExistException, TableNotPartitionedException, CatalogException {
		return Collections.emptyList();
	}

	@Override
	public CatalogPartition getPartition(ObjectPath tablePath, CatalogPartitionSpec partitionSpec) throws PartitionNotExistException, CatalogException {
		throw new UnsupportedOperationException();
	}

	@Override
	public boolean partitionExists(ObjectPath tablePath, CatalogPartitionSpec partitionSpec) throws CatalogException {
		throw new UnsupportedOperationException();
	}

	@Override
	public void createPartition(ObjectPath tablePath, CatalogPartitionSpec partitionSpec, CatalogPartition partition, boolean ignoreIfExists) throws TableNotExistException, TableNotPartitionedException, PartitionSpecInvalidException, PartitionAlreadyExistsException, CatalogException {
		throw new UnsupportedOperationException();
	}

	@Override
	public void dropPartition(ObjectPath tablePath, CatalogPartitionSpec partitionSpec, boolean ignoreIfNotExists) throws PartitionNotExistException, CatalogException {
		throw new UnsupportedOperationException();
	}

	@Override
	public void alterPartition(ObjectPath tablePath, CatalogPartitionSpec partitionSpec, CatalogPartition newPartition, boolean ignoreIfNotExists) throws PartitionNotExistException, CatalogException {
		throw new UnsupportedOperationException();
	}

	// ------ functions ------

	@Override
	public List<String> listFunctions(String dbName) throws DatabaseNotExistException, CatalogException {
		return Collections.emptyList();
	}

	@Override
	public CatalogFunction getFunction(ObjectPath functionPath) throws FunctionNotExistException, CatalogException {
		throw new UnsupportedOperationException();
	}

	@Override
	public boolean functionExists(ObjectPath functionPath) throws CatalogException {
		throw new UnsupportedOperationException();
	}

	@Override
	public void createFunction(ObjectPath functionPath, CatalogFunction function, boolean ignoreIfExists) throws FunctionAlreadyExistException, DatabaseNotExistException, CatalogException {
		throw new UnsupportedOperationException();
	}

	@Override
	public void alterFunction(ObjectPath functionPath, CatalogFunction newFunction, boolean ignoreIfNotExists) throws FunctionNotExistException, CatalogException {
		throw new UnsupportedOperationException();
	}

	@Override
	public void dropFunction(ObjectPath functionPath, boolean ignoreIfNotExists) throws FunctionNotExistException, CatalogException {
		throw new UnsupportedOperationException();
	}

	// ------ stats ------

	@Override
	public CatalogTableStatistics getTableStatistics(ObjectPath tablePath) throws TableNotExistException, CatalogException {
		return CatalogTableStatistics.UNKNOWN;
	}

	@Override
	public CatalogColumnStatistics getTableColumnStatistics(ObjectPath tablePath) throws TableNotExistException, CatalogException {
		return CatalogColumnStatistics.UNKNOWN;
	}

	@Override
	public CatalogTableStatistics getPartitionStatistics(ObjectPath tablePath, CatalogPartitionSpec partitionSpec) throws PartitionNotExistException, CatalogException {
		return CatalogTableStatistics.UNKNOWN;
	}

	@Override
	public CatalogColumnStatistics getPartitionColumnStatistics(ObjectPath tablePath, CatalogPartitionSpec partitionSpec) throws PartitionNotExistException, CatalogException {
		return CatalogColumnStatistics.UNKNOWN;
	}

	@Override
	public void alterTableStatistics(ObjectPath tablePath, CatalogTableStatistics tableStatistics, boolean ignoreIfNotExists) throws TableNotExistException, CatalogException {
		throw new UnsupportedOperationException();
	}

	@Override
	public void alterTableColumnStatistics(ObjectPath tablePath, CatalogColumnStatistics columnStatistics, boolean ignoreIfNotExists) throws TableNotExistException, CatalogException, TablePartitionedException {
		throw new UnsupportedOperationException();
	}

	@Override
	public void alterPartitionStatistics(ObjectPath tablePath, CatalogPartitionSpec partitionSpec, CatalogTableStatistics partitionStatistics, boolean ignoreIfNotExists) throws PartitionNotExistException, CatalogException {
		throw new UnsupportedOperationException();
	}

	@Override
	public void alterPartitionColumnStatistics(ObjectPath tablePath, CatalogPartitionSpec partitionSpec, CatalogColumnStatistics columnStatistics, boolean ignoreIfNotExists) throws PartitionNotExistException, CatalogException {
		throw new UnsupportedOperationException();
	}
}
| |
/**
* NOTE: This copyright does *not* cover user programs that use HQ
* program services by normal system calls through the application
* program interfaces provided as part of the Hyperic Plug-in Development
* Kit or the Hyperic Client Development Kit - this is merely considered
* normal use of the program, and does *not* fall under the heading of
* "derived work".
*
* Copyright (C) [2009-2011], VMware, Inc.
* This file is part of HQ.
*
* HQ is free software; you can redistribute it and/or modify
* it under the terms version 2 of the GNU General Public License as
* published by the Free Software Foundation. This program is distributed
* in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
* even the implied warranty of MERCHANTABILITY or FITNESS FOR A
* PARTICULAR PURPOSE. See the GNU General Public License for more
* details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
* USA.
*
*/
package org.hyperic.hq.appdef.server.session;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.annotation.PostConstruct;
import org.hibernate.Hibernate;
import org.hibernate.HibernateException;
import org.hibernate.Query;
import org.hibernate.SQLQuery;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.type.IntegerType;
import org.hibernate.type.StringType;
import org.hyperic.hq.appdef.Agent;
import org.hyperic.hq.common.shared.ServerConfigManager;
import org.hyperic.hq.dao.HibernateDAO;
import org.hyperic.hq.product.Plugin;
import org.hyperic.hq.product.server.session.PluginDAO;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.orm.hibernate3.HibernateCallback;
import org.springframework.orm.hibernate3.HibernateTemplate;
import org.springframework.stereotype.Repository;
@Repository
public class AgentPluginStatusDAO extends HibernateDAO<AgentPluginStatus> {
    // SQL fragment joining EAM_AGENT_PLUGIN_STATUS (alias s) to its agent and
    // restricting to agents whose version is >= the server's (bound as :serverVersion).
    private static final String LIMIT_S_TO_CURRENT_AGENTS = "join EAM_AGENT agent on s.agent_id = agent.id " +
        "where agent.version >= :serverVersion ";
    // Ids of current-version agents that also back at least one platform.
    private static final String SYNCHABLE_AGENT_IDS_QUERY_STRING = "select distinct agent_id from EAM_AGENT_PLUGIN_STATUS s "+
        LIMIT_S_TO_CURRENT_AGENTS + "and exists (select 1 from EAM_PLATFORM p where p.agent_id = s.agent_id)";
    // Complement of the synchable set: all agent ids NOT eligible for plugin sync.
    private static final String UNSYNCHABLE_AGENT_IDS_QUERY_STRING = "select distinct id from EAM_AGENT where id not in (" +
        SYNCHABLE_AGENT_IDS_QUERY_STRING + ")";
    // Collaborating DAOs/managers, injected via the constructor.
    private final AgentDAO agentDAO;
    private final PluginDAO pluginDAO;
    private final ServerConfigManager serverConfigManager;
    /**
     * @param factory Hibernate session factory passed to the base DAO
     * @param agentDAO resolves {@link Agent} entities by id
     * @param pluginDAO resolves {@link Plugin} entities by name
     * @param serverConfigManager supplies the server's major version used to
     *                            filter out agents older than the server
     */
    @Autowired
    public AgentPluginStatusDAO(SessionFactory factory, AgentDAO agentDAO, PluginDAO pluginDAO,
                                ServerConfigManager serverConfigManager) {
        super(AgentPluginStatus.class, factory);
        this.agentDAO = agentDAO;
        this.pluginDAO = pluginDAO;
        this.serverConfigManager = serverConfigManager;
    }
    /**
     * Loads every AgentPluginStatus entity once at startup.
     * NOTE(review): the name suggests this warms a Hibernate cache so later
     * lookups are cheap — confirm which cache (session vs. second-level) is
     * intended before relying on that.
     */
    @PostConstruct
    public void initCache() {
        // HibernateTemplate(…, true) allows creating a session outside a transaction.
        new HibernateTemplate(sessionFactory, true).execute(new HibernateCallback<Object>() {
            public Object doInHibernate(Session session) throws HibernateException, SQLException {
                session.createQuery("from AgentPluginStatus").list();
                return null;
            }
        });
    }
    /**
     * Persists the given status. Despite the name there is no separate update
     * path: this simply delegates to {@code save}, which handles both cases.
     */
    public void saveOrUpdate(AgentPluginStatus agentPluginStatus) {
        save(agentPluginStatus);
    }
/**
* @return {@link Map} of {@link String} of the jar-name to {@link AgentPluginStatus}
*/
@SuppressWarnings("unchecked")
public Map<String, AgentPluginStatus> getPluginStatusByAgent(Agent agent) {
final List<AgentPluginStatus> list =
getSession().createQuery("from AgentPluginStatus where agent = :agent")
.setParameter("agent", agent)
.list();
final Map<String, AgentPluginStatus> rtn = new HashMap<String, AgentPluginStatus>(list.size());
for (final AgentPluginStatus status : list) {
rtn.put(status.getFileName(), status);
}
return rtn;
}
public Map<Plugin, Collection<AgentPluginStatus>> getOutOfSyncAgentsByPlugin() {
final Map<Plugin, Collection<AgentPluginStatus>> rtn =
new HashMap<Plugin, Collection<AgentPluginStatus>>();
final List<Integer> list = getOutOfSyncPlugins(null);
for (final Integer id : list) {
final AgentPluginStatus st = get(id);
final String pluginName = st.getPluginName();
final Plugin plugin = pluginDAO.findByName(pluginName);
Collection<AgentPluginStatus> tmp;
if (null == (tmp = rtn.get(plugin))) {
tmp = new ArrayList<AgentPluginStatus>();
rtn.put(plugin, tmp);
}
tmp.add(st);
}
return rtn;
}
Map<Agent, Collection<AgentPluginStatus>> getOutOfSyncPluginsByAgent() {
final Map<Agent, Collection<AgentPluginStatus>> rtn =
new HashMap<Agent, Collection<AgentPluginStatus>>();
final List<Integer> list = getOutOfSyncPlugins(null);
for (final Integer id : list) {
final AgentPluginStatus st = get(id);
final int agentId = st.getAgent().getId();
final Agent agent = agentDAO.get(agentId);
Collection<AgentPluginStatus> tmp;
if (null == (tmp = rtn.get(agent))) {
tmp = new ArrayList<AgentPluginStatus>();
rtn.put(agent, tmp);
}
tmp.add(st);
}
return rtn;
}
/**
 * @param agentId the agent to inspect
 * @return plugin names of every out-of-sync status row for the given agent
 */
public List<String> getOutOfSyncPluginNamesByAgentId(int agentId) {
    final List<Integer> statusIds = getOutOfSyncPlugins(agentId);
    final List<String> pluginNames = new ArrayList<String>(statusIds.size());
    for (final Integer statusId : statusIds) {
        pluginNames.add(get(statusId).getPluginName());
    }
    return pluginNames;
}
/**
 * Get out of sync plugins for agents, whose version is not older than that of
 * the server.
 * A status row is "out of sync" when either (a) no non-deleted plugin row shares
 * its md5 for that agent, or (b) its last sync status is not SYNC_SUCCESS.
 * @param agentId may be null; when null, rows for all current agents are considered
 * @return {@link List} of {@link Integer} which represents the AgentPluginStatusId
 */
@SuppressWarnings("unchecked")
private List<Integer> getOutOfSyncPlugins(Integer agentId) {
    String serverMajorVersion = serverConfigManager.getServerMajorVersion();
    // Optional per-agent restriction; the parameter is only bound further down
    // when agentId != null, matching this fragment.
    final String agentSql = agentId == null ? "" : " s.agent_id = :agentId AND ";
    final String sql = new StringBuilder(256)
        .append("select distinct s.id ")
        .append("from EAM_AGENT_PLUGIN_STATUS s ")
        // NOTE(review): LIMIT_S_TO_CURRENT_AGENTS presumably contains the WHERE
        // clause referencing :serverVersion bound below — confirm.
        .append(LIMIT_S_TO_CURRENT_AGENTS)
        .append(" AND (")
        .append(agentSql)
        // (a) no live (non-deleted) plugin matches this row's md5 for the agent
        .append("not exists ( ")
        .append(" select 1 ")
        .append(" from EAM_PLUGIN p ")
        .append(" join EAM_AGENT_PLUGIN_STATUS st on p.md5 = st.md5 ")
        .append(" where st.agent_id = s.agent_id and s.md5 = st.md5 ")
        .append(" and p.deleted = '0'")
        .append(") ")
        // (b) or the last sync attempt did not succeed
        .append("OR s.last_sync_status != :syncSuccess)")
        .toString();
    final SQLQuery query = getSession().createSQLQuery(sql);
    if (agentId != null) {
        query.setParameter("agentId", agentId);
    }
    query.setParameter("serverVersion", serverMajorVersion);
    return query.addScalar("id", Hibernate.INTEGER)
                .setParameter("syncSuccess", AgentPluginStatusEnum.SYNC_SUCCESS.toString())
                .list();
}
/**
 * @return {@link Collection} of {@link Object[]} where [0] = agentId and [1] = pluginName
 */
@SuppressWarnings("unchecked")
Collection<Object[]> getPluginsNotOnAllAgents() {
    // Finds (agent, plugin) pairs where a platform-owning agent has no status
    // row for a live plugin, i.e. plugins missing from some agent.
    // NOTE(review): this mixes an implicit comma join (EAM_PLUGIN p, EAM_AGENT a)
    // with an explicit JOIN on EAM_PLATFORM; join-precedence is dialect-sensitive
    // in older SQL engines — verify against the supported databases.
    final String sql = new StringBuilder(256)
        .append("SELECT distinct a.id,p.name from EAM_PLUGIN p, EAM_AGENT a ")
        .append("JOIN EAM_PLATFORM pl on pl.agent_id = a.id ")
        .append("WHERE not exists ( ")
        .append(" SELECT 1 FROM EAM_AGENT_PLUGIN_STATUS s ")
        .append(" WHERE a.id = s.agent_id and s.plugin_name = p.name ")
        .append(") and p.deleted = '0'")
        .toString();
    return getSession().createSQLQuery(sql)
                       .addScalar("id", Hibernate.INTEGER)
                       .addScalar("name", Hibernate.STRING)
                       .list();
}
/**
 * @param agentId the agent to inspect
 * @return ids of every live (non-deleted) plugin that has no status row on the agent
 */
@SuppressWarnings("unchecked")
Collection<Integer> getPluginsNotOnAgent(int agentId) {
    // Same SQL text as before, assembled by plain concatenation.
    final String sql =
        "select p.id " +
        "from EAM_PLUGIN p " +
        "where not exists (" +
        " select 1 from EAM_AGENT_PLUGIN_STATUS " +
        " where agent_id = :agentId and plugin_name = p.name" +
        ") and p.deleted = '0'";
    return getSession().createSQLQuery(sql)
                       .addScalar("id", Hibernate.INTEGER)
                       .setParameter("agentId", agentId)
                       .list();
}
/**
 * Loads all status rows for one agent, keyed by plugin name.
 * @param agentId the agent id to query
 * @return map of plugin name to its {@link AgentPluginStatus} row
 */
public Map<String, AgentPluginStatus> getStatusByAgentId(Integer agentId) {
    @SuppressWarnings("unchecked")
    final Collection<AgentPluginStatus> statuses =
        getSession().createQuery("from AgentPluginStatus where agent.id = :agentId")
                    .setParameter("agentId", agentId)
                    .list();
    final Map<String, AgentPluginStatus> byPluginName =
        new HashMap<String, AgentPluginStatus>(statuses.size());
    for (final AgentPluginStatus status : statuses) {
        byPluginName.put(status.getPluginName(), status);
    }
    return byPluginName;
}
/**
 * Bulk version of {@code getStatusByAgentId(Integer)}.
 * @param agentIds agent ids to load; null or empty yields an empty map
 * @return map of agentId to (pluginName to {@link AgentPluginStatus})
 */
public Map<Integer, Map<String, AgentPluginStatus>> getStatusByAgentIds(Collection<Integer> agentIds) {
    if (agentIds == null || agentIds.isEmpty()) {
        return Collections.emptyMap();
    }
    final String hql = "from AgentPluginStatus where agent.id in (:agentIds)";
    @SuppressWarnings("unchecked")
    final Collection<AgentPluginStatus> list =
        getSession().createQuery(hql)
                    .setParameterList("agentIds", agentIds, new IntegerType())
                    .list();
    final Map<Integer, Map<String, AgentPluginStatus>> rtn =
        new HashMap<Integer, Map<String, AgentPluginStatus>>(list.size());
    for (final AgentPluginStatus status : list) {
        final Integer agentId = status.getAgent().getId();
        // Fix: reuse the cached agentId instead of re-walking the association
        // (the original called status.getAgent().getId() a second time here).
        Map<String, AgentPluginStatus> map = rtn.get(agentId);
        if (map == null) {
            map = new HashMap<String, AgentPluginStatus>();
            rtn.put(agentId, map);
        }
        map.put(status.getPluginName(), status);
    }
    return rtn;
}
/**
 * Finds status rows for one plugin jar whose lastSyncStatus is one of the given
 * values, restricted to agents that manage at least one platform.
 * @param fileName plugin jar file name to match
 * @param statuses sync states to include
 * @return matching {@link AgentPluginStatus} rows
 */
@SuppressWarnings("unchecked")
public Collection<AgentPluginStatus> getPluginStatusByFileName(String fileName,
                                                               Collection<AgentPluginStatusEnum> statuses) {
    // Bind the enum values by their string form, as the column stores strings.
    final List<String> statusNames = new ArrayList<String>(statuses.size());
    for (final AgentPluginStatusEnum status : statuses) {
        statusNames.add(status.toString());
    }
    final String hql =
        "from AgentPluginStatus s " +
        "where s.fileName = :fileName and s.lastSyncStatus in (:statuses) " +
        "and exists (select 1 from Platform p where p.agent.id = s.agent.id)";
    return getSession().createQuery(hql)
                       .setParameter("fileName", fileName)
                       .setParameterList("statuses", statusNames, new StringType())
                       .list();
}
/**
 * @return distinct ids of agents eligible for automatic plugin sync
 *         (per SYNCHABLE_AGENT_IDS_QUERY_STRING, bound to the server version)
 */
@SuppressWarnings("unchecked")
public Set<Integer> getAutoUpdatingAgentIDs() {
    final SQLQuery query = getSession().createSQLQuery(SYNCHABLE_AGENT_IDS_QUERY_STRING);
    query.setParameter("serverVersion", serverConfigManager.getServerMajorVersion());
    final List<Integer> agentIds = query.addScalar("agent_id", Hibernate.INTEGER).list();
    return new HashSet<Integer>(agentIds);
}
/**
 * @return {@link Agent} objects for every id in {@code getAutoUpdatingAgentIDs()}
 */
@SuppressWarnings("unchecked")
public Collection<Agent> getAutoUpdatingAgents() {
    final Set<Integer> agentIds = getAutoUpdatingAgentIDs();
    final List<Agent> agents = new ArrayList<Agent>(agentIds.size());
    for (final Integer agentId : agentIds) {
        agents.add(agentDAO.findById(agentId));
    }
    return agents;
}
/**
 * @return agents matched by UNSYNCHABLE_AGENT_IDS_QUERY_STRING (current agents
 *         that cannot be auto-synced), de-duplicated by id
 */
@SuppressWarnings("unchecked")
public List<Agent> getCurrentNonSyncAgents() {
    final SQLQuery query = getSession().createSQLQuery(UNSYNCHABLE_AGENT_IDS_QUERY_STRING);
    query.setParameter("serverVersion", serverConfigManager.getServerMajorVersion());
    final List<Integer> ids = query.addScalar("id", Hibernate.INTEGER).list();
    // De-duplicate before resolving ids to Agent objects.
    final Set<Integer> uniqueIds = new HashSet<Integer>(ids);
    final List<Agent> agents = new ArrayList<Agent>(uniqueIds.size());
    for (final Integer agentId : uniqueIds) {
        agents.add(agentDAO.findById(agentId));
    }
    return agents;
}
/** @return the number of distinct agents eligible for automatic plugin sync */
public Long getNumAutoUpdatingAgents() {
    return Long.valueOf(getAutoUpdatingAgentIDs().size());
}
/*
@SuppressWarnings("unchecked")
public Collection<Agent> getAutoUpdatingAgents() {
String serverMajorVersion = serverConfigManager.getServerMajorVersion();
final String hql = new StringBuilder(150)
.append("select distinct agent_id from EAM_AGENT_PLUGIN_STATUS s ")
.append(LIMIT_S_TO_CURRENT_AGENTS)
.append("and exists (select 1 from EAM_PLATFORM p where p.agent_id = s.agent_id)")
.toString();
final SQLQuery query = getSession().createSQLQuery(hql);
query.setParameter("serverVersion", serverMajorVersion);
final List<Integer> ids = query.addScalar("agent_id", Hibernate.INTEGER).list();
final List<Agent> rtn = new ArrayList<Agent>(ids.size());
for (final Integer agentId : ids) {
rtn.add(agentDAO.findById(agentId));
}
return rtn;
}
*/
/**
 * Removes the status rows for the given agent whose fileName is one of
 * {@code pluginFileNames}. Rows that disappear between the id query and the
 * individual {@code get(...)} are silently skipped.
 * @param agentId the agent whose rows are removed
 * @param pluginFileNames plugin jar names to remove; null or empty is a no-op
 *        (previously an empty collection would have generated an invalid
 *        empty SQL "in ()" clause)
 */
public void removeAgentPluginStatuses(Integer agentId, Collection<String> pluginFileNames) {
    if (pluginFileNames == null || pluginFileNames.isEmpty()) {
        return;
    }
    final String hql =
        "select id from AgentPluginStatus where agent.id = :agentId and fileName in (:filenames)";
    @SuppressWarnings("unchecked")
    final List<Integer> list =
        getSession().createQuery(hql)
                    .setParameter("agentId", agentId, new IntegerType())
                    .setParameterList("filenames", pluginFileNames)
                    .list();
    for (final Integer sapsId : list) {
        AgentPluginStatus status = get(sapsId);
        if (status == null) {
            continue;
        }
        remove(status);
    }
}
/**
 * Finds status rows that reference plugins which no longer exist (or are
 * flagged deleted) and groups them by agent, so the stale files can be
 * removed from each agent. Rows whose status or agent cannot be resolved
 * are skipped.
 * @return map of agent to its stale status rows; empty map when none
 */
public Map<Agent, Collection<AgentPluginStatus>> getPluginsToRemoveFromAgents() {
    // Native SQL (the original local was misleadingly named "hql").
    final String sql =
        "select s.id FROM EAM_AGENT_PLUGIN_STATUS s " +
        "where not exists (" +
        "select 1 from EAM_PLUGIN p where p.name = s.plugin_name and p.deleted = '0')";
    @SuppressWarnings("unchecked")
    final List<Integer> statusIds =
        getSession().createSQLQuery(sql)
                    .addScalar("id", Hibernate.INTEGER)
                    .list();
    if (statusIds.isEmpty()) {
        return Collections.emptyMap();
    }
    final Map<Agent, Collection<AgentPluginStatus>> byAgent =
        new HashMap<Agent, Collection<AgentPluginStatus>>(statusIds.size());
    for (final Integer statusId : statusIds) {
        final AgentPluginStatus status = get(statusId);
        if (status == null) {
            continue;
        }
        final Agent agent = status.getAgent();
        if (agent == null) {
            continue;
        }
        Collection<AgentPluginStatus> bucket = byAgent.get(agent);
        if (bucket == null) {
            bucket = new ArrayList<AgentPluginStatus>();
            byAgent.put(agent, bucket);
        }
        bucket.add(status);
    }
    return byAgent;
}
/**
 * @param agentId the agent to query
 * @param fileNames plugin jar names to match; null or empty yields an empty
 *        list (previously an empty collection would have generated an invalid
 *        empty SQL "in ()" clause)
 * @return status rows for the agent whose fileName is in {@code fileNames}
 */
@SuppressWarnings("unchecked")
public Collection<AgentPluginStatus> getStatusByAgentAndFileNames(Integer agentId,
                                                                  Collection<String> fileNames) {
    if (fileNames == null || fileNames.isEmpty()) {
        return Collections.emptyList();
    }
    final String hql =
        "from AgentPluginStatus where agent.id = :agentId AND fileName in (:fileNames)";
    return getSession().createQuery(hql)
                       .setParameterList("fileNames", fileNames)
                       .setInteger("agentId", agentId)
                       .list();
}
/**
 * @param fileName plugin jar name to look up
 * @return map of {@link Agent} to its status row for the given jar; rows whose
 *         status or agent cannot be resolved are skipped
 */
public Map<Agent, AgentPluginStatus> getPluginStatusByFileName(String fileName) {
    @SuppressWarnings("unchecked")
    final List<Integer> statusIds =
        getSession().createQuery("select id from AgentPluginStatus where fileName = :fileName")
                    .setParameter("fileName", fileName)
                    .list();
    final Map<Agent, AgentPluginStatus> byAgent =
        new HashMap<Agent, AgentPluginStatus>(statusIds.size());
    for (final Integer statusId : statusIds) {
        final AgentPluginStatus status = get(statusId);
        if (status == null) {
            continue;
        }
        final Agent agent = status.getAgent();
        if (agent != null) {
            byAgent.put(agent, status);
        }
    }
    return byAgent;
}
/**
 * Counts status rows per plugin jar name, restricted to agents that own at
 * least one platform.
 * Parameter semantics: a null {@code pluginFileNames} means "count all jars";
 * an empty collection short-circuits to an empty map (no query is run).
 * @param pluginFileNames jar names to restrict the count to, or null for all
 * @return map of jar file name to its row count
 */
public Map<String, Long> getFileNameCounts(Collection<String> pluginFileNames) {
    // Empty (but non-null) filter: nothing can match, avoid an "in ()" clause.
    if (pluginFileNames != null && pluginFileNames.isEmpty()) {
        return Collections.emptyMap();
    }
    // Only add the IN restriction when a filter was supplied.
    String and = "";
    if (pluginFileNames != null) {
        and = "and s.fileName in (:filenames) ";
    }
    final String hql = new StringBuilder(256)
        .append("select s.fileName, count(*) from AgentPluginStatus s ")
        .append("where exists (")
        .append("select 1 from Agent a ")
        .append("join a.platforms p ")
        .append("where a.id = s.agent.id) ")
        .append(and)
        .append("group by s.fileName")
        .toString();
    final Query query = getSession().createQuery(hql);
    if (pluginFileNames != null) {
        query.setParameterList("filenames", pluginFileNames);
    }
    @SuppressWarnings("unchecked")
    final List<Object[]> list = query.list();
    final Map<String, Long> rtn = new HashMap<String, Long>(list.size());
    for (final Object[] obj : list) {
        // obj[0] = fileName, obj[1] = count (numeric type varies by dialect).
        rtn.put((String) obj[0], ((Number) obj[1]).longValue());
    }
    return rtn;
}
/**
 * Counts status rows for all plugin jars (null filter = no restriction).
 * @return map of jar file name to its row count
 */
public Map<String, Long> getFileNameCounts() {
    return getFileNameCounts(null);
}
/**
 * @return plugins flagged deleted for which no platform-owning agent still has
 *         a status row referencing the plugin's path
 */
@SuppressWarnings("unchecked")
public Collection<Plugin> getOrphanedPlugins() {
    final String hql =
        "from Plugin p where deleted = '1' and not exists (" +
        " select 1 from AgentPluginStatus s" +
        " join s.agent a" +
        " join a.platforms pl" +
        " where s.fileName = p.path" +
        ")";
    return getSession().createQuery(hql).list();
}
/**
 * @param keys sync states to include; a null or empty array yields an empty
 *        list (previously an empty varargs array would have generated an
 *        invalid empty SQL "in ()" clause)
 * @return status rows whose lastSyncStatus is in {@code keys}, restricted to
 *         agents that manage at least one platform
 */
@SuppressWarnings("unchecked")
public List<AgentPluginStatus> getPluginStatusByAgent(AgentPluginStatusEnum ... keys) {
    if (keys == null || keys.length == 0) {
        return Collections.emptyList();
    }
    final List<String> vals = new ArrayList<String>(keys.length);
    for (AgentPluginStatusEnum key : keys) {
        vals.add(key.toString());
    }
    final String hql = new StringBuilder(128)
        .append("from AgentPluginStatus s ")
        .append("where exists (select 1 from Platform p where p.agent.id = s.agent.id) ")
        .append("and s.lastSyncStatus in (:statuses)")
        .toString();
    return getSession()
        .createQuery(hql)
        .setParameterList("statuses", vals, new StringType())
        .list();
}
/** @return the id of every AgentPluginStatus row in the table */
@SuppressWarnings("unchecked")
public List<Integer> getAllIds() {
    return getSession().createQuery("select id from AgentPluginStatus").list();
}
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v8/enums/promotion_extension_occasion.proto
package com.google.ads.googleads.v8.enums;
/**
* <pre>
* Container for enum describing a promotion extension occasion.
* For more information about the occasions please check:
* https://support.google.com/google-ads/answer/7367521
* </pre>
*
* Protobuf type {@code google.ads.googleads.v8.enums.PromotionExtensionOccasionEnum}
*/
public final class PromotionExtensionOccasionEnum extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v8.enums.PromotionExtensionOccasionEnum)
PromotionExtensionOccasionEnumOrBuilder {
private static final long serialVersionUID = 0L;
// Use PromotionExtensionOccasionEnum.newBuilder() to construct.
private PromotionExtensionOccasionEnum(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private PromotionExtensionOccasionEnum() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new PromotionExtensionOccasionEnum();
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
/**
 * Parsing constructor generated by protoc. This message declares no fields,
 * so every non-zero tag is collected into the unknown-field set; tag 0 marks
 * end of input. IO failures are wrapped in InvalidProtocolBufferException.
 */
private PromotionExtensionOccasionEnum(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  this();
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  com.google.protobuf.UnknownFieldSet.Builder unknownFields =
      com.google.protobuf.UnknownFieldSet.newBuilder();
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          // End of stream.
          done = true;
          break;
        default: {
          // No known fields: preserve everything as unknown fields.
          if (!parseUnknownField(
              input, unknownFields, extensionRegistry, tag)) {
            done = true;
          }
          break;
        }
      }
    }
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    throw new com.google.protobuf.InvalidProtocolBufferException(
        e).setUnfinishedMessage(this);
  } finally {
    // Always attach whatever was parsed, even on failure paths.
    this.unknownFields = unknownFields.build();
    makeExtensionsImmutable();
  }
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v8.enums.PromotionExtensionOccasionProto.internal_static_google_ads_googleads_v8_enums_PromotionExtensionOccasionEnum_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v8.enums.PromotionExtensionOccasionProto.internal_static_google_ads_googleads_v8_enums_PromotionExtensionOccasionEnum_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v8.enums.PromotionExtensionOccasionEnum.class, com.google.ads.googleads.v8.enums.PromotionExtensionOccasionEnum.Builder.class);
}
/**
* <pre>
* A promotion extension occasion.
* </pre>
*
* Protobuf enum {@code google.ads.googleads.v8.enums.PromotionExtensionOccasionEnum.PromotionExtensionOccasion}
*/
public enum PromotionExtensionOccasion
implements com.google.protobuf.ProtocolMessageEnum {
/**
* <pre>
* Not specified.
* </pre>
*
* <code>UNSPECIFIED = 0;</code>
*/
UNSPECIFIED(0),
/**
* <pre>
* Used for return value only. Represents value unknown in this version.
* </pre>
*
* <code>UNKNOWN = 1;</code>
*/
UNKNOWN(1),
/**
* <pre>
* New Year's.
* </pre>
*
* <code>NEW_YEARS = 2;</code>
*/
NEW_YEARS(2),
/**
* <pre>
* Chinese New Year.
* </pre>
*
* <code>CHINESE_NEW_YEAR = 3;</code>
*/
CHINESE_NEW_YEAR(3),
/**
* <pre>
* Valentine's Day.
* </pre>
*
* <code>VALENTINES_DAY = 4;</code>
*/
VALENTINES_DAY(4),
/**
* <pre>
* Easter.
* </pre>
*
* <code>EASTER = 5;</code>
*/
EASTER(5),
/**
* <pre>
* Mother's Day.
* </pre>
*
* <code>MOTHERS_DAY = 6;</code>
*/
MOTHERS_DAY(6),
/**
* <pre>
* Father's Day.
* </pre>
*
* <code>FATHERS_DAY = 7;</code>
*/
FATHERS_DAY(7),
/**
* <pre>
* Labor Day.
* </pre>
*
* <code>LABOR_DAY = 8;</code>
*/
LABOR_DAY(8),
/**
* <pre>
* Back To School.
* </pre>
*
* <code>BACK_TO_SCHOOL = 9;</code>
*/
BACK_TO_SCHOOL(9),
/**
* <pre>
* Halloween.
* </pre>
*
* <code>HALLOWEEN = 10;</code>
*/
HALLOWEEN(10),
/**
* <pre>
* Black Friday.
* </pre>
*
* <code>BLACK_FRIDAY = 11;</code>
*/
BLACK_FRIDAY(11),
/**
* <pre>
* Cyber Monday.
* </pre>
*
* <code>CYBER_MONDAY = 12;</code>
*/
CYBER_MONDAY(12),
/**
* <pre>
* Christmas.
* </pre>
*
* <code>CHRISTMAS = 13;</code>
*/
CHRISTMAS(13),
/**
* <pre>
* Boxing Day.
* </pre>
*
* <code>BOXING_DAY = 14;</code>
*/
BOXING_DAY(14),
/**
* <pre>
* Independence Day in any country.
* </pre>
*
* <code>INDEPENDENCE_DAY = 15;</code>
*/
INDEPENDENCE_DAY(15),
/**
* <pre>
* National Day in any country.
* </pre>
*
* <code>NATIONAL_DAY = 16;</code>
*/
NATIONAL_DAY(16),
/**
* <pre>
* End of any season.
* </pre>
*
* <code>END_OF_SEASON = 17;</code>
*/
END_OF_SEASON(17),
/**
* <pre>
* Winter Sale.
* </pre>
*
* <code>WINTER_SALE = 18;</code>
*/
WINTER_SALE(18),
/**
* <pre>
* Summer sale.
* </pre>
*
* <code>SUMMER_SALE = 19;</code>
*/
SUMMER_SALE(19),
/**
* <pre>
* Fall Sale.
* </pre>
*
* <code>FALL_SALE = 20;</code>
*/
FALL_SALE(20),
/**
* <pre>
* Spring Sale.
* </pre>
*
* <code>SPRING_SALE = 21;</code>
*/
SPRING_SALE(21),
/**
* <pre>
* Ramadan.
* </pre>
*
* <code>RAMADAN = 22;</code>
*/
RAMADAN(22),
/**
* <pre>
* Eid al-Fitr.
* </pre>
*
* <code>EID_AL_FITR = 23;</code>
*/
EID_AL_FITR(23),
/**
* <pre>
* Eid al-Adha.
* </pre>
*
* <code>EID_AL_ADHA = 24;</code>
*/
EID_AL_ADHA(24),
/**
* <pre>
* Singles Day.
* </pre>
*
* <code>SINGLES_DAY = 25;</code>
*/
SINGLES_DAY(25),
/**
* <pre>
* Women's Day.
* </pre>
*
* <code>WOMENS_DAY = 26;</code>
*/
WOMENS_DAY(26),
/**
* <pre>
* Holi.
* </pre>
*
* <code>HOLI = 27;</code>
*/
HOLI(27),
/**
* <pre>
* Parent's Day.
* </pre>
*
* <code>PARENTS_DAY = 28;</code>
*/
PARENTS_DAY(28),
/**
* <pre>
* St. Nicholas Day.
* </pre>
*
* <code>ST_NICHOLAS_DAY = 29;</code>
*/
ST_NICHOLAS_DAY(29),
/**
* <pre>
* Carnival.
* </pre>
*
* <code>CARNIVAL = 30;</code>
*/
CARNIVAL(30),
/**
* <pre>
* Epiphany, also known as Three Kings' Day.
* </pre>
*
* <code>EPIPHANY = 31;</code>
*/
EPIPHANY(31),
/**
* <pre>
* Rosh Hashanah.
* </pre>
*
* <code>ROSH_HASHANAH = 32;</code>
*/
ROSH_HASHANAH(32),
/**
* <pre>
* Passover.
* </pre>
*
* <code>PASSOVER = 33;</code>
*/
PASSOVER(33),
/**
* <pre>
* Hanukkah.
* </pre>
*
* <code>HANUKKAH = 34;</code>
*/
HANUKKAH(34),
/**
* <pre>
* Diwali.
* </pre>
*
* <code>DIWALI = 35;</code>
*/
DIWALI(35),
/**
* <pre>
* Navratri.
* </pre>
*
* <code>NAVRATRI = 36;</code>
*/
NAVRATRI(36),
/**
* <pre>
* Available in Thai: Songkran.
* </pre>
*
* <code>SONGKRAN = 37;</code>
*/
SONGKRAN(37),
/**
* <pre>
* Available in Japanese: Year-end Gift.
* </pre>
*
* <code>YEAR_END_GIFT = 38;</code>
*/
YEAR_END_GIFT(38),
UNRECOGNIZED(-1),
;
/**
* <pre>
* Not specified.
* </pre>
*
* <code>UNSPECIFIED = 0;</code>
*/
public static final int UNSPECIFIED_VALUE = 0;
/**
* <pre>
* Used for return value only. Represents value unknown in this version.
* </pre>
*
* <code>UNKNOWN = 1;</code>
*/
public static final int UNKNOWN_VALUE = 1;
/**
* <pre>
* New Year's.
* </pre>
*
* <code>NEW_YEARS = 2;</code>
*/
public static final int NEW_YEARS_VALUE = 2;
/**
* <pre>
* Chinese New Year.
* </pre>
*
* <code>CHINESE_NEW_YEAR = 3;</code>
*/
public static final int CHINESE_NEW_YEAR_VALUE = 3;
/**
* <pre>
* Valentine's Day.
* </pre>
*
* <code>VALENTINES_DAY = 4;</code>
*/
public static final int VALENTINES_DAY_VALUE = 4;
/**
* <pre>
* Easter.
* </pre>
*
* <code>EASTER = 5;</code>
*/
public static final int EASTER_VALUE = 5;
/**
* <pre>
* Mother's Day.
* </pre>
*
* <code>MOTHERS_DAY = 6;</code>
*/
public static final int MOTHERS_DAY_VALUE = 6;
/**
* <pre>
* Father's Day.
* </pre>
*
* <code>FATHERS_DAY = 7;</code>
*/
public static final int FATHERS_DAY_VALUE = 7;
/**
* <pre>
* Labor Day.
* </pre>
*
* <code>LABOR_DAY = 8;</code>
*/
public static final int LABOR_DAY_VALUE = 8;
/**
* <pre>
* Back To School.
* </pre>
*
* <code>BACK_TO_SCHOOL = 9;</code>
*/
public static final int BACK_TO_SCHOOL_VALUE = 9;
/**
* <pre>
* Halloween.
* </pre>
*
* <code>HALLOWEEN = 10;</code>
*/
public static final int HALLOWEEN_VALUE = 10;
/**
* <pre>
* Black Friday.
* </pre>
*
* <code>BLACK_FRIDAY = 11;</code>
*/
public static final int BLACK_FRIDAY_VALUE = 11;
/**
* <pre>
* Cyber Monday.
* </pre>
*
* <code>CYBER_MONDAY = 12;</code>
*/
public static final int CYBER_MONDAY_VALUE = 12;
/**
* <pre>
* Christmas.
* </pre>
*
* <code>CHRISTMAS = 13;</code>
*/
public static final int CHRISTMAS_VALUE = 13;
/**
* <pre>
* Boxing Day.
* </pre>
*
* <code>BOXING_DAY = 14;</code>
*/
public static final int BOXING_DAY_VALUE = 14;
/**
* <pre>
* Independence Day in any country.
* </pre>
*
* <code>INDEPENDENCE_DAY = 15;</code>
*/
public static final int INDEPENDENCE_DAY_VALUE = 15;
/**
* <pre>
* National Day in any country.
* </pre>
*
* <code>NATIONAL_DAY = 16;</code>
*/
public static final int NATIONAL_DAY_VALUE = 16;
/**
* <pre>
* End of any season.
* </pre>
*
* <code>END_OF_SEASON = 17;</code>
*/
public static final int END_OF_SEASON_VALUE = 17;
/**
* <pre>
* Winter Sale.
* </pre>
*
* <code>WINTER_SALE = 18;</code>
*/
public static final int WINTER_SALE_VALUE = 18;
/**
* <pre>
* Summer sale.
* </pre>
*
* <code>SUMMER_SALE = 19;</code>
*/
public static final int SUMMER_SALE_VALUE = 19;
/**
* <pre>
* Fall Sale.
* </pre>
*
* <code>FALL_SALE = 20;</code>
*/
public static final int FALL_SALE_VALUE = 20;
/**
* <pre>
* Spring Sale.
* </pre>
*
* <code>SPRING_SALE = 21;</code>
*/
public static final int SPRING_SALE_VALUE = 21;
/**
* <pre>
* Ramadan.
* </pre>
*
* <code>RAMADAN = 22;</code>
*/
public static final int RAMADAN_VALUE = 22;
/**
* <pre>
* Eid al-Fitr.
* </pre>
*
* <code>EID_AL_FITR = 23;</code>
*/
public static final int EID_AL_FITR_VALUE = 23;
/**
* <pre>
* Eid al-Adha.
* </pre>
*
* <code>EID_AL_ADHA = 24;</code>
*/
public static final int EID_AL_ADHA_VALUE = 24;
/**
* <pre>
* Singles Day.
* </pre>
*
* <code>SINGLES_DAY = 25;</code>
*/
public static final int SINGLES_DAY_VALUE = 25;
/**
* <pre>
* Women's Day.
* </pre>
*
* <code>WOMENS_DAY = 26;</code>
*/
public static final int WOMENS_DAY_VALUE = 26;
/**
* <pre>
* Holi.
* </pre>
*
* <code>HOLI = 27;</code>
*/
public static final int HOLI_VALUE = 27;
/**
* <pre>
* Parent's Day.
* </pre>
*
* <code>PARENTS_DAY = 28;</code>
*/
public static final int PARENTS_DAY_VALUE = 28;
/**
* <pre>
* St. Nicholas Day.
* </pre>
*
* <code>ST_NICHOLAS_DAY = 29;</code>
*/
public static final int ST_NICHOLAS_DAY_VALUE = 29;
/**
* <pre>
* Carnival.
* </pre>
*
* <code>CARNIVAL = 30;</code>
*/
public static final int CARNIVAL_VALUE = 30;
/**
* <pre>
* Epiphany, also known as Three Kings' Day.
* </pre>
*
* <code>EPIPHANY = 31;</code>
*/
public static final int EPIPHANY_VALUE = 31;
/**
* <pre>
* Rosh Hashanah.
* </pre>
*
* <code>ROSH_HASHANAH = 32;</code>
*/
public static final int ROSH_HASHANAH_VALUE = 32;
/**
* <pre>
* Passover.
* </pre>
*
* <code>PASSOVER = 33;</code>
*/
public static final int PASSOVER_VALUE = 33;
/**
* <pre>
* Hanukkah.
* </pre>
*
* <code>HANUKKAH = 34;</code>
*/
public static final int HANUKKAH_VALUE = 34;
/**
* <pre>
* Diwali.
* </pre>
*
* <code>DIWALI = 35;</code>
*/
public static final int DIWALI_VALUE = 35;
/**
* <pre>
* Navratri.
* </pre>
*
* <code>NAVRATRI = 36;</code>
*/
public static final int NAVRATRI_VALUE = 36;
/**
* <pre>
* Available in Thai: Songkran.
* </pre>
*
* <code>SONGKRAN = 37;</code>
*/
public static final int SONGKRAN_VALUE = 37;
/**
* <pre>
* Available in Japanese: Year-end Gift.
* </pre>
*
* <code>YEAR_END_GIFT = 38;</code>
*/
public static final int YEAR_END_GIFT_VALUE = 38;
public final int getNumber() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalArgumentException(
"Can't get the number of an unknown enum value.");
}
return value;
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static PromotionExtensionOccasion valueOf(int value) {
return forNumber(value);
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
*/
public static PromotionExtensionOccasion forNumber(int value) {
switch (value) {
case 0: return UNSPECIFIED;
case 1: return UNKNOWN;
case 2: return NEW_YEARS;
case 3: return CHINESE_NEW_YEAR;
case 4: return VALENTINES_DAY;
case 5: return EASTER;
case 6: return MOTHERS_DAY;
case 7: return FATHERS_DAY;
case 8: return LABOR_DAY;
case 9: return BACK_TO_SCHOOL;
case 10: return HALLOWEEN;
case 11: return BLACK_FRIDAY;
case 12: return CYBER_MONDAY;
case 13: return CHRISTMAS;
case 14: return BOXING_DAY;
case 15: return INDEPENDENCE_DAY;
case 16: return NATIONAL_DAY;
case 17: return END_OF_SEASON;
case 18: return WINTER_SALE;
case 19: return SUMMER_SALE;
case 20: return FALL_SALE;
case 21: return SPRING_SALE;
case 22: return RAMADAN;
case 23: return EID_AL_FITR;
case 24: return EID_AL_ADHA;
case 25: return SINGLES_DAY;
case 26: return WOMENS_DAY;
case 27: return HOLI;
case 28: return PARENTS_DAY;
case 29: return ST_NICHOLAS_DAY;
case 30: return CARNIVAL;
case 31: return EPIPHANY;
case 32: return ROSH_HASHANAH;
case 33: return PASSOVER;
case 34: return HANUKKAH;
case 35: return DIWALI;
case 36: return NAVRATRI;
case 37: return SONGKRAN;
case 38: return YEAR_END_GIFT;
default: return null;
}
}
public static com.google.protobuf.Internal.EnumLiteMap<PromotionExtensionOccasion>
internalGetValueMap() {
return internalValueMap;
}
private static final com.google.protobuf.Internal.EnumLiteMap<
PromotionExtensionOccasion> internalValueMap =
new com.google.protobuf.Internal.EnumLiteMap<PromotionExtensionOccasion>() {
public PromotionExtensionOccasion findValueByNumber(int number) {
return PromotionExtensionOccasion.forNumber(number);
}
};
public final com.google.protobuf.Descriptors.EnumValueDescriptor
getValueDescriptor() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalStateException(
"Can't get the descriptor of an unrecognized enum value.");
}
return getDescriptor().getValues().get(ordinal());
}
public final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptorForType() {
return getDescriptor();
}
public static final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptor() {
return com.google.ads.googleads.v8.enums.PromotionExtensionOccasionEnum.getDescriptor().getEnumTypes().get(0);
}
private static final PromotionExtensionOccasion[] VALUES = values();
public static PromotionExtensionOccasion valueOf(
com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"EnumValueDescriptor is not for this type.");
}
if (desc.getIndex() == -1) {
return UNRECOGNIZED;
}
return VALUES[desc.getIndex()];
}
private final int value;
private PromotionExtensionOccasion(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:google.ads.googleads.v8.enums.PromotionExtensionOccasionEnum.PromotionExtensionOccasion)
}
// Memoized result of isInitialized(): -1 = not yet computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  // This message has no required fields, so it is always initialized.
  memoizedIsInitialized = 1;
  return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.ads.googleads.v8.enums.PromotionExtensionOccasionEnum)) {
    return super.equals(obj);
  }
  com.google.ads.googleads.v8.enums.PromotionExtensionOccasionEnum other = (com.google.ads.googleads.v8.enums.PromotionExtensionOccasionEnum) obj;
  // This message declares no fields, so equality reduces to comparing the
  // unknown-field sets.
  if (!unknownFields.equals(other.unknownFields)) return false;
  return true;
}
@java.lang.Override
public int hashCode() {
  // Memoized; note a computed hash of exactly 0 is never cached and would be
  // recomputed on each call (benign, standard protoc codegen behavior).
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (29 * hash) + unknownFields.hashCode();
  memoizedHashCode = hash;
  return hash;
}
public static com.google.ads.googleads.v8.enums.PromotionExtensionOccasionEnum parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v8.enums.PromotionExtensionOccasionEnum parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v8.enums.PromotionExtensionOccasionEnum parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v8.enums.PromotionExtensionOccasionEnum parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v8.enums.PromotionExtensionOccasionEnum parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v8.enums.PromotionExtensionOccasionEnum parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v8.enums.PromotionExtensionOccasionEnum parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v8.enums.PromotionExtensionOccasionEnum parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v8.enums.PromotionExtensionOccasionEnum parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v8.enums.PromotionExtensionOccasionEnum parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v8.enums.PromotionExtensionOccasionEnum parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v8.enums.PromotionExtensionOccasionEnum parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
  // Builder factory methods — standard generated-message surface; do not hand-edit.
  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  /** Creates a new, empty builder. */
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  /** Creates a builder pre-populated with the state of {@code prototype}. */
  public static Builder newBuilder(com.google.ads.googleads.v8.enums.PromotionExtensionOccasionEnum prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // Cheap path: the default instance yields a fresh builder without a merge.
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   * Container for enum describing a promotion extension occasion.
   * For more information about the occasions please check:
   * https://support.google.com/google-ads/answer/7367521
   * </pre>
   *
   * Protobuf type {@code google.ads.googleads.v8.enums.PromotionExtensionOccasionEnum}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.ads.googleads.v8.enums.PromotionExtensionOccasionEnum)
      com.google.ads.googleads.v8.enums.PromotionExtensionOccasionEnumOrBuilder {
    // NOTE: this message declares no fields (it only namespaces its nested enum),
    // so most methods below are compiler-emitted pass-throughs to the superclass.
    // Do not hand-edit; regenerate from the .proto instead.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.ads.googleads.v8.enums.PromotionExtensionOccasionProto.internal_static_google_ads_googleads_v8_enums_PromotionExtensionOccasionEnum_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.ads.googleads.v8.enums.PromotionExtensionOccasionProto.internal_static_google_ads_googleads_v8_enums_PromotionExtensionOccasionEnum_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.ads.googleads.v8.enums.PromotionExtensionOccasionEnum.class, com.google.ads.googleads.v8.enums.PromotionExtensionOccasionEnum.Builder.class);
    }
    // Construct using com.google.ads.googleads.v8.enums.PromotionExtensionOccasionEnum.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      // Intentionally empty body: no message/repeated fields exist to initialize.
      if (com.google.protobuf.GeneratedMessageV3
          .alwaysUseFieldBuilders) {
      }
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.ads.googleads.v8.enums.PromotionExtensionOccasionProto.internal_static_google_ads_googleads_v8_enums_PromotionExtensionOccasionEnum_descriptor;
    }
    @java.lang.Override
    public com.google.ads.googleads.v8.enums.PromotionExtensionOccasionEnum getDefaultInstanceForType() {
      return com.google.ads.googleads.v8.enums.PromotionExtensionOccasionEnum.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.ads.googleads.v8.enums.PromotionExtensionOccasionEnum build() {
      com.google.ads.googleads.v8.enums.PromotionExtensionOccasionEnum result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.ads.googleads.v8.enums.PromotionExtensionOccasionEnum buildPartial() {
      com.google.ads.googleads.v8.enums.PromotionExtensionOccasionEnum result = new com.google.ads.googleads.v8.enums.PromotionExtensionOccasionEnum(this);
      onBuilt();
      return result;
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.ads.googleads.v8.enums.PromotionExtensionOccasionEnum) {
        return mergeFrom((com.google.ads.googleads.v8.enums.PromotionExtensionOccasionEnum)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(com.google.ads.googleads.v8.enums.PromotionExtensionOccasionEnum other) {
      if (other == com.google.ads.googleads.v8.enums.PromotionExtensionOccasionEnum.getDefaultInstance()) return this;
      // Only unknown fields can carry state for this field-less message.
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.ads.googleads.v8.enums.PromotionExtensionOccasionEnum parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        // Keep whatever was parsed before the failure, then rethrow as IOException.
        parsedMessage = (com.google.ads.googleads.v8.enums.PromotionExtensionOccasionEnum) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.ads.googleads.v8.enums.PromotionExtensionOccasionEnum)
  }
// @@protoc_insertion_point(class_scope:google.ads.googleads.v8.enums.PromotionExtensionOccasionEnum)
  // Shared immutable default instance, created eagerly at class-load time.
  private static final com.google.ads.googleads.v8.enums.PromotionExtensionOccasionEnum DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.ads.googleads.v8.enums.PromotionExtensionOccasionEnum();
  }
  /** Returns the shared default (empty) instance of this message. */
  public static com.google.ads.googleads.v8.enums.PromotionExtensionOccasionEnum getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser singleton; delegates to the (input, extensionRegistry) constructor.
  private static final com.google.protobuf.Parser<PromotionExtensionOccasionEnum>
      PARSER = new com.google.protobuf.AbstractParser<PromotionExtensionOccasionEnum>() {
    @java.lang.Override
    public PromotionExtensionOccasionEnum parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return new PromotionExtensionOccasionEnum(input, extensionRegistry);
    }
  };
  /** Static accessor for the message parser. */
  public static com.google.protobuf.Parser<PromotionExtensionOccasionEnum> parser() {
    return PARSER;
  }
  /** Instance accessor for the message parser. */
  @java.lang.Override
  public com.google.protobuf.Parser<PromotionExtensionOccasionEnum> getParserForType() {
    return PARSER;
  }
  /** Returns the shared default instance (messages are value-like; no per-instance state here). */
  @java.lang.Override
  public com.google.ads.googleads.v8.enums.PromotionExtensionOccasionEnum getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
| |
/*
* Copyright (c) 2012-2015, Luigi R. Viggiano
* All rights reserved.
*
* This software is distributable under the BSD license.
* See the terms of the BSD license in the documentation provided with this software.
*/
package org.aeonbits.owner;
import org.aeonbits.owner.Config.DisableFeature;
import org.aeonbits.owner.Config.DisableableFeature;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.UnsupportedEncodingException;
import java.lang.reflect.Method;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.jar.JarOutputStream;
import java.util.zip.ZipEntry;
import static java.io.File.createTempFile;
import static java.lang.String.format;
import static java.net.URLDecoder.decode;
import static java.util.Arrays.asList;
/**
 * This class contains utility methods used all over the library.
 *
 * @author Luigi R. Viggiano
 */
abstract class Util {

    /** Abstraction over the system clock, so tests can control "now". */
    interface TimeProvider {
        long getTime();
    }

    /** Abstraction over system properties/environment, so tests can stub them. */
    interface SystemProvider {
        String getProperty(String key);
        Map<String, String> getenv();
        Properties getProperties();
    }

    // Replaceable in tests; defaults to the real wall clock.
    static TimeProvider timeProvider = new TimeProvider() {
        public long getTime() {
            return System.currentTimeMillis();
        }
    };

    // Replaceable in tests; defaults to the real System properties/env.
    static SystemProvider system = new SystemProvider() {
        public String getProperty(String key) {
            return System.getProperty(key);
        }
        public Map<String, String> getenv() {
            return System.getenv();
        }
        public Properties getProperties() {
            return System.getProperties();
        }
    };

    /** Don't let anyone instantiate this class */
    private Util() {}

    /** Returns a reversed copy of the given list; the source list is left untouched. */
    static <T> List<T> reverse(List<T> src) {
        List<T> copy = new ArrayList<T>(src);
        Collections.reverse(copy);
        return copy;
    }

    /** Returns a reversed copy of the given array; the source array is left untouched. */
    @SuppressWarnings("unchecked")
    static <T> T[] reverse(T[] array) {
        T[] copy = array.clone();
        // asList is a live view over the array, so this reverses `copy` in place.
        Collections.reverse(asList(copy));
        return copy;
    }

    /**
     * Expands a leading '~' (also in the "file:~/..." and "jar:file:~/..." forms)
     * to the current user's home directory; any other text is returned unchanged.
     */
    static String expandUserHome(String text) {
        if (text.equals("~"))
            return system.getProperty("user.home");
        if (text.startsWith("~/") || text.startsWith("file:~/") || text.startsWith("jar:file:~/"))
            return text.replaceFirst("~/", fixBackslashForRegex(system.getProperty("user.home")) + "/");
        if (text.startsWith("~\\") || text.startsWith("file:~\\") || text.startsWith("jar:file:~\\"))
            return text.replaceFirst("~\\\\", fixBackslashForRegex(system.getProperty("user.home")) + "\\\\");
        return text;
    }

    /**
     * Escapes backslashes so the text is safe to use as a regex replacement string
     * (needed for Windows home paths like C:\Users\...).
     */
    static String fixBackslashForRegex(String text) {
        return text.replace("\\", "\\\\");
    }

    /** Converts Windows-style backslashes in a path to forward slashes. */
    public static String fixBackslashesToSlashes(String path) {
        return path.replace('\\', '/');
    }

    static <T> T ignore() {
        // the ignore method does absolutely nothing, but it helps to shut up warnings by pmd and other reporting tools
        // complaining about empty catch methods.
        return null;
    }

    /**
     * Tells whether the given feature is disabled for the method, either via a
     * {@link DisableFeature} annotation on the method itself or on its declaring class.
     */
    static boolean isFeatureDisabled(Method method, DisableableFeature feature) {
        Class<DisableFeature> annotation = DisableFeature.class;
        return isFeatureDisabled(feature, method.getDeclaringClass().getAnnotation(annotation)) ||
                isFeatureDisabled(feature, method.getAnnotation(annotation));
    }

    private static boolean isFeatureDisabled(DisableableFeature feature, DisableFeature annotation) {
        return annotation != null && asList(annotation.value()).contains(feature);
    }

    /** Builds an UnsupportedOperationException with a formatted message and a cause. */
    static UnsupportedOperationException unsupported(Throwable cause, String msg, Object... args) {
        return new UnsupportedOperationException(format(msg, args), cause);
    }

    /** Builds an UnsupportedOperationException with a formatted message. */
    static UnsupportedOperationException unsupported(String msg, Object... args) {
        return new UnsupportedOperationException(format(msg, args));
    }

    static <T> T unreachableButCompilerNeedsThis() {
        throw new AssertionError("this code should never be reached");
    }

    /** Null-safe String.valueOf: returns null for null instead of the string "null". */
    static String asString(Object result) {
        if (result == null) return null;
        return String.valueOf(result);
    }

    /** Current time in millis, via the (test-replaceable) time provider. */
    static long now() {
        return timeProvider.getTime();
    }

    /**
     * Resolves a "file:" or "jar:file:" URI to the underlying File, or returns
     * null when the URI does not point at the local filesystem.
     */
    static File fileFromURI(URI uri) {
        if ("file".equalsIgnoreCase(uri.getScheme())) {
            String path = uri.getSchemeSpecificPart();
            try {
                path = decode(path, "utf-8");
                return new File(path);
            } catch (UnsupportedEncodingException e) {
                return unreachableButCompilerNeedsThis(/* utf-8 is supported in jre libraries */);
            }
        } else if ("jar".equalsIgnoreCase(uri.getScheme())) {
            String path = uri.getSchemeSpecificPart();
            try {
                // A jar URI looks like jar:file:/path/to.jar!/entry — resolve the part before '!'.
                return fileFromURI(path.substring(0, path.indexOf('!')));
            } catch (URISyntaxException e) {
                return ignore(/* non critical */);
            }
        }
        return null;
    }

    /**
     * Like {@link #fileFromURI(URI)} but parses the spec first, retrying with
     * backslashes converted to slashes when the raw spec is not a valid URI.
     */
    static File fileFromURI(String uriSpec) throws URISyntaxException {
        try {
            return fileFromURI(new URI(uriSpec));
        } catch (URISyntaxException e) {
            // Perhaps the path contains backslashes
            uriSpec = uriSpec.replace('\\', '/');
            return fileFromURI(new URI(uriSpec));
        }
    }

    /** Null-safe equality check. */
    static boolean eq(Object o1, Object o2) {
        return o1 == o2 || o1 != null && o1.equals(o2);
    }

    static SystemProvider system() {
        return system;
    }

    /**
     * Saves the given properties to the target file, creating parent directories
     * as needed. On non-Windows systems the write goes through a temp file that is
     * then renamed, for a close-to-atomic replace; Windows cannot rename over an
     * existing file, so there the target is written directly.
     */
    static void save(File target, Properties p) throws IOException {
        File parent = target.getParentFile();
        parent.mkdirs();
        if (isWindows()) {
            store(target, p);
        } else {
            File tempFile = createTempFile(target.getName(), ".temp", parent);
            store(tempFile, p);
            rename(tempFile, target);
        }
    }

    private static boolean isWindows() {
        // Locale.ENGLISH guards against locale-sensitive lowercasing (e.g. the
        // Turkish dotless-i would turn "WINDOWS" into "wındows" and break the match).
        return System.getProperty("os.name").toLowerCase(java.util.Locale.ENGLISH).contains("win");
    }

    static void delete(File target) {
        target.delete();
    }

    private static void store(File target, Properties p) throws IOException {
        OutputStream out = new FileOutputStream(target);
        try {
            store(out, p);
        } finally {
            out.close();
        }
    }

    private static void store(OutputStream out, Properties p) throws IOException {
        p.store(out, "saved for test");
    }

    /** Saves the properties as a single entry inside a fresh jar at {@code target}. */
    static void saveJar(File target, String entryName, Properties props) throws IOException {
        File parent = target.getParentFile();
        parent.mkdirs();
        storeJar(target, entryName, props);
    }

    private static void rename(File source, File target) throws IOException {
        if (!source.renameTo(target))
            throw new IOException(String.format("Failed to overwrite %s to %s", source.toString(), target.toString()));
    }

    private static void storeJar(File target, String entryName, Properties props) throws IOException {
        byte[] bytes = toBytes(props);
        InputStream input = new ByteArrayInputStream(bytes);
        JarOutputStream output = new JarOutputStream(new FileOutputStream(target));
        try {
            ZipEntry entry = new ZipEntry(entryName);
            output.putNextEntry(entry);
            byte[] buffer = new byte[4096];
            int size;
            while ((size = input.read(buffer)) != -1)
                output.write(buffer, 0, size);
        } finally {
            input.close();
            output.close();
        }
    }

    private static byte[] toBytes(Properties props) throws IOException {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        try {
            store(out, props);
            return out.toByteArray();
        } finally {
            out.close();
        }
    }

    /**
     * Instantiates the given class via its no-arg constructor, converting any
     * reflective failure into an UnsupportedOperationException.
     */
    public static <T> T newInstance(Class<T> clazz) {
        try {
            return clazz.newInstance();
        } catch (Exception e) {
            throw unsupported(e,
                    "Class '%s' cannot be instantiated; see the cause below in the stack trace",
                    clazz.getCanonicalName());
        }
    }

    /** Instantiates every class in the array, appending the instances to {@code result}. */
    public static <T> List<T> newInstance(Class<? extends T>[] classes, List<T> result) {
        for (Class<? extends T> clazz : classes)
            result.add(newInstance(clazz));
        return result;
    }
}
| |
/*
* Copyright 2011 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.gwt.inject.rebind.output;
import com.google.gwt.core.ext.GeneratorContext;
import com.google.gwt.core.ext.TreeLogger;
import com.google.gwt.core.ext.UnableToCompleteException;
import com.google.gwt.inject.rebind.ErrorManager;
import com.google.gwt.inject.rebind.GinjectorBindings;
import com.google.gwt.inject.rebind.GinjectorNameGenerator;
import com.google.gwt.inject.rebind.binding.Binding;
import com.google.gwt.inject.rebind.binding.GinjectorBinding;
import com.google.gwt.inject.rebind.reflect.FieldLiteral;
import com.google.gwt.inject.rebind.reflect.MethodLiteral;
import com.google.gwt.inject.rebind.reflect.NoSourceNameException;
import com.google.gwt.inject.rebind.reflect.ReflectUtil;
import com.google.gwt.inject.rebind.util.InjectorMethod;
import com.google.gwt.inject.rebind.util.MethodCallUtil;
import com.google.gwt.inject.rebind.util.NameGenerator;
import com.google.gwt.inject.rebind.util.SourceSnippet;
import com.google.gwt.inject.rebind.util.SourceSnippetBuilder;
import com.google.gwt.inject.rebind.util.SourceSnippets;
import com.google.gwt.inject.rebind.util.SourceWriteUtil;
import com.google.gwt.user.rebind.ClassSourceFileComposerFactory;
import com.google.gwt.user.rebind.SourceWriter;
import com.google.inject.Inject;
import com.google.inject.Key;
import com.google.inject.Singleton;
import com.google.inject.TypeLiteral;
import com.google.inject.spi.InjectionPoint;
import java.io.PrintWriter;
import java.lang.reflect.Field;
import java.lang.reflect.Member;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
/**
* Outputs the generated classes for one or more {@link GinjectorBindings}.
*/
@Singleton
class GinjectorBindingsOutputter {
private final GeneratorContext ctx;
private final ErrorManager errorManager;
private final GinjectorFragmentOutputter.Factory fragmentOutputterFactory;
private final FragmentPackageName.Factory fragmentPackageNameFactory;
private final GinjectorNameGenerator ginjectorNameGenerator;
private final TreeLogger logger;
private final MethodCallUtil methodCallUtil;
private final ReachabilityAnalyzer reachabilityAnalyzer;
private final SourceWriteUtil.Factory sourceWriteUtilFactory;
  /** Injected constructor: stores the collaborators; no work is performed here. */
  @Inject
  GinjectorBindingsOutputter(GeneratorContext ctx,
      ErrorManager errorManager,
      GinjectorFragmentOutputter.Factory fragmentOutputterFactory,
      FragmentPackageName.Factory fragmentPackageNameFactory,
      GinjectorNameGenerator ginjectorNameGenerator,
      TreeLogger logger,
      MethodCallUtil methodCallUtil,
      ReachabilityAnalyzer reachabilityAnalyzer,
      SourceWriteUtil.Factory sourceWriteUtilFactory) {
    this.ctx = ctx;
    this.errorManager = errorManager;
    this.fragmentOutputterFactory = fragmentOutputterFactory;
    this.fragmentPackageNameFactory = fragmentPackageNameFactory;
    this.ginjectorNameGenerator = ginjectorNameGenerator;
    this.logger = logger;
    this.methodCallUtil = methodCallUtil;
    this.reachabilityAnalyzer = reachabilityAnalyzer;
    this.sourceWriteUtilFactory = sourceWriteUtilFactory;
  }
/**
* Writes the Ginjector class for the given bindings object, and all its
* package-specific fragments.
*/
void write(GinjectorBindings bindings) throws UnableToCompleteException {
TypeLiteral<?> ginjectorInterface = bindings.getGinjectorInterface();
String implClassName = ginjectorNameGenerator.getClassName(bindings);
if (implClassName.contains(".")) {
errorManager.logError("Internal error: the injector class name \"%s\" contains a full stop.",
implClassName);
}
String packageName = ReflectUtil.getUserPackageName(TypeLiteral.get(bindings.getModule()));
PrintWriter printWriter = ctx.tryCreate(logger, packageName, implClassName);
if (printWriter == null) {
// We already created this Ginjector.
return;
}
ClassSourceFileComposerFactory composerFactory = new ClassSourceFileComposerFactory(packageName,
implClassName);
SourceWriter writer = composerFactory.createSourceWriter(ctx, printWriter);
FragmentMap fragments = new FragmentMap(bindings, packageName, implClassName,
fragmentOutputterFactory);
outputBindings(bindings, fragments, writer);
errorManager.checkForError();
fragments.commitAll();
writer.commit(logger);
}
/**
* Outputs the top-level injector for the given {@link GinjectorBindings},
* along with all of its fragments.
*
* <p>The top-level injector contains one field for each fragment of the
* injector, which stores a reference to an instance of that fragment. In
* addition, it contains a getter for every public type created by one of its
* fragments, each of which forwards to a getter in the corresponding
* fragment. In addition to being the injector's public interface, these
* getters are used by each fragment of the injector to retrieve objects
* created by other fragments.
*/
private void outputBindings(GinjectorBindings bindings, FragmentMap fragments,
SourceWriter writer) {
NameGenerator nameGenerator = bindings.getNameGenerator();
// The initialize*() methods contain code that needs to run before the root
// injector is returned to the client, but after the injector hierarchy is
// fully constructed.
// Collects the text of the body of initializeEagerSingletons().
StringBuilder initializeEagerSingletonsBody = new StringBuilder();
// Collects the text of the body of initializeStaticInjections().
StringBuilder initializeStaticInjectionsBody = new StringBuilder();
SourceWriteUtil sourceWriteUtil = sourceWriteUtilFactory.create(bindings);
// Output child modules.
for (GinjectorBindings child : bindings.getChildren()) {
String className = ginjectorNameGenerator.getClassName(child);
String canonicalClassName = ginjectorNameGenerator.getCanonicalClassName(child);
String fieldName = ginjectorNameGenerator.getFieldName(child);
String getterName = nameGenerator.getChildInjectorGetterMethodName(className);
writer.beginJavaDocComment();
writer.print("Child injector for %s", child.getModule());
writer.endJavaDocComment();
writer.println("private %s %s = null;", canonicalClassName, fieldName);
writer.beginJavaDocComment();
writer.print("Getter for child injector for %s", child.getModule());
writer.endJavaDocComment();
sourceWriteUtil.writeMethod(writer,
String.format("public %s %s()", canonicalClassName, getterName),
String.format(
"if (%2$s == null) {\n"
+ " %2$s = new %1$s(this);\n"
+ "}\n\n"
+ "return %2$s;", canonicalClassName, fieldName));
// Ensure that the initializer initializes this child, if necessary.
outputSubInitialize(child, getterName,
initializeEagerSingletonsBody, initializeStaticInjectionsBody);
}
initializeEagerSingletonsBody.append("\n");
initializeStaticInjectionsBody.append("\n");
outputInterfaceField(bindings, sourceWriteUtil, writer);
outputMemberInjections(bindings, fragments, sourceWriteUtil);
outputStaticInjections(bindings, fragments, sourceWriteUtil);
// Output the bindings in the fragments.
for (Map.Entry<Key<?>, Binding> entry : bindings.getBindings()) {
Binding binding = entry.getValue();
if (!reachabilityAnalyzer.isReachable(binding)) {
continue;
}
FragmentPackageName fragmentPackageName =
fragmentPackageNameFactory.create(binding.getGetterMethodPackage());
Key<?> key = entry.getKey();
List<InjectorMethod> helperMethods = new ArrayList();
fragments.get(fragmentPackageName)
.writeBindingGetter(key, binding, bindings.determineScope(key), helperMethods);
outputMethods(helperMethods, fragments);
}
// Output the fragment members.
outputFragments(bindings, fragments, initializeEagerSingletonsBody,
initializeStaticInjectionsBody, sourceWriteUtil, writer);
writeConstructor(bindings, sourceWriteUtil, writer);
writeInitializers(bindings, initializeEagerSingletonsBody, initializeStaticInjectionsBody,
sourceWriteUtil, writer);
}
  /**
   * Writes code to store and retrieve the current injector interface, if one is
   * bound.
   *
   * <p>Only runs for the root injector; children access the interface through
   * their parent, so no field is emitted for them.
   */
  private void outputInterfaceField(GinjectorBindings bindings, SourceWriteUtil sourceWriteUtil,
      SourceWriter writer) {
    // Only the root injector has an interface binding.
    if (bindings.getParent() != null) {
      return;
    }
    Class<?> boundGinjectorInterface = getBoundGinjector(bindings);
    if (boundGinjectorInterface == null) {
      // Sanity-check: if this fails, then we somehow didn't bind the injector
      // interface in the root module (the root module should always generate a
      // binding for the injector).
      errorManager.logError("Injector interface not bound in the root module.");
      return;
    }
    NameGenerator nameGenerator = bindings.getNameGenerator();
    String fieldName = nameGenerator.getGinjectorInterfaceFieldName();
    String getterName = nameGenerator.getGinjectorInterfaceGetterMethodName();
    writer.beginJavaDocComment();
    writer.print("The implementation of " + boundGinjectorInterface);
    writer.endJavaDocComment();
    // The field is final; it is assigned in the constructor emitted by writeConstructor().
    writer.println("private final %s %s;", boundGinjectorInterface.getCanonicalName(), fieldName);
    sourceWriteUtil.writeMethod(writer,
        String.format("public %s %s()", boundGinjectorInterface.getCanonicalName(), getterName),
        String.format("return %s;", fieldName));
  }
  /**
   * For each fragment in the given {@link FragmentMap}, writes the field that
   * stores it and a getter for that field, and adds code to invoke the
   * fragment's initializers.
   */
  private void outputFragments(GinjectorBindings bindings,
      FragmentMap fragments, StringBuilder initializeEagerSingletonsBody,
      StringBuilder initializeStaticInjectionsBody, SourceWriteUtil sourceWriteUtil,
      SourceWriter writer) {
    String implClassName = ginjectorNameGenerator.getClassName(bindings);
    NameGenerator nameGenerator = bindings.getNameGenerator();
    for (FragmentPackageName fragmentPackageName : fragments.getFragmentPackages()) {
      String fragmentCanonicalClassName =
          nameGenerator.getFragmentCanonicalClassName(implClassName,
              fragmentPackageName);
      String fieldName = nameGenerator.getFragmentFieldName(fragmentPackageName);
      String getterName = nameGenerator.getFragmentGetterMethodName(fragmentPackageName);
      // Create the field.
      writer.beginJavaDocComment();
      writer.print("Injector fragment for %s", fragmentPackageName);
      writer.endJavaDocComment();
      // NOTE(review): the analogous child-injector field in outputBindings() is
      // emitted with println; print here may omit a trailing newline — confirm intended.
      writer.print("private %s %s = null;", fragmentCanonicalClassName, fieldName);
      // Write the getter; it lazily instantiates the fragment on first use.
      writer.beginJavaDocComment();
      writer.print("Getter for injector fragment for %s", fragmentPackageName);
      writer.endJavaDocComment();
      sourceWriteUtil.writeMethod(writer,
          "public " + fragmentCanonicalClassName + " " + getterName + "()", String.format(
              "if (%2$s == null) {\n"
              + " %2$s = new %1$s(this);\n"
              + "}\n\n"
              + "return %2$s;", fragmentCanonicalClassName, fieldName));
      // Hook this fragment into the injector-level initializer bodies when needed.
      if (fragments.get(fragmentPackageName).hasEagerSingletonInitialization()) {
        initializeEagerSingletonsBody.append(getterName + "().initializeEagerSingletons();\n");
      }
      if (fragments.get(fragmentPackageName).hasStaticInjectionInitialization()) {
        initializeStaticInjectionsBody.append(getterName + "().initializeStaticInjections();\n");
      }
    }
  }
/**
* Adds member injections to each fragment.
*/
private void outputMemberInjections(GinjectorBindings bindings, FragmentMap fragments,
SourceWriteUtil sourceWriteUtil) {
NameGenerator nameGenerator = bindings.getNameGenerator();
for (TypeLiteral<?> type : bindings.getMemberInjectRequests()) {
if (!reachabilityAnalyzer.isReachableMemberInject(bindings, type)) {
continue;
}
List<InjectorMethod> memberInjectionHelpers = new ArrayList<InjectorMethod>();
try {
sourceWriteUtil.createMemberInjection(type, nameGenerator, memberInjectionHelpers);
outputMethods(memberInjectionHelpers, fragments);
} catch (NoSourceNameException e) {
errorManager.logError(e.getMessage(), e);
}
}
}
void outputStaticInjections(GinjectorBindings bindings, FragmentMap fragments,
SourceWriteUtil sourceWriteUtil) {
for (Class<?> type : bindings.getStaticInjectionRequests()) {
outputStaticInjectionMethods(type, fragments, bindings.getNameGenerator(), sourceWriteUtil);
}
}
  /**
   * Outputs all the static injection methods for the given class.
   *
   * <p>Builds one private injectStatic_* method whose body performs every static
   * method/field injection of {@code type}, places it in the fragment for the
   * type's package, and registers it to be called from
   * initializeStaticInjections().
   */
  void outputStaticInjectionMethods(Class<?> type, FragmentMap fragments,
      NameGenerator nameGenerator, SourceWriteUtil sourceWriteUtil) {
    String methodName = nameGenerator.convertToValidMemberName("injectStatic_" + type.getName());
    SourceSnippetBuilder body = new SourceSnippetBuilder();
    for (InjectionPoint injectionPoint : InjectionPoint.forStaticMethodsAndFields(type)) {
      Member member = injectionPoint.getMember();
      try {
        List<InjectorMethod> staticInjectionHelpers = new ArrayList<InjectorMethod>();
        // A static injection point is either a method or a field; each produces
        // a snippet appended to the aggregate method body plus helper methods.
        if (member instanceof Method) {
          MethodLiteral<?, Method> method =
              MethodLiteral.get((Method) member, TypeLiteral.get(member.getDeclaringClass()));
          body.append(methodCallUtil.createMethodCallWithInjection(method, null, nameGenerator,
              staticInjectionHelpers));
        } else if (member instanceof Field) {
          FieldLiteral<?> field =
              FieldLiteral.get((Field) member, TypeLiteral.get(member.getDeclaringClass()));
          body.append(sourceWriteUtil.createFieldInjection(field, null, nameGenerator,
              staticInjectionHelpers));
        }
        outputMethods(staticInjectionHelpers, fragments);
      } catch (NoSourceNameException e) {
        errorManager.logError(e.getMessage(), e);
      }
    }
    // Note that the top-level method that performs static injection will only
    // invoke a bunch of other injector methods. Therefore, it doesn't matter
    // which package it goes in, and we don't need to invoke getUserPackageName
    // (which is good, because in practice users statically inject types that
    // have no user package name because they're private inner classes!)
    String packageName = type.getPackage().getName();
    InjectorMethod method = SourceSnippets.asMethod(false, "private void " + methodName + "()",
        packageName, body.build());
    GinjectorFragmentOutputter fragment =
        fragments.get(fragmentPackageNameFactory.create(packageName));
    fragment.outputMethod(method);
    fragment.invokeInInitializeStaticInjections(methodName);
  }
/**
* Outputs some methods to the fragments they belong to.
*/
void outputMethods(Iterable<InjectorMethod> methods, FragmentMap fragments) {
for (InjectorMethod method : methods) {
FragmentPackageName fragmentPackageName =
fragmentPackageNameFactory.create(method.getPackageName());
GinjectorFragmentOutputter fragment = fragments.get(fragmentPackageName);
fragment.outputMethod(method);
}
}
/**
* Outputs code to invoke the given child's initialize*() routines via its
* member variable.
*/
private void outputSubInitialize(GinjectorBindings child, String childGetterName,
StringBuilder initializeEagerSingletonsBody, StringBuilder initializeStaticInjectionsBody) {
if (child.hasEagerSingletonBindingInSubtree()) {
initializeEagerSingletonsBody
.append(childGetterName)
.append("().initializeEagerSingletons();\n");
}
if (child.hasStaticInjectionRequestInSubtree()) {
initializeStaticInjectionsBody
.append(childGetterName)
.append("().initializeStaticInjections();\n");
}
}
/**
* Gets the Ginjector interface that is bound by the given bindings, if any.
*/
private static Class<?> getBoundGinjector(GinjectorBindings bindings) {
if (bindings.getGinjectorInterface() == null) {
return null;
}
TypeLiteral<?> ginjectorInterface = bindings.getGinjectorInterface();
Key<?> ginjectorKey = Key.get(ginjectorInterface);
if (!bindings.isBound(ginjectorKey)) {
return null;
}
if (!(bindings.getBinding(ginjectorKey) instanceof GinjectorBinding)) {
return null;
}
return ginjectorInterface.getRawType();
}
  /**
   * Writes the class constructor. If there is a parent injector, also writes a
   * field that stores it and a getter (used by fragments in this injector and
   * its children).
   *
   * <p>The arguments to the constructor are:
   *
   * <p>For injectors other than the root, the parent injector.
   *
   * <p>For the root injector, the implementation of the ginjector interface.
   */
  private void writeConstructor(GinjectorBindings bindings, SourceWriteUtil sourceWriteUtil,
      SourceWriter writer) {
    String implClassName = ginjectorNameGenerator.getClassName(bindings);
    if (bindings.getParent() == null) {
      // In outputInterfaceField, we verify that we have a bound injector if we
      // are the root module, so this should never be null:
      Class<?> boundGinjector = getBoundGinjector(bindings);
      String interfaceCanonicalClassName = boundGinjector.getCanonicalName();
      String fieldName = bindings.getNameGenerator().getGinjectorInterfaceFieldName();
      // Root constructor stores the ginjector-interface implementation.
      sourceWriteUtil.writeMethod(writer,
          String.format("public %s(%s %s)", implClassName, interfaceCanonicalClassName, fieldName),
          String.format("this.%1$s = %1$s;", fieldName));
    } else {
      // Non-root constructor stores the parent injector and exposes it via getParent().
      String parentImplCanonicalClassName = ginjectorNameGenerator.getCanonicalClassName(
          bindings.getParent());
      writer.print(String.format("private final %s parent;\n", parentImplCanonicalClassName));
      sourceWriteUtil.writeMethod(writer, String.format("public %s getParent()",
          parentImplCanonicalClassName), "return parent;");
      sourceWriteUtil.writeMethod(writer, String.format("public %1$s(%2$s parent)",
          implClassName, parentImplCanonicalClassName), "this.parent = parent;");
    }
  }
// Setting up the injector works as follows:
//
// When the injectors are constructed, each injector creates its children and
// fragments via field initializers. Then, if the injector is the top-level
// injector, it initializes itself and its children. Initialization is
// performed as a separate step to ensure that the entire injector hierarchy
// is created before we try to invoke any injection method to, e.g., create
// eager singletons. For more details, see
// <http://code.google.com/p/google-gin/issues/detail?id=156>.
private void writeInitializers(
GinjectorBindings bindings,
StringBuilder initializeEagerSingletonsBody, StringBuilder initializeStaticInjectionsBody,
SourceWriteUtil sourceWriteUtil, SourceWriter writer) {
if (bindings.hasEagerSingletonBindingInSubtree()) {
sourceWriteUtil.writeMethod(writer,
"public void initializeEagerSingletons()", initializeEagerSingletonsBody.toString());
}
if (bindings.hasStaticInjectionRequestInSubtree()) {
sourceWriteUtil.writeMethod(writer,
"public void initializeStaticInjections()", initializeStaticInjectionsBody.toString());
}
}
/**
* Creates and tracks the fragment outputter associated with each package
* containing bindings. Visible for testing.
*/
static final class FragmentMap {
private final GinjectorBindings bindings;
private final GinjectorFragmentOutputter.Factory fragmentFactory;
private final Map<FragmentPackageName, GinjectorFragmentOutputter> fragments =
new LinkedHashMap<FragmentPackageName, GinjectorFragmentOutputter>();
private final String ginjectorPackageName;
private final String ginjectorClassName;
FragmentMap(GinjectorBindings bindings, String ginjectorPackageName,
String ginjectorClassName, GinjectorFragmentOutputter.Factory fragmentFactory) {
this.bindings = bindings;
this.ginjectorPackageName = ginjectorPackageName;
this.ginjectorClassName = ginjectorClassName;
this.fragmentFactory = fragmentFactory;
}
/**
* Gets the fragment outputter associated with the given package name,
* creating one if there isn't one yet.
*/
GinjectorFragmentOutputter get(FragmentPackageName packageName) {
if (fragments.containsKey(packageName)) {
return fragments.get(packageName);
} else {
GinjectorFragmentOutputter result = fragmentFactory.create(bindings, packageName,
ginjectorPackageName, ginjectorClassName);
fragments.put(packageName, result);
return result;
}
}
/**
* Gets the package names associated with fragments that were created by
* this map.
*/
Iterable<FragmentPackageName> getFragmentPackages() {
return fragments.keySet();
}
/**
* Commits all the fragments that were created by this map.
*/
void commitAll() {
for (GinjectorFragmentOutputter fragment : fragments.values()) {
fragment.commit();
}
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.integration;
import org.apache.commons.lang.StringUtils;
import org.apache.zeppelin.AbstractZeppelinIT;
import org.apache.zeppelin.WebDriverManager;
import org.apache.zeppelin.ZeppelinITUtils;
import org.hamcrest.CoreMatchers;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ErrorCollector;
import org.openqa.selenium.By;
import org.openqa.selenium.Keys;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.interactions.Actions;
import org.openqa.selenium.support.ui.Select;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class ParagraphActionsIT extends AbstractZeppelinIT {
  private static final Logger LOG = LoggerFactory.getLogger(ParagraphActionsIT.class);
  // Collects assertion failures without aborting the test method, so every
  // checkThat in a scenario is evaluated and reported together.
  @Rule
  public ErrorCollector collector = new ErrorCollector();
@Before
public void startUp() {
if (!endToEndTestEnabled()) {
return;
}
driver = WebDriverManager.getWebDriver();
}
@After
public void tearDown() {
if (!endToEndTestEnabled()) {
return;
}
driver.quit();
}
  /**
   * Exercises the three ways of creating a paragraph — the settings-menu
   * "Insert New" entry, and the hover targets above and below an existing
   * paragraph — then verifies paragraph contents, ordering, and count.
   */
  @Test
  public void testCreateNewButton() throws Exception {
    if (!endToEndTestEnabled()) {
      return;
    }
    try {
      createNewNote();
      Actions action = new Actions(driver);
      waitForParagraph(1, "READY");
      Integer oldNosOfParas = driver.findElements(By.xpath("//div[@ng-controller=\"ParagraphCtrl\"]")).size();
      collector.checkThat("Before Insert New : the number of paragraph ",
          oldNosOfParas,
          CoreMatchers.equalTo(1));
      // Insert a paragraph below via the settings (gear) menu.
      driver.findElement(By.xpath(getParagraphXPath(1) + "//span[@class='icon-settings']")).click();
      driver.findElement(By.xpath(getParagraphXPath(1) + "//ul/li/a[@ng-click=\"insertNew('below')\"]")).click();
      waitForParagraph(2, "READY");
      Integer newNosOfParas = driver.findElements(By.xpath("//div[@ng-controller=\"ParagraphCtrl\"]")).size();
      collector.checkThat("After Insert New (using Insert New button) : number of paragraph",
          oldNosOfParas + 1,
          CoreMatchers.equalTo(newNosOfParas));
      // Remove the first paragraph again, confirming in the modal dialog.
      driver.findElement(By.xpath(getParagraphXPath(1) + "//span[@class='icon-settings']")).click();
      driver.findElement(By.xpath(getParagraphXPath(1) + "//ul/li/a[@ng-click='removeParagraph(paragraph)']")).click();
      ZeppelinITUtils.sleep(1000, false);
      driver.findElement(By.xpath("//div[@class='modal-dialog'][contains(.,'delete this paragraph')]" +
          "//div[@class='modal-footer']//button[contains(.,'OK')]")).click();
      ZeppelinITUtils.sleep(1000, false);
      setTextOfParagraph(1, " original paragraph ");
      // Hover target above the paragraph inserts an empty paragraph before it.
      WebElement newPara = driver.findElement(By.xpath(getParagraphXPath(1) + "//div[contains(@class,'new-paragraph')][1]"));
      action.moveToElement(newPara).click().build().perform();
      ZeppelinITUtils.sleep(1000, false);
      waitForParagraph(1, "READY");
      collector.checkThat("Paragraph is created above",
          driver.findElement(By.xpath(getParagraphXPath(1) + "//div[contains(@class, 'editor')]")).getText(),
          CoreMatchers.equalTo(StringUtils.EMPTY));
      setTextOfParagraph(1, " this is above ");
      // Hover target below the (now second) paragraph inserts one after it.
      newPara = driver.findElement(By.xpath(getParagraphXPath(2) + "//div[contains(@class,'new-paragraph')][2]"));
      action.moveToElement(newPara).click().build().perform();
      waitForParagraph(3, "READY");
      collector.checkThat("Paragraph is created below",
          driver.findElement(By.xpath(getParagraphXPath(3) + "//div[contains(@class, 'editor')]")).getText(),
          CoreMatchers.equalTo(StringUtils.EMPTY));
      setTextOfParagraph(3, " this is below ");
      // Final ordering should be: above / original / below.
      collector.checkThat("The output field of paragraph1 contains",
          driver.findElement(By.xpath(getParagraphXPath(1) + "//div[contains(@class, 'editor')]")).getText(),
          CoreMatchers.equalTo(" this is above "));
      collector.checkThat("The output field paragraph2 contains",
          driver.findElement(By.xpath(getParagraphXPath(2) + "//div[contains(@class, 'editor')]")).getText(),
          CoreMatchers.equalTo(" original paragraph "));
      collector.checkThat("The output field paragraph3 contains",
          driver.findElement(By.xpath(getParagraphXPath(3) + "//div[contains(@class, 'editor')]")).getText(),
          CoreMatchers.equalTo(" this is below "));
      collector.checkThat("The current number of paragraphs after creating paragraph above and below",
          driver.findElements(By.xpath("//div[@ng-controller=\"ParagraphCtrl\"]")).size(),
          CoreMatchers.equalTo(3));
      deleteTestNotebook(driver);
    } catch (Exception e) {
      handleException("Exception in ParagraphActionsIT while testCreateNewButton ", e);
    }
  }
  /**
   * Verifies that removing a paragraph through the settings menu (with the
   * confirmation dialog) decreases the paragraph count by one.
   */
  @Test
  public void testRemoveButton() throws Exception {
    if (!endToEndTestEnabled()) {
      return;
    }
    try {
      createNewNote();
      waitForParagraph(1, "READY");
      // Insert a second paragraph so the note still has one after removal.
      driver.findElement(By.xpath(getParagraphXPath(1) + "//span[@class='icon-settings']")).click();
      driver.findElement(By.xpath(getParagraphXPath(1) + "//ul/li/a[@ng-click=\"insertNew('below')\"]"))
          .click();
      waitForParagraph(2, "READY");
      Integer oldNosOfParas = driver.findElements(By.xpath
          ("//div[@ng-controller=\"ParagraphCtrl\"]")).size();
      collector.checkThat("Before Remove : Number of paragraphs are ",
          oldNosOfParas,
          CoreMatchers.equalTo(2));
      // Remove paragraph 1 and accept the confirmation dialog.
      driver.findElement(By.xpath(getParagraphXPath(1) + "//span[@class='icon-settings']")).click();
      clickAndWait(By.xpath(getParagraphXPath(1) + "//ul/li/a[@ng-click='removeParagraph(paragraph)']"));
      clickAndWait(By.xpath("//div[@class='modal-dialog'][contains(.,'delete this paragraph')" +
          "]//div[@class='modal-footer']//button[contains(.,'OK')]"));
      Integer newNosOfParas = driver.findElements(By.xpath
          ("//div[@ng-controller=\"ParagraphCtrl\"]")).size();
      collector.checkThat("After Remove : Number of paragraphs are",
          newNosOfParas,
          CoreMatchers.equalTo(oldNosOfParas - 1));
      deleteTestNotebook(driver);
    } catch (Exception e) {
      handleException("Exception in ParagraphActionsIT while testRemoveButton ", e);
    }
  }
@Test
public void testMoveUpAndDown() throws Exception {
if (!endToEndTestEnabled()) {
return;
}
try {
createNewNote();
waitForParagraph(1, "READY");
setTextOfParagraph(1, "1");
driver.findElement(By.xpath(getParagraphXPath(1) + "//span[@class='icon-settings']")).click();
driver.findElement(By.xpath(getParagraphXPath(1) + "//ul/li/a[@ng-click=\"insertNew('below')\"]")).click();
waitForParagraph(2, "READY");
setTextOfParagraph(2, "2");
collector.checkThat("The paragraph1 value contains",
driver.findElement(By.xpath(getParagraphXPath(1) + "//div[contains(@class, 'editor')]")).getText(),
CoreMatchers.equalTo("1"));
collector.checkThat("The paragraph1 value contains",
driver.findElement(By.xpath(getParagraphXPath(2) + "//div[contains(@class, 'editor')]")).getText(),
CoreMatchers.equalTo("2"));
driver.findElement(By.xpath(getParagraphXPath(1) + "//span[@class='icon-settings']")).click();
clickAndWait(By.xpath(getParagraphXPath(1) + "//ul/li/a[@ng-click='moveDown(paragraph)']"));
collector.checkThat("The paragraph1 value contains",
driver.findElement(By.xpath(getParagraphXPath(1) + "//div[contains(@class, 'editor')]")).getText(),
CoreMatchers.equalTo("2"));
collector.checkThat("The paragraph1 value contains",
driver.findElement(By.xpath(getParagraphXPath(2) + "//div[contains(@class, 'editor')]")).getText(),
CoreMatchers.equalTo("1"));
driver.findElement(By.xpath(getParagraphXPath(2) + "//span[@class='icon-settings']")).click();
clickAndWait(By.xpath(getParagraphXPath(2) + "//ul/li/a[@ng-click='moveUp(paragraph)']"));
collector.checkThat("The paragraph1 value contains",
driver.findElement(By.xpath(getParagraphXPath(1) + "//div[contains(@class, 'editor')]")).getText(),
CoreMatchers.equalTo("1"));
collector.checkThat("The paragraph1 value contains",
driver.findElement(By.xpath(getParagraphXPath(2) + "//div[contains(@class, 'editor')]")).getText(),
CoreMatchers.equalTo("2"));
deleteTestNotebook(driver);
} catch (Exception e) {
handleException("Exception in ParagraphActionsIT while testMoveUpAndDown ", e);
}
}
  /**
   * Verifies that a disabled paragraph hides its play button and is skipped
   * by "Run all paragraphs" (its status stays READY).
   */
  @Test
  public void testDisableParagraphRunButton() throws Exception {
    if (!endToEndTestEnabled()) {
      return;
    }
    try {
      createNewNote();
      waitForParagraph(1, "READY");
      setTextOfParagraph(1, "println (\"abcd\")");
      // Toggle the paragraph to disabled via the settings menu.
      driver.findElement(By.xpath(getParagraphXPath(1) + "//span[@class='icon-settings']")).click();
      clickAndWait(By.xpath(getParagraphXPath(1) + "//ul/li/a[@ng-click='toggleEnableDisable(paragraph)']"));
      collector.checkThat("The play button class was ",
          driver.findElement(By.xpath(getParagraphXPath(1) + "//span[@class='icon-control-play shortcut-icon']")).isDisplayed(), CoreMatchers.equalTo(false)
      );
      // Run all paragraphs and confirm the dialog; the disabled paragraph
      // must not execute.
      driver.findElement(By.xpath(".//*[@id='main']//button[contains(@ng-click, 'runAllParagraphs')]")).sendKeys(Keys.ENTER);
      ZeppelinITUtils.sleep(1000, true);
      driver.findElement(By.xpath("//div[@class='modal-dialog'][contains(.,'Run all paragraphs?')]" +
          "//div[@class='modal-footer']//button[contains(.,'OK')]")).click();
      ZeppelinITUtils.sleep(2000, false);
      collector.checkThat("Paragraph status is ",
          getParagraphStatus(1), CoreMatchers.equalTo("READY")
      );
      deleteTestNotebook(driver);
    } catch (Exception e) {
      handleException("Exception in ParagraphActionsIT while testDisableParagraphRunButton ", e);
    }
  }
@Test
public void testRunOnSelectionChange() throws Exception {
if (!endToEndTestEnabled()) {
return;
}
try {
String xpathToRunOnSelectionChangeCheckbox = getParagraphXPath(1) + "//ul/li/form/input[contains(@ng-checked, 'true')]";
String xpathToDropdownMenu = getParagraphXPath(1) + "//select";
String xpathToResultText = getParagraphXPath(1) + "//div[contains(@id,\"_html\")]";
createNewNote();
waitForParagraph(1, "READY");
setTextOfParagraph(1, "%md My selection is ${my selection=1,1|2|3}");
runParagraph(1);
waitForParagraph(1, "FINISHED");
// 1. 'RunOnSelectionChange' is true by default
driver.findElement(By.xpath(getParagraphXPath(1) + "//span[@class='icon-settings']")).click();
collector.checkThat("'Run on selection change' checkbox will be shown under dropdown menu ",
driver.findElement(By.xpath(getParagraphXPath(1) + "//ul/li/form/input[contains(@ng-click, 'turnOnAutoRun(paragraph)')]")).isDisplayed(),
CoreMatchers.equalTo(true));
Select dropDownMenu = new Select(driver.findElement(By.xpath((xpathToDropdownMenu))));
dropDownMenu.selectByVisibleText("2");
waitForParagraph(1, "FINISHED");
collector.checkThat("If 'RunOnSelectionChange' is true, the paragraph result will be updated right after click any options in the dropdown menu ",
driver.findElement(By.xpath(xpathToResultText)).getText(),
CoreMatchers.equalTo("My selection is 2"));
// 2. set 'RunOnSelectionChange' to false
driver.findElement(By.xpath(getParagraphXPath(1) + "//span[@class='icon-settings']")).click();
driver.findElement(By.xpath(xpathToRunOnSelectionChangeCheckbox)).click();
collector.checkThat("If 'Run on selection change' checkbox is unchecked, 'paragraph.config.runOnSelectionChange' will be false ",
driver.findElement(By.xpath(getParagraphXPath(1) + "//ul/li/span[contains(@ng-if, 'paragraph.config.runOnSelectionChange == false')]")).isDisplayed(),
CoreMatchers.equalTo(true));
Select sameDropDownMenu = new Select(driver.findElement(By.xpath((xpathToDropdownMenu))));
sameDropDownMenu.selectByVisibleText("1");
waitForParagraph(1, "FINISHED");
collector.checkThat("If 'RunOnSelectionChange' is false, the paragraph result won't be updated even if we select any options in the dropdown menu ",
driver.findElement(By.xpath(xpathToResultText)).getText(),
CoreMatchers.equalTo("My selection is 2"));
// run paragraph manually by pressing ENTER
driver.findElement(By.xpath(xpathToDropdownMenu)).sendKeys(Keys.ENTER);
waitForParagraph(1, "FINISHED");
collector.checkThat("Even if 'RunOnSelectionChange' is set as false, still can run the paragraph by pressing ENTER ",
driver.findElement(By.xpath(xpathToResultText)).getText(),
CoreMatchers.equalTo("My selection is 1"));
} catch (Exception e) {
handleException("Exception in ParagraphActionsIT while testRunOnSelectionChange ", e);
}
}
  /**
   * Verifies the "Clear output" action: no output exists before running,
   * output appears after running, and disappears after clearing.
   */
  @Test
  public void testClearOutputButton() throws Exception {
    if (!endToEndTestEnabled()) {
      return;
    }
    try {
      createNewNote();
      waitForParagraph(1, "READY");
      String xpathToOutputField = getParagraphXPath(1) + "//div[contains(@id,\"_text\")]";
      setTextOfParagraph(1, "println (\"abcd\")");
      // findElements (plural) is used so a missing output yields size 0
      // instead of a NoSuchElementException.
      collector.checkThat("Before Run Output field contains ",
          driver.findElements(By.xpath(xpathToOutputField)).size(),
          CoreMatchers.equalTo(0));
      runParagraph(1);
      waitForParagraph(1, "FINISHED");
      collector.checkThat("After Run Output field contains ",
          driver.findElement(By.xpath(xpathToOutputField)).getText(),
          CoreMatchers.equalTo("abcd"));
      // Clear the output via the settings menu.
      driver.findElement(By.xpath(getParagraphXPath(1) + "//span[@class='icon-settings']")).click();
      clickAndWait(By.xpath(getParagraphXPath(1) +
          "//ul/li/a[@ng-click='clearParagraphOutput(paragraph)']"));
      collector.checkThat("After Clear Output field contains ",
          driver.findElements(By.xpath(xpathToOutputField)).size(),
          CoreMatchers.equalTo(0));
      deleteTestNotebook(driver);
    } catch (Exception e) {
      handleException("Exception in ParagraphActionsIT while testClearOutputButton ", e);
    }
  }
  /**
   * Verifies the paragraph width setting: the default Bootstrap column width
   * is 12, and selecting each width from 1 to 11 in the settings dropdown
   * applies the corresponding col-md-N class.
   */
  @Test
  public void testWidth() throws Exception {
    if (!endToEndTestEnabled()) {
      return;
    }
    try {
      createNewNote();
      waitForParagraph(1, "READY");
      collector.checkThat("Default Width is 12 ",
          driver.findElement(By.xpath("//div[contains(@class,'col-md-12')]")).isDisplayed(),
          CoreMatchers.equalTo(true));
      for (Integer newWidth = 1; newWidth <= 11; newWidth++) {
        clickAndWait(By.xpath(getParagraphXPath(1) + "//span[@class='icon-settings']"));
        String visibleText = newWidth.toString();
        new Select(driver.findElement(By.xpath(getParagraphXPath(1)
            + "//ul/li/a/select[(@ng-model='paragraph.config.colWidth')]"))).selectByVisibleText(visibleText);
        collector.checkThat("New Width is : " + newWidth,
            driver.findElement(By.xpath("//div[contains(@class,'col-md-" + newWidth + "')]")).isDisplayed(),
            CoreMatchers.equalTo(true));
      }
      deleteTestNotebook(driver);
    } catch (Exception e) {
      handleException("Exception in ParagraphActionsIT while testWidth ", e);
    }
  }
  /**
   * Verifies the editor font-size setting: selecting each size from 10 to 20
   * should strictly increase the rendered height of the ace editor content.
   */
  @Test
  public void testFontSize() throws Exception {
    if (!endToEndTestEnabled()) {
      return;
    }
    try {
      createNewNote();
      waitForParagraph(1, "READY");
      // Baseline editor height in pixels (CSS value like "123px").
      Float height = Float.valueOf(driver.findElement(By.xpath("//div[contains(@class,'ace_content')]"))
          .getCssValue("height").replace("px", ""));
      for (Integer newFontSize = 10; newFontSize <= 20; newFontSize++) {
        clickAndWait(By.xpath(getParagraphXPath(1) + "//span[@class='icon-settings']"));
        String visibleText = newFontSize.toString();
        new Select(driver.findElement(By.xpath(getParagraphXPath(1)
            + "//ul/li/a/select[(@ng-model='paragraph.config.fontSize')]"))).selectByVisibleText(visibleText);
        Float newHeight = Float.valueOf(driver.findElement(By.xpath("//div[contains(@class,'ace_content')]"))
            .getCssValue("height").replace("px", ""));
        // A larger font must grow the editor relative to the previous size.
        collector.checkThat("New Font size is : " + newFontSize,
            newHeight > height,
            CoreMatchers.equalTo(true));
        height = newHeight;
      }
      deleteTestNotebook(driver);
    } catch (Exception e) {
      handleException("Exception in ParagraphActionsIT while testFontSize ", e);
    }
  }
  /**
   * Verifies the paragraph title feature: show/hide menu labels (including
   * the platform-dependent Ctrl+Option/Alt+T shortcut hint), the default
   * "Untitled" title, in-place editing, and persistence across a page
   * refresh.
   */
  @Test
  public void testTitleButton() throws Exception {
    if (!endToEndTestEnabled()) {
      return;
    }
    try {
      createNewNote();
      waitForParagraph(1, "READY");
      String xpathToTitle = getParagraphXPath(1) + "//div[contains(@class, 'title')]/div";
      String xpathToSettingIcon = getParagraphXPath(1) + "//span[@class='icon-settings']";
      String xpathToShowTitle = getParagraphXPath(1) + "//ul/li/a[@ng-show='!paragraph.config.title']";
      String xpathToHideTitle = getParagraphXPath(1) + "//ul/li/a[@ng-show='paragraph.config.title']";
      // Implicit waits are turned off around the "element absent" checks so
      // the findElements call returns immediately instead of timing out.
      ZeppelinITUtils.turnOffImplicitWaits(driver);
      Integer titleElems = driver.findElements(By.xpath(xpathToTitle)).size();
      collector.checkThat("Before Show Title : The title doesn't exist",
          titleElems,
          CoreMatchers.equalTo(0));
      ZeppelinITUtils.turnOnImplicitWaits(driver);
      clickAndWait(By.xpath(xpathToSettingIcon));
      // The menu label includes a keyboard shortcut that differs between
      // macOS ("Option") and other platforms ("Alt").
      collector.checkThat("Before Show Title : The title option in option panel of paragraph is labeled as",
          driver.findElement(By.xpath(xpathToShowTitle)).getText(),
          CoreMatchers.allOf(CoreMatchers.endsWith("Show title"), CoreMatchers.containsString("Ctrl+"),
              CoreMatchers.anyOf(CoreMatchers.containsString("Option"), CoreMatchers.containsString("Alt")),
              CoreMatchers.containsString("+T")));
      clickAndWait(By.xpath(xpathToShowTitle));
      collector.checkThat("After Show Title : The title field contains",
          driver.findElement(By.xpath(xpathToTitle)).getText(),
          CoreMatchers.equalTo("Untitled"));
      clickAndWait(By.xpath(xpathToSettingIcon));
      collector.checkThat("After Show Title : The title option in option panel of paragraph is labeled as",
          driver.findElement(By.xpath(xpathToHideTitle)).getText(),
          CoreMatchers.allOf(CoreMatchers.endsWith("Hide title"), CoreMatchers.containsString("Ctrl+"),
              CoreMatchers.anyOf(CoreMatchers.containsString("Option"), CoreMatchers.containsString("Alt")),
              CoreMatchers.containsString("+T")));
      clickAndWait(By.xpath(xpathToHideTitle));
      ZeppelinITUtils.turnOffImplicitWaits(driver);
      titleElems = driver.findElements(By.xpath(xpathToTitle)).size();
      collector.checkThat("After Hide Title : The title field is hidden",
          titleElems,
          CoreMatchers.equalTo(0));
      ZeppelinITUtils.turnOnImplicitWaits(driver);
      // Show the title again and edit it in place.
      driver.findElement(By.xpath(xpathToSettingIcon)).click();
      driver.findElement(By.xpath(xpathToShowTitle)).click();
      driver.findElement(By.xpath(getParagraphXPath(1) + "//div[contains(@class, 'title')]")).click();
      driver.findElement(By.xpath(getParagraphXPath(1) + "//input")).sendKeys("NEW TITLE" + Keys.ENTER);
      ZeppelinITUtils.sleep(500, false);
      collector.checkThat("After Editing the Title : The title field contains ",
          driver.findElement(By.xpath(xpathToTitle)).getText(),
          CoreMatchers.equalTo("NEW TITLE"));
      // The edited title must survive a page refresh (server-side persistence).
      driver.navigate().refresh();
      ZeppelinITUtils.sleep(1000, false);
      collector.checkThat("After Page Refresh : The title field contains ",
          driver.findElement(By.xpath(xpathToTitle)).getText(),
          CoreMatchers.equalTo("NEW TITLE"));
      deleteTestNotebook(driver);
    } catch (Exception e) {
      handleException("Exception in ParagraphActionsIT while testTitleButton ", e);
    }
  }
  /**
   * Verifies the "Show/Hide line numbers" paragraph actions: the ace gutter
   * is hidden by default, shown after "Show line numbers", and hidden again
   * after "Hide line numbers"; menu labels (with shortcut hints) are checked
   * in both states.
   */
  @Test
  public void testShowAndHideLineNumbers() throws Exception {
    if (!endToEndTestEnabled()) {
      return;
    }
    try {
      createNewNote();
      waitForParagraph(1, "READY");
      String xpathToLineNumberField = getParagraphXPath(1) + "//div[contains(@class, 'ace_gutter-layer')]";
      String xpathToShowLineNumberButton = getParagraphXPath(1) + "//ul/li/a[@ng-click='showLineNumbers(paragraph)']";
      String xpathToHideLineNumberButton = getParagraphXPath(1) + "//ul/li/a[@ng-click='hideLineNumbers(paragraph)']";
      collector.checkThat("Before \"Show line number\" the Line Number is Enabled ",
          driver.findElement(By.xpath(xpathToLineNumberField)).isDisplayed(),
          CoreMatchers.equalTo(false));
      driver.findElement(By.xpath(getParagraphXPath(1) + "//span[@class='icon-settings']")).click();
      // Menu label includes the platform-dependent Ctrl+Option/Alt+M shortcut.
      collector.checkThat("Before \"Show line number\" The option panel in paragraph has button labeled ",
          driver.findElement(By.xpath(xpathToShowLineNumberButton)).getText(),
          CoreMatchers.allOf(CoreMatchers.endsWith("Show line numbers"), CoreMatchers.containsString("Ctrl+"),
              CoreMatchers.anyOf(CoreMatchers.containsString("Option"), CoreMatchers.containsString("Alt")),
              CoreMatchers.containsString("+M")));
      clickAndWait(By.xpath(xpathToShowLineNumberButton));
      collector.checkThat("After \"Show line number\" the Line Number is Enabled ",
          driver.findElement(By.xpath(xpathToLineNumberField)).isDisplayed(),
          CoreMatchers.equalTo(true));
      clickAndWait(By.xpath(getParagraphXPath(1) + "//span[@class='icon-settings']"));
      collector.checkThat("After \"Show line number\" The option panel in paragraph has button labeled ",
          driver.findElement(By.xpath(xpathToHideLineNumberButton)).getText(),
          CoreMatchers.allOf(CoreMatchers.endsWith("Hide line numbers"), CoreMatchers.containsString("Ctrl+"),
              CoreMatchers.anyOf(CoreMatchers.containsString("Option"), CoreMatchers.containsString("Alt")),
              CoreMatchers.containsString("+M")));
      clickAndWait(By.xpath(xpathToHideLineNumberButton));
      collector.checkThat("After \"Hide line number\" the Line Number is Enabled",
          driver.findElement(By.xpath(xpathToLineNumberField)).isDisplayed(),
          CoreMatchers.equalTo(false));
      deleteTestNotebook(driver);
    } catch (Exception e) {
      handleException("Exception in ParagraphActionsIT while testShowAndHideLineNumbers ", e);
    }
  }
  /**
   * Verifies edit-on-double-click for markdown paragraphs: after running,
   * the editor is hidden and the rendered output shown; double-clicking the
   * paragraph (even after a page refresh) brings the editor back and hides
   * the output.
   */
  @Test
  public void testEditOnDoubleClick() throws Exception {
    if (!endToEndTestEnabled()) {
      return;
    }
    try {
      createNewNote();
      Actions action = new Actions(driver);
      waitForParagraph(1, "READY");
      // Type "%md\n# abc" through the textarea (SHIFT+"3" produces '#').
      setTextOfParagraph(1, "%md");
      driver.findElement(By.xpath(getParagraphXPath(1) + "//textarea")).sendKeys(Keys.ARROW_RIGHT);
      driver.findElement(By.xpath(getParagraphXPath(1) + "//textarea")).sendKeys(Keys.ENTER);
      driver.findElement(By.xpath(getParagraphXPath(1) + "//textarea")).sendKeys(Keys.SHIFT + "3");
      driver.findElement(By.xpath(getParagraphXPath(1) + "//textarea")).sendKeys(" abc");
      runParagraph(1);
      waitForParagraph(1, "FINISHED");
      collector.checkThat("Markdown editor is hidden after run ",
          driver.findElements(By.xpath(getParagraphXPath(1) + "//div[contains(@ng-if, 'paragraph.config.editorHide')]")).size(),
          CoreMatchers.equalTo(0));
      collector.checkThat("Markdown editor is shown after run ",
          driver.findElement(By.xpath(getParagraphXPath(1) + "//div[contains(@ng-show, 'paragraph.config.tableHide')]")).isDisplayed(),
          CoreMatchers.equalTo(true));
      // to check if editOnDblClick field is fetched correctly after refresh
      driver.navigate().refresh();
      waitForParagraph(1, "FINISHED");
      action.doubleClick(driver.findElement(By.xpath(getParagraphXPath(1)))).perform();
      ZeppelinITUtils.sleep(1000, false);
      collector.checkThat("Markdown editor is shown after double click ",
          driver.findElement(By.xpath(getParagraphXPath(1) + "//div[contains(@ng-if, 'paragraph.config.editorHide')]")).isDisplayed(),
          CoreMatchers.equalTo(true));
      collector.checkThat("Markdown editor is hidden after double click ",
          driver.findElement(By.xpath(getParagraphXPath(1) + "//div[contains(@ng-show, 'paragraph.config.tableHide')]")).isDisplayed(),
          CoreMatchers.equalTo(false));
      deleteTestNotebook(driver);
    } catch (Exception e) {
      handleException("Exception in ParagraphActionsIT while testEditOnDoubleClick ", e);
    }
  }
  /**
   * Verifies a dynamic text-input form (z.textbox): the initial run uses the
   * default value, typing a new value does not change the output until the
   * paragraph is re-run, and re-running picks up the new value.
   */
  @Test
  public void testSingleDynamicFormTextInput() throws Exception {
    if (!endToEndTestEnabled()) {
      return;
    }
    try {
      createNewNote();
      setTextOfParagraph(1, "%spark println(\"Hello \"+z.textbox(\"name\", \"world\")) ");
      runParagraph(1);
      waitForParagraph(1, "FINISHED");
      collector.checkThat("Output text is equal to value specified initially",
          driver.findElement(By.xpath(getParagraphXPath(1) + "//div[contains(@class, 'text plainTextContent')]")).getText(),
          CoreMatchers.equalTo("Hello world"));
      // Change the textbox value; output must stay stale until re-run.
      driver.findElement(By.xpath(getParagraphXPath(1) + "//input")).clear();
      driver.findElement(By.xpath(getParagraphXPath(1) + "//input")).sendKeys("Zeppelin");
      collector.checkThat("After new data in text input form, output should not be changed",
          driver.findElement(By.xpath(getParagraphXPath(1) + "//div[contains(@class, 'text plainTextContent')]")).getText(),
          CoreMatchers.equalTo("Hello world"));
      runParagraph(1);
      waitForParagraph(1, "FINISHED");
      collector.checkThat("Only after running the paragraph, we can see the newly updated output",
          driver.findElement(By.xpath(getParagraphXPath(1) + "//div[contains(@class, 'text plainTextContent')]")).getText(),
          CoreMatchers.equalTo("Hello Zeppelin"));
      deleteTestNotebook(driver);
    } catch (Exception e) {
      handleException("Exception in ParagraphActionsIT while testSingleDynamicFormTextInput ", e);
    }
  }
  /**
   * Verifies a dynamic select form (z.select): selecting an option re-runs
   * the paragraph while "Run on selection change" is enabled, and stops
   * re-running once that option is unchecked.
   */
  @Test
  public void testSingleDynamicFormSelectForm() throws Exception {
    if (!endToEndTestEnabled()) {
      return;
    }
    try {
      createNewNote();
      setTextOfParagraph(1, "%spark println(\"Howdy \"+z.select(\"names\", Seq((\"1\",\"Alice\"), " +
          "(\"2\",\"Bob\"),(\"3\",\"stranger\"))))");
      runParagraph(1);
      waitForParagraph(1, "FINISHED");
      // No option selected yet, so the select's value is empty.
      collector.checkThat("Output text should not display any of the options in select form",
          driver.findElement(By.xpath(getParagraphXPath(1) + "//div[contains(@class, 'text plainTextContent')]")).getText(),
          CoreMatchers.equalTo("Howdy "));
      Select dropDownMenu = new Select(driver.findElement(By.xpath("(" + (getParagraphXPath(1) + "//select)[1]"))));
      dropDownMenu.selectByVisibleText("Alice");
      // z.select returns the option's value ("1"), not its label ("Alice").
      collector.checkThat("After selection in drop down menu, output should display the newly selected option",
          driver.findElement(By.xpath(getParagraphXPath(1) + "//div[contains(@class, 'text plainTextContent')]")).getText(),
          CoreMatchers.equalTo("Howdy 1"));
      // Uncheck "Run on selection change" via the settings menu.
      driver.findElement(By.xpath(getParagraphXPath(1) + "//span[@class='icon-settings']")).click();
      clickAndWait(By.xpath(getParagraphXPath(1) + "//ul/li/form/input[contains(@ng-checked, 'true')]"));
      Select sameDropDownMenu = new Select(driver.findElement(By.xpath("(" + (getParagraphXPath(1) + "//select)[1]"))));
      sameDropDownMenu.selectByVisibleText("Bob");
      collector.checkThat("After 'Run on selection change' checkbox is unchecked, the paragraph should not run if selecting a different option",
          driver.findElement(By.xpath(getParagraphXPath(1) + "//div[contains(@class, 'text plainTextContent')]")).getText(),
          CoreMatchers.equalTo("Howdy 1"));
      deleteTestNotebook(driver);
    } catch (Exception e) {
      handleException("Exception in ParagraphActionsIT while testSingleDynamicFormSelectForm ", e);
    }
  }
  /**
   * Verifies a dynamic checkbox form (z.checkbox): all options are checked
   * initially, unchecking one re-runs the paragraph, and once "Run on
   * selection change" is disabled further toggles do not re-run it.
   */
  @Test
  public void testSingleDynamicFormCheckboxForm() throws Exception {
    if (!endToEndTestEnabled()) {
      return;
    }
    try {
      createNewNote();
      setTextOfParagraph(1, "%spark val options = Seq((\"han\",\"Han\"), (\"leia\",\"Leia\"), " +
          "(\"luke\",\"Luke\")); println(\"Greetings \"+z.checkbox(\"skywalkers\",options).mkString(\" and \"))");
      runParagraph(1);
      waitForParagraph(1, "FINISHED");
      collector.checkThat("Output text should display all of the options included in check boxes",
          driver.findElement(By.xpath(getParagraphXPath(1) + "//div[contains(@class, 'text plainTextContent')]")).getText(),
          CoreMatchers.containsString("Greetings han and leia and luke"));
      // Unchecking a box triggers a re-run while auto-run is still enabled.
      WebElement firstCheckbox = driver.findElement(By.xpath("(" + getParagraphXPath(1) + "//input[@type='checkbox'])[1]"));
      firstCheckbox.click();
      collector.checkThat("After unchecking one of the boxes, we can see the newly updated output without the option we unchecked",
          driver.findElement(By.xpath(getParagraphXPath(1) + "//div[contains(@class, 'text plainTextContent')]")).getText(),
          CoreMatchers.containsString("Greetings leia and luke"));
      // Disable "Run on selection change"; toggling must no longer re-run.
      driver.findElement(By.xpath(getParagraphXPath(1) + "//span[@class='icon-settings']")).click();
      clickAndWait(By.xpath(getParagraphXPath(1) + "//ul/li/form/input[contains(@ng-checked, 'true')]"));
      WebElement secondCheckbox = driver.findElement(By.xpath("(" + getParagraphXPath(1) + "//input[@type='checkbox'])[2]"));
      secondCheckbox.click();
      collector.checkThat("After 'Run on selection change' checkbox is unchecked, the paragraph should not run if check box state is modified",
          driver.findElement(By.xpath(getParagraphXPath(1) + "//div[contains(@class, 'text plainTextContent')]")).getText(),
          CoreMatchers.containsString("Greetings leia and luke"));
      runParagraph(1);
      waitForParagraph(1, "FINISHED");
      deleteTestNotebook(driver);
    } catch (Exception e) {
      handleException("Exception in ParagraphActionsIT while testSingleDynamicFormCheckboxForm ", e);
    }
  }
  /**
   * Verifies two select forms in one paragraph: changing the first re-runs
   * the paragraph while auto-run is enabled; after disabling "Run on
   * selection change", changing the second does not update the output.
   */
  @Test
  public void testMultipleDynamicFormsSameType() throws Exception {
    if (!endToEndTestEnabled()) {
      return;
    }
    try {
      createNewNote();
      setTextOfParagraph(1, "%spark println(\"Howdy \"+z.select(\"fruits\", Seq((\"1\",\"Apple\")," +
          "(\"2\",\"Orange\"),(\"3\",\"Peach\")))); println(\"Howdy \"+z.select(\"planets\", " +
          "Seq((\"1\",\"Venus\"),(\"2\",\"Earth\"),(\"3\",\"Mars\"))))");
      runParagraph(1);
      waitForParagraph(1, "FINISHED");
      collector.checkThat("Output text should not display any of the options in select form",
          driver.findElement(By.xpath(getParagraphXPath(1) + "//div[contains(@class, 'text plainTextContent')]")).getText(),
          CoreMatchers.equalTo("Howdy \nHowdy "));
      // First dropdown: z.select returns the option value ("1"), not "Apple".
      Select dropDownMenu = new Select(driver.findElement(By.xpath("(" + (getParagraphXPath(1) + "//select)[1]"))));
      dropDownMenu.selectByVisibleText("Apple");
      collector.checkThat("After selection in drop down menu, output should display the new option we selected",
          driver.findElement(By.xpath(getParagraphXPath(1) + "//div[contains(@class, 'text plainTextContent')]")).getText(),
          CoreMatchers.equalTo("Howdy 1\nHowdy "));
      // Disable "Run on selection change", then change the second dropdown.
      driver.findElement(By.xpath(getParagraphXPath(1) + "//span[@class='icon-settings']")).click();
      clickAndWait(By.xpath(getParagraphXPath(1) + "//ul/li/form/input[contains(@ng-checked, 'true')]"));
      Select sameDropDownMenu = new Select(driver.findElement(By.xpath("(" + (getParagraphXPath(1) + "//select)[2]"))));
      sameDropDownMenu.selectByVisibleText("Earth");
      waitForParagraph(1, "FINISHED");
      collector.checkThat("After 'Run on selection change' checkbox is unchecked, the paragraph should not run if selecting a different option",
          driver.findElement(By.xpath(getParagraphXPath(1) + "//div[contains(@class, 'text plainTextContent')]")).getText(),
          CoreMatchers.equalTo("Howdy 1\nHowdy "));
      deleteTestNotebook(driver);
    } catch (Exception e) {
      handleException("Exception in ParagraphActionsIT while testMultipleDynamicFormsSameType ", e);
    }
  }
}
| |
// -*- mode: java; c-basic-offset: 2; -*-
// Copyright 2009-2011 Google, All Rights reserved
// Copyright 2011-2018 MIT, All rights reserved
// Released under the Apache License, Version 2.0
// http://www.apache.org/licenses/LICENSE-2.0
package com.google.appinventor.components.runtime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import android.app.Activity;
import android.content.Intent;
import android.database.Cursor;
import android.net.Uri;
import android.provider.Contacts;
import android.util.Log;
import android.Manifest;
import com.google.appinventor.components.annotations.DesignerComponent;
import com.google.appinventor.components.annotations.PropertyCategory;
import com.google.appinventor.components.annotations.SimpleFunction;
import com.google.appinventor.components.annotations.SimpleObject;
import com.google.appinventor.components.annotations.SimpleProperty;
import com.google.appinventor.components.annotations.UsesPermissions;
import com.google.appinventor.components.common.ComponentCategory;
import com.google.appinventor.components.common.YaVersion;
import com.google.appinventor.components.runtime.util.HoneycombMR1Util;
import com.google.appinventor.components.runtime.util.ErrorMessages;
import com.google.appinventor.components.runtime.util.SdkLevel;
/**
 * Component enabling a user to select a contact.
 *
 * @author sharon@google.com (Sharon Perl)
 * @author markf@google.com (Mark Friedman)
 * @author Yifan(Evan) Li (for contact Uri)
 */
@DesignerComponent(version = YaVersion.CONTACTPICKER_COMPONENT_VERSION,
    description = "A button that, when clicked on, displays a list of " +
    "the contacts to choose among. After the user has made a " +
    "selection, the following properties will be set to information about " +
    "the chosen contact: <ul>\n" +
    "<li> <code>ContactName</code>: the contact's name </li>\n " +
    "<li> <code>EmailAddress</code>: the contact's primary email address </li>\n " +
    "<li> <code>ContactUri</code>: the contact's URI on the device </li>\n"+
    "<li> <code>EmailAddressList</code>: a list of the contact's email addresses </li>\n " +
    "<li> <code>PhoneNumber</code>: the contact's primary phone number (on Later Android Verisons)</li>\n " +
    "<li> <code>PhoneNumberList</code>: a list of the contact's phone numbers (on Later Android Versions)</li>\n " +
    "<li> <code>Picture</code>: the name of the file containing the contact's " +
    "image, which can be used as a <code>Picture</code> property value for " +
    "the <code>Image</code> or <code>ImageSprite</code> component.</li></ul>\n" +
    "</p><p>Other properties affect the appearance of the button " +
    "(<code>TextAlignment</code>, <code>BackgroundColor</code>, etc.) and " +
    "whether it can be clicked on (<code>Enabled</code>).\n</p>" +
    "<p>The ContactPicker component might not work on all phones. For " +
    "example, on Android systems before system 3.0, it cannot pick phone " +
    "numbers, and the list of email addresses will contain only one email.",
    category = ComponentCategory.SOCIAL)
@SimpleObject
@UsesPermissions(permissionNames = "android.permission.READ_CONTACTS")
public class ContactPicker extends Picker implements ActivityResultListener {

  // Projections for the post-Honeycomb contacts provider. These are
  // populated lazily from HoneycombMR1Util inside resultReturned because the
  // provider classes only exist on SDK >= HONEYCOMB_MR1.
  private static String[] CONTACT_PROJECTION;
  private static String[] DATA_PROJECTION;

  // Projection used with the deprecated pre-Honeycomb Contacts provider.
  private static final String[] PROJECTION = {
    Contacts.PeopleColumns.NAME,
    Contacts.People.PRIMARY_EMAIL_ID,
  };

  // Column indices into PROJECTION above (pre-Honeycomb path only).
  private static final int NAME_INDEX = 0;
  private static final int EMAIL_INDEX = 1;
  private static final int PHONE_INDEX = 2;

  protected final Activity activityContext;
  // Uri handed to the ACTION_PICK intent; chosen per SDK level in the constructor.
  private final Uri intentUri;

  // Fields extracted from the picked contact. They are protected because
  // subclasses (other picker components) read and overwrite them.
  protected String contactName;
  protected String emailAddress;
  protected String contactUri;
  protected String contactPictureUri;
  protected String phoneNumber;
  protected List emailAddressList;
  protected List phoneNumberList;

  private boolean havePermission = false; // Do we have READ_CONTACTS permission?

  /**
   * Create a new ContactPicker component.
   *
   * @param container the parent container.
   */
  public ContactPicker(ComponentContainer container) {
    this(container, Contacts.People.CONTENT_URI);
  }

  /**
   * Creates a picker over the given provider Uri. On Honeycomb MR1 and up the
   * legacy pre-Honeycomb Uris are translated to their modern equivalents.
   *
   * @param container the parent container.
   * @param intentUri the content Uri to pick from (people or phones).
   */
  protected ContactPicker(ComponentContainer container, Uri intentUri) {
    super(container);
    activityContext = container.$context();

    if (SdkLevel.getLevel() >= SdkLevel.LEVEL_HONEYCOMB_MR1 && intentUri.equals(Contacts.People.CONTENT_URI)) {
      this.intentUri = HoneycombMR1Util.getContentUri();
    } else if (SdkLevel.getLevel() >= SdkLevel.LEVEL_HONEYCOMB_MR1 && intentUri.equals(Contacts.Phones.CONTENT_URI)) {
      this.intentUri = HoneycombMR1Util.getPhoneContentUri();
    } else {
      this.intentUri = intentUri;
    }
  }

  /**
   * Handles a click on the button. If READ_CONTACTS has not been granted yet,
   * asks for it first and re-enters click() on grant; otherwise delegates to
   * the superclass, which launches the pick activity.
   */
  @Override
  public void click() {
    if (!havePermission) {
      container.$form()
        .askPermission(Manifest.permission.READ_CONTACTS,
          new PermissionResultHandler() {
            @Override
            public void HandlePermissionResponse(String permission, boolean granted) {
              if (granted) {
                ContactPicker.this.havePermission = true;
                ContactPicker.this.click();
              } else {
                container.$form().dispatchPermissionDeniedEvent(ContactPicker.this,
                    "Click", Manifest.permission.READ_CONTACTS);
              }
            }
          });
      return;
    }
    super.click();
  }

  /**
   * Picture URI for this contact, which can be
   * used to retrieve the contact's photo and other fields.
   */
  @SimpleProperty(
    category = PropertyCategory.BEHAVIOR)
  public String Picture() {
    return ensureNotNull(contactPictureUri);
  }

  /**
   * Name property getter method.
   */
  @SimpleProperty(
    category = PropertyCategory.BEHAVIOR)
  public String ContactName() {
    return ensureNotNull(contactName);
  }

  /**
   * EmailAddress property getter method.
   */
  @SimpleProperty(
    category = PropertyCategory.BEHAVIOR)
  public String EmailAddress() {
    // Note(halabelson): I am commenting out this test.  Android provider.Contacts was
    // deprecated in Donut, but email picking still seems to work on newer versions of the SDK.
    // If there's a phone where it does not work, we'll get the error at PuntContactSelection
    // Note that there is still a general problem with contact picking on Motoblur.
    //    if (SdkLevel.getLevel() > SdkLevel.LEVEL_DONUT) {
    //      container.$form().dispatchErrorOccurredEvent(this, "EmailAddress",
    //          ErrorMessages.ERROR_FUNCTIONALITY_NOT_SUPPORTED_CONTACT_EMAIL);
    //    }
    return ensureNotNull(emailAddress);
  }

  /**
   * "URI that specifies the location of the contact on the device.",
   */
  @SimpleProperty(description = "URI that specifies the location of the contact on the device.",
    category = PropertyCategory.BEHAVIOR)
  public String ContactUri() {
    return ensureNotNull(contactUri);
  }

  /**
   * EmailAddressList property getter method.
   */
  @SimpleProperty(
    category = PropertyCategory.BEHAVIOR)
  public List EmailAddressList() {
    return ensureNotNull(emailAddressList);
  }

  /**
   * PhoneNumber property getter method.
   */
  @SimpleProperty(
    category = PropertyCategory.BEHAVIOR)
  public String PhoneNumber() {
    return ensureNotNull(phoneNumber);
  }

  /**
   * PhoneNumberList property getter method.
   */
  @SimpleProperty(
    category = PropertyCategory.BEHAVIOR)
  public List PhoneNumberList() {
    return ensureNotNull(phoneNumberList);
  }

  /**
   * return nothing, just call another activity which is view contact
   */
  // NOTE(review): this launches ACTION_VIEW on the `uri` argument, but only
  // when the `contactUri` field is non-null (i.e. after a successful pick) —
  // the argument itself is not the guard. Confirm that is the intended gate.
  @SimpleFunction(description = "view a contact via its URI")
  public void ViewContact(String uri) {
    if(contactUri != null){
      Intent intent = new Intent(Intent.ACTION_VIEW,Uri.parse(uri));
      if (intent.resolveActivity(this.activityContext.getPackageManager()) != null) {
        this.activityContext.startActivity(intent);
      }
    }
  }

  /** Builds the ACTION_PICK intent the Picker superclass launches on click. */
  @Override
  protected Intent getIntent() {
    return new Intent(Intent.ACTION_PICK, intentUri);
  }

  /**
   * Callback method to get the result returned by the contact picker activity
   *
   * @param requestCode a code identifying the request.
   * @param resultCode a code specifying success or failure of the activity
   * @param data the returned data, in this case an Intent whose data field
   *        contains the contact's content provider Uri.
   */
  @Override
  public void resultReturned(int requestCode, int resultCode, Intent data) {
    if (requestCode == this.requestCode && resultCode == Activity.RESULT_OK) {
      Log.i("ContactPicker", "received intent is " + data);
      Uri receivedContactUri = data.getData();

      // Pre- and post-Honeycomb need different URIs.
      String desiredContactUri = "";
      if (SdkLevel.getLevel() >= SdkLevel.LEVEL_HONEYCOMB_MR1) {
        desiredContactUri = "//com.android.contacts/contact";
      } else {
        desiredContactUri = "//contacts/people";
      }

      if (checkContactUri(receivedContactUri, desiredContactUri)) {
        // Cursors are closed in the finally block below so a failure in one
        // query cannot leak the other cursor.
        Cursor contactCursor = null;
        Cursor dataCursor = null;
        try {
          if (SdkLevel.getLevel() >= SdkLevel.LEVEL_HONEYCOMB_MR1) {
            CONTACT_PROJECTION = HoneycombMR1Util.getContactProjection();
            contactCursor = activityContext.getContentResolver().query(receivedContactUri,
                CONTACT_PROJECTION, null, null, null);
            // First query yields name/picture and the contact id; the id keys
            // the second (data) query for emails and phone numbers.
            String id = postHoneycombGetContactNameAndPicture(contactCursor);

            DATA_PROJECTION = HoneycombMR1Util.getDataProjection();
            dataCursor = HoneycombMR1Util.getDataCursor(id, activityContext, DATA_PROJECTION);
            postHoneycombGetContactEmailAndPhone(dataCursor);

            //explicit set TextContactUri
            contactUri = receivedContactUri.toString();
          } else {
            contactCursor = activityContext.getContentResolver().query(receivedContactUri,
                PROJECTION, null, null, null);
            preHoneycombGetContactInfo(contactCursor, receivedContactUri);
          }
          Log.i("ContactPicker",
              "Contact name = " + contactName + ", email address = " + emailAddress + ",contact Uri = " + contactUri +
              ", phone number = " + phoneNumber + ", contactPhotoUri = " + contactPictureUri);
        } catch (Exception e) {
          // There was an exception in trying to extract the cursor from the activity context.
          // It's bad form to catch an arbitrary exception, but if there is an error here
          // it's unclear what's going on.
          Log.i("ContactPicker", "checkContactUri failed: D");
          puntContactSelection(ErrorMessages.ERROR_PHONE_UNSUPPORTED_CONTACT_PICKER);
        } finally {
          if (contactCursor != null) {
            contactCursor.close();
          }
          if (dataCursor != null) {
            dataCursor.close();
          }
        }
      } // ends if (checkContactUri ...
      AfterPicking();
    } // ends if (requestCode ...
  }

  /**
   * For versions before Honeycomb, we get all the contact info from the same table.
   */
  public void preHoneycombGetContactInfo(Cursor contactCursor, Uri theContactUri) {
    if (contactCursor.moveToFirst()) {
      contactName = guardCursorGetString(contactCursor, NAME_INDEX);
      String emailId = guardCursorGetString(contactCursor, EMAIL_INDEX);
      emailAddress = getEmailAddress(emailId);
      contactUri = theContactUri.toString();
      contactPictureUri = theContactUri.toString();
      // Pre-Honeycomb exposes at most one email address.
      emailAddressList = emailAddress.equals("") ? new ArrayList() : Arrays.asList(emailAddress);
    }
  }

  /**
   * Assigns contactName and contactPictureUri for Honeycomb and up.
   * Returns id for getting emailAddress and phoneNumber.
   */
  public String postHoneycombGetContactNameAndPicture(Cursor contactCursor) {
    String id = "";
    if (contactCursor.moveToFirst()) {
      // Column positions are resolved by name at runtime since the projection
      // is supplied by HoneycombMR1Util.
      final int ID_INDEX = HoneycombMR1Util.getIdIndex(contactCursor);
      final int NAME_INDEX = HoneycombMR1Util.getNameIndex(contactCursor);
      final int THUMBNAIL_INDEX = HoneycombMR1Util.getThumbnailIndex(contactCursor);
      final int PHOTO_INDEX = HoneycombMR1Util.getPhotoIndex(contactCursor);
      id = guardCursorGetString(contactCursor, ID_INDEX);
      contactName = guardCursorGetString(contactCursor, NAME_INDEX);
      contactPictureUri = guardCursorGetString(contactCursor, THUMBNAIL_INDEX);

      Log.i("ContactPicker", "photo_uri=" + guardCursorGetString(contactCursor, PHOTO_INDEX));
    }
    return id;
  }

  /**
   * Assigns emailAddress, phoneNumber, emailAddressList, and phoneNumberList
   * for Honeycomb and up.
   */
  public void postHoneycombGetContactEmailAndPhone(Cursor dataCursor) {
    phoneNumber = "";
    emailAddress = "";
    List<String> phoneListToStore = new ArrayList<String>();
    List<String> emailListToStore = new ArrayList<String>();

    if (dataCursor.moveToFirst()) {
      final int PHONE_INDEX = HoneycombMR1Util.getPhoneIndex(dataCursor);
      final int EMAIL_INDEX = HoneycombMR1Util.getEmailIndex(dataCursor);
      final int MIME_INDEX = HoneycombMR1Util.getMimeIndex(dataCursor);

      String phoneType = HoneycombMR1Util.getPhoneType();
      String emailType = HoneycombMR1Util.getEmailType();

      // Each data row is either a phone or an email, distinguished by its MIME type.
      while (!dataCursor.isAfterLast()) {
        String type = guardCursorGetString(dataCursor, MIME_INDEX);
        if (type.contains(phoneType)) {
          phoneListToStore.add(guardCursorGetString(dataCursor, PHONE_INDEX));
        } else if (type.contains(emailType)) {
          emailListToStore.add(guardCursorGetString(dataCursor, EMAIL_INDEX));
        } else {
          Log.i("ContactPicker", "Type mismatch: " + type +
              " not " + phoneType +
              " or " + emailType);
        }
        dataCursor.moveToNext();
      }
    }

    // The "primary" phone/email is simply the first row returned.
    if (!phoneListToStore.isEmpty()) {
      phoneNumber = phoneListToStore.get(0);
    }
    if (!emailListToStore.isEmpty()) {
      emailAddress = emailListToStore.get(0);
    }

    phoneNumberList = phoneListToStore;
    emailAddressList = emailListToStore;
  }

  // Check that the contact URI has the right form to permit the information to be
  // extracted and try to show a meaningful error notice to the end user of the app.
  // Sadly, different phones can produce different kinds of URIs.  You
  // can also get a different Uri depending on whether or not the user
  // does a search to get the contact, versus just picking it.  For example,
  // Motorola Global phones produce an intent whose data part is null.
  // Or using search on Nexus phones will produce a contact URI of the form
  // content://com.android.contacts/contact, whereas doing direct selection
  // produces a Uri have a specific required pattern that is
  // passed in as an argument.
  // TODO(halabelson): Create a better set of tests and/or generalize the extraction
  // methods to permit more URIs.
  // This should be done in conjunction with updating the way we handle contacts.
  protected boolean checkContactUri(Uri suspectUri, String requiredPattern) {
    Log.i("ContactPicker", "contactUri is " + suspectUri);
    if (suspectUri == null || (!("content".equals(suspectUri.getScheme())))) {
      Log.i("ContactPicker", "checkContactUri failed: A");
      puntContactSelection(
          ErrorMessages.ERROR_PHONE_UNSUPPORTED_CONTACT_PICKER);
      return false;
    }
    String UriSpecific = suspectUri.getSchemeSpecificPart();
    if (!UriSpecific.startsWith(requiredPattern)) {
      Log.i("ContactPicker", "checkContactUri failed: C");
      Log.i("ContactPicker", suspectUri.getPath());
      puntContactSelection(ErrorMessages.ERROR_PHONE_UNSUPPORTED_CONTACT_PICKER);
      return false;
    } else {
      return true;
    }
  }

  // set the (supposedly) extracted properties to the empty string and
  // report an error
  // NOTE(review): phoneNumber, contactUri, and the two lists are NOT reset
  // here, so they may retain values from an earlier successful pick — confirm
  // whether that is intended.
  protected void puntContactSelection(int errorNumber) {
    contactName = "";
    emailAddress = "";
    contactPictureUri = "";
    container.$form().dispatchErrorOccurredEvent(this, "", errorNumber);
  }

  /**
   * Email address getter for pre-Honeycomb.
   */
  protected String getEmailAddress(String emailId) {
    int id;
    try {
      id = Integer.parseInt(emailId);
    } catch (NumberFormatException e) {
      // A non-numeric id means there is no primary email on record.
      return "";
    }

    String data = "";
    String where = "contact_methods._id = " + id;
    String[] projection = {
      Contacts.ContactMethods.DATA
    };
    Cursor cursor = activityContext.getContentResolver().query(
        Contacts.ContactMethods.CONTENT_EMAIL_URI,
        projection, where, null, null);
    try {
      if (cursor.moveToFirst()) {
        data = guardCursorGetString(cursor, 0);
      }
    } finally {
      cursor.close();
    }
    // this extra check for null might be redundant, but we given that there are mysterious errors
    // on some phones, we'll leave it in just to be extra careful
    return ensureNotNull(data);
  }

  // If the selection returns null, this should be passed back as a
  // an empty string to prevent errors if the app tries to convert this
  // to a string. In some cases, getString can also throw an exception, for example,
  // in selecting the name for a contact where there is no name.
  // We also call ensureNotNull in the property selectors for ContactName, etc.
  // This would appear to be redundant, but in testing, there have been some mysterious
  // error conditions on some phones that permit nulls to sneak through from guardCursonGetString,
  // so we'll do the extra check.

  /** Reads a string column defensively: never returns null, never propagates getString failures. */
  protected String guardCursorGetString(Cursor cursor, int index) {
    String result;
    try {
      result = cursor.getString(index);
    } catch (Exception e) {
      // It's bad practice to catch a general exception, but unfortunately,
      // the exception thrown is implementation dependent, according to the
      // Android documentation.
      result = "";
    }
    return ensureNotNull(result);
  }

  /** Maps null to the empty string so property getters never return null. */
  protected String ensureNotNull(String value) {
    if (value == null) {
      return "";
    } else {
      return value;
    }
  }

  /** Maps null to an empty list so list-valued property getters never return null. */
  protected List ensureNotNull(List value) {
    if (value == null) {
      return new ArrayList();
    } else {
      return value;
    }
  }
}
| |
/*
* Copyright 2020 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.pmml.models.regression.compiler.factories;
import java.util.AbstractMap;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
import com.github.javaparser.ast.CompilationUnit;
import com.github.javaparser.ast.NodeList;
import com.github.javaparser.ast.body.ClassOrInterfaceDeclaration;
import com.github.javaparser.ast.body.ConstructorDeclaration;
import com.github.javaparser.ast.expr.CastExpr;
import com.github.javaparser.ast.expr.Expression;
import com.github.javaparser.ast.expr.MethodCallExpr;
import com.github.javaparser.ast.expr.MethodReferenceExpr;
import com.github.javaparser.ast.expr.NameExpr;
import com.github.javaparser.ast.expr.ObjectCreationExpr;
import com.github.javaparser.ast.expr.SimpleName;
import com.github.javaparser.ast.expr.StringLiteralExpr;
import com.github.javaparser.ast.expr.ThisExpr;
import com.github.javaparser.ast.stmt.BlockStmt;
import com.github.javaparser.ast.type.ClassOrInterfaceType;
import org.dmg.pmml.regression.RegressionModel;
import org.kie.pmml.api.enums.OP_TYPE;
import org.kie.pmml.api.exceptions.KiePMMLException;
import org.kie.pmml.api.exceptions.KiePMMLInternalException;
import org.kie.pmml.api.iinterfaces.SerializableFunction;
import org.kie.pmml.compiler.commons.utils.CommonCodegenUtils;
import org.kie.pmml.compiler.commons.utils.JavaParserUtils;
import org.kie.pmml.models.regression.compiler.dto.RegressionCompilationDTO;
import org.kie.pmml.models.regression.model.enums.REGRESSION_NORMALIZATION_METHOD;
import org.kie.pmml.models.regression.model.tuples.KiePMMLTableSourceCategory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.kie.pmml.commons.Constants.MISSING_DEFAULT_CONSTRUCTOR;
import static org.kie.pmml.compiler.commons.utils.CommonCodegenUtils.getExpressionForObject;
import static org.kie.pmml.compiler.commons.utils.CommonCodegenUtils.getTypedClassOrInterfaceTypeByTypeNames;
import static org.kie.pmml.compiler.commons.utils.CommonCodegenUtils.getTypedClassOrInterfaceTypeByTypes;
import static org.kie.pmml.compiler.commons.utils.JavaParserUtils.getFullClassName;
/**
 * Factory that generates the Java source for a classification regression table from a PMML
 * {@link RegressionModel}, on top of the per-category regression tables produced by
 * {@link KiePMMLRegressionTableRegressionFactory}.
 */
public class KiePMMLRegressionTableClassificationFactory {

    /** Classpath resource holding the class template the generated source is cloned from. */
    public static final String KIE_PMML_REGRESSION_TABLE_CLASSIFICATION_TEMPLATE_JAVA =
            "KiePMMLRegressionTableClassificationTemplate.tmpl";
    /** Name of the template class inside the template compilation unit. */
    public static final String KIE_PMML_REGRESSION_TABLE_CLASSIFICATION_TEMPLATE =
            "KiePMMLRegressionTableClassificationTemplate";
    // Modifier order normalized to the canonical "public static final".
    public static final List<RegressionModel.NormalizationMethod> SUPPORTED_NORMALIZATION_METHODS =
            Arrays.asList(RegressionModel.NormalizationMethod.SOFTMAX,
                          RegressionModel.NormalizationMethod.SIMPLEMAX,
                          RegressionModel.NormalizationMethod.NONE,
                          RegressionModel.NormalizationMethod.LOGIT,
                          RegressionModel.NormalizationMethod.PROBIT,
                          RegressionModel.NormalizationMethod.CLOGLOG,
                          RegressionModel.NormalizationMethod.CAUCHIT);
    public static final List<RegressionModel.NormalizationMethod> UNSUPPORTED_NORMALIZATION_METHODS =
            Arrays.asList(RegressionModel.NormalizationMethod.EXP,
                          RegressionModel.NormalizationMethod.LOGLOG);
    // getLogger(Class) yields the same logger name as getLogger(Class.getName()).
    private static final Logger logger =
            LoggerFactory.getLogger(KiePMMLRegressionTableClassificationFactory.class);
    private static final String MAIN_CLASS_NOT_FOUND = "Main class not found";
    // final: the counter object itself is never reassigned; it disambiguates the
    // names of successively generated classes.
    private static final AtomicInteger classArity = new AtomicInteger(0);

    private KiePMMLRegressionTableClassificationFactory() {
        // Avoid instantiation
    }

    /**
     * Generates the per-category regression tables and the classification table that
     * aggregates them, keyed by generated class name.
     *
     * @param compilationDTO compilation context for the regression model
     * @return insertion-ordered map of generated-class-name to source/category pair; the
     *         classification table entry is appended last
     */
    public static Map<String, KiePMMLTableSourceCategory> getRegressionTables(final RegressionCompilationDTO compilationDTO) {
        logger.trace("getRegressionTables {}", compilationDTO.getRegressionTables());
        LinkedHashMap<String, KiePMMLTableSourceCategory> toReturn =
                KiePMMLRegressionTableRegressionFactory.getRegressionTables(compilationDTO);
        Map.Entry<String, String> regressionTableEntry = getRegressionTable(compilationDTO, toReturn);
        toReturn.put(regressionTableEntry.getKey(), new KiePMMLTableSourceCategory(regressionTableEntry.getValue(),
                                                                                   ""));
        return toReturn;
    }

    /**
     * Generates the source of one classification-table class wrapping the given regression tables.
     *
     * @param compilationDTO compilation context for the regression model
     * @param regressionTablesMap Explicitly using a <code>LinkedHashMap</code> because insertion order matters
     * @return entry of fully-qualified generated class name to its Java source
     */
    public static Map.Entry<String, String> getRegressionTable(final RegressionCompilationDTO compilationDTO,
                                                               final LinkedHashMap<String,
                                                                       KiePMMLTableSourceCategory> regressionTablesMap) {
        logger.trace("getRegressionTable {}", regressionTablesMap);
        // The arity suffix keeps class names unique across repeated invocations.
        String className = "KiePMMLRegressionTableClassification" + classArity.incrementAndGet();
        CompilationUnit cloneCU = JavaParserUtils.getKiePMMLModelCompilationUnit(className,
                                                                                compilationDTO.getPackageName(),
                                                                                KIE_PMML_REGRESSION_TABLE_CLASSIFICATION_TEMPLATE_JAVA, KIE_PMML_REGRESSION_TABLE_CLASSIFICATION_TEMPLATE);
        ClassOrInterfaceDeclaration tableTemplate = cloneCU.getClassByName(className)
                .orElseThrow(() -> new KiePMMLException(MAIN_CLASS_NOT_FOUND + ": " + className));
        boolean isBinary = compilationDTO.isBinary(regressionTablesMap.size());
        final ConstructorDeclaration constructorDeclaration =
                tableTemplate.getDefaultConstructor().orElseThrow(() -> new KiePMMLInternalException(String.format(MISSING_DEFAULT_CONSTRUCTOR, tableTemplate.getName())));
        setConstructor(compilationDTO, constructorDeclaration, tableTemplate.getName(), null,
                       compilationDTO.getModelNormalizationMethod(), isBinary);
        addMapPopulation(constructorDeclaration.getBody(), regressionTablesMap);
        return new AbstractMap.SimpleEntry<>(getFullClassName(cloneCU), cloneCU.toString());
    }

    /**
     * Set the values inside the constructor
     * @param compilationDTO compilation context for the regression model
     * @param constructorDeclaration constructor of the cloned template, mutated in place
     * @param generatedClassName name the constructor is renamed to
     * @param targetCategory category this table predicts, or {@code null} for the aggregate table
     * @param normalizationMethod normalization method selecting the probability-map function
     * @param isBinary whether the classification is binary
     */
    static void setConstructor(final RegressionCompilationDTO compilationDTO,
                               final ConstructorDeclaration constructorDeclaration,
                               final SimpleName generatedClassName,
                               final Object targetCategory,
                               final RegressionModel.NormalizationMethod normalizationMethod,
                               final boolean isBinary) {
        constructorDeclaration.setName(generatedClassName);
        final BlockStmt body = constructorDeclaration.getBody();
        CommonCodegenUtils.setAssignExpressionValue(body, "targetField",
                                                    new StringLiteralExpr(compilationDTO.getTargetFieldName()));
        final REGRESSION_NORMALIZATION_METHOD regressionNormalizationMethod =
                compilationDTO.getDefaultREGRESSION_NORMALIZATION_METHOD();
        CommonCodegenUtils.setAssignExpressionValue(body, "regressionNormalizationMethod",
                                                    new NameExpr(regressionNormalizationMethod.getClass().getSimpleName() + "." + regressionNormalizationMethod.name()));
        final OP_TYPE opType = compilationDTO.getOP_TYPE();
        if (opType != null) {
            CommonCodegenUtils.setAssignExpressionValue(body, "opType",
                                                        new NameExpr(opType.getClass().getSimpleName() + "." + opType.name()));
        }
        final Expression targetCategoryExpression = getExpressionForObject(targetCategory);
        CommonCodegenUtils.setAssignExpressionValue(body, "targetCategory", targetCategoryExpression);
        CommonCodegenUtils.setAssignExpressionValue(body, "isBinary", getExpressionForObject(isBinary));
        final Expression probabilityMapFunctionExpression =
                createProbabilityMapFunctionExpression(normalizationMethod, isBinary);
        CommonCodegenUtils.setAssignExpressionValue(body, "probabilityMapFunction", probabilityMapFunctionExpression);
    }

    /**
     * Creates the probability-map function expression, rejecting unsupported normalization methods.
     *
     * @throws KiePMMLInternalException when the normalization method is EXP or LOGLOG
     */
    static Expression createProbabilityMapFunctionExpression(final RegressionModel.NormalizationMethod normalizationMethod,
                                                             final boolean isBinary) {
        if (UNSUPPORTED_NORMALIZATION_METHODS.contains(normalizationMethod)) {
            throw new KiePMMLInternalException(String.format("Unsupported NormalizationMethod %s",
                                                             normalizationMethod));
        } else {
            return createProbabilityMapFunctionSupportedExpression(normalizationMethod, isBinary);
        }
    }

    /**
     * Create <b>probabilityMapFunction</b> <code>MethodReferenceExpr</code>
     * @param normalizationMethod normalization method, assumed supported
     * @param isBinary whether the classification is binary (selects the "NONEBinary" variant)
     * @return a {@code ((SerializableFunction<LinkedHashMap<String,Double>, LinkedHashMap<String,Double>>) this)::get...ProbabilityMap} reference
     */
    static MethodReferenceExpr createProbabilityMapFunctionSupportedExpression(final RegressionModel.NormalizationMethod normalizationMethod,
                                                                               final boolean isBinary) {
        String normalizationName = normalizationMethod.name();
        if (RegressionModel.NormalizationMethod.NONE.equals(normalizationMethod) && isBinary) {
            normalizationName += "Binary";
        }
        final String thisExpressionMethodName = String.format("get%sProbabilityMap", normalizationName);
        // The cast pins down which functional interface `this::method` implements.
        final CastExpr castExpr = new CastExpr();
        final String stringClassName = String.class.getSimpleName();
        final String doubleClassName = Double.class.getSimpleName();
        final ClassOrInterfaceType linkedHashMapReferenceType =
                getTypedClassOrInterfaceTypeByTypeNames(LinkedHashMap.class.getCanonicalName(),
                                                        Arrays.asList(stringClassName, doubleClassName));
        final ClassOrInterfaceType consumerType =
                getTypedClassOrInterfaceTypeByTypes(SerializableFunction.class.getCanonicalName(),
                                                    Arrays.asList(linkedHashMapReferenceType,
                                                                  linkedHashMapReferenceType));
        castExpr.setType(consumerType);
        castExpr.setExpression(new ThisExpr());
        final MethodReferenceExpr toReturn = new MethodReferenceExpr();
        toReturn.setScope(castExpr);
        toReturn.setIdentifier(thisExpressionMethodName);
        return toReturn;
    }

    /**
     * Add entries <b>category/KiePMMLRegressionTable</b> inside the constructor
     * @param body constructor body receiving one {@code categoryTableMap.put(...)} per table
     * @param regressionTablesMap Explicitly using a <code>LinkedHashMap</code> because insertion order matters
     */
    static void addMapPopulation(final BlockStmt body,
                                 final LinkedHashMap<String, KiePMMLTableSourceCategory> regressionTablesMap) {
        regressionTablesMap.forEach((className, tableSourceCategory) -> {
            ObjectCreationExpr objectCreationExpr = new ObjectCreationExpr();
            objectCreationExpr.setType(className);
            NodeList<Expression> expressions =
                    NodeList.nodeList(new StringLiteralExpr(tableSourceCategory.getCategory()), objectCreationExpr);
            body.addStatement(new MethodCallExpr(new NameExpr("categoryTableMap"), "put", expressions));
        });
    }
}
| |
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.activiti.engine.impl.test;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.Method;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Timer;
import java.util.TimerTask;
import java.util.logging.Logger;

import junit.framework.AssertionFailedError;

import org.activiti.engine.ActivitiException;
import org.activiti.engine.ProcessEngine;
import org.activiti.engine.ProcessEngineConfiguration;
import org.activiti.engine.impl.ProcessEngineImpl;
import org.activiti.engine.impl.bpmn.deployer.BpmnDeployer;
import org.activiti.engine.impl.cfg.ProcessEngineConfigurationImpl;
import org.activiti.engine.impl.db.DbSqlSession;
import org.activiti.engine.impl.interceptor.Command;
import org.activiti.engine.impl.interceptor.CommandContext;
import org.activiti.engine.impl.jobexecutor.JobExecutor;
import org.activiti.engine.impl.util.ClassNameUtil;
import org.activiti.engine.impl.util.ReflectUtil;
import org.activiti.engine.repository.DeploymentBuilder;
import org.activiti.engine.runtime.ProcessInstance;
import org.activiti.engine.test.Deployment;
/**
* @author Tom Baeyens
*/
public abstract class TestHelper {
private static Logger log = Logger.getLogger(TestHelper.class.getName());
public static final String EMPTY_LINE = " ";
public static final List<String> TABLENAMES_EXCLUDED_FROM_DB_CLEAN_CHECK = Arrays.asList(
"ACT_GE_PROPERTY"
);
static Map<String, ProcessEngine> processEngines = new HashMap<String, ProcessEngine>();
/**
 * Asserts that the process instance with the given id has finished, i.e. it can no
 * longer be found through the runtime service.
 *
 * @param processInstanceId id of the instance expected to be ended
 * @throws AssertionFailedError when the instance is still present in the database
 */
public static void assertProcessEnded(ProcessEngine processEngine, String processInstanceId) {
  ProcessInstance stillRunning = processEngine.getRuntimeService()
      .createProcessInstanceQuery()
      .processInstanceId(processInstanceId)
      .singleResult();

  if (stillRunning != null) {
    throw new AssertionFailedError("expected finished process instance '"+processInstanceId+"' but it was still in the db");
  }
}
/**
 * Deploys the resources declared by the {@link Deployment} annotation on the given
 * test method. When the annotation lists no resources, a single resource name is
 * derived by convention from the test class and method name.
 *
 * @return the id of the created deployment, or {@code null} when the method carries
 *         no {@link Deployment} annotation
 * @throws ActivitiException when the test method cannot be located via reflection
 */
public static String annotationDeploymentSetUp(ProcessEngine processEngine, Class<?> testClass, String methodName) {
  Method method;
  try {
    method = testClass.getDeclaredMethod(methodName, (Class<?>[]) null);
  } catch (Exception e) {
    throw new ActivitiException("can't get method by reflection", e);
  }

  Deployment deploymentAnnotation = method.getAnnotation(Deployment.class);
  if (deploymentAnnotation == null) {
    return null;
  }

  log.fine("annotation @Deployment creates deployment for "+ClassNameUtil.getClassNameWithoutPackage(testClass)+"."+methodName);

  String[] resources = deploymentAnnotation.resources();
  if (resources.length == 0) {
    // No explicit resources: fall back to the conventional per-method resource name.
    resources = new String[] { getBpmnProcessDefinitionResource(testClass, method.getName()) };
  }

  DeploymentBuilder deploymentBuilder = processEngine.getRepositoryService()
      .createDeployment()
      .name(ClassNameUtil.getClassNameWithoutPackage(testClass)+"."+methodName);
  for (String resource : resources) {
    deploymentBuilder.addClasspathResource(resource);
  }

  return deploymentBuilder.deploy().getId();
}
/**
 * Removes the deployment created for an annotated test method, cascading to any
 * process instances and history it produced. A {@code null} deployment id (no
 * annotation was present) is a no-op.
 */
public static void annotationDeploymentTearDown(ProcessEngine processEngine, String deploymentId, Class<?> testClass, String methodName) {
  log.fine("annotation @Deployment deletes deployment for "+ClassNameUtil.getClassNameWithoutPackage(testClass)+"."+methodName);
  if (deploymentId == null) {
    return;
  }
  processEngine.getRepositoryService().deleteDeployment(deploymentId, true);
}
/**
 * get a resource location by convention based on a class (type) and a
 * relative resource name. The return value will be the full classpath
 * location of the type, plus a suffix built from the name parameter:
 * <code>BpmnDeployer.BPMN_RESOURCE_SUFFIXES</code>.
 * The first resource matching a suffix will be returned. If none exists,
 * the name built from the first suffix is returned as a best guess.
 *
 * @param type test class whose package determines the resource directory
 * @param name relative resource name (typically the test method name)
 * @return classpath location of the first existing resource, or the
 *         first-suffix candidate when none exists
 */
public static String getBpmnProcessDefinitionResource(Class< ? > type, String name) {
  String prefix = type.getName().replace('.', '/') + "." + name + ".";
  for (String suffix : BpmnDeployer.BPMN_RESOURCE_SUFFIXES) {
    String resource = prefix + suffix;
    InputStream inputStream = ReflectUtil.getResourceAsStream(resource);
    if (inputStream != null) {
      // The stream was only opened to probe for existence; close it so the
      // probe does not leak a file/jar handle.
      try {
        inputStream.close();
      } catch (IOException e) {
        // ignored: existence is already established, a close failure is harmless here
      }
      return resource;
    }
  }
  return prefix + BpmnDeployer.BPMN_RESOURCE_SUFFIXES[0];
}
/** Each test is assumed to clean up all DB content it entered.
 * After a test method executed, this method scans all tables to see if the DB is completely clean.
 * It throws AssertionFailed in case the DB is not clean.
 * If the DB is not clean, it is cleaned by performing a create a drop.
 *
 * @throws AssertionError listing the offending tables and their record counts
 */
public static void assertAndEnsureCleanDb(ProcessEngine processEngine) {
  log.fine("verifying that db is clean after test");
  Map<String, Long> tableCounts = processEngine.getManagementService().getTableCount();
  StringBuilder outputMessage = new StringBuilder();
  // entrySet iteration avoids a second lookup per table (was keySet() + get()).
  for (Map.Entry<String, Long> tableCount : tableCounts.entrySet()) {
    String tableName = tableCount.getKey();
    if (!TABLENAMES_EXCLUDED_FROM_DB_CLEAN_CHECK.contains(tableName)) {
      Long count = tableCount.getValue();
      if (count!=0L) {
        outputMessage.append("  "+tableName + ": " + count + " record(s) ");
      }
    }
  }

  if (outputMessage.length() > 0) {
    outputMessage.insert(0, "DB NOT CLEAN: \n");
    log.severe(EMPTY_LINE);
    log.severe(outputMessage.toString());

    // Leave a clean schema behind for the next test by dropping and recreating it.
    ((ProcessEngineImpl)processEngine)
      .getProcessEngineConfiguration()
      .getCommandExecutorTxRequired()
      .execute(new Command<Object>() {
        public Object execute(CommandContext commandContext) {
          DbSqlSession dbSqlSession = commandContext.getSession(DbSqlSession.class);
          dbSqlSession.dbSchemaDrop();
          dbSqlSession.dbSchemaCreate();
          return null;
        }
      });

    throw new AssertionError(outputMessage.toString());
  }
}
/**
 * Starts the job executor and polls until no executable jobs remain or the
 * time limit is reached, then shuts the executor down again.
 *
 * Timeout mechanism: an {@link InteruptTask} is scheduled on a Timer; when it
 * fires it sets a flag and interrupts this (the polling) thread so the
 * Thread.sleep below is broken immediately.
 *
 * @param maxMillisToWait  maximum total time to wait before failing
 * @param intervalMillis   sleep interval between job-queue polls
 * @throws ActivitiException if jobs are still available when the limit expires
 */
public static void waitForJobExecutorToProcessAllJobs(ProcessEngineConfigurationImpl processEngineConfiguration, long maxMillisToWait, long intervalMillis) {
  JobExecutor jobExecutor = processEngineConfiguration.getJobExecutor();
  jobExecutor.start();
  try {
    Timer timer = new Timer();
    InteruptTask task = new InteruptTask(Thread.currentThread());
    timer.schedule(task, maxMillisToWait);
    boolean areJobsAvailable = true;
    try {
      // Poll until the queue drains or the timer task flags a timeout.
      while (areJobsAvailable && !task.isTimeLimitExceeded()) {
        Thread.sleep(intervalMillis);
        areJobsAvailable = areJobsAvailable(processEngineConfiguration);
      }
    } catch (InterruptedException e) {
      // Intentionally empty: the interrupt IS the timeout signal from
      // InteruptTask; the loop exit below handles it.
    } finally {
      // Always cancel the timer so a pending interrupt cannot fire later.
      timer.cancel();
    }
    if (areJobsAvailable) {
      throw new ActivitiException("time limit of " + maxMillisToWait + " was exceeded");
    }
  } finally {
    jobExecutor.shutdown();
  }
}
/**
 * Returns whether any executable jobs are still pending for the given engine.
 */
public static boolean areJobsAvailable(ProcessEngineConfigurationImpl processEngineConfiguration) {
  int executableJobCount = processEngineConfiguration
      .getManagementService()
      .createJobQuery()
      .executable()
      .list()
      .size();
  return executableJobCount > 0;
}
/**
 * TimerTask that, when fired, records that the time limit was exceeded and
 * interrupts the waiting thread so it stops sleeping immediately.
 */
private static class InteruptTask extends TimerTask {
  // volatile: written by the Timer thread in run(), read by the polling
  // thread in isTimeLimitExceeded() — without it the flag update may never
  // become visible to the reader.
  protected volatile boolean timeLimitExceeded = false;
  // The thread to interrupt when the deadline passes (set once, before scheduling).
  protected Thread thread;
  public InteruptTask(Thread thread) {
    this.thread = thread;
  }
  /** True once the task has fired, i.e. the time limit has passed. */
  public boolean isTimeLimitExceeded() {
    return timeLimitExceeded;
  }
  public void run() {
    timeLimitExceeded = true;
    thread.interrupt();
  }
}
/**
 * Returns the cached ProcessEngine for the given configuration resource,
 * building (and caching) it on first request.
 */
public static ProcessEngine getProcessEngine(String configurationResource) {
  ProcessEngine cachedEngine = processEngines.get(configurationResource);
  if (cachedEngine != null) {
    return cachedEngine;
  }
  log.fine("==== BUILDING PROCESS ENGINE ========================================================================");
  ProcessEngine newEngine = ProcessEngineConfiguration
    .createProcessEngineConfigurationFromResource(configurationResource)
    .buildProcessEngine();
  log.fine("==== PROCESS ENGINE CREATED =========================================================================");
  processEngines.put(configurationResource, newEngine);
  return newEngine;
}
/** Closes every cached engine, then empties the cache. */
public static void closeProcessEngines() {
  for (ProcessEngine openEngine : processEngines.values()) {
    openEngine.close();
  }
  processEngines.clear();
}
}
| |
package natlab.backends.x10.codegen;
import natlab.backends.x10.IRx10.ast.ArrayAccess;
import natlab.backends.x10.IRx10.ast.ArraySetStmt;
import natlab.backends.x10.IRx10.ast.AssignStmt;
import natlab.backends.x10.IRx10.ast.DeclStmt;
import natlab.backends.x10.IRx10.ast.EmptyExp;
import natlab.backends.x10.IRx10.ast.Exp;
import natlab.backends.x10.IRx10.ast.IDInfo;
import natlab.backends.x10.IRx10.ast.IDUse;
import natlab.backends.x10.IRx10.ast.List;
import natlab.backends.x10.IRx10.ast.Literally;
import natlab.backends.x10.IRx10.ast.MultiAssignLHS;
import natlab.backends.x10.IRx10.ast.RegionBuilder;
import natlab.backends.x10.IRx10.ast.SimpleArrayExp;
import natlab.backends.x10.IRx10.ast.Stmt;
import natlab.backends.x10.IRx10.ast.StmtBlock;
import natlab.backends.x10.IRx10.ast.Type;
import natlab.tame.tir.TIRAbstractAssignToListStmt;
import natlab.tame.tir.TIRCellArrayGetStmt;
import natlab.tame.tir.TIRCellArraySetStmt;
import ast.CellIndexExpr;
import ast.Expr;
import ast.ParameterizedExpr;
/**
 * Translates Tamer IR cell-array get/set statements into IRx10 AST statements.
 *
 * NOTE(review): this code contains several self-described hacks around colon
 * ("__") index expressions and contains debug System.out prints that are part
 * of its current observable behavior; they are preserved as-is.
 */
public class CellArrayGetSet {

  /**
   * Handles a cell-array "get" ({@code lhs = c{...}}). Single-target reads
   * become an assignment (if the target is already known in the symbol map)
   * or a declaration plus pseudo-assignment; multi-target reads build a
   * MultiAssignLHS.
   */
  public static void handleTIRCellAbstractArrayGetStmt(
      TIRCellArrayGetStmt node, IRx10ASTGenerator target,
      StmtBlock block) {
    if (1 == (node).getTargets().asNameList().size()) {
      String LHS;
      boolean isDecl;
      target.symbolMapKey = (node).getTargetName().getID();
      LHS = target.symbolMapKey;
      if (target.symbolMap.containsKey(target.symbolMapKey)) {
        // Target already declared: emit a plain assignment.
        isDecl = false;
        AssignStmt list_single_assign_stmt = new AssignStmt();
        list_single_assign_stmt.setLHS(Helper.generateIDInfo(
            target.analysis, target.index, node, LHS));
        list_single_assign_stmt.getLHS().setName(
            ((TIRAbstractAssignToListStmt) node).getTargets()
                .getChild(0).getVarName());
        setRHSValue(false, list_single_assign_stmt, node, false,
            target, block);
        target.symbolMap.put(target.symbolMapKey,
            list_single_assign_stmt.getLHS());
        System.out.println("#####!" + target.symbolMapKey);
        block.addStmt(list_single_assign_stmt);
      } else {
        // First sighting of the target: declare it, then assign.
        isDecl = true;
        DeclStmt decl_stmt = new DeclStmt();
        decl_stmt.setLHS(Helper.generateIDInfo(target.analysis,
            target.index, node, LHS));
        decl_stmt.getLHS().setName(
            (node).getTargets().getChild(0).getVarName());
        /*
         * if it has a colon operator, add a null to the shape - this is
         * a hack to tell the compiler that it is an array
         */
        for (Exp i : Expressions.getArgs(node.getRHS(), target)) {
          if (i instanceof IDUse && ((IDUse) i).getID().equals("__")) {
            System.out
                .println("its a colon...............................");
            decl_stmt.getLHS().getShape().add(null);
          }
        }
        System.out.println("#####!" + target.symbolMapKey);
        // block.addStmt(decl_stmt);
        // Hoist the declaration to the top block; emit the value as a
        // separate pseudo-assignment at the current position.
        DeclStmt pseudoDecl = new DeclStmt();
        pseudoDecl.setLHS(decl_stmt.getLHS());
        //
        AssignStmt pseudoAssign = new AssignStmt();
        pseudoAssign.setLHS(decl_stmt.getLHS());
        // if (target.currentBlock.size() > 1) {
        target.currentBlock.get(0).addStmt(pseudoDecl);
        setRHSValue(isDecl, decl_stmt, node, false, target, block);
        pseudoAssign.setRHS(decl_stmt.getRHS());
        block.addStmt(pseudoAssign);
        target.symbolMap.put(target.symbolMapKey, decl_stmt.getLHS());
        System.out.println(block.getParent().toString()
            + "^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^");
        // }
        //
        // else {
        // block.addStmt(decl_stmt);
        // target.symbolMap.put(target.symbolMapKey,
        // decl_stmt.getLHS());
        //
        // }
      }
    }
    else {
      // Multiple targets: build a multi-assign LHS, one IDInfo per target.
      AssignStmt list_assign_stmt = new AssignStmt();
      MultiAssignLHS LHSinfo = new MultiAssignLHS();
      list_assign_stmt.setMultiAssignLHS(LHSinfo);
      for (ast.Name name : ((TIRAbstractAssignToListStmt) node)
          .getTargets().asNameList()) {
        System.out.println("^^" + name.getID());
        list_assign_stmt.getMultiAssignLHS().addIDInfo(
            Helper.generateIDInfo(target.analysis, target.index,
                node, name.getID()));
        target.symbolMap.put(name.getID(), Helper.generateIDInfo(
            target.analysis, target.index, node, name.getID()));
      }
      System.out.println("^*^"
          + list_assign_stmt.getMultiAssignLHS().getIDInfoList()
              .getNumChild());
      list_assign_stmt.setLHS(null);
      setRHSValue(false, list_assign_stmt, node, false, target, block);
      block.addStmt(list_assign_stmt);
    }
  }

  /**
   * Builds the RHS for a cell-array get and attaches it to the given
   * declaration or assignment. Plain index lists become an ArrayAccess; any
   * colon ("__") or non-scalar index switches to a RegionBuilder plus a
   * generated Point offset statement.
   */
  public static void setRHSValue(boolean isDecl, Stmt decl_or_assgn,
      TIRCellArrayGetStmt node, boolean isScalar, IRx10ASTGenerator target,
      StmtBlock block) {
    ArrayAccess arrayAccess = new ArrayAccess();
    arrayAccess.setArrayID(new IDUse(((CellIndexExpr) node.getRHS()).getTarget().getVarName()));
    arrayAccess.setIndicesList(Expressions.getArgs(node.getRHS(), target));
    RegionBuilder region = new RegionBuilder();
    region.setArrayID(arrayAccess.getArrayID());
    Exp i;
    boolean useregion = false;
    for (int j = 0; j < arrayAccess.getIndicesList().getNumChild(); j++) {
      i = arrayAccess.getIndicesList().getChild(j);
      /*
       * Below is a very ugly hack to incorporate x:y type colon
       * expressions in array access. TODO : Fix it
       */
      if (target.symbolMap.containsKey(((IDUse) i).getID())
          && !Helper.isScalar(target.symbolMap.get(
              (((IDUse) i).getID())).getShape())) {
        // Non-scalar index: bound the region by the index array's own
        // region. MAKE SURE colon op returns array with indexing starting at 1.
        region.addLower(new IDUse(/*((IDUse) i).getID() +*/ "("
            + ((IDUse) i).getID() + ".region.min(0)" + ")"));
        // (target.symbolMap.get((((IDUse)i).getID())).getShape()).get(1)
        region.addUpper(new IDUse(/*((IDUse) i).getID() +*/ "(" +
            ((IDUse) i).getID() + ".region.max(0)" + ")"));
        useregion = true;
      }
      else {
        // Scalar index: degenerate one-element region.
        region.addLower((IDUse) i);
        region.addUpper((IDUse) i);
      }
      if (i instanceof IDUse && ((IDUse) i).getID().equals("__")) {
        useregion = true;
      }
    }
    if (!useregion) {
      if (isDecl) {
        ((DeclStmt) decl_or_assgn).setRHS(arrayAccess);
      } else {
        ((AssignStmt) decl_or_assgn).setRHS(arrayAccess);
      }
    } else {
      // Region-based access: declare a Point holding the 1-based offset
      // corrections, one coordinate per index dimension.
      DeclStmt pointDecl = new DeclStmt();
      pointDecl.setLHS(new IDInfo(new Type("Point"), "mix10_pt_"
          + arrayAccess.getArrayID().getID(), null, false, null, null));
      pointDecl.setMutable(false);
      target.currentBlock.get(0).addStmt(pointDecl);
      StringBuffer x = new StringBuffer();
      if (/* region.getLower(0).equals(region.getUpper(0)) && */!region
          .getLower(0).getID().equals("__")) {
        x.append("1-(" + region.getLower(0).getID() + " as Int)");
      } else {
        x.append("0");
      }
      for (int u = 1; u < region.getLowerList().getNumChild(); u++) {
        if (/* region.getLower(u).equals(region.getUpper(u)) && */!region
            .getLower(u).getID().equals("__")) {
          x.append(", 1-(" + region.getLower(u).getID() + " as Int)");
        } else {
          x.append(", 0");
        }
      }
      block.addStmt(new Literally("mix10_pt_"
          + arrayAccess.getArrayID().getID() + " = Point.make("
          + x.toString() + ");\n"));
      if (isDecl) {
        ((DeclStmt) decl_or_assgn).setRHS(region);
      } else {
        ((AssignStmt) decl_or_assgn).setRHS(region);
      }
    }
  }

  /**
   * Handles a cell-array "set" ({@code c{...} = rhs}). Declares the array on
   * first sighting, then emits an ArraySetStmt; colon ("__") indices are
   * expanded into a generated point loop over the RHS region.
   */
  public static void handleTIRCellAbstractArraySetStmt(
      TIRCellArraySetStmt node, IRx10ASTGenerator target,
      StmtBlock block) {
    String LHS;
    target.symbolMapKey = ((CellIndexExpr) node.getRHS()).getTarget().getVarName();
    LHS = target.symbolMapKey;
    if (!target.symbolMap.containsKey(target.symbolMapKey)) {
      // Array not yet declared: emit an empty declaration first.
      DeclStmt array_decl = new DeclStmt();
      array_decl.setLHS(Helper.generateIDInfo(target.analysis,
          target.index, node, LHS));
      array_decl.getLHS().setName(LHS);
      array_decl.setRHS(new EmptyExp());
      target.symbolMap.put(target.symbolMapKey, array_decl.getLHS());
      block.addStmt(array_decl);
    }
    /**
     * The array has been declared before. This is just an assignment to its
     * index. If not declared before first declare the array and then set
     * the index
     */
    ArraySetStmt array_set = new ArraySetStmt();
    array_set.setLHS(Helper.generateIDInfo(target.analysis, target.index,
        node, LHS));
    array_set.getLHS().setName(LHS);
    boolean hasColon = false;
    for (int i = 0; i < node.getIndices().getNumChild(); i++) {
      array_set.setIndices(Expressions.makeIRx10Exp(node.getIndices()
          .getChild(i), false, target), i);
      if (((IDUse) (array_set.getIndices(i))).getID().equals("__")) {
        hasColon = true;
      }
    }
    target.symbolMap.put(target.symbolMapKey, array_set.getLHS());
    // array_set.getLHS()
    // .setValue(
    // new ArrayAccess(new IDUse(LHS), Expressions.getArgs(node.getLHS(),
    // target))
    // );
    // tf stays true only if every known shape dimension of the LHS is "1",
    // i.e. the target is (as far as the analysis knows) scalar-shaped.
    boolean tf = true;
    if (null != array_set.getLHS().getShape())
      for (int i = 0; i < array_set.getLHS().getShape().size(); i++) {
        if (null != array_set.getLHS().getShape().get(i))
          tf &= ("1").equals(array_set.getLHS().getShape().get(i)
              .toString());
      }
    array_set.setRHS(Expressions.makeIRx10Exp(node.getRHS(), tf, target));
    // System.out.println(((IDUse)
    // ((ArrayAccess)array_set.getLHS().getValue()).getIndices(0)).getID()+"%%");
    // TODO - Below is a dirty hack to
    // manage colon expression in array set statement
    // need to make it proper
    if (!hasColon)
      block.addStmt(array_set);
    else {
      // Colon present: generate "for (p in rhs.region) lhs(p+offset) = rhs(p);"
      Literally pointLoop = new Literally();
      pointLoop.setVerbatim("for (p in "
          + ((IDUse) array_set.getRHS()).getID() + ".region)\n");
      StringBuffer x = new StringBuffer();
      StringBuffer pt = new StringBuffer();
      String rhsID = ((IDUse) array_set.getRHS()).getID();
      if (((IDUse) array_set.getIndices(0)).getID().equals("__")) {
        pt.append("(" + rhsID + ".region.min(" + Integer.toString(0)
            + ")-1) as Int");
      } else {
        Exp i = array_set.getIndices(0);
        if (target.symbolMap.containsKey(((IDUse) i).getID())
            && !Helper.isScalar(target.symbolMap.get(
                (((IDUse) i).getID())).getShape())) {
          pt.append("(" + ((IDUse) i).getID() + "("
              + ((IDUse) i).getID() + ".region.min(0)"
              + ") -1) as Int");
        } else {
          pt.append("(" + ((IDUse) i).getID() + "-1) as Int");
        }
      }
      for (int j = 1; j < array_set.getIndicesList().getNumChild(); j++) {
        if (((IDUse) array_set.getIndices(j)).getID().equals("__")) {
          pt.append(", (" + rhsID + ".region.min("
              + Integer.toString(j) + ")-1) as Int");
        } else {
          Exp i = array_set.getIndices(j);
          if (target.symbolMap.containsKey(((IDUse) i).getID())
              && !Helper.isScalar(target.symbolMap.get(
                  (((IDUse) i).getID())).getShape())) {
            pt.append(", (" + ((IDUse) i).getID() + "("
                + ((IDUse) i).getID() + ".region.min(0)"
                + ") -1) as Int");
          } else {
            pt.append(", (" + ((IDUse) i).getID() + "-1) as Int");
          }
        }
      }
      x.append(array_set.getLHS().getName() + "(p.operator+(Point.make("
          + pt.toString() + ")))= ");
      x.append(((IDUse) array_set.getRHS()).getID() + "(p);\n");
      block.addStmt(new Literally(x.toString()));
    }
  }

  /**
   * Builds a SimpleArrayExp of type Any from a cell-array literal's
   * arguments.
   */
  public static Exp createCellArray(Expr rhs, IRx10ASTGenerator target) {
    SimpleArrayExp cellArray = new SimpleArrayExp();
    cellArray.setType(new Type("Any"));
    List<Exp> values = new List<Exp>();
    for (int i = 0; i < ((ast.ParameterizedExpr) rhs).getArgs()
        .getNumChild(); i++) {
      values.add(Expressions.makeIRx10Exp(
          ((ParameterizedExpr) rhs).getArg(i), false, target));
    }
    cellArray.setValuesList(values);
    /*
     * TODO add points when cellvertcat is implemented
     */
    return cellArray;
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package javax.swing.plaf.basic;
import java.awt.IllegalComponentStateException;
import java.awt.Point;
import java.util.Hashtable;
import javax.swing.JButton;
import javax.swing.JLabel;
import javax.swing.JSlider;
import javax.swing.SwingConstants;
import javax.swing.SwingTestCase;
import javax.swing.UIManager;
import javax.swing.UnsupportedLookAndFeelException;
/**
 * Conformance and regression tests for {@link BasicSliderUI}. Several tests
 * pin behavior for specific Apache Harmony bug reports (HARMONY-2591,
 * HARMONY-2855, HARMONY-2878, HARMONY-4445); their exact assertions and call
 * order are intentional and must not be "cleaned up".
 */
public class BasicSliderUITest extends SwingTestCase {
    private BasicSliderUI sliderUI;

    private JSlider slider;

    @Override
    protected void setUp() throws Exception {
        super.setUp();
        // Install a minimal look-and-feel that claims to be native/supported
        // so BasicSliderUI picks up Basic defaults deterministically.
        try {
            UIManager.setLookAndFeel(new BasicLookAndFeel() {
                private static final long serialVersionUID = 1L;

                @Override
                public boolean isNativeLookAndFeel() {
                    return true;
                }

                @Override
                public boolean isSupportedLookAndFeel() {
                    return true;
                }

                @Override
                public String getDescription() {
                    return "";
                }

                @Override
                public String getID() {
                    return "";
                }

                @Override
                public String getName() {
                    return "";
                }
            });
        } catch (UnsupportedLookAndFeelException e1) {
            e1.printStackTrace();
        }
        slider = new JSlider();
        // Note: the UI is constructed but NOT installed; individual tests call
        // installUI when they need installed state.
        sliderUI = new BasicSliderUI(slider);
    }

    @Override
    protected void tearDown() throws Exception {
        sliderUI = null;
        slider = null;
        super.tearDown();
    }

    // createUI must return a fresh instance per call (no caching).
    public void testCreateUI() throws Exception {
        assertNotNull(BasicSliderUI.createUI(slider));
        assertNotSame(BasicSliderUI.createUI(slider),
                BasicSliderUI.createUI(slider));
    }

    // Track listeners are per-call; creating one must not touch UI state.
    public void testCreateTrackListener() throws Exception {
        assertNull(sliderUI.slider);
        assertNotNull(sliderUI.createTrackListener(slider));
        assertNull(sliderUI.slider);
        assertNull(sliderUI.trackListener);
        assertNotSame(sliderUI.createTrackListener(slider),
                sliderUI.createTrackListener(new JSlider()));
    }

    // Change listener is shared across calls on the same UI instance...
    public void testCreateChangeListener() throws Exception {
        assertNull(sliderUI.slider);
        assertNotNull(sliderUI.createChangeListener(slider));
        assertNull(sliderUI.slider);
        assertNull(sliderUI.changeListener);
        assertSame(sliderUI.createChangeListener(slider),
                sliderUI.createChangeListener(new JSlider()));
    }

    // ...but distinct between different UI instances.
    public void testCreateChangeListenerNotSame() throws Exception {
        assertNotSame(sliderUI.createChangeListener(slider),
                new BasicSliderUI(slider).createChangeListener(slider));
    }

    public void testCreateComponentListener() throws Exception {
        assertNull(sliderUI.slider);
        assertNotNull(sliderUI.createComponentListener(slider));
        assertNull(sliderUI.slider);
        assertNull(sliderUI.componentListener);
        assertSame(sliderUI.createComponentListener(slider),
                sliderUI.createComponentListener(new JSlider()));
    }

    public void testCreateComponentListenerNotSame() throws Exception {
        assertNotSame(sliderUI.createComponentListener(slider),
                new BasicSliderUI(slider).createComponentListener(slider));
    }

    public void testCreateFocusListener() throws Exception {
        assertNull(sliderUI.slider);
        assertNotNull(sliderUI.createFocusListener(slider));
        assertNull(sliderUI.slider);
        assertNull(sliderUI.focusListener);
        assertSame(sliderUI.createFocusListener(slider),
                sliderUI.createFocusListener(new JSlider()));
    }

    public void testCreateFocusListenerNotSame() throws Exception {
        assertNotSame(sliderUI.createFocusListener(slider),
                new BasicSliderUI(slider).createFocusListener(slider));
    }

    // Scroll listeners are per-call, unlike change/component/focus listeners.
    public void testCreateScrollListener() throws Exception {
        assertNull(sliderUI.slider);
        assertNotNull(sliderUI.createScrollListener(slider));
        assertNull(sliderUI.slider);
        assertNull(sliderUI.scrollListener);
        assertNotSame(sliderUI.createScrollListener(slider),
                sliderUI.createScrollListener(slider));
    }

    // Regression for HARMONY-2878
    public void testCreateScrollListenerNull() throws Exception {
        assertNotNull(sliderUI.createScrollListener(null)); // no exception expected
    }

    public void testCreatePropertyChangeListener() throws Exception {
        assertNull(sliderUI.slider);
        assertNotNull(sliderUI.createPropertyChangeListener(slider));
        assertNull(sliderUI.slider);
        assertNull(sliderUI.propertyChangeListener);
        assertSame(sliderUI.createPropertyChangeListener(slider),
                sliderUI.createPropertyChangeListener(new JSlider()));
    }

    public void testCreatePropertyChangeListenerNotSame() throws Exception {
        assertNotSame(sliderUI.createPropertyChangeListener(slider),
                new BasicSliderUI(slider).createPropertyChangeListener(slider));
    }

    // Colors are null before installUI and come from UIManager afterwards.
    public void testGetShadowColor() throws Exception {
        assertNull(sliderUI.getShadowColor());
        sliderUI.installUI(slider);
        assertEquals(UIManager.getColor("Slider.shadow"), sliderUI.getShadowColor());
    }

    public void testGetHighlightColor() throws Exception {
        assertNull(sliderUI.getHighlightColor());
        sliderUI.installUI(slider);
        assertEquals(UIManager.getColor("Slider.highlight"), sliderUI.getHighlightColor());
    }

    public void testGetFocusColor() throws Exception {
        assertNull(sliderUI.getFocusColor());
        sliderUI.installUI(slider);
        assertEquals(UIManager.getColor("Slider.focus"), sliderUI.getFocusColor());
    }

    public void testGetLowestValueLabel() throws Exception {
        sliderUI.installUI(slider);
        slider.setLabelTable(slider.createStandardLabels(1));
        assertEquals("0", ((JLabel) sliderUI.getLowestValueLabel()).getText());
        slider.setLabelTable(slider.createStandardLabels(2, 57));
        assertEquals("57", ((JLabel) sliderUI.getLowestValueLabel()).getText());
    }

    public void testGetHighestValueLabel() throws Exception {
        sliderUI.installUI(slider);
        slider.setLabelTable(slider.createStandardLabels(1));
        assertEquals("100", ((JLabel) sliderUI.getHighestValueLabel()).getText());
        slider.setLabelTable(slider.createStandardLabels(2, 56));
        assertEquals("100", ((JLabel) sliderUI.getHighestValueLabel()).getText());
    }

    // The width/height helpers must report the size of the label mapped to
    // the highest/lowest key in the label table, not of the widest label.
    public void testGetWidthOfHighValueLabel() throws Exception {
        sliderUI.installUI(slider);
        Hashtable<Integer, JLabel> t = new Hashtable<Integer, JLabel>();
        t.put(new Integer("1"), new JLabel("1"));
        t.put(new Integer("100"), new JLabel("100"));
        JLabel label = new JLabel("1000000");
        t.put(new Integer("1000000"), label);
        slider.setLabelTable(t);
        assertEquals(label.getWidth(), sliderUI.getWidthOfHighValueLabel());
    }

    public void testGetWidthOfLowValueLabel() throws Exception {
        sliderUI.installUI(slider);
        Hashtable<Integer, JLabel> t = new Hashtable<Integer, JLabel>();
        JLabel label = new JLabel("1");
        t.put(new Integer("1"), label);
        t.put(new Integer("100"), new JLabel("100"));
        t.put(new Integer("1000000"), new JLabel("1000000"));
        slider.setLabelTable(t);
        assertEquals(label.getWidth(), sliderUI.getWidthOfLowValueLabel());
    }

    public void testGetHightOfHighValueLabel() throws Exception {
        sliderUI.installUI(slider);
        Hashtable<Integer, JLabel> t = new Hashtable<Integer, JLabel>();
        t.put(new Integer("1"), new JLabel("1"));
        t.put(new Integer("100"), new JLabel("100"));
        JLabel label = new JLabel("1000000");
        t.put(new Integer("1000000"), label);
        slider.setLabelTable(t);
        assertEquals(label.getHeight(), sliderUI.getHeightOfHighValueLabel());
    }

    public void testGetHeightOfLowValueLabel() throws Exception {
        sliderUI.installUI(slider);
        Hashtable<Integer, JLabel> t = new Hashtable<Integer, JLabel>();
        JLabel label = new JLabel("1");
        t.put(new Integer("1"), label);
        t.put(new Integer("100"), new JLabel("100"));
        t.put(new Integer("1000000"), new JLabel("1000000"));
        slider.setLabelTable(t);
        assertEquals(label.getHeight(), sliderUI.getHeightOfLowValueLabel());
    }

    public void testGetWidthOfWidestLabel() throws Exception {
        sliderUI.installUI(slider);
        Hashtable<Integer, JLabel> t = new Hashtable<Integer, JLabel>();
        t.put(new Integer("1"), new JLabel("1"));
        JLabel label = new JLabel("___________100");
        t.put(new Integer("100"), label);
        t.put(new Integer("1000000"), new JLabel("1000000"));
        slider.setLabelTable(t);
        assertEquals(label.getWidth(), sliderUI.getWidthOfWidestLabel());
    }

    public void testGetHeightOfTallestLabel() throws Exception {
        sliderUI.installUI(slider);
        Hashtable<Integer, JLabel> t = new Hashtable<Integer, JLabel>();
        JLabel label = new JLabel("1");
        // Large font makes this the tallest label regardless of text length.
        label.setFont(label.getFont().deriveFont(50f));
        t.put(new Integer("1"), label);
        t.put(new Integer("100"), new JLabel("100"));
        t.put(new Integer("1000000"), new JLabel("1000000"));
        slider.setLabelTable(t);
        assertEquals(label.getHeight(), sliderUI.getHeightOfTallestLabel());
    }

    // setThumbLocation must accept negative coordinates without clamping.
    public void testSetThumbLocation() throws Exception {
        sliderUI.installUI(slider);
        sliderUI.setThumbLocation(200, 500);
        assertEquals(new Point(200, 500), sliderUI.thumbRect.getLocation());
        sliderUI.setThumbLocation(200, -500);
        assertEquals(new Point(200, -500), sliderUI.thumbRect.getLocation());
        sliderUI.setThumbLocation(-200, 500);
        assertEquals(new Point(-200, 500), sliderUI.thumbRect.getLocation());
    }

    // Regression test for HARMONY-2855
    public void testBasicSliderUI() throws Exception {
        assertNull(sliderUI.slider);
    }

    /**
     * <code>uninstallUI</code> is called with the same instance of
     * <code>JSlider</code> to which this UI was installed.
     */
    // Regression test for HARMONY-2855
    public void testUninstallUI01() {
        sliderUI.installUI(slider);
        sliderUI.uninstallUI(slider);
        // No exception is expected
    }

    /**
     * <code>uninstallUI</code> is called before <code>installUI</code>
     * was called.
     */
    // Regression test for HARMONY-2855
    public void testUninstallUI02() {
        try {
            sliderUI.uninstallUI(slider);
            fail("IllegalComponentStateException is expected");
        } catch (IllegalComponentStateException e) {
            // expected
        }
    }

    /**
     * <code>uninstallUI</code> is called with another instance of
     * <code>JSlider</code>.
     */
    // Regression test for HARMONY-2855
    public void testUninstallUI03() {
        try {
            sliderUI.uninstallUI(new JSlider());
            fail("IllegalComponentStateException is expected");
        } catch (IllegalComponentStateException e) {
            // expected
        }
    }

    /**
     * <code>uninstallUI</code> is called with instance of another class, i.e.
     * not <code>JSlider</code> instance.
     */
    // Regression test for HARMONY-2855
    public void testUninstallUI04() {
        try {
            sliderUI.uninstallUI(new JButton());
            fail("IllegalComponentStateException is expected");
        } catch (IllegalComponentStateException e) {
            // expected
        }
    }

    /**
     * Regression test for HARMONY-2591
     * */
    public void testActionScrollerEnabled() {
        BasicSliderUI.ActionScroller m = sliderUI.new ActionScroller(new JSlider(),
                3, true);
        assertTrue(m.isEnabled());
    }

    /**
     * Regression test for HARMONY-4445
     */
    public void testMinMaxValue() {
        // A maximum of Integer.MAX_VALUE exposes integer-overflow bugs in the
        // value-to-pixel mapping; half the range must map to the track center.
        slider.setMaximum(Integer.MAX_VALUE);
        slider.setMinimum(0);
        slider.setBounds(0,0,100,100);
        int half = Integer.MAX_VALUE / 2;
        // UI slightly modified to omit unneeded actions - no functional changes
        // according to spec
        BasicSliderUI tested = new BasicSliderUI(slider) {
            @Override
            protected void installKeyboardActions(JSlider unneded) {
                // Empty. In real BasicSliderUI this method installs Keyboard
                // actions
            }

            @Override
            protected void installDefaults(JSlider unneded) {
                // Empty. In real BasicSliderUI this method installs defaults
                // (colors and fonts)
            }

            @Override
            protected void installListeners(JSlider unneded) {
                // Empty. In real BasicSliderUI this method installs listeners
            }
        };
        tested.installUI(slider);
        assertEquals(tested.xPositionForValue(half),
                getCenterHorisontalPosition(tested));
        slider.setOrientation(SwingConstants.VERTICAL);
        tested.installUI(slider);
        assertEquals(tested.yPositionForValue(half),
                getCenterVerticalPosition(tested));
    }

    // Pixel coordinate of the vertical center of the track.
    private int getCenterVerticalPosition(BasicSliderUI ui) {
        return ui.trackRect.y + (ui.trackRect.height / 2);
    }

    // Pixel coordinate of the horizontal center of the track.
    private int getCenterHorisontalPosition(BasicSliderUI ui) {
        return ui.trackRect.x + (ui.trackRect.width / 2);
    }
}
| |
/*
*
* Copyright (c) 2007, Oracle and/or its affiliates. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* - Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* - Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* - Neither the name of Oracle nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
* IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import javax.swing.*;
import javax.swing.event.*;
import javax.swing.text.*;
import javax.swing.border.*;
import javax.swing.colorchooser.*;
import javax.swing.filechooser.*;
import javax.accessibility.*;
import java.awt.*;
import java.awt.event.*;
import java.beans.*;
import java.util.*;
import java.io.*;
import java.applet.*;
import java.net.*;
/**
* A generic SwingSet2 demo module
*
* @author Jeff Dinkins
*/
public class DemoModule extends JApplet {
// The preferred size of the demo
private int PREFERRED_WIDTH = 680;
private int PREFERRED_HEIGHT = 600;
Border loweredBorder = new CompoundBorder(new SoftBevelBorder(SoftBevelBorder.LOWERED),
new EmptyBorder(5,5,5,5));
// Premade convenience dimensions, for use wherever you need 'em.
public static Dimension HGAP2 = new Dimension(2,1);
public static Dimension VGAP2 = new Dimension(1,2);
public static Dimension HGAP5 = new Dimension(5,1);
public static Dimension VGAP5 = new Dimension(1,5);
public static Dimension HGAP10 = new Dimension(10,1);
public static Dimension VGAP10 = new Dimension(1,10);
public static Dimension HGAP15 = new Dimension(15,1);
public static Dimension VGAP15 = new Dimension(1,15);
public static Dimension HGAP20 = new Dimension(20,1);
public static Dimension VGAP20 = new Dimension(1,20);
public static Dimension HGAP25 = new Dimension(25,1);
public static Dimension VGAP25 = new Dimension(1,25);
public static Dimension HGAP30 = new Dimension(30,1);
public static Dimension VGAP30 = new Dimension(1,30);
private SwingSet2 swingset = null;
private JPanel panel = null;
private String resourceName = null;
private String iconPath = null;
private String sourceCode = null;
public DemoModule(SwingSet2 swingset) {
this(swingset, null, null);
}
public DemoModule(SwingSet2 swingset, String resourceName, String iconPath) {
UIManager.put("swing.boldMetal", Boolean.FALSE);
panel = new JPanel();
panel.setLayout(new BorderLayout());
this.resourceName = resourceName;
this.iconPath = iconPath;
this.swingset = swingset;
loadSourceCode();
}
public String getResourceName() {
return resourceName;
}
public JPanel getDemoPanel() {
return panel;
}
public SwingSet2 getSwingSet2() {
return swingset;
}
public String getString(String key) {
if (getSwingSet2() != null) {
return getSwingSet2().getString(key);
}else{
return "nada";
}
}
public char getMnemonic(String key) {
return (getString(key)).charAt(0);
}
public ImageIcon createImageIcon(String filename, String description) {
if(getSwingSet2() != null) {
return getSwingSet2().createImageIcon(filename, description);
} else {
String path = "/resources/images/" + filename;
return new ImageIcon(getClass().getResource(path), description);
}
}
public String getSourceCode() {
return sourceCode;
}
public void loadSourceCode() {
if(getResourceName() != null) {
String filename = getResourceName() + ".java";
sourceCode = new String("<html><body bgcolor=\"#ffffff\"><pre>");
InputStream is;
InputStreamReader isr;
CodeViewer cv = new CodeViewer();
URL url;
try {
url = getClass().getResource(filename);
is = url.openStream();
isr = new InputStreamReader(is, "UTF-8");
BufferedReader reader = new BufferedReader(isr);
// Read one line at a time, htmlize using super-spiffy
// html java code formating utility from www.CoolServlets.com
String line = reader.readLine();
while(line != null) {
sourceCode += cv.syntaxHighlight(line) + " \n ";
line = reader.readLine();
}
sourceCode += new String("</pre></body></html>");
} catch (Exception ex) {
sourceCode = "Could not load file: " + filename;
}
}
}
public String getName() {
return getString(getResourceName() + ".name");
};
public Icon getIcon() {
return createImageIcon(iconPath, getResourceName() + ".name");
};
public String getToolTip() {
return getString(getResourceName() + ".tooltip");
};
public void mainImpl() {
JFrame frame = new JFrame(getName());
frame.getContentPane().setLayout(new BorderLayout());
frame.getContentPane().add(getDemoPanel(), BorderLayout.CENTER);
getDemoPanel().setPreferredSize(new Dimension(PREFERRED_WIDTH, PREFERRED_HEIGHT));
frame.pack();
frame.show();
}
public JPanel createHorizontalPanel(boolean threeD) {
JPanel p = new JPanel();
p.setLayout(new BoxLayout(p, BoxLayout.X_AXIS));
p.setAlignmentY(TOP_ALIGNMENT);
p.setAlignmentX(LEFT_ALIGNMENT);
if(threeD) {
p.setBorder(loweredBorder);
}
return p;
}
public JPanel createVerticalPanel(boolean threeD) {
JPanel p = new JPanel();
p.setLayout(new BoxLayout(p, BoxLayout.Y_AXIS));
p.setAlignmentY(TOP_ALIGNMENT);
p.setAlignmentX(LEFT_ALIGNMENT);
if(threeD) {
p.setBorder(loweredBorder);
}
return p;
}
public static void main(String[] args) {
DemoModule demo = new DemoModule(null);
demo.mainImpl();
}
public void init() {
getContentPane().setLayout(new BorderLayout());
getContentPane().add(getDemoPanel(), BorderLayout.CENTER);
}
/** No-op hook; presumably overridden by demos with draggable components — confirm with subclasses. */
void updateDragEnabled(boolean dragEnabled) {}
}
| |
/*
* PreferenceManager.java
*
* Created on March 11, 2004, 2:15 PM
*/
package org.tolweb.treegrow.main;
import java.io.*;
import java.net.*;
import java.util.*;
import org.jdom.*;
import org.jdom.input.*;
import org.jdom.output.*;
import org.tolweb.base.xml.BaseXMLWriter;
/**
* Class used for storing all of the information about the user and their
* associated preferences
*/
public class PreferenceManager {
    // Backing store for all preference key/value pairs, keyed by XMLConstants
    // names. Hashtable rejects null keys and values, so String-valued setters
    // route through putOrRemove() and treat null as "remove the entry".
    private Hashtable preferences = new Hashtable();

    /** Creates a new instance of PreferenceManager */
    public PreferenceManager() {
        setUseCustCursors(true);
    }

    /**
     * Stores value under key, or removes the entry when value is null.
     * Hashtable.put(key, null) throws NullPointerException, which formerly
     * made calls such as setUserName(null) abort config parsing silently
     * (the NPE was swallowed by a broad empty catch).
     */
    private void putOrRemove(String key, Object value) {
        if (value == null) {
            preferences.remove(key);
        } else {
            preferences.put(key, value);
        }
    }

    /**
     * Sets the username of the user editing the tree
     *
     * @param user The username (null clears the stored value)
     */
    public void setUserName(String user) {
        putOrRemove(XMLConstants.USERNAME, user);
    }

    /**
     * Returns the username of the user editing the tree
     *
     * @return username, or null if none stored
     */
    public String getUserName() {
        return (String) preferences.get(XMLConstants.USERNAME);
    }

    /**
     * Sets the MD5 hash of the user's password. This is stored locally
     *
     * @param pw The MD5 hash of the password (null clears the stored value)
     */
    public void setPassword(String pw) {
        putOrRemove(XMLConstants.PASSWORD, pw);
    }

    /**
     * Returns the local stored password, which is an MD5 of the pw on server
     *
     * @return the local stored password, or null if none stored
     */
    public String getPassword() {
        return (String) preferences.get(XMLConstants.PASSWORD);
    }

    /**
     * Sets as to whether the user uses custom cursors
     *
     * @param useCust Whether the user wants to use custom cursors
     */
    public void setUseCustCursors(boolean useCust) {
        preferences.put(XMLConstants.CUSTOM_CURSORS, Boolean.valueOf(useCust));
    }

    /**
     * Returns whether the user is using custom cursors
     *
     * @return Whether the user is using custom cursors (false when unset)
     */
    public boolean getUseCustCursors() {
        Boolean value = (Boolean) preferences.get(XMLConstants.CUSTOM_CURSORS);
        return value != null && value.booleanValue();
    }

    /**
     * Sets the last edited date on the ToLEditor.jar file on the
     * editor.tolweb.org site. This is used to determine if a download of a
     * new file is necessary
     *
     * @param ts The last modified date of the ToLEditor.jar file on the server
     */
    public void setEditorJarTimestamp(long ts) {
        preferences.put(XMLConstants.EDITORJAR_TIMESTAMP, Long.valueOf(ts));
    }

    /**
     * Returns the last edited date on the ToLEditor.jar file on the server
     *
     * @return The last edited date, or 0 when unset
     */
    public long getEditorJarTimestamp() {
        Long value = (Long) preferences.get(XMLConstants.EDITORJAR_TIMESTAMP);
        return value != null ? value.longValue() : 0;
    }

    /**
     * Sets the last edited date on the ToLSupportFiles.jar file on the
     * editor.tolweb.org site. This is used to determine if a download of a
     * new file is necessary
     *
     * @param ts The last modified date of the ToLSupportFiles.jar file
     */
    public void setSupportJarTimestamp(long ts) {
        preferences.put(XMLConstants.SUPPORTJAR_TIMESTAMP, Long.valueOf(ts));
    }

    /**
     * Returns the last edited date of the ToLSupportFile.jar file.
     *
     * @return The last edited date, or 0 when unset (previously this threw
     *         NullPointerException when the key was absent, unlike
     *         {@link #getEditorJarTimestamp()})
     */
    public long getSupportJarTimestamp() {
        Long value = (Long) preferences.get(XMLConstants.SUPPORTJAR_TIMESTAMP);
        return value != null ? value.longValue() : 0;
    }

    // NOTE(review): stored under the LAST_USER key although it answers "has
    // opened a file" — confirm the key reuse is intentional.
    public boolean hasOpenedFile() {
        Boolean value = (Boolean) preferences.get(XMLConstants.LAST_USER);
        return value != null && value.booleanValue();
    }

    public void setHasOpenedFile(boolean value) {
        preferences.put(XMLConstants.LAST_USER, Boolean.valueOf(value));
    }

    /** Returns the copyright holder's id, or -1 when unset. */
    public int getCopyHolderId() {
        Integer value = (Integer) preferences.get(XMLConstants.ID);
        return value != null ? value.intValue() : -1;
    }

    public void setCopyHolderId(int value) {
        preferences.put(XMLConstants.ID, Integer.valueOf(value));
    }

    public String getCopyHolder() {
        return (String) preferences.get(XMLConstants.COPYRIGHTOWNER);
    }

    public void setCopyHolder(String value) {
        putOrRemove(XMLConstants.COPYRIGHTOWNER, value);
    }

    public String getCopyUrl() {
        return (String) preferences.get(XMLConstants.COPYRIGHTURL);
    }

    public void setCopyUrl(String value) {
        putOrRemove(XMLConstants.COPYRIGHTURL, value);
    }

    public String getCopyEmail() {
        return (String) preferences.get(XMLConstants.EMAIL);
    }

    public void setCopyEmail(String value) {
        putOrRemove(XMLConstants.EMAIL, value);
    }

    public String getCopyDate() {
        return (String) preferences.get(XMLConstants.COPYRIGHTDATE);
    }

    public void setCopyDate(String value) {
        putOrRemove(XMLConstants.COPYRIGHTDATE, value);
    }

    /** Returns the image use permission code, or -1 when unset. */
    public byte getUsePermission() {
        Byte value = (Byte) preferences.get(XMLConstants.PERMISSION);
        return value != null ? value.byteValue() : -1;
    }

    public void setUsePermission(byte value) {
        // Byte.valueOf over the deprecated Byte constructor.
        preferences.put(XMLConstants.PERMISSION, Byte.valueOf(value));
    }

    public boolean getInPublicDomain() {
        Boolean value = (Boolean) preferences.get(XMLConstants.PUBLICDOMAIN);
        return value != null && value.booleanValue();
    }

    public void setInPublicDomain(boolean value) {
        preferences.put(XMLConstants.PUBLICDOMAIN, Boolean.valueOf(value));
    }

    public boolean dontShowCloseDialog() {
        Boolean value = (Boolean) preferences.get(XMLConstants.DONTSHOWCLOSE);
        return value != null && value.booleanValue();
    }

    public void setDontShowCloseDialog(boolean value) {
        preferences.put(XMLConstants.DONTSHOWCLOSE, Boolean.valueOf(value));
    }

    /** Serializes every stored preference as an XML element to the config file. */
    public void writePreferencesToDisk() {
        Enumeration en = preferences.keys();
        Element mainElement = new Element(XMLConstants.PREFERENCES);
        while (en.hasMoreElements()) {
            String nextKey = (String) en.nextElement();
            Object value = preferences.get(nextKey);
            Element settingElement = new Element(nextKey);
            settingElement.addContent(value.toString());
            mainElement.addContent(settingElement);
        }
        Document recentDocument = new Document(mainElement);
        FileOutputStream out = null;
        try {
            out = new FileOutputStream(Controller.getController().getFileManager().getConfigFile());
            XMLOutputter serializer = BaseXMLWriter.getXMLOutputter();
            serializer.output(recentDocument, out);
            out.flush();
        } catch (IOException e) {
            // Formerly swallowed silently; surface the failure for debugging.
            e.printStackTrace();
        } finally {
            // Close in finally so a write failure cannot leak the stream.
            if (out != null) {
                try {
                    out.close();
                } catch (IOException ignored) {
                    // nothing useful to do if close itself fails
                }
            }
        }
    }

    /**
     * Loads preferences from the on-disk config file. Handles both the
     * current flat format and the legacy "settings" format. A missing file
     * yields defaults (no user, custom cursors enabled).
     */
    public void fetchConfigInfo() {
        try {
            File configFile = Controller.getController().getFileManager().getConfigFile();
            if (!configFile.exists()) {
                setUserName(null);
                setUseCustCursors(true);
                return;
            }
            // File.toURL() is deprecated (it does not escape illegal URL
            // characters); go through toURI() instead.
            URL url = configFile.toURI().toURL();
            SAXBuilder builder = new SAXBuilder();
            Document doc = builder.build(url);
            Element rootElmt = doc.getRootElement();
            if (rootElmt.getName().equals(XMLConstants.SETTINGS)) {
                // Legacy format written by older editor versions.
                fetchConfigInfoOldStyle(rootElmt);
                return;
            }
            String lastUserText = rootElmt.getChildText(XMLConstants.LAST_USER);
            setHasOpenedFile(lastUserText != null && lastUserText.equalsIgnoreCase(XMLConstants.TRUE));
            String dontShowClose = rootElmt.getChildText(XMLConstants.DONTSHOWCLOSE);
            setDontShowCloseDialog(dontShowClose != null && dontShowClose.equalsIgnoreCase(XMLConstants.TRUE));
            // getChildText returns null for absent elements; the setters now
            // tolerate that instead of throwing NullPointerException.
            setUserName(rootElmt.getChildText(XMLConstants.USERNAME));
            setPassword(rootElmt.getChildText(XMLConstants.PASSWORD));
            String useCustomCursors = rootElmt.getChildText(XMLConstants.CUSTOM_CURSORS);
            setUseCustCursors(useCustomCursors != null && useCustomCursors.equalsIgnoreCase(XMLConstants.TRUE));
            String timestamp = rootElmt.getChildText(XMLConstants.EDITORJAR_TIMESTAMP);
            setEditorJarTimestamp(timestamp != null ? Long.parseLong(timestamp) : 0);
            timestamp = rootElmt.getChildText(XMLConstants.SUPPORTJAR_TIMESTAMP);
            setSupportJarTimestamp(timestamp != null ? Long.parseLong(timestamp) : 0);
            String id = rootElmt.getChildText(XMLConstants.ID);
            // Guard: Integer.parseInt(null) used to abort the remaining
            // parsing via the broad catch below.
            if (id != null && !id.equals("")) {
                setCopyHolderId(Integer.parseInt(id));
            }
            String use = rootElmt.getChildText(XMLConstants.PERMISSION);
            if (use != null && !use.equals("")) {
                setUsePermission(Byte.parseByte(use));
            } else {
                setUsePermission(NodeImage.EVERYWHERE_USE);
            }
            setCopyDate(rootElmt.getChildText(XMLConstants.COPYRIGHTDATE));
            setCopyHolder(rootElmt.getChildText(XMLConstants.COPYRIGHTOWNER));
            setCopyUrl(rootElmt.getChildText(XMLConstants.COPYRIGHTURL));
            setCopyEmail(rootElmt.getChildText(XMLConstants.COPYRIGHTEMAIL));
            String pub = rootElmt.getAttributeValue(XMLConstants.PUBLICDOMAIN);
            setInPublicDomain(pub != null && pub.equalsIgnoreCase(XMLConstants.TRUE));
        } catch (Exception e) {
            // Formerly swallowed silently; surface parse problems for debugging.
            e.printStackTrace();
        }
    }

    /**
     * Here so users won't have to type in their information again when they
     * get the new version
     */
    public void fetchConfigInfoOldStyle(Element rootElmt) {
        try {
            String attrValue = rootElmt.getAttributeValue(XMLConstants.LAST_USER);
            setHasOpenedFile(attrValue != null && attrValue.equals(XMLConstants.ONE));
            attrValue = rootElmt.getAttributeValue(XMLConstants.DONTSHOWCLOSE);
            setDontShowCloseDialog(attrValue != null && attrValue.equals(XMLConstants.ONE));
            Element userElem = rootElmt.getChild(XMLConstants.USERNAME);
            String userName = userElem.getTextTrim();
            Element passwordElem = rootElmt.getChild(XMLConstants.PASSWORD);
            String password = null;
            if (passwordElem != null) {
                password = passwordElem.getTextTrim();
            }
            Element cursorElem = rootElmt.getChild(XMLConstants.CUSTOM_CURSORS);
            String result = cursorElem.getTextTrim();
            if (result == null || result.equals("")) {
                setUseCustCursors(true);
            } else {
                setUseCustCursors(result.equals(XMLConstants.TRUE));
            }
            // BUG FIX: the original unconditionally reset custom cursors to
            // true at the end of this method, clobbering the value parsed
            // above; the parsed value is now preserved.
            long editorJarTimestamp = 0;
            Element editorJarTimeElem = rootElmt.getChild(XMLConstants.EDITORJAR_TIMESTAMP);
            if (editorJarTimeElem != null) {
                result = editorJarTimeElem.getTextTrim();
                if (result != null && !result.equals("")) {
                    editorJarTimestamp = Long.parseLong(result);
                }
            }
            long supportJarTimestamp = 0;
            Element supportJarTimeElem = rootElmt.getChild(XMLConstants.SUPPORTJAR_TIMESTAMP);
            if (supportJarTimeElem != null) {
                result = supportJarTimeElem.getTextTrim();
                if (result != null && !result.equals("")) {
                    supportJarTimestamp = Long.parseLong(result);
                }
            }
            Element imagePrefsElt = rootElmt.getChild(XMLConstants.IMAGES);
            if (imagePrefsElt != null) {
                String id = imagePrefsElt.getAttributeValue(XMLConstants.ID);
                setCopyHolderId(Integer.parseInt(id));
                String use = imagePrefsElt.getAttributeValue(XMLConstants.PERMISSION);
                if (use != null && !use.equals("")) {
                    setUsePermission(Byte.parseByte(use));
                } else {
                    setUsePermission(NodeImage.EVERYWHERE_USE);
                }
                String date = imagePrefsElt.getAttributeValue(XMLConstants.COPYRIGHTDATE);
                setCopyDate(date);
                String holder = imagePrefsElt.getChildText(XMLConstants.COPYRIGHTOWNER);
                setCopyHolder(holder);
                String urlString = imagePrefsElt.getChildText(XMLConstants.COPYRIGHTURL);
                setCopyUrl(urlString);
                String email = imagePrefsElt.getChildText(XMLConstants.COPYRIGHTEMAIL);
                setCopyEmail(email);
                String pub = imagePrefsElt.getAttributeValue(XMLConstants.PUBLICDOMAIN);
                setInPublicDomain(pub != null && pub.equals(XMLConstants.ONE));
            } else {
                setUsePermission(NodeImage.EVERYWHERE_USE);
            }
            setUserName(userName);
            setPassword(password);
            setEditorJarTimestamp(editorJarTimestamp);
            setSupportJarTimestamp(supportJarTimestamp);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.calcite.prepare;
import org.apache.calcite.adapter.enumerable.EnumerableCalc;
import org.apache.calcite.adapter.enumerable.EnumerableConvention;
import org.apache.calcite.adapter.enumerable.EnumerableInterpretable;
import org.apache.calcite.adapter.enumerable.EnumerableRel;
import org.apache.calcite.adapter.enumerable.EnumerableRules;
import org.apache.calcite.adapter.enumerable.RexToLixTranslator;
import org.apache.calcite.adapter.java.JavaTypeFactory;
import org.apache.calcite.avatica.AvaticaParameter;
import org.apache.calcite.avatica.ColumnMetaData;
import org.apache.calcite.avatica.Meta;
import org.apache.calcite.config.CalciteConnectionConfig;
import org.apache.calcite.config.CalciteSystemProperty;
import org.apache.calcite.interpreter.BindableConvention;
import org.apache.calcite.interpreter.Interpreters;
import org.apache.calcite.jdbc.CalcitePrepare;
import org.apache.calcite.jdbc.CalciteSchema;
import org.apache.calcite.jdbc.CalciteSchema.LatticeEntry;
import org.apache.calcite.linq4j.Linq4j;
import org.apache.calcite.linq4j.Ord;
import org.apache.calcite.linq4j.Queryable;
import org.apache.calcite.linq4j.function.Function1;
import org.apache.calcite.linq4j.tree.BinaryExpression;
import org.apache.calcite.linq4j.tree.BlockStatement;
import org.apache.calcite.linq4j.tree.Blocks;
import org.apache.calcite.linq4j.tree.ConstantExpression;
import org.apache.calcite.linq4j.tree.Expression;
import org.apache.calcite.linq4j.tree.Expressions;
import org.apache.calcite.linq4j.tree.MemberExpression;
import org.apache.calcite.linq4j.tree.MethodCallExpression;
import org.apache.calcite.linq4j.tree.NewExpression;
import org.apache.calcite.linq4j.tree.ParameterExpression;
import org.apache.calcite.materialize.MaterializationService;
import org.apache.calcite.plan.Contexts;
import org.apache.calcite.plan.Convention;
import org.apache.calcite.plan.ConventionTraitDef;
import org.apache.calcite.plan.RelOptCluster;
import org.apache.calcite.plan.RelOptCostFactory;
import org.apache.calcite.plan.RelOptPlanner;
import org.apache.calcite.plan.RelOptRule;
import org.apache.calcite.plan.RelOptTable;
import org.apache.calcite.plan.RelOptUtil;
import org.apache.calcite.plan.volcano.VolcanoPlanner;
import org.apache.calcite.rel.RelCollation;
import org.apache.calcite.rel.RelCollationTraitDef;
import org.apache.calcite.rel.RelCollations;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.RelRoot;
import org.apache.calcite.rel.core.Filter;
import org.apache.calcite.rel.core.Project;
import org.apache.calcite.rel.core.Sort;
import org.apache.calcite.rel.core.TableScan;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.rel.type.RelDataTypeField;
import org.apache.calcite.rex.RexBuilder;
import org.apache.calcite.rex.RexInputRef;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.rex.RexProgram;
import org.apache.calcite.runtime.Bindable;
import org.apache.calcite.runtime.Hook;
import org.apache.calcite.runtime.Typed;
import org.apache.calcite.schema.Schemas;
import org.apache.calcite.schema.Table;
import org.apache.calcite.server.CalciteServerStatement;
import org.apache.calcite.server.DdlExecutor;
import org.apache.calcite.sql.SqlBinaryOperator;
import org.apache.calcite.sql.SqlExplainFormat;
import org.apache.calcite.sql.SqlExplainLevel;
import org.apache.calcite.sql.SqlKind;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.SqlOperator;
import org.apache.calcite.sql.SqlOperatorTable;
import org.apache.calcite.sql.SqlUtil;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.sql.parser.SqlParseException;
import org.apache.calcite.sql.parser.SqlParser;
import org.apache.calcite.sql.parser.SqlParserImplFactory;
import org.apache.calcite.sql.parser.impl.SqlParserImpl;
import org.apache.calcite.sql.type.ExtraSqlTypes;
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.calcite.sql.util.SqlOperatorTables;
import org.apache.calcite.sql.validate.SqlConformance;
import org.apache.calcite.sql.validate.SqlValidator;
import org.apache.calcite.sql2rel.SqlRexConvertletTable;
import org.apache.calcite.sql2rel.SqlToRelConverter;
import org.apache.calcite.sql2rel.StandardConvertletTable;
import org.apache.calcite.tools.FrameworkConfig;
import org.apache.calcite.tools.Frameworks;
import org.apache.calcite.util.ImmutableIntList;
import org.apache.calcite.util.Pair;
import org.apache.calcite.util.Util;
import com.google.common.base.Supplier;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import java.lang.reflect.Type;
import java.math.BigDecimal;
import java.sql.DatabaseMetaData;
import java.sql.Types;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import static org.apache.calcite.util.Static.RESOURCE;
/**
 * Implementation of {@link org.apache.calcite.jdbc.CalcitePrepare}: prepares
 * SQL statements and queryables for execution.
 *
 * <p>This class is public so that projects that create their own JDBC driver
 * and server can fine-tune preferences. However, this class and its methods are
 * subject to change without notice.</p>
 */
public class CalcitePrepareImpl implements CalcitePrepare {
@Deprecated // to be removed before 2.0
public static final boolean ENABLE_ENUMERABLE =
    CalciteSystemProperty.ENABLE_ENUMERABLE.value();
@Deprecated // to be removed before 2.0
public static final boolean ENABLE_STREAM =
    CalciteSystemProperty.ENABLE_STREAM.value();
@Deprecated // to be removed before 2.0
public static final List<RelOptRule> ENUMERABLE_RULES =
    EnumerableRules.ENUMERABLE_RULES;
/** Whether the bindable convention should be the root convention of any
 * plan. If not, enumerable convention is the default. */
public final boolean enableBindable = Hook.ENABLE_BINDABLE.get(false);
// Statements whose result is known statically; prepare_ short-circuits
// these through simplePrepare without parsing or planning.
private static final Set<String> SIMPLE_SQLS =
    ImmutableSet.of(
        "SELECT 1",
        "select 1",
        "SELECT 1 FROM DUAL",
        "select 1 from dual",
        "values 1",
        "VALUES 1");
/** Creates a CalcitePrepareImpl. */
public CalcitePrepareImpl() {
}
/** Parses and validates {@code sql} without converting it to relational
 * algebra. */
@Override public ParseResult parse(
    Context context, String sql) {
  return parse_(context, sql, false, false, false);
}
/** Parses, validates and converts {@code sql} to relational algebra. */
@Override public ConvertResult convert(Context context, String sql) {
  return (ConvertResult) parse_(context, sql, true, false, false);
}
/** Analyzes a view's SQL for modifiability; when {@code fail} is true,
 * disqualifying conditions throw rather than returning a non-modifiable
 * result. */
@Override public AnalyzeViewResult analyzeView(Context context, String sql, boolean fail) {
  return (AnalyzeViewResult) parse_(context, sql, true, true, fail);
}
/** Shared implementation for {@link #parse}, {@link #convert} and
 * {@link #analyzeView}: parse, validate, then optionally convert (and
 * optionally analyze). */
private ParseResult parse_(Context context, String sql, boolean convert,
    boolean analyze, boolean fail) {
  final JavaTypeFactory factory = context.getTypeFactory();
  final CalciteCatalogReader reader =
      new CalciteCatalogReader(
          context.getRootSchema(),
          context.getDefaultSchemaPath(),
          factory,
          context.config());
  SqlNode parsed;
  try {
    parsed = createParser(sql).parseStmt();
  } catch (SqlParseException e) {
    throw new RuntimeException("parse failed", e);
  }
  final SqlValidator validator = createSqlValidator(context, reader);
  final SqlNode validated = validator.validate(parsed);
  if (!convert) {
    return new ParseResult(this, validator, sql, validated,
        validator.getValidatedNodeType(validated));
  }
  return convert_(context, sql, analyze, fail, reader, validator, validated);
}
/** Converts a validated SQL node to relational algebra, then either returns
 * the conversion result or hands off to {@link #analyze_} for view
 * analysis. */
private ParseResult convert_(Context context, String sql, boolean analyze,
    boolean fail, CalciteCatalogReader catalogReader, SqlValidator validator,
    SqlNode sqlNode1) {
  final JavaTypeFactory factory = context.getTypeFactory();
  final Convention convention =
      enableBindable ? BindableConvention.INSTANCE
          : EnumerableConvention.INSTANCE;
  // Use the Volcano because it can handle the traits.
  final VolcanoPlanner volcanoPlanner = new VolcanoPlanner();
  volcanoPlanner.addRelTraitDef(ConventionTraitDef.INSTANCE);
  final SqlToRelConverter.Config converterConfig =
      SqlToRelConverter.config().withTrimUnusedFields(true);
  final CalcitePreparingStmt stmt =
      new CalcitePreparingStmt(this, context, catalogReader, factory,
          context.getRootSchema(), null,
          createCluster(volcanoPlanner, new RexBuilder(factory)),
          convention, createConvertletTable());
  final SqlToRelConverter toRelConverter =
      stmt.getSqlToRelConverter(validator, catalogReader, converterConfig);
  final RelRoot relRoot = toRelConverter.convertQuery(sqlNode1, false, true);
  if (analyze) {
    return analyze_(validator, sql, sqlNode1, relRoot, fail);
  }
  return new ConvertResult(this, validator, sql, sqlNode1,
      validator.getValidatedNodeType(sqlNode1), relRoot);
}
/**
 * Analyzes a converted view query for modifiability: the view must be an
 * optional Project over an optional Filter over a single TableScan. On
 * success, computes the view-column-to-table-column mapping and the filter
 * constraint needed to support DML through the view. On any disqualifying
 * condition, either throws (when {@code fail}) or returns a result whose
 * table is null and whose modifiable flag is false.
 */
private AnalyzeViewResult analyze_(SqlValidator validator, String sql,
    SqlNode sqlNode, RelRoot root, boolean fail) {
  final RexBuilder rexBuilder = root.rel.getCluster().getRexBuilder();
  RelNode rel = root.rel;
  final RelNode viewRel = rel;
  // Peel off the optional Project and Filter to reach the underlying scan.
  Project project;
  if (rel instanceof Project) {
    project = (Project) rel;
    rel = project.getInput();
  } else {
    project = null;
  }
  Filter filter;
  if (rel instanceof Filter) {
    filter = (Filter) rel;
    rel = filter.getInput();
  } else {
    filter = null;
  }
  TableScan scan;
  if (rel instanceof TableScan) {
    scan = (TableScan) rel;
  } else {
    scan = null;
  }
  if (scan == null) {
    // Not based on a single table: never modifiable.
    if (fail) {
      throw validator.newValidationError(sqlNode,
          RESOURCE.modifiableViewMustBeBasedOnSingleTable());
    }
    return new AnalyzeViewResult(this, validator, sql, sqlNode,
        validator.getValidatedNodeType(sqlNode), root, null, null, null,
        null, false);
  }
  final RelOptTable targetRelTable = scan.getTable();
  final RelDataType targetRowType = targetRelTable.getRowType();
  final Table table = targetRelTable.unwrap(Table.class);
  final List<String> tablePath = targetRelTable.getQualifiedName();
  assert table != null;
  // columnMapping.get(i) = index of the target-table column that view
  // column i maps to, or -1 when the projection is not a plain column ref.
  List<Integer> columnMapping;
  final Map<Integer, RexNode> projectMap = new HashMap<>();
  if (project == null) {
    columnMapping = ImmutableIntList.range(0, targetRowType.getFieldCount());
  } else {
    columnMapping = new ArrayList<>();
    for (Ord<RexNode> node : Ord.zip(project.getProjects())) {
      if (node.e instanceof RexInputRef) {
        RexInputRef rexInputRef = (RexInputRef) node.e;
        int index = rexInputRef.getIndex();
        if (projectMap.get(index) != null) {
          // The same underlying column is projected twice: ambiguous for DML.
          if (fail) {
            throw validator.newValidationError(sqlNode,
                RESOURCE.moreThanOneMappedColumn(
                    targetRowType.getFieldList().get(index).getName(),
                    Util.last(tablePath)));
          }
          return new AnalyzeViewResult(this, validator, sql, sqlNode,
              validator.getValidatedNodeType(sqlNode), root, null, null, null,
              null, false);
        }
        projectMap.put(index, rexBuilder.makeInputRef(viewRel, node.i));
        columnMapping.add(index);
      } else {
        columnMapping.add(-1);
      }
    }
  }
  final RexNode constraint;
  if (filter != null) {
    constraint = filter.getCondition();
  } else {
    constraint = rexBuilder.makeLiteral(true);
  }
  final List<RexNode> filters = new ArrayList<>();
  // If we put a constraint in projectMap above, then filters will not be empty despite
  // being a modifiable view.
  final List<RexNode> filters2 = new ArrayList<>();
  boolean retry = false;
  RelOptUtil.inferViewPredicates(projectMap, filters, constraint);
  if (fail && !filters.isEmpty()) {
    // Re-run inference with a fresh map to see whether the residual filters
    // come solely from equality predicates already absorbed into projectMap.
    final Map<Integer, RexNode> projectMap2 = new HashMap<>();
    RelOptUtil.inferViewPredicates(projectMap2, filters2, constraint);
    if (!filters2.isEmpty()) {
      throw validator.newValidationError(sqlNode,
          RESOURCE.modifiableViewMustHaveOnlyEqualityPredicates());
    }
    retry = true;
  }
  // Check that all columns that are not projected have a constant value
  for (RelDataTypeField field : targetRowType.getFieldList()) {
    final int x = columnMapping.indexOf(field.getIndex());
    if (x >= 0) {
      assert Util.skip(columnMapping, x + 1).indexOf(field.getIndex()) < 0
          : "column projected more than once; should have checked above";
      continue; // target column is projected
    }
    if (projectMap.get(field.getIndex()) != null) {
      continue; // constant expression
    }
    if (field.getType().isNullable()) {
      continue; // don't need expression for nullable columns; NULL suffices
    }
    if (fail) {
      throw validator.newValidationError(sqlNode,
          RESOURCE.noValueSuppliedForViewColumn(field.getName(),
              Util.last(tablePath)));
    }
    return new AnalyzeViewResult(this, validator, sql, sqlNode,
        validator.getValidatedNodeType(sqlNode), root, null, null, null,
        null, false);
  }
  final boolean modifiable = filters.isEmpty() || retry && filters2.isEmpty();
  return new AnalyzeViewResult(this, validator, sql, sqlNode,
      validator.getValidatedNodeType(sqlNode), root, modifiable ? table : null,
      ImmutableList.copyOf(tablePath),
      constraint, ImmutableIntList.copyOf(columnMapping),
      modifiable);
}
/** Executes a DDL statement by delegating to the DDL executor supplied by
 * the configured parser factory. */
@Override public void executeDdl(Context context, SqlNode node) {
  final SqlParserImplFactory parserFactory = context.config()
      .parserFactory(SqlParserImplFactory.class, SqlParserImpl.FACTORY);
  parserFactory.getDdlExecutor().executeDdl(context, node);
}
/** Factory method for default SQL parser. Delegates to the two-argument
 * overload using the (deprecated) builder-based configuration. */
protected SqlParser createParser(String sql) {
  return createParser(sql, createParserConfig());
}
/** Factory method for SQL parser with a given configuration. */
protected SqlParser createParser(String sql, SqlParser.Config parserConfig) {
  return SqlParser.create(sql, parserConfig);
}
@Deprecated // to be removed before 2.0
// Builder-based variant retained for compatibility; prefer the
// SqlParser.Config overload above.
protected SqlParser createParser(String sql,
    SqlParser.ConfigBuilder parserConfig) {
  return createParser(sql, parserConfig.build());
}
/** Factory method for SQL parser configuration; override to change parser
 * defaults. */
protected SqlParser.Config parserConfig() {
  return SqlParser.config();
}
@Deprecated // to be removed before 2.0
// Prefer parserConfig(); retained while callers migrate off the builder API.
protected SqlParser.ConfigBuilder createParserConfig() {
  return SqlParser.configBuilder();
}
/** Factory method for default convertlet table (SQL-to-rex conversion
 * rules). */
protected SqlRexConvertletTable createConvertletTable() {
  return StandardConvertletTable.INSTANCE;
}
/** Factory method for cluster; override to customize cluster creation. */
protected RelOptCluster createCluster(RelOptPlanner planner,
    RexBuilder rexBuilder) {
  return RelOptCluster.create(planner, rexBuilder);
}
/** Creates a collection of planner factories.
*
* <p>The collection must have at least one factory, and each factory must
* create a planner. If the collection has more than one planner, Calcite will
* try each planner in turn.</p>
*
* <p>One of the things you can do with this mechanism is to try a simpler,
* faster, planner with a smaller rule set first, then fall back to a more
* complex planner for complex and costly queries.</p>
*
* <p>The default implementation returns a factory that calls
* {@link #createPlanner(org.apache.calcite.jdbc.CalcitePrepare.Context)}.</p>
*/
protected List<Function1<Context, RelOptPlanner>> createPlannerFactories() {
  // Default: a single factory; subclasses may return several, tried in order.
  return Collections.singletonList(
      context -> createPlanner(context, null, null));
}
/** Creates a query planner and initializes it with a default set of
 * rules. Equivalent to {@code createPlanner(prepareContext, null, null)}. */
protected RelOptPlanner createPlanner(CalcitePrepare.Context prepareContext) {
  return createPlanner(prepareContext, null, null);
}
/** Creates a query planner and initializes it with a default set of
 * rules, collation trait support (when enabled), and optional Spark rules.
 *
 * @param prepareContext  statement preparation context
 * @param externalContext planner context; defaults to one derived from the
 *                        connection config when null
 * @param costFactory     cost factory, or null for the planner default
 */
protected RelOptPlanner createPlanner(
    final CalcitePrepare.Context prepareContext,
    org.apache.calcite.plan.Context externalContext,
    RelOptCostFactory costFactory) {
  final org.apache.calcite.plan.Context plannerContext =
      externalContext != null
          ? externalContext
          : Contexts.of(prepareContext.config());
  final VolcanoPlanner volcanoPlanner =
      new VolcanoPlanner(costFactory, plannerContext);
  volcanoPlanner.addRelTraitDef(ConventionTraitDef.INSTANCE);
  if (CalciteSystemProperty.ENABLE_COLLATION_TRAIT.value()) {
    volcanoPlanner.addRelTraitDef(RelCollationTraitDef.INSTANCE);
  }
  volcanoPlanner.setTopDownOpt(prepareContext.config().topDownOpt());
  RelOptUtil.registerDefaultRules(volcanoPlanner,
      prepareContext.config().materializationsEnabled(),
      enableBindable);
  final CalcitePrepare.SparkHandler sparkHandler = prepareContext.spark();
  if (sparkHandler.enabled()) {
    sparkHandler.registerRules(
        new SparkHandler.RuleSetBuilder() {
          @Override public void addRule(RelOptRule rule) {
            // TODO:
          }
          @Override public void removeRule(RelOptRule rule) {
            // TODO:
          }
        });
  }
  Hook.PLANNER.run(volcanoPlanner); // allow test to add or remove rules
  return volcanoPlanner;
}
/** Prepares a statement expressed as a {@link Queryable}; -1 means no row
 * limit. */
@Override public <T> CalciteSignature<T> prepareQueryable(
    Context context,
    Queryable<T> queryable) {
  return prepare_(context, Query.of(queryable), queryable.getElementType(),
      -1);
}
/** Prepares a SQL statement with an explicit maximum row count. */
@Override public <T> CalciteSignature<T> prepareSql(
    Context context,
    Query<T> query,
    Type elementType,
    long maxRowCount) {
  return prepare_(context, query, elementType, maxRowCount);
}
/** Shared preparation path: short-circuits trivial statements, then tries
 * each planner factory in turn until one can plan the query. */
<T> CalciteSignature<T> prepare_(
    Context context,
    Query<T> query,
    Type elementType,
    long maxRowCount) {
  // Known-trivial statements bypass parsing and planning entirely.
  if (SIMPLE_SQLS.contains(query.sql)) {
    return simplePrepare(context, query.sql);
  }
  final JavaTypeFactory typeFactory = context.getTypeFactory();
  final CalciteCatalogReader reader =
      new CalciteCatalogReader(
          context.getRootSchema(),
          context.getDefaultSchemaPath(),
          typeFactory,
          context.config());
  final List<Function1<Context, RelOptPlanner>> factories =
      createPlannerFactories();
  if (factories.isEmpty()) {
    throw new AssertionError("no planner factories");
  }
  // Remember the last CannotPlanException so it can be rethrown if every
  // planner fails.
  RuntimeException failure = Util.FoundOne.NULL;
  for (Function1<Context, RelOptPlanner> factory : factories) {
    final RelOptPlanner planner = factory.apply(context);
    if (planner == null) {
      throw new AssertionError("factory returned null planner");
    }
    try {
      return prepare2_(context, query, elementType, maxRowCount,
          reader, planner);
    } catch (RelOptPlanner.CannotPlanException e) {
      failure = e;
    }
  }
  throw failure;
}
/** Quickly prepares a simple SQL statement ("SELECT 1" and friends),
 * circumventing the usual preparation process: the result is always a
 * single row holding the integer 1. */
private <T> CalciteSignature<T> simplePrepare(Context context, String sql) {
  final JavaTypeFactory factory = context.getTypeFactory();
  final RelDataType rowType =
      factory.builder()
          .add(SqlUtil.deriveAliasFromOrdinal(0), SqlTypeName.INTEGER)
          .build();
  @SuppressWarnings("unchecked")
  final List<T> rows = (List) ImmutableList.of(1);
  // No column origins for a constant result.
  final List<String> nullOrigin = null;
  final List<List<String>> originList =
      Collections.nCopies(rowType.getFieldCount(), nullOrigin);
  final List<ColumnMetaData> metadata =
      getColumnMetaDataList(factory, rowType, rowType, originList);
  final Meta.CursorFactory cursors =
      Meta.CursorFactory.deduce(metadata, null);
  return new CalciteSignature<>(
      sql,
      ImmutableList.of(),
      ImmutableMap.of(),
      rowType,
      metadata,
      cursors,
      context.getRootSchema(),
      ImmutableList.of(),
      -1, dataContext -> Linq4j.asEnumerable(rows),
      Meta.StatementType.SELECT);
}
/**
 * Deduces the broad type of statement.
 * Currently returns SELECT for most statement types, but this may change.
 *
 * @param kind Kind of statement
 */
private Meta.StatementType getStatementType(SqlKind kind) {
  if (kind == SqlKind.INSERT
      || kind == SqlKind.DELETE
      || kind == SqlKind.UPDATE) {
    return Meta.StatementType.IS_DML;
  }
  return Meta.StatementType.SELECT;
}
/**
 * Deduces the broad type of statement for a prepare result.
 * Currently returns SELECT for most statement types, but this may change.
 *
 * @param preparedResult Prepare result
 */
private Meta.StatementType getStatementType(Prepare.PreparedResult preparedResult) {
  return preparedResult.isDml()
      ? Meta.StatementType.IS_DML
      : Meta.StatementType.SELECT;
}
/** Prepares a query (given as SQL text, a {@code Queryable}, or a relational
 * expression) against a specific planner, producing an executable signature.
 *
 * <p>Only one of {@code query.sql}, {@code query.queryable} and
 * {@code query.rel} is expected to be set; they are tried in that order.
 * DDL statements are executed immediately and return a row-less signature.
 * Throws {@link RelOptPlanner.CannotPlanException} (propagated from the
 * planner) if this planner cannot handle the query; the caller may then try
 * another planner factory. */
<T> CalciteSignature<T> prepare2_(
Context context,
Query<T> query,
Type elementType,
long maxRowCount,
CalciteCatalogReader catalogReader,
RelOptPlanner planner) {
final JavaTypeFactory typeFactory = context.getTypeFactory();
// Object[] rows are delivered as-is; any other element type uses a custom
// (typed) representation.
final EnumerableRel.Prefer prefer;
if (elementType == Object[].class) {
prefer = EnumerableRel.Prefer.ARRAY;
} else {
prefer = EnumerableRel.Prefer.CUSTOM;
}
final Convention resultConvention =
enableBindable ? BindableConvention.INSTANCE
: EnumerableConvention.INSTANCE;
final CalcitePreparingStmt preparingStmt =
new CalcitePreparingStmt(this, context, catalogReader, typeFactory,
context.getRootSchema(), prefer, createCluster(planner, new RexBuilder(typeFactory)),
resultConvention, createConvertletTable());
final RelDataType x;
final Prepare.PreparedResult preparedResult;
final Meta.StatementType statementType;
if (query.sql != null) {
// SQL text: parse (with connection-configured casing/quoting/conformance),
// then either execute DDL immediately or validate and plan.
final CalciteConnectionConfig config = context.config();
SqlParser.Config parserConfig = parserConfig()
.withQuotedCasing(config.quotedCasing())
.withUnquotedCasing(config.unquotedCasing())
.withQuoting(config.quoting())
.withConformance(config.conformance())
.withCaseSensitive(config.caseSensitive());
final SqlParserImplFactory parserFactory =
config.parserFactory(SqlParserImplFactory.class, null);
if (parserFactory != null) {
parserConfig = parserConfig.withParserFactory(parserFactory);
}
SqlParser parser = createParser(query.sql, parserConfig);
SqlNode sqlNode;
try {
sqlNode = parser.parseStmt();
statementType = getStatementType(sqlNode.getKind());
} catch (SqlParseException e) {
throw new RuntimeException(
"parse failed: " + e.getMessage(), e);
}
Hook.PARSE_TREE.run(new Object[] {query.sql, sqlNode});
if (sqlNode.getKind().belongsTo(SqlKind.DDL)) {
// DDL is executed eagerly; the returned signature carries no rows.
executeDdl(context, sqlNode);
return new CalciteSignature<>(query.sql,
ImmutableList.of(),
ImmutableMap.of(), null,
ImmutableList.of(), Meta.CursorFactory.OBJECT,
null, ImmutableList.of(), -1, null,
Meta.StatementType.OTHER_DDL);
}
final SqlValidator validator =
createSqlValidator(context, catalogReader);
preparedResult = preparingStmt.prepareSql(
sqlNode, Object.class, validator, true);
switch (sqlNode.getKind()) {
case INSERT:
case DELETE:
case UPDATE:
case EXPLAIN:
// FIXME: getValidatedNodeType is wrong for DML
x = RelOptUtil.createDmlRowType(sqlNode.getKind(), typeFactory);
break;
default:
x = validator.getValidatedNodeType(sqlNode);
}
} else if (query.queryable != null) {
// Linq4j Queryable: derive the row type from the element type.
x = context.getTypeFactory().createType(elementType);
preparedResult =
preparingStmt.prepareQueryable(query.queryable, x);
statementType = getStatementType(preparedResult);
} else {
// Pre-built relational expression.
assert query.rel != null;
x = query.rel.getRowType();
preparedResult = preparingStmt.prepareRel(query.rel);
statementType = getStatementType(preparedResult);
}
// Describe dynamic parameters for the JDBC layer.
final List<AvaticaParameter> parameters = new ArrayList<>();
final RelDataType parameterRowType = preparedResult.getParameterRowType();
for (RelDataTypeField field : parameterRowType.getFieldList()) {
RelDataType type = field.getType();
parameters.add(
new AvaticaParameter(
false,
getPrecision(type),
getScale(type),
getTypeOrdinal(type),
getTypeName(type),
getClassName(type),
field.getName()));
}
// Scalar results are wrapped in a single-column struct for JDBC.
RelDataType jdbcType = makeStruct(typeFactory, x);
final List<List<String>> originList = preparedResult.getFieldOrigins();
final List<ColumnMetaData> columns =
getColumnMetaDataList(typeFactory, x, jdbcType, originList);
Class resultClazz = null;
if (preparedResult instanceof Typed) {
resultClazz = (Class) ((Typed) preparedResult).getElementType();
}
final Meta.CursorFactory cursorFactory =
preparingStmt.resultConvention == BindableConvention.INSTANCE
? Meta.CursorFactory.ARRAY
: Meta.CursorFactory.deduce(columns, resultClazz);
//noinspection unchecked
final Bindable<T> bindable = preparedResult.getBindable(cursorFactory);
return new CalciteSignature<>(
query.sql,
parameters,
preparingStmt.internalParameters,
jdbcType,
columns,
cursorFactory,
context.getRootSchema(),
preparedResult instanceof Prepare.PreparedResultImpl
? ((Prepare.PreparedResultImpl) preparedResult).collations
: ImmutableList.of(),
maxRowCount,
bindable,
statementType);
}
/** Creates a SQL validator whose operator table chains the connection's
 * configured function table (defaulting to the standard operator table)
 * with the catalog reader's user-defined functions, in that order. */
private SqlValidator createSqlValidator(Context context,
    CalciteCatalogReader catalogReader) {
  final CalciteConnectionConfig connectionConfig = context.config();
  final List<SqlOperatorTable> tables = new ArrayList<>();
  tables.add(
      connectionConfig.fun(SqlOperatorTable.class,
          SqlStdOperatorTable.instance()));
  tables.add(catalogReader);
  final SqlValidator.Config validatorConfig = SqlValidator.Config.DEFAULT
      .withLenientOperatorLookup(connectionConfig.lenientOperatorLookup())
      .withSqlConformance(connectionConfig.conformance())
      .withDefaultNullCollation(connectionConfig.defaultNullCollation())
      .withIdentifierExpansion(true);
  return new CalciteSqlValidator(SqlOperatorTables.chain(tables),
      catalogReader, context.getTypeFactory(), validatorConfig);
}
/** Converts each field of {@code jdbcType} to Avatica column metadata,
 * pairing it with the corresponding field type of {@code x} (when {@code x}
 * is a struct) and its entry in {@code originList}. */
private List<ColumnMetaData> getColumnMetaDataList(
    JavaTypeFactory typeFactory, RelDataType x, RelDataType jdbcType,
    List<List<String>> originList) {
  final List<RelDataTypeField> fieldList = jdbcType.getFieldList();
  final List<ColumnMetaData> columns = new ArrayList<>(fieldList.size());
  for (int i = 0; i < fieldList.size(); i++) {
    final RelDataTypeField field = fieldList.get(i);
    final RelDataType type = field.getType();
    // When x is not a struct, jdbcType is x wrapped in a one-field record;
    // in that case the column's own type doubles as the field type.
    final RelDataType fieldType =
        x.isStruct() ? x.getFieldList().get(i).getType() : type;
    columns.add(
        metaData(typeFactory, i, field.getName(), type, fieldType,
            originList.get(i)));
  }
  return columns;
}
/** Builds Avatica metadata for one result column.
 *
 * <p>The positional constants below are annotated with the Avatica
 * {@code ColumnMetaData} constructor parameter each one fills.
 * {@code origins} (possibly null) is read from the end by {@link #origin};
 * presumably its last elements are (schema, table, column) — confirm
 * against {@code PreparedResult.getFieldOrigins}. */
private ColumnMetaData metaData(JavaTypeFactory typeFactory, int ordinal,
String fieldName, RelDataType type, RelDataType fieldType,
List<String> origins) {
final ColumnMetaData.AvaticaType avaticaType =
avaticaType(typeFactory, type, fieldType);
return new ColumnMetaData(
ordinal,
false, // autoIncrement
true, // caseSensitive
false, // searchable
false, // currency
type.isNullable()
? DatabaseMetaData.columnNullable
: DatabaseMetaData.columnNoNulls,
true, // signed
type.getPrecision(), // displaySize
fieldName, // label
origin(origins, 0), // columnName: last element of the origin path
origin(origins, 2), // schemaName
getPrecision(type),
getScale(type),
origin(origins, 1), // tableName
null, // catalogName
avaticaType,
true, // readOnly
false, // writable
false, // definitelyWritable
avaticaType.columnClassName());
}
/** Converts a {@link RelDataType} to an Avatica type descriptor.
 *
 * <p>Types with a component type (collections) become Avatica array types
 * (recursing on the component); STRUCT becomes a nested column list;
 * everything else is a scalar. GEOMETRY is advertised with the VARCHAR
 * JDBC ordinal. */
private ColumnMetaData.AvaticaType avaticaType(JavaTypeFactory typeFactory,
RelDataType type, RelDataType fieldType) {
final String typeName = getTypeName(type);
if (type.getComponentType() != null) {
final ColumnMetaData.AvaticaType componentType =
avaticaType(typeFactory, type.getComponentType(), null);
final Type clazz = typeFactory.getJavaClass(type.getComponentType());
final ColumnMetaData.Rep rep = ColumnMetaData.Rep.of(clazz);
assert rep != null;
return ColumnMetaData.array(componentType, typeName, rep);
} else {
int typeOrdinal = getTypeOrdinal(type);
switch (typeOrdinal) {
case Types.STRUCT:
// Each struct field becomes a nested column; no field type or origin
// information is available at this level.
final List<ColumnMetaData> columns = new ArrayList<>(type.getFieldList().size());
for (RelDataTypeField field : type.getFieldList()) {
columns.add(
metaData(typeFactory, field.getIndex(), field.getName(),
field.getType(), null, null));
}
return ColumnMetaData.struct(columns);
case ExtraSqlTypes.GEOMETRY:
// Report geometry columns to JDBC clients under the VARCHAR ordinal.
typeOrdinal = Types.VARCHAR;
// fall through
default:
// Prefer the field type's Java class when one was supplied; it may
// carry a more specific representation than the raw column type.
final Type clazz =
typeFactory.getJavaClass(Util.first(fieldType, type));
final ColumnMetaData.Rep rep = ColumnMetaData.Rep.of(clazz);
assert rep != null;
return ColumnMetaData.scalar(typeOrdinal, typeName, rep);
}
}
}
/** Returns the element {@code offsetFromEnd} positions from the end of
 * {@code origins}, or null if the list is null or too short. */
private static String origin(List<String> origins, int offsetFromEnd) {
  if (origins == null || offsetFromEnd >= origins.size()) {
    return null;
  }
  return origins.get(origins.size() - 1 - offsetFromEnd);
}
/** Returns the JDBC type ordinal for {@code type}. */
private int getTypeOrdinal(RelDataType type) {
  final SqlTypeName sqlTypeName = type.getSqlTypeName();
  return sqlTypeName.getJdbcOrdinal();
}
/** Returns the Java class name reported for a column: always
 * {@code java.lang.Object}, regardless of the column type (the type
 * argument is deliberately unused — see CALCITE-2613). */
private static String getClassName(@SuppressWarnings("unused") RelDataType type) {
return Object.class.getName(); // CALCITE-2613
}
/** Returns the scale of {@code type}, or 0 when no scale is specified. */
private static int getScale(RelDataType type) {
  final int scale = type.getScale();
  if (scale == RelDataType.SCALE_NOT_SPECIFIED) {
    return 0;
  }
  return scale;
}
/** Returns the precision of {@code type}, or 0 when no precision is
 * specified. */
private static int getPrecision(RelDataType type) {
  final int precision = type.getPrecision();
  if (precision == RelDataType.PRECISION_NOT_SPECIFIED) {
    return 0;
  }
  return precision;
}
/** Returns the type name in string form. Does not include precision, scale
* or whether nulls are allowed. Example: "DECIMAL" not "DECIMAL(7, 2)";
* "INTEGER" not "JavaType(int)".
*
* <p>Compound interval types are rewritten to the "INTERVAL_X_TO_Y" form;
* all other types use the plain {@link SqlTypeName} name. */
private static String getTypeName(RelDataType type) {
final SqlTypeName sqlTypeName = type.getSqlTypeName();
switch (sqlTypeName) {
case ARRAY:
case MULTISET:
case MAP:
case ROW:
// Composite types include their component types in the name.
return type.toString(); // e.g. "INTEGER ARRAY"
case INTERVAL_YEAR_MONTH:
return "INTERVAL_YEAR_TO_MONTH";
case INTERVAL_DAY_HOUR:
return "INTERVAL_DAY_TO_HOUR";
case INTERVAL_DAY_MINUTE:
return "INTERVAL_DAY_TO_MINUTE";
case INTERVAL_DAY_SECOND:
return "INTERVAL_DAY_TO_SECOND";
case INTERVAL_HOUR_MINUTE:
return "INTERVAL_HOUR_TO_MINUTE";
case INTERVAL_HOUR_SECOND:
return "INTERVAL_HOUR_TO_SECOND";
case INTERVAL_MINUTE_SECOND:
return "INTERVAL_MINUTE_TO_SECOND";
default:
return sqlTypeName.getName(); // e.g. "DECIMAL", "INTERVAL_YEAR"
}
}
/** Populates a materialization's relational expressions, using a catalog
 * reader rooted at the materialized table's schema (the view's schema path
 * may differ from the current connection's). */
protected void populateMaterializations(Context context,
    RelOptCluster cluster, Prepare.Materialization materialization) {
  // REVIEW: initialize queryRel and tableRel inside MaterializationService,
  // not here?
  try {
    final CalciteSchema schema = materialization.materializedTable.schema;
    final CalciteCatalogReader catalogReader =
        new CalciteCatalogReader(
            schema.root(),
            materialization.viewSchemaPath,
            context.getTypeFactory(),
            context.config());
    new CalciteMaterializer(this, context, catalogReader, schema, cluster,
        createConvertletTable())
        .populate(materialization);
  } catch (Exception e) {
    // Wrap with the table path so the failing materialization is obvious.
    throw new RuntimeException("While populating materialization "
        + materialization.materializedTable.path(), e);
  }
}
/** Wraps a non-struct type into a single-field struct (field "$0");
 * struct types are returned unchanged. */
private static RelDataType makeStruct(
    RelDataTypeFactory typeFactory,
    RelDataType type) {
  return type.isStruct()
      ? type
      : typeFactory.builder().add("$0", type).build();
}
/** Executes a prepare action, deriving the configuration from the action
 * itself.
 *
 * @deprecated Use {@link #perform(CalciteServerStatement, FrameworkConfig,
 * Frameworks.BasePrepareAction)}; this overload merely delegates to it. */
@Deprecated // to be removed before 2.0
public <R> R perform(CalciteServerStatement statement,
Frameworks.PrepareAction<R> action) {
return perform(statement, action.getConfig(), action);
}
/** Executes a prepare action against the statement's prepare context,
 * handing the action a freshly-created planner cluster and catalog
 * reader. */
public <R> R perform(CalciteServerStatement statement,
    FrameworkConfig config, Frameworks.BasePrepareAction<R> action) {
  final CalcitePrepare.Context prepareContext =
      statement.createPrepareContext();
  final JavaTypeFactory typeFactory = prepareContext.getTypeFactory();
  // Prefer the explicitly configured default schema; otherwise fall back
  // to the statement's root schema.
  final CalciteSchema schema;
  if (config.getDefaultSchema() != null) {
    schema = CalciteSchema.from(config.getDefaultSchema());
  } else {
    schema = prepareContext.getRootSchema();
  }
  final CalciteCatalogReader catalogReader =
      new CalciteCatalogReader(schema.root(),
          schema.path(null),
          typeFactory,
          prepareContext.config());
  final RelOptCluster cluster =
      createCluster(
          createPlanner(prepareContext, config.getContext(),
              config.getCostFactory()),
          new RexBuilder(typeFactory));
  return action.apply(cluster, catalogReader,
      prepareContext.getRootSchema().plus(), statement);
}
/** Holds state for the process of preparing a SQL statement.
 *
 * <p>Drives the pipeline: parse/translate to relational algebra, flatten
 * types, trim unused fields, optimize, then implement as a {@link Bindable}.
 * Also acts as the view expander used when the query references views. */
static class CalcitePreparingStmt extends Prepare
implements RelOptTable.ViewExpander {
protected final RelOptPlanner planner;
protected final RexBuilder rexBuilder;
// Owning prepare implementation; used for parser/validator/cluster creation.
protected final CalcitePrepareImpl prepare;
protected final CalciteSchema schema;
protected final RelDataTypeFactory typeFactory;
protected final SqlRexConvertletTable convertletTable;
private final EnumerableRel.Prefer prefer;
private final RelOptCluster cluster;
// Extra parameters passed to the generated bindable (e.g. "_conformance").
private final Map<String, Object> internalParameters =
new LinkedHashMap<>();
@SuppressWarnings("unused")
private int expansionDepth;
// Lazily created; see getSqlValidator().
private SqlValidator sqlValidator;
CalcitePreparingStmt(CalcitePrepareImpl prepare,
Context context,
CatalogReader catalogReader,
RelDataTypeFactory typeFactory,
CalciteSchema schema,
EnumerableRel.Prefer prefer,
RelOptCluster cluster,
Convention resultConvention,
SqlRexConvertletTable convertletTable) {
super(context, catalogReader, resultConvention);
this.prepare = prepare;
this.schema = schema;
this.prefer = prefer;
this.cluster = cluster;
// Planner and RexBuilder are taken from the supplied cluster.
this.planner = cluster.getPlanner();
this.rexBuilder = cluster.getRexBuilder();
this.typeFactory = typeFactory;
this.convertletTable = convertletTable;
}
// No per-statement initialization required.
@Override protected void init(Class runtimeContextClass) {
}
/** Prepares a Linq4j {@code Queryable} by translating it to a relational
 * expression in a fresh cluster. */
public PreparedResult prepareQueryable(
final Queryable queryable,
RelDataType resultType) {
return prepare_(() -> {
final RelOptCluster cluster =
prepare.createCluster(planner, rexBuilder);
return new LixToRelTranslator(cluster, CalcitePreparingStmt.this)
.translate(queryable);
}, resultType);
}
/** Prepares a pre-built relational expression. */
public PreparedResult prepareRel(final RelNode rel) {
return prepare_(() -> rel, rel.getRowType());
}
/** Shared non-SQL preparation path: flatten, trim, optimize, implement. */
private PreparedResult prepare_(Supplier<RelNode> fn,
RelDataType resultType) {
Class runtimeContextClass = Object.class;
init(runtimeContextClass);
final RelNode rel = fn.get();
final RelDataType rowType = rel.getRowType();
// Identity projection: field i of the result maps to field i of rel.
final List<Pair<Integer, String>> fields =
Pair.zip(ImmutableIntList.identity(rowType.getFieldCount()),
rowType.getFieldNames());
final RelCollation collation =
rel instanceof Sort
? ((Sort) rel).collation
: RelCollations.EMPTY;
RelRoot root = new RelRoot(rel, resultType, SqlKind.SELECT, fields,
collation, new ArrayList<>());
if (timingTracer != null) {
timingTracer.traceTime("end sql2rel");
}
final RelDataType jdbcType =
makeStruct(rexBuilder.getTypeFactory(), resultType);
// Non-SQL queries carry no field origins and no dynamic parameters.
fieldOrigins = Collections.nCopies(jdbcType.getFieldCount(), null);
parameterRowType = rexBuilder.getTypeFactory().builder().build();
// Structured type flattening, view expansion, and plugging in
// physical storage.
root = root.withRel(flattenTypes(root.rel, true));
// Trim unused fields.
root = trimUnusedFields(root);
final List<Materialization> materializations = ImmutableList.of();
final List<CalciteSchema.LatticeEntry> lattices = ImmutableList.of();
root = optimize(root, materializations, lattices);
if (timingTracer != null) {
timingTracer.traceTime("end optimization");
}
return implement(root);
}
@Override protected SqlToRelConverter getSqlToRelConverter(
SqlValidator validator,
CatalogReader catalogReader,
SqlToRelConverter.Config config) {
return new SqlToRelConverter(this, validator, catalogReader, cluster,
convertletTable, config);
}
/** Delegates type flattening to the Spark handler when it is enabled;
 * otherwise leaves the tree untouched. */
@Override public RelNode flattenTypes(
RelNode rootRel,
boolean restructure) {
final SparkHandler spark = context.spark();
if (spark.enabled()) {
return spark.flattenTypes(planner, rootRel, restructure);
}
return rootRel;
}
@Override protected RelNode decorrelate(SqlToRelConverter sqlToRelConverter,
SqlNode query, RelNode rootRel) {
return sqlToRelConverter.decorrelate(query, rootRel);
}
/** Parses and converts a view's defining query, resolving names against
 * the view's own schema path. */
@Override public RelRoot expandView(RelDataType rowType, String queryString,
List<String> schemaPath, List<String> viewPath) {
expansionDepth++;
SqlParser parser = prepare.createParser(queryString);
SqlNode sqlNode;
try {
sqlNode = parser.parseQuery();
} catch (SqlParseException e) {
throw new RuntimeException("parse failed", e);
}
// View may have different schema path than current connection.
final CatalogReader catalogReader =
this.catalogReader.withSchemaPath(schemaPath);
SqlValidator validator = createSqlValidator(catalogReader);
final SqlToRelConverter.Config config =
SqlToRelConverter.config().withTrimUnusedFields(true);
SqlToRelConverter sqlToRelConverter =
getSqlToRelConverter(validator, catalogReader, config);
RelRoot root =
sqlToRelConverter.convertQuery(sqlNode, true, false);
--expansionDepth;
return root;
}
protected SqlValidator createSqlValidator(CatalogReader catalogReader) {
return prepare.createSqlValidator(context,
(CalciteCatalogReader) catalogReader);
}
// Lazily creates and caches the validator for this statement.
@Override protected SqlValidator getSqlValidator() {
if (sqlValidator == null) {
sqlValidator = createSqlValidator(catalogReader);
}
return sqlValidator;
}
@Override protected PreparedResult createPreparedExplanation(
RelDataType resultType,
RelDataType parameterRowType,
RelRoot root,
SqlExplainFormat format,
SqlExplainLevel detailLevel) {
return new CalcitePreparedExplain(resultType, parameterRowType, root,
format, detailLevel);
}
/** Compiles the optimized plan into a {@link Bindable}, either via the
 * interpreter (bindable convention) or via generated enumerable code. */
@Override protected PreparedResult implement(RelRoot root) {
Hook.PLAN_BEFORE_IMPLEMENTATION.run(root);
RelDataType resultType = root.rel.getRowType();
boolean isDml = root.kind.belongsTo(SqlKind.DML);
final Bindable bindable;
if (resultConvention == BindableConvention.INSTANCE) {
bindable = Interpreters.bindable(root.rel);
} else {
EnumerableRel enumerable = (EnumerableRel) root.rel;
if (!root.isRefTrivial()) {
// The root's field mapping is not the identity: add a calc that
// projects the requested fields in order.
final List<RexNode> projects = new ArrayList<>();
final RexBuilder rexBuilder = enumerable.getCluster().getRexBuilder();
for (int field : Pair.left(root.fields)) {
projects.add(rexBuilder.makeInputRef(enumerable, field));
}
RexProgram program = RexProgram.create(enumerable.getRowType(),
projects, null, root.validatedRowType, rexBuilder);
enumerable = EnumerableCalc.create(enumerable, program);
}
try {
// Code generation may need the catalog reader; expose it via the
// thread-local for the duration of toBindable.
CatalogReader.THREAD_LOCAL.set(catalogReader);
final SqlConformance conformance = context.config().conformance();
internalParameters.put("_conformance", conformance);
bindable = EnumerableInterpretable.toBindable(internalParameters,
context.spark(), enumerable, prefer);
} finally {
CatalogReader.THREAD_LOCAL.remove();
}
}
if (timingTracer != null) {
timingTracer.traceTime("end codegen");
}
if (timingTracer != null) {
timingTracer.traceTime("end compilation");
}
return new PreparedResultImpl(
resultType,
parameterRowType,
fieldOrigins,
root.collation.getFieldCollations().isEmpty()
? ImmutableList.of()
: ImmutableList.of(root.collation),
root.rel,
mapTableModOp(isDml, root.kind),
isDml) {
@Override public String getCode() {
throw new UnsupportedOperationException();
}
@Override public Bindable getBindable(Meta.CursorFactory cursorFactory) {
return bindable;
}
@Override public Type getElementType() {
return ((Typed) bindable).getElementType();
}
};
}
/** Returns applicable materializations (when enabled), populating each
 * one's relational expressions before returning it. */
@Override protected List<Materialization> getMaterializations() {
final List<Prepare.Materialization> materializations =
context.config().materializationsEnabled()
? MaterializationService.instance().query(schema)
: ImmutableList.of();
for (Prepare.Materialization materialization : materializations) {
prepare.populateMaterializations(context, cluster, materialization);
}
return materializations;
}
@Override protected List<LatticeEntry> getLattices() {
return Schemas.getLatticeEntries(schema);
}
}
/** An {@code EXPLAIN} statement, prepared and ready to execute. */
private static class CalcitePreparedExplain extends Prepare.PreparedExplain {
CalcitePreparedExplain(
RelDataType resultType,
RelDataType parameterRowType,
RelRoot root,
SqlExplainFormat format,
SqlExplainLevel detailLevel) {
super(resultType, parameterRowType, root, format, detailLevel);
}
/** Returns a bindable that yields the plan text as a single row; for
 * ARRAY-style cursors the value is wrapped in a one-element array. */
@Override public Bindable getBindable(final Meta.CursorFactory cursorFactory) {
final String explanation = getCode();
return dataContext -> {
switch (cursorFactory.style) {
case ARRAY:
return Linq4j.singletonEnumerable(new String[] {explanation});
case OBJECT:
default:
return Linq4j.singletonEnumerable(explanation);
}
};
}
}
/** Translator from Java AST to {@link RexNode}. */
interface ScalarTranslator {
/** Translates the single result expression of a block. */
RexNode toRex(BlockStatement statement);
/** Translates each expression of a block to a {@link RexNode}. */
List<RexNode> toRexList(BlockStatement statement);
/** Translates one Java expression. */
RexNode toRex(Expression expression);
/** Returns a translator that resolves the given parameter expressions to
 * the given pre-translated values. */
ScalarTranslator bind(List<ParameterExpression> parameterList,
List<RexNode> values);
}
/** Basic translator. Handles member access, the comparison operators it
 * knows about, calls to Java methods that have SQL equivalents, and
 * constants. Parameter references are not supported here; see
 * {@link LambdaScalarTranslator}, obtained via {@link #bind}. */
static class EmptyScalarTranslator implements ScalarTranslator {
  private final RexBuilder rexBuilder;

  EmptyScalarTranslator(RexBuilder rexBuilder) {
    this.rexBuilder = rexBuilder;
  }

  public static ScalarTranslator empty(RexBuilder builder) {
    return new EmptyScalarTranslator(builder);
  }

  @Override public List<RexNode> toRexList(BlockStatement statement) {
    final List<Expression> simpleList = simpleList(statement);
    final List<RexNode> list = new ArrayList<>();
    for (Expression expression1 : simpleList) {
      list.add(toRex(expression1));
    }
    return list;
  }

  @Override public RexNode toRex(BlockStatement statement) {
    return toRex(Blocks.simple(statement));
  }

  /** Returns the constructor arguments of a "new T(...)" result
   * expression, or the result expression itself as a singleton list. */
  private static List<Expression> simpleList(BlockStatement statement) {
    Expression simple = Blocks.simple(statement);
    if (simple instanceof NewExpression) {
      NewExpression newExpression = (NewExpression) simple;
      return newExpression.arguments;
    } else {
      return Collections.singletonList(simple);
    }
  }

  @Override public RexNode toRex(Expression expression) {
    switch (expression.getNodeType()) {
    case MemberAccess:
      // Case-sensitive name match because name was previously resolved.
      return rexBuilder.makeFieldAccess(
          toRex(
              ((MemberExpression) expression).expression),
          ((MemberExpression) expression).field.getName(),
          true);
    case GreaterThan:
      return binary(expression, SqlStdOperatorTable.GREATER_THAN);
    case LessThan:
      return binary(expression, SqlStdOperatorTable.LESS_THAN);
    case Parameter:
      return parameter((ParameterExpression) expression);
    case Call:
      MethodCallExpression call = (MethodCallExpression) expression;
      SqlOperator operator =
          RexToLixTranslator.JAVA_TO_SQL_METHOD_MAP.get(call.method);
      if (operator != null) {
        // Target expression (if any) becomes the first operand.
        return rexBuilder.makeCall(
            type(call),
            operator,
            toRex(
                Expressions.<Expression>list()
                    .appendIfNotNull(call.targetExpression)
                    .appendAll(call.expressions)));
      }
      // No SQL equivalent registered for this Java method.
      // (Message fixed: previously read "Could translate", inverting its
      // meaning.)
      throw new RuntimeException(
          "Could not translate call to method " + call.method);
    case Constant:
      final ConstantExpression constant =
          (ConstantExpression) expression;
      Object value = constant.value;
      if (value instanceof Number) {
        Number number = (Number) value;
        if (value instanceof Double || value instanceof Float) {
          return rexBuilder.makeApproxLiteral(
              BigDecimal.valueOf(number.doubleValue()));
        } else if (value instanceof BigDecimal) {
          return rexBuilder.makeExactLiteral((BigDecimal) value);
        } else {
          return rexBuilder.makeExactLiteral(
              BigDecimal.valueOf(number.longValue()));
        }
      } else if (value instanceof Boolean) {
        return rexBuilder.makeLiteral((Boolean) value);
      } else {
        return rexBuilder.makeLiteral(constant.toString());
      }
    default:
      throw new UnsupportedOperationException(
          "unknown expression type " + expression.getNodeType() + " "
          + expression);
    }
  }

  /** Translates a binary comparison into a call to the given operator. */
  private RexNode binary(Expression expression, SqlBinaryOperator op) {
    BinaryExpression call = (BinaryExpression) expression;
    return rexBuilder.makeCall(type(call), op,
        toRex(ImmutableList.of(call.expression0, call.expression1)));
  }

  private List<RexNode> toRex(List<Expression> expressions) {
    final List<RexNode> list = new ArrayList<>();
    for (Expression expression : expressions) {
      list.add(toRex(expression));
    }
    return list;
  }

  /** Derives the rel type of a Java expression via the type factory. */
  protected RelDataType type(Expression expression) {
    final Type type = expression.getType();
    return ((JavaTypeFactory) rexBuilder.getTypeFactory()).createType(type);
  }

  @Override public ScalarTranslator bind(
      List<ParameterExpression> parameterList, List<RexNode> values) {
    return new LambdaScalarTranslator(
        rexBuilder, parameterList, values);
  }

  /** Resolves a parameter reference. This translator has no bindings, so
   * any parameter is an error. */
  public RexNode parameter(ParameterExpression param) {
    throw new RuntimeException("unknown parameter " + param);
  }
}
/** Translator that resolves parameter expressions against a fixed list of
 * bound values; all other expression kinds behave as in
 * {@link EmptyScalarTranslator}. */
private static class LambdaScalarTranslator extends EmptyScalarTranslator {
  private final List<ParameterExpression> parameterList;
  private final List<RexNode> values;

  LambdaScalarTranslator(
      RexBuilder rexBuilder,
      List<ParameterExpression> parameterList,
      List<RexNode> values) {
    super(rexBuilder);
    this.parameterList = parameterList;
    this.values = values;
  }

  @Override public RexNode parameter(ParameterExpression param) {
    // A parameter not present in the bound list is a programming error.
    final int index = parameterList.indexOf(param);
    if (index < 0) {
      throw new RuntimeException("unknown parameter " + param);
    }
    return values.get(index);
  }
}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.execution;
import com.google.common.collect.ImmutableList;
import io.trino.execution.warnings.WarningCollector;
import io.trino.spi.TrinoException;
import io.trino.spi.resourcegroups.ResourceGroupId;
import io.trino.spi.type.TimeZoneNotSupportedException;
import io.trino.sql.tree.FunctionCall;
import io.trino.sql.tree.Identifier;
import io.trino.sql.tree.IntervalLiteral;
import io.trino.sql.tree.NodeLocation;
import io.trino.sql.tree.QualifiedName;
import io.trino.sql.tree.SetTimeZone;
import io.trino.sql.tree.StringLiteral;
import io.trino.testing.LocalQueryRunner;
import org.testng.annotations.AfterClass;
import org.testng.annotations.Test;
import java.net.URI;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.ExecutorService;
import static io.airlift.concurrent.MoreFutures.getFutureValue;
import static io.airlift.concurrent.Threads.daemonThreadsNamed;
import static io.trino.SessionTestUtils.TEST_SESSION;
import static io.trino.SystemSessionProperties.TIME_ZONE_ID;
import static io.trino.sql.tree.IntervalLiteral.IntervalField.HOUR;
import static io.trino.sql.tree.IntervalLiteral.IntervalField.MINUTE;
import static io.trino.sql.tree.IntervalLiteral.Sign.NEGATIVE;
import static io.trino.sql.tree.IntervalLiteral.Sign.POSITIVE;
import static java.util.Collections.emptyList;
import static java.util.concurrent.Executors.newCachedThreadPool;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.testng.Assert.assertEquals;
public class TestSetTimeZoneTask
{
private final LocalQueryRunner localQueryRunner;
private ExecutorService executor = newCachedThreadPool(daemonThreadsNamed(getClass().getSimpleName() + "-%s"));
public TestSetTimeZoneTask()
{
localQueryRunner = LocalQueryRunner.create(TEST_SESSION);
}
@AfterClass(alwaysRun = true)
public void tearDown()
{
executor.shutdownNow();
executor = null;
}
@Test
public void testSetTimeZoneLocal()
{
QueryStateMachine stateMachine = createQueryStateMachine("SET TIME ZONE LOCAL");
SetTimeZone setTimeZone = new SetTimeZone(
new NodeLocation(1, 1),
Optional.empty());
executeSetTimeZone(setTimeZone, stateMachine);
Map<String, String> setSessionProperties = stateMachine.getSetSessionProperties();
assertThat(setSessionProperties).hasSize(1);
assertEquals(setSessionProperties.get(TIME_ZONE_ID), "America/Bahia_Banderas");
}
@Test
public void testSetTimeZoneStringLiteral()
{
QueryStateMachine stateMachine = createQueryStateMachine("SET TIME ZONE 'America/Los_Angeles'");
SetTimeZone setTimeZone = new SetTimeZone(
new NodeLocation(1, 1),
Optional.of(new StringLiteral("America/Los_Angeles")));
executeSetTimeZone(setTimeZone, stateMachine);
Map<String, String> setSessionProperties = stateMachine.getSetSessionProperties();
assertThat(setSessionProperties).hasSize(1);
assertEquals(setSessionProperties.get(TIME_ZONE_ID), "America/Los_Angeles");
}
@Test
public void testSetTimeZoneVarcharFunctionCall()
{
QueryStateMachine stateMachine = createQueryStateMachine("SET TIME ZONE concat_ws('/', 'America', 'Los_Angeles')");
SetTimeZone setTimeZone = new SetTimeZone(
new NodeLocation(1, 1),
Optional.of(new FunctionCall(
new NodeLocation(1, 15),
QualifiedName.of(ImmutableList.of(new Identifier(new NodeLocation(1, 15), "concat_ws", false))),
ImmutableList.of(
new StringLiteral(
new NodeLocation(1, 25),
"/"),
new StringLiteral(
new NodeLocation(1, 30),
"America"),
new StringLiteral(
new NodeLocation(1, 41),
"Los_Angeles")))));
executeSetTimeZone(setTimeZone, stateMachine);
Map<String, String> setSessionProperties = stateMachine.getSetSessionProperties();
assertThat(setSessionProperties).hasSize(1);
assertEquals(setSessionProperties.get(TIME_ZONE_ID), "America/Los_Angeles");
}
@Test
public void testSetTimeZoneInvalidFunctionCall()
{
QueryStateMachine stateMachine = createQueryStateMachine("SET TIME ZONE e()");
SetTimeZone setTimeZone = new SetTimeZone(
new NodeLocation(1, 1),
Optional.of(new FunctionCall(
new NodeLocation(1, 15),
QualifiedName.of(ImmutableList.of(new Identifier(new NodeLocation(1, 15), "e", false))),
ImmutableList.of())));
assertThatThrownBy(() -> executeSetTimeZone(setTimeZone, stateMachine))
.isInstanceOf(TrinoException.class)
.hasMessage("Expected expression of varchar or interval day-time type, but 'e()' has double type");
}
@Test
public void testSetTimeZoneStringLiteralInvalidZoneId()
{
QueryStateMachine stateMachine = createQueryStateMachine("SET TIME ZONE 'Matrix/Zion'");
SetTimeZone setTimeZone = new SetTimeZone(
new NodeLocation(1, 1),
Optional.of(new StringLiteral("Matrix/Zion")));
assertThatThrownBy(() -> executeSetTimeZone(setTimeZone, stateMachine))
.isInstanceOf(TimeZoneNotSupportedException.class);
}
@Test
public void testSetTimeZoneIntervalLiteral()
{
QueryStateMachine stateMachine = createQueryStateMachine("SET TIME ZONE INTERVAL '10' HOUR");
SetTimeZone setTimeZone = new SetTimeZone(
new NodeLocation(1, 1),
Optional.of(new IntervalLiteral("10", POSITIVE, HOUR)));
executeSetTimeZone(setTimeZone, stateMachine);
Map<String, String> setSessionProperties = stateMachine.getSetSessionProperties();
assertThat(setSessionProperties).hasSize(1);
assertEquals(setSessionProperties.get(TIME_ZONE_ID), "+10:00");
}
@Test
public void testSetTimeZoneIntervalDayTimeTypeFunctionCall()
{
QueryStateMachine stateMachine = createQueryStateMachine("SET TIME ZONE parse_duration('8h')");
SetTimeZone setTimeZone = new SetTimeZone(
new NodeLocation(1, 1),
Optional.of(new FunctionCall(
new NodeLocation(1, 24),
QualifiedName.of(ImmutableList.of(new Identifier(new NodeLocation(1, 24), "parse_duration", false))),
ImmutableList.of(
new StringLiteral(
new NodeLocation(1, 39),
"8h")))));
executeSetTimeZone(setTimeZone, stateMachine);
Map<String, String> setSessionProperties = stateMachine.getSetSessionProperties();
assertThat(setSessionProperties).hasSize(1);
assertEquals(setSessionProperties.get(TIME_ZONE_ID), "+08:00");
}
@Test
public void testSetTimeZoneIntervalDayTimeTypeInvalidFunctionCall()
{
QueryStateMachine stateMachine = createQueryStateMachine("SET TIME ZONE parse_duration('3601s')");
SetTimeZone setTimeZone = new SetTimeZone(
new NodeLocation(1, 1),
Optional.of(new FunctionCall(
new NodeLocation(1, 24),
QualifiedName.of(ImmutableList.of(new Identifier(new NodeLocation(1, 24), "parse_duration", false))),
ImmutableList.of(
new StringLiteral(
new NodeLocation(1, 39),
"3601s")))));
assertThatThrownBy(() -> executeSetTimeZone(setTimeZone, stateMachine))
.isInstanceOf(TrinoException.class)
.hasMessage("Invalid time zone offset interval: interval contains seconds");
}
@Test
public void testSetTimeZoneIntervalLiteralGreaterThanOffsetTimeZoneMax()
{
QueryStateMachine stateMachine = createQueryStateMachine("SET TIME ZONE INTERVAL '15' HOUR");
SetTimeZone setTimeZone = new SetTimeZone(
new NodeLocation(1, 1),
Optional.of(new IntervalLiteral("15", POSITIVE, HOUR)));
assertThatThrownBy(() -> executeSetTimeZone(setTimeZone, stateMachine))
.isInstanceOf(TrinoException.class)
.hasMessage("Invalid offset minutes 900");
}
@Test
public void testSetTimeZoneIntervalLiteralLessThanOffsetTimeZoneMin()
{
QueryStateMachine stateMachine = createQueryStateMachine("SET TIME ZONE INTERVAL -'15' HOUR");
SetTimeZone setTimeZone = new SetTimeZone(
new NodeLocation(1, 1),
Optional.of(new IntervalLiteral("15", NEGATIVE, HOUR)));
assertThatThrownBy(() -> executeSetTimeZone(setTimeZone, stateMachine))
.isInstanceOf(TrinoException.class)
.hasMessage("Invalid offset minutes -900");
}
@Test
public void testSetTimeIntervalLiteralZoneHourToMinute()
{
    // A valid HOUR TO MINUTE interval should set the session time zone property.
    QueryStateMachine stateMachine = createQueryStateMachine("SET TIME ZONE INTERVAL -'08:00' HOUR TO MINUTE");
    IntervalLiteral negativeEightHours = new IntervalLiteral("8", NEGATIVE, HOUR, Optional.of(MINUTE));
    SetTimeZone setTimeZone = new SetTimeZone(new NodeLocation(1, 1), Optional.of(negativeEightHours));
    executeSetTimeZone(setTimeZone, stateMachine);
    Map<String, String> sessionProperties = stateMachine.getSetSessionProperties();
    assertThat(sessionProperties).hasSize(1);
    assertEquals(sessionProperties.get(TIME_ZONE_ID), "-08:00");
}
/**
 * Starts a fresh QueryStateMachine for {@code query} against the shared
 * LocalQueryRunner, using the common test session and a fake request URI.
 */
private QueryStateMachine createQueryStateMachine(String query)
{
    return QueryStateMachine.begin(
            query,
            Optional.empty(),
            TEST_SESSION,
            URI.create("fake://uri"),
            new ResourceGroupId("test"),
            false,
            localQueryRunner.getTransactionManager(),
            localQueryRunner.getAccessControl(),
            executor,
            localQueryRunner.getMetadata(),
            WarningCollector.NOOP,
            Optional.empty());
}
/**
 * Runs the statement through a SetTimeZoneTask and blocks until it
 * completes, so any failure surfaces as an exception in the caller.
 */
private void executeSetTimeZone(SetTimeZone setTimeZone, QueryStateMachine stateMachine)
{
    SetTimeZoneTask task = new SetTimeZoneTask(
            localQueryRunner.getSqlParser(),
            localQueryRunner.getGroupProvider(),
            localQueryRunner.getStatsCalculator());
    getFutureValue(task.execute(
            setTimeZone,
            localQueryRunner.getTransactionManager(),
            localQueryRunner.getMetadata(),
            localQueryRunner.getAccessControl(),
            stateMachine,
            emptyList(),
            WarningCollector.NOOP));
}
}
| |
/*
Process Fastq File in memory efficient mode
*/
package FastQprocess;
import java.io.FileInputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.util.Scanner;
import java.util.zip.GZIPInputStream;
import pub.FastQ;
/**
* <p>
* FastaQprocessMemory</p>
* <p>
* Created on 2016-1-11 12:17:19</p>
* <p>
* Author Email: zhaoqi3@mail2.sysu.edu.cn</p>
*
* @author ZHAO Qi
* @date 2016-1-11 12:17:19
* @version java 1.6.0
* @version
*/
/**
 * Memory-efficient FASTQ processing: records are streamed four lines at a
 * time instead of loading whole files into memory.
 *
 * Fixes relative to the previous revision:
 *  - total-read counters are actually incremented (they always printed 0);
 *  - gzip decompression is decided per file (file2 previously followed
 *    file1's extension);
 *  - output writers are closed in {@code finally}, so they no longer leak
 *    on error paths;
 *  - truncated trailing records no longer throw NoSuchElementException;
 *  - per-record flush() calls removed (close() flushes once).
 */
public class FastaQprocessMemory {

    /**
     * Opens a UTF-8 Scanner over the stream, wrapping it in a GZIP
     * decompressor when the file name ends in .gz/.gzip.
     */
    private static Scanner openScanner(FileInputStream stream, String fileName) throws IOException {
        if (fileName.endsWith(".gz") || fileName.endsWith(".gzip")) {
            return new Scanner(new GZIPInputStream(stream), "UTF-8");
        }
        return new Scanner(stream, "UTF-8");
    }

    /**
     * Reads one 4-line FASTQ record (header, sequence, separator, quality).
     * Returns null when the input ends before a full record is available.
     */
    private static String[] readRecord(Scanner sc) {
        String[] record = new String[4];
        for (int i = 0; i < 4; i++) {
            if (!sc.hasNextLine()) {
                return null; // truncated trailing record
            }
            record[i] = sc.nextLine();
        }
        return record;
    }

    /** Closes a resource, swallowing secondary IO errors during cleanup. */
    private static void closeQuietly(java.io.Closeable c) {
        if (c != null) {
            try {
                c.close();
            } catch (IOException ignored) {
                // best-effort cleanup; any primary failure is already propagating
            }
        }
    }

    /**
     * Trims both reads of each properly paired record by {@code left} bases
     * on the left and {@code right} bases on the right, writing results to
     * "_trimed_L&lt;left&gt;_R&lt;right&gt;.txt" files next to the inputs.
     *
     * @param left  number of leading bases to remove
     * @param right number of trailing bases to remove
     * @param file1 R1 FASTQ file (optionally gzip-compressed)
     * @param file2 R2 FASTQ file (optionally gzip-compressed)
     * @throws IOException if reading or writing fails
     */
    public void processPairedFileEffi(int left, int right, String file1, String file2) throws IOException {
        FileInputStream inputStream1 = null;
        FileInputStream inputStream2 = null;
        Scanner sc1 = null;
        Scanner sc2 = null;
        FileWriter fw1 = null;
        FileWriter fw2 = null;
        int pairCount = 0;
        int totalRead = 0;
        String outfile1 = file1 + "_trimed_L" + left + "_R" + right + ".txt";
        String outfile2 = file2 + "_trimed_L" + left + "_R" + right + ".txt";
        try {
            inputStream1 = new FileInputStream(file1);
            inputStream2 = new FileInputStream(file2);
            // Decide gzip per file (previously both followed file1's extension).
            sc1 = openScanner(inputStream1, file1);
            sc2 = openScanner(inputStream2, file2);
            fw1 = new FileWriter(outfile1);
            fw2 = new FileWriter(outfile2);
            while (sc1.hasNextLine() && sc2.hasNextLine()) {
                String[] rec1 = readRecord(sc1);
                String[] rec2 = readRecord(sc2);
                if (rec1 == null || rec2 == null) {
                    System.out.println("Incomplete FASTQ record at end of input, stopping");
                    break;
                }
                totalRead++; // fix: counter was never incremented
                if (left + right >= rec1[1].length()) {
                    System.out.println("Trim length exceeds max reads length, please check your parameters");
                    break;
                }
                // Reads pair up when the id token (before any whitespace) matches.
                if (rec1[0].split("\\s+")[0].equals(rec2[0].split("\\s+")[0])) {
                    pairCount++;
                    FastQ fq1 = new FastQ(rec1[0], rec1[1], rec1[2], rec1[3]);
                    FastQ fq2 = new FastQ(rec2[0], rec2[1], rec2[2], rec2[3]);
                    this.trim(fq1, left, right);
                    this.trim(fq2, left, right);
                    fw1.append(fq1.toString() + "\n");
                    fw2.append(fq2.toString() + "\n");
                }
            }
            System.out.println("total reads = " + totalRead);
            // Close on the success path so flush errors propagate.
            fw1.close();
            fw2.close();
            // Scanner suppresses IOExceptions; surface them explicitly.
            if (sc1.ioException() != null) {
                throw sc1.ioException();
            }
            if (sc2.ioException() != null) {
                throw sc2.ioException();
            }
        } finally {
            if (sc1 != null) {
                sc1.close();
            }
            if (sc2 != null) {
                sc2.close();
            }
            closeQuietly(inputStream1);
            closeQuietly(inputStream2);
            closeQuietly(fw1); // no-op when already closed above
            closeQuietly(fw2);
        }
        System.out.println("Paired reads:\t" + pairCount);
    }

    /**
     * Trims every read of a single FASTQ file and writes the result to
     * {@code outfile}.
     *
     * @param left    number of leading bases to remove
     * @param right   number of trailing bases to remove
     * @param file1   input FASTQ file (optionally gzip-compressed)
     * @param outfile destination path for the trimmed reads
     * @throws IOException if reading or writing fails
     */
    public void processSingleFileEffi(int left, int right, String file1, String outfile) throws IOException {
        FileInputStream inputStream1 = null;
        Scanner sc1 = null;
        FileWriter fw1 = null;
        int totalRead = 0;
        try {
            inputStream1 = new FileInputStream(file1);
            sc1 = openScanner(inputStream1, file1);
            System.out.println("used memory (Mb): "
                    + (Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory()) / 1024 / 1024);
            fw1 = new FileWriter(outfile);
            while (sc1.hasNextLine()) {
                String[] rec = readRecord(sc1);
                if (rec == null) {
                    System.out.println("Incomplete FASTQ record at end of input, stopping");
                    break;
                }
                totalRead++; // fix: counter was never incremented
                if (left + right >= rec[1].length()) {
                    System.out.println("Trim length exceeds max reads length, please check your parameters");
                    break;
                }
                FastQ fq1 = new FastQ(rec[0], rec[1], rec[2], rec[3]);
                this.trim(fq1, left, right);
                fw1.append(fq1.toString() + "\n");
            }
            System.out.println("total reads = " + totalRead);
            fw1.close();
            if (sc1.ioException() != null) {
                throw sc1.ioException();
            }
        } finally {
            if (sc1 != null) {
                sc1.close();
            }
            closeQuietly(inputStream1);
            closeQuietly(fw1);
        }
    }

    /**
     * Keeps only properly paired records whose sequences are both strictly
     * longer than {@code lengthThreshold}; no trimming is performed. Output
     * goes to "&lt;file&gt;_R1.fq" / "&lt;file&gt;_R2.fq".
     *
     * @param lengthThreshold minimum exclusive sequence length to keep
     * @param file1 R1 FASTQ file (optionally gzip-compressed)
     * @param file2 R2 FASTQ file (optionally gzip-compressed)
     * @throws IOException if reading or writing fails
     */
    public void processPairedFileEffi_filtered_length(int lengthThreshold, String file1, String file2) throws IOException {
        FileInputStream inputStream1 = null;
        FileInputStream inputStream2 = null;
        Scanner sc1 = null;
        Scanner sc2 = null;
        FileWriter fw1 = null;
        FileWriter fw2 = null;
        int pairCount = 0;
        int totalRead = 0;
        String outfile1 = file1 + "_R1.fq";
        String outfile2 = file2 + "_R2.fq";
        try {
            inputStream1 = new FileInputStream(file1);
            inputStream2 = new FileInputStream(file2);
            sc1 = openScanner(inputStream1, file1);
            sc2 = openScanner(inputStream2, file2);
            fw1 = new FileWriter(outfile1);
            fw2 = new FileWriter(outfile2);
            while (sc1.hasNextLine() && sc2.hasNextLine()) {
                String[] rec1 = readRecord(sc1);
                String[] rec2 = readRecord(sc2);
                if (rec1 == null || rec2 == null) {
                    System.out.println("Incomplete FASTQ record at end of input, stopping");
                    break;
                }
                totalRead++; // fix: counter was never incremented
                // Drop the pair when either read is at or below the threshold.
                if (rec1[1].length() <= lengthThreshold || rec2[1].length() <= lengthThreshold) {
                    continue;
                }
                if (rec1[0].split("\\s+")[0].equals(rec2[0].split("\\s+")[0])) {
                    pairCount++;
                    FastQ fq1 = new FastQ(rec1[0], rec1[1], rec1[2], rec1[3]);
                    FastQ fq2 = new FastQ(rec2[0], rec2[1], rec2[2], rec2[3]);
                    fw1.append(fq1.toString() + "\n");
                    fw2.append(fq2.toString() + "\n");
                }
            }
            System.out.println("total reads = " + totalRead);
            fw1.close();
            fw2.close();
            if (sc1.ioException() != null) {
                throw sc1.ioException();
            }
            if (sc2.ioException() != null) {
                throw sc2.ioException();
            }
        } finally {
            if (sc1 != null) {
                sc1.close();
            }
            if (sc2 != null) {
                sc2.close();
            }
            closeQuietly(inputStream1);
            closeQuietly(inputStream2);
            closeQuietly(fw1);
            closeQuietly(fw2);
        }
        System.out.println("Paired reads:\t" + pairCount);
    }

    /**
     * Removes {@code left} leading and {@code right} trailing bases (and the
     * matching quality characters) in place. Reads too short to trim are
     * left unmodified with a warning.
     *
     * @param fq    record to modify in place
     * @param left  number of leading bases to remove
     * @param right number of trailing bases to remove
     */
    public void trim(FastQ fq, int left, int right) {
        if (left + right >= fq.getSeqlength()) {
            System.out.println("Trim length exceeds reads length:" + fq.getDescription() + " please check your parameters");
        } else {
            // Capture length once: setSequence() changes it before quality is cut.
            int originalLength = fq.getSeqlength();
            fq.setSequence(fq.getSequence().substring(left, originalLength - right));
            fq.setQuality(fq.getQuality().substring(left, originalLength - right));
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.handler.component;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.IndexReaderContext;
import org.apache.lucene.index.ReaderUtil;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.FieldComparator;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.grouping.GroupDocs;
import org.apache.lucene.search.grouping.SearchGroup;
import org.apache.lucene.search.grouping.TopGroups;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.InPlaceMergeSorter;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.params.*;
import org.apache.solr.common.params.CursorMarkParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.common.util.StrUtils;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.response.ResultContext;
import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.schema.FieldType;
import org.apache.solr.schema.IndexSchema;
import org.apache.solr.schema.SchemaField;
import org.apache.solr.search.CursorMark;
import org.apache.solr.search.DocIterator;
import org.apache.solr.search.DocList;
import org.apache.solr.search.DocListAndSet;
import org.apache.solr.search.DocSlice;
import org.apache.solr.search.Grouping;
import org.apache.solr.search.QParser;
import org.apache.solr.search.QParserPlugin;
import org.apache.solr.search.QueryParsing;
import org.apache.solr.search.ReturnFields;
import org.apache.solr.search.SolrIndexSearcher;
import org.apache.solr.search.SolrReturnFields;
import org.apache.solr.search.SortSpec;
import org.apache.solr.search.SyntaxError;
import org.apache.solr.search.grouping.CommandHandler;
import org.apache.solr.search.grouping.GroupingSpecification;
import org.apache.solr.search.grouping.distributed.ShardRequestFactory;
import org.apache.solr.search.grouping.distributed.ShardResponseProcessor;
import org.apache.solr.search.grouping.distributed.command.QueryCommand;
import org.apache.solr.search.grouping.distributed.command.SearchGroupsFieldCommand;
import org.apache.solr.search.grouping.distributed.command.TopGroupsFieldCommand;
import org.apache.solr.search.grouping.distributed.requestfactory.SearchGroupsRequestFactory;
import org.apache.solr.search.grouping.distributed.requestfactory.StoredFieldsShardRequestFactory;
import org.apache.solr.search.grouping.distributed.requestfactory.TopGroupsShardRequestFactory;
import org.apache.solr.search.grouping.distributed.responseprocessor.SearchGroupShardResponseProcessor;
import org.apache.solr.search.grouping.distributed.responseprocessor.StoredFieldsShardResponseProcessor;
import org.apache.solr.search.grouping.distributed.responseprocessor.TopGroupsShardResponseProcessor;
import org.apache.solr.search.grouping.distributed.shardresultserializer.SearchGroupsResultTransformer;
import org.apache.solr.search.grouping.distributed.shardresultserializer.TopGroupsResultTransformer;
import org.apache.solr.search.grouping.endresulttransformer.EndResultTransformer;
import org.apache.solr.search.grouping.endresulttransformer.GroupedEndResultTransformer;
import org.apache.solr.search.grouping.endresulttransformer.MainEndResultTransformer;
import org.apache.solr.search.grouping.endresulttransformer.SimpleEndResultTransformer;
import org.apache.solr.util.SolrPluginUtils;
import org.apache.commons.lang.StringUtils;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
/**
* TODO!
*
*
* @since solr 1.3
*/
public class QueryComponent extends SearchComponent
{
public static final String COMPONENT_NAME = "query";
/**
 * Parses the user query, sort spec, cursor mark and filter queries from
 * the request and records them on the ResponseBuilder for later stages.
 */
@Override
public void prepare(ResponseBuilder rb) throws IOException
{
    SolrQueryRequest req = rb.req;
    SolrParams params = req.getParams();
    // Component can be switched off per request via query=false.
    if (!params.getBool(COMPONENT_NAME, true)) {
      return;
    }
    SolrQueryResponse rsp = rb.rsp;
    // Set field flags
    ReturnFields returnFields = new SolrReturnFields( req );
    rsp.setReturnFields( returnFields );
    int flags = 0;
    if (returnFields.wantsScore()) {
      flags |= SolrIndexSearcher.GET_SCORES;
    }
    rb.setFieldFlags( flags );
    String defType = params.get(QueryParsing.DEFTYPE, QParserPlugin.DEFAULT_QTYPE);
    // get it from the response builder to give a different component a chance
    // to set it.
    String queryString = rb.getQueryString();
    if (queryString == null) {
      // this is the normal way it's set.
      queryString = params.get( CommonParams.Q );
      rb.setQueryString(queryString);
    }
    try {
      QParser parser = QParser.getParser(rb.getQueryString(), defType, req);
      Query q = parser.getQuery();
      if (q == null) {
        // normalize a null query to a query that matches nothing
        q = new BooleanQuery();
      }
      rb.setQuery( q );
      rb.setSortSpec( parser.getSort(true) );
      rb.setQparser(parser);
      // Cursor-based deep paging: parse and validate the totem against the sort.
      final String cursorStr = rb.req.getParams().get(CursorMarkParams.CURSOR_MARK_PARAM);
      if (null != cursorStr) {
        final CursorMark cursorMark = new CursorMark(rb.req.getSchema(),
                                                     rb.getSortSpec());
        cursorMark.parseSerializedTotem(cursorStr);
        rb.setCursorMark(cursorMark);
      }
      String[] fqs = req.getParams().getParams(CommonParams.FQ);
      if (fqs!=null && fqs.length!=0) {
        List<Query> filters = rb.getFilters();
        // if filters already exists, make a copy instead of modifying the original
        filters = filters == null ? new ArrayList<Query>(fqs.length) : new ArrayList<Query>(filters);
        for (String fq : fqs) {
          if (fq != null && fq.trim().length()!=0) {
            QParser fqp = QParser.getParser(fq, null, req);
            filters.add(fqp.getQuery());
          }
        }
        // only set the filters if they are not empty otherwise
        // fq=&someotherParam= will trigger all docs filter for every request
        // if filter cache is disabled
        if (!filters.isEmpty()) {
          rb.setFilters( filters );
        }
      }
    } catch (SyntaxError e) {
      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);
    }
    if (params.getBool(GroupParams.GROUP, false)) {
      prepareGrouping(rb);
    }
}
/**
 * Reads all group.* request parameters into a GroupingSpecification and
 * attaches it to the ResponseBuilder. Invoked from prepare() when
 * group=true; rejects the unsupported grouping+cursor combination.
 */
private void prepareGrouping(ResponseBuilder rb) throws IOException {
    SolrQueryRequest req = rb.req;
    SolrParams params = req.getParams();
    if (null != rb.getCursorMark()) {
      // It's hard to imagine, conceptually, what it would mean to combine
      // grouping with a cursor - so for now we just don't allow the combination at all
      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Can not use Grouping with " +
                              CursorMarkParams.CURSOR_MARK_PARAM);
    }
    SolrIndexSearcher.QueryCommand cmd = rb.getQueryCommand();
    SolrIndexSearcher searcher = rb.req.getSearcher();
    GroupingSpecification groupingSpec = new GroupingSpecification();
    rb.setGroupingSpec(groupingSpec);
    //TODO: move weighting of sort
    Sort groupSort = searcher.weightSort(cmd.getSort());
    if (groupSort == null) {
      groupSort = Sort.RELEVANCE;
    }
    // groupSort defaults to sort
    String groupSortStr = params.get(GroupParams.GROUP_SORT);
    //TODO: move weighting of sort
    Sort sortWithinGroup = groupSortStr == null ? groupSort : searcher.weightSort(QueryParsing.parseSortSpec(groupSortStr, req).getSort());
    if (sortWithinGroup == null) {
      sortWithinGroup = Sort.RELEVANCE;
    }
    groupingSpec.setSortWithinGroup(sortWithinGroup);
    groupingSpec.setGroupSort(groupSort);
    // Response format: "grouped" (default) or "simple" (flat list).
    String formatStr = params.get(GroupParams.GROUP_FORMAT, Grouping.Format.grouped.name());
    Grouping.Format responseFormat;
    try {
      responseFormat = Grouping.Format.valueOf(formatStr);
    } catch (IllegalArgumentException e) {
      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, String.format(Locale.ROOT, "Illegal %s parameter", GroupParams.GROUP_FORMAT));
    }
    groupingSpec.setResponseFormat(responseFormat);
    // Copy the remaining group.* knobs straight onto the spec.
    groupingSpec.setFields(params.getParams(GroupParams.GROUP_FIELD));
    groupingSpec.setQueries(params.getParams(GroupParams.GROUP_QUERY));
    groupingSpec.setFunctions(params.getParams(GroupParams.GROUP_FUNC));
    groupingSpec.setGroupOffset(params.getInt(GroupParams.GROUP_OFFSET, 0));
    groupingSpec.setGroupLimit(params.getInt(GroupParams.GROUP_LIMIT, 1));
    groupingSpec.setOffset(rb.getSortSpec().getOffset());
    groupingSpec.setLimit(rb.getSortSpec().getCount());
    groupingSpec.setIncludeGroupCount(params.getBool(GroupParams.GROUP_TOTAL_COUNT, false));
    groupingSpec.setMain(params.getBool(GroupParams.GROUP_MAIN, false));
    groupingSpec.setNeedScore((cmd.getFlags() & SolrIndexSearcher.GET_SCORES) != 0);
    groupingSpec.setTruncateGroups(params.getBool(GroupParams.GROUP_TRUNCATE, false));
}
/**
 * Actually run the query: handles the "ids" shortcut used by distributed
 * field retrieval, the three grouping execution modes (distributed first
 * phase, distributed second phase, single-node), and the plain search path.
 */
@Override
public void process(ResponseBuilder rb) throws IOException
{
    SolrQueryRequest req = rb.req;
    SolrQueryResponse rsp = rb.rsp;
    SolrParams params = req.getParams();
    // Component may be disabled per request.
    if (!params.getBool(COMPONENT_NAME, true)) {
      return;
    }
    SolrIndexSearcher searcher = req.getSearcher();
    if (rb.getQueryCommand().getOffset() < 0) {
      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "'start' parameter cannot be negative");
    }
    // -1 as flag if not set.
    long timeAllowed = (long)params.getInt( CommonParams.TIME_ALLOWED, -1 );
    if (null != rb.getCursorMark() && 0 < timeAllowed) {
      // fundamentally incompatible: a cursor needs a complete, stable ordering
      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Can not search using both " +
                              CursorMarkParams.CURSOR_MARK_PARAM + " and " + CommonParams.TIME_ALLOWED);
    }
    // Optional: This could also be implemented by the top-level searcher sending
    // a filter that lists the ids... that would be transparent to
    // the request handler, but would be more expensive (and would preserve score
    // too if desired).
    String ids = params.get(ShardParams.IDS);
    if (ids != null) {
      // Fetch-by-id path: map each unique key to its internal Lucene docid.
      SchemaField idField = searcher.getSchema().getUniqueKeyField();
      List<String> idArr = StrUtils.splitSmart(ids, ",", true);
      int[] luceneIds = new int[idArr.size()];
      int docs = 0;
      for (int i=0; i<idArr.size(); i++) {
        int id = req.getSearcher().getFirstMatch(
                new Term(idField.getName(), idField.getType().toInternal(idArr.get(i))));
        if (id >= 0)
          luceneIds[docs++] = id;
      }
      DocListAndSet res = new DocListAndSet();
      res.docList = new DocSlice(0, docs, luceneIds, null, docs, 0);
      if (rb.isNeedDocSet()) {
        // TODO: create a cache for this!
        List<Query> queries = new ArrayList<Query>();
        queries.add(rb.getQuery());
        List<Query> filters = rb.getFilters();
        if (filters != null) queries.addAll(filters);
        res.docSet = searcher.getDocSet(queries);
      }
      rb.setResults(res);
      ResultContext ctx = new ResultContext();
      ctx.docs = rb.getResults().docList;
      ctx.query = null; // anything?
      rsp.add("response", ctx);
      return;
    }
    SolrIndexSearcher.QueryCommand cmd = rb.getQueryCommand();
    cmd.setTimeAllowed(timeAllowed);
    SolrIndexSearcher.QueryResult result = new SolrIndexSearcher.QueryResult();
    //
    // grouping / field collapsing
    //
    GroupingSpecification groupingSpec = rb.getGroupingSpec();
    if (groupingSpec != null) {
      try {
        boolean needScores = (cmd.getFlags() & SolrIndexSearcher.GET_SCORES) != 0;
        if (params.getBool(GroupParams.GROUP_DISTRIBUTED_FIRST, false)) {
          // Distributed phase 1: collect this shard's top groups per field.
          CommandHandler.Builder topsGroupsActionBuilder = new CommandHandler.Builder()
              .setQueryCommand(cmd)
              .setNeedDocSet(false) // Order matters here
              .setIncludeHitCount(true)
              .setSearcher(searcher);
          for (String field : groupingSpec.getFields()) {
            topsGroupsActionBuilder.addCommandField(new SearchGroupsFieldCommand.Builder()
                .setField(searcher.getSchema().getField(field))
                .setGroupSort(groupingSpec.getGroupSort())
                .setTopNGroups(cmd.getOffset() + cmd.getLen())
                .setIncludeGroupCount(groupingSpec.isIncludeGroupCount())
                .build()
            );
          }
          CommandHandler commandHandler = topsGroupsActionBuilder.build();
          commandHandler.execute();
          SearchGroupsResultTransformer serializer = new SearchGroupsResultTransformer(searcher);
          rsp.add("firstPhase", commandHandler.processResult(result, serializer));
          rsp.add("totalHitCount", commandHandler.getTotalHitCount());
          rb.setResult(result);
          return;
        } else if (params.getBool(GroupParams.GROUP_DISTRIBUTED_SECOND, false)) {
          // Distributed phase 2: score the merged top groups on this shard.
          CommandHandler.Builder secondPhaseBuilder = new CommandHandler.Builder()
              .setQueryCommand(cmd)
              .setTruncateGroups(groupingSpec.isTruncateGroups() && groupingSpec.getFields().length > 0)
              .setSearcher(searcher);
          for (String field : groupingSpec.getFields()) {
            // Top groups chosen in phase 1 arrive as per-field request params.
            String[] topGroupsParam = params.getParams(GroupParams.GROUP_DISTRIBUTED_TOPGROUPS_PREFIX + field);
            if (topGroupsParam == null) {
              topGroupsParam = new String[0];
            }
            List<SearchGroup<BytesRef>> topGroups = new ArrayList<SearchGroup<BytesRef>>(topGroupsParam.length);
            for (String topGroup : topGroupsParam) {
              SearchGroup<BytesRef> searchGroup = new SearchGroup<BytesRef>();
              if (!topGroup.equals(TopGroupsShardRequestFactory.GROUP_NULL_VALUE)) {
                searchGroup.groupValue = new BytesRef(searcher.getSchema().getField(field).getType().readableToIndexed(topGroup));
              }
              topGroups.add(searchGroup);
            }
            secondPhaseBuilder.addCommandField(
                new TopGroupsFieldCommand.Builder()
                    .setField(searcher.getSchema().getField(field))
                    .setGroupSort(groupingSpec.getGroupSort())
                    .setSortWithinGroup(groupingSpec.getSortWithinGroup())
                    .setFirstPhaseGroups(topGroups)
                    .setMaxDocPerGroup(groupingSpec.getGroupOffset() + groupingSpec.getGroupLimit())
                    .setNeedScores(needScores)
                    .setNeedMaxScore(needScores)
                    .build()
            );
          }
          for (String query : groupingSpec.getQueries()) {
            secondPhaseBuilder.addCommandField(new QueryCommand.Builder()
                .setDocsToCollect(groupingSpec.getOffset() + groupingSpec.getLimit())
                .setSort(groupingSpec.getGroupSort())
                .setQuery(query, rb.req)
                .setDocSet(searcher)
                .build()
            );
          }
          CommandHandler commandHandler = secondPhaseBuilder.build();
          commandHandler.execute();
          TopGroupsResultTransformer serializer = new TopGroupsResultTransformer(rb);
          rsp.add("secondPhase", commandHandler.processResult(result, serializer));
          rb.setResult(result);
          return;
        }
        // Single-node (non-distributed) grouping.
        int maxDocsPercentageToCache = params.getInt(GroupParams.GROUP_CACHE_PERCENTAGE, 0);
        boolean cacheSecondPassSearch = maxDocsPercentageToCache >= 1 && maxDocsPercentageToCache <= 100;
        Grouping.TotalCount defaultTotalCount = groupingSpec.isIncludeGroupCount() ?
            Grouping.TotalCount.grouped : Grouping.TotalCount.ungrouped;
        int limitDefault = cmd.getLen(); // this is normally from "rows"
        Grouping grouping =
            new Grouping(searcher, result, cmd, cacheSecondPassSearch, maxDocsPercentageToCache, groupingSpec.isMain());
        grouping.setSort(groupingSpec.getGroupSort())
            .setGroupSort(groupingSpec.getSortWithinGroup())
            .setDefaultFormat(groupingSpec.getResponseFormat())
            .setLimitDefault(limitDefault)
            .setDefaultTotalCount(defaultTotalCount)
            .setDocsPerGroupDefault(groupingSpec.getGroupLimit())
            .setGroupOffsetDefault(groupingSpec.getGroupOffset())
            .setGetGroupedDocSet(groupingSpec.isTruncateGroups());
        if (groupingSpec.getFields() != null) {
          for (String field : groupingSpec.getFields()) {
            grouping.addFieldCommand(field, rb.req);
          }
        }
        if (groupingSpec.getFunctions() != null) {
          for (String groupByStr : groupingSpec.getFunctions()) {
            grouping.addFunctionCommand(groupByStr, rb.req);
          }
        }
        if (groupingSpec.getQueries() != null) {
          for (String groupByStr : groupingSpec.getQueries()) {
            grouping.addQueryCommand(groupByStr, rb.req);
          }
        }
        if (rb.doHighlights || rb.isDebug() || params.getBool(MoreLikeThisParams.MLT, false)) {
          // we need a single list of the returned docs
          cmd.setFlags(SolrIndexSearcher.GET_DOCLIST);
        }
        grouping.execute();
        if (grouping.isSignalCacheWarning()) {
          rsp.add(
              "cacheWarning",
              String.format(Locale.ROOT, "Cache limit of %d percent relative to maxdoc has exceeded. Please increase cache size or disable caching.", maxDocsPercentageToCache)
          );
        }
        rb.setResult(result);
        if (grouping.mainResult != null) {
          // group.main=true: results are flattened into a normal doc list.
          ResultContext ctx = new ResultContext();
          ctx.docs = grouping.mainResult;
          ctx.query = null; // TODO? add the query?
          rsp.add("response", ctx);
          rsp.getToLog().add("hits", grouping.mainResult.matches());
        } else if (!grouping.getCommands().isEmpty()) { // Can never be empty since grouping.execute() checks for this.
          rsp.add("grouped", result.groupedResults);
          rsp.getToLog().add("hits", grouping.getCommands().get(0).getMatches());
        }
        return;
      } catch (SyntaxError e) {
        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);
      }
    }
    // normal search result
    searcher.search(result,cmd);
    rb.setResult( result );
    ResultContext ctx = new ResultContext();
    ctx.docs = rb.getResults().docList;
    ctx.query = rb.getQuery();
    rsp.add("response", ctx);
    rsp.getToLog().add("hits", rb.getResults().docList.matches());
    // Only the top-level (non-shard) request reports the next cursor mark.
    if ( ! rb.req.getParams().getBool(ShardParams.IS_SHARD,false) ) {
      if (null != rb.getNextCursorMark()) {
        rb.rsp.add(CursorMarkParams.CURSOR_MARK_NEXT,
                   rb.getNextCursorMark().getSerializedTotem());
      }
    }
    doFieldSortValues(rb, searcher);
    doPrefetch(rb);
}
/**
 * Computes and appends the per-document sort field values ("sort_values")
 * used by the distributed merge step to order documents across shards.
 *
 * Fixes relative to the previous revision:
 *  - {@code lastIdx} is now updated when the segment changes, so the
 *    FieldComparator is reused within a segment instead of being rebuilt
 *    for every document;
 *  - the redundant second {@code DocList} local (aliasing {@code docList})
 *    was removed.
 */
protected void doFieldSortValues(ResponseBuilder rb, SolrIndexSearcher searcher) throws IOException
{
    SolrQueryRequest req = rb.req;
    SolrQueryResponse rsp = rb.rsp;
    // The query cache doesn't currently store sort field values, and SolrIndexSearcher doesn't
    // currently have an option to return sort field values. Because of this, we
    // take the documents given and re-derive the sort values.
    //
    // TODO: See SOLR-5595
    boolean fsv = req.getParams().getBool(ResponseBuilder.FIELD_SORT_VALUES,false);
    if(fsv){
      NamedList<Object[]> sortVals = new NamedList<Object[]>(); // order is important for the sort fields
      IndexReaderContext topReaderContext = searcher.getTopReaderContext();
      List<AtomicReaderContext> leaves = topReaderContext.leaves();
      AtomicReaderContext currentLeaf = null;
      if (leaves.size()==1) {
        // if there is a single segment, use that subReader and avoid looking up each time
        currentLeaf = leaves.get(0);
        leaves=null;
      }
      DocList docList = rb.getResults().docList;
      // sort ids from lowest to highest so we can access them in order
      int nDocs = docList.size();
      final long[] sortedIds = new long[nDocs];
      final float[] scores = new float[nDocs]; // doc scores, parallel to sortedIds
      DocIterator it = docList.iterator();
      for (int i=0; i<nDocs; i++) {
        // high 32 bits: lucene docid; low 32 bits: original position in the DocList
        sortedIds[i] = (((long)it.nextDoc()) << 32) | i;
        scores[i] = docList.hasScores() ? it.score() : Float.NaN;
      }
      // sort ids and scores together
      new InPlaceMergeSorter() {
        @Override
        protected void swap(int i, int j) {
          long tmpId = sortedIds[i];
          float tmpScore = scores[i];
          sortedIds[i] = sortedIds[j];
          scores[i] = scores[j];
          sortedIds[j] = tmpId;
          scores[j] = tmpScore;
        }
        @Override
        protected int compare(int i, int j) {
          // In Java 6 there is no Long#compare(long,long):
          final long v1 = sortedIds[i];
          final long v2 = sortedIds[j];
          if (v1 > v2) {
            return 1;
          } else if (v1 < v2) {
            return -1;
          } else {
            return 0;
          }
        }
      }.sort(0, sortedIds.length);
      SortSpec sortSpec = rb.getSortSpec();
      Sort sort = searcher.weightSort(sortSpec.getSort());
      SortField[] sortFields = sort==null ? new SortField[]{SortField.FIELD_SCORE} : sort.getSort();
      List<SchemaField> schemaFields = sortSpec.getSchemaFields();
      for (int fld = 0; fld < schemaFields.size(); fld++) {
        SchemaField schemaField = schemaFields.get(fld);
        FieldType ft = null == schemaField? null : schemaField.getType();
        SortField sortField = sortFields[fld];
        SortField.Type type = sortField.getType();
        // :TODO: would be simpler to always serialize every position of SortField[]
        if (type==SortField.Type.SCORE || type==SortField.Type.DOC) continue;
        FieldComparator comparator = null;
        Object[] vals = new Object[nDocs];
        int lastIdx = -1;
        int idx = 0;
        for (int i = 0; i < sortedIds.length; ++i) {
          long idAndPos = sortedIds[i];
          float score = scores[i];
          int doc = (int)(idAndPos >>> 32);
          int position = (int)idAndPos;
          if (leaves != null) {
            idx = ReaderUtil.subIndex(doc, leaves);
            currentLeaf = leaves.get(idx);
            if (idx != lastIdx) {
              // we switched segments: the comparator is bound to a leaf and must be rebuilt
              comparator = null;
              lastIdx = idx; // fix: was never updated, forcing a rebuild on every doc
            }
          }
          if (comparator == null) {
            comparator = sortField.getComparator(1,0);
            comparator = comparator.setNextReader(currentLeaf);
          }
          doc -= currentLeaf.docBase; // adjust for what segment this is in
          comparator.setScorer(new FakeScorer(doc, score));
          comparator.copy(0, doc);
          Object val = comparator.value(0);
          if (null != ft) val = ft.marshalSortValue(val);
          vals[position] = val;
        }
        sortVals.add(sortField.getField(), vals);
      }
      rsp.add("sort_values", sortVals);
    }
}
/**
 * Pre-fetches stored fields for small result sets on non-shard requests,
 * warming the document cache before response writing.
 */
protected void doPrefetch(ResponseBuilder rb) throws IOException
{
    SolrQueryRequest req = rb.req;
    SolrQueryResponse rsp = rb.rsp;
    if (req.getParams().getBool(ShardParams.IS_SHARD, false)) {
        return; // shard sub-request: the coordinator retrieves fields later
    }
    DocList resultDocs = rb.getResults().docList;
    if (resultDocs != null && resultDocs.size() <= 50) {
        SolrPluginUtils.optimizePreFetchDocs(rb, resultDocs, rb.getQuery(), req, rsp);
    }
}
/**
 * Dispatches distributed-stage handling: grouped requests follow a
 * different stage pipeline than regular requests.
 */
@Override
public int distributedProcess(ResponseBuilder rb) throws IOException {
    return rb.grouping()
            ? groupedDistributedProcess(rb)
            : regularDistributedProcess(rb);
}
/**
 * Advances the distributed grouping state machine. Each completed stage
 * may select a ShardRequestFactory whose requests drive the next phase
 * (top groups, then top docs within groups, then stored fields).
 */
private int groupedDistributedProcess(ResponseBuilder rb) {
    ShardRequestFactory requestFactory = null;
    int nextStage = ResponseBuilder.STAGE_DONE;
    if (rb.stage < ResponseBuilder.STAGE_PARSE_QUERY) {
        nextStage = ResponseBuilder.STAGE_PARSE_QUERY;
    } else if (rb.stage == ResponseBuilder.STAGE_PARSE_QUERY) {
        createDistributedIdf(rb);
        nextStage = ResponseBuilder.STAGE_TOP_GROUPS;
    } else if (rb.stage < ResponseBuilder.STAGE_TOP_GROUPS) {
        nextStage = ResponseBuilder.STAGE_TOP_GROUPS;
    } else if (rb.stage == ResponseBuilder.STAGE_TOP_GROUPS) {
        requestFactory = new SearchGroupsRequestFactory();
        nextStage = ResponseBuilder.STAGE_EXECUTE_QUERY;
    } else if (rb.stage < ResponseBuilder.STAGE_EXECUTE_QUERY) {
        nextStage = ResponseBuilder.STAGE_EXECUTE_QUERY;
    } else if (rb.stage == ResponseBuilder.STAGE_EXECUTE_QUERY) {
        requestFactory = new TopGroupsShardRequestFactory();
        nextStage = ResponseBuilder.STAGE_GET_FIELDS;
    } else if (rb.stage < ResponseBuilder.STAGE_GET_FIELDS) {
        nextStage = ResponseBuilder.STAGE_GET_FIELDS;
    } else if (rb.stage == ResponseBuilder.STAGE_GET_FIELDS) {
        requestFactory = new StoredFieldsShardRequestFactory();
        nextStage = ResponseBuilder.STAGE_DONE;
    }
    if (requestFactory == null) {
        return nextStage;
    }
    for (ShardRequest shardRequest : requestFactory.constructRequest(rb)) {
        rb.addRequest(this, shardRequest);
    }
    return nextStage;
}
/**
 * Advances the distributed state machine for a non-grouped request.
 * Mirrors {@code groupedDistributedProcess} but with the plain query /
 * stored-fields phases.
 *
 * @return the next stage constant from {@link ResponseBuilder}
 */
private int regularDistributedProcess(ResponseBuilder rb) {
  int nextStage = ResponseBuilder.STAGE_DONE;
  if (rb.stage < ResponseBuilder.STAGE_PARSE_QUERY) {
    nextStage = ResponseBuilder.STAGE_PARSE_QUERY;
  } else if (rb.stage == ResponseBuilder.STAGE_PARSE_QUERY) {
    createDistributedIdf(rb);
    nextStage = ResponseBuilder.STAGE_EXECUTE_QUERY;
  } else if (rb.stage < ResponseBuilder.STAGE_EXECUTE_QUERY) {
    nextStage = ResponseBuilder.STAGE_EXECUTE_QUERY;
  } else if (rb.stage == ResponseBuilder.STAGE_EXECUTE_QUERY) {
    createMainQuery(rb);
    nextStage = ResponseBuilder.STAGE_GET_FIELDS;
  } else if (rb.stage < ResponseBuilder.STAGE_GET_FIELDS) {
    nextStage = ResponseBuilder.STAGE_GET_FIELDS;
  } else if (rb.stage == ResponseBuilder.STAGE_GET_FIELDS) {
    createRetrieveDocs(rb);
    nextStage = ResponseBuilder.STAGE_DONE;
  }
  return nextStage;
}
@Override
public void handleResponses(ResponseBuilder rb, ShardRequest sreq) {
  // Route incoming shard responses to the grouped or regular merge path.
  if (!rb.grouping()) {
    handleRegularResponses(rb, sreq);
  } else {
    handleGroupedResponses(rb, sreq);
  }
}
/**
 * Dispatches a grouped shard response to the processor matching the
 * purpose flag the request was sent with; unknown purposes are ignored.
 */
private void handleGroupedResponses(ResponseBuilder rb, ShardRequest sreq) {
  final ShardResponseProcessor processor;
  if ((sreq.purpose & ShardRequest.PURPOSE_GET_TOP_GROUPS) != 0) {
    processor = new SearchGroupShardResponseProcessor();
  } else if ((sreq.purpose & ShardRequest.PURPOSE_GET_TOP_IDS) != 0) {
    processor = new TopGroupsShardResponseProcessor();
  } else if ((sreq.purpose & ShardRequest.PURPOSE_GET_FIELDS) != 0) {
    processor = new StoredFieldsShardResponseProcessor();
  } else {
    processor = null;
  }
  if (processor != null) {
    processor.process(rb, sreq);
  }
}
/**
 * Handles a non-grouped shard response. A single request may carry several
 * purpose bits, so both merge steps are checked independently.
 */
private void handleRegularResponses(ResponseBuilder rb, ShardRequest sreq) {
  final int purpose = sreq.purpose;
  if ((purpose & ShardRequest.PURPOSE_GET_TOP_IDS) != 0) {
    mergeIds(rb, sreq);
  }
  if ((purpose & ShardRequest.PURPOSE_GET_FIELDS) != 0) {
    returnFields(rb, sreq);
  }
}
@Override
public void finishStage(ResponseBuilder rb) {
  // Only the GET_FIELDS stage assembles the final "response" section.
  if (rb.stage == ResponseBuilder.STAGE_GET_FIELDS) {
    if (rb.grouping()) {
      groupedFinishStage(rb);
    } else {
      regularFinishStage(rb);
    }
  }
}
// Shared transformer instances for the "main" and "simple" grouping response
// formats. They are reused across requests (presumably stateless since they
// are shared — confirm before adding state to either implementation).
private static final EndResultTransformer MAIN_END_RESULT_TRANSFORMER = new MainEndResultTransformer();
private static final EndResultTransformer SIMPLE_END_RESULT_TRANSFORMER = new SimpleEndResultTransformer();
/**
 * Final assembly for grouped distributed requests: transforms the merged
 * top-groups / query-command results into the same response shape that a
 * non-distributed grouped request would produce.
 */
@SuppressWarnings("unchecked")
private void groupedFinishStage(final ResponseBuilder rb) {
// To have same response as non-distributed request.
GroupingSpecification groupSpec = rb.getGroupingSpec();
if (rb.mergedTopGroups.isEmpty()) {
// No shard returned any groups: register empty TopGroups per field so the
// transformer still emits the expected (empty) sections.
for (String field : groupSpec.getFields()) {
rb.mergedTopGroups.put(field, new TopGroups(null, null, 0, 0, new GroupDocs[]{}, Float.NaN));
}
rb.resultIds = new HashMap<Object, ShardDoc>();
}
// Resolves merged ShardDocs back to the stored documents fetched in the
// GET_FIELDS phase (rb.retrievedDocuments, keyed by doc id).
EndResultTransformer.SolrDocumentSource solrDocumentSource = new EndResultTransformer.SolrDocumentSource() {
@Override
public SolrDocument retrieve(ScoreDoc doc) {
ShardDoc solrDoc = (ShardDoc) doc;
return rb.retrievedDocuments.get(solrDoc.id);
}
};
// Pick the transformer matching the requested grouping response format.
EndResultTransformer endResultTransformer;
if (groupSpec.isMain()) {
endResultTransformer = MAIN_END_RESULT_TRANSFORMER;
} else if (Grouping.Format.grouped == groupSpec.getResponseFormat()) {
endResultTransformer = new GroupedEndResultTransformer(rb.req.getSearcher());
} else if (Grouping.Format.simple == groupSpec.getResponseFormat() && !groupSpec.isMain()) {
endResultTransformer = SIMPLE_END_RESULT_TRANSFORMER;
} else {
// unrecognized format: nothing to emit
return;
}
// Merge both result maps, preserving insertion order for the transformer.
Map<String, Object> combinedMap = new LinkedHashMap<String, Object>();
combinedMap.putAll(rb.mergedTopGroups);
combinedMap.putAll(rb.mergedQueryCommandResults);
endResultTransformer.transform(combinedMap, rb, solrDocumentSource);
}
/**
 * Final assembly for non-grouped distributed requests: prunes documents
 * that disappeared between the ids and fields phases (index changed in
 * between), publishes the response docs, and echoes the next cursor mark
 * when the request is cursor-based.
 */
private void regularFinishStage(ResponseBuilder rb) {
  Iterator<SolrDocument> docIt = rb._responseDocs.iterator();
  while (docIt.hasNext()) {
    if (docIt.next() == null) {
      // doc could not be retrieved; drop it and keep numFound consistent
      docIt.remove();
      rb._responseDocs.setNumFound(rb._responseDocs.getNumFound() - 1);
    }
  }
  rb.rsp.add("response", rb._responseDocs);
  CursorMark nextMark = rb.getNextCursorMark();
  if (nextMark != null) {
    rb.rsp.add(CursorMarkParams.CURSOR_MARK_NEXT, nextMark.getSerializedTotem());
  }
}
// Placeholder hook run in the PARSE_QUERY stage. Distributed IDF is not
// implemented here; the stub is kept so the stage machines have a single
// place to add it later.
private void createDistributedIdf(ResponseBuilder rb) {
// TODO
}
/**
 * Builds and queues the phase-one shard request (PURPOSE_GET_TOP_IDS) that
 * asks every shard for its top document ids plus whatever is needed to merge
 * them here: the unique key, the raw sort values, and the score when scores
 * are requested or used for sorting.
 */
private void createMainQuery(ResponseBuilder rb) {
  ShardRequest sreq = new ShardRequest();
  sreq.purpose = ShardRequest.PURPOSE_GET_TOP_IDS;
  // TODO: base on current params or original params?
  sreq.params = new ModifiableSolrParams(rb.req.getParams());
  // never forward the shards parameter itself to the individual shards
  sreq.params.remove(ShardParams.SHARDS);
  // Each shard is queried from offset 0 so results can be merged properly
  // here; an explicit shards.start from the client overrides that.
  if (rb.shards_start > -1) {
    sreq.params.set(CommonParams.START, rb.shards_start);
  } else {
    sreq.params.set(CommonParams.START, "0");
  }
  // TODO: should we even use the SortSpec? That's obtained from the QParser,
  // and perhaps we shouldn't attempt to parse the query at this level?
  // Alternate idea: instead of specifying all these things at the upper
  // level, we could just specify that this is a shard request.
  // Ask every shard for enough rows to fill the requested window unless the
  // client fixed the per-shard row count via shards.rows explicitly.
  if (rb.shards_rows > -1) {
    sreq.params.set(CommonParams.ROWS, rb.shards_rows);
  } else {
    sreq.params.set(CommonParams.ROWS, rb.getSortSpec().getOffset() + rb.getSortSpec().getCount());
  }
  // Phase one only needs the unique key (plus score when applicable) and the
  // field sort values used by the merge.
  sreq.params.set(ResponseBuilder.FIELD_SORT_VALUES, "true");
  String keyFieldName = rb.req.getSchema().getUniqueKeyField().getName();
  boolean needScore = (rb.getFieldFlags() & SolrIndexSearcher.GET_SCORES) != 0
      || rb.getSortSpec().includesScore();
  sreq.params.set(CommonParams.FL, needScore ? keyFieldName + ",score" : keyFieldName);
  rb.addRequest(this, sreq);
}
/**
 * Merges the per-shard "top ids" responses into one globally ordered page.
 *
 * A priority queue sized offset+rows receives every shard's docs (duplicate
 * ids across shards are dropped, first shard wins); the page window is then
 * popped into rb.resultIds / rb._responseDocs for the later stored-fields
 * phase. Also accumulates global numFound / maxScore, optional per-shard
 * diagnostics (shards.info), the partialResults flag, and the next cursor
 * mark for cursor-based requests.
 */
private void mergeIds(ResponseBuilder rb, ShardRequest sreq) {
SortSpec ss = rb.getSortSpec();
Sort sort = ss.getSort();
SortField[] sortFields = null;
if(sort != null) sortFields = sort.getSort();
else {
// no explicit sort: merge by score
sortFields = new SortField[]{SortField.FIELD_SCORE};
}
IndexSchema schema = rb.req.getSchema();
SchemaField uniqueKeyField = schema.getUniqueKeyField();
// id to shard mapping, to eliminate any accidental dups
HashMap<Object,String> uniqueDoc = new HashMap<Object,String>();
// Merge the docs via a priority queue so we don't have to sort *all* of the
// documents... we only need to order the top (rows+start)
ShardFieldSortedHitQueue queue;
queue = new ShardFieldSortedHitQueue(sortFields, ss.getOffset() + ss.getCount(), rb.req.getSearcher());
NamedList<Object> shardInfo = null;
if(rb.req.getParams().getBool(ShardParams.SHARDS_INFO, false)) {
// client asked for per-shard diagnostics; attach the container up front
shardInfo = new SimpleOrderedMap<Object>();
rb.rsp.getValues().add(ShardParams.SHARDS_INFO,shardInfo);
}
long numFound = 0;
Float maxScore=null;
boolean partialResults = false;
for (ShardResponse srsp : sreq.responses) {
SolrDocumentList docs = null;
if(shardInfo!=null) {
SimpleOrderedMap<Object> nl = new SimpleOrderedMap<Object>();
if (srsp.getException() != null) {
Throwable t = srsp.getException();
if(t instanceof SolrServerException) {
// report the underlying cause rather than the client wrapper
t = ((SolrServerException)t).getCause();
}
nl.add("error", t.toString() );
StringWriter trace = new StringWriter();
t.printStackTrace(new PrintWriter(trace));
nl.add("trace", trace.toString() );
if (srsp.getShardAddress() != null) {
nl.add("shardAddress", srsp.getShardAddress());
}
}
else {
docs = (SolrDocumentList)srsp.getSolrResponse().getResponse().get("response");
nl.add("numFound", docs.getNumFound());
nl.add("maxScore", docs.getMaxScore());
nl.add("shardAddress", srsp.getShardAddress());
}
if(srsp.getSolrResponse()!=null) {
nl.add("time", srsp.getSolrResponse().getElapsedTime());
}
shardInfo.add(srsp.getShard(), nl);
}
// now that we've added the shard info, let's only proceed if we have no error.
if (srsp.getException() != null) {
partialResults = true;
continue;
}
if (docs == null) { // could have been initialized in the shards info block above
docs = (SolrDocumentList)srsp.getSolrResponse().getResponse().get("response");
}
// a shard that timed out internally also marks the merged result partial
NamedList<?> responseHeader = (NamedList<?>)srsp.getSolrResponse().getResponse().get("responseHeader");
if (responseHeader != null && Boolean.TRUE.equals(responseHeader.get("partialResults"))) {
partialResults = true;
}
// calculate global maxScore and numDocsFound
if (docs.getMaxScore() != null) {
maxScore = maxScore==null ? docs.getMaxScore() : Math.max(maxScore, docs.getMaxScore());
}
numFound += docs.getNumFound();
// convert the shard's marshalled sort values back to comparable form
NamedList sortFieldValues = (NamedList)(srsp.getSolrResponse().getResponse().get("sort_values"));
NamedList unmarshalledSortFieldValues = unmarshalSortValues(ss, sortFieldValues, schema);
// go through every doc in this response, construct a ShardDoc, and
// put it in the priority queue so it can be ordered.
for (int i=0; i<docs.size(); i++) {
SolrDocument doc = docs.get(i);
Object id = doc.getFieldValue(uniqueKeyField.getName());
String prevShard = uniqueDoc.put(id, srsp.getShard());
if (prevShard != null) {
// duplicate detected
numFound--;
// For now, just always use the first encountered since we can't currently
// remove the previous one added to the priority queue. If we switched
// to the Java5 PriorityQueue, this would be easier.
continue;
// make which duplicate is used deterministic based on shard
// if (prevShard.compareTo(srsp.shard) >= 0) {
// TODO: remove previous from priority queue
// continue;
// }
}
ShardDoc shardDoc = new ShardDoc();
shardDoc.id = id;
shardDoc.shard = srsp.getShard();
shardDoc.orderInShard = i;
Object scoreObj = doc.getFieldValue("score");
if (scoreObj != null) {
if (scoreObj instanceof String) {
// scores can arrive marshalled as strings; parse them back
shardDoc.score = Float.parseFloat((String)scoreObj);
} else {
shardDoc.score = (Float)scoreObj;
}
}
shardDoc.sortFieldValues = unmarshalledSortFieldValues;
queue.insertWithOverflow(shardDoc);
} // end for-each-doc-in-response
} // end for-each-response
// The queue now has 0 -> queuesize docs, where queuesize <= start + rows
// So we want to pop the last documents off the queue to get
// the docs offset -> queuesize
int resultSize = queue.size() - ss.getOffset();
resultSize = Math.max(0, resultSize); // there may not be any docs in range
Map<Object,ShardDoc> resultIds = new HashMap<Object,ShardDoc>();
for (int i=resultSize-1; i>=0; i--) {
ShardDoc shardDoc = queue.pop();
shardDoc.positionInResponse = i;
// Need the toString() for correlation with other lists that must
// be strings (like keys in highlighting, explain, etc)
resultIds.put(shardDoc.id.toString(), shardDoc);
}
// Add hits for distributed requests
// https://issues.apache.org/jira/browse/SOLR-3518
rb.rsp.addToLog("hits", numFound);
SolrDocumentList responseDocs = new SolrDocumentList();
if (maxScore!=null) responseDocs.setMaxScore(maxScore);
responseDocs.setNumFound(numFound);
responseDocs.setStart(ss.getOffset());
// size appropriately; slots are filled with the real docs in returnFields()
for (int i=0; i<resultSize; i++) responseDocs.add(null);
// save these results in a private area so we can access them
// again when retrieving stored fields.
// TODO: use ResponseBuilder (w/ comments) or the request context?
rb.resultIds = resultIds;
rb._responseDocs = responseDocs;
populateNextCursorMarkFromMergedShards(rb);
if (partialResults) {
// at least one shard failed or timed out; flag the merged response
rb.rsp.getResponseHeader().add( "partialResults", Boolean.TRUE );
}
}
/**
 * Inspects the state of the {@link ResponseBuilder} and populates the next
 * cursor mark (via {@link ResponseBuilder#setNextCursorMark}) from the merged
 * sort values of the individual shards.
 *
 * @param rb A <code>ResponseBuilder</code> that already contains merged
 *           <code>ShardDocs</code> in <code>resultIds</code>; may or may not
 *           be part of a cursor based request (method is a NOOP if not)
 */
private void populateNextCursorMarkFromMergedShards(ResponseBuilder rb) {
  final CursorMark lastCursorMark = rb.getCursorMark();
  if (lastCursorMark == null) {
    return; // not a cursor based request
  }
  assert null != rb.resultIds : "resultIds was not set in ResponseBuilder";
  Collection<ShardDoc> docsOnThisPage = rb.resultIds.values();
  if (docsOnThisPage.isEmpty()) {
    // Nothing more matches the query: re-use the existing totem so the user
    // can "resume" the search later if that makes sense for this sort.
    rb.setNextCursorMark(lastCursorMark);
    return;
  }
  // The doc with the highest positionInResponse is the last one on the page.
  ShardDoc lastDoc = null;
  for (ShardDoc candidate : docsOnThisPage) {
    if (lastDoc == null || candidate.positionInResponse > lastDoc.positionInResponse) {
      lastDoc = candidate;
    }
  }
  // Rebuild the sort values of that last doc, one per SortField.
  SortField[] sortFields = lastCursorMark.getSortSpec().getSort().getSort();
  List<Object> nextCursorMarkValues = new ArrayList<Object>(sortFields.length);
  for (SortField sf : sortFields) {
    if (SortField.Type.SCORE.equals(sf.getType())) {
      assert null != lastDoc.score : "lastDoc has null score";
      nextCursorMarkValues.add(lastDoc.score);
    } else {
      assert null != sf.getField() : "SortField has null field";
      List<Object> fieldVals = (List<Object>) lastDoc.sortFieldValues.get(sf.getField());
      nextCursorMarkValues.add(fieldVals.get(lastDoc.orderInShard));
    }
  }
  CursorMark nextCursorMark = lastCursorMark.createNext(nextCursorMarkValues);
  assert null != nextCursorMark : "null nextCursorMark";
  rb.setNextCursorMark(nextCursorMark);
}
/**
 * Converts marshalled (external) per-shard sort values back into their
 * comparable (internal) representation using each field's FieldType.
 *
 * The incoming NamedList only has entries for field sorts (score/doc sorts
 * are never marshalled), so a separate running index is kept for it while
 * walking the full SortField array.
 *
 * @param sortSpec        the sort of the current request
 * @param sortFieldValues marshalled values as returned by a shard
 * @param schema          the index schema (currently unused here)
 * @return a NamedList keyed by sort field name with unmarshalled value lists
 */
private NamedList unmarshalSortValues(SortSpec sortSpec,
NamedList sortFieldValues,
IndexSchema schema) {
NamedList unmarshalledSortValsPerField = new NamedList();
if (0 == sortFieldValues.size()) return unmarshalledSortValsPerField;
List<SchemaField> schemaFields = sortSpec.getSchemaFields();
SortField[] sortFields = sortSpec.getSort().getSort();
int marshalledFieldNum = 0;
for (int sortFieldNum = 0; sortFieldNum < sortFields.length; sortFieldNum++) {
final SortField sortField = sortFields[sortFieldNum];
final SortField.Type type = sortField.getType();
// :TODO: would be simpler to always serialize every position of SortField[]
if (type==SortField.Type.SCORE || type==SortField.Type.DOC) continue;
final String sortFieldName = sortField.getField();
final String valueFieldName = sortFieldValues.getName(marshalledFieldNum);
assert sortFieldName.equals(valueFieldName)
: "sortFieldValues name key does not match expected SortField.getField";
List sortVals = (List)sortFieldValues.getVal(marshalledFieldNum);
final SchemaField schemaField = schemaFields.get(sortFieldNum);
if (null == schemaField) {
// no schema field for this sort (e.g. a function sort): pass values through
unmarshalledSortValsPerField.add(sortField.getField(), sortVals);
} else {
FieldType fieldType = schemaField.getType();
List unmarshalledSortVals = new ArrayList();
for (Object sortVal : sortVals) {
unmarshalledSortVals.add(fieldType.unmarshalSortValue(sortVal));
}
unmarshalledSortValsPerField.add(sortField.getField(), unmarshalledSortVals);
}
marshalledFieldNum++;
}
return unmarshalledSortValsPerField;
}
/**
 * Builds and queues one stored-fields shard request (PURPOSE_GET_FIELDS) per
 * shard, each listing only the document ids that shard contributed to the
 * merged result page.
 */
private void createRetrieveDocs(ResponseBuilder rb) {
  // TODO: in a system with nTiers > 2 we could be passed "ids" here,
  // unless those requests always go to the final destination shard.
  // Bucket the merged docs by the shard that produced them.
  HashMap<String, Collection<ShardDoc>> docsPerShard = new HashMap<String, Collection<ShardDoc>>();
  for (ShardDoc sdoc : rb.resultIds.values()) {
    Collection<ShardDoc> bucket = docsPerShard.get(sdoc.shard);
    if (bucket == null) {
      bucket = new ArrayList<ShardDoc>();
      docsPerShard.put(sdoc.shard, bucket);
    }
    bucket.add(sdoc);
  }
  SchemaField uniqueField = rb.req.getSchema().getUniqueKeyField();
  // One request per shard, keyed by the doc ids we already hold.
  for (Collection<ShardDoc> shardDocs : docsPerShard.values()) {
    ShardRequest sreq = new ShardRequest();
    sreq.purpose = ShardRequest.PURPOSE_GET_FIELDS;
    sreq.shards = new String[] { shardDocs.iterator().next().shard };
    sreq.params = new ModifiableSolrParams();
    // start from the original request parameters
    sreq.params.add(rb.req.getParams());
    // the order is already fixed by the merge, so drop sort/cursor params
    sreq.params.remove(CommonParams.SORT);
    sreq.params.remove(CursorMarkParams.CURSOR_MARK_PARAM);
    // we already have the field sort values
    sreq.params.remove(ResponseBuilder.FIELD_SORT_VALUES);
    if (!rb.rsp.getReturnFields().wantsField(uniqueField.getName())) {
      // the unique key is needed to correlate the responses, even when the
      // client did not ask for it
      sreq.params.add(CommonParams.FL, uniqueField.getName());
    }
    ArrayList<String> ids = new ArrayList<String>(shardDocs.size());
    for (ShardDoc shardDoc : shardDocs) {
      // TODO: depending on the id type, more than a simple toString() may be needed
      ids.add(shardDoc.id.toString());
    }
    sreq.params.add(ShardParams.IDS, StrUtils.join(ids, ','));
    rb.addRequest(this, sreq);
  }
}
/**
 * Merges a stored-fields shard response into the pre-sized response doc list:
 * each returned document is placed at the position its ShardDoc earned during
 * the id merge, with the score re-attached and the unique key stripped when
 * the client did not ask for it.
 */
private void returnFields(ResponseBuilder rb, ShardRequest sreq) {
  // Keep in mind that this could also be a shard in a multi-tiered system.
  // TODO: if a multi-tiered system, it seems like some requests
  // could/should bypass middlemen (like retrieving stored fields)
  // TODO: merge fsv to if requested
  if ((sreq.purpose & ShardRequest.PURPOSE_GET_FIELDS) == 0) {
    return;
  }
  boolean returnScores = (rb.getFieldFlags() & SolrIndexSearcher.GET_SCORES) != 0;
  assert(sreq.responses.size() == 1);
  ShardResponse srsp = sreq.responses.get(0);
  SolrDocumentList docs = (SolrDocumentList) srsp.getSolrResponse().getResponse().get("response");
  String keyFieldName = rb.req.getSchema().getUniqueKeyField().getName();
  boolean removeKeyField = !rb.rsp.getReturnFields().wantsField(keyFieldName);
  for (SolrDocument doc : docs) {
    Object id = doc.getFieldValue(keyFieldName);
    ShardDoc sdoc = rb.resultIds.get(id.toString());
    if (sdoc == null) {
      continue; // doc is no longer part of the merged result set
    }
    if (returnScores && sdoc.score != null) {
      doc.setField("score", sdoc.score);
    }
    if (removeKeyField) {
      doc.removeFields(keyFieldName);
    }
    rb._responseDocs.set(sdoc.positionInResponse, doc);
  }
}
/////////////////////////////////////////////
/// SolrInfoMBean
////////////////////////////////////////////
@Override
public String getDescription() {
// short component description for the MBean/registry view
return "query";
}
@Override
public String getSource() {
// source location, substituted by svn keyword expansion
return "$URL: https://svn.apache.org/repos/asf/lucene/dev/branches/lucene_solr_4_7/solr/core/src/java/org/apache/solr/handler/component/QueryComponent.java $";
}
@Override
public URL[] getDocs() {
// no external documentation URLs are published for this component
return null;
}
/**
 * Fake scorer for a single document.
 *
 * Only {@link #docID()} and {@link #score()} are functional; the iteration
 * methods throw UnsupportedOperationException since this scorer is never used
 * to drive matching.
 *
 * TODO: when SOLR-5595 is fixed, this wont be needed, as we dont need to recompute sort values here from the comparator
 */
private static class FakeScorer extends Scorer {
// the single doc this scorer reports
final int docid;
// the fixed score it reports for that doc
final float score;
FakeScorer(int docid, float score) {
// no Weight is needed for a value-only scorer
super(null);
this.docid = docid;
this.score = score;
}
@Override
public int docID() {
return docid;
}
@Override
public float score() throws IOException {
return score;
}
@Override
public int freq() throws IOException {
throw new UnsupportedOperationException();
}
@Override
public int nextDoc() throws IOException {
throw new UnsupportedOperationException();
}
@Override
public int advance(int target) throws IOException {
throw new UnsupportedOperationException();
}
@Override
public long cost() {
return 1;
}
}
}
| |
/*******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2012 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.ui.job.entries.sql;
import org.eclipse.swt.SWT;
import org.eclipse.swt.custom.CCombo;
import org.eclipse.swt.events.FocusAdapter;
import org.eclipse.swt.events.FocusEvent;
import org.eclipse.swt.events.KeyAdapter;
import org.eclipse.swt.events.KeyEvent;
import org.eclipse.swt.events.ModifyEvent;
import org.eclipse.swt.events.ModifyListener;
import org.eclipse.swt.events.MouseAdapter;
import org.eclipse.swt.events.MouseEvent;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.events.ShellAdapter;
import org.eclipse.swt.events.ShellEvent;
import org.eclipse.swt.layout.FormAttachment;
import org.eclipse.swt.layout.FormData;
import org.eclipse.swt.layout.FormLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Event;
import org.eclipse.swt.widgets.FileDialog;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Listener;
import org.eclipse.swt.widgets.MessageBox;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.Text;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.Props;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.job.JobMeta;
import org.pentaho.di.job.entries.sql.JobEntrySQL;
import org.pentaho.di.job.entry.JobEntryDialogInterface;
import org.pentaho.di.job.entry.JobEntryInterface;
import org.pentaho.di.repository.Repository;
import org.pentaho.di.ui.core.gui.WindowProperty;
import org.pentaho.di.ui.core.widget.StyledTextComp;
import org.pentaho.di.ui.core.widget.TextVar;
import org.pentaho.di.ui.job.dialog.JobDialog;
import org.pentaho.di.ui.job.entry.JobEntryDialog;
import org.pentaho.di.ui.trans.step.BaseStepDialog;
import org.pentaho.di.ui.trans.steps.tableinput.SQLValuesHighlight;
/**
* This dialog allows you to edit the SQL job entry settings. (select the connection and the sql
* script to be executed)
*
* @author Matt
* @since 19-06-2003
*/
public class JobEntrySQLDialog extends JobEntryDialog implements JobEntryDialogInterface
{
// i18n anchor class: message keys below are resolved relative to it.
private static Class<?> PKG = JobEntrySQL.class; // for i18n purposes, needed by Translator2!! $NON-NLS-1$
// Display names for the file browser's extension filters (*.sql, *.txt, *).
private static final String[] FILETYPES = new String[] { BaseMessages.getString(PKG, "JobSQL.Filetype.Sql"), BaseMessages.getString(PKG, "JobSQL.Filetype.Text"), BaseMessages.getString(PKG, "JobSQL.Filetype.All") };
// Job entry name line
private Label wlName;
private Text wName;
private FormData fdlName, fdName;
// Database connection selector
private CCombo wConnection;
// "Use variable substitution?" checkbox
private Label wlUseSubs;
private Button wUseSubs;
// "Read SQL from file?" checkbox
private Button wSQLFromFile;
private Label wlSQLFromFile;
private FormData fdlUseSubs, fdUseSubs;
private FormData fdlSQLFromFile, fdSQLFromFile;
// SQL script editor
private Label wlSQL;
private StyledTextComp wSQL;
private FormData fdlSQL, fdSQL;
// Caret line/column indicator shown under the editor
private Label wlPosition;
private FormData fdlPosition;
// Dialog buttons
private Button wOK, wCancel;
private Listener lsOK, lsCancel;
// The job entry being edited by this dialog
private JobEntrySQL jobEntry;
private Shell shell;
private SelectionAdapter lsDef;
// Snapshot of jobEntry.hasChanged() taken when the dialog opens
// (presumably restored on cancel — confirm in cancel())
private boolean changed;
// "Send SQL as a single statement?" checkbox
private Label wlUseOneStatement;
private Button wSendOneStatement;
private FormData fdlUseOneStatement, fdUseOneStatement;
// File
private Label wlFilename;
private Button wbFilename;
private TextVar wFilename;
private FormData fdlFilename, fdbFilename, fdFilename;
/**
 * Creates the dialog for the given SQL job entry, assigning the localized
 * default entry name when none has been set yet.
 */
public JobEntrySQLDialog(Shell parent, JobEntryInterface jobEntryInt, Repository rep, JobMeta jobMeta)
{
    super(parent, jobEntryInt, rep, jobMeta);
    this.jobEntry = (JobEntrySQL) jobEntryInt;
    if (this.jobEntry.getName() == null)
    {
        this.jobEntry.setName(BaseMessages.getString(PKG, "JobSQL.Name.Default"));
    }
}
/**
 * Builds, lays out and runs the dialog's event loop.
 *
 * Creates all widgets (name, connection, SQL-from-file toggle, filename,
 * single-statement and variable-substitution toggles, SQL editor with
 * position tracking and syntax highlighting), wires their listeners, loads
 * the entry data, and blocks until the dialog is disposed.
 *
 * @return the edited job entry (same instance this dialog was opened with)
 */
public JobEntryInterface open()
{
Shell parent = getParent();
Display display = parent.getDisplay();
shell = new Shell(parent, props.getJobsDialogStyle());
props.setLook(shell);
JobDialog.setShellImage(shell, jobEntry);
// generic "something was edited" listener shared by several widgets
ModifyListener lsMod = new ModifyListener()
{
public void modifyText(ModifyEvent e)
{
jobEntry.setChanged();
}
};
// remember the original changed flag so cancel can restore it
changed = jobEntry.hasChanged();
FormLayout formLayout = new FormLayout();
formLayout.marginWidth = Const.FORM_MARGIN;
formLayout.marginHeight = Const.FORM_MARGIN;
shell.setLayout(formLayout);
shell.setText(BaseMessages.getString(PKG, "JobSQL.Title"));
int middle = props.getMiddlePct();
int margin = Const.MARGIN;
// OK / Cancel buttons at the bottom
wOK = new Button(shell, SWT.PUSH);
wOK.setText(BaseMessages.getString(PKG, "System.Button.OK"));
wCancel = new Button(shell, SWT.PUSH);
wCancel.setText(BaseMessages.getString(PKG, "System.Button.Cancel"));
BaseStepDialog.positionBottomButtons(shell, new Button[] { wOK, wCancel }, margin, null);
// Job entry name line
wlName = new Label(shell, SWT.RIGHT);
wlName.setText(BaseMessages.getString(PKG, "JobSQL.Name.Label"));
props.setLook(wlName);
fdlName = new FormData();
fdlName.left = new FormAttachment(0, 0);
fdlName.right = new FormAttachment(middle, 0);
fdlName.top = new FormAttachment(0, margin);
wlName.setLayoutData(fdlName);
wName = new Text(shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER);
props.setLook(wName);
wName.addModifyListener(lsMod);
fdName = new FormData();
fdName.left = new FormAttachment(middle, 0);
fdName.top = new FormAttachment(0, margin);
fdName.right = new FormAttachment(100, 0);
wName.setLayoutData(fdName);
// Connection line; preselect the only connection when exactly one exists
wConnection = addConnectionLine(shell, wName, middle, margin);
if (jobEntry.getDatabase()==null && jobMeta.nrDatabases()==1) wConnection.select(0);
wConnection.addModifyListener(lsMod);
// SQL from file?
wlSQLFromFile = new Label(shell, SWT.RIGHT);
wlSQLFromFile.setText(BaseMessages.getString(PKG, "JobSQL.SQLFromFile.Label"));
props.setLook(wlSQLFromFile);
fdlSQLFromFile = new FormData();
fdlSQLFromFile.left = new FormAttachment(0, 0);
fdlSQLFromFile.top = new FormAttachment(wConnection,2*margin);
fdlSQLFromFile.right = new FormAttachment(middle, -margin);
wlSQLFromFile.setLayoutData(fdlSQLFromFile);
wSQLFromFile = new Button(shell, SWT.CHECK);
props.setLook(wSQLFromFile);
wSQLFromFile.setToolTipText(BaseMessages.getString(PKG, "JobSQL.SQLFromFile.Tooltip"));
fdSQLFromFile = new FormData();
fdSQLFromFile.left = new FormAttachment(middle, 0);
fdSQLFromFile.top = new FormAttachment(wConnection, 2*margin);
fdSQLFromFile.right = new FormAttachment(100, 0);
wSQLFromFile.setLayoutData(fdSQLFromFile);
wSQLFromFile.addSelectionListener(new SelectionAdapter()
{
public void widgetSelected(SelectionEvent e)
{
// toggling the checkbox enables/disables the file vs. editor widgets
activeSQLFromFile();
jobEntry.setChanged();
}
});
// Filename line
wlFilename = new Label(shell, SWT.RIGHT);
wlFilename.setText(BaseMessages.getString(PKG, "JobSQL.Filename.Label"));
props.setLook(wlFilename);
fdlFilename = new FormData();
fdlFilename.left = new FormAttachment(0, 0);
fdlFilename.top = new FormAttachment(wSQLFromFile, margin);
fdlFilename.right = new FormAttachment(middle, -margin);
wlFilename.setLayoutData(fdlFilename);
wbFilename = new Button(shell, SWT.PUSH | SWT.CENTER);
props.setLook(wbFilename);
wbFilename.setText(BaseMessages.getString(PKG, "System.Button.Browse"));
fdbFilename = new FormData();
fdbFilename.right = new FormAttachment(100, 0);
fdbFilename.top = new FormAttachment(wSQLFromFile, margin);
wbFilename.setLayoutData(fdbFilename);
wFilename = new TextVar(jobMeta, shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER);
props.setLook(wFilename);
wFilename.setToolTipText(BaseMessages.getString(PKG, "JobSQL.Filename.Tooltip"));
wFilename.addModifyListener(lsMod);
fdFilename = new FormData();
fdFilename.left = new FormAttachment(middle, 0);
fdFilename.top = new FormAttachment(wSQLFromFile, margin);
fdFilename.right = new FormAttachment(wbFilename, -margin);
wFilename.setLayoutData(fdFilename);
// Whenever something changes, set the tooltip to the expanded version:
wFilename.addModifyListener(new ModifyListener()
{
public void modifyText(ModifyEvent e)
{
wFilename.setToolTipText(jobMeta.environmentSubstitute(wFilename.getText()));
}
});
// Browse button opens a file dialog seeded with the substituted filename
wbFilename.addSelectionListener(new SelectionAdapter()
{
public void widgetSelected(SelectionEvent e)
{
FileDialog dialog = new FileDialog(shell, SWT.OPEN);
dialog.setFilterExtensions(new String[] { "*.sql", "*.txt", "*" });
if (wFilename.getText() != null)
{
dialog.setFileName(jobMeta.environmentSubstitute(wFilename.getText()));
}
dialog.setFilterNames(FILETYPES);
if (dialog.open() != null)
{
wFilename.setText(dialog.getFilterPath() + Const.FILE_SEPARATOR
+ dialog.getFileName());
}
}
});
// Send one SQL Statement?
wlUseOneStatement = new Label(shell, SWT.RIGHT);
wlUseOneStatement.setText(BaseMessages.getString(PKG, "JobSQL.SendOneStatement.Label"));
props.setLook(wlUseOneStatement);
fdlUseOneStatement = new FormData();
fdlUseOneStatement.left = new FormAttachment(0, 0);
fdlUseOneStatement.top = new FormAttachment(wbFilename, margin);
fdlUseOneStatement.right = new FormAttachment(middle, -margin);
wlUseOneStatement.setLayoutData(fdlUseOneStatement);
wSendOneStatement = new Button(shell, SWT.CHECK);
props.setLook(wSendOneStatement);
wSendOneStatement.setToolTipText(BaseMessages.getString(PKG, "JobSQL.SendOneStatement.Tooltip"));
fdUseOneStatement = new FormData();
fdUseOneStatement.left = new FormAttachment(middle, 0);
fdUseOneStatement.top = new FormAttachment(wbFilename, margin);
fdUseOneStatement.right = new FormAttachment(100, 0);
wSendOneStatement.setLayoutData(fdUseOneStatement);
wSendOneStatement.addSelectionListener(new SelectionAdapter()
{
public void widgetSelected(SelectionEvent e)
{
jobEntry.setChanged();
}
});
// Use variable substitution?
wlUseSubs = new Label(shell, SWT.RIGHT);
wlUseSubs.setText(BaseMessages.getString(PKG, "JobSQL.UseVariableSubst.Label"));
props.setLook(wlUseSubs);
fdlUseSubs = new FormData();
fdlUseSubs.left = new FormAttachment(0, 0);
fdlUseSubs.top = new FormAttachment(wSendOneStatement, margin);
fdlUseSubs.right = new FormAttachment(middle, -margin);
wlUseSubs.setLayoutData(fdlUseSubs);
wUseSubs = new Button(shell, SWT.CHECK);
props.setLook(wUseSubs);
wUseSubs.setToolTipText(BaseMessages.getString(PKG, "JobSQL.UseVariableSubst.Tooltip"));
fdUseSubs = new FormData();
fdUseSubs.left = new FormAttachment(middle, 0);
fdUseSubs.top = new FormAttachment(wSendOneStatement, margin);
fdUseSubs.right = new FormAttachment(100, 0);
wUseSubs.setLayoutData(fdUseSubs);
wUseSubs.addSelectionListener(new SelectionAdapter()
{
public void widgetSelected(SelectionEvent e)
{
// note: this toggles the entry's flag directly rather than reading it in ok()
jobEntry.setUseVariableSubstitution(!jobEntry.getUseVariableSubstitution());
jobEntry.setChanged();
}
});
// Line/column indicator anchored above the OK button
wlPosition = new Label(shell, SWT.NONE);
wlPosition.setText(BaseMessages.getString(PKG, "JobSQL.LineNr.Label", "0"));
props.setLook(wlPosition);
fdlPosition = new FormData();
fdlPosition.left = new FormAttachment(0, 0);
fdlPosition.right= new FormAttachment(100, 0);
fdlPosition.bottom = new FormAttachment(wOK, -margin);
wlPosition.setLayoutData(fdlPosition);
// Script line
wlSQL = new Label(shell, SWT.NONE);
wlSQL.setText(BaseMessages.getString(PKG, "JobSQL.Script.Label"));
props.setLook(wlSQL);
fdlSQL = new FormData();
fdlSQL.left = new FormAttachment(0, 0);
fdlSQL.top = new FormAttachment(wUseSubs, margin);
wlSQL.setLayoutData(fdlSQL);
wSQL=new StyledTextComp(jobEntry, shell, SWT.MULTI | SWT.LEFT | SWT.BORDER | SWT.H_SCROLL | SWT.V_SCROLL, "");
props.setLook(wSQL, Props.WIDGET_STYLE_FIXED);
wSQL.addModifyListener(lsMod);
fdSQL = new FormData();
fdSQL.left = new FormAttachment(0, 0);
fdSQL.top = new FormAttachment(wlSQL, margin);
fdSQL.right = new FormAttachment(100, -10);
fdSQL.bottom = new FormAttachment(wlPosition, -margin);
wSQL.setLayoutData(fdSQL);
// Add listeners
lsCancel = new Listener()
{
public void handleEvent(Event e)
{
cancel();
}
};
lsOK = new Listener()
{
public void handleEvent(Event e)
{
ok();
}
};
wCancel.addListener(SWT.Selection, lsCancel);
wOK.addListener(SWT.Selection, lsOK);
// Enter in the name field acts like OK
lsDef = new SelectionAdapter()
{
public void widgetDefaultSelected(SelectionEvent e)
{
ok();
}
};
wName.addSelectionListener(lsDef);
// Detect X or ALT-F4 or something that kills this window...
shell.addShellListener(new ShellAdapter()
{
public void shellClosed(ShellEvent e)
{
cancel();
}
});
// Keep the line/column indicator in sync with every way the caret can move.
wSQL.addModifyListener(new ModifyListener()
{
public void modifyText(ModifyEvent arg0)
{
setPosition();
}
}
);
wSQL.addKeyListener(new KeyAdapter(){
public void keyPressed(KeyEvent e) { setPosition(); }
public void keyReleased(KeyEvent e) { setPosition(); }
}
);
wSQL.addFocusListener(new FocusAdapter(){
public void focusGained(FocusEvent e) { setPosition(); }
public void focusLost(FocusEvent e) { setPosition(); }
}
);
wSQL.addMouseListener(new MouseAdapter(){
public void mouseDoubleClick(MouseEvent e) { setPosition(); }
public void mouseDown(MouseEvent e) { setPosition(); }
public void mouseUp(MouseEvent e) { setPosition(); }
}
);
wSQL.addModifyListener(lsMod);
// SQL syntax highlighting
wSQL.addLineStyleListener(new SQLValuesHighlight());
// load entry data, apply enablement, size, and run the event loop
getData();
activeSQLFromFile();
BaseStepDialog.setSize(shell);
shell.open();
props.setDialogSize(shell, "JobSQLDialogSize");
while (!shell.isDisposed())
{
if (!display.readAndDispatch())
display.sleep();
}
return jobEntry;
}
/**
 * Refreshes the position label with the caret's current 1-based line number
 * and 0-based column inside the SQL editor.
 */
public void setPosition(){
    String text = wSQL.getText();
    int caret = wSQL.getCaretOffset();
    int lineNumber = wSQL.getLineAtOffset(caret) + 1;
    // Walk backwards from the caret to the previous line break to find the column.
    int column = 0;
    for (int i = caret; i > 0; i--) {
        char ch = text.charAt(i - 1);
        if (ch == '\n' || ch == '\r') {
            break;
        }
        column++;
    }
    wlPosition.setText(BaseMessages.getString(PKG, "JobSQL.Position.Label", "" + lineNumber, "" + column));
}
/**
 * Saves the dialog's window geometry into the properties and disposes the shell.
 */
public void dispose()
{
    props.setScreen(new WindowProperty(shell));
    shell.dispose();
}
/**
 * Copy information from the meta-data input to the dialog fields.
 */
public void getData()
{
    String name = jobEntry.getName();
    if (name != null) {
        wName.setText(name);
    }
    String sql = jobEntry.getSQL();
    if (sql != null) {
        wSQL.setText(sql);
    }
    // Show the connection name when one is configured, otherwise clear the field.
    DatabaseMeta dbinfo = jobEntry.getDatabase();
    wConnection.setText(dbinfo != null && dbinfo.getName() != null ? dbinfo.getName() : "");
    wUseSubs.setSelection(jobEntry.getUseVariableSubstitution());
    wSQLFromFile.setSelection(jobEntry.getSQLFromFile());
    wSendOneStatement.setSelection(jobEntry.isSendOneStatement());
    String filename = jobEntry.getSQLFilename();
    if (filename != null) {
        wFilename.setText(filename);
    }
    wName.selectAll();
}
/**
 * Toggles the widgets: when the SQL comes from a file, the filename controls
 * are enabled and the inline editor is disabled, and vice versa.
 */
private void activeSQLFromFile()
{
    boolean fromFile = wSQLFromFile.getSelection();
    wlFilename.setEnabled(fromFile);
    wFilename.setEnabled(fromFile);
    wbFilename.setEnabled(fromFile);
    wSQL.setEnabled(!fromFile);
    wlSQL.setEnabled(!fromFile);
    wlPosition.setEnabled(!fromFile);
}
// Abort the dialog: restore the entry's original "changed" flag, null the
// result (callers treat a null return from open() as "cancelled") and close.
private void cancel()
{
jobEntry.setChanged(changed);
jobEntry = null;
dispose();
}
/**
 * Validates the entry name, copies the dialog fields back into the job entry
 * and closes the dialog. Keeps the dialog open when the name is missing.
 */
private void ok()
{
    String name = wName.getText();
    if (Const.isEmpty(name))
    {
        // A job entry must have a name; warn and abort the OK action.
        MessageBox mb = new MessageBox(shell, SWT.OK | SWT.ICON_ERROR);
        mb.setText(BaseMessages.getString(PKG, "System.StepJobEntryNameMissing.Title"));
        mb.setMessage(BaseMessages.getString(PKG, "System.JobEntryNameMissing.Msg"));
        mb.open();
        return;
    }
    jobEntry.setName(name);
    jobEntry.setSQL(wSQL.getText());
    jobEntry.setUseVariableSubstitution(wUseSubs.getSelection());
    jobEntry.setSQLFromFile(wSQLFromFile.getSelection());
    jobEntry.setSQLFilename(wFilename.getText());
    jobEntry.setSendOneStatement(wSendOneStatement.getSelection());
    jobEntry.setDatabase(jobMeta.findDatabase(wConnection.getText()));
    dispose();
}
}
| |
package cn.newcapec.jwxt.jcxxgl.model;
import java.math.BigDecimal;
import java.util.Date;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
/**
 * JPA entity mapped to table {@code JWXT.JWXT_JCXX_CGXX}.
 *
 * Generated by MyEclipse Persistence Tools. Field names mirror the pinyin
 * column abbreviations of the underlying table; their business meaning is
 * not visible from this file (NOTE(review): confirm semantics against the
 * database data dictionary).
 */
@Entity
@Table(name="JWXT_JCXX_CGXX"
,schema="JWXT"
)
public class JwxtJcxxCgxx extends cn.newcapec.function.digitalcampus.common.model.AbstractModel implements java.io.Serializable {
    // The class implements Serializable but previously declared no
    // serialVersionUID, so the JVM-computed default would change (and break
    // deserialization) whenever the class shape changed. Pin it explicitly.
    private static final long serialVersionUID = 1L;

    // Fields (names follow the DB column names declared in the accessors below)
    private String id;
    private String jzwh;
    private String jzwmc;
    private String fwcqm;
    private String syzkm;
    private String xqh;
    private String jzwflm;
    private BigDecimal jzwcs;
    private String jcny;
    private String jzwdz;
    private String jzwzkm;
    private String jzwtp;
    private Double jzwzdmj;
    private String jzwyt;
    private String cgzt;
    private String cjr;
    private String jlssdw;
    private Date whsj;
    private Date cjsj;

    // Constructors

    /** Default constructor (required by JPA). */
    public JwxtJcxxCgxx() {
    }

    /** Minimal constructor covering all non-nullable columns. */
    public JwxtJcxxCgxx(String id, String jzwh, String jzwmc, String xqh, BigDecimal jzwcs, String jzwdz, String cjr, String jlssdw, Date cjsj) {
        this.id = id;
        this.jzwh = jzwh;
        this.jzwmc = jzwmc;
        this.xqh = xqh;
        this.jzwcs = jzwcs;
        this.jzwdz = jzwdz;
        this.cjr = cjr;
        this.jlssdw = jlssdw;
        this.cjsj = cjsj;
    }

    /** Full constructor covering every mapped column. */
    public JwxtJcxxCgxx(String id, String jzwh, String jzwmc, String fwcqm, String syzkm, String xqh, String jzwflm, BigDecimal jzwcs, String jcny, String jzwdz, String jzwzkm, String jzwtp, Double jzwzdmj, String jzwyt, String cgzt, String cjr, String jlssdw, Date whsj, Date cjsj) {
        this.id = id;
        this.jzwh = jzwh;
        this.jzwmc = jzwmc;
        this.fwcqm = fwcqm;
        this.syzkm = syzkm;
        this.xqh = xqh;
        this.jzwflm = jzwflm;
        this.jzwcs = jzwcs;
        this.jcny = jcny;
        this.jzwdz = jzwdz;
        this.jzwzkm = jzwzkm;
        this.jzwtp = jzwtp;
        this.jzwzdmj = jzwzdmj;
        this.jzwyt = jzwyt;
        this.cgzt = cgzt;
        this.cjr = cjr;
        this.jlssdw = jlssdw;
        this.whsj = whsj;
        this.cjsj = cjsj;
    }

    // Property accessors

    @Id
    @Column(name="ID", unique=true, nullable=false, length=32)
    public String getId() {
        return this.id;
    }

    public void setId(String id) {
        this.id = id;
    }

    @Column(name="JZWH", nullable=false, length=32)
    public String getJzwh() {
        return this.jzwh;
    }

    public void setJzwh(String jzwh) {
        this.jzwh = jzwh;
    }

    @Column(name="JZWMC", nullable=false, length=200)
    public String getJzwmc() {
        return this.jzwmc;
    }

    public void setJzwmc(String jzwmc) {
        this.jzwmc = jzwmc;
    }

    @Column(name="FWCQM", length=2)
    public String getFwcqm() {
        return this.fwcqm;
    }

    public void setFwcqm(String fwcqm) {
        this.fwcqm = fwcqm;
    }

    @Column(name="SYZKM", length=2)
    public String getSyzkm() {
        return this.syzkm;
    }

    public void setSyzkm(String syzkm) {
        this.syzkm = syzkm;
    }

    @Column(name="XQH", nullable=false, length=32)
    public String getXqh() {
        return this.xqh;
    }

    public void setXqh(String xqh) {
        this.xqh = xqh;
    }

    @Column(name="JZWFLM", length=2)
    public String getJzwflm() {
        return this.jzwflm;
    }

    public void setJzwflm(String jzwflm) {
        this.jzwflm = jzwflm;
    }

    @Column(name="JZWCS", nullable=false, precision=22, scale=0)
    public BigDecimal getJzwcs() {
        return this.jzwcs;
    }

    public void setJzwcs(BigDecimal jzwcs) {
        this.jzwcs = jzwcs;
    }

    @Column(name="JCNY", length=10)
    public String getJcny() {
        return this.jcny;
    }

    public void setJcny(String jcny) {
        this.jcny = jcny;
    }

    @Column(name="JZWDZ", nullable=false, length=200)
    public String getJzwdz() {
        return this.jzwdz;
    }

    public void setJzwdz(String jzwdz) {
        this.jzwdz = jzwdz;
    }

    @Column(name="JZWZKM", length=2)
    public String getJzwzkm() {
        return this.jzwzkm;
    }

    public void setJzwzkm(String jzwzkm) {
        this.jzwzkm = jzwzkm;
    }

    @Column(name="JZWTP", length=100)
    public String getJzwtp() {
        return this.jzwtp;
    }

    public void setJzwtp(String jzwtp) {
        this.jzwtp = jzwtp;
    }

    @Column(name="JZWZDMJ", precision=9)
    public Double getJzwzdmj() {
        return this.jzwzdmj;
    }

    public void setJzwzdmj(Double jzwzdmj) {
        this.jzwzdmj = jzwzdmj;
    }

    @Column(name="JZWYT", length=2)
    public String getJzwyt() {
        return this.jzwyt;
    }

    public void setJzwyt(String jzwyt) {
        this.jzwyt = jzwyt;
    }

    @Column(name="CGZT", length=2)
    public String getCgzt() {
        return this.cgzt;
    }

    public void setCgzt(String cgzt) {
        this.cgzt = cgzt;
    }

    @Column(name="CJR", nullable=false, length=32)
    public String getCjr() {
        return this.cjr;
    }

    public void setCjr(String cjr) {
        this.cjr = cjr;
    }

    @Column(name="JLSSDW", nullable=false, length=32)
    public String getJlssdw() {
        return this.jlssdw;
    }

    public void setJlssdw(String jlssdw) {
        this.jlssdw = jlssdw;
    }

    @Temporal(TemporalType.DATE)
    @Column(name="WHSJ", length=7)
    public Date getWhsj() {
        return this.whsj;
    }

    public void setWhsj(Date whsj) {
        this.whsj = whsj;
    }

    @Temporal(TemporalType.DATE)
    @Column(name="CJSJ", nullable=false, length=7)
    public Date getCjsj() {
        return this.cjsj;
    }

    public void setCjsj(Date cjsj) {
        this.cjsj = cjsj;
    }
}
| |
/*
* Copyright 2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.vertx.example.proton.server;
import static io.vertx.proton.ProtonHelper.message;
import java.util.UUID;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.qpid.proton.amqp.messaging.AmqpValue;
import org.apache.qpid.proton.amqp.messaging.Section;
import org.apache.qpid.proton.amqp.transport.AmqpError;
import org.apache.qpid.proton.amqp.transport.ErrorCondition;
import org.apache.qpid.proton.message.Message;
import io.vertx.core.AbstractVerticle;
import io.vertx.core.Vertx;
import io.vertx.example.util.Runner;
import io.vertx.proton.ProtonConnection;
import io.vertx.proton.ProtonReceiver;
import io.vertx.proton.ProtonSender;
import io.vertx.proton.ProtonServer;
/**
* HelloServer
*
* Allows attaching senders and receivers to any address, printing the messages
* received from producers, and periodically sending any consumers a message.
*/
public class HelloServer extends AbstractVerticle {
// Default AMQP port.
private static final int PORT = 5672;
// Convenience method so you can run it in your IDE
public static void main(String[] args) {
Runner.runExample(HelloServer.class);
}
/**
 * Creates an AMQP server, registers the per-connection handler and starts
 * listening on {@link #PORT}.
 */
@Override
public void start() throws Exception {
ProtonServer server = ProtonServer.create(vertx);
// Configure how new connections are handled
server.connectHandler((connection) -> {
initConnection(vertx, connection);
});
server.listen(PORT, (res) -> {
if (res.succeeded()) {
System.out.println("Listening on port " + res.result().actualPort());
} else {
System.out.println("Failed to start listening on port " + PORT + ":");
res.cause().printStackTrace();
}
});
}
// Initialise then open new connections
// Echoes open/close/session events back to the peer and delegates link
// attaches to initSender/initReceiver.
private static void initConnection(Vertx vertx, ProtonConnection connection) {
connection.openHandler(res -> {
System.out.println("Client connection opened, container-id: " + connection.getRemoteContainer());
connection.open();
});
connection.closeHandler(c -> {
System.out.println("Client closing connection, container-id: " + connection.getRemoteContainer());
connection.close();
connection.disconnect();
});
connection.disconnectHandler(c -> {
System.out.println("Client socket disconnected, container-id: " + connection.getRemoteContainer());
connection.disconnect();
});
connection.sessionOpenHandler(session -> {
session.closeHandler(x -> {
session.close();
session.free();
});
session.open();
});
connection.senderOpenHandler(sender -> {
initSender(vertx, connection, sender);
});
connection.receiverOpenHandler(HelloServer::initReceiver);
}
// Initialise then open new sender (when a client receiver/consumer attaches)
private static void initSender(Vertx vertx, ProtonConnection connection, ProtonSender sender) {
org.apache.qpid.proton.amqp.messaging.Source remoteSource = (org.apache.qpid.proton.amqp.messaging.Source) sender.getRemoteSource();
if(remoteSource == null) {
// Example doesn't support 'looking up' existing links, so we will just close with an error
sender.setTarget(null);
sender.setCondition(new ErrorCondition(AmqpError.INVALID_FIELD, "No source terminus specified"));
sender.open();
sender.close();
return;
}
// Configure the servers local source+target details.
// Just reflecting the remote details (+ set dynamic address if requested).
// This is rather naive, for example use only, proper servers should
// ensure that they advertise their own Source settings which actually
// reflect what is in place.
if(remoteSource.getDynamic()) {
String dynamicAddress = UUID.randomUUID().toString();
remoteSource.setAddress(dynamicAddress);
}
sender.setSource(remoteSource);
sender.setTarget(sender.getRemoteTarget());
// Can optionally add a sendQueueDrainHandler to await receiver
// granting credit. Here we will just schedule sends to happen
// periodically assuming there is credit available at the time.
AtomicInteger sent = new AtomicInteger();
final long timer = vertx.setPeriodic(1000, t -> {
if (connection.isDisconnected()) {
vertx.cancelTimer(t);
} else {
if(!sender.sendQueueFull()) {
int msgNum = sent.incrementAndGet();
System.out.println("Sending message " + msgNum + " to client, for address: " + remoteSource.getAddress());
Message m = message("Hello " + msgNum + " from Server!");
sender.send(m, delivery -> {
System.out.println("Message " + msgNum + " was received by the client.");
});
}
}
});
// Stop the periodic sends as soon as the link goes away; otherwise the
// timer would keep firing against a freed sender.
sender.detachHandler(x -> {
vertx.cancelTimer(timer);
sender.detach();
sender.free();
});
sender.closeHandler(x -> {
vertx.cancelTimer(timer);
sender.close();
sender.free();
});
sender.open();
}
// Initialise then open new receiver (when a client sender/producer attaches)
private static void initReceiver(ProtonReceiver receiver) {
org.apache.qpid.proton.amqp.messaging.Target remoteTarget = (org.apache.qpid.proton.amqp.messaging.Target) receiver.getRemoteTarget();
if(remoteTarget == null) {
// Example doesn't support 'looking up' existing links, so we will just close with an error.
receiver.setTarget(null);
receiver.setCondition(new ErrorCondition(AmqpError.INVALID_FIELD, "No target terminus specified"));
receiver.open();
receiver.close();
return;
}
// Configure the servers local source+target details.
// Just reflecting the remote details (+ set dynamic address if requested).
// This is rather naive, for example use only, proper servers should
// ensure that they advertise their own Target settings which actually
// reflect what is in place.
if(remoteTarget.getDynamic()) {
String dynamicAddress = UUID.randomUUID().toString();
remoteTarget.setAddress(dynamicAddress);
}
receiver.setTarget(remoteTarget);
receiver.setSource(receiver.getRemoteSource());
// Handle arriving messages. Just prints out their details.
// Unless configured otherwise, the receiver automatically accepts messages when the
// handler returns if another disposition hasn't been applied, and also grants
// credit when opened and replenishes it as messages are received.
receiver.handler((delivery, msg) -> {
// Prefer the terminus address; fall back to the message's own address.
String address = remoteTarget.getAddress() ;
if (address == null) {
address = msg.getAddress();
}
Section body = msg.getBody();
if (body instanceof AmqpValue) {
String content = (String) ((AmqpValue) body).getValue();
System.out.println("Received message for address: " + address + ", body: " + content);
}
});
receiver.detachHandler(x -> {
receiver.detach();
receiver.free();
});
receiver.closeHandler(x -> {
receiver.close();
receiver.free();
});
receiver.open();
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.query;
import org.apache.lucene.search.TermInSetQuery;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.PointInSetQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.get.GetResult;
import org.elasticsearch.indices.TermsLookup;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.AbstractQueryTestCase;
import org.junit.Before;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.either;
import static org.hamcrest.Matchers.instanceOf;
/**
 * Unit tests for {@code TermsQueryBuilder}: random round-trip testing via
 * {@code AbstractQueryTestCase} plus targeted cases for argument validation,
 * terms-lookup rewriting, numeric coercion and serialization.
 */
public class TermsQueryBuilderTests extends AbstractQueryTestCase<TermsQueryBuilder> {
// Terms returned by the mocked get request used for terms-lookup queries.
private List<Object> randomTerms;
// Document path used by the mocked terms lookup.
private String termsPath;
// NOTE(review): this @Before method intentionally (or accidentally) shares
// its name with the field it initialises; the local list shadows the field
// until the explicit assignment below.
@Before
public void randomTerms() {
List<Object> randomTerms = new ArrayList<>();
String[] strings = generateRandomStringArray(10, 10, false, true);
for (String string : strings) {
randomTerms.add(string);
if (rarely()) {
// Occasionally mix in null terms; lookups are expected to filter them out.
randomTerms.add(null);
}
}
this.randomTerms = randomTerms;
termsPath = randomAlphaOfLength(10).replace('.', '_');
}
@Override
protected TermsQueryBuilder doCreateTestQueryBuilder() {
TermsQueryBuilder query;
// terms query or lookup query
if (randomBoolean()) {
// make between 0 and 5 different values of the same type
String fieldName;
do {
fieldName = getRandomFieldName();
} while (fieldName.equals(GEO_POINT_FIELD_NAME) || fieldName.equals(GEO_SHAPE_FIELD_NAME));
Object[] values = new Object[randomInt(5)];
for (int i = 0; i < values.length; i++) {
values[i] = getRandomValueForFieldName(fieldName);
}
query = new TermsQueryBuilder(fieldName, values);
} else {
// right now the mock service returns us a list of strings
query = new TermsQueryBuilder(randomBoolean() ? randomAlphaOfLengthBetween(1,10) : STRING_FIELD_NAME, randomTermsLookup());
}
return query;
}
// Builds a lookup pointing at the mocked document served by executeGet below.
private TermsLookup randomTermsLookup() {
return new TermsLookup(randomAlphaOfLength(10), randomAlphaOfLength(10), randomAlphaOfLength(10),
termsPath).routing(randomBoolean() ? randomAlphaOfLength(10) : null);
}
@Override
protected void doAssertLuceneQuery(TermsQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException {
// No terms (inline or via lookup) must produce a match-no-docs query.
if (queryBuilder.termsLookup() == null && (queryBuilder.values() == null || queryBuilder.values().isEmpty())) {
assertThat(query, instanceOf(MatchNoDocsQuery.class));
MatchNoDocsQuery matchNoDocsQuery = (MatchNoDocsQuery) query;
assertThat(matchNoDocsQuery.toString(), containsString("No terms supplied for \"terms\" query."));
} else if (queryBuilder.termsLookup() != null && randomTerms.size() == 0){
assertThat(query, instanceOf(MatchNoDocsQuery.class));
MatchNoDocsQuery matchNoDocsQuery = (MatchNoDocsQuery) query;
assertThat(matchNoDocsQuery.toString(), containsString("No terms supplied for \"terms\" query."));
} else {
assertThat(query, either(instanceOf(TermInSetQuery.class))
.or(instanceOf(PointInSetQuery.class))
.or(instanceOf(ConstantScoreQuery.class)));
if (query instanceof ConstantScoreQuery) {
assertThat(((ConstantScoreQuery) query).getQuery(), instanceOf(BooleanQuery.class));
}
// we only do the check below for string fields (otherwise we'd have to decode the values)
if (queryBuilder.fieldName().equals(INT_FIELD_NAME) || queryBuilder.fieldName().equals(DOUBLE_FIELD_NAME)
|| queryBuilder.fieldName().equals(BOOLEAN_FIELD_NAME) || queryBuilder.fieldName().equals(DATE_FIELD_NAME)) {
return;
}
// expected returned terms depending on whether we have a terms query or a terms lookup query
List<Object> terms;
if (queryBuilder.termsLookup() != null) {
terms = randomTerms;
} else {
terms = queryBuilder.values();
}
TermInSetQuery expected = new TermInSetQuery(queryBuilder.fieldName(),
terms.stream().filter(Objects::nonNull).map(Object::toString).map(BytesRef::new).collect(Collectors.toList()));
assertEquals(expected, query);
}
}
// Null and empty field names must be rejected at construction time.
public void testEmtpyFieldName() {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new TermsQueryBuilder(null, "term"));
assertEquals("field name cannot be null.", e.getMessage());
e = expectThrows(IllegalArgumentException.class, () -> new TermsQueryBuilder("", "term"));
assertEquals("field name cannot be null.", e.getMessage());
}
public void testEmtpyTermsLookup() {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new TermsQueryBuilder("field", (TermsLookup) null));
assertEquals("No value or termsLookup specified for terms query", e.getMessage());
}
// Every typed-array overload must reject a null values argument.
public void testNullValues() {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new TermsQueryBuilder("field", (String[]) null));
assertThat(e.getMessage(), containsString("No value specified for terms query"));
e = expectThrows(IllegalArgumentException.class, () -> new TermsQueryBuilder("field", (int[]) null));
assertThat(e.getMessage(), containsString("No value specified for terms query"));
e = expectThrows(IllegalArgumentException.class, () -> new TermsQueryBuilder("field", (long[]) null));
assertThat(e.getMessage(), containsString("No value specified for terms query"));
e = expectThrows(IllegalArgumentException.class, () -> new TermsQueryBuilder("field", (float[]) null));
assertThat(e.getMessage(), containsString("No value specified for terms query"));
e = expectThrows(IllegalArgumentException.class, () -> new TermsQueryBuilder("field", (double[]) null));
assertThat(e.getMessage(), containsString("No value specified for terms query"));
e = expectThrows(IllegalArgumentException.class, () -> new TermsQueryBuilder("field", (Object[]) null));
assertThat(e.getMessage(), containsString("No value specified for terms query"));
e = expectThrows(IllegalArgumentException.class, () -> new TermsQueryBuilder("field", (Iterable<?>) null));
assertThat(e.getMessage(), containsString("No value specified for terms query"));
}
// Mixing inline terms and a terms lookup in one clause is a parse error.
public void testBothValuesAndLookupSet() throws IOException {
String query = "{\n" +
"  \"terms\": {\n" +
"    \"field\": [\n" +
"      \"blue\",\n" +
"      \"pill\"\n" +
"    ],\n" +
"    \"field_lookup\": {\n" +
"      \"index\": \"pills\",\n" +
"      \"type\": \"red\",\n" +
"      \"id\": \"3\",\n" +
"      \"path\": \"white rabbit\"\n" +
"    }\n" +
"  }\n" +
"}";
ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(query));
assertThat(e.getMessage(), containsString("[" + TermsQueryBuilder.NAME + "] query does not support more than one field."));
}
// Mocked get: serves the random terms under termsPath for lookup queries.
@Override
public GetResponse executeGet(GetRequest getRequest) {
String json;
try {
XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint();
builder.startObject();
builder.array(termsPath, randomTerms.toArray(new Object[randomTerms.size()]));
builder.endObject();
json = builder.string();
} catch (IOException ex) {
throw new ElasticsearchException("boom", ex);
}
return new GetResponse(new GetResult(getRequest.index(), getRequest.type(), getRequest.id(), 0, true, new BytesArray(json), null));
}
// Serialization must coerce ints to longs but keep doubles/floats as-is.
public void testNumeric() throws IOException {
{
TermsQueryBuilder builder = new TermsQueryBuilder("foo", new int[]{1, 3, 4});
TermsQueryBuilder copy = (TermsQueryBuilder) assertSerialization(builder);
List<Object> values = copy.values();
assertEquals(Arrays.asList(1L, 3L, 4L), values);
}
{
TermsQueryBuilder builder = new TermsQueryBuilder("foo", new double[]{1, 3, 4});
TermsQueryBuilder copy = (TermsQueryBuilder) assertSerialization(builder);
List<Object> values = copy.values();
assertEquals(Arrays.asList(1d, 3d, 4d), values);
}
{
TermsQueryBuilder builder = new TermsQueryBuilder("foo", new float[]{1, 3, 4});
TermsQueryBuilder copy = (TermsQueryBuilder) assertSerialization(builder);
List<Object> values = copy.values();
assertEquals(Arrays.asList(1f, 3f, 4f), values);
}
{
TermsQueryBuilder builder = new TermsQueryBuilder("foo", new long[]{1, 3, 4});
TermsQueryBuilder copy = (TermsQueryBuilder) assertSerialization(builder);
List<Object> values = copy.values();
assertEquals(Arrays.asList(1L, 3L, 4L), values);
}
}
public void testTermsQueryWithMultipleFields() throws IOException {
String query = XContentFactory.jsonBuilder().startObject()
.startObject("terms").array("foo", 123).array("bar", 456).endObject()
.endObject().string();
ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(query));
assertEquals("[" + TermsQueryBuilder.NAME + "] query does not support multiple fields", e.getMessage());
}
// Round-trips a canonical JSON body and checks the parsed values.
public void testFromJson() throws IOException {
String json =
"{\n" +
"  \"terms\" : {\n" +
"    \"user\" : [ \"kimchy\", \"elasticsearch\" ],\n" +
"    \"boost\" : 1.0\n" +
"  }\n" +
"}";
TermsQueryBuilder parsed = (TermsQueryBuilder) parseQuery(json);
checkGeneratedJson(json, parsed);
assertEquals(json, 2, parsed.values().size());
}
// A lookup-based builder must be rewritten (fetch executed) before toQuery.
@Override
public void testMustRewrite() throws IOException {
TermsQueryBuilder termsQueryBuilder = new TermsQueryBuilder(STRING_FIELD_NAME, randomTermsLookup());
UnsupportedOperationException e = expectThrows(UnsupportedOperationException.class,
() -> termsQueryBuilder.toQuery(createShardContext()));
assertEquals("query must be rewritten first", e.getMessage());
assertEquals(rewriteAndFetch(termsQueryBuilder, createShardContext()), new TermsQueryBuilder(STRING_FIELD_NAME,
randomTerms.stream().filter(x -> x != null).collect(Collectors.toList()))); // terms lookup removes null values
}
// Geo fields must reject exact-match terms queries with a helpful message.
public void testGeo() throws Exception {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
TermsQueryBuilder query = new TermsQueryBuilder(GEO_POINT_FIELD_NAME, "2,3");
QueryShardContext context = createShardContext();
QueryShardException e = expectThrows(QueryShardException.class,
() -> query.toQuery(context));
assertEquals("Geo fields do not support exact searching, use dedicated geo queries instead: [mapped_geo_point]",
e.getMessage());
}
@Override
protected boolean isCachable(TermsQueryBuilder queryBuilder) {
// even though we use a terms lookup here we do this during rewrite and that means we are cachable on toQuery
// that's why we return true here all the time
return super.isCachable(queryBuilder);
}
// Serializing an unrewritten (supplier-backed) builder must fail loudly.
public void testSerializationFailsUnlessFetched() throws IOException {
QueryBuilder builder = new TermsQueryBuilder(STRING_FIELD_NAME, randomTermsLookup());
QueryBuilder termsQueryBuilder = Rewriteable.rewrite(builder, createShardContext());
IllegalStateException ise = expectThrows(IllegalStateException.class, () -> termsQueryBuilder.writeTo(new BytesStreamOutput(10)));
assertEquals(ise.getMessage(), "supplier must be null, can't serialize suppliers, missing a rewriteAndFetch?");
builder = rewriteAndFetch(builder, createShardContext());
builder.writeTo(new BytesStreamOutput(10));
}
// convert/convertBack round-trips: strings become BytesRef, ints widen to long.
public void testConversion() {
List<Object> list = Arrays.asList();
assertSame(Collections.emptyList(), TermsQueryBuilder.convert(list));
assertEquals(list, TermsQueryBuilder.convertBack(TermsQueryBuilder.convert(list)));
list = Arrays.asList("abc");
assertEquals(Arrays.asList(new BytesRef("abc")), TermsQueryBuilder.convert(list));
assertEquals(list, TermsQueryBuilder.convertBack(TermsQueryBuilder.convert(list)));
list = Arrays.asList("abc", new BytesRef("def"));
assertEquals(Arrays.asList(new BytesRef("abc"), new BytesRef("def")), TermsQueryBuilder.convert(list));
assertEquals(Arrays.asList("abc", "def"), TermsQueryBuilder.convertBack(TermsQueryBuilder.convert(list)));
list = Arrays.asList(5, 42L);
assertEquals(Arrays.asList(5L, 42L), TermsQueryBuilder.convert(list));
assertEquals(Arrays.asList(5L, 42L), TermsQueryBuilder.convertBack(TermsQueryBuilder.convert(list)));
list = Arrays.asList(5, 42d);
assertEquals(Arrays.asList(5, 42d), TermsQueryBuilder.convert(list));
assertEquals(Arrays.asList(5, 42d), TermsQueryBuilder.convertBack(TermsQueryBuilder.convert(list)));
}
}
| |
package com.distelli.europa.registry;
import com.distelli.europa.db.RegistryManifestDb;
import com.distelli.europa.models.ContainerRepo;
import com.distelli.europa.models.Registry;
import com.distelli.europa.models.RegistryManifest;
import com.distelli.europa.util.Tag;
import com.distelli.gcr.models.GcrBlobUpload;
import com.distelli.gcr.models.GcrManifest;
import lombok.EqualsAndHashCode;
import javax.inject.Inject;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
/**
* Copy an image between two repositories.
*
* Use this class by injecting the builder then calling {@link #run()}; for example:
*
* <p>
* <pre><code>
*{@literal @}Inject
* private CopyImageBetweenRepos.Builder _copyImageBetweenReposBuilder;
*
* public void fooMethod() {
* _copyImageBetweenReposBuilder
* .sourceRepo(fooRepo)
* .destinationRepo(barRepo)
* .sourceReference(fooDigest)
* .destinationTag("latest")
* .build()
* .run()
* }
* </code></pre>
*/
@EqualsAndHashCode
public final class CopyImageBetweenRepos {
private final ContainerRepo sourceRepo;
private final ContainerRepo destinationRepo;
private final String sourceReference;
private final Set<String> destinationTags;
private RegistryManifestDb _manifestDb;
private RegistryFactory _registryFactory;
/**
 * Perform the copy operation.
 *
 * Dispatches to a database-only copy when both repositories are local;
 * otherwise streams the manifest and blobs between the two registries.
 *
 * @throws ManifestNotFoundException thrown if we cannot load the desired
 *                                   manifest from the source registry
 * @throws IOException thrown if we have any issues reading or writing an
 *                     object for a registry
 */
public void run() throws ManifestNotFoundException, IOException {
    boolean bothLocal = sourceRepo.isLocal() && destinationRepo.isLocal();
    if (bothLocal) {
        copyLocal();
    } else {
        copyRemote();
    }
}
/**
 * Copies a locally-stored image by re-writing its manifest DB record once
 * per destination tag; no blob data needs to move for local repositories.
 */
private void copyLocal() throws ManifestNotFoundException {
    RegistryManifest manifest = _manifestDb.getManifestByRepoIdTag(
        sourceRepo.getDomain(),
        sourceRepo.getId(),
        sourceReference);
    if (null == manifest) {
        throw new ManifestNotFoundException(sourceRepo.getName(), sourceReference);
    }
    for (String tag : destinationTags) {
        // Clone the record, retargeting domain, repo id and tag.
        _manifestDb.put(manifest.toBuilder()
                            .domain(destinationRepo.getDomain())
                            .containerRepoId(destinationRepo.getId())
                            .tag(tag)
                            .build());
    }
}
// Copies an image between registries: fetches the manifest from the source,
// uploads every referenced blob that the destination does not already have
// (using a cross-repository blob mount when both repos share provider and
// credentials), then writes the manifest under each destination tag.
private void copyRemote() throws ManifestNotFoundException, IOException {
// NOTE(review): getCredId() is dereferenced without a null check — if a repo
// can have a null credential id this NPEs; confirm upstream guarantees.
boolean crossRepositoryBlobMount = (sourceRepo.getProvider() == destinationRepo.getProvider() &&
sourceRepo.getCredId().equalsIgnoreCase(destinationRepo.getCredId()));
String crossBlobMountFrom = (crossRepositoryBlobMount) ? sourceRepo.getName() : null;
Registry sourceRegistry;
Registry destinationRegistry;
GcrManifest manifest;
sourceRegistry = _registryFactory.createRegistry(sourceRepo, Boolean.FALSE, null);
destinationRegistry = _registryFactory.createRegistry(destinationRepo, Boolean.TRUE, crossBlobMountFrom);
manifest = sourceRegistry.getManifest(sourceRepo.getName(), sourceReference);
if (manifest == null) {
throw new ManifestNotFoundException(sourceRepo.getName(), sourceReference);
}
// Blobs must exist at the destination before the manifest referencing them
// is written, hence the blob loop precedes the putManifest loop.
for (String digest : manifest.getReferencedDigests()) {
GcrBlobUpload upload = destinationRegistry.createBlobUpload(destinationRepo.getName(),
digest,
crossBlobMountFrom);
// isComplete() means the destination already has (or mounted) this blob.
if (!upload.isComplete()) {
upload.setMediaType(manifest.getMediaType());
// Stream the blob from source to destination without buffering it locally.
sourceRegistry.getBlob(sourceRepo.getName(),
digest,
(in, meta) -> destinationRegistry.blobUploadChunk(upload,
in,
meta.getLength(),
digest));
}
}
for (String tag : destinationTags) {
destinationRegistry.putManifest(destinationRepo.getName(), tag, manifest);
}
}
/**
 * Validates and captures the builder state.
 *
 * <p>If no destination tags were supplied, the source reference is used as
 * the single destination tag.
 *
 * @param builder the injected, populated builder
 * @throws IllegalStateException if dependency injection has not populated the builder
 * @throws IllegalArgumentException if a required argument is null or not a valid tag/digest
 */
private CopyImageBetweenRepos(Builder builder) {
    if (null == builder._manifestDb || null == builder._registryFactory) {
        throw new IllegalStateException("Injector.injectMembers(this) has not been called");
    }
    if (null == builder.sourceRepo) {
        throw new IllegalArgumentException("Source repository must not be null");
    }
    if (null == builder.destinationRepo) {
        throw new IllegalArgumentException("Destination repository must not be null");
    }
    if (null == builder.sourceReference) {
        throw new IllegalArgumentException("Source reference must not be null");
    }
    if (!Tag.isValid(builder.sourceReference)) {
        throw new IllegalArgumentException("Source reference must be a valid tag or digest");
    }
    // Iterating an empty set is a no-op, so no isEmpty() guard is needed.
    for (String tag : builder.destinationTags) {
        if (!Tag.isValid(tag)) {
            throw new IllegalArgumentException(String.format("Destination tag must be a valid tag or digest, got invalid value %s", tag));
        }
    }
    this.sourceRepo = builder.sourceRepo;
    this.destinationRepo = builder.destinationRepo;
    this.sourceReference = builder.sourceReference;
    // Collections.singleton() already returns an immutable set, so the
    // former unmodifiableSet(new HashSet<>(singleton(...))) wrapping was
    // redundant; behavior (an unmodifiable one-element set) is unchanged.
    this.destinationTags = builder.destinationTags.isEmpty()
        ? Collections.singleton(builder.sourceReference)
        : Collections.unmodifiableSet(builder.destinationTags);
    this._manifestDb = builder._manifestDb;
    this._registryFactory = builder._registryFactory;
}
/**
* Use this via dependency injection:
*
* <pre><code>
*{@literal @}Inject
* private CopyImageBetweenRepos.Builder _copyImageBetweenReposBuilder;
* </code></pre>
*/
public static class Builder {
// Required: repository to copy from.
private ContainerRepo sourceRepo;
// Required: repository to copy to.
private ContainerRepo destinationRepo;
// Required: tag or manifest digest identifying the source image.
private String sourceReference;
// Optional: tags for the destination image; when empty, the source
// reference is used as the single destination tag.
private Set<String> destinationTags = new HashSet<>();
@Inject
private RegistryManifestDb _manifestDb;
@Inject
private RegistryFactory _registryFactory;
/**
* Set the source repo to copy from.
*/
public Builder sourceRepo(ContainerRepo sourceRepo) {
this.sourceRepo = sourceRepo;
return this;
}
/**
* Set the destination repo to copy to.
*/
public Builder destinationRepo(ContainerRepo destinationRepo) {
this.destinationRepo = destinationRepo;
return this;
}
/**
* Set the tag or manifest digest SHA for the source image.
*/
public Builder sourceReference(String sourceReference) {
this.sourceReference = sourceReference;
return this;
}
/**
* Add a tag to use for the destination image.
*
* If none are set, it will use the value set with {@link #sourceReference(String)}
*/
public Builder destinationTag(String destinationTag) {
this.destinationTags.add(destinationTag);
return this;
}
/**
* Add multiple tags to use for the destination image.
*
* If none are set, it will use the value set with {@link #sourceReference(String)}
*/
public Builder destinationTags(Collection<String> destinationTags) {
this.destinationTags.addAll(destinationTags);
return this;
}
/**
* Validate the collected state and build the copy operation.
* The constructor throws if required values are missing or invalid.
*/
public CopyImageBetweenRepos build() {
return new CopyImageBetweenRepos(this);
}
}
}
| |
/*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2013 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.core.logging;
import java.util.ArrayList;
import java.util.List;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.variables.VariableSpace;
import org.pentaho.di.core.xml.XMLHandler;
import org.pentaho.di.repository.RepositoryAttributeInterface;
import org.pentaho.di.trans.HasDatabasesInterface;
import org.w3c.dom.Node;
/**
 * Base class for Kettle log table definitions (transformation, job, step,
 * channel, ... log tables). Holds the connection/schema/table settings, the
 * list of {@link LogTableField}s, and repository / XML (de)serialization of
 * that core information.
 */
public abstract class BaseLogTable {
  /** XML tag used to serialize a single log table field. */
  public static final String XML_TAG = "field";

  // Repository attribute code suffixes. NOTE(review): public and non-final
  // for historical reasons; the values are persisted in repositories and
  // must not change. The INTERVAL and SIZE_LIMIT codes lack the leading
  // underscore used by the others; left as-is because existing repositories
  // already store attributes under these exact names.
  public static String PROP_LOG_TABLE_CONNECTION_NAME = "_LOG_TABLE_CONNECTION_NAME";
  public static String PROP_LOG_TABLE_SCHEMA_NAME = "_LOG_TABLE_SCHEMA_NAME";
  public static String PROP_LOG_TABLE_TABLE_NAME = "_LOG_TABLE_TABLE_NAME";
  public static String PROP_LOG_TABLE_FIELD_ID = "_LOG_TABLE_FIELD_ID";
  public static String PROP_LOG_TABLE_FIELD_NAME = "_LOG_TABLE_FIELD_NAME";
  public static String PROP_LOG_TABLE_FIELD_ENABLED = "_LOG_TABLE_FIELD_ENABLED";
  public static String PROP_LOG_TABLE_FIELD_SUBJECT = "_LOG_TABLE_FIELD_SUBJECT";
  public static String PROP_LOG_TABLE_INTERVAL = "LOG_TABLE_INTERVAL";
  public static String PROP_LOG_TABLE_SIZE_LIMIT = "LOG_TABLE_SIZE_LIMIT";
  public static String PROP_LOG_TABLE_TIMEOUT_DAYS = "_LOG_TABLE_TIMEOUT_IN_DAYS";

  /** Variable space used to resolve ${...} expressions in names. */
  protected VariableSpace space;
  /** Source of database connections, used to resolve the connection name. */
  protected HasDatabasesInterface databasesInterface;

  // Raw (possibly variable-containing) settings; use the getActual*()
  // methods to obtain resolved values.
  protected String connectionName;
  protected String schemaName;
  protected String tableName;
  protected String timeoutInDays;

  /** The field definitions of this log table. */
  protected List<LogTableField> fields;

  public BaseLogTable( VariableSpace space, HasDatabasesInterface databasesInterface, String connectionName,
    String schemaName, String tableName ) {
    this.space = space;
    this.databasesInterface = databasesInterface;
    this.connectionName = connectionName;
    this.schemaName = schemaName;
    this.tableName = tableName;
    this.fields = new ArrayList<LogTableField>();
  }

  /**
   * Replace this table's settings and fields with deep copies of the ones
   * from the given log table.
   */
  public void replaceMeta( BaseLogTable baseLogTable ) {
    this.space = baseLogTable.space;
    this.databasesInterface = baseLogTable.databasesInterface;
    this.connectionName = baseLogTable.connectionName;
    this.schemaName = baseLogTable.schemaName;
    this.tableName = baseLogTable.tableName;
    this.timeoutInDays = baseLogTable.timeoutInDays;
    fields.clear();
    for ( LogTableField field : baseLogTable.fields ) {
      try {
        // Clone so later edits on one table do not leak into the other.
        fields.add( (LogTableField) field.clone() );
      } catch ( CloneNotSupportedException e ) {
        throw new RuntimeException( "Clone problem with the base log table", e );
      }
    }
  }

  @Override
  public String toString() {
    if ( isDefined() ) {
      return getDatabaseMeta().getName() + "-" + getActualTableName();
    }
    return super.toString();
  }

  @Override
  protected Object clone() throws CloneNotSupportedException {
    return super.clone();
  }

  /**
   * Save this core information of the log table to the repository using the specified attribute interface.
   *
   * @param attributeInterface
   *          The attribute interface to use to set attributes
   * @throws KettleException
   */
  public void saveToRepository( RepositoryAttributeInterface attributeInterface ) throws KettleException {
    attributeInterface.setAttribute( getLogTableCode() + PROP_LOG_TABLE_CONNECTION_NAME, getConnectionName() );
    attributeInterface.setAttribute( getLogTableCode() + PROP_LOG_TABLE_SCHEMA_NAME, getSchemaName() );
    attributeInterface.setAttribute( getLogTableCode() + PROP_LOG_TABLE_TABLE_NAME, getTableName() );
    attributeInterface.setAttribute( getLogTableCode() + PROP_LOG_TABLE_TIMEOUT_DAYS, getTimeoutInDays() );

    // Store the fields too, one attribute per property, indexed by position.
    //
    for ( int i = 0; i < getFields().size(); i++ ) {
      LogTableField field = getFields().get( i );
      attributeInterface.setAttribute( getLogTableCode() + PROP_LOG_TABLE_FIELD_ID + i, field.getId() );
      attributeInterface.setAttribute( getLogTableCode() + PROP_LOG_TABLE_FIELD_NAME + i, field.getFieldName() );
      attributeInterface.setAttribute( getLogTableCode() + PROP_LOG_TABLE_FIELD_ENABLED + i, field.isEnabled() );
      if ( field.isSubjectAllowed() ) {
        attributeInterface.setAttribute(
          getLogTableCode() + PROP_LOG_TABLE_FIELD_SUBJECT + i, field.getSubject() == null ? null : field
            .getSubject().toString() );
      }
    }
  }

  /**
   * Load the core information of the log table from the repository. Values
   * missing in the repository leave the current setting untouched (except
   * the timeout, which is always overwritten).
   */
  public void loadFromRepository( RepositoryAttributeInterface attributeInterface ) throws KettleException {
    String connectionNameFromRepository =
      attributeInterface.getAttributeString( getLogTableCode() + PROP_LOG_TABLE_CONNECTION_NAME );
    if ( connectionNameFromRepository != null ) {
      connectionName = connectionNameFromRepository;
    }
    String schemaNameFromRepository =
      attributeInterface.getAttributeString( getLogTableCode() + PROP_LOG_TABLE_SCHEMA_NAME );
    if ( schemaNameFromRepository != null ) {
      schemaName = schemaNameFromRepository;
    }
    String tableNameFromRepository =
      attributeInterface.getAttributeString( getLogTableCode() + PROP_LOG_TABLE_TABLE_NAME );
    if ( tableNameFromRepository != null ) {
      tableName = tableNameFromRepository;
    }
    timeoutInDays = attributeInterface.getAttributeString( getLogTableCode() + PROP_LOG_TABLE_TIMEOUT_DAYS );
    for ( int i = 0; i < getFields().size(); i++ ) {
      String id = attributeInterface.getAttributeString( getLogTableCode() + PROP_LOG_TABLE_FIELD_ID + i );
      // Only read further if the ID is available.
      // For backward compatibility, this might not be provided yet!
      //
      if ( id != null ) {
        LogTableField field = findField( id );
        if ( field != null ) {
          field.setFieldName( attributeInterface.getAttributeString( getLogTableCode()
            + PROP_LOG_TABLE_FIELD_NAME + i ) );
          field.setEnabled( attributeInterface.getAttributeBoolean( getLogTableCode()
            + PROP_LOG_TABLE_FIELD_ENABLED + i ) );
          if ( field.isSubjectAllowed() ) {
            field.setSubject( attributeInterface.getAttributeString( getLogTableCode()
              + PROP_LOG_TABLE_FIELD_SUBJECT + i ) );
          }
        }
      }
    }
  }

  /** @return the short code identifying this log table type (used as attribute prefix). */
  public abstract String getLogTableCode();

  /** @return the name of the variable holding the default connection name. */
  public abstract String getConnectionNameVariable();

  /** @return the name of the variable holding the default schema name. */
  public abstract String getSchemaNameVariable();

  /** @return the name of the variable holding the default table name. */
  public abstract String getTableNameVariable();

  /**
   * @return the databaseMeta resolved from the actual connection name, or null if no
   *         connection name or databases interface is available.
   */
  public DatabaseMeta getDatabaseMeta() {
    String name = getActualConnectionName();
    if ( name == null ) {
      return null;
    }
    if ( databasesInterface == null ) {
      return null;
    }
    return databasesInterface.findDatabase( name );
  }

  /**
   * @return the connectionName after variable substitution, falling back to the
   *         connection name variable; null if neither yields a value.
   */
  public String getActualConnectionName() {
    String name = space.environmentSubstitute( connectionName );
    if ( Const.isEmpty( name ) ) {
      name = space.getVariable( getConnectionNameVariable() );
    }
    if ( Const.isEmpty( name ) ) {
      return null;
    } else {
      return name;
    }
  }

  /**
   * @return the schemaName after variable substitution, falling back to the
   *         schema name variable; null if neither yields a value.
   */
  public String getActualSchemaName() {
    if ( !Const.isEmpty( schemaName ) ) {
      return space.environmentSubstitute( schemaName );
    }
    String name = space.getVariable( getSchemaNameVariable() );
    if ( Const.isEmpty( name ) ) {
      return null;
    } else {
      return name;
    }
  }

  /**
   * @param schemaName
   *          the schemaName to set
   */
  public void setSchemaName( String schemaName ) {
    this.schemaName = schemaName;
  }

  public String getSchemaName() {
    return schemaName;
  }

  /**
   * @return the tableName after variable substitution, falling back to the
   *         table name variable; null if neither yields a value.
   */
  public String getActualTableName() {
    if ( !Const.isEmpty( tableName ) ) {
      return space.environmentSubstitute( tableName );
    }
    String name = space.getVariable( getTableNameVariable() );
    if ( Const.isEmpty( name ) ) {
      return null;
    } else {
      return name;
    }
  }

  public String getTableName() {
    return tableName;
  }

  /**
   * @param tableName
   *          the tableName to set
   */
  public void setTableName( String tableName ) {
    this.tableName = tableName;
  }

  /**
   * @return the quoted schema/table combination.
   *         NOTE(review): assumes {@link #isDefined()} is true; throws NPE otherwise.
   */
  public String getQuotedSchemaTableCombination() {
    return getDatabaseMeta().getQuotedSchemaTableCombination( getActualSchemaName(), getActualTableName() );
  }

  /**
   * @return the fields (the live list, not a copy)
   */
  public List<LogTableField> getFields() {
    return fields;
  }

  /**
   * @param fields
   *          the fields to set
   */
  public void setFields( List<LogTableField> fields ) {
    this.fields = fields;
  }

  /**
   * Find a log table field in this log table definition. Use the id of the field to do the lookup.
   *
   * @param id
   *          the id of the field to search for
   * @return the log table field or null if nothing was found.
   */
  public LogTableField findField( String id ) {
    for ( LogTableField field : fields ) {
      if ( field.getId().equals( id ) ) {
        return field;
      }
    }
    return null;
  }

  /**
   * Get the subject of a field with the specified ID
   *
   * @param id
   * @return the subject or null if no field could be found with the specified id
   */
  public Object getSubject( String id ) {
    LogTableField field = findField( id );
    if ( field == null ) {
      return null;
    }
    return field.getSubject();
  }

  /**
   * Return the subject in the form of a string for the specified ID.
   *
   * @param id
   *          the id of the field to look for.
   * @return the string of the subject (name of step) or null if nothing was found.
   */
  public String getSubjectString( String id ) {
    LogTableField field = findField( id );
    if ( field == null ) {
      return null;
    }
    if ( field.getSubject() == null ) {
      return null;
    }
    return field.getSubject().toString();
  }

  /** @return true if any field is marked as a key field. */
  public boolean containsKeyField() {
    for ( LogTableField field : fields ) {
      if ( field.isKey() ) {
        return true;
      }
    }
    return false;
  }

  /**
   * @return the field that represents the log date field or null if none was defined.
   */
  public LogTableField getLogDateField() {
    for ( LogTableField field : fields ) {
      if ( field.isLogDateField() ) {
        return field;
      }
    }
    return null;
  }

  /**
   * @return the field that represents the key to this logging table (batch id etc), or null.
   */
  public LogTableField getKeyField() {
    for ( LogTableField field : fields ) {
      if ( field.isKey() ) {
        return field;
      }
    }
    return null;
  }

  /**
   * @return the field that represents the logging text (or null if none is found)
   */
  public LogTableField getLogField() {
    for ( LogTableField field : fields ) {
      if ( field.isLogField() ) {
        return field;
      }
    }
    return null;
  }

  /**
   * @return the field that represents the status (or null if none is found)
   */
  public LogTableField getStatusField() {
    for ( LogTableField field : fields ) {
      if ( field.isStatusField() ) {
        return field;
      }
    }
    return null;
  }

  /**
   * @return the field that represents the number of errors (or null if none is found)
   */
  public LogTableField getErrorsField() {
    for ( LogTableField field : fields ) {
      if ( field.isErrorsField() ) {
        return field;
      }
    }
    return null;
  }

  /**
   * @return the field that represents the name of the object that is being used (or null if none is found)
   */
  public LogTableField getNameField() {
    for ( LogTableField field : fields ) {
      if ( field.isNameField() ) {
        return field;
      }
    }
    return null;
  }

  /** @return the XML serialization of all fields of this log table. */
  protected String getFieldsXML() {
    StringBuilder retval = new StringBuilder();

    for ( LogTableField field : fields ) {
      retval.append( XMLHandler.openTag( XML_TAG ) );

      retval.append( XMLHandler.addTagValue( "id", field.getId(), false ) );
      retval.append( XMLHandler.addTagValue( "enabled", field.isEnabled(), false ) );
      retval.append( XMLHandler.addTagValue( "name", field.getFieldName(), false ) );
      if ( field.isSubjectAllowed() ) {
        retval.append( XMLHandler.addTagValue( "subject", field.getSubject() == null ? null : field
          .getSubject().toString(), false ) );
      }

      retval.append( XMLHandler.closeTag( XML_TAG ) );
    }

    return retval.toString();
  }

  /** Load field name/enabled flags from the XML node produced by {@link #getFieldsXML()}. */
  public void loadFieldsXML( Node node ) {
    int nr = XMLHandler.countNodes( node, BaseLogTable.XML_TAG );
    for ( int i = 0; i < nr; i++ ) {
      Node fieldNode = XMLHandler.getSubNodeByNr( node, BaseLogTable.XML_TAG, i );
      String id = XMLHandler.getTagValue( fieldNode, "id" );
      LogTableField field = findField( id );
      if ( field == null && i < fields.size() ) {
        field = fields.get( i ); // backward compatible until we go GA
      }
      if ( field != null ) {
        field.setFieldName( XMLHandler.getTagValue( fieldNode, "name" ) );
        field.setEnabled( "Y".equalsIgnoreCase( XMLHandler.getTagValue( fieldNode, "enabled" ) ) );
      }
    }
  }

  /** @return true if both a database connection and an actual table name are available. */
  public boolean isDefined() {
    return getDatabaseMeta() != null && !Const.isEmpty( getActualTableName() );
  }

  /**
   * @return the timeoutInDays
   */
  public String getTimeoutInDays() {
    return timeoutInDays;
  }

  /**
   * @param timeoutInDays
   *          the timeoutInDays to set
   */
  public void setTimeoutInDays( String timeoutInDays ) {
    this.timeoutInDays = timeoutInDays;
  }

  /**
   * @return the connectionName
   */
  public String getConnectionName() {
    return connectionName;
  }

  /**
   * @param connectionName
   *          the connectionName to set
   */
  public void setConnectionName( String connectionName ) {
    this.connectionName = connectionName;
  }

  /**
   * Return the (optionally size-limited) log text of a log channel followed
   * by the status keyword on its own line.
   */
  protected String getLogBuffer( VariableSpace space, String logChannelId, LogStatus status, String limit ) {

    StringBuffer buffer = KettleLogStore.getAppender().getBuffer( logChannelId, true );

    if ( Const.isEmpty( limit ) ) {
      String defaultLimit = space.getVariable( Const.KETTLE_LOG_SIZE_LIMIT, null );
      if ( !Const.isEmpty( defaultLimit ) ) {
        limit = defaultLimit;
      }
    }

    // See if we need to limit the amount of rows
    //
    int nrLines = Const.isEmpty( limit ) ? -1 : Const.toInt( space.environmentSubstitute( limit ), -1 );

    if ( nrLines > 0 ) {
      // Keep only the last nrLines lines by scanning for line separators
      // backwards from the end of the buffer.
      int start = buffer.length() - 1;
      for ( int i = 0; i < nrLines && start > 0; i++ ) {
        start = buffer.lastIndexOf( Const.CR, start - 1 );
      }
      if ( start > 0 ) {
        buffer.delete( 0, start + Const.CR.length() );
      }
    }

    return buffer.append( Const.CR + status.getStatus().toUpperCase() + Const.CR ).toString();
  }

  // PDI-7070: implement equals for comparison of job/trans log table to its parent log table
  @Override
  public boolean equals( Object obj ) {
    if ( obj == null || !( obj instanceof BaseLogTable ) ) {
      return false;
    }
    BaseLogTable blt = (BaseLogTable) obj;

    // Get actual names for comparison
    String cName = this.getActualConnectionName();
    String sName = this.getActualSchemaName();
    String tName = this.getActualTableName();

    return ( ( cName == null ? blt.getActualConnectionName() == null : cName
      .equals( blt.getActualConnectionName() ) )
      && ( sName == null ? blt.getActualSchemaName() == null : sName.equals( blt.getActualSchemaName() ) )
      && ( tName == null ? blt.getActualTableName() == null : tName.equals( blt.getActualTableName() ) ) );
  }

  /**
   * hashCode consistent with {@link #equals(Object)}: derived from the same
   * actual (variable-substituted) connection, schema and table names.
   * Previously equals was overridden without hashCode, which breaks the
   * Object contract when log tables are used in hash-based collections.
   */
  @Override
  public int hashCode() {
    String cName = getActualConnectionName();
    String sName = getActualSchemaName();
    String tName = getActualTableName();
    int result = cName == null ? 0 : cName.hashCode();
    result = 31 * result + ( sName == null ? 0 : sName.hashCode() );
    result = 31 * result + ( tName == null ? 0 : tName.hashCode() );
    return result;
  }

  /** Null out any setting whose value is a ${...} reference to a defined system property. */
  public void setAllGlobalParametersToNull() {
    schemaName = isGlobalParameter( schemaName ) ? null : schemaName;
    connectionName = isGlobalParameter( connectionName ) ? null : connectionName;
    tableName = isGlobalParameter( tableName ) ? null : tableName;
    timeoutInDays = isGlobalParameter( timeoutInDays ) ? null : timeoutInDays;
  }

  /**
   * @return true when the parameter has the form ${NAME} and NAME is a
   *         defined system property.
   */
  protected boolean isGlobalParameter( String parameter ) {
    if ( parameter == null ) {
      return false;
    }

    if ( parameter.startsWith( "${" ) && parameter.endsWith( "}" ) ) {
      return System.getProperty( parameter.substring( 2, parameter.length() - 1 ) ) != null;
    }

    return false;
  }
}
| |
package ceylon.language;
import com.redhat.ceylon.compiler.java.metadata.Defaulted;
import com.redhat.ceylon.compiler.java.metadata.Name;
import com.redhat.ceylon.compiler.java.runtime.model.ReifiedType;
import com.redhat.ceylon.compiler.java.runtime.model.TypeDescriptor;
/**
 * Java runtime representation of the Ceylon {@code Integer} type, backed by a
 * signed 64-bit {@code long}. Instance methods operate on boxed values; the
 * static variants take/return raw primitives for unboxed arithmetic.
 */
public final class Integer
implements ReifiedType,
java.io.Serializable,
java.lang.Comparable<Integer> {
private static final long serialVersionUID = 3611850372864102202L;
// 2^53: the smallest positive long magnitude that can no longer be
// represented exactly as an IEEE-754 double. Used as the bound for
// long <-> double coercion and comparison.
static final long TWO_FIFTY_THREE = 1L << 53;
public final static TypeDescriptor $TypeDescriptor$ =
TypeDescriptor.klass(Integer.class);
public static long smallest(@Name("x") long x, @Name("y") long y) {
return Math.min(x, y);
}
public static long largest(@Name("x") long x, @Name("y") long y) {
return Math.max(x, y);
}
// The wrapped primitive value.
final long value;
@SharedAnnotation$annotation$
public Integer(@Name("integer") long integer) {
value = integer;
}
public static Integer instance(long l) {
return new Integer(l);
}
public long longValue() {
return value;
}
// --- basic arithmetic: boxed instance methods + static primitive variants ---
public Integer plus(@Name("other") Integer other) {
return instance(value + other.value);
}
public static long plus(long value, long otherValue) {
return value + otherValue;
}
public Integer minus(@Name("other") Integer other) {
return instance(value - other.value);
}
public static long minus(long value, long otherValue) {
return value - otherValue;
}
public Integer times(@Name("other") Integer other) {
return instance(value * other.value);
}
public static long times(long value, long otherValue) {
return value * otherValue;
}
public Integer divided(@Name("other") Integer other) {
return instance(value / other.value);
}
public static long divided(long value, long otherValue) {
return value / otherValue;
}
// Exponents at or above this threshold use exponentiation by squaring;
// smaller exponents use plain repeated multiplication.
private static final long POWER_BY_SQUARING_BREAKEVEN = 6;
// Exponentiation by squaring; assumes power >= 0. Overflow wraps silently.
private static long powerBySquaring(long base, long power) {
long result = 1;
long x = base;
while (power != 0) {
if ((power & 1L) == 1L) {
result *= x;
power -= 1;
}
x *= x;
power /= 2;
}
return result;
}
// Naive repeated multiplication; assumes power >= 0.
private static long powerByMultiplying(long base, long power) {
long result = 1;
while (power > 0) {
result *= base;
power--;
}
return result;
}
public Integer power(@Name("other") Integer other) {
return instance(power(value, other.value));
}
// Integer exponentiation. Negative exponents are only meaningful for
// base -1 and 1 (handled first); otherwise they are rejected because the
// result would not be an integer.
public static long power(long value, long otherValue) {
long power = otherValue;
if (value == -1) {
return power % 2 == 0 ? 1L : -1L;
}
else if (value == 1) {
return 1L;
}
else if (power < 0) {
throw new AssertionError(value + "^" + power +
" cannot be represented as an Integer");
}
else if (power == 0) {
return 1L;
}
else if (power == 1) {
return value;
}
else if (power >= POWER_BY_SQUARING_BREAKEVEN) {
return powerBySquaring(value, power);
}
else {
return powerByMultiplying(value, power);
}
}
// 32-bit variants of the helpers above, used by the int overload of $power$.
private static int powerBySquaring(int base, int power) {
int result = 1;
int x = base;
while (power != 0) {
if ((power & 1) == 1) {
result *= x;
power -= 1;
}
x *= x;
power /= 2;
}
return result;
}
private static int powerByMultiplying(int base, int power) {
int result = 1;
while (power > 0) {
result *= base;
power--;
}
return result;
}
public static long $power$(long value, long otherValue) {
return power(value, otherValue);
}
// int version of power(), with the same special cases.
public static int $power$(int value, int otherValue) {
int power = otherValue;
if (value == -1) {
return power % 2 == 0 ? 1 : -1;
}
else if (value == 1) {
return 1;
}
else if (power < 0) {
throw new AssertionError(value + "^" + power +
" cannot be represented as an Integer");
}
else if (power == 0) {
return 1;
}
else if (power == 1) {
return value;
}
else if (power >= POWER_BY_SQUARING_BREAKEVEN) {
return powerBySquaring(value, power);
}
else {
return powerByMultiplying(value, power);
}
}
// --- mixed Integer/Float arithmetic (long is widened to double) ---
public Float plus(Float other) {
return Float.instance(value + other.value);
}
public static double plus(long value, double otherValue) {
return value + otherValue;
}
public Float minus(Float other) {
return Float.instance(value - other.value);
}
public static double minus(long value, double otherValue) {
return value - otherValue;
}
public Float times(Float other) {
return Float.instance(value * other.value);
}
public static double times(long value, double otherValue) {
return value * otherValue;
}
public Float divided(Float other) {
return Float.instance(value / other.value);
}
public static double divided(long value, double otherValue) {
return value / otherValue;
}
public Float power(Float other) {
return Float.instance(Math.pow(value, other.value)); // FIXME: ugly
}
public static double power(long value, double otherValue) {
return Math.pow(value, otherValue); // FIXME: ugly
}
public static double $power$(long value, double otherValue) {
return Math.pow(value, otherValue); // FIXME: ugly
}
// NOTE(review): Math.abs(Long.MIN_VALUE) is still Long.MIN_VALUE (overflow).
public Integer getMagnitude() {
return instance(Math.abs(value));
}
public static long getMagnitude(long value) {
return Math.abs(value);
}
// Integers have no fractional part by definition.
public Integer getFractionalPart() {
return instance(0);
}
public static long getFractionalPart(long value) {
return 0;
}
public Integer getWholePart() {
return this;
}
public static long getWholePart(long value) {
return value;
}
public boolean getPositive() {
return value > 0;
}
public static boolean getPositive(long value) {
return value > 0;
}
public boolean getNegative() {
return value < 0;
}
public static boolean getNegative(long value) {
return value < 0;
}
// Signum: 1, -1 or 0.
public long getSign() {
if (value > 0)
return 1;
if (value < 0)
return -1;
return 0;
}
public static long getSign(long value) {
if (value > 0)
return 1;
if (value < 0)
return -1;
return 0;
}
// Remainder: follows Java % semantics (result takes the sign of value).
public Integer remainder(@Name("other") Integer other) {
return instance(value % other.value);
}
public static long remainder(long value, long otherValue) {
return value % otherValue;
}
public Integer modulo(@Name("modulus") Integer modulus) {
return instance(modulo(value, modulus.value));
}
// Mathematical modulo: result is always in [0, modulus); requires a
// positive modulus.
public static long modulo(long value, long modulus) {
if(modulus < 0)
throw new AssertionError("modulus must be positive: "+modulus);
long ret = value % modulus;
if(ret < 0)
return ret + modulus;
return ret;
}
// true when this value evenly divides other. NOTE(review): a zero receiver
// raises ArithmeticException here -- presumably the caller guarantees
// non-zero; confirm against the Ceylon language spec.
public final boolean divides(@Name("other") Integer other) {
return other.value % value == 0;
}
public static boolean divides(long value, long otherValue) {
return otherValue % value == 0;
}
public Integer getNegated() {
return instance(-value);
}
public static long getNegated(long value) {
return -value;
}
@Override
public int compareTo(Integer other) {
return Long.compare(value, other.value);
}
@Override
public java.lang.String toString() {
return java.lang.Long.toString(value);
}
public static java.lang.String toString(long value) {
return java.lang.Long.toString(value);
}
// Enumerable
// Checked addition: throws instead of wrapping on long overflow.
public static long neighbour(long value, long offset) {
long neighbour = value+offset;
//Overflow iff both arguments have the opposite sign of the result
if (((value^neighbour) & (offset^neighbour)) < 0) {
throw new OverflowException(value + " has no neighbour with offset " + offset);
}
return neighbour;
}
public Integer neighbour(@Name("offset") long offset) {
return instance(neighbour(value,offset));
}
// Checked subtraction: throws instead of wrapping on long overflow.
public static long offset(long value, long other) {
long offset = value-other;
//Overflow iff the arguments have different signs and
//the sign of the result is different than the sign of x
if (((value^other) & (value^offset)) < 0) {
throw new OverflowException(
"offset from " + value + " to " + other + " cannot be represented as a 64 bit integer.");
}
return offset;
}
public long offset(@Name("other") Integer other) {
return offset(value, other.value);
}
// Sign of (value - other) without the overflow risk of actual subtraction.
public static long offsetSign(long value, long other) {
if (value>other) {
return 1;
}
else if (value<other) {
return -1;
}
else {
return 0;
}
}
public long offsetSign(@Name("other") Integer other) {
return offsetSign(value, other.value);
}
// Conversions between numeric types
public double getFloat() {
return getFloat(value);
}
// Exact conversion to double: rejects magnitudes >= 2^53, which cannot be
// represented without precision loss.
public static double getFloat(long value) {
if (value <= -TWO_FIFTY_THREE || TWO_FIFTY_THREE <= value) {
throw new OverflowException(value + " cannot be coerced into a 64 bit floating point value");
}
else {
return (double) value;
}
}
// Lossy conversion: rounds to the nearest representable double.
public double getNearestFloat() {
return (double) value;
}
public static double getNearestFloat(long value) {
return (double) value;
}
// Truncating narrowing conversion: keeps only the low 8 bits.
public byte getByte() {
return getByte(value);
}
public static byte getByte(long value) {
return (byte) value;
}
public boolean getEven() {
return (value&1)==0;
}
public static boolean getEven(long value) {
return (value&1)==0;
}
public boolean getUnit() {
return value==1;
}
public static boolean getUnit(long value) {
return value==1;
}
public boolean getZero() {
return value==0;
}
public static boolean getZero(long value) {
return value==0;
}
// NOTE(review): predecessor/successor wrap silently at Long.MIN/MAX_VALUE,
// unlike neighbour() which throws on overflow.
public Integer getPredecessor() {
return Integer.instance(value - 1);
}
public static long getPredecessor(long value) {
return value - 1;
}
public Integer getSuccessor() {
return Integer.instance(value + 1);
}
public static long getSuccessor(long value) {
return value + 1;
}
public boolean equals(@Name("that") java.lang.Object that) {
return equals(value, that);
}
// Equality: equal to another Integer with the same value, or to a Float
// with the same value -- but only while the magnitude is strictly below
// 2^53, where long -> double conversion is exact.
public static boolean equals(long value, java.lang.Object that) {
if (that instanceof Integer) {
return value == ((Integer)that).value;
}
else if (that instanceof Float) {
return value == ((Float) that).value
&& value > -TWO_FIFTY_THREE
&& value < TWO_FIFTY_THREE;
}
else {
return false;
}
}
// Same hash as java.lang.Long: high word XOR low word.
@Override
public int hashCode() {
return (int)(value ^ (value >>> 32));
}
public static int hashCode(long value) {
return (int)(value ^ (value >>> 32));
}
// --- bitwise operations ---
public Integer getNot() {
return instance(~value);
}
public static long getNot(long value){
return ~value;
}
// NOTE(review): the shift operators inherit Java semantics -- the shift
// amount is taken modulo 64.
public Integer leftLogicalShift(@Name("shift") long shift) {
return instance(value << shift);
}
public static long leftLogicalShift(long value, long shift) {
return value << shift;
}
public Integer rightLogicalShift(@Name("shift") long shift) {
return instance(value >>> shift);
}
public static long rightLogicalShift(long value, long shift) {
return value >>> shift;
}
public Integer rightArithmeticShift(@Name("shift") long shift) {
return instance(value >> shift);
}
public static long rightArithmeticShift(long value, long shift) {
return value >> shift;
}
public Integer and(@Name("other") Integer other) {
return instance(value & other.value);
}
public static long and(long value, long other){
return value & other;
}
public Integer or(@Name("other") Integer other) {
return instance(value | other.value);
}
public static long or(long value, long other){
return value | other;
}
public Integer xor(@Name("other") Integer other) {
return instance(value ^ other.value);
}
public static long xor(long value, long other){
return value ^ other;
}
// --- single-bit access; indices outside 0..63 are no-ops (get -> false) ---
public boolean get(@Name("index") long index) {
return get(value, index);
}
public static boolean get(long value, long index) {
if (index < 0 || index > 63) {
return false;
}
long mask = 1l << index;
return (value & mask) != 0;
}
public Integer set(long index) {
return instance(set(value, index));
}
public Integer set(@Name("index") long index,
@Name("bit") @Defaulted boolean bit) {
return instance(set(value, index, bit));
}
// Default value for the "bit" parameter of set(index, bit).
public boolean set$bit(long index) {
return true;
}
public static long set(long value, long index) {
return set(value, index, true);
}
public static long set(long value, long index, boolean bit) {
if (index < 0 || index > 63) {
return value;
}
long mask = 1l << index;
return bit ? value | mask : value & ~mask;
}
public Integer clear(@Name("index") long index) {
return instance(clear(value, index));
}
public static long clear(long value, long index) {
if (index < 0 || index > 63) {
return value;
}
long mask = 1l << index;
return value & ~mask;
}
public Integer flip(@Name("index") long index) {
return instance(flip(value, index));
}
public static long flip(long value, long index) {
if (index < 0 || index > 63) {
return value;
}
long mask = 1l << index;
return value ^ mask;
}
@Override
public TypeDescriptor $getType$() {
return $TypeDescriptor$;
}
// --- comparison operators ---
public static boolean largerThan(long value, Integer other) {
return value>other.value;
}
public static boolean largerThan(long value, long other) {
return value>other;
}
public boolean largerThan(@Name("other") Integer other) {
return value>other.value;
}
public static boolean notSmallerThan(long value, Integer other) {
return value>=other.value;
}
public static boolean notSmallerThan(long value, long other) {
return value>=other;
}
public boolean notSmallerThan(@Name("other") Integer other) {
return value>=other.value;
}
public static boolean smallerThan(long value, Integer other) {
return value<other.value;
}
public static boolean smallerThan(long value, long other) {
return value<other;
}
public boolean smallerThan(@Name("other") Integer other) {
return value<other.value;
}
public static boolean notLargerThan(long value, Integer other) {
return value<=other.value;
}
public static boolean notLargerThan(long value, long other) {
return value<=other;
}
public boolean notLargerThan(@Name("other") Integer other) {
return value<=other.value;
}
public Integer timesInteger(@Name("integer") long integer) {
return instance(value*integer);
}
public static long timesInteger(long value, long integer) {
return value*integer;
}
public Integer plusInteger(@Name("integer") long integer) {
return instance(value+integer);
}
public static long plusInteger(long value, long integer) {
return value+integer;
}
public Integer powerOfInteger(@Name("integer") long integer) {
return instance(power(value,integer));
}
public static long powerOfInteger(long value, long integer) {
return power(value,integer);
}
// Parses a decimal string; propagates NumberFormatException on bad input.
public static Integer valueOf(java.lang.String string) {
return instance(java.lang.Long.parseLong(string));
}
}
| |
package cz.webarchiv.WadminKonspekt;
import cz.webarchiv.WadminKonspekt.oai.OAIHarvester;
import cz.webarchiv.WadminKonspekt.oai.OAIRecord;
import cz.webarchiv.WadminKonspekt.wadmin.Resource;
import cz.webarchiv.WadminKonspekt.wadmin.WAdmin;
import cz.webarchiv.WadminKonspekt.wayback.Drawer;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Properties;
/**
*
* @author Vaclav Rosecky
*/
public class Main {

    /** Hostname prefix used to generate the www/non-www twin of a host. */
    private static final String PREFIX_WWW = "www.";

    /**
     * Entry point: loads the properties file named by args[0], generates the
     * Wayback contract (hostname) file and the URLs file from WA Admin's
     * contracted resources, then synchronises conspectus metadata harvested
     * over OAI-PMH back into WA Admin.
     *
     * Exit codes: 1 = bad usage, 2 = unreadable properties, 3 = file generation error.
     */
    public static void main(String args[]) throws Exception {
        if (args.length != 1) {
            System.err.println("Usage java -jar WadminKonspekt.jar file_with_properties");
            System.exit(1);
        }
        Properties properties = null;
        String file = args[0];
        try {
            properties = loadProperties(new File(file));
        } catch (IOException ioe) {
            System.err.format("Error reading properties from file %s.", file);
            System.exit(2);
        }
        Map<String, String> subCategoriesMapping = Helper.loadSubCategories();
        String OAIProvider = properties.getProperty("oai.provider");
        String OAIBase = properties.getProperty("oai.base");
        String OAIMetadataFormat = properties.getProperty("oai.metadataformat");
        OAIHarvester harvester = new OAIHarvester(OAIProvider, OAIBase, OAIMetadataFormat);
        String dbURL = properties.getProperty("wadmin.db.url");
        String dbUser = properties.getProperty("wadmin.db.user");
        String dbPassword = properties.getProperty("wadmin.db.password");
        WAdmin wadmin = new WAdmin(dbURL, dbUser, dbPassword);
        List<String> hosts = new ArrayList<String>();
        // Generate the list of hostnames allowed for Wayback (contract file).
        try {
            File contractFile = new File(properties.getProperty("contract.file"));
            PrintStream cs = new PrintStream(new FileOutputStream(contractFile));
            try {
                for (Resource res : wadmin.getContractedResources()) {
                    try {
                        hosts.addAll(getHosts(new URL(res.getUrl())));
                    } catch (MalformedURLException me) {
                        // BUG FIX: the format string had a single %s for two arguments,
                        // so the URL was silently dropped from the warning.
                        System.err.format("WARN: resource with id=%s has invalid url: %s%n",
                                res.getId(), res.getUrl());
                    }
                }
                for (String host : hosts) {
                    cs.println(host);
                }
            } finally {
                cs.close(); // BUG FIX: stream was never closed
            }
        } catch (IOException ioe) {
            System.err.println("ERROR: error occured when generating contract file");
            System.exit(3);
        }
        // Generate the list of URLs: "<id> <alephId> <url>" per contracted resource.
        try {
            PrintStream cs = new PrintStream(new FileOutputStream(new File(properties.getProperty("urls.file"))));
            try {
                for (Resource res : wadmin.getContractedResources()) {
                    if (res.getAlephId() != null) {
                        cs.println(res.getId() + " " + res.getAlephId() + " " + res.getUrl());
                    }
                }
            } finally {
                cs.close();
            }
        } catch (IOException ioe) {
            // BUG FIX: this message previously referred to the contract file.
            System.err.println("ERROR: error occured when generating urls file");
            System.exit(3);
        }
        File dumpFile = new File(properties.getProperty("dump.file"));
        PrintStream ds = new PrintStream(new FileOutputStream(dumpFile));
        try {
            // Synchronise harvested conspectus metadata into WA Admin.
            List<OAIRecord> records = harvester.getResources();
            for (OAIRecord record : records) {
                System.err.println("Sysno:" + record.getId());
                Resource resource = wadmin.getResourceByAlephId(record.getId());
                if (resource == null) {
                    System.err.println("Resource with sysno=" + record.getId() + " does not exist in WA Admin.");
                    continue;
                }
                resource.setDescription(record.getDescription());
                resource.setConspectusId(record.getConspectusId());
                String subcategory = record.getConspectusSubcategoryId();
                // Map legacy subcategory codes to current ones where a mapping exists.
                // Null guard added: a record without a subcategory previously NPE'd here.
                if (subcategory != null && subCategoriesMapping.containsKey(subcategory.trim())) {
                    subcategory = subCategoriesMapping.get(subcategory.trim());
                }
                int subcategoryId = wadmin.getConspectusSubcategoryId(subcategory);
                if (subcategoryId == -1) {
                    System.err.println("Subcategory " + subcategory + " does not exists in WA Admin. Resource has aleph no " + record.getId() + ".");
                } else {
                    resource.setConspectusSubcategoryId(subcategoryId);
                    try {
                        wadmin.updateResource(resource, record.getKeywords());
                    } catch (Exception ex) {
                        System.err.println("Error when processing record" + record.getId());
                    }
                }
            }
        } finally {
            ds.close(); // BUG FIX: dump stream was opened but never closed
        }
    }

    /**
     * Returns the host of the given URL plus its www/non-www twin, so both
     * variants are accepted by Wayback.
     */
    private static List<String> getHosts(URL url) {
        List<String> hosts = new ArrayList<String>(2);
        String host = url.getHost();
        hosts.add(host);
        if (host.startsWith(PREFIX_WWW)) {
            // Use the prefix length instead of the previous magic number 4.
            hosts.add(host.substring(PREFIX_WWW.length()));
        } else {
            hosts.add(PREFIX_WWW + host);
        }
        return hosts;
    }

    /**
     * Loads a {@link Properties} object from the given file.
     *
     * @param file the properties file to read
     * @return the loaded properties
     * @throws IOException if the file cannot be read
     */
    public static Properties loadProperties(File file) throws IOException {
        Properties properties = new Properties();
        FileInputStream fis = null;
        try {
            fis = new FileInputStream(file);
            properties.load(fis);
        } finally {
            if (fis != null) {
                fis.close();
            }
        }
        return properties;
    }

    /**
     * Legacy/test entry point kept for reference.
     * NOTE(review): {@code harvester} is initialised to null and then
     * dereferenced, so this method would throw a NullPointerException if
     * invoked — confirm before reviving it.
     */
    public static void tmain(String args[]) throws Exception {
        Drawer drawer = new Drawer();
        Map<String, String> subCategoriesMapping = Helper.loadSubCategories();
        OAIHarvester harvester = null; // new OAIHarvester();
        WAdmin wadmin = new WAdmin();
        List<OAIRecord> records = harvester.getResources();
        for (OAIRecord record : records) {
            Resource resource = wadmin.getResourceByAlephId(record.getId());
            if (resource == null) {
                System.err.println("Resource with aleph no =" + record.getId() + " and title='" + record.getTitle() + "' does not exist");
                continue;
            }
            resource.setDescription(record.getDescription());
            resource.setConspectusId(record.getConspectusId());
            String subcategory = record.getConspectusSubcategoryId();
            if (subCategoriesMapping.containsKey(subcategory.trim())) {
                subcategory = subCategoriesMapping.get(subcategory.trim());
            }
            int subcategoryId = wadmin.getConspectusSubcategoryId(subcategory);
            if (subcategoryId == -1) {
                System.err.println("Subcategory " + subcategory + " does not exists in WA Admin. Resource has aleph no " + record.getId() + ".");
            } else {
                resource.setConspectusSubcategoryId(subcategoryId);
                wadmin.updateResource(resource, record.getKeywords());
            }
            drawer.draw(resource.getUrl(), new File("graphs/stat_" + resource.getId() + ".png"));
            System.out.format("%s\t%s\t%s\n", resource.getId(), record.getCNB(), resource.getUrl());
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.config;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.concurrent.TimeUnit;
import com.google.common.base.Joiner;
import com.google.common.collect.Sets;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A class that contains configuration properties for the cassandra node it runs within.
*
* Properties declared as volatile can be mutated via JMX.
*/
public class Config
{
    private static final Logger logger = LoggerFactory.getLogger(Config.class);
    /*
     * Prefix for Java properties for internal Cassandra configuration options
     */
    public static final String PROPERTY_PREFIX = "cassandra.";
    public String cluster_name = "Test Cluster";
    public String authenticator;
    public String authorizer;
    public String role_manager;
    public volatile int permissions_validity_in_ms = 2000;
    public volatile int permissions_cache_max_entries = 1000;
    public volatile int permissions_update_interval_in_ms = -1;
    public volatile int roles_validity_in_ms = 2000;
    public volatile int roles_cache_max_entries = 1000;
    public volatile int roles_update_interval_in_ms = -1;
    public volatile int credentials_validity_in_ms = 2000;
    public volatile int credentials_cache_max_entries = 1000;
    public volatile int credentials_update_interval_in_ms = -1;
    /* Hashing strategy Random or OPHF */
    public String partitioner;
    public Boolean auto_bootstrap = true;
    public volatile boolean hinted_handoff_enabled = true;
    public Set<String> hinted_handoff_disabled_datacenters = Sets.newConcurrentHashSet();
    public volatile Integer max_hint_window_in_ms = 3 * 3600 * 1000; // three hours
    public String hints_directory;
    public ParameterizedClass seed_provider;
    public DiskAccessMode disk_access_mode = DiskAccessMode.auto;
    public DiskFailurePolicy disk_failure_policy = DiskFailurePolicy.ignore;
    public CommitFailurePolicy commit_failure_policy = CommitFailurePolicy.stop;
    /* initial token in the ring */
    public String initial_token;
    public Integer num_tokens = 1;
    /** Triggers automatic allocation of tokens if set, using the replication strategy of the referenced keyspace */
    public String allocate_tokens_for_keyspace = null;
    public volatile Long request_timeout_in_ms = 10000L;
    public volatile Long read_request_timeout_in_ms = 5000L;
    public volatile Long range_request_timeout_in_ms = 10000L;
    public volatile Long write_request_timeout_in_ms = 2000L;
    public volatile Long counter_write_request_timeout_in_ms = 5000L;
    public volatile Long cas_contention_timeout_in_ms = 1000L;
    public volatile Long truncate_request_timeout_in_ms = 60000L;
    /**
     * @deprecated use {@link this#streaming_keep_alive_period_in_secs} instead
     */
    @Deprecated
    public Integer streaming_socket_timeout_in_ms = 86400000; //24 hours
    public Integer streaming_keep_alive_period_in_secs = 300; //5 minutes
    public boolean cross_node_timeout = false;
    public volatile long slow_query_log_timeout_in_ms = 500L;
    public volatile Double phi_convict_threshold = 8.0;
    public Integer concurrent_reads = 32;
    public Integer concurrent_writes = 32;
    public Integer concurrent_counter_writes = 32;
    public Integer concurrent_materialized_view_writes = 32;
    @Deprecated
    public Integer concurrent_replicates = null;
    public Integer memtable_flush_writers = null;
    public Integer memtable_heap_space_in_mb;
    public Integer memtable_offheap_space_in_mb;
    public Float memtable_cleanup_threshold = null;
    public Integer storage_port = 7000;
    public Integer ssl_storage_port = 7001;
    public String listen_address;
    public String listen_interface;
    public Boolean listen_interface_prefer_ipv6 = false;
    public String broadcast_address;
    public Boolean listen_on_broadcast_address = false;
    public String internode_authenticator;
    /* intentionally left set to true, despite being set to false in stock 2.2 cassandra.yaml
       we don't want to surprise Thrift users who have the setting blank in the yaml during 2.1->2.2 upgrade */
    public Boolean start_rpc = true;
    public String rpc_address;
    public String rpc_interface;
    public Boolean rpc_interface_prefer_ipv6 = false;
    public String broadcast_rpc_address;
    public Integer rpc_port = 9160;
    public Integer rpc_listen_backlog = 50;
    public String rpc_server_type = "sync";
    public Boolean rpc_keepalive = true;
    public Integer rpc_min_threads = 16;
    public Integer rpc_max_threads = Integer.MAX_VALUE;
    public Integer rpc_send_buff_size_in_bytes;
    public Integer rpc_recv_buff_size_in_bytes;
    public Integer internode_send_buff_size_in_bytes;
    public Integer internode_recv_buff_size_in_bytes;
    public Boolean start_native_transport = false;
    public Integer native_transport_port = 9042;
    public Integer native_transport_port_ssl = null;
    public Integer native_transport_max_threads = 128;
    public Integer native_transport_max_frame_size_in_mb = 256;
    public volatile Long native_transport_max_concurrent_connections = -1L;
    public volatile Long native_transport_max_concurrent_connections_per_ip = -1L;
    @Deprecated
    public Integer thrift_max_message_length_in_mb = 16;
    /**
     * Max size of values in SSTables, in MegaBytes.
     * Default is the same as the native protocol frame limit: 256Mb.
     * See AbstractType for how it is used.
     */
    public Integer max_value_size_in_mb = 256;
    public Integer thrift_framed_transport_size_in_mb = 15;
    public Boolean snapshot_before_compaction = false;
    public Boolean auto_snapshot = true;
    /* if the size of columns or super-columns are more than this, indexing will kick in */
    public Integer column_index_size_in_kb = 64;
    public Integer column_index_cache_size_in_kb = 2;
    public volatile int batch_size_warn_threshold_in_kb = 5;
    public volatile int batch_size_fail_threshold_in_kb = 50;
    public Integer unlogged_batch_across_partitions_warn_threshold = 10;
    public Integer concurrent_compactors;
    public volatile Integer compaction_throughput_mb_per_sec = 16;
    public volatile Integer compaction_large_partition_warning_threshold_mb = 100;
    public Integer min_free_space_per_drive_in_mb = 50;
    /**
     * @deprecated retry support removed on CASSANDRA-10992
     */
    @Deprecated
    public Integer max_streaming_retries = 3;
    public volatile Integer stream_throughput_outbound_megabits_per_sec = 200;
    public volatile Integer inter_dc_stream_throughput_outbound_megabits_per_sec = 200;
    public String[] data_file_directories = new String[0];
    public String saved_caches_directory;
    // Commit Log
    public String commitlog_directory;
    public Integer commitlog_total_space_in_mb;
    public CommitLogSync commitlog_sync;
    public Double commitlog_sync_batch_window_in_ms;
    public Integer commitlog_sync_period_in_ms;
    public int commitlog_segment_size_in_mb = 32;
    public ParameterizedClass commitlog_compression;
    public int commitlog_max_compression_buffers_in_pool = 3;
    public TransparentDataEncryptionOptions transparent_data_encryption_options = new TransparentDataEncryptionOptions();
    public Integer max_mutation_size_in_kb;
    // Change-data-capture logs
    public Boolean cdc_enabled = false;
    public String cdc_raw_directory;
    public Integer cdc_total_space_in_mb;
    public Integer cdc_free_space_check_interval_ms = 250;
    @Deprecated
    public int commitlog_periodic_queue_size = -1;
    public String endpoint_snitch;
    public Boolean dynamic_snitch = true;
    public volatile Integer dynamic_snitch_update_interval_in_ms = 100;
    public volatile Integer dynamic_snitch_reset_interval_in_ms = 600000;
    public volatile Double dynamic_snitch_badness_threshold = 0.1;
    public String request_scheduler;
    public RequestSchedulerId request_scheduler_id;
    public RequestSchedulerOptions request_scheduler_options;
    public EncryptionOptions.ServerEncryptionOptions server_encryption_options = new EncryptionOptions.ServerEncryptionOptions();
    public EncryptionOptions.ClientEncryptionOptions client_encryption_options = new EncryptionOptions.ClientEncryptionOptions();
    // this encOptions is for backward compatibility (a warning is logged by DatabaseDescriptor)
    public EncryptionOptions.ServerEncryptionOptions encryption_options;
    public InternodeCompression internode_compression = InternodeCompression.none;
    @Deprecated
    public Integer index_interval = null;
    public int hinted_handoff_throttle_in_kb = 1024;
    public int batchlog_replay_throttle_in_kb = 1024;
    public int max_hints_delivery_threads = 2;
    public int hints_flush_period_in_ms = 10000;
    public int max_hints_file_size_in_mb = 128;
    public ParameterizedClass hints_compression;
    public int sstable_preemptive_open_interval_in_mb = 50;
    public volatile boolean incremental_backups = false;
    public boolean trickle_fsync = false;
    public int trickle_fsync_interval_in_kb = 10240;
    public Long key_cache_size_in_mb = null;
    public volatile int key_cache_save_period = 14400;
    public volatile int key_cache_keys_to_save = Integer.MAX_VALUE;
    public String row_cache_class_name = "org.apache.cassandra.cache.OHCProvider";
    public long row_cache_size_in_mb = 0;
    public volatile int row_cache_save_period = 0;
    public volatile int row_cache_keys_to_save = Integer.MAX_VALUE;
    public Long counter_cache_size_in_mb = null;
    public volatile int counter_cache_save_period = 7200;
    public volatile int counter_cache_keys_to_save = Integer.MAX_VALUE;
    private static boolean isClientMode = false;
    public Integer file_cache_size_in_mb;
    public boolean buffer_pool_use_heap_if_exhausted = true;
    public DiskOptimizationStrategy disk_optimization_strategy = DiskOptimizationStrategy.ssd;
    public double disk_optimization_estimate_percentile = 0.95;
    public double disk_optimization_page_cross_chance = 0.1;
    public boolean inter_dc_tcp_nodelay = true;
    public MemtableAllocationType memtable_allocation_type = MemtableAllocationType.heap_buffers;
    private static boolean outboundBindAny = false;
    public volatile int tombstone_warn_threshold = 1000;
    public volatile int tombstone_failure_threshold = 100000;
    public volatile Long index_summary_capacity_in_mb;
    public volatile int index_summary_resize_interval_in_minutes = 60;
    public int gc_log_threshold_in_ms = 200;
    public int gc_warn_threshold_in_ms = 0;
    // TTL for different types of trace events.
    public int tracetype_query_ttl = (int) TimeUnit.DAYS.toSeconds(1);
    public int tracetype_repair_ttl = (int) TimeUnit.DAYS.toSeconds(7);
    /*
     * Strategy to use for coalescing messages in OutboundTcpConnection.
     * Can be fixed, movingaverage, timehorizon, disabled. Setting is case and leading/trailing
     * whitespace insensitive. You can also specify a subclass of CoalescingStrategies.CoalescingStrategy by name.
     */
    public String otc_coalescing_strategy = "TIMEHORIZON";
    /*
     * How many microseconds to wait for coalescing. For fixed strategy this is the amount of time after the first
     * messgae is received before it will be sent with any accompanying messages. For moving average this is the
     * maximum amount of time that will be waited as well as the interval at which messages must arrive on average
     * for coalescing to be enabled.
     */
    public static final int otc_coalescing_window_us_default = 200;
    public int otc_coalescing_window_us = otc_coalescing_window_us_default;
    public int windows_timer_interval = 0;
    /**
     * Size of the CQL prepared statements cache in MB.
     * Defaults to 1/256th of the heap size or 10MB, whichever is greater.
     */
    public Long prepared_statements_cache_size_mb = null;
    /**
     * Size of the Thrift prepared statements cache in MB.
     * Defaults to 1/256th of the heap size or 10MB, whichever is greater.
     */
    public Long thrift_prepared_statements_cache_size_mb = null;
    public boolean enable_user_defined_functions = false;
    public boolean enable_scripted_user_defined_functions = false;
    /**
     * Optionally disable asynchronous UDF execution.
     * Disabling asynchronous UDF execution also implicitly disables the security-manager!
     * By default, async UDF execution is enabled to be able to detect UDFs that run too long / forever and be
     * able to fail fast - i.e. stop the Cassandra daemon, which is currently the only appropriate approach to
     * "tell" a user that there's something really wrong with the UDF.
     * When you disable async UDF execution, users MUST pay attention to read-timeouts since these may indicate
     * UDFs that run too long or forever - and this can destabilize the cluster.
     */
    public boolean enable_user_defined_functions_threads = true;
    /**
     * Time in milliseconds after a warning will be emitted to the log and to the client that a UDF runs too long.
     * (Only valid, if enable_user_defined_functions_threads==true)
     */
    public long user_defined_function_warn_timeout = 500;
    /**
     * Time in milliseconds after a fatal UDF run-time situation is detected and action according to
     * user_function_timeout_policy will take place.
     * (Only valid, if enable_user_defined_functions_threads==true)
     */
    public long user_defined_function_fail_timeout = 1500;
    /**
     * Defines what to do when a UDF ran longer than user_defined_function_fail_timeout.
     * Possible options are:
     * - 'die' - i.e. it is able to emit a warning to the client before the Cassandra Daemon will shut down.
     * - 'die_immediate' - shut down C* daemon immediately (effectively prevent the chance that the client will receive a warning).
     * - 'ignore' - just log - the most dangerous option.
     * (Only valid, if enable_user_defined_functions_threads==true)
     */
    public UserFunctionTimeoutPolicy user_function_timeout_policy = UserFunctionTimeoutPolicy.die;
    public static boolean getOutboundBindAny()
    {
        return outboundBindAny;
    }
    public static void setOutboundBindAny(boolean value)
    {
        outboundBindAny = value;
    }
    /**
     * @deprecated migrate to {@link DatabaseDescriptor#isClientInitialized()}
     */
    @Deprecated
    public static boolean isClientMode()
    {
        return isClientMode;
    }
    /**
     * Client mode means that the process is a pure client, that uses C* code base but does
     * not read or write local C* database files.
     *
     * @deprecated migrate to {@link DatabaseDescriptor#clientInitialization(boolean)}
     */
    @Deprecated
    public static void setClientMode(boolean clientMode)
    {
        isClientMode = clientMode;
    }
    public enum CommitLogSync
    {
        periodic,
        batch
    }
    public enum InternodeCompression
    {
        all, none, dc
    }
    public enum DiskAccessMode
    {
        auto,
        mmap,
        mmap_index_only,
        standard,
    }
    public enum MemtableAllocationType
    {
        unslabbed_heap_buffers,
        heap_buffers,
        offheap_buffers,
        offheap_objects
    }
    public enum DiskFailurePolicy
    {
        best_effort,
        stop,
        ignore,
        stop_paranoid,
        die
    }
    public enum CommitFailurePolicy
    {
        stop,
        stop_commit,
        ignore,
        die,
    }
    public enum UserFunctionTimeoutPolicy
    {
        ignore,
        die,
        die_immediate
    }
    public enum RequestSchedulerId
    {
        keyspace
    }
    public enum DiskOptimizationStrategy
    {
        ssd,
        spinning
    }
    private static final List<String> SENSITIVE_KEYS = new ArrayList<String>() {{
        add("client_encryption_options");
        add("server_encryption_options");
    }};
    /**
     * Logs every public, non-final configuration field of the given instance,
     * redacting the sensitive (encryption) options.
     */
    public static void log(Config config)
    {
        Map<String, String> configMap = new TreeMap<>();
        for (Field field : Config.class.getFields())
        {
            // ignore the constants
            if (Modifier.isFinal(field.getModifiers()))
                continue;
            String name = field.getName();
            if (SENSITIVE_KEYS.contains(name))
            {
                configMap.put(name, "<REDACTED>");
                continue;
            }
            String value;
            try
            {
                // String.valueOf renders null field values as "null" directly,
                // instead of the previous pattern of catching the NPE thrown by
                // calling toString() on a null value.
                value = String.valueOf(field.get(config));
            }
            catch (IllegalAccessException e)
            {
                value = "null";
            }
            configMap.put(name, value);
        }
        logger.info("Node configuration:[{}]", Joiner.on("; ").join(configMap.entrySet()));
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapreduce.v2.hs.client;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.Arrays;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapreduce.v2.api.HSAdminRefreshProtocol;
import org.apache.hadoop.mapreduce.v2.hs.HSProxies;
import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
import org.apache.hadoop.security.RefreshUserMappingsProtocol;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.tools.GetUserMappingsProtocol;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
@Private
public class HSAdmin extends Configured implements Tool {
/** Creates an HSAdmin with no configuration; call {@link #setConf} before use. */
public HSAdmin() {
super();
}
/**
 * Creates an HSAdmin initialised with the given job configuration
 * (security settings are added by {@link #setConf}).
 *
 * @param conf the job configuration to use
 */
public HSAdmin(JobConf conf) {
super(conf);
}
/**
 * Stores the configuration, first augmenting any non-null configuration
 * with the JHS security service-user setting.
 */
@Override
public void setConf(Configuration conf) {
  Configuration effective = (conf == null) ? null : addSecurityConfiguration(conf);
  super.setConf(effective);
}
/**
 * Returns a copy of the given configuration with the Hadoop security
 * service-user key pointed at the JHS principal; the caller's object is
 * left untouched.
 */
private Configuration addSecurityConfiguration(Configuration conf) {
  // Wrap in a JobConf so the caller's configuration is not mutated.
  Configuration secured = new JobConf(conf);
  String principal = secured.get(JHAdminConfig.MR_HISTORY_PRINCIPAL, "");
  secured.set(CommonConfigurationKeys.HADOOP_SECURITY_SERVICE_USER_NAME_KEY, principal);
  return secured;
}
/**
 * Displays format of commands.
 *
 * @param cmd
 *          The command that is being executed; a null or unknown command
 *          prints the full usage summary.
 */
private static void printUsage(String cmd) {
  // Normalise null to "" so the string switch is as null-safe as the
  // original "literal".equals(cmd) chain ("" falls through to default).
  String command = (cmd == null) ? "" : cmd;
  switch (command) {
  case "-refreshUserToGroupsMappings":
    System.err.println("Usage: mapred hsadmin [-refreshUserToGroupsMappings]");
    break;
  case "-refreshSuperUserGroupsConfiguration":
    System.err.println("Usage: mapred hsadmin [-refreshSuperUserGroupsConfiguration]");
    break;
  case "-refreshAdminAcls":
    System.err.println("Usage: mapred hsadmin [-refreshAdminAcls]");
    break;
  case "-refreshLoadedJobCache":
    System.err.println("Usage: mapred hsadmin [-refreshLoadedJobCache]");
    break;
  case "-refreshJobRetentionSettings":
    System.err.println("Usage: mapred hsadmin [-refreshJobRetentionSettings]");
    break;
  case "-refreshLogRetentionSettings":
    System.err.println("Usage: mapred hsadmin [-refreshLogRetentionSettings]");
    break;
  case "-getGroups":
    System.err.println("Usage: mapred hsadmin" + " [-getGroups [username]]");
    break;
  default:
    System.err.println("Usage: mapred hsadmin");
    System.err.println("           [-refreshUserToGroupsMappings]");
    System.err.println("           [-refreshSuperUserGroupsConfiguration]");
    System.err.println("           [-refreshAdminAcls]");
    System.err.println("           [-refreshLoadedJobCache]");
    System.err.println("           [-refreshJobRetentionSettings]");
    System.err.println("           [-refreshLogRetentionSettings]");
    System.err.println("           [-getGroups [username]]");
    System.err.println("           [-help [cmd]]");
    System.err.println();
    ToolRunner.printGenericCommandUsage(System.err);
    break;
  }
}
/**
 * Prints help for a single command, or the full summary plus every
 * command's help text when the command is not recognised.
 *
 * @param cmd the command name without the leading dash, may be null
 */
private static void printHelp(String cmd) {
  String summary = "hsadmin is the command to execute Job History server administrative commands.\n"
      + "The full syntax is: \n\n"
      + "mapred hsadmin"
      + " [-refreshUserToGroupsMappings]"
      + " [-refreshSuperUserGroupsConfiguration]"
      + " [-refreshAdminAcls]"
      + " [-refreshLoadedJobCache]"
      + " [-refreshLogRetentionSettings]"
      + " [-refreshJobRetentionSettings]"
      + " [-getGroups [username]]" + " [-help [cmd]]\n";
  String refreshUserToGroupsMappings = "-refreshUserToGroupsMappings: Refresh user-to-groups mappings\n";
  String refreshSuperUserGroupsConfiguration = "-refreshSuperUserGroupsConfiguration: Refresh superuser proxy groups mappings\n";
  String refreshAdminAcls = "-refreshAdminAcls: Refresh acls for administration of Job history server\n";
  String refreshLoadedJobCache = "-refreshLoadedJobCache: Refresh loaded job cache of Job history server\n";
  // BUG FIX: the two retention help strings were missing the space after the
  // colon, printing e.g. "-refreshJobRetentionSettings:Refresh ...".
  String refreshJobRetentionSettings = "-refreshJobRetentionSettings: " +
      "Refresh job history period,job cleaner settings\n";
  String refreshLogRetentionSettings = "-refreshLogRetentionSettings: " +
      "Refresh log retention period and log retention check interval\n";
  String getGroups = "-getGroups [username]: Get the groups which given user belongs to\n";
  String help = "-help [cmd]: \tDisplays help for the given command or all commands if none\n"
      + "\t\tis specified.\n";
  if ("refreshUserToGroupsMappings".equals(cmd)) {
    System.out.println(refreshUserToGroupsMappings);
  } else if ("help".equals(cmd)) {
    System.out.println(help);
  } else if ("refreshSuperUserGroupsConfiguration".equals(cmd)) {
    System.out.println(refreshSuperUserGroupsConfiguration);
  } else if ("refreshAdminAcls".equals(cmd)) {
    System.out.println(refreshAdminAcls);
  } else if ("refreshLoadedJobCache".equals(cmd)) {
    System.out.println(refreshLoadedJobCache);
  } else if ("refreshJobRetentionSettings".equals(cmd)) {
    System.out.println(refreshJobRetentionSettings);
  } else if ("refreshLogRetentionSettings".equals(cmd)) {
    System.out.println(refreshLogRetentionSettings);
  } else if ("getGroups".equals(cmd)) {
    System.out.println(getGroups);
  } else {
    System.out.println(summary);
    System.out.println(refreshUserToGroupsMappings);
    System.out.println(refreshSuperUserGroupsConfiguration);
    System.out.println(refreshAdminAcls);
    System.out.println(refreshLoadedJobCache);
    System.out.println(refreshJobRetentionSettings);
    System.out.println(refreshLogRetentionSettings);
    System.out.println(getGroups);
    System.out.println(help);
    System.out.println();
    ToolRunner.printGenericCommandUsage(System.out);
  }
}
/**
 * Prints, for each given username, the groups that user belongs to,
 * querying the Job History server's admin endpoint. With no usernames,
 * the current user is looked up.
 *
 * @param usernames users to look up; may be empty
 * @return 0 on success
 * @throws IOException on RPC failure
 */
private int getGroups(String[] usernames) throws IOException {
  // Default to the current user when no names were supplied.
  if (usernames.length == 0) {
    usernames = new String[] { UserGroupInformation.getCurrentUser()
        .getUserName() };
  }
  Configuration conf = getConf();
  InetSocketAddress hsAddress = conf.getSocketAddr(
      JHAdminConfig.JHS_ADMIN_ADDRESS,
      JHAdminConfig.DEFAULT_JHS_ADMIN_ADDRESS,
      JHAdminConfig.DEFAULT_JHS_ADMIN_PORT);
  GetUserMappingsProtocol mappingsProxy = HSProxies.createProxy(
      conf, hsAddress, GetUserMappingsProtocol.class,
      UserGroupInformation.getCurrentUser());
  for (String user : usernames) {
    // Output format: "<user> : <group1> <group2> ..."
    StringBuilder line = new StringBuilder(user).append(" :");
    for (String group : mappingsProxy.getGroupsForUser(user)) {
      line.append(' ').append(group);
    }
    System.out.println(line);
  }
  return 0;
}
/**
 * Asks the Job History server to refresh its user-to-groups mappings.
 *
 * @return 0 on success
 * @throws IOException on RPC failure
 */
private int refreshUserToGroupsMappings() throws IOException {
  Configuration conf = getConf();
  InetSocketAddress hsAddress = conf.getSocketAddr(
      JHAdminConfig.JHS_ADMIN_ADDRESS,
      JHAdminConfig.DEFAULT_JHS_ADMIN_ADDRESS,
      JHAdminConfig.DEFAULT_JHS_ADMIN_PORT);
  RefreshUserMappingsProtocol proxy = HSProxies.createProxy(conf,
      hsAddress, RefreshUserMappingsProtocol.class,
      UserGroupInformation.getCurrentUser());
  proxy.refreshUserToGroupsMappings();
  return 0;
}
/**
 * Asks the Job History server to refresh its superuser proxy-group
 * configuration.
 *
 * @return 0 on success
 * @throws IOException on RPC failure
 */
private int refreshSuperUserGroupsConfiguration() throws IOException {
  Configuration conf = getConf();
  InetSocketAddress hsAddress = conf.getSocketAddr(
      JHAdminConfig.JHS_ADMIN_ADDRESS,
      JHAdminConfig.DEFAULT_JHS_ADMIN_ADDRESS,
      JHAdminConfig.DEFAULT_JHS_ADMIN_PORT);
  RefreshUserMappingsProtocol proxy = HSProxies.createProxy(conf,
      hsAddress, RefreshUserMappingsProtocol.class,
      UserGroupInformation.getCurrentUser());
  proxy.refreshSuperUserGroupsConfiguration();
  return 0;
}
private int refreshAdminAcls() throws IOException {
// Refresh the admin acls
Configuration conf = getConf();
InetSocketAddress address = conf.getSocketAddr(
JHAdminConfig.JHS_ADMIN_ADDRESS,
JHAdminConfig.DEFAULT_JHS_ADMIN_ADDRESS,
JHAdminConfig.DEFAULT_JHS_ADMIN_PORT);
HSAdminRefreshProtocol refreshProtocol = HSProxies.createProxy(conf,
address, HSAdminRefreshProtocol.class,
UserGroupInformation.getCurrentUser());
refreshProtocol.refreshAdminAcls();
return 0;
}
private int refreshLoadedJobCache() throws IOException {
// Refresh the loaded job cache
Configuration conf = getConf();
InetSocketAddress address = conf.getSocketAddr(
JHAdminConfig.JHS_ADMIN_ADDRESS,
JHAdminConfig.DEFAULT_JHS_ADMIN_ADDRESS,
JHAdminConfig.DEFAULT_JHS_ADMIN_PORT);
HSAdminRefreshProtocol refreshProtocol = HSProxies.createProxy(conf,
address, HSAdminRefreshProtocol.class,
UserGroupInformation.getCurrentUser());
refreshProtocol.refreshLoadedJobCache();
return 0;
}
private int refreshJobRetentionSettings() throws IOException {
// Refresh job retention settings
Configuration conf = getConf();
InetSocketAddress address = conf.getSocketAddr(
JHAdminConfig.JHS_ADMIN_ADDRESS,
JHAdminConfig.DEFAULT_JHS_ADMIN_ADDRESS,
JHAdminConfig.DEFAULT_JHS_ADMIN_PORT);
HSAdminRefreshProtocol refreshProtocol = HSProxies.createProxy(conf,
address, HSAdminRefreshProtocol.class,
UserGroupInformation.getCurrentUser());
refreshProtocol.refreshJobRetentionSettings();
return 0;
}
private int refreshLogRetentionSettings() throws IOException {
// Refresh log retention settings
Configuration conf = getConf();
InetSocketAddress address = conf.getSocketAddr(
JHAdminConfig.JHS_ADMIN_ADDRESS,
JHAdminConfig.DEFAULT_JHS_ADMIN_ADDRESS,
JHAdminConfig.DEFAULT_JHS_ADMIN_PORT);
HSAdminRefreshProtocol refreshProtocol = HSProxies.createProxy(conf,
address, HSAdminRefreshProtocol.class,
UserGroupInformation.getCurrentUser());
refreshProtocol.refreshLogRetentionSettings();
return 0;
}
@Override
public int run(String[] args) throws Exception {
if (args.length < 1) {
printUsage("");
return -1;
}
int exitCode = -1;
int i = 0;
String cmd = args[i++];
if ("-refreshUserToGroupsMappings".equals(cmd)
|| "-refreshSuperUserGroupsConfiguration".equals(cmd)
|| "-refreshAdminAcls".equals(cmd)
|| "-refreshLoadedJobCache".equals(cmd)
|| "-refreshJobRetentionSettings".equals(cmd)
|| "-refreshLogRetentionSettings".equals(cmd)) {
if (args.length != 1) {
printUsage(cmd);
return exitCode;
}
}
exitCode = 0;
if ("-refreshUserToGroupsMappings".equals(cmd)) {
exitCode = refreshUserToGroupsMappings();
} else if ("-refreshSuperUserGroupsConfiguration".equals(cmd)) {
exitCode = refreshSuperUserGroupsConfiguration();
} else if ("-refreshAdminAcls".equals(cmd)) {
exitCode = refreshAdminAcls();
} else if ("-refreshLoadedJobCache".equals(cmd)) {
exitCode = refreshLoadedJobCache();
} else if ("-refreshJobRetentionSettings".equals(cmd)) {
exitCode = refreshJobRetentionSettings();
} else if ("-refreshLogRetentionSettings".equals(cmd)) {
exitCode = refreshLogRetentionSettings();
} else if ("-getGroups".equals(cmd)) {
String[] usernames = Arrays.copyOfRange(args, i, args.length);
exitCode = getGroups(usernames);
} else if ("-help".equals(cmd)) {
if (i < args.length) {
printHelp(args[i]);
} else {
printHelp("");
}
} else {
exitCode = -1;
System.err.println(cmd.substring(1) + ": Unknown command");
printUsage("");
}
return exitCode;
}
public static void main(String[] args) throws Exception {
JobConf conf = new JobConf();
int result = ToolRunner.run(new HSAdmin(conf), args);
System.exit(result);
}
}
| |
/*
* Zed Attack Proxy (ZAP) and its related class files.
*
* ZAP is an HTTP/HTTPS proxy for assessing web application security.
*
* Copyright 2016 The ZAP development team
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.zaproxy.zap.extension.api;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.isEmptyString;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.notNullValue;
import static org.junit.Assert.assertThat;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.FileConfiguration;
import org.junit.Test;
import org.zaproxy.zap.utils.ZapXmlConfiguration;
/**
* Unit test for {@link OptionsParamApi}.
*/
/**
 * Unit test for {@link OptionsParamApi}.
 *
 * <p>Each option is exercised three ways: its default value, the failure mode
 * when no backing config has been loaded (setters NPE), and the round trip of
 * setting the option and reading it back from the backing configuration.
 */
public class OptionsParamApiUnitTest {
    // Configuration keys used by OptionsParamApi to persist each option.
    private static final String API_ENABLED_KEY = "api.enabled";
    private static final String API_SECURE_KEY = "api.secure";
    private static final String API_KEY_KEY = "api.key";
    private static final String API_DISABLEKEY_KEY = "api.disablekey";
    private static final String API_INCERRORDETAILS_KEY = "api.incerrordetails";
    private static final String API_AUTOFILLKEY_KEY = "api.autofillkey";
    private static final String API_ENABLEJSONP_KEY = "api.enablejsonp";
    private static final String API_NO_KEY_FOR_SAFE_OPS = "api.nokeyforsafeops";
    private static final String API_REPORT_PERM_ERRORS = "api.reportpermerrors";
    @Test
    public void shouldNotHaveConfigByDefault() {
        // Given / When
        OptionsParamApi param = new OptionsParamApi();
        // Then
        assertThat(param.getConfig(), is(equalTo(null)));
    }
    @Test
    public void shouldHaveEnabledStateByDefault() {
        // Given / When
        OptionsParamApi param = new OptionsParamApi();
        // Then
        assertThat(param.isEnabled(), is(equalTo(true)));
    }
    @Test(expected = NullPointerException.class)
    public void shouldFailToSetEnabledStateWithoutConfig() {
        // Given
        OptionsParamApi param = new OptionsParamApi();
        // When
        param.setEnabled(true);
        // Then = NullPointerException
    }
    @Test
    public void shouldSetEnabledStateWithConfig() {
        // Given
        OptionsParamApi param = createOptionsParamApiWithConfig();
        // When
        param.setEnabled(false);
        // Then
        assertThat(param.isEnabled(), is(equalTo(false)));
        assertThat(param.getConfig().getBoolean(API_ENABLED_KEY), is(equalTo(false)));
    }
    @Test
    public void shouldHaveSecureOnlyDisabledByDefault() {
        // Given / When
        OptionsParamApi param = new OptionsParamApi();
        // Then
        assertThat(param.isSecureOnly(), is(equalTo(false)));
    }
    @Test(expected = NullPointerException.class)
    public void shouldFailToSetSecureOnlyWithoutConfig() {
        // Given
        OptionsParamApi param = new OptionsParamApi();
        // When
        param.setSecureOnly(true);
        // Then = NullPointerException
    }
    @Test
    public void shouldSetSecureOnlyWithConfig() {
        // Given
        OptionsParamApi param = createOptionsParamApiWithConfig();
        // When
        param.setSecureOnly(true);
        // Then
        assertThat(param.isSecureOnly(), is(equalTo(true)));
        assertThat(param.getConfig().getBoolean(API_SECURE_KEY), is(equalTo(true)));
    }
    @Test
    public void shouldHaveKeyEnabledByDefault() {
        // Given / When
        OptionsParamApi param = new OptionsParamApi();
        // Then
        assertThat(param.isDisableKey(), is(equalTo(false)));
    }
    @Test(expected = NullPointerException.class)
    public void shouldFailToSetDisableKeyWithoutConfig() {
        // Given
        OptionsParamApi param = new OptionsParamApi();
        // When
        param.setDisableKey(true);
        // Then = NullPointerException
    }
    @Test
    public void shouldSetDisableKeyWithConfig() {
        // Given
        OptionsParamApi param = createOptionsParamApiWithConfig();
        // When
        param.setDisableKey(true);
        // Then
        // Assert the option actually set (was mistakenly asserting isEnabled()).
        assertThat(param.isDisableKey(), is(equalTo(true)));
        assertThat(param.getConfig().getBoolean(API_DISABLEKEY_KEY), is(equalTo(true)));
    }
    @Test
    public void shouldHaveIncErrorDetailsDisabledByDefault() {
        // Given / When
        OptionsParamApi param = new OptionsParamApi();
        // Then
        assertThat(param.isIncErrorDetails(), is(equalTo(false)));
    }
    @Test(expected = NullPointerException.class)
    public void shouldFailToSetIncErrorDetailsWithoutConfig() {
        // Given
        OptionsParamApi param = new OptionsParamApi();
        // When
        param.setIncErrorDetails(true);
        // Then = NullPointerException
    }
    @Test
    public void shouldSetIncErrorDetailsWithConfig() {
        // Given
        OptionsParamApi param = createOptionsParamApiWithConfig();
        // When
        param.setIncErrorDetails(true);
        // Then
        assertThat(param.isIncErrorDetails(), is(equalTo(true)));
        assertThat(param.getConfig().getBoolean(API_INCERRORDETAILS_KEY), is(equalTo(true)));
    }
    @Test
    public void shouldHaveAutofillKeyDisabledByDefault() {
        // Given / When
        OptionsParamApi param = new OptionsParamApi();
        // Then
        assertThat(param.isAutofillKey(), is(equalTo(false)));
    }
    @Test(expected = NullPointerException.class)
    public void shouldFailToSetAutofillKeyWithoutConfig() {
        // Given
        OptionsParamApi param = new OptionsParamApi();
        // When
        param.setAutofillKey(true);
        // Then = NullPointerException
    }
    @Test
    public void shouldSetAutofillKeyWithConfig() {
        // Given
        OptionsParamApi param = createOptionsParamApiWithConfig();
        // When
        param.setAutofillKey(true);
        // Then
        assertThat(param.isAutofillKey(), is(equalTo(true)));
        assertThat(param.getConfig().getBoolean(API_AUTOFILLKEY_KEY), is(equalTo(true)));
    }
    @Test
    public void shouldHaveEnableJSONPDisabledByDefault() {
        // Given / When
        OptionsParamApi param = new OptionsParamApi();
        // Then
        assertThat(param.isEnableJSONP(), is(equalTo(false)));
    }
    @Test(expected = NullPointerException.class)
    public void shouldFailToSetEnableJSONPWithoutConfig() {
        // Given
        OptionsParamApi param = new OptionsParamApi();
        // When
        param.setEnableJSONP(true);
        // Then = NullPointerException
    }
    @Test
    public void shouldSetEnableJSONPWithConfig() {
        // Given
        OptionsParamApi param = createOptionsParamApiWithConfig();
        // When
        param.setEnableJSONP(true);
        // Then
        assertThat(param.isEnableJSONP(), is(equalTo(true)));
        assertThat(param.getConfig().getBoolean(API_ENABLEJSONP_KEY), is(equalTo(true)));
    }
    @Test
    public void shouldHaveReportPermErrorsDisabledByDefault() {
        // Given / When
        OptionsParamApi param = new OptionsParamApi();
        // Then
        assertThat(param.isReportPermErrors(), is(equalTo(false)));
    }
    @Test(expected = NullPointerException.class)
    public void shouldFailToSetReportPermErrorsWithoutConfig() {
        // Given
        OptionsParamApi param = new OptionsParamApi();
        // When
        param.setReportPermErrors(true);
        // Then = NullPointerException
    }
    @Test
    public void shouldSetReportPermErrorsWithConfig() {
        // Given
        OptionsParamApi param = createOptionsParamApiWithConfig();
        // When
        param.setReportPermErrors(true);
        // Then
        assertThat(param.isReportPermErrors(), is(equalTo(true)));
        assertThat(param.getConfig().getBoolean(API_REPORT_PERM_ERRORS), is(equalTo(true)));
    }
    @Test
    public void shouldHaveNonceTimeToLiveInSecsSetTo5MinsByDefault() {
        // Given / When
        OptionsParamApi param = new OptionsParamApi();
        // Then
        assertThat(param.getNonceTimeToLiveInSecs(), is(equalTo(5 * 60)));
    }
    @Test
    public void shouldHaveNoKeyForViewsOrSafeOthersDisabledByDefault() {
        // Given / When
        OptionsParamApi param = new OptionsParamApi();
        // Then
        assertThat(param.isNoKeyForSafeOps(), is(equalTo(false)));
    }
    @Test(expected = NullPointerException.class)
    public void shouldFailToSetNoKeyForViewsOrSafeOthersWithoutConfig() {
        // Given
        OptionsParamApi param = new OptionsParamApi();
        // When
        param.setNoKeyForSafeOps(true);
        // Then = NullPointerException
    }
    @Test
    public void shouldSetNoKeyForViewsOrSafeOthersWithConfig() {
        // Given
        OptionsParamApi param = createOptionsParamApiWithConfig();
        // When
        param.setNoKeyForSafeOps(true);
        // Then
        assertThat(param.isNoKeyForSafeOps(), is(equalTo(true)));
        assertThat(param.getConfig().getBoolean(API_NO_KEY_FOR_SAFE_OPS), is(equalTo(true)));
    }
    @Test
    public void shouldHaveEmptyRealKeyByDefault() {
        // Given / When
        OptionsParamApi param = new OptionsParamApi();
        // Then
        assertThat(param.getRealKey(), isEmptyString());
    }
    @Test
    public void shouldHaveGeneratedKeyByDefault() {
        // Given / When
        OptionsParamApi param = new OptionsParamApi();
        // Then
        assertThat(param.getKey(), is(not(equalTo(""))));
    }
    @Test(expected = NullPointerException.class)
    public void shouldFailToSetKeyWithoutConfig() {
        // Given
        OptionsParamApi param = new OptionsParamApi();
        // When
        param.setKey("");
        // Then = NullPointerException
    }
    @Test
    public void shouldSetKeyWithConfig() {
        // Given
        OptionsParamApi param = createOptionsParamApiWithConfig();
        String apiKey = "Key";
        // When
        param.setKey(apiKey);
        // Then
        assertThat(param.getKey(), is(equalTo(apiKey)));
        assertThat(param.getConfig().getString(API_KEY_KEY), is(equalTo(apiKey)));
    }
    @Test
    public void shouldSaveGeneratedKeyWithConfig() {
        // Given
        OptionsParamApi param = new OptionsParamApi();
        Configuration conf = new Configuration();
        param.load(conf);
        param.setKey(null);
        // When
        String key = param.getKey();
        // Then
        assertThat(key, is(not(equalTo(""))));
        assertThat(conf.getString(API_KEY_KEY), is(equalTo(key)));
        assertThat(conf.isSaved(), is(equalTo(true)));
    }
    @Test
    public void shouldReturnEmptyKeyIfKeyDisabled() {
        // Given
        OptionsParamApi param = createOptionsParamApiWithConfig();
        param.setDisableKey(true);
        param.setKey("Key");
        // When
        String key = param.getKey();
        // Then
        assertThat(key, is(equalTo("")));
        assertThat(param.getRealKey(), is(equalTo("Key")));
    }
    @Test
    public void shouldParseLoadedFileConfiguration() {
        // Given
        OptionsParamApi param = new OptionsParamApi();
        FileConfiguration config = createTestConfig();
        // When
        param.load(config);
        // Then
        assertThat(param.isEnabled(), is(equalTo(false)));
        assertThat(param.isSecureOnly(), is(equalTo(true)));
        assertThat(param.isDisableKey(), is(equalTo(true)));
        assertThat(param.isIncErrorDetails(), is(equalTo(true)));
        assertThat(param.isAutofillKey(), is(equalTo(true)));
        assertThat(param.isEnableJSONP(), is(equalTo(true)));
        assertThat(param.getRealKey(), is(equalTo("ApiKey")));
    }
    @Test
    public void shouldBeCloneableByDefault() {
        // Given
        OptionsParamApi param = new OptionsParamApi();
        // When
        OptionsParamApi clone = param.clone();
        // Then
        assertThat(clone, is(notNullValue()));
        assertThat(param.isEnabled(), is(equalTo(true)));
        assertThat(param.isSecureOnly(), is(equalTo(false)));
        assertThat(param.isDisableKey(), is(equalTo(false)));
        assertThat(param.isIncErrorDetails(), is(equalTo(false)));
        assertThat(param.isAutofillKey(), is(equalTo(false)));
        assertThat(param.isEnableJSONP(), is(equalTo(false)));
        assertThat(param.getRealKey(), is(equalTo("")));
    }
    @Test
    public void shouldHaveLoadedConfigsAfterCloning() {
        // Given
        OptionsParamApi param = new OptionsParamApi();
        FileConfiguration config = createTestConfig();
        param.load(config);
        // When
        OptionsParamApi clone = param.clone();
        // Then
        assertThat(clone, is(notNullValue()));
        assertThat(param.isEnabled(), is(equalTo(false)));
        assertThat(param.isSecureOnly(), is(equalTo(true)));
        assertThat(param.isDisableKey(), is(equalTo(true)));
        assertThat(param.isIncErrorDetails(), is(equalTo(true)));
        assertThat(param.isAutofillKey(), is(equalTo(true)));
        assertThat(param.isEnableJSONP(), is(equalTo(true)));
        assertThat(param.getRealKey(), is(equalTo("ApiKey")));
    }
    @Test
    public void shouldUseDefaultValuesWhenLoadingFileConfigurationWithInvalidValues() {
        // Given
        OptionsParamApi param = new OptionsParamApi();
        FileConfiguration config = createTestConfigWithInvalidValues();
        // When
        param.load(config);
        // Then
        assertThat(param.isEnabled(), is(equalTo(true)));
        assertThat(param.isSecureOnly(), is(equalTo(false)));
        assertThat(param.isDisableKey(), is(equalTo(false)));
        assertThat(param.isIncErrorDetails(), is(equalTo(false)));
        assertThat(param.isAutofillKey(), is(equalTo(false)));
        assertThat(param.isEnableJSONP(), is(equalTo(false)));
        assertThat(param.getRealKey(), is(equalTo("")));
    }
    /** Creates an {@code OptionsParamApi} backed by an empty in-memory config. */
    private static OptionsParamApi createOptionsParamApiWithConfig() {
        OptionsParamApi param = new OptionsParamApi();
        param.load(new ZapXmlConfiguration());
        return param;
    }
    /** Creates a config with every API option set to a non-default value. */
    private static FileConfiguration createTestConfig() {
        ZapXmlConfiguration config = new ZapXmlConfiguration();
        config.setProperty(API_ENABLED_KEY, "false");
        config.setProperty(API_SECURE_KEY, "true");
        config.setProperty(API_KEY_KEY, "ApiKey");
        config.setProperty(API_DISABLEKEY_KEY, "true");
        config.setProperty(API_INCERRORDETAILS_KEY, "true");
        config.setProperty(API_AUTOFILLKEY_KEY, "true");
        config.setProperty(API_ENABLEJSONP_KEY, "true");
        config.setProperty(API_NO_KEY_FOR_SAFE_OPS, "true");
        config.setProperty(API_REPORT_PERM_ERRORS, "true");
        return config;
    }
    /** Creates a config whose boolean options all hold unparsable values. */
    private static FileConfiguration createTestConfigWithInvalidValues() {
        ZapXmlConfiguration config = new ZapXmlConfiguration();
        config.setProperty(API_ENABLED_KEY, "Not Boolean");
        config.setProperty(API_SECURE_KEY, "Not Boolean");
        config.setProperty(API_DISABLEKEY_KEY, "Not Boolean");
        config.setProperty(API_INCERRORDETAILS_KEY, "Not Boolean");
        config.setProperty(API_AUTOFILLKEY_KEY, "Not Boolean");
        config.setProperty(API_ENABLEJSONP_KEY, "Not Boolean");
        config.setProperty(API_NO_KEY_FOR_SAFE_OPS, "Not Boolean");
        config.setProperty(API_REPORT_PERM_ERRORS, "Not Boolean");
        return config;
    }
    /** Test double that records whether {@code save()} was invoked. */
    private static class Configuration extends ZapXmlConfiguration {
        private static final long serialVersionUID = 3822957830178644758L;
        private boolean saved;
        @Override
        public void save() throws ConfigurationException {
            saved = true;
        }
        public boolean isSaved() {
            return saved;
        }
    }
}
| |
/*
* ====================================================================
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*
*/
package org.apache.hc.client5.http.utils;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.List;
import java.util.Locale;
import java.util.Stack;
import org.apache.hc.core5.http.HttpHost;
import org.apache.hc.core5.net.URIAuthority;
import org.apache.hc.core5.net.URIBuilder;
import org.apache.hc.core5.util.Args;
import org.apache.hc.core5.util.TextUtils;
/**
* A collection of utilities for {@link URI URIs}, to workaround
* bugs within the class or for ease-of-use features.
*
* @since 4.0
*/
public class URIUtils {
    /**
     * A convenience method for creating a new {@link URI} whose scheme, host
     * and port are taken from the target host, but whose path, query and
     * fragment are taken from the existing URI. The fragment is only used if
     * dropFragment is false. The path is set to "/" if not explicitly specified.
     *
     * @param uri
     *            Contains the path, query and fragment to use.
     * @param target
     *            Contains the scheme, host and port to use.
     * @param dropFragment
     *            True if the fragment should not be copied.
     *
     * @throws URISyntaxException
     *             If the resulting URI is invalid.
     */
    public static URI rewriteURI(
            final URI uri,
            final HttpHost target,
            final boolean dropFragment) throws URISyntaxException {
        Args.notNull(uri, "URI");
        // Opaque URIs (e.g. mailto:) have no authority/path to rewrite.
        if (uri.isOpaque()) {
            return uri;
        }
        final URIBuilder uribuilder = new URIBuilder(uri);
        if (target != null) {
            uribuilder.setScheme(target.getSchemeName());
            uribuilder.setHost(target.getHostName());
            uribuilder.setPort(target.getPort());
        } else {
            // No target: strip scheme/authority, producing a relative URI.
            uribuilder.setScheme(null);
            uribuilder.setHost((String) null);
            uribuilder.setPort(-1);
        }
        if (dropFragment) {
            uribuilder.setFragment(null);
        }
        if (TextUtils.isEmpty(uribuilder.getPath())) {
            uribuilder.setPath("/");
        }
        return uribuilder.build();
    }
    /**
     * A convenience method for
     * {@link URIUtils#rewriteURI(URI, HttpHost, boolean)} that always keeps the
     * fragment.
     */
    public static URI rewriteURI(
            final URI uri,
            final HttpHost target) throws URISyntaxException {
        return rewriteURI(uri, target, false);
    }
    /**
     * A convenience method that creates a new {@link URI} whose scheme, host, port, path,
     * query are taken from the existing URI, dropping any fragment or user-information.
     * The path is set to "/" if not explicitly specified. The existing URI is returned
     * unmodified if it has no fragment or user-information and has a path.
     *
     * @param uri
     *            original URI.
     * @throws URISyntaxException
     *             If the resulting URI is invalid.
     */
    public static URI rewriteURI(final URI uri) throws URISyntaxException {
        Args.notNull(uri, "URI");
        // Opaque URIs have no structure to normalize.
        if (uri.isOpaque()) {
            return uri;
        }
        final URIBuilder uribuilder = new URIBuilder(uri);
        // Drop credentials embedded in the authority component.
        if (uribuilder.getUserInfo() != null) {
            uribuilder.setUserInfo(null);
        }
        if (TextUtils.isEmpty(uribuilder.getPath())) {
            uribuilder.setPath("/");
        }
        // Host names are case-insensitive; normalize to lower case.
        if (uribuilder.getHost() != null) {
            uribuilder.setHost(uribuilder.getHost().toLowerCase(Locale.ROOT));
        }
        uribuilder.setFragment(null);
        return uribuilder.build();
    }
    /**
     * Resolves a URI reference against a base URI. Work-around for bug in
     * java.net.URI (http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=4708535)
     *
     * @param baseURI the base URI
     * @param reference the URI reference
     * @return the resulting URI
     */
    public static URI resolve(final URI baseURI, final String reference) {
        return resolve(baseURI, URI.create(reference));
    }
    /**
     * Resolves a URI reference against a base URI. Work-around for bugs in
     * java.net.URI (e.g. http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=4708535)
     *
     * @param baseURI the base URI
     * @param reference the URI reference
     * @return the resulting URI
     */
    public static URI resolve(final URI baseURI, final URI reference){
        Args.notNull(baseURI, "Base URI");
        Args.notNull(reference, "Reference URI");
        final String s = reference.toASCIIString();
        // Query-only references ("?x=y") replace the base URI's query in place;
        // java.net.URI does not resolve these the way HTTP expects.
        if (s.startsWith("?")) {
            String baseUri = baseURI.toASCIIString();
            final int i = baseUri.indexOf('?');
            baseUri = i > -1 ? baseUri.substring(0, i) : baseUri;
            return URI.create(baseUri + s);
        }
        final boolean emptyReference = s.isEmpty();
        URI resolved;
        if (emptyReference) {
            // Resolving "" directly is buggy (JDK-4708535); resolve "#" instead
            // and then strip the fragment separator that was introduced.
            resolved = baseURI.resolve(URI.create("#"));
            final String resolvedString = resolved.toASCIIString();
            resolved = URI.create(resolvedString.substring(0, resolvedString.indexOf('#')));
        } else {
            resolved = baseURI.resolve(reference);
        }
        try {
            return normalizeSyntax(resolved);
        } catch (final URISyntaxException ex) {
            throw new IllegalArgumentException(ex);
        }
    }
    /**
     * Removes dot segments according to RFC 3986, section 5.2.4 and
     * Syntax-Based Normalization according to RFC 3986, section 6.2.2.
     *
     * @param uri the original URI
     * @return the URI without dot segments
     */
    static URI normalizeSyntax(final URI uri) throws URISyntaxException {
        if (uri.isOpaque() || uri.getAuthority() == null) {
            // opaque and file: URIs
            return uri;
        }
        Args.check(uri.isAbsolute(), "Base URI must be absolute");
        final URIBuilder builder = new URIBuilder(uri);
        final String path = builder.getPath();
        if (path != null && !path.equals("/")) {
            // RFC 3986 5.2.4 dot-segment removal: replay path segments through
            // a stack, where ".." pops and "."/empty segments are dropped.
            final String[] inputSegments = path.split("/");
            final Stack<String> outputSegments = new Stack<>();
            for (final String inputSegment : inputSegments) {
                if ((inputSegment.isEmpty()) || (".".equals(inputSegment))) {
                    // Do nothing
                } else if ("..".equals(inputSegment)) {
                    if (!outputSegments.isEmpty()) {
                        outputSegments.pop();
                    }
                } else {
                    outputSegments.push(inputSegment);
                }
            }
            final StringBuilder outputBuffer = new StringBuilder();
            for (final String outputSegment : outputSegments) {
                outputBuffer.append('/').append(outputSegment);
            }
            // Preserve a trailing slash from the original path.
            if (path.lastIndexOf('/') == path.length() - 1) {
                // path.endsWith("/") || path.equals("")
                outputBuffer.append('/');
            }
            builder.setPath(outputBuffer.toString());
        }
        // RFC 3986 6.2.2.1 case normalization: scheme and host are
        // case-insensitive, so fold them to lower case.
        if (builder.getScheme() != null) {
            builder.setScheme(builder.getScheme().toLowerCase(Locale.ROOT));
        }
        if (builder.getHost() != null) {
            builder.setHost(builder.getHost().toLowerCase(Locale.ROOT));
        }
        return builder.build();
    }
    /**
     * Extracts target host from the given {@link URI}.
     *
     * @param uri
     * @return the target host if the URI is absolute or {@code null} if the URI is
     *         relative or does not contain a valid host name.
     *
     * @since 4.1
     */
    public static HttpHost extractHost(final URI uri) {
        if (uri == null) {
            return null;
        }
        HttpHost target = null;
        if (uri.isAbsolute()) {
            int port = uri.getPort(); // may be overridden later
            String host = uri.getHost();
            if (host == null) { // normal parse failed; let's do it ourselves
                // authority does not seem to care about the valid character-set for host names
                host = uri.getAuthority();
                if (host != null) {
                    // Strip off any leading user credentials
                    final int at = host.indexOf('@');
                    if (at >= 0) {
                        if (host.length() > at+1 ) {
                            host = host.substring(at+1);
                        } else {
                            host = null; // @ on its own
                        }
                    }
                    // Extract the port suffix, if present
                    if (host != null) {
                        final int colon = host.indexOf(':');
                        if (colon >= 0) {
                            final int pos = colon + 1;
                            int len = 0;
                            // Count the run of digits immediately after the colon.
                            for (int i = pos; i < host.length(); i++) {
                                if (Character.isDigit(host.charAt(i))) {
                                    len++;
                                } else {
                                    break;
                                }
                            }
                            if (len > 0) {
                                try {
                                    port = Integer.parseInt(host.substring(pos, pos + len));
                                } catch (final NumberFormatException ex) {
                                    // best-effort parse: keep the port from uri.getPort()
                                }
                            }
                            host = host.substring(0, colon);
                        }
                    }
                }
            }
            final String scheme = uri.getScheme();
            // A blank host means the authority was unusable; return null target.
            if (!TextUtils.isBlank(host)) {
                try {
                    target = new HttpHost(host, port, scheme);
                } catch (final IllegalArgumentException ignore) {
                    // invalid host/port combination; treat as "no valid host"
                }
            }
        }
        return target;
    }
    /**
     * Derives the interpreted (absolute) URI that was used to generate the last
     * request. This is done by extracting the request-uri and target origin for
     * the last request and scanning all the redirect locations for the last
     * fragment identifier, then combining the result into a {@link URI}.
     *
     * @param originalURI
     *            original request before any redirects
     * @param target
     *            if the last URI is relative, it is resolved against this target,
     *            or {@code null} if not available.
     * @param redirects
     *            collection of redirect locations since the original request
     *            or {@code null} if not available.
     * @return interpreted (absolute) URI
     */
    public static URI resolve(
            final URI originalURI,
            final HttpHost target,
            final List<URI> redirects) throws URISyntaxException {
        Args.notNull(originalURI, "Request URI");
        final URIBuilder uribuilder;
        if (redirects == null || redirects.isEmpty()) {
            uribuilder = new URIBuilder(originalURI);
        } else {
            // Start from the final redirect location.
            uribuilder = new URIBuilder(redirects.get(redirects.size() - 1));
            String frag = uribuilder.getFragment();
            // read interpreted fragment identifier from redirect locations
            for (int i = redirects.size() - 1; frag == null && i >= 0; i--) {
                frag = redirects.get(i).getFragment();
            }
            uribuilder.setFragment(frag);
        }
        // read interpreted fragment identifier from original request
        if (uribuilder.getFragment() == null) {
            uribuilder.setFragment(originalURI.getFragment());
        }
        // last target origin
        if (target != null && !uribuilder.isAbsolute()) {
            uribuilder.setScheme(target.getSchemeName());
            uribuilder.setHost(target.getHostName());
            uribuilder.setPort(target.getPort());
        }
        return uribuilder.build();
    }
    /**
     * Convenience factory method for {@link URI} instances.
     *
     * @since 5.0
     */
    public static URI create(final HttpHost host, final String path) throws URISyntaxException {
        final URIBuilder builder = new URIBuilder(path);
        if (host != null) {
            builder.setHost(host.getHostName()).setPort(host.getPort()).setScheme(host.getSchemeName());
        }
        return builder.build();
    }
    /**
     * Convenience factory method for {@link URI} instances.
     *
     * @since 5.0
     */
    public static URI create(final String scheme, final URIAuthority host, final String path) throws URISyntaxException {
        final URIBuilder builder = new URIBuilder(path);
        if (scheme != null) {
            builder.setScheme(scheme);
        }
        if (host != null) {
            builder.setHost(host.getHostName()).setPort(host.getPort());
        }
        return builder.build();
    }
    /**
     * This class should not be instantiated.
     */
    private URIUtils() {
    }
}
| |
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.util.xml.impl;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.patterns.ElementPattern;
import static com.intellij.patterns.XmlPatterns.*;
import com.intellij.psi.PsiElement;
import com.intellij.psi.xml.XmlAttribute;
import com.intellij.psi.xml.XmlDocument;
import com.intellij.psi.xml.XmlFile;
import com.intellij.psi.xml.XmlTag;
import com.intellij.semantic.SemContributor;
import com.intellij.semantic.SemRegistrar;
import com.intellij.semantic.SemService;
import com.intellij.util.NullableFunction;
import com.intellij.util.Processor;
import com.intellij.util.xml.DomManager;
import com.intellij.util.xml.EvaluatedXmlName;
import com.intellij.util.xml.XmlName;
import com.intellij.util.xml.reflect.CustomDomChildrenDescription;
import com.intellij.util.xml.reflect.DomChildrenDescription;
import com.intellij.util.xml.reflect.DomCollectionChildDescription;
import com.intellij.util.xml.reflect.DomFixedChildDescription;
import gnu.trove.THashSet;
import org.jetbrains.annotations.Nullable;
import java.util.Collection;
import java.util.Set;
/**
* @author peter
*/
public class DomSemContributor extends SemContributor {
private final SemService mySemService;
private final DomManagerImpl myDomManager;
  public DomSemContributor(SemService semService, DomManager domManager) {
    // Keep the SemService for looking up already-registered sem elements, and
    // narrow the DomManager to its implementation type, which the providers
    // registered in registerSemProviders() require.
    mySemService = semService;
    myDomManager = (DomManagerImpl)domManager;
  }
public void registerSemProviders(SemRegistrar registrar) {
registrar.registerSemElementProvider(DomManagerImpl.FILE_DESCRIPTION_KEY, xmlFile(), new NullableFunction<XmlFile, FileDescriptionCachedValueProvider>() {
public FileDescriptionCachedValueProvider fun(XmlFile xmlFile) {
ApplicationManager.getApplication().assertReadAccessAllowed();
return new FileDescriptionCachedValueProvider(myDomManager, xmlFile);
}
});
registrar.registerSemElementProvider(DomManagerImpl.DOM_HANDLER_KEY, xmlTag().withParent(psiElement(XmlDocument.class).withParent(xmlFile())), new NullableFunction<XmlTag, DomInvocationHandler>() {
public DomInvocationHandler fun(XmlTag xmlTag) {
final FileDescriptionCachedValueProvider provider =
mySemService.getSemElement(DomManagerImpl.FILE_DESCRIPTION_KEY, xmlTag.getContainingFile());
assert provider != null;
final DomFileElementImpl element = provider.getFileElement();
if (element != null) {
final DomRootInvocationHandler handler = element.getRootHandler();
if (handler.getXmlTag() == xmlTag) {
xmlTag.putUserData(DomManagerImpl.CACHED_DOM_HANDLER, handler);
return handler;
}
}
return null;
}
});
final ElementPattern<XmlTag> nonRootTag = xmlTag().withParent(xmlTag());
registrar.registerSemElementProvider(DomManagerImpl.DOM_INDEXED_HANDLER_KEY, nonRootTag, new NullableFunction<XmlTag, IndexedElementInvocationHandler>() {
public IndexedElementInvocationHandler fun(XmlTag tag) {
final XmlTag parentTag = PhysicalDomParentStrategy.getParentTag(tag);
assert parentTag != null;
DomInvocationHandler parent = mySemService.getSemElement(DomManagerImpl.DOM_HANDLER_KEY, parentTag);
if (parent == null) return null;
final String localName = tag.getLocalName();
final String namespace = tag.getNamespace();
final DomFixedChildDescription description =
findChildrenDescription(parent.getGenericInfo().getFixedChildrenDescriptions(), tag, parent);
if (description != null) {
final int totalCount = description.getCount();
int index = 0;
PsiElement current = tag;
while (true) {
current = current.getPrevSibling();
if (current == null) {
break;
}
if (current instanceof XmlTag) {
final XmlTag xmlTag = (XmlTag)current;
if (localName.equals(xmlTag.getName()) && namespace.equals(xmlTag.getNamespace())) {
index++;
if (index >= totalCount) {
return null;
}
}
}
}
final IndexedElementInvocationHandler handler =
new IndexedElementInvocationHandler(parent.createEvaluatedXmlName(description.getXmlName()), (FixedChildDescriptionImpl)description, index,
new PhysicalDomParentStrategy(tag, myDomManager), myDomManager, namespace);
tag.putUserData(DomManagerImpl.CACHED_DOM_HANDLER, handler);
return handler;
}
return null;
}
});
registrar.registerSemElementProvider(DomManagerImpl.DOM_COLLECTION_HANDLER_KEY, nonRootTag, new NullableFunction<XmlTag, CollectionElementInvocationHandler>() {
public CollectionElementInvocationHandler fun(XmlTag tag) {
final XmlTag parentTag = PhysicalDomParentStrategy.getParentTag(tag);
assert parentTag != null;
DomInvocationHandler parent = mySemService.getSemElement(DomManagerImpl.DOM_HANDLER_KEY, parentTag);
if (parent == null) return null;
final DomCollectionChildDescription description = findChildrenDescription(parent.getGenericInfo().getCollectionChildrenDescriptions(), tag, parent);
if (description != null) {
final CollectionElementInvocationHandler handler =
new CollectionElementInvocationHandler(description.getType(), tag, (AbstractCollectionChildDescription)description, parent);
tag.putUserData(DomManagerImpl.CACHED_DOM_HANDLER, handler);
return handler;
}
return null;
}
});
registrar.registerSemElementProvider(DomManagerImpl.DOM_CUSTOM_HANDLER_KEY, nonRootTag, new NullableFunction<XmlTag, CollectionElementInvocationHandler>() {
private ThreadLocal<Set<XmlTag>> myCalculating = new ThreadLocal<Set<XmlTag>>() {
@Override
protected Set<XmlTag> initialValue() {
return new THashSet<XmlTag>();
}
};
public CollectionElementInvocationHandler fun(XmlTag tag) {
if (StringUtil.isEmpty(tag.getName())) return null;
final XmlTag parentTag = PhysicalDomParentStrategy.getParentTag(tag);
assert parentTag != null;
if (!myCalculating.get().add(tag)) {
return null;
}
DomInvocationHandler parent;
try {
parent = mySemService.getSemElement(DomManagerImpl.DOM_HANDLER_KEY, parentTag);
}
finally {
myCalculating.get().remove(tag);
}
if (parent == null) return null;
final CustomDomChildrenDescription customDescription = parent.getGenericInfo().getCustomNameChildrenDescription();
if (customDescription == null) return null;
if (mySemService.getSemElement(DomManagerImpl.DOM_INDEXED_HANDLER_KEY, tag) == null &&
mySemService.getSemElement(DomManagerImpl.DOM_COLLECTION_HANDLER_KEY, tag) == null) {
final CollectionElementInvocationHandler handler =
new CollectionElementInvocationHandler(customDescription.getType(), tag, (AbstractCollectionChildDescription)customDescription, parent);
tag.putUserData(DomManagerImpl.CACHED_DOM_HANDLER, handler);
return handler;
}
return null;
}
});
registrar.registerSemElementProvider(DomManagerImpl.DOM_ATTRIBUTE_HANDLER_KEY, xmlAttribute(), new NullableFunction<XmlAttribute, AttributeChildInvocationHandler>() {
public AttributeChildInvocationHandler fun(final XmlAttribute attribute) {
final XmlTag tag = PhysicalDomParentStrategy.getParentTag(attribute);
final DomInvocationHandler handler = mySemService.getSemElement(DomManagerImpl.DOM_HANDLER_KEY, tag);
if (handler == null) return null;
final String localName = attribute.getLocalName();
final Ref<AttributeChildInvocationHandler> result = Ref.create(null);
handler.getGenericInfo().processAttributeChildrenDescriptions(new Processor<AttributeChildDescriptionImpl>() {
public boolean process(AttributeChildDescriptionImpl description) {
if (description.getXmlName().getLocalName().equals(localName)) {
final EvaluatedXmlName evaluatedXmlName = handler.createEvaluatedXmlName(description.getXmlName());
final String ns = evaluatedXmlName.getNamespace(tag, handler.getFile());
//see XmlTagImpl.getAttribute(localName, namespace)
if (ns.equals(tag.getNamespace()) && localName.equals(attribute.getName()) ||
ns.equals(attribute.getNamespace())) {
final AttributeChildInvocationHandler attributeHandler =
new AttributeChildInvocationHandler(evaluatedXmlName, description, myDomManager,
new PhysicalDomParentStrategy(attribute, myDomManager));
attribute.putUserData(DomManagerImpl.CACHED_DOM_HANDLER, attributeHandler);
result.set(attributeHandler);
return false;
}
}
return true;
}
});
return result.get();
}
});
}
@Nullable
private static <T extends DomChildrenDescription> T findChildrenDescription(Collection<T> descriptions, XmlTag tag, DomInvocationHandler parent) {
final String localName = tag.getLocalName();
final String namespace = tag.getNamespace();
final String qName = tag.getName();
final XmlFile file = parent.getFile();
for (final T description : descriptions) {
final XmlName xmlName = description.getXmlName();
if (localName.equals(xmlName.getLocalName()) || qName.equals(xmlName.getLocalName())) {
final EvaluatedXmlName evaluatedXmlName = parent.createEvaluatedXmlName(xmlName);
if (DomImplUtil.isNameSuitable(evaluatedXmlName, localName, qName, namespace, file)) {
return description;
}
}
}
return null;
}
}
| |
package seedu.task.logic.parser;
import static seedu.task.commons.core.Messages.MESSAGE_INVALID_COMMAND_FORMAT;
import static seedu.task.commons.core.Messages.MESSAGE_UNKNOWN_COMMAND;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import seedu.task.commons.core.Config;
import seedu.task.commons.exceptions.IllegalValueException;
import seedu.task.commons.util.StringUtil;
import seedu.task.logic.commands.*;
/**
* Parses user input.
*/
public class Parser {

    /**
     * Used for initial separation of command word and args. The arguments group
     * keeps the whitespace that separated it from the command word.
     */
    private static final Pattern BASIC_COMMAND_FORMAT = Pattern.compile("(?<commandWord>\\S+)(?<arguments>.*)");

    private static final Pattern TASK_INDEX_ARGS_FORMAT = Pattern.compile("(?<targetIndex>.+)");

    /** One or more keywords separated by whitespace. */
    private static final Pattern KEYWORDS_ARGS_FORMAT = Pattern.compile("(?<keywords>\\w+(?:\\s+\\w+)*)");

    /**
     * Add-command arguments. '/' forward slashes are reserved for delimiter
     * prefixes; every field except the task name is optional, followed by a
     * variable number of " t/" tags.
     */
    private static final Pattern TASK_DATA_ARGS_FORMAT =
            Pattern.compile("(?<taskName>[^/]+)" + "( sd/(?<startDate>[^/]*)){0,1}" + "( st/(?<startTime>[^/]*)){0,1}"
                    + "( d/(?<dueDate>[^/]*)){0,1}" + "( e/(?<dueTime>[^/]*)){0,1}" + "( i/(?<importance>[^/]*)){0,1}"
                    + "(?<tagArguments>(?: t/[^/]+)*)");

    /**
     * Edit/bare-command arguments: a leading target index, then the same optional
     * fields and variable number of tags as the add command.
     */
    private static final Pattern EDIT_TASK_DATA_ARGS_FORMAT =
            Pattern.compile("(?<targetIndex>\\d+)" + "(?<taskName>[^/]+){0,1}" + "( sd/(?<startDate>[^/]*)){0,1}"
                    + "( st/(?<startTime>[^/]*)){0,1}" + "( d/(?<dueDate>[^/]*)){0,1}" + "( e/(?<dueTime>[^/]*)){0,1}"
                    + "( i/(?<importance>[^/]*)){0,1}" + "(?<tagArguments>(?: t/[^/]+)*)");

    private static final String EMPTY_STRING = "";
    private static final String TARGET_INDEX_SPLIT = " ";
    private static final int FIRST_INDEX = 0;

    // Only needed by the change-directory commands; null when the no-arg
    // constructor was used.
    private Config config;

    public Parser() {
    }

    public Parser(Config config) {
        this.config = config;
    }

    /**
     * Parses user input into command for execution.
     *
     * @param userInput full user input string
     * @return the command based on the user input
     */
    public Command parseCommand(String userInput) {
        final Matcher matcher = BASIC_COMMAND_FORMAT.matcher(userInput.trim());
        if (!matcher.matches()) {
            return new IncorrectCommand(String.format(MESSAGE_INVALID_COMMAND_FORMAT, HelpCommand.MESSAGE_USAGE));
        }

        final String commandWord = matcher.group("commandWord");
        final String arguments = matcher.group("arguments");
        switch (commandWord) {

        case AddCommand.SHORTCUT:
        case AddCommand.COMMAND_WORD:
            return prepareAdd(arguments);

        // @@author A0152952A
        case BareCommand.SHORTCUT:
        case BareCommand.COMMAND_WORD:
            return prepareBare(arguments);

        case EditCommand.SHORTCUT:
        // @@author
        case EditCommand.COMMAND_WORD:
            return prepareEdit(arguments);

        case SelectCommand.SHORTCUT:
        case SelectCommand.COMMAND_WORD:
            return prepareSelect(arguments);

        case DeleteCommand.SHORTCUT:
        case DeleteCommand.COMMAND_WORD:
            return prepareDelete(arguments);

        case DeleteMarkedCommand.COMMAND_WORD:
            return prepareDeleteMarked(arguments);

        case ClearMarkedCommand.COMMAND_WORD:
            return new ClearMarkedCommand();

        case ClearCommand.COMMAND_WORD:
            return new ClearCommand();

        case MarkCommand.SHORTCUT:
        // @@author A0127720M
        case MarkCommand.COMMAND_WORD:
            return prepareMark(arguments);

        // @@author
        case FindCommand.SHORTCUT:
        case FindCommand.COMMAND_WORD:
            return prepareFind(arguments);

        case ListCommand.SHORTCUT:
        case ListCommand.COMMAND_WORD:
            return new ListCommand();

        case ExitCommand.COMMAND_WORD:
            return new ExitCommand();

        case HelpCommand.SHORTCUT:
        case HelpCommand.COMMAND_WORD:
            return new HelpCommand(arguments);

        case UndoCommand.SHORTCUT:
        case UndoCommand.COMMAND_WORD:
            return prepareUndo(arguments);

        case RedoCommand.SHORTCUT:
        case RedoCommand.COMMAND_WORD:
            return prepareRedo(arguments);

        case ChangeDirectoryCommand.COMMAND_WORD:
            return prepareChangeDirectory(arguments, config, false);

        case ChangeDirectoryCommand.COMMAND_WORD_SAVE:
            return prepareChangeDirectory(arguments, config, true);

        default:
            return new IncorrectCommand(MESSAGE_UNKNOWN_COMMAND);
        }
    }

    // @@author A0139284X
    /**
     * Parses arguments in the context of the redo task command.
     *
     * @param args full command args string
     * @return the prepared command
     */
    private Command prepareRedo(String args) {
        Optional<Integer[]> index = parseIndex(args);
        if (!index.isPresent()) {
            // No (valid) count given: fall back to the default number of redos.
            return new RedoCommand(RedoCommand.DEFAULT_NUMBER_OF_REDO);
        }
        // Unbox directly; the previous toString()/parseInt round-trip was redundant.
        return new RedoCommand(index.get()[FIRST_INDEX]);
    }

    /**
     * Parses arguments in the context of the undo command.
     *
     * @param args full command args string
     * @return the prepared command
     */
    private Command prepareUndo(String args) {
        Optional<Integer[]> index = parseIndex(args);
        if (!index.isPresent()) {
            // No (valid) count given: fall back to the default number of undos.
            return new UndoCommand(UndoCommand.DEFAULT_NUMBER_OF_UNDO);
        }
        return new UndoCommand(index.get()[FIRST_INDEX]);
    }

    // @@author A0127720M
    /**
     * Parses arguments in the context of the delete marked task command.
     *
     * @param args full command args string
     * @return the prepared command
     */
    private Command prepareDeleteMarked(String args) {
        Optional<Integer[]> index = parseIndex(args);
        if (!index.isPresent()) {
            return new IncorrectCommand(String.format(MESSAGE_INVALID_COMMAND_FORMAT, DeleteMarkedCommand.MESSAGE_USAGE));
        }
        return new DeleteMarkedCommand(index.get());
    }

    /**
     * Parses arguments in the context of the add task command.
     *
     * @param args full command args string
     * @return the prepared command
     */
    private Command prepareAdd(String args) {
        final Matcher matcher = TASK_DATA_ARGS_FORMAT.matcher(args.trim());
        // Validate arg string format
        if (!matcher.matches()) {
            return new IncorrectCommand(String.format(MESSAGE_INVALID_COMMAND_FORMAT, AddCommand.MESSAGE_USAGE));
        }
        try {
            return new AddCommand(matcher.group("taskName"), matcher.group("startDate"), matcher.group("startTime"),
                    matcher.group("dueDate"), matcher.group("dueTime"), matcher.group("importance"),
                    getTagsFromArgs(matcher.group("tagArguments")));
        } catch (IllegalValueException ive) {
            return new IncorrectCommand(ive.getMessage());
        }
    }

    /**
     * Extracts the new task's tags from the add command's tag arguments string.
     * Merges duplicate tag strings.
     */
    private static Set<String> getTagsFromArgs(String tagArguments) throws IllegalValueException {
        // no tags
        if (tagArguments.isEmpty()) {
            return Collections.emptySet();
        }
        // replace first delimiter prefix, then split
        final Collection<String> tagStrings = Arrays.asList(tagArguments.replaceFirst(" t/", "").split(" t/"));
        return new HashSet<>(tagStrings);
    }

    /**
     * Parses arguments in the context of the delete task command.
     *
     * @param args full command args string
     * @return the prepared command
     */
    private Command prepareDelete(String args) {
        Optional<Integer[]> index = parseIndex(args);
        if (!index.isPresent()) {
            return new IncorrectCommand(String.format(MESSAGE_INVALID_COMMAND_FORMAT, DeleteCommand.MESSAGE_USAGE));
        }
        return new DeleteCommand(index.get());
    }

    // @@author A0152952A - reused
    /**
     * Parses arguments in the context of the bare task command.
     *
     * @param args full command args string
     * @return the prepared command
     */
    private Command prepareBare(String args) {
        final Matcher matcher = EDIT_TASK_DATA_ARGS_FORMAT.matcher(args.trim());
        // Validate arg string format
        if (!matcher.matches()) {
            return new IncorrectCommand(String.format(MESSAGE_INVALID_COMMAND_FORMAT, BareCommand.MESSAGE_USAGE));
        }
        try {
            return new BareCommand(matcher.group("targetIndex"), matcher.group("taskName"), matcher.group("startDate"),
                    matcher.group("startTime"), matcher.group("dueDate"), matcher.group("dueTime"),
                    matcher.group("importance"), getTagsFromArgs(matcher.group("tagArguments")));
        } catch (IllegalValueException ive) {
            return new IncorrectCommand(ive.getMessage());
        }
    }

    // @@author
    /**
     * Parses arguments in the context of the edit task command.
     *
     * @param args full command args string
     * @return the prepared command
     */
    // @@author A0142360U
    private Command prepareEdit(String args) {
        final Matcher matcher = EDIT_TASK_DATA_ARGS_FORMAT.matcher(args.trim());
        // Validate arg string format
        if (!matcher.matches()) {
            return new IncorrectCommand(String.format(MESSAGE_INVALID_COMMAND_FORMAT, EditCommand.MESSAGE_USAGE));
        }
        try {
            return new EditCommand(matcher.group("targetIndex"), matcher.group("taskName"), matcher.group("startDate"),
                    matcher.group("startTime"), matcher.group("dueDate"), matcher.group("dueTime"),
                    matcher.group("importance"), getTagsFromArgs(matcher.group("tagArguments")));
        } catch (IllegalValueException ive) {
            return new IncorrectCommand(ive.getMessage());
        }
    }

    // @@author A0142360U
    /**
     * Checks that the directory argument is non-empty and builds the
     * change-directory command.
     */
    private Command prepareChangeDirectory(String args, Config config, boolean transferTasks) {
        // BASIC_COMMAND_FORMAT leaves the whitespace that separated the command
        // word in front of the arguments, so trim before validating and before
        // handing the path to the command (previously the leading space was
        // passed through untouched).
        final String directory = args.trim();
        if (directory.equals(EMPTY_STRING)) {
            return new IncorrectCommand(
                    String.format(MESSAGE_INVALID_COMMAND_FORMAT, ChangeDirectoryCommand.MESSAGE_USAGE));
        }
        return new ChangeDirectoryCommand(directory, config, transferTasks);
    }

    // @@author
    /**
     * Parses arguments in the context of the select task command.
     *
     * @param args full command args string
     * @return the prepared command
     */
    private Command prepareSelect(String args) {
        Optional<Integer[]> index = parseIndex(args);
        if (!index.isPresent()) {
            return new IncorrectCommand(String.format(MESSAGE_INVALID_COMMAND_FORMAT, SelectCommand.MESSAGE_USAGE));
        }
        return new SelectCommand(index.get()[FIRST_INDEX]);
    }

    // @@author A0127720M
    /**
     * Parses arguments in the context of the mark task command.
     *
     * @param arguments full command args string
     * @return the prepared command
     */
    private Command prepareMark(String arguments) {
        Optional<Integer[]> index = parseIndex(arguments);
        if (!index.isPresent()) {
            return new IncorrectCommand(String.format(MESSAGE_INVALID_COMMAND_FORMAT, MarkCommand.MESSAGE_USAGE));
        }
        return new MarkCommand(index.get());
    }

    // @@author A0139284X
    /**
     * Returns the whitespace-separated indexes in the {@code command} IF every
     * token is a positive unsigned integer. Returns an {@code Optional.empty()}
     * otherwise.
     */
    private Optional<Integer[]> parseIndex(String command) {
        final Matcher matcher = TASK_INDEX_ARGS_FORMAT.matcher(command.trim());
        if (!matcher.matches()) {
            return Optional.empty();
        }

        final String[] indexes = matcher.group("targetIndex").split(TARGET_INDEX_SPLIT);
        final Integer[] targetIndexes = new Integer[indexes.length];
        for (int i = 0; i < indexes.length; i++) {
            // Any non-numeric token invalidates the whole list.
            if (!StringUtil.isUnsignedInteger(indexes[i])) {
                return Optional.empty();
            }
            targetIndexes[i] = Integer.parseInt(indexes[i]);
        }
        return Optional.of(targetIndexes);
    }

    // @@author
    /**
     * Parses arguments in the context of the find task command.
     *
     * @param args full command args string
     * @return the prepared command
     */
    private Command prepareFind(String args) {
        final Matcher matcher = KEYWORDS_ARGS_FORMAT.matcher(args.trim());
        if (!matcher.matches()) {
            return new IncorrectCommand(String.format(MESSAGE_INVALID_COMMAND_FORMAT, FindCommand.MESSAGE_USAGE));
        }

        // keywords delimited by whitespace
        final String[] keywords = matcher.group("keywords").split("\\s+");
        final Set<String> keywordSet = new HashSet<>(Arrays.asList(keywords));
        return new FindCommand(keywordSet);
    }

}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.amplifybackend.model;
import java.io.Serializable;
import java.util.Objects;

import javax.annotation.Generated;

import com.amazonaws.AmazonWebServiceRequest;
/**
* <p>
* The request body for CreateBackendStorage.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/amplifybackend-2020-08-11/CreateBackendStorage"
* target="_top">AWS API Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class CreateBackendStorageRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {
    // NOTE(review): this class is code-generator output (@Generated); hand edits
    // will be lost if the SDK model is regenerated.

    /** The app ID. */
    private String appId;

    /** The name of the backend environment. */
    private String backendEnvironmentName;

    /** The resource configuration for creating backend storage. */
    private CreateBackendStorageResourceConfig resourceConfig;

    /** The name of the storage resource. */
    private String resourceName;

    /**
     * @param appId
     *        The app ID.
     */
    public void setAppId(String appId) {
        this.appId = appId;
    }

    /**
     * @return The app ID.
     */
    public String getAppId() {
        return this.appId;
    }

    /**
     * @param appId
     *        The app ID.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateBackendStorageRequest withAppId(String appId) {
        setAppId(appId);
        return this;
    }

    /**
     * @param backendEnvironmentName
     *        The name of the backend environment.
     */
    public void setBackendEnvironmentName(String backendEnvironmentName) {
        this.backendEnvironmentName = backendEnvironmentName;
    }

    /**
     * @return The name of the backend environment.
     */
    public String getBackendEnvironmentName() {
        return this.backendEnvironmentName;
    }

    /**
     * @param backendEnvironmentName
     *        The name of the backend environment.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateBackendStorageRequest withBackendEnvironmentName(String backendEnvironmentName) {
        setBackendEnvironmentName(backendEnvironmentName);
        return this;
    }

    /**
     * @param resourceConfig
     *        The resource configuration for creating backend storage.
     */
    public void setResourceConfig(CreateBackendStorageResourceConfig resourceConfig) {
        this.resourceConfig = resourceConfig;
    }

    /**
     * @return The resource configuration for creating backend storage.
     */
    public CreateBackendStorageResourceConfig getResourceConfig() {
        return this.resourceConfig;
    }

    /**
     * @param resourceConfig
     *        The resource configuration for creating backend storage.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateBackendStorageRequest withResourceConfig(CreateBackendStorageResourceConfig resourceConfig) {
        setResourceConfig(resourceConfig);
        return this;
    }

    /**
     * @param resourceName
     *        The name of the storage resource.
     */
    public void setResourceName(String resourceName) {
        this.resourceName = resourceName;
    }

    /**
     * @return The name of the storage resource.
     */
    public String getResourceName() {
        return this.resourceName;
    }

    /**
     * @param resourceName
     *        The name of the storage resource.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateBackendStorageRequest withResourceName(String resourceName) {
        setResourceName(resourceName);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getAppId() != null)
            sb.append("AppId: ").append(getAppId()).append(",");
        if (getBackendEnvironmentName() != null)
            sb.append("BackendEnvironmentName: ").append(getBackendEnvironmentName()).append(",");
        if (getResourceConfig() != null)
            sb.append("ResourceConfig: ").append(getResourceConfig()).append(",");
        if (getResourceName() != null)
            sb.append("ResourceName: ").append(getResourceName());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        // instanceof is false for null, so the former explicit null check is subsumed.
        if (!(obj instanceof CreateBackendStorageRequest))
            return false;
        CreateBackendStorageRequest other = (CreateBackendStorageRequest) obj;
        // Objects.equals replaces the generated XOR-null / "== false" field checks
        // with identical semantics (both null, or non-null and equal).
        return Objects.equals(getAppId(), other.getAppId())
                && Objects.equals(getBackendEnvironmentName(), other.getBackendEnvironmentName())
                && Objects.equals(getResourceConfig(), other.getResourceConfig())
                && Objects.equals(getResourceName(), other.getResourceName());
    }

    @Override
    public int hashCode() {
        // Objects.hash uses the same 31-based accumulation (seed 1, null -> 0) as
        // the previous hand-rolled loop, so the hash values are unchanged.
        return Objects.hash(getAppId(), getBackendEnvironmentName(), getResourceConfig(), getResourceName());
    }

    @Override
    public CreateBackendStorageRequest clone() {
        return (CreateBackendStorageRequest) super.clone();
    }

}
| |
package se.kth.scs.partitioning.hovercut;
import com.mysql.jdbc.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;
import se.kth.scs.partitioning.Partition;
import se.kth.scs.partitioning.PartitionState;
import se.kth.scs.partitioning.Vertex;
/**
*
* @author Hooman
*/
public class HovercutMysqlState implements PartitionState {

  private final short k; // Number of partitions. The partition ID must be from 0 up to k.
  private final String dbUrl;
  private final String dbUser;
  private final String dbPass;
  // One lazily-created connection per calling thread (see getConnection()).
  private final ThreadLocal<Connection> cons = new ThreadLocal<>();

  /**
   * Creates the state backed by a MySQL database.
   *
   * @param k       number of partitions
   * @param clearDb when true, clears all tables and re-creates the k partitions
   * @throws SQLException if the database cannot be reached or the setup queries fail
   */
  public HovercutMysqlState(short k, String dbUrl, String dbUser, String dbPass, boolean clearDb) throws SQLException {
    this.k = k;
    this.dbUrl = dbUrl;
    this.dbUser = dbUser;
    this.dbPass = dbPass;
    // FIX: the setup connection was previously closed only on the clearDb path,
    // leaking it (and leaking on exceptions) otherwise; try-with-resources
    // guarantees it is closed in every case.
    try (Connection con = createConnection(dbUrl, dbUser, dbPass)) {
      if (clearDb) {
        HovercutMySqlQueries.clearAllTables(con);
        List<Partition> partitions = new ArrayList<>(k);
        for (short i = 0; i < k; i++) {
          partitions.add(new Partition(i));
        }
        HovercutMySqlQueries.putPartitions(partitions, con);
      }
    }
  }

  /** Opens a new connection with batched-statement rewriting enabled. */
  private Connection createConnection(String dbUrl, String dbUser, String dbPass) throws SQLException {
    return (Connection) DriverManager.getConnection(
        String.format("%s?user=%s&password=%s&rewriteBatchedStatements=true", dbUrl, dbUser, dbPass));
  }

  /** Returns this thread's connection, creating and caching it on first use. */
  private Connection getConnection() throws SQLException {
    Connection con = cons.get();
    if (con == null) {
      con = createConnection(dbUrl, dbUser, dbPass);
      cons.set(con);
    }
    return con;
  }

  @Override
  public short getNumberOfPartitions() {
    return k;
  }

  @Override
  public void applyState() {
    // Commits this thread's pending work. Errors are logged and swallowed, in
    // keeping with the best-effort contract of the other methods below.
    try {
      Connection con = getConnection();
      con.commit();
    } catch (SQLException ex) {
      ex.printStackTrace();
    }
  }

  @Override
  public void releaseResources(boolean clearAll) {
    // TODO: implement clearAll.
    // No overall resource is used.
  }

  /** @return the vertex with id {@code vid}, or null if absent or on DB error. */
  @Override
  public Vertex getVertex(int vid) {
    Vertex v = null;
    try {
      Connection con = getConnection();
      v = HovercutMySqlQueries.getVertex(vid, con);
    } catch (SQLException ex) {
      ex.printStackTrace();
    }
    return v;
  }

  /** @return the vertices for {@code vids}, or null on DB error. */
  @Override
  public Map<Integer, Vertex> getVertices(Set<Integer> vids) {
    Map<Integer, Vertex> vertices = null;
    try {
      Connection con = getConnection();
      vertices = HovercutMySqlQueries.getVertices(vids, con);
    } catch (SQLException ex) {
      ex.printStackTrace();
    }
    return vertices;
  }

  /** @return all vertices ({@code expectedSize} is unused here), or null on DB error. */
  @Override
  public Map<Integer, Vertex> getAllVertices(int expectedSize) {
    Map<Integer, Vertex> vertices = null;
    try {
      Connection con = getConnection();
      vertices = HovercutMySqlQueries.getAllVertices(con);
    } catch (SQLException ex) {
      ex.printStackTrace();
    }
    return vertices;
  }

  @Override
  public void putVertex(Vertex v) {
    try {
      Connection con = getConnection();
      HovercutMySqlQueries.putVertex(v, con);
    } catch (SQLException ex) {
      ex.printStackTrace();
    }
  }

  @Override
  public void putVertices(Collection<Vertex> vs) {
    try {
      Connection con = getConnection();
      HovercutMySqlQueries.putVertices(vs, con);
    } catch (SQLException ex) {
      ex.printStackTrace();
    }
  }

  /** @return the partition with id {@code pid}, or null on DB error. */
  @Override
  public Partition getPartition(short pid) {
    Partition p = null;
    try {
      Connection con = getConnection();
      p = HovercutMySqlQueries.getPartition(pid, con);
    } catch (SQLException ex) {
      ex.printStackTrace();
    }
    return p;
  }

  // (sic) the method name "getPartions" is fixed by the PartitionState interface.
  /** @return the partitions for {@code pids}, or null on DB error. */
  @Override
  public List<Partition> getPartions(short[] pids) {
    List<Partition> partitions = null;
    try {
      Connection con = getConnection();
      partitions = HovercutMySqlQueries.getPartitions(pids, con);
    } catch (SQLException ex) {
      ex.printStackTrace();
    }
    return partitions;
  }

  /** @return all partitions, or null on DB error. */
  @Override
  public List<Partition> getAllPartitions() {
    List<Partition> partitions = null;
    try {
      Connection con = getConnection();
      partitions = HovercutMySqlQueries.getAllPartitions(con);
    } catch (SQLException ex) {
      ex.printStackTrace();
    }
    return partitions;
  }

  @Override
  public void putPartition(Partition p) {
    try {
      Connection con = getConnection();
      HovercutMySqlQueries.putPartition(p, con);
    } catch (SQLException ex) {
      ex.printStackTrace();
    }
  }

  @Override
  public void putPartitions(List<Partition> p) {
    try {
      Connection con = getConnection();
      HovercutMySqlQueries.putPartitions(p, con);
    } catch (SQLException ex) {
      ex.printStackTrace();
    }
  }

  @Override
  public void releaseTaskResources() {
    try {
      // FIX: close the cached connection if one exists instead of calling
      // getConnection(), which would CREATE a fresh connection just to close it.
      Connection con = cons.get();
      if (con != null) {
        con.close();
      }
    } catch (SQLException ex) {
      ex.printStackTrace();
    } finally {
      // FIX: drop the ThreadLocal entry so a later call on this thread gets a
      // fresh connection rather than the stale, closed one (previously the
      // closed connection stayed cached).
      cons.remove();
    }
  }

  @Override
  public void waitForAllUpdates(int expectedSize) {
    throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
  }
}
| |
package com.myrunning.leaderboard.web;
/**
* File: ListCompetitorsController.java
* Author: Joshua Forester
* Date: 2009/08/24
* Description: Controller object for listing Competitors.
**/
import java.util.List;
import java.util.ArrayList;
import javax.servlet.http.HttpServletRequest;
import org.springframework.ui.ModelMap;
import org.springframework.stereotype.Controller;
import org.springframework.validation.BindingResult;
import org.springframework.web.bind.support.SessionStatus;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.apache.log4j.Logger;
import org.apache.log4j.MDC;
import com.myrunning.leaderboard.global.Constants;
import com.myrunning.leaderboard.model.Admin;
import com.myrunning.leaderboard.model.Person;
import com.myrunning.leaderboard.model.Competitor;
import com.myrunning.leaderboard.model.Course;
import com.myrunning.leaderboard.db.DataAccessFilter;
import com.myrunning.leaderboard.db.dao.ifacedao.DataResourceDao;
import com.myrunning.leaderboard.db.dao.ifacedao.IdentityDao;
import com.myrunning.leaderboard.db.dao.ifacedao.PersonDao;
import com.myrunning.leaderboard.db.dao.ifacedao.AdminDao;
import com.myrunning.leaderboard.db.dao.ifacedao.CompetitorDao;
import com.myrunning.leaderboard.db.dao.ifacedao.CourseDao;
@Controller
public class ListCompetitorsController extends NewLayoutController {
static Logger logger = Logger.getLogger(ListCompetitorsController.class);
@Autowired
private DataResourceDao dataResourceDao;
@Autowired
private IdentityDao identityDao;
@Autowired
@Qualifier("personDao")
private PersonDao personDao;
@Autowired
private CompetitorDao competitorDao;
@Autowired
private AdminDao adminDao;
@Autowired
private CourseDao courseDao;
@RequestMapping(value = "/competitors", method = RequestMethod.GET)
public String getAllCompetitors(HttpServletRequest request, ModelMap model) {
    // Load every competitor and expose the list to the view layer.
    final List<Competitor> allCompetitors = competitorDao.getAll(false);
    model.addAttribute("competitors", allCompetitors);
    setMultiViewRenderObject(model, "competitors");
    return decorateView(".listCompetitors_view", request);
}
@RequestMapping(value = "/competitors/{competitorId}", method = RequestMethod.GET)
public String getCompetitor(@PathVariable Long competitorId,
                            HttpServletRequest request,
                            ModelMap model) {
    // The list view expects a collection, so wrap the single match in a list.
    final Competitor match = competitorDao.getCompetitorById(competitorId);
    final List<Competitor> singleResult = new ArrayList<Competitor>();
    singleResult.add(match);
    model.addAttribute("competitors", singleResult);
    setMultiViewRenderObject(model, "competitors");
    return decorateView(".listCompetitors_view", request);
}
@RequestMapping(value = "/teams/{teamId}/competitors", method = RequestMethod.GET)
public String getTeamCompetitors(@PathVariable Long teamId,
                                 HttpServletRequest request,
                                 ModelMap model) {
    // All competitors belonging to the given team, rendered with the shared list view.
    final List<Competitor> teamCompetitors = competitorDao.getCompetitorsByTeamId(teamId, false);
    model.addAttribute("competitors", teamCompetitors);
    setMultiViewRenderObject(model, "competitors");
    return decorateView(".listCompetitors_view", request);
}
/*
//TODO: implement getCompetitorByCourseId
@RequestMapping(value = "/courses/{courseId}/competitors", method = RequestMethod.GET)
public String getCourseCompetitors(@PathVariable Long courseId,
HttpServletRequest request,
ModelMap model) {
List<Competitor> competitors = competitorDao.getCompetitorsByCourseId(courseId, false);
model.addAttribute("competitors", competitors);
setMultiViewRenderObject(model, "competitors");
return decorateView(".listCompetitors_view", request);
}
*/
@RequestMapping(value = "/courses/{courseId}/competitors/map.htm", method = RequestMethod.GET)
public String getDescription(@PathVariable Long courseId,
HttpServletRequest request,
ModelMap model) {
Course course = courseDao.getCourseById(courseId);
model.addAttribute("course", course);
model.addAttribute("courseId", courseId);
setMultiViewRenderObject(model, "courseId");
return decorateView(".showCompetitorMap_dashboard", request);
}
@RequestMapping(value="/deleteCompetitor.htm", method=RequestMethod.GET)
public String deleteCompetitor(@RequestParam(value="competitor_id") Integer competitorId, HttpServletRequest request, ModelMap model) {
Admin admin = (Admin) request.getSession().getAttribute(Constants.ADMIN_SESSION_KEY);
if (admin == null) {
return Constants.OWNERSHIP_ERROR;
}
if (DataAccessFilter.filterDataResourceById(competitorId, admin.getId()) == null) {
return Constants.OWNERSHIP_ERROR;
}
if (competitorId != null) {
Competitor competitor = competitorDao.getCompetitorById(competitorId);
model.addAttribute("competitor", competitor);
}
return decorateView(".deleteCompetitorConfirm_view", request);
}
@RequestMapping(value="/deleteCompetitorConfirmed.htm", method=RequestMethod.POST)
public String deleteCompetitorConfirmed(@ModelAttribute("competitor") Competitor competitor, BindingResult result, SessionStatus status, HttpServletRequest request) {
// validate competitor
Admin admin = (Admin) request.getSession().getAttribute(Constants.ADMIN_SESSION_KEY);
if (admin == null) {
return Constants.OWNERSHIP_ERROR;
}
if (DataAccessFilter.filterDataResourceById(competitor, admin.getId()) == null) {
return Constants.OWNERSHIP_ERROR;
}
// get full competitor object
competitor = competitorDao.getCompetitorById(competitor.getId());
// deletes competitor and associated DATA_RESOURCE
competitorDao.deleteCompetitorById(competitor.getId());
dataResourceDao.deleteDataResourceById(competitor.getId());
// remove underlying PERSON/DATA_RESOURCE if no
// admin/competitors associated with it
if ((adminDao.getAdminById(competitor.getPersonId()) == null) &&
(competitorDao.getCompetitorsByPersonId(competitor.getPersonId(), false).isEmpty())) {
logger.debug("no admins associated, deleting person " + competitor.getPersonId());
Person person = personDao.getPersonById(competitor.getPersonId());
personDao.deletePersonById(person.getId());
dataResourceDao.deleteDataResourceById(person.getId());
// remove underlying IDENTITY/DATA_RESOURCE if no persons associated with it
if (personDao.getPersonsByIdentityId(person.getIdentityId(), null, false).isEmpty()) {
logger.debug("no identities associated, deleting identity " + person.getIdentityId());
identityDao.deleteIdentityById(person.getIdentityId());
dataResourceDao.deleteDataResourceById(person.getIdentityId());
} else {
logger.debug("persons associated with identity found. not removing identity" + person.getIdentityId());
}
} else {
logger.debug("competitors/admin associated with person found. not removing person." + competitor.getPersonId());
}
return decorateView(".deleteCompetitorSuccess_dashboard", request);
}
}
| |
package cz.lukashruby.api.xml.onlinekoupelny;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlType;
import java.math.BigDecimal;
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "ProductType", propOrder = {"category", "producer", "serie", "catalog", "ean", "productName", "description", "retailPrice", "price", "eshopPrice", "recommendedPrice", "guarantee", "stock", "stockCount", "delivery", "modifiedAt", "removed", "variants", "images", "related"})
public class ProductType {

    // Mandatory classification / naming fields.
    @XmlElement(name = "Category", required = true)
    protected String category;
    @XmlElement(name = "Producer", required = true)
    protected String producer;
    @XmlElement(name = "Serie", required = true)
    protected String serie;
    @XmlElement(name = "Catalog")
    protected String catalog;
    @XmlElement(name = "EAN")
    protected Long ean;
    @XmlElement(name = "ProductName", required = true)
    protected String productName;
    @XmlElement(name = "Description", required = true)
    protected String description;
    // Price tiers — all optional, carried as BigDecimal.
    @XmlElement(name = "RetailPrice")
    protected BigDecimal retailPrice;
    @XmlElement(name = "Price")
    protected BigDecimal price;
    @XmlElement(name = "EshopPrice")
    protected BigDecimal eshopPrice;
    @XmlElement(name = "RecommendedPrice")
    protected BigDecimal recommendedPrice;
    // Availability / delivery metadata.
    @XmlElement(name = "Guarantee")
    protected Byte guarantee;
    @XmlElement(name = "Stock")
    protected String stock;
    @XmlElement(name = "StockCount")
    protected String stockCount;
    @XmlElement(name = "Delivery")
    protected Byte delivery;
    @XmlElement(name = "ModifiedAt", required = true)
    protected String modifiedAt;
    @XmlElement(name = "Removed")
    protected byte removed;
    // Nested structures (variants, images, related products).
    @XmlElement(name = "Variants", required = true)
    protected VariantsType variants;
    @XmlElement(name = "Images", required = true)
    protected ImagesType images;
    @XmlElement(name = "Related", required = true)
    protected RelatedType related;

    /** @return the product category */
    public String getCategory() {
        return category;
    }

    /** @param newValue the product category to set */
    public void setCategory(String newValue) {
        this.category = newValue;
    }

    /** @return the producer name */
    public String getProducer() {
        return producer;
    }

    /** @param newValue the producer name to set */
    public void setProducer(String newValue) {
        this.producer = newValue;
    }

    /** @return the product series */
    public String getSerie() {
        return serie;
    }

    /** @param newValue the product series to set */
    public void setSerie(String newValue) {
        this.serie = newValue;
    }

    /** @return the catalog number, may be null */
    public String getCatalog() {
        return catalog;
    }

    /** @param newValue the catalog number to set */
    public void setCatalog(String newValue) {
        this.catalog = newValue;
    }

    /** @return the EAN barcode number, may be null */
    public Long getEAN() {
        return ean;
    }

    /** @param newValue the EAN barcode number to set */
    public void setEAN(Long newValue) {
        this.ean = newValue;
    }

    /** @return the product name */
    public String getProductName() {
        return productName;
    }

    /** @param newValue the product name to set */
    public void setProductName(String newValue) {
        this.productName = newValue;
    }

    /** @return the product description */
    public String getDescription() {
        return description;
    }

    /** @param newValue the product description to set */
    public void setDescription(String newValue) {
        this.description = newValue;
    }

    /** @return the retail price, may be null */
    public BigDecimal getRetailPrice() {
        return retailPrice;
    }

    /** @param newValue the retail price to set */
    public void setRetailPrice(BigDecimal newValue) {
        this.retailPrice = newValue;
    }

    /** @return the base price, may be null */
    public BigDecimal getPrice() {
        return price;
    }

    /** @param newValue the base price to set */
    public void setPrice(BigDecimal newValue) {
        this.price = newValue;
    }

    /** @return the e-shop price, may be null */
    public BigDecimal getEshopPrice() {
        return eshopPrice;
    }

    /** @param newValue the e-shop price to set */
    public void setEshopPrice(BigDecimal newValue) {
        this.eshopPrice = newValue;
    }

    /** @return the recommended price, may be null */
    public BigDecimal getRecommendedPrice() {
        return recommendedPrice;
    }

    /** @param newValue the recommended price to set */
    public void setRecommendedPrice(BigDecimal newValue) {
        this.recommendedPrice = newValue;
    }

    /** @return the guarantee value, may be null */
    public Byte getGuarantee() {
        return guarantee;
    }

    /** @param newValue the guarantee value to set */
    public void setGuarantee(Byte newValue) {
        this.guarantee = newValue;
    }

    /** @return the stock indicator, may be null */
    public String getStock() {
        return stock;
    }

    /** @param newValue the stock indicator to set */
    public void setStock(String newValue) {
        this.stock = newValue;
    }

    /** @return the stock count, may be null */
    public String getStockCount() {
        return stockCount;
    }

    /** @param newValue the stock count to set */
    public void setStockCount(String newValue) {
        this.stockCount = newValue;
    }

    /** @return the delivery value, may be null */
    public Byte getDelivery() {
        return delivery;
    }

    /** @param newValue the delivery value to set */
    public void setDelivery(Byte newValue) {
        this.delivery = newValue;
    }

    /** @return the last-modified timestamp string */
    public String getModifiedAt() {
        return modifiedAt;
    }

    /** @param newValue the last-modified timestamp string to set */
    public void setModifiedAt(String newValue) {
        this.modifiedAt = newValue;
    }

    /** @return the removed flag */
    public byte getRemoved() {
        return removed;
    }

    /** @param newValue the removed flag to set */
    public void setRemoved(byte newValue) {
        this.removed = newValue;
    }

    /** @return the product variants container */
    public VariantsType getVariants() {
        return variants;
    }

    /** @param newValue the product variants container to set */
    public void setVariants(VariantsType newValue) {
        this.variants = newValue;
    }

    /** @return the product images container */
    public ImagesType getImages() {
        return images;
    }

    /** @param newValue the product images container to set */
    public void setImages(ImagesType newValue) {
        this.images = newValue;
    }

    /** @return the related-products container */
    public RelatedType getRelated() {
        return related;
    }

    /** @param newValue the related-products container to set */
    public void setRelated(RelatedType newValue) {
        this.related = newValue;
    }
}
| |
package org.innovateuk.ifs.application.transactional;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap;
import com.google.common.collect.Multimaps;
import org.innovateuk.ifs.application.domain.Application;
import org.innovateuk.ifs.application.resource.QuestionApplicationCompositeId;
import org.innovateuk.ifs.application.resource.QuestionStatusResource;
import org.innovateuk.ifs.application.validation.ApplicationValidationUtil;
import org.innovateuk.ifs.application.validation.ApplicationValidatorService;
import org.innovateuk.ifs.commons.error.CommonErrors;
import org.innovateuk.ifs.commons.error.ValidationMessages;
import org.innovateuk.ifs.commons.service.ServiceResult;
import org.innovateuk.ifs.finance.transactional.ApplicationFinanceService;
import org.innovateuk.ifs.form.domain.Question;
import org.innovateuk.ifs.form.domain.Section;
import org.innovateuk.ifs.form.resource.SectionResource;
import org.innovateuk.ifs.form.transactional.SectionService;
import org.innovateuk.ifs.procurement.milestone.transactional.ApplicationProcurementMilestoneService;
import org.innovateuk.ifs.transactional.BaseTransactionalService;
import org.innovateuk.ifs.user.domain.ProcessRole;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.util.*;
import static java.util.Collections.singleton;
import static java.util.function.Function.identity;
import static java.util.stream.Collectors.*;
import static org.innovateuk.ifs.commons.service.ServiceResult.serviceSuccess;
import static org.innovateuk.ifs.form.resource.SectionType.*;
import static org.innovateuk.ifs.util.CollectionFunctions.simpleFilter;
import static org.innovateuk.ifs.util.CollectionFunctions.simpleMapSet;
import static org.innovateuk.ifs.util.EntityLookupCallbacks.find;
/**
* Implements {@link SectionStatusService}
*/
@Service
public class SectionStatusServiceImpl extends BaseTransactionalService implements SectionStatusService {

    @Autowired
    private SectionService sectionService;
    @Autowired
    private QuestionStatusService questionStatusService;
    @Autowired
    private ApplicationFinanceService financeService;
    @Autowired
    private ApplicationProcurementMilestoneService applicationProcurementMilestoneService;
    @Autowired
    private ApplicationValidationUtil validationUtil;
    @Autowired
    private ApplicationValidatorService applicationValidatorService;

    /**
     * Returns, per organisation, the ids of the sections that organisation has completed.
     */
    @Override
    public ServiceResult<Map<Long, Set<Long>>> getCompletedSections(final long applicationId) {
        return getApplication(applicationId).andOnSuccessReturn(this::completedSections);
    }

    /**
     * Returns the section ids a single organisation has completed for an application.
     */
    @Override
    public ServiceResult<Set<Long>> getCompletedSections(final long applicationId, final long organisationId) {
        return find(application(applicationId)).
                andOnSuccess(application -> {
                    List<Section> sections = application.getCompetition().getSections();
                    // Only lead applicants and collaborators count toward completion.
                    List<ProcessRole> applicantTypeProcessRoles = application.getProcessRoles().stream()
                            .filter(ProcessRole::isLeadApplicantOrCollaborator)
                            .collect(toList());
                    Set<Long> organisations = applicantTypeProcessRoles.stream()
                            .map(ProcessRole::getOrganisationId)
                            .collect(toSet());
                    Map<Long, Question> questions = application.getCompetition()
                            .getQuestions()
                            .stream()
                            .collect(toMap(Question::getId, identity()));
                    Map<Long, List<Long>> organisationIdToCompletedQuestionIds = getCompletedQuestionsGroupedByOrganisationId(applicationId, singleton(organisationId), questions);
                    return serviceSuccess(sections.stream()
                            .filter(section -> isSectionComplete(section, organisationIdToCompletedQuestionIds, application, organisationId, organisations))
                            .map(Section::getId)
                            .collect(toSet()));
                });
    }

    /**
     * The finance overview is complete only when every organisation on the
     * application has completed the FINANCE section and the overall finance
     * validation passes.
     */
    private boolean isFinanceOverviewComplete(Application application, Map<Long, List<Long>> completedQuestionsByOrganisations, Set<Long> applicationOrganisations) {
        List<Section> sections = application.getCompetition().getSections();
        // NOTE(review): assumes exactly one FINANCE section exists; get(0) throws if none — confirm upstream guarantees this.
        Section financeSection = sections.stream().filter(section -> section.getType() == FINANCE).collect(toList()).get(0);
        for (long organisationId : applicationOrganisations) {
            if (!completedQuestionsByOrganisations.containsKey(organisationId)) {
                return false;
            }
            // Evaluate the FINANCE section against this organisation's completed questions only.
            Map<Long, List<Long>> map = new HashMap<>();
            map.put(organisationId, completedQuestionsByOrganisations.get(organisationId));
            if (!isSectionComplete(
                    financeSection,
                    map,
                    application,
                    organisationId,
                    applicationOrganisations)) {
                return false;
            }
        }
        return applicationValidatorService.isFinanceOverviewComplete(application);
    }

    /**
     * Validates a section and, when valid, marks all its (sub)questions complete.
     *
     * @return the validation messages; the section is only marked when they hold no errors
     */
    @Override
    @Transactional
    public ServiceResult<ValidationMessages> markSectionAsComplete(final long sectionId,
                                                                   final long applicationId,
                                                                   final long markedAsCompleteById) {
        return find(section(sectionId), application(applicationId)).andOnSuccess((section, application) -> {
            ValidationMessages sectionIsValid = validationUtil.isSectionValid(markedAsCompleteById, section, application);
            if (!sectionIsValid.hasErrors()) {
                markSectionAsCompleteNoValidate(section, application, markedAsCompleteById);
            }
            return serviceSuccess(sectionIsValid);
        });
    }

    /**
     * Marks a section complete without running section validation (used for
     * sections that are not required for this application).
     */
    @Override
    @Transactional
    public ServiceResult<Void> markSectionAsNotRequired(long sectionId, long applicationId, long markedAsCompleteById) {
        return find(section(sectionId), application(applicationId)).andOnSuccess((section, application) -> {
            markSectionAsCompleteNoValidate(section, application, markedAsCompleteById);
            return serviceSuccess();
        });
    }

    /** Marks every question in the section (and its subsections) complete, skipping validation. */
    private void markSectionAsCompleteNoValidate(Section section, Application application, long markedAsCompleteById) {
        // Changing project costs can invalidate previously-agreed payment milestones.
        if (section.getType() == PROJECT_COST_FINANCES
                && section.getCompetition().isProcurementMilestones()) {
            resetProcurementMilestoneIfProjectCostsAreNotEqualToPaymentTotal(application, markedAsCompleteById);
        }
        sectionService.getQuestionsForSectionAndSubsections(section.getId()).andOnSuccessReturnVoid(questions -> questions.forEach(q -> {
            questionStatusService.markAsCompleteNoValidate(new QuestionApplicationCompositeId(q, application.getId()), markedAsCompleteById);
            // Assign back to lead applicant.
            //TODO seems weird? Remove??
            // questionStatusService.assign(new QuestionApplicationCompositeId(q, application.getId()), application.getLeadApplicantProcessRole().getId(), markedAsCompleteById);
        }));
    }

    /**
     * If the payment-milestone totals no longer match project funding, flip the
     * PAYMENT_MILESTONES question back to incomplete so it must be re-confirmed.
     */
    private void resetProcurementMilestoneIfProjectCostsAreNotEqualToPaymentTotal(Application application, long markedAsCompleteById) {
        find(
                processRole(markedAsCompleteById),
                () -> find(sectionRepository.findByTypeAndCompetitionId(PAYMENT_MILESTONES, application.getCompetition().getId()), CommonErrors.notFoundError(Section.class, PAYMENT_MILESTONES, application.getCompetition().getId())))
                .andOnSuccessReturnVoid((processRole, section) -> {
                    questionStatusService.getQuestionStatusForOrganisationOnApplication(section.getQuestions().get(0).getId(), application.getId(), processRole.getOrganisationId()).andOnSuccessReturnVoid(questionStatus -> {
                        if (!applicationProcurementMilestoneService.arePaymentMilestonesEqualToFunding(application.getId(), processRole.getOrganisationId()).getSuccess()
                                && !questionStatus.isEmpty() && Boolean.TRUE.equals(questionStatus.get(0).getMarkedAsComplete())) {
                            questionStatusService.markAsInComplete(new QuestionApplicationCompositeId(section.getQuestions().get(0).getId(), application.getId()), markedAsCompleteById);
                        } // fixed: removed stray empty statement (";") that followed this block
                    });
                });
    }

    /** Marks every question in the section (and its subsections) incomplete. */
    @Override
    @Transactional
    public ServiceResult<Void> markSectionAsInComplete(final long sectionId,
                                                       final long applicationId,
                                                       final long markedAsInCompleteById) {
        return sectionService.getQuestionsForSectionAndSubsections(sectionId).andOnSuccessReturnVoid(questions -> questions.forEach(q ->
                questionStatusService.markAsInComplete(new QuestionApplicationCompositeId(q, applicationId), markedAsInCompleteById)
        ));
    }

    /**
     * True when the union of completed sections across all organisations covers
     * every section of the competition.
     */
    @Override
    public ServiceResult<Boolean> sectionsCompleteForAllOrganisations(long applicationId) {
        return getApplication(applicationId).andOnSuccess(application -> {
            Set<Long> sections = sectionService.getByCompetitionId(application.getCompetition().getId()).getSuccess()
                    .stream()
                    .map(SectionResource::getId)
                    .collect(toSet());
            Set<Long> completedSections = getCompletedSections(applicationId).getSuccess()
                    .values()
                    .stream()
                    .flatMap(Set::stream)
                    .collect(toSet());
            return serviceSuccess(sections.equals(completedSections));
        });
    }

    /** Builds the organisation-id -> completed-section-ids map for one application. */
    private Map<Long, Set<Long>> completedSections(Application application) {
        List<Section> sections = application.getCompetition().getSections();
        List<ProcessRole> applicantTypeProcessRoles = simpleFilter(application.getProcessRoles(), ProcessRole::isLeadApplicantOrCollaborator);
        Set<Long> organisations = simpleMapSet(applicantTypeProcessRoles, ProcessRole::getOrganisationId);
        Map<Long, Question> questions = application.getCompetition().getQuestions().stream().collect(toMap(Question::getId, identity()));
        Map<Long, List<Long>> organisationIdToCompletedQuestionIds = getCompletedQuestionsGroupedByOrganisationId(application.getId(), organisations, questions);
        Map<Long, Set<Long>> organisationMap = new HashMap<>();
        for (Long organisationId : organisations) {
            Set<Long> completedSections = new LinkedHashSet<>();
            for (Section section : sections) {
                if (isSectionComplete(section, organisationIdToCompletedQuestionIds, application, organisationId, organisations)) {
                    completedSections.add(section.getId());
                }
            }
            organisationMap.put(organisationId, completedSections);
        }
        return organisationMap;
    }

    /**
     * A section is complete when its child sections (recursively) and all of its
     * own questions are complete for the organisation. OVERVIEW_FINANCES is a
     * special case delegated to {@link #isFinanceOverviewComplete}.
     */
    private boolean isSectionComplete(Section section,
                                      Map<Long, List<Long>> completedQuestionsByOrganisations,
                                      Application application,
                                      long organisationId,
                                      Set<Long> applicationOrganisations) {
        if (section.getType() == OVERVIEW_FINANCES) {
            return isFinanceOverviewComplete(application, completedQuestionsByOrganisations, applicationOrganisations);
        }
        if (section.hasChildSections()) {
            for (Section childSection : section.getChildSections()) {
                boolean complete = isSectionComplete(childSection, completedQuestionsByOrganisations, application, organisationId, applicationOrganisations);
                if (!complete) {
                    return false;
                }
            }
        }
        for (Question question : section.getQuestions()) {
            if (!completedQuestionsByOrganisations.containsKey(organisationId)
                    || !completedQuestionsByOrganisations.get(organisationId).contains(question.getId())) {
                return false;
            }
        }
        return true;
    }

    /**
     * Groups completed question ids by organisation id. Questions with a single
     * shared status count as complete for every supplied organisation.
     */
    private Map<Long, List<Long>> getCompletedQuestionsGroupedByOrganisationId(long applicationId, Set<Long> organisationIds, Map<Long, Question> questions) {
        Multimap<Long, QuestionStatusResource> completedQuestionStatuses = ArrayListMultimap.create();
        questionStatusService.findCompletedQuestionsByApplicationId(applicationId).getSuccess()
                .forEach(questionStatus -> {
                    if (questions.get(questionStatus.getQuestion()).hasMultipleStatuses()) {
                        completedQuestionStatuses.put(questionStatus.getMarkedAsCompleteByOrganisationId(), questionStatus);
                    } else {
                        // Single-status question: completion applies to all organisations.
                        organisationIds.forEach(organisationId -> completedQuestionStatuses.put(organisationId, questionStatus));
                    }
                });
        return Multimaps.asMap(completedQuestionStatuses)
                .entrySet()
                .stream()
                .collect(toMap(Map.Entry::getKey,
                        questionStatuses -> questionStatuses.getValue()
                                .stream()
                                .map(QuestionStatusResource::getQuestion)
                                .collect(toList())
                ));
    }
}
| |
package me.jakelane.wrapperforfacebook;
import android.Manifest;
import android.content.Intent;
import android.content.SharedPreferences;
import android.graphics.Bitmap;
import android.graphics.Color;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.Drawable;
import android.os.Bundle;
import android.os.Handler;
import android.preference.PreferenceManager;
import android.support.annotation.NonNull;
import android.support.design.widget.NavigationView;
import android.support.design.widget.Snackbar;
import android.support.v4.content.res.ResourcesCompat;
import android.support.v4.view.GravityCompat;
import android.support.v4.widget.DrawerLayout;
import android.support.v4.widget.SwipeRefreshLayout;
import android.support.v7.app.ActionBarDrawerToggle;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.webkit.URLUtil;
import android.widget.FrameLayout;
import android.widget.ImageView;
import android.widget.TextView;
import com.facebook.AccessToken;
import com.facebook.CallbackManager;
import com.facebook.FacebookCallback;
import com.facebook.FacebookException;
import com.facebook.FacebookSdk;
import com.facebook.GraphRequest;
import com.facebook.GraphResponse;
import com.facebook.login.LoginBehavior;
import com.facebook.login.LoginManager;
import com.facebook.login.LoginResult;
import com.github.clans.fab.FloatingActionMenu;
import com.greysonparrelli.permiso.Permiso;
import com.mikepenz.actionitembadge.library.ActionItemBadge;
import com.mikepenz.actionitembadge.library.utils.BadgeStyle;
import com.squareup.picasso.Picasso;
import com.squareup.picasso.Target;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.Arrays;
import java.util.List;
import im.delight.android.webview.AdvancedWebView;
public class MainActivity extends AppCompatActivity implements NavigationView.OnNavigationItemSelectedListener {
    // Base URL of the mobile Facebook site, plus a pre-URL-encoded copy used
    // inside javascript: bookmarklet URLs.
    static final String FACEBOOK_URL_BASE = "https://m.facebook.com/";
    private static final String FACEBOOK_URL_BASE_ENCODED = "https%3A%2F%2Fm.facebook.com%2F";
    // Hostnames the embedded WebView is allowed to load.
    private static final List<String> HOSTNAMES = Arrays.asList("facebook.com", "*.facebook.com", "*.fbcdn.net", "*.akamaihd.net");
    // Badge style used for the messages / friend-request counters in the drawer.
    private final BadgeStyle BADGE_SIDE_FULL = new BadgeStyle(BadgeStyle.Style.LARGE, R.layout.menu_badge_full, R.color.colorAccent, R.color.colorAccent, Color.WHITE);
    // Members
    SwipeRefreshLayout swipeView;
    NavigationView mNavigationView;
    View mCoordinatorLayoutView;
    private FloatingActionMenu mMenuFAB;
    private AdvancedWebView mWebView;
    // Shared click handler for the three composer FABs. Each branch injects a
    // javascript: URL that clicks the matching composer button on the page, or
    // falls back to navigating to the composer URL if that element is missing.
    private final View.OnClickListener mFABClickListener = new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            switch (v.getId()) {
                case R.id.textFAB:
                    mWebView.loadUrl("javascript:(function()%7Btry%7Bdocument.querySelector('button%5Bname%3D%22view_overview%22%5D').click()%7Dcatch(_)%7Bwindow.location.href%3D%22" + FACEBOOK_URL_BASE_ENCODED + "%3Fpageload%3Dcomposer%22%7D%7D)()");
                    break;
                case R.id.photoFAB:
                    mWebView.loadUrl("javascript:(function()%7Btry%7Bdocument.querySelector('button%5Bname%3D%22view_photo%22%5D').click()%7Dcatch(_)%7Bwindow.location.href%3D%22" + FACEBOOK_URL_BASE_ENCODED + "%3Fpageload%3Dcomposer_photo%22%7D%7D)()");
                    break;
                case R.id.checkinFAB:
                    mWebView.loadUrl("javascript:(function()%7Btry%7Bdocument.querySelector('button%5Bname%3D%22view_location%22%5D').click()%7Dcatch(_)%7Bwindow.location.href%3D%22" + FACEBOOK_URL_BASE_ENCODED + "%3Fpageload%3Dcomposer_checkin%22%7D%7D)()");
                    break;
                default:
                    break;
            }
            // Collapse the FAB menu after any selection.
            mMenuFAB.close(true);
        }
    };
    private MenuItem mNotificationButton;
    private CallbackManager callbackManager;
    private Snackbar loginSnackbar = null;
    @SuppressWarnings("FieldCanBeLocal") // Will be garbage collected as a local variable
    private SharedPreferences.OnSharedPreferenceChangeListener listener;
    // Set when a preference change requires the current page to be reloaded;
    // consumed in onResume().
    private boolean requiresReload = false;
    // URL of the logged-in user's profile; null until updateUserInfo() resolves it.
    private String mUserLink = null;
    private SharedPreferences mPreferences;
    /**
     * Builds the entire UI: initializes the Facebook SDK and preferences,
     * wires the toolbar/drawer/navigation view, the swipe-to-refresh layout,
     * the composer FAB menu and the WebView, then logs in (or restores a
     * session) and loads the start page. Statement order here is significant —
     * views must be bound before listeners that reference them are installed.
     */
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        FacebookSdk.sdkInitialize(this.getApplicationContext());
        setContentView(R.layout.activity_main);
        Permiso.getInstance().setActivity(this);
        // Preferences: seed defaults, then react to live changes.
        PreferenceManager.setDefaultValues(this, R.xml.settings, false);
        mPreferences = PreferenceManager.getDefaultSharedPreferences(this);
        listener = new SharedPreferences.OnSharedPreferenceChangeListener() {
            public void onSharedPreferenceChanged(SharedPreferences prefs, String key) {
                switch (key) {
                    case SettingsActivity.KEY_PREF_JUMP_TOP_BUTTON:
                        mNavigationView.getMenu().findItem(R.id.nav_jump_top).setVisible(prefs.getBoolean(key, false));
                        break;
                    case SettingsActivity.KEY_PREF_STOP_IMAGES:
                        mWebView.getSettings().setBlockNetworkImage(prefs.getBoolean(key, false));
                        requiresReload = true;
                        break;
                    case SettingsActivity.KEY_PREF_BACK_BUTTON:
                        mNavigationView.getMenu().findItem(R.id.nav_back).setVisible(prefs.getBoolean(key, false));
                        break;
                    case SettingsActivity.KEY_PREF_MESSAGING:
                        mNavigationView.getMenu().findItem(R.id.nav_messages).setVisible(prefs.getBoolean(key, false));
                        break;
                    case SettingsActivity.KEY_PREF_LOCATION:
                        // Geolocation needs the runtime permission before it can be enabled.
                        if (prefs.getBoolean(key, false)) {
                            Permiso.getInstance().requestPermissions(new Permiso.IOnPermissionResult() {
                                @Override
                                public void onPermissionResult(Permiso.ResultSet resultSet) {
                                    if (resultSet.areAllPermissionsGranted()) {
                                        mWebView.setGeolocationEnabled(true);
                                    } else {
                                        Snackbar.make(mCoordinatorLayoutView, R.string.permission_denied, Snackbar.LENGTH_SHORT).show();
                                    }
                                }
                                @Override
                                public void onRationaleRequested(Permiso.IOnRationaleProvided callback, String... permissions) {
                                    // TODO Permiso.getInstance().showRationaleInDialog("Title", "Message", null, callback);
                                    callback.onRationaleProvided();
                                }
                            }, Manifest.permission.ACCESS_FINE_LOCATION);
                        }
                        break;
                    case SettingsActivity.KEY_PREF_MOST_RECENT_MENU:
                        // Toggle which feed menu entries are visible.
                        boolean most_recent = prefs.getBoolean(key, true);
                        mNavigationView.getMenu().findItem(R.id.nav_news).setVisible(!most_recent);
                        mNavigationView.getMenu().findItem(R.id.nav_top_stories).setVisible(most_recent);
                        mNavigationView.getMenu().findItem(R.id.nav_most_recent).setVisible(most_recent);
                        requiresReload = true;
                        break;
                    case SettingsActivity.KEY_PREF_FAB_SCROLL:
                        mMenuFAB.showMenuButton(true);
                        break;
                    case SettingsActivity.KEY_PREF_HIDE_EDITOR:
                        requiresReload = true;
                        break;
                    case SettingsActivity.KEY_PREF_HIDE_SPONSORED:
                        requiresReload = true;
                        break;
                    case SettingsActivity.KEY_PREF_HIDE_BIRTHDAYS:
                        requiresReload = true;
                        break;
                    case SettingsActivity.KEY_PREF_NOTIFICATIONS_ENABLED:
                        PollReceiver.scheduleAlarms(getApplicationContext(), false);
                        break;
                    case SettingsActivity.KEY_PREF_NOTIFICATION_INTERVAL:
                        PollReceiver.scheduleAlarms(getApplicationContext(), false);
                        break;
                    default:
                        break;
                }
            }
        };
        mPreferences.registerOnSharedPreferenceChangeListener(listener);
        // Setup the toolbar
        Toolbar mToolbar = (Toolbar) findViewById(R.id.toolbar);
        setSupportActionBar(mToolbar);
        // Setup the DrawLayout
        final DrawerLayout drawer = (DrawerLayout) findViewById(R.id.drawer_layout);
        final ActionBarDrawerToggle toggle = new ActionBarDrawerToggle(this, drawer, mToolbar, R.string.navigation_drawer_open, R.string.navigation_drawer_close);
        drawer.addDrawerListener(toggle);
        toggle.syncState();
        mNavigationView = (NavigationView) findViewById(R.id.nav_view);
        mNavigationView.setNavigationItemSelectedListener(this);
        // Create the badge for messages
        ActionItemBadge.update(this, mNavigationView.getMenu().findItem(R.id.nav_messages), (Drawable) null, BADGE_SIDE_FULL, Integer.MIN_VALUE);
        ActionItemBadge.update(this, mNavigationView.getMenu().findItem(R.id.nav_friendreq), (Drawable) null, BADGE_SIDE_FULL, Integer.MIN_VALUE);
        // Hide buttons if they are disabled
        if (!mPreferences.getBoolean(SettingsActivity.KEY_PREF_MESSAGING, false)) {
            mNavigationView.getMenu().findItem(R.id.nav_messages).setVisible(false);
        }
        if (!mPreferences.getBoolean(SettingsActivity.KEY_PREF_JUMP_TOP_BUTTON, false)) {
            mNavigationView.getMenu().findItem(R.id.nav_jump_top).setVisible(false);
        }
        if (!mPreferences.getBoolean(SettingsActivity.KEY_PREF_BACK_BUTTON, false)) {
            mNavigationView.getMenu().findItem(R.id.nav_back).setVisible(false);
        }
        boolean most_recent = mPreferences.getBoolean(SettingsActivity.KEY_PREF_MOST_RECENT_MENU, true);
        mNavigationView.getMenu().findItem(R.id.nav_news).setVisible(!most_recent);
        mNavigationView.getMenu().findItem(R.id.nav_top_stories).setVisible(most_recent);
        mNavigationView.getMenu().findItem(R.id.nav_most_recent).setVisible(most_recent);
        // Bind the Coordinator to member
        mCoordinatorLayoutView = findViewById(R.id.coordinatorLayout);
        // Start the Swipe to reload listener
        swipeView = (SwipeRefreshLayout) findViewById(R.id.swipeLayout);
        swipeView.setColorSchemeResources(R.color.colorPrimary);
        swipeView.setOnRefreshListener(new SwipeRefreshLayout.OnRefreshListener() {
            @Override
            public void onRefresh() {
                mWebView.reload();
            }
        });
        // Inflate the FAB menu
        mMenuFAB = (FloatingActionMenu) findViewById(R.id.menuFAB);
        // Nasty hack to get the FAB menu button: long-pressing the menu button
        // hides the whole menu for 3 seconds so content underneath can be tapped.
        mMenuFAB.getChildAt(3).setOnLongClickListener(new View.OnLongClickListener() {
            @Override
            public boolean onLongClick(View v) {
                mMenuFAB.hideMenu(true);
                Handler handler = new Handler();
                handler.postDelayed(new Runnable() {
                    @Override
                    public void run() {
                        // Show your View after 3 seconds
                        mMenuFAB.showMenu(true);
                    }
                }, 3000);
                return false;
            }
        });
        findViewById(R.id.textFAB).setOnClickListener(mFABClickListener);
        findViewById(R.id.photoFAB).setOnClickListener(mFABClickListener);
        findViewById(R.id.checkinFAB).setOnClickListener(mFABClickListener);
        // Load the WebView
        mWebView = (AdvancedWebView) findViewById(R.id.webview);
        assert mWebView != null;
        mWebView.addPermittedHostnames(HOSTNAMES);
        mWebView.setGeolocationEnabled(mPreferences.getBoolean(SettingsActivity.KEY_PREF_LOCATION, false));
        mWebView.setListener(this, new WebViewListener(this, mWebView));
        mWebView.addJavascriptInterface(new JavaScriptInterfaces(this), "android");
        // NOTE(review): registerForContextMenu(mWebView) is invoked here, again a
        // few lines below, and once more in onResume(); the repeats look redundant —
        // confirm before removing any of them.
        registerForContextMenu(mWebView);
        mWebView.getSettings().setBlockNetworkImage(mPreferences.getBoolean(SettingsActivity.KEY_PREF_STOP_IMAGES, false));
        mWebView.getSettings().setAppCacheEnabled(true);
        mWebView.getSettings().setSupportZoom(true);
        mWebView.getSettings().setBuiltInZoomControls(false);
        mWebView.getSettings().setLoadWithOverviewMode(true);
        mWebView.getSettings().setUseWideViewPort(true);
        // Impersonate iPhone to prevent advertising garbage
        mWebView.getSettings().setUserAgentString("Mozilla/5.0 (Linux; Android 2.2; Nexus 5 Build/_BuildID_) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/30.0.0.0 Mobile Safari/537.36");
        // Long press
        registerForContextMenu(mWebView);
        mWebView.setLongClickable(true);
        mWebView.setWebChromeClient(new CustomWebChromeClient(this, mWebView, (FrameLayout) findViewById(R.id.fullscreen_custom_content)));
        // Add OnClick listener to Profile picture: open the user's own profile.
        ImageView profileImage = (ImageView) mNavigationView.getHeaderView(0).findViewById(R.id.profile_picture);
        profileImage.setClickable(true);
        profileImage.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                if (mUserLink != null) {
                    drawer.closeDrawers();
                    mWebView.loadUrl(mUserLink);
                }
            }
        });
        // Facebook SDK login wiring: on success load the feed, on error log out
        // and fall back to the logged-out state.
        callbackManager = CallbackManager.Factory.create();
        FacebookCallback<LoginResult> loginResult = new FacebookCallback<LoginResult>() {
            @Override
            public void onSuccess(LoginResult loginResult) {
                mWebView.loadUrl(chooseUrl());
                updateUserInfo();
            }
            @Override
            public void onCancel() {
                checkLoggedInState();
            }
            @Override
            public void onError(FacebookException error) {
                Snackbar.make(mCoordinatorLayoutView, R.string.error_login, Snackbar.LENGTH_LONG).show();
                Log.e(Helpers.LogTag, error.toString());
                LoginManager.getInstance().logOut();
                checkLoggedInState();
            }
        };
        LoginManager.getInstance().setLoginBehavior(LoginBehavior.WEB_ONLY);
        LoginManager.getInstance().registerCallback(callbackManager, loginResult);
        // Already logged in: go straight to the start page.
        if (checkLoggedInState()) {
            mWebView.loadUrl(chooseUrl());
            updateUserInfo();
        }
    }
@Override
protected void onResume() {
super.onResume();
mWebView.onResume();
Permiso.getInstance().setActivity(this);
// Check if we need to show a page reload snackbar
if (requiresReload) {
Snackbar reloadSnackbar = Snackbar.make(mCoordinatorLayoutView, R.string.hide_editor_newsfeed_snackbar, Snackbar.LENGTH_LONG);
reloadSnackbar.setAction(R.string.menu_refresh, new View.OnClickListener() {
@Override
public void onClick(View v) {
mWebView.reload();
}
});
reloadSnackbar.show();
requiresReload = false;
}
registerForContextMenu(mWebView);
}
@Override
protected void onDestroy() {
    // Tear down the WebView (custom wrapper method — presumably releases the
    // underlying android.webkit.WebView; confirm in the wrapper class) before
    // letting the Activity die.
    mWebView.onDestroy();
    super.onDestroy();
}
@Override
protected void onActivityResult(final int requestCode, final int resultCode, final Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    // Forward the result to the WebView wrapper (e.g. file-chooser uploads) ...
    mWebView.onActivityResult(requestCode, resultCode, data);
    // ... and to the Facebook SDK so a pending login flow can complete.
    callbackManager.onActivityResult(requestCode, resultCode, data);
}
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
    super.onRequestPermissionsResult(requestCode, permissions, grantResults);
    // Permiso wraps the runtime-permission flow; hand it the raw callback data.
    Permiso.getInstance().onRequestPermissionResult(requestCode, permissions, grantResults);
}
/**
 * Back button priority: close an open navigation drawer first, then step back
 * through WebView history, and only then fall through to the default behavior.
 */
@Override
public void onBackPressed() {
    DrawerLayout navDrawer = (DrawerLayout) findViewById(R.id.drawer_layout);
    if (navDrawer.isDrawerOpen(GravityCompat.START)) {
        navDrawer.closeDrawer(GravityCompat.START);
        return;
    }
    if (mWebView.canGoBack()) {
        mWebView.goBack();
        return;
    }
    super.onBackPressed();
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
    // Inflate the menu; this adds items to the action bar if it is present.
    getMenuInflater().inflate(R.menu.main, menu);
    mNotificationButton = menu.findItem(R.id.action_notifications);
    // Integer.MIN_VALUE hides the numeric badge, so the bar starts with the
    // plain (inactive) notification icon.
    ActionItemBadge.update(this, mNotificationButton, ResourcesCompat.getDrawable(getResources(), R.drawable.ic_menu_notifications, null), ActionItemBadge.BadgeStyles.RED, Integer.MIN_VALUE);
    return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
    // Handle action bar item clicks here
    int id = item.getItemId();
    if (id == R.id.action_notifications) {
        // Load the notification page: the injected JS first tries to click the
        // in-page notifications "jewel"; if that selector is missing it falls
        // back to navigating straight to notifications.php.
        mWebView.loadUrl("javascript:(function()%7Btry%7Bdocument.querySelector('%23notifications_jewel%20%3E%20a').click()%7Dcatch(_)%7Bwindow.location.href%3D'" + FACEBOOK_URL_BASE_ENCODED + "notifications.php'%7D%7D)()");
        // Clear any checked radio entry in the navigation drawer.
        Helpers.uncheckRadioMenu(mNavigationView.getMenu());
    }
    // Refresh the notification/message counters after the action.
    JavaScriptHelpers.updateNums(mWebView);
    return super.onOptionsItemSelected(item);
}
/**
 * Handles navigation drawer item clicks. Each Facebook destination is reached
 * by injected JS that first tries to click the corresponding in-page "jewel"
 * link and falls back to a direct URL navigation if the selector is missing.
 * Always closes the drawer before returning.
 */
@Override
public boolean onNavigationItemSelected(MenuItem item) {
    // Handle navigation view item clicks here.
    switch (item.getItemId()) {
        case R.id.nav_news:
            mWebView.loadUrl("javascript:(function()%7Btry%7Bdocument.querySelector('%23feed_jewel%20%3E%20a').click()%7Dcatch(_)%7Bwindow.location.href%3D'" + FACEBOOK_URL_BASE_ENCODED + "home.php'%7D%7D)()");
            item.setChecked(true);
            // BUG FIX: this case previously fell through into nav_top_stories,
            // immediately loading a second URL on top of the news feed.
            break;
        case R.id.nav_top_stories:
            mWebView.loadUrl("javascript:(function()%7Btry%7Bdocument.querySelector('a%5Bhref*%3D%22%2Fhome.php%3Fsk%3Dh_nor%22%5D').click()%7Dcatch(_)%7Bwindow.location.href%3D%22" + FACEBOOK_URL_BASE_ENCODED + "home.php%3Fsk%3Dh_nor%22%7D%7D)()");
            item.setChecked(true);
            break;
        case R.id.nav_most_recent:
            mWebView.loadUrl("javascript:(function()%7Btry%7Bdocument.querySelector('a%5Bhref*%3D%22%2Fhome.php%3Fsk%3Dh_chr%22%5D').click()%7Dcatch(_)%7Bwindow.location.href%3D%22" + FACEBOOK_URL_BASE_ENCODED + "home.php%3Fsk%3Dh_chr%22%7D%7D)()");
            item.setChecked(true);
            break;
        case R.id.nav_friendreq:
            mWebView.loadUrl("javascript:(function()%7Btry%7Bdocument.querySelector('%23requests_jewel%20%3E%20a').click()%7Dcatch(_)%7Bwindow.location.href%3D'" + FACEBOOK_URL_BASE_ENCODED + "friends%2Fcenter%2Frequests%2F'%7D%7D)()");
            item.setChecked(true);
            break;
        case R.id.nav_messages:
            mWebView.loadUrl("javascript:(function()%7Btry%7Bdocument.querySelector('%23messages_jewel%20%3E%20a').click()%7Dcatch(_)%7Bwindow.location.href%3D'" + FACEBOOK_URL_BASE_ENCODED + "messages%2F'%7D%7D)()");
            // Refresh counters since opening messages clears the unread badge.
            JavaScriptHelpers.updateNums(mWebView);
            item.setChecked(true);
            break;
        case R.id.nav_search:
            mWebView.loadUrl("javascript:(function()%7Btry%7Bdocument.querySelector('%23search_jewel%20%3E%20a').click()%7Dcatch(_)%7Bwindow.location.href%3D'" + FACEBOOK_URL_BASE_ENCODED + "search%2F'%7D%7D)()");
            item.setChecked(true);
            break;
        case R.id.nav_mainmenu:
            mWebView.loadUrl("javascript:(function()%7Btry%7Bdocument.querySelector('%23bookmarks_jewel%20%3E%20a').click()%7Dcatch(_)%7Bwindow.location.href%3D'" + FACEBOOK_URL_BASE_ENCODED + "home.php'%7D%7D)()");
            item.setChecked(true);
            break;
        case R.id.nav_fblogin:
            LoginManager.getInstance().logInWithReadPermissions(this, Helpers.FB_PERMISSIONS);
            break;
        case R.id.nav_jump_top:
            mWebView.scrollTo(0, 0);
            break;
        case R.id.nav_back:
            mWebView.goBack();
            break;
        case R.id.nav_reload:
            mWebView.reload();
            break;
        case R.id.nav_forward:
            mWebView.goForward();
            break;
        case R.id.nav_settings:
            Intent settingsActivity = new Intent(MainActivity.this, SettingsActivity.class);
            startActivity(settingsActivity);
            break;
        default:
            break;
    }
    DrawerLayout drawer = (DrawerLayout) findViewById(R.id.drawer_layout);
    drawer.closeDrawer(GravityCompat.START);
    return true;
}
/**
 * Toggles the loading state: while loading, the WebView is hidden and the
 * swipe-to-refresh spinner runs; when done, the WebView is shown again.
 */
public void setLoading(boolean loading) {
    if (loading) {
        mWebView.setVisibility(View.GONE);
    } else {
        mWebView.setVisibility(View.VISIBLE);
    }
    swipeView.setRefreshing(loading);
}
/**
 * Determines whether the user counts as fully logged in — BOTH a Facebook SDK
 * access token and a webview session cookie must be present — and syncs the
 * whole UI (webview visibility, drawer items, notification alarms) to that
 * state.
 *
 * @return true if fully logged in, false otherwise
 */
public boolean checkLoggedInState() {
    // Drop any stale "please log in" prompt before re-evaluating.
    if (loginSnackbar != null) {
        loginSnackbar.dismiss();
    }
    if (AccessToken.getCurrentAccessToken() != null && Helpers.getCookie() != null) {
        // Logged in, show webview
        mWebView.setVisibility(View.VISIBLE);
        // Hide login button
        mNavigationView.getMenu().findItem(R.id.nav_fblogin).setVisible(false);
        // Enable navigation buttons
        mNavigationView.getMenu().setGroupEnabled(R.id.group_fbnav, true);
        // Start the Notification service (if not already running)
        PollReceiver.scheduleAlarms(getApplicationContext(), false);
        return true;
    } else {
        // Not logged in (possibly logged into Facebook OAuth and/or webapp)
        loginSnackbar = Helpers.loginPrompt(mCoordinatorLayoutView);
        setLoading(false);
        mWebView.setVisibility(View.GONE);
        // Show login button
        mNavigationView.getMenu().findItem(R.id.nav_fblogin).setVisible(true);
        // Disable navigation buttons
        mNavigationView.getMenu().setGroupEnabled(R.id.group_fbnav, false);
        // Cancel the Notification service if we are logged out
        // (second argument true apparently means "cancel" — confirm in PollReceiver)
        PollReceiver.scheduleAlarms(getApplicationContext(), true);
        // Kill the Feed URL, so we don't get the wrong notifications
        mPreferences.edit().putString("feed_uri", null).apply();
        return false;
    }
}
/**
 * Asynchronously fetches the logged-in user's Graph API profile
 * (id, name, cover, link) and populates the navigation drawer header:
 * display name, profile picture, and the cover photo as the header background.
 * Also caches the profile link in mUserLink for the header click handler.
 */
private void updateUserInfo() {
    GraphRequest request = GraphRequest.newMeRequest(AccessToken.getCurrentAccessToken(), new GraphRequest.GraphJSONObjectCallback() {
        @Override
        public void onCompleted(JSONObject object, GraphResponse response) {
            // Update header
            try {
                String userID = object.getString("id");
                mUserLink = object.getString("link");
                // Set the user's name under the header
                ((TextView) findViewById(R.id.profile_name)).setText(object.getString("name"));
                // Load the profile picture straight from the Graph API image endpoint
                Picasso.with(getApplicationContext()).load("https://graph.facebook.com/" + userID + "/picture?type=large").into((ImageView) findViewById(R.id.profile_picture));
                final View header = findViewById(R.id.header_layout);
                // Cover photo: resized/cropped to the header's current size,
                // then applied as the header background once the bitmap arrives.
                Picasso.with(getApplicationContext()).load(object.getJSONObject("cover").getString("source")).resize(header.getWidth(), header.getHeight()).centerCrop().error(R.drawable.side_nav_bar).into(new Target() {
                    @Override
                    public void onBitmapLoaded(Bitmap bitmap, Picasso.LoadedFrom from) {
                        Log.v(Helpers.LogTag, "Set cover photo");
                        header.setBackground(new BitmapDrawable(getResources(), bitmap));
                    }
                    @Override
                    public void onBitmapFailed(Drawable errorDrawable) {}
                    @Override
                    public void onPrepareLoad(Drawable placeHolderDrawable) {}
                });
            } catch (NullPointerException e) {
                // Graph response came back empty — most likely no connection.
                Snackbar.make(mCoordinatorLayoutView, R.string.error_facebook_noconnection, Snackbar.LENGTH_LONG).show();
            } catch (JSONException e) {
                // Response present but missing an expected field.
                e.printStackTrace();
                Snackbar.make(mCoordinatorLayoutView, R.string.error_facebook_error, Snackbar.LENGTH_LONG).show();
            } catch (Exception e) {
                e.printStackTrace();
                Snackbar.make(mCoordinatorLayoutView, R.string.error_super_wrong, Snackbar.LENGTH_LONG).show();
            }
        }
    });
    Bundle parameters = new Bundle();
    parameters.putString("fields", "id,name,cover,link");
    request.setParameters(parameters);
    request.executeAsync();
}
/**
 * Updates the action-bar notification badge. A positive count shows the
 * active icon with the number; otherwise the badge is collapsed
 * (Integer.MIN_VALUE) behind the washed-out icon.
 */
public void setNotificationNum(int num) {
    boolean hasNotifications = num > 0;
    int iconRes = hasNotifications ? R.drawable.ic_menu_notifications_active : R.drawable.ic_menu_notifications;
    ActionItemBadge.update(mNotificationButton,
            ResourcesCompat.getDrawable(getResources(), iconRes, null),
            hasNotifications ? num : Integer.MIN_VALUE);
}
/**
 * Updates the unread-messages badge on the drawer entry, but only when the
 * messaging preference is enabled. Integer.MIN_VALUE collapses the badge.
 */
public void setMessagesNum(int num) {
    if (!mPreferences.getBoolean(SettingsActivity.KEY_PREF_MESSAGING, false)) {
        return;
    }
    MenuItem messagesItem = mNavigationView.getMenu().findItem(R.id.nav_messages);
    ActionItemBadge.update(messagesItem, num > 0 ? num : Integer.MIN_VALUE);
}
/**
 * Updates the friend-requests badge on the drawer entry.
 * Integer.MIN_VALUE collapses the badge for a zero/negative count.
 */
public void setRequestsNum(int num) {
    MenuItem requestsItem = mNavigationView.getMenu().findItem(R.id.nav_friendreq);
    ActionItemBadge.update(requestsItem, num > 0 ? num : Integer.MIN_VALUE);
}
/**
 * Picks the initial URL based on the launching intent:
 * a SEND intent with a valid URL opens the composer pre-filled with that URL,
 * a VIEW intent with a valid URL is opened directly,
 * anything else falls back to the default Facebook base URL.
 */
private String chooseUrl() {
    Intent intent = getIntent();
    String action = intent.getAction();
    String type = intent.getType();
    if (Intent.ACTION_SEND.equals(action) && type != null) {
        String sharedText = intent.getStringExtra(Intent.EXTRA_TEXT);
        if (URLUtil.isValidUrl(sharedText)) {
            try {
                Log.v(Helpers.LogTag, "Shared URL Intent");
                return "https://mbasic.facebook.com/composer/?text=" + URLEncoder.encode(sharedText, "utf-8");
            } catch (UnsupportedEncodingException e) {
                // utf-8 is always available; fall through to the default URL anyway.
                e.printStackTrace();
            }
        }
    } else if (Intent.ACTION_VIEW.equals(action) && intent.getData() != null && URLUtil.isValidUrl(intent.getData().toString())) {
        // If there is a intent containing a facebook link, go there
        Log.v(Helpers.LogTag, "Opened URL Intent");
        return intent.getData().toString();
    }
    // If nothing has happened at this point, we want the default url
    return FACEBOOK_URL_BASE;
}
}
| |
/* -*- mode: java; c-basic-offset: 2; indent-tabs-mode: nil -*- */
/*
Part of the Processing project - http://processing.org
Copyright (c) 2012-15 The Processing Foundation
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License version 2
as published by the Free Software Foundation.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software Foundation, Inc.
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package processing.mode.java;
import java.awt.Color;
import java.awt.Cursor;
import java.awt.Dimension;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.RenderingHints;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.awt.event.MouseMotionAdapter;
import java.util.ArrayList;
import java.util.List;
import javax.swing.JPanel;
import javax.swing.SwingWorker;
import javax.swing.text.BadLocationException;
import processing.app.SketchCode;
import processing.app.Util;
import processing.mode.java.pdex.ErrorCheckerService;
import processing.mode.java.pdex.ErrorMarker;
import processing.mode.java.pdex.Problem;
import processing.app.Language;
/**
 * The bar on the left of the text area which displays all errors as rectangles. <br>
 * <br>
 * All errors and warnings of a sketch are drawn on the bar; clicking on one
 * scrolls to the tab and location. Error messages are displayed on hover.
 * Markers are not in sync with the error line. Similar to eclipse's right
 * error bar which displays the overall errors in a document.
 *
 * @author Manindra Moharana <me@mkmoharana.com>
 *
 */
public class ErrorColumn extends JPanel {
  /**
   * Preferred height of the component
   */
  protected int preferredHeight;
  /**
   * Preferred width of the component (the bar is a thin vertical strip)
   */
  protected int preferredWidth = 12;
  /**
   * Height in pixels of a single error/warning marker rectangle
   */
  public static final int errorMarkerHeight = 4;
  /**
   * Color of Error Marker
   */
  public Color errorColor; // = new Color(0xED2630);
  /**
   * Color of Warning Marker
   */
  public Color warningColor; // = new Color(0xFFC30E);
  /**
   * Background color of the component
   */
  public Color backgroundColor; // = new Color(0x2C343D);
  /**
   * JavaEditor instance
   */
  protected JavaEditor editor;
  /**
   * ErrorCheckerService instance
   */
  protected ErrorCheckerService errorCheckerService;
  /**
   * Stores error markers displayed PER TAB along the error bar.
   */
  protected List<ErrorMarker> errorPoints = new ArrayList<ErrorMarker>();
  /**
   * Stores previous list of error markers.
   */
  protected ArrayList<ErrorMarker> errorPointsOld = new ArrayList<ErrorMarker>();

  /**
   * Paints the background strip and one small rectangle per marker, colored
   * according to whether the marker represents an error or a warning.
   */
  public void paintComponent(Graphics g) {
    Graphics2D g2d = (Graphics2D) g;
    g2d.setRenderingHint(RenderingHints.KEY_ANTIALIASING,
                         RenderingHints.VALUE_ANTIALIAS_ON);
    g.setColor(backgroundColor);
    g.fillRect(0, 0, getWidth(), getHeight());
    // NOTE(review): errorPoints may be swapped concurrently by
    // updateErrorPoints()'s worker thread; iteration here is unsynchronized.
    for (ErrorMarker emarker : errorPoints) {
      if (emarker.getType() == ErrorMarker.Error) {
        g.setColor(errorColor);
      } else {
        g.setColor(warningColor);
      }
      // 2px left inset; marker spans almost the full bar width.
      g.fillRect(2, emarker.getY(), (getWidth() - 3), errorMarkerHeight);
    }
  }

  public Dimension getPreferredSize() {
    return new Dimension(preferredWidth, preferredHeight);
  }

  public Dimension getMinimumSize() {
    return getPreferredSize();
  }

  /**
   * @param editor the owning editor (also supplies the ErrorCheckerService)
   * @param height initial preferred height of the bar
   * @param mode   used to look up the theme colors
   */
  public ErrorColumn(JavaEditor editor, int height, JavaMode mode) {
    this.editor = editor;
    this.preferredHeight = height;
    this.errorCheckerService = editor.errorCheckerService;
    errorColor = mode.getColor("errorbar.errorcolor"); //, errorColor);
    warningColor = mode.getColor("errorbar.warningcolor"); //, warningColor);
    //backgroundColor = mode.getColor("errorbar.backgroundcolor"); //, backgroundColor);
    backgroundColor = mode.getColor("gutter.bgcolor");
    addListeners();
  }

  /**
   * Update error markers in the error bar.
   *
   * Each problem belonging to the current tab is mapped to a y position that
   * is proportional to its line number within the tab.
   *
   * @param problems
   *          - List of problems.
   */
  synchronized public void updateErrorPoints(final List<Problem> problems) {
    // NOTE TO SELF: ErrorMarkers are calculated for the present tab only
    // Error Marker index in the arraylist is LOCALIZED for current tab.
    // NOTE(review): despite the comment below, doInBackground() runs on a
    // SwingWorker *worker* thread, not the UI thread; only done() runs on
    // the EDT. The synchronized blocks are what guard the shared lists.
    final int fheight = this.getHeight();
    SwingWorker<Object, Object> worker = new SwingWorker<Object, Object>() {
      protected Object doInBackground() throws Exception {
        SketchCode sc = editor.getSketch().getCurrentCode();
        int totalLines = 0, currentTab = editor.getSketch()
            .getCurrentCodeIndex();
        try {
          totalLines = Util.countLines(sc.getDocument()
              .getText(0, sc.getDocument().getLength())) + 1;
        } catch (BadLocationException e) {
          e.printStackTrace();
        }
        // System.out.println("Total lines: " + totalLines);
        synchronized (errorPoints) {
          // Snapshot the previous markers so errorPointsChanged() can diff.
          errorPointsOld.clear();
          for (ErrorMarker marker : errorPoints) {
            errorPointsOld.add(marker);
          }
          errorPoints.clear();
          // Each problem.getSourceLine() will have an extra line added
          // because of
          // class declaration in the beginning as well as default imports
          synchronized (problems) {
            for (Problem problem : problems) {
              if (problem.getTabIndex() == currentTab) {
                // Ratio of error line to total lines
                float y = (problem.getLineNumber() + 1)
                    / ((float) totalLines);
                // Ratio multiplied by height of the error bar
                y *= fheight - 15; // -15 is just a vertical offset
                errorPoints
                    .add(new ErrorMarker(problem, (int) y,
                                         problem.isError() ? ErrorMarker.Error
                                             : ErrorMarker.Warning));
                // System.out.println("Y: " + y);
              }
            }
          }
        }
        return null;
      }

      protected void done() {
        // Runs on the EDT once the marker lists have been rebuilt.
        repaint();
      }
    };
    try {
      worker.execute(); // I eat concurrency bugs for breakfast.
    } catch (Exception exp) {
      System.out.println("Errorbar update markers is slacking."
          + exp.getMessage());
      // e.printStackTrace();
    }
  }

  /**
   * Check if new errors have popped up in the sketch since the last check
   *
   * Compares the current marker list against the snapshot taken by
   * updateErrorPoints(); any size or position difference triggers a repaint
   * of the text area.
   *
   * @return true - if errors have changed
   */
  public boolean errorPointsChanged() {
    if (errorPointsOld.size() != errorPoints.size()) {
      editor.getTextArea().repaint();
      // System.out.println("2 Repaint " + System.currentTimeMillis());
      return true;
    }
    else {
      for (int i = 0; i < errorPoints.size(); i++) {
        if (errorPoints.get(i).getY() != errorPointsOld.get(i).getY()) {
          editor.getTextArea().repaint();
          // System.out.println("3 Repaint " +
          // System.currentTimeMillis());
          return true;
        }
      }
    }
    return false;
  }

  /**
   * Add various mouse listeners.
   */
  protected void addListeners() {
    addMouseListener(new MouseAdapter() {
      // Find out which error/warning the user has clicked
      // and then scroll to that
      @Override
      public void mouseClicked(final MouseEvent e) {
        SwingWorker<Object, Object> worker = new SwingWorker<Object, Object>() {
          protected Object doInBackground() throws Exception {
            for (ErrorMarker eMarker : errorPoints) {
              // -2 and +2 are extra allowance, clicks in the
              // vicinity of the markers register that way
              if (e.getY() >= eMarker.getY() - 2
                  && e.getY() <= eMarker.getY() + 2 + errorMarkerHeight) {
                errorCheckerService.scrollToErrorLine(eMarker.getProblem());
                return null;
              }
            }
            return null;
          }
        };
        try {
          worker.execute();
        } catch (Exception exp) {
          System.out.println("Errorbar mouseClicked is slacking."
              + exp.getMessage());
          // e.printStackTrace();
        }
      }
    });
    // Tooltip on hover
    addMouseMotionListener(new MouseMotionAdapter() {
      public void mouseMoved(final MouseEvent evt) {
        // NOTE(review): setToolTipText/setCursor are called from the worker
        // thread here, which technically violates Swing's EDT rule.
        SwingWorker<Object, Object> worker = new SwingWorker<Object, Object>() {
          protected Object doInBackground() throws Exception {
            for (ErrorMarker eMarker : errorPoints) {
              if (evt.getY() >= eMarker.getY() - 2 &&
                  evt.getY() <= eMarker.getY() + 2 + errorMarkerHeight) {
                Problem p = eMarker.getProblem();
                String msg = ((p.isError()
                    ? Language.text("editor.status.error")
                    : Language.text("editor.status.warning")) + ": "
                    + p.getMessage());
                setToolTipText(msg);
                setCursor(Cursor.getPredefinedCursor(Cursor.HAND_CURSOR));
                break;
              }
            }
            return null;
          }
        };
        try {
          worker.execute();
        } catch (Exception exp) {
          System.out
              .println("Errorbar mousemoved Worker is slacking."
                  + exp.getMessage());
          // e.printStackTrace();
        }
      }
    });
  }
}
| |
/* released under bsd licence
* see LICENCE file or http://www.opensource.org/licenses/bsd-license.php for details
* Institute of Applied Simulation (ZHAW)
* Author Thomas Niederberger
*/
package ch.zhaw.ias.dito.config;
import java.util.Arrays;
import java.util.Objects;
import java.util.Set;
import java.util.TreeSet;

import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlElementWrapper;
import javax.xml.bind.annotation.XmlTransient;
import javax.xml.bind.annotation.XmlType;

import net.jcip.annotations.NotThreadSafe;

import ch.zhaw.ias.dito.Coding;
import ch.zhaw.ias.dito.DVector;
import ch.zhaw.ias.dito.QuestionType;
import ch.zhaw.ias.dito.Utils;
/**
 * Represents one survey question/column together with its configuration
 * (type, scaling, weights, excluded values, offset) and — transiently —
 * its data vector.
 * TODO it should be considered to cache the values of min, max and values for performance reasons
 * @author Thomas Niederberger (nith) - institute of applied simulation (IAS)
 */
@NotThreadSafe
@XmlType(propOrder={"column", "name" , "questionType", "enabled", "scaling", "questionWeight", "distanceWeight", "exclude", "offset"})
public final class Question {
  private int column;
  private String name;
  private Double scaling;
  private Double questionWeight;
  private Double distanceWeight;
  private QuestionType questionType;
  private Boolean enabled;
  @XmlElementWrapper(name = "excludeValues")
  @XmlElement(name = "value")
  private double[] exclude;
  private Double offset;
  // Transient data for this question; null until setData() is called.
  private DVector data;

  /** No-arg constructor required by JAXB; creates an unbound ordinal question. */
  public Question() {
    this(-1, "", QuestionType.ORDINAL, 1.0, 1.0, 1.0, new double[0], 0.0);
  }

  public Question(Integer column, String name, QuestionType questionType, Double scaling, Double questionWeight,
      Double distanceWeight, double[] exclude, Double offset) {
    this.column = column;
    this.name = name;
    this.scaling = scaling;
    this.questionWeight = questionWeight;
    this.distanceWeight = distanceWeight;
    this.questionType = questionType;
    this.enabled = true;
    this.exclude = exclude;
    this.offset = offset;
  }

  /**
   * Equality is based on the configuration fields only. questionType and the
   * transient data vector are NOT compared (NOTE(review): confirm that
   * ignoring questionType is intended — it was excluded in the original too).
   */
  @Override
  public boolean equals(Object obj) {
    if (!(obj instanceof Question)) {
      return false;
    }
    Question q = (Question) obj;
    return column == q.column
        && name.equals(q.name)
        && scaling.equals(q.scaling)
        && questionWeight.equals(q.questionWeight)
        && distanceWeight.equals(q.distanceWeight)
        && enabled.equals(q.enabled)
        && Arrays.equals(exclude, q.exclude)
        && offset.equals(q.offset);
  }

  /**
   * Hashes exactly the fields that equals() compares, keeping the
   * equals/hashCode contract. (Previously missing although equals() was
   * overridden, which breaks use in HashSet/HashMap.)
   */
  @Override
  public int hashCode() {
    return 31 * Objects.hash(column, name, scaling, questionWeight, distanceWeight, enabled, offset)
        + Arrays.hashCode(exclude);
  }

  /**
   * @return the distinct data values as a comma-separated String
   *         (requires data to be set).
   */
  public Object getDistinctValues() {
    Set<Double> values = new TreeSet<Double>();
    data.addValuesToCollection(values);
    return Utils.toCommaSeparatedString(values.iterator());
  }

  /**
   * Table-model style accessor: maps a TableColumn to the corresponding value.
   * @throws IllegalArgumentException for an unknown column
   */
  public Object getValue(TableColumn col) {
    if (col == TableColumn.NUMBER) {
      return getColumn();
    } else if (col == TableColumn.NAME) {
      return getName();
    } else if (col == TableColumn.VALUES) {
      return getDistinctValues();
    } else if (col == TableColumn.EXCLUDE) {
      return Utils.toCommaSeparatedString(exclude);
    } else if (col == TableColumn.MIN) {
      return min();
    } else if (col == TableColumn.MAX) {
      return max();
    } else if (col == TableColumn.TYPE) {
      return questionType;
    } else if (col == TableColumn.DISTANCE_WEIGHT) {
      return getDistanceWeight();
    } else if (col == TableColumn.QUESTION_WEIGHT) {
      return getQuestionWeight();
    } else if (col == TableColumn.SCALING) {
      return getScaling();
    } else if (col == TableColumn.OFFSET) {
      return getOffset();
    }
    throw new IllegalArgumentException("this is the end of the world");
  }

  /**
   * Table-model style mutator: parses/casts value depending on the column.
   * @throws IllegalArgumentException for a non-editable column
   */
  public void setValue(TableColumn col, Object value) {
    if (col == TableColumn.NAME) {
      name = value.toString();
    } else if (col == TableColumn.EXCLUDE) {
      exclude = (double[]) value;
    } else if (col == TableColumn.DISTANCE_WEIGHT) {
      distanceWeight = Double.parseDouble(value.toString());
    } else if (col == TableColumn.QUESTION_WEIGHT) {
      questionWeight = Double.parseDouble(value.toString());
    } else if (col == TableColumn.SCALING) {
      scaling = Double.parseDouble(value.toString());
    } else if (col == TableColumn.TYPE) {
      questionType = (QuestionType) value;
    } else if (col == TableColumn.OFFSET) {
      offset = Double.parseDouble(value.toString());
    } else {
      throw new IllegalArgumentException("column " + col + " is not editable");
    }
  }

  /** @return minimum of the data vector (requires data to be set). */
  public double min() {
    return data.min();
  }

  /** @return maximum of the data vector (requires data to be set). */
  public double max() {
    return data.max();
  }

  /**
   * Sets a new data vector. For consistency reasons this changes other attributes of Question (i.e. QuestionType) too.
   * TODO maybe other values must be changed too.
   * @param data
   */
  public void setData(DVector data) {
    this.data = data;
    QuestionType type = data.getDefaultQuestionType();
    // set new type if QuestionType is Binary or Metric
    // is it strictly necessary to set the type to binary??
    /*if (type == QuestionType.BINARY || type == QuestionType.METRIC) {
      setQuestionType(type);
    } else if (questionType == QuestionType.BINARY || questionType == QuestionType.METRIC) {
      setQuestionType(type);
    }*/
    if (type == QuestionType.BINARY || type == QuestionType.METRIC) {
      setQuestionType(type);
    }
  }

  public void clearData() {
    this.data = null;
  }

  @XmlTransient
  public DVector getData() {
    return data;
  }

  public int getColumn() {
    return column;
  }

  public String getName() {
    return name;
  }

  public Double getScaling() {
    return scaling;
  }

  public void setScaling(Double scaling) {
    this.scaling = scaling;
  }

  public Double getQuestionWeight() {
    return questionWeight;
  }

  public void setQuestionWeight(Double questionWeight) {
    this.questionWeight = questionWeight;
  }

  public Double getDistanceWeight() {
    return distanceWeight;
  }

  public void setQuestionType(QuestionType questionType) {
    this.questionType = questionType;
  }

  public QuestionType getQuestionType() {
    return questionType;
  }

  public void setDistanceWeight(Double distanceWeight) {
    this.distanceWeight = distanceWeight;
  }

  public void setName(String name) {
    this.name = name;
  }

  public void setColumn(int column) {
    this.column = column;
  }

  public Boolean getEnabled() {
    return enabled;
  }

  public void setEnabled(Boolean enabled) {
    this.enabled = enabled;
  }

  public DVector[] recode(Coding coding) {
    return data.recode(coding, questionType);
  }

  @XmlTransient
  public double[] getExclude() {
    return exclude;
  }

  public void setExclude(double[] exclude) {
    this.exclude = exclude;
  }

  /** @return the data vector with all excluded values removed. */
  public DVector getExcludedVector() {
    return data.exclude(exclude);
  }

  public Double getOffset() {
    return offset;
  }

  public void setOffset(Double offset) {
    this.offset = offset;
  }
}
| |
package org.apache.archiva.consumers.lucene;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.io.File;
import java.io.IOException;
import java.util.Calendar;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.apache.archiva.scheduler.ArchivaTaskScheduler;
import org.apache.archiva.scheduler.indexing.ArtifactIndexingTask;
import org.apache.commons.io.FileUtils;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.FileTypes;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.consumers.KnownRepositoryContentConsumer;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable;
import org.codehaus.plexus.spring.PlexusInSpringTestCase;
import org.codehaus.plexus.taskqueue.TaskQueueException;
/**
 * NexusIndexerConsumerTest
 *
 * Exercises NexusIndexerConsumer's scan lifecycle (beginScan / processFile /
 * completeScan) against a stub scheduler that records which files were queued
 * for indexing.
 */
public class NexusIndexerConsumerTest
    extends PlexusInSpringTestCase
{
    // Repository-relative paths used by several tests; extracted so the File
    // assertions and the processFile() calls cannot drift apart.
    private static final String TEST_ARTIFACT_PATH =
        "org/apache/archiva/archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar";

    private static final String TEST_POM_PATH =
        "org/apache/archiva/archiva-index-methods-jar-test/1.0/pom.xml";

    /** Records queued index tasks in-memory instead of running a real queue. */
    private final class ArchivaTaskSchedulerStub
        implements ArchivaTaskScheduler<ArtifactIndexingTask>
    {
        Set<File> indexed = new HashSet<File>();

        public void queueTask( ArtifactIndexingTask task )
            throws TaskQueueException
        {
            switch ( task.getAction() )
            {
                case ADD:
                    indexed.add( task.getResourceFile() );
                    break;
                case DELETE:
                    indexed.remove( task.getResourceFile() );
                    break;
                case FINISH:
                    try
                    {
                        task.getContext().close( false );
                    }
                    catch ( IOException e )
                    {
                        // Preserve the cause instead of flattening it to a
                        // message string (original dropped the stack trace).
                        throw new TaskQueueException( e.getMessage(), e );
                    }
                    break;
            }
        }
    }

    private KnownRepositoryContentConsumer nexusIndexerConsumer;

    private ManagedRepositoryConfiguration repositoryConfig;

    private ArchivaTaskSchedulerStub scheduler;

    @Override
    protected void setUp()
        throws Exception
    {
        super.setUp();
        scheduler = new ArchivaTaskSchedulerStub();
        ArchivaConfiguration configuration = (ArchivaConfiguration) lookup( ArchivaConfiguration.class );
        FileTypes filetypes = (FileTypes) lookup( FileTypes.class );
        nexusIndexerConsumer = new NexusIndexerConsumer( scheduler, configuration, filetypes );
        // initialize to set the file types to be processed
        ( (Initializable) nexusIndexerConsumer ).initialize();
        repositoryConfig = new ManagedRepositoryConfiguration();
        repositoryConfig.setId( "test-repo" );
        repositoryConfig.setLocation( getBasedir() + "/target/test-classes/test-repo" );
        repositoryConfig.setLayout( "default" );
        repositoryConfig.setName( "Test Repository" );
        repositoryConfig.setScanned( true );
        repositoryConfig.setSnapshots( false );
        repositoryConfig.setReleases( true );
    }

    @Override
    protected void tearDown()
        throws Exception
    {
        // delete created index in the repository
        File indexDir = new File( repositoryConfig.getLocation(), ".indexer" );
        FileUtils.deleteDirectory( indexDir );
        assertFalse( indexDir.exists() );
        indexDir = new File( repositoryConfig.getLocation(), ".index" );
        FileUtils.deleteDirectory( indexDir );
        assertFalse( indexDir.exists() );
        super.tearDown();
    }

    public void testIndexerIndexArtifact()
        throws Exception
    {
        File artifactFile = new File( repositoryConfig.getLocation(), TEST_ARTIFACT_PATH );
        // begin scan
        Date now = Calendar.getInstance().getTime();
        nexusIndexerConsumer.beginScan( repositoryConfig, now );
        nexusIndexerConsumer.processFile( TEST_ARTIFACT_PATH );
        nexusIndexerConsumer.completeScan();
        assertTrue( scheduler.indexed.contains( artifactFile ) );
    }

    public void testIndexerArtifactAlreadyIndexed()
        throws Exception
    {
        File artifactFile = new File( repositoryConfig.getLocation(), TEST_ARTIFACT_PATH );
        // begin scan
        Date now = Calendar.getInstance().getTime();
        nexusIndexerConsumer.beginScan( repositoryConfig, now );
        nexusIndexerConsumer.processFile( TEST_ARTIFACT_PATH );
        nexusIndexerConsumer.completeScan();
        assertTrue( scheduler.indexed.contains( artifactFile ) );
        // scan and index again — re-indexing the same artifact must not fail
        now = Calendar.getInstance().getTime();
        nexusIndexerConsumer.beginScan( repositoryConfig, now );
        nexusIndexerConsumer.processFile( TEST_ARTIFACT_PATH );
        nexusIndexerConsumer.completeScan();
        assertTrue( scheduler.indexed.contains( artifactFile ) );
    }

    public void testIndexerIndexArtifactThenPom()
        throws Exception
    {
        File artifactFile = new File( repositoryConfig.getLocation(), TEST_ARTIFACT_PATH );
        // begin scan
        Date now = Calendar.getInstance().getTime();
        nexusIndexerConsumer.beginScan( repositoryConfig, now );
        nexusIndexerConsumer.processFile( TEST_ARTIFACT_PATH );
        nexusIndexerConsumer.completeScan();
        assertTrue( scheduler.indexed.contains( artifactFile ) );
        artifactFile = new File( repositoryConfig.getLocation(), TEST_POM_PATH );
        // scan and index again, this time the pom
        now = Calendar.getInstance().getTime();
        nexusIndexerConsumer.beginScan( repositoryConfig, now );
        nexusIndexerConsumer.processFile( TEST_POM_PATH );
        nexusIndexerConsumer.completeScan();
        assertTrue( scheduler.indexed.contains( artifactFile ) );
    }

    // MRM-1275 - Include other file types for the index consumer instead of just the indexable-content
    public void testIncludedFileTypes()
        throws Exception
    {
        List<String> includes = nexusIndexerConsumer.getIncludes();
        assertTrue( ".pom artifacts should be processed.", includes.contains( "**/*.pom" ) );
        assertTrue( ".xml artifacts should be processed.", includes.contains( "**/*.xml" ) );
        assertTrue( ".txt artifacts should be processed.", includes.contains( "**/*.txt" ) );
        assertTrue( ".jar artifacts should be processed.", includes.contains( "**/*.jar" ) );
        assertTrue( ".war artifacts should be processed.", includes.contains( "**/*.war" ) );
        assertTrue( ".zip artifacts should be processed.", includes.contains( "**/*.zip" ) );
    }

    @Override
    protected String getPlexusConfigLocation()
    {
        return "/org/apache/archiva/consumers/lucene/LuceneConsumersTest.xml";
    }
}
| |
/*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.execution.actions;
import com.intellij.execution.*;
import com.intellij.execution.configurations.ConfigurationType;
import com.intellij.execution.impl.ExecutionManagerImpl;
import com.intellij.execution.runners.ExecutionUtil;
import com.intellij.execution.ui.RunContentDescriptor;
import com.intellij.icons.AllIcons;
import com.intellij.ide.DataManager;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.actionSystem.ex.ComboBoxAction;
import com.intellij.openapi.project.DumbAware;
import com.intellij.openapi.project.DumbAwareAction;
import com.intellij.openapi.project.IndexNotReadyException;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Condition;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.wm.ex.WindowManagerEx;
import com.intellij.openapi.wm.impl.IdeFrameImpl;
import com.intellij.ui.IdeBorderFactory;
import com.intellij.ui.SizedIcon;
import com.intellij.ui.components.panels.NonOpaquePanel;
import com.intellij.util.IconUtil;
import com.intellij.util.ui.EmptyIcon;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.awt.*;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
/**
 * Main-toolbar combo box that shows the currently selected run configuration and
 * execution target, and opens a popup to edit configurations, switch the target,
 * or select another configuration.
 */
public class RunConfigurationsComboBoxAction extends ComboBoxAction implements DumbAware {
  // Check marks rendered next to the active execution target in the popup;
  // EMPTY_ICON keeps unmarked rows horizontally aligned with marked ones.
  public static final Icon CHECKED_ICON = new SizedIcon(AllIcons.Actions.Checked, 16, 16);
  public static final Icon CHECKED_SELECTED_ICON = new SizedIcon(AllIcons.Actions.Checked_selected, 16, 16);
  public static final Icon EMPTY_ICON = EmptyIcon.ICON_16;

  @Override
  public void actionPerformed(AnActionEvent e) {
    // Invoked without the custom toolbar component (e.g. from the keyboard or
    // Find Action): anchor the popup to the IDE frame component instead.
    if (e.getPresentation().getClientProperty(CUSTOM_COMPONENT_PROPERTY) == null) {
      Project project = e.getProject();
      IdeFrameImpl frame = project != null ? WindowManagerEx.getInstanceEx().getFrame(project) : null;
      if (frame != null) {
        e.getPresentation().putClientProperty(CUSTOM_COMPONENT_PROPERTY, frame.getComponent());
      }
    }
    super.actionPerformed(e);
  }

  @Override
  public void update(AnActionEvent e) {
    Presentation presentation = e.getPresentation();
    Project project = e.getData(CommonDataKeys.PROJECT);
    if (ActionPlaces.isMainMenuOrActionSearch(e.getPlace())) {
      presentation.setDescription(ExecutionBundle.message("choose.run.configuration.action.description"));
    }
    try {
      if (project == null || project.isDisposed() || !project.isInitialized()) {
        // No usable project: clear text/icon and disable the combo.
        updatePresentation(null, null, null, presentation);
        presentation.setEnabled(false);
      }
      else {
        updatePresentation(ExecutionTargetManager.getActiveTarget(project),
                           RunManagerEx.getInstanceEx(project).getSelectedConfiguration(),
                           project,
                           presentation);
        presentation.setEnabled(true);
      }
    }
    catch (IndexNotReadyException e1) {
      // Indexing in progress ("dumb mode") — keep the action disabled until ready.
      presentation.setEnabled(false);
    }
  }

  // Renders "<configuration> | <target>" (or a warning when the configuration
  // cannot run on the default target) and applies the configuration icon.
  private static void updatePresentation(@Nullable ExecutionTarget target,
                                         @Nullable RunnerAndConfigurationSettings settings,
                                         @Nullable Project project,
                                         @NotNull Presentation presentation) {
    if (project != null && target != null && settings != null) {
      String name = settings.getName();
      if (target != DefaultExecutionTarget.INSTANCE) {
        name += " | " + target.getDisplayName();
      } else {
        if (!settings.canRunOn(target)) {
          name += " | Nothing to run on";
        }
      }
      presentation.setText(name, false); // second arg: keep the name literal (no mnemonic parsing)
      setConfigurationIcon(presentation, settings, project);
    }
    else {
      presentation.setText(""); // IDEA-21657
      presentation.setIcon(null);
    }
  }

  // Shows the configuration icon, decorated with a "live" indicator when exactly
  // one instance is running, or with the running-instance count when several are.
  private static void setConfigurationIcon(final Presentation presentation,
                                           final RunnerAndConfigurationSettings settings,
                                           final Project project) {
    try {
      Icon icon = RunManagerEx.getInstanceEx(project).getConfigurationIcon(settings);
      ExecutionManagerImpl executionManager = ExecutionManagerImpl.getInstance(project);
      List<RunContentDescriptor> runningDescriptors = executionManager.getRunningDescriptors(s -> s == settings);
      if (runningDescriptors.size() == 1) {
        icon = ExecutionUtil.getLiveIndicator(icon);
      }
      if (runningDescriptors.size() > 1) {
        icon = IconUtil.addText(icon, String.valueOf(runningDescriptors.size()));
      }
      presentation.setIcon(icon);
    }
    catch (IndexNotReadyException ignored) {
      // Dumb mode: leave whatever icon is currently shown.
    }
  }

  @Override
  protected boolean shouldShowDisabledActions() {
    return true;
  }

  @Override
  public JComponent createCustomComponent(final Presentation presentation) {
    // Wrap the combo button in a non-opaque panel to get toolbar spacing right.
    ComboBoxButton button = createComboBoxButton(presentation);
    button.setBorder(BorderFactory.createEmptyBorder(0, 2, 0, 2));
    NonOpaquePanel panel = new NonOpaquePanel(new BorderLayout());
    panel.setBorder(IdeBorderFactory.createEmptyBorder(0, 0, 0, 2));
    panel.add(button);
    return panel;
  }

  // Builds the popup content: "Edit Configurations", "Save <temporary>", the
  // available execution targets for the selected configuration, then one
  // (sub)group per configuration type / folder.
  @Override
  @NotNull
  protected DefaultActionGroup createPopupActionGroup(final JComponent button) {
    final DefaultActionGroup allActionsGroup = new DefaultActionGroup();
    final Project project = CommonDataKeys.PROJECT.getData(DataManager.getInstance().getDataContext(button));
    if (project != null) {
      final RunManagerEx runManager = RunManagerEx.getInstanceEx(project);
      allActionsGroup.add(ActionManager.getInstance().getAction(IdeActions.ACTION_EDIT_RUN_CONFIGURATIONS));
      allActionsGroup.add(new SaveTemporaryAction());
      allActionsGroup.addSeparator();
      RunnerAndConfigurationSettings selected = RunManager.getInstance(project).getSelectedConfiguration();
      if (selected != null) {
        ExecutionTarget activeTarget = ExecutionTargetManager.getActiveTarget(project);
        for (ExecutionTarget eachTarget : ExecutionTargetManager.getTargetsToChooseFor(project, selected)) {
          allActionsGroup.add(new SelectTargetAction(project, eachTarget, eachTarget.equals(activeTarget)));
        }
        allActionsGroup.addSeparator();
      }
      final ConfigurationType[] types = runManager.getConfigurationFactories();
      for (ConfigurationType type : types) {
        final DefaultActionGroup actionGroup = new DefaultActionGroup();
        Map<String,List<RunnerAndConfigurationSettings>> structure = runManager.getStructure(type);
        for (Map.Entry<String, List<RunnerAndConfigurationSettings>> entry : structure.entrySet()) {
          // A null key means "no folder": items go straight into the type's group.
          DefaultActionGroup group = entry.getKey() != null ? new DefaultActionGroup(entry.getKey(), true) : actionGroup;
          // NOTE(review): the folder icon is applied even when group == actionGroup
          // (i.e. when there is no folder) — confirm this is intended.
          group.getTemplatePresentation().setIcon(AllIcons.Nodes.Folder);
          for (RunnerAndConfigurationSettings settings : entry.getValue()) {
            group.add(new SelectConfigAction(settings, project));
          }
          if (group != actionGroup) {
            actionGroup.add(group);
          }
        }
        allActionsGroup.add(actionGroup);
        allActionsGroup.addSeparator();
      }
    }
    return allActionsGroup;
  }

  // Popup item that makes the current temporary run configuration permanent.
  private static class SaveTemporaryAction extends DumbAwareAction {
    public SaveTemporaryAction() {
      Presentation presentation = getTemplatePresentation();
      presentation.setIcon(AllIcons.Actions.Menu_saveall);
    }

    @Override
    public void actionPerformed(final AnActionEvent e) {
      final Project project = e.getData(CommonDataKeys.PROJECT);
      if (project != null) {
        RunnerAndConfigurationSettings settings = chooseTempSettings(project);
        if (settings != null) {
          final RunManager runManager = RunManager.getInstance(project);
          runManager.makeStable(settings);
        }
      }
    }

    @Override
    public void update(final AnActionEvent e) {
      final Presentation presentation = e.getPresentation();
      final Project project = e.getData(CommonDataKeys.PROJECT);
      if (project == null) {
        disable(presentation);
        return;
      }
      RunnerAndConfigurationSettings settings = chooseTempSettings(project);
      if (settings == null) {
        // Nothing temporary to save — hide the item entirely.
        disable(presentation);
      }
      else {
        presentation.setText(ExecutionBundle.message("save.temporary.run.configuration.action.name", settings.getName()));
        presentation.setDescription(presentation.getText());
        presentation.setVisible(true);
        presentation.setEnabled(true);
      }
    }

    private static void disable(final Presentation presentation) {
      presentation.setEnabled(false);
      presentation.setVisible(false);
    }

    // Prefers the selected configuration when it is temporary; otherwise the
    // first temporary configuration, or null when there are none.
    @Nullable
    private static RunnerAndConfigurationSettings chooseTempSettings(@NotNull Project project) {
      RunnerAndConfigurationSettings selectedConfiguration = RunManager.getInstance(project).getSelectedConfiguration();
      if (selectedConfiguration != null && selectedConfiguration.isTemporary()) {
        return selectedConfiguration;
      }
      Iterator<RunnerAndConfigurationSettings> iterator = RunManager.getInstance(project).getTempConfigurationsList().iterator();
      return iterator.hasNext() ? iterator.next() : null;
    }
  }

  // Popup item that activates an execution target.
  private static class SelectTargetAction extends AnAction {
    private final Project myProject;
    private final ExecutionTarget myTarget;

    public SelectTargetAction(final Project project, final ExecutionTarget target, boolean selected) {
      myProject = project;
      myTarget = target;
      String name = target.getDisplayName();
      Presentation presentation = getTemplatePresentation();
      presentation.setText(name, false);
      presentation.setDescription("Select " + name);
      presentation.setIcon(selected ? CHECKED_ICON : EMPTY_ICON);
      presentation.setSelectedIcon(selected ? CHECKED_SELECTED_ICON : EMPTY_ICON);
    }

    @Override
    public void actionPerformed(AnActionEvent e) {
      ExecutionTargetManager.setActiveTarget(myProject, myTarget);
      updatePresentation(ExecutionTargetManager.getActiveTarget(myProject),
                         RunManagerEx.getInstanceEx(myProject).getSelectedConfiguration(),
                         myProject,
                         e.getPresentation());
    }

    @Override
    public boolean isDumbAware() {
      // Dumb-mode availability is gated behind a registry flag.
      return Registry.is("dumb.aware.run.configurations");
    }
  }

  // Popup item that makes a configuration the selected one.
  private static class SelectConfigAction extends DumbAwareAction {
    private final RunnerAndConfigurationSettings myConfiguration;
    private final Project myProject;

    public SelectConfigAction(final RunnerAndConfigurationSettings configuration, final Project project) {
      myConfiguration = configuration;
      myProject = project;
      String name = configuration.getName();
      if (name == null || name.length() == 0) {
        name = " "; // non-empty placeholder for nameless configurations
      }
      final Presentation presentation = getTemplatePresentation();
      presentation.setText(name, false);
      final ConfigurationType type = configuration.getType();
      if (type != null) {
        presentation.setDescription("Select " + type.getConfigurationTypeDescription() + " '" + name + "'");
      }
      updateIcon(presentation);
    }

    private void updateIcon(final Presentation presentation) {
      setConfigurationIcon(presentation, myConfiguration, myProject);
    }

    @Override
    public void actionPerformed(final AnActionEvent e) {
      RunManager.getInstance(myProject).setSelectedConfiguration(myConfiguration);
      updatePresentation(ExecutionTargetManager.getActiveTarget(myProject), myConfiguration, myProject, e.getPresentation());
    }

    @Override
    public void update(final AnActionEvent e) {
      super.update(e);
      updateIcon(e.getPresentation());
    }
  }
}
| |
/*
* DBeaver - Universal Database Manager
* Copyright (C) 2010-2019 Serge Rider (serge@jkiss.org)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jkiss.dbeaver.tools.transfer.stream.exporter;
import org.jkiss.dbeaver.DBException;
import org.jkiss.dbeaver.Log;
import org.jkiss.dbeaver.model.DBPNamedObject;
import org.jkiss.dbeaver.model.DBUtils;
import org.jkiss.dbeaver.model.data.DBDAttributeBinding;
import org.jkiss.dbeaver.model.data.DBDContent;
import org.jkiss.dbeaver.model.data.DBDContentStorage;
import org.jkiss.dbeaver.model.data.DBDContentValueHandler;
import org.jkiss.dbeaver.model.exec.DBCResultSet;
import org.jkiss.dbeaver.model.exec.DBCSession;
import org.jkiss.dbeaver.model.runtime.DBRProgressMonitor;
import org.jkiss.dbeaver.model.sql.SQLConstants;
import org.jkiss.dbeaver.model.sql.SQLDialect;
import org.jkiss.dbeaver.model.sql.SQLUtils;
import org.jkiss.dbeaver.tools.transfer.DTUtils;
import org.jkiss.dbeaver.tools.transfer.stream.IStreamDataExporterSite;
import org.jkiss.dbeaver.utils.ContentUtils;
import org.jkiss.dbeaver.utils.GeneralUtils;
import org.jkiss.utils.CommonUtils;
import java.io.File;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.Reader;
/**
 * SQL INSERT exporter: streams result-set rows as INSERT statements, optionally
 * batching several rows per statement when the target dialect supports
 * multi-value inserts (see {@link SQLDialect.MultiValueInsertMode}).
 */
public class DataExporterSQL extends StreamExporterAbstract {

    private static final Log log = Log.getLog(DataExporterSQL.class);

    // Exporter property names, configured through the data-transfer wizard.
    private static final String PROP_INCLUDE_AUTO_GENERATED = "includeAutoGenerated";
    private static final String PROP_OMIT_SCHEMA = "omitSchema";
    private static final String PROP_ROWS_IN_STATEMENT = "rowsInStatement";

    private static final char STRING_QUOTE = '\'';

    private boolean includeAutoGenerated;   // export auto-generated (identity) columns?
    private String rowDelimiter;            // platform line separator
    private boolean omitSchema;             // strip schema qualifier from the table name
    private int rowsInStatement;            // rows per INSERT when multi-value insert is used
    private String tableName;
    private DBDAttributeBinding[] columns;
    private transient StringBuilder sqlBuffer = new StringBuilder(100);
    private transient long rowCount;
    private SQLDialect dialect;             // may be null when the source has no SQL dialect

    // Pseudo-columns never appear in INSERTs; auto-generated columns only on request.
    private boolean isSkipColumn(DBDAttributeBinding attr) {
        return attr.isPseudoAttribute() || (!includeAutoGenerated && attr.isAutoGenerated());
    }

    /**
     * Reads exporter properties from the site; a malformed/absent
     * {@code rowsInStatement} falls back to 10 rows per statement.
     */
    @Override
    public void init(IStreamDataExporterSite site) throws DBException
    {
        super.init(site);
        if (site.getProperties().containsKey(PROP_INCLUDE_AUTO_GENERATED)) {
            includeAutoGenerated = CommonUtils.toBoolean(site.getProperties().get(PROP_INCLUDE_AUTO_GENERATED));
        }
        if (site.getProperties().containsKey(PROP_OMIT_SCHEMA)) {
            omitSchema = CommonUtils.toBoolean(site.getProperties().get(PROP_OMIT_SCHEMA));
        }
        try {
            rowsInStatement = Integer.parseInt(String.valueOf(site.getProperties().get(PROP_ROWS_IN_STATEMENT)));
        } catch (NumberFormatException e) {
            rowsInStatement = 10;
        }
        rowDelimiter = GeneralUtils.getDefaultLineSeparator();
        dialect = SQLUtils.getDialectFromObject(site.getSource());
    }

    @Override
    public void dispose()
    {
        super.dispose();
    }

    /** Captures the attribute list and target table name; resets the row counter. */
    @Override
    public void exportHeader(DBCSession session) throws DBException, IOException
    {
        columns = getSite().getAttributes();
        DBPNamedObject source = getSite().getSource();
        tableName = DTUtils.getTableName(session.getDataSource(), source, omitSchema);
        rowCount = 0;
    }

    /**
     * Writes one row. Depending on the effective insert mode this either emits a
     * complete INSERT per row (NOT_SUPPORTED), or opens a statement every
     * {@code rowsInStatement} rows and appends value groups to it (PLAIN /
     * GROUP_ROWS); the open statement is closed by the next statement start or by
     * {@link #exportFooter}.
     */
    @Override
    public void exportRow(DBCSession session, DBCResultSet resultSet, Object[] row) throws DBException, IOException
    {
        PrintWriter out = getWriter();
        SQLDialect.MultiValueInsertMode insertMode = getEffectiveInsertMode();
        int columnsSize = columns.length;
        boolean firstRow = false;
        if (insertMode == SQLDialect.MultiValueInsertMode.NOT_SUPPORTED || rowCount % rowsInStatement == 0) {
            sqlBuffer.setLength(0);
            if (rowCount > 0) {
                // Terminate the previous multi-value statement before starting a new one.
                if (insertMode == SQLDialect.MultiValueInsertMode.PLAIN) {
                    sqlBuffer.append(");").append(rowDelimiter);
                } else if (insertMode == SQLDialect.MultiValueInsertMode.GROUP_ROWS) {
                    sqlBuffer.append(";").append(rowDelimiter);
                }
            }
            sqlBuffer.append("INSERT INTO ").append(tableName).append(" (");
            boolean hasColumn = false;
            for (int i = 0; i < columnsSize; i++) {
                DBDAttributeBinding column = columns[i];
                if (isSkipColumn(column)) {
                    continue;
                }
                if (hasColumn) {
                    sqlBuffer.append(',');
                }
                hasColumn = true;
                sqlBuffer.append(DBUtils.getQuotedIdentifier(column));
            }
            sqlBuffer.append(") VALUES ");
            // GROUP_ROWS wraps each row in its own parentheses below; other modes
            // open the value list here.
            if (insertMode != SQLDialect.MultiValueInsertMode.GROUP_ROWS) {
                sqlBuffer.append("(");
            }
            if (rowsInStatement > 1) {
                sqlBuffer.append(rowDelimiter);
            }
            out.write(sqlBuffer.toString());
            firstRow = true;
        }
        if (insertMode != SQLDialect.MultiValueInsertMode.NOT_SUPPORTED && !firstRow) {
            out.write(",");
        }
        if (insertMode == SQLDialect.MultiValueInsertMode.GROUP_ROWS) {
            out.write("(");
        }
        rowCount++;
        boolean hasValue = false;
        for (int i = 0; i < columnsSize; i++) {
            DBDAttributeBinding column = columns[i];
            if (isSkipColumn(column)) {
                continue;
            }
            if (hasValue) {
                out.write(',');
            }
            hasValue = true;
            Object value = row[i];
            if (DBUtils.isNullValue(value)) {
                out.write(SQLConstants.NULL_VALUE);
            } else if (row[i] instanceof DBDContent) {
                // LOB value: inline text content, delegate binaries to the site.
                DBDContent content = (DBDContent)row[i];
                try {
                    if (column.getValueHandler() instanceof DBDContentValueHandler) {
                        ((DBDContentValueHandler) column.getValueHandler()).writeStreamValue(session.getProgressMonitor(), session.getDataSource(), column, content, out);
                    } else {
                        DBDContentStorage cs = content.getContents(session.getProgressMonitor());
                        if (cs != null) {
                            if (ContentUtils.isTextContent(content)) {
                                try (Reader contentReader = cs.getContentReader()) {
                                    writeStringValue(contentReader);
                                }
                            } else {
                                getSite().writeBinaryData(cs);
                            }
                        }
                    }
                } catch (Exception e) {
                    // Best-effort: a broken LOB must not abort the whole export.
                    log.warn(e);
                } finally {
                    content.release();
                }
            } else if (value instanceof File) {
                out.write("@");
                out.write(((File)value).getAbsolutePath());
            } else {
                out.write(SQLUtils.convertValueToSQL(session.getDataSource(), column, row[i]));
            }
        }
        if (insertMode != SQLDialect.MultiValueInsertMode.PLAIN) {
            out.write(")");
        }
        if (insertMode == SQLDialect.MultiValueInsertMode.NOT_SUPPORTED) {
            out.write(";");
        }
        out.write(rowDelimiter);
    }

    /**
     * Terminates the last (possibly still open) multi-value statement.
     *
     * Fix: uses the same effective mode as {@link #exportRow}. Previously this
     * consulted the dialect directly, so with {@code rowsInStatement == 1} (where
     * every row is already a complete, ';'-terminated statement) a stray
     * {@code ");"} or {@code ";"} was appended at the end of the output.
     */
    @Override
    public void exportFooter(DBRProgressMonitor monitor) {
        switch (getEffectiveInsertMode()) {
            case GROUP_ROWS:
                if (rowCount > 0) {
                    getWriter().write(";");
                }
                break;
            case PLAIN:
                if (rowCount > 0) {
                    getWriter().write(");");
                }
                break;
            default:
                break;
        }
    }

    // Writes a quoted, dialect-escaped string literal.
    // NOTE(review): currently unused within this class; kept for symmetry with
    // the Reader overload below.
    private void writeStringValue(String value)
    {
        PrintWriter out = getWriter();
        out.write(STRING_QUOTE);
        if (dialect != null) {
            out.write(dialect.escapeString(value));
        } else {
            out.write(value);
        }
        out.write(STRING_QUOTE);
    }

    // Streams a quoted, dialect-escaped string literal from a reader; always
    // closes the reader.
    private void writeStringValue(Reader reader) throws IOException
    {
        try {
            PrintWriter out = getWriter();
            out.write(STRING_QUOTE);
            char buffer[] = new char[2000];
            for (;;) {
                int count = reader.read(buffer);
                if (count <= 0) {
                    break;
                }
                if (dialect != null) {
                    out.write(dialect.escapeString(String.valueOf(buffer, 0, count)));
                } else {
                    out.write(buffer, 0, count);
                }
            }
            out.write(STRING_QUOTE);
        } finally {
            ContentUtils.close(reader);
        }
    }

    // Multi-value insert mode actually used for this export: with one row per
    // statement, multi-value syntax is never emitted regardless of the dialect.
    private SQLDialect.MultiValueInsertMode getEffectiveInsertMode() {
        SQLDialect.MultiValueInsertMode insertMode = getMultiValueInsertMode();
        return rowsInStatement == 1 ? SQLDialect.MultiValueInsertMode.NOT_SUPPORTED : insertMode;
    }

    // Raw dialect capability (NOT_SUPPORTED when no dialect is available).
    private SQLDialect.MultiValueInsertMode getMultiValueInsertMode() {
        SQLDialect.MultiValueInsertMode insertMode = SQLDialect.MultiValueInsertMode.NOT_SUPPORTED;
        if (dialect != null) {
            insertMode = dialect.getMultiValueInsertMode();
        }
        return insertMode;
    }
}
| |
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// This file is autogenerated by:
// mojo/public/tools/bindings/mojom_bindings_generator.py
// For:
// services/device/public/interfaces/nfc.mojom
//
package org.chromium.device.mojom;
import org.chromium.base.annotations.SuppressFBWarnings;
import org.chromium.mojo.bindings.DeserializationException;
class Nfc_Internal {
    // Descriptor used by the Mojo bindings runtime to bind device::mojom::NFC:
    // supplies the mojom interface name/version and factories for proxy and stub.
    public static final org.chromium.mojo.bindings.Interface.Manager<Nfc, Nfc.Proxy> MANAGER =
            new org.chromium.mojo.bindings.Interface.Manager<Nfc, Nfc.Proxy>() {

        public String getName() {
            return "device::mojom::NFC";
        }

        public int getVersion() {
            return 0;
        }

        public Proxy buildProxy(org.chromium.mojo.system.Core core,
                                org.chromium.mojo.bindings.MessageReceiverWithResponder messageReceiver) {
            return new Proxy(core, messageReceiver);
        }

        public Stub buildStub(org.chromium.mojo.system.Core core, Nfc impl) {
            return new Stub(core, impl);
        }

        public Nfc[] buildArray(int size) {
            return new Nfc[size];
        }
    };
    // Wire ordinals identifying each mojom method in a message header; generated
    // from the declaration order in services/device/public/interfaces/nfc.mojom.
    private static final int SET_CLIENT_ORDINAL = 0;
    private static final int PUSH_ORDINAL = 1;
    private static final int CANCEL_PUSH_ORDINAL = 2;
    private static final int WATCH_ORDINAL = 3;
    private static final int CANCEL_WATCH_ORDINAL = 4;
    private static final int CANCEL_ALL_WATCHES_ORDINAL = 5;
    private static final int SUSPEND_NFC_OPERATIONS_ORDINAL = 6;
    private static final int RESUME_NFC_OPERATIONS_ORDINAL = 7;
    /**
     * Client-side proxy: serializes each interface call into a params struct and
     * sends it over the message pipe; responses (where declared) are forwarded to
     * the supplied callback via the generated *ForwardToCallback receivers.
     */
    static final class Proxy extends org.chromium.mojo.bindings.Interface.AbstractProxy implements Nfc.Proxy {
        Proxy(org.chromium.mojo.system.Core core,
                org.chromium.mojo.bindings.MessageReceiverWithResponder messageReceiver) {
            super(core, messageReceiver);
        }

        // One-way message: no response expected, sent via accept().
        @Override
        public void setClient(
                NfcClient client) {
            NfcSetClientParams _message = new NfcSetClientParams();
            _message.client = client;
            getProxyHandler().getMessageReceiver().accept(
                    _message.serializeWithHeader(
                            getProxyHandler().getCore(),
                            new org.chromium.mojo.bindings.MessageHeader(SET_CLIENT_ORDINAL)));
        }

        // Request/response message: sent via acceptWithResponder(), the reply is
        // routed to |callback|.
        @Override
        public void push(
                NfcMessage message, NfcPushOptions options,
                PushResponse callback) {
            NfcPushParams _message = new NfcPushParams();
            _message.message = message;
            _message.options = options;
            getProxyHandler().getMessageReceiver().acceptWithResponder(
                    _message.serializeWithHeader(
                            getProxyHandler().getCore(),
                            new org.chromium.mojo.bindings.MessageHeader(
                                    PUSH_ORDINAL,
                                    org.chromium.mojo.bindings.MessageHeader.MESSAGE_EXPECTS_RESPONSE_FLAG,
                                    0)),
                    new NfcPushResponseParamsForwardToCallback(callback));
        }

        @Override
        public void cancelPush(
                int target,
                CancelPushResponse callback) {
            NfcCancelPushParams _message = new NfcCancelPushParams();
            _message.target = target;
            getProxyHandler().getMessageReceiver().acceptWithResponder(
                    _message.serializeWithHeader(
                            getProxyHandler().getCore(),
                            new org.chromium.mojo.bindings.MessageHeader(
                                    CANCEL_PUSH_ORDINAL,
                                    org.chromium.mojo.bindings.MessageHeader.MESSAGE_EXPECTS_RESPONSE_FLAG,
                                    0)),
                    new NfcCancelPushResponseParamsForwardToCallback(callback));
        }

        @Override
        public void watch(
                NfcWatchOptions options,
                WatchResponse callback) {
            NfcWatchParams _message = new NfcWatchParams();
            _message.options = options;
            getProxyHandler().getMessageReceiver().acceptWithResponder(
                    _message.serializeWithHeader(
                            getProxyHandler().getCore(),
                            new org.chromium.mojo.bindings.MessageHeader(
                                    WATCH_ORDINAL,
                                    org.chromium.mojo.bindings.MessageHeader.MESSAGE_EXPECTS_RESPONSE_FLAG,
                                    0)),
                    new NfcWatchResponseParamsForwardToCallback(callback));
        }

        @Override
        public void cancelWatch(
                int id,
                CancelWatchResponse callback) {
            NfcCancelWatchParams _message = new NfcCancelWatchParams();
            _message.id = id;
            getProxyHandler().getMessageReceiver().acceptWithResponder(
                    _message.serializeWithHeader(
                            getProxyHandler().getCore(),
                            new org.chromium.mojo.bindings.MessageHeader(
                                    CANCEL_WATCH_ORDINAL,
                                    org.chromium.mojo.bindings.MessageHeader.MESSAGE_EXPECTS_RESPONSE_FLAG,
                                    0)),
                    new NfcCancelWatchResponseParamsForwardToCallback(callback));
        }

        @Override
        public void cancelAllWatches(
                CancelAllWatchesResponse callback) {
            NfcCancelAllWatchesParams _message = new NfcCancelAllWatchesParams();
            getProxyHandler().getMessageReceiver().acceptWithResponder(
                    _message.serializeWithHeader(
                            getProxyHandler().getCore(),
                            new org.chromium.mojo.bindings.MessageHeader(
                                    CANCEL_ALL_WATCHES_ORDINAL,
                                    org.chromium.mojo.bindings.MessageHeader.MESSAGE_EXPECTS_RESPONSE_FLAG,
                                    0)),
                    new NfcCancelAllWatchesResponseParamsForwardToCallback(callback));
        }

        // One-way message, empty params struct.
        @Override
        public void suspendNfcOperations(
                ) {
            NfcSuspendNfcOperationsParams _message = new NfcSuspendNfcOperationsParams();
            getProxyHandler().getMessageReceiver().accept(
                    _message.serializeWithHeader(
                            getProxyHandler().getCore(),
                            new org.chromium.mojo.bindings.MessageHeader(SUSPEND_NFC_OPERATIONS_ORDINAL)));
        }

        // One-way message, empty params struct.
        @Override
        public void resumeNfcOperations(
                ) {
            NfcResumeNfcOperationsParams _message = new NfcResumeNfcOperationsParams();
            getProxyHandler().getMessageReceiver().accept(
                    _message.serializeWithHeader(
                            getProxyHandler().getCore(),
                            new org.chromium.mojo.bindings.MessageHeader(RESUME_NFC_OPERATIONS_ORDINAL)));
        }
    }
    /**
     * Service-side stub: validates and deserializes incoming messages, then
     * dispatches them to the wrapped Nfc implementation by ordinal.
     */
    static final class Stub extends org.chromium.mojo.bindings.Interface.Stub<Nfc> {
        Stub(org.chromium.mojo.system.Core core, Nfc impl) {
            super(core, impl);
        }

        // Handles one-way messages (header must carry NO_FLAG); returns false for
        // anything malformed or unknown so the pipe can be closed.
        @Override
        public boolean accept(org.chromium.mojo.bindings.Message message) {
            try {
                org.chromium.mojo.bindings.ServiceMessage messageWithHeader =
                        message.asServiceMessage();
                org.chromium.mojo.bindings.MessageHeader header = messageWithHeader.getHeader();
                if (!header.validateHeader(org.chromium.mojo.bindings.MessageHeader.NO_FLAG)) {
                    return false;
                }
                switch(header.getType()) {
                    case org.chromium.mojo.bindings.interfacecontrol.InterfaceControlMessagesConstants.RUN_OR_CLOSE_PIPE_MESSAGE_ID:
                        return org.chromium.mojo.bindings.InterfaceControlMessagesHelper.handleRunOrClosePipe(
                                Nfc_Internal.MANAGER, messageWithHeader);
                    case SET_CLIENT_ORDINAL: {
                        NfcSetClientParams data =
                                NfcSetClientParams.deserialize(messageWithHeader.getPayload());
                        getImpl().setClient(data.client);
                        return true;
                    }
                    case SUSPEND_NFC_OPERATIONS_ORDINAL: {
                        // Empty params struct; deserialized only for validation.
                        NfcSuspendNfcOperationsParams.deserialize(messageWithHeader.getPayload());
                        getImpl().suspendNfcOperations();
                        return true;
                    }
                    case RESUME_NFC_OPERATIONS_ORDINAL: {
                        NfcResumeNfcOperationsParams.deserialize(messageWithHeader.getPayload());
                        getImpl().resumeNfcOperations();
                        return true;
                    }
                    default:
                        return false;
                }
            } catch (org.chromium.mojo.bindings.DeserializationException e) {
                System.err.println(e.toString());
                return false;
            }
        }

        // Handles request/response messages (header must carry
        // MESSAGE_EXPECTS_RESPONSE_FLAG); the implementation replies through the
        // generated *ProxyToResponder bound to the request id.
        @Override
        public boolean acceptWithResponder(org.chromium.mojo.bindings.Message message, org.chromium.mojo.bindings.MessageReceiver receiver) {
            try {
                org.chromium.mojo.bindings.ServiceMessage messageWithHeader =
                        message.asServiceMessage();
                org.chromium.mojo.bindings.MessageHeader header = messageWithHeader.getHeader();
                if (!header.validateHeader(org.chromium.mojo.bindings.MessageHeader.MESSAGE_EXPECTS_RESPONSE_FLAG)) {
                    return false;
                }
                switch(header.getType()) {
                    case org.chromium.mojo.bindings.interfacecontrol.InterfaceControlMessagesConstants.RUN_MESSAGE_ID:
                        return org.chromium.mojo.bindings.InterfaceControlMessagesHelper.handleRun(
                                getCore(), Nfc_Internal.MANAGER, messageWithHeader, receiver);
                    case PUSH_ORDINAL: {
                        NfcPushParams data =
                                NfcPushParams.deserialize(messageWithHeader.getPayload());
                        getImpl().push(data.message, data.options, new NfcPushResponseParamsProxyToResponder(getCore(), receiver, header.getRequestId()));
                        return true;
                    }
                    case CANCEL_PUSH_ORDINAL: {
                        NfcCancelPushParams data =
                                NfcCancelPushParams.deserialize(messageWithHeader.getPayload());
                        getImpl().cancelPush(data.target, new NfcCancelPushResponseParamsProxyToResponder(getCore(), receiver, header.getRequestId()));
                        return true;
                    }
                    case WATCH_ORDINAL: {
                        NfcWatchParams data =
                                NfcWatchParams.deserialize(messageWithHeader.getPayload());
                        getImpl().watch(data.options, new NfcWatchResponseParamsProxyToResponder(getCore(), receiver, header.getRequestId()));
                        return true;
                    }
                    case CANCEL_WATCH_ORDINAL: {
                        NfcCancelWatchParams data =
                                NfcCancelWatchParams.deserialize(messageWithHeader.getPayload());
                        getImpl().cancelWatch(data.id, new NfcCancelWatchResponseParamsProxyToResponder(getCore(), receiver, header.getRequestId()));
                        return true;
                    }
                    case CANCEL_ALL_WATCHES_ORDINAL: {
                        NfcCancelAllWatchesParams.deserialize(messageWithHeader.getPayload());
                        getImpl().cancelAllWatches(new NfcCancelAllWatchesResponseParamsProxyToResponder(getCore(), receiver, header.getRequestId()));
                        return true;
                    }
                    default:
                        return false;
                }
            } catch (org.chromium.mojo.bindings.DeserializationException e) {
                System.err.println(e.toString());
                return false;
            }
        }
    }
    /** Wire representation of the parameters of Nfc.SetClient(). */
    static final class NfcSetClientParams extends org.chromium.mojo.bindings.Struct {
        // Known wire versions of this struct: version 0 has size 16.
        private static final int STRUCT_SIZE = 16;
        private static final org.chromium.mojo.bindings.DataHeader[] VERSION_ARRAY = new org.chromium.mojo.bindings.DataHeader[] {new org.chromium.mojo.bindings.DataHeader(16, 0)};
        private static final org.chromium.mojo.bindings.DataHeader DEFAULT_STRUCT_INFO = VERSION_ARRAY[0];
        public NfcClient client;

        private NfcSetClientParams(int version) {
            super(STRUCT_SIZE, version);
        }

        public NfcSetClientParams() {
            this(0);
        }

        public static NfcSetClientParams deserialize(org.chromium.mojo.bindings.Message message) {
            return decode(new org.chromium.mojo.bindings.Decoder(message));
        }

        /**
         * Similar to the method above, but deserializes from a |ByteBuffer| instance.
         *
         * @throws org.chromium.mojo.bindings.DeserializationException on deserialization failure.
         */
        public static NfcSetClientParams deserialize(java.nio.ByteBuffer data) {
            if (data == null)
                return null;
            return deserialize(new org.chromium.mojo.bindings.Message(
                    data, new java.util.ArrayList<org.chromium.mojo.system.Handle>()));
        }

        // Decodes the struct from the wire: |client| is an interface reference
        // stored at byte offset 8, marked non-nullable (second argument false).
        @SuppressWarnings("unchecked")
        public static NfcSetClientParams decode(org.chromium.mojo.bindings.Decoder decoder0) {
            if (decoder0 == null) {
                return null;
            }
            decoder0.increaseStackDepth();
            NfcSetClientParams result;
            try {
                org.chromium.mojo.bindings.DataHeader mainDataHeader = decoder0.readAndValidateDataHeader(VERSION_ARRAY);
                result = new NfcSetClientParams(mainDataHeader.elementsOrVersion);
                if (mainDataHeader.elementsOrVersion >= 0) {
                    result.client = decoder0.readServiceInterface(8, false, NfcClient.MANAGER);
                }
            } finally {
                decoder0.decreaseStackDepth();
            }
            return result;
        }

        // Encodes the struct to the wire at the same offset used by decode().
        @SuppressWarnings("unchecked")
        @Override
        protected final void encode(org.chromium.mojo.bindings.Encoder encoder) {
            org.chromium.mojo.bindings.Encoder encoder0 = encoder.getEncoderAtDataOffset(DEFAULT_STRUCT_INFO);
            encoder0.encode(client, 8, false, NfcClient.MANAGER);
        }

        /**
         * @see Object#equals(Object)
         */
        @Override
        public boolean equals(Object object) {
            if (object == this)
                return true;
            if (object == null)
                return false;
            if (getClass() != object.getClass())
                return false;
            NfcSetClientParams other = (NfcSetClientParams) object;
            if (!org.chromium.mojo.bindings.BindingsHelper.equals(this.client, other.client))
                return false;
            return true;
        }

        /**
         * @see Object#hashCode()
         */
        @Override
        public int hashCode() {
            final int prime = 31;
            int result = prime + getClass().hashCode();
            result = prime * result + org.chromium.mojo.bindings.BindingsHelper.hashCode(client);
            return result;
        }
    }
    /** Wire representation of the parameters of Nfc.Push(). */
    static final class NfcPushParams extends org.chromium.mojo.bindings.Struct {
        // Known wire versions of this struct: version 0 has size 24.
        private static final int STRUCT_SIZE = 24;
        private static final org.chromium.mojo.bindings.DataHeader[] VERSION_ARRAY = new org.chromium.mojo.bindings.DataHeader[] {new org.chromium.mojo.bindings.DataHeader(24, 0)};
        private static final org.chromium.mojo.bindings.DataHeader DEFAULT_STRUCT_INFO = VERSION_ARRAY[0];
        public NfcMessage message;
        public NfcPushOptions options;

        private NfcPushParams(int version) {
            super(STRUCT_SIZE, version);
        }

        public NfcPushParams() {
            this(0);
        }

        public static NfcPushParams deserialize(org.chromium.mojo.bindings.Message message) {
            return decode(new org.chromium.mojo.bindings.Decoder(message));
        }

        /**
         * Similar to the method above, but deserializes from a |ByteBuffer| instance.
         *
         * @throws org.chromium.mojo.bindings.DeserializationException on deserialization failure.
         */
        public static NfcPushParams deserialize(java.nio.ByteBuffer data) {
            if (data == null)
                return null;
            return deserialize(new org.chromium.mojo.bindings.Message(
                    data, new java.util.ArrayList<org.chromium.mojo.system.Handle>()));
        }

        // Decodes the struct: |message| is a pointer at offset 8 (non-nullable),
        // |options| a pointer at offset 16 (second argument true — the field may
        // be absent on the wire).
        @SuppressWarnings("unchecked")
        public static NfcPushParams decode(org.chromium.mojo.bindings.Decoder decoder0) {
            if (decoder0 == null) {
                return null;
            }
            decoder0.increaseStackDepth();
            NfcPushParams result;
            try {
                org.chromium.mojo.bindings.DataHeader mainDataHeader = decoder0.readAndValidateDataHeader(VERSION_ARRAY);
                result = new NfcPushParams(mainDataHeader.elementsOrVersion);
                if (mainDataHeader.elementsOrVersion >= 0) {
                    org.chromium.mojo.bindings.Decoder decoder1 = decoder0.readPointer(8, false);
                    result.message = NfcMessage.decode(decoder1);
                }
                if (mainDataHeader.elementsOrVersion >= 0) {
                    org.chromium.mojo.bindings.Decoder decoder1 = decoder0.readPointer(16, true);
                    result.options = NfcPushOptions.decode(decoder1);
                }
            } finally {
                decoder0.decreaseStackDepth();
            }
            return result;
        }

        // Encodes the struct at the same offsets/nullability used by decode().
        @SuppressWarnings("unchecked")
        @Override
        protected final void encode(org.chromium.mojo.bindings.Encoder encoder) {
            org.chromium.mojo.bindings.Encoder encoder0 = encoder.getEncoderAtDataOffset(DEFAULT_STRUCT_INFO);
            encoder0.encode(message, 8, false);
            encoder0.encode(options, 16, true);
        }

        /**
         * @see Object#equals(Object)
         */
        @Override
        public boolean equals(Object object) {
            if (object == this)
                return true;
            if (object == null)
                return false;
            if (getClass() != object.getClass())
                return false;
            NfcPushParams other = (NfcPushParams) object;
            if (!org.chromium.mojo.bindings.BindingsHelper.equals(this.message, other.message))
                return false;
            if (!org.chromium.mojo.bindings.BindingsHelper.equals(this.options, other.options))
                return false;
            return true;
        }

        /**
         * @see Object#hashCode()
         */
        @Override
        public int hashCode() {
            final int prime = 31;
            int result = prime + getClass().hashCode();
            result = prime * result + org.chromium.mojo.bindings.BindingsHelper.hashCode(message);
            result = prime * result + org.chromium.mojo.bindings.BindingsHelper.hashCode(options);
            return result;
        }
    }
static final class NfcPushResponseParams extends org.chromium.mojo.bindings.Struct {
private static final int STRUCT_SIZE = 16;
private static final org.chromium.mojo.bindings.DataHeader[] VERSION_ARRAY = new org.chromium.mojo.bindings.DataHeader[] {new org.chromium.mojo.bindings.DataHeader(16, 0)};
private static final org.chromium.mojo.bindings.DataHeader DEFAULT_STRUCT_INFO = VERSION_ARRAY[0];
public NfcError error;
private NfcPushResponseParams(int version) {
super(STRUCT_SIZE, version);
}
public NfcPushResponseParams() {
this(0);
}
public static NfcPushResponseParams deserialize(org.chromium.mojo.bindings.Message message) {
return decode(new org.chromium.mojo.bindings.Decoder(message));
}
/**
* Similar to the method above, but deserializes from a |ByteBuffer| instance.
*
* @throws org.chromium.mojo.bindings.DeserializationException on deserialization failure.
*/
public static NfcPushResponseParams deserialize(java.nio.ByteBuffer data) {
if (data == null)
return null;
return deserialize(new org.chromium.mojo.bindings.Message(
data, new java.util.ArrayList<org.chromium.mojo.system.Handle>()));
}
@SuppressWarnings("unchecked")
public static NfcPushResponseParams decode(org.chromium.mojo.bindings.Decoder decoder0) {
if (decoder0 == null) {
return null;
}
decoder0.increaseStackDepth();
NfcPushResponseParams result;
try {
org.chromium.mojo.bindings.DataHeader mainDataHeader = decoder0.readAndValidateDataHeader(VERSION_ARRAY);
result = new NfcPushResponseParams(mainDataHeader.elementsOrVersion);
if (mainDataHeader.elementsOrVersion >= 0) {
org.chromium.mojo.bindings.Decoder decoder1 = decoder0.readPointer(8, true);
result.error = NfcError.decode(decoder1);
}
} finally {
decoder0.decreaseStackDepth();
}
return result;
}
@SuppressWarnings("unchecked")
@Override
protected final void encode(org.chromium.mojo.bindings.Encoder encoder) {
org.chromium.mojo.bindings.Encoder encoder0 = encoder.getEncoderAtDataOffset(DEFAULT_STRUCT_INFO);
encoder0.encode(error, 8, true);
}
/**
* @see Object#equals(Object)
*/
@Override
public boolean equals(Object object) {
if (object == this)
return true;
if (object == null)
return false;
if (getClass() != object.getClass())
return false;
NfcPushResponseParams other = (NfcPushResponseParams) object;
if (!org.chromium.mojo.bindings.BindingsHelper.equals(this.error, other.error))
return false;
return true;
}
/**
* @see Object#hashCode()
*/
@Override
public int hashCode() {
final int prime = 31;
int result = prime + getClass().hashCode();
result = prime * result + org.chromium.mojo.bindings.BindingsHelper.hashCode(error);
return result;
}
}
/**
 * Message receiver that validates an incoming response message against
 * {@code PUSH_ORDINAL} and forwards its decoded payload to a {@link Nfc.PushResponse}.
 */
static class NfcPushResponseParamsForwardToCallback extends org.chromium.mojo.bindings.SideEffectFreeCloseable
        implements org.chromium.mojo.bindings.MessageReceiver {
    private final Nfc.PushResponse mCallback;
    NfcPushResponseParamsForwardToCallback(Nfc.PushResponse callback) {
        this.mCallback = callback;
    }
    // Returns false on a header mismatch or a payload that fails to deserialize.
    @Override
    public boolean accept(org.chromium.mojo.bindings.Message message) {
        try {
            org.chromium.mojo.bindings.ServiceMessage serviceMessage = message.asServiceMessage();
            if (!serviceMessage.getHeader().validateHeader(PUSH_ORDINAL,
                    org.chromium.mojo.bindings.MessageHeader.MESSAGE_IS_RESPONSE_FLAG)) {
                return false;
            }
            NfcPushResponseParams params =
                    NfcPushResponseParams.deserialize(serviceMessage.getPayload());
            mCallback.call(params.error);
            return true;
        } catch (org.chromium.mojo.bindings.DeserializationException e) {
            return false;
        }
    }
}
/**
 * {@link Nfc.PushResponse} that serializes the response parameters with a
 * {@code PUSH_ORDINAL} response header and hands the message to a receiver.
 */
static class NfcPushResponseParamsProxyToResponder implements Nfc.PushResponse {
    private final org.chromium.mojo.system.Core mCore;
    private final org.chromium.mojo.bindings.MessageReceiver mMessageReceiver;
    private final long mRequestId;
    NfcPushResponseParamsProxyToResponder(
            org.chromium.mojo.system.Core core,
            org.chromium.mojo.bindings.MessageReceiver messageReceiver,
            long requestId) {
        mCore = core;
        mMessageReceiver = messageReceiver;
        mRequestId = requestId;
    }
    @Override
    public void call(NfcError error) {
        NfcPushResponseParams params = new NfcPushResponseParams();
        params.error = error;
        // Echo the original request id so the caller can match the response.
        org.chromium.mojo.bindings.MessageHeader header =
                new org.chromium.mojo.bindings.MessageHeader(
                        PUSH_ORDINAL,
                        org.chromium.mojo.bindings.MessageHeader.MESSAGE_IS_RESPONSE_FLAG,
                        mRequestId);
        mMessageReceiver.accept(params.serializeWithHeader(mCore, header));
    }
}
/**
 * Wire-format struct carrying a single {@code target} int (offset 8, validated
 * against {@code NfcPushTarget}; 16-byte struct, version 0).
 */
static final class NfcCancelPushParams extends org.chromium.mojo.bindings.Struct {
    private static final int STRUCT_SIZE = 16;
    private static final org.chromium.mojo.bindings.DataHeader[] VERSION_ARRAY =
            new org.chromium.mojo.bindings.DataHeader[] {
                    new org.chromium.mojo.bindings.DataHeader(16, 0)};
    private static final org.chromium.mojo.bindings.DataHeader DEFAULT_STRUCT_INFO = VERSION_ARRAY[0];
    public int target;
    // Deserialization constructor: |version| comes from the decoded data header.
    private NfcCancelPushParams(int version) {
        super(STRUCT_SIZE, version);
    }
    public NfcCancelPushParams() {
        this(0);
    }
    public static NfcCancelPushParams deserialize(org.chromium.mojo.bindings.Message message) {
        return decode(new org.chromium.mojo.bindings.Decoder(message));
    }
    /**
     * Similar to the method above, but deserializes from a |ByteBuffer| instance.
     *
     * @throws org.chromium.mojo.bindings.DeserializationException on deserialization failure.
     */
    public static NfcCancelPushParams deserialize(java.nio.ByteBuffer data) {
        if (data == null) {
            return null;
        }
        org.chromium.mojo.bindings.Message message = new org.chromium.mojo.bindings.Message(
                data, new java.util.ArrayList<org.chromium.mojo.system.Handle>());
        return deserialize(message);
    }
    @SuppressWarnings("unchecked")
    public static NfcCancelPushParams decode(org.chromium.mojo.bindings.Decoder decoder) {
        if (decoder == null) {
            return null;
        }
        decoder.increaseStackDepth();
        try {
            org.chromium.mojo.bindings.DataHeader header =
                    decoder.readAndValidateDataHeader(VERSION_ARRAY);
            NfcCancelPushParams out = new NfcCancelPushParams(header.elementsOrVersion);
            if (header.elementsOrVersion >= 0) {
                out.target = decoder.readInt(8);
                // Reject values outside the NfcPushTarget enum.
                NfcPushTarget.validate(out.target);
            }
            return out;
        } finally {
            decoder.decreaseStackDepth();
        }
    }
    @SuppressWarnings("unchecked")
    @Override
    protected final void encode(org.chromium.mojo.bindings.Encoder encoder) {
        encoder.getEncoderAtDataOffset(DEFAULT_STRUCT_INFO).encode(target, 8);
    }
    /**
     * @see Object#equals(Object)
     */
    @Override
    public boolean equals(Object object) {
        if (object == this) {
            return true;
        }
        if (object == null || getClass() != object.getClass()) {
            return false;
        }
        NfcCancelPushParams other = (NfcCancelPushParams) object;
        return this.target == other.target;
    }
    /**
     * @see Object#hashCode()
     */
    @Override
    public int hashCode() {
        int hash = 31 + getClass().hashCode();
        return 31 * hash + org.chromium.mojo.bindings.BindingsHelper.hashCode(target);
    }
}
static final class NfcCancelPushResponseParams extends org.chromium.mojo.bindings.Struct {
private static final int STRUCT_SIZE = 16;
private static final org.chromium.mojo.bindings.DataHeader[] VERSION_ARRAY = new org.chromium.mojo.bindings.DataHeader[] {new org.chromium.mojo.bindings.DataHeader(16, 0)};
private static final org.chromium.mojo.bindings.DataHeader DEFAULT_STRUCT_INFO = VERSION_ARRAY[0];
public NfcError error;
private NfcCancelPushResponseParams(int version) {
super(STRUCT_SIZE, version);
}
public NfcCancelPushResponseParams() {
this(0);
}
public static NfcCancelPushResponseParams deserialize(org.chromium.mojo.bindings.Message message) {
return decode(new org.chromium.mojo.bindings.Decoder(message));
}
/**
* Similar to the method above, but deserializes from a |ByteBuffer| instance.
*
* @throws org.chromium.mojo.bindings.DeserializationException on deserialization failure.
*/
public static NfcCancelPushResponseParams deserialize(java.nio.ByteBuffer data) {
if (data == null)
return null;
return deserialize(new org.chromium.mojo.bindings.Message(
data, new java.util.ArrayList<org.chromium.mojo.system.Handle>()));
}
@SuppressWarnings("unchecked")
public static NfcCancelPushResponseParams decode(org.chromium.mojo.bindings.Decoder decoder0) {
if (decoder0 == null) {
return null;
}
decoder0.increaseStackDepth();
NfcCancelPushResponseParams result;
try {
org.chromium.mojo.bindings.DataHeader mainDataHeader = decoder0.readAndValidateDataHeader(VERSION_ARRAY);
result = new NfcCancelPushResponseParams(mainDataHeader.elementsOrVersion);
if (mainDataHeader.elementsOrVersion >= 0) {
org.chromium.mojo.bindings.Decoder decoder1 = decoder0.readPointer(8, true);
result.error = NfcError.decode(decoder1);
}
} finally {
decoder0.decreaseStackDepth();
}
return result;
}
@SuppressWarnings("unchecked")
@Override
protected final void encode(org.chromium.mojo.bindings.Encoder encoder) {
org.chromium.mojo.bindings.Encoder encoder0 = encoder.getEncoderAtDataOffset(DEFAULT_STRUCT_INFO);
encoder0.encode(error, 8, true);
}
/**
* @see Object#equals(Object)
*/
@Override
public boolean equals(Object object) {
if (object == this)
return true;
if (object == null)
return false;
if (getClass() != object.getClass())
return false;
NfcCancelPushResponseParams other = (NfcCancelPushResponseParams) object;
if (!org.chromium.mojo.bindings.BindingsHelper.equals(this.error, other.error))
return false;
return true;
}
/**
* @see Object#hashCode()
*/
@Override
public int hashCode() {
final int prime = 31;
int result = prime + getClass().hashCode();
result = prime * result + org.chromium.mojo.bindings.BindingsHelper.hashCode(error);
return result;
}
}
/**
 * Message receiver that validates an incoming response message against
 * {@code CANCEL_PUSH_ORDINAL} and forwards its decoded payload to a
 * {@link Nfc.CancelPushResponse}.
 */
static class NfcCancelPushResponseParamsForwardToCallback extends org.chromium.mojo.bindings.SideEffectFreeCloseable
        implements org.chromium.mojo.bindings.MessageReceiver {
    private final Nfc.CancelPushResponse mCallback;
    NfcCancelPushResponseParamsForwardToCallback(Nfc.CancelPushResponse callback) {
        this.mCallback = callback;
    }
    // Returns false on a header mismatch or a payload that fails to deserialize.
    @Override
    public boolean accept(org.chromium.mojo.bindings.Message message) {
        try {
            org.chromium.mojo.bindings.ServiceMessage serviceMessage = message.asServiceMessage();
            if (!serviceMessage.getHeader().validateHeader(CANCEL_PUSH_ORDINAL,
                    org.chromium.mojo.bindings.MessageHeader.MESSAGE_IS_RESPONSE_FLAG)) {
                return false;
            }
            NfcCancelPushResponseParams params =
                    NfcCancelPushResponseParams.deserialize(serviceMessage.getPayload());
            mCallback.call(params.error);
            return true;
        } catch (org.chromium.mojo.bindings.DeserializationException e) {
            return false;
        }
    }
}
/**
 * {@link Nfc.CancelPushResponse} that serializes the response parameters with a
 * {@code CANCEL_PUSH_ORDINAL} response header and hands the message to a receiver.
 */
static class NfcCancelPushResponseParamsProxyToResponder implements Nfc.CancelPushResponse {
    private final org.chromium.mojo.system.Core mCore;
    private final org.chromium.mojo.bindings.MessageReceiver mMessageReceiver;
    private final long mRequestId;
    NfcCancelPushResponseParamsProxyToResponder(
            org.chromium.mojo.system.Core core,
            org.chromium.mojo.bindings.MessageReceiver messageReceiver,
            long requestId) {
        mCore = core;
        mMessageReceiver = messageReceiver;
        mRequestId = requestId;
    }
    @Override
    public void call(NfcError error) {
        NfcCancelPushResponseParams params = new NfcCancelPushResponseParams();
        params.error = error;
        // Echo the original request id so the caller can match the response.
        org.chromium.mojo.bindings.MessageHeader header =
                new org.chromium.mojo.bindings.MessageHeader(
                        CANCEL_PUSH_ORDINAL,
                        org.chromium.mojo.bindings.MessageHeader.MESSAGE_IS_RESPONSE_FLAG,
                        mRequestId);
        mMessageReceiver.accept(params.serializeWithHeader(mCore, header));
    }
}
/**
 * Wire-format struct carrying a single non-nullable {@code options} field (struct
 * pointer at offset 8, 16-byte struct, version 0).
 */
static final class NfcWatchParams extends org.chromium.mojo.bindings.Struct {
    private static final int STRUCT_SIZE = 16;
    private static final org.chromium.mojo.bindings.DataHeader[] VERSION_ARRAY =
            new org.chromium.mojo.bindings.DataHeader[] {
                    new org.chromium.mojo.bindings.DataHeader(16, 0)};
    private static final org.chromium.mojo.bindings.DataHeader DEFAULT_STRUCT_INFO = VERSION_ARRAY[0];
    public NfcWatchOptions options;
    // Deserialization constructor: |version| comes from the decoded data header.
    private NfcWatchParams(int version) {
        super(STRUCT_SIZE, version);
    }
    public NfcWatchParams() {
        this(0);
    }
    public static NfcWatchParams deserialize(org.chromium.mojo.bindings.Message message) {
        return decode(new org.chromium.mojo.bindings.Decoder(message));
    }
    /**
     * Similar to the method above, but deserializes from a |ByteBuffer| instance.
     *
     * @throws org.chromium.mojo.bindings.DeserializationException on deserialization failure.
     */
    public static NfcWatchParams deserialize(java.nio.ByteBuffer data) {
        if (data == null) {
            return null;
        }
        org.chromium.mojo.bindings.Message message = new org.chromium.mojo.bindings.Message(
                data, new java.util.ArrayList<org.chromium.mojo.system.Handle>());
        return deserialize(message);
    }
    @SuppressWarnings("unchecked")
    public static NfcWatchParams decode(org.chromium.mojo.bindings.Decoder decoder) {
        if (decoder == null) {
            return null;
        }
        decoder.increaseStackDepth();
        try {
            org.chromium.mojo.bindings.DataHeader header =
                    decoder.readAndValidateDataHeader(VERSION_ARRAY);
            NfcWatchParams out = new NfcWatchParams(header.elementsOrVersion);
            if (header.elementsOrVersion >= 0) {
                // |options| is a non-nullable struct pointer at offset 8.
                out.options = NfcWatchOptions.decode(decoder.readPointer(8, false));
            }
            return out;
        } finally {
            decoder.decreaseStackDepth();
        }
    }
    @SuppressWarnings("unchecked")
    @Override
    protected final void encode(org.chromium.mojo.bindings.Encoder encoder) {
        encoder.getEncoderAtDataOffset(DEFAULT_STRUCT_INFO).encode(options, 8, false);
    }
    /**
     * @see Object#equals(Object)
     */
    @Override
    public boolean equals(Object object) {
        if (object == this) {
            return true;
        }
        if (object == null || getClass() != object.getClass()) {
            return false;
        }
        NfcWatchParams other = (NfcWatchParams) object;
        return org.chromium.mojo.bindings.BindingsHelper.equals(this.options, other.options);
    }
    /**
     * @see Object#hashCode()
     */
    @Override
    public int hashCode() {
        int hash = 31 + getClass().hashCode();
        return 31 * hash + org.chromium.mojo.bindings.BindingsHelper.hashCode(options);
    }
}
static final class NfcWatchResponseParams extends org.chromium.mojo.bindings.Struct {
private static final int STRUCT_SIZE = 24;
private static final org.chromium.mojo.bindings.DataHeader[] VERSION_ARRAY = new org.chromium.mojo.bindings.DataHeader[] {new org.chromium.mojo.bindings.DataHeader(24, 0)};
private static final org.chromium.mojo.bindings.DataHeader DEFAULT_STRUCT_INFO = VERSION_ARRAY[0];
public int id;
public NfcError error;
private NfcWatchResponseParams(int version) {
super(STRUCT_SIZE, version);
}
public NfcWatchResponseParams() {
this(0);
}
public static NfcWatchResponseParams deserialize(org.chromium.mojo.bindings.Message message) {
return decode(new org.chromium.mojo.bindings.Decoder(message));
}
/**
* Similar to the method above, but deserializes from a |ByteBuffer| instance.
*
* @throws org.chromium.mojo.bindings.DeserializationException on deserialization failure.
*/
public static NfcWatchResponseParams deserialize(java.nio.ByteBuffer data) {
if (data == null)
return null;
return deserialize(new org.chromium.mojo.bindings.Message(
data, new java.util.ArrayList<org.chromium.mojo.system.Handle>()));
}
@SuppressWarnings("unchecked")
public static NfcWatchResponseParams decode(org.chromium.mojo.bindings.Decoder decoder0) {
if (decoder0 == null) {
return null;
}
decoder0.increaseStackDepth();
NfcWatchResponseParams result;
try {
org.chromium.mojo.bindings.DataHeader mainDataHeader = decoder0.readAndValidateDataHeader(VERSION_ARRAY);
result = new NfcWatchResponseParams(mainDataHeader.elementsOrVersion);
if (mainDataHeader.elementsOrVersion >= 0) {
result.id = decoder0.readInt(8);
}
if (mainDataHeader.elementsOrVersion >= 0) {
org.chromium.mojo.bindings.Decoder decoder1 = decoder0.readPointer(16, true);
result.error = NfcError.decode(decoder1);
}
} finally {
decoder0.decreaseStackDepth();
}
return result;
}
@SuppressWarnings("unchecked")
@Override
protected final void encode(org.chromium.mojo.bindings.Encoder encoder) {
org.chromium.mojo.bindings.Encoder encoder0 = encoder.getEncoderAtDataOffset(DEFAULT_STRUCT_INFO);
encoder0.encode(id, 8);
encoder0.encode(error, 16, true);
}
/**
* @see Object#equals(Object)
*/
@Override
public boolean equals(Object object) {
if (object == this)
return true;
if (object == null)
return false;
if (getClass() != object.getClass())
return false;
NfcWatchResponseParams other = (NfcWatchResponseParams) object;
if (this.id!= other.id)
return false;
if (!org.chromium.mojo.bindings.BindingsHelper.equals(this.error, other.error))
return false;
return true;
}
/**
* @see Object#hashCode()
*/
@Override
public int hashCode() {
final int prime = 31;
int result = prime + getClass().hashCode();
result = prime * result + org.chromium.mojo.bindings.BindingsHelper.hashCode(id);
result = prime * result + org.chromium.mojo.bindings.BindingsHelper.hashCode(error);
return result;
}
}
/**
 * Message receiver that validates an incoming response message against
 * {@code WATCH_ORDINAL} and forwards its decoded payload to a {@link Nfc.WatchResponse}.
 */
static class NfcWatchResponseParamsForwardToCallback extends org.chromium.mojo.bindings.SideEffectFreeCloseable
        implements org.chromium.mojo.bindings.MessageReceiver {
    private final Nfc.WatchResponse mCallback;
    NfcWatchResponseParamsForwardToCallback(Nfc.WatchResponse callback) {
        this.mCallback = callback;
    }
    // Returns false on a header mismatch or a payload that fails to deserialize.
    @Override
    public boolean accept(org.chromium.mojo.bindings.Message message) {
        try {
            org.chromium.mojo.bindings.ServiceMessage serviceMessage = message.asServiceMessage();
            if (!serviceMessage.getHeader().validateHeader(WATCH_ORDINAL,
                    org.chromium.mojo.bindings.MessageHeader.MESSAGE_IS_RESPONSE_FLAG)) {
                return false;
            }
            NfcWatchResponseParams params =
                    NfcWatchResponseParams.deserialize(serviceMessage.getPayload());
            mCallback.call(params.id, params.error);
            return true;
        } catch (org.chromium.mojo.bindings.DeserializationException e) {
            return false;
        }
    }
}
/**
 * {@link Nfc.WatchResponse} that serializes the response parameters with a
 * {@code WATCH_ORDINAL} response header and hands the message to a receiver.
 */
static class NfcWatchResponseParamsProxyToResponder implements Nfc.WatchResponse {
    private final org.chromium.mojo.system.Core mCore;
    private final org.chromium.mojo.bindings.MessageReceiver mMessageReceiver;
    private final long mRequestId;
    NfcWatchResponseParamsProxyToResponder(
            org.chromium.mojo.system.Core core,
            org.chromium.mojo.bindings.MessageReceiver messageReceiver,
            long requestId) {
        mCore = core;
        mMessageReceiver = messageReceiver;
        mRequestId = requestId;
    }
    @Override
    public void call(Integer id, NfcError error) {
        NfcWatchResponseParams params = new NfcWatchResponseParams();
        params.id = id;  // auto-unboxed, matching the struct's int field
        params.error = error;
        // Echo the original request id so the caller can match the response.
        org.chromium.mojo.bindings.MessageHeader header =
                new org.chromium.mojo.bindings.MessageHeader(
                        WATCH_ORDINAL,
                        org.chromium.mojo.bindings.MessageHeader.MESSAGE_IS_RESPONSE_FLAG,
                        mRequestId);
        mMessageReceiver.accept(params.serializeWithHeader(mCore, header));
    }
}
/**
 * Wire-format struct carrying a single {@code id} int (offset 8; 16-byte struct,
 * version 0).
 */
static final class NfcCancelWatchParams extends org.chromium.mojo.bindings.Struct {
    private static final int STRUCT_SIZE = 16;
    private static final org.chromium.mojo.bindings.DataHeader[] VERSION_ARRAY =
            new org.chromium.mojo.bindings.DataHeader[] {
                    new org.chromium.mojo.bindings.DataHeader(16, 0)};
    private static final org.chromium.mojo.bindings.DataHeader DEFAULT_STRUCT_INFO = VERSION_ARRAY[0];
    public int id;
    // Deserialization constructor: |version| comes from the decoded data header.
    private NfcCancelWatchParams(int version) {
        super(STRUCT_SIZE, version);
    }
    public NfcCancelWatchParams() {
        this(0);
    }
    public static NfcCancelWatchParams deserialize(org.chromium.mojo.bindings.Message message) {
        return decode(new org.chromium.mojo.bindings.Decoder(message));
    }
    /**
     * Similar to the method above, but deserializes from a |ByteBuffer| instance.
     *
     * @throws org.chromium.mojo.bindings.DeserializationException on deserialization failure.
     */
    public static NfcCancelWatchParams deserialize(java.nio.ByteBuffer data) {
        if (data == null) {
            return null;
        }
        org.chromium.mojo.bindings.Message message = new org.chromium.mojo.bindings.Message(
                data, new java.util.ArrayList<org.chromium.mojo.system.Handle>());
        return deserialize(message);
    }
    @SuppressWarnings("unchecked")
    public static NfcCancelWatchParams decode(org.chromium.mojo.bindings.Decoder decoder) {
        if (decoder == null) {
            return null;
        }
        decoder.increaseStackDepth();
        try {
            org.chromium.mojo.bindings.DataHeader header =
                    decoder.readAndValidateDataHeader(VERSION_ARRAY);
            NfcCancelWatchParams out = new NfcCancelWatchParams(header.elementsOrVersion);
            if (header.elementsOrVersion >= 0) {
                out.id = decoder.readInt(8);
            }
            return out;
        } finally {
            decoder.decreaseStackDepth();
        }
    }
    @SuppressWarnings("unchecked")
    @Override
    protected final void encode(org.chromium.mojo.bindings.Encoder encoder) {
        encoder.getEncoderAtDataOffset(DEFAULT_STRUCT_INFO).encode(id, 8);
    }
    /**
     * @see Object#equals(Object)
     */
    @Override
    public boolean equals(Object object) {
        if (object == this) {
            return true;
        }
        if (object == null || getClass() != object.getClass()) {
            return false;
        }
        NfcCancelWatchParams other = (NfcCancelWatchParams) object;
        return this.id == other.id;
    }
    /**
     * @see Object#hashCode()
     */
    @Override
    public int hashCode() {
        int hash = 31 + getClass().hashCode();
        return 31 * hash + org.chromium.mojo.bindings.BindingsHelper.hashCode(id);
    }
}
static final class NfcCancelWatchResponseParams extends org.chromium.mojo.bindings.Struct {
private static final int STRUCT_SIZE = 16;
private static final org.chromium.mojo.bindings.DataHeader[] VERSION_ARRAY = new org.chromium.mojo.bindings.DataHeader[] {new org.chromium.mojo.bindings.DataHeader(16, 0)};
private static final org.chromium.mojo.bindings.DataHeader DEFAULT_STRUCT_INFO = VERSION_ARRAY[0];
public NfcError error;
private NfcCancelWatchResponseParams(int version) {
super(STRUCT_SIZE, version);
}
public NfcCancelWatchResponseParams() {
this(0);
}
public static NfcCancelWatchResponseParams deserialize(org.chromium.mojo.bindings.Message message) {
return decode(new org.chromium.mojo.bindings.Decoder(message));
}
/**
* Similar to the method above, but deserializes from a |ByteBuffer| instance.
*
* @throws org.chromium.mojo.bindings.DeserializationException on deserialization failure.
*/
public static NfcCancelWatchResponseParams deserialize(java.nio.ByteBuffer data) {
if (data == null)
return null;
return deserialize(new org.chromium.mojo.bindings.Message(
data, new java.util.ArrayList<org.chromium.mojo.system.Handle>()));
}
@SuppressWarnings("unchecked")
public static NfcCancelWatchResponseParams decode(org.chromium.mojo.bindings.Decoder decoder0) {
if (decoder0 == null) {
return null;
}
decoder0.increaseStackDepth();
NfcCancelWatchResponseParams result;
try {
org.chromium.mojo.bindings.DataHeader mainDataHeader = decoder0.readAndValidateDataHeader(VERSION_ARRAY);
result = new NfcCancelWatchResponseParams(mainDataHeader.elementsOrVersion);
if (mainDataHeader.elementsOrVersion >= 0) {
org.chromium.mojo.bindings.Decoder decoder1 = decoder0.readPointer(8, true);
result.error = NfcError.decode(decoder1);
}
} finally {
decoder0.decreaseStackDepth();
}
return result;
}
@SuppressWarnings("unchecked")
@Override
protected final void encode(org.chromium.mojo.bindings.Encoder encoder) {
org.chromium.mojo.bindings.Encoder encoder0 = encoder.getEncoderAtDataOffset(DEFAULT_STRUCT_INFO);
encoder0.encode(error, 8, true);
}
/**
* @see Object#equals(Object)
*/
@Override
public boolean equals(Object object) {
if (object == this)
return true;
if (object == null)
return false;
if (getClass() != object.getClass())
return false;
NfcCancelWatchResponseParams other = (NfcCancelWatchResponseParams) object;
if (!org.chromium.mojo.bindings.BindingsHelper.equals(this.error, other.error))
return false;
return true;
}
/**
* @see Object#hashCode()
*/
@Override
public int hashCode() {
final int prime = 31;
int result = prime + getClass().hashCode();
result = prime * result + org.chromium.mojo.bindings.BindingsHelper.hashCode(error);
return result;
}
}
/**
 * Message receiver that validates an incoming response message against
 * {@code CANCEL_WATCH_ORDINAL} and forwards its decoded payload to a
 * {@link Nfc.CancelWatchResponse}.
 */
static class NfcCancelWatchResponseParamsForwardToCallback extends org.chromium.mojo.bindings.SideEffectFreeCloseable
        implements org.chromium.mojo.bindings.MessageReceiver {
    private final Nfc.CancelWatchResponse mCallback;
    NfcCancelWatchResponseParamsForwardToCallback(Nfc.CancelWatchResponse callback) {
        this.mCallback = callback;
    }
    // Returns false on a header mismatch or a payload that fails to deserialize.
    @Override
    public boolean accept(org.chromium.mojo.bindings.Message message) {
        try {
            org.chromium.mojo.bindings.ServiceMessage serviceMessage = message.asServiceMessage();
            if (!serviceMessage.getHeader().validateHeader(CANCEL_WATCH_ORDINAL,
                    org.chromium.mojo.bindings.MessageHeader.MESSAGE_IS_RESPONSE_FLAG)) {
                return false;
            }
            NfcCancelWatchResponseParams params =
                    NfcCancelWatchResponseParams.deserialize(serviceMessage.getPayload());
            mCallback.call(params.error);
            return true;
        } catch (org.chromium.mojo.bindings.DeserializationException e) {
            return false;
        }
    }
}
/**
 * {@link Nfc.CancelWatchResponse} that serializes the response parameters with a
 * {@code CANCEL_WATCH_ORDINAL} response header and hands the message to a receiver.
 */
static class NfcCancelWatchResponseParamsProxyToResponder implements Nfc.CancelWatchResponse {
    private final org.chromium.mojo.system.Core mCore;
    private final org.chromium.mojo.bindings.MessageReceiver mMessageReceiver;
    private final long mRequestId;
    NfcCancelWatchResponseParamsProxyToResponder(
            org.chromium.mojo.system.Core core,
            org.chromium.mojo.bindings.MessageReceiver messageReceiver,
            long requestId) {
        mCore = core;
        mMessageReceiver = messageReceiver;
        mRequestId = requestId;
    }
    @Override
    public void call(NfcError error) {
        NfcCancelWatchResponseParams params = new NfcCancelWatchResponseParams();
        params.error = error;
        // Echo the original request id so the caller can match the response.
        org.chromium.mojo.bindings.MessageHeader header =
                new org.chromium.mojo.bindings.MessageHeader(
                        CANCEL_WATCH_ORDINAL,
                        org.chromium.mojo.bindings.MessageHeader.MESSAGE_IS_RESPONSE_FLAG,
                        mRequestId);
        mMessageReceiver.accept(params.serializeWithHeader(mCore, header));
    }
}
/**
 * Wire-format struct with no fields (8-byte header only, version 0).
 */
static final class NfcCancelAllWatchesParams extends org.chromium.mojo.bindings.Struct {
    private static final int STRUCT_SIZE = 8;
    private static final org.chromium.mojo.bindings.DataHeader[] VERSION_ARRAY =
            new org.chromium.mojo.bindings.DataHeader[] {
                    new org.chromium.mojo.bindings.DataHeader(8, 0)};
    private static final org.chromium.mojo.bindings.DataHeader DEFAULT_STRUCT_INFO = VERSION_ARRAY[0];
    // Deserialization constructor: |version| comes from the decoded data header.
    private NfcCancelAllWatchesParams(int version) {
        super(STRUCT_SIZE, version);
    }
    public NfcCancelAllWatchesParams() {
        this(0);
    }
    public static NfcCancelAllWatchesParams deserialize(org.chromium.mojo.bindings.Message message) {
        return decode(new org.chromium.mojo.bindings.Decoder(message));
    }
    /**
     * Similar to the method above, but deserializes from a |ByteBuffer| instance.
     *
     * @throws org.chromium.mojo.bindings.DeserializationException on deserialization failure.
     */
    public static NfcCancelAllWatchesParams deserialize(java.nio.ByteBuffer data) {
        if (data == null) {
            return null;
        }
        org.chromium.mojo.bindings.Message message = new org.chromium.mojo.bindings.Message(
                data, new java.util.ArrayList<org.chromium.mojo.system.Handle>());
        return deserialize(message);
    }
    @SuppressWarnings("unchecked")
    public static NfcCancelAllWatchesParams decode(org.chromium.mojo.bindings.Decoder decoder) {
        if (decoder == null) {
            return null;
        }
        decoder.increaseStackDepth();
        try {
            org.chromium.mojo.bindings.DataHeader header =
                    decoder.readAndValidateDataHeader(VERSION_ARRAY);
            return new NfcCancelAllWatchesParams(header.elementsOrVersion);
        } finally {
            decoder.decreaseStackDepth();
        }
    }
    @SuppressWarnings("unchecked")
    @Override
    protected final void encode(org.chromium.mojo.bindings.Encoder encoder) {
        // No fields to write; emitting the data header is still required.
        encoder.getEncoderAtDataOffset(DEFAULT_STRUCT_INFO);
    }
    /**
     * @see Object#equals(Object)
     */
    @Override
    public boolean equals(Object object) {
        if (object == this) {
            return true;
        }
        // No fields, so any instance of exactly this class is equal.
        return object != null && getClass() == object.getClass();
    }
    /**
     * @see Object#hashCode()
     */
    @Override
    public int hashCode() {
        return 31 + getClass().hashCode();
    }
}
static final class NfcCancelAllWatchesResponseParams extends org.chromium.mojo.bindings.Struct {
private static final int STRUCT_SIZE = 16;
private static final org.chromium.mojo.bindings.DataHeader[] VERSION_ARRAY = new org.chromium.mojo.bindings.DataHeader[] {new org.chromium.mojo.bindings.DataHeader(16, 0)};
private static final org.chromium.mojo.bindings.DataHeader DEFAULT_STRUCT_INFO = VERSION_ARRAY[0];
public NfcError error;
private NfcCancelAllWatchesResponseParams(int version) {
super(STRUCT_SIZE, version);
}
public NfcCancelAllWatchesResponseParams() {
this(0);
}
public static NfcCancelAllWatchesResponseParams deserialize(org.chromium.mojo.bindings.Message message) {
return decode(new org.chromium.mojo.bindings.Decoder(message));
}
/**
* Similar to the method above, but deserializes from a |ByteBuffer| instance.
*
* @throws org.chromium.mojo.bindings.DeserializationException on deserialization failure.
*/
public static NfcCancelAllWatchesResponseParams deserialize(java.nio.ByteBuffer data) {
if (data == null)
return null;
return deserialize(new org.chromium.mojo.bindings.Message(
data, new java.util.ArrayList<org.chromium.mojo.system.Handle>()));
}
@SuppressWarnings("unchecked")
public static NfcCancelAllWatchesResponseParams decode(org.chromium.mojo.bindings.Decoder decoder0) {
if (decoder0 == null) {
return null;
}
decoder0.increaseStackDepth();
NfcCancelAllWatchesResponseParams result;
try {
org.chromium.mojo.bindings.DataHeader mainDataHeader = decoder0.readAndValidateDataHeader(VERSION_ARRAY);
result = new NfcCancelAllWatchesResponseParams(mainDataHeader.elementsOrVersion);
if (mainDataHeader.elementsOrVersion >= 0) {
org.chromium.mojo.bindings.Decoder decoder1 = decoder0.readPointer(8, true);
result.error = NfcError.decode(decoder1);
}
} finally {
decoder0.decreaseStackDepth();
}
return result;
}
@SuppressWarnings("unchecked")
@Override
protected final void encode(org.chromium.mojo.bindings.Encoder encoder) {
org.chromium.mojo.bindings.Encoder encoder0 = encoder.getEncoderAtDataOffset(DEFAULT_STRUCT_INFO);
encoder0.encode(error, 8, true);
}
/**
* @see Object#equals(Object)
*/
@Override
public boolean equals(Object object) {
if (object == this)
return true;
if (object == null)
return false;
if (getClass() != object.getClass())
return false;
NfcCancelAllWatchesResponseParams other = (NfcCancelAllWatchesResponseParams) object;
if (!org.chromium.mojo.bindings.BindingsHelper.equals(this.error, other.error))
return false;
return true;
}
/**
* @see Object#hashCode()
*/
@Override
public int hashCode() {
final int prime = 31;
int result = prime + getClass().hashCode();
result = prime * result + org.chromium.mojo.bindings.BindingsHelper.hashCode(error);
return result;
}
}
/**
 * Message receiver that validates an incoming response message against
 * {@code CANCEL_ALL_WATCHES_ORDINAL} and forwards its decoded payload to a
 * {@link Nfc.CancelAllWatchesResponse}.
 */
static class NfcCancelAllWatchesResponseParamsForwardToCallback extends org.chromium.mojo.bindings.SideEffectFreeCloseable
        implements org.chromium.mojo.bindings.MessageReceiver {
    private final Nfc.CancelAllWatchesResponse mCallback;
    NfcCancelAllWatchesResponseParamsForwardToCallback(Nfc.CancelAllWatchesResponse callback) {
        this.mCallback = callback;
    }
    // Returns false on a header mismatch or a payload that fails to deserialize.
    @Override
    public boolean accept(org.chromium.mojo.bindings.Message message) {
        try {
            org.chromium.mojo.bindings.ServiceMessage serviceMessage = message.asServiceMessage();
            if (!serviceMessage.getHeader().validateHeader(CANCEL_ALL_WATCHES_ORDINAL,
                    org.chromium.mojo.bindings.MessageHeader.MESSAGE_IS_RESPONSE_FLAG)) {
                return false;
            }
            NfcCancelAllWatchesResponseParams params =
                    NfcCancelAllWatchesResponseParams.deserialize(serviceMessage.getPayload());
            mCallback.call(params.error);
            return true;
        } catch (org.chromium.mojo.bindings.DeserializationException e) {
            return false;
        }
    }
}
/**
 * {@link Nfc.CancelAllWatchesResponse} that serializes the response parameters with a
 * {@code CANCEL_ALL_WATCHES_ORDINAL} response header and hands the message to a receiver.
 */
static class NfcCancelAllWatchesResponseParamsProxyToResponder implements Nfc.CancelAllWatchesResponse {
    private final org.chromium.mojo.system.Core mCore;
    private final org.chromium.mojo.bindings.MessageReceiver mMessageReceiver;
    private final long mRequestId;
    NfcCancelAllWatchesResponseParamsProxyToResponder(
            org.chromium.mojo.system.Core core,
            org.chromium.mojo.bindings.MessageReceiver messageReceiver,
            long requestId) {
        mCore = core;
        mMessageReceiver = messageReceiver;
        mRequestId = requestId;
    }
    @Override
    public void call(NfcError error) {
        NfcCancelAllWatchesResponseParams params = new NfcCancelAllWatchesResponseParams();
        params.error = error;
        // Echo the original request id so the caller can match the response.
        org.chromium.mojo.bindings.MessageHeader header =
                new org.chromium.mojo.bindings.MessageHeader(
                        CANCEL_ALL_WATCHES_ORDINAL,
                        org.chromium.mojo.bindings.MessageHeader.MESSAGE_IS_RESPONSE_FLAG,
                        mRequestId);
        mMessageReceiver.accept(params.serializeWithHeader(mCore, header));
    }
}
// Parameter struct for Nfc.SuspendNfcOperations(). The method takes no
// arguments, so the serialized form is a bare 8-byte data header (version 0).
static final class NfcSuspendNfcOperationsParams extends org.chromium.mojo.bindings.Struct {
private static final int STRUCT_SIZE = 8;
// One entry per struct version; this struct has only version 0.
private static final org.chromium.mojo.bindings.DataHeader[] VERSION_ARRAY = new org.chromium.mojo.bindings.DataHeader[] {new org.chromium.mojo.bindings.DataHeader(8, 0)};
private static final org.chromium.mojo.bindings.DataHeader DEFAULT_STRUCT_INFO = VERSION_ARRAY[0];
private NfcSuspendNfcOperationsParams(int version) {
super(STRUCT_SIZE, version);
}
public NfcSuspendNfcOperationsParams() {
this(0);
}
// Deserializes this struct from a mojo Message.
public static NfcSuspendNfcOperationsParams deserialize(org.chromium.mojo.bindings.Message message) {
return decode(new org.chromium.mojo.bindings.Decoder(message));
}
/**
 * Similar to the method above, but deserializes from a |ByteBuffer| instance.
 *
 * @throws org.chromium.mojo.bindings.DeserializationException on deserialization failure.
 */
public static NfcSuspendNfcOperationsParams deserialize(java.nio.ByteBuffer data) {
if (data == null)
return null;
return deserialize(new org.chromium.mojo.bindings.Message(
data, new java.util.ArrayList<org.chromium.mojo.system.Handle>()));
}
@SuppressWarnings("unchecked")
public static NfcSuspendNfcOperationsParams decode(org.chromium.mojo.bindings.Decoder decoder0) {
if (decoder0 == null) {
return null;
}
// Bound recursion while decoding nested structures.
decoder0.increaseStackDepth();
NfcSuspendNfcOperationsParams result;
try {
org.chromium.mojo.bindings.DataHeader mainDataHeader = decoder0.readAndValidateDataHeader(VERSION_ARRAY);
result = new NfcSuspendNfcOperationsParams(mainDataHeader.elementsOrVersion);
} finally {
decoder0.decreaseStackDepth();
}
return result;
}
@SuppressWarnings("unchecked")
@Override
protected final void encode(org.chromium.mojo.bindings.Encoder encoder) {
// No fields: emit the data header only.
encoder.getEncoderAtDataOffset(DEFAULT_STRUCT_INFO);
}
/**
 * @see Object#equals(Object)
 */
@Override
public boolean equals(Object object) {
if (object == this)
return true;
if (object == null)
return false;
// With no fields, equality reduces to having the same runtime class.
if (getClass() != object.getClass())
return false;
return true;
}
/**
 * @see Object#hashCode()
 */
@Override
public int hashCode() {
final int prime = 31;
int result = prime + getClass().hashCode();
return result;
}
}
// Parameter struct for Nfc.ResumeNfcOperations(). The method takes no
// arguments, so the serialized form is a bare 8-byte data header (version 0).
static final class NfcResumeNfcOperationsParams extends org.chromium.mojo.bindings.Struct {
private static final int STRUCT_SIZE = 8;
// One entry per struct version; this struct has only version 0.
private static final org.chromium.mojo.bindings.DataHeader[] VERSION_ARRAY = new org.chromium.mojo.bindings.DataHeader[] {new org.chromium.mojo.bindings.DataHeader(8, 0)};
private static final org.chromium.mojo.bindings.DataHeader DEFAULT_STRUCT_INFO = VERSION_ARRAY[0];
private NfcResumeNfcOperationsParams(int version) {
super(STRUCT_SIZE, version);
}
public NfcResumeNfcOperationsParams() {
this(0);
}
// Deserializes this struct from a mojo Message.
public static NfcResumeNfcOperationsParams deserialize(org.chromium.mojo.bindings.Message message) {
return decode(new org.chromium.mojo.bindings.Decoder(message));
}
/**
 * Similar to the method above, but deserializes from a |ByteBuffer| instance.
 *
 * @throws org.chromium.mojo.bindings.DeserializationException on deserialization failure.
 */
public static NfcResumeNfcOperationsParams deserialize(java.nio.ByteBuffer data) {
if (data == null)
return null;
return deserialize(new org.chromium.mojo.bindings.Message(
data, new java.util.ArrayList<org.chromium.mojo.system.Handle>()));
}
@SuppressWarnings("unchecked")
public static NfcResumeNfcOperationsParams decode(org.chromium.mojo.bindings.Decoder decoder0) {
if (decoder0 == null) {
return null;
}
// Bound recursion while decoding nested structures.
decoder0.increaseStackDepth();
NfcResumeNfcOperationsParams result;
try {
org.chromium.mojo.bindings.DataHeader mainDataHeader = decoder0.readAndValidateDataHeader(VERSION_ARRAY);
result = new NfcResumeNfcOperationsParams(mainDataHeader.elementsOrVersion);
} finally {
decoder0.decreaseStackDepth();
}
return result;
}
@SuppressWarnings("unchecked")
@Override
protected final void encode(org.chromium.mojo.bindings.Encoder encoder) {
// No fields: emit the data header only.
encoder.getEncoderAtDataOffset(DEFAULT_STRUCT_INFO);
}
/**
 * @see Object#equals(Object)
 */
@Override
public boolean equals(Object object) {
if (object == this)
return true;
if (object == null)
return false;
// With no fields, equality reduces to having the same runtime class.
if (getClass() != object.getClass())
return false;
return true;
}
/**
 * @see Object#hashCode()
 */
@Override
public int hashCode() {
final int prime = 31;
int result = prime + getClass().hashCode();
return result;
}
}
}
| |
package MapGenerator.MainStructure;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Random;
import java.util.Set;
/**
 * A room consists of multiple fields. Each room object has its own level.
 * A room is able to move in the map's grid and change its level in the rooms' hierarchy.
 * @author Lukas Jelinek
 */
public class Room {
    private Map map;                // owning map (project type, not java.util.Map)
    private int roomID;
    private int x, y, level;        // top-left corner in map coordinates; hierarchy level
    private int width, height;
    // For each neighbouring room, the border fields shared with it.
    private HashMap<Room, ArrayList<Field>> boundaries;
    // Local view of the map fields covered by this room; null entries mark
    // coordinates where the map has no field.
    private ArrayList<ArrayList<Field>> fields_grid;
    private Random rand;

    /**
     * @param m reference to map
     * @param rand random generator object
     * @param roomID ID of this room
     * @param x left top corner of room fields
     * @param y left top corner of room fields
     * @param level initial level in the rooms' hierarchy
     * @param width width of the room
     * @param height height of the room
     */
    public Room(Map m, Random rand, int roomID, int x, int y, int level, int width, int height) {
        this.map = m;
        this.rand = rand;
        this.roomID = roomID;
        this.x = x;
        this.y = y;
        this.level = level;
        this.height = height;
        this.width = width;
        boundaries = new HashMap<>(20);
        refresh();
    }

    /**
     * @param height new height of the room
     */
    public void setHeight(int height) {
        this.height = height;
    }

    /**
     * @return height of the room
     */
    public int getHeight() {
        return height;
    }

    /**
     * @return width of the room
     */
    public int getWidth() {
        return width;
    }

    /**
     * @param width new width of the room
     */
    public void setWidth(int width) {
        this.width = width;
    }

    /**
     * @return X coordinate of room's reference frame in map frame
     */
    public int getX() {
        return x;
    }

    /**
     * @param x sets X coordinate of room's reference frame in map frame
     */
    public void setX(int x) {
        this.x = x;
    }

    /**
     * @return Y coordinate of room's reference frame in map frame
     */
    public int getY() {
        return y;
    }

    /**
     * @param y sets Y coordinate of room's reference frame in map frame
     */
    public void setY(int y) {
        this.y = y;
    }

    /**
     * @return current level of the room
     */
    public int getLevel() {
        return level;
    }

    /**
     * @param level sets new level of this room
     */
    public void setLevel(int level) {
        this.level = level;
        // Re-register this room with every covered field so the field sees it
        // under the new level (remove + add presumably re-sorts the field's
        // owner list by level — TODO confirm against Field.addOwner).
        for (int row = 0; row < height; ++row) {
            for (int col = 0; col < width; ++col) {
                Field f = fields_grid.get(row).get(col);
                if (f != null) {
                    f.removeOwner(this);
                    f.addOwner(this);
                }
            }
        }
    }

    /**
     * @return ID of the room
     */
    public int getRoomID() {
        return roomID;
    }

    /**
     * Reallocates the room's local field grid from the map and registers this
     * room as an owner of every covered field.
     */
    private void refresh() {
        fields_grid = new ArrayList<>();
        for (int row = 0; row < height; ++row) {
            fields_grid.add(new ArrayList<>());
            for (int col = 0; col < width; ++col) {
                Field current_field = map.getField(x + col, y + row);
                if (current_field != null) {
                    fields_grid.get(row).add(current_field);
                    current_field.addOwner(this);
                } else {
                    fields_grid.get(row).add(null);
                }
            }
        }
    }

    /**
     * @param other exchange this room with the other room. Their levels are exchanged.
     */
    public void swapWith(Room other) {
        int other_level = other.getLevel();
        other.setLevel(level);
        setLevel(other_level);
    }

    /**
     * Raises the room the lowest possible amount of levels, based on the rooms
     * that are above this room. No-op when the room covers no usable fields.
     */
    public void rise() {
        int higher_level = level;
        Room higher_room = null;
        for (int row = 0; row < height; ++row) {
            for (int col = 0; col < width; ++col) {
                Field f = fields_grid.get(row).get(col);
                if (f == null) continue;
                Room candidate = f.getHigherRoom(this);
                if (candidate == null) continue;    // defensive: no room above on this field
                if (higher_level == level) {
                    // First candidate found: take it unconditionally.
                    higher_room = candidate;
                    higher_level = candidate.getLevel();
                } else if (candidate.getLevel() < higher_level && candidate.getLevel() != level) {
                    // Field [row,col] has some other room above this room in a
                    // lower level than the current best candidate.
                    higher_level = candidate.getLevel();
                    higher_room = candidate;
                }
            }
        }
        // BUGFIX: the original called swapWith(higher_room) even when no
        // candidate was ever found (higher_room == null), causing an NPE.
        if (higher_room != null && higher_room != this)
            swapWith(higher_room);
    }

    /**
     * Lowers the room the lowest possible amount of levels, based on the rooms
     * that are below this room. No-op when the room covers no usable fields.
     */
    public void lower() {
        int lower_level = level;
        Room lower_room = null;
        for (int row = 0; row < height; ++row) {
            for (int col = 0; col < width; ++col) {
                Field f = fields_grid.get(row).get(col);
                if (f == null) continue;
                Room candidate = f.getLowerRoom(this);
                if (candidate == null) continue;    // defensive: no room below on this field
                if (lower_level == level) {
                    // First candidate found: take it unconditionally.
                    lower_room = candidate;
                    lower_level = candidate.getLevel();
                } else if (candidate.getLevel() > lower_level && candidate.getLevel() != level) {
                    // Field [row,col] has some other room below this room in a
                    // higher level than the current best candidate.
                    lower_level = candidate.getLevel();
                    lower_room = candidate;
                }
            }
        }
        // BUGFIX: guard against a missing candidate (see rise()).
        if (lower_room != null && lower_room != this)
            swapWith(lower_room);
    }

    /**
     * Moves this room right in the map's grid (bounded by the map border).
     */
    public void moveRight() {
        if (x + 1 > map.getWidth() - width - 2)
            return;
        // Drop ownership of the column that leaves the room's footprint.
        for (int row = 0; row < height; ++row) {
            if (fields_grid.get(row).get(0) != null) fields_grid.get(row).get(0).removeOwner(this);
        }
        x++;
        refresh();
    }

    /**
     * Moves this room left in the map's grid (bounded by the map border).
     */
    public void moveLeft() {
        if (x - 1 < 2)
            return;
        for (int row = 0; row < height; ++row) {
            if (fields_grid.get(row).get(width - 1) != null) fields_grid.get(row).get(width - 1).removeOwner(this);
        }
        x--;
        refresh();
    }

    /**
     * Moves this room up in the map's grid (bounded by the map border).
     */
    public void moveUp() {
        if (y - 1 < 2)
            return;
        for (int col = 0; col < width; ++col) {
            if (fields_grid.get(height - 1).get(col) != null) fields_grid.get(height - 1).get(col).removeOwner(this);
        }
        y--;
        refresh();
    }

    /**
     * Moves this room down in the map's grid (bounded by the map border).
     */
    public void moveDown() {
        if (y + 1 > map.getHeight() - height - 2)
            return;
        for (int col = 0; col < width; ++col) {
            if (fields_grid.get(0).get(col) != null) fields_grid.get(0).get(col).removeOwner(this);
        }
        y++;
        refresh();
    }

    /**
     * Adds a new neighbour of this room.
     * @param key reference to neighbouring room
     * @param val field which is at the border of this and the key room
     */
    public void addNeighbour(Room key, Field val) {
        if (key == null) return;
        // computeIfAbsent replaces the original containsKey/put dance.
        boundaries.computeIfAbsent(key, k -> new ArrayList<>()).add(val);
    }

    /**
     * Removes all neighbours and their fields.
     */
    public void clearNeighbours() {
        boundaries.clear();
    }

    /**
     * @return all neighbouring room references
     */
    public Set<Room> getNeighbours() {
        return boundaries.keySet();
    }

    /**
     * All fields on the border between this room and room r.
     * @param r room bordering with this room
     * @return all fields at the border of this room and room r, or null if r is not a neighbour
     */
    public ArrayList<Field> getNeighboursFields(Room r) {
        return boundaries.get(r);
    }

    /**
     * Adds a door to one of the border fields between this room and the other room.
     * At most one door is placed per border.
     * @param other neighbour of this room
     * @return true on success
     */
    public boolean addDoorWith(Room other) {
        ArrayList<Field> all_walls = new ArrayList<>();
        if (boundaries.containsKey(other)) {
            all_walls.addAll(boundaries.get(other));
        }
        // The border may be recorded on either side of the relation.
        ArrayList<Field> reverse = other.getNeighboursFields(this);
        if (reverse != null) {
            all_walls.addAll(reverse);
        }
        if (all_walls.isEmpty()) return false;
        // anyMatch short-circuits; the original counted all matches.
        if (all_walls.stream().anyMatch(c -> c.getState() == Field.State.DOOR))
            return false;
        int pos = rand.nextInt(all_walls.size());
        all_walls.get(pos).setState(Field.State.DOOR);
        return true;
    }

    /**
     * @return String with information about all neighbours of this room and their border fields
     */
    @Override
    public String toString() {
        StringBuilder outp = new StringBuilder();
        outp.append(roomID);
        outp.append(" room's neighbours: ");
        getNeighbours().forEach(c -> outp.append(c.getRoomID()).append(","));
        outp.append("\n");
        boundaries.forEach((room, fields) -> {
            outp.append(room.getRoomID()).append("-fields: ");
            fields.forEach(field -> outp.append(field.printCoords()));
        });
        outp.append("\n");
        return outp.toString();
    }
}
| |
/*
* Copyright 2008 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.template.soy.basetree;
import static com.google.common.base.Preconditions.checkNotNull;
import com.google.common.collect.Lists;
import java.util.List;
/**
* Mixin implementation of the parent-specific aspect of the ParentNode interface. Requires the
* master to be a ParentNode.
*
* <p>Important: Do not use outside of Soy code (treat as superpackage-private).
*
* <p>The parameter N represents the interface or class that is the superclass of all possible
* children for the master ParentNode. E.g. for a Soy parse tree node, N is usually SoyNode, but for
* SoyFileSetNode N is SoyFileNode, for SoyFileNode N is TemplateNode, etc; for a Soy expression
* parse tree, N is usually ExprNode.
*/
public final class MixinParentNode<N extends Node> {
  /** Just spaces, appended in chunks when indenting tree strings. */
  private static final String SPACES = "        ";
  /** The master node that delegates to this instance. */
  private final ParentNode<N> master;
  /** The children of the master node (accessed via this instance). */
  private final List<N> children;
  /** @param master The master node that delegates to this instance. */
  public MixinParentNode(ParentNode<N> master) {
    this.master = checkNotNull(master);
    this.children = Lists.newArrayList();
  }
  /**
   * Copy constructor.
   *
   * @param orig The node to copy.
   * @param newMaster The master node for the copy.
   */
  public MixinParentNode(MixinParentNode<N> orig, ParentNode<N> newMaster, CopyState copyState) {
    this.master = checkNotNull(newMaster);
    this.children = Lists.newArrayListWithCapacity(orig.children.size());
    for (N origChild : orig.children) {
      @SuppressWarnings("unchecked")
      N newChild = (N) origChild.copy(copyState);
      this.children.add(newChild);
      newChild.setParent(this.master);
    }
  }
  /**
   * Gets the number of children.
   *
   * @return The number of children.
   */
  public int numChildren() {
    return children.size();
  }
  /**
   * Gets the child at the given index.
   *
   * @param index The index of the child to get.
   * @return The child at the given index.
   */
  public N getChild(int index) {
    return children.get(index);
  }
  /**
   * Finds the index of the given child.
   *
   * @param child The child to find the index of.
   * @return The index of the given child, or -1 if the given child is not a child of this node.
   */
  public int getChildIndex(Node child) {
    return children.indexOf(child);
  }
  /**
   * Gets the list of children.
   *
   * <p>Note: The returned list is not a copy. Please do not modify the list directly. Instead, use
   * the other methods in this class that are intended for modifying children. Also, if you're
   * iterating over the children list as you're modifying it, then you should first make a copy of
   * the children list to iterate over, in order to avoid ConcurrentModificationException.
   *
   * @return The list of children.
   */
  public List<N> getChildren() {
    return children;
  }
  /**
   * Adds the given child.
   *
   * @param child The child to add.
   */
  public void addChild(N child) {
    checkNotNull(child);
    tryRemoveFromOldParent(child);
    children.add(child);
    child.setParent(master);
  }
  /**
   * Adds the given child at the given index (shifting existing children if necessary).
   *
   * @param index The index to add the child at.
   * @param child The child to add.
   */
  public void addChild(int index, N child) {
    checkNotNull(child);
    tryRemoveFromOldParent(child);
    children.add(index, child);
    child.setParent(master);
  }
  /**
   * Removes the child at the given index.
   *
   * @param index The index of the child to remove.
   */
  public void removeChild(int index) {
    N child = children.remove(index);
    child.setParent(null);
  }
  /**
   * Removes the given child.
   *
   * @param child The child to remove.
   */
  public void removeChild(N child) {
    children.remove(child);
    child.setParent(null);
  }
  /**
   * Replaces the child at the given index with the given new child.
   *
   * @param index The index of the child to replace.
   * @param newChild The new child.
   */
  public void replaceChild(int index, N newChild) {
    checkNotNull(newChild);
    tryRemoveFromOldParent(newChild);
    N oldChild = children.set(index, newChild);
    oldChild.setParent(null);
    newChild.setParent(master);
  }
  /**
   * Replaces the given current child with the given new child.
   *
   * @param currChild The current child to be replaced.
   * @param newChild The new child.
   */
  public void replaceChild(N currChild, N newChild) {
    replaceChild(getChildIndex(currChild), newChild);
  }
  /** Clears the list of children. */
  public void clearChildren() {
    for (int i = 0; i < children.size(); i++) {
      children.get(i).setParent(null);
    }
    children.clear();
  }
  /**
   * Adds the given children.
   *
   * @param children The children to add.
   */
  @SuppressWarnings("unchecked")
  public void addChildren(List<? extends N> children) {
    // NOTE: if the input list comes from another node, this could cause
    // ConcurrentModificationExceptions as nodes are moved from one parent to another. To avoid
    // this we make a copy of the input list.
    for (Node child : children.toArray(new Node[0])) {
      addChild((N) child);
    }
  }
  /**
   * Adds the given children at the given index (shifting existing children if necessary).
   *
   * @param index The index to add the children at.
   * @param children The children to add.
   */
  public void addChildren(int index, List<? extends N> children) {
    List<N> origChildren = Lists.newArrayList(this.children);
    int origNumChildren = this.children.size();
    // Temporarily remove the original children from index onward (in reverse order).
    for (int i = origNumChildren - 1; i >= index; i--) {
      removeChild(i);
    }
    // Add the new children.
    addChildren(children);
    // Add back the original children that we temporarily removed (in correct order).
    addChildren(origChildren.subList(index, origNumChildren));
  }
  /**
   * Appends the source strings for all the children to the given StringBuilder.
   *
   * @param sb The StringBuilder to which to append the children's source strings.
   */
  public void appendSourceStringForChildren(StringBuilder sb) {
    for (N child : children) {
      sb.append(child.toSourceString());
    }
  }
  /**
   * Builds a string that visually shows the subtree rooted at this node (for debugging). Each line
   * of the string will be indented by the given indentation amount. You should pass an indentation
   * of 0 unless this method is being called as part of building a larger tree string.
   *
   * @param indent The indentation for each line of the tree string (usually pass 0).
   * @return A string that visually shows the subtree rooted at this node.
   */
  public String toTreeString(int indent) {
    StringBuilder sb = new StringBuilder();
    // BUGFIX: the original did sb.append(SPACES, 0, indent), which throws
    // StringIndexOutOfBoundsException when indent exceeds SPACES.length() (8).
    // Append the indentation in chunks so any non-negative indent works.
    for (int remaining = indent; remaining > 0; remaining -= SPACES.length()) {
      sb.append(SPACES, 0, Math.min(remaining, SPACES.length()));
    }
    sb.append("[").append(master).append("]\n");
    return sb.toString();
  }
  /** Detaches {@code child} from its current parent, if it has one. */
  private static <N extends Node> void tryRemoveFromOldParent(N child) {
    // Java's type system isn't sophisticated enough to type the return value of getParent() but
    // since it is the parent of N we know it can accept N as a child
    @SuppressWarnings("unchecked")
    ParentNode<? super N> oldParent = (ParentNode<? super N>) child.getParent();
    if (oldParent != null) {
      oldParent.removeChild(child);
    }
  }
}
| |
package com.horcu.apps.balln.models.game;
import java.util.HashMap;
import java.util.Map;
import javax.annotation.Generated;
import android.os.Parcel;
import android.os.Parcelable;
import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonPropertyOrder;
import com.horcu.apps.balln.db.horcuDatabase;
import org.apache.commons.lang.builder.EqualsBuilder;
import org.apache.commons.lang.builder.HashCodeBuilder;
import ollie.Model;
import ollie.annotation.Column;
import ollie.annotation.PrimaryKey;
import ollie.annotation.Table;
/**
 * Parcelable persistence model for a scheduled game. Fields are public for the
 * ORM (ollie) but conventional getters/setters are provided as well.
 *
 * <p>Parcel layout: the write order in {@link #writeToParcel} and the read order
 * in {@link #CREATOR} must stay in lockstep.
 */
public class Game extends Model implements Parcelable
{
    public Game() {}

    /** Database identifier. */
    public Long id;
    /** Scheduled start time, as delivered by the feed. */
    public String scheduled;
    /** Home-team rotation string. */
    public String homeRotation;
    /** Away-team rotation string. */
    public String awayRotation;
    /** Foreign key of the home team. */
    public Long homeTeamId;
    /** Foreign key of the away team. */
    public Long awayTeamId;
    /** Venue identifier. */
    public String venueId;
    /** Foreign key of the broadcast record. */
    public Long broadcastId;
    /** Foreign key of the weather record. */
    public Long weatherId;

    /** Recreates instances from a Parcel; read order mirrors writeToParcel. */
    public final static Creator<Game> CREATOR = new Creator<Game>() {
        public Game createFromParcel(Parcel in) {
            Game game = new Game();
            game.id = (Long) in.readValue(Long.class.getClassLoader());
            game.scheduled = (String) in.readValue(String.class.getClassLoader());
            game.homeRotation = (String) in.readValue(String.class.getClassLoader());
            game.awayRotation = (String) in.readValue(String.class.getClassLoader());
            game.homeTeamId = (Long) in.readValue(Long.class.getClassLoader());
            game.awayTeamId = (Long) in.readValue(Long.class.getClassLoader());
            game.venueId = (String) in.readValue(String.class.getClassLoader());
            game.broadcastId = (Long) in.readValue(Long.class.getClassLoader());
            game.weatherId = (Long) in.readValue(Long.class.getClassLoader());
            return game;
        }

        public Game[] newArray(int size) {
            return new Game[size];
        }
    };

    /** @return the database id */
    public Long getId() {
        return id;
    }

    /** @param id the database id */
    public void setId(Long id) {
        this.id = id;
    }

    /** @return the scheduled start time */
    public String getScheduled() {
        return scheduled;
    }

    /** @param scheduled the scheduled start time */
    public void setScheduled(String scheduled) {
        this.scheduled = scheduled;
    }

    /** @return the home-team rotation */
    public String getHomeRotation() {
        return homeRotation;
    }

    /** @param homeRotation the home-team rotation */
    public void setHomeRotation(String homeRotation) {
        this.homeRotation = homeRotation;
    }

    /** @return the away-team rotation */
    public String getAwayRotation() {
        return awayRotation;
    }

    /** @param awayRotation the away-team rotation */
    public void setAwayRotation(String awayRotation) {
        this.awayRotation = awayRotation;
    }

    /** @return the home team's id */
    public Long getHomeTeamId() {
        return homeTeamId;
    }

    /** @param homeTeamId the home team's id */
    public void setHomeTeamId(Long homeTeamId) {
        this.homeTeamId = homeTeamId;
    }

    /** @return the away team's id */
    public Long getAwayTeamId() {
        return awayTeamId;
    }

    /** @param awayTeamId the away team's id */
    public void setAwayTeamId(Long awayTeamId) {
        this.awayTeamId = awayTeamId;
    }

    /** @return the venue id */
    public String getVenueId() {
        return venueId;
    }

    /** @param venueId the venue id */
    public void setVenueId(String venueId) {
        this.venueId = venueId;
    }

    /** @return the broadcast id */
    public Long getBroadcastId() {
        return broadcastId;
    }

    /** @param broadcastId the broadcast id */
    public void setBroadcastId(Long broadcastId) {
        this.broadcastId = broadcastId;
    }

    /** @return the weather id */
    public Long getWeatherId() {
        return weatherId;
    }

    /** @param weatherId the weather id */
    public void setWeatherId(Long weatherId) {
        this.weatherId = weatherId;
    }

    /** Flattens this object into {@code dest}; write order mirrors CREATOR. */
    public void writeToParcel(Parcel dest, int flags) {
        dest.writeValue(id);
        dest.writeValue(scheduled);
        dest.writeValue(homeRotation);
        dest.writeValue(awayRotation);
        dest.writeValue(homeTeamId);
        dest.writeValue(awayTeamId);
        dest.writeValue(venueId);
        dest.writeValue(broadcastId);
        dest.writeValue(weatherId);
    }

    /** No special objects (file descriptors etc.) in the marshalled form. */
    public int describeContents() {
        return 0;
    }
}
| |
// SMSLib for Java v3
// A Java API library for sending and receiving SMS via a GSM modem
// or other supported gateways.
// Web Site: http://www.smslib.org
//
// Copyright (C) 2002-2012, Thanasis Delenikas, Athens/GREECE.
// SMSLib is distributed under the terms of the Apache License version 2.0
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.smslib.http;
import java.io.IOException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.StringTokenizer;
import org.smslib.AGateway;
import org.smslib.GatewayException;
import org.smslib.Message.MessageEncodings;
import org.smslib.OutboundMessage;
import org.smslib.OutboundMessage.FailureCauses;
import org.smslib.OutboundMessage.MessageStatuses;
import org.smslib.StatusReportMessage.DeliveryStatuses;
import org.smslib.TimeoutException;
import org.smslib.helper.Logger;
/**
* Gateway for BulkSMS bulk operator (http://www.bulksms.com) Outbound only -
* implements HTTP interface.
*/
public class BulkSmsHTTPGateway extends HTTPGateway
{
// Regional BulkSMS endpoints; each maps to a provider URL in setRegion().
public enum Regions
{
INTERNATIONAL, UNITEDKINGDOM, SOUTHAFRICA, SPAIN, USA, GERMANY
}
String providerUrl = "http://bulksms.vsms.net:5567";
String username, password;
// Serializes all HTTP exchanges with the provider.
Object SYNC_Commander;
public BulkSmsHTTPGateway(String id, String myUsername, String myPassword)
{
super(id);
setRegion(Regions.INTERNATIONAL);
this.username = myUsername;
this.password = myPassword;
this.SYNC_Commander = new Object();
// Outbound-only gateway supporting custom sender id, long messages and flash SMS.
setAttributes(AGateway.GatewayAttributes.SEND | AGateway.GatewayAttributes.CUSTOMFROM | AGateway.GatewayAttributes.BIGMESSAGES | AGateway.GatewayAttributes.FLASHSMS);
}
public BulkSmsHTTPGateway(String id, String myUsername, String myPassword, Regions region)
{
this(id, myUsername, myPassword);
setRegion(region);
}
@Override
public void startGateway() throws TimeoutException, GatewayException, IOException, InterruptedException
{
Logger.getInstance().logInfo("Starting gateway.", null, getGatewayId());
super.startGateway();
}
@Override
public void stopGateway() throws TimeoutException, GatewayException, IOException, InterruptedException
{
Logger.getInstance().logInfo("Stopping gateway.", null, getGatewayId());
super.stopGateway();
}
// Queries the account's remaining credits via the EAPI get_credits call.
// Responses are pipe-delimited ("status|payload"); a leading '0' means success.
// Returns -1 when the provider reports an error.
@Override
public float queryBalance() throws TimeoutException, GatewayException, IOException, InterruptedException
{
URL url;
List<HttpHeader> request = new ArrayList<HttpHeader>();
List<String> response;
String reqLine;
request.add(new HttpHeader("username", this.username, false));
request.add(new HttpHeader("password", this.password, false));
reqLine = ExpandHttpHeaders(request);
url = new URL(this.providerUrl + "/eapi/user/get_credits/1/1.1" + "?" + reqLine);
synchronized (this.SYNC_Commander)
{
response = HttpGet(url);
}
// "0|<credits>" on success: parse everything after the first '|'.
if (response.get(0).charAt(0) == '0') return Float.parseFloat(response.get(0).substring(response.get(0).indexOf('|') + 1));
return -1;
}
// Polls the delivery status report for a previously-sent batch (refNo is the
// batch_id returned by sendMessage). NOTE(review): the numeric status codes
// below follow the BulkSMS EAPI status-report documentation — confirm against
// the current provider docs before changing.
@Override
public DeliveryStatuses queryMessage(String refNo) throws TimeoutException, GatewayException, IOException, InterruptedException
{
URL url;
List<HttpHeader> request = new ArrayList<HttpHeader>();
List<String> response;
String reqLine;
request.add(new HttpHeader("username", this.username, false));
request.add(new HttpHeader("password", this.password, false));
request.add(new HttpHeader("batch_id", refNo, false));
reqLine = ExpandHttpHeaders(request);
url = new URL(this.providerUrl + "/eapi/status_reports/get_report/2/2.0" + "?" + reqLine);
synchronized (this.SYNC_Commander)
{
response = HttpGet(url);
}
if (response.get(0).indexOf("0|Results to follow") == 0)
{
// Line 2 has the per-recipient report: "<msisdn>|<status_code>|...".
StringTokenizer tokens = new StringTokenizer(response.get(2), "|");
tokens.nextToken();
setDeliveryErrorCode(Integer.parseInt(tokens.nextToken()));
switch (getDeliveryErrorCode())
{
case 11:
return DeliveryStatuses.DELIVERED;
case 0:
case 10:
case 12:
return DeliveryStatuses.KEEPTRYING;
case 63:
case 64:
return DeliveryStatuses.KEEPTRYING;
default:
return DeliveryStatuses.ABORTED;
}
}
return DeliveryStatuses.UNKNOWN;
}
// Submits one outbound message via the EAPI send_sms call, mapping the
// provider's pipe-delimited result onto the message's status/failure fields.
@Override
public boolean sendMessage(OutboundMessage msg) throws TimeoutException, GatewayException, IOException, InterruptedException
{
URL url = null;
List<HttpHeader> request = new ArrayList<HttpHeader>();
List<String> response;
boolean ok = false;
request.add(new HttpHeader("username", this.username, false));
request.add(new HttpHeader("password", this.password, false));
// UCS2 text must be hex-encoded (third HttpHeader argument) and flagged as 16bit.
request.add(new HttpHeader("message", msg.getText(), msg.getEncoding() == MessageEncodings.ENCUCS2));
if (msg.getEncoding() == MessageEncodings.ENCUCS2) request.add(new HttpHeader("dca", "16bit", false));
// The provider expects the msisdn without a leading '+'.
if (msg.getRecipient().charAt(0) == '+') request.add(new HttpHeader("msisdn", msg.getRecipient().substring(1), false));
else request.add(new HttpHeader("msisdn", msg.getRecipient(), false));
request.add(new HttpHeader("allow_concat_text_sms", "1", false));
if (msg.getStatusReport()) request.add(new HttpHeader("want_report", "1", false));
// msg_class 0 = flash SMS (displayed immediately, not stored).
if (msg.getFlashSms()) request.add(new HttpHeader("msg_class", "0", false));
// Per-message sender id wins over the gateway-level default.
if (msg.getFrom() != null && msg.getFrom().length() != 0) request.add(new HttpHeader("source_id", msg.getFrom(), false));
else if (getFrom() != null && getFrom().length() != 0) request.add(new HttpHeader("source_id", getFrom(), false));
url = new URL(this.providerUrl + "/eapi/submission/send_sms/2/2.0");
synchronized (this.SYNC_Commander)
{
response = HttpPost(url, request);
}
// Success response: "0|<description>|<batch_id>"; the batch id becomes the refNo.
if (response.get(0).charAt(0) == '0')
{
StringTokenizer tokens = new StringTokenizer(response.get(0), "|");
tokens.nextToken();
tokens.nextToken();
msg.setRefNo(tokens.nextToken());
msg.setDispatchDate(new Date());
msg.setGatewayId(getGatewayId());
msg.setMessageStatus(MessageStatuses.SENT);
incOutboundMessageCount();
ok = true;
}
else
{
// Failure: first token is the EAPI error code; map it to a failure cause.
StringTokenizer tokens = new StringTokenizer(response.get(0), "|");
switch (Integer.parseInt(tokens.nextToken()))
{
case 22:
msg.setFailureCause(FailureCauses.GATEWAY_FAILURE);
break;
case 23:
msg.setFailureCause(FailureCauses.GATEWAY_AUTH);
break;
case 24:
msg.setFailureCause(FailureCauses.BAD_FORMAT);
break;
case 25:
case 26:
case 27:
case 28:
msg.setFailureCause(FailureCauses.NO_CREDIT);
break;
case 40:
msg.setFailureCause(FailureCauses.GATEWAY_FAILURE);
break;
}
msg.setRefNo(null);
msg.setDispatchDate(null);
msg.setMessageStatus(MessageStatuses.FAILED);
ok = false;
}
return ok;
}
// Points providerUrl at the regional BulkSMS endpoint.
void setRegion(Regions r)
{
switch (r)
{
case INTERNATIONAL:
this.providerUrl = "http://bulksms.vsms.net:5567";
break;
case UNITEDKINGDOM:
this.providerUrl = "http://www.bulksms.co.uk:5567";
break;
case SOUTHAFRICA:
this.providerUrl = "http://bulksms.2way.co.za:5567";
break;
case SPAIN:
this.providerUrl = "http://bulksms.com.es:5567";
break;
case USA:
this.providerUrl = "http://usa.bulksms.com:5567";
break;
case GERMANY:
this.providerUrl = "http://bulksms.de:5567";
break;
}
}
}
| |
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.workbench.screens.guided.dtable.client.widget.table.popovers;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import javax.enterprise.inject.Instance;
import com.ait.lienzo.client.core.shape.Viewport;
import com.ait.lienzo.client.core.types.Transform;
import com.ait.lienzo.test.LienzoMockitoTestRunner;
import com.google.gwt.user.client.ui.AbsolutePanel;
import org.drools.workbench.models.guided.dtable.shared.model.BaseColumn;
import org.drools.workbench.models.guided.dtable.shared.model.ConditionCol52;
import org.drools.workbench.models.guided.dtable.shared.model.DescriptionCol52;
import org.drools.workbench.models.guided.dtable.shared.model.GuidedDecisionTable52;
import org.drools.workbench.models.guided.dtable.shared.model.Pattern52;
import org.drools.workbench.models.guided.dtable.shared.model.RowNumberCol52;
import org.drools.workbench.screens.guided.dtable.client.widget.table.GuidedDecisionTableModellerView;
import org.drools.workbench.screens.guided.dtable.client.widget.table.GuidedDecisionTableView;
import org.drools.workbench.screens.guided.dtable.client.widget.table.popovers.definitions.ColumnDefinitionBuilder;
import org.drools.workbench.screens.guided.dtable.client.widget.table.popovers.definitions.ColumnDefinitionFactory;
import org.drools.workbench.screens.guided.dtable.client.widget.table.popovers.definitions.ConditionCol52DefinitionBuilder;
import org.drools.workbench.screens.guided.dtable.service.GuidedDecisionTableEditorService;
import org.jboss.errai.common.client.api.Caller;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.kie.workbench.common.widgets.client.datamodel.AsyncPackageDataModelOracle;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.Mock;
import org.uberfire.backend.vfs.Path;
import org.uberfire.client.callbacks.Callback;
import org.uberfire.ext.wires.core.grids.client.model.Bounds;
import org.uberfire.ext.wires.core.grids.client.model.GridColumn;
import org.uberfire.ext.wires.core.grids.client.model.impl.BaseBounds;
import org.uberfire.ext.wires.core.grids.client.model.impl.BaseGridColumn;
import org.uberfire.ext.wires.core.grids.client.model.impl.BaseGridData;
import org.uberfire.ext.wires.core.grids.client.model.impl.BaseHeaderMetaData;
import org.uberfire.ext.wires.core.grids.client.widget.grid.columns.RowNumberColumn;
import org.uberfire.ext.wires.core.grids.client.widget.grid.renderers.columns.GridColumnRenderer;
import org.uberfire.ext.wires.core.grids.client.widget.grid.renderers.grids.GridRenderer;
import org.uberfire.ext.wires.core.grids.client.widget.grid.renderers.grids.impl.BaseGridRendererHelper;
import org.uberfire.ext.wires.core.grids.client.widget.layer.GridLayer;
import org.uberfire.mocks.CallerMock;
import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.any;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
/**
 * Tests for {@code ColumnHeaderPopOverImpl}: verifies that the column-header
 * pop-over content is positioned at the expected screen coordinates for
 * columns in the two rendering blocks at 100% and 75% zoom, and that the
 * column-definition source is fetched from the editor service only once per
 * column (i.e. cached across repeated {@code show(...)} calls).
 */
@RunWith(LienzoMockitoTestRunner.class)
public class ColumnHeaderPopOverImplTest {

    // Pop-over view whose show(...)/hide() interactions are verified.
    @Mock
    private PopOverView view;

    @Mock
    private GuidedDecisionTableEditorService service;

    // Synchronous Errai caller wrapping the mocked service.
    private Caller<GuidedDecisionTableEditorService> serviceCaller;

    @Mock
    private GuidedDecisionTableView.Presenter dtPresenter;

    // Real (non-mock) decision table model returned by the presenter stub.
    private GuidedDecisionTable52 model;

    @Mock
    private AsyncPackageDataModelOracle dmo;

    @Mock
    private GuidedDecisionTableModellerView modellerView;

    @Mock
    private GridLayer gridLayer;

    @Mock
    private AbsolutePanel domElementContainer;

    @Mock
    private Viewport viewport;

    // Supplies the zoom (scale) factors; stubbed per test method.
    @Mock
    private Transform transform;

    @Mock
    private GuidedDecisionTableView gridWidget;

    @Mock
    private GridRenderer renderer;

    @Mock
    private GridColumnRenderer<String> columnRenderer;

    @Mock
    private BaseGridRendererHelper rendererHelper;

    // Captures the lazily evaluated content provider handed to the view so
    // tests can pull the content out and assert on its coordinates/text.
    @Captor
    private ArgumentCaptor<PopOverView.ContentProvider> contentProviderArgumentCaptor;

    // Real UI grid model with two columns: a row-number column and a 100px
    // text column labelled "description".
    private BaseGridData uiModel;
    private BaseGridColumn uiColumn1;
    private BaseGridColumn<String> uiColumn2;

    // Visible bounds reported by the grid layer and used by the stubbed
    // rendering information.
    private Bounds bounds = new BaseBounds( -50, -50, 250, 250 );

    // System under test (a Mockito spy around the real implementation).
    private ColumnHeaderPopOver popOver;

    /**
     * Builds a two-column model/UI-model pair plus the stubbed geometry the
     * pop-over uses to compute its on-screen position: a 150x64 grid widget
     * at (50,50), a 64px header made of two 32px rows, and a DOM container
     * whose absolute left edge is at 200px.
     */
    @Before
    @SuppressWarnings("unchecked")
    public void setup() {
        // Relabel the two default expanded columns (row number, description)
        // so the content assertions can match on the header text.
        this.model = new GuidedDecisionTable52();
        this.model.getExpandedColumns().get( 0 ).setHeader( "#" );
        this.model.getExpandedColumns().get( 1 ).setHeader( "description" );
        this.uiColumn1 = new RowNumberColumn();
        this.uiColumn2 = new BaseGridColumn<>( new BaseHeaderMetaData( "description" ),
                                               columnRenderer,
                                               100.0 );
        this.uiModel = new BaseGridData() {{
            setHeaderRowCount( 2 );
        }};
        uiModel.appendColumn( uiColumn1 );
        uiModel.appendColumn( uiColumn2 );

        // Synchronous caller; the service renders any column as "source".
        serviceCaller = new CallerMock<>( service );
        when( service.toSource( any( Path.class ),
                                any( GuidedDecisionTable52.class ) ) ).thenReturn( "source" );

        final Instance<ColumnDefinitionBuilder> buildersInstance = makeBuildersInstance();
        final ColumnDefinitionFactory columnDefinitionFactory = new ColumnDefinitionFactory( buildersInstance );

        when( renderer.getHeaderHeight() ).thenReturn( 64.0 );
        when( renderer.getHeaderRowHeight() ).thenReturn( 32.0 );
        when( dtPresenter.getView() ).thenReturn( gridWidget );
        when( dtPresenter.getModel() ).thenReturn( model );
        when( dtPresenter.getDataModelOracle() ).thenReturn( dmo );
        when( modellerView.getGridLayerView() ).thenReturn( gridLayer );
        when( gridLayer.getDomElementContainer() ).thenReturn( domElementContainer );
        when( gridLayer.getVisibleBounds() ).thenReturn( bounds );
        when( gridLayer.getViewport() ).thenReturn( viewport );
        when( gridWidget.getModel() ).thenReturn( uiModel );
        when( gridWidget.getViewport() ).thenReturn( viewport );
        when( gridWidget.getRenderer() ).thenReturn( renderer );
        when( gridWidget.getRendererHelper() ).thenReturn( rendererHelper );
        when( gridWidget.getWidth() ).thenReturn( 150.0 );
        when( gridWidget.getHeight() ).thenReturn( 64.0 );
        when( gridWidget.getX() ).thenReturn( 50.0 );
        when( gridWidget.getY() ).thenReturn( 50.0 );
        when( domElementContainer.getAbsoluteLeft() ).thenReturn( 200 );
        when( viewport.getTransform() ).thenReturn( transform );
        when( rendererHelper.getColumnOffset( uiColumn1 ) ).thenReturn( 0.0 );
        when( rendererHelper.getColumnOffset( uiColumn2 ) ).thenReturn( uiColumn1.getWidth() );

        // Rendering information: one block contains uiColumn2 and the other
        // uiColumn1. NOTE(review): which positional constructor argument is
        // the "body" vs "floating" block is assumed from the test method
        // names below — confirm against BaseGridRendererHelper.
        final BaseGridRendererHelper.RenderingInformation ri = new BaseGridRendererHelper.RenderingInformation( bounds,
                                                                                                                uiModel.getColumns(),
                                                                                                                new BaseGridRendererHelper.RenderingBlockInformation(
                                                                                                                        new ArrayList<GridColumn<?>>() {{
                                                                                                                            add( uiColumn2 );
                                                                                                                        }},
                                                                                                                        0.0,
                                                                                                                        0.0,
                                                                                                                        0.0,
                                                                                                                        100.0 ),
                                                                                                                new BaseGridRendererHelper.RenderingBlockInformation(
                                                                                                                        new ArrayList<GridColumn<?>>() {{
                                                                                                                            add( uiColumn1 );
                                                                                                                        }},
                                                                                                                        25.0,
                                                                                                                        0.0,
                                                                                                                        0.0,
                                                                                                                        50.0 ),
                                                                                                                0,
                                                                                                                0,
                                                                                                                Collections.<Double>emptyList(),
                                                                                                                false,
                                                                                                                false,
                                                                                                                0,
                                                                                                                2,
                                                                                                                0 );
        when( rendererHelper.getRenderingInformation() ).thenReturn( ri );

        final ColumnHeaderPopOver wrapped = new ColumnHeaderPopOverImpl( view,
                                                                         columnDefinitionFactory );
        this.popOver = spy( wrapped );
    }

    /** hide() must delegate straight to the view. */
    @Test
    public void hideView() {
        popOver.hide();
        verify( view,
                times( 1 ) ).hide();
    }

    /** Column 0 ("#") at 100% zoom: content appears at (350, 148). */
    @Test
    public void showColumnHeaderPositioningFloatingBlockColumns_Scale100pct() {
        when( transform.getScaleX() ).thenReturn( 1.0 );
        when( transform.getScaleY() ).thenReturn( 1.0 );

        popOver.show( modellerView,
                      dtPresenter,
                      0 );

        verify( view,
                times( 1 ) ).show( contentProviderArgumentCaptor.capture() );

        final PopOverView.ContentProvider contentProvider = contentProviderArgumentCaptor.getValue();
        contentProvider.getContent( ( PopOverView.Content content ) -> {
                                        assertEquals( 350,
                                                      content.getX() );
                                        assertEquals( 148,
                                                      content.getY() );
                                        assertEquals( "#",
                                                      content.getContent() );
                                    }
        );
    }

    /** Column 1 ("description") at 100% zoom: content appears at (400, 148). */
    @Test
    public void showColumnHeaderPositioningBodyBlockColumns_Scale100pct() {
        when( transform.getScaleX() ).thenReturn( 1.0 );
        when( transform.getScaleY() ).thenReturn( 1.0 );

        popOver.show( modellerView,
                      dtPresenter,
                      1 );

        verify( view,
                times( 1 ) ).show( contentProviderArgumentCaptor.capture() );

        final PopOverView.ContentProvider contentProvider = contentProviderArgumentCaptor.getValue();
        contentProvider.getContent( ( PopOverView.Content content ) -> {
                                        assertEquals( 400,
                                                      content.getX() );
                                        assertEquals( 148,
                                                      content.getY() );
                                        assertEquals( "description",
                                                      content.getContent() );
                                    }
        );
    }

    /** Column 0 ("#") at 75% zoom: coordinates shrink to (312, 111). */
    @Test
    public void showColumnHeaderPositioningFloatingBlockColumns_Scale75pct() {
        when( transform.getScaleX() ).thenReturn( 0.75 );
        when( transform.getScaleY() ).thenReturn( 0.75 );

        popOver.show( modellerView,
                      dtPresenter,
                      0 );

        verify( view,
                times( 1 ) ).show( contentProviderArgumentCaptor.capture() );

        final PopOverView.ContentProvider contentProvider = contentProviderArgumentCaptor.getValue();
        contentProvider.getContent( ( PopOverView.Content content ) -> {
                                        assertEquals( 312,
                                                      content.getX() );
                                        assertEquals( 111,
                                                      content.getY() );
                                        assertEquals( "#",
                                                      content.getContent() );
                                    }
        );
    }

    /** Column 1 ("description") at 75% zoom: coordinates shrink to (350, 111). */
    @Test
    public void showColumnHeaderPositioningBodyBlockColumns_Scale75pct() {
        when( transform.getScaleX() ).thenReturn( 0.75 );
        when( transform.getScaleY() ).thenReturn( 0.75 );

        popOver.show( modellerView,
                      dtPresenter,
                      1 );

        verify( view,
                times( 1 ) ).show( contentProviderArgumentCaptor.capture() );

        final PopOverView.ContentProvider contentProvider = contentProviderArgumentCaptor.getValue();
        contentProvider.getContent( ( PopOverView.Content content ) -> {
                                        assertEquals( 350,
                                                      content.getX() );
                                        assertEquals( 111,
                                                      content.getY() );
                                        assertEquals( "description",
                                                      content.getContent() );
                                    }
        );
    }

    /**
     * Showing the same condition column twice must show the view twice but
     * call the (expensive) toSource service only once — the generated
     * definition is expected to be cached by the implementation.
     */
    @Test
    public void showColumnServiceInvocation() {
        // Add a third, condition column whose definition is built via the
        // ConditionCol52DefinitionBuilder and hence the remote service.
        final Pattern52 p = new Pattern52();
        p.getChildColumns().add( new ConditionCol52() );
        this.model.getConditions().add( p );

        final BaseGridColumn<String> uiColumn3 = new BaseGridColumn<>( new BaseHeaderMetaData( "condition" ),
                                                                       columnRenderer,
                                                                       100.0 );
        uiModel.appendColumn( uiColumn3 );

        when( transform.getScaleX() ).thenReturn( 1.0 );
        when( transform.getScaleY() ).thenReturn( 1.0 );

        popOver.show( modellerView,
                      dtPresenter,
                      2 );

        verify( view,
                times( 1 ) ).show( contentProviderArgumentCaptor.capture() );

        popOver.show( modellerView,
                      dtPresenter,
                      2 );

        verify( view,
                times( 2 ) ).show( contentProviderArgumentCaptor.capture() );

        //Emulate Timer execution
        final PopOverView.ContentProvider contentProvider = contentProviderArgumentCaptor.getValue();
        contentProvider.getContent( ( PopOverView.Content content ) -> {
                                        assertEquals( "source",
                                                      content.getContent() );
                                    }
        );

        // Second show must have been served from cache: one service call.
        verify( service,
                times( 1 ) ).toSource( any( Path.class ),
                                       any( GuidedDecisionTable52.class ) );
    }

    /**
     * Assembles the builders the ColumnDefinitionFactory dispatches on:
     * a real ConditionCol52 builder (backed by the mocked service) plus two
     * trivial builders that just echo the column header for row-number and
     * description columns.
     */
    private Instance<ColumnDefinitionBuilder> makeBuildersInstance() {
        final List<ColumnDefinitionBuilder> builders = new ArrayList<>();
        builders.add( new ConditionCol52DefinitionBuilder( serviceCaller ) );
        builders.add( new ColumnDefinitionBuilder() {
            @Override
            public Class getSupportedColumnType() {
                return RowNumberCol52.class;
            }

            @Override
            public void generateDefinition( final GuidedDecisionTableView.Presenter dtPresenter,
                                            final BaseColumn column,
                                            final Callback<String> afterGenerationCallback ) {
                afterGenerationCallback.callback( column.getHeader() );
            }
        } );
        builders.add( new ColumnDefinitionBuilder() {
            @Override
            public Class getSupportedColumnType() {
                return DescriptionCol52.class;
            }

            @Override
            public void generateDefinition( final GuidedDecisionTableView.Presenter dtPresenter,
                                            final BaseColumn column,
                                            final Callback<String> afterGenerationCallback ) {
                afterGenerationCallback.callback( column.getHeader() );
            }
        } );
        return new MockInstanceImpl<>( builders );
    }
}
| |
package org.sfu.chase.core;
import java.awt.event.ActionEvent;
import java.io.File;
import org.sfu.chase.core.ClustFramework;
import org.sfu.chase.gui.PChasePainter;
import org.sfu.chase.input.DataModel;
import org.sfu.chase.input.InputDialog;
import org.sfu.chase.util.UpdateManager;
import still.data.MemoryTable;
import still.data.Operator;
import still.data.Table;
import still.operators.BasicOp;
/**
 * ChAsE (Chromatin Analysis and Exploration) operator: wires a
 * {@link ClustFramework} clustering framework into the host application's
 * operator/view pipeline ({@code BasicOp}/{@code BasicView}) and drives the
 * input/open/save workspace dialogs.
 */
public class ChaseOp extends BasicOp
{
    private static final long serialVersionUID = 5118434778904392340L;

    // Clustering framework holding the table data, regions and cluster state.
    public ClustFramework m_Framework;

    // Optional path to a GFF regions file (example paths kept from the author).
    private static String m_gffFilePath;// = "/Users/hyounesy/SFU/Research/BioVis/data/Brad/Allenhancers_nopromok4me3.gff";
    // "/Users/hyounesy/SFU/research/BioVis/Brad/Allenhancers_nopromok4me3.gff"; //SFU
    // "/Users/hyounesy/_wig/tss_hg19_+-3000_noNeighbors.gff"

    // Number of cluster groups passed to the framework when built from a Table.
    private static int m_NumGroups = 19;

    // Shared input dialog; initialised in main(). NOTE(review): the
    // DataModel constructor, modifyInput(), openDialog() and saveWorkspace()
    // all dereference this without a null check — confirm every entry point
    // goes through main() first.
    static InputDialog m_InputDialog;

    /**
     * Creates the operator from an existing table.
     * NOTE(review): {@code paramString} is currently ignored.
     */
    public ChaseOp( Table newTable, boolean isActive, String paramString )
    {
        this( newTable, isActive );
    }

    /**
     * Creates the operator from an existing table, builds a fresh framework
     * over it (with the default group count), loads regions from the static
     * GFF path and opens the operator view.
     */
    public ChaseOp( Table newTable, boolean isActive )
    {
        super(newTable, isActive);
        m_Framework = new ClustFramework();
        m_Framework.setTable(input, m_NumGroups);
        m_Framework.readRegions(m_gffFilePath);
        loadOperatorView();
    }

    /**
     * Creates the operator from the input dialog's data model, restoring a
     * previously saved workspace if the dialog's workspace file exists.
     */
    public ChaseOp(DataModel dataModel)
    {
        super(new MemoryTable(dataModel.getDataTable().getData(), null), true);
        m_Framework = new ClustFramework();
        m_Framework.setTable(dataModel, true);
        // Restore saved state when the workspace file is present on disk.
        if ((new File(m_InputDialog.getWorkspaceFilename()).exists())) {
            m_Framework.loadFramework(m_InputDialog.getWorkspaceFilename());
        }
        loadOperatorView();
    }

    /** Menu entry used by the host application to expose this operator. */
    public static String getMenuName()
    {
        return "View:chaseFunc";
    }

    public String toString()
    {
        return "[EpiClust]";
    }

    /** No serialised parameters; the operator saves nothing in-line. */
    public String getSaveString( ) {
        return "";
    }

    /** Activates the operator: rebuilds its function/view state lazily. */
    public void activate()
    {
        this.isActive = true;
        this.updateMap();
        function = new BasicFunction(this);
        isLazy = true;
        setView( new ChaseView( this ) );
    }

    @Override
    protected void computeNewColumns()
    {
        //TODO: create/recompute the operator output here
        if (m_Framework != null)
        {
            m_Framework.setTable(input, m_NumGroups);
            // (PchasePainter)(((chaseView)this.view).m_Painter).
        }
        // No derived columns are produced yet.
        m_NewColumns = null;
    }

    @Override
    public void loadOperatorView()
    {
        // comment out if no view
        setView( new ChaseView( this ) );
    }

    /**
     * Operator view hosting the ChAsE painter ({@link PChasePainter}).
     */
    public class ChaseView extends BasicOp.BasicView
    {
        private static final long serialVersionUID = 4698263995759097051L;

        public ChaseView(Operator op)
        {
            super(op);
            init();
        }

        /** Creates the painter and hands it the enclosing op's framework. */
        @Override
        protected void createPainter(Operator op)
        {
            m_Painter = new PChasePainter(op);
            ((PChasePainter)m_Painter).setFramework(m_Framework);
        }

        @Override
        protected void buildGUI()
        {
            this.removeAll();
            m_Painter.frame.setTitle("ChAsE: Chromatin Analysis and Exploration Tool");
            //m_Painter.frame.setExtendedState(Frame.MAXIMIZED_BOTH);
        }

        @Override
        public void actionPerformed(ActionEvent e)
        {
            //TODO: code to handle the GUI actions
        }

        public PChasePainter getChasePainter()
        {
            return (PChasePainter)m_Painter;
        }
    }

    /**
     * Shows the modal input dialog and, if the user confirmed (non-zero
     * result), pushes the (possibly changed) data model into the framework
     * and refreshes the painter. Clusters are reset only when the dialog
     * reports that the regions were modified.
     *
     * @return true if the dialog was confirmed and the input updated
     */
    public boolean modifyInput()
    {
        int result = m_InputDialog.showModal();
        if (result != 0)
        {
            // Result is a bit mask; MODIFIED_REGIONS forces a re-cluster.
            boolean bResetClusters = (result & InputDialog.MODIFIED_REGIONS) != 0;
            m_Framework.setTable(m_InputDialog.getDataModel(), bResetClusters);
            PChasePainter chasePainter = ((ChaseView)getView()).getChasePainter();
            chasePainter.refreshWorkspace(bResetClusters);
            //((ChaseView)getView()).getChasePainter().frame.setTitle(m_InputDialog.getRegionName);
            return true;
        }
        return false;
    }

    /**
     * Shows the open dialog; on confirmation updates the framework from the
     * dialog's data model, reloads a saved workspace when the workspace file
     * exists, then refreshes the painter.
     *
     * @return true if the dialog was confirmed
     */
    public boolean openDialog()
    {
        int result = m_InputDialog.showOpenDialog();
        if (result != 0)
        {
            boolean bResetClusters = (result & InputDialog.MODIFIED_REGIONS) != 0;
            m_Framework.setTable(m_InputDialog.getDataModel(), bResetClusters);
            PChasePainter chasePainter = ((ChaseView)getView()).getChasePainter();
            if ((new File(m_InputDialog.getWorkspaceFilename()).exists())) {
                m_Framework.loadFramework(m_InputDialog.getWorkspaceFilename());
            }
            chasePainter.refreshWorkspace(bResetClusters);
            return true;
        }
        return false;
    }

    /** Persists the framework state to the dialog's workspace file. */
    public void saveWorkspace()
    {
        m_Framework.saveFramework(m_InputDialog.getWorkspaceFilename());
    }

    /**
     * Standalone entry point: checks for updates, shows the input dialog and
     * builds the operator from the resulting data model, or exits if the
     * dialog was cancelled.
     */
    public static void main( final String[] args )
    {
        /*
        // not working after Java 1.6: https://developer.apple.com/library/mac/documentation/Java/Conceptual/Java14Development/07-NativePlatformIntegration/NativePlatformIntegration.html
        // http://stackoverflow.com/questions/3154638/setting-java-swing-application-name-on-mac
        try {
            // take the menu bar off the jframe
            System.setProperty("apple.laf.useScreenMenuBar", "true");
            // set the name of the application menu item
            System.setProperty("com.apple.mrj.application.apple.menu.about.name", "ChAsE");
            // set the look and feel
            UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName());
        } catch (Exception ex) {
            ex.printStackTrace();
        }
        */
        UpdateManager.checkForUpdates();
        m_InputDialog = new InputDialog();
        if (m_InputDialog.showModal() != 0)
        {
            @SuppressWarnings("unused")
            ChaseOp op = new ChaseOp(m_InputDialog.getDataModel());
        }
        else
        {
            System.exit(0);
        }
    }
}
| |
package org.grapheco.elfinder.controller.executors;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletRequest;
import org.apache.commons.fileupload.FileItemStream;
import org.apache.log4j.Logger;
import org.json.JSONObject;
import org.grapheco.elfinder.controller.MultipleUploadItems;
import org.grapheco.elfinder.controller.executor.AbstractJsonCommandExecutor;
import org.grapheco.elfinder.controller.executor.CommandExecutor;
import org.grapheco.elfinder.controller.executor.FsItemEx;
import org.grapheco.elfinder.service.FsItemFilter;
import org.grapheco.elfinder.service.FsService;
/**
 * elFinder "upload" command: saves uploaded files into the target directory.
 * Supports both plain multipart uploads and elFinder's chunked uploads, where
 * a large file arrives as N ".part" requests that are buffered (as
 * FileItemStream parts) in the ServletContext and concatenated once complete.
 */
public class UploadCommandExecutor extends AbstractJsonCommandExecutor
		implements CommandExecutor
{
	Logger _logger = Logger.getLogger(this.getClass());

	// large file will be splitted into many parts
	/** One chunk of a large file: byte offset, size and its form-field stream. */
	class Part
	{
		long _start;
		long _size;
		FileItemStream _content;

		public Part(long start, long size, FileItemStream fileItemStream)
		{
			super();
			this._start = start;
			this._size = size;
			this._content = fileItemStream;
		}
	}

	// a large file with many parts
	/**
	 * Accumulates the chunks of one file across several HTTP requests.
	 * Instances live in the ServletContext under key
	 * "chunk_&lt;cid&gt;_&lt;fileName&gt;" until the file is assembled.
	 */
	static class Parts
	{
		/**
		 * Looks up (or creates and registers) the accumulator for the given
		 * chunk id + file name. Synchronized on the class so two concurrent
		 * chunk requests for the same file share a single instance.
		 */
		public static synchronized Parts getOrCreate(
				HttpServletRequest request, String chunkId, String fileName,
				long total, long totalSize)
		{
			//chunkId is not an unique number for files uploaded in one upload form
			String key = String.format("chunk_%s_%s", chunkId, fileName);
			// stores chunks in application context
			Parts parts = (Parts) request.getServletContext().getAttribute(key);
			if (parts == null)
			{
				parts = new Parts(chunkId, fileName, total, totalSize);
				request.getServletContext().setAttribute(key, parts);
			}
			return parts;
		}

		private String _chunkId;
		// number of parts
		private long _numberOfParts;
		private long _totalSize;
		private String _fileName;
		// all chunks
		// NOTE(review): addPart is synchronized but isReady/openInputStream/
		// checkParts read _parts unsynchronized — confirm visibility is
		// acceptable for concurrent chunk requests.
		Map<Long, Part> _parts = new HashMap<Long, Part>();

		public Parts(String chunkId, String fileName, long numberOfParts,
				long totalSize)
		{
			_chunkId = chunkId;
			_fileName = fileName;
			_numberOfParts = numberOfParts;
			_totalSize = totalSize;
		}

		/** Records chunk {@code partIndex} (0-based). */
		public synchronized void addPart(long partIndex, Part part)
		{
			_parts.put(partIndex, part);
		}

		/** True once every expected chunk has been received. */
		public boolean isReady()
		{
			return _parts.size() == _numberOfParts;
		}

		/**
		 * Returns a stream that concatenates all parts in index order,
		 * opening each part's stream lazily and closing it before moving to
		 * the next. Callers should only invoke this after {@link #isReady()}
		 * — part 0 is opened eagerly and missing indices would NPE.
		 */
		public InputStream openInputStream() throws IOException
		{
			return new InputStream()
			{
				long partIndex = 0;
				Part part = _parts.get(partIndex);
				InputStream is = part._content.openStream();

				@Override
				public int read() throws IOException
				{
					while (true)
					{
						// current part is not read completely
						int c = is.read();
						if (c != -1)
						{
							return c;
						}
						// next part?
						if (partIndex == _numberOfParts - 1)
						{
							is.close();
							return -1;
						}
						part = _parts.get(++partIndex);
						is.close();
						is = part._content.openStream();
					}
				}
			};
		}

		/**
		 * Verifies the declared part sizes sum to the expected total file
		 * size; throws IOException on mismatch.
		 */
		public void checkParts() throws IOException
		{
			long totalSize = 0;
			for (long i = 0; i < _numberOfParts; i++)
			{
				Part part = _parts.get(i);
				totalSize += part._size;
			}
			if (totalSize != _totalSize)
				throw new IOException(String.format(
						"invalid file size: excepted %d, but is %d",
						_totalSize, totalSize));
		}

		/** Drops this accumulator from the ServletContext once assembled. */
		public void removeFromApplicationContext(HttpServletRequest request)
		{
			String key = String.format("chunk_%s_%s", _chunkId, _fileName);
			request.getServletContext().removeAttribute(key);
		}
	}

	/** Strategy for persisting one uploaded stream as a new file item. */
	interface FileWriter
	{
		FsItemEx createAndSave(String fileName, InputStream is)
				throws IOException;
	}

	/**
	 * Handles the upload request: resolves the target directory, then either
	 * processes a chunked upload (request has a "cid" parameter) or saves
	 * each plain "upload[]" item directly. Newly added items that pass the
	 * requested filter are reported back in the JSON "added" array.
	 */
	@Override
	public void execute(FsService fsService, HttpServletRequest request,
			ServletContext servletContext, JSONObject json) throws Exception
	{
		MultipleUploadItems uploads = MultipleUploadItems.loadFrom(request);

		final List<FsItemEx> added = new ArrayList<FsItemEx>();
		String target = request.getParameter("target");
		final FsItemEx dir = super.findItem(fsService, target);
		final FsItemFilter filter = getRequestedFilter(request);
		FileWriter fw = new FileWriter()
		{
			@Override
			public FsItemEx createAndSave(String fileName, InputStream is)
					throws IOException
			{
				// fis.getName() returns full path such as 'C:\temp\abc.txt' in
				// IE10
				// while returns 'abc.txt' in Chrome
				// see
				// https://github.com/bluejoe2008/elfinder-2.x-servlet/issues/22
				java.nio.file.Path p = java.nio.file.Paths.get(fileName);
				FsItemEx newFile = new FsItemEx(dir, p.getFileName().toString());
				/*
				 * String fileName = fis.getName(); FsItemEx newFile = new
				 * FsItemEx(dir, fileName);
				 */
				newFile.createFile();
				newFile.writeStream(is);
				if (filter.accepts(newFile))
					added.add(newFile);
				return newFile;
			}
		};

		// chunked upload
		if (request.getParameter("cid") != null)
		{
			processChunkUpload(request, uploads, fw);
		}
		else
		{
			processUpload(uploads, fw);
		}
		json.put("added", files2JsonArray(request, added));
	}

	/**
	 * Registers one chunk of a chunked upload. The "chunk" parameter names
	 * the part as "&lt;fileName&gt;.&lt;index&gt;_&lt;lastIndex&gt;.part" and
	 * "range" is "start,size,totalSize". When the final chunk arrives the
	 * parts are size-checked, concatenated, saved, and evicted from the
	 * application context.
	 */
	private void processChunkUpload(HttpServletRequest request,
			MultipleUploadItems uploads, FileWriter fw)
			throws NumberFormatException, IOException
	{
		// cid : unique id of chunked uploading file
		String cid = request.getParameter("cid");
		// solr-5.5.2.tgz.48_65.part
		String chunk = request.getParameter("chunk");
		// 100270176,2088962,136813192
		String range = request.getParameter("range");
		String[] tokens = range.split(",");

		Matcher m = Pattern.compile("(.*)\\.([0-9]+)\\_([0-9]+)\\.part")
				.matcher(chunk);
		if (m.find())
		{
			String fileName = m.group(1);
			long index = Long.parseLong(m.group(2));
			long total = Long.parseLong(m.group(3));
			// "total" in the chunk name is the last 0-based index, hence +1.
			Parts parts = Parts.getOrCreate(request, cid, fileName, total + 1,
					Long.parseLong(tokens[2]));

			long start = Long.parseLong(tokens[0]);
			long size = Long.parseLong(tokens[1]);
			_logger.debug(String.format("uploaded part(%d/%d) of file: %s",
					index, total, fileName));
			parts.addPart(index, new Part(start, size, uploads
					.items("upload[]").get(0)));
			_logger.debug(String.format(">>>>%d", parts._parts.size()));
			if (parts.isReady())
			{
				parts.checkParts();

				_logger.debug(String.format("file is uploadded completely: %s",
						fileName));
				fw.createAndSave(fileName, parts.openInputStream());

				// remove from application context
				parts.removeFromApplicationContext(request);
			}
		}
	}

	/** Saves each plain (non-chunked) "upload[]" item as its own file. */
	private void processUpload(MultipleUploadItems uploads, FileWriter fw)
			throws IOException
	{
		for (FileItemStream fis : uploads.items("upload[]"))
		{
			fw.createAndSave(fis.getName(), fis.openStream());
		}
	}
}
| |
/*
* Copyright 1999-2010 Luca Garulli (l.garulli--at--orientechnologies.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.orientechnologies.orient.test.database.auto;
import java.io.IOException;
import org.testng.Assert;
import org.testng.annotations.Parameters;
import org.testng.annotations.Test;
import com.orientechnologies.orient.client.db.ODatabaseHelper;
import com.orientechnologies.orient.client.remote.OStorageRemoteThread;
import com.orientechnologies.orient.core.db.ODatabase;
import com.orientechnologies.orient.core.db.ODatabaseListener;
import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx;
import com.orientechnologies.orient.core.tx.OTransaction.TXTYPE;
import com.orientechnologies.orient.enterprise.channel.binary.OChannelBinaryProtocol;
import com.orientechnologies.orient.enterprise.channel.binary.ORemoteServerEventListener;
import com.orientechnologies.orient.server.handler.distributed.OClusterProtocol;
/**
 * Tests the right calls of all the db's listener API.
 *
 * A counter field is incremented by {@link DbListener} for each callback, and
 * each test asserts the exact number of callbacks fired by a scripted
 * create/open/commit/rollback/close/delete sequence. Embedded and remote
 * databases are exercised by separate tests, selected on the URL scheme.
 *
 * @author Sylvain Spinelli
 *
 */
public class DbListenerTest {
	protected ODatabaseDocumentTx database;
	protected String dbUrl;

	// Callback counters, incremented by DbListener and asserted by the tests.
	protected int onAfterTxCommit = 0;
	protected int onAfterTxRollback = 0;
	protected int onBeforeTxBegin = 0;
	protected int onBeforeTxCommit = 0;
	protected int onBeforeTxRollback = 0;
	protected int onClose = 0;
	protected int onCreate = 0;
	protected int onDelete = 0;
	protected int onOpen = 0;
	protected int onCorruption = 0;
	protected int onRecordPulled = 0;
	protected int onClusterConfigurationChange = 0;
	protected int onAvailableDatabaseChange = 0;

	/** Listener that only tallies each lifecycle callback into the counters. */
	public class DbListener implements ODatabaseListener {
		public void onAfterTxCommit(ODatabase iDatabase) {
			onAfterTxCommit++;
		}

		public void onAfterTxRollback(ODatabase iDatabase) {
			onAfterTxRollback++;
		}

		public void onBeforeTxBegin(ODatabase iDatabase) {
			onBeforeTxBegin++;
		}

		public void onBeforeTxCommit(ODatabase iDatabase) {
			onBeforeTxCommit++;
		}

		public void onBeforeTxRollback(ODatabase iDatabase) {
			onBeforeTxRollback++;
		}

		public void onClose(ODatabase iDatabase) {
			onClose++;
		}

		public void onCreate(ODatabase iDatabase) {
			onCreate++;
		}

		public void onDelete(ODatabase iDatabase) {
			onDelete++;
		}

		public void onOpen(ODatabase iDatabase) {
			onOpen++;
		}

		public boolean onCorruptionRepairDatabase(ODatabase iDatabase, final String iReason, String iWhatWillbeFixed) {
			onCorruption++;
			// Always authorise the repair attempt.
			return true;
		}
	}

	@Parameters(value = "url")
	public DbListenerTest(String iURL) {
		dbUrl = iURL;
		database = new ODatabaseDocumentTx(iURL);
	}

	/**
	 * Embedded-only: create/close/reopen the database, then run one committed
	 * and one rolled-back transaction, then delete — asserting each callback
	 * fires exactly the expected number of times.
	 * NOTE(review): a second listener is registered after close(); the
	 * assertions only hold if closing the database clears previously
	 * registered listeners — confirm against the ODatabase implementation.
	 */
	@Test
	public void testEmbeddedDbListeners() throws IOException {
		if (database.getURL().startsWith("remote:"))
			return;

		if (database.exists())
			ODatabaseHelper.deleteDatabase(database);

		database.registerListener(new DbListener());

		ODatabaseHelper.createDatabase(database, dbUrl);
		Assert.assertEquals(onCreate, 1);

		database.close();
		Assert.assertEquals(onClose, 1);

		database.registerListener(new DbListener());

		database.open("admin", "admin");
		Assert.assertEquals(onOpen, 1);

		database.begin(TXTYPE.OPTIMISTIC);
		Assert.assertEquals(onBeforeTxBegin, 1);

		database.newInstance().save();
		database.commit();
		Assert.assertEquals(onBeforeTxCommit, 1);
		Assert.assertEquals(onAfterTxCommit, 1);

		database.begin(TXTYPE.OPTIMISTIC);
		Assert.assertEquals(onBeforeTxBegin, 2);

		database.newInstance().save();
		database.rollback();
		Assert.assertEquals(onBeforeTxRollback, 1);
		Assert.assertEquals(onAfterTxRollback, 1);

		ODatabaseHelper.deleteDatabase(database);
		Assert.assertEquals(onClose, 2);
		Assert.assertEquals(onDelete, 1);
	}

	/**
	 * Remote-only variant: open an existing remote database and verify the
	 * open/begin/commit/rollback/close callbacks fire once (twice for begin).
	 */
	@Test
	public void testRemoteDbListeners() throws IOException {
		if (!database.getURL().startsWith("remote:"))
			return;

		database.registerListener(new DbListener());

		database.open("admin", "admin");
		Assert.assertEquals(onOpen, 1);

		database.begin(TXTYPE.OPTIMISTIC);
		Assert.assertEquals(onBeforeTxBegin, 1);

		database.newInstance().save();
		database.commit();
		Assert.assertEquals(onBeforeTxCommit, 1);
		Assert.assertEquals(onAfterTxCommit, 1);

		database.begin(TXTYPE.OPTIMISTIC);
		Assert.assertEquals(onBeforeTxBegin, 2);

		database.newInstance().save();
		database.rollback();
		Assert.assertEquals(onBeforeTxRollback, 1);
		Assert.assertEquals(onAfterTxRollback, 1);

		database.close();
		Assert.assertEquals(onClose, 1);
	}

	/**
	 * Remote-only: installs an asynchronous server push-event listener that
	 * tallies pushed records and available-database changes. No assertions —
	 * this only verifies the listener can be installed without error.
	 */
	@Test
	public void testAsynchEventListeners() throws IOException {
		if (!database.getURL().startsWith("remote:"))
			return;

		database.open("admin", "admin");

		((OStorageRemoteThread) database.getStorage()).setRemoteServerEventListener(new ORemoteServerEventListener() {

			public void onRequest(byte iRequestCode, Object iObject) {
				switch (iRequestCode) {
				case OChannelBinaryProtocol.REQUEST_PUSH_RECORD:
					onRecordPulled++;
					break;

				// case OBinaryProtocol.PUSH_NODE2CLIENT_DB_CONFIG:
				// onClusterConfigurationChange++;
				// break;

				case OClusterProtocol.PUSH_LEADER_AVAILABLE_DBS:
					onAvailableDatabaseChange++;
					break;
				}
			}
		});

		database.close();
	}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.